package edu.neu.ccs.pyramid.experiment;
import edu.neu.ccs.pyramid.configuration.Config;
import edu.neu.ccs.pyramid.dataset.*;
import edu.neu.ccs.pyramid.elasticsearch.ESIndex;
import edu.neu.ccs.pyramid.elasticsearch.MultiLabelIndex;
import edu.neu.ccs.pyramid.elasticsearch.SingleLabelIndex;
import edu.neu.ccs.pyramid.eval.Accuracy;
import edu.neu.ccs.pyramid.feature.*;
import edu.neu.ccs.pyramid.feature_extraction.*;
import edu.neu.ccs.pyramid.multilabel_classification.imlgb.IMLGBConfig;
import edu.neu.ccs.pyramid.multilabel_classification.imlgb.IMLGradientBoosting;
import edu.neu.ccs.pyramid.util.Pair;
import edu.neu.ccs.pyramid.util.Sampling;
import org.apache.commons.lang3.time.StopWatch;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.index.query.MatchQueryBuilder;
import org.elasticsearch.search.SearchHit;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.*;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
public class Exp15 {
/**
 * Entry point: reads the config file named by the single CLI argument,
 * opens the Elasticsearch-backed index, runs the build pipeline, and
 * closes the index.
 *
 * Fix: the index is now closed in a finally block, so the ES client is
 * released (and the JVM can exit cleanly) even when build() throws.
 */
public static void main(String[] args) throws Exception{
    if (args.length != 1){
        throw new IllegalArgumentException("please specify the config file");
    }
    Config config = new Config(args[0]);
    System.out.println(config);
    MultiLabelIndex index = loadIndex(config);
    try {
        build(config, index);
    } finally {
        index.close();
    }
}
/**
 * Builds an ES-backed multi-label index handle from the "index.*" config
 * entries. Transport clients additionally need explicit host/port lists.
 */
static MultiLabelIndex loadIndex(Config config) throws Exception{
    MultiLabelIndex.Builder indexBuilder = new MultiLabelIndex.Builder()
            .setIndexName(config.getString("index.indexName"))
            .setClusterName(config.getString("index.clusterName"))
            .setClientType(config.getString("index.clientType"))
            .setExtMultiLabelField(config.getString("index.extMultiLabelField"))
            .setDocumentType(config.getString("index.documentType"));
    if (config.getString("index.clientType").equals("transport")){
        // transport clients connect to explicit host:port pairs
        String[] hostList = config.getString("index.hosts").split(Pattern.quote(","));
        String[] portList = config.getString("index.ports").split(Pattern.quote(","));
        indexBuilder.addHostsAndPorts(hostList, portList);
    }
    MultiLabelIndex loaded = indexBuilder.build();
    System.out.println("index loaded");
    System.out.println("there are "+loaded.getNumDocs()+" documents in the index.");
    return loaded;
}
/**
 * Selects the training document ids, either by honoring a fixed split
 * field on each document ("fixed") or by uniform random sampling
 * ("random"). Throws for any other split fashion.
 */
static String[] sampleTrain(Config config, MultiLabelIndex index){
    int docCount = index.getNumDocs();
    String fashion = config.getString("split.fashion");
    String[] trainIds;
    if (fashion.equalsIgnoreCase("fixed")){
        // keep every document whose split field says "train"
        String splitField = config.getString("index.splitField");
        List<String> kept = new ArrayList<>();
        for (int docId = 0; docId < docCount; docId++){
            if (index.getStringField("" + docId, splitField).equalsIgnoreCase("train")){
                kept.add("" + docId);
            }
        }
        trainIds = kept.toArray(new String[0]);
    } else if (fashion.equalsIgnoreCase("random")){
        // uniform sample of the configured percentage of all documents
        int[] sampled = Sampling.sampleByPercentage(docCount,
                config.getDouble("split.random.trainPercentage"));
        trainIds = new String[sampled.length];
        for (int pos = 0; pos < sampled.length; pos++){
            trainIds[pos] = "" + sampled[pos];
        }
    } else {
        throw new RuntimeException("illegal split fashion");
    }
    return trainIds;
}
/**
 * Returns the ids 0..numDocsInIndex-1 that are NOT among trainIndexIds,
 * i.e. the test split. The result order is unspecified (it comes from a
 * HashSet), matching the original behavior.
 *
 * Fix: removeAll is given a HashSet instead of an ArrayList, so element
 * lookups are O(1) regardless of which side AbstractSet.removeAll chooses
 * to iterate; also removes the manual element-copy loop.
 */
static String[] sampleTest(int numDocsInIndex, String[] trainIndexIds){
    Set<String> test = new HashSet<>(numDocsInIndex);
    for (int i = 0; i < numDocsInIndex; i++){
        test.add("" + i);
    }
    test.removeAll(new HashSet<>(Arrays.asList(trainIndexIds)));
    return test.toArray(new String[0]);
}
/**
 * Builds a translator mapping each array position (internal algorithm id)
 * to the corresponding index id (external id).
 */
static IdTranslator loadIdTranslator(String[] indexIds) throws Exception{
    IdTranslator translator = new IdTranslator();
    int intId = 0;
    for (String extId : indexIds){
        translator.addData(intId, "" + extId);
        intId++;
    }
    return translator;
}
/**
 * Registers one feature mapper per index field whose name starts with the
 * configured feature prefix: string fields become categorical features
 * (optionally filtered by cardinality), everything else numerical.
 * Mutates featureMappers in place.
 */
static void addInitialFeatures(Config config, MultiLabelIndex index,
                               FeatureMappers featureMappers,
                               String[] ids) throws Exception{
    String prefix = config.getString("index.featureFieldPrefix");
    List<String> featureFields = index.listAllFields().stream()
            .filter(field -> field.startsWith(prefix))
            .collect(Collectors.toList());
    System.out.println("all possible initial features:"+featureFields);
    for (String field : featureFields){
        if (index.getFieldType(field).equalsIgnoreCase("string")){
            CategoricalFeatureMapperBuilder categBuilder = new CategoricalFeatureMapperBuilder();
            categBuilder.setFeatureName(field);
            categBuilder.setStart(featureMappers.nextAvailable());
            categBuilder.setSource("field");
            for (String id : ids){
                String category = index.getStringField(id, field);
                // missing value is not a category
                if (!category.equals(ESIndex.STRING_MISSING_VALUE)){
                    categBuilder.addCategory(category);
                }
            }
            CategoricalFeatureMapper mapper = categBuilder.build();
            boolean keep = true;
            if (config.getBoolean("categFeature.filter")){
                // optionally drop high-cardinality categorical fields
                double threshold = config.getDouble("categFeature.percentThreshold");
                int numCategories = mapper.getNumCategories();
                if (numCategories > ids.length * threshold){
                    keep = false;
                    System.out.println("field "+field+" has too many categories "
                            +"("+numCategories+"), omitted.");
                }
            }
            if (keep){
                featureMappers.addMapper(mapper);
            }
        } else {
            // non-string fields map straight to a single numerical column
            NumericalFeatureMapperBuilder numBuilder = new NumericalFeatureMapperBuilder();
            numBuilder.setFeatureName(field);
            numBuilder.setFeatureIndex(featureMappers.nextAvailable());
            numBuilder.setSource("field");
            featureMappers.addMapper(numBuilder.build());
        }
    }
}
//todo keep track of feature types(numerical /binary)
// Materializes a (numDataPoints x totalDim) multi-label data set:
// labels are read per document from the index; feature columns are filled
// according to the registered categorical/numerical feature mappers.
static MultiLabelClfDataSet loadData(Config config, MultiLabelIndex index,
FeatureMappers featureMappers,
IdTranslator idTranslator, int totalDim,
LabelTranslator labelTranslator) throws Exception{
int numDataPoints = idTranslator.numData();
int numClasses = labelTranslator.getNumClasses();
MultiLabelClfDataSet dataSet;
// backing storage is chosen by config; both variants share the interface
if(config.getBoolean("featureMatrix.sparse")){
dataSet= new SparseMLClfDataSet(numDataPoints,totalDim,numClasses);
} else {
dataSet= new DenseMLClfDataSet(numDataPoints,totalDim,numClasses);
}
// attach every external label of each document
for(int i=0;i<numDataPoints;i++){
String dataIndexId = idTranslator.toExtId(i);
List<String> extMultiLabel = index.getExtMultiLabel(dataIndexId);
for (String extLabel: extMultiLabel){
int intLabel = labelTranslator.toIntLabel(extLabel);
dataSet.addLabel(i,intLabel);
}
}
String[] dataIndexIds = idTranslator.getAllExtIds();
// Fill categorical (one-hot) columns.
// NOTE(review): mappers run in parallel while writing into the shared dataSet;
// this assumes setFeatureValue is safe for concurrent writes to different
// columns -- confirm against the DataSet implementation.
featureMappers.getCategoricalFeatureMappers().stream().parallel().
forEach(categoricalFeatureMapper -> {
String featureName = categoricalFeatureMapper.getFeatureName();
String source = categoricalFeatureMapper.getSource();
if (source.equalsIgnoreCase("field")){
for (String id: dataIndexIds){
int algorithmId = idTranslator.toIntId(id);
String category = index.getStringField(id,featureName);
// if a value is missing, set nan
if (category.equals(ESIndex.STRING_MISSING_VALUE)){
for (int featureIndex=categoricalFeatureMapper.getStart();featureIndex<categoricalFeatureMapper.getEnd();featureIndex++){
dataSet.setFeatureValue(algorithmId,featureIndex,Double.NaN);
}
}
// might be a new category unseen in training
if (categoricalFeatureMapper.hasCategory(category)){
int featureIndex = categoricalFeatureMapper.getFeatureIndex(category);
dataSet.setFeatureValue(algorithmId,featureIndex,1);
}
}
}
});
// Fill numerical columns, either straight from an index field or from
// Elasticsearch matching scores.
featureMappers.getNumericalFeatureMappers().stream().parallel().
forEach(numericalFeatureMapper -> {
String featureName = numericalFeatureMapper.getFeatureName();
String source = numericalFeatureMapper.getSource();
int featureIndex = numericalFeatureMapper.getFeatureIndex();
if (source.equalsIgnoreCase("field")){
for (String id: dataIndexIds){
int algorithmId = idTranslator.toIntId(id);
// if it is missing, it is nan automatically
float value = index.getFloatField(id,featureName);
dataSet.setFeatureValue(algorithmId,featureIndex,value);
}
}
if (source.equalsIgnoreCase("matching_score")){
SearchResponse response = null;
//todo assume unigram, so slop doesn't matter
response = index.matchPhrase(index.getBodyField(), featureName, dataIndexIds, 0);
SearchHit[] hits = response.getHits().getHits();
// documents with no hit simply keep their default feature value
for (SearchHit hit: hits){
String indexId = hit.getId();
float score = hit.getScore();
int algorithmId = idTranslator.toIntId(indexId);
dataSet.setFeatureValue(algorithmId,featureIndex,score);
}
}
});
DataSetUtil.setIdTranslator(dataSet, idTranslator);
DataSetUtil.setLabelTranslator(dataSet, labelTranslator);
return dataSet;
}
/**
 * Creates the training matrix. It is allocated at the configured maximum
 * width ("maxNumColumns") so that feature extraction during boosting can
 * keep appending columns without reallocation.
 */
static MultiLabelClfDataSet loadTrainData(Config config, MultiLabelIndex index, FeatureMappers featureMappers,
                                          IdTranslator idTranslator, LabelTranslator labelTranslator) throws Exception{
    int totalDim = config.getInt("maxNumColumns");
    System.out.println("creating training set");
    System.out.println("allocating "+totalDim+" columns for training set");
    MultiLabelClfDataSet trainSet =
            loadData(config, index, featureMappers, idTranslator, totalDim, labelTranslator);
    System.out.println("training set created");
    return trainSet;
}
/**
 * Creates the test matrix. Unlike the training matrix it only needs the
 * columns that actually exist after training (featureMappers' total dim).
 */
static MultiLabelClfDataSet loadTestData(Config config, MultiLabelIndex index,
                                         FeatureMappers featureMappers, IdTranslator idTranslator,
                                         LabelTranslator labelTranslator) throws Exception{
    System.out.println("creating test set");
    int width = featureMappers.getTotalDim();
    MultiLabelClfDataSet testSet =
            loadData(config, index, featureMappers, idTranslator, width, labelTranslator);
    System.out.println("test set created");
    return testSet;
}
// static void showDistribution(Config config, ClfDataSet dataSet, Map<Integer, String> labelTranslator){
// int numClasses = labelTranslator.size();
// int[] counts = new int[numClasses];
// int[] labels = dataSet.getLabels();
// for (int i=0;i<dataSet.getNumDataPoints();i++){
// int label = labels[i];
// counts[label] += 1;
// System.out.println("label distribution:");
// for (int i=0;i<numClasses;i++){
// System.out.print(i+"("+labelTranslator.get(i)+"):"+counts[i]+", ");
// System.out.println("");
/**
 * Persists the data set in TREC format under the archive folder, together
 * with its data/feature settings dumps.
 */
static void saveDataSet(Config config, MultiLabelClfDataSet dataSet, String name) throws Exception{
    File dataFile = new File(config.getString("archive.folder"), name);
    TRECFormat.save(dataSet, dataFile);
    DataSetUtil.dumpDataSettings(dataSet, new File(dataFile, "data_settings.txt"));
    DataSetUtil.dumpFeatureSettings(dataSet, new File(dataFile, "feature_settings.txt"));
    System.out.println("data set saved to "+dataFile.getAbsolutePath());
}
/**
 * Dumps the configured fields for the training split; the dump file lives
 * next to the saved training set inside the archive folder.
 */
static void dumpTrainFields(Config config, MultiLabelIndex index, IdTranslator idTranslator) throws Exception{
    File setFolder = new File(config.getString("archive.folder"),
            config.getString("archive.trainingSet"));
    String dumpFile = new File(setFolder, "dumped_fields.txt").getAbsolutePath();
    dumpFields(config, index, idTranslator, dumpFile);
}
/**
 * Dumps the configured fields for the test split; the dump file lives
 * next to the saved test set inside the archive folder.
 */
static void dumpTestFields(Config config, MultiLabelIndex index, IdTranslator idTranslator) throws Exception{
    File setFolder = new File(config.getString("archive.folder"),
            config.getString("archive.testSet"));
    String dumpFile = new File(setFolder, "dumped_fields.txt").getAbsolutePath();
    dumpFields(config, index, idTranslator, dumpFile);
}
/**
 * Writes one line per document: "intId=I,extId=E,field1=...,field2=..."
 * for the fields listed in "archive.dumpedFields" (comma-separated).
 *
 * Fix: writes with an explicit UTF-8 charset via Files.newBufferedWriter;
 * the previous FileWriter used the platform-default charset, producing
 * different bytes on different machines for non-ASCII field values.
 */
static void dumpFields(Config config, MultiLabelIndex index, IdTranslator idTranslator, String fileName) throws Exception{
    String[] fields = config.getString("archive.dumpedFields").split(",");
    int numDocs = idTranslator.numData();
    try (BufferedWriter bw = Files.newBufferedWriter(Paths.get(fileName), StandardCharsets.UTF_8)){
        for (int intId = 0; intId < numDocs; intId++){
            String extId = idTranslator.toExtId(intId);
            StringBuilder line = new StringBuilder();
            line.append("intId=").append(intId).append(",");
            line.append("extId=").append(extId).append(",");
            for (int i = 0; i < fields.length; i++){
                line.append(fields[i]).append("=").append(index.getStringField(extId, fields[i]));
                // no trailing comma after the last field
                if (i != fields.length - 1){
                    line.append(",");
                }
            }
            line.append("\n");
            bw.write(line.toString());
        }
    }
}
// Trains an IMLGradientBoosting model while interleaving feature extraction:
// in selected iterations, new term/phrase matching-score features are mined
// from per-class "easy" and/or "hard" example sets and appended to dataSet and
// featureMappers in place; finally the model is serialized into the archive
// folder and training accuracy is reported.
static void trainModel(Config config, MultiLabelClfDataSet dataSet, FeatureMappers featureMappers,
MultiLabelIndex index, IdTranslator trainIdTranslator) throws Exception{
String archive = config.getString("archive.folder");
int numIterations = config.getInt("train.numIterations");
int numClasses = dataSet.getNumClasses();
int numLeaves = config.getInt("train.numLeaves");
double learningRate = config.getDouble("train.learningRate");
int trainMinDataPerLeaf = config.getInt("train.minDataPerLeaf");
String modelName = config.getString("archive.model");
boolean overwriteModels = config.getBoolean("train.overwriteModels");
int numDocsToSelect = config.getInt("extraction.numDocsToSelect");
int numNgramsToExtract = config.getInt("extraction.numNgramsToExtract");
// probability that any given boosting iteration runs feature extraction
double extractionFrequency = config.getDouble("extraction.frequency");
if (extractionFrequency>1 || extractionFrequency<0){
throw new IllegalArgumentException("0<=extraction.frequency<=1");
}
LabelTranslator labelTranslator = dataSet.getSetting().getLabelTranslator();
StopWatch stopWatch = new StopWatch();
stopWatch.start();
System.out.println("extracting features");
IMLGBConfig imlgbConfig = new IMLGBConfig.Builder(dataSet)
.learningRate(learningRate).minDataPerLeaf(trainMinDataPerLeaf)
.numLeaves(numLeaves)
.build();
IMLGradientBoosting boosting = new IMLGradientBoosting(numClasses);
boosting.setPriorProbs(dataSet);
boosting.setTrainConfig(imlgbConfig);
// extractor for single terms, driven by tf-idf-based splits
TermTfidfSplitExtractor tfidfSplitExtractor = new TermTfidfSplitExtractor(index,
trainIdTranslator,numNgramsToExtract).
setMinDf(config.getInt("extraction.tfidfSplitExtractor.minDf")).
setNumSurvivors(config.getInt("extraction.tfidfSplitExtractor.numSurvivors")).
setMinDataPerLeaf(config.getInt("extraction.tfidfSplitExtractor.minDataPerLeaf"));
// extractor for multi-word phrases
PhraseSplitExtractor phraseSplitExtractor = new PhraseSplitExtractor(index,trainIdTranslator)
.setMinDataPerLeaf(config.getInt("extraction.phraseSplitExtractor.minDataPerLeaf"))
.setMinDf(config.getInt("extraction.phraseSplitExtractor.minDf"))
.setTopN(config.getInt("extraction.phraseSplitExtractor.topN"));
System.out.println("loading initial seeds...");
// per-class seed terms come from document-frequency statistics on the training set
DFStats dfStats = loadDFStats(index,trainIdTranslator,labelTranslator);
List<Set<String>> seedsForAllClasses = new ArrayList<>();
for (int i=0;i<numClasses;i++){
Set<String> set = new HashSet<>();
set.addAll(dfStats.getSortedTerms(i,config.getInt("extraction.seeds.minDf"),
config.getInt("extraction.seeds.numPerClass")));
seedsForAllClasses.add(set);
}
System.out.println("seeds loaded");
// terms/phrases already used as features; never extracted again
Set<String> blackList = new HashSet<>();
//start the matrix with the seeds
//may have duplicates, but should not be a big deal
for(Set<String> seeds: seedsForAllClasses){
for (String term: seeds){
int featureIndex = featureMappers.nextAvailable();
// the term's ES matching scores over training docs form the new feature column
SearchResponse response = index.match(index.getBodyField(),
term,trainIdTranslator.getAllExtIds(), MatchQueryBuilder.Operator.AND);
for (SearchHit hit: response.getHits().getHits()){
String indexId = hit.getId();
int algorithmId = trainIdTranslator.toIntId(indexId);
float score = hit.getScore();
dataSet.setFeatureValue(algorithmId, featureIndex,score);
}
NumericalFeatureMapper mapper = NumericalFeatureMapper.getBuilder().
setFeatureIndex(featureIndex).setFeatureName(term).
setSource("matching_score").build();
featureMappers.addMapper(mapper);
blackList.add(term);
}
}
// //todo
// List<Integer> validationSet = new ArrayList<>();
// for (int i=0;i<trainIndex.getNumDocs();i++){
// validationSet.add(i);
for (int iteration=0;iteration<numIterations;iteration++){
System.out.println("iteration "+iteration);
boosting.calGradients();
// condition1: the pre-allocated matrix still has room for the worst-case
// number of new columns this round could add (terms + phrases, both sets)
boolean condition1 = (featureMappers.getTotalDim()
+numNgramsToExtract*numClasses*2
+config.getInt("extraction.phraseSplitExtractor.topN")*numClasses*2
<dataSet.getNumFeatures());
// condition2: random coin flip with probability extractionFrequency
boolean condition2 = (Math.random()<extractionFrequency);
//should start with some feature
boolean condition3 = (iteration==0);
boolean shouldExtractFeatures = condition1&&condition2||condition3;
if (!shouldExtractFeatures){
if (!condition1){
System.out.println("we have reached the max number of columns " +
"and will not extract new features");
}
if (!condition2){
System.out.println("no feature extraction is scheduled for this round");
}
}
/**
* from easy set
*/
if (shouldExtractFeatures&&config.getBoolean("extraction.fromEasySet")){
//generate easy set
// per class: the numDocsToSelect positive-gradient docs with the SMALLEST gradients
FocusSet focusSet = new FocusSet(numClasses);
for (int k=0;k<numClasses;k++){
double[] gradient = boosting.getGradients(k);
Comparator<Pair<Integer,Double>> comparator = Comparator.comparing(Pair::getSecond);
List<Integer> easyExamples = IntStream.range(0,gradient.length)
.mapToObj(i -> new Pair<>(i,gradient[i]))
.filter(pair -> pair.getSecond()>0)
.sorted(comparator)
.limit(numDocsToSelect)
.map(Pair::getFirst)
.collect(Collectors.toList());
for(Integer doc: easyExamples){
focusSet.add(doc,k);
}
}
List<Integer> validationSet = focusSet.getAll();
for (int k=0;k<numClasses;k++){
double[] allGradients = boosting.getGradients(k);
List<Double> gradientsForValidation = validationSet.stream()
.map(i -> allGradients[i]).collect(Collectors.toList());
List<String> goodTerms = null;
goodTerms = tfidfSplitExtractor.getGoodTerms(focusSet,
validationSet,
blackList, k, gradientsForValidation);
seedsForAllClasses.get(k).addAll(goodTerms);
List<String> focusSetIndexIds = focusSet.getDataClassK(k)
.parallelStream().map(trainIdTranslator::toExtId)
.collect(Collectors.toList());
System.out.println("easy set for class " +k+ "("+labelTranslator.toExtLabel(k)+ "):");
System.out.println(focusSetIndexIds.toString());
System.out.println("terms extracted from easy set for class " + k+" ("+labelTranslator.toExtLabel(k)+"):");
System.out.println(goodTerms);
//phrases
System.out.println("seeds for class " +k+ "("+labelTranslator.toExtLabel(k)+ "):");
System.out.println(seedsForAllClasses.get(k));
List<String> goodPhrases = phraseSplitExtractor.getGoodPhrases(focusSet,validationSet,blackList,k,
gradientsForValidation,seedsForAllClasses.get(k));
System.out.println("phrases extracted from easy set for class " + k+" ("+labelTranslator.toExtLabel(k)+"):");
System.out.println(goodPhrases);
blackList.addAll(goodPhrases);
// append one matching-score column per extracted term
for (String ngram:goodTerms){
int featureIndex = featureMappers.nextAvailable();
SearchResponse response = index.match(index.getBodyField(),
ngram,trainIdTranslator.getAllExtIds(), MatchQueryBuilder.Operator.AND);
for (SearchHit hit: response.getHits().getHits()){
String indexId = hit.getId();
int algorithmId = trainIdTranslator.toIntId(indexId);
float score = hit.getScore();
dataSet.setFeatureValue(algorithmId, featureIndex,score);
}
NumericalFeatureMapper mapper = NumericalFeatureMapper.getBuilder().
setFeatureIndex(featureIndex).setFeatureName(ngram).
setSource("matching_score").build();
featureMappers.addMapper(mapper);
blackList.add(ngram);
}
// append one matching-score column per extracted phrase
// (phrases were already blacklisted via blackList.addAll(goodPhrases) above)
for (String phrase:goodPhrases){
int featureIndex = featureMappers.nextAvailable();
SearchResponse response = index.matchPhrase(index.getBodyField(),
phrase,trainIdTranslator.getAllExtIds(), 0);
for (SearchHit hit: response.getHits().getHits()){
String indexId = hit.getId();
int algorithmId = trainIdTranslator.toIntId(indexId);
float score = hit.getScore();
dataSet.setFeatureValue(algorithmId, featureIndex,score);
}
NumericalFeatureMapper mapper = NumericalFeatureMapper.getBuilder().
setFeatureIndex(featureIndex).setFeatureName(phrase).
setSource("matching_score").build();
featureMappers.addMapper(mapper);
}
}
}
/**
* focus set
*/
if (shouldExtractFeatures&&config.getBoolean("extraction.fromHardSet")){
//generate focus set
// mirror of the easy-set block, but picks the LARGEST positive gradients
// (comparator.reversed()) and logs "hard set" instead of "easy set"
FocusSet focusSet = new FocusSet(numClasses);
for (int k=0;k<numClasses;k++){
double[] gradient = boosting.getGradients(k);
Comparator<Pair<Integer,Double>> comparator = Comparator.comparing(Pair::getSecond);
List<Integer> hardExamples = IntStream.range(0,gradient.length)
.mapToObj(i -> new Pair<>(i,gradient[i]))
.filter(pair -> pair.getSecond()>0)
.sorted(comparator.reversed())
.limit(numDocsToSelect)
.map(Pair::getFirst)
.collect(Collectors.toList());
for(Integer doc: hardExamples){
focusSet.add(doc,k);
}
}
List<Integer> validationSet = focusSet.getAll();
for (int k=0;k<numClasses;k++){
double[] allGradients = boosting.getGradients(k);
List<Double> gradientsForValidation = validationSet.stream()
.map(i -> allGradients[i]).collect(Collectors.toList());
List<String> goodTerms = null;
goodTerms = tfidfSplitExtractor.getGoodTerms(focusSet,
validationSet,
blackList, k, gradientsForValidation);
seedsForAllClasses.get(k).addAll(goodTerms);
List<String> focusSetIndexIds = focusSet.getDataClassK(k)
.parallelStream().map(trainIdTranslator::toExtId)
.collect(Collectors.toList());
System.out.println("hard set for class " +k+ "("+labelTranslator.toExtLabel(k)+ "):");
System.out.println(focusSetIndexIds.toString());
System.out.println("terms extracted from hard set for class " + k+" ("+labelTranslator.toExtLabel(k)+"):");
System.out.println(goodTerms);
//phrases
System.out.println("seeds for class " +k+ "("+labelTranslator.toExtLabel(k)+ "):");
System.out.println(seedsForAllClasses.get(k));
List<String> goodPhrases = phraseSplitExtractor.getGoodPhrases(focusSet,validationSet,blackList,k,
gradientsForValidation,seedsForAllClasses.get(k));
System.out.println("phrases extracted from hard set for class " + k+" ("+labelTranslator.toExtLabel(k)+"):");
System.out.println(goodPhrases);
blackList.addAll(goodPhrases);
// append one matching-score column per extracted term
for (String ngram:goodTerms){
int featureIndex = featureMappers.nextAvailable();
SearchResponse response = index.match(index.getBodyField(),
ngram,trainIdTranslator.getAllExtIds(), MatchQueryBuilder.Operator.AND);
for (SearchHit hit: response.getHits().getHits()){
String indexId = hit.getId();
int algorithmId = trainIdTranslator.toIntId(indexId);
float score = hit.getScore();
dataSet.setFeatureValue(algorithmId, featureIndex,score);
}
NumericalFeatureMapper mapper = NumericalFeatureMapper.getBuilder().
setFeatureIndex(featureIndex).setFeatureName(ngram).
setSource("matching_score").build();
featureMappers.addMapper(mapper);
blackList.add(ngram);
}
// append one matching-score column per extracted phrase
for (String phrase:goodPhrases){
int featureIndex = featureMappers.nextAvailable();
SearchResponse response = index.matchPhrase(index.getBodyField(),
phrase,trainIdTranslator.getAllExtIds(), 0);
for (SearchHit hit: response.getHits().getHits()){
String indexId = hit.getId();
int algorithmId = trainIdTranslator.toIntId(indexId);
float score = hit.getScore();
dataSet.setFeatureValue(algorithmId, featureIndex,score);
}
NumericalFeatureMapper mapper = NumericalFeatureMapper.getBuilder().
setFeatureIndex(featureIndex).setFeatureName(phrase).
setSource("matching_score").build();
featureMappers.addMapper(mapper);
}
}
}
// fit this round's regression trees on every column added so far
int[] activeFeatures = IntStream.range(0, featureMappers.getTotalDim()).toArray();
boosting.setActiveFeatures(activeFeatures);
boosting.fitRegressors();
}
File serializedModel = new File(archive,modelName);
// refuse to clobber an existing model unless explicitly allowed
if (!overwriteModels && serializedModel.exists()){
throw new RuntimeException(serializedModel.getAbsolutePath()+"already exists");
}
boosting.serialize(serializedModel);
System.out.println("model saved to "+serializedModel.getAbsolutePath());
System.out.println("accuracy on training set = "+ Accuracy.accuracy(boosting,
dataSet));
System.out.println("time spent = "+stopWatch);
}
/**
 * Collects per-class document-frequency statistics over the training
 * documents and returns them sorted.
 */
static DFStats loadDFStats(MultiLabelIndex index, IdTranslator trainIdTranslator, LabelTranslator labelTranslator) throws IOException {
    DFStats stats = new DFStats(labelTranslator.getNumClasses());
    stats.update(index, labelTranslator, trainIdTranslator.getAllExtIds());
    stats.sort();
    return stats;
}
/**
 * End-to-end pipeline: sample a train/test split, train the model with
 * online feature extraction, then materialize and archive both splits
 * (optionally dumping selected index fields for inspection).
 */
static void build(Config config, MultiLabelIndex index) throws Exception{
    int numDocsInIndex = index.getNumDocs();
    String[] trainIndexIds = sampleTrain(config, index);
    System.out.println("number of training documents = "+trainIndexIds.length);
    IdTranslator trainIdTranslator = loadIdTranslator(trainIndexIds);
    FeatureMappers featureMappers = new FeatureMappers();
    LabelTranslator trainLabelTranslator = loadTrainLabelTranslator(index, trainIndexIds);
    if (config.getBoolean("useInitialFeatures")){
        addInitialFeatures(config, index, featureMappers, trainIndexIds);
    }
    MultiLabelClfDataSet trainDataSet =
            loadTrainData(config, index, featureMappers, trainIdTranslator, trainLabelTranslator);
    trainModel(config, trainDataSet, featureMappers, index, trainIdTranslator);
    // drop the unused pre-allocated columns before saving
    List<Integer> usedColumns = IntStream.range(0, featureMappers.getTotalDim())
            .boxed()
            .collect(Collectors.toList());
    MultiLabelClfDataSet trimmedTrainDataSet = DataSetUtil.trim(trainDataSet, usedColumns);
    DataSetUtil.setFeatureMappers(trimmedTrainDataSet, featureMappers);
    saveDataSet(config, trimmedTrainDataSet, config.getString("archive.trainingSet"));
    if (config.getBoolean("archive.dumpFields")){
        dumpTrainFields(config, index, trainIdTranslator);
    }
    String[] testIndexIds = sampleTest(numDocsInIndex, trainIndexIds);
    IdTranslator testIdTranslator = loadIdTranslator(testIndexIds);
    LabelTranslator testLabelTranslator =
            loadTestLabelTranslator(index, testIndexIds, trainLabelTranslator);
    MultiLabelClfDataSet testDataSet =
            loadTestData(config, index, featureMappers, testIdTranslator, testLabelTranslator);
    DataSetUtil.setFeatureMappers(testDataSet, featureMappers);
    saveDataSet(config, testDataSet, config.getString("archive.testSet"));
    if (config.getBoolean("archive.dumpFields")){
        dumpTestFields(config, index, testIdTranslator);
    }
}
/**
 * Collects every distinct external label among the training documents and
 * wraps them in a LabelTranslator.
 */
static LabelTranslator loadTrainLabelTranslator(MultiLabelIndex index, String[] trainIndexIds) throws Exception{
    Set<String> extLabelSet = new HashSet<>();
    for (String indexId : trainIndexIds){
        extLabelSet.addAll(index.getExtMultiLabel(indexId));
    }
    LabelTranslator labelTranslator = new LabelTranslator(extLabelSet);
    System.out.println("there are "+labelTranslator.getNumClasses()+" classes in the training set.");
    System.out.println(labelTranslator);
    return labelTranslator;
}
/**
 * Builds the test-split label translator: starts from the training label
 * order (so class indices stay compatible), then appends any label that
 * only occurs in the test documents.
 */
static LabelTranslator loadTestLabelTranslator(MultiLabelIndex index, String[] testIndexIds, LabelTranslator trainLabelTranslator){
    List<String> extLabels = new ArrayList<>();
    int numTrainClasses = trainLabelTranslator.getNumClasses();
    for (int k = 0; k < numTrainClasses; k++){
        extLabels.add(trainLabelTranslator.toExtLabel(k));
    }
    Set<String> testOnlyLabels = new HashSet<>();
    for (String indexId : testIndexIds){
        testOnlyLabels.addAll(index.getExtMultiLabel(indexId));
    }
    // keep only the labels unseen during training, appended after them
    testOnlyLabels.removeAll(extLabels);
    extLabels.addAll(testOnlyLabels);
    return new LabelTranslator(extLabels);
}
}
package fi.csc.microarray.client;
import java.util.HashMap;
import java.util.Map;
import fi.csc.microarray.client.operation.OperationDefinition;
public class HelpMapping {
private static final String DEFAULT_HELP_PAGE = "chipster-manual/tools.html";
private static Map<String, String> mappings = new HashMap<String, String>();
static {
mappings.put("Preprocessing/Filter by CV", "chipster-manual/filter-cv.html");
mappings.put("Preprocessing/Filter by expression", "chipster-manual/filter-expression.html");
mappings.put("Preprocessing/Filter by flags", "chipster-manual/filter-flags.html");
mappings.put("Preprocessing/Filter by interquartile range", "chipster-manual/filter-iqr.html");
mappings.put("Preprocessing/Filter by standard deviation", "chipster-manual/filter-sd.html");
mappings.put("Preprocessing/Filter using a column term", "chipster-manual/filter-by-column-term.html");
mappings.put("Preprocessing/Filter using a column value", "chipster-manual/filter-by-column-value.html");
mappings.put("Preprocessing/Impute missing values", "chipster-manual/impute.html");
mappings.put("Preprocessing/Remove missing values", "chipster-manual/na-omit.html");
mappings.put("Quality control/Affymetrix basic", "chipster-manual/qc-affy.html");
mappings.put("Quality control/Affymetrix - using RLE and NUSE", "chipster-manual/qc-affy-rle-nuse.html");
mappings.put("Quality control/Agilent 1-color", "chipster-manual/qc-agilent-one-color.html");
mappings.put("Quality control/Agilent 2-color", "chipster-manual/qc-agilent.html");
mappings.put("Quality control/cDNA", "chipster-manual/qc-cdna.html");
mappings.put("Quality control/Illumina", "chipster-manual/qc-illumina.html");
mappings.put("Normalisation/Affymetrix exon arrays", "chipster-manual/norm-affy-exon.html");
mappings.put("Normalisation/Affymetrix", "chipster-manual/norm-affy.html");
mappings.put("Normalisation/Affymetrix SNP arrays", "chipster-manual/norm-affy-snp.html");
mappings.put("Normalisation/Affymetrix gene arrays", "chipster-manual/norm-affy-gene.html");
mappings.put("Normalisation/Agilent miRNA", "chipster-manual/norm-agilent-mirna.html");
mappings.put("Normalisation/Illumina SNP arrays", "chipster-manual/norm-illumina-snp.html");
mappings.put("Normalisation/Process prenormalized", "chipster-manual/norm-process-prenormalized.html");
mappings.put("Normalisation/Process prenormalized affy", "chipster-manual/norm-prenormalized-affy.html");
mappings.put("Normalisation/Agilent 1-color", "chipster-manual/norm-agilent-1color.html");
mappings.put("Normalisation/Agilent 2-color", "chipster-manual/norm-agilent.html");
mappings.put("Normalisation/cDNA", "chipster-manual/norm-cdna.html");
mappings.put("Normalisation/Illumina", "chipster-manual/norm-illumina.html");
mappings.put("Normalisation/Illumina - lumi pipeline", "chipster-manual/norm-illumina-lumi.html");
mappings.put("Normalisation/Random effects", "chipster-manual/norm-lme.html");
mappings.put("Normalisation/Normalize to chip average", "chipster-manual/norm-chip-average.html");
mappings.put("Normalisation/Normalize to gene average", "chipster-manual/norm-gene-average.html");
mappings.put("Normalisation/Normalize to specific samples", "chipster-manual/norm-specific-samples.html");
mappings.put("Normalisation/Normalize to specific genes", "chipster-manual/norm-specific-genes.html");
mappings.put("Statistics/Gene set test", "chipster-manual/stat-geneset.html");
mappings.put("Statistics/One sample tests", "chipster-manual/stat-one-group.html");
mappings.put("Statistics/Several groups tests", "chipster-manual/stat-several-groups.html");
mappings.put("Statistics/Single-slide methods", "chipster-manual/stat-singleslide.html");
mappings.put("Statistics/Time series", "chipster-manual/stat-timeseries.html");
mappings.put("Statistics/Two groups tests", "chipster-manual/stat-two-groups.html");
mappings.put("Statistics/ROTS", "chipster-manual/stat-ROTS.html");
mappings.put("Statistics/NMDS", "chipster-manual/ordination-nmds.html");
mappings.put("Statistics/PCA", "chipster-manual/ordination-pca.html");
mappings.put("Statistics/Sample size estimation", "chipster-manual/stat-estimate-sample-size.html");
mappings.put("Statistics/Sample size calculations with an adapted BH method", "chipster-manual/sample-size-with-bh.html");
mappings.put("Statistics/Correlate with phenodata", "chipster-manual/stat-correlate-phenodata.html");
mappings.put("Statistics/Correlate miRNA with target expression", "chipster-manual/correlate-mirna.html");
mappings.put("Statistics/Linear modelling", "chipster-manual/stat-linear-modelling.html");
mappings.put("Statistics/SAM", "chipster-manual/stat-sam.html");
mappings.put("Statistics/Adjust p-values", "chipster-manual/stat-adjust-p-values.html");
mappings.put("Statistics/Calculate descriptive statistics", "chipster-manual/calculate-descriptive-statistics.html");
mappings.put("Statistics/DCA", "chipster-manual/ordination-dca.html");
mappings.put("Statistics/Association analysis", "chipster-manual/stat-chisq-snp.html");
mappings.put("Statistics/Up-down analysis of miRNA targets", "chipster-manual/up-down-analysis-mirna.html");
mappings.put("Clustering/Hierarchical", "chipster-manual/cluster-hierarchical.html");
mappings.put("Clustering/K-Means", "chipster-manual/cluster-kmeans.html");
mappings.put("Clustering/KNN classification", "chipster-manual/cluster-knn-classification.html");
mappings.put("Clustering/Quality Threshold (QT)", "chipster-manual/cluster-qt.html");
mappings.put("Clustering/Self-organizing map (SOM)", "chipster-manual/cluster-som.html");
mappings.put("Clustering/K-Means - estimate K", "chipster-manual/cluster-kmeans-testk.html");
mappings.put("Clustering/Classification", "chipster-manual/cluster-classification.html");
mappings.put("Pathways/Bayesian network", "chipster-manual/pathway-bayesian.html");
mappings.put("Pathways/Boolean network", "chipster-manual/pathway-boolean-bestim.html");
mappings.put("Pathways/Hypergeometric test for GO", "chipster-manual/pathways-hypergeometric-go.html");
mappings.put("Pathways/Hypergeometric test for KEGG or PFAM", "chipster-manual/pathways-hypergeometric-kegg.html");
mappings.put("Pathways/SAFE test for KEGG pathway enrichment", "chipster-manual/pathways-hypergeometric-safe.html");
mappings.put("Pathways/Gene set test", "chipster-manual/stat-geneset.html");
mappings.put("Pathways/Protein interactions from IntAct", "chipster-manual/pathways-intact.html");
mappings.put("Pathways/Associations to Reactome pathways", "chipster-manual/pathways-reactome.html");
mappings.put("Pathways/Hypergeometric test for ConsensusPathDB", "chipster-manual/pathways-hypergeometric-cpdb.html");
mappings.put("Pathways/Hypergeometric test for cytobands", "chipster-manual/pathways-hypergeometric-cytobands.html");
mappings.put("Pathways/KEGG enrichment for miRNA targets", "chipster-manual/pathways-hyperg-mirna-kegg.html");
mappings.put("Pathways/GO enrichment for miRNA targets", "chipster-manual/pathways-hyperg-mirna-go.html");
mappings.put("Visualisation/Boxplot", "chipster-manual/plot-boxplot.html");
mappings.put("Visualisation/Chromosomal position", "chipster-manual/plot-chrom-pos.html");
mappings.put("Visualisation/Correlogram", "chipster-manual/plot-correlogram.html");
mappings.put("Visualisation/Dendrogram", "chipster-manual/plot-dendrogram.html");
mappings.put("Visualisation/Heatmap", "chipster-manual/plot-heatmap.html");
mappings.put("Visualisation/Histogram", "chipster-manual/plot-histogram.html");
mappings.put("Visualisation/Idiogram", "chipster-manual/plot-idiogram.html");
mappings.put("Visualisation/Venn diagram", "chipster-manual/plot-venn-diagram.html");
mappings.put("Visualisation/Volcano plot from existing results", "chipster-manual/plot-volcano-data-exists.html");
mappings.put("Visualisation/Volcano plot", "chipster-manual/plot-volcano.html");
mappings.put("Promoter Analysis/Retrieve promoters", "chipster-manual/promoter-retrprom.html");
mappings.put("Promoter Analysis/Weeder", "chipster-manual/promoter-tfbs.html");
mappings.put("Promoter Analysis/ClusterBuster", "chipster-manual/promoter-cbust.html");
mappings.put("Promoter Analysis/Cosmo", "chipster-manual/promoter-tfbs-cosmo.html");
mappings.put("Annotation/Agilent, Affymetrix or Illumina genelist", "chipster-manual/annotate-genelist2html.html");
mappings.put("Annotation/Find miRNA targets", "chipster-manual/annotate-miRNA-targets.html");
mappings.put("Annotation/Add annotations to data", "chipster-manual/annotate-add-to-data.html");
mappings.put("Annotation/Agilent miRNA", "chipster-manual/annotate-miRNA.html");
mappings.put("Utilities/Word-based query", "chipster-manual/search-queryword.html");
mappings.put("Utilities/Export GEO's SOFT format", "chipster-manual/export-soft.html");
mappings.put("Utilities/Export tab2mage format", "chipster-manual/export-tab2mage.html");
mappings.put("Utilities/Extract genes from clustering", "chipster-manual/extract-genes-from-clustering.html");
mappings.put("Utilities/Extract genes from GO term", "chipster-manual/extract-genes-from-go.html");
mappings.put("Utilities/Extract genes using a p-value", "chipster-manual/extract-genes-from-stattest.html");
mappings.put("Utilities/Extract samples from dataset", "chipster-manual/extract-samples-from-dataset.html");
mappings.put("Utilities/Average replicate chips", "chipster-manual/average-replicates.html");
mappings.put("Utilities/Calculate fold change", "chipster-manual/calculate-fold-change.html");
mappings.put("Utilities/Generate phenodata", "chipster-manual/generate-phenodata.html");
mappings.put("Utilities/Import from GEO", "chipster-manual/import-from-geo.html");
mappings.put("Utilities/Merge tables", "chipster-manual/merge-tables.html");
mappings.put("Utilities/Merge data sets", "chipster-manual/merge-datasets.html");
mappings.put("Utilities/Search by correlation", "chipster-manual/search-correlation.html");
mappings.put("Utilities/Search by gene name", "chipster-manual/search-queryword.html");
mappings.put("Utilities/Merge tables", "chipster-manual/merge-tables.html");
mappings.put("Utilities/Sort samples", "chipster-manual/sort-samples.html");
mappings.put("Utilities/Delete columns", "chipster-manual/delete-columns.html");
mappings.put("Utilities/Filter using a column", "chipster-manual/filter-by-column.html");
mappings.put("Utilities/Combine probes to genes", "chipster-manual/combine-probes-to-genes.html");
mappings.put("Utilities/Extract genes", "chipster-manual/extract-genes.html");
mappings.put("Utilities/Change interpretation", "chipster-manual/change-interpretation.html");
mappings.put("Utilities/Sort genes", "chipster-manual/sort-genes.html");
mappings.put("Utilities/Random sampling", "chipster-manual/random-sampling.html");
mappings.put("Utilities/Intersect lists", "chipster-manual/intersect-lists.html");
mappings.put("aCGH/Import from CanGEM", "chipster-manual/import-from-cangem.html");
mappings.put("aCGH/Smooth waves from normalized aCGH data", "chipster-manual/smooth-acgh.html");
mappings.put("aCGH/Call copy number aberrations from aCGH data", "chipster-manual/detect-copy-number-aberrations.html");
mappings.put("aCGH/Plot copy number profiles from called aCGH data", "chipster-manual/plot-cgh-profile.html");
mappings.put("aCGH/Identify common regions from called aCGH data", "chipster-manual/detect-common-copy-number-aberration-regions.html");
mappings.put("aCGH/Cluster called aCGH data", "chipster-manual/cluster-acgh.html");
mappings.put("aCGH/Group tests for called aCGH data", "chipster-manual/stat-acgh.html");
mappings.put("aCGH/Convert called aCGH data from probes to genes", "chipster-manual/convert-cn-probes-to-genes.html");
mappings.put("aCGH/GO enrichment for called aCGH genes", "chipster-manual/pathways-acgh-hyperg-go.html");
mappings.put("aCGH/Match copy number and expression probes", "chipster-manual/match-cn-and-expression-probes.html");
mappings.put("aCGH/Plot profiles of matched copy number and expression", "chipster-manual/plot-cn-induced-expression-profile.html");
mappings.put("aCGH/Test for copy-number-induced expression changes", "chipster-manual/test-for-cn-induced-differential-expression.html");
mappings.put("aCGH/Plot copy-number-induced gene expression", "chipster-manual/plot-cn-induced-gene-expression.html");
mappings.put("aCGH/Fetch probe positions from CanGEM", "chipster-manual/fetch-probe-positions-from-cangem.html");
mappings.put("aCGH/Add cytogenetic bands", "chipster-manual/add-cytobands.html");
mappings.put("aCGH/Count overlapping CNVs", "chipster-manual/count-overlapping-cnvs.html");
mappings.put("aCGH/Update aberration frequencies for called aCGH data", "chipster-manual/calculate-aberration-frequencies.html");
mappings.put("Miscellaneous/Multiple sequence alignment", "chipster-manual/seqanal-msa.html");
mappings.put("Miscellaneous/Phylogenetics", "chipster-manual/seqanal-phylogenetics.html");
}
public static String mapToHelppage(OperationDefinition definition) {
String page = mappings.get(definition.getCategory().getName() + "/" + definition.getName());
if (page == null) {
page = DEFAULT_HELP_PAGE;
}
return page;
}
} |
package tektor.minecraft.talldoors.entities.drawbridge;
import java.util.List;
import tektor.minecraft.talldoors.TallDoorsBase;
import tektor.minecraft.talldoors.items.Connector;
import net.minecraft.entity.Entity;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.util.AxisAlignedBB;
import net.minecraft.util.DamageSource;
import net.minecraft.world.World;
public class DrawbridgeBase extends Entity {
public double rotation;
public double lon = 7;
public double width2 = 4;
public double height2 = 0.125;
public int orientation;
boolean up, active;
public DrawbridgeMachine machine;
double mX;
double mY;
double mZ;
public DrawbridgeBase(World par1World) {
super(par1World);
rotation = 0;
this.setSize(7f, 0.125f);
this.ignoreFrustumCheck = true;
up = false;
active = false;
mX = 0;
mY = 0;
mZ = 0;
}
public void setPars(int width3, int depth3) {
width2 = width3;
lon = depth3;
this.dataWatcher.updateObject(22, (int) lon);
this.dataWatcher.updateObject(23, (int) width2);
this.setPosition(posX, posY, posZ);
}
public void onUpdate() {
if (this.worldObj.isRemote) {
orientation = this.dataWatcher.getWatchableObjectInt(28);
rotation = Double.parseDouble(this.dataWatcher
.getWatchableObjectString(29));
up = this.dataWatcher.getWatchableObjectInt(20) == 0 ? false : true;
active = this.dataWatcher.getWatchableObjectInt(21) == 0 ? false
: true;
mX = this.dataWatcher.getWatchableObjectInt(25);
mY = this.dataWatcher.getWatchableObjectInt(26);
mZ = this.dataWatcher.getWatchableObjectInt(27);
lon = this.dataWatcher.getWatchableObjectInt(22);
width2 = this.dataWatcher.getWatchableObjectInt(23);
}
if (!this.worldObj.isRemote) {
if (up && active) {
if (rotation < 90) {
rotation = rotation + 0.4;
this.dataWatcher.updateObject(29, "" + rotation);
} else {
active = false;
this.dataWatcher.updateObject(21, 0);
}
} else if (!up && active) {
if (rotation > 0) {
rotation = rotation - 0.4;
this.dataWatcher.updateObject(29, "" + rotation);
} else {
active = false;
this.dataWatcher.updateObject(21, 0);
}
}
}
if (machine == null) {
List<DrawbridgeMachine> list = (List<DrawbridgeMachine>) worldObj
.getEntitiesWithinAABB(DrawbridgeMachine.class, boundingBox
.getBoundingBox(mX - 1, mY - 1, mZ - 1, mX + 1,
mY + 1, mZ + 1));
machine = list.isEmpty() ? null : list.get(0);
}
setBoundsAt(posX, posY, posZ);
}
@Override
protected void entityInit() {
this.dataWatcher.addObject(28, 0);
this.dataWatcher.addObject(29, "" + 0);
this.dataWatcher.addObject(25, 0);
this.dataWatcher.addObject(26, 0);
this.dataWatcher.addObject(27, 0);
this.dataWatcher.addObject(20, 0);
this.dataWatcher.addObject(21, 0);
this.dataWatcher.addObject(22, 0);
this.dataWatcher.addObject(23, 0);
}
@Override
protected void readEntityFromNBT(NBTTagCompound nbt) {
height2 = nbt.getDouble("height");
rotation = nbt.getDouble("rotation");
this.dataWatcher.updateObject(29, "" + rotation);
width2 = nbt.getDouble("width");
lon = nbt.getDouble("lon");
this.dataWatcher.updateObject(22, (int) lon);
this.dataWatcher.updateObject(23, (int) width2);
this.setOrientation(nbt.getInteger("orientation"));
List<DrawbridgeMachine> list = (List<DrawbridgeMachine>) worldObj
.getEntitiesWithinAABB(DrawbridgeMachine.class, boundingBox
.getBoundingBox(nbt.getDouble("mX") - 1,
nbt.getDouble("mY") - 1,
nbt.getDouble("mZ") - 1,
nbt.getDouble("mx") + 1,
nbt.getDouble("mY") + 1,
nbt.getDouble("mZ") + 1));
machine = list.isEmpty() ? null : list.get(0);
this.mX = nbt.getDouble("mX");
this.mY = nbt.getDouble("mY");
this.mZ = nbt.getDouble("mZ");
this.dataWatcher.updateObject(25, (int) mX);
this.dataWatcher.updateObject(26, (int) mY);
this.dataWatcher.updateObject(27, (int) mZ);
active = nbt.getBoolean("active");
up = nbt.getBoolean("up");
if (active)
this.dataWatcher.updateObject(21, 1);
else
this.dataWatcher.updateObject(21, 0);
if (up)
this.dataWatcher.updateObject(20, 1);
else
this.dataWatcher.updateObject(20, 0);
}
@Override
protected void writeEntityToNBT(NBTTagCompound nbt) {
nbt.setDouble("height", height2);
nbt.setDouble("rotation", rotation);
nbt.setDouble("width", width2);
nbt.setDouble("lon", lon);
nbt.setInteger("orientation", orientation);
if (machine != null) {
nbt.setDouble("mX", machine.posX);
nbt.setDouble("mY", machine.posY);
nbt.setDouble("mZ", machine.posZ);
}
nbt.setBoolean("up", up);
nbt.setBoolean("active", active);
}
@Override
public AxisAlignedBB getBoundingBox() {
return this.boundingBox;
}
@Override
public AxisAlignedBB getCollisionBox(Entity par1Entity) {
return this.boundingBox;
}
@Override
public boolean canBeCollidedWith() {
return true;
}
public void setOrientation(int var24) {
orientation = var24;
this.dataWatcher.updateObject(28, var24);
}
@Override
public void onCollideWithPlayer(EntityPlayer par1EntityPlayer) {
}
@Override
public void setPositionAndRotation2(double par1, double par3, double par5,
float par7, float par8, int par9) {
this.setPosition(par1, par3, par5);
this.setRotation(par7, par8);
}
@Override
public void setPosition(double par1, double par3, double par5) {
this.posX = par1;
this.posY = par3;
this.posZ = par5;
setBoundsAt(par1, par3, par5);
}
public void setBoundsAt(double par1, double par3, double par5) {
float f = this.width / 2.0F;
float f1 = this.height;
if (this.active == false && this.up == false) {
if (orientation == 0) {
this.boundingBox.setBounds(par1, par3 - this.yOffset
+ this.ySize, par5, par1 + width2, par3 - this.yOffset
+ this.ySize + f1, par5 + lon);
} else if (orientation == 1) {
this.boundingBox.setBounds(par1 - lon + 1, par3 - this.yOffset
+ this.ySize, par5, par1 + 1, par3 - this.yOffset
+ this.ySize + f1, par5 + width2);
} else if (orientation == 2) {
this.boundingBox.setBounds(par1 - width2 + 1, par3
- this.yOffset + this.ySize, par5 - lon + 1, par1 + 1,
par3 - this.yOffset + this.ySize + f1, par5 + 1);
} else if (orientation == 3) {
this.boundingBox.setBounds(par1, par3 - this.yOffset
+ this.ySize, par5 - width2 + 1, par1 + lon, par3
- this.yOffset + this.ySize + f1, par5 + 1);
}
} else {
f1 = (float) lon;
if (orientation == 0) {
this.boundingBox.setBounds(par1, par3 - this.yOffset
+ this.ySize, par5, par1 + width2, par3 - this.yOffset
+ this.ySize + f1, par5 + 0.125f);
} else if (orientation == 1) {
this.boundingBox.setBounds(par1 - 0.125f + 1, par3
- this.yOffset + this.ySize, par5, par1 + 1, par3
- this.yOffset + this.ySize + f1, par5 + width2);
} else if (orientation == 2) {
this.boundingBox.setBounds(par1 - width2 + 1, par3
- this.yOffset + this.ySize, par5 - 0.125f + 1,
par1 + 1, par3 - this.yOffset + this.ySize + f1,
par5 + 1);
} else if (orientation == 3) {
this.boundingBox.setBounds(par1, par3 - this.yOffset
+ this.ySize, par5 - width2 + 1, par1 + 0.125f, par3
- this.yOffset + this.ySize + f1, par5 + 1);
}
}
}
public void activate() {
if (!this.up) {
up = true;
active = true;
this.dataWatcher.updateObject(21, 1);
this.dataWatcher.updateObject(20, 1);
} else {
up = false;
active = true;
this.dataWatcher.updateObject(21, 1);
this.dataWatcher.updateObject(20, 0);
}
System.out.println("activated");
}
public int func_82329_d() {
return 64;
}
public int func_82330_g() {
return 2;
}
@Override
public boolean func_130002_c(EntityPlayer player) {
if (!this.worldObj.isRemote) {
if (player.inventory.getCurrentItem() != null
&& player.inventory.getCurrentItem().itemID == TallDoorsBase.connector.itemID) {
((Connector) player.inventory.getCurrentItem().getItem()).base = this;
}
}
return true;
}
public void setMachinePos(double posX, double posY, double posZ) {
mX = posX;
this.dataWatcher.updateObject(25, (int) mX);
mY = posY;
this.dataWatcher.updateObject(26, (int) mY);
mZ = posZ;
this.dataWatcher.updateObject(27, (int) mZ);
}
@Override
public boolean attackEntityFrom(DamageSource par1DamageSource, float par2) {
if (this.isEntityInvulnerable()) {
return false;
} else {
if (!this.isDead && !this.worldObj.isRemote
&& par1DamageSource.getEntity() instanceof EntityPlayer) {
this.setDead();
this.setBeenAttacked();
this.func_110128_b(par1DamageSource.getEntity());
}
return true;
}
}
public void func_110128_b(Entity par1Entity) {
if (par1Entity instanceof EntityPlayer) {
EntityPlayer entityplayer = (EntityPlayer) par1Entity;
if (entityplayer.capabilities.isCreativeMode) {
return;
}
}
ItemStack drop = new ItemStack(TallDoorsBase.drawbridge, 1, 0);
drop.stackTagCompound = new NBTTagCompound();
drop.stackTagCompound.setInteger("width", (int)this.width2);
drop.stackTagCompound.setInteger("depth", (int)this.lon);
this.entityDropItem(drop, 0.0F);
}
} |
package fi.solita.utils.functional;
import static fi.solita.utils.functional.Collections.newList;
import static fi.solita.utils.functional.Functional.filter;
import static fi.solita.utils.functional.Functional.forAll;
import static fi.solita.utils.functional.Functional.headOption;
import static fi.solita.utils.functional.Functional.map;
import static fi.solita.utils.functional.Functional.max;
import static fi.solita.utils.functional.Functional.min;
import static fi.solita.utils.functional.Functional.sort;
import static fi.solita.utils.functional.Option.None;
import static fi.solita.utils.functional.Option.Some;
import java.util.Collection;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import java.util.NoSuchElementException;
import java.util.PriorityQueue;
public abstract class Iterables {
/**
 * Iterable over an enumerable range: starts at {@code from} and repeatedly
 * applies {@code enumeration.succ} until {@code toInclusive} (when defined)
 * has been returned, or until succ yields None.
 */
static final class RangeIterable<T> extends PossiblySizeAwareIterable<T> {
    private final Enumerable<T> enumeration;
    // first value produced
    private final T from;
    // last value produced; None means the range has no upper bound
    private final Option<T> toInclusive;
    // size if the caller knew it in advance; None otherwise
    private final Option<Integer> knownSize;

    public RangeIterable(Enumerable<T> enumeration, T from, Option<T> toInclusive) {
        this(enumeration, from, toInclusive, Option.<Integer>None());
    }

    public RangeIterable(Enumerable<T> enumeration, T from, T toInclusive, int knownSize) {
        this(enumeration, from, Some(toInclusive), Some(knownSize));
    }

    private RangeIterable(Enumerable<T> enumeration, T from, Option<T> toInclusive, Option<Integer> knownSize) {
        this.enumeration = enumeration;
        this.from = from;
        this.toInclusive = toInclusive;
        this.knownSize = knownSize;
    }

    @Override
    public Iterator<T> iterator() {
        return new Iterator<T>() {
            // next value to hand out; None once the range is exhausted
            Option<T> nextToReturn = Some(from);

            @Override
            public boolean hasNext() {
                return nextToReturn.isDefined();
            }

            @Override
            public T next() {
                if (!nextToReturn.isDefined()) {
                    throw new NoSuchElementException();
                }
                T ret = nextToReturn.get();
                if (toInclusive.isDefined() && ret.equals(toInclusive.get())) {
                    // upper bound reached: this is the last element
                    nextToReturn = None();
                } else {
                    nextToReturn = enumeration.succ(ret);
                }
                return ret;
            }

            @Override
            public void remove() {
                throw new UnsupportedOperationException();
            }
        };
    }

    @Override
    public Option<Integer> size() {
        return knownSize;
    }
}
/**
 * Iterable that yields the same value either a fixed number of times or,
 * when constructed without an amount, without a declared bound.
 */
static final class RepeatingIterable<T> extends PossiblySizeAwareIterable<T> {
    private final T value;
    // None = unbounded repetition
    private final Option<Integer> amount;

    /** Repeats {@code value} without a declared bound. */
    public RepeatingIterable(T value) {
        this.value = value;
        this.amount = None();
    }

    /** Repeats {@code value} exactly {@code amount} times. */
    public RepeatingIterable(T value, int amount) {
        this.value = value;
        this.amount = Some(amount);
    }

    @Override
    public Option<Integer> size() {
        if (amount.isDefined()) {
            return amount;
        }
        return Option.<Integer>None();
    }

    @Override
    public Iterator<T> iterator() {
        return new Iterator<T>() {
            // number of values handed out so far
            private int produced = 0;

            @Override
            public boolean hasNext() {
                return produced < amount.getOrElse(Integer.MAX_VALUE);
            }

            @Override
            public T next() {
                if (!hasNext()) {
                    throw new NoSuchElementException();
                }
                produced++;
                return value;
            }

            @Override
            public void remove() {
                throw new UnsupportedOperationException();
            }
        };
    }
}
/**
 * Base class for iterables that may know their size without iterating.
 */
static abstract class PossiblySizeAwareIterable<T> implements Iterable<T> {
    /** Known size of this iterable, or None when it cannot be determined cheaply. */
    public abstract Option<Integer> size();

    @Override
    public String toString() {
        String contents = Collections.newList(this).toString();
        return getClass().getSimpleName() + contents;
    }
}
/**
 * Transposes an iterable of iterables: the n:th element of the result is the
 * list of the n:th elements of each input. Iteration stops as soon as any
 * input is exhausted.
 */
static class TransposingIterable<T> extends PossiblySizeAwareIterable<Iterable<T>> {
    private final Iterable<? extends Iterable<T>> elements;

    public TransposingIterable(Iterable<? extends Iterable<T>> elements) {
        this.elements = elements;
    }

    // true iff the given iterator still has elements
    private static Transformer<Iterator<?>,Boolean> hasNext = new Transformer<Iterator<?>,Boolean>() {
        @Override
        public Boolean transform(Iterator<?> source) {
            return source.hasNext();
        }
    };

    // advances the given iterator and returns its next element
    private Transformer<Iterator<T>,T> next = new Transformer<Iterator<T>,T>() {
        @Override
        public T transform(Iterator<T> source) {
            return source.next();
        }
    };

    @Override
    public Iterator<Iterable<T>> iterator() {
        return new Iterator<Iterable<T>>() {
            // one live iterator per input iterable
            List<Iterator<T>> iterators = newList(map(elements, new Transformer<Iterable<T>,Iterator<T>>() {
                @Override
                public Iterator<T> transform(Iterable<T> source) {
                    return source.iterator();
                }
            }));

            @Override
            public boolean hasNext() {
                // a next "row" exists only while every input has an element left
                return !iterators.isEmpty() && forAll(iterators, hasNext);
            }

            @Override
            public Iterable<T> next() {
                return newList(map(iterators, next));
            }

            @Override
            public void remove() {
                for (Iterator<?> it: iterators) {
                    it.remove();
                }
            }
        };
    }

    @Override
    public Option<Integer> size() {
        // size of the transpose is the smallest known input size.
        // NOTE(review): inputs whose size is unknown are filtered out here, so
        // the reported size may be too large when such an input is the
        // shortest one -- verify whether that is intended.
        Iterable<Option<Integer>> resolvedSizes = filter(map(elements, resolveSize), new Predicate<Option<?>>() {
            @Override
            public boolean accept(Option<?> candidate) {
                return candidate.isDefined();
            }
        });
        return headOption(sort(map(resolvedSizes, new Transformer<Option<Integer>,Integer>() {
            @Override
            public Integer transform(Option<Integer> source) {
                return source.get();
            }
        })));
    }
}
/**
 * Pairs up two iterables element-by-element; iteration stops when the
 * shorter side is exhausted.
 */
static class ZippingIterable<A,B> extends PossiblySizeAwareIterable<Tuple2<A, B>> {
    private final Iterable<A> elements1;
    private final Iterable<B> elements2;

    public ZippingIterable(Iterable<A> elements1, Iterable<B> elements2) {
        this.elements1 = elements1;
        this.elements2 = elements2;
    }

    @Override
    public Iterator<Tuple2<A, B>> iterator() {
        final Iterator<A> first = elements1.iterator();
        final Iterator<B> second = elements2.iterator();
        return new Iterator<Tuple2<A, B>>() {
            @Override
            public boolean hasNext() {
                return first.hasNext() && second.hasNext();
            }

            @Override
            public Tuple2<A, B> next() {
                A a = first.next();
                B b = second.next();
                return Tuple.of(a, b);
            }

            @Override
            public void remove() {
                // removes the current element from both underlying iterables
                first.remove();
                second.remove();
            }
        };
    }

    @Override
    public Option<Integer> size() {
        // defined only when both sides know their size; result is the smaller
        for (int a: resolveSize.apply(elements1)) {
            for (int b: resolveSize.apply(elements2)) {
                return Some(Functional.min(a, b));
            }
        }
        return None();
    }
}
/**
 * Flattens an iterable of iterables into one sequence, iterating each part
 * in turn.
 */
static class ConcatenatingIterable<T> extends PossiblySizeAwareIterable<T> {
    private final Iterable<? extends Iterable<? extends T>> elements;

    public ConcatenatingIterable(Iterable<? extends Iterable<? extends T>> elements) {
        this.elements = elements;
    }

    @Override
    public Option<Integer> size() {
        // defined only when every part knows its size; then it is their sum
        int size = 0;
        for (Iterable<? extends T> it: elements) {
            Option<Integer> resolvedSize = resolveSize.apply(it);
            if (resolvedSize.isDefined()) {
                size += resolvedSize.get();
            } else {
                return None();
            }
        }
        return Some(size);
    }

    @Override
    public Iterator<T> iterator() {
        return new Iterator<T>() {
            // lazily produced iterators of the individual parts
            Iterator<Iterator<? extends T>> it = Functional.map(elements, new Transformer<Iterable<? extends T>, Iterator<? extends T>>() {
                @Override
                public Iterator<? extends T> transform(Iterable<? extends T> source) {
                    return source.iterator();
                }
            }).iterator();

            // part currently being drained; empty iterator if there are no parts
            private Iterator<? extends T> lastUsed = it.hasNext() ? it.next() : java.util.Collections.<T>emptyList().iterator();

            @Override
            public boolean hasNext() {
                // skip exhausted parts until one with elements remains
                while (!lastUsed.hasNext() && it.hasNext()) {
                    lastUsed = it.next();
                }
                return lastUsed.hasNext();
            }

            @Override
            public T next() {
                // hasNext() also advances lastUsed to the next non-empty part
                hasNext();
                return lastUsed.next();
            }

            @Override
            public void remove() {
                lastUsed.remove();
            }
        };
    }
}
/**
 * Lazily filters the underlying iterable, keeping only the elements accepted
 * by the predicate.
 */
static class FilteringIterable<T> extends PossiblySizeAwareIterable<T> {
    private final Iterable<T> iterable;
    private final Apply<? super T, Boolean> filter;

    public FilteringIterable(Iterable<T> iterable, Apply<? super T, Boolean> filter) {
        this.iterable = iterable;
        this.filter = filter;
    }

    @Override
    public Option<Integer> size() {
        // unknown without running the predicate over every element
        return None();
    }

    @Override
    public Iterator<T> iterator() {
        return new Iterator<T>() {
            // one-element lookahead buffer; null means exhausted. An Object[]
            // wrapper is used so a buffered null element can be told apart
            // from "no element"
            private Object[] next;
            private Iterator<T> source = iterable.iterator();
            {
                readNext();
            }

            @Override
            public boolean hasNext() {
                return next != null;
            }

            // advances 'source' until the next accepted element (stored in
            // 'next') or the end of input ('next' left null)
            private void readNext() {
                next = null;
                while (source.hasNext() && next == null) {
                    T n = source.next();
                    if (filter.apply(n)) {
                        next = new Object[]{n};
                    }
                }
            }

            @Override
            public T next() {
                if (next == null) {
                    throw new NoSuchElementException();
                }
                @SuppressWarnings("unchecked")
                T ret = (T) next[0];
                readNext();
                return ret;
            }

            @Override
            public void remove() {
                // NOTE(review): because of the lookahead, 'source' is already
                // positioned on the buffered element, so this removes that
                // buffered element rather than the one last returned by this
                // iterator -- verify whether that is intended.
                if (next == null) {
                    throw new IllegalStateException();
                }
                source.remove();
            }
        };
    }
}
/**
 * Lazily applies a transformation to each element of the underlying iterable.
 */
static class TransformingIterable<S,T> extends PossiblySizeAwareIterable<T> {
    private final Iterable<S> iterable;
    private final Apply<? super S, ? extends T> transformer;

    public TransformingIterable(Iterable<S> iterable, Apply<? super S, ? extends T> transformer) {
        this.iterable = iterable;
        this.transformer = transformer;
    }

    @Override
    public Option<Integer> size() {
        // the transformation is 1-to-1, so the size equals the source's size
        return resolveSize.apply(iterable);
    }

    @Override
    public Iterator<T> iterator() {
        final Iterator<S> delegate = iterable.iterator();
        return new Iterator<T>() {
            @Override
            public boolean hasNext() {
                return delegate.hasNext();
            }

            @Override
            public T next() {
                S element = delegate.next();
                return transformer.apply(element);
            }

            @Override
            public void remove() {
                delegate.remove();
            }
        };
    }
}
/**
 * Iterates the underlying iterable in reverse order. Unless the source
 * already is a List, its contents are copied each time iterator() is called.
 */
static class ReversingIterable<T> extends PossiblySizeAwareIterable<T> {
    private final Iterable<T> iterable;

    public ReversingIterable(Iterable<T> iterable) {
        this.iterable = iterable;
    }

    @Override
    public Option<Integer> size() {
        return resolveSize.apply(iterable);
    }

    @Override
    public Iterator<T> iterator() {
        final List<T> asList;
        if (iterable instanceof List) {
            asList = (List<T>) iterable;
        } else {
            asList = newList(iterable);
        }
        // walk a ListIterator backwards from the end
        final ListIterator<T> backwards = asList.listIterator(asList.size());
        return new Iterator<T>() {
            @Override
            public boolean hasNext() {
                return backwards.hasPrevious();
            }

            @Override
            public T next() {
                return backwards.previous();
            }

            @Override
            public void remove() {
                throw new UnsupportedOperationException();
            }
        };
    }

    @Override
    public String toString() {
        return Collections.newList(this).toString();
    }
}
/**
 * Views a CharSequence as an iterable of its characters.
 */
static final class CharSequenceIterable extends PossiblySizeAwareIterable<Character> {
    private final CharSequence charSeq;

    public CharSequenceIterable(CharSequence charSeq) {
        this.charSeq = charSeq;
    }

    @Override
    public Iterator<Character> iterator() {
        return new Iterator<Character>() {
            // index of the next character to return
            private int position = 0;

            @Override
            public boolean hasNext() {
                return position < charSeq.length();
            }

            @Override
            public Character next() {
                if (!hasNext()) {
                    throw new NoSuchElementException();
                }
                char c = charSeq.charAt(position);
                position++;
                return c;
            }

            @Override
            public void remove() {
                throw new UnsupportedOperationException();
            }
        };
    }

    @Override
    public Option<Integer> size() {
        return Some(charSeq.length());
    }
}
/**
 * Iterates the elements in the order given by the comparator. All elements
 * are loaded into a priority queue when iterator() is called, and the
 * returned iterator drains that queue.
 */
static final class SortingIterable<T> extends PossiblySizeAwareIterable<T> {
    private final Iterable<T> iterable;
    private final Comparator<? super T> comparator;

    public SortingIterable(Iterable<T> iterable, Comparator<? super T> comparator) {
        this.iterable = iterable;
        this.comparator = comparator;
    }

    @Override
    public Iterator<T> iterator() {
        // 11 matches PriorityQueue's default capacity, used when size is unknown
        int initialSize = resolveSize.apply(iterable).getOrElse(11);
        if (initialSize == 0) {
            return java.util.Collections.<T>emptyList().iterator();
        }
        final PriorityQueue<T> heap = new PriorityQueue<T>(initialSize, comparator);
        for (T element : iterable) {
            heap.add(element);
        }
        return new Iterator<T>() {
            @Override
            public boolean hasNext() {
                return !heap.isEmpty();
            }

            @Override
            public T next() {
                return heap.remove();
            }

            @Override
            public void remove() {
                throw new UnsupportedOperationException();
            }
        };
    }

    @Override
    public Option<Integer> size() {
        return resolveSize.apply(iterable);
    }
}
static final class TakingIterable<T> extends PossiblySizeAwareIterable<T> {
private final Iterable<T> elements;
private final int amount;
public TakingIterable(Iterable<T> elements, int amount) {
if (amount < 0) {
throw new IllegalArgumentException("amount must be >= 0");
}
this.elements = elements;
this.amount = amount;
}
@Override
public Iterator<T> iterator() {
return new Iterator<T>() {
int left = amount;
Iterator<T> it = elements.iterator();
@Override
public boolean hasNext() {
return left > 0 && it.hasNext();
}
@Override
public T next() {
if (left == 0) {
throw new NoSuchElementException();
}
left
return it.next();
}
@Override
public void remove() {
it.remove();
}
};
}
@Override
public Option<Integer> size() {
Option<Integer> s = resolveSize.apply(elements);
if (s.isDefined()) {
return Some(min(s.get(), amount));
} else {
// a good guess, since it's probably rare that 'take' is
// called with an amount of significantly larger than the size
// of the iterable. Right?
return Some(amount);
}
}
}
static final class DroppingIterable<T> extends PossiblySizeAwareIterable<T> {
private final Iterable<T> elements;
private final int amount;
public DroppingIterable(Iterable<T> elements, int amount) {
if (amount < 0) {
throw new IllegalArgumentException("amount must be >= 0");
}
this.elements = elements;
this.amount = amount;
}
@Override
public Iterator<T> iterator() {
Iterator<T> it = elements.iterator();
int left = amount;
while (left > 0 && it.hasNext()) {
it.next();
left
}
return it;
}
@Override
public Option<Integer> size() {
Option<Integer> s = resolveSize.apply(elements);
if (s.isDefined()) {
return Some(max(s.get() - amount, 0));
} else {
return None();
}
}
}
/**
 * Best-effort size resolution: a known size for {@link Collection}s,
 * a possibly-known size for {@link PossiblySizeAwareIterable}s,
 * and {@code None} for everything else. Collection is checked first so a
 * concrete collection always reports its exact size.
 */
public static Transformer<Iterable<?>,Option<Integer>> resolveSize = new Transformer<Iterable<?>,Option<Integer>>() {
    @Override
    public Option<Integer> transform(Iterable<?> candidate) {
        if (candidate instanceof Collection) {
            return Some(((Collection<?>) candidate).size());
        } else if (candidate instanceof PossiblySizeAwareIterable) {
            return ((PossiblySizeAwareIterable<?>) candidate).size();
        } else {
            return None();
        }
    }
};
} |
package fi.solita.utils.meta;
import static fi.solita.utils.functional.Collections.newList;
import static fi.solita.utils.functional.Collections.newMap;
import static fi.solita.utils.functional.Collections.newSet;
import static fi.solita.utils.functional.Functional.mkString;
import static fi.solita.utils.functional.Functional.takeWhile;
import static fi.solita.utils.functional.Predicates.equalTo;
import static fi.solita.utils.functional.Predicates.not;
import java.io.Serializable;
import java.io.Writer;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.annotation.Generated;
import javax.annotation.processing.Filer;
import javax.lang.model.element.TypeElement;
import javax.tools.FileObject;
import fi.solita.utils.functional.Option;
public class ClassFileWriter {
private static final Pattern IMPORTS = Pattern.compile(Pattern.quote("{${") + "(([a-zA-Z0-9_]+\\.)*([a-zA-Z0-9_$]+))" + Pattern.quote("}$}"));
private static final String SERIALIZABLE = Serializable.class.getSimpleName();
private static final String GENERATED = Generated.class.getSimpleName();
private static final String LINE_SEP = System.getProperty("line.separator");
public static void writeClassFile(String packageName, String classSimpleName, Option<String> extendedClassName, Iterable<String> contentLines, Class<?> generator, Filer filer, TypeElement originalClass, Option<SuppressWarnings> classSupressWarnings, boolean isDeprecated) {
Map<String,String> toImport = newMap();
toImport.put(GENERATED, "javax.annotation.Generated");
toImport.put(SERIALIZABLE, "java.io.Serializable");
StringBuffer content = new StringBuffer();
for (String line: contentLines) {
Matcher m = IMPORTS.matcher(line);
while (m.find()) {
String fullyQualifiedName = m.group(1).replaceAll("[$]", ".");
String importClauseName = takeWhile(not(equalTo('$')), m.group(1));
String simpleName = takeWhile(not(equalTo('$')), m.group(3));
String importName = m.group(3).replaceAll("[$]", ".");
String alreadyImported = toImport.get(simpleName);
if (alreadyImported == null || alreadyImported.equals(importClauseName)) {
toImport.put(simpleName, importClauseName);
m.appendReplacement(content, importName);
} else {
m.appendReplacement(content, fullyQualifiedName);
}
}
m.appendTail(content);
content.append(LINE_SEP);
}
List<String> suppress = newList();
for (SuppressWarnings sw: classSupressWarnings) {
for (String sws: sw.value()) {
if (!"unused".equals(sws)) {
suppress.add(sws);
}
}
}
suppress.add("serial");
try {
String extend = extendedClassName.isDefined() ? " extends " + extendedClassName.get() : "";
FileObject fo = filer.createSourceFile((packageName.isEmpty() ? "" : packageName + ".") + classSimpleName, originalClass);
Writer pw = fo.openWriter();
if (!packageName.isEmpty()) {
pw.append("package ")
.append(packageName)
.append(';')
.append(LINE_SEP);
}
for (String qualifiedName: newSet(toImport.values())) {
pw.append("import ")
.append(qualifiedName)
.append(";")
.append(LINE_SEP);
}
pw.append(LINE_SEP)
.append('@')
.append(GENERATED)
.append("(\"")
.append(generator.getName())
.append("\")")
.append(LINE_SEP)
.append("@SuppressWarnings({\"" + mkString("\",\"", suppress) + "\"})")
.append(LINE_SEP);
if (isDeprecated) {
pw.append("@Deprecated")
.append(LINE_SEP);
}
pw.append("public class ")
.append(classSimpleName)
.append(extend)
.append(" implements ")
.append(SERIALIZABLE)
.append(" {")
.append(LINE_SEP)
.append(LINE_SEP)
.append(content)
.append('}');
pw.close();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
} |
package com.bookbase.app.library;
import android.content.Context;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import android.support.design.widget.FloatingActionButton;
import android.support.design.widget.Snackbar;
import android.support.v4.app.Fragment;
import android.support.v7.widget.DividerItemDecoration;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.bookbase.app.R;
import com.bookbase.app.database.AppDatabase;
import com.bookbase.app.library.addBook.AddBookActivity;
import com.bookbase.app.library.viewBook.ViewBookFragment;
import com.bookbase.app.model.entity.Book;
import com.bookbase.app.model.repository.Repository;
import com.bookbase.app.utils.BundleBookHelper;
import java.util.List;
public class BooksFragment extends Fragment implements Runnable{
private OnFragmentInteractionListener mListener;
private List<Book> books;
private AppDatabase database;
private RecyclerView bookList;
private Repository repository;
public interface OnFragmentInteractionListener { void onFragmentInteraction(Uri uri); }
public BooksFragment() {}
public static BooksFragment newInstance(String param1, String param2) {
return new BooksFragment();
}
@Override
public void onAttach(Context context) {
super.onAttach(context);
if (context instanceof OnFragmentInteractionListener) {
mListener = (OnFragmentInteractionListener) context;
run();
} else {
throw new RuntimeException(context.toString()
+ " must implement OnFragmentInteractionListener");
}
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
repository = Repository.getRepository();
books = repository.getBookList();
}
@Override
public void onResume() {
super.onResume();
setupAdapter(repository.getBookList());
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View view = inflater.inflate(R.layout.fragment_books, container, false);
bookList = view.findViewById(R.id.books_list);
LinearLayoutManager layoutManager = new LinearLayoutManager(getActivity());
DividerItemDecoration dividerItemDecoration = new DividerItemDecoration(bookList.getContext(), layoutManager.getOrientation());
bookList.setLayoutManager(layoutManager);
bookList.addItemDecoration(dividerItemDecoration);
FloatingActionButton fab = view.findViewById(R.id.add_book_fab);
setupAdapter(books);
fab.setOnClickListener(new View.OnClickListener(){
@Override
public void onClick(View view){
Intent intent = new Intent(getActivity(), AddBookActivity.class);
startActivity(intent);
}
});
bookList.addOnItemTouchListener(new RecyclerItemClickListener(getActivity(), bookList, new RecyclerItemClickListener.OnItemClickListener() {
@Override
public void onItemClick(View view, int position) {
try {
Fragment fragment = (Fragment) (ViewBookFragment.class).newInstance();
fragment.setArguments(BundleBookHelper.bundleBook(books.get(position)));
// Bundle bundle = new Bundle();
// bundle.putParcelable("book", books.get(position));
// fragment.setArguments(bundle);
getActivity().getSupportFragmentManager().beginTransaction()
.replace(R.id.content_frame, fragment)
.addToBackStack(null)
.commit();
} catch(IllegalAccessException e){
e.printStackTrace();
} catch(java.lang.InstantiationException e) {
e.printStackTrace();
}
}
@Override
public void onItemLongClick(View view, int position) {
Snackbar.make(view, "Long touch", Snackbar.LENGTH_SHORT).show();
}
}));
return view;
}
@Override
public void onDetach() {
super.onDetach();
mListener = null;
}
@Override
public void run(){ database = AppDatabase.getDatabase(this.getContext()); }
private void setupAdapter(List<Book> books){
BooksAdapter adapter;
adapter = new BooksAdapter(getActivity(), books);
bookList.setAdapter(adapter);
int currSize = adapter.getItemCount();
adapter.notifyItemRangeInserted(currSize, books.size());
}
} |
package gaiamod.handlers;
import cpw.mods.fml.common.eventhandler.SubscribeEvent;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
import gaiamod.armor.ModArmor;
import net.minecraft.block.material.Material;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityLiving;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.SharedMonsterAttributes;
import net.minecraft.entity.ai.attributes.AttributeModifier;
import net.minecraft.entity.ai.attributes.IAttributeInstance;
import net.minecraft.entity.effect.EntityLightningBolt;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.projectile.EntityArrow;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.util.DamageSource;
import net.minecraftforge.client.event.EntityViewRenderEvent;
import net.minecraftforge.client.event.FOVUpdateEvent;
import net.minecraftforge.event.entity.EntityStruckByLightningEvent;
import net.minecraftforge.event.entity.living.LivingAttackEvent;
import net.minecraftforge.event.entity.living.LivingEvent.LivingJumpEvent;
import net.minecraftforge.event.entity.living.LivingEvent.LivingUpdateEvent;
import net.minecraftforge.event.entity.living.LivingHurtEvent;
import org.lwjgl.opengl.GL11;
import java.util.UUID;
public class GaiaModEventHandler
{
private static final UUID wtvID = UUID.fromString("641470a0-ff51-4598-bdae-210184bbe083");
@SideOnly(Side.CLIENT)
@SubscribeEvent
public void onFOVUpdate(FOVUpdateEvent event)
{
IAttributeInstance atinst = event.entity.getEntityAttribute(SharedMonsterAttributes.movementSpeed);
if (atinst.getModifier(wtvID) != null)
{
event.newfov = (event.newfov * 2.0f) - 1.0f;
event.newfov = event.newfov / 1.4f; // Earth speed modifier
event.newfov = (event.newfov + 1.0f) / 2.0f;
}
}
@SideOnly(Side.CLIENT)
@SubscribeEvent
public void EntityViewRenderEvent(EntityViewRenderEvent.FogDensity event)
{
EntityLivingBase entityLiving = event.entity;
if (hasEquippedSet(entityLiving, ModArmor.fireHelmet, ModArmor.fireChest, ModArmor.fireLeggings, ModArmor.fireBoots)
|| hasEquippedSet(entityLiving, ModArmor.chaosHelmet, ModArmor.chaosChest, ModArmor.chaosLeggings, ModArmor.chaosBoots)
|| hasEquippedSet(entityLiving, ModArmor.orderHelmet, ModArmor.orderChest, ModArmor.orderLeggings, ModArmor.orderBoots))
{
if (event.entity.isInsideOfMaterial(Material.lava))
{
event.setCanceled(true);
event.density = 0.2f;
GL11.glFogi(GL11.GL_FOG_MODE, GL11.GL_EXP);
}
}
}
@SideOnly(Side.CLIENT)
@SubscribeEvent
public void EntityViewRenderEvent(EntityViewRenderEvent.FogColors event)
{
EntityLivingBase entityLiving = event.entity;
if (hasEquippedSet(entityLiving, ModArmor.fireHelmet, ModArmor.fireChest, ModArmor.fireLeggings, ModArmor.fireBoots)
|| hasEquippedSet(entityLiving, ModArmor.chaosHelmet, ModArmor.chaosChest, ModArmor.chaosLeggings, ModArmor.chaosBoots)
|| hasEquippedSet(entityLiving, ModArmor.orderHelmet, ModArmor.orderChest, ModArmor.orderLeggings, ModArmor.orderBoots))
{
if (event.entity.isInsideOfMaterial(Material.lava))
{
event.red = 0.5f;
event.green = 0;
event.blue = 0;
}
}
}
@SubscribeEvent
public void onPlayerUpdate(LivingUpdateEvent event)
{
EntityLivingBase entityLiving = event.entityLiving;
boolean isChaosEquipped = hasEquippedSet(entityLiving, ModArmor.chaosHelmet, ModArmor.chaosChest, ModArmor.chaosLeggings, ModArmor.chaosBoots);
boolean isOrderEquipped = hasEquippedSet(entityLiving, ModArmor.orderHelmet, ModArmor.orderChest, ModArmor.orderLeggings, ModArmor.orderBoots);
boolean isEarthEquipped = hasEquippedSet(entityLiving, ModArmor.earthHelmet, ModArmor.earthChest, ModArmor.earthLeggings, ModArmor.earthBoots);
boolean isWaterEquipped = hasEquippedSet(entityLiving, ModArmor.waterHelmet, ModArmor.waterChest, ModArmor.waterLeggings, ModArmor.waterBoots);
boolean isHeartEquipped = hasEquippedSet(entityLiving, ModArmor.heartHelmet, ModArmor.heartChest, ModArmor.heartLeggings, ModArmor.heartBoots);
boolean isFireEquipped = hasEquippedSet(entityLiving, ModArmor.fireHelmet, ModArmor.fireChest, ModArmor.fireLeggings, ModArmor.fireBoots);
boolean isWindEquipped = hasEquippedSet(entityLiving, ModArmor.windHelmet, ModArmor.windChest, ModArmor.windLeggings, ModArmor.windBoots);
if (!entityLiving.isSneaking() && (isEarthEquipped || isChaosEquipped || isOrderEquipped))
entityLiving.stepHeight = 1.0f;
else
entityLiving.stepHeight = 0.5f;
if (isWindEquipped || isChaosEquipped || isOrderEquipped)
{
entityLiving.fallDistance = 0.0f;
entityLiving.jumpMovementFactor = .05F;
}
else
{
entityLiving.jumpMovementFactor = .02F;
}
IAttributeInstance atinst = entityLiving.getEntityAttribute(SharedMonsterAttributes.movementSpeed);
if (isEarthEquipped)
{
if (atinst.getModifier(wtvID) == null)
{
atinst.applyModifier(new AttributeModifier(wtvID, "Gaia", 0.4, 2));
}
}
else
{
AttributeModifier mod;
if ((mod = atinst.getModifier(wtvID)) != null)
{
atinst.removeModifier(mod);
}
}
if (entityLiving instanceof EntityPlayer)
{
EntityPlayer player = (EntityPlayer) entityLiving;
if (isWaterEquipped || isChaosEquipped || isOrderEquipped)
player.setAir(300);
boolean flyingInWater = player.isInWater() && isWaterEquipped;
boolean flyingInLava = player.isInsideOfMaterial(Material.lava) && isFireEquipped;
boolean flyingWithPower = isChaosEquipped || isOrderEquipped;
player.capabilities.allowFlying = flyingInLava || flyingInWater || flyingWithPower;
if (flyingInLava || flyingInWater)
player.capabilities.setFlySpeed(flyingInWater ? 0.03f : 0.02f);
else
player.capabilities.setFlySpeed(0.05f);
if (!player.capabilities.allowFlying)
player.capabilities.isFlying = false;
if (isHeartEquipped || isChaosEquipped || isOrderEquipped)
{
player.getFoodStats().addStats(20, 5.0f);
}
}
}
@SubscribeEvent
public void onLivingAttack(LivingAttackEvent event)
{
EntityLivingBase entityLiving = event.entityLiving;
if ((event.source != null && event.source == DamageSource.lava || event.source == DamageSource.inFire || event.source == DamageSource.onFire)
&& (hasEquippedSet(entityLiving, ModArmor.fireHelmet, ModArmor.fireChest, ModArmor.fireLeggings, ModArmor.fireBoots)
|| hasEquippedSet(entityLiving, ModArmor.chaosHelmet, ModArmor.chaosChest, ModArmor.chaosLeggings, ModArmor.chaosBoots)
|| hasEquippedSet(entityLiving, ModArmor.orderHelmet, ModArmor.orderChest, ModArmor.orderLeggings, ModArmor.orderBoots)))
{
event.setCanceled(true);
entityLiving.extinguish();
}
}
@SubscribeEvent
public void onLightningStrike(EntityStruckByLightningEvent event)
{
Entity entityLiving = event.entity;
if (hasEquippedSet(entityLiving, ModArmor.stormHelmet, ModArmor.stormChest, ModArmor.stormLeggings, ModArmor.stormBoots))
{
event.setCanceled(true);
}
}
@SubscribeEvent
public void onLivingHurt(LivingHurtEvent event)
{
EntityLivingBase attackedEntity = event.entityLiving;
Entity attackerEntity = event.source.getEntity();
if (event.source.getSourceOfDamage() instanceof EntityArrow
&& hasEquippedSet(attackerEntity, ModArmor.stormHelmet, ModArmor.stormChest, ModArmor.stormLeggings, ModArmor.stormBoots))
{
double i = attackedEntity.posX;
double j = attackedEntity.posY;
double k = attackedEntity.posZ;
EntityLightningBolt entityLightningBolt = new EntityLightningBolt(attackerEntity.worldObj, i, j, k);
attackerEntity.worldObj.addWeatherEffect(entityLightningBolt);
}
}
@SubscribeEvent
public void onLivingJumpEvent(LivingJumpEvent event)
{
EntityLivingBase entityLiving = event.entityLiving;
if (!entityLiving.isSneaking() &&
(hasEquippedSet(entityLiving, ModArmor.windHelmet, ModArmor.windChest, ModArmor.windLeggings, ModArmor.windBoots))
|| hasEquippedSet(entityLiving, ModArmor.chaosHelmet, ModArmor.chaosChest, ModArmor.chaosLeggings, ModArmor.chaosBoots)
|| hasEquippedSet(entityLiving, ModArmor.orderHelmet, ModArmor.orderChest, ModArmor.orderLeggings, ModArmor.orderBoots))
{
entityLiving.motionY += 0.3;
entityLiving.velocityChanged = true;
}
}
private static boolean hasEquippedSet(Entity entity, Item... setItems)
{
if (!(entity instanceof EntityLivingBase))
return false;
EntityLivingBase entityLivingBase = (EntityLivingBase) entity;
for (Item item : setItems)
{
int slot = EntityLiving.getArmorPosition(new ItemStack(item));
ItemStack equippedItem = entityLivingBase.getEquipmentInSlot(slot);
if (equippedItem == null || equippedItem.getItem() != item)
return false;
}
return true;
}
} |
package com.joy.app.activity.city;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.support.v7.widget.RecyclerView;
import android.view.View;
import com.android.library.activity.BaseHttpRvActivity;
import com.android.library.httptask.ObjectRequest;
import com.android.library.httptask.ObjectResponse;
import com.android.library.utils.TextUtil;
import com.android.library.view.recyclerview.RecyclerAdapter;
import com.android.library.widget.FrescoImageView;
import com.android.library.widget.JTextView;
import com.joy.app.R;
import com.joy.app.activity.common.WebViewActivity;
import com.joy.app.adapter.city.CityRouteAdapter;
import com.joy.app.bean.city.City;
import com.joy.app.bean.city.CityRoute;
import com.joy.app.utils.http.CityHtpUtil;
import com.joy.app.utils.http.ReqFactory;
import java.util.List;
public class CityActivity extends BaseHttpRvActivity<List<CityRoute>> implements View.OnClickListener {
private String mPlaceId;
private City mCity;
private boolean isCityDetailReqFailed;
public static void startActivity(Context act, String placeId) {
if (TextUtil.isEmpty(placeId))
return;
Intent it = new Intent(act, CityActivity.class);
it.putExtra("placeId", placeId);
act.startActivity(it);
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
executeCityDetail();// city detail
}
@Override
protected void initData() {
mPlaceId = getIntent().getStringExtra("placeId");
}
@Override
protected void initTitleView() {
addTitleLeftBackView();
}
@Override
protected void initContentView() {
setBackgroundResource(R.color.color_primary);
setSwipeRefreshEnable(false);
setAdapter(new CityRouteAdapter());
setOnItemClickListener(new RecyclerAdapter.OnItemClickListener() {
@Override
public void onItemClick(RecyclerView.ViewHolder holder, int position) {
CityRoute cityRoute = (CityRoute) getAdapter().getItem(position);
WebViewActivity.startActivityNoTitle(CityActivity.this, cityRoute.getRoute_url(), WebViewActivity.TYPE_CITY_RECMMMEND);
}
});
}
private void executeCityDetail() {
ObjectRequest<City> cityReq = ReqFactory.newPost(CityHtpUtil.URL_POST_CITY, City.class, CityHtpUtil.getCityParams(mPlaceId));
cityReq.setResponseListener(new ObjectResponse<City>() {
@Override
public void onPre() {
hideTipView();
showLoading();
}
@Override
public void onSuccess(Object tag, City city) {
onSuccessCallback(city);
executeRefreshOnly();// recommend route
}
@Override
public void onError(Object tag, String msg) {
isCityDetailReqFailed = true;
hideLoading();
hideContentView();
showFailedTip();
}
});
addRequestNoCache(cityReq);
}
@Override
protected void onRetry() {
if (isCityDetailReqFailed) {
isCityDetailReqFailed = false;
executeCityDetail();
} else {
super.onRetry();
}
}
private void onSuccessCallback(City city) {// generate header view
mCity = city;
View headerView = inflateLayout(R.layout.view_city_header);
FrescoImageView fivHeader = (FrescoImageView) headerView.findViewById(R.id.sdvPhoto);
fivHeader.setImageURI(city.getPic_url());
JTextView jtvCnName = (JTextView) headerView.findViewById(R.id.jtvCnName);
JTextView jtvEnName = (JTextView) headerView.findViewById(R.id.jtvEnName);
jtvCnName.setText(mCity.getCn_name());
jtvEnName.setText(mCity.getEn_name());
headerView.findViewById(R.id.jimTicket).setOnClickListener(this);
headerView.findViewById(R.id.jimVisa).setOnClickListener(this);
headerView.findViewById(R.id.jimAirpalne).setOnClickListener(this);
headerView.findViewById(R.id.jimWifi).setOnClickListener(this);
headerView.findViewById(R.id.jimPlay).setOnClickListener(this);
headerView.findViewById(R.id.jimFood).setOnClickListener(this);
headerView.findViewById(R.id.jimShop).setOnClickListener(this);
headerView.findViewById(R.id.jimHotel).setOnClickListener(this);
addHeaderView(headerView);
}
@Override
public void onClick(View v) {
switch (v.getId()) {
case R.id.jimTicket:
if (mCity != null)
WebViewActivity.startActivityNoTitleShare(this, mCity.getTicket_url());
break;
case R.id.jimVisa:
if (mCity != null)
WebViewActivity.startActivityNoTitleShare(this, mCity.getVisa_url());
break;
case R.id.jimAirpalne:
if (mCity != null)
WebViewActivity.startActivityNoTitleShare(this, mCity.getTraffic_url());
break;
case R.id.jimWifi:
if (mCity != null)
WebViewActivity.startActivityNoTitleShare(this, mCity.getWifi_url());
break;
case R.id.jimPlay:
CityFunActivity.startActivity(this, mPlaceId, CityFunActivity.FunType.PLAY.getNetType());
break;
case R.id.jimHotel:
if (mCity != null)
CityFunActivity.startActivity(this, mPlaceId, CityFunActivity.FunType.HOTEL.getNetType());
break;
case R.id.jimFood:
CityFunActivity.startActivity(this, mPlaceId, CityFunActivity.FunType.FOOD.getNetType());
break;
case R.id.jimShop:
CityFunActivity.startActivity(this, mPlaceId, CityFunActivity.FunType.SHOP.getNetType());
break;
}
}
@Override
protected ObjectRequest<List<CityRoute>> getObjectRequest(int pageIndex, int pageLimit) {
return ReqFactory.newPost(CityHtpUtil.URL_POST_CITY_ROUTE, CityRoute.class, CityHtpUtil.getCityRouteParams(mPlaceId, pageLimit, pageIndex));
}
@Override
protected boolean invalidateContent(List<CityRoute> cityRoutes) {
boolean ret = super.invalidateContent(cityRoutes);
if (ret)
showView(findViewById(R.id.llRouteTitle));
return ret;
}
} |
package gvs.ui.graph.layout;
import java.awt.Point;
import java.util.Iterator;
import java.util.Observable;
import java.util.Observer;
import java.util.Random;
import java.util.Timer;
import java.util.Vector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import gvs.common.Configuration;
import gvs.interfaces.IEdge;
import gvs.interfaces.IVertex;
import gvs.ui.graph.layout.helpers.Area;
import gvs.ui.graph.layout.helpers.AreaDimension;
import gvs.ui.graph.layout.helpers.AreaPoint;
import gvs.ui.graph.layout.helpers.Particle;
import gvs.ui.graph.layout.rules.Traction;
import gvs.ui.graph.layout.ticker.AreaTicker;
import gvs.ui.graph.layout.ticker.Tickable;
/**
* Creates and prepares the elements which have to be layouted
*
* @author aegli
*
*/
public class LayoutController extends Observable implements Tickable {
private final int setLayoutStableAfterTime = 10000;
private Logger graphContLogger = null;
private Area area = null;
private AreaTicker ticker = null;
private Particle particle = null;
private Vector vertizes = null;
private Vector edges = null;
private boolean doSoftLayout = false;
private Random random = null;
/**
* Starts layout engine
*
*/
public LayoutController() {
// TODO check replacement of Logger Instance
// this.graphContLogger =
// gvs.common.Logger.getInstance().getGraphControllerLogger();
this.graphContLogger = LoggerFactory.getLogger(LayoutController.class);
vertizes = new Vector();
edges = new Vector();
area = new Area(new AreaDimension(950, 900));
ticker = new AreaTicker(this, 40);
ticker.start();
graphContLogger.info("Starting graph layout controller");
graphContLogger.debug("Starting layout guard");
Timer guard = new Timer();
LayoutGuard layoutGuard = new LayoutGuard(area);
guard.schedule(layoutGuard, setLayoutStableAfterTime);
random = new Random();
random.setSeed(4000);
}
/**
* Checks if particles in area are stable. If true, stops layouting engine,
* waits 1500ms and displays with correct components
*/
public void tick(double rate, double rateRatio, boolean drop, long iteration,
long time) {
if (area.getAreaState()) {
ticker.shutdown();
try {
Thread.sleep(Configuration.getInstance().getLayoutDelay());
} catch (InterruptedException e) {
e.printStackTrace();
}
setChanged();
notifyObservers("TRUE");
} else {
setChanged();
notifyObservers("FALSE");
area.updateAll();
}
}
/**
* Receives vertices which have to be layouted
*
* @param vertices
* @param edges
* @param doSoftLayout
*/
public void setElements(Vector vertices, Vector edges, boolean doSoftLayout) {
graphContLogger.info("LayoutController has new elements detected, "
+ "start layouting procedure");
this.doSoftLayout = doSoftLayout;
this.vertizes = vertices;
this.edges = edges;
ticker.startTicking();
createVertexParticles();
createEdgeTractions();
area.setAreaState(false);
}
/**
* Creates a particle for each vertex
*
*/
public void createVertexParticles() {
Point p = new Point();
Iterator it = vertizes.iterator();
while (it.hasNext()) {
IVertex myVertex = (IVertex) it.next();
if (!myVertex.isFixedPosition()) {
if (doSoftLayout) {
p = generateSoftPoints(myVertex);
} else {
p = generateRandomPoints(myVertex);
}
} else {
p = generateFixedPoints(myVertex);
}
particle = new Particle(new AreaPoint(p), myVertex.getId(), myVertex,
myVertex.isFixedPosition(), 50/* masse */, 40/* radius */);
area.addParticles(particle);
}
}
// Use random coordinates as input for engine
private Point generateRandomPoints(IVertex vertex) {
Point randomPoint = new Point();
randomPoint.x = (int) ((double) (area.getUniverseDimension()
.dimensionWidth()) * Math.random());
randomPoint.y = (int) ((double) (area.getUniverseDimension()
.dimensionHeight()) * Math.random());
return randomPoint;
}
// Use soft random coordinates as input for engine
private Point generateSoftPoints(IVertex vertex) {
Point softPoint = new Point();
softPoint.x = (int) (random.nextDouble() * 100);
softPoint.y = (int) (random.nextDouble() * 100);
System.err.println(softPoint.y);
return softPoint;
}
// Use existing vertex coordinates as input for engine
private Point generateFixedPoints(IVertex vertex) {
Point fixedPoint = new Point();
fixedPoint.x = (int) ((double) vertex.getXPosition() * 10);
fixedPoint.y = (int) ((double) vertex.getYPosition() * 10);
return fixedPoint;
}
/**
* Creates tractions between related vertices
*
*/
public void createEdgeTractions() {
Iterator it1 = edges.iterator();
while (it1.hasNext()) {
IEdge edge = (IEdge) it1.next();
IVertex vertexFrom = edge.getStartVertex();
IVertex vertexTo = edge.getEndVertex();
Traction t = new Traction(area.getParticleWithID(vertexFrom.getId()),
area.getParticleWithID(vertexTo.getId()), 10, 70);
area.addTraction(t);
}
}
/**
* Returns layoutig area
*
* @return
*/
public Area getUniverse() {
return area;
}
} |
package hudson.plugins.tasks;
import hudson.Extension;
import hudson.plugins.analysis.core.PluginDescriptor;
/**
* Descriptor for the class {@link TasksPublisher}. Used as a singleton. The
* class is marked as public so that it can be accessed from views.
*
* @author Ulli Hafner
*/
@Extension(ordinal = 100)
public final class TasksDescriptor extends PluginDescriptor {
private static final String ICONS_PREFIX = "/plugin/tasks/icons/";
/** The ID of this plug-in is used as URL. */
static final String PLUGIN_ID = "tasks";
/** The URL of the result action. */
static final String RESULT_URL = PluginDescriptor.createResultUrlName(PLUGIN_ID);
/** Icon to use for the result and project action. */
static final String ICON_URL = ICONS_PREFIX + "tasks-24x24.png";
/**
* Creates a new instance of {@link TasksDescriptor}.
*/
public TasksDescriptor() {
super(TasksPublisher.class);
}
@Override
public String getDisplayName() {
return Messages.Tasks_Publisher_Name();
}
@Override
public String getPluginName() {
return PLUGIN_ID;
}
@Override
public String getIconUrl() {
return ICON_URL;
}
@Override
public String getSummaryIconUrl() {
return ICONS_PREFIX + "tasks-48x48.png";
}
} |
package com.wt.pinger.fragment;
import android.content.Intent;
import android.database.Cursor;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.support.v4.app.LoaderManager;
import android.support.v4.content.CursorLoader;
import android.support.v4.content.Loader;
import android.support.v4.view.MenuItemCompat;
import android.support.v4.widget.SimpleCursorAdapter;
import android.text.method.TextKeyListener;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.widget.Toast;
import com.hivedi.console.Console;
import com.hivedi.era.ERA;
import com.squareup.otto.Subscribe;
import com.wt.pinger.BuildConfig;
import com.wt.pinger.R;
import com.wt.pinger.proto.SimpleQueryHandler;
import com.wt.pinger.providers.CmdContentProvider;
import com.wt.pinger.service.CmdService;
import com.wt.pinger.utils.BusProvider;
import butterknife.BindView;
import butterknife.ButterKnife;
public class ConsoleFragment extends Fragment implements LoaderManager.LoaderCallbacks<Cursor> {
@BindView(R.id.cmd_edit) EditText edit;
@BindView(R.id.cmd_list) ListView list;
@BindView(R.id.cmd_placeholder) LinearLayout placeholder;
@BindView(R.id.cmdBtn) ImageView cmdBtn;
private SimpleCursorAdapter adapter;
public ConsoleFragment() {}
/**
 * Inflates the console layout, wires the output list to a cursor adapter
 * (cursor supplied later by the loader), hooks the run button up to
 * {@link CmdService}, and clears the input field.
 */
@Nullable
@Override
public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
    View root = inflater.inflate(R.layout.fragment_console, container, false);
    ButterKnife.bind(this, root);
    adapter = new SimpleCursorAdapter(getActivity(), R.layout.item_cmd, null, new String[]{"data"}, new int[]{R.id.cmd_item}, 0);
    list.setAdapter(adapter);
    cmdBtn.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            // Jump to the top of the output, switch the button to its
            // "stop" icon, then hand the command off to the service.
            list.setSelectionAfterHeaderView();
            cmdBtn.setImageResource(R.drawable.ic_clear_black_24dp);
            CmdService.executeCmd(getActivity(), edit.getText().toString());
        }
    });
    // Start with an empty, fully reset input field.
    edit.setText("");
    TextKeyListener.clear(edit.getText());
    return root;
}
@Override
public void onCreate(@Nullable Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // Contribute items to the options menu and keep this instance across
    // configuration changes (the two flags are independent).
    setHasOptionsMenu(true);
    setRetainInstance(true);
}
@Override
public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
super.onCreateOptionsMenu(menu, inflater);
MenuItemCompat.setShowAsAction(
menu.add(R.string.label_share).setIcon(R.drawable.ic_share_white_24dp).setOnMenuItemClickListener(new MenuItem.OnMenuItemClickListener() {
@Override
public boolean onMenuItemClick(MenuItem menuItem) {
if (isAdded()) { // <- fox NPE on getActivity()
SimpleQueryHandler qh = new SimpleQueryHandler(getActivity().getContentResolver(), new SimpleQueryHandler.QueryListener() {
@Override
public void onQueryComplete(int token, Object cookie, Cursor cursor) {
if (cursor != null) {
new AsyncTask<Cursor, Void, String>() {
@Override
protected String doInBackground(Cursor... params) {
StringBuilder sb = new StringBuilder();
Cursor cursor = params[0];
final int maxShareSize = 250 * 1024;
if (cursor.moveToFirst()) {
do {
sb.append(cursor.getString(cursor.getColumnIndex("data")));
sb.append("\n");
int len = sb.length();
if (len > maxShareSize) {
// trim
sb.setLength(maxShareSize);
ERA.log("Share length trim from " + len);
ERA.logException(new Exception("Share length trim from " + len));
break;
}
} while (cursor.moveToNext());
}
cursor.close();
return sb.toString();
}
@Override
protected void onPostExecute(String s) {
if (s.length() > 0) {
Intent sendIntent = new Intent();
sendIntent.setAction(Intent.ACTION_SEND);
sendIntent.putExtra(Intent.EXTRA_TEXT, s);
sendIntent.setType("text/plain");
startActivity(sendIntent);
} else {
Toast.makeText(getActivity(), R.string.toast_no_data_to_share, Toast.LENGTH_LONG).show();
}
}
}.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, cursor);
} else {
Toast.makeText(getActivity(), R.string.toast_no_data_to_share, Toast.LENGTH_LONG).show();
}
}
});
qh.startQuery(0, null, CmdContentProvider.URI_CONTENT, null, null, null, null);
}
return false;
}
}), MenuItemCompat.SHOW_AS_ACTION_ALWAYS
);
}
@Override
public void onActivityCreated(@Nullable Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
getLoaderManager().initLoader(1, null, this);
}
@Override
public void onResume() {
super.onResume();
BusProvider.getInstance().register(this);
CmdService.checkService(getActivity());
}
@Override
public void onPause() {
BusProvider.getInstance().unregister(this);
super.onPause();
}
@Override
public Loader<Cursor> onCreateLoader(int id, Bundle args) {
return new CursorLoader(getActivity(), CmdContentProvider.URI_CONTENT, null, null, null, null);
}
@Override
public void onLoadFinished(Loader<Cursor> loader, Cursor data) {
adapter.swapCursor(data);
placeholder.setVisibility(data != null && data.getCount() > 0 ? View.GONE : View.VISIBLE);
}
@Override
public void onLoaderReset(Loader<Cursor> loader) {
adapter.swapCursor(null);
}
@Subscribe @SuppressWarnings("unused")
public void serviceMessages(CmdService.CmdServiceMessage msg) {
if (BuildConfig.DEBUG) {
Console.logi("serviceMessages " + msg.type + ", data=" + msg.data);
}
switch(msg.type) {
case CmdService.CMD_MSG_CHECK:
boolean isWorking = msg.getDataAsBool(false);
cmdBtn.setImageResource(!isWorking ? R.drawable.ic_send_black_24dp : R.drawable.ic_clear_black_24dp);
break;
}
}
} |
package info.tregmine.commands;
import info.tregmine.Tregmine;
import info.tregmine.api.GenericPlayer;
import info.tregmine.api.Rank;
import info.tregmine.database.DAOException;
import info.tregmine.database.IContext;
import info.tregmine.database.IPlayerDAO;
import net.md_5.bungee.api.chat.TextComponent;
import org.bukkit.ChatColor;
import org.bukkit.Server;
import org.bukkit.entity.Player;
import java.util.Collection;
import static org.bukkit.ChatColor.WHITE;
public class ActionCommand extends AbstractCommand {
public ActionCommand(Tregmine tregmine) {
super(tregmine, "action");
}
private String argsToMessage(String[] args) {
StringBuffer buf = new StringBuffer();
buf.append(args[0]);
for (int i = 1; i < args.length; ++i) {
buf.append(" ");
buf.append(args[i]);
}
return buf.toString();
}
@Override
public boolean handlePlayer(GenericPlayer player, String[] args) {
if (args.length == 0) {
return false;
}
Server server = player.getServer();
String channel = player.getChatChannel();
String msg = argsToMessage(args);
if (player.getRank() != Rank.RESIDENT && player.getRank() != Rank.SETTLER && player.getRank() != Rank.TOURIST
&& player.getRank() != Rank.UNVERIFIED) {
if (msg.contains("#r") || msg.contains("#R")) {
msg = msg.replaceAll("#R", ChatColor.RESET + "");
msg = msg.replaceAll("#r", ChatColor.RESET + "");
}
if (msg.contains("
msg = msg.replaceAll("#0", ChatColor.BLACK + "");
}
if (msg.contains("
msg = msg.replaceAll("#1", ChatColor.DARK_BLUE + "");
}
if (msg.contains("
msg = msg.replaceAll("#2", ChatColor.DARK_GREEN + "");
}
if (msg.contains("
msg = msg.replaceAll("#3", ChatColor.DARK_AQUA + "");
}
if (msg.contains("
msg = msg.replaceAll("#4", ChatColor.DARK_RED + "");
}
if (msg.contains("
msg = msg.replaceAll("#5", ChatColor.DARK_PURPLE + "");
}
if (msg.contains("
msg = msg.replaceAll("#6", ChatColor.GOLD + "");
}
if (msg.contains("
msg = msg.replaceAll("#7", ChatColor.GRAY + "");
}
if (msg.contains("
msg = msg.replaceAll("#8", ChatColor.DARK_GRAY + "");
}
if (msg.contains("
msg = msg.replaceAll("#9", ChatColor.BLUE + "");
}
if (msg.contains("#a") || msg.contains("#A")) {
msg = msg.replaceAll("#A", ChatColor.GREEN + "");
msg = msg.replaceAll("#a", ChatColor.GREEN + "");
}
if (msg.contains("#b") || msg.contains("#B")) {
msg = msg.replaceAll("#B", ChatColor.AQUA + "");
msg = msg.replaceAll("#b", ChatColor.AQUA + "");
}
if (msg.contains("#c") || msg.contains("#C")) {
msg = msg.replaceAll("#C", ChatColor.RED + "");
msg = msg.replaceAll("#c", ChatColor.RED + "");
}
if (msg.contains("#d") || msg.contains("#D")) {
msg = msg.replaceAll("#D", ChatColor.LIGHT_PURPLE + "");
msg = msg.replaceAll("#d", ChatColor.LIGHT_PURPLE + "");
}
if (msg.contains("#e") || msg.contains("#E")) {
msg = msg.replaceAll("#E", ChatColor.YELLOW + "");
msg = msg.replaceAll("#e", ChatColor.YELLOW + "");
}
if (msg.contains("#f") || msg.contains("#F")) {
msg = msg.replaceAll("#F", ChatColor.WHITE + "");
msg = msg.replaceAll("#f", ChatColor.WHITE + "");
}
if (msg.contains("#k") || msg.contains("#K")) {
msg = msg.replaceAll("#K", ChatColor.MAGIC + "");
msg = msg.replaceAll("#k", ChatColor.MAGIC + "");
}
if (msg.contains("#l") || msg.contains("#L")) {
msg = msg.replaceAll("#L", ChatColor.BOLD + "");
msg = msg.replaceAll("#l", ChatColor.BOLD + "");
}
if (msg.contains("#m") || msg.contains("#M")) {
msg = msg.replaceAll("#M", ChatColor.STRIKETHROUGH + "");
msg = msg.replaceAll("#m", ChatColor.STRIKETHROUGH + "");
}
if (msg.contains("#n") || msg.contains("#N")) {
msg = msg.replaceAll("#N", ChatColor.UNDERLINE + "");
msg = msg.replaceAll("#n", ChatColor.UNDERLINE + "");
}
if (msg.contains("#o") || msg.contains("#O")) {
msg = msg.replaceAll("#O", ChatColor.ITALIC + "");
msg = msg.replaceAll("#o", ChatColor.ITALIC + "");
}
} else {
player.sendMessage(ChatColor.RED + "You are not allowed to use chat colors!");
}
Collection<? extends Player> players = server.getOnlinePlayers();
for (Player tp : players) {
GenericPlayer to = tregmine.getPlayer(tp);
if (!channel.equals(to.getChatChannel())) {
continue;
}
boolean ignored;
try (IContext ctx = tregmine.createContext()) {
IPlayerDAO playerDAO = ctx.getPlayerDAO();
ignored = playerDAO.doesIgnore(to, player);
} catch (DAOException e) {
throw new RuntimeException(e);
}
if (player.getRank().canNotBeIgnored())
ignored = false;
if (ignored == true)
continue;
TextComponent begin = new TextComponent("* ");
TextComponent middle = new TextComponent(player.decideVS(to));
TextComponent end = new TextComponent(" " + WHITE + msg);
to.sendMessage(begin, middle, end);
}
Tregmine.LOGGER.info("* " + player.getName() + " " + msg);
if (player.getRank() != Rank.SENIOR_ADMIN && player.getRank() != Rank.JUNIOR_ADMIN) {
msg = msg.replaceAll("@everyone", "").replaceAll("@here", "");
}
this.tregmine.getDiscordDelegate().getChatChannel().sendMessage("**" + player.getName() + "** " + msg).complete();
return true;
}
} |
package fr.tvbarthel.apps.shapi.game;
import android.content.Context;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.util.AttributeSet;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.FrameLayout;
import java.util.ArrayList;
import java.util.List;
import fr.tvbarthel.apps.shapi.R;
import fr.tvbarthel.apps.shapi.shape.Shape;
import fr.tvbarthel.apps.shapi.ui.drag.DragHelper;
import fr.tvbarthel.apps.shapi.ui.drag.DragListener;
/**
* View used to render the game field to the user.
*/
public class FieldView extends FrameLayout {
private DragHelper dragHelper;
private DragListener dragListener;
private Listener listener;
private DropZoneView dropZone1;
private DropZoneView dropZone2;
private DropZoneView dropZone3;
private DropZoneView dropZone4;
private OnClickListener internalClickListener;
private DropZoneView.Listener internalDropZoneListener;
private FrameLayout dragMask;
/**
* View used to render the game field to the user.
*
* @param context a {@link Context}
*/
public FieldView(Context context) {
super(context);
initialize(context);
}
/**
* View used to render the game field to the user.
*
* @param context a {@link Context}
* @param attrs an {@link AttributeSet}
*/
public FieldView(Context context, AttributeSet attrs) {
super(context, attrs);
initialize(context);
}
/**
* View used to render the game field to the user..
*
* @param context a {@link Context}
* @param attrs an {@link AttributeSet}
* @param defStyleAttr the def style attribute
*/
public FieldView(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
initialize(context);
}
/**
* Set the field to display to the user.
*
* @param field field to display.
*/
public void setField(@NonNull Field field) {
//noinspection unchecked
dragHelper.register(dragMask, dragListener, new ArrayList<Class<?>>(field.getAvailableShapes()));
displayDropZones(field.getZones());
}
/**
* Listener used to catch view events.
*
* @param listener listener used to catch view events.
*/
public void setListener(Listener listener) {
this.listener = listener;
}
/**
* Initialize internal component.
*/
private void initialize(Context context) {
LayoutInflater.from(context).inflate(R.layout.field_view, this);
initializeInternalListeners();
dragMask = ((FrameLayout) findViewById(R.id.field_view_drag_mask));
dropZone1 = ((DropZoneView) findViewById(R.id.field_view_drop_zone_1));
dropZone2 = ((DropZoneView) findViewById(R.id.field_view_drop_zone_2));
dropZone3 = ((DropZoneView) findViewById(R.id.field_view_drop_zone_3));
dropZone4 = ((DropZoneView) findViewById(R.id.field_view_drop_zone_4));
dragMask.setOnClickListener(internalClickListener);
dropZone1.setListener(internalDropZoneListener);
dropZone2.setListener(internalDropZoneListener);
dropZone3.setListener(internalDropZoneListener);
dropZone4.setListener(internalDropZoneListener);
dragHelper = DragHelper.getInstance();
}
private void initializeInternalListeners() {
internalClickListener = new OnClickListener() {
@Override
public void onClick(View v) {
if (listener != null) {
listener.onEmphasisOnAvailableActionRequested();
}
}
};
internalDropZoneListener = new DropZoneView.Listener() {
@Override
public void onShapeDropped(@NonNull DropZone dropZone, @Nullable Shape shape) {
if (listener != null) {
listener.onShapeDropped(dropZone, shape);
}
}
};
dragListener = new DragListener() {
@Override
protected void onDragDropped(View source, Object data, float x, float y) {
super.onDragDropped(source, data, x, y);
animateViewBack(source, x, y);
}
@Override
protected void onDragEnded(View source, Object data) {
super.onDragEnded(source, data);
source.setVisibility(VISIBLE);
}
};
}
private void animateViewBack(View source, float dropX, float dropY) {
source.setTranslationX(dropX - (source.getX() + source.getWidth() / 2));
source.setTranslationY(dropY - (source.getY() + source.getHeight() / 2));
source.setAlpha(0.5f);
source.setVisibility(VISIBLE);
source.animate().alpha(1f).translationX(0).translationY(0).setDuration(300).setListener(null);
}
private void displayDropZones(List<DropZone> zones) {
if (zones.size() == 1) {
dropZone1.setVisibility(View.VISIBLE);
dropZone2.setVisibility(View.GONE);
dropZone3.setVisibility(View.GONE);
dropZone4.setVisibility(View.GONE);
dropZone1.setDropZone(zones.get(0));
} else if (zones.size() == 2) {
dropZone1.setVisibility(View.VISIBLE);
dropZone2.setVisibility(View.VISIBLE);
dropZone3.setVisibility(View.GONE);
dropZone4.setVisibility(View.GONE);
dropZone1.setDropZone(zones.get(0));
dropZone2.setDropZone(zones.get(1));
} else if (zones.size() == 4) {
dropZone1.setVisibility(View.VISIBLE);
dropZone2.setVisibility(View.VISIBLE);
dropZone3.setVisibility(View.VISIBLE);
dropZone4.setVisibility(View.VISIBLE);
dropZone1.setDropZone(zones.get(0));
dropZone2.setDropZone(zones.get(1));
dropZone3.setDropZone(zones.get(2));
dropZone4.setDropZone(zones.get(3));
}
}
/**
* Listener used to catch view events.
*/
public interface Listener {
/**
* Called when the user request emphasis on available actions.
*/
void onEmphasisOnAvailableActionRequested();
/**
* Called when the user dropped a shape in the given drop zone.
*
* @param dropZone zone in which the user dropped the shape.
* @param shape shape dropped by the user.
*/
void onShapeDropped(@NonNull DropZone dropZone, @Nullable Shape shape);
}
} |
package info.u_team.u_team_core.block;
import info.u_team.u_team_core.api.registry.IUBlock;
import net.minecraft.block.Block;
import net.minecraft.item.*;
public class UBlock extends Block implements IUBlock {
protected final String name;
protected ItemBlock itemblock;
public UBlock(String name, Properties properties) {
this(name, properties, true);
}
public UBlock(String name, Properties properties, boolean shouldCreateItemBlock) {
this(name, null, properties, shouldCreateItemBlock);
}
public UBlock(String name, ItemGroup group, Properties properties) {
this(name, group, properties, true);
}
public UBlock(String name, ItemGroup group, Properties properties, boolean shouldCreateItemBlock) {
super(properties);
this.name = name;
if (shouldCreateItemBlock) {
itemblock = createItemBlock(group);
}
}
protected ItemBlock createItemBlock(ItemGroup group) {
return new ItemBlock(this, group == null ? new Item.Properties() : new Item.Properties().group(group));
}
@Override
public String getEntryName() {
return name;
}
@Override
public ItemBlock getItemBlock() {
return itemblock;
}
} |
package comlib.manager;
import comlib.provider.DummyMessageProvider;
import comlib.provider.MessageProvider;
import comlib.event.MessageEvent;
import comlib.message.CommunicationMessage;
import comlib.message.MessageID;
import comlib.provider.information.*;
import comlib.util.BitOutputStream;
import comlib.util.BitStreamReader;
import rescuecore2.Constants;
import rescuecore2.config.Config;
import rescuecore2.messages.Command;
import rescuecore2.messages.Message;
import rescuecore2.standard.kernel.comms.ChannelCommunicationModel;
import rescuecore2.standard.messages.AKSpeak;
import rescuecore2.worldmodel.EntityID;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
public class MessageManager
{
private boolean developerMode;
private RadioConfig radioConfig;
private VoiceConfig voiceConfig;
private boolean useRadio;
private int numRadio;
private int numVoice;
private int kernelTime;
private MessageProvider[] providerList;
private List<MessageEvent> eventList;
private List<CommunicationMessage> receivedMessages; // FOR-COMPATIBLE
private List<CommunicationMessage> sendMessages;
private BitOutputStream[] bitOutputStreamList;
private int[] maxBandWidthList;
public MessageManager(Config config)
{
this.init(config);
}
private void init(Config config)
{
this.developerMode = config.getBooleanValue("comlib.develop.developerMode", false);
this.radioConfig = new RadioConfig(config);
this.voiceConfig = new VoiceConfig(config);
this.kernelTime = -1;
this.numRadio = config.getIntValue("comms.channels.max.platoon");
this.numVoice = ((config.getValue("comms.channels.0.type").equals("voice")) ? 1 : 0);
this.useRadio = ( this.numRadio >= 1 );
this.providerList = new MessageProvider[config.getIntValue("comlib.default.messageID", 16)];
this.bitOutputStreamList = new BitOutputStream[config.getIntValue("comlib.default.messageID", 16)];
this.maxBandWidthList = new int[numRadio];
this.eventList = new ArrayList<>();
this.receivedMessages = new ArrayList<>();
this.sendMessages = new ArrayList<>();
for (int ch = 1; ch <= numRadio; ch++)
{ maxBandWidthList[ch -1] = config.getIntValue("comms.channels." + ch + ".bandwidth"); }
this.initLoadProvider();
}
public boolean canUseRadio()
{ return this.useRadio; }
public RadioConfig getRadioConfig()
{ return this.radioConfig; }
public VoiceConfig getVoiceConfig()
{ return this.voiceConfig; }
public int getTime()
{ return this.kernelTime; }
public int getMaxBandWidth(int ch)
{ return this.maxBandWidthList[ch -1]; }
public void receiveMessage(int time, Collection<Command> heard)
{
this.kernelTime = time;
this.receivedMessages.clear();
for (BitOutputStream bos : bitOutputStreamList)
{ bos.reset(); }
for (Command command : heard)
{
if (command instanceof AKSpeak)
{
byte[] data = ((AKSpeak)command).getContent();
String voice = new String(data);
if ("Help".equalsIgnoreCase(voice) || "Ouch".equalsIgnoreCase(voice))
{ continue; }
String[] voiceData = voice.split(this.voiceConfig.getMessageSeparator());
if (this.voiceConfig.getKeyword().equals(voiceData[0]))
{ this.receiveVoiceMessage(Arrays.copyOfRange(voiceData, 1, voiceData.length - 1), this.receivedMessages); }
else
{ this.receiveRadioMessage(data, this.receivedMessages); }
}
}
}
private void receiveRadioMessage(byte[] data, List<CommunicationMessage> list)
{
if (data == null || list == null)
{ return; }
BitStreamReader bsr = new BitStreamReader(data);
MessageProvider provider = this.providerList[bsr.getBits(this.radioConfig.getSizeOfMessageID())];
while(bsr.getRemainBuffer() > 0)
{
try
{
CommunicationMessage msg = provider.create(this, bsr);
list.add(msg);
} catch(Exception e) {
//System.err.println("Received message is corrupt or format is different.");
e.printStackTrace();
return;
}
}
}
private void receiveVoiceMessage(String[] data, List<CommunicationMessage> list)
{
if (data == null || (data.length & 0x01) == 1 || list == null)
{ return; }
for (int count = 0; count < data.length; count += 2)
{
int id = Integer.parseInt(data[count]);
String[] messageData = data[count + 1].split(this.voiceConfig.getDataSeparator());
list.add(this.providerList[id].create(this, messageData));
}
}
public List<Message> createSendMessage(EntityID agentID)
{
List<Message> messages = new ArrayList<Message>();
int bosNum = 0;
boolean isFirstLoop = true;
for (int ch = 1; ch <= numRadio; ch++)
{
int sentMessageSize = 0;
for (; bosNum < bitOutputStreamList.length; bosNum++)
{
BitOutputStream bos = bitOutputStreamList[bosNum];
if (bos.size() <= 0)
{ continue; }
if ((sentMessageSize + bos.size()) > getMaxBandWidth(ch))
{ continue; }
sentMessageSize += bos.size();
messages.add(new AKSpeak(agentID, this.getTime(), ch, bos.toByteArray()));
}
if (ch == numRadio && isFirstLoop)
{
isFirstLoop = false;
ch = 1;
bosNum = 0;
}
}
// StringBuilder sb = new StringBuilder();
// for (CommunicationMessage msg : this.sendMessages)
// { this.providerList[msg.getMessageID()].write(this, sb, msg); }
return messages;
}
public List<CommunicationMessage> getReceivedMessage() // FOR-COMPATIBLE
{
return this.receivedMessages;
}
public <M extends CommunicationMessage> void addSendMessage(M msg)
{
this.sendMessages.add(msg);
int msgID = msg.getMessageID();
// TODO: need cutting data
this.providerList[msgID].write(this, bitOutputStreamList[msgID], msg);
}
// public void old_addSendMessage(CommunicationMessage msg)
// this.sendMessages.add(msg);
public void addVoiceSendMessage(CommunicationMessage msg)
{
// TODO: NFC
this.sendMessages.add(msg);
}
private void initLoadProvider()
{
// TODO: Load provider
this.registerStandardProvider(new DummyMessageProvider(MessageID.dummyMessage));
this.registerStandardProvider(new CivilianMessageProvider(MessageID.civilianMessage));
this.registerStandardProvider(new FireBrigadeMessageProvider(MessageID.fireBrigadeMessage));
this.registerStandardProvider(new PoliceForceMessageProvider(MessageID.policeForceMessage));
this.registerStandardProvider(new AmbulanceTeamMessageProvider(MessageID.ambulanceTeamMessage));
this.registerStandardProvider(new BuildingMessageProvider(MessageID.buildingMessage));
//this.register(CommunicationMessage.buildingMessageID, new BuildingMessageProvider(this.event));
//this.register(CommunicationMessage.blockadeMessageID, new BlockadeMessageProvider(this.event));
//this.register(CommunicationMessage.victimMessageID, new VictimMessageProvider());
//this.register(CommunicationMessage.positionMessageID, new PositionMessageProvider(this.event));
}
private void registerStandardProvider(MessageProvider provider)
{
this.providerList[provider.getMessageID()] = provider;
}
public boolean registerProvider(MessageProvider provider)
{
int messageID = provider.getMessageID();
if (!this.developerMode || this.kernelTime != -1 || provider == null || messageID < 0)
{ return false; }
if (messageID >= this.providerList.length)
{
this.providerList = Arrays.copyOf(this.providerList, messageID +1);
this.bitOutputStreamList = Arrays.copyOf(this.bitOutputStreamList, messageID +1);
}
else if (this.providerList[messageID] != null)
{ return false; }
this.registerStandardProvider(provider);
this.radioConfig.updateMessageIDSize(messageID);
this.searchEvent(this.providerList[messageID]);
return true;
}
public boolean registerEvent(MessageEvent event)
{
if (event == null)
{ return false; }
this.eventList.add(event);
this.searchProvider(event);
return true;
}
private void searchProvider(MessageEvent event)
{
for (MessageProvider provider : this.providerList) {
if(provider != null) {
provider.trySetEvent(event);
}
}
}
private void searchEvent(MessageProvider provider)
{
// if (this.eventList.size() < 1)
// { return; }
for (MessageEvent event : this.eventList) {
provider.trySetEvent(event);
}
}
} |
package javaslang.collection;
import javaslang.Tuple;
import javaslang.Tuple2;
import javaslang.Value;
import javaslang.control.None;
import javaslang.control.Option;
import javaslang.control.Some;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Comparator;
import java.util.NoSuchElementException;
import java.util.Objects;
import java.util.function.BiFunction;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Predicate;
/**
* An interface for data structures that are traversable once.
*
* <p>
* Basic operations:
*
* <ul>
* <li>{@link #clear()}</li>
* <li>{@link #contains(Object)}</li>
* <li>{@link #containsAll(java.lang.Iterable)}</li>
* <li>{@link #head()}</li>
* <li>{@link #headOption()}</li>
* <li>{@link #init()}</li>
* <li>{@link #initOption()}</li>
* <li>{@link #isEmpty()}</li>
* <li>{@link #last()}</li>
* <li>{@link #lastOption()}</li>
* <li>{@link #length()}</li>
* <li>{@link #tail()}</li>
* <li>{@link #tailOption()}</li>
* </ul>
*
* Filtering:
*
* <ul>
* <li>{@link #filter(Predicate)}</li>
* <li>{@link #retainAll(java.lang.Iterable)}</li>
* </ul>
*
* Numeric operations:
*
* <ul>
* <li>{@link #average()}</li>
* <li>{@link #max()}</li>
* <li>{@link #maxBy(Comparator)}</li>
* <li>{@link #maxBy(Function)}</li>
* <li>{@link #min()}</li>
* <li>{@link #minBy(Comparator)}</li>
* <li>{@link #minBy(Function)}</li>
* <li>{@link #product()}</li>
* <li>{@link #sum()}</li>
* </ul>
*
* Reduction:
*
* <ul>
* <li>{@link #fold(Object, BiFunction)}</li>
* <li>{@link #foldLeft(Object, BiFunction)}</li>
* <li>{@link #foldRight(Object, BiFunction)}</li>
* <li>{@link #mkString()}</li>
* <li>{@link #mkString(CharSequence)}</li>
* <li>{@link #mkString(CharSequence, CharSequence, CharSequence)}</li>
* <li>{@link #reduce(BiFunction)}</li>
* <li>{@link #reduceLeft(BiFunction)}</li>
* <li>{@link #reduceRight(BiFunction)}</li>
* </ul>
*
* Selection:
*
* <ul>
* <li>{@link #drop(int)}</li>
* <li>{@link #dropRight(int)}</li>
* <li>{@link #dropWhile(Predicate)}</li>
* <li>{@link #findFirst(Predicate)}</li>
* <li>{@link #findLast(Predicate)}</li>
* <li>{@link #take(int)}</li>
* <li>{@link #takeRight(int)}</li>
* <li>{@link #takeWhile(Predicate)}</li>
* </ul>
*
* Tests:
*
* <ul>
* <li>{@link #existsUnique(Predicate)}</li>
* <li>{@link #hasDefiniteSize()}</li>
* <li>{@link #isTraversableAgain()}</li>
* </ul>
*
* Transformation:
*
* <ul>
* <li>{@link #distinct()}</li>
* <li>{@link #distinctBy(Comparator)}</li>
* <li>{@link #distinctBy(Function)}</li>
* <li>{@link #flatMap(Function)}</li>
* <li>{@link #flatten()}</li>
* <li>{@link #groupBy(Function)}</li>
* <li>{@link #map(Function)}</li>
* <li>{@link #partition(Predicate)}</li>
* <li>{@link #replace(Object, Object)}</li>
* <li>{@link #replaceAll(Object, Object)}</li>
* <li>{@link #span(Predicate)}</li>
* </ul>
*
* @param <T> Component type
* @author Daniel Dietrich and others
* @since 2.0.0
*/
public interface TraversableOnce<T> extends Value<T> {
/**
* Calculates the average of this elements. Returns {@code None} if this is empty, otherwise {@code Some(average)}.
* Supported component types are {@code Byte}, {@code Double}, {@code Float}, {@code Integer}, {@code Long},
* {@code Short}, {@code BigInteger} and {@code BigDecimal}.
* <p>
* Examples:
* <pre>
* <code>
* List.empty().average() // = None
* List.of(1, 2, 3).average() // = Some(2.0)
* List.of(0.1, 0.2, 0.3).average() // = Some(0.2)
* List.of("apple", "pear").average() // throws
* </code>
* </pre>
*
* @return {@code Some(average)} or {@code None}, if there are no elements
* @throws UnsupportedOperationException if this elements are not numeric
*/
@SuppressWarnings("unchecked")
default Option<Double> average() {
    if (isEmpty()) {
        return None.instance();
    }
    // Materialize a re-traversable view when this can only be walked once,
    // because we inspect the head and then stream all elements.
    final TraversableOnce<?> elements = isTraversableAgain() ? this : toStream();
    final Object first = elements.head();
    final TraversableOnce<Number> numbers = (TraversableOnce<Number>) elements;
    final double result;
    if (first instanceof Integer || first instanceof Short || first instanceof Byte) {
        result = numbers.toJavaStream().mapToInt(Number::intValue).average().getAsDouble();
    } else if (first instanceof Double || first instanceof Float || first instanceof BigDecimal) {
        result = numbers.toJavaStream().mapToDouble(Number::doubleValue).average().getAsDouble();
    } else if (first instanceof Long || first instanceof BigInteger) {
        result = numbers.toJavaStream().mapToLong(Number::longValue).average().getAsDouble();
    } else {
        throw new UnsupportedOperationException("not numeric");
    }
    return new Some<>(result);
}
/**
* Returns an empty version of this traversable, i.e. {@code this.clear().isEmpty() == true}.
*
* @return an empty TraversableOnce.
*/
TraversableOnce<T> clear();
/**
* Tests if this TraversableOnce contains a given value.
*
* @param element An Object of type A, may be null.
* @return true, if element is in this TraversableOnce, false otherwise.
*/
default boolean contains(T element) {
    // Null-safe equality; the searched element may itself be null.
    final Predicate<T> sameAsElement = candidate -> Objects.equals(candidate, element);
    return findFirst(sameAsElement).isDefined();
}
/**
* Tests if this TraversableOnce contains all given elements.
* <p>
* The result is equivalent to
* {@code elements.isEmpty() ? true : contains(elements.head()) && containsAll(elements.tail())} but implemented
* without recursion.
*
* @param elements A List of values of type T.
* @return true, if this List contains all given elements, false otherwise.
* @throws NullPointerException if {@code elements} is null
*/
default boolean containsAll(java.lang.Iterable<? extends T> elements) {
    Objects.requireNonNull(elements, "elements is null");
    // Every distinct requested element must be present in this collection.
    for (T candidate : List.ofAll(elements).distinct()) {
        if (!contains(candidate)) {
            return false;
        }
    }
    return true;
}
/**
* Returns a new version of this which contains no duplicates. Elements are compared using {@code equals}.
*
* @return a new {@code TraversableOnce} containing this elements without duplicates
*/
TraversableOnce<T> distinct();
/**
* Returns a new version of this which contains no duplicates. Elements are compared using the given
* {@code comparator}.
*
* @param comparator A comparator
* @return a new {@code TraversableOnce} containing this elements without duplicates
*/
TraversableOnce<T> distinctBy(Comparator<? super T> comparator);
/**
* Returns a new version of this which contains no duplicates. Elements mapped to keys which are compared using
* {@code equals}.
* <p>
* The elements of the result are determined in the order of their occurrence - first match wins.
*
* @param keyExtractor A key extractor
* @param <U> key type
* @return a new {@code TraversableOnce} containing this elements without duplicates
* @throws NullPointerException if {@code keyExtractor} is null
*/
<U> TraversableOnce<T> distinctBy(Function<? super T, ? extends U> keyExtractor);
/**
* Drops the first n elements of this or all elements, if this length < n.
*
* @param n The number of elements to drop.
* @return a new instance consisting of all elements of this except the first n ones, or else the empty instance,
* if this has less than n elements.
*/
TraversableOnce<T> drop(int n);
/**
* Drops the last n elements of this or all elements, if this length < n.
*
* @param n The number of elements to drop.
* @return a new instance consisting of all elements of this except the last n ones, or else the empty instance,
* if this has less than n elements.
*/
TraversableOnce<T> dropRight(int n);
/**
* Drops elements while the predicate holds for the current element.
*
* @param predicate A condition tested subsequently for this elements starting with the first.
* @return a new instance consisting of all elements starting from the first one which does not satisfy the
* given predicate.
* @throws NullPointerException if {@code predicate} is null
*/
TraversableOnce<T> dropWhile(Predicate<? super T> predicate);
/**
* Checks, if a unique elements exists such that the predicate holds.
*
* @param predicate A Predicate
* @return true, if predicate holds for a unique element, false otherwise
* @throws NullPointerException if {@code predicate} is null
*/
default boolean existsUnique(Predicate<? super T> predicate) {
    Objects.requireNonNull(predicate, "predicate is null");
    boolean matchSeen = false;
    for (T element : this) {
        if (!predicate.test(element)) {
            continue;
        }
        if (matchSeen) {
            // A second match disproves uniqueness; no need to look further.
            return false;
        }
        matchSeen = true;
    }
    return matchSeen;
}
/**
* Returns a new traversable consisting of all elements which satisfy the given predicate.
*
* @param predicate A predicate
* @return a new traversable
* @throws NullPointerException if {@code predicate} is null
*/
@Override
TraversableOnce<T> filter(Predicate<? super T> predicate);
/**
* Returns the first element of this which satisfies the given predicate.
*
* @param predicate A predicate.
* @return Some(element) or None, where element may be null (i.e. {@code List.of(null).findFirst(e -> e == null)}).
* @throws NullPointerException if {@code predicate} is null
*/
default Option<T> findFirst(Predicate<? super T> predicate) {
    Objects.requireNonNull(predicate, "predicate is null");
    for (T element : this) {
        if (predicate.test(element)) {
            // The matched element itself may be null, so Some(null) is possible.
            return new Some<>(element);
        }
    }
    return Option.none();
}
/**
* Returns the last element of this which satisfies the given predicate.
* <p>
* Same as {@code reverse().findFirst(predicate)}.
*
* @param predicate A predicate.
* @return Some(element) or None, where element may be null (i.e. {@code List.of(null).findFirst(e -> e == null)}).
* @throws NullPointerException if {@code predicate} is null
*/
Option<T> findLast(Predicate<? super T> predicate);
@Override
<U> TraversableOnce<U> flatMap(Function<? super T, ? extends java.lang.Iterable<? extends U>> mapper);
@Override
<U> TraversableOnce<U> flatten();
/**
 * Accumulates the elements of this TraversableOnce by successively calling the given operator {@code op}.
 * <p>
 * Example: {@code List("a", "b", "c").fold("", (xs, x) -> xs + x) = "abc"}
 *
 * @param zero Value to start the accumulation with.
 * @param op The accumulator operator.
 * @return an accumulated version of this.
 * @throws NullPointerException if {@code op} is null
 */
default T fold(T zero, BiFunction<? super T, ? super T, ? extends T> op) {
    Objects.requireNonNull(op, "op is null");
    // Left fold: accumulate in encounter order, starting from zero.
    T accumulator = zero;
    for (T element : this) {
        accumulator = op.apply(accumulator, element);
    }
    return accumulator;
}
/**
 * Accumulates the elements of this TraversableOnce by successively calling the given function {@code f} from the left,
 * starting with a value {@code zero} of type B.
 * <p>
 * Example: Reverse and map a TraversableOnce in one pass
 * <pre><code>
 * List.of("a", "b", "c").foldLeft(List.empty(), (xs, x) -> xs.prepend(x.toUpperCase()))
 * // = List("C", "B", "A")
 * </code></pre>
 *
 * @param zero Value to start the accumulation with.
 * @param f The accumulator function.
 * @param <U> Result type of the accumulator.
 * @return an accumulated version of this.
 * @throws NullPointerException if {@code f} is null
 */
default <U> U foldLeft(U zero, BiFunction<? super U, ? super T, ? extends U> f) {
    Objects.requireNonNull(f, "f is null");
    U accumulator = zero;
    final Iterator<T> it = iterator();
    while (it.hasNext()) {
        accumulator = f.apply(accumulator, it.next());
    }
    return accumulator;
}
/**
* Accumulates the elements of this TraversableOnce by successively calling the given function {@code f} from the right,
* starting with a value {@code zero} of type B.
* <p>
* Example: {@code List.of("a", "b", "c").foldRight("", (x, xs) -> x + xs) = "abc"}
* <p>
* In order to prevent recursive calls, foldRight is implemented based on reverse and foldLeft. A recursive variant
* is based on foldMap, using the monoid of function composition (endo monoid).
* <pre>
* <code>
* foldRight = reverse().foldLeft(zero, (b, a) -> f.apply(a, b));
* foldRight = foldMap(Algebra.Monoid.endoMonoid(), a -> b -> f.apply(a, b)).apply(zero);
* </code>
* </pre>
*
* @param zero Value to start the accumulation with.
* @param f The accumulator function.
* @param <U> Result type of the accumulator.
* @return an accumulated version of this.
* @throws NullPointerException if {@code f} is null
*/
<U> U foldRight(U zero, BiFunction<? super T, ? super U, ? extends U> f);
/**
 * Returns the first element of this.
 *
 * @return the head element of this TraversableOnce
 * @throws NoSuchElementException if this is empty (propagated from {@code iterator().next()})
 */
@Override
default T get() {
    return iterator().next();
}
/**
* Groups this elements by classifying the elements.
*
* @param classifier A function which classifies elements into classes
* @param <C> classified class type
* @return A Map containing the grouped elements
*/
<C> Map<C, ? extends TraversableOnce<T>> groupBy(Function<? super T, ? extends C> classifier);
/**
 * Checks if this Traversable is known to have a finite size.
 * <p>
 * This method should be implemented by classes only, i.e. not by interfaces.
 *
 * @return true, if this Traversable is known to have a finite size, false otherwise.
 */
boolean hasDefiniteSize();
/**
* Returns the first element of a non-empty TraversableOnce.
*
* @return The first element of this TraversableOnce.
* @throws NoSuchElementException if this is empty
*/
T head();
/**
* Returns the first element of a non-empty TraversableOnce as {@code Option}.
*
* @return {@code Some(element)} or {@code None} if this is empty.
*/
Option<T> headOption();
/**
* Dual of {@linkplain #tail()}, returning all elements except the last.
*
* @return a new instance containing all elements except the last.
* @throws UnsupportedOperationException if this is empty
*/
TraversableOnce<T> init();
/**
* Dual of {@linkplain #tailOption()}, returning all elements except the last as {@code Option}.
*
* @return {@code Some(traversable)} or {@code None} if this is empty.
*/
Option<? extends TraversableOnce<T>> initOption();
/**
* Checks if this TraversableOnce is empty.
*
* @return true, if this TraversableOnce contains no elements, false otherwise.
*/
@Override
boolean isEmpty();
/**
* Each of Javaslang's collections may contain more than one element.
*
* @return {@code false}
*/
@Override
default boolean isSingletonType() {
return false;
}
/**
* Checks if this Traversable can be repeatedly traversed.
* <p>
* This method should be implemented by classes only, i.e. not by interfaces.
*
* @return true, if this Traversable is known to be traversable repeatedly, false otherwise.
*/
boolean isTraversableAgain();
/**
 * An iterator by means of head() and tail(). Subclasses may want to override this method.
 *
 * @return A new Iterator of this TraversableOnce elements.
 */
@Override
default Iterator<T> iterator() {
    final TraversableOnce<T> that = this;
    return new AbstractIterator<T>() {
        // The not-yet-consumed remainder of the traversal; advanced via tail() on each next().
        TraversableOnce<T> traversable = that;
        @Override
        public boolean hasNext() {
            return !traversable.isEmpty();
        }
        @Override
        public T next() {
            if (traversable.isEmpty()) {
                throw new NoSuchElementException();
            } else {
                // Consume exactly one element: remember the head, step to the tail.
                final T result = traversable.head();
                traversable = traversable.tail();
                return result;
            }
        }
    };
}
/**
 * Dual of {@linkplain #head()}, returning the last element.
 *
 * @return the last element.
 * @throws NoSuchElementException if this is empty
 */
default T last() {
    if (isEmpty()) {
        throw new NoSuchElementException("last of empty TraversableOnce");
    }
    // Walk the whole traversal, remembering the most recently seen element.
    T result = null;
    for (T element : this) {
        result = element;
    }
    return result;
}
/**
 * Dual of {@linkplain #headOption()}, returning the last element as {@code Option}.
 *
 * @return {@code Some(element)} or {@code None} if this is empty.
 */
default Option<T> lastOption() {
    if (isEmpty()) {
        return None.instance();
    }
    return new Some<>(last());
}
/**
* Computes the number of elements of this.
*
* @return the number of elements
*/
int length();
/**
* Maps the elements of this traversable to elements of a new type preserving their order, if any.
*
* @param mapper A mapper.
* @param <U> Component type of the target TraversableOnce
* @return a mapped TraversableOnce
* @throws NullPointerException if {@code mapper} is null
*/
@Override
<U> TraversableOnce<U> map(Function<? super T, ? extends U> mapper);
/**
* Calculates the maximum of this elements according to their natural order.
*
* @return {@code Some(maximum)} of this elements or {@code None} if this is empty or this elements are not comparable
*/
@SuppressWarnings("unchecked")
default Option<T> max() {
final Stream<T> stream = Stream.ofAll(iterator());
if (isEmpty() || !(stream.head() instanceof Comparable)) {
return None.instance();
} else {
return stream.maxBy((o1, o2) -> ((Comparable<T>) o1).compareTo(o2));
}
}
/**
 * Calculates the maximum of this elements using a specific comparator.
 *
 * @param comparator A non-null element comparator
 * @return {@code Some(maximum)} of this elements or {@code None} if this is empty
 * @throws NullPointerException if {@code comparator} is null
 */
default Option<T> maxBy(Comparator<? super T> comparator) {
    Objects.requireNonNull(comparator, "comparator is null");
    if (isEmpty()) {
        return None.instance();
    }
    // On ties (compare == 0) the earlier element wins, matching reduceLeft order.
    final T max = reduce((left, right) -> comparator.compare(left, right) >= 0 ? left : right);
    return new Some<>(max);
}
/**
 * Calculates the maximum of this elements within the co-domain of a specific function.
 *
 * @param f A function that maps this elements to comparable elements
 * @param <U> The type where elements are compared
 * @return The element of type T which is the maximum within U
 * @throws NullPointerException if {@code f} is null
 */
default <U extends Comparable<? super U>> Option<T> maxBy(Function<? super T, ? extends U> f) {
    Objects.requireNonNull(f, "f is null");
    if (isEmpty()) {
        return None.instance();
    }
    final Iterator<T> it = iterator();
    // Seed with the first element; strictly-greater comparison keeps the first of equals.
    T best = it.next();
    U bestKey = f.apply(best);
    while (it.hasNext()) {
        final T candidate = it.next();
        final U candidateKey = f.apply(candidate);
        if (candidateKey.compareTo(bestKey) > 0) {
            bestKey = candidateKey;
            best = candidate;
        }
    }
    return new Some<>(best);
}
/**
* Calculates the minimum of this elements according to their natural order.
*
* @return {@code Some(minimum)} of this elements or {@code None} if this is empty or this elements are not comparable
*/
@SuppressWarnings("unchecked")
default Option<T> min() {
final Stream<T> stream = Stream.ofAll(iterator());
if (isEmpty() || !(stream.head() instanceof Comparable)) {
return None.instance();
} else {
return stream.minBy((o1, o2) -> ((Comparable<T>) o1).compareTo(o2));
}
}
/**
 * Calculates the minimum of this elements using a specific comparator.
 *
 * @param comparator A non-null element comparator
 * @return {@code Some(minimum)} of this elements or {@code None} if this is empty
 * @throws NullPointerException if {@code comparator} is null
 */
default Option<T> minBy(Comparator<? super T> comparator) {
    Objects.requireNonNull(comparator, "comparator is null");
    if (isEmpty()) {
        return None.instance();
    }
    // On ties (compare == 0) the earlier element wins, matching reduceLeft order.
    final T min = reduce((left, right) -> comparator.compare(left, right) <= 0 ? left : right);
    return new Some<>(min);
}
/**
 * Calculates the minimum of this elements within the co-domain of a specific function.
 *
 * @param f A function that maps this elements to comparable elements
 * @param <U> The type where elements are compared
 * @return The element of type T which is the minimum within U
 * @throws NullPointerException if {@code f} is null
 */
default <U extends Comparable<? super U>> Option<T> minBy(Function<? super T, ? extends U> f) {
    Objects.requireNonNull(f, "f is null");
    if (isEmpty()) {
        return None.instance();
    }
    final Iterator<T> it = iterator();
    // Seed with the first element; strictly-smaller comparison keeps the first of equals.
    T best = it.next();
    U bestKey = f.apply(best);
    while (it.hasNext()) {
        final T candidate = it.next();
        final U candidateKey = f.apply(candidate);
        if (candidateKey.compareTo(bestKey) < 0) {
            bestKey = candidateKey;
            best = candidate;
        }
    }
    return new Some<>(best);
}
/**
* Joins the elements of this by concatenating their string representations.
* <p>
* This has the same effect as calling {@code mkString("", "", "")}.
*
* @return a new String
*/
default String mkString() {
return mkString("", "", "");
}
/**
* Joins the string representations of this elements using a specific delimiter.
* <p>
* This has the same effect as calling {@code mkString(delimiter, "", "")}.
*
* @param delimiter A delimiter string put between string representations of elements of this
* @return A new String
*/
default String mkString(CharSequence delimiter) {
return mkString(delimiter, "", "");
}
/**
 * Joins the string representations of this elements using a specific delimiter, prefix and suffix.
 * <p>
 * Example: {@code List.of("a", "b", "c").mkString(", ", "Chars(", ")") = "Chars(a, b, c)"}
 *
 * @param delimiter A delimiter string put between string representations of elements of this
 * @param prefix prefix of the resulting string
 * @param suffix suffix of the resulting string
 * @return a new String
 */
default String mkString(CharSequence delimiter, CharSequence prefix, CharSequence suffix) {
    final StringBuilder builder = new StringBuilder(prefix);
    boolean first = true;
    for (T element : this) {
        if (first) {
            first = false;
        } else {
            builder.append(delimiter);
        }
        // StringBuilder.append(Object) applies String.valueOf, so null elements render as "null".
        builder.append(element);
    }
    return builder.append(suffix).toString();
}
/**
 * Creates a partition of this {@code TraversableOnce} by splitting this elements in two distinct traversables
 * according to a predicate.
 *
 * @param predicate A predicate which classifies an element if it is in the first or the second traversable.
 * @return A disjoint union of two traversables. The first {@code TraversableOnce} contains all elements that satisfy the given {@code predicate}, the second {@code TraversableOnce} contains all elements that don't. The original order of elements is preserved.
 * @throws NullPointerException if predicate is null
 */
Tuple2<? extends TraversableOnce<T>, ? extends TraversableOnce<T>> partition(Predicate<? super T> predicate);
@Override
TraversableOnce<T> peek(Consumer<? super T> action);
/**
 * Calculates the product of this elements. Supported component types are {@code Byte}, {@code Double}, {@code Float},
 * {@code Integer}, {@code Long}, {@code Short}, {@code BigInteger} and {@code BigDecimal}.
 * <p>
 * Examples:
 * <pre>
 * <code>
 * List.empty().product() // = 1
 * List.of(1, 2, 3).product() // = 6
 * List.of(0.1, 0.2, 0.3).product() // = 0.006
 * List.of("apple", "pear").product() // throws
 * </code>
 * </pre>
 *
 * @return a {@code Number} representing the product of this elements
 * @throws UnsupportedOperationException if this elements are not numeric
 */
@SuppressWarnings("unchecked")
default Number product() {
    if (isEmpty()) {
        // Empty product is the multiplicative identity.
        return 1;
    } else {
        // One-shot traversables are converted to a Stream first — presumably so that
        // inspecting head() below does not consume elements; confirm against toStream().
        final TraversableOnce<?> objects = isTraversableAgain() ? this : toStream();
        // Numeric dispatch is decided by the first element only; mixed-type input is not detected.
        // Note: BigDecimal routes through doubleValue() and BigInteger through longValue(),
        // so arbitrary-precision values may lose precision/overflow here.
        final Object head = objects.head();
        if (head instanceof Integer || head instanceof Short || head instanceof Byte) {
            return ((TraversableOnce<Number>) objects).toJavaStream().mapToInt(Number::intValue).reduce(1,
                    (i1, i2) -> i1 * i2);
        } else if (head instanceof Double || head instanceof Float || head instanceof BigDecimal) {
            return ((TraversableOnce<Number>) objects).toJavaStream().mapToDouble(Number::doubleValue).reduce(1.0,
                    (d1, d2) -> d1 * d2);
        } else if (head instanceof Long || head instanceof BigInteger) {
            return ((TraversableOnce<Number>) objects).toJavaStream().mapToLong(Number::longValue).reduce(1L,
                    (l1, l2) -> l1 * l2);
        } else {
            throw new UnsupportedOperationException("not numeric");
        }
    }
}
/**
 * Accumulates the elements of this TraversableOnce by successively calling the given operation {@code op}.
 * The order of element iteration is undetermined.
 *
 * @param op A BiFunction of type T
 * @return the reduced value.
 * @throws NoSuchElementException if this is empty (thrown by {@code reduceLeft}, to which this delegates)
 * @throws NullPointerException if {@code op} is null
 */
default T reduce(BiFunction<? super T, ? super T, ? extends T> op) {
    Objects.requireNonNull(op, "op is null");
    return reduceLeft(op);
}
/**
 * Accumulates the elements of this TraversableOnce by successively calling the given operation {@code op} from the left.
 *
 * @param op A BiFunction of type T
 * @return the reduced value.
 * @throws NoSuchElementException if this is empty
 * @throws NullPointerException if {@code op} is null
 */
default T reduceLeft(BiFunction<? super T, ? super T, ? extends T> op) {
    Objects.requireNonNull(op, "op is null");
    if (isEmpty()) {
        throw new NoSuchElementException("reduceLeft on Nil");
    }
    // The head seeds the accumulation; the remaining elements are folded over it.
    return tail().foldLeft(head(), op);
}
/**
* Accumulates the elements of this TraversableOnce by successively calling the given operation {@code op} from the right.
*
* @param op An operation of type T
* @return the reduced value.
* @throws NoSuchElementException if this is empty
* @throws NullPointerException if {@code op} is null
*/
T reduceRight(BiFunction<? super T, ? super T, ? extends T> op);
/**
 * Replaces the first occurrence (if exists) of the given currentElement with newElement.
 *
 * @param currentElement An element to be substituted.
 * @param newElement A replacement for currentElement.
 * @return a TraversableOnce containing all elements of this where the first occurrence of currentElement is replaced with newElement.
 */
TraversableOnce<T> replace(T currentElement, T newElement);
/**
 * Replaces all occurrences of the given currentElement with newElement.
 *
 * @param currentElement An element to be substituted.
 * @param newElement A replacement for currentElement.
 * @return a TraversableOnce containing all elements of this where all occurrences of currentElement are replaced with newElement.
 */
TraversableOnce<T> replaceAll(T currentElement, T newElement);
/**
* Keeps all occurrences of the given elements from this.
*
* @param elements Elements to be kept.
* @return a TraversableOnce containing all occurrences of the given elements.
* @throws NullPointerException if {@code elements} is null
*/
TraversableOnce<T> retainAll(java.lang.Iterable<? extends T> elements);
/**
* Returns a tuple where the first element is the longest prefix of elements that satisfy p and the second element is the remainder.
*
* @param predicate A predicate.
* @return a Tuple containing the longest prefix of elements that satisfy p and the remainder.
* @throws NullPointerException if {@code predicate} is null
*/
Tuple2<? extends TraversableOnce<T>, ? extends TraversableOnce<T>> span(Predicate<? super T> predicate);
/**
 * Calculates the sum of this elements. Supported component types are {@code Byte}, {@code Double}, {@code Float},
 * {@code Integer}, {@code Long}, {@code Short}, {@code BigInteger} and {@code BigDecimal}.
 * <p>
 * Examples:
 * <pre>
 * <code>
 * List.empty().sum() // = 0
 * List.of(1, 2, 3).sum() // = 6
 * List.of(0.1, 0.2, 0.3).sum() // = 0.6
 * List.of("apple", "pear").sum() // throws
 * </code>
 * </pre>
 *
 * @return a {@code Number} representing the sum of this elements
 * @throws UnsupportedOperationException if this elements are not numeric
 */
@SuppressWarnings("unchecked")
default Number sum() {
    if (isEmpty()) {
        // Empty sum is the additive identity.
        return 0;
    } else {
        // One-shot traversables are converted to a Stream first — presumably so that
        // inspecting head() below does not consume elements; confirm against toStream().
        final TraversableOnce<?> objects = isTraversableAgain() ? this : toStream();
        // Numeric dispatch is decided by the first element only; mixed-type input is not detected.
        // Note: BigDecimal routes through doubleValue() and BigInteger through longValue(),
        // so arbitrary-precision values may lose precision/overflow here.
        final Object head = objects.head();
        if (head instanceof Integer || head instanceof Short || head instanceof Byte) {
            return ((TraversableOnce<Number>) objects).toJavaStream().mapToInt(Number::intValue).sum();
        } else if (head instanceof Double || head instanceof Float || head instanceof BigDecimal) {
            return ((TraversableOnce<Number>) objects).toJavaStream().mapToDouble(Number::doubleValue).sum();
        } else if (head instanceof Long || head instanceof BigInteger) {
            return ((TraversableOnce<Number>) objects).toJavaStream().mapToLong(Number::longValue).sum();
        } else {
            throw new UnsupportedOperationException("not numeric");
        }
    }
}
/**
* Drops the first element of a non-empty TraversableOnce.
*
* @return A new instance of TraversableOnce containing all elements except the first.
* @throws UnsupportedOperationException if this is empty
*/
TraversableOnce<T> tail();
/**
* Drops the first element of a non-empty TraversableOnce and returns an {@code Option}.
*
* @return {@code Some(traversable)} or {@code None} if this is empty.
*/
Option<? extends TraversableOnce<T>> tailOption();
/**
* Takes the first n elements of this or all elements, if this length < n.
* <p>
* The result is equivalent to {@code sublist(0, max(0, min(length(), n)))} but does not throw if {@code n < 0} or
* {@code n > length()}.
* <p>
* In the case of {@code n < 0} the empty instance is returned, in the case of {@code n > length()} this is returned.
*
* @param n The number of elements to take.
* @return A new instance consisting the first n elements of this or all elements, if this has less than n elements.
*/
TraversableOnce<T> take(int n);
/**
 * Takes the last n elements of this or all elements, if this length < n.
 * <p>
 * The result is equivalent to {@code sublist(max(0, min(length(), length() - n)), n)}, i.e. takeRight will not
 * throw if {@code n < 0} or {@code n > length()}.
 * <p>
 * In the case of {@code n < 0} the empty instance is returned, in the case of {@code n > length()} this is returned.
 *
 * @param n The number of elements to take.
 * @return A new instance consisting of the last n elements of this, or all elements if this has less than n elements.
 */
TraversableOnce<T> takeRight(int n);
/**
* Takes elements until the predicate holds for the current element.
* <p>
* Note: This is essentially the same as {@code takeWhile(predicate.negate())}. It is intended to be used with
* method references, which cannot be negated directly.
*
* @param predicate A condition tested subsequently for this elements.
* @return a new instance consisting of all elements until the first which does satisfy the given predicate.
* @throws NullPointerException if {@code predicate} is null
*/
TraversableOnce<T> takeUntil(Predicate<? super T> predicate);
/**
* Takes elements while the predicate holds for the current element.
*
* @param predicate A condition tested subsequently for the contained elements.
* @return a new instance consisting of all elements until the first which does not satisfy the given predicate.
* @throws NullPointerException if {@code predicate} is null
*/
TraversableOnce<T> takeWhile(Predicate<? super T> predicate);
/**
 * Unzips this elements by mapping this elements to pairs which are subsequently split into two distinct
 * sets.
 *
 * @param unzipper a function which converts elements of this to pairs
 * @param <T1> 1st element type of a pair returned by unzipper
 * @param <T2> 2nd element type of a pair returned by unzipper
 * @return A pair of sets containing elements split by unzipper
 * @throws NullPointerException if {@code unzipper} is null
 */
<T1, T2> Tuple2<? extends TraversableOnce<T1>, ? extends TraversableOnce<T2>> unzip(
        Function<? super T, Tuple2<? extends T1, ? extends T2>> unzipper);
/**
* Returns a traversable formed from this traversable and another java.lang.Iterable collection by combining
* corresponding elements in pairs. If one of the two iterables is longer than the other, its remaining elements
* are ignored.
* <p>
* The length of the returned traversable is the minimum of the lengths of this traversable and {@code that}
* iterable.
*
* @param <U> The type of the second half of the returned pairs.
* @param that The java.lang.Iterable providing the second half of each result pair.
* @return a new traversable containing pairs consisting of corresponding elements of this traversable and {@code that} iterable.
* @throws NullPointerException if {@code that} is null
*/
<U> TraversableOnce<Tuple2<T, U>> zip(java.lang.Iterable<U> that);
/**
* Returns a traversable formed from this traversable and another java.lang.Iterable by combining corresponding elements in
* pairs. If one of the two collections is shorter than the other, placeholder elements are used to extend the
* shorter collection to the length of the longer.
* <p>
* The length of the returned traversable is the maximum of the lengths of this traversable and {@code that}
* iterable.
* <p>
* Special case: if this traversable is shorter than that elements, and that elements contains duplicates, the
* resulting traversable may be shorter than the maximum of the lengths of this and that because a traversable
* contains an element at most once.
* <p>
* If this Traversable is shorter than that, thisElem values are used to fill the result.
* If that is shorter than this Traversable, thatElem values are used to fill the result.
*
* @param <U> The type of the second half of the returned pairs.
* @param that The java.lang.Iterable providing the second half of each result pair.
* @param thisElem The element to be used to fill up the result if this traversable is shorter than that.
* @param thatElem The element to be used to fill up the result if that is shorter than this traversable.
* @return A new traversable containing pairs consisting of corresponding elements of this traversable and that.
* @throws NullPointerException if {@code that} is null
*/
<U> TraversableOnce<Tuple2<T, U>> zipAll(java.lang.Iterable<U> that, T thisElem, U thatElem);
/**
* Zips this traversable with its indices.
*
* @return A new traversable containing all elements of this traversable paired with their index, starting with 0.
*/
TraversableOnce<Tuple2<T, Integer>> zipWithIndex();
} |
package jp.canetrash.vicuna.logic;
import java.io.FileReader;
import java.io.IOException;
import java.util.Arrays;
import jp.canetrash.vicuna.Const;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import com.google.api.client.auth.oauth2.Credential;
import com.google.api.client.googleapis.auth.oauth2.GoogleAuthorizationCodeFlow;
import com.google.api.client.googleapis.auth.oauth2.GoogleClientSecrets;
import com.google.api.client.googleapis.auth.oauth2.GoogleTokenResponse;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.http.javanet.NetHttpTransport;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.jackson2.JacksonFactory;
import com.google.api.client.util.store.FileDataStoreFactory;
import com.google.api.services.gmail.Gmail;
/**
* @author tfunato
*
*/
@Component
public class OAuthLogic {
private static final String SCOPE = "https:
private static final String APP_NAME = "Gmail API";
private static final String CLIENT_SECRET_PATH = Const.STORE_BASE_DIR
+ "client_secret.json";
private static final java.io.File DATA_STORE_DIR = new java.io.File(
Const.STORE_BASE_DIR, "oauth2");
public static final String USER = "me";
private GoogleAuthorizationCodeFlow flow;
@Value("${oauth.redirect.uri.base}")
private String redirectUriBase;
private HttpTransport httpTransport = new NetHttpTransport();
private JsonFactory jsonFactory = new JacksonFactory();
/**
 * Loads the client secrets from disk and prepares the Google authorization code
 * flow, persisting granted credentials under {@code DATA_STORE_DIR}.
 * Any failure is rethrown as an unchecked exception.
 */
public OAuthLogic() {
    try {
        GoogleClientSecrets clientSecrets = GoogleClientSecrets.load(
                jsonFactory, new FileReader(CLIENT_SECRET_PATH));
        this.flow = new GoogleAuthorizationCodeFlow.Builder(
                httpTransport, jsonFactory, clientSecrets, Arrays.asList(SCOPE))
                .setDataStoreFactory(new FileDataStoreFactory(DATA_STORE_DIR))
                .build();
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
/**
 * Checks whether a usable credential is stored for {@link #USER}: either a
 * refresh token is present, or the access token is still valid for more than
 * 60 seconds.
 *
 * @return true when a usable stored credential exists
 */
public boolean isAuthorized() {
    try {
        Credential credential = flow.loadCredential(USER);
        return credential != null
                && (credential.getRefreshToken() != null
                        || credential.getExpiresInSeconds() > 60);
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
/**
 * Builds the Google consent-screen URL the user must visit to authorize this app.
 *
 * @return the authorization URL including the configured redirect URI
 */
public String getAuthPage() {
    return flow.newAuthorizationUrl().setRedirectUri(redirectUriBase).build();
}
/**
 * Builds a Gmail API client using the credential stored for {@link #USER}.
 * NOTE(review): loadCredential may return null when the user has not authorized
 * yet — callers should check {@link #isAuthorized()} first; verify.
 *
 * @return a configured Gmail service client
 */
public Gmail getGmailService() {
    try {
        Credential credential = flow.loadCredential(USER);
        return new Gmail.Builder(httpTransport, jsonFactory, credential)
                .setApplicationName(APP_NAME).build();
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
/**
 * Exchanges the OAuth authorization {@code code} for tokens and persists the
 * resulting credential for {@link #USER}.
 *
 * @param code authorization code returned by Google to the redirect URI
 */
public void storeCredential(String code) {
    try {
        GoogleTokenResponse response = flow.newTokenRequest(code)
                .setRedirectUri(redirectUriBase).execute();
        flow.createAndStoreCredential(response, USER);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
} |
package md.mgmt.common.impl;
import md.mgmt.base.md.ClusterNodeInfo;
import md.mgmt.common.CommonModule;
import org.springframework.stereotype.Component;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import java.util.UUID;
import java.util.concurrent.ThreadLocalRandom;
@Component
public class CommonModuleImpl implements CommonModule {
/**
 * Generates a random file code: a random UUID with the dashes stripped
 * (32 lowercase hex characters).
 *
 * @return a 32-character hex string
 */
@Override
public String genFileCode() {
    // replace() (literal) instead of replaceAll() (regex): "-" needs no regex engine.
    return UUID.randomUUID().toString().replace("-", "");
}
/**
 * Generates a random distribution code as an unsigned 32-bit value in [0, 2^32).
 *
 * @return a non-negative random code
 */
@Override
public Long genDistrCode() {
    // BUG fix: the original masked with the int literal 0x0FFFFFFFF, which is a
    // no-op on an int and could yield negative codes; masking with the long
    // literal 0xFFFFFFFFL produces the intended unsigned 32-bit value.
    // ThreadLocalRandom also avoids allocating a new Random per call.
    return ThreadLocalRandom.current().nextInt() & 0xFFFFFFFFL;
}
/**
 * Checks whether the given distribution code fits this node.
 * NOTE(review): currently a stub that accepts every code — confirm the intended policy.
 *
 * @param distrCode the code to check
 * @return always true
 */
@Override
public boolean checkDistrCodeFit(Long distrCode) {
    return true;
}
/**
 * Maps a distribution code to the cluster node responsible for it.
 *
 * @param distrCode the distribution code, may be null
 * @return the node info for the code, or null when {@code distrCode} is null
 */
@Override
public ClusterNodeInfo getMdLocation(Long distrCode) {
    if (distrCode == null) {
        return null;
    }
    ClusterNodeInfo clusterNodeInfo = new ClusterNodeInfo();
    // Route by divisibility; branches are checked in order, so multiples of 6 land on node-03.
    if (distrCode % 3 == 0) {
        clusterNodeInfo.setIp("node-03");
    } else if (distrCode % 2 == 0) {
        clusterNodeInfo.setIp("node-02");
    } else {
        clusterNodeInfo.setIp("node-01");
    }
    // BUG fix: the original unconditionally overwrote the IP with the hard-coded
    // address "192.168.0.13" right after the routing above, making the whole
    // if/else dead code. Presumably a leftover debug override; removed so the
    // per-node routing actually applies. TODO confirm the intended addressing.
    clusterNodeInfo.setDistrCode(distrCode);
    clusterNodeInfo.setPort(8008);
    return clusterNodeInfo;
}
/**
 * Generates a fresh distribution code and resolves it to a cluster node.
 *
 * @return the node info for a newly generated code
 */
@Override
public ClusterNodeInfo genMdLocation() {
    return getMdLocation(genDistrCode());
}
/**
 * Resolves each distribution code in the list to its cluster node.
 *
 * @param distrCodeList codes to resolve; may be null and may contain nulls
 * @return a list of node infos (null entries for null codes), or null when the input list is null
 */
@Override
public List<ClusterNodeInfo> getMdLocationList(List<Long> distrCodeList) {
    if (distrCodeList == null) {
        return null;
    }
    List<ClusterNodeInfo> nodeInfos = new ArrayList<ClusterNodeInfo>();
    // BUG fix: the original iterated with the primitive 'long', so a null element
    // threw NullPointerException on unboxing even though getMdLocation handles
    // null explicitly. Iterate as Long to preserve that handling.
    for (Long code : distrCodeList) {
        nodeInfos.add(getMdLocation(code));
    }
    return nodeInfos;
}
} |
package me.coley.recaf.plugin;
import me.coley.recaf.plugin.api.BasePlugin;
import me.coley.recaf.plugin.api.InternalPlugin;
import me.coley.recaf.plugin.api.InternalApi;
import me.coley.recaf.util.Log;
import me.coley.recaf.workspace.EntryLoader;
import org.plugface.core.PluginContext;
import org.plugface.core.impl.DefaultPluginContext;
import org.plugface.core.impl.DefaultPluginManager;
import org.plugface.core.internal.AnnotationProcessor;
import org.plugface.core.internal.DependencyResolver;
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.stream.Collectors;
/**
* Plugin manager.
*
* @author Matt
*/
public class PluginsManager extends DefaultPluginManager {
private static final PluginsManager INSTANCE;
private final Map<String, BasePlugin> plugins = new HashMap<>();
private final Map<String, Boolean> pluginStates = new HashMap<>();
private final Map<String, BufferedImage> pluginIcons = new HashMap<>();
private EntryLoader entryLoader;
/**
* Deny public construction.
*/
private PluginsManager(PluginContext context, AnnotationProcessor processor,
DependencyResolver resolver) {
super(context, processor, resolver);
}
/**
 * Discovers plugins from the plugin folder and registers every instance that
 * implements {@link BasePlugin}; non-conforming classes are logged as errors.
 *
 * @throws Exception when the underlying plugin source or loader fails
 */
public void load() throws Exception {
    PluginFolderSource source = new PluginFolderSource();
    // Collect plugin instances
    Collection<Object> instances = loadPlugins(source);
    for (Object instance : instances) {
        if (instance instanceof BasePlugin) {
            BasePlugin plugin = (BasePlugin) instance;
            String name = plugin.getName();
            String version = plugin.getVersion();
            String className = instance.getClass().getName();
            // Look up the icon via class name -> plugin key -> icon; presumably the
            // key is the containing plugin file — confirm against PluginFolderSource.
            BufferedImage icon =
                    source.getPluginIcons().get(source.getClassToPlugin().get(className));
            Log.info("Discovered plugin '{}-{}'", name, version);
            // PlugFace already has its own internal storage of the plugin instances,
            // but we want to control them a bit easier. So we'll keep a local reference.
            addPlugin(name, plugin, icon);
        } else {
            Log.error("Class '{}' does not extend plugin!", instance.getClass().getName());
        }
    }
    // Done
    if (!plugins.isEmpty())
        Log.info("Loaded {} plugins", plugins.size());
}
/**
 * @return Live map of plugin name to plugin instance (the manager's internal map;
 * mutations are visible to the manager).
 */
public Map<String, BasePlugin> plugins() {
    return plugins;
}
/**
 * @return Map of visible plugin instances, i.e. all registered plugins that are
 * not {@link InternalPlugin} implementations.
 */
public Map<String, BasePlugin> visiblePlugins() {
    Map<String, BasePlugin> visible = new HashMap<>();
    for (Map.Entry<String, BasePlugin> entry : plugins.entrySet()) {
        if (!(entry.getValue() instanceof InternalPlugin)) {
            visible.put(entry.getKey(), entry.getValue());
        }
    }
    return visible;
}
/**
 * @return Live map of plugin name to enabled state ({@code TRUE} = enabled).
 */
public Map<String, Boolean> getPluginStates() {
    return pluginStates;
}
/**
 * Map of plugin's icons. Not all plugins have icons.
 *
 * @return Map of plugin name to icon.
 */
public Map<String, BufferedImage> getPluginIcons() {
    return pluginIcons;
}
/**
 * @return {@code true} when 1 or more plugins have been loaded.
 */
public boolean hasPlugins() {
    return !plugins.isEmpty();
}
/**
 * @return The current entry loader defined by a plugin, or {@code null} if none was set.
 */
public EntryLoader getEntryLoader() {
    return entryLoader;
}
/**
 * Set current entry loader.
 *
 * @param entryLoader
 * 		New entry loader defined by a plugin.
 */
public void setEntryLoader(EntryLoader entryLoader) {
    this.entryLoader = entryLoader;
}
/**
 * Fetch the active plugins matching the given type.
 * This will exclude plugins that are disabled.
 *
 * @param type
 * 		Class of plugin.
 * @param <T>
 * 		Plugin type.
 *
 * @return Collection of active plugins matching the given type.
 */
@SuppressWarnings("unchecked")
public <T extends BasePlugin> Collection<T> ofType(Class<T> type) {
    return plugins().values().stream()
            .filter(plugin -> type.isAssignableFrom(plugin.getClass()))
            // BUG fix: the original used containsKey(...), which is true even when the
            // stored state is FALSE, so disabled plugins were never excluded (contrary
            // to this method's contract). Check the stored Boolean state instead.
            .filter(plugin -> pluginStates.getOrDefault(plugin.getName(), Boolean.FALSE))
            .map(plugin -> (T) plugin)
            .collect(Collectors.toList());
}
/**
* @return Plugins manager instance.
*/
public static PluginsManager getInstance() {
return INSTANCE;
}
/**
* Registers a plugin.
*
* @param name
* Name of the plugin.
* @param plugin
* Plugin to register.
* @param icon
* Icon of the plugin.
*/
@InternalApi
public void addPlugin(String name, BasePlugin plugin, BufferedImage icon) {
plugins.put(name, plugin);
pluginStates.put(name, Boolean.TRUE);
if (icon != null) {
pluginIcons.put(name, icon);
}
}
/**
* Registers a plugin.
*
* @param plugin
* Plugin to register.
* @param icon
* Icon of the plugin.
*/
@InternalApi
public void addPlugin(BasePlugin plugin, BufferedImage icon) {
addPlugin(plugin.getName(), plugin, icon);
}
/**
* Registers a plugin.
*
* @param plugin
* Plugin to register.
*/
@InternalApi
public void addPlugin(BasePlugin plugin) {
addPlugin(plugin, null);
}
static {
DefaultPluginContext context = new DefaultPluginContext();
AnnotationProcessor processor = new AnnotationProcessor();
DependencyResolver resolver = new DependencyResolver(processor);
INSTANCE = new PluginsManager(context, processor, resolver);
}
} |
package net.flutterflies.waterless;
import org.bukkit.Bukkit;
import org.bukkit.Material;
import org.bukkit.configuration.file.FileConfiguration;
import org.bukkit.plugin.java.JavaPlugin;
import java.util.ArrayList;
import java.util.List;
public class Waterless extends JavaPlugin
{
FileConfiguration config;
List<Material> waterlessMats = new ArrayList<Material>();
@Override
public void onEnable()
{
config = this.getConfig();
List<String> stringList = config.getStringList("redstone");
if(stringList != null)
{
waterlessMats.addAll(makeList(stringList));
getLogger().info("Enabling Redstone blocks");
}
else
{
getLogger().info("No Redstone blocks to enable");
}
stringList = config.getStringList("minecart");
if(stringList != null)
{
waterlessMats.addAll(makeList(stringList));
getLogger().info("Enabling Minecart rails.");
}
else
{
getLogger().info("No Minecart rails to enable.");
}
Bukkit.getPluginManager().registerEvents(new WaterListener(waterlessMats), this);
saveConfig();
}
@Override
public void onDisable()
{
config = null;
Bukkit.getScheduler().cancelTasks(this);
}
public List<Material> makeList(List<String> list)
{
List<Material> materialList = new ArrayList<Material>();
for(String string : list)
{
if(string != null)
{
materialList.add(Material.getMaterial(string));
}
}
return materialList;
}
} |
package net.ghostrealms.ticket;
import org.bukkit.plugin.java.JavaPlugin;
/**
 * Plugin entry point. Currently a stub: no commands, listeners or state
 * are registered yet; JavaPlugin's default lifecycle handling suffices.
 */
public class SupportTicket extends JavaPlugin {

    // No startup work yet.
    @Override
    public void onEnable() {
    }

    // No resources to release on shutdown yet.
    @Override
    public void onDisable() {
    }
}
package net.imagej.legacy.ui;
import java.awt.Panel;
import net.imagej.legacy.LegacyService;
import org.scijava.ui.StatusBar;
import org.scijava.widget.UIComponent;
/**
* Adapter {@link StatusBar} implementation that delegates to legacy ImageJ
* methods.
*
* @author Mark Hiner
*/
public class LegacyStatusBar extends AbstractLegacyAdapter implements
    UIComponent<Panel>, StatusBar
{

    public LegacyStatusBar(final LegacyService legacyService) {
        super(legacyService);
    }

    /**
     * Forwards the status message to the legacy ImageJ status bar, guarding
     * against event feedback loops via the service's "processing events" flag.
     */
    @Override
    public void setStatus(final String message) {
        // Claim the "processing events" flag; the returned previous value tells
        // us whether someone further up the stack had already claimed it.
        boolean processing = getLegacyService().setProcessingEvents(true);
        // if we are already in the middle of processing events, then we must have
        // gotten here from an event that originated in the LegacyStatusBar. So,
        // return, knowing that the value will eventually be restored by another
        // finally block earlier in this stack trace.
        if (processing) return;
        try {
            helper().setStatus(message);
        } finally {
            // processing is false here, so this releases the flag we claimed above.
            getLegacyService().setProcessingEvents(processing);
        }
    }

    /**
     * Forwards progress updates to the legacy ImageJ progress display, with the
     * same re-entrancy guard as {@link #setStatus}.
     */
    @Override
    public void setProgress(final int val, final int max) {
        boolean processing = getLegacyService().setProcessingEvents(true);
        // if we are already in the middle of processing events, then we must have
        // gotten here from an event that originated in the LegacyStatusBar. So,
        // return, knowing that the value will eventually be restored by another
        // finally block earlier in this stack trace.
        if (processing) return;
        try {
            helper().setProgress(val, max);
        } finally {
            getLegacyService().setProcessingEvents(processing);
        }
    }

    // Exposes the legacy AWT status bar panel as this adapter's component.
    @Override
    public Panel getComponent() {
        return helper().getStatusBar();
    }

    @Override
    public Class<Panel> getComponentType() {
        return Panel.class;
    }
}
package net.sf.xenqtt.client;
import net.sf.xenqtt.message.QoS;
/**
* Implement this interface to use {@link SynchronousMqttClient}. The client will invoke the methods in this interface when a published message is received. A
* single instance of this interface may be used with multiple clients.
*/
public interface PublishListener {

    /**
     * Called when a published message is received from the broker. You should always call {@link PublishMessage#ack() ack()} when you are done processing the
     * message. This is not required if the {@link PublishMessage#getQoS() QoS} is {@link QoS#AT_MOST_ONCE} but it is a good practice to always call it.
     *
     * NOTE(review): since one instance may be shared by multiple clients (see
     * the interface javadoc), implementations should presumably be thread-safe
     * — confirm against the client's dispatch model.
     *
     * @param client
     *            The client that received the message
     * @param message
     *            The message that was published
     */
    void publish(MqttClient client, PublishMessage message);
}
package net.shadowfacts.zcraft.block;
import net.minecraft.block.Block;
import net.shadowfacts.zcraft.ZCraft;
import net.shadowfacts.zcraft.block.decoration.BlockZinchoriumLight;
import net.shadowfacts.zcraft.gui.ZCreativeTabs;
import cpw.mods.fml.common.registry.GameRegistry;
public class ZBlocks {
// Blocks
public static BlockOre greenZinchoriumGemOre;
public static BlockZinchoriumLight greenZinchoriumLightActive;
public static BlockZinchoriumLight greenZinchoriumLightIdle;
public static BlockOre oreCopper;
// Methods
public static void preInit() {
greenZinchoriumGemOre = new BlockOre();
greenZinchoriumGemOre.setBlockName("greenZinchoriumGemOre").setCreativeTab(ZCreativeTabs.tabZCore)
.setHardness(1.0f).setStepSound(Block.soundTypeStone)
.setBlockTextureName(ZCraft.modId + ":greenZinchoriumGemOre")
.setHarvestLevel("pickaxe", 1);
greenZinchoriumLightActive = new BlockZinchoriumLight(true);
greenZinchoriumLightActive.setBlockName("greenZinchoriumLightActive")
.setBlockTextureName(ZCraft.modId + ":zinchoriumLight/green")
.setHardness(1.0f).setStepSound(Block.soundTypeMetal)
.setHarvestLevel("pickaxe", 2);;
greenZinchoriumLightIdle = new BlockZinchoriumLight(false);
greenZinchoriumLightIdle.setBlockName("greenZinchoriumLightIdle")
.setCreativeTab(ZCreativeTabs.tabZCore).setBlockTextureName(ZCraft.modId + ":zinchoriumLight/green")
.setHardness(1.0f).setStepSound(Block.soundTypeMetal)
.setHarvestLevel("pickaxe", 2);;
oreCopper = new BlockOre();
oreCopper.setBlockName("oreCopper")
.setBlockTextureName(ZCraft.modId + ":oreCopper").setCreativeTab(ZCreativeTabs.tabZCore)
.setHardness(1.5f).setStepSound(Block.soundTypeStone)
.setHarvestLevel("pickaxe", 1);
// Register blocks
GameRegistry.registerBlock(greenZinchoriumGemOre, "greenZinchoriumGemOre");
GameRegistry.registerBlock(greenZinchoriumLightActive, "greenZinchoriumLightActive");
GameRegistry.registerBlock(greenZinchoriumLightIdle, "greenZinchoriumLightIdle");
GameRegistry.registerBlock(oreCopper, "oreCopper");
}
public static void load() {
}
public static void postInit() {
}
} |
package nl.hsac.fitnesse.fixture;
import fit.exception.FitFailureException;
import fitnesse.ContextConfigurator;
import freemarker.template.Configuration;
import freemarker.template.DefaultObjectWrapper;
import freemarker.template.Template;
import nl.hsac.fitnesse.fixture.util.*;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringEscapeUtils;
import java.io.File;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
/**
* Holds overall environment settings. Expected to be set up before actual tests
* are performed.
*/
public class Environment {
private final static Environment INSTANCE = new Environment();
private String fitNesseRoot = ContextConfigurator.DEFAULT_ROOT;
private Configuration freemarkerConfig;
private FreeMarkerHelper fmHelper;
private ConcurrentHashMap<String, Template> templateCache;
private ConcurrentHashMap<String, String> symbols;
private HttpClient httpClient;
private long nextSequenceNr = System.currentTimeMillis();
private NamespaceContextImpl nsContext;
private XMLFormatter xmlFormatter;
private JsonFormatter jsonFormatter;
private HtmlCleaner htmlCleaner;
private TimeoutHelper timeoutHelper = new TimeoutHelper();
private ProgramHelper programHelper;
private DatesHelper datesHelper = new DatesHelper();
private SeleniumHelper seleniumHelper;
private MapHelper mapHelper = new MapHelper();
private Environment() {
Configuration cfg = new Configuration();
// Specify the data source where the template files come from.
cfg.setClassForTemplateLoading(getClass(), "/templates/");
DefaultObjectWrapper wrapper = new DefaultObjectWrapper();
wrapper.setExposeFields(true);
cfg.setObjectWrapper(wrapper);
freemarkerConfig = cfg;
fmHelper = new FreeMarkerHelper();
templateCache = new ConcurrentHashMap<String, Template>();
symbols = new ConcurrentHashMap<String, String>();
xmlFormatter = new XMLFormatter();
nsContext = new NamespaceContextImpl();
fillNamespaceContext();
jsonFormatter = new JsonFormatter();
htmlCleaner = new HtmlCleaner();
httpClient = new HttpClient();
programHelper = new ProgramHelper();
programHelper.setTimeoutHelper(timeoutHelper);
configDatesHelper();
seleniumHelper = new SeleniumHelper();
}
/**
* Fills namespace context with default namespaces.
*/
private void fillNamespaceContext() {
// SOAP
registerNamespace("env", "http://schemas.xmlsoap.org/soap/envelope/");
registerNamespace("xsi", "http:
}
/**
* @return singleton instance.
*/
public static Environment getInstance() {
return INSTANCE;
}
/**
* @return new instance of class.
* @throws RuntimeException if no instance could be created.
*/
public <T> T createInstance(Class<T> clazz) {
try {
return clazz.newInstance();
} catch (Exception e) {
throw new RuntimeException("Unable to create instance of: " + clazz.getName(), e);
}
}
/**
* Stores key/value to be used.
* @param key
* @param value
*/
public void setSymbol(String key, String value) {
if (value == null) {
symbols.remove(key);
} else {
symbols.put(key, value);
}
}
/**
* Retrieves value previously stored.
* @param key
* @return value stored for key.
*/
public String getSymbol(String key) {
return symbols.get(key);
}
/**
* @return next sequence nr
*/
public long getNextNr() {
return nextSequenceNr++;
}
/**
* Only to be used in unit tests.
* @param value next number to provide
*/
public void setNextNr(long value) {
nextSequenceNr = value;
}
/**
* Gets symbol value, or throws exception if no symbol by that key exists.
* @param key symbol's key.
* @return symbol's value.
*/
public String getRequiredSymbol(String key) {
String result = null;
Object symbol = getSymbol(key);
if (symbol == null) {
throw new FitFailureException("No Symbol defined with key: " + key);
} else {
result = symbol.toString();
}
return result;
}
/**
* @return FreeMarker configuration to use.
*/
public Configuration getConfiguration() {
return freemarkerConfig;
}
/**
* @param name name of template to get
* @return template by that name
*/
public Template getTemplate(String name) {
Template result;
if (!templateCache.containsKey(name)) {
Template t = fmHelper.getTemplate(getConfiguration(), name);
result = templateCache.putIfAbsent(name, t);
if (result == null) {
result = t;
}
} else {
result = templateCache.get(name);
}
return result;
}
/**
* @param templateName name of template to apply
* @param model model to supply to template
* @return result of template
*/
public String processTemplate(String templateName, Object model) {
Template t = getTemplate(templateName);
return fmHelper.processTemplate(t, model);
}
/**
* Performs POST to supplied url of result of applying template with model.
* All namespaces registered in this environment will be registered with result.
* @param url url to post to.
* @param templateName name of template to use.
* @param model model for template.
* @param result result to populate with response.
*/
public void callService(String url, String templateName, Object model, XmlHttpResponse result) {
callService(url, templateName, model, result, null);
}
/**
* Performs POST to supplied url of result of applying template with model.
* All namespaces registered in this environment will be registered with result.
* @param url url to post to.
* @param templateName name of template to use.
* @param model model for template.
* @param result result to populate with response.
* @param headers headers to add.
*/
public void callService(String url, String templateName, Object model, XmlHttpResponse result, Map<String, Object> headers) {
doHttpPost(url, templateName, model, result, headers, XmlHttpResponse.CONTENT_TYPE_XML_TEXT_UTF8);
setNamespaceContext(result);
}
/**
* Performs POST to supplied url of result of applying template with model.
* @param url url to post to.
* @param templateName name of template to use.
* @param model model for template.
* @param result result to populate with response.
*/
public void doHttpPost(String url, String templateName, Object model, HttpResponse result) {
doHttpPost(url, templateName, model, result, null, XmlHttpResponse.CONTENT_TYPE_XML_TEXT_UTF8);
}
/**
* Performs POST to supplied url of result of applying template with model.
* @param url url to post to.
* @param templateName name of template to use.
* @param model model for template.
* @param result result to populate with response.
* @param headers headers to add.
* @param contentType contentType for request.
*/
public void doHttpPost(String url, String templateName, Object model, HttpResponse result, Map<String, Object> headers, String contentType) {
String request = processTemplate(templateName, model);
result.setRequest(request);
doHttpPost(url, result, headers, contentType);
}
/**
* Performs POST to supplied url of result's request.
* @param url url to post to.
* @param result result containing request, its response will be filled.
* @param headers headers to add.
* @param contentType contentType for request.
*/
public void doHttpPost(String url, HttpResponse result, Map<String, Object> headers, String contentType) {
httpClient.post(url, result, headers, contentType);
}
/**
* GETs content from URL.
* @param url url to get from.
* @param headers headers to add
* @return response.
*/
public HttpResponse doHttpGet(String url, Map<String, Object> headers) {
HttpResponse response = new HttpResponse();
doGet(url, response, headers);
return response;
}
/**
* GETs XML content from URL.
* @param url url to get from.
* @return response.
*/
public XmlHttpResponse doHttpGetXml(String url) {
XmlHttpResponse response = new XmlHttpResponse();
doGet(url, response);
setNamespaceContext(response);
return response;
}
/**
* GETs content from URL.
* @param url url to get from.
* @param response response to store url and response value in.
* @param headers http headers to add
*/
public void doGet(String url, HttpResponse response, Map<String, Object> headers) {
response.setRequest(url);
httpClient.get(url, response, headers);
}
/**
* GETs content from URL.
* @param url url to get from.
* @param response response to store url and response value in.
*/
public void doGet(String url, HttpResponse response) {
doGet(url, response, null);
}
/**
* DELETEs content at URL.
* @param url url to send delete to.
* @param response response to store url and response value in.
*/
public void doDelete(String url, HttpResponse response) {
response.setRequest(url);
httpClient.delete(url, response);
}
private void setNamespaceContext(XmlHttpResponse response) {
response.setNamespaceContext(getNamespaceContext());
}
/**
* Adds new mapping of prefix to uri for XPath naming resolution.
* @param prefix prefix that will be used
* @param uri uri that prefix should refer to.
*/
public void registerNamespace(String prefix, String uri) {
nsContext.add(prefix, uri);
}
/**
* @return namespace context for XPath evaluation
*/
public NamespaceContextImpl getNamespaceContext() {
return nsContext;
}
/**
* Formats supplied XML string for display in FitNesse page.
* @param xmlString XML to format.
* @return HTML formatted version of xmlString
*/
public String getHtmlForXml(String xmlString) {
return getHtml(xmlFormatter, xmlString);
}
/**
* Formats supplied Json string for display in FitNesse page.
* @param jsonString json to format.
* @return HTML formatted version of jsonString
*/
public String getHtmlForJson(String jsonString) {
return getHtml(jsonFormatter, jsonString);
}
/**
* Formats supplied value for display as pre-formatted text in FitNesse page.
* @param formatter formatter to use to generate pre-formatted text.
* @param value value to format.
* @return HTML formatted version of value.
*/
public String getHtml(Formatter formatter, String value) {
String result = null;
if (value != null) {
if ("".equals(value)) {
result = "";
} else {
String formattedResponse = formatter.format(value);
result = "<pre>" + StringEscapeUtils.escapeHtml4(formattedResponse) + "</pre>";
}
}
return result;
}
/**
* Creates exception that will display nicely in a columnFixture.
* @param msg message for exception
* @param responseText XML received, which will be shown in wiki table.
* @throws FitFailureException always
*/
public static void handleErrorResponse(String msg, String responseText) {
throw new FitFailureException(msg + getInstance().getHtmlForXml(responseText));
}
/**
* @return helper to clean wiki values provided to fixtures.
*/
public HtmlCleaner getHtmlCleaner() {
return htmlCleaner;
}
/**
* Invokes an external program, waits for it to complete,
* and returns the result.
* @param timeout maximum time (in milliseconds) to wait.
* @param directory working directory for program
* (may be null if not important).
* @param command program to start.
* @param arguments arguments for program.
* @return response from program.
*/
public ProgramResponse invokeProgram(int timeout, String directory, String command,
String... arguments) {
ProgramResponse result = new ProgramResponse();
if (directory != null && !StringUtils.isBlank(directory)) {
result.setDirectory(new File(directory));
}
result.setCommand(command);
result.setArguments(arguments);
programHelper.execute(result, timeout);
return result;
}
private void configDatesHelper() {
datesHelper.setDayPattern("%s_dag");
datesHelper.setMonthPattern("%s_maand");
datesHelper.setYearPattern("%s_jaar");
}
/**
* @return datesHelper to use.
*/
public DatesHelper getDatesHelper() {
return datesHelper;
}
/**
* @return seleniumHelper to use.
*/
public SeleniumHelper getSeleniumHelper() {
return seleniumHelper;
}
/**
* @return directory containing FitNesse's root.
*/
public String getFitNesseRootDir() {
return fitNesseRoot;
}
/**
* @return directory containing FitNesse's files section.
*/
public String getFitNesseFilesSectionDir() {
return new File(fitNesseRoot, "files").getAbsolutePath();
}
/**
* @param fitNesseRoot directory containing FitNesse's root.
*/
public void setFitNesseRoot(String fitNesseRoot) {
File root = new File(fitNesseRoot);
if (!root.exists() || !root.isDirectory()) {
throw new IllegalArgumentException("value for fitNesseRoot must be an existing directory");
}
this.fitNesseRoot = fitNesseRoot;
}
/**
* @return default (global) map helper.
*/
public MapHelper getMapHelper() {
return mapHelper;
}
/**
* Sets the default MapHelper.
* @param aMapHelper map helper to use.
*/
public void setMapHelper(MapHelper aMapHelper) {
mapHelper = aMapHelper;
}
/**
* @return XML formatter used.
*/
public XMLFormatter getXmlFormatter() {
return xmlFormatter;
}
/**
* @return JSON formatter used.
*/
public JsonFormatter getJsonFormatter() {
return jsonFormatter;
}
} |
package nl.jasperhuzen.exampleapp;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.scheduling.annotation.EnableScheduling;
@Slf4j
@EnableScheduling
@SpringBootApplication
public class ExampleApp {

    static {
        // NOTE(review): intended to point the logging system at
        // log4j.properties. This only works if this class is loaded before
        // logging is initialized — confirm, since Spring Boot normally reads
        // "logging.config" very early during startup.
        System.setProperty("logging.config", "log4j.properties");
    }

    public static void main(String[] args) {
        SpringApplication.run(ExampleApp.class, args);
    }

    // Protected so frameworks/subclasses can construct; logs once per bean creation.
    protected ExampleApp() {
        log.info("Example app started");
    }
}
package nl.topicus.jdbc;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.RowIdLifetime;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
public class CloudSpannerMetaData extends AbstractCloudSpannerMetaData
{
// Connection this metadata describes; used for URL/product info and for
// running information_schema queries.
private CloudSpannerConnection connection;

// Package-private: instances are handed out by the connection itself.
CloudSpannerMetaData(CloudSpannerConnection connection)
{
    this.connection = connection;
}
@Override
public boolean allProceduresAreCallable() throws SQLException
{
return true;
}
@Override
public boolean allTablesAreSelectable() throws SQLException
{
return true;
}
@Override
public String getURL() throws SQLException
{
return connection.getUrl();
}
@Override
public String getUserName() throws SQLException
{
return null;
}
@Override
public boolean isReadOnly() throws SQLException
{
return false;
}
@Override
public boolean nullsAreSortedHigh() throws SQLException
{
return false;
}
@Override
public boolean nullsAreSortedLow() throws SQLException
{
return true;
}
@Override
public boolean nullsAreSortedAtStart() throws SQLException
{
return false;
}
@Override
public boolean nullsAreSortedAtEnd() throws SQLException
{
return false;
}
@Override
public String getDatabaseProductName() throws SQLException
{
return connection.getProductName();
}
@Override
public String getDatabaseProductVersion() throws SQLException
{
return null;
}
@Override
public String getDriverName() throws SQLException
{
return CloudSpannerDriver.class.getName();
}
@Override
public String getDriverVersion() throws SQLException
{
return CloudSpannerDriver.MAJOR_VERSION + "." + CloudSpannerDriver.MINOR_VERSION;
}
@Override
public int getDriverMajorVersion()
{
return CloudSpannerDriver.MAJOR_VERSION;
}
@Override
public int getDriverMinorVersion()
{
return CloudSpannerDriver.MINOR_VERSION;
}
@Override
public boolean usesLocalFiles() throws SQLException
{
return false;
}
@Override
public boolean usesLocalFilePerTable() throws SQLException
{
return false;
}
@Override
public boolean supportsMixedCaseIdentifiers() throws SQLException
{
return false;
}
@Override
public boolean storesUpperCaseIdentifiers() throws SQLException
{
return false;
}
@Override
public boolean storesLowerCaseIdentifiers() throws SQLException
{
return false;
}
@Override
public boolean storesMixedCaseIdentifiers() throws SQLException
{
return true;
}
@Override
public boolean supportsMixedCaseQuotedIdentifiers() throws SQLException
{
return false;
}
@Override
public boolean storesUpperCaseQuotedIdentifiers() throws SQLException
{
return false;
}
@Override
public boolean storesLowerCaseQuotedIdentifiers() throws SQLException
{
return false;
}
@Override
public boolean storesMixedCaseQuotedIdentifiers() throws SQLException
{
return true;
}
@Override
public String getIdentifierQuoteString() throws SQLException
{
return "`";
}
@Override
public String getSQLKeywords() throws SQLException
{
return "INTERLEAVE, PARENT";
}
@Override
public String getNumericFunctions() throws SQLException
{
return "";
}
@Override
public String getStringFunctions() throws SQLException
{
return "";
}
@Override
public String getSystemFunctions() throws SQLException
{
return "";
}
@Override
public String getTimeDateFunctions() throws SQLException
{
return "";
}
@Override
public String getSearchStringEscape() throws SQLException
{
return "\\";
}
@Override
public String getExtraNameCharacters() throws SQLException
{
return "";
}
@Override
public boolean supportsAlterTableWithAddColumn() throws SQLException
{
return true;
}
@Override
public boolean supportsAlterTableWithDropColumn() throws SQLException
{
return true;
}
@Override
public boolean supportsColumnAliasing() throws SQLException
{
return true;
}
@Override
public boolean nullPlusNonNullIsNull() throws SQLException
{
return true;
}
@Override
public boolean supportsConvert() throws SQLException
{
return false;
}
@Override
public boolean supportsConvert(int fromType, int toType) throws SQLException
{
return false;
}
@Override
public boolean supportsTableCorrelationNames() throws SQLException
{
return false;
}
@Override
public boolean supportsDifferentTableCorrelationNames() throws SQLException
{
return false;
}
@Override
public boolean supportsExpressionsInOrderBy() throws SQLException
{
return true;
}
@Override
public boolean supportsOrderByUnrelated() throws SQLException
{
return true;
}
@Override
public boolean supportsGroupBy() throws SQLException
{
return true;
}
@Override
public boolean supportsGroupByUnrelated() throws SQLException
{
return true;
}
@Override
public boolean supportsGroupByBeyondSelect() throws SQLException
{
return true;
}
@Override
public boolean supportsLikeEscapeClause() throws SQLException
{
return true;
}
@Override
public boolean supportsMultipleResultSets() throws SQLException
{
return false;
}
@Override
public boolean supportsMultipleTransactions() throws SQLException
{
return true;
}
@Override
public boolean supportsNonNullableColumns() throws SQLException
{
return true;
}
@Override
public boolean supportsMinimumSQLGrammar() throws SQLException
{
return false;
}
@Override
public boolean supportsCoreSQLGrammar() throws SQLException
{
return false;
}
@Override
public boolean supportsExtendedSQLGrammar() throws SQLException
{
return false;
}
@Override
public boolean supportsANSI92EntryLevelSQL() throws SQLException
{
return false;
}
@Override
public boolean supportsANSI92IntermediateSQL() throws SQLException
{
return false;
}
@Override
public boolean supportsANSI92FullSQL() throws SQLException
{
return false;
}
@Override
public boolean supportsIntegrityEnhancementFacility() throws SQLException
{
return false;
}
@Override
public boolean supportsOuterJoins() throws SQLException
{
return true;
}
@Override
public boolean supportsFullOuterJoins() throws SQLException
{
return true;
}
@Override
public boolean supportsLimitedOuterJoins() throws SQLException
{
return true;
}
@Override
public String getSchemaTerm() throws SQLException
{
return null;
}
@Override
public String getProcedureTerm() throws SQLException
{
return null;
}
@Override
public String getCatalogTerm() throws SQLException
{
return null;
}
@Override
public boolean isCatalogAtStart() throws SQLException
{
return false;
}
@Override
public String getCatalogSeparator() throws SQLException
{
return null;
}
@Override
public boolean supportsSchemasInDataManipulation() throws SQLException
{
return false;
}
@Override
public boolean supportsSchemasInProcedureCalls() throws SQLException
{
return false;
}
@Override
public boolean supportsSchemasInTableDefinitions() throws SQLException
{
return false;
}
@Override
public boolean supportsSchemasInIndexDefinitions() throws SQLException
{
return false;
}
@Override
public boolean supportsSchemasInPrivilegeDefinitions() throws SQLException
{
return false;
}
@Override
public boolean supportsCatalogsInDataManipulation() throws SQLException
{
return false;
}
@Override
public boolean supportsCatalogsInProcedureCalls() throws SQLException
{
return false;
}
@Override
public boolean supportsCatalogsInTableDefinitions() throws SQLException
{
return false;
}
@Override
public boolean supportsCatalogsInIndexDefinitions() throws SQLException
{
return false;
}
@Override
public boolean supportsCatalogsInPrivilegeDefinitions() throws SQLException
{
return false;
}
@Override
public boolean supportsPositionedDelete() throws SQLException
{
return false;
}
@Override
public boolean supportsPositionedUpdate() throws SQLException
{
return false;
}
@Override
public boolean supportsSelectForUpdate() throws SQLException
{
return false;
}
@Override
public boolean supportsStoredProcedures() throws SQLException
{
return false;
}
@Override
public boolean supportsSubqueriesInComparisons() throws SQLException
{
return true;
}
@Override
public boolean supportsSubqueriesInExists() throws SQLException
{
return true;
}
@Override
public boolean supportsSubqueriesInIns() throws SQLException
{
return true;
}
@Override
public boolean supportsSubqueriesInQuantifieds() throws SQLException
{
return true;
}
@Override
public boolean supportsCorrelatedSubqueries() throws SQLException
{
return true;
}
@Override
public boolean supportsUnion() throws SQLException
{
return true;
}
@Override
public boolean supportsUnionAll() throws SQLException
{
return true;
}
@Override
public boolean supportsOpenCursorsAcrossCommit() throws SQLException
{
return false;
}
@Override
public boolean supportsOpenCursorsAcrossRollback() throws SQLException
{
return false;
}
@Override
public boolean supportsOpenStatementsAcrossCommit() throws SQLException
{
return false;
}
@Override
public boolean supportsOpenStatementsAcrossRollback() throws SQLException
{
return false;
}
@Override
public int getMaxBinaryLiteralLength() throws SQLException
{
return 0;
}
@Override
public int getMaxCharLiteralLength() throws SQLException
{
return 0;
}
@Override
public int getMaxColumnNameLength() throws SQLException
{
return 128;
}
@Override
public int getMaxColumnsInGroupBy() throws SQLException
{
return 0;
}
@Override
public int getMaxColumnsInIndex() throws SQLException
{
return 0;
}
@Override
public int getMaxColumnsInOrderBy() throws SQLException
{
return 0;
}
@Override
public int getMaxColumnsInSelect() throws SQLException
{
return 0;
}
@Override
public int getMaxColumnsInTable() throws SQLException
{
return 0;
}
@Override
public int getMaxConnections() throws SQLException
{
return 0;
}
@Override
public int getMaxCursorNameLength() throws SQLException
{
return 0;
}
@Override
public int getMaxIndexLength() throws SQLException
{
return 0;
}
@Override
public int getMaxSchemaNameLength() throws SQLException
{
return 0;
}
@Override
public int getMaxProcedureNameLength() throws SQLException
{
return 0;
}
@Override
public int getMaxCatalogNameLength() throws SQLException
{
return 0;
}
@Override
public int getMaxRowSize() throws SQLException
{
return 0;
}
@Override
public boolean doesMaxRowSizeIncludeBlobs() throws SQLException
{
return false;
}
@Override
public int getMaxStatementLength() throws SQLException
{
return 0;
}
@Override
public int getMaxStatements() throws SQLException
{
return 0;
}
@Override
public int getMaxTableNameLength() throws SQLException
{
return 128;
}
@Override
public int getMaxTablesInSelect() throws SQLException
{
return 0;
}
@Override
public int getMaxUserNameLength() throws SQLException
{
return 0;
}
@Override
public int getDefaultTransactionIsolation() throws SQLException
{
return Connection.TRANSACTION_SERIALIZABLE;
}
@Override
public boolean supportsTransactions() throws SQLException
{
return true;
}
@Override
public boolean supportsTransactionIsolationLevel(int level) throws SQLException
{
return Connection.TRANSACTION_SERIALIZABLE == level;
}
@Override
public boolean supportsDataDefinitionAndDataManipulationTransactions() throws SQLException
{
return false;
}
@Override
public boolean supportsDataManipulationTransactionsOnly() throws SQLException
{
return true;
}
@Override
public boolean dataDefinitionCausesTransactionCommit() throws SQLException
{
return true;
}
@Override
public boolean dataDefinitionIgnoredInTransactions() throws SQLException
{
return false;
}
@Override
public ResultSet getProcedures(String catalog, String schemaPattern, String procedureNamePattern)
throws SQLException
{
throw new SQLFeatureNotSupportedException();
}
@Override
public ResultSet getProcedureColumns(String catalog, String schemaPattern, String procedureNamePattern,
String columnNamePattern) throws SQLException
{
throw new SQLFeatureNotSupportedException();
}
/**
 * Lists tables via Cloud Spanner's information_schema.
 * Pattern arguments are matched case-insensitively by upper-casing both the
 * column and the argument; callers must supply SQL LIKE wildcards
 * ('%', '_') themselves. Null arguments mean "no filter".
 *
 * NOTE(review): the types argument is ignored — presumably acceptable
 * because getTableTypes() reports 'TABLE' as the only type, so filtering
 * would be a no-op; confirm if other types are ever added.
 */
@Override
public ResultSet getTables(String catalog, String schemaPattern, String tableNamePattern, String[] types)
        throws SQLException
{
    // Constant-select columns pad the result out to the JDBC-mandated layout.
    String sql = "select TABLE_CATALOG AS TABLE_CAT, TABLE_SCHEMA AS TABLE_SCHEM, TABLE_NAME, 'TABLE' AS TABLE_TYPE, NULL AS REMARKS, NULL AS TYPE_CAT, NULL AS TYPE_SCHEM, NULL AS TYPE_NAME, NULL AS SELF_REFERENCING_COL_NAME, NULL AS REF_GENERATION "
            + "FROM information_schema.tables AS t " + "WHERE 1=1 ";
    // The WHERE clause and the parameter bindings below must stay in the same
    // order: each appended "like ?" consumes the next paramIndex.
    if (catalog != null)
        sql = sql + "AND UPPER(t.TABLE_CATALOG) like ? ";
    if (schemaPattern != null)
        sql = sql + "AND UPPER(t.TABLE_SCHEMA) like ? ";
    if (tableNamePattern != null)
        sql = sql + "AND UPPER(t.TABLE_NAME) like ? ";
    sql = sql + "ORDER BY TABLE_NAME";
    PreparedStatement statement = connection.prepareStatement(sql);
    int paramIndex = 1;
    if (catalog != null)
    {
        statement.setString(paramIndex, catalog.toUpperCase());
        paramIndex++;
    }
    if (schemaPattern != null)
    {
        statement.setString(paramIndex, schemaPattern.toUpperCase());
        paramIndex++;
    }
    if (tableNamePattern != null)
    {
        statement.setString(paramIndex, tableNamePattern.toUpperCase());
        paramIndex++;
    }
    // The statement is intentionally not closed here: closing it would close
    // the returned ResultSet.
    return statement.executeQuery();
}
@Override
public ResultSet getSchemas() throws SQLException
{
throw new SQLFeatureNotSupportedException();
}
@Override
public ResultSet getCatalogs() throws SQLException
{
throw new SQLFeatureNotSupportedException();
}
/**
 * Cloud Spanner exposes only plain tables, so the result is a single row
 * whose TABLE_TYPE column is 'TABLE'.
 */
@Override
public ResultSet getTableTypes() throws SQLException
{
    return connection.createStatement().executeQuery("select 'TABLE' AS TABLE_TYPE");
}
/**
 * Lists columns from information_schema.columns.
 *
 * DATA_TYPE is hard-wired to 1 and sizes to 0; the native type is surfaced through
 * TYPE_NAME (SPANNER_TYPE). NULLABLE is derived from IS_NULLABLE ('YES' -> 1,
 * 'NO' -> 0, otherwise 2 = unknown). Pattern arguments are matched
 * case-insensitively; null means "no filter".
 */
@Override
public ResultSet getColumns(String catalog, String schemaPattern, String tableNamePattern, String columnNamePattern)
        throws SQLException
{
    StringBuilder sql = new StringBuilder(
            "select TABLE_CATALOG AS TABLE_CAT, TABLE_SCHEMA AS TABLE_SCHEM, TABLE_NAME, COLUMN_NAME, 1 AS DATA_TYPE, SPANNER_TYPE AS TYPE_NAME, ");
    sql.append("0 AS COLUMN_SIZE, 0 AS BUFFER_LENGTH, NULL AS DECIMAL_DIGITS, 0 AS NUM_PREC_RADIX, ");
    sql.append("CASE ");
    sql.append(" WHEN IS_NULLABLE = 'YES' THEN 1 ");
    sql.append(" WHEN IS_NULLABLE = 'NO' THEN 0 ");
    sql.append(" ELSE 2 ");
    sql.append("END AS NULLABLE, NULL AS REMARKS, NULL AS COLUMN_DEF, 0 AS SQL_DATA_TYPE, 0 AS SQL_DATETIME_SUB, 0 AS CHAR_OCTET_LENGTH, ORDINAL_POSITION, IS_NULLABLE, NULL AS SCOPE_CATALOG, ");
    sql.append("NULL AS SCOPE_SCHEMA, NULL AS SCOPE_TABLE, NULL AS SOURCE_DATA_TYPE, 'NO' AS IS_AUTOINCREMENT, 'NO' AS IS_GENERATEDCOLUMN ");
    sql.append("FROM information_schema.columns ");
    sql.append("WHERE 1=1 ");
    if (catalog != null)
        sql.append("AND UPPER(TABLE_CATALOG) like ? ");
    if (schemaPattern != null)
        sql.append("AND UPPER(TABLE_SCHEMA) like ? ");
    if (tableNamePattern != null)
        sql.append("AND UPPER(TABLE_NAME) like ? ");
    if (columnNamePattern != null)
        sql.append("AND UPPER(COLUMN_NAME) LIKE ? ");
    sql.append("ORDER BY TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, ORDINAL_POSITION ");
    PreparedStatement statement = connection.prepareStatement(sql.toString());
    // Bind only the parameters whose placeholders were appended, in the same order.
    int paramIndex = 1;
    if (catalog != null)
        statement.setString(paramIndex++, catalog.toUpperCase());
    if (schemaPattern != null)
        statement.setString(paramIndex++, schemaPattern.toUpperCase());
    if (tableNamePattern != null)
        statement.setString(paramIndex++, tableNamePattern.toUpperCase());
    if (columnNamePattern != null)
        statement.setString(paramIndex++, columnNamePattern.toUpperCase());
    return statement.executeQuery();
}
// Privilege metadata is not exposed by this driver.
@Override
public ResultSet getColumnPrivileges(String catalog, String schema, String table, String columnNamePattern)
        throws SQLException
{
    throw new SQLFeatureNotSupportedException();
}
@Override
public ResultSet getTablePrivileges(String catalog, String schemaPattern, String tableNamePattern)
        throws SQLException
{
    throw new SQLFeatureNotSupportedException();
}
// Best-row-identifier and version-column metadata are not supported.
@Override
public ResultSet getBestRowIdentifier(String catalog, String schema, String table, int scope, boolean nullable)
        throws SQLException
{
    throw new SQLFeatureNotSupportedException();
}
@Override
public ResultSet getVersionColumns(String catalog, String schema, String table) throws SQLException
{
    throw new SQLFeatureNotSupportedException();
}
/**
 * Reports the primary-key columns of {@code table}, derived from the PRIMARY_KEY
 * index in information_schema, ordered by key position.
 *
 * The table name is now bound as a statement parameter. The previous code spliced
 * it into the SQL text via {@code String.replace("%TABLE_NAME%", table)} without
 * quoting, so the name was parsed as an identifier rather than a string literal
 * (broken query) and the method was an SQL-injection vector.
 * catalog/schema arguments are ignored, as before.
 */
@Override
public ResultSet getPrimaryKeys(String catalog, String schema, String table) throws SQLException
{
    String sql = "select IDX.TABLE_CATALOG AS TABLE_CAT, IDX.TABLE_SCHEMA AS TABLE_SCHEM, IDX.TABLE_NAME AS TABLE_NAME, COLS.COLUMN_NAME AS COLUMN_NAME, ORDINAL_POSITION AS KEY_SEQ, IDX.INDEX_NAME AS PK_NAME "
            + "from information_schema.indexes idx "
            + "inner join information_schema.index_columns cols on idx.table_catalog=cols.table_catalog and idx.table_schema=cols.table_schema and idx.table_name=cols.table_name and idx.index_name=cols.index_name "
            + "where index_type='PRIMARY_KEY' and idx.table_name = ? ORDER BY ORDINAL_POSITION";
    PreparedStatement statement = connection.prepareStatement(sql);
    statement.setString(1, table);
    return statement.executeQuery();
}
// Reports parent/child relationships as imported keys. There are no classic foreign
// keys here: a child row is any table whose PARENT_TABLE_NAME is set (table
// interleaving), and the "imported" columns are the parent's PRIMARY_KEY index
// columns. DELETE_RULE maps ON_DELETE_ACTION='CASCADE' to 0 (cascade), else 3
// (no action); UPDATE_RULE is fixed at 3.
@Override
public ResultSet getImportedKeys(String catalog, String schema, String table) throws SQLException
{
    String sql = "SELECT PARENT.TABLE_CATALOG AS PKTABLE_CAT, PARENT.TABLE_SCHEMA AS PKTABLE_SCHEM, PARENT.TABLE_NAME AS PKTABLE_NAME, COL.COLUMN_NAME AS PKCOLUMN_NAME, CHILD.TABLE_CATALOG AS FKTABLE_CAT, CHILD.TABLE_SCHEMA AS FKTABLE_SCHEM, CHILD.TABLE_NAME AS FKTABLE_NAME, COL.COLUMN_NAME FKCOLUMN_NAME, COL.ORDINAL_POSITION AS KEY_SEQ, 3 AS UPDATE_RULE, CASE WHEN CHILD.ON_DELETE_ACTION = 'CASCADE' THEN 0 ELSE 3 END AS DELETE_RULE, NULL AS FK_NAME, INDEXES.INDEX_NAME AS PK_NAME, 7 AS DEFERRABILITY "
            + "FROM INFORMATION_SCHEMA.TABLES CHILD "
            + "INNER JOIN INFORMATION_SCHEMA.TABLES PARENT ON CHILD.TABLE_CATALOG=PARENT.TABLE_CATALOG AND CHILD.TABLE_SCHEMA=PARENT.TABLE_SCHEMA AND CHILD.PARENT_TABLE_NAME=PARENT.TABLE_NAME "
            + "INNER JOIN INFORMATION_SCHEMA.INDEXES ON PARENT.TABLE_CATALOG=INDEXES.TABLE_CATALOG AND PARENT.TABLE_SCHEMA=INDEXES.TABLE_SCHEMA AND PARENT.TABLE_NAME=INDEXES.TABLE_NAME AND INDEXES.INDEX_TYPE='PRIMARY_KEY' "
            + "INNER JOIN INFORMATION_SCHEMA.INDEX_COLUMNS COL ON INDEXES.TABLE_CATALOG=COL.TABLE_CATALOG AND INDEXES.TABLE_SCHEMA=COL.TABLE_SCHEMA AND INDEXES.TABLE_NAME=COL.TABLE_NAME AND INDEXES.INDEX_NAME=COL.INDEX_NAME "
            + "WHERE CHILD.PARENT_TABLE_NAME IS NOT NULL ";
    // Optional case-insensitive filters; null argument means "no filter".
    if (catalog != null)
        sql = sql + "AND UPPER(CHILD.TABLE_CATALOG) like ? ";
    if (schema != null)
        sql = sql + "AND UPPER(CHILD.TABLE_SCHEMA) like ? ";
    if (table != null)
        sql = sql + "AND UPPER(CHILD.TABLE_NAME) like ? ";
    sql = sql + "ORDER BY PARENT.TABLE_CATALOG, PARENT.TABLE_SCHEMA, PARENT.TABLE_NAME, COL.ORDINAL_POSITION ";
    PreparedStatement statement = connection.prepareStatement(sql);
    // Bind parameters in the same order their placeholders were appended.
    int paramIndex = 1;
    if (catalog != null)
    {
        statement.setString(paramIndex, catalog.toUpperCase());
        paramIndex++;
    }
    if (schema != null)
    {
        statement.setString(paramIndex, schema.toUpperCase());
        paramIndex++;
    }
    if (table != null)
    {
        statement.setString(paramIndex, table.toUpperCase());
        paramIndex++;
    }
    return statement.executeQuery();
}
// Exported-key and cross-reference lookups are not supported.
@Override
public ResultSet getExportedKeys(String catalog, String schema, String table) throws SQLException
{
    throw new SQLFeatureNotSupportedException();
}
@Override
public ResultSet getCrossReference(String parentCatalog, String parentSchema, String parentTable,
        String foreignCatalog, String foreignSchema, String foreignTable) throws SQLException
{
    throw new SQLFeatureNotSupportedException();
}
// Type information is not exposed.
@Override
public ResultSet getTypeInfo() throws SQLException
{
    throw new SQLFeatureNotSupportedException();
}
/**
 * Lists index columns from information_schema.indexes joined with
 * information_schema.index_columns.
 *
 * NON_UNIQUE is the negation of IS_UNIQUE, ASC_OR_DESC is the first letter of
 * COLUMN_ORDERING, and CARDINALITY/PAGES are reported as -1 (unknown). When
 * {@code unique} is true only unique indexes are returned; {@code approximate}
 * is ignored. Name arguments are matched case-insensitively; null means "no filter".
 */
@Override
public ResultSet getIndexInfo(String catalog, String schema, String table, boolean unique, boolean approximate)
        throws SQLException
{
    StringBuilder sql = new StringBuilder(
            "select idx.TABLE_CATALOG AS TABLE_CAT, idx.TABLE_SCHEMA AS TABLE_SCHEM, idx.TABLE_NAME, CASE WHEN IS_UNIQUE THEN FALSE ELSE TRUE END AS NON_UNIQUE, NULL AS INDEX_QUALIFIER, idx.INDEX_NAME, 3 AS TYPE, ORDINAL_POSITION, COLUMN_NAME, SUBSTR(COLUMN_ORDERING, 0, 1) AS ASC_OR_DESC, -1 AS CARDINALITY, -1 AS PAGES, NULL AS FILTER_CONDITION ");
    sql.append("FROM information_schema.indexes idx ");
    sql.append("INNER JOIN information_schema.index_columns col on idx.table_catalog=col.table_catalog and idx.table_schema=col.table_schema and idx.table_name=col.table_name and idx.index_name=col.index_name ");
    sql.append("WHERE 1=1 ");
    if (catalog != null)
        sql.append("AND UPPER(idx.TABLE_CATALOG) like ? ");
    if (schema != null)
        sql.append("AND UPPER(idx.TABLE_SCHEMA) like ? ");
    if (table != null)
        sql.append("AND UPPER(idx.TABLE_NAME) like ? ");
    if (unique)
        sql.append("AND IS_UNIQUE ");
    sql.append("ORDER BY IS_UNIQUE, INDEX_NAME, ORDINAL_POSITION ");
    PreparedStatement statement = connection.prepareStatement(sql.toString());
    // Bind only the parameters whose placeholders were appended, in the same order.
    int paramIndex = 1;
    if (catalog != null)
        statement.setString(paramIndex++, catalog.toUpperCase());
    if (schema != null)
        statement.setString(paramIndex++, schema.toUpperCase());
    if (table != null)
        statement.setString(paramIndex++, table.toUpperCase());
    return statement.executeQuery();
}
// Result sets are forward-only and read-only; no change-visibility or change-detection
// support of any kind.
@Override
public boolean supportsResultSetType(int type) throws SQLException
{
    return type == ResultSet.TYPE_FORWARD_ONLY;
}
@Override
public boolean supportsResultSetConcurrency(int type, int concurrency) throws SQLException
{
    return type == ResultSet.TYPE_FORWARD_ONLY && concurrency == ResultSet.CONCUR_READ_ONLY;
}
@Override
public boolean ownUpdatesAreVisible(int type) throws SQLException
{
    return false;
}
@Override
public boolean ownDeletesAreVisible(int type) throws SQLException
{
    return false;
}
@Override
public boolean ownInsertsAreVisible(int type) throws SQLException
{
    return false;
}
@Override
public boolean othersUpdatesAreVisible(int type) throws SQLException
{
    return false;
}
@Override
public boolean othersDeletesAreVisible(int type) throws SQLException
{
    return false;
}
@Override
public boolean othersInsertsAreVisible(int type) throws SQLException
{
    return false;
}
@Override
public boolean updatesAreDetected(int type) throws SQLException
{
    return false;
}
@Override
public boolean deletesAreDetected(int type) throws SQLException
{
    return false;
}
@Override
public boolean insertsAreDetected(int type) throws SQLException
{
    return false;
}
@Override
public boolean supportsBatchUpdates() throws SQLException
{
    return false;
}
// User-defined types are not supported.
@Override
public ResultSet getUDTs(String catalog, String schemaPattern, String typeNamePattern, int[] types)
        throws SQLException
{
    throw new SQLFeatureNotSupportedException();
}
// Returns the connection this metadata object was created from.
@Override
public Connection getConnection() throws SQLException
{
    return connection;
}
@Override
public boolean supportsSavepoints() throws SQLException
{
    return false;
}
@Override
public boolean supportsNamedParameters() throws SQLException
{
    return true;
}
@Override
public boolean supportsMultipleOpenResults() throws SQLException
{
    return false;
}
@Override
public boolean supportsGetGeneratedKeys() throws SQLException
{
    return false;
}
// Type-hierarchy and attribute metadata are not supported.
@Override
public ResultSet getSuperTypes(String catalog, String schemaPattern, String typeNamePattern) throws SQLException
{
    throw new SQLFeatureNotSupportedException();
}
@Override
public ResultSet getSuperTables(String catalog, String schemaPattern, String tableNamePattern) throws SQLException
{
    throw new SQLFeatureNotSupportedException();
}
@Override
public ResultSet getAttributes(String catalog, String schemaPattern, String typeNamePattern,
        String attributeNamePattern) throws SQLException
{
    throw new SQLFeatureNotSupportedException();
}
// Cursors are always closed at commit.
@Override
public boolean supportsResultSetHoldability(int holdability) throws SQLException
{
    return holdability == ResultSet.CLOSE_CURSORS_AT_COMMIT;
}
@Override
public int getResultSetHoldability() throws SQLException
{
    return ResultSet.CLOSE_CURSORS_AT_COMMIT;
}
// NOTE(review): the JDBC spec expects these version getters to return numbers rather
// than throw; throwing here may break generic tooling — confirm whether this is
// intentional for this driver.
@Override
public int getDatabaseMajorVersion() throws SQLException
{
    throw new SQLFeatureNotSupportedException();
}
@Override
public int getDatabaseMinorVersion() throws SQLException
{
    throw new SQLFeatureNotSupportedException();
}
@Override
public int getJDBCMajorVersion() throws SQLException
{
    throw new SQLFeatureNotSupportedException();
}
@Override
public int getJDBCMinorVersion() throws SQLException
{
    throw new SQLFeatureNotSupportedException();
}
@Override
public int getSQLStateType() throws SQLException
{
    throw new SQLFeatureNotSupportedException();
}
@Override
public boolean locatorsUpdateCopy() throws SQLException
{
    return false;
}
@Override
public boolean supportsStatementPooling() throws SQLException
{
    return false;
}
// ROWID is not a supported concept.
@Override
public RowIdLifetime getRowIdLifetime() throws SQLException
{
    return RowIdLifetime.ROWID_UNSUPPORTED;
}
@Override
public ResultSet getSchemas(String catalog, String schemaPattern) throws SQLException
{
    throw new SQLFeatureNotSupportedException();
}
@Override
public boolean supportsStoredFunctionsUsingCallSyntax() throws SQLException
{
    return false;
}
@Override
public boolean autoCommitFailureClosesAllResultSets() throws SQLException
{
    return false;
}
// Client-info, function and pseudo-column metadata are not supported.
@Override
public ResultSet getClientInfoProperties() throws SQLException
{
    throw new SQLFeatureNotSupportedException();
}
@Override
public ResultSet getFunctions(String catalog, String schemaPattern, String functionNamePattern) throws SQLException
{
    throw new SQLFeatureNotSupportedException();
}
@Override
public ResultSet getFunctionColumns(String catalog, String schemaPattern, String functionNamePattern,
        String columnNamePattern) throws SQLException
{
    throw new SQLFeatureNotSupportedException();
}
@Override
public ResultSet getPseudoColumns(String catalog, String schemaPattern, String tableNamePattern,
        String columnNamePattern) throws SQLException
{
    throw new SQLFeatureNotSupportedException();
}
@Override
public boolean generatedKeyAlwaysReturned() throws SQLException
{
    return false;
}
} |
package com.tuxnet.project_euler;
/**
 * Entry point for running a single Project Euler solution.
 *
 * Switch the active problem by commenting/uncommenting the corresponding
 * {@code solve()} call; currently Problem 9 is run.
 */
public class Main {
    public static void main(String[] args) {
        //Problem10.solve();
        //Problem2.solve();
        //Problem25.solve();
        //Problem15.solve();
        //Problem19.solve();
        Problem9.solve();
    }
}
package com.artemis;
import org.junit.Before;
import org.junit.Test;
import com.artemis.annotations.Wire;
import com.artemis.component.ComponentX;
import com.artemis.component.ComponentY;
import com.artemis.managers.TagManager;
import com.artemis.systems.EntityProcessingSystem;
import com.artemis.systems.VoidEntitySystem;
import static org.junit.Assert.*;
/**
 * Tests for artemis dependency injection: {@code @Wire}-annotated fields (and,
 * for classes wired "all", plain fields) on systems, managers and arbitrary
 * POJOs are populated with component mappers, managers, sibling systems and
 * objects registered on the {@link WorldConfiguration}.
 */
public class WireTest {
    private World world;
    private MappedSystem mappedSystem;
    private MappedSystemAll mappedSystemAll;
    private ExtendedSystem extendedSystem;
    private MappedManager mappedManager;
    private MappedManagerAll mappedManagerAll;
    private ExtendedManager extendedManager;
    private Entity entity;
    // Builds a world containing every fixture system/manager and one entity
    // carrying ComponentX + ComponentY, then processes once so injection runs.
    @Before
    public void init() {
        mappedSystem = new MappedSystem();
        mappedSystemAll = new MappedSystemAll();
        extendedSystem = new ExtendedSystem();
        mappedManager = new MappedManager();
        mappedManagerAll = new MappedManagerAll();
        extendedManager = new ExtendedManager();
        world = new World(new WorldConfiguration()
            .setSystem(TagManager.class)
            .setSystem(mappedManager)
            .setSystem(mappedManagerAll)
            .setSystem(extendedManager)
            .setSystem(mappedSystem)
            .setSystem(mappedSystemAll)
            .setSystem(extendedSystem));
        entity = world.createEntity();
        EntityEdit edit = entity.edit();
        edit.create(ComponentX.class);
        edit.create(ComponentY.class);
        world.process();
    }
    // A registered custom object is injected into @Wire fields of matching type;
    // non-annotated fields of the same type stay null.
    @Test
    public void inject_custom_type() {
        CustomInjectedManager injected = new CustomInjectedManager();
        InjectMe injectMe = new InjectMe();
        WorldConfiguration config = new WorldConfiguration()
            .setSystem(injected)
            .register(injectMe);
        World w = new World(config);
        assertSame(injectMe, injected.injectMe);
        assertNull(injected.nullInjectMe);
    }
    // World construction itself must throw when a @Wire field has no registered value.
    @Test(expected = MundaneWireException.class)
    public void inject_custom_type_not_registered() {
        CustomInjectedManager injected = new CustomInjectedManager();
        InjectMe injectMe = new InjectMe();
        WorldConfiguration config = new WorldConfiguration()
            .setSystem(injected);
        World w = new World(config);
        fail("expected exception"); // unreachable guard
    }
    // Same as above, but the object is registered and looked up under a name.
    @Test
    public void inject_named_custom_type() {
        CustomNamedInjectedManager injected = new CustomNamedInjectedManager();
        InjectMe injectMe = new InjectMe();
        WorldConfiguration config = new WorldConfiguration()
            .setSystem(injected)
            .register("hi", injectMe);
        World w = new World(config);
        assertSame(injectMe, injected.injectMe);
        assertNull(injected.nullInjectMe);
    }
    @Test(expected = MundaneWireException.class)
    public void inject_named_custom_type_not_registered() {
        CustomNamedInjectedManager injected = new CustomNamedInjectedManager();
        InjectMe injectMe = new InjectMe();
        WorldConfiguration config = new WorldConfiguration()
            .setSystem(injected);
        World w = new World(config);
        fail("expected exception"); // unreachable guard
    }
    @Test
    public void systems_support_wire_annotation() {
        assertNotNull(mappedSystem.x);
        assertNotNull(mappedSystem.y);
        assertNotNull(mappedSystem.tagManager);
        assertNotNull(mappedSystem.mappedSystemAll);
        assertNotNull(extendedSystem.x);
        assertNotNull(extendedSystem.y);
        assertEquals(ComponentX.class, mappedSystem.x.get(entity).getClass());
        assertEquals(ComponentY.class, mappedSystem.y.get(entity).getClass());
    }
    @Test
    public void managers_support_wire_annotation() {
        assertNotNull(mappedManager.x);
        assertNotNull(mappedManager.y);
        assertNotNull(mappedManager.tagManager);
        assertNotNull(mappedManager.mappedSystem);
        assertEquals(ComponentX.class, mappedSystem.x.get(entity).getClass());
        assertEquals(ComponentY.class, mappedSystem.y.get(entity).getClass());
    }
    // "All" variants have no @Wire annotations; every field is injected anyway.
    @Test
    public void systems_all_support_wire_annotation() {
        assertNotNull(mappedSystemAll.x);
        assertNotNull(mappedSystemAll.y);
        assertNotNull(mappedSystemAll.tagManager);
        assertNotNull(mappedSystemAll.mappedSystem);
        assertEquals(ComponentX.class, mappedSystem.x.get(entity).getClass());
        assertEquals(ComponentY.class, mappedSystem.y.get(entity).getClass());
    }
    @Test
    public void managers_all_support_wire_annotation() {
        assertNotNull(mappedManagerAll.x);
        assertNotNull(mappedManagerAll.y);
        assertNotNull(mappedManagerAll.tagManager);
        assertNotNull(mappedManagerAll.mappedSystem);
        assertNotNull(extendedManager.x);
        assertNotNull(extendedManager.y);
        assertEquals(ComponentX.class, mappedSystem.x.get(entity).getClass());
        assertEquals(ComponentY.class, mappedSystem.y.get(entity).getClass());
    }
    // Fields declared on a superclass are injected too.
    @Test
    public void ensure_inherited_managers_injected_by_default() {
        FailingSystem failingSystem = new FailingSystem();
        FailingManager failingManager = new FailingManager();
        World world = new World(new WorldConfiguration()
            .setSystem(failingManager)
            .setSystem(failingSystem));
        assertNotNull(failingManager.x);
        assertNotNull(failingSystem.x);
    }
    @Test(expected=MundaneWireException.class)
    public void fail_on_system_not_injected() {
        World world = new World(new WorldConfiguration()
            .setSystem(new FailingNpeSystem()));
    }
    @Test(expected=MundaneWireException.class)
    public void fail_on_manager_not_injected() {
        World world = new World(new WorldConfiguration()
            .setSystem(new FailingNpeManager()));
    }
    // world.inject(obj) works on objects that are not systems/managers.
    @Test
    public void inject_pojo_object() {
        World world = new World(new WorldConfiguration()
            .setSystem(TagManager.class)
            .setSystem(new MappedSystem())
            .setSystem(new MappedSystemAll()));
        PojoWireNoWorld obj = new PojoWireNoWorld();
        world.inject(obj);
        assertNotNull(obj.componentXMapper);
        assertNotNull(obj.tagManager);
        assertNotNull(obj.mappedSystem);
    }
    // register(value) binds by type; register(name, value) binds by name.
    @Test
    public void inject_anything_into_everything() {
        World world = new World(new WorldConfiguration()
            .register("world")
            .register("hupp", "n1")
            .register("blergh", "n2")
            .setSystem(TagManager.class));
        SomeThing st = new SomeThing();
        world.inject(st);
        assertNotNull(st.tagManager);
        assertEquals("n1", st.helloN1);
        assertEquals("world", st.hello);
        assertEquals("n2", st.helloN2);
    }
    // inject(obj, false) = best-effort injection that must not throw.
    @Test
    public void try_inject_on_wired_object_mirrors_inject_behaviour() {
        World world = new World(new WorldConfiguration().register("world").setSystem(TagManager.class));
        SomeThing st = new SomeThing();
        world.inject(st, false);
        assertEquals("world", st.hello);
    }
    @Test
    public void try_inject_on_plain_object_does_nothing() {
        World world = new World(new WorldConfiguration());
        Object object = new Object();
        world.inject(object, false);
    }
    // Static fields are injected, also via subclass or inherited declarations.
    @Test @SuppressWarnings("static-method")
    public void inject_static_field() {
        World w = new World(new WorldConfiguration()
            .setSystem(new ManagerWithStaticField()));
        w.process();
        assertNotNull(ManagerWithStaticField.mapper);
    }
    @Test @SuppressWarnings("static-method")
    public void inject_static_field_extended() {
        World w = new World(new WorldConfiguration()
            .setSystem(new ExtendedStaticManager()));
        w.process();
        assertNotNull(ManagerWithStaticField.mapper);
    }
    @Test @SuppressWarnings("static-method")
    public void inject_static_field_inherited() {
        World w = new World(new WorldConfiguration()
            .setSystem(new ManagerWithStaticField()));
        w.process();
        assertNotNull(ManagerWithStaticField.mapper);
    }
    // ---- fixtures ----------------------------------------------------------
    private static class SomeThing {
        @Wire(name="hupp") private String helloN1;
        @Wire private String hello;
        @Wire(name="blergh") private String helloN2;
        private TagManager tagManager;
    }
    private static class PojoWireNoWorld {
        private ComponentMapper<ComponentX> componentXMapper;
        private TagManager tagManager;
        private MappedSystem mappedSystem;
    }
    private static class MappedSystemAll extends EntityProcessingSystem {
        private ComponentMapper<ComponentX> x;
        private ComponentMapper<ComponentY> y;
        private TagManager tagManager;
        private MappedSystem mappedSystem;
        @SuppressWarnings("unchecked")
        public MappedSystemAll() {
            super(Aspect.all(ComponentX.class, ComponentY.class));
        }
        @Override
        protected void process(Entity e) {}
    }
    private static class MappedSystem extends EntityProcessingSystem {
        @Wire private ComponentMapper<ComponentX> x;
        @Wire private ComponentMapper<ComponentY> y;
        @Wire private TagManager tagManager;
        @Wire private MappedSystemAll mappedSystemAll;
        @SuppressWarnings("unchecked")
        public MappedSystem() {
            super(Aspect.all(ComponentX.class, ComponentY.class));
        }
        @Override
        protected void process(Entity e) {}
    }
    private static class ExtendedStaticManager extends ManagerWithStaticField {}
    private static class ManagerWithStaticField extends Manager{
        static ComponentMapper<ComponentX> mapper;
    }
    private static class MappedManager extends Manager {
        @Wire private ComponentMapper<ComponentX> x;
        @Wire private ComponentMapper<ComponentY> y;
        @Wire private MappedSystem mappedSystem;
        @Wire private TagManager tagManager;
    }
    private static class MappedManagerAll extends Manager {
        private ComponentMapper<ComponentX> x;
        private ComponentMapper<ComponentY> y;
        private MappedSystem mappedSystem;
        private TagManager tagManager;
    }
    private static class BaseManager extends Manager {
        protected ComponentMapper<ComponentX> x;
    }
    private static class ExtendedManager extends BaseManager {
        private ComponentMapper<ComponentY> y;
    }
    private static class FailingManager extends BaseManager {
        @SuppressWarnings("unused")
        private ComponentMapper<ComponentY> y;
    }
    private static abstract class BaseSystem extends VoidEntitySystem {
        protected ComponentMapper<ComponentX> x;
    }
    private static class ExtendedSystem extends BaseSystem {
        private ComponentMapper<ComponentY> y;
        @Override
        protected void processSystem() {}
    }
    private static class FailingSystem extends BaseSystem {
        @SuppressWarnings("unused")
        private FailingManager manager;
        @Override
        protected void processSystem() {}
    }
    private static class FailingNpeSystem extends VoidEntitySystem {
        @SuppressWarnings("unused")
        private FailingManager manager;
        @Override
        protected void processSystem() {}
    }
    private static class FailingNpeManager extends Manager {
        @SuppressWarnings("unused")
        private FailingSystem fail;
    }
    private static class CustomInjectedManager extends Manager {
        @Wire InjectMe injectMe;
        InjectMe nullInjectMe;
    }
    private static class CustomNamedInjectedManager extends Manager {
        @Wire(name = "hi") InjectMe injectMe;
        InjectMe nullInjectMe;
    }
    public static class InjectMe {}
}
/**
* A HTTP plugin for Cordova / Phonegap
*/
package com.synconset;
import org.apache.cordova.CallbackContext;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.BufferedReader;
import java.util.Map;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.atomic.AtomicBoolean;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLContext;
import javax.net.ssl.HostnameVerifier;
import java.util.Iterator;
import android.util.Log;
import com.github.kevinsawicki.http.HttpRequest;
public abstract class CordovaHttp {
protected static final String TAG = "CordovaHTTP";
protected static final String CHARSET = "UTF-8";
private static AtomicBoolean sslPinning = new AtomicBoolean(false);
private static AtomicBoolean acceptAllCerts = new AtomicBoolean(false);
private static List<HttpRequest> httpRequests = new CopyOnWriteArrayList<HttpRequest>();
private String urlString;
private Map<?, ?> params;
private Map<String, String> headers;
private CallbackContext callbackContext;
public CordovaHttp(String urlString, Map<?, ?> params, Map<String, String> headers, CallbackContext callbackContext) {
this.urlString = urlString;
this.params = params;
this.headers = headers;
this.callbackContext = callbackContext;
}
public static void enableSSLPinning(boolean enable) {
sslPinning.set(enable);
if (enable) {
acceptAllCerts.set(false);
}
}
public static void addHttpRequest(HttpRequest httpRequest){
if (httpRequest == null) {
return;
}
httpRequests.add(httpRequest);
}
public static void removeHttpRequest(HttpRequest httpRequest){
if (httpRequest == null) {
return;
}
httpRequests.remove(httpRequest);
}
public static void invalidateSessionCancelingTasks(boolean cancelPendingTasks){
for (HttpRequest httpRequest: httpRequests) {
try {
httpRequest.invalidateSessionCancelingTasks(cancelPendingTasks);
} catch (Exception e) {
System.out.println("e");
e.printStackTrace();
}
}
httpRequests.clear();
}
public static void acceptAllCerts(boolean accept) {
acceptAllCerts.set(accept);
if (accept) {
sslPinning.set(false);
}
}
protected String getUrlString() {
return this.urlString;
}
protected Map<?, ?> getParams() {
return this.params;
}
protected Map<String, String> getHeaders() {
return this.headers;
}
protected CallbackContext getCallbackContext() {
return this.callbackContext;
}
protected HttpRequest setupSecurity(HttpRequest request) {
if (acceptAllCerts.get()) {
request.trustAllCerts();
request.trustAllHosts();
}
if (sslPinning.get()) {
request.pinToCerts();
}
return request;
}
protected void respondWithError(int status, String msg) {
try {
JSONObject response = new JSONObject();
response.put("status", status);
response.put("error", msg);
this.callbackContext.error(response);
} catch (JSONException e) {
this.callbackContext.error(msg);
}
}
protected void respondWithError(String msg) {
this.respondWithError(500, msg);
}
} |
import static com.google.common.truth.Truth.assertThat;
import com.google.common.collect.ImmutableList;
import java.util.*;
import org.junit.*;
/**
 * Tests for {@code TarjanSccSolverAdjacencyList}: verifies the SCC count and that
 * the per-node component ids group nodes into the expected components.
 */
public class TarjanSccSolverAdjacencyListTest {
  // Initialize graph with 'n' nodes.
  public static List<List<Integer>> createGraph(int n) {
    List<List<Integer>> graph = new ArrayList<>();
    for(int i = 0; i < n; i++) graph.add(new ArrayList<>());
    return graph;
  }
  // Add directed edge to graph.
  public static void addEdge(List<List<Integer>> graph, int from, int to) {
    graph.get(from).add(to);
  }
  @Test(expected=IllegalArgumentException.class)
  public void nullGraphConstructor() {
    new TarjanSccSolverAdjacencyList(null);
  }
  // A single node with no edges forms exactly one SCC.
  @Test
  public void singletonCase() {
    int n = 1;
    List<List<Integer>> g = createGraph(n);
    TarjanSccSolverAdjacencyList solver = new TarjanSccSolverAdjacencyList(g);
    solver.solve();
    int[] actual = solver.getSccs();
    int[] expected = new int[n];
    assertThat(actual).isEqualTo(expected);
    assertThat(solver.sccCount()).isEqualTo(1);
  }
  // Two cycles with no edges between them: {0,1} and {2,3,4}.
  @Test
  public void testTwoDisjointComponents() {
    int n = 5;
    List<List<Integer>> g = createGraph(n);
    addEdge(g, 0, 1);
    addEdge(g, 1, 0);
    addEdge(g, 2, 3);
    addEdge(g, 3, 4);
    addEdge(g, 4, 2);
    TarjanSccSolverAdjacencyList solver = new TarjanSccSolverAdjacencyList(g);
    solver.solve();
    List<List<Integer>> expectedSccs = ImmutableList.of(
        ImmutableList.of(0, 1),
        ImmutableList.of(2, 3, 4)
    );
    assertThat(solver.sccCount()).isEqualTo(expectedSccs.size());
    assertThat(isScc(solver.getSccs(), expectedSccs)).isTrue();
  }
  // Two cycles sharing node 1; everything collapses into one SCC.
  @Test
  public void testButterflyCase() {
    int n = 5;
    List<List<Integer>> g = createGraph(n);
    addEdge(g, 0, 1);
    addEdge(g, 1, 2);
    addEdge(g, 2, 3);
    addEdge(g, 3, 1);
    addEdge(g, 1, 4);
    addEdge(g, 4, 0);
    TarjanSccSolverAdjacencyList solver = new TarjanSccSolverAdjacencyList(g);
    solver.solve();
    List<List<Integer>> expectedSccs = ImmutableList.of(
        ImmutableList.of(0, 1, 2, 3, 4)
    );
    assertThat(solver.sccCount()).isEqualTo(expectedSccs.size());
    assertThat(isScc(solver.getSccs(), expectedSccs)).isTrue();
  }
  // Checks each expected component maps to exactly one component id. NOTE(review):
  // it does not verify that distinct components received distinct ids — the
  // sccCount assertions in the tests above cover that case.
  private static boolean isScc(int[] ids, List<List<Integer>> expectedSccs) {
    Set<Integer> set = new HashSet<>();
    for(List<Integer> indexes : expectedSccs) {
      set.clear();
      for (int index : indexes) set.add(ids[index]);
      if (set.size() != 1) return false;
    }
    return true;
  }
}
package org.kohsuke.args4j.spi;
import org.kohsuke.args4j.CmdLineParser;
import org.kohsuke.args4j.OptionDef;
import java.nio.file.Path;
/**
 * Takes a classpath-like option (e.g. {@code -cp a.jar;b.jar;c}) and maps it to a
 * collection of {@link Path}, splitting on the platform path separator and
 * delegating each element to a {@link PathOptionHandler}.
 *
 * @author kmahoney
 */
public class MultiPathOptionHandler extends DelimitedOptionHandler<Path> {
    // Platform path separator (':' on Unix, ';' on Windows). The misspelling
    // "Seperator" is kept: the field is protected and may be referenced by subclasses.
    protected static String sysPathSeperator = System.getProperty("path.separator");
    public MultiPathOptionHandler(CmdLineParser parser, OptionDef option, Setter<? super Path> setter) {
        super(parser, option, setter, sysPathSeperator, new PathOptionHandler(parser, option, setter));
    }
}
package com.parrot.arsdk.arutils;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.InputMismatchException;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.Scanner;
import java.util.concurrent.Semaphore;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import android.annotation.SuppressLint;
import android.bluetooth.BluetoothGatt;
import android.bluetooth.BluetoothGattCharacteristic;
import android.bluetooth.BluetoothGattService;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothSocket;
import android.os.AsyncTask;
import android.os.Environment;
import android.util.Log;
import android.content.Context;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import android.os.Build;
import java.util.UUID;
import java.io.InputStream;
import java.io.OutputStream;
import com.parrot.arsdk.arsal.ARSALBLEManager;
import com.parrot.arsdk.arsal.ARSALBLEManager.ARSALManagerNotificationData;
import com.parrot.arsdk.arsal.ARSALPrint;
import com.parrot.arsdk.arsal.ARSAL_ERROR_ENUM;
import com.parrot.arsdk.arsal.ARUUID;
import com.parrot.arsdk.arnetworkal.ARNetworkALBLENetwork;
import java.nio.ByteBuffer;
// FTP-like transfer over Bluetooth RFCOMM for Parrot devices. Singleton via the
// initialization-on-demand holder idiom; native callbacks bridge progress to JNI.
public class ARUtilsRFCommFtp
{
    private static final String LOG_TAG = "ARUtilsRFCommFTP.java";
    private static final String RFCOMM_UPDATE_KEY = "UPD";
    public final static String RFCOMM_GETTING_KEY = "kARUTILS_BLERFComm_Getting";
    // BLE-side state: manager, registered GATT device/port, and a reference count
    // guarded by connectionLock.
    private ARSALBLEManager bleManager = null;
    private BluetoothGatt gattDevice = null;
    private int port;
    private int connectionCount = 0;
    private Lock connectionLock = new ReentrantLock();
    private ArrayList<BluetoothGattCharacteristic> arrayGetting = null;
    private native void nativeProgressCallback(long nativeCallbackObject, float percent);
    private native static void nativeJNIInit();
    private BluetoothGattCharacteristic rfCommWriteCharac;
    private BluetoothGattCharacteristic rfCommReadCharac;
    // Type of message
    protected static final int ST_NOT_CONNECTED = 0;
    protected static final int ST_CONNECTING = 1;
    protected static final int ST_CONNECTED = 2;
    public static final byte TYPE_MES_OPEN_SESSION = 0x00;
    public static final byte TYPE_MES_CLOSE_SESSION = 0x01;
    private static final byte TYPE_MES_ACKNOWLEDGT = 0x02;
    public static final byte TYPE_MES_DATA = (byte) 0x80; // get or set request
    public static final String SOFTWARE_DOWNLOAD_SIZE_SET = "/api/software/download_size/set";
    private static final UUID MY_UUID = UUID.fromString("8b6814d3-6ce7-4498-9700-9312c1711f63"); // TODO change this name
    private static final Integer RFCOMM_CHANNEL = 21;
    // RFCOMM socket state machine (ST_* above) and stream endpoints.
    private BluetoothSocket mSocket;
    private InputStream mInStream;
    private OutputStream mOutStream;
    private boolean mIsOpeningSession = false;
    private int mState = ST_NOT_CONNECTED;
    private BluetoothDevice mDevice;
    // Register native callbacks once when the class is loaded.
    static
    {
        nativeJNIInit();
    }
    // Private: instances are only created through the holder below.
    private ARUtilsRFCommFtp()
    {
    }
    // Lazy, thread-safe singleton holder (class-loading guarantees initialization).
    private static class ARUtilsRFCommFtpHolder
    {
        private final static ARUtilsRFCommFtp instance = new ARUtilsRFCommFtp();
    }
    // Returns the singleton, binding it to the given Android context's BLE manager.
    public static ARUtilsRFCommFtp getInstance(Context context)
    {
        ARUtilsRFCommFtp instance = ARUtilsRFCommFtpHolder.instance;
        if (context == null)
        {
            throw new IllegalArgumentException("Context must not be null");
        }
        instance.setBLEManager(context);
        return instance;
    }
    // Lazily attaches the shared ARSALBLEManager; no-op once set.
    private synchronized void setBLEManager(Context context)
    {
        if (this.bleManager == null)
        {
            if (context == null)
            {
                throw new IllegalArgumentException("Context must not be null");
            }
            this.bleManager = ARSALBLEManager.getInstance(context);
        }
    }
/**
 * Registers a GATT device/port pair for use by this FTP helper.
 * The first registration stores the device, discovers the RFComm
 * characteristics and subscribes to notifications; repeated registrations
 * of the same device/port only bump the reference count.
 *
 * @param gattDevice the connected GATT device (reference-compared against the stored one)
 * @param port       the RFComm port associated with the device
 * @return true on success; false when a different device/port is already registered
 */
public boolean registerDevice(BluetoothGatt gattDevice, int port)
{
    ARSALPrint.d(LOG_TAG, "registerDevice " + gattDevice.toString() + " port : " + port);
    if (connectionCount == 0)
    {
        // First registration: remember the device and set up notifications.
        this.gattDevice = gattDevice;
        this.port = port;
        connectionCount++;
        searchForInterestingCharacs();
        return registerCharacteristics();
    }
    if ((this.gattDevice == gattDevice) && (this.port == port))
    {
        // Same device/port registered again: just increase the refcount.
        connectionCount++;
        return true;
    }
    // A different device/port is already registered.
    ARSALPrint.e(LOG_TAG, "Bad parameters");
    return false;
}
public boolean unregisterDevice()
{
boolean ret = true;
if (connectionCount > 0)
{
if (connectionCount == 1)
{
this.gattDevice = null;
this.port = 0;
unregisterCharacteristics();
}
connectionCount
}
else
{
ARSALPrint.e(LOG_TAG, "Bad parameters");
ret = false;
}
return ret;
}
@SuppressLint("NewApi")
// Walks the GATT services of the registered device and caches the Parrot
// RFComm read/write characteristics in rfCommReadCharac / rfCommWriteCharac.
public void searchForInterestingCharacs()
{
List<BluetoothGattService> services = gattDevice.getServices();
// NOTE(review): 'error' and 'ret' are assigned but never used in this method.
ARSAL_ERROR_ENUM error = ARSAL_ERROR_ENUM.ARSAL_OK;
boolean ret = true;
ARSALPrint.d(LOG_TAG, "registerCharacteristics");
// store in variables the characteristics we will need
Iterator<BluetoothGattService> servicesIterator = services.iterator();
while (servicesIterator.hasNext())
{
BluetoothGattService service = servicesIterator.next();
String serviceUuid = ARUUID.getShortUuid(service.getUuid());
String name = ARUUID.getShortUuid(service.getUuid());
ARSALPrint.d(LOG_TAG, "service " + name);
if (serviceUuid.startsWith(ARNetworkALBLENetwork.ARNETWORKAL_BLENETWORK_PARROT_SERVICE_PREFIX_UUID_RFCOMM))
{
List<BluetoothGattCharacteristic> characteristics = service.getCharacteristics();
Iterator<BluetoothGattCharacteristic> characteristicsIterator = characteristics.iterator();
while (characteristicsIterator.hasNext())
{
BluetoothGattCharacteristic characteristic = characteristicsIterator.next();
String characteristicUuid = ARUUID.getShortUuid(characteristic.getUuid());
ARSALPrint.d(LOG_TAG, "characteristic " + characteristicUuid);
if (characteristicUuid.startsWith(ARNetworkALBLENetwork.ARNETWORKAL_BLENETWORK_PARROT_CHARACTERISTIC_PREFIX_UUID_RFCOMM_READ))
{
this.rfCommReadCharac = characteristic;
}
else if (characteristicUuid.startsWith(ARNetworkALBLENetwork.ARNETWORKAL_BLENETWORK_PARROT_CHARACTERISTIC_PREFIX_UUID_RFCOMM_WRITE))
{
this.rfCommWriteCharac = characteristic;
// NOTE(review): this overwrites rfCommReadCharac with the WRITE characteristic
// whenever the write UUID is seen after the read UUID — confirm this is intended
// (it looks like a copy-paste slip).
this.rfCommReadCharac = characteristic;
}
}
}
}
}
/**
 * Subscribes the BLE manager to notifications on the RFComm read
 * characteristic discovered by searchForInterestingCharacs().
 *
 * @return true if the read characteristic was available and registered; false otherwise
 */
public boolean registerCharacteristics()
{
    ARSALPrint.d(LOG_TAG, "registerCharacteristics");
    arrayGetting = null;
    if (this.rfCommReadCharac == null)
    {
        // Nothing discovered yet — cannot register.
        return false;
    }
    this.arrayGetting = new ArrayList<BluetoothGattCharacteristic>();
    this.arrayGetting.add(rfCommReadCharac);
    bleManager.registerNotificationCharacteristics(this.arrayGetting, RFCOMM_GETTING_KEY);
    return true;
}
/**
 * Cancels the notification subscription created by registerCharacteristics().
 *
 * @return the BLE manager's unregister result
 */
public boolean unregisterCharacteristics()
{
    ARSALPrint.d(LOG_TAG, "unregisterCharacteristics");
    return bleManager.unregisterNotificationCharacteristics(RFCOMM_GETTING_KEY);
}
/* Do nothing*/
/**
 * Opens the RFComm session: sends an OPEN_SESSION header and blocks until
 * the peer answers (or the stream fails, in which case the connection is
 * torn down).
 */
private void openSession() {
ARSALPrint.d(LOG_TAG, "open RFComm session");
mIsOpeningSession = true;
write(getHeaderFirst(0, TYPE_MES_OPEN_SESSION));
byte[] readArray = new byte[4096];
try {
// wait for the answer
// NOTE(review): the answer's content and length are not inspected; any
// return from read() (including -1/EOF) is treated as success — confirm.
int readLength = mInStream.read(readArray);
}
catch (IOException e) {
// NOTE(review): mIsOpeningSession stays true on this error path — confirm intended.
closeConnection();
return;
}
mIsOpeningSession = false;
mState = ST_CONNECTED;
}
/**
 * Closes the RFComm session: sends a CLOSE_SESSION header, waits for the
 * peer's answer, then marks the state as not connected.
 */
private void closeSession() {
ARSALPrint.d(LOG_TAG, "close RFComm session");
// NOTE(review): setting mIsOpeningSession = true here (and never clearing it)
// looks like a copy-paste from openSession() — confirm.
mIsOpeningSession = true;
write(getHeaderFirst(0, TYPE_MES_CLOSE_SESSION));
byte[] readArray = new byte[4096];
try {
// wait for the answer
int readLength = mInStream.read(readArray);
}
catch (IOException e) {
closeConnection();
return;
}
mState = ST_NOT_CONNECTED;
}
/**
 * Streams the given file to the device in fixed-size chunks, reporting
 * progress through the native callback after every chunk and checking the
 * cancel semaphore between chunks.
 *
 * Improvements over the original: the FileInputStream is managed with
 * try-with-resources (the original duplicated manual close() calls on four
 * exit paths), unused locals and dead commented-out code were removed, and
 * the misspelled nbSstoredBytes local was renamed.
 *
 * @param file                 the file to upload
 * @param nativeCallbackObject native progress callback handle; 0 disables progress reporting
 * @param cancelSem            semaphore polled via isConnectionCanceled() to abort the upload
 */
private void sendFile(File file, long nativeCallbackObject, Semaphore cancelSem)
{
    int nbStoredBytes = 0; // for now, send the entire file (=> no resume)
    boolean ret = true;
    long total = 0;
    // try-with-resources guarantees the stream is closed on every exit path.
    try (FileInputStream f = new FileInputStream(file)) {
        byte[] buffer = new byte[1001];
        int len;
        int id = 0;
        // Skip bytes already stored on the device (no-op while resume is unsupported).
        while (nbStoredBytes > 0) {
            nbStoredBytes -= (int) f.skip(nbStoredBytes);
        }
        long fileSize = file.length();
        // Send to device, chunk by chunk.
        while (ret && (len = f.read(buffer)) > 0) {
            total += len;
            byte[] request = new byte[len];
            System.arraycopy(buffer, 0, request, 0, len);
            if (!sendFirmwareOnDevice(request, id)) {
                ARSALPrint.e(LOG_TAG, "upload firmware, task was canceled");
                return;
            }
            float percent = ((float) total / (float) fileSize) * 100.f;
            if (nativeCallbackObject != 0)
            {
                nativeProgressCallback(nativeCallbackObject, percent);
            }
            if (isConnectionCanceled(cancelSem))
            {
                ARSALPrint.d(LOG_TAG, "Canceled received during file upload");
                ret = false; // finish this iteration, then stop the loop
            }
            id++;
        }
    } catch (FileNotFoundException e) {
        e.printStackTrace();
        return;
    } catch (Exception e) {
        e.printStackTrace();
        return;
    }
    ARSALPrint.d(LOG_TAG, "Sending done. Sent " + total + " bytes");
}
/**
 * Wraps one chunk of firmware data into an upload packet and writes it to
 * the output stream.
 *
 * @param data the chunk payload
 * @param id   the packet identifier (sequence number)
 * @return true if the packet was handed to the stream; false when not
 *         connected or when the write threw
 */
public boolean sendFirmwareOnDevice(byte[] data, int id) {
    if (mState != ST_CONNECTED) {
        // No session — nothing can be sent.
        return false;
    }
    byte[] packet = getUploadPacket(data, id);
    try {
        write(packet);
    } catch (Exception e) {
        return false;
    }
    return true;
}
// Serialized write to the RFComm output stream. An IOException is logged and
// swallowed here, so callers cannot distinguish a failed write from a
// successful one via this method alone.
private synchronized void write(byte[] buffer) {
try {
mOutStream.write(buffer);
} catch (IOException e) {
ARSALPrint.e(LOG_TAG, "Exception during write" + e.getMessage());
}
}
/**
 * Builds the 3-byte message header: ZZ (2 bytes, big-endian total packet
 * size including this header) followed by T (the message type byte).
 *
 * @param length payload size in bytes (the 3 header bytes are added internally)
 * @param type   message type byte (one of the TYPE_MES_* constants)
 * @return the 3-byte header {ZZ_hi, ZZ_lo, T}
 */
private static byte[] getHeaderFirst(int length, byte type) {
    final int totalSize = length + 3; // ZZ counts the 3 header bytes too
    final byte[] header = new byte[3];
    header[0] = (byte) (totalSize >>> 8); // most significant byte first
    header[1] = (byte) totalSize;
    header[2] = type;
    return header;
}
/**
 * Encodes a message length as ZZ: two bytes, most significant byte first
 * (big-endian).
 *
 * @param length the total number of bytes of the message
 * @return {length >>> 8, length & 0xFF}
 */
private static byte[] sizeIntToByte(int length) {
    return new byte[]{ (byte) (length >>> 8), (byte) length };
}
/**
 * Encodes a message length as ZZ with the least significant byte first
 * (little-endian) — the byte-order counterpart of sizeIntToByte.
 *
 * @param length the total number of bytes of the message
 * @return {length & 0xFF, length >>> 8}
 */
private static byte[] sizeIntToByte2(int length) {
    return new byte[]{ (byte) length, (byte) (length >>> 8) };
}
// Tears down the RFComm connection: closes the input stream, output stream
// and socket (in that order) and resets the state to ST_NOT_CONNECTED.
// NOTE(review): all three closes share one try block, so a failure closing
// mInStream skips closing mOutStream and mSocket — confirm this is acceptable.
public synchronized void closeConnection() {
try {
ARSALPrint.e(LOG_TAG, "Cancel");
if (mInStream != null)
{
mInStream.close();
mInStream = null;
}
if (mOutStream != null)
{
mOutStream.close();
mOutStream = null;
}
if (mSocket != null)
{
mSocket.close();
mSocket = null;
}
} catch (IOException e) {
ARSALPrint.e(LOG_TAG, "Closing of mSocket failed", e);
}
// Always reported as disconnected, even if a close above failed.
mState = ST_NOT_CONNECTED;
}
/**
 * Builds one firmware-upload packet for the Zik device from a data chunk.
 * Layout: 3-byte outer header | X | Y | ZZ | packet type | packet id (2B) | data | 2-byte sign.
 */
private static byte[] getUploadPacket(byte[] data, int id) {
// 3-byte header
// 4-byte header for uploading: XYZZ (see download2.odt)
// 1-byte packet type (DATA = 0)
// 2-byte packet identifier (MSB first),
// core on several bytes
// 2-byte trailer (sign)
// X current packet number, on 1 byte, value 1
// Y total number of packets, on 1 byte, value 1
// ZZ total number of bytes of the packet (including this header)
// optional trailer (sign) : inclusive OR (1 byte) and exclusive OR (1 byte).
// These 2 bytes are computed from start of packet up to last header byte
int sizePack2 = data.length + 9;
byte[] header = getHeaderFirst(sizePack2, TYPE_MES_DATA); // ignored on Zik side, but required
byte[] xy = {0x01, 0x01};
// ZZ is emitted least-significant byte first (the putShort output is swapped).
byte[] zzDesordered = ByteBuffer.allocate(2).putShort((short)(data.length + 9)).array();
byte[] zz = {zzDesordered[1], zzDesordered[0]};
//byte[] zz = sizeIntToByte(data.length + 9); // without length of the first header, 2 byte - trailer (sign) at the end
byte[] pktType = {0x00};
// Packet id is likewise byte-swapped to little-endian.
byte[] pktIdDesordered = ByteBuffer.allocate(2).putShort((short)id).array();//sizeIntToByte2(id);
byte[] pktId = {pktIdDesordered[1], pktIdDesordered[0]};
byte[] request = new byte[data.length + 12]; // all headers plus the 2-byte trailer (sign)
System.arraycopy(header, 0, request, 0, header.length);
System.arraycopy(xy, 0, request, header.length, xy.length);
System.arraycopy(zz, 0, request, header.length + xy.length, zz.length);
System.arraycopy(pktType, 0, request, header.length + xy.length + zz.length, pktType.length);
System.arraycopy(pktId, 0, request, header.length + xy.length + zz.length + pktType.length, pktId.length);
System.arraycopy(data, 0, request, header.length + xy.length + zz.length + pktType.length + pktId.length, data.length);
// signing: OR / XOR over everything after the 3-byte outer header (the two
// trailer slots are still zero at this point, so including them is a no-op)
byte a = 0x00;
byte b = 0x00;
for ( int i=header.length; i<request.length; i++ ) { // ignoring the first header (3 bytes)
//for ( int i=0; i<request.length - 2; i++ ) { // ignoring the first header (3 bytes)
a |= request[i];
b ^= request[i];
}
byte[] sign = {(byte) a, (byte) b};
System.arraycopy(sign, 0, request, header.length + xy.length + zz.length + pktType.length + pktId.length + data.length, sign.length);
return request;
}
} |
import boofcv.abst.filter.blur.BlurFilter;
import boofcv.alg.feature.detect.edge.CannyEdge;
import boofcv.alg.feature.detect.edge.EdgeContour;
import boofcv.alg.filter.binary.BinaryImageOps;
import boofcv.alg.filter.binary.Contour;
import boofcv.alg.filter.binary.GThresholdImageOps;
import boofcv.alg.filter.binary.ThresholdImageOps;
import boofcv.alg.filter.blur.BlurImageOps;
import boofcv.alg.filter.blur.GBlurImageOps;
import boofcv.factory.feature.detect.edge.FactoryEdgeDetectors;
import boofcv.factory.filter.blur.FactoryBlurFilter;
import boofcv.gui.ListDisplayPanel;
import boofcv.gui.binary.VisualizeBinaryData;
import boofcv.gui.image.ShowImages;
import boofcv.io.image.ConvertBufferedImage;
import boofcv.io.image.UtilImageIO;
import boofcv.struct.ConnectRule;
import boofcv.struct.image.GrayF32;
import boofcv.struct.image.GrayS16;
import boofcv.struct.image.GrayS32;
import boofcv.struct.image.GrayU8;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
/**
* Extract a standardised card (or cards) from an image.
*/
public class CardDetector {
// Convenience overload: load the image from disk, no debug output.
public void scan(String filename) throws IOException {
scan(UtilImageIO.loadImage(filename));
}
// Convenience overload: load the image from disk with an explicit debug flag.
public void scan(String filename, boolean debug) throws IOException {
scan(UtilImageIO.loadImage(filename), debug);
}
public void scan(BufferedImage originalImage) throws IOException {
scan(originalImage, false);
}
// Experimental pipeline: blur variants, morphology, Canny edges and contour
// extraction, all shown side by side in a ListDisplayPanel.
// NOTE(review): the 'debug' parameter is never read in this body — confirm intent.
public void scan(BufferedImage originalImage, boolean debug) throws IOException {
// TODO
ListDisplayPanel panel = new ListDisplayPanel();
GrayU8 gray = ConvertBufferedImage.convertFromSingle(originalImage, null, GrayU8.class);
GrayU8 blurred = gray.createSameShape();
// size of the blur kernel. square region with a width of radius*2 + 1
int radius = 8;
// Apply gaussian blur using a procedural interface
GBlurImageOps.gaussian(gray, blurred, -1, radius, null);
panel.addImage(ConvertBufferedImage.convertTo(blurred, null, true),"Gaussian");
// Apply a mean filter using an object oriented interface. This has the advantage of automatically
// recycling memory used in intermediate steps
BlurFilter<GrayU8> filterMean = FactoryBlurFilter.mean(GrayU8.class, radius);
filterMean.process(gray, blurred);
panel.addImage(ConvertBufferedImage.convertTo(blurred, null, true),"Mean");
// Apply a median filter using image type specific procedural interface. Won't work if the type
// isn't known at compile time
GrayU8 median = BlurImageOps.median(gray, blurred, radius);
panel.addImage(ConvertBufferedImage.convertTo(blurred, null, true),"Median");
// NOTE(review): erode8/dilate8 are applied to the raw grayscale image here,
// not a thresholded binary image as in scan2 — confirm this is intentional.
GrayU8 filtered = BinaryImageOps.erode8(gray, 1, null);
filtered = BinaryImageOps.dilate8(filtered, 1, null);
panel.addImage(ConvertBufferedImage.convertTo(filtered, null, true),"filtered");
// Canny edge
GrayU8 edgeImage = gray.createSameShape();
CannyEdge<GrayU8,GrayS16> canny = FactoryEdgeDetectors.canny(2,true, true, GrayU8.class, GrayS16.class);
// The edge image is actually an optional parameter. If you don't need it just pass in null
canny.process(median,0.1f,0.3f,edgeImage);
List<EdgeContour> edgeContours = canny.getContours();
// The 'edgeContours' is a tree graph that can be difficult to process. An alternative is to extract
// the contours from the binary image, which will produce a single loop for each connected cluster of pixels.
// Note that you are only interested in external contours.
List<Contour> contours = BinaryImageOps.contour(edgeImage, ConnectRule.EIGHT, null);
BufferedImage visualBinary = VisualizeBinaryData.renderBinary(edgeImage, false, null);
BufferedImage visualCannyContour = VisualizeBinaryData.renderContours(edgeContours,null,
gray.width,gray.height,null);
int colorExternal = 0xFFFFFF;
int colorInternal = 0xFF2020;
BufferedImage visualEdgeContour = VisualizeBinaryData.renderContours(contours, colorExternal, colorInternal,
gray.width, gray.height, null);
panel.addImage(visualBinary,"Binary Edges from Canny");
panel.addImage(visualCannyContour, "Canny Trace Graph");
panel.addImage(visualEdgeContour,"Contour from Canny Binary");
ShowImages.showWindow(panel,"Image Blur Examples",true);
}
// Alternative pipeline: Otsu threshold, binary morphology, blob/contour
// detection, with the intermediate stages displayed in a panel.
public void scan2(String filename) throws IOException {
BufferedImage image = UtilImageIO.loadImage(filename);
// convert into a usable format
GrayF32 input = ConvertBufferedImage.convertFromSingle(image, null, GrayF32.class);
GrayU8 binary = new GrayU8(input.width,input.height);
GrayS32 label = new GrayS32(input.width,input.height);
// Select a global threshold using Otsu's method.
double threshold = GThresholdImageOps.computeOtsu(input, 0, 255);
// Apply the threshold to create a binary image
ThresholdImageOps.threshold(input, binary, (float) threshold, true);
// remove small blobs through erosion and dilation
// The null in the input indicates that it should internally declare the work image it needs
// this is less efficient, but easier to code.
GrayU8 filtered = BinaryImageOps.erode8(binary, 1, null);
filtered = BinaryImageOps.dilate8(filtered, 1, null);
// Detect blobs inside the image using an 8-connect rule
List<Contour> contours = BinaryImageOps.contour(filtered, ConnectRule.EIGHT, label);
// colors of contours
int colorExternal = 0xFFFFFF;
int colorInternal = 0xFF2020;
// display the results
BufferedImage visualBinary = VisualizeBinaryData.renderBinary(binary, false, null);
BufferedImage visualFiltered = VisualizeBinaryData.renderBinary(filtered, false, null);
BufferedImage visualLabel = VisualizeBinaryData.renderLabeledBG(label, contours.size(), null);
BufferedImage visualContour = VisualizeBinaryData.renderContours(contours, colorExternal, colorInternal,
input.width, input.height, null);
ListDisplayPanel panel = new ListDisplayPanel();
panel.addImage(visualBinary, "Binary Original");
panel.addImage(visualFiltered, "Binary Filtered");
panel.addImage(visualLabel, "Labeled Blobs");
panel.addImage(visualContour, "Contours");
ShowImages.showWindow(panel,"Binary Operations",true);
}
// CLI entry point: runs the scan2 pipeline on the image path in args[0].
public static void main(String[] args) throws IOException {
new CardDetector().scan2(args[0]);
}
}
package edu.mit.mobile.android.content.test.sample2;
import android.content.ContentValues;
import android.database.sqlite.SQLiteDatabase;
import android.net.Uri;
import edu.mit.mobile.android.content.ContentItem;
import edu.mit.mobile.android.content.DBSortOrder;
import edu.mit.mobile.android.content.DBTable;
import edu.mit.mobile.android.content.ForeignKeyManager;
import edu.mit.mobile.android.content.OnSaveListener;
import edu.mit.mobile.android.content.ProviderUtils;
import edu.mit.mobile.android.content.UriPath;
import edu.mit.mobile.android.content.column.DBColumn;
import edu.mit.mobile.android.content.column.DatetimeColumn;
import edu.mit.mobile.android.content.column.TextColumn;
import edu.mit.mobile.android.content.test.SampleProvider2;
/**
* A slightly more complex example to test. Doesn't entirely make sense as a
* data item on a phone, but serves as a nice, well-understood demonstration and
* test.
*
* @author steve
*
*/
@DBTable(BlogPost.TABLE)
@UriPath(BlogPost.PATH)
@DBSortOrder(BlogPost.SORT_ORDER_DEFAULT)
public class BlogPost implements ContentItem {
// Defining the table name as a static string will let you use it in your
// content provider if you ever need to do custom DB queries.
public static final String TABLE = "posts";
// Column definitions below. ContentItem contains one column definition
// for the BaseColumns._ID which defines the primary key.
@DBColumn(type = DatetimeColumn.class, defaultValue = DatetimeColumn.NOW_IN_MILLISECONDS)
public static final String CREATED_DATE = "created";
@DBColumn(type = DatetimeColumn.class, defaultValue = DatetimeColumn.NOW_IN_MILLISECONDS)
public static final String MODIFIED_DATE = "modified";
@DBColumn(type = TextColumn.class, notnull = true)
public static final String TITLE = "title";
@DBColumn(type = TextColumn.class, notnull = true)
public static final String BODY = "body";
// The slug must be unique: it is typically used as a URL-friendly identifier.
@DBColumn(type = TextColumn.class, unique = true, notnull = true)
public static final String SLUG = "slug";
// The path component of the content URI.
public static final String PATH = "posts";
// the DBSortOrder annotation on this class denotes the default sort order.
public static final String SORT_ORDER_DEFAULT = CREATED_DATE + " DESC";
// This is a helpful tool connecting back to the "child" of this object. This is similar
// to Django's relation manager, although we need to define it ourselves.
public static final ForeignKeyManager COMMENTS = new ForeignKeyManager(Comment.class);
// The SimpleContentProvider constructs content URIs based on your provided
// path and authority.
// This constant is not necessary, but is very handy for doing queries.
public static final Uri CONTENT_URI = ProviderUtils.toContentUri(SampleProvider2.AUTHORITY, PATH);
// Pre-save hook: when no slug was supplied but a title was, derive the slug
// from the title (whitespace -> "-", then strip everything that is not a
// word character or hyphen). Runs before every save.
public static final OnSaveListener ON_SAVE_LISTENER = new OnSaveListener(){
@Override
public ContentValues onPreSave(SQLiteDatabase db, Uri uri, ContentValues cv) {
if (! cv.containsKey(SLUG) && cv.containsKey(TITLE)){
final String slug = cv.getAsString(TITLE).replaceAll("\\s+", "-").replaceAll("[^\\w-]+", "");
cv.put(SLUG, slug);
}
return cv;
}
};
}
import org.eclipse.jetty.server.HttpConnection;
import org.eclipse.jetty.server.Request;
import org.eclipse.jetty.server.handler.AbstractHandler;
import org.eclipse.jetty.websocket.server.WebSocketServerConnection;
import org.eclipse.jetty.websocket.server.WebSocketServerFactory;
import javax.imageio.ImageIO;
import javax.servlet.ServletException;
import javax.servlet.ServletInputStream;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.awt.image.BufferedImage;
import java.io.*;
public class HelloHandler extends AbstractHandler {
WebSocketServerFactory _webSocketFactory;
public HelloHandler() {
//_webSocketFactory = factory;
}
public void handle(String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response)
throws IOException, ServletException
{
if (target.equals("/incoming/")) {
return;
}
String requestMethod = request.getMethod();
String action = request.getParameter("Action");
if (requestMethod.equals("POST")) {
System.out.print(requestMethod);
if (action.equals("Action Notes")) {
response.setContentType("text/html;charset=utf-8");
response.setStatus(HttpServletResponse.SC_OK);
baseRequest.setHandled(true);
System.err.println("hola hola hola "+Main.notes);
//Sending the notes to the glass application.
response.getWriter().println(Main.notes);
SessionHQ.getInstance().sendAction("tkraska", "Action Start");
} else if (action.equals("Post Image")) {
response.setContentType("image/png");
response.setStatus(HttpServletResponse.SC_OK);
baseRequest.setHandled(true);
System.out.println("The content length: " + request.getContentLength());
// BufferedInputStream inputStream = new BufferedInputStream(request.getInputStream());
// BufferedImage image = ImageIO.read(inputStream);//Now I got the image
//I'm going to send it back just to make sure that I'm doing this properly
//java.util.Scanner s = new java.util.Scanner(request.getInputStream()).useDelimiter("\\A");
//this input stream is only for the REQUEST. I need to get the content!!!
// while (s.hasNext()) {
// System.out.println("Yoloooo");
// System.out.println(s.next());
InputStream inputStream = request.getInputStream();
OutputStream outputStream = response.getOutputStream();
byte[] buffer = new byte[1024];
int len;
// System.err.println("the request is" + request);
while ((len =inputStream.read(buffer)) != -1) {
outputStream.write(buffer, 0, len);
}
//Another aproach
// BufferedReader bR = request.getReader();
// String ln = null;
// while ((ln = bR.readLine()) != null) {
// System.out.println(ln);
} else {
//Since we are not requesting the notes, we just send the action to the client
SessionHQ.getInstance().sendAction("tkraska", action);
}
} else {
response.setContentType("text/html;charset=utf-8");
response.setStatus(HttpServletResponse.SC_OK);
baseRequest.setHandled(true);
response.getWriter().println("<h1>Welcome to Glass App. Developed by David Correa, Mentored by Tim Kraska</h1>");
}
// Thread.sleep(3000);
// SessionHQ.getInstance().sendMessage("tkraska", "Message Start");
// Thread.sleep(3000);
// SessionHQ.getInstance().sendMessage("tkraska", "Message Next");
// Thread.sleep(3000);
// SessionHQ.getInstance().sendMessage("tkraska", "Message Previous");
// Thread.sleep(3000);
// SessionHQ.getInstance().sendMessage("tkraska", "Message End");
}
//WebSocketServerConnection wbSConnection = (WebSocketServerConnection) request.getAttribute(HttpConnection.UPGRADE_CONNECTION_ATTRIBUTE);
// response.setContentType("text/html;charset=utf-8");
// response.setStatus(HttpServletResponse.SC_OK);
// baseRequest.setHandled(true);
// response.getWriter().println("<h1>Hello World</h1>");
} |
package be.ibridge.kettle.job.entry.sftp;
import java.io.File;
import java.net.InetAddress;
import java.util.ArrayList;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.eclipse.swt.widgets.Shell;
import org.w3c.dom.Node;
import be.ibridge.kettle.core.Const;
import be.ibridge.kettle.core.LogWriter;
import be.ibridge.kettle.core.Result;
import be.ibridge.kettle.core.ResultFile;
import be.ibridge.kettle.core.XMLHandler;
import be.ibridge.kettle.core.exception.KettleDatabaseException;
import be.ibridge.kettle.core.exception.KettleException;
import be.ibridge.kettle.core.exception.KettleXMLException;
import be.ibridge.kettle.core.util.StringUtil;
import be.ibridge.kettle.job.Job;
import be.ibridge.kettle.job.JobMeta;
import be.ibridge.kettle.job.entry.JobEntryBase;
import be.ibridge.kettle.job.entry.JobEntryDialogInterface;
import be.ibridge.kettle.job.entry.JobEntryInterface;
import be.ibridge.kettle.repository.Repository;
/**
* This defines an FTP job entry.
*
* @author Matt
* @since 05-11-2003
*
*/
public class JobEntrySFTP extends JobEntryBase implements Cloneable, JobEntryInterface
{
private String serverName;
private String serverPort;
private String userName;
private String password;
private String sftpDirectory;
private String targetDirectory;
private String wildcard;
private boolean remove;
public JobEntrySFTP(String n)
{
super(n, "");
serverName=null;
serverPort="22";
setID(-1L);
setType(JobEntryInterface.TYPE_JOBENTRY_SFTP);
}
public JobEntrySFTP()
{
this("");
}
public JobEntrySFTP(JobEntryBase jeb)
{
super(jeb);
}
public Object clone()
{
JobEntrySFTP je = (JobEntrySFTP) super.clone();
return je;
}
public String getXML()
{
StringBuffer retval = new StringBuffer(200);
retval.append(super.getXML());
retval.append(" ").append(XMLHandler.addTagValue("servername", serverName));
retval.append(" ").append(XMLHandler.addTagValue("serverport", serverPort));
retval.append(" ").append(XMLHandler.addTagValue("username", userName));
retval.append(" ").append(XMLHandler.addTagValue("password", password));
retval.append(" ").append(XMLHandler.addTagValue("sftpdirectory", sftpDirectory));
retval.append(" ").append(XMLHandler.addTagValue("targetdirectory", targetDirectory));
retval.append(" ").append(XMLHandler.addTagValue("wildcard", wildcard));
retval.append(" ").append(XMLHandler.addTagValue("remove", remove));
return retval.toString();
}
public void loadXML(Node entrynode, ArrayList databases, Repository rep) throws KettleXMLException
{
try
{
super.loadXML(entrynode, databases);
serverName = XMLHandler.getTagValue(entrynode, "servername");
serverPort = XMLHandler.getTagValue(entrynode, "serverport");
userName = XMLHandler.getTagValue(entrynode, "username");
password = XMLHandler.getTagValue(entrynode, "password");
sftpDirectory = XMLHandler.getTagValue(entrynode, "sftpdirectory");
targetDirectory = XMLHandler.getTagValue(entrynode, "targetdirectory");
wildcard = XMLHandler.getTagValue(entrynode, "wildcard");
remove = "Y".equalsIgnoreCase( XMLHandler.getTagValue(entrynode, "remove") );
}
catch(KettleXMLException xe)
{
throw new KettleXMLException("Unable to load job entry of type 'SFTP' from XML node", xe);
}
}
public void loadRep(Repository rep, long id_jobentry, ArrayList databases)
throws KettleException
{
try
{
super.loadRep(rep, id_jobentry, databases);
serverName = rep.getJobEntryAttributeString(id_jobentry, "servername");
int intServerPort = (int)rep.getJobEntryAttributeInteger(id_jobentry, "serverport");
serverPort = rep.getJobEntryAttributeString(id_jobentry, "serverport"); // backward compatible.
if (intServerPort>0 && Const.isEmpty(serverPort)) serverPort = Integer.toString(intServerPort);
userName = rep.getJobEntryAttributeString(id_jobentry, "username");
password = rep.getJobEntryAttributeString(id_jobentry, "password");
sftpDirectory = rep.getJobEntryAttributeString(id_jobentry, "sftpdirectory");
targetDirectory = rep.getJobEntryAttributeString(id_jobentry, "targetdirectory");
wildcard = rep.getJobEntryAttributeString(id_jobentry, "wildcard");
remove = rep.getJobEntryAttributeBoolean(id_jobentry, "remove");
}
catch(KettleException dbe)
{
throw new KettleException("Unable to load job entry of type 'SFTP' from the repository for id_jobentry="+id_jobentry, dbe);
}
}
public void saveRep(Repository rep, long id_job)
throws KettleException
{
try
{
super.saveRep(rep, id_job);
rep.saveJobEntryAttribute(id_job, getID(), "servername", serverName);
rep.saveJobEntryAttribute(id_job, getID(), "serverport", serverPort);
rep.saveJobEntryAttribute(id_job, getID(), "username", userName);
rep.saveJobEntryAttribute(id_job, getID(), "password", password);
rep.saveJobEntryAttribute(id_job, getID(), "sftpdirectory", sftpDirectory);
rep.saveJobEntryAttribute(id_job, getID(), "targetdirectory", targetDirectory);
rep.saveJobEntryAttribute(id_job, getID(), "wildcard", wildcard);
rep.saveJobEntryAttribute(id_job, getID(), "remove", remove);
}
catch(KettleDatabaseException dbe)
{
throw new KettleException("Unable to save job entry of type 'SFTP' to the repository for id_job="+id_job, dbe);
}
}
/**
* @return Returns the directory.
*/
public String getScpDirectory()
{
return sftpDirectory;
}
/**
* @param directory The directory to set.
*/
public void setScpDirectory(String directory)
{
this.sftpDirectory = directory;
}
/**
* @return Returns the password.
*/
public String getPassword()
{
return password;
}
/**
* @param password The password to set.
*/
public void setPassword(String password)
{
this.password = password;
}
/**
* @return Returns the serverName.
*/
public String getServerName()
{
return serverName;
}
/**
* @param serverName The serverName to set.
*/
public void setServerName(String serverName)
{
this.serverName = serverName;
}
/**
* @return Returns the userName.
*/
public String getUserName()
{
return userName;
}
/**
* @param userName The userName to set.
*/
public void setUserName(String userName)
{
this.userName = userName;
}
/**
* @return Returns the wildcard.
*/
public String getWildcard()
{
return wildcard;
}
/**
* @param wildcard The wildcard to set.
*/
public void setWildcard(String wildcard)
{
this.wildcard = wildcard;
}
/**
* @return Returns the targetDirectory.
*/
public String getTargetDirectory()
{
return targetDirectory;
}
/**
* @param targetDirectory The targetDirectory to set.
*/
public void setTargetDirectory(String targetDirectory)
{
this.targetDirectory = targetDirectory;
}
/**
* @param remove The remove to set.
*/
public void setRemove(boolean remove)
{
this.remove = remove;
}
/**
* @return Returns the remove.
*/
public boolean getRemove()
{
return remove;
}
public String getServerPort() {
return serverPort;
}
public void setServerPort(String serverPort) {
this.serverPort = serverPort;
}
public Result execute(Result prev_result, int nr, Repository rep, Job parentJob)
{
LogWriter log = LogWriter.getInstance();
Result result = new Result(nr);
result.setResult( false );
long filesRetrieved = 0;
log.logDetailed(toString(), "Start of SFTP job entry");
SFTPClient sftpclient = null;
// String substitution..
String realServerName = StringUtil.environmentSubstitute(serverName);
String realServerPort = StringUtil.environmentSubstitute(serverPort);
String realUsername = StringUtil.environmentSubstitute(userName);
String realPassword = StringUtil.environmentSubstitute(password);
String realSftpDirString = StringUtil.environmentSubstitute(sftpDirectory);
String realWildcard = StringUtil.environmentSubstitute(wildcard);
String realTargetDirectory = StringUtil.environmentSubstitute(targetDirectory);
try
{
// Create sftp client to host ...
sftpclient = new SFTPClient(InetAddress.getByName(realServerName), Const.toInt(realServerPort, 22), realUsername);
log.logDetailed(toString(), "Opened SFTP connection to server ["+realServerName+"] on port ["+realServerPort+"] with username ["+realUsername+"]");
// login to ftp host ...
sftpclient.login(realPassword);
// Passwords should not appear in log files.
//log.logDetailed(toString(), "logged in using password "+realPassword); // Logging this seems a bad idea! Oh well.
// move to spool dir ...
if (!Const.isEmpty(realSftpDirString))
{
sftpclient.chdir(realSftpDirString);
log.logDetailed(toString(), "Changed to directory ["+realSftpDirString+"]");
}
// Get all the files in the current directory...
String[] filelist = sftpclient.dir();
log.logDetailed(toString(), "Found "+filelist.length+" files in the remote directory");
Pattern pattern = null;
if (!Const.isEmpty(realWildcard))
{
pattern = Pattern.compile(realWildcard);
}
// Get the files in the list...
for (int i=0;i<filelist.length && !parentJob.isStopped();i++)
{
boolean getIt = true;
// First see if the file matches the regular expression!
if (pattern!=null)
{
Matcher matcher = pattern.matcher(filelist[i]);
getIt = matcher.matches();
}
if (getIt)
{
log.logDebug(toString(), "Getting file ["+filelist[i]+"] to directory ["+realTargetDirectory+"]");
String targetFilename = realTargetDirectory+Const.FILE_SEPARATOR+filelist[i];
sftpclient.get(targetFilename, filelist[i]);
filesRetrieved++;
// Add to the result files...
ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL, new File(targetFilename), parentJob.getJobname(), toString());
result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
log.logDetailed(toString(), "Transferred file ["+filelist[i]+"]");
// Delete the file if this is needed!
if (remove)
{
sftpclient.delete(filelist[i]);
log.logDetailed(toString(), "Deleted file ["+filelist[i]+"]");
}
}
}
result.setResult( true );
result.setNrFilesRetrieved(filesRetrieved);
}
catch(Exception e)
{
result.setNrErrors(1);
e.printStackTrace();
log.logError(toString(), "Error getting files from SFTP : "+e.getMessage());
} finally {
// close connection, if possible
try {
if(sftpclient != null) sftpclient.disconnect();
} catch (Exception e) {
// just ignore this, makes no big difference
}
}
return result;
}
/**
 * This job entry evaluates a result (true/false), so it can be used in conditional hops.
 */
public boolean evaluates() {
    return true;
}
/**
 * Creates the SWT settings dialog for this SFTP job entry.
 */
public JobEntryDialogInterface getDialog(Shell shell, JobEntryInterface jei, JobMeta jobMeta, String jobName, Repository rep) {
    return new JobEntrySFTPDialog(shell, this, jobMeta);
}
} |
package hex.glm;
import com.google.gson.JsonObject;
import hex.FrameTask.DataInfo;
import hex.GridSearch.GridSearchProgress;
import hex.glm.GLMModel.GLMXValidationTask;
import hex.glm.GLMParams.Family;
import hex.glm.GLMParams.Link;
import hex.glm.GLMTask.GLMIterationTask;
import hex.glm.GLMTask.LMAXTask;
import hex.glm.GLMTask.YMUTask;
import hex.glm.LSMSolver.ADMMSolver;
import jsr166y.CountedCompleter;
import water.*;
import water.H2O.H2OCallback;
import water.H2O.H2OCountedCompleter;
import water.api.DocGen;
import water.api.ParamImportance;
import water.fvec.Frame;
import water.util.Log;
import water.util.ModelUtils;
import water.util.RString;
import water.util.Utils;
import java.text.DecimalFormat;
import java.util.Arrays;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
public class GLM2 extends Job.ModelJobWithoutClassificationField {
static final int API_WEAVER = 1; // This file has auto-gen'd doc & json fields
public static DocGen.FieldDoc[] DOC_FIELDS;
public static final String DOC_GET = "GLM2";
public final String _jobName;
@API(help = "max-iterations", filter = Default.class, lmin=1, lmax=1000000, json=true, importance = ParamImportance.CRITICAL)
public int max_iter = 100;
transient public boolean _done = false;
@API(help = "Standardize numeric columns to have zero mean and unit variance.", filter = Default.class, json=true, importance = ParamImportance.CRITICAL)
protected boolean standardize = true;
@API(help = "validation folds", filter = Default.class, lmin=0, lmax=100, json=true, importance = ParamImportance.CRITICAL)
protected int n_folds;
@API(help = "Family.", filter = Default.class, json=true, importance = ParamImportance.CRITICAL)
protected Family family = Family.gaussian;
@API(help = "", filter = Default.class, json=true, importance = ParamImportance.SECONDARY)
protected Link link = Link.family_default;
@API(help = "Tweedie variance power", filter = Default.class, json=true, importance = ParamImportance.SECONDARY)
protected double tweedie_variance_power;
@API(help = "distribution of regularization between L1 and L2.", filter = Default.class, json=true, importance = ParamImportance.SECONDARY)
protected double [] alpha = new double[]{0.5};
public final double DEFAULT_LAMBDA = 1e-5;
@API(help = "regularization strength", filter = Default.class, json=true, importance = ParamImportance.SECONDARY)
protected double [] lambda = new double[]{DEFAULT_LAMBDA};
private double _currentLambda = Double.POSITIVE_INFINITY;
@API(help = "beta_eps", filter = Default.class, json=true, importance = ParamImportance.SECONDARY)
protected double beta_epsilon = DEFAULT_BETA_EPS;
@API(help="use line search (slower speed, to be used if glm does not converge otherwise)",filter=Default.class, importance = ParamImportance.SECONDARY)
protected boolean higher_accuracy;
@API(help="By default, first factor level is skipped from the possible set of predictors. Set this flag if you want use all of the levels. Needs sufficient regularization to solve!",filter=Default.class, importance = ParamImportance.SECONDARY)
protected boolean use_all_factor_levels;
@API(help="use lambda search starting at lambda max, given lambda is then interpreted as lambda min",filter=Default.class, importance = ParamImportance.SECONDARY)
protected boolean lambda_search;
@API(help="use strong rules to filter out inactive columns",filter=Default.class, importance = ParamImportance.SECONDARY)
protected boolean strong_rules_enabled = true;
// intentionally not declared as API now
int sparseCoefThreshold = 1000; // if more than this number of predictors, result vector of coefficients will be stored sparse
@API(help="lambda_Search stop condition: stop training when model has more than than this number of predictors (or don't use this option if -1).",filter=Default.class, importance = ParamImportance.EXPERT)
protected int max_predictors = -1;
@API(help="number of lambdas to be used in a search",filter=Default.class, importance = ParamImportance.EXPERT)
protected int nlambdas = 100;
@API(help="min lambda used in lambda search, specified as a ratio of lambda_max",filter=Default.class, importance = ParamImportance.EXPERT)
protected double lambda_min_ratio = -1;
@API(help="prior probability for y==1. To be used only for logistic regression iff the data has been sampled and the mean of response does not reflect reality.",filter=Default.class, importance = ParamImportance.EXPERT)
protected double prior = -1; // -1 is magic value for default value which is mean(y) computed on the current dataset
private double _iceptAdjust; // adjustment due to the prior
public int MAX_ITERATIONS_PER_LAMBDA = 10;
/**
* Whether to compute variable importances for input features, based on the absolute
* value of the coefficients. For safety this should only be done if
* use_all_factor_levels, because an important factor level can be skipped and not
* appear if !use_all_factor_levels.
*/
@API(help = "Compute variable importances for input features. NOTE: If use_all_factor_levels is off the importance of the base level will NOT be shown.", filter = Default.class, json=true, importance = ParamImportance.SECONDARY)
public boolean variable_importances = true;
@API(help = "", json=true, importance = ParamImportance.SECONDARY)
private double [] _wgiven;
@API(help = "", json=true, importance = ParamImportance.SECONDARY)
private double _proximalPenalty;
@API(help = "", json=true, importance = ParamImportance.SECONDARY)
private double [] _beta;
@API(help = "", json=true, importance = ParamImportance.SECONDARY)
private boolean _runAllLambdas = true;
private Key _srcKey;
@API(help = "Tweedie link power", json=true, importance = ParamImportance.SECONDARY)
double tweedie_link_power;
@API(help = "lambda_value max", json=true, importance = ParamImportance.SECONDARY)
double lambda_max = Double.NaN;
double lambda_min = Double.NaN;
long _nobs = 0;
public static int MAX_PREDICTORS = 7000;
private static double GLM_GRAD_EPS = 1e-4; // done (converged) if subgrad < this value.
private boolean highAccuracy(){return higher_accuracy;}
private void setHighAccuracy(){
higher_accuracy = true;
}
private Key _progressKey;
private DataInfo _dinfo;
private int [] _activeCols;
private DataInfo _activeData;
public GLMParams _glm;
private boolean _grid;
private double ADMM_GRAD_EPS = 1e-4; // default addm gradietn eps
private static final double MIN_ADMM_GRAD_EPS = 1e-5; // min admm gradient eps
int _lambdaIdx = -1;
private double _addedL2;
public static final double DEFAULT_BETA_EPS = 1e-4;
private double _ymu;
private int _iter;
// Elastic-net objective for the current iterate: mean residual deviance plus the
// penalty terms. NOTE: l2norm() returns the *squared* L2 norm, hence the 0.5 factor.
// l1pen()/l2pen() are defined elsewhere in this class — presumably the alpha/lambda
// scaled penalty weights; TODO confirm against their definitions.
private double objval(GLMIterationTask glmt){
return glmt._val.residual_deviance / glmt._nobs + 0.5 * l2pen() * l2norm(glmt._beta) + l1pen() * l1norm(glmt._beta);
}
/**
 * Snapshot of one GLM iteration: the cloned iteration task (beta/gradient restricted
 * to the active columns), the active-column set, and optionally the full gradient over
 * all columns. Used to back off to a known-good state during line search / recovery.
 */
private static class IterationInfo extends Iced {
final int _iter; // iteration number at which this snapshot was taken
private double [] _fullGrad; // gradient over ALL columns; may be null
// Returns a copy of the full gradient with the L2-penalty derivative added in for
// the coefficients (mapped through _activeCols if set); null if no full gradient stored.
public double [] fullGrad(double alpha, double lambda){
if(_fullGrad == null)return null;
double [] res = _fullGrad.clone();
double l2 = (1-alpha)*lambda; // no 0.5 mul here since we're adding derivative of 0.5*|b|^2
if(_activeCols != null)
for(int i = 0; i < _glmt._beta.length-1; ++i)
res[_activeCols[i]] += _glmt._beta[i]*l2;
else for(int i = 0; i < _glmt._beta.length; ++i) {
res[i] += _glmt._beta[i]*l2;
}
return res;
}
private final GLMIterationTask _glmt; // cloned task holding beta/gradient/validation
final int [] _activeCols; // active column subset; null means all columns
public IterationInfo(int i, GLMIterationTask glmt, final int [] activeCols, double [] gradient){
_iter = i;
_glmt = glmt.clone();
if(_glmt._grad == null)
_glmt._grad = GLM2.contractVec(gradient,activeCols);
assert _glmt._grad != null;
_activeCols = activeCols;
_fullGrad = gradient;
// NOTE: _glmt._beta CAN BE NULL (unlikely but possible, if activecCols were empty)
assert _glmt._val != null:"missing validation";
}
}
private IterationInfo _lastResult;
/** Adds a readable marker when lambda was not supplied (will be picked automatically). */
@Override
public JsonObject toJSON() {
    JsonObject json = super.toJSON();
    if (lambda == null) {
        // better than not printing anything if lambda_value=null
        json.addProperty("lambda_value", "automatic");
    }
    return json;
}
/** No default destination key — one is generated in serve() when needed. */
@Override public Key defaultDestKey() { return null; }
/** No default job key — one is generated in serve() when needed. */
@Override public Key defaultJobKey() { return null; }
// Bare constructor for the REST/API framework; fields are populated via reflection.
public GLM2() {_jobName = "";}
// Lambda-search variant: no explicit lambdas, search enabled, defaults for the rest.
public GLM2(String desc, Key jobKey, Key dest, DataInfo dinfo, GLMParams glm){
this(desc,jobKey,dest,dinfo,glm,null);
lambda_search = true;
}
// Explicit lambdas, default alpha = 0.5, no cross-validation folds.
public GLM2(String desc, Key jobKey, Key dest, DataInfo dinfo, GLMParams glm, double [] lambda){
this(desc,jobKey,dest,dinfo,glm,lambda,0.5,0);
}
// Explicit lambdas and alpha, no cross-validation folds.
public GLM2(String desc, Key jobKey, Key dest, DataInfo dinfo, GLMParams glm, double [] lambda, double alpha){
this(desc,jobKey,dest,dinfo,glm,lambda,alpha,0);
}
// Explicit lambdas, alpha and n-folds, default beta epsilon.
// BUG FIX: this overload previously delegated with a hard-coded 0.5, silently
// discarding the caller-supplied alpha; forward the parameter instead.
public GLM2(String desc, Key jobKey, Key dest, DataInfo dinfo, GLMParams glm, double [] lambda, double alpha, int nfolds){
    this(desc,jobKey,dest,dinfo,glm,lambda,alpha,nfolds,DEFAULT_BETA_EPS);
}
// Adds an explicit beta-convergence epsilon; no parent job or source key.
public GLM2(String desc, Key jobKey, Key dest, DataInfo dinfo, GLMParams glm, double [] lambda, double alpha,int nfolds, double betaEpsilon){
this(desc,jobKey,dest,dinfo,glm,lambda,alpha,nfolds,betaEpsilon,null, null);
}
// Adds parent job and source-frame key; no warm-start beta, default prior/penalty.
public GLM2(String desc, Key jobKey, Key dest, DataInfo dinfo, GLMParams glm, double [] lambda, double alpha, int nfolds, double betaEpsilon, Key parentJob, Key src_key){
this(desc,jobKey,dest,dinfo,glm,lambda,alpha,nfolds,betaEpsilon,parentJob, null,false,-1,0,null,Double.NaN, src_key);
}
/**
 * Full constructor backing all other overloads.
 * Fixes: the assert message previously reported {@code fullN()} as the expected
 * length while the condition checks {@code fullN()+1} (coefficients + intercept);
 * the duplicate {@code _beta = beta;} assignment is removed.
 * NOTE(review): parentJob is accepted but not stored here — presumably consumed
 * by a superclass or reserved; kept for interface compatibility.
 */
public GLM2(String desc, Key jobKey, Key dest, DataInfo dinfo, GLMParams glm, double [] lambda, double alpha, int nfolds, double betaEpsilon, Key parentJob, double [] beta, boolean highAccuracy, double prior, double proximalPenalty, int [] activeCols, double lambda_max, Key src_key) {
    // A warm-start beta (if given) must cover all predictors plus the intercept.
    assert beta == null || beta.length == (dinfo.fullN()+1):"unexpected size of beta, got length " + beta.length + ", expected " + (dinfo.fullN()+1);
    job_key = jobKey;
    description = desc;
    destination_key = dest;
    beta_epsilon = betaEpsilon;
    _beta = beta;
    _dinfo = dinfo;
    _glm = glm;
    this.lambda = lambda;
    // Proximal-point setup: penalize distance from the supplied beta when enabled.
    if((_proximalPenalty = proximalPenalty) != 0)
        _wgiven = beta;
    this.alpha = new double[]{alpha};
    n_folds = nfolds;
    source = dinfo._adaptedFrame;
    response = dinfo._adaptedFrame.lastVec();
    _jobName = dest.toString() + ((nfolds > 1)?("[" + dinfo._foldId + "]"):"");
    higher_accuracy = highAccuracy;
    this.prior = prior;
    if(activeCols != null){
        _activeCols = activeCols.clone();
        _activeData = _dinfo.filterExpandedColumns(_activeCols);
    } else
        _activeData = _dinfo;
    this.lambda_max = lambda_max;
    _srcKey = src_key;
}
/**
 * Renders a double array as comma-separated values, e.g. "1.0, 2.5";
 * returns "(null)" for a null array and "" for an empty one.
 * Uses StringBuilder instead of the synchronized StringBuffer — the buffer is
 * method-local, so no synchronization is needed.
 */
static String arrayToString (double[] arr) {
    if (arr == null) {
        return "(null)";
    }
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < arr.length; i++) {
        if (i > 0) {
            sb.append(", ");
        }
        sb.append(arr[i]);
    }
    return sb.toString();
}
public float [] thresholds = ModelUtils.DEFAULT_THRESHOLDS;
/** Builds the HTML anchor linking to this page's query for the given source key. */
public static String link(Key k, String content) {
    RString anchor = new RString("<a href='GLM2.query?source=%$key'>%content</a>");
    anchor.replace("key", k.toString());
    anchor.replace("content", content);
    return anchor.toString();
}
/** Launches a grid search (parallelism 4) over this model's parameter grid. */
public GLMGridSearch gridSearch() {
    final GLMGridSearch search = new GLMGridSearch(4, this, destination_key);
    return search.fork();
}
private transient AtomicBoolean _jobdone = new AtomicBoolean(false);
/**
 * Cancels the job: unlocks the source frame (unless a grid-search parent owns the lock),
 * removes the progress record, the partially-built model and any of its
 * cross-validation models from the DKV.
 * Fix: destination_key was removed twice (once inside the if-branch and once
 * unconditionally); a single unconditional removal covers both paths.
 */
@Override public void cancel(String msg){
    if(!_grid) {
        source.unlock(self());
    }
    DKV.remove(_progressKey);
    Value v = DKV.get(destination_key);
    if(v != null){
        GLMModel m = v.get();
        // Also drop any cross-validation models hanging off the main model.
        Key [] xvals = m.xvalModels();
        if(xvals != null)
            for(Key k:xvals)
                DKV.remove(k);
    }
    DKV.remove(destination_key);
    super.cancel(msg);
}
/**
 * Argument validation and data preparation, run before the job starts:
 * sanity-checks the lambda/lambda_search combination, validates the response
 * against the chosen family, and builds the DataInfo used for training.
 */
@Override public void init(){
super.init();
if(nlambdas == -1)
nlambdas = 100;
if(lambda_search && lambda.length > 1)
throw new IllegalArgumentException("Can not supply both lambda_search and multiple lambdas. If lambda_search is on, GLM expects only one value of lambda_value, representing the lambda_value min (smallest lambda_value in the lambda_value search).");
// check the response
if( response.isEnum() && family != Family.binomial)throw new IllegalArgumentException("Invalid response variable, trying to run regression with categorical response!");
// Family-specific range checks on the response column.
switch( family ) {
case poisson:
case tweedie:
if( response.min() < 0 ) throw new IllegalArgumentException("Illegal response column for family='" + family + "', response must be >= 0.");
break;
case gamma:
if( response.min() <= 0 ) throw new IllegalArgumentException("Invalid response for family='Gamma', response must be > 0!");
break;
case binomial:
if(response.min() < 0 || response.max() > 1) throw new IllegalArgumentException("Illegal response column for family='Binomial', response must in <0,1> range!");
break;
default:
//pass
}
// Build the adapted frame (drops ignored cols; binomial toggles classification handling).
Frame fr = DataInfo.prepareFrame(source, response, ignored_cols, family==Family.binomial, true,true);
// NOTE(review): when lambda_search is on, all factor levels are used regardless of the flag.
_dinfo = new DataInfo(fr, 1, use_all_factor_levels || lambda_search, standardize ? DataInfo.TransformType.STANDARDIZE : DataInfo.TransformType.NONE, DataInfo.TransformType.NONE);
_activeData = _dinfo;
if(higher_accuracy)setHighAccuracy();
}
@Override protected boolean filterNaCols(){return true;}
/**
 * REST entry point: validates inputs, resolves the concrete link function for the
 * family, then either launches a grid search (multiple alpha values) or forks a
 * single GLM run, redirecting the client to the appropriate progress view.
 * Fix: removed a stray empty statement (';;') after the job-key assignment.
 */
@Override protected Response serve() {
    init();
    if(link == Link.family_default)
        link = family.defaultLink;
    tweedie_link_power = 1 - tweedie_variance_power;// TODO
    if(tweedie_link_power == 0)link = Link.log;
    _glm = new GLMParams(family, tweedie_variance_power, link, tweedie_link_power);
    if(alpha.length > 1) { // grid search over alpha values
        if(destination_key == null)destination_key = Key.make("GLMGridResults_"+Key.make());
        if(job_key == null)job_key = Key.make((byte) 0, Key.JOB, H2O.SELF);
        GLMGridSearch j = gridSearch();
        _fjtask = j._fjtask;
        assert _fjtask != null;
        return GLMGridView.redirect(this,j.dest());
    } else {
        if(destination_key == null)destination_key = Key.make("GLMModel_"+Key.make());
        if(job_key == null)job_key = Key.make("GLM2Job_"+Key.make());
        fork();
        assert _fjtask != null;
        return GLMProgress.redirect(this,job_key, dest());
    }
}
/**
 * L-infinity distance between two coefficient vectors; MAX_VALUE when b1 is null.
 * (Uses explicit comparisons rather than Math.abs/Math.max to keep the original
 * NaN-propagation behavior exactly.)
 */
private static double beta_diff(double[] b1, double[] b2) {
    if (b1 == null) return Double.MAX_VALUE;
    double maxDiff = (b1[0] >= b2[0]) ? (b1[0] - b2[0]) : (b2[0] - b1[0]);
    for (int i = 1; i < b1.length; ++i) {
        final double d = b1[i] - b2[i];
        if (d > maxDiff) {
            maxDiff = d;
        } else if (-d > maxDiff) {
            maxDiff = -d;
        }
    }
    return maxDiff;
}
/** Progress record kept in the DKV under _progressKey: _done out of _total work units. */
private static class GLM2_Progress extends Iced{
final long _total; // total expected work units; must be positive
double _done; // work units finished so far
public GLM2_Progress(int total){_total = total;
assert _total > 0:"total = " + _total;
}
// Fraction done in [0,1], truncated to a whole percent.
// NOTE(review): 'progess' is a typo, but it is called elsewhere (see progress()),
// so the name is kept to avoid a cross-cutting rename.
public float progess(){
return 0.01f*((int)(100*_done/(double)_total));
}
}
/** Atomic DKV update adding _i finished work units to the shared GLM2_Progress record. */
private static class GLM2_ProgressUpdate extends TAtomic<GLM2_Progress> {
final int _i; // number of work units to add
public GLM2_ProgressUpdate(){_i = 1;}
public GLM2_ProgressUpdate(int i){_i = i;}
@Override
public GLM2_Progress atomic(GLM2_Progress old) {
if(old == null)return old; // progress record already gone (job done/cancelled) — nothing to update
old._done += _i;
return old;
}
}
/** Reads the shared progress record from the DKV; clamped to [0,1]. */
@Override public float progress(){
    if (isDone()) {
        return 1.0f;
    }
    final Value v = DKV.get(_progressKey);
    if (v == null) {
        return 0;
    }
    final float fraction = v.<GLM2_Progress>get().progess();
    return fraction > 1f ? 1f : fraction;
}
/**
 * Sum of squares of the coefficients — i.e. the *squared* L2 norm (no sqrt),
 * which is what the objective computation expects.
 */
protected double l2norm(double[] beta){
    double sumSq = 0;
    for (int i = 0; i < beta.length; ++i) {
        sumSq += beta[i] * beta[i];
    }
    return sumSq;
}
/** L1 norm: sum of absolute values of the coefficients. */
protected double l1norm(double[] beta){
    double sumAbs = 0;
    for (int i = 0; i < beta.length; ++i) {
        sumAbs += Math.abs(beta[i]);
    }
    return sumAbs;
}
/**
 * Expands a beta restricted to activeCols back to the full column space
 * (zeros elsewhere), keeping the intercept in the last slot. When activeCols
 * is null, beta already covers all columns and is returned as-is.
 */
private final double [] expandVec(double [] beta, final int [] activeCols){
    assert beta != null;
    if (activeCols == null) {
        return beta;
    }
    final double[] full = MemoryManager.malloc8d(_dinfo.fullN() + 1);
    int src = 0;
    for (int col : activeCols) {
        full[col] = beta[src++];
    }
    full[full.length - 1] = beta[beta.length - 1]; // intercept
    return full;
}
/**
 * Restricts a full beta to the given active columns, keeping the intercept in
 * the last slot. Null beta yields null; null activeCols yields a defensive copy.
 */
private static final double [] contractVec(double [] beta, final int [] activeCols){
    if (beta == null) {
        return null;
    }
    if (activeCols == null) {
        return beta.clone();
    }
    final double[] contracted = MemoryManager.malloc8d(activeCols.length + 1);
    int dst = 0;
    for (int col : activeCols) {
        contracted[dst++] = beta[col];
    }
    contracted[contracted.length - 1] = beta[beta.length - 1]; // intercept
    return contracted;
}
/**
 * Re-maps a beta from one active-column set to another: expands through the old
 * set to the full space, then contracts to the new set. No-op when the sets are
 * equal or beta is null.
 */
private final double [] resizeVec(double[] beta, final int[] activeCols, final int[] oldActiveCols){
    if (beta == null || Arrays.equals(activeCols, oldActiveCols)) {
        return beta;
    }
    final double[] full = expandVec(beta, oldActiveCols);
    return (activeCols == null) ? full : contractVec(full, activeCols);
}
/**
 * Sufficient-decrease test: estimates the objective at 'beta' by a first-order
 * expansion around the last recorded result and returns true when the actual
 * objective is worse than that estimate (i.e. a line search is needed).
 */
protected boolean needLineSearch(final double [] beta,double objval, double step){
if(Double.isNaN(objval))return true; // needed for gamma (and possibly others...)
assert _lastResult._glmt._grad != null:"missing gradient in last result!";
// Gradient restricted to the current active columns; contract the stored full
// gradient if the active set changed since the last result.
final double [] grad = Arrays.equals(_activeCols,_lastResult._activeCols)
?_lastResult._glmt.gradient(alpha[0],_currentLambda)
:contractVec(_lastResult.fullGrad(alpha[0],_currentLambda),_activeCols);
// line search
double f_hat = 0;
ADMMSolver.subgrad(alpha[0],_currentLambda,beta,grad); // grad -> subgradient, in place
final double [] oldBeta = resizeVec(_lastResult._glmt._beta, _activeCols,_lastResult._activeCols);
if(oldBeta == null)
f_hat += l2norm(beta); // no previous beta to diff against: fall back to |beta|^2
else for(int i = 0; i < beta.length; ++i){
double diff = beta[i] - oldBeta[i];
f_hat += grad[i]*diff;
}
// Predicted objective with damping factor 0.25 on the linear term.
f_hat = objval(_lastResult._glmt) + 0.25*step*f_hat;
return objval > f_hat;
}
/**
 * Processes the result of a line-search task: steps through the candidate betas
 * (halving the step each time), relaunches a full iteration with the first step
 * that passes the sufficient-decrease test, and falls back to the smallest
 * candidate when none is admissible.
 */
private class LineSearchIteration extends H2OCallback<GLMTask.GLMLineSearchTask> {
LineSearchIteration(CountedCompleter cmp){super((H2OCountedCompleter)cmp); cmp.addToPendingCount(1);}
@Override public void callback(final GLMTask.GLMLineSearchTask glmt) {
assert getCompleter().getPendingCount() == 1:"unexpected pending count, expected 1, got " + getCompleter().getPendingCount();
double step = 0.5;
for(int i = 0; i < glmt._objvals.length; ++i){
if(!needLineSearch(glmt._betas[i],glmt._objvals[i],step)){
LogInfo("line search: found admissible step = " + step);
// Continue iterating from the admissible point (doLineSearch=true, not counted as an iteration).
new GLMIterationTask(GLM2.this.self(),_activeData,_glm,true,true,true,glmt._betas[i],_ymu,1.0/_nobs,thresholds, new Iteration(getCompleter(),true,false,step)).asyncExec(_activeData._adaptedFrame);
return;
}
step *= 0.5;
} // no line step worked converge
LogInfo("Line search did not find feasible step, going forward with step = " + step + ".");
new GLMIterationTask(GLM2.this.self(),_activeData,_glm,true,true,true,glmt._betas[glmt._betas.length-1],_ymu,1.0/_nobs,thresholds, new Iteration(getCompleter(),false,false,step)).asyncExec(_activeData._adaptedFrame);
}
}
/**
 * Converts grad to the subgradient (in place) at newBeta and returns its
 * maximum absolute component, logging it as the convergence measure.
 */
protected double checkGradient(final double [] newBeta, final double [] grad){
    ADMMSolver.subgrad(alpha[0], _currentLambda, newBeta, grad);
    double maxAbs = 0;
    for (double g : grad) {
        if (g > maxAbs) {
            maxAbs = g;
        } else if (-g > maxAbs) {
            maxAbs = -g;
        }
    }
    LogInfo("converged with max |subgradient| = " + maxAbs);
    return maxAbs;
}
/** Logs msg prefixed with the job context (dest, iteration, lambda) and returns the full line. */
private String LogInfo(String msg){
    final String tagged = "GLM2[dest=" + dest() + ", iteration=" + _iter + ", lambda = " + _currentLambda + "]: " + msg;
    Log.info(tagged);
    return tagged;
}
/**
 * Stores the current coefficients as a submodel of the destination model.
 * When predictors were standardized, also computes de-normalized coefficients
 * (adjusting the intercept accordingly) so both versions are stored.
 * Returns newBeta expanded to the full column space.
 */
private double [] setSubmodel(final double[] newBeta, GLMValidation val, H2OCountedCompleter cmp){
final double [] fullBeta = (_activeCols == null || newBeta == null)?newBeta:expandVec(newBeta,_activeCols);
final double [] newBetaDeNorm;
if(_dinfo._predictor_transform == DataInfo.TransformType.STANDARDIZE) {
newBetaDeNorm = fullBeta.clone();
double norm = 0.0; // Reverse any normalization on the intercept
// denormalize only the numeric coefs (categoricals are not normalized)
final int numoff = _dinfo.numStart();
for( int i=numoff; i< fullBeta.length-1; i++ ) {
double b = newBetaDeNorm[i]*_dinfo._normMul[i-numoff];
norm += b*_dinfo._normSub[i-numoff]; // Also accumulate the intercept adjustment
newBetaDeNorm[i] = b;
}
newBetaDeNorm[newBetaDeNorm.length-1] -= norm;
} else
newBetaDeNorm = null;
// Store sparse when the predictor count exceeds sparseCoefThreshold.
GLMModel.setSubmodel(cmp, dest(), _currentLambda, newBetaDeNorm == null ? fullBeta : newBetaDeNorm, newBetaDeNorm == null ? null : fullBeta, (_iter + 1), System.currentTimeMillis() - start_time, _dinfo.fullN() >= sparseCoefThreshold, val);
return fullBeta;
}
private transient long _callbackStart = 0;
private transient double _rho_mul = 1.0;
private transient double _gradientEps = ADMM_GRAD_EPS;
/**
 * Callback driving the main solver loop: each completed GLMIterationTask lands in
 * callback(), where the next action (ADMM solve, line search, KKT check, or
 * convergence) is decided.
 */
private class Iteration extends H2OCallback<GLMIterationTask> {
public final long _iterationStartTime; // wall-clock start, for per-iteration timing logs
final boolean _doLineSearch; // whether a line search may be triggered from this iteration
final boolean _countIteration; // whether this task counts towards _iter
final double _lineSearchStep; // step size used if this iteration came from a line search
public Iteration(CountedCompleter cmp, boolean doLineSearch){ this(cmp,doLineSearch,true,1.0);}
public Iteration(CountedCompleter cmp, boolean doLineSearch, boolean countIteration,double lineSearchStep){
super((H2OCountedCompleter)cmp);
_lineSearchStep = lineSearchStep;
cmp.addToPendingCount(1); // keep the completer pending until this iteration chain finishes
_doLineSearch = doLineSearch;
_countIteration = countIteration;
_iterationStartTime = System.currentTimeMillis(); }
// Main iteration callback. Order of checks: (1) NaN/Inf recovery via beta halving,
// (2) negative-explained-deviance restart in high-accuracy mode, (3) gradient-based
// convergence, (4) line-search trigger, (5) ADMM solve and either convergence
// (fixed point / max_iter) or the next iteration.
@Override public void callback(final GLMIterationTask glmt){
assert _activeCols == null || glmt._beta == null || glmt._beta.length == (_activeCols.length+1):LogInfo("betalen = " + glmt._beta.length + ", activecols = " + _activeCols.length);
assert _activeCols == null || _activeCols.length == _activeData.fullN();
assert getCompleter().getPendingCount() >= 1:LogInfo("unexpected pending count, expected >= 1, got " + getCompleter().getPendingCount()); // will be decreased by 1 after we leave this callback
if(_countIteration)++_iter;
_callbackStart = System.currentTimeMillis();
LogInfo("iteration done in " + (_callbackStart - _iterationStartTime) + "ms");
if( !isRunning(self()) ) throw new JobCancelledException();
boolean gotNaNsorInfs = Utils.hasNaNsOrInfs(glmt._xy) || glmt._gram.hasNaNsOrInfs();
boolean constBeta = true;
if(gotNaNsorInfs){
// Numeric blow-up: halve beta (or average with the last good beta) and retry.
LogInfo("got NaNs/Infs, invoking line-search.");
setHighAccuracy();
if(_lastResult == null) {
if(glmt._beta == null) // failed in first iteration! throw an error
throw new RuntimeException(LogInfo("GLM2: can not solve. Got NaNs/Infs in the first iteration"));
for (int i = 0; i < glmt._beta.length; ++i) {
glmt._beta[i] *= 0.5;
constBeta &= glmt._beta[i] < beta_epsilon;
}
} else {
double [] lastBeta = resizeVec(_lastResult._glmt._beta,_activeCols,_lastResult._activeCols);
if(lastBeta != null)
for (int i = 0; i < glmt._beta.length; ++i) {
glmt._beta[i] = 0.5 * (glmt._beta[i] + lastBeta[i]);
double diff = (glmt._beta[i] - lastBeta[i]);
constBeta &= (-beta_epsilon < diff && diff < beta_epsilon);
}
else for (int i = 0; i < glmt._beta.length; ++i) {
glmt._beta[i] *= 0.5;
constBeta &= glmt._beta[i] < beta_epsilon;
}
}
if(constBeta || _iter >= max_iter) { // line search failed to progress -> converge (if we a valid solution already, otherwise fail!)
if(_lastResult == null)throw new RuntimeException(LogInfo("GLM failed to solve! Got NaNs/Infs in the first iteration and line search did not help!"));
checkKKTAndComplete(glmt,glmt._beta,false);
return;
} else // do the line search iteration
new GLMIterationTask(GLM2.this.self(),_activeData,glmt._glm, true, true, true, glmt._beta,_ymu,1.0/_nobs,thresholds, new Iteration(getCompleter(),true)).asyncExec(_activeData._adaptedFrame);
return;
}
if(glmt._val != null){
if(family != Family.gaussian && !(glmt._val.residual_deviance <= glmt._val.null_deviance)){ // complete fail, look if we can restart with higher_accuracy on
if(!highAccuracy()){
LogInfo("reached negative explained deviance without line-search, rerunning with high accuracy settings.");
setHighAccuracy();
_iter = 0; // restart the iteration count for the high-accuracy rerun
if(_lastResult != null && Arrays.equals(_lastResult._activeCols,_activeCols)) {
double [] beta = _lastResult._glmt._beta;
_lastResult = null;
new GLMIterationTask(GLM2.this.self(), _activeData, glmt._glm, true, true, true, beta, _ymu, 1.0 / _nobs, thresholds, new Iteration(getCompleter(), false)).asyncExec(_activeData._adaptedFrame);
} else { // no sane solution to go back to, start from scratch!
_lastResult = null;
new GLMIterationTask(GLM2.this.self(), _activeData, glmt._glm, true, false, false, null, _ymu, 1.0 / _nobs, thresholds, new Iteration(getCompleter(), false)).asyncExec(_activeData._adaptedFrame);
}
return;
}
}
}
if(glmt._val != null && glmt._computeGradient){ // check gradient
final double [] grad = glmt.gradient(alpha[0],_currentLambda);
ADMMSolver.subgrad(alpha[0], _currentLambda, glmt._beta, grad);
double err = 0;
for(double d:grad)
if(d > err) err = d;
else if(d < -err) err = -d;
LogInfo("gradient after " + _iter + " iterations = " + err);
if(_doLineSearch && err <= GLM_GRAD_EPS){
LogInfo("converged by reaching small enough gradient, with max |subgradient| = " + err );
checkKKTAndComplete(glmt, glmt._beta,false);
return;
}
}
// Possibly trigger a line search (tweedie excluded — objective comparison unreliable there).
if(glmt._beta != null && glmt._val!=null && glmt._computeGradient && _glm.family != Family.tweedie){
if(_lastResult != null && needLineSearch(glmt._beta,objval(glmt),_lineSearchStep)){
if(!_doLineSearch){ // no progress, converge
checkKKTAndComplete(glmt, glmt._beta,true);
return;
}
if(!highAccuracy()){
setHighAccuracy();
if(_lastResult._iter < (_iter-2)){ // there is a gap form last result...return to it and start again
final double [] prevBeta = _lastResult._activeCols != _activeCols? resizeVec(_lastResult._glmt._beta, _activeCols, _lastResult._activeCols):_lastResult._glmt._beta;
new GLMIterationTask(GLM2.this.self(),_activeData,glmt._glm, true, true, true, prevBeta, _ymu,1.0/_nobs, thresholds, new Iteration(getCompleter(),false)).asyncExec(_activeData._adaptedFrame);
return;
}
}
final double [] b = resizeVec(_lastResult._glmt._beta, _activeCols, _lastResult._activeCols);
assert b == null || (b.length == glmt._beta.length):LogInfo(b.length + " != " + glmt._beta.length + ", pickNextLambda = " + _activeCols.length);
new GLMTask.GLMLineSearchTask(GLM2.this.self(),_activeData,_glm, b,glmt._beta,1e-4,glmt._nobs,alpha[0],_currentLambda, new LineSearchIteration(getCompleter())).asyncExec(_activeData._adaptedFrame);
return;
}
_lastResult = new IterationInfo(GLM2.this._iter-1, glmt,_activeCols,null);
}
// Solve the penalized least-squares subproblem via ADMM.
final double [] newBeta = MemoryManager.malloc8d(glmt._xy.length);
ADMMSolver slvr = new ADMMSolver(_currentLambda,alpha[0], _gradientEps, _addedL2);
slvr._rho = _currentLambda*alpha[0]*_rho_mul;
long t1 = System.currentTimeMillis();
slvr.solve(glmt._gram,glmt._xy,glmt._yy,newBeta);
if(slvr._addedL2 > _addedL2) LogInfo("added " + (slvr._addedL2 - _addedL2) + "L2 penalty");
LogInfo("ADMM: " + slvr.iterations + " iterationss, " + (System.currentTimeMillis()-t1) + "ms (" + slvr.decompTime + "), subgrad_err=" + slvr.gerr);
new GLM2_ProgressUpdate().fork(_progressKey); // update progress
// Adapt the ADMM gradient tolerance for the next solve, never below ADMM_GRAD_EPS.
_gradientEps = Math.max(ADMM_GRAD_EPS,Math.min(slvr.gerr,0.01));
_addedL2 = slvr._addedL2;
if(Utils.hasNaNsOrInfs(newBeta)){
throw new RuntimeException(LogInfo("got NaNs and/or Infs in beta"));
} else {
final double bdiff = beta_diff(glmt._beta,newBeta);
if(_glm.family == Family.gaussian || bdiff < beta_epsilon || _iter >= max_iter){ // Gaussian is non-iterative and gradient is ADMMSolver's gradient => just validate and move on to the next lambda_value
int diff = (int)Math.log10(bdiff);
int nzs = 0;
for(int i = 0; i < glmt._beta.length; ++i)
if(glmt._beta[i] != 0) ++nzs;
LogInfo("converged (reached a fixed point with ~ 1e" + diff + " precision), got " + nzs + " nzs");
checkKKTAndComplete(glmt,newBeta,false);
return;
} else { // not done yet, launch next iteration
if(glmt._beta != null)
setSubmodel(glmt._beta, glmt._val,(H2OCountedCompleter)getCompleter().getCompleter()); // update current intermediate result
// Only validate every 5th iteration (always in high-accuracy mode) to save work.
final boolean validate = higher_accuracy || (_iter % 5) == 0;
new GLMIterationTask(GLM2.this.self(),_activeData,glmt._glm, true, validate, validate, newBeta,_ymu,1.0/_nobs,thresholds, new Iteration(getCompleter(),validate && _lastResult != null)).asyncExec(_activeData._adaptedFrame);
}
}
}
/**
 * Final step for the current lambda: recomputes the full gradient (over ALL columns)
 * at the candidate beta, verifies the KKT conditions for columns outside the active
 * set, and either (a) re-enters the iteration loop with the violating columns added
 * to the active set, or (b) stores the submodel and marks this lambda done.
 * NaNs/Infs in the gradient trigger a beta-halving retry.
 */
private void checkKKTAndComplete(final GLMIterationTask glmt, final double [] newBeta, final boolean failedLineSearch){
final double [] fullBeta = newBeta == null?MemoryManager.malloc8d(_dinfo.fullN()+1):expandVec(newBeta,_activeCols);
// now we need full gradient (on all columns) using this beta
new GLMIterationTask(GLM2.this.self(),_dinfo,_glm,false,true,true,fullBeta,_ymu,1.0/_nobs,thresholds, new H2OCallback<GLMIterationTask>((H2OCountedCompleter)getCompleter()) {
@Override public String toString(){
// BUG FIX: original read '"..." + getCompleter() == null ? ... : ...', which parses as
// '("..." + getCompleter()) == null' — always false, and NPEs when the completer is null.
// Parenthesizing the ternary restores the intended null-safe message.
return "checkKKTAndComplete.Callback, completer = " + (getCompleter() == null?"null":getCompleter().toString());
}
@Override
public void callback(final GLMIterationTask glmt2) {
// first check KKT conditions!
final double [] grad = glmt2.gradient(alpha[0],_currentLambda);
if(Utils.hasNaNsOrInfs(grad)){
// Gradient blew up: halve the candidate beta (or average with the previous one) and retry.
double [] newBeta2 = newBeta.clone();
LogInfo("Got NaNs/Infs in gradient during KKT check, invoking line-search");
setHighAccuracy();
boolean constBeta = true;
double [] lastBeta = glmt._beta;
if(lastBeta != null)
for (int i = 0; i < newBeta2.length; ++i) {
newBeta2[i] = 0.5 * (newBeta2[i] + lastBeta[i]);
double diff = (newBeta2[i] - lastBeta[i]);
constBeta &= (-beta_epsilon < diff && diff < 1e-2*beta_epsilon);
} else for (int i = 0; i < newBeta2.length; ++i) {
newBeta2[i] *= 0.5;
constBeta &= newBeta2[i] < 1e-2*beta_epsilon;
}
if(constBeta) {
// Halving made no progress — fall back to the last recorded result, or give up.
if (_lastResult == null)
throw new RuntimeException("can't solve!");
newBeta2 = resizeVec(_lastResult._glmt._beta,_activeCols,_lastResult._activeCols);
if(Arrays.equals(newBeta,newBeta2)) {
System.out.println("grad = " + Arrays.toString(_lastResult.fullGrad(0,0)));
throw new RuntimeException("can not solve");
}
}
getCompleter().addToPendingCount(1);
checkKKTAndComplete(glmt,newBeta2,true);
return;
}
glmt._val = glmt2._val;
_lastResult = new IterationInfo(_iter,glmt2,null,glmt2.gradient(alpha[0],0));
// check the KKT conditions and filter data for next lambda_value
// check the gradient
double[] subgrad = grad.clone();
ADMMSolver.subgrad(alpha[0], _currentLambda, fullBeta, subgrad);
double err = GLM_GRAD_EPS;
if (!failedLineSearch &&_activeCols != null) {
// Tolerance is the worst subgradient over the active set (at least GLM_GRAD_EPS).
for (int c : _activeCols)
if (subgrad[c] > err) err = subgrad[c];
else if (subgrad[c] < -err) err = -subgrad[c];
int[] failedCols = new int[64];
int fcnt = 0;
double grad_eps = GLM_GRAD_EPS;
for (int c : _activeCols)
if (subgrad[c] > grad_eps)
grad_eps = subgrad[c];
else if (subgrad[c] < -grad_eps)
grad_eps = -subgrad[c];
// Any inactive column whose subgradient exceeds the tolerance violates KKT.
for (int i = 0; i < grad.length - 1; ++i) {
if (Arrays.binarySearch(_activeCols, i) >= 0) continue;
if (subgrad[i] > grad_eps || -subgrad[i] > grad_eps) {
if (fcnt == failedCols.length)
failedCols = Arrays.copyOf(failedCols, failedCols.length << 1);
failedCols[fcnt++] = i;
}
}
if (fcnt > 0) {
final int n = _activeCols.length;
final int[] oldActiveCols = _activeCols;
_activeCols = Arrays.copyOf(_activeCols, _activeCols.length + fcnt);
for (int i = 0; i < fcnt; ++i)
_activeCols[n + i] = failedCols[i];
Arrays.sort(_activeCols);
LogInfo(fcnt + " variables failed KKT conditions check! Adding them to the model and continuing computation.(grad_eps = " + grad_eps + ", activeCols = " + (_activeCols.length > 100?"lost":Arrays.toString(_activeCols)));
_activeData = _dinfo.filterExpandedColumns(_activeCols);
// NOTE: tricky completer game here:
// We expect 0 pending in this method since this is the end-point, ( actually it's racy, can be 1 with pending 1 decrement from the original Iteration callback, end result is 0 though)
// while iteration expects pending count of 1, so we need to increase it here (Iteration itself adds 1 but 1 will be subtracted when we leave this method since we're in the callback which is called by onCompletion!
// [unlike at the start of nextLambda call when we're not inside onCompletion]))
getCompleter().addToPendingCount(1);
new GLMIterationTask(GLM2.this.self(), _activeData, _glm, true, true, true, resizeVec(newBeta, _activeCols, oldActiveCols), glmt._ymu, glmt._reg, thresholds, new Iteration(getCompleter(),false)).asyncExec(_activeData._adaptedFrame);
return;
}
}
// KKT satisfied (or line search already failed): record remaining progress and finish.
int diff = MAX_ITERATIONS_PER_LAMBDA - _iter + _iter1;
if(diff > 0)
new GLM2_ProgressUpdate(diff).fork(_progressKey); // update progress
GLM2.this.setSubmodel(newBeta, glmt2._val,(H2OCountedCompleter)getCompleter().getCompleter());
_done = true;
LogInfo("computation of current lambda done in " + (System.currentTimeMillis() - GLM2.this.start_time) + "ms");
assert _lastResult._fullGrad != null;
}
}).asyncExec(_dinfo._adaptedFrame);
}
}
/** Counts the non-zero entries of {@code ds}. */
private static int nzs(double ds[]){
  int count = 0;
  for (int i = 0; i < ds.length; ++i) {
    if (ds[i] != 0) {
      count++;
    }
  }
  return count;
}
// Callback chained after each lambda: decides whether to stop or to queue the
// computation of the next lambda value.
private class LambdaIteration extends H2OCallback {
  public LambdaIteration(CountedCompleter cmp) {
    super((H2OCountedCompleter) cmp);
  }
  @Override
  public void callback(H2OCountedCompleter h2OCountedCompleter) {
    // Stop when lambda reached its minimum or the model grew past max_predictors.
    final boolean reachedLambdaMin = _currentLambda <= lambda_min;
    final boolean tooManyPredictors =
        max_predictors != -1 && nzs(_lastResult._glmt._beta) > max_predictors;
    _done = reachedLambdaMin || tooManyPredictors;
    if (_done) return;
    // Not done: schedule the next lambda, keeping the parent completer pending.
    H2OCountedCompleter parent = (H2OCountedCompleter) getCompleter();
    parent.addToPendingCount(1);
    nextLambda(nextLambdaValue(), new LambdaIteration(parent));
  }
}
// Root completer for a single (non-grid) GLM run: kicks off the computation via
// run(), and on normal completion finalizes the model, job state and locks.
private class GLMJobCompleter extends H2OCountedCompleter {
  // Records the first completer to finish us; used to assert no double completion.
  AtomicReference<CountedCompleter> _cmp = new AtomicReference<CountedCompleter>();
  public GLMJobCompleter(H2OCountedCompleter cmp){super(cmp);}
  @Override
  public void compute2() {
    run(true,this);
  }
  private transient boolean _failed;
  @Override public void onCompletion(CountedCompleter cmp){
    // Grid searches manage the source-frame lock at the grid level.
    if(!_grid)source.unlock(self());
    if(!_failed) {
      LogInfo("GLM " + self() + " completed by " + cmp.getClass().getName() + ", " + cmp.toString());
      assert _cmp.compareAndSet(null, cmp) : "double completion, first from " + _cmp.get().getClass().getName() + ", second from " + cmp.getClass().getName();
      _done = true;
      GLMModel model = DKV.get(dest()).get();
      model.maybeComputeVariableImportances();
      model.stop_training();
      if (_addedL2 > 0) {
        // Surface the fact that ridge was added to repair a non-SPD Gram matrix.
        String warn = "Added L2 penalty (rho = " + _addedL2 + ") due to non-spd matrix. ";
        model.addWarning(warn);
      }
      state = JobState.DONE;
      DKV.remove(_progressKey);
      model.get_params().state = state;
      model.update(self());
      // Unlock the model asynchronously, then remove the job itself.
      getCompleter().addToPendingCount(1);
      new GLMModel.UnlockModelTask(new H2OCallback((H2OCountedCompleter) getCompleter()) {
        @Override
        public void callback(H2OCountedCompleter h2OCountedCompleter) {
          remove(); // Remove/complete job only for top-level, not xval GLM2s
        }
      }, model._key, self()).forkTask();
    }
  }
  @Override public boolean onExceptionalCompletion(Throwable t, CountedCompleter cmp){
    // Cancel the job on the first failure only. Grid members complete normally
    // (_failed set) so the rest of the grid can keep running; return value
    // controls whether the exception propagates (only for non-grid runs).
    if(_cmp.compareAndSet(null, cmp)) {
      _done = true;
      GLM2.this.cancel(t);
      if(_grid){
        _failed = true;
        tryComplete();
      }
    }
    return !_grid;
  }
}
@Override
// Convenience overload: fork this job with no parent completer.
public GLM2 fork(){return fork(null);}
// Starts this GLM job asynchronously: read-locks the source frame, publishes a
// progress object in the K/V store, and submits a GLMJobCompleter to do the
// actual work. Returns a clone so the stored job object is not mutated by the
// running computation.
public GLM2 fork(H2OCountedCompleter cc){
  if(!_grid)source.read_lock(self());
  // keep *this* separate from what's stored in K/V as job (will be changing it!)
  Futures fs = new Futures();
  _progressKey = Key.make(dest().toString() + "_progress", (byte) 1, Key.HIDDEN_USER_KEY, dest().home_node());
  int total = max_iter;
  if(lambda_search)
    total = MAX_ITERATIONS_PER_LAMBDA*nlambdas;
  // Cross-validation multiplies the work by (n_folds + 1) models.
  GLM2_Progress progress = new GLM2_Progress(total*(n_folds > 1?(n_folds+1):1));
  LogInfo("created progress " + progress);
  DKV.put(_progressKey,progress,fs);
  fs.blockForPending();
  _fjtask = new H2O.H2OEmptyCompleter(cc);
  H2OCountedCompleter fjtask = new GLMJobCompleter(_fjtask);
  GLM2 j = (GLM2)clone();
  j.start(_fjtask); // modifying GLM2 object, don't want job object to be the same instance
  H2O.submitTask(fjtask);
  return j;
}
// Cross-validation GLMs (index 0 is the main model, 1..n_folds the fold models).
transient GLM2 [] _xvals;
// Invoked when all cross-validation GLMs finished the current lambda: collects
// their per-lambda models and launches the x-val scoring task.
private class XvalidationCallback extends H2OCallback {
  public XvalidationCallback(H2OCountedCompleter cmp){super(cmp);}
  @Override
  public void callback(H2OCountedCompleter cc) {
    ParallelGLMs pgs = (ParallelGLMs)cc;
    _xvals = pgs._glms;
    for(int i = 0; i < _xvals.length; ++i){
      assert _xvals[i]._lastResult._fullGrad != null:LogInfo("last result missing full gradient!");
      assert _xvals[i]._lastResult._glmt._val != null:LogInfo("last result missing validation!");
    }
    // The main model (fold 0) drives iteration count and last-result bookkeeping.
    _iter = _xvals[0]._iter;
    _lastResult = (IterationInfo)pgs._glms[0]._lastResult.clone();
    final GLMModel [] xvalModels = new GLMModel[_xvals.length-1];
    final double curentLambda = _currentLambda;
    final H2OCountedCompleter mainCmp = (H2OCountedCompleter)getCompleter().getCompleter();
    mainCmp.addToPendingCount(1);
    final GLMModel.GetScoringModelTask [] tasks = new GLMModel.GetScoringModelTask[pgs._glms.length];
    H2OCallback c = new H2OCallback(mainCmp) {
      @Override public String toString(){
        // FIX: parenthesized the conditional. Previously '+' bound tighter than
        // '==', so the null check compared the concatenated string (never null)
        // and a null completer caused an NPE instead of printing "null".
        return "GetScoringModelTask.Callback, completer = " + (getCompleter() == null?"null":getCompleter().toString());
      }
      AtomicReference<CountedCompleter> _cmp = new AtomicReference<CountedCompleter>();
      @Override
      public void callback(H2OCountedCompleter cc) {
        assert _cmp.compareAndSet(null,cc):"Double completion, first " + _cmp.get().getClass().getName() + ", second from " + cc.getClass().getName();
        // tasks[0] is the main model; the rest are the x-val fold models.
        for(int i = 1; i < tasks.length; ++i)
          xvalModels[i-1] = tasks[i]._res;
        mainCmp.addToPendingCount(1);
        new GLMXValidationTask(tasks[0]._res, curentLambda, xvalModels, mainCmp).asyncExec(_dinfo._adaptedFrame);
      }
    };
    c.addToPendingCount(tasks.length-1);
    for(int i = 0; i < tasks.length; ++i)
      (tasks[i] = new GLMModel.GetScoringModelTask(c,pgs._glms[i].dest(),curentLambda)).forkTask();
  }
}
/**
 * Adds the lambda-max submodel (all coefficients zero, intercept only) to
 * {@code m} and attaches its validation; returns {@code m} for chaining.
 */
private GLMModel addLmaxSubmodel(GLMModel m,GLMValidation val){
  final double[] coefs = MemoryManager.malloc8d(_dinfo.fullN() + 1);
  // Only the intercept (last slot) is non-zero at lambda_max.
  coefs[coefs.length - 1] = _glm.link(_ymu) + _iceptAdjust;
  GLMModel.Submodel sm =
      new GLMModel.Submodel(lambda_max, coefs, coefs, 0, 0, coefs.length >= sparseCoefThreshold);
  sm.validation = val;
  m.submodels = new GLMModel.Submodel[]{sm};
  return m;
}
// Runs the whole GLM computation asynchronously: computes the response mean
// (YMUTask), then lambda-max (LMAXTask), sets up the lambda sequence and,
// optionally, the per-fold cross-validation models, and finally launches the
// first lambda iteration. All heavy work is chained through callbacks on cmp.
public void run(boolean doLog, H2OCountedCompleter cmp){
  if(doLog) logStart();
  // if this is cross-validated task, don't do actual computation,
  // just fork off the nfolds+1 tasks and wait for the results
  assert alpha.length == 1;
  start_time = System.currentTimeMillis();
  if(nlambdas == -1)nlambdas = 100;
  if(lambda_search && nlambdas <= 1)
    throw new IllegalArgumentException(LogInfo("GLM2: nlambdas must be > 1 when running with lambda search."));
  Futures fs = new Futures();
  Key dst = dest();
  new YMUTask(GLM2.this.self(), _dinfo, n_folds,new H2OCallback<YMUTask>(cmp) {
    @Override
    public String toString(){
      // FIX: was '"…" + getCompleter() != null?…'. Due to operator precedence the
      // test compared the concatenated string (always non-null), so this always
      // returned "null"; the condition was also inverted. Now prints the completer.
      return "YMUTask callback. completer = " + (getCompleter() == null?"null":getCompleter().toString());
    }
    @Override
    public void callback(final YMUTask ymut) {
      if (ymut._ymin == ymut._ymax)
        throw new IllegalArgumentException(LogInfo("GLM2: attempted to run with constant response. Response == " + ymut._ymin + " for all rows in the training set."));
      _ymu = ymut.ymu();
      _nobs = ymut.nobs();
      // Optional prior-probability correction for binomial models: shift the
      // intercept to compensate for over/under-sampling of the positive class.
      if(_glm.family == Family.binomial && prior != -1 && prior != _ymu && !Double.isNaN(prior)) {
        double ratio = prior / _ymu;
        double pi0 = 1, pi1 = 1;
        if (ratio > 1) {
          pi1 = 1.0 / ratio;
        } else if (ratio < 1) {
          pi0 = ratio;
        }
        _iceptAdjust = Math.log(pi0 / pi1);
      } else prior = _ymu;
      H2OCountedCompleter cmp = (H2OCountedCompleter)getCompleter();
      cmp.addToPendingCount(1);
      new LMAXTask(GLM2.this.self(), _dinfo, _glm, _ymu,_nobs,alpha[0], thresholds, new H2OCallback<LMAXTask>(cmp){
        @Override
        public String toString(){
          // FIX: condition was inverted — printed "NULL" for a live completer and
          // NPE'd on a null one.
          return "LMAXTask callback. completer = " + (getCompleter() == null?"null":getCompleter().toString());
        }
        @Override public void callback(final LMAXTask t){
          _currentLambda = lambda_max = t.lmax();
          _lastResult = new IterationInfo(0,t,null,t.gradient(0,0));
          GLMModel model = new GLMModel(GLM2.this, dest(), _dinfo, _glm, beta_epsilon, alpha[0], lambda_max, _ymu, prior);
          if(lambda_search) {
            assert !Double.isNaN(lambda_max) : LogInfo("running lambda_value search, but don't know what is the lambda_value max!");
            model = addLmaxSubmodel(model, t._val);
            if (nlambdas == -1) {
              lambda = null;
            } else {
              if (lambda_min_ratio == -1)
                lambda_min_ratio = _nobs > 25 * _dinfo.fullN() ? 1e-4 : 1e-2;
              // Geometric sequence from lambda_max down to lambda_max*lambda_min_ratio.
              final double d = Math.pow(lambda_min_ratio, 1.0 / (nlambdas - 1));
              if (nlambdas == 0)
                throw new IllegalArgumentException("nlambdas must be > 0 when running lambda search.");
              lambda = new double[nlambdas];
              lambda[0] = lambda_max;
              if (nlambdas == 1)
                throw new IllegalArgumentException("Number of lambdas must be > 1 when running with lambda_search!");
              for (int i = 1; i < lambda.length; ++i)
                lambda[i] = lambda[i - 1] * d;
              lambda_min = lambda[lambda.length - 1];
              max_iter = MAX_ITERATIONS_PER_LAMBDA * nlambdas;
            }
            _runAllLambdas = false;
          } else {
            if(lambda == null || lambda.length == 0)
              lambda = new double[]{DEFAULT_LAMBDA};
            // Drop user-given lambdas above lambda_max (their solution is all-zero).
            int i = 0;
            while(i < lambda.length && lambda[i] >= lambda_max)++i;
            if(i == lambda.length)
              throw new IllegalArgumentException("Given lambda(s) are all > lambda_max = " + lambda_max + ", have nothing to run with. lambda = " + Arrays.toString(lambda));
            if(i > 0) {
              model.addWarning("Removed " + i + " lambdas greater than lambda_max.");
              lambda = Utils.append(new double[]{lambda_max},Arrays.copyOfRange(lambda,i,lambda.length));
              addLmaxSubmodel(model,t._val);
            }
          }
          model.delete_and_lock(self());
          lambda_min = lambda[lambda.length-1];
          if(n_folds > 1){
            // Build one clone per fold (index 0 = main model) and compute each
            // fold's own lambda-max before the joint iteration starts.
            final H2OCountedCompleter futures = new H2O.H2OEmptyCompleter();
            final GLM2 [] xvals = new GLM2[n_folds+1];
            futures.addToPendingCount(xvals.length-2);
            for(int i = 0; i < xvals.length; ++i){
              xvals[i] = (GLM2)GLM2.this.clone();
              xvals[i].n_folds = 0;
              xvals[i].standardize = standardize;
              xvals[i].family = family;
              xvals[i].link = link;
              xvals[i].beta_epsilon = beta_epsilon;
              xvals[i].max_iter = max_iter;
              xvals[i].variable_importances = variable_importances;
              if(i != 0){
                xvals[i]._dinfo = _dinfo.getFold(i-1,n_folds);
                xvals[i].destination_key = Key.make(dest().toString() + "_xval_" + i, (byte) 1, Key.HIDDEN_USER_KEY, H2O.SELF);
                xvals[i]._nobs = ymut.nobs(i-1);
                xvals[i]._ymu = ymut.ymu(i-1);
                final int fi = i;
                new LMAXTask(self(),xvals[i]._dinfo,_glm,ymut.ymu(fi-1),ymut.nobs(fi-1),alpha[0],thresholds,new H2OCallback<LMAXTask>(futures){
                  @Override
                  public String toString(){
                    // FIX: parenthesized the conditional (same precedence bug as above).
                    return "Xval LMAXTask callback., completer = " + (getCompleter() == null?"null":getCompleter().toString());
                  }
                  @Override
                  public void callback(LMAXTask lmaxTask) {
                    xvals[fi].lambda_max = lmaxTask.lmax();
                    xvals[fi]._currentLambda = lmaxTask.lmax();
                    xvals[fi]._lastResult = new IterationInfo(0,lmaxTask,null,lmaxTask.gradient(alpha[0],0));
                    GLMModel m = new GLMModel(GLM2.this, xvals[fi].destination_key, xvals[fi]._dinfo, _glm, beta_epsilon, alpha[0], xvals[fi].lambda_max, xvals[fi]._ymu, prior);//.delete_and_lock(self());
                    double[] beta = MemoryManager.malloc8d(_dinfo.fullN() + 1);
                    beta[beta.length - 1] = _glm.link(lmaxTask._ymu);
                    m.submodels = new GLMModel.Submodel[]{new GLMModel.Submodel(lmaxTask.lmax(),beta,beta,0,0, beta.length >= sparseCoefThreshold)};
                    m.submodels[0].validation = lmaxTask._val;
                    m.setSubmodelIdx(0);
                    m.delete_and_lock(self());
                    // If this fold's lambda_max exceeds the global one, run it down
                    // to the global lambda_max before the joint iteration starts.
                    if(lmaxTask.lmax() > lambda_max){
                      futures.addToPendingCount(1);
                      new ParallelGLMs(GLM2.this,new GLM2[]{xvals[fi]},lambda_max,1,futures).fork();
                    }
                  }
                }).asyncExec(xvals[i]._dinfo._adaptedFrame);
              }
            }
            _xvals = xvals;
            futures.join();
          }
          getCompleter().addToPendingCount(1);
          nextLambda(nextLambdaValue(), new LambdaIteration(getCompleter()));
        }
      }).asyncExec(_dinfo._adaptedFrame);
    }
  }).asyncExec(_dinfo._adaptedFrame);
}
/**
 * Returns the next lambda to fit: the next precomputed sequence value, or an
 * adaptively picked one when no explicit lambda sequence exists.
 */
public double nextLambdaValue(){
  assert lambda == null || lambda_min == lambda[lambda.length - 1];
  if (lambda == null) {
    return pickNextLambda();
  }
  _lambdaIdx += 1;
  return lambda[_lambdaIdx];
}
// Iteration count at the start of the current lambda (used for progress updates).
private transient int _iter1 = 0;
// Starts the computation for the next lambda value. Cross-validated runs fork
// the per-fold GLMs via ParallelGLMs; single runs recompute the active column
// set (strong rules) and launch a new GLMIterationTask. cmp is completed when
// this lambda's computation finishes.
void nextLambda(final double currentLambda, final H2OCountedCompleter cmp){
  if(currentLambda > lambda_max){
    // Above lambda_max all penalized coefficients are zero — nothing to compute.
    _done = true;
    cmp.tryComplete();
    return;
  }
  _iter1 = _iter;
  LogInfo("starting computation of lambda = " + currentLambda + ", previous lambda = " + _currentLambda);
  _done = false;
  final double previousLambda = _currentLambda;
  _currentLambda = currentLambda;
  if(n_folds > 1){ // if we're cross-validated tasks, just fork off the parallel glms and wait for result!
    for(int i = 0; i < _xvals.length; ++i)
      if(_xvals[i]._lastResult._fullGrad == null){
        RuntimeException re = new RuntimeException(LogInfo("missing full gradient at lambda = " + previousLambda + " at fold " + i));
        Log.err(re);
        throw re;
      }
    ParallelGLMs pgs = new ParallelGLMs(this,_xvals,currentLambda, H2O.CLOUD.size(),new XvalidationCallback(cmp));
    pgs.fork();
    return;
  } else {
    if(lambda_search){ // if we are in lambda_search, we want only limited number of iters per lambda!
      max_iter = _iter + MAX_ITERATIONS_PER_LAMBDA;
    }
    final double[] grad = _lastResult.fullGrad(alpha[0],previousLambda);
    assert grad != null;
    activeCols(_currentLambda, previousLambda, grad);
    // NOTE(review): retries activeCols with identical arguments when the first
    // call selected zero columns — presumably relies on its side effects on
    // _activeCols; confirm intent.
    if(_activeCols != null && _activeCols.length == 0)
      activeCols(_currentLambda, previousLambda, grad);
    assert cmp.getPendingCount() == 0;
    // expand the beta
    // todo make this work again
    // if (Arrays.equals(_lastResult._activeCols, _activeCols) && _lastResult._glmt._gram != null) { // set of coefficients did not change
    //   new Iteration(cmp, false).callback(_lastResult._glmt);
    //   _lastResult._glmt.tryComplete(); // shortcut to reuse the last gram if same active columns
    // } else
    new GLMIterationTask(GLM2.this.self(), _activeData, _glm, true, false, false, resizeVec(_lastResult._glmt._beta, _activeCols, _lastResult._activeCols), _ymu, 1.0 / _nobs, thresholds, new Iteration(cmp,false)).asyncExec(_activeData._adaptedFrame);;
  }
}
// Elastic-net penalty components at the current lambda: L2 (ridge) weight …
private final double l2pen(){return 0.5*_currentLambda*(1-alpha[0]);}
// … and L1 (lasso) weight.
private final double l1pen(){return _currentLambda*alpha[0];}
/**
 * Adaptively picks the next (smaller) lambda from the last full gradient,
 * budgeting roughly 5% of min(#columns, #observations) new variables
 * (at least one).
 */
private double pickNextLambda(){
  final double[] fullGradient = _lastResult.fullGrad(alpha[0], _currentLambda);
  final int newVarBudget = Math.max((int) (Math.min(_dinfo.fullN(), _nobs) * 0.05), 1);
  return pickNextLambda(_currentLambda, fullGradient, newVarBudget);
}
/**
 * Picks the next lambda so that (by the strong rules) roughly
 * {@code maxNewVars} new columns enter the active set. Never returns a value
 * ≥ {@code oldLambda}; falls back to 0.9*oldLambda if the estimate does not
 * decrease.
 */
private double pickNextLambda(final double oldLambda, final double[] grad, int maxNewVars){
  // Work on a copy: store negated magnitudes so that an ascending sort puts the
  // largest-gradient (most promising) columns first.
  final double[] g = grad.clone();
  for (int i = 0; i < g.length; ++i) {
    g[i] = (g[i] < 0) ? g[i] : -g[i];
  }
  // Columns already active are flipped back to positive so they sort to the
  // end and are never counted as "new" variables.
  if (_activeCols != null) {
    for (int c : _activeCols) {
      g[c] = -g[c];
    }
  }
  Arrays.sort(g);
  // On a tie at the cut-off, back off so a run of equal values is not split.
  if (maxNewVars < g.length - 1 && g[maxNewVars] == g[maxNewVars + 1]) {
    final double tie = g[maxNewVars];
    while (maxNewVars > 0 && g[maxNewVars] == tie) {
      --maxNewVars;
    }
  }
  final double candidate = 0.5 * (-g[maxNewVars] / Math.max(1e-3, alpha[0]) + oldLambda);
  return (candidate < oldLambda) ? candidate : oldLambda * 0.9;
}
// Filters the current active columns using the strong rules: keeps every
// previously active column plus any column whose gradient magnitude exceeds
// alpha*(2*l1 - l2). Side effects: updates _activeCols and _activeData.
// Returns null (and uses the full data) when strong rules are disabled or
// every column survives.
private int [] activeCols(final double l1, final double l2, final double [] grad){
  final double rhs = alpha[0]*(2*l1-l2);
  int [] cols = MemoryManager.malloc4(_dinfo.fullN());
  int selected = 0;
  int j = 0;
  // Sentinel so the merge below never matches a real column index.
  if(_activeCols == null)_activeCols = new int[]{-1};
  for(int i = 0; i < _dinfo.fullN(); ++i)
    if((j < _activeCols.length && i == _activeCols[j]) || grad[i] > rhs || grad[i] < -rhs){
      cols[selected++] = i;
      if(j < _activeCols.length && i == _activeCols[j])++j;
    }
  if(!strong_rules_enabled || selected == _dinfo.fullN()){
    // All columns active (or rules disabled): fall back to the full data info.
    _activeCols = null;
    _activeData._adaptedFrame = _dinfo._adaptedFrame;
    _activeData = _dinfo;
  } else {
    _activeCols = Arrays.copyOf(cols,selected);
    _activeData = _dinfo.filterExpandedColumns(_activeCols);
  }
  LogInfo("strong rule at lambda_value=" + l1 + ", got " + selected + " active cols out of " + _dinfo.fullN() + " total.");
  assert _activeCols == null || _activeData.fullN() == _activeCols.length:LogInfo("mismatched number of cols, got " + _activeCols.length + " active cols, but data info claims " + _activeData.fullN());
  return _activeCols;
}
// Expand grid search related argument sets
// Delegates grid-page serving to the shared superclass implementation.
@Override protected NanoHTTPD.Response serveGrid(NanoHTTPD server, Properties parms, RequestType type) {
  return superServeGrid(server, parms, type);
}
public static final DecimalFormat AUC_DFORMAT = new DecimalFormat("
public static final String aucStr(double auc){
return AUC_DFORMAT.format(Math.round(1000 * auc) * 0.001);
}
public static final DecimalFormat AIC_DFORMAT = new DecimalFormat("
public static final String aicStr(double aic){
return AUC_DFORMAT.format(Math.round(1000*aic)*0.001);
}
public static final DecimalFormat DEV_EXPLAINED_DFORMAT = new DecimalFormat("
public static final String devExplainedStr(double dev){
return AUC_DFORMAT.format(Math.round(1000*dev)*0.001);
}
// Lockable grid result: records the destination keys and alpha values of the
// per-alpha GLM jobs so the whole grid can later be unlocked/deleted as a unit.
public static class GLMGrid extends Lockable<GLMGrid> {
  static final int API_WEAVER = 1; // This file has auto-gen'd doc & json fields
  static public DocGen.FieldDoc[] DOC_FIELDS; // Initialized from Auto-Gen code.
  final Key _jobKey;
  final long _startTime;
  // NOTE(review): the @API help text below looks copy-pasted — these are the
  // per-alpha model destination keys, not a response mean; confirm and fix upstream.
  @API(help="mean of response in the training dataset")
  public final Key [] destination_keys;
  final double [] _alphas;
  public GLMGrid (Key gridKey,Key jobKey, GLM2 [] jobs){
    super(gridKey);
    _jobKey = jobKey;
    _alphas = new double [jobs.length];
    destination_keys = new Key[jobs.length];
    for(int i = 0; i < jobs.length; ++i){
      destination_keys[i] = jobs[i].destination_key;
      _alphas[i] = jobs[i].alpha[0];
    }
    _startTime = System.currentTimeMillis();
  }
  // Unlocks every model of the grid, then the grid object itself.
  public static class UnlockGridTsk extends DTask.DKeyTask<UnlockGridTsk,GLMGrid> {
    final Key _jobKey;
    public UnlockGridTsk(Key gridKey, Key jobKey, H2OCountedCompleter cc){
      super(cc,gridKey);
      _jobKey = jobKey;
    }
    @Override
    public void map(GLMGrid g) {
      addToPendingCount(g.destination_keys.length);
      for(Key k:g.destination_keys)
        new GLMModel.UnlockModelTask(this,k,_jobKey).forkTask();
      g.unlock(_jobKey);
    }
  }
  // Deletes every model of the grid, then the (already unlocked) grid itself.
  public static class DeleteGridTsk extends DTask.DKeyTask<DeleteGridTsk,GLMGrid> {
    public DeleteGridTsk(H2OCountedCompleter cc, Key gridKey){
      super(cc,gridKey);
    }
    @Override
    public void map(GLMGrid g) {
      addToPendingCount(g.destination_keys.length);
      for(Key k:g.destination_keys)
        new GLMModel.DeleteModelTask(this,k).forkTask();
      assert g.is_unlocked():"not unlocked??";
      g.delete();
    }
  }
  @Override
  protected Futures delete_impl(Futures fs) {return fs;}
  @Override
  protected String errStr() {
    return null;
  }
}
// Job that runs one GLM per alpha value (grid over alpha), up to
// _maxParallelism of them concurrently via ParallelGLMs.
public class GLMGridSearch extends Job {
  public final int _maxParallelism;
  transient private AtomicInteger _idx;
  public final GLM2 [] _jobs;
  public final GLM2 _glm2;
  public GLMGridSearch(int maxP, GLM2 glm2, Key destKey){
    super(glm2.self(), destKey);
    _glm2 = glm2;
    description = "GLM Grid on data " + glm2._dinfo.toString() ;
    _maxParallelism = maxP;
    // One cloned GLM2 per alpha, each with its own destination/progress/job key.
    _jobs = new GLM2[glm2.alpha.length];
    _idx = new AtomicInteger(_maxParallelism);
    for(int i = 0; i < _jobs.length; ++i) {
      _jobs[i] = (GLM2)_glm2.clone();
      _jobs[i]._grid = true;
      _jobs[i].alpha = new double[]{glm2.alpha[i]};
      _jobs[i].destination_key = Key.make(glm2.destination_key + "_" + i);
      _jobs[i]._progressKey = Key.make(dest().toString() + "_progress_" + i, (byte) 1, Key.HIDDEN_USER_KEY, dest().home_node());
      _jobs[i].job_key = Key.make(glm2.job_key + "_" + i);
    }
  }
  // Overall progress is the mean of the per-alpha jobs' progress.
  @Override public float progress(){
    float sum = 0f;
    for(GLM2 g:_jobs)sum += g.progress();
    return sum/_jobs.length;
  }
  private transient boolean _cancelled;
  @Override public void cancel(){
    _cancelled = true;
    for(GLM2 g:_jobs)
      g.cancel();
    source.unlock(self());
    DKV.remove(destination_key);
    super.cancel();
  }
  @Override
  public GLMGridSearch fork(){
    System.out.println("read-locking " + source._key + " by job " + self());
    source.read_lock(self());
    Futures fs = new Futures();
    new GLMGrid(destination_key,self(),_jobs).delete_and_lock(self());
    // keep *this* separate from what's stored in K/V as job (will be changing it!)
    assert _maxParallelism >= 1;
    final Job job = this;
    _fjtask = new H2O.H2OEmptyCompleter();
    H2OCountedCompleter fjtask = new H2OCallback<ParallelGLMs>(_fjtask) {
      @Override public String toString(){
        // FIX: parenthesized the conditional. '+' bound tighter than '==', so the
        // null check compared the concatenated string (never null) and a null
        // completer caused an NPE instead of printing "null".
        return "GLMGrid.Job.Callback, completer = " + (getCompleter() == null?"null":getCompleter().toString());
      }
      @Override
      public void callback(ParallelGLMs parallelGLMs) {
        _glm2._done = true;
        // we're gonna get success-callback after cancelling forked tasks since forked glms do not propagate exception if part of grid search
        if(!_cancelled) {
          source.unlock(self());
          Lockable.unlock_lockable(destination_key, self());
          remove();
        }
      }
      @Override public boolean onExceptionalCompletion(Throwable t, CountedCompleter cmp){
        // Propagate real failures to the job; ignore cancellation noise.
        if(!(t instanceof JobCancelledException) && (t.getMessage() == null || !t.getMessage().contains("job was cancelled"))) {
          job.cancel(t);
        }
        return true;
      }
    };
    start(_fjtask); // modifying GLM2 object, don't want job object to be the same instance
    fs.blockForPending();
    H2O.submitTask(new ParallelGLMs(this,_jobs,Double.NaN,H2O.CLOUD.size(),fjtask));
    return this;
  }
  @Override public Response redirect() {
    String n = GridSearchProgress.class.getSimpleName();
    return Response.redirect( this, n, "job_key", job_key, "destination_key", destination_key);
  }
}
// DTask wrapper that drives a single GLM2 on some node: either forks the whole
// job (_lambda == NaN) or computes just the next lambda step.
private static class GLMT extends DTask<GLMT> {
  private final GLM2 _glm;
  private final double _lambda;
  public GLMT(H2OCountedCompleter cmp, GLM2 glm, double lambda){
    super(cmp);
    _glm = glm;
    _lambda = lambda;
  }
  @Override
  public void compute2() {
    assert Double.isNaN(_lambda) || _glm._lastResult._fullGrad != null:_glm.LogInfo("missing full gradient");
    if(Double.isNaN(_lambda))
      _glm.fork(this);
    else {
      _glm.nextLambda(_lambda, this);
    }
  }
  @Override public void onCompletion(CountedCompleter cc){
    // Sanity checks: a per-lambda run must be done and must have kept its gradient.
    if(!Double.isNaN(_lambda)) {
      assert _glm._done : _glm.LogInfo("GLMT hit onCompletion but glm is not done yet!");
      assert _glm._lastResult._fullGrad != null : _glm.LogInfo(" GLMT done with missing full gradient");
    }
  }
}
// class to execute multiple GLM runs in parallel
// (with user-given limit on how many to run in parallel)
public static class ParallelGLMs extends H2OCountedCompleter {
  transient final private GLM2 [] _glms;
  transient final private GLMT [] _tasks;
  transient final Job _job;
  transient final public int _maxP;
  // Index of the next GLM to launch once a running one finishes.
  transient private AtomicInteger _nextTask;
  public final double _lambda;
  public ParallelGLMs(Job j, GLM2 [] glms){this(j,glms,Double.NaN);}
  public ParallelGLMs(Job j, GLM2 [] glms, double lambda){this(j,glms,lambda, H2O.CLOUD.size());}
  public ParallelGLMs(Job j, GLM2 [] glms, double lambda, int maxP){
    _job = j; _lambda = lambda; _glms = glms; _maxP = maxP;
    _tasks = new GLMT[_glms.length];
    addToPendingCount(_glms.length);
  }
  public ParallelGLMs(Job j, GLM2 [] glms, double lambda, int maxP, H2OCountedCompleter cmp){
    super(cmp); _lambda = lambda; _job = j; _glms = glms; _maxP = maxP;
    _tasks = new GLMT[_glms.length];
    addToPendingCount(_glms.length);
  }
  // Launches task i on a node chosen round-robin from the cloud.
  private void forkDTask(int i){
    int nodeId = i%H2O.CLOUD.size();
    forkDTask(i,H2O.CLOUD._memary[nodeId]);
  }
  private void forkDTask(final int i, H2ONode n){
    _tasks[i] = new GLMT(new Callback(n,i),_glms[i],_lambda);
    assert Double.isNaN(_lambda) || _tasks[i]._glm._lastResult._fullGrad != null;
    // Run locally when the target node is this one; otherwise ship via RPC.
    if(n == H2O.SELF) H2O.submitTask(_tasks[i]);
    else new RPC(n,_tasks[i]).call();
  }
  // Per-task callback: when a GLM finishes, start the next queued one on the same node.
  class Callback extends H2OCallback<H2OCountedCompleter> {
    final int i;
    final H2ONode n;
    public Callback(H2ONode n, int i){super(ParallelGLMs.this); this.n = n; this.i = i;}
    @Override public void callback(H2OCountedCompleter cc){
      int i;
      if((i = _nextTask.getAndIncrement()) < _glms.length) { // not done yet
        forkDTask(i, n);
      }
    }
    @Override public boolean onExceptionalCompletion(Throwable ex, CountedCompleter caller){
      _job.cancel(ex);
      return true;
    }
  }
  @Override public void compute2(){
    // Start up to _maxP GLMs; the rest are launched from the callbacks above.
    final int n = Math.min(_maxP, _glms.length);
    _nextTask = new AtomicInteger(n);
    for(int i = 0; i < n; ++i)
      forkDTask(i);
    tryComplete();
  }
  @Override public void onCompletion(CountedCompleter cc){
    // For per-lambda runs, publish the (possibly remotely updated) GLM2s back.
    if(!Double.isNaN(_lambda))
      for(int i= 0; i < _tasks.length; ++i) {
        assert _tasks[i]._glm._lastResult._fullGrad != null;
        _glms[i] = _tasks[i]._glm;
      }
  }
}
} |
package net.katsuster.strview.test.io;
import static org.junit.Assert.*;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import java.util.*;
import net.katsuster.strview.io.*;
import net.katsuster.strview.util.*;
import net.katsuster.strview.util.bit.*;
import net.katsuster.strview.test.util.*;
/**
 * Unit tests for MemoryBitList: constructors, clone, length, bit get/set
 * (single and bulk), packed-long access, and a simple throughput benchmark.
 */
public class MemoryBitListTest {
    @BeforeClass
    public static void setUpBeforeClass() throws Exception {
    }

    @AfterClass
    public static void tearDownAfterClass() throws Exception {
    }

    @Before
    public void setUp() throws Exception {
    }

    @After
    public void tearDown() throws Exception {
    }

    // Default constructor produces a usable (empty) list.
    @Test
    public final void testMemoryBitList() {
        String msg1 = "MemoryBitList() failed.";

        MemoryBitList a1 = new MemoryBitList();
        assertNotNull(msg1, a1);
    }

    // Length constructor: accepts valid sizes and LENGTH_UNKNOWN, rejects
    // negative sizes and sizes above Integer.MAX_VALUE.
    @Test
    public final void testMemoryBitListLong() {
        String msg1 = "MemoryBitList(long) failed.";
        String msg2 = "MemoryBitList(long) illegal arguments check failed.";

        MemoryBitList a1 = new MemoryBitList(100);
        assertNotNull(msg1, a1);

        MemoryBitList a2 = new MemoryBitList(LargeList.LENGTH_UNKNOWN);
        assertNotNull(msg1, a2);

        try {
            new MemoryBitList(-2);
            fail(msg2);
        } catch (NegativeArraySizeException ex) {
            // expected
        }

        try {
            new MemoryBitList((long)Integer.MAX_VALUE + 1);
            fail(msg2);
        } catch (IllegalArgumentException ex) {
            // expected
        }
    }

    // Array constructor wraps the given bits; null is rejected.
    @Test
    public final void testMemoryBitListBooleanArray() {
        String msg1 = "MemoryBitList(boolean[]) failed.";
        String msg2 = "MemoryBitList(boolean[]) illegal arguments check failed.";
        boolean[] a_a = new boolean[512];

        for (int i = 0; i < a_a.length; i++) {
            a_a[i] = (i % 5 == 0);
        }

        MemoryBitList a1 = new MemoryBitList(a_a);
        assertNotNull(msg1, a1);
        LargeBitListTest.testGetLongInner(a1, 0, a_a);
        LargeBitListTest.testGetLongBooleanArrayIntIntInner(a1, 0, a_a, 6);

        try {
            new MemoryBitList(null);
            fail(msg2);
        } catch (IllegalArgumentException ex) {
            // expected
        }
    }

    // clone() must produce an independent copy (mutations do not leak across).
    @Test
    public final void testClone() {
        String msg1 = "clone() failed.";
        boolean[] a_a = {
                false, false, true, true,
        };

        MemoryBitList a1 = new MemoryBitList(a_a);
        assertNotNull(msg1, a1);
        a1.set(0, true);
        assertEquals(msg1, true, a1.get(0));

        try {
            MemoryBitList a2 = a1.clone();
            assertNotNull(msg1, a2);
            a2.set(0, false);
            assertEquals(msg1, true, a1.get(0));
            assertEquals(msg1, false, a2.get(0));
        } catch (CloneNotSupportedException ex) {
            fail(msg1);
        }
    }

    // length() reflects the wrapped array size (0 for the default constructor).
    @Test
    public final void testLength() {
        String msg1 = "length() failed.";
        boolean[] a_a = new boolean[1];
        boolean[] a_b = new boolean[512];

        MemoryBitList a1 = new MemoryBitList();
        MemoryBitList a2 = new MemoryBitList(a_a);
        MemoryBitList a3 = new MemoryBitList(a_b);

        assertEquals(msg1, 0, a1.length());
        assertEquals(msg1, 1, a2.length());
        assertEquals(msg1, 512, a3.length());
    }

    // get(long): values round-trip; out-of-range indices throw.
    @Test
    public final void testGetLong() {
        //String msg1 = "get(long) failed.";
        String msg2 = "get(long) illegal arguments check failed.";
        boolean[] a_a = new boolean[1023];
        boolean[] a_b = new boolean[2048];

        for (int i = 0; i < a_a.length; i++) {
            a_a[i] = (i % 6 == 0);
        }
        for (int i = 0; i < a_b.length; i++) {
            a_b[i] = (i % 7 == 0);
        }

        MemoryBitList a1 = new MemoryBitList(a_a);
        MemoryBitList a2 = new MemoryBitList(a_b);

        LargeBitListTest.testGetLongInner(a1, 0, a_a);
        LargeBitListTest.testGetLongInner(a2, 0, a_b);

        try {
            a1.get(a_a.length);
            fail(msg2);
        } catch (IndexOutOfBoundsException ex) {
            // expected
        }

        try {
            a1.get(-1);
            fail(msg2);
        } catch (IndexOutOfBoundsException ex) {
            // expected
        }
    }

    // Bulk get(long, boolean[], int, int) with various chunk sizes.
    @Test
    public final void testGetLongBoolArrayIntInt() {
        //String msg1 = "get(long, boolean[], int, int) failed.";
        boolean[] a_a = new boolean[2048];
        boolean[] a_b = new boolean[4096];

        for (int i = 0; i < a_a.length; i++) {
            a_a[i] = (i % 8 == 0);
        }
        for (int i = 0; i < a_b.length; i++) {
            a_b[i] = (i % 9 == 0);
        }

        MemoryBitList a1 = new MemoryBitList(a_a);
        MemoryBitList a2 = new MemoryBitList(a_b);

        LargeBitListTest.testGetLongBooleanArrayIntIntInner(a1, 0, a_a, 1);
        LargeBitListTest.testGetLongBooleanArrayIntIntInner(a1, 0, a_a, 30);
        LargeBitListTest.testGetLongBooleanArrayIntIntInner(a2, 0, a_b, 4);
        LargeBitListTest.testGetLongBooleanArrayIntIntInner(a2, 0, a_b, 100);
    }

    // Single-bit set(long, boolean): written pattern reads back; bad indices throw.
    // NOTE(review): message strings say "set(long, byte)" but boolean values are
    // exercised — probably copied from a byte-list test.
    @Test
    public final void testSetLongByte() {
        //String msg1 = "set(long, byte) failed.";
        String msg2 = "set(long, byte) illegal arguments check failed.";
        boolean[] a_a = new boolean[1024];
        boolean[] a_b = new boolean[1024];
        boolean[] b_a = new boolean[4096];
        boolean[] b_b = new boolean[4096];

        for (int i = 0; i < a_a.length; i++) {
            a_a[i] = (i % 10 == 0);
        }
        for (int i = 0; i < b_a.length; i++) {
            b_a[i] = (i % 11 == 0);
        }

        MemoryBitList a1 = new MemoryBitList(a_b);
        MemoryBitList a2 = new MemoryBitList(b_b);

        for (int i = 0; i < a1.length(); i++) {
            a1.set(i, (i % 10 == 0));
        }
        for (int i = 0; i < a2.length(); i++) {
            a2.set(i, (i % 11 == 0));
        }

        LargeBitListTest.testGetLongInner(a1, 0, a_a);
        LargeBitListTest.testGetLongBooleanArrayIntIntInner(a1, 0, a_a, 30);
        LargeBitListTest.testGetLongInner(a2, 0, b_a);
        LargeBitListTest.testGetLongBooleanArrayIntIntInner(a2, 0, b_a, 100);

        try {
            a1.set(a_b.length, false);
            fail(msg2);
        } catch (IndexOutOfBoundsException ex) {
            // expected
        }

        try {
            a1.set(-1, false);
            fail(msg2);
        } catch (IndexOutOfBoundsException ex) {
            // expected
        }
    }

    // Bulk set(long, boolean[], int, int): chunked writes read back correctly.
    @Test
    public final void testSetLongBoolArrayIntInt() {
        //String msg1 = "set(long, boolean[], int, int) failed.";
        boolean[] a_a = new boolean[1024];
        boolean[] a_b = new boolean[1024];
        boolean[] b_a = new boolean[1024];
        boolean[] b_b = new boolean[1024];
        boolean[] buf_a = new boolean[5];
        boolean[] buf_b = new boolean[5];

        // Build the expected arrays chunk by chunk.
        for (int i = 0; i < a_a.length - buf_a.length; i += buf_a.length) {
            for (int j = 0; j < buf_a.length; j++) {
                buf_a[j] = ((i + j) % 3 == 0);
                buf_b[j] = ((i + j) % 4 == 0);
            }
            System.arraycopy(buf_a, 0, a_a, i, buf_a.length);
            System.arraycopy(buf_b, 0, b_a, i, buf_b.length);
        }

        MemoryBitList a1 = new MemoryBitList(a_b);
        MemoryBitList a2 = new MemoryBitList(b_b);

        // Write the same chunks through the bulk set API.
        for (int i = 0; i < a_a.length - buf_a.length; i += buf_a.length) {
            for (int j = 0; j < buf_a.length; j++) {
                buf_a[j] = ((i + j) % 3 == 0);
                buf_b[j] = ((i + j) % 4 == 0);
            }
            a1.set(i, buf_a, 0, buf_a.length);
            a2.set(i, buf_b, 0, buf_b.length);
        }

        LargeBitListTest.testGetLongInner(a1, 0, a_a);
        LargeBitListTest.testGetLongBooleanArrayIntIntInner(a1, 0, a_a, 30);
        LargeBitListTest.testGetLongInner(a2, 0, b_a);
        LargeBitListTest.testGetLongBooleanArrayIntIntInner(a2, 0, b_a, 100);
    }

    // getPackedLong agrees with get + packBitsLong for all sizes/offsets;
    // invalid size and out-of-range positions throw.
    @Test
    public final void testGetPackedLong() {
        String msg1 = "getPackedLong(long, int) failed.";
        String msg2 = "getPackedLong(long, int) illegal arguments check failed.";
        boolean[] buf = new boolean[500];
        MemoryBitList a;
        int size, diff, start, off, i;
        long result, act;

        for (i = 0; i < buf.length; i++) {
            buf[i] = (i % 17 == 0) || (i % 3 == 0);
        }
        a = new MemoryBitList(buf);

        for (size = 0; size <= 64; size++) {
            for (diff = 1; diff <= 64; diff++) {
                for (start = 0; start <= 64; start++) {
                    for (off = start; off < buf.length - size; off += diff) {
                        boolean[] actbuf = new boolean[size];

                        result = a.getPackedLong(off, size);

                        a.get(off, actbuf, 0, size);
                        act = AbstractLargeBitList.packBitsLong(actbuf);

                        assertEquals(msg1
                                        + "size:" + size
                                        + "diff:" + diff
                                        + "start:" + start
                                        + "off:" + off,
                                act, result);
                    }
                }
            }
        }

        try {
            a.getPackedLong(0, -1);
            fail(msg2);
        } catch (IllegalArgumentException ex) {
            // expected
        }

        try {
            a.getPackedLong(0, 65);
            fail(msg2);
        } catch (IllegalArgumentException ex) {
            // expected
        }

        try {
            a.getPackedLong(-1, 1);
            fail(msg2);
        } catch (IndexOutOfBoundsException ex) {
            // expected
        }

        try {
            a.getPackedLong(buf.length - 1, 2);
            fail(msg2);
        } catch (IndexOutOfBoundsException ex) {
            // expected
        }
    }

    // Throughput benchmark (prints timings; makes no correctness assertions).
    @Test
    public final void testBench() {
        Random ra = new Random();
        boolean[] dest = new boolean[32 * 1024 * 1024];
        boolean a_a;
        MemoryBitList a1 = new MemoryBitList(dest);
        long start, elapse;
        int ite = 1, i;

        System.out.println("MemoryBitListTest.testBench()");

        //seq 1B
        elapse = 0;
        for (i = 0; i < ite; i++) {
            a_a = (ra.nextInt(255) % 2 == 0);

            start = System.nanoTime();
            for (int j = 0; j < a1.length(); j++) {
                a1.set(j, a_a);
            }
            elapse += (System.nanoTime() - start);
        }
        LargeBitListTest.printBenchResult("seq write 1bit",
                i * a1.length(), elapse);

        LargeBitListTest.benchPackedIO(a1, "seq packed 4bit", 4, 1);
        LargeBitListTest.benchPackedIO(a1, "seq packed 8bit", 8, 1);
        LargeBitListTest.benchPackedIO(a1, "seq packed 16bit", 16, 1);
        LargeBitListTest.benchPackedIO(a1, "seq packed 32bit", 32, 1);
        LargeBitListTest.benchPackedIO(a1, "seq packed 64bit", 64, 1);
        LargeBitListTest.benchIO(a1, "seq access 4bit", 4, 1);
        LargeBitListTest.benchIO(a1, "seq access 16bit", 16, 1);
        LargeBitListTest.benchIO(a1, "seq access 4Kbit", 4 * 1024, 1);
        LargeBitListTest.benchIO(a1, "seq access 64Kbit", 64 * 1024, 1);
    }
}
package org.codemucker.jfind;
import java.lang.reflect.Method;
public class ReflectedMethod extends AbstractReflectedObject {
private final Method method;
public static ReflectedMethod from(Method method) {
if(method==null){
return null;
}
return new ReflectedMethod(method);
}
public ReflectedMethod(Method method) {
super(method.getAnnotations(),method.getModifiers());
this.method = method;
}
public Method getUnderlying(){
return method;
}
public String getName(){
return method.getName();
}
} |
package ca.ualberta.cs.lonelytwitter;
import java.util.Date;
/**
 * Base model for a tweet: holds the text, the creation timestamp, and an
 * "important" flag. Subclasses decide how text and importance may be set.
 * NOTE(review): the class name violates Java naming conventions (should be
 * {@code LonelyTweetModel}), but renaming would break existing callers.
 */
public abstract class lonelyTweetModel
{
    protected String text;
    protected Date timestamp;
    protected boolean important;

    /** Returns the tweet text. */
    public String getText()
    {
        return text;
    }

    /** Sets the tweet text; subclasses may validate or restrict it. */
    public abstract void setText(String text);

    /** Returns the timestamp of this tweet. */
    public Date getTimestamp()
    {
        return timestamp;
    }

    /** Replaces the timestamp of this tweet. */
    public void setTimestamp(Date timestamp)
    {
        this.timestamp = timestamp;
    }

    /**
     * Creates a tweet with the given text and timestamp.
     *
     * @param text the tweet text
     * @param timestamp the timestamp to record
     */
    public lonelyTweetModel(String text, Date timestamp)
    {
        super();
        this.text = text;
        // BUG FIX: previously ignored the parameter and stored new Date() instead.
        this.timestamp = timestamp;
    }

    /**
     * Creates a tweet with the given text, timestamped with the current time.
     *
     * @param text the tweet text
     */
    public lonelyTweetModel(String text)
    {
        super();
        this.text = text;
        this.timestamp = new Date();
    }

    //abstract method can be used to differentiate between normal and important
    /** Returns whether this tweet is flagged as important. */
    public boolean getImportant(){
        return important;
    }

    /** Flags this tweet as important or not; subclasses may reject the change. */
    public abstract void setImportant(boolean important) throws Exception;
}
package org.joda.time.chrono.gj;
import java.util.Random;
import junit.framework.TestCase;
import org.joda.time.Chronology;
import org.joda.time.DateTime;
import org.joda.time.DateTimeField;
import org.joda.time.chrono.GregorianChronology;
import org.joda.time.chrono.JulianChronology;
/**
* Tests either the Julian or Gregorian chronology from org.joda.time.chrono.gj
* against the implementations in this package. It tests all the date fields
* against their principal methods.
* <p>
* Randomly generated values are fed into the DateTimeField methods and the
* results are compared between the two chronologies. If any result doesn't
* match, an error report is generated and the program exits. Each time this
* test program is run, the pseudo random number generator is seeded with the
* same value. This ensures consistent results between test runs.
* <p>
* The main method accepts three optional arguments: iterations, mode, seed. By
* default, iterations is set to 1,000,000. The test will take several minutes
* to run, depending on the computer's performance. Every 5 seconds a progress
* message is printed.
* <p>
* The mode can be either 'g' for proleptic gregorian (the default) or 'j' for
* proleptic julian. To override the default random number generator seed, pass
* in a third argument which accepts a long signed integer.
*
* @author Brian S O'Neill
*/
public class MainTest extends TestCase {
    public static final int GREGORIAN_MODE = 0;
    public static final int JULIAN_MODE = 1;

    // NOTE(review): the (long) cast binds only to the literal 365.2425, truncating it to
    // 365 before the multiplications, so this constant is really "365 days" rather than
    // the mean Gregorian year. It is only used to size random test ranges, so this is
    // presumably harmless - confirm before "fixing" it, as that would alter which
    // instants get generated for a given seed.
    private static final long MILLIS_PER_YEAR = (long)365.2425 * 24 * 60 * 60 * 1000;
    private static final long _1000_YEARS = 1000 * MILLIS_PER_YEAR;
    private static final long _500_YEARS = 500 * MILLIS_PER_YEAR;
    // Random instants are confined to roughly the years -10000..10000 (relative to 1970).
    private static final long MAX_MILLIS = (10000 - 1970) * MILLIS_PER_YEAR;
    private static final long MIN_MILLIS = (-10000 - 1970) * MILLIS_PER_YEAR;
    // Show progress reports every 5 seconds.
    private static final long UPDATE_INTERVAL = 5000;

    /**
     * Arguments: iterations [mode [seed]]
     */
    public static void main(String[] args) throws Exception {
        int iterations = 1000000;
        int mode = GREGORIAN_MODE;
        long seed = 1345435247779935L;
        if (args.length > 0) {
            iterations = Integer.parseInt(args[0]);
            if (args.length > 1) {
                if (args[1].startsWith("g")) {
                    mode = GREGORIAN_MODE;
                } else if (args[1].startsWith("j")) {
                    mode = JULIAN_MODE;
                } else {
                    throw new IllegalArgumentException
                        ("Unknown mode: " + args[1]);
                }
                // NOTE(review): the seed argument is only honored when a mode argument
                // is also present - confirm this nesting is intentional.
                if (args.length > 2) {
                    seed = Long.parseLong(args[2]);
                }
            }
        }
        new MainTest(iterations, mode, seed).testChronology();
    }

    private final int iIterations;
    private final int iMode;
    private final long iSeed;
    // Chronology implemented in this package (the one under test)...
    private final Chronology iTest;
    // ...and the production implementation used as the reference.
    private final Chronology iActual;

    /**
     * @param iterations number of test iterations to perform
     * @param mode GREGORIAN_MODE or JULIAN_MODE,0=Gregorian, 1=Julian
     * @param seed seed for random number generator
     */
    public MainTest(int iterations, int mode, long seed) {
        super("testChronology");
        iIterations = iterations;
        iMode = mode;
        iSeed = seed;
        if (mode == GREGORIAN_MODE) {
            iTest = new TestGregorianChronology();
            iActual = GregorianChronology.getInstanceUTC();
        } else {
            iTest = new TestJulianChronology();
            iActual = JulianChronology.getInstanceUTC();
        }
    }

    /**
     * Main junit test
     */
    public void testChronology() {
        int iterations = iIterations;
        long seed = iSeed;
        String modeStr;
        if (iMode == GREGORIAN_MODE) {
            modeStr = "Gregorian";
        } else {
            modeStr = "Julian";
        }
        System.out.println("\nTesting " + modeStr + " chronology over " + iterations + " iterations");
        // Fixed seed keeps the generated instants deterministic across runs.
        Random rnd = new Random(seed);
        long updateMillis = System.currentTimeMillis() + UPDATE_INTERVAL;
        for (int i=0; i<iterations; i++) {
            long now = System.currentTimeMillis();
            if (now >= updateMillis) {
                updateMillis = now + UPDATE_INTERVAL;
                // Progress percentage with one decimal place.
                double complete = ((int)((double)i / iterations * 1000.0)) / 10d;
                if (complete < 100) {
                    System.out.println("" + complete + "% complete (i=" + i + ")");
                }
            }
            long millis = randomMillis(rnd);
            int value = rnd.nextInt(200) - 100;
            // millis2 is used for difference tests.
            long millis2 = millis + rnd.nextLong() % _1000_YEARS - _500_YEARS;
            try {
                testFields(millis, value, millis2);
            } catch (RuntimeException e) {
                // Mismatches surface as RuntimeException from the fail* helpers;
                // log the inputs so the failure can be reproduced with the same seed.
                System.out.println("Failure index: " + i);
                System.out.println("Test millis: " + millis);
                System.out.println("Test value: " + value);
                System.out.println("Test millis2: " + millis2);
                fail(e.getMessage());
            }
        }
        System.out.println("100% complete (i=" + iterations + ")");
    }

    // Runs the full battery of per-field comparisons for one random input triple.
    private void testFields(long millis, int value, long millis2) {
        testField(iTest.year(), iActual.year(), millis, value, millis2);
        testField(iTest.monthOfYear(), iActual.monthOfYear(), millis, value, millis2);
        testField(iTest.dayOfMonth(), iActual.dayOfMonth(), millis, value, millis2);
        testField(iTest.weekyear(), iActual.weekyear(), millis, value, millis2);
        testField(iTest.weekOfWeekyear(),
                  iActual.weekOfWeekyear(), millis, value, millis2);
        testField(iTest.dayOfWeek(), iActual.dayOfWeek(), millis, value, millis2);
        testField(iTest.dayOfYear(), iActual.dayOfYear(), millis, value, millis2);
    }

    // Compares fieldA (test implementation) against fieldB (reference) across all the
    // principal DateTimeField operations for the given instant/value; any disagreement
    // is reported and aborts via RuntimeException.
    private void testField(DateTimeField fieldA, DateTimeField fieldB, long millis,
                           int value, long millis2)
    {
        int a, b;
        long x, y;
        boolean m, n;
        // get test
        a = fieldA.get(millis);
        b = fieldB.get(millis);
        testValue(fieldA, fieldB, "get", millis, a, b);
        // getMaximumValue test
        // Restrict this test to the fields that matter.
        Class fieldClass = fieldA.getClass();
        if (fieldClass == TestGJDayOfYearField.class ||
            fieldClass == TestGJDayOfMonthField.class ||
            fieldClass == TestGJWeekOfWeekyearField.class) {
            a = fieldA.getMaximumValue(millis);
            b = fieldB.getMaximumValue(millis);
            testValue(fieldA, fieldB, "getMaximumValue", millis, a, b);
        }
        // set test
        // Wrap the random value into each field's legal range before setting.
        a = getWrappedValue
            (value, fieldA.getMinimumValue(millis), fieldA.getMaximumValue(millis));
        b = getWrappedValue
            (value, fieldB.getMinimumValue(millis), fieldB.getMaximumValue(millis));
        if (iMode == JULIAN_MODE && a == 0
            && (fieldA.getName().equals("year") || fieldA.getName().equals("weekyear"))) {
            // Exclude setting Julian year of zero.
        } else {
            x = fieldA.set(millis, a);
            y = fieldB.set(millis, b);
            testMillis(fieldA, fieldB, "set", millis, x, y, a, b);
        }
        // roundFloor test
        x = fieldA.roundFloor(millis);
        y = fieldB.roundFloor(millis);
        testMillis(fieldA, fieldB, "roundFloor", millis, x, y);
        // roundCeiling test
        x = fieldA.roundCeiling(millis);
        y = fieldB.roundCeiling(millis);
        testMillis(fieldA, fieldB, "roundCeiling", millis, x, y);
        // roundHalfFloor test
        x = fieldA.roundHalfFloor(millis);
        y = fieldB.roundHalfFloor(millis);
        testMillis(fieldA, fieldB, "roundHalfFloor", millis, x, y);
        // roundHalfEven test
        x = fieldA.roundHalfEven(millis);
        y = fieldB.roundHalfEven(millis);
        testMillis(fieldA, fieldB, "roundHalfEven", millis, x, y);
        // remainder test
        x = fieldA.remainder(millis);
        y = fieldB.remainder(millis);
        testMillis(fieldA, fieldB, "remainder", millis, x, y);
        // add test
        x = fieldA.add(millis, value);
        y = fieldB.add(millis, value);
        testMillis(fieldA, fieldB, "add", millis, x, y);
        // addWrapField test
        x = fieldA.addWrapField(millis, value);
        y = fieldB.addWrapField(millis, value);
        testMillis(fieldA, fieldB, "addWrapField", millis, x, y);
        // getDifference test
        x = fieldA.getDifference(millis, millis2);
        y = fieldB.getDifference(millis, millis2);
        try {
            testValue(fieldA, fieldB, "getDifference", millis, x, y);
        } catch (RuntimeException e) {
            // Add the second operand to the report before propagating.
            System.out.println("Test datetime 2: " + makeDatetime(millis2));
            throw e;
        }
        // isLeap test
        m = fieldA.isLeap(millis);
        n = fieldB.isLeap(millis);
        testBoolean(fieldA, fieldB, "isLeap", millis, m, n);
        // getLeapAmount test
        a = fieldA.getLeapAmount(millis);
        b = fieldB.getLeapAmount(millis);
        testValue(fieldA, fieldB, "getLeapAmount", millis, a, b);
    }

    // Wraps an arbitrary value into [minValue, maxValue] by modular arithmetic,
    // so random test values always land in a field's legal range.
    private int getWrappedValue(int value, int minValue, int maxValue) {
        if (minValue >= maxValue) {
            throw new IllegalArgumentException("MIN > MAX");
        }
        int wrapRange = maxValue - minValue + 1;
        value -= minValue;
        if (value >= 0) {
            return (value % wrapRange) + minValue;
        }
        int remByRange = (-value) % wrapRange;
        if (remByRange == 0) {
            return 0 + minValue;
        }
        return (wrapRange - remByRange) + minValue;
    }

    // Asserts two field values agree; reports and aborts otherwise.
    private void testValue(DateTimeField fieldA, DateTimeField fieldB,
                           String method, long millis, long valueA, long valueB) {
        if (valueA != valueB) {
            failValue(fieldA, fieldB, method, millis, valueA, valueB);
        }
    }

    // Asserts two resulting instants agree; reports and aborts otherwise.
    private void testMillis(DateTimeField fieldA, DateTimeField fieldB,
                            String method, long millis, long millisA, long millisB) {
        if (millisA != millisB) {
            failMillis(fieldA, fieldB, method, millis, millisA, millisB);
        }
    }

    // Variant that also carries the two values that were set, for richer reports.
    private void testMillis(DateTimeField fieldA, DateTimeField fieldB,
                            String method, long millis, long millisA, long millisB,
                            int valueA, int valueB) {
        if (millisA != millisB) {
            failMillis(fieldA, fieldB, method, millis, millisA, millisB, valueA, valueB);
        }
    }

    // Asserts two boolean results agree; reports and aborts otherwise.
    private void testBoolean(DateTimeField fieldA, DateTimeField fieldB,
                             String method, long millis, boolean boolA, boolean boolB) {
        if (boolA != boolB) {
            failBoolean(fieldA, fieldB, method, millis, boolA, boolB);
        }
    }

    // Prints a value-mismatch report and aborts via RuntimeException.
    private void failValue(DateTimeField fieldA, DateTimeField fieldB,
                           String method, long millis, long valueA, long valueB) {
        System.out.println("Failure on " + makeName(fieldA, fieldB) + "." + method);
        System.out.println(fieldA.getClass().getName() + "\n\tvs. "
                           + fieldB.getClass().getName());
        System.out.println("Datetime: " + makeDatetime(millis));
        System.out.println("Millis from 1970: " + millis);
        System.out.println(valueA + " != " + valueB);
        throw new RuntimeException();
    }

    // Prints an instant-mismatch report (plus how each field interprets the
    // original and both results) and aborts via RuntimeException.
    private void failMillis(DateTimeField fieldA, DateTimeField fieldB,
                            String method, long millis, long millisA, long millisB) {
        System.out.println("Failure on " + makeName(fieldA, fieldB) + "." + method);
        System.out.println(fieldA.getClass().getName() + "\n\tvs. "
                           + fieldB.getClass().getName());
        System.out.println("Datetime: " + makeDatetime(millis));
        System.out.println("Millis from 1970: " + millis);
        System.out.println(makeDatetime(millisA) + " != " + makeDatetime(millisB));
        System.out.println(millisA + " != " + millisB);
        System.out.println("Original value as reported by first field: " +
                           fieldA.get(millis));
        System.out.println("Original value as reported by second field: " +
                           fieldB.get(millis));
        System.out.println("First new value as reported by first field: " +
                           fieldA.get(millisA));
        System.out.println("First new value as reported by second field: " +
                           fieldB.get(millisA));
        System.out.println("Second new value as reported by first field: " +
                           fieldA.get(millisB));
        System.out.println("Second new value as reported by second field: " +
                           fieldB.get(millisB));
        throw new RuntimeException();
    }

    // As above, but also reports the values that were passed to set().
    private void failMillis(DateTimeField fieldA, DateTimeField fieldB,
                            String method, long millis, long millisA, long millisB,
                            int valueA, int valueB) {
        System.out.println("Failure on " + makeName(fieldA, fieldB) + "." + method);
        System.out.println(fieldA.getClass().getName() + "\n\tvs. "
                           + fieldB.getClass().getName());
        System.out.println("Datetime: " + makeDatetime(millis));
        System.out.println("Millis from 1970: " + millis);
        System.out.println(makeDatetime(millisA) + " != " + makeDatetime(millisB));
        System.out.println(millisA + " != " + millisB);
        System.out.println("Original value as reported by first field: " +
                           fieldA.get(millis));
        System.out.println("Original value as reported by second field: " +
                           fieldB.get(millis));
        System.out.println("First new value as reported by first field: " +
                           fieldA.get(millisA));
        System.out.println("First new value as reported by second field: " +
                           fieldB.get(millisA));
        System.out.println("Second new value as reported by first field: " +
                           fieldA.get(millisB));
        System.out.println("Second new value as reported by second field: " +
                           fieldB.get(millisB));
        System.out.println("Value to set for first field: " + valueA);
        System.out.println("Value to set for second field: " + valueB);
        throw new RuntimeException();
    }

    // Prints a boolean-mismatch report and aborts via RuntimeException.
    private void failBoolean(DateTimeField fieldA, DateTimeField fieldB,
                             String method, long millis, boolean boolA, boolean boolB) {
        System.out.println("Failure on " + makeName(fieldA, fieldB) + "." + method);
        System.out.println(fieldA.getClass().getName() + "\n\tvs. "
                           + fieldB.getClass().getName());
        System.out.println("Datetime: " + makeDatetime(millis));
        System.out.println("Millis from 1970: " + millis);
        System.out.println(boolA + " != " + boolB);
        throw new RuntimeException();
    }

    // Display name for a field pair: one name when they agree, "a/b" otherwise.
    private String makeName(DateTimeField fieldA, DateTimeField fieldB) {
        if (fieldA.getName().equals(fieldB.getName())) {
            return fieldA.getName();
        } else {
            return fieldA.getName() + "/" + fieldB.getName();
        }
    }

    // Formats an instant with the reference chronology, including ISO week date.
    private String makeDatetime(long millis) {
        return makeDatetime(millis, iActual);
    }

    private String makeDatetime(long millis, Chronology chrono) {
        return chrono.dayOfWeek().getAsShortText(millis) + " "
            + new DateTime(millis, chrono).toString() + " / " +
            chrono.weekyear().get(millis) + "-W" + chrono.weekOfWeekyear().get(millis) +
            "-" + chrono.dayOfWeek().get(millis);
    }

    // Date-only variant of makeDatetime (currently unused debug helper).
    private String makeDate(long millis) {
        return makeDate(millis, iActual);
    }

    private String makeDate(long millis, Chronology chrono) {
        return chrono.dayOfWeek().getAsShortText(millis) + " "
            + new DateTime(millis, chrono).toString("yyyy-MM-dd") + " / " +
            chrono.weekyear().get(millis) + "-W" + chrono.weekOfWeekyear().get(millis) +
            "-" + chrono.dayOfWeek().get(millis);
    }

    // Draws a random instant within roughly +/-10000 years of 1970.
    private static long randomMillis(Random rnd) {
        long millis = rnd.nextLong();
        if (millis >= 0) {
            millis = millis % MAX_MILLIS;
        } else {
            millis = millis % -MIN_MILLIS;
        }
        return millis;
    }

    // Debug helper (currently unused): prints every principal field of an instant.
    private static void dump(Chronology chrono, long millis) {
        System.out.println("year: " + chrono.year().get(millis));
        System.out.println("monthOfYear: " + chrono.monthOfYear().get(millis));
        System.out.println("dayOfMonth: " + chrono.dayOfMonth().get(millis));
        System.out.println("weekyear: " + chrono.weekyear().get(millis));
        System.out.println("weekOfWeekyear: " + chrono.weekOfWeekyear().get(millis));
        System.out.println("dayOfWeek: " + chrono.dayOfWeek().get(millis));
        System.out.println("dayOfYear: " + chrono.dayOfYear().get(millis));
    }
}
package ninja;
import sirius.kernel.cache.Cache;
import sirius.kernel.cache.CacheManager;
import sirius.kernel.commons.Limit;
import sirius.kernel.commons.Strings;
import sirius.kernel.health.Exceptions;
import sirius.kernel.xml.Attribute;
import sirius.kernel.xml.XMLStructuredOutput;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.InetAddress;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.nio.file.FileVisitor;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* Represents a bucket.
* <p>
* Internally a bucket is just a directory within the base directory.
*/
public class Bucket {
private static final Pattern BUCKET_NAME_PATTERN = Pattern.compile("^[a-z\\d][a-z\\d\\-.]{1,61}[a-z\\d]$");
/**
* Matches IPv4 addresses roughly.
*/
private static final Pattern IP_ADDRESS_PATTERN = Pattern.compile("^\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}$");
private static final int MOST_RECENT_VERSION = 2;
private final File folder;
private final File versionMarker;
private final File publicMarker;
private static final Cache<String, Boolean> publicAccessCache = CacheManager.createLocalCache("public-bucket-access");
/**
* Creates a new bucket based on the given directory.
*
* @param folder the directory which stores the contents of the bucket.
*/
public Bucket(File folder) {
this.folder = folder;
// set the public marker file
this.publicMarker = new File(folder, "$public");
// as last step, check the version, and migrate the bucket if necessary
this.versionMarker = new File(folder, "$version");
int version = getVersion();
if (version < MOST_RECENT_VERSION) {
migrateBucket(version);
}
}
/**
* Returns the name of the bucket.
*
* @return the name of the bucket
*/
public String getName() {
return folder.getName();
}
/**
* Returns the encoded name of the bucket.
*
* @return the encoded name of the bucket
*/
public String getEncodedName() {
try {
return URLEncoder.encode(getName(), StandardCharsets.UTF_8.toString());
} catch (UnsupportedEncodingException e) {
return getName();
}
}
/**
* Returns the underlying directory as {@link File}.
*
* @return a {@link File} representing the underlying directory
*/
public File getFolder() {
return folder;
}
/**
* Determines if the bucket exists.
*
* @return <b>true</b> if the bucket exists, <b>false</b> else
*/
public boolean exists() {
return folder.exists();
}
/**
* Creates the bucket.
* <p>
* If the underlying directory already exists, nothing happens.
*
* @return <b>true</b> if the folder for the bucket was created successfully and if it was missing before
*/
public boolean create() {
if (folder.exists() || !folder.mkdirs()) {
return false;
}
// having successfully created the folder, write the version marker
setVersion(MOST_RECENT_VERSION);
return true;
}
/**
* Deletes the bucket and all of its contents.
*
* @return true if all files of the bucket and the bucket itself was deleted successfully, false otherwise.
*/
public boolean delete() {
if (!folder.exists()) {
return true;
}
boolean deleted = false;
for (File child : Objects.requireNonNull(folder.listFiles())) {
deleted = child.delete() || deleted;
}
deleted = folder.delete() || deleted;
return deleted;
}
/**
* Returns a list of at most the provided number of stored objects
*
* @param output the xml structured output the list of objects should be written to
* @param limit controls the maximum number of objects returned
* @param marker the key to start with when listing objects in a bucket
* @param prefix limits the response to keys that begin with the specified prefix
*/
public void outputObjects(XMLStructuredOutput output, int limit, @Nullable String marker, @Nullable String prefix) {
ListFileTreeVisitor visitor = new ListFileTreeVisitor(output, limit, marker, prefix);
output.beginOutput("ListBucketResult", Attribute.set("xmlns", "http://s3.amazonaws.com/doc/2006-03-01/"));
output.property("Name", getName());
output.property("MaxKeys", limit);
output.property("Marker", marker);
output.property("Prefix", prefix);
try {
walkFileTreeOurWay(folder.toPath(), visitor);
} catch (IOException e) {
Exceptions.handle(e);
}
output.property("IsTruncated", limit > 0 && visitor.getCount() > limit);
output.endOutput();
}
/**
* Very simplified stand-in for {@link Files#walkFileTree(Path, FileVisitor)} where we control the traversal order.
*
* @param path the start path.
* @param visitor the visitor processing the files.
* @throws IOException forwarded from nested I/O operations.
*/
private static void walkFileTreeOurWay(Path path, FileVisitor<? super Path> visitor) throws IOException {
if (!path.toFile().isDirectory()) {
throw new IOException("Directory expected.");
}
try (Stream<Path> children = Files.list(path)) {
children.sorted(Bucket::compareUtf8Binary)
.filter(p -> p.toFile().isFile())
.forEach(p -> {
try {
BasicFileAttributes attrs = Files.readAttributes(p, BasicFileAttributes.class);
visitor.visitFile(p, attrs);
} catch (IOException e) {
Exceptions.handle(e);
}
});
}
}
private static int compareUtf8Binary(Path p1, Path p2) {
String s1 = p1.getFileName().toString();
String s2 = p2.getFileName().toString();
byte[] b1 = s1.getBytes(StandardCharsets.UTF_8);
byte[] b2 = s2.getBytes(StandardCharsets.UTF_8);
// unless we upgrade to java 9+ offering Arrays.compare(...), we need to compare the arrays manually :(
int length = Math.min(b1.length, b2.length);
for (int i = 0; i < length; ++i) {
if (b1[i] != b2[i]) {
return Byte.compare(b1[i], b2[i]);
}
}
return b1.length - b2.length;
}
/**
* Determines if the bucket is only privately accessible, i.e. non-public.
*
* @return <b>true</b> if the bucket is only privately accessible, <b>false</b> else
*/
public boolean isPrivate() {
return !Boolean.TRUE.equals(publicAccessCache.get(getName(), key -> publicMarker.exists()));
}
/**
* Marks the bucket as only privately accessible, i.e. non-public.
*/
public void makePrivate() {
if (publicMarker.exists()) {
if (publicMarker.delete()) {
publicAccessCache.put(getName(), false);
} else {
Storage.LOG.WARN("Failed to delete public marker for bucket %s - it remains public!", getName());
}
}
}
/**
* Marks the bucket as publicly accessible.
*/
public void makePublic() {
if (!publicMarker.exists()) {
try {
new FileOutputStream(publicMarker).close();
} catch (IOException e) {
throw Exceptions.handle(Storage.LOG, e);
}
}
publicAccessCache.put(getName(), true);
}
/**
* Returns the object with the given key.
* <p>
* The method never returns <b>null</b>, but {@link StoredObject#exists()} may return <b>false</b>.
* <p>
* Make sure that the key passes {@link StoredObject#isValidKey(String)} by meeting the naming restrictions
* documented there.
*
* @param key the key of the requested object
* @return the object with the given key
*/
@Nonnull
public StoredObject getObject(String key) {
if (!StoredObject.isValidKey(key)) {
throw Exceptions.createHandled()
.withSystemErrorMessage(
"Object key \"%s\" does not adhere to the rules. [https://docs.aws.amazon.com/AmazonS3/latest/dev/UsingMetadata.html]",
key)
.handle();
}
return new StoredObject(new File(folder, key));
}
/**
* Returns a number of files meeting the given query, within the given indexing limits. Leave the query empty to
* get all files.
*
* @param query the query to filter for
* @param limit the limit to apply
* @return all files meeting the query, restricted by the limit
*/
public List<StoredObject> getObjects(@Nullable String query, Limit limit) {
try (Stream<Path> stream = Files.list(folder.toPath())) {
return stream.sorted(Bucket::compareUtf8Binary)
.map(Path::toFile)
.filter(currentFile -> isMatchingObject(query, currentFile))
.filter(limit.asPredicate())
.map(StoredObject::new)
.collect(Collectors.toList());
} catch (IOException e) {
throw Exceptions.handle(e);
}
}
/**
* Count the files containing the query. Leave the query empty to count all files.
*
* @param query the query to filter for
* @return the number of files in the bucket matching the query
*/
public int countObjects(@Nullable String query) {
try (Stream<Path> stream = Files.list(folder.toPath())) {
return Math.toIntExact(stream.map(Path::toFile)
.filter(currentFile -> isMatchingObject(query, currentFile))
.count());
} catch (IOException e) {
throw Exceptions.handle(e);
}
}
private boolean isMatchingObject(@Nullable String query, File currentFile) {
return (Strings.isEmpty(query) || currentFile.getName().contains(query)) && currentFile.isFile() && !currentFile
.getName()
.startsWith("__");
}
private int getVersion() {
// non-existent buckets always have the most recent version
if (!exists()) {
return MOST_RECENT_VERSION;
}
// return the minimal version if the bucket exists, but without a version marker
if (!versionMarker.exists()) {
return 1;
}
try {
// parse the version from the version marker file
return Integer.parseInt(Strings.join(Files.readAllLines(versionMarker.toPath()), "\n").trim());
} catch (IOException e) {
throw Exceptions.handle(Storage.LOG, e);
}
}
private void setVersion(int version) {
// non-existent buckets always have the most recent version
if (!exists()) {
return;
}
try {
// write the version into the version marker file
Files.write(versionMarker.toPath(), Collections.singletonList(String.valueOf(version)));
} catch (IOException e) {
throw Exceptions.handle(Storage.LOG, e);
}
}
/**
* Migrates a bucket folder to the most recent version.
*
* @param fromVersion the version to migrate from.
*/
private void migrateBucket(int fromVersion) {
if (fromVersion <= 1) {
try {
// migrate public marker
File legacyPublicMarker = new File(folder, "__ninja_public");
if (legacyPublicMarker.exists() && !publicMarker.exists()) {
Files.move(legacyPublicMarker.toPath(), publicMarker.toPath());
} else if (legacyPublicMarker.exists()) {
Files.delete(legacyPublicMarker.toPath());
}
} catch (IOException e) {
throw Exceptions.handle(Storage.LOG, e);
}
// todo: migrate files and properties
}
// further incremental updates go here one day
// write the most recent version marker
setVersion(MOST_RECENT_VERSION);
}
public static boolean isValidName(@Nullable String name) {
if (name == null || Strings.isEmpty(name.trim())) {
return false;
}
// test the majority of simple requirements via a regex
if (!BUCKET_NAME_PATTERN.matcher(name).matches()) {
return false;
}
// make sure that it does not start with "xn--"
if (name.startsWith("xn
return false;
}
try {
// make sure that the name is no valid IP address (the null check is pointless, it is just there to trigger
// actual conversion after the regex has matched; if the parsing fails, we end up in the catch clause)
if (IP_ADDRESS_PATTERN.matcher(name).matches() && InetAddress.getByName(name) != null) {
return false;
}
} catch (Exception e) {
// ignore this, we want the conversion to fail and thus to end up here
}
// reaching this point, the name is valid
return true;
}
} |
package org.concord.energy3d.model;
import java.awt.Color;
import java.io.Serializable;
import java.nio.ByteBuffer;
import java.nio.FloatBuffer;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.concord.energy3d.scene.Scene;
import org.concord.energy3d.scene.Scene.TextureMode;
import org.concord.energy3d.scene.SceneManager;
import org.concord.energy3d.shapes.AngleAnnotation;
import org.concord.energy3d.shapes.Heliodon;
import org.concord.energy3d.shapes.SizeAnnotation;
import org.concord.energy3d.simulation.SolarRadiation;
import org.concord.energy3d.util.FontManager;
import org.concord.energy3d.util.ObjectCloner;
import org.concord.energy3d.util.SelectUtil;
import org.concord.energy3d.util.Util;
import org.concord.energy3d.util.WallVisitor;
import com.ardor3d.bounding.BoundingBox;
import com.ardor3d.image.Image;
import com.ardor3d.image.Texture;
import com.ardor3d.image.TextureStoreFormat;
import com.ardor3d.intersection.PickData;
import com.ardor3d.intersection.PickResults;
import com.ardor3d.intersection.PickingUtil;
import com.ardor3d.intersection.PrimitivePickResults;
import com.ardor3d.math.ColorRGBA;
import com.ardor3d.math.MathUtils;
import com.ardor3d.math.Matrix3;
import com.ardor3d.math.Ray3;
import com.ardor3d.math.Vector2;
import com.ardor3d.math.Vector3;
import com.ardor3d.math.type.ReadOnlyColorRGBA;
import com.ardor3d.math.type.ReadOnlyVector3;
import com.ardor3d.renderer.Camera;
import com.ardor3d.renderer.Camera.ProjectionMode;
import com.ardor3d.renderer.queue.RenderBucketType;
import com.ardor3d.renderer.state.BlendState;
import com.ardor3d.renderer.state.OffsetState;
import com.ardor3d.renderer.state.OffsetState.OffsetType;
import com.ardor3d.renderer.state.RenderState.StateType;
import com.ardor3d.renderer.state.TextureState;
import com.ardor3d.scenegraph.Line;
import com.ardor3d.scenegraph.Mesh;
import com.ardor3d.scenegraph.Node;
import com.ardor3d.scenegraph.Spatial;
import com.ardor3d.scenegraph.hint.CullHint;
import com.ardor3d.scenegraph.hint.LightCombineMode;
import com.ardor3d.scenegraph.shape.Sphere;
import com.ardor3d.ui.text.BMText;
import com.ardor3d.ui.text.BMText.Align;
import com.ardor3d.ui.text.BMText.Justify;
import com.ardor3d.util.TextureManager;
import com.ardor3d.util.geom.BufferUtils;
/*
* This class should have been called a more generic name than its current one. New classes that have nothing to do with a house have to inherit from this class
* because of the binary serialization used to save state (hence its name cannot be changed).
*
*/
public abstract class HousePart implements Serializable {
private static final long serialVersionUID = 1L;
public static final OffsetState offsetState = new OffsetState();
protected static final double SNAP_DISTANCE = 0.5;
protected static final double STRETCH_ROOF_STEP = 1;
protected static final float printOutlineThickness = 2f;
protected static int printSequence;
private static HousePart gridsHighlightedHousePart;
private static boolean snapToObjects = true;
protected transient final int numOfDrawPoints;
protected transient final int numOfEditPoints;
protected transient HousePart original = null;
protected transient Node root;
protected transient Node pointsRoot;
protected transient Node labelsRoot;
protected transient Node sizeAnnotRoot;
protected transient Node angleAnnotRoot;
protected transient Mesh mesh;
protected transient Mesh gridsMesh;
protected transient Vector3 flattenCenter;
protected transient double orgHeight;
protected transient double area;
protected transient int containerRoofIndex;
protected transient double solarPotentialToday;
private transient double[] solarPotential;
private transient double[] heatLoss;
private transient double solarPotentialNow; // solar potential of current hour
private transient boolean isPrintVertical;
protected final ArrayList<Vector3> points;
protected final ArrayList<HousePart> children = new ArrayList<HousePart>();
protected HousePart container;
protected double height;
protected long id;
protected int editPointIndex = -1;
protected boolean drawCompleted = false;
private ReadOnlyColorRGBA color = ColorRGBA.LIGHT_GRAY; // custom color
private double labelOffset = -0.01;
private boolean firstPointInserted = false;
private boolean freeze;
transient Line heatFlux;
transient ReadOnlyVector3 pickedNormal;
private static Map<String, Texture> cachedGrayTextures = new HashMap<String, Texture>();
static {
    // Configure the shared polygon-offset render state applied to filled meshes.
    // NOTE(review): presumably this offsets fills so coplanar outlines/annotations do
    // not z-fight with them - confirm against how offsetState is attached elsewhere.
    offsetState.setTypeEnabled(OffsetType.Fill, true);
    offsetState.setFactor(1f);
    offsetState.setUnits(1f);
}
/** Returns whether snapping to nearby objects is globally enabled. */
public static boolean isSnapToObjects() {
    return snapToObjects;
}
/** Globally enables or disables snapping to nearby objects. */
public static void setSnapToObjects(final boolean snapToObjects) {
    HousePart.snapToObjects = snapToObjects;
}
/** Returns the part whose grids are currently highlighted, or null if none. */
public static HousePart getGridsHighlightedHousePart() {
    return gridsHighlightedHousePart;
}
/** Sets the part whose grids should be highlighted (null to clear). */
public static void setGridsHighlightedHousePart(final HousePart gridsHighlightedHousePart) {
    HousePart.gridsHighlightedHousePart = gridsHighlightedHousePart;
}
/** Sets the custom display color of this part. */
public void setColor(final ReadOnlyColorRGBA color) {
    this.color = color;
}
/** Returns the custom display color of this part (defaults to light gray). */
public ReadOnlyColorRGBA getColor() {
    return color;
}
/* if an attribute is serializable or is not needed after deserialization then they are passed as parameters to constructor */
/**
 * Creates a part with the given numbers of draw/edit points and initial height.
 * NOTE(review): this constructor invokes the overridable methods init(), allocateNewPoint()
 * and complete(); subclass overrides run before the subclass constructor body.
 *
 * @param complete if true, all edit points are allocated immediately and the part is marked completed
 */
public HousePart(final int numOfDrawPoints, final int numOfEditPoints, final double height, final boolean complete) {
    this.numOfDrawPoints = numOfDrawPoints;
    this.numOfEditPoints = numOfEditPoints;
    this.height = height;
    points = new ArrayList<Vector3>(numOfEditPoints);
    init();
    allocateNewPoint();
    if (complete) {
        // allocateNewPoint() adds numOfEditPoints/numOfDrawPoints points per call
        while (points.size() != numOfEditPoints) {
            allocateNewPoint();
        }
        firstPointInserted = true;
        complete();
    }
}

/** Creates an incomplete part whose points will be inserted interactively. */
public HousePart(final int numOfDrawPoints, final int numOfEditPoints, final double height) {
    this(numOfDrawPoints, numOfEditPoints, height, false);
}
/* if an attribute is transient but is always needed then it should be set to default here */
/**
 * (Re)builds all transient scene-graph state: root node, edit-point handles, annotation
 * nodes, grid lines and the heat-flux line. Called from the constructor and lazily after
 * deserialization (see getRoot()/getPoints()).
 */
protected void init() {
    orgHeight = height;
    flattenCenter = new Vector3();
    isPrintVertical = false;
    if (id == 0) {
        // id == 0 means this instance has never been assigned one (fresh or legacy file)
        id = Scene.getInstance().nextID();
    }
    root = new Node(toString());
    pointsRoot = new Node("Edit Points");
    sizeAnnotRoot = new Node("Size Annotations");
    sizeAnnotRoot.getSceneHints().setAllPickingHints(false);
    angleAnnotRoot = new Node("Angle Annotations");
    angleAnnotRoot.getSceneHints().setAllPickingHints(false);
    labelsRoot = new Node("Labels");
    labelsRoot.getSceneHints().setAllPickingHints(false);
    setAnnotationsVisible(Scene.getInstance().areAnnotationsVisible());
    // Set up a reusable pick results
    for (int i = 0; i < points.size(); i++) {
        addNewEditPointShape(i);
    }
    root.attachChild(pointsRoot);
    root.attachChild(sizeAnnotRoot);
    root.attachChild(angleAnnotRoot);
    root.attachChild(labelsRoot);
    gridsMesh = new Line("Grids");
    gridsMesh.getMeshData().setVertexBuffer(BufferUtils.createVector3Buffer(2));
    gridsMesh.setDefaultColor(new ColorRGBA(0, 0, 1, 0.25f)); // translucent blue grid
    gridsMesh.setModelBound(null);
    final BlendState blendState = new BlendState();
    blendState.setBlendEnabled(true);
    gridsMesh.setRenderState(blendState);
    gridsMesh.getSceneHints().setRenderBucketType(RenderBucketType.Transparent);
    Util.disablePickShadowLight(gridsMesh);
    root.attachChild(gridsMesh);
    setGridsVisible(false);
    heatFlux = new Line("Heat Flux");
    heatFlux.setLineWidth(1);
    heatFlux.setModelBound(null);
    Util.disablePickShadowLight(heatFlux);
    heatFlux.getMeshData().setVertexBuffer(BufferUtils.createVector3Buffer(6));
    heatFlux.setDefaultColor(ColorRGBA.YELLOW);
    root.attachChild(heatFlux);
    if (color == null) {
        // legacy scenes may deserialize with a null color; fall back to the per-type default
        if (this instanceof Foundation) {
            color = Scene.getInstance().getFoundationColor();
        } else if (this instanceof Door) {
            color = Scene.getInstance().getDoorColor();
        } else if (this instanceof Roof) {
            color = Scene.getInstance().getRoofColor();
        } else if (this instanceof Wall) {
            color = Scene.getInstance().getWallColor();
        }
    }
}
/** @return the snap-grid spacing for this part, in scene units; subclasses may override. */
public double getGridSize() {
    return 2.5;
}

/** Creates the i-th spherical edit-point handle and attaches it to pointsRoot. */
private void addNewEditPointShape(final int i) {
    final Sphere pointShape = new Sphere("Point", Vector3.ZERO, 8, 8, 0.1);
    pointShape.setUserData(new UserData(this, i, true));
    pointShape.updateModelBound(); // important
    pointShape.setVisible(false);
    pointShape.getSceneHints().setLightCombineMode(LightCombineMode.Off);
    pointShape.getSceneHints().setCastsShadows(false);
    pointShape.setModelBound(new BoundingBox());
    pointsRoot.attachChild(pointShape);
}

/** @return the i-th edit-point handle, creating it on demand. */
public Mesh getEditPointShape(final int i) {
    if (i >= pointsRoot.getNumberOfChildren()) {
        addNewEditPointShape(i);
    }
    return (Mesh) pointsRoot.getChild(i);
}
/** @return the texture file name for this part, or null if it has no texture. */
abstract protected String getTextureFileName();

/**
 * Marks this part as a print-preview copy of {@code original}: drops the edit points and
 * replaces the mesh with a deep copy of the original's mesh.
 */
public void setOriginal(final HousePart original) {
    this.original = original;
    root.detachChild(pointsRoot);
    if (original.mesh != null) {
        root.detachChild(mesh);
        mesh = original.mesh.makeCopy(true);
        mesh.setUserData(new UserData(this, ((UserData) original.mesh.getUserData()).getEditPointIndex(), false));
        root.attachChild(mesh);
    }
    drawAnnotations();
    root.updateWorldBound(true);
}

public HousePart getOriginal() {
    return original;
}

/** @return the scene-graph root for this part, lazily rebuilding transient state if needed. */
public Node getRoot() {
    if (root == null) {
        init();
    }
    return root;
}

/** @return the relative edit points, lazily rebuilding transient state if needed. */
public ArrayList<Vector3> getPoints() {
    if (root == null) {
        init();
    }
    return points;
}
/** Marks interactive drawing as finished and records the final height and area. */
public void complete() {
    firstPointInserted = true;
    drawCompleted = true;
    orgHeight = height;
    try {
        if (isDrawable()) {
            computeArea();
        }
    } catch (final Exception e) {
        // It's normal to get exception when cleaning up incomplete windows
        e.printStackTrace();
    }
}

public boolean isDrawCompleted() {
    return drawCompleted;
}

public void setDrawCompleted(final boolean completed) {
    drawCompleted = completed;
}

public boolean isFirstPointInserted() {
    return firstPointInserted;
}

public ArrayList<HousePart> getChildren() {
    return children;
}

/**
 * Sets the height; when {@code finalize} is true the original height is updated as well,
 * making the change permanent rather than a drag preview.
 */
protected void setHeight(final double newHeight, final boolean finalize) {
    height = newHeight;
    if (finalize) {
        orgHeight = newHeight;
    }
}

/** Shows or hides all edit-point handles. */
public void setEditPointsVisible(final boolean visible) {
    for (int i = 0; i < pointsRoot.getNumberOfChildren(); i++) {
        getEditPointShape(i).setVisible(visible);
    }
}

/** Begins dragging the i-th edit point; the part is considered incomplete while dragging. */
public void setEditPoint(final int i) {
    editPointIndex = i;
    drawCompleted = false;
}

public int getEditPoint() {
    return editPointIndex;
}
/** Convenience overload of {@link #pickContainer(int, int, Class[])} for a single container type. */
protected PickedHousePart pickContainer(final int x, final int y, final Class<?> typeOfHousePart) {
    return pickContainer(x, y, new Class<?>[] { typeOfHousePart });
}

/**
 * Picks a potential container part under screen point (x, y). Before the first point is
 * inserted the container may still change; afterwards picking is restricted to the current
 * container. Keeps parent/child links and grid visibility consistent on container change.
 *
 * @return the pick result, possibly null if nothing was hit
 */
protected PickedHousePart pickContainer(final int x, final int y, final Class<?>[] typesOfHousePart) {
    final HousePart previousContainer = container;
    final PickedHousePart picked;
    if (!firstPointInserted || container == null) {
        picked = SelectUtil.pickPart(x, y, typesOfHousePart);
    } else {
        picked = SelectUtil.pickPart(x, y, container);
    }
    if (!firstPointInserted && picked != null) {
        // picked is guaranteed non-null inside this branch; the original re-checked it redundantly
        final UserData userData = picked.getUserData();
        if (container == null || userData == null || container != userData.getHousePart()) {
            // detach from the old container before adopting the new one
            if (container != null) {
                container.getChildren().remove(this);
                if (this instanceof Roof) {
                    ((Wall) container).visitNeighbors(new WallVisitor() {
                        @Override
                        public void visit(final Wall wall, final Snap prev, final Snap next) {
                            wall.setRoof(null);
                        }
                    });
                }
            }
            if (userData != null && userData.getHousePart().isDrawCompleted()) {
                // a roof may only attach to a wall that has no roof yet; polygons never host parts
                if (!(userData.getHousePart() instanceof FoundationPolygon) && (!(this instanceof Roof) || ((Wall) userData.getHousePart()).getRoof() == null)) {
                    container = userData.getHousePart();
                    container.getChildren().add(this);
                }
            } else {
                container = null;
            }
        }
    }
    if (previousContainer != container) {
        if (previousContainer == null) {
            SceneManager.getInstance().setGridsVisible(false);
        } else if (container != null) {
            previousContainer.gridsMesh.getSceneHints().setCullHint(CullHint.Always);
        }
        if (container != null && !(this instanceof Roof)) {
            setGridsVisible(true);
        } else if (this instanceof Foundation) {
            SceneManager.getInstance().setGridsVisible(true);
        }
    }
    return picked;
}
/** @return true if this part's relative coordinate plane is horizontal (e.g. a foundation). */
protected boolean isHorizontal() {
    return true;
}

/**
 * Converts an absolute point to this part's relative (u, v) coordinates by projecting onto
 * the container's edges p0->p2 (u) and p0->p1 (v). For vertical containers (e.g. walls) the
 * v coordinate comes from the z axis instead.
 */
public Vector3 toRelative(final ReadOnlyVector3 p) {
    final HousePart container = getContainerRelative();
    if (container == null) {
        return p.clone();
    }
    final Vector3 p0 = container.getAbsPoint(0);
    final Vector3 p1 = container.getAbsPoint(1);
    final Vector3 p2 = container.getAbsPoint(2);
    final Vector2 p_2d = new Vector2(p.getX(), p.getY());
    final Vector2 p0_2d = new Vector2(p0.getX(), p0.getY());
    final double uScale = Util.projectPointOnLineScale(p_2d, p0_2d, new Vector2(p2.getX(), p2.getY()));
    final double vScale;
    final boolean relativeToHorizontal = getContainerRelative().isHorizontal();
    if (relativeToHorizontal) {
        vScale = Util.projectPointOnLineScale(p_2d, p0_2d, new Vector2(p1.getX(), p1.getY()));
        return new Vector3(uScale, vScale, p.getZ());
    } else {
        // vertical container: v is the projection along z, stored in the third component
        vScale = Util.projectPointOnLineScale(new Vector2(0, p.getZ()), new Vector2(0, p0.getZ()), new Vector2(0, p1.getZ()));
        return new Vector3(uScale, 0.0, vScale);
    }
}

/** Converts an absolute direction vector to relative coordinates (translation-free variant). */
public Vector3 toRelativeVector(final ReadOnlyVector3 v) {
    if (getContainerRelative() == null) { // a foundation does not have a container, return a clone of itself
        return v.clone();
    }
    return toRelative(v.add(getContainerRelative().getAbsPoint(0), null));
}

/** @see #toAbsolute(ReadOnlyVector3, Vector3) */
protected Vector3 toAbsolute(final ReadOnlyVector3 p) {
    return toAbsolute(p, null);
}
/**
 * Converts a relative (u, v) point to absolute coordinates: p0 + u*(p2-p0) + v*(p1-p0),
 * where v comes from p.y for horizontal containers and p.z for vertical ones.
 * Temp vectors are borrowed from the Vector3 pool and always released in the finally block.
 *
 * @param result optional output vector; a new one is created when null
 */
protected Vector3 toAbsolute(final ReadOnlyVector3 p, final Vector3 result) {
    final HousePart container = getContainerRelative();
    if (container == null) {
        return result == null ? new Vector3(p) : result.set(p);
    }
    final Vector3 u = Vector3.fetchTempInstance();
    final Vector3 v = Vector3.fetchTempInstance();
    final Vector3 p0 = Vector3.fetchTempInstance();
    Vector3 pointOnSpace;
    try {
        container.getAbsPoint(0, p0);
        container.getAbsPoint(2, u).subtract(p0, u);
        // degenerate (zero-length) axes are nudged to avoid division blow-ups downstream
        if (Util.isZero(u.length())) {
            u.set(MathUtils.ZERO_TOLERANCE, 0, 0);
        }
        container.getAbsPoint(1, v).subtract(p0, v);
        final boolean relativeToHorizontal = getContainerRelative().isHorizontal();
        if (Util.isZero(v.length())) {
            v.set(0, relativeToHorizontal ? MathUtils.ZERO_TOLERANCE : 0, relativeToHorizontal ? 0 : MathUtils.ZERO_TOLERANCE);
        }
        pointOnSpace = p0.add(u.multiply(p.getX(), u), u).add(v.multiply((relativeToHorizontal) ? p.getY() : p.getZ(), v), result);
        if (relativeToHorizontal) {
            // z rides on top of the container plane for horizontal containers
            pointOnSpace.setZ(pointOnSpace.getZ() + p.getZ());
        }
    } finally {
        Vector3.releaseTempInstance(u);
        Vector3.releaseTempInstance(v);
        Vector3.releaseTempInstance(p0);
    }
    /* do not round the result, otherwise neighboring walls won't have exact same edit points */
    return pointOnSpace;
}
/** @see #snapToGrid(Vector3, ReadOnlyVector3, double, boolean) — snaps z as well. */
protected void snapToGrid(final Vector3 p, final ReadOnlyVector3 current, final double gridSize) {
    snapToGrid(p, current, gridSize, true);
}

/**
 * Snaps p to the nearest grid intersection (in the container's u/v axes, or world x/y/z when
 * there is no container). The snap only takes effect when the snapped point is much closer
 * than the previous position (hysteresis factor 0.40); otherwise p reverts to previous.
 */
protected void snapToGrid(final Vector3 p, final ReadOnlyVector3 previous, final double gridSize, final boolean snapToZ) {
    if (Scene.getInstance().isSnapToGrids()) {
        final Vector3 newP = new Vector3();
        if (container == null) {
            newP.set(Math.round(p.getX() / gridSize) * gridSize, Math.round(p.getY() / gridSize) * gridSize, !snapToZ ? p.getZ() : Math.round(p.getZ() / gridSize) * gridSize);
        } else if (snapToZ) {
            // vertical snapping: quantize the height above the part's base
            final double baseZ = getAbsPoint(0).getZ();
            final double vScale = Util.projectPointOnLineScale(new Vector2(0, p.getZ()), new Vector2(), new Vector2(0, 1)) - baseZ;
            final double vScaleRounded = Math.round(vScale / gridSize) * gridSize;
            newP.set(p);
            newP.setZ(baseZ + vScaleRounded);
        } else {
            // planar snapping along the container's u (p0->p2) and v (p0->p1) axes
            final Vector3 p0 = getContainerRelative().getAbsPoint(0);
            final Vector3 p1 = getContainerRelative().getAbsPoint(1);
            final Vector3 p2 = getContainerRelative().getAbsPoint(2);
            final ReadOnlyVector3 u = p2.subtract(p0, null);
            final ReadOnlyVector3 v = p1.subtract(p0, null);
            final double uScale = Util.projectPointOnLineScale(p, p0, p2);
            final double vScale = Util.projectPointOnLineScale(p, p0, p1);
            final double uScaleRounded = Math.round(u.length() * uScale / gridSize) * gridSize;
            final double vScaleRounded = Math.round(v.length() * vScale / gridSize) * gridSize;
            newP.set(p0).addLocal(u.normalize(null).multiplyLocal(uScaleRounded)).addLocal(v.normalize(null).multiplyLocal(vScaleRounded));
            if (getContainerRelative().isHorizontal()) {
                newP.setZ(p.getZ());
            }
        }
        if (newP.distance(p) < previous.distance(p) * 0.40) {
            p.set(newP);
        } else {
            p.set(previous);
        }
    }
}

/**
 * Inserts the next edit point at screen position (x, y); completes the part once all
 * edit points have been placed.
 */
public void addPoint(final int x, final int y) {
    setPreviewPoint(x, y);
    if (container != null || !mustHaveContainer()) {
        firstPointInserted = true;
        if (drawCompleted) {
            throw new RuntimeException("Drawing of this object is already completed");
        }
        if (points.size() >= numOfEditPoints) {
            complete();
        } else {
            allocateNewPoint();
            setPreviewPoint(x, y);
        }
    }
}

/** @return whether this part requires a container to be placed (foundations do not). */
protected boolean mustHaveContainer() {
    return true;
}

/** Allocates one batch of edit points (numOfEditPoints / numOfDrawPoints per draw point). */
private void allocateNewPoint() {
    for (int i = 0; i < numOfEditPoints / numOfDrawPoints; i++) {
        points.add(new Vector3());
    }
}
/**
 * Fully redraws this part: mesh, area, texture/color, edit-point shapes and annotations.
 * All throwables are caught and logged so a single bad part cannot break the render loop.
 */
public void draw() {
    try {
        if (root == null) {
            init(); // lazy re-init after deserialization
        }
        drawMesh();
        if (isDrawable()) {
            computeArea();
        }
        updateTextureAndColor();
        updateEditShapes();
        clearAnnotations();
        if (isDrawable() && !isFrozen()) {
            drawAnnotations();
        }
        root.updateGeometricState(0);
    } catch (final Throwable e) {
        e.printStackTrace();
    }
}

/** Draws this part's snap grid lines; no-op by default, overridden where grids make sense. */
public void drawGrids(final double gridSize) {
}
/**
 * Shows or hides snap-to-grid guides for this part. Parts without a container toggle the
 * global scene grid, roofs manage their own grid mesh, and all other parts delegate grid
 * drawing to their container. Grids are only ever shown while snap-to-grids is enabled.
 */
public void setGridsVisible(final boolean visible) {
    if (container == null) {
        SceneManager.getInstance().setGridsVisible(Scene.getInstance().isSnapToGrids() && visible);
    } else if (this instanceof Roof) {
        if (visible) {
            drawGrids(getGridSize());
        }
        if (gridsMesh != null) {
            gridsMesh.setVisible(Scene.getInstance().isSnapToGrids() && visible);
        }
    } else {
        // container is necessarily non-null here — the original "else if (container != null)" was redundant
        if (visible) {
            container.drawGrids(getGridSize());
        }
        if (container.gridsMesh != null) {
            container.gridsMesh.getSceneHints().setCullHint(Scene.getInstance().isSnapToGrids() && visible ? CullHint.Inherit : CullHint.Always);
        }
    }
}
/**
 * Moves each edit-point handle to its absolute position and scales it so it stays a roughly
 * constant size on screen (distance-based in perspective mode, frustum-based in parallel mode).
 */
public void updateEditShapes() {
    final Vector3 p = Vector3.fetchTempInstance();
    try {
        // The camera does not change during this loop; fetch it once (it was re-fetched per point).
        final Camera camera = SceneManager.getInstance().getCamera();
        for (int i = 0; i < points.size(); i++) {
            getAbsPoint(i, p);
            getEditPointShape(i).setTranslation(p);
            if (camera == null) {
                // BUGFIX: the original dereferenced a null camera in the parallel-projection
                // branch; keep the handle's current scale when no camera is available yet.
                continue;
            }
            if (camera.getProjectionMode() != ProjectionMode.Parallel) {
                final double distance = camera.getLocation().distance(p);
                getEditPointShape(i).setScale(distance > 0.1 ? distance / 10 : 0.01);
            } else {
                getEditPointShape(i).setScale(camera.getFrustumTop() / 4);
            }
        }
    } finally {
        Vector3.releaseTempInstance(p);
    }
}
/** Computes the oriented bounding box of this part's mesh and records its center for flattening. */
public void computeOrientedBoundingBox() {
    final ReadOnlyVector3 center = computeOrientedBoundingBox(mesh);
    flattenCenter.set(center);
}

/** @return the center of the oriented bounding box of the given mesh. */
protected static ReadOnlyVector3 computeOrientedBoundingBox(final Mesh mesh) {
    return Util.getOrientedBoundingBox(mesh).getCenter();
}

protected ReadOnlyVector3 getCenter() {
    return mesh.getModelBound().getCenter();
}

/**
 * Animates this part toward its print-layout position; flattenTime goes from 0 (in place)
 * to 1 (fully flattened at the print center).
 */
public void flatten(final double flattenTime) {
    if (isPrintable()) {
        if (isPrintVertical) {
            root.setRotation(new Matrix3().fromAngles(0, -Math.PI / 2.0 * flattenTime, 0).multiply(root.getRotation(), null));
        }
        final Vector3 targetCenter = new Vector3(((UserData) mesh.getUserData()).getPrintCenter());
        root.setTranslation(targetCenter.subtractLocal(flattenCenter).multiplyLocal(flattenTime));
        root.updateGeometricState(0);
    }
}

/** @return whether this part appears in the print layout. */
public boolean isPrintable() {
    return true;
}
/**
 * Draws this part's print-sequence label, e.g. "(3)", offset from the mesh along its normal
 * (or by labelOffset for print copies).
 *
 * @param printSequence the current sequence counter
 * @return the incremented sequence counter
 */
public int drawLabels(int printSequence) {
    if (!isPrintable()) {
        return printSequence;
    }
    final String text = "(" + (printSequence++ + 1) + ")";
    final BMText label = fetchBMText(text, 0);
    final Vector3 offset;
    if (original == null) {
        offset = getNormal().multiply(0.5, null);
    } else {
        offset = new Vector3(0, labelOffset, 0);
    }
    root.getTransform().applyInverseVector(offset);
    offset.addLocal(getCenter());
    label.setTranslation(offset);
    return printSequence;
}

/** Hides all labels by culling them. */
public void hideLabels() {
    for (final Spatial label : labelsRoot.getChildren()) {
        label.getSceneHints().setCullHint(CullHint.Always);
    }
}

/**
 * Returns the index-th label, reusing an existing BMText node when available and creating
 * (and attaching) a new one otherwise.
 */
protected BMText fetchBMText(final String text, final int index) {
    final BMText label;
    if (labelsRoot.getChildren().size() > index) {
        label = (BMText) labelsRoot.getChild(index);
        label.setText(text);
        label.getSceneHints().setCullHint(CullHint.Inherit);
    } else {
        label = new BMText("Label Text", text, FontManager.getInstance().getPartNumberFont(), Align.Center, Justify.Center);
        Util.initHousePartLabel(label);
        labelsRoot.attachChild(label);
    }
    return label;
}

/** @return this part's outward normal; defaults to +Z, overridden by oriented parts. */
public ReadOnlyVector3 getNormal() {
    return Vector3.UNIT_Z;
}
/** @see #fetchSizeAnnot(int, Node) — uses this part's own size-annotation root. */
protected SizeAnnotation fetchSizeAnnot(final int annotCounter) {
    return fetchSizeAnnot(annotCounter, sizeAnnotRoot);
}

/** Returns the annotCounter-th size annotation under the given root, reusing or creating it. */
protected SizeAnnotation fetchSizeAnnot(final int annotCounter, final Node sizeAnnotRoot) {
    final SizeAnnotation annot;
    if (annotCounter < sizeAnnotRoot.getChildren().size()) {
        annot = (SizeAnnotation) sizeAnnotRoot.getChild(annotCounter);
        annot.getSceneHints().setCullHint(CullHint.Inherit);
    } else {
        annot = new SizeAnnotation();
        sizeAnnotRoot.attachChild(annot);
    }
    return annot;
}

/** Culls all size and angle annotations; fetchers un-cull the ones in use on the next draw. */
protected void clearAnnotations() {
    for (final Spatial annot : sizeAnnotRoot.getChildren()) {
        annot.getSceneHints().setCullHint(CullHint.Always);
    }
    for (final Spatial annot : angleAnnotRoot.getChildren()) {
        annot.getSceneHints().setCullHint(CullHint.Always);
    }
}

/** @see #fetchAngleAnnot(int, Node) — uses this part's own angle-annotation root. */
protected AngleAnnotation fetchAngleAnnot(final int annotCounter) {
    return fetchAngleAnnot(annotCounter, angleAnnotRoot);
}

/** Returns the annotCounter-th angle annotation under the given root, reusing or creating it. */
protected AngleAnnotation fetchAngleAnnot(final int annotCounter, final Node angleAnnotRoot) {
    final AngleAnnotation annot;
    if (annotCounter < angleAnnotRoot.getChildren().size()) {
        annot = (AngleAnnotation) angleAnnotRoot.getChild(annotCounter);
        annot.getSceneHints().setCullHint(CullHint.Inherit);
    } else {
        annot = new AngleAnnotation();
        angleAnnotRoot.attachChild(annot);
    }
    return annot;
}
/** Moves the current edit point to follow the mouse at screen position (x, y). */
public abstract void setPreviewPoint(int x, int y);

/** Hook invoked when this part is removed from the scene; no-op by default. */
public void delete() {
}

/** Draws size/angle annotations; no-op by default, overridden by annotated parts. */
public void drawAnnotations() {
}

/** Rebuilds this part's mesh geometry from its edit points. */
protected abstract void drawMesh();

/** Shows or hides all annotations by culling their root nodes. */
public void setAnnotationsVisible(final boolean visible) {
    final CullHint cull = visible ? CullHint.Inherit : CullHint.Always;
    sizeAnnotRoot.getSceneHints().setCullHint(cull);
    angleAnnotRoot.getSceneHints().setCullHint(cull);
}

/** Applies the current texture mode and color to this part's mesh(es). */
public abstract void updateTextureAndColor();

/** @see #updateTextureAndColor(Mesh, ReadOnlyColorRGBA, TextureMode) — uses the scene's texture mode. */
protected void updateTextureAndColor(final Mesh mesh, final ReadOnlyColorRGBA defaultColor) {
    updateTextureAndColor(mesh, defaultColor, Scene.getInstance().getTextureMode());
}
/**
 * Applies the appropriate render state to the given mesh based on texture mode, freeze state
 * and whether the solar heat map is active. Priority: heat map > frozen (gray) > no texture
 * (flat color) > textured (white base color).
 */
protected void updateTextureAndColor(final Mesh mesh, final ReadOnlyColorRGBA defaultColor, final TextureMode textureMode) {
    if (this instanceof Tree) { // special treatment because the same mesh of a tree has two textures (shed or not)
        final TextureState ts = new TextureState();
        final Texture texture = getTexture(getTextureFileName(), textureMode == TextureMode.Simple, defaultColor, isFrozen());
        ts.setTexture(texture);
        mesh.setRenderState(ts);
    } else {
        if (SceneManager.getInstance().getSolarHeatMap()) {
            if (isDrawable()) {
                if (this instanceof Foundation || this instanceof Wall || this instanceof Roof) {
                    // roof parts store their normal in the parent node's user data
                    SolarRadiation.getInstance().initMeshTextureData(mesh, mesh, this instanceof Roof ? (ReadOnlyVector3) mesh.getParent().getUserData() : getNormal());
                }
            }
        } else if (isFrozen()) {
            mesh.clearRenderState(StateType.Texture);
            mesh.setDefaultColor(Scene.GRAY);
        } else if (textureMode == TextureMode.None || getTextureFileName() == null) {
            mesh.clearRenderState(StateType.Texture);
            mesh.setDefaultColor(defaultColor);
        } else {
            final TextureState ts = new TextureState();
            final Texture texture = getTexture(getTextureFileName(), textureMode == TextureMode.Simple, defaultColor, false);
            ts.setTexture(texture);
            mesh.setRenderState(ts);
            mesh.setDefaultColor(ColorRGBA.WHITE);
        }
    }
}
/**
 * Loads a texture, optionally replacing fully-transparent pixels with the part's default
 * color (simple texture mode), and optionally converting it to a cached grayscale version
 * for frozen parts. Assumes 4 bytes per pixel (RGBA) in the image data, as the indexing shows.
 */
private Texture getTexture(final String filename, final boolean isTransparent, final ReadOnlyColorRGBA defaultColor, final boolean grayout) {
    Texture texture = TextureManager.load(filename, Texture.MinificationFilter.Trilinear, TextureStoreFormat.GuessNoCompressedFormat, true);
    if (isTransparent) {
        final Color color = new Color(defaultColor.getRed(), defaultColor.getGreen(), defaultColor.getBlue());
        final Image image = texture.getImage();
        final ByteBuffer data = image.getData(0);
        byte alpha;
        int i;
        for (int y = 0; y < image.getHeight(); y++) {
            for (int x = 0; x < image.getWidth(); x++) {
                i = (y * image.getWidth() + x) * 4;
                alpha = data.get(i + 3);
                if (alpha == 0) { // when it is transparent, put the default color of the part
                    data.put(i, (byte) color.getRed());
                    data.put(i + 1, (byte) color.getGreen());
                    data.put(i + 2, (byte) color.getBlue());
                }
            }
        }
        texture.setImage(image);
    }
    if (grayout) {
        final Texture grayoutTexture = cachedGrayTextures.get(filename + ":grayout");
        if (grayoutTexture != null) {
            return grayoutTexture;
        }
        final Image image = texture.getImage();
        final Image grayImage = new Image(); // make a copy
        grayImage.setDataFormat(image.getDataFormat());
        grayImage.setDataType(image.getDataType());
        grayImage.setWidth(image.getWidth());
        grayImage.setHeight(image.getHeight());
        grayImage.setMipMapByteSizes(image.getMipMapByteSizes());
        final ByteBuffer data = image.getData(0);
        final ByteBuffer grayData = ByteBuffer.allocate(data.capacity());
        byte alpha, gray;
        int i;
        for (int y = 0; y < image.getHeight(); y++) {
            for (int x = 0; x < image.getWidth(); x++) {
                i = (y * image.getWidth() + x) * 4;
                alpha = data.get(i + 3);
                // BUGFIX: channel bytes hold unsigned 0-255 values; the original compared them
                // as signed bytes, so any channel >= 128 looked negative and always won the
                // minimum, corrupting bright pixels. Mask to int before comparing.
                int minChannel = Math.min(data.get(i) & 0xFF, data.get(i + 1) & 0xFF);
                minChannel = Math.min(data.get(i + 2) & 0xFF, minChannel);
                gray = (byte) minChannel;
                grayData.put(i, gray);
                grayData.put(i + 1, gray);
                grayData.put(i + 2, gray);
                grayData.put(i + 3, alpha);
            }
        }
        grayImage.addData(grayData);
        texture = TextureManager.loadFromImage(grayImage, Texture.MinificationFilter.Trilinear, TextureStoreFormat.GuessNoCompressedFormat);
        cachedGrayTextures.put(filename + ":grayout", texture);
    }
    return texture;
}
public void setContainer(final HousePart container) {
    this.container = container;
}

public HousePart getContainer() {
    return container;
}

/** @return the part whose plane defines this part's relative coordinates; defaults to the container. */
protected HousePart getContainerRelative() {
    return container;
}

public Mesh getMesh() {
    return mesh;
}
/** @return a debug string: class name, id, every other absolute point in 2D, and the edit-point index. */
@Override
public String toString() {
    // StringBuilder instead of repeated String concatenation in the loop
    final StringBuilder s = new StringBuilder(this.getClass().getSimpleName()).append("(").append(id).append(")");
    for (int i = 0; i < points.size(); i += 2) {
        s.append(" ").append(Util.toString2D(getAbsPoint(i)));
    }
    s.append(" editPoint=").append(editPointIndex);
    return s.toString();
}
public void setLabelOffset(final double labelOffset) {
    this.labelOffset = labelOffset;
}

/** @return the index-th edit point converted to absolute coordinates (new vector). */
public Vector3 getAbsPoint(final int index) {
    return toAbsolute(points.get(index), null);
}

/** @return the index-th edit point converted to absolute coordinates, written into result. */
public Vector3 getAbsPoint(final int index, final Vector3 result) {
    return toAbsolute(points.get(index), result);
}

/** Recursively redraws all descendants (children first, depth-first). */
public void drawChildren() {
    for (final HousePart child : children) {
        child.drawChildren();
        child.draw();
    }
}

public double getHeight() {
    return height;
}

public void setHeight(final double height) {
    this.height = height;
}
/** Hook to reset derived state; no-op by default. */
public void reset() {
}

/**
 * @return whether this part has enough well-separated points to be drawn: at least 4 points
 *         and both defining edges no shorter than the grid size.
 */
public boolean isDrawable() {
    return points.size() >= 4 && getAbsPoint(0).distance(getAbsPoint(2)) >= getGridSize() && getAbsPoint(0).distance(getAbsPoint(1)) >= getGridSize();
}

/** Sets the print orientation and immediately re-flattens to recompute the bounding box. */
public void setPrintVertical(final boolean isVertical) {
    isPrintVertical = isVertical;
    flattenCenter.set(0, 0, 0);
    flatten(1.0);
    computeOrientedBoundingBox();
}

/** @return false if any edit point contains NaN/infinite coordinates. */
public boolean isValid() {
    // if (!isDrawable())
    // return false;
    for (final ReadOnlyVector3 p : points) {
        if (!Vector3.isValid(p)) {
            return false;
        }
    }
    return true;
}

/** @return the cached area from the last computeArea() call. */
public double getArea() {
    return area;
}

/** Recomputes and caches this part's area. */
protected abstract void computeArea();

public void setFreeze(final boolean freeze) {
    this.freeze = freeze;
}

public boolean isFrozen() {
    return freeze;
}

public long getId() {
    return id;
}

public void setId(final long id) {
    this.id = id;
}
/**
 * Walks up the container chain to the topmost ancestor, which is always a Foundation
 * (foundations have no container themselves).
 *
 * @return the foundation this part ultimately rests on, or null if it has no container
 */
public Foundation getTopContainer() {
    HousePart part = getContainer();
    if (part == null) {
        return null;
    }
    // climb until the current part has no container — that one is the foundation
    while (part.getContainer() != null) {
        part = part.getContainer();
    }
    return (Foundation) part;
}
/** Highlights this part in red with a polygon offset, or restores its normal color. */
public void setHighlight(final boolean highlight) {
    if (highlight) {
        final OffsetState offset = new OffsetState();
        offset.setFactor(-1);
        offset.setUnits(-1);
        mesh.setRenderState(offset);
        mesh.setDefaultColor(ColorRGBA.RED);
    } else {
        mesh.clearRenderState(StateType.Offset);
        mesh.setDefaultColor(getColor());
    }
}

public void setSolarPotential(final double[] solarPotential) {
    this.solarPotential = solarPotential;
}

public double[] getSolarPotential() {
    return solarPotential;
}

public void setHeatLoss(final double[] heatLoss) {
    this.heatLoss = heatLoss;
}

public double[] getHeatLoss() {
    return heatLoss;
}
/**
 * Sums the per-interval heat-loss values.
 *
 * @return the total heat loss, or 0 when no heat-loss data has been computed yet
 */
public double getTotalHeatLoss() {
    if (heatLoss == null) {
        return 0;
    }
    double total = 0;
    for (int i = 0; i < heatLoss.length; i++) {
        total += heatLoss[i];
    }
    return total;
}
public double getSolarPotentialNow() {
    return solarPotentialNow;
}

public void setSolarPotentialNow(final double solarPotentialNow) {
    this.solarPotentialNow = solarPotentialNow;
}

public double getSolarPotentialToday() {
    return solarPotentialToday;
}

public void setSolarPotentialToday(final double solarPotentialToday) {
    this.solarPotentialToday = solarPotentialToday;
}

/** @return the mesh used for solar-radiation texture mapping; defaults to the main mesh. */
public Mesh getRadiationMesh() {
    return mesh;
}

/** @return the spatial tested for shading collisions in the radiation computation. */
public Spatial getRadiationCollisionSpatial() {
    return getRadiationMesh();
}

public Spatial getEditPointsRoot() {
    return pointsRoot;
}

/** @return the spatial used for picking/collision; defaults to the main mesh. */
public Spatial getCollisionSpatial() {
    return mesh;
}
/** use the lightness of color to approximate albedo */
/**
 * @return an albedo estimate: a fixed 0.2 in full texture mode, otherwise the HSL lightness
 *         ((min + max channel) / 2) of this part's color or its per-type default.
 */
public float getAlbedo() {
    if (Scene.getInstance().getTextureMode() == TextureMode.Full) {
        return 0.2f; // texture dominates appearance; use a fixed estimate
    }
    ReadOnlyColorRGBA c = null;
    if (color != null) {
        c = color;
    } else {
        // mirror the per-type default-color selection used in init()
        if (this instanceof Foundation) {
            c = Scene.getInstance().getFoundationColor();
        } else if (this instanceof Door) {
            c = Scene.getInstance().getDoorColor();
        } else if (this instanceof Roof) {
            c = Scene.getInstance().getRoofColor();
        } else if (this instanceof Wall) {
            c = Scene.getInstance().getWallColor();
        } else {
            c = ColorRGBA.WHITE;
        }
    }
    float min = Math.min(c.getRed(), c.getGreen());
    min = Math.min(min, c.getBlue());
    float max = Math.max(c.getRed(), c.getGreen());
    max = Math.max(max, c.getBlue());
    return 0.5f * (min + max);
}
/**
 * Computes the per-area heat flux used to size the heat-flux arrows: the daily average
 * (yellow arrows) or the current hour's value (white arrows), depending on the scene mode.
 * For foundations the area is reduced to the building's floor area when the walls are complete.
 *
 * @return heat flux per unit area, 0 when no heat-loss data is available
 */
double calculateHeatVector() {
    double heat = 0;
    double a = area;
    if (this instanceof Foundation) {
        final Building building = new Building((Foundation) this);
        if (building.isWallComplete()) {
            building.calculate();
            a = building.getArea(); // reduce the area of the foundation to the floor area within the building envelope
        }
    }
    if (heatLoss != null) {
        if (SceneManager.getInstance().isHeatFluxDaily()) {
            for (final double x : heatLoss) {
                heat += x;
            }
            heat /= a * heatLoss.length;
            heatFlux.setDefaultColor(ColorRGBA.YELLOW);
        } else {
            // heatLoss holds 4 values per hour (15-minute intervals); average the current hour
            final int hourOfDay4 = Heliodon.getInstance().getCalendar().get(Calendar.HOUR_OF_DAY) * 4;
            heat = (heatLoss[hourOfDay4] + heatLoss[hourOfDay4 + 1] + heatLoss[hourOfDay4 + 2] + heatLoss[hourOfDay4 + 3]) / (4 * a);
            heatFlux.setDefaultColor(ColorRGBA.WHITE);
        }
    }
    return heat;
}
/**
 * Rebuilds the heat-flux arrow field over this part's surface: a rows x cols lattice of
 * arrows along the normal, each scaled by the heat flux from calculateHeatVector().
 */
public void drawHeatFlux() {
    final int cols = (int) Math.max(2, getAbsPoint(0).distance(getAbsPoint(2)) / Scene.getInstance().getHeatVectorGridSize());
    final int rows = (int) Math.max(2, getAbsPoint(0).distance(getAbsPoint(1)) / Scene.getInstance().getHeatVectorGridSize());
    // A fresh buffer is always allocated (6 vertices per arrow); the original first read the
    // old buffer into the variable and immediately overwrote it — a dead store, now removed.
    final FloatBuffer arrowsVertices = BufferUtils.createVector3Buffer(rows * cols * 6);
    heatFlux.getMeshData().setVertexBuffer(arrowsVertices);
    final double heat = calculateHeatVector();
    if (heat != 0) {
        final ReadOnlyVector3 o = getAbsPoint(0);
        final ReadOnlyVector3 u = getAbsPoint(2).subtract(o, null);
        final ReadOnlyVector3 v = getAbsPoint(1).subtract(o, null);
        final ReadOnlyVector3 normal = getNormal();
        final Vector3 a = new Vector3();
        double g, h;
        for (int j = 0; j < cols; j++) {
            h = j + 0.5; // cell centers, not corners
            for (int i = 0; i < rows; i++) {
                g = i + 0.5;
                a.setX(o.getX() + g * v.getX() / rows + h * u.getX() / cols);
                a.setY(o.getY() + g * v.getY() / rows + h * u.getY() / cols);
                a.setZ(o.getZ() + g * v.getZ() / rows + h * u.getZ() / cols);
                drawArrow(a, normal, arrowsVertices, heat);
            }
        }
        heatFlux.getMeshData().updateVertexCount();
        heatFlux.updateModelBound();
    }
    updateHeatFluxVisibility();
}
/**
 * Appends one heat-flux arrow (shaft + two head lines) at origin o along the normal to the
 * vertex buffer. On walls, arrows that would start inside a window or door opening are
 * skipped. Negative heat flips the arrow so the head sits at the origin side.
 */
protected void drawArrow(final ReadOnlyVector3 o, final ReadOnlyVector3 normal, final FloatBuffer arrowsVertices, final double heat) {
    if (this instanceof Wall) {
        final Wall wall = (Wall) this;
        for (final HousePart x : wall.children) {
            if (x instanceof Window || x instanceof Door) {
                // compute the child's bounding rectangle in its relative coordinates
                final Vector3 vo = x.toRelative(o);
                double xmin = 2;
                double zmin = 2;
                double xmax = -2;
                double zmax = -2;
                for (final Vector3 a : x.points) {
                    if (a.getX() > xmax) {
                        xmax = a.getX();
                    }
                    if (a.getZ() > zmax) {
                        zmax = a.getZ();
                    }
                    if (a.getX() < xmin) {
                        xmin = a.getX();
                    }
                    if (a.getZ() < zmin) {
                        zmin = a.getZ();
                    }
                }
                if (vo.getX() > xmin && vo.getZ() > zmin && vo.getX() < xmax && vo.getZ() < zmax) {
                    return; // arrow origin lies inside an opening: draw nothing
                }
            }
        }
    }
    // shaft: origin -> origin + normal * length * |heat|
    arrowsVertices.put(o.getXf()).put(o.getYf()).put(o.getZf());
    final Vector3 p = new Vector3();
    normal.multiply(Scene.getInstance().getHeatVectorLength() * Math.abs(heat), p);
    final Vector3 p2 = new Vector3();
    o.add(p, p2);
    arrowsVertices.put(p2.getXf()).put(p2.getYf()).put(p2.getZf());
    if (heat < 0) {
        p2.set(o); // inward flux: put the arrow head at the origin end
    }
    if (heat != 0) {
        final float arrowLength = 0.5f;
        p.normalizeLocal();
        final double sign = Math.signum(heat);
        // the two head lines lie in a plane chosen per part type
        if (this instanceof Roof) {
            final float px = (float) (p.getX() * arrowLength * sign);
            final float py = (float) (p.getY() * arrowLength * sign);
            final float pz = (float) (p.getZ() * arrowLength * sign);
            final float yp = -pz;
            final float zp = py;
            arrowsVertices.put(p2.getXf()).put(p2.getYf()).put(p2.getZf());
            arrowsVertices.put(p2.getXf() - px).put(p2.getYf() - py + yp * 0.25f).put(p2.getZf() - pz + zp * 0.25f);
            arrowsVertices.put(p2.getXf()).put(p2.getYf()).put(p2.getZf());
            arrowsVertices.put(p2.getXf() - px).put(p2.getYf() - py - yp * 0.25f).put(p2.getZf() - pz - zp * 0.25f);
        } else if (this instanceof Foundation) {
            final float cos = (float) (p.dot(Vector3.UNIT_X) * sign);
            final float sin = (float) (p.dot(Vector3.UNIT_Z) * sign);
            arrowsVertices.put(p2.getXf()).put(p2.getYf()).put(p2.getZf());
            arrowsVertices.put(p2.getXf() - arrowLength * cos).put(p2.getYf() - arrowLength * 0.5f).put(p2.getZf() - arrowLength * sin);
            arrowsVertices.put(p2.getXf()).put(p2.getYf()).put(p2.getZf());
            arrowsVertices.put(p2.getXf() - arrowLength * cos).put(p2.getYf() + arrowLength * 0.5f).put(p2.getZf() - arrowLength * sin);
        } else {
            final float cos = (float) (p.dot(Vector3.UNIT_X) * sign);
            final float sin = (float) (p.dot(Vector3.UNIT_Y) * sign);
            arrowsVertices.put(p2.getXf()).put(p2.getYf()).put(p2.getZf());
            arrowsVertices.put(p2.getXf() - arrowLength * cos).put(p2.getYf() - arrowLength * sin).put(p2.getZf() - arrowLength * 0.5f);
            arrowsVertices.put(p2.getXf()).put(p2.getYf()).put(p2.getZf());
            arrowsVertices.put(p2.getXf() - arrowLength * cos).put(p2.getYf() - arrowLength * sin).put(p2.getZf() + arrowLength * 0.5f);
        }
    }
}
/** Shows the heat-flux arrows only when both the scene and the view enable them. */
public void updateHeatFluxVisibility() {
    heatFlux.setVisible(Scene.getInstance().getAlwaysComputeHeatFluxVectors() && SceneManager.getInstance().areHeatFluxVectorsVisible());
}

/** @return whether this part supports copy/paste. */
public abstract boolean isCopyable();

/**
 * Deep-copies this part and assigns a fresh id; the copy shares the same container.
 *
 * @param check unused here; subclasses use it to validate the copy's placement
 */
public HousePart copy(final boolean check) {
    final HousePart c = (HousePart) ObjectCloner.deepCopy(this);
    c.container = this.container;
    c.id = Scene.getInstance().nextID();
    return c;
}
/**
 * @return the centroid of all edit points in absolute coordinates
 */
public Vector3 getAbsCenter() {
    double sumX = 0, sumY = 0, sumZ = 0;
    final int count = points.size();
    for (int i = 0; i < count; i++) {
        final Vector3 q = getAbsPoint(i);
        sumX += q.getX();
        sumY += q.getY();
        sumZ += q.getZ();
    }
    return new Vector3(sumX / count, sumY / count, sumZ / count);
}
/**
 * Projects this part's edit points onto its container's surface (via vertical ray picking)
 * and returns the surface normal at the part's location. Handles three container cases:
 * racks, roofs (picking the roof section most edit points land on), and foundations with
 * imported 3D nodes (picking the highest intersected mesh).
 *
 * @return the container's surface normal, or null when there is no container
 */
protected ReadOnlyVector3 computeNormalAndKeepOnSurface() {
    if (container == null) {
        return null;
    }
    if (container instanceof Rack) {
        final Rack rack = (Rack) container;
        final PickResults pickResults = new PrimitivePickResults();
        // cast a vertical ray up from the part's (x, y) to find the rack surface height
        final Ray3 ray = new Ray3(getAbsPoint(0).multiplyLocal(1, 1, 0), Vector3.UNIT_Z);
        PickingUtil.findPick(container.getCollisionSpatial(), ray, pickResults, false);
    if (pickResults.getNumber() != 0) {
            final PickData pickData = pickResults.getPickData(0);
            final Vector3 p = pickData.getIntersectionRecord().getIntersectionPoint(0);
            points.get(0).setZ(p.getZ());
        } else {
            // tilted racks can dip below z = 0; retry with a downward ray in that case
            if (rack.getBaseHeight() < Math.abs(0.5 * rack.getRackHeight() / Scene.getInstance().getAnnotationScale() * Math.sin(Math.toRadians(rack.getTiltAngle())))) {
                final Ray3 ray2 = new Ray3(getAbsPoint(0).multiplyLocal(1, 1, 0), Vector3.NEG_UNIT_Z);
                PickingUtil.findPick(container.getCollisionSpatial(), ray2, pickResults, false);
                if (pickResults.getNumber() != 0) {
                    final PickData pickData = pickResults.getPickData(0);
                    final Vector3 p = pickData.getIntersectionRecord().getIntersectionPoint(0);
                    points.get(0).setZ(p.getZ());
                }
            }
        }
        return rack.getNormal();
    } else if (container instanceof Roof) {
        final Roof roof = (Roof) container;
        final int[] editPointToRoofIndex = new int[points.size()];
        final PickResults pickResults = new PrimitivePickResults();
        for (int i = 0; i < points.size(); i++) {
            pickResults.clear();
            final Ray3 ray = new Ray3(getAbsPoint(i).multiplyLocal(1, 1, 0), Vector3.UNIT_Z);
            // test each visible roof section until one is hit
            for (final Spatial roofPart : roof.getRoofPartsRoot().getChildren()) {
                if (roofPart.getSceneHints().getCullHint() != CullHint.Always) {
                    PickingUtil.findPick(((Node) roofPart).getChild(0), ray, pickResults, false);
                    if (pickResults.getNumber() != 0) {
                        break;
                    }
                }
            }
            if (pickResults.getNumber() != 0) {
                final PickData pickData = pickResults.getPickData(0);
                final Vector3 p = pickData.getIntersectionRecord().getIntersectionPoint(0);
                points.get(i).setZ(p.getZ());
                final UserData userData = (UserData) ((Spatial) pickData.getTarget()).getUserData();
                final int roofPartIndex = userData.getEditPointIndex();
                editPointToRoofIndex[i] = roofPartIndex;
            }
            // find roofPart with most edit points on it
            containerRoofIndex = editPointToRoofIndex[0];
            if (points.size() > 1) {
                containerRoofIndex = 0;
                final Map<Integer, Integer> counts = new HashMap<Integer, Integer>(points.size());
                for (final int roofIndex : editPointToRoofIndex) {
                    counts.put(roofIndex, counts.get(roofIndex) == null ? 1 : counts.get(roofIndex) + 1);
                }
                int highestCount = 0;
                for (final int roofIndex : editPointToRoofIndex) {
                    if (counts.get(roofIndex) > highestCount) {
                        highestCount = counts.get(roofIndex);
                        containerRoofIndex = roofIndex;
                    }
                }
            }
        }
        // each roof section stores its normal in its node's user data
        return (ReadOnlyVector3) roof.getRoofPartsRoot().getChild(containerRoofIndex).getUserData();
    } else if (container instanceof Foundation) {
        final Foundation foundation = (Foundation) container;
        final List<Node> nodes = foundation.getImportedNodes();
        if (nodes != null) {
            // collect every intersection with imported meshes, then take the highest one
            final Map<Vector3, ReadOnlyVector3> intersections = new HashMap<Vector3, ReadOnlyVector3>();
            final PickResults pickResults = new PrimitivePickResults();
            for (final Node n : nodes) {
                for (final Spatial s : n.getChildren()) {
                    if (s instanceof Mesh) {
                        final Mesh m = (Mesh) s;
                        pickResults.clear();
                        PickingUtil.findPick(m, new Ray3(getAbsPoint(0).multiplyLocal(1, 1, 0), Vector3.UNIT_Z), pickResults, false);
                        if (pickResults.getNumber() > 0) {
                            intersections.put(pickResults.getPickData(0).getIntersectionRecord().getIntersectionPoint(0), ((UserData) m.getUserData()).getNormal());
                        }
                    }
                }
            }
            if (!intersections.isEmpty()) {
                double zmax = -Double.MAX_VALUE;
                ReadOnlyVector3 normal = null;
                for (final Vector3 v : intersections.keySet()) {
                    if (v.getZ() > zmax) {
                        zmax = v.getZ();
                        normal = intersections.get(v);
                    }
                }
                if (normal != null) {
                    pickedNormal = normal;
                    return normal;
                }
            }
        }
    }
    return container.getNormal();
}
	/**
	 * Determines whether the given child part may be placed on this container.
	 * This default implementation accepts any child; presumably subclasses
	 * override it to impose geometric constraints — TODO confirm.
	 */
	protected boolean fits(final HousePart child) {
		return true;
	}
} |
package ch.ntb.inf.deep.eclipse.ui.view;
import org.eclipse.jface.action.Action;
import org.eclipse.jface.action.IMenuManager;
import org.eclipse.jface.action.IToolBarManager;
import org.eclipse.jface.resource.ImageDescriptor;
import org.eclipse.jface.text.Document;
import org.eclipse.jface.text.TextViewer;
import org.eclipse.jface.viewers.ISelectionChangedListener;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.jface.viewers.ITreeContentProvider;
import org.eclipse.jface.viewers.LabelProvider;
import org.eclipse.jface.viewers.SelectionChangedEvent;
import org.eclipse.jface.viewers.TreeViewer;
import org.eclipse.jface.viewers.Viewer;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.ControlAdapter;
import org.eclipse.swt.events.ControlEvent;
import org.eclipse.swt.graphics.Font;
import org.eclipse.swt.graphics.FontData;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.graphics.Rectangle;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.ui.IActionBars;
import org.eclipse.ui.ISharedImages;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.part.ViewPart;
import ch.ntb.inf.deep.cfg.CFG;
import ch.ntb.inf.deep.cgPPC.CodeGen;
import ch.ntb.inf.deep.classItems.Class;
import ch.ntb.inf.deep.classItems.ClassMember;
import ch.ntb.inf.deep.classItems.DataItem;
import ch.ntb.inf.deep.classItems.ICclassFileConsts;
import ch.ntb.inf.deep.classItems.Item;
import ch.ntb.inf.deep.classItems.Method;
import ch.ntb.inf.deep.classItems.NamedConst;
import ch.ntb.inf.deep.classItems.Type;
import ch.ntb.inf.deep.config.Configuration;
import ch.ntb.inf.deep.config.Device;
import ch.ntb.inf.deep.config.MemoryMap;
import ch.ntb.inf.deep.config.Segment;
import ch.ntb.inf.deep.linker.BlockItem;
import ch.ntb.inf.deep.linker.FixedValueItem;
import ch.ntb.inf.deep.ssa.SSA;
import ch.ntb.inf.deep.strings.HString;
public class ClassTreeView extends ViewPart implements ISelectionChangedListener, ICclassFileConsts {
public static final String ID = "ch.ntb.inf.deep.eclipse.ui.view.ClassTreeView";
private TreeViewer classTreeViewer;
private TreeViewer deviceTreeViewer;
private TextViewer textViewer;
private Action refresh;
class ClassTreeLabelProvider extends LabelProvider {
public Image getImage(Object element) {
return null;
}
public String getText(Object element) {
if(element instanceof Item){
if(element instanceof Method){
return ((Method)element).name.toString() + ((Method)element).methDescriptor.toString();
}
return ((Item)element).name.toString();
}else{
if(element instanceof RootElement)return ((RootElement)element).name.toString();
if(element instanceof CFG)return "CFG";
if(element instanceof SSA)return "SSA";
if(element instanceof CodeGen)return "MachineCode";
if(element instanceof String)return (String) element;
return "";
}
}
}
class ClassTreeContentProvider implements ITreeContentProvider {
public Object[] getChildren(Object parent) {
Object[] item = null;
if(parent instanceof Class){
Class clazz = (Class)parent;
int nofChildren = 0;
//determine number of children
//check if methods exists
if(clazz.nofMethods > 0){
nofChildren++;
}
//check if classFields exists
if(clazz.nofClassFields > 0){
nofChildren++;
}
//check if constFields exists
if(clazz.nofConstFields > 0){
nofChildren++;
}
//create array for children
item = new Object[nofChildren];
//fill array
int index = 0;
//add constFields if they exists
if(clazz.nofConstFields > 0){
item[index++] = new ClassChild(HString.getHString("ConstFields"), clazz, clazz.constFields);
}
//add classFields if they exists
if(clazz.nofClassFields > 0){
item[index++] = new ClassChild(HString.getHString("ClassFields"), clazz, clazz.classFields);
}
//add methods if they exists
if(clazz.nofMethods > 0){
item[index++] = new ClassChild(HString.getHString("Methods"), clazz, clazz.methods);
}
return item;
}
if(parent instanceof Method){
Method meth = (Method)parent;
//every method have 3 children: cfg, ssa and machineCode
//create array for children
item = new Object[3];
//fill array
item[0] = meth.cfg;
item[1] = meth.ssa;
item[2] = meth.machineCode;
return item;
}
if(parent instanceof ClassChild){
int index = 0;
Class clazz = (Class)((ClassChild)parent).owner;
Item child;
if(((ClassChild)parent).name.equals(HString.getHString("ConstFields"))){
item = new Object[clazz.nofConstFields];
child = clazz.constFields;
while(child != null && index < item.length){
item[index++] = child;
child = child.next;
}
return item;
}
if(((ClassChild)parent).name.equals(HString.getHString("ClassFields"))){
item = new Object[clazz.nofClassFields];
child = clazz.classFields;
while(child != null && index < item.length){
item[index++] = child;
child = child.next;
}
return item;
}
if(((ClassChild)parent).name.equals(HString.getHString("Methods"))){
item = new Object[clazz.nofMethods];
child = clazz.methods;
while(child != null && index < item.length){
item[index++] = child;
child = child.next;
}
return item;
}
}
if(parent instanceof RootElement){
if(Type.nofClasses < 1)return new Object[]{"No Classses loaded"};
Item[] classes = new Item[Type.nofClasses];
int count = 0;
Item classmember = ((RootElement)parent).children;
while(classmember != null && count < classes.length){
if(classmember instanceof Class){
classes[count++] = classmember;
}
classmember = classmember.next;
}
return classes;
}
return item;
}
public Object getParent(Object element) {
if(element instanceof ClassMember){
return ((ClassMember)element).getOwner();
}
return null;
}
public boolean hasChildren(Object element) {
//Classes and methods have always children
if(element instanceof Class){
if(((Class)element).nofMethods > 0)return true;
if(((Class)element).nofClassFields > 0)return true;
if(((Class)element).nofConstFields > 0)return true;
}
if(element instanceof Method)return true;
if(element instanceof ClassChild)return true;
if(element instanceof RootElement)return true;
return false;
}
@Override
public Object[] getElements(Object inputElement) {
if(!(inputElement instanceof TreeInput))return new Object[]{""};
return new Object[]{((TreeInput)inputElement).obj};
}
@Override
public void dispose() {
}
@Override
public void inputChanged(Viewer viewer, Object oldInput, Object newInput) {
}
}
class DeviceTreeLabelProvider extends LabelProvider {
public Image getImage(Object element) {
return null;
}
public String getText(Object element) {
if(element instanceof MemoryMap){
return "Memory Map";
}
if(element instanceof Device){
return((Device)element).getName().toString();
}
if(element instanceof Segment){
return ((Segment)element).getName().toString();
}
if(element instanceof String){
return (String)element;
}
return "";
}
}
class DeviceTreeContentProvider implements ITreeContentProvider {
@Override
public Object[] getChildren(Object parentElement) {
if(parentElement instanceof MemoryMap){
MemoryMap memMap = (MemoryMap)parentElement;
if(memMap.getDevices() == null)return new Object[]{"No memory map loaded"};
Device[] devices = new Device[memMap.getNofDevices()];
Device dev = memMap.getDevices();
for(int i = 0; i < devices.length && dev != null;i++){
devices[i] = dev;
dev = dev.next;
}
return devices;
}
if(parentElement instanceof Device){
Segment segs =((Device)parentElement).segments;
Segment current = segs;
int count;
for(count = 0; current != null; count++)current = current.next;
if(count > 0){
Segment seg[] = new Segment[count];
for(int i = 0; i < seg.length && segs != null; i++){
seg[i] = segs;
segs = segs.next;
}
return seg;
}
}
if(parentElement instanceof Segment){
Segment segs =((Device)parentElement).segments;
Segment current = segs;
int count;
for(count = 0; current != null; count++)current = current.next;
if(count > 0){
Segment seg[] = new Segment[count];
for(int i = 0; i < seg.length && segs != null; i++){
seg[i] = segs;
segs = segs.next;
}
return seg;
}
}
return null;
}
@Override
public Object getParent(Object element) {
if(element instanceof Segment){
Segment seg = (Segment)element;
if(seg.parent != null)return seg.parent;
return seg.owner;
}
return null;
}
@Override
public boolean hasChildren(Object element) {
if(element instanceof MemoryMap){
return true;
}
if(element instanceof Device){
if(((Device)element).segments != null){
return true;
}
}
if(element instanceof Segment){
if(((Segment)element).subSegments != null){
return true;
}
}
return false;
}
@Override
public Object[] getElements(Object inputElement) {
if(!(inputElement instanceof TreeInput))return new Object[]{""};
return new Object[]{((TreeInput)inputElement).obj};
}
@Override
public void dispose() {
}
@Override
public void inputChanged(Viewer viewer, Object oldInput, Object newInput) {
}
}
	@Override
	public void createPartControl(Composite parent) {
		// Layout: five equal columns; each tree takes two columns, the text
		// viewer takes the remaining three and spans both rows.
		GridLayout layout = new GridLayout(5, true);
		parent.setLayout(layout);
		// Upper-left area: tree of all loaded classes.
		classTreeViewer = new TreeViewer(parent, SWT.SINGLE);
		GridData classTreeViewerData = new GridData(SWT.FILL, SWT.FILL, true, true);
		classTreeViewerData.horizontalSpan = 2;
		classTreeViewer.getControl().setLayoutData(classTreeViewerData);
		classTreeViewer.setLabelProvider(new ClassTreeLabelProvider());
		classTreeViewer.setContentProvider(new ClassTreeContentProvider());
		classTreeViewer.setAutoExpandLevel(2);
		classTreeViewer.setInput(new TreeInput(new RootElement(HString.getHString("ClassList"), Type.classList)));
		classTreeViewer.addSelectionChangedListener(this);
		// Right area: read-only detail view for the selected element.
		textViewer = new TextViewer(parent, SWT.V_SCROLL | SWT.H_SCROLL | SWT.SCROLL_PAGE);
		GridData textViewerData = new GridData(GridData.FILL, GridData.FILL, true, true);
		textViewerData.horizontalSpan = 3;
		textViewerData.verticalSpan = 2;
		textViewer.getControl().setLayoutData(textViewerData);
		Document doc = new Document();
		textViewer.setDocument(doc);
		// Lower-left area: tree of the configured target memory map.
		deviceTreeViewer = new TreeViewer(parent, SWT.SINGLE);
		GridData deviceTreeViewerData = new GridData(SWT.FILL, SWT.FILL,true, true);
		deviceTreeViewerData.horizontalSpan = 2;
		deviceTreeViewer.getControl().setLayoutData(deviceTreeViewerData);
		deviceTreeViewer.setLabelProvider(new DeviceTreeLabelProvider());
		deviceTreeViewer.setContentProvider(new DeviceTreeContentProvider());
		deviceTreeViewer.setAutoExpandLevel(2);
		deviceTreeViewer.setInput(new TreeInput(MemoryMap.getInstance()));
		deviceTreeViewer.addSelectionChangedListener(this);
		//get Display needs to set the font
		Display d =parent.getShell().getDisplay();
		if(d != null){
			// Monospaced font so the dumped tables and addresses line up.
			// NOTE(review): this Font is never disposed; SWT fonts are OS resources
			// and should be freed when the view is disposed — TODO confirm and fix.
			FontData defaultFont = new FontData("Courier", 9, SWT.NORMAL);
			Font font = new Font(d, defaultFont);
			textViewer.getControl().setFont(font);
		}
		textViewer.setEditable(false);
		createActions();
		contributeToActionBars();
	}
	/**
	 * Gives keyboard focus to the class tree when the view is activated.
	 */
	@Override
	public void setFocus() {
		classTreeViewer.getControl().setFocus();
	}
private void createActions() {
refresh = new Action(){
public void run(){
classTreeViewer.setInput(new TreeInput(new RootElement(HString.getHString("ClassList"), Type.classList)));
classTreeViewer.getControl().setEnabled(true);
classTreeViewer.refresh();
deviceTreeViewer.setInput(new TreeInput(MemoryMap.getInstance()));
deviceTreeViewer.getControl().setEnabled(true);
deviceTreeViewer.refresh();
}
};
refresh.setText("Refresh");
ImageDescriptor img = ImageDescriptor.createFromImage(PlatformUI.getWorkbench().getSharedImages().getImage(ISharedImages.IMG_TOOL_REDO));
refresh.setImageDescriptor(img);
}
	/**
	 * Registers this view's actions with its pull-down menu and toolbar.
	 */
	private void contributeToActionBars() {
		IActionBars bars = getViewSite().getActionBars();
		fillLocalPullDown(bars.getMenuManager());
		fillLocalToolBar(bars.getToolBarManager());
	}
	/**
	 * Populates the view's pull-down menu; intentionally empty — no menu
	 * entries are contributed at the moment.
	 */
	private void fillLocalPullDown(IMenuManager menu) {
	}
	/**
	 * Adds the refresh action to the view's toolbar.
	 */
	private void fillLocalToolBar(IToolBarManager manager) {
		manager.add(refresh);
	}
	/**
	 * Tree node representing one member category of a class ("ConstFields",
	 * "ClassFields" or "Methods"); keeps a reference to the owning class so the
	 * content provider can look up the actual member list.
	 */
	class ClassChild extends RootElement{
		// The class this category belongs to.
		Item owner;
		ClassChild(HString name,Item owner, Item children){
			super(name, children);
			this.owner = owner;
		}
	}
	/**
	 * Generic named tree node wrapping the head of a linked list of items
	 * (linked via {@code Item.next}).
	 */
	class RootElement{
		// Display name of the node.
		HString name;
		// Head of the linked child list.
		Item children;
		RootElement(HString name, Item children){
			this.name = name;
			this.children = children;
		}
	}
class TreeInput{
public Object obj;
TreeInput(Object obj){
this.obj = obj;
}
}
	/**
	 * Displays a textual summary of the element selected in either tree inside
	 * the detail text viewer. Classes, methods and data fields get a formatted
	 * property listing; CFG/SSA/machine-code nodes as well as devices and
	 * segments are rendered via their toString() dump. Other selections leave
	 * the viewer untouched.
	 */
	@Override
	public void selectionChanged(SelectionChangedEvent event) {
		Object obj = ((IStructuredSelection)event.getSelection()).getFirstElement();
		StringBuilder sb = new StringBuilder();
		if(obj instanceof Class){
			Class c = (Class)obj;
			sb.append("Name: " + c.name + "\n");
			sb.append("Number of class methods: " + c.nofClassMethods + "\n");
			sb.append("Number of instance methods: " + c.nofInstMethods + "\n");
			sb.append("Number of class fields: " + c.nofClassFields + "\n");
			// Address information is only printed for ordinary classes; the
			// interface flag check suggests interfaces get no segments — TODO confirm.
			if((c.accAndPropFlags & (1 << apfInterface)) == 0){
				sb.append("Class field base address: 0x" + Integer.toHexString(c.varSegment.getBaseAddress() + c.varOffset) + "\n");
			}
			sb.append("Class fields size: " + c.classFieldsSize + " byte\n");
			sb.append("Number of instance fields: " + c.nofInstFields + "\n");
			sb.append("Instance size: " + c.objectSize + " byte\n");
			sb.append("Number of interfaces: " + c.nofInterfaces + "\n");
			sb.append("Number of base classes: " + c.extensionLevel + "\n");
			sb.append("Number of references: " + c.nofClassRefs + "\n");
			sb.append("Max extension level: " + Class.maxExtensionLevel + "\n");
			if((c.accAndPropFlags & (1 << apfInterface)) == 0){
				sb.append("Machine code base address: 0x" + Integer.toHexString(c.codeSegment.getBaseAddress() + c.codeOffset) + "\n");
				sb.append("Machine code size: " + ((FixedValueItem)c.codeBase.next).getValue() + " byte\n");
				sb.append("Constant block base address: 0x" + Integer.toHexString(c.constSegment.getBaseAddress() + c.constOffset) + "\n");
				sb.append("Constant block size: " + ((FixedValueItem)c.constantBlock).getValue() + " byte\n");
			}
			sb.append("Type descriptor address: 0x" + Integer.toHexString(c.address) + "\n");
			if((c.accAndPropFlags & (1 << apfInterface)) == 0){
				// Dump every entry of the linked constant-block list.
				sb.append("\nConstantblock:\n");
				BlockItem item = c.constantBlock;
				while(item != null){
					sb.append(item.toString() + "\n");
					item = item.next;
				}
			}
			textViewer.getDocument().set(sb.toString());
			textViewer.refresh();
			return;
		}
		if(obj instanceof Method){
			Method m = (Method)obj;
			sb.append("Name: " + m.name + "\n");
			sb.append("Accessibility: ");
			if((m.accAndPropFlags & (1 << apfPublic)) != 0){
				sb.append("public\n");
			}else if((m.accAndPropFlags & (1 << apfPrivate)) != 0){
				sb.append("private\n");
			}else if((m.accAndPropFlags & (1 << apfProtected)) != 0){
				sb.append("protected\n");
			}else if ((m.accAndPropFlags & (1 << dpfSysPrimitive)) != 0){
				// NOTE(review): missing trailing "\n" here, so the next label is
				// appended to the same line — confirm and fix separately.
				sb.append("special system primitive");
			}else{
				// NOTE(review): no accessibility flag set normally means
				// package-private, yet "protected" is printed — looks like a
				// copy/paste slip; confirm before changing the output.
				sb.append("protected\n");
			}
			sb.append("Static: ");
			// System primitives are reported as static as well.
			if((m.accAndPropFlags & (1 << apfStatic)) != 0 || (m.accAndPropFlags & (1 << dpfSysPrimitive)) != 0 ){
				sb.append("yes\n");
			}else{
				sb.append("no\n");
			}
			sb.append("Synthetic: ");
			if((m.accAndPropFlags & (1 << dpfSynthetic)) != 0){
				sb.append("yes\n");
			}else{
				sb.append("no\n");
			}
			sb.append("Address: 0x" + Integer.toHexString(m.address) + "\n");
			sb.append("Offset: 0x" + Integer.toHexString(m.offset) + "\n");
			sb.append("Index: 0x" + Integer.toHexString(m.index) + "\n");
			textViewer.getDocument().set(sb.toString());
			textViewer.refresh();
			return;
		}
		if(obj instanceof DataItem){
			DataItem field = (DataItem)obj;
			sb.append("Name: " + field.name.toString() + "\n");
			sb.append("Type: " + decodeFieldType(field.type.name) + "\n");
			sb.append("Accessibility: ");
			if((field.accAndPropFlags & (1 << apfPublic)) != 0){
				sb.append("public\n");
			}else if((field.accAndPropFlags & (1 << apfPrivate)) != 0){
				sb.append("private\n");
			}else if((field.accAndPropFlags & (1 << apfProtected)) != 0){
				sb.append("protected\n");
			}else if ((field.accAndPropFlags & (1 << dpfSysPrimitive)) != 0){
				// NOTE(review): missing trailing "\n" here as well — see Method branch.
				sb.append("special system primitive");
			}else{
				// NOTE(review): prints "protected" for "no flag set" — see Method branch.
				sb.append("protected\n");
			}
			sb.append("Constant: ");
			if((field.accAndPropFlags & (1 << dpfConst)) != 0){
				sb.append("yes\n");
				sb.append("Value: " + ((NamedConst)field).getConstantItem().toString() + "\n");
			}else{
				sb.append("no\n");
			}
			sb.append("address: 0x" + Integer.toHexString(field.address) + "\n");
			sb.append("offset: 0x" + Integer.toHexString(field.offset) + "\n");
			textViewer.getDocument().set(sb.toString());
			textViewer.refresh();
			return;
		}
		// The remaining element kinds render themselves via toString().
		if(obj instanceof CFG){
			CFG cfg = (CFG)obj;
			textViewer.getDocument().set(cfg.toString());
			textViewer.refresh();
			return;
		}
		if(obj instanceof SSA){
			SSA ssa = (SSA)obj;
			textViewer.getDocument().set(ssa.toString());
			textViewer.refresh();
			return;
		}
		if(obj instanceof CodeGen){
			CodeGen machineCode = (CodeGen)obj;
			textViewer.getDocument().set(machineCode.toString());
			textViewer.refresh();
			return;
		}
		if(obj instanceof Device){
			Device dev = (Device)obj;
			textViewer.getDocument().set(dev.toString());
			textViewer.refresh();
			return;
		}
		if(obj instanceof Segment){
			Segment seg = (Segment)obj;
			textViewer.getDocument().set(seg.toString());
			textViewer.refresh();
			return;
		}
	}
private String decodeFieldType(HString type){
StringBuilder sb = new StringBuilder();
int index = 0;
int dim = 0;
while(type.charAt(index) == '['){
index++;
dim++;
}
switch(type.charAt(index)){
case 'B':
sb.append("byte");
break;
case 'C':
sb.append("char");
break;
case 'D':
sb.append("double");
break;
case 'F':
sb.append("float");
break;
case 'I':
sb.append("int");
break;
case 'J':
sb.append("long");
break;
case 'S':
sb.append("short");
break;
case 'Z':
sb.append("boolean");
break;
default:
sb.append(type.toString());
break;
}
for(int i = 0; i < dim; i++){
sb.append("[]");
}
return sb.toString();
}
} |
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.PrintWriter;
/**
* Servlet implementation class surveyaccess
*/
@WebServlet("/surveyaccess")
public class surveyaccess extends HttpServlet {
public String eename="dan";
private static final long serialVersionUID = 1L;
/**
* @see HttpServlet#HttpServlet()
*/
	public surveyaccess() {
		// Default constructor; no servlet-specific initialisation is required.
		super();
	}
/**
* @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse response)
*/
protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
// TODO Auto-generated method stub
response.getWriter().append("Served at: ").append(request.getContextPath());
}
/**
* @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse response)
*/
protected void doPost(HttpServletRequest request,
HttpServletResponse response) throws ServletException, IOException {
try{
// read form fields
String ename = request.getParameter("ename");
String Band = request.getParameter("Band");
String eID = request.getParameter("eID");
String ServiceLine = request.getParameter("SL");
String BluepagesManagerName = request.getParameter("BMN");
String Service_Area = request.getParameter("SA");
String Primary_Job_Role = request.getParameter("pr");
String Testscript = request.getParameter("tscript");
String Test_data_creation = request.getParameter("tdata");
String Execute_test_scripts = request.getParameter("ExecuteTest");
String Write_defects = request.getParameter("WriteDefects");
String Effectively_communicate_to_stakeholders_client = request.getParameter("Effectivelycommunicate");
String Use_of_HP_Application_Lifecycle_Management = request.getParameter("UseofHPApplication");
String Use_of_Rational_ClearCase = request.getParameter("UseofRational");
String Use_of_Rational_Collaborative_Life_cycle_Management = request.getParameter("Use_of_Rational_Collaborative");
String Use_of_Rational_Doors = request.getParameter("UseofRationalManual");
String Use_of_Rational_Manual_Tester=request.getParameter("UseofRationalManual");
String Use_of_Rational_Quality_Manager=request.getParameter("QualityManager");
String Use_of_Rational_Requirements_Composer=request.getParameter("UseofRationalRequirementsComposer");
String Use_of_Rational_Pro=request.getParameter("UseOfRationalRPro");
String Use_of_Rational_Software_Architect_Design_Manager=request.getParameter("Usadm");
String Use_of_Rational_Team_Concert=request.getParameter("Useteamc");
String Use_Rational_TestManager=request.getParameter("Use_Rational_TestManager");
String Continuos_Integration_Continuous_Testing=request.getParameter("Continuos_Integration_Continuous_Testing");
String Test_driven_development_TDD_Behavior_driven_development_BDD=request.getParameter("TDD");
String Test_Architect_Technical_Leadership=request.getParameter("TestArch");
String Create_test_plan=request.getParameter("Createtestplan");
String Create_test_plan_data=request.getParameter("Create_test_data_plan");
String Create_test_schedula=request.getParameter("Create_test_schedul");
String Review_and_track_test_team_schedule_progress=request.getParameter("Review_and_track");
String Resource_onboarding=request.getParameter("Resource_onboarding");
String Create_and_present_test_status_and_reports_to_key_stakeholders=request.getParameter("Create_and_present_test_s");
String Prepare_Analyze_test_metric=request.getParameter("Prepare_Analyze_test_metric");
String Perform_defect_management=request.getParameter("Perform_defect_management");
String Create_test_estimates_and_budgets=request.getParameter("creattestestimates");
String Perform_cost_accounting_and_provide_status=request.getParameter("performcostaccounting");
String Agile_SCRUM_Master_experience=request.getParameter("Agile_SCRUM_Master_experience");
String Consult_on_test_solutions=request.getParameter("Consult_on_test_solutions");
String Experience_in_Yrs__with_Software_Dev_Methodology_SDLC_Agile_SCRUM_Kanban_Iterative=request.getParameter("einyrsws");
String Participate_in_and_support=request.getParameter("Participate_in_and_support");
String Lead_test_assessments=request.getParameter("Participate_in_and_support");
String testing_using_accelerators=request.getParameter("testing_using_acceleratorst");
String Conduct_data_analytics_study=request.getParameter("conductdataana");
String Support_troubled_test_programs =request.getParameter("Support_troubled_test_programs");
String Develop_Test_proposals =request.getParameter("Develop_Test_proposals");
String Build_test_automation_frameworks =request.getParameter("Build_test_automation");
String Lead_test_automation_efforts =request.getParameter("LeadTestA");
String Design_test_automation_scripts =request.getParameter("Design_test_automation_scripts");
String Knowledge_of_Programming_Languages =request.getParameter("Knowledge_of_Programming_Languages");
String Write_performance_testing_scripts =request.getParameter("Write_performance_testing_scripts");
String Rational_Functional_Tester =request.getParameter("Rational_Functional_Tester");
String IBM_ATA_Automating_Test_Automation =request.getParameter("IBM_ATA_Automating_Test_Automation");
String QA_Partner =request.getParameter("QA_Partner");
String HP_Quality_Center_QC =request.getParameter("HP_Quality_Center_QC");
String HP_WinRunner =request.getParameter("HP_WinRunner");
String HP_Unified_Functional_Testing_UFT =request.getParameter("HP_Unified_Functional_Testing_UFT");
String HP_QuickTest_Pro_QTP =request.getParameter("HP_QuickTest_Pro_QTP ");
String Selenium_IDE_Integrated_Development_Environment =request.getParameter("Selenium_IDE_Integrated_Development_Environment");
String Selenium_RC_WebDriver =request.getParameter("Selenium_RC_WebDriver");
String Build_performance_testing_frameworks =request.getParameter("Build_performance_testing_frameworks");
String leadperformancetest =request.getParameter("leadperformancetest");
String Design_performance_testing_scripts =request.getParameter("Design_performance_testing_scripts");
String Knowledge_of_Programming_Languagestwo =request.getParameter("Knowledge_of_Programming_Languagestwo");
String Write_performance_testing_scriptstw0 =request.getParameter("Write_performance_testing_scriptstw0");
String Rational_Performance_Tester0 =request.getParameter("Rational_Performance_Tester0");
String HP_Quality_Center_LoadRunner =request.getParameter("HP_Quality_Center_LoadRunner");
String HP_Performance_Center =request.getParameter("HP_Performance_Center");
String Silk_Performer =request.getParameter("Silk_Performer");
String Apache_Jmeter =request.getParameter("Apache_Jmeter");
String Parasoft_SOAtest =request.getParameter("Parasoft_SOAtest");
String Perform_mobile_application_testing =request.getParameter("Perform_mobile_application_testing");
String Perform_security_testing =request.getParameter("Perform_security_testing");
String Perform_data_analytics_testing =request.getParameter("Perform_data_analytics_testing");
String Perform_testing_in_the_Cloud =request.getParameter("Perform_testing_in_the_Cloud");
String Perfrom_Agile_testing =request.getParameter("Perfrom_Agile_testing");
String Junit_TestNG_Java=request.getParameter("Junit_TestNG_Java");
String Cucumber_Celerity=request.getParameter("Cucumber_Celerity");
String _508_Testing_tools_JAWS=request.getParameter("508_Testing_tools_JAWS");
String _508_Testing_tools_Dragon=request.getParameter("_508_Testing_tools_Dragon");
String _508_Testing_tools_Zoomtext=request.getParameter("_508_Testing_tools_Zoomtext");
String _508_Testing_tools_Compliance_Sheriff=request.getParameter("_508_Testing_tools_Compliance_Sheriff");
String _508_Testing_tools_Object_Inspector=request.getParameter("_508_Testing_tools_Object_Inspector");
String _508_Testing_tools_Java_Ferret=request.getParameter("_508_Testing_tools_Java_Ferret");
String _508_Testing_tools_Web_Accessibility_Toolbar_WAT=request.getParameter("_508_Testing_tools_Web_Accessibility_Toolbar_WAT");
String _508_Testing_tools_Adobe_Acrobat_Pro_PDF=request.getParameter("_508_Testing_tools_Adobe_Acrobat_Pro_PDF");
String IA_Tool_Scan_FortiFy=request.getParameter("IA_Tool_Scan_FortiFy");
String IA_Tool_Scan_AppScan=request.getParameter("IA_Tool_Scan_AppScan");
String _Knowledge_of_Programming_Languages=request.getParameter("_Knowledge_of_Programming_Languages");
String Java_J2EE_Net_Perl_Python_Shell_Scripting_Power_Shell_SQL_Other_Pls_list=request.getParameter("javajee");
String Retail=request.getParameter("Retail");
String Manufacturing=request.getParameter("Manufacturing");
String Health_Care=request.getParameter("Health_Care");
String Transportation=request.getParameter("Transportation");
String Banking=request.getParameter("Banking");
String Finance=request.getParameter("Finance");
String Government=request.getParameter("Government");
String Current_Clearance_s_status=request.getParameter("Current_Clearance_s_status");
String Additional_Comments=request.getParameter("Additional_Comments");
String E=request.getParameter("SLw");
String p=request.getParameter("p");
PrintWriter writerr = response.getWriter();
int []dan=new int[3];
while(Testscript==""){
dan[3]=0;
writerr.println("entered while "+ Testscript);
}
//the error was with the if it does not do correct compare
if (E=="y"){
writerr.println("entered error phase"+ E);
writerr.println("entered error phase "+Testscript);
dan[3]=0;
}
else{
writerr.println("entered else0 " +E );
writerr.println("entered else0 "+ p);
}
//String password = request.getParameter("password");
eename=ename;
System.out.println("username: " + ename);
//System.out.println("password: " + password);
// do some processing here...
// get response writer
PrintWriter writer = response.getWriter();
// build HTML code
String htmlRespone = "<html>";
htmlRespone += ename + " Thank You for summitting this survey" + "<br/>";
//htmlRespone += "Your password is: " + password + "</h2>";
htmlRespone += "</html>";
SurveyDao danielhdelva= new SurveyDao();
// if(eID=="" ||eID==null){
// eID=""+Math.random();
danielhdelva.create(ename, eID,Band,ServiceLine,BluepagesManagerName,Service_Area,Primary_Job_Role,Testscript,Test_data_creation,Execute_test_scripts,Write_defects,Effectively_communicate_to_stakeholders_client,Use_of_HP_Application_Lifecycle_Management,Use_of_Rational_ClearCase,Use_of_Rational_Collaborative_Life_cycle_Management,Use_of_Rational_Doors,Use_of_Rational_Manual_Tester,Use_of_Rational_Quality_Manager,Use_of_Rational_Requirements_Composer,Use_of_Rational_Pro,Use_of_Rational_Software_Architect_Design_Manager,Use_of_Rational_Team_Concert,Use_Rational_TestManager,Continuos_Integration_Continuous_Testing,Test_driven_development_TDD_Behavior_driven_development_BDD,Test_Architect_Technical_Leadership,
Create_test_plan, Create_test_plan_data,Create_test_schedula,Review_and_track_test_team_schedule_progress, Resource_onboarding,Create_and_present_test_status_and_reports_to_key_stakeholders,Prepare_Analyze_test_metric,Perform_defect_management,Create_test_estimates_and_budgets,Perform_cost_accounting_and_provide_status,Agile_SCRUM_Master_experience,Consult_on_test_solutions,Experience_in_Yrs__with_Software_Dev_Methodology_SDLC_Agile_SCRUM_Kanban_Iterative,Participate_in_and_support,Lead_test_assessments,testing_using_accelerators,Conduct_data_analytics_study,Support_troubled_test_programs,Develop_Test_proposals,Build_test_automation_frameworks,Lead_test_automation_efforts,Design_test_automation_scripts,
Knowledge_of_Programming_Languages,Write_performance_testing_scripts,Rational_Functional_Tester,IBM_ATA_Automating_Test_Automation,QA_Partner,HP_Quality_Center_QC,HP_WinRunner,HP_Unified_Functional_Testing_UFT,HP_QuickTest_Pro_QTP,Selenium_IDE_Integrated_Development_Environment,Selenium_RC_WebDriver,Build_performance_testing_frameworks,leadperformancetest,Design_performance_testing_scripts,Knowledge_of_Programming_Languagestwo,
Write_performance_testing_scriptstw0,Rational_Performance_Tester0,HP_Quality_Center_LoadRunner,HP_Performance_Center,Silk_Performer,Apache_Jmeter,Parasoft_SOAtest,Perform_mobile_application_testing,Perform_security_testing,Perform_data_analytics_testing,Perform_testing_in_the_Cloud,Perfrom_Agile_testing,Junit_TestNG_Java,Cucumber_Celerity,_508_Testing_tools_JAWS,_508_Testing_tools_Dragon,_508_Testing_tools_Zoomtext,_508_Testing_tools_Compliance_Sheriff,_508_Testing_tools_Object_Inspector,_508_Testing_tools_Java_Ferret,_508_Testing_tools_Web_Accessibility_Toolbar_WAT,_508_Testing_tools_Adobe_Acrobat_Pro_PDF,IA_Tool_Scan_FortiFy,IA_Tool_Scan_AppScan,_Knowledge_of_Programming_Languages,Java_J2EE_Net_Perl_Python_Shell_Scripting_Power_Shell_SQL_Other_Pls_list,Retail,Manufacturing,Health_Care,Transportation,Banking,Finance,Government,Current_Clearance_s_status,Additional_Comments);
// return response
writer.println(htmlRespone);
}catch(Throwable e){
PrintWriter writer = response.getWriter();
writer.println("Please Enter Valid EmployeeId That is unique");}
}
} |
package org.deepsymmetry.beatlink;
import java.io.IOException;
import java.net.*;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import org.deepsymmetry.beatlink.data.MetadataFinder;
import org.deepsymmetry.electro.Metronome;
import org.deepsymmetry.electro.Snapshot;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Provides the ability to create a virtual CDJ device that can lurk on a DJ Link network and receive packets sent to
* players, monitoring the detailed state of the other devices. This detailed information is helpful for augmenting
* what {@link BeatFinder} reports, allowing you to keep track of which player is the tempo master, how many beats of
* a track have been played, how close a player is getting to its next cue point, and more. It is also the foundation
* for finding out the rekordbox ID of the loaded track, which supports all the features associated with the
* {@link MetadataFinder}.
*
* @author James Elliott
*/
@SuppressWarnings("WeakerAccess")
public class VirtualCdj
extends LifecycleParticipant
implements OnAirListener, SyncListener, MasterHandoffListener, FaderStartListener {
// Class logger; all diagnostic output in this file goes through SLF4J.
private static final Logger logger = LoggerFactory.getLogger(VirtualCdj.class);
/**
 * The port to which other devices will send status update messages.
 */
@SuppressWarnings("WeakerAccess")
public static final int UPDATE_PORT = 50002;
/**
 * The socket used to receive device status packets while we are active.
 * Holds {@code null} whenever we are not running; {@link #isRunning()} tests exactly this.
 */
private final AtomicReference<DatagramSocket> socket = new AtomicReference<DatagramSocket>();
/**
 * Check whether we are presently posing as a virtual CDJ and receiving device status updates.
 *
 * @return true if our socket is open, sending presence announcements, and receiving status packets
 */
public boolean isRunning() {
    // A non-null socket is the definitive indication that we are active.
    final DatagramSocket currentSocket = socket.get();
    return currentSocket != null;
}
/**
 * Get the address on which we are receiving device status updates.
 *
 * @return the local address of our update socket
 * NOTE(review): ensureRunning() is inherited from LifecycleParticipant (not visible here);
 * presumably it throws IllegalStateException when we are not running -- confirm in superclass.
 */
public InetAddress getLocalAddress() {
ensureRunning();
return socket.get().getLocalAddress();
}
/**
 * The broadcast address on which we can reach the DJ Link devices. Determined when we start
 * up by finding the network interface address on which we are receiving the other devices'
 * announcement broadcasts.
 */
private final AtomicReference<InetAddress> broadcastAddress = new AtomicReference<InetAddress>();
/**
 * Get the broadcast address used to reach the DJ Link devices on the network we joined.
 *
 * @return the broadcast address determined during {@code createVirtualCdj()}
 */
public InetAddress getBroadcastAddress() {
ensureRunning();
return broadcastAddress.get();
}
/**
 * Keep track of the most recent updates we have seen, indexed by the address they came from.
 */
private final Map<InetAddress, DeviceUpdate> updates = new ConcurrentHashMap<InetAddress, DeviceUpdate>();
/**
 * Should we try to use a device number in the range 1 to 4 if we find one is available?
 */
private final AtomicBoolean useStandardPlayerNumber = new AtomicBoolean(false);
/**
 * Control whether self-assignment (device number 0) should first try the standard player
 * range 1-4 before falling back to 5-15.
 *
 * @param attempt true to try claiming a standard player number when self-assigning
 */
public void setUseStandardPlayerNumber(boolean attempt) {
useStandardPlayerNumber.set(attempt);
}
/**
 * Check whether self-assignment will first try the standard player range 1-4.
 *
 * @return true if a standard player number will be attempted
 */
public boolean getUseStandardPlayerNumber() {
return useStandardPlayerNumber.get();
}
/**
 * Get the device number that is used when sending presence announcements on the network to pose as a virtual CDJ.
 * This starts out being zero unless you explicitly assign another value, which means that the <code>VirtualCdj</code>
 * should assign itself an unused device number by watching the network when you call
 * {@link #start()}. If {@link #getUseStandardPlayerNumber()} returns {@code true}, self-assignment will try to
 * find a value in the range 1 to 4. Otherwise (or if those values are all used by other players), it will try to
 * find a value in the range 5 to 15.
 *
 * @return the virtual player number
 */
public synchronized byte getDeviceNumber() {
// The number lives directly inside the announcement packet template, so reading it here
// always reflects exactly what we are broadcasting.
return announcementBytes[DEVICE_NUMBER_OFFSET];
}
/**
 * Set the device number to be used when posing as a virtual CDJ. Passing zero while running
 * triggers immediate self-assignment of an unused number; passing zero while stopped defers
 * self-assignment until {@link #start()}.
 *
 * @param number the player number to announce, or zero to request self-assignment
 * @throws IllegalStateException if we are currently sending status packets
 */
@SuppressWarnings("WeakerAccess")
public synchronized void setDeviceNumber(byte number) {
if (isSendingStatus()) {
throw new IllegalStateException("Can't change device number while sending status packets.");
}
if (number == 0 && isRunning()) {
selfAssignDeviceNumber();
} else {
// Patch the number directly into the announcement packet template.
announcementBytes[DEVICE_NUMBER_OFFSET] = number;
}
}
/**
 * The interval, in milliseconds, at which we post presence announcements on the network.
 */
private final AtomicInteger announceInterval = new AtomicInteger(1500);
/**
 * Get the interval, in milliseconds, at which we broadcast presence announcements on the network to pose as
 * a virtual CDJ.
 *
 * @return the announcement interval
 */
public int getAnnounceInterval() {
return announceInterval.get();
}
/**
 * Set the interval, in milliseconds, at which we broadcast presence announcements.
 *
 * @param interval the announcement interval, in milliseconds
 * @throws IllegalArgumentException if {@code interval} is outside the range 200-2000
 */
public void setAnnounceInterval(int interval) {
if (interval < 200 || interval > 2000) {
throw new IllegalArgumentException("Interval must be between 200 and 2000");
}
announceInterval.set(interval);
}
/**
 * Template for the presence announcement packet we broadcast. The device name, device number,
 * MAC address and IP address are patched into this buffer at known offsets before it is sent
 * (see DEVICE_NAME_OFFSET and DEVICE_NUMBER_OFFSET below; the hardware/IP addresses are copied
 * in at offsets 38 and 44 by createVirtualCdj()).
 */
private static final byte[] announcementBytes = {
0x51, 0x73, 0x70, 0x74, 0x31, 0x57, 0x6d, 0x4a, 0x4f, 0x4c, 0x06, 0x00, 0x62, 0x65, 0x61, 0x74,
0x2d, 0x6c, 0x69, 0x6e, 0x6b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x01, 0x02, 0x00, 0x36, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x01, 0x00, 0x00, 0x00, 0x01, 0x00
};
/**
 * The location of the device name in the announcement packet.
 */
public static final int DEVICE_NAME_OFFSET = 0x0c;
/**
 * The length of the device name in the announcement packet.
 */
public static final int DEVICE_NAME_LENGTH = 0x14;
/**
 * The location of the device number in the announcement packet.
 */
public static final int DEVICE_NUMBER_OFFSET = 0x24;
/**
 * Get the name to be used in announcing our presence on the network.
 *
 * @return the device name reported in our presence announcement packets
 */
public static String getDeviceName() {
    // Decode with an explicit charset rather than the platform default; the name is documented
    // (and enforced by setDeviceName) to be plain ASCII, so this makes the result
    // platform-independent without changing it for any valid name.
    return new String(announcementBytes, DEVICE_NAME_OFFSET, DEVICE_NAME_LENGTH, StandardCharsets.US_ASCII).trim();
}
/**
 * Set the name to be used in announcing our presence on the network. The name can be no longer than twenty
 * bytes, and should be normal ASCII, no Unicode.
 *
 * @param name the device name to report in our presence announcement packets.
 * @throws IllegalArgumentException if the encoded name is longer than {@link #DEVICE_NAME_LENGTH} bytes
 */
public synchronized void setDeviceName(String name) {
    // Encode once, with an explicit charset so the byte length check and the copy below
    // agree and do not depend on the platform default encoding.
    final byte[] nameBytes = name.getBytes(StandardCharsets.US_ASCII);
    if (nameBytes.length > DEVICE_NAME_LENGTH) {
        throw new IllegalArgumentException("name cannot be more than " + DEVICE_NAME_LENGTH + " bytes long");
    }
    // Bug fix: Arrays.fill takes an exclusive end *index*, not a length. The original call
    // filled only [DEVICE_NAME_OFFSET, DEVICE_NAME_LENGTH), so setting a shorter name left
    // trailing bytes of a longer previous name in the announcement packet.
    Arrays.fill(announcementBytes, DEVICE_NAME_OFFSET, DEVICE_NAME_OFFSET + DEVICE_NAME_LENGTH, (byte) 0);
    System.arraycopy(nameBytes, 0, announcementBytes, DEVICE_NAME_OFFSET, nameBytes.length);
}
/**
 * Keep track of which device has reported itself as the current tempo master.
 */
private final AtomicReference<DeviceUpdate> tempoMaster = new AtomicReference<DeviceUpdate>();
/**
 * Find out which device is currently the tempo master.
 *
 * @return the most recent update from the device posing as tempo master, or {@code null} if there is none
 */
public DeviceUpdate getTempoMaster() {
ensureRunning();
return tempoMaster.get();
}
/**
 * Establish a new tempo master, and if it is a change from the existing one, report it to the listeners.
 *
 * @param newMaster the packet which caused the change of masters, or {@code null} if there is now no master.
 */
private void setTempoMaster(DeviceUpdate newMaster) {
    final DeviceUpdate previousMaster = tempoMaster.getAndSet(newMaster);
    // Decide whether this represents an actual change of master, comparing by device address.
    final boolean changed;
    if (newMaster == null) {
        changed = (previousMaster != null);
    } else {
        changed = (previousMaster == null) || !newMaster.getAddress().equals(previousMaster.getAddress());
    }
    if (changed) {
        // A different device (or no device) is now master, so inform any registered listeners.
        deliverMasterChangedAnnouncement(newMaster);
    }
}
/**
 * How large a tempo change is required before we consider it to be a real difference.
 * Stored as the raw long bits of a double so it can live in an AtomicLong.
 */
private final AtomicLong tempoEpsilon = new AtomicLong(Double.doubleToLongBits(0.0001));
/**
 * Find out how large a tempo change is required before we consider it to be a real difference.
 *
 * @return the BPM fraction that will trigger a tempo change update
 */
public double getTempoEpsilon() {
return Double.longBitsToDouble(tempoEpsilon.get());
}
/**
 * Set how large a tempo change is required before we consider it to be a real difference.
 *
 * @param epsilon the BPM fraction that will trigger a tempo change update
 */
public void setTempoEpsilon(double epsilon) {
tempoEpsilon.set(Double.doubleToLongBits(epsilon));
}
/**
 * Track the most recently reported master tempo.
 * Stored as the raw long bits of a double so it can live in an AtomicLong.
 */
private final AtomicLong masterTempo = new AtomicLong();
/**
 * Get the most recently reported master tempo.
 *
 * @return the most recent tempo reported by the tempo master, in BPM
 */
public double getMasterTempo() {
ensureRunning();
return Double.longBitsToDouble(masterTempo.get());
}
/**
 * Establish a new master tempo, and if it is a change from the existing one, report it to the listeners.
 *
 * @param newTempo the newly reported master tempo.
 */
private void setMasterTempo(double newTempo) {
// Swap in the new value first, then compare against what was there before.
double oldTempo = Double.longBitsToDouble(masterTempo.getAndSet(Double.doubleToLongBits(newTempo)));
if ((getTempoMaster() != null) && (Math.abs(newTempo - oldTempo) > getTempoEpsilon())) {
// This is a change in tempo, so report it to any registered listeners, and update our metronome if we are synced.
if (isSynced()) {
metronome.setTempo(newTempo);
notifyBeatSenderOfChange();
}
deliverTempoChangedAnnouncement(newTempo);
}
}
/**
 * Given an update packet sent to us, create the appropriate object to describe it.
 *
 * @param packet the packet received on our update port
 * @return the corresponding {@link DeviceUpdate} subclass, or {@code null} if the packet was not recognizable
 */
private DeviceUpdate buildUpdate(DatagramPacket packet) {
final int length = packet.getLength();
final Util.PacketType kind = Util.validateHeader(packet, UPDATE_PORT);
if (kind == null) {
logger.warn("Ignoring unrecognized packet sent to update port.");
return null;
}
switch (kind) {
case MIXER_STATUS:
// 56 bytes is the expected mixer status size; tolerate longer packets but warn about them.
if (length != 56) {
logger.warn("Processing a Mixer Status packet with unexpected length " + length + ", expected 56 bytes.");
}
if (length >= 56) {
return new MixerStatus(packet);
} else {
logger.warn("Ignoring too-short Mixer Status packet.");
return null;
}
case CDJ_STATUS:
// The listed lengths are presumably the sizes sent by different player models/firmware -- TODO confirm.
if (length != 208 && length != 212 && length != 284 && length != 292) {
logger.warn("Processing a CDJ Status packet with unexpected length " + length + ".");
}
if (length >= 208) {
return new CdjStatus(packet);
} else {
logger.warn("Ignoring too-short CDJ Status packet.");
return null;
}
default:
logger.warn("Ignoring " + kind.name + " packet sent to update port.");
return null;
}
}
/**
 * Process a device update once it has been received. Track it as the most recent update from its address,
 * and notify any registered listeners, including master listeners if it results in changes to tracked state,
 * such as the current master player and tempo. Also handles the Baroque dance of handing off the tempo master
 * role from or to another device.
 */
private void processUpdate(DeviceUpdate update) {
updates.put(update.getAddress(), update);
// Keep track of the largest sync number we see.
if (update instanceof CdjStatus) {
int syncNumber = ((CdjStatus)update).getSyncNumber();
if (syncNumber > this.syncCounter.get()) {
this.syncCounter.set(syncNumber);
}
}
// Deal with the tempo master complexities, including handoff to/from us.
if (update.isTempoMaster()) {
final Integer packetYieldingTo = update.getDeviceMasterIsBeingYieldedTo();
if (packetYieldingTo == null) {
// This is a normal, non-yielding master packet. Update our notion of the current master, and,
// if we were yielding, finish that process, updating our sync number appropriately.
if (master.get()) {
if (nextMaster.get() == update.deviceNumber) {
// The expected device has taken over; bump our sync counter past the largest seen.
syncCounter.set(largestSyncCounter.get() + 1);
} else {
if (nextMaster.get() == 0xff) {
// 0xff appears to be the sentinel meaning "not yielding to anyone" -- see the reset below.
logger.warn("Saw master asserted by player " + update.deviceNumber +
" when we were not yielding it.");
} else {
logger.warn("Expected to yield master role to player " + nextMaster.get() +
" but saw master asserted by player " + update.deviceNumber);
}
}
}
// Regardless of how we got here, we are no longer master and no handoff is pending.
master.set(false);
nextMaster.set(0xff);
setTempoMaster(update);
setMasterTempo(update.getEffectiveTempo());
} else {
// This is a yielding master packet. If it is us that is being yielded to, take over master if we
// are expecting to, otherwise log a warning.
if (packetYieldingTo == getDeviceNumber()) {
if (update.deviceNumber != masterYieldedFrom.get()) {
logger.warn("Expected player " + masterYieldedFrom.get() + " to yield master to us, but player " +
update.deviceNumber + " did.");
}
master.set(true);
masterYieldedFrom.set(0);
setTempoMaster(null);
setMasterTempo(getTempo());
}
}
} else {
// This update was not from a device claiming to be tempo master; if we thought that device
// was the master, it has resigned, so update our records.
DeviceUpdate oldMaster = getTempoMaster();
if (oldMaster != null && oldMaster.getAddress().equals(update.getAddress())) {
// This device has resigned master status, and nobody else has claimed it so far
setTempoMaster(null);
}
}
deliverDeviceUpdate(update);
}
/**
 * Process a beat packet, potentially updating the master tempo and sending our listeners a master
 * beat notification. Does nothing if we are not active.
 */
void processBeat(Beat beat) {
    if (!isRunning()) {
        return;  // We are not active, so beats are ignored entirely.
    }
    if (beat.isTempoMaster()) {
        // Only beats from the tempo master affect our notion of the master tempo.
        setMasterTempo(beat.getEffectiveTempo());
        deliverBeatAnnouncement(beat);
    }
}
/**
 * Scan a network interface to find if it has an address space which matches the device we are trying to reach.
 * If so, return the address specification.
 *
 * @param aDevice the DJ Link device we are trying to communicate with
 * @param networkInterface the network interface we are testing
 * @return the address which can be used to communicate with the device on the interface, or null
 */
private InterfaceAddress findMatchingAddress(DeviceAnnouncement aDevice, NetworkInterface networkInterface) {
    for (InterfaceAddress candidate : networkInterface.getInterfaceAddresses()) {
        if (candidate.getBroadcast() == null) {
            continue;  // Without a broadcast address this candidate cannot reach DJ Link gear.
        }
        if (Util.sameNetwork(candidate.getNetworkPrefixLength(), aDevice.getAddress(), candidate.getAddress())) {
            return candidate;  // Found an address on the same network as the device.
        }
    }
    return null;  // No address on this interface shares a network with the device.
}
/**
 * The number of milliseconds for which the {@link DeviceFinder} needs to have been watching the network in order
 * for us to be confident we can choose a device number that will not conflict.
 */
private static final long SELF_ASSIGNMENT_WATCH_PERIOD = 4000;
/**
 * Try to choose a device number, which we have not seen on the network. Start by making sure
 * we have been watching long enough to have seen the other devices. Then, if {@link #useStandardPlayerNumber} is
 * {@code true}, try to use a standard player number in the range 1-4 if possible. Otherwise (or if all those
 * numbers are already in use), pick a number from 5 to 15.
 *
 * @return true if we settled on a device number, false if we gave up
 */
private boolean selfAssignDeviceNumber() {
    final long now = System.currentTimeMillis();
    final long started = DeviceFinder.getInstance().getFirstDeviceTime();
    if (now - started < SELF_ASSIGNMENT_WATCH_PERIOD) {
        try {
            Thread.sleep(SELF_ASSIGNMENT_WATCH_PERIOD - (now - started));  // Sleep until we hit the right time
        } catch (InterruptedException e) {
            logger.warn("Interrupted waiting to self-assign device number, giving up.");
            // Bug fix: restore the interrupt flag so our caller (and its callers) can see the
            // interruption rather than having it silently swallowed.
            Thread.currentThread().interrupt();
            return false;
        }
    }
    // Gather the numbers that are already claimed by devices on the network.
    Set<Integer> numbersUsed = new HashSet<Integer>();
    for (DeviceAnnouncement device : DeviceFinder.getInstance().getCurrentDevices()) {
        numbersUsed.add(device.getNumber());
    }
    // Try all player numbers less than mixers use, only including the real player range if we are configured to.
    final int startingNumber = (getUseStandardPlayerNumber() ? 1 : 5);
    for (int result = startingNumber; result < 16; result++) {
        if (!numbersUsed.contains(result)) {  // We found one that is not used, so we can use it
            setDeviceNumber((byte) result);
            if (getUseStandardPlayerNumber() && (result > 4)) {
                logger.warn("Unable to self-assign a standard player number, all are in use. Using number " +
                        result + ".");
            }
            return true;
        }
    }
    logger.warn("Found no unused device numbers between " + startingNumber + " and 15, giving up.");
    return false;
}
/**
 * Once we have seen some DJ Link devices on the network, we can proceed to create a virtual player on that
 * same network.
 *
 * @return true if we found DJ Link devices and were able to create the {@code VirtualCdj}.
 * @throws SocketException if there is a problem opening a socket on the right network
 */
private boolean createVirtualCdj() throws SocketException {
    // Find the network interface and address to use to communicate with the first device we found.
    NetworkInterface matchedInterface = null;
    InterfaceAddress matchedAddress = null;
    DeviceAnnouncement aDevice = DeviceFinder.getInstance().getCurrentDevices().iterator().next();
    for (NetworkInterface networkInterface : Collections.list(NetworkInterface.getNetworkInterfaces())) {
        matchedAddress = findMatchingAddress(aDevice, networkInterface);
        if (matchedAddress != null) {
            matchedInterface = networkInterface;
            break;
        }
    }
    if (matchedAddress == null) {
        logger.warn("Unable to find network interface to communicate with " + aDevice +
                ", giving up.");
        return false;
    }
    if (getDeviceNumber() == 0) {
        if (!selfAssignDeviceNumber()) {
            return false;
        }
    }
    // Copy the chosen interface's hardware and IP addresses into the announcement packet template.
    final byte[] hardwareAddress = matchedInterface.getHardwareAddress();
    if (hardwareAddress == null) {
        // Bug fix: getHardwareAddress() can return null (e.g. for virtual or point-to-point
        // interfaces); the original code would have thrown a NullPointerException here.
        logger.warn("Interface " + matchedInterface + " has no hardware address, giving up.");
        return false;
    }
    System.arraycopy(hardwareAddress, 0, announcementBytes, 38, 6);
    System.arraycopy(matchedAddress.getAddress().getAddress(), 0, announcementBytes, 44, 4);
    broadcastAddress.set(matchedAddress.getBroadcast());
    // Looking good. Open our communication socket and set up our threads.
    socket.set(new DatagramSocket(UPDATE_PORT, matchedAddress.getAddress()));
    // Inform the DeviceFinder to ignore our own device announcement packets.
    DeviceFinder.getInstance().addIgnoredAddress(socket.get().getLocalAddress());
    final byte[] buffer = new byte[512];
    final DatagramPacket packet = new DatagramPacket(buffer, buffer.length);
    // Create the update reception thread
    Thread receiver = new Thread(null, new Runnable() {
        @Override
        public void run() {
            boolean received;
            while (isRunning()) {
                try {
                    socket.get().receive(packet);
                    received = true;
                } catch (IOException e) {
                    // Don't log a warning if the exception was due to the socket closing at shutdown.
                    if (isRunning()) {
                        // We did not expect to have a problem; log a warning and shut down.
                        logger.warn("Problem reading from DeviceStatus socket, stopping", e);
                        stop();
                    }
                    received = false;
                }
                try {
                    // Bug fix: the original compared InetAddress instances with !=, which is a
                    // reference comparison and essentially always true, so our own packets were
                    // never filtered out. Use equals() for a value comparison.
                    if (received && !packet.getAddress().equals(socket.get().getLocalAddress())) {
                        DeviceUpdate update = buildUpdate(packet);
                        if (update != null) {
                            processUpdate(update);
                        }
                    }
                } catch (Exception e) {
                    logger.warn("Problem processing device update packet", e);
                }
            }
        }
    }, "beat-link VirtualCdj status receiver");
    receiver.setDaemon(true);
    receiver.setPriority(Thread.MAX_PRIORITY);
    receiver.start();
    // Create the thread which announces our participation in the DJ Link network, to request update packets.
    // sendAnnouncement() sleeps for the announce interval on each iteration, so this loop is paced.
    Thread announcer = new Thread(null, new Runnable() {
        @Override
        public void run() {
            while (isRunning()) {
                sendAnnouncement(broadcastAddress.get());
            }
        }
    }, "beat-link VirtualCdj announcement sender");
    announcer.setDaemon(true);
    announcer.start();
    deliverLifecycleAnnouncement(logger, true);
    return true;
}
/**
 * Makes sure we get shut down if the {@link DeviceFinder} does, because we rely on it.
 */
private final LifecycleListener deviceFinderLifecycleListener = new LifecycleListener() {
@Override
public void started(LifecycleParticipant sender) {
logger.debug("VirtualCDJ doesn't have anything to do when the DeviceFinder starts");
}
@Override
public void stopped(LifecycleParticipant sender) {
// Without the DeviceFinder we cannot keep track of the network, so shut ourselves down too.
if (isRunning()) {
logger.info("VirtualCDJ stopping because DeviceFinder has stopped.");
stop();
}
}
};
/**
 * Start announcing ourselves and listening for status packets. If already active, has no effect. Requires the
 * {@link DeviceFinder} to be active in order to find out how to communicate with other devices, so will start
 * that if it is not already.
 *
 * @return true if we found DJ Link devices and were able to create the {@code VirtualCdj}, or it was already running.
 * @throws SocketException if the socket to listen on port 50002 cannot be created
 */
@SuppressWarnings("UnusedReturnValue")
public synchronized boolean start() throws SocketException {
    if (!isRunning()) {
        // Set up so we know we have to shut down if the DeviceFinder shuts down.
        DeviceFinder.getInstance().addLifecycleListener(deviceFinderLifecycleListener);
        // Find some DJ Link devices so we can figure out the interface and address to use to talk to them.
        // Poll for up to ten seconds (20 x 500ms) before giving up.
        DeviceFinder.getInstance().start();
        for (int i = 0; DeviceFinder.getInstance().getCurrentDevices().isEmpty() && i < 20; i++) {
            try {
                Thread.sleep(500);
            } catch (InterruptedException e) {
                logger.warn("Interrupted waiting for devices, giving up", e);
                // Bug fix: restore the interrupt flag so callers can observe the interruption
                // instead of it being silently swallowed.
                Thread.currentThread().interrupt();
                return false;
            }
        }
        if (DeviceFinder.getInstance().getCurrentDevices().isEmpty()) {
            logger.warn("No DJ Link devices found, giving up");
            return false;
        }
        return createVirtualCdj();
    }
    return true;  // We were already active
}
/**
 * Stop announcing ourselves and listening for status updates.
 */
public synchronized void stop() {
if (isRunning()) {
try {
setSendingStatus(false);
} catch (Exception e) {
logger.error("Problem stopping sending status during shutdown", e);
}
// Ordering matters here: stop ignoring our address and close the socket before
// clearing the socket reference, which is what makes isRunning() report false.
DeviceFinder.getInstance().removeIgnoredAddress(socket.get().getLocalAddress());
socket.get().close();
socket.set(null);
broadcastAddress.set(null);
updates.clear();
setTempoMaster(null);
setDeviceNumber((byte)0);  // Set up for self-assignment if restarted.
deliverLifecycleAnnouncement(logger, false);
}
}
/**
 * Send an announcement packet so the other devices see us as being part of the DJ Link network and send us
 * updates. Also sleeps for the configured announce interval, so the caller's loop is paced by this method.
 *
 * @param broadcastAddress the address to which the announcement is broadcast
 */
private void sendAnnouncement(InetAddress broadcastAddress) {
    try {
        DatagramPacket announcement = new DatagramPacket(announcementBytes, announcementBytes.length,
                broadcastAddress, DeviceFinder.ANNOUNCEMENT_PORT);
        socket.get().send(announcement);
        Thread.sleep(getAnnounceInterval());
    } catch (InterruptedException e) {
        // Bug fix: restore the interrupt flag rather than swallowing the interruption; we still
        // shut down, as the original code did for any exception.
        logger.warn("Interrupted sending announcement packet, shutting down", e);
        Thread.currentThread().interrupt();
        stop();
    } catch (Exception e) {
        logger.warn("Unable to send announcement packet, shutting down", e);
        stop();
    }
}
/**
 * Gather the most recent status update seen from each device that has sent one recently enough
 * to still be considered present on the network.
 *
 * @return an unmodifiable snapshot of the current device status updates
 */
public Set<DeviceUpdate> getLatestStatus() {
    ensureRunning();
    // Anything with a timestamp at or after this cutoff is still considered fresh.
    final long oldestAllowed = System.currentTimeMillis() - DeviceFinder.MAXIMUM_AGE;
    final Set<DeviceUpdate> fresh = new HashSet<DeviceUpdate>();
    for (DeviceUpdate candidate : updates.values()) {
        if (candidate.getTimestamp() >= oldestAllowed) {
            fresh.add(candidate);
        }
    }
    return Collections.unmodifiableSet(fresh);
}
/**
 * Look up the most recent status we have seen from the device which sent the given update.
 *
 * @param device an update from the device of interest
 * @return the latest update from that device's address, or {@code null} if none has been seen
 */
public DeviceUpdate getLatestStatusFor(DeviceUpdate device) {
ensureRunning();
return updates.get(device.getAddress());
}
/**
 * Look up the most recent status we have seen from the device which sent the given announcement.
 *
 * @param device the announcement from the device of interest
 * @return the latest update from that device's address, or {@code null} if none has been seen
 */
public DeviceUpdate getLatestStatusFor(DeviceAnnouncement device) {
ensureRunning();
return updates.get(device.getAddress());
}
/**
 * Look up the most recent status we have seen from the device with the specified number.
 *
 * @param deviceNumber the player number of interest
 * @return the latest update from that device, or {@code null} if none has been seen
 */
public DeviceUpdate getLatestStatusFor(int deviceNumber) {
    ensureRunning();
    // Scan the tracked updates for one whose reported player number matches.
    for (DeviceUpdate candidate : updates.values()) {
        if (candidate.getDeviceNumber() == deviceNumber) {
            return candidate;
        }
    }
    return null;  // No device with that number has reported status.
}
/**
 * Keeps track of the registered master listeners. Backed by a ConcurrentHashMap so listeners
 * can be added and removed safely while announcements are being delivered.
 */
private final Set<MasterListener> masterListeners =
Collections.newSetFromMap(new ConcurrentHashMap<MasterListener, Boolean>());
/**
 * Register a listener to be told about changes related to the tempo master. Passing
 * {@code null} is a harmless no-op; adding an already-registered listener has no effect.
 *
 * @param listener the master listener to add
 */
public void addMasterListener(MasterListener listener) {
if (listener != null) {
masterListeners.add(listener);
}
}
/**
 * Removes the specified master listener so that it no longer receives device updates when
 * there are changes related to the tempo master. If {@code listener} is {@code null} or not present
 * in the set of registered listeners, no exception is thrown and no action is performed.
 *
 * @param listener the master listener to remove
 */
public void removeMasterListener(MasterListener listener) {
    if (listener == null) {
        return;  // Nothing to remove; removing null is documented as a harmless no-op.
    }
    masterListeners.remove(listener);
}
/**
 * Get the set of master listeners that are currently registered.
 *
 * @return the currently registered tempo master listeners
 */
@SuppressWarnings("WeakerAccess")
public Set<MasterListener> getMasterListeners() {
    // Snapshot the live set first, then wrap it so callers receive an immutable view
    // that is unaffected by later registrations or removals.
    final Set<MasterListener> snapshot = new HashSet<MasterListener>(masterListeners);
    return Collections.unmodifiableSet(snapshot);
}
/**
 * Send a master changed announcement to all registered master listeners.
 * A misbehaving listener cannot prevent delivery to the others; its exception is logged.
 *
 * @param update the message announcing the new tempo master
 */
private void deliverMasterChangedAnnouncement(final DeviceUpdate update) {
    for (MasterListener recipient : getMasterListeners()) {
        try {
            recipient.masterChanged(update);
        } catch (Exception e) {
            logger.warn("Problem delivering master changed announcement to listener", e);
        }
    }
}
/**
 * Send a tempo changed announcement to all registered master listeners.
 * A misbehaving listener cannot prevent delivery to the others; its exception is logged.
 *
 * @param tempo the new master tempo
 */
private void deliverTempoChangedAnnouncement(final double tempo) {
    for (MasterListener recipient : getMasterListeners()) {
        try {
            recipient.tempoChanged(tempo);
        } catch (Exception e) {
            logger.warn("Problem delivering tempo changed announcement to listener", e);
        }
    }
}
/**
 * Send a beat announcement to all registered master listeners.
 * A misbehaving listener cannot prevent delivery to the others; its exception is logged.
 *
 * @param beat the beat sent by the tempo master
 */
private void deliverBeatAnnouncement(final Beat beat) {
    for (MasterListener recipient : getMasterListeners()) {
        try {
            recipient.newBeat(beat);
        } catch (Exception e) {
            logger.warn("Problem delivering master beat announcement to listener", e);
        }
    }
}
/**
 * Keeps track of the registered device update listeners. Backed by a ConcurrentHashMap so
 * listeners can be added and removed safely while updates are being delivered.
 */
private final Set<DeviceUpdateListener> updateListeners =
Collections.newSetFromMap(new ConcurrentHashMap<DeviceUpdateListener, Boolean>());
/**
 * Register a listener to receive all device updates as they arrive. Passing {@code null}
 * is a harmless no-op; adding an already-registered listener has no effect.
 *
 * @param listener the device update listener to add
 */
@SuppressWarnings("SameParameterValue")
public void addUpdateListener(DeviceUpdateListener listener) {
if (listener != null) {
updateListeners.add(listener);
}
}
/**
 * Removes the specified device update listener so it no longer receives device updates when they come in.
 * If {@code listener} is {@code null} or not present
 * in the list of registered listeners, no exception is thrown and no action is performed.
 *
 * @param listener the device update listener to remove
 */
public void removeUpdateListener(DeviceUpdateListener listener) {
    if (listener == null) {
        return;  // Nothing to remove; removing null is documented as a harmless no-op.
    }
    updateListeners.remove(listener);
}
/**
 * Get the set of device update listeners that are currently registered.
 *
 * @return the currently registered update listeners
 */
public Set<DeviceUpdateListener> getUpdateListeners() {
    // Snapshot the live set first, then wrap it so callers receive an immutable view
    // that is unaffected by later registrations or removals.
    final Set<DeviceUpdateListener> snapshot = new HashSet<DeviceUpdateListener>(updateListeners);
    return Collections.unmodifiableSet(snapshot);
}
/**
 * Send a device update to all registered update listeners.
 * A misbehaving listener cannot prevent delivery to the others; its exception is logged.
 *
 * @param update the device update that has just arrived
 */
private void deliverDeviceUpdate(final DeviceUpdate update) {
    for (DeviceUpdateListener recipient : getUpdateListeners()) {
        try {
            recipient.received(update);
        } catch (Exception e) {
            logger.warn("Problem delivering device update to listener", e);
        }
    }
}
/**
 * Finish the work of building and sending a protocol packet.
 *
 * @param kind the type of packet to create and send
 * @param payload the content which will follow our device name in the packet
 * @param destination where the packet should be sent
 * @param port the port to which the packet should be sent
 *
 * @throws IOException if there is a problem sending the packet
 */
@SuppressWarnings("SameParameterValue")
private void assembleAndSendPacket(Util.PacketType kind, byte[] payload, InetAddress destination, int port) throws IOException {
// The device name bytes are taken straight from our announcement template, wrapped read-only
// so Util.buildPacket cannot disturb them.
DatagramPacket packet = Util.buildPacket(kind,
ByteBuffer.wrap(announcementBytes, DEVICE_NAME_OFFSET, DEVICE_NAME_LENGTH).asReadOnlyBuffer(),
ByteBuffer.wrap(payload));
packet.setAddress(destination);
packet.setPort(port);
socket.get().send(packet);
}
/**
 * The bytes at the end of a sync control command packet.
 */
private final static byte[] SYNC_CONTROL_PAYLOAD = { 0x01,
0x00, 0x0d, 0x00, 0x08, 0x00, 0x00, 0x00, 0x0d, 0x00, 0x00, 0x00, 0x0f };
/**
 * Assemble and send a packet that performs sync control, turning a device's sync mode on or off, or telling it
 * to become the tempo master.
 *
 * @param update an update from the device whose sync state is to be set
 * @param command the byte identifying the specific sync command to be sent
 *
 * @throws IOException if there is a problem sending the command to the device
 */
private void sendSyncControlCommand(DeviceUpdate update, byte command) throws IOException {
    ensureRunning();
    // Work on a copy of the template so the shared payload constant is never modified.
    final byte[] payload = Arrays.copyOf(SYNC_CONTROL_PAYLOAD, SYNC_CONTROL_PAYLOAD.length);
    final byte ourNumber = getDeviceNumber();
    payload[2] = ourNumber;
    payload[8] = ourNumber;
    payload[12] = command;
    assembleAndSendPacket(Util.PacketType.SYNC_CONTROL, payload, update.getAddress(), BeatFinder.BEAT_PORT);
}
/**
 * Tell a device to turn sync mode on or off, looking it up by player number.
 *
 * @param deviceNumber the player whose sync mode is to be changed
 * @param synced true to turn sync on, false to turn it off
 * @throws IOException if there is a problem sending the command
 * @throws IllegalArgumentException if no device with that number is on the network
 */
public void sendSyncModeCommand(int deviceNumber, boolean synced) throws IOException {
final DeviceUpdate update = getLatestStatusFor(deviceNumber);
if (update == null) {
throw new IllegalArgumentException("Device " + deviceNumber + " not found on network.");
}
sendSyncModeCommand(update, synced);
}
/**
 * Tell a device to turn sync mode on or off.
 *
 * @param update an update from the device whose sync mode is to be changed
 * @param synced true to turn sync on (command 0x10), false to turn it off (command 0x20)
 * @throws IOException if there is a problem sending the command
 */
public void sendSyncModeCommand(DeviceUpdate update, boolean synced) throws IOException {
sendSyncControlCommand(update, synced? (byte)0x10 : (byte)0x20);
}
/**
 * Tell a device to become the tempo master, looking it up by player number.
 *
 * @param deviceNumber the player which should become tempo master
 * @throws IOException if there is a problem sending the command
 * @throws IllegalArgumentException if no device with that number is on the network
 */
public void appointTempoMaster(int deviceNumber) throws IOException {
final DeviceUpdate update = getLatestStatusFor(deviceNumber);
if (update == null) {
throw new IllegalArgumentException("Device " + deviceNumber + " not found on network.");
}
appointTempoMaster(update);
}
/**
 * Tell a device to become the tempo master (sync control command 0x01).
 *
 * @param update an update from the device which should become tempo master
 * @throws IOException if there is a problem sending the command
 */
public void appointTempoMaster(DeviceUpdate update) throws IOException {
sendSyncControlCommand(update,(byte)0x01);
}
/**
 * The bytes at the end of a fader start command packet.
 */
private final static byte[] FADER_START_PAYLOAD = { 0x01,
0x00, 0x0d, 0x00, 0x04, 0x02, 0x02, 0x02, 0x02 };
/**
 * Broadcast a fader start command, telling some players to start and others to stop.
 * Channels mentioned in neither set are left unchanged (template value 0x02).
 *
 * @param deviceNumbersToStart the channels (1-4) that should start playing
 * @param deviceNumbersToStop the channels (1-4) that should stop playing
 * @throws IOException if there is a problem broadcasting the command
 */
public void sendFaderStartCommand(Set<Integer> deviceNumbersToStart, Set<Integer> deviceNumbersToStop) throws IOException {
    ensureRunning();
    // Work on a copy of the template so the shared payload constant is never modified.
    final byte[] payload = Arrays.copyOf(FADER_START_PAYLOAD, FADER_START_PAYLOAD.length);
    payload[2] = getDeviceNumber();
    for (int channel = 1; channel <= 4; channel++) {
        if (deviceNumbersToStart.contains(channel)) {
            payload[channel + 4] = 0;  // 0 means start this channel.
        }
        if (deviceNumbersToStop.contains(channel)) {
            payload[channel + 4] = 1;  // 1 means stop; stop wins if a channel is in both sets.
        }
    }
    assembleAndSendPacket(Util.PacketType.FADER_START_COMMAND, payload, getBroadcastAddress(), BeatFinder.BEAT_PORT);
}
/**
 * The bytes at the end of a channels on-air report packet.
 */
private final static byte[] CHANNELS_ON_AIR_PAYLOAD = { 0x01,
0x00, 0x0d, 0x00, 0x09, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 };
/**
 * Broadcast a channels on-air report, marking the listed channels audible and all others off-air.
 *
 * @param deviceNumbersOnAir the channels (1-4) that are currently audible
 * @throws IOException if there is a problem broadcasting the report
 */
public void sendOnAirCommand(Set<Integer> deviceNumbersOnAir) throws IOException {
    ensureRunning();
    // Work on a copy of the template so the shared payload constant is never modified;
    // the template defaults every channel to off-air (0).
    final byte[] payload = Arrays.copyOf(CHANNELS_ON_AIR_PAYLOAD, CHANNELS_ON_AIR_PAYLOAD.length);
    payload[2] = getDeviceNumber();
    for (int channel = 1; channel <= 4; channel++) {
        if (deviceNumbersOnAir.contains(channel)) {
            payload[channel + 4] = 1;  // 1 marks this channel as on the air.
        }
    }
    assembleAndSendPacket(Util.PacketType.CHANNELS_ON_AIR, payload, getBroadcastAddress(), BeatFinder.BEAT_PORT);
}
/**
 * React to an on-air report from the mixer: we are on the air exactly when our own channel
 * number is among the audible channels.
 */
@Override
public void channelsOnAir(Set<Integer> audibleChannels) {
setOnAir(audibleChannels.contains((int)getDeviceNumber()));
}
/**
 * React to a sync command packet addressed to us by updating our own sync state.
 */
@Override
public void setSyncMode(boolean synced) {
setSynced(synced);
}
/**
 * React to a sync command packet telling us to become the tempo master. Only honored while we
 * are sending status packets; the takeover runs on a separate thread because becomeTempoMaster()
 * presumably involves network exchanges we should not perform on the packet-handling thread
 * -- TODO confirm against its implementation.
 */
@Override
public void becomeMaster() {
logger.debug("Received packet telling us to become master.");
if (isSendingStatus()) {
new Thread(new Runnable() {
@Override
public void run() {
try {
becomeTempoMaster();
} catch (Exception e) {
logger.error("Problem becoming tempo master in response to sync command packet", e);
}
}
}).start();
} else {
logger.warn("Ignoring sync command to become tempo master, since we are not sending status packets.");
}
}
/**
 * React to a request that we yield the tempo master role to another device: record the device
 * we are handing off to, so processUpdate() can complete the handoff when that device asserts
 * master. Only honored while we are sending status and the target is not ourselves.
 */
@Override
public void yieldMasterTo(int deviceNumber) {
if (logger.isDebugEnabled()) {
logger.debug("Received instruction to yield master to device " + deviceNumber);
}
if (isSendingStatus() && getDeviceNumber() != deviceNumber) {
nextMaster.set(deviceNumber);
}
// TODO send yield response!
}
/**
 * React to another device's response to our request that it yield the master role to us:
 * record which device yielded, so processUpdate() can recognize the completed handoff.
 */
@Override
public void yieldResponse(int deviceNumber, boolean yielded) {
if (logger.isDebugEnabled()) {
logger.debug("Received yield response of " + yielded + " from device " + deviceNumber);
}
if (yielded) {
if (isSendingStatus()) {
masterYieldedFrom.set(deviceNumber);
} else {
logger.warn("Ignoring master yield response because we are not sending status.");
}
} else {
logger.warn("Ignoring master yield response with unexpected non-yielding value.");
}
}
/**
 * React to a fader start command: start or stop playing if our own channel number is listed.
 * If we appear in both sets, starting takes precedence, matching the original check order.
 */
@Override
public void fadersChanged(Set<Integer> playersToStart, Set<Integer> playersToStop) {
    final int ourNumber = getDeviceNumber();
    if (playersToStart.contains(ourNumber)) {
        setPlaying(true);
    } else if (playersToStop.contains(ourNumber)) {
        setPlaying(false);
    }
}
/**
 * The number of milliseconds that we will wait between sending status packets, when we are sending them.
 * The setter constrains this to the range 20-2000, so the default of 200 is always legal.
 */
private int statusInterval = 200;
/**
 * Check how often we will send status packets, if we are configured to send them.
 *
 * @return the millisecond interval that will pass between status packets we send
 */
public synchronized int getStatusInterval() {
return statusInterval;
}
/**
 * Set how often we will send status packets, if we are configured to send them.
 *
 * @param interval the millisecond interval that should pass between status packets we send
 * @throws IllegalArgumentException if {@code interval} is less than 20 or greater than 2000
 */
public synchronized void setStatusInterval(int interval) {
    if (interval < 20 || interval > 2000) {
        // Include the rejected value so callers can see what they passed.
        throw new IllegalArgumentException("interval must be between 20 and 2000, received: " + interval);
    }
    this.statusInterval = interval;
}
/**
 * Makes sure we stop sending status if the {@link BeatFinder} shuts down, because we rely on it.
 */
private final LifecycleListener beatFinderLifecycleListener = new LifecycleListener() {
@Override
public void started(LifecycleParticipant sender) {
// Nothing to do on startup; we only care about losing the BeatFinder.
logger.debug("VirtualCDJ doesn't have anything to do when the BeatFinder starts");
}
@Override
public void stopped(LifecycleParticipant sender) {
if (isSendingStatus()) {
logger.info("VirtualCDJ no longer sending status updates because BeatFinder has stopped.");
try {
setSendingStatus(false);
} catch (Exception e) {
// Best effort; log rather than propagate from a lifecycle callback.
logger.error("Problem stopping sending status packets when the BeatFinder stopped", e);
}
}
}
};
/**
 * Will hold an instance when we are actively sending beats, so we can let it know when the metronome changes,
 * and when it is time to shut down.
 */
private final AtomicReference<BeatSender> beatSender = new AtomicReference<BeatSender>();
/**
 * Check whether we are currently running a {@link BeatSender}; if we are, notify it that there has been a change
 * to the metronome timeline, so it needs to wake up and reassess its situation.
 */
private void notifyBeatSenderOfChange() {
final BeatSender activeSender = beatSender.get();
if (activeSender != null) {
activeSender.timelineChanged();
}
}
/**
 * The bytes following the device name in a beat packet.
 */
private static final byte[] BEAT_PAYLOAD = { 0x01,
0x00, 0x0d, 0x00, 0x3c, 0x01, 0x01, 0x01, 0x01, 0x02, 0x02, 0x02, 0x02, 0x10, 0x10, 0x10, 0x10,
0x04, 0x04, 0x04, 0x04, 0x20, 0x20, 0x20, 0x20, 0x08, 0x08, 0x08, 0x08, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0b, 0x00, 0x00, 0x0d};
/**
 * Sends a beat packet. Generally this should only be invoked when our {@link BeatSender} has determined that it is
 * time to do so, but it is public to allow experimentation.
 *
 * @return the beat number that was sent
 */
public long sendBeat() {
// Use the live metronome when playing, or the position we froze when playback stopped.
Snapshot snapshot = (playing ? metronome.getSnapshot() : whereStopped);
byte[] payload = new byte[BEAT_PAYLOAD.length];
System.arraycopy(BEAT_PAYLOAD, 0, payload, 0, BEAT_PAYLOAD.length);
payload[0x02] = getDeviceNumber();
// Multiples of the beat interval at fixed offsets; presumably countdowns to the next 1, 2, 4,
// and 8 beats — confirm against the protocol documentation.
Util.numberToBytes((int)snapshot.getBeatInterval(), payload, 0x05, 4);
Util.numberToBytes((int)(snapshot.getBeatInterval() * 2), payload, 0x09, 4);
Util.numberToBytes((int)(snapshot.getBeatInterval() * 4), payload, 0x11, 4);
Util.numberToBytes((int)(snapshot.getBeatInterval() * 8), payload, 0x19, 4);
// Beats remaining until the next bar boundary (bars of four, one-based beat numbering).
final int beatsLeft = 5 - snapshot.getBeatWithinBar();
final int nextBar = (int)(snapshot.getBeatInterval() * beatsLeft);
Util.numberToBytes(nextBar, payload, 0x0d, 4);
Util.numberToBytes(nextBar + (int)snapshot.getBarInterval(), payload, 0x15, 4);
// Tempo is sent as hundredths of a BPM in two bytes.
Util.numberToBytes((int)Math.round(snapshot.getTempo() * 100), payload, 0x3b, 2);
payload[0x3d] = (byte)snapshot.getBeatWithinBar();
payload[0x40] = getDeviceNumber();
try {
assembleAndSendPacket(Util.PacketType.BEAT, payload, broadcastAddress.get(), BeatFinder.BEAT_PORT);
} catch (IOException e) {
logger.error("VirtualCdj Failed to send beat packet.", e);
}
return snapshot.getBeat();
}
/**
 * Will hold a non-null value when we are sending our own status packets, which can be used to stop the thread
 * doing so. Most uses of Beat Link will not require this level of activity. However, if you want to be able to
 * take over the tempo master role, and control the tempo and beat alignment of other players, you will need to
 * turn on this feature, which also requires that you are using one of the standard player numbers, 1-4.
 */
private AtomicBoolean sendingStatus = null;
/**
 * Start or stop sending status packets. Starting requires that we are running, are using a
 * standard player number (1-4), and that the {@link BeatFinder} can be started, since we rely
 * on it for beat synchronization.
 *
 * @param send {@code true} to begin sending status packets, {@code false} to stop
 * @throws IOException if there is a problem starting the {@link BeatFinder}
 * @throws IllegalStateException if asked to send while not using a standard player number
 */
public synchronized void setSendingStatus(boolean send) throws IOException {
if (isSendingStatus() == send) {
return;
}
if (send) { // Start sending status packets.
ensureRunning();
if ((getDeviceNumber() < 1) || (getDeviceNumber() > 4)) {
throw new IllegalStateException("Can only send status when using a standard player number, 1 through 4.");
}
BeatFinder.getInstance().start();
BeatFinder.getInstance().addLifecycleListener(beatFinderLifecycleListener);
final AtomicBoolean stillRunning = new AtomicBoolean(true);
sendingStatus = stillRunning; // Allow other threads to stop us when necessary.
// Daemon thread which sends a status packet, then sleeps for the configured interval,
// until the flag above is cleared.
Thread sender = new Thread(null, new Runnable() {
@Override
public void run() {
while (stillRunning.get()) {
sendStatus();
try {
Thread.sleep(getStatusInterval());
} catch (InterruptedException e) {
logger.warn("beat-link VirtualCDJ status sender thread was interrupted; continuing");
}
}
}
}, "beat-link VirtualCdj status sender");
sender.setDaemon(true);
sender.start();
if (isSynced()) { // If we are supposed to be synced, we need to respond to beats.
BeatFinder.getInstance().addBeatListener(beatListener);
}
if (isPlaying()) { // Start the beat sender too, if we are supposed to be playing.
beatSender.set(new BeatSender(metronome));
}
} else { // Stop sending status packets, and responding to beats if we were synced.
BeatFinder.getInstance().removeLifecycleListener(beatFinderLifecycleListener);
BeatFinder.getInstance().removeBeatListener(beatListener);
sendingStatus.set(false); // Stop the status sending thread.
sendingStatus = null; // Indicate that we are no longer sending status.
final BeatSender activeSender = beatSender.get(); // And stop the beat sender if we have one.
if (activeSender != null) {
activeSender.shutDown();
beatSender.set(null);
}
}
}
/**
 * Check whether we are currently sending status packets.
 *
 * @return {@code true} if we are sending status packets, and can participate in (and control) tempo and beat sync
 */
public synchronized boolean isSendingStatus() {
return (sendingStatus != null);
}
/**
 * Used to keep time when we are pretending to play a track, and to allow us to sync with other players when we
 * are told to do so.
 */
private final Metronome metronome = new Metronome();
/**
 * Keeps track of our position when we are not playing; this beat gets loaded into the metronome when we start
 * playing, and it will keep time from there. When we stop again, we save the metronome's current beat here.
 * Read and written only from synchronized methods of this class.
 */
private Snapshot whereStopped = metronome.getSnapshot(metronome.getStartTime());
/**
 * Indicates whether we should currently pretend to be playing. This will only have an impact when we are sending
 * status and beat packets. Guarded by the synchronized accessors {@code setPlaying}/{@code isPlaying}.
 */
private boolean playing = false;
/**
 * Controls whether we report that we are playing. This will only have an impact when we are sending status and
 * beat packets.
 *
 * @param playing {@code true} if we should seem to be playing
 */
public synchronized void setPlaying(boolean playing) {
if (this.playing == playing) {
return;
}
this.playing = playing;
if (playing) {
// Resume the metronome from wherever we were when playback last stopped.
metronome.jumpToBeat(whereStopped.getBeat());
if (isSendingStatus()) { // Need to also start the beat sender.
beatSender.set(new BeatSender(metronome));
}
} else {
final BeatSender activeSender = beatSender.get();
if (activeSender != null) { // We have a beat sender we need to stop.
activeSender.shutDown();
beatSender.set(null);
}
// Freeze our position so we can resume from it the next time playback starts.
whereStopped = metronome.getSnapshot();
}
}
/**
 * Check whether we are pretending to be playing. This will only have an impact when we are sending status and
 * beat packets.
 *
 * @return {@code true} if we are reporting active playback
 */
public synchronized boolean isPlaying() {
return playing;
}
/**
 * Find details about the current simulated playback position.
 *
 * @return the current (or last, if we are stopped) playback state
 */
public Snapshot getPlaybackPosition() {
// NOTE(review): this always returns the live metronome snapshot; the saved whereStopped
// position is not consulted even when we are stopped — confirm this matches the javadoc.
return metronome.getSnapshot();
}
/**
 * Nudge the playback position by the specified number of milliseconds, to support synchronization with an external
 * clock. Positive values move playback forward in time, while negative values jump back.
 *
 * @param ms the number of millisecond to shift the simulated playback position
 */
public void adjustPlaybackPosition(int ms) {
// Moving the metronome's start time earlier (negative adjustment) advances playback.
metronome.adjustStart(-ms);
}
/**
 * Indicates whether we are currently the tempo master. Will only be meaningful (and get set) if we are sending
 * status packets.
 */
private final AtomicBoolean master = new AtomicBoolean(false);
/**
 * Template for the bytes following the device name in a master handoff request packet; bytes 2
 * and 8 are overwritten with our device number before sending (see {@code becomeTempoMaster}).
 */
private static final byte[] MASTER_HANDOFF_REQUEST_PAYLOAD = { 0x01,
0x00, 0x0d, 0x00, 0x04, 0x00, 0x00, 0x00, 0x0d };
/**
 * Arrange to become the tempo master. If another device currently holds the role, we send it a
 * handoff request, and will actually take over when a successful yield response arrives;
 * otherwise we claim the role immediately.
 *
 * @throws IOException if there is a problem sending the handoff request
 * @throws IllegalStateException if we are not currently sending status packets
 */
public synchronized void becomeTempoMaster() throws IOException {
logger.debug("Trying to become master.");
if (!isSendingStatus()) {
throw new IllegalStateException("Must be sending status updates to become the tempo master.");
}
// Is there someone we need to ask to yield to us?
final DeviceUpdate currentMaster = getTempoMaster();
if (currentMaster != null) {
// Send the yield request; we will become master when we get a successful response.
byte[] payload = new byte[MASTER_HANDOFF_REQUEST_PAYLOAD.length];
System.arraycopy(MASTER_HANDOFF_REQUEST_PAYLOAD, 0, payload, 0, MASTER_HANDOFF_REQUEST_PAYLOAD.length);
payload[2] = getDeviceNumber();
payload[8] = getDeviceNumber();
if (logger.isDebugEnabled()) {
logger.debug("Sending master yield request to player " + currentMaster);
}
assembleAndSendPacket(Util.PacketType.MASTER_HANDOFF_REQUEST, payload, currentMaster.address, BeatFinder.BEAT_PORT);
} else if (!master.get()) {
// There is no other master, we can just become it immediately.
setMasterTempo(getTempo());
master.set(true);
}
}
/**
 * Check whether we are currently in charge of the tempo and beat alignment.
 *
 * @return {@code true} if we hold the tempo master role
 */
public boolean isTempoMaster() {
return master.get();
}
/**
 * Used to respond to beats when we are synced, aligning our metronome.
 */
private final BeatListener beatListener = new BeatListener() {
@Override
public void newBeat(Beat beat) {
// Only the tempo master's beats define the timeline we align to.
if (beat.isTempoMaster()) {
metronome.setBeatPhase(0.0);
}
}
};
/**
 * Indicates whether we are currently staying in sync with the tempo master. Will only be meaningful if we are
 * sending status packets.
 */
private boolean synced = false;
/**
 * Controls whether we are currently staying in sync with the tempo master. Will only be meaningful if we are
 * sending status packets.
 *
 * @param sync if {@code true}, our status packets will be tempo and beat aligned with the tempo master
 */
public synchronized void setSynced(boolean sync) {
if (synced != sync) {
// We are changing sync state, so add or remove our beat listener as appropriate.
if (sync && isSendingStatus()) {
BeatFinder.getInstance().addBeatListener(beatListener);
} else {
// Removing a listener that was never added is a harmless no-op.
BeatFinder.getInstance().removeBeatListener(beatListener);
}
}
synced = sync;
}
/**
 * Check whether we are currently staying in sync with the tempo master. Will only be meaningful if we are
 * sending status packets.
 *
 * @return {@code true} if our status packets will be tempo and beat aligned with the tempo master
 */
public synchronized boolean isSynced() {
return synced;
}
/**
 * Indicates whether we believe our channel is currently on the air (audible in the mixer output). Will only
 * be meaningful if we are sending status packets.
 */
private boolean onAir = false;
/**
 * Change whether we believe our channel is currently on the air (audible in the mixer output). Only meaningful
 * if we are sending status packets. If there is a real DJM mixer on the network, it will rapidly override any
 * value established by this method with its actual report about the channel state.
 *
 * @param audible {@code true} if we should report ourselves as being on the air in our status packets
 */
public synchronized void setOnAir(boolean audible) {
onAir = audible;
}
/**
 * Checks whether we believe our channel is currently on the air (audible in the mixer output). Only meaningful
 * if we are sending status packets. If there is a real DJM mixer on the network, it will be controlling the state
 * of this property.
 *
 * @return {@code true} if we are reporting ourselves as being on the air in our status packets
 */
public synchronized boolean isOnAir() {
return onAir;
}
/**
 * Controls the tempo at which we report ourselves to be playing. Only meaningful if we are sending status packets.
 * If {@link #isSynced()} is {@code true} and we are not the tempo master, any value set by this method will be
 * overridden by the next tempo master change.
 *
 * @param bpm the tempo, in beats per minute, that we should report in our status and beat packets
 */
public void setTempo(double bpm) {
final double oldTempo = metronome.getTempo();
metronome.setTempo(bpm);
notifyBeatSenderOfChange();
// Only announce tempo changes that we control and that are large enough to matter.
if (isTempoMaster() && (Math.abs(bpm - oldTempo) > getTempoEpsilon())) {
deliverTempoChangedAnnouncement(bpm);
}
}
/**
 * Check the tempo at which we report ourselves to be playing. Only meaningful if we are sending status packets.
 *
 * @return the tempo, in beats per minute, that we are reporting in our status and beat packets
 */
public double getTempo() {
return metronome.getTempo();
}
/**
 * The longest beat we will report playing; if we are still playing and reach this beat, we will loop back to beat
 * one. If we are told to jump to a larger beat than this, we map it back into the range we will play. This would
 * be a little over nine hours at 120 bpm, which seems long enough for any track.
 */
public final int MAX_BEAT = 65536;
/**
 * Keeps the beat number we report from growing indefinitely, wrapping it back into the range
 * 1..{@link #MAX_BEAT} after a little over nine hours of playback; maybe we are playing a
 * giant loop?
 *
 * @param beat the candidate beat number
 * @return an equivalent beat number within the supported range
 */
private int wrapBeat(int beat) {
    if (beat > MAX_BEAT) {
        // Beats are one-based, so shift down before taking the modulus, then shift back up.
        return ((beat - 1) % MAX_BEAT) + 1;
    }
    return beat;
}
/**
 * Moves our current playback position to the specified beat; this will be reflected in any status and beat
 * packets that we are sending. An incoming value less than one will jump us to the first beat, and values past
 * {@link #MAX_BEAT} are wrapped back into range.
 *
 * @param beat the beat that we should pretend to be playing
 */
public synchronized void jumpToBeat(int beat) {
    final int target = (beat < 1) ? 1 : wrapBeat(beat);
    if (playing) {
        // The live metronome tracks our position while playing.
        metronome.jumpToBeat(target);
    } else {
        // While stopped, we just update the saved position we will resume from.
        whereStopped = metronome.getSnapshot(metronome.getTimeOfBeat(target));
    }
}
/**
 * Used in the process of handing off the tempo master role to another player.
 */
private final AtomicInteger syncCounter = new AtomicInteger(0);
/**
 * Tracks the largest sync counter we have seen on the network, used in the process of handing off the tempo master
 * role to another player.
 */
private final AtomicInteger largestSyncCounter = new AtomicInteger(0);
/**
 * Used in the process of handing off the tempo master role to another player. Usually has the value 0xff, meaning
 * no handoff is taking place. But when we are in the process of handing off the role, will hold the device number
 * of the player that is taking over as tempo master.
 */
private final AtomicInteger nextMaster = new AtomicInteger(0xff);
/**
 * Used in the process of being handed the tempo master role from another player. Usually has the value 0, meaning
 * no handoff is taking place. But when we have received a successful yield response, will hold the device number
 * of the player that is yielding to us, so we can watch for the next stage in its status updates.
 */
private final AtomicInteger masterYieldedFrom = new AtomicInteger(0);
/**
 * Keeps track of the number of status packets we send.
 */
private final AtomicInteger packetCounter = new AtomicInteger(0);
/**
 * The template used to assemble a status packet when we are sending them. Several offsets (device
 * number, playing/master flags, tempo, beat position, packet counter, etc.) are overwritten with
 * live values by {@code sendStatus()} before each transmission.
 */
private final static byte[] STATUS_PAYLOAD = { 0x01,
0x04, 0x00, 0x00, (byte)0xf8, 0x00, 0x00, 0x01, 0x00, 0x00, 0x03, 0x01, 0x00, 0x00, 0x00, 0x00, 0x01, // 0x020
0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // 0x030
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // 0x040
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // 0x050
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x04, 0x04, 0x00, 0x00, 0x00, 0x04, // 0x060
0x00, 0x00, 0x00, 0x04, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x31, 0x2e, 0x34, 0x30, // 0x070
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, (byte)0xff, 0x00, 0x00, 0x10, 0x00, 0x00, // 0x080
(byte)0x80, 0x00, 0x00, 0x00, 0x7f, (byte)0xff, (byte)0xff, (byte)0xff, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // 0x090
0x00, 0x00, 0x00, 0x00, 0x01, (byte)0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // 0x0a0
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // 0x0b0
0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0f, 0x01, 0x00, 0x00, // 0x0c0
0x12, 0x34, 0x56, 0x78, 0x00, 0x00, 0x00, 0x00, 0x01, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, // 0x0d0
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // 0x0e0
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // 0x0f0
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // 0x100
0x00, 0x00, 0x00, 0x15, 0x00, 0x00, 0x07, 0x61, 0x00, 0x00, 0x06, 0x2f // 0x110
};
/**
 * Send a status packet to all devices on the network. Used when we are actively sending status, presumably so we
 * can be the tempo master. The packet is built from {@code STATUS_PAYLOAD} with live values
 * patched in at the documented offsets, then unicast to every device the DeviceFinder knows.
 */
private synchronized void sendStatus() {
// Report the live metronome position when playing, or the frozen position otherwise.
Snapshot playState = (playing ? metronome.getSnapshot() : whereStopped);
byte[] payload = new byte[STATUS_PAYLOAD.length];
System.arraycopy(STATUS_PAYLOAD, 0, payload, 0, STATUS_PAYLOAD.length);
payload[0x02] = getDeviceNumber();
payload[0x05] = payload[0x02];
payload[0x08] = (byte)(playing ? 1 : 0); // a, playing flag
payload[0x09] = payload[0x02]; // Dr, the player from which the track was loaded
payload[0x5c] = (byte)(playing ? 3 : 5); // P1, playing flag
Util.numberToBytes(syncCounter.get(), payload, 0x65, 4);
payload[0x6a] = (byte)(0x84 + // F, main status bit vector
(playing ? 0x40 : 0) + (master.get() ? 0x20 : 0) + (synced ? 0x10 : 0) + (onAir ? 0x08 : 0));
payload[0x6c] = (byte)(playing ? 0x7a : 0x7e); // P2, playing flag
Util.numberToBytes((int)Math.round(getTempo() * 100), payload, 0x73, 2); // tempo in hundredths of a BPM
payload[0x7e] = (byte)(playing ? 9 : 1); // P3, playing flag
payload[0x7f] = (byte)(master.get() ? 1 : 0); // Mm, tempo master flag
payload[0x80] = (byte)nextMaster.get(); // Mh, tempo master handoff indicator
Util.numberToBytes((int)playState.getBeat(), payload, 0x81, 4); // current beat number
payload[0x87] = (byte)(playState.getBeatWithinBar());
Util.numberToBytes(packetCounter.incrementAndGet(), payload, 0xa9, 4);
DatagramPacket packet = Util.buildPacket(Util.PacketType.CDJ_STATUS,
ByteBuffer.wrap(announcementBytes, DEVICE_NAME_OFFSET, DEVICE_NAME_LENGTH).asReadOnlyBuffer(),
ByteBuffer.wrap(payload));
packet.setPort(UPDATE_PORT);
// Unicast the same packet to each device we currently know about.
for (DeviceAnnouncement device : DeviceFinder.getInstance().getCurrentDevices()) {
packet.setAddress(device.getAddress());
try {
socket.get().send(packet);
} catch (IOException e) {
logger.warn("Unable to send status packet to " + device, e);
}
}
}
/**
 * Holds the singleton instance of this class.
 */
private static final VirtualCdj ourInstance = new VirtualCdj();
/**
 * Get the singleton instance of this class.
 *
 * @return the only instance of this class which exists
 */
public static VirtualCdj getInstance() {
return ourInstance;
}
/**
 * Register any relevant listeners; private to prevent instantiation.
 */
private VirtualCdj() {
// Arrange to have our status accurately reflect any relevant updates and commands from the mixer.
BeatFinder.getInstance().addOnAirListener(this);
BeatFinder.getInstance().addFaderStartListener(this);
BeatFinder.getInstance().addSyncListener(this);
BeatFinder.getInstance().addMasterHandoffListener(this);
}
/**
 * Describe our current state for logging and debugging, including network and sync details when
 * we are running, and playback details when we are also sending status packets.
 *
 * @return a human-readable summary of this VirtualCdj's configuration and state
 */
@Override
public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("VirtualCdj[number:").append(getDeviceNumber()).append(", name:").append(getDeviceName());
    sb.append(", announceInterval:").append(getAnnounceInterval());
    sb.append(", useStandardPlayerNumber:").append(getUseStandardPlayerNumber());
    sb.append(", tempoEpsilon:").append(getTempoEpsilon()).append(", active:").append(isRunning());
    if (isRunning()) {
        sb.append(", localAddress:").append(getLocalAddress().getHostAddress());
        sb.append(", broadcastAddress:").append(getBroadcastAddress().getHostAddress());
        sb.append(", latestStatus:").append(getLatestStatus()).append(", masterTempo:").append(getMasterTempo());
        sb.append(", tempoMaster:").append(getTempoMaster());
        // Fixed: this label previously reported the value of isSynced() by mistake.
        sb.append(", isSendingStatus:").append(isSendingStatus());
        if (isSendingStatus()) {
            sb.append(", isSynced:").append(isSynced());
            sb.append(", isTempoMaster:").append(isTempoMaster());
            sb.append(", isPlaying:").append(isPlaying());
            sb.append(", isOnAir:").append(isOnAir());
            sb.append(", metronome:").append(metronome);
        }
    }
    return sb.append("]").toString();
}
} |
package cn.updev.Users.Group.GroupMemberRule;
import cn.updev.Users.Group.GroupInfo.GroupMemberInfoFactory;
import cn.updev.Users.Static.EnumeRule.GroupRule;
import cn.updev.Users.Static.UserOrGroupDAO.UserOrGroupDelete;
import cn.updev.Users.Static.UserOrGroupDAO.UserOrGroupQuery;
import cn.updev.Users.Static.UserOrGroupDAO.UserOrGroupUpdate;
import cn.updev.Users.Static.UserOrGroupInterface.IGroupUser;
import cn.updev.Users.User.GroupUser;
public class RootRule {
public boolean dismissGroup(Integer userId,Integer groupId){
IGroupUser iGroupUser = new UserOrGroupQuery().queryGroupUser(userId,groupId);
if(iGroupUser != null && iGroupUser.getGroupMemberRule() == GroupRule.Creater){
return new UserOrGroupDelete().deleteGroupInfoById(groupId);
}
return false;
}
public boolean appointAdmin(Integer groupId,Integer createrId,Integer userId){
IGroupUser iGroupUser = new UserOrGroupQuery().queryGroupUser(createrId,groupId);
if(iGroupUser != null && iGroupUser.getGroupMemberRule() == GroupRule.Creater){
IGroupUser iGroupUser1 = new UserOrGroupQuery().queryGroupUser(userId,groupId);
iGroupUser1.setGroupMemberRule(GroupRule.Admin);
return new UserOrGroupUpdate().updateGroupUser((GroupUser)iGroupUser1);
}
return false;
}
public boolean cancelAdmin(Integer groupId,Integer createrId,Integer userId){
IGroupUser iGroupUser = new UserOrGroupQuery().queryGroupUser(createrId,groupId);
if(iGroupUser != null && iGroupUser.getGroupMemberRule() == GroupRule.Creater){
IGroupUser iGroupUser1 = new UserOrGroupQuery().queryGroupUser(userId,groupId);
iGroupUser1.setGroupMemberRule(GroupRule.User);
return new UserOrGroupUpdate().updateGroupUser((GroupUser)iGroupUser1);
}
return false;
}
} |
package org.intermine.bio.web.export;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.intermine.bio.io.gff3.GFF3Record;
import org.intermine.bio.util.GFF3Util;
import org.intermine.model.bio.LocatedSequenceFeature;
import org.intermine.objectstore.ObjectStoreException;
import org.intermine.util.IntPresentSet;
import org.intermine.web.logic.export.ExportException;
import org.intermine.web.logic.export.ExportHelper;
import org.intermine.web.logic.export.Exporter;
import org.intermine.web.logic.results.ResultElement;
/**
* Exports LocatedSequenceFeature objects in GFF3 format.
* @author Kim Rutherford
* @author Jakub Kulaviak
*/
public class GFF3Exporter implements Exporter
{
PrintWriter out;
private List<Integer> featureIndexes;
private Map<String, String> soClassNames;
private int writtenResultsCount = 0;
private boolean headerPrinted = false;
private IntPresentSet exportedIds = new IntPresentSet();
private List<String> attributesNames;
private String sourceName;
/**
* Constructor.
* @param out output stream
* @param indexes index of column with exported sequence
* @param soClassNames mapping
* @param attributesNames names of attributes that are printed in record,
* they are names of columns in results table, they are in the same order
* as corresponding columns in results table
* @param sourceName name of Mine to put in GFF source column
*/
public GFF3Exporter(PrintWriter out, List<Integer> indexes, Map<String, String> soClassNames,
List<String> attributesNames, String sourceName) {
this.out = out;
this.featureIndexes = indexes;
this.soClassNames = soClassNames;
this.attributesNames = attributesNames;
this.sourceName = sourceName;
}
/**
* {@inheritDoc}
*/
public void export(Iterator<? extends List<ResultElement>> resultIt) {
if (featureIndexes.size() == 0) {
throw new ExportException("No columns with sequence");
}
try {
while (resultIt.hasNext()) {
List<ResultElement> row = resultIt.next();
exportRow(row);
}
out.flush();
} catch (Exception ex) {
throw new ExportException("Export failed", ex);
}
}
private void exportRow(List<ResultElement> row)
throws ObjectStoreException,
IllegalAccessException {
ResultElement elWithObject = getResultElement(row);
if (elWithObject == null) {
return;
}
LocatedSequenceFeature lsf = (LocatedSequenceFeature) elWithObject.getObject();
if (exportedIds.contains(lsf.getId())) {
return;
}
Set<String> gffFields = new HashSet<String>();
gffFields.add("chromosome.primaryIdentifier");
gffFields.add("chromosomeLocation.start");
gffFields.add("chromosomeLocation.end");
gffFields.add("chromosomeLocation.strand");
gffFields.add("primaryIdentifier");
gffFields.add("score");
Map<String, List<String>> attributes = new LinkedHashMap<String, List<String>>();
for (int i = 0; i < row.size(); i++) {
ResultElement el = row.get(i);
if (!gffFields.contains(attributesNames.get(i))) {
attributes.put(attributesNames.get(i), formatElementValue(el));
}
}
GFF3Record gff3Record = GFF3Util.makeGFF3Record(lsf, soClassNames, sourceName,
attributes);
if (gff3Record == null) {
// no chromsome ref or no chromosomeLocation ref
return;
}
if (!headerPrinted) {
out.println("##gff-version 3");
headerPrinted = true;
}
out.println(gff3Record.toGFF3());
exportedIds.add(lsf.getId());
writtenResultsCount++;
}
private List<String> formatElementValue(ResultElement el) {
List<String> ret = new ArrayList<String>();
String s;
if (el == null) {
s = "-";
} else {
Object obj = el.getField();
if (obj == null) {
s = "-";
} else {
s = obj.toString();
}
}
ret.add(s);
return ret;
}
private ResultElement getResultElement(List<ResultElement> row) {
ResultElement el = null;
for (Integer index : featureIndexes) {
el = row.get(index);
if (el != null) {
break;
}
}
return el;
}
/**
* {@inheritDoc}
*/
public boolean canExport(List<Class> clazzes) {
return canExportStatic(clazzes);
}
/* Method must have different name than canExport because canExport() method
* is inherited from Exporter interface */
/**
* @param clazzes classes of result row
* @return true if this exporter can export result composed of specified classes
*/
public static boolean canExportStatic(List<Class> clazzes) {
return ExportHelper.getClassIndex(clazzes, LocatedSequenceFeature.class) >= 0;
}
/**
* {@inheritDoc}
*/
public int getWrittenResultsCount() {
return writtenResultsCount;
}
} |
package com.android.utils.wificonnecter;
import java.util.ArrayList;
import java.util.List;
import java.util.Timer;
import java.util.TimerTask;
import android.annotation.SuppressLint;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.net.NetworkInfo;
import android.net.wifi.ScanResult;
import android.net.wifi.WifiInfo;
import android.net.wifi.WifiManager;
import android.os.Handler;
import android.os.Message;
import android.text.TextUtils;
import android.util.Log;
public class WiFiConnecter {
// Combo scans can take 5-6s to complete
private static final int WIFI_RESCAN_INTERVAL_MS = 5 * 1000;
// Security type constants used when configuring networks.
static final int SECURITY_NONE = 0;
static final int SECURITY_WEP = 1;
static final int SECURITY_PSK = 2;
static final int SECURITY_EAP = 3;
private static final String TAG = WiFiConnecter.class.getSimpleName();
public static final int MAX_TRY_COUNT = 3;
private Context mContext;
// private WifiManager mWifiManager;
// Broadcast filter/receiver pair registered in the constructor to observe Wi-Fi state changes.
private final IntentFilter mFilter;
private final BroadcastReceiver mReceiver;
private final Scanner mScanner;
// Callback for connect() progress; may be null.
private ActionListener mListener;
// Target network credentials captured by connect().
private String mSsid;
private String mPassword;
// Whether mReceiver is currently registered, so it is not unregistered twice.
private boolean isRegistered;
private boolean isActiveScan;
// When true, SSID comparisons are case-insensitive (the default set in the constructor).
private boolean bySsidIgnoreCase;
// Watchdog timer state used by startTimer()/stopTimer() to force a rescan on slow connects.
private TimerTask mTimerTask = null;
private boolean isWifiConnected = false;
private Timer mTimer = null;
/**
 * Create a connecter bound to the given context, registering a broadcast receiver for the
 * Wi-Fi state, scan-result, supplicant, and network-state change actions it reacts to.
 *
 * @param context used to register the receiver and reach the Wi-Fi system service
 */
public WiFiConnecter(Context context) {
this.mContext = context;
// mWifiManager = (WifiManager)
// context.getSystemService(Context.WIFI_SERVICE);
mFilter = new IntentFilter();
mFilter.addAction(WifiManager.WIFI_STATE_CHANGED_ACTION);
mFilter.addAction(WifiManager.SCAN_RESULTS_AVAILABLE_ACTION);
mFilter.addAction(WifiManager.NETWORK_IDS_CHANGED_ACTION);
mFilter.addAction(WifiManager.SUPPLICANT_STATE_CHANGED_ACTION);
mFilter.addAction(WifiManager.NETWORK_STATE_CHANGED_ACTION);
mReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
handleEvent(context, intent);
}
};
context.registerReceiver(mReceiver, mFilter);
isRegistered = true;
// SSID matching defaults to case-insensitive comparison.
bySsidIgnoreCase = true;
mScanner = new Scanner();
}
/**
 * Turn on the device's Wi-Fi radio, delegating to the shared {@code WiFiAdmin} helper.
 */
public void openWifi() {
WiFiAdmin.getInstance(mContext).openWifi();
}
/**
 * Fetch the current Wi-Fi scan results, filtered so that entries with empty SSIDs and
 * duplicate SSIDs are removed.
 *
 * @return the de-duplicated scan results; never {@code null}, possibly empty
 */
public List<ScanResult> getWifiList() {
    List<ScanResult> wifiList = WiFiAdmin.getInstance(mContext).getScanResults();
    List<ScanResult> newlist = new ArrayList<ScanResult>();
    if (wifiList == null) {
        // NOTE(review): guarding against a null result from getScanResults() — confirm
        // whether WiFiAdmin can actually return null here.
        return newlist;
    }
    for (ScanResult result : wifiList) {
        if (!TextUtils.isEmpty(result.SSID) && !containName(newlist, result.SSID)) {
            newlist.add(result);
        }
    }
    return newlist;
}
/**
 * Determine whether a list of scan results already contains an entry with the given SSID.
 *
 * @param sr the scan results gathered so far
 * @param name the SSID to look for
 * @return {@code true} if an exact (case-sensitive) SSID match is present
 */
public boolean containName(List<ScanResult> sr, String name) {
    for (ScanResult candidate : sr) {
        final String ssid = candidate.SSID;
        if (!TextUtils.isEmpty(ssid) && ssid.equals(name)) {
            return true;
        }
    }
    return false;
}
/**
 * Look up the SSID of the network we are currently connected to, if any.
 *
 * @return the current network name, as reported by {@code WiFiAdmin}
 */
public String getConnectSsid() {
return WiFiAdmin.getInstance(mContext).getCurrentWifiNetName();
}
/**
 * Check whether Wi-Fi is currently connected.
 *
 * @return {@code true} if a Wi-Fi connection is established
 */
public boolean isWifiActive() {
return WiFiAdmin.getInstance(mContext).isWifiConnected();
}
/**
 * Connect to a WiFi with the given ssid and password
 *
 * @param ssid the network name to connect to
 * @param password the network's password (ignored for open networks)
 * @param listener
 *            : for callbacks on start or success or failure. Can be null.
 */
public void connect(String ssid, String password, ActionListener listener) {
this.mListener = listener;
this.mSsid = ssid;
this.mPassword = password;
if (listener != null) {
listener.onStarted(ssid);
}
// If we are already connected to the requested network, report success immediately.
WifiInfo info = WiFiAdmin.getInstance(mContext).getWifiInfo();
String quotedString = StringUtils.convertToQuotedString(mSsid);
boolean ssidEquals = bySsidIgnoreCase ? quotedString.equalsIgnoreCase(StringUtils.convertToQuotedString(info.getSSID())) : quotedString.equals(StringUtils.convertToQuotedString(info.getSSID()));
if (ssidEquals) {
if (listener != null) {
listener.onSuccess(info);
listener.onFinished(true);
}
return;
}
// Otherwise kick off a scan; the broadcast receiver drives the rest of the connection.
mScanner.forceScan();
// Restart the watchdog timer which forces a rescan if the connection stalls.
stopTimer();
startTimer();
}
private void startTimer() {
if (mTimer == null) {
mTimer = new Timer();
}
if (mTimerTask == null) {
mTimerTask = new TimerTask() {
@Override
public void run() {
if (!isWifiConnected) { // wifi maybe connect failed
mScanner.removeMessages(1);
mScanner.sendEmptyMessage(1);
}
}
};
}
if (mTimer != null && mTimerTask != null)
mTimer.schedule(mTimerTask, 12 * 1000, 1);
Log.i(TAG, "start timmer
}
private void stopTimer() {
if (mTimer != null) {
mTimer.cancel();
mTimer = null;
}
if (mTimerTask != null) {
mTimerTask.cancel();
mTimerTask = null;
}
isWifiConnected = false;
Log.i(TAG, "stop timmer
}
    /**
     * Dispatches WiFi broadcast intents received by the registered receiver.
     * Two actions are handled: scan results becoming available (attempt the
     * actual connection) and network state changes (detect success).
     *
     * @param context broadcast context (unused here)
     * @param intent  the received broadcast intent
     */
    private void handleEvent(Context context, Intent intent) {
        String action = intent.getAction();
        // An access point scan has completed, and results are available from
        // the supplicant. Only react when we triggered the scan ourselves.
        if (WifiManager.SCAN_RESULTS_AVAILABLE_ACTION.equals(action) && isActiveScan) {
            Log.i(TAG, "receiver action --> android.net.wifi.SCAN_RESULTS");
            List<ScanResult> results = getWifiList();
            Log.i(TAG, "results.size() == " + (results == null ? 0 : results.size()));
            for (ScanResult result : results) {
                Log.i(TAG, "ssid ---> " + result.SSID + " <-- bySsidIgnoreCase == " + bySsidIgnoreCase);
                // 1.scan dest of ssid — compare quoted SSIDs, case-sensitivity
                // controlled by bySsidIgnoreCase.
                String quotedString = StringUtils.convertToQuotedString(mSsid);
                boolean ssidEquals = bySsidIgnoreCase ? quotedString.equalsIgnoreCase(StringUtils.convertToQuotedString(result.SSID)) : quotedString.equals(StringUtils.convertToQuotedString(result.SSID));
                Log.i(TAG, mSsid + " wifi isExist --> " + ssidEquals);
                if (ssidEquals) {
                    // Target found: stop rescans before attempting to connect.
                    mScanner.pause();
                    // 2.input error password — connect attempt can fail immediately.
                    if (!WiFi.connectToNewNetwork(WiFiAdmin.getInstance(mContext).getWifiManager(), result, mPassword)) {
                        Log.i(TAG, "connect failure");
                        if (mListener != null) {
                            mListener.onFailure();
                            mListener.onFinished(false);
                        }
                        onPause();
                    }
                    break;
                }
            }
            // Broadcast intent action indicating that the state of Wi-Fi
            // connectivity has changed.
        } else if (WifiManager.NETWORK_STATE_CHANGED_ACTION.equals(action)) {
            Log.i(TAG, "receiver action --> android.net.wifi.STATE_CHANGE");
            NetworkInfo mInfo = intent.getParcelableExtra(WifiManager.EXTRA_NETWORK_INFO);
            WifiInfo mWifiInfo = WiFiAdmin.getInstance(mContext).getWifiInfo();
            // ssid equals&&connected
            Log.i(TAG, " mInfo.getState() --> " + mInfo.getState());
            if (mWifiInfo != null && mInfo.isConnected() && mWifiInfo.getSSID() != null) {
                Log.i(TAG, "connect Success!");
                String quotedString = StringUtils.convertToQuotedString(mSsid);
                boolean ssidEquals = bySsidIgnoreCase ? quotedString.equalsIgnoreCase(StringUtils.convertToQuotedString(mWifiInfo.getSSID())) : quotedString.equals(StringUtils.convertToQuotedString(mWifiInfo.getSSID()));
                if (ssidEquals) {
                    // Connected to the SSID we asked for: stop the watchdog and
                    // notify success.
                    isWifiConnected = true;
                    stopTimer(); // connect success stop timer
                    if (mListener != null) {
                        mListener.onSuccess(mWifiInfo);
                        mListener.onFinished(true);
                    }
                    onPause();
                } else {
                    // TODO connect other wifi ssid
                }
            } else {
                Log.i(TAG, "mInfo.isConnected() --> " + mInfo.isConnected());
            }
        }
    }
    /** @return true if the given scan result represents an open (unsecured) network. */
    public boolean isScanResultOpen(ScanResult sr) {
        return WiFi.OPEN.equals(WiFi.getScanResultSecurity(sr));
    }
    /** Unregisters the broadcast receiver (if registered) and pauses scanning. */
    public void onPause() {
        if (isRegistered) {
            mContext.unregisterReceiver(mReceiver);
            isRegistered = false;
        }
        mScanner.pause();
    }
    /** Registers the broadcast receiver (if not yet registered) and resumes scanning. */
    public void onResume() {
        if (!isRegistered) {
            mContext.registerReceiver(mReceiver, mFilter);
            isRegistered = true;
        }
        mScanner.resume();
    }
@SuppressLint("HandlerLeak")
private class Scanner extends Handler {
private int mRetry = 0;
void resume() {
if (!hasMessages(0)) {
sendEmptyMessage(0);
}
}
void forceScan() {
removeMessages(0);
sendEmptyMessage(0);
}
void pause() {
mRetry = 0;
isActiveScan = false;
removeMessages(0);
}
@Override
public void handleMessage(Message message) {
switch (message.what) {
case 0:
if (mRetry < MAX_TRY_COUNT) {
mRetry++;
isActiveScan = true;
openWifi();
boolean startScan = WiFiAdmin.getInstance(mContext).startScan();
Log.d(TAG, "startScan:" + startScan);
if (!startScan) {
if (mListener != null) {
mListener.onFailure();
mListener.onFinished(false);
}
onPause();
return;
}
} else {
mRetry = 0;
isActiveScan = false;
if (mListener != null) {
mListener.onFailure();
mListener.onFinished(false);
}
onPause();
return;
}
sendEmptyMessageDelayed(0, WIFI_RESCAN_INTERVAL_MS);
break;
case 1:
stopTimer();
if (mListener != null) {
mListener.onFailure();
mListener.onFinished(false);
}
onPause();
break;
}
}
}
    /** Callback interface for the asynchronous WiFi connect operation. */
    public interface ActionListener {
        /**
         * The operation started.
         *
         * @param ssid the SSID being connected to
         */
        public void onStarted(String ssid);
        /**
         * The operation succeeded.
         *
         * @param info info of the network that was connected
         */
        public void onSuccess(WifiInfo info);
        /**
         * The operation failed.
         */
        public void onFailure();
        /**
         * The operation finished, whether it succeeded or failed.
         *
         * @param isSuccessed true if the connection succeeded
         */
        public void onFinished(boolean isSuccessed);
    }
} |
package org.blendee.jdbc;
import java.lang.ref.PhantomReference;
import java.lang.ref.Reference;
import java.lang.ref.ReferenceQueue;
import java.sql.ResultSet;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * Closes registered JDBC resources (e.g. {@link ResultSet}) automatically once
 * the objects enclosing them become phantom-reachable, replacing
 * finalize()-based cleanup.
 * <p>
 * NOTE(review): the original Javadoc was garbled (non-ASCII text stripped);
 * this description is reconstructed from the code — confirm against upstream.
 * @author (unknown)
 */
public class AutoCloseableFinalizer {
	// Distinguishes worker threads of multiple finalizer instances.
	private static final AtomicInteger threadCounter = new AtomicInteger();
	// Guards map, thread and the wait/notify handshake between regist() and execute().
	private final Object lock = new Object();
	// Phantom reference on the enclosure -> resource to close when the enclosure is reclaimed.
	private final Map<Reference<Object>, AutoCloseable> map = new HashMap<>();
	// The GC enqueues references here once their referents are reclaimed.
	private final ReferenceQueue<Object> reaped = new ReferenceQueue<Object>();
	private final Runnable runnable;
	private Thread thread;
	/**
	 * Creates a finalizer whose worker thread polls the reference queue.
	 * @param intervalmilliSeconds sleep between polls, in milliseconds
	 */
	public AutoCloseableFinalizer(int intervalmilliSeconds) {
		runnable = () -> {
			// Loop until execute() reports that the thread was interrupted.
			while (!execute()) {
				try {
					TimeUnit.MILLISECONDS.sleep(intervalmilliSeconds);
				} catch (InterruptedException e) {
					break;
				}
			}
		};
	}
	/**
	 * Registers a resource to be closed when its enclosing object is reclaimed.
	 * @param closeableEnclosure object whose reclamation triggers the close
	 * @param closeable resource (e.g. a JDBC handle) to close at that point
	 */
	public void regist(Object closeableEnclosure, AutoCloseable closeable) {
		synchronized (lock) {
			map.put(new PhantomReference<>(closeableEnclosure, reaped), closeable);
			// Wake the worker if it parked in execute() on an empty map.
			lock.notify();
		}
	}
	/**
	 * Reports whether the worker thread is currently running.
	 * @return true if started and alive
	 */
	public boolean started() {
		synchronized (lock) {
			return thread != null && thread.isAlive();
		}
	}
	/** Starts the daemon worker thread; no-op if it is already running. */
	public void start() {
		synchronized (lock) {
			if (started()) return;
			thread = new Thread(runnable, AutoCloseableFinalizer.class.getName() + "-" + threadCounter.getAndIncrement());
			thread.setDaemon(true);
			thread.start();
		}
	}
	/**
	 * Stops the worker thread.<br>
	 * Resources already registered are not closed here; use {@link #closeAll()}
	 * for that.
	 */
	public void stop() {
		synchronized (lock) {
			if (thread == null) return;
			thread.interrupt();
			thread = null;
			// Presumably a hint to enqueue remaining references before shutdown —
			// System.gc() is advisory only; confirm this is intentional.
			System.gc();
			lock.notify();
		}
	}
	/** Closes every registered resource immediately and forgets them all. */
	public void closeAll() {
		synchronized (lock) {
			map.values().forEach(c -> close(c));
			map.clear();
		}
	}
	// Drains the reference queue, closing each reclaimed entry's resource.
	// Returns true when the worker thread should terminate.
	private boolean execute() {
		Reference<?> ref;
		while ((ref = reaped.poll()) != null) {
			ref.clear();
			//syncregist — synchronized against regist() mutating the map
			synchronized (lock) {
				close(map.remove(ref));
				try {
					//wait — park while nothing is registered; regist()/stop() notify
					if (map.size() == 0) lock.wait();
				} catch (InterruptedException e) {
					return true;
				}
			}
		}
		return Thread.interrupted();
	}
	// Best-effort close; failures during finalization are deliberately ignored.
	private static void close(AutoCloseable closeable) {
		try {
			if (closeable != null)
				closeable.close();
		} catch (Exception e) {}
	}
}
package org.freecode.irc.votebot;
import org.freecode.irc.CtcpRequest;
import org.freecode.irc.CtcpResponse;
import org.freecode.irc.IrcConnection;
import org.freecode.irc.Privmsg;
import org.freecode.irc.event.CtcpRequestListener;
import org.freecode.irc.event.NumericListener;
import org.freecode.irc.event.PrivateMessageListener;
import org.freecode.irc.votebot.api.FVBModule;
import org.freecode.irc.votebot.dao.PollDAO;
import org.freecode.irc.votebot.dao.VoteDAO;
import javax.script.ScriptException;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.Locale;
import java.util.TimeZone;
public class FreeVoteBot implements PrivateMessageListener {
public static final double VERSION = 1.074D;
public static final String CHANNEL_SOURCE = "#freecode";
private PollDAO pollDAO;
private VoteDAO voteDAO;
private String[] channels;
private String nick, realName, serverHost, user;
private int port;
private ScriptModuleLoader sml;
private IrcConnection connection;
private ExpiryQueue<String> expiryQueue = new ExpiryQueue<>(1500L);
private LinkedList<FVBModule> moduleList = new LinkedList<>();
public static final SimpleDateFormat SDF;
static {
SDF = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss z", Locale.UK);
SDF.setTimeZone(TimeZone.getTimeZone("Europe/London"));
}
public void init() {
connectToIRCServer();
NoticeFilter.setFilterQueue(connection, 5000L);
addNickInUseListener();
registerUser();
addCTCPRequestListener();
identifyToNickServ();
joinChannels();
sml = new ScriptModuleLoader(this);
try {
moduleList.add(sml.loadFromFile(getClass().getResourceAsStream("/TestMod.py"), "TestMod.py"));
} catch (Exception e) {
e.printStackTrace();
}
}
private void registerUser() {
try {
connection.register(nick, user, realName);
} catch (IOException e) {
e.printStackTrace();
}
connection.addListener(this);
}
private void addNickInUseListener() {
NumericListener nickInUse = new NumericListener() {
public int getNumeric() {
return IrcConnection.ERR_NICKNAMEINUSE;
}
public void execute(String rawLine) {
FreeVoteBot.this.nick = FreeVoteBot.this.nick + "_";
try {
connection.sendRaw("NICK " + FreeVoteBot.this.nick);
} catch (IOException e) {
e.printStackTrace();
}
}
};
connection.addListener(nickInUse);
}
private void addCTCPRequestListener() {
connection.addListener(new CtcpRequestListener() {
public void onCtcpRequest(CtcpRequest request) {
if (request.getCommand().equals("VERSION")) {
request.getIrcConnection().send(new CtcpResponse(request.getIrcConnection(),
request.getNick(), "VERSION", "FreeVoteBot " + VERSION + " by " + CHANNEL_SOURCE + " on irc.rizon.net"));
} else if (request.getCommand().equals("PING")) {
request.getIrcConnection().send(new CtcpResponse(request.getIrcConnection(),
request.getNick(), "PING", request.getArguments()));
}
}
});
}
private void connectToIRCServer() {
try {
connection = new IrcConnection(serverHost, port);
} catch (IOException e) {
e.printStackTrace();
}
}
private void identifyToNickServ() {
File pass = new File("password.txt");
if (pass.exists()) {
try {
BufferedReader read = new BufferedReader(new FileReader(pass));
String s = read.readLine();
if (s != null) {
connection.send(new Privmsg("NickServ", "identify " + s, connection));
}
read.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
private void joinChannels() {
for (String channel : channels) {
connection.joinChannel(channel);
}
}
public void onPrivmsg(final Privmsg privmsg) {
if (privmsg.getNick().equalsIgnoreCase(nick)) {
return;
}
String sender = privmsg.getNick().toLowerCase();
if (expiryQueue.contains(sender) || !expiryQueue.insert(sender)) {
return;
}
for (FVBModule module : moduleList) {
try {
if (module.isEnabled() && module.canRun(privmsg)) {
module.process(privmsg);
return;
}
} catch (Exception e) {
privmsg.send(e.getMessage());
}
}
}
public static void askChanServForUserCreds(Privmsg privmsg) {
privmsg.getIrcConnection().send(new Privmsg("ChanServ", "WHY " + FreeVoteBot.CHANNEL_SOURCE + " " + privmsg.getNick(), privmsg.getIrcConnection()));
}
public void setPollDAO(PollDAO pollDAO) {
this.pollDAO = pollDAO;
}
public void setVoteDAO(VoteDAO voteDAO) {
this.voteDAO = voteDAO;
}
public void setNick(String nick) {
this.nick = nick;
}
public void setRealName(String realName) {
this.realName = realName;
}
public void setServerHost(String serverHost) {
this.serverHost = serverHost;
}
public void setUser(String user) {
this.user = user;
}
public void setPort(String port) {
this.port = Integer.parseInt(port);
}
public void setChannels(String channels) {
this.channels = channels.split(",");
}
public void setModules(final FVBModule[] modules) {
moduleList.clear();
moduleList.addAll(Arrays.asList(modules));
}
} |
package com.ecyrd.jspwiki.search;
import java.io.*;
import java.util.*;
import org.apache.log4j.Logger;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.queryParser.MultiFieldQueryParser;
import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.Hits;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Searcher;
import org.apache.lucene.search.highlight.Highlighter;
import org.apache.lucene.search.highlight.QueryScorer;
import org.apache.lucene.search.highlight.SimpleHTMLEncoder;
import org.apache.lucene.search.highlight.SimpleHTMLFormatter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import com.ecyrd.jspwiki.*;
import com.ecyrd.jspwiki.attachment.Attachment;
import com.ecyrd.jspwiki.attachment.AttachmentManager;
import com.ecyrd.jspwiki.providers.ProviderException;
import com.ecyrd.jspwiki.providers.WikiPageProvider;
import com.ecyrd.jspwiki.util.ClassUtil;
import com.ecyrd.jspwiki.util.WikiBackgroundThread;
/**
* Interface for the search providers that handle searching the Wiki
*
* @author Arent-Jan Banck for Informatica
* @since 2.2.21.
*/
public class LuceneSearchProvider implements SearchProvider
{
protected static final Logger log = Logger.getLogger(LuceneSearchProvider.class);
private WikiEngine m_engine;
// Lucene properties.
/** Which analyzer to use. Default is StandardAnalyzer. */
public static final String PROP_LUCENE_ANALYZER = "jspwiki.lucene.analyzer";
private static final String PROP_LUCENE_INDEXDELAY = "jspwiki.lucene.indexdelay";
private static final String PROP_LUCENE_INITIALDELAY = "jspwiki.lucene.initialdelay";
private String m_analyzerClass = "org.apache.lucene.analysis.standard.StandardAnalyzer";
private static final String LUCENE_DIR = "lucene";
// Number of page updates before we optimize the index.
public static final int LUCENE_OPTIMIZE_COUNT = 10;
private static final String LUCENE_ID = "id";
private static final String LUCENE_PAGE_CONTENTS = "contents";
private static final String LUCENE_AUTHOR = "author";
private static final String LUCENE_ATTACHMENTS = "attachment";
private static final String LUCENE_PAGE_NAME = "name";
private String m_luceneDirectory = null;
private int m_updateCount = 0;
protected Vector m_updates = new Vector(); // Vector because multi-threaded.
/** Maximum number of fragments from search matches. */
private static final int MAX_FRAGMENTS = 3;
public void initialize(WikiEngine engine, Properties props)
throws NoRequiredPropertyException, IOException
{
m_engine = engine;
m_luceneDirectory = engine.getWorkDir()+File.separator+LUCENE_DIR;
int initialDelay = TextUtil.getIntegerProperty( props, PROP_LUCENE_INITIALDELAY, LuceneUpdater.INITIAL_DELAY );
int indexDelay = TextUtil.getIntegerProperty( props, PROP_LUCENE_INDEXDELAY, LuceneUpdater.INDEX_DELAY );
m_analyzerClass = TextUtil.getStringProperty( props, PROP_LUCENE_ANALYZER, m_analyzerClass );
// FIXME: Just to be simple for now, we will do full reindex
// only if no files are in lucene directory.
File dir = new File(m_luceneDirectory);
log.info("Lucene enabled, cache will be in: "+dir.getAbsolutePath());
try
{
if( !dir.exists() )
{
dir.mkdirs();
}
if( !dir.exists() || !dir.canWrite() || !dir.canRead() )
{
log.error("Cannot write to Lucene directory, disabling Lucene: "+dir.getAbsolutePath());
throw new IOException( "Invalid Lucene directory." );
}
String[] filelist = dir.list();
if( filelist == null )
{
throw new IOException( "Invalid Lucene directory: cannot produce listing: "+dir.getAbsolutePath());
}
}
catch ( IOException e )
{
log.error("Problem while creating Lucene index - not using Lucene.", e);
}
// Start the Lucene update thread, which waits first
// for a little while before starting to go through
// the Lucene "pages that need updating".
LuceneUpdater updater = new LuceneUpdater( m_engine, this, initialDelay, indexDelay );
updater.start();
}
/**
* Performs a full Lucene reindex, if necessary.
* @throws IOException
*/
protected void doFullLuceneReindex()
throws IOException
{
File dir = new File(m_luceneDirectory);
String[] filelist = dir.list();
if( filelist == null )
{
throw new IOException( "Invalid Lucene directory: cannot produce listing: "+dir.getAbsolutePath());
}
try
{
if( filelist.length == 0 )
{
// No files? Reindex!
Date start = new Date();
IndexWriter writer = null;
log.info("Starting Lucene reindexing, this can take a couple minutes...");
// Do lock recovery, in case JSPWiki was shut down forcibly
Directory luceneDir = FSDirectory.getDirectory(dir,false);
if( IndexReader.isLocked(luceneDir) )
{
log.info("JSPWiki was shut down while Lucene was indexing - unlocking now.");
IndexReader.unlock( luceneDir );
}
try
{
writer = new IndexWriter( m_luceneDirectory,
getLuceneAnalyzer(),
true );
Collection allPages = m_engine.getPageManager().getAllPages();
for( Iterator iterator = allPages.iterator(); iterator.hasNext(); )
{
WikiPage page = (WikiPage) iterator.next();
String text = m_engine.getPageManager().getPageText( page.getName(),
WikiProvider.LATEST_VERSION );
luceneIndexPage( page, text, writer );
}
Collection allAttachments = m_engine.getAttachmentManager().getAllAttachments();
for( Iterator iterator = allAttachments.iterator(); iterator.hasNext(); )
{
Attachment att = (Attachment) iterator.next();
String text = getAttachmentContent( att.getName(),
WikiProvider.LATEST_VERSION );
luceneIndexPage( att, text, writer );
}
writer.optimize();
}
finally
{
try
{
if( writer != null ) writer.close();
}
catch( IOException e ) {}
}
Date end = new Date();
log.info("Full Lucene index finished in " +
(end.getTime() - start.getTime()) + " milliseconds.");
}
else
{
log.info("Files found in Lucene directory, not reindexing.");
}
}
catch( NoClassDefFoundError e )
{
log.info("Lucene libraries do not exist - not using Lucene.");
}
catch ( IOException e )
{
log.error("Problem while creating Lucene index - not using Lucene.", e);
}
catch ( ProviderException e )
{
log.error("Problem reading pages while creating Lucene index (JSPWiki won't start.)", e);
throw new IllegalArgumentException("unable to create Lucene index");
}
catch( ClassNotFoundException e )
{
log.error("Illegal Analyzer specified:",e);
}
catch( Exception e )
{
log.error("Unable to start lucene",e);
}
}
/**
* Fetches the attachment content from the repository.
* Content is flat text that can be used for indexing/searching or display
*/
private String getAttachmentContent( String attachmentName, int version )
{
AttachmentManager mgr = m_engine.getAttachmentManager();
try
{
Attachment att = mgr.getAttachmentInfo( attachmentName, version );
//FIXME: Find out why sometimes att is null
if(att != null)
{
return getAttachmentContent( att );
}
}
catch (ProviderException e)
{
log.error("Attachment cannot be loaded", e);
}
// Something was wrong, no result is returned.
return null;
}
/**
* @param att Attachment to get content for. Filename extension is used to determine the type of the attachment.
* @return String representing the content of the file.
* FIXME This is a very simple implementation of some text-based attachment, mainly used for testing.
* This should be replaced /moved to Attachment search providers or some other 'plugable' wat to search attachments
*/
private String getAttachmentContent( Attachment att )
{
AttachmentManager mgr = m_engine.getAttachmentManager();
//FIXME: Add attachment plugin structure
String filename = att.getFileName();
if(filename.endsWith(".txt") ||
filename.endsWith(".xml") ||
filename.endsWith(".ini") ||
filename.endsWith(".html"))
{
InputStream attStream;
try
{
attStream = mgr.getAttachmentStream( att );
StringWriter sout = new StringWriter();
FileUtil.copyContents( new InputStreamReader(attStream), sout );
attStream.close();
sout.close();
return sout.toString();
}
catch (ProviderException e)
{
log.error("Attachment cannot be loaded", e);
return null;
}
catch (IOException e)
{
log.error("Attachment cannot be loaded", e);
return null;
}
}
return null;
}
protected synchronized void updateLuceneIndex( WikiPage page, String text )
{
IndexWriter writer = null;
log.debug("Updating Lucene index for page '" + page.getName() + "'...");
try
{
pageRemoved(page);
// Now add back the new version.
writer = new IndexWriter(m_luceneDirectory, getLuceneAnalyzer(), false);
luceneIndexPage(page, text, writer);
m_updateCount++;
if( m_updateCount >= LUCENE_OPTIMIZE_COUNT )
{
writer.optimize();
m_updateCount = 0;
}
}
catch ( IOException e )
{
log.error("Unable to update page '" + page.getName() + "' from Lucene index", e);
}
catch( Exception e )
{
log.error("Unexpected Lucene exception - please check configuration!",e);
}
finally
{
try
{
if( writer != null ) writer.close();
}
catch( IOException e ) {}
}
log.debug("Done updating Lucene index for page '" + page.getName() + "'.");
}
private Analyzer getLuceneAnalyzer()
throws ClassNotFoundException,
InstantiationException,
IllegalAccessException
{
Class clazz = ClassUtil.findClass( "", m_analyzerClass );
Analyzer analyzer = (Analyzer)clazz.newInstance();
return analyzer;
}
private void luceneIndexPage( WikiPage page, String text, IndexWriter writer )
throws IOException
{
// make a new, empty document
Document doc = new Document();
if( text == null ) return;
// Raw name is the keyword we'll use to refer to this document for updates.
Field field = new Field(LUCENE_ID, page.getName(), Field.Store.YES, Field.Index.UN_TOKENIZED);
doc.add( field );
// Body text. It is stored in the doc for search contexts.
field = new Field(LUCENE_PAGE_CONTENTS, text,
Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.NO);
doc.add( field );
// Allow searching by page name. Both beautified and raw
field = new Field(LUCENE_PAGE_NAME, TextUtil.beautifyString( page.getName() ) + " " + page.getName(),
Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.NO);
doc.add( field );
// Allow searching by authorname
if( page.getAuthor() != null )
{
field = new Field(LUCENE_AUTHOR, page.getAuthor(),
Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.NO);
doc.add( field );
}
// Now add the names of the attachments of this page
try
{
Collection attachments = m_engine.getAttachmentManager().listAttachments(page);
String attachmentNames = "";
for( Iterator it = attachments.iterator(); it.hasNext(); )
{
Attachment att = (Attachment) it.next();
attachmentNames += att.getName() + ";";
}
field = new Field(LUCENE_ATTACHMENTS, attachmentNames,
Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.NO);
doc.add( field );
}
catch(ProviderException e)
{
// Unable to read attachments
log.error("Failed to get attachments for page", e);
}
writer.addDocument(doc);
}
public void pageRemoved( WikiPage page )
{
try
{
// Must first remove existing version of page.
IndexReader reader = IndexReader.open(m_luceneDirectory);
reader.deleteDocuments(new Term(LUCENE_ID, page.getName()));
reader.close();
}
catch ( IOException e )
{
log.error("Unable to update page '" + page.getName() + "' from Lucene index", e);
}
}
/**
* Adds a page-text pair to the lucene update queue. Safe to call always
*/
public void reindexPage( WikiPage page )
{
if( page != null )
{
String text;
// TODO: Think if this was better done in the thread itself?
if( page instanceof Attachment )
{
text = getAttachmentContent( (Attachment) page );
}
else
{
text = m_engine.getPureText( page );
}
if( text != null )
{
// Add work item to m_updates queue.
Object[] pair = new Object[2];
pair[0] = page;
pair[1] = text;
m_updates.add(pair);
log.debug("Scheduling page " + page.getName() + " for index update");
}
}
}
public Collection findPages( String query )
throws ProviderException
{
Searcher searcher = null;
ArrayList list = null;
try
{
String[] queryfields = { LUCENE_PAGE_CONTENTS, LUCENE_PAGE_NAME, LUCENE_AUTHOR, LUCENE_ATTACHMENTS };
QueryParser qp = new MultiFieldQueryParser( queryfields, getLuceneAnalyzer() );
//QueryParser qp = new QueryParser( LUCENE_PAGE_CONTENTS, getLuceneAnalyzer() );
Query luceneQuery = qp.parse( query );
Highlighter highlighter = new Highlighter(new SimpleHTMLFormatter("<span class=\"searchmatch\">", "</span>"),
new SimpleHTMLEncoder(),
new QueryScorer(luceneQuery));
try
{
searcher = new IndexSearcher(m_luceneDirectory);
}
catch( Exception ex )
{
log.info("Lucene not yet ready; indexing not started",ex);
return null;
}
Hits hits = searcher.search(luceneQuery);
list = new ArrayList(hits.length());
for ( int curr = 0; curr < hits.length(); curr++ )
{
Document doc = hits.doc(curr);
String pageName = doc.get(LUCENE_ID);
WikiPage page = m_engine.getPage(pageName, WikiPageProvider.LATEST_VERSION);
if(page != null)
{
if(page instanceof Attachment)
{
// Currently attachments don't look nice on the search-results page
// When the search-results are cleaned up this can be enabled again.
}
int score = (int)(hits.score(curr) * 100);
// Get highlighted search contexts
String text = doc.get(LUCENE_PAGE_CONTENTS);
String fragments[] = new String[0];
if (text != null)
{
TokenStream tokenStream = getLuceneAnalyzer()
.tokenStream(LUCENE_PAGE_CONTENTS, new StringReader(text));
fragments = highlighter.getBestFragments(tokenStream,
text, MAX_FRAGMENTS);
}
SearchResult result = new SearchResultImpl( page, score, fragments ); list.add(result);
}
else
{
log.error("Lucene found a result page '" + pageName + "' that could not be loaded, removing from Lucene cache");
pageRemoved(new WikiPage( m_engine, pageName ));
}
}
}
catch( IOException e )
{
log.error("Failed during lucene search",e);
}
catch( InstantiationException e )
{
log.error("Unable to get a Lucene analyzer",e);
}
catch( IllegalAccessException e )
{
log.error("Unable to get a Lucene analyzer",e);
}
catch( ClassNotFoundException e )
{
log.error("Specified Lucene analyzer does not exist",e);
}
catch( ParseException e )
{
log.info("Broken query; cannot parse",e);
throw new ProviderException("You have entered a query Lucene cannot process: "+e.getMessage());
}
finally
{
if( searcher != null ) try { searcher.close(); } catch( IOException e ) {}
}
return list;
}
public String getProviderInfo()
{
return "LuceneSearchProvider";
}
/**
* Updater thread that updates Lucene indexes.
*/
private static class LuceneUpdater extends WikiBackgroundThread
{
protected static final int INDEX_DELAY = 1;
protected static final int INITIAL_DELAY = 60;
private final LuceneSearchProvider m_provider;
private int initialDelay;
private LuceneUpdater( WikiEngine engine, LuceneSearchProvider provider,
int initialDelay, int indexDelay )
{
super( engine, indexDelay );
m_provider = provider;
setName("JSPWiki Lucene Indexer");
}
public void startupTask() throws Exception
{
// Sleep initially...
try
{
Thread.sleep( initialDelay * 1000L );
}
catch( InterruptedException e )
{
throw new InternalWikiException("Interrupted while waiting to start.");
}
// Reindex everything
m_provider.doFullLuceneReindex();
}
public void backgroundTask() throws Exception
{
synchronized ( m_provider.m_updates )
{
while( m_provider.m_updates.size() > 0 )
{
Object[] pair = ( Object[] ) m_provider.m_updates.remove(0);
WikiPage page = ( WikiPage ) pair[0];
String text = ( String ) pair[1];
m_provider.updateLuceneIndex(page, text);
}
}
}
}
// FIXME: This class is dumb; needs to have a better implementation
private static class SearchResultImpl
implements SearchResult
{
private WikiPage m_page;
private int m_score;
private String[] m_contexts;
public SearchResultImpl( WikiPage page, int score, String[] contexts )
{
m_page = page;
m_score = score;
m_contexts = contexts;
}
public WikiPage getPage()
{
return m_page;
}
/* (non-Javadoc)
* @see com.ecyrd.jspwiki.SearchResult#getScore()
*/
public int getScore()
{
return m_score;
}
public String[] getContexts()
{
return m_contexts;
}
}
} |
package org.testng;
import java.io.EOFException;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetAddress;
import java.net.MalformedURLException;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketException;
import java.net.URL;
import java.net.URLClassLoader;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import javax.xml.parsers.ParserConfigurationException;
import org.testng.internal.ClassHelper;
import org.testng.internal.HostFile;
import org.testng.internal.Invoker;
import org.testng.internal.Utils;
import org.testng.internal.annotations.DefaultAnnotationTransformer;
import org.testng.internal.annotations.IAnnotationFinder;
import org.testng.internal.annotations.IAnnotationTransformer;
import org.testng.internal.annotations.ITest;
import org.testng.internal.annotations.JDK14AnnotationFinder;
import org.testng.internal.remote.SlavePool;
import org.testng.internal.thread.IPooledExecutor;
import org.testng.internal.thread.ThreadUtil;
import org.testng.internal.version.VersionInfo;
import org.testng.log4testng.Logger;
import org.testng.remote.ConnectionInfo;
import org.testng.remote.RemoteSuiteWorker;
import org.testng.remote.RemoteTestWorker;
import org.testng.reporters.EmailableReporter;
import org.testng.reporters.FailedReporter;
import org.testng.reporters.SuiteHTMLReporter;
import org.testng.xml.Parser;
import org.testng.xml.XmlClass;
import org.testng.xml.XmlSuite;
import org.testng.xml.XmlTest;
import org.xml.sax.SAXException;
/**
* This class is the main entry point for running tests in the TestNG framework.
* Users can create their own TestNG object and invoke it in many different
* ways:
* <ul>
* <li>On an existing testng.xml
* <li>On a synthetic testng.xml, created entirely from Java
* <li>By directly setting the test classes
* </ul>
* You can also define which groups to include or exclude, assign parameters, etc...
* <P/>
* The command line parameters are:
* <UL>
* <LI>-d <TT>outputdir</TT>: specify the output directory</LI>
* <LI>-testclass <TT>class_name</TT>: specifies one or several class names </li>
* <LI>-testjar <TT>jar_name</TT>: specifies the jar containing the tests</LI>
* <LI>-sourcedir <TT>src1;src2</TT>: ; separated list of source directories
* (used only when javadoc annotations are used)</LI>
* <LI>-target</LI>
* <LI>-groups</LI>
* <LI>-testrunfactory</LI>
* <LI>-listener</LI>
* </UL>
* <P/>
* Please consult documentation for more details.
*
* FIXME: should support more than simple paths for suite xmls
*
* @see #usage()
*
* @author <a href = "mailto:cedric@beust.com">Cedric Beust</a>
* @author <a href = "mailto:the_mindstorm@evolva.ro">Alex Popescu</a>
*/
public class TestNG {
/** This class' log4testng Logger. */
private static final Logger LOGGER = Logger.getLogger(TestNG.class);

/** The default name for a suite launched from the command line */
public static final String DEFAULT_SUITE_NAME = "Command line suite";

/** The default name for a test launched from the command line */
public static final String DEFAULT_TEST_NAME = "Command line test";

/** The default name of the result's output directory. */
public static final String DEFAULT_OUTPUTDIR = "test-output";

/** A separator constant (semi-colon). */
public static final String SRC_SEPARATOR = ";";

/** The JDK50 annotation type ID ("JDK5").*/
public static final String JDK5_ANNOTATION_TYPE = "JDK5";

/** The JavaDoc annotation type ID ("javadoc"). */
public static final String JAVADOC_ANNOTATION_TYPE = "javadoc";

/** The most recently created TestNG instance; see {@link #getDefault()}. */
private static TestNG m_instance;

/** Indicates the TestNG JAR version (JDK 1.4 or JDK 5.0+). */
private static final boolean m_isJdk14 = VersionInfo.IS_JDK14;

/** All suites to run, whether parsed from XML, read from a jar or built from command-line classes. */
protected List<XmlSuite> m_suites = new ArrayList<XmlSuite>();

/** Suites synthesized from command-line classes (a subset of m_suites). */
protected List<XmlSuite> m_cmdlineSuites;

/** Directory where reports are written. */
protected String m_outputDir = DEFAULT_OUTPUTDIR;

/** The source directories as set by setSourcePath (or testng-sourcedir-override.properties). */
protected String[] m_sourceDirs;

/** The annotation type for suites/tests that have not explicitly set this attribute. */
protected String m_target = m_isJdk14 ? JAVADOC_ANNOTATION_TYPE : JDK5_ANNOTATION_TYPE;

// Annotation finders; created in initializeAnnotationFinders().
protected IAnnotationFinder m_javadocAnnotationFinder;
protected IAnnotationFinder m_jdkAnnotationFinder;

// Group filters applied to command-line suites.
protected String[] m_includedGroups;
protected String[] m_excludedGroups;

// Boxed so "not specified" (null) can be distinguished; see runSuitesLocally().
private Boolean m_isJUnit = Boolean.FALSE;
protected boolean m_useDefaultListeners = true;

/** Optional factory used to create test runners; may be null. */
protected ITestRunnerFactory m_testRunnerFactory;

// These listeners can be overridden from the command line
protected List<ITestListener> m_testListeners = new ArrayList<ITestListener>();
protected List<ISuiteListener> m_suiteListeners = new ArrayList<ISuiteListener>();
private List<IReporter> m_reporters = new ArrayList<IReporter>();

// Bit flags OR'ed into m_status; main() uses the combined value as the
// process exit code.
public static final int HAS_FAILURE = 1;
public static final int HAS_SKIPPED = 2;
public static final int HAS_FSP = 4;
public static final int HAS_NO_TEST = 8;

protected int m_status;
// Set by ExitCodeListener once any test starts.
protected boolean m_hasTests= false;

/** The port on which this client will listen. */
private int m_clientPort = 0;

/** The name of the file containing the list of hosts where distributed
 * tests will be dispatched. */
private String m_hostFile;
private SlavePool m_slavePool = new SlavePool();

// Command line suite parameters
private int m_threadCount;
private boolean m_useThreadCount;
private String m_parallelMode;
private boolean m_useParallelMode;
private Class[] m_commandLineTestClasses;
/**
 * Default constructor. Setting also usage of default listeners/reporters.
 */
public TestNG() {
    init(true);
}

/**
 * Used by maven2 to have 0 output of any kind come out
 * of testng.
 * @param useDefaultListeners Whether or not any default reports
 * should be added to tests.
 */
public TestNG(boolean useDefaultListeners) {
    init(useDefaultListeners);
}

// Shared constructor logic: records this instance as the global singleton
// (consumed by the no-arg ExitCodeListener constructor) and stores the
// default-listener preference.
private void init(boolean useDefaultListeners) {
    m_instance = this;
    m_useDefaultListeners = useDefaultListeners;
}

/**
 * @deprecated
 */
@Deprecated
public static TestNG getDefault() {
    return m_instance;
}

/** @return the bit mask of HAS_* status flags accumulated during the run. */
public int getStatus() {
    return m_status;
}

/** ORs the given HAS_* flag into the run status; flags are never cleared. */
protected void setStatus(int status) {
    m_status |= status;
}
/**
 * Sets the output directory where the reports will be created.
 * Null or empty values are ignored and the current directory is kept.
 *
 * @param outputdir The directory.
 */
public void setOutputDirectory(final String outputdir) {
    // Guard clauses: keep the previously configured (or default) directory
    // when no usable value is supplied.
    if (null == outputdir) {
        return;
    }
    if ("".equals(outputdir)) {
        return;
    }
    m_outputDir = outputdir;
}
/**
 * If this method is passed true before run(), the default listeners
 * will not be used.
 * <ul>
 * <li>org.testng.reporters.TestHTMLReporter
 * <li>org.testng.reporters.JUnitXMLReporter
 * </ul>
 *
 * @see org.testng.reporters.TestHTMLReporter
 * @see org.testng.reporters.JUnitXMLReporter
 */
public void setUseDefaultListeners(boolean useDefaultListeners) {
    // Read later by initializeListeners() and forwarded to each SuiteRunner.
    m_useDefaultListeners = useDefaultListeners;
}
/**
 * The default annotation type for suites that have not explicitly set the annotation property.
 * The target is used only in JDK5+.
 * @param target the default annotation type. This is one of the two constants
 * (TestNG.JAVADOC_ANNOTATION_TYPE or TestNG.JDK5_ANNOTATION_TYPE).
 * For backward compatibility reasons we accept "1.4", "1.5". Any other value will
 * default to TestNG.JDK5_ANNOTATION_TYPE.
 */
public void setTarget(final String target) {
    // Target is used only in JDK 1.5 and may get null in JDK 1.4
    if (null == target) {
        return;
    }
    // Exact matches first (no warning), then legacy aliases (info), then the
    // catch-all default (warn). equalsIgnoreCase replaces the previous
    // toLowerCase().equals(...) comparisons: same intent, no locale pitfalls,
    // no temporary strings.
    if (target.equals(JAVADOC_ANNOTATION_TYPE)) {
        m_target = JAVADOC_ANNOTATION_TYPE;
    }
    else if (target.equals(JDK5_ANNOTATION_TYPE)) {
        m_target = JDK5_ANNOTATION_TYPE;
    }
    else if ("1.4".equals(target)
        || target.equalsIgnoreCase(JAVADOC_ANNOTATION_TYPE)) {
        // For backward compatibility only
        m_target = JAVADOC_ANNOTATION_TYPE;
        LOGGER.info("Illegal target type " + target + " defaulting to " + JAVADOC_ANNOTATION_TYPE);
    }
    else if ("1.5".equals(target)
        || target.equalsIgnoreCase(JDK5_ANNOTATION_TYPE)
        || target.equalsIgnoreCase("jdk15")) {
        // For backward compatibility only. The "1.5"/"jdk15" branches were
        // previously separate but set the same value and logged the identical
        // message, so they are merged here.
        m_target = JDK5_ANNOTATION_TYPE;
        LOGGER.info("Illegal target type " + target + " defaulting to " + JDK5_ANNOTATION_TYPE);
    }
    else {
        // Unknown value: log at warn level (not just info) and fall back.
        // TODO should we make this an error?
        m_target = JDK5_ANNOTATION_TYPE;
        LOGGER.warn("Illegal target type " + target + " defaulting to " + JDK5_ANNOTATION_TYPE);
    }
}
/**
 * Sets the ; separated path of source directories. This is used only with JavaDoc type
 * annotations. The directories do not have to be the root of a class hierarchy. For
 * example, "c:\java\src\org\testng" is a valid directory.
 *
 * If a resource named "testng-sourcedir-override.properties" is found in the classpath,
 * it will override this call. "testng-sourcedir-override.properties" must contain a
 * sourcedir property initialized with a semi-colon list of directories. For example:
 *
 * sourcedir=c:\java\src\org\testng;D:/dir2
 *
 * Note that for the override to occur, this method must be called. i.e. it is not sufficient
 * to place "testng-sourcedir-override.properties" in the classpath.
 *
 * @param sourcePaths a semi-colon separated list of source directories.
 */
public void setSourcePath(String sourcePaths) {
    // This is an optimization to reduce the sourcePath scope
    // Is it OK to look only for the Thread context class loader?
    InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream("testng-sourcedir-override.properties");
    // Resource exists. Use override values and ignore given value
    if (is != null) {
        Properties props = new Properties();
        try {
            props.load(is);
        }
        catch (IOException e) {
            throw new RuntimeException("Error loading testng-sourcedir-override.properties", e);
        }
        finally {
            // The stream was previously leaked; Properties.load does not
            // close it for us.
            try {
                is.close();
            }
            catch (IOException ignored) {
                // Best-effort close: the properties were already read.
            }
        }
        sourcePaths = props.getProperty("sourcedir");
    }
    if (null == sourcePaths || "".equals(sourcePaths.trim())) {
        return;
    }
    m_sourceDirs = Utils.split(sourcePaths, SRC_SEPARATOR);
}
/**
 * Sets a jar containing a testng.xml file.
 *
 * @param jarPath path to the jar; null or empty values are ignored.
 */
public void setTestJar(String jarPath) {
    if ((null == jarPath) || "".equals(jarPath)) {
        return;
    }
    File jarFile = new File(jarPath);
    try {
        // Expose the jar's contents through a "jar:" URL class loader and
        // install it as the context class loader so classes referenced by the
        // embedded suites can be resolved later.
        URL jarfile = new URL("jar", "", "file:" + jarFile.getAbsolutePath() + "!/");
        URLClassLoader jarLoader = new URLClassLoader(new URL[] { jarfile });
        Thread.currentThread().setContextClassLoader(jarLoader);
        // The no-arg Parser reads testng.xml via the context class loader,
        // i.e. from inside the jar installed just above.
        m_suites.addAll(new Parser().parse());
    }
    catch(MalformedURLException mfurle) {
        System.err.println("could not find jar file named: " + jarFile.getAbsolutePath());
    }
    catch(IOException ioe) {
        System.out.println("An exception occurred while trying to load testng.xml from within jar "
            + jarFile.getAbsolutePath());
    }
    catch(SAXException saxe) {
        System.out.println("testng.xml from within jar "
            + jarFile.getAbsolutePath()
            + " is not well formatted");
        saxe.printStackTrace(System.out);
    }
    catch(ParserConfigurationException pce) {
        pce.printStackTrace(System.out);
    }
}
/**
 * Define the number of threads in the thread pool.
 * A value below 1 aborts the run with a usage message.
 */
public void setThreadCount(int threadCount) {
    if (threadCount >= 1) {
        m_threadCount = threadCount;
        m_useThreadCount = true;
    }
    else {
        // exitWithError prints usage and terminates the JVM.
        exitWithError("Cannot use a threadCount parameter less than 1; 1 > " + threadCount);
    }
}

/**
 * Define whether this run will be run in parallel mode.
 */
public void setParallel(String parallel) {
    // The flag records that the caller set a mode explicitly, even if null.
    m_useParallelMode = true;
    m_parallelMode = parallel;
}

/** Replaces the command-line suite list with the single given suite and queues it for the run. */
public void setCommandLineSuite(XmlSuite suite) {
    List<XmlSuite> commandLineSuites = new ArrayList<XmlSuite>();
    commandLineSuites.add(suite);
    m_cmdlineSuites = commandLineSuites;
    m_suites.add(suite);
}
/**
 * Set the test classes to be run by this TestNG object. This method
 * will create a dummy suite that will wrap these classes called
 * "Command Line Test".
 * <p/>
 * If used together with threadCount, parallel, groups, excludedGroups than this one must be set first.
 *
 * @param classes An array of classes that contain TestNG annotations.
 */
public void setTestClasses(Class[] classes) {
    m_commandLineTestClasses = classes;
}

/** @return the annotation finder matching the configured annotation target. */
private IAnnotationFinder getAnnotationFinder() {
    if (JDK5_ANNOTATION_TYPE.equals(m_target)) {
        return m_jdkAnnotationFinder;
    }
    return m_javadocAnnotationFinder;
}
/**
 * Builds XmlSuite/XmlTest structures for the given command-line classes.
 * A class may carry an ITest annotation naming the suite/test it belongs to;
 * classes without one fall into the default command-line suite and test.
 *
 * @param classes the classes supplied on the command line.
 * @return one XmlSuite per distinct suite name encountered.
 */
private List<XmlSuite> createCommandLineSuites(Class[] classes) {
    XmlClass[] xmlClasses = Utils.classesToXmlClasses(classes);
    Map<String, XmlSuite> suitesByName = new HashMap<String, XmlSuite>();
    IAnnotationFinder annotationFinder = getAnnotationFinder();
    for (int index = 0; index < classes.length; index++) {
        Class currentClass = classes[index];
        ITest testAnnotation = (ITest) annotationFinder.findAnnotation(currentClass, ITest.class);
        // Resolve the suite/test names: annotation values win, otherwise the
        // command-line defaults apply.
        String suiteName;
        String testName;
        if (null == testAnnotation) {
            suiteName = DEFAULT_SUITE_NAME;
            testName = DEFAULT_TEST_NAME;
        }
        else {
            suiteName = testAnnotation.getSuiteName();
            testName = testAnnotation.getTestName();
        }
        // Find or create the suite for this class.
        XmlSuite targetSuite = suitesByName.get(suiteName);
        if (null == targetSuite) {
            targetSuite = new XmlSuite();
            targetSuite.setName(suiteName);
            suitesByName.put(suiteName, targetSuite);
        }
        // Find or create the test within that suite. The XmlTest constructor
        // registers the new test on its parent suite.
        XmlTest targetTest = null;
        for (XmlTest candidate : targetSuite.getTests()) {
            if (candidate.getName().equals(testName)) {
                targetTest = candidate;
                break;
            }
        }
        if (null == targetTest) {
            targetTest = new XmlTest(targetSuite);
            targetTest.setName(testName);
        }
        targetTest.getXmlClasses().add(xmlClasses[index]);
    }
    return new ArrayList<XmlSuite>(suitesByName.values());
}
/**
 * Set the suites file names to be run by this TestNG object. This method tries to load and
 * parse the specified TestNG suite xml files. If a file is missing, it is ignored.
 *
 * @param suites A list of paths to one more XML files defining the tests. For example:
 *
 * <pre>
 * TestNG tng = new TestNG();
 * List<String> suites = new ArrayList<String>();
 * suites.add("c:/tests/testng1.xml");
 * suites.add("c:/tests/testng2.xml");
 * tng.setTestSuites(suites);
 * tng.run();
 * </pre>
 */
public void setTestSuites(List<String> suites) {
    for (String suitePath : suites) {
        LOGGER.debug("suiteXmlPath: \"" + suitePath + "\"");
        try {
            // One XML file may define several suites; queue them all.
            m_suites.addAll(new Parser(suitePath).parse());
        }
        catch (FileNotFoundException fnfe) {
            fnfe.printStackTrace(System.out);
        }
        catch (IOException ioe) {
            ioe.printStackTrace(System.out);
        }
        catch (ParserConfigurationException pce) {
            pce.printStackTrace(System.out);
        }
        catch (SAXException saxe) {
            saxe.printStackTrace(System.out);
        }
    }
}
/**
 * Specifies the XmlSuite objects to run.
 * Replaces (rather than appends to) any previously configured suites.
 * @param suites
 * @see org.testng.xml.XmlSuite
 */
public void setXmlSuites(List<XmlSuite> suites) {
    m_suites = suites;
}

/**
 * Define which groups will be excluded from this run.
 *
 * @param groups A list of group names separated by a comma.
 */
public void setExcludedGroups(String groups) {
    m_excludedGroups = Utils.split(groups, ",");
}

/**
 * Define which groups will be included from this run.
 *
 * @param groups A list of group names separated by a comma.
 */
public void setGroups(String groups) {
    m_includedGroups = Utils.split(groups, ",");
}

/** Instantiates the given factory class (no-args constructor) and installs it. */
private void setTestRunnerFactoryClass(Class testRunnerFactoryClass) {
    setTestRunnerFactory((ITestRunnerFactory) newInstance(testRunnerFactoryClass));
}

/** Installs the factory used to create test runners for each suite. */
private void setTestRunnerFactory(ITestRunnerFactory itrf) {
    m_testRunnerFactory= itrf;
}

/**
 * Define which listeners to user for this run.
 *
 * @param classes A list of classes, which must be either ISuiteListener,
 * ITestListener or IReporter
 */
public void setListenerClasses(List<Class> classes) {
    for (Class cls: classes) {
        addListener(newInstance(cls));
    }
}

/** Registers each listener instance; dispatching is done by {@link #addListener(Object)}. */
private void setListeners(List<Object> itls) {
    for (Object obj: itls) {
        addListener(obj);
    }
}
/**
 * Registers a listener of unknown concrete type. A single object may
 * implement several listener interfaces and is registered under each one it
 * implements; an object implementing none of them aborts the run.
 */
public void addListener(Object listener) {
    if (listener instanceof ISuiteListener
        || listener instanceof ITestListener
        || listener instanceof IReporter
        || listener instanceof IAnnotationTransformer) {
        // Register the listener under every interface it implements.
        if (listener instanceof ISuiteListener) {
            addListener((ISuiteListener) listener);
        }
        if (listener instanceof ITestListener) {
            addListener((ITestListener) listener);
        }
        if (listener instanceof IReporter) {
            addListener((IReporter) listener);
        }
        if (listener instanceof IAnnotationTransformer) {
            setAnnotationTransformer((IAnnotationTransformer) listener);
        }
    }
    else {
        // exitWithError prints usage and terminates the JVM.
        exitWithError("Listener " + listener + " must be one of ITestListener, ISuiteListener, IReporter or IAnnotationTransformer");
    }
}
/** Registers a suite listener; null values are silently ignored. */
public void addListener(ISuiteListener listener) {
    if (null != listener) {
        m_suiteListeners.add(listener);
    }
}

/** Registers a test listener; null values are silently ignored. */
public void addListener(ITestListener listener) {
    if (null != listener) {
        m_testListeners.add(listener);
    }
}

/** Registers a reporter; null values are silently ignored. */
public void addListener(IReporter listener) {
    if (null != listener) {
        m_reporters.add(listener);
    }
}

/** @return the registered reporters (the live list, not a copy). */
public List<IReporter> getReporters() {
    return m_reporters;
}

/** @return the registered test listeners (the live list, not a copy). */
public List<ITestListener> getTestListeners() {
    return m_testListeners;
}

/** @return the registered suite listeners (the live list, not a copy). */
public List<ISuiteListener> getSuiteListeners() {
    return m_suiteListeners;
}
/** The verbosity level. TODO why not a simple int? */
// Boxed so "not set" (null) is distinguishable from an explicit 0; see the
// null check in runSuitesLocally().
private Integer m_verbose;

// Transformer handed to the annotation finders; replaced via
// setAnnotationTransformer or addListener(Object).
private IAnnotationTransformer m_annotationTransformer
    = new DefaultAnnotationTransformer();
/**
 * Sets the level of verbosity. This value will override the value specified
 * in the test suites.
 *
 * @param verbose the verbosity level (0 to 10 where 10 is most detailed)
 * Actually, this is a lie: you can specify -1 and this will put TestNG
 * in debug mode (no longer slicing off stack traces and all).
 */
public void setVerbose(int verbose) {
    // Integer.valueOf may return a cached instance; new Integer(...) always
    // allocated a fresh one for no benefit.
    m_verbose = Integer.valueOf(verbose);
}
/** Turns the command-line classes (if any) into suites and queues them for the run. */
private void initializeCommandLineSuites() {
    if (m_commandLineTestClasses != null) {
        m_cmdlineSuites = createCommandLineSuites(m_commandLineTestClasses);
        m_suites.addAll(m_cmdlineSuites);
    }
}

/** Applies the thread-count and parallel-mode command-line flags to the command-line suites. */
private void initializeCommandLineSuitesParams() {
    if (m_cmdlineSuites == null) {
        return;
    }
    for (XmlSuite commandLineSuite : m_cmdlineSuites) {
        if (m_useThreadCount) {
            commandLineSuite.setThreadCount(m_threadCount);
        }
        if (m_useParallelMode) {
            commandLineSuite.setParallel(m_parallelMode);
        }
    }
}

/** Applies the include/exclude group filters to the first test of each command-line suite. */
private void initializeCommandLineSuitesGroups() {
    if (m_cmdlineSuites == null) {
        return;
    }
    for (XmlSuite commandLineSuite : m_cmdlineSuites) {
        if (m_includedGroups != null && m_includedGroups.length > 0) {
            commandLineSuite.getTests().get(0).setIncludedGroups(Arrays.asList(m_includedGroups));
        }
        if (m_excludedGroups != null && m_excludedGroups.length > 0) {
            commandLineSuite.getTests().get(0).setExcludedGroups(Arrays.asList(m_excludedGroups));
        }
    }
}
// Installs the exit-code listener (always) and, unless disabled, the default
// reporters (suite HTML, failed-tests, emailable).
private void initializeListeners() {
    m_testListeners.add(new ExitCodeListener(this));
    if(m_useDefaultListeners) {
        m_reporters.add(new SuiteHTMLReporter());
        m_reporters.add(new FailedReporter());
        m_reporters.add(new EmailableReporter());
    }
}

// Creates the JavaDoc-style annotation finder (always) and, when not running
// on the JDK 1.4 build, the JDK5 annotation finder as well.
private void initializeAnnotationFinders() {
    m_javadocAnnotationFinder= new JDK14AnnotationFinder(getAnnotationTransformer());
    if(null != m_sourceDirs) {
        m_javadocAnnotationFinder.addSourceDirs(m_sourceDirs);
    }
    if(!isJdk14()) {
        m_jdkAnnotationFinder= ClassHelper.createJdkAnnotationFinder(getAnnotationTransformer());
    }
}

/**
 * Run TestNG. Selects one of three modes: slave (listen for suites from a
 * master), master (dispatch suites to the hosts in the host file), or
 * regular (run everything in this JVM), then generates reports.
 */
public void run() {
    initializeListeners();
    initializeAnnotationFinders();
    initializeCommandLineSuites();
    initializeCommandLineSuitesParams();
    initializeCommandLineSuitesGroups();
    List<ISuite> suiteRunners = null;
    // Slave mode: block and execute suites sent by a master.
    if (m_clientPort != 0) {
        waitForSuites();
    }
    // Regular mode: run everything locally.
    else if (m_hostFile == null) {
        suiteRunners = runSuitesLocally();
    }
    // Master mode: dispatch suites to the hosts listed in the host file.
    else {
        suiteRunners = runSuitesRemotely();
    }
    // Slave mode returns no runners, hence the null check.
    if(null != suiteRunners) {
        generateReports(suiteRunners);
    }
    if(!m_hasTests) {
        setStatus(HAS_NO_TEST);
        if (TestRunner.getVerbose() > 1) {
            System.err.println("[TestNG] No tests found. Nothing was run");
        }
    }
}
/**
 * Hands the full run results to every registered reporter. A reporter that
 * throws is logged and skipped so the remaining reporters still run.
 */
private void generateReports(List<ISuite> suiteRunners) {
    for (IReporter currentReporter : m_reporters) {
        try {
            currentReporter.generateReport(m_suites, suiteRunners, m_outputDir);
        }
        catch (Exception reportingProblem) {
            System.err.println("[TestNG] Reporter " + currentReporter + " failed");
            reportingProblem.printStackTrace(System.err);
        }
    }
}
/**
 * (Re)opens the slave's listening socket on the given port and blocks until a
 * master connects, returning a ConnectionInfo wrapping the accepted socket.
 *
 * @param clientPort port to listen on.
 * @param oldCi the previous connection, if any (currently unused).
 * @throws IOException if binding or accepting fails.
 */
private static ConnectionInfo resetSocket(int clientPort, ConnectionInfo oldCi)
    throws IOException
{
    ConnectionInfo result = new ConnectionInfo();
    // Create the socket unbound: SO_REUSEADDR must be set *before* binding to
    // have any effect. The previous code set it after the bound constructor,
    // which did nothing.
    ServerSocket serverSocket = new ServerSocket();
    serverSocket.setReuseAddress(true);
    serverSocket.bind(new java.net.InetSocketAddress(clientPort));
    log("Waiting for connections on port " + clientPort);
    try {
        Socket socket = serverSocket.accept();
        result.setSocket(socket);
    }
    finally {
        // Release the listening port so the next reset can bind it again; the
        // old code leaked the ServerSocket, making a rebind fail. Closing the
        // server socket does not close the accepted socket.
        serverSocket.close();
    }
    return result;
}
/**
 * Invoked in client mode. In this case, wait for a connection
 * on the given port, run the XmlSuite we received and return the SuiteRunner
 * created to run it.
 * @throws IOException
 */
private void waitForSuites() {
    try {
        ConnectionInfo ci = resetSocket(m_clientPort, null);
        // Serve forever: each iteration reads one serialized XmlSuite from the
        // master, runs it locally, and writes the resulting ISuite back.
        while (true) {
            try {
                XmlSuite s = (XmlSuite) ci.getOis().readObject();
                log("Processing " + s.getName());
                // Replace the suite list so only the received suite runs.
                m_suites = new ArrayList<XmlSuite>();
                m_suites.add(s);
                List<ISuite> suiteRunners = runSuitesLocally();
                ISuite sr = suiteRunners.get(0);
                log("Done processing " + s.getName());
                ci.getOos().writeObject(sr);
            }
            catch (ClassNotFoundException e) {
                e.printStackTrace(System.out);
            }
            // EOF/reset means the master went away: reopen the server socket
            // and wait for the next master.
            catch(EOFException ex) {
                log("Connection closed " + ex.getMessage());
                ci = resetSocket(m_clientPort, ci);
            }
            catch(SocketException ex) {
                log("Connection closed " + ex.getMessage());
                ci = resetSocket(m_clientPort, ci);
            }
        }
    }
    catch(IOException ex) {
        ex.printStackTrace(System.out);
    }
}

/** Logs through TestNG's utility logger at verbosity level 2. */
private static void log(String string) {
    Utils.log("", 2, string);
}
/**
 * Master mode: connects to the slave hosts listed in m_hostFile, dispatches
 * the configured suites (or their individual tests, depending on the host
 * file strategy) to them, waits for completion, and replays the collected
 * results through the registered test listeners.
 *
 * @return the list of suite results gathered from the slaves.
 */
private List<ISuite> runSuitesRemotely() {
    List<ISuite> result = new ArrayList<ISuite>();
    HostFile hostFile = new HostFile(m_hostFile);
    // Create one socket per host found
    String[] hosts = hostFile.getHosts();
    Socket[] sockets = new Socket[hosts.length];
    for (int i = 0; i < hosts.length; i++) {
        String host = hosts[i];
        // Each host entry is expected to be "host:port".
        String[] s = host.split(":");
        try {
            sockets[i] = new Socket(s[0], Integer.parseInt(s[1]));
        }
        catch (NumberFormatException e) {
            e.printStackTrace(System.out);
        }
        catch (UnknownHostException e) {
            e.printStackTrace(System.out);
        }
        catch (IOException e) {
            Utils.error("Couldn't connect to " + host + ": " + e.getMessage());
        }
    }
    // Add these hosts to the pool
    try {
        m_slavePool.addSlaves(sockets);
    }
    catch (IOException e1) {
        e1.printStackTrace(System.out);
    }
    // Dispatch the suites/tests to each host
    List<Runnable> workers = new ArrayList<Runnable>();
    // Send one XmlTest at a time to remote hosts
    if (hostFile.isStrategyTest()) {
        for (XmlSuite suite : m_suites) {
            suite.setVerbose(hostFile.getVerbose());
            SuiteRunner suiteRunner =
                new SuiteRunner(suite, m_outputDir, new IAnnotationFinder[] {m_javadocAnnotationFinder, m_jdkAnnotationFinder});
            for (XmlTest test : suite.getTests()) {
                // Wrap each XmlTest in a throwaway single-test suite that
                // mirrors the parent suite's settings, so each test can be
                // shipped to a slave independently.
                XmlSuite tmpSuite = new XmlSuite();
                tmpSuite.setXmlPackages(suite.getXmlPackages());
                tmpSuite.setAnnotations(suite.getAnnotations());
                tmpSuite.setJUnit(suite.isJUnit());
                tmpSuite.setName("Temporary suite for " + test.getName());
                tmpSuite.setParallel(suite.getParallel());
                tmpSuite.setParameters(suite.getParameters());
                tmpSuite.setThreadCount(suite.getThreadCount());
                tmpSuite.setVerbose(suite.getVerbose());
                XmlTest tmpTest = new XmlTest(tmpSuite);
                tmpTest.setAnnotations(test.getAnnotations());
                tmpTest.setBeanShellExpression(test.getExpression());
                tmpTest.setClassNames(test.getXmlClasses());
                tmpTest.setExcludedGroups(test.getExcludedGroups());
                tmpTest.setIncludedGroups(test.getIncludedGroups());
                tmpTest.setJUnit(test.isJUnit());
                tmpTest.setMethodSelectors(test.getMethodSelectors());
                tmpTest.setName(test.getName());
                tmpTest.setParallel(test.getParallel());
                tmpTest.setParameters(test.getParameters());
                tmpTest.setVerbose(test.getVerbose());
                tmpTest.setXmlClasses(test.getXmlClasses());
                tmpTest.setXmlPackages(test.getXmlPackages());
                workers.add(new RemoteTestWorker(tmpSuite, m_slavePool, suiteRunner, result));
            }
            result.add(suiteRunner);
        }
    }
    // Send one XmlSuite at a time to remote hosts
    else {
        for (XmlSuite suite : m_suites) {
            workers.add(new RemoteSuiteWorker(suite, m_slavePool, result));
        }
    }
    // Launch all the workers
    IPooledExecutor executor= ThreadUtil.createPooledExecutor(1);
    for (Runnable r : workers) {
        executor.execute(r);
    }
    // Wait for completion
    executor.shutdown();
    // TODO(cbeust)
    // Need to make this configurable
    // NOTE(review): 10 * 1000 ms is 10 *seconds*, but the comment below claims
    // "10 minutes" -- one of the two is wrong; 10 * 60 * 1000 was probably
    // intended. TODO confirm before changing the value.
    long maxTimeOut= 10 * 1000; // 10 minutes
    try {
        executor.awaitTermination(maxTimeOut);
    }
    catch (InterruptedException e) {
        e.printStackTrace(System.out);
    }
    // Run test listeners
    for (ISuite suite : result) {
        for (ISuiteResult suiteResult : suite.getResults().values()) {
            Collection<ITestResult> allTests[] = new Collection[] {
                suiteResult.getTestContext().getPassedTests().getAllResults(),
                suiteResult.getTestContext().getFailedTests().getAllResults(),
                suiteResult.getTestContext().getSkippedTests().getAllResults(),
                suiteResult.getTestContext().getFailedButWithinSuccessPercentageTests().getAllResults(),
            };
            for (Collection<ITestResult> all : allTests) {
                for (ITestResult tr : all) {
                    Invoker.runTestListeners(tr, m_testListeners);
                }
            }
        }
    }
    return result;
}
/**
 * Runs every configured suite in the current JVM.
 * This needs to be public for maven2, for now..At least
 * until an alternative mechanism is found.
 * @return one suite runner per XmlSuite (empty when no suite was configured).
 */
public List<ISuite> runSuitesLocally() {
    List<ISuite> result = new ArrayList<ISuite>();
    if (TestRunner.getVerbose() > 0) {
        // StringBuilder: local, single-threaded use, so StringBuffer's
        // synchronization was unnecessary.
        StringBuilder allFiles = new StringBuilder();
        for (XmlSuite s : m_suites) {
            allFiles.append("  ").append(s.getFileName()).append("\n");
        }
        Utils.log("Parser", 0, "Running:\n" + allFiles.toString());
    }
    // (Removed an unused local that duplicated TestRunner.getVerbose().)
    if (m_suites.size() > 0) {
        for (XmlSuite xmlSuite : m_suites) {
            xmlSuite.setDefaultAnnotations(m_target);
            if (null != m_isJUnit) {
                xmlSuite.setJUnit(m_isJUnit);
            }
            // TODO CQ is this OK? Should the command line verbose flag override
            // what is explicitly specified in the suite?
            if (null != m_verbose) {
                xmlSuite.setVerbose(m_verbose);
            }
            result.add(createAndRunSuiteRunners(xmlSuite));
        }
    }
    else {
        setStatus(HAS_NO_TEST);
        System.err.println("[ERROR]: No test suite found. Nothing to run");
    }
    // Generate the suites report
    return result;
}
/**
 * Creates a SuiteRunner for the given suite, wires in the registered suite
 * and test listeners, runs it, and returns it.
 */
protected SuiteRunner createAndRunSuiteRunners(XmlSuite xmlSuite) {
    SuiteRunner result = new SuiteRunner(xmlSuite,
        m_outputDir,
        m_testRunnerFactory,
        m_useDefaultListeners,
        new IAnnotationFinder[] {m_javadocAnnotationFinder, m_jdkAnnotationFinder});
    for (ISuiteListener isl : m_suiteListeners) {
        result.addListener(isl);
    }
    result.setTestListeners(m_testListeners);
    // Set the hostname, if any
    if (m_clientPort != 0) {
        try {
            // Running as a slave: record "host:port" so results identify this node.
            result.setHost(InetAddress.getLocalHost() + ":" + m_clientPort);
        }
        catch (UnknownHostException e) {
            e.printStackTrace(System.out);
        }
    }
    result.run();
    return result;
}
/**
 * Reflectively instantiates the given class through its no-args constructor,
 * wrapping every reflection failure in a TestNGException with a descriptive
 * message.
 */
private Object newInstance(Class clazz) {
    try {
        Object instance = clazz.newInstance();
        return instance;
    }
    catch(IllegalAccessException iae) {
        throw new TestNGException("Class "
            + clazz.getName()
            + " does not have a no-args constructor",
            iae);
    }
    catch(InstantiationException ie) {
        throw new TestNGException("Cannot instantiate class "
            + clazz.getName(),
            ie);
    }
    catch(ExceptionInInitializerError eiierr) {
        throw new TestNGException("An exception occurred in static initialization of class "
            + clazz.getName(),
            eiierr);
    }
    catch(SecurityException se) {
        throw new TestNGException(se);
    }
}
/**
 * The TestNG entry point for command line execution.
 *
 * @param argv the TestNG command line parameters.
 */
public static void main(String[] argv) {
    TestNG testng = privateMain(argv, null);
    // The exit code is the OR of the HAS_* status flags.
    System.exit(testng.getStatus());
}

/**
 * Parses the command line, configures a TestNG instance accordingly, runs it,
 * and returns it.
 *
 * @param argv the raw command line arguments.
 * @param listener an optional extra test listener; may be null.
 * @return the configured (and already run) TestNG instance.
 */
public static TestNG privateMain(String[] argv, ITestListener listener) {
    for (int i = 0; i < argv.length; ++i)
    {
        LOGGER.debug("privateMain: argv[" + i + "] = \"" + argv[i] + "\"");
    }
    Map cmdLineArgs = TestNGCommandLineArgs.parseCommandLine(argv);
    TestNG result = new TestNG();
    if (null != listener) {
        result.addListener(listener);
    }
    try {
        checkConditions(cmdLineArgs);
        {
            Integer verbose = (Integer) cmdLineArgs.get(TestNGCommandLineArgs.LOG);
            if (null != verbose) {
                result.setVerbose(verbose.intValue());
            }
        }
        result.setOutputDirectory((String) cmdLineArgs.get(TestNGCommandLineArgs.OUTDIR_COMMAND_OPT));
        result.setSourcePath((String) cmdLineArgs.get(TestNGCommandLineArgs.SRC_COMMAND_OPT));
        result.setTarget((String) cmdLineArgs.get(TestNGCommandLineArgs.TARGET_COMMAND_OPT));
        // NOTE(review): declared List<String> but converted via
        // toArray(new Class[...]) -- the parser presumably stores Class
        // objects under this key or the cast would fail. TODO confirm.
        List<String> testClasses = (List<String>) cmdLineArgs.get(TestNGCommandLineArgs.TESTCLASS_COMMAND_OPT);
        if (null != testClasses) {
            Class[] classes = (Class[]) testClasses.toArray(new Class[testClasses.size()]);
            result.setTestClasses(classes);
        }
        List<String> testNgXml =
            (List<String>) cmdLineArgs.get(TestNGCommandLineArgs.SUITE_DEF_OPT);
        if (null != testNgXml) {
            result.setTestSuites(testNgXml);
        }
        String useDefaultListeners =
            (String) cmdLineArgs.get(TestNGCommandLineArgs.USE_DEFAULT_LISTENERS);
        if (null != useDefaultListeners) {
            result.setUseDefaultListeners("true".equalsIgnoreCase(useDefaultListeners));
        }
        result.setGroups((String) cmdLineArgs.get(TestNGCommandLineArgs.GROUPS_COMMAND_OPT));
        result.setExcludedGroups((String) cmdLineArgs.get(TestNGCommandLineArgs.EXCLUDED_GROUPS_COMMAND_OPT));
        result.setTestJar((String) cmdLineArgs.get(TestNGCommandLineArgs.TESTJAR_COMMAND_OPT));
        result.setJUnit((Boolean) cmdLineArgs.get(TestNGCommandLineArgs.JUNIT_DEF_OPT));
        result.setHostFile((String) cmdLineArgs.get(TestNGCommandLineArgs.HOSTFILE_OPT));
        String threadCount = (String) cmdLineArgs.get(TestNGCommandLineArgs.THREAD_COUNT);
        if (threadCount != null) {
            result.setThreadCount(Integer.parseInt(threadCount));
        }
        // A slave port switches run() into client (slave) mode.
        String client = (String) cmdLineArgs.get(TestNGCommandLineArgs.SLAVE_OPT);
        if (client != null) {
            result.setClientPort(Integer.parseInt(client));
        }
        List<Class> listenerClasses =
            (List<Class>) cmdLineArgs.get(TestNGCommandLineArgs.LISTENER_COMMAND_OPT);
        if (null != listenerClasses) {
            result.setListenerClasses(listenerClasses);
        }
        result.run();
    }
    catch(TestNGException ex) {
        // Full stack trace only in verbose mode; otherwise a terse error line.
        if (TestRunner.getVerbose() > 1) {
            ex.printStackTrace(System.out);
        }
        else {
            System.err.println("[ERROR]: " + ex.getMessage());
        }
        System.exit(1);
    }
    return result;
}
/** Sets the port this instance listens on; a non-zero value switches run() into slave mode. */
private void setClientPort(int clientPort) {
    m_clientPort = clientPort;
}

/**
 * Set the path to the file that contains the list of slaves.
 * A non-null value switches run() into master mode.
 * @param hostFile
 */
public void setHostFile(String hostFile) {
    m_hostFile = hostFile;
}

/**
 * Specify if this run should be made in JUnit mode
 *
 * @param isJUnit may be null, meaning "not specified" (the suite's own setting applies).
 */
public void setJUnit(Boolean isJUnit) {
    m_isJUnit = isJUnit;
}

/**
 * @deprecated The TestNG version is now established at load time. This
 * method is not required anymore and is now a no-op.
 */
@Deprecated
public static void setTestNGVersion() {
    LOGGER.info("setTestNGVersion has been deprecated.");
}

/**
 * Returns true if this is the JDK 1.4 JAR version of TestNG, false otherwise.
 *
 * @return true if this is the JDK 1.4 JAR version of TestNG, false otherwise.
 */
public static boolean isJdk14() {
    return m_isJdk14;
}
/**
 * Checks TestNG preconditions. For example, this method makes sure that if this is the
 * JDK 1.4 version of TestNG, a source directory has been specified. This method calls
 * System.exit(-1) or throws an exception if the preconditions are not satisfied.
 *
 * @param params the parsed command line parameters.
 */
private static void checkConditions(Map params) {
    // TODO CQ document why sometimes we throw exceptions and sometimes we exit.
    List<String> classList = (List<String>) params.get(TestNGCommandLineArgs.TESTCLASS_COMMAND_OPT);
    List<String> suiteList = (List<String>) params.get(TestNGCommandLineArgs.SUITE_DEF_OPT);
    Object jar = params.get(TestNGCommandLineArgs.TESTJAR_COMMAND_OPT);
    Object slavePort = params.get(TestNGCommandLineArgs.SLAVE_OPT);
    // At least one source of tests (classes, xml, jar) or slave mode is required.
    boolean nothingToRun =
        classList == null && suiteList == null && slavePort == null && jar == null;
    if (nothingToRun) {
        System.err.println("You need to specify at least one testng.xml or one class");
        usage();
        System.exit(-1);
    }
    if (isJdk14()) {
        // The JDK 1.4 build reads JavaDoc annotations from source files, so a
        // sourcedir is mandatory there.
        String sourcePath = (String) params.get(TestNGCommandLineArgs.SRC_COMMAND_OPT);
        if (sourcePath == null || "".equals(sourcePath)) {
            throw new TestNGException("No sourcedir was specified");
        }
    }
    String includedGroups = (String) params.get(TestNGCommandLineArgs.GROUPS_COMMAND_OPT);
    String excludedGroups = (String) params.get(TestNGCommandLineArgs.EXCLUDED_GROUPS_COMMAND_OPT);
    if ((includedGroups != null || excludedGroups != null) && classList == null) {
        throw new TestNGException("Groups option should be used with testclass option");
    }
}
/** Prints an internal debugging message prefixed with [TestNG]. */
private static void ppp(String s) {
    System.out.println("[TestNG] " + s);
}

/**
 * @return true if at least one test failed.
 */
public boolean hasFailure() {
    return (getStatus() & HAS_FAILURE) == HAS_FAILURE;
}

/**
 * @deprecated
 */
@Deprecated
public void setHasFailure(boolean hasFailure) {
    // NOTE(review): the boolean parameter is ignored -- this always sets the
    // flag, even when called with false (status flags are never cleared).
    m_status |= HAS_FAILURE;
}

/**
 * @return true if at least one test failed within success percentage.
 */
public boolean hasFailureWithinSuccessPercentage() {
    return (getStatus() & HAS_FSP) == HAS_FSP;
}

/**
 * @deprecated
 */
@Deprecated
public void setHasFailureWithinSuccessPercentage(boolean hasFailureWithinSuccessPercentage) {
    // NOTE(review): the boolean parameter is ignored -- this always sets the flag.
    m_status |= HAS_FSP;
}

/**
 * @return true if at least one test was skipped.
 */
public boolean hasSkip() {
    return (getStatus() & HAS_SKIPPED) == HAS_SKIPPED;
}

/**
 * @deprecated
 */
@Deprecated
public void setHasSkip(boolean hasSkip) {
    // NOTE(review): the boolean parameter is ignored -- this always sets the flag.
    m_status |= HAS_SKIPPED;
}

/**
 * Prints the usage message to System.out. This message describes all the command line
 * options.
 */
public static void usage() {
    TestNGCommandLineArgs.usage();
}

/** Prints the message and the usage text, then terminates the JVM with exit code 1. */
static void exitWithError(String msg) {
    System.err.println(msg);
    usage();
    System.exit(1);
}

/** @return the directory where reports are written. */
public String getOutputDirectory() {
    return m_outputDir;
}

/** @return the current annotation transformer (a default is installed at field initialization). */
public IAnnotationTransformer getAnnotationTransformer() {
    return m_annotationTransformer;
}

/** Replaces the annotation transformer handed to the annotation finders. */
public void setAnnotationTransformer(IAnnotationTransformer t) {
    m_annotationTransformer = t;
}
/**
 * ITestListener that records result categories (failure, skip, failed-but-
 * within-success-percentage) as HAS_* status bits on the owning TestNG
 * instance, and marks that at least one test actually started.
 */
public static class ExitCodeListener implements ITestListener {
    protected TestNG m_mainRunner;

    /** Binds to the most recently created TestNG instance (set in init()). */
    public ExitCodeListener() {
        m_mainRunner = TestNG.m_instance;
    }

    public ExitCodeListener(TestNG runner) {
        m_mainRunner = runner;
    }

    public void onTestFailure(ITestResult result) {
        m_mainRunner.m_status |= HAS_FAILURE;
    }

    public void onTestSkipped(ITestResult result) {
        m_mainRunner.m_status |= HAS_SKIPPED;
    }

    public void onTestFailedButWithinSuccessPercentage(ITestResult result) {
        m_mainRunner.m_status |= HAS_FSP;
    }

    public void onTestSuccess(ITestResult result) {
        // Success does not alter the exit status.
    }

    public void onStart(ITestContext context) {
        // No-op.
    }

    public void onFinish(ITestContext context) {
        // No-op.
    }

    public void onTestStart(ITestResult result) {
        setHasRunTests();
    }

    private void setHasRunTests() {
        // Lets run() distinguish "ran zero tests" from a normal run.
        m_mainRunner.m_hasTests= true;
    }
}
} |
package com.ea.orbit.actors;
import com.ea.orbit.actors.annotation.StatelessWorker;
import com.ea.orbit.actors.cluster.ClusterPeer;
import com.ea.orbit.actors.cluster.JGroupsClusterPeer;
import com.ea.orbit.actors.cluster.NodeAddress;
import com.ea.orbit.actors.concurrent.MultiExecutionSerializer;
import com.ea.orbit.actors.concurrent.WaitFreeMultiExecutionSerializer;
import com.ea.orbit.actors.extensions.ActorClassFinder;
import com.ea.orbit.actors.extensions.ActorExtension;
import com.ea.orbit.actors.extensions.DefaultLoggerExtension;
import com.ea.orbit.actors.extensions.LifetimeExtension;
import com.ea.orbit.actors.extensions.LoggerExtension;
import com.ea.orbit.actors.extensions.MessageSerializer;
import com.ea.orbit.actors.extensions.PipelineExtension;
import com.ea.orbit.actors.extensions.StreamProvider;
import com.ea.orbit.actors.net.DefaultPipeline;
import com.ea.orbit.actors.net.Pipeline;
import com.ea.orbit.actors.runtime.AbstractActor;
import com.ea.orbit.actors.runtime.ActorBaseEntry;
import com.ea.orbit.actors.runtime.ActorEntry;
import com.ea.orbit.actors.runtime.ActorRuntime;
import com.ea.orbit.actors.runtime.ActorTaskContext;
import com.ea.orbit.actors.runtime.ClusterHandler;
import com.ea.orbit.actors.runtime.DefaultActorClassFinder;
import com.ea.orbit.actors.runtime.DefaultClassDictionary;
import com.ea.orbit.actors.runtime.DefaultDescriptorFactory;
import com.ea.orbit.actors.runtime.DefaultHandlers;
import com.ea.orbit.actors.runtime.Execution;
import com.ea.orbit.actors.runtime.Hosting;
import com.ea.orbit.actors.runtime.Invocation;
import com.ea.orbit.actors.runtime.JavaMessageSerializer;
import com.ea.orbit.actors.runtime.LocalObjects;
import com.ea.orbit.actors.runtime.Messaging;
import com.ea.orbit.actors.runtime.NodeCapabilities;
import com.ea.orbit.actors.runtime.ObjectInvoker;
import com.ea.orbit.actors.runtime.ObserverEntry;
import com.ea.orbit.actors.runtime.Registration;
import com.ea.orbit.actors.runtime.ReminderController;
import com.ea.orbit.actors.runtime.RemoteReference;
import com.ea.orbit.actors.runtime.ResponseCaching;
import com.ea.orbit.actors.runtime.SerializationHandler;
import com.ea.orbit.actors.runtime.StatelessActorEntry;
import com.ea.orbit.actors.runtime.Utils;
import com.ea.orbit.actors.runtime.cloner.ExecutionObjectCloner;
import com.ea.orbit.actors.runtime.cloner.KryoCloner;
import com.ea.orbit.actors.streams.AsyncObserver;
import com.ea.orbit.actors.streams.AsyncStream;
import com.ea.orbit.actors.streams.StreamSubscriptionHandle;
import com.ea.orbit.actors.streams.simple.SimpleStreamExtension;
import com.ea.orbit.actors.transactions.TransactionUtils;
import com.ea.orbit.annotation.Config;
import com.ea.orbit.annotation.Wired;
import com.ea.orbit.concurrent.ExecutorUtils;
import com.ea.orbit.concurrent.Task;
import com.ea.orbit.container.Container;
import com.ea.orbit.container.Startable;
import com.ea.orbit.exception.UncheckedException;
import com.ea.orbit.metrics.annotations.ExportMetric;
import com.ea.orbit.util.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.inject.Singleton;
import java.lang.annotation.Annotation;
import java.lang.ref.WeakReference;
import java.lang.reflect.Method;
import java.nio.ByteBuffer;
import java.time.Clock;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Set;
import java.util.Timer;
import java.util.TimerTask;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ForkJoinTask;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import static com.ea.orbit.async.Await.await;
/**
 * The central orbit-actors runtime node ("stage"): wires hosting, messaging,
 * execution, caching, serialization and the cluster peer into a single
 * message {@link Pipeline}, and manages actor activation, timers and cleanup.
 * <p>
 * Instances are normally created via {@link Builder} or injected by the
 * orbit container; configuration fields below are populated from
 * {@code orbit.actors.*} properties when a container is present.
 */
@Singleton
public class Stage implements Startable, ActorRuntime
{
    // replaced by the LoggerExtension's logger during start()
    private Logger logger = LoggerFactory.getLogger(Stage.class);
    private static final int DEFAULT_EXECUTION_POOL_SIZE = 128;
    // registry of local actors/observers; entry creation is delegated back to this stage
    LocalObjects objects = new LocalObjects()
    {
        @Override
        protected <T> LocalObjectEntry createLocalObjectEntry(final RemoteReference<T> reference, final T object)
        {
            return Stage.this.createLocalObjectEntry(reference, object);
        }
    };
    @Config("orbit.actors.clusterName")
    private String clusterName;
    @Config("orbit.actors.nodeName")
    private String nodeName;
    @Config("orbit.actors.stageMode")
    private StageMode mode = StageMode.HOST;
    @Config("orbit.actors.executionPoolSize")
    private int executionPoolSize = DEFAULT_EXECUTION_POOL_SIZE;
    @Config("orbit.actors.extensions")
    private List<ActorExtension> extensions = new CopyOnWriteArrayList<>();
    // context-property keys copied into every outgoing invocation's headers (see invoke)
    @Config("orbit.actors.stickyHeaders")
    private Set<String> stickyHeaders = new HashSet<>(Arrays.asList(TransactionUtils.ORBIT_TRANSACTION_ID, "orbit.traceId"));
    // how often the periodic cleanup task runs (see start())
    @Config("orbit.actors.cleanupInterval")
    private long cleanupIntervalMillis = TimeUnit.SECONDS.toMillis(10);
    private Timer timer = new Timer("Orbit stage timer");
    @Wired
    private Container container;
    private Pipeline pipeline;
    private final String runtimeIdentity;
    private ResponseCaching cacheManager;
    // serializes jobs per key (e.g. per actor) to guarantee sequential execution
    private MultiExecutionSerializer<Object> executionSerializer;
    private ActorClassFinder finder;
    private LoggerExtension loggerExtension;
    // null until start(); RUNNING/STOPPING/STOPPED afterwards
    private NodeCapabilities.NodeState state;
    // max number of in-flight deactivations during cleanup passes
    @Config("orbit.actors.concurrentDeactivations")
    private int concurrentDeactivations = 16;
    // idle time after which an actor becomes eligible for deactivation
    @Config("orbit.actors.defaultActorTTL")
    private long defaultActorTTL = TimeUnit.MINUTES.toMillis(10);
    private Task<Void> startPromise = new Task<>();
    public enum StageMode
    {
        FRONT_END, // no activations
        HOST // allows activations
    }
    private ClusterPeer clusterPeer;
    @Wired
    private Messaging messaging;
    @Wired
    private Execution execution;
    @Wired
    private Hosting hosting;
    private boolean startCalled;
    private Clock clock;
    private ExecutorService executionPool;
    private ExecutorService messagingPool;
    private ExecutionObjectCloner objectCloner;
    private MessageSerializer messageSerializer;
    // weak ref handed to ActorRuntime's thread-local so the stage can be GC'd
    private final WeakReference<ActorRuntime> cachedRef = new WeakReference<>(this);
    static
    {
        // eagerly initialize orbit-async's await() instrumentation when present
        try
        {
            Class.forName("com.ea.orbit.async.Async");
            try
            {
                // async is present in the classpath, let's make sure await is initialized
                Class.forName("com.ea.orbit.async.Await").getMethod("init").invoke(null);
            }
            catch (Exception ex)
            {
                // this might be a problem, logging.
                LoggerFactory.getLogger(Stage.class).error("Error initializing orbit-async", ex);
            }
        }
        catch (Exception ex)
        {
            // no problem, application doesn't use orbit async.
        }
    }
    /**
     * Fluent builder for {@link Stage}. Every option is optional; unset
     * values fall back to the defaults chosen in {@link Stage#start()}.
     */
    public static class Builder
    {
        private Clock clock;
        private ExecutorService executionPool;
        private ExecutorService messagingPool;
        private ExecutionObjectCloner objectCloner;
        private ClusterPeer clusterPeer;
        private String clusterName;
        private String nodeName;
        private StageMode mode = StageMode.HOST;
        private int executionPoolSize = DEFAULT_EXECUTION_POOL_SIZE;
        private Messaging messaging;
        private List<ActorExtension> extensions = new ArrayList<>();
        private Set<String> stickyHeaders = new HashSet<>();
        /** Clock used for actor TTL and reminder scheduling. */
        public Builder clock(Clock clock)
        {
            this.clock = clock;
            return this;
        }
        public Builder executionPool(ExecutorService executionPool)
        {
            this.executionPool = executionPool;
            return this;
        }
        public Builder messagingPool(ExecutorService messagingPool)
        {
            this.messagingPool = messagingPool;
            return this;
        }
        public Builder clusterPeer(ClusterPeer clusterPeer)
        {
            this.clusterPeer = clusterPeer;
            return this;
        }
        public Builder objectCloner(ExecutionObjectCloner objectCloner)
        {
            this.objectCloner = objectCloner;
            return this;
        }
        public Builder clusterName(String clusterName)
        {
            this.clusterName = clusterName;
            return this;
        }
        public Builder nodeName(String nodeName)
        {
            this.nodeName = nodeName;
            return this;
        }
        /** HOST (allows activations) or FRONT_END (no activations). */
        public Builder mode(StageMode mode)
        {
            this.mode = mode;
            return this;
        }
        public Builder messaging(Messaging messaging)
        {
            this.messaging = messaging;
            return this;
        }
        /** Adds actor extensions (storage, streams, lifetime hooks, ...). */
        public Builder extensions(ActorExtension... extensions)
        {
            Collections.addAll(this.extensions, extensions);
            return this;
        }
        /** Adds context-property names to propagate in message headers. */
        public Builder stickyHeaders(String... stickyHeaders)
        {
            Collections.addAll(this.stickyHeaders, stickyHeaders);
            return this;
        }
        /** Creates the stage and applies every configured option. */
        public Stage build()
        {
            Stage stage = new Stage();
            stage.setClock(clock);
            stage.setExecutionPool(executionPool);
            stage.setMessagingPool(messagingPool);
            stage.setObjectCloner(objectCloner);
            stage.setClusterName(clusterName);
            stage.setClusterPeer(clusterPeer);
            stage.setNodeName(nodeName);
            stage.setMode(mode);
            stage.setExecutionPoolSize(executionPoolSize);
            extensions.forEach(stage::addExtension);
            stage.setMessaging(messaging);
            stage.addStickyHeaders(stickyHeaders);
            return stage;
        }
    }
    /**
     * Creates the stage, registers it with the {@link ActorRuntime}
     * thread-local bookkeeping and assigns a unique runtime identity.
     */
    public Stage()
    {
        ActorRuntime.runtimeCreated(cachedRef);
        runtimeIdentity = generateRuntimeIdentity();
    }
    /** Adds context-property names whose values are copied into message headers. */
    public void addStickyHeaders(Collection<String> stickyHeaders)
    {
        this.stickyHeaders.addAll(stickyHeaders);
    }
    public void setClock(final Clock clock)
    {
        this.clock = clock;
    }
    public void setMessaging(final Messaging messaging)
    {
        this.messaging = messaging;
    }
    public void setExecutionPool(final ExecutorService executionPool)
    {
        this.executionPool = executionPool;
    }
    public ExecutorService getExecutionPool()
    {
        return executionPool;
    }
    public void setMessagingPool(final ExecutorService messagingPool)
    {
        this.messagingPool = messagingPool;
    }
    public ExecutorService getMessagingPool()
    {
        return messagingPool;
    }
    public int getExecutionPoolSize()
    {
        return executionPoolSize;
    }
    public void setExecutionPoolSize(int defaultPoolSize)
    {
        this.executionPoolSize = defaultPoolSize;
    }
    public ExecutionObjectCloner getObjectCloner()
    {
        return objectCloner;
    }
    public void setObjectCloner(ExecutionObjectCloner objectCloner)
    {
        this.objectCloner = objectCloner;
    }
    public String getClusterName()
    {
        return clusterName;
    }
    public void setClusterName(final String clusterName)
    {
        this.clusterName = clusterName;
    }
    public String getNodeName()
    {
        return nodeName;
    }
    public void setNodeName(final String nodeName)
    {
        this.nodeName = nodeName;
    }
    public StageMode getMode()
    {
        return mode;
    }
    /**
     * Sets the stage mode (HOST/FRONT_END).
     *
     * @throws IllegalStateException if {@link #start()} was already invoked
     */
    public void setMode(final StageMode mode)
    {
        if (startCalled)
        {
            throw new IllegalStateException("Stage mode cannot be changed after startup.");
        }
        this.mode = mode;
    }
    /** Task completed (or failed) once {@link #start()} finishes asynchronously. */
    public Task<Void> getStartPromise()
    {
        return startPromise;
    }
    /**
     * Starts the stage: resolves all collaborators (logger, pools, hosting,
     * messaging, execution, serializer, cluster peer, clock, cloner, class
     * finder), assembles the message pipeline, starts extensions and connects
     * to the cluster. The order of the wiring below is significant.
     *
     * @return {@link #getStartPromise()}, completed once the pipeline
     *         connection (and HOST-mode reminder bootstrap) has finished
     * @throws IllegalStateException if the stage was already started
     */
    public Task<?> start()
    {
        startCalled = true;
        if (state != null)
        {
            throw new IllegalStateException("Can't start the stage at this state=" + state);
        }
        state = NodeCapabilities.NodeState.RUNNING;
        if (container != null)
        {
            // pick up singleton ActorExtension beans registered with the container
            extensions.addAll(container.getClasses().stream().filter(c -> ActorExtension.class.isAssignableFrom(c) && c.isAnnotationPresent(Singleton.class))
                    .map(c -> (ActorExtension) container.get(c)).collect(Collectors.toList()));
            // pre create the class descriptors if possible.
            container.getClasses().stream()
                    .filter(c -> (c != null && c.isInterface() && Actor.class.isAssignableFrom(c)))
                    .parallel()
                    .forEach(c -> DefaultDescriptorFactory.get().getInvoker(c));
        }
        if (loggerExtension == null)
        {
            loggerExtension = getFirstExtension(LoggerExtension.class);
            if (loggerExtension == null)
            {
                loggerExtension = new DefaultLoggerExtension();
            }
        }
        logger = loggerExtension.getLogger(this);
        if (clusterName == null || clusterName.isEmpty())
        {
            setClusterName("orbit-cluster");
        }
        if (nodeName == null || nodeName.isEmpty())
        {
            setNodeName(getClusterName());
        }
        if (executionPool == null || messagingPool == null)
        {
            // a single scaling pool is shared by execution and messaging when neither was provided
            final ExecutorService newService = ExecutorUtils.newScalingThreadPool(executionPoolSize);
            if (executionPool == null)
            {
                executionPool = newService;
            }
            if (messagingPool == null)
            {
                messagingPool = newService;
            }
        }
        executionSerializer = new WaitFreeMultiExecutionSerializer<>(executionPool);
        if (hosting == null)
        {
            hosting = container == null ? new Hosting() : container.get(Hosting.class);
        }
        if (messaging == null)
        {
            messaging = container == null ? new Messaging() : container.get(Messaging.class);
        }
        if (execution == null)
        {
            execution = container == null ? new Execution() : container.get(Execution.class);
        }
        if (messageSerializer == null)
        {
            messageSerializer = new JavaMessageSerializer();
        }
        if (clusterPeer == null)
        {
            if (container != null)
            {
                // fall back to JGroups when the container has no ClusterPeer implementation registered
                if (!container.getClasses().stream().filter(ClusterPeer.class::isAssignableFrom).findAny().isPresent())
                {
                    clusterPeer = container.get(JGroupsClusterPeer.class);
                }
                else
                {
                    clusterPeer = container.get(ClusterPeer.class);
                }
            }
            else
            {
                clusterPeer = new JGroupsClusterPeer();
            }
        }
        if (clock == null)
        {
            clock = Clock.systemUTC();
        }
        if (objectCloner == null)
        {
            objectCloner = new KryoCloner();
        }
        finder = getFirstExtension(ActorClassFinder.class);
        if (finder == null)
        {
            // the default finder scans for implementations; join() blocks until the scan is done
            finder = new DefaultActorClassFinder();
            finder.start().join();
        }
        cacheManager = new ResponseCaching();
        this.configureOrbitContainer();
        hosting.setNodeType(mode == StageMode.HOST ? NodeCapabilities.NodeTypeEnum.SERVER : NodeCapabilities.NodeTypeEnum.CLIENT);
        execution.setRuntime(this);
        execution.setExecutor(executionPool);
        execution.setObjects(objects);
        execution.setExecutionSerializer(executionSerializer);
        cacheManager.setObjectCloner(objectCloner);
        cacheManager.setRuntime(this);
        cacheManager.setMessageSerializer(messageSerializer);
        messaging.setRuntime(this);
        hosting.setStage(this);
        hosting.setClusterPeer(clusterPeer);
        // pipeline order (head to tail): caching -> execution -> hosting -> messaging -> serialization -> network
        pipeline = new DefaultPipeline();
        // caches responses
        pipeline.addLast(DefaultHandlers.CACHING, cacheManager);
        pipeline.addLast(DefaultHandlers.EXECUTION, execution);
        // hosting handler (actor placement/routing)
        pipeline.addLast(DefaultHandlers.HOSTING, hosting);
        // handles invocation messages and request-response matching
        pipeline.addLast(DefaultHandlers.MESSAGING, messaging);
        // message serializer handler
        pipeline.addLast(DefaultHandlers.SERIALIZATION, new SerializationHandler(this, messageSerializer));
        // cluster peer handler
        pipeline.addLast(DefaultHandlers.NETWORK, new ClusterHandler(clusterPeer, clusterName, nodeName));
        // let pipeline extensions insert themselves at their requested positions
        extensions.stream().filter(extension -> extension instanceof PipelineExtension)
                .map(extension -> (PipelineExtension) extension)
                .forEach(extension -> {
                    if (extension.beforeHandlerName() != null)
                    {
                        pipeline.addHandlerBefore(extension.beforeHandlerName(), extension.getName(), extension);
                    }
                    else if (extension.afterHandlerName() != null)
                    {
                        pipeline.addHandlerAfter(extension.afterHandlerName(), extension.getName(), extension);
                    }
                    else
                    {
                        pipeline.addFirst(extension.getName(), extension);
                    }
                });
        // ensure a default stream provider exists
        StreamProvider defaultStreamProvider = extensions.stream()
                .filter(p -> p instanceof StreamProvider)
                .map(p -> (StreamProvider) p)
                .filter(p -> StringUtils.equals(p.getName(), AsyncStream.DEFAULT_PROVIDER)).findFirst().orElse(null);
        if (defaultStreamProvider == null)
        {
            defaultStreamProvider = new SimpleStreamExtension(AsyncStream.DEFAULT_PROVIDER);
            extensions.add(defaultStreamProvider);
        }
        messaging.start();
        hosting.start();
        execution.start();
        await(Task.allOf(extensions.stream().map(Startable::start)));
        Task<Void> future = pipeline.connect(null);
        if (mode == StageMode.HOST)
        {
            future = future.thenRun(() -> {
                this.bind();
                // bootstrap the reminder service on host nodes
                getReference(ReminderController.class, "0").ensureStart();
            });
        }
        // NOTE(review): bind() also runs inside the HOST branch above, so on host
        // nodes it executes twice; the second call rewrites the same thread-local
        // and looks harmless, but confirm the duplication is intentional.
        future = future.thenRun(() -> bind());
        // schedules the cleanup
        timer.schedule(new TimerTask()
        {
            @Override
            public void run()
            {
                if (state == NodeCapabilities.NodeState.RUNNING)
                {
                    // run cleanup off the timer thread
                    ForkJoinTask.adapt(() -> cleanup().join()).fork();
                }
            }
        }, cleanupIntervalMillis, cleanupIntervalMillis);
        future.whenComplete((r, e) -> {
            if (e != null)
            {
                startPromise.completeExceptionally(e);
            }
            else
            {
                startPromise.complete(r);
            }
        });
        return startPromise;
    }
private void configureOrbitContainer()
{
// orbitContainer will be null if the application is not using it
if (container != null)
{
// Create a lifetime provider for actor DI
LifetimeExtension containerLifetime = new LifetimeExtension()
{
@Override
public Task<?> preActivation(AbstractActor<?> actor)
{
container.inject(actor);
return Task.done();
}
};
extensions.add(containerLifetime);
}
}
    /** Overrides the cluster peer; must be set before {@link #start()} to take effect. */
    public void setClusterPeer(final ClusterPeer clusterPeer)
    {
        this.clusterPeer = clusterPeer;
    }
    /**
     * Installs extensions to the stage.
     * <p>
     * Example:
     * <pre>
     * stage.addExtension(new MongoDbProvider(...));
     * </pre>
     *
     * @param extension Actor Extensions instance.
     */
    public void addExtension(final ActorExtension extension)
    {
        this.extensions.add(extension);
    }
    /**
     * Gracefully stops the stage: announces STOPPING to the cluster,
     * deactivates all local actors, cancels timers, stops extensions, waits
     * for in-flight serialized jobs to drain, and finally closes the pipeline.
     *
     * @return a completed task once shutdown finished
     * @throws IllegalStateException if the stage is not currently RUNNING
     */
    public Task<?> stop()
    {
        if (getState() != NodeCapabilities.NodeState.RUNNING)
        {
            throw new IllegalStateException("Stage is not in the running mode, mode: " + mode);
        }
        state = NodeCapabilities.NodeState.STOPPING;
        // * refuse new actor activations
        // first notify other nodes
        // * deactivate all actors
        // * notify rest of the cluster (no more observer messages)
        // * finalize all timers
        // * stop processing new received messages
        // * wait pending tasks execution
        // * stop the network
        logger.debug("start stopping pipeline");
        await(pipeline.write(NodeCapabilities.NodeState.STOPPING));
        logger.debug("stopping actors");
        await(stopActors());
        logger.debug("stopping timers");
        await(stopTimers());
        logger.debug("stopping extensions");
        await(stopExtensions());
        // busy-wait until the execution serializer has drained all pending jobs
        do
        {
            Utils.sleep(100);
        } while (executionSerializer.isBusy());
        logger.debug("closing pipeline");
        await(pipeline.close());
        state = NodeCapabilities.NodeState.STOPPED;
        logger.debug("stop done");
        return Task.done();
    }
private Task<Void> stopActors()
{
for (int passes = 0; passes < 2; passes++)
{
// using negative age meaning all actors, regardless of age
cleanupActors(Long.MIN_VALUE);
}
return Task.done();
}
    /** Deactivates local actors idle longer than the configured default TTL. */
    public Task<Void> cleanupActors()
    {
        return cleanupActors(defaultActorTTL);
    }
    /**
     * Deactivates local actors whose last access is older than {@code maxAge}
     * milliseconds, oldest first, while keeping at most
     * {@code concurrentDeactivations} deactivations in flight at a time.
     *
     * @param maxAge idle threshold in millis; {@code Long.MIN_VALUE} selects all actors
     * @return a completed task once every selected deactivation finished
     */
    private Task<Void> cleanupActors(long maxAge)
    {
        // sort by last access (older first)
        final Iterator<ActorBaseEntry> iterator = objects.stream()
                .filter(o -> o instanceof ActorBaseEntry)
                .map(o -> (ActorBaseEntry) o)
                .sorted((a, b) -> Long.compare(a.getLastAccess(), b.getLastAccess()))
                .iterator();
        final List<Task> pending = new ArrayList<>();
        // ensure that certain number of concurrent deactivations is happening at each moment
        while (iterator.hasNext())
        {
            // fill the in-flight window up to concurrentDeactivations
            while (pending.size() < concurrentDeactivations && iterator.hasNext())
            {
                final ActorBaseEntry<?> actor = iterator.next();
                if (clock().millis() - actor.getLastAccess() > maxAge)
                {
                    if (logger.isTraceEnabled())
                    {
                        logger.trace("deactivating " + actor.getRemoteReference());
                    }
                    pending.add(actor.deactivate());
                }
            }
            if (pending.size() > 0)
            {
                // await for at least one deactivation to complete
                await(Task.anyOf(pending));
                // remove all completed deactivations
                for (int i = pending.size(); --i >= 0; )
                {
                    if (pending.get(i).isDone())
                    {
                        pending.remove(i);
                    }
                }
            }
        }
        // wait for the tail of in-flight deactivations
        if (pending.size() > 0)
        {
            await(Task.allOf(pending));
        }
        return Task.done();
    }
private Task<Void> stopTimers()
{
try
{
timer.cancel();
}
catch (Throwable ex)
{
logger.error("Error stopping timers", ex);
}
return Task.done();
}
private Task<Void> stopExtensions()
{
for (ActorExtension e : getExtensions())
{
try
{
await(e.stop());
}
catch (Throwable ex)
{
logger.error("Error stopping extension: " + e);
}
}
return Task.done();
}
    public Hosting getHosting()
    {
        return hosting;
    }
    /** Returns the cluster peer, lazily creating a JGroups peer if none was set. */
    public ClusterPeer getClusterPeer()
    {
        return clusterPeer != null ? clusterPeer : (clusterPeer = new JGroupsClusterPeer());
    }
    /**
     * One cleanup pass: execution housekeeping, TTL-based actor deactivation,
     * then messaging housekeeping. Triggered periodically by the stage timer.
     */
    public Task cleanup()
    {
        await(execution.cleanup());
        await(cleanupActors(defaultActorTTL));
        await(messaging.cleanup());
        return Task.done();
    }
    /**
     * Binds this stage to the current thread.
     * This tells ungrounded references to use this stage to call remote methods.
     * <p>
     * An ungrounded reference is a reference created with {@code Actor.getRemoteReference} and used outside of an actor method.
     * <p>
     * This is only necessary when there are <i>two or more</i> OrbitStages active in the same virtual machine and
     * remote calls need to be issued from outside an actor.
     * This method was created to help with test cases.
     * <p>
     * A normal application will have a single stage and should have no reason to call this method.
     * <p>
     * This method writes a weak reference to the runtime in a thread local.
     * No cleanup is necessary, so none is available.
     */
    public void bind()
    {
        // cachedRef is a WeakReference, so binding never pins the stage in memory
        ActorRuntime.setRuntime(this.cachedRef);
    }
public List<NodeAddress> getAllNodes()
{
if (hosting == null)
{
return Collections.emptyList();
}
return hosting.getAllNodes();
}
public List<NodeAddress> getServerNodes()
{
if (hosting == null)
{
return Collections.emptyList();
}
return hosting.getServerNodes();
}
public NodeCapabilities.NodeState getState()
{
return state;
}
@ExportMetric(name = "localActorCount")
public long getLocalActorCount()
{
long value = 0;
return value;
}
@ExportMetric(name = "messagesReceived")
public long getMessagesReceived()
{
long value = 0;
return value;
}
@ExportMetric(name = "messagesHandled")
public long getMessagesHandled()
{
long value = 0;
return value;
}
@ExportMetric(name = "refusedExecutions")
public long getRefusedExecutions()
{
long value = 0;
return value;
}
    /** This stage is itself the {@link ActorRuntime}. */
    public ActorRuntime getRuntime()
    {
        return this;
    }
    public MessageSerializer getMessageSerializer()
    {
        return messageSerializer;
    }
    /** Overrides the wire serializer; must be set before {@link #start()} to take effect. */
    public void setMessageSerializer(final MessageSerializer messageSerializer)
    {
        this.messageSerializer = messageSerializer;
    }
    /**
     * Dispatches a remote method invocation into the pipeline, copying any
     * sticky-header context values into the invocation's message headers.
     *
     * @throws IllegalStateException if the stage has been stopped
     */
    @Override
    public Task<?> invoke(final RemoteReference toReference, final Method m, final boolean oneWay, final int methodId, final Object[] params)
    {
        if (state == NodeCapabilities.NodeState.STOPPED)
        {
            throw new IllegalStateException("Stage is stopped");
        }
        final Invocation invocation = new Invocation(toReference, m, oneWay, methodId, params, null);
        // copy sticky context values into the message headers
        final ActorTaskContext context = ActorTaskContext.current();
        if (context != null)
        {
            // headers map is allocated lazily, only when a sticky value is present
            LinkedHashMap<Object, Object> headers = null;
            for (String key : stickyHeaders)
            {
                final Object value = context.getProperty(key);
                if (value != null)
                {
                    if (headers == null)
                    {
                        headers = new LinkedHashMap<>();
                    }
                    headers.put(key, value);
                }
            }
            invocation.setHeaders(headers);
        }
        final Task<Void> result = pipeline.write(invocation);
        return result;
    }
    /**
     * Registers a timer for a local actor. The callback runs through the
     * execution serializer keyed by the actor, preserving the actor's
     * single-threaded execution guarantee. The timer cancels itself once the
     * actor is deactivated.
     *
     * @return a {@link Registration} whose invocation cancels the timer
     */
    @Override
    public Registration registerTimer(final AbstractActor<?> actor,
                                      final Callable<Task<?>> taskCallable,
                                      final long dueTime, final long period,
                                      final TimeUnit timeUnit)
    {
        // stateless workers are keyed by instance; normal actors by their reference
        final Object key = actor.getClass().isAnnotationPresent(StatelessWorker.class)
                ? actor : RemoteReference.from(actor);
        final ActorEntry localActor = (ActorEntry) objects.findLocalActor((Actor) actor);
        if (localActor == null || localActor.isDeactivated())
        {
            // actor already gone: return a no-op registration
            return () -> {
                // do nothing;
            };
        }
        // TODO: handle deactivation: add the timers to a localActor list for cancelation
        final TimerTask timerTask = new TimerTask()
        {
            boolean canceled;
            @Override
            public void run()
            {
                if (localActor.isDeactivated())
                {
                    cancel();
                    return;
                }
                executionSerializer.offerJob(key,
                        () -> {
                            // NOTE(review): binding only when the actor is deactivated looks
                            // inverted — one would expect bind() unconditionally before
                            // invoking the callback; confirm the intended condition.
                            if (localActor.isDeactivated())
                            {
                                bind();
                            }
                            try
                            {
                                if (!canceled)
                                {
                                    return taskCallable.call();
                                }
                            }
                            catch (Exception ex)
                            {
                                logger.warn("Error calling timer", ex);
                            }
                            return Task.done();
                        }, 1000);
            }
            @Override
            public boolean cancel()
            {
                canceled = true;
                return super.cancel();
            }
        };
        if (period > 0)
        {
            timer.schedule(timerTask, timeUnit.toMillis(dueTime), timeUnit.toMillis(period));
        }
        else
        {
            timer.schedule(timerTask, timeUnit.toMillis(dueTime));
        }
        return timerTask::cancel;
    }
    @Override
    public Clock clock()
    {
        return clock;
    }
    /** Registers (or updates) a persistent reminder via the ReminderController actor. */
    @Override
    public Task<?> registerReminder(final Remindable actor, final String reminderName, final long dueTime, final long period, final TimeUnit timeUnit)
    {
        return getReference(ReminderController.class, "0").registerOrUpdateReminder(actor, reminderName, new Date(clock.millis() + timeUnit.toMillis(dueTime)), period, timeUnit);
    }
    @Override
    public Task<?> unregisterReminder(final Remindable actor, final String reminderName)
    {
        return getReference(ReminderController.class, "0").unregisterReminder(actor, reminderName);
    }
    /** Unique identity of this runtime instance, fixed at construction. */
    @Override
    public String runtimeIdentity()
    {
        return runtimeIdentity;
    }
private String generateRuntimeIdentity()
{
final UUID uuid = UUID.randomUUID();
final String encoded = Base64.getEncoder().encodeToString(
ByteBuffer.allocate(16).putLong(uuid.getMostSignificantBits()).putLong(uuid.getLeastSignificantBits()).array());
return "Orbit[" + encoded.substring(0, encoded.length() - 2) + "]";
}
    /** Resolves (optionally activating) the node hosting the given actor reference. */
    @Override
    public Task<NodeAddress> locateActor(final Addressable actorReference, final boolean forceActivation)
    {
        return hosting.locateActor((RemoteReference<?>) actorReference, forceActivation);
    }
    /**
     * Registers a local observer under an explicit id and returns a remote
     * reference other nodes can use to call it back.
     */
    @Override
    public <T extends ActorObserver> T registerObserver(Class<T> iClass, String id, final T observer)
    {
        final RemoteReference<T> reference = objects.getOrAddLocalObjectReference(hosting.getNodeAddress(), iClass, id, observer);
        RemoteReference.setRuntime(reference, this);
        return iClass.cast(reference);
    }
    /** Registers a local observer with a generated id. */
    @Override
    public <T extends ActorObserver> T registerObserver(final Class<T> iClass, final T observer)
    {
        final RemoteReference<T> reference = objects.getOrAddLocalObjectReference(hosting.getNodeAddress(), iClass, null, observer);
        RemoteReference.setRuntime(reference, this);
        //noinspection unchecked
        return iClass != null ? iClass.cast(reference) : (T) reference;
    }
    /** Builds a reference to an observer living on another node. */
    @Override
    public <T extends ActorObserver> T getRemoteObserverReference(final NodeAddress address, final Class<T> iClass, final Object id)
    {
        return DefaultDescriptorFactory.get().getReference(this, address, iClass, id);
    }
    /** Builds an actor reference bound to this runtime. */
    @Override
    public <T extends Actor> T getReference(final Class<T> iClass, final Object id)
    {
        return DefaultDescriptorFactory.get().getReference(this, null, iClass, id);
    }
    /** Looks up the invoker for an actor interface by its numeric id. */
    @Override
    public ObjectInvoker<?> getInvoker(final int interfaceId)
    {
        return DefaultDescriptorFactory.get().getInvoker(DefaultClassDictionary.get().getClassById(interfaceId));
    }
    /**
     * Returns the stream provider with the given name. When called from inside
     * an actor, the provider is wrapped so subscriber callbacks keep the
     * actor's sequential-execution contract (see TODOs below).
     *
     * @throws UncheckedException if no provider with that name is registered
     */
    @Override
    public StreamProvider getStreamProvider(final String name)
    {
        StreamProvider streamProvider = getAllExtensions(StreamProvider.class).stream()
                .filter(p -> StringUtils.equals(p.getName(), name))
                .findFirst().orElseThrow(() -> new UncheckedException(String.format("Provider: %s not found", name)));
        final AbstractActor<?> actor = ActorTaskContext.currentActor();
        if (actor != null)
        {
            // wraps the stream provider to ensure sequential execution
            return new StreamProvider()
            {
                @Override
                public <T> AsyncStream<T> getStream(final Class<T> dataClass, final String id)
                {
                    final AsyncStream<T> stream = streamProvider.getStream(dataClass, id);
                    return new AsyncStream<T>()
                    {
                        @Override
                        public Task<Void> unSubscribe(final StreamSubscriptionHandle<T> handle)
                        {
                            return stream.unSubscribe(handle);
                        }
                        @Override
                        public Task<StreamSubscriptionHandle<T>> subscribe(final AsyncObserver<T> observer)
                        {
                            return stream.subscribe(new AsyncObserver<T>()
                            {
                                @Override
                                public Task<Void> onNext(final T data)
                                {
                                    // TODO use actor executor, when available
                                    return observer.onNext(data);
                                }
                                @Override
                                public Task<Void> onError(final Exception ex)
                                {
                                    // TODO use actor executor, when available
                                    return observer.onError(ex);
                                }
                            });
                            // TODO unsubscribe automatically on deactivation
                        }
                        @Override
                        public Task<Void> post(final T data)
                        {
                            return stream.post(data);
                        }
                    };
                }
                @Override
                public String getName()
                {
                    return streamProvider.getName();
                }
            };
        }
        return streamProvider;
    }
    /** Convenience shortcut: resolves the provider by name, then the stream. */
    @Override
    public <T> AsyncStream<T> getStream(final String provider, final Class<T> dataClass, final String id)
    {
        return getStreamProvider(provider).getStream(dataClass, id);
    }
    /** Live list of installed extensions (backed by a CopyOnWriteArrayList). */
    @Override
    public List<ActorExtension> getExtensions()
    {
        return extensions;
    }
    /**
     * Creates the local registry entry for a reference: an actor entry
     * (stateless-worker or regular) wired with serializer, logger, runtime,
     * concrete class and storage extension — or an observer entry.
     *
     * @throws IllegalArgumentException if the reference is neither an Actor nor an ActorObserver
     */
    private <T> LocalObjects.LocalObjectEntry createLocalObjectEntry(final RemoteReference<T> reference, final T object)
    {
        final Class<T> interfaceClass = RemoteReference.getInterfaceClass(reference);
        if (Actor.class.isAssignableFrom(interfaceClass))
        {
            final ActorBaseEntry actorEntry;
            if (interfaceClass.isAnnotationPresent(StatelessWorker.class))
            {
                actorEntry = new StatelessActorEntry<>(reference);
            }
            else
            {
                actorEntry = new ActorEntry<>(reference);
            }
            actorEntry.setExecutionSerializer(executionSerializer);
            actorEntry.setLoggerExtension(loggerExtension);
            actorEntry.setRuntime(this);
            final Class actorImplementation = finder.findActorImplementation((Class) interfaceClass);
            actorEntry.setConcreteClass(actorImplementation);
            actorEntry.setStorageExtension(getStorageExtensionFor(actorImplementation));
            return actorEntry;
        }
        if (ActorObserver.class.isAssignableFrom(interfaceClass))
        {
            final ObserverEntry observerEntry = new ObserverEntry(reference, object);
            observerEntry.setExecutionSerializer(executionSerializer);
            return observerEntry;
        }
        throw new IllegalArgumentException("Invalid object type: " + object.getClass());
    }
    /**
     * Finds the storage extension for an actor class: the provider whose name
     * matches the class's {@code @StorageExtension} annotation, or the one
     * named "default" when the class is not annotated. Returns null when no
     * matching provider (or no extension list) exists.
     */
    @SuppressWarnings("unchecked")
    public <T extends ActorExtension> T getStorageExtensionFor(Class actorClass)
    {
        if (extensions == null)
        {
            return null;
        }
        final Annotation annotation = actorClass.getAnnotation(com.ea.orbit.actors.annotation.StorageExtension.class);
        com.ea.orbit.actors.annotation.StorageExtension ann = (com.ea.orbit.actors.annotation.StorageExtension) annotation;
        String extensionName = ann == null ? "default" : ann.value();
        // selects the first provider with the right name
        return (T) extensions.stream()
                .filter(p -> (p instanceof com.ea.orbit.actors.extensions.StorageExtension) && extensionName.equals(((com.ea.orbit.actors.extensions.StorageExtension) p).getName()))
                .findFirst()
                .orElse(null);
    }
    /**
     * Whether this node can activate the named actor interface: the interface
     * class must be resolvable and an implementation must be known to the
     * class finder. A non-RUNNING node refuses activations when other server
     * nodes are available to take them.
     */
    public boolean canActivateActor(final String interfaceName)
    {
        if (getState() != NodeCapabilities.NodeState.RUNNING)
        {
            // todo, improve this
            if (hosting.getServerNodes().size() > 1)
            {
                return false;
            }
        }
        Class<Actor> aInterface = Utils.classForName(interfaceName, true);
        if (aInterface == null)
        {
            return false;
        }
        final Class<?> concreteClass = finder.findActorImplementation(aInterface);
        return concreteClass != null;
    }
    public Pipeline getPipeline()
    {
        return pipeline;
    }
    /** Obtains a logger for the given object from the configured LoggerExtension. */
    @Override
    public Logger getLogger(Object object)
    {
        return loggerExtension.getLogger(object);
    }
    /** Mutable set of header names propagated with every invocation. */
    public Set<String> getStickyHeaders()
    {
        return stickyHeaders;
    }
} |
package org.icij.extract.core;
import java.util.concurrent.Callable;
import java.nio.file.Path;
import java.io.IOException;
import java.io.FileNotFoundException;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.exception.TikaException;
import org.apache.tika.exception.EncryptedDocumentException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A task is defined as both the extraction from a file and the output of extracted data.
* Completion is only considered successful if both parts of the task complete with no exceptions.
*
* The final status of each task is saved to the reporter, if any is set.
*
* @since 1.0.0
*/
class ExtractingTask implements Runnable, Callable<Path> {
private static final Logger logger = LoggerFactory.getLogger(ExtractingTask.class);
protected final Path file;
protected final Extractor extractor;
protected final Spewer spewer;
private final Reporter reporter;
ExtractingTask(final Path file, final Extractor extractor, final Spewer spewer, final Reporter reporter) {
this.file = file;
this.extractor = extractor;
this.spewer = spewer;
this.reporter = reporter;
}
@Override
public void run() {
try {
call();
} catch (Exception e) {
logger.error(String.format("Exception while consuming file: \"%s\".", file), e);
}
}
@Override
public Path call() throws Exception {
// Check status in reporter. Skip if good.
// Otherwise save status to registry and start a new job.
if (null != reporter) {
if (reporter.check(file, ExtractionResult.SUCCEEDED)) {
logger.info(String.format("File already extracted; skipping: \"%s\".", file));
} else {
reporter.save(file, extractResult(file));
}
return file;
}
try {
extract(file);
// Catch exceptions that should be converted into warnings.
} catch (IOException e) {
final Throwable cause = e.getCause();
if (null != cause && cause instanceof ExcludedMediaTypeException) {
logger.warn(String.format("The document was not parsed because all of the parsers that handle it " +
"were excluded: \"%s\".", file));
} else {
throw e;
}
}
return file;
}
/**
* Send a file to the {@link Extractor}.
*
* @param file path of file to extract from
* @throws Exception if the extraction or output could not be completed
*/
private void extract(final Path file) throws Exception {
final Metadata metadata = new Metadata();
logger.info(String.format("Beginning extraction: \"%s\".", file));
try (final ParsingReader reader = extractor.extract(file, metadata)) {
spewer.write(file, metadata, reader);
}
}
/**
* Send a file to the {@link Extractor} and return the result.
*
* @param file path of file to extract from
* @return The extraction result code.
*/
private ExtractionResult extractResult(final Path file) {
ExtractionResult status = ExtractionResult.SUCCEEDED;
try {
extract(file);
// SpewerException is thrown exclusively due to an output endpoint error.
// It means that extraction succeeded, but the result could not be saved.
} catch (SpewerException e) {
logger.error(String.format("The extraction result could not be outputted: \"%s\".", file), e);
status = ExtractionResult.NOT_SAVED;
} catch (FileNotFoundException e) {
logger.error(String.format("File not found: \"%s\". Skipping.", file), e);
status = ExtractionResult.NOT_FOUND;
} catch (IOException e) {
// ParsingReader#read catches exceptions and wraps them in an IOException.
final Throwable c = e.getCause();
if (c instanceof ExcludedMediaTypeException) {
status = ExtractionResult.NOT_PARSED;
} else if (c instanceof EncryptedDocumentException) {
logger.warn(String.format("Skipping encrypted file: \"%s\".", file), e);
status = ExtractionResult.NOT_DECRYPTED;
// TIKA-198: IOExceptions thrown by parsers will be wrapped in a TikaException.
// This helps us differentiate input stream exceptions from output stream exceptions.
} else if (c instanceof TikaException) {
logger.error(String.format("The document could not be parsed: \"%s\".", file), e);
status = ExtractionResult.NOT_PARSED;
} else {
logger.error(String.format("The document stream could not be read: \"%s\".", file), e);
status = ExtractionResult.NOT_READ;
}
} catch (Exception e) {
logger.error(String.format("Unknown exception during extraction or output: \"%s\".", file), e);
status = ExtractionResult.NOT_CLEAR;
}
if (ExtractionResult.SUCCEEDED == status) {
logger.info(String.format("Finished outputting file: \"%s\".", file));
}
return status;
}
} |
package org.yecht;
import java.util.Map;
import java.util.HashMap;
/**
*
* @author <a href="mailto:ola.bini@gmail.com">Ola Bini</a>
*/
public class Emitter {
    /**
     * Per-node bookkeeping used while emitting.
     * NOTE(review): field usage is not visible in this chunk — presumably
     * {@code pos} is an output-buffer offset and {@code indent} the node's
     * indentation level; confirm against the emit routines.
     */
    public static class Node {
        public int pos;
        public int indent;
        public boolean is_shortcut;
    }
public boolean headless;
public boolean use_header;
public boolean use_version;
public boolean sort_keys;
public String anchor_format;
public boolean explicit_typing;
public int best_width;
public ScalarStyle style;
public DocStage stage;
public int level;
public int indent;
public long ignore_id;
Map<Long, Long> markers;
Map<Long, String> anchors;
Map<String, Object> anchored;
int bufsize;
byte[] buffer;
int marker;
int bufpos;
EmitterHandler emitter_handler;
OutputHandler output_handler;
Level[] levels;
int lvl_idx;
int lvl_capa;
Object bonus;
// syck_new_emitter
public Emitter() {
this.headless = false;
this.use_header = false;
this.use_version = false;
this.sort_keys = false;
this.anchor_format = null;
this.explicit_typing = false;
this.best_width = 80;
this.style = ScalarStyle.None;
this.stage = DocStage.open;
this.indent = 2;
this.level = -1;
this.anchors = null;
this.markers = null;
this.anchored = null;
this.bufsize = YAML.BUFFERSIZE;
this.buffer = null;
this.marker = -1;
this.bufpos = 0;
this.emitter_handler = null;
this.output_handler = null;
this.lvl_idx = 0;
this.lvl_capa = YAML.ALLOC_CT;
this.levels = new Level[this.lvl_capa];
resetLevels();
this.bonus = null;
}
// syck_emitter_current_level
public Level currentLevel() {
return levels[lvl_idx-1];
}
// syck_emitter_parent_level
public Level parentLevel() {
return levels[lvl_idx-2];
}
// syck_emitter_pop_level
public void popLevel() {
if(lvl_idx <= 1) {
return;
}
lvl_idx
}
// syck_emitter_add_level
public void addLevel(int len, LevelStatus status) {
if(lvl_idx + 1 > lvl_capa) {
lvl_capa += YAML.ALLOC_CT;
levels = YAML.realloc(levels, lvl_capa);
}
levels[lvl_idx] = new Level();
levels[lvl_idx].spaces = len;
levels[lvl_idx].ncount = 0;
levels[lvl_idx].domain = levels[lvl_idx-1].domain;
levels[lvl_idx].status = status;
levels[lvl_idx].anctag = 0;
lvl_idx++;
}
// syck_emitter_reset_levels
public void resetLevels() {
while(lvl_idx > 1) {
popLevel();
}
if(lvl_idx < 1) {
lvl_idx = 1;
levels[0] = new Level();
levels[0].spaces = -1;
levels[0].ncount = 0;
levels[0].domain = "";
levels[0].anctag = 0;
}
levels[0].status = LevelStatus.header;
}
// syck_emitter_handler
public void handler(EmitterHandler hdlr) {
this.emitter_handler = hdlr;
}
// syck_output_handler
public void outputHandler(OutputHandler hdlr) {
this.output_handler = hdlr;
}
// syck_emitter_clear
public void clear() {
if(this.buffer == null) {
this.buffer = new byte[this.bufsize];
}
this.buffer[0] = 0;
this.marker = 0;
this.bufpos = 0;
}
// syck_emitter_write
public void write(Pointer _str, final int _len) {
int len = _len;
byte[] bstr = _str.buffer;
int str = _str.start;
if(this.buffer == null) {
clear();
}
int at = this.marker;
if(len + at >= this.bufsize - 1) {
flush(0);
for(;;) {
int rest = (this.bufsize - 1) - this.marker;
if(len <= rest) break;
System.arraycopy(bstr, str, this.buffer, this.marker, rest);
this.marker += rest;
str += rest;
len -= rest;
flush(0);
}
}
System.arraycopy(bstr, str, this.buffer, this.marker, len);
this.marker += len;
this.buffer[this.marker] = 0;
}
// syck_emitter_flush
public void flush(int check_room ){
if(check_room > 0) {
if((this.bufsize - 1) > (this.marker + check_room)) {
return;
}
} else {
check_room = this.bufsize - 1;
}
if(check_room > this.marker ) {
check_room = this.marker;
}
this.output_handler.handle(this, this.buffer, check_room);
this.bufpos += check_room;
this.marker -= check_room;
}
private final static Pointer NEWLINE = Pointer.create("\n");
private final static Pointer SPACE = Pointer.create(" ");
private final static Pointer SLASH = Pointer.create("/");
private final static Pointer THREE_DASHES = Pointer.create("
private final static Pointer QUESTION_MARK_SPACE = Pointer.create("? ");
private final static Pointer BANG = Pointer.create("!");
private final static Pointer BANG_SPACE = Pointer.create("! ");
private final static Pointer TWO_BANGS = Pointer.create("!!");
private final static Pointer COLON_SPACE = Pointer.create(": ");
private final static Pointer BACKSLASH = Pointer.create("\\");
private final static Pointer ZERO = Pointer.create("0");
private final static Pointer X = Pointer.create("x");
/*
* Start emitting from the given node, check for anchoring and then
* issue the callback to the emitter handler.
*/
// syck_emit
public void emit(long n) {
int indent = 0;
int x = 0;
Level lvl = currentLevel();
if(stage == DocStage.open && (!headless || use_header)) {
if(use_version) {
String header = "--- %YAML:" + YAML.YAML_MAJOR + "." + YAML.YAML_MINOR + " ";
write(Pointer.create(header), header.length());
} else {
write(THREE_DASHES, 4);
}
stage = DocStage.processing;
}
if(lvl.spaces >= 0) {
indent = lvl.spaces + this.indent;
}
addLevel(indent, LevelStatus.open);
Level parent = lvl;
lvl = currentLevel();
boolean handle = true;
if(this.anchors != null && this.markers.containsKey(n)) {
long oid = this.markers.get(n);
if(this.anchors.containsKey(oid)) {
String anchor_name = this.anchors.get(oid);
if(this.anchored == null) {
anchored = new HashMap<String, Object>();
}
if(!anchored.containsKey(anchor_name)) {
String an = "&" + anchor_name + " ";
if(parent.status == LevelStatus.map && parent.ncount % 2 == 1) {
write(QUESTION_MARK_SPACE, 2);
parent.status = LevelStatus.mapx;
}
write(Pointer.create(an), an.length());
this.anchored.put(anchor_name, null);
lvl.anctag = 1;
} else {
String an = "*" + anchor_name;
write(Pointer.create(an), an.length());
handle = false;
}
}
}
if(handle) {
this.emitter_handler.handle(this, n);
}
popLevel();
if(lvl_idx == 1) {
write(NEWLINE, 1);
this.headless = false;
this.stage = DocStage.open;
}
}
// syck_emit_tag
public void emitTag(String tag, String ignore) {
if(tag == null) {
return;
}
if(ignore != null && ImplicitScanner.tagcmp(tag, ignore) && !this.explicit_typing) {
return;
}
Level lvl = currentLevel();
if(tag.length() == 0) {
write(BANG_SPACE, 2);
} else if(tag.startsWith("tag:")) {
int taglen = tag.length();
Pointer ptag = Pointer.create(tag);
write(BANG, 1);
if(tag.substring(4).startsWith(YAML.DOMAIN)) {
int skip = 4 + YAML.DOMAIN.length() + 1;
write(ptag.withStart(skip), taglen - skip);
} else {
int subd = 4;
while(subd < taglen && tag.charAt(subd) != ':') {
subd++;
}
if(subd < taglen && tag.charAt(subd) == ':') {
if(subd > (YAML.DOMAIN.length() + 5) &&
tag.substring(subd - YAML.DOMAIN.length()).startsWith(YAML.DOMAIN)) {
write(ptag.withStart(4), (subd - YAML.DOMAIN.length()) - 5);
write(SLASH, 1);
write(ptag.withStart(subd+1), taglen - (subd + 1));
} else {
write(ptag.withStart(4), subd - 4);
write(SLASH, 1);
write(ptag.withStart(subd + 1), taglen - (subd + 1));
}
} else {
/* TODO: Invalid tag (no colon after domain) */
return;
}
}
write(SPACE, 1);
} else if(tag.startsWith("x-private:")) {
write(TWO_BANGS, 2);
write(Pointer.create(tag.substring(10)), tag.length()-10);
write(SPACE, 1);
}
lvl.anctag = 1;
}
// syck_emit_indent
public void emitIndent() {
Level lvl = currentLevel();
if(bufpos == 0 && marker == 0) {
return;
}
if(lvl.spaces >= 0) {
byte[] spcs = new byte[lvl.spaces + 2];
spcs[0] = '\n';
spcs[lvl.spaces + 1] = 0;
for(int i=0; i<lvl.spaces; i++) {
spcs[i+1] = ' ';
}
write(Pointer.create(spcs, 0), lvl.spaces + 1);
}
}
/* Clear the scan */
private final static int SCAN_NONE =0;
/* All printable characters? */
private final static int SCAN_NONPRINT =1;
/* Any indented lines? */
private final static int SCAN_INDENTED =2;
/* Larger than the requested width? */
private final static int SCAN_WIDE =4;
/* Opens or closes with whitespace? */
private final static int SCAN_WHITEEDGE =8;
/* Contains a newline */
private final static int SCAN_NEWLINE =16;
/* Contains a single quote */
private final static int SCAN_SINGLEQ =32;
/* Contains a double quote */
private final static int SCAN_DOUBLEQ =64;
/* Starts with a token */
private final static int SCAN_INDIC_S =128;
/* Contains a flow indicator */
private final static int SCAN_INDIC_C =256;
/* Ends without newlines */
private final static int SCAN_NONL_E =512;
/* Ends with many newlines */
private final static int SCAN_MANYNL_E =1024;
/* Contains flow map indicators */
private final static int SCAN_FLOWMAP =2048;
/* Contains flow seq indicators */
private final static int SCAN_FLOWSEQ =4096;
/* Contains a valid doc separator */
private final static int SCAN_DOCSEP =8192;
// syck_scan_scalar
public int scanScalar(int req_width, Pointer _cursor, int len) {
byte[] cursorb = _cursor.buffer;
int cursor = _cursor.start;
int start = 0;
int flags = SCAN_NONE;
if(len < 1) {
return flags;
}
switch(cursorb[cursor]) {
case '[': case ']':
case '{': case '}':
case '!': case '*':
case '&': case '|':
case '>': case '\'':
case '"': case '
case '%': case '@':
case '`':
flags |= SCAN_INDIC_S;
break;
case '-': case ':':
case '?': case ',':
if(len == 1 || cursorb[cursor+1] == ' ' || cursorb[cursor+1] == '\n') {
flags |= SCAN_INDIC_S;
}
break;
}
if(cursorb[cursor + len - 1] != '\n') {
flags |= SCAN_NONL_E;
} else if(len > 1 && cursorb[cursor + len - 2] == '\n') {
flags |= SCAN_MANYNL_E;
}
if(
(len>0 && (cursorb[cursor] == ' ' || cursorb[cursor] == '\t')) ||
(len>1 && (cursorb[cursor + len - 1] == ' ' || cursorb[cursor + len - 1] == '\t'))
) {
flags |= SCAN_WHITEEDGE;
}
if(len >= 3 && cursorb[cursor] == '-' && cursorb[cursor+1] == '-' && cursorb[cursor+2] == '-') {
flags |= SCAN_DOCSEP;
}
for(int i=0; i<len; i++) {
int ci = (int)(cursorb[cursor+i]&0xFF);
if(! ( ci == 0x9 ||
ci == 0xA ||
ci == 0xD ||
(ci >= 0x20 && ci <= 0x7E))) {
flags |= SCAN_NONPRINT;
} else if(ci == '\n') {
flags |= SCAN_NEWLINE;
if(len - i >= 3 && cursorb[cursor+i+1] == '-' && cursorb[cursor+i+2] == '-' && cursorb[cursor+i+3] == '-' ) {
flags |= SCAN_DOCSEP;
}
if(cursorb[cursor+i+1] == ' ' || cursorb[cursor+i+1] == '\t') {
flags |= SCAN_INDENTED;
}
if(req_width > 0 && (i - start) > req_width) {
flags |= SCAN_WIDE;
}
start = i;
} else if(ci == '\'') {
flags |= SCAN_SINGLEQ;
} else if(ci == '"') {
flags |= SCAN_DOUBLEQ;
} else if(ci == ']') {
flags |= SCAN_FLOWSEQ;
} else if(ci == '}') {
flags |= SCAN_FLOWMAP;
} else if((ci == ' ' && cursorb[cursor+i+1] == '
(ci == ':' && (cursorb[cursor+i+1] == ' ' ||
cursorb[cursor+i+1] == '\n' ||
i == len - 1 ))) {
flags |= SCAN_INDIC_C;
} else if(ci == ',' && (cursorb[cursor+i+1] == ' ' ||
cursorb[cursor+i+1] == '\n' ||
i == len - 1 )) {
flags |= SCAN_FLOWMAP;
flags |= SCAN_FLOWSEQ;
}
}
return flags;
}
private final static Pointer EMPTY = Pointer.create(new byte[0], 0);
private final static Pointer TILDE = Pointer.create("~");
// syck_emit_scalar
public void emitScalar(String tag, ScalarStyle force_style, int force_indent, int force_width, int keep_nl, Pointer _str, int len) {
if(_str == null) {
_str = EMPTY;
}
byte[] bstr = _str.buffer;
int str = _str.start;
ScalarStyle favor_style = ScalarStyle.Literal;
Level parent = parentLevel();
Level lvl = currentLevel();
if(len == 0 && (parent.status == LevelStatus.map || parent.status == LevelStatus.imap) && parent.ncount % 2 == 1 && ImplicitScanner.tagcmp(tag, "tag:yaml.org,2002:null")) {
_str = TILDE;
bstr = _str.buffer;
str = _str.start;
len = 1;
}
int scan = scanScalar(force_width, _str, len);
String implicit = Parser.taguri(YAML.DOMAIN, ImplicitScanner.matchImplicit(_str, len));
if(!ImplicitScanner.tagcmp(tag, implicit) && ImplicitScanner.tagcmp(tag, "tag:yaml.org,2002:str")) {
force_style = ScalarStyle.TwoQuote;
} else {
if(parent.status == LevelStatus.map && parent.ncount % 2 == 1 && ( !(tag == null || (implicit != null && ImplicitScanner.tagcmp(tag, implicit) && !explicit_typing)))) {
write(QUESTION_MARK_SPACE, 2);
parent.status = LevelStatus.mapx;
}
emitTag(tag, implicit);
}
if(force_style == ScalarStyle.None) {
if((scan & SCAN_NEWLINE) != 0) {
force_style = ScalarStyle.Literal;
} else {
force_style = ScalarStyle.Plain;
}
}
if(this.style == ScalarStyle.Fold) {
favor_style = ScalarStyle.Fold;
}
if((scan & SCAN_NONPRINT) != 0) {
force_style = ScalarStyle.TwoQuote;
} else if((scan & SCAN_WHITEEDGE) != 0) {
force_style = ScalarStyle.TwoQuote;
} else if(force_style != ScalarStyle.Fold && (scan & SCAN_INDENTED) != 0) {
force_style = ScalarStyle.Literal;
} else if(force_style == ScalarStyle.Plain && (scan & SCAN_NEWLINE) != 0) {
force_style = favor_style;
} else if(force_style == ScalarStyle.Plain && parent.status == LevelStatus.iseq && (scan & SCAN_FLOWSEQ) != 0) {
force_style = ScalarStyle.TwoQuote;
} else if(force_style == ScalarStyle.Plain && parent.status == LevelStatus.imap && (scan & SCAN_FLOWMAP) != 0) {
force_style = ScalarStyle.TwoQuote;
} else if(force_style == ScalarStyle.Plain && ((scan & SCAN_INDIC_S) != 0 || (scan & SCAN_INDIC_C) != 0)) {
if((scan & SCAN_NEWLINE) != 0) {
force_style = favor_style;
} else {
force_style = ScalarStyle.TwoQuote;
}
}
if(force_indent > 0) {
lvl.spaces = parent.spaces + force_indent;
} else if((scan & SCAN_DOCSEP) != 0) {
lvl.spaces = parent.spaces + this.indent;
}
if((parent.status == LevelStatus.map || parent.status == LevelStatus.mapx) && parent.ncount % 2 == 1) {
if(force_style != ScalarStyle.Plain) {
force_style = ScalarStyle.TwoQuote;
}
}
if(parent.status == LevelStatus.imap || parent.status == LevelStatus.iseq) {
if(force_style != ScalarStyle.Plain && force_style != ScalarStyle.OneQuote) {
force_style = ScalarStyle.TwoQuote;
}
}
if((scan & SCAN_NONL_E) != 0) {
keep_nl = YAML.NL_CHOMP;
} else if((scan & SCAN_MANYNL_E) != 0) {
keep_nl = YAML.NL_KEEP;
}
switch(force_style) {
case OneQuote:
// syck_emit_1quoted( e, force_width, str, len );
break;
case None:
case TwoQuote:
// syck_emit_2quoted( e, force_width, str, len );
break;
case Fold:
// syck_emit_folded( e, force_width, keep_nl, str, len );
break;
case Literal:
// syck_emit_literal( e, keep_nl, str, len );
break;
case Plain:
write(_str, len);
break;
}
if(parent.status == LevelStatus.mapx) {
write(NEWLINE, 1);
}
}
private final static Pointer hex_table = Pointer.create("0123456789ABCDEF");
// syck_emitter_escape
public void escape(Pointer _src, int len) {
byte[] bsrc = _src.buffer;
int src = _src.start;
for(int i=0; i<len; i++) {
int curr = (int)bsrc[src+i]&0xFF;
if(curr < 0x20 || (0x7E < curr)) {
write(BACKSLASH, 1);
if(curr == 0) {
write(ZERO, 1);
} else {
write(X, 1);
write(hex_table.withStart((curr & 0xF0) >> 4), 1);
write(hex_table.withStart(curr & 0x0F), 1);
}
} else {
write(_src.withStart(src+i), 1);
if(curr == '\\') {
write(BACKSLASH, 1);
}
}
}
}
}// Emitter |
package txnIdSelfCheck;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Semaphore;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.io.FileWriter;
import java.io.IOException;
import java.lang.Thread.UncaughtExceptionHandler;
import org.voltcore.logging.VoltLogger;
import org.voltdb.CLIConfig;
import org.voltdb.ClientResponseImpl;
import org.voltdb.VoltTable;
import org.voltdb.client.Client;
import org.voltdb.client.ClientConfig;
import org.voltdb.client.ClientFactory;
import org.voltdb.client.ClientImpl;
import org.voltdb.client.ClientResponse;
import org.voltdb.client.ClientStatusListenerExt;
import org.voltdb.client.ProcCallException;
import org.voltdb.utils.MiscUtils;
public class Benchmark {
static VoltLogger log = new VoltLogger("Benchmark");
// handy, rather than typing this out several times
static final String HORIZONTAL_RULE =
"
"
// validated command line configuration
final Config config;
// create a client for each server node
Client client;
// Timer for periodic stats printing
Timer timer;
// Benchmark start time
long benchmarkStartTS;
// Timer for writing the checkpoint count for apprunner
Timer checkpointTimer;
// Timer for refreshing ratelimit permits
Timer permitsTimer;
final TxnId2RateLimiter rateLimiter;
final TxnId2PayloadProcessor processor;
final AtomicInteger activeConnections = new AtomicInteger(0);
final AtomicBoolean shutdown = new AtomicBoolean(false);
// for reporting and detecting progress
public static AtomicLong txnCount = new AtomicLong();
private long txnCountAtLastCheck;
private long lastProgressTimestamp = System.currentTimeMillis();
// For retry connections
private final ExecutorService es = Executors.newCachedThreadPool(new ThreadFactory() {
@Override
public Thread newThread(Runnable arg0) {
Thread thread = new Thread(arg0, "Retry Connection");
thread.setDaemon(true);
return thread;
}
});
/**
* Uses included {@link CLIConfig} class to
* declaratively state command line options with defaults
* and validation.
*/
private static class Config extends CLIConfig {
@Option(desc = "Interval for performance feedback, in seconds.")
long displayinterval = 5;
@Option(desc = "Benchmark duration, in seconds.")
int duration = 20;
@Option(desc = "Comma separated list of the form server[:port] to connect to.")
String servers = "localhost";
String[] parsedServers = null;
@Option(desc = "Number of parallel syncrhonous threads.")
int threads = 100;
@Option(desc = "Id of the first thread (useful for running multiple clients).")
int threadoffset = 0;
@Option(desc = "Minimum value size in bytes.")
int minvaluesize = 1024;
@Option(desc = "Maximum value size in bytes.")
int maxvaluesize = 1024;
@Option(desc = "Number of values considered for each value byte.")
int entropy = 127;
@Option(desc = "Compress values on the client side.")
boolean usecompression = false;
@Option(desc = "Filler table blob size.")
int fillerrowsize = 5128;
@Option(desc = "Target data size for the filler replicated table (at each site).")
long replfillerrowmb = 32;
@Option(desc = "Target data size for the partitioned filler table.")
long partfillerrowmb = 128;
@Option(desc = "Timeout that kills the client if progress is not made.")
int progresstimeout = 120;
@Option(desc = "Whether or not to disable adhoc writes.")
boolean disableadhoc = false;
@Option(desc = "Maximum TPS rate for benchmark.")
int ratelimit = Integer.MAX_VALUE;
@Option(desc = "Filename to write raw summary statistics to.")
String statsfile = "";
@Option(desc = "Allow experimental in-procedure adhoc statments.")
boolean allowinprocadhoc = true;
@Option(desc = "Allow set ratio of mp to sp workload.")
float mpratio = (float)0.20;
@Override
public void validate() {
if (duration <= 0) exitWithMessageAndUsage("duration must be > 0");
if (displayinterval <= 0) exitWithMessageAndUsage("displayinterval must be > 0");
if (threadoffset < 0) exitWithMessageAndUsage("threadoffset must be >= 0");
if (threads <= 0) exitWithMessageAndUsage("threads must be > 0");
if (threadoffset > 127) exitWithMessageAndUsage("threadoffset must be within [0, 127]");
if (threadoffset + threads > 127) exitWithMessageAndUsage("max thread offset must be <= 127");
if (ratelimit <= 0) exitWithMessageAndUsage("ratelimit must be > 0");
if (minvaluesize <= 0) exitWithMessageAndUsage("minvaluesize must be > 0");
if (maxvaluesize <= 0) exitWithMessageAndUsage("maxvaluesize must be > 0");
if (entropy <= 0) exitWithMessageAndUsage("entropy must be > 0");
if (entropy > 127) exitWithMessageAndUsage("entropy must be <= 127");
if (mpratio < 0.0 || mpratio > 1.0) exitWithMessageAndUsage("mpRatio must be between 0.0 and 1.0");
}
@Override
public void parse(String cmdName, String[] args) {
super.parse(cmdName, args);
// parse servers
parsedServers = servers.split(",");
}
}
/**
* Fake an internal jstack to the log
*/
static public void printJStack() {
Map<String, List<String>> deduped = new HashMap<String, List<String>>();
// collect all the output, but dedup the identical stack traces
for (Entry<Thread, StackTraceElement[]> e : Thread.getAllStackTraces().entrySet()) {
Thread t = e.getKey();
String header = String.format("\"%s\" %sprio=%d tid=%d %s",
t.getName(),
t.isDaemon() ? "daemon " : "",
t.getPriority(),
t.getId(),
t.getState().toString());
String stack = "";
for (StackTraceElement ste : e.getValue()) {
stack += " at " + ste.toString() + "\n";
}
if (deduped.containsKey(stack)) {
deduped.get(stack).add(header);
}
else {
ArrayList<String> headers = new ArrayList<String>();
headers.add(header);
deduped.put(stack, headers);
}
}
String logline = "";
for (Entry<String, List<String>> e : deduped.entrySet()) {
for (String header : e.getValue()) {
logline += "\n" + header + "\n";
}
logline += e.getKey();
}
log.info("Full thread dump:\n" + logline);
}
static public void hardStop(String msg) {
logHardStop(msg);
stopTheWorld();
}
static public void hardStop(Exception e) {
logHardStop("Unexpected exception", e);
stopTheWorld();
}
static public void hardStop(String msg, Exception e) {
logHardStop(msg, e);
if (e instanceof ProcCallException) {
ClientResponse cr = ((ProcCallException) e).getClientResponse();
hardStop(msg, cr);
}
}
static public void hardStop(String msg, ClientResponse resp) {
hardStop(msg, (ClientResponseImpl) resp);
}
static public void hardStop(String msg, ClientResponseImpl resp) {
logHardStop(msg);
log.error("[HardStop] " + resp.toJSONString());
stopTheWorld();
}
static private void logHardStop(String msg, Exception e) {
log.error("[HardStop] " + msg, e);
}
static private void logHardStop(String msg) {
log.error("[HardStop] " + msg);
}
static private void stopTheWorld() {
Benchmark.printJStack();
log.error("Terminating abnormally");
System.exit(-1);
}
private class StatusListener extends ClientStatusListenerExt {
@Override
public void uncaughtException(ProcedureCallback callback, ClientResponse resp, Throwable e) {
hardStop("Uncaught exception in procedure callback ", new Exception(e));
}
/**
* Remove the client from the list if connection is broken.
*/
@Override
public void connectionLost(String hostname, int port, int connectionsLeft, DisconnectCause cause) {
if (shutdown.get()) {
return;
}
activeConnections.decrementAndGet();
// reset the connection id so the client will connect to a recovered cluster
// this is a bit of a hack
if (connectionsLeft == 0) {
((ClientImpl) client).resetInstanceId();
}
// if the benchmark is still active
if ((System.currentTimeMillis() - benchmarkStartTS) < (config.duration * 1000)) {
log.warn(String.format("Connection to %s:%d was lost.", hostname, port));
}
// setup for retry
final String server = MiscUtils.getHostnameColonPortString(hostname, port);
es.execute(new Runnable() {
@Override
public void run() {
connectToOneServerWithRetry(server);
}
});
}
}
/**
* Constructor for benchmark instance.
* Configures VoltDB client and prints configuration.
*
* @param config Parsed & validated CLI options.
*/
    Benchmark(Config config) {
        this.config = config;

        // Shared rate limiter and payload generator for all worker threads.
        rateLimiter = new TxnId2RateLimiter(config.ratelimit);
        // NOTE(review): the first argument (4) looks like a column/partition
        // count for the payload processor — confirm against TxnId2PayloadProcessor.
        processor = new TxnId2PayloadProcessor(4, config.minvaluesize, config.maxvaluesize,
                config.entropy, Integer.MAX_VALUE, config.usecompression);

        // Echo the effective configuration before connecting.
        log.info(HORIZONTAL_RULE);
        log.info(" Command Line Configuration");
        log.info(HORIZONTAL_RULE);
        log.info(config.getConfigDumpString());

        // Client with our status listener so disconnects trigger retries.
        StatusListener statusListener = new StatusListener();
        ClientConfig clientConfig = new ClientConfig("", "", statusListener);
        client = ClientFactory.createClient(clientConfig);
    }
/**
* Connect to a single server with retry. Limited exponential backoff.
* No timeout. This will run until the process is killed if it's not
* able to connect.
*
* @param server hostname:port or just hostname (hostname can be ip).
*/
private void connectToOneServerWithRetry(String server) {
int sleep = 1000;
while (!shutdown.get()) {
try {
client.createConnection(server);
activeConnections.incrementAndGet();
log.info(String.format("Connected to VoltDB node at: %s.", server));
break;
}
catch (Exception e) {
log.warn(String.format("Connection to " + server + " failed - retrying in %d second(s).", sleep / 1000));
try { Thread.sleep(sleep); } catch (Exception interruted) {}
if (sleep < 8000) sleep += sleep;
}
}
}
    /**
     * Connect to a set of servers in parallel. Each connection is retried
     * until it succeeds. Note: the latch is initialized to 1, so this call
     * blocks only until AT LEAST ONE server has connected — the remaining
     * connection threads keep retrying in the background.
     *
     * @throws InterruptedException if anything bad happens with the threads.
     */
    private void connect() throws InterruptedException {
        log.info("Connecting to VoltDB...");

        final CountDownLatch connections = new CountDownLatch(1);

        // use a new thread to connect to each server
        for (final String server : config.parsedServers) {
            new Thread(new Runnable() {
                @Override
                public void run() {
                    connectToOneServerWithRetry(server);
                    connections.countDown();
                }
            }).start();
        }
        // block until at least one connection is established
        connections.await();
    }
/**
* Create a Timer task to write the value of the txnCount to
* disk to make it available to apprunner
*/
private void schedulePeriodicCheckpoint() throws IOException {
checkpointTimer = new Timer("Checkpoint Timer", true);
TimerTask checkpointTask = new TimerTask() {
@Override
public void run() {
String count = String.valueOf(txnCount.get()) + "\n";
try {
FileWriter writer = new FileWriter(".checkpoint", false);
writer.write(count);
writer.close();
}
catch (Exception e) {
System.err.println("Caught exception writing checkpoint file.");
}
}
};
checkpointTimer.scheduleAtFixedRate(checkpointTask,
1 * 1000,
1 * 1000);
}
/**
* Create a Timer task to display performance data on the Vote procedure
* It calls printStatistics() every displayInterval seconds
*/
private void schedulePeriodicStats() {
timer = new Timer("Stats Timer", true);
TimerTask statsPrinting = new TimerTask() {
@Override
public void run() { printStatistics(); }
};
timer.scheduleAtFixedRate(statsPrinting,
config.displayinterval * 1000,
config.displayinterval * 1000);
}
/**
* Create a Timer task to refresh ratelimit permits
*/
private void scheduleRefreshPermits() {
permitsTimer = new Timer("Ratelimiter Permits Timer", true);
TimerTask refreshPermits = new TimerTask() {
@Override
public void run() { rateLimiter.updateActivePermits(System.currentTimeMillis()); }
};
permitsTimer.scheduleAtFixedRate(refreshPermits, 0, 10);
}
/**
* Prints a one line update on performance that can be printed
* periodically during a benchmark.
*/
private synchronized void printStatistics() {
long txnCountNow = txnCount.get();
long now = System.currentTimeMillis();
boolean madeProgress = txnCountNow > txnCountAtLastCheck;
if (madeProgress) {
lastProgressTimestamp = now;
}
txnCountAtLastCheck = txnCountNow;
long diffInSeconds = (now - lastProgressTimestamp) / 1000;
log.info(String.format("Executed %d%s", txnCount.get(),
madeProgress ? "" : " (no progress made in " + diffInSeconds + " seconds, last at " +
(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.S")).format(new Date(lastProgressTimestamp)) + ")"));
if (diffInSeconds > config.progresstimeout) {
log.error("No progress was made in over " + diffInSeconds + " seconds while connected to a cluster. Exiting.");
printJStack();
System.exit(-1);
}
}
    /**
     * Queries the cluster for its unique partition count via
     * "@Statistics PARTITIONCOUNT". Exits the process on any failure or a
     * non-positive count, so callers can rely on a valid value.
     */
    private int getUniquePartitionCount() throws Exception {
        int partitionCount = -1;

        ClientResponse cr = client.callProcedure("@Statistics", "PARTITIONCOUNT");
        if (cr.getStatus() != ClientResponse.SUCCESS) {
            log.error("Failed to call Statistics proc at startup. Exiting.");
            log.error(((ClientResponseImpl) cr).toJSONString());
            printJStack();
            System.exit(-1);
        }

        VoltTable t = cr.getResults()[0];
        // NOTE(review): column index 3 is assumed to be PARTITION_COUNT in the
        // @Statistics result schema — confirm against the VoltDB docs.
        partitionCount = (int) t.fetchRow(0).getLong(3);
        log.info("unique partition count is " + partitionCount);
        if (partitionCount <= 0) {
            log.error("partition count is zero");
            System.exit(-1);
        }
        return partitionCount;
    }
    /**
     * Logs that a worker thread died prematurely.
     *
     * @return 1, used by the caller as a non-zero process exit code.
     */
    private byte reportDeadThread(Thread th) {
        log.error("Thread '" + th.getName() + "' is not alive");
        return 1;
    }

    /**
     * Logs that a worker thread died prematurely, with extra detail.
     *
     * @return 1, used by the caller as a non-zero process exit code.
     */
    private byte reportDeadThread(Thread th, String msg) {
        log.error("Thread '" + th.getName() + "' is not alive, " + msg);
        return 1;
    }
    // Shared uncaught-exception handler installed on worker threads: any
    // unhandled throwable logs, dumps all stacks, and kills the process.
    public static Thread.UncaughtExceptionHandler h = new UncaughtExceptionHandler() {
        public void uncaughtException(Thread th, Throwable ex) {
            log.error("Uncaught exception: " + ex.getMessage(), ex);
            printJStack();
            System.exit(-1);
        }
    };
/**
* Core benchmark code.
* Connect. Initialize. Run the loop. Cleanup. Print Results.
*
* @throws Exception if anything unexpected happens.
*/
public void runBenchmark() throws Exception {
byte exitcode = 0;
log.info(HORIZONTAL_RULE);
log.info(" Setup & Initialization");
log.info(HORIZONTAL_RULE);
// Only rate limit the ClientThread for now. Share the same permits for all type of invocations.
Semaphore permits = rateLimiter.addType(0, 1);
final int cidCount = 128;
final long[] lastRid = new long[cidCount];
for (int i = 0; i < lastRid.length; i++) {
lastRid[i] = 0;
}
// connect to one or more servers, loop until success
connect();
// get partition count
int partitionCount = getUniquePartitionCount();
// get stats
try {
ClientResponse cr = client.callProcedure("Summarize");
if (cr.getStatus() != ClientResponse.SUCCESS) {
log.error("Failed to call Summarize proc at startup. Exiting.");
log.error(((ClientResponseImpl) cr).toJSONString());
printJStack();
System.exit(-1);
}
// successfully called summarize
VoltTable t = cr.getResults()[0];
long ts = t.fetchRow(0).getLong("ts");
String tsStr = ts == 0 ? "NO TIMESTAMPS" : String.valueOf(ts) + " / " + new Date(ts).toString();
long count = t.fetchRow(0).getLong("count");
log.info("STARTUP TIMESTAMP OF LAST UPDATE (GMT): " + tsStr);
log.info("UPDATES RUN AGAINST THIS DB TO DATE: " + count);
}
catch (ProcCallException e) {
log.error("Failed to call Summarize proc at startup. Exiting.", e);
log.error(((ClientResponseImpl) e.getClientResponse()).toJSONString());
printJStack();
System.exit(-1);
}
log.info(HORIZONTAL_RULE);
log.info("Loading Filler Tables...");
log.info(HORIZONTAL_RULE);
BigTableLoader partitionedLoader = new BigTableLoader(client, "bigp",
(config.partfillerrowmb * 1024 * 1024) / config.fillerrowsize, config.fillerrowsize, 50, permits, partitionCount);
partitionedLoader.start();
BigTableLoader replicatedLoader = null;
if (config.mpratio > 0.0) {
replicatedLoader = new BigTableLoader(client, "bigr",
(config.replfillerrowmb * 1024 * 1024) / config.fillerrowsize, config.fillerrowsize, 3, permits, partitionCount);
replicatedLoader.start();
}
// wait for the filler tables to load up
//partitionedLoader.join();
//replicatedLoader.join();
log.info(HORIZONTAL_RULE);
log.info("Starting Benchmark");
log.info(HORIZONTAL_RULE);
// print periodic statistics to the console
benchmarkStartTS = System.currentTimeMillis();
// reset progress tracker
lastProgressTimestamp = System.currentTimeMillis();
schedulePeriodicStats();
schedulePeriodicCheckpoint();
scheduleRefreshPermits();
// Run the benchmark loop for the requested duration
// The throughput may be throttled depending on client configuration
log.info("Running benchmark...");
while (((ClientImpl) client).isHashinatorInitialized() == false) {
Thread.sleep(1000);
System.out.println("Wait for hashinator..");
}
TruncateTableLoader partitionedTruncater = new TruncateTableLoader(client, "trup",
(config.partfillerrowmb * 1024 * 1024) / config.fillerrowsize, config.fillerrowsize, 50, permits, config.mpratio);
partitionedTruncater.start();
TruncateTableLoader replicatedTruncater = null;
if (config.mpratio > 0.0) {
replicatedTruncater = new TruncateTableLoader(client, "trur",
(config.replfillerrowmb * 1024 * 1024) / config.fillerrowsize, config.fillerrowsize, 3, permits, config.mpratio);
replicatedTruncater.start();
}
LoadTableLoader plt = new LoadTableLoader(client, "loadp",
(config.partfillerrowmb * 1024 * 1024) / config.fillerrowsize, 50, permits, false, 0);
plt.start();
LoadTableLoader rlt = null;
if (config.mpratio > 0.0) {
rlt = new LoadTableLoader(client, "loadmp",
(config.replfillerrowmb * 1024 * 1024) / config.fillerrowsize, 3, permits, true, -1);
rlt.start();
}
ReadThread readThread = new ReadThread(client, config.threads, config.threadoffset,
config.allowinprocadhoc, config.mpratio, permits);
readThread.start();
AdHocMayhemThread adHocMayhemThread = new AdHocMayhemThread(client, config.mpratio, permits);
if (!config.disableadhoc) {
adHocMayhemThread.start();
}
InvokeDroppedProcedureThread idpt = new InvokeDroppedProcedureThread(client);
idpt.start();
DdlThread ddlt = new DdlThread(client);
ddlt.start();
List<ClientThread> clientThreads = new ArrayList<ClientThread>();
for (byte cid = (byte) config.threadoffset; cid < config.threadoffset + config.threads; cid++) {
ClientThread clientThread = new ClientThread(cid, txnCount, client, processor, permits,
config.allowinprocadhoc, config.mpratio);
clientThread.start();
clientThreads.add(clientThread);
}
log.info("All threads started...");
// subtract time spent initializing threads and starting them
long rt = (1000l * config.duration) - (System.currentTimeMillis() - benchmarkStartTS);
if (rt > 0) {
Thread.sleep(rt);
}
log.info("Duration completed shutting down...");
// check if loaders are done or still working
int lpcc = partitionedLoader.getPercentLoadComplete();
if (! partitionedLoader.isAlive() && lpcc < 100) {
exitcode = reportDeadThread(partitionedLoader, " yet only " + Integer.toString(lpcc) + "% rows have been loaded");
} else
log.info(partitionedLoader + " was at " + lpcc + "% of rows loaded");
lpcc = replicatedLoader.getPercentLoadComplete();
if (! replicatedLoader.isAlive() && lpcc < 100) {
exitcode = reportDeadThread(replicatedLoader, " yet only " + Integer.toString(lpcc) + "% rows have been loaded");
} else
log.info(replicatedLoader + " was at " + lpcc + "% of rows loaded");
// check if all threads still alive
if (! partitionedTruncater.isAlive())
exitcode = reportDeadThread(partitionedTruncater);
if (! replicatedTruncater.isAlive())
exitcode = reportDeadThread(replicatedTruncater);
/* XXX if (! plt.isAlive())
exitcode = reportDeadThread(plt);
if (! rlt.isAlive())
exitcode = reportDeadThread(rlt);
*/if (! readThread.isAlive())
exitcode = reportDeadThread(readThread);
if (! adHocMayhemThread.isAlive())
exitcode = reportDeadThread(adHocMayhemThread);
if (! idpt.isAlive())
exitcode = reportDeadThread(idpt);
if (! ddlt.isAlive())
exitcode = reportDeadThread(ddlt);
for (ClientThread ct : clientThreads) {
if (! ct.isAlive()) {
exitcode = reportDeadThread(ct);
}
}
/* XXX/PSR
replicatedLoader.shutdown();
partitionedLoader.shutdown();
replicatedTruncater.shutdown();
partitionedTruncater.shutdown();
readThread.shutdown();
adHocMayhemThread.shutdown();
idpt.shutdown();
ddlt.shutdown();
for (ClientThread clientThread : clientThreads) {
clientThread.shutdown();
}
replicatedLoader.join();
partitionedLoader.join();
readThread.join();
adHocMayhemThread.join();
idpt.join();
ddlt.join();
//Shutdown LoadTableLoader
rlt.shutdown();
plt.shutdown();
rlt.join();
plt.join();
for (ClientThread clientThread : clientThreads) {
clientThread.join();
}
*/
// cancel periodic stats printing
timer.cancel();
checkpointTimer.cancel();
/*
shutdown.set(true);
es.shutdownNow();
// block until all outstanding txns return
client.drain();
client.close();
permitsTimer.cancel();
*/
log.info(HORIZONTAL_RULE);
log.info("Benchmark Complete");
System.exit(exitcode);
}
/**
 * Main routine: parses the command line, builds a benchmark instance and
 * kicks off the run.
 *
 * @param args Command line arguments.
 * @throws Exception if anything goes wrong.
 * @see {@link Config}
 */
public static void main(String[] args) throws Exception {
    // Parse and validate the CLI options, then hand them to the benchmark.
    final Config cliConfig = new Config();
    cliConfig.parse(Benchmark.class.getName(), args);
    new Benchmark(cliConfig).runBenchmark();
}
} |
package org.jboss.virtual;
import java.io.IOException;
import java.net.URL;
import java.net.URISyntaxException;
import java.util.List;
import java.util.regex.Pattern;
import org.jboss.virtual.plugins.context.jar.JarUtils;
import org.jboss.virtual.plugins.context.vfs.AssembledContext;
import org.jboss.virtual.plugins.context.vfs.AssembledDirectoryHandler;
import org.jboss.virtual.plugins.context.vfs.AssembledFileHandler;
import org.jboss.virtual.plugins.context.vfs.ByteArrayHandler;
import org.jboss.virtual.plugins.vfs.helpers.FilterVirtualFileVisitor;
import org.jboss.virtual.plugins.vfs.helpers.SuffixesExcludeFilter;
/**
* Extension of VirtualFile that represents a virtual directory that can be composed of arbitrary files and resources
* spread throughout the file system or embedded in jar files.
*
* @author <a href="bill@jboss.com">Bill Burke</a>
* @author <a href="ales.justin@jboss.com">Ales Justin</a>
* @version $Revision: 1.1 $
*/
public class AssembledDirectory extends VirtualFile
{
    /** Recurse filter that skips anything with a known jar suffix. */
    private static final VirtualFileFilter noJars = new SuffixesExcludeFilter(JarUtils.getSuffixes());

    /** The backing directory handler; all children are added through it. */
    private AssembledDirectoryHandler directory;

    public AssembledDirectory(AssembledDirectoryHandler handler)
    {
        super(handler);
        directory = handler;
    }

    /**
     * Create assembled directory.
     *
     * @param name context's name
     * @param rootName root name
     * @return new assembled directory instance
     * @throws IOException for any IO error
     * @throws URISyntaxException for any URI error
     */
    public static AssembledDirectory createAssembledDirectory(String name, String rootName) throws IOException, URISyntaxException
    {
        AssembledContext context = new AssembledContext(name, rootName);
        // The root handler of an AssembledContext yields an AssembledDirectory view.
        return context.getRoot().getVirtualFile();
    }

    /**
     * Add files recursively from root, using the no-jars filter.
     *
     * @param root the root
     * @throws IOException for any error
     */
    public void addPath(VirtualFile root) throws IOException
    {
        addPath(root, noJars);
    }

    /**
     * Add files recursively from root, using the filter.
     * Each visited leaf is mirrored into this directory at the same relative path.
     *
     * @param root the root
     * @param recurseFilter the recurse filter
     * @throws IOException for any error
     */
    public void addPath(VirtualFile root, VirtualFileFilter recurseFilter) throws IOException
    {
        final VisitorAttributes va = new VisitorAttributes();
        // Only leaves are visited; the filter decides which subtrees to descend into.
        va.setLeavesOnly(true);
        va.setRecurseFilter(recurseFilter);
        VirtualFileVisitor visitor = new VirtualFileVisitor()
        {
            public VisitorAttributes getAttributes()
            {
                return va;
            }

            public void visit(VirtualFile virtualFile)
            {
                // Create the parent directory chain, then attach the file there.
                mkdirs(virtualFile.getPathName()).addChild(virtualFile);
            }
        };
        root.visit(visitor);
    }

    /**
     * Find the underlying .class file representing this class and create it within this directory, along with
     * its packages.
     *
     * So, if you added com.acme.Customer class, then a directory structure com/acme would be created
     * and an entry in the acme directory would be the .class file.
     *
     * @param clazz the class
     */
    public void addClass(Class<?> clazz)
    {
        if (clazz == null)
            throw new IllegalArgumentException("Null clazz");
        addClass(clazz.getName(), clazz.getClassLoader());
    }

    /**
     * Find the underlying .class file representing this class and create it within this directory, along with
     * its packages. Uses the thread context classloader to locate the resource.
     *
     * So, if you added com.acme.Customer class, then a directory structure com/acme would be created
     * and an entry in the acme directory would be the .class file.
     *
     * @param className the class name
     */
    public void addClass(String className)
    {
        addClass(className, Thread.currentThread().getContextClassLoader());
    }

    /**
     * Find the underlying .class file representing this class and create it within this directory, along with
     * its packages.
     *
     * So, if you added com.acme.Customer class, then a directory structure com/acme would be created
     * and an entry in the acme directory would be the .class file.
     *
     * @param className the class name
     * @param loader ClassLoader to look for class resource
     * @throws IllegalArgumentException for a null className or loader
     * @throws RuntimeException if the class resource cannot be found
     */
    public void addClass(String className, ClassLoader loader)
    {
        if (className == null)
            throw new IllegalArgumentException("Null className");
        if (loader == null)
            throw new IllegalArgumentException("Null loader");
        // com.acme.Customer -> com/acme/Customer.class
        String resource = className.replace('.', '/') + ".class";
        URL url = loader.getResource(resource);
        if (url == null)
            throw new RuntimeException("Could not find resource: " + resource);
        AssembledDirectory p = mkdirs(resource);
        p.addResource(resource, loader);
    }

    /**
     * Make any directories for the given path to a file.
     *
     * @param path must be a path to a file as last element in path does not have a directory created
     * @return directory file will live in
     */
    public AssembledDirectory mkdirs(String path)
    {
        if (path == null)
            throw new IllegalArgumentException("Null path");
        String[] pkgs = path.split("/");
        AssembledDirectoryHandler dir = directory;
        // Walk/create every component except the last (the file itself).
        for (int i = 0; i < pkgs.length - 1; i++)
        {
            // NOTE(review): assumes any existing child with this name is a directory;
            // a plain-file child would cause a ClassCastException here — confirm intended.
            AssembledDirectoryHandler next = (AssembledDirectoryHandler) dir.findChild(pkgs[i]);
            if (next == null)
            {
                try
                {
                    next = new AssembledDirectoryHandler(dir.getVFSContext(), dir, pkgs[i]);
                }
                catch (IOException e)
                {
                    throw new RuntimeException(e);
                }
                dir.addChild(next);
            }
            dir = next;
        }
        return dir.getVirtualFile();
    }

    /**
     * Locate the .class resource of baseResource. From this resource, determine the base of the resource
     * i.e. what jar or classpath directory it lives in.
     *
     * Once the base of the resource is found, scan all files recursively within the base using the include and exclude
     * patterns. A mirror file structure will be created within this AssembledDirectory. This is very useful when you
     * want to create a virtual jar that contains a subset of .class files in your classpath.
     *
     * The include/exclude patterns follow the Ant file pattern matching syntax. See ant.apache.org for more details.
     *
     * @param baseResource the base resource
     * @param includes the includes
     * @param excludes the excludes
     */
    public void addResources(Class<?> baseResource, String[] includes, String[] excludes)
    {
        if (baseResource == null)
            throw new IllegalArgumentException("Null base resource");
        String resource = baseResource.getName().replace('.', '/') + ".class";
        addResources(resource, includes, excludes, baseResource.getClassLoader());
    }

    /**
     * From the baseResource, determine the base of that resource
     * i.e. what jar or classpath directory it lives in. The Thread.currentThread().getContextClassLoader() will be used.
     *
     * Once the base of the resource is found, scan all files recursively within the base using the include and exclude
     * patterns. A mirror file structure will be created within this AssembledDirectory. This is very useful when you
     * want to create a virtual jar that contains a subset of .class files in your classpath.
     *
     * The include/exclude patterns follow the Ant file pattern matching syntax. See ant.apache.org for more details.
     *
     * @param baseResource the base resource
     * @param includes the includes
     * @param excludes the excludes
     */
    public void addResources(String baseResource, final String[] includes, final String[] excludes)
    {
        if (baseResource == null)
            throw new IllegalArgumentException("Null base resource");
        addResources(baseResource, includes, excludes, Thread.currentThread().getContextClassLoader());
    }

    /**
     * From the baseResource, determine the base of that resource
     * i.e. what jar or classpath directory it lives in. The loader parameter will be used to find the resource.
     *
     * Once the base of the resource is found, scan all files recursively within the base using the include and exclude
     * patterns. A mirror file structure will be created within this AssembledDirectory. This is very useful when you
     * want to create a virtual jar that contains a subset of .class files in your classpath.
     *
     * The include/exclude patterns follow the Ant file pattern matching syntax. See ant.apache.org for more details.
     *
     * @param baseResource the base resource
     * @param includes the includes; a file must match at least one to be copied
     * @param excludes the excludes; a matching file is skipped even if included (may be null)
     * @param loader the loader
     */
    public void addResources(String baseResource, final String[] includes, final String[] excludes, ClassLoader loader)
    {
        if (baseResource == null)
            throw new IllegalArgumentException("Null baseResource");
        if (loader == null)
            throw new IllegalArgumentException("Null loader");
        URL url = loader.getResource(baseResource);
        if (url == null)
            throw new RuntimeException("Could not find baseResource: " + baseResource);
        // Strip the resource's relative path off the URL to get the classpath root it lives in.
        String urlString = url.toString();
        int idx = urlString.lastIndexOf(baseResource);
        urlString = urlString.substring(0, idx);
        try
        {
            url = new URL(urlString);
            VirtualFile parent = VFS.getRoot(url);

            VisitorAttributes va = new VisitorAttributes();
            va.setLeavesOnly(true);
            va.setRecurseFilter(noJars);
            VirtualFileFilter filter = new VirtualFileFilter()
            {
                public boolean accepts(VirtualFile file)
                {
                    boolean matched = false;
                    String path = file.getPathName();
                    // Must match at least one include pattern...
                    for (String include : includes)
                    {
                        if (antMatch(path, include))
                        {
                            matched = true;
                            break;
                        }
                    }
                    if (matched == false)
                        return false;
                    // ...and none of the exclude patterns.
                    if (excludes != null)
                    {
                        for (String exclude : excludes)
                        {
                            if (antMatch(path, exclude))
                                return false;
                        }
                    }
                    return true;
                }
            };

            FilterVirtualFileVisitor visitor = new FilterVirtualFileVisitor(filter, va);
            parent.visit(visitor);
            List<VirtualFile> files = visitor.getMatched();
            for (VirtualFile vf : files)
            {
                // Mirror each matched file into this assembled directory.
                mkdirs(vf.getPathName()).addChild(vf);
            }
        }
        catch (IOException e)
        {
            throw new RuntimeException(e);
        }
    }

    /**
     * Create a regular expression pattern from an Ant file matching pattern.
     * Only '.', '*' and '?' are translated; this is applied to a single path
     * segment (never to '**', which is handled in {@link #antMatch}).
     *
     * NOTE(review): other regex metacharacters ('+', '(', '[', '$', ...) are not
     * escaped, so file names containing them may match unexpectedly — confirm
     * whether such names are expected here.
     *
     * @param matcher the matcher pattern
     * @return the pattern instance
     */
    public static Pattern getPattern(String matcher)
    {
        if (matcher == null)
            throw new IllegalArgumentException("Null matcher");
        // Order matters: escape literal dots before '*' introduces regex dots.
        matcher = matcher.replace(".", "\\.");
        matcher = matcher.replace("*", ".*");
        matcher = matcher.replace("?", ".{1}");
        return Pattern.compile(matcher);
    }

    /**
     * Determine whether a given file path matches an Ant pattern.
     * Both path and expression are split on '/'; '**' matches any number of
     * directories, while the remaining segments are matched via {@link #getPattern}.
     *
     * @param path the path
     * @param expression the expression
     * @return true if we match
     */
    public static boolean antMatch(String path, String expression)
    {
        if (path == null)
            throw new IllegalArgumentException("Null path");
        if (expression == null)
            throw new IllegalArgumentException("Null expression");
        // Normalize: drop a leading '/' and treat a trailing '/' as "everything below".
        if (path.startsWith("/")) path = path.substring(1);
        if (expression.endsWith("/")) expression += "**";
        String[] paths = path.split("/");
        String[] expressions = expression.split("/");

        // x indexes the expression segments, p the path segments.
        int x = 0, p;
        Pattern pattern = getPattern(expressions[0]);

        for (p = 0; p < paths.length && x < expressions.length; p++)
        {
            if (expressions[x].equals("**"))
            {
                // Collapse consecutive '**' segments into one.
                do
                {
                    x++;
                } while (x < expressions.length && expressions[x].equals("**"));
                if (x == expressions.length)
                    return true; // "**" with nothing after it
                pattern = getPattern(expressions[x]);
            }
            String element = paths[p];
            if (pattern.matcher(element).matches())
            {
                // Segment matched: advance to the next expression segment.
                x++;
                if (x < expressions.length)
                {
                    pattern = getPattern(expressions[x]);
                }
            }
            else if (!(x > 0 && expressions[x - 1].equals("**"))) // our previous isn't "**"
            {
                // No '**' to absorb the mismatched segment — fail.
                return false;
            }
        }
        // Both the path and the expression must be fully consumed.
        if (p < paths.length)
            return false;
        if (x < expressions.length)
            return false;
        return true;
    }

    /**
     * Add a VirtualFile as a child to this AssembledDirectory.
     *
     * @param vf the virtual file
     * @return the file
     */
    public VirtualFile addChild(VirtualFile vf)
    {
        if (vf == null)
            throw new IllegalArgumentException("Null virtual file");
        return directory.addChild(vf.getHandler()).getVirtualFile();
    }

    /**
     * Add a VirtualFile as a child to this AssembledDirectory. This file will be added
     * under a new aliased name.
     *
     * @param vf the virtual file
     * @param newName the new name
     * @return new file
     */
    public VirtualFile addChild(VirtualFile vf, String newName)
    {
        try
        {
            AssembledFileHandler handler = new AssembledFileHandler(directory.getVFSContext(), directory, newName, vf.getHandler());
            directory.addChild(handler);
            return handler.getVirtualFile();
        }
        catch (IOException e)
        {
            throw new RuntimeException(e);
        }
    }

    /**
     * Add a classloader found resource as a child to this AssembledDirectory. The base file name of the
     * resource will be used as the child's name.
     *
     * Thread.currentThread().getContextClassLoader() will be used to load the resource.
     *
     * @param resource the resource
     * @return the file
     */
    public VirtualFile addResource(String resource)
    {
        return addResource(resource, Thread.currentThread().getContextClassLoader());
    }

    /**
     * Add a classloader found resource as a child to this AssembledDirectory. The newName parameter will be used
     * as the name of the child.
     *
     * Thread.currentThread().getContextClassLoader() will be used to load the resource.
     *
     * @param resource the resource
     * @param newName the new name
     * @return the file
     */
    public VirtualFile addResource(String resource, String newName)
    {
        return addResource(resource, Thread.currentThread().getContextClassLoader(), newName);
    }

    /**
     * Add a classloader found resource as a child to this AssembledDirectory. The base file name of the
     * resource will be used as the child's name.
     *
     * The loader parameter will be used to load the resource.
     *
     * @param resource the resource
     * @param loader the loader
     * @return the file
     * @throws RuntimeException if the resource cannot be found by the loader
     */
    public VirtualFile addResource(String resource, ClassLoader loader)
    {
        if (resource == null)
            throw new IllegalArgumentException("Null resource");
        if (loader == null)
            throw new IllegalArgumentException("Null loader");
        URL url = loader.getResource(resource);
        if (url == null)
            throw new RuntimeException("Could not find resource: " + resource);
        return addResource(url);
    }

    /**
     * Add a resource identified by the URL as a child to this AssembledDirectory.
     *
     * @param url the url
     * @return the file
     */
    public VirtualFile addResource(URL url)
    {
        if (url == null)
            throw new IllegalArgumentException("Null url");
        try
        {
            VirtualFile vf = VFS.getRoot(url);
            return addChild(vf);
        }
        catch (IOException e)
        {
            throw new RuntimeException(e);
        }
    }

    /**
     * Add a classloader found resource as a child to this AssembledDirectory. The newName parameter will be used
     * as the name of the child.
     *
     * The loader parameter will be used to load the resource.
     *
     * @param resource the resource
     * @param loader the loader
     * @param newName the new name
     * @return the file
     * @throws RuntimeException if the resource cannot be found by the loader
     */
    public VirtualFile addResource(String resource, ClassLoader loader, String newName)
    {
        if (resource == null)
            throw new IllegalArgumentException("Null resource");
        if (loader == null)
            throw new IllegalArgumentException("Null loader");
        if (newName == null)
            throw new IllegalArgumentException("Null newName");
        URL url = loader.getResource(resource);
        if (url == null)
            throw new RuntimeException("Could not find resource: " + resource);
        try
        {
            VirtualFile vf = VFS.getRoot(url);
            return addChild(vf, newName);
        }
        catch (IOException e)
        {
            throw new RuntimeException(e);
        }
    }

    /**
     * Add raw bytes as a file to this assembled directory.
     *
     * @param bytes the bytes
     * @param name the name
     * @return the file
     */
    public VirtualFile addBytes(byte[] bytes, String name)
    {
        if (bytes == null)
            throw new IllegalArgumentException("Null bytes");
        if (name == null)
            throw new IllegalArgumentException("Null name");
        ByteArrayHandler handler;
        try
        {
            handler = new ByteArrayHandler(directory.getVFSContext(), directory, name, bytes);
        }
        catch (IOException e)
        {
            throw new RuntimeException(e);
        }
        directory.addChild(handler);
        return handler.getVirtualFile();
    }

    /**
     * Create a directory within this directory.
     *
     * @param name the name
     * @return the directory
     */
    public AssembledDirectory mkdir(String name)
    {
        if (name == null)
            throw new IllegalArgumentException("Null name");
        try
        {
            AssembledDirectoryHandler handler = new AssembledDirectoryHandler(directory.getVFSContext(), directory, name);
            directory.addChild(handler);
            return new AssembledDirectory(handler);
        }
        catch (IOException e)
        {
            throw new RuntimeException(e);
        }
    }
}
package txnIdSelfCheck;
import java.io.FileWriter;
import java.io.IOException;
import java.lang.Thread.UncaughtExceptionHandler;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Semaphore;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import org.voltcore.logging.VoltLogger;
import org.voltdb.CLIConfig;
import org.voltdb.ClientResponseImpl;
import org.voltdb.VoltTable;
import org.voltdb.client.Client;
import org.voltdb.client.ClientConfig;
import org.voltdb.client.ClientFactory;
import org.voltdb.client.ClientImpl;
import org.voltdb.client.ClientResponse;
import org.voltdb.client.ClientStatusListenerExt;
import org.voltdb.client.ProcCallException;
import org.voltdb.client.ProcedureCallback;
import org.voltdb.utils.MiscUtils;
public class Benchmark {
static VoltLogger log = new VoltLogger("Benchmark");
// handy, rather than typing this out several times
static final String HORIZONTAL_RULE =
"
"
// validated command line configuration
final Config config;
// create a client for each server node
Client client;
// Timer for periodic stats printing
Timer timer;
// Benchmark start time
long benchmarkStartTS;
// Timer to time the run
Timer runTimer;
// Timer for writing the checkpoint count for apprunner
Timer checkpointTimer;
// Timer for refreshing ratelimit permits
Timer permitsTimer;
final TxnId2RateLimiter rateLimiter;
final TxnId2PayloadProcessor processor;
final AtomicInteger activeConnections = new AtomicInteger(0);
final AtomicBoolean shutdown = new AtomicBoolean(false);
// for reporting and detecting progress
public static AtomicLong txnCount = new AtomicLong();
private long txnCountAtLastCheck;
private long lastProgressTimestamp = System.currentTimeMillis();
// For retry connections
private final ExecutorService es = Executors.newCachedThreadPool(new ThreadFactory() {
@Override
public Thread newThread(Runnable arg0) {
Thread thread = new Thread(arg0, "Retry Connection");
thread.setDaemon(true);
return thread;
}
});
/**
 * Uses included {@link CLIConfig} class to
 * declaratively state command line options with defaults
 * and validation.
 */
private static class Config extends CLIConfig {
    @Option(desc = "Interval for performance feedback, in seconds.")
    long displayinterval = 5;

    @Option(desc = "Benchmark duration, in seconds.")
    int duration = 20;

    @Option(desc = "Comma separated list of the form server[:port] to connect to.")
    String servers = "localhost";
    // filled in by parse() from the comma-separated 'servers' option
    String[] parsedServers = null;

    @Option(desc = "Number of parallel syncrhonous threads.")
    int threads = 100;

    @Option(desc = "Id of the first thread (useful for running multiple clients).")
    int threadoffset = 0;

    @Option(desc = "Minimum value size in bytes.")
    int minvaluesize = 1024;

    @Option(desc = "Maximum value size in bytes.")
    int maxvaluesize = 1024;

    @Option(desc = "Number of values considered for each value byte.")
    int entropy = 127;

    @Option(desc = "Compress values on the client side.")
    boolean usecompression = false;

    @Option(desc = "Filler table blob size.")
    int fillerrowsize = 5128;

    @Option(desc = "Target data size for the filler replicated table (at each site).")
    long replfillerrowmb = 32;

    @Option(desc = "Target data size for the partitioned filler table.")
    long partfillerrowmb = 128;

    @Option(desc = "Timeout that kills the client if progress is not made.")
    int progresstimeout = 120;

    @Option(desc = "Whether or not to disable adhoc writes.")
    boolean disableadhoc = false;

    @Option(desc = "Maximum TPS rate for benchmark.")
    int ratelimit = Integer.MAX_VALUE;

    @Option(desc = "Filename to write raw summary statistics to.")
    String statsfile = "";

    @Option(desc = "Allow experimental in-procedure adhoc statments.")
    boolean allowinprocadhoc = true;

    @Option(desc = "Allow set ratio of mp to sp workload.")
    float mpratio = (float)0.20;

    @Option(desc = "Allow set ratio of swap to truncate table workload.")
    float swapratio = (float)0.0;

    @Option(desc = "Allow set ratio of upsert to insert workload.")
    float upsertratio = (float)0.50;

    @Option(desc = "Allow set ratio of upsert against exist column.")
    float upserthitratio = (float)0.20;

    @Option(desc = "Allow disabling different threads for testing specific functionality. ")
    String disabledthreads = "none";
    // filled in by parse() from the comma-separated 'disabledthreads' option
    ArrayList<String> disabledThreads = null;

    @Option(desc = "Enable topology awareness")
    boolean topologyaware = false;

    /**
     * Validate option ranges; exits the process with a usage message on
     * any violation. Thread ids are single bytes, hence the 127 bounds.
     */
    @Override
    public void validate() {
        if (duration <= 0) exitWithMessageAndUsage("duration must be > 0");
        if (displayinterval <= 0) exitWithMessageAndUsage("displayinterval must be > 0");
        if (threadoffset < 0) exitWithMessageAndUsage("threadoffset must be >= 0");
        if (threads <= 0) exitWithMessageAndUsage("threads must be > 0");
        if (threadoffset > 127) exitWithMessageAndUsage("threadoffset must be within [0, 127]");
        if (threadoffset + threads > 127) exitWithMessageAndUsage("max thread offset must be <= 127");
        if (ratelimit <= 0) exitWithMessageAndUsage("ratelimit must be > 0");
        if (minvaluesize <= 0) exitWithMessageAndUsage("minvaluesize must be > 0");
        if (maxvaluesize <= 0) exitWithMessageAndUsage("maxvaluesize must be > 0");
        if (entropy <= 0) exitWithMessageAndUsage("entropy must be > 0");
        if (entropy > 127) exitWithMessageAndUsage("entropy must be <= 127");
        if (mpratio < 0.0 || mpratio > 1.0) exitWithMessageAndUsage("mpratio must be between 0.0 and 1.0");
        if (swapratio < 0.0 || swapratio > 1.0) exitWithMessageAndUsage("swapratio must be between 0.0 and 1.0");
        if (upsertratio < 0.0 || upsertratio > 1.0) exitWithMessageAndUsage("upsertratio must be between 0.0 and 1.0");
        if (upserthitratio < 0.0 || upserthitratio > 1.0) exitWithMessageAndUsage("upserthitratio must be between 0.0 and 1.0");
    }

    /**
     * Parse the command line, then derive the list-valued fields from
     * their comma-separated string options.
     */
    @Override
    public void parse(String cmdName, String[] args) {
        super.parse(cmdName, args);
        // parse servers
        parsedServers = servers.split(",");
        disabledThreads = new ArrayList<String>(Arrays.asList(disabledthreads.split(",")));
    }
}
/**
 * Fake an internal jstack to the log: dump every live thread's stack,
 * deduplicating identical stack traces so each unique trace is printed
 * once with the headers of all threads sharing it.
 */
static public void printJStack() {
    // stack-trace text -> headers of every thread sharing that trace
    Map<String, List<String>> deduped = new HashMap<String, List<String>>();

    for (Entry<Thread, StackTraceElement[]> entry : Thread.getAllStackTraces().entrySet()) {
        Thread thread = entry.getKey();
        String header = String.format("\"%s\" %sprio=%d tid=%d %s",
                thread.getName(),
                thread.isDaemon() ? "daemon " : "",
                thread.getPriority(),
                thread.getId(),
                thread.getState().toString());

        StringBuilder traceText = new StringBuilder();
        for (StackTraceElement frame : entry.getValue()) {
            traceText.append(" at ").append(frame.toString()).append("\n");
        }
        String stack = traceText.toString();

        // get-or-create the header list for this trace
        List<String> headers = deduped.get(stack);
        if (headers == null) {
            headers = new ArrayList<String>();
            deduped.put(stack, headers);
        }
        headers.add(header);
    }

    StringBuilder logline = new StringBuilder();
    for (Entry<String, List<String>> entry : deduped.entrySet()) {
        for (String header : entry.getValue()) {
            logline.append("\n").append(header).append("\n");
        }
        logline.append(entry.getKey());
    }
    log.info("Full thread dump:\n" + logline.toString());
}
/** Log the message as a hard-stop error and terminate the process abnormally. */
static public void hardStop(String msg) {
    logHardStop(msg);
    stopTheWorld();
}
/** Log the exception as a hard-stop error and terminate the process abnormally. */
static public void hardStop(Exception e) {
    logHardStop("Unexpected exception", e);
    stopTheWorld();
}
/**
 * Log the message and exception as a hard-stop error and terminate the
 * process abnormally. If the exception is a ProcCallException, its client
 * response JSON is also logged before terminating.
 */
static public void hardStop(String msg, Exception e) {
    logHardStop(msg, e);
    if (e instanceof ProcCallException) {
        ClientResponse cr = ((ProcCallException) e).getClientResponse();
        // logs the response JSON, then calls System.exit — does not return
        hardStop(msg, cr);
    }
    // BUG FIX: previously a non-ProcCallException fell through here and the
    // process kept running, unlike every other hardStop overload.
    stopTheWorld();
}
/** Convenience overload: forwards to the ClientResponseImpl variant. */
static public void hardStop(String msg, ClientResponse resp) {
    hardStop(msg, (ClientResponseImpl) resp);
}
/** Log the message plus the response's JSON form, then terminate the process. */
static public void hardStop(String msg, ClientResponseImpl resp) {
    logHardStop(msg);
    log.error("[HardStop] " + resp.toJSONString());
    stopTheWorld();
}
/** Log a hard-stop message with its triggering exception. */
static private void logHardStop(String msg, Exception e) {
    log.error("[HardStop] " + msg, e);
}
/** Log a hard-stop message. */
static private void logHardStop(String msg) {
    log.error("[HardStop] " + msg);
}
/** Dump all thread stacks for post-mortem, then exit the JVM with status -1. */
static private void stopTheWorld() {
    Benchmark.printJStack();
    log.error("Terminating abnormally");
    System.exit(-1);
}
/**
 * Client status listener: hard-stops the benchmark on uncaught callback
 * exceptions and schedules reconnection attempts for lost connections.
 */
private class StatusListener extends ClientStatusListenerExt {
    @Override
    public void uncaughtException(ProcedureCallback callback, ClientResponse resp, Throwable e) {
        // Any escaped callback exception is fatal to the benchmark.
        hardStop("Uncaught exception in procedure callback ", new Exception(e));
    }

    /**
     * Remove the client from the list if connection is broken.
     */
    @Override
    public void connectionLost(String hostname, int port, int connectionsLeft, DisconnectCause cause) {
        // During shutdown, connection losses are expected; don't retry.
        if (shutdown.get()) {
            return;
        }
        activeConnections.decrementAndGet();
        // reset the connection id so the client will connect to a recovered cluster
        // this is a bit of a hack
        if (connectionsLeft == 0) {
            ((ClientImpl) client).resetInstanceId();
        }
        // if the benchmark is still active
        if ((System.currentTimeMillis() - benchmarkStartTS) < (config.duration * 1000)) {
            log.warn(String.format("Connection to %s:%d was lost.", hostname, port));
        }
        // setup for retry: reconnect asynchronously on the retry executor
        final String server = MiscUtils.getHostnameColonPortString(hostname, port);
        es.execute(new Runnable() {
            @Override
            public void run() {
                connectToOneServerWithRetry(server);
            }
        });
    }
}
/**
 * Constructor for benchmark instance.
 * Configures VoltDB client and prints configuration.
 *
 * @param config Parsed & validated CLI options.
 */
Benchmark(Config config) {
    this.config = config;

    rateLimiter = new TxnId2RateLimiter(config.ratelimit);
    processor = new TxnId2PayloadProcessor(4, config.minvaluesize, config.maxvaluesize,
            config.entropy, Integer.MAX_VALUE, config.usecompression);

    log.info(HORIZONTAL_RULE);
    log.info(" Command Line Configuration");
    log.info(HORIZONTAL_RULE);
    log.info(config.getConfigDumpString());

    // Listener handles callback exceptions and lost-connection retries.
    StatusListener statusListener = new StatusListener();
    // no credentials configured (empty username/password)
    ClientConfig clientConfig = new ClientConfig("", "", statusListener);
    if (config.topologyaware) {
        clientConfig.setTopologyChangeAware(true);
    }
    client = ClientFactory.createClient(clientConfig);
}
/**
 * Connect to a single server with retry. Limited exponential backoff
 * (1s, 2s, 4s, then 8s per attempt). No timeout: this will run until the
 * process is killed, shutdown is flagged, or the connection succeeds.
 *
 * @param server hostname:port or just hostname (hostname can be ip).
 */
private void connectToOneServerWithRetry(String server) {
    int sleep = 1000;
    while (!shutdown.get()) {
        try {
            client.createConnection(server);
            activeConnections.incrementAndGet();
            log.info(String.format("Connected to VoltDB node at: %s.", server));
            break;
        }
        catch (Exception e) {
            log.warn(String.format("Connection to %s failed - retrying in %d second(s).", server, sleep / 1000));
            try {
                Thread.sleep(sleep);
            }
            catch (InterruptedException interrupted) {
                // BUG FIX: was silently swallowed; restore the interrupt flag so
                // a shutdown/interrupt request is not lost.
                Thread.currentThread().interrupt();
            }
            // double the backoff until it reaches the 8-second cap
            if (sleep < 8000) sleep += sleep;
        }
    }
}
/**
 * Connect to a set of servers in parallel. Each connection attempt will
 * retry until it succeeds. This call blocks until at least ONE server has
 * connected (the latch is counted down to zero by the first success);
 * the remaining connection threads keep retrying in the background.
 *
 * @throws InterruptedException if anything bad happens with the threads.
 */
private void connect() throws InterruptedException {
    log.info("Connecting to VoltDB...");
    if (config.topologyaware) {
        // topology-aware client: connect to one seed node only
        connectToOneServerWithRetry(config.parsedServers[0]);
    } else {
        final CountDownLatch connections = new CountDownLatch(1);
        // use a new thread to connect to each server
        for (final String server : config.parsedServers) {
            new Thread(new Runnable() {
                @Override
                public void run() {
                    connectToOneServerWithRetry(server);
                    connections.countDown();
                }
            }).start();
        }
        // block until at least one connection is established
        connections.await();
    }
}
/**
 * Create a Timer task to write the value of the txnCount to
 * disk (the ".checkpoint" file) once per second, to make it available
 * to apprunner. Failures are reported but non-fatal.
 *
 * @throws IOException declared for interface stability; the task itself
 *         swallows write failures.
 */
private void schedulePeriodicCheckpoint() throws IOException {
    checkpointTimer = new Timer("Checkpoint Timer", true);
    TimerTask checkpointTask = new TimerTask() {
        @Override
        public void run() {
            String count = String.valueOf(txnCount.get()) + "\n";
            FileWriter writer = null;
            try {
                // overwrite (not append) so the file always holds the latest count
                writer = new FileWriter(".checkpoint", false);
                writer.write(count);
            }
            catch (Exception e) {
                System.err.println("Caught exception writing checkpoint file.");
            }
            finally {
                // BUG FIX: the writer leaked when write() threw; always close it.
                if (writer != null) {
                    try {
                        writer.close();
                    }
                    catch (IOException ignored) {
                        // best-effort close; nothing useful to do here
                    }
                }
            }
        }
    };
    checkpointTimer.scheduleAtFixedRate(checkpointTask,
            1 * 1000,
            1 * 1000);
}
/**
 * Create a Timer task to display performance data on the Vote procedure.
 * It calls printStatistics() every displayinterval seconds, on a daemon
 * timer thread so it never blocks JVM exit.
 */
private void schedulePeriodicStats() {
    timer = new Timer("Stats Timer", true);
    TimerTask statsPrinting = new TimerTask() {
        @Override
        public void run() { printStatistics(); }
    };
    timer.scheduleAtFixedRate(statsPrinting,
            config.displayinterval * 1000,
            config.displayinterval * 1000);
}
/**
 * Create a Timer task to refresh ratelimit permits every 10 ms on a
 * daemon timer thread.
 */
private void scheduleRefreshPermits() {
    permitsTimer = new Timer("Ratelimiter Permits Timer", true);
    TimerTask refreshPermits = new TimerTask() {
        @Override
        public void run() { rateLimiter.updateActivePermits(System.currentTimeMillis()); }
    };
    permitsTimer.scheduleAtFixedRate(refreshPermits, 0, 10);
}
/**
 * Prints a one line update on performance that can be printed
 * periodically during a benchmark. Also acts as a watchdog: if no
 * transaction has completed for more than config.progresstimeout
 * seconds, the process is terminated. Synchronized because it mutates
 * the progress-tracking fields and may be invoked from the stats timer.
 */
private synchronized void printStatistics() {
    long txnCountNow = txnCount.get();
    long now = System.currentTimeMillis();

    // progress = any transactions completed since the previous call
    boolean madeProgress = txnCountNow > txnCountAtLastCheck;

    if (madeProgress) {
        lastProgressTimestamp = now;
    }
    txnCountAtLastCheck = txnCountNow;
    long diffInSeconds = (now - lastProgressTimestamp) / 1000;

    log.info(String.format("Executed %d%s", txnCount.get(),
            madeProgress ? "" : " (no progress made in " + diffInSeconds + " seconds, last at " +
            (new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.S")).format(new Date(lastProgressTimestamp)) + ")"));

    // watchdog: give up if the cluster has been unresponsive too long
    if (diffInSeconds > config.progresstimeout) {
        log.error("No progress was made in over " + diffInSeconds + " seconds while connected to a cluster. Exiting.");
        printJStack();
        System.exit(-1);
    }
}
/**
 * Asks the cluster for its unique partition count via the
 * "@Statistics PARTITIONCOUNT" system procedure.
 *
 * @return the partition count; the process exits if the call fails or
 *         the reported count is not positive
 * @throws Exception if the procedure invocation itself throws
 */
private int getUniquePartitionCount() throws Exception {
    ClientResponse cr = client.callProcedure("@Statistics", "PARTITIONCOUNT");
    if (cr.getStatus() != ClientResponse.SUCCESS) {
        log.error("Failed to call Statistics proc at startup. Exiting.");
        log.error(((ClientResponseImpl) cr).toJSONString());
        printJStack();
        System.exit(-1);
    }
    // Column 3 of the first result row carries the partition count.
    VoltTable t = cr.getResults()[0];
    int partitionCount = (int) t.fetchRow(0).getLong(3);
    log.info("unique partition count is " + partitionCount);
    if (partitionCount <= 0) {
        log.error("partition count is zero");
        System.exit(-1);
    }
    return partitionCount;
}
/**
 * Logs that the given thread has died.
 *
 * @param th the thread that is no longer alive
 * @return a nonzero exit code (1) for the caller to record
 */
private byte reportDeadThread(Thread th) {
    final String line = "Thread '" + th.getName() + "' is not alive";
    log.error(line);
    return 1;
}
/**
 * Logs that the given thread has died, with extra context.
 *
 * @param th the thread that is no longer alive
 * @param msg additional detail appended to the log line
 * @return a nonzero exit code (1) for the caller to record
 */
private byte reportDeadThread(Thread th, String msg) {
    final String line = "Thread '" + th.getName() + "' is not alive, " + msg;
    log.error(line);
    return 1;
}
// Process-wide handler for exceptions that escape any thread: log the
// failure with its stack trace, dump all thread stacks, and terminate.
public static Thread.UncaughtExceptionHandler h = new UncaughtExceptionHandler() {
    @Override
    public void uncaughtException(Thread th, Throwable ex) {
        log.error("Uncaught exception: " + ex.getMessage(), ex);
        printJStack();
        System.exit(-1);
    }
};
// Loader and worker threads created in runBenchmark(). Each field stays
// null when the corresponding name appears in config.disabledThreads;
// the end-of-run task in scheduleRunTimer() only health-checks non-null
// threads.
BigTableLoader partBiglt = null;
BigTableLoader replBiglt = null;
TruncateTableLoader partTrunclt = null;
TruncateTableLoader replTrunclt = null;
CappedTableLoader partCappedlt = null;
CappedTableLoader replCappedlt = null;
// Load-table loaders are constructed but currently not started (see the
// "XXX temporary" notes in runBenchmark()).
LoadTableLoader partLoadlt = null;
LoadTableLoader replLoadlt = null;
ReadThread readThread = null;
AdHocMayhemThread adHocMayhemThread = null;
InvokeDroppedProcedureThread idpt = null;
// DDL thread is constructed but currently not started.
DdlThread ddlt = null;
// Main transaction-generating threads, one per configured client id.
List<ClientThread> clientThreads = null;
/**
* Core benchmark code.
* Connect. Initialize. Run the loop. Cleanup. Print Results.
*
* @throws Exception if anything unexpected happens.
*/
public void runBenchmark() throws Exception {
byte exitcode = 0;
log.info(HORIZONTAL_RULE);
log.info(" Setup & Initialization");
log.info(HORIZONTAL_RULE);
// Only rate limit the ClientThread for now. Share the same permits for all type of invocations.
Semaphore permits = rateLimiter.addType(0, 1);
final int cidCount = 128;
final long[] lastRid = new long[cidCount];
for (int i = 0; i < lastRid.length; i++) {
lastRid[i] = 0;
}
// connect to one or more servers, loop until success
connect();
// get partition count
int partitionCount = 0;
int trycount = 12;
while (trycount
try {
partitionCount = getUniquePartitionCount();
break;
} catch (Exception e) {
}
Thread.sleep(10000);
}
// get stats
try {
ClientResponse cr = TxnId2Utils.doProcCall(client, "Summarize_Replica", config.threadoffset, config.threads);
if (cr.getStatus() != ClientResponse.SUCCESS) {
log.error("Failed to call Summarize proc at startup. Exiting.");
log.error(((ClientResponseImpl) cr).toJSONString());
printJStack();
System.exit(-1);
}
// successfully called summarize
VoltTable t = cr.getResults()[0];
long ts = t.fetchRow(0).getLong("ts");
String tsStr = ts == 0 ? "NO TIMESTAMPS" : String.valueOf(ts) + " / " + new Date(ts).toString();
long count = t.fetchRow(0).getLong("count");
log.info("STARTUP TIMESTAMP OF LAST UPDATE (GMT): " + tsStr);
log.info("UPDATES RUN AGAINST THIS DB TO DATE: " + count);
} catch (ProcCallException e) {
log.error("Failed to call Summarize proc at startup. Exiting.", e);
log.error(((ClientResponseImpl) e.getClientResponse()).toJSONString());
printJStack();
System.exit(-1);
}
clientThreads = new ArrayList<ClientThread>();
if (!config.disabledThreads.contains("clients")) {
for (byte cid = (byte) config.threadoffset; cid < config.threadoffset + config.threads; cid++) {
ClientThread clientThread = new ClientThread(cid, txnCount, client, processor, permits,
config.allowinprocadhoc, config.mpratio);
//clientThread.start(); # started after preload is complete
clientThreads.add(clientThread);
}
}
log.info(HORIZONTAL_RULE);
log.info("Loading Filler Tables...");
log.info(HORIZONTAL_RULE);
// Big Partitioned Loader
if (!config.disabledThreads.contains("partBiglt")) {
partBiglt = new BigTableLoader(client, "bigp",
(config.partfillerrowmb * 1024 * 1024) / config.fillerrowsize, config.fillerrowsize, 50, permits, partitionCount);
partBiglt.start();
}
replBiglt = null;
if (config.mpratio > 0.0 && !config.disabledThreads.contains("replBiglt")) {
replBiglt = new BigTableLoader(client, "bigr",
(config.replfillerrowmb * 1024 * 1024) / config.fillerrowsize, config.fillerrowsize, 3, permits, partitionCount);
replBiglt.start();
}
// wait for the filler tables to load up
//partBiglt.join();
//replBiglt.join();
log.info(HORIZONTAL_RULE);
log.info("Starting Benchmark");
log.info(HORIZONTAL_RULE);
// print periodic statistics to the console
benchmarkStartTS = System.currentTimeMillis();
scheduleRunTimer();
// reset progress tracker
lastProgressTimestamp = System.currentTimeMillis();
schedulePeriodicStats();
schedulePeriodicCheckpoint();
scheduleRefreshPermits();
// Run the benchmark loop for the requested duration
// The throughput may be throttled depending on client configuration
log.info("Running benchmark...");
while (((ClientImpl) client).isHashinatorInitialized() == false) {
Thread.sleep(1000);
System.out.println("Wait for hashinator..");
}
if (!config.disabledThreads.contains("clients")) {
for (ClientThread t : clientThreads) {
t.start();
}
}
if (!config.disabledThreads.contains("partTrunclt")) {
partTrunclt = new TruncateTableLoader(client, "trup",
(config.partfillerrowmb * 1024 * 1024) / config.fillerrowsize, config.fillerrowsize, 50, permits, config.mpratio, config.swapratio);
partTrunclt.start();
}
replTrunclt = null;
if (config.mpratio > 0.0 && !config.disabledThreads.contains("replTrunclt")) {
replTrunclt = new TruncateTableLoader(client, "trur",
(config.replfillerrowmb * 1024 * 1024) / config.fillerrowsize, config.fillerrowsize, 3, permits, config.mpratio, config.swapratio);
replTrunclt.start();
}
if (!config.disabledThreads.contains("partCappedlt")) {
partCappedlt = new CappedTableLoader(client, "capp", // more
(config.partfillerrowmb * 1024 * 1024) / config.fillerrowsize, config.fillerrowsize, 50, permits, config.mpratio);
partCappedlt.start();
}
if (config.mpratio > 0.0 && !config.disabledThreads.contains("replCappedlt")) {
replCappedlt = new CappedTableLoader(client, "capr", // more
(config.replfillerrowmb * 1024 * 1024) / config.fillerrowsize, config.fillerrowsize, 3, permits, config.mpratio);
replCappedlt.start();
}
if (!config.disabledThreads.contains("partLoadlt")) {
partLoadlt = new LoadTableLoader(client, "loadp",
(config.partfillerrowmb * 1024 * 1024) / config.fillerrowsize, 50, permits, false, 0);
// XXX temporary partLoadlt.start();
}
replLoadlt = null;
if (config.mpratio > 0.0 && !config.disabledThreads.contains("replLoadlt")) {
replLoadlt = new LoadTableLoader(client, "loadmp",
(config.replfillerrowmb * 1024 * 1024) / config.fillerrowsize, 3, permits, true, -1);
// XXX temporary replLoadlt.start();
}
if (!config.disabledThreads.contains("readThread")) {
readThread = new ReadThread(client, config.threads, config.threadoffset,
config.allowinprocadhoc, config.mpratio, permits);
readThread.start();
}
if (!config.disabledThreads.contains("adHocMayhemThread")) {
adHocMayhemThread = new AdHocMayhemThread(client, config.mpratio, permits);
if (!config.disableadhoc) {
adHocMayhemThread.start();
}
}
if (!config.disabledThreads.contains("idpt")) {
idpt = new InvokeDroppedProcedureThread(client);
idpt.start();
} if (!config.disabledThreads.contains("ddlt")) {
ddlt = new DdlThread(client);
// XXX/PSR ddlt.start();
}
log.info("All threads started...");
while (true) {
Thread.sleep(Integer.MAX_VALUE);
}
}
/**
 * Create a Timer task to time the run
 * at end of run, check if we actually did anything
 *
 * The task fires once after config.duration seconds, verifies that every
 * non-null worker thread is still alive (or, for the big-table loaders,
 * finished loading), logs the final transaction count, and exits the
 * process with 0 on success, 1 if a thread died, or 2 if a thread died
 * and no transactions at all were executed.
 */
private void scheduleRunTimer() throws IOException {
    runTimer = new Timer("Run Timer", true);
    TimerTask runEndTask = new TimerTask() {
        @Override
        public void run() {
            log.info(HORIZONTAL_RULE);
            log.info("Benchmark Complete");
            int exitcode = 0;
            // check if loaders are done or still working
            // A loader that died before reaching 100% loaded is a failure;
            // one that is still alive (still loading) is acceptable.
            if (partBiglt != null) {
                int lpcc = partBiglt.getPercentLoadComplete();
                if (!partBiglt.isAlive() && lpcc < 100) {
                    exitcode = reportDeadThread(partBiglt, " yet only " + Integer.toString(lpcc) + "% rows have been loaded");
                } else
                    log.info(partBiglt + " was at " + lpcc + "% of rows loaded");
            }
            if (replBiglt != null) {
                int lpcc = replBiglt.getPercentLoadComplete();
                if (!replBiglt.isAlive() && lpcc < 100) {
                    exitcode = reportDeadThread(replBiglt, " yet only " + Integer.toString(lpcc) + "% rows have been loaded");
                } else
                    log.info(replBiglt + " was at " + lpcc + "% of rows loaded");
            }
            // check if all threads still alive (only those actually created)
            if (partTrunclt != null && !partTrunclt.isAlive())
                exitcode = reportDeadThread(partTrunclt);
            if (replTrunclt != null && !replTrunclt.isAlive())
                exitcode = reportDeadThread(replTrunclt);
            /* XXX if (! partLoadlt.isAlive())
                exitcode = reportDeadThread(partLoadlt);
            if (! replLoadlt.isAlive())
                exitcode = reportDeadThread(replLoadlt);
            */
            if (readThread != null && !readThread.isAlive())
                exitcode = reportDeadThread(readThread);
            // the ad hoc thread only counts if it was actually started
            if (adHocMayhemThread != null && !config.disableadhoc && !adHocMayhemThread.isAlive())
                exitcode = reportDeadThread(adHocMayhemThread);
            if (idpt != null && !idpt.isAlive())
                exitcode = reportDeadThread(idpt);
            /* XXX if (! ddlt.isAlive())
                exitcode = reportDeadThread(ddlt);*/
            for (ClientThread ct : clientThreads) {
                if (!ct.isAlive()) {
                    exitcode = reportDeadThread(ct);
                }
            }
            /*
            replBiglt.shutdown();
            partBiglt.shutdown();
            replTrunclt.shutdown();
            partTrunclt.shutdown();
            readThread.shutdown();
            adHocMayhemThread.shutdown();
            idpt.shutdown();
            ddlt.shutdown();
            for (ClientThread clientThread : clientThreads) {
                clientThread.shutdown();
            }
            replBiglt.join();
            partBiglt.join();
            readThread.join();
            adHocMayhemThread.join();
            idpt.join();
            ddlt.join();
            //Shutdown LoadTableLoader
            replLoadlt.shutdown();
            partLoadlt.shutdown();
            replLoadlt.join();
            partLoadlt.join();
            for (ClientThread clientThread : clientThreads) {
                clientThread.join();
            }
            */
            // cancel periodic stats printing
            timer.cancel();
            checkpointTimer.cancel();
            /*
            shutdown.set(true);
            es.shutdownNow();
            // block until all outstanding txns return
            client.drain();
            client.close();
            permitsTimer.cancel();
            */
            long count = txnCount.get();
            log.info("Client thread transaction count: " + count + "\n");
            // A dead thread plus zero completed work is a distinct failure.
            if (exitcode > 0 && txnCount.get() == 0) {
                System.err.println("Shutting down, but found that no work was done.");
                exitcode = 2;
            }
            System.exit(exitcode);
        }
    };
    // one-shot: fire once when the configured duration elapses
    runTimer.schedule(runEndTask, config.duration * 1000);
}
/**
* Main routine creates a benchmark instance and kicks off the run method.
*
* @param args Command line arguments.
* @throws Exception if anything goes wrong.
* @see {@link Config}
*/
public static void main(String[] args) throws Exception {
// create a configuration from the arguments
Config config = new Config();
config.parse(Benchmark.class.getName(), args);
Benchmark benchmark = new Benchmark(config);
benchmark.runBenchmark();
}
} |
package abra;
import joptsimple.OptionParser;
public class ReAlignerOptions extends Options {
private static final String INPUT_SAM = "in";
private static final String INPUT_SAM2 = "in2";
private static final String INPUT_SAM3 = "in3";
private static final String OUTPUT_SAM = "out";
private static final String OUTPUT_SAM2 = "out2";
private static final String OUTPUT_SAM3 = "out3";
private static final String REFERENCE = "ref";
private static final String TARGET_REGIONS = "targets";
private static final String WORKING_DIR = "working";
private static final String KMER_SIZE = "kmer";
private static final String MIN_NODE_FREQUENCY = "mnf";
private static final String MIN_UNALIGNED_NODE_FREQUENCY = "umnf";
private static final String MIN_CONTIG_LENGTH = "mcl";
private static final String MAX_POTENTIAL_CONTIGS = "mpc";
private static final String MIN_CONTIG_RATIO = "mcr";
private static final String MIN_CONTIG_MAPQ = "mc-mapq";
private static final String NUM_THREADS = "threads";
private static final String SKIP_UNALIGNED_ASSEMBLY = "no-unalign";
private static final String MAX_UNALIGNED_READS = "mur";
private static final String PAIRED_END = "paired";
private static final String RNA = "rna";
private static final String RNA_OUTPUT = "rna-out";
private static final String MIN_BASE_QUALITY = "mbq";
private OptionParser parser;
private boolean isValid;
@Override
protected OptionParser getOptionParser() {
if (parser == null) {
parser = new OptionParser();
parser.accepts(INPUT_SAM, "Required input sam or bam file").withRequiredArg().ofType(String.class);
parser.accepts(INPUT_SAM2, "Optional input sam or bam file").withRequiredArg().ofType(String.class);
parser.accepts(INPUT_SAM3, "Optional input sam or bam file").withRequiredArg().ofType(String.class);
parser.accepts(OUTPUT_SAM, "Required output sam or bam file").withRequiredArg().ofType(String.class);
parser.accepts(OUTPUT_SAM2, "Optional output sam or bam file").withRequiredArg().ofType(String.class);
parser.accepts(OUTPUT_SAM3, "Optional output sam or bam file").withRequiredArg().ofType(String.class);
parser.accepts(REFERENCE, "Genome reference location").withRequiredArg().ofType(String.class);
parser.accepts(TARGET_REGIONS, "GTF containing target regions").withRequiredArg().ofType(String.class);
parser.accepts(WORKING_DIR, "Working directory for intermediate output").withRequiredArg().ofType(String.class);
parser.accepts(KMER_SIZE, "Assembly kmer size(delimit by commas if more than 1").withRequiredArg().ofType(String.class);
parser.accepts(MIN_NODE_FREQUENCY, "Assembly minimum node frequency").withRequiredArg().ofType(Integer.class);
parser.accepts(MIN_UNALIGNED_NODE_FREQUENCY, "Assembly minimum unaligned node frequency").withRequiredArg().ofType(Integer.class);
parser.accepts(MIN_CONTIG_LENGTH, "Assembly minimum contig length").withRequiredArg().ofType(Integer.class);
parser.accepts(MAX_POTENTIAL_CONTIGS, "Maximum number of potential contigs for a region").withRequiredArg().ofType(Integer.class);
parser.accepts(MIN_CONTIG_RATIO, "Minimum contig length as percentage of observed region length").withRequiredArg().ofType(Double.class);
parser.accepts(NUM_THREADS, "Number of threads (default: 2)").withRequiredArg().ofType(Integer.class);
parser.accepts(MIN_CONTIG_MAPQ, "Minimum contig mapping quality").withRequiredArg().ofType(Integer.class);
parser.accepts(SKIP_UNALIGNED_ASSEMBLY, "Skip assembly of reads that did not initially align.");
parser.accepts(MAX_UNALIGNED_READS, "Maximum number of unaligned reads to assemble").withRequiredArg().ofType(Integer.class);
parser.accepts(PAIRED_END, "Paired end");
parser.accepts(RNA, "Input RNA sam or bam file (optional)").withRequiredArg().ofType(String.class);
parser.accepts(RNA_OUTPUT, "Output RNA sam or bam file (required if RNA input file specified)").withRequiredArg().ofType(String.class);
parser.accepts(MIN_BASE_QUALITY, "Minimum base quality for inclusion in assembly").withRequiredArg().ofType(Integer.class);
}
return parser;
}
@Override
protected void validate() {
isValid = true;
if (!getOptions().hasArgument(INPUT_SAM)) {
isValid = false;
System.out.println("Missing required input SAM/BAM file");
}
if (!getOptions().hasArgument(OUTPUT_SAM)) {
isValid = false;
System.out.println("Missing required input SAM/BAM file");
}
if (!getOptions().hasArgument(REFERENCE)) {
isValid = false;
System.out.println("Missing required reference");
}
if (!getOptions().hasArgument(TARGET_REGIONS)) {
isValid = false;
System.out.println("Missing required target regions");
}
if (!getOptions().hasArgument(WORKING_DIR)) {
isValid = false;
System.out.println("Missing required working directory");
}
if ((getOptions().hasArgument(NUM_THREADS) && (Integer) getOptions().valueOf(NUM_THREADS) < 1)) {
isValid = false;
System.out.println("Num threads must be greater than zero.");
}
if (!isValid) {
printHelp();
}
}
public String getInputFile() {
return (String) getOptions().valueOf(INPUT_SAM);
}
public String getOutputFile() {
return (String) getOptions().valueOf(OUTPUT_SAM);
}
public String getInputFile2() {
return (String) getOptions().valueOf(INPUT_SAM2);
}
public String getOutputFile2() {
return (String) getOptions().valueOf(OUTPUT_SAM2);
}
public String getInputFile3() {
return (String) getOptions().valueOf(INPUT_SAM3);
}
public String getOutputFile3() {
return (String) getOptions().valueOf(OUTPUT_SAM3);
}
public String getReference() {
return (String) getOptions().valueOf(REFERENCE);
}
public String getTargetRegionFile() {
return (String) getOptions().valueOf(TARGET_REGIONS);
}
public String getWorkingDir() {
return (String) getOptions().valueOf(WORKING_DIR);
}
public int[] getKmerSizes() {
String[] kmerStr = ((String) getOptions().valueOf(KMER_SIZE)).split(",");
int[] kmers = new int[kmerStr.length];
for (int i=0; i<kmerStr.length; i++) {
kmers[i] = Integer.parseInt(kmerStr[i]);
}
return kmers;
}
public int getMinNodeFrequency() {
return (Integer) getOptions().valueOf(MIN_NODE_FREQUENCY);
}
public int getMinUnalignedNodeFrequency() {
return (Integer) getOptions().valueOf(MIN_UNALIGNED_NODE_FREQUENCY);
}
public int getMinContigLength() {
return (Integer) getOptions().valueOf(MIN_CONTIG_LENGTH);
}
public int getMaxPotentialContigs() {
return (Integer) getOptions().valueOf(MAX_POTENTIAL_CONTIGS);
}
public double getMinContigRatio() {
return (Double) getOptions().valueOf(MIN_CONTIG_RATIO);
}
public int getNumThreads() {
return getOptions().hasArgument(NUM_THREADS) ? (Integer) getOptions().valueOf(NUM_THREADS) : 2;
}
public int getMinContigMapq() {
return (Integer) getOptions().valueOf(MIN_CONTIG_MAPQ);
}
public boolean isSkipUnalignedAssembly() {
return getOptions().has(SKIP_UNALIGNED_ASSEMBLY);
}
public int getMaxUnalignedReads() {
return (Integer) getOptions().valueOf(MAX_UNALIGNED_READS);
}
public boolean isPairedEnd() {
return getOptions().has(PAIRED_END);
}
public String getRnaSam() {
return (String) getOptions().valueOf(RNA);
}
public String getRnaSamOutput() {
return (String) getOptions().valueOf(RNA_OUTPUT);
}
public int getMinBaseQuality() {
return (Integer) getOptions().valueOf(MIN_BASE_QUALITY);
}
public boolean isValid() {
return isValid;
}
} |
package org.jitsi.impl.neomedia.codec;
import org.jitsi.util.*;
import java.nio.*;
/**
* Provides the interface to the native FFmpeg library.
*
* @author Lyubomir Marinov
* @author Sebastien Vincent
*/
public class FFmpeg
{
/**
 * No pts value.
 */
public static final long AV_NOPTS_VALUE = 0x8000000000000000L;
/**
 * The number of data pointers in an AVFrame/AVPicture
 * (FFmpeg's AV_NUM_DATA_POINTERS).
 */
public static final int AV_NUM_DATA_POINTERS = 8;
/**
 * The AV sample format for signed 16.
 */
public static final int AV_SAMPLE_FMT_S16 = 1;
/**
 * The AV sample format for signed 16 planar.
 */
public static final int AV_SAMPLE_FMT_S16P = 6;
/**
 * AC pred flag.
 */
public static final int CODEC_FLAG_AC_PRED = 0x02000000;
/**
 * H263+ slice struct flag.
 */
public static final int CODEC_FLAG_H263P_SLICE_STRUCT = 0x10000000;
/**
 * H263+ UMV flag.
 */
public static final int CODEC_FLAG_H263P_UMV = 0x01000000 ;
/**
 * Loop filter flag.
 */
public static final int CODEC_FLAG_LOOP_FILTER = 0x00000800;
/**
 * The flag which allows incomplete frames to be passed to a decoder.
 */
public static final int CODEC_FLAG2_CHUNKS = 0x00008000;
/**
 * Intra refresh flag2.
 */
public static final int CODEC_FLAG2_INTRA_REFRESH = 0x00200000;
/**
 * AMR-NB codec ID.
 */
private static final int CODEC_ID_AMR_NB = 0x12000;
/**
 * AMR-WB codec ID
 */
public static final int CODEC_ID_AMR_WB = CODEC_ID_AMR_NB + 1;
/**
 * H263 codec ID.
 */
public static final int CODEC_ID_H263 = 5;
/**
 * H263+ codec ID.
 */
public static final int CODEC_ID_H263P = 20;
/**
 * H264 codec ID.
 */
public static final int CODEC_ID_H264 = 28;
/**
 * MJPEG codec ID.
 */
public static final int CODEC_ID_MJPEG = 8;
/**
 * MP3 codec ID.
 */
public static final int CODEC_ID_MP3 = 0x15000 + 1;
/**
 * VP8 codec ID
 */
public static final int CODEC_ID_VP8 = 142;
/**
 * Work around bugs in encoders which sometimes cannot be detected
 * automatically.
 */
public static final int FF_BUG_AUTODETECT = 1;
/**
 * FF_CMP_CHROMA comparison-function flag (see
 * {@link #avcodeccontext_set_me_cmp(long, int)}).
 */
public static final int FF_CMP_CHROMA = 256;
/**
 * Padding size for FFmpeg input buffer.
 */
public static final int FF_INPUT_BUFFER_PADDING_SIZE = 8;
/**
 * FF_MB_DECISION_SIMPLE macroblock decision mode (see
 * {@link #avcodeccontext_set_mb_decision(long, int)}).
 */
public static final int FF_MB_DECISION_SIMPLE = 0;
/**
 * The minimum encoding buffer size defined by libavcodec.
 */
public static final int FF_MIN_BUFFER_SIZE = 16384;
/**
 * The H264 baseline profile.
 */
public static final int FF_PROFILE_H264_BASELINE = 66;
/**
 * The H264 high profile.
 */
public static final int FF_PROFILE_H264_HIGH = 100;
/**
 * The H264 main profile.
 */
public static final int FF_PROFILE_H264_MAIN = 77;
/**
 * ARGB format.
 */
public static final int PIX_FMT_ARGB = 27;
/**
 * BGR24 format as of FFmpeg.
 */
public static final int PIX_FMT_BGR24_1 = 3;
/**
 * BGR32 format handled in endian specific manner.
 * It is stored as ABGR on big-endian and RGBA on little-endian.
 * Resolved at class load time via av_get_pix_fmt in the static initializer.
 */
public static final int PIX_FMT_BGR32;
/**
 * BGR32_1 format handled in endian specific manner.
 * It is stored as BGRA on big-endian and ARGB on little-endian.
 * Resolved at class load time via av_get_pix_fmt in the static initializer.
 */
public static final int PIX_FMT_BGR32_1;
/**
 * "NONE" format.
 */
public static final int PIX_FMT_NONE = -1;
/**
 * NV12 format.
 */
public static final int PIX_FMT_NV12 = 25;
/**
 * RGB24 format handled in endian specific manner.
 * It is stored as RGB on big-endian and BGR on little-endian.
 * Resolved at class load time via av_get_pix_fmt in the static initializer.
 */
public static final int PIX_FMT_RGB24;
/**
 * RGB24 format as of FFmpeg.
 */
public static final int PIX_FMT_RGB24_1 = 2;
/**
 * RGB32 format handled in endian specific manner.
 * It is stored as ARGB on big-endian and BGRA on little-endian.
 * Resolved at class load time via av_get_pix_fmt in the static initializer.
 */
public static final int PIX_FMT_RGB32;
/**
 * RGB32_1 format handled in endian specific manner.
 * It is stored as RGBA on big-endian and ABGR on little-endian.
 * Resolved at class load time via av_get_pix_fmt in the static initializer.
 */
public static final int PIX_FMT_RGB32_1;
/**
 * UYVY422 format.
 */
public static final int PIX_FMT_UYVY422 = 17;
/**
 * UYYVYY411 format.
 */
public static final int PIX_FMT_UYYVYY411 = 18;
/** Y41P format */
public static final int PIX_FMT_YUV411P = 7;
/**
 * YUV420P format.
 */
public static final int PIX_FMT_YUV420P = 0;
/**
 * YUVJ422P format.
 */
public static final int PIX_FMT_YUVJ422P = 13;
/**
 * YUYV422 format.
 */
public static final int PIX_FMT_YUYV422 = 1;
/**
 * BICUBIC type for libswscale conversion.
 */
public static final int SWS_BICUBIC = 4;
//public static final int X264_RC_ABR = 2;
static
{
    // Best-effort preload of the OpenH264 library; jnffmpeg may link
    // against it. Failure is deliberately ignored — the fallback library
    // below does not require it.
    try
    {
        System.loadLibrary("libopenh264");
    }
    catch (Throwable t){}
    boolean jnffmpegLoaded = false;
    // First choice: the full jnffmpeg native library.
    try
    {
        JNIUtils.loadLibrary("jnffmpeg", FFmpeg.class.getClassLoader());
        jnffmpegLoaded = true;
    }
    catch (Throwable t)
    {
        // TODO remove stacktrace print
        t.printStackTrace();
    }
    // Fallback: a jnffmpeg build without OpenH264. If this also fails,
    // the Throwable propagates and class initialization fails.
    try
    {
        if (!jnffmpegLoaded)
            JNIUtils.loadLibrary(
                    "jnffmpeg-no-openh264", FFmpeg.class.getClassLoader());
    }
    catch (Throwable t)
    {
        // TODO remove stacktrace print
        t.printStackTrace();
        throw t;
    }
    // Register all formats, codecs and filters with the native libraries.
    av_register_all();
    avcodec_register_all();
    avfilter_register_all();
    // Resolve the endian-dependent pixel format constants by name.
    PIX_FMT_BGR32 = av_get_pix_fmt("bgr32"); //PIX_FMT_BGR32();
    PIX_FMT_RGB24 = av_get_pix_fmt("rgb24"); //PIX_FMT_RGB24();
    PIX_FMT_RGB32 = av_get_pix_fmt("rgb32"); //PIX_FMT_RGB32();
    if (ByteOrder.nativeOrder().equals(ByteOrder.BIG_ENDIAN)) {
        PIX_FMT_BGR32_1 = av_get_pix_fmt("bgra");
        PIX_FMT_RGB32_1 = av_get_pix_fmt("rgba");
    } else {
        PIX_FMT_BGR32_1 = av_get_pix_fmt("argb");
        PIX_FMT_RGB32_1 = av_get_pix_fmt("abgr");
    }
}
/**
 * Returns a description of the given FFmpeg error code.
 *
 * @param errnum the FFmpeg error number
 * @return a human-readable error string
 */
public static native String av_strerror(int errnum);
/**
 * Looks up a pixel format by name.
 *
 * @param name the pixel format name (e.g. "rgb32")
 * @return the pixel format constant, or a negative value if unknown
 *         -- NOTE(review): failure value inferred from the FFmpeg API;
 *         confirm against the JNI implementation
 */
public static native int av_get_pix_fmt(String name);
/**
 * Free a native pointer allocated by av_malloc.
 *
 * @param ptr native pointer to free
 */
public static native void av_free(long ptr);
/**
 * Allocate memory.
 *
 * @param size size to allocate
 * @return native pointer or 0 if av_malloc failed
 */
public static native long av_malloc(int size);
/**
 * Initialize libavformat and register all the muxers, demuxers and
 * protocols.
 */
public static native void av_register_all();
/**
 * Allocates a new <tt>AVCodecContext</tt>.
 *
 * @param codec an <tt>AVCodec *</tt> to initialize the context for, or 0
 * @return native pointer to the new <tt>AVCodecContext</tt>
 */
public static native long avcodec_alloc_context3(long codec);
/**
 * Allocates an <tt>AVFrame</tt> instance and sets its fields to default
 * values. The result must be freed using {@link #avcodec_free_frame(long)}.
 *
 * @return an <tt>AVFrame *</tt> value which points to an <tt>AVFrame</tt>
 * instance filled with default values or <tt>0</tt> on failure
 */
public static native long avcodec_alloc_frame();
/**
 * Allocates an <tt>AVPacket</tt> with the given payload size. Free it
 * with {@link #avcodec_free_packet(long)}.
 *
 * @param size the packet payload size in bytes
 * @return an <tt>AVPacket *</tt> value, presumably 0 on failure --
 * NOTE(review): confirm against the JNI implementation
 */
public static native long avcodec_alloc_packet(int size);
/**
 * Close an AVCodecContext
 *
 * @param ctx pointer to AVCodecContex
 * @return 0 if success, -1 otherwise
 */
public static native int avcodec_close(long ctx);
/**
 * Decodes an audio frame from an <tt>AVPacket</tt>.
 *
 * @param avctx the codec context
 * @param frame the <tt>AVFrame</tt> to receive the decoded audio
 * @param got_frame single-element array set to whether a frame was decoded
 * @param avpkt the <tt>AVPacket</tt> holding the input data
 * @return bytes consumed on success, negative on error --
 * NOTE(review): semantics inferred from the FFmpeg avcodec_decode_audio4
 * API; confirm against the JNI implementation
 */
public static native int avcodec_decode_audio4(long avctx, long frame,
        boolean[] got_frame, long avpkt);
/**
 * Decode a video frame.
 *
 * @param ctx codec context
 * @param frame frame decoded
 * @param got_picture if the decoding has produced a valid picture
 * @param buf the input buffer
 * @param buf_size input buffer size
 * @return number of bytes written to buff if success
 */
public static native int avcodec_decode_video(long ctx, long frame,
        boolean[] got_picture, byte[] buf, int buf_size);
/**
 * Decode a video frame.
 *
 * @param ctx codec context
 * @param avframe frame decoded
 * @param src input buffer
 * @param src_length input buffer size
 * @return number of bytes written to buff if success
 */
public static native int avcodec_decode_video(long ctx, long avframe,
        long src, int src_length);
/**
 * Encodes an audio frame from <tt>samples</tt> into <tt>buf</tt>.
 *
 * @param ctx the codec context
 * @param buf the output buffer
 * @param buf_offset the output buffer offset
 * @param buf_size the output buffer size
 * @param samples the input buffer containing the samples. The number of
 * samples read from this buffer is <tt>frame_size</tt>*<tt>channels</tt>,
 * both of which are defined in <tt>ctx</tt>. For PCM audio the number of
 * samples read from samples is equal to
 * <tt>buf_size</tt>*<tt>input_sample_size</tt>/<tt>output_sample_size</tt>.
 * @param samples_offset the offset in the input buffer containing the
 * samples
 * @return on error a negative value is returned, on success zero or the
 * number of bytes used to encode the data read from the input buffer
 */
public static native int avcodec_encode_audio(
        long ctx,
        byte[] buf, int buf_offset, int buf_size,
        byte[] samples, int samples_offset);
/**
 * Encode a video frame.
 *
 * @param ctx codec context
 * @param buff the output buffer
 * @param buf_size output buffer size
 * @param frame frame to encode
 * @return number of bytes written to buff if success
 */
public static native int avcodec_encode_video(long ctx, byte[] buff,
        int buf_size, long frame);
/**
 * Finds a registered decoder with a matching ID.
 *
 * @param id <tt>AVCodecID</tt> of the requested encoder
 * @return an <tt>AVCodec</tt> decoder if one was found; <tt>0</tt>,
 * otherwise
 */
public static native long avcodec_find_decoder(int id);
/**
 * Finds a registered encoder with a matching codec ID.
 *
 * @param id <tt>AVCodecID</tt> of the requested encoder
 * @return an <tt>AVCodec</tt> encoder if one was found; <tt>0</tt>,
 * otherwise
 */
public static native long avcodec_find_encoder(int id);
/**
 * Frees an <tt>AVFrame</tt> instance specified as an <tt>AVFrame *</tt>
 * value and any dynamically allocated objects in it (e.g.
 * <tt>extended_data</tt>).
 * <p>
 * <b>Warning</b>: The method/function does NOT free the data buffers
 * themselves because it does not know how since they might have been
 * allocated with a custom <tt>get_buffer()</tt>.
 * </p>
 *
 * @param frame an <tt>AVFrame *</tt> value which points to the
 * <tt>AVFrame</tt> instance to be freed
 */
public static void avcodec_free_frame(long frame)
{
    // FIXME Invoke the native function avcodec_free_frame(AVFrame **).
    av_free(frame);
}
/**
 * Frees an <tt>AVPacket</tt> allocated by
 * {@link #avcodec_alloc_packet(int)}.
 *
 * @param pkt an <tt>AVPacket *</tt> value pointing to the packet to free
 */
public static native void avcodec_free_packet(long pkt);
/**
 * Initializes the specified <tt>AVCodecContext</tt> to use the specified
 * <tt>AVCodec</tt>.
 *
 * @param ctx the <tt>AVCodecContext</tt> which will be set up to use the
 * specified <tt>AVCodec</tt>
 * @param codec the <tt>AVCodec</tt> to use within the
 * <tt>AVCodecContext</tt>
 * @param options key/value option pairs applied to the context
 * @return zero on success, a negative value on error
 */
public static native int avcodec_open2(
        long ctx,
        long codec,
        String... options);
/**
 * Registers all codecs with libavcodec. Invoked once from the static
 * initializer of this class.
 */
public static native void avcodec_register_all();
/**
 * Add specific flags to AVCodecContext's flags member.
 *
 * @param ctx pointer to AVCodecContext
 * @param flags flags to add
 */
public static native void avcodeccontext_add_flags(long ctx, int flags);
/**
 * Add specific flags to AVCodecContext's flags2 member.
 *
 * @param ctx pointer to AVCodecContext
 * @param flags2 flags to add
 */
public static native void avcodeccontext_add_flags2(long ctx, int flags2);
/**
 * Gets the samples per packet of the specified <tt>AVCodecContext</tt>. The
 * property is set by libavcodec upon {@link #avcodec_open(long, long)}.
 *
 * @param ctx the <tt>AVCodecContext</tt> to get the samples per packet of
 * @return the samples per packet of the specified <tt>AVCodecContext</tt>
 */
public static native int avcodeccontext_get_frame_size(long ctx);
/**
 * Get height of the video.
 *
 * @param ctx pointer to AVCodecContext
 * @return video height
 */
public static native int avcodeccontext_get_height(long ctx);
/**
 * Get pixel format.
 *
 * @param ctx pointer to AVCodecContext
 * @return pixel format
 */
public static native int avcodeccontext_get_pix_fmt(long ctx);
/**
 * Get width of the video.
 *
 * @param ctx pointer to AVCodecContext
 * @return video width
 */
public static native int avcodeccontext_get_width(long ctx);
/**
 * Set the B-Frame strategy.
 *
 * @param ctx AVCodecContext pointer
 * @param b_frame_strategy strategy
 */
public static native void avcodeccontext_set_b_frame_strategy(long ctx,
        int b_frame_strategy);
/**
 * Sets the average bit rate of the specified <tt>AVCodecContext</tt>. The
 * property is to be set by the user when encoding and is unused for
 * constant quantizer encoding. It is set by libavcodec when decoding and
 * its value is <tt>0</tt> or some bitrate if this info is available in the
 * stream.
 *
 * @param ctx the <tt>AVCodecContext</tt> to set the average bit rate of
 * @param bit_rate the average bit rate to be set to the specified
 * <tt>AVCodecContext</tt>
 */
public static native void avcodeccontext_set_bit_rate(long ctx,
        int bit_rate);
/**
 * Set the bit rate tolerance
 *
 * @param ctx the <tt>AVCodecContext</tt> to set the bit rate of
 * @param bit_rate_tolerance bit rate tolerance
 */
public static native void avcodeccontext_set_bit_rate_tolerance(long ctx,
        int bit_rate_tolerance);
/**
 * Sets the number of channels of the specified <tt>AVCodecContext</tt>. The
 * property is audio only.
 *
 * @param ctx the <tt>AVCodecContext</tt> to set the number of channels of
 * @param channels the number of channels to set to the specified
 * <tt>AVCodecContext</tt>
 */
public static native void avcodeccontext_set_channels(
        long ctx, int channels);
/** Sets the {@code channel_layout} field of the given AVCodecContext. */
public static native void avcodeccontext_set_channel_layout(
        long ctx, int channelLayout);
/** Sets the {@code chromaoffset} field of the given AVCodecContext. */
public static native void avcodeccontext_set_chromaoffset(long ctx,
        int chromaoffset);
/**
 * Sets the maximum number of pictures in a group of pictures i.e. the
 * maximum interval between keyframes.
 *
 * @param ctx the <tt>AVCodecContext</tt> to set the <tt>gop_size</tt> of
 * @param gop_size the maximum number of pictures in a group of pictures
 * i.e. the maximum interval between keyframes
 */
public static native void avcodeccontext_set_gop_size(long ctx,
        int gop_size);
/** Sets the {@code i_quant_factor} field of the given AVCodecContext. */
public static native void avcodeccontext_set_i_quant_factor(long ctx,
        float i_quant_factor);
/**
 * Sets the minimum GOP size.
 *
 * @param ctx the <tt>AVCodecContext</tt> to set the minimum GOP size of
 * @param keyint_min the minimum GOP size to set on <tt>ctx</tt>
 */
public static native void avcodeccontext_set_keyint_min(long ctx,
        int keyint_min);
/**
 * Set the maximum B frames.
 *
 * @param ctx the <tt>AVCodecContext</tt> to set the maximum B frames of
 * @param max_b_frames maximum B frames
 */
public static native void avcodeccontext_set_max_b_frames(long ctx,
        int max_b_frames);
/** Sets the {@code mb_decision} field (e.g. {@link #FF_MB_DECISION_SIMPLE}). */
public static native void avcodeccontext_set_mb_decision(long ctx,
        int mb_decision);
/** Sets the {@code me_cmp} field (e.g. {@link #FF_CMP_CHROMA}). */
public static native void avcodeccontext_set_me_cmp(long ctx, int me_cmp);
/** Sets the {@code me_method} field of the given AVCodecContext. */
public static native void avcodeccontext_set_me_method(long ctx,
        int me_method);
/** Sets the {@code me_range} field of the given AVCodecContext. */
public static native void avcodeccontext_set_me_range(long ctx,
        int me_range);
/** Sets the {@code me_subpel_quality} field of the given AVCodecContext. */
public static native void avcodeccontext_set_me_subpel_quality(long ctx,
        int me_subpel_quality);
/**
 * Set the pixel format.
 *
 * @param ctx the <tt>AVCodecContext</tt> to set the pixel format of
 * @param pix_fmt pixel format
 */
public static native void avcodeccontext_set_pix_fmt(long ctx,
        int pix_fmt);
/** Sets the {@code profile} field (e.g. {@link #FF_PROFILE_H264_MAIN}). */
public static native void avcodeccontext_set_profile(long ctx,
        int profile);
/** Sets the {@code qcompress} field of the given AVCodecContext. */
public static native void avcodeccontext_set_qcompress(long ctx,
        float qcompress);
/** Sets the quantizer bounds ({@code qmin}, {@code qmax}, {@code max_qdiff}). */
public static native void avcodeccontext_set_quantizer(long ctx,
        int qmin, int qmax, int max_qdiff);
/** Sets the {@code rc_buffer_size} field of the given AVCodecContext. */
public static native void avcodeccontext_set_rc_buffer_size(long ctx,
        int rc_buffer_size);
/** Sets the {@code rc_eq} (rate-control equation) field. */
public static native void avcodeccontext_set_rc_eq(long ctx, String rc_eq);
/** Sets the {@code rc_max_rate} field of the given AVCodecContext. */
public static native void avcodeccontext_set_rc_max_rate(long ctx,
        int rc_max_rate);
/** Sets the {@code refs} (reference frames) field of the given AVCodecContext. */
public static native void avcodeccontext_set_refs(long ctx,
        int refs);
/**
 * Set the RTP payload size.
 *
 * @param ctx the <tt>AVCodecContext</tt> to set the RTP payload size of
 * @param rtp_payload_size RTP payload size
 */
public static native void avcodeccontext_set_rtp_payload_size(long ctx,
        int rtp_payload_size);
/** Sets the sample aspect ratio as the rational {@code num/den}. */
public static native void avcodeccontext_set_sample_aspect_ratio(
        long ctx, int num, int den);
/** Sets the {@code sample_fmt} field (e.g. {@link #AV_SAMPLE_FMT_S16}). */
public static native void avcodeccontext_set_sample_fmt(
        long ctx, int sample_fmt);
/**
 * Sets the samples per second of the specified <tt>AVCodecContext</tt>. The
 * property is audio only.
 *
 * @param ctx the <tt>AVCodecContext</tt> to set the samples per second of
 * @param sample_rate the samples per second to set to the specified
 * <tt>AVCodecContext</tt>
 */
public static native void avcodeccontext_set_sample_rate(
        long ctx, int sample_rate);
/**
 * Set the scene change threshold (in percent).
 *
 * @param ctx AVCodecContext pointer
 * @param scenechange_threshold value between 0 and 100
 */
public static native void avcodeccontext_set_scenechange_threshold(
        long ctx, int scenechange_threshold);
/**
* Set the size of the video.
*
* @param ctx pointer to AVCodecContext
* @param width video width
* @param height video height
*/
public static native void avcodeccontext_set_size(long ctx, int width,
int height);
/**
* Set the number of thread.
*
* @param ctx the <tt>AVCodecContext</tt> to set the number of thread of
* @param thread_count number of thread to set
*/
public static native void avcodeccontext_set_thread_count(long ctx,
int thread_count);
public static native void avcodeccontext_set_ticks_per_frame(long ctx,
int ticks_per_frame);
public static native void avcodeccontext_set_time_base(long ctx, int num,
int den);
public static native void avcodeccontext_set_trellis(long ctx,
int trellis);
public static native void avcodeccontext_set_workaround_bugs(long ctx,
int workaround_bugs);
/**
 * Allocates a new <tt>AVFilterGraph</tt> instance.
 *
 * @return a pointer to the newly-allocated <tt>AVFilterGraph</tt> instance
 */
public static native long avfilter_graph_alloc();

/**
 * Checks the validity and configures all the links and formats in a
 * specific <tt>AVFilterGraph</tt> instance.
 *
 * @param graph a pointer to the <tt>AVFilterGraph</tt> instance to check
 * the validity of and configure
 * @param log_ctx the <tt>AVClass</tt> context to be used for logging
 * @return <tt>0</tt> on success; a negative <tt>AVERROR</tt> on error
 */
public static native int avfilter_graph_config(long graph, long log_ctx);

/**
 * Frees a specific <tt>AVFilterGraph</tt> instance and destroys its links.
 *
 * @param graph a pointer to the <tt>AVFilterGraph</tt> instance to free
 */
public static native void avfilter_graph_free(long graph);

/**
 * Gets a pointer to an <tt>AVFilterContext</tt> instance with a specific
 * name in a specific <tt>AVFilterGraph</tt> instance.
 *
 * @param graph a pointer to the <tt>AVFilterGraph</tt> instance where the
 * <tt>AVFilterContext</tt> instance with the specified name is to be found
 * @param name the name of the <tt>AVFilterContext</tt> instance which is to
 * be found in the specified <tt>graph</tt>
 * @return a pointer to the <tt>AVFilterContext</tt> instance with the
 * specified name if found; presumably <tt>0</tt> otherwise — confirm
 * against the native implementation
 */
public static native long avfilter_graph_get_filter(
        long graph,
        String name);

/**
 * Adds a filter graph described by a <tt>String</tt> to a specific
 * <tt>AVFilterGraph</tt> instance.
 *
 * @param graph a pointer to the <tt>AVFilterGraph</tt> instance where to
 * link the parsed graph context
 * @param filters the <tt>String</tt> to be parsed
 * @param inputs a pointer to a linked list to the inputs of the graph if
 * any; otherwise, <tt>0</tt>
 * @param outputs a pointer to a linked list to the outputs of the graph if
 * any; otherwise, <tt>0</tt>
 * @param log_ctx the <tt>AVClass</tt> context to be used for logging
 * @return <tt>0</tt> on success; a negative <tt>AVERROR</tt> on error
 */
public static native int avfilter_graph_parse(
        long graph,
        String filters, long inputs, long outputs, long log_ctx);

/**
 * Initializes the <tt>libavfilter</tt> system and registers all built-in
 * filters.
 */
public static native void avfilter_register_all();

/**
 * Gets the first data plane pointer (<tt>data[0]</tt>) of the specified
 * <tt>AVFrame</tt>.
 *
 * @param frame a pointer to the <tt>AVFrame</tt>
 * @return the first data plane pointer of <tt>frame</tt>
 */
public static native long avframe_get_data0(long frame);

/**
 * Gets the first line size (<tt>linesize[0]</tt>) of the specified
 * <tt>AVFrame</tt>.
 *
 * @param frame a pointer to the <tt>AVFrame</tt>
 * @return the first line size of <tt>frame</tt>
 */
public static native int avframe_get_linesize0(long frame);

/**
 * Gets the presentation timestamp of the specified <tt>AVFrame</tt>.
 *
 * @param frame a pointer to the <tt>AVFrame</tt>
 * @return the presentation timestamp of <tt>frame</tt>
 */
public static native long avframe_get_pts(long frame);

/**
 * Sets the data planes of the specified <tt>AVFrame</tt>: the first plane
 * pointer and the offsets of the second and third planes relative to it.
 *
 * @param frame a pointer to the <tt>AVFrame</tt>
 * @param data0 the pointer to the first data plane
 * @param offset1 the offset of the second data plane
 * @param offset2 the offset of the third data plane
 */
public static native void avframe_set_data(
        long frame,
        long data0, long offset1, long offset2);

/**
 * Sets the key frame flag of the specified <tt>AVFrame</tt>.
 *
 * @param frame a pointer to the <tt>AVFrame</tt>
 * @param key_frame <tt>true</tt> to mark the frame as a key frame;
 * otherwise, <tt>false</tt>
 */
public static native void avframe_set_key_frame(
        long frame,
        boolean key_frame);

/**
 * Sets the line sizes of the first three data planes of the specified
 * <tt>AVFrame</tt>.
 *
 * @param frame a pointer to the <tt>AVFrame</tt>
 * @param linesize0 the line size of the first data plane
 * @param linesize1 the line size of the second data plane
 * @param linesize2 the line size of the third data plane
 */
public static native void avframe_set_linesize(
        long frame,
        int linesize0, int linesize1, int linesize2);

/**
 * Sets the data of the specified <tt>AVPacket</tt> from a Java byte array.
 *
 * @param pkt a pointer to the <tt>AVPacket</tt>
 * @param data the bytes to set as the data of <tt>pkt</tt>
 * @param offset the offset in <tt>data</tt> at which the bytes begin
 * @param length the number of bytes to set
 */
public static native void avpacket_set_data(
        long pkt,
        byte[] data, int offset, int length);

/**
 * Sets up the picture fields of an <tt>AVPicture</tt> based on the
 * specified pixel format and picture dimensions.
 *
 * @param picture a pointer to the <tt>AVPicture</tt>
 * @param ptr a pointer to the buffer which will contain the image data
 * @param pix_fmt the pixel format of the image
 * @param width the width of the image
 * @param height the height of the image
 * @return the size of the image data in bytes on success; a negative value
 * on error — confirm against the native implementation
 */
public static native int avpicture_fill(long picture, long ptr,
        int pix_fmt, int width, int height);

/**
 * Pushes a video frame through a filter graph sink and retrieves the
 * filtered output frame.
 *
 * @param input a pointer to the input frame
 * @param width the width of the video
 * @param height the height of the video
 * @param pixFmt the pixel format of the video
 * @param buffer a pointer to the buffer source of the filter graph
 * @param ffsink a pointer to the sink of the filter graph
 * @param output a pointer to the frame to receive the filtered output
 * @return a native result/pointer value — semantics depend on the native
 * implementation; confirm before relying on it
 */
public static native long get_filtered_video_frame(
        long input, int width, int height, int pixFmt,
        long buffer,
        long ffsink,
        long output);

/**
 * Copies bytes from a native pointer into a Java byte array.
 *
 * @param dst the destination byte array
 * @param dst_offset the offset in <tt>dst</tt> at which to begin writing
 * @param dst_length the number of bytes to copy
 * @param src the native pointer to copy from
 */
public static native void memcpy(byte[] dst, int dst_offset, int dst_length,
        long src);

/**
 * Copies ints from a native pointer into a Java int array.
 *
 * @param dst the destination int array
 * @param dst_offset the offset in <tt>dst</tt> at which to begin writing
 * @param dst_length the number of ints to copy
 * @param src the native pointer to copy from
 */
public static native void memcpy(int[] dst, int dst_offset, int dst_length,
        long src);

/**
 * Copies bytes from a Java byte array to a native pointer.
 *
 * @param dst the native pointer to copy to
 * @param src the source byte array
 * @param src_offset the offset in <tt>src</tt> at which to begin reading
 * @param src_length the number of bytes to copy
 */
public static native void memcpy(long dst, byte[] src, int src_offset,
        int src_length);
/**
 * Get BGR32 pixel format.
 *
 * @return BGR32 pixel format
 */
private static native int PIX_FMT_BGR32();

/**
 * Get BGR32_1 pixel format.
 *
 * @return BGR32_1 pixel format
 */
private static native int PIX_FMT_BGR32_1();

/**
 * Get RGB24 pixel format.
 *
 * @return RGB24 pixel format
 */
private static native int PIX_FMT_RGB24();

/**
 * Get RGB32 pixel format.
 *
 * @return RGB32 pixel format
 */
private static native int PIX_FMT_RGB32();

/**
 * Get RGB32_1 pixel format.
 *
 * @return RGB32_1 pixel format
 */
private static native int PIX_FMT_RGB32_1();

/**
 * Free an SwsContext.
 *
 * @param ctx SwsContext native pointer
 */
public static native void sws_freeContext(long ctx);

/**
 * Get a SwsContext pointer.
 *
 * @param ctx an existing SwsContext which may be reused; otherwise a new
 * context is allocated
 * @param srcW width of source image
 * @param srcH height of source image
 * @param srcFormat image format
 * @param dstW width of destination image
 * @param dstH height destination image
 * @param dstFormat destination format
 * @param flags flags
 * @return cached SwsContext pointer
 */
public static native long sws_getCachedContext(
        long ctx,
        int srcW, int srcH, int srcFormat,
        int dstW, int dstH, int dstFormat,
        int flags);

/**
 * Scale an image whose source is given as a native pointer.
 *
 * @param ctx SwsContext native pointer
 * @param src source image (native pointer)
 * @param srcSliceY slice Y of source image
 * @param srcSliceH slice H of source image
 * @param dst destination image (java type)
 * @param dstFormat destination format
 * @param dstW width of destination image
 * @param dstH height destination image
 * @return 0 if success, -1 otherwise
 */
public static native int sws_scale(
        long ctx,
        long src, int srcSliceY, int srcSliceH,
        Object dst, int dstFormat, int dstW, int dstH);

/**
 * Scale an image whose source is given as a Java object.
 *
 * @param ctx SwsContext native pointer
 * @param src source image (java type)
 * @param srcFormat image format
 * @param srcW width of source image
 * @param srcH height of source image
 * @param srcSliceY slice Y of source image
 * @param srcSliceH slice H of source image
 * @param dst destination image (java type)
 * @param dstFormat destination format
 * @param dstW width of destination image
 * @param dstH height destination image
 * @return 0 if success, -1 otherwise
 */
public static native int sws_scale(
        long ctx,
        Object src, int srcFormat, int srcW, int srcH,
        int srcSliceY, int srcSliceH,
        Object dst, int dstFormat, int dstW, int dstH);
} |
package com.bitplan.rest;
import java.security.Principal;
/**
* general user interface
* @author wf
*
*/
public interface User extends XMLPresentable<User>, Principal{
/**
* @return the id
*/
public abstract String getId();
/**
* @param id the id to set
*/
public abstract void setId(String id);
/**
* @return the name
*/
public abstract String getName();
/**
* @param name the name to set
*/
public abstract void setName(String name);
/**
* @return the firstname
*/
public abstract String getFirstname();
/**
* @param firstname the firstname to set
*/
public abstract void setFirstname(String firstname);
/**
* @return the email
*/
public abstract String getEmail();
/**
* @param email the email to set
*/
public abstract void setEmail(String email);
/**
* @return the encrypted password
*/
public abstract String getPassword();
/**
* @param password the encrypted password to set
*/
public abstract void setPassword(String password);
/**
* @return the comment
*/
public abstract String getComment();
/**
* @param comment the comment to set
*/
public abstract void setComment(String comment);
/**
* @return the role
*/
public String getRole();
/**
* @param role the role to set
*/
public void setRole(String role);
} |
package sophena.rcp.wizards;
import java.util.ArrayList;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.Set;
import java.util.UUID;
import org.eclipse.jface.viewers.ArrayContentProvider;
import org.eclipse.jface.viewers.ITableLabelProvider;
import org.eclipse.jface.viewers.LabelProvider;
import org.eclipse.jface.viewers.TableViewer;
import org.eclipse.jface.window.Window;
import org.eclipse.jface.wizard.Wizard;
import org.eclipse.jface.wizard.WizardDialog;
import org.eclipse.jface.wizard.WizardPage;
import org.eclipse.swt.SWT;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.widgets.Combo;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Text;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sophena.db.daos.BoilerDao;
import sophena.db.daos.ProductGroupDao;
import sophena.db.daos.ProjectDao;
import sophena.model.Boiler;
import sophena.model.FuelSpec;
import sophena.model.Producer;
import sophena.model.ProducerFunction;
import sophena.model.ProductCosts;
import sophena.model.ProductGroup;
import sophena.model.ProductType;
import sophena.model.Project;
import sophena.model.descriptors.ProjectDescriptor;
import sophena.rcp.App;
import sophena.rcp.Icon;
import sophena.rcp.Labels;
import sophena.rcp.M;
import sophena.rcp.editors.producers.ProducerEditor;
import sophena.rcp.navigation.Navigator;
import sophena.rcp.utils.Controls;
import sophena.rcp.utils.Strings;
import sophena.rcp.utils.Tables;
import sophena.rcp.utils.Texts;
import sophena.rcp.utils.UI;
import sophena.rcp.utils.Viewers;
import sophena.utils.Num;
public class ProducerWizard extends Wizard {
private Logger log = LoggerFactory.getLogger(getClass());
private Page page;
private Project project;
public static void open(ProjectDescriptor d) {
if (d == null)
return;
ProjectDao dao = new ProjectDao(App.getDb());
open(dao.get(d.id));
}
public static void open(Project project) {
if (project == null)
return;
ProducerWizard wiz = new ProducerWizard();
wiz.setWindowTitle(M.CreateNewProducer);
wiz.project = project;
WizardDialog dialog = new WizardDialog(UI.shell(), wiz);
dialog.setPageSize(150, 400);
if (dialog.open() == Window.OK)
Navigator.refresh();
}
@Override
public boolean performFinish() {
try {
Producer producer = new Producer();
producer.id = UUID.randomUUID().toString();
page.data.bindToModel(producer);
addFuelSpec(producer);
addCosts(producer);
project.producers.add(producer);
ProjectDao dao = new ProjectDao(App.getDb());
dao.update(project);
Navigator.refresh();
ProducerEditor
.open(project.toDescriptor(), producer.toDescriptor());
return true;
} catch (Exception e) {
log.error("failed to update project with new producer", e);
return false;
}
}
private void addFuelSpec(Producer producer) {
FuelSpec spec = new FuelSpec();
producer.fuelSpec = spec;
spec.taxRate = (double) 19;
spec.waterContent = (double) 20;
}
private void addCosts(Producer producer) {
ProductCosts costs = new ProductCosts();
producer.costs = costs;
Boiler b = producer.boiler;
if (b == null)
return;
if (b.purchasePrice != null)
costs.investment = b.purchasePrice;
ProductCosts.copy(b.group, costs);
}
@Override
public void addPages() {
page = new Page();
addPage(page);
}
private class Page extends WizardPage {
private DataBinding data = new DataBinding();
private Text nameText;
private boolean nameEdited;
private Combo groupCombo;
private TableViewer boilerTable;
private Text rankText;
private Combo functionCombo;
private Page() {
super("ProducerWizardPage", M.CreateNewProducer, null);
}
@Override
public void createControl(Composite parent) {
Composite root = new Composite(parent, SWT.NONE);
setControl(root);
UI.gridLayout(root, 1, 5, 5);
Composite comp = UI.formComposite(root);
UI.gridData(comp, true, false);
nameField(comp);
groupCombo(comp);
boilerTable(root);
functionFields(root);
data.bindToUI();
}
private void nameField(Composite comp) {
nameText = UI.formText(comp, M.Name);
nameEdited = false;
// smart identification if the name was edited by the user
Texts.on(nameText).required().onChanged((t) -> {
Boiler b = Viewers.getFirstSelected(boilerTable);
if (b == null) {
nameEdited = true;
} else {
nameEdited = !Strings.nullOrEqual(t, b.name);
}
});
}
private void groupCombo(Composite comp) {
groupCombo = UI.formCombo(comp, "Produktgruppe");
Controls.onSelect(groupCombo, e -> {
data.updateBoilers();
data.suggestName();
});
}
private void boilerTable(Composite root) {
Composite composite = new Composite(root, SWT.NONE);
UI.gridData(composite, true, true);
UI.gridLayout(composite, 1);
boilerTable = Tables.createViewer(composite, "Leistungsbereich",
"Bezeichnung", "Hersteller");
Tables.bindColumnWidths(boilerTable, 0.3, 0.4, 0.3);
boilerTable.setContentProvider(ArrayContentProvider.getInstance());
boilerTable.setLabelProvider(new BoilerLabel());
boilerTable.addSelectionChangedListener((e) -> {
data.suggestName();
data.validate();
});
}
private void functionFields(Composite root) {
Composite composite = new Composite(root, SWT.NONE);
UI.gridLayout(composite, 4);
UI.gridData(composite, true, false);
rankText = UI.formText(composite, "Rang");
Texts.on(rankText).integer().required().validate(data::validate);
functionCombo = UI.formCombo(composite, "Funktion");
}
private class BoilerLabel extends LabelProvider
implements ITableLabelProvider {
@Override
public Image getColumnImage(Object elem, int col) {
return col == 0 ? Icon.BOILER_16.img() : null;
}
@Override
public String getColumnText(Object elem, int col) {
if (!(elem instanceof Boiler))
return null;
Boiler b = (Boiler) elem;
switch (col) {
case 0:
return Num.str(b.minPower) + " - "
+ Num.str(b.maxPower) + " kW";
case 1:
return b.name;
case 2:
return b.manufacturer != null ? b.manufacturer.name : null;
default:
return null;
}
}
}
private class DataBinding {
private void bindToModel(Producer producer) {
if (producer == null)
return;
Boiler b = Viewers.getFirstSelected(boilerTable);
producer.boiler = b;
producer.name = nameText.getText();
producer.rank = Texts.getInt(rankText);
int fnIdx = functionCombo.getSelectionIndex();
if (fnIdx == 0)
producer.function = ProducerFunction.BASE_LOAD;
else
producer.function = ProducerFunction.PEAK_LOAD;
}
private void bindToUI() {
String[] groupItems = getGroupItems();
groupCombo.setItems(groupItems);
groupCombo.select(groupItems.length > 1 ? 1 : 0);
Texts.set(rankText, getNextRank());
updateBoilers();
fillFunctionCombo();
setPageComplete(false);
}
private void suggestName() {
if (nameEdited && !Texts.isEmpty(nameText))
return;
Boiler b = Viewers.getFirstSelected(boilerTable);
if (b == null)
nameText.setText("");
else
Texts.set(nameText, b.name);
}
private String[] getGroupItems() {
java.util.List<String> list = new ArrayList<>();
list.add("");
ProductGroupDao dao = new ProductGroupDao(App.getDb());
EnumSet<ProductType> types = EnumSet.of(
ProductType.BIOMASS_BOILER,
ProductType.FOSSIL_FUEL_BOILER,
ProductType.COGENERATION_PLANT);
for (ProductGroup g : dao.getAll()) {
if (g.name == null || g.type == null)
continue;
if (types.contains(g.type)) {
list.add(g.name);
}
}
Collections.sort(list);
return list.toArray(new String[list.size()]);
}
private int getNextRank() {
Set<Integer> set = new HashSet<>();
for (Producer p : project.producers)
set.add(p.rank);
int next = 1;
while (set.contains(next))
next++;
return next;
}
private void fillFunctionCombo() {
String[] items = new String[2];
items[0] = Labels.get(ProducerFunction.BASE_LOAD);
items[1] = Labels.get(ProducerFunction.PEAK_LOAD);
int selection = 0;
for (Producer p : project.producers) {
if (p.function == ProducerFunction.BASE_LOAD) {
selection = 1;
break;
}
}
functionCombo.setItems(items);
functionCombo.select(selection);
}
private void updateBoilers() {
BoilerDao dao = new BoilerDao(App.getDb());
ArrayList<Boiler> input = new ArrayList<>();
for (Boiler b : dao.getAll()) {
if (matchGroup(b)) {
input.add(b);
}
}
input.sort((b1, b2) -> {
if (Math.abs(b1.minPower - b2.minPower) > 0.1)
return Double.compare(b1.minPower, b2.minPower);
if (Math.abs(b1.maxPower - b2.maxPower) > 0.1)
return Double.compare(b1.maxPower, b2.maxPower);
return Strings.compare(b1.name, b2.name);
});
boilerTable.setInput(input);
setPageComplete(false);
}
private boolean matchGroup(Boiler b) {
if (b == null)
return false;
int idx = groupCombo.getSelectionIndex();
String group = groupCombo.getItem(idx);
if (group.equals(""))
return true;
if (b.group == null || b.group.name == null)
return false;
return Strings.nullOrEqual(group, b.group.name);
}
private boolean validate() {
if (!Texts.hasNumber(rankText)) {
setPageComplete(false);
setErrorMessage("Der Rang muss ein numerischer Wert sein");
return false;
}
setErrorMessage(null);
if (Viewers.getFirstSelected(boilerTable) == null) {
setPageComplete(false);
return false;
} else {
setPageComplete(true);
return true;
}
}
}
}
} |
package com.karateca.ddescriber.dialog;
import com.intellij.openapi.actionSystem.CustomShortcutSet;
import com.intellij.openapi.actionSystem.KeyboardShortcut;
import com.intellij.openapi.actionSystem.Shortcut;
import com.intellij.openapi.actionSystem.ShortcutSet;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.ui.SpeedSearchComparator;
import com.intellij.ui.TreeSpeedSearch;
import com.intellij.ui.components.JBScrollPane;
import com.intellij.ui.treeStructure.Tree;
import com.intellij.util.ui.tree.TreeUtil;
import com.karateca.ddescriber.JasmineTreeUtil;
import com.karateca.ddescriber.VoidFunction;
import com.karateca.ddescriber.model.JasmineFile;
import com.karateca.ddescriber.model.TestCounts;
import com.karateca.ddescriber.model.TestFindResult;
import com.karateca.ddescriber.model.TestState;
import com.karateca.ddescriber.model.TreeNode;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.TreePath;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.InputEvent;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.List;
/**
* @author Andres Dominguez.
*/
public class TreeViewDialog extends DialogWrapper {
public static final int CLEAN_CURRENT_EXIT_CODE = 100;
public static final int GO_TO_TEST_EXIT_CODE = 101;
public static final int EXCLUDE_EXIT_CODE = 104;
private static final int VISIBLE_ROW_COUNT = 17;
private final int caretOffset;
private Tree tree;
private TestFindResult selectedTest;
private final JasmineFile jasmineFile;
private final PendingChanges pendingChanges;
public TreeViewDialog(Project project, JasmineFile jasmineFile, int caretOffset) {
super(project);
this.jasmineFile = jasmineFile;
this.caretOffset = caretOffset;
pendingChanges = new PendingChanges();
init();
setTitle("Select the Test or Suite to Add / Remove");
}
@Nullable
@Override
protected JComponent createCenterPanel() {
final TestFindResult closest = jasmineFile.getClosestTestFromCaret(caretOffset);
// Build the tree.
TreeNode root = jasmineFile.getTreeNode();
tree = new Tree(root);
tree.setVisibleRowCount(VISIBLE_ROW_COUNT);
tree.setCellRenderer(new CustomTreeCellRenderer());
// Check if there are multiple describes in the file.
if (root.getUserObject() instanceof String) {
tree.setRootVisible(false);
}
TreeUtil.expandAll(tree);
// Add search, make it case insensitive.
new TreeSpeedSearch(tree) {
@Override
protected boolean compare(String text, String pattern) {
return super.compare(text.toLowerCase(), pattern.toLowerCase());
}
}.setComparator(new SpeedSearchComparator(false));
addKeyAndMouseEvents();
JBScrollPane scrollPane = new JBScrollPane(tree);
selectClosestTest(root, closest);
JPanel panel = new JPanel(new BorderLayout());
panel.add(BorderLayout.CENTER, scrollPane);
panel.add(BorderLayout.SOUTH, createPanelWithLabels());
return panel;
}
private JPanel createPanelWithLabels() {
JPanel panel = new JPanel(new BorderLayout());
TestCounts testCounts = jasmineFile.getTestCounts();
String values = String.format("Tests: %s, Excluded: %s, Included: %s", testCounts.getTestCount(),
testCounts.getExcludedCount(), testCounts.getIncludedCount());
panel.add(BorderLayout.CENTER, new JLabel(values));
return panel;
}
private void addKeyAndMouseEvents() {
// Perform the OK action on enter.
tree.addKeyListener(new KeyAdapter() {
@Override
public void keyPressed(KeyEvent keyEvent) {
if (keyEvent.getKeyCode() == KeyEvent.VK_ENTER) {
doOKAction();
}
}
});
// Go to the test on double click.
JasmineTreeUtil.addDoubleClickListener(tree, new VoidFunction<TreePath>() {
@Override
public void fun(TreePath treePath) {
nodeWasDoubleClicked(treePath);
}
});
}
private void nodeWasDoubleClicked(TreePath selPath) {
DefaultMutableTreeNode lastPathComponent = (DefaultMutableTreeNode) selPath.getLastPathComponent();
selectedTest = (TestFindResult) lastPathComponent.getUserObject();
close(GO_TO_TEST_EXIT_CODE);
}
private void selectClosestTest(DefaultMutableTreeNode root, final TestFindResult closest) {
Enumeration enumeration = root.breadthFirstEnumeration();
while (enumeration.hasMoreElements()) {
DefaultMutableTreeNode node = (DefaultMutableTreeNode) enumeration.nextElement();
if (node.getUserObject() == closest) {
TreePath treePath = new TreePath(node.getPath());
tree.setSelectionPath(treePath);
tree.scrollPathToVisible(treePath);
return;
}
}
}
@Nullable
@Override
public JComponent getPreferredFocusedComponent() {
return tree;
}
public List<TestFindResult> getSelectedValues() {
List<TestFindResult> selected = new ArrayList<TestFindResult>();
for (DefaultMutableTreeNode node : tree.getSelectedNodes(DefaultMutableTreeNode.class, null)) {
selected.add((TestFindResult) node.getUserObject());
}
return selected;
}
@NotNull
@Override
protected Action[] createLeftSideActions() {
return new Action[]{
new DialogWrapperExitAction("Clean file", CLEAN_CURRENT_EXIT_CODE)
};
}
@NotNull
@Override
protected Action[] createActions() {
Action excludeAction = new MyAction("Exclude (Alt E)", TestState.Excluded);
Action includeAction = new MyAction("Include (Alt I)", TestState.Included);
ShortcutSet ALT_X = new CustomShortcutSet(KeyStroke.getKeyStroke(KeyEvent.VK_X, InputEvent.ALT_DOWN_MASK));
ShortcutSet ALT_I = new CustomShortcutSet(KeyStroke.getKeyStroke(KeyEvent.VK_I, InputEvent.ALT_DOWN_MASK));
registerForEveryKeyboardShortcut(excludeAction, ALT_X);
registerForEveryKeyboardShortcut(includeAction, ALT_I);
return new Action[]{
excludeAction,
includeAction,
getCancelAction(),
getOKAction()
};
}
// todo: fix this
private void registerForEveryKeyboardShortcut(ActionListener action, @NotNull ShortcutSet shortcuts) {
for (Shortcut shortcut : shortcuts.getShortcuts()) {
if (shortcut instanceof KeyboardShortcut) {
KeyboardShortcut ks = (KeyboardShortcut) shortcut;
KeyStroke first = ks.getFirstKeyStroke();
KeyStroke second = ks.getSecondKeyStroke();
if (second == null) {
getRootPane().registerKeyboardAction(action, first, JComponent.WHEN_IN_FOCUSED_WINDOW);
}
}
}
}
public TestFindResult getSelectedTest() {
return selectedTest;
}
class MyAction extends DialogWrapperAction {
private final TestState changeState;
MyAction(String name, TestState changeState) {
super(name);
this.changeState = changeState;
}
@Override
protected void doAction(ActionEvent e) {
for (TestFindResult testFindResult : getSelectedValues()) {
pendingChanges.itemChanged(testFindResult, changeState);
}
tree.repaint();
}
}
} |
package com.gooddata;
import com.gooddata.account.AccountService;
import com.gooddata.auditevent.AuditEventService;
import com.gooddata.connector.ConnectorService;
import com.gooddata.dataload.OutputStageService;
import com.gooddata.dataload.processes.ProcessService;
import com.gooddata.executeafm.ExecuteAfmService;
import com.gooddata.export.ExportService;
import com.gooddata.featureflag.FeatureFlagService;
import com.gooddata.gdc.Header;
import com.gooddata.lcm.LcmService;
import com.gooddata.md.maintenance.ExportImportService;
import com.gooddata.notification.NotificationService;
import com.gooddata.projecttemplate.ProjectTemplateService;
import com.gooddata.retry.RetrySettings;
import com.gooddata.retry.GetServerErrorRetryStrategy;
import com.gooddata.retry.RetryableRestTemplate;
import com.gooddata.util.ResponseErrorHandler;
import com.gooddata.authentication.LoginPasswordAuthentication;
import com.gooddata.warehouse.WarehouseService;
import com.gooddata.dataset.DatasetService;
import com.gooddata.gdc.DataStoreService;
import com.gooddata.gdc.GdcService;
import com.gooddata.md.MetadataService;
import com.gooddata.model.ModelService;
import com.gooddata.project.ProjectService;
import com.gooddata.report.ReportService;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.client.HttpClient;
import org.apache.http.client.config.CookieSpecs;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.config.SocketConfig;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.util.VersionInfo;
import org.springframework.context.annotation.Bean;
import org.springframework.http.MediaType;
import org.springframework.http.client.HttpComponentsClientHttpRequestFactory;
import org.springframework.util.StreamUtils;
import org.springframework.web.client.RestTemplate;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.HashMap;
import java.util.Map;
import static com.gooddata.util.Validate.notNull;
import static java.util.Arrays.asList;
import static org.apache.http.util.VersionInfo.loadVersionInfo;
/**
* Entry point for GoodData SDK usage.
* <p>
* Configure connection to GoodData using one of constructors. One can then get initialized service he needs from
* the newly constructed instance. This instance can be also used later for logout from GoodData Platform.
* <p>
* Usage example:
* <pre><code>
* GoodData gd = new GoodData("roman@gooddata.com", "Roman1");
* // do something useful like: gd.getSomeService().doSomething()
* gd.logout();
* </code></pre>
*/
public class GoodData {
/**
* @deprecated use {@link Header#GDC_REQUEST_ID} instead.
*/
@Deprecated
public static final String GDC_REQUEST_ID_HEADER = Header.GDC_REQUEST_ID;
protected static final String PROTOCOL = GoodDataEndpoint.PROTOCOL;
protected static final int PORT = GoodDataEndpoint.PORT;
protected static final String HOSTNAME = GoodDataEndpoint.HOSTNAME;
private static final String UNKNOWN_VERSION = "UNKNOWN";
private final RestTemplate restTemplate;
private final HttpClient httpClient;
private final AccountService accountService;
private final ProjectService projectService;
private final MetadataService metadataService;
private final ModelService modelService;
private final GdcService gdcService;
private final DataStoreService dataStoreService;
private final DatasetService datasetService;
@SuppressWarnings("deprecation")
private final ReportService reportService;
private final ConnectorService connectorService;
private final ProcessService processService;
private final WarehouseService warehouseService;
private final NotificationService notificationService;
private final ExportImportService exportImportService;
private final FeatureFlagService featureFlagService;
private final OutputStageService outputStageService;
private final ProjectTemplateService projectTemplateService;
private final ExportService exportService;
private final AuditEventService auditEventService;
private final ExecuteAfmService executeAfmService;
private final LcmService lcmService;
/**
 * Create instance configured to communicate with GoodData Platform under user with given credentials.
 * Connects to the default host ({@code GoodDataEndpoint.HOSTNAME}) with default {@link GoodDataSettings}.
 *
 * @param login GoodData user's login
 * @param password GoodData user's password
 */
public GoodData(String login, String password) {
    this(HOSTNAME, login, password, new GoodDataSettings());
}
/**
 * Create instance configured to communicate with GoodData Platform under user with given credentials.
 * Connects to the default host ({@code GoodDataEndpoint.HOSTNAME}).
 *
 * @param login GoodData user's login
 * @param password GoodData user's password
 * @param settings additional settings
 */
public GoodData(String login, String password, GoodDataSettings settings) {
    this(HOSTNAME, login, password, settings);
}
/**
 * Create instance configured to communicate with GoodData Platform running on given host using given user's
 * credentials. Uses the default port, protocol, and {@link GoodDataSettings}.
 *
 * @param hostname GoodData Platform's host name (e.g. secure.gooddata.com)
 * @param login GoodData user's login
 * @param password GoodData user's password
 */
public GoodData(String hostname, String login, String password) {
    this(hostname, login, password, PORT, PROTOCOL, new GoodDataSettings());
}
/**
 * Create instance configured to communicate with GoodData Platform running on given host using given user's
 * credentials. Uses the default port and protocol.
 *
 * @param hostname GoodData Platform's host name (e.g. secure.gooddata.com)
 * @param login GoodData user's login
 * @param password GoodData user's password
 * @param settings additional settings
 */
public GoodData(String hostname, String login, String password, GoodDataSettings settings) {
    this(hostname, login, password, PORT, PROTOCOL, settings);
}
/**
 * Create instance configured to communicate with GoodData Platform running on given host and port using given user's
 * credentials. Uses the default protocol and {@link GoodDataSettings}.
 *
 * @param hostname GoodData Platform's host name (e.g. secure.gooddata.com)
 * @param login GoodData user's login
 * @param password GoodData user's password
 * @param port GoodData Platform's API port (e.g. 443)
 */
public GoodData(String hostname, String login, String password, int port) {
    this(hostname, login, password, port, PROTOCOL, new GoodDataSettings());
}
/**
 * Create instance configured to communicate with GoodData Platform running on given host and port using given user's
 * credentials. Uses the default protocol.
 *
 * @param hostname GoodData Platform's host name (e.g. secure.gooddata.com)
 * @param login GoodData user's login
 * @param password GoodData user's password
 * @param port GoodData Platform's API port (e.g. 443)
 * @param settings additional settings
 */
public GoodData(String hostname, String login, String password, int port, GoodDataSettings settings) {
    this(hostname, login, password, port, PROTOCOL, settings);
}
/**
* Create instance configured to communicate with GoodData Platform running on given host, port and protocol using
* given user's credentials.
*
* @param hostname GoodData Platform's host name (e.g. secure.gooddata.com)
* @param login GoodData user's login
* @param password GoodData user's password
* @param port GoodData Platform's API port (e.g. 443)
* @param protocol GoodData Platform's API protocol (e.g. https)
* @param settings additional settings
*/
protected GoodData(String hostname, String login, String password, int port, String protocol, GoodDataSettings settings) {
this(
new GoodDataEndpoint(hostname, port, protocol),
new LoginPasswordAuthentication(login, password),
settings
);
}
/**
* Create instance configured to communicate with GoodData Platform running on given endpoint and using
* given http client factory.
*
* @param endpoint GoodData Platform's endpoint
* @param authentication authentication
*/
protected GoodData(GoodDataEndpoint endpoint, Authentication authentication) {
this(endpoint, authentication, new GoodDataSettings());
}
    /**
     * Create instance configured to communicate with GoodData Platform running on given endpoint and using
     * given http client factory. This is the terminal constructor: it builds the
     * HTTP client and REST template and wires up every service exposed by this
     * facade.
     *
     * @param endpoint GoodData Platform's endpoint
     * @param authentication authentication
     * @param settings additional settings
     */
    @SuppressWarnings("deprecation")
    protected GoodData(GoodDataEndpoint endpoint, Authentication authentication, GoodDataSettings settings) {
        httpClient = authentication.createHttpClient(endpoint, createHttpClientBuilder(settings));
        restTemplate = createRestTemplate(endpoint, httpClient, settings.getRetrySettings());
        // NOTE: initialization order matters below — several services take
        // previously constructed services as constructor arguments
        // (e.g. projectService needs accountService; datasetService needs
        // dataStoreService), so do not reorder these assignments.
        accountService = new AccountService(getRestTemplate(), settings);
        projectService = new ProjectService(getRestTemplate(), accountService, settings);
        metadataService = new MetadataService(getRestTemplate(), settings);
        modelService = new ModelService(getRestTemplate(), settings);
        gdcService = new GdcService(getRestTemplate(), settings);
        dataStoreService = new DataStoreService(getHttpClient(), getRestTemplate(), gdcService, endpoint.toUri());
        datasetService = new DatasetService(getRestTemplate(), dataStoreService, settings);
        exportService = new ExportService(getRestTemplate(), endpoint, settings);
        reportService = new ReportService(exportService, getRestTemplate(), settings);
        processService = new ProcessService(getRestTemplate(), accountService, dataStoreService, settings);
        warehouseService = new WarehouseService(getRestTemplate(), settings);
        connectorService = new ConnectorService(getRestTemplate(), projectService, settings);
        notificationService = new NotificationService(getRestTemplate(), settings);
        exportImportService = new ExportImportService(getRestTemplate(), settings);
        featureFlagService = new FeatureFlagService(getRestTemplate(), settings);
        outputStageService = new OutputStageService(getRestTemplate(), settings);
        projectTemplateService = new ProjectTemplateService(getRestTemplate(), settings);
        auditEventService = new AuditEventService(getRestTemplate(), accountService, settings);
        executeAfmService = new ExecuteAfmService(getRestTemplate(), settings);
        lcmService = new LcmService(getRestTemplate(), settings);
    }
static RestTemplate createRestTemplate(GoodDataEndpoint endpoint, HttpClient httpClient, RetrySettings retrySettings) {
notNull(endpoint, "endpoint");
notNull(httpClient, "httpClient");
final UriPrefixingClientHttpRequestFactory factory = new UriPrefixingClientHttpRequestFactory(
new HttpComponentsClientHttpRequestFactory(httpClient),
endpoint.toUri()
);
final Map<String, String> presetHeaders = new HashMap<>(2);
presetHeaders.put("Accept", MediaType.APPLICATION_JSON_VALUE);
presetHeaders.put(Header.GDC_VERSION, readApiVersion());
final RestTemplate restTemplate;
if (retrySettings == null) {
restTemplate = new RestTemplate(factory);
} else {
restTemplate = RetryableRestTemplate.create(retrySettings, factory);
}
restTemplate.setInterceptors(asList(
new HeaderSettingRequestInterceptor(presetHeaders),
new DeprecationWarningRequestInterceptor()));
restTemplate.setErrorHandler(new ResponseErrorHandler(restTemplate.getMessageConverters()));
return restTemplate;
}
private HttpClientBuilder createHttpClientBuilder(final GoodDataSettings settings) {
final PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager();
connectionManager.setDefaultMaxPerRoute(settings.getMaxConnections());
connectionManager.setMaxTotal(settings.getMaxConnections());
final SocketConfig.Builder socketConfig = SocketConfig.copy(SocketConfig.DEFAULT);
socketConfig.setSoTimeout(settings.getSocketTimeout());
connectionManager.setDefaultSocketConfig(socketConfig.build());
final RequestConfig.Builder requestConfig = RequestConfig.copy(RequestConfig.DEFAULT);
requestConfig.setConnectTimeout(settings.getConnectionTimeout());
requestConfig.setConnectionRequestTimeout(settings.getConnectionRequestTimeout());
requestConfig.setSocketTimeout(settings.getSocketTimeout());
requestConfig.setCookieSpec(CookieSpecs.STANDARD);
return HttpClientBuilder.create()
.setUserAgent(StringUtils.isNotBlank(settings.getUserAgent()) ? String.format("%s %s", settings.getUserAgent(), getUserAgent()) : getUserAgent())
.setConnectionManager(connectionManager)
.setDefaultRequestConfig(requestConfig.build());
}
private String getUserAgent() {
final Package pkg = Package.getPackage("com.gooddata");
final String clientVersion = pkg != null && pkg.getImplementationVersion() != null
? pkg.getImplementationVersion() : UNKNOWN_VERSION;
final VersionInfo vi = loadVersionInfo("org.apache.http.client", HttpClientBuilder.class.getClassLoader());
final String apacheVersion = vi != null ? vi.getRelease() : UNKNOWN_VERSION;
return String.format("%s/%s (%s; %s) %s/%s", "GoodData-Java-SDK", clientVersion,
System.getProperty("os.name"), System.getProperty("java.specification.version"),
"Apache-HttpClient", apacheVersion);
}
private static String readApiVersion() {
try {
return StreamUtils.copyToString(GoodData.class.getResourceAsStream("/GoodDataApiVersion"), Charset.defaultCharset());
} catch (IOException e) {
throw new IllegalStateException("Cannot read GoodDataApiVersion from classpath", e);
}
}
/**
* Get the configured {@link RestTemplate} used by the library.
* This is the extension point for inheriting classes providing additional services.
* @return REST template
*/
protected final RestTemplate getRestTemplate() {
return restTemplate;
}
/**
* Get the configured {@link HttpClient} used by the library.
* This is the extension point for inheriting classes providing additional services.
* @return HTTP client
*/
protected final HttpClient getHttpClient() {
return httpClient;
}
/**
* Logout from GoodData Platform
*/
public void logout() {
getAccountService().logout();
}
/**
* Get initialized service for project management (to list projects, create a project, ...)
*
* @return initialized service for project management
*/
@Bean
public ProjectService getProjectService() {
return projectService;
}
/**
* Get initialized service for account management (to get current account information, logout, ...)
*
* @return initialized service for account management
*/
@Bean
public AccountService getAccountService() {
return accountService;
}
/**
* Get initialized service for metadata management (to query, create and update project metadata like attributes,
* facts, metrics, reports, ...)
*
* @return initialized service for metadata management
*/
@Bean
public MetadataService getMetadataService() {
return metadataService;
}
/**
* Get initialized service for model management (to get model diff, update model, ...)
*
* @return initialized service for model management
*/
@Bean
public ModelService getModelService() {
return modelService;
}
/**
* Get initialized service for API root management (to get API root links, ...)
*
* @return initialized service for API root management
*/
@Bean
public GdcService getGdcService() {
return gdcService;
}
/**
* Get initialized service for data store (user staging/WebDAV) management (to upload, download, delete, ...)
*
* @return initialized service for data store management
*/
@Bean
public DataStoreService getDataStoreService() {
return dataStoreService;
}
/**
* Get initialized service for dataset management (to list manifest, get datasets, load dataset, ...)
*
* @return initialized service for dataset management
*/
@Bean
public DatasetService getDatasetService() {
return datasetService;
}
/**
* Get initialized service for report management (to execute and export report, ...)
*
* @return initialized service for report management
*/
@Bean
@SuppressWarnings("deprecation")
public ReportService getReportService() {
return reportService;
}
/**
* Get initialized service for exports management (export report,...)
*
* @return initialized service for exports
*/
@Bean
public ExportService getExportService() {
return exportService;
}
/**
* Get initialized service for dataload processes management and process executions.
*
* @return initialized service for dataload processes management and process executions
*/
@Bean
public ProcessService getProcessService() {
return processService;
}
/**
* Get initialized service for ADS management (create, access and delete ads instances).
*
* @return initialized service for ADS management
*/
@Bean
public WarehouseService getWarehouseService() {
return warehouseService;
}
/**
* Get initialized service for connector integration management (create, update, start process, ...).
*
* @return initialized service for connector integration management
*/
@Bean
public ConnectorService getConnectorService() {
return connectorService;
}
/**
* Get initialized service for project notifications management.
*
* @return initialized service for project notifications management
*/
@Bean
public NotificationService getNotificationService() {
return notificationService;
}
/**
* Get initialized service for metadata export/import.
*
* @return initialized service for metadata export/import
*/
@Bean
public ExportImportService getExportImportService() {
return exportImportService;
}
/**
* Get initialized service for feature flag management.
*
* @return initialized service for feature flag management
*/
@Bean
public FeatureFlagService getFeatureFlagService() {
return featureFlagService;
}
/**
* Get initialized service for output stage management.
*
* @return initialized service for output stage management
*/
@Bean
public OutputStageService getOutputStageService() {
return outputStageService;
}
/**
* Get initialized service for project templates
*
* @return initialized service for project templates
*/
@Bean
public ProjectTemplateService getProjectTemplateService() {
return projectTemplateService;
}
/**
* Get initialized service for audit events
* @return initialized service for audit events
*/
@Bean
public AuditEventService getAuditEventService() {
return auditEventService;
}
/**
* Get initialized service for afm execution
* @return initialized service for afm execution
*/
@Bean
public ExecuteAfmService getExecuteAfmService() {
return executeAfmService;
}
/**
* Get initialized service for Life Cycle Management
* @return initialized service for Life Cycle Management
*/
@Bean
public LcmService getLcmService() {
return lcmService;
}
} |
package org.jtrfp.trcl;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.WeakHashMap;
import java.util.concurrent.Future;
import org.apache.commons.collections4.functors.TruePredicate;
import org.apache.commons.math3.geometry.euclidean.threed.Vector3D;
import org.jtrfp.trcl.coll.CollectionActionDispatcher;
import org.jtrfp.trcl.coll.CollectionActionPacker;
import org.jtrfp.trcl.coll.CollectionThreadDecoupler;
import org.jtrfp.trcl.coll.PredicatedORCollectionActionFilter;
import org.jtrfp.trcl.coll.PropertyBasedTagger;
import org.jtrfp.trcl.core.Renderer;
import org.jtrfp.trcl.obj.Positionable;
import com.ochafik.util.Adapter;
import com.ochafik.util.listenable.Pair;
/**
 * Partitions {@link Positionable} objects into a 3D grid of cubic cells so
 * that the objects near a point can be queried without scanning everything.
 * Grids form a tree: a branch grid registers with its parent and is included
 * in the parent's queries. Two code paths coexist, selected by
 * {@code Renderer.NEW_MODE}: the legacy flat element-array path and the newer
 * tagger/valve relevance path.
 * <p>
 * Fixes relative to the previous revision: {@code removeDirect} no longer
 * performs a redundant second removal that stripped a duplicate entry without
 * clearing its containing grid; {@code notifyBranchAdded}/{@code notifyBranchRemoved}
 * and the NEW_MODE path of {@code setParentGrid} now tolerate a null/collected
 * parent reference; a dead local allocation in {@code removeAll} was dropped.
 */
public abstract class SpacePartitioningGrid<E extends Positionable>{
    private double squareSize, viewingRadius;
    private int squaresX, squaresY, squaresZ;
    // Objects reported by every query regardless of position.
    private final List<E> alwaysVisible = new ArrayList<E>(300);
    // Membership set keeping newAdd/newRemove idempotent (localTagger itself
    // is not a set).
    private final HashSet<E> localTaggerSet = new HashSet<E>();
    private WeakReference<SpacePartitioningGrid<E>>
        parentGrid = null;
    // Weak keys: a branch grid that becomes otherwise unreachable is dropped
    // from this map automatically.
    private Map<SpacePartitioningGrid<E>,String>
        branchGrids =
            Collections.synchronizedMap(new WeakHashMap<SpacePartitioningGrid<E>,String>());
    // Quantizes a world-space position into cube space (units of CUBE_GRANULARITY).
    private final com.ochafik.util.listenable.Adapter<Vector3D,Vector3D>cubeSpaceQuantizingAdapter = new com.ochafik.util.listenable.Adapter<Vector3D,Vector3D>(){
        @Override
        public Vector3D adapt(Vector3D value) {
            final int granularity = World.CUBE_GRANULARITY;
            final Vector3D newCenterCube = new Vector3D(
                    Math.rint(value.getX()/granularity),
                    Math.rint(value.getY()/granularity),
                    Math.rint(value.getZ()/granularity));
            return newCenterCube;
        }
    };
    private final CollectionActionDispatcher<Pair<Vector3D,CollectionActionDispatcher<Positionable>>> packedObjectsDispatcher =
            new CollectionActionDispatcher<Pair<Vector3D,CollectionActionDispatcher<Positionable>>>(new ArrayList<Pair<Vector3D,CollectionActionDispatcher<Positionable>>>());
    // Valve: adding a TruePredicate opens it (activate); clearing closes it.
    private final PredicatedORCollectionActionFilter<Pair<Vector3D,CollectionActionDispatcher<Positionable>>> packedObjectValve =
            new PredicatedORCollectionActionFilter<Pair<Vector3D,CollectionActionDispatcher<Positionable>>>(packedObjectsDispatcher);
    private final CollectionActionPacker<Positionable,Vector3D> objectPacker = new CollectionActionPacker<Positionable,Vector3D>(packedObjectValve.input);
    private final PropertyBasedTagger<Positionable, Vector3D, Vector3D> localTagger
        = new PropertyBasedTagger<Positionable, Vector3D, Vector3D>(objectPacker, cubeSpaceQuantizingAdapter, Positionable.POSITIONV3D,World.relevanceExecutor);
    // Legacy path: one lazily-created occupant list per grid cell.
    private List<E> [] elements;
    private double radiusInWorldUnits;
    private int rolloverPoint,
        rawDia,
        rawDiaX,rawDiaY,rawDiaZ,
        xProgression,yProgression,zProgression;
    // Flattens a cube-space coordinate to an index into `elements`.
    private final Adapter<Vector3D,Integer> cubeSpaceRasterizer = new Adapter<Vector3D,Integer>(){
        @Override
        public Integer adapt(Vector3D value) {
            return (int)(
                    value.getX()+
                    value.getY()*squaresX+
                    value.getZ()*squaresX*squaresY);
        }//end adapt()
        @Override
        public Vector3D reAdapt(Integer value) {
            // Not needed by callers; intentionally unimplemented.
            return null;
        }};
    // Flattens a world-space coordinate to an index, wrapping each axis.
    private final Adapter<Vector3D,Integer> worldSpaceRasterizer = new Adapter<Vector3D,Integer>(){
        @Override
        public Integer adapt(Vector3D value) {
            return (int)(
                    (int)absMod(Math.round(value.getX()/getSquareSize()),squaresX)+
                    (int)absMod(Math.round(value.getY()/getSquareSize()),squaresY)*squaresX+
                    (int)absMod(Math.round(value.getZ()/getSquareSize()),squaresZ)*squaresX*squaresY);
        }//end adapt()
        @Override
        public Vector3D reAdapt(Integer value) {
            throw new UnsupportedOperationException();
        }};

    protected SpacePartitioningGrid(){
    }

    /** Makes this grid's contents visible through its parent (or valve in NEW_MODE). */
    public void activate() {
        if(Renderer.NEW_MODE)
            newActivate();
        else{
            if (parentGrid != null) {
                final SpacePartitioningGrid g = parentGrid.get();
                if (g != null)
                    g.addBranch(this);
            }
        }//end(!NEW_MODE)
    }//end activate()

    public Adapter<Vector3D,Integer> getCubeSpaceRasterizer(){
        return cubeSpaceRasterizer;
    }

    public Adapter<Vector3D,Integer> getWorldSpaceRasterizer(){
        return worldSpaceRasterizer;
    }

    /** Hides this grid's contents from its parent (or closes the valve in NEW_MODE). */
    public void deactivate() {
        if(Renderer.NEW_MODE){
            newDeactivate();return;}
        if (parentGrid != null) {
            final SpacePartitioningGrid g = parentGrid.get();
            if (g != null)
                g.removeBranch(this);
        }
    }//end deactivate()

    /** Propagates a branch-added notification up the grid tree. */
    public void notifyBranchAdded(SpacePartitioningGrid b){
        if(parentGrid==null)return;// root grid: nothing above to notify
        final SpacePartitioningGrid<E> g = parentGrid.get();
        if (g != null)
            g.notifyBranchAdded(b);
    }//end notifyBranchAdded(...)

    /** Propagates a branch-removed notification up the grid tree. */
    public void notifyBranchRemoved(SpacePartitioningGrid b){
        if(parentGrid==null)return;// root grid: nothing above to notify
        final SpacePartitioningGrid<E> g = parentGrid.get();
        if (g != null)
            g.notifyBranchRemoved(b);
    }//end notifyBranchRemoved(...)

    private void addBranch(SpacePartitioningGrid<E> branchToAdd){
        if(Renderer.NEW_MODE){
            newAddBranch(branchToAdd);
            return;
        }
        if(!branchGrids.containsKey(branchToAdd)){
            branchGrids.put(branchToAdd,"");
            if(parentGrid==null)return;
            final SpacePartitioningGrid<E> g = parentGrid.get();
            if (g != null)
                g.notifyBranchAdded(branchToAdd);
        }//end if(!contains)
    }//end addBranch(...)

    private void removeBranch(SpacePartitioningGrid<E> branchToRemove){
        if(Renderer.NEW_MODE){
            newRemoveBranch(branchToRemove);
            return;
        }
        if(branchGrids.remove(branchToRemove)!=null){
            if(parentGrid==null)return;
            final SpacePartitioningGrid<E> g = parentGrid.get();
            if (g != null)
                g.notifyBranchRemoved(branchToRemove);
        }//end if(!contains)
    }//end removeBranch(...)

    private void setParentGrid(SpacePartitioningGrid<E> parentGrid){
        if(Renderer.NEW_MODE){
            // Detach from the previous parent if it still exists (the weak
            // reference may have been cleared by GC).
            if(this.parentGrid!=null){
                final SpacePartitioningGrid<E> oldParent = this.parentGrid.get();
                if(oldParent!=null)
                    oldParent.removeBranch(this);
            }
            parentGrid.addBranch(this);
            this.parentGrid=new WeakReference<SpacePartitioningGrid<E>>(parentGrid);
            return;
        }
        this.parentGrid=new WeakReference<SpacePartitioningGrid<E>>(parentGrid);
        // Legacy path: inherit the parent's geometry so flat indices line up.
        setSquareSize(parentGrid.getSquareSize());
        setSquaresX(parentGrid.getSquaresX());
        setSquaresY(parentGrid.getSquaresY());
        setSquaresZ(parentGrid.getSquaresZ());
        setViewingRadius(parentGrid.getViewingRadius());
        allocateSquares();
    }//end setParentGrid(...)

    public SpacePartitioningGrid(Vector3D size, double squareSize, double viewingRadius){
        this();
        if(!Renderer.NEW_MODE){
            setSquareSize(squareSize);
            setSquaresX((int)(size.getX()/squareSize));
            setSquaresY((int)(size.getY()/squareSize));
            setSquaresZ((int)(size.getZ()/squareSize));
            setViewingRadius(viewingRadius);
            allocateSquares();
        }//end if(old mode)
    }//end constructor

    public SpacePartitioningGrid(SpacePartitioningGrid<E> parentGrid)
    {this();setParentGrid(parentGrid);}

    private void allocateSquares(){
        elements = new List[squaresX*squaresY*squaresZ];
        //Fudge factor to fix suddenly appearing terrain at distance
        radiusInWorldUnits   =getViewingRadius()*1.25;
        rolloverPoint        =elements.length;
        rawDia               =(int)((radiusInWorldUnits*2)/getSquareSize());
        // Clamp the scan diameter to the grid dimensions on each axis.
        rawDiaX              =rawDia<getSquaresX()?rawDia:getSquaresX();
        rawDiaY              =rawDia<getSquaresY()?rawDia:getSquaresY();
        rawDiaZ              =rawDia<getSquaresZ()?rawDia:getSquaresZ();
        // Per-axis index strides used by the triple scan loops below.
        zProgression         =getSquaresX()*getSquaresY()-rawDiaY*getSquaresX();
        yProgression         =getSquaresX()-rawDiaX;
        xProgression=1;
    }//end allocateSquares()

    /** Schedules activate() on the relevance executor. */
    public Future<?> nonBlockingActivate(){
        return World.relevanceExecutor.submit(new Runnable(){
            @Override
            public void run() {
                activate();
            }});
    }//end nonBlockingActivate()

    public void blockingActivate(){
        try {nonBlockingActivate().get();}
        catch(Exception e){throw new RuntimeException(e);}
    }//end blockingActivate()

    public void blockingDeactivate(){
        try {nonBlockingDeactivate().get();}
        catch(Exception e){throw new RuntimeException(e);}
    }//end blockingDeactivate()

    /** Schedules deactivate() on the relevance executor. */
    public Future<?> nonBlockingDeactivate(){
        return World.relevanceExecutor.submit(new Runnable(){
            @Override
            public void run() {
                deactivate();
            }});
    }//end nonBlockingDeactivate()

    public synchronized void newActivate(){
        if(!packedObjectValve.contains(TruePredicate.INSTANCE))
            packedObjectValve.add(TruePredicate.INSTANCE);
    }

    public synchronized void newDeactivate(){
        if(packedObjectValve.contains(TruePredicate.INSTANCE))
            packedObjectValve.clear();
    }

    public synchronized void newAdd(E objectToAdd){//TODO: Enforce set instead?
        if(!localTaggerSet.add(objectToAdd))
            return;// already present; keep tagger consistent with the set
        localTagger.add(objectToAdd);
        objectToAdd.setContainingGrid(this);
    }

    public synchronized void newRemove(E objectToRemove){
        if(!localTaggerSet.remove(objectToRemove))
            return;
        localTagger.remove(objectToRemove);
    }

    public synchronized void newAddBranch(SpacePartitioningGrid<E> toAdd){
        toAdd.getPackedObjectsDispatcher().addTarget(packedObjectValve.input, true);
        branchGrids.put(toAdd, null);
    }

    public synchronized void newRemoveBranch(SpacePartitioningGrid<E> toRemove){
        toRemove.getPackedObjectsDispatcher().removeTarget(packedObjectValve.input, true);
        branchGrids.remove(toRemove);
    }

    public CollectionActionDispatcher<Pair<Vector3D,CollectionActionDispatcher<Positionable>>> getPackedObjectsDispatcher(){
        return packedObjectsDispatcher;
    }

    public synchronized void add(E objectWithPosition){//TODO: Remove old
        if(Renderer.NEW_MODE)
            newAdd(objectWithPosition);//TODO: Remove stub
        else{
            //Figure out where it goes
            if(objectWithPosition==null)throw new NullPointerException("Passed objectWithPosition is intolerably null.");
            objectWithPosition.setContainingGrid(this);
        }//end (!NEW_MODE)
    }//end add()

    public synchronized void remove(E objectWithPosition){//TODO Remove old
        if(Renderer.NEW_MODE)
            newRemove(objectWithPosition);//TODO: Remove stub
        else
            objectWithPosition.setContainingGrid(null);
    }//end remove(...)

    /** Modulo that maps negative values into [0, mod) instead of (-mod, 0]. */
    private static double absMod(double value, double mod){
        if(value>=-0.)
            {return value%mod;}
        value*=-1;
        value%=mod;
        if(value==0)return 0;
        return mod-value;
    }//end absMod

    /**
     * Submits every cell-list within the viewing radius of the given center
     * (plus the always-visible list) to the submitter, recursing into branches.
     */
    public void cubesWithinRadiusOf(Vector3D centerInWorldUnits, Submitter<List<E>> submitter){
        recursiveAlwaysVisibleGridCubeSubmit(submitter);
        final double [] startPoint=centerInWorldUnits.subtract(new Vector3D(radiusInWorldUnits,radiusInWorldUnits,radiusInWorldUnits)).toArray();
        int startRaw = worldSpaceRasterizer.adapt(new Vector3D(startPoint[0],startPoint[1],startPoint[2]));
        final int zEnd=startRaw+getSquaresX()*getSquaresY()*rawDiaZ + (rawDiaY*getSquaresX()) + (rawDiaX);
        for(int point=startRaw; point<zEnd; point+=zProgression){
            final int yEnd=point+getSquaresX()*rawDiaY;
            for(;point<yEnd; point+=yProgression){
                final int xEnd=point+rawDiaX;
                for(;point<xEnd; point+=xProgression){
                    final int wrappedPoint=point%rolloverPoint;
                    recursiveGridCubeSubmit(submitter,wrappedPoint);
                }//end for(X)
            }//end for(Y)
        }//end for(Z)
    }//end cubesWithRadiusOf(...)

    /**
     * Submits every element within the viewing radius of the given center
     * (plus the always-visible elements) to the submitter, recursing into
     * branches.
     */
    @SuppressWarnings("unchecked")
    public void itemsWithinRadiusOf(Vector3D centerInWorldUnits, Submitter<E> submitter){
        recursiveAlwaysVisibleSubmit(submitter);
        final double [] startPoint=centerInWorldUnits.subtract(new Vector3D(radiusInWorldUnits,radiusInWorldUnits,radiusInWorldUnits)).toArray();
        int startRaw = worldSpaceRasterizer.adapt(new Vector3D(startPoint[0],startPoint[1],startPoint[2]));
        final int zEnd=startRaw+getSquaresX()*getSquaresY()*rawDiaZ + (rawDiaY*getSquaresX()) + (rawDiaX);
        for(int point=startRaw; point<zEnd; point+=zProgression){
            final int yEnd=point+getSquaresX()*rawDiaY;
            for(;point<yEnd; point+=yProgression){
                final int xEnd=point+rawDiaX;
                for(;point<xEnd; point+=xProgression){
                    final int wrappedPoint=point%rolloverPoint;
                    recursiveBlockSubmit(submitter,wrappedPoint);
                }//end for(X)
            }//end for(Y)
        }//end for(Z)
    }//end itemsInRadiusOf(...)

    private void recursiveAlwaysVisibleSubmit(Submitter<E> sub) {
        sub.submit(alwaysVisible);
        synchronized(branchGrids){
            for(SpacePartitioningGrid<E> g:branchGrids.keySet())
                g.recursiveAlwaysVisibleSubmit(sub);
        }//end sync(branchGrids)
    }// end recursiveAlwaysVisibleSubmit(...)

    private void recursiveAlwaysVisibleGridCubeSubmit(Submitter<List<E>> sub) {
        sub.submit(alwaysVisible);
        synchronized(branchGrids){
            for(SpacePartitioningGrid<E> g:branchGrids.keySet())
                g.recursiveAlwaysVisibleGridCubeSubmit(sub);
        }//end sync(branchGrids)
    }// end recursiveAlwaysVisibleGridCubeSubmit(...)

    private void recursiveBlockSubmit(Submitter<E> sub, int blockID) {
        final List<E> elements = this.elements[blockID];
        if (elements != null) {
            synchronized (elements) {
                final int size = elements.size();
                for (int i = 0; i < size; i++) {
                    sub.submit(elements.get(i));
                }// end for(size)
            }// end sync(elements)
        }// end if(!null)
        synchronized(branchGrids){
            for(SpacePartitioningGrid<E> g:branchGrids.keySet())
                g.recursiveBlockSubmit(sub, blockID);
        }//end sync(branchGrids)
    }// end recursiveBlockSubmit(...)

    private void recursiveGridCubeSubmit(Submitter<List<E>> sub, int blockID) {
        sub.submit(elements[blockID]);
        synchronized(branchGrids){
            for(SpacePartitioningGrid<E> g:branchGrids.keySet())
                g.recursiveGridCubeSubmit(sub, blockID);
        }//end sync(branchGrids)
    }// end recursiveGridCubeSubmit(...)

    private Collection<E> getAlwaysVisible()
    {return alwaysVisible;}

    /**
     * @return the squareSize
     */
    public double getSquareSize(){
        return squareSize;
    }
    /**
     * @param squareSize the squareSize to set
     */
    public void setSquareSize(double squareSize){
        this.squareSize = squareSize;
    }
    /**
     * @return the squaresX
     */
    public int getSquaresX(){
        return squaresX;
    }
    /**
     * @param squaresX the squaresX to set
     */
    public void setSquaresX(int squaresX){
        if(squaresX<=0)throw new RuntimeException("Invalid size: "+squaresX);
        this.squaresX = squaresX;
    }
    /**
     * @return the squaresY
     */
    public int getSquaresY(){
        return squaresY;
    }
    /**
     * @param squaresY the squaresY to set
     */
    public void setSquaresY(int squaresY){
        if(squaresY<=0)throw new RuntimeException("Invalid size: "+squaresY);
        this.squaresY = squaresY;
    }
    /**
     * @return the squaresZ
     */
    public int getSquaresZ(){
        return squaresZ;
    }
    /**
     * @param squaresZ the squaresZ to set
     */
    public void setSquaresZ(int squaresZ){
        if(squaresZ<=0)throw new RuntimeException("Invalid size: "+squaresZ);
        this.squaresZ = squaresZ;
    }
    /**
     * @return the viewingRadius
     */
    public double getViewingRadius(){
        return viewingRadius;
    }
    /**
     * @param viewingRadius the viewingRadius to set
     */
    public void setViewingRadius(double viewingRadius){
        this.viewingRadius = viewingRadius;
    }

    /**
     * Removes one occurrence of the object from the given flat cell and, when
     * found, detaches it from this grid.
     * Fix: the old implementation removed the object a second time after the
     * conditional removal, stripping a duplicate entry without clearing its
     * containing grid.
     */
    public void removeDirect(int flatPos, E objectWithPosition) {
        List<E> list = elements[flatPos];
        if(list==null)
            return;
        synchronized(list){
            if(list.remove(objectWithPosition))
                objectWithPosition.setContainingGrid(null);
        }//end sync(list)
    }//end removeDirect(...)

    public void addDirect(int flatPos, E objectWithPosition) {
        List<E> list = elements[flatPos];
        if(list==null)
            elements[flatPos] = list = new ArrayList<E>(8);
        synchronized(list){
            list.add(objectWithPosition);}
    }//end addDirect(...)

    /**
     * Returns the occupant list for the cell containing the given world
     * coordinates, optionally creating an empty list if absent.
     */
    public List<E> world2List(double x, double y,
            double z, boolean newListIfNull) {
        final int pos = worldSpaceRasterizer.adapt(new Vector3D(x,y,z));
        List<E> result = elements[pos];
        if(newListIfNull && result==null)
            result = elements[pos] = new ArrayList<E>(8);
        return result;
    }//end world2List

    public List<E> getAlwaysVisibleList(){
        return alwaysVisible;
    }

    /** Detaches all branches and removes every object from this grid. */
    public void removeAll(){
        if(Renderer.NEW_MODE){
            // Copy keys first: removeBranch mutates branchGrids.
            final ArrayList<SpacePartitioningGrid> branches = new ArrayList<SpacePartitioningGrid>();
            for(SpacePartitioningGrid g:branchGrids.keySet())
                branches.add(g);
            for(SpacePartitioningGrid g:branches)
                removeBranch(g);
            localTagger.clear();
            return;
        }
        final ArrayList<SpacePartitioningGrid> branches = new ArrayList<SpacePartitioningGrid>();
        for(SpacePartitioningGrid g:branchGrids.keySet())
            branches.add(g);
        for(SpacePartitioningGrid g:branches)
            removeBranch(g);
        // Copy before removing: remove(e) may mutate the underlying lists.
        final ArrayList<E> alwaysVisible = new ArrayList<E>();
        for(E e:getAlwaysVisibleList())
            alwaysVisible.add(e);
        for(E e:alwaysVisible)
            remove(e);
        final ArrayList<E> everythingElse = new ArrayList<E>();
        for(List<E> l:elements)
            if(l!=null)
                for(E e:l)
                    everythingElse.add(e);
        for(E e:everythingElse)
            remove(e);
    }//end removeAll()
}//end SpacePartitionGrid
package com.localhop.swipe.eventlist;
import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.ImageButton;
import android.widget.TextView;
import com.localhop.R;
import com.localhop.objects.DateTime;
import com.localhop.objects.Event;
import com.localhop.utils.ViewUtils;
import java.util.ArrayList;
import java.util.Date;
/**
 * Adapter for displaying custom list items in the Event List tab.
 * <p>
 * The "does this row need a date delimiter?" decision was previously
 * duplicated three times across {@code setItemView} and {@code setViewLayout};
 * it is now centralized in {@link #needsDateDelimiter(int)} so the two
 * methods cannot drift apart.
 */
public class AdapterEventList extends ArrayAdapter<Event> {

    private final Context mContext;
    private final ArrayList<Event> mEventListItems;
    private final String mEventNameSpacing; //< Spacing for the Event Name UI Component

    /**
     * Constructor
     * @param context - The context the adapter is being used in
     * @param itemsArrayList - List of items for the Custom ListView
     */
    public AdapterEventList(Context context, ArrayList<Event> itemsArrayList) {
        super(context, R.layout.list_item_event, itemsArrayList);
        this.mContext = context;
        this.mEventListItems = itemsArrayList;
        this.mEventNameSpacing = "   ";
    } // end of Constructor

    /**
     * Retrieves a single custom list item view for a particular event in the Events List
     * @param position - current Event from the itemsArrayList
     * @param convertView
     * @param parent
     * @return - list item view for a particular event in the Events List
     */
    @Override
    public View getView(int position, View convertView, ViewGroup parent) {
        // Create inflater
        LayoutInflater inflater = (LayoutInflater) mContext
                .getSystemService(Context.LAYOUT_INFLATER_SERVICE);

        // Create the view to be returned, choosing the layout (with or
        // without date delimiter) appropriate for this event.
        View rowView = setViewLayout(position, parent, inflater,
                mEventListItems.get(position).getType());

        // Setup the UI for the new list item view
        setItemView(position, rowView);
        return rowView;
    } // end of function getView()

    /**
     * Return the ListItemEvent object at the indicated position within itemsArrayList
     * @param position
     * @return event at the given position
     */
    @Override
    public Event getItem(int position){
        return mEventListItems.get(position);
    } // end of function getItem()

    /**
     * Populates the UI of an Event List Item view: start time, name,
     * attendees, location/directions, notification badge, and (when the event
     * starts a new day and is not today) the date delimiter header.
     * @param position position of the event within the list
     * @param rowView inflated row view to populate
     */
    public void setItemView(int position, View rowView) {

        // Get the UI components
        TextView tvStartTime = ViewUtils.findViewById(rowView, R.id.tvStartTime);
        TextView tvName = ViewUtils.findViewById(rowView, R.id.tvName);
        TextView tvAttendees = ViewUtils.findViewById(rowView, R.id.tvAttendees);
        TextView tvDirection = ViewUtils.findViewById(rowView, R.id.tvDirection);
        TextView tvNotification = ViewUtils.findViewById(rowView, R.id.tvNotification);
        ImageButton ibDirection = ViewUtils.findViewById(rowView, R.id.ibDirections);

        // Get the current event
        Event event = mEventListItems.get(position);

        // Set the Event Time
        DateTime datetime = new DateTime(getContext(), event.getStartDateTime());
        tvStartTime.setText(datetime.getTimeFormat());

        // Set the remaining UI components
        tvName.setText(mEventNameSpacing + event.getEventName());
        tvAttendees.setText(event.getAttendees().toString());
        tvDirection.setText(event.getLocation());
        ibDirection.setBackgroundResource(R.drawable.ic_directions_selector);
        ibDirection.setOnClickListener(new ImageButton.OnClickListener() {
            @Override
            public void onClick(View v) {
                // TODO: List Item Direction Image Button Click
            }
        });

        // Hide the notification badge when there are no notifications.
        String sNotificationCount = "" + event.getNotificationCount();
        if ( sNotificationCount.compareTo("0") == 0 ) {
            sNotificationCount = "";
        }
        tvNotification.setText(sNotificationCount);

        // Add Date delimiter UI if the event is not today and starts a new day
        if(event.getType() != Event.EventType.Today && needsDateDelimiter(position))
        {
            TextView tvEventListDateDelimiter = ViewUtils.findViewById(rowView, R.id.tvEventListDateDelimiter);

            // Set the date into the format DayOfWeek, Month/Day, Year
            tvEventListDateDelimiter.setText(
                    datetime.getDayOfWeekString() + ", " +
                    datetime.getMonthDayYearFormat());
        }
    } // end of function setItemView()

    /**
     * A row needs a date delimiter when it is the first row, or when its
     * event starts on a different day than the previous row's event.
     * @param position position of the event within the list
     * @return true when a date delimiter header should precede this row
     */
    private boolean needsDateDelimiter(int position) {
        if (position == 0) {
            return true;
        }
        DateTime dateTime = new DateTime(mContext, new Date());
        return !dateTime.compareAreDatesSameDay(
                mEventListItems.get(position).getStartDateTime(),
                mEventListItems.get(position - 1).getStartDateTime());
    } // end of function needsDateDelimiter()

    /**
     * Inflates the layout for the next list item View to be added to the list
     * view; events that are not today and start a new day get the layout with
     * the date delimiter header.
     * @param position position of the event within the list
     * @param parent parent view group
     * @param inflater layout inflater
     * @param type event type (Today vs. future)
     * @return inflated (not yet populated) row view
     */
    public View setViewLayout(int position, ViewGroup parent, LayoutInflater inflater,
                              Event.EventType type) {
        final int layout = (type != Event.EventType.Today && needsDateDelimiter(position))
                ? R.layout.list_item_event_with_date_delimiter
                : R.layout.list_item_event;
        return inflater.inflate(layout, parent, false);
    } // end of function setViewLayout()

} // end of class AdapterEventList.java
package com.jcabi.github;
import com.jcabi.aspects.Immutable;
import com.jcabi.aspects.Loggable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import javax.json.JsonObject;
import javax.json.JsonValue;
import javax.validation.constraints.NotNull;
import lombok.EqualsAndHashCode;
import lombok.ToString;
@Immutable
@SuppressWarnings("PMD.TooManyMethods")
public interface Gist extends JsonReadable {
    /**
     * Github we're in.
     * @return Github
     */
    @NotNull(message = "Github is never NULL")
    Github github();

    /**
     * Get gist identifier.
     * @return Gist identifier
     */
    @NotNull(message = "Identifier is never NULL")
    String identifier();

    /**
     * Read content of a file in the gist, by its name.
     * @param name Name of the file
     * @return Content of the file
     * @throws IOException If there is any I/O problem
     */
    @NotNull(message = "file content is never NULL")
    String read(@NotNull(message = "file name can't be NULL") String name)
        throws IOException;

    /**
     * Write content of a file in the gist, by its name.
     * @param name Name of the file
     * @param content Content to write into the file
     * @throws IOException If there is any I/O problem
     */
    void write(
        @NotNull(message = "file name can't be NULL") String name,
        @NotNull(message = "file content can't be NULL") String content)
        throws IOException;

    /**
     * Star a gist.
     * @throws IOException If there is any I/O problem
     */
    void star() throws IOException;

    /**
     * Unstar a gist.
     * @throws IOException If there is any I/O problem
     */
    void unstar() throws IOException;

    /**
     * Checks if Gist is starred.
     * @throws IOException If there is any I/O problem
     * @return True if gist is starred
     */
    boolean starred() throws IOException;

    /**
     * Fork the gist.
     * @return Forked gist
     * @throws IOException If there is any I/O problem
     */
    @NotNull(message = "gist is never NULL")
    Gist fork() throws IOException;

    /**
     * Get all comments of the gist.
     * @return Comments of this gist
     * @throws IOException If there is any I/O problem
     */
    @NotNull(message = "comments are never NULL")
    GistComments comments() throws IOException;

    /**
     * Smart Gist with extra features.
     *
     * <p>Every {@link Gist} operation is delegated to the encapsulated
     * instance; this class only adds convenience methods on top.
     */
    @Immutable
    @ToString
    @Loggable(Loggable.DEBUG)
    @EqualsAndHashCode(of = "gist")
    final class Smart implements Gist {
        /**
         * Encapsulated gist.
         */
        private final transient Gist gist;

        /**
         * Public ctor.
         * @param gst Gist
         */
        public Smart(final Gist gst) {
            this.gist = gst;
        }

        /**
         * Get gist id.
         * @return Gist id
         */
        @Override
        public String identifier() {
            return this.gist.identifier();
        }

        /**
         * Get a list of all file names in the gist.
         * @return File names
         * @throws IOException If there is any I/O problem
         */
        public Iterable<String> files() throws IOException {
            // The "files" JSON object maps file names to per-file metadata;
            // each entry carries its name again in the "filename" property.
            final JsonObject array = this.gist.json().getJsonObject("files");
            final Collection<String> files =
                new ArrayList<String>(array.size());
            for (final JsonValue value : array.values()) {
                files.add(JsonObject.class.cast(value).getString("filename"));
            }
            return files;
        }

        @Override
        public Github github() {
            return this.gist.github();
        }

        @Override
        public String read(final String name) throws IOException {
            return this.gist.read(name);
        }

        @Override
        public void write(final String name, final String content)
            throws IOException {
            this.gist.write(name, content);
        }

        @Override
        public void star() throws IOException {
            this.gist.star();
        }

        @Override
        public void unstar() throws IOException {
            this.gist.unstar();
        }

        @Override
        public boolean starred() throws IOException {
            return this.gist.starred();
        }

        @Override
        public Gist fork() throws IOException {
            return this.gist.fork();
        }

        @Override
        public GistComments comments() throws IOException {
            return this.gist.comments();
        }

        @Override
        public JsonObject json() throws IOException {
            return this.gist.json();
        }
    }
}
package tlc2.tool.liveness;
import tla2sany.semantic.ASTConstants;
import tla2sany.semantic.ExprNode;
import tla2sany.semantic.ExprOrOpArgNode;
import tla2sany.semantic.FormalParamNode;
import tla2sany.semantic.LetInNode;
import tla2sany.semantic.LevelConstants;
import tla2sany.semantic.OpApplNode;
import tla2sany.semantic.OpDefNode;
import tla2sany.semantic.Subst;
import tla2sany.semantic.SubstInNode;
import tla2sany.semantic.SymbolNode;
import tlc2.output.EC;
import tlc2.output.MP;
import tlc2.tool.Action;
import tlc2.tool.BuiltInOPs;
import tlc2.tool.EvalControl;
import tlc2.tool.IContextEnumerator;
import tlc2.tool.ITool;
import tlc2.tool.ModelChecker;
import tlc2.tool.Specs;
import tlc2.tool.TLCState;
import tlc2.tool.ToolGlobals;
import tlc2.util.Context;
import tlc2.util.Vect;
import tlc2.value.IBoolValue;
import tlc2.value.IFcnLambdaValue;
import tlc2.value.IValue;
import util.Assert;
import util.ToolIO;
public class Liveness implements ToolGlobals, ASTConstants {

    /**
     * Converts an expression whose level is already known into the
     * corresponding LiveExprNode: constant-level expressions are evaluated
     * eagerly to a boolean, state-level expressions are wrapped as
     * LNStateAST, and everything else becomes an LNAction.
     */
    private static LiveExprNode astToLive(ITool tool, ExprNode expr, Context con, int level) {
        if (level == LevelConstants.ConstantLevel) {
            IValue val = tool.eval(expr, con, TLCState.Empty);
            if (!(val instanceof IBoolValue)) {
                Assert.fail(EC.TLC_EXPECTED_VALUE, new String[] { "boolean", expr.toString() });
            }
            return ((IBoolValue) val).getVal() ? LNBool.TRUE : LNBool.FALSE;
        } else if (level == LevelConstants.VariableLevel) {
            return new LNStateAST(expr, con);
        } else {
            // Assert.check(level == LevelConstants.ActionLevel;
            return new LNAction(expr, con);
        }
    }

    /**
     * The method astToLive converts an ExprNode into a LiveExprNode. o We are
     * passing down a tool and a context as we parse the expressions
     * recursively. That's for calling eval(). o The method has some
     * restrictions. If you did Predicate([]p), then we'd need to instantiate
     * the predicate body with []p. For the moment, we require that arguments to
     * predicates be computable from its context.
     */
    private static LiveExprNode astToLive(ITool tool, ExprNode expr, Context con) {
        switch (expr.getKind()) {
        case OpApplKind: {
            OpApplNode expr1 = (OpApplNode) expr;
            return astToLiveAppl(tool, expr1, con);
        }
        case LetInKind: {
            // LET definitions are resolved via lookup; only the body matters here.
            LetInNode expr1 = (LetInNode) expr;
            return astToLive(tool, expr1.getBody(), con);
        }
        case SubstInKind: {
            // Push each substitution into the context before descending.
            SubstInNode expr1 = (SubstInNode) expr;
            Subst[] subs = expr1.getSubsts();
            int slen = subs.length;
            Context con1 = con;
            for (int i = 0; i < slen; i++) {
                Subst sub = subs[i];
                con1 = con1.cons(sub.getOp(), tool.getVal(sub.getExpr(), con, false));
            }
            return astToLive(tool, expr1.getBody(), con1);
        }
        default: {
            // Anything else must be at most action-level to be handled.
            int level = Specs.getLevel(expr, con);
            if (level > LevelConstants.ActionLevel) {
                Assert.fail(EC.TLC_LIVE_CANNOT_HANDLE_FORMULA, expr.toString());
            }
            return astToLive(tool, expr, con, level);
        }
        }
    }

    /**
     * Converts an operator application into a LiveExprNode. User-defined
     * operators are expanded by evaluating their arguments and descending
     * into the definition body; built-in boolean and temporal operators are
     * translated case by case below.
     */
    private static LiveExprNode astToLiveAppl(ITool tool, OpApplNode expr, Context con) {
        ExprOrOpArgNode[] args = expr.getArgs();
        int alen = args.length;
        SymbolNode opNode = expr.getOperator();
        int opcode = BuiltInOPs.getOpCode(opNode.getName());

        if (opcode == 0) {
            // This is a user-defined operator with one exception: it may
            // be substed by a builtin operator. This special case is handled
            // by checking if the lookup returns a OpDef with opcode = 0.
            Object val = tool.lookup(opNode, con, false);
            if (val instanceof OpDefNode) {
                OpDefNode opDef = (OpDefNode) val;
                opcode = BuiltInOPs.getOpCode(opDef.getName());
                if (opcode == 0) {
                    try {
                        // Bind each (eagerly evaluated) argument to its formal
                        // parameter, then translate the definition body.
                        FormalParamNode[] formals = opDef.getParams();
                        Context con1 = con;
                        for (int i = 0; i < alen; i++) {
                            IValue argVal = tool.eval(args[i], con, TLCState.Empty);
                            con1 = con1.cons(formals[i], argVal);
                        }
                        LiveExprNode res = astToLive(tool, opDef.getBody(), con1);
                        int level = res.getLevel();
                        if (level > LevelConstants.ActionLevel) {
                            return res;
                        }
                        return astToLive(tool, expr, con, level);
                    } catch (Exception e) { /* SKIP */
                        // NOTE(review): failures fall through to the level-based
                        // fallback below — confirm this best-effort is intended.
                    }
                }
            } else if (val instanceof IBoolValue) {
                return ((IBoolValue) val).getVal() ? LNBool.TRUE : LNBool.FALSE;
            }
            if (opcode == 0) {
                // Still not a builtin: treat it by its level, if handleable.
                int level = Specs.getLevel(expr, con);
                if (level > LevelConstants.ActionLevel) {
                    Assert.fail(EC.TLC_LIVE_CANNOT_HANDLE_FORMULA, expr.toString());
                }
                return astToLive(tool, expr, con, level);
            }
        }

        switch (opcode) {
        case OPCODE_be: // BoundedExists
        {
            // \E x \in S: body  ~~>  disjunction of body over all bindings.
            ExprNode body = (ExprNode) args[0];
            try {
                IContextEnumerator Enum = tool.contexts(expr, con, TLCState.Empty, TLCState.Empty, EvalControl.Clear);
                Context con1;
                LNDisj res = new LNDisj(0);
                while ((con1 = Enum.nextElement()) != null) {
                    LiveExprNode kid = astToLive(tool, body, con1);
                    res.addDisj(kid);
                }
                int level = res.getLevel();
                if (level > LevelConstants.ActionLevel) {
                    return res;
                }
                return astToLive(tool, expr, con, level);
            } catch (Exception e) {
                // Catching Exception here seem dangerous
                // Assert.printStack(e);
                int level = Specs.getLevel(expr, con);
                if (level > LevelConstants.ActionLevel) {
                    Assert.fail(EC.TLC_LIVE_CANNOT_HANDLE_FORMULA, expr.toString());
                    ;
                }
                return astToLive(tool, expr, con, level);
            }
        }
        case OPCODE_bf: // BoundedForall
        {
            // \A x \in S: body  ~~>  conjunction of body over all bindings.
            ExprNode body = (ExprNode) args[0];
            try {
                IContextEnumerator Enum = tool.contexts(expr, con, TLCState.Empty, TLCState.Empty, EvalControl.Clear);
                Context con1;
                LNConj res = new LNConj(0);
                while ((con1 = Enum.nextElement()) != null) {
                    LiveExprNode kid = astToLive(tool, body, con1);
                    res.addConj(kid);
                }
                int level = res.getLevel();
                if (level > LevelConstants.ActionLevel) {
                    return res;
                }
                return astToLive(tool, expr, con, level);
            } catch (Exception e) {
                // Catching Exception here seem dangerous
                // Assert.printStack(e);
                int level = Specs.getLevel(expr, con);
                if (level > LevelConstants.ActionLevel) {
                    if (e instanceof Assert.TLCRuntimeException) {
                        Assert.fail(EC.TLC_LIVE_CANNOT_HANDLE_FORMULA, new String[] {expr.toString(), e.getMessage()});
                    } else {
                        Assert.fail(EC.TLC_LIVE_CANNOT_HANDLE_FORMULA, expr.toString());
                    }
                }
                return astToLive(tool, expr, con, level);
            }
        }
        case OPCODE_cl: // ConjList
        case OPCODE_land: {
            LNConj lnConj = new LNConj(alen);
            for (int i = 0; i < alen; i++) {
                LiveExprNode kid = astToLive(tool, (ExprNode) args[i], con);
                lnConj.addConj(kid);
            }
            int level = lnConj.getLevel();
            if (level > LevelConstants.ActionLevel) {
                return lnConj;
            }
            return astToLive(tool, expr, con, level);
        }
        case OPCODE_dl: // DisjList
        case OPCODE_lor: {
            LNDisj lnDisj = new LNDisj(alen);
            for (int i = 0; i < alen; i++) {
                LiveExprNode kid = astToLive(tool, (ExprNode) args[i], con);
                lnDisj.addDisj(kid);
            }
            int level = lnDisj.getLevel();
            if (level > LevelConstants.ActionLevel) {
                return lnDisj;
            }
            return astToLive(tool, expr, con, level);
        }
        case OPCODE_fa: // FcnApply
        {
            try {
                IValue fval = tool.eval(args[0], con, TLCState.Empty);
                if (fval instanceof IFcnLambdaValue) {
                    IFcnLambdaValue fcn = (IFcnLambdaValue) fval;
                    if (!fcn.hasRcd()) {
                        // this could be a bug, since con1 is created but not
                        // used
                        // SZ Jul 13, 2009: removed to kill the warning
                        // SZ Feb 20, 2009: variable never read locally
                        // Context con1 =
                        tool.getFcnContext(fcn, args, con, TLCState.Empty, TLCState.Empty, EvalControl.Clear);
                        return astToLive(tool, (ExprNode) fcn.getBody(), con);
                    }
                }
            } catch (Exception e) { /* SKIP */
                // Swallowing Exception here seem dangerous
            }
            int level = expr.getLevel();
            if (level > LevelConstants.ActionLevel) {
                Assert.fail(EC.TLC_LIVE_CANNOT_HANDLE_FORMULA, expr.toString());
            }
            return astToLive(tool, expr, con, level);
        }
        case OPCODE_ite: // IfThenElse
        {
            // IF g THEN a ELSE b  ~~>  (g /\ a) \/ (~g /\ b)
            LiveExprNode guard = astToLive(tool, (ExprNode) args[0], con);
            LiveExprNode e1 = astToLive(tool, (ExprNode) args[1], con);
            LiveExprNode e2 = astToLive(tool, (ExprNode) args[2], con);
            LiveExprNode conj1 = new LNConj(guard, e1);
            LiveExprNode conj2 = new LNConj(new LNNeg(guard), e2);
            LiveExprNode res = new LNDisj(conj1, conj2);
            int level = res.getLevel();
            if (level > LevelConstants.ActionLevel) {
                return res;
            }
            return astToLive(tool, expr, con, level);
        }
        case OPCODE_lnot: {
            LiveExprNode lnArg = astToLive(tool, (ExprNode) args[0], con);
            int level = lnArg.getLevel();
            if (level > LevelConstants.ActionLevel) {
                return new LNNeg(lnArg);
            }
            return astToLive(tool, expr, con, level);
        }
        case OPCODE_implies: {
            // a => b  ~~>  ~a \/ b (material implication)
            LiveExprNode lnLeft = astToLive(tool, (ExprNode) args[0], con);
            LiveExprNode lnRight = astToLive(tool, (ExprNode) args[1], con);
            int level = Math.max(lnLeft.getLevel(), lnRight.getLevel());
            if (level > LevelConstants.ActionLevel) {
                return new LNDisj(new LNNeg(lnLeft), lnRight);
            }
            return astToLive(tool, expr, con, level);
        }
        case OPCODE_prime: {
            // A primed expression is action-level by definition.
            return new LNAction(expr, con);
        }
        case OPCODE_sf:
        {
            // expand SF_e(A) into <>[]-EN<A>_e \/ []<><A>_e
            ExprNode subs = (ExprNode) args[0]; // the e in SF_e(A)
            ExprNode body = (ExprNode) args[1]; // the A in SF_e(A)
            LiveExprNode en = new LNNeg(new LNStateEnabled(body, con, subs, false));
            LiveExprNode act = new LNAction(body, con, subs, false);
            return new LNDisj(new LNEven(new LNAll(en)), new LNAll(new LNEven(act)));
        }
        case OPCODE_wf:
        {
            // expand WF_e(A) into []<>(-EN<A>_e \/ <A>_e)
            ExprNode subs = (ExprNode) args[0]; // the e in WF_e(A)
            ExprNode body = (ExprNode) args[1]; // the A in WF_e(A)
            LiveExprNode ln1 = new LNNeg(new LNStateEnabled(body, con, subs, false));
            LiveExprNode ln2 = new LNAction(body, con, subs, false);
            LiveExprNode disj = new LNDisj(ln1, ln2);
            return new LNAll(new LNEven(disj));
        }
        case OPCODE_leadto: {
            // F ~> G equals [](F => <>G), however TLC does not have an
            // implementation for logical implication. Thus, the rule of
            // material implication ("->") is used to transform it into a
            // disjunct.
            LiveExprNode lnLeft = astToLive(tool, (ExprNode) args[0], con);
            LiveExprNode lnRight = astToLive(tool, (ExprNode) args[1], con);
            // expand a ~> b into [](-a \/ <>b)
            LNDisj lnd = new LNDisj(new LNNeg(lnLeft), new LNEven(lnRight));
            return new LNAll(lnd);
        }
        case OPCODE_box: {
            LiveExprNode lnArg = astToLive(tool, (ExprNode) args[0], con);
            return new LNAll(lnArg);
        }
        case OPCODE_diamond: {
            LiveExprNode lnArg = astToLive(tool, (ExprNode) args[0], con);
            return new LNEven(lnArg);
        }
        case OPCODE_aa: { // AngleAct <A>_e
            assert Specs.getLevel(expr, con) == LevelConstants.ActionLevel;
            final ExprNode body = (ExprNode) args[0]; // the A in <<A>>_e
            final ExprNode subs = (ExprNode) args[1]; // the e in <<A>>_e
            return new LNAction(body, con, subs, false);
        }
        // The following case added by LL on 13 Nov 2009 to handle subexpression
        // names.
        case OPCODE_nop: {
            return astToLive(tool, (ExprNode) args[0], con);
        }
        default: {
            // We handle all the other built-in operators here. Surprisingly, even OPCODE_aa
            // (AngleAct <A>_e) is handled here and not as the dedicated case statement below
            // such that e gets passed as subscript to LNAction:
            // case OPCODE_aa: { // AngleAct <A>_e
            // assert Spec.getLevel(expr, con) == 2;
            // final ExprNode body = (ExprNode) args[0]; // the A in <<A>>_e
            // final ExprNode subscript = (ExprNode) args[1]; // the e in <<A>>_e
            // return new LNAction(body, con, subscript, false);
            // The default handling here results in LNAction#subscript to be null skipping
            // the subscript related branch in LNAction#eval(Tool, TLCState, TLCState). This
            // poses no problem though because Tool#evalAppl eventually checks if e' = e.
            int level = Specs.getLevel(expr, con);
            if (level > LevelConstants.ActionLevel) {
                Assert.fail(EC.TLC_LIVE_CANNOT_HANDLE_FORMULA, expr.toString());
            }
            return astToLive(tool, expr, con, level);
        }
        }
    }

    /**
     * Parse the temporals and impliedTemporals given in the config file. It
     * returns null if there is nothing to check.
     */
    private static LiveExprNode parseLiveness(ITool tool) {
        // Conjunction of all fairness assumptions (livespec).
        Action[] fairs = tool.getTemporals();
        LNConj lnc = new LNConj(fairs.length);
        for (int i = 0; i < fairs.length; i++) {
            LiveExprNode ln = astToLive(tool, (ExprNode) fairs[i].pred, fairs[i].con);
            lnc.addConj(ln);
        }
        // Negated liveness properties: we search for counterexamples,
        // i.e. livespec /\ ~livecheck.
        Action[] checks = tool.getImpliedTemporals();
        if (checks.length == 0) {
            if (fairs.length == 0) {
                return null;
            }
        } else if (checks.length == 1) {
            LiveExprNode ln = astToLive(tool, (ExprNode) checks[0].pred, checks[0].con);
            if (lnc.getCount() == 0) {
                return new LNNeg(ln);
            }
            lnc.addConj(new LNNeg(ln));
        } else {
            LNDisj lnd = new LNDisj(checks.length);
            for (int i = 0; i < checks.length; i++) {
                LiveExprNode ln = astToLive(tool, (ExprNode) checks[i].pred, checks[i].con);
                lnd.addDisj(new LNNeg(ln));
            }
            if (lnc.getCount() == 0) {
                return lnd;
            }
            lnc.addConj(lnd);
        }
        return lnc;
    }

    /**
     * The method processLiveness normalizes the list of temporals and
     * impliedTemporals to check their validity, and to figure out the order and
     * manner in which things should ultimately be checked. This method returns
     * a handle, which can subsequently be passed to the other liveness things.
     *
     * Theory: we're looking for counterexamples to:
     *
     * <pre>
     * spec /\ livespec => []inv /\ livecheck
     * </pre>
     *
     * i.e.
     *
     * <pre>
     * \/ (spec /\ livespec /\ <>-inv)
     * \/ (spec /\ livespec /\ -livecheck)
     * </pre>
     *
     * <p>
     * The first half of this disjunction (inv) is already checked by the model
     * checker on the fly (@see
     * {@link ModelChecker#doNext(TLCState, tlc2.util.ObjLongTable)}).
     * <p>
     * We're converting the second half into <i>normal form</i>. We actually
     * omit spec in what we produce. It will be left implicit. So, the only job
     * is to turn:
     *
     * <pre>
     * livespec /\ -livecheck
     * </pre>
     *
     * into:
     *
     * <pre>
     * live1 /\ live2 ... /\ (-check1 \/ -check2 ...)
     * </pre>
     *
     * into <i>normal form</i>. livespec corresponds to the spec's
     * <i>fairness</i> formulae where check1, check2, ... are the actual
     * <i>liveness properties</i> to be checked.
     */
    public static OrderOfSolution[] processLiveness(final ITool tool) {
        LiveExprNode lexpr = parseLiveness(tool);
        if (lexpr == null) {
            return new OrderOfSolution[0];
        }
        // Give tags to all action and state predicates, for equality
        // checking (tlc2.tool.liveness.LiveExprNode.equals(LiveExprNode)).
        // We tag them here so that, if disjunct normal form (DNF) should happen to
        // duplicate exprs, then they will still have the same tag.
        lexpr.tagExpr(1);
        lexpr = lexpr.simplify().toDNF();
        if ((lexpr instanceof LNBool) && !((LNBool) lexpr).b) {
            return new OrderOfSolution[0]; // must be unsatisfiable
        }
        final LNDisj dnf = (lexpr instanceof LNDisj) ? (LNDisj) lexpr : (new LNDisj(lexpr));

        // Now we will turn DNF into a format that can be tested by the
        // tableau method. The first step is to collect everything into
        // pems+lexps: listof-(listof-<>[],[]<> /\ tf)
        final OSExprPem[] pems = new OSExprPem[dnf.getCount()];
        final LiveExprNode[] tfs = new LiveExprNode[dnf.getCount()];
        for (int i = 0; i < dnf.getCount(); i++) {
            // Flatten junctions, because DNF may contain singleton junctions
            final LiveExprNode ln = dnf.getBody(i).flattenSingleJunctions();
            final OSExprPem pem = new OSExprPem();
            pems[i] = pem;
            if (ln instanceof LNConj) {
                final LNConj lnc2 = (LNConj) ln;
                for (int j = 0; j < lnc2.getCount(); j++) {
                    classifyExpr(lnc2.getBody(j), pem);
                }
            } else {
                classifyExpr(ln, pem);
            }
            // Combine the disjunct's remaining temporal formulas (if any)
            // into a single conjunct stored in tfs[i].
            tfs[i] = null;
            if (pem.tfs.size() == 1) {
                tfs[i] = (LiveExprNode) pem.tfs.elementAt(0);
            } else if (pem.tfs.size() > 1) {
                final LNConj lnc2 = new LNConj(pem.tfs.size());
                for (int j = 0; j < pem.tfs.size(); j++) {
                    lnc2.addConj((LiveExprNode) pem.tfs.elementAt(j));
                }
                tfs[i] = lnc2;
            }
        }

        // Now, we will create our OrderOfSolutions. We lump together all
        // disjunctions that have the same tf. This will happen often in
        // cases such as (WF /\ SF) => (WF /\ SF /\ TF), since the WF and
        // SF will be broken up into many cases and the TF will remain the
        // same throughout. (Incidentally, we're checking equality on TFs
        // just syntactically. This is hopefully sufficient, because we
        // haven't done any real rearrangement of them, apart from munging
        // up \/ and /\ above them. tfbin contains the different tf's.
        // pembin is a vect of vect-of-pems collecting each tf's pems.
        final TBPar tfbin = new TBPar(dnf.getCount());
        final Vect<Vect<OSExprPem>> pembin = new Vect<>(dnf.getCount());
        for (int i = 0; i < dnf.getCount(); i++) {
            int found = -1;
            final LiveExprNode tf = tfs[i];
            for (int j = 0; j < tfbin.size() && found == -1; j++) {
                final LiveExprNode tf0 = tfbin.exprAt(j);
                if ((tf == null && tf0 == null) || (tf != null && tf0 != null && tf.equals(tf0))) {
                    found = j;
                }
            }
            if (found == -1) {
                found = tfbin.size();
                tfbin.addElement(tf);
                pembin.addElement(new Vect<OSExprPem>());
            }
            ((Vect<OSExprPem>) pembin.elementAt(found)).addElement(pems[i]);
        }

        // We then create an OrderOfSolution for each tf in tfbin.
        final OrderOfSolution[] oss = new OrderOfSolution[tfbin.size()];
        for (int i = 0; i < tfbin.size(); i++) {
            final LiveExprNode tf = tfbin.exprAt(i);
            if (tf == null) {
                // No temporal formula: no tableau needed for this solution.
                oss[i] = new OrderOfSolution(new LNEven[0]);
            } else {
                final LiveExprNode tf1 = tf.makeBinary();
                final TBPar promises = new TBPar(10);
                tf1.extractPromises(promises);
                oss[i] = new OrderOfSolution(new TBGraph(tf1), new LNEven[promises.size()]);
                for (int j = 0; j < promises.size(); j++) {
                    oss[i].getPromises()[j] = (LNEven) promises.exprAt(j);
                }
            }
            // We lump all the pems into a single checkState and checkAct,
            // and oss[i].pems will simply be integer lookups into them.
            final Vect<LiveExprNode> stateBin = new Vect<>();
            final Vect<LiveExprNode> actionBin = new Vect<>();
            final Vect<OSExprPem> tfPems = (Vect<OSExprPem>) pembin.elementAt(i);
            oss[i].setPems(new PossibleErrorModel[tfPems.size()]);
            for (int j = 0; j < tfPems.size(); j++) {
                final OSExprPem pem = (OSExprPem) tfPems.elementAt(j);
                oss[i].getPems()[j] = new PossibleErrorModel(addToBin(pem.AEAction, actionBin),
                        addToBin(pem.AEState, stateBin), addToBin(pem.EAAction, actionBin));
            }
            // Finally, store the bins with the order of solution.
            oss[i].setCheckState(new LiveExprNode[stateBin.size()]);
            for (int j = 0; j < stateBin.size(); j++) {
                oss[i].getCheckState()[j] = (LiveExprNode) stateBin.elementAt(j);
            }
            oss[i].setCheckAction(new LiveExprNode[actionBin.size()]);
            for (int j = 0; j < actionBin.size(); j++) {
                oss[i].getCheckAction()[j] = (LiveExprNode) actionBin.elementAt(j);
            }
        }
        MP.printMessage(EC.TLC_LIVE_IMPLIED, String.valueOf(oss.length));
        // SZ Jul 28, 2009: What for?
        // ToolIO.out.flush();
        return oss;
    }

    /**
     * Given a list of checks, ensures that the checks are in the bin. It
     * returns an array of index of the checks in the bin.
     */
    private static int addToBin(LiveExprNode check, Vect<LiveExprNode> bin) {
        if (check == null) {
            return -1;
        }
        // Linear scan for a syntactically-equal entry; append if absent.
        int len = bin.size();
        int idx;
        for (idx = 0; idx < len; idx++) {
            LiveExprNode ln = (LiveExprNode) bin.elementAt(idx);
            if (check.equals(ln)) {
                break;
            }
        }
        if (idx >= len) {
            bin.addElement(check);
        }
        return idx;
    }

    /**
     * Vectorized variant of {@link #addToBin(LiveExprNode, Vect)}: bins every
     * check and returns the corresponding indices.
     */
    private static int[] addToBin(Vect<LiveExprNode> checks, Vect<LiveExprNode> bin) {
        int[] index = new int[checks.size()];
        for (int i = 0; i < checks.size(); i++) {
            LiveExprNode check = (LiveExprNode) checks.elementAt(i);
            index[i] = addToBin(check, bin);
        }
        return index;
    }

    /**
     * A conjunct makes up of exprs of forms <>[]act, []<>act, []<>state, and
     * tf. For a model to be a valid counterexample, it must pass all of these
     * tests. This method classifies an expression into <>[]act, []<>act,
     * []<>state, temporal formulas (without actions), or erroneous things.
     */
    // TODO Explore the idea to syntactically rewrite an LNActions A into a
    // ordinary predicate and the next state operator ()A in the tableau.
    private static void classifyExpr(LiveExprNode ln, OSExprPem pem) {
        // TLC is clever enough to optimize the case where some temporal formula
        // can be handled WITHOUT a tableau. In this case, the state graph IS
        // the behavior graph and thus the overall verification time is reduced.
        // Additionally, the tableau generation does not support formulas
        // containing (nested) LNActions.
        if (ln instanceof LNEven) {
            // <>[]body where body is at most action-level.
            LiveExprNode ln1 = ((LNEven) ln).getBody();
            if (ln1 instanceof LNAll) {
                LiveExprNode ln2 = ((LNAll) ln1).getBody();
                if (ln2.getLevel() < LevelConstants.TemporalLevel) {
                    pem.EAAction.addElement(ln2);
                    return;
                }
            }
        } else if (ln instanceof LNAll) {
            // []<>body, split by whether body is state- or action-level.
            LiveExprNode ln1 = ((LNAll) ln).getBody();
            if (ln1 instanceof LNEven) {
                LiveExprNode ln2 = ((LNEven) ln1).getBody();
                int level = ln2.getLevel();
                if (level <= LevelConstants.VariableLevel) {
                    pem.AEState.addElement(ln2);
                    return;
                }
                if (level == LevelConstants.ActionLevel) {
                    pem.AEAction.addElement(ln2);
                    return;
                }
            }
        }
        if (ln.containAction()) {
            Assert.fail(EC.TLC_LIVE_WRONG_FORMULA_FORMAT);
        }
        // If we get here (because of a temporal formula), at tableau is
        // consequently going to be created. This part corresponds to the
        // ideas in the MP book.
        pem.tfs.addElement(ln);
    }

    /**
     * Prints the given tableau graph (or a placeholder when there is none)
     * to TLC's standard output.
     */
    public static void printTBGraph(TBGraph tableau) {
        if (tableau == null) {
            ToolIO.out.println("No tableau.");
        } else {
            ToolIO.out.println(tableau.toString());
        }
    }

    /**
     * OSExprPem is a temporary data structure for producing the
     * PossibleErrorModel and OrderOfSolution.
     */
    private static class OSExprPem {
        Vect<LiveExprNode> EAAction; // <>[]action's
        Vect<LiveExprNode> AEState; // []<>state's
        Vect<LiveExprNode> AEAction; // []<>action's
        Vect<LiveExprNode> tfs; // other temp formulae with no actions

        public OSExprPem() {
            this.EAAction = new Vect<>();
            this.AEState = new Vect<>();
            this.AEAction = new Vect<>();
            this.tfs = new Vect<>();
        }
    }
}
package com.valkryst.VTerminal.misc;
import com.valkryst.VTerminal.AsciiCharacter;
import com.valkryst.VTerminal.font.Font;
import lombok.Getter;
import java.awt.Color;
import java.awt.Graphics;
import java.awt.image.BufferedImage;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Objects;
public class ColoredImageCache {
/** The cache. */
private final LinkedHashMap<AsciiCharacterShell, BufferedImage> cachedImages;
/** The font of the character images. */
@Getter private final Font font;
/**
* Constructs a new ColoredImageCache.
*
* @param font
* The font.
*/
public ColoredImageCache(final Font font) {
this.font = font;
cachedImages = new LinkedHashMap<AsciiCharacterShell, BufferedImage>() {
private static final long serialVersionUID = 3550239335645856488L;
protected boolean removeEldestEntry(final Map.Entry<AsciiCharacterShell, BufferedImage> eldest) {
return this.size() >= 10000;
}
};
}
/**
* Constructs a new ColoredImageCache.
*
* @param font
* The font.
*
* @param maxCacheSize
* The maximum number of images to save in the cache.
*
* When this value is reached, or exceeded, then the cache
* discards the eldest cache entry to make room for a new
* entry.
*/
public ColoredImageCache(final Font font, final int maxCacheSize) {
this.font = font;
cachedImages = new LinkedHashMap<AsciiCharacterShell, BufferedImage>() {
private static final long serialVersionUID = 7940325226870365646L;
protected boolean removeEldestEntry(final Map.Entry<AsciiCharacterShell, BufferedImage> eldest) {
return this.size() >= maxCacheSize;
}
};
}
@Override
public int hashCode() {
return Objects.hash(cachedImages);
}
/**
* Retrieves a character image from the cache.
*
* If no image could be found, then one is created, inserted into
* the cache, and then returned.
*
* @param character
* The character.
*
* @return
* The character image.
*/
public BufferedImage retrieveFromCache(final AsciiCharacter character) {
final AsciiCharacterShell shell = new AsciiCharacterShell(character, font);
return cachedImages.computeIfAbsent(shell, s -> applyColorSwap(s, font));
}
/**
* Gets a character image for a character shell and applies the
* back/foreground colors to it.
*
* @param characterShell
* The character shell.
*
* @param font
* The font to retrieve the base character image from.
*
* @return
* The character image.
*/
private static BufferedImage applyColorSwap(final AsciiCharacterShell characterShell, final Font font) {
final BufferedImage image = cloneImage(font.getCharacterImage(characterShell.getCharacter()));
final int backgroundRGB = characterShell.getBackgroundColor().getRGB();
final int foregroundRGB = characterShell.getForegroundColor().getRGB();
for (int y = 0; y < image.getHeight(); y++) {
for (int x = 0; x < image.getWidth(); x++) {
int pixel = image.getRGB(x, y);
int alpha = (pixel >> 24) & 0xff;
int red = (pixel >> 16) & 0xff;
int green = (pixel >> 8) & 0xff;
int blue = (pixel) & 0xff;
boolean isTransparent = alpha != 255;
isTransparent &= red == 0;
isTransparent &= green == 0;
isTransparent &= blue == 0;
if (isTransparent) {
image.setRGB(x, y, backgroundRGB);
} else {
image.setRGB(x, y, foregroundRGB);
}
}
}
System.out.println("\n\n");
return image;
}
/**
* Makes a clone of an image.
*
* @param image
* The image.
*
* @return
* The clone image.
*/
private static BufferedImage cloneImage(final BufferedImage image) {
final BufferedImage newImage = new BufferedImage(image.getWidth(), image.getHeight(), image.getType());
final Graphics g = newImage.getGraphics();
g.drawImage(image, 0, 0, null);
g.dispose();
return newImage;
}
private class AsciiCharacterShell {
/** The character. */
@Getter private final char character;
/** The background color. Defaults to black. */
@Getter private final Color backgroundColor;
/** The foreground color. Defaults to white. */
@Getter private final Color foregroundColor;
public AsciiCharacterShell(final AsciiCharacter character, final Font font) {
if (character == null) {
throw new IllegalArgumentException("The AsciiCharacterShell cannot use a null character");
}
if (font == null) {
throw new IllegalArgumentException("The AsciiCharacterShell cannot have a null font.");
}
this.character = character.getCharacter();
this.backgroundColor = character.getBackgroundColor();
this.foregroundColor = character.getForegroundColor();
}
@Override
public String toString() {
String res = "Color Shell:";
res += "\n\tCharacter:\t'" + character +"'";
res += "\n\tBackground Color:\t" + backgroundColor;
res += "\n\tForeground Color:\t" + foregroundColor;
return res;
}
@Override
public boolean equals(final Object otherObj) {
if (otherObj instanceof AsciiCharacterShell == false) {
return false;
}
if (otherObj == this) {
return true;
}
final AsciiCharacterShell otherShell = (AsciiCharacterShell) otherObj;
boolean isEqual = character == otherShell.getCharacter();
isEqual &= backgroundColor.equals(otherShell.getBackgroundColor());
isEqual &= foregroundColor.equals(otherShell.getForegroundColor());
return isEqual;
}
@Override
public int hashCode() {
return Objects.hash(character, backgroundColor, foregroundColor);
}
}
} |
package org.mariadb.jdbc.internal.util;
import org.mariadb.jdbc.UrlParser;
import org.mariadb.jdbc.internal.failover.FailoverProxy;
import org.mariadb.jdbc.internal.failover.impl.AuroraListener;
import org.mariadb.jdbc.internal.failover.impl.MastersFailoverListener;
import org.mariadb.jdbc.internal.failover.impl.MastersSlavesListener;
import org.mariadb.jdbc.internal.io.socket.NamedPipeSocket;
import org.mariadb.jdbc.internal.io.socket.SharedMemorySocket;
import org.mariadb.jdbc.internal.io.socket.UnixDomainSocket;
import org.mariadb.jdbc.internal.logging.ProtocolLoggingProxy;
import org.mariadb.jdbc.internal.protocol.AuroraProtocol;
import org.mariadb.jdbc.internal.protocol.MasterProtocol;
import org.mariadb.jdbc.internal.protocol.MastersSlavesProtocol;
import org.mariadb.jdbc.internal.protocol.Protocol;
import javax.net.SocketFactory;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.lang.reflect.Constructor;
import java.lang.reflect.Proxy;
import java.net.Socket;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.sql.SQLException;
import java.util.TimeZone;
import java.util.concurrent.locks.ReentrantLock;
public class Utils {
private static final char[] hexArray = "0123456789ABCDEF".toCharArray();
/**
* Escape String.
*
* @param value value to escape
* @param noBackslashEscapes must backslash be escaped
* @return escaped string.
*/
public static String escapeString(String value, boolean noBackslashEscapes) {
if (value.indexOf("'") == -1) {
if (noBackslashEscapes) {
return value;
}
if (value.indexOf("\\") == -1) {
return value;
}
}
String escaped = value.replace("'", "''");
if (noBackslashEscapes) {
return escaped;
}
return escaped.replace("\\", "\\\\");
}
/**
 * Encrypts a password (mysql_native_password scramble).
 * <p>
 * Protocol: 1. server sends a random seed 2. client computes SHA-1 of the
 * password 3. client hashes the output of 2 again 4. client digests seed
 * followed by the double hash 5. the xor of that digest and the single hash
 * is sent ; the server performs the same computation and compares.
 *
 * @param password the password to encrypt
 * @param seed the seed to use
 * @param passwordCharacterEncoding password character encoding
 * @return a scrambled password
 * @throws NoSuchAlgorithmException if SHA1 is not available on the platform we are using
 * @throws UnsupportedEncodingException if passwordCharacterEncoding is not a valid charset name
 */
public static byte[] encryptPassword(final String password, final byte[] seed, String passwordCharacterEncoding)
        throws NoSuchAlgorithmException, UnsupportedEncodingException {
    // empty password -> empty scramble, per protocol
    if (password == null || password.isEmpty()) {
        return new byte[0];
    }
    final MessageDigest sha1 = MessageDigest.getInstance("SHA-1");
    final byte[] passwordBytes = (passwordCharacterEncoding == null || passwordCharacterEncoding.isEmpty())
            ? password.getBytes()
            : password.getBytes(passwordCharacterEncoding);
    final byte[] hashedPassword = sha1.digest(passwordBytes);
    sha1.reset();
    final byte[] doubleHashed = sha1.digest(hashedPassword);
    sha1.reset();
    sha1.update(seed);
    sha1.update(doubleHashed);
    // xor the seed-based digest with the single hash, in place
    final byte[] scramble = sha1.digest();
    for (int pos = 0; pos < scramble.length; pos++) {
        scramble[pos] = (byte) (scramble[pos] ^ hashedPassword[pos]);
    }
    return scramble;
}
/**
 * Copies the original byte array content to a new byte array of exactly
 * {@code length} bytes : longer input is truncated, shorter input is
 * zero-padded at the end.
 *
 * @param orig the original byte array
 * @param length how big the resulting byte array will be
 * @return the copied byte array
 */
public static byte[] copyWithLength(byte[] orig, int length) {
    // new byte[] is already zero-initialized, so only the copy is needed
    byte[] copy = new byte[length];
    System.arraycopy(orig, 0, copy, 0, Math.min(length, orig.length));
    return copy;
}
/**
 * Copies the range [from, to) of the original array into a new array of
 * exactly {@code to - from} bytes. {@code to} may point past the end of the
 * original array ; the missing tail is left as zero bytes.
 *
 * @param orig the original byte array
 * @param from index of first byte in original byte array which will be copied
 * @param to index one past the last byte to copy ; may exceed orig.length
 * @return resulting array
 */
public static byte[] copyRange(byte[] orig, int from, int to) {
    int resultLength = to - from;
    byte[] result = new byte[resultLength];
    int available = orig.length - from;
    System.arraycopy(orig, from, result, 0, Math.min(available, resultLength));
    return result;
}
/**
 * Helper function to replace function parameters in escaped string.
 * 3 functions are handled :
 * - CONVERT(value, type) : SQL_XXX types are replaced with XXX, i.e SQL_INTEGER with INTEGER
 * - TIMESTAMPDIFF(type, ...) or TIMESTAMPADD(type, ...) : SQL_TSI_XXX in type is replaced
 * with XXX, i.e SQL_TSI_HOUR with HOUR
 *
 * @param functionString - input string
 * @return unescaped string
 */
public static String replaceFunctionParameter(String functionString) {
    // fast path : nothing to rewrite
    if (!functionString.contains("SQL_")) {
        return functionString;
    }
    char[] input = functionString.toCharArray();
    StringBuilder sb = new StringBuilder();
    int index;
    // skip leading spaces
    for (index = 0; index < input.length; index++) {
        if (input[index] != ' ') {
            break;
        }
    }
    // Collect the leading function name.
    // Fix: the bounds check must run BEFORE the array access, and both range
    // tests must compare input[index] (the previous code compared the loop
    // index itself against 'z', accepting any char >= 'a' as "lowercase").
    for (; index < input.length
            && ((input[index] >= 'a' && input[index] <= 'z')
                || (input[index] >= 'A' && input[index] <= 'Z'));
            index++) {
        sb.append(input[index]);
    }
    String func = sb.toString().toLowerCase();
    if (func.equals("convert") || func.equals("timestampdiff") || func.equals("timestampadd")) {
        String paramPrefix;
        if (func.equals("timestampdiff") || func.equals("timestampadd")) {
            // Skip to first parameter
            for (; index < input.length; index++) {
                if (!Character.isWhitespace(input[index]) && input[index] != '(') {
                    break;
                }
            }
            if (index == input.length) {
                return new String(input);
            }
            if (index >= input.length - 8) {
                return new String(input);
            }
            paramPrefix = new String(input, index, 8);
            if (paramPrefix.equals("SQL_TSI_")) {
                // drop the 8-char "SQL_TSI_" prefix of the interval type
                return new String(input, 0, index) + new String(input, index + 8, input.length - (index + 8));
            }
            return new String(input);
        }
        // Handle "convert(value, type)" case
        // extract last parameter, after the last ','
        int lastCommaIndex = functionString.lastIndexOf(',');
        for (index = lastCommaIndex + 1; index < input.length; index++) {
            if (!Character.isWhitespace(input[index])) {
                break;
            }
        }
        if (index >= input.length - 4) {
            return new String(input);
        }
        paramPrefix = new String(input, index, 4);
        if (paramPrefix.equals("SQL_")) {
            // drop the 4-char "SQL_" prefix of the target type
            return new String(input, 0, index) + new String(input, index + 4, input.length - (index + 4));
        }
    }
    return new String(input);
}
/**
 * Resolves one complete JDBC escape sequence "{...}" into native sql.
 * Handles {fn ...}, {oj ...}, {d/t/ts ...} (with or without a space before
 * the quote), {call ...}, {escape ...}, {? = call ...} and leading
 * whitespace after '{'. The order of the startsWith tests matters : e.g.
 * "{ts " must not be shadowed by "{t " (it is not, because the third
 * character differs). Recurses through nativeSql for nested escapes.
 *
 * @param escaped the full escape sequence, including the outer braces
 * @param noBackslashEscapes must backslash be escaped
 * @return the native sql fragment
 * @throws SQLException if the sequence is malformed or unknown
 */
private static String resolveEscapes(String escaped, boolean noBackslashEscapes) throws SQLException {
    if (escaped.charAt(0) != '{' || escaped.charAt(escaped.length() - 1) != '}') {
        throw new SQLException("unexpected escaped string");
    }
    int endIndex = escaped.length() - 1;
    // lower-cased copy used only for the case-insensitive "oj" keyword
    String escapedLower = escaped.toLowerCase();
    if (escaped.startsWith("{fn ")) {
        // scalar function : rewrite SQL_XXX type names, then resolve nested escapes
        String resolvedParams = replaceFunctionParameter(escaped.substring(4, endIndex));
        return nativeSql(resolvedParams, noBackslashEscapes);
    } else if (escapedLower.startsWith("{oj ")) {
        // Outer join
        // the server supports "oj" in any case, even "oJ"
        return nativeSql(escaped.substring(4, endIndex), noBackslashEscapes);
    } else if (escaped.startsWith("{d ")) {
        // date literal
        return escaped.substring(3, endIndex);
    } else if (escaped.startsWith("{t ")) {
        // time literal
        return escaped.substring(3, endIndex);
    } else if (escaped.startsWith("{ts ")) {
        //timestamp literal
        return escaped.substring(4, endIndex);
    } else if (escaped.startsWith("{d'")) {
        // date literal, no space
        return escaped.substring(2, endIndex);
    } else if (escaped.startsWith("{t'")) {
        // time literal
        return escaped.substring(2, endIndex);
    } else if (escaped.startsWith("{ts'")) {
        //timestamp literal
        return escaped.substring(3, endIndex);
    } else if (escaped.startsWith("{call ") || escaped.startsWith("{CALL ")) {
        // We support uppercase "{CALL" only because Connector/J supports it. It is not in the JDBC spec.
        return nativeSql(escaped.substring(1, endIndex), noBackslashEscapes);
    } else if (escaped.startsWith("{escape ")) {
        return escaped.substring(1, endIndex);
    } else if (escaped.startsWith("{?")) {
        // likely ?=call(...)
        return nativeSql(escaped.substring(1, endIndex), noBackslashEscapes);
    } else if (escaped.startsWith("{ ")) {
        // Spaces before keyword, this is not JDBC compliant, however some it works in some drivers,
        // so we support it, too
        for (int i = 2; i < escaped.length(); i++) {
            if (!Character.isWhitespace(escaped.charAt(i))) {
                // strip the leading whitespace and retry
                return resolveEscapes("{" + escaped.substring(i), noBackslashEscapes);
            }
        }
    }
    throw new SQLException("unknown escape sequence " + escaped);
}
/**
* Escape sql String
*
* @param sql initial sql
* @param noBackslashEscapes must backslash be escape
* @return escaped sql string
* @throws SQLException if escape sequence is incorrect.
*/
public static String nativeSql(String sql, boolean noBackslashEscapes) throws SQLException {
if (sql.indexOf('{') == -1) {
return sql;
}
StringBuilder escapeSequenceBuf = new StringBuilder();
StringBuilder sqlBuffer = new StringBuilder();
char[] charArray = sql.toCharArray();
char lastChar = 0;
boolean inQuote = false;
char quoteChar = 0;
boolean inComment = false;
boolean isSlashSlashComment = false;
int inEscapeSeq = 0;
for (int i = 0; i < charArray.length; i++) {
char car = charArray[i];
if (lastChar == '\\' && !noBackslashEscapes) {
sqlBuffer.append(car);
continue;
}
switch (car) {
case '\'':
case '"':
if (!inComment) {
if (inQuote) {
if (quoteChar == car) {
inQuote = false;
}
} else {
inQuote = true;
quoteChar = car;
}
}
break;
case '*':
if (!inQuote && !inComment && lastChar == '/') {
inComment = true;
isSlashSlashComment = false;
}
break;
case '/':
case '-':
if (!inQuote) {
if (inComment) {
if (lastChar == '*' && !isSlashSlashComment) {
inComment = false;
} else if (lastChar == car && isSlashSlashComment) {
inComment = false;
}
} else {
if (lastChar == car) {
inComment = true;
isSlashSlashComment = true;
} else if (lastChar == '*') {
inComment = true;
isSlashSlashComment = false;
}
}
}
break;
case 'S':
// skip SQL_xxx and SQL_TSI_xxx in functions
// This would convert e.g SQL_INTEGER => INTEGER, SQL_TSI_HOUR=>HOUR
if (!inQuote && !inComment && inEscapeSeq > 0
&& i + 4 < charArray.length && charArray[i + 1] == 'Q'
&& charArray[i + 2] == 'L' && charArray[i + 3] == 'L'
&& charArray[i + 4] == '_') {
if (i + 8 < charArray.length
&& charArray[i + 5] == 'T'
&& charArray[i + 6] == 'S'
&& charArray[i + 7] == 'I'
&& charArray[i + 8] == '_') {
i += 8;
continue;
}
i += 4;
continue;
}
break;
case '\n':
if (inComment && isSlashSlashComment) {
// slash-slash and dash-dash comments ends with the end of line
inComment = false;
}
break;
case '{':
if (!inQuote && !inComment) {
inEscapeSeq++;
}
break;
case '}':
if (!inQuote && !inComment) {
inEscapeSeq
if (inEscapeSeq == 0) {
escapeSequenceBuf.append(car);
sqlBuffer.append(resolveEscapes(escapeSequenceBuf.toString(), noBackslashEscapes));
escapeSequenceBuf.setLength(0);
continue;
}
}
break;
default:
break;
}
lastChar = car;
if (inEscapeSeq > 0) {
escapeSequenceBuf.append(car);
} else {
sqlBuffer.append(car);
}
}
if (inEscapeSeq > 0) {
throw new SQLException("Invalid escape sequence , missing closing '}' character in '" + sqlBuffer);
}
return sqlBuffer.toString();
}
/**
 * Retrieve protocol corresponding to the failover options.
 * With no failover option the protocol is not proxied ; with a failover mode
 * the protocol is wrapped in a dynamic proxy so connection errors are handled
 * by the matching failover listener.
 *
 * @param urlParser urlParser corresponding to connection url string.
 * @param lock lock to handle thread synchronisation
 * @return protocol
 * @throws SQLException if any error occur during connection
 */
public static Protocol retrieveProxy(final UrlParser urlParser, final ReentrantLock lock) throws SQLException {
    final Class[] protocolInterface = new Class[]{Protocol.class};
    switch (urlParser.getHaMode()) {
        case AURORA:
            // Aurora cluster : discovery + failover handled by AuroraListener
            Protocol auroraProxy = (Protocol) Proxy.newProxyInstance(
                    AuroraProtocol.class.getClassLoader(),
                    protocolInterface,
                    new FailoverProxy(new AuroraListener(urlParser), lock));
            return getProxyLoggingIfNeeded(urlParser, auroraProxy);
        case REPLICATION:
            // master/slave replication topology
            Protocol replicationProxy = (Protocol) Proxy.newProxyInstance(
                    MastersSlavesProtocol.class.getClassLoader(),
                    protocolInterface,
                    new FailoverProxy(new MastersSlavesListener(urlParser), lock));
            return getProxyLoggingIfNeeded(urlParser, replicationProxy);
        case FAILOVER:
        case SEQUENTIAL:
            // multi-master failover (sequential or random host ordering)
            Protocol failoverProxy = (Protocol) Proxy.newProxyInstance(
                    MasterProtocol.class.getClassLoader(),
                    protocolInterface,
                    new FailoverProxy(new MastersFailoverListener(urlParser), lock));
            return getProxyLoggingIfNeeded(urlParser, failoverProxy);
        default:
            // no failover : plain master protocol, connected immediately
            Protocol directProtocol = getProxyLoggingIfNeeded(urlParser, new MasterProtocol(urlParser, lock));
            directProtocol.connectWithoutProxy();
            return directProtocol;
    }
}
/**
 * Wraps the protocol in a logging proxy when query profiling or slow-query
 * logging is enabled in the connection options ; otherwise returns the
 * protocol untouched.
 *
 * @param urlParser parsed connection url, source of the options
 * @param protocol protocol to (possibly) wrap
 * @return the protocol, proxied for logging when required
 */
private static Protocol getProxyLoggingIfNeeded(UrlParser urlParser, Protocol protocol) {
    boolean profileSql = urlParser.getOptions().profileSql;
    boolean slowQueryLogging = urlParser.getOptions().slowQueryThresholdNanos != null;
    if (!profileSql && !slowQueryLogging) {
        return protocol;
    }
    return (Protocol) Proxy.newProxyInstance(
            MasterProtocol.class.getClassLoader(),
            new Class[]{Protocol.class},
            new ProtocolLoggingProxy(protocol, urlParser.getOptions()));
}
/**
 * Get timezone from Id.
 * This differs from the java implementation : by default, an unknown
 * timezone id silently maps to GMT ; here GMT is returned only when
 * explicitly asked for, anything else unknown is an error.
 *
 * @param id timezone id
 * @return timezone.
 * @throws SQLException if no timezone is found for this Id
 */
public static TimeZone getTimeZone(String id) throws SQLException {
    final TimeZone zone = TimeZone.getTimeZone(id);
    // The JDK maps any unrecognized id to GMT : treat that mapping as an
    // error unless GMT is exactly what was requested.
    if ("GMT".equals(zone.getID()) && !"GMT".equals(id)) {
        throw new SQLException("invalid timezone id '" + id + "'");
    }
    return zone;
}
/**
 * Create socket accordingly to options : named pipe, unix domain socket,
 * shared memory, a user-supplied SocketFactory class, or the platform
 * default factory, in that order of precedence.
 *
 * @param urlParser urlParser
 * @param host hostName ( mandatory only for named pipe)
 * @return a new socket
 * @throws IOException if connection error occur
 */
@SuppressWarnings("unchecked")
public static Socket createSocket(UrlParser urlParser, String host) throws IOException {
    if (urlParser.getOptions().pipe != null) {
        return new NamedPipeSocket(host, urlParser.getOptions().pipe);
    }
    if (urlParser.getOptions().localSocket != null) {
        try {
            return new UnixDomainSocket(urlParser.getOptions().localSocket);
        } catch (RuntimeException re) {
            // JNA failures surface as runtime exceptions ; re-express as I/O
            throw new IOException(re.getMessage(), re.getCause());
        }
    }
    if (urlParser.getOptions().sharedMemory != null) {
        try {
            return new SharedMemorySocket(urlParser.getOptions().sharedMemory);
        } catch (RuntimeException re) {
            throw new IOException(re.getMessage(), re.getCause());
        }
    }
    String socketFactoryName = urlParser.getOptions().socketFactory;
    if (socketFactoryName != null) {
        try {
            Class<? extends SocketFactory> socketFactoryClass =
                    (Class<? extends SocketFactory>) Class.forName(socketFactoryName);
            if (socketFactoryClass != null) {
                return socketFactoryClass.getConstructor().newInstance().createSocket();
            }
        } catch (Exception sfex) {
            throw new IOException("Socket factory failed to initialized with option \"socketFactory\" set to \""
                    + urlParser.getOptions().socketFactory + "\"", sfex);
        }
    }
    return SocketFactory.getDefault().createSocket();
}
/**
 * Hexdump of one or more byte arrays, unlimited (no size cap, no offset) ;
 * delegates to the full hexdump overload.
 *
 * @param bytes byte arrays
 * @return String
 */
public static String hexdump(byte[]... bytes) {
    return hexdump(Integer.MAX_VALUE, 0, Integer.MAX_VALUE, bytes);
}
/**
 * Hexdump capped at maxQuerySizeToLog bytes, starting the last array at
 * offset ; delegates with no explicit length limit.
 *
 * @param maxQuerySizeToLog max log size
 * @param offset offset of last byte array
 * @param bytes byte arrays
 * @return String
 */
public static String hexdump(int maxQuerySizeToLog, int offset, byte[]... bytes) {
    return hexdump(maxQuerySizeToLog, offset, Integer.MAX_VALUE, bytes);
}
/**
 * Hexdump.
 * <p>
 * String output example :
 * <pre>
 * {@code
 * 7D 00 00 01 C5 00 00                                }......          <- first byte array
 * 01 00 00 01 02 33 00 00 02 03 64 65 66 05 74 65    .....3....def.te <- second byte array
 * 73 74 6A 0A 74 65 73 74 5F 62 61 74 63 68 0A 74    stj.test_batch.t
 * }
 * </pre>
 *
 * @param maxQuerySizeToLog max log size
 * @param offset offset of last byte array
 * @param length length of last byte array
 * @param byteArr byte arrays. if many, only the last may have offset and size limitation
 * others will be displayed completely.
 * @return String
 */
public static String hexdump(int maxQuerySizeToLog, int offset, int length, byte[]... byteArr) {
    switch (byteArr.length) {
        case 0:
            return "";
        case 1:
            byte[] bytes = byteArr[0];
            if (bytes.length <= offset) return "";
            int dataLength = Math.min(maxQuerySizeToLog, Math.min(bytes.length - offset, length));
            StringBuilder outputBuilder = new StringBuilder(dataLength * 5);
            outputBuilder.append("\n");
            writeHex(bytes, offset, dataLength, outputBuilder);
            // Fix: this return was missing, so the single-array case fell
            // through into the default branch and redid all the work (the
            // result above was discarded).
            return outputBuilder.toString();
        default:
            StringBuilder sb = new StringBuilder();
            sb.append("\n");
            byte[] arr;
            // all arrays but the last are dumped completely
            for (int i = 0; i < byteArr.length - 1; i++) {
                arr = byteArr[i];
                writeHex(arr, 0, arr.length, sb);
            }
            // the last array honors offset / length / maxQuerySizeToLog
            arr = byteArr[byteArr.length - 1];
            int dataLength2 = Math.min(maxQuerySizeToLog, Math.min(arr.length - offset, length));
            writeHex(arr, offset, dataLength2, sb);
            return sb.toString();
    }
}
/**
 * Write bytes/hexadecimal value of a byte array to a StringBuilder :
 * 16 bytes per row, hex pairs on the left, printable-ASCII rendering (dots
 * for non-printable bytes) on the right.
 * <p>
 * String output example :
 * <pre>
 * {@code
 * 38 00 00 00 03 63 72 65 61 74 65 20 74 61 62 6C     8....create tabl
 * 65 20 42 6C 6F 62 54 65 73 74 63 6C 6F 62 74 65     e BlobTestclobte
 * }
 * </pre>
 *
 * @param bytes byte array
 * @param offset offset of the first byte to dump
 * @param dataLength number of bytes to dump, starting at offset
 * @param outputBuilder string builder
 */
public static void writeHex(byte[] bytes, int offset, int dataLength, StringBuilder outputBuilder) {
    if (bytes == null || bytes.length == 0) return;
    char[] hexaValue = new char[16];
    hexaValue[8] = ' ';
    int pos = offset;
    int posHexa = 0;
    // Fix: dump dataLength bytes STARTING at offset. The previous bound
    // (pos < dataLength) silently dropped the last 'offset' bytes whenever
    // offset > 0, since pos starts at offset.
    while (pos < offset + dataLength) {
        int byteValue = bytes[pos] & 0xFF;
        outputBuilder.append(hexArray[byteValue >>> 4])
            .append(hexArray[byteValue & 0x0F])
            .append(" ");
        // printable ASCII range only ; everything else renders as '.'
        hexaValue[posHexa++] = (byteValue > 31 && byteValue < 127) ? (char) byteValue : '.';
        if (posHexa == 8) {
            outputBuilder.append(" ");
        }
        if (posHexa == 16) {
            outputBuilder.append(" ")
                .append(hexaValue)
                .append("\n");
            posHexa = 0;
        }
        pos++;
    }
    // pad and flush a partially-filled final row
    int remaining = posHexa;
    if (remaining > 0) {
        if (remaining < 8) {
            for (; remaining < 8; remaining++) outputBuilder.append(" ");
            outputBuilder.append(" ");
        }
        for (; remaining < 16; remaining++) outputBuilder.append(" ");
        outputBuilder.append(" ")
            .append(hexaValue, 0, posHexa)
            .append("\n");
    }
}
/**
 * Renders a byte array as an uppercase hexadecimal string, two characters
 * per byte, with no separators.
 *
 * @param raw bytes to render
 * @return hexadecimal representation
 */
protected static String getHex(final byte[] raw) {
    final StringBuilder builder = new StringBuilder(raw.length * 2);
    for (final byte current : raw) {
        builder.append(hexArray[(current >> 4) & 0x0F])
            .append(hexArray[current & 0x0F]);
    }
    return builder.toString();
}
/**
 * Null-safe wrapper around {@link #getHex(byte[])} : null yields "".
 *
 * @param bytes bytes to render, may be null
 * @return hexadecimal representation, empty for null
 */
public static String byteArrayToHexString(final byte[] bytes) {
    if (bytes == null) {
        return "";
    }
    return getHex(bytes);
}
} |
package com.saspes.rest;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import org.apache.http.HttpEntity;
import org.apache.http.ParseException;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
/**
*
* @author SasPes
*/
public class Utils {
public static Auth auth = new Auth();
public static boolean checkCordova(Apk apk) throws IOException {
System.out.println("[ Downloading ... ] " + apk.getAppId());
File targetFile = new File("tempapk\\" + apk.getAppId() + ".apk");
targetFile.getParentFile().mkdirs();
CloseableHttpClient client = HttpClients.createDefault();
try (CloseableHttpResponse response = client.execute(new HttpGet(apk.getDownload()))) {
HttpEntity entity = response.getEntity();
if (entity != null) {
try (FileOutputStream outstream = new FileOutputStream(targetFile)) {
entity.writeTo(outstream);
}
}
}
System.out.println("[ Saved ] " + targetFile.getAbsolutePath());
if (UnApk7z.unzip(targetFile.getPath())) {
System.out.println("*********************************************************");
System.out.println("* " + apk.getName() + " (" + apk.getAppId() + " ) is Cordova/PhoneGap app");
System.out.println("*********************************************************");
return true;
} else {
return false;
}
}
public static Document getApkPage(String link) throws ParseException, IOException {
HttpGet httpGetApp = new HttpGet(link);
CloseableHttpResponse httpGetAppRes = auth.getHttpClient().execute(httpGetApp, auth.getHttpContext());
System.out.println("[ " + httpGetAppRes.getStatusLine() + "] " + link);
HttpEntity entityApp = httpGetAppRes.getEntity();
String entityContentsApp = EntityUtils.toString(entityApp);
Document docApp = Jsoup.parse(entityContentsApp);
return docApp;
}
} |
package org.neo4j.rdf.store;
import java.util.Iterator;
import java.util.LinkedList;
import org.neo4j.api.core.Direction;
import org.neo4j.api.core.NeoService;
import org.neo4j.api.core.Node;
import org.neo4j.api.core.Relationship;
import org.neo4j.api.core.RelationshipType;
import org.neo4j.api.core.StopEvaluator;
import org.neo4j.api.core.Transaction;
import org.neo4j.api.core.Traverser.Order;
import org.neo4j.neometa.structure.MetaStructure;
import org.neo4j.rdf.model.CompleteStatement;
import org.neo4j.rdf.model.Context;
import org.neo4j.rdf.model.Literal;
import org.neo4j.rdf.model.Resource;
import org.neo4j.rdf.model.Statement;
import org.neo4j.rdf.model.Uri;
import org.neo4j.rdf.model.Value;
import org.neo4j.rdf.model.WildcardStatement;
import org.neo4j.rdf.store.representation.AbstractNode;
import org.neo4j.rdf.store.representation.standard.AbstractUriBasedExecutor;
import org.neo4j.rdf.store.representation.standard.VerboseQuadExecutor;
import org.neo4j.rdf.store.representation.standard.VerboseQuadStrategy;
import org.neo4j.util.FilteringIterable;
import org.neo4j.util.FilteringIterator;
import org.neo4j.util.IterableWrapper;
import org.neo4j.util.OneOfRelTypesReturnableEvaluator;
import org.neo4j.util.PrefetchingIterator;
import org.neo4j.util.RelationshipToNodeIterable;
import org.neo4j.util.index.IndexService;
public class VerboseQuadStore extends RdfStoreImpl
{
private final MetaStructure meta;
/**
 * Creates a verbose quad store without a meta structure.
 *
 * @param neo the underlying NeoService
 * @param indexer index service used to look up nodes
 */
public VerboseQuadStore( NeoService neo, IndexService indexer )
{
    this( neo, indexer, null );
}
/**
 * Creates a verbose quad store backed by a VerboseQuadStrategy/Executor pair.
 *
 * @param neo the underlying NeoService
 * @param indexer index service used to look up nodes
 * @param meta optional meta structure, may be null
 */
public VerboseQuadStore( NeoService neo, IndexService indexer,
    MetaStructure meta )
{
    super( neo, new VerboseQuadStrategy(
        new VerboseQuadExecutor( neo, indexer, meta ), meta ) );
    this.meta = meta;
    debug( "I'm initialized!" );
}
/**
 * @return the meta structure given at construction, or null when none was.
 */
protected MetaStructure meta()
{
    return this.meta;
}
/**
 * Narrows the inherited accessor : this store is always constructed with a
 * VerboseQuadStrategy, so the cast is safe.
 */
@Override
protected VerboseQuadStrategy getRepresentationStrategy()
{
    return ( VerboseQuadStrategy ) super.getRepresentationStrategy();
}
// private String getStatementType( Statement statement )
// StringBuffer buffer = new StringBuffer();
// buffer.append( statement.getSubject().isWildcard() ? "?S" : "S" );
// buffer.append( " " );
// buffer.append( statement.getPredicate().isWildcard() ? "?P" : "P" );
// buffer.append( " " );
// buffer.append( statement.getObject().isWildcard() ? "?O" : "O" );
// buffer.append( " " );
// buffer.append( statement.getContext().isWildcard() ? "?C" : "C" );
// return buffer.toString();
/**
 * Answers a wildcard statement query by dispatching to the handler for the
 * statement's wildcard pattern. The three booleans passed to wildcardPattern
 * appear to flag which of subject/predicate/object are wildcards (they match
 * the handler names) — TODO confirm against RdfStoreImpl.wildcardPattern.
 * Runs inside a transaction ; never returns null (handlers return null when
 * a bound node is missing, which is mapped to an empty list here).
 *
 * @param statement the possibly-wildcarded pattern to match
 * @param includeInferredStatements unsupported ; must be false
 */
@Override
public Iterable<CompleteStatement> getStatements(
    WildcardStatement statement,
    boolean includeInferredStatements )
{
    // debug( "getStatements( " + statement + " )" );
    // Sampler s = P.s( "getStatements " + getStatementType( statement ),
    // statement.toString() );
    Transaction tx = neo().beginTx();
    try
    {
        if ( includeInferredStatements )
        {
            throw new UnsupportedOperationException( "We currently not " +
                "support getStatements() with reasoning enabled" );
        }
        Iterable<CompleteStatement> result = null;
        if ( wildcardPattern( statement, false, false, true ) )
        {
            result = handleSubjectPredicateWildcard( statement );
        }
        else if ( wildcardPattern( statement, false, true, true ) )
        {
            result = handleSubjectWildcardWildcard( statement );
        }
        else if ( wildcardPattern( statement, false, true, false ) )
        {
            result = handleSubjectWildcardObject( statement );
        }
        else if ( wildcardPattern( statement, true, true, false ) )
        {
            result = handleWildcardWildcardObject( statement );
        }
        else if ( wildcardPattern( statement, true, false, false ) )
        {
            result = handleWildcardPredicateObject( statement );
        }
        else if ( wildcardPattern( statement, false, false, false ) )
        {
            result = handleSubjectPredicateObject( statement );
        }
        else if ( wildcardPattern( statement, true, false, true ) )
        {
            result = handleWildcardPredicateWildcard( statement );
        }
        else if ( wildcardPattern( statement, true, true, true ) )
        {
            result = handleWildcardWildcardWildcard( statement );
        }
        else
        {
            // unrecognized pattern : fall back to the generic implementation
            result = super.getStatements( statement,
                includeInferredStatements );
        }
        if ( result == null )
        {
            // a handler bailed out on a missing bound node : empty result
            result = new LinkedList<CompleteStatement>();
        }
        tx.success();
        return result;
    }
    finally
    {
        tx.finish();
    }
}
/**
 * Debug hook ; intentionally a no-op (redirect locally when tracing).
 *
 * @param message message that would be logged
 */
private void debug( String message )
{
}
/**
 * Looks up the node representing the given value via the executor, or null
 * when it is not in the store.
 *
 * @param uri the value to look up
 */
private Node lookupNode( Value uri )
{
    return getRepresentationStrategy().getExecutor().
        lookupNode( new AbstractNode( uri ) );
}
/**
 * Reads the URI property of a node, or null when the node carries none
 * (e.g. a literal node).
 *
 * @param node node to read
 */
private String getNodeUriOrNull( Node node )
{
    return ( String ) node.getProperty(
        AbstractUriBasedExecutor.URI_PROPERTY_KEY, null );
}
/**
 * Resolves the object node of a statement into a Value : a Uri when the
 * node carries a URI property, otherwise a Literal read off the node.
 *
 * @param predicate predicate URI, used as the literal property key
 * @param objectNode the node at the object end of the statement
 */
private Value getValueForObjectNode( String predicate, Node objectNode )
{
    String uri = ( String ) objectNode.getProperty(
        AbstractUriBasedExecutor.URI_PROPERTY_KEY, null );
    if ( uri != null )
    {
        return new Uri( uri );
    }
    else
    {
        // no URI property : this is a literal node
        return getLiteralValueForObjectNode( predicate, objectNode );
    }
}
/**
 * Builds a Literal from a literal node : the value is stored under the
 * predicate URI as property key, datatype and language under well-known
 * keys (both optional).
 *
 * @param predicate predicate URI used as property key for the value
 * @param literalNode the node holding the literal
 */
private Value getLiteralValueForObjectNode( String predicate,
    Node literalNode )
{
    Object rawValue = literalNode.getProperty( predicate );
    String datatypeUri = ( String ) literalNode.getProperty(
        VerboseQuadExecutor.LITERAL_DATATYPE_KEY, null );
    String language = ( String ) literalNode.getProperty(
        VerboseQuadExecutor.LITERAL_LANGUAGE_KEY, null );
    Uri datatype = datatypeUri == null ? null : new Uri( datatypeUri );
    return new Literal( rawValue, datatype, language );
}
/**
 * Creates an ad-hoc RelationshipType carrying the given name.
 *
 * @param typeName the relationship type name
 */
private RelationshipType relType( final String typeName )
{
    return new RelationshipType()
    {
        public String name()
        {
            return typeName;
        }
    };
}
/**
 * Creates a RelationshipType from a Uri value's string form.
 *
 * @param value a Value known to be a Uri
 */
private RelationshipType relType( Value value )
{
    Uri uri = ( Uri ) value;
    return relType( uri.getUriAsString() );
}
/**
 * Creates a RelationshipType from a statement's predicate.
 *
 * @param statement the statement whose predicate names the type
 */
private RelationshipType relType( Statement statement )
{
    Value predicate = statement.getPredicate();
    return relType( predicate );
}
/**
 * Finds the middle (statement) nodes attached to literal nodes matching the
 * statement's literal object value.
 *
 * @param statement statement whose object is a Literal
 */
private Iterable<Node> getMiddleNodesFromLiterals( Statement statement )
{
    Object literalValue = ( ( Literal ) statement.getObject() ).getValue();
    Iterable<Node> matchingLiteralNodes = getRepresentationStrategy().
        getExecutor().findLiteralNodes( literalValue );
    return new LiteralToMiddleNodeIterable( matchingLiteralNodes );
}
/**
 * Returns all middle (statement) nodes in the store : traverses from the
 * contexts reference node out along IS_A_CONTEXT to every context node, then
 * back along incoming IN_CONTEXT to each middle node ; only nodes reached
 * via IN_CONTEXT are returned (the returnable evaluator).
 */
private Iterable<Node> getMiddleNodesFromAllContexts()
{
    return getRepresentationStrategy().getExecutor().
        getContextsReferenceNode().traverse( Order.DEPTH_FIRST,
        StopEvaluator.END_OF_NETWORK,
        new OneOfRelTypesReturnableEvaluator(
        VerboseQuadStrategy.RelTypes.IN_CONTEXT ),
        VerboseQuadExecutor.RelTypes.IS_A_CONTEXT, Direction.OUTGOING,
        VerboseQuadStrategy.RelTypes.IN_CONTEXT, Direction.INCOMING );
}
/**
 * S P ? : statements with bound subject and predicate, any object. Candidate
 * middle nodes are the subject's outgoing relationships of the predicate's
 * type. Returns null when the subject is not in the store.
 *
 * @param statement the queried pattern
 */
private Iterable<CompleteStatement> handleSubjectPredicateWildcard(
    Statement statement )
{
    Node subjectNode = lookupNode( statement.getSubject() );
    if ( subjectNode == null )
    {
        return null;
    }
    Iterable<Relationship> outgoing = subjectNode.getRelationships(
        relType( statement ), Direction.OUTGOING );
    return new MiddleNodeToStatementIterable( statement,
        new RelationshipToNodeIterable( subjectNode, outgoing ) );
}
/**
 * S ? ? : every statement with the bound subject. Candidate middle nodes
 * are all of the subject's outgoing relationships. Returns null when the
 * subject is not in the store.
 *
 * @param statement the queried pattern
 */
private Iterable<CompleteStatement> handleSubjectWildcardWildcard(
    Statement statement )
{
    Node subjectNode = lookupNode( statement.getSubject() );
    if ( subjectNode == null )
    {
        return null;
    }
    Iterable<Relationship> allOutgoing = subjectNode.getRelationships(
        Direction.OUTGOING );
    return new MiddleNodeToStatementIterable( statement,
        new RelationshipToNodeIterable( subjectNode, allOutgoing ) );
}
/**
 * S ? O : statements with bound subject and object, any predicate. The
 * subject's outgoing relationships are filtered : a relationship passes when
 * following the same relationship type from its middle node reaches the
 * requested object node (resource case), or via LiteralFilteredRelationships
 * for a literal object. Returns null when the subject is not in the store.
 *
 * @param statement the queried pattern
 */
private Iterable<CompleteStatement> handleSubjectWildcardObject(
    final Statement statement )
{
    // TODO Optimization: maybe check which has least rels (S or O)
    // and start there.
    Node subjectNode = lookupNode( statement.getSubject() );
    if ( subjectNode == null )
    {
        return null;
    }
    Iterable<Relationship> relationships = null;
    if ( statement.getObject() instanceof Resource )
    {
        final Node objectNode = lookupNode( statement.getObject() );
        relationships =
            subjectNode.getRelationships( Direction.OUTGOING );
        relationships = new FilteringIterable<Relationship>( relationships )
        {
            @Override
            protected boolean passes( Relationship subjectToMiddleRel )
            {
                // middle -> object follows the same type as subject -> middle
                Node middleNode = subjectToMiddleRel.getEndNode();
                Node anObjectNode = middleNode.getSingleRelationship(
                    subjectToMiddleRel.getType(), Direction.OUTGOING ).
                    getEndNode();
                return anObjectNode.equals( objectNode );
            }
        };
    }
    else
    {
        // literal object : filter on the literal value instead
        relationships = subjectNode.getRelationships( Direction.OUTGOING );
        relationships = new LiteralFilteredRelationships(
            statement, relationships );
    }
    Iterable<Node> middleNodes = new RelationshipToNodeIterable(
        subjectNode, relationships );
    return new MiddleNodeToStatementIterable( statement, middleNodes );
}
/**
 * S P O : fully bound statement lookup. The subject's outgoing relationships
 * of the predicate's type are narrowed by LiteralFilteredRelationships.
 * Returns null when the subject is not in the store.
 *
 * @param statement the queried pattern
 */
private Iterable<CompleteStatement> handleSubjectPredicateObject(
    Statement statement )
{
    Node subjectNode = lookupNode( statement.getSubject() );
    if ( subjectNode == null )
    {
        return null;
    }
    Iterable<Relationship> matching = new LiteralFilteredRelationships(
        statement, subjectNode.getRelationships( relType( statement ),
            Direction.OUTGOING ) );
    Iterable<Node> middleNodes = new RelationshipToNodeIterable(
        subjectNode, matching );
    return new MiddleNodeToStatementIterable( statement, middleNodes );
}
/**
 * ? ? O : every statement with the bound object. Literal objects go through
 * the literal index ; resource objects use the object node's incoming
 * relationships. Returns null when a resource object is not in the store.
 *
 * @param statement the queried pattern
 */
private Iterable<CompleteStatement> handleWildcardWildcardObject(
    Statement statement )
{
    if ( statement.getObject() instanceof Literal )
    {
        return new MiddleNodeToStatementIterable( statement,
            getMiddleNodesFromLiterals( statement ) );
    }
    Node objectNode = lookupNode( statement.getObject() );
    if ( objectNode == null )
    {
        return null;
    }
    Iterable<Node> middleNodes = new RelationshipToNodeIterable( objectNode,
        objectNode.getRelationships( Direction.INCOMING ) );
    return new MiddleNodeToStatementIterable( statement, middleNodes );
}
/**
 * ? P ? : every statement with the bound predicate. Middle nodes come from
 * the statement's context (or all contexts for a wildcard context) and are
 * then filtered on the predicate. Returns null when a bound context is not
 * in the store.
 *
 * @param statement the queried pattern
 */
private Iterable<CompleteStatement> handleWildcardPredicateWildcard(
    Statement statement )
{
    Iterable<Node> middleNodes;
    if ( statement.getContext().isWildcard() )
    {
        // TODO Slow : visits every context in the store
        middleNodes = getMiddleNodesFromAllContexts();
    }
    else
    {
        Node contextNode = lookupNode( statement.getContext() );
        if ( contextNode == null )
        {
            return null;
        }
        middleNodes = new RelationshipToNodeIterable( contextNode,
            contextNode.getRelationships(
                VerboseQuadStrategy.RelTypes.IN_CONTEXT,
                Direction.INCOMING ) );
    }
    return new MiddleNodeToStatementIterable( statement,
        new PredicateFilteredNodes( statement, middleNodes ) );
}
/**
 * ? P O : statements with bound predicate and object. A literal object goes
 * through the literal index then a predicate filter ; a resource object uses
 * its incoming relationships of the predicate's type. Returns null when a
 * resource object is not in the store.
 *
 * @param statement the queried pattern
 */
private Iterable<CompleteStatement> handleWildcardPredicateObject(
    Statement statement )
{
    if ( statement.getObject() instanceof Literal )
    {
        return new MiddleNodeToStatementIterable( statement,
            new PredicateFilteredNodes( statement,
                getMiddleNodesFromLiterals( statement ) ) );
    }
    Node objectNode = lookupNode( statement.getObject() );
    if ( objectNode == null )
    {
        return null;
    }
    Iterable<Node> middleNodes = new RelationshipToNodeIterable( objectNode,
        objectNode.getRelationships( relType( statement ),
            Direction.INCOMING ) );
    return new MiddleNodeToStatementIterable( statement, middleNodes );
}
/**
 * ? ? ? : every statement, optionally restricted to a bound context.
 * Returns null when a bound context is not in the store.
 *
 * @param statement the queried pattern
 */
private Iterable<CompleteStatement> handleWildcardWildcardWildcard(
    Statement statement )
{
    if ( statement.getContext().isWildcard() )
    {
        // TODO Slow : scans every context in the store
        return new MiddleNodeToStatementIterable( statement,
            getMiddleNodesFromAllContexts() );
    }
    Node contextNode = lookupNode( statement.getContext() );
    if ( contextNode == null )
    {
        return null;
    }
    Iterable<Node> middleNodes = new RelationshipToNodeIterable( contextNode,
        contextNode.getRelationships(
            VerboseQuadStrategy.RelTypes.IN_CONTEXT,
            Direction.INCOMING ) );
    return new MiddleNodeToStatementIterable( statement, middleNodes );
}
/**
 * Adapts an iterable of middle ("statement") nodes into complete statements
 * matching the given pattern ; iteration is delegated to
 * MiddleNodeToStatementIterator.
 */
private class MiddleNodeToStatementIterable
    implements Iterable<CompleteStatement>
{
    private final Statement statement;
    private final Iterable<Node> middleNodes;

    MiddleNodeToStatementIterable( Statement statement,
        Iterable<Node> middleNodes )
    {
        this.statement = statement;
        this.middleNodes = middleNodes;
    }

    public Iterator<CompleteStatement> iterator()
    {
        return new MiddleNodeToStatementIterator( statement,
            middleNodes.iterator() );
    }
}
/**
 * Lazily converts middle nodes into CompleteStatements. A middle node may
 * belong to several contexts, so one statement is emitted per
 * (middle node, context) pair ; the two "current" fields track that inner
 * iteration across fetch calls.
 */
private class MiddleNodeToStatementIterator
    extends PrefetchingIterator<CompleteStatement>
{
    private Statement statement;
    private Iterator<Node> middleNodes;
    // They are both null or both non-null synced.
    private Node currentMiddleNode;
    private Iterator<Node> currentMiddleNodeContexts;

    MiddleNodeToStatementIterator( Statement statement,
        Iterator<Node> middleNodes )
    {
        this.statement = statement;
        this.middleNodes = middleNodes;
    }

    @Override
    protected CompleteStatement fetchNextOrNull()
    {
        // exhausted the current node's contexts : advance to the next
        // middle node that has at least one (matching) context
        if ( currentMiddleNodeContexts == null ||
            !currentMiddleNodeContexts.hasNext() )
        {
            while ( middleNodes.hasNext() )
            {
                currentMiddleNode = middleNodes.next();
                currentMiddleNodeContexts =
                    newContextIterator( currentMiddleNode );
                if ( currentMiddleNodeContexts.hasNext() )
                {
                    break;
                }
            }
        }
        if ( currentMiddleNodeContexts != null &&
            currentMiddleNodeContexts.hasNext() )
        {
            return newStatement();
        }
        // no more (middle node, context) pairs : end of iteration
        return null;
    }

    private Iterator<Node> newContextIterator( Node middleNode )
    {
        // TODO With the traverser it's... somewhat like
        // 1000 times slower, why Johan why?
        // currentMiddleNodeContexts = currentMiddleNode.traverse(
        // Order.BREADTH_FIRST, StopEvaluator.END_OF_NETWORK,
        // contextMatcher, VerboseQuadStrategy.RelTypes.IN_CONTEXT,
        // Direction.OUTGOING ).iterator();
        Iterator<Node> iterator = new RelationshipToNodeIterable(
            middleNode, middleNode.getRelationships(
            VerboseQuadStrategy.RelTypes.IN_CONTEXT,
            Direction.OUTGOING ) ).iterator();
        if ( !statement.getContext().isWildcard() )
        {
            // bound context : keep only the context node matching the query
            iterator = new FilteringIterator<Node>( iterator )
            {
                @Override
                protected boolean passes( Node contextNode )
                {
                    String contextUri = getNodeUriOrNull( contextNode );
                    return new Context( contextUri ).equals(
                        statement.getContext() );
                }
            };
        }
        return iterator;
    }

    private CompleteStatement newStatement()
    {
        Node middleNode = currentMiddleNode;
        // the subject is at the start of the incoming relationship ;
        // that relationship's type name is the predicate URI
        Relationship subjectRelationship = middleNode.getRelationships(
            Direction.INCOMING ).iterator().next();
        Node subjectNode = subjectRelationship.getOtherNode( middleNode );
        Uri subject = new Uri( getNodeUriOrNull( subjectNode ) );
        Uri predicate = new Uri( subjectRelationship.getType().name() );
        // the object hangs off the middle node along the same type
        Node objectNode = middleNode.getSingleRelationship(
            subjectRelationship.getType(),
            Direction.OUTGOING ).getEndNode();
        Value object = getValueForObjectNode( predicate.getUriAsString(),
            objectNode );
        // consumes one context from the inner iterator per statement
        Node contextNode = currentMiddleNodeContexts.next();
        Context context = new Context( getNodeUriOrNull( contextNode ) );
        return object instanceof Literal ?
            new CompleteStatement( subject, predicate, ( Literal ) object,
                context ) :
            new CompleteStatement( subject, predicate, ( Resource ) object,
                context );
    }
}
/**
 * Filters middle nodes down to those whose incoming relationship type
 * matches the statement's predicate URI.
 */
private class PredicateFilteredNodes
    extends FilteringIterable<Node>
{
    private final Statement statement;

    PredicateFilteredNodes( Statement statement, Iterable<Node> source )
    {
        super( source );
        this.statement = statement;
    }

    @Override
    protected boolean passes( Node middleNode )
    {
        // First incoming relationship -- presumably the subject->middle
        // one, mirroring the lookup done when statements are materialized.
        Relationship incoming = middleNode.getRelationships(
            Direction.INCOMING ).iterator().next();
        String predicateUri =
            ( ( Uri ) statement.getPredicate() ).getUriAsString();
        return incoming.getType().name().equals( predicateUri );
    }
}
/**
 * Keeps only subject-to-middle relationships whose literal object node
 * holds a value equal to the statement's object.
 */
private class LiteralFilteredRelationships
    extends FilteringIterable<Relationship>
{
    private final Statement statement;

    LiteralFilteredRelationships( Statement statement,
        Iterable<Relationship> source )
    {
        super( source );
        this.statement = statement;
    }

    @Override
    protected boolean passes( Relationship subjectToMiddleRel )
    {
        Node middleNode = subjectToMiddleRel.getEndNode();
        String predicateName = subjectToMiddleRel.getType().name();
        // Follow the same relationship type onwards to the literal node.
        Node literalNode = middleNode.getSingleRelationship(
            subjectToMiddleRel.getType(), Direction.OUTGOING ).getEndNode();
        Value objectValue = getLiteralValueForObjectNode(
            predicateName, literalNode );
        return objectValue.equals( statement.getObject() );
    }
}
/**
 * Maps each literal node back to its middle node by following the first
 * incoming relationship.
 */
private class LiteralToMiddleNodeIterable
    extends IterableWrapper<Node, Node>
{
    LiteralToMiddleNodeIterable( Iterable<Node> literalNodes )
    {
        super( literalNodes );
    }

    @Override
    protected Node underlyingObjectToObject( Node literalNode )
    {
        Iterator<Relationship> incoming = literalNode.getRelationships(
            Direction.INCOMING ).iterator();
        return incoming.next().getStartNode();
    }
}
} |
package global.template;
import static java.lang.Math.*;
import java.util.*;
/** The four cardinal directions of a square grid, listed clockwise. */
enum Direction4 {
    N, E, S, W; // TODO control name and order
    //NORTH, EAST, SOUTH, WEST;
    //UP, RIGHT, DOWN, LEFT;
    //TOP, RIGHT, BOTTOM, LEFT;

    /** Next direction clockwise (N -> E -> S -> W -> N). */
    Direction4 cw() {
        return rotate(1);
    }

    /** Direction pointing the opposite way. */
    Direction4 oposite() {
        return rotate(2);
    }

    /** Next direction counter-clockwise. */
    Direction4 ccw() {
        return rotate(3);
    }

    // Steps clockwise by the given amount, wrapping around the compass.
    private Direction4 rotate(int steps) {
        return values()[(ordinal() + steps) % 4];
    }
}
/** The six directions of a hexagonal grid, listed clockwise. */
enum Direction6 {
    NE, E, SE, SW, W, NW; // TODO control name and order

    /** Next direction clockwise. */
    Direction6 cw() {
        return rotate(1);
    }

    /** Direction pointing the opposite way. */
    Direction6 oposite() {
        return rotate(3);
    }

    /** Next direction counter-clockwise. */
    Direction6 ccw() {
        return rotate(5);
    }

    /** Minimal number of 60-degree turns between this direction and other (0..3). */
    int angle(Direction6 other) {
        int steps = abs(ordinal() - other.ordinal());
        return (steps > 3) ? 6 - steps : steps;
    }

    // Steps clockwise by the given amount, wrapping around the compass.
    private Direction6 rotate(int steps) {
        return values()[(ordinal() + steps) % 6];
    }
}
/** The eight compass directions of a square grid, listed clockwise. */
enum Direction8 {
    N, NE, E, SE, S, SW, W, NW; // TODO control name and order

    /** Next direction clockwise. */
    Direction8 cw() {
        return rotate(1);
    }

    /** Direction pointing the opposite way. */
    Direction8 oposite() {
        return rotate(4);
    }

    /** Next direction counter-clockwise. */
    Direction8 ccw() {
        return rotate(7);
    }

    /** Minimal number of 45-degree turns between this direction and other (0..4). */
    int angle(Direction8 other) {
        int steps = abs(ordinal() - other.ordinal());
        return (steps > 4) ? 8 - steps : steps;
    }

    // Steps clockwise by the given amount, wrapping around the compass.
    private Direction8 rotate(int steps) {
        return values()[(ordinal() + steps) % 8];
    }
}
/**
 * Immutable 2D grid coordinate: x = column, y = row, y growing downwards
 * (screen convention). Works for square grids (Direction4/Direction8) and
 * hexagonal grids stored as offset rows (Direction6).
 */
class Coord {
    final int x;
    final int y;

    Coord(int x, int y) {
        this.x = x;
        this.y = y;
    }

    /** Reads "x y" from the scanner. */
    Coord(Scanner in) {
        this(in.nextInt(), in.nextInt());
    }

    /** Neighbour cell one step away on a 4-direction square grid. */
    Coord add(Direction4 dir) {
        switch (dir) {
        case N: return new Coord(x, y - 1);
        case E: return new Coord(x + 1, y);
        case S: return new Coord(x, y + 1);
        case W: return new Coord(x - 1, y);
        default: throw new IllegalArgumentException("Invalid dir: " + dir);
        }
    }

    /**
     * Neighbour cell one step away on a hexagonal grid stored as offset
     * rows; even and odd rows use different x deltas because alternate
     * rows are shifted half a cell.
     * NOTE(review): the deltas match an "odd rows shifted right" layout
     * (odd-r offset) -- confirm against the game's grid rendering.
     */
    Coord add(Direction6 dir) {
        if ((y % 2) == 0) {
            // Even lines
            switch (dir) {
            case NE: return new Coord(x, y - 1);
            case E: return new Coord(x + 1, y);
            case SE: return new Coord(x, y + 1);
            case SW: return new Coord(x - 1, y + 1);
            case W: return new Coord(x - 1, y);
            case NW: return new Coord(x - 1, y - 1);
            default: throw new IllegalArgumentException("Invalid dir: " + dir);
            }
        } else {
            // Odd lines
            switch (dir) {
            case NE: return new Coord(x + 1, y - 1);
            case E: return new Coord(x + 1, y);
            case SE: return new Coord(x + 1, y + 1);
            case SW: return new Coord(x, y + 1);
            case W: return new Coord(x - 1, y);
            case NW: return new Coord(x, y - 1);
            default: throw new IllegalArgumentException("Invalid dir: " + dir);
            }
        }
    }

    /** Neighbour cell one step away on an 8-direction square grid. */
    Coord add(Direction8 dir) {
        switch (dir) {
        case N: return new Coord(x, y - 1);
        case NE: return new Coord(x + 1, y - 1);
        case E: return new Coord(x + 1, y);
        case SE: return new Coord(x + 1, y + 1);
        case S: return new Coord(x, y + 1);
        case SW: return new Coord(x - 1, y + 1);
        case W: return new Coord(x - 1, y);
        case NW: return new Coord(x - 1, y - 1);
        default: throw new IllegalArgumentException("Invalid dir: " + dir);
        }
    }

    // Manhattan distance (for 4 directions maps)
    int distanceMan(Coord other) {
        int deltaX = abs(x - other.x);
        int deltaY = abs(y - other.y);
        return deltaX + deltaY;
    }

    /** Converts this offset coordinate to cube coordinates (x + y + z == 0). */
    CubeCoord toCubeCoord() {
        int newX = x - (y - (y & 1)) / 2;
        int newZ = y;
        // The third axis is derived so the three always sum to zero.
        int newY = -(newX + newZ);
        return new CubeCoord(newX, newY, newZ);
    }

    // Hexagonal distance (for 6 direction maps)
    int distanceHexa(Coord other) {
        return toCubeCoord().distanceHexa(other.toCubeCoord());
    }

    // Chebyshev distance (for 8 directions maps)
    int distanceCheb(Coord other) {
        int deltaX = abs(x - other.x);
        int deltaY = abs(y - other.y);
        return max(deltaX, deltaY);
    }

    // Euclidean distance (for physics engine)
    double distanceEcl(Coord other) {
        int deltaX = x - other.x;
        int deltaY = y - other.y;
        return sqrt(((double)deltaX * deltaX) + ((double)deltaY * deltaY));
    }

    /**
     * Position reached by moving straight towards target, travelling at
     * most maxDist (Euclidean). Returns target itself when it is within
     * range; otherwise components are rounded to the nearest integer.
     */
    Coord moveTo(Coord target, double maxDist) {
        Coord newPos = target;
        double dist = distanceEcl(target);
        if (dist > maxDist) {
            double ratio = maxDist / dist;
            int newX = x + (int) rint((target.x - x) * ratio);
            int newY = y + (int) rint((target.y - y) * ratio);
            newPos = new Coord(newX, newY);
        }
        return newPos;
    }

    public int hashCode() {
        final int PRIME = 31;
        int result = 1;
        result = PRIME * result + x;
        result = PRIME * result + y;
        return result;
    }

    public boolean equals(Object obj) {
        if (this == obj) return true;
        if (obj == null) return false;
        if (getClass() != obj.getClass()) return false;
        Coord other = (Coord) obj;
        return (x == other.x) && (y == other.y);
    }

    public String toString() {
        //return x + " " + y; // TODO change if you use Coord in System.out
        return "[" + x + ", " + y + "]";
    }
}
/**
 * Cube coordinates for a hexagonal grid; the invariant x + y + z == 0
 * holds for every coordinate produced by Coord.toCubeCoord() and add().
 * Cube form makes hex distance a simple component-wise computation.
 */
class CubeCoord {
    final int x;
    final int y;
    final int z;

    public CubeCoord(int x, int y, int z) {
        this.x = x;
        this.y = y;
        this.z = z;
    }

    /** Converts back to offset row coordinates (inverse of Coord.toCubeCoord()). */
    Coord toOffsetCoord() {
        int newX = x + (z - (z & 1)) / 2;
        int newY = z;
        return new Coord(newX, newY);
    }

    /** Neighbour hex one step in the given direction (preserves the sum-zero invariant). */
    CubeCoord add(Direction6 dir) {
        switch (dir) {
        case NE: return new CubeCoord(x + 1, y, z - 1);
        case E: return new CubeCoord(x + 1, y - 1, z);
        case SE: return new CubeCoord(x, y - 1, z + 1);
        case SW: return new CubeCoord(x - 1, y, z + 1);
        case W: return new CubeCoord(x - 1, y + 1, z);
        case NW: return new CubeCoord(x, y + 1, z - 1);
        default: throw new IllegalArgumentException("Invalid dir: " + dir);
        }
    }

    /** Hex-grid distance: half the L1 distance between cube coordinates. */
    int distanceHexa(CubeCoord other) {
        int deltaX = abs(x - other.x);
        int deltaY = abs(y - other.y);
        int deltaZ = abs(z - other.z);
        return (deltaX + deltaY + deltaZ) / 2;
    }

    public int hashCode() {
        final int PRIME = 31;
        int result = 1;
        result = PRIME * result + x;
        result = PRIME * result + y;
        // FIX: the original mixed in x a second time instead of z, so z
        // never contributed to the hash (more collisions; clearly a typo).
        result = PRIME * result + z;
        return result;
    }

    public boolean equals(Object obj) {
        if (this == obj) return true;
        if (obj == null) return false;
        if (getClass() != obj.getClass()) return false;
        CubeCoord other = (CubeCoord) obj;
        return (x == other.x) && (y == other.y) && (z == other.z);
    }

    public String toString() {
        return "[" + x + ", " + y + ", " + z + "]";
    }
}
/** A line segment between two integer grid coordinates. */
class Segment {
    final Coord from;
    final Coord to;

    Segment(Coord from, Coord to) {
        this.from = from;
        this.to = to;
    }

    /** True when pos lies inside the axis-aligned bounding box of this segment. */
    boolean inBoundingBox(Coord pos) {
        boolean insideX = min(from.x, to.x) <= pos.x && pos.x <= max(from.x, to.x);
        boolean insideY = min(from.y, to.y) <= pos.y && pos.y <= max(from.y, to.y);
        return insideX && insideY;
    }

    /**
     * Orientation of the ordered triplet (from, to, pos):
     * 1 = clockwise, -1 = counter-clockwise, 0 = colinear.
     */
    int orientation(Coord pos) {
        int cross = (to.y - from.y) * (pos.x - to.x) - (to.x - from.x) * (pos.y - to.y);
        if (cross > 0) {
            return 1;
        }
        return (cross < 0) ? -1 : 0;
    }

    /** True when this segment and other intersect (colinear touching included). */
    boolean intersect(Segment other) {
        int o1 = orientation(other.from);
        int o2 = orientation(other.to);
        int o3 = other.orientation(from);
        int o4 = other.orientation(to);
        if (o1 != o2 && o3 != o4) {
            return true; // general (properly crossing) case
        }
        // Special cases: a colinear endpoint lying on the other segment.
        return (o1 == 0 && inBoundingBox(other.from))
            || (o2 == 0 && inBoundingBox(other.to))
            || (o3 == 0 && other.inBoundingBox(from))
            || (o4 == 0 && other.inBoundingBox(to));
    }

    public String toString() {
        return from + "-" + to;
    }
}
/** A connected set of walkable cells, as produced by Board's flood fill. */
class Zone {
    final Set<Coord> coords = new HashSet<>();

    /** Number of cells in this zone. */
    int size() {
        return coords.size();
    }
}
/**
 * A route across the board: the moves to take plus the cells visited.
 * As built by Board.path(), coords also includes the start cell, so it
 * holds one more element than directions.
 */
class Path {
    /** Sentinel returned when no route exists. */
    static final Path NO_PATH = new Path();
    final Deque<Direction4> directions = new ArrayDeque<>();
    final Deque<Coord> coords = new ArrayDeque<>();

    /** Number of moves in this path. */
    int size() {
        return directions.size();
    }
}
class Board {
final int width;
final int height;
private final StringBuilder[] cells;
private final Zone[][] zones;
private Board(int width, int height) {
this.width = width;
this.height = height;
cells = new StringBuilder[height];
zones = new Zone[height][];
for (int rowIdx = 0; rowIdx < height; rowIdx++) {
zones[rowIdx] = new Zone[width];
}
}
Board(Scanner in) {
this(in.nextInt(), in.nextInt());
// TODO split here is content is not right after size
for (int rowIdx = 0; rowIdx < height; rowIdx++) {
String row = in.next(); // TODO use in.nextLine() if the line contains spaces
cells[rowIdx] = new StringBuilder(row);
}
}
boolean cellExist(Coord pos) {
return ((pos.y >= 0) && (pos.y < height) && (pos.x >= 0) && (pos.x < width));
}
private char getCellAt(int row, int col) {
return cells[row].charAt(col);
}
char getCellAt(Coord pos) {
return getCellAt(pos.y, pos.x);
}
void setCellAt(Coord pos, char val) {
cells[pos.y].setCharAt(pos.x, val);
}
Coord findFirst(char target) {
for (int colIdx = 0; colIdx < width; colIdx++) {
for (int rowIdx = 0; rowIdx < height; rowIdx++) {
char val = getCellAt(rowIdx, colIdx);
if (val == target) {
return new Coord(colIdx, rowIdx);
}
}
}
return null;
}
int count(char target) {
int count = 0;
for (int colIdx = 0; colIdx < width; colIdx++) {
for (int rowIdx = 0; rowIdx < height; rowIdx++) {
char val = getCellAt(rowIdx, colIdx);
if (val == target) {
count++;
}
}
}
return count;
}
boolean canWalkOn(Coord pos) {
return (cellExist(pos) && (getCellAt(pos) != '#')); // TODO modify depending of the game
}
void clearZones() {
for (int rowIdx = 0; rowIdx < height; rowIdx++) {
for (int colIdx = 0; colIdx < width; colIdx++) {
zones[rowIdx][colIdx] = null;
}
}
}
Zone getZoneAt(Coord pos) {
if (!canWalkOn(pos)) {
return new Zone();
}
Zone zone = zones[pos.y][pos.x];
if (zone != null) {
return zone;
}
return floodFill(pos);
}
private Zone floodFill(Coord start) {
Zone zone = new Zone();
Queue<Coord> toFill = new ArrayDeque<>();
toFill.add(start);
while (!toFill.isEmpty()) {
Coord pos = toFill.poll();
if (zones[pos.y][pos.x] == null) {
zones[pos.y][pos.x] = zone;
zone.coords.add(pos);
for (Direction4 dir : Direction4.values()) { // TODO modify depending of the game
Coord nextPos = pos.add(dir);
if (canWalkOn(nextPos)) {
// Note: queue may contains duplicates
toFill.add(nextPos);
}
}
}
}
return zone;
}
static class PathData {
int pathDist = Integer.MAX_VALUE;
Direction4 pathPrev;
}
private static PathData getPathData(Map<Coord, PathData> pathMap, Coord pos) {
PathData pathData = pathMap.get(pos);
if (pathData == null) {
pathData = new PathData();
pathMap.put(pos, pathData);
}
pathMap.put(pos, pathData);
return pathData;
}
private Path path(Coord start, Coord end, Queue<Coord> toUpdate) {
Map<Coord, PathData> pathMap = new HashMap<>();
PathData startData = getPathData(pathMap, start);
startData.pathDist = 0;
PathData endData = getPathData(pathMap, end);
toUpdate.add(start);
while (!toUpdate.isEmpty()) {
Coord curPos = toUpdate.poll();
PathData pathData = pathMap.get(curPos);
for (Direction4 dir : Direction4.values()) { // TODO modify depending of the game
Coord nextPos = curPos.add(dir);
if (canWalkOn(nextPos)) { // TODO modify depending of the game
PathData nextData = getPathData(pathMap, nextPos);
int dist = pathData.pathDist + 1;
if ((dist < endData.pathDist) && (nextData.pathDist > dist)) {
nextData.pathDist = dist;
nextData.pathPrev = dir;
toUpdate.add(nextPos); // queue may contains duplicates
}
}
}
}
if (endData.pathDist == Integer.MAX_VALUE) {
return Path.NO_PATH;
}
Path path = new Path();
Coord pos = end;
while (!pos.equals(start)) {
PathData pathData = pathMap.get(pos);
Direction4 dir = pathData.pathPrev;
path.directions.addFirst(dir);
path.coords.addFirst(pos);
pos = pos.add(dir.oposite());
}
path.coords.addFirst(pos);
return path;
}
Path pathDijkstra(Coord begin, Coord end) {
return path(begin, end, new ArrayDeque<>());
}
Path pathAstar(Coord begin, Coord end, Comparator<Coord> heuristic) {
return path(begin, end, new PriorityQueue<>(heuristic));
}
void debugPrint() {
for (StringBuilder row : cells) {
System.err.println(row);
}
}
} |
package org.neo4j.rdf.store;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import org.neo4j.api.core.Direction;
import org.neo4j.api.core.NeoService;
import org.neo4j.api.core.Node;
import org.neo4j.api.core.Relationship;
import org.neo4j.api.core.RelationshipType;
import org.neo4j.api.core.Transaction;
import org.neo4j.neometa.structure.MetaStructure;
import org.neo4j.rdf.model.CompleteStatement;
import org.neo4j.rdf.model.Context;
import org.neo4j.rdf.model.Literal;
import org.neo4j.rdf.model.Resource;
import org.neo4j.rdf.model.Statement;
import org.neo4j.rdf.model.Uri;
import org.neo4j.rdf.model.Value;
import org.neo4j.rdf.model.Wildcard;
import org.neo4j.rdf.model.WildcardStatement;
import org.neo4j.rdf.store.representation.AbstractNode;
import org.neo4j.rdf.store.representation.standard.AbstractUriBasedExecutor;
import org.neo4j.rdf.store.representation.standard.RelationshipTypeImpl;
import org.neo4j.rdf.store.representation.standard.VerboseQuadExecutor;
import org.neo4j.rdf.store.representation.standard.VerboseQuadStrategy;
import org.neo4j.rdf.store.representation.standard.VerboseQuadValidatable;
import org.neo4j.rdf.validation.Validatable;
import org.neo4j.util.index.IndexService;
public class VerboseQuadStore extends RdfStoreImpl
{
private final MetaStructure meta;
public VerboseQuadStore( NeoService neo, IndexService indexer )
{
this( neo, indexer, null );
}
public VerboseQuadStore( NeoService neo, IndexService indexer,
MetaStructure meta )
{
super( neo, new VerboseQuadStrategy(
new VerboseQuadExecutor( neo, indexer, meta ), meta ) );
this.meta = meta;
System.out.println( "======> VerboseQuadStore: I'm initialized! ");
}
protected MetaStructure meta()
{
return this.meta;
}
@Override
protected VerboseQuadStrategy getRepresentationStrategy()
{
return ( VerboseQuadStrategy ) super.getRepresentationStrategy();
}
@Override
public Iterable<CompleteStatement> getStatements(
WildcardStatement statement,
boolean includeInferredStatements )
{
Transaction tx = neo().beginTx();
try
{
if ( includeInferredStatements )
{
throw new UnsupportedOperationException( "We currently not " +
"support getStatements() with reasoning enabled" );
}
Iterable<CompleteStatement> result = null;
if ( wildcardPattern( statement, false, false, true ) )
{
result = handleSubjectPredicateWildcard( statement );
}
else if ( wildcardPattern( statement, false, true, true ) )
{
result = handleSubjectWildcardWildcard( statement );
}
else if ( wildcardPattern( statement, false, true, false ) )
{
result = handleSubjectWilcardObject( statement );
}
else if ( wildcardPattern( statement, true, true, false ) )
{
result = handleWildcardWildcardObject( statement );
}
else if ( wildcardPattern( statement, true, false, false ) )
{
result = handleWildcardPredicateObject( statement );
}
else if ( wildcardPattern( statement, false, false, false ) )
{
result = handleSubjectPredicateObject( statement );
}
else if ( wildcardPattern( statement, true, true, true ) )
{
result = handleWildcardWildcardWildcard( statement );
}
else
{
result = super.getStatements( statement,
includeInferredStatements );
}
tx.success();
return result;
}
finally
{
tx.finish();
}
}
private Node lookupNode( Value uri )
{
return getRepresentationStrategy().getExecutor().
lookupNode( new AbstractNode( uri ) );
}
private Iterable<CompleteStatement> handleWildcardWildcardWildcard(
Statement statement )
{
if ( statement.getContext().isWildcard() )
{
throw new RuntimeException( "We can't handle ?S ?P ?O ?G" );
}
Context context = ( Context ) statement.getContext();
Node contextNode = lookupNode( context );
if ( contextNode == null )
{
return new ArrayList<CompleteStatement>();
}
List<CompleteStatement> statementList =
new LinkedList<CompleteStatement>();
for ( Relationship contextRelationship : contextNode.getRelationships(
VerboseQuadStrategy.RelTypes.IN_CONTEXT, Direction.INCOMING ) )
{
Node middleNode = contextRelationship.getStartNode();
Relationship subjectRelationship = findSubjectRelationship(
middleNode );
if ( subjectRelationship == null )
{
throw new RuntimeException( "Error, no subject for " +
middleNode );
}
Node subjectNode = subjectRelationship.getOtherNode( middleNode );
Node objectNode = getObjectNode(
middleNode, subjectRelationship.getType().name() );
Uri subject = newValidatable( subjectNode ).getUri();
Uri predicate = new Uri( subjectRelationship.getType().name() );
Value object = getValueForObjectNode( predicate.getUriAsString(),
objectNode );
if ( object instanceof Literal )
{
statementList.add( new CompleteStatement(
subject, predicate, ( Literal ) object, context ) );
}
else
{
statementList.add( new CompleteStatement(
subject, predicate, ( Uri ) object, context ) );
}
}
return statementList;
}
private Relationship findSubjectRelationship( Node middleNode )
{
for ( Relationship relationship : middleNode.getRelationships(
Direction.INCOMING ) )
{
if ( !relationship.getType().name().equals( VerboseQuadStrategy.
RelTypes.IN_CONTEXT.name() ) )
{
return relationship;
}
}
return null;
}
private Iterable<CompleteStatement> handleSubjectWilcardObject(
Statement statement )
{
ArrayList<CompleteStatement> statementList =
new ArrayList<CompleteStatement>();
Uri subject = ( Uri ) statement.getSubject();
Node subjectNode = lookupNode( subject );
if ( subjectNode == null )
{
return statementList;
}
VerboseQuadValidatable validatable = newValidatable( subjectNode );
if ( statement.getObject() instanceof Uri )
{
Uri object = ( Uri ) statement.getObject();
Node objectNode = lookupNode( object );
if ( objectNode == null )
{
return statementList;
}
for ( String key : validatable.getComplexPropertyKeys() )
{
for ( Validatable otherObject : validatable.complexProperties(
key ) )
{
Node middleNode = otherObject.getUnderlyingNode().
getSingleRelationship( new RelationshipTypeImpl( key ),
Direction.INCOMING ).getStartNode();
if ( otherObject.getUnderlyingNode().equals( objectNode ) )
{
addIfInContext( statement, statementList,
middleNode, key );
}
}
}
}
else
{
Literal literal = ( Literal ) statement.getObject();
Object value = literal.getValue();
for ( String key : validatable.getSimplePropertyKeys() )
{
for ( Node middleNode : validatable.getPropertiesAsMiddleNodes(
key ) )
{
if ( literalMatches( middleNode, key, value ) )
{
addIfInContext( statement, statementList, middleNode,
key );
}
}
}
}
return statementList;
}
private Iterable<CompleteStatement> handleSubjectPredicateObject(
Statement statement )
{
ArrayList<CompleteStatement> statementList =
new ArrayList<CompleteStatement>();
Uri subject = ( Uri ) statement.getSubject();
Uri predicate = ( Uri ) statement.getPredicate();
RelationshipType predicateType = new RelationshipTypeImpl(
predicate.getUriAsString() );
Node subjectNode = lookupNode( subject );
if ( subjectNode == null )
{
return statementList;
}
VerboseQuadValidatable validatable = newValidatable( subjectNode );
if ( statement.getObject() instanceof Uri )
{
Node objectNode = lookupNode( statement.getObject() );
Relationship objectToMiddle = objectNode.getSingleRelationship(
predicateType, Direction.INCOMING );
if ( objectToMiddle == null )
{
return statementList;
}
Node middleNode = objectNode.getSingleRelationship(
predicateType, Direction.INCOMING ).getStartNode();
if ( objectNode == null )
{
return new ArrayList<CompleteStatement>();
}
for ( Validatable complexProperty : validatable.complexProperties(
predicate.getUriAsString() ) )
{
if ( complexProperty.getUnderlyingNode().equals( objectNode ) )
{
addIfInContext( statement, statementList, middleNode,
predicate.getUriAsString() );
}
}
}
else
{
Object value = ( ( Literal ) statement.getObject() ).getValue();
for ( Node middleNode : validatable.getPropertiesAsMiddleNodes(
predicate.getUriAsString() ) )
{
if ( literalMatches( middleNode, predicate.getUriAsString(),
value ) )
{
addIfInContext( statement, statementList, middleNode,
predicate.getUriAsString() );
}
}
}
return statementList;
}
private boolean literalMatches( Node middleNode, String key, Object value )
{
Node literalNode = middleNode.getSingleRelationship(
new RelationshipTypeImpl( key ), Direction.OUTGOING ).getEndNode();
Object literalValue = literalNode.getProperty( key );
return value.equals( literalValue );
}
private Node getObjectNode( Node middleNode, String key )
{
return middleNode.getSingleRelationship(
new RelationshipTypeImpl( key ), Direction.OUTGOING ).getEndNode();
}
private Iterable<CompleteStatement> handleWildcardPredicateObject(
WildcardStatement statement )
{
return handleWildcardWildcardObject( statement );
}
private VerboseQuadValidatable newValidatable( Node subjectNode )
{
return new VerboseQuadValidatable( neo(), subjectNode, meta() );
}
private void buildStatementsOfObjectHits( Statement statement,
List<CompleteStatement> statementList, Literal literal )
{
VerboseQuadExecutor executor = ( VerboseQuadExecutor )
getRepresentationStrategy().getExecutor();
Object value = literal.getValue();
String predicate = statement.getPredicate() instanceof Wildcard ?
null : ( ( Uri ) statement.getPredicate() ).getUriAsString();
for ( Node literalNode : executor.findLiteralNodes( value ) )
{
for ( Relationship rel : literalNode.getRelationships(
Direction.INCOMING ) )
{
Node middleNode = rel.getStartNode();
Uri thePredicate = new Uri( rel.getType().name() );
if ( predicate != null &&
!thePredicate.getUriAsString().equals( predicate ) )
{
continue;
}
addIfInContext( statement, statementList, middleNode,
thePredicate.getUriAsString() );
}
}
}
private void buildStatementsOfObjectHits( Statement statement,
List<CompleteStatement> statementList, Uri object )
{
Uri objectUri = ( Uri ) statement.getObject();
Node objectNode = getRepresentationStrategy().getExecutor().
lookupNode( new AbstractNode( objectUri ) );
if ( objectNode == null )
{
return;
}
String predicate = statement.getPredicate() instanceof Wildcard ?
null : ( ( Uri ) statement.getPredicate() ).getUriAsString();
for ( Relationship rel : objectNode.getRelationships(
Direction.INCOMING ) )
{
Node middleNode = rel.getStartNode();
Uri thePredicate = new Uri( rel.getType().name() );
if ( predicate != null &&
!thePredicate.getUriAsString().equals( predicate ) )
{
continue;
}
addIfInContext( statement, statementList, middleNode,
thePredicate.getUriAsString() );
}
}
private RelationshipType relType( final String name )
{
return new RelationshipType()
{
public String name()
{
return name;
}
};
}
private static Context getContextForUri( String contextUri )
{
return isNull( contextUri ) ? null : new Context( contextUri );
}
private static boolean isNull( String uri )
{
return uri == null || uri.equals( "null" );
}
private Set<Context> getExistingContexts( Node middleNode )
{
Set<Context> set = new HashSet<Context>();
for ( Relationship relationship : middleNode.getRelationships(
VerboseQuadStrategy.RelTypes.IN_CONTEXT,
Direction.OUTGOING ) )
{
Node contextNode = relationship.getEndNode();
String uri = ( String ) contextNode.getProperty(
AbstractUriBasedExecutor.URI_PROPERTY_KEY );
set.add( getContextForUri( uri ) );
}
return set;
}
private void addIfInContext( Statement statement,
List<CompleteStatement> statementList, Node middleNode,
String predicate )
{
Relationship rel = middleNode.getSingleRelationship(
relType( predicate ), Direction.INCOMING );
Node subjectNode = rel.getStartNode();
Validatable validatable = newValidatable( subjectNode );
Uri subject = validatable.getUri();
Set<Context> existingContexts = getExistingContexts( middleNode );
Set<Context> contextsToAdd = new HashSet<Context>();
if ( statement.getContext() instanceof Wildcard )
{
contextsToAdd = existingContexts;
}
else
{
if ( existingContexts.contains( statement.getContext() ) )
{
contextsToAdd.add( ( Context ) statement.getContext() );
}
}
for ( Context context : contextsToAdd )
{
Node objectNode = middleNode.getSingleRelationship(
relType( predicate ), Direction.OUTGOING ).getEndNode();
Value object = getValueForObjectNode( predicate, objectNode );
if ( object instanceof Resource )
{
statementList.add( new CompleteStatement( subject,
new Uri( predicate ), ( Resource ) object, context ) );
}
else
{
statementList.add( new CompleteStatement( subject,
new Uri( predicate ), ( Literal ) object, context ) );
}
}
}
private Value getValueForObjectNode( String predicate, Node objectNode )
{
String uri = ( String ) objectNode.getProperty(
AbstractUriBasedExecutor.URI_PROPERTY_KEY, null );
if ( uri != null )
{
return new Uri( uri );
}
else
{
Object value = objectNode.getProperty( predicate );
String datatype = ( String ) objectNode.getProperty(
VerboseQuadExecutor.LITERAL_DATATYPE_KEY, null );
String language = ( String ) objectNode.getProperty(
VerboseQuadExecutor.LITERAL_LANGUAGE_KEY, null );
return new Literal( value, datatype == null ? null :
new Uri( datatype ), language );
}
}
private Iterable<CompleteStatement> handleWildcardWildcardObject(
WildcardStatement statement )
{
List<CompleteStatement> statementList =
new ArrayList<CompleteStatement>();
if ( statement.getObject() instanceof Literal )
{
buildStatementsOfObjectHits( statement, statementList,
( Literal ) statement.getObject() );
}
else
{
buildStatementsOfObjectHits( statement, statementList,
( Uri ) statement.getObject() );
}
return statementList;
}
private Iterable<CompleteStatement> handleSubjectPredicateWildcard(
WildcardStatement statement )
{
Uri subject = ( Uri ) statement.getSubject();
Uri predicate = ( Uri ) statement.getPredicate();
AbstractNode abstractSubjectNode = new AbstractNode( subject );
Node subjectNode = getRepresentationStrategy().getExecutor().
lookupNode( abstractSubjectNode );
if ( subjectNode == null )
{
return new ArrayList<CompleteStatement>();
}
VerboseQuadValidatable validatableInstance =
newValidatable( subjectNode );
List<CompleteStatement> statementList =
new LinkedList<CompleteStatement>();
addObjects( statement, statementList, subject, predicate, subjectNode,
validatableInstance );
return statementList;
}
private void addObjects( Statement statement,
List<CompleteStatement> statementList,
Uri subject, Uri predicate, Node subjectNode,
VerboseQuadValidatable validatableInstance )
{
Node[] middleNodes = validatableInstance.
getPropertiesAsMiddleNodes( predicate.getUriAsString() );
for ( Node middleNode : middleNodes )
{
addIfInContext( statement, statementList, middleNode,
predicate.getUriAsString() );
}
}
private void addObjects( Statement statement,
List<CompleteStatement> statementList,
Uri subject, Node subjectNode, VerboseQuadValidatable instance )
{
for ( String predicate : instance.getAllPropertyKeys() )
{
addObjects( statement, statementList, subject, new Uri( predicate ),
subjectNode, instance );
}
// for ( MetaStructureClass cls : validatableInstance.getClasses() )
// statementList.add( new CompleteStatement( subject, new Uri(
// AbstractUriBasedExecutor.RDF_TYPE_URI ), new Uri(
// cls.getName() ) ) );
}
private Iterable<CompleteStatement> handleSubjectWildcardWildcard(
WildcardStatement statement )
{
Uri subject = ( Uri ) statement.getSubject();
AbstractNode abstractSubjectNode = new AbstractNode( subject );
Node subjectNode = getRepresentationStrategy().getExecutor().
lookupNode( abstractSubjectNode );
if ( subjectNode == null )
{
return new ArrayList<CompleteStatement>();
}
VerboseQuadValidatable validatableInstance =
newValidatable( subjectNode );
List<CompleteStatement> statementList =
new ArrayList<CompleteStatement>();
addObjects( statement, statementList, subject,
subjectNode, validatableInstance );
return statementList;
}
} |
package hello;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

import torrent.Premiumize;
import torrent.Torrent;
import torrent.TorrentFile;

import utilities.HttpHelper;
import utilities.PropertiesHelper;
import utilities.StreamGobbler;
/**
 * Spring component that periodically polls Premiumize for finished remote
 * torrents and hands their files to rclone for transfer to Google Drive.
 */
@Component
public class DownloadMonitor {
    // Poll interval of the scheduled check, in seconds.
    private static final int SECONDS_BETWEEN_POLLING = 30;
    private static final Logger log = LoggerFactory.getLogger(DownloadMonitor.class);
    // Prevents a new download from starting while one is still running.
    // NOTE(review): @Scheduled with fixedRate may overlap long runs on some
    // configurations; this flag is not volatile -- confirm single-threaded
    // scheduler use.
    private boolean isDownloadInProgress = false;
    private Premiumize premiumize = new Premiumize();
/**
 * Scheduled entry point: refreshes the Premiumize client and, unless a
 * download is already running, starts the first downloadable torrent.
 */
@Scheduled(fixedRate = SECONDS_BETWEEN_POLLING * 1000)
public void checkForDownloadableTorrents() {
    log.info("checkForDownloadableTorrents()");
    this.premiumize = new Premiumize();
    if (isDownloadInProgress) {
        return; // guard clause instead of the original nested if
    }
    checkForDownloadbleTorrentsAndDownloadTheFirst();
}
/**
 * Downloads the first remote torrent that is ready, then deletes it from
 * Premiumize. Single-file torrents go straight to the rclone dir; multi
 * file torrents land in a "multipart/<torrent name>" subfolder.
 * FIX: isDownloadInProgress is now reset in a finally block -- previously
 * an exception mid-download left it stuck at true, permanently blocking
 * every future scheduled poll.
 */
private void checkForDownloadbleTorrentsAndDownloadTheFirst() {
    ArrayList<Torrent> remoteTorrents = premiumize.getRemoteTorrents();
    for (Torrent remoteTorrent : remoteTorrents) {
        if (!checkIfTorrentCanBeDownloaded(remoteTorrent)) {
            continue;
        }
        isDownloadInProgress = true;
        try {
            if (premiumize.isSingleFileDownload(remoteTorrent)) {
                String fileURLFromTorrent = premiumize.getMainFileURLFromTorrent(remoteTorrent);
                rcloneDownloadFileToGdrive(fileURLFromTorrent,
                        PropertiesHelper.getProperty("rclonedir") + "/" + remoteTorrent.name
                                + extractFileEndingFromUrl(fileURLFromTorrent));
            } else {
                // Multi-file torrent: download every file.
                // TODO check filesize to get rid of samples and NFO files?
                List<TorrentFile> filesFromTorrent = premiumize.getFilesFromTorrent(remoteTorrent);
                for (TorrentFile torrentFile : filesFromTorrent) {
                    rcloneDownloadFileToGdrive(torrentFile.url,
                            PropertiesHelper.getProperty("rclonedir") + "/multipart/"
                                    + remoteTorrent.name + "/" + extractFileNameFromUrl(torrentFile.url));
                }
            }
            // Cleanup afterwards: both branches delete the remote torrent.
            premiumize.delete(remoteTorrent);
        } finally {
            isDownloadInProgress = false;
        }
        // Only the first downloadable torrent is handled per invocation
        // (replaces the original returnToMonitor flag).
        break;
    }
    // NOTE(review): dead localPath/addFilenameIfNotYetPresent computations
    // (used only by the commented-out downloadFile path) were removed --
    // verify addFilenameIfNotYetPresent has no side effects.
}
private String extractFileNameFromUrl(String fileURLFromTorrent) {
Pattern pattern = Pattern.compile("([\\w.%\\-]+)$");
String foundMatch = null;
Matcher matcher = pattern.matcher(fileURLFromTorrent);
while (matcher.find()) {
foundMatch = matcher.group();
}
return foundMatch;
}
private String extractFileEndingFromUrl(String fileURLFromTorrent) {
Pattern pattern = Pattern.compile("[A-Za-z0-9]+$");
String foundMatch = null;
Matcher matcher = pattern.matcher(fileURLFromTorrent);
while (matcher.find()) {
foundMatch = matcher.group();
}
return foundMatch;
}
private void rcloneDownloadFileToGdrive(String fileURLFromTorrent, String destinationPath) {
log.info("About to download:" + fileURLFromTorrent + "\nto: " + destinationPath);
ProcessBuilder builder = new ProcessBuilder();
builder.command("rclone", "copyurl", fileURLFromTorrent, destinationPath);
builder.directory(new File(System.getProperty("user.home")));
Process process = null;
int exitCode = -1;
try {
process = builder.start();
StreamGobbler streamGobbler =
new StreamGobbler(process.getInputStream(), System.out::println);
Executors.newSingleThreadExecutor().submit(streamGobbler);
exitCode = process.waitFor();
} catch (IOException | InterruptedException e) {
log.error(e.getMessage());
e.printStackTrace();
}
assert exitCode == 0;
log.info("Download Successfull:" + fileURLFromTorrent + "\nto: " + destinationPath);
}
private void downloadFile(String fileURLFromTorrent, String localPath) throws IOException {
log.info("About to download:" + fileURLFromTorrent + "\nto: " + localPath);
HttpHelper.downloadFileToPath(fileURLFromTorrent, localPath);
}
private String addFilenameIfNotYetPresent(String name, String mainFileURLFromTorrent) {
if (name.matches(".+[.].*]")) {
return "";
} else {
return mainFileURLFromTorrent.substring(mainFileURLFromTorrent.lastIndexOf("/"));
}
}
private boolean createDownloadFolderIfNotExists(Torrent remoteTorrent) {
if (remoteTorrent.name.matches(".+[.].*]")) {
return new File(PropertiesHelper.getProperty("downloaddir")).mkdirs();
} else {
return new File(PropertiesHelper.getProperty("downloaddir") + remoteTorrent.name).mkdirs();
}
}
private boolean checkIfTorrentCanBeDownloaded(Torrent remoteTorrent) {
boolean remoteStatusIsFinished = remoteTorrent.status.contains("finished") || remoteTorrent.status.contains("seeding");
boolean isAlreadyDownloaded = new File("./downloads/" + remoteTorrent.name).exists();
return remoteStatusIsFinished && !isAlreadyDownloaded;
}
} |
package org.neo4j.util.index;
import org.neo4j.api.core.NeoService;
import org.neo4j.api.core.Node;
import org.neo4j.util.btree.KeyEntry;
/**
* A "multi" index implementation using {@link org.neo4j.util.btree.BTree BTree}
* that can index multiple nodes per key. They key is checked for equality
* using both <CODE>hashCode</CODE> and <CODE>equal</CODE> methods.
* <p>
* Note: this implementation is not thread safe (yet).
*/
// not thread safe yet
public class MultiValueIndex extends AbstractIndex
{
public MultiValueIndex( String name, Node underlyingNode, NeoService neo )
{
super( name, underlyingNode, neo );
}
@Override
protected void addOrReplace( KeyEntry entry, long nodeId )
{
Object value = entry.getValue();
if ( value.getClass().isArray() )
{
long[] values = (long[]) value;
long[] newValues = new long[values.length + 1];
boolean addNewValues = true;
for ( int i = 0; i < values.length; i++ )
{
if ( values[i] == nodeId )
{
addNewValues = false;
break;
}
newValues[i] = values[i];
}
if ( addNewValues )
{
newValues[newValues.length - 1] = nodeId;
entry.setValue( newValues );
}
}
else
{
long currentId = (Long) value;
if ( currentId != nodeId )
{
long[] newValues = new long[2];
newValues[0] = currentId;
newValues[1] = nodeId;
entry.setValue( newValues );
}
}
}
@Override
protected void addOrReplace( Node node, long nodeId )
{
Object value = node.getProperty( INDEX_VALUES );
if ( value.getClass().isArray() )
{
long[] values = (long[]) value;
long[] newValues = new long[values.length + 1];
boolean addNewValues = true;
for ( int i = 0; i < values.length; i++ )
{
if ( values[i] == nodeId )
{
addNewValues = false;
break;
}
newValues[i] = values[i];
}
if ( addNewValues )
{
newValues[newValues.length - 1] = nodeId;
node.setProperty( INDEX_VALUES, newValues );
}
}
else
{
long currentId = (Long) value;
if ( currentId != nodeId )
{
long[] newValues = new long[2];
newValues[0] = currentId;
newValues[1] = nodeId;
node.setProperty( INDEX_VALUES, newValues );
}
}
}
@Override
protected boolean removeAllOrOne( Node node, long nodeId )
{
Object value = node.getProperty( INDEX_VALUES );
if ( value.getClass().isArray() )
{
long[] values = (long[]) value;
if ( values.length == 1 )
{
if ( values[0] == nodeId )
{
return true;
}
return false;
}
long[] newValues = new long[values.length - 1];
int j = 0;
for ( int i = 0; i < values.length; i++ )
{
if ( values[i] != nodeId )
{
newValues[j++] = values[i];
}
}
node.setProperty( INDEX_VALUES, newValues );
return false;
}
long currentId = (Long) value;
if ( currentId == nodeId )
{
return true;
}
return false;
}
@Override
protected boolean removeAllOrOne( KeyEntry entry, long nodeId )
{
Object value = entry.getValue();
if ( value.getClass().isArray() )
{
long[] values = (long[]) value;
if ( values.length == 1 )
{
if ( values[0] == nodeId )
{
return true;
}
return false;
}
long[] newValues = new long[values.length - 1];
int j = 0;
for ( int i = 0; i < values.length; i++ )
{
if ( values[i] != nodeId )
{
newValues[j++] = values[i];
}
}
entry.setValue( newValues );
return false;
}
long currentId = (Long) value;
if ( currentId == nodeId )
{
return true;
}
return false;
}
@Override
protected long[] getValues( KeyEntry entry )
{
Object value = entry.getValue();
if ( value.getClass().isArray() )
{
return (long[]) value;
}
long values[] = new long[1];
values[0] = (Long) value;
return values;
}
@Override
protected long[] getValues( Node node )
{
Object value = node.getProperty( INDEX_VALUES );
if ( value.getClass().isArray() )
{
return (long[]) value;
}
long values[] = new long[1];
values[0] = (Long) value;
return values;
}
@Override
protected String getIndexType()
{
return "multi";
}
@Override
protected long getSingleValue( KeyEntry entry )
{
Object value = entry.getValue();
if ( value.getClass().isArray() )
{
long[] ids = (long[]) value;
if ( ids.length > 1 )
{
throw new RuntimeException( "Multiple values found" );
}
return ids[0];
}
return (Long) value;
}
@Override
protected long getSingleValue( Node entry )
{
Object value = entry.getProperty( INDEX_VALUES );
if ( value.getClass().isArray() )
{
long[] ids = (long[]) value;
if ( ids.length > 1 )
{
throw new RuntimeException( "Multiple values found" );
}
return ids[0];
}
return (Long) value;
}
} |
package hello;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import torrent.Premiumize;
import torrent.Torrent;
import utilities.PropertiesHelper;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URL;
import java.nio.channels.Channels;
import java.nio.channels.ReadableByteChannel;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
@Component
public class DownloadMonitor {
private static final int SECONDS_BETWEEN_POLLING = 30;
private static final Logger log = LoggerFactory.getLogger(DownloadMonitor.class);
private static final SimpleDateFormat dateFormat = new SimpleDateFormat("HH:mm:ss");
private boolean isDownloadInProgress = false;
private Premiumize premiumize = new Premiumize();
@Scheduled(fixedRate = SECONDS_BETWEEN_POLLING * 1000)
public void checkForDownloadableTorrents() {
log.info("The time is now {%s}", dateFormat.format(new Date()));
this.premiumize = new Premiumize();
if (!isDownloadInProgress) {
checkForDownloadbleTorrentsAndDownloadTheFirst();
}
}
private void checkForDownloadbleTorrentsAndDownloadTheFirst() {
ArrayList<Torrent> remoteTorrents = premiumize.getRemoteTorrents();
for (Torrent remoteTorrent : remoteTorrents) {
if (checkIfTorrentCanBeDownloaded(remoteTorrent)) {
try {
isDownloadInProgress = true;
createDownloadFolderIfNotExists();
log.info("About to download:" + remoteTorrent.toString());
String mainFileURLFromTorrent = premiumize.getMainFileURLFromTorrent(remoteTorrent);
if (mainFileURLFromTorrent != null) {
URL website = new URL(mainFileURLFromTorrent);
ReadableByteChannel rbc = Channels.newChannel(website.openStream());
FileOutputStream fos = new FileOutputStream(PropertiesHelper.getProperty("downloaddir") + remoteTorrent.name);
fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE);
//FileUtils.copyURLToFile(, new File(PropertiesHelper.getProperty("downloaddir") + remoteTorrent.name));
} else {
log.info("sorry I'm not yet smart enough to handle multi file torrent downloads");
}
isDownloadInProgress = false;
} catch (IOException e) {
isDownloadInProgress = false;
e.printStackTrace();
}
}
}
}
private void createDownloadFolderIfNotExists() {
if (!new File(PropertiesHelper.getProperty("downloaddir")).exists()) {
new File(PropertiesHelper.getProperty("downloaddir")).mkdirs();
}
}
private boolean checkIfTorrentCanBeDownloaded(Torrent remoteTorrent) {
boolean remoteStatusIsFinished = remoteTorrent.status.contains("finished");
boolean isAlreadyDownloaded = new File("./downloads/" + remoteTorrent.name).exists();
return remoteStatusIsFinished && !isAlreadyDownloaded;
}
} |
package org.plumelib.util;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import org.checkerframework.checker.lock.qual.GuardedByUnknown;
import org.checkerframework.dataflow.qual.Pure;
/**
* Just like {@code FileWriter}, but adds a {@code getFileName()} method and overrides {@code
* toString()} to give the file name.
*/
public final class FileWriterWithName extends FileWriter {
/** The file being written by this. */
private final String fileName;
/**
* Constructs a FileWriterWithName object given a file name.
*
* @param fileName String The system-dependent filename.
* @throws IOException if the named file exists but is a directory rather than a regular file,
* does not exist but cannot be created, or cannot be opened for any other reason
*/
public FileWriterWithName(String fileName) throws IOException {
super(fileName);
this.fileName = fileName;
}
/**
* Constructs a FileWriterWithName object given a file name with a boolean indicating whether or
* not to append the data written.
*
* @param fileName String The system-dependent filename.
* @param append boolean if {@code true}, then data will be written to the end of the file rather
* than the beginning.
* @throws IOException if the named file exists but is a directory rather than a regular file,
* does not exist but cannot be created, or cannot be opened for any other reason
*/
public FileWriterWithName(String fileName, boolean append) throws IOException {
super(fileName, append);
this.fileName = fileName;
}
/**
* Constructs a FileWriterWithName object given a File object.
*
* @param file a File object to write to.
* @throws IOException if the file exists but is a directory rather than a regular file, does not
* exist but cannot be created, or cannot be opened for any other reason
*/
public FileWriterWithName(File file) throws IOException {
super(file);
this.fileName = file.getAbsolutePath();
}
/**
* Constructs a FileWriterWithName object given a File object. If the second argument is {@code
* true}, then bytes will be written to the end of the file rather than the beginning.
*
* @param file a File object to write to
* @param append if {@code true}, then bytes will be written to the end of the file rather than
* the beginning
* @throws IOException if the file exists but is a directory rather than a regular file, does not
* exist but cannot be created, or cannot be opened for any other reason
* @since 1.4
*/
public FileWriterWithName(File file, boolean append) throws IOException {
super(file, append);
this.fileName = file.getAbsolutePath();
}
/**
* Return the name of the file being written by this.
*
* @return the name of the file being written by this
*/
@SuppressWarnings("lock") // Lock Checker bug? fileName is final, no lock is needed to access it
@Pure
public String getFileName(@GuardedByUnknown FileWriterWithName this) {
return fileName;
}
/**
* Return the name of the file being written by this.
*
* @return the name of the file being written by this
*/
@Pure
@Override
public String toString(@GuardedByUnknown FileWriterWithName this) {
return getFileName();
}
} |
package hprose.net;
import hprose.io.ByteBufferStream;
import hprose.util.concurrent.Timer;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.ClosedChannelException;
import java.nio.channels.SelectionKey;
import java.nio.channels.Selector;
import java.nio.channels.SocketChannel;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
public final class Connection {
// Underlying non-blocking socket this connection reads from and writes to.
private final SocketChannel channel;
// Lifecycle/event callbacks supplied by the owner of this connection.
private final ConnectionHandler handler;
// Selection key for this channel; replaced on each (re-)registration.
private volatile SelectionKey key;
// Which timeout is currently armed; reported to handler.onTimeout on expiry.
private volatile TimeoutType timeoutType;
// Shared one-shot timer for all timeout kinds: on expiry it notifies the
// handler and then unconditionally closes the connection.
private final Timer timer = new Timer(new Runnable() {
public void run() {
try {
handler.onTimeout(Connection.this, timeoutType);
}
finally {
close();
}
}
});
// Incoming-byte accumulator; replaced by a larger buffer when a packet
// exceeds the current capacity (see receive()).
private ByteBuffer inbuf = ByteBufferStream.allocate(1024);
// Wire format: a 4-byte big-endian length header; when the length's sign bit
// is set, the header is 8 bytes and additionally carries a 4-byte packet id.
private int headerLength = 4;
// Payload length of the packet currently being parsed; -1 until the header
// has been read.
private int dataLength = -1;
// Packet id from the extended header, or null for the short header form.
private Integer id = null;
// Outgoing packet currently being written, if any.
private OutPacket packet = null;
// Packets queued for sending.
private final Queue<OutPacket> outqueue = new ConcurrentLinkedQueue<OutPacket>();
// Reactor that services this connection's write interest; set in connected().
private Reactor reactor = null;
/**
 * Wraps an (as yet unregistered) socket channel.
 */
public Connection(SocketChannel channel, ConnectionHandler handler) {
this.channel = channel;
this.handler = handler;
}
/**
 * Registers for OP_CONNECT on the given selector and arms the connect timeout.
 */
public final void connect(Selector selector) throws ClosedChannelException {
key = channel.register(selector, SelectionKey.OP_CONNECT, this);
setTimeout(handler.getConnectTimeout(), TimeoutType.CONNECT_TIMEOUT);
}
/**
 * Called once the socket has finished connecting: clears the connect timeout,
 * switches interest to OP_READ and notifies the handler.
 */
public final void connected(Reactor reactor, Selector selector) throws ClosedChannelException {
clearTimeout();
this.reactor = reactor;
key = channel.register(selector, SelectionKey.OP_READ, this);
handler.onConnected(this);
}
/** Whether the channel is open and fully connected. */
public final boolean isConnected() {
return channel.isOpen() && channel.isConnected();
}
/** Exposes the underlying channel. */
public final SocketChannel socketChannel() {
return channel;
}
/**
 * Closes the connection: cancels any pending timeout, notifies the handler,
 * closes the channel and cancels the selection key. IOExceptions are
 * deliberately swallowed -- close is best-effort.
 */
public final void close() {
try {
clearTimeout();
handler.onClose(this);
channel.close();
key.cancel();
}
catch (IOException e) {}
}
/**
 * Drains readable bytes from the channel and dispatches every complete packet
 * to handler.onReceived. Returns false when the connection was closed (EOF or
 * error), true otherwise.
 */
public final boolean receive() {
try {
setTimeout(handler.getReadTimeout(), TimeoutType.READ_TIMEOUT);
int n = channel.read(inbuf);
if (n < 0) {
// EOF: the peer closed the connection.
close();
return false;
}
if (n == 0) return true;
for (;;) {
// Parse the length header once enough bytes have arrived.
if ((dataLength < 0) &&
(inbuf.position() >= headerLength)) {
dataLength = inbuf.getInt(0);
if (dataLength < 0) {
// Sign bit set: extended 8-byte header that also carries a packet id.
dataLength &= 0x7fffffff;
headerLength = 8;
}
if (headerLength + dataLength > inbuf.capacity()) {
// Grow the buffer to fit the whole packet, preserving bytes read so far.
ByteBuffer buf = ByteBufferStream.allocate(headerLength + dataLength);
inbuf.flip();
buf.put(inbuf);
ByteBufferStream.free(inbuf);
inbuf = buf;
}
setTimeout(handler.getReadTimeout(), TimeoutType.READ_TIMEOUT);
if (channel.read(inbuf) < 0) {
close();
return false;
}
}
// Extract the packet id from the extended header once it is available.
if ((headerLength == 8) && (id == null)
&& (inbuf.position() >= headerLength)) {
id = inbuf.getInt(4);
}
if ((dataLength >= 0) &&
((inbuf.position() - headerLength) >= dataLength)) {
// A complete packet is buffered: copy out exactly the payload bytes...
ByteBuffer data = ByteBufferStream.allocate(dataLength);
inbuf.flip();
inbuf.position(headerLength);
int bufLen = inbuf.limit();
inbuf.limit(headerLength + dataLength);
data.put(inbuf);
inbuf.limit(bufLen);
// ...keep any bytes belonging to the next packet, reset parser state.
inbuf.compact();
clearTimeout();
handler.onReceived(this, data, id);
headerLength = 4;
dataLength = -1;
id = null;
}
else {
// Not enough bytes yet for the current packet; wait for the next read.
break;
}
}
}
catch (Exception e) {
handler.onError(this, e);
close();
return false;
}
return true;
}
/**
 * Queues a packet for sending, arms write interest and asks the reactor to
 * flush this connection.
 */
public final void send(ByteBuffer buffer, Integer id) {
outqueue.offer(new OutPacket(buffer, id));
key.interestOps(SelectionKey.OP_READ | SelectionKey.OP_WRITE);
reactor.write(this);
}
/**
 * Writes queued packets to the channel until the queue is drained or the
 * socket stops accepting bytes (in which case OP_WRITE stays armed and the
 * selector calls back in). Notifies handler.onSended once per packet.
 */
public final void send() {
if (packet == null) {
packet = outqueue.poll();
if (packet == null) {
return;
}
}
try {
for (;;) {
while (packet.writeLength < packet.totalLength) {
setTimeout(handler.getWriteTimeout(), TimeoutType.WRITE_TIMEOUT);
long n = channel.write(packet.buffers);
if (n < 0) {
close();
return;
}
if (n == 0) {
// Socket buffer full: keep write interest and retry on the next select.
key.interestOps(SelectionKey.OP_READ | SelectionKey.OP_WRITE);
return;
}
packet.writeLength += n;
}
// Frees buffers[1]; presumably buffers[0] is the header and buffers[1]
// the payload -- TODO confirm against OutPacket.
ByteBufferStream.free(packet.buffers[1]);
clearTimeout();
handler.onSended(this, packet.id);
synchronized (outqueue) {
packet = outqueue.poll();
if (packet == null) {
// Queue drained: drop write interest.
key.interestOps(SelectionKey.OP_READ);
return;
}
}
}
}
catch (Exception e) {
close();
}
}
/**
 * Arms the shared timer for the given timeout kind. Non-idle timeouts pass a
 * second argument of true; presumably that forces a restart of an already
 * armed timer -- TODO confirm against Timer.setTimeout.
 */
public final void setTimeout(int timeout, TimeoutType type) {
timeoutType = type;
if (type == TimeoutType.IDLE_TIMEOUT) {
timer.setTimeout(timeout);
}
else {
timer.setTimeout(timeout, true);
}
}
/** Disarms any pending timeout. */
public final void clearTimeout() {
timer.clear();
}
}
package io.schinzel;
import com.amazonaws.regions.Regions;
import io.schinzel.basicutils.configvar.ConfigVar;
import io.schinzel.basicutils.timekeeper.Timekeeper;
public class REMOVE_ME {
public static void main(String[] args) {
System.out.println("Started!");
testReceive();
System.out.println("All done!");
}
public static void testReceive() {
ConfigVar configVar = ConfigVar.create(".env");
String awsAccessKey = configVar.getValue("AWS_SQS_ACCESS_KEY");
String awsSecretKey = configVar.getValue("AWS_SQS_SECRET_KEY");
SqsReceiver sqsReceiver = SqsReceiver.builder()
.awsAccessKey(awsAccessKey)
.awsSecretKey(awsSecretKey)
.queueUrl("https://sqs.eu-west-1.amazonaws.com/146535832843/my_first_queue.fifo")
.region(Regions.EU_WEST_1)
.build();
String message = sqsReceiver.receive();
System.out.println("Message '" + message + "'");
}
public static void testSend() {
ConfigVar configVar = ConfigVar.create(".env");
String awsAccessKey = configVar.getValue("AWS_SQS_ACCESS_KEY");
String awsSecretKey = configVar.getValue("AWS_SQS_SECRET_KEY");
SqsSender sqsSender = SqsSender.builder()
.awsAccessKey(awsAccessKey)
.awsSecretKey(awsSecretKey)
.groupId("my_funky_group_id")
.queueUrl("https://sqs.eu-west-1.amazonaws.com/146535832843/my_first_queue.fifo")
.region(Regions.EU_WEST_1)
.build();
Timekeeper tk = Timekeeper.create().startLap("s1");
sqsSender.send("s1");
tk.stopLap().startLap("s2");
sqsSender.send("s2");
tk.stopLap().startLap("s3");
sqsSender.send("s3");
tk.stopLap().stop().toStr().pln();
}
} |
package io.schinzel;
import com.amazonaws.regions.Regions;
import io.schinzel.basicutils.configvar.ConfigVar;
import io.schinzel.basicutils.timekeeper.Timekeeper;
public class REMOVE_ME {
public static void main(String[] args) {
System.out.println("Started!");
ConfigVar configVar = ConfigVar.create(".env");
String awsAccessKey = configVar.getValue("AWS_SQS_ACCESS_KEY");
String awsSecretKey = configVar.getValue("AWS_SQS_SECRET_KEY");
SqsSender sqsSender = SqsSender.builder()
.awsAccessKey(awsAccessKey)
.awsSecretKey(awsSecretKey)
.groupId("my_funky_group_id")
.queueUrl("https://sqs.eu-west-1.amazonaws.com/146535832843/my_first_queue.fifo")
.region(Regions.EU_WEST_1)
.build();
Timekeeper tk = Timekeeper.create().startLap("s1");
sqsSender.send("s1");
tk.stopLap().startLap("s2");
sqsSender.send("s2");
tk.stopLap().startLap("s3");
sqsSender.send("s3");
tk.stopLap().stop().toStr().pln();
System.out.println("All done!");
}
} |
package io.scif.common;
import org.scijava.util.IntRect;
/**
* A class for representing a rectangular region. This class is very similar to
* {@link java.awt.Rectangle}; it mainly exists to avoid problems with AWT, JNI
* and headless operation.
*
* @deprecated Use {@link IntRect} instead
*/
@Deprecated
public class Region {
// -- Fields --
public int x;
public int y;
public int width;
public int height;
// -- Constructor --
public Region() {}
public Region(final int x, final int y, final int width, final int height) {
this.x = x;
this.y = y;
this.width = width;
this.height = height;
}
// -- Region API methods --
/** Returns true if this region intersects the given region. */
public boolean intersects(final Region r) {
int tw = this.width;
int th = this.height;
int rw = r.width;
int rh = r.height;
if (rw <= 0 || rh <= 0 || tw <= 0 || th <= 0) {
return false;
}
final int tx = this.x;
final int ty = this.y;
final int rx = r.x;
final int ry = r.y;
rw += rx;
rh += ry;
tw += tx;
th += ty;
final boolean rtn =
((rw < rx || rw > tx) && (rh < ry || rh > ty) && (tw < tx || tw > rx) && (th < ty || th > ry));
return rtn;
}
/**
* Returns a Region representing the intersection of this Region with the
* given Region. If the two Regions do not intersect, the result is an empty
* Region.
*/
public Region intersection(final Region r) {
final int x = Math.max(this.x, r.x);
final int y = Math.max(this.y, r.y);
int w = Math.min(this.x + this.width, r.x + r.width) - x;
int h = Math.min(this.y + this.height, r.y + r.height) - y;
if (w < 0) w = 0;
if (h < 0) h = 0;
return new Region(x, y, w, h);
}
/**
* Returns true if the point specified by the given X and Y coordinates is
* contained within this region.
*/
public boolean containsPoint(final int xc, final int yc) {
return intersects(new Region(xc, yc, 1, 1));
}
@Override
public String toString() {
return "x=" + x + ", y=" + y + ", w=" + width + ", h=" + height;
}
@Override
public boolean equals(final Object o) {
if (!(o instanceof Region)) return false;
final Region that = (Region) o;
return this.x == that.x && this.y == that.y && this.width == that.width &&
this.height == that.height;
}
@Override
public int hashCode() {
return toString().hashCode();
}
} |
package io.searchbox.core;
import com.google.gson.internal.StringMap;
import io.searchbox.AbstractAction;
import io.searchbox.Action;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
/**
* @author Dogukan Sonmez
*/
public class Get extends AbstractAction implements Action {
protected Get() {
}
public Get(String indexName, String typeName, String id) {
super.indexName = indexName;
super.typeName = typeName;
super.id = id;
setRestMethodName("GET");
setPathToResult("_source");
}
public Get(String typeName, String id) {
setDefaultIndexEnabled(true);
setRestMethodName("GET");
super.typeName = typeName;
super.id = id;
setPathToResult("_source");
}
public Get(Doc doc) {
if (doc.getFields().size() > 0) {
setURI("_mget");
List<Doc> docs = new ArrayList<Doc>();
docs.add(doc);
setData(prepareMultiGet(docs));
setBulkOperation(true);
setRestMethodName("POST");
} else {
super.indexName = doc.getIndex();
super.typeName = doc.getType();
super.id = doc.getId();
setRestMethodName("GET");
}
setPathToResult("_source");
}
public Get(List<Doc> docs) {
setURI("_mget");
setBulkOperation(true);
setRestMethodName("POST");
setData(prepareMultiGet(docs));
setPathToResult("docs/_source");
}
public Get(String type, String[] ids) {
setDefaultIndexEnabled(true);
setRestMethodName("POST");
setBulkOperation(true);
setURI("/" + type + "/_mget");
setData(prepareMultiGet(ids));
setPathToResult("docs/_source");
}
public Get(String[] ids) {
setDefaultIndexEnabled(true);
setDefaultTypeEnabled(true);
setURI("/_mget");
setData(prepareMultiGet(ids));
setRestMethodName("POST");
setBulkOperation(true);
setPathToResult("docs/_source");
}
public Get(ActionRequest request) {
GetRequest getRequest = (GetRequest) request;
super.indexName = getRequest.index();
super.typeName = getRequest.type();
super.id = getRequest.id();
setRestMethodName("GET");
setPathToResult("_source");
}
@Override
public String getName() {
return "GET";
}
//{"_index":"3wjvhggiwi6qxes6","_type":"articles","_id":"tSm0twKCTTalE9A6IJKqJA","_version":1,"exists":true, "_source" : { "name":"First" }}
@Override
public byte[] createByteResult(Map jsonMap) throws IOException {
BytesStreamOutput out = new BytesStreamOutput();
out.writeUTF((String) jsonMap.get("_index"));
out.writeOptionalUTF((String) jsonMap.get("_type"));
out.writeUTF((String) jsonMap.get("_id"));
out.writeLong(((Double) jsonMap.get("_version")).longValue());
Boolean exists = (Boolean) jsonMap.get("exists");
out.writeBoolean(exists);
if (exists) {
out.writeBytesHolder(jsonMap.get("_source").toString().getBytes(), 0, jsonMap.get("_source").toString().getBytes().length);
if (jsonMap.containsKey("fields")) {
StringMap fields = (StringMap) jsonMap.get("fields");
out.writeVInt(fields.size());
for (Object key : fields.keySet()) {
out.writeUTF((String) key);
List<StringMap> fieldValues = (List) fields.get(key);
out.writeVInt(fieldValues.size());
for (Object fieldValue : fieldValues) {
out.writeGenericValue(fieldValue);
}
}
} else {
//no fields provided for query
out.writeVInt(0);
}
}
return out.copiedByteArray();
}
protected Object prepareMultiGet(List<Doc> docs) {
//[{"_index":"twitter","_type":"tweet","_id":"1","fields":["field1","field2"]}
StringBuilder sb = new StringBuilder("{\"docs\":[");
for (Doc doc : docs) {
sb.append("{\"_index\":\"")
.append(doc.getIndex())
.append("\",\"_type\":\"")
.append(doc.getType())
.append("\",\"_id\":\"")
.append(doc.getId())
.append("\"");
if (doc.getFields().size() > 0) {
sb.append(",");
sb.append(getFieldsString(doc.getFields()));
}
sb.append("}");
sb.append(",");
}
sb.delete(sb.toString().length() - 1, sb.toString().length());
sb.append("]}");
return sb.toString();
}
private Object getFieldsString(HashSet<String> fields) {
//"fields":["field1","field2"]
StringBuilder sb = new StringBuilder("\"fields\":[");
for (String val : fields) {
sb.append("\"")
.append(val)
.append("\"")
.append(",");
}
sb.delete(sb.toString().length() - 1, sb.toString().length());
sb.append("]");
return sb.toString();
}
protected Object prepareMultiGet(String[] ids) {
//{"docs":[{"_id":"1"},{"_id" : "2"},{"_id" : "3"}]}
StringBuilder sb = new StringBuilder("{\"docs\":[")
.append(concatenateArray(ids))
.append("]}");
return sb.toString();
}
private String concatenateArray(String[] values) {
StringBuilder sb = new StringBuilder();
for (String val : values) {
sb.append("{\"_id\":\"")
.append(val)
.append("\"}")
.append(",");
}
sb.delete(sb.toString().length() - 1, sb.toString().length());
return sb.toString();
}
public String getURI() {
if (isBulkOperation()) {
return super.getURI();
} else {
return buildURI(indexName, typeName, id);
}
}
} |
package reciter.controller;
import java.io.IOException;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import reciter.Uids;
import reciter.algorithm.cluster.model.ReCiterCluster;
import reciter.algorithm.util.ArticleTranslator;
import reciter.database.mongo.model.ESearchResult;
import reciter.database.mongo.model.GoldStandard;
import reciter.database.mongo.model.MeshTerm;
import reciter.database.mongo.model.PubMedArticleFeature;
import reciter.engine.Engine;
import reciter.engine.EngineOutput;
import reciter.engine.EngineParameters;
import reciter.engine.Feature;
import reciter.engine.ReCiterEngine;
import reciter.engine.StrategyParameters;
import reciter.engine.erroranalysis.Analysis;
import reciter.engine.erroranalysis.ReCiterAnalysis;
import reciter.engine.erroranalysis.ReCiterAnalysisTranslator;
import reciter.model.article.ReCiterArticle;
import reciter.model.identity.Identity;
import reciter.model.pubmed.PubMedArticle;
import reciter.model.scopus.ScopusArticle;
import reciter.service.mongo.AnalysisService;
import reciter.service.mongo.ESearchResultService;
import reciter.service.mongo.GoldStandardService;
import reciter.service.mongo.IdentityService;
import reciter.service.mongo.MeshTermService;
import reciter.service.mongo.PubMedArticleFeatureService;
import reciter.service.mongo.PubMedService;
import reciter.service.mongo.ReCiterClusterService;
import reciter.service.mongo.ScopusService;
import reciter.xml.retriever.engine.ReCiterRetrievalEngine;
@Controller
public class ReCiterController {
private static final Logger slf4jLogger = LoggerFactory.getLogger(ReCiterController.class);
private final DateTimeFormatter DATE_FORMAT = DateTimeFormatter.ofPattern("yyyy-MM-dd");
@Autowired
private ESearchResultService eSearchResultService;
@Autowired
private PubMedService pubMedService;
@Autowired
private ReCiterRetrievalEngine aliasReCiterRetrievalEngine;
@Autowired
private IdentityService identityService;
@Autowired
private ScopusService scopusService;
@Autowired
private MeshTermService meshTermService;
@Autowired
private PubMedArticleFeatureService pubMedArticleFeatureService;
@Autowired
private GoldStandardService goldStandardService;
@Autowired
private AnalysisService analysisService;
@Autowired
private ReCiterClusterService reCiterClusterService;
@Autowired
private StrategyParameters strategyParameters;
@Value("${use.scopus.articles}")
private boolean useScopusArticles;
/**
 * Retrieves, for every uid in {@code Uids.uids}, the PubMed articles whose
 * pmids are recorded in that uid's gold standard.
 */
@RequestMapping(value = "/reciter/retrieve/goldstandard", method = RequestMethod.GET)
@ResponseBody
public void retrieveGoldStandard() {
    long startTime = System.currentTimeMillis();
    slf4jLogger.info("Start time is: {}", startTime);
    for (String uid : Uids.uids) {
        GoldStandard goldStandard = goldStandardService.findByUid(uid);
        try {
            aliasReCiterRetrievalEngine.retrieveByPmids(goldStandard.getId(), goldStandard.getKnownPmids());
        } catch (IOException e) {
            // Log at error level (was info) so retrieval failures stand out.
            slf4jLogger.error("Failed to retrieve articles.", e);
        }
    }
    long estimatedTime = System.currentTimeMillis() - startTime;
    slf4jLogger.info("elapsed time: {}", estimatedTime);
}
/**
 * Retrieves articles for every uid in {@code Uids.uids}, restricted to the
 * current calendar month (first through last day).
 */
@RequestMapping(value = "/reciter/retrieve/articles/", method = RequestMethod.GET)
@ResponseBody
public void retrieveArticles() {
    long startTime = System.currentTimeMillis();
    slf4jLogger.info("Start time is: {}", startTime);
    List<Identity> identities = new ArrayList<>();
    // Date range: the whole of the current month.
    LocalDate initial = LocalDate.now();
    LocalDate startDate = initial.withDayOfMonth(1);
    LocalDate endDate = initial.withDayOfMonth(initial.lengthOfMonth());
    for (String uid : Uids.uids) {
        Identity identity = identityService.findByUid(uid);
        identities.add(identity);
    }
    try {
        aliasReCiterRetrievalEngine.retrieveArticlesByDateRange(identities, startDate, endDate);
    } catch (IOException e) {
        // Log at error level (was info) so retrieval failures stand out.
        slf4jLogger.error("Failed to retrieve articles.", e);
    }
    long estimatedTime = System.currentTimeMillis() - startTime;
    slf4jLogger.info("elapsed time: {}", estimatedTime);
}
/**
 * Retrieves articles for a single uid, restricted to the current calendar
 * month. (The previous javadoc was copy-pasted from the all-uids variant.)
 */
@RequestMapping(value = "/reciter/retrieve/articles/by/uid", method = RequestMethod.GET)
@ResponseBody
public void retrieveArticlesByUid(String uid) {
    long startTime = System.currentTimeMillis();
    slf4jLogger.info("Start time is: {}", startTime);
    List<Identity> identities = new ArrayList<>();
    // Date range: the whole of the current month.
    LocalDate initial = LocalDate.now();
    LocalDate startDate = initial.withDayOfMonth(1);
    LocalDate endDate = initial.withDayOfMonth(initial.lengthOfMonth());
    Identity identity = identityService.findByUid(uid);
    identities.add(identity);
    try {
        aliasReCiterRetrievalEngine.retrieveArticlesByDateRange(identities, startDate, endDate);
    } catch (IOException e) {
        // Log at error level (was info) so retrieval failures stand out.
        slf4jLogger.error("Failed to retrieve articles.", e);
    }
    long estimatedTime = System.currentTimeMillis() - startTime;
    slf4jLogger.info("elapsed time: {}", estimatedTime);
}
/**
* Run analysis for all uids in Uids.java.
* @return
*/
/**
 * Runs the full analysis pipeline (and persists its results) for every
 * configured uid.
 *
 * @return "Success" once all uids have been processed
 */
@RequestMapping(value = "/reciter/all/analysis/", method = RequestMethod.GET)
@ResponseBody
public String runAllAnalysis() {
    for (final String personUid : Uids.uids) {
        runAnalysis(personUid);
    }
    return "Success";
}
/**
 * Generates and persists feature vectors for every uid in {@code Uids.uids}.
 * Analysis is run first for each uid so its results are saved alongside the
 * features.
 *
 * @return "Success" once all uids have been processed
 */
@RequestMapping(value = "/reciter/all/feature/", method = RequestMethod.GET)
@ResponseBody
public String generateAllFeatures() {
    for (String personUid : Uids.uids) {
        runAnalysis(personUid);
        EngineParameters engineParameters = initializeEngineParameters(personUid);
        Engine reCiterEngine = new ReCiterEngine();
        List<Feature> generated = reCiterEngine.generateFeature(engineParameters);
        PubMedArticleFeature featureRecord = new PubMedArticleFeature();
        featureRecord.setUid(personUid);
        featureRecord.setFeatures(generated);
        pubMedArticleFeatureService.save(featureRecord);
    }
    return "Success";
}
/**
 * Generates the ReCiter feature vectors for a single uid without persisting
 * them.
 *
 * @param uid the person identifier
 * @return the generated features
 */
@RequestMapping(value = "/reciter/feature/by/uid", method = RequestMethod.GET)
@ResponseBody
public List<Feature> generateFeatures(@RequestParam(value="uid") String uid) {
    EngineParameters engineParameters = initializeEngineParameters(uid);
    return new ReCiterEngine().generateFeature(engineParameters);
}
/**
 * Runs the ReCiter engine for one uid, persists the resulting analysis and
 * clusters, and returns the analysis.
 *
 * @param uid the person identifier
 * @return the analysis produced by the engine
 */
@RequestMapping(value = "/reciter/analysis/by/uid", method = RequestMethod.GET)
@ResponseBody
public Analysis runAnalysis(@RequestParam(value="uid") String uid) {
    EngineParameters engineParameters = initializeEngineParameters(uid);
    EngineOutput output = new ReCiterEngine().run(engineParameters, strategyParameters);
    Analysis analysis = output.getAnalysis();
    slf4jLogger.info(analysis.toString());
    analysisService.save(analysis, uid);
    reCiterClusterService.save(output.getReCiterClusters(), uid);
    return analysis;
}
/**
 * Runs the ReCiter engine for one uid, persists the analysis and clusters,
 * and returns a web-friendly translation of the result.
 *
 * @param uid the person identifier
 * @return the analysis and clusters translated for web consumption
 */
@RequestMapping(value = "/reciter/analysis/web/by/uid", method = RequestMethod.GET)
@ResponseBody
public ReCiterAnalysis runReCiterAnalysis(@RequestParam(value="uid") String uid) {
    EngineParameters engineParameters = initializeEngineParameters(uid);
    EngineOutput output = new ReCiterEngine().run(engineParameters, strategyParameters);
    Analysis analysis = output.getAnalysis();
    List<ReCiterCluster> clusters = output.getReCiterClusters();
    slf4jLogger.info(analysis.toString());
    analysisService.save(analysis, uid);
    reCiterClusterService.save(clusters, uid);
    return ReCiterAnalysisTranslator.convert(uid, analysis, clusters);
}
/**
 * Assembles the engine inputs for one uid: identity, candidate articles
 * (PubMed, optionally joined with Scopus by PMID), the global mesh-term count
 * map and the gold-standard PMIDs used for precision/recall.
 *
 * @param uid the person identifier to build parameters for
 * @return fully-populated engine parameters
 */
private EngineParameters initializeEngineParameters(String uid) {
    // find identity
    Identity identity = identityService.findByUid(uid);
    // Union of PMIDs across every ESearch result for this uid (set de-duplicates).
    List<ESearchResult> eSearchResults = eSearchResultService.findByUid(uid);
    Set<Long> pmids = new HashSet<>();
    for (ESearchResult eSearchResult : eSearchResults) {
        pmids.addAll(eSearchResult.getESearchPmid().getPmids());
    }
    // NOTE(review): magic cutoff — PMIDs above 27090613 are dropped, which looks
    // like a dataset snapshot bound. Confirm it is still intended and move it to
    // configuration. (Iterating the set directly replaces the redundant
    // intermediate ArrayList the original built.)
    List<Long> filtered = new ArrayList<>(pmids.size());
    for (long pmid : pmids) {
        if (pmid <= 27090613) {
            filtered.add(pmid);
        }
    }
    List<PubMedArticle> pubMedArticles = pubMedService.findByPmids(filtered);
    List<ScopusArticle> scopusArticles = scopusService.findByPmids(filtered);
    // Temporary index to join Scopus articles with PubMed articles by PMID.
    Map<Long, ScopusArticle> map = new HashMap<>();
    if (useScopusArticles) {
        for (ScopusArticle scopusArticle : scopusArticles) {
            map.put(scopusArticle.getPubmedId(), scopusArticle);
        }
    }
    // Combine PubMed and Scopus articles into ReCiterArticles. map.get(pmid)
    // yields null when absent, which matches the old containsKey/get branch.
    // NOTE(review): reCiterArticles is built but never attached to the returned
    // parameters — confirm whether this translation step is still needed.
    List<ReCiterArticle> reCiterArticles = new ArrayList<>(pubMedArticles.size());
    for (PubMedArticle pubMedArticle : pubMedArticles) {
        long pmid = pubMedArticle.getMedlineCitation().getMedlineCitationPMID().getPmid();
        reCiterArticles.add(ArticleTranslator.translate(pubMedArticle, map.get(pmid)));
    }
    EngineParameters parameters = new EngineParameters();
    parameters.setIdentity(identity);
    parameters.setPubMedArticles(pubMedArticles);
    // NOTE(review): Scopus articles are fetched and joined above, yet an empty
    // list is passed here — confirm this is deliberate.
    parameters.setScopusArticles(Collections.emptyList());
    // Lazily build the global mesh-term count map once per JVM.
    // NOTE(review): this check-then-set on static state is not thread-safe;
    // concurrent first requests may build the map twice (wasteful, not fatal).
    if (EngineParameters.getMeshCountMap() == null) {
        List<MeshTerm> meshTerms = meshTermService.findAll();
        slf4jLogger.info("Found {} mesh terms", meshTerms.size());
        Map<String, Long> meshCountMap = new HashMap<>(meshTerms.size());
        for (MeshTerm meshTerm : meshTerms) {
            meshCountMap.put(meshTerm.getMesh(), meshTerm.getCount());
        }
        EngineParameters.setMeshCountMap(meshCountMap);
    }
    GoldStandard goldStandard = goldStandardService.findByUid(uid);
    parameters.setKnownPmids(goldStandard.getKnownPmids());
    return parameters;
}
} |
package model;
import dataObjects.Document;
import javafx.collections.ObservableList;
import model.similarity.Cosine;
import model.similarity.DotProduct;
import model.similarity.Similarity;
import model.weight.*;
import utilities.TermExtractor;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Set;
public class ModelOperations {
private Weight weight;
private Similarity similarity;
private HashMap<String, Weight> weightHashMap;
private HashMap<String, Similarity> similarityHashMap;
protected ModelOperations(Connection connection) throws SQLException{
similarityHashMap = new HashMap<>();
weightHashMap = new HashMap<>();
//Create similarity objects
similarityHashMap.put("Dot product", new DotProduct(connection));
similarityHashMap.put("Cosine", new Cosine(connection));
//Create weight objects
weightHashMap.put("TF-IDF", new TFIDF(connection));
weightHashMap.put("Normalized TF-IDF", new NormalizedTFIDF(connection));
weightHashMap.put("Maximum normalized TF", new MaximumNormalizedTF(connection));
weightHashMap.put("Maximum normalized TF-IDF", new MaximumNormalizedTFIDF(connection));
//Initialize with TF-IDF and DotProduct
setSimilarityMethod("Dot product");
setWeightMethod("TF-IDF");
//Calculate IDFs
calculateIDFs();
//Calculate weights
calculateWeights();
}
/**
* A method to evaluate a query calculating its similarity against the documents,
* in order to get the documents sorted by relevance
*
* @param query The input query from the user
* @return An ArrayList of Documents sorted by their similarity to the query
*/
public ObservableList<Document> evaluateQuery(String query){
HashMap<String, Integer> wordCount = TermExtractor.extractTerms(query); //Counter for word occurrence in the query
ObservableList<Document> searchResult;
//Request the calculation of similarity for the query, and save the results in the searchResult list
searchResult = similarity.similarityFeedback(wordCount, 0, 0, 1); //0,0 for default limits, and the 1 for one iteration
//Sort the results by similarity, from highest to lowest
Collections.sort(searchResult, Collections.reverseOrder());
//Return the results
return searchResult;
}
/**
* A method to calculate the weights of the inserted terms
*/
public void calculateWeights() {
weight.calculateWeights();
System.out.print("Weights calculated!\n");
}
/**
* A method to request the calculation of IDFs
*/
public void calculateIDFs(){
weight.calculateIDFs();
}
/**
* Getters
*/
public Set<String> getSimilarityMethods(){
return similarityHashMap.keySet();
}
public Set<String> getWeightMethods(){
return weightHashMap.keySet();
}
/**
* Set the desired weight method
* @param weightMethod the weight method name to be used
*/
public void setWeightMethod(String weightMethod){
if(this.weight==null || !this.weight.getWeightMethodName().equals(weightMethod)) {
this.weight = weightHashMap.get(weightMethod);
}
else{
System.out.println("\nTrying to calculate an already calculated weight method!\nSkipping...\n");
}
}
/**
* Set the desired similarity method
* @param similarityMethod The similarity method name to be used
*/
public void setSimilarityMethod(String similarityMethod){
this.similarity = similarityHashMap.get(similarityMethod);
}
public void calculateQueryWeights() {
this.weight.calculateQueryWeights();
}
} |
package sb.tasks.jobs.trupd.agent;
import com.jcabi.http.request.JdkRequest;
import com.jcabi.http.response.JsoupResponse;
import com.jcabi.http.wire.CookieOptimizingWire;
import com.jcabi.log.Logger;
import com.jcabi.xml.XML;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.w3c.dom.Node;
import sb.tasks.ValidProps;
import sb.tasks.jobs.meta.MetaInfo;
import sb.tasks.jobs.trupd.TrNotif;
import sb.tasks.system.net.ComboRequest;
import javax.xml.namespace.NamespaceContext;
import java.io.IOException;
import java.net.URLDecoder;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Tracker agent for lostfilm.tv: finds new episode torrents for a tracked
 * series by scanning the site's RSS feed, following each matching episode page
 * to the v_search endpoint, and collecting torrent notifications.
 */
public final class AnLostFilm extends TorrentFromPage {
// Key under which session/uid/quality settings documents store their payload.
private static final String VALUE = "value";
// Mongo document describing the tracked series; its "url" field is the series page URL.
private final org.bson.Document document;
private final ValidProps props;
// lostfilm.tv cookie values; empty string when the setting document is absent.
private final String session;
private final String uid;
// Desired release quality label, matched against the "inner-box--label" text.
private final String quality;
/**
 * Convenience constructor unwrapping settings documents; a missing (null)
 * document yields an empty string for the corresponding value.
 */
public AnLostFilm(org.bson.Document document, ValidProps props,
org.bson.Document session, org.bson.Document uid, org.bson.Document quality) {
this(
document,
props,
session == null ? "" : session.getString(VALUE),
uid == null ? "" : uid.getString(VALUE),
quality == null ? "" : quality.getString(VALUE)
);
}
public AnLostFilm(org.bson.Document document, ValidProps props,
String session, String uid, String quality) {
this.document = document;
this.props = props;
this.session = session;
this.uid = uid;
this.quality = quality;
}
// Extracts the release name for the configured quality from a v_search page;
// fails with IOException when no item carries the configured quality label.
@Override
protected String name(Document document) throws IOException {
for (Element item : document.getElementsByClass("inner-box--item")) {
if (item.getElementsByClass("inner-box--label").get(0).text().equals(quality)) {
return item
.getElementsByClass("main").get(0)
.getElementsByTag("a").get(0)
.text();
}
}
throw new IOException("Name not parsed");
}
// Extracts the torrent link (href) for the configured quality from a v_search page.
@Override
protected String torrentUrl(Document document) throws IOException {
for (Element item : document.getElementsByClass("inner-box--item")) {
if (item.getElementsByClass("inner-box--label").get(0).text().equals(quality)) {
return item
.getElementsByClass("main").get(0)
.getElementsByTag("a").get(0)
.attr("href");
}
}
throw new IOException("URL not found");
}
/**
 * Walks the RSS feed for items under the tracked series URL, resolves each
 * episode through lostfilm's PlayEpisode/v_search flow, and returns the
 * resulting notifications. Returns a single CheckedNotif when nothing new
 * was found (so the caller still records that a check happened).
 */
@Override
public List<TrNotif> perform() throws IOException {
// Empty-feed fallback object avoids null checks downstream.
XML rss = MetaInfo.get("rssFeed", XML.class, new EmptyFeed());
List<TrNotif> result = new ArrayList<>();
for (String str : rss.xpath("//item/link/text()")) {
// Only items whose link starts with this series' (decoded) URL are ours.
String decodedUrl = URLDecoder.decode(document.getString("url"), StandardCharsets.UTF_8);
if (str.startsWith(decodedUrl)) {
String url = str.replaceAll("\\s", "%20");
Document root = Jsoup.parse(
new ComboRequest(new JdkRequest(url)).fetch().as(JsoupResponse.class).body(),
document.getString("url")
);
// The episode id is embedded as PlayEpisode('SSSEEEQQQ') in the page's onclick.
Matcher m = Pattern.compile("PlayEpisode\\('(\\d{3})(\\d{3})(\\d{3})'\\)")
.matcher(
root.getElementsByClass("external-btn").attr("onclick")
);
if (m.find()) {
// v_search redirects (when authenticated) to a page linking the torrent.
// NOTE(review): two separate "Cookie" headers are set; whether both survive
// depends on the jcabi-http wire implementation — confirm they are merged.
Document doc = Jsoup.parse(
new ComboRequest(
new JdkRequest(
String.format("https://lostfilm.tv/v_search.php?c=%s&s=%s&e=%s",
m.group(1), m.group(2), m.group(3))))
.through(CookieOptimizingWire.class)
.header("Cookie", String.format("lf_session=%s", session))
.header("Cookie", String.format("lnk_uid=%s", uid))
.fetch()
.as(JsoupResponse.class)
.body()
);
if (!doc.getElementsByTag("a").isEmpty()) {
// Follow the first anchor to the page that actually lists the torrents.
org.jsoup.nodes.Document doc2 = Jsoup.parse(
new ComboRequest(
new JdkRequest(
doc.getElementsByTag("a").get(0).attr("href")
)
).fetch().body()
);
result.add(
fromReq(doc2, props, document.getString("url"))
);
}
}
}
}
// No new episodes: report a completed check instead of an empty list.
return result.isEmpty() ?
Collections.singletonList(
new TrNotif.CheckedNotif(document, props.isInitial())
) :
result;
}
// Null-object XML used when no RSS feed is cached: every query yields nothing.
private final class EmptyFeed implements XML {
@Override
public List<String> xpath(String query) {
Logger.info(AnLostFilm.this, "rss is empty");
return Collections.emptyList();
}
@Override
public List<XML> nodes(String query) {
return Collections.emptyList();
}
@Override
public XML registerNs(String prefix, Object uri) {
return this;
}
@Override
public XML merge(NamespaceContext context) {
return this;
}
@Override
public Node node() {
return null;
}
}
}
package org.agmip.dome;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.agmip.dome.DomeUtil;
import org.agmip.ace.util.AcePathfinderUtil;
import com.rits.cloning.Cloner;
/**
* The Engine of the DOME, which reads in a DOME ruleset and applies
* the rules to a dataset.
*
* Terms
* <ul>
* <li><strong>Command</strong> which method (FILL/REPLACE) are we working in</li>
* <li><strong>Function</strong> a function used to populate the command</li>
* <li><strong>Static</strong> populate the command with a static reference
* (either a variable or a value)</li>
* </ul>
*
*/
public class Engine {
private static final Logger log = LoggerFactory.getLogger(Engine.class);
private ArrayList<HashMap<String, String>> rules;
private ArrayList<HashMap<String, String>> generators;
private boolean allowGenerators;
/**
* Construct a new engine with the ruleset passed in.
* @param dome A full DOME
* @param allowGenerators allow generators to be run
*/
public Engine(HashMap<String, Object> dome, boolean allowGenerators) {
this.rules = DomeUtil.getRules(dome);
this.generators = DomeUtil.getGenerators(dome);
this.allowGenerators = allowGenerators;
}
/**
* Construct a new engine with the ruleset passed in. Generators are
* <strong>not</strong> allowed by default.
* @param dome A full DOME
*/
public Engine(HashMap<String, Object> dome) {
this(dome, false);
}
/**
* Construct a new engine with the ruleset passed in.
* @param rules A DOME ruleset.
*/
public Engine(ArrayList<HashMap<String, String>> rules) {
this.rules = rules;
this.generators = new ArrayList<HashMap<String, String>>();
this.allowGenerators = false;
}
protected Engine() {
this.rules = new ArrayList<HashMap<String,String>>();
this.generators = new ArrayList<HashMap<String, String>>();
this.allowGenerators = false;
}
/**
* Add more rules to the Engine
* @param rules new set of rules to append (from another DOME)
*/
public void appendRules(ArrayList<HashMap<String, String>> rules) {
this.rules.addAll(rules);
}
/**
* Apply the ruleset to the dataset passed in.
*
* @param data A dataset to modify according to the DOME ruleset.
*/
public void apply(HashMap<String, Object> data) {
for (HashMap<String, String> rule: rules) {
String cmd = rule.get("cmd").toUpperCase();
// NPE defender
if (rule.get("variable") == null) {
log.error("Invalid rule: {}", rule.toString());
return;
}
String a = rule.get("args");
if (a == null) {
a = "";
}
String[] args = a.split("[|]");
if (cmd.equals("INFO")) {
log.debug("Recevied an INFO command");
} else if (cmd.equals("FILL") || cmd.equals("REPLACE") || cmd.equals("REPLACE_FIELD_ONLY")) {
boolean replace = true;
if (cmd.equals("FILL")) replace=false;
if (args[0].endsWith("()")) {
Calculate.run(data, rule.get("variable"), args, replace);
} else {
if (cmd.equals("REPLACE_FIELD_ONLY")) {
log.debug("Found FIELD_ONLY replace");
if ( data.containsKey("seasonal_dome_applied")) {
log.info("Replace for {} not applied due to FIELD_ONLY restriction", rule.get("variable"));
} else {
log.debug("Found data without seasonal_dome_applied set.");
Assume.run(data, rule.get("variable"), args, replace);
}
} else {
Assume.run(data, rule.get("variable"), args, replace);
}
}
} else {
log.error("Invalid command: [{}]", cmd);
}
}
}
/**
* Run the generators on the dataset passed in. This will generate a number
* of additional datasets based on the original dataset.
*
* @param data A dataset to run the generators on
* @param keysToExtract A list of keys to extract from the resulting
* generated datasets.
*
* @return A {@code HashMap} of just the exported keys.
*/
public ArrayList<HashMap<String, Object>> runGenerators(HashMap<String, Object> data) {
if (this.allowGenerators) {
log.debug("Starting generators");
ArrayList<HashMap<String, Object>> results = new ArrayList<HashMap<String, Object>>();
HashSet<String> keysToExtract = new HashSet<String>();
ArrayList<HashMap<String, String>> gAcc = new ArrayList<HashMap<String, String>>();
ArrayList<ArrayList<HashMap<String, String>>> newEventArrs = new ArrayList<ArrayList<HashMap<String, String>>>();
// Run the generators
for (HashMap<String, String> generator: generators) {
// NPE defender
if (generator.get("variable") == null) {
log.error("Invalid generator: {}", generator.toString());
return new ArrayList<HashMap<String, Object>>();
}
String path = Command.getPathOrRoot(generator.get("variable"));
if (path.contains("weather")) {
keysToExtract.add("weather");
} else if (path.contains("soil")) {
keysToExtract.add("soil");
} else {
keysToExtract.add("experiment");
}
String a = generator.get("args");
if (a == null) {
a = "";
}
String[] args = a.split("[|]");
if (args[0].toUpperCase().equals("AUTO_REPLICATE_EVENTS()")) {
newEventArrs = Generate.runEvent(data, args, newEventArrs);
} else {
gAcc = Generate.run(data, args, gAcc);
}
}
// On the output of "each" generation, put the export blocks into results
if (! keysToExtract.contains("weather")) {
data.remove("weather");
}
if (! keysToExtract.contains("soil")) {
data.remove("soil");
}
Cloner cloner = new Cloner();
if (newEventArrs.isEmpty()) {
int i = 0;
for (HashMap<String, String> rules : gAcc) {
i++;
Generate.applyGeneratedRules(data, rules, ""+i);
results.add(cloner.deepClone(data));
}
} else {
int i = 0;
for (ArrayList<HashMap<String, String>> eventArr : newEventArrs) {
i++;
Generate.applyReplicatedEvents(data, eventArr, "" + i);
results.add(cloner.deepClone(data));
}
}
return results;
// return the results.
} else {
log.error("You cannot run generators in this mode.");
return new ArrayList<HashMap<String, Object>>();
}
}
protected void addRule(HashMap<String,String> rule) {
rules.add(rule);
}
protected void addGenerator(HashMap<String, String> generator) {
generators.add(generator);
}
protected void enableGenerators() {
this.allowGenerators = true;
}
protected void disableGenerators() {
this.allowGenerators = false;
}
} |
package se.kits.gakusei.util;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import se.kits.gakusei.content.model.Inflection;
import se.kits.gakusei.content.model.Lesson;
import se.kits.gakusei.content.model.Nugget;
import se.kits.gakusei.content.repository.InflectionRepository;
import se.sandboge.japanese.conjugation.Verb;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.*;
import java.util.stream.Collectors;
@Component
public class QuestionHandler {
@Autowired
InflectionRepository inflectionRepository;
public List<HashMap<String, Object>> createQuestions(List<Nugget> nuggets, String questionType, String answerType) {
List<HashMap<String, Object>> questions = nuggets.stream()
.map(n -> createQuestion(n, nuggets, questionType, answerType))
.filter(Objects::nonNull)
.collect(Collectors.toList());
return questions;
}
protected HashMap<String, Object> createQuestion(Nugget nugget,
List<Nugget> nuggets,
String questionType,
String answerType) {
LinkedList<Nugget> optimalNuggets = new LinkedList<>();
LinkedList<Nugget> allNuggets = new LinkedList<>(nuggets);
Collections.shuffle(allNuggets);
allNuggets.remove(nugget);
List<List<String>> alternatives = new ArrayList<>();
alternatives.add(createAlternative(nugget, answerType));
HashMap<String, Object> questionMap = new HashMap<>();
for(int i = 0; optimalNuggets.size() < 3 && i < allNuggets.size(); i++) {
if(allNuggets.get(i).getType().equals(nugget.getType()))
optimalNuggets.push(allNuggets.get(i));
else if(allNuggets.size() - (i + 1) <= 4 - optimalNuggets.size())
optimalNuggets.push(allNuggets.get(i));
}
//Avoid getting the same alternative from another nugget
while (alternatives.size() < 4 && !optimalNuggets.isEmpty()) {
List<String> tempAlternative = createAlternative(optimalNuggets.poll(), answerType);
if (alternatives.stream().noneMatch(l -> l.get(0).equals(tempAlternative.get(0)))) {
alternatives.add(tempAlternative);
}
}
if (alternatives.size() == 4) {
List<String> question = createAlternative(nugget, questionType);
questionMap.put("question", question);
questionMap.put("correctAlternative", alternatives.get(0));
questionMap.put("alternative1", alternatives.get(1));
questionMap.put("alternative2", alternatives.get(2));
questionMap.put("alternative3", alternatives.get(3));
questionMap.put("questionNuggetId", nugget.getId());
return questionMap;
} else {
return null;
}
}
public List<HashMap<String, Object>> createGrammarQuestions(Lesson lesson,
List<Nugget> nuggets,
String questionType,
String answerType){
return nuggets.stream().
map(n -> createGrammarQuestion(lesson, n, questionType, answerType)).
filter(Objects::nonNull).
collect(Collectors.toList());
}
private HashMap<String, Object> createGrammarQuestion(Lesson lesson,
Nugget nugget,
String questionType,
String answerType){
HashMap<String, Object> questionMap = new HashMap<>();
List<Inflection> inflections = inflectionRepository.findByLessonId(lesson.getId());
Collections.shuffle(inflections); // Get "random" inflection
Inflection selectedInflection = inflections.get(0);
List<String> question = createAlternative(nugget, questionType);
List<String> inflectionInfo = InflectionUtil.getInflectionNameAndTextLink(selectedInflection.getInflectionMethod());
question.add(inflectionInfo.get(0));
question.addAll(createAlternative(nugget, answerType));
if(inflectionInfo.get(1) != null){
question.add(inflectionInfo.get(1));
}
String inflectedVerb = inflectVerb(selectedInflection, question.get(1));
if(inflectedVerb == null){
return null;
}
questionMap.put("question", question);
questionMap.put("correctAlternative", Collections.singletonList(inflectedVerb));
questionMap.put("alternative1", Collections.EMPTY_LIST);
questionMap.put("alternative2", Collections.EMPTY_LIST);
questionMap.put("alternative3", Collections.EMPTY_LIST);
questionMap.put("questionNuggetId", nugget.getId());
return questionMap;
}
private String inflectVerb(Inflection inflection, String baseVerb){
try {
Verb verb = new Verb(baseVerb);
Method methodToInvoke = verb.getClass().getMethod(inflection.getInflectionMethod());
String inflectedVerb = (String) methodToInvoke.invoke(verb);
return inflectedVerb;
} catch (NoSuchMethodException
| InvocationTargetException
| IllegalAccessException
| IllegalArgumentException e) {
e.printStackTrace();
return null;
}
}
public List<Nugget> chooseNuggets(List<Nugget> nuggetsWithLowSuccessrate,
List<Nugget> unansweredNuggets,
List<Nugget> allLessonNuggets,
int quantity) {
List<Nugget> hiddenNuggets = allLessonNuggets.stream().filter(n -> n.isHidden()).collect(Collectors.toList());
if (allLessonNuggets.size() <= quantity) {
return allLessonNuggets;
} else {
List<Nugget> nuggets = new ArrayList<>();
Collections.shuffle(unansweredNuggets);
Collections.shuffle(nuggetsWithLowSuccessrate);
Collections.shuffle(allLessonNuggets);
nuggets.addAll(unansweredNuggets);
nuggets.addAll(nuggetsWithLowSuccessrate);
nuggets.addAll(allLessonNuggets);
List<Nugget> questionNuggets = new ArrayList<>();
while (questionNuggets.size() <= quantity && nuggets.size() != 0) {
Nugget nugget = nuggets.remove(0);
if (!questionNuggets.contains(nugget) && !hiddenNuggets.contains(nugget)) {
questionNuggets.add(nugget);
}
}
return questionNuggets;
}
}
private List<String> createAlternative(Nugget nugget, String type) {
List<String> alternative = new ArrayList<>();
try {
if (type.equals("reading")) { // reading -> japanese
alternative.add(nugget.getJpRead());
alternative.add(nugget.getJpWrite());
} else {
String methodName = "get" + Character.toString(Character.toUpperCase(type.charAt(0))) +
type.substring(1);
alternative.add((String)nugget.getClass().getMethod(methodName).invoke(nugget));
}
} catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException e) {
e.printStackTrace();
}
return alternative;
}
} |
package org.c4sg.dao;
import java.util.List;
import java.util.Map;
import org.c4sg.entity.Skill;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.CrudRepository;
import org.springframework.data.repository.query.Param;
/**
 * Spring Data repository for Skill entities, with JPQL projections joining
 * skills to the users and projects that declare them.
 */
public interface SkillDAO extends CrudRepository<Skill, Integer> {
// All skills sorted alphabetically by name (derived query).
List<Skill> findAllByOrderBySkillNameAsc();
// (skillName, userCount) pairs, most popular skill first.
String FIND_SKILL_USERCOUNT ="select s.skillName as skillName, count(*) as userCount from UserSkill us "
+"inner join us.skill s group by us.skill "
+"order by userCount desc, skillName";
// Skill names for one user, in the user's chosen display order.
String FIND_SKILL_FOR_USER ="select s.skillName as skillName "
+"from UserSkill us inner join us.skill s where us.user.id= :id order by us.displayOrder";
// Skill names for one project, in the project's chosen display order.
String FIND_SKILL_FOR_PROJECT ="select s.skillName as skillName "
+"from ProjectSkill ps inner join ps.skill s where ps.project.id= :id order by ps.displayOrder";
// Each row maps the projected aliases (skillName, userCount) to their values.
@Query(FIND_SKILL_USERCOUNT)
List<Map<String, Object>> findSkillsAndUserCount();
@Query(FIND_SKILL_FOR_USER)
List<Map<String, Object>> findSkillsByUserId(@Param("id") Integer id);
@Query(FIND_SKILL_FOR_PROJECT)
List<Map<String, Object>> findSkillsByProjectId(@Param("id") Integer id);
}
package tw.kewang.hbase.dao.scan;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.Filter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import tw.kewang.hbase.dao.Constants;
public class ScanBuilder {
private static final Logger LOG = LoggerFactory
.getLogger(ScanBuilder.class);
private Scan scan = new Scan();
public ScanBuilder setCaching(int caching) {
scan.setCaching(caching);
return this;
}
public ScanBuilder setBatch(int batch) {
scan.setBatch(batch);
return this;
}
public ScanBuilder setMaxVersions(int maxVersions) {
scan.setMaxVersions(maxVersions);
return this;
}
public ScanBuilder setStartRow(byte[] startRow) {
scan.setStartRow(startRow);
return this;
}
public ScanBuilder setStopRow(byte[] stopRow) {
scan.setStopRow(stopRow);
return this;
}
public ScanBuilder setReversed(boolean reversed) {
scan.setReversed(reversed);
return this;
}
public ScanBuilder setFilter(Filter filter) {
scan.setFilter(filter);
return this;
}
public ScanBuilder setTimeRange(long minStamp, long maxStamp) {
try {
scan.setTimeRange(minStamp, maxStamp);
} catch (Exception e) {
LOG.error(Constants.EXCEPTION_PREFIX, e);
}
return this;
}
public ScanBuilder setTimeStamp(long timestamp) {
scan.setTimeStamp(timestamp);
return this;
}
public Scan create() {
return scan;
}
} |
package org.javacs;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.JsonNode;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import com.sun.source.util.TreePath;
import com.sun.source.util.Trees;
import org.eclipse.lsp4j.*;
import org.eclipse.lsp4j.jsonrpc.messages.Either;
import org.eclipse.lsp4j.services.LanguageClient;
import org.eclipse.lsp4j.services.LanguageServer;
import org.eclipse.lsp4j.services.TextDocumentService;
import org.eclipse.lsp4j.services.WorkspaceService;
import org.w3c.dom.Document;
import org.xml.sax.SAXException;
import javax.lang.model.element.Element;
import javax.tools.JavaFileObject;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import java.io.*;
import java.net.URI;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.Duration;
import java.time.Instant;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.function.Function;
import java.util.logging.Handler;
import java.util.logging.Level;
import java.util.logging.LogRecord;
import java.util.logging.Logger;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.javacs.Main.JSON;
class FindConfig {
// Root of the open workspace; the upward config search stops at this directory.
private final Path workspaceRoot;
/**
 * Instead of looking for javaconfig.json, just use this one.
 * For testing.
 */
private final Optional<JavacConfig> testConfig;
// TODO invalidate cache when VSCode notifies us config file has changed
// Caches: configBySource maps a source directory to the configs found by walking
// upward from it; configByDir caches the configs parsed from a single directory.
private final Map<Path, List<JavacConfig>> configBySource = new HashMap<>(), configByDir = new HashMap<>();
// testConfig is normally Optional.empty(); tests inject a fixed config instead.
FindConfig(Path workspaceRoot, Optional<JavacConfig> testConfig) {
this.workspaceRoot = workspaceRoot;
this.testConfig = testConfig;
}
/**
 * Finds the javac configuration governing {@code file}, searching upward from
 * its containing directory (results are cached per directory).
 *
 * @param file a source file or directory inside the workspace
 * @return the matching config, or empty when none applies
 */
Optional<JavacConfig> forFile(Path file) {
    // Config lookup is directory-based: normalize to the containing directory.
    Path dir = file.toFile().isDirectory() ? file : file.getParent();
    if (dir == null) {
        return Optional.empty();
    }
    List<JavacConfig> candidates = configBySource.computeIfAbsent(dir, this::doFindConfig);
    return chooseConfig(candidates, dir);
}
/**
 * Walks upward from {@code sourceDir} looking for a directory that contains a
 * recognized config file (javaconfig.json, pom.xml, ...). Stops once a config
 * is found, or when the walk reaches an ancestor of the workspace root, or
 * when it runs off the top of the filesystem.
 *
 * @param sourceDir directory to start the search from
 * @return the configs parsed from the first matching directory, or empty
 */
private List<JavacConfig> doFindConfig(final Path sourceDir) {
    if (testConfig.isPresent())
        return ImmutableList.of(testConfig.get());
    Path dir = sourceDir;
    // Fix: guard against dir becoming null. Previously, a sourceDir outside
    // workspaceRoot walked past the filesystem root and called
    // readIfConfig(null), throwing a NullPointerException.
    while (dir != null) {
        List<JavacConfig> found = readIfConfig(dir);
        if (!found.isEmpty()) {
            LOG.info("Found " + dir + "/javaconfig.json for " + sourceDir);
            return found;
        }
        else if (workspaceRoot.startsWith(dir))
            return Collections.emptyList();
        else
            dir = dir.getParent();
    }
    // Walked off the filesystem root without entering the workspace: no config.
    return Collections.emptyList();
}
/**
 * Picks the first of the found configs whose source path covers {@code dir}.
 *
 * @param found configs discovered for a directory
 * @param dir   the directory being resolved
 * @return the first matching config, or empty
 */
private Optional<JavacConfig> chooseConfig(List<JavacConfig> found, Path dir) {
    for (JavacConfig candidate : found) {
        if (matchesDir(candidate, dir)) {
            return Optional.of(candidate);
        }
    }
    return Optional.empty();
}
/** True when {@code sourceDir} lies under any of the config's source roots. */
private boolean matchesDir(JavacConfig config, Path sourceDir) {
    return config.sourcePath.stream().anyMatch(sourceDir::startsWith);
}
/**
 * If directory contains a config file, for example javaconfig.json or an eclipse project file, read it.
 */
private List<JavacConfig> readIfConfig(Path dir) {
// Cached per directory: parsing involves file I/O and possibly invoking maven.
return configByDir.computeIfAbsent(dir, this::doReadIfConfig);
}
// Parses the config file(s) present in dir. javaconfig.json wins over pom.xml;
// a pom.xml yields two configs (main scope and test scope). Returns empty when
// dir contains no recognized config file.
private List<JavacConfig> doReadIfConfig(Path dir) {
// Converts one javaconfig.json entry: paths are resolved relative to dir,
// and the doc path is wrapped in an already-completed future.
Function<JavaConfigJson, JavacConfig> parseJavaConfigJson = json -> {
Set<Path> classPath = readClassPath(dir, json.classPath, json.classPathFile),
docPath = readClassPath(dir, json.docPath, json.docPathFile);
Set<Path> sourcePath = json.sourcePath.stream().map(dir::resolve).collect(Collectors.toSet());
Path outputDirectory = dir.resolve(json.outputDirectory);
return new JavacConfig(sourcePath, classPath, outputDirectory, CompletableFuture.completedFuture(docPath));
};
if (Files.exists(dir.resolve("javaconfig.json"))) {
return readJavaConfigJson(dir.resolve("javaconfig.json")).stream()
.map(parseJavaConfigJson)
.collect(Collectors.toList());
}
else if (Files.exists(dir.resolve("pom.xml"))) {
// Both scopes, so test sources resolve against the test classpath.
return ImmutableList.of(
readPomXml(dir, false),
readPomXml(dir, true)
);
}
// TODO add more file types
else {
return Collections.emptyList();
}
}
/**
 * Builds a classpath from a config's direct entries plus (optionally) a
 * classpath file, resolving every entry against {@code dir}.
 *
 * @param dir               directory the config file lives in
 * @param jsonClassPath     entries listed directly in the config
 * @param jsonClassPathFile optional file whose contents list further entries
 * @return the combined, resolved classpath
 */
private Set<Path> readClassPath(Path dir, Set<Path> jsonClassPath, Optional<Path> jsonClassPathFile) {
    Set<Path> result = new HashSet<>();
    // Entries listed indirectly via a classpath file, if one was configured.
    jsonClassPathFile
            .map(dir::resolve)
            .ifPresent(file -> result.addAll(readClassPathFile(file)));
    // Entries listed directly in the config.
    for (Path entry : jsonClassPath)
        result.add(dir.resolve(entry));
    return result;
}
// Derives a JavacConfig from dir/pom.xml by asking maven for the effective pom,
// then extracting classpath and source directories from it. The doc (sources)
// classpath is computed asynchronously since it may download artifacts.
private JavacConfig readPomXml(Path dir, boolean testScope) {
Path originalPom = dir.resolve("pom.xml");
Path effectivePom = generateEffectivePom(originalPom);
// Invoke maven to get classpath
Set<Path> classPath = buildClassPath(effectivePom, testScope, false);
// Get source directory from pom.xml
Set<Path> sourcePath = sourceDirectories(effectivePom, testScope);
// Use maven output directory so incremental compilation uses maven-generated .class files
// NOTE(review): these paths resolve against the process working directory,
// not against dir — confirm that is intended for multi-module workspaces.
Path outputDirectory = testScope ?
Paths.get("target/test-classes").toAbsolutePath() :
Paths.get("target/classes").toAbsolutePath();
JavacConfig config = new JavacConfig(
sourcePath,
classPath,
outputDirectory,
CompletableFuture.supplyAsync(() -> buildClassPath(effectivePom, testScope, true))
);
LOG.info("Inferred from " + originalPom + ":");
LOG.info("\tsourcePath: " + Joiner.on(" ").join(sourcePath));
LOG.info("\tclassPath: " + Joiner.on(" ").join(classPath));
LOG.info("\tdocPath: (pending)");
LOG.info("\toutputDirectory: " + outputDirectory);
return config;
}
/**
 * Runs {@code mvn help:effective-pom} for the given pom.xml and returns the path of a
 * temporary file containing the effective pom.
 *
 * <p>Uses {@link ProcessBuilder} with an argument list rather than
 * {@code Runtime.exec(String)}: the single-string form tokenizes on whitespace, which
 * breaks when the mvn executable path (common on Windows) contains spaces.
 *
 * @throws RuntimeException if mvn exits non-zero or cannot be started
 */
private static Path generateEffectivePom(Path pomXml) {
    try {
        Objects.requireNonNull(pomXml, "pom.xml path is null");
        Path effectivePom = Files.createTempFile("effective-pom", ".xml");
        LOG.info(String.format("Emit effective pom for %s to %s", pomXml, effectivePom));
        ProcessBuilder processBuilder = new ProcessBuilder(
                getMvnCommand(),
                "help:effective-pom",
                "-Doutput=" + effectivePom
        );
        // Run mvn from the pom's directory so it picks up the right project.
        processBuilder.directory(pomXml.toAbsolutePath().getParent().toFile());
        String cmd = String.join(" ", processBuilder.command());
        int result = processBuilder.start().waitFor();
        if (result != 0)
            throw new RuntimeException("`" + cmd + "` returned " + result);
        return effectivePom;
    } catch (InterruptedException e) {
        // Restore the interrupt flag so callers further up can observe it.
        Thread.currentThread().interrupt();
        throw new RuntimeException(e);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
/**
 * Runs {@code mvn dependency:build-classpath} and returns the resolved class path.
 *
 * <p>Maven is told to write the class path to a temporary file, which is then read
 * back with {@link #readClassPathFile(Path)}.
 *
 * <p>Uses {@link ProcessBuilder} with an argument list rather than
 * {@code Runtime.exec(String)}, which tokenizes on whitespace and breaks when the mvn
 * executable path contains spaces.
 *
 * @param pomXml     effective pom to resolve against
 * @param testScope  when true use scope {@code test}, otherwise {@code compile}
 * @param sourceJars when true resolve {@code -Dclassifier=sources} jars (doc path)
 * @throws RuntimeException if mvn exits non-zero or cannot be started
 */
private static Set<Path> buildClassPath(Path pomXml, boolean testScope, boolean sourceJars) {
    try {
        Objects.requireNonNull(pomXml, "pom.xml path is null");
        // Tell maven to output classpath to a temporary file
        // TODO if pom.xml already specifies outputFile, use that location
        Path classPathTxt = Files.createTempFile(sourceJars ? "sourcepath" : "classpath", ".txt");
        LOG.info(String.format(
                "Emit %s to %s",
                sourceJars ? "docPath" : "classpath",
                classPathTxt
        ));
        ProcessBuilder processBuilder = new ProcessBuilder(
                getMvnCommand(),
                "dependency:build-classpath",
                "-DincludeScope=" + (testScope ? "test" : "compile"),
                "-Dmdep.outputFile=" + classPathTxt
        );
        if (sourceJars)
            processBuilder.command().add("-Dclassifier=sources");
        processBuilder.directory(pomXml.toAbsolutePath().getParent().toFile());
        String cmd = String.join(" ", processBuilder.command());
        int result = processBuilder.start().waitFor();
        if (result != 0)
            throw new RuntimeException("`" + cmd + "` returned " + result);
        Set<Path> found = readClassPathFile(classPathTxt);
        LOG.info("Read " + Joiner.on(" ").join(found) + " from " + classPathTxt);
        return found;
    } catch (InterruptedException e) {
        // Restore the interrupt flag so callers further up can observe it.
        Thread.currentThread().interrupt();
        throw new RuntimeException(e);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
/**
 * Returns the command used to invoke Maven.
 *
 * <p>On non-Windows platforms this is simply {@code mvn}. On Windows, Maven ships as
 * a batch script, so we search PATH for {@code mvn.cmd} then {@code mvn.bat}. If
 * neither is found we fall back to the bare {@code mvn} name instead of returning
 * null — the original returned null here, which produced a nonsense "null ..." command
 * line downstream.
 */
private static String getMvnCommand() {
    // File.separatorChar == '\\' identifies Windows without touching os.name.
    if (File.separatorChar != '\\') {
        return "mvn";
    }
    String mvnCommand = findExecutableOnPath("mvn.cmd");
    if (mvnCommand == null) {
        mvnCommand = findExecutableOnPath("mvn.bat");
    }
    return mvnCommand != null ? mvnCommand : "mvn";
}
/**
 * Searches each directory on the PATH environment variable for an executable file
 * with the given name.
 *
 * @return the absolute path of the first match, or null when not found or when PATH
 *         is not set (the original NPE'd on a missing PATH variable)
 */
private static String findExecutableOnPath(String name) {
    String path = System.getenv("PATH");
    if (path == null) {
        // Defensive: getenv returns null when the variable is absent.
        return null;
    }
    for (String dirname : path.split(File.pathSeparator)) {
        File file = new File(dirname, name);
        if (file.isFile() && file.canExecute()) {
            return file.getAbsolutePath();
        }
    }
    return null;
}
/**
 * Returns the source directories declared by the pom: the main source directory,
 * plus the test source directory when {@code testScope} is requested.
 */
private static Set<Path> sourceDirectories(Path pomXml, boolean testScope) {
    Path mainSources = onlySourceDirectories(pomXml, false);
    if (!testScope) {
        return ImmutableSet.of(mainSources);
    }
    Path testSources = onlySourceDirectories(pomXml, true);
    return ImmutableSet.of(testSources, mainSources);
}
/**
 * Reads the (test) source directory from a pom via XPath, falling back to the Maven
 * convention ({@code src/main/java} / {@code src/test/java}) when the pom does not
 * declare one. The result is resolved relative to the pom's directory.
 */
private static Path onlySourceDirectories(Path pomXml, boolean testScope) {
    String defaultSourceDir = testScope ? "src/test/java" : "src/main/java";
    String expression = testScope ? "/project/build/testSourceDirectory" : "/project/build/sourceDirectory";
    Document doc = parsePomXml(pomXml);
    String sourceDir;
    try {
        sourceDir = XPathFactory.newInstance().newXPath().compile(expression).evaluate(doc);
    } catch (XPathExpressionException e) {
        throw new RuntimeException(e);
    }
    // XPath evaluate returns "" when the element is absent.
    if (sourceDir == null || sourceDir.isEmpty()) {
        LOG.info("Use default source directory " + defaultSourceDir);
        sourceDir = defaultSourceDir;
    }
    else LOG.info("Use source directory from pom.xml " + sourceDir);
    return pomXml.resolveSibling(sourceDir).toAbsolutePath();
}
/**
 * Parses a pom.xml into a DOM {@link Document}.
 *
 * <p>External entity resolution and DTDs are disabled (XXE hardening): pom files have
 * no legitimate use for them, and the effective pom may incorporate content from
 * third-party parent poms.
 *
 * @throws RuntimeException wrapping any parse/configuration/IO failure
 */
private static Document parsePomXml(Path pomXml) {
    try {
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        // XXE hardening: forbid DOCTYPEs and external entities entirely.
        factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
        factory.setFeature("http://xml.org/sax/features/external-general-entities", false);
        factory.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
        factory.setXIncludeAware(false);
        factory.setExpandEntityReferences(false);
        DocumentBuilder builder = factory.newDocumentBuilder();
        return builder.parse(pomXml.toFile());
    } catch (IOException | ParserConfigurationException | SAXException e) {
        throw new RuntimeException(e);
    }
}
/**
 * Parses a javaconfig.json file into one or more {@link JavaConfigJson} entries.
 *
 * <p>The file may contain either a single JSON object or an array of objects; a
 * single object is normalized to a one-element list.
 *
 * @throws ShowMessageException (carrying an LSP error message) when the file cannot
 *         be read or parsed, so the client can surface the problem to the user
 */
private List<JavaConfigJson> readJavaConfigJson(Path configFile) {
    try {
        JsonNode json = JSON.readValue(configFile.toFile(), JsonNode.class);
        if (json.isArray())
            return JSON.convertValue(json, new TypeReference<List<JavaConfigJson>>() { });
        else {
            JavaConfigJson one = JSON.convertValue(json, JavaConfigJson.class);
            return ImmutableList.of(one);
        }
    } catch (IOException e) {
        MessageParams message = new MessageParams();
        message.setMessage("Error reading " + configFile);
        message.setType(MessageType.Error);
        throw new ShowMessageException(message, e);
    }
}
/**
 * Reads a class-path file (as written by {@code mvn dependency:build-classpath}):
 * entries separated by {@link File#pathSeparator}, resolved against the file's own
 * directory.
 *
 * <p>Fixes two defects in the original: the input stream was never closed (resource
 * leak), and the reader used the platform default charset. {@link Files#newBufferedReader}
 * in try-with-resources closes the stream and reads UTF-8 explicitly.
 *
 * @throws ShowMessageException (carrying an LSP error message) when the file cannot be read
 */
private static Set<Path> readClassPathFile(Path classPathFilePath) {
    try (BufferedReader reader = Files.newBufferedReader(classPathFilePath)) {
        // NOTE: lines are joined without a separator, matching the original behavior;
        // maven writes the whole class path on a single line.
        String text = reader.lines().collect(Collectors.joining());
        Path dir = classPathFilePath.getParent();
        return Arrays.stream(text.split(File.pathSeparator))
                .map(dir::resolve)
                .collect(Collectors.toSet());
    } catch (IOException e) {
        MessageParams message = new MessageParams();
        message.setMessage("Error reading " + classPathFilePath);
        message.setType(MessageType.Error);
        throw new ShowMessageException(message, e);
    }
}
private static final Logger LOG = Logger.getLogger("main");
} |
package util.validator;
import http.helpers.Helper;
import net.itarray.automotion.Element;
import net.itarray.automotion.Errors;
import net.itarray.automotion.Zoom;
import net.itarray.automotion.internal.DriverFacade;
import org.apache.commons.io.FileUtils;
import org.apache.log4j.Logger;
import org.json.simple.JSONObject;
import org.json.simple.parser.ParseException;
import org.openqa.selenium.*;
import org.openqa.selenium.Dimension;
import util.general.HtmlReportBuilder;
import util.validator.properties.Padding;
import javax.imageio.ImageIO;
import java.awt.*;
import java.awt.geom.Rectangle2D;
import java.awt.image.BufferedImage;
import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.atomic.AtomicLong;
import static environment.EnvironmentFactory.*;
import static util.general.SystemHelper.isRetinaDisplay;
import static util.validator.Constants.*;
import static util.validator.ResponsiveUIValidator.Units.PX;
public class ResponsiveUIValidator {
// Sentinel used by subclasses for "no offset constraint" comparisons.
static final int MIN_OFFSET = -10000;
private final static Logger LOG = Logger.getLogger(ResponsiveUIValidator.class);
// Wrapper around the WebDriver used for screenshots, zoom and page-size queries.
private final DriverFacade driver;
// NOTE(review): heavy use of static mutable state below means concurrent validators
// share root element, errors, colors and report settings — confirm single-threaded use.
private static Element rootElement;
static long startTime;
// When true, y-coordinates are shifted to account for the native mobile top bar.
private static boolean isMobileTopBar = false;
// When true, validate() writes JSON results and annotated screenshots.
private static boolean withReport = false;
private static String scenarioName = "Default";
// Drawing colors for the annotated screenshot.
private static Color rootColor = new Color(255, 0, 0, 255);
private static Color highlightedElementsColor = new Color(255, 0, 255, 255);
private static Color linesColor = Color.ORANGE;
// Browser zoom level as reported by the driver, e.g. "100%".
private static String currentZoom = "100%";
// JSON result files accumulated across validations; consumed by generateReport().
private static List<String> jsonFiles = new ArrayList<>();
protected static Errors errors;
// Which offset guide lines to draw on the screenshot.
boolean drawLeftOffsetLine = false;
boolean drawRightOffsetLine = false;
boolean drawTopOffsetLine = false;
boolean drawBottomOffsetLine = false;
String rootElementReadableName = "Root Element";
protected List<Element> rootElements;
// Measurement units for offset/size arguments; PX by default.
ResponsiveUIValidator.Units units = PX;
private Dimension pageSize;
/**
 * Creates a validator for the given WebDriver.
 */
public ResponsiveUIValidator(WebDriver driver) {
    this(new DriverFacade(driver));
}
/**
 * Core constructor: resets the shared error collector and snapshots the current
 * zoom level and page size from the driver.
 */
protected ResponsiveUIValidator(DriverFacade driver) {
    this.driver = driver;
    // New validation run — discard errors from any previous run.
    ResponsiveUIValidator.errors = new Errors();
    currentZoom = driver.getZoom();
    pageSize = driver.retrievePageSize();
}
// Accessors for the shared root element under validation.
protected static Element getRootElement() {
    return rootElement;
}
protected static void setRootElement(Element element) {
    ResponsiveUIValidator.rootElement = element;
}
/**
 * Set color for main element. This color will be used for highlighting element in results
 *
 * @param color color used to outline the root element in report screenshots
 */
public void setColorForRootElement(Color color) {
    rootColor = color;
}
/**
 * Set color for compared elements. This color will be used for highlighting elements in results
 *
 * @param color color used to outline compared elements in report screenshots
 */
public void setColorForHighlightedElements(Color color) {
    highlightedElementsColor = color;
}
/**
 * Set color for grid lines. This color will be used for the lines of alignment grid in results
 *
 * @param color color used for offset guide lines in report screenshots
 */
public void setLinesColor(Color color) {
    linesColor = color;
}
/**
 * Set top bar mobile offset. Applicable only for native mobile testing
 *
 * @param state when true, y-coordinates are shifted to compensate for the device top bar
 */
public void setTopBarMobileOffset(boolean state) {
    isMobileTopBar = state;
}
/**
 * Method that defines start of new validation. Needs to be called each time before calling findElement(), findElements()
 *
 * @return ResponsiveUIValidator a fresh validator (the constructor resets the shared error state)
 */
public ResponsiveUIValidator init() {
    return new ResponsiveUIValidator(driver);
}
/**
 * Method that defines start of new validation with specified name of scenario. Needs to be called each time before calling findElement(), findElements()
 *
 * @param scenarioName name recorded in the JSON report for this validation run
 * @return ResponsiveUIValidator a fresh validator (the constructor resets the shared error state)
 */
public ResponsiveUIValidator init(String scenarioName) {
    ResponsiveUIValidator.scenarioName = scenarioName;
    return new ResponsiveUIValidator(driver);
}
/**
 * Main method to specify which element we want to validate (can be called only findElement() OR findElements() for single validation)
 *
 * @param element the element to validate against others
 * @param readableNameOfElement human-readable name used in error messages and reports
 * @return UIValidator
 */
public UIValidator findElement(WebElement element, String readableNameOfElement) {
    return new UIValidator(driver, element, readableNameOfElement);
}
/**
 * Main method to specify the list of elements that we want to validate (can be called only findElement() OR findElements() for single validation)
 *
 * @param elements the elements validated as a group (chunk)
 * @return ResponsiveUIChunkValidator
 */
public ResponsiveUIChunkValidator findElements(java.util.List<WebElement> elements) {
    return new ResponsiveUIChunkValidator(driver, elements);
}
/**
 * Change units to Pixels or % (Units.PX, Units.PERCENT)
 *
 * @param units unit in which subsequent offset/size arguments are interpreted
 * @return UIValidator
 */
public ResponsiveUIValidator changeMetricsUnitsTo(Units units) {
    this.units = units;
    return this;
}
/**
 * Methods needs to be called to collect all the results in JSON file and screenshots
 *
 * @return ResponsiveUIValidator
 */
public ResponsiveUIValidator drawMap() {
    withReport = true;
    return this;
}
/**
 * Call method to summarize and validate the results (can be called with drawMap(). In this case result will be only True or False)
 *
 * @return true when no validation errors were collected; when errors exist, the
 *         report is compiled first (if drawMap() was requested) and false is returned
 */
public boolean validate() {
    boolean hasProblems = errors.hasMessages();
    if (hasProblems) {
        compileValidationReport();
    }
    return !hasProblems;
}
/**
 * Writes the collected errors to a JSON report file and, when errors exist, an
 * annotated screenshot. No-op unless drawMap() enabled reporting.
 *
 * <p>Fixes from review: the original dereferenced {@code screenshot.getName()} even
 * when taking the screenshot had failed (NPE); it wrote the JSON file before ensuring
 * the target directory existed, then contained a second, effectively dead write block;
 * and it invoked drawScreenshot with a possibly-null file.
 */
private void compileValidationReport() {
    if (!withReport) {
        return;
    }
    JSONObject jsonResults = new JSONObject();
    jsonResults.put(ERROR_KEY, errors.hasMessages());
    jsonResults.put(DETAILS, errors.getMessages());
    // Screenshot capture is best-effort; the JSON report is still written on failure.
    File screenshot = null;
    BufferedImage img = null;
    try {
        screenshot = driver.takeScreenshot();
        img = ImageIO.read(screenshot);
    } catch (Exception e) {
        LOG.error("Failed to create screenshot file: " + e.getMessage());
    }
    JSONObject rootDetails = new JSONObject();
    if (rootElement != null) {
        rootDetails.put(X, rootElement.getX());
        rootDetails.put(Y, rootElement.getY());
        rootDetails.put(WIDTH, rootElement.getWidth());
        rootDetails.put(HEIGHT, rootElement.getHeight());
    }
    jsonResults.put(SCENARIO, scenarioName);
    jsonResults.put(ROOT_ELEMENT, rootDetails);
    jsonResults.put(TIME_EXECUTION, String.valueOf(System.currentTimeMillis() - startTime) + " milliseconds");
    jsonResults.put(ELEMENT_NAME, rootElementReadableName);
    // Only reference a screenshot file when one was actually taken (avoids NPE).
    if (screenshot != null) {
        jsonResults.put(SCREENSHOT, rootElementReadableName.replace(" ", "") + "-" + screenshot.getName());
    }
    long ms = System.currentTimeMillis();
    String uuid = Helper.getGeneratedStringWithLength(7);
    String jsonFileName = rootElementReadableName.replace(" ", "") + "-automotion" + ms + uuid + ".json";
    File jsonFile = new File(TARGET_AUTOMOTION_JSON + jsonFileName);
    // Ensure the output directory exists before the (single) write.
    File parentDir = jsonFile.getParentFile();
    if (parentDir != null && !parentDir.exists() && !parentDir.mkdirs()) {
        LOG.error("Cannot create report directory: " + parentDir);
    }
    try (Writer writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(jsonFile), StandardCharsets.UTF_8))) {
        writer.write(jsonResults.toJSONString());
    } catch (IOException ex) {
        LOG.error("Cannot create json report: " + ex.getMessage());
    }
    jsonFiles.add(jsonFileName);
    if ((boolean) jsonResults.get(ERROR_KEY) && screenshot != null) {
        drawScreenshot(screenshot, img);
    }
}
/**
 * Call method to generate HTML report
 *
 * <p>Failures are logged via the class logger instead of the original
 * {@code printStackTrace()}, consistent with how the rest of this class reports
 * problems.
 */
public void generateReport() {
    if (withReport && !jsonFiles.isEmpty()) {
        try {
            new HtmlReportBuilder().buildReport(jsonFiles);
        } catch (IOException | ParseException | InterruptedException e) {
            LOG.error("Cannot generate HTML report", e);
        }
    }
}
/**
 * Call method to generate HTML report with specified file report name
 *
 * @param name report file name
 */
public void generateReport(String name) {
    if (withReport && !jsonFiles.isEmpty()) {
        try {
            new HtmlReportBuilder().buildReport(name, jsonFiles);
        } catch (IOException | ParseException | InterruptedException e) {
            LOG.error("Cannot generate HTML report", e);
        }
    }
}
/**
 * Annotates the screenshot with the root element outline and every element mentioned
 * in the error messages, then writes it back to {@code output} and copies it to the
 * report image directory.
 *
 * @param output screenshot file taken by the driver; overwritten with the annotated image
 * @param img    decoded screenshot, or null when decoding failed (then only an error is logged)
 */
void drawScreenshot(File output, BufferedImage img) {
    if (img != null) {
        Graphics2D g = img.createGraphics();
        drawRoot(rootColor, g, img);
        for (Object obj : errors.getMessages()) {
            JSONObject det = (JSONObject) obj;
            JSONObject details = (JSONObject) det.get(REASON);
            JSONObject numE = (JSONObject) details.get(ELEMENT);
            if (numE != null) {
                // NOTE(review): the double cast assumes the JSON numbers are stored as
                // Float — a Double/Long here would throw ClassCastException; confirm
                // against Errors' serialization.
                int x = (int) (float) numE.get(X);
                int y = (int) (float) numE.get(Y);
                int width = (int) (float) numE.get(WIDTH);
                int height = (int) (float) numE.get(HEIGHT);
                g.setColor(highlightedElementsColor);
                g.setStroke(new BasicStroke(2));
                drawRectByExtend(g, x, y, width, height);
            }
        }
        try {
            ImageIO.write(img, "png", output);
            // Copy into the report directory under a name tied to the root element.
            File file = new File(TARGET_AUTOMOTION_IMG + rootElementReadableName.replace(" ", "") + "-" + output.getName());
            FileUtils.copyFile(output, file);
        } catch (IOException e) {
            e.printStackTrace();
        }
    } else {
        LOG.error("Taking of screenshot was failed for some reason.");
    }
}
/**
 * Checks every pair of elements in the list and records an error for each element
 * that overlaps a later one (at most one error per first element — the inner scan
 * stops at the first overlap it finds).
 */
void validateElementsAreNotOverlapped(List<Element> elements) {
    int count = elements.size();
    for (int i = 0; i < count; i++) {
        Element current = elements.get(i);
        for (int j = i + 1; j < count; j++) {
            if (current.overlaps(elements.get(j))) {
                errors.add("Elements are overlapped", current);
                break;
            }
        }
    }
}
/**
 * Validates that elements form a grid with the expected number of rows and columns.
 *
 * <p>Rows are inferred by grouping elements on their exact y-coordinate (a sorted
 * map of y -> count). A row-count mismatch is reported when {@code rows > 0}; column
 * counts are checked per row when {@code columns > 0}.
 *
 * @param columns expected elements per row; 0 disables the column check
 * @param rows    expected number of rows; 0 disables the row check
 */
void validateGridAlignment(List<Element> elements, int columns, int rows) {
    // Sorted map: distinct y-coordinate -> number of elements at that y (row size).
    ConcurrentSkipListMap<Integer, AtomicLong> map = new ConcurrentSkipListMap<>();
    for (Element element : elements) {
        Integer y = element.getY();
        map.putIfAbsent(y, new AtomicLong(0));
        map.get(y).incrementAndGet();
    }
    int mapSize = map.size();
    if (rows > 0) {
        if (mapSize != rows) {
            errors.add(String.format("Elements in a grid are not aligned properly. Looks like grid has wrong amount of rows. Expected is %d. Actual is %d", rows, mapSize));
        }
    }
    if (columns > 0) {
        int errorLastLine = 0;
        int rowCount = 1;
        for (Map.Entry<Integer, AtomicLong> entry : map.entrySet()) {
            if (rowCount <= mapSize) {
                int actualInARow = entry.getValue().intValue();
                if (actualInARow != columns) {
                    // The first short row is tolerated (a ragged last row is legal);
                    // errors are only recorded from the second mismatch onwards.
                    errorLastLine++;
                    if (errorLastLine > 1) {
                        errors.add(String.format("Elements in a grid are not aligned properly in row #%d. Expected %d elements in a row. Actually it's %d", rowCount, columns, actualInARow));
                    }
                }
                rowCount++;
            }
        }
    }
}
/**
 * Verifies each consecutive pair in the chunk shares the same right offset;
 * mismatches are reported against the second element of the pair.
 */
void validateRightOffsetForChunk(List<Element> elements) {
    for (int index = 0; index < elements.size() - 1; index++) {
        Element current = elements.get(index);
        Element next = elements.get(index + 1);
        if (!current.hasEqualRightOffsetAs(next)) {
            errors.add(String.format("Element #%d has not the same right offset as element #%d", index + 1, index + 2), next);
        }
    }
}
/**
 * Verifies each consecutive pair in the chunk shares the same left offset.
 */
void validateLeftOffsetForChunk(List<Element> elements) {
    for (int index = 0; index < elements.size() - 1; index++) {
        Element current = elements.get(index);
        Element next = elements.get(index + 1);
        if (!current.hasEqualLeftOffsetAs(next)) {
            errors.add(String.format("Element #%d has not the same left offset as element #%d", index + 1, index + 2), next);
        }
    }
}
/**
 * Verifies each consecutive pair in the chunk shares the same top offset.
 */
void validateTopOffsetForChunk(List<Element> elements) {
    for (int index = 0; index < elements.size() - 1; index++) {
        Element current = elements.get(index);
        Element next = elements.get(index + 1);
        if (!current.hasEqualTopOffsetAs(next)) {
            errors.add(String.format("Element #%d has not the same top offset as element #%d", index + 1, index + 2), next);
        }
    }
}
/**
 * Verifies each consecutive pair in the chunk shares the same bottom offset.
 */
void validateBottomOffsetForChunk(List<Element> elements) {
    for (int index = 0; index < elements.size() - 1; index++) {
        Element current = elements.get(index);
        Element next = elements.get(index + 1);
        if (!current.hasEqualBottomOffsetAs(next)) {
            errors.add(String.format("Element #%d has not the same bottom offset as element #%d", index + 1, index + 2), next);
        }
    }
}
// Records an error when the element's right offset differs from the root element's.
void validateRightOffsetForElements(Element element, String readableName) {
    if (!rootElement.hasEqualRightOffsetAs(element)) {
        errors.add(String.format("Element '%s' has not the same right offset as element '%s'", rootElementReadableName, readableName), element);
    }
}
// Records an error when the element's left offset differs from the root element's.
void validateLeftOffsetForElements(Element element, String readableName) {
    if (!rootElement.hasEqualLeftOffsetAs(element)) {
        errors.add(String.format("Element '%s' has not the same left offset as element '%s'", rootElementReadableName, readableName), element);
    }
}
// Records an error when the element's top offset differs from the root element's.
void validateTopOffsetForElements(Element element, String readableName) {
    if (!rootElement.hasEqualTopOffsetAs(element)) {
        errors.add(String.format("Element '%s' has not the same top offset as element '%s'", rootElementReadableName, readableName), element);
    }
}
// Records an error when the element's bottom offset differs from the root element's.
void validateBottomOffsetForElements(Element element, String readableName) {
    if (!rootElement.hasEqualBottomOffsetAs(element)) {
        errors.add(String.format("Element '%s' has not the same bottom offset as element '%s'", rootElementReadableName, readableName), element);
    }
}
// Records an error when the element overlaps the root element (overlap is forbidden).
void validateNotOverlappingWithElements(Element element, String readableName) {
    if (rootElement.overlaps(element)) {
        errors.add(String.format("Element '%s' is overlapped with element '%s' but should not", rootElementReadableName, readableName), element);
    }
}
// Records an error when the element does NOT overlap the root element (overlap is required).
void validateOverlappingWithElements(Element element, String readableName) {
    if (!rootElement.overlaps(element)) {
        errors.add(String.format("Element '%s' is not overlapped with element '%s' but should be", rootElementReadableName, readableName), element);
    }
}
/**
 * Checks the root element's offsets from each page edge against the given maxima and
 * records an error for every side that exceeds its limit.
 */
void validateMaxOffset(int top, int right, int bottom, int left) {
    Element root = getRootElement();
    int actualRight = root.getRightOffset(pageSize);
    int actualBottom = root.getBottomOffset(pageSize);
    if (root.getX() > left) {
        errors.add(String.format("Expected max left offset of element '%s' is: %spx. Actual left offset is: %spx", rootElementReadableName, left, root.getX()));
    }
    if (root.getY() > top) {
        errors.add(String.format("Expected max top offset of element '%s' is: %spx. Actual top offset is: %spx", rootElementReadableName, top, root.getY()));
    }
    if (actualRight > right) {
        errors.add(String.format("Expected max right offset of element '%s' is: %spx. Actual right offset is: %spx", rootElementReadableName, right, actualRight));
    }
    if (actualBottom > bottom) {
        errors.add(String.format("Expected max bottom offset of element '%s' is: %spx. Actual bottom offset is: %spx", rootElementReadableName, bottom, actualBottom));
    }
}
/**
 * Checks the root element's offsets from each page edge against the given minima and
 * records an error for every side that falls short of its limit.
 *
 * <p>Fix: the right-offset failure message previously said "Expected min top offset"
 * (copy-paste error); it now correctly says "right".
 */
void validateMinOffset(int top, int right, int bottom, int left) {
    int rootElementRightOffset = getRootElement().getRightOffset(pageSize);
    int rootElementBottomOffset = rootElement.getBottomOffset(pageSize);
    if (rootElement.getX() < left) {
        errors.add(String.format("Expected min left offset of element '%s' is: %spx. Actual left offset is: %spx", rootElementReadableName, left, rootElement.getX()));
    }
    if (rootElement.getY() < top) {
        errors.add(String.format("Expected min top offset of element '%s' is: %spx. Actual top offset is: %spx", rootElementReadableName, top, rootElement.getY()));
    }
    if (rootElementRightOffset < right) {
        errors.add(String.format("Expected min right offset of element '%s' is: %spx. Actual right offset is: %spx", rootElementReadableName, right, rootElementRightOffset));
    }
    if (rootElementBottomOffset < bottom) {
        errors.add(String.format("Expected min bottom offset of element '%s' is: %spx. Actual bottom offset is: %spx", rootElementReadableName, bottom, rootElementBottomOffset));
    }
}
// Records an error when the root element is taller than the given maximum.
void validateMaxHeight(int height) {
    if (!rootElement.hasMaxHeight(height)) {
        errors.add(String.format("Expected max height of element '%s' is: %spx. Actual height is: %spx", rootElementReadableName, height, rootElement.getHeight()));
    }
}
// Records an error when the root element is shorter than the given minimum.
void validateMinHeight(int height) {
    if (!rootElement.hasMinHeight(height)) {
        errors.add(String.format("Expected min height of element '%s' is: %spx. Actual height is: %spx", rootElementReadableName, height, rootElement.getHeight()));
    }
}
// Records an error when the root element is wider than the given maximum.
void validateMaxWidth(int width) {
    if (!rootElement.hasMaxWidth(width)) {
        errors.add(String.format("Expected max width of element '%s' is: %spx. Actual width is: %spx", rootElementReadableName, width, rootElement.getWidth()));
    }
}
// Records an error when the root element is narrower than the given minimum.
void validateMinWidth(int width) {
    if (!rootElement.hasMinWidth(width)) {
        errors.add(String.format("Expected min width of element '%s' is: %spx. Actual width is: %spx", rootElementReadableName, width, rootElement.getWidth()));
    }
}
// Records an error when the element's width differs from the root element's.
void validateSameWidth(Element element, String readableName) {
    if (!rootElement.hasSameWidthAs(element)) {
        errors.add(String.format("Element '%s' has not the same width as %s. Width of '%s' is %spx. Width of element is %spx", rootElementReadableName, readableName, rootElementReadableName, rootElement.getWidth(), element.getWidth()), element);
    }
}
// Records an error when the element's height differs from the root element's.
void validateSameHeight(Element element, String readableName) {
    if (!rootElement.hasSameHeightAs(element)) {
        errors.add(String.format("Element '%s' has not the same height as %s. Height of '%s' is %spx. Height of element is %spx", rootElementReadableName, readableName, rootElementReadableName, rootElement.getHeight(), element.getHeight()), element);
    }
}
// Records an error when the element's size (width or height) differs from the root element's.
void validateSameSize(Element element, String readableName) {
    if (!rootElement.hasSameSizeAs(element)) {
        errors.add(String.format("Element '%s' has not the same size as %s. Size of '%s' is %spx x %spx. Size of element is %spx x %spx", rootElementReadableName, readableName, rootElementReadableName, rootElement.getWidth(), rootElement.getHeight(), element.getWidth(), element.getHeight()), element);
    }
}
/**
 * Checks consecutive pairs of the list for equal width; both members of a mismatched
 * pair are reported. (The messages print [width, height], matching the original.)
 */
void validateSameWidth(List<Element> elements) {
    for (int index = 0; index < elements.size() - 1; index++) {
        Element current = elements.get(index);
        Element next = elements.get(index + 1);
        if (!current.hasSameWidthAs(next)) {
            errors.add(String.format("Element #%d has different width. Element width is: [%d, %d]", (index + 1), current.getWidth(), current.getHeight()), current);
            errors.add(String.format("Element #%d has different width. Element width is: [%d, %d]", (index + 2), next.getWidth(), next.getHeight()), next);
        }
    }
}
/**
 * Checks consecutive pairs of the list for equal height; both members of a
 * mismatched pair are reported.
 */
void validateSameHeight(List<Element> elements) {
    for (int index = 0; index < elements.size() - 1; index++) {
        Element current = elements.get(index);
        Element next = elements.get(index + 1);
        if (!current.hasSameHeightAs(next)) {
            errors.add(String.format("Element #%d has different height. Element height is: [%d, %d]", (index + 1), current.getWidth(), current.getHeight()), current);
            errors.add(String.format("Element #%d has different height. Element height is: [%d, %d]", (index + 2), next.getWidth(), next.getHeight()), next);
        }
    }
}
/**
 * Checks consecutive pairs of the list for equal size; both members of a mismatched
 * pair are reported.
 */
void validateSameSize(List<Element> elements) {
    for (int index = 0; index < elements.size() - 1; index++) {
        Element current = elements.get(index);
        Element next = elements.get(index + 1);
        if (!current.hasSameSizeAs(next)) {
            errors.add(String.format("Element #%d has different size. Element size is: [%d, %d]", (index + 1), current.getWidth(), current.getHeight()), current);
            errors.add(String.format("Element #%d has different size. Element size is: [%d, %d]", (index + 2), next.getWidth(), next.getHeight()), next);
        }
    }
}
/**
 * Records an error when the element has exactly the same width AND height as the
 * root element. The element is skipped when it IS the root element (same underlying
 * WebElement), so the root never fails against itself.
 */
void validateNotSameSize(Element element, String readableName) {
    if (!element.getWebElement().equals(getRootElement().getWebElement())) {
        int h = element.getHeight();
        int w = element.getWidth();
        if (h == rootElement.getHeight() && w == rootElement.getWidth()) {
            errors.add(String.format("Element '%s' has the same size as %s. Size of '%s' is %spx x %spx. Size of element is %spx x %spx", rootElementReadableName, readableName, rootElementReadableName, rootElement.getWidth(), rootElement.getHeight(), w, h), element);
        }
    }
}
/**
 * Checks consecutive pairs of the list and reports both members of any pair that
 * share the same size (size equality is forbidden here).
 */
void validateNotSameSize(List<Element> elements) {
    for (int index = 0; index < elements.size() - 1; index++) {
        Element current = elements.get(index);
        Element next = elements.get(index + 1);
        if (current.hasSameSizeAs(next)) {
            errors.add(String.format("Element #%d has same size. Element size is: [%d, %d]", (index + 1), current.getWidth(), current.getHeight()), current);
            errors.add(String.format("Element #%d has same size. Element size is: [%d, %d]", (index + 2), next.getWidth(), next.getHeight()), next);
        }
    }
}
/**
 * Checks consecutive pairs of the list and reports both members of any pair that
 * share the same width.
 */
void validateNotSameWidth(List<Element> elements) {
    for (int index = 0; index < elements.size() - 1; index++) {
        Element current = elements.get(index);
        Element next = elements.get(index + 1);
        if (current.hasSameWidthAs(next)) {
            errors.add(String.format("Element #%d has same width. Element width is: [%d, %d]", (index + 1), current.getWidth(), current.getHeight()), current);
            errors.add(String.format("Element #%d has same width. Element width is: [%d, %d]", (index + 2), next.getWidth(), next.getHeight()), next);
        }
    }
}
/**
 * Checks consecutive pairs of the list and reports both members of any pair that
 * share the same height.
 */
void validateNotSameHeight(List<Element> elements) {
    for (int index = 0; index < elements.size() - 1; index++) {
        Element current = elements.get(index);
        Element next = elements.get(index + 1);
        if (current.hasSameHeightAs(next)) {
            errors.add(String.format("Element #%d has same height. Element height is: [%d, %d]", (index + 1), current.getWidth(), current.getHeight()), current);
            errors.add(String.format("Element #%d has same height. Element height is: [%d, %d]", (index + 2), next.getWidth(), next.getHeight()), next);
        }
    }
}
/**
 * Checks that the given element sits below the root with a vertical margin inside
 * [minMargin, maxMargin]. Margin = element top minus root bottom.
 */
void validateBelowElement(Element element, int minMargin, int maxMargin) {
    int marginBetweenRoot = element.getY() - rootElement.getCornerY();
    if (marginBetweenRoot < minMargin || marginBetweenRoot > maxMargin) {
        errors.add(String.format("Below element aligned not properly. Expected margin should be between %spx and %spx. Actual margin is %spx", minMargin, maxMargin, marginBetweenRoot), element);
    }
}
// Checks only relative position (no margin constraint): element must be below the root.
void validateBelowElement(Element belowElement) {
    if (!getRootElement().hasBelowElement(belowElement)) {
        errors.add("Below element aligned not properly", belowElement);
    }
}
/**
 * Checks that the given element sits above the root with a vertical margin inside
 * [minMargin, maxMargin]. Margin = root top minus element bottom.
 */
void validateAboveElement(Element element, int minMargin, int maxMargin) {
    int marginBetweenRoot = rootElement.getY() - element.getCornerY();
    if (marginBetweenRoot < minMargin || marginBetweenRoot > maxMargin) {
        errors.add(String.format("Above element aligned not properly. Expected margin should be between %spx and %spx. Actual margin is %spx", minMargin, maxMargin, marginBetweenRoot), element);
    }
}
// Checks only relative position: element must be above the root.
void validateAboveElement(Element aboveElement) {
    if (!getRootElement().hasAboveElement(aboveElement)) {
        errors.add("Above element aligned not properly", aboveElement);
    }
}
/**
 * Checks that the given element sits to the right of the root with a horizontal
 * margin inside [minMargin, maxMargin]. Margin = element left minus root right.
 */
void validateRightElement(Element element, int minMargin, int maxMargin) {
    int marginBetweenRoot = element.getX() - rootElement.getCornerX();
    if (marginBetweenRoot < minMargin || marginBetweenRoot > maxMargin) {
        errors.add(String.format("Right element aligned not properly. Expected margin should be between %spx and %spx. Actual margin is %spx", minMargin, maxMargin, marginBetweenRoot), element);
    }
}
// Checks only relative position: element must be to the right of the root.
void validateRightElement(Element rightElement) {
    if (!getRootElement().hasRightElement(rightElement)) {
        errors.add("Right element aligned not properly", rightElement);
    }
}
/**
 * Checks that the given element sits to the left of the root with a horizontal
 * margin inside [minMargin, maxMargin]. Margin = root left minus element right.
 */
void validateLeftElement(Element leftElement, int minMargin, int maxMargin) {
    int marginBetweenRoot = rootElement.getX() - leftElement.getCornerX();
    if (marginBetweenRoot < minMargin || marginBetweenRoot > maxMargin) {
        errors.add(String.format("Left element aligned not properly. Expected margin should be between %spx and %spx. Actual margin is %spx", minMargin, maxMargin, marginBetweenRoot), leftElement);
    }
}
// Checks only relative position: element must be to the left of the root.
void validateLeftElement(Element leftElement) {
    if (!getRootElement().hasLeftElement(leftElement)) {
        errors.add("Left element aligned not properly", leftElement);
    }
}
// Records an error when the element is not horizontally centered on the page
// (left offset != right offset).
void validateEqualLeftRightOffset(Element element, String rootElementReadableName) {
    if (!element.hasEqualLeftRightOffset(pageSize)) {
        errors.add(String.format("Element '%s' has not equal left and right offset. Left offset is %dpx, right is %dpx", rootElementReadableName, element.getX(), element.getRightOffset(pageSize)), element);
    }
}
// Records an error when the element is not vertically centered on the page
// (top offset != bottom offset).
void validateEqualTopBottomOffset(Element element, String rootElementReadableName) {
    if (!element.hasEqualTopBottomOffset(pageSize)) {
        errors.add(String.format("Element '%s' has not equal top and bottom offset. Top offset is %dpx, bottom is %dpx", rootElementReadableName, element.getY(), element.getBottomOffset(pageSize)), element);
    }
}
// List variant: every element must be horizontally centered; failures identify the
// element via its formatted properties since there is no readable name.
void validateEqualLeftRightOffset(List<Element> elements) {
    for (Element element : elements) {
        if (!element.hasEqualLeftRightOffset(pageSize)) {
            errors.add(String.format("Element '%s' has not equal left and right offset. Left offset is %dpx, right is %dpx", getFormattedMessage(element), element.getX(), element.getRightOffset(pageSize)), element);
        }
    }
}
// List variant: every element must be vertically centered.
void validateEqualTopBottomOffset(List<Element> elements) {
    for (Element element : elements) {
        if (!element.hasEqualTopBottomOffset(pageSize)) {
            errors.add(String.format("Element '%s' has not equal top and bottom offset. Top offset is %dpx, bottom is %dpx", getFormattedMessage(element), element.getY(), element.getBottomOffset(pageSize)), element);
        }
    }
}
/**
 * Draws the root element's outline on the screenshot with a solid 2px stroke, then
 * draws any requested offset guide lines (dashed, in {@code linesColor}) across the
 * full image at the root element's edges.
 */
void drawRoot(Color color, Graphics2D g, BufferedImage img) {
    g.setColor(color);
    g.setStroke(new BasicStroke(2));
    drawRectByExtend(g, rootElement.getX(), rootElement.getY(), rootElement.getWidth(), rootElement.getHeight());
    // Guide lines use a dashed stroke so they are distinguishable from element outlines.
    Stroke dashed = new BasicStroke(1, BasicStroke.CAP_BUTT, BasicStroke.JOIN_BEVEL, 0, new float[]{9}, 0);
    g.setStroke(dashed);
    g.setColor(linesColor);
    if (drawLeftOffsetLine) {
        drawVerticalLine(g, img, rootElement.getX());
    }
    if (drawRightOffsetLine) {
        drawVerticalLine(g, img, rootElement.getCornerX());
    }
    if (drawTopOffsetLine) {
        drawHorizontalLine(g, img, rootElement.getY());
    }
    if (drawBottomOffsetLine) {
        drawHorizontalLine(g, img, rootElement.getCornerY());
    }
}
/**
 * Draws a rectangle after mapping both corners through the coordinate transforms
 * (retina scaling, mobile top-bar shift). Transforming the corners independently —
 * rather than scaling width/height — keeps the far edge pixel-accurate.
 */
private void drawRectByExtend(Graphics2D g, int x, int y, int width, int height) {
    int left = transformX(x);
    int top = transformY(y);
    int right = transformX(x + width);
    int bottom = transformY(y + height);
    g.drawRect(left, top, right - left, bottom - top);
}
private void drawVerticalLine(Graphics2D g, BufferedImage img, int x) {
int transformedX = transformX(x);
g.drawLine(transformedX, 0, transformedX, retinaValue(img.getHeight()));
}
/** Draws a full-width horizontal guide line at logical y-coordinate {@code y}. */
private void drawHorizontalLine(Graphics2D g, BufferedImage img, int y) {
    int screenY = transformY(y);
    g.drawLine(0, screenY, retinaValue(img.getWidth()), screenY);
}
// Converts a logical x-coordinate to screenshot pixels. Only zoom/retina
// scaling applies horizontally; there is no bar offset (unlike transformY).
private int transformX(int x) {
    return retinaValue(x);
}
// Converts a logical y-coordinate to screenshot pixels: first compensates
// for the mobile browser top bar (if present), then applies zoom/retina scaling.
private int transformY(int y) {
    return retinaValue(mobileY(y));
}
/**
 * Shifts a logical y-coordinate down by the height of the mobile browser's
 * top bar when one is present; returns the value unchanged otherwise.
 *
 * <p>The original code had byte-identical iOS and Android branches, so they
 * are collapsed into a single platform check. The 20px offset is the assumed
 * top-bar height on both platforms — TODO confirm for newer devices.
 */
private int mobileY(int value) {
    boolean mobileWebPlatform = isMobile() && driver.isAppiumWebContext()
            && (isIOS() || isAndroid());
    return (mobileWebPlatform && isMobileTopBar) ? value + 20 : value;
}
/**
 * Scales a logical pixel value into physical screenshot pixels, accounting
 * for desktop browser zoom, desktop retina displays (doubled for Chrome
 * only), and known iOS retina devices (doubled).
 */
int retinaValue(int value) {
    if (!isMobile()) {
        int zoom = Integer.parseInt(currentZoom.replace("%", ""));
        int zoomed = Zoom.applyZoom(value, zoom);
        return (isRetinaDisplay() && isChrome()) ? 2 * zoomed : zoomed;
    }
    if (!isIOS()) {
        return value;
    }
    // Devices whose screenshots come back at 2x logical resolution.
    List<String> retinaDevices = Arrays.asList(
            "iPhone 4", "iPhone 4s",
            "iPhone 5", "iPhone 5s",
            "iPhone 6", "iPhone 6s",
            "iPad Mini 2",
            "iPad Mini 4",
            "iPad Air 2",
            "iPad Pro");
    return retinaDevices.contains(getDevice()) ? 2 * value : value;
}
/**
 * Converts a raw spec value to pixels: returned as-is when units are PX,
 * otherwise interpreted as a percentage of the page width (horizontal) or
 * page height (vertical).
 */
int getConvertedInt(int i, boolean horizontal) {
    if (units.equals(PX)) {
        return i;
    }
    int base = horizontal ? pageSize.getWidth() : pageSize.getHeight();
    return (i * base) / 100;
}
/**
 * Builds a human-readable description of an element (tag, id, class, a text
 * preview capped at 10 characters, coordinates, and size) for error messages.
 */
String getFormattedMessage(Element element) {
    // Fetch the text once: the original called getText() up to three times,
    // i.e. three WebDriver round-trips that could in principle observe
    // different values between calls.
    String text = element.getWebElement().getText();
    String preview = text.length() < 10 ? text : text.substring(0, 10) + "...";
    return String.format("with properties: tag=[%s], id=[%s], class=[%s], text=[%s], coord=[%s,%s], size=[%s,%s]",
            element.getWebElement().getTagName(),
            element.getWebElement().getAttribute("id"),
            element.getWebElement().getAttribute("class"),
            preview,
            String.valueOf(element.getX()),
            String.valueOf(element.getY()),
            String.valueOf(element.getWidth()),
            String.valueOf(element.getHeight()));
}
/**
 * Verifies that the container fully encloses the root element (single-root
 * mode) or every root element; records an error for each violation.
 */
void validateInsideOfContainer(Element containerElement, String readableContainerName) {
    Rectangle2D.Double containerRect = containerElement.rectangle();
    if (rootElements == null) {
        // Single-root mode: only the root element must fit.
        if (!containerRect.contains(rootElement.rectangle())) {
            errors.add(String.format("Element '%s' is not inside of '%s'", rootElementReadableName, readableContainerName), containerElement);
        }
        return;
    }
    for (Element element : rootElements) {
        if (!containerRect.contains(element.rectangle())) {
            errors.add(String.format("Element is not inside of '%s'", readableContainerName), containerElement);
        }
    }
}
/**
 * Verifies that {@code element} contains the root element expanded by the
 * given padding, i.e. that the root keeps at least the requested distance
 * from every edge of the container. Padding values are converted from the
 * configured units (PX or percent of page size) before use.
 *
 * NOTE(review): readableContainerName is unused here; the error message
 * reports the root element's name instead — confirm that is intended.
 */
void validateInsideOfContainer(Element element, String readableContainerName, Padding padding) {
    int top = getConvertedInt(padding.getTop(), false);
    int right = getConvertedInt(padding.getRight(), true);
    int bottom = getConvertedInt(padding.getBottom(), false);
    int left = getConvertedInt(padding.getLeft(), true);
    // Root rectangle grown by the required padding on every side.
    Rectangle2D.Double paddedRootRectangle = new Rectangle2D.Double(
            rootElement.getX() - left,
            rootElement.getY() - top,
            rootElement.getWidth() + left + right,
            rootElement.getHeight() + top + bottom);
    // Actual distances between the container's edges and the root's edges.
    int paddingTop = rootElement.getY() - element.getY();
    int paddingLeft = rootElement.getX() - element.getX();
    int paddingBottom = element.getCornerY() - rootElement.getCornerY();
    int paddingRight = element.getCornerX() - rootElement.getCornerX();
    if (!element.rectangle().contains(paddedRootRectangle)) {
        errors.add(String.format("Padding of element '%s' is incorrect. Expected padding: top[%d], right[%d], bottom[%d], left[%d]. Actual padding: top[%d], right[%d], bottom[%d], left[%d]",
                rootElementReadableName, top, right, bottom, left, paddingTop, paddingRight, paddingBottom, paddingLeft), element);
    }
}
/** Unit of measurement for spec values: absolute pixels or percent of the page size. */
public enum Units {
    PX,
    PERCENT
}
} |
package com.exease.etd.objective;
import android.content.Intent;
import com.facebook.react.ReactActivity;
import com.umeng.analytics.MobclickAgent;
public class MainActivity extends ReactActivity {
/**
* Returns the name of the main component registered from JavaScript.
* This is used to schedule rendering of the component.
*/
@Override
protected String getMainComponentName() {
return "objective";
}
@Override
protected void onResume() {
super.onResume();
MobclickAgent.onResume(this);
}
@Override
protected void onPause() {
super.onPause();
MobclickAgent.onPause(this);
}
@Override
public void invokeDefaultOnBackPressed() {
Intent setIntent = new Intent(Intent.ACTION_MAIN);
setIntent.addCategory(Intent.CATEGORY_HOME);
setIntent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
startActivity(setIntent);
}
} |
package imagej.updater.gui;
import imagej.updater.core.FileObject;
import imagej.updater.core.FileObject.Action;
import imagej.updater.core.FilesCollection;
import imagej.updater.core.UpdateSite;
import imagej.updater.core.UploaderService;
import imagej.updater.util.Util;
import java.awt.Component;
import java.awt.Container;
import java.awt.Dimension;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.swing.AbstractAction;
import javax.swing.BoxLayout;
import javax.swing.DefaultCellEditor;
import javax.swing.JButton;
import javax.swing.JComponent;
import javax.swing.JDialog;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JPasswordField;
import javax.swing.JScrollPane;
import javax.swing.JTable;
import javax.swing.JTextField;
import javax.swing.KeyStroke;
import javax.swing.ListSelectionModel;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.TableModelEvent;
import javax.swing.table.DefaultTableModel;
import javax.swing.table.TableCellEditor;
import javax.swing.table.TableCellRenderer;
import javax.swing.table.TableColumn;
import javax.swing.table.TableColumnModel;
import net.miginfocom.swing.MigLayout;
/**
* The dialog in which the user can choose which update sites to follow.
*
* @author Johannes Schindelin
*/
@SuppressWarnings("serial")
public class SitesDialog extends JDialog implements ActionListener {
protected UpdaterFrame updaterFrame;
protected FilesCollection files;
protected List<UpdateSite> sites;
protected DataModel tableModel;
protected JTable table;
protected JButton addNewSite, addPersonalSite, remove, close;
/**
 * Builds the dialog: a five-column table (active flag, name, URL, host,
 * upload directory) over {@link #sites}, plus Add / Add-personal / Remove /
 * Close buttons. Cell edits are validated in stopCellEditing before being
 * committed via setValueAt.
 *
 * <p>Fix: the column-2 (URL) editor previously passed
 * {@code (String)getValueAt(row, 0)} — the Boolean "active" column — to
 * {@code initializeUpdateSite}, which would throw ClassCastException. The
 * site name (column 1) is what that call needs; use
 * {@code getUpdateSiteName(row)}.
 */
public SitesDialog(final UpdaterFrame owner, final FilesCollection files)
{
    super(owner, "Manage update sites");
    updaterFrame = owner;
    this.files = files;
    sites = initializeSites(files);
    final Container contentPane = getContentPane();
    contentPane.setLayout(new BoxLayout(contentPane, BoxLayout.PAGE_AXIS));
    tableModel = new DataModel();
    table = new JTable(tableModel) {
        @Override
        public void valueChanged(final ListSelectionEvent e) {
            super.valueChanged(e);
            // Row 0 is the main update site and must not be removable.
            remove.setEnabled(getSelectedRow() > 0);
        }

        @Override
        public boolean isCellEditable(final int row, final int column) {
            return column >= 0 && column < getColumnCount() && row >= 0 && row < getRowCount();
        }

        @Override
        public TableCellEditor getCellEditor(final int row, final int column) {
            // Column 0 is the Boolean "active" checkbox; keep the default editor.
            if (column == 0) return super.getCellEditor(row, column);
            final JTextField field = new JTextField();
            return new DefaultCellEditor(field) {
                // Validates the edited value before committing; returning
                // false keeps the cell in edit mode.
                @Override
                public boolean stopCellEditing() {
                    String value = field.getText();
                    // URL (2) and upload directory (4) always end with a slash.
                    if ((column == 2 || column == 4) && !value.equals("") && !value.endsWith("/")) {
                        value += "/";
                    }
                    if (column == 1) {
                        // Site name: must be unique.
                        if (value.equals(getUpdateSiteName(row))) return super.stopCellEditing();
                        if (files.getUpdateSite(value) != null) {
                            error("Update site '" + value + "' exists already!");
                            return false;
                        }
                    } else if (column == 2) {
                        // URL: verify it points at an update site, offering to
                        // initialize it when upload credentials are present.
                        if ("/".equals(value)) value = "";
                        final UpdateSite site = getUpdateSite(row);
                        if (value.equals(site.getURL())) return super.stopCellEditing();
                        if (validURL(value)) {
                            activateUpdateSite(row);
                        } else {
                            if (site == null || site.getHost() == null || site.getHost().equals("")) {
                                error("URL does not refer to an update site: " + value + "\n"
                                    + "If you want to initialize that site, you need to provide upload information first.");
                                return false;
                            } else {
                                if (!showYesNoQuestion("Initialize upload site?",
                                        "It appears that the URL\n"
                                        + "\t" + value + "\n"
                                        + "is not (yet) valid. "
                                        + "Do you want to initialize it (host: "
                                        + site.getHost() + "; directory: "
                                        + site.getUploadDirectory() + ")?"))
                                    return false;
                                // Pass the site name (column 1), not the Boolean
                                // "active" flag from column 0.
                                if (!initializeUpdateSite(getUpdateSiteName(row),
                                        value, site.getHost(), site.getUploadDirectory()))
                                    return false;
                            }
                        }
                    } else if (column == 3) {
                        // Host: "protocol:host" — make sure an uploader for
                        // the protocol can be installed.
                        final UpdateSite site = getUpdateSite(row);
                        if (value.equals(site.getHost())) return super.stopCellEditing();
                        final int colon = value.indexOf(':');
                        if (colon > 0) {
                            final String protocol = value.substring(0, colon);
                            final UploaderService uploaderService = updaterFrame.getUploaderService();
                            if (null == uploaderService.installUploader(protocol, files, updaterFrame.getProgress(null))) {
                                error("Unknown upload protocol: " + protocol);
                                return false;
                            }
                        }
                    } else if (column == 4) {
                        final UpdateSite site = getUpdateSite(row);
                        if (value.equals(site.getUploadDirectory())) return super.stopCellEditing();
                    }
                    updaterFrame.enableUploadOrNot();
                    return super.stopCellEditing();
                }
            };
        }

        // Commits an edited value into the backing UpdateSite / FilesCollection.
        @Override
        public void setValueAt(final Object value, final int row, final int column)
        {
            final UpdateSite site = getUpdateSite(row);
            if (column == 0) {
                site.setActive(Boolean.TRUE.equals(value));
            } else {
                if (isEditing()) return;
                final String string = (String)value;
                // if the name changed, or if we auto-fill the name from the URL
                switch (column) {
                case 1:
                    final String name = getUpdateSiteName(row);
                    if (name.equals(string)) return;
                    files.renameUpdateSite(name, string);
                    sites.get(row).setName(string);
                    break;
                case 2:
                    site.setURL(string);
                    break;
                case 3:
                    site.setHost(string);
                    break;
                case 4:
                    site.setUploadDirectory(string);
                    break;
                default:
                    updaterFrame.log.error("Whoa! Column " + column + " is not handled!");
                }
            }
            // (De)activating a site re-reads or drops its file records.
            if (site.isActive()) {
                if (column == 0 || column == 2) {
                    activateUpdateSite(row);
                }
            } else {
                deactivateUpdateSite(site.getName());
            }
        }

        // Shows the site's description/maintainer as a tooltip on every cell.
        @Override
        public Component prepareRenderer(TableCellRenderer renderer, int row, int column) {
            Component component = super.prepareRenderer(renderer, row, column);
            if (component instanceof JComponent) {
                final UpdateSite site = getUpdateSite(row);
                if (site != null) {
                    JComponent jcomponent = (JComponent) component;
                    jcomponent.setToolTipText(wrapToolTip(site.getDescription(), site.getMaintainer()));
                }
            }
            return component;
        }
    };
    table.setColumnSelectionAllowed(false);
    table.setRowSelectionAllowed(true);
    table.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
    tableModel.setColumnWidths();
    final JScrollPane scrollpane = new JScrollPane(table);
    scrollpane.setPreferredSize(new Dimension(tableModel.tableWidth, 400));
    contentPane.add(scrollpane);
    final JPanel buttons = new JPanel();
    addPersonalSite = SwingTools.button("Add my site", "Add my personal update site", this, buttons);
    addNewSite = SwingTools.button("Add", "Add", this, buttons);
    remove = SwingTools.button("Remove", "Remove", this, buttons);
    remove.setEnabled(false);
    close = SwingTools.button("Close", "Close", this, buttons);
    contentPane.add(buttons);
    getRootPane().setDefaultButton(close);
    escapeCancels(this);
    pack();
    addNewSite.requestFocusInWindow();
    setLocationRelativeTo(owner);
}
/**
 * Wraps an update site's description (and optional maintainer) into HTML
 * suitable for a Swing tooltip; returns null when there is no description.
 */
private static String wrapToolTip(final String description, final String maintainer) {
    if (description == null) return null;
    // Close the description paragraph exactly once: the original appended a
    // stray extra "</p>" whenever a maintainer was present, producing
    // malformed HTML.
    return "<html><p width='400'>" + description.replaceAll("\n", "<br />")
        + (maintainer != null ? "</p><p>Maintainer: " + maintainer : "")
        + "</p></html>";
}
/**
 * Assembles the list shown in the table: the main ImageJ site first, then
 * the sites advertised on the Fiji Wiki, then any further locally configured
 * sites. Locally known sites are marked active and carry their upload
 * settings; names of the remaining sites are de-duplicated with "-2", "-3",
 * ... suffixes.
 */
private static List<UpdateSite> initializeSites(final FilesCollection files) {
    final List<UpdateSite> sites = new ArrayList<UpdateSite>();
    final Map<String, Integer> url2index = new HashMap<String, Integer>();
    // make sure that the main update site is the first one.
    final UpdateSite mainSite = new UpdateSite(FilesCollection.DEFAULT_UPDATE_SITE, Util.MAIN_URL, "", "", null, null, 0l);
    sites.add(mainSite);
    url2index.put(mainSite.getURL(), 0);
    // read available sites from the Fiji Wiki
    try {
        for (final UpdateSite site : getAvailableSites().values()) {
            Integer index = url2index.get(site.getURL());
            if (index == null) {
                url2index.put(site.getURL(), sites.size());
                sites.add(site);
            } else {
                sites.set(index.intValue(), site);
            }
        }
    } catch (Exception e) {
        // Best-effort: the wiki may be unreachable; fall back to local sites only.
        e.printStackTrace();
    }
    // add active / upload information
    final Set<String> names = new HashSet<String>();
    for (final String name : files.getUpdateSiteNames()) {
        final UpdateSite site = files.getUpdateSite(name);
        Integer index = url2index.get(site.getURL());
        if (index == null) {
            url2index.put(site.getURL(), sites.size());
            sites.add(site);
        } else {
            // Known URL: overlay the local activation/upload settings.
            final UpdateSite listed = sites.get(index.intValue());
            listed.setActive(true);
            listed.setName(site.getName());
            listed.setHost(site.getHost());
            listed.setUploadDirectory(site.getUploadDirectory());
        }
    }
    // make sure names are unique
    // NOTE(review): active sites are skipped before their name is recorded in
    // 'names', so an inactive site can still end up sharing a name with an
    // active one — confirm whether that is intended.
    for (final UpdateSite site : sites) {
        if (site.isActive()) continue;
        if (names.contains(site.getName())) {
            int i = 2;
            while (names.contains(site.getName() + "-" + i))
                i++;
            site.setName(site.getName() + ("-" + i));
        }
        names.add(site.getName());
    }
    return sites;
}
/** Returns the display name of the update site backing the given table row. */
protected String getUpdateSiteName(int row) {
    return sites.get(row).getName();
}
/** Returns the UpdateSite backing the given table row. */
protected UpdateSite getUpdateSite(int row) {
    return sites.get(row);
}
// Appends a blank update site with a unique placeholder name for the user to fill in.
private void addNew() {
    add(new UpdateSite(makeUniqueSiteName("New"), "", "", "", null, null, 0l));
}
private final static String PERSONAL_SITES_URL = "http://sites.imagej.net/";
/**
 * Prompts for a Fiji Wiki account via {@link PersonalSiteDialog}, then adds
 * and activates the corresponding personal update site (uploaded via WebDAV).
 */
private void addPersonalSite() {
    final PersonalSiteDialog dialog = new PersonalSiteDialog();
    final String user = dialog.name;
    if (user == null) return; // dialog was cancelled
    // The new site is appended, so its row index is the current list size.
    final int row = sites.size();
    add(new UpdateSite(makeUniqueSiteName("My Site"), PERSONAL_SITES_URL + user, "webdav:" + user, "", null, null, 0l));
    activateUpdateSite(row);
}
// Appends a site to the model, refreshes the table, and selects the new row.
private void add(final UpdateSite site) {
    final int row = sites.size();
    sites.add(site);
    tableModel.rowsChanged();
    tableModel.rowChanged(row);
    table.setRowSelectionInterval(row, row);
}
/** Returns {@code prefix} if unused, otherwise the first free "prefix-2", "prefix-3", ... */
private String makeUniqueSiteName(final String prefix) {
    final Set<String> taken = new HashSet<String>();
    for (final UpdateSite site : sites) {
        taken.add(site.getName());
    }
    String candidate = prefix;
    for (int i = 2; taken.contains(candidate); i++) {
        candidate = prefix + "-" + i;
    }
    return candidate;
}
/**
 * Removes the site in the given row after user confirmation: deactivates it
 * (detaching or dropping its file records) and removes it from the table.
 */
protected void delete(final int row) {
    final String name = getUpdateSiteName(row);
    if (!showYesNoQuestion("Remove " + name + "?",
        "Do you really want to remove the site '" + name + "' from the list?\n"
        + "URL: " + getUpdateSite(row).getURL()))
        return;
    deactivateUpdateSite(name);
    sites.remove(row);
    tableModel.rowChanged(row);
}
private void deactivateUpdateSite(final String name) {
final List<FileObject> list = new ArrayList<FileObject>();
final List<FileObject> remove = new ArrayList<FileObject>();
int count = 0;
for (final FileObject file : files.forUpdateSite(name))
switch (file.getStatus()) {
case NEW:
case NOT_INSTALLED:
case OBSOLETE_UNINSTALLED:
count
remove.add(file);
break;
default:
count++;
list.add(file);
}
if (count > 0) info("" +
count + (count == 1 ? " file is" : " files are") +
" installed from the site '" +
name +
"'\n" +
"These files will not be deleted automatically.\n" +
"Note: even if marked as 'Local-only', they might be available from other sites.");
for (final FileObject file : list) {
file.updateSite = null;
// TODO: unshadow
file.setStatus(FileObject.Status.LOCAL_ONLY);
}
for (final FileObject file : remove) {
files.remove(file);
}
files.removeUpdateSite(name);
updaterFrame.updateFilesTable();
}
/** Dispatches button clicks to the matching dialog action. */
@Override
public void actionPerformed(final ActionEvent e) {
    final Object source = e.getSource();
    if (source == addNewSite) {
        addNew();
    } else if (source == addPersonalSite) {
        addPersonalSite();
    } else if (source == remove) {
        delete(table.getSelectedRow());
    } else if (source == close) {
        dispose();
    }
}
/**
 * Table model exposing {@link #sites} as five columns: active flag, name,
 * URL, host, and directory on host. Column 0 is a Boolean checkbox; all
 * other columns are strings.
 */
protected class DataModel extends DefaultTableModel {
    protected int tableWidth;
    protected int[] widths = { 20, 150, 280, 125, 125 };
    protected String[] headers = { "Active", "Name", "URL", "Host",
        "Directory on Host" };

    /** Applies preferred/minimum column widths and accumulates the total table width. */
    public void setColumnWidths() {
        final TableColumnModel columnModel = table.getColumnModel();
        for (int i = 0; i < widths.length && i < getColumnCount(); i++) {
            final TableColumn column = columnModel.getColumn(i);
            column.setPreferredWidth(widths[i]);
            column.setMinWidth(widths[i]);
            tableWidth += widths[i];
        }
    }

    @Override
    public int getColumnCount() {
        return 5;
    }

    @Override
    public String getColumnName(final int column) {
        return headers[column];
    }

    @Override
    public Class<?> getColumnClass(final int column) {
        return column == 0 ? Boolean.class : String.class;
    }

    @Override
    public int getRowCount() {
        return sites.size();
    }

    @Override
    public Object getValueAt(final int row, final int col) {
        if (col == 1) return getUpdateSiteName(row);
        final UpdateSite site = getUpdateSite(row);
        switch (col) {
        case 0: return Boolean.valueOf(site.isActive());
        case 2: return site.getURL();
        case 3: return site.getHost();
        case 4: return site.getUploadDirectory();
        default: return null;
        }
    }

    public void rowChanged(final int row) {
        rowsChanged(row, row + 1);
    }

    public void rowsChanged() {
        rowsChanged(0, sites.size());
    }

    // The row arguments are currently ignored: the whole table is refreshed.
    public void rowsChanged(final int firstRow, final int lastRow) {
        fireTableChanged(new TableModelEvent(this));
    }
}
/**
 * Returns true if the URL hosts a compressed update-site database; a
 * trailing slash is appended when missing.
 */
protected boolean validURL(String url) {
    final String base = url.endsWith("/") ? url : url + "/";
    try {
        final URL dbURL = new URL(base + Util.XML_COMPRESSED);
        return files.util.getLastModified(dbURL) != -1;
    } catch (MalformedURLException e) {
        updaterFrame.log.error(e);
        return false;
    }
}
/**
 * Marks the site active, registers it with the file collection if needed,
 * re-reads its database, and marks its files for update. Returns false
 * (after showing an error) when reading the site fails.
 */
protected boolean activateUpdateSite(final int row) {
    final UpdateSite updateSite = getUpdateSite(row);
    updateSite.setActive(true);
    try {
        if (files.getUpdateSite(updateSite.getName()) == null) files.addUpdateSite(updateSite);
        files.reReadUpdateSite(updateSite.getName(), updaterFrame.getProgress(null));
        markForUpdate(updateSite.getName(), false);
        updaterFrame.filesChanged();
    } catch (final Exception e) {
        // NOTE(review): the exception itself is discarded; consider logging it.
        error("Not a valid URL: " + getUpdateSite(row).getURL());
        return false;
    }
    return true;
}
// Flags every updateable, platform-applicable file of the given site for
// update (falling back to uninstall/install when update is not valid).
private void markForUpdate(final String updateSite, final boolean evenForcedUpdates) {
    for (final FileObject file : files.forUpdateSite(updateSite)) {
        if (!file.isUpdateable(evenForcedUpdates)) continue;
        if (!file.isUpdateablePlatform(files)) continue;
        file.setFirstValidAction(files, Action.UPDATE,
            Action.UNINSTALL, Action.INSTALL);
    }
}
/**
 * Creates the remote layout for a brand-new update site and verifies the
 * site is reachable afterwards; reports success or failure to the user.
 */
protected boolean initializeUpdateSite(final String siteName,
    String url, final String host, String uploadDirectory) {
    // Both the URL and the upload directory must end in a slash.
    if (!url.endsWith("/"))
        url += "/";
    if (!uploadDirectory.endsWith("/"))
        uploadDirectory += "/";
    boolean result;
    try {
        result = updaterFrame.initializeUpdateSite(url, host,
            uploadDirectory) && validURL(url);
    } catch (final InstantiationException e) {
        updaterFrame.log.error(e);
        result = false;
    }
    if (result)
        info("Initialized update site '" + siteName + "'");
    else
        error("Could not initialize update site '" + siteName + "'");
    return result;
}
/** Closes the dialog and propagates any site changes back to the updater UI. */
@Override
public void dispose() {
    super.dispose();
    updaterFrame.updateFilesTable();
    updaterFrame.enableUploadOrNot();
    updaterFrame.addCustomViewOptions();
}
/** Shows an informational message box attached to this dialog. */
public void info(final String message) {
    SwingTools.showMessageBox(this, message, JOptionPane.INFORMATION_MESSAGE);
}
/** Shows an error message box attached to this dialog. */
public void error(final String message) {
    SwingTools.showMessageBox(this, message, JOptionPane.ERROR_MESSAGE);
}
/** Asks a yes/no question attached to this dialog; returns true on "yes". */
public boolean showYesNoQuestion(final String title, final String message) {
    return SwingTools.showYesNoQuestion(this, title, message);
}
/** Installs a key binding so that pressing Escape disposes the given dialog. */
public static void escapeCancels(final JDialog dialog) {
    final String actionKey = "ESCAPE";
    dialog.getRootPane().getInputMap(JComponent.WHEN_IN_FOCUSED_WINDOW)
        .put(KeyStroke.getKeyStroke("ESCAPE"), actionKey);
    dialog.getRootPane().getActionMap().put(actionKey, new AbstractAction() {
        @Override
        public void actionPerformed(final ActionEvent e) {
            dialog.dispose();
        }
    });
}
// NOTE(review): this region is corrupted. The body of stripWikiMarkup and the
// beginning of getAvailableSites() — which declares 'columns', 'result',
// SITE_LIST_PAGE_TITLE usage and the enclosing loop — are missing; the lines
// below are the tail of getAvailableSites() fused onto stripWikiMarkup's
// signature and do not compile as-is. Restore both methods from upstream.
private static String stripWikiMarkup(final String string) {
final UpdateSite info = new UpdateSite(stripWikiMarkup(columns[1]), stripWikiMarkup(columns[2]), null, null, stripWikiMarkup(columns[3]), stripWikiMarkup(columns[4]), 0l);
result.put(info.getURL(), info);
}
}
// Sanity checks
final Iterator<UpdateSite> iter = result.values().iterator();
if (!iter.hasNext()) throw new Error("Invalid page: " + SITE_LIST_PAGE_TITLE);
UpdateSite site = iter.next();
if (!site.getName().equals("ImageJ") || !site.getURL().equals("http://update.imagej.net/")) {
throw new Error("Invalid page: " + SITE_LIST_PAGE_TITLE);
}
if (!iter.hasNext()) throw new Error("Invalid page: " + SITE_LIST_PAGE_TITLE);
site = iter.next();
if (!site.getName().equals("Fiji") || !site.getURL().equals("http://fiji.sc/update/")) {
throw new Error("Invalid page: " + SITE_LIST_PAGE_TITLE);
}
return result;
}
private class PersonalSiteDialog extends JDialog implements ActionListener {
private String name;
private JLabel userLabel, realNameLabel, emailLabel, passwordLabel;
private JTextField userField, realNameField, emailField;
private JPasswordField passwordField;
private JButton cancel, okay;
/**
 * Modal dialog asking for the Fiji Wiki account backing a personal update
 * site; it can also register the account and set the upload password. The
 * constructor blocks until the dialog is closed; the chosen account name
 * ends up in {@link #name} (null when cancelled).
 *
 * <p>Fix: user-visible typo "udate" corrected to "update".
 */
public PersonalSiteDialog() {
    super(SitesDialog.this, "Add Personal Site");
    setLayout(new MigLayout("wrap 2"));
    add(new JLabel("<html><h2>Personal update site setup</h2>" +
        "<p width=400>For security reasons, personal update sites are associated with a Fiji Wiki account. " +
        "Please provide the account name of your Fiji Wiki account.</p>" +
        "<p width=400>If your personal update site was not yet initialized, you can initialize it in this dialog.</p>" +
        "<p width=400>If you do not have a Fiji Wiki account</p></html>"), "span 2");
    userLabel = new JLabel("Fiji Wiki account");
    add(userLabel);
    userField = new JTextField();
    userField.setColumns(30);
    add(userField);
    realNameLabel = new JLabel("Real Name");
    add(realNameLabel);
    realNameField = new JTextField();
    realNameField.setColumns(30);
    add(realNameField);
    emailLabel = new JLabel("Email");
    add(emailLabel);
    emailField = new JTextField();
    emailField.setColumns(30);
    add(emailField);
    passwordLabel = new JLabel("Password");
    add(passwordLabel);
    passwordField = new JPasswordField();
    passwordField.setColumns(30);
    add(passwordField);
    final JPanel panel = new JPanel();
    cancel = new JButton("Cancel");
    cancel.addActionListener(this);
    panel.add(cancel);
    okay = new JButton("OK");
    okay.addActionListener(this);
    panel.add(okay);
    add(panel, "span 2, right");
    // Registration and password rows only become relevant once the wiki
    // account's state is known, so they start out disabled.
    setWikiAccountFieldsEnabled(false);
    setChangePasswordEnabled(false);
    pack();
    // Enter confirms, Escape cancels, from any focused component.
    final KeyAdapter keyListener = new KeyAdapter() {
        @Override
        public void keyReleased(final KeyEvent e) {
            if (e.getKeyCode() == KeyEvent.VK_ESCAPE) dispose();
            else if (e.getKeyCode() == KeyEvent.VK_ENTER) actionPerformed(new ActionEvent(okay, -1, null));
        }
    };
    userField.addKeyListener(keyListener);
    realNameField.addKeyListener(keyListener);
    emailField.addKeyListener(keyListener);
    passwordField.addKeyListener(keyListener);
    cancel.addKeyListener(keyListener);
    okay.addKeyListener(keyListener);
    setModal(true);
    setVisible(true);
}
// Enables/disables the account-registration rows; focuses the first field when enabling.
private void setWikiAccountFieldsEnabled(final boolean enabled) {
    for (final JComponent c : new JComponent[] { realNameLabel, realNameField, emailLabel, emailField }) {
        c.setEnabled(enabled);
    }
    if (enabled) realNameField.requestFocusInWindow();
}
// Enables/disables the password row; focuses the field when enabling.
private void setChangePasswordEnabled(final boolean enabled) {
    passwordLabel.setEnabled(enabled);
    passwordField.setEnabled(enabled);
    if (enabled) {
        passwordField.requestFocusInWindow();
    }
}
/**
 * Handles Cancel/OK. OK first checks whether the personal site already
 * exists; if not, it walks the user through creating a wiki account (when
 * needed) and then initializes the site by setting the upload password.
 */
@Override
public void actionPerformed(ActionEvent e) {
    if (e.getSource() == cancel) {
        dispose();
        return;
    } else if (e.getSource() == okay) {
        // The local 'name' deliberately shadows the field: the field is only
        // assigned once the site is known to be valid.
        final String name = userField.getText();
        if ("".equals(name)) {
            error("Please provide a Fiji Wiki account name!");
            return;
        }
        // Site already initialized? Then we are done.
        if (validURL(PERSONAL_SITES_URL + name)) {
            this.name = name;
            dispose();
            return;
        }
        // create a Fiji Wiki user if needed
        final MediaWikiClient wiki = new MediaWikiClient(FIJI_WIKI_URL);
        try {
            if (!wiki.userExists(name)) {
                if (realNameLabel.isEnabled()) {
                    final String realName = realNameField.getText();
                    final String email = emailField.getText();
                    if ("".equals(realName) || "".equals(email)) {
                        error("<html><p width=400>Please provide your name and email address to register an account on the Fiji Wiki!</p></html>");
                    } else {
                        if (wiki.createUser(name, realName, email, "Wants a personal site")) {
                            setWikiAccountFieldsEnabled(false);
                            setChangePasswordEnabled(true);
                            info("<html><p width=400>An email with the activation code was sent. " +
                                "Please provide your Fiji Wiki password after activating the account.</p></html>");
                        } else {
                            error("<html><p width=400>There was a problem creating the user account!</p></html>");
                        }
                    }
                } else {
                    // First pass: reveal the registration fields and ask again.
                    setWikiAccountFieldsEnabled(true);
                    error("<html><p width=400>Please provide your name and email address to register an account on the Fiji Wiki</p></html>");
                }
                return;
            }
            // initialize the personal update site
            final String password = new String(passwordField.getPassword());
            if (!wiki.login(name, password)) {
                error("Could not log in (incorrect password?)");
                return;
            }
            if (!wiki.changeUploadPassword(password)) {
                error("Could not initialize the personal update site");
                return;
            }
            wiki.logout();
            this.name = name;
            dispose();
        } catch (IOException e2) {
            updaterFrame.log.error(e2);
            error("<html><p width=400>There was a problem contacting the Fiji Wiki: " + e2 + "</p></html>");
            return;
        }
    }
}
}
} |
package com.horcrux.svg;
import android.graphics.Bitmap;
import android.util.SparseArray;
import com.facebook.yoga.YogaMeasureMode;
import com.facebook.yoga.YogaMeasureFunction;
import com.facebook.yoga.YogaNode;
import com.facebook.react.uimanager.BaseViewManager;
import com.facebook.react.uimanager.ThemedReactContext;
import javax.annotation.Nullable;
/**
* ViewManager for RNSVGSvgView React views. Renders as a {@link SvgView} and handles
* invalidating the native view on shadow view updates happening in the underlying tree.
*/
public class SvgViewManager extends BaseViewManager<SvgView, SvgViewShadowNode> {
private static final String REACT_CLASS = "RNSVGSvgView";
private static final YogaMeasureFunction MEASURE_FUNCTION = new YogaMeasureFunction() {
@Override
public long measure(
YogaNode node,
float width,
YogaMeasureMode widthMode,
float height,
YogaMeasureMode heightMode) {
throw new IllegalStateException("SurfaceView should have explicit width and height set");
}
};
private static final SparseArray<SvgViewShadowNode> mTagToShadowNode = new SparseArray<>();
private static final SparseArray<SvgView> mTagToSvgView = new SparseArray<>();
static void setShadowNode(SvgViewShadowNode shadowNode) {
mTagToShadowNode.put(shadowNode.getReactTag(), shadowNode);
}
static void setSvgView(SvgView svg) {
mTagToSvgView.put(svg.getId(), svg);
}
static @Nullable SvgView getSvgViewByTag(int tag) {
return mTagToSvgView.get(tag);
}
static @Nullable SvgViewShadowNode getShadowNodeByTag(int tag) {
return mTagToShadowNode.get(tag);
}
@Override
public String getName() {
return REACT_CLASS;
}
@Override
public Class<SvgViewShadowNode> getShadowNodeClass() {
return SvgViewShadowNode.class;
}
@Override
public SvgViewShadowNode createShadowNodeInstance() {
SvgViewShadowNode node = new SvgViewShadowNode();
node.setMeasureFunction(MEASURE_FUNCTION);
return node;
}
@Override
public void onDropViewInstance(SvgView view) {
int tag = view.getId();
mTagToShadowNode.remove(tag);
mTagToSvgView.remove(tag);
}
@Override
protected SvgView createViewInstance(ThemedReactContext reactContext) {
return new SvgView(reactContext);
}
@Override
public void updateExtraData(SvgView root, Object extraData) {
root.setBitmap((Bitmap) extraData);
}
} |
package org.testng.reporters;
import org.testng.IResultMap;
import org.testng.ISuiteResult;
import org.testng.ITestResult;
import org.testng.internal.Utils;
import java.io.File;
import java.text.SimpleDateFormat;
import java.util.Properties;
/**
* Utility writing an ISuiteResult to an XMLStringBuffer. Depending on the settings in the <code>config</code> property
* it might generate an additional XML file with the actual content and only reference the file with an <code>url</code>
* attribute in the passed XMLStringBuffer.
*
* @author Cosmin Marginean, Mar 16, 2007
*/
public class XMLSuiteResultWriter {
private XMLReporterConfig config;
/** @param config reporter settings controlling file fragmentation, timestamp format, etc. */
public XMLSuiteResultWriter(XMLReporterConfig config) {
    this.config = config;
}
/**
 * Writes the specified ISuiteResult in the given XMLStringBuffer. Depending
 * on the settings in the <code>config</code> property it might generate an
 * additional XML file with the actual content and only reference the file
 * with an <code>url</code> attribute in the passed XMLStringBuffer.
 *
 * @param xmlBuffer The XML buffer where to write or reference the suite result
 * @param suiteResult The <code>ISuiteResult</code> to serialize
 */
public void writeSuiteResult(XMLStringBuffer xmlBuffer, ISuiteResult suiteResult) {
    final boolean fragmentPerSuite =
        XMLReporterConfig.FF_LEVEL_SUITE_RESULT == config.getFileFragmentationLevel();
    if (!fragmentPerSuite) {
        writeAllToBuffer(xmlBuffer, suiteResult);
        return;
    }
    // Fragmented mode: the main buffer only references an external file which
    // receives the actual content.
    String parentDir =
        config.getOutputDirectory() + File.separatorChar + suiteResult.getTestContext().getSuite().getName();
    File file = referenceSuiteResult(xmlBuffer, parentDir, suiteResult);
    XMLStringBuffer suiteXmlBuffer = new XMLStringBuffer("");
    writeAllToBuffer(suiteXmlBuffer, suiteResult);
    Utils.writeFile(file.getAbsoluteFile().getParent(), file.getName(), suiteXmlBuffer.toXML());
}
// Serializes the whole suite result (passed, failed, then skipped tests)
// into a single <test> element in the buffer.
private void writeAllToBuffer(XMLStringBuffer xmlBuffer, ISuiteResult suiteResult) {
    xmlBuffer.push(XMLReporterConfig.TAG_TEST, getSuiteResultAttributes(suiteResult));
    addTestResults(xmlBuffer, suiteResult.getTestContext().getPassedTests(), XMLReporterConfig.TEST_PASSED);
    addTestResults(xmlBuffer, suiteResult.getTestContext().getFailedTests(), XMLReporterConfig.TEST_FAILED);
    addTestResults(xmlBuffer, suiteResult.getTestContext().getSkippedTests(), XMLReporterConfig.TEST_SKIPPED);
    xmlBuffer.pop();
}
// Emits an empty <test url="..."/> element into the main buffer and returns
// the file the real content should be written to.
private File referenceSuiteResult(XMLStringBuffer xmlBuffer, String parentDir, ISuiteResult suiteResult) {
    final String fileName = suiteResult.getTestContext().getName() + ".xml";
    final Properties attrs = new Properties();
    attrs.setProperty(XMLReporterConfig.ATTR_URL, fileName);
    xmlBuffer.addEmptyElement(XMLReporterConfig.TAG_TEST, attrs);
    return new File(parentDir + File.separatorChar + fileName);
}
// Attributes for the <test> element: currently just the test context's name.
private Properties getSuiteResultAttributes(ISuiteResult suiteResult) {
    Properties attributes = new Properties();
    attributes.setProperty(XMLReporterConfig.ATTR_NAME, suiteResult.getTestContext().getName());
    return attributes;
}
// Serializes every result in the map with the given status attribute value.
private void addTestResults(XMLStringBuffer xmlBuffer, IResultMap results, String resultType) {
    for (ITestResult testResult : results.getAllResults()) {
        addTestResult(xmlBuffer, testResult, resultType);
    }
}
// Writes one <test-method> element with its attributes, parameters, and exception.
private void addTestResult(XMLStringBuffer xmlBuffer, ITestResult testResult, String resultType) {
    final Properties attributes = getTestResultAttributes(testResult);
    attributes.setProperty(XMLReporterConfig.ATTR_STATUS, resultType);
    xmlBuffer.push(XMLReporterConfig.TAG_TEST_METHOD, attributes);
    addTestMethodParams(xmlBuffer, testResult);
    addTestResultException(xmlBuffer, testResult);
    xmlBuffer.pop();
}
/**
 * Builds the attribute set for a single test-method element: name, optional
 * description, package/class split, method signature, start/end timestamps,
 * duration in milliseconds, and (optionally) group names.
 */
private Properties getTestResultAttributes(ITestResult testResult) {
    Properties attributes = new Properties();
    attributes.setProperty(XMLReporterConfig.ATTR_NAME, testResult.getName());
    String description = testResult.getMethod().getDescription();
    if (!Utils.isStringEmpty(description)) {
        attributes.setProperty(XMLReporterConfig.ATTR_DESC, description);
    }
    // Split the fully-qualified class name into package and simple class name.
    String className = testResult.getTestClass().getName();
    int dot = className.lastIndexOf('.');
    attributes.setProperty(XMLReporterConfig.ATTR_PACKAGE, dot > -1 ? className.substring(0, dot) : "<default>");
    attributes.setProperty(XMLReporterConfig.ATTR_CLASS, dot > -1 ? className.substring(dot + 1, className.length()) : className);
    attributes.setProperty(XMLReporterConfig.ATTR_METHOD_SIG, removeClassName(testResult.getMethod().toString()));
    //TODO: Cosmin - not finished
    // A new SimpleDateFormat per call: SimpleDateFormat is not thread-safe,
    // so it must not be cached in a shared field.
    SimpleDateFormat format = new SimpleDateFormat(config.getTimestampFormat());
    String startTime = format.format(testResult.getStartMillis());
    String endTime = format.format(testResult.getEndMillis());
    attributes.setProperty(XMLReporterConfig.ATTR_STARTED_AT, startTime);
    attributes.setProperty(XMLReporterConfig.ATTR_FINISHED_AT, endTime);
    long duration = testResult.getEndMillis() - testResult.getStartMillis();
    String strDuration = Long.toString(duration);
    attributes.setProperty(XMLReporterConfig.ATTR_DURATION_MS, strDuration);
    if (config.isGenerateGroupsAttribute()) {
        String groupNamesStr = getGroupNamesString(testResult);
        if (!Utils.isStringEmpty(groupNamesStr)) {
            attributes.setProperty(XMLReporterConfig.ATTR_GROUPS, groupNamesStr);
        }
    }
    return attributes;
}
/**
 * Strips the package and class qualifier from a fully qualified method
 * signature, e.g. {@code com.foo.Bar.baz(int)} becomes {@code baz(int)}.
 *
 * @param methodSignature fully qualified signature as produced by the test
 *                        method's {@code toString()}
 * @return the signature starting at the simple method name
 */
private String removeClassName(String methodSignature) {
    int parenthesisPos = methodSignature.indexOf('(');
    if (parenthesisPos == -1) {
        // Defensive: a malformed signature without a parameter list previously
        // caused substring(0, -1) to throw StringIndexOutOfBoundsException.
        return methodSignature;
    }
    int dotAfterClassPos = methodSignature.substring(0, parenthesisPos).lastIndexOf('.');
    return methodSignature.substring(dotAfterClassPos + 1);
}
/** Writes the params element listing every invocation parameter, or nothing when there are none. */
public void addTestMethodParams(XMLStringBuffer xmlBuffer, ITestResult testResult) {
    Object[] params = testResult.getParameters();
    if (params == null || params.length == 0) {
        return;
    }
    xmlBuffer.push(XMLReporterConfig.TAG_PARAMS);
    int index = 0;
    for (Object param : params) {
        addParameter(xmlBuffer, param, index++);
    }
    xmlBuffer.pop();
}
/**
 * Writes a single param element. A null parameter is rendered as an empty
 * value element carrying an is-null attribute; any other value is rendered
 * as CDATA of its toString().
 */
private void addParameter(XMLStringBuffer xmlBuffer, Object parameter, int i) {
    Properties indexAttr = new Properties();
    indexAttr.setProperty(XMLReporterConfig.ATTR_INDEX, String.valueOf(i));
    xmlBuffer.push(XMLReporterConfig.TAG_PARAM, indexAttr);
    if (parameter != null) {
        xmlBuffer.push(XMLReporterConfig.TAG_PARAM_VALUE);
        xmlBuffer.addCDATA(parameter.toString());
        xmlBuffer.pop();
    } else {
        Properties nullAttr = new Properties();
        nullAttr.setProperty(XMLReporterConfig.ATTR_IS_NULL, "true");
        xmlBuffer.addEmptyElement(XMLReporterConfig.TAG_PARAM_VALUE, nullAttr);
    }
    xmlBuffer.pop();
}
/**
 * Writes the exception element for a failed result: exception class, optional
 * message, and short and/or full stack traces depending on the configured
 * stack-trace output bitmask. Does nothing when the result has no throwable.
 */
private void addTestResultException(XMLStringBuffer xmlBuffer, ITestResult testResult) {
    Throwable throwable = testResult.getThrowable();
    if (throwable == null) {
        return;
    }
    Properties exceptionAttrs = new Properties();
    exceptionAttrs.setProperty(XMLReporterConfig.ATTR_CLASS, throwable.getClass().getName());
    xmlBuffer.push(XMLReporterConfig.TAG_EXCEPTION, exceptionAttrs);

    String message = throwable.getMessage();
    if (!Utils.isStringEmpty(message)) {
        xmlBuffer.push(XMLReporterConfig.TAG_MESSAGE);
        xmlBuffer.addCDATA(message);
        xmlBuffer.pop();
    }

    // stackTraces[0] is the short form, stackTraces[1] the full form.
    String[] stackTraces = Utils.stackTrace(throwable, true);
    int outputMethod = config.getStackTraceOutputMethod();
    if ((outputMethod & XMLReporterConfig.STACKTRACE_SHORT) == XMLReporterConfig.STACKTRACE_SHORT) {
        xmlBuffer.push(XMLReporterConfig.TAG_SHORT_STACKTRACE);
        xmlBuffer.addCDATA(stackTraces[0]);
        xmlBuffer.pop();
    }
    if ((outputMethod & XMLReporterConfig.STACKTRACE_FULL) == XMLReporterConfig.STACKTRACE_FULL) {
        xmlBuffer.push(XMLReporterConfig.TAG_FULL_STACKTRACE);
        xmlBuffer.addCDATA(stackTraces[1]);
        xmlBuffer.pop();
    }
    xmlBuffer.pop();
}
/**
 * Joins the group names of the test method with ", ".
 *
 * @param testResult result whose method's groups are listed
 * @return comma-separated group names, or an empty string when the method
 *         belongs to no groups
 */
private String getGroupNamesString(ITestResult testResult) {
    String[] groupNames = testResult.getMethod().getGroups();
    if (groupNames == null || groupNames.length == 0) {
        return "";
    }
    // StringBuilder avoids the O(n^2) cost of repeated String concatenation.
    StringBuilder joined = new StringBuilder();
    for (int i = 0; i < groupNames.length; i++) {
        if (i > 0) {
            joined.append(", ");
        }
        joined.append(groupNames[i]);
    }
    return joined.toString();
}
} |
package org.voovan.tools;
import org.voovan.db.CallType;
import org.voovan.tools.json.JSON;
import org.voovan.tools.log.Logger;
import org.voovan.tools.reflect.TReflect;
import java.lang.reflect.Method;
import java.math.BigDecimal;
import java.sql.*;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.Date;
import java.util.Map.Entry;
public class TSQL {
/**
* SQL , SQL
* @param sqlStr sql (select * from table where x=::x and y=::y)
* @return sql ([:x,:y])
*/
public static List<String> getSqlParams(String sqlStr){
String[] params = TString.searchByRegex(sqlStr, "::[^,\\s\\)]+");
ArrayList<String> sqlParams = new ArrayList<String>();
for(String param : params){
sqlParams.add(param);
}
return sqlParams;
}
/**
* preparedStatement sql (?)
* @param sqlStr sql (select * from table where x=:x and y=::y)
* @return :? (select * from table where x=? and y=?)
*/
public static String preparedSql(String sqlStr){
return sqlStr.replaceAll("::[^,\\s\\)]+", "?");
}
/**
* preparedStatement
*
* @param preparedStatement preparedStatement
* @param sqlParams sql
* @param params Map
* @throws SQLException SQL
*/
public static void setPreparedParams(PreparedStatement preparedStatement,List<String> sqlParams,Map<String, Object> params) throws SQLException{
for(int i=0;i<sqlParams.size();i++){
String paramName = sqlParams.get(i);
paramName = paramName.substring(2,paramName.length());
Object data = params.get(paramName);
if(TReflect.isBasicType(data.getClass())) {
preparedStatement.setObject(i + 1, params.get(paramName));
}else{
//,, JSON
preparedStatement.setObject(i + 1, JSON.toJSON(params.get(paramName)));
}
Logger.debug("[SQL_Parameter]: "+sqlParams.get(i)+" = "+params.get(paramName));
}
}
/**
* PreparedStatement
* @param conn
* @param sqlStr sql
* @param params Map
* @return PreparedStatement
* @throws SQLException SQL
*/
public static PreparedStatement createPreparedStatement(Connection conn,String sqlStr,Map<String, Object> params) throws SQLException{
List<String> sqlParams = TSQL.getSqlParams(sqlStr);
sqlStr = TSQL.removeEmptyCondiction(sqlStr,sqlParams,params);
Logger.debug("[SQL_Executed]: " + sqlStr);
//preparedStatement SQL
String preparedSql = TSQL.preparedSql(sqlStr);
PreparedStatement preparedStatement = (PreparedStatement) conn.prepareStatement(preparedSql);
//params,
if(params==null){
params = new Hashtable<String, Object>();
}
//preparedStatement
TSQL.setPreparedParams(preparedStatement,sqlParams,params);
return preparedStatement;
}
/**
* PreparedStatement
* @param conn
* @param sqlStr sql
* @param params Map
* @param callTypes
* @return PreparedStatement
* @throws SQLException SQL
*/
public static CallableStatement createCallableStatement(Connection conn,String sqlStr,Map<String, Object> params,CallType[] callTypes) throws SQLException{
Logger.debug("[SQL_Executed]: " + sqlStr);
List<String> sqlParams = TSQL.getSqlParams(sqlStr);
//preparedStatement SQL
String preparedSql = TSQL.preparedSql(sqlStr);
// jdbc statement
CallableStatement callableStatement = (CallableStatement) conn.prepareCall(preparedSql);
//params,
if(params==null){
params = new Hashtable<String, Object>();
}
//callableStatement
TSQL.setPreparedParams(callableStatement,sqlParams,params);
//, OUT
ParameterMetaData parameterMetaData = callableStatement.getParameterMetaData();
for(int i=0;i<parameterMetaData.getParameterCount();i++){
int paramMode = parameterMetaData.getParameterMode(i+1);
if(paramMode == ParameterMetaData.parameterModeOut || paramMode == ParameterMetaData.parameterModeInOut) {
callableStatement.registerOutParameter(i + 1, parameterMetaData.getParameterType(i + 1));
}
}
return callableStatement;
}
/**
*
* @param callableStatement callableStatement
* @return
* @throws SQLException SQL
*/
public static List<Object> getCallableStatementResult(CallableStatement callableStatement) throws SQLException{
ArrayList<Object> result = new ArrayList<Object>();
ParameterMetaData parameterMetaData = callableStatement.getParameterMetaData();
for(int i=0;i<parameterMetaData.getParameterCount();i++){
int paramMode = parameterMetaData.getParameterMode(i+1);
// out ,
if(paramMode == ParameterMetaData.parameterModeOut || paramMode == ParameterMetaData.parameterModeInOut){
String methodName = getDataMethod(parameterMetaData.getParameterType(i+1));
Object value;
try {
// int
Method method = TReflect.findMethod(CallableStatement.class,methodName,new Class[]{int.class});
value = TReflect.invokeMethod(callableStatement, method,i+1);
result.add(value);
} catch (ReflectiveOperationException e) {
e.printStackTrace();
}
}
}
return result;
}
/**
* SQL
* @param sqlStr SQL
* @param args
* @return SQL
*/
public static String assembleSQLWithArray(String sqlStr,Object[] args){
Map<String,Object> argMap = TObject.arrayToMap(args);
return assembleSQLWithMap(sqlStr,argMap);
}
/**
* argObjectjSQL
* @param sqlStr SQL
* @param argObjectj
* @return SQL
* @throws ReflectiveOperationException
*/
public static String assembleSQLWithObject(String sqlStr,Object argObjectj) throws ReflectiveOperationException{
// (-)Map
Map<String,Object> argMap = TReflect.getMapfromObject(argObjectj);
return assembleSQLWithMap(sqlStr,argMap);
}
/**
* argMapKVSQL
* SQL:
* @param sqlStr SQL
* @param argMap Map
* @return
*/
public static String assembleSQLWithMap(String sqlStr,Map<String ,Object> argMap) {
for(Entry<String,Object> arg : argMap.entrySet())
{
sqlStr = sqlStr.replaceAll(":"+arg.getKey(),getSQLString(argMap.get(arg.getKey())));
}
return sqlStr;
}
/**
* resultSetMap
* @param resultset
* @return Map
* @throws SQLException SQL
* @throws ReflectiveOperationException
*/
public static Map<String, Object> getOneRowWithMap(ResultSet resultset)
throws SQLException, ReflectiveOperationException {
HashMap<String, Object> resultMap = new HashMap<String,Object>();
HashMap<String,Integer> columns = new HashMap<String,Integer>();
int columnCount = resultset.getMetaData().getColumnCount();
for(int i=1;i<=columnCount;i++){
columns.put(resultset.getMetaData().getColumnLabel(i),resultset.getMetaData().getColumnType(i));
}
//Map
for(Entry<String, Integer> columnEntry : columns.entrySet())
{
String methodName =getDataMethod(columnEntry.getValue());
Object value = TReflect.invokeMethod(resultset, methodName, columnEntry.getKey());
resultMap.put(columnEntry.getKey(), value);
}
return resultMap;
}
/**
* resultSet
* @param clazz
* @param resultset
* @return
* @throws ReflectiveOperationException
* @throws SQLException SQL
* @throws ParseException
*/
public static Object getOneRowWithObject(Class<?> clazz,ResultSet resultset)
throws SQLException, ReflectiveOperationException, ParseException {
Map<String,Object>rowMap = getOneRowWithMap(resultset);
HashMap<String,Object> newMap = new HashMap<String,Object>();
for(Entry<String,Object> entry : rowMap.entrySet()){
String key = entry.getKey().replaceAll("[^a-z|A-Z|0-9]","");
newMap.put(key,entry.getValue());
}
rowMap.clear();
return TReflect.getObjectFromMap(clazz, newMap,true);
}
/**
* resultSetList,Map
* @param resultSet
* @return List[Map]
* @throws ReflectiveOperationException
* @throws SQLException SQL
*/
public static List<Map<String,Object>> getAllRowWithMapList(ResultSet resultSet)
throws SQLException, ReflectiveOperationException {
List<Map<String,Object>> resultList = new ArrayList<Map<String,Object>>();
while(resultSet!=null && resultSet.next()){
resultList.add(getOneRowWithMap(resultSet));
}
return resultList;
}
/**
* resultSetList,
* @param clazz
* @param resultSet
* @return
* @throws ParseException
* @throws ReflectiveOperationException
* @throws SQLException SQL
*/
public static List<Object> getAllRowWithObjectList(Class<?> clazz,ResultSet resultSet)
throws SQLException, ReflectiveOperationException, ParseException {
List<Object> resultList = new ArrayList<Object>();
while(resultSet!=null && resultSet.next()){
resultList.add(getOneRowWithObject(clazz,resultSet));
}
return resultList;
}
/**
* SQL ,
* @param sqlText SQL
* @param sqlParams sql
* @param params
* @return
*/
public static String removeEmptyCondiction(String sqlText,List<String> sqlParams,Map<String, Object> params){
//params,
if(params==null){
params = new Hashtable<String, Object>();
}
//::paramName ``paramName
for(String paramName : params.keySet()){
sqlText = sqlText.replace("::"+paramName,"``"+paramName);
}
String sqlRegx = "((\\swhere\\s)|(\\sand\\s)|(\\sor\\s))[\\S\\s]+?(?=(\\swhere\\s)|(\\s\\)\\s)|(\\sand\\s)|(\\sor\\s)|(\\sorder\\s)|(\\shaving\\s)|$)";
String[] sqlCondiction = TString.searchByRegex(sqlText,sqlRegx);
for(String condiction : sqlCondiction){
String[] condictions = TString.searchByRegex(condiction,"::[^,\\s\\)]+");
if(condictions.length>0){
if(condiction.trim().toLowerCase().startsWith("where")){
sqlText = sqlText.replace(condiction.trim(),"where 1=1");
}else{
sqlText = sqlText.replace(condiction.trim(),"");
}
sqlParams.remove(condictions[0]);
}
}
//``paramName ::paramName
return sqlText.replace("``","::");
}
/**
* SQL
* @param sqlText SQL
* @return SQL
*/
public static List<String[]> parseSQLCondiction(String sqlText) {
ArrayList<String[]> condictionList = new ArrayList<String[]>();
sqlText = sqlText.toLowerCase();
String sqlRegx = "((\\swhere\\s)|(\\sand\\s)|(\\sor\\s))[\\S\\s]+?(?=(\\swhere\\s)|(\\s\\)\\s)|(\\sand\\s)|(\\sor\\s)|(\\sorder\\s)|(\\shaving\\s)|$)";
String[] sqlCondiction = TString.searchByRegex(sqlText,sqlRegx);
for(String condiction : sqlCondiction){
condiction = condiction.trim();
String concateMethod = condiction.substring(0,condiction.indexOf(" ")+1).trim();
condiction = condiction.substring(condiction.indexOf(" ")+1,condiction.length()).trim();
String operatorChar = TString.searchByRegex(condiction, "(\\slike\\s*)|(\\sin\\s*)|(>=)|(<=)|[=<>]")[0].trim();
String[] condictionArr = condiction.split("(\\slike\\s*)|(\\sin\\s*)|(>=)|(<=)|[=<>]");
condictionArr[0] = condictionArr[0].trim();
condictionArr[1] = condictionArr[1].trim();
if(condictionArr[0].trim().indexOf(".")>1){
condictionArr[0] = condictionArr[0].split("\\.")[1];
condictionArr[0] = condictionArr[0].substring(condictionArr[0].lastIndexOf(" ")+1);
}
if(condictionArr.length>1){
if((condictionArr[1].trim().startsWith("'") && condictionArr[1].trim().endsWith("'")) ||
(condictionArr[1].trim().startsWith("(") && condictionArr[1].trim().endsWith(")"))
){
condictionArr[1] = condictionArr[1].substring(1,condictionArr[1].length()-1);
}
if(operatorChar.contains("in")){
condictionArr[1] = condictionArr[1].replace("'", "");
}
//System.out.println(": "+concateMethod+" \t: "+condictionArr[0]+" \t: "+operatorChar+" \t: "+condictionArr[1]);
condictionList.add(new String[]{concateMethod, condictionArr[0], operatorChar, condictionArr[1]});
}else{
Logger.error("Parse SQL condiction error");
}
}
return condictionList;
}
/**
* SQL, JAVA SQL
* :String 'chs'
* @param argObj
* @return
*/
public static String getSQLString(Object argObj)
{
if(argObj instanceof List)
{
Object[] objects =((List<?>)argObj).toArray();
StringBuilder listValueStr= new StringBuilder("(");
for(Object obj : objects)
{
String sqlValue = getSQLString(obj);
if(sqlValue!=null) {
listValueStr.append(sqlValue);
listValueStr.append(",");
}
}
return TString.removeSuffix(listValueStr.toString())+")";
}
//String
else if(argObj instanceof String){
return "\'"+argObj.toString()+"\'";
}
//Boolean
else if(argObj instanceof Boolean){
if((Boolean)argObj)
return "true";
else
return "false";
}
//Date
else if(argObj instanceof Date){
SimpleDateFormat dateFormat = new SimpleDateFormat(TDateTime.STANDER_DATETIME_TEMPLATE);
return "'"+dateFormat.format(argObj)+"'";
}
//String
else
{
return argObj.toString();
}
}
/**
* SQL Result
* @param databaseType
* @return
*/
public static String getDataMethod(int databaseType){
switch(databaseType){
case java.sql.Types.CHAR :
return "getString";
case java.sql.Types.VARCHAR :
return "getString";
case java.sql.Types.LONGVARCHAR :
return "getString";
case java.sql.Types.NCHAR :
return "getString";
case java.sql.Types.LONGNVARCHAR :
return "getString";
case java.sql.Types.NUMERIC :
return "getBigDecimal";
case java.sql.Types.DECIMAL :
return "getBigDecimal";
case java.sql.Types.BIT :
return "getBoolean";
case java.sql.Types.BOOLEAN :
return "getBoolean";
case java.sql.Types.TINYINT :
return "getByte";
case java.sql.Types.SMALLINT :
return "getShort";
case java.sql.Types.INTEGER :
return "getInt";
case java.sql.Types.BIGINT :
return "getLong";
case java.sql.Types.REAL :
return "getFloat";
case java.sql.Types.FLOAT :
return "getFloat";
case java.sql.Types.DOUBLE :
return "getDouble";
case java.sql.Types.BINARY :
return "getBytes";
case java.sql.Types.VARBINARY :
return "getBytes";
case java.sql.Types.LONGVARBINARY :
return "getBytes";
case java.sql.Types.DATE :
return "getDate";
case java.sql.Types.TIME :
return "getTime";
case java.sql.Types.TIMESTAMP :
return "getTimestamp";
case java.sql.Types.CLOB :
return "getClob";
case java.sql.Types.BLOB :
return "getBlob";
case java.sql.Types.ARRAY :
return "getArray";
default:
return "getString";
}
}
/**
* JAVA SQL
* @param obj
* @return
*/
public static int getSqlTypes(Object obj){
Class<?> objectClass = obj.getClass();
if(char.class == objectClass){
return java.sql.Types.CHAR;
}else if(String.class == objectClass){
return java.sql.Types.VARCHAR ;
}else if(BigDecimal.class == objectClass){
return java.sql.Types.NUMERIC;
}else if(Boolean.class == objectClass){
return java.sql.Types.BIT;
}else if(Byte.class == objectClass){
return java.sql.Types.TINYINT;
}else if(Short.class == objectClass){
return java.sql.Types.SMALLINT;
}else if(Integer.class == objectClass){
return java.sql.Types.INTEGER;
}else if(Long.class == objectClass){
return java.sql.Types.BIGINT;
}else if(Float.class == objectClass){
return java.sql.Types.FLOAT;
}else if(Double.class == objectClass){
return java.sql.Types.DOUBLE;
}else if(Byte[].class == objectClass){
return java.sql.Types.BINARY;
}else if(Date.class == objectClass){
return java.sql.Types.DATE;
}else if(Time.class == objectClass){
return java.sql.Types.TIME;
}else if(Timestamp.class == objectClass){
return java.sql.Types.TIMESTAMP;
}else if(Clob.class == objectClass){
return java.sql.Types.CLOB;
}else if(Blob.class == objectClass){
return java.sql.Types.BLOB;
}else if(Object[].class == objectClass){
return java.sql.Types.ARRAY;
}
return 0;
}
} |
package org.testng.reporters;
import org.testng.IResultMap;
import org.testng.ISuiteResult;
import org.testng.ITestResult;
import org.testng.internal.Utils;
import java.io.File;
import java.text.SimpleDateFormat;
import java.util.*;
/**
* Utility writing an ISuiteResult to an XMLStringBuffer. Depending on the settings in the <code>config</code> property
* it might generate an additional XML file with the actual content and only reference the file with an <code>url</code>
* attribute in the passed XMLStringBuffer.
*
* @author Cosmin Marginean, Mar 16, 2007
*/
public class XMLSuiteResultWriter {
private XMLReporterConfig config;
public XMLSuiteResultWriter(XMLReporterConfig config) {
this.config = config;
}
/**
* Writes the specified ISuiteResult in the given XMLStringBuffer. Please consider that depending on the settings in
* the <code>config</code> property it might generate an additional XML file with the actual content and only
* reference the file with an <code>url</code> attribute in the passed XMLStringBuffer.
*
* @param xmlBuffer The XML buffer where to write or reference the suite result
* @param suiteResult The <code>ISuiteResult</code> to serialize
*/
public void writeSuiteResult(XMLStringBuffer xmlBuffer, ISuiteResult suiteResult) {
if (XMLReporterConfig.FF_LEVEL_SUITE_RESULT != config.getFileFragmentationLevel()) {
writeAllToBuffer(xmlBuffer, suiteResult);
} else {
String parentDir =
config.getOutputDirectory() + File.separatorChar + suiteResult.getTestContext().getSuite().getName();
File file = referenceSuiteResult(xmlBuffer, parentDir, suiteResult);
XMLStringBuffer suiteXmlBuffer = new XMLStringBuffer("");
writeAllToBuffer(suiteXmlBuffer, suiteResult);
Utils.writeUtf8File(file.getAbsoluteFile().getParent(), file.getName(), suiteXmlBuffer.toXML());
}
}
private void writeAllToBuffer(XMLStringBuffer xmlBuffer, ISuiteResult suiteResult) {
xmlBuffer.push(XMLReporterConfig.TAG_TEST, getSuiteResultAttributes(suiteResult));
Set<ITestResult> testResults = new HashSet();
addAllTestResults(testResults, suiteResult.getTestContext().getPassedTests());
addAllTestResults(testResults, suiteResult.getTestContext().getFailedTests());
addAllTestResults(testResults, suiteResult.getTestContext().getSkippedTests());
addAllTestResults(testResults, suiteResult.getTestContext().getPassedConfigurations());
addAllTestResults(testResults, suiteResult.getTestContext().getSkippedConfigurations());
addAllTestResults(testResults, suiteResult.getTestContext().getFailedConfigurations());
addAllTestResults(testResults, suiteResult.getTestContext().getFailedButWithinSuccessPercentageTests());
addTestResults(xmlBuffer, testResults);
xmlBuffer.pop();
}
private void addAllTestResults(Set<ITestResult> testResults, IResultMap resultMap) {
if (resultMap != null) {
testResults.addAll(resultMap.getAllResults());
}
}
private File referenceSuiteResult(XMLStringBuffer xmlBuffer, String parentDir, ISuiteResult suiteResult) {
Properties attrs = new Properties();
String suiteResultName = suiteResult.getTestContext().getName() + ".xml";
attrs.setProperty(XMLReporterConfig.ATTR_URL, suiteResultName);
xmlBuffer.addEmptyElement(XMLReporterConfig.TAG_TEST, attrs);
return new File(parentDir + File.separatorChar + suiteResultName);
}
private Properties getSuiteResultAttributes(ISuiteResult suiteResult) {
Properties attributes = new Properties();
attributes.setProperty(XMLReporterConfig.ATTR_NAME, suiteResult.getTestContext().getName());
return attributes;
}
private void addTestResults(XMLStringBuffer xmlBuffer, Set<ITestResult> testResults) {
Map<String, List<ITestResult>> testsGroupedByClass = buildTestClassGroups(testResults);
for (Map.Entry<String, List<ITestResult>> result : testsGroupedByClass.entrySet()) {
Properties attributes = new Properties();
String className = result.getKey();
if (config.isSplitClassAndPackageNames()) {
int dot = className.lastIndexOf('.');
attributes.setProperty(XMLReporterConfig.ATTR_NAME,
dot > -1 ? className.substring(dot + 1, className.length()) : className);
attributes.setProperty(XMLReporterConfig.ATTR_PACKAGE, dot > -1 ? className.substring(0, dot) : "[default]");
} else {
attributes.setProperty(XMLReporterConfig.ATTR_NAME, className);
}
xmlBuffer.push(XMLReporterConfig.TAG_CLASS, attributes);
for (ITestResult testResult : result.getValue()) {
addTestResult(xmlBuffer, testResult);
}
xmlBuffer.pop();
}
}
private Map<String, List<ITestResult>> buildTestClassGroups(Set<ITestResult> testResults) {
Map<String, List<ITestResult>> map = new HashMap<String, List<ITestResult>>();
for (ITestResult result : testResults) {
String className = result.getTestClass().getName();
List<ITestResult> list = map.get(className);
if (list == null) {
list = new ArrayList<ITestResult>();
map.put(className, list);
}
list.add(result);
}
return map;
}
private void addTestResult(XMLStringBuffer xmlBuffer, ITestResult testResult) {
Properties attribs = getTestResultAttributes(testResult);
attribs.setProperty(XMLReporterConfig.ATTR_STATUS, getStatusString(testResult.getStatus()));
xmlBuffer.push(XMLReporterConfig.TAG_TEST_METHOD, attribs);
addTestMethodParams(xmlBuffer, testResult);
addTestResultException(xmlBuffer, testResult);
xmlBuffer.pop();
}
private String getStatusString(int testResultStatus) {
switch (testResultStatus) {
case ITestResult.SUCCESS:
return "PASS";
case ITestResult.FAILURE:
return "FAIL";
case ITestResult.SKIP:
return "SKIP";
case ITestResult.SUCCESS_PERCENTAGE_FAILURE:
return "SUCCESS_PERCENTAGE_FAILURE";
}
return null;
}
private Properties getTestResultAttributes(ITestResult testResult) {
Properties attributes = new Properties();
if (!testResult.getMethod().isTest()) {
attributes.setProperty(XMLReporterConfig.ATTR_IS_CONFIG, "true");
}
attributes.setProperty(XMLReporterConfig.ATTR_NAME, testResult.getName());
String description = testResult.getMethod().getDescription();
if (!Utils.isStringEmpty(description)) {
attributes.setProperty(XMLReporterConfig.ATTR_DESC, description);
}
attributes.setProperty(XMLReporterConfig.ATTR_METHOD_SIG, removeClassName(testResult.getMethod().toString()));
SimpleDateFormat format = new SimpleDateFormat(config.getTimestampFormat());
String startTime = format.format(testResult.getStartMillis());
String endTime = format.format(testResult.getEndMillis());
attributes.setProperty(XMLReporterConfig.ATTR_STARTED_AT, startTime);
attributes.setProperty(XMLReporterConfig.ATTR_FINISHED_AT, endTime);
long duration = testResult.getEndMillis() - testResult.getStartMillis();
String strDuration = Long.toString(duration);
attributes.setProperty(XMLReporterConfig.ATTR_DURATION_MS, strDuration);
if (config.isGenerateGroupsAttribute()) {
String groupNamesStr = Utils.arrayToString(testResult.getMethod().getGroups());
if (!Utils.isStringEmpty(groupNamesStr)) {
attributes.setProperty(XMLReporterConfig.ATTR_GROUPS, groupNamesStr);
}
}
if (config.isGenerateDependsOnMethods()) {
String dependsOnStr = Utils.arrayToString(testResult.getMethod().getMethodsDependedUpon());
if (!Utils.isStringEmpty(dependsOnStr)) {
attributes.setProperty(XMLReporterConfig.ATTR_DEPENDS_ON_METHODS, dependsOnStr);
}
}
if (config.isGenerateDependsOnGroups()) {
String dependsOnStr = Utils.arrayToString(testResult.getMethod().getGroupsDependedUpon());
if (!Utils.isStringEmpty(dependsOnStr)) {
attributes.setProperty(XMLReporterConfig.ATTR_DEPENDS_ON_GROUPS, dependsOnStr);
}
}
return attributes;
}
private String removeClassName(String methodSignature) {
int firstParanthesisPos = methodSignature.indexOf("(");
int dotAferClassPos = methodSignature.substring(0, firstParanthesisPos).lastIndexOf(".");
return methodSignature.substring(dotAferClassPos + 1, methodSignature.length());
}
public void addTestMethodParams(XMLStringBuffer xmlBuffer, ITestResult testResult) {
Object[] parameters = testResult.getParameters();
if ((parameters != null) && (parameters.length > 0)) {
xmlBuffer.push(XMLReporterConfig.TAG_PARAMS);
for (int i = 0; i < parameters.length; i++) {
addParameter(xmlBuffer, parameters[i], i);
}
xmlBuffer.pop();
}
}
private void addParameter(XMLStringBuffer xmlBuffer, Object parameter, int i) {
Properties attrs = new Properties();
attrs.setProperty(XMLReporterConfig.ATTR_INDEX, String.valueOf(i));
xmlBuffer.push(XMLReporterConfig.TAG_PARAM, attrs);
if (parameter == null) {
Properties valueAttrs = new Properties();
valueAttrs.setProperty(XMLReporterConfig.ATTR_IS_NULL, "true");
xmlBuffer.addEmptyElement(XMLReporterConfig.TAG_PARAM_VALUE, valueAttrs);
} else {
xmlBuffer.push(XMLReporterConfig.TAG_PARAM_VALUE);
xmlBuffer.addCDATA(parameter.toString());
xmlBuffer.pop();
}
xmlBuffer.pop();
}
private void addTestResultException(XMLStringBuffer xmlBuffer, ITestResult testResult) {
Throwable exception = testResult.getThrowable();
if (exception != null) {
Properties exceptionAttrs = new Properties();
exceptionAttrs.setProperty(XMLReporterConfig.ATTR_CLASS, exception.getClass().getName());
xmlBuffer.push(XMLReporterConfig.TAG_EXCEPTION, exceptionAttrs);
if (!Utils.isStringEmpty(exception.getMessage())) {
xmlBuffer.push(XMLReporterConfig.TAG_MESSAGE);
xmlBuffer.addCDATA(exception.getMessage());
xmlBuffer.pop();
}
String[] stackTraces = Utils.stackTrace(exception, false);
if ((config.getStackTraceOutputMethod() & XMLReporterConfig.STACKTRACE_SHORT) == XMLReporterConfig
.STACKTRACE_SHORT) {
xmlBuffer.push(XMLReporterConfig.TAG_SHORT_STACKTRACE);
xmlBuffer.addCDATA(stackTraces[0]);
xmlBuffer.pop();
}
if ((config.getStackTraceOutputMethod() & XMLReporterConfig.STACKTRACE_FULL) == XMLReporterConfig
.STACKTRACE_FULL) {
xmlBuffer.push(XMLReporterConfig.TAG_FULL_STACKTRACE);
xmlBuffer.addCDATA(stackTraces[1]);
xmlBuffer.pop();
}
xmlBuffer.pop();
}
}
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.