answer
stringlengths
17
10.2M
package dev.kkorolyov.simpleprops;

import java.io.*;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;

/**
 * Provides access to and mutation of long-term properties, which are an indefinite set of key-value pairs.
 * Provides for saving and loading data to/from a file unique to the current instance.
 * <p>
 * Instances are cached per filename: repeated calls to {@code getInstance} with the same
 * filename return the same object.
 */
public class Properties {
	private static final Map<String, Properties> instances = new HashMap<>();

	private final String filename;
	// defaultProperties holds the baseline values; properties holds the live, possibly-edited set.
	private final Map<String, String> defaultProperties = new HashMap<>(),
									  properties = new HashMap<>();

	/**
	 * Returns a {@code Properties} instance of the specified name.
	 * If an appropriate instance does not yet exist, it is created using the specified filename and no default properties.
	 * @see #getInstance(String, Map)
	 */
	public static Properties getInstance(String filename) {
		return getInstance(filename, (Map<String, String>) null);
	}
	/**
	 * Functions similarly to {@link #getInstance(String, Map)}, but uses a 2-dimensional array instead of a {@code Map} to specify default properties.
	 */
	public static Properties getInstance(String filename, String[][] defaultProperties) {
		return getInstance(filename, convertArrayToMap(defaultProperties));
	}
	/**
	 * Returns a {@code Properties} instance for the specified filename.
	 * If an appropriate instance does not yet exist, it is created using the specified filename and default properties.
	 * @param filename label for the instance, as well as the name of the file written to when saved
	 * @param defaultProperties default properties of new instance; ignored if retrieving an existing instance
	 * @return appropriate instance
	 */
	public static Properties getInstance(String filename, Map<String, String> defaultProperties) {
		// computeIfAbsent replaces the original put-then-reread while loop:
		// single lookup, and the new instance is returned directly.
		return instances.computeIfAbsent(filename, name -> new Properties(name, defaultProperties));
	}

	/**
	 * Converts a {@code [ [key, value], ... ]} array to a map.
	 * @param array rows of key-value pairs; may be {@code null}
	 * @return equivalent map, or {@code null} if {@code array} is {@code null}
	 */
	private static Map<String, String> convertArrayToMap(String[][] array) {
		if (array == null)
			return null;

		Map<String, String> convertedMap = new HashMap<>();
		for (String[] property : array) {
			// BUG FIX: original tested array.length (number of rows) instead of
			// property.length (columns in this row), so a single-row defaults array
			// was silently ignored. Also guard against null rows.
			if (property != null && property.length >= 2)
				convertedMap.put(property[0], property[1]);
		}
		return convertedMap;
	}

	private Properties(String filename, Map<String, String> defaultProperties) {
		this.filename = filename;
		setDefaultProperties(defaultProperties);
		loadDefaults();
		try {
			loadFromFile();	// merge any previously-saved values over the defaults
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	private void setDefaultProperties(Map<String, String> newDefaultProperties) {
		defaultProperties.clear();
		if (newDefaultProperties != null)	// null means "no default properties"
			defaultProperties.putAll(newDefaultProperties);
	}

	/**
	 * Resets all properties to default values.
	 */
	public void loadDefaults() {
		properties.clear();
		properties.putAll(defaultProperties);
	}

	/**
	 * Retrieves the value of a property of the specified key.
	 * If this method is called on an instance which has no properties in memory, the instance will first attempt to load both its default properties and properties from its respective file.
	 * @param key key of property to retrieve
	 * @return property value, or {@code null} if no such property
	 */
	public String getValue(String key) {
		reloadIfEmpty();
		return properties.get(key);
	}
	/**
	 * Returns the keys of all properties.
	 * If this method is called on an instance which has no properties in memory, the instance will first attempt to load both its default properties and properties from its respective file.
	 * @return key of every property
	 */
	public Set<String> getAllKeys() {
		reloadIfEmpty();
		return properties.keySet();
	}
	/**
	 * Returns the total number of properties of this instance.
	 * If this method is called on an instance which has no properties in memory, the instance will first attempt to load both its default properties and properties from its respective file.
	 * @return number of properties
	 */
	public int size() {
		return getAllKeys().size();
	}

	/**
	 * Adds the specified property.
	 * If the key matches an existing property's key, then that preexisting property's value is overridden by the specified value instead.
	 * If this method is called on an instance which has no properties in memory, the instance will first attempt to load both its default properties and properties from its respective file.
	 * @param key key of property to add
	 * @param value value of property to add
	 */
	public void addProperty(String key, String value) {
		reloadIfEmpty();
		properties.put(key, value);
	}

	private void reloadIfEmpty() {
		if (properties.isEmpty()) {	// Properties were possibly cleared to free memory
			try {
				loadDefaults();
				loadFromFile();
			} catch (IOException e) {
				e.printStackTrace();
			}
		}
	}

	/**
	 * Clears all properties from memory.
	 * Any properties not saved to disk are lost.
	 */
	public void clear() {
		properties.clear();
	}

	/**
	 * Loads all properties found in this instance's respective file.
	 * If the file does not yet exist, it is first created from the properties currently in memory.
	 * @throws IOException if an I/O error occurs
	 */
	public void loadFromFile() throws IOException {
		File file = new File(filename);
		if (!file.exists())	// First access: persist current (default) properties so the file exists
			saveToFile();

		try (BufferedReader fileReader = new BufferedReader(new FileReader(file))) {
			String nextLine;
			while ((nextLine = fileReader.readLine()) != null) {	// Line should be "<KEY>=<VALUE>"
				if (nextLine.trim().isEmpty())
					continue;	// BUG FIX: a blank line used to be stored as the ("", "") property

				// BUG FIX: limit of 2 keeps any '=' characters inside the value intact;
				// the original split("=") truncated such values at the second '='.
				String[] currentKeyValue = nextLine.split("=", 2);
				String currentKey = currentKeyValue[0].trim();
				String currentValue = (currentKeyValue.length > 1) ? currentKeyValue[1].trim() : "";
				properties.put(currentKey, currentValue);
			}
		}
	}
	/**
	 * Writes all properties currently loaded in memory to the properties file.
	 * @throws IOException if an I/O error occurs
	 * @throws FileNotFoundException if the properties file cannot be accessed for some reason
	 */
	public void saveToFile() throws FileNotFoundException, IOException {
		try (PrintWriter filePrinter = new PrintWriter(new FileOutputStream(new File(filename)))) {
			for (Map.Entry<String, String> property : properties.entrySet()) {
				filePrinter.println(property.getKey() + "=" + property.getValue());
			}
		}
	}

	/** @return name of file attached to this instance */
	public String getFilename() {
		return filename;
	}
}
package dr.app.gui.chart;

import dr.inference.trace.TraceDistribution;
import dr.stats.Variate;
import dr.util.FrequencyDistribution;

import java.awt.*;
import java.util.List;

/**
 * A frequency plot that renders trace data as density bars, one bar per frequency bin.
 * NOTE(review): extends FrequencyPlot (declared elsewhere); the semantics of inherited
 * members used below (xAxis, xScale, linePaint, lineStroke, transformX/transformY,
 * fillRect/drawRect, getFrequencyDistribution, setRawData, setData(x, y)) are assumed
 * from the superclass and not verifiable from this file.
 */
public class CategoryDensityPlot extends FrequencyPlot {
    // Fraction of a major-tick cell occupied by each bar; the remainder is gap.
    private final static double BAR_WIDTH = 0.8;
    // Number of bar series sharing the plot, and this plot's index among them.
    // NOTE(review): barCount is only read in the commented-out offset code below;
    // barId selects an alternate fill color when it is 0.
    private int barCount = 0;
    private int barId;

    /**
     * Builds a density plot from numeric trace data.
     * NOTE(review): minimumBinCount is accepted but never used in this class as shown —
     * possibly kept for signature parity with sibling plots; confirm against callers.
     */
    public CategoryDensityPlot(List<Double> data, int minimumBinCount,
                               TraceDistribution traceDistribution,
                               int barCount, int barId) {
        super(traceDistribution);
        this.barCount = barCount;
        this.barId = barId;
        setData(new Variate.D(data));
    }

    // for string
    /**
     * Builds a density plot from categorical (string) trace data.
     * Categories are mapped to numeric indices via the trace distribution before binning.
     * @throws IllegalArgumentException if the trace type is not categorical
     */
    public CategoryDensityPlot(List<String> data, TraceDistribution traceDistribution,
                               int barCount, int barId) {
        super(traceDistribution);
        this.barCount = barCount;
        this.barId = barId;
        if (!traceDistribution.getTraceType().isCategorical())
            throw new IllegalArgumentException("Categorical value is required for frequency plot.");
        List<Double> intData = traceDistribution.indexingData(data); // set data by index of unique categorical values
        setData(new Variate.D(intData));
    }

    /**
     * Set data, all integers
     *
     * Bins the raw data and stores (x, y) point pairs: for each bin i it appends
     * (lowerEdge, 0.0) then (upperEdge, probability(i)), so consecutive pairs in the
     * stored series describe one bar — paintData below consumes them two at a time.
     */
    public void setData(Variate.D data) {
        setRawData(data);
        FrequencyDistribution frequency = getFrequencyDistribution(data, -1);

        Variate.D xData = new Variate.D();
        Variate.D yData = new Variate.D();

        double x = frequency.getLowerBound();
        for (int i = 0; i < frequency.getBinCount(); i++) {
            xData.add(x);
            yData.add(0.0);
            x += frequency.getBinSize();
            xData.add(x);
            yData.add(frequency.getProbability(i));
        }
        setData(xData, yData);
    }

    /**
     * Paint data series
     *
     * Draws one filled-and-outlined bar per (even, odd) index pair produced by setData.
     * Bars with zero height (y1 == y2) are skipped entirely.
     */
    protected void paintData(Graphics2D g2, Variate.N xData, Variate.N yData) {
        int n = xData.getCount();
        // Step by 2: setData stored each bar as two consecutive points.
        for (int i = 0; i < n; i += 2) {
            // Bar width in device units: BAR_WIDTH of the spacing between major ticks.
            double cellWidth = Math.abs(xAxis.getMajorTickSpacing() * xScale) * BAR_WIDTH;
            float x = (float) transformX(((Number) xData.get(i)).doubleValue());
            float x1 = x - (float) cellWidth / 2;
            float x2 = x + (float) cellWidth / 2;
            float y1 = (float) transformY(((Number) yData.get(i)).doubleValue());
            float y2 = (float) transformY(((Number) yData.get(i + 1)).doubleValue());
            // Disabled multi-series horizontal offsetting, kept for reference:
            // x = x2 - x1;
            // if (barCount > 1) {
            //     x1 = x1 - ((double) (barCount - 1)) * x + 2.0 * ((double) barId) * x;
            //     x2 = x2 - ((double) (barCount - 1)) * x + 2.0 * ((double) barId) * x;
            //     y1 = (Double) yData.get(i);
            //     y2 = (Double) yData.get(i + 1);

            if (y1 != y2) { // skip empty bins
                if (linePaint != null) {
                    // Translucent fill (alpha 125) derived from the line color;
                    // series 0 instead gets a fixed light-blue fill.
                    Paint fillPaint = new Color(
                            ((Color) linePaint).getRed(),
                            ((Color) linePaint).getGreen(),
                            ((Color) linePaint).getBlue(), 125);
                    if (barId==0)
                        fillPaint = new Color(124, 164, 221, 125);
                    g2.setPaint(fillPaint);
                    fillRect(g2, x1, y1, x2, y2);
                }
                if (lineStroke != null && linePaint != null) {
                    g2.setStroke(lineStroke);
                    g2.setPaint(linePaint);
                    drawRect(g2, x1, y1, x2, y2);
                }
            }
        }
    }
}
package verification.platu.project;

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;

import lpn.parser.LhpnFile;

import org.antlr.runtime.ANTLRFileStream;
import org.antlr.runtime.CommonTokenStream;
import org.antlr.runtime.RecognitionException;
import org.antlr.runtime.TokenStream;

import verification.platu.logicAnalysis.Analysis;
import verification.platu.logicAnalysis.CompositionalAnalysis;
import verification.platu.lpn.LPN;
import verification.platu.lpn.LPNTranRelation;
import verification.platu.lpn.io.Instance;
import verification.platu.lpn.io.PlatuGrammarLexer;
import verification.platu.lpn.io.PlatuGrammarParser;
import verification.platu.lpn.io.PlatuInstLexer;
import verification.platu.lpn.io.PlatuInstParser;
import verification.platu.main.Options;
import verification.platu.stategraph.State;
import verification.platu.stategraph.StateGraph;
import verification.timed_state_exploration.zoneProject.ContinuousUtilities;
import verification.timed_state_exploration.zoneProject.Zone;

/**
 * A verification project: a collection of design units (state graphs, one per LPN)
 * over which a global reachability search is performed.
 * NOTE(review): LPN presumably stands for a labeled Petri-net model (see lpn.parser.LhpnFile);
 * the exact model semantics live in the imported packages, not here.
 */
public class Project {

	protected String label;

	/* 1. Each design unit has an unique label index.
	 * 2. The indices of all design units are sequential starting from 0.
	 * */
	protected List<StateGraph> designUnitSet;

	/* The list for timing analysis */
	// protected List<StateGraph_timed> designUnitTimedSet;

	protected LPNTranRelation lpnTranRelation = null;

	protected CompositionalAnalysis analysis = null;

	public Project() {
		this.label = "";
		this.designUnitSet = new ArrayList<StateGraph>(1);
		lpnTranRelation = new LPNTranRelation(this.designUnitSet);
	}

	public Project(LhpnFile lpn) {
		this.label = "";
		this.designUnitSet = new ArrayList<StateGraph>(1);
		StateGraph stateGraph = new StateGraph(lpn);
		designUnitSet.add(stateGraph);
		//stateGraph.printStates();
	}

	/**
	 * If the OptionsFlag is false, then this constructor is identical to
	 * Poject(LhpnFile lpn). If the OptionsFlag is true, this constructor uses
	 * StateGraph_timed objects.
	 *
	 * @author Andrew N. Fisher
	 *
	 * @param lpn
	 *            The lpn under consideration.
	 * @param OptionsFlag
	 *            True for timing analysis and false otherwise. The option should match
	 *            Options.getTimingAnalysisFlag().
	 */
	// public Project(LhpnFile lpn, boolean OptionsFlag)
	//     if(Options.getTimingAnalysisFlag())
	//         this.label = "";
	//         this.designUnitSet = new ArrayList<StateGraph>(0);
	//         this.designUnitTimedSet = new ArrayList<StateGraph_timed>(1);
	//         StateGraph_timed stategraph = new StateGraph_timed(lpn);
	//         designUnitTimedSet.add(stategraph);
	//     else
	//         this.label = "";
	//         this.designUnitSet = new ArrayList<StateGraph>(1);
	//         StateGraph stateGraph = new StateGraph(lpn);
	//         designUnitSet.add(stateGraph);

	public Project(ArrayList<LhpnFile> lpns) {
		this.label = "";
		this.designUnitSet = new ArrayList<StateGraph>(lpns.size());
		// Wrap each LPN in its own state graph.
		for (int i=0; i<lpns.size(); i++) {
			LhpnFile lpn = lpns.get(i);
			StateGraph stateGraph = new StateGraph(lpn);
			designUnitSet.add(stateGraph);
		}
	}

	/**
	 * Find the SG for the entire project where each project state is a tuple of
	 * local states
	 *
	 * Performs a full (non-reduced) DFS reachability search: assigns sequential
	 * indices to the LPNs, builds and registers the initial state of each design
	 * unit, then delegates to Analysis.search_dfs.
	 * @return the per-unit state graphs produced by the search
	 */
	public StateGraph[] search() {
		validateInputs();
		// Disabled compositional-search entry point, kept for reference:
		// if(Options.getSearchType().equals("compositional")){
		//     this.analysis = new CompositionalAnalysis();
		//     if(Options.getParallelFlag()){
		//         this.analysis.parallelCompositionalFindSG(this.designUnitSet);
		//     else{
		//         this.analysis.findReducedSG(this.designUnitSet);
		//     return;

		long start = System.currentTimeMillis();
		int lpnCnt = designUnitSet.size();

		/* Prepare search by placing LPNs in an array in the order of their indices.*/
		StateGraph[] sgArray = new StateGraph[lpnCnt];
		int idx = 0;
		for (StateGraph du : designUnitSet) {
			LhpnFile lpn = du.getLpn();
			lpn.setLpnIndex(idx++);
			sgArray[lpn.getLpnIndex()] = du;
		}

		// If timing, then create the sgArray with StateGraph_timed objects.
		// if(Options.getTimingAnalysisFlag())
		//     for(StateGraph_timed du : this.designUnitTimedSet)
		//         LhpnFile lpn = du.getLpn();
		//         lpn.setIndex(idx++);
		//         sgArray[lpn.getIndex()] = du;

		// Initialize the project state
		// NOTE(review): varValMap is overwritten on every loop iteration, so after this
		// loop it holds only the LAST unit's variable values — confirm this is intended.
		HashMap<String, Integer> varValMap = new HashMap<String, Integer>();
		State[] initStateArray = new State[lpnCnt];
		for (int index = 0; index < lpnCnt; index++) {
			LhpnFile curLpn = sgArray[index].getLpn();
			StateGraph curSg = sgArray[index];
			initStateArray[index] = curSg.getInitState(); //curLpn.getInitState();
			int[] curStateVector = initStateArray[index].getVector();
			varValMap = curLpn.getAllVarsWithValuesAsInt(curStateVector);
			// HashMap<String, String> vars = curLpn.getAllOutputs();//curLpn.getAllOutputs();
			// DualHashMap<String, Integer> VarIndexMap = curLpn.getVarIndexMap();
			// for(String var : vars.keySet()) {
			//     varValMap.put(var, curStateVector[VarIndexMap.getValue(var)]);
		}

		// Adjust the value of the input variables in LPN in the initial state.
		// Add the initial states into their respective LPN.
		for (int index = 0; index < lpnCnt; index++) {
			StateGraph curSg = sgArray[index];
			// If this is a timing analysis, the boolean inequality variables
			// must be updated.
			if(Options.getTimingAnalysisFlag()){
				// First create a zone with the continuous variables.
				State[] ls = new State[1];
				ls[0] = initStateArray[index];
				Zone z = new Zone(ls);
				ContinuousUtilities.updateInitialInequalities(z, ls[0]);
				initStateArray[index] = curSg.getInitState();
			}
			initStateArray[index].update(curSg, varValMap, curSg.getLpn().getVarIndexMap());
			initStateArray[index] = curSg.addState(initStateArray[index]);
		}

		// Initialize the zones for the initStateArray, if timining is enabled.
		// if(Options.getTimingAnalysisFlag())
		//     for(int index =0; index < lpnCnt; index++)
		//         if(sgArray[index] instanceof StateGraph_timed)
		// if (Options.getTimingAnalysisFlag()) {
		//     new TimingAnalysis(sgArray);
		//     return;
		// else if(!Options.getTimingAnalysisFlag()) {
		//     Analysis tmp = new Analysis(sgArray, initStateArray, lpnTranRelation, Options.getSearchType());
		//     // Analysis tmp = new Analysis(lpnList, curStateArray,
		//     // lpnTranRelation, "dfs_por");
		//     //Analysis tmp = new Analysis(modArray, initStateArray, lpnTranRelation, "dfs");
		//     //Analysis tmp = new Analysis(modArray, initStateArray, lpnTranRelation, "dfs_noDisabling");
		// else {
		//     return;

		/* Entry point for the timed analysis. */
		// if(Options.getTimingAnalysisFlag())
		//     Analysis_Timed dfsTimedStateExploration = new Analysis_Timed(sgArray);
		//     dfsTimedStateExploration.search_dfs_timed(sgArray, initStateArray);
		//     return new StateGraph[0];

		// Run the (untimed) DFS search and report wall-clock time.
		Analysis dfsStateExploration = new Analysis(sgArray);
		StateGraph[] stateGraphArray = dfsStateExploration.search_dfs(sgArray, initStateArray);
		long elapsedTimeMillis = System.currentTimeMillis() - start;
		float elapsedTimeSec = elapsedTimeMillis/1000F;
		System.out.println("---> total runtime: " + elapsedTimeSec + " sec\n");
		if (Options.getOutputLogFlag())
			outputRuntimeLog(false, elapsedTimeSec);
		return stateGraphArray;
	}

	/**
	 * Reads a single .lpn file.
	 * NOTE(review): the actual parsing below is commented out, so this currently
	 * only validates the extension and always returns null — confirm whether the
	 * parser wiring should be restored.
	 * @param src_file path to the source file; must end in ".lpn" (exits otherwise)
	 * @return parsed LPN set, or null
	 */
	public Set<LPN> readLpn(final String src_file) {
		Set<LPN> lpnSet = null;
		try {
			if (!src_file.endsWith(".lpn")) {
				System.err.println("Invalid file extention");
				System.exit(1);
			}
			// ANTLRFileStream input = new ANTLRFileStream(src_file);
			// PlatuGrammarLexer lexer = new PlatuGrammarLexer(input);
			// TokenStream tokenStream = new CommonTokenStream(lexer);
			// PlatuGrammarParser parser = new PlatuGrammarParser(tokenStream);
			// lpnSet = parser.lpn(this);
		} catch (Exception ex) {
			Logger.getLogger(Project.class.getName()).log(Level.SEVERE, null, ex);
		}
		return lpnSet;
	}

	/**
	 * Find the SG for the entire project where each project state is a tuple of
	 * local states. Use partial order reduction during dfs search.
	 *
	 * Same setup as search(), but without the timing-analysis branch and delegating
	 * to Analysis.search_dfsPOR instead of search_dfs.
	 * @return the per-unit state graphs produced by the reduced search
	 */
	public StateGraph[] searchPOR() {
		validateInputs();
		// Disabled compositional-search entry point, kept for reference:
		// if(Options.getSearchType().equals("compositional")){
		//     this.analysis = new CompositionalAnalysis();
		//     if(Options.getParallelFlag()){
		//         this.analysis.parallelCompositionalFindSG(this.designUnitSet);
		//     else{
		//         this.analysis.findReducedSG(this.designUnitSet);
		//     return;

		long start = System.currentTimeMillis();
		int lpnCnt = designUnitSet.size();

		/* Prepare search by placing LPNs in an array in the order of their indices.*/
		StateGraph[] sgArray = new StateGraph[lpnCnt];
		int idx = 0;
		for (StateGraph du : designUnitSet) {
			LhpnFile lpn = du.getLpn();
			lpn.setLpnIndex(idx++);
			sgArray[lpn.getLpnIndex()] = du;
		}

		// Initialize the project state
		// NOTE(review): as in search(), varValMap ends up holding only the last
		// unit's values after this loop — confirm intended.
		HashMap<String, Integer> varValMap = new HashMap<String, Integer>();
		State[] initStateArray = new State[lpnCnt];
		for (int index = 0; index < lpnCnt; index++) {
			LhpnFile curLpn = sgArray[index].getLpn();
			StateGraph curSg = sgArray[index];
			initStateArray[index] = curSg.getInitState(); //curLpn.getInitState();
			int[] curStateVector = initStateArray[index].getVector();
			varValMap = curLpn.getAllVarsWithValuesAsInt(curStateVector);
			// DualHashMap<String, Integer> VarIndexMap = curLpn.getVarIndexMap();
			// HashMap<String, String> outVars = curLpn.getAllOutputs();
			// for(String var : outVars.keySet()) {
			//     varValMap.put(var, curStateVector[VarIndexMap.getValue(var)]);
		}

		// Adjust the value of the input variables in LPN in the initial state.
		// Add the initial states into their respective LPN.
		for (int index = 0; index < lpnCnt; index++) {
			StateGraph curLpn = sgArray[index];
			initStateArray[index].update(curLpn, varValMap, curLpn.getLpn().getVarIndexMap());
			initStateArray[index] = curLpn.addState(initStateArray[index]);
		}

		Analysis dfsPOR = new Analysis(sgArray);
		StateGraph[] stateGraphArray;
		stateGraphArray = dfsPOR.search_dfsPOR(sgArray, initStateArray);
		long elapsedTimeMillis = System.currentTimeMillis() - start;
		float elapsedTimeSec = elapsedTimeMillis/1000F;
		System.out.println("---> total runtime: " + elapsedTimeSec + " sec\n");
		if (Options.getOutputLogFlag())
			outputRuntimeLog(true, elapsedTimeSec);
		return stateGraphArray;
	}

	/**
	 * Writes the search runtime (in seconds) to a log file under the project SG path.
	 * The file name encodes the reduction settings when isPOR is true.
	 * NOTE(review): the catch-block message ("...dot file") looks copy-pasted from a
	 * different method and does not describe this failure; also the writer is not
	 * closed if write() throws — flag for a future code change.
	 * @param isPOR true when logging a partial-order-reduction run
	 * @param runtime elapsed search time, seconds
	 */
	private void outputRuntimeLog(boolean isPOR, float runtime) {
		try {
			String fileName = null;
			if (isPOR)
				fileName = Options.getPrjSgPath() + Options.getLogName() + "_" + Options.getPOR() + "_"
						+ Options.getCycleClosingMthd() + "_" + Options.getCycleClosingAmpleMethd() + "_runtime.log";
			else
				fileName = Options.getPrjSgPath() + Options.getLogName() + "_full_runtime.log";
			BufferedWriter out = new BufferedWriter(new FileWriter(fileName));
			out.write("runtime(sec)\n");
			out.write(runtime + "\n");
			out.close();
		} catch (Exception e) {
			e.printStackTrace();
			System.err.println("Error producing local state graph as dot file.");
		}
	}

	/**
	 * Parses a list of .lpn files (plus any files they include), instantiates the
	 * declared LPN instances into this project's design-unit set, and wires each
	 * instance's arguments to the outputs of the instances that drive them.
	 * Exits the JVM on any read/parse/lookup error.
	 * NOTE(review): relies on static parser state (PlatuInstParser.includeSet,
	 * InstanceList, LpnMap) and resets the global LPN.nextID counter — not re-entrant.
	 */
	public void readLpn(List<String> fileList) {
		// Parse the explicitly listed files.
		for(String srcFile : fileList){
			if (!srcFile.endsWith(".lpn")) {
				System.err.println("Invalid file extention");
				System.exit(1);
			}
			ANTLRFileStream input = null;
			try {
				input = new ANTLRFileStream(srcFile);
			} catch (IOException e) {
				System.err.println("error: error reading " + srcFile);
				System.exit(1);
			}
			PlatuInstLexer lexer = new PlatuInstLexer(input);
			TokenStream tokenStream = new CommonTokenStream(lexer);
			PlatuInstParser parser = new PlatuInstParser(tokenStream);
			try {
				parser.parseLpnFile(this);
			} catch (RecognitionException e) {
				System.err.println("error: error parsing " + srcFile);
				System.exit(1);
			}
		}

		// Parse any included files that were not already in the explicit list.
		PlatuInstParser.includeSet.removeAll(fileList);
		for(String srcFile : PlatuInstParser.includeSet){
			if (!srcFile.endsWith(".lpn")) {
				System.err.println("Invalid file extention");
				System.exit(1);
			}
			ANTLRFileStream input = null;
			try {
				input = new ANTLRFileStream(srcFile);
			} catch (IOException e) {
				System.err.println("error: error reading " + srcFile);
				System.exit(1);
			}
			PlatuInstLexer lexer = new PlatuInstLexer(input);
			TokenStream tokenStream = new CommonTokenStream(lexer);
			PlatuInstParser parser = new PlatuInstParser(tokenStream);
			try {
				parser.parseLpnFile(this);
			} catch (RecognitionException e) {
				System.err.println("error: error parsing " + srcFile);
				System.exit(1);
			}
		}

		verification.platu.lpn.LPN.nextID = 1;

		// Instantiate each declared instance from its LPN "class" and register its state graph.
		HashMap<String, LPN> instanceMap = new HashMap<String, LPN>();
		for(Instance inst : PlatuInstParser.InstanceList){
			LPN lpn = PlatuInstParser.LpnMap.get(inst.getLpnLabel());
			if(lpn == null){
				System.err.println("error: class " + inst.getLpnLabel() + " does not exist");
				System.exit(1);
			}
			LPN instLpn = lpn.instantiate(inst.getName());
			instanceMap.put(instLpn.getLabel(), instLpn);
			this.designUnitSet.add(instLpn.getStateGraph());
		}

		// TODO: (irrelevant) Is this really needed???
		/* for(StateGraph sg : this.designUnitSet){ sg.getLpn().setGlobals(this.designUnitSet); } */

		// Connect each instance's argument (input) variables to the corresponding
		// output variables of the source instances named in its module list.
		for(Instance inst : PlatuInstParser.InstanceList){
			LPN dstLpn = instanceMap.get(inst.getName());
			if(dstLpn == null){
				System.err.println("error: instance " + inst.getName() + " does not exist");
				System.exit(1);
			}
			List<String> argumentList = dstLpn.getArgumentList();
			List<String> varList = inst.getVariableList();
			List<String> modList = inst.getModuleList();
			if(argumentList.size() != varList.size()){
				System.err.println("error: incompatible number of arguments for instance " + inst.getName());
				System.exit(1);
			}
			for(int i = 0; i < argumentList.size(); i++){
				LPN srcLpn = instanceMap.get(modList.get(i));
				if(srcLpn == null){
					System.err.println("error: instance " + modList.get(i) + " does not exist");
					System.exit(1);
				}
				String outputVar = varList.get(i);
				String inputVar = argumentList.get(i);
				srcLpn.connect(outputVar, dstLpn, inputVar);
			}
		}
	}

	/**
	 * @return the designUnitSet
	 */
	public List<StateGraph> getDesignUnitSet() {
		return designUnitSet;
	}

	/**
	 * Validates each lpn's input variables are driven by another lpn's output.
	 */
	protected void validateInputs(){ // Changed protection level. ANF
		boolean error = false;
		for(StateGraph sg : designUnitSet){
			for(String input : sg.getLpn().getAllInputs().keySet()){
				boolean connected = false;
				// An input is "connected" if any OTHER unit declares it as an output.
				for(StateGraph sg2 : designUnitSet){
					if(sg == sg2)
						continue;
					if(sg2.getLpn().getAllOutputs().keySet().contains(input)){
						connected = true;
						break;
					}
				}
				if(!connected){
					error = true;
					System.err.println("error in lpn " + sg.getLpn().getLabel()
							+ ": input variable '" + input + "' is not dependent on an output");
				}
			}
		}
		// Report every dangling input before exiting.
		if(error){
			System.exit(1);
		}
	}
}
package dr.app.tools; import dr.app.beast.BeastVersion; import dr.app.util.Arguments; import dr.app.util.Utils; import dr.inference.trace.PathSamplingAnalysis; import dr.inference.trace.TraceException; import dr.util.Attribute; import dr.util.FileHelpers; import dr.util.Version; import dr.xml.AbstractXMLObjectParser; import dr.xml.AttributeRule; import dr.xml.ElementRule; import dr.xml.StringAttributeRule; import dr.xml.XMLObject; import dr.xml.XMLObjectParser; import dr.xml.XMLParseException; import dr.xml.XMLSyntaxRule; import java.io.*; import java.util.*; /** * @author Wai Lok Sibon Li */ public class MixtureModelLogAnalyser { private final static Version version = new BeastVersion(); public static final String MIXTURE_MODEL_LOG_ANALYSER = "mixtureModelLogAnalyser"; public static final String BURNIN = "burnin"; public static final String DISCRETE_VARIABLE = "discreteVariable"; public MixtureModelLogAnalyser(int burnin, String inputFileName, String outputFileName, String discreteVariableName ) throws IOException, TraceException { File parentFile = new File(inputFileName); if (parentFile.isDirectory()) { System.out.println("Analysing all log files below directory: " + inputFileName); } else if (parentFile.isFile()) { System.out.println("Analysing log file: " + inputFileName); } else { System.err.println(inputFileName + " does not exist!"); System.exit(0); } if (outputFileName != null) { FileOutputStream outputStream = new FileOutputStream(outputFileName); System.setOut(new PrintStream(outputStream)); } analyze(parentFile, burnin, discreteVariableName); } /** * Recursively analyzes log files. 
* * @param file the file to analyze (if this is a directory then the files within it are analyzed) * @param burnin the burnin to use * @param discreteVariableName tag for the name of the discrete variable * @throws dr.inference.trace.TraceException * if the trace file is in the wrong format or corrupted */ private void analyze(File file, int burnin, String discreteVariableName) throws TraceException { if (file.isFile()) { try { String name = file.getCanonicalPath(); report(name, burnin, discreteVariableName); } catch (IOException e) { //e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates. } } else { File[] files = file.listFiles(); for (File f : files) { if (f.isDirectory()) { analyze(f, burnin, discreteVariableName); } else if (f.getName().endsWith(".log")) { analyze(f, burnin, discreteVariableName); } } } } /** * Recursively analyzes trees files. * * @param name the file to analyze (if this is a directory then the files within it are analyzed) * @param burnin the burnin to use * @param discreteVariableName tag for the name of the discrete variable */ private void report(String name, int burnin, String discreteVariableName) { try { FileReader fileReader = new FileReader(new File(name)); BufferedReader br = new BufferedReader(fileReader); String line; String previousLine =null; int discreteVariableIndex = -1; Hashtable<String, Integer> hash = new Hashtable<String, Integer>(); while((line=br.readLine()) != null) { if(line.matches("\\d+\\t.+")) { String[] split = line.split("\t"); if(split[0].equals("0")) { String[] headerSplit = previousLine.split("\t"); for(int i=0; i<headerSplit.length; i++) { String s = headerSplit[i]; if(s.equals(discreteVariableName)) { discreteVariableIndex = i; } } } if(Integer.parseInt(split[0])>=burnin) { if(hash.containsKey(split[discreteVariableIndex])) { hash.put(split[discreteVariableIndex], new Integer(hash.get(split[discreteVariableIndex]) + 1)); } else { hash.put(split[discreteVariableIndex], 
new Integer(1)); } } } previousLine = line; } Vector<String> v = new Vector(hash.keySet()); Collections.sort(v); System.out.print("name" + "\t"); for(String s : v) { double state = Double.parseDouble(s); if(Math.floor(state)!=state) { throw new RuntimeException("State is not a whole number: " + state); } System.out.print("state " + (int) state + "\t"); } System.out.print("\n" + name + "\t"); for (String aV : v) { String element = aV; System.out.print(hash.get(element) + "\t"); } System.out.println(); br.close(); } catch (IOException e) { System.err.println("Error Parsing Input log: " + e.getMessage()); } } public static XMLObjectParser PARSER = new AbstractXMLObjectParser() { public String getParserName() { return MIXTURE_MODEL_LOG_ANALYSER; } public Object parseXMLObject(XMLObject xo) throws XMLParseException { try { String inputFileName = xo.getStringAttribute(FileHelpers.FILE_NAME); String discreteVariableName = xo.getStringAttribute(DISCRETE_VARIABLE); int burninLength = 0; if (xo.hasAttribute(BURNIN)) { burninLength = xo.getIntegerAttribute(BURNIN); } MixtureModelLogAnalyser mixtureModel = new MixtureModelLogAnalyser(burninLength, inputFileName, null, discreteVariableName); return mixtureModel; } catch (IOException ioe) { throw new XMLParseException(ioe.getMessage()); } catch (TraceException te) { throw new XMLParseException(te.getMessage()); } } new MixtureModelLogAnalyser(burnin, inputFileName, outputFileName, discreteVariableName/*, !shortReport, hpds, ess, stdErr, marginalLikelihood*/);
package verification.platu.project; import java.io.IOException; import java.util.*; import java.util.logging.Level; import java.util.logging.Logger; import lpn.parser.LhpnFile; import org.antlr.runtime.ANTLRFileStream; import org.antlr.runtime.CommonTokenStream; import org.antlr.runtime.RecognitionException; import org.antlr.runtime.TokenStream; import verification.platu.lpn.DualHashMap; import verification.platu.lpn.io.Instance; import verification.platu.lpn.io.PlatuGrammarLexer; import verification.platu.lpn.io.PlatuInstLexer; import verification.platu.main.Options; import verification.platu.stategraph.*; import verification.platu.TimingAnalysis.*; import verification.platu.logicAnalysis.Analysis; import verification.platu.logicAnalysis.CompositionalAnalysis; import verification.platu.lpn.LPN; import verification.platu.lpn.LPNTranRelation; import verification.platu.lpn.io.PlatuGrammarParser; import verification.platu.lpn.io.PlatuInstParser; import verification.platu.stategraph.State; import verification.platu.stategraph.StateGraph; import verification.timed_state_exploration.zone.Analysis_Timed; import verification.timed_state_exploration.zone.StateGraph_timed; public class Project { protected String label; /* 1. Each design unit has an unique label index. * 2. The indices of all design units are sequential starting from 0. 
* */ protected List<StateGraph> designUnitSet; /* The list for timing analysis */ // protected List<StateGraph_timed> designUnitTimedSet; protected LPNTranRelation lpnTranRelation = null; protected CompositionalAnalysis analysis = null; public Project() { this.label = ""; this.designUnitSet = new ArrayList<StateGraph>(1); lpnTranRelation = new LPNTranRelation(this.designUnitSet); } public Project(LhpnFile lpn) { this.label = ""; this.designUnitSet = new ArrayList<StateGraph>(1); StateGraph stateGraph = new StateGraph(lpn); designUnitSet.add(stateGraph); //stateGraph.printStates(); } /** * If the OptionsFlag is false, then this constructor is identical to * Poject(LhpnFile lpn). If the OptionsFlag is true, this constructor uses * StateGraph_timed objects. * * @author Andrew N. Fisher * * @param lpn * The lpn under consideration. * @param OptionsFlag * True for timing analysis and false otherwise. The option should match * Options.getTimingAnalysisFlag(). */ // public Project(LhpnFile lpn, boolean OptionsFlag) // if(Options.getTimingAnalysisFlag()) // this.label = ""; // this.designUnitSet = new ArrayList<StateGraph>(0); // this.designUnitTimedSet = new ArrayList<StateGraph_timed>(1); // StateGraph_timed stategraph = new StateGraph_timed(lpn); // designUnitTimedSet.add(stategraph); // else // this.label = ""; // this.designUnitSet = new ArrayList<StateGraph>(1); // StateGraph stateGraph = new StateGraph(lpn); // designUnitSet.add(stateGraph); public Project(ArrayList<LhpnFile> lpns) { this.label = ""; this.designUnitSet = new ArrayList<StateGraph>(lpns.size()); for (int i=0; i<lpns.size(); i++) { LhpnFile lpn = lpns.get(i); StateGraph stateGraph = new StateGraph(lpn); designUnitSet.add(stateGraph); } } /** * Find the SG for the entire project where each project state is a tuple of * local states * */ public StateGraph[] search() { validateInputs(); // if(Options.getSearchType().equals("compositional")){ // this.analysis = new CompositionalAnalysis(); // 
if(Options.getParallelFlag()){ // this.analysis.parallelCompositionalFindSG(this.designUnitSet); // else{ // this.analysis.findReducedSG(this.designUnitSet); // return; long start = System.currentTimeMillis(); int lpnCnt = designUnitSet.size(); /* Prepare search by placing LPNs in an array in the order of their indices.*/ StateGraph[] sgArray = new StateGraph[lpnCnt]; int idx = 0; for (StateGraph du : designUnitSet) { LhpnFile lpn = du.getLpn(); lpn.setIndex(idx++); sgArray[lpn.getIndex()] = du; } // If timing, then create the sgArray with StateGraph_timed objects. // if(Options.getTimingAnalysisFlag()) // for(StateGraph_timed du : this.designUnitTimedSet) // LhpnFile lpn = du.getLpn(); // lpn.setIndex(idx++); // sgArray[lpn.getIndex()] = du; // Initialize the project state HashMap<String, Integer> varValMap = new HashMap<String, Integer>(); State[] initStateArray = new State[lpnCnt]; for (int index = 0; index < lpnCnt; index++) { LhpnFile curLpn = sgArray[index].getLpn(); StateGraph curSg = sgArray[index]; initStateArray[index] = curSg.getInitState(); //curLpn.getInitState(); int[] curStateVector = initStateArray[index].getVector(); varValMap = curLpn.getAllVarsWithValues(curStateVector); // HashMap<String, String> vars = curLpn.getAllOutputs();//curLpn.getAllOutputs(); // DualHashMap<String, Integer> VarIndexMap = curLpn.getVarIndexMap(); // for(String var : vars.keySet()) { // varValMap.put(var, curStateVector[VarIndexMap.getValue(var)]); } // TODO: (future) Need to adjust the transition vector as well? // Adjust the value of the input variables in LPN in the initial state. // Add the initial states into their respective LPN. for (int index = 0; index < lpnCnt; index++) { StateGraph curSg = sgArray[index]; initStateArray[index].update(curSg, varValMap, curSg.getLpn().getVarIndexMap()); initStateArray[index] = curSg.addState(initStateArray[index]); } // Initialize the zones for the initStateArray, if timining is enabled. 
// if(Options.getTimingAnalysisFlag()) // for(int index =0; index < lpnCnt; index++) // if(sgArray[index] instanceof StateGraph_timed) // if (Options.getTimingAnalysisFlag()) { // new TimingAnalysis(sgArray); // return; // else if(!Options.getTimingAnalysisFlag()) { // Analysis tmp = new Analysis(sgArray, initStateArray, lpnTranRelation, Options.getSearchType()); // // Analysis tmp = new Analysis(lpnList, curStateArray, // // lpnTranRelation, "dfs_por"); // //Analysis tmp = new Analysis(modArray, initStateArray, lpnTranRelation, "dfs"); // //Analysis tmp = new Analysis(modArray, initStateArray, lpnTranRelation, "dfs_noDisabling"); // else { // return; /* Entry point for the timed analysis. */ // if(Options.getTimingAnalysisFlag()) // Analysis_Timed dfsTimedStateExploration = new Analysis_Timed(sgArray); // dfsTimedStateExploration.search_dfs_timed(sgArray, initStateArray); // return new StateGraph[0]; Analysis dfsStateExploration = new Analysis(sgArray); StateGraph[] stateGraphArray = dfsStateExploration.search_dfs(sgArray, initStateArray); long elapsedTimeMillis = System.currentTimeMillis() - start; float elapsedTimeSec = elapsedTimeMillis/1000F; System.out.println("---> total runtime: " + elapsedTimeSec + " sec\n"); return stateGraphArray; } public Set<LPN> readLpn(final String src_file) { Set<LPN> lpnSet = null; try { if (!src_file.endsWith(".lpn")) { System.err.println("Invalid file extention"); System.exit(1); } ANTLRFileStream input = new ANTLRFileStream(src_file); PlatuGrammarLexer lexer = new PlatuGrammarLexer(input); TokenStream tokenStream = new CommonTokenStream(lexer); PlatuGrammarParser parser = new PlatuGrammarParser(tokenStream); lpnSet = parser.lpn(this); } catch (Exception ex) { Logger.getLogger(Project.class.getName()).log(Level.SEVERE, null, ex); } return lpnSet; } /** * Find the SG for the entire project where each project state is a tuple of * local states. Use partial order reduction during dfs search. 
* @param cycleClosingMthdIndex * @param outputDotFile * @return * */ public StateGraph searchWithPOR(int cycleClosingMthdIndex) { validateInputs(); // if(Options.getSearchType().equals("compositional")){ // this.analysis = new CompositionalAnalysis(); // if(Options.getParallelFlag()){ // this.analysis.parallelCompositionalFindSG(this.designUnitSet); // else{ // this.analysis.findReducedSG(this.designUnitSet); // return; long start = System.currentTimeMillis(); int lpnCnt = designUnitSet.size(); /* Prepare search by placing LPNs in an array in the order of their indices.*/ StateGraph[] sgArray = new StateGraph[lpnCnt]; int idx = 0; for (StateGraph du : designUnitSet) { LhpnFile lpn = du.getLpn(); lpn.setIndex(idx++); sgArray[lpn.getIndex()] = du; } // Initialize the project state HashMap<String, Integer> varValMap = new HashMap<String, Integer>(); State[] initStateArray = new State[lpnCnt]; for (int index = 0; index < lpnCnt; index++) { LhpnFile curLpn = sgArray[index].getLpn(); StateGraph curSg = sgArray[index]; initStateArray[index] = curSg.getInitState(); //curLpn.getInitState(); int[] curStateVector = initStateArray[index].getVector(); varValMap = curLpn.getAllVarsWithValues(curStateVector); // DualHashMap<String, Integer> VarIndexMap = curLpn.getVarIndexMap(); // HashMap<String, String> outVars = curLpn.getAllOutputs(); // for(String var : outVars.keySet()) { // varValMap.put(var, curStateVector[VarIndexMap.getValue(var)]); } // Adjust the value of the input variables in LPN in the initial state. // Add the initial states into their respective LPN. 
for (int index = 0; index < lpnCnt; index++) { StateGraph curLpn = sgArray[index]; initStateArray[index].update(curLpn, varValMap, curLpn.getLpn().getVarIndexMap()); initStateArray[index] = curLpn.addState(initStateArray[index]); } StateGraph stateGraph; Analysis dfsStateExplorationWithPOR = new Analysis(sgArray); // cycleClosingMthdIndex: 0 = Use sticky transitions // 1 = Use behavioral analysis // 2 = Use behavioral analysis with state trace-back // 3 = No cycle closing if (cycleClosingMthdIndex == 0 || cycleClosingMthdIndex == 3) stateGraph = dfsStateExplorationWithPOR.search_dfsPOR(sgArray, initStateArray, cycleClosingMthdIndex); else stateGraph = dfsStateExplorationWithPOR.search_dfsPORrefinedCycleRule(sgArray, initStateArray, cycleClosingMthdIndex); long elapsedTimeMillis = System.currentTimeMillis() - start; float elapsedTimeSec = elapsedTimeMillis/1000F; System.out.println("---> total runtime: " + elapsedTimeSec + " sec\n"); return stateGraph; } public void readLpn(List<String> fileList) { for(String srcFile : fileList){ if (!srcFile.endsWith(".lpn")) { System.err.println("Invalid file extention"); System.exit(1); } ANTLRFileStream input = null; try { input = new ANTLRFileStream(srcFile); } catch (IOException e) { System.err.println("error: error reading " + srcFile); System.exit(1); } PlatuInstLexer lexer = new PlatuInstLexer(input); TokenStream tokenStream = new CommonTokenStream(lexer); PlatuInstParser parser = new PlatuInstParser(tokenStream); try { parser.parseLpnFile(this); } catch (RecognitionException e) { System.err.println("error: error parsing " + srcFile); System.exit(1); } } PlatuInstParser.includeSet.removeAll(fileList); for(String srcFile : PlatuInstParser.includeSet){ if (!srcFile.endsWith(".lpn")) { System.err.println("Invalid file extention"); System.exit(1); } ANTLRFileStream input = null; try { input = new ANTLRFileStream(srcFile); } catch (IOException e) { System.err.println("error: error reading " + srcFile); System.exit(1); } 
PlatuInstLexer lexer = new PlatuInstLexer(input); TokenStream tokenStream = new CommonTokenStream(lexer); PlatuInstParser parser = new PlatuInstParser(tokenStream); try { parser.parseLpnFile(this); } catch (RecognitionException e) { System.err.println("error: error parsing " + srcFile); System.exit(1); } } verification.platu.lpn.LPN.nextID = 1; HashMap<String, LPN> instanceMap = new HashMap<String, LPN>(); for(Instance inst : PlatuInstParser.InstanceList){ LPN lpn = PlatuInstParser.LpnMap.get(inst.getLpnLabel()); if(lpn == null){ System.err.println("error: class " + inst.getLpnLabel() + " does not exist"); System.exit(1); } LPN instLpn = lpn.instantiate(inst.getName()); instanceMap.put(instLpn.getLabel(), instLpn); this.designUnitSet.add(instLpn.getStateGraph()); } // TODO: (irrelevant) Is this really needed??? /* for(StateGraph sg : this.designUnitSet){ sg.getLpn().setGlobals(this.designUnitSet); } */ for(Instance inst : PlatuInstParser.InstanceList){ LPN dstLpn = instanceMap.get(inst.getName()); if(dstLpn == null){ System.err.println("error: instance " + inst.getName() + " does not exist"); System.exit(1); } List<String> argumentList = dstLpn.getArgumentList(); List<String> varList = inst.getVariableList(); List<String> modList = inst.getModuleList(); if(argumentList.size() != varList.size()){ System.err.println("error: incompatible number of arguments for instance " + inst.getName()); System.exit(1); } for(int i = 0; i < argumentList.size(); i++){ LPN srcLpn = instanceMap.get(modList.get(i)); if(srcLpn == null){ System.err.println("error: instance " + modList.get(i) + " does not exist"); System.exit(1); } String outputVar = varList.get(i); String inputVar = argumentList.get(i); srcLpn.connect(outputVar, dstLpn, inputVar); } } } /** * @return the designUnitSet */ public List<StateGraph> getDesignUnitSet() { return designUnitSet; } /** * Validates each lpn's input variables are driven by another lpn's output. 
*/ protected void validateInputs(){ // Changed protection level. ANF boolean error = false; for(StateGraph sg : designUnitSet){ for(String input : sg.getLpn().getAllInputs().keySet()){ boolean connected = false; for(StateGraph sg2 : designUnitSet){ if(sg == sg2) continue; if(sg2.getLpn().getAllOutputs().keySet().contains(input)){ connected = true; break; } } if(!connected){ error = true; System.err.println("error in lpn " + sg.getLpn().getLabel() + ": input variable '" + input + "' is not dependent on an output"); } } } if(error){ System.exit(1); } } }
package Freeways;

import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Vector;

/**
 * Freeway model for Interstate 10. Loads its waypoints and on/off ramps from
 * text files under {@code waypoints/} and {@code ramps/}.
 */
public class Interstate10 extends Freeway {
	// NOTE(review): static collections are reassigned by every constructor call,
	// so constructing a second Interstate10 discards previously loaded data —
	// confirm this is intended before making these instance fields.
	public static Vector<Ramp> ramps;
	public static Vector<Waypoint> waypoints;

	public Interstate10() {
		ramps = new Vector<Ramp>();
		waypoints = new Vector<Waypoint>();
	}

	/**
	 * Reads {@code waypoints/10WayPoints.txt} ("x,y" per line after a header)
	 * into {@link #waypoints}, then {@code ramps/10Ramps.txt} (orientation
	 * lines "North"/"South"/"East"/"West" followed by "x,y|name" entries) into
	 * {@link #ramps}. Exceptions are logged and swallowed, as before.
	 *
	 * FIX: the readers were only closed on the happy path; a parse error or I/O
	 * error mid-file leaked the file handles. try-with-resources now guarantees
	 * both readers are closed.
	 */
	public void addFreewayPoints() {
		try {
			String headerText;
			// Waypoints file: header line, then "x,y" coordinate lines.
			try (BufferedReader br = new BufferedReader(new FileReader("waypoints/10WayPoints.txt"))) {
				String line;
				headerText = br.readLine();
				System.out.println("Adding points on " + headerText); // Prints out name of highway
				while ((line = br.readLine()) != null) {
					final String[] coordinatesArray = line.split(",");
					final double x = Double.parseDouble(coordinatesArray[0]);
					final double y = Double.parseDouble(coordinatesArray[1]);
					waypoints.add(new Waypoint(x, y));
				}
			}

			// Ramps file: header line, orientation markers, then "x,y|name" lines.
			try (BufferedReader br2 = new BufferedReader(new FileReader("ramps/10Ramps.txt"))) {
				br2.readLine(); // skip header
				String line;
				String orientation = "";
				System.out.println("Adding ramps on " + headerText);
				while ((line = br2.readLine()) != null) {
					if (line.equals("North") || line.equals("South") || line.equals("East") || line.equals("West")) {
						orientation = line;
						System.out.println("Setting orientation " + orientation);
					} else {
						final double rampX = Double.parseDouble(line.substring(0, line.indexOf(',')));
						final double rampY = Double.parseDouble(line.substring(line.indexOf(',') + 1, line.indexOf('|')));
						final String name = line.substring(line.indexOf('|') + 1);
						ramps.add(new Ramp(rampX, rampY, orientation, name));
					}
				}
			}
		} catch (NumberFormatException nfe) {
			nfe.printStackTrace();
		} catch (FileNotFoundException e) {
			e.printStackTrace();
		} catch (IOException e) {
			e.printStackTrace();
		}
	}
}
package me.capit.mechanization;

import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.logging.Logger;

import me.capit.mechanization.exception.MechaException;
import me.capit.mechanization.factory.MechaFactory;
import me.capit.mechanization.item.MechaItem;
import me.capit.mechanization.recipe.MechaFactoryRecipe;
import me.capit.xmpapi.XMLPlugin;

import org.bukkit.ChatColor;
import org.bukkit.command.ConsoleCommandSender;
import org.bukkit.configuration.serialization.ConfigurationSerialization;
import org.bukkit.plugin.java.JavaPlugin;
import org.jdom2.Document;

/**
 * Plugin entry point. On enable, loads items, recipes and factories from
 * their XML definition files and registers them in the static lookup maps.
 */
public class Mechanization extends JavaPlugin {
	public static final HashMap<String, MechaItem> items = new HashMap<String, MechaItem>();
	public static final HashMap<String, MechaFactoryRecipe> recipes = new HashMap<String, MechaFactoryRecipe>();
	public static final HashMap<String, MechaFactory> factories = new HashMap<String, MechaFactory>();
	public static Logger logger;
	public static ConsoleCommandSender console;
	public static File pluginDir;
	public static Document factoriesDoc, itemsDoc, recipesDoc;
	public static Mechanization plugin;

	static {
		ConfigurationSerialization.registerClass(Position3.class);
	}

	@Override
	public void onEnable() {
		logger = getLogger();
		console = getServer().getConsoleSender();
		plugin = this;
		// FIX: this line was garbled (a sendMessage call nested inside another
		// sendMessage's string literal); reconstructed as a single status message.
		console.sendMessage(ChatColor.WHITE+"Initializing directories and loading defaults...");
		pluginDir = getDataFolder();
		saveDefaultResource("items.xml");
		saveDefaultResource("recipes.xml");
		saveDefaultResource("factories.xml");

		// Items: parse items.xml and register each item by name.
		try {
			itemsDoc = XMLPlugin.read(new File(getDataFolder(), "items.xml"));
			console.sendMessage(ChatColor.WHITE+"Loading items...");
			for (org.jdom2.Element element : itemsDoc.getRootElement().getChildren()){
				try {
					MechaItem mi = new MechaItem(element);
					items.put(mi.getName(), mi);
					console.sendMessage(ChatColor.WHITE+" Loaded "+mi.getDisplayName());
				} catch (MechaException e){
					e.printStackTrace();
				}
			}
		} catch (IOException | org.jdom2.JDOMException e){
			console.sendMessage(ChatColor.RED+"FAILED to load items!");
			e.printStackTrace();
		}

		// Recipes: parse recipes.xml and register each recipe by name.
		try {
			recipesDoc = XMLPlugin.read(new File(getDataFolder(), "recipes.xml"));
			console.sendMessage(ChatColor.WHITE+"Loading recipes...");
			for (org.jdom2.Element element : recipesDoc.getRootElement().getChildren()){
				try {
					MechaFactoryRecipe mi = new MechaFactoryRecipe(element);
					recipes.put(mi.getName(), mi);
					console.sendMessage(ChatColor.WHITE+" Loaded "+mi.getDisplayName());
				} catch (MechaException e){
					e.printStackTrace();
				}
			}
		} catch (IOException | org.jdom2.JDOMException e){
			console.sendMessage(ChatColor.RED+"FAILED to load recipes!");
			e.printStackTrace();
		}

		// Factories: parse factories.xml and register each factory by name.
		try {
			factoriesDoc = XMLPlugin.read(new File(getDataFolder(), "factories.xml"));
			console.sendMessage(ChatColor.WHITE+"Loading factories...");
			for (org.jdom2.Element element : factoriesDoc.getRootElement().getChildren()){
				try {
					MechaFactory mi = new MechaFactory(element);
					// FIX: factories were constructed but never registered, leaving
					// the lookup map permanently empty. Register them like items and
					// recipes. (Assumes MechaFactory exposes getName() like its
					// siblings — confirm against the MechaFactory class.)
					factories.put(mi.getName(), mi);
					console.sendMessage(ChatColor.WHITE+" Loaded "+ChatColor.translateAlternateColorCodes('&', mi.getDisplayName()));
				} catch (MechaException e){
					e.printStackTrace();
				}
			}
		} catch (IOException | org.jdom2.JDOMException e){
			console.sendMessage(ChatColor.RED+"FAILED to load factories!");
			e.printStackTrace();
		}
	}

	/** Copies the bundled resource into the data folder unless it already exists. */
	public void saveDefaultResource(String resource){
		if (!new File(getDataFolder(), resource).exists()) saveResource(resource,false);
	}
}
package me.fatpigsarefat.quests.obj;

import me.fatpigsarefat.quests.Quests;
import org.bukkit.ChatColor;

/**
 * Keys for configurable plugin messages and titles. Each constant carries the
 * config path its text lives under; {@link #getMessage()} resolves that path
 * against the plugin configuration.
 */
public enum Messages {

    QUEST_START("messages.quest-start"),
    QUEST_COMPLETE("messages.quest-complete"),
    QUEST_START_LIMIT("messages.quest-start-limit"),
    QUEST_START_DISABLED("messages.quest-start-disabled"),
    QUEST_START_LOCKED("messages.quest-start-locked"),
    QUEST_START_COOLDOWN("messages.quest-start-cooldown"),
    QUEST_UPDATER("messages.quest-updater"),
    COMMAND_QUEST_START_DOESNTEXIST("messages.command-quest-start-doesntexist"),
    COMMAND_QUEST_OPENCATEGORY_ADMIN_SUCCESS("messages.command-quest-opencategory-admin-success"),
    COMMAND_QUEST_OPENQUESTS_ADMIN_SUCCESS("messages.command-quest-openquests-admin-success"),
    COMMAND_QUEST_ADMIN_PLAYERNOTFOUND("messages.command-quest-admin-playernotfound"),
    COMMAND_CATEGORY_OPEN_DOESNTEXIST("messages.command-category-open-doesntexist"),
    COMMAND_CATEGORY_OPEN_DISABLED("messages.command-category-open-disabled"),
    COMMAND_QUEST_START_ADMIN_SUCCESS("messages.command-quest-start-admin-success"),
    COMMAND_TASKVIEW_ADMIN_FAIL("messages.command-taskview-admin-fail"),
    COMMAND_QUEST_START_ADMIN_FAIL("messages.command-quest-start-admin-fail"),
    TITLE_QUEST_START_TITLE("titles.quest-start.title"),
    TITLE_QUEST_START_SUBTITLE("titles.quest-start.subtitle"),
    TITLE_QUEST_COMPLETE_TITLE("titles.quest-complete.title"),
    TITLE_QUEST_COMPLETE_SUBTITLE("titles.quest-complete.subtitle");

    private final String path;

    Messages(String path) {
        this.path = path;
    }

    /**
     * Resolves the configured text for this message key.
     *
     * @return the configured string, or the raw config path when no string
     *         value (or default) is set for it
     */
    public String getMessage() {
        final String configured = Quests.getInstance().getConfig().getString(path);
        return configured != null ? configured : path;
    }
}
package me.vertretungsplan.parser; import io.jsonwebtoken.Claims; import io.jsonwebtoken.Jwts; import io.jsonwebtoken.SignatureException; import me.vertretungsplan.exception.CredentialInvalidException; import me.vertretungsplan.objects.*; import me.vertretungsplan.objects.credential.UserPasswordCredential; import org.apache.commons.codec.binary.Base64; import org.apache.http.client.HttpResponseException; import org.apache.http.entity.ContentType; import org.joda.time.LocalDate; import org.joda.time.LocalDateTime; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import java.io.IOException; import java.util.*; import java.util.regex.Matcher; import java.util.regex.Pattern; public class IphisParser extends BaseParser { private static final String PARAM_URL = "url"; private static final String PARAM_JWT_KEY = "jwt_key"; private static final String PARAM_KUERZEL = "kuerzel"; /** * URL of given IPHIS instance */ private String api; /** * Shortcode for school */ private String kuerzel; private String jwt_key; private String website; /** * array of grades/classes retrieved from the api */ private JSONArray grades; /** * array of teachers retrieved from the api */ private JSONArray teachers; /** * array of messages retrieved from the api */ private JSONArray messages; /** * hold the Authentication Token (JWT) */ private String authToken; /** * hold the timestamp of the last schedule-update */ private LocalDateTime lastUpdate; public IphisParser(SubstitutionScheduleData scheduleData, CookieProvider cookieProvider) { super(scheduleData, cookieProvider); JSONObject data = scheduleData.getData(); try { api = "https://" + data.getString(PARAM_URL) + "/remote/vertretungsplan/ssp"; kuerzel = data.getString(PARAM_KUERZEL); jwt_key = data.getString(PARAM_JWT_KEY); } catch (JSONException e) { e.printStackTrace(); } } public SubstitutionSchedule getSubstitutionSchedule() throws IOException, JSONException, CredentialInvalidException { final 
SubstitutionSchedule substitutionSchedule = SubstitutionSchedule.fromData(scheduleData); if (login()) { getGrades(); getTeachers(); getMessages(); final JSONArray changes = getChanges(); substitutionSchedule.setClasses(getAllClasses()); substitutionSchedule.setTeachers(getAllTeachers()); substitutionSchedule.setWebsite(website); parseIphis(substitutionSchedule, changes, grades, teachers, messages); } return substitutionSchedule; } @Override public LocalDateTime getLastChange() throws IOException, JSONException, CredentialInvalidException { if (lastUpdate == null) { login(); } return lastUpdate; } private Boolean login() throws CredentialInvalidException, IOException { final UserPasswordCredential userPasswordCredential = (UserPasswordCredential) credential; final String username = userPasswordCredential.getUsername(); final String password = userPasswordCredential.getPassword(); JSONObject payload = new JSONObject(); try { payload.put("school", kuerzel); payload.put("user", username); payload.put("type", scheduleData.getType()); payload.put("password", password); } catch (JSONException e) { e.printStackTrace(); } httpPost(api + "/login", "UTF-8", payload.toString(), ContentType.APPLICATION_JSON); final String httpResponse = httpPost(api + "/login", "UTF-8", payload.toString(), ContentType.APPLICATION_JSON); final JSONObject token; try { token = new JSONObject(httpResponse); final String key = Base64.encodeBase64String(jwt_key.getBytes()); final Claims jwtToken = Jwts.parser().setSigningKey(key) .parseClaimsJws(token.getString("token")).getBody(); assert jwtToken.getSubject().equals("vertretungsplan.me"); authToken = token.getString("token"); website = jwtToken.getIssuer(); lastUpdate = new LocalDateTime(token.getLong("stand") * 1000); } catch (SignatureException | JSONException e) { throw new CredentialInvalidException(); } return true; } /** * Returns a JSONArray with all changes from now to in one week. 
*/ private JSONArray getChanges() throws IOException, CredentialInvalidException { // Date (or alias of date) when the changes start final String startBy = LocalDate.now().toString(); // Date (or alias of date) when the changes end final String endBy = LocalDate.now().plusWeeks(1).toString(); final String url = api + "/vertretung/von/" + startBy + "/bis/" + endBy; return getJSONArray(url); } /** * Returns a JSONArray with all messages. */ private void getMessages() throws IOException, JSONException, CredentialInvalidException { if (messages == null) { final String url = api + "/nachrichten"; messages = getJSONArray(url); } } /** * Returns a JSONArray with all grades. */ private void getGrades() throws IOException, JSONException, CredentialInvalidException { if (grades == null) { final String url = api + "/klassen"; grades = getJSONArray(url); } } /** * Returns a JSONArray with all teachers. */ private void getTeachers() throws IOException, CredentialInvalidException { if (teachers == null) { final String url = api + "/lehrer"; teachers = getJSONArray(url); } } private JSONArray getJSONArray(String url) throws IOException, CredentialInvalidException { try { Map<String, String> headers = new HashMap<>(); headers.put("Authorization", "Bearer " + authToken); headers.put("Content-Type", "application/json"); headers.put("Accept", "application/json"); final String httpResponse = httpGet(url, "UTF-8", headers); return new JSONArray(httpResponse); } catch (HttpResponseException httpResponseException) { if (httpResponseException.getStatusCode() == 404) { return null; } throw httpResponseException; } catch (JSONException e) { return new JSONArray(); } } void parseIphis(SubstitutionSchedule substitutionSchedule, JSONArray changes, JSONArray grades, JSONArray teachers, JSONArray messages) throws IOException, JSONException { if (changes == null) { return; } // Link course IDs to their names HashMap<String, String> coursesHashMap = null; if (grades != null) { coursesHashMap = new 
HashMap<>(); for (int i = 0; i < grades.length(); i++) { JSONObject grade = grades.getJSONObject(i); coursesHashMap.put(grade.getString("id"), grade.getString("name")); } } // Link teacher IDs to their names HashMap<String, String> teachersHashMap = null; if (teachers != null) { teachersHashMap = new HashMap<>(); for (int i = 0; i < teachers.length(); i++) { JSONObject teacher = teachers.getJSONObject(i); teachersHashMap.put(teacher.getString("id"), teacher.getString("name")); } } // Add Messages List<AdditionalInfo> infos = new ArrayList<>(messages.length()); for (int i = 0; i < messages.length(); i++) { JSONObject message = messages.getJSONObject(i); AdditionalInfo info = new AdditionalInfo(); info.setHasInformation(message.getBoolean("notification")); info.setTitle(message.getString("titel").trim()); info.setText(message.getString("nachricht").trim()); info.setFromSchedule(true); infos.add(info); } substitutionSchedule.getAdditionalInfos().addAll(infos); substitutionSchedule.setLastChange(lastUpdate); // Add changes to SubstitutionSchedule LocalDate currentDate = LocalDate.now(); SubstitutionScheduleDay substitutionScheduleDay = new SubstitutionScheduleDay(); substitutionScheduleDay.setDate(currentDate); for (int i = 0; i < changes.length(); i++) { final JSONObject change = changes.getJSONObject(i); final LocalDate substitutionDate = new LocalDate(change.getString("datum")); // If starting date of change does not equal date of SubstitutionScheduleDay if (!substitutionDate.isEqual(currentDate)) { if (!substitutionScheduleDay.getSubstitutions().isEmpty() || !substitutionScheduleDay.getMessages().isEmpty()) { substitutionSchedule.addDay(substitutionScheduleDay); } substitutionScheduleDay = new SubstitutionScheduleDay(); substitutionScheduleDay.setDate(substitutionDate); currentDate = substitutionDate; } if (change.getInt("id") > 0) { final Substitution substitution = getSubstitution(change, coursesHashMap, teachersHashMap); 
substitutionScheduleDay.addSubstitution(substitution); } else if (!change.optString("nachricht").isEmpty()) { substitutionScheduleDay.addMessage(change.optString("nachricht")); } } substitutionSchedule.addDay(substitutionScheduleDay); } private String[] getSQLArray(String data) { String[] retArray = {}; Pattern pattern = Pattern.compile("\\{(.*?)}"); Matcher matcher = pattern.matcher(data); if (matcher.find()) { retArray = matcher.group(1).split(","); } return retArray; } private Substitution getSubstitution(JSONObject change, HashMap<String, String> gradesHashMap, HashMap<String, String> teachersHashMap) throws IOException, JSONException { final Substitution substitution = new Substitution(); // Set class(es) final String[] classIds = getSQLArray(change.getString("id_klasse")); if (classIds.length > 0) { if (gradesHashMap == null) { throw new IOException("Change references a grade but grades are empty."); } final HashSet<String> classes = new HashSet<>(); for (String classId : classIds) { if (!classId.toLowerCase().equals("null")) { if (gradesHashMap.containsKey(classId)) { classes.add(gradesHashMap.get(classId)); } else { throw new IllegalArgumentException("unknown class ID " + classId + " referenced"); } } } substitution.setClasses(classes); } // Set type final String type = change.getString("aenderungsgrund").trim(); if (!type.isEmpty() && !type.toLowerCase().equals("null")) { substitution.setType(type); } else { substitution.setType("Vertretung"); } // Set color substitution.setColor(colorProvider.getColor(type)); // Set covering teacher final String[] coveringTeacherIds = getSQLArray(change.getString("id_person_verantwortlich")); if (coveringTeacherIds.length > 0) { if (teachersHashMap == null) { throw new IOException("Change references a covering teacher but teachers are empty."); } final HashSet<String> teachers = new HashSet<>(); for (String coveringTeacherId : coveringTeacherIds) { if (!coveringTeacherId.toLowerCase().equals("null") && 
teachersHashMap.get(coveringTeacherId) != null) { teachers.add(teachersHashMap.get(coveringTeacherId)); } } substitution.setTeachers(teachers); } // Set teacher final String[] teacherIds = getSQLArray(change.getString("id_person_verantwortlich_orig")); final HashSet<String> coveringTeachers = new HashSet<>(); if (teacherIds.length > 0) { if (teachersHashMap == null) { throw new IOException("Change references a teacher but teachers are empty."); } for (String coveringTeacherId : coveringTeacherIds) { if (!coveringTeacherId.toLowerCase().equals("null") && teachersHashMap.get(coveringTeacherId) != null) { coveringTeachers.add(teachersHashMap.get(coveringTeacherId)); } } substitution.setPreviousTeachers(coveringTeachers); } //Set room if (!change.optString("raum").isEmpty() && !change.optString("raum").toLowerCase().equals("null")) { substitution.setRoom(change.optString("raum")); } else if (!change.optString("raum_orig").isEmpty() && !change.optString("raum_orig").toLowerCase().equals("null")) { substitution.setRoom(change.optString("raum_orig")); } if (!change.optString("raum_orig").isEmpty() && !change.optString("raum_orig").toLowerCase().equals("null")) { substitution.setPreviousRoom(change.optString("raum_orig")); } else if (!change.optString("raum").isEmpty() && !change.optString("raum").toLowerCase().equals("null")) { substitution.setPreviousRoom(change.optString("raum")); } //Set subject if (!change.optString("fach").isEmpty() && !change.optString("fach").toLowerCase().equals("null")) { substitution.setSubject(change.optString("fach")); } if (!change.optString("fach_orig").isEmpty() && !change.optString("fach_orig").toLowerCase().equals("null")) { substitution.setPreviousSubject(change.optString("fach_orig")); } //Set description if (!change.getString("information").isEmpty() && !change.getString("information").toLowerCase().equals("null")) { substitution.setDesc(change.getString("information").trim()); } final String startingHour = 
change.getString("zeit_von").replaceFirst("^0+(?!$)", ""); final String endingHour = change.getString("zeit_bis").replaceFirst("^0+(?!$)", ""); if (!startingHour.equals("") || !endingHour.equals("")) { String lesson = ""; if (!startingHour.equals("") && endingHour.equals("")) { lesson = "Ab " + startingHour; } if (startingHour.equals("") && !endingHour.equals("")) { lesson = "Bis " + endingHour; } if (!startingHour.equals("") && !endingHour.equals("")) { lesson = startingHour + " - " + endingHour; } if (startingHour.equals(endingHour)) { lesson = startingHour; } substitution.setLesson(lesson); } return substitution; } @Override public List<String> getAllClasses() throws IOException, JSONException, CredentialInvalidException { final List<String> classesList = new ArrayList<>(); if (grades == null) { return null; } for (int i = 0; i < grades.length(); i++) { final JSONObject grade = grades.getJSONObject(i); classesList.add(grade.getString("name")); } return classesList; } @Override public List<String> getAllTeachers() throws IOException, JSONException, CredentialInvalidException { final List<String> teachersList = new ArrayList<>(); if (teachers == null) { return null; } for (int i = 0; i < teachers.length(); i++) { final JSONObject teacher = teachers.getJSONObject(i); teachersList.add(teacher.getString("name")); } return teachersList; } }
package net.coobird.thumbnailator; import java.awt.Dimension; import java.awt.RenderingHints; import java.awt.image.BufferedImage; import java.io.File; import java.io.IOException; import java.io.OutputStream; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import net.coobird.thumbnailator.filters.ImageFilter; import net.coobird.thumbnailator.filters.Pipeline; import net.coobird.thumbnailator.filters.Rotation; import net.coobird.thumbnailator.filters.Watermark; import net.coobird.thumbnailator.geometry.Position; import net.coobird.thumbnailator.geometry.Positions; import net.coobird.thumbnailator.name.Rename; import net.coobird.thumbnailator.resizers.BicubicResizer; import net.coobird.thumbnailator.resizers.BilinearResizer; import net.coobird.thumbnailator.resizers.ProgressiveBilinearResizer; import net.coobird.thumbnailator.resizers.Resizer; import net.coobird.thumbnailator.resizers.Resizers; import net.coobird.thumbnailator.resizers.configurations.AlphaInterpolation; import net.coobird.thumbnailator.resizers.configurations.Antialiasing; import net.coobird.thumbnailator.resizers.configurations.Dithering; import net.coobird.thumbnailator.resizers.configurations.Rendering; import net.coobird.thumbnailator.resizers.configurations.ScalingMode; import net.coobird.thumbnailator.tasks.FileThumbnailTask; import net.coobird.thumbnailator.tasks.SourceSinkThumbnailTask; import net.coobird.thumbnailator.tasks.io.BufferedImageSink; import net.coobird.thumbnailator.tasks.io.BufferedImageSource; import net.coobird.thumbnailator.tasks.io.FileImageSink; import net.coobird.thumbnailator.tasks.io.FileImageSource; import net.coobird.thumbnailator.tasks.io.ImageSource; import net.coobird.thumbnailator.tasks.io.OutputStreamImageSink; /** * This class provides a fluent interface to create thumbnails. 
* <p>
 * <strong>Usage:</strong> The following example creates thumbnails from the
 * files in a directory, resizing them to a maximum of 200 by 200 pixels while
 * preserving the aspect ratio of the original, then saves the thumbnails as
 * JPEG images whose file names are prefixed with {@code thumbnail.}:
 * <pre>
Thumbnails.of(directory.listFiles())
    .size(200, 200)
    .keepAspectRatio(true)
    .outputFormat("jpeg")
    .asFiles(Rename.PREFIX_DOT_THUMBNAIL);
 * </pre>
 *
 * @author coobird
 */
public final class Thumbnails {
    /** This class is not intended to be instantiated. */
    private Thumbnails() {}

    /**
     * Validates the destination dimensions.
     *
     * @param width     Destination width in pixels.
     * @param height    Destination height in pixels.
     * @throws IllegalArgumentException If either dimension is zero or negative.
     */
    private static void validateDimensions(int width, int height) {
        if (width <= 0 && height <= 0) {
            throw new IllegalArgumentException(
                    "Destination image dimensions must not be less than " +
                    "0 pixels."
            );
        } else if (width <= 0 || height <= 0) {
            // Use "<= 0" so a negative width is reported as a bad width;
            // the previous "width == 0" check misattributed negative widths
            // to the height.
            String dimension = width <= 0 ? "width" : "height";
            throw new IllegalArgumentException(
                    "Destination image " + dimension + " must not be " +
                    "less than or equal to 0 pixels."
            );
        }
    }

    /** Throws a {@link NullPointerException} with {@code message} if {@code o} is null. */
    private static void checkForNull(Object o, String message) {
        if (o == null) {
            throw new NullPointerException(message);
        }
    }

    /** Throws an {@link IllegalArgumentException} with {@code message} if the array is empty. */
    private static void checkForEmpty(Object[] o, String message) {
        if (o.length == 0) {
            throw new IllegalArgumentException(message);
        }
    }

    /** Throws an {@link IllegalArgumentException} with {@code message} if the collection is empty. */
    private static void checkForEmpty(Collection<?> o, String message) {
        if (o.isEmpty()) {
            throw new IllegalArgumentException(message);
        }
    }

    /**
     * Begins building thumbnails for the images at the given file paths.
     *
     * @param files     File paths of the original images.
     * @return A {@link Builder} for the given images.
     */
    public static Builder<File> of(String... files) {
        checkForNull(files, "Cannot specify null for input files.");
        checkForEmpty(files, "Cannot specify an empty array for input files.");
        return Builder.of(files);
    }

    /**
     * Begins building thumbnails for the given image files.
     *
     * @param files     Files of the original images.
     * @return A {@link Builder} for the given images.
     */
    public static Builder<File> of(File... files) {
        checkForNull(files, "Cannot specify null for input files.");
        checkForEmpty(files, "Cannot specify an empty array for input files.");
        return Builder.of(files);
    }

    /**
     * Begins building thumbnails for the given in-memory images.
     *
     * @param images    The original images.
     * @return A {@link Builder} for the given images.
     */
    public static Builder<BufferedImage> of(BufferedImage... images) {
        checkForNull(images, "Cannot specify null for images.");
        checkForEmpty(images, "Cannot specify an empty array for images.");
        return Builder.of(images);
    }

    /**
     * Begins building thumbnails for the images at the given file paths.
     *
     * @param files     Collection of file paths of the original images.
     * @return A {@link Builder} for the given images.
     */
    public static Builder<File> fromFilenames(Collection<String> files) {
        checkForNull(files, "Cannot specify null for input files.");
        checkForEmpty(files, "Cannot specify an empty collection for input files.");
        return of(files.toArray(new String[files.size()]));
    }

    /**
     * Begins building thumbnails for the given image files.
     *
     * @param files     Collection of files of the original images.
     * @return A {@link Builder} for the given images.
     */
    public static Builder<File> fromFiles(Collection<File> files) {
        checkForNull(files, "Cannot specify null for input files.");
        checkForEmpty(files, "Cannot specify an empty collection for input files.");
        return of(files.toArray(new File[files.size()]));
    }

    /**
     * Begins building thumbnails for the given in-memory images.
     *
     * @param images    Collection of the original images.
     * @return A {@link Builder} for the given images.
     */
    public static Builder<BufferedImage> fromImages(Collection<BufferedImage> images) {
        checkForNull(images, "Cannot specify null for images.");
        checkForEmpty(images, "Cannot specify an empty collection for images.");
        return of(images.toArray(new BufferedImage[images.size()]));
    }

    /**
     * A builder interface for Thumbnailator.
     * <p>
     * An instance of this class is obtained by calling one of:
     * <ul>
     * <li>{@link Thumbnails#of(BufferedImage...)}</li>
     * <li>{@link Thumbnails#of(File...)}</li>
     * <li>{@link Thumbnails#of(String...)}</li>
     * <li>{@link Thumbnails#fromImages(Collection)}</li>
     * <li>{@link Thumbnails#fromFiles(Collection)}</li>
     * <li>{@link Thumbnails#fromFilenames(Collection)}</li>
     * </ul>
     *
     * @author coobird
     */
    public static class Builder<T> {
        /** The sources from which thumbnails will be generated. */
        private final List<ImageSource<T>> sources;

        private Builder(List<ImageSource<T>> sources) {
            this.sources = sources;
            // Redundant with the instance initializer below (which runs before
            // this constructor body), but kept for compatibility.
            statusMap.put(Properties.OUTPUT_FORMAT, Status.OPTIONAL);
        }

        private static Builder<File> of(String... filenames) {
            List<ImageSource<File>> sources = new ArrayList<ImageSource<File>>();
            for (String f : filenames) {
                sources.add(new FileImageSource(f));
            }
            return new Builder<File>(sources);
        }

        private static Builder<File> of(File... files) {
            List<ImageSource<File>> sources = new ArrayList<ImageSource<File>>();
            for (File f : files) {
                sources.add(new FileImageSource(f));
            }
            return new Builder<File>(sources);
        }

        private static Builder<BufferedImage> of(BufferedImage... images) {
            List<ImageSource<BufferedImage>> sources = new ArrayList<ImageSource<BufferedImage>>();
            for (BufferedImage img : images) {
                sources.add(new BufferedImageSource(img));
            }
            return new Builder<BufferedImage>(sources);
        }

        /**
         * Lazily creates one thumbnail per source as the iterator advances.
         * An {@link IOException} during processing yields {@code null} for
         * that element.
         */
        private final class BufferedImageIterable implements Iterable<BufferedImage> {
            public Iterator<BufferedImage> iterator() {
                return new Iterator<BufferedImage>() {
                    Iterator<ImageSource<T>> sourceIter = sources.iterator();

                    public boolean hasNext() {
                        return sourceIter.hasNext();
                    }

                    public BufferedImage next() {
                        ImageSource<T> source = sourceIter.next();
                        BufferedImageSink destination = new BufferedImageSink();
                        try {
                            Thumbnailator.createThumbnail(
                                    new SourceSinkThumbnailTask<T, BufferedImage>(makeParam(), source, destination)
                            );
                        } catch (IOException e) {
                            // Iterator has no error channel; signal failure
                            // with a null element.
                            return null;
                        }
                        return destination.getSink();
                    }

                    public void remove() {
                        throw new UnsupportedOperationException(
                                "Cannot remove elements from this iterator."
                        );
                    }
                };
            }
        }

        /**
         * Status of each property.
         *
         * @author coobird
         */
        private static enum Status {
            OPTIONAL,
            READY,
            NOT_READY,
            ALREADY_SET,
            CANNOT_SET,
        }

        /**
         * Interface used by {@link Properties}.
         *
         * @author coobird
         */
        private static interface Property {
            public String getName();
        }

        /**
         * Enum of properties which can be set by this builder.
         *
         * @author coobird
         */
        private static enum Properties implements Property {
            SIZE("size"),
            SCALE("scale"),
            IMAGE_TYPE("imageType"),
            SCALING_MODE("scalingMode"),
            ALPHA_INTERPOLATION("alphaInterpolation"),
            ANTIALIASING("antialiasing"),
            DITHERING("dithering"),
            RENDERING("rendering"),
            KEEP_ASPECT_RATIO("keepAspectRatio"),
            OUTPUT_FORMAT("outputFormat"),
            OUTPUT_FORMAT_TYPE("outputFormatType"),
            OUTPUT_QUALITY("outputQuality"),
            RESIZER("resizer"),
            ;

            private final String name;

            private Properties(String name) {
                this.name = name;
            }

            public String getName() {
                return name;
            }
        }

        /** Map to keep track of whether a property has been properly set or not. */
        private final Map<Properties, Status> statusMap = new HashMap<Properties, Status>();

        /* Populates the property map; one of SIZE or SCALE must be set. */
        {
            statusMap.put(Properties.SIZE, Status.NOT_READY);
            statusMap.put(Properties.SCALE, Status.NOT_READY);
            statusMap.put(Properties.IMAGE_TYPE, Status.OPTIONAL);
            statusMap.put(Properties.SCALING_MODE, Status.OPTIONAL);
            statusMap.put(Properties.ALPHA_INTERPOLATION, Status.OPTIONAL);
            statusMap.put(Properties.ANTIALIASING, Status.OPTIONAL);
            statusMap.put(Properties.DITHERING, Status.OPTIONAL);
            statusMap.put(Properties.RENDERING, Status.OPTIONAL);
            statusMap.put(Properties.KEEP_ASPECT_RATIO, Status.OPTIONAL);
            statusMap.put(Properties.OUTPUT_FORMAT, Status.OPTIONAL);
            statusMap.put(Properties.OUTPUT_FORMAT_TYPE, Status.OPTIONAL);
            statusMap.put(Properties.OUTPUT_QUALITY, Status.OPTIONAL);
            statusMap.put(Properties.RESIZER, Status.OPTIONAL);
        }

        /**
         * Updates the property status map.
         *
         * @param property      The property to update.
         * @param newStatus     The new status.
         * @throws IllegalStateException If the property is already set, or
         *                               cannot be set in the current state.
         */
        private void updateStatus(Properties property, Status newStatus) {
            if (statusMap.get(property) == Status.ALREADY_SET) {
                throw new IllegalStateException(
                        property.getName() + " is already set.");
            }
            if (statusMap.get(property) == Status.CANNOT_SET) {
                throw new IllegalStateException(
                        property.getName() + " cannot be set.");
            }
            statusMap.put(property, newStatus);
        }

        /**
         * Sentinel indicating that the imageType has not been specified;
         * {@link ThumbnailParameter#DEFAULT_IMAGE_TYPE} should be used instead.
         */
        private static final int IMAGE_TYPE_UNSPECIFIED = -1;

        /*
         * Fields for the builder interface, with their default values.
         */
        private int width = -1;
        private int height = -1;
        private double scale = Double.NaN;

        private int imageType = IMAGE_TYPE_UNSPECIFIED;
        private boolean keepAspectRatio = true;

        private String outputFormat = ThumbnailParameter.ORIGINAL_FORMAT;
        private String outputFormatType = ThumbnailParameter.DEFAULT_FORMAT_TYPE;
        private float outputQuality = ThumbnailParameter.DEFAULT_QUALITY;

        private ScalingMode scalingMode = ScalingMode.PROGRESSIVE_BILINEAR;
        private AlphaInterpolation alphaInterpolation = AlphaInterpolation.DEFAULT;
        private Dithering dithering = Dithering.DEFAULT;
        private Antialiasing antialiasing = Antialiasing.DEFAULT;
        private Rendering rendering = Rendering.DEFAULT;

        private Resizer resizer = Resizers.PROGRESSIVE;

        /** The {@link ImageFilter}s to apply when creating the thumbnail. */
        private Pipeline filterPipeline = new Pipeline();

        /**
         * Sets the size of the thumbnail. Mutually exclusive with
         * {@link #scale(double)}.
         *
         * @param width     Thumbnail width in pixels.
         * @param height    Thumbnail height in pixels.
         * @return Reference to this object.
         */
        public Builder<T> size(int width, int height) {
            updateStatus(Properties.SIZE, Status.ALREADY_SET);
            updateStatus(Properties.SCALE, Status.CANNOT_SET);

            validateDimensions(width, height);
            this.width = width;
            this.height = height;

            return this;
        }

        /**
         * Sets the scaling factor of the thumbnail. Mutually exclusive with
         * {@link #size(int, int)} and {@link #keepAspectRatio(boolean)}.
         *
         * @param scale     Scaling factor; must be greater than 0.
         * @return Reference to this object.
         */
        public Builder<T> scale(double scale) {
            updateStatus(Properties.SCALE, Status.ALREADY_SET);
            updateStatus(Properties.SIZE, Status.CANNOT_SET);
            updateStatus(Properties.KEEP_ASPECT_RATIO, Status.CANNOT_SET);

            if (scale <= 0) {
                throw new IllegalArgumentException(
                        "The scaling factor is equal to or less than 0."
                );
            }
            this.scale = scale;

            return this;
        }

        /**
         * Sets the {@link BufferedImage} type of the thumbnail.
         *
         * @param type      An image type of {@link BufferedImage}.
         * @return Reference to this object.
         */
        public Builder<T> imageType(int type) {
            updateStatus(Properties.IMAGE_TYPE, Status.ALREADY_SET);
            imageType = type;
            return this;
        }

        /**
         * Sets the resizing scaling mode. Mutually exclusive with
         * {@link #resizer(Resizer)}.
         *
         * @param config    The scaling mode.
         * @return Reference to this object.
         */
        public Builder<T> scalingMode(ScalingMode config) {
            checkForNull(config, "Scaling mode is null.");
            updateStatus(Properties.SCALING_MODE, Status.ALREADY_SET);
            updateStatus(Properties.RESIZER, Status.CANNOT_SET);
            scalingMode = config;
            return this;
        }

        /**
         * Sets the {@link Resizer} used for the resizing operation. Mutually
         * exclusive with {@link #scalingMode(ScalingMode)}.
         *
         * @param resizer   The resizer to use.
         * @return Reference to this object.
         */
        public Builder<T> resizer(Resizer resizer) {
            checkForNull(resizer, "Resizer is null.");
            updateStatus(Properties.RESIZER, Status.ALREADY_SET);
            updateStatus(Properties.SCALING_MODE, Status.CANNOT_SET);
            this.resizer = resizer;
            return this;
        }

        /**
         * Sets the alpha interpolation mode for the resizing operation.
         *
         * @param config    The alpha interpolation mode.
         * @return Reference to this object.
         */
        public Builder<T> alphaInterpolation(AlphaInterpolation config) {
            checkForNull(config, "Alpha interpolation is null.");
            updateStatus(Properties.ALPHA_INTERPOLATION, Status.ALREADY_SET);
            alphaInterpolation = config;
            return this;
        }

        /**
         * Sets the dithering mode for the resizing operation.
         *
         * @param config    The dithering mode.
         * @return Reference to this object.
         */
        public Builder<T> dithering(Dithering config) {
            checkForNull(config, "Dithering is null.");
            updateStatus(Properties.DITHERING, Status.ALREADY_SET);
            dithering = config;
            return this;
        }

        /**
         * Sets the antialiasing mode for the resizing operation.
         *
         * @param config    The antialiasing mode.
         * @return Reference to this object.
         */
        public Builder<T> antialiasing(Antialiasing config) {
            checkForNull(config, "Antialiasing is null.");
            updateStatus(Properties.ANTIALIASING, Status.ALREADY_SET);
            antialiasing = config;
            return this;
        }

        /**
         * Sets the rendering mode for the resizing operation.
         *
         * @param config    The rendering mode.
         * @return Reference to this object.
         */
        public Builder<T> rendering(Rendering config) {
            checkForNull(config, "Rendering is null.");
            updateStatus(Properties.RENDERING, Status.ALREADY_SET);
            rendering = config;
            return this;
        }

        /**
         * Sets whether the aspect ratio of the original is preserved.
         * Requires {@link #size(int, int)} to have been called first, and is
         * incompatible with {@link #scale(double)}.
         *
         * @param keep      Whether to keep the aspect ratio.
         * @return Reference to this object.
         */
        public Builder<T> keepAspectRatio(boolean keep) {
            if (statusMap.get(Properties.SCALE) == Status.ALREADY_SET) {
                throw new IllegalStateException("Cannot specify whether to " +
                        "keep the aspect ratio if the scaling factor has " +
                        "already been specified.");
            }
            if (statusMap.get(Properties.SIZE) != Status.ALREADY_SET) {
                throw new IllegalStateException("Cannot specify whether to " +
                        "keep the aspect ratio unless the size parameter has " +
                        "already been specified.");
            }
            updateStatus(Properties.KEEP_ASPECT_RATIO, Status.ALREADY_SET);
            keepAspectRatio = keep;
            return this;
        }

        /**
         * Sets the output quality of the compression, from {@code 0.0f}
         * (lowest) to {@code 1.0f} (highest), inclusive.
         *
         * @param quality   The compression quality.
         * @return Reference to this object.
         */
        public Builder<T> outputQuality(float quality) {
            if (quality < 0.0f || quality > 1.0f) {
                throw new IllegalArgumentException(
                        "The quality setting must be in the range 0.0f and " +
                        "1.0f, inclusive."
                );
            }
            updateStatus(Properties.OUTPUT_QUALITY, Status.ALREADY_SET);
            outputQuality = quality;
            return this;
        }

        /**
         * Sets the output quality of the compression, from {@code 0.0d}
         * (lowest) to {@code 1.0d} (highest), inclusive.
         *
         * @param quality   The compression quality.
         * @return Reference to this object.
         */
        public Builder<T> outputQuality(double quality) {
            if (quality < 0.0d || quality > 1.0d) {
                throw new IllegalArgumentException(
                        "The quality setting must be in the range 0.0d and " +
                        "1.0d, inclusive."
                );
            }
            updateStatus(Properties.OUTPUT_QUALITY, Status.ALREADY_SET);
            outputQuality = (float)quality;
            // Guard against the float cast drifting just outside [0, 1].
            if (outputQuality < 0.0f) {
                outputQuality = 0.0f;
            } else if (outputQuality > 1.0f) {
                outputQuality = 1.0f;
            }
            return this;
        }

        /**
         * Sets the compression format of the thumbnail.
         *
         * @param format    The compression format name.
         * @return Reference to this object.
         */
        public Builder<T> outputFormat(String format) {
            if (!ThumbnailatorUtils.isSupportedOutputFormat(format)) {
                throw new IllegalArgumentException(
                        "Specified format is not supported: " + format
                );
            }
            updateStatus(Properties.OUTPUT_FORMAT, Status.ALREADY_SET);
            outputFormat = format;
            return this;
        }

        /**
         * Sets the compression format type of the thumbnail; requires a
         * specific output format to have been set first.
         *
         * @param formatType    The compression format type.
         * @return Reference to this object.
         */
        public Builder<T> outputFormatType(String formatType) {
            /*
             * If the output format is still the original format, a specific
             * format type is likely not present in all original formats
             * (e.g. type "JPEG" when the originals are JPEG and PNG), so
             * disallow it. Note: the identity comparisons below are
             * intentional — ORIGINAL_FORMAT / DEFAULT_FORMAT_TYPE are
             * sentinel references.
             */
            if (formatType != ThumbnailParameter.DEFAULT_FORMAT_TYPE
                    && outputFormat == ThumbnailParameter.ORIGINAL_FORMAT) {
                throw new IllegalArgumentException(
                        "Cannot set the format type if a specific output " +
                        "format has not been specified."
                );
            }
            if (!ThumbnailatorUtils.isSupportedOutputFormatType(outputFormat, formatType)) {
                throw new IllegalArgumentException(
                        "Specified format type (" + formatType + ") is not " +
                        " supported for the format: " + outputFormat
                );
            }
            updateStatus(Properties.OUTPUT_FORMAT_TYPE, Status.ALREADY_SET);
            // NOTE(review): statusMap is pre-populated with OUTPUT_FORMAT in
            // the instance initializer, so this branch never fires and the
            // output format is never locked to CANNOT_SET as the original
            // comment intended — preserved as-is to avoid a behavior change;
            // confirm intent upstream.
            if (!statusMap.containsKey(Properties.OUTPUT_FORMAT)) {
                updateStatus(Properties.OUTPUT_FORMAT, Status.CANNOT_SET);
            }
            outputFormatType = formatType;
            return this;
        }

        /**
         * Sets a watermark to apply to the thumbnail. May be called multiple
         * times; watermarks are applied in call order. Optional.
         *
         * @param w     The watermark to apply to the thumbnail.
         * @return Reference to this object.
         */
        public Builder<T> watermark(Watermark w) {
            if (w == null) {
                throw new NullPointerException("Watermark is null.");
            }
            filterPipeline.add(w);
            return this;
        }

        /**
         * Sets a centered watermark image with 50% opacity; equivalent to
         * {@code watermark(Positions.CENTER, image, 0.5f)}. May be called
         * multiple times; watermarks are applied in call order. Optional.
         *
         * @param image     The image of the watermark.
         * @return Reference to this object.
         */
        public Builder<T> watermark(BufferedImage image) {
            return watermark(Positions.CENTER, image, 0.5f);
        }

        /**
         * Sets a centered watermark image with the given opacity; equivalent
         * to {@code watermark(Positions.CENTER, image, opacity)}. May be
         * called multiple times; watermarks are applied in call order.
         * Optional.
         *
         * @param image     The image of the watermark.
         * @param opacity   Opacity in [0.0f, 1.0f]; 0.0f is fully
         *                  transparent, 1.0f fully opaque.
         * @return Reference to this object.
         */
        public Builder<T> watermark(BufferedImage image, float opacity) {
            return watermark(Positions.CENTER, image, opacity);
        }

        /**
         * Sets a watermark image with the given position and opacity. May be
         * called multiple times; watermarks are applied in call order.
         * Optional.
         *
         * @param position  The position of the watermark.
         * @param image     The image of the watermark.
         * @param opacity   Opacity in [0.0f, 1.0f]; 0.0f is fully
         *                  transparent, 1.0f fully opaque.
         * @return Reference to this object.
         */
        public Builder<T> watermark(Position position, BufferedImage image, float opacity) {
            filterPipeline.add(new Watermark(position, image, opacity));
            return this;
        }

        /**
         * Rotates the thumbnail clockwise by the given angle. May be called
         * multiple times; rotations are applied in call order. Optional.
         *
         * @param angle     Angle in degrees.
         * @return Reference to this object.
         */
        public Builder<T> rotate(double angle) {
            filterPipeline.add(Rotation.newRotator(angle));
            return this;
        }

        /**
         * Adds an {@link ImageFilter} to apply to the thumbnail. May be
         * called multiple times; filters are applied in call order. Optional.
         *
         * @param filter    An image filter to apply to the thumbnail.
         * @return Reference to this object.
         */
        public Builder<T> addFilter(ImageFilter filter) {
            if (filter == null) {
                throw new NullPointerException("Filter is null.");
            }
            filterPipeline.add(filter);
            return this;
        }

        /**
         * Adds multiple {@link ImageFilter}s to apply to the thumbnail, in
         * list order. Optional.
         *
         * @param filters   A list of filters to apply to the thumbnail.
         * @return Reference to this object.
         */
        public Builder<T> addFilters(List<ImageFilter> filters) {
            if (filters == null) {
                throw new NullPointerException("Filters is null.");
            }
            filterPipeline.addAll(filters);
            return this;
        }

        /**
         * Verifies that every required property has been set before a
         * thumbnail is produced.
         *
         * @throws IllegalStateException If a required property is unset.
         */
        private void checkReadiness() {
            for (Map.Entry<Properties, Status> s : statusMap.entrySet()) {
                if (s.getValue() == Status.NOT_READY) {
                    throw new IllegalStateException(s.getKey().getName() +
                            " is not set.");
                }
            }
        }

        /**
         * Returns a {@link Resizer} suitable for the current builder state:
         * derived from the scaling mode when one was set, otherwise the
         * explicitly configured resizer.
         *
         * @return The {@link Resizer} for the current builder state.
         */
        private Resizer makeResizer() {
            if (statusMap.get(Properties.SCALING_MODE) == Status.ALREADY_SET) {
                return makeResizer(scalingMode);
            } else {
                return this.resizer;
            }
        }

        /**
         * Returns a {@link Resizer} for the given scaling mode, configured
         * with the rendering hints from the current builder state.
         *
         * @param mode      The scaling mode to use to create thumbnails.
         * @return The {@link Resizer} for the given mode.
         */
        private Resizer makeResizer(ScalingMode mode) {
            Map<RenderingHints.Key, Object> hints =
                    new HashMap<RenderingHints.Key, Object>();

            hints.put(RenderingHints.KEY_ALPHA_INTERPOLATION, alphaInterpolation.getValue());
            hints.put(RenderingHints.KEY_DITHERING, dithering.getValue());
            hints.put(RenderingHints.KEY_ANTIALIASING, antialiasing.getValue());
            hints.put(RenderingHints.KEY_RENDERING, rendering.getValue());

            if (mode == ScalingMode.BILINEAR) {
                return new BilinearResizer(hints);
            } else if (mode == ScalingMode.BICUBIC) {
                return new BicubicResizer(hints);
            } else {
                // PROGRESSIVE_BILINEAR, and the fallback for any other mode
                // (the original had identical branches for both cases).
                return new ProgressiveBilinearResizer(hints);
            }
        }

        /**
         * Returns a {@link ThumbnailParameter} from the current builder
         * state, using size-based or scale-based construction depending on
         * which was configured.
         *
         * @return A {@link ThumbnailParameter} from the current builder state.
         */
        private ThumbnailParameter makeParam() {
            Resizer resizer = makeResizer();

            int imageTypeToUse = imageType;
            if (imageType == IMAGE_TYPE_UNSPECIFIED) {
                imageTypeToUse = ThumbnailParameter.ORIGINAL_IMAGE_TYPE;
            }

            if (Double.isNaN(scale)) {
                return new ThumbnailParameter(
                        new Dimension(width, height),
                        keepAspectRatio,
                        outputFormat,
                        outputFormatType,
                        outputQuality,
                        imageTypeToUse,
                        filterPipeline.getFilters(),
                        resizer
                );
            } else {
                return new ThumbnailParameter(
                        scale,
                        keepAspectRatio,
                        outputFormat,
                        outputFormatType,
                        outputQuality,
                        imageTypeToUse,
                        filterPipeline.getFilters(),
                        resizer
                );
            }
        }

        /**
         * Creates the thumbnails and returns them as a lazy {@link Iterable}
         * of {@link BufferedImage}s; preferable to {@link #asBufferedImages()}
         * for many images, since thumbnails are produced one at a time,
         * reducing heap pressure. If an {@link IOException} occurs while
         * processing an element, {@code null} is returned for that element.
         *
         * @return An {@link Iterable} providing thumbnails as
         *         {@link BufferedImage}s.
         */
        public Iterable<BufferedImage> iterableBufferedImages() {
            checkReadiness();
            /*
             * TODO To get the precise error information, there would have to
             * be an event notification mechanism.
             */
            return new BufferedImageIterable();
        }

        /**
         * Creates the thumbnails and returns them as a {@link List} of
         * {@link BufferedImage}s. For many images at once, prefer
         * {@link #iterableBufferedImages()} to avoid exhausting heap space.
         *
         * @return A list of thumbnails.
         * @throws IOException If a problem occurred during the reading of
         *                     the original images.
         */
        public List<BufferedImage> asBufferedImages() throws IOException {
            checkReadiness();

            List<BufferedImage> thumbnails = new ArrayList<BufferedImage>();

            for (ImageSource<T> source : sources) {
                BufferedImageSink destination = new BufferedImageSink();

                Thumbnailator.createThumbnail(
                        new SourceSinkThumbnailTask<T, BufferedImage>(makeParam(), source, destination)
                );

                thumbnails.add(destination.getSink());
            }

            return thumbnails;
        }

        /**
         * Creates a single thumbnail and returns it as a
         * {@link BufferedImage}; only valid when exactly one original image
         * was specified.
         *
         * @return The thumbnail.
         * @throws IOException If a problem occurred during the reading of
         *                     the original image.
         */
        public BufferedImage asBufferedImage() throws IOException {
            checkReadiness();

            if (sources.size() > 1) {
                throw new IllegalArgumentException("Cannot create one thumbnail from multiple original images.");
            }

            BufferedImageSink destination = new BufferedImageSink();

            Thumbnailator.createThumbnail(
                    new SourceSinkThumbnailTask<T, BufferedImage>(makeParam(), sources.get(0), destination)
            );

            return destination.getSink();
        }

        /**
         * Creates the thumbnails, writes them to the files named by the
         * given {@link Iterable}, and returns the written {@link File}s.
         *
         * @param iterable  Supplies a file name for each thumbnail, in order.
         * @return A list of {@link File}s of the thumbnails which were
         *         created.
         * @throws IOException If a problem occurs while reading the original
         *                     images or writing the thumbnails to files.
         */
        public List<File> asFiles(Iterable<File> iterable) throws IOException {
            checkReadiness();

            if (iterable == null) {
                throw new NullPointerException("File name iterable is null.");
            }

            List<File> destinationFiles = new ArrayList<File>();

            ThumbnailParameter param = makeParam();

            Iterator<File> filenameIter = iterable.iterator();

            for (ImageSource<T> source : sources) {
                if (!filenameIter.hasNext()) {
                    throw new IndexOutOfBoundsException(
                            "Not enough file names provided by iterator."
                    );
                }

                FileImageSink destination = new FileImageSink(filenameIter.next());

                Thumbnailator.createThumbnail(
                        new SourceSinkThumbnailTask<T, File>(param, source, destination)
                );

                destinationFiles.add(destination.getSink());
            }

            return destinationFiles;
        }

        /**
         * Creates the thumbnails and writes them to the files named by the
         * given {@link Iterable}.
         *
         * @param iterable  Supplies a file name for each thumbnail, in order.
         * @throws IOException If a problem occurs while reading the original
         *                     images or writing the thumbnails to files.
         */
        public void toFiles(Iterable<File> iterable) throws IOException {
            asFiles(iterable);
        }

        /**
         * Creates the thumbnails alongside the original files, naming each
         * via the given {@link Rename}, and returns the written files. Only
         * valid when the originals came from files.
         *
         * @param rename    Derives a thumbnail file name from each original's
         *                  file name.
         * @return A list of {@link File}s of the thumbnails which were
         *         created.
         * @throws IOException If a problem occurs while reading the original
         *                     images or writing the thumbnails to files.
         */
        public List<File> asFiles(Rename rename) throws IOException {
            checkReadiness();

            // NOTE(review): only the first source's type is checked; a mixed
            // source list would fail with a ClassCastException below instead
            // of this clearer message — confirm whether mixed lists can occur.
            if (!(sources.get(0) instanceof FileImageSource)) {
                throw new IllegalStateException("Cannot create thumbnails to files if original images are not from files.");
            }

            if (rename == null) {
                throw new NullPointerException("Rename is null.");
            }

            List<File> destinationFiles = new ArrayList<File>();

            ThumbnailParameter param = makeParam();

            for (ImageSource<T> source : sources) {
                File f = ((FileImageSource)source).getSource();

                File destinationFile =
                        new File(f.getParent(), rename.apply(f.getName()));

                destinationFiles.add(destinationFile);

                Thumbnailator.createThumbnail(new FileThumbnailTask(param, f, destinationFile));
            }

            return destinationFiles;
        }

        /**
         * Creates the thumbnails alongside the original files, naming each
         * via the given {@link Rename}. Only valid when the originals came
         * from files.
         *
         * @param rename    Derives a thumbnail file name from each original's
         *                  file name.
         * @throws IOException If a problem occurs while reading the original
         *                     images or writing the thumbnails to files.
         */
        public void toFiles(Rename rename) throws IOException {
            asFiles(rename);
        }

        /**
         * Creates a single thumbnail and writes it to the given file; only
         * valid when exactly one original image was specified.
         *
         * @param outFile   The file to write the thumbnail to.
         * @throws IOException If a problem occurs while reading the original
         *                     image or writing the thumbnail.
         */
        public void toFile(File outFile) throws IOException {
            checkReadiness();

            if (sources.size() > 1) {
                throw new IllegalArgumentException("Cannot output multiple thumbnails to one file.");
            }

            ImageSource<T> source = sources.get(0);
            FileImageSink destination = new FileImageSink(outFile);

            Thumbnailator.createThumbnail(
                    new SourceSinkThumbnailTask<T, File>(makeParam(), source, destination)
            );
        }

        /**
         * Creates a single thumbnail and writes it to the given file path;
         * only valid when exactly one original image was specified.
         *
         * @param outFilepath   The path of the file to write the thumbnail to.
         * @throws IOException If a problem occurs while reading the original
         *                     image or writing the thumbnail.
         */
        public void toFile(String outFilepath) throws IOException {
            checkReadiness();

            if (sources.size() > 1) {
                throw new IllegalArgumentException("Cannot output multiple thumbnails to one file.");
            }

            ImageSource<T> source = sources.get(0);
            FileImageSink destination = new FileImageSink(outFilepath);

            Thumbnailator.createThumbnail(
                    new SourceSinkThumbnailTask<T, File>(makeParam(), source, destination)
            );
        }

        /**
         * Creates a single thumbnail and writes it to the given stream; only
         * valid when exactly one original image was specified. The stream is
         * not closed by this method.
         *
         * @param os    The stream to write the thumbnail to.
         * @throws IOException If a problem occurs while reading the original
         *                     image or writing the thumbnail.
         */
        public void toOutputStream(OutputStream os) throws IOException {
            checkReadiness();

            if (sources.size() > 1) {
                throw new IllegalArgumentException("Cannot output multiple thumbnails to one stream.");
            }

            ImageSource<T> source = sources.get(0);
            OutputStreamImageSink destination = new OutputStreamImageSink(os);

            Thumbnailator.createThumbnail(
                    new SourceSinkThumbnailTask<T, OutputStream>(makeParam(), source, destination)
            );
        }
    }
}
package net.milkycraft.Listeners;

import net.milkycraft.Spawnegg;

import org.bukkit.event.EventHandler;
import org.bukkit.event.EventPriority;
import org.bukkit.event.Listener;
import org.bukkit.event.entity.EntityDeathEvent;

/**
 * Zeroes out experience drops on entity death whenever the
 * "Disable.Experience" configuration flag is enabled.
 */
public class ExpListener implements Listener {

    /* Class tested and working */
    Spawnegg plugin;

    public ExpListener(Spawnegg instance) {
        plugin = instance;
    }

    /**
     * Suppresses dropped experience when experience is disabled in the
     * plugin configuration; otherwise leaves the event untouched.
     *
     * @param e The death event whose experience drop may be cleared.
     */
    @EventHandler(priority = EventPriority.LOWEST)
    public void onXpDrop(EntityDeathEvent e) {
        final boolean experienceDisabled =
                plugin.getConfig().getBoolean("Disable.Experience");
        if (!experienceDisabled) {
            return;
        }
        e.setDroppedExp(0);
    }
}
package net.sf.jaer.util;

import java.awt.Dimension;
import java.awt.GraphicsConfiguration;
import java.awt.GraphicsDevice;
import java.awt.GraphicsEnvironment;
import java.awt.IllegalComponentStateException;
import java.awt.Insets;
import java.awt.Point;
import java.awt.Rectangle;
import java.awt.Toolkit;
import java.util.logging.Logger;

import javax.swing.JFrame;
import javax.swing.SwingUtilities;

/**
 * Static methods for window handling.
 *
 * @author tobi
 */
public class JAERWindowUtilities {

    /** Accounts for the task bar at the bottom of the screen; we don't want a window to underlap it. */
    final static int WINDOWS_TASK_BAR_HEIGHT = 100;
    /** Lower screen inset; initialized to the task-bar fallback and refined from the maximum window bounds. */
    private static int lowerInset = WINDOWS_TASK_BAR_HEIGHT;
    static Logger log = Logger.getLogger("JAERWindowUtilities");

    /**
     * Creates a new instance of JAERWindowUtilities.
     */
    public JAERWindowUtilities() {
    }

    /**
     * Constrains a frame so it fits on screen: a window taller than the
     * screen (minus the lower inset) or wider than the screen is resized to
     * fit. The frame's location is reasserted but not otherwise moved.
     * The actual adjustment happens later on the EDT via
     * {@link SwingUtilities#invokeLater}.
     *
     * @param frame JFrame to constrain.
     */
    public static void constrainFrameSizeToScreenSize(final JFrame frame) {
        boolean resize = false; // set true if the window is too big for the screen
        try {
            Point loc = frame.getLocationOnScreen();
            Dimension dim = frame.getSize();
            int x = loc.x;
            int y = loc.y;
            int w = dim.width;
            int h = dim.height;
            Dimension sd = Toolkit.getDefaultToolkit().getScreenSize();

            // Determining the task-bar inset via GraphicsConfiguration screen
            // insets caused a native-code exception with some NVIDIA drivers
            // (and made JOGL fall back to GDI rendering), so a hardcoded
            // fallback is used and then refined from the maximum window
            // bounds when available.
            lowerInset = 64;
            Rectangle windowBounds =
                    GraphicsEnvironment.getLocalGraphicsEnvironment().getMaximumWindowBounds();
            if (windowBounds != null) {
                lowerInset = sd.height - windowBounds.height;
            }

            if (h > sd.height - lowerInset) {
                log.info("window height (" + h
                        + ") is bigger than screen height minus WINDOWS_TASK_BAR_HEIGHT ("
                        + (sd.height - WINDOWS_TASK_BAR_HEIGHT) + "), resizing height");
                h = sd.height - lowerInset;
                resize = true;
            }
            if (w > sd.width) {
                // Fixed log message: this branch resizes the WIDTH (the
                // original message incorrectly said "resizing height").
                log.info("window width (" + w + ") is bigger than screen width ("
                        + (sd.width) + "), resizing width");
                w = sd.width;
                resize = true;
            }

            // Apply on the EDT; the location is reasserted as-is (the window
            // is not moved), only the size is constrained.
            final boolean resize2 = resize;
            final int w2 = w, h2 = h, x2 = x, y2 = y;
            SwingUtilities.invokeLater(new Runnable() {

                public void run() {
                    frame.setLocation(x2, y2);
                    if (resize2 && !(frame instanceof WindowSaver.DontResize)) {
                        frame.setSize(new Dimension(w2, h2));
                    }
                    frame.validate();
                }
            });
        } catch (IllegalComponentStateException e) {
            // Thrown when the frame is not yet showing on screen.
            log.warning(e.toString() + ": not constraining window size to screen");
            return;
        }
    }
}
package nu.nerd.modreq.database;

import java.util.ArrayList;
import java.util.List;

import com.avaje.ebean.Expr;
import com.avaje.ebean.ExpressionList;
import com.avaje.ebean.PagingList;
import com.avaje.ebean.Query;

import nu.nerd.modreq.ModReq;
import nu.nerd.modreq.database.Request.RequestStatus;

/**
 * Data-access helper for {@link Request} rows, backed by the plugin's Ebean
 * database. All lookups tolerate a null query and fall back to empty results.
 */
public class RequestTable {

    ModReq plugin;

    public RequestTable(ModReq plugin) {
        this.plugin = plugin;
    }

    /** Returns every OPEN or CLAIMED request filed by the given player (name match is case-insensitive). */
    public List<Request> getUserRequests(String username) {
        List<Request> found = new ArrayList<Request>();
        Query<Request> openOrClaimed = plugin.getDatabase().find(Request.class).where()
                .ieq("playerName", username)
                .in("status", RequestStatus.OPEN, RequestStatus.CLAIMED)
                .query();
        if (openOrClaimed != null) {
            found.addAll(openOrClaimed.findList());
        }
        return found;
    }

    /** Returns the player's CLOSED requests whose closure they have not yet been shown. */
    public List<Request> getMissedClosedRequests(String username) {
        List<Request> found = new ArrayList<Request>();
        Query<Request> unseenClosed = plugin.getDatabase().find(Request.class).where()
                .ieq("playerName", username)
                .eq("status", RequestStatus.CLOSED)
                .eq("closeSeenByUser", false)
                .query();
        if (unseenClosed != null) {
            found.addAll(unseenClosed.findList());
        }
        return found;
    }

    /** Counts the player's currently OPEN requests. */
    public int getNumRequestFromUser(String username) {
        Query<Request> open = plugin.getDatabase().find(Request.class).where()
                .ieq("playerName", username)
                .in("status", RequestStatus.OPEN)
                .query();
        if (open == null) {
            return 0;
        }
        return open.findRowCount();
    }

    /** Counts requests in any of the given statuses, optionally excluding admin-flagged ones. */
    public int getTotalRequest(boolean includeElevated, RequestStatus... statuses) {
        ExpressionList<Request> filter = plugin.getDatabase().find(Request.class).where()
                .in("status", statuses);
        if (!includeElevated) {
            filter.where().eq("flagForAdmin", false);
        }
        Query<Request> counted = filter.query();
        if (counted == null) {
            return 0;
        }
        return counted.findRowCount();
    }

    /** Returns one page of requests in the given statuses, optionally excluding admin-flagged ones. */
    public List<Request> getRequestPage(int page, int perPage, boolean includeElevated, RequestStatus... statuses) {
        List<Request> found = new ArrayList<Request>();
        ExpressionList<Request> filter = plugin.getDatabase().find(Request.class).where()
                .in("status", statuses);
        if (!includeElevated) {
            filter.where().eq("flagForAdmin", false);
        }
        Query<Request> paged = filter.query();
        if (paged != null) {
            found.addAll(paged.findPagingList(perPage).getPage(page).getList());
        }
        return found;
    }

    /** Looks up a single request by primary key, or null when absent. */
    public Request getRequest(int id) {
        Query<Request> byId = plugin.getDatabase().find(Request.class).where()
                .eq("id", id)
                .query();
        if (byId == null) {
            return null;
        }
        return byId.findUnique();
    }

    /** Persists the given request (insert or update). */
    public void save(Request request) {
        plugin.getDatabase().save(request);
    }
}
package odontosoft.model.dao;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

import odontosoft.model.database.ConexaoBanco;
import odontosoft.model.domain.Procedimento;

/**
 * JDBC DAO implementing CRUD operations for the {@code Procedimento} table.
 *
 * <p>All statements are parameterized (no SQL built from user input) and are
 * created/closed locally with try-with-resources, so they are released even
 * when a {@link SQLException} is thrown — the old code leaked the statement
 * (and result set) on any error path.
 *
 * @author mikolaja
 */
public class ProcedimentoDAO implements InterfaceGenericDAO<Procedimento, Integer> {

    ConexaoBanco conexao = new ConexaoBanco();
    Connection connect = conexao.getConexao();
    // Retained for source compatibility with any external code touching it;
    // the DAO itself no longer shares statement state between calls.
    PreparedStatement stmt = null;

    /**
     * Inserts a new row with the procedure's description and price.
     */
    @Override
    public void inserir(Procedimento var) {
        String sql = "INSERT INTO Procedimento(descricao, preco) VALUES (?,?);";
        try (PreparedStatement ps = connect.prepareStatement(sql)) {
            ps.setString(1, var.getDescricao());
            ps.setDouble(2, var.getPreco());
            ps.execute();
            System.out.println("Dados inseridos no banco de dados!");
        } catch (SQLException e) {
            System.out.println("Error: " + e);
        }
    }

    /**
     * Lists every row of the table; returns an empty list on error.
     */
    @Override
    public List<Procedimento> listar() {
        List<Procedimento> list = new ArrayList<>();
        String sql = "SELECT * FROM Procedimento";
        try (PreparedStatement ps = connect.prepareStatement(sql);
                ResultSet query = ps.executeQuery()) {
            while (query.next()) {
                list.add(new Procedimento(query.getInt("id"),
                        query.getString("descricao"),
                        query.getDouble("preco")));
            }
        } catch (SQLException e) {
            System.out.println("Error: " + e);
        }
        return list;
    }

    /**
     * Deletes the row with the given primary key.
     */
    @Override
    public void delete(Integer id) {
        String sql = "DELETE FROM Procedimento WHERE id = ?";
        try (PreparedStatement ps = connect.prepareStatement(sql)) {
            ps.setInt(1, id);
            ps.execute();
            System.out.println("Dados deletados com sucesso!");
        } catch (SQLException e) {
            System.out.println("Error: " + e);
        }
    }

    /**
     * Overwrites description and price of the row with the given id.
     */
    @Override
    public void update(Integer id, Procedimento newVar) {
        String sql = "UPDATE Procedimento SET descricao = ?, preco = ? WHERE id = ?;";
        try (PreparedStatement ps = connect.prepareStatement(sql)) {
            ps.setString(1, newVar.getDescricao());
            ps.setDouble(2, newVar.getPreco());
            ps.setInt(3, id);
            ps.execute();
            System.out.println("Dados atualizados!");
        } catch (SQLException e) {
            System.out.println("Error: " + e);
        }
    }

    /**
     * Fetches a single procedure by id; returns null when not found or on error.
     */
    @Override
    public Procedimento buscaPorId(Integer id) {
        String sql = "SELECT * FROM Procedimento WHERE id = ?";
        Procedimento var = null;
        try (PreparedStatement ps = connect.prepareStatement(sql)) {
            ps.setInt(1, id);
            try (ResultSet rs = ps.executeQuery()) {
                while (rs.next()) {
                    var = new Procedimento(rs.getInt("id"),
                            rs.getString("descricao"),
                            rs.getDouble("preco"));
                }
            }
        } catch (SQLException e) {
            System.out.println("Error: " + e);
        }
        return var;
    }
}
package com.facebook.imagepipeline.producers;

import javax.annotation.Nullable;

import java.util.Map;
import java.util.concurrent.Executor;

import com.facebook.common.internal.ImmutableMap;
import com.facebook.common.internal.Preconditions;
import com.facebook.common.internal.VisibleForTesting;
import com.facebook.common.references.CloseableReference;
import com.facebook.common.util.TriState;
import com.facebook.imageformat.ImageFormat;
import com.facebook.imagepipeline.common.ResizeOptions;
import com.facebook.imagepipeline.image.EncodedImage;
import com.facebook.imagepipeline.memory.PooledByteBuffer;
import com.facebook.imagepipeline.memory.PooledByteBufferFactory;
import com.facebook.imagepipeline.memory.PooledByteBufferOutputStream;
import com.facebook.imagepipeline.nativecode.JpegTranscoder;
import com.facebook.imagepipeline.request.ImageRequest;

/**
 * Resizes and rotates JPEG image according to the EXIF orientation data.
 *
 * <p> If the image is not JPEG, no transformation is applied.
 */
public class ResizeAndRotateProducer implements Producer<EncodedImage> {
  private static final String PRODUCER_NAME = "ResizeAndRotateProducer";
  private static final String ORIGINAL_SIZE_KEY = "Original size";
  private static final String REQUESTED_SIZE_KEY = "Requested size";
  private static final String FRACTION_KEY = "Fraction";

  @VisibleForTesting static final int DEFAULT_JPEG_QUALITY = 85;
  @VisibleForTesting static final int MAX_JPEG_SCALE_NUMERATOR = JpegTranscoder.SCALE_DENOMINATOR;
  @VisibleForTesting static final int MIN_TRANSFORM_INTERVAL_MS = 100;

  private static final float MAX_BITMAP_SIZE = 2048f;
  private static final float ROUNDUP_FRACTION = 2.0f/3;

  private final Executor mExecutor;
  private final PooledByteBufferFactory mPooledByteBufferFactory;
  private final boolean mDownsampleEnabled;
  private final Producer<EncodedImage> mNextProducer;

  public ResizeAndRotateProducer(
      Executor executor,
      PooledByteBufferFactory pooledByteBufferFactory,
      boolean downsampleEnabled,
      Producer<EncodedImage> nextProducer) {
    mExecutor = Preconditions.checkNotNull(executor);
    mPooledByteBufferFactory = Preconditions.checkNotNull(pooledByteBufferFactory);
    mDownsampleEnabled = downsampleEnabled;
    mNextProducer = Preconditions.checkNotNull(nextProducer);
  }

  @Override
  public void produceResults(
      final Consumer<EncodedImage> consumer,
      final ProducerContext context) {
    mNextProducer.produceResults(new TransformingConsumer(consumer, context), context);
  }

  private class TransformingConsumer extends DelegatingConsumer<EncodedImage, EncodedImage> {

    private final ProducerContext mProducerContext;
    private boolean mIsCancelled;

    private final JobScheduler mJobScheduler;

    public TransformingConsumer(
        final Consumer<EncodedImage> consumer,
        final ProducerContext producerContext) {
      super(consumer);
      mIsCancelled = false;
      mProducerContext = producerContext;
      JobScheduler.JobRunnable job = new JobScheduler.JobRunnable() {
        @Override
        public void run(EncodedImage encodedImage, boolean isLast) {
          doTransform(encodedImage, isLast);
        }
      };
      mJobScheduler = new JobScheduler(mExecutor, job, MIN_TRANSFORM_INTERVAL_MS);
      mProducerContext.addCallbacks(
          new BaseProducerContextCallbacks() {
            @Override
            public void onIsIntermediateResultExpectedChanged() {
              if (mProducerContext.isIntermediateResultExpected()) {
                mJobScheduler.scheduleJob();
              }
            }
            @Override
            public void onCancellationRequested() {
              mJobScheduler.clearJob();
              mIsCancelled = true;
              // this only works if it is safe to discard the output of previous producer
              consumer.onCancellation();
            }
          });
    }

    @Override
    protected void onNewResultImpl(@Nullable EncodedImage newResult, boolean isLast) {
      if (mIsCancelled) {
        return;
      }
      if (newResult == null) {
        if (isLast) {
          getConsumer().onNewResult(null, true);
        }
        return;
      }
      TriState shouldTransform =
          shouldTransform(mProducerContext.getImageRequest(), newResult, mDownsampleEnabled);
      // ignore the intermediate result if we don't know what to do with it
      if (!isLast && shouldTransform == TriState.UNSET) {
        return;
      }
      // just forward the result if we know that it shouldn't be transformed
      if (shouldTransform != TriState.YES) {
        getConsumer().onNewResult(newResult, isLast);
        return;
      }
      // we know that the result should be transformed, hence schedule it
      if (!mJobScheduler.updateJob(newResult, isLast)) {
        return;
      }
      if (isLast || mProducerContext.isIntermediateResultExpected()) {
        mJobScheduler.scheduleJob();
      }
    }

    private void doTransform(EncodedImage encodedImage, boolean isLast) {
      mProducerContext.getListener().onProducerStart(mProducerContext.getId(), PRODUCER_NAME);
      ImageRequest imageRequest = mProducerContext.getImageRequest();
      PooledByteBufferOutputStream outputStream = mPooledByteBufferFactory.newOutputStream();
      Map<String, String> extraMap = null;
      EncodedImage ret = null;
      try {
        int numerator = getScaleNumerator(imageRequest, encodedImage);
        extraMap = getExtraMap(encodedImage, imageRequest, numerator);
        JpegTranscoder.transcodeJpeg(
            encodedImage.getInputStream(),
            outputStream,
            getRotationAngle(imageRequest, encodedImage),
            numerator,
            DEFAULT_JPEG_QUALITY);
        CloseableReference<PooledByteBuffer> ref =
            CloseableReference.of(outputStream.toByteBuffer());
        try {
          ret = new EncodedImage(ref);
          ret.setImageFormat(ImageFormat.JPEG);
          try {
            ret.parseMetaData();
            mProducerContext.getListener().
                onProducerFinishWithSuccess(mProducerContext.getId(), PRODUCER_NAME, extraMap);
            getConsumer().onNewResult(ret, isLast);
          } finally {
            EncodedImage.closeSafely(ret);
          }
        } finally {
          CloseableReference.closeSafely(ref);
        }
      } catch (Exception e) {
        mProducerContext.getListener().
            onProducerFinishWithFailure(mProducerContext.getId(), PRODUCER_NAME, e, extraMap);
        getConsumer().onFailure(e);
        return;
      } finally {
        outputStream.close();
      }
    }

    private Map<String, String> getExtraMap(
        EncodedImage encodedImage,
        ImageRequest imageRequest,
        int numerator) {
      if (!mProducerContext.getListener().requiresExtraMap(mProducerContext.getId())) {
        return null;
      }
      String originalSize = encodedImage.getWidth() + "x" + encodedImage.getHeight();
      // BUGFIX: resize options may be null here — shouldTransform() can return YES
      // purely because of an EXIF rotation (getScaleNumerator explicitly handles
      // a null ResizeOptions) — so dereferencing it unconditionally caused an NPE.
      ResizeOptions resizeOptions = imageRequest.getResizeOptions();
      String requestedSize = (resizeOptions != null)
          ? resizeOptions.width + "x" + resizeOptions.height
          : "Unspecified";
      String fraction = numerator > 0 ? numerator + "/8" : "";
      return ImmutableMap.of(
          ORIGINAL_SIZE_KEY, originalSize,
          REQUESTED_SIZE_KEY, requestedSize,
          FRACTION_KEY, fraction);
    }
  }

  private static TriState shouldTransform(
      ImageRequest request,
      EncodedImage encodedImage,
      boolean downsampleEnabled) {
    if (encodedImage == null || encodedImage.getImageFormat() == ImageFormat.UNKNOWN) {
      return TriState.UNSET;
    }
    if (encodedImage.getImageFormat() != ImageFormat.JPEG) {
      return TriState.NO;
    }
    return TriState.valueOf(
        getRotationAngle(request, encodedImage) != 0 ||
            shouldResize(getScaleNumerator(request, encodedImage), downsampleEnabled));
  }

  @VisibleForTesting static float determineResizeRatio(
      ResizeOptions resizeOptions,
      int width,
      int height) {
    final float widthRatio = ((float) resizeOptions.width) / width;
    final float heightRatio = ((float) resizeOptions.height) / height;
    float ratio = Math.max(widthRatio, heightRatio);
    // TODO: The limit is larger than this on newer devices. The problem is to get the real limit,
    // you have to call Canvas.getMaximumBitmapWidth/Height on a real HW-accelerated Canvas.
    if (width * ratio > MAX_BITMAP_SIZE) {
      ratio = MAX_BITMAP_SIZE / width;
    }
    if (height * ratio > MAX_BITMAP_SIZE) {
      ratio = MAX_BITMAP_SIZE / height;
    }
    return ratio;
  }

  @VisibleForTesting static int roundNumerator(float maxRatio) {
    return (int) (ROUNDUP_FRACTION + maxRatio * JpegTranscoder.SCALE_DENOMINATOR);
  }

  private static int getScaleNumerator(
      ImageRequest imageRequest,
      EncodedImage encodedImage) {
    final ResizeOptions resizeOptions = imageRequest.getResizeOptions();
    if (resizeOptions == null) {
      // no resize requested: full scale (rotation may still apply)
      return JpegTranscoder.SCALE_DENOMINATOR;
    }

    // swap dimensions when the EXIF rotation turns the image sideways
    final int rotationAngle = getRotationAngle(imageRequest, encodedImage);
    final boolean swapDimensions = rotationAngle == 90 || rotationAngle == 270;
    final int widthAfterRotation = swapDimensions ? encodedImage.getHeight() :
            encodedImage.getWidth();
    final int heightAfterRotation = swapDimensions ? encodedImage.getWidth() :
            encodedImage.getHeight();

    float ratio = determineResizeRatio(resizeOptions, widthAfterRotation, heightAfterRotation);
    int numerator = roundNumerator(ratio);
    if (numerator > MAX_JPEG_SCALE_NUMERATOR) {
      return MAX_JPEG_SCALE_NUMERATOR;
    }
    return (numerator < 1) ? 1 : numerator;
  }

  private static int getRotationAngle(ImageRequest imageRequest, EncodedImage encodedImage) {
    if (!imageRequest.getAutoRotateEnabled()) {
      return 0;
    }
    int rotationAngle = encodedImage.getRotationAngle();
    Preconditions.checkArgument(
        rotationAngle == 0 || rotationAngle == 90 || rotationAngle == 180 || rotationAngle == 270);
    return rotationAngle;
  }

  private static boolean shouldResize(int numerator, boolean downsampleEnabled) {
    return !(downsampleEnabled && numerator <= (MAX_JPEG_SCALE_NUMERATOR / 2)) &&
        numerator < MAX_JPEG_SCALE_NUMERATOR;
  }
}
package com.intellij.codeInspection.unusedParameters;

import com.intellij.analysis.AnalysisScope;
import com.intellij.codeInsight.daemon.GroupNames;
import com.intellij.codeInspection.InspectionManager;
import com.intellij.codeInspection.InspectionsBundle;
import com.intellij.codeInspection.ex.*;
import com.intellij.codeInspection.reference.*;
import com.intellij.codeInspection.util.XMLExportUtl;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.util.IconLoader;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.PsiReferenceProcessor;
import com.intellij.psi.search.PsiSearchHelper;
import com.intellij.refactoring.changeSignature.ChangeSignatureProcessor;
import com.intellij.refactoring.changeSignature.ParameterInfo;
import org.jdom.Element;
import org.jetbrains.annotations.NotNull;

import java.util.ArrayList;
import java.util.Collection;

/**
 * Global inspection that reports method parameters never referenced anywhere,
 * and offers a quick fix that deletes them via a Change Signature refactoring.
 * Results come from the shared reference graph; this class only refines them.
 */
public class UnusedParametersInspection extends FilteringInspectionTool {
  // Lazily created in getFilter()/getComposer(); reset between inspection runs.
  private UnusedParametersFilter myFilter;
  private UnusedParametersComposer myComposer;

  public UnusedParametersInspection() {
    myQuickFixActions = new QuickFixAction[] {new AcceptSuggested()};
  }

  private QuickFixAction[] myQuickFixActions;

  /**
   * Refines candidate "unused parameter" results when the analysis scope is
   * narrower than the whole project: for each non-private, non-static,
   * non-constructor candidate it searches overriding methods OUTSIDE the scope,
   * and un-flags a parameter as soon as one out-of-scope override references it.
   * Runs inside a read action; the search itself runs under the progress manager.
   */
  public void runInspection(AnalysisScope scope, final InspectionManager manager) {
    // Do additional search of problem elements outside the scope.
    final Runnable action = new Runnable() {
      public void run() {
        // Whole-project scope already saw every reference; nothing to re-check then.
        if (getRefManager().getScope().getScopeType() != AnalysisScope.PROJECT) {
          ProgressManager.getInstance().runProcess(new Runnable() {
            public void run() {
              final UnusedParametersFilter filter = getFilter();
              final PsiSearchHelper helper = PsiManager.getInstance(getContext().getProject()).getSearchHelper();
              getRefManager().iterate(new RefVisitor() {
                public void visitElement(RefEntity refEntity) {
                  // Filter accepts only the methods this inspection flagged.
                  if (refEntity instanceof RefElement && filter.accepts((RefElement)refEntity)) {
                    RefMethod refMethod = (RefMethod) refEntity;
                    PsiMethod psiMethod = (PsiMethod) refMethod.getElement();
                    // Private/static/constructor methods cannot be overridden outside the scope.
                    if (!refMethod.isStatic() && !refMethod.isConstructor() && refMethod.getAccessModifier() != PsiModifier.PRIVATE) {
                      PsiMethod[] derived = helper.findOverridingMethods(psiMethod, GlobalSearchScope.projectScope(getContext().getProject()), true);
                      final ArrayList<RefParameter> unusedParameters = UnusedParametersFilter.getUnusedParameters(refMethod);
                      for (final RefParameter refParameter : unusedParameters) {
                        int idx = refParameter.getIndex();
                        if (refMethod.isAbstract() && derived.length == 0) {
                          // Abstract with no implementations: treat the parameter as (weakly) referenced.
                          refParameter.parameterReferenced(false);
                        }
                        else {
                          // Stop at the first out-of-scope override that references the parameter.
                          final boolean[] found = new boolean[]{false};
                          for (int i = 0; i < derived.length && !found[0]; i++) {
                            if (!getRefManager().getScope().contains(derived[i])) {
                              PsiParameter psiParameter = derived[i].getParameterList().getParameters()[idx];
                              helper.processReferences(new PsiReferenceProcessor() {
                                public boolean execute(PsiReference element) {
                                  refParameter.parameterReferenced(false);
                                  found[0] = true;
                                  return false; // one reference is enough; abort the search
                                }
                              }, psiParameter, helper.getUseScope(psiParameter), false);
                            }
                          }
                        }
                      }
                    }
                  }
                }
              });
            }
          }, null);
        }
      }
    };
    ApplicationManager.getApplication().runReadAction(action);
  }

  /** Returns the (lazily created) filter that selects this inspection's results. */
  public UnusedParametersFilter getFilter() {
    if (myFilter == null) {
      myFilter = new UnusedParametersFilter(this);
    }
    return myFilter;
  }

  /** Drops the cached filter so the next run rebuilds it. */
  protected void resetFilter() {
    myFilter = null;
  }

  /**
   * Serializes each unused-parameter problem under {@code parentNode} as an
   * element with a localized problem class and description.
   */
  public void exportResults(final Element parentNode) {
    final UnusedParametersFilter filter = getFilter();
    getRefManager().iterate(new RefVisitor() {
      public void visitElement(RefEntity refEntity) {
        if (refEntity instanceof RefElement && filter.accepts((RefElement)refEntity)) {
          ArrayList<RefParameter> unusedParameters = UnusedParametersFilter.getUnusedParameters((RefMethod)refEntity);
          for (RefParameter unusedParameter : unusedParameters) {
            Element element = XMLExportUtl.createElement(refEntity, parentNode, -1);
            Element problemClassElement = new Element(InspectionsBundle.message("inspection.export.results.problem.element.tag"));
            problemClassElement.addContent(InspectionsBundle.message("inspection.unused.parameter.export.results"));
            element.addContent(problemClassElement);
            Element descriptionElement = new Element(InspectionsBundle.message("inspection.export.results.description.tag"));
            descriptionElement
              .addContent(InspectionsBundle.message("inspection.unused.parameter.export.results.description", unusedParameter.getName()));
            element.addContent(descriptionElement);
          }
        }
      }
    });
  }

  /** Same quick-fix set regardless of selection: the "delete parameter" action. */
  public QuickFixAction[] getQuickFixes(final RefEntity[] refElements) {
    return myQuickFixActions;
  }

  @NotNull
  public JobDescriptor[] getJobDescriptors() {
    return new JobDescriptor[] {GlobalInspectionContextImpl.BUILD_GRAPH, GlobalInspectionContextImpl.FIND_EXTERNAL_USAGES};
  }

  /**
   * Quick fix that removes the flagged parameters from each selected method
   * via a Change Signature refactoring, then ignores the method in the view.
   */
  private class AcceptSuggested extends QuickFixAction {
    private AcceptSuggested() {
      super(InspectionsBundle.message("inspection.unused.parameter.delete.quickfix"),IconLoader.getIcon("/actions/cancel.png"), null, UnusedParametersInspection.this);
    }

    protected boolean applyFix(RefElement[] refElements) {
      for (RefElement refElement : refElements) {
        if (refElement instanceof RefMethod) {
          RefMethod refMethod = (RefMethod)refElement;
          PsiMethod psiMethod = (PsiMethod)refMethod.getElement();
          if (psiMethod == null) continue; // PSI may be gone (e.g. file deleted)
          ArrayList<PsiElement> psiParameters = new ArrayList<PsiElement>();
          for (final RefParameter refParameter : UnusedParametersFilter.getUnusedParameters(refMethod)) {
            psiParameters.add(refParameter.getElement());
          }
          removeUnusedParameterViaChangeSignature(psiMethod, psiParameters);
          getFilter().ignore(refMethod);
        }
      }
      return true;
    }
  }

  public String getDisplayName() {
    return InspectionsBundle.message("inspection.unused.parameter.display.name");
  }

  public String getGroupDisplayName() {
    return GroupNames.DECLARATION_REDUNDANCY;
  }

  public String getShortName() {
    return "UnusedParameters";
  }

  /** Returns the (lazily created) HTML composer used to render descriptions. */
  public HTMLComposer getComposer() {
    if (myComposer == null) {
      myComposer = new UnusedParametersComposer(getFilter(), this);
    }
    return myComposer;
  }

  /**
   * Rebuilds the method signature keeping only parameters NOT in
   * {@code parametersToDelete}, preserving original order and indices,
   * and runs the Change Signature refactoring to update all call sites.
   */
  private void removeUnusedParameterViaChangeSignature(final PsiMethod psiMethod,
                                                       final Collection<PsiElement> parametersToDelete) {
    ArrayList<ParameterInfo> newParameters = new ArrayList<ParameterInfo>();
    PsiParameter[] oldParameters = psiMethod.getParameterList().getParameters();
    for (int i = 0; i < oldParameters.length; i++) {
      PsiParameter oldParameter = oldParameters[i];
      if (!parametersToDelete.contains(oldParameter)) {
        newParameters.add(new ParameterInfo(i, oldParameter.getName(), oldParameter.getType()));
      }
    }
    ParameterInfo[] parameterInfos = newParameters.toArray(new ParameterInfo[newParameters.size()]);
    ChangeSignatureProcessor csp = new ChangeSignatureProcessor(getContext().getProject(), psiMethod, false, null, psiMethod.getName(), psiMethod.getReturnType(), parameterInfos);
    csp.run();
  }
}
package de.upb.recalys.control;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.logging.Level;
import java.util.logging.Logger;

import javax.swing.SwingUtilities;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;

import org.xml.sax.SAXException;

import de.upb.recalys.model.ExperimentFile;
import de.upb.recalys.model.RCSGraph;
import de.upb.recalys.model.RCSNode;
import de.upb.recalys.model.RCSTask;
import de.upb.recalys.view.GUI;

/**
 * This class defines the main class of this app. It controlls the behavior of
 * this app.
 *
 * @author Roman Kober
 * @version 1.1
 */
public class ReCaLys {

	private boolean complete = false;                 // true once analyse() has run to completion
	private double maxProblemRate = 0.6;              // threshold for systematic-search detection
	private double maxReturnRate = 0.15;              // threshold for systematic-search detection
	private int timeToThink = 100;                    // in milliseconds
	private int nodesToInspect = -1;                  // -1 until computed by analyse()
	private GUI gui;
	private RCSGraph graph;
	private ArrayList<RCSTask> experiment;
	private LinkedList<RCSNode> badList;
	@SuppressWarnings("rawtypes")
	private LinkedList[] hotList;
	private int userCount = 0;

	/**
	 * Constructor: Creates a new ReCaLys-object and shows the GUI on the EDT.
	 */
	public ReCaLys() {
		super();
		final ReCaLys recalys = this;
		SwingUtilities.invokeLater(new Runnable() {
			public void run() {
				gui = new GUI(recalys);
				gui.setVisible(true);
			}
		});
	}

	/**
	 * Main function that starts the app
	 *
	 * @param args
	 *            the command line arguments
	 */
	public static void main(String[] args) {
		ReCaLys recalys = new ReCaLys();
	}

	/**
	 * Builds the Graph based on the imported XML-Structure-File
	 *
	 * @param importFile
	 *            the xml-file that contains the website-structure
	 *
	 * @author Roman Kober
	 */
	public void buildGraphXML(File importFile) {
		graph = new RCSGraph();
		graph.buildGraph(importFile);
		graph.computeDistances();
		graph.buildLevelGraph();
		graph.addNodeDegrees();
		graph.addLeavesCount();
		graph.computeNodesToInspect();
		graph.computeMinimalLatencies(250);
		gui.getIaGraph().init(graph);
		gui.getPieGraph().init(graph);
		gui.setAnalyseMenuEnabled(false);
		gui.setGraphMenuEnabled(true);
	}

	/**
	 * Imports a xml-file that contains the results of an experiment in ReCaPo
	 * and creates an experiment that can be analyzed.
	 *
	 * @param importFile
	 *            file that shall be imported
	 * @author Roman Kober
	 */
	public void importResults(File importFile) {
		graph.resetGraphProperties();
		complete = false;
		experiment = new ArrayList<>();
		userCount = 0;
		SAXParserFactory factory = SAXParserFactory.newInstance();
		SAXParser saxParser;
		try {
			saxParser = factory.newSAXParser();
			ResultsContentHandler handler = new ResultsContentHandler(this);
			saxParser.parse(importFile, handler);
			gui.updateGUI();
			gui.setAnalyseMenuEnabled(true);
			gui.setExportMenuEnabled(true);
			gui.setPieGraphMenuItemsEnabled(true);
		} catch (ParserConfigurationException | SAXException | IOException e) {
			Logger.getLogger(ReCaLys.class.getName()).log(Level.SEVERE, null, e);
		}
	}

	/**
	 * Returns the number of nodes that must be inspected combined for detection
	 * of systematic searching
	 *
	 * @return number of nodes to inspect combined
	 */
	public int getNodesToInspect() {
		return nodesToInspect;
	}

	/**
	 * Sets the maximal problem-rate for the detection of systematic searching
	 *
	 * @param rate maximal problem-rate
	 */
	public void setMaxProblemRate(double rate) {
		maxProblemRate = rate;
	}

	/**
	 * Sets the maximal return-rate for the detection of systematic searching
	 *
	 * @param rate maximal return-rate
	 */
	public void setMaxReturnRate(double rate) {
		maxReturnRate = rate;
	}

	/**
	 * Gives the maximal problem-rate for the detection of systematic searching
	 *
	 * @return the maximal problem-rate
	 */
	public double getMaxProblemRate() {
		return maxProblemRate;
	}

	/**
	 * Gives the maximal return-rate for the detection of systematic searching
	 *
	 * @return the maximal return-rate
	 */
	public double getMaxReturnRate() {
		return maxReturnRate;
	}

	/**
	 * Sets in the time a standard user will max. need to think before choosing
	 * a link
	 *
	 * @param time
	 *            time to think
	 */
	public void setTimeToThink(int time) {
		timeToThink = time;
	}

	/**
	 * Gives the time a standard user will max. need to think before choosing a
	 * link
	 *
	 * @return time to think
	 */
	public int getTimeToThink() {
		return timeToThink;
	}

	/**
	 * Returns the graph that represents the website structure
	 *
	 * @return graph
	 */
	public RCSGraph getGraph() {
		return graph;
	}

	/**
	 * Resets the analysis of the experiment on the given graph
	 */
	public void resetAnalysis() {
		graph.resetAnalysis(experiment.size());
	}

	/**
	 * Starts the analysis of the experiment
	 */
	public void analyse() {
		resetAnalysis();
		graph.computeMinimalLatencies(timeToThink);
		nodesToInspect = graph.computeNodesToInspect();
		for (RCSTask task : experiment) {
			task.resetAnalysis();
			task.detectNodesOnOptimalPaths();
			task.analysePaths(nodesToInspect, maxProblemRate, maxReturnRate);
		}
		badList = graph.getBadList();
		hotList = graph.getHotList();
		complete = true;
		updateGUI();
		updateSystematicSearchLog();
	}

	/**
	 * Saves the whole experiment into an rcs-file
	 *
	 * @param file
	 *            file the experiment shall be saved into
	 *
	 * @deprecated As of Version 1.1 this method is not used anymore, because
	 *             the import process for ReCaPo is much easier and a separate
	 *             import and export mechanism is not needed anymore
	 */
	@Deprecated
	public void saveExperiment(File file) {
		ExperimentFile ef = new ExperimentFile(graph, experiment, maxProblemRate, maxReturnRate, timeToThink,
				nodesToInspect, userCount);
		// BUGFIX: the ObjectOutputStream was never flushed or closed, so its buffered
		// tail could be lost and the saved file truncated; try-with-resources closes
		// both streams (in the right order) even when writeObject fails.
		try (FileOutputStream fos = new FileOutputStream(file);
				ObjectOutputStream oos = new ObjectOutputStream(fos)) {
			oos.writeObject(ef);
		} catch (IOException ex) {
			Logger.getLogger(ReCaLys.class.getName()).log(Level.SEVERE, null, ex);
		}
	}

	/**
	 * Imports an experiment
	 *
	 * @param file
	 *            experiment file
	 *
	 * @deprecated As of Version 1.1 this method is not used anymore, because
	 *             the import process for ReCaPo is much easier and a separate
	 *             import and export mechanism is not needed anymore
	 */
	@Deprecated
	public void loadExperiment(File file) {
		complete = false;
		// BUGFIX: the old finally-block called fos.close() which threw an NPE when
		// opening the file failed, and the ObjectInputStream was never closed at all;
		// try-with-resources handles both failure modes.
		try (FileInputStream fis = new FileInputStream(file);
				ObjectInputStream ois = new ObjectInputStream(fis)) {
			ExperimentFile ef = (ExperimentFile) ois.readObject();
			graph = ef.getGraph();
			experiment = ef.getTasks();
			maxProblemRate = ef.getMaxProblemRate();
			maxReturnRate = ef.getMaxReturnRate();
			nodesToInspect = ef.getNodesToInspect();
			timeToThink = ef.getTimeToThink();
			userCount = ef.getUserCount();
			gui.updateGUI();
			gui.setAnalyseMenuEnabled(true);
			gui.setExportMenuEnabled(true);
			analyse();
		} catch (ClassNotFoundException | IOException ex) {
			Logger.getLogger(ReCaLys.class.getName()).log(Level.SEVERE, null, ex);
		}
	}

	/**
	 * Increases the number of users who attended the experiment by 1
	 */
	public void increaseUserCount() {
		userCount++;
		gui.updateGUI();
	}

	/**
	 * Returns the number of users who attended the experiment
	 *
	 * @return userCount
	 */
	public int getUserCount() {
		return userCount;
	}

	/**
	 * Gives the total coverage of the experiment
	 *
	 * @return coverage of the experiment
	 */
	public double getCoverage() {
		return graph.getCoverage();
	}

	/**
	 * Returns the RCS-Experiment in an ArrayList
	 *
	 * @return experiment
	 */
	public ArrayList<RCSTask> getExperiment() {
		return experiment;
	}

	/**
	 * Gets the solution-rate over all tasks
	 *
	 * @return totalSolutionRate
	 */
	public double getTotalSolutionRate() {
		double tmp = 0;
		for (RCSTask task : experiment)
			tmp += task.getSolutionRate();
		return tmp / experiment.size();
	}

	/**
	 * Gets the BadList computed during the experiment
	 *
	 * @return badList
	 */
	public LinkedList<RCSNode> getBadList() {
		return badList;
	}

	/**
	 * Gets the HotList computed during the experiment
	 *
	 * @return hotList
	 */
	@SuppressWarnings("rawtypes")
	public LinkedList[] getHotList() {
		return hotList;
	}

	/**
	 * Updates the GUI
	 */
	public void updateGUI() {
		gui.updateGUI();
	}

	/**
	 * Returns if the analysis is executed entirely
	 *
	 * @return true if the analysis completed
	 */
	public boolean analysisComplete() {
		return complete;
	}

	/**
	 * Updates the Log in the GUI for the detection of systematic searching and
	 * mirrors a non-empty log to "ssd-log.txt".
	 */
	public void updateSystematicSearchLog() {
		String log = graph.getSystematicSearchingLog();
		gui.setSSDLog(log);
		if (log.equals(""))
			return;
		// try-with-resources guarantees the writer is closed even on error
		try (PrintWriter pw = new PrintWriter(new FileWriter("ssd-log.txt"))) {
			pw.print(log);
			pw.flush();
		} catch (IOException ex) {
			Logger.getLogger(ReCaLys.class.getName()).log(Level.SEVERE, null, ex);
		}
	}

	/**
	 * Exports the logfile for the detection of systematic searching into the
	 * given save directory.
	 *
	 * @param saveDirectory
	 *            the save directory
	 */
	public void exportSystematicSearchLog(String saveDirectory) {
		String log = graph.getSystematicSearchingLog();
		try (PrintWriter pw = new PrintWriter(new FileWriter(saveDirectory))) {
			pw.print(log);
			pw.flush();
		} catch (IOException ex) {
			Logger.getLogger(ReCaLys.class.getName()).log(Level.SEVERE, null, ex);
		}
	}
}
package org.intermine.objectstore.query;

import java.util.ArrayList;
import java.util.List;

/**
 * Object representing a combination of ObjectStoreBags.
 *
 * @author Matthew Wakeling
 */
public class ObjectStoreBagCombination implements QuerySelectable
{
    // List of either Bags or ObjectStoreBagCombinations
    private final List<QuerySelectable> bags = new ArrayList<QuerySelectable>();
    private final int op;
    /** Constant representing a UNION operation */
    public static final int UNION = 879234;
    /** Constant representing an INTERSECTION operation */
    public static final int INTERSECT = 519552;
    /** Constant representing an EXCEPT operation */
    public static final int EXCEPT = 281056;
    /** Constant representing an all but intersect operation */
    public static final int ALLBUTINTERSECT = 853915;

    /**
     * Constructs a new ObjectStoreBagCombination.
     *
     * @param op the type of combination, out of UNION, INTERSECT, and EXCEPT
     * @throws IllegalArgumentException if op is not one of the four operation constants
     */
    public ObjectStoreBagCombination(int op) {
        if ((op != UNION) && (op != INTERSECT) && (op != EXCEPT) && (op != ALLBUTINTERSECT)) {
            throw new IllegalArgumentException("Illegal type: " + op);
        }
        this.op = op;
    }

    /**
     * Adds a bag to this combination
     *
     * @param bag an ObjectStoreBag
     */
    public void addBag(ObjectStoreBag bag) {
        bags.add(bag);
    }

    /**
     * Adds a nested combination to this combination, allowing combinations to
     * be composed recursively.
     *
     * @param combo an ObjectStoreBagCombination to combine with the other operands
     */
    public void addBagCombination(ObjectStoreBagCombination combo) {
        bags.add(combo);
    }

    /**
     * Returns the op of this combination.
     *
     * @return an int
     */
    public int getOp() {
        return op;
    }

    /**
     * Returns the List of operands, each of which is either an ObjectStoreBag
     * or a nested ObjectStoreBagCombination.
     *
     * @return a List of QuerySelectable operands
     */
    public List<QuerySelectable> getBags() {
        return bags;
    }

    /**
     * {@inheritDoc}
     */
    public Class<?> getType() {
        return Integer.class;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean equals(Object o) {
        if (o instanceof ObjectStoreBagCombination) {
            return bags.equals(((ObjectStoreBagCombination) o).bags)
                && (op == ((ObjectStoreBagCombination) o).op);
        }
        return false;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public int hashCode() {
        return bags.hashCode() + op;
    }
}
package org.lockss.extractor;

import java.util.*;
import org.apache.commons.collections4.*;
import org.apache.commons.collections4.map.*;
import org.lockss.util.*;

/**
 * Collection of metadata about a single article or other feature. Consists of
 * two maps that associate one or more string values with a string key. The raw
 * map should hold the raw keys and values extracted from one or more (html,
 * xml, etc.) files; the cooked map holds standard metadata (DOI, ISSN, etc.)
 * associated with well-known keys.
 */
public class ArticleMetadata {

  private static Logger log = Logger.getLogger("ArticleMetadata");

  // Both maps associate a lowercased String key with a Collection of values.
  // The cooked map may contain InvalidValue sentinels (see below) in place of
  // normal String values.
  private MultiValueMap rawMap = new MultiValueMap();
  private MultiValueMap cookedmap = new MultiValueMap();

  // Locale for interpreting values (e.g. dates); null means "use default".
  private Locale locale;

  public ArticleMetadata() {
  }

  /**
   * Set the Locale used to interpret values in this metadata. May only be
   * called before any cooked metadata has been stored.
   *
   * @param locale the Locale to use
   * @throws IllegalStateException if any cooked metadata is already present
   */
  public void setLocale(Locale locale) {
    if (cookedmap.isEmpty()) {
      this.locale = locale;
    } else {
      throw new IllegalStateException(
          "Cannot set locale after storing any cooked metadata");
    }
  }

  /**
   * Return the Locale in which values (e.g., dates) in this metadata should be
   * interpreted. Returns the systemwide default from {@link
   * MetadataUtil#getDefaultLocale()} if no Locale has been explicitly set
   * with {@link #setLocale(Locale)}.
   */
  public Locale getLocale() {
    return locale != null ? locale : MetadataUtil.getDefaultLocale();
  }

  /*
   * Accessors ensure that metadata keys are case-insensitive strings
   * (lowercased).
   */

  // Raw map

  /**
   * Set or add to the value associated with the key in the raw metadata map.
   */
  public void putRaw(String key, String value) {
    rawMap.put(key.toLowerCase(), value);
  }

  /**
   * Set the value associated with the key in the raw metadata map.
   */
  public void putRaw(String key, Map<String, String> value) {
    rawMap.put(key.toLowerCase(), value);
  }

  /**
   * Return the list of raw values associated with a key, or an empty list.
   */
  public List<String> getRawList(String key) {
    return getRawCollection(key);
  }

  /**
   * Return the map of raw values associated with a key, or an empty map if
   * none.
   */
  public Map<String, String> getRawMap(MetadataField field) {
    return getRawMap(field.getKey());
  }

  /**
   * Return the map of raw values associated with a key, or an empty map if
   * none.
   * <p>
   * NOTE(review): unlike the other raw accessors, this looks up the key
   * without lowercasing it — confirm whether that is intentional.
   */
  public Map<String, String> getRawMap(String key) {
    List<Map<String, String>> lst = (List<Map<String, String>>)rawMap.get(key);
    if (lst == null || lst.isEmpty()) {
      return Collections.<String, String> emptyMap();
    }
    return lst.get(0);
  }

  /**
   * Return the first or only raw value associated with a key, else null if
   * none.
   */
  public String getRaw(String key) {
    List<String> lst = getRawCollection(key);
    if (lst.isEmpty()) {
      return null;
    } else {
      return lst.get(0);
    }
  }

  /** Return true if the key has an associated value in the raw map */
  public boolean containsRawKey(String key) {
    return rawMap.containsKey(key.toLowerCase());
  }

  /** Return the set of keys in the raw map */
  public Set<String> rawKeySet() {
    return rawMap.keySet();
  }

  /**
   * Return the raw Entry set.
   * <p>
   * NOTE(review): this exposes the MultiValueMap's own entry set; entry
   * values are the map's internal collections — confirm callers treat it as
   * read-only.
   */
  public Set<Map.Entry<String, List<String>>> rawEntrySet() {
    return rawMap.entrySet();
  }

  /** Return the size of the raw map */
  public int rawSize() {
    return rawMap.size();
  }

  // Shared helper: raw-map lookup that normalizes null/empty to an immutable
  // empty list.
  private List<String> getRawCollection(String key) {
    List<String> res = getMapCollection(rawMap, key);
    if (res == null || res.isEmpty()) {
      return Collections.<String> emptyList();
    }
    return res;
  }

  // Lowercases the key and fetches the value collection; may return null.
  private List<String> getMapCollection(MultiValueMap map, String key) {
    return (List<String>) map.getCollection(key.toLowerCase());
  }

  // Cooked map

  /**
   * Set or add to the value associated with the key. If the field has a
   * validator/normalizer it will be applied to the value first. Returns true
   * iff the value validates and is stored successfully.
   *
   * <h4>Single-valued fields</h4>A valid value will be stored if either no
   * value is already present or the new value is equal to the current value.
   * If a different value is present nothing is stored. <br>
   * A raw value that doesn't validate will be stored as an
   * {@link InvalidValue} along with the validation exception, iff no valid
   * value is already present. (See {@link #hasValidValue(MetadataField)},
   * {@link #hasInvalidValue(MetadataField)} and {@link #get(MetadataField)}.)
   *
   * <h4>Multi-valued fields</h4>A valid value will be added and true
   * returned; an invalid value will not be stored, and false returned. If the
   * field has a splitter it will be invoked to convert the value into a list
   * of values. If there is also a validator/normalizer, it will be invoked on
   * each split value. If any of them fails to validate the behavior is
   * undefined.
   */
  public boolean put(MetadataField field, String value) {
    MetadataException ex = put0(field, value);
    return ex == null;
  }

  /**
   * Set or add to the value associated with the key. If the field has a
   * validator/normalizer it will be applied to the value first. Throws
   * MetadataException if the value does not validate or is not stored
   * successfully.
   *
   * <h4>Single-valued fields</h4>A valid value will be stored if either no
   * value is already present or the new value is equal to the current value.
   * If a different value is present nothing is stored and a
   * MetadataException.CardinalityException is thrown. <br>
   * A raw value that doesn't validate will cause a
   * MetadataException.ValidationException to be thrown. If no valid value is
   * already present, the invalid raw value will be stored as an
   * {@link InvalidValue} along with the validation exception. (See
   * {@link #hasValidValue(MetadataField)},
   * {@link #hasInvalidValue(MetadataField)} and {@link #get(MetadataField)}.)
   *
   * <h4>Multi-valued fields</h4>A valid value will be added. An invalid value
   * will not be added, and a validation exception will be thrown. If the
   * field has a splitter it will be invoked to convert the value into a list
   * of values. If there is also a validator/normalizer, it will be invoked on
   * each split value. If any of them fails to validate the behavior is
   * undefined.
   */
  public void putValid(MetadataField field, String value)
      throws MetadataException {
    MetadataException ex = put0(field, value);
    if (ex != null) {
      throw ex;
    }
  }

  /** Store the value in the ArticleMetadata unless it's null or the field
   * already has a valid value. I.e., store the value iff it's better than
   * the one that's already there.
   * @param field The MetadataField into which to store
   * @param value the value to store
   * @return true if the value was stored
   */
  public boolean putIfBetter(MetadataField field, String value) {
    if (value != null && !hasValidValue(field)) {
      put(field, value);
      return true;
    }
    return false;
  }

  /** Store the value in the ArticleMetadata, replacing any previous value.
   * @param field The MetadataField into which to store
   * @param value the value to store
   * @return true (unconditionally; any error from putSingle is discarded)
   */
  public boolean replace(MetadataField field, String value) {
    putSingle(field, value, true);
    return true;
  }

  // Dispatch on the field's cardinality; returns null on success, else the
  // exception describing the failure (callers decide whether to throw it).
  private MetadataException put0(MetadataField field, String value) {
    switch (field.getCardinality()) {
    case Single:
      return putSingle(field, value, false);
    case Multi:
      return putMulti(field, value);
    }
    return new MetadataException("Unknown field type: "
        + field.getCardinality()).setField(field);
  }

  // Cooked-map keys are the field key, lowercased.
  private String getKey(MetadataField field) {
    return field.getKey().toLowerCase();
  }

  /**
   * Store into a single-valued field. Returns null on success, else the
   * exception describing why the value was rejected. On a validation failure
   * the raw value is recorded as an {@link InvalidValue} iff the key has no
   * value yet. {@code force} (used by {@link #replace}) overwrites any
   * existing value.
   */
  private MetadataException putSingle(MetadataField field, final String value,
                                      boolean force) {
    String key = getKey(field);
    MetadataException valEx = null;   // NOTE(review): unused local
    String normVal = null;
    try {
      if (field.hasExtractor()) {
        try {
          // Extract a sub-value first, then validate/normalize it.
          String val = field.extract(this, value);
          normVal = field.validate(this, val);
        } catch (IndexOutOfBoundsException e) {
          // Extraction failure is reported as a cardinality exception.
          MetadataException ex =
              new MetadataException.CardinalityException(value,
                  "Attempt to reset single-valued key: " + key
                  + " to " + value);
          ex.setField(field);
          ex.setNormalizedValue(normVal);
          ex.setRawValue(value);
          throw ex;
        }
      } else {
        normVal = field.validate(this, value);
      }
      List curval = getCollection(key);
      if (curval.isEmpty()) {
        // First value for this key: store it.
        cookedmap.put(key, normVal);
        return null;
      } else if (force || isInvalid(curval.get(0))) {
        // Replace an existing (or invalid) value.
        cookedmap.remove(key);
        cookedmap.put(key, normVal);
        return null;
      } else if (value.equals(curval.get(0))) {
        // Re-storing the same value is a silent no-op.
        return null;
      }
      // A different valid value is already present: refuse.
      MetadataException ex =
          new MetadataException.CardinalityException(value,
              "Attempt to reset single-valued key: " + key + " to " + value);
      ex.setField(field);
      ex.setNormalizedValue(normVal);
      ex.setRawValue(value);
      return ex;
    } catch (MetadataException ex) {
      // Validation (or extraction) failed: remember the bad raw value only if
      // nothing valid is stored yet.
      if (getCollection(key).isEmpty()) {
        InvalidValue ival = new InvalidValue(value, ex);
        cookedmap.put(key, ival);
      }
      ex.setField(field);
      ex.setRawValue(value);
      return ex;
    }
  }

  /**
   * Store into a multi-valued field. If the field has a splitter, each split
   * element is validated and stored independently; the first validation
   * exception (if any) is returned after all elements are processed.
   * Returns null on success.
   */
  private MetadataException putMulti(MetadataField field, String value) {
    String key = getKey(field);
    MetadataException valEx = null;   // NOTE(review): unused local
    String normVal;
    if (field.hasSplitter()) {
      MetadataException elemEx = null;
      for (String elem : field.split(this, value)) {
        try {
          String normElem = field.validate(this, elem);
          cookedmap.put(key, normElem);
        } catch (MetadataException.ValidationException ex) {
          // Remember only the first failure; keep storing the rest.
          if (elemEx == null) {
            elemEx = ex;
          }
        }
      }
      return elemEx;
    }
    try {
      normVal = field.validate(this, value);
      cookedmap.put(key, normVal);
      return null;
    } catch (MetadataException.ValidationException ex) {
      ex.setRawValue(value);
      ex.setField(field);
      return ex;
    }
  }

  /**
   * Return true iff the field has either a valid value or an invalid value
   */
  public boolean hasValue(MetadataField field) {
    List curval = getCollection(getKey(field));
    return !curval.isEmpty();
  }

  /** Return true iff the field has a valid value */
  public boolean hasValidValue(MetadataField field) {
    List curval = getCollection(getKey(field));
    return !curval.isEmpty() && !isInvalid(curval.get(0));
  }

  /** Return true iff the field has a value, which is invalid */
  public boolean hasInvalidValue(MetadataField field) {
    List curval = getCollection(getKey(field));
    return !curval.isEmpty() && isInvalid(curval.get(0));
  }

  // A cooked value is valid unless it is an InvalidValue sentinel.
  private boolean isValid(Object obj) {
    return !(obj instanceof InvalidValue);
  }

  private boolean isInvalid(Object obj) {
    return obj instanceof InvalidValue;
  }

  /** If the field has an invalid value, return the {@link InvalidValue}
   * describing it, else null */
  public InvalidValue getInvalid(MetadataField field) {
    return getInvalid(field.getKey());
  }

  /**
   * If the key has an invalid value, return the {@link InvalidValue}
   * describing it, else null
   */
  private InvalidValue getInvalid(String key) {
    List lst = getCollection(key);
    if (lst.isEmpty()) {
      return null;
    }
    Object res = lst.get(0);
    if (res instanceof InvalidValue) {
      return (InvalidValue) res;
    }
    return null;
  }

  /** Return the value associated with a key, else null if no valid value */
  public String get(MetadataField field) {
    return get(field.getKey());
  }

  /**
   * Return the list of values associated with a key, or an empty list if
   * none. An InvalidValue at the head of the list counts as "none".
   */
  public List<String> getList(String key) {
    List lst = getCollection(key);
    if (lst.isEmpty() || lst.get(0) instanceof InvalidValue) {
      return Collections.<String> emptyList();
    }
    return lst;
  }

  /**
   * Return the list of values associated with a key, or an empty list if none
   */
  public List<String> getList(MetadataField field) {
    return getList(field.getKey());
  }

  /** Return the value associated with a key, else null if no valid value */
  public String get(String key) {
    return get(key, null);
  }

  /**
   * Return the value associated with a key, else dfault if no valid value.
   * <p>
   * NOTE(review): when a value is present but is an InvalidValue (or any
   * non-String), this returns null rather than dfault — confirm that is the
   * intended contract.
   */
  private String get(String key, String dfault) {
    List lst = getCollection(key);
    if (lst.isEmpty()) {
      return dfault;
    }
    Object res = lst.get(0);
    if (res instanceof String) {
      return (String) res;
    }
    if (res instanceof InvalidValue) {
      return null;
    }
    return null;
  }

  /** Return the keyset of the cooked map */
  public Set<String> keySet() {
    return cookedmap.keySet();
  }

  // Simple entrySet would return keys of invalid values, not clear that's good.
  // public Set<Map.Entry<String,List<String>>> entrySet() {
  // return cookedmap.entrySet();

  /** Return the size of the cooked map */
  public int size() {
    return cookedmap.size();
  }

  /** Return true if the cooked map is empty */
  public boolean isEmpty() {
    return cookedmap.isEmpty();
  }

  /**
   * Copies values from the raw metadata map to the cooked map according to
   * the supplied map. Any MetadataExceptions thrown while storing into the
   * cooked map are returned in a List.
   *
   * @param rawToCooked
   *          maps raw key -> cooked MetadataField.
   */
  public List<MetadataException> cook(MultiMap rawToCooked) {
    List<MetadataException> errors = new ArrayList<MetadataException>();
    for (Map.Entry ent :
         (Collection<Map.Entry<String, Collection<MetadataField>>>)
         (rawToCooked.entrySet())) {
      String rawKey = (String) ent.getKey();
      Collection<MetadataField> fields = (Collection) ent.getValue();
      for (MetadataField field : fields) {
        cookField(rawKey, field, errors);
      }
    }
    return errors;
  }

  /**
   * Copies values from the raw metadata map to the cooked map according to
   * the supplied map. Any MetadataExceptions thrown while storing into the
   * cooked map are returned in a List. (Overload for the legacy
   * commons-collections 3 MultiMap.)
   *
   * @param rawToCooked
   *          maps raw key -> cooked MetadataField.
   */
  public List<MetadataException>
      cook(org.apache.commons.collections.MultiMap rawToCooked) {
    List<MetadataException> errors = new ArrayList<MetadataException>();
    for (Map.Entry ent :
         (Collection<Map.Entry<String, Collection<MetadataField>>>)
         (rawToCooked.entrySet())) {
      String rawKey = (String) ent.getKey();
      Collection<MetadataField> fields = (Collection) ent.getValue();
      for (MetadataField field : fields) {
        cookField(rawKey, field, errors);
      }
    }
    return errors;
  }

  // Copy every raw value for rawKey into the given cooked field, collecting
  // (not throwing) any MetadataExceptions.
  private void cookField(String rawKey, MetadataField field,
                         List<MetadataException> errors) {
    List<String> rawlst = getRawCollection(rawKey);
    if (!rawlst.isEmpty()) {
      for (String rawval : rawlst) {
        try {
          putValid(field, rawval);
        } catch (MetadataException ex) {
          errors.add(ex);
        }
      }
    }
  }

  // Look up the registered MetadataField for a key, else synthesize a
  // default descriptor for it.
  private MetadataField findField(String key) {
    key = key.toLowerCase();
    MetadataField res = MetadataField.findField(key);
    return (res != null) ? res : new MetadataField.Default(key);
  }

  // Cooked-map lookup normalizing null/empty to the shared empty list.
  // Untyped because entries may be Strings or InvalidValue sentinels.
  private List getCollection(String key) {
    List<String> res = getMapCollection(cookedmap, key);
    if (res == null || res.isEmpty()) {
      return Collections.EMPTY_LIST;
    }
    return res;
  }

  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("[md:");
    for (String key : keySet()) {
      sb.append(" [");
      sb.append(key);
      sb.append(": ");
      List lst = getCollection(key);
      if (lst.isEmpty()) {
        sb.append("(null)");
      } else if (lst.get(0) instanceof InvalidValue) {
        sb.append(lst.get(0));
      } else {
        sb.append(lst);
      }
      sb.append("]");
    }
    return sb.toString();
  }

  /** Return a pretty printed String */
  public String ppString(int indent) {
    StringBuilder sb = new StringBuilder();
    String tab = StringUtil.tab(indent);
    sb.append(tab);
    if (cookedmap.isEmpty()) {
      sb.append("Metadata (empty)\n");
    } else {
      sb.append("Metadata\n");
      dumpMap(sb, cookedmap, indent + 2);
    }
    sb.append(tab);
    if (rawMap.isEmpty()) {
      sb.append("Raw Metadata (empty)\n");
    } else {
      sb.append("Raw Metadata\n");
      dumpMap(sb, rawMap, indent + 2);
    }
    return sb.toString();
  }

  // Pretty-print one map, keys sorted case-independently, multi-values
  // rendered as a "; "-separated bracketed list.
  private void dumpMap(StringBuilder sb, MultiValueMap map, int indent) {
    String tab = StringUtil.tab(indent);
    for (String key :
         StringUtil.caseIndependentSortedSet((Set<String>)map.keySet())) {
      sb.append(tab);
      sb.append(key);
      sb.append(": ");
      List lst = getMapCollection(map, key);
      if (lst.isEmpty()) {
        sb.append("(null)");
      } else if (lst.size() == 1 || lst.get(0) instanceof InvalidValue) {
        sb.append(lst.get(0));
      } else {
        sb.append("[");
        sb.append(StringUtil.separatedString(lst, "; "));
        sb.append("]");
      }
      sb.append("\n");
    }
  }

  /**
   * Record of a failed attempt to store a value in the cooked map, either
   * because the value didn't validate or a second store isn't allowed in a
   * Single cardinality field
   */
  public static class InvalidValue {
    private String rawValue;
    private MetadataException ex;

    public InvalidValue(String rawValue, MetadataException ex) {
      this.rawValue = rawValue;
      this.ex = ex;
    }

    /** Return the raw value that was attempted to be stored. */
    public String getRawValue() {
      return rawValue;
    }

    /** Return the exception thrown by the validator. */
    public MetadataException getException() {
      return ex;
    }

    public String toString() {
      StringBuilder sb = new StringBuilder();
      sb.append("[inv: ");
      sb.append(rawValue);
      sb.append(", ");
      sb.append(ex.toString());
      sb.append("]");
      return sb.toString();
    }
  }
}
package org.exist.indexing.lucene;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Stack;
import java.util.TreeMap;
import java.util.TreeSet;

import org.apache.log4j.Logger;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermDocs;
import org.apache.lucene.index.TermEnum;
import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.HitCollector;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.exist.collections.Collection;
import org.exist.dom.AttrImpl;
import org.exist.dom.DocumentImpl;
import org.exist.dom.DocumentSet;
import org.exist.dom.ElementImpl;
import org.exist.dom.Match;
import org.exist.dom.NewArrayNodeSet;
import org.exist.dom.NodeProxy;
import org.exist.dom.NodeSet;
import org.exist.dom.QName;
import org.exist.dom.StoredNode;
import org.exist.dom.SymbolTable;
import org.exist.dom.TextImpl;
import org.exist.indexing.AbstractStreamListener;
import org.exist.indexing.IndexController;
import org.exist.indexing.IndexWorker;
import org.exist.indexing.MatchListener;
import org.exist.indexing.OrderedValuesIndex;
import org.exist.indexing.QNamedKeysIndex;
import org.exist.indexing.StreamListener;
import org.exist.numbering.NodeId;
import org.exist.storage.DBBroker;
import org.exist.storage.IndexSpec;
import org.exist.storage.NodePath;
import org.exist.storage.txn.Txn;
import org.exist.util.ByteConversion;
import org.exist.util.DatabaseConfigurationException;
import org.exist.util.Occurrences;
import org.exist.util.XMLString;
import org.exist.xquery.Expression;
import org.exist.xquery.XQueryContext;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;

/**
 * Index worker that maintains a Lucene full-text index for eXist. One Lucene
 * document is created per indexed XML node, identified by two fields:
 * "docId" (eXist document id as a string) and "nodeId" (the node id in
 * binary form). Content is indexed into a field whose name encodes the
 * node's qualified name (see {@link #encodeQName}).
 */
public class LuceneIndexWorker implements OrderedValuesIndex, QNamedKeysIndex {

    private static final Logger LOG = Logger.getLogger(LuceneIndexWorker.class);

    private LuceneIndex index;
    private IndexController controller;
    private LuceneMatchListener matchListener = null;
    private DBBroker broker;
    // Document currently being indexed/removed (set via setDocument).
    private DocumentImpl currentDoc = null;
    // One of the StreamListener mode constants (STORE, REMOVE_*, UNKNOWN).
    private int mode = 0;
    private LuceneConfig config;
    // Stack of TextExtractors, one per matching ancestor element while
    // streaming a document in STORE mode.
    private Stack contentStack = null;
    // NodeIds queued for deletion in REMOVE_SOME_NODES mode.
    private Set nodesToRemove = null;
    // PendingDoc instances buffered until flushed by write().
    private List nodesToWrite = null;
    private int cachedNodesSize = 0;
    // Flush threshold for the pending-node buffer (in characters).
    private int maxCachedNodesSize = 4096 * 1024;

    public LuceneIndexWorker(LuceneIndex parent, DBBroker broker) {
        this.index = parent;
        this.broker = broker;
    }

    public String getIndexId() {
        return LuceneIndex.ID;
    }

    public String getIndexName() {
        return index.getIndexName();
    }

    /**
     * Parse the collection.xconf nodes into a LuceneConfig for this worker.
     */
    public Object configure(IndexController controller, NodeList configNodes,
            Map namespaces) throws DatabaseConfigurationException {
        this.controller = controller;
        LOG.debug("Configuring lucene index");
        config = new LuceneConfig(configNodes, namespaces);
        return config;
    }

    /**
     * Flush pending work according to the current mode: write buffered
     * nodes, remove the whole document, or remove selected nodes.
     */
    public void flush() {
        switch (mode) {
            case StreamListener.STORE :
                write();
                break;
            case StreamListener.REMOVE_ALL_NODES :
                removeDocument(currentDoc.getDocId());
                break;
            case StreamListener.REMOVE_SOME_NODES :
                removeNodes();
                break;
        }
    }

    public void setDocument(DocumentImpl document) {
        setDocument(document, StreamListener.UNKNOWN);
    }

    /**
     * Select the document to operate on and reset per-document state. The
     * index configuration is re-read from the document's collection.
     */
    public void setDocument(DocumentImpl document, int newMode) {
        currentDoc = document;
        //config = null;
        contentStack = null;
        IndexSpec indexConf =
            document.getCollection().getIndexConfiguration(broker);
        if (indexConf != null)
            config = (LuceneConfig) indexConf.getCustomIndexSpec(LuceneIndex.ID);
        mode = newMode;
    }

    /**
     * Set the operation mode and (re)initialize the buffers it needs.
     */
    public void setMode(int mode) {
        this.mode = mode;
        switch (mode) {
            case StreamListener.STORE :
                if (nodesToWrite == null)
                    nodesToWrite = new ArrayList();
                else
                    nodesToWrite.clear();
                cachedNodesSize = 0;
                break;
            case StreamListener.REMOVE_SOME_NODES :
                nodesToRemove = new TreeSet();
                break;
        }
    }

    public DocumentImpl getDocument() {
        return currentDoc;
    }

    public int getMode() {
        return this.mode;
    }

    /**
     * Determine the top-most ancestor that needs to be reindexed when the
     * given node changes, or null if no configured index path covers it.
     */
    public StoredNode getReindexRoot(StoredNode node, NodePath path,
            boolean includeSelf) {
        if (node.getNodeType() == Node.ATTRIBUTE_NODE)
            return null;
        if (config == null)
            return null;
        NodePath p = new NodePath(path);
        boolean reindexRequired = false;
        if (node.getNodeType() == Node.ELEMENT_NODE && !includeSelf)
            p.removeLastComponent();
        // First pass: does any prefix of the path match a configured index?
        for (int i = 0; i < p.length(); i++) {
            if (config.matches(p)) {
                reindexRequired = true;
                break;
            }
            p.removeLastComponent();
        }
        if (reindexRequired) {
            // Second pass: walk up the ancestors to find the highest match.
            p = new NodePath(path);
            StoredNode topMost = null;
            StoredNode currentNode = node;
            if (currentNode.getNodeType() != Node.ELEMENT_NODE)
                currentNode = currentNode.getParentStoredNode();
            while (currentNode != null) {
                if (config.matches(p))
                    topMost = currentNode;
                currentNode = currentNode.getParentStoredNode();
                p.removeLastComponent();
            }
            return topMost;
        }
        return null;
    }

    private StreamListener listener = new LuceneStreamListener();

    public StreamListener getListener() {
        return listener;
    }

    /**
     * Return a MatchListener for highlighting, or null if the proxy carries
     * no match produced by this index. The listener instance is reused.
     */
    public MatchListener getMatchListener(DBBroker broker, NodeProxy proxy) {
        boolean needToFilter = false;
        Match nextMatch = proxy.getMatches();
        while (nextMatch != null) {
            if (nextMatch.getIndexId() == LuceneIndex.ID) {
                needToFilter = true;
                break;
            }
            nextMatch = nextMatch.getNextMatch();
        }
        if (!needToFilter)
            return null;
        if (matchListener == null)
            matchListener = new LuceneMatchListener(index, broker, proxy);
        else
            matchListener.reset(broker, proxy);
        return matchListener;
    }

    /**
     * Delete every Lucene document belonging to the given eXist document id.
     * I/O errors are logged, not propagated.
     */
    protected void removeDocument(int docId) {
        IndexReader reader = null;
        try {
            reader = index.getWritingReader();
            Term dt = new Term("docId", Integer.toString(docId));
            reader.deleteDocuments(dt);
            reader.flush();
        } catch (IOException e) {
            LOG.warn("Error while removing lucene index: " + e.getMessage(), e);
        } finally {
            index.releaseWritingReader(reader);
        }
    }

    /**
     * Delete the index entries of every document in the given collection.
     * I/O errors are logged, not propagated.
     */
    public void removeCollection(Collection collection, DBBroker broker) {
        if (LOG.isDebugEnabled())
            LOG.debug("Removing collection " + collection.getURI());
        IndexReader reader = null;
        try {
            reader = index.getWritingReader();
            for (Iterator i = collection.iterator(broker); i.hasNext(); ) {
                DocumentImpl doc = (DocumentImpl) i.next();
                Term dt = new Term("docId", Integer.toString(doc.getDocId()));
                TermDocs td = reader.termDocs(dt);
                while (td.next()) {
                    reader.deleteDocument(td.doc());
                }
            }
            reader.flush();
        } catch (IOException e) {
            LOG.warn("Error while removing lucene index: " + e.getMessage(), e);
        } finally {
            index.releaseWritingReader(reader);
        }
        if (LOG.isDebugEnabled())
            LOG.debug("Collection removed.");
    }

    /**
     * Remove specific nodes from the index. This method is used for node
     * updates and called from flush() if the worker is in
     * {@link StreamListener#REMOVE_SOME_NODES} mode. Scans all index entries
     * of the current document and deletes those whose node id was queued in
     * {@code nodesToRemove}.
     */
    protected void removeNodes() {
        if (nodesToRemove == null)
            return;
        IndexReader reader = null;
        try {
            reader = index.getWritingReader();
            Term dt = new Term("docId", Integer.toString(currentDoc.getDocId()));
            TermDocs docsEnum = reader.termDocs(dt);
            while (docsEnum.next()) {
                Document doc = reader.document(docsEnum.doc());
                NodeId nodeId = readNodeId(doc);
                if (nodesToRemove.contains(nodeId)) {
                    reader.deleteDocument(docsEnum.doc());
                }
            }
            nodesToRemove = null;
            reader.flush();
        } catch (IOException e) {
            LOG.warn("Error while deleting lucene index entries: "
                     + e.getMessage(), e);
        } finally {
            index.releaseWritingReader(reader);
        }
    }

    /**
     * Query the index. Returns a node set containing all matching nodes. Each
     * node in the node set has a
     * {@link org.exist.indexing.lucene.LuceneIndexWorker.LuceneMatch}
     * element attached, which stores the score and a link to the query which
     * generated it.
     *
     * @param context current XQuery context
     * @param contextId current context id, identify to track the position
     *        inside nested XPath predicates
     * @param docs query will be restricted to documents in this set
     * @param contextSet if specified, returned nodes will be descendants of
     *        the nodes in this set
     * @param qnames query will be restricted to nodes with the qualified
     *        names given here; if null/empty, all indexed qnames are queried
     * @param queryStr a lucene query string
     * @param axis which node is returned: the node in which a match was found
     *        or the corresponding ancestor from the contextSet
     * @return node set containing all matching nodes
     *
     * @throws IOException on index access failure
     * @throws ParseException if queryStr cannot be parsed
     */
    public NodeSet query(XQueryContext context, int contextId, DocumentSet docs,
            NodeSet contextSet, List qnames, String queryStr, int axis)
            throws IOException, ParseException {
        if (qnames == null || qnames.isEmpty())
            qnames = getDefinedIndexes();
        NodeSet resultSet = new NewArrayNodeSet();
        boolean returnAncestor = axis == NodeSet.ANCESTOR;
        IndexSearcher searcher = null;
        try {
            searcher = index.getSearcher();
            // Run the query once per qname field; results accumulate into
            // resultSet via the collector.
            for (int i = 0; i < qnames.size(); i++) {
                QName qname = (QName) qnames.get(i);
                String field = encodeQName(qname);
                Analyzer analyzer = getAnalyzer(qname, context.getBroker(), docs);
                QueryParser parser = new QueryParser(field, analyzer);
                Query query = parser.parse(queryStr);
                searcher.search(query, new LuceneHitCollector(searcher,
                        contextId, docs, contextSet, resultSet, returnAncestor,
                        query));
            }
        } finally {
            index.releaseSearcher(searcher);
        }
        return resultSet;
    }

    /**
     * Collects Lucene hits and converts them into eXist NodeProxys, attaching
     * a LuceneMatch to each, honoring the optional context set / ancestor
     * axis semantics of {@link #query}.
     */
    private class LuceneHitCollector extends HitCollector {

        private IndexSearcher searcher;
        private int contextId;
        private DocumentSet docs;
        private NodeSet contextSet;
        private NodeSet resultSet;
        private boolean returnAncestor;
        private Query query;

        private LuceneHitCollector(IndexSearcher searcher, int contextId,
                DocumentSet docs, NodeSet contextSet, NodeSet resultSet,
                boolean returnAncestor, Query query) {
            this.searcher = searcher;
            this.contextId = contextId;
            this.docs = docs;
            this.contextSet = contextSet;
            this.resultSet = resultSet;
            this.returnAncestor = returnAncestor;
            this.query = query;
        }

        public void collect(int i, float score) {
            try {
                Document doc = searcher.doc(i);
                Field fDocId = doc.getField("docId");
                int docId = Integer.parseInt(fDocId.stringValue());
                DocumentImpl storedDocument = docs.getDoc(docId);
                // Skip hits from documents outside the restricted set.
                if (storedDocument == null)
                    return;
                NodeId nodeId = readNodeId(doc);
                NodeProxy storedNode = new NodeProxy(storedDocument, nodeId);
                // if a context set is specified, we can directly check if the
                // matching node is a descendant of one of the nodes
                // in the context set.
                if (contextSet != null) {
                    int sizeHint = contextSet.getSizeHint(storedDocument);
                    if (returnAncestor) {
                        NodeProxy parentNode = contextSet.parentWithChild(
                                storedNode, false, true,
                                NodeProxy.UNKNOWN_NODE_LEVEL);
                        if (parentNode != null) {
                            LuceneMatch match =
                                new LuceneMatch(contextId, nodeId, query);
                            match.setScore(score);
                            parentNode.addMatch(match);
                            resultSet.add(parentNode, sizeHint);
                            if (Expression.NO_CONTEXT_ID != contextId) {
                                parentNode.deepCopyContext(storedNode, contextId);
                            } else
                                parentNode.copyContext(storedNode);
                        }
                    } else {
                        LuceneMatch match =
                            new LuceneMatch(contextId, nodeId, query);
                        match.setScore(score);
                        storedNode.addMatch(match);
                        resultSet.add(storedNode, sizeHint);
                    }
                } else {
                    LuceneMatch match =
                        new LuceneMatch(contextId, nodeId, query);
                    match.setScore(score);
                    storedNode.addMatch(match);
                    resultSet.add(storedNode);
                }
            } catch (IOException e) {
                // NOTE(review): swallowed with printStackTrace; consider
                // logging via LOG instead.
                e.printStackTrace();
            }
        }
    }

    /**
     * Decode the binary "nodeId" field of a Lucene document back into an
     * eXist NodeId (first two bytes hold the unit count).
     */
    private NodeId readNodeId(Document doc) {
        byte[] temp;
        Field fNodeId = doc.getField("nodeId");
        temp = fNodeId.binaryValue();
        int units = ByteConversion.byteToShort(temp, 0);
        NodeId nodeId = index.getBrokerPool().getNodeFactory()
                .createFromData(units, temp, 2);
        return nodeId;
    }

    /**
     * Check index configurations for all collection in the given DocumentSet
     * and return a list of QNames, which have indexes defined on them.
     * (Implementation note: actually derived from the field names present in
     * the index reader, skipping the internal "docId" field.)
     *
     * @return List of QName objects on which indexes are defined
     */
    private List getDefinedIndexes() {
        List indexes = new ArrayList(20);
        IndexReader reader = null;
        try {
            reader = index.getReader();
            java.util.Collection fields =
                reader.getFieldNames(IndexReader.FieldOption.INDEXED);
            for (Iterator i = fields.iterator(); i.hasNext(); ) {
                String field = (String) i.next();
                if (!"docId".equals(field))
                    indexes.add(decodeQName(field));
            }
        } catch (IOException e) {
            // NOTE(review): swallowed with printStackTrace; consider LOG.warn.
            e.printStackTrace();
        } finally {
            index.releaseReader(reader);
        }
        return indexes;
    }

    /**
     * Find the analyzer configured for the given qname by scanning the
     * collections of the document set; falls back to the index default.
     */
    private Analyzer getAnalyzer(QName qname, DBBroker broker,
            DocumentSet docs) {
        for (Iterator i = docs.getCollectionIterator(); i.hasNext(); ) {
            Collection collection = (Collection) i.next();
            IndexSpec idxConf = collection.getIndexConfiguration(broker);
            if (idxConf != null) {
                LuceneConfig config =
                    (LuceneConfig) idxConf.getCustomIndexSpec(LuceneIndex.ID);
                if (config != null) {
                    Analyzer analyzer = config.getAnalyzer(qname);
                    if (analyzer != null)
                        return analyzer;
                }
            }
        }
        return index.getDefaultAnalyzer();
    }

    public boolean checkIndex(DBBroker broker) {
        return false;   //To change body of implemented methods use File | Settings | File Templates.
    }

    /**
     * Scan the index terms for the given qnames (or all defined qnames) and
     * return occurrence statistics, optionally restricted by a start/end
     * value hint and a context node set.
     */
    public Occurrences[] scanIndex(XQueryContext context, DocumentSet docs,
            NodeSet contextSet, Map hints) {
        List qnames = hints == null ? null : (List)hints.get(QNAMES_KEY);
        if (qnames == null || qnames.isEmpty())
            qnames = getDefinedIndexes();
        //Expects a StringValue
        Object start = hints == null ? null : hints.get(START_VALUE);
        Object end = hints == null ? null : hints.get(END_VALUE);
        TreeMap map = new TreeMap();
        IndexReader reader = null;
        try {
            reader = index.getReader();
            for (int i = 0; i < qnames.size(); i++) {
                QName qname = (QName) qnames.get(i);
                String field = encodeQName(qname);
                TermEnum terms;
                if (start == null)
                    terms = reader.terms(new Term(field, ""));
                else
                    terms = reader.terms(new Term(field, start.toString()));
                if (terms == null)
                    continue;
                Term term;
                do {
                    term = terms.term();
                    if (term != null && term.field().equals(field)) {
                        boolean include = true;
                        if (end != null) {
                            // NOTE(review): this compares against start, not
                            // end — looks like an end-bound bug; confirm the
                            // intended range semantics before changing.
                            if (term.text().compareTo(start.toString()) > 0)
                                include = false;
                        } else if (start != null
                                && !term.text().startsWith(start.toString()))
                            include = false;
                        if (include) {
                            TermDocs docsEnum = reader.termDocs(term);
                            while (docsEnum.next()) {
                                if (reader.isDeleted(docsEnum.doc()))
                                    continue;
                                Document doc = reader.document(docsEnum.doc());
                                Field fDocId = doc.getField("docId");
                                int docId =
                                    Integer.parseInt(fDocId.stringValue());
                                DocumentImpl storedDocument = docs.getDoc(docId);
                                if (storedDocument == null)
                                    continue;
                                if (contextSet != null) {
                                    NodeId nodeId = readNodeId(doc);
                                    NodeProxy parentNode =
                                        contextSet.parentWithChild(
                                            storedDocument, nodeId, false, true);
                                    include = (parentNode != null);
                                }
                                if (include) {
                                    Occurrences oc = (Occurrences) map.get(term);
                                    if (oc == null) {
                                        oc = new Occurrences(term.text());
                                        map.put(term, oc);
                                    }
                                    oc.addDocument(storedDocument);
                                    oc.addOccurrences(docsEnum.freq());
                                }
                            }
                            docsEnum.close();
                        }
                    }
                } while (terms.next());
                terms.close();
            }
        } catch (IOException e) {
            LOG.warn("Error while scanning lucene index entries: "
                     + e.getMessage(), e);
        } finally {
            index.releaseReader(reader);
        }
        Occurrences[] occur = new Occurrences[map.size()];
        return (Occurrences[]) map.values().toArray(occur);
    }

    /**
     * Adds the passed character sequence to the lucene index. We create one
     * lucene document per XML node, using 2 fields to identify the node:
     *
     * <ul>
     * <li>docId: eXist-internal document id of the node, stored as string.</li>
     * <li>nodeId: the id of the node, stored in binary compressed form.</li>
     * </ul>
     *
     * The text is indexed into a field whose name encodes the qualified name
     * of the node. The qualified name is stored as a hex sequence pointing
     * into the global symbol table.
     *
     * @param nodeId id of the node being indexed
     * @param qname qualified name of the node
     * @param path node path, used to pick the configured analyzer
     * @param content the text to index
     * @param boost per-node boost (&lt;= 0 means "use config boost")
     */
    protected void indexText(NodeId nodeId, QName qname, NodePath path,
            CharSequence content, float boost) {
        // NOTE(review): bare RuntimeException with no message — consider an
        // IllegalArgumentException with context.
        if (path.length() == 0)
            throw new RuntimeException();
        PendingDoc pending = new PendingDoc(nodeId, content, path, qname, boost);
        nodesToWrite.add(pending);
        cachedNodesSize += content.length();
        // Flush the buffer once it exceeds the configured character budget.
        if (cachedNodesSize > maxCachedNodesSize)
            write();
    }

    /**
     * One buffered node awaiting indexing; captures the analyzer resolved
     * from the node path at creation time.
     */
    private class PendingDoc {
        NodeId nodeId;
        CharSequence text;
        QName qname;
        Analyzer analyzer;
        float boost;

        private PendingDoc(NodeId nodeId, CharSequence text, NodePath path,
                QName qname, float boost) {
            this.nodeId = nodeId;
            this.text = text;
            this.qname = qname;
            this.analyzer = config.getAnalyzer(path);
            this.boost = boost;
        }
    }

    /**
     * Write all buffered PendingDocs to the Lucene index and reset the
     * buffer. I/O errors are logged, not propagated.
     */
    private void write() {
        if (nodesToWrite == null || nodesToWrite.size() == 0)
            return;
        IndexWriter writer = null;
        try {
            writer = index.getWriter();
            for (int i = 0; i < nodesToWrite.size(); i++) {
                PendingDoc pending = (PendingDoc) nodesToWrite.get(i);
                Document doc = new Document();
                if (pending.boost > 0)
                    doc.setBoost(pending.boost);
                else if (config.getBoost() > 0)
                    doc.setBoost(config.getBoost());

                // store the node id
                int nodeIdLen = pending.nodeId.size();
                byte[] data = new byte[nodeIdLen + 2];
                ByteConversion.shortToByte((short) pending.nodeId.units(),
                                           data, 0);
                pending.nodeId.serialize(data, 2);

                String contentField = encodeQName(pending.qname);

                doc.add(new Field("docId",
                        Integer.toString(currentDoc.getDocId()),
                        Field.Store.COMPRESS, Field.Index.UN_TOKENIZED));
                doc.add(new Field("nodeId", data, Field.Store.YES));
                doc.add(new Field(contentField, pending.text.toString(),
                        Field.Store.NO, Field.Index.TOKENIZED));
                if (pending.analyzer == null)
                    writer.addDocument(doc);
                else
                    writer.addDocument(doc, pending.analyzer);
            }
        } catch (IOException e) {
            LOG.warn("An exception was caught while indexing document: "
                     + e.getMessage(), e);
        } finally {
            index.releaseWriter(writer);
            nodesToWrite = new ArrayList();
            cachedNodesSize = 0;
        }
    }

    /**
     * Encode a QName into a single hex string combining name type,
     * namespace symbol id and local-name symbol id (inverse of
     * {@link #decodeQName}).
     */
    private String encodeQName(QName qname) {
        SymbolTable symbols = index.getBrokerPool().getSymbols();
        short namespaceId = symbols.getNSSymbol(qname.getNamespaceURI());
        short localNameId = symbols.getSymbol(qname.getLocalName());
        long nameId = qname.getNameType()
            | (((int) namespaceId) & 0xFFFF) << 16
            | (((long) localNameId) & 0xFFFFFFFFL) << 32;
        return Long.toHexString(nameId);
    }

    /**
     * Decode a hex field name produced by {@link #encodeQName} back into a
     * QName via the global symbol table.
     */
    private QName decodeQName(String s) {
        SymbolTable symbols = index.getBrokerPool().getSymbols();
        long l = Long.parseLong(s, 16);
        short namespaceId = (short) ((l >>> 16) & 0xFFFFL);
        short localNameId = (short) ((l >>> 32) & 0xFFFFL);
        byte type = (byte) (l & 0xFFL);
        String namespaceURI = symbols.getNamespace(namespaceId);
        String localName = symbols.getName(localNameId);
        QName qname = new QName(localName, namespaceURI, "");
        qname.setNameType(type);
        return qname;
    }

    /**
     * Stream listener that drives indexing/removal while eXist streams a
     * document through this worker. In STORE mode, matching elements push a
     * TextExtractor which gathers their text until endElement.
     */
    private class LuceneStreamListener extends AbstractStreamListener {

        public void startElement(Txn transaction, ElementImpl element,
                NodePath path) {
            if (mode == STORE && config != null) {
                // Notify all active extractors of the nested element.
                if (contentStack != null && !contentStack.isEmpty()) {
                    for (int i = 0; i < contentStack.size(); i++) {
                        TextExtractor extractor =
                            (TextExtractor) contentStack.get(i);
                        extractor.startElement(element.getQName());
                    }
                }
                // Start a new extractor for each configured index path match.
                if (config.matches(path)) {
                    if (contentStack == null)
                        contentStack = new Stack();
                    TextExtractor extractor = new DefaultTextExtractor();
                    extractor.configure(config);
                    contentStack.push(extractor);
                }
            }
            super.startElement(transaction, element, path);
        }

        public void endElement(Txn transaction, ElementImpl element,
                NodePath path) {
            if (config != null) {
                if (mode == STORE && contentStack != null
                        && !contentStack.isEmpty()) {
                    for (int i = 0; i < contentStack.size(); i++) {
                        TextExtractor extractor =
                            (TextExtractor) contentStack.get(i);
                        extractor.endElement(element.getQName());
                    }
                }
                LuceneIndexConfig idxConf = config.getConfig(path);
                if (mode != REMOVE_ALL_NODES && idxConf != null) {
                    if (mode == REMOVE_SOME_NODES) {
                        nodesToRemove.add(element.getNodeId());
                    } else {
                        // STORE: pop this element's extractor and index its
                        // accumulated text.
                        TextExtractor extractor =
                            (TextExtractor) contentStack.pop();
                        indexText(element.getNodeId(), element.getQName(),
                                path, extractor.getText(), idxConf.getBoost());
                    }
                }
            }
            super.endElement(transaction, element, path);
        }

        public void attribute(Txn transaction, AttrImpl attrib, NodePath path) {
            // Temporarily extend the path with the attribute name for
            // configuration matching.
            path.addComponent(attrib.getQName());
            if (mode != REMOVE_ALL_NODES && config != null
                    && config.matches(path)) {
                if (mode == REMOVE_SOME_NODES) {
                    nodesToRemove.add(attrib.getNodeId());
                } else {
                    indexText(attrib.getNodeId(), attrib.getQName(), path,
                            attrib.getValue(), config.getBoost());
                }
            }
            path.removeLastComponent();
            super.attribute(transaction, attrib, path);
        }

        public void characters(Txn transaction, TextImpl text, NodePath path) {
            // Feed text to every active extractor.
            if (contentStack != null && !contentStack.isEmpty()) {
                for (int i = 0; i < contentStack.size(); i++) {
                    TextExtractor extractor =
                        (TextExtractor) contentStack.get(i);
                    extractor.characters(text.getXMLString());
                }
            }
            super.characters(transaction, text, path);
        }

        public IndexWorker getWorker() {
            return LuceneIndexWorker.this;
        }
    }

    /**
     * Match class containing the score of a match and a reference to
     * the query that generated it.
*/ public class LuceneMatch extends Match { private float score = 0.0f; private Query query; public LuceneMatch(int contextId, NodeId nodeId, Query query) { super(contextId, nodeId, null); this.query = query; } public LuceneMatch(LuceneMatch copy) { super(copy); this.score = copy.score; this.query = copy.query; } public Match createInstance(int contextId, NodeId nodeId, String matchTerm) { return null; } public Match createInstance(int contextId, NodeId nodeId, Query query) { return new LuceneMatch(contextId, nodeId, query); } public Match newCopy() { return new LuceneMatch(this); } public String getIndexId() { return LuceneIndex.ID; } public Query getQuery() { return query; } public float getScore() { return score; } private void setScore(float score) { this.score = score; } public boolean equals(Object other) { if(!(other instanceof LuceneMatch)) return false; LuceneMatch o = (LuceneMatch) other; return (nodeId == o.nodeId || nodeId.equals(o.nodeId)) && query == ((LuceneMatch)other).query; } public boolean matchEquals(Match other) { return equals(other); } } }
package org.jboss.forge.addon.javaee.rest.ui.setup;

import java.util.concurrent.Callable;

import javax.inject.Inject;

import org.jboss.forge.addon.convert.Converter;
import org.jboss.forge.addon.facets.FacetFactory;
import org.jboss.forge.addon.facets.constraints.FacetConstraint;
import org.jboss.forge.addon.javaee.rest.RestFacet;
import org.jboss.forge.addon.javaee.rest.config.RestConfigurationStrategy;
import org.jboss.forge.addon.javaee.rest.config.RestConfigurationStrategyFactory;
import org.jboss.forge.addon.javaee.rest.ui.RestSetupWizard;
import org.jboss.forge.addon.javaee.ui.AbstractJavaEECommand;
import org.jboss.forge.addon.parser.java.facets.JavaSourceFacet;
import org.jboss.forge.addon.projects.Project;
import org.jboss.forge.addon.ui.context.UIBuilder;
import org.jboss.forge.addon.ui.context.UIContext;
import org.jboss.forge.addon.ui.context.UIExecutionContext;
import org.jboss.forge.addon.ui.context.UIValidationContext;
import org.jboss.forge.addon.ui.hints.InputType;
import org.jboss.forge.addon.ui.input.UIInput;
import org.jboss.forge.addon.ui.input.UISelectOne;
import org.jboss.forge.addon.ui.metadata.WithAttributes;
import org.jboss.forge.addon.ui.result.Result;
import org.jboss.forge.addon.ui.result.Results;
import org.jboss.forge.addon.ui.util.Categories;
import org.jboss.forge.addon.ui.util.Metadata;
import org.jboss.forge.roaster.Roaster;
import org.jboss.forge.roaster.model.source.JavaClassSource;

/**
 * Setups JAX-RS in a project.
 *
 * Wizard command that installs a JAX-RS facet into the selected project and
 * configures REST activation either via web.xml or via a generated
 * {@code javax.ws.rs.core.Application} subclass.
 *
 * @author <a href="ggastald@redhat.com">George Gastaldi</a>
 */
@FacetConstraint(JavaSourceFacet.class)
public class RestSetupWizardImpl extends AbstractJavaEECommand implements RestSetupWizard {

    /** Command metadata shown by the shell/GUI: name, description and category. */
    @Override
    public Metadata getMetadata(UIContext context) {
        return Metadata.from(super.getMetadata(context), getClass()).name("REST: Setup")
                .description("Setup REST in your project")
                .category(Categories.create(super.getMetadata(context).getCategory(), "JAX-RS"));
    }

    // Installs facets into the project on execute().
    @Inject
    private FacetFactory facetFactory;

    // JAX-RS spec version to install.
    @Inject
    @WithAttributes(required = true, label = "JAX-RS Version", defaultValue = "1.1")
    private UISelectOne<RestFacet> jaxrsVersion;

    // How REST is activated: web.xml entry or an Application subclass.
    @Inject
    @WithAttributes(required = true, label = "Configuration Strategy", type = InputType.RADIO)
    private UISelectOne<RestActivatorType> config;

    @Inject
    @WithAttributes(label = "Application Path", description = "The Application path for the REST resources", defaultValue = "/rest", required = true)
    private UIInput<String> applicationPath;

    // Package/class for the generated Application subclass; only relevant for APP_CLASS.
    @Inject
    @WithAttributes(label = "Target Package", type = InputType.JAVA_PACKAGE_PICKER)
    private UIInput<String> targetPackage;

    @Inject
    @WithAttributes(label = "Class Name", defaultValue = "RestApplication")
    private UIInput<String> className;

    @Override
    public void initializeUI(UIBuilder builder) throws Exception {
        configureActivationStrategy(builder.getUIContext());
        builder.add(jaxrsVersion).add(applicationPath).add(config).add(targetPackage).add(className);
    }

    /**
     * Wires the dynamic behavior of the inputs: targetPackage and className are
     * required/enabled only when the APP_CLASS strategy is chosen, and the
     * default package is derived from the project's base package.
     */
    private void configureActivationStrategy(UIContext context) {
        config.setDefaultValue(RestActivatorType.APP_CLASS);
        // Evaluated lazily every time the UI re-renders.
        Callable<Boolean> appClassChosen = new Callable<Boolean>() {
            @Override
            public Boolean call() throws Exception {
                return RestActivatorType.APP_CLASS == config.getValue();
            }
        };
        if (context.getProvider().isGUI()) {
            // Show the human-readable description instead of the enum constant name.
            config.setItemLabelConverter(new Converter<RestActivatorType, String>() {
                @Override
                public String convert(RestActivatorType source) {
                    return source != null ? source.getDescription() : null;
                }
            });
        }
        targetPackage.setRequired(appClassChosen).setEnabled(appClassChosen);
        className.setRequired(appClassChosen).setEnabled(appClassChosen);
        Project project = getSelectedProject(context);
        targetPackage.setDefaultValue(project.getFacet(JavaSourceFacet.class).getBasePackage() + ".rest");
    }

    @Override
    public void validate(UIValidationContext validator) {
        super.validate(validator);
    }

    /**
     * Installs the chosen JAX-RS facet and applies the selected configuration
     * strategy (web.xml entry or generated Application class).
     */
    @Override
    public Result execute(final UIExecutionContext context) throws Exception {
        RestFacet facet = jaxrsVersion.getValue();
        if (facetFactory.install(getSelectedProject(context), facet)) {
            String path = applicationPath.getValue();
            final RestConfigurationStrategy strategy;
            if (config.getValue() == RestActivatorType.WEB_XML) {
                strategy = RestConfigurationStrategyFactory.createUsingWebXml(path);
            } else {
                JavaClassSource javaClass = Roaster.create(JavaClassSource.class).setPackage(targetPackage.getValue())
                        .setName(className.getValue());
                strategy = RestConfigurationStrategyFactory.createUsingJavaClass(path, javaClass);
            }
            facet.setConfigurationStrategy(strategy);
            return Results.success("JAX-RS has been installed.");
        }
        return Results.fail("Could not install JAX-RS.");
    }

    // This command only makes sense inside a project.
    @Override
    protected boolean isProjectRequired() {
        return true;
    }
}
package xal.extension.jels.model.elem; public class FieldMapPoint { private double amplitudeFactorE = 1.0; private double amplitudeFactorB = 1.0; // Electric field private double Ex = 0; private double Ey = 0; private double Ez = 0; // Derivatives of Ex private double dExdx = 0; private double dExdy = 0; private double dExdz = 0; // Derivatives of Ey private double dEydx = 0; private double dEydy = 0; private double dEydz = 0; // Derivatives of Ez private double dEzdx = 0; private double dEzdy = 0; private double dEzdz = 0; // Magnetic field private double Bx = 0; private double By = 0; private double Bz = 0; // Derivatives of Bx private double dBxdx = 0; private double dBxdy = 0; private double dBxdz = 0; // Derivatives of By private double dBydx = 0; private double dBydy = 0; private double dBydz = 0; // Derivatives of Bz private double dBzdx = 0; private double dBzdy = 0; private double dBzdz = 0; public FieldMapPoint() { } public FieldMapPoint(double Ex, double Ey, double Ez, double dExdx, double dExdy, double dExdz, double dEydx, double dEydy, double dEydz, double dEzdx, double dEzdy, double dEzdz, double Bx, double By, double Bz, double dBxdx, double dBxdy, double dBxdz, double dBydx, double dBydy, double dBydz, double dBzdx, double dBzdy, double dBzdz) { this.Ex = Ex; this.Ey = Ey; this.Ez = Ez; this.dExdx = dExdx; this.dExdy = dExdy; this.dExdz = dExdz; this.dEydx = dEydx; this.dEydy = dEydy; this.dEydz = dEydz; this.dEzdx = dEzdx; this.dEzdy = dEzdy; this.dEzdz = dEzdz; this.Bx = Bx; this.By = By; this.Bz = Bz; this.dBxdx = dBxdx; this.dBxdy = dBxdy; this.dBxdz = dBxdz; this.dBydx = dBydx; this.dBydy = dBydy; this.dBydz = dBydz; this.dBzdx = dBzdx; this.dBzdy = dBzdy; this.dBzdz = dBzdz; } public void setAmplitudeFactorE(double amplitudeFactorE) { this.amplitudeFactorE = amplitudeFactorE; } public void setAmplitudeFactorB(double amplitudeFactorB) { this.amplitudeFactorB = amplitudeFactorB; } public double getEx() { return Ex * amplitudeFactorE; } public 
double getEy() { return Ey * amplitudeFactorE; } public double getEz() { return Ez * amplitudeFactorE; } public double getdExdx() { return dExdx * amplitudeFactorE; } public double getdExdy() { return dExdy * amplitudeFactorE; } public double getdExdz() { return dExdz * amplitudeFactorE; } public double getdEydx() { return dEydx * amplitudeFactorE; } public double getdEydy() { return dEydy * amplitudeFactorE; } public double getdEydz() { return dEydz * amplitudeFactorE; } public double getdEzdx() { return dEzdx * amplitudeFactorE; } public double getdEzdy() { return dEzdy * amplitudeFactorE; } public double getdEzdz() { return dEzdz * amplitudeFactorE; } public double getBx() { return Bx * amplitudeFactorB; } public double getBy() { return By * amplitudeFactorB; } public double getBz() { return Bz * amplitudeFactorB; } public double getdBxdx() { return dBxdx * amplitudeFactorB; } public double getdBxdy() { return dBxdy * amplitudeFactorB; } public double getdBxdz() { return dBxdz * amplitudeFactorB; } public double getdBydx() { return dBydx * amplitudeFactorB; } public double getdBydy() { return dBydy * amplitudeFactorB; } public double getdBydz() { return dBydz * amplitudeFactorB; } public double getdBzdx() { return dBzdx * amplitudeFactorB; } public double getdBzdy() { return dBzdy * amplitudeFactorB; } public double getdBzdz() { return dBzdz * amplitudeFactorB; } public void setEx(double Ex) { this.Ex = Ex; } public void setEy(double Ey) { this.Ey = Ey; } public void setEz(double Ez) { this.Ez = Ez; } public void setdExdx(double dExdx) { this.dExdx = dExdx; } public void setdExdy(double dExdy) { this.dExdy = dExdy; } public void setdExdz(double dExdz) { this.dExdz = dExdz; } public void setdEydx(double dEydx) { this.dEydx = dEydx; } public void setdEydy(double dEydy) { this.dEydy = dEydy; } public void setdEydz(double dEydz) { this.dEydz = dEydz; } public void setdEzdx(double dEzdx) { this.dEzdx = dEzdx; } public void setdEzdy(double dEzdy) { this.dEzdy = dEzdy; 
} public void setdEzdz(double dEzdz) { this.dEzdz = dEzdz; } public void setBx(double Bx) { this.Bx = Bx; } public void setBy(double By) { this.By = By; } public void setBz(double Bz) { this.Bz = Bz; } public void setdBxdx(double dBxdx) { this.dBxdx = dBxdx; } public void setdBxdy(double dBxdy) { this.dBxdy = dBxdy; } public void setdBxdz(double dBxdz) { this.dBxdz = dBxdz; } public void setdBydx(double dBydx) { this.dBydx = dBydx; } public void setdBydy(double dBydy) { this.dBydy = dBydy; } public void setdBydz(double dBydz) { this.dBydz = dBydz; } public void setdBzdx(double dBzdx) { this.dBzdx = dBzdx; } public void setdBzdy(double dBzdy) { this.dBzdy = dBzdy; } public void setdBzdz(double dBzdz) { this.dBzdz = dBzdz; } }
package org.myrobotlab.service;

import java.io.File;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Stack;

import org.apache.commons.codec.digest.DigestUtils;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.client.HttpClient;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.impl.conn.PoolingClientConnectionManager;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.util.EntityUtils;
import org.myrobotlab.audio.AudioData;
import org.myrobotlab.framework.Service;
import org.myrobotlab.framework.repo.ServiceType;
import org.myrobotlab.io.FileIO;
import org.myrobotlab.logging.Level;
import org.myrobotlab.logging.Logging;
import org.myrobotlab.logging.LoggingFactory;
import org.myrobotlab.service.interfaces.AudioListener;
import org.myrobotlab.service.interfaces.SpeechRecognizer;
import org.myrobotlab.service.interfaces.SpeechSynthesis;
import org.myrobotlab.service.interfaces.TextListener;

/**
 * AcapelaSpeech - Use the acapela group speech synthesis API. This makes a HTTP request
 * to generate an MP3 that represents the text to be spoken for a given voice. That mp3
 * is then cached and played back by the AudioFile service.
 *
 * Workflow: speak() computes a cache filename from the voice and an MD5 of the
 * text; on a cache miss it scrapes the Acapela demo page for the generated mp3
 * URL, downloads the file and caches it, then plays it through the AudioFile
 * peer. Start/end-of-speech events are published from the AudioFile callbacks.
 */
public class AcapelaSpeech extends Service implements TextListener, SpeechSynthesis, AudioListener {

    private static final long serialVersionUID = 1L;

    // default voice
    public String voice = "Ryan";

    // All voice names accepted by the remote service, populated in the constructor.
    public HashSet<String> voices = new HashSet<String>();

    // Shared HTTP connection pool for the form POST and the mp3 download.
    transient PoolingClientConnectionManager connectionManager = new PoolingClientConnectionManager();

    // this is a peer service.
    transient AudioFile audioFile = null;

    // TODO: use a wait / notify for this so we don't poll in a loop!
    // Set false when speak() starts, flipped true by onAudioEnd().
    Boolean finishedSpeaking = false;

    // TODO: fix the volume control
    // private float volume = 1.0f;

    public AcapelaSpeech(String n) {
        super(n);
        connectionManager.setMaxTotal(10);
        // TODO: be country/language aware when asking for voices?
        // maybe have a get voices by language/locale
        // Arabic
        voices.add("Leila");
        voices.add("Mehdi");
        voices.add("Nizar");
        voices.add("Salma");
        // Catalan
        voices.add("Laia");
        // Czech
        voices.add("Eliska");
        // Danish
        voices.add("Mette");
        voices.add("Rasmus");
        // Dutch ( Belgium )
        voices.add("Zoe");
        voices.add("Jeroen");
        voices.add("JeroenHappy");
        voices.add("JeroenSad");
        voices.add("Sofie");
        // Dutch ( Netherlands )
        voices.add("Jasmijn");
        voices.add("Daan");
        voices.add("Femke");
        voices.add("Max");
        // English (AU)
        voices.add("Tyler");
        voices.add("Lisa");
        voices.add("Olivia");
        voices.add("Liam");
        // English ( India )
        voices.add("Deepa");
        // English ( Scottish )
        voices.add("Rhona");
        // English (UK)
        voices.add("Rachel");
        voices.add("Graham");
        voices.add("Harry");
        voices.add("Lucy");
        voices.add("Nizareng");
        voices.add("Peter");
        voices.add("PeterHappy");
        voices.add("PeterSad");
        voices.add("QueenElizabeth");
        voices.add("Rosie");
        // English ( USA )
        voices.add("Sharon");
        voices.add("Ella");
        voices.add("EmilioEnglish");
        voices.add("Josh");
        voices.add("Karen");
        voices.add("Kenny");
        voices.add("Laura");
        voices.add("Micah");
        voices.add("Nelly");
        voices.add("Rod");
        voices.add("Ryan");
        voices.add("Saul");
        voices.add("Scott");
        voices.add("Tracy");
        voices.add("ValeriaEnglish");
        voices.add("Will");
        voices.add("WillBadGuy");
        voices.add("WillFromAfar");
        voices.add("WillHappy");
        voices.add("WillLittleCreature");
        // Faroese
        voices.add("Hanna");
        voices.add("Hanus");
        // Finnish
        voices.add("Sanna");
        // French ( Belgium )
        voices.add("Justine");
        // French ( Canada )
        voices.add("Louise");
        // French ( France )
        voices.add("Manon");
        voices.add("Alice");
        voices.add("Antoine");
        voices.add("AntoineFromAfar");
        voices.add("AntoineHappy");
        voices.add("AntoineSad");
        voices.add("Bruno");
        voices.add("Claire");
        voices.add("Manon");
        voices.add("Julie");
        voices.add("Margaux");
        voices.add("MargauxHappy");
        voices.add("MargauxSad");
        // German
        voices.add("Claudia");
        voices.add("Andreas");
        voices.add("Jonas");
        voices.add("Julia");
        voices.add("Klaus");
        voices.add("Lea");
        voices.add("Sarah");
        // Greek
        voices.add("Dimitris");
        voices.add("DimitrisHappy");
        voices.add("DimitrisSad");
        // Italian
        voices.add("Fabiana");
        voices.add("Chiara");
        voices.add("Vittorio");
        // Japanese
        voices.add("Sakura");
        // Korean
        voices.add("Minji");
        // Mandarin
        voices.add("Lulu");
        // Norwegian
        voices.add("Bente");
        voices.add("Kari");
        voices.add("Olav");
        // Polish
        voices.add("Monika");
        voices.add("Ania");
        // Portuguese ( Brazil )
        voices.add("Marcia");
        // Portuguese ( Portugal )
        voices.add("Celia");
        // Russian
        voices.add("Alyona");
        // Sami ( North )
        voices.add("Biera");
        voices.add("Elle");
        // Spanish ( Spain )
        voices.add("Ines");
        voices.add("Antonio");
        voices.add("Maria");
        // Spanish ( US )
        voices.add("Rodrigo");
        voices.add("Emilio");
        voices.add("Rosa");
        voices.add("Valeria");
        // Swedish
        voices.add("Elin");
        voices.add("Emil");
        voices.add("Emma");
        voices.add("Erik");
        // Swedish ( Finland )
        voices.add("Samuel");
        // Swedish ( Gothenburg )
        voices.add("Kal");
        // Swedish ( Scanian )
        voices.add("Mia");
        // Turkish
        voices.add("Ipek");
    }

    /**
     * Starts the AudioFile peer and subscribes to its playback events so this
     * service can publish start/end-of-speech notifications.
     */
    public void startService() {
        super.startService();
        audioFile = (AudioFile) startPeer("audioFile");
        audioFile.startService();
        subscribe(audioFile.getName(), "publishAudioStart");
        subscribe(audioFile.getName(), "publishAudioEnd");
        // attach a listener when the audio file ends playing.
        audioFile.addListener("finishedPlaying", this.getName(), "publishEndSpeaking");
    }

    public AudioFile getAudioFile() {
        return audioFile;
    }

    @Override
    public ArrayList<String> getVoices() {
        return new ArrayList<String>(voices);
    }

    @Override
    public String getVoice() {
        return voice;
    }

    /**
     * Sets the current voice and reports whether it is a known voice name.
     * NOTE(review): the voice is assigned even when it is NOT in the known
     * set, so an invalid name is still used -- confirm whether this is
     * intentional.
     */
    @Override
    public boolean setVoice(String voice) {
        this.voice = voice;
        return voices.contains(voice);
    }

    @Override
    public void setLanguage(String l) {
        // FIXME ! "MyLanguages", "sonid8" ???
        // FIXME - implement !!!
    }

    /**
     * POSTs the text to the Acapela demo form and scrapes the generated mp3
     * URL out of the returned HTML (the page assigns it to a JS variable
     * "myPhpVar"). Returns null on any failure.
     *
     * NOTE(review): the URL literal below appears truncated/garbled (it will
     * not even parse as Java) -- the original endpoint string seems to have
     * been lost; restore it from version control before building.
     */
    public String getMp3Url(String toSpeak) {
        HttpPost post = null;
        try {
            HttpClient client = new DefaultHttpClient(connectionManager);
            // request form & send text
            String url = "http: + "%22%3EAcapela+Voice+as+a+Service%3C%2Fa%3E.+For+demo+and+evaluation+purpose+only%2C+for+commercial+use+of+generated+sound+files+please+go+to+"
                    + "%3Ca+href%3D%22http%3A%2F%2Fwww.acapela-box.com%22%3Ewww.acapela-box.com%3C%2Fa%3E";
            post = new HttpPost(url);
            List<NameValuePair> nvps = new ArrayList<NameValuePair>();
            nvps.add(new BasicNameValuePair("MyLanguages", "sonid10"));
            nvps.add(new BasicNameValuePair("MySelectedVoice", voice));
            nvps.add(new BasicNameValuePair("MyTextForTTS", toSpeak));
            nvps.add(new BasicNameValuePair("t", "1"));
            nvps.add(new BasicNameValuePair("SendToVaaS", ""));
            UrlEncodedFormEntity formData = new UrlEncodedFormEntity(nvps, "UTF-8");
            post.setEntity(formData);
            HttpResponse response = client.execute(post);
            log.info(response.getStatusLine().toString());
            HttpEntity entity = response.getEntity();
            byte[] b = FileIO.toByteArray(entity.getContent());
            // parse out mp3 file url
            String mp3Url = null;
            String data = new String(b);
            String startTag = "var myPhpVar = '";
            int startPos = data.indexOf(startTag);
            if (startPos != -1) {
                int endPos = data.indexOf("';", startPos);
                if (endPos != -1) {
                    mp3Url = data.substring(startPos + startTag.length(), endPos);
                }
            }
            if (mp3Url == null) {
                error("could not get mp3 back from Acapela server !");
            }
            return mp3Url;
        } catch (Exception e) {
            Logging.logError(e);
        } finally {
            if (post != null) {
                post.releaseConnection();
            }
        }
        return null;
    }

    /**
     * Resolves the mp3 URL for the text and downloads the file, returning its
     * raw bytes (or null on failure).
     */
    public byte[] getRemoteFile(String toSpeak) {
        String mp3Url = getMp3Url(toSpeak);
        HttpGet get = null;
        byte[] b = null;
        try {
            HttpClient client = new DefaultHttpClient(connectionManager);
            HttpResponse response = null;
            // fetch file
            get = new HttpGet(mp3Url);
            log.info("mp3Url {}", mp3Url);
            // get mp3 file & save to cache
            response = client.execute(get);
            log.info("got {}", response.getStatusLine());
            HttpEntity entity = response.getEntity();
            // cache the mp3 content
            b = FileIO.toByteArray(entity.getContent());
            EntityUtils.consume(entity);
        } catch (Exception e) {
            Logging.logError(e);
        } finally {
            if (get != null) {
                get.releaseConnection();
            }
        }
        return b;
    }

    /**
     * Speaks the text and blocks (busy-waits on finishedSpeaking) until audio
     * playback ends. NOTE(review): always returns false -- confirm whether
     * callers rely on the return value.
     */
    @Override
    public boolean speakBlocking(String toSpeak) throws IOException {
        speak(toSpeak);
        // audioFile.playFile(to, true);
        // sleep(afterSpeechPause);// important pause after speech
        // invoke("publishEndSpeaking", toSpeak);
        // We are blocking so .. now we want to wait on finished speaking to be true.
        waitForCompletion();
        return false;
    }

    // Polls until onAudioEnd() flips finishedSpeaking; 20 ms between checks.
    private void waitForCompletion() {
        log.info("Waiting for speaking to finish.");
        while (!finishedSpeaking) {
            // TODO: use a better thread wait / notify here.
            try {
                Thread.sleep(20);
            } catch (InterruptedException e) {
                log.warn("Interrupted while waiting for speaking to finish.", e.getLocalizedMessage());
            }
        }
        log.info("Speaking Finished.");
    }

    @Override
    public void setVolume(float volume) {
        // TODO: fix the volume control
        log.warn("Volume control not implemented in AcapelaSpeech yet.");
    }

    @Override
    public float getVolume() {
        return 0;
    }

    @Override
    public void interrupt() {
        // TODO: Implement me!
    }

    // TextListener callback: speak whatever text arrives.
    @Override
    public void onText(String text) {
        log.info("ON Text Called: {}", text);
        try {
            speak(text);
        } catch (Exception e) {
            Logging.logError(e);
        }
    }

    @Override
    public String getLanguage() {
        return null;
    }

    // HashSet<String> audioFiles = new HashSet<String>();
    // Cache filenames pushed in the order speech was requested.
    Stack<String> audioFiles = new Stack<String>();

    /**
     * Speaks the text asynchronously: plays from the local cache when the
     * (voice, md5(text)) file exists, otherwise fetches the mp3 from Acapela,
     * caches it and plays it. Returns the AudioData of the playback.
     */
    public AudioData speak(String toSpeak) throws IOException {
        // this will flip to true on the audio file end playing.
        finishedSpeaking = false;
        AudioData ret = null;
        log.info("speak {}", toSpeak);
        if (voice == null) {
            log.warn("voice is null! setting to default: Ryan");
            voice = "Ryan";
        }
        String filename = this.getLocalFileName(this, toSpeak, "mp3");
        if (audioFile.cacheContains(filename)) {
            ret = audioFile.playCachedFile(filename);
            utterances.put(ret, toSpeak);
            return ret;
        }
        audioFiles.push(filename);
        byte[] b = getRemoteFile(toSpeak);
        audioFile.cache(filename, b, toSpeak);
        ret = audioFile.playCachedFile(filename);
        utterances.put(ret, toSpeak);
        return ret;
    }

    /** Convenience overload: switches to the given voice, then speaks. */
    public AudioData speak(String voice, String toSpeak) throws IOException {
        setVoice(voice);
        return speak(toSpeak);
    }

    /**
     * Cache path layout: &lt;ServiceClass&gt;/&lt;url-encoded voice&gt;/&lt;md5 of text&gt;.&lt;ext&gt;
     */
    @Override
    public String getLocalFileName(SpeechSynthesis provider, String toSpeak, String audioFileType)
            throws UnsupportedEncodingException {
        // TODO: make this a base class sort of thing.
        return provider.getClass().getSimpleName() + File.separator + URLEncoder.encode(provider.getVoice(), "UTF-8")
                + File.separator + DigestUtils.md5Hex(toSpeak) + "." + audioFileType;
    }

    @Override
    public void addEar(SpeechRecognizer ear) {
        // TODO: move this to a base class. it's basically the same for all
        // mouths/ speech synth stuff.
        // when we add the ear, we need to listen for request confirmation
        addListener("publishStartSpeaking", ear.getName(), "onStartSpeaking");
        addListener("publishEndSpeaking", ear.getName(), "onEndSpeaking");
    }

    // Echoes a recognized phrase back to the user for confirmation.
    public void onRequestConfirmation(String text) {
        try {
            speakBlocking(String.format("did you say. %s", text));
        } catch (Exception e) {
            Logging.logError(e);
        }
    }

    @Override
    public List<String> getLanguages() {
        // TODO Auto-generated method stub
        ArrayList<String> ret = new ArrayList<String>();
        // FIXME - add iso language codes currently supported e.g. en en_gb de
        // etc..
        return ret;
    }

    /**
     * This static method returns all the details of the class without it having
     * to be constructed. It has description, categories, dependencies, and peer
     * definitions.
     *
     * @return ServiceType - returns all the data
     *
     */
    static public ServiceType getMetaData() {
        ServiceType meta = new ServiceType(AcapelaSpeech.class.getCanonicalName());
        meta.addDescription("Acapela group speech synthesis service.");
        meta.addCategory("speech");
        meta.addPeer("audioFile", "AudioFile", "audioFile");
        return meta;
    }

    // audioData to utterance map TODO: revisit the design of this
    HashMap<AudioData, String> utterances = new HashMap<AudioData, String>();

    @Override
    public String publishStartSpeaking(String utterance) {
        log.info("publishStartSpeaking {}", utterance);
        return utterance;
    }

    @Override
    public String publishEndSpeaking(String utterance) {
        log.info("publishEndSpeaking {}", utterance);
        return utterance;
    }

    // AudioFile callback: translate playback-start into publishStartSpeaking.
    @Override
    public void onAudioStart(AudioData data) {
        log.info("onAudioStart {} {}", getName(), data.toString());
        // filters on only our speech
        if (utterances.containsKey(data)) {
            String utterance = utterances.get(data);
            invoke("publishStartSpeaking", utterance);
        }
    }

    // AudioFile callback: translate playback-end into publishEndSpeaking and
    // release any speakBlocking() caller.
    @Override
    public void onAudioEnd(AudioData data) {
        log.info("onAudioEnd {} {}", getName(), data.toString());
        // filters on only our speech
        if (utterances.containsKey(data)) {
            String utterance = utterances.get(data);
            invoke("publishEndSpeaking", utterance);
            utterances.remove(data);
            // TODO: use a thread notify / wait and lock/sychronize on this ..
            finishedSpeaking = true;
        }
    }

    public static void main(String[] args) {
        LoggingFactory.getInstance().configure();
        LoggingFactory.getInstance().setLevel(Level.INFO);
        try {
            // Runtime.start("webgui", "WebGui");
            AcapelaSpeech speech = (AcapelaSpeech) Runtime.start("speech", "AcapelaSpeech");
            // speech.setVoice("Ryan");
            // TODO: fix the volume control
            //speech.setVolume(0);
            speech.speakBlocking("I'm afraid I can't do that.");
            // speech.speak("this is a test");
            // speech.speak("i am saying something new once again again");
            // speech.speak("one");
            // speech.speak("two");
            // speech.speak("three");
            // speech.speak("four");
            // arduino.setBoard(Arduino.BOARD_TYPE_ATMEGA2560);
            // arduino.connect(port);
            // arduino.broadcastState();
        } catch (Exception e) {
            Logging.logError(e);
        }
    }
}
package org.nees.buffalo.rdv.rbnb;

import com.rbnb.sapi.ChannelMap;
import com.rbnb.sapi.SAPIException;
import com.rbnb.sapi.Source;

/**
 * A class to create and manage an RBNB source.
 *
 * Wraps an RBNB {@link Source} together with the {@link ChannelMap} used to
 * post data. Channels are added up front; their metadata is registered lazily
 * on the first {@code putData} call, and data accumulates in the map until
 * {@link #flush()} pushes it to the server.
 *
 * @author Jason P. Hanley
 */
public class RBNBSource {
  /** the default host name of the RBNB server */
  private static final String DEFAULT_HOST = "localhost";

  /** the default port number of the RBNB server */
  private static final int DEFAULT_PORT = 3333;

  /** the name of the source */
  private final String name;

  /** the host name of the RBNB server */
  private final String host;

  /** the port number of the RBNB server */
  private final int port;

  /** the RBNB source */
  private final Source source;

  /** the channel map used to post data */
  private final ChannelMap cmap;

  /** flag to see if we have registered the channel metadata */
  private boolean registered;

  /** the timestamp of the last piece of data posted */
  private double lastTimestamp;

  /**
   * Creates an RBNBSource connected to the default server
   * ({@value #DEFAULT_HOST}:{@value #DEFAULT_PORT}).
   *
   * @param name the name of the RBNB source
   * @param archive the archive size
   * @throws RBNBException if there is an error creating the source
   */
  public RBNBSource(String name, int archive) throws RBNBException {
    this(name, archive, DEFAULT_HOST);
  }

  /**
   * Creates an RBNBSource on the given host, using the default port
   * ({@value #DEFAULT_PORT}).
   *
   * @param name the name of the RBNB source
   * @param archive the archive size
   * @param host the host name of the RBNB server
   * @throws RBNBException if there is an error creating the source
   */
  public RBNBSource(String name, int archive, String host) throws RBNBException {
    this(name, archive, host, DEFAULT_PORT);
  }

  /**
   * Creates an RBNBSource on the given host and port and immediately opens
   * the connection to the server.
   *
   * @param name the name of the RBNB source
   * @param archive the archive size
   * @param host the host name of the RBNB server
   * @param port the port number of the RBNB server
   * @throws RBNBException if there is an error creating the source
   */
  public RBNBSource(String name, int archive, String host, int port) throws RBNBException {
    this.name = name;
    this.host = host;
    this.port = port;

    // cache of 1 frame, "create" mode with the requested archive size
    source = new Source(1, "create", archive);
    cmap = new ChannelMap();

    registered = false;
    lastTimestamp = -1;

    try {
      open();
    } catch (SAPIException ex) {
      throw new RBNBException(ex);
    }
  }

  /**
   * Gets the name of the RBNB source.
   *
   * @return the name of the RBNB source
   */
  public String getName() {
    return name;
  }

  /**
   * Gets the host name of the RBNB server.
   *
   * @return the host name of the RBNB server
   */
  public String getHost() {
    return host;
  }

  /**
   * Gets the port number of the RBNB server.
   *
   * @return the port number of the RBNB server
   */
  public int getPort() {
    return port;
  }

  /**
   * Opens the connection to the RBNB server.
   *
   * @throws SAPIException if there is an error connecting to the server
   */
  private void open() throws SAPIException {
    source.OpenRBNBConnection(host + ":" + port, name);
  }

  /**
   * Closes the connection to the server. The source will continue to exist on
   * the server.
   */
  public void close() {
    source.Detach();
  }

  /**
   * Adds a channel to the source, with no mime type or unit.
   *
   * @param channel the name of the channel
   * @throws RBNBException if there is an error adding the channel
   */
  public void addChannel(String channel) throws RBNBException {
    addChannel(channel, null);
  }

  /**
   * Adds a channel to the source. This channel is given a mime type.
   *
   * @param channel the name of the channel
   * @param mime the mime type of the channel
   * @throws RBNBException if there is an error adding the channel
   */
  public void addChannel(String channel, String mime) throws RBNBException {
    addChannel(channel, mime, null);
  }

  /**
   * Adds a channel to the source. This channel is given a mime type and a unit.
   * A null mime type defaults to "application/octet-stream"; a null unit is
   * simply omitted.
   *
   * @param channel the name of the channel
   * @param mime the mime type of the channel
   * @param unit the unit for the channel
   * @throws RBNBException if there is an error adding the channel
   */
  public void addChannel(String channel, String mime, String unit) throws RBNBException {
    try {
      final int index = cmap.Add(channel);

      String mimeType = (mime != null) ? mime : "application/octet-stream";
      cmap.PutMime(index, mimeType);

      if (unit != null) {
        cmap.PutUserInfo(index, "units=" + unit);
      }
    } catch (SAPIException ex) {
      throw new RBNBException(ex);
    }
  }

  /**
   * Registers the channel metadata for all the added channels. This is a
   * one-shot operation; later calls return immediately.
   *
   * @throws RBNBException if there is an error registering the metadata
   */
  private void registerChannels() throws RBNBException {
    if (!registered) {
      try {
        source.Register(cmap);
      } catch (SAPIException ex) {
        throw new RBNBException(ex);
      }

      registered = true;
    }
  }

  /**
   * Puts data in the souce. The data will not be uploaded to the server till
   * {@link #flush()} is called.
   *
   * @param channel the name of the channel
   * @param timestamp the timestamp for the data
   * @param data the value for the data
   * @throws RBNBException if there is an error putting the data.
   */
  public void putData(String channel, double timestamp, double data) throws RBNBException {
    registerChannels();
    putTime(timestamp);

    try {
      cmap.PutDataAsFloat64(cmap.GetIndex(channel), new double[] { data });
    } catch (SAPIException ex) {
      throw new RBNBException(ex);
    }
  }

  /**
   * Puts data in the souce as a byte array. The data will not be uploaded to
   * the server till {@link #flush()} is called.
   *
   * @param channel the name of the channel
   * @param timestamp the timestamp for the data
   * @param data the value for the data
   * @throws RBNBException if there is an error putting the data.
   */
  public void putData(String channel, double timestamp, byte[] data) throws RBNBException {
    registerChannels();
    putTime(timestamp);

    try {
      cmap.PutDataAsByteArray(cmap.GetIndex(channel), data);
    } catch (SAPIException ex) {
      throw new RBNBException(ex);
    }
  }

  /**
   * Sets the timestamp for the subsequent data added. Consecutive calls with
   * the same timestamp only set it once.
   *
   * @param timestamp the timestamp for the data
   */
  private void putTime(double timestamp) {
    if (timestamp != lastTimestamp) {
      cmap.PutTime(timestamp, 0);
      lastTimestamp = timestamp;
    }
  }

  /**
   * Uploads all the data added with {@link #putData} to the server.
   *
   * @throws RBNBException if there is an error uploading the data
   */
  public void flush() throws RBNBException {
    try {
      source.Flush(cmap, true);
    } catch (SAPIException ex) {
      throw new RBNBException(ex);
    }
  }
}
package org.obd.ws.resources;

import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.json.JSONException;
import org.json.JSONObject;
import org.nescent.informatics.OBDQuery;
import org.obd.model.Statement;
import org.obd.query.Shard;
import org.restlet.Context;
import org.restlet.data.MediaType;
import org.restlet.data.Reference;
import org.restlet.data.Request;
import org.restlet.data.Response;
import org.restlet.data.Status;
import org.restlet.ext.json.JsonRepresentation;
import org.restlet.resource.Representation;
import org.restlet.resource.Resource;
import org.restlet.resource.Variant;

/**
 * Restlet resource that summarizes the phenotype annotations attached to an
 * anatomical entity (a TAO: or ZFA: term).  For the requested term (and,
 * recursively, its subclasses) it aggregates, per character (attribute) and
 * per quality (state), the taxa, genotypes and genes annotated as exhibiting
 * a phenotype of that anatomical entity, and renders the result as JSON.
 */
public class AnatomyResource extends Resource {

    // The anatomical term id extracted from the request URL ("termID" attribute).
    private final String termId;
    // Root JSON object returned to the client; set to null on a bad request.
    private JSONObject jObjs;
    // Data source: the OBD shard pulled from the Restlet context.
    private Shard obdsql;
    // Query helper wrapping the shard.
    private OBDQuery obdq;

    // Working references reused while accumulating per-quality sets below.
    private Set<String> taxa;
    private Set<String> genotypes;
    private Set<String> genes;
    // All character (attribute) ids seen; kept as a List so it can be sorted
    // for stable output ordering (see getRepresentation).
    private List<String> characters;
    private Set<String> qualitiesForCharacter;
    // quality (PATO term) -> set of taxa annotated with that quality
    private Map<String, Set<String>> qualityToTaxonMap;
    // quality (PATO term) -> set of genotypes annotated with that quality
    private Map<String, Set<String>> qualityToGenotypeMap;
    // quality (PATO term) -> set of genes annotated with that quality
    private Map<String, Set<String>> qualityToGeneMap;
    // character -> set of qualities (states) observed for that character
    private Map<String, Set<String>> characterToQualityMap;
    // reverse lookup: quality -> its character, to avoid repeated DB searches
    private Map<String, String> qualitiesToCharacterMap;
    // Total number of "exhibits" statements seen (across the whole traversal).
    private int annotationCount;

    // Relation / subset identifiers used in the OBD queries.
    private final String OBOOWL_SUBSET_RELATION = "oboInOwl:inSubset";
    private final String IS_A_RELATION = "OBO_REL:is_a";
    private final String EXHIBITS_RELATION = "PHENOSCAPE:exhibits";
    private final String HAS_ALLELE_RELATION = "PHENOSCAPE:has_allele";
    private final String VALUE_SLIM_STRING = "value_slim";

    /**
     * Creates the resource for one request, pulling the shard from the
     * application context and decoding the requested term id from the URL.
     *
     * @param context the Restlet context (must contain a "shard" attribute)
     * @param request the current request (must contain a "termID" attribute)
     * @param response the current response
     */
    public AnatomyResource(Context context, Request request, Response response) {
        super(context, request, response);
        this.obdsql = (Shard) this.getContext().getAttributes().get("shard");
        this.getVariants().add(new Variant(MediaType.APPLICATION_JSON));
        // this.getVariants().add(new Variant(MediaType.TEXT_HTML));
        this.termId = Reference.decode((String) (request.getAttributes()
                .get("termID")));
        obdq = new OBDQuery(obdsql);
        characters = new ArrayList<String>();
        qualityToTaxonMap = new HashMap<String, Set<String>>();
        qualityToGenotypeMap = new HashMap<String, Set<String>>();
        qualityToGeneMap = new HashMap<String, Set<String>>();
        characterToQualityMap = new HashMap<String, Set<String>>();
        qualitiesToCharacterMap = new HashMap<String, String>();
        annotationCount = 0;
        jObjs = new JSONObject();
        // System.out.println(termId);
    }

    /**
     * Builds the JSON representation for the requested anatomical term.
     * Rejects ids that are not TAO: or ZFA: terms (400), or that are unknown
     * to the shard (404).  Otherwise aggregates annotation counts via
     * {@link #getAnatomyTermSummary(String)} and emits one JSON object per
     * character under the "attributes" key.
     *
     * NOTE(review): checked exceptions from the summary step are only logged
     * via printStackTrace(), after which a possibly-incomplete JSON object is
     * still returned to the client.
     *
     * @param variant the requested variant (only JSON is registered)
     * @return the JSON representation, or null after setting an error status
     */
    public Representation getRepresentation(Variant variant) {
        Representation rep;
        try {
            if (!termId.startsWith("TAO:") && !termId.startsWith("ZFA:")) {
                this.jObjs = null;
                getResponse().setStatus(
                        Status.CLIENT_ERROR_BAD_REQUEST,
                        "ERROR: The input parameter "
                                + "is not a recognized anatomical entity");
                return null;
            }
            if (obdsql.getNode(this.termId) != null) {
                JSONObject termObject = new JSONObject();
                String term = obdsql.getNode(this.termId).getLabel();
                termObject.put("id", this.termId);
                termObject.put("name", term);
                this.jObjs.put("term", termObject);
                // Recursively fill the quality/taxon/genotype/gene maps.
                getAnatomyTermSummary(this.termId);
                if (characters.size() > 0) {
                    // Sort for deterministic output order.
                    Collections.sort(characters);
                    // List<JSONObject> qualityObjs = new ArrayList<JSONObject>();
                    List<JSONObject> charObjs = new ArrayList<JSONObject>();
                    for (String charId : characters) {
                        // Per-character totals summed over its qualities.
                        int taxonCt = 0, genotypeCt = 0, geneCt = 0;
                        JSONObject charObj = new JSONObject();
                        JSONObject genesForCharObj = new JSONObject();
                        JSONObject genotypesForCharObj = new JSONObject();
                        JSONObject taxaForCharObj = new JSONObject();
                        for (String patoStr : characterToQualityMap.get(charId)) {
                            JSONObject qualityObj = new JSONObject();
                            String character = obdsql.getNode(charId).getLabel();
                            String state = obdsql.getNode(patoStr).getLabel();
                            qualityObj.put("id", patoStr);
                            qualityObj.put("name", character.toUpperCase()
                                    + "---> " + state);
                            JSONObject taxonObj = new JSONObject();
                            JSONObject genotypeObj = new JSONObject();
                            JSONObject geneObj = new JSONObject();
                            // NOTE(review): annotationCount is the global
                            // total, repeated in every sub-object here.
                            taxonObj.put("annotation_count", annotationCount);
                            if (qualityToTaxonMap.get(patoStr) != null) {
                                taxonObj.put("taxon_count",
                                        qualityToTaxonMap.get(patoStr).size());
                                taxonCt += qualityToTaxonMap.get(patoStr).size();
                            } else {
                                taxonObj.put("taxon_count", 0);
                            }
                            qualityObj.put("taxon_annotations", taxonObj);
                            genotypeObj.put("annotation_count", annotationCount);
                            if (qualityToGenotypeMap.get(patoStr) != null) {
                                genotypeObj.put("genotype_count",
                                        qualityToGenotypeMap.get(patoStr).size());
                                genotypeCt += qualityToGenotypeMap.get(patoStr).size();
                            } else {
                                genotypeObj.put("genotype_count", 0);
                            }
                            qualityObj.put("genotype_annotations", genotypeObj);
                            geneObj.put("annotation_count", annotationCount);
                            if (qualityToGeneMap.get(patoStr) != null) {
                                geneObj.put("gene_count",
                                        qualityToGeneMap.get(patoStr).size());
                                geneCt += qualityToGeneMap.get(patoStr).size();
                            } else {
                                geneObj.put("gene_count", 0);
                            }
                            qualityObj.put("gene_annotations", geneObj);
                            // qualityObjs.add(qualityObj);
                        }
                        charObj.put("id", charId);
                        charObj.put("name",
                                obdsql.getNode(charId).getLabel().toUpperCase());
                        taxaForCharObj.put("annotation_count", annotationCount);
                        taxaForCharObj.put("taxon_count", taxonCt);
                        charObj.put("taxon_annotations", taxaForCharObj);
                        genotypesForCharObj.put("annotation_count", annotationCount);
                        genotypesForCharObj.put("genotype_count", genotypeCt);
                        charObj.put("genotype_annotations", genotypesForCharObj);
                        genesForCharObj.put("annotation_count", annotationCount);
                        genesForCharObj.put("gene_count", geneCt);
                        charObj.put("gene_annotations", genesForCharObj);
                        charObjs.add(charObj);
                    }
                    // this.jObjs.put("qualities", qualityObjs);
                    this.jObjs.put("attributes", charObjs);
                } else {
                    // this.jObjs.put("qualities", "[]");
                    this.jObjs.put("attributes", "[]");
                }
            } else {
                getResponse().setStatus(Status.CLIENT_ERROR_NOT_FOUND,
                        "The search term was not found");
                return null;
            }
        } catch (IOException e) {
            e.printStackTrace();
        } catch (SQLException e) {
            e.printStackTrace();
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
        } catch (JSONException e) {
            e.printStackTrace();
        }
        rep = new JsonRepresentation(this.jObjs);
        return rep;
    }

    /**
     * Collects annotation summaries for the given anatomical term and,
     * recursively, for every term related to it by is_a (its subclasses).
     * For each "exhibits" statement whose object is the term, extracts the
     * PATO quality from the phenotype's compositional description and files
     * the annotating node under taxa (TTO: ids) or genotypes/genes (GENO ids).
     * Results accumulate in the instance maps; annotationCount is incremented
     * once per statement.
     *
     * @param termId the anatomical term to summarize
     */
    private void getAnatomyTermSummary(String termId) throws IOException,
            SQLException, ClassNotFoundException, JSONException,
            IllegalArgumentException {
        // start working with the given anatomical feature
        String nodeId, targetId, patoId;
        Collection<Statement> stmts = obdq.getStatementsWithPredicateAndObject(
                termId, EXHIBITS_RELATION);
        for (Statement stmt : stmts) {
            ++annotationCount;
            nodeId = stmt.getNodeId();
            targetId = stmt.getTargetId();
            if (parseCompositionalDescription(targetId) != null) {
                // pull out pato term
                patoId = parseCompositionalDescription(targetId);
                String characterId;
                characterId = qualitiesToCharacterMap.get(patoId) != null ? // prior mapping exists in reverse lookup
                        qualitiesToCharacterMap.get(patoId) : // save on database search
                        findAttrib(patoId); // otherwise, look in database
                if (!characters.contains(characterId)) {
                    // avoid duplicate entries. we need to sort this later, so can't use a set
                    characters.add(characterId);
                }
                // a reverse mapping: every state has a unique character
                qualitiesToCharacterMap.put(patoId, characterId);
                if (characterToQualityMap.get(characterId) != null) {
                    // previous mappings exist from character to state
                    qualitiesForCharacter = characterToQualityMap.get(characterId);
                    qualitiesForCharacter.add(patoId); // add the new state to existing list
                    characterToQualityMap.put(characterId, qualitiesForCharacter);
                } else {
                    Set<String> newQualitySet = new HashSet<String>();
                    newQualitySet.add(patoId); // create new list of states and map it to characters
                    characterToQualityMap.put(characterId, newQualitySet);
                }
                if (nodeId.contains("TTO:")) { // "Taxon exhibits Phenotype"
                    if (qualityToTaxonMap.get(patoId) != null) {
                        // if previous mappings exist from PATO term to Taxa
                        taxa = qualityToTaxonMap.get(patoId);
                        taxa.add(nodeId); // add this taxon to list
                        qualityToTaxonMap.put(patoId, taxa);
                    } else {
                        Set<String> newTaxaSet = new HashSet<String>();
                        newTaxaSet.add(nodeId); // create new list for this PATO term
                        qualityToTaxonMap.put(patoId, newTaxaSet);
                    }
                } else if (nodeId.contains("GENO")) { // "Genotype exhibits Phenotype"
                    if (qualityToGenotypeMap.get(patoId) != null) {
                        // if previous mappings exist from PATO to genotypes
                        genotypes = qualityToGenotypeMap.get(patoId);
                        genotypes.add(nodeId); // add this genotype to list
                        qualityToGenotypeMap.put(patoId, genotypes);
                    } else {
                        Set<String> newGenotypeSet = new HashSet<String>();
                        newGenotypeSet.add(nodeId); // create new list and add it to PATO term
                        qualityToGenotypeMap.put(patoId, newGenotypeSet);
                    }
                    if (getGeneForGenotype(patoId, nodeId) != null) {
                        String gene = getGeneForGenotype(patoId, nodeId);
                        if (qualityToGeneMap.get(patoId) != null) {
                            // if previous mappings exist from PATO term to genes
                            genes = qualityToGeneMap.get(patoId);
                            genes.add(gene); // add this gene to the list
                            qualityToGeneMap.put(patoId, genes);
                        } else {
                            Set<String> newGeneSet = new HashSet<String>();
                            newGeneSet.add(gene); // create a new list and add it to PATO term
                            qualityToGeneMap.put(patoId, newGeneSet);
                        }
                    }
                }
            }
        }
        // look for subclasses of the input anatomical feature and find "their"
        // qualities and genes
        if (obdq.getStatementsWithPredicateAndObject(termId, IS_A_RELATION)
                .size() > 0) {
            for (Statement scStmt : obdq.getStatementsWithPredicateAndObject(
                    termId, IS_A_RELATION)) {
                getAnatomyTermSummary(scStmt.getNodeId());
            }
        }
    }

    /**
     * A helper method to find the parent PATO term (the attribute/character)
     * through the slims hierarchy: while the term is tagged as belonging to
     * the value slim, walk up its is_a parents.
     *
     * @param patoTerm the PATO term to resolve
     * @return the attribute term id; if the input is a compositional
     *         description (contains '^'), the PATO term parsed out of it
     */
    private String findAttrib(String patoTerm) {
        String parentId, valOrAttrib;
        if (obdq.getStatementsWithSubjectAndPredicate(patoTerm,
                OBOOWL_SUBSET_RELATION).size() > 0) {
            valOrAttrib = obdq.getStatementsWithSubjectAndPredicate(patoTerm,
                    OBOOWL_SUBSET_RELATION).iterator().next().getTargetId();
            if (valOrAttrib.equals(VALUE_SLIM_STRING)) {
                for (Statement s : obdq.getStatementsWithSubjectAndPredicate(
                        patoTerm, IS_A_RELATION)) {
                    parentId = s.getTargetId();
                    if (!parentId.equals(patoTerm)) {
                        return findAttrib(parentId);
                    }
                }
            }
        }
        return patoTerm.contains("^") ? parseCompositionalDescription(patoTerm)
                : patoTerm;
    }

    /**
     * A method to extract the PATO term from a compositional description.
     *
     * @param cd the compositional description string
     * @return the first "PATO:<digits>" substring, or null if none is present
     */
    private String parseCompositionalDescription(String cd) {
        String quality = null;
        Pattern patoPattern = Pattern.compile("PATO:[0-9]+");
        Matcher patoMatcher = patoPattern.matcher(cd);
        if (patoMatcher.find()) {
            quality = cd.substring(patoMatcher.start(), patoMatcher.end());
        }
        return quality;
    }

    /**
     * A method to find the Gene a Genotype is an allele of.
     *
     * @param quality unused; kept for signature compatibility with callers
     * @param genotypeId the genotype whose gene is wanted
     * @return the node id of the first statement with the has_allele
     *         relation, or null if none is found
     */
    private String getGeneForGenotype(String quality, String genotypeId) {
        Collection<Statement> stmts = obdq.genericTermSearch(genotypeId);
        for (Statement stmt : stmts) {
            if (stmt.getRelationId().equals(HAS_ALLELE_RELATION)) {
                return stmt.getNodeId();
            }
        }
        return null;
    }
}
package org.openid4java.message.ax; import org.openid4java.message.*; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; /** * Base class for the Attribute Exchange implementation. * <p> * Encapsulates: * <ul> * <li> the Type URI that identifies the Attribute Exchange extension * <li> a list of extension-specific parameters, with the * openid.<extension_alias> prefix removed * <li> methods for handling the extension-specific support of parameters with * multpile values * </ul> * * @see Message MessageExtension * @author Marius Scurtescu, Johnny Bufu */ public class AxMessage implements MessageExtension, MessageExtensionFactory { private static Log _log = LogFactory.getLog(AxMessage.class); private static final boolean DEBUG = _log.isDebugEnabled(); /** * The Attribute Exchange Type URI. */ public static final String OPENID_NS_AX = "http://openid.net/srv/ax/1.0"; /** * The Attribute Exchange extension-specific parameters. * <p> * The openid.<extension_alias> prefix is not part of the parameter names */ protected ParameterList _parameters; /** * Constructs an empty (no parameters) Attribute Exchange extension. */ public AxMessage() { _parameters = new ParameterList(); if (DEBUG) _log.debug("Created empty AXMessage."); } /** * Constructs an Attribute Exchange extension with a specified list of * parameters. * <p> * The parameter names in the list should not contain the * openid.<extension_alias>. */ public AxMessage(ParameterList params) { _parameters = params; if (DEBUG) _log.debug("Created AXMessage from parameter list:\n" + params); } /** * Gets the Type URI that identifies the Attribute Exchange extension. */ public String getTypeUri() { return OPENID_NS_AX; } /** * Gets ParameterList containing the Attribute Exchange extension-specific * parameters. * <p> * The openid.<extension_alias> prefix is not part of the parameter names, * as it is handled internally by the Message class. 
* <p> * The openid.ns.<extension_type_uri> parameter is also handled by * the Message class. * * @see Message */ public ParameterList getParameters() { return _parameters; } /** * Gets a the value of the parameter with the specified name. * * @param name The name of the parameter, * without the openid.<extension_alias> prefix. * @return The parameter value, or null if not found. */ public String getParameterValue(String name) { return _parameters.getParameterValue(name); } /** * Sets the extension's parameters to the supplied list. * <p> * The parameter names in the list should not contain the * openid.<extension_alias> prefix. */ public void setParameters(ParameterList params) { _parameters = params; } /** * Attribute exchange doesn't implement authentication services. * * @return false */ public boolean providesIdentifier() { return false; } /** * Attribute exchange parameters are required to be signed. * * @return true */ public boolean signRequired() { return true; } /** * Instantiates the apropriate Attribute Exchange object (fetch / store - * request / response) for the supplied parameter list. * * @param parameterList The Attribute Exchange specific parameters * (without the openid.<ext_alias> prefix) * extracted from the openid message. * @param isRequest Indicates whether the parameters were * extracted from an OpenID request (true), * or from an OpenID response. * @return MessageExtension implementation for * the supplied extension parameters. * @throws MessageException If a Attribute Exchange object could not be * instantiated from the supplied parameter list. 
*/ public MessageExtension getExtension( ParameterList parameterList, boolean isRequest) throws MessageException { String axMode = null; if (parameterList.hasParameter("mode")) { axMode = parameterList.getParameterValue("mode"); if ("fetch_request".equals(axMode)) return FetchRequest.createFetchRequest(parameterList); else if ("fetch_response".equals(axMode)) return FetchResponse.createFetchResponse(parameterList); else if ("store_request".equals(axMode)) return StoreRequest.createStoreRequest(parameterList); else if ("store_response_success".equals(axMode) || "store_response_failure".equals(axMode)) return StoreResponse.createStoreResponse(parameterList); } throw new MessageException("Invalid value for attribute exchange mode: " + axMode); } }
package org.flymine.postprocess;

import java.util.Iterator;
import java.util.Map;
import java.util.HashMap;
import java.util.Set;
import java.util.List;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Collections;

import org.intermine.objectstore.query.*;
import org.intermine.objectstore.ObjectStore;
import org.intermine.objectstore.ObjectStoreWriter;
import org.intermine.objectstore.ObjectStoreException;
import org.intermine.objectstore.intermine.ObjectStoreInterMineImpl;
import org.intermine.util.DynamicUtil;
import org.intermine.util.TypeUtil;
import org.intermine.model.InterMineObject;

import org.flymine.model.genomic.*;

import org.apache.log4j.Logger;

/**
 * Calculate additional mappings between annotation after loading into genomic ObjectStore.
 * Currently designed to cope with situation after loading ensembl, may need to change
 * as other annotation is loaded. New Locations (and updated BioEntities) are stored
 * back in originating ObjectStore.
 *
 * @author Richard Smith
 */
public class CalculateLocations {
    private static final Logger LOG = Logger.getLogger(CalculateLocations.class);
    protected ObjectStoreWriter osw;
    protected ObjectStore os;

    // Lookup maps built by the make*() helpers (defined elsewhere in this
    // class) and consumed by createLocations(); nulled out afterwards to
    // release memory.
    protected Map chrById = new HashMap();
    protected Map bandToChr = new HashMap();
    protected Map chrToBand = new HashMap();
    protected Map chrToSc = new HashMap();
    protected Map scToChr = new HashMap();
    protected Map contigToSc = new HashMap();
    protected Map contigToChr = new HashMap();

    /**
     * Create a new CalculateLocations object from an ObjectStoreWriter
     * @param osw writer on genomic ObjectStore
     */
    public CalculateLocations(ObjectStoreWriter osw) {
        this.osw = osw;
        this.os = osw.getObjectStore();
    }

    /**
     * Fix the partial Locations connecting Contigs and Exons.
     * @throws Exception if anything goes wrong
     */
    public void fixPartials() throws Exception {
        fixPartials(Contig.class, Exon.class);
    }

    /**
     * Fix the Locations that connect objectCls and subjectCls objects.
     * Rows are grouped into batches by subject id (the query is assumed to
     * return rows for the same subject consecutively) and each batch is
     * handed to fixPartialBatch() inside a single transaction.
     */
    private void fixPartials(Class objectCls, Class subjectCls) throws Exception {
        Results results = PostProcessUtil.findLocations(os, objectCls, subjectCls, true);
        results.setBatchSize(500);
        osw.beginTransaction();
        Iterator resIter = results.iterator();
        Set batch = new HashSet();
        int previousSubjectId = -1;
        while (resIter.hasNext()) {
            ResultsRow rr = (ResultsRow) resIter.next();
            Integer objectId = (Integer) rr.get(0);
            BioEntity subject = (BioEntity) rr.get(1);
            Location location = (Location) rr.get(2);
            // subject changed: flush the batch collected so far
            if (subject.getId().intValue() != previousSubjectId && batch.size() > 0) {
                fixPartialBatch(batch);
                batch = new HashSet();
            }
            batch.add(rr);
            previousSubjectId = subject.getId().intValue();
        }
        // flush the final batch
        if (previousSubjectId != -1 && batch.size() > 0) {
            fixPartialBatch(batch);
        }
        osw.commitTransaction();
    }

    /**
     * The argument should be a Set of ResultsRow objects returned by findLocations(). All of the
     * Locations in the ResultsRow should have the same Subject.
     */
    private void fixPartialBatch(Set batch) throws Exception {
        if (batch.size() < 2) {
            // if the object doesn't have two location objects the locations can't be partial
            return;
        }

        LOG.info("processing partial batch size " + batch.size());

        Location startLocation = null;
        // NOTE(review): startLocationObjectLength/endLocationObjectLength are
        // never read after initialization.
        int startLocationObjectLength = -1;
        Location endLocation = null;
        int endLocationObjectLength = -1;

        // Locations that are partial at the start and the end
        Set veryPartialLocations = new HashSet();

        int subjectLengthSoFar = -1;

        Iterator batchIter = batch.iterator();

        while (batchIter.hasNext()) {
            ResultsRow rr = (ResultsRow) batchIter.next();
            Integer objectId = (Integer) rr.get(0);
            BioEntity object = (BioEntity) os.getObjectById(objectId);
            BioEntity subject = (BioEntity) rr.get(1);
            Location location = (Location) rr.get(2);

            try {
                Object objectLengthField = TypeUtil.getFieldValue(object, "length");
                int objectLength = -1;
                if (objectLengthField instanceof Integer) {
                    objectLength = ((Integer) objectLengthField).intValue();
                } else {
                    LOG.error("Object with ID: " + object.getId()
                              + " has no Integer length field");
                    continue;
                }

                // spans the whole object: partial at both ends, handled below
                if (location.getEnd().intValue() == objectLength
                    && location.getStart().intValue() == 1) {
                    // both ends are partial - fix last
                    veryPartialLocations.add(location);
                }

                // starts at 1 but stops short: this is the subject's end piece
                if (location.getStart().intValue() == 1
                    && location.getEnd().intValue() != objectLength) {
                    endLocation = location;
                }

                // runs to the object's end: this is the subject's start piece
                if (location.getEnd().intValue() == objectLength
                    && location.getStart().intValue() != 1) {
                    PartialLocation pl =
                        (PartialLocation) cloneInterMineObject(location, PartialLocation.class);
                    pl.setStartIsPartial(Boolean.FALSE);
                    pl.setEndIsPartial(Boolean.TRUE);
                    pl.setSubjectStart(new Integer(1));
                    subjectLengthSoFar =
                        location.getEnd().intValue() - location.getStart().intValue() + 1;
                    pl.setSubjectEnd(new Integer(subjectLengthSoFar));
                    startLocation = pl;
                    osw.store(pl);
                }
            } catch (IllegalAccessException e) {
                LOG.error("Object with ID: " + object.getId()
                          + " has no Integer length field");
            }
        }

        // middle pieces: assign consecutive subject coordinate ranges.
        // NOTE(review): iteration order of this HashSet is unspecified, so the
        // order in which middle pieces are numbered is not guaranteed.
        Iterator veryPartialLocationsIterator = veryPartialLocations.iterator();

        while (veryPartialLocationsIterator.hasNext()) {
            Location location = (Location) veryPartialLocationsIterator.next();

            int thisObjectLength =
                location.getEnd().intValue() - location.getStart().intValue() + 1;

            PartialLocation pl =
                (PartialLocation) cloneInterMineObject(location, PartialLocation.class);
            pl.setStartIsPartial(Boolean.TRUE);
            pl.setEndIsPartial(Boolean.TRUE);
            pl.setSubjectStart(new Integer(subjectLengthSoFar + 1));
            pl.setSubjectEnd(new Integer(subjectLengthSoFar + thisObjectLength));
            osw.store(pl);

            subjectLengthSoFar += thisObjectLength;
        }

        if (endLocation == null) {
            LOG.error("endLocation is null - startLocation: " + startLocation
                      + " veryPartialLocations: " + veryPartialLocations);
        } else {
            PartialLocation pl =
                (PartialLocation) cloneInterMineObject(endLocation, PartialLocation.class);
            pl.setStartIsPartial(Boolean.TRUE);
            pl.setEndIsPartial(Boolean.FALSE);
            pl.setSubjectStart(new Integer(subjectLengthSoFar + 1));
            int newEndPos = pl.getSubjectStart().intValue()
                + (pl.getEnd().intValue() - pl.getStart().intValue());
            pl.setSubjectEnd(new Integer(newEndPos));
            osw.store(pl);
        }
    }

    // TODO 1) support pos/neg strand
    //      2) store chromosomes in id map -> avoid getObjectById
    //      3) map from chromosome to children -> less overlap comparasons
    //      4) check if a Location already exists?
    //      5) evidence collection for locations

    /**
     * Create new Location objects where required:
     *
     * | = Location that should exist  ( = Location to create
     *
     * Chromosome
     * |   |  (  (
     * ChromosomeBand
     * (  (  (
     * Supercontig
     * |  (
     * Contig
     * |
     * *features*
     *
     * @throws Exception if anything goes wrong
     */
    public void createLocations() throws Exception {
        osw.beginTransaction();

        // 0. Hold Chromosomes in map by id
        makeChromosomeMap();

        // 1. Find and hold locations of ChromosomeBands on Chromsomes
        makeChromosomeBandLocations();

        // 2. Find and hold locations of Supercontigs on Chromosomes
        //    Create locations of Supercontigs on ChromosomeBands
        makeSupercontigLocations();

        // 3. hold offsets of Contigs on Supercontigs
        //    create locations Contig->ChromosomeBand, Contig->Chromosome
        makeContigLocations();

        // 4. For all BioEntities located on Contigs compute other offsets on all parents
        Results results = PostProcessUtil.findLocations(os, Contig.class, BioEntity.class, false);
        results.setBatchSize(500);

        Iterator resIter = results.iterator();

        // create map ChromsomeBands to avoid calling getObjectById
        // need to keep running query after each commit transaction
        Map idBands = new HashMap();
        Iterator bandIter = PostProcessUtil.selectObjectsOfClass(os, ChromosomeBand.class);
        while (bandIter.hasNext()) {
            ChromosomeBand band = (ChromosomeBand) bandIter.next();
            idBands.put(band.getId(), band);
        }
        LOG.info("built ChromosomeBand id map, size = " + idBands.keySet().size());

        // create map of Supercontigs to avoid calling getObjectById
        // need to keep running query after each commit transaction
        Map idScs = new HashMap();
        Iterator scIter = PostProcessUtil.selectObjectsOfClass(os, Supercontig.class);
        while (scIter.hasNext()) {
            Supercontig sc = (Supercontig) scIter.next();
            idScs.put(sc.getId(), sc);
        }
        LOG.info("built Supercontig id map, size = " + idScs.keySet().size());

        // maps from BioEntity to Location
        Map partialsOnChromosomes = new HashMap();
        Map partialsOnSupercontigs = new HashMap();
        Map partialsOnChromosomeBands = new HashMap();

        // i = rows processed, j = Supercontig locations, k = band locations
        int i = 0, j = 0, k = 0;
        long start = System.currentTimeMillis();
        while (resIter.hasNext()) {
            i++;
            ResultsRow rr = (ResultsRow) resIter.next();

            Location locBioOnContig = (Location) rr.get(2);
            Integer contigId = (Integer) rr.get(0);
            BioEntity bio = (BioEntity) rr.get(1);
            SimpleLoc bioOnContig =
                new SimpleLoc(contigId.intValue(), bio.getId().intValue(), locBioOnContig);

            // first create location of feature on Chromosome
            SimpleLoc contigOnChr = (SimpleLoc) contigToChr.get(contigId);
            Chromosome chr = (Chromosome) chrById.get(new Integer(contigOnChr.getParentId()));
            Location bioOnChrLoc = createChromosomeLocation(contigOnChr, bioOnContig, chr, bio);
            if (locBioOnContig instanceof PartialLocation) {
                // partials are collected and merged later by processPartials()
                addToMapOfLists(partialsOnChromosomes, bio, bioOnChrLoc);
            } else {
                osw.store(bioOnChrLoc);
            }
            SimpleLoc bioOnChr =
                new SimpleLoc(chr.getId().intValue(), bio.getId().intValue(), bioOnChrLoc);

            // create location of feature on Supercontig
            Set scs = (Set) chrToSc.get(chr.getId());
            if (scs != null) {
                Iterator iter = scs.iterator();
                while (iter.hasNext()) {
                    SimpleLoc scOnChr = (SimpleLoc) iter.next();
                    if (overlap(scOnChr, bioOnChr)) {
                        Supercontig sc =
                            (Supercontig) idScs.get(new Integer(scOnChr.getChildId()));
                        Location bioOnScLoc = createLocation(sc, scOnChr, bio, bioOnChr);
                        if (bioOnScLoc instanceof PartialLocation) {
                            addToMapOfLists(partialsOnSupercontigs, bio, bioOnScLoc);
                        } else {
                            osw.store(bioOnScLoc);
                        }
                        j++;
                    }
                }
            }

            // create location of feature on ChromosomeBand
            Set bands = (Set) chrToBand.get(chr.getId());
            if (bands != null) {
                Iterator iter = bands.iterator();
                while (iter.hasNext()) {
                    SimpleLoc bandOnChr = (SimpleLoc) iter.next();
                    if (overlap(bandOnChr, bioOnChr)) {
                        ChromosomeBand band =
                            (ChromosomeBand) idBands.get(new Integer(bandOnChr.getChildId()));
                        Location bioOnBandLoc = createLocation(band, bandOnChr, bio, bioOnChr);
                        if (bioOnBandLoc instanceof PartialLocation) {
                            addToMapOfLists(partialsOnChromosomeBands, bio, bioOnBandLoc);
                        } else {
                            osw.store(bioOnBandLoc);
                        }
                        k++;
                    }
                }
            }

            if (i % 100 == 0) {
                long now = System.currentTimeMillis();
                LOG.info("Created " + i + " Chromosome, " + j + " SuperContig locations and "
                         + k + " ChromosomeBand locations (avg = " + ((60000L * i) / (now - start))
                         + " per minute)");
            }
        }

        // release the lookup maps - no longer needed
        chrById = null;
        bandToChr = null;
        chrToBand = null;
        chrToSc = null;
        scToChr = null;
        contigToSc = null;
        contigToChr = null;

        // process partials Locations
        processPartials(partialsOnChromosomes);
        processPartials(partialsOnSupercontigs);
        processPartials(partialsOnChromosomeBands);

        osw.commitTransaction();
        LOG.info("Stored " + i + " Locations between features and Chromosome.");
        LOG.info("Stored " + j + " Locations between features and Supercontig.");
        LOG.info("Stored " + k + " Locations between features and ChromosomeBand.");
    }

    /**
     * Put key and value in the given map. The values of the map are List, to which the new value
     * is appended. The Lists are created if missing.
     */
    private void addToMapOfLists(Map map, Object key, Object value) {
        if (map.get(key) != null) {
            ((List) map.get(key)).add(value);
        } else {
            List list = new ArrayList();
            list.add(value);
            map.put(key, list);
        }
    }

    /**
     * Process the Partial Locations in mapOfPartials and merge PartialLocations.
     * For each BioEntity, if all its PartialLocations share the same object, a single
     * spanning (non-partial) Location is created and stored; otherwise the
     * PartialLocations are stored unmerged.
     */
    private void processPartials(Map mapOfPartials) throws ObjectStoreException {
        Iterator mapOfPartialsIter = mapOfPartials.keySet().iterator();

        while (mapOfPartialsIter.hasNext()) {
            int minBioStart = Integer.MAX_VALUE;
            int maxBioEnd = -1;
            int minChrStart = Integer.MAX_VALUE;
            int maxChrEnd = -1;
            int newStrand = 0;
            Integer newStartPhase = null;
            Integer newEndPhase = null;

            BioEntity bioEntity = (BioEntity) mapOfPartialsIter.next();
            List partialLocList = (List) mapOfPartials.get(bioEntity);

            // check that all the PartialLocations have the same object BioEntity
            if (!checkForSameObject(partialLocList)) {
                // don't try to merge these PartialLocations
                storeAll(partialLocList);
                continue;
            }

            for (Iterator partialLocListIter = partialLocList.iterator();
                 partialLocListIter.hasNext(); ) {
                Object nextObject = partialLocListIter.next();
                PartialLocation pl = (PartialLocation) nextObject;

                // createChromosomeLocation() doesn't set subjectStart or subjectEnd yet
                // if (pl.getSubjectStart().intValue() < minBioStart) {
                //     minBioStart = pl.getSubjectStart().intValue();
                // if (pl.getSubjectEnd().intValue() > maxBioEnd) {
                //     maxBioEnd = pl.getSubjectEnd().intValue();

                if (pl.getStart().intValue() < minChrStart) {
                    minChrStart = pl.getStart().intValue();
                    // use the start phase of the first Location in the new Location
                    newStartPhase = pl.getPhase();
                }

                if (pl.getEnd().intValue() > maxChrEnd) {
                    maxChrEnd = pl.getEnd().intValue();
                    // use the end phase of the last Location in the new Location
                    newEndPhase = pl.getEndPhase();
                }

                if (newStrand == 0) {
                    newStrand = pl.getStrand().intValue();
                } else {
                    if (newStrand != pl.getStrand().intValue()) {
                        throw new RuntimeException("BioEntity (" + bioEntity + ") has two "
                                                   + "Locations " + "with inconsistent strands");
                    }
                }
            }

            BioEntity newLocationObject = ((Location) partialLocList.get(0)).getObject();

            // should check that maxChrEnd - minChrStart = maxBioEnd - minBioStart once
            // createChromosomeLocation() is fixed

            Location newLocation =
                (Location) DynamicUtil.createObject(Collections.singleton(Location.class));
            newLocation.setStart(new Integer(minChrStart));
            newLocation.setEnd(new Integer(maxChrEnd));
            newLocation.setStartIsPartial(Boolean.FALSE);
            newLocation.setEndIsPartial(Boolean.FALSE);
            newLocation.setStrand(new Integer(newStrand));
            newLocation.setPhase(newStartPhase);
            newLocation.setEndPhase(newEndPhase);
            newLocation.setSubject(bioEntity);
            newLocation.setObject(newLocationObject);

            osw.store(newLocation);
        }
    }

    /**
     * Create a Location that spans the locations of some child objects. eg. create a location for
     * Transcript that is as big as all the exons in it's exons collection. One new location will
     * be created for each possible Location.object - Transcript->Chromosome, Transcript->Contig
     * etc.
     * @param parentClass the parent, eg. Transcript
     * @param childClass the child, eg. Exon
     * @param refField the linking field eg. "exons"
     * @throws ObjectStoreException if the is a problem with the ObjectStore
     */
    public void createSpanningLocations(Class parentClass, Class childClass, String refField)
        throws ObjectStoreException {
        Iterator resIter = findCollections(os, parentClass, childClass, refField);

        // Map of location.objects to Maps from parent objects to a to their (new) start and end
        // positions. eg.
        //   Chromosome10 -> Exon1 -> SimpleLoc {start -> 2111, end -> 2999}
        //   Contig23 -> Exon1 -> SimpleLoc {start -> 1111, end -> 1999}
        Map locatedOnObjectMap = new HashMap();

        while (resIter.hasNext()) {
            ResultsRow rr = (ResultsRow) resIter.next();

            BioEntity parentObject = (BioEntity) rr.get(0);
            BioEntity childObject = (BioEntity) rr.get(1);
            Location location = (Location) rr.get(2);

            // the object that childObject is located on
            BioEntity locatedOnObject = (BioEntity) rr.get(3);

            Map parentObjectMap = (Map) locatedOnObjectMap.get(locatedOnObject.getId());

            if (parentObjectMap == null) {
                parentObjectMap = new HashMap();
                locatedOnObjectMap.put(locatedOnObject.getId(), parentObjectMap);
            }

            SimpleLoc parentObjectSimpleLoc =
                (SimpleLoc) parentObjectMap.get(parentObject.getId());

            if (parentObjectSimpleLoc == null) {
                // sentinel start/end values, widened by the min/max below
                parentObjectSimpleLoc = new SimpleLoc(-1, -1, Integer.MAX_VALUE, -1, 0);
                parentObjectMap.put(parentObject.getId(), parentObjectSimpleLoc);
            }

            int currentParentStart = parentObjectSimpleLoc.getStart();
            int currentParentEnd = parentObjectSimpleLoc.getEnd();

            if (location.getStart().intValue() < currentParentStart) {
                parentObjectSimpleLoc.setStart(location.getStart().intValue());
            }

            if (location.getEnd().intValue() > currentParentEnd) {
                parentObjectSimpleLoc.setEnd(location.getEnd().intValue());
            }

            parentObjectSimpleLoc.setStrand(location.getStrand().intValue());

            // TODO XXX FIXME: deal with partial locations and do consistency checks (eg. make
            // sure all exons are on the same strand)
        }

        osw.beginTransaction();

        // make new locations and store them
        Iterator locatedOnObjectIterator = locatedOnObjectMap.keySet().iterator();

        while (locatedOnObjectIterator.hasNext()) {
            Integer locatedOnObjectId = (Integer) locatedOnObjectIterator.next();
            BioEntity locatedOnObject = (BioEntity) os.getObjectById(locatedOnObjectId);
            Map parentObjectMap = (Map) locatedOnObjectMap.get(locatedOnObjectId);
            Iterator parentObjectMapIterator = parentObjectMap.keySet().iterator();
            while (parentObjectMapIterator.hasNext()) {
                Integer parentObjectId = (Integer) parentObjectMapIterator.next();
                BioEntity parentObject = (BioEntity) os.getObjectById(parentObjectId);
                SimpleLoc parentObjectSimpleLoc =
                    (SimpleLoc) parentObjectMap.get(parentObjectId);
                Location newLocation =
                    (Location) DynamicUtil.createObject(Collections.singleton(Location.class));
                newLocation.setStart(new Integer(parentObjectSimpleLoc.getStart()));
                newLocation.setEnd(new Integer(parentObjectSimpleLoc.getEnd()));
                newLocation.setStartIsPartial(Boolean.FALSE);
                newLocation.setEndIsPartial(Boolean.FALSE);
                newLocation.setStrand(new Integer(parentObjectSimpleLoc.getStrand()));
                newLocation.setSubject(parentObject);
                newLocation.setObject(locatedOnObject);

                osw.store(newLocation);
            }
        }

        osw.commitTransaction();
    }

    /**
     * Query a class like Transcript that refers to a collection of located classes (like Exon) and
     * return an Results object containing Transcript, Exon, Exon location and location.object
     */
    private static Iterator findCollections(ObjectStore os, Class parentClass, Class childClass,
                                            String refField)
        throws ObjectStoreException {
        Query q = new Query();
        q.setDistinct(false);
        QueryClass qcParent = new QueryClass(parentClass);
        q.addFrom(qcParent);
        q.addToSelect(qcParent);
        q.addToOrderBy(qcParent);
        QueryClass qcChild = new QueryClass(childClass);
        q.addFrom(qcChild);
        q.addToSelect(qcChild);
        QueryClass qcLoc = new QueryClass(Location.class);
        q.addFrom(qcLoc);
        q.addToSelect(qcLoc);
        QueryClass qcLocObject = new QueryClass(BioEntity.class);
        q.addFrom(qcLocObject);
        q.addToSelect(qcLocObject);

        ConstraintSet cs = new ConstraintSet(ConstraintOp.AND);

        // location.object is the thing the child is located on
        QueryObjectReference ref1 = new QueryObjectReference(qcLoc, "object");
        ContainsConstraint cc1 = new ContainsConstraint(ref1, ConstraintOp.CONTAINS, qcLocObject);
        cs.addConstraint(cc1);

        // location.subject is the child itself
        QueryObjectReference ref2 = new QueryObjectReference(qcLoc, "subject");
        ContainsConstraint cc2 = new ContainsConstraint(ref2, ConstraintOp.CONTAINS, qcChild);
        cs.addConstraint(cc2);

        // parent.refField contains the child
        QueryCollectionReference ref3 = new QueryCollectionReference(qcParent, refField);
        ContainsConstraint cc3 = new ContainsConstraint(ref3, ConstraintOp.CONTAINS, qcChild);
        cs.addConstraint(cc3);

        q.setConstraint(cs);

        ((ObjectStoreInterMineImpl) os).precompute(q);
        Results res = new Results(q, os, os.getSequence());
        res.setBatchSize(500);

        return res.iterator();
    }

    /**
     * Return true if and only if all of the Locations in the List have the same object reference.
     */
    private boolean checkForSameObject(List list) {
        BioEntity testObject = null;

        for (Iterator iter = list.iterator(); iter.hasNext(); ) {
            PartialLocation pl = (PartialLocation) iter.next();

            if (testObject == null) {
                testObject = pl.getObject();
            } else {
                if (!testObject.equals(pl.getObject())) {
                    LOG.info("BioEntity (" + pl.getSubject() + ") is located "
                             + "on two different BioEntities " + pl.getObject().getId()
                             + " and " + testObject.getId());
                    return false;
                }
            }
        }

        return true;
    }

    /**
     * Store all the BioEntity objects from the List using osw.
*/
    private void storeAll(List list) throws ObjectStoreException {
        // Store each InterMineObject from the List with this class's ObjectStoreWriter (osw).
        Iterator iter = list.iterator();
        while (iter.hasNext()) {
            osw.store((InterMineObject) iter.next());
        }
    }

    /**
     * Create a Location of child on parent, given the locations of both on the same
     * chromosome.  Coordinates are inclusive and 1-based relative to the parent.
     * If the child sticks out beyond either end of the parent, a PartialLocation is
     * created instead and its subjectStart/subjectEnd record which part of the child
     * is actually covered.
     * @param parent the parent BioEntity (the Location "object")
     * @param parentOnChr location of parent on the chromosome
     * @param child the child BioEntity (the Location "subject")
     * @param childOnChr location of child on the chromosome
     * @return the new Location (or PartialLocation) of child on parent
     */
    protected Location createLocation(BioEntity parent, SimpleLoc parentOnChr,
                                      BioEntity child, SimpleLoc childOnChr) {
        //BioEntity child, SimpleLoc childOnChr, boolean strandIsOrientation) {
        if (!overlap(childOnChr, parentOnChr)) {
            throw new IllegalArgumentException("parent (" + parentOnChr.getStart() + ", "
                + parentOnChr.getEnd() + ") and child (" + childOnChr.getStart() + ", "
                + childOnChr.getEnd() + ") do not overlap.");
        }
        boolean startIsPartial = false;
        boolean endIsPartial = false;
        // want inclusive co-ordinates
        int parentLength = (parentOnChr.getEnd() - parentOnChr.getStart()) + 1;
        int childLength = (childOnChr.getEnd() - childOnChr.getStart()) + 1;
        // Decide partiality in *parent* orientation: on a reverse-strand parent the
        // parent's chromosome "end" is the parent-coordinate start, so the tests flip.
        if (parentOnChr.getStrand() == -1) {
            if (parentOnChr.getEnd() < childOnChr.getEnd()) {
                startIsPartial = true;
            }
            if (parentOnChr.getStart() > childOnChr.getStart()) {
                endIsPartial = true;
            }
        } else {
            if (childOnChr.getStart() < parentOnChr.getStart()) {
                startIsPartial = true;
            }
            if (childOnChr.getEnd() > parentOnChr.getEnd()) {
                endIsPartial = true;
            }
        }
        Location childOnParent = null;
        if (startIsPartial || endIsPartial) {
            PartialLocation pl = (PartialLocation)
                DynamicUtil.createObject(Collections.singleton(PartialLocation.class));
            // default: the whole child (1..childLength) is covered; the branches below
            // narrow whichever end hangs off the parent
            pl.setSubjectStart(new Integer(1));
            pl.setSubjectEnd(new Integer(childLength));
            if (startIsPartial) {
                // subject coordinates depend on the relative orientation of parent and child
                if (parentOnChr.getStrand() == -1) {
                    if (childOnChr.getStrand () == -1) {
                        pl.setSubjectStart(new Integer(childOnChr.getEnd()
                                                       - parentOnChr.getEnd() + 1));
                    } else {
                        pl.setSubjectEnd(new Integer(parentOnChr.getEnd()
                                                     - childOnChr.getStart() + 1));
                    }
                } else {
                    if (childOnChr.getStrand () == -1) {
                        pl.setSubjectEnd(new Integer(childOnChr.getEnd()
                                                     - parentOnChr.getStart() + 1));
                    } else {
                        pl.setSubjectStart(new Integer(parentOnChr.getStart()
                                                       - childOnChr.getStart() + 1));
                    }
                }
                childOnParent = pl;
            }
            if (endIsPartial) {
                // mirror of the startIsPartial block for the other end of the child
                if (parentOnChr.getStrand() == -1) {
                    if (childOnChr.getStrand() == -1) {
                        pl.setSubjectEnd(new Integer(childOnChr.getEnd()
                                                     - parentOnChr.getStart() + 1));
                    } else {
                        pl.setSubjectStart(new Integer(parentOnChr.getStart()
                                                       - childOnChr.getStart() + 1));
                    }
                } else {
                    if (childOnChr.getStrand() == -1) {
                        pl.setSubjectStart(new Integer(childOnChr.getEnd()
                                                       - parentOnChr.getEnd() + 1));
                    } else {
                        pl.setSubjectEnd(new Integer(parentOnChr.getEnd()
                                                     - childOnChr.getStart() + 1));
                    }
                }
                childOnParent = pl;
            }
        } else {
            if (childOnChr.isPartial()) {
                // start or end of childOnParent is partial so the new location must be partial too
                childOnParent = (PartialLocation)
                    DynamicUtil.createObject(Collections.singleton(PartialLocation.class));
            } else {
                childOnParent = (Location)
                    DynamicUtil.createObject(Collections.singleton(Location.class));
            }
        }
        childOnParent.setObject(parent);
        childOnParent.setSubject(child);
        int newChildOnParentStart;
        int newChildOnParentEnd;
        // Translate the child's chromosome coordinates into 1-based parent coordinates;
        // partial ends clamp to the parent's extremes (1 and parentLength).
        if (startIsPartial) {
            newChildOnParentStart = 1;
        } else {
            if (parentOnChr.getStrand() == -1) {
                newChildOnParentStart = parentOnChr.getEnd() - childOnChr.getEnd() + 1;
            } else {
                newChildOnParentStart = childOnChr.getStart() - parentOnChr.getStart() + 1;
            }
        }
        if (endIsPartial) {
            newChildOnParentEnd = parentLength;
        } else {
            if (parentOnChr.getStrand() == -1) {
                newChildOnParentEnd = parentOnChr.getEnd() - childOnChr.getStart() + 1;
            } else {
                newChildOnParentEnd = childOnChr.getEnd() - parentOnChr.getStart() + 1;
            }
        }
        childOnParent.setStart(new Integer(newChildOnParentStart));
        childOnParent.setEnd(new Integer(newChildOnParentEnd));
        // we don't just check for (childOnChr.getStrand() == parentOnChr.getStrand()) because we
        // treat strand of 0 as equal to strand 1
        if (childOnChr.getStrand() == -1 && parentOnChr.getStrand() == -1) {
            childOnParent.setStrand(new Integer(1));
        } else {
            if (childOnChr.getStrand() == -1 || parentOnChr.getStrand() == -1) {
                childOnParent.setStrand(new Integer(-1));
            } else {
                childOnParent.setStrand(new Integer(1));
            }
        }
        if
(parentOnChr.getStrand() == -1) { if (childOnChr.endIsPartial()) { startIsPartial = true; } if (childOnChr.startIsPartial()) { endIsPartial = true; } } else { if (childOnChr.startIsPartial()) { startIsPartial = true; } if (childOnChr.endIsPartial()) { endIsPartial = true; } } childOnParent.setStartIsPartial(startIsPartial ? Boolean.TRUE : Boolean.FALSE); childOnParent.setEndIsPartial(endIsPartial ? Boolean.TRUE : Boolean.FALSE); // TODO evidence? //LOG.info("Created Location " + childOnParent + " for parent: " + parent + " and child: " // + child); return childOnParent; } /** * Hold Chromosomes in map by id */ private void makeChromosomeMap() throws Exception { Query q = new Query(); QueryClass qc = new QueryClass(Chromosome.class); q.addToSelect(qc); q.addFrom(qc); SingletonResults sr = new SingletonResults(q, os, os.getSequence()); Iterator chrIter = sr.iterator(); while (chrIter.hasNext()) { Chromosome chr = (Chromosome) chrIter.next(); chrById.put(chr.getId(), chr); } } /** * Find and hold locations of ChromosomeBands on Chromsomes */ private void makeChromosomeBandLocations() throws Exception { Results results = PostProcessUtil.findLocations(os, Chromosome.class, ChromosomeBand.class, true); results.setBatchSize(500); Iterator resIter = results.iterator(); while (resIter.hasNext()) { ResultsRow rr = (ResultsRow) resIter.next(); Integer chrId = (Integer) rr.get(0); ChromosomeBand band = (ChromosomeBand) rr.get(1); Location loc = (Location) rr.get(2); SimpleLoc sl = new SimpleLoc(chrId.intValue(), band.getId().intValue(), loc); addToMap(chrToBand, chrId, sl); bandToChr.put(band.getId(), sl); } LOG.info("Found " + bandToChr.size() + " ChromosomeBands located on Chromosomes"); LOG.info("chrToBand keys " + chrToBand.keySet()); } /** * Find and hold locations of Supercontigs on Chromosomes * Create locations of Supercontigs on ChromosomeBands */ private void makeSupercontigLocations() throws Exception { Results results = PostProcessUtil.findLocations(os, 
Chromosome.class, Supercontig.class, true); results.setBatchSize(500); Iterator resIter = results.iterator(); // create map ChromsomeBands to avoid calling getObjectById // need to keep running query after each commit transaction Map idBands = new HashMap(); Iterator bandIter = PostProcessUtil.selectObjectsOfClass(os, ChromosomeBand.class); while (bandIter.hasNext()) { ChromosomeBand band = (ChromosomeBand) bandIter.next(); idBands.put(band.getId(), band); } LOG.info("built ChromosomeBand id map, size = " + idBands.keySet().size()); int i = 0; while (resIter.hasNext()) { ResultsRow rr = (ResultsRow) resIter.next(); Location scOnChrLoc = (Location) rr.get(2); Integer chrId = (Integer) rr.get(0); Supercontig sc = (Supercontig) rr.get(1); SimpleLoc scOnChr = new SimpleLoc(chrId.intValue(), sc.getId().intValue(), scOnChrLoc); scToChr.put(sc.getId(), scOnChr); addToMap(chrToSc, chrId, scOnChr); // find get ChromosomeBands that cover location on Chromosome Set bands = (Set) chrToBand.get(chrId); if (bands != null) { Iterator iter = bands.iterator(); while (iter.hasNext()) { SimpleLoc bandOnChr = (SimpleLoc) iter.next(); if (overlap(scOnChr, bandOnChr)) { ChromosomeBand band = (ChromosomeBand) idBands.get(new Integer(bandOnChr.getChildId())); Location scOnBandLoc = createLocation(band, bandOnChr, sc, scOnChr); osw.store(scOnBandLoc); i++; } } } } LOG.info("Stored " + i + " Locations between Supercontig and ChromosomeBand."); } /** * hold offsets of Contigs on Supercontigs * create locations Contig->ChromosomeBand, Contig->Chromosome */ private void makeContigLocations() throws Exception { Results results = PostProcessUtil.findLocations(os, Supercontig.class, Contig.class, true); results.setBatchSize(500); Iterator resIter = results.iterator(); // create map ChromsomeBands to avoid calling getObjectById // need to keep running query after each commit transaction Map idBands = new HashMap(); Iterator bandIter = PostProcessUtil.selectObjectsOfClass(os, ChromosomeBand.class); 
while (bandIter.hasNext()) { ChromosomeBand band = (ChromosomeBand) bandIter.next(); idBands.put(band.getId(), band); } LOG.info("built ChromosomeBand id map, size = " + idBands.keySet().size()); int i = 0; int j = 0; long start = System.currentTimeMillis(); while (resIter.hasNext()) { ResultsRow rr = (ResultsRow) resIter.next(); Location locContigOnSc = (Location) rr.get(2); Integer scId = (Integer) rr.get(0); Contig contig = (Contig) rr.get(1); SimpleLoc contigOnSc = new SimpleLoc(scId.intValue(), contig.getId().intValue(), locContigOnSc); // create location of contig on chromosome, don't expect partial locations SimpleLoc scOnChr = (SimpleLoc) scToChr.get(scId); Chromosome chr = (Chromosome) chrById.get(new Integer(scOnChr.getParentId())); Location contigOnChrLoc = createChromosomeLocation(scOnChr, contigOnSc, chr, contig); SimpleLoc contigOnChr = new SimpleLoc(chr.getId().intValue(), contig.getId().intValue(), contigOnChrLoc); contigToChr.put(contig.getId(), contigOnChr); contigToSc.put(contig.getId(), contigOnSc); osw.store(contigOnChrLoc); i++; // create location of contig on ChromosomeBand // get ChromosomeBands that cover location on Chromosome Set bands = (Set) chrToBand.get(chr.getId()); if (bands != null) { Iterator iter = bands.iterator(); while (iter.hasNext()) { SimpleLoc bandOnChr = (SimpleLoc) iter.next(); if (overlap(contigOnChr, bandOnChr)) { ChromosomeBand band = (ChromosomeBand) idBands.get(new Integer(bandOnChr.getChildId())); Location contigOnBandLoc = createLocation(band, bandOnChr, contig, contigOnChr); osw.store(contigOnBandLoc); j++; } } } if (i % 100 == 0) { long now = System.currentTimeMillis(); LOG.info("Created " + i + " Contig/Chromosome and " + j + " Contig/ChromosomeBand locations (avg = " + ((60000L * i) / (now - start)) + " per minute)"); } } LOG.info("Stored " + i + " Locations between Contig and Chromosome."); LOG.info("Stored " + j + " Locations between Contig and ChromosomeBand."); } /** * Given the location of a child 
BioEntity on a parent and the location of * the parent on a Chromsome, create a Location for the child on the Chromosome. * @param parentOnChr location of parent object on Chromosome * @param childOnParent location of child on parent * @param chr the Chromosome * @param child the child BioEntity * @return location of Chromosome */ protected Location createChromosomeLocation(SimpleLoc parentOnChr, SimpleLoc childOnParent, Chromosome chr, BioEntity child) { Location childOnChr; if (childOnParent.startIsPartial() || childOnParent.endIsPartial()) { childOnChr = (PartialLocation) DynamicUtil.createObject(Collections.singleton(PartialLocation.class)); } else { childOnChr = (Location) DynamicUtil.createObject(Collections.singleton(Location.class)); } if (parentOnChr.getStrand() == -1) { childOnChr.setStart(new Integer((parentOnChr.getEnd() - childOnParent.getEnd()) + 1)); childOnChr.setEnd(new Integer((parentOnChr.getEnd() - childOnParent.getStart()) + 1)); } else { childOnChr.setStart(new Integer((parentOnChr.getStart() + childOnParent.getStart()) - 1)); childOnChr.setEnd(new Integer((parentOnChr.getStart() + childOnParent.getEnd()) - 1)); } if (childOnParent.getStrand() == -1) { if (parentOnChr.getStrand() == -1) { childOnChr.setStrand(new Integer(1)); } else { childOnChr.setStrand(new Integer(-1)); } } else { if (parentOnChr.getStrand() == -1) { childOnChr.setStrand(new Integer(-1)); } else { childOnChr.setStrand(new Integer(1)); } } childOnChr.setStartIsPartial(Boolean.FALSE); childOnChr.setEndIsPartial(Boolean.FALSE); if (childOnParent.startIsPartial()) { childOnChr.setStartIsPartial(Boolean.TRUE); } if (childOnParent.endIsPartial()) { childOnChr.setEndIsPartial(Boolean.TRUE); } childOnChr.setObject(chr); childOnChr.setSubject(child); return childOnChr; } /** * Return true if locations of two objects on some parent object * have any overlap. 
* @param sl1 first location * @param sl2 second location * @return true if the two locations have any overlap */ protected static boolean overlap(SimpleLoc sl1, SimpleLoc sl2) { if ((sl1.getStart() >= sl2.getStart() && sl1.getStart() <= sl2.getEnd()) || (sl1.getEnd() >= sl2.getStart() && sl1.getEnd() <= sl2.getEnd()) || (sl1.getStart() >= sl2.getStart() && sl1.getEnd() <= sl2.getEnd()) || (sl2.getStart() >= sl1.getStart() && sl2.getEnd() <= sl1.getEnd())) { return true; } return false; } /** * Create a clone of given InterMineObject including the id * @param obj object to clone * @param newClass the class to create - must be the same class as obj or a sub-class * @return the cloned object * @throws Exception if problems with reflection */ protected static InterMineObject cloneInterMineObject(InterMineObject obj, Class newClass) throws Exception { InterMineObject newObj = (InterMineObject) DynamicUtil.createObject(DynamicUtil.decomposeClass(newClass)); Map fieldInfos = new HashMap(); Iterator clsIter = DynamicUtil.decomposeClass(obj.getClass()).iterator(); while (clsIter.hasNext()) { fieldInfos.putAll(TypeUtil.getFieldInfos((Class) clsIter.next())); } Iterator fieldIter = fieldInfos.keySet().iterator(); while (fieldIter.hasNext()) { String fieldName = (String) fieldIter.next(); TypeUtil.setFieldValue(newObj, fieldName, TypeUtil.getFieldProxy(obj, fieldName)); } return newObj; } private void addToMap(Map map, Integer key, SimpleLoc loc) { Set values = (Set) map.get(key); if (values == null) { values = new HashSet(); } values.add(loc); map.put(key, values); } /** * Lightweight representation of a Location for easier manipulation and * storing in maps. 
*/
    protected class SimpleLoc {
        // inclusive start coordinate on the parent object
        int start;
        // id of the BioEntity this is located on (the Location "object")
        int parentId;
        // id of the located BioEntity (the Location "subject")
        int childId;
        // -1 for reverse strand; 0 is treated the same as 1 elsewhere in this class
        int strand;
        // inclusive end coordinate on the parent object
        int end;
        // true if the start of this location is partial
        boolean startIsPartial;
        // true if the end of this location is partial
        boolean endIsPartial;

        /**
         * Construct with integer values; both partial flags default to false.
         * @param parentId id of object
         * @param childId id of subject
         * @param start start value
         * @param end end value
         * @param strand strand value
         */
        public SimpleLoc(int parentId, int childId, int start, int end, int strand) {
            this(parentId, childId, start, end, strand, false, false);
        }

        /**
         * Construct with integer values.
         * @param parentId id of object
         * @param childId id of subject
         * @param start start value
         * @param end end value
         * @param strand strand value
         * @param startIsPartial start is partial flag
         * @param endIsPartial end is partial flag
         */
        public SimpleLoc(int parentId, int childId, int start, int end, int strand,
                         boolean startIsPartial, boolean endIsPartial) {
            this.parentId = parentId;
            this.childId = childId;
            this.start = start;
            this.end = end;
            this.strand = strand;
            this.startIsPartial = startIsPartial;
            this.endIsPartial = endIsPartial;
        }

        /**
         * Construct with integer ids for object and subject and a Location object;
         * null partial flags and a null strand on the Location default to false / 0.
         * @param parentId id of object
         * @param childId id of subject
         * @param loc description of location
         */
        public SimpleLoc(int parentId, int childId, Location loc) {
            this.parentId = parentId;
            this.childId = childId;
            this.start = loc.getStart().intValue();
            this.end = loc.getEnd().intValue();
            if (loc.getStartIsPartial() == null) {
                this.startIsPartial = false;
            } else {
                this.startIsPartial = loc.getStartIsPartial().booleanValue();
            }
            if (loc.getEndIsPartial() == null) {
                this.endIsPartial = false;
            } else {
                this.endIsPartial = loc.getEndIsPartial().booleanValue();
            }
            if (loc.getStrand() != null) {
                this.strand = loc.getStrand().intValue();
            } else {
                this.strand = 0;
            }
        }

        /**
         * Get start value.
         * @return start value
         */
        public int getStart() {
            return start;
        }

        /**
         * Set start value.
         * @param start value
         */
        public void setStart(int start) {
            this.start = start;
        }

        /**
         * Get parentId value.
         * @return parentId value
         */
        public int getParentId() {
            return parentId;
        }

        /**
         * Get childId value.
         * @return childId value
         */
        public int getChildId() {
            return childId;
        }

        /**
         * Get end value.
         * @return end value
         */
        public int getEnd() {
            return end;
        }

        /**
         * Set end value.
         * @param end value
         */
        public void setEnd(int end) {
            this.end = end;
        }

        /**
         * Get strand value.
         * @return strand value
         */
        public int getStrand() {
            return strand;
        }

        /**
         * Set strand value.
         * @param strand value
         */
        public void setStrand(int strand) {
            this.strand = strand;
        }

        /**
         * Return true if and only if the start is partial.
         * @return true if and only if the start is partial.
         */
        public boolean startIsPartial() {
            return startIsPartial;
        }

        /**
         * Set the start-is-partial flag.
         * @param startIsPartial new start-is-partial flag
         */
        public void setStartIsPartial(boolean startIsPartial) {
            this.startIsPartial = startIsPartial;
        }

        /**
         * Return true if and only if the end is partial.
         * @return true if and only if the end is partial.
         */
        public boolean endIsPartial() {
            return endIsPartial;
        }

        /**
         * Set the end-is-partial flag.
         * @param endIsPartial the new end-is-partial
         */
        public void setEndIsPartial(boolean endIsPartial) {
            this.endIsPartial = endIsPartial;
        }

        /**
         * Return true if the start or end of this SimpleLoc are partial.
         * @return true if the start or end of this SimpleLoc are partial.
         */
        public boolean isPartial() {
            return (startIsPartial() || endIsPartial());
        }

        /**
         * @see Object#toString()
         */
        public String toString() {
            return "parent " + parentId + " child " + childId + " start " + start
                + " end " + end + " strand " + strand
                + " startIsPartial: " + startIsPartial
                + " endIsPartial: " + endIsPartial;
        }
    }
}
package org.pentaho.di.trans.step;

import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.core.xml.XMLInterface;
import org.w3c.dom.Node;

/**
 * Describes one remote step endpoint: the host name (or IP address) and port
 * that a step either reads input data from or writes output data to.
 * Instances serialise to and from the step's XML metadata.
 */
public class RemoteStep implements Cloneable, XMLInterface {

    public static final String XML_TAG = "remotestep";

    /** The host name or IP address to read from or to write to */
    private String hostname;

    /** The port to read input data from or to write output data to */
    private String port;

    /**
     * @param hostname the host name or IP address
     * @param port the port, kept as a String as in the XML metadata
     */
    public RemoteStep(String hostname, String port) {
        super();
        this.hostname = hostname;
        this.port = port;
    }

    /**
     * Re-create a RemoteStep from the XML produced by {@link #getXML()}.
     * @param node the &lt;remotestep&gt; node to read the tag values from
     */
    public RemoteStep(Node node) {
        hostname = XMLHandler.getTagValue(node, "hostname");
        port = XMLHandler.getTagValue(node, "port");
    }

    @Override
    public Object clone() throws CloneNotSupportedException {
        return super.clone();
    }

    /**
     * @return the XML representation of this remote step
     */
    public String getXML() {
        // StringBuilder instead of StringBuffer: local buffer, no synchronization needed
        StringBuilder xml = new StringBuilder();
        xml.append(XMLHandler.openTag(XML_TAG));
        xml.append(XMLHandler.addTagValue("hostname", hostname, false));
        xml.append(XMLHandler.addTagValue("port", port, false));
        xml.append(XMLHandler.closeTag(XML_TAG));
        return xml.toString();
    }

    @Override
    public String toString() {
        return hostname + ":" + port;
    }

    /**
     * Two RemoteSteps are equal when their "hostname:port" forms match, ignoring case.
     * Fixed: the previous version called obj.toString() without a null or type check,
     * so equals(null) threw NullPointerException and equality against non-RemoteStep
     * objects (e.g. a String) was asymmetric.
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof RemoteStep)) { // also covers obj == null
            return false;
        }
        return toString().equalsIgnoreCase(obj.toString());
    }

    /**
     * Case-insensitive hash of "hostname:port", added to keep the equals/hashCode
     * contract (equals was overridden without hashCode).
     */
    @Override
    public int hashCode() {
        return toString().toLowerCase(java.util.Locale.ROOT).hashCode();
    }

    /**
     * @return the host name
     */
    public String getHostname() {
        return hostname;
    }

    /**
     * @param hostname the host name to set
     */
    public void setHostname(String hostname) {
        this.hostname = hostname;
    }

    /**
     * @return the port
     */
    public String getPort() {
        return port;
    }

    /**
     * @param port the port to set
     */
    public void setPort(String port) {
        this.port = port;
    }
}
package org.flymine.dataconversion; import java.io.InputStream; import java.io.IOException; import java.util.Collection; import java.util.Iterator; import java.util.Set; import java.util.HashSet; import java.util.Map; import java.util.HashMap; import java.util.List; import java.util.ArrayList; import java.util.Properties; import java.util.StringTokenizer; import org.intermine.InterMineException; import org.intermine.util.XmlUtil; import org.intermine.util.StringUtil; import org.intermine.xml.full.Attribute; import org.intermine.xml.full.Item; import org.intermine.xml.full.Reference; import org.intermine.xml.full.ReferenceList; import org.intermine.xml.full.ItemHelper; import org.intermine.objectstore.ObjectStoreException; import org.intermine.objectstore.query.*; import org.intermine.dataconversion.ItemPath; import org.intermine.dataconversion.ItemReader; import org.intermine.dataconversion.ItemWriter; import org.intermine.dataconversion.DataTranslator; import org.intermine.dataconversion.ItemPrefetchDescriptor; import org.intermine.dataconversion.ItemPrefetchConstraintDynamic; import org.intermine.dataconversion.ObjectStoreItemPathFollowingImpl; import org.intermine.metadata.Model; import org.apache.log4j.Logger; /** * Convert MAGE data in fulldata Item format conforming to a source OWL definition * to fulldata Item format conforming to InterMine OWL definition. 
* * @author Wenyan Ji * @author Richard Smith */ public class MageDataTranslator extends DataTranslator { protected static final Logger LOG = Logger.getLogger(MageDataTranslator.class); protected Map config = new HashMap(); protected Map organismMap = new HashMap(); private Map dbs = new HashMap(); private Map pubs = new HashMap(); private Map dbRefs = new HashMap(); private String srcNs; private Map experiments = new HashMap(); protected Set microArrayResults = new HashSet(); protected Map samplesById = new HashMap(); // keep track of Reporter identifiers that are controls to set MicroArrayResult.isContol protected Set controls = new HashSet(); protected Map labeledExtractToMicroArrayAssays = new HashMap(); protected Map sampleToTreatments = new HashMap(); protected Map sampleToLabeledExtracts = new HashMap(); // genomic:MicroArrayResult identifier to genomic:MicroArrayAssay identifier protected Map resultToFeature = new HashMap(); protected Map featureToReporter = new HashMap(); protected Map assayToSamples = new HashMap(); // geneomic:MicroArrayAssay -> experiment name protected Map assayToExpName = new HashMap(); // geneomic:MicroArrayAssay -> genomic:MicroArrayExperiment protected Map assayToExperiment = new HashMap(); // genomic:Sample -> genomic:SampleCharacteristics protected Map sampleToChars = new HashMap(); protected Set assays = new HashSet(); protected Map clones = new HashMap(); //cloneItem identifier, cloneItem protected Map cloneMap = new HashMap();//cloneIdentifier, cloneItem protected Map reporterToMaterial = new HashMap(); protected Map cloneIds = new HashMap();//cloneItem identifier, alternative identifier protected Set materialIdTypes = new HashSet(); protected Map expIdNames = new HashMap(); protected Map cloneToResults = new HashMap(); protected Map sampleToLabel = new HashMap(); protected Map exptToDataSet = new HashMap(); // keep track of some item prefixes for re-hydrating MicroArrayResult Items String reporterNs = null; String assayNs = 
null; String resultNs = null; // TODO labeledExtractToMeasuredBioAssay prevents one extract being in multiple // assays - which is the case for timecourse data. FIX. /** * @see DataTranslator#DataTranslator */ public MageDataTranslator(ItemReader srcItemReader, Properties mapping, Model srcModel, Model tgtModel) throws Exception { super(srcItemReader, mapping, srcModel, tgtModel); srcNs = srcModel.getNameSpace().toString(); tgtNs = tgtModel.getNameSpace().toString(); readConfig(); LOG.info(config); } /** * Read in a properties file with additional information about experiments. Key is * the MAGE:Experiment.name, values are for e.g. a longer name and primary characteristic * type of samples. * @throws IOException if file not found */ protected void readConfig() throws IOException { // create a map from experiment name to a map of config values String propertiesFileName = "mage_config.properties"; InputStream is = MageDataTranslator.class.getClassLoader().getResourceAsStream(propertiesFileName); if (is == null) { throw new IllegalArgumentException("Cannot find " + propertiesFileName + " in the class path"); } Properties properties = new Properties(); properties.load(is); Iterator iter = properties.entrySet().iterator(); while (iter.hasNext()) { Map.Entry entry = (Map.Entry) iter.next(); String key = (String) entry.getKey(); String value = (String) entry.getValue(); String exptName = key.substring(0, key.indexOf(".")); String propName = key.substring(key.indexOf(".") + 1); addToMap(config, exptName, propName, value); // also set up a map of any expt.materialIdType config values found, // these need to be kept as possible alternative material ids if (propName.equals("materialIdType")) { materialIdTypes.add(value); } } } private void addToMap(Map config, String group, String key, String value) { Map exptConfig = (Map) config.get(group); if (exptConfig == null) { exptConfig = new HashMap(); config.put(group, exptConfig); } exptConfig.put(key, value); } private String 
getConfig(String exptName, String propName) {
        // Look up a single config property for the named experiment; returns null
        // (and logs a warning) when the experiment has no config entry at all.
        String value = null;
        Map exptConfig = (Map) config.get(exptName);
        if (exptConfig != null) {
            value = (String) exptConfig.get(propName);
        } else {
            LOG.warn("No config details found for experiment: " + exptName);
        }
        return value;
    }

//     public Iterator getItemIterator() throws ObjectStoreException {
//         return srcItemReader.itemIterator("http://www.flymine.org/model/mage#BioAssayDatum",
//             true);

    /**
     * Translate all source items, then store the extra items accumulated while
     * translating: organisms, databases, publications, samples, re-hydrated
     * MicroArrayResults, assays and clones (with their results collections).
     * @see DataTranslator#translate
     */
    public void translate(ItemWriter tgtItemWriter)
        throws ObjectStoreException, InterMineException {
        super.translate(tgtItemWriter);
        Iterator i;
        i = processOrganism().iterator();
        while (i.hasNext()) {
            tgtItemWriter.store(ItemHelper.convert((Item) i.next()));
        }
        i = dbs.values().iterator();
        while (i.hasNext()) {
            tgtItemWriter.store(ItemHelper.convert((Item) i.next()));
        }
        i = pubs.values().iterator();
        while (i.hasNext()) {
            tgtItemWriter.store(ItemHelper.convert((Item) i.next()));
        }
        // needs to be called before other processXX methods
        i = processSamples().iterator();
        while (i.hasNext()) {
            tgtItemWriter.store(ItemHelper.convert((Item) i.next()));
        }
        i = microArrayResults.iterator();
        while (i.hasNext()) {
            // results were kept as lightweight ResultHolders; rebuild full Items now
            ResultHolder holder = (ResultHolder) i.next();
            tgtItemWriter.store(ItemHelper.convert(processMicroArrayResult(holder)));
        }
        i = processMicroArrayAssays().iterator();
        while (i.hasNext()) {
            tgtItemWriter.store(ItemHelper.convert((Item) i.next()));
        }
        i = clones.values().iterator();
        while (i.hasNext()) {
            Item clone = (Item) i.next();
            // set collection of MicroArrayResults
            if (cloneToResults.containsKey(clone.getIdentifier())) {
                clone.setCollection("results",
                    (List) cloneToResults.get(clone.getIdentifier()));
            }
            tgtItemWriter.store(ItemHelper.convert((Item) clone));
        }
    }

    /**
     * @see DataTranslator#translateItem
     */
    protected Collection translateItem(Item srcItem)
        throws ObjectStoreException, InterMineException {
        // TODO MageConverter could create BioDataTuples instead?
Seems to link // DesignElement with BioAssay and data Collection result = new HashSet(); String className = XmlUtil.getFragmentFromURI(srcItem.getClassName()); Collection translated = super.translateItem(srcItem); Item gene = new Item(); Item organism = new Item(); if (translated != null) { for (Iterator i = translated.iterator(); i.hasNext();) { boolean storeTgtItem = true; Item tgtItem = (Item) i.next(); if (className.equals("DataSource")) { Attribute attr = srcItem.getAttribute("name"); if (attr != null) { getDb(attr.getValue()); } storeTgtItem = false; } else if (className.equals("Experiment")) { translateMicroArrayExperiment(srcItem, tgtItem); result.add(createDataSetFromExperiment(tgtItem)); } else if (className.equals("MeasuredBioAssay")) { if (assayNs == null) { assayNs = namespaceFromIdentifier(tgtItem.getIdentifier()); } setLabeledExtractToMicroArrayAssays(srcItem); translateMicroArrayAssay(srcItem, tgtItem); storeTgtItem = false; } else if (className.equals("BioAssayDatum")) { if (resultNs == null) { resultNs = namespaceFromIdentifier(tgtItem.getIdentifier()); } translateMicroArrayResult(srcItem, tgtItem); storeTgtItem = false; } else if (className.equals("Reporter")) { if (reporterNs == null) { reporterNs = namespaceFromIdentifier(tgtItem.getIdentifier()); } translateReporter(srcItem, tgtItem); } else if (className.equals("BioSequence")) { //translateBioEntity(srcItem, tgtItem); storeTgtItem = false; } else if (className.equals("BioSource")) { result.addAll(translateSample(srcItem, tgtItem)); storeTgtItem = false; } else if (className.equals("Treatment")) { result.addAll(translateTreatment(srcItem, tgtItem)); } if (storeTgtItem) { result.add(tgtItem); } } } else if (className.equals("LabeledExtract")) { translateLabeledExtract(srcItem); } return result; } /** * Given an experiment item create a corresponding DataSet and add entry * in map from one to the other. 
* @param expt the experiment item * @return the created DataSet */ protected Item createDataSetFromExperiment(Item expt) { Item dataSet = createItem(tgtNs + "DataSet", ""); dataSet.setReference("dataSource", getDb("ArrayExpress").getIdentifier()); if (expt.hasAttribute("identifier")) { dataSet.setAttribute("title", expt.getAttribute("identifier").getValue()); } if (expt.hasAttribute("name")) { dataSet.setAttribute("description", expt.getAttribute("name").getValue()); } exptToDataSet.put(expt.getIdentifier(), dataSet.getIdentifier()); return dataSet; } /** * @param srcItem = mage:Experiment * @param tgtItem = flymine: MicroArrayExperiment * @return experiment Item * @throws ObjectStoreException if problem occured during translating */ protected Item translateMicroArrayExperiment(Item srcItem, Item tgtItem) throws ObjectStoreException { String exptName = null; if (srcItem.hasAttribute("identifier")) { exptName = srcItem.getAttribute("identifier").getValue(); tgtItem.setAttribute("identifier", exptName); String propName = getConfig(exptName, "experimentName"); if (propName != null) { tgtItem.setAttribute("name", propName); } } // may have already created references to experiment tgtItem.setIdentifier(getExperimentId(exptName)); expIdNames.put(tgtItem.getIdentifier(), exptName); // PATH Experiment.descriptions.bibliographicReferences if (srcItem.hasCollection("descriptions")) { boolean desFlag = false; boolean pubFlag = false; Iterator desIter = getCollection(srcItem, "descriptions"); while (desIter.hasNext()) { Item desItem = (Item) desIter.next(); if (desItem.hasAttribute("text")) { if (desFlag) { LOG.error("Already set description for MicroArrayExperiment, " + " srcItem = " + srcItem.getIdentifier()); } else { tgtItem.setAttribute("description", desItem.getAttribute("text") .getValue()); desFlag = true; } } // TODO fetch pubmed id from config? 
String pmid = getConfig(exptName, "pmid"); if (pmid != null && !pmid.equals("")) { tgtItem.setReference("publication", getPublication(pmid.trim()).getIdentifier()); } } } // PATH Experiment.bioAssays // create map from mage:DerivedBioAssay to experiment name (String) if (srcItem.hasCollection("bioAssays")) { Iterator assayIter = getCollection(srcItem, "bioAssays"); while (assayIter.hasNext()) { Item bioAssayItem = (Item) assayIter.next(); if (bioAssayItem.getClassName().equals(srcNs + "MeasuredBioAssay")) { assayToExperiment.put(bioAssayItem.getIdentifier(), tgtItem.getIdentifier()); assayToExpName.put(bioAssayItem.getIdentifier(), exptName); } } } return tgtItem; } /** * @param srcItem = mage: MeasuredBioAssay * @param tgtItem = genomic:MicroArrayAssay * @throws ObjectStoreException if problem occured during translating */ protected void translateMicroArrayAssay(Item srcItem, Item tgtItem) throws ObjectStoreException { if (srcItem.hasAttribute("identifier")) { tgtItem.addAttribute(new Attribute("name", srcItem.getAttribute("identifier").getValue())); } assays.add(tgtItem); } /** * @param srcItem = mage:MeasuredBioAssay * @throws ObjectStoreException if anything goes wrong */ protected void setLabeledExtractToMicroArrayAssays(Item srcItem) throws ObjectStoreException { // PATH MeasuredBioAssay.featureExtraction.physicalBioAssaySource // .bioAssayCreation.sourceBioMaterialMeasurements.bioMaterial // set up map of MicroArrayAssay to LabeledExtract - to be used when setting // link between MicroArrayAssay and Sample // MeasuredBioAssay.featureExtraction.physicalBioAssaySource -> PhysicalBioAssay Item pbaItem = null; if (srcItem.hasReference("featureExtraction")) { Item feItem = getReference(srcItem, "featureExtraction"); if (feItem.hasReference("physicalBioAssaySource")) { pbaItem = getReference(feItem, "physicalBioAssaySource"); } } List labeledExtracts = new ArrayList(); if (pbaItem != null && pbaItem.hasReference("bioAssayCreation")) { Item hybri = 
getReference(pbaItem, "bioAssayCreation"); if (hybri.hasCollection("sourceBioMaterialMeasurements")) { Iterator iter = getCollection(hybri, "sourceBioMaterialMeasurements"); while (iter.hasNext()) { Item bmItem = (Item) iter.next(); if (bmItem.hasReference("bioMaterial")) { String extractId = (bmItem.getReference("bioMaterial").getRefId()); labeledExtracts.add(extractId); // map from mage:LabeledExtract identifier to // genomic:MicroArrayAssay identifier Set mbas = (Set) labeledExtractToMicroArrayAssays.get(extractId); if (mbas == null) { mbas = new HashSet(); labeledExtractToMicroArrayAssays.put(extractId, mbas); } mbas.add(srcItem.getIdentifier()); } } } } // map from genomic:MicroArrayAssay identifier to list of mage:LabeledExtract identifiers // assayToLabeledExtract.put(tgtItem.getIdentifier(), labeledExtracts); } /** * @param srcItem = mage:BioAssayDatum * @param tgtItem = flymine:MicroArrayResult * @throws ObjectStoreException if problem occured during translating */ public void translateMicroArrayResult(Item srcItem, Item tgtItem) throws ObjectStoreException { ResultHolder holder = new ResultHolder(identifierToInt(tgtItem.getIdentifier())); if (srcItem.hasAttribute("value")) { String value = srcItem.getAttribute("value").getValue().trim(); // only store if a numerical value, ignore errors if (StringUtil.allDigits(value)) { tgtItem.setAttribute("value", value); holder.value = value; microArrayResults.add(holder); } } // PATH BioAssayDatum.quatitationType.scale if (srcItem.hasReference("bioAssay")) { holder.assayId = identifierToInt(srcItem.getReference("bioAssay").getRefId()); } if (srcItem.hasReference("designElement")) { // map from genomic:MicroArrayResult identifier to mage:Feature identifier resultToFeature.put(tgtItem.getIdentifier(), srcItem.getReference("designElement").getRefId()); } if (srcItem.hasReference("reporter")) { holder.reporterId = identifierToInt(srcItem.getReference("reporter").getRefId()); } if 
(srcItem.hasReference("quantitationType")) { Item qtItem = getReference(srcItem, "quantitationType"); if (qtItem.hasAttribute("name")) { holder.type = "(Normalised) " + qtItem.getAttribute("name").getValue().intern(); } else { LOG.warn("QuantitationType ( " + qtItem.getIdentifier() + " ) does not have name attribute"); } if (qtItem.getClassName().endsWith("MeasuredSignal") || qtItem.getClassName().endsWith("DerivedSignal") || qtItem.getClassName().endsWith("Ratio") || qtItem.getClassName().endsWith("SpecializedQuantitationType")) { if (qtItem.hasReference("scale")) { Item oeItem = getReference(qtItem, "scale"); holder.scale = oeItem.getAttribute("value").getValue().intern(); } else { LOG.warn("QuantitationType (" + qtItem.getIdentifier() + "( does not have scale attribute "); } } else if (qtItem.getClassName().endsWith("Error")) { if (qtItem.hasReference("targetQuantitationType")) { // TODO if an Error does this mean flag should be set to fail?? Item msItem = getReference(qtItem, "targetQuantitationType"); if (msItem.hasReference("scale")) { Item oeItem = getReference(msItem, "scale"); holder.scale = oeItem.getAttribute("value").getValue().intern(); } else { LOG.warn("QuantitationType (" + msItem.getIdentifier() + "( does not have scale attribute "); } } } } } /** * @param srcItem = mage:Reporter * @param tgtItem = flymine:Reporter * @throws ObjectStoreException if problem occured during translating */ protected void translateReporter(Item srcItem, Item tgtItem) throws ObjectStoreException { // PATH Reporter.featureReporterMaps.featureInformationSources.feature if (srcItem.hasCollection("featureReporterMaps")) { // check is single element collection Iterator frmIter = getCollection(srcItem, "featureReporterMaps"); while (frmIter.hasNext()) { Item frm = (Item) frmIter.next(); if (frm.hasCollection("featureInformationSources")) { Iterator fisIter = getCollection(frm, "featureInformationSources"); while (fisIter.hasNext()) { Item fis = (Item) fisIter.next(); if 
(fis.hasReference("feature")) { featureToReporter.put(fis.getReference("feature").getRefId(), srcItem.getIdentifier()); } } } } } Item material = null; // PATH Reporter.controlType // PATH Reporter.immobilizedCharacteristics.type // create BioEntity with identifier as Reporter.name. For class look in: if (srcItem.hasReference("controlType")) { Item controlType = getReference(srcItem, "controlType"); tgtItem.setAttribute("isControl", "true"); tgtItem.setAttribute("controlType", controlType.getAttribute("value").getValue()); // will be used to set MicroArrayResult.isControl controls.add(tgtItem.getIdentifier()); } else { tgtItem.setAttribute("isControl", "false"); // Reporter.immobilizedCharacteristics.type // if Reporter.controlTypes exists then is a control if (srcItem.hasCollection("immobilizedCharacteristics")) { Iterator bioIter = getCollection(srcItem, "immobilizedCharacteristics"); while (bioIter.hasNext() && material == null) { Item bioSequence = (Item) bioIter.next(); if (bioSequence.hasReference("type")) { String type = getReference(bioSequence, "type").getAttribute("value") .getValue(); if (type.toLowerCase().equals("cdna_clone") || type.toLowerCase().equals("clone_of_unknown_source")) { String cloneId = srcItem.getAttribute("name").getValue(); material = (Item) cloneMap.get(cloneId); if (material == null) { material = createItem(tgtNs + "CDNAClone", ""); material.setAttribute("identifier", cloneId); cloneMap.put(cloneId, material); clones.put(material.getIdentifier(), material); } tgtItem.setReference("material", material.getIdentifier()); reporterToMaterial.put(tgtItem.getIdentifier(), material.getIdentifier()); } else { throw new ObjectStoreException("Unknown BioSequence type: " + type); } } if (!materialIdTypes.isEmpty() && bioSequence.hasCollection("sequenceDatabases")) { Iterator dbIter = getCollection(bioSequence, "sequenceDatabases"); while (dbIter.hasNext()) { Item dbRef = (Item) dbIter.next(); if (dbRef.hasReference("database")) { Item db = 
getReference(dbRef, "database"); String dbName = db.getAttribute("name").getValue(); if (materialIdTypes.contains(dbName)) { Map altIds = (Map) cloneIds.get(material.getIdentifier()); if (altIds == null) { altIds = new HashMap(); cloneIds.put(material.getIdentifier(), altIds); } altIds.put(dbName, dbRef.getAttribute("accession").getValue()); } } } } } } } // if Reporter.failTypes exists then set failure type if (srcItem.hasCollection("failTypes")) { Iterator failIter = getCollection(srcItem, "failTypes"); while (failIter.hasNext()) { Item fail = (Item) failIter.next(); tgtItem.setAttribute("failType", fail.getAttribute("value").getValue()); } } } /** * @param srcItem = databaseEntry item refed in BioSequence * @param sourceRef ref to sourceId = database id * @param subjectId = bioEntity identifier will probably be changed * when reprocessing bioEntitySet * @return synonym item */ protected Item createSynonym(Item srcItem, Reference sourceRef, String subjectId) { Item synonym = new Item(); synonym.setClassName(tgtNs + "Synonym"); synonym.setIdentifier(srcItem.getIdentifier()); synonym.setImplementations(""); synonym.addAttribute(new Attribute("type", "accession")); synonym.addReference(sourceRef); synonym.addAttribute(new Attribute("value", srcItem.getAttribute("accession").getValue())); synonym.addReference(new Reference("subject", subjectId)); return synonym; } /** * @param srcItem = mage:LabeledExtract * @throws ObjectStoreException if problem occured during translating * LabeledExtract -> {treatments} -> {sourceBioMaterialMeasurements} -> *(BioSample)extract -> {treatments} -> {sourceBioMaterialMeasurements} -> *(BioSample)not-extract -> {treatments} -> {sourceBioMaterialMeasurements} -> *(BioSource) */ public void translateLabeledExtract(Item srcItem) throws ObjectStoreException { // From LabeledExtract decending through treatments will eventually // find the BioSource that was used. 
This is what we create a Sample // from and has details attached to it as OntlogyTerms String sampleId = searchTreatments(srcItem, new ArrayList(), false); // map from sample to top level LabeledExtract if (sampleId != null) { Set extracts = (Set) sampleToLabeledExtracts.get(sampleId); if (extracts == null) { extracts = new HashSet(); sampleToLabeledExtracts.put(sampleId, extracts); } extracts.add(srcItem.getIdentifier()); // Find and record the label used for this Sample String label = searchTreatments(srcItem, new ArrayList(), true); if (label != null) { sampleToLabel.put(sampleId, label); } else { throw new IllegalArgumentException("Unable to find label for sample: " + sampleId); } } } /** * For a given BioMaterial iterate through treatments applied and add to a collection. * Recurse into source BioMaterials and add their treatments. * @param bioMaterial = item bioMaterial * @param treatments = list treatments * @param findLabel true if looking for the label on the extract instead of the sample * @return string of treatments * @throws ObjectStoreException if anything goes wrong */ protected String searchTreatments(Item bioMaterial, List treatments, boolean findLabel) throws ObjectStoreException { // LabeledExtract.treatments.sourceBioMaterialMeasurements.bioMaterial //.treatments.sourceBioMaterialMeasurements.bioMaterial. // [Treatment] [BioMaterialMeasurement] [BioSample] //[Treatment] [BioMaterialMeasurement] [BioSample] // PATH is recursive - duplicate a number of times? 
Refactor easier prefetch // PATH LabeledExtract.treatments.sourceBioMaterialMeasurements.bioMaterial // always exit once we get to BioSource - even if looking for label and none found if (bioMaterial.getClassName().equals(srcNs + "BioSource")) { // if this is sample then put list of treatments in a map if (!findLabel) { sampleToTreatments.put(bioMaterial.getIdentifier(), treatments); return bioMaterial.getIdentifier(); } else { return null; } } if (bioMaterial.hasCollection("treatments")) { Iterator treatmentIter = getCollection(bioMaterial, "treatments"); while (treatmentIter.hasNext()) { Item treatment = (Item) treatmentIter.next(); if (findLabel) { boolean isLabelling = false; // first see if this is the labelling step if (treatment.hasReference("action")) { Item action = getReference(treatment, "action"); if ("labeling".equals(action.getAttribute("value").getValue())) { isLabelling = true; } } // Find value of parameter with type 'Label used' if (isLabelling && treatment.hasCollection("protocolApplications")) { Iterator protIter = getCollection(treatment, "protocolApplications"); while (protIter.hasNext()) { Item appItem = (Item) protIter.next(); if (appItem.hasCollection("parameterValues")) { Iterator paramIter = getCollection(appItem, "parameterValues"); while (paramIter.hasNext()) { Item valueItem = (Item) paramIter.next(); String value = null; if (valueItem.hasAttribute("value")) { value = valueItem.getAttribute("value").getValue(); } Item srcParam = getReference(valueItem, "parameterType"); if (srcParam.hasAttribute("name")) { if ("Label used".equalsIgnoreCase(srcParam .getAttribute("name") .getValue()) && value != null) { return value; } } } } } } } treatments.add(treatment.getIdentifier()); // search for source bio material and nested treatments if (treatment.hasCollection("sourceBioMaterialMeasurements")) { Iterator sourceIter = getCollection(treatment, "sourceBioMaterialMeasurements"); while (sourceIter.hasNext()) { Item sourceMaterial = (Item) 
sourceIter.next(); if (sourceMaterial.hasReference("bioMaterial")) { // recurse into next BioMaterial return searchTreatments(getReference(sourceMaterial, "bioMaterial"), treatments, findLabel); } } } } } return null; } /** * @param srcItem = mage:BioSource * @param tgtItem = genomic:Sample * @return set of SampleCharacteristic * extra genomic:Organism item is created and saved in organismMap * @throws ObjectStoreException if problem occured during translating */ protected Set translateSample(Item srcItem, Item tgtItem) throws ObjectStoreException { Set charItems = new HashSet(); // TODO set identifier to be internal MAGE identifier? // PATH BioSource.characteristics List list = new ArrayList(); Item organism = new Item(); if (srcItem.hasCollection("characteristics")) { Iterator charIter = getCollection(srcItem, "characteristics"); while (charIter.hasNext()) { Item charItem = (Item) charIter.next(); if (charItem.hasAttribute("category")) { String category = charItem.getAttribute("category").getValue(); String value = charItem.getAttribute("value").getValue(); if (category.equals("Organism")) { if (charItem.hasAttribute("value")) { organism = createOrganism("Organism", "", value); tgtItem.setReference("organism", organism.getIdentifier()); } } else { Item tgtCharItem = createItem(tgtNs + "SampleCharacteristic", ""); tgtCharItem.setAttribute("type", charItem.getAttribute("category") .getValue()); tgtCharItem.setAttribute("value", value); charItems.add(tgtCharItem); list.add(tgtCharItem.getIdentifier()); } HashMap charMap = new HashMap(); addToMap(sampleToChars, tgtItem.getIdentifier(), category, value); } } if (list.size() > 0) { ReferenceList tgtChar = new ReferenceList("characteristics", list); tgtItem.addCollection(tgtChar); } } // PATH BioSource.materialType if (srcItem.hasReference("materialType")) { Item type = ItemHelper.convert(srcItemReader.getItemById( (String) srcItem.getReference("materialType").getRefId())); tgtItem.addAttribute(new 
Attribute("materialType", type.getAttribute("value").getValue())); } if (srcItem.hasAttribute("name")) { tgtItem.addAttribute(new Attribute("name", srcItem.getAttribute("name").getValue())); } samplesById.put(tgtItem.getIdentifier(), tgtItem); return charItems; } /** * @param srcItem = mage:Treatment * @param tgtItem = flymine:Treatment * @return set of target TreatmentParameter * @throws ObjectStoreException if problem occured during translating */ public Set translateTreatment(Item srcItem, Item tgtItem) throws ObjectStoreException { // TODO protocol - either attribute of treatment or reference to object // PATH Treatment.action if (srcItem.hasReference("action")) { Item action = ItemHelper.convert(srcItemReader.getItemById( (String) srcItem.getReference("action").getRefId())); if (action.hasAttribute("value")) { tgtItem.addAttribute(new Attribute("action", action.getAttribute("value").getValue())); } } Set params = new HashSet(); List paramIds = new ArrayList(); // Protocol: Treatment.protocolApplications.protocol if (srcItem.hasCollection("protocolApplications")) { Iterator protIter = getCollection(srcItem, "protocolApplications"); while (protIter.hasNext()) { Item appItem = (Item) protIter.next(); if (appItem.hasReference("protocol")) { tgtItem.addToCollection("protocols", appItem.getReference("protocol").getRefId()); } if (appItem.hasCollection("parameterValues")) { Iterator paramIter = getCollection(appItem, "parameterValues"); while (paramIter.hasNext()) { Item valueItem = (Item) paramIter.next(); Item tgtParam = createItem(tgtNs + "TreatmentParameter", ""); tgtParam.setReference("treatment", tgtItem.getIdentifier()); if (valueItem.hasAttribute("value")) { tgtParam.setAttribute("value", valueItem.getAttribute("value").getValue()); } Item srcParam = getReference(valueItem, "parameterType"); if (srcParam.hasAttribute("name")) { tgtParam.setAttribute("type", srcParam.getAttribute("name").getValue()); } // set units if (srcParam.hasReference("defaultValue")) { 
Item defaultItem = getReference(srcParam, "defaultValue"); if (defaultItem.hasReference("measurement")) { Item measItem = getReference(defaultItem, "measurement"); if (measItem.hasReference("unit")) { Item unitItem = getReference(measItem, "unit"); if (unitItem.hasAttribute("unitNameCV")) { tgtParam.setAttribute("unit", unitItem.getAttribute("unitNameCV").getValue()); } } } } paramIds.add(tgtParam.getIdentifier()); params.add(tgtParam); } } } } return params; } /** * got organismMap from createOrganism() * @return organism only once for the same item */ protected Set processOrganism() { Set results = new HashSet(); for (Iterator i = organismMap.keySet().iterator(); i.hasNext();) { String organismValue = (String) i.next(); Item organism = (Item) organismMap.get(organismValue); results.add(organism); } return results; } /** * got map fo assays * add experiment reference and sample1, sample2 attribute * @return assay only once for the same item */ protected Set processMicroArrayAssays() { Iterator assayIter = assays.iterator(); while (assayIter.hasNext()) { Item assay = (Item) assayIter.next(); String assayId = assay.getIdentifier(); if (assayToExperiment.containsKey(assayId)) { assay.setReference("experiment", (String) assayToExperiment.get(assayId)); } if (assayToSamples.containsKey(assayId)) { List sampleIds = (List) assayToSamples.get(assayId); assay.addCollection(new ReferenceList("samples", sampleIds)); if (sampleIds.size() != 2) { LOG.warn("Did not find exactly two samples for " + " assay: " + assay.getIdentifier() + ", " + assay.getAttribute("name").getValue() + ". 
Samples found: " + sampleIds); } String experimentName = (String) assayToExpName.get(assayId); if (experimentName == null) { throw new IllegalArgumentException("Unable to find experiment name for assay: " + assayId); } String sample1Label = getConfig(experimentName, "sample1"); String sample2Label = getConfig(experimentName, "sample2"); if (sample1Label == null || sample2Label == null) { throw new IllegalArgumentException("Unable to find sample label configration. " + "sample1 was " + sample1Label + " " + "sample2 was " + sample2Label); } Iterator sampleIter = sampleIds.iterator(); while (sampleIter.hasNext()) { String sampleId = (String) sampleIter.next(); String label = (String) sampleToLabel.get(sampleId); if (label.equals(sample1Label)) { assay.setAttribute("sample1", getSampleSummary(sampleId)); } else if (label.equals(sample2Label)) { assay.setAttribute("sample2", getSampleSummary(sampleId)); } else { throw new IllegalArgumentException("Unable to match label (" + label + ")" + " with sample1 (" + sample1Label + ")" + " or sample2 (" + sample2Label + ")."); } } } } return assays; } /** * @param id sample id * @return sample attributes as summary */ private String getSampleSummary(String id) { Item sample = (Item) samplesById.get(id); String summary = ""; if (sample != null && sample.getAttribute("primaryCharacteristicType") != null && sample.getAttribute("primaryCharacteristic") != null) { return sample.getAttribute("primaryCharacteristicType").getValue() + ": " + sample.getAttribute("primaryCharacteristic").getValue(); } return null; } /** * Add additional references/collections to MicroArrayResults. 
* Call processSamples first to allow MicroArrayResult.sample to be set * @param holder representation of MicroArrayResult * @return updated MicroArrayResult */ protected Item processMicroArrayResult(ResultHolder holder) { Item maResult = itemFromResultHolder((ResultHolder) holder);; String maResultId = maResult.getIdentifier(); String experimentId = null; // MicroArrayResult.assay // MicroArrayResult.samples //should be result2bioassay if (maResult.hasReference("assay")) { String assayId = maResult.getReference("assay").getRefId(); // assay reference should already be set // maResult.setReference("assay", assayId); if (assayToSamples.containsKey(assayId)) { maResult.addCollection(new ReferenceList("samples", (List) assayToSamples.get(assayId))); } if (assayToExperiment.containsKey(assayId)) { experimentId = (String) assayToExperiment.get(assayId); maResult.setReference("experiment", experimentId); maResult.setReference("analysis", experimentId); // source refrence to DataSet maResult.setReference("source", (String) exptToDataSet.get(experimentId)); } } // MicroArrayResult.isControl String reporterId = null; if (maResult.hasReference("reporter")) { reporterId = maResult.getReference("reporter").getRefId(); } else if (resultToFeature.containsKey(maResult.getIdentifier())) { String featureId = (String) resultToFeature.get(maResult.getIdentifier()); if (featureToReporter.containsKey(featureId)) { reporterId = (String) featureToReporter.get(featureId); } } if (reporterId != null) { if (!maResult.hasReference("reporter")) { maResult.setReference("reporter", reporterId); } if (controls.contains(reporterId)) { maResult.setAttribute("isControl", "true"); } else { maResult.setAttribute("isControl", "false"); } // MicroArrayResult.material if (reporterToMaterial.containsKey(reporterId)) { String materialId = (String) reporterToMaterial.get(reporterId); // for some experiments we want to change the material identifier for // an alternative database reference defined in the 
config. Alternatives // are in cloneIds map - material->alternative id String expName = (String) expIdNames.get(experimentId); String materialIdType = getConfig(expName, "materialIdType"); if (materialIdType != null && cloneIds.containsKey(materialId)) { Map typeMap = (Map) cloneIds.get(materialId); if (typeMap != null) { if (typeMap.containsKey(materialIdType)) { Item clone = (Item) clones.get(materialId); if (clone != null) { clone.setAttribute("identifier", (String) typeMap.get(materialIdType)); } } } } // CDNAClone needs to have a collection of MicroArrayResults List results = (List) cloneToResults.get(materialId); if (results == null) { results = new ArrayList(); cloneToResults.put(materialId, results); } results.add(maResult.getIdentifier()); } } return maResult; } /** * set Sample.assay * set Sample.treatments * @return sample */ protected Collection processSamples() { Iterator sampleIter = samplesById.values().iterator(); while (sampleIter.hasNext()) { Item sample = (Item) sampleIter.next(); String sampleId = sample.getIdentifier(); if (sampleToTreatments.containsKey(sampleId)) { sample.addCollection(new ReferenceList("treatments", (List) sampleToTreatments.get(sampleId))); } if (sampleToLabeledExtracts.containsKey(sampleId)) { Iterator extractIter = ((Set) sampleToLabeledExtracts.get(sampleId)).iterator(); while (extractIter.hasNext()) { String extractId = (String) extractIter.next(); Set mbaIds = (Set) labeledExtractToMicroArrayAssays.get(extractId); if (mbaIds != null) { Iterator mbaIter = mbaIds.iterator(); while (mbaIter.hasNext()) { String mbaId = (String) mbaIter.next(); List sampleIds = (List) assayToSamples.get(mbaId); if (sampleIds == null) { sampleIds = new ArrayList(); assayToSamples.put(mbaId, sampleIds); } sampleIds.add(sampleId); } String expName = (String) assayToExpName.get(mbaIds.iterator().next()); String primaryCharacteristic = getConfig(expName, "primaryCharacteristic"); List types = new ArrayList(); StringTokenizer toke = new 
StringTokenizer(primaryCharacteristic, " "); while (toke.hasMoreTokens()) { types.add(toke.nextToken()); } Map chars = (Map) sampleToChars.get(sampleId); if (chars != null) { Iterator charIter = chars.entrySet().iterator(); while (charIter.hasNext()) { Map.Entry entry = (Map.Entry) charIter.next(); Iterator typeIter = types.iterator(); // check possible primaryCharacteristicTypes in order while (typeIter.hasNext() && !sample.hasAttribute("primaryCharacteristicType")) { String type = (String) typeIter.next(); if (entry.getKey().equals(type)) { sample.setAttribute("primaryCharacteristicType", type); sample.setAttribute("primaryCharacteristic", (String) entry.getValue()); } } } // Set a default value for primaryCharacteristic // hack to deal with reference sample in Drosophila timecourse // data set which does no have the same ontology term as all // all other samples. if (!sample.hasAttribute("primaryCharacteristic")) { String defaultPc = getConfig(expName, "primaryCharacteristicDefault"); if (defaultPc != null) { sample.setAttribute("primaryCharacteristic", defaultPc); sample.setAttribute("primaryCharacteristicType", "Sample"); } } } } } } } return samplesById.values(); } /** * @param className = tgtClassName * @param implementation = tgtClass implementation * @param value = attribute for organism name * @return organism item */ private Item createOrganism(String className, String implementation, String value) { Item organism = new Item(); if (!organismMap.containsKey(value)) { organism = createItem(tgtNs + className, implementation); organism.setAttribute("name", value); organismMap.put(value, organism); } else { organism = (Item) organismMap.get(value); } return organism; } /** * @param dbName = databaseName * @return databaseItem */ private Item getDb(String dbName) { Item db = (Item) dbs.get(dbName); if (db == null) { db = createItem(tgtNs + "DataSource", ""); db.setAttribute("name", dbName); dbs.put(dbName, db); } return db; } /** * @param pmid pubmed id read 
from config * @return publication item */ private Item getPublication(String pmid) { Item pub = (Item) pubs.get(pmid); if (pub == null) { pub = createItem(tgtNs + "Publication", ""); pub.setAttribute("pubMedId", pmid); pubs.put(pmid, pub); } return pub; } /** * @return identifier for experimentItem assume only one experiment item presented */ private String getExperimentId(String expName) { Item exp = (Item) experiments.get(expName); if (exp == null) { exp = createItem(tgtNs + "MicroArrayExperiment", ""); experiments.put(expName, exp); } return exp.getIdentifier(); } /** * get an item by path and deal with conversion to/from fulldata items * @param path = ItemPath * @param startItem = Item * @return item * @throws ObjectStoreException if anything goes wrong with finding item */ private Item getItemByPath(ItemPath path, Item startItem) throws ObjectStoreException { return ItemHelper.convert(srcItemReader.getItemByPath(path, ItemHelper.convert(startItem))); } /** * Keep MicroArrayResults in memory in a more efficient way, can be changed * back into items again. This could be done in a more efficient way. */ protected class ResultHolder { protected int identifier, assayId, reporterId; protected String type, scale, value; /** * Construct * @param identifier item identifier without namespace */ public ResultHolder(int identifier) { this.identifier = identifier; } } /** * Convert a memory efficient ResulyHoldder back into a MicroArrayResult Item. 
* @param holder information about a MicroArrayResult * @return the re-constructed Item */ protected Item itemFromResultHolder(ResultHolder holder) { Item result = itemFactory.makeItem(resultNs + holder.identifier, tgtNs + "MicroArrayResult", ""); // Attributes if (holder.type != null) { result.setAttribute("type", holder.type); } if (holder.scale != null) { result.setAttribute("scale", holder.scale); } if (holder.value != null) { result.setAttribute("value", holder.value); } // References if (holder.assayId > 0) { result.setReference("assay", assayNs + holder.assayId); } if (holder.reporterId > 0) { result.setReference("reporter", reporterNs + holder.reporterId); } return result; } private int identifierToInt(String identifier) { return Integer.parseInt(identifier.substring(identifier.indexOf("_") + 1)); } private String namespaceFromIdentifier(String identifier) { return identifier.substring(0, identifier.indexOf("_") + 1); } /** * get an item by path and deal with conversion to/from fulldata items * @param path = ItemPath * @param start = Item * @return item * @throws ObjectStoreException if anything goes wrong with finding item */ public Iterator getItemsByPath(ItemPath path, Item start) throws ObjectStoreException { List items = new ArrayList(); Iterator iter = srcItemReader.getItemsByPath(path, ItemHelper.convert(start)).iterator(); while (iter.hasNext()) { items.add(ItemHelper.convert( (org.intermine.model.fulldata.Item) iter.next())); } return items.iterator(); } /** * static method * @return map of prefetchDescriptors */ public static Map getPrefetchDescriptors() { Map paths = new HashMap(); Set descSet; ItemPath path; String srcNs = "http://www.flymine.org/model/mage descSet = new HashSet(); path = new ItemPath("Experiment.descriptions", srcNs); descSet.add(path.getItemPrefetchDescriptor()); path = new ItemPath("Experiment.bioAssays", srcNs); descSet.add(path.getItemPrefetchDescriptor()); paths.put(srcNs + "Experiment", descSet); descSet = new HashSet(); 
path = new ItemPath("MeasuredBioAssay.featureExtraction.physicalBioAssaySource." + "bioAssayCreation.sourceBioMaterialMeasurements.bioMaterial", srcNs); descSet.add(path.getItemPrefetchDescriptor()); paths.put(srcNs + "MeasuredBioAssay", descSet); path = new ItemPath("DerivedBioAssay.derivedBioAssayMap.sourceBioAssays", srcNs); descSet.add(path.getItemPrefetchDescriptor()); paths.put(srcNs + "DerivedBioAssay", descSet); descSet = new HashSet(); path = new ItemPath("BioAssayDatum.quantitationType.scale", srcNs); descSet.add(path.getItemPrefetchDescriptor()); path = new ItemPath("BioAssayDatum.quantitationType.targetQuantitationType.scale", srcNs); descSet.add(path.getItemPrefetchDescriptor()); paths.put(srcNs + "BioAssayDatum", descSet); //prefetch cache miss? descSet = new HashSet(); path = new ItemPath("LabeledExtract.treatments.sourceBioMaterialMeasurements.bioMaterial" , srcNs); descSet.add(path.getItemPrefetchDescriptor()); path = new ItemPath("LabeledExtract.treatments.sourceBioMaterialMeasurements.bioMaterial." 
+ "treatments.sourceBioMaterialMeasurements.bioMaterial", srcNs); descSet.add(path.getItemPrefetchDescriptor()); paths.put(srcNs + "LabeledExtract", descSet); descSet = new HashSet(); path = new ItemPath("BioSource.characteristics", srcNs); descSet.add(path.getItemPrefetchDescriptor()); path = new ItemPath("BioSource.materialType", srcNs); descSet.add(path.getItemPrefetchDescriptor()); paths.put(srcNs + "BioSource", descSet); descSet = new HashSet(); path = new ItemPath("Treatment.action", srcNs); descSet.add(path.getItemPrefetchDescriptor()); path = new ItemPath("Treatment.protocolApplications.protocol", srcNs); descSet.add(path.getItemPrefetchDescriptor()); path = new ItemPath( "Treatment.protocolApplications.parameterValues.parameterType.defaultValue" + ".measurement.unit", srcNs); descSet.add(path.getItemPrefetchDescriptor()); paths.put(srcNs + "Treatment", descSet); ItemPrefetchDescriptor desc, desc1, desc2, desc3, desc4; descSet = new HashSet(); desc = new ItemPrefetchDescriptor("Reporter.featureReporterMaps"); desc.addConstraint(new ItemPrefetchConstraintDynamic("featureReporterMaps", ObjectStoreItemPathFollowingImpl.IDENTIFIER)); desc2 = new ItemPrefetchDescriptor("Reporter.featureReporterMaps.feature"); desc2.addConstraint(new ItemPrefetchConstraintDynamic("feature", ObjectStoreItemPathFollowingImpl.IDENTIFIER)); desc.addPath(desc2); descSet.add(desc); desc = new ItemPrefetchDescriptor("Reporter.immobilizedCharacteristics"); desc.addConstraint(new ItemPrefetchConstraintDynamic("immobilizedCharacteristics", ObjectStoreItemPathFollowingImpl.IDENTIFIER)); desc2 = new ItemPrefetchDescriptor("Reporter.immobilizedCharacteristics.type"); desc2.addConstraint(new ItemPrefetchConstraintDynamic("type", ObjectStoreItemPathFollowingImpl.IDENTIFIER)); desc.addPath(desc2); desc3 = new ItemPrefetchDescriptor( "Reporter.immobilizedCharacteristics.type.sequenceDatabases"); desc3.addConstraint(new ItemPrefetchConstraintDynamic("sequenceDatabases", 
ObjectStoreItemPathFollowingImpl.IDENTIFIER)); desc2.addPath(desc3); desc4 = new ItemPrefetchDescriptor( "Reporter.immobilizedCharacteristics.type.sequenceDatabases.database"); desc4.addConstraint(new ItemPrefetchConstraintDynamic("database", ObjectStoreItemPathFollowingImpl.IDENTIFIER)); desc3.addPath(desc4); descSet.add(desc); paths.put(srcNs + "Reporter", descSet); //path = new ItemPath("Reporter.featureReporterMaps.featureInformationSources", srcNs); // path = new ItemPath( // "Reporter.featureReporterMaps.featureInformationSources.feature", srcNs); // descSet.add(path.getItemPrefetchDescriptor()); // path = new ItemPath("Reporter.controlType", srcNs); // descSet.add(path.getItemPrefetchDescriptor()); // path = new ItemPath("Reporter.immobilizedCharacteristics.type", srcNs); // descSet.add(path.getItemPrefetchDescriptor()); // path = new ItemPath( // "Reporter.immobilizedCharacteristics.type.sequenceDatabases.database", srcNs); // descSet.add(path.getItemPrefetchDescriptor()); // paths.put(srcNs + "Reporter", descSet); return paths; } }
package replicant;

import arez.ArezContext;
import arez.Disposable;
import arez.annotations.Action;
import arez.annotations.ContextRef;
import arez.annotations.Observable;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Objects;
import java.util.stream.Stream;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import org.realityforge.anodoc.TestOnly;
import replicant.spy.ConnectFailureEvent;
import replicant.spy.ConnectedEvent;
import replicant.spy.DataLoadStatus;
import replicant.spy.DisconnectFailureEvent;
import replicant.spy.DisconnectedEvent;
import replicant.spy.MessageProcessFailureEvent;
import replicant.spy.MessageProcessedEvent;
import replicant.spy.MessageReadFailureEvent;
import replicant.spy.RestartEvent;
import replicant.spy.SubscribeCompletedEvent;
import replicant.spy.SubscribeFailedEvent;
import replicant.spy.SubscribeRequestQueuedEvent;
import replicant.spy.SubscribeStartedEvent;
import replicant.spy.SubscriptionUpdateCompletedEvent;
import replicant.spy.SubscriptionUpdateFailedEvent;
import replicant.spy.SubscriptionUpdateRequestQueuedEvent;
import replicant.spy.SubscriptionUpdateStartedEvent;
import replicant.spy.UnsubscribeCompletedEvent;
import replicant.spy.UnsubscribeFailedEvent;
import replicant.spy.UnsubscribeRequestQueuedEvent;
import replicant.spy.UnsubscribeStartedEvent;
import static org.realityforge.braincheck.Guards.*;

/**
 * The Connector is responsible for managing a Connection to a backend datasource.
 */
public abstract class Connector
  extends ReplicantService
{
  private static final int DEFAULT_LINKS_TO_PROCESS_PER_TICK = 100;
  private static final int DEFAULT_CHANGES_TO_PROCESS_PER_TICK = 100;
  /**
   * The code to parse changesets. Extracted into a separate class so it can vary by environment.
   */
  private final ChangeSetParser _changeSetParser = new ChangeSetParser();
  /**
   * The schema that defines data-API used to interact with datasource.
   */
  @Nonnull
  private final SystemSchema _schema;
  // Current lifecycle state of the connector. Exposed as an Arez observable via getState().
  @Nonnull
  private ConnectorState _state = ConnectorState.DISCONNECTED;
  /**
   * The current connection managed by the connector, if any.
   */
  @Nullable
  private Connection _connection;
  /**
   * Flag indicating that the Connectors internal scheduler is actively progressing
   * requests and responses. A scheduler should only be active if there is a connection present.
   */
  private boolean _schedulerActive;
  /**
   * This lock is acquired by the Connector when it begins processing messages from the network.
   * Once the processor is idle the lock should be released to allow Arez to reflect all the changes.
   */
  @Nullable
  private Disposable _schedulerLock;
  /**
   * Maximum number of entity links to attempt in a single tick of the scheduler. After this many links have
   * been processed then return and any remaining links can occur in a later tick.
   */
  private int _linksToProcessPerTick = DEFAULT_LINKS_TO_PROCESS_PER_TICK;
  /**
   * Maximum number of EntityChange messages processed in a single tick of the scheduler. After this many changes have
   * been processed then return and any remaining change can be processed in a later tick.
   */
  private int _changesToProcessPerTick = DEFAULT_CHANGES_TO_PROCESS_PER_TICK;
  /**
   * Action invoked after current MessageResponse is processed. This is typically used to update or alter
   * change Connection on message processing complete.
   */
  @Nullable
  private SafeProcedure _postMessageResponseAction;

  /**
   * Create the connector, register it with the runtime and ensure the schema is registered.
   *
   * @param context the ReplicantContext, if any.
   * @param schema  the schema that defines the data-API of the datasource.
   */
  protected Connector( @Nullable final ReplicantContext context,
                       @Nonnull final SystemSchema schema )
  {
    super( context );
    _schema = Objects.requireNonNull( schema );
    getReplicantRuntime().registerConnector( this );
    final SchemaService schemaService = getReplicantContext().getSchemaService();
    if ( !schemaService.contains( schema ) )
    {
      schemaService.registerSchema( schema );
    }
  }

  /**
   * Connect to the underlying data source.
   */
  public void connect()
  {
    final ConnectorState state = getState();
    if ( ConnectorState.CONNECTING != state && ConnectorState.CONNECTED != state )
    {
      // Assume ERROR unless doConnect() completes without throwing.
      ConnectorState newState = ConnectorState.ERROR;
      try
      {
        doConnect( this::onConnected );
        newState = ConnectorState.CONNECTING;
      }
      finally
      {
        setState( newState );
      }
    }
  }

  /**
   * Perform the connection, invoking the action when connection has completed.
   *
   * @param action the action to invoke once connect has completed.
   */
  protected abstract void doConnect( @Nonnull SafeProcedure action );

  /**
   * Disconnect from underlying data source.
   */
  public void disconnect()
  {
    final ConnectorState state = getState();
    if ( ConnectorState.DISCONNECTING != state && ConnectorState.DISCONNECTED != state )
    {
      // Assume ERROR unless doDisconnect() completes without throwing.
      ConnectorState newState = ConnectorState.ERROR;
      try
      {
        doDisconnect( this::onDisconnected );
        newState = ConnectorState.DISCONNECTING;
      }
      finally
      {
        setState( newState );
      }
    }
  }

  /**
   * Perform the disconnection, invoking the action when disconnection has completed.
   *
   * @param action the action to invoke once disconnect has completed.
   */
  protected abstract void doDisconnect( @Nonnull SafeProcedure action );

  /**
   * Return the schema associated with the connector.
   *
   * @return the schema associated with the connector.
   */
  @Nonnull
  public final SystemSchema getSchema()
  {
    return _schema;
  }

  /**
   * Set (or clear) the current connection. Existing subscriptions for this system are purged
   * as they were established against the previous connection.
   *
   * @param connection the new connection, or null when disconnecting.
   */
  protected final void setConnection( @Nullable final Connection connection )
  {
    _connection = connection;
    purgeSubscriptions();
  }

  /**
   * Return the current connection, if any.
   *
   * @return the current connection, or null if not connected.
   */
  @Nullable
  protected final Connection getConnection()
  {
    return _connection;
  }

  /**
   * Return the current connection, failing an invariant if none is present.
   *
   * @return the current connection.
   */
  @Nonnull
  protected final Connection ensureConnection()
  {
    if ( Replicant.shouldCheckInvariants() )
    {
      invariant( () -> null != _connection,
                 () -> "Replicant-0031: Connector.ensureConnection() when no connection is present." );
    }
    assert null != _connection;
    return _connection;
  }

  // Convenience accessor for the message response currently being processed on the connection.
  @Nonnull
  private MessageResponse ensureCurrentMessageResponse()
  {
    return ensureConnection().ensureCurrentMessageResponse();
  }

  /**
   * Dispose all subscriptions that belong to this connector's system.
   */
  @Action
  protected void purgeSubscriptions()
  {
    Stream.concat( getReplicantContext().getTypeSubscriptions().stream(),
                   getReplicantContext().getInstanceSubscriptions().stream() )
      // Only purge subscriptions for current system
      .filter( s -> s.getAddress().getSystemId() == getSchema().getId() )
      // Purge in reverse order. First instance subscriptions then type subscriptions
      .sorted( Comparator.reverseOrder() )
      .forEachOrdered( Disposable::dispose );
  }

  /**
   * Set the maximum number of entity links processed per scheduler tick.
   *
   * @param linksToProcessPerTick the maximum number of links per tick.
   */
  final void setLinksToProcessPerTick( final int linksToProcessPerTick )
  {
    _linksToProcessPerTick = linksToProcessPerTick;
  }

  /**
   * Set the maximum number of EntityChange messages processed per scheduler tick.
   *
   * @param changesToProcessPerTick the maximum number of changes per tick.
   */
  final void setChangesToProcessPerTick( final int changesToProcessPerTick )
  {
    _changesToProcessPerTick = changesToProcessPerTick;
  }

  /**
   * Return true if an area of interest action with specified parameters is pending or being processed.
   * When the action parameter is DELETE the filter parameter is ignored.
   *
   * @param action  the request type.
   * @param address the channel address.
   * @param filter  the channel filter, if any.
   * @return true if a matching request is pending or in progress.
   */
  final boolean isAreaOfInterestRequestPending( @Nonnull final AreaOfInterestRequest.Type action,
                                                @Nonnull final ChannelAddress address,
                                                @Nullable final Object filter )
  {
    final Connection connection = getConnection();
    return null != connection && connection.isAreaOfInterestRequestPending( action, address, filter );
  }

  /**
   * Return the index of last matching Type in pending aoi actions list.
   *
   * @param action  the request type.
   * @param address the channel address.
   * @param filter  the channel filter, if any.
   * @return the index of the last matching request, or -1 if no connection or no match.
   */
  final int lastIndexOfPendingAreaOfInterestRequest( @Nonnull final AreaOfInterestRequest.Type action,
                                                     @Nonnull final ChannelAddress address,
                                                     @Nullable final Object filter )
  {
    final Connection connection = getConnection();
    return null == connection ?
           -1 :
           connection.lastIndexOfPendingAreaOfInterestRequest( action, address, filter );
  }

  /**
   * Queue a subscribe request, start the scheduler and emit a spy event if enabled.
   *
   * @param address the channel address.
   * @param filter  the channel filter, if any.
   */
  final void requestSubscribe( @Nonnull final ChannelAddress address, @Nullable final Object filter )
  {
    ensureConnection().requestSubscribe( address, filter );
    triggerScheduler();
    if ( Replicant.areSpiesEnabled() && getReplicantContext().getSpy().willPropagateSpyEvents() )
    {
      getReplicantContext().getSpy().reportSpyEvent( new SubscribeRequestQueuedEvent( address, filter ) );
    }
  }

  /**
   * Queue a subscription update request, start the scheduler and emit a spy event if enabled.
   *
   * @param address the channel address.
   * @param filter  the new channel filter, if any.
   */
  final void requestSubscriptionUpdate( @Nonnull final ChannelAddress address, @Nullable final Object filter )
  {
    //TODO: Verify that this address is for an updateable channel
    ensureConnection().requestSubscriptionUpdate( address, filter );
    triggerScheduler();
    if ( Replicant.areSpiesEnabled() && getReplicantContext().getSpy().willPropagateSpyEvents() )
    {
      getReplicantContext().getSpy().reportSpyEvent( new SubscriptionUpdateRequestQueuedEvent( address, filter ) );
    }
  }

  /**
   * Queue an unsubscribe request, start the scheduler and emit a spy event if enabled.
   *
   * @param address the channel address.
   */
  final void requestUnsubscribe( @Nonnull final ChannelAddress address )
  {
    ensureConnection().requestUnsubscribe( address );
    triggerScheduler();
    if ( Replicant.areSpiesEnabled() && getReplicantContext().getSpy().willPropagateSpyEvents() )
    {
      getReplicantContext().getSpy().reportSpyEvent( new UnsubscribeRequestQueuedEvent( address ) );
    }
  }

  /**
   * Return true if the internal scheduler is actively progressing requests and responses.
   *
   * @return true if the scheduler is active.
   */
  final boolean isSchedulerActive()
  {
    return _schedulerActive;
  }

  /**
   * Schedule request and response processing.
   * This method should be invoked when requests are queued or responses are received.
   */
  protected final void triggerScheduler()
  {
    if ( !_schedulerActive )
    {
      _schedulerActive = true;
      activateScheduler();
    }
  }

  /**
   * Perform a single step progressing requests and responses.
   * This is invoked from the scheduler and will continue to be
   * invoked until it returns false.
   *
   * @return true if more work is to be done.
   */
  protected final boolean scheduleTick()
  {
    // Pause the Arez scheduler for the duration of processing so changes are reflected in one batch.
    if ( null == _schedulerLock )
    {
      _schedulerLock = context().pauseScheduler();
    }
    try
    {
      final boolean step1 = progressAreaOfInterestRequestProcessing();
      final boolean step2 = progressResponseProcessing();
      _schedulerActive = step1 || step2;
    }
    catch ( final Throwable e )
    {
      onMessageProcessFailure( e );
      _schedulerActive = false;
      return false;
    }
    finally
    {
      // Release the scheduler lock once there is no more work so Arez can run reactions.
      if ( !_schedulerActive )
      {
        _schedulerLock.dispose();
        _schedulerLock = null;
      }
    }
    return _schedulerActive;
  }

  /**
   * Activate the scheduler.
   * This involves creating a scheduler that will invoke {@link #scheduleTick()} until
   * that method returns false.
   */
  protected abstract void activateScheduler();

  /**
   * Perform a single step in sending one (or a batch) of requests to the server.
   *
   * @return true if more work is to be done.
   */
  protected abstract boolean progressAreaOfInterestRequestProcessing();

  /**
   * Perform a single step processing messages received from the server.
   *
   * @return true if more work is to be done.
   */
  boolean progressResponseProcessing()
  {
    final Connection connection = ensureConnection();
    final MessageResponse response = connection.getCurrentMessageResponse();
    if ( null == response )
    {
      // Select the MessageResponse if there is none active
      return connection.selectNextMessageResponse();
    }
    else if ( response.needsParsing() )
    {
      // Parse the json
      parseMessageResponse();
      return true;
    }
    else if ( response.needsChannelChangesProcessed() )
    {
      // Process the updates to channels
      processChannelChanges();
      return true;
    }
    else if ( response.areEntityChangesPending() )
    {
      // Process a chunk of entity changes
      processEntityChanges();
      return true;
    }
    else if ( response.areEntityLinksPending() )
    {
      // Process a chunk of links
      processEntityLinks();
      return true;
    }
    else if ( !response.hasWorldBeenValidated() )
    {
      // Validate the world after the change set has been applied (if feature is enabled)
      validateWorld();
      return true;
    }
    else
    {
      completeMessageResponse();
      return true;
    }
  }

  /**
   * Return the state of the connector.
   *
   * @return the state of the connector.
   */
  @Nonnull
  @Observable
  public ConnectorState getState()
  {
    return _state;
  }

  /**
   * Set the state of the connector.
   *
   * @param state the new state. Must not be null.
   */
  protected void setState( @Nonnull final ConnectorState state )
  {
    _state = Objects.requireNonNull( state );
  }

  /**
   * Build a ChannelAddress from a ChannelChange value.
   *
   * @param channelChange the change.
   * @return the address.
   */
  @Nonnull
  final ChannelAddress toAddress( @Nonnull final ChannelChange channelChange )
  {
    final int channelId = channelChange.getChannelId();
    final Integer subChannelId = channelChange.hasSubChannelId() ? channelChange.getSubChannelId() : null;
    return new ChannelAddress( getSchema().getId(), channelId, subChannelId );
  }

  /**
   * Apply the channel-level changes (ADD/REMOVE/UPDATE) of the current message response
   * to the local subscription set.
   */
  @Action
  protected void processChannelChanges()
  {
    final MessageResponse response = ensureCurrentMessageResponse();
    final ChangeSet changeSet = response.getChangeSet();
    final ChannelChange[] channelChanges = changeSet.getChannelChanges();
    for ( final ChannelChange channelChange : channelChanges )
    {
      final ChannelAddress address = toAddress( channelChange );
      final Object filter = channelChange.getChannelFilter();
      final ChannelChange.Action actionType = channelChange.getAction();
      if ( ChannelChange.Action.ADD == actionType )
      {
        response.incChannelAddCount();
        // The subscription is explicit if there is an in-progress AreaOfInterest request for the same address.
        final boolean explicitSubscribe =
          ensureConnection()
            .getCurrentAreaOfInterestRequests()
            .stream()
            .anyMatch( a -> a.isInProgress() && a.getAddress().equals( address ) );
        getReplicantContext().createSubscription( address, filter, explicitSubscribe );
      }
      else if ( ChannelChange.Action.REMOVE == actionType )
      {
        final Subscription subscription = getReplicantContext().findSubscription( address );
        if ( Replicant.shouldCheckInvariants() )
        {
          invariant( () -> null != subscription,
                     () -> "Replicant-0028: Received ChannelChange of type REMOVE for address " + address +
                           " but no such subscription exists." );
          assert null != subscription;
        }
        assert null != subscription;
        Disposable.dispose( subscription );
        response.incChannelRemoveCount();
      }
      else
      {
        assert ChannelChange.Action.UPDATE == actionType;
        final Subscription subscription = getReplicantContext().findSubscription( address );
        if ( Replicant.shouldCheckInvariants() )
        {
          invariant( () -> null != subscription,
                     () -> "Replicant-0033: Received ChannelChange of type UPDATE for address " + address +
                           " but no such subscription exists." );
          assert null != subscription;
          invariant( subscription::isExplicitSubscription,
                     () -> "Replicant-0029: Received ChannelChange of type UPDATE for address " + address +
                           " but subscription is implicitly subscribed." );
        }
        assert null != subscription;
        subscription.setFilter( filter );
        updateSubscriptionForFilteredEntities( subscription );
        response.incChannelUpdateCount();
      }
    }
    response.markChannelActionsProcessed();
  }

  /**
   * Link a bounded chunk of entities queued on the current message response.
   * At most {@code _linksToProcessPerTick} links are processed per invocation.
   */
  @Action
  protected void processEntityLinks()
  {
    final MessageResponse response = ensureCurrentMessageResponse();
    Linkable linkable;
    for ( int i = 0; i < _linksToProcessPerTick && null != ( linkable = response.nextEntityToLink() ); i++ )
    {
      linkable.link();
      response.incEntityLinkCount();
    }
  }

  /**
   * Method invoked when a filter has updated and the Connector needs to delink any entities
   * that are no longer part of the subscription now that the filter has changed.
   *
   * @param subscription the subscription that was updated.
   */
  final void updateSubscriptionForFilteredEntities( @Nonnull final Subscription subscription )
  {
    // Copy the type list as delinking may mutate the underlying collection while iterating.
    for ( final Class<?> entityType : new ArrayList<>( subscription.findAllEntityTypes() ) )
    {
      final List<Entity> entities = subscription.findAllEntitiesByType( entityType );
      if ( !entities.isEmpty() )
      {
        final SubscriptionUpdateEntityFilter entityFilter = getSubscriptionUpdateFilter();
        final ChannelAddress address = subscription.getAddress();
        final Object filter = subscription.getFilter();
        for ( final Entity entity : entities )
        {
          if ( !entityFilter.doesEntityMatchFilter( address, filter, entity ) )
          {
            entity.delinkFromSubscription( subscription );
          }
        }
      }
    }
  }

  /**
   * Specify an action invoked once after the current MessageResponse has been completed.
   *
   * @param postMessageResponseAction the action, or null to clear it.
   */
  protected final void setPostMessageResponseAction( @Nullable final SafeProcedure postMessageResponseAction )
  {
    _postMessageResponseAction = postMessageResponseAction;
  }

  /**
   * Finalize processing of the current message response: update sequence tracking, run
   * completion callbacks, emit the processed status and run any post-response action.
   */
  void completeMessageResponse()
  {
    final Connection connection = ensureConnection();
    final MessageResponse response = connection.ensureCurrentMessageResponse();

    // OOB messages are not sequenced
    if ( !response.isOob() )
    {
      connection.setLastRxSequence( response.getChangeSet().getSequence() );
    }

    //Step: Run the post actions
    final RequestEntry request = response.getRequest();
    if ( null != request )
    {
      request.markResultsAsArrived();
    }
    /*
     * An action will be returned if the message is an OOB message
     * or it is an answer to a request and the rpc invocation has
     * already returned.
     */
    final SafeProcedure action = response.getCompletionAction();
    if ( null != action )
    {
      action.call();
      // OOB messages are not in response to requests (at least not request associated with the current connection)
      if ( !response.isOob() )
      {
        // We can remove the request because this side ran second and the RPC channel has already returned.
        final ChangeSet changeSet = response.getChangeSet();
        final Integer requestId = changeSet.getRequestId();
        if ( null != requestId )
        {
          connection.removeRequest( requestId );
        }
      }
    }
    connection.setCurrentMessageResponse( null );
    onMessageProcessed( response.toStatus() );
    if ( null != _postMessageResponseAction )
    {
      _postMessageResponseAction.call();
      _postMessageResponseAction = null;
    }
  }

  /**
   * Clear the explicitSubscription flag for subscriptions matching the supplied REMOVE requests.
   *
   * @param requests the REMOVE requests being processed.
   */
  @Action
  protected void removeExplicitSubscriptions( @Nonnull final List<AreaOfInterestRequest> requests )
  {
    requests.forEach( request -> {
      if ( Replicant.shouldCheckInvariants() )
      {
        invariant( () -> AreaOfInterestRequest.Type.REMOVE == request.getType(),
                   () -> "Replicant-0034: Connector.removeExplicitSubscriptions() invoked with request " +
                         "with type that is not REMOVE. Request: " + request );
      }
      final Subscription subscription = getReplicantContext().findSubscription( request.getAddress() );
      if ( null != subscription )
      {
        /*
         * It is unclear whether this code is actually required — should not the response from the server
         * automatically set explicitSubscription to false?
         */
        subscription.setExplicitSubscription( false );
      }
    } );
  }

  /**
   * Remove ADD requests that are unnecessary because an equivalent subscription already exists.
   * Implicit subscriptions are converted to explicit subscriptions instead.
   *
   * @param requests the ADD requests to filter; unneeded entries are removed in place.
   */
  @Action
  protected void removeUnneededAddRequests( @Nonnull final List<AreaOfInterestRequest> requests )
  {
    requests.removeIf( request -> {
      final ChannelAddress address = request.getAddress();
      final Subscription subscription = getReplicantContext().findSubscription( address );
      if ( Replicant.shouldCheckInvariants() )
      {
        invariant( () -> null == subscription || !subscription.isExplicitSubscription(),
                   () -> "Replicant-0030: Request to add channel at address " + address +
                         " but already explicitly subscribed to channel." );
      }
      if ( null != subscription && !subscription.isExplicitSubscription() )
      {
        // Existing subscription converted to an explicit subscription
        subscription.setExplicitSubscription( true );
        request.markAsComplete();
        return true;
      }
      else
      {
        return false;
      }
    } );
  }

  /**
   * Remove UPDATE requests for channels that are no longer subscribed.
   *
   * @param requests the UPDATE requests to filter; unneeded entries are removed in place.
   */
  @Action
  protected void removeUnneededUpdateRequests( @Nonnull final List<AreaOfInterestRequest> requests )
  {
    requests.removeIf( a -> {
      final ChannelAddress address = a.getAddress();
      final Subscription subscription = getReplicantContext().findSubscription( address );
      if ( Replicant.shouldCheckInvariants() )
      {
        invariant( () -> null != subscription,
                   () -> "Replicant-0048: Request to update channel at address " + address +
                         " but not subscribed to channel." );
      }
      // The following code can probably be removed but it was present in the previous system
      // and it is unclear if there is any scenarios where it can still happen. The code has
      // been left in until we can verify it is no longer an issue. The above invariants will
      // trigger in development mode to help us track down these scenarios.
      if ( null == subscription )
      {
        a.markAsComplete();
        return true;
      }
      else
      {
        return false;
      }
    } );
  }

  /**
   * Remove REMOVE requests for channels that are not (explicitly) subscribed.
   *
   * @param requests the REMOVE requests to filter; unneeded entries are removed in place.
   */
  @Action
  protected void removeUnneededRemoveRequests( @Nonnull final List<AreaOfInterestRequest> requests )
  {
    requests.removeIf( request -> {
      final ChannelAddress address = request.getAddress();
      final Subscription subscription = getReplicantContext().findSubscription( address );
      if ( Replicant.shouldCheckInvariants() )
      {
        invariant( () -> null != subscription,
                   () -> "Replicant-0046: Request to unsubscribe from channel at address " + address +
                         " but not subscribed to channel." );
        invariant( () -> null == subscription || subscription.isExplicitSubscription(),
                   () -> "Replicant-0047: Request to unsubscribe from channel at address " + address +
                         " but subscription is not an explicit subscription." );
      }
      // The following code can probably be removed but it was present in the previous system
      // and it is unclear if there is any scenarios where it can still happen. The code has
      // been left in until we can verify it is no longer an issue. The above invariants will trigger
      // in development mode to help us track down these scenarios
      if ( null == subscription || !subscription.isExplicitSubscription() )
      {
        request.markAsComplete();
        return true;
      }
      else
      {
        return false;
      }
    } );
  }

  /**
   * Parse the json data associated with the current response and then enqueue it.
   */
  void parseMessageResponse()
  {
    final Connection connection = ensureConnection();
    final MessageResponse response = connection.ensureCurrentMessageResponse();
    final String rawJsonData = response.getRawJsonData();
    assert null != rawJsonData;
    final ChangeSet changeSet = _changeSetParser.parseChangeSet( rawJsonData );
    if ( Replicant.shouldValidateChangeSetOnRead() )
    {
      changeSet.validate();
    }

    final RequestEntry request;
    if ( response.isOob() )
    {
      /*
       * OOB messages are really just cached messages at this stage and they are the
       * same bytes as originally sent down and then cached. So the requestId present
       * in the json blob is for old connection and can be ignored.
       */
      request = null;
    }
    else
    {
      final Integer requestId = changeSet.getRequestId();
      final String eTag = changeSet.getETag();
      final int sequence = changeSet.getSequence();
      request = null != requestId ? connection.getRequest( requestId ) : null;
      if ( Replicant.shouldCheckApiInvariants() )
      {
        apiInvariant( () -> null != request || null == requestId,
                      () -> "Replicant-0066: Unable to locate request with id '" + requestId +
                            "' specified for ChangeSet with sequence " + sequence +
                            ". Existing Requests: " + connection.getRequests() );
      }
      if ( null != request )
      {
        // Cache the raw message when the request is cacheable and an eTag was supplied.
        final String cacheKey = request.getCacheKey();
        if ( null != eTag && null != cacheKey )
        {
          final CacheService cacheService = getReplicantContext().getCacheService();
          if ( null != cacheService )
          {
            cacheService.store( cacheKey, eTag, rawJsonData );
          }
        }
      }
    }

    response.recordChangeSet( changeSet, request );
    connection.queueCurrentResponse();
  }

  /**
   * Apply a bounded chunk of EntityChange messages from the current message response to
   * the local entity set. At most {@code _changesToProcessPerTick} changes are applied.
   */
  @Action
  protected void processEntityChanges()
  {
    final MessageResponse response = ensureCurrentMessageResponse();
    EntityChange change;
    for ( int i = 0; i < _changesToProcessPerTick && null != ( change = response.nextEntityChange() ); i++ )
    {
      final int id = change.getId();
      final int typeId = change.getTypeId();
      final EntitySchema entitySchema = getSchema().getEntity( typeId );
      final Class<?> type = entitySchema.getType();
      Entity entity = getReplicantContext().getEntityService().findEntityByTypeAndId( type, id );
      if ( change.isRemove() )
      {
        if ( null != entity )
        {
          Disposable.dispose( entity );
        }
        else
        {
          if ( Replicant.shouldCheckInvariants() )
          {
            fail( () -> "Replicant-0068: ChangeSet " + response.getChangeSet().getSequence() + " contained an " +
                        "EntityChange message to delete entity of type " + typeId + " and id " + id +
                        " but no such entity exists locally." );
          }
        }
        response.incEntityRemoveCount();
      }
      else
      {
        final EntityChangeData data = change.getData();
        if ( null == entity )
        {
          // Entity is new locally: create it and build the user object via the change mapper.
          entity = getReplicantContext().findOrCreateEntity( type, id );
          final Object userObject = getChangeMapper().createEntity( entitySchema, id, data );
          entity.setUserObject( userObject );
        }
        else
        {
          // Entity exists locally: apply the update to the existing user object.
          final Object userObject = entity.getUserObject();
          assert null != userObject;
          getChangeMapper().updateEntity( entitySchema, userObject, data );
        }
        // Link the entity to every channel the change says it belongs to.
        final EntityChannel[] changeCount = change.getChannels();
        final int schemaId = getSchema().getId();
        for ( final EntityChannel entityChannel : changeCount )
        {
          final ChannelAddress address = entityChannel.toAddress( schemaId );
          final Subscription subscription = getReplicantContext().findSubscription( address );
          if ( Replicant.shouldCheckInvariants() )
          {
            invariant( () -> null != subscription,
                       () -> "Replicant-0069: ChangeSet " + response.getChangeSet().getSequence() + " contained an " +
                             "EntityChange message referencing channel " + entityChannel.toAddress( schemaId ) +
                             " but no such subscription exists locally." );
          }
          assert null != subscription;
          entity.linkToSubscription( subscription );
        }
        response.incEntityUpdateCount();
        final Object userObject = entity.getUserObject();
        assert null != userObject;
        response.changeProcessed( userObject );
      }
    }
  }

  /**
   * Return the mapper used to create and update entity user objects from change data.
   *
   * @return the change mapper.
   */
  @Nonnull
  protected abstract ChangeMapper getChangeMapper();

  /**
   * Return the filter used to determine whether an entity still matches a subscription filter.
   *
   * @return the subscription update entity filter.
   */
  @Nonnull
  protected abstract SubscriptionUpdateEntityFilter getSubscriptionUpdateFilter();

  /**
   * Mark the current response's world as validated, validating entities if the feature is enabled.
   */
  void validateWorld()
  {
    ensureCurrentMessageResponse().markWorldAsValidated();
    if ( Replicant.shouldValidateEntitiesOnLoad() )
    {
      getReplicantContext().getValidator().validateEntities();
    }
  }

  /**
   * The AreaOfInterestRequest currently being processed can be completed and
   * trigger scheduler to start next step.
   */
  protected final void completeAreaOfInterestRequest()
  {
    ensureConnection().completeAreaOfInterestRequest();
    triggerScheduler();
  }

  /**
   * Invoked to fire an event when connect has completed.
   */
  @Action
  protected void onConnected()
  {
    setState( ConnectorState.CONNECTED );
    if ( Replicant.areSpiesEnabled() && getReplicantContext().getSpy().willPropagateSpyEvents() )
    {
      getReplicantContext().getSpy()
        .reportSpyEvent( new ConnectedEvent( getSchema().getId(), getSchema().getName() ) );
    }
  }

  /**
   * Invoked to fire an event when failed to connect.
   */
  @Action
  protected void onConnectFailure( @Nonnull final Throwable error )
  {
    setState( ConnectorState.ERROR );
    if ( Replicant.areSpiesEnabled() && getReplicantContext().getSpy().willPropagateSpyEvents() )
    {
      getReplicantContext().getSpy()
        .reportSpyEvent( new ConnectFailureEvent( getSchema().getId(), getSchema().getName(), error ) );
    }
  }

  /**
   * Invoked to fire an event when disconnect has completed.
   */
  @Action
  protected void onDisconnected()
  {
    setState( ConnectorState.DISCONNECTED );
    if ( Replicant.areSpiesEnabled() && getReplicantContext().getSpy().willPropagateSpyEvents() )
    {
      getReplicantContext().getSpy()
        .reportSpyEvent( new DisconnectedEvent( getSchema().getId(), getSchema().getName() ) );
    }
  }

  /**
   * Invoked to fire an event when failed to disconnect.
   */
  @Action
  protected void onDisconnectFailure( @Nonnull final Throwable error )
  {
    setState( ConnectorState.ERROR );
    if ( Replicant.areSpiesEnabled() && getReplicantContext().getSpy().willPropagateSpyEvents() )
    {
      getReplicantContext().getSpy()
        .reportSpyEvent( new DisconnectFailureEvent( getSchema().getId(), getSchema().getName(), error ) );
    }
  }

  /**
   * Invoked when a change set has been completely processed.
   *
   * @param status the status describing the results of data load.
   */
  protected void onMessageProcessed( @Nonnull final DataLoadStatus status )
  {
    if ( Replicant.areSpiesEnabled() && getReplicantContext().getSpy().willPropagateSpyEvents() )
    {
      getReplicantContext().getSpy()
        .reportSpyEvent( new MessageProcessedEvent( getSchema().getId(), getSchema().getName(), status ) );
    }
  }

  /**
   * Called when a data load has resulted in a failure.
   */
  @Action
  protected void onMessageProcessFailure( @Nonnull final Throwable error )
  {
    if ( Replicant.areSpiesEnabled() && getReplicantContext().getSpy().willPropagateSpyEvents() )
    {
      getReplicantContext().getSpy()
        .reportSpyEvent( new MessageProcessFailureEvent( getSchema().getId(), getSchema().getName(), error ) );
    }
    disconnectIfPossible( error );
  }

  /**
   * Attempted to retrieve data from backend and failed.
   */
  @Action
  protected void onMessageReadFailure( @Nonnull final Throwable error )
  {
    if ( Replicant.areSpiesEnabled() && getReplicantContext().getSpy().willPropagateSpyEvents() )
    {
      getReplicantContext().getSpy()
        .reportSpyEvent( new MessageReadFailureEvent( getSchema().getId(), getSchema().getName(), error ) );
    }
    disconnectIfPossible( error );
  }

  /**
   * Disconnect (and emit a restart spy event) unless already in a transition state.
   *
   * @param cause the error that motivated the disconnect.
   */
  final void disconnectIfPossible( @Nonnull final Throwable cause )
  {
    if ( !ConnectorState.isTransitionState( getState() ) )
    {
      if ( Replicant.areSpiesEnabled() && getReplicantContext().getSpy().willPropagateSpyEvents() )
      {
        getReplicantContext().getSpy()
          .reportSpyEvent( new RestartEvent( getSchema().getId(), getSchema().getName(), cause ) );
      }
      disconnect();
    }
  }

  /**
   * Invoked when a subscribe has started; marks the AreaOfInterest as LOADING.
   *
   * @param address the channel address.
   */
  @Action
  protected void onSubscribeStarted( @Nonnull final ChannelAddress address )
  {
    updateAreaOfInterest( address, AreaOfInterest.Status.LOADING, null );
    if ( Replicant.areSpiesEnabled() && getReplicantContext().getSpy().willPropagateSpyEvents() )
    {
      getReplicantContext().getSpy()
        .reportSpyEvent( new SubscribeStartedEvent( getSchema().getId(), getSchema().getName(), address ) );
    }
  }

  /**
   * Invoked when a subscribe has completed; marks the AreaOfInterest as LOADED.
   *
   * @param address the channel address.
   */
  @Action
  protected void onSubscribeCompleted( @Nonnull final ChannelAddress address )
  {
    updateAreaOfInterest( address, AreaOfInterest.Status.LOADED, null );
    if ( Replicant.areSpiesEnabled() && getReplicantContext().getSpy().willPropagateSpyEvents() )
    {
      getReplicantContext().getSpy()
        .reportSpyEvent( new SubscribeCompletedEvent( getSchema().getId(), getSchema().getName(), address ) );
    }
  }

  /**
   * Invoked when a subscribe has failed; marks the AreaOfInterest as LOAD_FAILED.
   *
   * @param address the channel address.
   * @param error   the failure.
   */
  @Action
  protected void onSubscribeFailed( @Nonnull final ChannelAddress address, @Nonnull final Throwable error )
  {
    updateAreaOfInterest( address, AreaOfInterest.Status.LOAD_FAILED, error );
    if ( Replicant.areSpiesEnabled() && getReplicantContext().getSpy().willPropagateSpyEvents() )
    {
      getReplicantContext().getSpy()
        .reportSpyEvent( new SubscribeFailedEvent( getSchema().getId(), getSchema().getName(), address, error ) );
    }
  }

  /**
   * Invoked when an unsubscribe has started; marks the AreaOfInterest as UNLOADING.
   *
   * @param address the channel address.
   */
  @Action
  protected void onUnsubscribeStarted( @Nonnull final ChannelAddress address )
  {
    updateAreaOfInterest( address, AreaOfInterest.Status.UNLOADING, null );
    if ( Replicant.areSpiesEnabled() && getReplicantContext().getSpy().willPropagateSpyEvents() )
    {
      getReplicantContext().getSpy()
        .reportSpyEvent( new UnsubscribeStartedEvent( getSchema().getId(), getSchema().getName(), address ) );
    }
  }

  /**
   * Invoked when an unsubscribe has completed; marks the AreaOfInterest as UNLOADED.
   *
   * @param address the channel address.
   */
  @Action
  protected void onUnsubscribeCompleted( @Nonnull final ChannelAddress address )
  {
    updateAreaOfInterest( address, AreaOfInterest.Status.UNLOADED, null );
    if ( Replicant.areSpiesEnabled() && getReplicantContext().getSpy().willPropagateSpyEvents() )
    {
      getReplicantContext().getSpy()
        .reportSpyEvent( new UnsubscribeCompletedEvent( getSchema().getId(), getSchema().getName(), address ) );
    }
  }

  /**
   * Invoked when an unsubscribe has failed.
   * NOTE(review): this deliberately records UNLOADED (with no error) rather than a failure
   * status — the channel is treated as gone either way. Confirm this is intended.
   *
   * @param address the channel address.
   * @param error   the failure.
   */
  @Action
  protected void onUnsubscribeFailed( @Nonnull final ChannelAddress address, @Nonnull final Throwable error )
  {
    updateAreaOfInterest( address, AreaOfInterest.Status.UNLOADED, null );
    if ( Replicant.areSpiesEnabled() && getReplicantContext().getSpy().willPropagateSpyEvents() )
    {
      getReplicantContext().getSpy()
        .reportSpyEvent( new UnsubscribeFailedEvent( getSchema().getId(), getSchema().getName(), address, error ) );
    }
  }

  /**
   * Invoked when a subscription update has started; marks the AreaOfInterest as UPDATING.
   *
   * @param address the channel address.
   */
  @Action
  protected void onSubscriptionUpdateStarted( @Nonnull final ChannelAddress address )
  {
    updateAreaOfInterest( address, AreaOfInterest.Status.UPDATING, null );
    if ( Replicant.areSpiesEnabled() && getReplicantContext().getSpy().willPropagateSpyEvents() )
    {
      getReplicantContext().getSpy()
        .reportSpyEvent( new SubscriptionUpdateStartedEvent( getSchema().getId(), getSchema().getName(), address ) );
    }
  }

  /**
   * Invoked when a subscription update has completed; marks the AreaOfInterest as UPDATED.
   *
   * @param address the channel address.
   */
  @Action
  protected void onSubscriptionUpdateCompleted( @Nonnull final ChannelAddress address )
  {
    updateAreaOfInterest( address, AreaOfInterest.Status.UPDATED, null );
    if ( Replicant.areSpiesEnabled() && getReplicantContext().getSpy().willPropagateSpyEvents() )
    {
      getReplicantContext().getSpy()
        .reportSpyEvent( new SubscriptionUpdateCompletedEvent( getSchema().getId(), getSchema().getName(), address ) );
    }
  }

  /**
   * Invoked when a subscription update has failed; marks the AreaOfInterest as UPDATE_FAILED.
   *
   * @param address the channel address.
   * @param error   the failure.
   */
  @Action
  protected void onSubscriptionUpdateFailed( @Nonnull final ChannelAddress address, @Nonnull final Throwable error )
  {
    updateAreaOfInterest( address, AreaOfInterest.Status.UPDATE_FAILED, error );
    if ( Replicant.areSpiesEnabled() && getReplicantContext().getSpy().willPropagateSpyEvents() )
    {
      getReplicantContext()
        .getSpy()
        .reportSpyEvent( new SubscriptionUpdateFailedEvent( getSchema().getId(),
                                                            getSchema().getName(),
                                                            address,
                                                            error ) );
    }
  }

  /**
   * Update the status of the AreaOfInterest at the specified address, if one exists.
   *
   * @param address the channel address.
   * @param status  the new status.
   * @param error   the associated error, if any.
   */
  private void updateAreaOfInterest( @Nonnull final ChannelAddress address,
                                     @Nonnull final AreaOfInterest.Status status,
                                     @Nullable final Throwable error )
  {
    final AreaOfInterest areaOfInterest = getReplicantContext().findAreaOfInterestByAddress( address );
    if ( null != areaOfInterest )
    {
      areaOfInterest.updateAreaOfInterest( status, error );
    }
  }

  /**
   * Return the runtime that this connector is registered with.
   *
   * @return the ReplicantRuntime.
   */
  @Nonnull
  final ReplicantRuntime getReplicantRuntime()
  {
    return getReplicantContext().getRuntime();
  }

  /**
   * Return the Arez context, supplied by the annotation processor.
   *
   * @return the ArezContext.
   */
  @ContextRef
  @Nonnull
  protected abstract ArezContext context();

  /**
   * {@inheritDoc}
   */
  @Override
  public String toString()
  {
    return Replicant.areNamesEnabled() ? "Connector[" + getSchema().getName() + "]" : super.toString();
  }

  /**
   * Return the scheduler lock, exposed for testing only.
   *
   * @return the scheduler lock, if held.
   */
  @TestOnly
  @Nullable
  final Disposable getSchedulerLock()
  {
    return _schedulerLock;
  }
}
/* Open Source Software - may be modified and shared by FRC teams. The code */ /* the project. */ // FILE NAME: Autonomous.java (Team 339 - Kilroy) // ABSTRACT: // This file is where almost all code for Kilroy will be // written. All of these functions are functions that should // override methods in the base class (IterativeRobot). The // functions are as follows: // Init() - Initialization code for teleop mode // should go here. Will be called each time the robot enters // teleop mode. // Periodic() - Periodic code for teleop mode should // go here. Will be called periodically at a regular rate while // the robot is in teleop mode. // Team 339. package org.usfirst.frc.team339.robot; import org.usfirst.frc.team339.Hardware.Hardware; import org.usfirst.frc.team339.HardwareInterfaces.transmission.Transmission_old.debugStateValues; import org.usfirst.frc.team339.Utils.Drive; import org.usfirst.frc.team339.Utils.ErrorMessage.PrintsTo; import org.usfirst.frc.team339.Utils.Guidance; import org.usfirst.frc.team339.Utils.ManipulatorArm; import org.usfirst.frc.team339.Utils.ManipulatorArm.ArmPosition; import edu.wpi.first.wpilibj.CameraServer; import edu.wpi.first.wpilibj.DoubleSolenoid; import edu.wpi.first.wpilibj.Relay; import edu.wpi.first.wpilibj.Relay.Value; /** * This class contains all of the user code for the Autonomous * part of the match, namely, the Init and Periodic code * * @author Nathanial Lydick * @written Jan 13, 2015 */ public class Teleop { /** * User Initialization code for teleop mode should go here. Will be * called once when the robot enters teleop mode. 
 *
 * @author Nathanial Lydick
 * @written Jan 13, 2015
 */
public static void init ()
{
    // inverts the right side of the drivetrain
    Hardware.rightFrontMotor.setInverted(true);
    Hardware.rightRearMotor.setInverted(true);
    // Initial set up so the screen doesn't start green after Teleop starts
    Guidance.updateBallStatus(false);
    // Tell USB camera handler that we only have one USB camera
    CameraServer.getInstance().setSize(1);// AHK @cameratesting
    // Make sure the camera is really dark
    Hardware.axisCamera.writeBrightness(
            Hardware.MINIMUM_AXIS_CAMERA_BRIGHTNESS);
    // set max speed.
    Hardware.drive.setMaxSpeed(MAXIMUM_TELEOP_SPEED);
    // Set up the transmission class so it knows how to drive. Kind of
    // like driver's ed for Robots. I wish my drivers ed class was this
    // short and painless...
    Hardware.transmission.setGear(2);
    Hardware.transmission
            .setFirstGearPercentage(Robot.FIRST_GEAR_PERCENTAGE);
    // In demo mode the second-gear percentage is scaled by the delay pot:
    // the pot reading is normalized over its degree range and multiplied
    // into SECOND_GEAR_PERCENTAGE. Otherwise plain second gear is used.
    if (Hardware.inDemo.isOn() == true)
        {
        Hardware.transmission.setSecondGearPercentage(
                Robot.SECOND_GEAR_PERCENTAGE
                        * (((double) Hardware.delayPot.get()
                                - Hardware.DELAY_POT_MIN_DEGREES)
                                / (double) (Hardware.DELAY_POT_DEGREES
                                        - Hardware.DELAY_POT_MIN_DEGREES)));
        }
    else
        {
        Hardware.transmission
                .setSecondGearPercentage(
                        Robot.SECOND_GEAR_PERCENTAGE);
        }
    Hardware.transmission.setJoystickDeadbandRange(.20);
    Hardware.transmission.setJoysticksAreReversed(false);
    // make sure we don't start Teleop off with the ringlight on
    Hardware.ringLightRelay.set(Value.kOff);
    // Make sure we don't start off aligningByCamera, firing, or taking
    // a picture. That would be a nasty suprise:
    // "ENABLING"
    // *SHOOM*
    // *Newly severed head flies out of pit from firing arm*
    // *SPLAT*
    // "I guess the Industrial Safety Award is out the window"
    isAligningByCamera = false;
    fireRequested = false;
    prepPic = false;
    ballFiring = false;
    // Hardware.drive.alignByCameraStateMachine(0.0, 0.0, 0.0, 0.0,
    // 0.0, true, false, false);
    // currentCameraReturn = Drive.alignByCameraReturn.WORKING;
    // Make sure when we enable we're not telling the drivers to do
    // anything yet
    Hardware.arrowDashboard
            .setDirection(Guidance.Direction.neutral);
    // Hardware.arrowDashboard.update();
    // Starts testing speed.
    // Turn off all the solenoids before we really start anything
    Hardware.catapultSolenoid0.set(false);
    Hardware.catapultSolenoid1.set(false);
    Hardware.catapultSolenoid2.set(false);
    // Reset all timers, encoders, and stop all the motors.
    Hardware.delayTimer.reset();
    Hardware.rightRearEncoder.reset();
    Hardware.leftRearEncoder.reset();
    Hardware.leftFrontMotor.set(0.0);
    Hardware.leftRearMotor.set(0.0);
    Hardware.rightFrontMotor.set(0.0);
    Hardware.rightRearMotor.set(0.0);
    Hardware.armMotor.set(0.0);
    Hardware.armIntakeMotor.set(0.0);
    // Allows us to edit the speed of the robot using the
    // potentiometer on the control switch mount (Alex's fancy name)
    // Essentially, we multiply the percentage given to the motors in
    // second gear by maaath (the value of the delayPot adjusted out
    // of the 0-270 spectrum and into a 10-100 percentage range). If
    // we're not in demo, we're just in regular ol' second gear.
    // NOTE(review): this second demo-scaling block overwrites the second-gear
    // percentage computed above with a different formula — presumably the
    // newer of the two; confirm which scaling is intended.
    if (Hardware.inDemo.isOn() == true)
        {
        Hardware.transmission.setSecondGearPercentage(
                (/*
                  * Robot.SECOND_GEAR_PERCENTAGE //TODO check to make sure
                  * this shouldn't be here @AHK
                  */(Hardware.delayPot.get()
                        * (Robot.SECOND_GEAR_PERCENTAGE
                                - Hardware.MINIMUM_POT_SCALING_VALUE)
                        / (Hardware.DELAY_POT_DEGREES))
                        + Hardware.MINIMUM_POT_SCALING_VALUE));
        }
    else
        {
        Hardware.transmission
                .setSecondGearPercentage(
                        Robot.SECOND_GEAR_PERCENTAGE);
        }
} // end Init

// State flags for teleop; several related flags (e.g. testingAlignByCamera,
// loopCounter, motionToggled) are declared elsewhere in this file.
// private char[] reports;
private static boolean done = false;

// private static boolean done2 = false;
private static edu.wpi.first.wpilibj.DoubleSolenoid.Value Reverse;

private static edu.wpi.first.wpilibj.DoubleSolenoid.Value Forward;

private static boolean testAuto = true;

private static boolean testMove1IsDone = true;

private static boolean testMove2IsDone = false;

private static boolean testMove3IsDone = true;

private static boolean testCameraIsDone = true;

private static boolean isTurning180Degrees = false;

// private static boolean testingAlignByCamera = false;//@DELETE
// static Timer speedTesterTimer = new Timer();
// static SpeedTester speedTester = new SpeedTester(
// Hardware.rightRearEncoder, speedTesterTimer);
// static double speedTestValue;
// static boolean speedTesting = true;

/**
 * User Periodic code for teleop mode should go here. Will be called
 * periodically at a regular rate while the robot is in teleop mode.
 *
 * @author Nathanial Lydick
 * @written Jan 13, 2015
 */
static double val;

static double demoDriveRatio = 0.0;

public static void periodic ()
{
    // Print out any data we want from the hardware elements.
    printStatements();
    // if (Hardware.leftDriver.getTrigger() == true)
    // Hardware.axisCamera.writeExposureControl(
    // AxisCamera.ExposureControl.kHold);
    // }//@AHK TODO remove
    // if (Hardware.leftDriver.getRawButton(6) == true)
    // for (int i = 0; i < 5; i++)
    // for (int j = 0; j < 5; j++)
    // switch (j)
    // case 0:
    // Hardware.axisCamera
    // .saveImageWithTypeName("A");
    // break;
    // case 1:
    // Hardware.axisCamera
    // .saveImageWithTypeName("B");
    // break;
    // case 2:
    // Hardware.axisCamera
    // .saveImageWithTypeName("C");
    // break;
    // case 3:
    // Hardware.axisCamera
    // .saveImageWithTypeName("D");
    // break;
    // case 4:
    // Hardware.axisCamera
    // .saveImageWithTypeName("E");
    // break;
    // try
    // Thread.sleep(20);
    // catch (InterruptedException e)
    // // TODO Auto-generated catch block
    // e.printStackTrace();
    // val = Hardware.leftDriver.getThrottle();
    // Hardware.axisCamera.writeBrightness((int) val * 100);
    // System.out.println("Camera brightness: " + (int) val * 100);//@AHK
    // remove.
    // Hardware.transmission.upshift(1);
    // driveRobot();
    // speedTester.watchJoystick(Hardware.rightDriver.getY()); //@AHK REMOVE
    // Demo-mode drive scaling: buttons 8/9/10 on the left driver halve,
    // double, or restore the pot-derived ratio, which then rescales the
    // second-gear percentage every loop.
    if (Hardware.inDemo.isOn() == true)
        {
        if (Hardware.leftDriver.getRawButton(8))
            {
            demoDriveRatio = Hardware.delayPot.get() / 2;
            }
        else if (Hardware.leftDriver.getRawButton(9))
            {
            demoDriveRatio = Hardware.delayPot.get() * 2;
            }
        else if (Hardware.leftDriver.getRawButton(10)
                || demoDriveRatio == 0)
            {
            demoDriveRatio = Hardware.delayPot.get();
            }
        Hardware.transmission.setSecondGearPercentage(
                ((demoDriveRatio
                        * (Robot.SECOND_GEAR_PERCENTAGE
                                - Hardware.MINIMUM_POT_SCALING_VALUE)
                        / (Hardware.DELAY_POT_DEGREES))
                        + Hardware.MINIMUM_POT_SCALING_VALUE));
        }
    Hardware.errorMessage.printError("test12", PrintsTo.roboRIO);
    // If we're running tests in the lab, NOT at competition
    if (Hardware.runningInLab == true)
        {
        Hardware.transmission
                .setDebugState(debugStateValues.DEBUG_NONE);
        Hardware.drive.setBrakeSpeed(.30);
        Hardware.transmission.setJoysticksAreReversed(true);
        Hardware.transmission.setFirstGearPercentage(1.0);
        Hardware.axisCamera.setHaveCamera(false);
        // System.out.println("t1: " + testMove1IsDone);
        // System.out.println("t2: " + testMove2IsDone);
        // System.out.println("t3: " + testMove3IsDone);
        // if we hit the left driver trigger in the lab
        // if (Hardware.leftDriver.getTrigger() == true)
        // // tell the code to start testing autonomous
        // testAuto = true;
        // // Test certain aspects of autonomous
        // if (testAuto == true)
        // if (!testMove1IsDone)
        // System.out.print("\n" + 1 + "\n");
        // if (Hardware.drive.driveStraightByInches(12.0, true,
        // .9, .9))
        // Autonomous.resetEncoders();
        // testMove1IsDone = true;
        // else if (!testMove2IsDone && (Hardware.leftIR.isOn()
        // || Hardware.rightIR.isOn()))
        // System.out.println("IR Detected.");
        // // if (Hardware.drive.turnLeftDegrees(60.0, true,
        // // -.55, .55))
        // // //Autonomous.resetEncoders();
        // // Hardware.transmission.controls(0.0, 0.0);
        // testMove2IsDone = true;
        // else if (!testMove3IsDone)
        // System.out.print("\n" + 3 + "\n");
        // if (Hardware.drive.driveStraightByInches(12.0, true,
        // .3, .3))
        // Autonomous.resetEncoders();
        // testMove3IsDone = true;
        // else if (!testCameraIsDone)
        // // if (Hardware.drive.driveByCamera(999.0, .2, .25,
        // // 0.0, true));
        // else
        // // Stop
        // Hardware.transmission.controls(0.0, 0.0);
        }
    // If we don't have the runningInLab flag set to true
    else
        {
        // Operator button 8 starts (or, in demo mode, blocks) the
        // camera-alignment state machine test.
        if (Hardware.leftOperator.getRawButton(8))
            {
            if (Hardware.inDemo.isOn() == false)// TODO use on a
                                                // demo-by-demo basis
                {
                testingAlignByCamera = true;// @FALSE
                }
            else
                {
                testingAlignByCamera = false;
                }
            }
        if (testingAlignByCamera == true)
            {
            currentCameraReturn = Hardware.drive
                    .alignByCameraStateMachine(
                            CAMERA_ALIGN_X_DEADBAND,
                            CAMERA_ALIGN_Y_DEADBAND,
                            CAMERA_X_AXIS_ADJUSTED_PROPORTIONAL_CENTER,
                            CAMERA_Y_AXIS_ADJUSTED_PROPORTIONAL_CENTER,
                            ALIGN_BY_CAMERA_TURNING_SPEED * 1.25,// TODO
                                                                 // super
                                                                 // ugly,
                                                                 // fix
                            ALIGN_BY_CAMERA_DRIVE_SPEED * 1.25,
                            (Hardware.rightOperator
                                    .getRawButton(10) == true
                                    && Hardware.rightOperator
                                            .getRawButton(
                                                    11) == true),
                            true, true);
            // When the state machine reports DONE we hand off to the
            // regular align-then-fire path below.
            if (currentCameraReturn == Drive.alignByCameraReturn.DONE)
                {
                // isFiringByCamera = false;
                testingAlignByCamera = false;
                // @AHK added from align call
                // Tell the code to align us to the camera
                isAligningByCamera = true;
                // Tell the code we want to fire when we're done
                isFiringByCamera = true;
                // fireRequested = true; //@AHK Removed for
                // align-drive-align
                Hardware.armOutOfWayTimer.stop();
                Hardware.armOutOfWayTimer.reset();
                Hardware.armOutOfWayTimer.start();
                currentCameraReturn = Drive.alignByCameraReturn.WORKING;
                }
            else if (currentCameraReturn == Drive.alignByCameraReturn.CANCELLED
                    || currentCameraReturn == Drive.alignByCameraReturn.NO_BLOBS_FOUND)// @AHK
                {
                isFiringByCamera = false;
                testingAlignByCamera = false;
                fireRequested = false;
                Hardware.armOutOfWayTimer.stop();
                Hardware.armOutOfWayTimer.reset();
                currentCameraReturn = Drive.alignByCameraReturn.WORKING;
                }
            }
        // if (Hardware.rightDriver.getRawButton(11) == true)
        // lowBattery = true;
        // else if (Hardware.rightDriver.getRawButton(10) == true)
        // lowBattery = false;
        // Begin arm movement code
        if (Math.abs(Hardware.rightOperator
                .getY()) >= PICKUP_ARM_CONTROL_DEADZONE)
            {
            // use the formula for the sign (value/abs(value)) to get
            // the
            // direction
            // we want the motor to go in,
            // and round it just in case it isn't exactly 1, then cast
            // to an int
            // make the compiler happy
            Hardware.pickupArm.moveReasonably(
                    -(int) Math
                            .round(Hardware.rightOperator.getY()
                                    / Math.abs(
                                            Hardware.rightOperator
                                                    .getY())),
                    /* Hardware.rightOperator.getRawButton(2) */ true);
            // TODO ^^ fix this when the arm pot is fixed!! ^^
            }
        else if (isAligningByCamera == false
        /* && testingAlignByCamera == false */)
            {
            // If the arm control joystick isn't beyond our deadzone,
            // stop the
            // arm.
            Hardware.pickupArm.stopArmMotor();
            }
        // End arm movement code
        // When the driver hits button 2, the robot will turn 180
        // degrees to the right so we can drive back through the Sally
        // Port.
        if (Hardware.leftDriver.getRawButton(2) == true)
            {
            Hardware.leftRearEncoder.reset();
            Hardware.rightRearEncoder.reset();
            isTurning180Degrees = false;
            // System.out.println("Turning 180 Degrees? " +
            // isTurning180Degrees);
            // only set to true if we are actually reversing
            // -- (disabled 180 degree turn 8/11/2016)
            Hardware.transmission
                    .setJoysticksAreReversed(false);
            }
        // If we've turned 180 degrees (going at 60% power and braking
        // at the end), we set the boolean back to false and reset
        // the encoders.
        if (isTurning180Degrees == true)
            {
            if (Hardware.drive.turnLeftDegrees(180.0, true, .6,
                    -.6) == true)
                {
                isTurning180Degrees = false;
                Hardware.leftRearEncoder.reset();
                Hardware.rightRearEncoder.reset();
                Hardware.transmission.controls(0.0, 0.0);
                Hardware.transmission
                        .setJoysticksAreReversed(false);
                }
            }
        // Begin Ball manipulation code
        // pull in the ball if the pull in button is pressed.
        if (Hardware.rightOperator
                .getRawButton(TAKE_IN_BALL_BUTTON) == true)
            {
            // if they press the 3rd button on the rightOperator
            // joystick
            // override the pickup mechanism
            Hardware.pickupArm
                    .pullInBall(
                            Hardware.rightOperator
                                    .getRawButton(3));
            }
        // push out the ball if the push out button is pressed
        else if (Hardware.rightOperator
                .getRawButton(PUSH_OUT_BALL_BUTTON) == true)
            {
            Hardware.pickupArm.pushOutBall();
            }
        // If neither the pull in or the push out button are pressed, stop
        // the
        // intake motors
        else
        // if (isAligningByCamera == false //@AHK removed for demo,
        // uncomment.
        /* && testingAlignByCamera == false ) */
            {
            Hardware.pickupArm.stopIntakeMotors();
            }
        // block of code to fire
        if (Hardware.leftOperator.getTrigger() == true)
            {
            // Tell the code to start firing
            fireRequested = true;
            // ballFiring latches so images are saved once per trigger pull.
            if (ballFiring == false)
                {
                ballFiring = true;
                Hardware.axisCamera.saveImagesSafely();
                // Hardware.axisCamera
                // .saveTextSafely("This is a test");
                }
            Hardware.armOutOfWayTimer.start();
            }
        else
            {
            ballFiring = false;
            }
        // if the override button is pressed and we want to fire
        if (/*
             * Hardware.leftOperator
             * .getRawButton(FIRE_OVERRIDE_BUTTON) == true
             * &&
             */ fireRequested == true)
            {
            // FIRE NO MATTER WHAT!!!!!
            if (fire(3, true) == true)
                {
                // We've shot our ball, we don't want to fire
                // anymore.
                fireRequested = false;
                }
            }
        // If the drivers decided they were being stupid and we don't want
        // fire anymore
        if (Hardware.leftOperator
                .getRawButton(FIRE_CANCEL_BUTTON) == true
                || (Hardware.rightOperator.getRawButton(10) == true
                        && Hardware.rightOperator
                                .getRawButton(11) == true))
            {
            // Stop asking the code to fire
            fireRequested = false;
            // or cancel turning 180.
            // I'm commondeering this code for a GPCB (general purpose
            // cancel button)
            isTurning180Degrees = false;
            }
        // if we want to fire, the arm is out of the way, and we have enough
        // pressure so we don't hurt ourselves.
        if (fireRequested == true
                && Hardware.pickupArm.moveToPosition(
                        ManipulatorArm.ArmPosition.CLEAR_OF_FIRING_ARM) == true
                && Hardware.armOutOfWayTimer
                        .get() >= ARM_IS_OUT_OF_WAY_TIME
                && Hardware.leftOperator
                        .getRawButton(FIRE_OVERRIDE_BUTTON) != true)
            {
            // fire, if we're ready to
            if (fire(3, false) == true)
                {
                // if we're done firing, drop the request
                fireRequested = false;
                Hardware.armOutOfWayTimer.stop();
                Hardware.armOutOfWayTimer.reset();
                }
            }
        // Begin raise/lower camera block
        // If the camera is down and we press the button.
        if (Hardware.cameraToggleButton.isOnCheckNow() == false
                && isAligningByCamera == false
        /* && testingAlignByCamera == false */)
            {
            // raise the camera
            Hardware.cameraSolenoid
                    .set(DoubleSolenoid.Value.kForward);
            // Hardware.ringLightRelay.set(Value.kOn);
            }
        // If the camera is up and we press the toggle button.
        if (Hardware.cameraToggleButton.isOnCheckNow() == true
                && isAligningByCamera == false
        /* && testingAlignByCamera == false */)
            {
            // Drop the camera
            Hardware.cameraSolenoid
                    .set(DoubleSolenoid.Value.kReverse);
            // Hardware.ringLightRelay.set(Value.kOff);
            }
        // end raise/lower camera block
        // Block of code to align us on the goal using the camera
        // Will fire the boulder when done.
        if (Hardware.rightOperator.getTrigger() == true)
            {
            if (Hardware.inDemo.isOn() == false)// TODO use on a
                                                // demo-by-demo basis
                {
                // Tell the code to align us to the camera
                isAligningByCamera = true;
                // Tell the code we want to fire when we're done
                isFiringByCamera = true;
                }
            else
                {
                // Tell the code to align us to the camera
                isAligningByCamera = false;
                // Tell the code we want to fire when we're done
                isFiringByCamera = false;
                }
            }
        // Align, but do not fire.
        if (Hardware.leftOperator.getRawButton(5))
            {
            if (Hardware.inDemo.isOn() == false)
                {
                isAligningByCamera = true;
                }
            else
                {
                isAligningByCamera = false;
                }
            Hardware.testingTimer.reset();
            Hardware.testingTimer.start();
            }
        // If we want to point at the goal using the camera
        if (isAligningByCamera == true)
            {
            // check if there is a ball in the arm
            if (Hardware.armIR.isOn() == true)
                {
                // move the arm to deposit position
                if (Hardware.pickupArm
                        .moveToPosition(
                                ArmPosition.DEPOSIT) == true)
                    {
                    // put the ball in the catapult
                    Hardware.pickupArm.pullInBall(true);
                    }
                }
            // Keep trying to point at the goal
            currentCameraReturn = Hardware.drive.alignByCamera(
                    PERCENT_IMAGE_PROCESSING_DEADBAND,
                    CAMERA_ALIGNMENT_TURNING_SPEED,
                    CAMERA_X_AXIS_ADJUSTED_PROPORTIONAL_CENTER,
                    ADJUST_DEADBAND_BY_PERCENTAGE, false);// -.375
            // if (Hardware.drive.alignByCamera(
            // PERCENT_IMAGE_PROCESSING_DEADBAND,
            // CAMERA_ALIGNMENT_TURNING_SPEED, -.30, //-.483,
            // false) == true)
            if (currentCameraReturn == Drive.alignByCameraReturn.DONE)
                {
                // Once we're in the center, tell the code we no
                // longer care
                // about
                // steering towards the goal
                isAligningByCamera = false;
                // If using right trigger. FIRE.
                if (isFiringByCamera == true
                        && Hardware.rightOperator
                                .getRawButton(10) == false
                        && Hardware.rightOperator
                                .getRawButton(11) == false)
                    {
                    fireRequested = true;
                    Hardware.armOutOfWayTimer.reset();
                    Hardware.armOutOfWayTimer.start();
                    isFiringByCamera = false;
                    }
                currentCameraReturn = Drive.alignByCameraReturn.WORKING;
                Hardware.testingTimer.stop();
                System.out.println("Time to quit:"
                        + Hardware.testingTimer.get());
                }
            // cancel the align request if the right operator presses
            // buttons 10 and
            // 11 at the same time.
            if (currentCameraReturn == Drive.alignByCameraReturn.CANCELLED
                    || currentCameraReturn == Drive.alignByCameraReturn.NO_BLOBS_FOUND)// @AHK
                                                                                       // test
                                                                                       // low
                                                                                       // priority
                {
                isAligningByCamera = false;
                currentCameraReturn = Drive.alignByCameraReturn.WORKING;
                // testingAlignByCamera = false;
                fireRequested = false;
                isFiringByCamera = false;
                }
            }
        // end alignByCameraBlock
        // block of code to fire
        // NOTE(review): this trigger/override/cancel firing block repeats the
        // one earlier in this method; both run every loop.
        if (Hardware.leftOperator.getTrigger() == true)
            {
            // Tell the code to start firing
            fireRequested = true;
            Hardware.armOutOfWayTimer.start();
            }
        // if the override button is pressed and we want to fire
        if (Hardware.leftOperator
                .getRawButton(FIRE_OVERRIDE_BUTTON) == true
                && fireRequested == true)
            {
            // FIRE NO MATTER WHAT!!!!!
            if (fire(3, true) == true)
                {
                // We've shot our ball, we don't want to fire
                // anymore.
                // isFiringByCamera = false;
                fireRequested = false;
                }
            }
        // If the drivers decided they were being stupid and we don't want
        // fire anymore
        if (Hardware.leftOperator
                .getRawButton(FIRE_CANCEL_BUTTON) == true)
            {
            // Stop asking the code to fire
            isFiringByCamera = false;
            fireRequested = false;
            }
        // if we want to fire, the arm is out of the way, and we have enough
        // pressure so we don't hurt ourselves.
        if (fireRequested == true
                && Hardware.pickupArm.moveToPosition(
                        ManipulatorArm.ArmPosition.CLEAR_OF_FIRING_ARM) == true
                && Hardware.armOutOfWayTimer
                        .get() >= ARM_IS_OUT_OF_WAY_TIME
                && Hardware.leftOperator
                        .getRawButton(FIRE_OVERRIDE_BUTTON) != true)
            {
            if (Hardware.armIR.isOn() == true)
                {
                if (Hardware.pickupArm
                        .moveToPosition(
                                ArmPosition.DEPOSIT))
                    {
                    Hardware.pickupArm.pullInBall(true);
                    }
                }
            else
                {// fire, if we're ready to
                if (fire(3, false) == true)
                    {
                    // if we're done firing, drop the request
                    fireRequested = false;
                    isFiringByCamera = false;
                    Hardware.armOutOfWayTimer.stop();
                    Hardware.armOutOfWayTimer.reset();
                    }
                }
            }
        // end fire block
        // block of code to tell the drivers where to go
        // TODO finish based on camera input and IR sensors
        // if the rightIR detects HDPE and the left one doesn't
        // if one of the IR's detect HDPE
        // if (Hardware.rightIR.isOn() == true
        // || Hardware.leftIR.isOn() == true)
        // //Tell the drivers to stop and hopefully alignByCamera
        //// Hardware.arrowDashboard
        //// .setDirection(Guidance.Direction.linedUp);
        // if (processingImage == false)
        // Hardware.cameraInTeleopTimer.start();
        // Hardware.axisCamera.writeBrightness(
        // Hardware.MINIMUM_AXIS_CAMERA_BRIGHTNESS);
        // //Woah, that's too dark! Someone turn on the ringlight!
        // Hardware.ringLightRelay.set(Value.kOn);
        // processingImage = true;
        // if (processingImage == true
        // && Hardware.cameraInTeleopTimer.get() >= .25)
        // try
        // Hardware.imageProcessor
        // .updateImage(Hardware.axisCamera.getImage());
        // //This is NI yelling at us for something being wrong
        // catch (NIVisionException e)
        // //if something wrong happens, tell the stupid programmers
        // //who let it happen more information about where it came from
        // e.printStackTrace();
        // //tell imageProcessor to use the image we just took to look for
        // //blobs
        // Hardware.imageProcessor.updateParticleAnalysisReports();
        // if(Hardware.imageProcessor.getParticleAnalysisReports()[0].center_mass_x
        // Hardware.arrowDashboard.setDirection(Guidance.Direction.left);
        // else
        // if(Hardware.imageProcessor.getParticleAnalysisReports()[0].center_mass_x
        // Hardware.arrowDashboard.setDirection(Guidance.Direction.right);
        // else
        // Hardware.arrowDashboard.setDirection(Guidance.Direction.linedUp);
        // //If neither IR detects anything on the ground
        // else
        // //trust the camera
        // //TODO base these ones on the camera if we have one.
        // Hardware.arrowDashboard
        // .setDirection(Guidance.Direction.neutral);
        // //put the arrows on the screen
        // Hardware.arrowDashboard.update();
        // If the ball is in the robot, update the driver station with
        // that info. This will also light up the Driver Station green
        // when it updates the status.
        if (Hardware.armIR.isOn() == true)
            {
            Guidance.updateBallStatus(true);
            }
        else
            {
            Guidance.updateBallStatus(false);
            }
        // End driver direction block
        // Takes Pictures based on Operator Button stuff.
        takePicture();
        // Driving the Robot
        // TODO delete all conditionals.
        // If we want to run a speed test, tell the code that
        // if (Hardware.leftDriver.getRawButton(8) == true)
        // Hardware.forwardToggleButton.update();
        // If the stop testing button (BUTTON 8 ON LEFT DRIVER)
        // is toggled on and we aren't moving or stopping,
        // then start moving.
        if (Hardware.forwardToggleButton.isOnCheckNow() == true
                && brakingTesting == false
                && motionToggled == false)
            {
            motionToggled = true;
            }
        if (motionToggled == true)
            {
            Hardware.transmission.controls(1.0, 1.0);
            Hardware.transmission.setJoystickDeadbandRange(0.0);
            }
        // If the button is toggled off, then stop moving and
        // start stopping.
        if (Hardware.forwardToggleButton.isOnCheckNow() == false
                && motionToggled == true)
            {
            motionToggled = false;
            brakingTesting = true;
            }
        if (brakingTesting == true)
            {
            // prints
            System.out.println(Hardware.transmission.stop().name());
            if (Hardware.transmission
                    .stop() != Hardware.transmission.inMotion)
                {
                System.out.println(Hardware.transmission.stop().name());
                brakingTesting = false;
                }
            }
        // If we press the brake button, robot brakes
        /*
         * if (Hardware.leftDriver
         * .getRawButton(BRAKE_JOYSTICK_BUTTON_FIVE) == true)
         * {
         * Hardware.transmission.setJoystickDeadbandRange(0.0);
         * Hardware.drive.driveContinuous(LEFT_MOTOR_BRAKE_SPEED,
         * RIGHT_MOTOR_BRAKE_SPEED);
         * }
         *
         * else if (Hardware.leftDriver
         * .getRawButton(BRAKE_JOYSTICK_BUTTON_FOUR) == true)
         * {
         * Hardware.transmission.setJoystickDeadbandRange(0.0);
         * Hardware.drive.driveContinuous(LEFT_MOTOR_BRAKE_SPEED_TWO,
         * RIGHT_MOTOR_BRAKE_SPEED_TWO);
         * }
         */
        // when brake button is pressed motor values reverse
        loopCounter++;
        // adds one every time teleop loops
        // checks to see if the left driver button 4 is being pressed
        // Labeled "Brake FWD"- Button 4- moves wheels slightly FORWARD
        if (Hardware.leftDriver
                .getRawButton(BRAKE_JOYSTICK_BUTTON_FOUR) == true)
            {
            // determines what number loop teleop is in, sets motors to
            // a positive number, and sets deadband to zero
            // if (loopCounter % BRAKING_INTERVAL < BRAKING_INTERVAL
            {
            Hardware.transmission
                    .setJoystickDeadbandRange(0.0);
            // Hardware.drive.driveContinuous(MOTOR_HOLD_SPEED,
            // MOTOR_HOLD_SPEED);
            Hardware.rightRearMotor.set(-.14);
            Hardware.rightFrontMotor.set(-.14);
            Hardware.leftRearMotor.set(-.14);
            Hardware.leftFrontMotor.set(-.14);
            }
            }
        // determines what number loop teleop is in then sets motors
        // to a negative number
        // else
        // Labeled "Brake RVS"- Button 5- moves wheels BACKWARD
        else if (Hardware.leftDriver
                .getRawButton(BRAKE_JOYSTICK_BUTTON_FIVE) == true)
            {
            Hardware.transmission.setJoystickDeadbandRange(0.0);
            // Hardware.drive.driveContinuous(-MOTOR_HOLD_SPEED,
            // -MOTOR_HOLD_SPEED);
            Hardware.rightRearMotor.set(.14);
            Hardware.rightFrontMotor.set(.14);
            Hardware.leftRearMotor.set(.14);
            Hardware.leftFrontMotor.set(.14);
            }
        // sets deadband back to 20%
        else
            {
            Hardware.transmission.setJoystickDeadbandRange(.20);
            }
        // Left Drive Button 7 will activate transmission.stop(), a
        // a poorly named brake function- SERIOUSLY PEOPLE BE MORE CREATIVE
        // Part of a project working on the transmission.stop() function
        // as well as the previous chain of if statements
        // Worked on by Ashley Espeland and Cole Ramos
        if (Hardware.leftDriver.getRawButton(7) == true)
            {
            Hardware.transmission.stop();
            }
        // Only let the drivers drive if we're not speed testing or aligning
        // by camera
        // drive the robot with the joysticks
        // TODO uncomment this conditional
        // TODO delete all conditionals. Fix brake
        if (/* speedTesting == false && */ isAligningByCamera == false
                && testingAlignByCamera == false
                && fireRequested == false
                && Hardware.leftDriver
                        .getRawButton(
                                BRAKE_JOYSTICK_BUTTON_FIVE) == false
                && Hardware.leftDriver.getRawButton(
                        BRAKE_JOYSTICK_BUTTON_FOUR) == false
                && isTurning180Degrees == false
                && Hardware.rightOperator.getRawButton(6) == false
                && motionToggled == false
                && brakingTesting == false)// TODO
                                           // remove
                                           // last
                                           // term
            {
            driveRobot();
            // if we want to run a speed test, run it until we're done.
            }
        if (Hardware.leftOperator.getRawButton(9))
            {
            Hardware.axisCamera.writeBrightness(
                    Hardware.NORMAL_AXIS_CAMERA_BRIGHTNESS);
            }
        // @AHK uncomment
        // else
        // Hardware.axisCamera.writeBrightness(
        // Hardware.MINIMUM_AXIS_CAMERA_BRIGHTNESS);
        // If the camera is up,
        if (Hardware.cameraSolenoid
                .get() == DoubleSolenoid.Value.kReverse)
        // the light is on.
            {
            Hardware.ringLightRelay.set(Relay.Value.kOn);
            }
        else
            {
            Hardware.ringLightRelay.set(Relay.Value.kOff);
            }
        }
    // Diagnostic: operator button 6 drives forward and prints encoder
    // distances.
    if (Hardware.rightOperator.getRawButton(6) == true)
        {
        Hardware.transmission.controls(1.0, 1.0);
        System.out.println("Left Distance = "
                + Hardware.leftRearEncoder.getDistance());
        System.out.println("Right Distance = "
                + Hardware.rightRearEncoder.getDistance());
        }
    // else
    // Hardware.transmission.controls(0.0, 0.0);
} // end Periodic

/* private static boolean isSpeedTesting = false */;

/**
 * Hand the transmission class the joystick values and motor controllers for
 * four wheel drive.
 */
public static void driveRobot ()
{
    Hardware.transmission.controls(Hardware.leftDriver.getY(),
            Hardware.rightDriver.getY());
    // If we're pressing the upshift button, shift up.
    if (Hardware.rightDriver
            .getRawButton(GEAR_UPSHIFT_JOYSTICK_BUTTON) == true)
        {
        Hardware.transmission.upshift(1);
        }
    // If we press the downshift button, shift down.
    if (Hardware.rightDriver
            .getRawButton(GEAR_DOWNSHIFT_JOYSTICK_BUTTON) == true)
        {
        Hardware.transmission.downshift(1);
        }
}

/**
 * gives user voltage on given pin on pdp board
 *
 * @param pinNumber
 *            pin on pdp
 * @author Becky Button
 */
public static void pinCurrent (int pinNumber)
{
    System.out.println("Current on port " + pinNumber + ": "
            + Hardware.pdp.getCurrent(pinNumber));
}

/**
 * Fires the catapult.
 *
 * @param power
 *            -Can be 1, 2, or 3; corresponds to the amount of solenoids
 *            used to
 *            fire.
 * @return
 *         -False if we're not yet done firing, true otherwise.
 */
public static boolean fire (int power, boolean override)
{
    // If we have enough pressure to fire or we want to ignore the
    // transducer and FIRE ANYWAY, fire. Otherwise wait until we can.
    // Stop the drivetrain while firing.
    Hardware.transmission.controls(0, 0);
    if (Hardware.transducer.get() >= FIRING_MIN_PSI
            || override == true)
        {
        // if (Hardware.pickupArm.moveToPosition(
        // ManipulatorArm.ArmPosition.CLEAR_OF_FIRING_ARM) == true)
        if (firstTimeFireRun == true)
            {
            // start the timer and don't run this block of code again
            Hardware.fireTimer.start();
            firstTimeFireRun = false;
            }
        // Fire with the number of solenoids that corresponds to the power
        // argument to this function.
        // NOTE(review): default deliberately falls through into case 3,
        // so any out-of-range power fires all three solenoids.
        switch (power)
            {
            case 1:
                Hardware.catapultSolenoid1.set(true);
                break;
            case 2:
                Hardware.catapultSolenoid2.set(true);
                Hardware.catapultSolenoid0.set(true);
                break;
            default:
            case 3:
                Hardware.catapultSolenoid0.set(true);
                Hardware.catapultSolenoid1.set(true);
                Hardware.catapultSolenoid2.set(true);
                break;
            }
        }
    // TODO reduce time to minimum possible
    // wait until we're done firing
    if (Hardware.fireTimer.get() >= .5)
        {
        // Release the solenoids, and then set up for the next time run
        Hardware.catapultSolenoid0.set(false);
        Hardware.catapultSolenoid1.set(false);
        Hardware.catapultSolenoid2.set(false);
        Hardware.fireTimer.stop();
        Hardware.fireTimer.reset();
        firstTimeFireRun = true;
        // Tell the code we're done firing
        return true;
        }
    // We're not done firing yet, keep calling me please!
    return false;
}

// Latch so the fire timer is only started on the first call of a firing cycle.
private static boolean firstTimeFireRun = true;

/**
 * Takes a picture, processes it and saves it with left operator joystick
 * take unlit picture: 6&7
 * take lit picture: 10&11
 */
public static void takePicture ()
{
    // If we click buttons 6+7 on the left operator joystick, we dim the
    // brightness a lot, turn the ringlight on, and then if we haven't
    // already taken an image then we do and set the boolean to true to
    // prevent us taking more images. Otherwise we don't turn on the
    // ringlight and we don't take a picture. We added a timer to delay
    // taking the picture for the brightness to dim and for the ring
    // light to turn on.
    if (Hardware.leftOperator.getRawButton(6) == true
            && Hardware.leftOperator.getRawButton(7) == true)
        {
        if (prepPic == false)
            {
            // Hardware.axisCamera.writeBrightness(
            // Hardware.MINIMUM_AXIS_CAMERA_BRIGHTNESS);
            Hardware.ringLightRelay.set(Value.kOn);
            Hardware.delayTimer.start();
            prepPic = true;
            takingLitImage = true;
            }
        }
    // Once the brightness is down and the ring light is on then the
    // picture is taken, the brightness returns to normal, the ringlight
    // is turned off, and the timer is stopped and reset.
    // @TODO Change .25 to a constant, see line 65 under Hardware
    // Replaced '.25' with Hardware.CAMERA_DELAY_TIME' change back if camera
    // fails
    if (Hardware.delayTimer.get() >= Hardware.CAMERA_DELAY_TIME
            && prepPic == true
            && takingLitImage == true)
        {
        Hardware.axisCamera.saveImagesSafely();
        prepPic = false;
        takingLitImage = false;
        }
    if (takingLitImage == false
            && Hardware.delayTimer.get() >= 1.0)
        {
        // Hardware.axisCamera.writeBrightness(
        // Hardware.NORMAL_AXIS_CAMERA_BRIGHTNESS);
        // Hardware.ringLightRelay.set(Value.kOff);
        Hardware.delayTimer.stop();
        Hardware.delayTimer.reset();
        }
    // If we click buttons 10+11, we take a picture without the
    // ringlight and set the boolean to true so we don't take a bunch of
    // other pictures.
    if (Hardware.leftOperator.getRawButton(10) == true
            && Hardware.leftOperator.getRawButton(11) == true)
        {
        if (takingUnlitImage == false)
            {
            takingUnlitImage = true;
            Hardware.axisCamera.saveImagesSafely();
            }
        }
    else
        takingUnlitImage = false;
    // if the left operator trigger is pressed, then we check to see if
    // we're taking a processed picture through the boolean. If we are
    // not currently taking a processed picture, then it lets us take a
    // picture and sets the boolean to true so we don't take multiple
    // pictures. If it is true, then it does nothing. If we don't click
    // the trigger, then the boolean resets itself to false to take
    // pictures again.
    // if (Hardware.leftOperator.getTrigger() == true)
    // if (processingImage == true)
    // processImage();
    // processingImage = false;
} // end Take picture

static boolean hasBegunTurning = true;

/**
 * stores print statements for future use in the print "bank", statements
 * are
 * commented out when
 * not in use, when you write a new print statement, "deposit" the statement
 * in
 * the "bank"
 * do not "withdraw" statements, unless directed to
 *
 * @author Ashley Espeland
 * @written 1/28/16
 *
 *          Edited by Ryan McGee
 *
 */
public static void printStatements ()
{
    // System.out.println("AligningByCamera = " + isAligningByCamera);
    // checks to see if the robot is aligning by camera
    // System.out.println("Demo?\t" + Hardware.inDemo.isOn());
    // System.out.println(Hardware.delayPot.get());
    // System.out.println("Left Joystick: " + Hardware.leftDriver.getY());
    // System.out
    // .println("Right Joystick: " + Hardware.rightDriver.getY());
    // System.out.println("Left Operator: " + Hardware.leftOperator.getY());
    // System.out.println("Right Operator: " + Hardware.rightOperator.getY());
    // System.out.println("left IR = " + Hardware.leftIR.isOn());
    // System.out.println("right IR = " + Hardware.rightIR.isOn());
    // System.out.println("Has ball IR = " + Hardware.armIR.isOn());
    // pinCurrent(12);
    // pinCurrent(13);
    // pinCurrent(14);
    // pinCurrent(15);
    // printAllPDPChannels();
    System.out.println(
            "delay pot = " + Hardware.delayPot.get());
    // System.out.println(
    // "delay scaling: " + (((double) Hardware.delayPot.get()
    // - Hardware.DELAY_POT_MIN_DEGREES)
    // / (double) (Hardware.DELAY_POT_DEGREES
    // - Hardware.DELAY_POT_MIN_DEGREES)));
    // prints the value of the transducer- (range in code is 50)
    // hits psi of 100 accurately
    // System.out.println("transducer = " + Hardware.transducer.get());
    // System.out.println("Arm Pot = " + Hardware.armPot.get());
    // Hardware.imageProcessor.processImage();
// if (Hardware.imageProcessor.reports.length > 0) // System.out.println("DistanceToGoal: " // + Hardware.imageProcessor.getZDistanceToTargetFT(0)); // prints value of the motors // System.out.println("RR Motor T = " + Hardware.rightRearMotor.get()); // System.out.println("LR Motor T = " + Hardware.leftRearMotor.get()); // System.out // .println("RF Motor T = " + Hardware.rightFrontMotor.get()); // System.out.println("LF Motor T = " + Hardware.leftFrontMotor.get()); // System.out.println("Arm Motor: " + Hardware.armMotor.get()); // System.out // .println("Intake Motor: " + Hardware.armIntakeMotor.get()); // prints the state of the solenoids // System.out.println("cameraSolenoid = " + Hardware.cameraSolenoid.get()); // System.out.println("catapultSolenoid0 = " + // Hardware.catapultSolenoid0.get()); // System.out.println("catapultSolenoid1 = " + // Hardware.catapultSolenoid1.get()); // System.out.println("catapultSolenoid2 = " + // Hardware.catapultSolenoid2.get()); // System.out.println( // "Right Rear Encoder Tics: " // + Hardware.rightRearEncoder.get()); // System.out.println( // "Left Rear Encoder Tics: " // // + Hardware.leftRearEncoder.get()); // System.out.println( // "RR distance = " // + Hardware.rightRearEncoder.getDistance()); // System.out.println( // "LR distance = " // + Hardware.leftRearEncoder.getDistance()); // System.out.println( // "RR distance = " + Hardware.rightRearEncoder.getDistance()); // System.out.println( // "LR distance = " + Hardware.leftRearEncoder.getDistance()); // System.out.println("Arm Motor = " + Hardware.armMotor.getDistance()); // prints state of switches // System.out.println("Autonomous Enabled Switch: " + // Hardware.autonomousEnabled.isOn()); // System.out.println( // "Comp Switch: " + Hardware.inCompetition.isOn()); System.out.println("Demo Switch: " + Hardware.inDemo.isOn()); // System.out.println("Position: " + // Hardware.startingPositionDial.getPosition()); // System.out.println("Position: " + // 
Hardware.startingPositionDial.getPosition()); // System.out.println(Hardware.ringLightRelay.get()); // System.out.println("Number of seen blobs:" // + Hardware.imageProcessor.getNumBlobs()); // Ultrasonic // System.out.println("Ultrasonic Dist: " // + Hardware.ultrasonic.getRefinedDistanceValue()); } // end printStatements private static final double MAXIMUM_TELEOP_SPEED = .85; private static final double CAMERA_ALIGN_Y_DEADBAND = .10; private static final double CAMERA_ALIGN_X_DEADBAND = .08; public static final double CAMERA_X_AXIS_ADJUSTED_PROPORTIONAL_CENTER = -.394;// -.365;// // -.375 public static final double CAMERA_Y_AXIS_ADJUSTED_PROPORTIONAL_CENTER = -.68;// -.182;// // -.192 private static final double ALIGN_BY_CAMERA_TURNING_SPEED = .75;// @AHK .5 private static final double ADJUST_DEADBAND_BY_PERCENTAGE = 0; private static final double ALIGN_BY_CAMERA_DRIVE_SPEED = .45; // right driver 3 private static final int GEAR_UPSHIFT_JOYSTICK_BUTTON = 3; // right driver 2 private static final int GEAR_DOWNSHIFT_JOYSTICK_BUTTON = 2; // left driver 4 private static final int BRAKE_JOYSTICK_BUTTON_FOUR = 4; // left driver 5 private static final int BRAKE_JOYSTICK_BUTTON_FIVE = 5; // left operator 2 private static final int CAMERA_TOGGLE_BUTTON = 2; // Right operator 2 private static final int FIRE_OVERRIDE_BUTTON = 4; // Left operator 3 private static final int FIRE_CANCEL_BUTTON = 3; // Right operator 4 private static final int TAKE_IN_BALL_BUTTON = 4; // right operator 5 private static final int PUSH_OUT_BALL_BUTTON = 5; private static final double PICKUP_ARM_CONTROL_DEADZONE = 0.2; private final static double PERCENT_IMAGE_PROCESSING_DEADBAND = .13; private final static double CAMERA_ALIGNMENT_TURNING_SPEED = .5; private final static double ARM_IS_OUT_OF_WAY_TIME = .10; private final static int BRAKING_INTERVAL = 4; private final static double MOTOR_HOLD_SPEED = 0.1; // minimum pressure when allowed to fire private static final int FIRING_MIN_PSI = 90; // 
TUNEABLES private static boolean testingAlignByCamera = false; private static boolean isAligningByCamera = false; private static Drive.alignByCameraReturn currentCameraReturn = Drive.alignByCameraReturn.WORKING; private static boolean isFiringByCamera = false; private static boolean cameraIsUp = false; private static boolean isDrivingByCamera = false; private static boolean fireRequested = false; private static boolean processingImage = true; // Boolean to check if we're taking a lit picture private static boolean takingLitImage = false; // Boolean to check if we're taking an unlit picture private static boolean takingUnlitImage = false; // this is for preparing to take a picture with the timer; changes // brightness, turns on ringlight, starts timer private static boolean prepPic = false; // Stores temporarily whether firingState is true, for use in whether the arm is // in the way private static boolean storeFiringState; private static int loopCounter = 0; private static boolean brakingTesting = false; private static boolean ballFiring = false; private static boolean motionToggled = false; } // end class
package ibis.satin.impl.loadBalancing; import ibis.ipl.IbisIdentifier; import ibis.ipl.ReceivePortIdentifier; import ibis.ipl.SendPort; import ibis.ipl.WriteMessage; import ibis.satin.impl.Config; import ibis.satin.impl.Satin; import ibis.satin.impl.communication.Communication; import java.io.IOException; /** * * @author rob * * A Victim represents an Ibis we can steal work from. * This class is immutable, only the sendport itself could be connected and * disconnected. * */ public final class Victim implements Config { private static int connectionCount = 0; private IbisIdentifier ident; private SendPort s; private ReceivePortIdentifier r; private boolean connected = false; private boolean closed = false; private int referenceCount = 0; public Victim(IbisIdentifier ident, SendPort s) { this.ident = ident; this.s = s; } public boolean equals(Object o) { if (o == this) { return true; } if (o instanceof Victim) { Victim other = (Victim) o; return other.ident.equals(ident); } return false; } public boolean equals(Victim other) { if (other == this) { return true; } return other.ident.equals(ident); } public int hashCode() { return ident.hashCode(); } private void disconnect() throws IOException { if (connected) { connected = false; connectionCount s.disconnect(r); } } private SendPort getSendPort() { if (closed) { return null; } if (!connected) { r = Communication.connect(s, ident, "satin port", Satin.CONNECT_TIMEOUT); if (r == null) { Config.commLogger.debug("SATIN '" + s.identifier().ibisIdentifier() + "': unable to connect to " + ident + ", might have crashed"); return null; } connected = true; connectionCount++; } return s; } public WriteMessage newMessage() throws IOException { SendPort send; synchronized (s) { send = getSendPort(); if (send != null) { referenceCount++; } else { throw new IOException("Could not connect to " + ident); } } return send.newMessage(); } public long finish(WriteMessage m) throws IOException { try { return m.finish(); } finally { 
synchronized (s) { referenceCount if (CLOSE_CONNECTIONS) { if (connectionCount >= MAX_CONNECTIONS && referenceCount == 0) { disconnect(); } } } } } public long finishKeepConnection(WriteMessage m) throws IOException { try { return m.finish(); } finally { synchronized (s) { referenceCount } } } public void loseConnection() throws IOException { if (CLOSE_CONNECTIONS) { synchronized(s) { if (connectionCount >= MAX_CONNECTIONS && referenceCount == 0) { disconnect(); } } } } public void close() { synchronized (s) { if (connected) { connected = false; connectionCount } closed = true; try { s.close(); } catch (Exception e) { // ignore Config.commLogger.warn("SATIN '" + s.identifier().ibisIdentifier() + "': port.close() throws exception (ignored)", e); } } } public IbisIdentifier getIdent() { return ident; } public boolean inDifferentCluster(IbisIdentifier other) { return !clusterOf(ident).equals(clusterOf(other)); } public static String clusterOf(IbisIdentifier id) { // Not correct: considers all nodes to be in different clusters // if there is only one level. --Ceriel // int count = id.location().numberOfLevels(); // return id.location().getLevel(count-1); return id.location().getParent().toString(); } }
package info.tregmine.listeners; import java.util.List; import org.bukkit.ChatColor; import org.bukkit.event.EventHandler; import org.bukkit.event.Listener; import org.bukkit.event.player.AsyncPlayerChatEvent; import info.tregmine.Tregmine; import info.tregmine.WebServer; import info.tregmine.api.TregminePlayer; import info.tregmine.database.DAOException; import info.tregmine.database.IContext; import info.tregmine.database.ILogDAO; import info.tregmine.database.IPlayerDAO; public class ChatListener implements Listener { private Tregmine plugin; public ChatListener(Tregmine instance) { this.plugin = instance; } @EventHandler public void onPlayerChat(AsyncPlayerChatEvent event) { TregminePlayer sender = plugin.getPlayer(event.getPlayer()); if (sender.getChatState() != TregminePlayer.ChatState.CHAT) { return; } String channel = sender.getChatChannel(); for (TregminePlayer to : plugin.getOnlinePlayers()) { if (to.getChatState() == TregminePlayer.ChatState.SETUP) { continue; } boolean ignored; try (IContext ctx = plugin.createContext()) { IPlayerDAO playerDAO = ctx.getPlayerDAO(); ignored = playerDAO.doesIgnore(to, sender); } catch (DAOException e) { throw new RuntimeException(e); } if (sender.getRank().canNotBeIgnored()) ignored = false; if (ignored == true) continue; ChatColor txtColor = ChatColor.WHITE; if (sender.equals(to)) { txtColor = ChatColor.GRAY; } String text = event.getMessage(); for (TregminePlayer online : plugin.getOnlinePlayers()) { if (text.contains(online.getName()) && !online.hasFlag(TregminePlayer.Flags.INVISIBLE)){ text = text.replaceAll(online.getName(), online.getChatName() + txtColor); } } List<String> player_keywords; try (IContext ctx = plugin.createContext()) { IPlayerDAO playerDAO = ctx.getPlayerDAO(); player_keywords = playerDAO.getKeywords(to); } catch (DAOException e) { throw new RuntimeException(e); } if (player_keywords.size() > 0 && player_keywords != null) { for( String keyword : player_keywords ){ if 
(text.toLowerCase().contains(keyword.toLowerCase())) { text = text.replaceAll(keyword, ChatColor.AQUA + keyword + txtColor); } } } if (sender.getChatChannel().equalsIgnoreCase(to.getChatChannel()) || to.hasFlag(TregminePlayer.Flags.CHANNEL_VIEW)) { if ("GLOBAL".equalsIgnoreCase(sender.getChatChannel())) { to.sendMessage("<" + sender.getChatName() + ChatColor.WHITE + "> " + txtColor + text); } else { to.sendMessage(channel + " <" + sender.getChatName() + ChatColor.WHITE + "> " + txtColor + text); } } if (text.contains(to.getName()) && "GLOBAL".equalsIgnoreCase(sender.getChatChannel()) && !("GLOBAL".equalsIgnoreCase(to.getChatChannel()))) { to.sendMessage(ChatColor.BLUE + "You were mentioned in GLOBAL by " + sender.getNameColor() + sender.getChatName()); } } Tregmine.LOGGER.info(channel + " <" + sender.getName() + "> " + event.getMessage()); try (IContext ctx = plugin.createContext()) { ILogDAO logDAO = ctx.getLogDAO(); logDAO.insertChatMessage(sender, channel, event.getMessage()); } catch (DAOException e) { throw new RuntimeException(e); } event.setCancelled(true); WebServer server = plugin.getWebServer(); server.sendChatMessage(new WebServer.ChatMessage(sender, channel, event.getMessage())); } }
package io.flutter.inspector; import com.google.common.collect.Lists; import com.google.gson.JsonArray; import com.google.gson.JsonElement; import com.google.gson.JsonNull; import com.google.gson.JsonObject; import com.intellij.openapi.project.Project; import com.intellij.openapi.project.ProjectUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.xdebugger.XSourcePosition; import com.jetbrains.lang.dart.analyzer.DartAnalysisServerService; import com.jetbrains.lang.dart.ide.runner.server.vmService.frame.DartVmServiceValue; import io.flutter.run.daemon.FlutterApp; import io.flutter.utils.CustomIconMaker; import io.flutter.utils.JsonUtils; import org.apache.commons.lang.StringUtils; import org.dartlang.analysis.server.protocol.HoverInformation; import org.dartlang.vm.service.element.InstanceRef; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import java.util.*; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; import java.util.stream.Collectors; import java.util.stream.Stream; import static io.flutter.sdk.FlutterSettingsConfigurable.WIDGET_FILTERING_ENABLED; public class DiagnosticsNode { private static final CustomIconMaker iconMaker = new CustomIconMaker(); private InspectorSourceLocation location; private DiagnosticsNode parent; private CompletableFuture<String> propertyDocFuture; public DiagnosticsNode(JsonObject json, InspectorService inspectorService) { this.inspectorService = inspectorService; this.json = json; } @Override public boolean equals(Object other) { if (other instanceof DiagnosticsNode) { final DiagnosticsNode otherNode = (DiagnosticsNode)other; return getDartDiagnosticRef().equals(otherNode.getDartDiagnosticRef()); } return false; } @Override public String toString() { final String name = getName(); if (StringUtils.isEmpty(name) || !getShowName()) { return getDescription(); } return name + getSeparator() + ' ' + 
getDescription(); } /** * Set this node's parent. */ public void setParent(DiagnosticsNode parent) { this.parent = parent; } /** * This node's parent (if it's been set). */ @Nullable public DiagnosticsNode getParent() { return parent; } /** * Separator text to show between property names and values. */ public String getSeparator() { return getShowSeparator() ? ":" : ""; } /** * Label describing the [DiagnosticsNode], typically shown before a separator * (see [showSeparator]). * <p> * The name should be omitted if the [showName] property is false. */ public String getName() { return getStringMember("name"); } /** * Whether to show a separator between [name] and description. * <p> * If false, name and description should be shown with no separation. * `:` is typically used as a separator when displaying as text. */ public boolean getShowSeparator() { return getBooleanMember("showSeparator", true); } /** * Returns a description with a short summary of the node itself not * including children or properties. * <p> * `parentConfiguration` specifies how the parent is rendered as text art. * For example, if the parent does not line break between properties, the * description of a property should also be a single line if possible. */ public String getDescription() { return getStringMember("description"); } /** * Priority level of the diagnostic used to control which diagnostics should * be shown and filtered. * <p> * Typically this only makes sense to set to a different value than * [DiagnosticLevel.info] for diagnostics representing properties. Some * subclasses have a `level` argument to their constructor which influences * the value returned here but other factors also influence it. For example, * whether an exception is thrown computing a property value * [DiagnosticLevel.error] is returned. 
*/ public DiagnosticLevel getLevel() { return getLevelMember("level", DiagnosticLevel.info); } /** * Whether the name of the property should be shown when showing the default * view of the tree. * <p> * This could be set to false (hiding the name) if the value's description * will make the name self-evident. */ public boolean getShowName() { return getBooleanMember("showName", true); } /** * Description to show if the node has no displayed properties or children. */ public String getEmptyBodyDescription() { return getStringMember("emptyBodyDescription"); } /** * Hint for how the node should be displayed. */ public DiagnosticsTreeStyle getStyle() { return getStyleMember("style", DiagnosticsTreeStyle.sparse); } /** * Dart class defining the diagnostic node. * For example, DiagnosticProperty<Color>, IntProperty, StringProperty, etc. * This should rarely be required except for cases where custom rendering is desired * of a specific Dart diagnostic class. */ String getType() { return getStringMember("type"); } /** * Whether the description is enclosed in double quotes. * <p> * Only relevant for String properties. */ public boolean getIsQuoted() { return getBooleanMember("quoted", false); } public boolean hasIsQuoted() { return json.has("quoted"); } /** * Optional unit the [value] is measured in. * <p> * Unit must be acceptable to display immediately after a number with no * spaces. For example: 'physical pixels per logical pixel' should be a * [tooltip] not a [unit]. * <p> * Only specified for Number properties. */ public String getUnit() { return getStringMember("unit"); } public boolean hasUnit() { return json.has("unit"); } /** * String describing just the numeric [value] without a unit suffix. * <p> * Only specified for Number properties. */ public String getNumberToString() { return getStringMember("numberToString"); } public boolean hasNumberToString() { return json.has("numberToString"); } /** * Description to use if the property [value] is true. 
* <p> * If not specified and [value] equals true the property's priority [level] * will be [DiagnosticLevel.hidden]. * <p> * Only applies to Flag properties. */ public String getIfTrue() { return getStringMember("ifTrue"); } public boolean hasIfTrue() { return json.has("ifTrue"); } /** * Description to use if the property value is false. * <p> * If not specified and [value] equals false, the property's priority [level] * will be [DiagnosticLevel.hidden]. * <p> * Only applies to Flag properties. */ public String getIfFalse() { return getStringMember("ifFalse"); } public boolean hasIfFalse() { return json.has("ifFalse"); } /** * Value as a List of strings. * <p> * The raw value can always be extracted with the regular observatory protocol. * <p> * Only applies to IterableProperty. */ public ArrayList<String> getValues() { if (!json.has("values")) { return null; } final JsonArray rawValues = json.getAsJsonArray("values"); final ArrayList<String> values = new ArrayList<>(rawValues.size()); for (int i = 0; i < rawValues.size(); ++i) { values.add(rawValues.get(i).getAsString()); } return values; } public boolean hasValues() { return json.has("values"); } /** * Description to use if the property [value] is not null. * <p> * If the property [value] is not null and [ifPresent] is null, the * [level] for the property is [DiagnosticsLevel.hidden] and the description * from superclass is used. * <p> * Only specified for ObjectFlagProperty. */ public String getIfPresent() { return getStringMember("ifPresent"); } public boolean hasIfPresent() { return json.has("ifPresent"); } /** * If the [value] of the property equals [defaultValue] the priority [level] * of the property is downgraded to [DiagnosticLevel.fine] as the property * value is uninteresting. * <p> * This is the default value of the object represented as a String. * The actual Dart object representing the defaultValue can also be accessed via * the observatory protocol. 
We can add a convenience helper method to access it here * if there is a use case. * <p> * Typically you shouldn't need to worry about the default value as the underlying * machinery will generate appropriate description and priority level based on the * default value. */ public String getDefaultValue() { return getStringMember("defaultValue"); } /** * Whether a property has a default value. */ public boolean hasDefaultValue() { return json.has("defaultValue"); } /** * Description if the property description would otherwise be empty. * <p> * Consider showing the property value in gray in an IDE if the description matches * ifEmpty. */ public String getIfEmpty() { return getStringMember("ifEmpty"); } /** * Description if the property [value] is null. */ public String getIfNull() { return getStringMember("ifNull"); } /** * Optional tooltip typically describing the property. * <p> * Example tooltip: 'physical pixels per logical pixel' * <p> * If present, the tooltip is added in parenthesis after the raw value when * generating the string description. */ public String getTooltip() { return getStringMember("tooltip"); } public boolean hasTooltip() { return json.has("tooltip"); } /** * Whether a [value] of null causes the property to have [level] * [DiagnosticLevel.warning] warning that the property is missing a [value]. */ public boolean getMissingIfNull() { return getBooleanMember("missingIfNull", false); } /** * String representation of exception thrown if accessing the property * [value] threw an exception. */ public String exception() { return getStringMember("exception"); } /** * Whether accessing the property throws an exception. 
*/ boolean hasException() { return json.has("exception"); } public boolean hasCreationLocation() { return location != null || json.has("creationLocation"); } public InspectorSourceLocation getCreationLocation() { if (location != null) { return location; } if (!hasCreationLocation()) { return null; } location = new InspectorSourceLocation(json.getAsJsonObject("creationLocation"), null); return location; } /** * String representation of the type of the property [value]. * <p> * This is determined from the type argument `T` used to instantiate the * [DiagnosticsProperty] class. This means that the type is available even if * [value] is null, but it also means that the [propertyType] is only as * accurate as the type provided when invoking the constructor. * <p> * Generally, this is only useful for diagnostic tools that should display * null values in a manner consistent with the property type. For example, a * tool might display a null [Color] value as an empty rectangle instead of * the word "null". */ public String getPropertyType() { return getStringMember("propertyType"); } /** * If the [value] of the property equals [defaultValue] the priority [level] * of the property is downgraded to [DiagnosticLevel.fine] as the property * value is uninteresting. * <p> * [defaultValue] has type [T] or is [kNoDefaultValue]. */ public DiagnosticLevel getDefaultLevel() { return getLevelMember("defaultLevel", DiagnosticLevel.info); } /** * Whether the value of the property is a Diagnosticable value itself. * Optionally, properties that are themselves Diagnosticable should be * displayed as trees of diagnosticable properties and children. * <p> * TODO(jacobr): add helpers to get the properties and children of * this diagnosticable value even if getChildren and getProperties * would return null. This will allow showing nested data for properties * that don't show children by default in other debugging output but * could. 
*/ public boolean getIsDiagnosticableValue() { return getBooleanMember("isDiagnosticableValue", false); } /** * Service used to retrieve more detailed information about the value of * the property and its children and properties. */ private final InspectorService inspectorService; /** * JSON describing the diagnostic node. */ private final JsonObject json; private CompletableFuture<ArrayList<DiagnosticsNode>> children; private CompletableFuture<Map<String, InstanceRef>> valueProperties; private final boolean isProperty = false; public boolean isProperty() { return isProperty; } public String getStringMember(@NotNull String memberName) { return JsonUtils.getStringMember(json, memberName); } private boolean getBooleanMember(String memberName, boolean defaultValue) { if (!json.has(memberName)) { return defaultValue; } final JsonElement value = json.get(memberName); if (value instanceof JsonNull) { return defaultValue; } return value.getAsBoolean(); } private DiagnosticLevel getLevelMember(String memberName, DiagnosticLevel defaultValue) { if (!json.has(memberName)) { return defaultValue; } final JsonElement value = json.get(memberName); if (value instanceof JsonNull) { return defaultValue; } return DiagnosticLevel.valueOf(value.getAsString()); } private DiagnosticsTreeStyle getStyleMember(String memberName, DiagnosticsTreeStyle defaultValue) { if (!json.has(memberName)) { return defaultValue; } final JsonElement value = json.get(memberName); if (value instanceof JsonNull) { return defaultValue; } return DiagnosticsTreeStyle.valueOf(value.getAsString()); } /** * Returns a reference to the value the DiagnosticsNode object is describing. */ public InspectorInstanceRef getValueRef() { final JsonElement valueId = json.get("valueId"); return new InspectorInstanceRef(valueId.isJsonNull() ? 
null : valueId.getAsString()); } public boolean isEnumProperty() { final String type = getType(); return type != null && type.startsWith("EnumProperty<"); } /** * Returns a list of raw Dart property values of the Dart value of this * property that are useful for custom display of the property value. * For example, get the red, green, and blue components of color. * <p> * Unfortunately we cannot just use the list of fields from the Observatory * Instance object for the Dart value because much of the relevant * information to display good visualizations of Flutter values is stored * in properties not in fields. */ public CompletableFuture<Map<String, InstanceRef>> getValueProperties() { final InspectorInstanceRef valueRef = getValueRef(); if (valueProperties == null) { if (getPropertyType() == null || valueRef == null || valueRef.getId() == null) { valueProperties = new CompletableFuture<>(); valueProperties.complete(null); return valueProperties; } if (isEnumProperty()) { // Populate all the enum property values. valueProperties = inspectorService.getEnumPropertyValues(getValueRef()); return valueProperties; } final String[] propertyNames; // Add more cases here as visual displays for additional Dart objects // are added. switch (getPropertyType()) { case "Color": propertyNames = new String[]{"red", "green", "blue", "alpha"}; break; case "IconData": propertyNames = new String[]{"codePoint"}; break; default: valueProperties = new CompletableFuture<>(); valueProperties.complete(null); return valueProperties; } valueProperties = inspectorService.getDartObjectProperties(getValueRef(), propertyNames); } return valueProperties; } public boolean hasChildren() { return getBooleanMember("hasChildren", false); } public boolean isCreatedByLocalProject() { return getBooleanMember("createdByLocalProject", false); } /** * Check whether children are already available. 
*/ public boolean childrenReady() { return children != null && children.isDone(); } public CompletableFuture<ArrayList<DiagnosticsNode>> getChildren() { if (children == null) { if (hasChildren()) { children = inspectorService.getChildren(getDartDiagnosticRef()); // Apply filters. if (WIDGET_FILTERING_ENABLED) { try { final ArrayList<DiagnosticsNode> nodes = children.get(); final ArrayList<DiagnosticsNode> filtered = Lists.newArrayList(nodes); // Filter private classes as a baby-step. filtered.removeIf(FlutterWidget.Filter.PRIVATE_CLASS); if (!filtered.isEmpty()) { children = new CompletableFuture<>(); children.complete(filtered); } else { if (!nodes.isEmpty()) { final CompletableFuture<ArrayList<DiagnosticsNode>> future = nodes.get(0).getChildren(); for (int i = 1; i < nodes.size(); ++i) { future.thenCombine(nodes.get(i).getChildren(), (nodes1, nodes2) -> Stream.of(nodes1, nodes2) .flatMap(Collection::stream) .collect(Collectors.toList())); } return future; } } } catch (InterruptedException | ExecutionException e) { // Ignore. } } } else { // Known to have no children so we can provide the children immediately. children = new CompletableFuture<>(); children.complete(new ArrayList<>()); } } return children; } /** * Reference the actual Dart DiagnosticsNode object this object is referencing. */ public InspectorInstanceRef getDartDiagnosticRef() { return new InspectorInstanceRef(json.get("objectId").getAsString()); } public CompletableFuture<ArrayList<DiagnosticsNode>> getProperties() { final CompletableFuture<ArrayList<DiagnosticsNode>> properties = inspectorService.getProperties(getDartDiagnosticRef()); return properties.thenApplyAsync((ArrayList<DiagnosticsNode> nodes) -> { // Map locations to property nodes where available. 
final InspectorSourceLocation creationLocation = getCreationLocation(); if (creationLocation != null) { final ArrayList<InspectorSourceLocation> parameterLocations = creationLocation.getParameterLocations(); if (parameterLocations != null) { final Map<String, InspectorSourceLocation> names = new HashMap<>(); for (InspectorSourceLocation location : parameterLocations) { final String name = location.getName(); if (name != null) { names.put(name, location); } } for (DiagnosticsNode node : nodes) { node.setParent(this); final String name = node.getName(); if (name != null) { final InspectorSourceLocation parameterLocation = names.get(name); if (parameterLocation != null) { node.setCreationLocation(parameterLocation); } } } } } return nodes; }); } @NotNull public CompletableFuture<String> getPropertyDoc() { if (propertyDocFuture == null) { propertyDocFuture = createPropertyDocFurure(); } return propertyDocFuture; } private CompletableFuture<String> createPropertyDocFurure() { final DiagnosticsNode parent = getParent(); if (parent != null) { return inspectorService.toDartVmServiceValueForSourceLocation(parent.getValueRef()) .thenComposeAsync((DartVmServiceValue vmValue) -> inspectorService.getPropertyLocation(vmValue.getInstanceRef(), getName()) .thenApplyAsync((XSourcePosition sourcePosition) -> { if (sourcePosition != null) { final VirtualFile file = sourcePosition.getFile(); final int offset = sourcePosition.getOffset(); final Project project = getProject(file); if (project != null) { final List<HoverInformation> hovers = DartAnalysisServerService.getInstance(project).analysis_getHover(file, offset); if (!hovers.isEmpty()) { return hovers.get(0).getDartdoc(); } } } return "Unable to find property source"; })); } return CompletableFuture.completedFuture("Unable to find property source"); } @Nullable private Project getProject(@NotNull VirtualFile file) { final FlutterApp app = inspectorService.getApp(); return app != null ? 
app.getProject() : ProjectUtil.guessProjectForFile(file);
  }

  private void setCreationLocation(InspectorSourceLocation location) {
    this.location = location;
  }

  public InspectorService getInspectorService() {
    return inspectorService;
  }

  /** Catalog entry for the widget this node describes, if any. */
  @Nullable
  public FlutterWidget getWidget() {
    return FlutterWidget.getCatalog().getWidget(getDescription());
  }

  /**
   * Icon for this node: the catalog widget's icon when available,
   * otherwise one generated from the widget name.
   */
  @Nullable
  public Icon getIcon() {
    Icon icon = null;

    final FlutterWidget widget = getWidget();
    if (widget != null) {
      icon = widget.getIcon();
    }
    if (icon == null) {
      icon = iconMaker.fromWidgetName(getDescription());
    }

    return icon;
  }

  /**
   * Returns true if two diagnostic nodes are indistinguishable from
   * the perspective of a user debugging.
   * <p>
   * In practice this means that all fields but the objectId and valueId
   * properties for the DiagnosticsNode objects are identical. The valueId
   * field may change even for properties that have not changed because in
   * some cases such as the 'created' property for an element, the property
   * value is created dynamically each time 'getProperties' is called.
   */
  public boolean identicalDisplay(DiagnosticsNode node) {
    if (node == null) {
      return false;
    }
    final Set<Map.Entry<String, JsonElement>> entries = json.entrySet();
    if (entries.size() != node.json.entrySet().size()) {
      return false;
    }
    for (Map.Entry<String, JsonElement> entry : entries) {
      final String key = entry.getKey();
      // Identity fields are expected to differ between otherwise
      // identical nodes; skip them.
      if (key.equals("objectId") || key.equals("valueId")) {
        continue;
      }
      // BUG FIX: the condition was inverted -- it returned false when the
      // values were EQUAL, making the method mean the opposite of its
      // contract. A mismatching value is what makes nodes distinguishable.
      if (!entry.getValue().equals(node.json.get(key))) {
        return false;
      }
    }
    return true;
  }
}
// BUI - a user interface library for the JME 3D engine // This library is free software; you can redistribute it and/or modify it // (at your option) any later version. // This library is distributed in the hope that it will be useful, // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU // You should have received a copy of the GNU Lesser General Public // Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA package com.jmex.bui.layout; import java.util.Arrays; import com.jmex.bui.BComponent; import com.jmex.bui.BContainer; import com.jmex.bui.Log; import com.jmex.bui.util.Dimension; import com.jmex.bui.util.Insets; /** * Lays out components in a simple grid arrangement, wherein the width and height of each column * and row is defined by the widest preferred width and height of any component in that column and * row. * * <p> The table layout defaults to left horizontal alignment and top vertical alignment. */ public class TableLayout extends BLayoutManager { /** An enumeration class representing alignments. */ public static class Alignment { } /** Left justifies the table contents within the container. */ public static final Alignment LEFT = new Alignment(); /** Centers the table contents within the container. */ public static final Alignment CENTER = new Alignment(); /** Right justifies the table contents within the container. */ public static final Alignment RIGHT = new Alignment(); /** Top justifies the table contents within the container. */ public static final Alignment TOP = new Alignment(); /** Bottom justifies the table contents within the container. */ public static final Alignment BOTTOM = new Alignment(); /** Divides the column space among the columns in proportion to their preferred size. This only * works with {@link #setHorizontalAlignment}. */ public static final Alignment STRETCH = new Alignment(); /** * Creates a table layout with the specified number of columns and a zero pixel gap between * rows and columns. 
     */
    public TableLayout (int columns)
    {
        this(columns, 0, 0);
    }

    /**
     * Creates a table layout with the specified number of columns and the specified gap between
     * rows and columns.
     */
    public TableLayout (int columns, int rowgap, int colgap)
    {
        // A table must have at least a column
        columns = Math.max(1, columns);
        _prefMetrics.columnWidths = new int[columns];
        _realMetrics.columnWidths = new int[columns];
        _fixedColumns = new boolean[columns];
        _rowgap = rowgap;
        _colgap = colgap;
    }

    /**
     * Configures the horizontal alignment (or stretching) of this table. This must be called
     * before the container using this layout is validated.
     *
     * @return this instance, for call chaining.
     */
    public TableLayout setHorizontalAlignment (Alignment align)
    {
        _halign = align;
        return this;
    }

    /**
     * Configures the vertical alignment of this table. This must be called before the container
     * using this layout is validated.
     *
     * @return this instance, for call chaining.
     */
    public TableLayout setVerticalAlignment (Alignment align)
    {
        _valign = align;
        return this;
    }

    /**
     * Configures a column as fixed or free. If a table layout is configured with
     * <code>STRETCH</code> horizontal alignment, extra space is divided up among all of the
     * non-fixed columns. All columns are non-fixed by default.
     */
    public TableLayout setFixedColumn (int column, boolean fixed)
    {
        _fixedColumns[column] = fixed;
        return this;
    }

    /**
     * Configures whether or not the table will force all rows to be a uniform size. This must be
     * called before the container using this layout is validated.
*/ public TableLayout setEqualRows (boolean equalRows) { _equalRows = equalRows; return this; } // documentation inherited public Dimension computePreferredSize (BContainer target, int whint, int hhint) { Metrics metrics = computeMetrics(target, true, whint); int cx = (metrics.columnWidths.length-1) * _colgap; int rx = (computeRows(target, true)-1) * _rowgap; return new Dimension(sum(metrics.columnWidths) + cx, sum(metrics.rowHeights) + rx); } // documentation inherited public void layoutContainer (BContainer target) { Insets insets = target.getInsets(); int availwid = target.getWidth() - insets.getHorizontal(); Metrics metrics = computeMetrics(target, false, availwid); int totwidth = sum(metrics.columnWidths) + (metrics.columnWidths.length-1) * _colgap; int totheight = sum(metrics.rowHeights) + (computeRows(target, false)-1) * _rowgap; // account for our horizontal alignment int sx = insets.left; if (_halign == RIGHT) { sx += target.getWidth() - insets.getHorizontal() - totwidth; } else if (_halign == CENTER) { sx += (target.getWidth() - insets.getHorizontal() - totwidth)/2; } // account for our vertical alignment int y = insets.bottom; if (_valign == CENTER) { y += totheight + (target.getHeight() - insets.getVertical() - totheight)/2; } else if (_valign == TOP) { y = target.getHeight() - insets.top; } int row = 0, col = 0, x = sx; for (int ii = 0, ll = target.getComponentCount(); ii < ll; ii++) { BComponent child = target.getComponent(ii); int width = Math.min(metrics.columnWidths[col], availwid); child.setBounds(x, y - metrics.rowHeights[row], width, metrics.rowHeights[row]); x += (metrics.columnWidths[col] + _colgap); if (++col == metrics.columnWidths.length) { y -= (metrics.rowHeights[row] + _rowgap); row++; col = 0; x = sx; } } } protected Metrics computeMetrics (BContainer target, boolean preferred, int whint) { Metrics metrics = preferred ? 
            _prefMetrics : _realMetrics;
        // reuse the cached metrics when they were computed for this same width hint
        if (whint == metrics.cachedHint) {
            return metrics;
        }
        metrics.cachedHint = whint;

        int rows = computeRows(target, preferred);
        if (metrics.rowHeights == null || metrics.rowHeights.length != rows) {
            metrics.rowHeights = new int[rows];
        } else {
            Arrays.fill(metrics.rowHeights, 0);
        }
        Arrays.fill(metrics.columnWidths, 0);

        // columnWidths/rowHeights accumulate the max preferred size seen in each
        // column/row; maxrh tracks the tallest row for the _equalRows case below
        int row = 0, col = 0, maxrh = 0;
        for (int ii = 0, ll = target.getComponentCount(); ii < ll; ii++) {
            BComponent child = target.getComponent(ii);
            if (child.isVisible()) {
                Dimension psize = child.getPreferredSize(whint, -1);
                if (psize.height > metrics.rowHeights[row]) {
                    metrics.rowHeights[row] = psize.height;
                    if (maxrh < metrics.rowHeights[row]) {
                        maxrh = metrics.rowHeights[row];
                    }
                }
                if (psize.width > metrics.columnWidths[col]) {
                    metrics.columnWidths[col] = psize.width;
                }
            }
            if (++col == metrics.columnWidths.length) {
                col = 0;
                row++;
            }
        }

        // if we are stretching, adjust the column widths accordingly (however, no adjusting if
        // we're computing our preferred size)
        int naturalWidth;
        if (!preferred && _halign == STRETCH &&
            (naturalWidth = sum(metrics.columnWidths)) > 0) {
            // sum the width of the non-fixed columns
            int freewid = 0;
            for (int ii = 0; ii < _fixedColumns.length; ii++) {
                if (!_fixedColumns[ii]) {
                    freewid += metrics.columnWidths[ii];
                }
            }
            // now divide up the extra space among said non-fixed columns
            // (avail may be negative, in which case columns shrink)
            int avail = target.getWidth() - target.getInsets().getHorizontal() -
                naturalWidth - (_colgap * (metrics.columnWidths.length-1));
            int used = 0;
            for (int ii = 0; ii < metrics.columnWidths.length; ii++) {
                if (_fixedColumns[ii]) {
                    continue;
                }
                // distribute proportionally to each column's natural width
                int adjust = metrics.columnWidths[ii] * avail / freewid;
                metrics.columnWidths[ii] += adjust;
                used += adjust;
            }
            // add any rounding error to the first non-fixed column
            if (metrics.columnWidths.length > 0) {
                for (int ii = 0; ii < _fixedColumns.length; ii++) {
                    if (!_fixedColumns[ii]) {
                        metrics.columnWidths[ii] += (avail - used);
                        break;
                    }
                }
            }
        }

        // if we're equalizing rows, make all row heights the max
        if (_equalRows) {
            Arrays.fill(metrics.rowHeights, maxrh);
        }

        return metrics;
    }

    /** Computes the number of rows implied by the child count and column count. */
    protected int computeRows (BContainer target, boolean preferred)
    {
        Metrics metrics = preferred ? _prefMetrics : _realMetrics;
        int ccount = target.getComponentCount();
        // round up: a partially filled final row still counts as a row
        int rows = ccount / metrics.columnWidths.length;
        if (ccount % metrics.columnWidths.length != 0) {
            rows++;
        }
        return rows;
    }

    /** Returns the sum of the supplied values. */
    protected int sum (int[] values)
    {
        int total = 0;
        for (int ii = 0; ii < values.length; ii++) {
            total += values[ii];
        }
        return total;
    }

    /** Holds the column/row measurements cached for one width hint. */
    protected class Metrics
    {
        // MIN_VALUE marks "not yet computed for any hint"
        public int cachedHint = Integer.MIN_VALUE;
        public int[] columnWidths;
        public int[] rowHeights;
    }

    protected Alignment _halign = LEFT, _valign = TOP;
    protected boolean _equalRows;
    protected int _rowgap, _colgap;
    protected boolean[] _fixedColumns;
    protected Metrics _prefMetrics = new Metrics(), _realMetrics = new Metrics();
}
// viztool - a tool for visualizing collections of java classes // This program is free software; you can redistribute it and/or modify it // option) any later version. // This program is distributed in the hope that it will be useful, but // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU // with this program; if not, write to the Free Software Foundation, Inc., // 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA package com.samskivert.viztool; import java.awt.*; import java.awt.geom.Rectangle2D; import javax.swing.*; import com.samskivert.viztool.viz.HierarchyVisualizer; /** * A very simple UI element for displaying visualizations on screen. */ public class VizPanel extends JPanel { public VizPanel (HierarchyVisualizer viz) { _viz = viz; // set the font Font font = new Font("Courier", Font.PLAIN, 10); setFont(font); } public void paintComponent (Graphics g) { super.paintComponent(g); Graphics2D gfx = (Graphics2D)g; Rectangle2D bounds = getBounds(); _viz.layout(gfx, 0, 0, bounds.getWidth(), bounds.getHeight()); _viz.paint(gfx, 0); } protected HierarchyVisualizer _viz; }
package net.sf.jabref;

import java.awt.Desktop;
import java.io.File;
import java.io.IOException;
import java.io.StringWriter;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.logging.Logger;

import net.sf.jabref.export.LatexFieldFormatter;
import net.sf.jabref.gui.FileListEntry;
import net.sf.jabref.gui.FileListTableModel;

/**
 * Sends the selected entry as email - by Oliver Kopp
 *
 * It uses the mailto:-mechanism
 *
 * Microsoft Outlook does not support attachments via mailto
 * Therefore, the folder(s), where the file(s) belonging to the entry are stored,
 * are opened. This feature is disabled by default and can be switched on at
 * preferences/external programs
 */
public class SendAsEMailAction extends AbstractWorker {

    private static final Logger logger = Logger.getLogger(SendAsEMailAction.class.getName());

    // Status text written by run() and displayed in the frame by update().
    String message = null;
    private JabRefFrame frame;

    public SendAsEMailAction(JabRefFrame frame) {
        this.frame = frame;
    }

    public void run() {
        // the mailto: mechanism requires java.awt.Desktop support
        if (!Desktop.isDesktopSupported()) {
            message = Globals.lang("Error creating email");
            return;
        }

        BasePanel panel = frame.basePanel();
        if (panel == null) return;
        if (panel.getSelectedEntries().length == 0) {
            message = Globals.lang("No entries selected.");
            return;
        }

        StringWriter sw = new StringWriter();
        BibtexEntry[] bes = panel.getSelectedEntries();

        // write the entries in BibTeX form into sw; collect attachment paths
        LatexFieldFormatter ff = new LatexFieldFormatter();
        ArrayList<String> attachments = new ArrayList<String>();

        // open folders is needed to indirectly support email programs, which cannot handle
        // the unofficial "mailto:attachment" property
        boolean openFolders = JabRefPreferences.getInstance().getBoolean("openFoldersOfAttachedFiles");

        for (BibtexEntry entry : bes) {
            try {
                entry.write(sw, ff, true);
                // resolve every file attached to this entry via the "file" field
                FileListTableModel tm = new FileListTableModel();
                tm.setContent(entry.getField("file"));
                for (int i=0; i< tm.getRowCount(); i++) {
                    FileListEntry flEntry = tm.getEntry(i);
                    File f = Util.expandFilename(flEntry.getLink(),
                            frame.basePanel().metaData().getFileDirectory(GUIGlobals.FILE_FIELD));
                    if (f != null) {
                        // file exists
                        attachments.add(f.getPath());
                        if (openFolders) {
                            // best effort: a failure to open the folder is only logged
                            try {
                                Util.openFolderAndSelectFile(f.getAbsolutePath());
                            } catch (IOException e) {
                                logger.fine(e.getMessage());
                            }
                        }
                    }
                }
            } catch (Exception e) {
                e.printStackTrace();
                message = Globals.lang("Error creating email");
                return;
            }
        }

        // build the query part of the mailto: URI; the URI constructor below
        // takes care of percent-encoding this string
        String mailTo = "?Body=".concat(sw.getBuffer().toString());
        mailTo = mailTo.concat("&Subject=");
        mailTo = mailTo.concat(JabRefPreferences.getInstance().get(JabRefPreferences.EMAIL_SUBJECT));
        for (String path: attachments) {
            // unofficial "Attachment" property; not honored by all mail clients
            mailTo = mailTo.concat("&Attachment=\"").concat(path);
            mailTo = mailTo.concat("\"");
        }

        URI uriMailTo = null;
        try {
            uriMailTo = new URI("mailto", mailTo, null);
        } catch (URISyntaxException e1) {
            e1.printStackTrace();
            message = Globals.lang("Error creating email");
            return;
        }

        Desktop desktop = Desktop.getDesktop();
        try {
            desktop.mail(uriMailTo);
        } catch (IOException e) {
            e.printStackTrace();
            message = Globals.lang("Error creating email");
            return;
        }

        message = String.format("%s: %d", Globals.lang("Entries added to an email"), bes.length);
    }

    public void update() {
        frame.output(message);
    }

}
package org.apache.fop.pdf;

import java.io.IOException;

/**
 * Special PDFStream for embeddable TrueType fonts.
 */
public class PDFTTFStream extends PDFStream {

    /** Original (uncompressed) byte length of the font program. */
    private int origLength;

    /**
     * Main constructor
     * @param len original length
     */
    public PDFTTFStream(int len) {
        super();
        this.origLength = len;
    }

    /**
     * Overload the base object method so we don't have to copy
     * byte arrays around so much
     * @see org.apache.fop.pdf.PDFObject#output(OutputStream)
     */
    protected int output(java.io.OutputStream stream)
            throws java.io.IOException {
        getDocumentSafely().getLogger().debug("Writing " + origLength + " bytes of TTF font data");
        final int written = super.output(stream);
        getDocumentSafely().getLogger().debug("Embedded TrueType/OpenType font");
        return written;
    }

    /**
     * Builds the stream dictionary, adding the /Length1 entry that PDF
     * requires for embedded font programs.
     * @see org.apache.fop.pdf.AbstractPDFStream#buildStreamDict(String)
     */
    protected String buildStreamDict(String lengthEntry) {
        final StringBuilder dict = new StringBuilder(getObjectID());
        dict.append("<< /Length ").append(lengthEntry);
        dict.append(" /Length1 ").append(origLength);
        dict.append("\n").append(getFilterList().buildFilterDictEntries());
        dict.append("\n>>\n");
        return dict.toString();
    }

    /**
     * Sets the TrueType font data.
     * @param data the font payload
     * @param size size of the payload
     * @throws IOException in case of an I/O problem
     */
    public void setData(byte[] data, int size) throws IOException {
        this.data.clear();
        this.data.getOutputStream().write(data, 0, size);
    }

}
package org.junit.jupiter.engine.extension; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.DynamicTest.dynamicTest; import static org.junit.jupiter.api.MethodOrderer.Random.RANDOM_SEED_PROPERTY_NAME; import static org.junit.jupiter.engine.Constants.PARALLEL_EXECUTION_ENABLED_PROPERTY_NAME; import static org.junit.platform.engine.discovery.DiscoverySelectors.selectClass; import static org.junit.platform.launcher.core.LauncherDiscoveryRequestBuilder.request; import java.util.Collections; import java.util.HashSet; import java.util.LinkedHashSet; import java.util.Map; import java.util.Set; import java.util.logging.Level; import java.util.logging.LogRecord; import java.util.stream.Collectors; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.DynamicTest; import org.junit.jupiter.api.MethodDescriptor; import org.junit.jupiter.api.MethodOrderer; import org.junit.jupiter.api.MethodOrderer.Alphanumeric; import org.junit.jupiter.api.MethodOrderer.OrderAnnotation; import org.junit.jupiter.api.MethodOrderer.Random; import org.junit.jupiter.api.MethodOrdererContext; import org.junit.jupiter.api.Nested; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.RepeatedTest; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.TestFactory; import org.junit.jupiter.api.TestInfo; import org.junit.jupiter.api.TestMethodOrder; import org.junit.jupiter.api.TestReporter; import org.junit.jupiter.engine.JupiterTestEngine; import org.junit.jupiter.engine.TrackLogRecords; import org.junit.platform.commons.logging.LogRecordListener; import org.junit.platform.commons.util.ClassUtils; import org.junit.platform.launcher.LauncherDiscoveryRequest; import org.junit.platform.testkit.engine.EngineTestKit; import org.junit.platform.testkit.engine.Events; import org.mockito.Mockito; /** * Integration tests that 
 * verify support for custom test method execution order
 * in the {@link JupiterTestEngine}.
 *
 * @since 5.4
 */
class OrderedMethodTests {

	// Shared across the nested test cases; synchronized because test methods may
	// run on multiple threads when parallel execution is enabled.
	private static final Set<String> callSequence = Collections.synchronizedSet(new LinkedHashSet<>());
	private static final Set<String> threadNames = Collections.synchronizedSet(new LinkedHashSet<>());

	@BeforeEach
	void clearCallSequence() {
		callSequence.clear();
		threadNames.clear();
	}

	@Test
	void alphanumeric() {
		Class<?> testClass = AlphanumericTestCase.class;

		// The name of the base class MUST start with a letter alphanumerically
		// greater than "A" so that BaseTestCase comes after AlphanumericTestCase
		// if methods are sorted by class name for the fallback ordering if two
		// methods have the same name but different parameter lists. Note, however,
		// that Alphanumeric actually does not order methods like that, but we want
		// this check to remain in place to ensure that the ordering does not rely
		// on the class names.
		assertThat(testClass.getSuperclass().getName()).isGreaterThan(testClass.getName());

		var tests = executeTestsInParallel(AlphanumericTestCase.class);

		tests.assertStatistics(stats -> stats.succeeded(callSequence.size()));

		assertThat(callSequence).containsExactly("$()", "AAA()", "AAA(org.junit.jupiter.api.TestInfo)",
			"AAA(org.junit.jupiter.api.TestReporter)", "ZZ_Top()", "___()", "a1()", "a2()", "b()", "c()", "zzz()");
		// despite parallel mode, ordered execution must stay on a single thread
		assertThat(threadNames).hasSize(1);
	}

	@Test
	void orderAnnotation() {
		assertOrderAnnotationSupport(OrderAnnotationTestCase.class);
	}

	@Test
	void orderAnnotationInNestedTestClass() {
		assertOrderAnnotationSupport(OuterTestCase.class);
	}

	private void assertOrderAnnotationSupport(Class<?> testClass) {
		var tests = executeTestsInParallel(testClass);

		tests.assertStatistics(stats -> stats.succeeded(callSequence.size()));

		assertThat(callSequence).containsExactly("test1", "test2", "test3", "test4", "test5", "test6");
		assertThat(threadNames).hasSize(1);
	}

	@Test
	void random() {
		Set<String> uniqueSequences = new HashSet<>();

		for (int i = 0; i < 10; i++) {
			callSequence.clear();

			var tests = executeTestsInParallel(RandomTestCase.class);

			tests.assertStatistics(stats -> stats.succeeded(callSequence.size()));

			uniqueSequences.add(callSequence.stream().collect(Collectors.joining(",")));
		}

		// We assume that at least 3 out of 10 are different...
		assertThat(uniqueSequences).size().isGreaterThanOrEqualTo(3);

		// and that at least 2 different threads were used...
		assertThat(threadNames).size().isGreaterThanOrEqualTo(2);
	}

	@Test
	@TrackLogRecords
	void randomWithBogusSeed(LogRecordListener listener) {
		String seed = "explode";
		String expectedMessage = "Failed to convert configuration parameter [" + Random.RANDOM_SEED_PROPERTY_NAME
				+ "] with value [" + seed + "] to a long. Using System.nanoTime() as fallback.";

		Set<String> uniqueSequences = new HashSet<>();

		for (int i = 0; i < 10; i++) {
			callSequence.clear();
			listener.clear();

			var tests = executeTestsInParallelWithRandomSeed(RandomTestCase.class, seed);

			tests.assertStatistics(stats -> stats.succeeded(callSequence.size()));

			uniqueSequences.add(callSequence.stream().collect(Collectors.joining(",")));

			// a bogus seed must be reported as a WARNING on every run
			// @formatter:off
			assertTrue(listener.stream(Random.class, Level.WARNING)
				.map(LogRecord::getMessage)
				.anyMatch(expectedMessage::equals));
			// @formatter:on
		}

		// We assume that at least 3 out of 10 are different...
		assertThat(uniqueSequences).size().isGreaterThanOrEqualTo(3);
	}

	@Test
	@TrackLogRecords
	void randomWithCustomSeed(LogRecordListener listener) {
		String seed = "42";
		String expectedMessage = "Using custom seed for configuration parameter [" + Random.RANDOM_SEED_PROPERTY_NAME
				+ "] with value [" + seed + "].";

		for (int i = 0; i < 10; i++) {
			callSequence.clear();
			listener.clear();

			var tests = executeTestsInParallelWithRandomSeed(RandomTestCase.class, seed);

			tests.assertStatistics(stats -> stats.succeeded(callSequence.size()));

			// With a custom seed, the "randomness" must be the same for every iteration.
			assertThat(callSequence).containsExactly("test2()", "test3()", "test4()", "repetition 1 of 1", "test1()");

			// @formatter:off
			assertTrue(listener.stream(Random.class, Level.CONFIG)
				.map(LogRecord::getMessage)
				.anyMatch(expectedMessage::equals));
			// @formatter:on
		}
	}

	@Test
	@TrackLogRecords
	void misbehavingMethodOrdererThatAddsElements(LogRecordListener listener) {
		Class<?> testClass = MisbehavingByAddingTestCase.class;

		// added descriptors must be ignored; only the 2 real tests run
		executeTestsInParallel(testClass).assertStatistics(stats -> stats.succeeded(2));

		assertThat(callSequence).containsExactlyInAnyOrder("test1()", "test2()");

		String expectedMessage = "MethodOrderer [" + MisbehavingByAdding.class.getName()
				+ "] added 2 MethodDescriptor(s) for test class [" + testClass.getName() + "] which will be ignored.";

		assertExpectedLogMessage(listener, expectedMessage);
	}

	@Test
	@TrackLogRecords
	void misbehavingMethodOrdererThatRemovesElements(LogRecordListener listener) {
		Class<?> testClass = MisbehavingByRemovingTestCase.class;

		// removed descriptors must be retained; all 3 tests still run
		executeTestsInParallel(testClass).assertStatistics(stats -> stats.succeeded(3));

		assertThat(callSequence).containsExactlyInAnyOrder("test1()", "test2()", "test3()");

		String expectedMessage = "MethodOrderer [" + MisbehavingByRemoving.class.getName()
				+ "] removed 2 MethodDescriptor(s) for test class [" + testClass.getName()
				+ "] which will be retained with arbitrary ordering.";

		assertExpectedLogMessage(listener, expectedMessage);
	}

	private void assertExpectedLogMessage(LogRecordListener listener, String expectedMessage) {
		// @formatter:off
		assertTrue(listener.stream(Level.WARNING)
			.map(LogRecord::getMessage)
			.anyMatch(expectedMessage::equals));
		// @formatter:on
	}

	private Events executeTestsInParallel(Class<?> testClass) {
		return executeTests(testClass, Collections.singletonMap(PARALLEL_EXECUTION_ENABLED_PROPERTY_NAME, "true"));
	}

	private Events executeTestsInParallelWithRandomSeed(Class<?> testClass, String seed) {
		var configurationParameters = Map.of(
			PARALLEL_EXECUTION_ENABLED_PROPERTY_NAME, "true",
			RANDOM_SEED_PROPERTY_NAME, seed
		);
		return executeTests(testClass, configurationParameters);
	}

	private Events executeTests(Class<?> testClass, Map<String, String> configurationParameters) {
		// @formatter:off
		LauncherDiscoveryRequest discoveryRequest = request()
				.selectors(selectClass(testClass))
				.configurationParameters(configurationParameters)
				.build();
		// @formatter:on
		return EngineTestKit.execute("junit-jupiter", discoveryRequest).tests();
	}

	// -------------------------------------------------------------------------
	// Test fixtures executed indirectly via EngineTestKit.

	static class BaseTestCase {

		@Test
		void AAA() {
		}

		@Test
		void c() {
		}

	}

	@TestMethodOrder(Alphanumeric.class)
	static class AlphanumericTestCase extends BaseTestCase {

		@BeforeEach
		void trackInvocations(TestInfo testInfo) {
			// record "name(paramTypes)" so overloads are distinguishable
			var method = testInfo.getTestMethod().get();
			var signature = String.format("%s(%s)", method.getName(),
				ClassUtils.nullSafeToString(method.getParameterTypes()));

			callSequence.add(signature);
			threadNames.add(Thread.currentThread().getName());
		}

		@TestFactory
		DynamicTest b() {
			return dynamicTest("dynamic", () -> {
			});
		}

		@Test
		void $() {
		}

		@Test
		void ___() {
		}

		@Test
		void AAA(TestReporter testReporter) {
		}

		@Test
		void AAA(TestInfo testInfo) {
		}

		@Test
		void ZZ_Top() {
		}

		@Test
		void a1() {
		}

		@Test
		void a2() {
		}

		@RepeatedTest(1)
		void zzz() {
		}
	}

	@TestMethodOrder(OrderAnnotation.class)
	static class OrderAnnotationTestCase {

		@BeforeEach
		void trackInvocations(TestInfo testInfo) {
			callSequence.add(testInfo.getDisplayName());
			threadNames.add(Thread.currentThread().getName());
		}

		@Test
		@DisplayName("test6")
		// @Order(6)
		void defaultOrderValue() {
		}

		@Test
		@DisplayName("test3")
		@Order(3)
		void $() {
		}

		@Test
		@DisplayName("test5")
		@Order(5)
		void AAA() {
		}

		@TestFactory
		@DisplayName("test4")
		@Order(4)
		DynamicTest aaa() {
			return dynamicTest("test4", () -> {
			});
		}

		@Test
		@DisplayName("test1")
		@Order(1)
		void zzz() {
		}

		@RepeatedTest(value = 1, name = "{displayName}")
		@DisplayName("test2")
		@Order(2)
		void ___() {
		}
	}

	static class OuterTestCase {

		@Nested
		class NestedOrderAnnotationTestCase extends OrderAnnotationTestCase {
		}
	}

	@TestMethodOrder(Random.class)
	static class RandomTestCase {

		@BeforeEach
		void trackInvocations(TestInfo testInfo) {
			callSequence.add(testInfo.getDisplayName());
			threadNames.add(Thread.currentThread().getName());
		}

		@Test
		void test1() {
		}

		@Test
		void test2() {
		}

		@Test
		void test3() {
		}

		@TestFactory
		DynamicTest test4() {
			return dynamicTest("dynamic", () -> {
			});
		}

		@RepeatedTest(1)
		void test5() {
		}
	}

	@TestMethodOrder(MisbehavingByAdding.class)
	static class MisbehavingByAddingTestCase {

		@BeforeEach
		void trackInvocations(TestInfo testInfo) {
			callSequence.add(testInfo.getDisplayName());
		}

		@Test
		void test1() {
		}

		@Test
		void test2() {
		}
	}

	@TestMethodOrder(MisbehavingByRemoving.class)
	static class MisbehavingByRemovingTestCase {

		@BeforeEach
		void trackInvocations(TestInfo testInfo) {
			callSequence.add(testInfo.getDisplayName());
		}

		@Test
		void test1() {
		}

		@Test
		void test2() {
		}

		@Test
		void test3() {
		}
	}

	/** Orderer that illegally grows the descriptor list; the engine must ignore the additions. */
	static class MisbehavingByAdding implements MethodOrderer {

		@Override
		public void orderMethods(MethodOrdererContext context) {
			context.getMethodDescriptors().add(mock(MethodDescriptor.class));
			context.getMethodDescriptors().add(mock(MethodDescriptor.class));
		}

		@SuppressWarnings("unchecked")
		static <T> T mock(Class<? super T> type) {
			return (T) Mockito.mock(type);
		}

	}

	/** Orderer that illegally shrinks the descriptor list; the engine must retain the removals. */
	static class MisbehavingByRemoving implements MethodOrderer {

		@Override
		public void orderMethods(MethodOrdererContext context) {
			context.getMethodDescriptors().remove(0);
			context.getMethodDescriptors().remove(0);
		}
	}

}
package org.junit.platform.console.options;

import java.net.URI;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import org.junit.platform.engine.discovery.ClassNameFilter;

import picocli.CommandLine;
import picocli.CommandLine.Command;
import picocli.CommandLine.Model.CommandSpec;
import picocli.CommandLine.Option;
import picocli.CommandLine.ParameterException;
import picocli.CommandLine.ParseResult;
import picocli.CommandLine.Spec;

/**
 * @since 1.0
 */
@Command(name = "ConsoleLauncher", sortOptions = false, description = "Launches the JUnit Platform from the console.")
class AvailableOptions {

	private static final String CP_OPTION = "cp";

	// NOTE: nearly every visible option below has a hidden twin (the "*2" field)
	// that accepts the legacy single-dash/double-dash variant of the same name
	// for backwards compatibility.

	// --- general purpose -------------------------------------------------

	@Option(names = { "-h", "--help" }, usageHelp = true, description = "Display help information.")
	private boolean helpRequested;

	@Option(names = { "--h", "-help" }, help = true, hidden = true)
	private boolean helpRequested2;

	@Option(names = "--disable-ansi-colors", description = "Disable ANSI colors in output (not supported by all terminals).")
	private boolean disableAnsiColors;

	@Option(names = "-disable-ansi-colors", hidden = true)
	private boolean disableAnsiColors2;

	@Option(names = "--disable-banner", description = "Disable print out of the welcome message.")
	private boolean disableBanner;

	@Option(names = "-disable-banner", hidden = true)
	private boolean disableBanner2;

	@Option(names = "--details", paramLabel = "MODE", description = "Select an output details mode for when tests are executed. "
			+ "Use one of: ${COMPLETION-CANDIDATES}. If 'none' is selected, "
			+ "then only the summary and test failures are shown. Default: ${DEFAULT-VALUE}.")
	private Details details = CommandLineOptions.DEFAULT_DETAILS;

	@Option(names = "-details", hidden = true)
	private Details details2 = CommandLineOptions.DEFAULT_DETAILS;

	@Option(names = "--details-theme", paramLabel = "THEME", description = "Select an output details tree theme for when tests are executed. "
			+ "Use one of: ${COMPLETION-CANDIDATES}. Default: ${DEFAULT-VALUE}.")
	private Theme theme = CommandLineOptions.DEFAULT_THEME;

	@Option(names = "-details-theme", hidden = true)
	private Theme theme2 = CommandLineOptions.DEFAULT_THEME;

	@Option(names = { "-cp", "--classpath", "--class-path" }, split = ";|:", paramLabel = "PATH", arity = "1", description = "Provide additional classpath entries "
			+ "-- for example, for adding engines and their dependencies. This option can be repeated.")
	private List<Path> additionalClasspathEntries = new ArrayList<>();

	@Option(names = { "--cp", "-classpath", "-class-path" }, split = ";|:", hidden = true)
	private List<Path> additionalClasspathEntries2 = new ArrayList<>();

	@Option(names = "--fail-if-no-tests", description = "Fail and return exit status code 2 if no tests are found.")
	private boolean failIfNoTests; // no single-dash equivalent: was introduced in 5.3-M1

	@Option(names = "--reports-dir", paramLabel = "DIR", description = "Enable report output into a specified local directory (will be created if it does not exist).")
	private Path reportsDir;

	@Option(names = "-reports-dir", hidden = true)
	private Path reportsDir2;

	// --- selectors -------------------------------------------------------

	@Option(names = "--scan-modules", description = "EXPERIMENTAL: Scan all resolved modules for test discovery.")
	private boolean scanModulepath;

	@Option(names = "-scan-modules", hidden = true)
	private boolean scanModulepath2;

	@Option(names = { "-o", "--select-module" }, paramLabel = "NAME", arity = "1", description = "EXPERIMENTAL: Select single module for test discovery. This option can be repeated.")
	private List<String> selectedModules = new ArrayList<>();

	@Option(names = { "--o", "-select-module" }, arity = "1", hidden = true)
	private List<String> selectedModules2 = new ArrayList<>();

	@Option(names = { "--scan-class-path", "--scan-classpath" }, split = ";|:", paramLabel = "PATH", arity = "0..1", description = "Scan all directories on the classpath or explicit classpath roots. "
			+ "Without arguments, only directories on the system classpath as well as additional classpath "
			+ "entries supplied via -" + CP_OPTION + " (directories and JAR files) are scanned. "
			+ "Explicit classpath roots that are not on the classpath will be silently ignored. "
			+ "This option can be repeated.")
	private List<Path> selectedClasspathEntries = new ArrayList<>();

	@Option(names = { "-scan-class-path", "-scan-classpath" }, split = ";|:", arity = "0..1", hidden = true)
	private List<Path> selectedClasspathEntries2 = new ArrayList<>();

	@Option(names = { "-u", "--select-uri" }, paramLabel = "URI", arity = "1", description = "Select a URI for test discovery. This option can be repeated.")
	private List<URI> selectedUris = new ArrayList<>();

	@Option(names = { "--u", "-select-uri" }, arity = "1", hidden = true)
	private List<URI> selectedUris2 = new ArrayList<>();

	@Option(names = { "-f", "--select-file" }, paramLabel = "FILE", arity = "1", description = "Select a file for test discovery. This option can be repeated.")
	private List<String> selectedFiles = new ArrayList<>();

	@Option(names = { "--f", "-select-file" }, arity = "1", hidden = true)
	private List<String> selectedFiles2 = new ArrayList<>();

	@Option(names = { "-d", "--select-directory" }, paramLabel = "DIR", arity = "1", description = "Select a directory for test discovery. This option can be repeated.")
	private List<String> selectedDirectories = new ArrayList<>();

	@Option(names = { "--d", "-select-directory" }, arity = "1", hidden = true)
	private List<String> selectedDirectories2 = new ArrayList<>();

	@Option(names = { "-p", "--select-package" }, paramLabel = "PKG", arity = "1", description = "Select a package for test discovery. This option can be repeated.")
	private List<String> selectedPackages = new ArrayList<>();

	@Option(names = { "--p", "-select-package" }, arity = "1", hidden = true)
	private List<String> selectedPackages2 = new ArrayList<>();

	@Option(names = { "-c", "--select-class" }, paramLabel = "CLASS", arity = "1", description = "Select a class for test discovery. This option can be repeated.")
	private List<String> selectedClasses = new ArrayList<>();

	@Option(names = { "--c", "-select-class" }, arity = "1", hidden = true)
	private List<String> selectedClasses2 = new ArrayList<>();

	@Option(names = { "-m", "--select-method" }, paramLabel = "NAME", arity = "1", description = "Select a method for test discovery. This option can be repeated.")
	private List<String> selectedMethods = new ArrayList<>();

	@Option(names = { "--m", "-select-method" }, arity = "1", hidden = true)
	private List<String> selectedMethods2 = new ArrayList<>();

	@Option(names = { "-r", "--select-resource" }, paramLabel = "RESOURCE", arity = "1", description = "Select a classpath resource for test discovery. This option can be repeated.")
	private List<String> selectedClasspathResources = new ArrayList<>();

	@Option(names = { "--r", "-select-resource" }, arity = "1", hidden = true)
	private List<String> selectedClasspathResources2 = new ArrayList<>();

	// --- filters ---------------------------------------------------------

	@Option(names = { "-n", "--include-classname" }, paramLabel = "PATTERN", arity = "1", description = "Provide a regular expression to include only classes whose fully qualified names match. "
			+ "To avoid loading classes unnecessarily, the default pattern only includes class "
			+ "names that begin with \"Test\" or end with \"Test\" or \"Tests\". "
			+ "When this option is repeated, all patterns will be combined using OR semantics. "
			+ "Default: ${DEFAULT-VALUE}")
	private List<String> includeClassNamePatterns = new ArrayList<>(
		Arrays.asList(ClassNameFilter.STANDARD_INCLUDE_PATTERN));

	@Option(names = { "--n", "-include-classname" }, arity = "1", hidden = true)
	private List<String> includeClassNamePatterns2 = new ArrayList<>();

	@Option(names = { "-N", "--exclude-classname" }, paramLabel = "PATTERN", arity = "1", description = "Provide a regular expression to exclude those classes whose fully qualified names match. "
			+ "When this option is repeated, all patterns will be combined using OR semantics.")
	private List<String> excludeClassNamePatterns = new ArrayList<>();

	@Option(names = { "--N", "-exclude-classname" }, arity = "1", hidden = true)
	private List<String> excludeClassNamePatterns2 = new ArrayList<>();

	@Option(names = { "--include-package" }, paramLabel = "PKG", arity = "1", description = "Provide a package to be included in the test run. This option can be repeated.")
	private List<String> includePackages = new ArrayList<>();

	@Option(names = { "-include-package" }, arity = "1", hidden = true)
	private List<String> includePackages2 = new ArrayList<>();

	@Option(names = { "--exclude-package" }, paramLabel = "PKG", arity = "1", description = "Provide a package to be excluded from the test run. This option can be repeated.")
	private List<String> excludePackages = new ArrayList<>();

	@Option(names = { "-exclude-package" }, arity = "1", hidden = true)
	private List<String> excludePackages2 = new ArrayList<>();

	@Option(names = { "-t", "--include-tag" }, paramLabel = "TAG", arity = "1", description = "Provide a tag or tag expression to include only tests whose tags match. "
			+ "When this option is repeated, all patterns will be combined using OR semantics.")
	private List<String> includedTags = new ArrayList<>();

	@Option(names = { "--t", "-include-tag" }, arity = "1", hidden = true)
	private List<String> includedTags2 = new ArrayList<>();

	@Option(names = { "-T", "--exclude-tag" }, paramLabel = "TAG", arity = "1", description = "Provide a tag or tag expression to exclude those tests whose tags match. "
			+ "When this option is repeated, all patterns will be combined using OR semantics.")
	private List<String> excludedTags = new ArrayList<>();

	@Option(names = { "--T", "-exclude-tag" }, arity = "1", hidden = true)
	private List<String> excludedTags2 = new ArrayList<>();

	@Option(names = { "-e", "--include-engine" }, paramLabel = "ID", arity = "1", description = "Provide the ID of an engine to be included in the test run. This option can be repeated.")
	private List<String> includedEngines = new ArrayList<>();

	@Option(names = { "--e", "-include-engine" }, arity = "1", hidden = true)
	private List<String> includedEngines2 = new ArrayList<>();

	@Option(names = { "-E", "--exclude-engine" }, paramLabel = "ID", arity = "1", description = "Provide the ID of an engine to be excluded from the test run. This option can be repeated.")
	private List<String> excludedEngines = new ArrayList<>();

	@Option(names = { "--E", "-exclude-engine" }, arity = "1", hidden = true)
	private List<String> excludedEngines2 = new ArrayList<>();

	// Implementation note: the @Option annotation is on a setter method to allow validation.
	private Map<String, String> configurationParameters = new LinkedHashMap<>();

	@Spec
	private CommandSpec spec;

	AvailableOptions() {
	}

	@Option(names = "--config", paramLabel = "KEY=VALUE", arity = "1", description = "Set a configuration parameter for test discovery and execution. 
This option can be repeated.") public void setConfigurationParameters(Map<String, String> map) { for (String key : map.keySet()) { String newValue = map.get(key); validateUnique(key, newValue); configurationParameters.put(key, newValue); } } private void validateUnique(String key, String newValue) { String existing = configurationParameters.get(key); if (existing != null && !existing.equals(newValue)) { throw new ParameterException(spec.commandLine(), String.format("Duplicate key '%s' for values '%s' and '%s'.", key, existing, newValue)); } } @Option(names = { "-config" }, arity = "1", hidden = true) public void setConfigurationParameters2(Map<String, String> keyValuePairs) { setConfigurationParameters(keyValuePairs); } CommandLine getParser() { CommandLine result = new CommandLine(this); result.setUsageHelpWidth(90); result.setCaseInsensitiveEnumValuesAllowed(true); result.setAtFileCommentChar(null); // for --select-method com.acme.Foo return result; } CommandLineOptions toCommandLineOptions(ParseResult parseResult) { CommandLineOptions result = new CommandLineOptions(); // General Purpose result.setDisplayHelp(this.helpRequested || this.helpRequested2); result.setAnsiColorOutputDisabled(this.disableAnsiColors || this.disableAnsiColors2); result.setBannerDisabled(this.disableBanner || this.disableBanner2); result.setDetails(choose(this.details, this.details2, CommandLineOptions.DEFAULT_DETAILS)); result.setTheme(choose(this.theme, this.theme2, CommandLineOptions.DEFAULT_THEME)); result.setAdditionalClasspathEntries(merge(this.additionalClasspathEntries, this.additionalClasspathEntries2)); result.setFailIfNoTests(this.failIfNoTests); // Reports result.setReportsDir(choose(this.reportsDir, this.reportsDir2, null)); // Java Platform Module System result.setScanModulepath(this.scanModulepath || this.scanModulepath2); result.setSelectedModules(merge(this.selectedModules, this.selectedModules2)); // Selectors 
result.setScanClasspath(parseResult.hasMatchedOption("scan-class-path")); // flag was specified result.setSelectedClasspathEntries(merge(this.selectedClasspathEntries, this.selectedClasspathEntries2)); result.setSelectedUris(merge(this.selectedUris, this.selectedUris2)); result.setSelectedFiles(merge(this.selectedFiles, this.selectedFiles2)); result.setSelectedDirectories(merge(this.selectedDirectories, this.selectedDirectories2)); result.setSelectedPackages(merge(this.selectedPackages, this.selectedPackages2)); result.setSelectedClasses(merge(this.selectedClasses, this.selectedClasses2)); result.setSelectedMethods(merge(this.selectedMethods, this.selectedMethods2)); result.setSelectedClasspathResources(merge(this.selectedClasspathResources, this.selectedClasspathResources2)); // Filters result.setIncludedClassNamePatterns(merge(this.includeClassNamePatterns, this.includeClassNamePatterns2)); result.setExcludedClassNamePatterns(merge(this.excludeClassNamePatterns, this.excludeClassNamePatterns2)); result.setIncludedPackages(merge(this.includePackages, this.includePackages2)); result.setExcludedPackages(merge(this.excludePackages, this.excludePackages2)); result.setIncludedTagExpressions(merge(this.includedTags, this.includedTags2)); result.setExcludedTagExpressions(merge(this.excludedTags, this.excludedTags2)); result.setIncludedEngines(merge(this.includedEngines, this.includedEngines2)); result.setExcludedEngines(merge(this.excludedEngines, this.excludedEngines2)); // Configuration Parameters result.setConfigurationParameters(this.configurationParameters); return result; } private static <T> List<T> merge(List<T> list1, List<T> list2) { List<T> result = new ArrayList<>(list1); result.addAll(list2); return result; } private static <T> T choose(T left, T right, T defaultValue) { return left == right ? left : (left == defaultValue ? right : left); } }
package imj3.draft.processing; import static imj3.draft.machinelearning.Datum.Default.datum; import static java.lang.Math.max; import static java.lang.Math.min; import static net.sourceforge.aprog.tools.Tools.array; import static net.sourceforge.aprog.tools.Tools.baseName; import static net.sourceforge.aprog.tools.Tools.cast; import static net.sourceforge.aprog.tools.Tools.join; import static net.sourceforge.aprog.tools.Tools.last; import static net.sourceforge.aprog.tools.Tools.unchecked; import imj2.draft.AutoCloseableImageWriter; import imj3.core.Image2D; import imj3.draft.machinelearning.BufferedDataSource; import imj3.draft.machinelearning.Classifier; import imj3.draft.machinelearning.DataSource; import imj3.draft.machinelearning.Datum; import imj3.draft.machinelearning.FilteredCompositeDataSource; import imj3.draft.machinelearning.Measure; import imj3.draft.machinelearning.MedianCutClustering; import imj3.draft.machinelearning.NearestNeighborClassifier; import imj3.draft.machinelearning.Measure.Predefined; import imj3.draft.processing.Image2DSource.PatchIterator; import imj3.tools.AwtImage2D; import imj3.tools.CommonTools; import imj3.tools.XMLSerializable; import imj3.tools.CommonSwingTools.NestedList; import imj3.tools.CommonSwingTools.PropertyGetter; import imj3.tools.CommonSwingTools.PropertyOrdering; import imj3.tools.CommonSwingTools.PropertySetter; import imj3.tools.CommonSwingTools.StringGetter; import imj3.tools.IMJTools; import java.awt.Rectangle; import java.awt.image.BufferedImage; import java.awt.image.RenderedImage; import java.io.File; import java.io.IOException; import java.io.Serializable; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Locale; import java.util.Map; 
import java.util.concurrent.atomic.AtomicLong;

import javax.imageio.ImageIO;

import net.sourceforge.aprog.tools.CommandLineArgumentsParser;
import net.sourceforge.aprog.tools.TicToc;
import net.sourceforge.aprog.tools.Tools;
import net.sourceforge.aprog.xml.XMLTools;

import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;

/**
 * @author codistmonk (creation 2015-02-16)
 */
@PropertyOrdering({ "classes", "training", "algorithms" })
public final class Pipeline implements XMLSerializable {

// Regions used for training, the chain of algorithms, and the class descriptions
// (name + label) that define this processing pipeline.
private List<TrainingField> trainingFields;

private List<Algorithm> algorithms;

private List<ClassDescription> classDescriptions;

// Latest training / classification outcomes (timing + confusion matrix).
private Result trainingResult;

private Result classificationResult;

private ComputationStatus computationStatus;

// Serializes this pipeline: one child element per list, plus the two results.
@Override
public final Element toXML(final Document document, final Map<Object, Integer> ids) {
final Element result = XMLSerializable.super.toXML(document, ids);
final Node trainingFieldsNode = result.appendChild(document.createElement("trainingFields"));

for (final TrainingField trainingField : this.getTrainingFields()) {
trainingFieldsNode.appendChild(trainingField.toXML(document, ids));
}

final Node algorithmsNode = result.appendChild(document.createElement("algorithms"));

for (final Algorithm algorithm : this.getAlgorithms()) {
algorithmsNode.appendChild(algorithm.toXML(document, ids));
}

final Node classDescriptionsNode = result.appendChild(document.createElement("classDescriptions"));

for (final ClassDescription classDescription : this.getClassDescriptions()) {
classDescriptionsNode.appendChild(classDescription.toXML(document, ids));
}

result.appendChild(XMLSerializable.newElement("trainingResult", this.getTrainingResult(), document, ids));
result.appendChild(XMLSerializable.newElement("classificationResult", this.getClassificationResult(), document, ids));

return result;
}

@Override
public final Pipeline fromXML(final Element xml, final Map<Integer, Object> objects) {
XMLSerializable.super.fromXML(xml, objects);
/**
 * Base class for one processing step: patch-extraction geometry (size, sparsity,
 * stride, optional XY channels) plus a trained classifier. The *Range properties
 * hold candidate-value strings consumed by the @Trainable parameter-search machinery.
 *
 * @author codistmonk (creation 2015-02-27)
 */
@PropertyOrdering({ "patchSize", "patchSizeRange", "patchSparsity", "patchSparsityRange", "stride", "strideRange", "usingXY", "usingXYRange", "classifier" })
public abstract class Algorithm implements XMLSerializable {

private String clusteringName = MedianCutClustering.class.getName();

private Classifier classifier;

private int patchSize = 1;

private String patchSizeRange;

private int patchSparsity = 1;

private String patchSparsityRange;

private int stride = 1;

private String strideRange;

private boolean usingXY = false;

private String usingXYRange;

@Override
public final Element toXML(final Document document, final Map<Object, Integer> ids) {
final Element result = XMLSerializable.super.toXML(document, ids);

// Records the owning Pipeline's id so deserialization can re-attach this inner instance.
result.setAttribute(ENCLOSING_INSTANCE_ID, ids.get(this.getPipeline()).toString());
result.setAttribute("clusteringName", this.getClusteringName());
result.appendChild(XMLSerializable.newElement("classifier", this.getClassifier(), document, ids));
result.setAttribute("patchSize", this.getPatchSizeAsString());
result.setAttribute("patchSizeRange", this.getPatchSizeRange());
result.setAttribute("patchSparsity", this.getPatchSparsityAsString());
result.setAttribute("patchSparsityRange", this.getPatchSparsityRange());
result.setAttribute("stride", this.getStrideAsString());
result.setAttribute("strideRange", this.getStrideRange());
result.setAttribute("usingXY", this.getUsingXYAsString());
result.setAttribute("usingXYRange", this.getUsingXYRange());

return this.subclassToXML(document, ids, result);
}

// Hook for subclasses to append their own attributes/children to the element.
protected abstract Element subclassToXML(Document document, Map<Object, Integer> ids, Element result);

@Override
public final Algorithm fromXML(final Element xml, final Map<Integer, Object> objects) {
XMLSerializable.super.fromXML(xml, objects);

this.setClusteringName(xml.getAttribute("clusteringName"));
this.setClassifier(XMLSerializable.objectFromXML((Element) XMLTools.getNode(xml, "classifier").getFirstChild(),
objects));
this.setPatchSize(xml.getAttribute("patchSize"));
this.setPatchSizeRange(xml.getAttribute("patchSizeRange"));
this.setPatchSparsity(xml.getAttribute("patchSparsity"));
this.setPatchSparsityRange(xml.getAttribute("patchSparsityRange"));
this.setStride(xml.getAttribute("stride"));
this.setStrideRange(xml.getAttribute("strideRange"));
this.setUsingXY(xml.getAttribute("usingXY"));
this.setUsingXYRange(xml.getAttribute("usingXYRange"));

return this.subclassFromXML(xml, objects);
}

protected abstract Algorithm subclassFromXML(Element xml, Map<Integer, Object> objects);

@PropertyGetter("clustering")
public final String getClusteringName() {
return this.clusteringName;
}

@PropertySetter("clustering")
public final Algorithm setClusteringName(final String clusteringName) {
this.clusteringName = clusteringName;
return this;
}

public final int getPatchSize() {
return this.patchSize;
}

public final Algorithm setPatchSize(final int patchSize) {
this.patchSize = patchSize;
return this;
}

// String views of the int properties exist for the property-sheet UI bindings.
@PropertyGetter("patchSize")
public final String getPatchSizeAsString() {
return Integer.toString(this.getPatchSize());
}

@PropertySetter("patchSize")
@Trainable("patchSizeRange")
public final Algorithm setPatchSize(final String patchSizeAsString) {
return this.setPatchSize(Integer.parseInt(patchSizeAsString));
}

@PropertyGetter("patchSizeRange")
public final String getPatchSizeRange() {
// Lazily defaults to "1" so serialization and the UI never see null.
if (this.patchSizeRange == null) {
this.patchSizeRange = "1";
}

return this.patchSizeRange;
}

@PropertySetter("patchSizeRange")
public final Algorithm setPatchSizeRange(final String patchSizeRange) {
this.patchSizeRange = patchSizeRange;
return this;
}

public final int getPatchSparsity() {
return this.patchSparsity;
}

public final Algorithm setPatchSparsity(final int patchSparsity) {
this.patchSparsity = patchSparsity;
return this;
}

@PropertyGetter("patchSparsity")
public final String getPatchSparsityAsString() {
return Integer.toString(this.getPatchSparsity());
}

@PropertySetter("patchSparsity")
@Trainable("patchSparsityRange")
public final Algorithm setPatchSparsity(final String patchSparsityAsString) {
return this.setPatchSparsity(Integer.parseInt(patchSparsityAsString));
}

@PropertyGetter("patchSparsityRange")
public final String getPatchSparsityRange() {
// Lazily defaults to "1" so serialization and the UI never see null.
if (this.patchSparsityRange == null) {
this.patchSparsityRange = "1";
}

return this.patchSparsityRange;
}

@PropertySetter("patchSparsityRange")
public final Algorithm setPatchSparsityRange(final String patchSparsityRange) {
this.patchSparsityRange = patchSparsityRange;
return this;
}

public final int getStride() {
return this.stride;
}

public final Algorithm setStride(final int stride) {
this.stride = stride;
return this;
}

@PropertyGetter("stride")
public final String getStrideAsString() {
return Integer.toString(this.getStride());
}

@PropertySetter("stride")
@Trainable("strideRange")
public final Algorithm setStride(final String strideAsString) {
return this.setStride(Integer.parseInt(strideAsString));
}

@PropertyGetter("strideRange")
public final String getStrideRange() {
if (this.strideRange == null) {
this.strideRange = "1";
}

return this.strideRange;
}

@PropertySetter("strideRange")
public final Algorithm setStrideRange(final String strideRange) {
this.strideRange = strideRange;
return this;
}

public final boolean isUsingXY() {
return this.usingXY;
}

public final Algorithm setUsingXY(final boolean usingXY) {
this.usingXY = usingXY;
return this;
}

// The boolean is externalized as "1"/"0" rather than "true"/"false".
@PropertyGetter("usingXY")
public final String getUsingXYAsString() {
return this.isUsingXY() ? "1" : "0";
}

@PropertySetter("usingXY")
@Trainable("usingXYRange")
public final Algorithm setUsingXY(final String usingXYAsString) {
return this.setUsingXY(Integer.parseInt(usingXYAsString) != 0);
}

@PropertyGetter("usingXYRange")
public final String getUsingXYRange() {
if (this.usingXYRange == null) {
this.usingXYRange = "0";
}

return this.usingXYRange;
}

@PropertySetter("usingXYRange")
public final Algorithm setUsingXYRange(final String usingXYRange) {
this.usingXYRange = usingXYRange;
return this;
}

public final Classifier getClassifier() {
return this.classifier;
}

public final Algorithm setClassifier(final Classifier classifier) {
this.classifier = classifier;
return this;
}

// Inner-class accessor to the enclosing Pipeline instance.
public final Pipeline getPipeline() {
return Pipeline.this;
}

public abstract int getClassCount();

// Fits this algorithm's classifier to the given samples; implemented by subclasses.
public abstract Algorithm train(DataSource trainingSet);

@Override
public final String toString() {
// NOTE(review): despite the local's name, this is the clustering class name, not the
// classifier's.
final String classifierName = this.getClusteringName();
final String suffix = this instanceof UnsupervisedAlgorithm ?
" (unsupervised: " + this.getClassCount() + ")" : "";

// Simple name of the clustering class, e.g. "MedianCutClustering".
return classifierName.substring(classifierName.lastIndexOf('.') + 1) + suffix;
}

private static final long serialVersionUID = 7689582280746561160L;

}

/**
 * Unsupervised step: clusters the training data into a fixed number of classes.
 *
 * @author codistmonk (creation 2015-02-24)
 */
public final class UnsupervisedAlgorithm extends Algorithm {

private int classCount;

@Override
protected final Element subclassToXML(final Document document, final Map<Object, Integer> ids, final Element result) {
result.setAttribute("classCount", this.getClassCountAsString());

return result;
}

@Override
protected final UnsupervisedAlgorithm subclassFromXML(final Element xml, final Map<Integer, Object> objects) {
return this.setClassCount(xml.getAttribute("classCount"));
}

@Override
public final int getClassCount() {
return this.classCount;
}

public final UnsupervisedAlgorithm setClassCount(final int classCount) {
this.classCount = classCount;
return this;
}

@PropertyGetter("classCount")
public final String getClassCountAsString() {
return Integer.toString(this.getClassCount());
}

@PropertySetter("classCount")
public final UnsupervisedAlgorithm setClassCount(final String classCountAsString) {
return this.setClassCount(Integer.parseInt(classCountAsString));
}

// Clusters the training set into classCount prototypes (median cut, L2_ES measure).
@Override
public final UnsupervisedAlgorithm train(final DataSource trainingSet) {
return (UnsupervisedAlgorithm) this.setClassifier(new MedianCutClustering(
Measure.Predefined.L2_ES, this.getClassCount()).cluster(trainingSet).updatePrototypeIndices());
}

private static final long serialVersionUID = 130550869712582710L;

}

/**
 * Supervised step: trains per-class prototypes from labeled samples.
 *
 * @author codistmonk (creation 2015-02-24)
 */
@PropertyOrdering({ "prototypes", "prototypeRanges" })
public final class SupervisedAlgorithm extends Algorithm {

// Per-class prototype counts and candidate-count ranges, keyed by class name.
private Map<String, Integer> prototypeCounts;

private Map<String, String> prototypeCountRanges;

@Override
protected final Element subclassToXML(final Document document, final Map<Object, Integer> ids, final Element result) {
result.appendChild(XMLSerializable.newElement("prototypeCounts", this.getPrototypeCounts(), document, ids));
result.appendChild(XMLSerializable.newElement("prototypeCountRanges", this.getPrototypeCountRanges(), document, ids));

return result;
}

@Override
protected final SupervisedAlgorithm subclassFromXML(final Element xml, final Map<Integer, Object> objects) {
this.prototypeCounts = XMLSerializable.objectFromXML((Element) XMLTools.getNode(xml, "prototypeCounts").getFirstChild(), objects);
this.prototypeCountRanges = XMLSerializable.objectFromXML((Element) XMLTools.getNode(xml, "prototypeCountRanges").getFirstChild(), objects);

return this;
}

@Trainable("prototypeRanges")
public final Map<String, Integer> getPrototypeCounts() {
if (this.prototypeCounts == null) {
this.prototypeCounts = new LinkedHashMap<>();
}

// Re-synchronizes the map with the pipeline's current class descriptions:
// new classes get a default count of 1, removed classes are dropped.
{
final Collection<String> classes = new LinkedHashSet<>();

for (final Pipeline.ClassDescription classDescription : this.getPipeline().getClassDescriptions()) {
final String name = classDescription.getName();

classes.add(name);

if (!this.prototypeCounts.containsKey(name)) {
this.prototypeCounts.put(name, 1);
}
}

this.prototypeCounts.keySet().retainAll(classes);
}

return this.prototypeCounts;
}

@PropertyGetter("prototypes")
public final String getPrototypeCountsAsString() {
// Turns "{a=1, b=2}" into "a=1; b=2".
final String string = this.getPrototypeCounts().toString();

return string.substring(1, string.length() - 1).replace(',', ';');
}

// Parses "name=count; ..." back into the map; unknown or missing class names are rejected.
@PropertySetter("prototypes")
public final SupervisedAlgorithm setPrototypeCounts(final String prototypeCountsAsString) {
final Map<String, Integer> prototypeCounts = this.getPrototypeCounts();
final Map<String, Integer> tmp = new HashMap<>();

for (final String keyValue : prototypeCountsAsString.split(";")) {
final String[] keyAndValue = keyValue.split("=");
final String key = keyAndValue[0].trim();
final int value = Integer.parseInt(keyAndValue[1].trim());

if (!prototypeCounts.containsKey(key)) {
throw new IllegalArgumentException();
}

tmp.put(key, value);
}

if
(!tmp.keySet().containsAll(prototypeCounts.keySet())) {
throw new IllegalArgumentException();
}

this.prototypeCounts.putAll(tmp);

return this;
}

public final Map<String, String> getPrototypeCountRanges() {
if (this.prototypeCountRanges == null) {
this.prototypeCountRanges = new LinkedHashMap<>();
}

// Same class-list synchronization as getPrototypeCounts(), with default range "1".
{
final Collection<String> classes = new LinkedHashSet<>();

for (final Pipeline.ClassDescription classDescription : this.getPipeline().getClassDescriptions()) {
final String name = classDescription.getName();

classes.add(name);

if (!this.prototypeCountRanges.containsKey(name)) {
this.prototypeCountRanges.put(name, "1");
}
}

this.prototypeCountRanges.keySet().retainAll(classes);
}

return this.prototypeCountRanges;
}

@PropertyGetter("prototypeRanges")
public final String getPrototypeCountRangesAsString() {
return Tools.join("; ", this.getPrototypeCountRanges().entrySet().toArray());
}

@PropertySetter("prototypeRanges")
public final SupervisedAlgorithm setPrototypeCountRanges(final String prototypeCountRangesAsString) {
final Map<String, String> prototypeCountRanges = this.getPrototypeCountRanges();
final Map<String, String> tmp = parseRangeMap(prototypeCountRangesAsString, prototypeCountRanges.keySet());

if (!tmp.keySet().containsAll(prototypeCountRanges.keySet())) {
throw new IllegalArgumentException();
}

this.prototypeCountRanges.putAll(tmp);

return this;
}

@Override
public final int getClassCount() {
return this.getPipeline().getClassDescriptions().size();
}

// Trains one clustering per class on that class's samples only (filtered by the label in
// the datum prototype's first value), then merges all resulting prototypes — re-indexed
// with the class label — into a single nearest-neighbor classifier.
@Override
public final SupervisedAlgorithm train(final DataSource trainingSet) {
final Predefined measure = Measure.Predefined.L2_ES;
final NearestNeighborClassifier classifier = new NearestNeighborClassifier(measure);
final Map<String, Integer> classLabels = this.getPipeline().getClassLabels();

for (final Map.Entry<String, Integer> entry : this.getPrototypeCounts().entrySet()) {
final int classLabel = classLabels.get(entry.getKey());
final NearestNeighborClassifier subClassifier = new MedianCutClustering(
measure, entry.getValue()).cluster(new FilteredCompositeDataSource(
c -> classLabel == (int) c.getPrototype().getValue()[0]).add(trainingSet));

for (final Datum prototype : subClassifier.getPrototypes()) {
classifier.getPrototypes().add(prototype.setIndex(classLabel));
}
}

return (SupervisedAlgorithm) this.setClassifier(classifier);
}

private static final long serialVersionUID = 6887222324834498847L;

}

private static final long serialVersionUID = -4539259556658072410L;

/**
 * Loads a serialized pipeline, classifies an input image, and writes both the raw
 * classification image (png) and a contour-overlay image (jpg).
 *
 * @param commandLineArguments
 * <br>Must not be null
 * @throws IOException
 */
public static final void main(final String[] commandLineArguments) throws IOException {
final CommandLineArgumentsParser arguments = new CommandLineArgumentsParser(commandLineArguments);
// NOTE(review): "jo" looks like an odd key for the pipeline-file argument — confirm
// against the callers/scripts before renaming.
final File pipelineFile = new File(arguments.get("jo", ""));
final String groundTruthName = arguments.get("groundtruth", "");
final String inputPath = arguments.get("in", "");
final int lod = arguments.get("lod", 0)[0];
final String pipelineName = baseName(pipelineFile.getName());
final File classificationFile = new File(arguments.get("out",
getClassificationPathFromImagePath(inputPath, groundTruthName, pipelineName)));
// final Pipeline pipeline = (Pipeline) xstream.fromXML(pipelineFile);
final Pipeline pipeline = (Pipeline) Tools.readObject(pipelineFile.getPath());
final Image2D image = IMJTools.read(inputPath, lod);
final int width = image.getWidth();
final int height = image.getHeight();
final Image2D result = new AwtImage2D(null, new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB));

pipeline.classify(image, null, result);

{
Tools.debugPrint("Writing", classificationFile);
ImageIO.write((RenderedImage) result.toAwt(), "png", classificationFile);
}

{
final String overlayedContoursFormat = "jpg";
final File overlayedContoursFile = new File(baseName(inputPath) + "_overlayedcontours_" + groundTruthName + "_" + pipelineName + "."
+ overlayedContoursFormat);
final Image2D overlayedCountours = new AwtImage2D(null, new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB));

// Copies the input image, except where a nonzero label differs from any 4-neighbor:
// those boundary pixels get the label value instead, drawing class contours over
// the image. (Note: identifier "overlayedCountours" carries a typo; kept as-is.)
for (int y = 0; y < height; ++y) {
for (int x = 0; x < width; ++x) {
final long label = result.getPixelValue(x, y);
final long north = label != 0L && 0 < y ? result.getPixelValue(x, y - 1) : label;
final long west = label != 0L && 0 < x ? result.getPixelValue(x - 1, y) : label;
final long east = label != 0L && x + 1 < width ? result.getPixelValue(x + 1, y) : label;
final long south = label != 0L && y + 1 < height ? result.getPixelValue(x, y + 1) : label;

if (label == north && label == west && label == east && label == south) {
overlayedCountours.setPixelValue(x, y, image.getPixelValue(x, y));
} else {
overlayedCountours.setPixelValue(x, y, label);
}
}
}

Tools.debugPrint("Writing", overlayedContoursFile);
AutoCloseableImageWriter.write((RenderedImage) overlayedCountours.toAwt(), overlayedContoursFormat, 0.9F, overlayedContoursFile);
}
}

// Parses "k1=v1; k2=v2" into a map, rejecting any key not in the allowed set.
public static final Map<String, String> parseRangeMap(final String mapAsString, final Collection<String> keys) {
final Map<String, String> tmp = new HashMap<>();

for (final String keyValue : mapAsString.split(";")) {
final String[] keyAndValue = keyValue.split("=");
final String key = keyAndValue[0].trim();
final String value = keyAndValue[1].trim();

if (!keys.contains(key)) {
throw new IllegalArgumentException();
}

tmp.put(key, value);
}

return tmp;
}

public static final String getGroundTruthPathFromImagePath(final String imagePath, final String groundTruthName) {
return baseName(imagePath) + "_groundtruth_" + groundTruthName + ".png";
}

public static final String getClassificationPathFromImagePath(final String imagePath, final String groundTruthName, final String pipelineName) {
return baseName(imagePath) + "_classification_" + groundTruthName + "_" + pipelineName + ".png";
}

// Computes an F1-style score from a confusion matrix (expected key -> actual key -> count).
// Each row is normalized by its total; diagonal mass accumulates in `numerator` and
// off-diagonal mass in `denominator`, folded below into 2*diag / (2*diag + errors).
public static final <K, N extends Number> double f1(final Map<K, Map<K, N>> confusionMatrix) {
double numerator = 0.0;
double denominator = 0.0;

for (final Map.Entry<K, Map<K, N>> entry : confusionMatrix.entrySet()) {
final Object expectedKey = entry.getKey();
double total = 0.0;

for (final N count : entry.getValue().values()) {
total += count.doubleValue();
}

// Avoids division by zero for empty rows.
if (total == 0.0) {
total = 1.0;
}

for (final Map.Entry<K, N> subEntry : entry.getValue().entrySet()) {
final Object actualKey = subEntry.getKey();
final double count = subEntry.getValue().doubleValue();

if (expectedKey.equals(actualKey)) {
numerator += count / total;
} else {
denominator += count / total;
}
}
}

// After this line: numerator == 2*diag, denominator == errors + 2*diag.
denominator += (numerator *= 2.0);

return denominator == 0.0 ? 0.0 : numerator / denominator;
}

/**
 * Marks a property setter as searchable; the value names the paired range property.
 *
 * @author codistmonk (creation 2015-03-09)
 */
@Retention(RetentionPolicy.RUNTIME)
public static abstract @interface Trainable {

public abstract String value();

}

/**
 * A class name paired with its label color.
 *
 * @author codistmonk (creation 2015-02-16)
 */
@PropertyOrdering({ "name", "label" })
public static final class ClassDescription implements XMLSerializable {

private String name = "class";

// Packed ARGB int; defaults to opaque black.
private int label = 0xFF000000;

@Override
public final Element toXML(final Document document, final Map<Object, Integer> ids) {
final Element result = XMLSerializable.super.toXML(document, ids);

result.setAttribute("name", this.getName());
result.setAttribute("label", this.getLabelAsString());

return result;
}

@Override
public final ClassDescription fromXML(final Element xml, final Map<Integer, Object> objects) {
XMLSerializable.super.fromXML(xml, objects);

this.setName(xml.getAttribute("name"));
this.setLabel(xml.getAttribute("label"));

return this;
}

@StringGetter
@PropertyGetter("name")
public final String getName() {
return this.name;
}

@PropertySetter("name")
public final Pipeline.ClassDescription setName(final String name) {
this.name = name;
return this;
}

public final int getLabel() {
return this.label;
}

public final Pipeline.ClassDescription setLabel(final int label) {
this.label = label;
return this;
}

@PropertyGetter("label")
public final String
getLabelAsString() {
// e.g. 0xFF000000 -> "#FF000000"
return "#" + Integer.toHexString(this.getLabel()).toUpperCase(Locale.ENGLISH);
}

@PropertySetter("label")
public final Pipeline.ClassDescription setLabel(final String labelAsString) {
// Long.parseLong avoids the overflow Integer.parseInt would hit on 8 hex digits.
return this.setLabel((int) Long.parseLong(labelAsString.substring(1), 16));
}

private static final long serialVersionUID = 4974707407567297906L;

}

/**
 * An image path plus a rectangular region used as training data.
 *
 * @author codistmonk (creation 2015-02-17)
 */
@PropertyOrdering({ "image", "bounds" })
public static final class TrainingField implements XMLSerializable {

private String imagePath = "";

private final Rectangle bounds = new Rectangle();

@Override
public final Element toXML(final Document document, final Map<Object, Integer> ids) {
final Element result = XMLSerializable.super.toXML(document, ids);

result.setAttribute("imagePath", this.getImagePath());
result.setAttribute("bounds", this.getBoundsAsString());

return result;
}

@Override
public final TrainingField fromXML(final Element xml, final Map<Integer, Object> objects) {
XMLSerializable.super.fromXML(xml, objects);

this.setImagePath(xml.getAttribute("imagePath"));
this.setBounds(xml.getAttribute("bounds"));

return this;
}

@PropertyGetter("image")
public final String getImagePath() {
return this.imagePath;
}

@PropertySetter("image")
public final Pipeline.TrainingField setImagePath(final String imagePath) {
this.imagePath = imagePath;
return this;
}

public final Rectangle getBounds() {
return this.bounds;
}

// Bounds are externalized as "x,y,width,height".
@PropertyGetter("bounds")
public final String getBoundsAsString() {
return join(",", this.getBounds().x, this.getBounds().y, this.getBounds().width, this.getBounds().height);
}

@PropertySetter("bounds")
public final Pipeline.TrainingField setBounds(final String boundsAsString) {
final int[] bounds = Arrays.stream(boundsAsString.split(",")).mapToInt(Integer::parseInt).toArray();

this.getBounds().setBounds(bounds[0], bounds[1], bounds[2], bounds[3]);

return this;
}

@Override
public final String toString() {
return new File(this.getImagePath()).getName() + "[" + this.getBoundsAsString() + "]";
}

private static final long serialVersionUID = 847822079141878928L;

}

/**
 * In-memory counterpart of TrainingField: loaded image, label image, and region.
 *
 * @author codistmonk (creation 2015-02-27)
 */
static final class ConcreteTrainingField implements Serializable {

private final Image2D image;

private final Image2D labels;

private final Rectangle bounds;

public ConcreteTrainingField(final Image2D image, final Image2D labels) {
// Defaults to the whole image.
this(image, labels, new Rectangle(image.getWidth(), image.getHeight()));
}

public ConcreteTrainingField(final Image2D image, final Image2D labels, final Rectangle bounds) {
this.image = image;
this.labels = labels;
this.bounds = bounds;
}

public final Image2D getImage() {
return this.image;
}

public final Image2D getLabels() {
return this.labels;
}

public final Rectangle getBounds() {
return this.bounds;
}

private static final long serialVersionUID = 1918328132237430637L;

}

/**
 * Iterates one @Trainable parameter over its candidate values via reflection.
 *
 * @author codistmonk (creation 2015-03-09)
 */
static final class ParameterTraining implements Serializable {

private final Object object;

private final Method accessor;

// Non-null when the accessor returns a Map: the entry being trained.
private final Object key;

private final int[] candidates;

private int currentIndex;

public ParameterTraining(final Object object, final Method accessor, final Object key, final int[] candidates) {
this.object = object;
this.accessor = accessor;
this.key = key;
this.candidates = candidates;
}

public final int getCandidateCount() {
return this.candidates.length;
}

public final boolean hasNext() {
return this.getCurrentIndex() + 1 < this.getCandidateCount();
}

public final int getCurrentIndex() {
return this.currentIndex;
}

public final void setCurrentIndex(final int currentIndex) {
this.currentIndex = currentIndex;
this.set();
}

public final void next() {
this.setCurrentIndex(this.getCurrentIndex() + 1);
}

// Pushes the current candidate into the target: through a 1-arg setter (key == null),
// or by writing into the map returned by a 0-arg accessor (key != null).
public final void set() {
try {
if (this.key == null) {
this.accessor.invoke(this.object, Integer.toString(this.candidates[this.currentIndex]));
} else {
((Map<Object, Object>) this.accessor.invoke(this.object)).put(this.key,
this.candidates[this.currentIndex]); } } catch (final Exception exception) { throw Tools.unchecked(exception); } } @Override public final String toString() { return Arrays.toString(this.candidates) + "@" + this.currentIndex; } private static final long serialVersionUID = 1071010309738220354L; public static final int[] getCandidates(final Object object, final Method setter) { try { final Method trainer = getTrainer(object, setter.getAnnotation(Trainable.class)); return new CommandLineArgumentsParser("range", trainer.invoke(object).toString()).get("range", 1); } catch (final Exception exception) { throw unchecked(exception); } } public static final Method getTrainer(final Object object, final Trainable trainable) { for (final Method method : object.getClass().getMethods()) { final PropertyGetter getter = method.getAnnotation(PropertyGetter.class); if (getter != null && getter.value().equals(trainable.value())) { return method; } } return null; } public static final void addTo(final List<ParameterTraining> parameterTrainings, final Object object, final Method accessor) { if (accessor.getParameterCount() == 1) { parameterTrainings.add(new ParameterTraining(object, accessor, null, getCandidates(object, accessor))); } else if (accessor.getParameterCount() == 0 && Map.class.isAssignableFrom(accessor.getReturnType())) { try { final Map<String, ?> map = (Map<String, ?>) accessor.invoke(object); final Method trainer = getTrainer(object, accessor.getAnnotation(Trainable.class)); final Map<?, String> trainerMap = parseRangeMap(trainer.invoke(object).toString(), map.keySet()); for (final Object key : map.keySet()) { parameterTrainings.add(new ParameterTraining(object, accessor, key, new CommandLineArgumentsParser("range", trainerMap.get(key)).get("range", 1))); } } catch (final Exception exception) { throw unchecked(exception); } } else { Tools.debugError(accessor); Tools.debugError(accessor.getParameterCount()); throw new IllegalArgumentException(); } } } /** * @author 
codistmonk (creation 2015-03-14) */ public static enum ComputationStatus { COMPUTING, CANCELED, IDLE; } /** * @author codistmonk (creation 2015-03-15) */ public static final class Result implements XMLSerializable { private long milliseconds; private Map<Integer, Map<Integer, AtomicLong>> confusionMatrix; @Override public final Element toXML(final Document document, final Map<Object, Integer> ids) { final Element result = XMLSerializable.super.toXML(document, ids); result.setAttribute("milliseconds", Long.toString(this.getMilliseconds())); result.appendChild(XMLSerializable.objectToXML(this.getConfusionMatrix(), document, ids)); return result; } @Override public Result fromXML(final Element xml, final Map<Integer, Object> objects) { XMLSerializable.super.fromXML(xml, objects); this.setMilliseconds(Long.parseLong(xml.getAttribute("milliseconds"))); this.confusionMatrix = XMLSerializable.objectFromXML((Element) xml.getFirstChild(), objects); return this; } public final long getMilliseconds() { return this.milliseconds; } public final void setMilliseconds(final long milliseconds) { this.milliseconds = milliseconds; } public final Map<Integer, Map<Integer, AtomicLong>> getConfusionMatrix() { if (this.confusionMatrix == null) { this.confusionMatrix = new HashMap<>(); } return this.confusionMatrix; } private static final long serialVersionUID = -7054191235319173687L; } }
package peergos.tests;

import org.junit.*;
import peergos.corenode.CoreNode;
import peergos.corenode.UserPublicKeyLink;
import peergos.crypto.*;
import peergos.crypto.asymmetric.*;
import peergos.crypto.asymmetric.curve25519.*;
import peergos.crypto.random.*;

import java.time.LocalDate;
import java.util.*;

/**
 * Tests for {@link UserPublicKeyLink}: serialization round-trips and the
 * core-node username registration / key-rotation life cycle.
 */
public class UserPublicKeyLinkTests {

    @BeforeClass
    public static void init() throws Exception {
        PublicSigningKey.addProvider(PublicSigningKey.Type.Ed25519, new JavaEd25519());
        // use insecure random otherwise tests take ages
        UserTests.setFinalStatic(TweetNaCl.class.getDeclaredField("prng"), new Random(1));
    }

    /** A freshly created single-link username claim must survive a serialization round trip. */
    @Test
    public void createInitial() {
        User user = User.random(new SafeRandom.Java(), new JavaEd25519(), new JavaCurve25519());
        UserPublicKeyLink.UsernameClaim node = UserPublicKeyLink.UsernameClaim.create("someuser", user, LocalDate.now().plusYears(2));
        UserPublicKeyLink upl = new UserPublicKeyLink(user.toUserPublicKey(), node);
        testSerialization(upl);
    }

    /**
     * Asserts that {@code fromByteArray} is the inverse of {@code toByteArray} for the given link.
     *
     * @param link the link to round-trip
     * @throws IllegalStateException if the re-serialized bytes differ from the original
     */
    public void testSerialization(UserPublicKeyLink link) {
        byte[] serialized1 = link.toByteArray();
        UserPublicKeyLink upl2 = UserPublicKeyLink.fromByteArray(link.owner, serialized1);
        byte[] serialized2 = upl2.toByteArray();
        if (!Arrays.equals(serialized1, serialized2))
            throw new IllegalStateException("toByteArray not inverse of fromByteArray!");
    }

    /** Every link in a key-change chain must survive a serialization round trip. */
    @Test
    public void createChain() {
        User oldUser = User.random(new SafeRandom.Java(), new JavaEd25519(), new JavaCurve25519());
        User newUser = User.random(new SafeRandom.Java(), new JavaEd25519(), new JavaCurve25519());
        List<UserPublicKeyLink> links = UserPublicKeyLink.createChain(oldUser, newUser, "someuser", LocalDate.now().plusYears(2));
        links.forEach(link -> testSerialization(link));
    }

    /**
     * End-to-end core node test: register a username, update the claim expiry,
     * rotate to a new key pair, extend the new key's expiry, look the username up,
     * and finally verify that a claim with an unrelated key is rejected.
     *
     * Previously the boolean results of every updateChain call were silently
     * discarded, so the test could pass even when registration failed; each
     * result is now checked.
     */
    @Test
    public void coreNode() throws Exception {
        CoreNode core = CoreNode.getDefault();
        User user = User.insecureRandom();
        String username = "someuser";

        // register the username
        UserPublicKeyLink.UsernameClaim node = UserPublicKeyLink.UsernameClaim.create(username, user, LocalDate.now().plusYears(2));
        UserPublicKeyLink upl = new UserPublicKeyLink(user.toUserPublicKey(), node);
        boolean success = core.updateChain(username, Arrays.asList(upl));
        if (!success)
            throw new IllegalStateException("Initial registration of " + username + " failed!");
        List<UserPublicKeyLink> chain = core.getChain(username);
        if (chain.size() != 1 || !chain.get(0).equals(upl))
            throw new IllegalStateException("Retrieved chain element different " + chain + " != " + Arrays.asList(upl));

        // now change the expiry
        UserPublicKeyLink.UsernameClaim node2 = UserPublicKeyLink.UsernameClaim.create(username, user, LocalDate.now().plusYears(3));
        UserPublicKeyLink upl2 = new UserPublicKeyLink(user.toUserPublicKey(), node2);
        boolean success2 = core.updateChain(username, Arrays.asList(upl2));
        if (!success2)
            throw new IllegalStateException("Expiry update of " + username + " failed!");
        List<UserPublicKeyLink> chain2 = core.getChain(username);
        if (chain2.size() != 1 || !chain2.get(0).equals(upl2))
            throw new IllegalStateException("Retrieved chain element different " + chain2 + " != " + Arrays.asList(upl2));

        // now change the keys
        User user2 = User.insecureRandom();
        List<UserPublicKeyLink> chain3 = UserPublicKeyLink.createChain(user, user2, username, LocalDate.now().plusWeeks(1));
        boolean success3 = core.updateChain(username, chain3);
        if (!success3)
            throw new IllegalStateException("Key rotation of " + username + " failed!");
        List<UserPublicKeyLink> chain3Retrieved = core.getChain(username);
        if (!chain3.equals(chain3Retrieved))
            throw new IllegalStateException("Retrieved chain element different");

        // update the expiry at the end of the chain
        UserPublicKeyLink.UsernameClaim node4 = UserPublicKeyLink.UsernameClaim.create(username, user2, LocalDate.now().plusWeeks(2));
        UserPublicKeyLink upl4 = new UserPublicKeyLink(user2.toUserPublicKey(), node4);
        List<UserPublicKeyLink> chain4 = Arrays.asList(upl4);
        boolean success4 = core.updateChain(username, chain4);
        if (!success4)
            throw new IllegalStateException("Tail expiry update of " + username + " failed!");
        List<UserPublicKeyLink> chain4Retrieved = core.getChain(username);
        if (!chain4.equals(Arrays.asList(chain4Retrieved.get(chain4Retrieved.size() - 1))))
            throw new IllegalStateException("Retrieved chain element different after expiry update");

        // check username lookup
        String uname = core.getUsername(user2.toUserPublicKey());
        if (!uname.equals(username))
            throw new IllegalStateException("Returned username is different! " + uname + " != " + username);

        // try to claim the same username with a different key
        User user3 = User.insecureRandom();
        UserPublicKeyLink.UsernameClaim node3 = UserPublicKeyLink.UsernameClaim.create(username, user3, LocalDate.now().plusYears(2));
        UserPublicKeyLink upl3 = new UserPublicKeyLink(user3.toUserPublicKey(), node3);
        try {
            core.updateChain(username, Arrays.asList(upl3));
            throw new RuntimeException("Should have failed before here!");
        } catch (IllegalStateException e) {
            // expected: an unrelated key must not be able to hijack an existing username
        }
    }
}
package org.eclipse.kura.linux.net.util;

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.List;
import java.util.StringTokenizer;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

import org.eclipse.kura.KuraErrorCode;
import org.eclipse.kura.KuraException;
import org.eclipse.kura.core.linux.util.LinuxProcessUtil;
import org.eclipse.kura.core.net.WifiAccessPointImpl;
import org.eclipse.kura.core.net.util.NetworkUtil;
import org.eclipse.kura.core.util.ProcessUtil;
import org.eclipse.kura.net.wifi.WifiAccessPoint;
import org.eclipse.kura.net.wifi.WifiMode;
import org.eclipse.kura.net.wifi.WifiSecurity;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Scans for wifi access points by running {@code iw dev <iface> scan} and
 * parsing its textual output.
 *
 * Fixes relative to the previous revision:
 * - the last access point in the output was given a {@code null} mode while
 *   all others got {@code WifiMode.MASTER}; all APs are now treated the same;
 * - {@code m_task} was erroneously {@code static} although it belongs to a
 *   single scan invocation;
 * - exceptions are logged through SLF4J instead of {@code printStackTrace()};
 * - the output reader is closed on every exit path of {@code parse()}.
 */
public class iwScanTool {

    private static final Logger s_logger = LoggerFactory.getLogger(iwScanTool.class);

    // Serializes scans across all instances: only one 'iw scan' at a time.
    private static final Object s_lock = new Object();

    private String m_ifaceName;          // interface to scan, e.g. "wlan0"
    private ExecutorService m_executor;  // runs the external scan command
    private Future<?> m_task;            // the in-flight scan task (per instance, not static)
    private int m_timeout;               // scan timeout in seconds
    private Process m_process;           // the running 'iw' process
    private boolean m_status;            // true iff the scan command exited with status 0
    private String m_errmsg;             // failure description for the thrown KuraException

    public iwScanTool() {
        m_timeout = 20;
        m_errmsg = "";
    }

    public iwScanTool(String ifaceName) {
        this();
        m_ifaceName = ifaceName;
        m_errmsg = "";
        m_status = false;
    }

    public iwScanTool(String ifaceName, int tout) {
        this(ifaceName);
        m_timeout = tout;
    }

    /**
     * Runs a wifi scan on the configured interface.
     *
     * Brings the interface up if necessary, executes {@code iw dev <iface> scan}
     * in a background thread, enforces the timeout by killing the external
     * process, then parses the command output.
     *
     * @return the list of discovered access points
     * @throws KuraException if the interface could not be prepared, the scan
     *         command failed or timed out, or its output could not be parsed
     */
    public List<WifiAccessPoint> scan() throws KuraException {
        List<WifiAccessPoint> wifiAccessPoints = new ArrayList<WifiAccessPoint>();
        synchronized (s_lock) {
            StringBuilder sb = new StringBuilder();
            Process pr = null;
            try {
                if (!LinuxNetworkUtil.isUp(m_ifaceName)) {
                    // activate the interface
                    sb.append("ip link set ").append(m_ifaceName).append(" up");
                    pr = ProcessUtil.exec(sb.toString());
                    // remove the previous ip address (needed on mgw)
                    sb = new StringBuilder();
                    sb.append("ip addr flush dev ").append(m_ifaceName);
                    pr = ProcessUtil.exec(sb.toString());
                }
            } catch (Exception e) {
                throw new KuraException(KuraErrorCode.INTERNAL_ERROR, e);
            } finally {
                if (pr != null) {
                    ProcessUtil.destroy(pr);
                }
            }

            long timerStart = System.currentTimeMillis();
            m_executor = Executors.newSingleThreadExecutor();
            m_task = m_executor.submit(new Runnable() {
                @Override
                public void run() {
                    int stat = -1;
                    m_process = null;
                    StringBuilder cmd = new StringBuilder();
                    cmd.append("iw dev ").append(m_ifaceName).append(" scan");
                    s_logger.info("scan() :: executing: {}", cmd.toString());
                    m_status = false;
                    try {
                        m_process = ProcessUtil.exec(cmd.toString());
                        stat = m_process.waitFor();
                        s_logger.info("scan() :: {} command returns status={}", cmd.toString(), stat);
                        if (stat == 0) {
                            m_status = true;
                        } else {
                            s_logger.error("scan() :: failed to execute {} error code is {}", cmd.toString(), stat);
                            s_logger.error("scan() :: STDERR: " + LinuxProcessUtil.getInputStreamAsString(m_process.getErrorStream()));
                        }
                    } catch (Exception e) {
                        m_errmsg = "exception executing scan command";
                        s_logger.error("scan() :: exception executing scan command", e);
                    }
                }
            });

            // Poll for completion; on timeout, kill the external process and cancel the task.
            while (!m_task.isDone()) {
                if (System.currentTimeMillis() > timerStart + m_timeout * 1000) {
                    s_logger.warn("scan() :: scan timeout");
                    sb = new StringBuilder();
                    sb.append("iw dev ").append(m_ifaceName).append(" scan");
                    try {
                        int pid = LinuxProcessUtil.getPid(sb.toString());
                        if (pid >= 0) {
                            s_logger.warn("scan() :: scan timeout :: killing pid {}", pid);
                            LinuxProcessUtil.kill(pid);
                        }
                    } catch (Exception e) {
                        s_logger.warn("scan() :: failed to kill timed-out scan process", e);
                    }
                    m_task.cancel(true);
                    m_task = null;
                    m_errmsg = "timeout executing scan command";
                    break;
                }
                try {
                    Thread.sleep(500);
                } catch (InterruptedException e) {
                    // deliberately keep polling until the task finishes or the timeout fires
                }
            }

            if (!m_status || (m_process == null)) {
                throw new KuraException(KuraErrorCode.INTERNAL_ERROR, m_errmsg);
            }

            s_logger.info("scan() :: the 'iw scan' command executed successfully, parsing output ...");
            try {
                wifiAccessPoints = parse();
            } catch (Exception e) {
                throw new KuraException(KuraErrorCode.INTERNAL_ERROR, e, "error parsing scan results");
            } finally {
                s_logger.info("scan() :: destroying scan process ...");
                ProcessUtil.destroy(m_process);
                m_process = null;
                s_logger.info("scan() :: Terminating WifiMonitor Thread ...");
                m_executor.shutdownNow();
                try {
                    m_executor.awaitTermination(2, TimeUnit.SECONDS);
                } catch (InterruptedException e) {
                    s_logger.warn("Interrupted", e);
                }
                s_logger.info("scan() :: 'iw scan' thread terminated? - {}", m_executor.isTerminated());
                m_executor = null;
            }
        }
        return wifiAccessPoints;
    }

    /**
     * Parses the stdout of the completed 'iw' process into access points.
     * One AP is accumulated per "BSS ..." stanza; the accumulator is flushed
     * when the next stanza begins and once more at end of stream.
     */
    private List<WifiAccessPoint> parse() throws Exception {
        List<WifiAccessPoint> wifiAccessPoints = new ArrayList<WifiAccessPoint>();
        BufferedReader br = new BufferedReader(new InputStreamReader(m_process.getInputStream()));
        try {
            String line = null;
            String ssid = null;
            List<Long> bitrate = null;
            long frequency = -1;
            byte[] hardwareAddress = null;
            EnumSet<WifiSecurity> rsnSecurity = null;
            int strength = -1;
            EnumSet<WifiSecurity> wpaSecurity = null;
            List<String> capabilities = null;

            while ((line = br.readLine()) != null) {
                if (line.startsWith("scan aborted!")) {
                    s_logger.warn("parse() :: scan operation was aborted");
                    throw new KuraException(KuraErrorCode.INTERNAL_ERROR, "iw scan operation was aborted");
                }
                if (line.startsWith("BSS")) {
                    // new AP stanza: flush the previous one, if any
                    if (ssid != null) {
                        wifiAccessPoints.add(buildAccessPoint(ssid, bitrate, frequency, hardwareAddress,
                                rsnSecurity, strength, wpaSecurity, capabilities));
                    }
                    // reset the accumulator
                    ssid = null;
                    bitrate = null;
                    frequency = -1;
                    hardwareAddress = null;
                    rsnSecurity = null;
                    strength = -1;
                    wpaSecurity = null;
                    capabilities = null;
                    // parse out the MAC
                    StringTokenizer st = new StringTokenizer(line, " ");
                    st.nextToken(); // eat 'BSS'
                    String macAddressString = st.nextToken();
                    if (macAddressString != null) {
                        hardwareAddress = NetworkUtil.macToBytes(macAddressString);
                    }
                } else if (line.contains("freq: ")) {
                    StringTokenizer st = new StringTokenizer(line, " ");
                    st.nextToken(); // eat 'freq:'
                    frequency = Long.parseLong(st.nextToken());
                } else if (line.contains("SSID: ")) {
                    ssid = line.trim().substring(5).trim();
                } else if (line.contains("RSN:")) {
                    rsnSecurity = EnumSet.noneOf(WifiSecurity.class);
                    parseSecuritySection(br, rsnSecurity, "RSN");
                } else if (line.contains("WPA:")) {
                    wpaSecurity = EnumSet.noneOf(WifiSecurity.class);
                    parseSecuritySection(br, wpaSecurity, "WPA");
                } else if (line.contains("Supported rates: ")) {
                    // e.g. "Supported rates: 1.0* 2.0* 5.5* 11.0* 18.0 24.0 36.0 54.0"
                    if (bitrate == null) {
                        bitrate = new ArrayList<Long>();
                    }
                    parseBitrates(line, bitrate);
                } else if (line.contains("Extended supported rates: ")) {
                    // e.g. "Extended supported rates: 6.0 9.0 12.0 48.0"
                    if (bitrate == null) {
                        bitrate = new ArrayList<Long>();
                    }
                    parseBitrates(line, bitrate);
                } else if (line.contains("signal:")) {
                    try {
                        // signal: -56.00 dBm
                        StringTokenizer st = new StringTokenizer(line, " ");
                        st.nextToken(); // eat 'signal:'
                        final String strengthRaw = st.nextToken();
                        if (strengthRaw.contains("/")) {
                            // Could also be of format 39/100
                            final String[] parts = strengthRaw.split("/");
                            strength = (int) Float.parseFloat(parts[0]);
                        } else {
                            strength = Math.abs((int) Float.parseFloat(strengthRaw));
                        }
                    } catch (RuntimeException e) {
                        s_logger.debug("Cannot parse signal strength " + line);
                    }
                } else if (line.contains("capability:")) {
                    capabilities = new ArrayList<String>();
                    line = line.substring("capability:".length()).trim();
                    StringTokenizer st = new StringTokenizer(line, " ");
                    while (st.hasMoreTokens()) {
                        capabilities.add(st.nextToken());
                    }
                }
            }

            // store the last AP (previously it was given a null mode; see buildAccessPoint)
            if (ssid != null) {
                wifiAccessPoints.add(buildAccessPoint(ssid, bitrate, frequency, hardwareAddress,
                        rsnSecurity, strength, wpaSecurity, capabilities));
            }
        } finally {
            br.close(); // close on every exit path, including parse exceptions
        }
        return wifiAccessPoints;
    }

    /** Assembles one WifiAccessPointImpl from the accumulated stanza fields. */
    private WifiAccessPointImpl buildAccessPoint(String ssid, List<Long> bitrate, long frequency,
            byte[] hardwareAddress, EnumSet<WifiSecurity> rsnSecurity, int strength,
            EnumSet<WifiSecurity> wpaSecurity, List<String> capabilities) {
        WifiAccessPointImpl wifiAccessPoint = new WifiAccessPointImpl(ssid);
        wifiAccessPoint.setBitrate(bitrate);
        wifiAccessPoint.setFrequency(frequency);
        wifiAccessPoint.setHardwareAddress(hardwareAddress);
        // FIXME - is this right? - always MASTER - or maybe AD-HOC too?
        wifiAccessPoint.setMode(WifiMode.MASTER);
        wifiAccessPoint.setRsnSecurity(rsnSecurity);
        wifiAccessPoint.setStrength(strength);
        wifiAccessPoint.setWpaSecurity(wpaSecurity);
        if ((capabilities != null) && (capabilities.size() > 0)) {
            wifiAccessPoint.setCapabilities(capabilities);
        }
        return wifiAccessPoint;
    }

    /**
     * Parses one "RSN:" or "WPA:" security section (the two were previously
     * duplicated verbatim), adding the discovered ciphers and key-management
     * suites to the given set. Stops once group ciphers, pairwise ciphers and
     * authentication suites have all been seen, or at end of stream.
     *
     * @param br          reader positioned just after the section header line
     * @param security    target set for the discovered WifiSecurity values
     * @param sectionName "RSN" or "WPA", used only for debug logging
     */
    private void parseSecuritySection(BufferedReader br, EnumSet<WifiSecurity> security, String sectionName)
            throws Exception {
        boolean foundGroup = false;
        boolean foundPairwise = false;
        boolean foundAuthSuites = false;
        String line = null;
        while ((line = br.readLine()) != null) {
            if (line.contains("Group cipher:")) {
                foundGroup = true;
                if (line.contains("CCMP")) {
                    security.add(WifiSecurity.GROUP_CCMP);
                }
                if (line.contains("TKIP")) {
                    security.add(WifiSecurity.GROUP_TKIP);
                }
                if (line.contains("WEP104")) {
                    security.add(WifiSecurity.GROUP_WEP104);
                }
                if (line.contains("WEP40")) {
                    security.add(WifiSecurity.GROUP_WEP40);
                }
            } else if (line.contains("Pairwise ciphers:")) {
                foundPairwise = true;
                if (line.contains("CCMP")) {
                    security.add(WifiSecurity.PAIR_CCMP);
                }
                if (line.contains("TKIP")) {
                    security.add(WifiSecurity.PAIR_TKIP);
                }
                if (line.contains("WEP104")) {
                    security.add(WifiSecurity.PAIR_WEP104);
                }
                if (line.contains("WEP40")) {
                    security.add(WifiSecurity.PAIR_WEP40);
                }
            } else if (line.contains("Authentication suites:")) {
                foundAuthSuites = true;
                if (line.contains("802_1X")) {
                    security.add(WifiSecurity.KEY_MGMT_802_1X);
                }
                if (line.contains("PSK")) {
                    security.add(WifiSecurity.KEY_MGMT_PSK);
                }
            } else {
                s_logger.debug("Ignoring line in " + sectionName + ": " + line);
            }
            if (foundGroup && foundPairwise && foundAuthSuites) {
                break;
            }
        }
    }

    /**
     * Extracts the numeric rate tokens (Mbps, '*' markers stripped by the
     * tokenizer delimiters) from a "[Extended ]supported rates:" line and adds
     * them to the list as bits per second.
     */
    private void parseBitrates(String line, List<Long> bitrate) {
        StringTokenizer st = new StringTokenizer(line, " *");
        while (st.hasMoreTokens()) {
            String token = st.nextToken();
            // keep only numeric tokens; skips the "Supported"/"Extended"/"rates:" label words
            if (token.matches("\\d+(\\.\\d+)?")) {
                bitrate.add((long) (Float.parseFloat(token) * 1000000));
            }
        }
    }
}
package picoded.page.builder; import java.io.*; import java.util.*; // JMTE inner functions add-on import com.floreysoft.jmte.*; // Sub modules useds import picoded.enums.*; import picoded.conv.*; import picoded.struct.*; import picoded.fileUtils.*; import picoded.servlet.*; import picoded.servletUtils.*; /// Core class that handle the conversion and copying process. /// NOT the folder iteration /// @TODO /// + Minify the rawPageName/index.html public class PageBuilderCore { // Local variables /// The folder to get the various page definition from public File pagesFolder = null; /// The output folder to process the output to (if given) public File outputFolder = null; /// The local JMTE reference public JMTE jmteObj = null; /// The URI root context for built files public String uriRootPrefix = "./"; /// LESS compiler protected LessToCss less = new LessToCss(); /// Dependency chain tracking protected GenericConvertListSet<String> dependencyTracker = new GenericConvertListSet<String>(); /// Components filter utility protected PageComponentFilter componentsFilter = null; // Constructor /// Constructor, with the folders defined /// @param The various pages definition folder public PageBuilderCore(File inPagesFolder) { pagesFolder = inPagesFolder; componentsFilter = new PageComponentFilter(this); } /// Constructor, with the folders defined /// @param The various pages definition folder public PageBuilderCore(String inPagesFolder) { this(new File(inPagesFolder)); } /// Constructor, with the folders defined /// @param The various pages definition folder /// @param The target folder to build the result into public PageBuilderCore(File inPagesFolder, File inOutputFolder) { pagesFolder = inPagesFolder; outputFolder = inOutputFolder; componentsFilter = new PageComponentFilter(this); } /// Constructor, with the folders defined /// @param The various pages definition folder /// @param The target folder to build the result into public PageBuilderCore(String inPagesFolder, String 
inOutputFolder) { this(new File(inPagesFolder), new File(inOutputFolder)); } // Public vars access /// @returns Gets the protected JMTE object, used internally. /// This is autocreated if not set public JMTE getJMTE() { if (jmteObj == null) { setJMTE(new JMTE()); } return jmteObj; } /// Overides the default (if loaded) JMTE object. public void setJMTE(JMTE set) { jmteObj = set; } /// @returns Gets the protected uriRootPrefix, used internally public String getUriRootPrefix() { return uriRootPrefix; } /// Overides the uriRootPrefix. public void setUriRootPrefix(String set) { if (set == null || set.length() <= 0) { set = "/"; } if (!set.endsWith("/")) { set = set + "/"; } uriRootPrefix = set; } // Utility functions /// Utility to get path safe rawPageName /// @param rawPageName used to generate the vars /// @returns The rawPageName which is subpath safe protected String safePageName(String rawPageName) { if (rawPageName.endsWith("/")) { rawPageName = rawPageName.substring(0, rawPageName.length() - 1); } return rawPageName.replaceAll("/", "-"); } /// Utility to get page frame ID /// @param rawPageName used to generate the vars /// @returns The page frame ID used (that is character safe?) protected String pageFrameID(String rawPageName) { return "page-" + safePageName(rawPageName); } /// Utility to get page frame ID : legacy format specific to IFAM (to phase out) /// @param rawPageName used to generate the vars /// @returns The page frame ID used (that is character safe?) 
protected String pageFrameID_ifamLegacy(String rawPageName) { return "pageFrame_" + safePageName(rawPageName); } /// Gets the requested page template.json from the following priority order /// 1) The page path itself /// 2) Any parent path folder /// 3) The root folder protected Map<String,Object> getTemplateJson(String rawPageName) { String resStr = null; String fileName = "template.json"; List<Map<String,Object>> templateList = new ArrayList<Map<String,Object>>(); // Page path itself templateList.add( GenericConvert.toStringMap( FileUtils.readFileToString_withFallback(new File(pagesFolder, rawPageName + "/" + fileName), null /*"UTF-8"*/, null), null ) ); // Gets the parent paths (if valid) String[] splitNames = splitPageName(rawPageName); if (splitNames.length <= 1) { // There were no parent folders, skip the parent paths checks } else { // Go one "directory" parent upward splitNames = ArrayConv.subarray(splitNames, 0, splitNames.length - 1); // Breaks once root directory is reached / or result found while (splitNames.length > 0) { // Join the name path, and get the file templateList.add( GenericConvert.toStringMap( FileUtils.readFileToString_withFallback(new File(pagesFolder, String.join("/", splitNames) + "/" + fileName), null /*"UTF-8"*/, null), null ) ); // Go one "directory" parent upward splitNames = ArrayConv.subarray(splitNames, 0, splitNames.length - 1); } } // Get from the root folder (v2) templateList.add( GenericConvert.toStringMap( FileUtils.readFileToString_withFallback(new File(pagesFolder, fileName), null /*"UTF-8"*/, null), null ) ); // Flips the list Collections.reverse(templateList); Map<String,Object> res = new HashMap<String,Object>(); for(Map<String,Object> template : templateList) { if(template != null) { res.putAll(template); } } return res; } /// Gets and extract out a page specific configuration. 
In its respective page.json file /// @param rawPageName used to generate the vars /// @returns The data hash map protected Map<String,Object> pageConfigFetch(String rawPageName) { // Basic filter safety rawPageName = filterRawPageName(rawPageName); // Get the config from rawPageName folder itself return GenericConvert.toStringMap( FileUtils.readFileToString_withFallback(new File(pagesFolder, rawPageName + "/page.json" ), null /*"UTF-8"*/, "{}") ); } /// Generates the needed map string template for the respective page /// @param rawPageName used to generate the vars /// @returns The data map used inside JMTE protected Map<String, Object> pageJMTEvars(String rawPageName) { // Basic filter safety rawPageName = filterRawPageName(rawPageName); // Initialize to root if not previously set if (uriRootPrefix == null) { uriRootPrefix = "."; } // Removes trailing /, unless its the only character if (uriRootPrefix.length() > 1 && uriRootPrefix.endsWith("/")) { uriRootPrefix = uriRootPrefix.substring(0, uriRootPrefix.length() - 1); } /*else { uriRootPrefix = ""; }*/ // Uri slash fixing (in case of double slash) String pageURI = uriRootPrefix + "/" + rawPageName; while (pageURI.indexOf(" pageURI = pageURI.replaceAll(" } // Get the template json stack Map<String, Object> ret = getTemplateJson(rawPageName); ret.put("PagesRootURI", uriRootPrefix); ret.put("PageRootURI", uriRootPrefix); //because FAportal and orgeva ret.put("PageURI", pageURI); ret.put("PageNameRaw", rawPageName); ret.put("PageName", safePageName(rawPageName)); ret.put("PageClass", pageFrameID(rawPageName)); ret.put("Page", buildPageComponentMap()); ret.put("PageConfig", pageConfigFetch(rawPageName)); // Legacy to phase out // @TODO : Phase Out ret.put("PageFrameID", pageFrameID_ifamLegacy(rawPageName)); ret.put("PageComponent", buildPageComponentMap()); return ret; } /// HTML specific version of getCommonFile protected String getCommonPrefixOrSuffixHtml(String rawPageName, String fixType) { return 
getCommonFile(rawPageName, fixType + ".html"); } /// Filters the rawPageName, into its valid form (remove any pre/suf-fix of slashes) protected String filterRawPageName(String rawPageName) { if(rawPageName == null) { rawPageName = ""; } rawPageName = rawPageName.trim(); rawPageName = rawPageName.replaceAll("\\.","/"); while (rawPageName.indexOf(" rawPageName = rawPageName.replaceAll(" } while (rawPageName.startsWith("/")) { rawPageName = rawPageName.substring(1); } while (rawPageName.endsWith("/")) { rawPageName = rawPageName.substring(0, rawPageName.length()); } return rawPageName; } /// Takes a pagename, and split in its pathing. /// Used to find parent folders protected String[] splitPageName(String rawPageName) { return filterRawPageName(rawPageName).split("/"); } /// Gets the requested page prefix / suffix from the following priority order /// 1) The page path itself /// 2) Any parent path folder /// 3) The root folder (v3) /// 4) The common folder (v2) /// 5) The index folder (legacy support, do not use) protected String getCommonFile(String rawPageName, String fileName) { String res = null; // Get from the rawPageName folder itself (v2) res = FileUtils.readFileToString_withFallback(new File(pagesFolder, rawPageName + "/" + fileName), null /*"UTF-8"*/, null); // Gets the parent paths (if valid) if (res == null) { String[] splitNames = splitPageName(rawPageName); if (splitNames.length <= 1) { // There were no parent folders, skip the parent paths checks } else { // Go one "directory" parent upward splitNames = ArrayConv.subarray(splitNames, 0, splitNames.length - 1); // Breaks once root directory is reached / or result found while (res == null && splitNames.length > 0) { // Join the name path, and get the file res = FileUtils.readFileToString_withFallback(new File(pagesFolder, String.join("/", splitNames) + "/" + fileName), null /*"UTF-8"*/, null); // Go one "directory" parent upward splitNames = ArrayConv.subarray(splitNames, 0, splitNames.length - 1); } } } 
// Get from the root folder (v2) if (res == null) { res = FileUtils.readFileToString_withFallback(new File(pagesFolder, fileName), null /*"UTF-8"*/, null); } // Get from the common folder (v2) if (res == null) { res = FileUtils.readFileToString_withFallback(new File(pagesFolder, "common/" + fileName), null /*"UTF-8"*/, null); } // Legacy support (v1) get from index folder if (res == null) { res = FileUtils.readFileToString_withFallback(new File(pagesFolder, "index/" + fileName), null /*"UTF-8"*/, null); } // Fallbacks to blank if (res == null) { return ""; } return res; } // HTML handling /// Gets the prefix public String prefixHTML(String rawPageName) { return getJMTE().parseTemplate(getCommonPrefixOrSuffixHtml(rawPageName, "prefix"), pageJMTEvars(rawPageName)); } /// Gets the prefix public String suffixHTML(String rawPageName) { return getJMTE().parseTemplate(getCommonPrefixOrSuffixHtml(rawPageName, "suffix"), pageJMTEvars(rawPageName)); } /// Gets and returns a page frame raw string without going through the JMTE parser public String buildPageInnerRawHTML(String rawPageName) { // Depenency chain tracking rawPageName = filterRawPageName(rawPageName); dependencyTracker.add(rawPageName); String indexFileStr = FileUtils.readFileToString_withFallback(new File(pagesFolder, rawPageName + "/" + safePageName(rawPageName) + ".html"), null /*"UTF-8"*/, null); if (indexFileStr == null) { indexFileStr = FileUtils.readFileToString_withFallback(new File(pagesFolder, rawPageName + "/index.html"), null /*"UTF-8"*/, ""); } if ((indexFileStr = indexFileStr.trim()).length() == 0) { if (hasPageFile(rawPageName)) { //has file return ""; //this is a blank HTML file } return null; } return indexFileStr.toString(); } /// Gets and returns a page frame string, with its respective JMTE input vars? public String buildPageInnerHTML(String rawPageName) { return buildPageInnerHTML(rawPageName, null); } /// Gets and returns a page frame string, with its respective JMTE input vars? 
	/// Gets and returns a page frame string, parsed through JMTE.
	///
	/// @param  rawPageName   page path to load
	/// @param  jmteTemplate  JMTE variable map; null uses pageJMTEvars(rawPageName)
	/// @return parsed inner HTML, "" for a blank-but-present page file, null if no page file
	public String buildPageInnerHTML(String rawPageName, Map<String, Object> jmteTemplate) {
		// Prefer <safePageName>.html, falling back to index.html
		String indexFileStr = FileUtils.readFileToString_withFallback(new File(pagesFolder, rawPageName + "/" + safePageName(rawPageName) + ".html"), null /*"UTF-8"*/, null);
		if (indexFileStr == null) {
			indexFileStr = FileUtils.readFileToString_withFallback(new File(pagesFolder, rawPageName + "/index.html"), null /*"UTF-8"*/, "");
		}
		if ((indexFileStr = indexFileStr.trim()).length() == 0) {
			if (hasPageFile(rawPageName)) { //has file
				return ""; //this is a blank HTML file
			}
			return null;
		}
		// Fall back to the default variable map when none was supplied
		if( jmteTemplate == null ) {
			jmteTemplate = pageJMTEvars(rawPageName);
		}
		return getJMTE().parseTemplate(indexFileStr.toString(), jmteTemplate);
	}

	/// Get the page frame div header, this is used to do a "search replace" for script / css injection
	protected String pageFrameHeaderDiv(String rawPageName) {
		return "<div class='pageFrame " + pageFrameID(rawPageName) + " "+ pageFrameID_ifamLegacy(rawPageName)+ "' id='" + pageFrameID_ifamLegacy(rawPageName) + "'>\n";
	}

	/// Builds a rawPageName HTML frame (no injection string)
	public String buildPageFrame(String rawPageName) {
		return buildPageFrame(rawPageName, null);
	}

	/// Builds a rawPageName HTML frame: header div + optional injection + inner HTML + closing div.
	/// Returns null when the page has no inner HTML.
	public String buildPageFrame(String rawPageName, String injectionStr) {
		String innerHTML = buildPageInnerHTML(rawPageName, pageJMTEvars(rawPageName));
		if (innerHTML == null) {
			return null;
		}
		StringBuilder frame = new StringBuilder();
		frame.append(pageFrameHeaderDiv(rawPageName));
		if (injectionStr != null) {
			frame.append(injectionStr);
		}
		frame.append(innerHTML);
		frame.append("\n</div>\n");
		return frame.toString();
	}

	/// Builds the FULL rawPageName HTML, with prefix and suffix
	public StringBuilder buildFullPageFrame(String rawPageName) {
		return buildFullPageFrame(rawPageName, null);
	}

	/// Builds the FULL rawPageName HTML, with prefix and suffix.
	/// Returns null when the page frame itself is null (no page content).
	public StringBuilder buildFullPageFrame(String rawPageName, String injectionStr) {
		String frameHTML = buildPageFrame(rawPageName, injectionStr);
		if (frameHTML != null) {
			StringBuilder ret = new StringBuilder();
			ret.append(prefixHTML(rawPageName));
			ret.append(frameHTML);
			ret.append(suffixHTML(rawPageName));
			return ret;
		}
		return null;
	}

	//
	// Subpages searching handling
	//

	/// Sub page name listing
	///
	/// @param rawPageName Parent page to search from
	/// @return Collection of sub pages name from the parent page
	public Set<String> subPagesList(String rawPageName) {
		rawPageName = filterRawPageName(rawPageName);
		HashSet<String> res = new HashSet<String>();

		// The current folder to scan
		File folder = new File(pagesFolder, rawPageName);

		// Scan for subdirectories ONLY if this is a directory
		if (folder.isDirectory()) {
			// For each sub directory, build it as a page
			for (File pageDefine : FileUtils.listDirs(folder)) {
				// Get sub page name
				String subPageName = pageDefine.getName();

				// Common and index zone only if its top layer
				if (subPageName.equalsIgnoreCase("common") || subPageName.equalsIgnoreCase("index")) {
					if (rawPageName.length() <= 0) {
						res.add(subPageName);
					}
				} else if (subPageName.equalsIgnoreCase("assets") || subPageName.equalsIgnoreCase("web-inf")) {
					// ignoring certain reserved folders
				} else {
					res.add(subPageName);
				}
			}
		}

		// Return result set
		return res;
	}

	/// Build and returns the page components map (rooted at "")
	public PageComponentMap buildPageComponentMap() {
		return new PageComponentMap(this, "");
	}

	//
	// Page Building parts
	//

	/// Enum of file types that is processed
	protected enum PageFileType {
		js, jsons, jsons_to_js, less, less_to_css, html
	}

	/// Indicate if the page definition FOLDER exists
	///
	/// @param pageName page to check
	/// @return boolean true if pageName has a definition folder
	public boolean hasPageFolder(String pageName) {
		try {
			File definitionFolder = new File(pagesFolder, pageName);
			return definitionFolder.exists() && definitionFolder.isDirectory();
		} catch (Exception e) {
			// Failed? Treated as "no folder" — any filesystem error falls through to false
		}
		return false;
	}

	/// Indicates if the page definition FILE exists
	/// (either index.html or <safePageName>.html inside the page folder)
	///
	/// @param pageName page to check
	/// @return boolean true if pageName has a page file
	public boolean hasPageFile(String pageName) {
		try {
			File definitionFolder = new File(pagesFolder, pageName);
			return ((new File(definitionFolder, "index.html")).exists() || (new File(definitionFolder, safePageName(pageName) + ".html")).exists());
		} catch (Exception e) {
			// Failed? Treated as "no file" — any filesystem error falls through to false
		}
		return false;
	}

	/// Process the file, according to its type, and outputs it into the respective file
	///
	/// @param type        file type enum, selects the conversion applied
	/// @param input       source file (returns false if not exists)
	/// @param output      target file (written if source file exists)
	/// @param rawPageName the page name used, used in certain type logic
	/// @param jmteVarMap  the JMTE variable map to use
	/// @return true, if a file was processed and written
	public boolean processPageFile(PageFileType type, File input, File output, String rawPageName,
		Map<String, Object> jmteVarMap) throws IOException {
		if (input.exists() && input.isFile() && input.canRead()) {
			// Gets its string value, and process only if not blank
			String fileVal = FileUtils.readFileToString(input);
			if ((fileVal = fileVal.trim()).length() > 0) {
				// Does specific conversions
				if (type == PageFileType.jsons_to_js) {
					// Does a JMTE filter
					fileVal = getJMTE().parseTemplate(fileVal, jmteVarMap);
					// Adds the script object wrapper
					fileVal = "window.pageFrames = window.pageFrames || {}; window.pageFrames." + safePageName(rawPageName) + " = (" + fileVal + ");";
				} else if (type == PageFileType.less_to_css) {
					/// Add the config .less file
					String lessPrefix = getCommonFile(rawPageName, "prefix.less");
					String lessSuffix = getCommonFile(rawPageName, "suffix.less");
					/// Does an outer wrap, if its not index page (which applies style to 'all')
					if (!rawPageName.equalsIgnoreCase("index") && !rawPageName.equalsIgnoreCase("common")) {
						fileVal = "."
+ pageFrameID(rawPageName) + " { \n" + fileVal + "\n } \n"; } // Ensure prefix, and suffix are added fileVal = (lessPrefix + "\n" + fileVal + "\n" + lessSuffix).trim(); // Does a JMTE filter fileVal = getJMTE().parseTemplate(fileVal, jmteVarMap); // Less to css conversion fileVal = less.compile(fileVal); } else { // Does a JMTE filter fileVal = getJMTE().parseTemplate(fileVal, jmteVarMap); } // Write to file if it differ FileUtils.writeStringToFile_ifDifferant(output, null /*"UTF-8"*/, fileVal); // Indicate file is "deployed" return true; } } return false; } /// Varient of processPageFile, where it iterates an array set of input files till a valid file is found /// @param filetype enum /// @param Source file (returns false if not exists) /// @param Target file (written if source file exists) /// @param The page name used, used in certain type logic /// @param The JMTE variable map to use /// @return true, if a file was processed and written public boolean processPageFile(PageFileType type, File[] inputArr, File output, String rawPageName, Map<String, Object> jmteVarMap) throws IOException { for (File input : inputArr) { if (processPageFile(type, input, output, rawPageName, jmteVarMap)) { return true; } } return false; } // Full Page Building /// Builds all the assets for a single page /// Copies out the various files, and does direct conversion on them if needed. /// @param rawPageName to build /// @return boolean true, if page had content to be built public boolean buildAndOutputPage(String rawPageName) { // rawPageName here assumes NO "/" suffix if (rawPageName.endsWith("/")) { rawPageName = rawPageName.substring(0, rawPageName.length() - 1); } // System.out allowed here, because LESS does a system out ANYWAY. 
// Help to make more "sense" of the done output System.out.print("> PageBuilder[Core].buildPage(\'" + rawPageName + "\'): "); // Output folder validitiy check if (outputFolder == null) { throw new RuntimeException("Missing output folder, unable to generate : " + rawPageName); } // Future extension, possible loop hole abuse. Im protecting against it early if (rawPageName.startsWith(".")) { throw new RuntimeException("Unable to load page name, starting with '.' : " + rawPageName); } if (rawPageName.indexOf("..") >= 0) { throw new RuntimeException("Unable to load page name, containing '..' : " + rawPageName); } if (rawPageName.toLowerCase().indexOf("web-inf") >= 0) { throw new RuntimeException("Unable to load page name, that may bypass WEB-INF : " + rawPageName); } try { // Prepares output and definition FILE objects, and JMTE map File outputPageFolder = new File(outputFolder, rawPageName); File definitionFolder = new File(pagesFolder, rawPageName); Map<String, Object> jmteVarMap = pageJMTEvars(rawPageName); String pageName_safe = safePageName(rawPageName); // Create the output folder as needed if (!outputPageFolder.exists()) { outputPageFolder.mkdirs(); } // Copy the page assets folder // @TODO: Optimize this to only copy IF newer // Folder to copy from boolean hasAssets = false; File pageAssetsFolder = new File(definitionFolder, "assets"); if (pageAssetsFolder.exists() && pageAssetsFolder.isDirectory()) { // Copy if folder to target FileUtils.copyDirectory_ifDifferent(pageAssetsFolder, new File(outputPageFolder, "assets")); hasAssets = true; } // Copy out all the various other files // @TODO: Optimize this to only copy IF newer // Ignores: index files (as of now) // For each sub directory, build it as a page for (File inFile : FileUtils.listFiles(definitionFolder, null, false)) { String fileName = inFile.getName(); String[] splitFileName = fileName.split("."); // Ignore index based files if(splitFileName[0].equalsIgnoreCase("index")) { continue; } // File to output 
to File outFile = new File(outputPageFolder, fileName); // Copy over the file FileUtils.copyFile_ifDifferent(inFile, outFile); String fileExt = (splitFileName.length > 1)? splitFileName[ splitFileName.length - 1 ] : ""; if(fileExt.equalsIgnoreCase("less")) { } else if(fileExt.equalsIgnoreCase("es6")) { } } // Process the JS script (if provided) boolean hasJsFile = processPageFile(PageFileType.js, new File[] { new File(definitionFolder, "index.js"), new File(definitionFolder, pageName_safe + ".js") }, new File(outputPageFolder, pageName_safe + ".js"), rawPageName, jmteVarMap); // Build the JSONS script (if provided) boolean hasJsonsFile = processPageFile(PageFileType.jsons_to_js, new File[] { new File(definitionFolder, "index.jsons"), new File(definitionFolder, pageName_safe + ".jsons") }, new File(outputPageFolder, pageName_safe + ".jsons.js"), rawPageName, jmteVarMap); // Build the LESS script (if provided) boolean hasLessFile = processPageFile(PageFileType.less_to_css, new File[] { new File(definitionFolder, "index.less"), new File(definitionFolder, pageName_safe + ".less") }, new File( outputPageFolder, pageName_safe + ".css"), rawPageName, jmteVarMap); // Build the html page // The HTML output (if valid) StringBuilder indexStrBuilder = buildFullPageFrame(rawPageName); if (indexStrBuilder == null) { return hasAssets || hasJsFile || hasJsonsFile || hasLessFile; } String indexStr = indexStrBuilder.toString(); // Build the injector code for this page (before </head>) StringBuilder injectorStrBuilder = new StringBuilder(); if (hasLessFile) { if (indexStr.indexOf(rawPageName + "/" + pageName_safe + ".css") > 0) { // Skips injection if already included } else { injectorStrBuilder.append("<link rel='stylesheet' type='text/css' href='" + uriRootPrefix + "/" + rawPageName + "/" + pageName_safe + ".css'></link>\n"); } } if (hasJsFile) { if (indexStr.indexOf(rawPageName + "/" + pageName_safe + ".js") > 0) { // Skips injection if already included } else { 
injectorStrBuilder.append("<script src='" + uriRootPrefix + "/" + rawPageName + "/" + pageName_safe + ".js'></script>\n"); } } if (hasJsonsFile) { if (indexStr.indexOf(rawPageName + "/" + pageName_safe + ".jsons.js") > 0) { // Skips injection if already included } else { injectorStrBuilder.append("<script src='" + uriRootPrefix + "/" + rawPageName + "/" + pageName_safe + ".jsons.js'></script>\n"); } } String injectorStr = injectorStrBuilder.toString(); // Ammend the HTML output // Apply injector code if any if (injectorStr.length() > 0) { // Rebuild with injection indexStr = buildFullPageFrame(rawPageName, injectorStr).toString(); } // Components resolution indexStr = componentsFilter.resolve(indexStr); // HTML minify // Apply a simplistic compression (so avoid inline JS with line comments for nuts) // @TODO: A proper minifier library integration, like: // indexStr = indexStr.trim().replaceAll("\\s+", " "); // indexStr = indexStr.trim().replaceAll("\\>\\s\\<", "><"); // Write out to file // Write to file if it differ FileUtils.writeStringToFile_ifDifferant(new File(outputPageFolder, "index.html"), null /*"UTF-8"*/, indexStr); // Returns success return true; } catch (IOException e) { throw new RuntimeException(e); } // End and returns failure // return false; } /// Builds all pages (NOT including itself) inside a page folder /// @param rawPageName to build /// @return boolean true, if page had content to be built public boolean buildPageFolder(String rawPageName) { boolean res = false; if (rawPageName != null) { rawPageName = rawPageName.trim(); } if (rawPageName.equalsIgnoreCase("/")) { rawPageName = ""; } // The current folder to scan File folder = new File(pagesFolder, rawPageName); // Possible page pathing error fix if (rawPageName.length() > 0 && !rawPageName.endsWith("/")) { rawPageName = rawPageName + "/"; } // Scan for subdirectories ONLY if this is a directory if (folder.isDirectory()) { // For each sub directory, build it as a page for (File pageDefine : 
FileUtils.listDirs(folder)) { // Build each page String subPageName = pageDefine.getName(); // Scan for sub pages if (subPageName.equalsIgnoreCase("common") || subPageName.equalsIgnoreCase("index")) { buildAndOutputPage(rawPageName + subPageName); if (rawPageName.length() <= 0) { //buildAndOutputPage(rawPageName + subPageName); } else { System.out.print("> PageBuilder[Core].buildPageFolder - WARNING, common / index nested build (\'" + rawPageName + "\', \'" + subPageName + "\'): "); } } else if (subPageName.equalsIgnoreCase("assets") || subPageName.equalsIgnoreCase("web-inf") || subPageName.equalsIgnoreCase("build")) { // ignoring certain reserved folders } else { // Build the page buildAndOutputPage(rawPageName + subPageName); // Recursive iterate res = buildPageFolder(rawPageName + subPageName + "/") || true; } } } return res; } /// Builds all pages (INCLUDING itself, if possible) inside a page folder /// @param rawPageName to build /// @return boolean true, if page had content to be built public boolean buildPageFolder_includingSelf(String rawPageName) { boolean res = false; if (hasPageFile(rawPageName)) { res = buildAndOutputPage(rawPageName) || res; } return buildPageFolder(rawPageName) || res; } // Dependency chain management /// Dependency chain tracking public GenericConvertListSet<String> dependencyTracker() { return dependencyTracker(); } /// Reset the Dependency tracking public void dependencyTrackerReset() { dependencyTracker.clear(); } /// Recursively get the sub depencies, and do the full tracking protected GenericConvertListSet<String> fullDependencyTracker() { // @TODO Recursive pulls return dependencyTracker(); } /// Build the depency for a raw file protected String dependencyBuildFile(String filename) { StringBuilder res = new StringBuilder(); for(String name : dependencyTracker) { String pageData = FileUtils.readFileToString_withFallback(new File(pagesFolder, name + "/"+filename), null /*"UTF-8"*/, null); if( pageData != null ) { 
res.append(pageData); } } return res.toString(); } /// Builds the LESS from the depency chain public String dependencyLess() { return dependencyBuildFile("depend.less"); } /// Builds the CSS from the depency chain public String dependencyCss() { return less.compile(dependencyLess()); } }
package nl.tudelft.lifetiles.annotation.model;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.stream.Stream;

/**
 * Static class which parses gene annotations.
 *
 * @author Jos
 *
 */
public final class GeneAnnotationParser {

    /**
     * The index of the type field in a annotation line.
     */
    private static final int TYPE_FIELD = 2;
    /**
     * The index of the start field in a annotation line.
     */
    private static final int START_FIELD = 3;
    /**
     * The index of the end field in a annotation line.
     */
    private static final int END_FIELD = 4;
    /**
     * The index of the extra field in a annotation line.
     */
    private static final int EXTRA_FIELD = 8;
    /**
     * The key of the name attribute in the extra field of a annotation line.
     */
    private static final String NAME_FIELD = "Name";

    /**
     * Static class can not have a public or default constructor.
     */
    private GeneAnnotationParser() {
        // noop
    }

    /**
     * Parses a file of gene annotations into a set of genes.
     *
     * @param file
     *            the file with gene annotations.
     * @throws IOException
     *             When there is an error reading the specified file.
     * @return set of parsed gene annotations.
     */
    public static Set<GeneAnnotation> parseGeneAnnotations(final File file)
            throws IOException {
        Set<GeneAnnotation> genomeAnnotations = new HashSet<>();
        // try-with-resources: the previous code leaked the underlying file
        // handle whenever the pipeline threw before the explicit close().
        try (Stream<String> annotationLines = Files.lines(file.toPath())) {
            annotationLines.map(GeneAnnotationParser::parseGeneAnnotation)
                    .filter(genome -> genome != null)
                    .forEach(genome -> genomeAnnotations.add(genome));
        }
        return genomeAnnotations;
    }

    /**
     * Parses a single line of the gene file into a gene.
     *
     * @param line
     *            Single line of the gene file.
     * @return parsed gene, or null for header, comment, non-gene or malformed
     *         lines.
     */
    private static GeneAnnotation parseGeneAnnotation(final String line) {
        String[] columns = line.split("\t");
        // Guard: comment/header lines (e.g. "##gff-version") and truncated
        // lines have fewer columns and previously crashed with an
        // ArrayIndexOutOfBoundsException.
        if (columns.length <= EXTRA_FIELD) {
            return null;
        }
        if (columns[TYPE_FIELD].equals("gene")) {
            long start = Long.parseLong(columns[START_FIELD]);
            long end = Long.parseLong(columns[END_FIELD]);
            Map<String, String> fields = extractGeneFields(columns[EXTRA_FIELD]
                    .split(";"));
            String name = fields.get(NAME_FIELD);
            return new GeneAnnotation(start, end, name);
        }
        return null;
    }

    /**
     * Method which extract the gene fields into a map.
     *
     * @param fields
     *            Fields of the gene, each formatted as "key=value".
     * @return Map of attributes in the gene.
     */
    private static Map<String, String> extractGeneFields(final String[] fields) {
        final Map<String, String> genomeFields = new HashMap<String, String>();
        for (String field : fields) {
            // Limit 2 keeps '=' characters inside the value intact; attributes
            // without an '=' are skipped instead of crashing.
            String[] attribute = field.split("=", 2);
            if (attribute.length == 2) {
                genomeFields.put(attribute[0], attribute[1]);
            }
        }
        return genomeFields;
    }
}
package liquibase.sqlgenerator.core; import liquibase.CatalogAndSchema; import liquibase.database.Database; import liquibase.database.core.MSSQLDatabase; import liquibase.structure.core.Schema; import liquibase.sql.Sql; import liquibase.sql.UnparsedSql; import liquibase.sqlgenerator.SqlGeneratorChain; import liquibase.statement.core.GetViewDefinitionStatement; public class GetViewDefinitionGeneratorMSSQL extends GetViewDefinitionGenerator { @Override public int getPriority() { return PRIORITY_DATABASE; } @Override public boolean supports(GetViewDefinitionStatement statement, Database database) { return database instanceof MSSQLDatabase; } @Override public Sql[] generateSql(GetViewDefinitionStatement statement, Database database, SqlGeneratorChain sqlGeneratorChain) { CatalogAndSchema schema = database.correctSchema(new CatalogAndSchema(statement.getCatalogName(), statement.getSchemaName())); String sql = "exec sp_helptext '" + schema.getSchemaName() + "."+ statement.getViewName() + "'"; return new Sql[]{new UnparsedSql(sql) }; }}
package jeranvier.math.timeseries;

import java.io.Serializable;
import java.util.Collection;
import java.util.Iterator;
import java.util.Map;
import java.util.Queue;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.concurrent.ArrayBlockingQueue;

import jeranvier.math.linearAlgebra.Matrix;
import jeranvier.math.linearAlgebra.Vector;
import jeranvier.math.stats.SimpleStats;
import jeranvier.math.util.Complex;

/**
 * A time series represented as a sorted map from {@code Long} timestamps to
 * {@code Double} values. All transformation methods are non-destructive: they
 * build and return a new {@link Timeseries} via the {@link Builder}.
 */
public class Timeseries extends TreeMap<Long, Double> implements Serializable {

	private static final long serialVersionUID = -4556555657603027141L;

	/** Positioning of the averaging window relative to the reported timestamp. */
	public static enum AVERAGE_TYPE { CENTERED, DELAYED, AHEAD };

	public Timeseries(Map<Long, Double> data) {
		super(data);
	}

	@Override
	public String toString() {
		StringBuilder sb = new StringBuilder();
		for (Map.Entry<Long, Double> entry : this.entrySet()) {
			sb.append(entry.getKey() + "\t" + entry.getValue() + "\n");
		}
		return sb.toString();
	}

	/**
	 * Returns the sub-series with keys strictly between start and end
	 * (both bounds are EXCLUSIVE).
	 */
	public Timeseries subTimeseries(Long start, Long end) {
		Builder tsb = new Timeseries.Builder();
		for (Map.Entry<Long, Double> element : this.entrySet()) {
			if (element.getKey() > start && element.getKey() < end) {
				tsb.put(element.getKey(), element.getValue());
			}
		}
		return tsb.build();
	}

	/**
	 * Returns a new series with the same keys but values replaced, in key
	 * order, by the given array.
	 *
	 * @throws IllegalArgumentException if the sizes differ
	 */
	public Timeseries substituteAll(Double[] values) {
		if (this.size() != values.length) {
			// FIX: message previously read "=!" instead of "!="
			throw new IllegalArgumentException("operation on timeseries of different sizes ("
					+ this.size() + "!=" + values.length + ")");
		}
		Builder tsb = new Timeseries.Builder();
		int i = 0;
		for (Map.Entry<Long, Double> element : this.entrySet()) {
			tsb.put(element.getKey(), values[i]);
			i++;
		}
		return tsb.build();
	}

	/** Returns a new series keeping only entries with value <= maxValue. */
	public Timeseries max(int maxValue) {
		Builder tsb = new Timeseries.Builder();
		Iterator<Map.Entry<Long, Double>> ite = this.entrySet().iterator();
		while (ite.hasNext()) {
			Map.Entry<Long, Double> element = ite.next();
			// (redundant (Double) cast removed — getValue() is already Double)
			if (element.getValue() <= maxValue) {
				tsb.put(element.getKey(), element.getValue());
			}
		}
		return tsb.build();
	}

	/**
	 * Moving average with a window of (2 * radius + 1) samples; each output
	 * timestamp is the one "radius" samples behind the window's trailing edge.
	 *
	 * NOTE(review): the {@code type} parameter is currently UNUSED — the
	 * window placement is fixed regardless of CENTERED/DELAYED/AHEAD. Confirm
	 * intended semantics before relying on it.
	 */
	public Timeseries movingAverage(int radius, AVERAGE_TYPE type) {
		int windowLength = 2 * radius + 1;
		Builder tsb = new Timeseries.Builder();
		Queue<Double> window = new ArrayBlockingQueue<Double>(windowLength);
		Queue<Long> times = new ArrayBlockingQueue<Long>(radius + 1);
		for (Map.Entry<Long, Double> entry : this.entrySet()) {
			times.add(entry.getKey());
			window.add(entry.getValue());
			if (window.size() == windowLength) {
				tsb.put(times.poll(), SimpleStats.mean((Collection<? extends Number>) window));
				window.poll();
			}
			if (times.size() == radius + 1) {
				times.poll();
			}
		}
		return tsb.build();
	}

	/** Returns a new series with the scalar added to every value. */
	public Timeseries add(double value) {
		Builder tsb = new Timeseries.Builder();
		for (Map.Entry<Long, Double> entry : this.entrySet()) {
			tsb.put(entry.getKey(), entry.getValue() + value);
		}
		return tsb.build();
	}

	/** Returns a new series with the scalar subtracted from every value. */
	public Timeseries substract(double value) {
		Builder tsb = new Timeseries.Builder();
		for (Map.Entry<Long, Double> entry : this.entrySet()) {
			tsb.put(entry.getKey(), entry.getValue() - value);
		}
		return tsb.build();
	}

	/** Returns a new series with every value divided by the scalar. */
	public Timeseries divide(double value) {
		Builder tsb = new Timeseries.Builder();
		for (Map.Entry<Long, Double> entry : this.entrySet()) {
			tsb.put(entry.getKey(), entry.getValue() / value);
		}
		return tsb.build();
	}

	/** Returns a new series with every value multiplied by the scalar. */
	public Timeseries multiply(double value) {
		Builder tsb = new Timeseries.Builder();
		for (Map.Entry<Long, Double> entry : this.entrySet()) {
			tsb.put(entry.getKey(), entry.getValue() * value);
		}
		return tsb.build();
	}

	/**
	 * Pointwise subtraction keyed by timestamp.
	 *
	 * @throws IllegalArgumentException if the sizes differ
	 */
	public Timeseries substract(Map<Long, Double> values) {
		if (this.size() != values.size()) {
			throw new IllegalArgumentException("operation on timeseries of different sizes");
		}
		Builder tsb = new Timeseries.Builder();
		for (Map.Entry<Long, Double> entry : this.entrySet()) {
			tsb.put(entry.getKey(), entry.getValue() - values.get(entry.getKey()));
		}
		return tsb.build();
	}

	/**
	 * Pointwise addition keyed by timestamp.
	 *
	 * @throws IllegalArgumentException if the sizes differ
	 */
	public Timeseries add(Map<Long, Double> values) {
		if (this.size() != values.size()) {
			throw new IllegalArgumentException("operation on timeseries of different sizes");
		}
		Builder tsb = new Timeseries.Builder();
		for (Map.Entry<Long, Double> entry : this.entrySet()) {
			tsb.put(entry.getKey(), entry.getValue() + values.get(entry.getKey()));
		}
		return tsb.build();
	}

	/**
	 * Resamples onto the regular grid [start, end] with the given step,
	 * interpolating missing keys via the supplied resampler.
	 */
	public Timeseries resample(long start, long end, long step, Resampler<Long, Double> resampler) {
		Builder tsb = new Timeseries.Builder();
		for (long i = start; i <= end; i += step) {
			tsb.put(i, get(i, resampler));
		}
		return tsb.build();
	}

	// Exact-key lookup with interpolation fallback.
	// NOTE(review): floorEntry/ceilingEntry return null outside the series'
	// key range — resamplers such as the linear lambda will then NPE; confirm
	// callers keep [start, end] within the series bounds.
	private Double get(long key, Resampler<Long, Double> resampler) {
		if (this.containsKey(key)) {
			return get(key);
		} else {
			return resampler.interpolate(this.floorEntry(key), key, this.ceilingEntry(key));
		}
	}

	/** Incrementally accumulates (timestamp, value) pairs into a Timeseries. */
	public static final class Builder {
		SortedMap<Long, Double> data;

		public Builder() {
			data = new TreeMap<>();
		}

		public void put(Long key, Double value) {
			data.put(key, value);
		}

		public Timeseries build() {
			return new Timeseries(data);
		}
	}

	/** Linear-interpolation resample over [start, end]. */
	public Timeseries linearResample(long start, long end, long step) {
		return this.resample(start, end, step, (f, k, c) -> {
			double ratio = ((double) (k - f.getKey())) / (c.getKey() - f.getKey());
			return f.getValue() + ratio * (c.getValue() - f.getValue());
		});
	}

	/** Linear-interpolation resample over the series' own key range. */
	public Timeseries linearResample(long step) {
		return this.resample(this.firstKey(), this.lastKey(), step, (f, k, c) -> {
			double ratio = ((double) (k - f.getKey())) / (c.getKey() - f.getKey());
			return f.getValue() + ratio * (c.getValue() - f.getValue());
		});
	}

	/** Nearest-neighbor resample over [start, end]. */
	public Timeseries nearestNeighborResample(long start, long end, long step) {
		return this.resample(start, end, step, (f, k, c) -> {
			if (Math.abs(f.getKey() - k) < Math.abs(c.getKey() - k)) {
				return f.getValue();
			} else {
				return c.getValue();
			}
		});
	}

	/** Packs the values, in key order, into a complex Vector (1-indexed). */
	public Vector vector() {
		Vector.Builder vb = new Vector.Builder(this.size());
		int i = 1;
		for (Double value : this.values()) {
			vb.set(i, new Complex(value, 0.0));
			i++;
		}
		return vb.build();
	}
}
package jp.com.koto.helper;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.PrintWriter;

/**
 * File parser that rewrites an input file to {@code super.path}, joining the
 * fields of each line with commas and skipping lines with too few fields.
 *
 * @author pollseed
 */
public class FileParser extends AbstractFileParser {

    /**
     * @param path output file path
     * @param cut  minimum field count; lines with fewer fields are skipped
     */
    public FileParser(String path, int cut) {
        super(path, cut);
    }

    /**
     * @param path   output file path
     * @param cut    minimum field count; lines with fewer fields are skipped
     * @param splits field-separator regex used to split each input line
     */
    FileParser(String path, int cut, String splits) {
        super(path, cut, splits);
    }

    @Override
    public void parse(File file) {
        // FIX: the old finally block called br.close()/bw.close() without null
        // checks, so a failed constructor (e.g. FileNotFoundException) caused a
        // NullPointerException. try-with-resources closes both safely, in
        // reverse order, and flushes the writer on close.
        try (BufferedReader br = new BufferedReader(new FileReader(file));
                BufferedWriter bw = new BufferedWriter(new PrintWriter(super.path))) {
            String line;
            int lineCnt = 0;
            while ((line = br.readLine()) != null) {
                String[] strs = line.split(super.splits);
                // Skip lines that do not reach the required field count
                if (strs.length <= super.cut) {
                    continue;
                }
                // Newline before every output line except the first
                if (lineCnt != 0) {
                    bw.newLine();
                }
                int strCnt = 0;
                for (String str : strs) {
                    if (strCnt != 0) {
                        bw.append(",");
                    }
                    bw.append(str);
                    strCnt++;
                    // Periodic flush for very wide lines
                    if (strCnt > 500) {
                        bw.flush();
                    }
                }
                lineCnt++;
                // Periodic flush for long files
                if (lineCnt > 100) {
                    bw.flush();
                }
            }
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
package org.mifosplatform.portfolio.savings.domain; import static org.mifosplatform.portfolio.savings.SavingsApiConstants.SAVINGS_ACCOUNT_RESOURCE_NAME; import static org.mifosplatform.portfolio.savings.SavingsApiConstants.annualFeeAmountParamName; import static org.mifosplatform.portfolio.savings.SavingsApiConstants.annualFeeOnMonthDayParamName; import static org.mifosplatform.portfolio.savings.SavingsApiConstants.localeParamName; import static org.mifosplatform.portfolio.savings.SavingsApiConstants.lockinPeriodFrequencyParamName; import static org.mifosplatform.portfolio.savings.SavingsApiConstants.lockinPeriodFrequencyTypeParamName; import static org.mifosplatform.portfolio.savings.SavingsApiConstants.withdrawalFeeAmountParamName; import static org.mifosplatform.portfolio.savings.SavingsApiConstants.withdrawalFeeForTransfersParamName; import static org.mifosplatform.portfolio.savings.SavingsApiConstants.withdrawalFeeTypeParamName; import java.math.BigDecimal; import java.math.MathContext; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.LinkedHashMap; import java.util.List; import java.util.Locale; import java.util.Map; import javax.persistence.CascadeType; import javax.persistence.Column; import javax.persistence.Embedded; import javax.persistence.Entity; import javax.persistence.JoinColumn; import javax.persistence.ManyToOne; import javax.persistence.OneToMany; import javax.persistence.OrderBy; import javax.persistence.Table; import javax.persistence.Temporal; import javax.persistence.TemporalType; import javax.persistence.Transient; import javax.persistence.UniqueConstraint; import org.apache.commons.lang.ObjectUtils; import org.apache.commons.lang.StringUtils; import org.hibernate.annotations.LazyCollection; import org.hibernate.annotations.LazyCollectionOption; import org.joda.time.LocalDate; import org.joda.time.MonthDay; import 
org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; import org.mifosplatform.infrastructure.core.api.JsonCommand; import org.mifosplatform.infrastructure.core.data.ApiParameterError; import org.mifosplatform.infrastructure.core.data.DataValidatorBuilder; import org.mifosplatform.infrastructure.core.domain.LocalDateInterval; import org.mifosplatform.infrastructure.core.exception.PlatformApiDataValidationException; import org.mifosplatform.infrastructure.core.service.DateUtils; import org.mifosplatform.infrastructure.security.service.RandomPasswordGenerator; import org.mifosplatform.organisation.monetary.data.CurrencyData; import org.mifosplatform.organisation.monetary.domain.MonetaryCurrency; import org.mifosplatform.organisation.monetary.domain.Money; import org.mifosplatform.organisation.office.domain.Office; import org.mifosplatform.organisation.staff.domain.Staff; import org.mifosplatform.portfolio.accountdetails.domain.AccountType; import org.mifosplatform.portfolio.client.domain.Client; import org.mifosplatform.portfolio.group.domain.Group; import org.mifosplatform.portfolio.loanproduct.domain.PeriodFrequencyType; import org.mifosplatform.portfolio.savings.SavingsApiConstants; import org.mifosplatform.portfolio.savings.SavingsCompoundingInterestPeriodType; import org.mifosplatform.portfolio.savings.SavingsInterestCalculationDaysInYearType; import org.mifosplatform.portfolio.savings.SavingsInterestCalculationType; import org.mifosplatform.portfolio.savings.SavingsPeriodFrequencyType; import org.mifosplatform.portfolio.savings.SavingsPostingInterestPeriodType; import org.mifosplatform.portfolio.savings.SavingsWithdrawalFeesType; import org.mifosplatform.portfolio.savings.data.SavingsAccountTransactionDTO; import org.mifosplatform.portfolio.savings.domain.interest.PostingPeriod; import org.mifosplatform.portfolio.savings.exception.InsufficientAccountBalanceException; import 
org.mifosplatform.portfolio.savings.service.SavingsEnumerations; import org.mifosplatform.useradministration.domain.AppUser; import org.springframework.data.jpa.domain.AbstractPersistable; @Entity @Table(name = "m_savings_account", uniqueConstraints = { @UniqueConstraint(columnNames = { "account_no" }, name = "sa_account_no_UNIQUE"), @UniqueConstraint(columnNames = { "external_id" }, name = "sa_external_id_UNIQUE") }) public class SavingsAccount extends AbstractPersistable<Long> { @Column(name = "account_no", length = 20, unique = true, nullable = false) private String accountNumber; @Column(name = "external_id", nullable = true) private String externalId; @ManyToOne(optional = true) @JoinColumn(name = "client_id", nullable = true) private Client client; @ManyToOne(optional = true) @JoinColumn(name = "group_id", nullable = true) private Group group; @ManyToOne @JoinColumn(name = "product_id", nullable = false) private SavingsProduct product; @ManyToOne @JoinColumn(name = "field_officer_id", nullable = true) private Staff fieldOfficer; @Column(name = "status_enum", nullable = false) private Integer status; @Column(name = "account_type_enum", nullable = false) private Integer accountType; @Temporal(TemporalType.DATE) @Column(name = "submittedon_date", nullable = true) private Date submittedOnDate; @ManyToOne(optional = true) @JoinColumn(name = "submittedon_userid", nullable = true) private AppUser submittedBy; @Temporal(TemporalType.DATE) @Column(name = "rejectedon_date") private Date rejectedOnDate; @ManyToOne(optional = true) @JoinColumn(name = "rejectedon_userid", nullable = true) private AppUser rejectedBy; @Temporal(TemporalType.DATE) @Column(name = "withdrawnon_date") private Date withdrawnOnDate; @ManyToOne(optional = true) @JoinColumn(name = "withdrawnon_userid", nullable = true) private AppUser withdrawnBy; @Temporal(TemporalType.DATE) @Column(name = "approvedon_date") private Date approvedOnDate; @ManyToOne(optional = true) @JoinColumn(name = 
"approvedon_userid", nullable = true) private AppUser approvedBy; @Temporal(TemporalType.DATE) @Column(name = "activatedon_date", nullable = true) private Date activatedOnDate; @ManyToOne(optional = true) @JoinColumn(name = "activatedon_userid", nullable = true) private AppUser activatedBy; @Temporal(TemporalType.DATE) @Column(name = "closedon_date") private Date closedOnDate; @ManyToOne(optional = true) @JoinColumn(name = "closedon_userid", nullable = true) private AppUser closedBy; @Embedded private MonetaryCurrency currency; @Column(name = "nominal_annual_interest_rate", scale = 6, precision = 19, nullable = false) private BigDecimal nominalAnnualInterestRate; /** * The interest period is the span of time at the end of which savings in a * client's account earn interest. * * A value from the {@link SavingsCompoundingInterestPeriodType} * enumeration. */ @Column(name = "interest_compounding_period_enum", nullable = false) private Integer interestCompoundingPeriodType; /** * A value from the {@link SavingsPostingInterestPeriodType} enumeration. */ @Column(name = "interest_posting_period_enum", nullable = false) private Integer interestPostingPeriodType; /** * A value from the {@link SavingsInterestCalculationType} enumeration. */ @Column(name = "interest_calculation_type_enum", nullable = false) private Integer interestCalculationType; /** * A value from the {@link SavingsInterestCalculationDaysInYearType} * enumeration. 
*/ @Column(name = "interest_calculation_days_in_year_type_enum", nullable = false) private Integer interestCalculationDaysInYearType; @Column(name = "min_required_opening_balance", scale = 6, precision = 19, nullable = true) private BigDecimal minRequiredOpeningBalance; @Column(name = "lockin_period_frequency", nullable = true) private Integer lockinPeriodFrequency; @Column(name = "lockin_period_frequency_enum", nullable = true) private Integer lockinPeriodFrequencyType; /** * When account becomes <code>active</code> this field is derived if * <code>lockinPeriodFrequency</code> and * <code>lockinPeriodFrequencyType</code> details are present. */ @Temporal(TemporalType.DATE) @Column(name = "lockedin_until_date_derived", nullable = true) private Date lockedInUntilDate; @Column(name = "withdrawal_fee_amount", scale = 6, precision = 19, nullable = true) private BigDecimal withdrawalFeeAmount; @Column(name = "withdrawal_fee_type_enum", nullable = true) private Integer withdrawalFeeType; @Column(name = "withdrawal_fee_for_transfer", nullable = true) private boolean withdrawalFeeApplicableForTransfer; @Column(name = "annual_fee_amount", scale = 6, precision = 19, nullable = true) private BigDecimal annualFeeAmount; @Column(name = "annual_fee_on_month", nullable = true) private Integer annualFeeOnMonth; @Column(name = "annual_fee_on_day", nullable = true) private Integer annualFeeOnDay; @Temporal(TemporalType.DATE) @Column(name = "annual_fee_next_due_date", nullable = true) private Date annualFeeNextDueDate; @Embedded private SavingsAccountSummary summary; @OrderBy(value = "dateOf, id") @LazyCollection(LazyCollectionOption.FALSE) @OneToMany(cascade = CascadeType.ALL, mappedBy = "savingsAccount", orphanRemoval = true) private final List<SavingsAccountTransaction> transactions = new ArrayList<SavingsAccountTransaction>(); @Transient private boolean accountNumberRequiresAutoGeneration = false; @Transient private SavingsAccountTransactionSummaryWrapper 
savingsAccountTransactionSummaryWrapper;
    @Transient
    private SavingsHelper savingsHelper;

    // JPA requires a no-arg constructor; not for application use.
    protected SavingsAccount() {
        //
    }

    /**
     * Static factory for a brand-new savings application in the
     * SUBMITTED_AND_PENDING_APPROVAL state. All interest/fee/lock-in settings
     * are passed through unchanged to the private constructor.
     */
    public static SavingsAccount createNewApplicationForSubmittal(final Client client, final Group group,
            final SavingsProduct product, final Staff fieldOfficer, final String accountNo, final String externalId,
            final AccountType accountType, final LocalDate submittedOnDate, final BigDecimal interestRate,
            final SavingsCompoundingInterestPeriodType interestCompoundingPeriodType,
            final SavingsPostingInterestPeriodType interestPostingPeriodType,
            final SavingsInterestCalculationType interestCalculationType,
            final SavingsInterestCalculationDaysInYearType interestCalculationDaysInYearType,
            final BigDecimal minRequiredOpeningBalance, final Integer lockinPeriodFrequency,
            final SavingsPeriodFrequencyType lockinPeriodFrequencyType, final BigDecimal withdrawalFeeAmount,
            final SavingsWithdrawalFeesType withdrawalFeeType, final boolean withdrawalFeeApplicableForTransfer,
            final BigDecimal annualFeeAmount, final MonthDay annualFeeOnMonthDay) {
        final SavingsAccountStatusType status = SavingsAccountStatusType.SUBMITTED_AND_PENDING_APPROVAL;
        return new SavingsAccount(client, group, product, fieldOfficer, accountNo, externalId, status, accountType,
                submittedOnDate, interestRate, interestCompoundingPeriodType, interestPostingPeriodType,
                interestCalculationType, interestCalculationDaysInYearType, minRequiredOpeningBalance,
                lockinPeriodFrequency, lockinPeriodFrequencyType, withdrawalFeeAmount, withdrawalFeeType,
                withdrawalFeeApplicableForTransfer, annualFeeAmount, annualFeeOnMonthDay);
    }

    private SavingsAccount(final Client client, final Group group, final SavingsProduct product, final Staff fieldOfficer,
            final String accountNo, final String externalId, final SavingsAccountStatusType status,
            final AccountType accountType, final LocalDate submittedOnDate, final BigDecimal nominalAnnualInterestRate,
            final SavingsCompoundingInterestPeriodType interestCompoundingPeriodType,
            final SavingsPostingInterestPeriodType interestPostingPeriodType,
            final SavingsInterestCalculationType interestCalculationType,
            final SavingsInterestCalculationDaysInYearType interestCalculationDaysInYearType,
            final BigDecimal minRequiredOpeningBalance, final Integer lockinPeriodFrequency,
            final SavingsPeriodFrequencyType lockinPeriodFrequencyType, final BigDecimal withdrawalFeeAmount,
            final SavingsWithdrawalFeesType withdrawalFeeType, final boolean withdrawalFeeApplicableForTransfer,
            final BigDecimal annualFeeAmount, final MonthDay annualFeeOnMonthDay) {
        this.client = client;
        this.group = group;
        this.product = product;
        this.fieldOfficer = fieldOfficer;
        // Blank account number: generate a placeholder and flag it so the real
        // number can be auto-generated later (after the id is known — TODO confirm).
        if (StringUtils.isBlank(accountNo)) {
            this.accountNumber = new RandomPasswordGenerator(19).generate();
            this.accountNumberRequiresAutoGeneration = true;
        } else {
            this.accountNumber = accountNo;
        }
        this.currency = product.currency(); // currency always comes from the product
        this.externalId = externalId;
        this.status = status.getValue();
        this.accountType = accountType.getValue();
        this.submittedOnDate = submittedOnDate.toDate();
        this.nominalAnnualInterestRate = nominalAnnualInterestRate;
        this.interestCompoundingPeriodType = interestCompoundingPeriodType.getValue();
        this.interestPostingPeriodType = interestPostingPeriodType.getValue();
        this.interestCalculationType = interestCalculationType.getValue();
        this.interestCalculationDaysInYearType = interestCalculationDaysInYearType.getValue();
        this.minRequiredOpeningBalance = minRequiredOpeningBalance;
        this.lockinPeriodFrequency = lockinPeriodFrequency;
        // Optional settings: the enum columns stay null when not supplied.
        if (lockinPeriodFrequencyType != null) {
            this.lockinPeriodFrequencyType = lockinPeriodFrequencyType.getValue();
        }
        this.withdrawalFeeAmount = withdrawalFeeAmount;
        if (withdrawalFeeType != null) {
            this.withdrawalFeeType = withdrawalFeeType.getValue();
        }
        this.withdrawalFeeApplicableForTransfer = withdrawalFeeApplicableForTransfer;
        this.annualFeeAmount = annualFeeAmount;
        if (annualFeeOnMonthDay != null) {
            this.annualFeeOnMonth = annualFeeOnMonthDay.getMonthOfYear();
this.annualFeeOnDay = annualFeeOnMonthDay.getDayOfMonth();
        }
        this.summary = new SavingsAccountSummary();
    }

    /**
     * Used after fetching/hydrating a {@link SavingsAccount} object to inject
     * helper services/components used for update summary details after
     * events/transactions on a {@link SavingsAccount}.
     */
    public void setHelpers(final SavingsAccountTransactionSummaryWrapper savingsAccountTransactionSummaryWrapper,
            final SavingsHelper savingsHelper) {
        this.savingsAccountTransactionSummaryWrapper = savingsAccountTransactionSummaryWrapper;
        this.savingsHelper = savingsHelper;
    }

    public boolean isNotActive() {
        return !isActive();
    }

    public boolean isActive() {
        return SavingsAccountStatusType.fromInt(this.status).isActive();
    }

    public boolean isNotSubmittedAndPendingApproval() {
        return !isSubmittedAndPendingApproval();
    }

    public boolean isSubmittedAndPendingApproval() {
        return SavingsAccountStatusType.fromInt(this.status).isSubmittedAndPendingApproval();
    }

    public boolean isApproved() {
        return SavingsAccountStatusType.fromInt(this.status).isApproved();
    }

    public boolean isClosed() {
        return SavingsAccountStatusType.fromInt(this.status).isClosed();
    }

    /**
     * Calculates posting periods up to {@code interestPostingUpToDate} and makes the stored
     * interest-posting transactions agree with them: creates a posting transaction where none
     * exists for a period's posting date, and reverses-and-replaces one whose amount no longer
     * matches. Daily balances are recalculated only if any posting changed, then the summary
     * is refreshed. Found transaction ids are appended to the two caller-supplied lists
     * (presumably so the caller can diff before/after state — confirm).
     */
    public void postInterest(final MathContext mc, final LocalDate interestPostingUpToDate,
            final List<Long> existingTransactionIds, final List<Long> existingReversedTransactionIds) {

        final List<PostingPeriod> postingPeriods = calculateInterestUsing(mc, interestPostingUpToDate);

        // NOTE(review): accumulated but never read afterwards in this method.
        Money interestPostedToDate = Money.zero(this.currency);

        boolean recalucateDailyBalanceDetails = false;
        existingTransactionIds.addAll(findExistingTransactionIds());
        existingReversedTransactionIds.addAll(findExistingReversedTransactionIds());

        for (final PostingPeriod interestPostingPeriod : postingPeriods) {

            final LocalDate interestPostingTransactionDate = interestPostingPeriod.dateOfPostingTransaction();
            final Money interestEarnedToBePostedForPeriod = interestPostingPeriod.getInterestEarned();

            // Only periods whose posting date has been reached are posted.
            if (!interestPostingTransactionDate.isAfter(interestPostingUpToDate)) {
                interestPostedToDate = interestPostedToDate.plus(interestEarnedToBePostedForPeriod);

                final SavingsAccountTransaction postingTransaction = findInterestPostingTransactionFor(interestPostingTransactionDate);
                if (postingTransaction == null) {
                    // No posting yet for this date: create one.
                    final SavingsAccountTransaction newPostingTransaction = SavingsAccountTransaction.interestPosting(this, office(),
                            interestPostingTransactionDate, interestEarnedToBePostedForPeriod);
                    this.transactions.add(newPostingTransaction);
                    recalucateDailyBalanceDetails = true;
                } else {
                    // Posting exists: correct it only if the amount differs.
                    final boolean correctionRequired = postingTransaction.hasNotAmount(interestEarnedToBePostedForPeriod);
                    if (correctionRequired) {
                        postingTransaction.reverse();
                        final SavingsAccountTransaction newPostingTransaction = SavingsAccountTransaction.interestPosting(this, office(),
                                interestPostingTransactionDate, interestEarnedToBePostedForPeriod);
                        this.transactions.add(newPostingTransaction);
                        recalucateDailyBalanceDetails = true;
                    }
                }
            }
        }

        if (recalucateDailyBalanceDetails) {
            // no openingBalance concept supported yet but probably will to
            // allow
            // for migrations.
            final Money openingAccountBalance = Money.zero(this.currency);

            // update existing transactions so derived balance fields are
            // correct.
            recalculateDailyBalances(openingAccountBalance);
        }

        this.summary.updateSummary(this.currency, this.savingsAccountTransactionSummaryWrapper, this.transactions);
    }

    /** Returns the non-reversed interest-posting transaction dated {@code postingDate}, or null. */
    private SavingsAccountTransaction findInterestPostingTransactionFor(final LocalDate postingDate) {

        SavingsAccountTransaction postingTransation = null;

        for (final SavingsAccountTransaction transaction : this.transactions) {
            if (transaction.isInterestPostingAndNotReversed() && transaction.occursOn(postingDate)) {
                postingTransation = transaction;
                break;
            }
        }

        return postingTransation;
    }

    /**
     * All interest calculation based on END-OF-DAY-BALANCE.
     *
     * Interest calculation is performed on-the-fly over all account
     * transactions.
     *
     * 1. Calculate interest from the beginning of the account:
     * 1a. determine the 'crediting' (posting) periods that exist for this savings account;
     * 1b. determine the 'compounding' periods that exist within each posting period.
     * Calculate the amount of interest due at the end of each posting period;
     * callers then check whether an existing 'interest posting' transaction
     * exists for the date and matches the amount to be posted.
     */
    public List<PostingPeriod> calculateInterestUsing(final MathContext mc, final LocalDate upToInterestCalculationDate) {

        // no openingBalance concept supported yet but probably will to allow
        // for migrations.
        final Money openingAccountBalance = Money.zero(this.currency);

        // update existing transactions so derived balance fields are
        // correct.
        recalculateDailyBalances(openingAccountBalance);

        // 1. default to calculate interest based on entire history OR
        // 2. determine latest 'posting period' and find interest credited to
        // that period

        // A generate list of EndOfDayBalances (not including interest postings)
        final SavingsPostingInterestPeriodType postingPeriodType = SavingsPostingInterestPeriodType.fromInt(this.interestPostingPeriodType);

        final SavingsCompoundingInterestPeriodType compoundingPeriodType = SavingsCompoundingInterestPeriodType
                .fromInt(this.interestCompoundingPeriodType);

        final SavingsInterestCalculationDaysInYearType daysInYearType = SavingsInterestCalculationDaysInYearType
                .fromInt(this.interestCalculationDaysInYearType);

        final List<LocalDateInterval> postingPeriodIntervals = this.savingsHelper.determineInterestPostingPeriods(getActivationLocalDate(),
                upToInterestCalculationDate, postingPeriodType);

        final List<PostingPeriod> allPostingPeriods = new ArrayList<PostingPeriod>();

        // Closing balance of each posting period becomes the opening balance of the next.
        Money periodStartingBalance = Money.zero(this.currency);

        final SavingsInterestCalculationType interestCalculationType = SavingsInterestCalculationType.fromInt(this.interestCalculationType);
        // Annual rate as a fraction, e.g. 5% -> 0.05.
        final BigDecimal interestRateAsFraction = this.nominalAnnualInterestRate.divide(BigDecimal.valueOf(100l), mc);
        for (final LocalDateInterval periodInterval :
postingPeriodIntervals) { final PostingPeriod postingPeriod = PostingPeriod.createFrom(periodInterval, periodStartingBalance, retreiveOrderedListOfTransactions(), this.currency, compoundingPeriodType, interestCalculationType, interestRateAsFraction, daysInYearType.getValue()); periodStartingBalance = postingPeriod.closingBalance(); allPostingPeriods.add(postingPeriod); } this.savingsHelper.calculateInterestForAllPostingPeriods(this.currency, allPostingPeriods); this.summary.updateFromInterestPeriodSummaries(this.currency, allPostingPeriods); this.summary.updateSummary(this.currency, this.savingsAccountTransactionSummaryWrapper, this.transactions); return allPostingPeriods; } private List<SavingsAccountTransaction> retreiveOrderedListOfTransactions() { final List<SavingsAccountTransaction> listOfTransactionsSorted = retreiveListOfTransactions(); final List<SavingsAccountTransaction> orderedNonInterestPostingTransactions = new ArrayList<SavingsAccountTransaction>(); for (final SavingsAccountTransaction transaction : listOfTransactionsSorted) { if (!transaction.isInterestPostingAndNotReversed() && transaction.isNotReversed()) { orderedNonInterestPostingTransactions.add(transaction); } } return orderedNonInterestPostingTransactions; } private List<SavingsAccountTransaction> retreiveListOfTransactions() { final List<SavingsAccountTransaction> listOfTransactionsSorted = new ArrayList<SavingsAccountTransaction>(); for (final SavingsAccountTransaction transaction : this.transactions) { listOfTransactionsSorted.add(transaction); } final SavingsAccountTransactionComparator transactionComparator = new SavingsAccountTransactionComparator(); Collections.sort(listOfTransactionsSorted, transactionComparator); return listOfTransactionsSorted; } private void recalculateDailyBalances(final Money openingAccountBalance) { Money runningBalance = openingAccountBalance.copy(); final List<SavingsAccountTransaction> accountTransactionsSorted = retreiveListOfTransactions(); for (final 
SavingsAccountTransaction transaction : accountTransactionsSorted) { if (transaction.isReversed()) { transaction.zeroBalanceFields(); } else { Money transactionAmount = Money.zero(this.currency); if (transaction.isCredit()) { transactionAmount = transactionAmount.plus(transaction.getAmount(this.currency)); } else if (transaction.isDebit()) { transactionAmount = transactionAmount.minus(transaction.getAmount(this.currency)); } runningBalance = runningBalance.plus(transactionAmount); transaction.updateRunningBalance(runningBalance); } } // loop over transactions in reverse LocalDate endOfBalanceDate = DateUtils.getLocalDateOfTenant(); for (int i = accountTransactionsSorted.size() - 1; i >= 0; i final SavingsAccountTransaction transaction = accountTransactionsSorted.get(i); if (transaction.isNotReversed()) { transaction.updateCumulativeBalanceAndDates(this.currency, endOfBalanceDate); // this transactions transaction date is end of balance date for // previous transaction. endOfBalanceDate = transaction.transactionLocalDate().minusDays(1); } } } public SavingsAccountTransaction deposit(final SavingsAccountTransactionDTO transactionDTO) { if (isNotActive()) { final String defaultUserMessage = "Transaction is not allowed. 
Account is not active."; final ApiParameterError error = ApiParameterError.parameterError("error.msg.savingsaccount.transaction.account.is.not.active", defaultUserMessage, "transactionDate", transactionDTO.getTransactionDate().toString(transactionDTO.getFormatter())); final List<ApiParameterError> dataValidationErrors = new ArrayList<ApiParameterError>(); dataValidationErrors.add(error); throw new PlatformApiDataValidationException(dataValidationErrors); } if (isDateInTheFuture(transactionDTO.getTransactionDate())) { final String defaultUserMessage = "Transaction date cannot be in the future."; final ApiParameterError error = ApiParameterError.parameterError("error.msg.savingsaccount.transaction.in.the.future", defaultUserMessage, "transactionDate", transactionDTO.getTransactionDate().toString(transactionDTO.getFormatter())); final List<ApiParameterError> dataValidationErrors = new ArrayList<ApiParameterError>(); dataValidationErrors.add(error); throw new PlatformApiDataValidationException(dataValidationErrors); } if (transactionDTO.getTransactionDate().isBefore(getActivationLocalDate())) { final Object[] defaultUserArgs = Arrays.asList(transactionDTO.getTransactionDate().toString(transactionDTO.getFormatter()), getActivationLocalDate().toString(transactionDTO.getFormatter())).toArray(); final String defaultUserMessage = "Transaction date cannot be before accounts activation date."; final ApiParameterError error = ApiParameterError.parameterError("error.msg.savingsaccount.transaction.before.activation.date", defaultUserMessage, "transactionDate", defaultUserArgs); final List<ApiParameterError> dataValidationErrors = new ArrayList<ApiParameterError>(); dataValidationErrors.add(error); throw new PlatformApiDataValidationException(dataValidationErrors); } transactionDTO.getExistingTransactionIds().addAll(findExistingTransactionIds()); transactionDTO.getExistingReversedTransactionIds().addAll(findExistingReversedTransactionIds()); final Money amount = 
Money.of(this.currency, transactionDTO.getTransactionAmount()); final SavingsAccountTransaction transaction = SavingsAccountTransaction.deposit(this, office(), transactionDTO.getPaymentDetail(), transactionDTO.getTransactionDate(), amount); this.transactions.add(transaction); this.summary.updateSummary(this.currency, this.savingsAccountTransactionSummaryWrapper, this.transactions); return transaction; } private LocalDate getActivationLocalDate() { LocalDate activationLocalDate = null; if (this.activatedOnDate != null) { activationLocalDate = new LocalDate(this.activatedOnDate); } return activationLocalDate; } public SavingsAccountTransaction withdraw(final SavingsAccountTransactionDTO transactionDTO, final boolean applyWithdrawFee) { if (isNotActive()) { final String defaultUserMessage = "Transaction is not allowed. Account is not active."; final ApiParameterError error = ApiParameterError.parameterError("error.msg.savingsaccount.transaction.account.is.not.active", defaultUserMessage, "transactionDate", transactionDTO.getTransactionDate().toString(transactionDTO.getFormatter())); final List<ApiParameterError> dataValidationErrors = new ArrayList<ApiParameterError>(); dataValidationErrors.add(error); throw new PlatformApiDataValidationException(dataValidationErrors); } if (isDateInTheFuture(transactionDTO.getTransactionDate())) { final String defaultUserMessage = "Transaction date cannot be in the future."; final ApiParameterError error = ApiParameterError.parameterError("error.msg.savingsaccount.transaction.in.the.future", defaultUserMessage, "transactionDate", transactionDTO.getTransactionDate().toString(transactionDTO.getFormatter())); final List<ApiParameterError> dataValidationErrors = new ArrayList<ApiParameterError>(); dataValidationErrors.add(error); throw new PlatformApiDataValidationException(dataValidationErrors); } if (transactionDTO.getTransactionDate().isBefore(getActivationLocalDate())) { final Object[] defaultUserArgs = 
Arrays.asList(transactionDTO.getTransactionDate().toString(transactionDTO.getFormatter()), getActivationLocalDate().toString(transactionDTO.getFormatter())).toArray(); final String defaultUserMessage = "Transaction date cannot be before accounts activation date."; final ApiParameterError error = ApiParameterError.parameterError("error.msg.savingsaccount.transaction.before.activation.date", defaultUserMessage, "transactionDate", defaultUserArgs); final List<ApiParameterError> dataValidationErrors = new ArrayList<ApiParameterError>(); dataValidationErrors.add(error); throw new PlatformApiDataValidationException(dataValidationErrors); } if (isAccountLocked(transactionDTO.getTransactionDate())) { final String defaultUserMessage = "Withdrawal is not allowed. No withdrawals are allowed until after " + getLockedInUntilLocalDate().toString(transactionDTO.getFormatter()); final ApiParameterError error = ApiParameterError.parameterError( "error.msg.savingsaccount.transaction.withdrawals.blocked.during.lockin.period", defaultUserMessage, "transactionDate", transactionDTO.getTransactionDate().toString(transactionDTO.getFormatter()), getLockedInUntilLocalDate().toString(transactionDTO.getFormatter())); final List<ApiParameterError> dataValidationErrors = new ArrayList<ApiParameterError>(); dataValidationErrors.add(error); throw new PlatformApiDataValidationException(dataValidationErrors); } transactionDTO.getExistingTransactionIds().addAll(findExistingTransactionIds()); transactionDTO.getExistingReversedTransactionIds().addAll(findExistingReversedTransactionIds()); final Money transactionAmountMoney = Money.of(this.currency, transactionDTO.getTransactionAmount()); final SavingsAccountTransaction transaction = SavingsAccountTransaction.withdrawal(this, office(), transactionDTO.getPaymentDetail(), transactionDTO.getTransactionDate(), transactionAmountMoney); this.transactions.add(transaction); if (applyWithdrawFee && isAutomaticWithdrawalFee()) { SavingsAccountTransaction 
withdrawalFeeTransaction = null; Money feeAmount = Money.zero(this.currency); switch (SavingsWithdrawalFeesType.fromInt(this.withdrawalFeeType)) { case INVALID: break; case FLAT: feeAmount = Money.of(this.currency, this.withdrawalFeeAmount); withdrawalFeeTransaction = SavingsAccountTransaction .fee(this, office(), transactionDTO.getTransactionDate(), feeAmount); this.transactions.add(withdrawalFeeTransaction); break; case PERCENT_OF_AMOUNT: final BigDecimal feeAmountDecimal = transactionDTO.getTransactionAmount().multiply(this.withdrawalFeeAmount) .divide(BigDecimal.valueOf(100l)); feeAmount = Money.of(this.currency, feeAmountDecimal); withdrawalFeeTransaction = SavingsAccountTransaction .fee(this, office(), transactionDTO.getTransactionDate(), feeAmount); this.transactions.add(withdrawalFeeTransaction); break; } } this.summary.updateSummary(this.currency, this.savingsAccountTransactionSummaryWrapper, this.transactions); return transaction; } public boolean isBeforeLastPostingPeriod(final LocalDate transactionDate) { boolean transactionBeforeLastInterestPosting = false; for (final SavingsAccountTransaction transaction : retreiveListOfTransactions()) { if (transaction.isInterestPostingAndNotReversed() && transaction.isAfter(transactionDate)) { transactionBeforeLastInterestPosting = true; break; } } return transactionBeforeLastInterestPosting; } public void validateAccountBalanceDoesNotBecomeNegative(final BigDecimal transactionAmount) { final List<SavingsAccountTransaction> transactionsSortedByDate = retreiveListOfTransactions(); Money runningBalance = Money.zero(this.currency); for (final SavingsAccountTransaction transaction : transactionsSortedByDate) { if (transaction.isNotReversed() && transaction.isCredit()) { runningBalance = runningBalance.plus(transaction.getAmount(this.currency)); } else if (transaction.isNotReversed() && transaction.isDebit()) { runningBalance = runningBalance.minus(transaction.getAmount(this.currency)); } if 
(runningBalance.isLessThanZero()) { final BigDecimal withdrawalFee = null; throw new InsufficientAccountBalanceException("transactionAmount", getAccountBalance(), withdrawalFee, transactionAmount); } } } public void validateAccountBalanceDoesNotBecomeNegative(final String transactionAction) { final List<SavingsAccountTransaction> transactionsSortedByDate = retreiveListOfTransactions(); Money runningBalance = Money.zero(this.currency); for (final SavingsAccountTransaction transaction : transactionsSortedByDate) { if (transaction.isNotReversed() && transaction.isCredit()) { runningBalance = runningBalance.plus(transaction.getAmount(this.currency)); } else if (transaction.isNotReversed() && transaction.isDebit()) { runningBalance = runningBalance.minus(transaction.getAmount(this.currency)); } if (runningBalance.isLessThanZero()) { final List<ApiParameterError> dataValidationErrors = new ArrayList<ApiParameterError>(); final DataValidatorBuilder baseDataValidator = new DataValidatorBuilder(dataValidationErrors) .resource(SAVINGS_ACCOUNT_RESOURCE_NAME + transactionAction); baseDataValidator.reset().failWithCodeNoParameterAddedToErrorCode("results.in.balance.going.negative"); if (!dataValidationErrors.isEmpty()) { throw new PlatformApiDataValidationException(dataValidationErrors); } } } } public SavingsAccountTransaction addAnnualFee(final MathContext mc, final DateTimeFormatter formatter, final LocalDate annualFeeTransactionDate, final LocalDate today, final List<Long> existingTransactionIds, final List<Long> existingReversedTransactionIds) { final List<ApiParameterError> dataValidationErrors = new ArrayList<ApiParameterError>(); final DataValidatorBuilder baseDataValidator = new DataValidatorBuilder(dataValidationErrors) .resource(SAVINGS_ACCOUNT_RESOURCE_NAME + SavingsApiConstants.applyAnnualFeeTransactionAction); if (isNotActive()) { baseDataValidator.reset().failWithCodeNoParameterAddedToErrorCode("transaction.invalid.account.is.not.active"); if 
(!dataValidationErrors.isEmpty()) { throw new PlatformApiDataValidationException(dataValidationErrors); }
}
// Annual-fee settings must exist and a next due date must have been scheduled.
final LocalDate nextAnnualFeeDueDate = getNextAnnualFeeDueDate();
if (nextAnnualFeeDueDate == null || annualFeeSettingsNotSet()) {
    baseDataValidator.reset().failWithCodeNoParameterAddedToErrorCode("no.annualfee.settings");
    throw new PlatformApiDataValidationException(dataValidationErrors);
}
if (nextAnnualFeeDueDate.isBefore(getActivationLocalDate())) {
    baseDataValidator.reset().parameter("annualFeeTransactionDate").value(getActivationLocalDate().toString(formatter))
    .failWithCodeNoParameterAddedToErrorCode("before.activationDate");
    throw new PlatformApiDataValidationException(dataValidationErrors);
}
if (isDateInTheFuture(annualFeeTransactionDate)) {
    baseDataValidator.reset().failWithCodeNoParameterAddedToErrorCode("transaction.in.the.future");
    throw new PlatformApiDataValidationException(dataValidationErrors);
}
if (isNotValidAnnualFeeTransactionDate(annualFeeTransactionDate, today)) {
    baseDataValidator.reset().failWithCodeNoParameterAddedToErrorCode("invalid.date");
    throw new PlatformApiDataValidationException(dataValidationErrors);
}
// Guard against double-booking the fee on the same due date.
Date currentAnnualFeeNextDueDate = findLatestAnnualFeeTransactionDueDate();
if (currentAnnualFeeNextDueDate != null && new LocalDate(currentAnnualFeeNextDueDate).isEqual(annualFeeTransactionDate)) {
    baseDataValidator.reset().parameter("annualFeeTransactionDate").value(annualFeeTransactionDate.toString(formatter))
    .failWithCodeNoParameterAddedToErrorCode("transaction.exists.on.date");
    throw new PlatformApiDataValidationException(dataValidationErrors);
}
// Snapshot existing ids so callers can derive the accounting delta afterwards.
existingTransactionIds.addAll(findExistingTransactionIds());
existingReversedTransactionIds.addAll(findExistingReversedTransactionIds());
final Money annualFee = Money.of(this.currency, this.annualFeeAmount);
final SavingsAccountTransaction annualFeeTransaction = SavingsAccountTransaction.annualFee(this, office(),
        annualFeeTransactionDate, annualFee);
this.transactions.add(annualFeeTransaction);
validateAccountBalanceDoesNotBecomeNegative(SavingsApiConstants.applyAnnualFeeTransactionAction);
this.summary.updateSummary(this.currency, this.savingsAccountTransactionSummaryWrapper, this.transactions);
calculateInterestUsing(mc, today);
// Reschedule: one year after the latest booked fee if any, otherwise from activation.
currentAnnualFeeNextDueDate = findLatestAnnualFeeTransactionDueDate();
if (currentAnnualFeeNextDueDate != null) {
    final LocalDate newAnnualFeeNextDueDate = new LocalDate(currentAnnualFeeNextDueDate).withMonthOfYear(this.annualFeeOnMonth)
    .withDayOfMonth(this.annualFeeOnDay).plusYears(1);
    this.annualFeeNextDueDate = newAnnualFeeNextDueDate.toDate();
} else {
    updateToNextAnnualFeeDueDateFrom(getActivationLocalDate());
}
return annualFeeTransaction;
}

private boolean isNotValidAnnualFeeTransactionDate(final LocalDate annualFeeTransactionDate, final LocalDate today) {
    return !isValidAnnualFeeTransactionDate(annualFeeTransactionDate, today);
}

/**
 * Walks the yearly annual-fee due dates from activation up to today and returns true
 * if the candidate date matches one of them.
 */
private boolean isValidAnnualFeeTransactionDate(final LocalDate annualFeeTransactionDate, final LocalDate today) {
    LocalDate startingDate = getActivationLocalDate();
    boolean isValid = false;
    while (!startingDate.isAfter(today) && !isValid) {
        LocalDate nextDueLocalDate = startingDate.withMonthOfYear(this.annualFeeOnMonth).withDayOfMonth(this.annualFeeOnDay);
        // If the configured month/day already passed this cycle, the due date is next year.
        if (startingDate.isAfter(nextDueLocalDate)) {
            nextDueLocalDate = nextDueLocalDate.plusYears(1);
        }
        isValid = nextDueLocalDate.isEqual(annualFeeTransactionDate);
        startingDate = nextDueLocalDate.plusYears(1);
    }
    return isValid;
}

// Sets annualFeeNextDueDate to the first configured month/day on or after startingDate.
private void updateToNextAnnualFeeDueDateFrom(final LocalDate startingDate) {
    LocalDate nextDueLocalDate = startingDate.withMonthOfYear(this.annualFeeOnMonth).withDayOfMonth(this.annualFeeOnDay);
    if (startingDate.isAfter(nextDueLocalDate)) {
        nextDueLocalDate = nextDueLocalDate.plusYears(1);
    }
    this.annualFeeNextDueDate = nextDueLocalDate.toDate();
}

/** Returns the scheduled next annual-fee due date, or null if none is scheduled. */
private LocalDate getNextAnnualFeeDueDate() {
    LocalDate nextAnnualFeeDueDate = null;
    if (this.annualFeeNextDueDate != null) {
nextAnnualFeeDueDate = new LocalDate(this.annualFeeNextDueDate);
}
return nextAnnualFeeDueDate;
}

private boolean annualFeeSettingsNotSet() {
    return !annualFeeSettingsSet();
}

// Annual fee is considered configured only when both day and month are present.
private boolean annualFeeSettingsSet() {
    return this.annualFeeOnDay != null && this.annualFeeOnMonth != null;
}

private boolean isAutomaticWithdrawalFee() {
    return this.withdrawalFeeType != null;
}

// Locked while the lock-in end date is strictly after the transaction date.
private boolean isAccountLocked(final LocalDate transactionDate) {
    boolean isLocked = false;
    final boolean accountHasLockedInSetting = this.lockedInUntilDate != null;
    if (accountHasLockedInSetting) {
        isLocked = getLockedInUntilLocalDate().isAfter(transactionDate);
    }
    return isLocked;
}

private LocalDate getLockedInUntilLocalDate() {
    LocalDate lockedInUntilLocalDate = null;
    if (this.lockedInUntilDate != null) {
        lockedInUntilLocalDate = new LocalDate(this.lockedInUntilDate);
    }
    return lockedInUntilLocalDate;
}

private boolean isDateInTheFuture(final LocalDate transactionDate) {
    return transactionDate.isAfter(DateUtils.getLocalDateOfTenant());
}

private BigDecimal getAccountBalance() {
    return this.summary.getAccountBalance(this.currency).getAmount();
}

/**
 * Applies user-supplied changes to a still-pending application, recording every actual
 * change in actualChanges. Only allowed while SUBMITTED_AND_PENDING_APPROVAL.
 */
public void modifyApplication(final JsonCommand command, final Map<String, Object> actualChanges) {
    final List<ApiParameterError> dataValidationErrors = new ArrayList<ApiParameterError>();
    final DataValidatorBuilder baseDataValidator = new DataValidatorBuilder(dataValidationErrors)
    .resource(SAVINGS_ACCOUNT_RESOURCE_NAME + SavingsApiConstants.modifyApplicationAction);
    final SavingsAccountStatusType currentStatus = SavingsAccountStatusType.fromInt(this.status);
    if (!SavingsAccountStatusType.SUBMITTED_AND_PENDING_APPROVAL.hasStateOf(currentStatus)) {
        baseDataValidator.reset().failWithCodeNoParameterAddedToErrorCode("not.in.submittedandpendingapproval.state");
        if (!dataValidationErrors.isEmpty()) { throw new PlatformApiDataValidationException(dataValidationErrors); }
    }
    final String localeAsInput = command.locale();
    final String dateFormat =
command.dateFormat();
if (command.isChangeInLocalDateParameterNamed(SavingsApiConstants.submittedOnDateParamName, getSubmittedOnLocalDate())) {
    final LocalDate newValue = command.localDateValueOfParameterNamed(SavingsApiConstants.submittedOnDateParamName);
    final String newValueAsString = command.stringValueOfParameterNamed(SavingsApiConstants.submittedOnDateParamName);
    actualChanges.put(SavingsApiConstants.submittedOnDateParamName, newValueAsString);
    actualChanges.put(SavingsApiConstants.localeParamName, localeAsInput);
    actualChanges.put(SavingsApiConstants.dateFormatParamName, dateFormat);
    this.submittedOnDate = newValue.toDate();
}
// Empty strings are normalised to null for account number / external id.
if (command.isChangeInStringParameterNamed(SavingsApiConstants.accountNoParamName, this.accountNumber)) {
    final String newValue = command.stringValueOfParameterNamed(SavingsApiConstants.accountNoParamName);
    actualChanges.put(SavingsApiConstants.accountNoParamName, newValue);
    this.accountNumber = StringUtils.defaultIfEmpty(newValue, null);
}
if (command.isChangeInStringParameterNamed(SavingsApiConstants.externalIdParamName, this.externalId)) {
    final String newValue = command.stringValueOfParameterNamed(SavingsApiConstants.externalIdParamName);
    actualChanges.put(SavingsApiConstants.externalIdParamName, newValue);
    this.externalId = StringUtils.defaultIfEmpty(newValue, null);
}
// NOTE(review): client/group/product/fieldOfficer id changes are only recorded here; the
// association itself is presumably re-wired elsewhere via the update(...) methods — confirm.
if (command.isChangeInLongParameterNamed(SavingsApiConstants.clientIdParamName, clientId())) {
    final Long newValue = command.longValueOfParameterNamed(SavingsApiConstants.clientIdParamName);
    actualChanges.put(SavingsApiConstants.clientIdParamName, newValue);
}
if (command.isChangeInLongParameterNamed(SavingsApiConstants.groupIdParamName, groupId())) {
    final Long newValue = command.longValueOfParameterNamed(SavingsApiConstants.groupIdParamName);
    actualChanges.put(SavingsApiConstants.groupIdParamName, newValue);
}
if (command.isChangeInLongParameterNamed(SavingsApiConstants.productIdParamName, this.product.getId())) {
    final Long newValue =
command.longValueOfParameterNamed(SavingsApiConstants.productIdParamName);
actualChanges.put(SavingsApiConstants.productIdParamName, newValue);
}
if (command.isChangeInLongParameterNamed(SavingsApiConstants.fieldOfficerIdParamName, fieldOfficerId())) {
    final Long newValue = command.longValueOfParameterNamed(SavingsApiConstants.fieldOfficerIdParamName);
    actualChanges.put(SavingsApiConstants.fieldOfficerIdParamName, newValue);
}
if (command.isChangeInBigDecimalParameterNamed(SavingsApiConstants.nominalAnnualInterestRateParamName,
        this.nominalAnnualInterestRate)) {
    final BigDecimal newValue = command.bigDecimalValueOfParameterNamed(SavingsApiConstants.nominalAnnualInterestRateParamName);
    actualChanges.put(SavingsApiConstants.nominalAnnualInterestRateParamName, newValue);
    actualChanges.put("locale", localeAsInput);
    this.nominalAnnualInterestRate = newValue;
}
if (command.isChangeInIntegerParameterNamed(SavingsApiConstants.interestCompoundingPeriodTypeParamName,
        this.interestCompoundingPeriodType)) {
    final Integer newValue = command.integerValueOfParameterNamed(SavingsApiConstants.interestCompoundingPeriodTypeParamName);
    // Normalise through the enum so only supported values are persisted.
    this.interestCompoundingPeriodType = newValue != null ? SavingsCompoundingInterestPeriodType.fromInt(newValue).getValue()
    : newValue;
    actualChanges.put(SavingsApiConstants.interestCompoundingPeriodTypeParamName, this.interestCompoundingPeriodType);
}
if (command.isChangeInIntegerParameterNamed(SavingsApiConstants.interestPostingPeriodTypeParamName, this.interestPostingPeriodType)) {
    final Integer newValue = command.integerValueOfParameterNamed(SavingsApiConstants.interestPostingPeriodTypeParamName);
    this.interestPostingPeriodType = newValue != null ?
SavingsPostingInterestPeriodType.fromInt(newValue).getValue() : newValue;
actualChanges.put(SavingsApiConstants.interestPostingPeriodTypeParamName, this.interestPostingPeriodType);
}
if (command.isChangeInIntegerParameterNamed(SavingsApiConstants.interestCalculationTypeParamName, this.interestCalculationType)) {
    final Integer newValue = command.integerValueOfParameterNamed(SavingsApiConstants.interestCalculationTypeParamName);
    this.interestCalculationType = newValue != null ? SavingsInterestCalculationType.fromInt(newValue).getValue() : newValue;
    actualChanges.put(SavingsApiConstants.interestCalculationTypeParamName, this.interestCalculationType);
}
if (command.isChangeInIntegerParameterNamed(SavingsApiConstants.interestCalculationDaysInYearTypeParamName,
        this.interestCalculationDaysInYearType)) {
    final Integer newValue = command.integerValueOfParameterNamed(SavingsApiConstants.interestCalculationDaysInYearTypeParamName);
    this.interestCalculationDaysInYearType = newValue != null ? SavingsInterestCalculationDaysInYearType.fromInt(newValue)
    .getValue() : newValue;
    actualChanges.put(SavingsApiConstants.interestCalculationDaysInYearTypeParamName, this.interestCalculationDaysInYearType);
}
// Zero is treated as "unset" for the minimum opening-balance requirement.
if (command.isChangeInBigDecimalParameterNamedDefaultingZeroToNull(SavingsApiConstants.minRequiredOpeningBalanceParamName,
        this.minRequiredOpeningBalance)) {
    final BigDecimal newValue = command
    .bigDecimalValueOfParameterNamedDefaultToNullIfZero(SavingsApiConstants.minRequiredOpeningBalanceParamName);
    actualChanges.put(SavingsApiConstants.minRequiredOpeningBalanceParamName, newValue);
    actualChanges.put("locale", localeAsInput);
    this.minRequiredOpeningBalance = Money.of(this.currency, newValue).getAmount();
}
if (command.isChangeInIntegerParameterNamedDefaultingZeroToNull(SavingsApiConstants.lockinPeriodFrequencyParamName,
        this.lockinPeriodFrequency)) {
    final Integer newValue = command
    .integerValueOfParameterNamedDefaultToNullIfZero(SavingsApiConstants.lockinPeriodFrequencyParamName);
actualChanges.put(SavingsApiConstants.lockinPeriodFrequencyParamName, newValue);
actualChanges.put("locale", localeAsInput);
this.lockinPeriodFrequency = newValue;
}
if (command.isChangeInIntegerParameterNamed(SavingsApiConstants.lockinPeriodFrequencyTypeParamName, this.lockinPeriodFrequencyType)) {
    final Integer newValue = command.integerValueOfParameterNamed(SavingsApiConstants.lockinPeriodFrequencyTypeParamName);
    actualChanges.put(SavingsApiConstants.lockinPeriodFrequencyTypeParamName, newValue);
    this.lockinPeriodFrequencyType = newValue != null ? SavingsPeriodFrequencyType.fromInt(newValue).getValue() : newValue;
}
// set period type to null if frequency is null
if (this.lockinPeriodFrequency == null) {
    this.lockinPeriodFrequencyType = null;
}
// Withdrawal-fee settings: zero amounts are treated as "unset".
if (command.isChangeInBigDecimalParameterNamedDefaultingZeroToNull(withdrawalFeeAmountParamName, this.withdrawalFeeAmount)) {
    final BigDecimal newValue = command.bigDecimalValueOfParameterNamedDefaultToNullIfZero(withdrawalFeeAmountParamName);
    actualChanges.put(withdrawalFeeAmountParamName, newValue);
    actualChanges.put(localeParamName, localeAsInput);
    this.withdrawalFeeAmount = newValue;
}
if (command.isChangeInIntegerParameterNamedDefaultingZeroToNull(withdrawalFeeTypeParamName, this.withdrawalFeeType)) {
    final Integer newValue = command.integerValueOfParameterNamedDefaultToNullIfZero(withdrawalFeeTypeParamName);
    actualChanges.put(withdrawalFeeTypeParamName, newValue);
    this.withdrawalFeeType = newValue != null ?
SavingsWithdrawalFeesType.fromInt(newValue).getValue() : newValue;
}
if (command.isChangeInBooleanParameterNamed(withdrawalFeeForTransfersParamName, this.withdrawalFeeApplicableForTransfer)) {
    final boolean newValue = command.booleanPrimitiveValueOfParameterNamed(withdrawalFeeForTransfersParamName);
    actualChanges.put(withdrawalFeeForTransfersParamName, newValue);
    this.withdrawalFeeApplicableForTransfer = newValue;
}
// set fee type to null if the fee amount is null
if (this.withdrawalFeeAmount == null) {
    this.withdrawalFeeType = null;
}
if (command.isChangeInBigDecimalParameterNamedDefaultingZeroToNull(annualFeeAmountParamName, this.annualFeeAmount)) {
    final BigDecimal newValue = command.bigDecimalValueOfParameterNamedDefaultToNullIfZero(annualFeeAmountParamName);
    actualChanges.put(annualFeeAmountParamName, newValue);
    actualChanges.put(localeParamName, localeAsInput);
    this.annualFeeAmount = newValue;
}
if (command.hasParameter(annualFeeOnMonthDayParamName)) {
    final MonthDay monthDay = command.extractMonthDayNamed(annualFeeOnMonthDayParamName);
    final String actualValueEntered = command.stringValueOfParameterNamed(annualFeeOnMonthDayParamName);
    final Integer dayOfMonthValue = monthDay.getDayOfMonth();
    // FIX: compare boxed Integers by value, not by reference. The previous `!=` comparison only
    // worked by accident of the Integer cache (values <= 127); equals() is null-safe here because
    // dayOfMonthValue is boxed from a primitive and is never null.
    if (!dayOfMonthValue.equals(this.annualFeeOnDay)) {
        actualChanges.put(annualFeeOnMonthDayParamName, actualValueEntered);
        actualChanges.put(localeParamName, localeAsInput);
        this.annualFeeOnDay = dayOfMonthValue;
    }
    final Integer monthOfYear = monthDay.getMonthOfYear();
    // FIX: value comparison for the same reason as above.
    if (!monthOfYear.equals(this.annualFeeOnMonth)) {
        actualChanges.put(annualFeeOnMonthDayParamName, actualValueEntered);
        actualChanges.put(localeParamName, localeAsInput);
        this.annualFeeOnMonth = monthOfYear;
    }
}
// set the annual-fee schedule to null if the fee amount is null
if (this.annualFeeAmount == null) {
    this.annualFeeOnDay = null;
    this.annualFeeOnMonth = null;
}
// Cross-field consistency checks run last, over the fully-applied state.
validateLockinDetails();
validateWithdrawalFeeDetails();
validateAnnualFeeDetails();
}

/**
 * Ensures annual-fee amount and month/day schedule are consistent: both present or both absent,
 * and the amount, when present, is zero or positive.
 */
private void validateAnnualFeeDetails() {
    final List<ApiParameterError>
dataValidationErrors = new ArrayList<ApiParameterError>();
final DataValidatorBuilder baseDataValidator = new DataValidatorBuilder(dataValidationErrors)
.resource(SAVINGS_ACCOUNT_RESOURCE_NAME);
if (this.annualFeeAmount == null) {
    // No amount: a month/day schedule must not exist on its own.
    if (this.annualFeeOnMonth != null || this.annualFeeOnDay != null) {
        baseDataValidator.reset().parameter(annualFeeAmountParamName).value(this.annualFeeAmount).notNull();
    }
} else {
    // Amount present: a complete month/day schedule is required, and the amount must be >= 0.
    if (this.annualFeeOnMonth == null || this.annualFeeOnDay == null) {
        baseDataValidator.reset().parameter(annualFeeOnMonthDayParamName).value(this.annualFeeOnMonth).notNull();
    }
    baseDataValidator.reset().parameter(annualFeeAmountParamName).value(this.annualFeeAmount).zeroOrPositiveAmount();
}
if (!dataValidationErrors.isEmpty()) { throw new PlatformApiDataValidationException(dataValidationErrors); }
}

/**
 * Ensures withdrawal-fee amount and type are consistent; type must be one of the supported
 * values 1 or 2 (presumably FLAT / PERCENT_OF_AMOUNT — confirm against SavingsWithdrawalFeesType).
 */
private void validateWithdrawalFeeDetails() {
    final List<ApiParameterError> dataValidationErrors = new ArrayList<ApiParameterError>();
    final DataValidatorBuilder baseDataValidator = new DataValidatorBuilder(dataValidationErrors)
    .resource(SAVINGS_ACCOUNT_RESOURCE_NAME);
    if (this.withdrawalFeeAmount == null) {
        baseDataValidator.reset().parameter(withdrawalFeeTypeParamName).value(this.withdrawalFeeType).ignoreIfNull()
        .isOneOfTheseValues(1, 2);
        // A type without an amount is incomplete configuration.
        if (this.withdrawalFeeType != null) {
            baseDataValidator.reset().parameter(withdrawalFeeAmountParamName).value(this.withdrawalFeeAmount).notNull();
        }
    } else {
        baseDataValidator.reset().parameter(withdrawalFeeAmountParamName).value(this.withdrawalFeeAmount).zeroOrPositiveAmount();
        baseDataValidator.reset().parameter(withdrawalFeeTypeParamName).value(this.withdrawalFeeType).notNull()
        .isOneOfTheseValues(1, 2);
    }
    if (!dataValidationErrors.isEmpty()) { throw new PlatformApiDataValidationException(dataValidationErrors); }
}

/**
 * Ensures lock-in frequency and frequency-type settings are consistent.
 */
private void validateLockinDetails() {
    final List<ApiParameterError> dataValidationErrors = new ArrayList<ApiParameterError>();
    final DataValidatorBuilder baseDataValidator = new
DataValidatorBuilder(dataValidationErrors) .resource(SAVINGS_ACCOUNT_RESOURCE_NAME); if (this.lockinPeriodFrequency == null) { baseDataValidator.reset().parameter(lockinPeriodFrequencyTypeParamName).value(this.lockinPeriodFrequencyType).ignoreIfNull() .inMinMaxRange(0, 3); if (this.lockinPeriodFrequencyType != null) { baseDataValidator.reset().parameter(lockinPeriodFrequencyParamName).value(this.lockinPeriodFrequency).notNull() .integerZeroOrGreater(); } } else { baseDataValidator.reset().parameter(lockinPeriodFrequencyParamName).value(this.lockinPeriodFrequencyType) .integerZeroOrGreater(); baseDataValidator.reset().parameter(lockinPeriodFrequencyTypeParamName).value(this.lockinPeriodFrequencyType).notNull() .inMinMaxRange(0, 3); } if (!dataValidationErrors.isEmpty()) { throw new PlatformApiDataValidationException(dataValidationErrors); } } public Map<String, Object> deriveAccountingBridgeData(final CurrencyData currencyData, final List<Long> existingTransactionIds, final List<Long> existingReversedTransactionIds) { final Map<String, Object> accountingBridgeData = new LinkedHashMap<String, Object>(); accountingBridgeData.put("savingsId", getId()); accountingBridgeData.put("savingsProductId", productId()); accountingBridgeData.put("currency", currencyData); accountingBridgeData.put("officeId", officeId()); accountingBridgeData.put("cashBasedAccountingEnabled", isCashBasedAccountingEnabledOnSavingsProduct()); accountingBridgeData.put("accrualBasedAccountingEnabled", isAccrualBasedAccountingEnabledOnSavingsProduct()); final List<Map<String, Object>> newLoanTransactions = new ArrayList<Map<String, Object>>(); for (final SavingsAccountTransaction transaction : this.transactions) { if (transaction.isReversed() && !existingReversedTransactionIds.contains(transaction.getId())) { newLoanTransactions.add(transaction.toMapData(currencyData)); } else if (!existingTransactionIds.contains(transaction.getId())) { newLoanTransactions.add(transaction.toMapData(currencyData)); } } 
accountingBridgeData.put("newSavingsTransactions", newLoanTransactions);
return accountingBridgeData;
}

/** Returns the ids of all transactions currently on the account. */
public Collection<Long> findExistingTransactionIds() {
    final Collection<Long> ids = new ArrayList<Long>();
    for (final SavingsAccountTransaction transaction : this.transactions) {
        ids.add(transaction.getId());
    }
    return ids;
}

/** Returns the ids of all reversed transactions currently on the account. */
public Collection<Long> findExistingReversedTransactionIds() {
    final Collection<Long> ids = new ArrayList<Long>();
    for (final SavingsAccountTransaction transaction : this.transactions) {
        if (transaction.isReversed()) {
            ids.add(transaction.getId());
        }
    }
    return ids;
}

public void update(final Client client) {
    this.client = client;
}

public void update(final Group group) {
    this.group = group;
}

public void update(final SavingsProduct product) {
    this.product = product;
}

public void update(final Staff fieldOfficer) {
    this.fieldOfficer = fieldOfficer;
}

// Assigning an explicit account number disables auto-generation.
public void updateAccountNo(final String newAccountNo) {
    this.accountNumber = newAccountNo;
    this.accountNumberRequiresAutoGeneration = false;
}

public boolean isAccountNumberRequiresAutoGeneration() {
    return this.accountNumberRequiresAutoGeneration;
}

public Long productId() {
    return this.product.getId();
}

private Boolean isCashBasedAccountingEnabledOnSavingsProduct() {
    return this.product.isCashBasedAccountingEnabled();
}

private Boolean isAccrualBasedAccountingEnabledOnSavingsProduct() {
    return this.product.isAccrualBasedAccountingEnabled();
}

// Office is resolved via the client when present, otherwise via the group.
public Long officeId() {
    Long officeId = null;
    if (this.client != null) {
        officeId = this.client.officeId();
    } else {
        officeId = this.group.officeId();
    }
    return officeId;
}

public Office office() {
    Office office = null;
    if (this.client != null) {
        office = this.client.getOffice();
    } else {
        office = this.group.getOffice();
    }
    return office;
}

public Long clientId() {
    Long id = null;
    if (this.client != null) {
        id = this.client.getId();
    }
    return id;
}

public Long groupId() {
    Long id = null;
    if (this.group != null) {
        id = this.group.getId();
    }
    return id;
}
public Long fieldOfficerId() {
    Long id = null;
    if (this.fieldOfficer != null) {
        id = this.fieldOfficer.getId();
    }
    return id;
}

public MonetaryCurrency getCurrency() {
    return this.currency;
}

/**
 * Validates a brand-new application: settings consistency plus submission-date sanity
 * (not in the future, not before client/group activation).
 */
public void validateNewApplicationState(final LocalDate todayDateOfTenant) {
    validateLockinDetails();
    validateWithdrawalFeeDetails();
    validateAnnualFeeDetails();
    final LocalDate submittedOn = getSubmittedOnLocalDate();
    final List<ApiParameterError> dataValidationErrors = new ArrayList<ApiParameterError>();
    final DataValidatorBuilder baseDataValidator = new DataValidatorBuilder(dataValidationErrors)
    .resource(SAVINGS_ACCOUNT_RESOURCE_NAME + SavingsApiConstants.summitalAction);
    if (submittedOn.isAfter(todayDateOfTenant)) {
        baseDataValidator.reset().parameter(SavingsApiConstants.submittedOnDateParamName).value(submittedOn)
        .failWithCodeNoParameterAddedToErrorCode("cannot.be.a.future.date");
    }
    if (this.client != null && this.client.isActivatedAfter(submittedOn)) {
        baseDataValidator.reset().parameter(SavingsApiConstants.submittedOnDateParamName).value(this.client.getActivationLocalDate())
        .failWithCodeNoParameterAddedToErrorCode("cannot.be.before.client.activation.date");
    } else if (this.group != null && this.group.isActivatedAfter(submittedOn)) {
        // NOTE(review): reuses the "client" error code for the group case — confirm intended.
        baseDataValidator.reset().parameter(SavingsApiConstants.submittedOnDateParamName).value(this.group.getActivationLocalDate())
        .failWithCodeNoParameterAddedToErrorCode("cannot.be.before.client.activation.date");
    }
    if (!dataValidationErrors.isEmpty()) { throw new PlatformApiDataValidationException(dataValidationErrors); }
}

private LocalDate getSubmittedOnLocalDate() {
    LocalDate submittedOn = null;
    if (this.submittedOnDate != null) {
        submittedOn = new LocalDate(this.submittedOnDate);
    }
    return submittedOn;
}

private LocalDate getApprovedOnLocalDate() {
    LocalDate approvedOnLocalDate = null;
    if (this.approvedOnDate != null) {
        approvedOnLocalDate = new LocalDate(this.approvedOnDate);
    }
    return approvedOnLocalDate;
}

public Client getClient() {
return this.client;
}

/**
 * Transitions a pending application to APPROVED, recording approver, approval date and the
 * set of actual changes. The approval date may be neither before submission nor in the future.
 */
public Map<String, Object> approveApplication(final AppUser currentUser, final JsonCommand command, final LocalDate tenantsTodayDate) {
    final Map<String, Object> actualChanges = new LinkedHashMap<String, Object>();
    final List<ApiParameterError> dataValidationErrors = new ArrayList<ApiParameterError>();
    final DataValidatorBuilder baseDataValidator = new DataValidatorBuilder(dataValidationErrors)
    .resource(SAVINGS_ACCOUNT_RESOURCE_NAME + SavingsApiConstants.approvalAction);
    final SavingsAccountStatusType currentStatus = SavingsAccountStatusType.fromInt(this.status);
    if (!SavingsAccountStatusType.SUBMITTED_AND_PENDING_APPROVAL.hasStateOf(currentStatus)) {
        baseDataValidator.reset().parameter(SavingsApiConstants.approvedOnDateParamName)
        .failWithCodeNoParameterAddedToErrorCode("not.in.submittedandpendingapproval.state");
        if (!dataValidationErrors.isEmpty()) { throw new PlatformApiDataValidationException(dataValidationErrors); }
    }
    this.status = SavingsAccountStatusType.APPROVED.getValue();
    actualChanges.put(SavingsApiConstants.statusParamName, SavingsEnumerations.status(this.status));
    // only do below if status has changed in the 'approval' case
    final LocalDate approvedOn = command.localDateValueOfParameterNamed(SavingsApiConstants.approvedOnDateParamName);
    final String approvedOnDateChange = command.stringValueOfParameterNamed(SavingsApiConstants.approvedOnDateParamName);
    this.approvedOnDate = approvedOn.toDate();
    this.approvedBy = currentUser;
    actualChanges.put(SavingsApiConstants.localeParamName, command.locale());
    actualChanges.put(SavingsApiConstants.dateFormatParamName, command.dateFormat());
    actualChanges.put(SavingsApiConstants.approvedOnDateParamName, approvedOnDateChange);
    final LocalDate submittalDate = getSubmittedOnLocalDate();
    if (approvedOn.isBefore(submittalDate)) {
        final DateTimeFormatter formatter = DateTimeFormat.forPattern(command.dateFormat()).withLocale(command.extractLocale());
        final String submittalDateAsString =
formatter.print(submittalDate);
baseDataValidator.reset().parameter(SavingsApiConstants.approvedOnDateParamName).value(submittalDateAsString)
.failWithCodeNoParameterAddedToErrorCode("cannot.be.before.submittal.date");
if (!dataValidationErrors.isEmpty()) { throw new PlatformApiDataValidationException(dataValidationErrors); }
}
if (approvedOn.isAfter(tenantsTodayDate)) {
    baseDataValidator.reset().parameter(SavingsApiConstants.approvedOnDateParamName)
    .failWithCodeNoParameterAddedToErrorCode("cannot.be.a.future.date");
    if (!dataValidationErrors.isEmpty()) { throw new PlatformApiDataValidationException(dataValidationErrors); }
}
// FIXME - kw - support field officer history for savings accounts
// if (this.fieldOfficer != null) {
// final LoanOfficerAssignmentHistory loanOfficerAssignmentHistory =
// LoanOfficerAssignmentHistory.createNew(this,
// this.fieldOfficer, approvedOn);
// this.loanOfficerHistory.add(loanOfficerAssignmentHistory);
return actualChanges;
}

/**
 * Reverts an APPROVED account back to pending, clearing all approval, rejection,
 * withdrawal and closure state.
 */
public Map<String, Object> undoApplicationApproval() {
    final Map<String, Object> actualChanges = new LinkedHashMap<String, Object>();
    final List<ApiParameterError> dataValidationErrors = new ArrayList<ApiParameterError>();
    final DataValidatorBuilder baseDataValidator = new DataValidatorBuilder(dataValidationErrors)
    .resource(SAVINGS_ACCOUNT_RESOURCE_NAME + SavingsApiConstants.undoApprovalAction);
    final SavingsAccountStatusType currentStatus = SavingsAccountStatusType.fromInt(this.status);
    if (!SavingsAccountStatusType.APPROVED.hasStateOf(currentStatus)) {
        baseDataValidator.reset().parameter(SavingsApiConstants.approvedOnDateParamName)
        .failWithCodeNoParameterAddedToErrorCode("not.in.approved.state");
        if (!dataValidationErrors.isEmpty()) { throw new PlatformApiDataValidationException(dataValidationErrors); }
    }
    this.status = SavingsAccountStatusType.SUBMITTED_AND_PENDING_APPROVAL.getValue();
    actualChanges.put(SavingsApiConstants.statusParamName, SavingsEnumerations.status(this.status));
    this.approvedOnDate =
null;
this.approvedBy = null;
this.rejectedOnDate = null;
this.rejectedBy = null;
this.withdrawnOnDate = null;
this.withdrawnBy = null;
this.closedOnDate = null;
this.closedBy = null;
actualChanges.put(SavingsApiConstants.approvedOnDateParamName, "");
// FIXME - kw - support field officer history for savings accounts
// this.loanOfficerHistory.clear();
return actualChanges;
}

/**
 * Reverses the transaction with the given id, adding the id to reversedTransactionIds,
 * and reschedules the annual-fee due date when an annual-fee transaction is undone.
 */
public void undoTransaction(final Long transactionId, final List<Long> reversedTransactionIds) {
    SavingsAccountTransaction transactionToUndo = null;
    for (final SavingsAccountTransaction transaction : this.transactions) {
        if (transaction.isIdentifiedBy(transactionId)) {
            transactionToUndo = transaction;
        }
    }
    if (transactionToUndo == null) {
        // throw non found exception
        // NOTE(review): unknown transaction ids are currently silently ignored — confirm intended.
    } else {
        transactionToUndo.reverse();
        reversedTransactionIds.add(transactionId);
        if (transactionToUndo.isAnnualFee()) {
            // Re-derive the due date from the latest remaining (non-reversed) annual fee,
            // or fall back to a fresh schedule computed from activation.
            this.annualFeeNextDueDate = findLatestAnnualFeeTransactionDueDate();
            if (this.annualFeeNextDueDate == null) {
                updateToNextAnnualFeeDueDateFrom(getActivationLocalDate());
            } else {
                final LocalDate newAnnualFeeNextDueDate = new LocalDate(this.annualFeeNextDueDate)
                .withMonthOfYear(this.annualFeeOnMonth).withDayOfMonth(this.annualFeeOnDay).plusYears(1);
                this.annualFeeNextDueDate = newAnnualFeeNextDueDate.toDate();
            }
        }
    }
}

/**
 * Returns the transaction date of the most recent non-reversed annual-fee transaction, or null.
 */
private Date findLatestAnnualFeeTransactionDueDate() {
    Date nextDueDate = null;
    LocalDate lastAnnualFeeTransactionDate = null;
    for (final SavingsAccountTransaction transaction : retreiveOrderedListOfTransactions()) {
        if (transaction.isAnnualFeeAndNotReversed()) {
            if (lastAnnualFeeTransactionDate == null) {
                lastAnnualFeeTransactionDate = transaction.transactionLocalDate();
                nextDueDate = lastAnnualFeeTransactionDate.toDate();
            }
            if (transaction.transactionLocalDate().isAfter(lastAnnualFeeTransactionDate)) {
                lastAnnualFeeTransactionDate = transaction.transactionLocalDate();
                nextDueDate = lastAnnualFeeTransactionDate.toDate();
            }
        }
    }
    return nextDueDate;
}

public Map<String, Object> rejectApplication(final
AppUser currentUser, final JsonCommand command, final LocalDate tenantsTodayDate) {
final Map<String, Object> actualChanges = new LinkedHashMap<String, Object>();
final List<ApiParameterError> dataValidationErrors = new ArrayList<ApiParameterError>();
final DataValidatorBuilder baseDataValidator = new DataValidatorBuilder(dataValidationErrors)
.resource(SAVINGS_ACCOUNT_RESOURCE_NAME + SavingsApiConstants.rejectAction);
final SavingsAccountStatusType currentStatus = SavingsAccountStatusType.fromInt(this.status);
// Only a pending application can be rejected.
if (!SavingsAccountStatusType.SUBMITTED_AND_PENDING_APPROVAL.hasStateOf(currentStatus)) {
    baseDataValidator.reset().parameter(SavingsApiConstants.rejectedOnDateParamName)
    .failWithCodeNoParameterAddedToErrorCode("not.in.submittedandpendingapproval.state");
    if (!dataValidationErrors.isEmpty()) { throw new PlatformApiDataValidationException(dataValidationErrors); }
}
this.status = SavingsAccountStatusType.REJECTED.getValue();
actualChanges.put(SavingsApiConstants.statusParamName, SavingsEnumerations.status(this.status));
final LocalDate rejectedOn = command.localDateValueOfParameterNamed(SavingsApiConstants.rejectedOnDateParamName);
final String rejectedOnAsString = command.stringValueOfParameterNamed(SavingsApiConstants.rejectedOnDateParamName);
// Rejection also closes the account as of the rejection date.
this.rejectedOnDate = rejectedOn.toDate();
this.rejectedBy = currentUser;
this.withdrawnOnDate = null;
this.withdrawnBy = null;
this.closedOnDate = rejectedOn.toDate();
this.closedBy = currentUser;
actualChanges.put(SavingsApiConstants.localeParamName, command.locale());
actualChanges.put(SavingsApiConstants.dateFormatParamName, command.dateFormat());
actualChanges.put(SavingsApiConstants.rejectedOnDateParamName, rejectedOnAsString);
actualChanges.put(SavingsApiConstants.closedOnDateParamName, rejectedOnAsString);
final LocalDate submittalDate = getSubmittedOnLocalDate();
if (rejectedOn.isBefore(submittalDate)) {
    final DateTimeFormatter formatter =
DateTimeFormat.forPattern(command.dateFormat()).withLocale(command.extractLocale());
final String submittalDateAsString = formatter.print(submittalDate);
baseDataValidator.reset().parameter(SavingsApiConstants.rejectedOnDateParamName).value(submittalDateAsString)
.failWithCodeNoParameterAddedToErrorCode("cannot.be.before.submittal.date");
if (!dataValidationErrors.isEmpty()) { throw new PlatformApiDataValidationException(dataValidationErrors); }
}
if (rejectedOn.isAfter(tenantsTodayDate)) {
    baseDataValidator.reset().parameter(SavingsApiConstants.rejectedOnDateParamName).value(rejectedOn)
    .failWithCodeNoParameterAddedToErrorCode("cannot.be.a.future.date");
    if (!dataValidationErrors.isEmpty()) { throw new PlatformApiDataValidationException(dataValidationErrors); }
}
return actualChanges;
}

/**
 * Marks a pending application as withdrawn by the applicant; the account is also closed
 * as of the withdrawal date.
 */
public Map<String, Object> applicantWithdrawsFromApplication(final AppUser currentUser, final JsonCommand command,
        final LocalDate tenantsTodayDate) {
    final Map<String, Object> actualChanges = new LinkedHashMap<String, Object>();
    final List<ApiParameterError> dataValidationErrors = new ArrayList<ApiParameterError>();
    final DataValidatorBuilder baseDataValidator = new DataValidatorBuilder(dataValidationErrors)
    .resource(SAVINGS_ACCOUNT_RESOURCE_NAME + SavingsApiConstants.withdrawnByApplicantAction);
    final SavingsAccountStatusType currentStatus = SavingsAccountStatusType.fromInt(this.status);
    if (!SavingsAccountStatusType.SUBMITTED_AND_PENDING_APPROVAL.hasStateOf(currentStatus)) {
        baseDataValidator.reset().parameter(SavingsApiConstants.withdrawnOnDateParamName)
        .failWithCodeNoParameterAddedToErrorCode("not.in.submittedandpendingapproval.state");
        if (!dataValidationErrors.isEmpty()) { throw new PlatformApiDataValidationException(dataValidationErrors); }
    }
    this.status = SavingsAccountStatusType.WITHDRAWN_BY_APPLICANT.getValue();
    actualChanges.put(SavingsApiConstants.statusParamName, SavingsEnumerations.status(this.status));
    final LocalDate withdrawnOn =
command.localDateValueOfParameterNamed(SavingsApiConstants.withdrawnOnDateParamName); final String withdrawnOnAsString = command.stringValueOfParameterNamed(SavingsApiConstants.withdrawnOnDateParamName); this.rejectedOnDate = null; this.rejectedBy = null; this.withdrawnOnDate = withdrawnOn.toDate(); this.withdrawnBy = currentUser; this.closedOnDate = withdrawnOn.toDate(); this.closedBy = currentUser; actualChanges.put(SavingsApiConstants.localeParamName, command.locale()); actualChanges.put(SavingsApiConstants.dateFormatParamName, command.dateFormat()); actualChanges.put(SavingsApiConstants.withdrawnOnDateParamName, withdrawnOnAsString); actualChanges.put(SavingsApiConstants.closedOnDateParamName, withdrawnOnAsString); final LocalDate submittalDate = getSubmittedOnLocalDate(); if (withdrawnOn.isBefore(submittalDate)) { final DateTimeFormatter formatter = DateTimeFormat.forPattern(command.dateFormat()).withLocale(command.extractLocale()); final String submittalDateAsString = formatter.print(submittalDate); baseDataValidator.reset().parameter(SavingsApiConstants.withdrawnOnDateParamName).value(submittalDateAsString) .failWithCodeNoParameterAddedToErrorCode("cannot.be.before.submittal.date"); if (!dataValidationErrors.isEmpty()) { throw new PlatformApiDataValidationException(dataValidationErrors); } } if (withdrawnOn.isAfter(tenantsTodayDate)) { baseDataValidator.reset().parameter(SavingsApiConstants.withdrawnOnDateParamName).value(withdrawnOn) .failWithCodeNoParameterAddedToErrorCode("cannot.be.a.future.date"); if (!dataValidationErrors.isEmpty()) { throw new PlatformApiDataValidationException(dataValidationErrors); } } return actualChanges; } public Map<String, Object> activate(final AppUser currentUser, final JsonCommand command, final LocalDate tenantsTodayDate, final List<Long> existingTransactionIds, final List<Long> existingReversedTransactionIds) { final Map<String, Object> actualChanges = new LinkedHashMap<String, Object>(); final List<ApiParameterError> 
dataValidationErrors = new ArrayList<ApiParameterError>(); final DataValidatorBuilder baseDataValidator = new DataValidatorBuilder(dataValidationErrors) .resource(SAVINGS_ACCOUNT_RESOURCE_NAME + SavingsApiConstants.activateAction); final SavingsAccountStatusType currentStatus = SavingsAccountStatusType.fromInt(this.status); if (!SavingsAccountStatusType.APPROVED.hasStateOf(currentStatus)) { baseDataValidator.reset().parameter(SavingsApiConstants.activatedOnDateParamName) .failWithCodeNoParameterAddedToErrorCode("not.in.approved.state"); if (!dataValidationErrors.isEmpty()) { throw new PlatformApiDataValidationException(dataValidationErrors); } } final Locale locale = command.extractLocale(); final DateTimeFormatter fmt = DateTimeFormat.forPattern(command.dateFormat()).withLocale(locale); final LocalDate activationDate = command.localDateValueOfParameterNamed(SavingsApiConstants.activatedOnDateParamName); this.status = SavingsAccountStatusType.ACTIVE.getValue(); actualChanges.put(SavingsApiConstants.statusParamName, SavingsEnumerations.status(this.status)); actualChanges.put(SavingsApiConstants.localeParamName, command.locale()); actualChanges.put(SavingsApiConstants.dateFormatParamName, command.dateFormat()); actualChanges.put(SavingsApiConstants.activatedOnDateParamName, activationDate.toString(fmt)); this.rejectedOnDate = null; this.rejectedBy = null; this.withdrawnOnDate = null; this.withdrawnBy = null; this.closedOnDate = null; this.closedBy = null; this.activatedOnDate = activationDate.toDate(); this.activatedBy = currentUser; this.lockedInUntilDate = calculateDateAccountIsLockedUntil(getActivationLocalDate()); if (annualFeeSettingsSet()) { updateToNextAnnualFeeDueDateFrom(getActivationLocalDate()); } if (this.client != null && this.client.isActivatedAfter(activationDate)) { final DateTimeFormatter formatter = DateTimeFormat.forPattern(command.dateFormat()).withLocale(command.extractLocale()); final String dateAsString = 
formatter.print(this.client.getActivationLocalDate()); baseDataValidator.reset().parameter(SavingsApiConstants.activatedOnDateParamName).value(dateAsString) .failWithCodeNoParameterAddedToErrorCode("cannot.be.before.client.activation.date"); if (!dataValidationErrors.isEmpty()) { throw new PlatformApiDataValidationException(dataValidationErrors); } } if (this.group != null && this.group.isActivatedAfter(activationDate)) { final DateTimeFormatter formatter = DateTimeFormat.forPattern(command.dateFormat()).withLocale(command.extractLocale()); final String dateAsString = formatter.print(this.client.getActivationLocalDate()); baseDataValidator.reset().parameter(SavingsApiConstants.activatedOnDateParamName).value(dateAsString) .failWithCodeNoParameterAddedToErrorCode("cannot.be.before.group.activation.date"); if (!dataValidationErrors.isEmpty()) { throw new PlatformApiDataValidationException(dataValidationErrors); } } final LocalDate approvalDate = getApprovedOnLocalDate(); if (activationDate.isBefore(approvalDate)) { final DateTimeFormatter formatter = DateTimeFormat.forPattern(command.dateFormat()).withLocale(command.extractLocale()); final String dateAsString = formatter.print(approvalDate); baseDataValidator.reset().parameter(SavingsApiConstants.activatedOnDateParamName).value(dateAsString) .failWithCodeNoParameterAddedToErrorCode("cannot.be.before.approval.date"); if (!dataValidationErrors.isEmpty()) { throw new PlatformApiDataValidationException(dataValidationErrors); } } if (activationDate.isAfter(tenantsTodayDate)) { baseDataValidator.reset().parameter(SavingsApiConstants.activatedOnDateParamName).value(activationDate) .failWithCodeNoParameterAddedToErrorCode("cannot.be.a.future.date"); if (!dataValidationErrors.isEmpty()) { throw new PlatformApiDataValidationException(dataValidationErrors); } } // auto enter deposit for minimum required opening balance when // activating account. 
final Money minRequiredOpeningBalance = Money.of(this.currency, this.minRequiredOpeningBalance); if (minRequiredOpeningBalance.isGreaterThanZero()) { final SavingsAccountTransactionDTO transactionDTO = new SavingsAccountTransactionDTO(fmt, activationDate, minRequiredOpeningBalance.getAmount(), existingTransactionIds, existingReversedTransactionIds, null); deposit(transactionDTO); final Money openingAccountBalance = Money.zero(this.currency); recalculateDailyBalances(openingAccountBalance); } return actualChanges; } public Map<String, Object> close(final AppUser currentUser, final JsonCommand command, final LocalDate tenantsTodayDate) { final Map<String, Object> actualChanges = new LinkedHashMap<String, Object>(); final List<ApiParameterError> dataValidationErrors = new ArrayList<ApiParameterError>(); final DataValidatorBuilder baseDataValidator = new DataValidatorBuilder(dataValidationErrors) .resource(SAVINGS_ACCOUNT_RESOURCE_NAME + SavingsApiConstants.closeAction); final SavingsAccountStatusType currentStatus = SavingsAccountStatusType.fromInt(this.status); if (!SavingsAccountStatusType.ACTIVE.hasStateOf(currentStatus)) { baseDataValidator.reset().failWithCodeNoParameterAddedToErrorCode("not.in.active.state"); if (!dataValidationErrors.isEmpty()) { throw new PlatformApiDataValidationException(dataValidationErrors); } } final Locale locale = command.extractLocale(); final DateTimeFormatter fmt = DateTimeFormat.forPattern(command.dateFormat()).withLocale(locale); final LocalDate closedDate = command.localDateValueOfParameterNamed(SavingsApiConstants.closedOnDateParamName); if (closedDate.isBefore(getActivationLocalDate())) { baseDataValidator.reset().parameter(SavingsApiConstants.closedOnDateParamName).value(closedDate) .failWithCode("must.be.after.activation.date"); if (!dataValidationErrors.isEmpty()) { throw new PlatformApiDataValidationException(dataValidationErrors); } } if (closedDate.isAfter(tenantsTodayDate)) { 
baseDataValidator.reset().parameter(SavingsApiConstants.closedOnDateParamName).value(closedDate) .failWithCode("cannot.be.a.future.date"); if (!dataValidationErrors.isEmpty()) { throw new PlatformApiDataValidationException(dataValidationErrors); } } final List<SavingsAccountTransaction> savingsAccountTransactions = retreiveListOfTransactions(); if (savingsAccountTransactions.size() > 0) { final SavingsAccountTransaction accountTransaction = savingsAccountTransactions.get(savingsAccountTransactions.size() - 1); if (accountTransaction.isAfter(closedDate)) { baseDataValidator.reset().parameter(SavingsApiConstants.closedOnDateParamName).value(closedDate) .failWithCode("must.be.after.last.transaction.date"); if (!dataValidationErrors.isEmpty()) { throw new PlatformApiDataValidationException(dataValidationErrors); } } } if (getAccountBalance().doubleValue() > 0) { baseDataValidator.reset().failWithCodeNoParameterAddedToErrorCode("results.in.balance.not.zero"); if (!dataValidationErrors.isEmpty()) { throw new PlatformApiDataValidationException(dataValidationErrors); } } this.status = SavingsAccountStatusType.CLOSED.getValue(); actualChanges.put(SavingsApiConstants.statusParamName, SavingsEnumerations.status(this.status)); actualChanges.put(SavingsApiConstants.localeParamName, command.locale()); actualChanges.put(SavingsApiConstants.dateFormatParamName, command.dateFormat()); actualChanges.put(SavingsApiConstants.closedOnDateParamName, closedDate.toString(fmt)); this.rejectedOnDate = null; this.rejectedBy = null; this.withdrawnOnDate = null; this.withdrawnBy = null; this.closedOnDate = closedDate.toDate(); this.closedBy = currentUser; return actualChanges; } private Date calculateDateAccountIsLockedUntil(final LocalDate activationLocalDate) { Date lockedInUntilLocalDate = null; final PeriodFrequencyType lockinPeriodFrequencyType = PeriodFrequencyType.fromInt(this.lockinPeriodFrequencyType); switch (lockinPeriodFrequencyType) { case INVALID: break; case DAYS: 
lockedInUntilLocalDate = activationLocalDate.plusDays(this.lockinPeriodFrequency).toDate(); break; case WEEKS: lockedInUntilLocalDate = activationLocalDate.plusWeeks(this.lockinPeriodFrequency).toDate(); break; case MONTHS: lockedInUntilLocalDate = activationLocalDate.plusMonths(this.lockinPeriodFrequency).toDate(); break; case YEARS: lockedInUntilLocalDate = activationLocalDate.plusYears(this.lockinPeriodFrequency).toDate(); break; } return lockedInUntilLocalDate; } public Group group() { return this.group; } public boolean isWithdrawalFeeApplicableForTransfer() { return this.withdrawalFeeApplicableForTransfer; } public void activateAccountBasedOnBalance() { if (SavingsAccountStatusType.fromInt(this.status).isClosed() && !this.summary.getAccountBalance(getCurrency()).isZero()) { this.status = SavingsAccountStatusType.ACTIVE.getValue(); } } public LocalDate getClosedOnDate() { return (LocalDate) ObjectUtils.defaultIfNull(new LocalDate(this.closedOnDate), null); } public SavingsAccountSummary getSummary() { return this.summary; } public List<SavingsAccountTransaction> getTransactions() { return this.transactions; } public void setStatus(final Integer status) { this.status = status; } }
package rsv.process.model;

import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

import org.apache.log4j.Logger;

/**
 * Data-access helper for the {@code resource_detail} table: upserts the XML
 * detail blob keyed by (resource_id, metric_id).
 */
public class ResourceDetailModel extends RSVDatabase {
    private static final Logger logger = Logger.getLogger(ResourceDetailModel.class);

    /**
     * Inserts or updates the XML detail for the given resource/metric pair.
     *
     * Fix over the previous version: the select {@code PreparedStatement} and
     * its {@code ResultSet} were never closed (and the select statement was
     * leaked outright when the variable was reassigned for the update/insert).
     * All JDBC resources are now managed with try-with-resources, and the
     * "Upserted" log line is emitted only after the statement has executed.
     *
     * @param resource_id resource key
     * @param metric_id   metric key
     * @param xml         detail payload to store
     * @throws SQLException on any database error
     */
    public void update(int resource_id, int metric_id, String xml) throws SQLException {
        // Probe for an existing row so we know whether to UPDATE or INSERT.
        final String selectSql = "select id from resource_detail where resource_id = ? and metric_id = ?";
        final boolean exists;
        try (PreparedStatement select = RSVDatabase.db.prepareStatement(selectSql)) {
            select.setInt(1, resource_id);
            select.setInt(2, metric_id);
            try (ResultSet rs = select.executeQuery()) {
                exists = rs.next();
            }
        }

        final String sql = exists
                ? "update resource_detail set xml = ? where resource_id = ? and metric_id = ?"
                : "insert into resource_detail (resource_id, metric_id, xml) values (?, ?, ?)";
        try (PreparedStatement stmt = RSVDatabase.db.prepareStatement(sql)) {
            if (exists) {
                stmt.setString(1, xml);
                stmt.setInt(2, resource_id);
                stmt.setInt(3, metric_id);
            } else {
                stmt.setInt(1, resource_id);
                stmt.setInt(2, metric_id);
                stmt.setString(3, xml);
            }
            stmt.execute();
        }

        // Log only after the upsert actually succeeded.
        logger.info("Upserted resource_detail for resource " + resource_id + " for metric " + metric_id);
    }
}
package eta.base;

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.List;
import java.util.HashSet;
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.nio.ByteOrder;
import java.nio.ByteBuffer;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.OpenOption;
import java.nio.file.FileSystems;
import java.nio.file.attribute.BasicFileAttributes;
import java.nio.file.attribute.PosixFilePermission;
import java.nio.file.attribute.FileAttribute;
import java.nio.file.StandardOpenOption;
import java.nio.charset.Charset;
import java.nio.channels.Channels;
import java.nio.channels.Channel;
import java.nio.channels.FileChannel;
import java.nio.channels.Selector;
import java.nio.channels.SelectionKey;
import java.nio.channels.SelectableChannel;
import java.nio.channels.ReadableByteChannel;
import java.nio.channels.WritableByteChannel;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import eta.runtime.Runtime;
import eta.runtime.stg.TSO;
import eta.runtime.stg.StgContext;
import eta.runtime.stg.Closure;
import eta.runtime.stg.Capability;
import eta.runtime.io.MemoryManager;
import eta.runtime.RuntimeLogging;
import static eta.runtime.RuntimeLogging.*;
import ghc_prim.ghc.types.datacons.Czh;
import ghc_prim.ghc.types.datacons.ZC;
import ghc_prim.ghc.types.tycons.ZMZN;
import ghc_prim.ghc.Types;
import java.lang.management.ManagementFactory;
import com.sun.management.OperatingSystemMXBean;

/**
 * Native-support shims backing Eta's `base` library: float/double bit
 * inspection, POSIX-style fd emulation over NIO channels, off-heap memory
 * helpers (malloc/calloc/memcpy/...), MD5, advisory file locking, and
 * miscellaneous OS queries.
 *
 * Fixes over the previous version:
 *  - _calloc: the zero-fill loop conditions were truncated
 *    ("while (times ...") and did not even parse; restored to
 *    "while (times-- != 0)".
 *  - fdReady: the Selector was never closed, leaking one file descriptor
 *    per call; now closed via try-with-resources.
 */
public class Utils {

    // TODO: Verify correctness
    public static float rintFloat(float f) {
        return (float) Math.rint((double) f);
    }

    /**
     * Rough equivalent of C's iswprint: true when the Unicode general
     * category of {@code c} is in the bitmask of "printable" categories.
     */
    // TODO: Verify
    public static boolean isPrintableChar(int c) {
        // NOTE(review): LETTER_NUMBER appears twice at the end of this mask;
        // one occurrence was probably meant to be another category (e.g.
        // NON_SPACING_MARK) — confirm against the reference iswprint table
        // before changing.
        return ((((1 << Character.UPPERCASE_LETTER)
                | (1 << Character.LOWERCASE_LETTER)
                | (1 << Character.TITLECASE_LETTER)
                | (1 << Character.MODIFIER_LETTER)
                | (1 << Character.OTHER_LETTER)
                | (1 << Character.COMBINING_SPACING_MARK)
                | (1 << Character.OTHER_NUMBER)
                | (1 << Character.MODIFIER_SYMBOL)
                | (1 << Character.ENCLOSING_MARK)
                | (1 << Character.DECIMAL_DIGIT_NUMBER)
                | (1 << Character.DASH_PUNCTUATION)
                | (1 << Character.OTHER_PUNCTUATION)
                | (1 << Character.CONNECTOR_PUNCTUATION)
                | (1 << Character.MATH_SYMBOL)
                | (1 << Character.SPACE_SEPARATOR)
                | (1 << Character.OTHER_SYMBOL)
                | (1 << Character.END_PUNCTUATION)
                | (1 << Character.FINAL_QUOTE_PUNCTUATION)
                | (1 << Character.START_PUNCTUATION)
                | (1 << Character.CURRENCY_SYMBOL)
                | (1 << Character.INITIAL_QUOTE_PUNCTUATION)
                | (1 << Character.LETTER_NUMBER)
                | (1 << Character.LETTER_NUMBER)) >> Character.getType(c)) & 1) != 0;
    }

    public static boolean isFloatNegativeZero(float f) {
        return f == -0.0f;
    }

    /** True for subnormal floats: zero exponent bits, non-zero mantissa. */
    public static boolean isFloatDenormalized(float f) {
        int bits = Float.floatToRawIntBits(f);
        return ((bits >> 23) & 0xff) == 0 && (bits & 0x7fffff) != 0;
    }

    /** True unless the exponent bits are all-ones (NaN or infinity). */
    public static boolean isFloatFinite(float f) {
        int bits = Float.floatToRawIntBits(f);
        return ((bits >> 23) & 0xff) != 0xff;
    }

    public static boolean isDoubleNegativeZero(double d) {
        return d == -0.0;
    }

    /** True for subnormal doubles: zero exponent bits, non-zero mantissa. */
    public static boolean isDoubleDenormalized(double d) {
        long bits = Double.doubleToRawLongBits(d);
        return ((bits >> 52) & 0x7ffL) == 0 && (bits & 0xfffffffffffffL) != 0;
    }

    /** True unless the exponent bits are all-ones (NaN or infinity). */
    public static boolean isDoubleFinite(double d) {
        long bits = Double.doubleToRawLongBits(d);
        return ((bits >> 52) & 0x7ffL) != 0x7ffL;
    }

    /**
     * Approximation of isatty(3): the {@code tty} argument is ignored and
     * the answer is simply whether the JVM has an attached console.
     */
    public static int c_isatty(int tty) {
        return System.console() != null ? 1 : 0;
    }

    public static String c_localeEncoding() {
        return Charset.defaultCharset().name();
    }

    /** Whether the channel is a SelectableChannel in non-blocking mode. */
    public static boolean isNonBlocking(Channel c) {
        if (c instanceof SelectableChannel) {
            return !((SelectableChannel) c).isBlocking();
        }
        return false;
    }

    /**
     * write(2) emulation: copies {@code count} bytes starting at the managed
     * address into the channel. Returns the number of bytes written.
     */
    public static int c_write(final Channel fd, final long address, final int count)
        throws IOException {
        // Clear interrupt status to avoid unnecessarily closing the stream.
        Thread.interrupted();
        if (Runtime.debugIO()) {
            debugIO("c_write: " + fd + " Address: " + address + " Count: " + count);
        }
        final WritableByteChannel wc = (WritableByteChannel) fd;
        final ByteBuffer buffer = MemoryManager.getBoundedBuffer(address);
        buffer.limit(buffer.position() + count);
        final int written = wc.write(buffer);
        if (Runtime.debugIO()) {
            debugIO("c_write: " + fd + " return: " + written);
        }
        return written;
    }

    /**
     * read(2) emulation with C-style result codes: 0 read on a non-blocking
     * channel is reported as -1 (EAGAIN-like), and NIO's -1 end-of-stream is
     * reported as 0 (EOF).
     */
    public static int c_read(final Channel fd, final long address, final int count)
        throws IOException {
        // Clear interrupt status to avoid unnecessarily closing the stream.
        Thread.interrupted();
        final boolean nonBlocking = isNonBlocking(fd);
        if (Runtime.debugIO()) {
            debugIO("c_read: " + fd.toString() + " Address: " + address + " Count: " + count
                  + " NonBlocking: " + nonBlocking);
        }
        final ReadableByteChannel rc = (ReadableByteChannel) fd;
        final ByteBuffer buffer = MemoryManager.getBoundedBuffer(address);
        buffer.limit(buffer.position() + count);
        int size = rc.read(buffer);
        if (size == 0 && nonBlocking) {
            size = -1;
        } else if (size == -1) {
            size = 0;
        }
        if (Runtime.debugIO()) {
            debugIO("c_read: " + fd.toString() + " nonBlocking: " + nonBlocking + " return: " + size);
        }
        return size;
    }

    /** Decodes {@code len} bytes at the managed address as UTF-8. */
    public static String byteBufferToStr(long address, int len)
        throws UnsupportedEncodingException {
        return new String(eta.ghc_prim.Utils.byteBufferToBytes(address, len), "UTF-8");
    }

    /** Maps os.name onto the platform strings GHC/Eta expect. */
    public static String getOS() {
        final String originalRawName = System.getProperty("os.name");
        final String rawName = originalRawName.toLowerCase();
        if (rawName.startsWith("windows")) {
            return "mingw32";
        } else if (rawName.startsWith("mac")) {
            return "darwin";
        } else if (rawName.startsWith("linux")) {
            return "linux";
        } else if (rawName.startsWith("freebsd")) {
            return "freebsd";
        } else if (rawName.startsWith("sunos")) {
            return "solaris";
        } else {
            // Unknown platform: pass the raw name through.
            return originalRawName;
        }
    }

    public static String getArch() {
        return System.getProperty("os.arch");
    }

    public static boolean isBigEndian() {
        return ByteOrder.nativeOrder().equals(ByteOrder.BIG_ENDIAN);
    }

    public static boolean isNewlineCRLF() {
        return "\r\n".equals(System.lineSeparator());
    }

    public static void debugBelch(String format, String string) {
        RuntimeLogging.debugBelch(format, string);
    }

    public static void errorBelch(String format, String string) {
        RuntimeLogging.errorBelch(format, string);
    }

    // True when the com.sun OperatingSystemMXBean is available (HotSpot-ish
    // JVMs); detected once at class load.
    private static boolean hasMXBean = false;

    static {
        try {
            Class.forName("java.lang.management.ManagementFactory");
            Class.forName("com.sun.management.OperatingSystemMXBean");
            hasMXBean = true;
        } catch (ClassNotFoundException cne) {
            // Leave hasMXBean false and fall back to wall-clock timing.
        }
    }

    /* Returns CPU time in picoseconds */
    public static long getCPUTime() {
        if (hasMXBean) {
            // getProcessCpuTime() is nanoseconds; scale to picoseconds.
            return ((OperatingSystemMXBean) ManagementFactory.getOperatingSystemMXBean())
                .getProcessCpuTime() * 1000;
        } else {
            // Fallback: elapsed wall-clock since runtime start, not true CPU time.
            return (System.nanoTime() - Capability.startTimeNanos) * 1000;
        }
    }

    public static Channel getStdOut() {
        return Channels.newChannel(System.out);
    }

    public static Channel getStdIn() {
        return Channels.newChannel(System.in);
    }

    public static Channel getStdErr() {
        return Channels.newChannel(System.err);
    }

    // Per-thread errno emulation for the C-style APIs above.
    private static ThreadLocal<Integer> errno = new ThreadLocal<Integer>();

    public static void initErrno() {
        if (errno.get() == null) errno.set(0);
    }

    public static int get_errno() {
        initErrno();
        return errno.get();
    }

    public static void set_errno(int errnoCode) {
        errno.set(errnoCode);
    }

    /** Casts {@code o} to {@code clazz}, returning null instead of throwing. */
    public static <T> T convertInstanceOfObject(Object o, Class<T> clazz) {
        try {
            return clazz.cast(o);
        } catch (ClassCastException e) {
            return null;
        }
    }

    public static boolean instanceOf(Object o, Class clazz) {
        return clazz.isInstance(o);
    }

    public static MessageDigest c_MD5Init() throws NoSuchAlgorithmException {
        return MessageDigest.getInstance("MD5");
    }

    /** Feeds {@code len} bytes at the managed address into the digest. */
    public static void c_MD5Update(MessageDigest md, long address, int len) {
        ByteBuffer contents = MemoryManager.getBoundedBuffer(address);
        contents.limit(contents.position() + len);
        md.update(contents);
    }

    /** Writes the 16-byte digest into the buffer at the managed address. */
    public static void c_MD5Final(long address, MessageDigest md) {
        ByteBuffer result = MemoryManager.getBoundedBuffer(address);
        byte[] hash = md.digest();
        result.put(hash);
    }

    public static long _malloc(int size) {
        return MemoryManager.allocateBuffer(size, true);
    }

    /**
     * calloc(3) emulation: allocates size*bytes and zero-fills it, eight
     * bytes at a time with a byte-wise tail.
     *
     * Fix: the original loop conditions were truncated to "while (times"
     * (a syntax error); restored to the intended countdown loops.
     */
    public static long _calloc(int size, int bytes) {
        // NOTE(review): size * bytes can overflow int for very large
        // requests — confirm callers bound the product.
        int totalBytes = size * bytes;
        long address = MemoryManager.allocateBuffer(totalBytes, true);
        ByteBuffer buffer = MemoryManager.getBoundedBuffer(address);
        int times = totalBytes / 8;
        int remTimes = totalBytes % 8;
        while (times-- != 0) {
            buffer.putLong(0);
        }
        while (remTimes-- != 0) {
            buffer.put((byte) 0);
        }
        return address;
    }

    /** realloc(3) emulation: allocate, copy min(old,new) bytes, free old. */
    public static long _realloc(long oldAddress, int newSize) {
        long newAddress = MemoryManager.allocateBuffer(newSize, true);
        int oldSize = MemoryManager.allocatedSize(oldAddress);
        c_memcpy(newAddress, oldAddress, Math.min(oldSize, newSize));
        MemoryManager.free(oldAddress);
        return newAddress;
    }

    public static long c_memcpy(long destAddress, long srcAddress, int size) {
        MemoryManager.copy(srcAddress, destAddress, size);
        return destAddress;
    }

    public static long c_memset(long address, int c_, int size) {
        MemoryManager.set(address, c_, size);
        return address;
    }

    public static long c_memmove(long destAddress, long srcAddress, int size) {
        MemoryManager.move(srcAddress, destAddress, size);
        return destAddress;
    }

    public static BasicFileAttributes c_fstat(Path p) throws IOException {
        return Files.readAttributes(p, BasicFileAttributes.class);
    }

    // Advisory lock table: value > 0 counts readers, -1 marks a writer.
    public static Map<Object, Integer> fileLocks = new HashMap<Object, Integer>();

    /**
     * Acquires an advisory lock on {@code key}. A writer needs exclusive
     * access; readers may share. Returns false when the lock is unavailable.
     */
    public static synchronized boolean lockFile(Object key, boolean forWriting) {
        Integer readers = fileLocks.get(key);
        if (readers == null) {
            int readersInt = forWriting ? -1 : 1;
            fileLocks.put(key, readersInt);
        } else {
            // Existing holders block any writer, and a writer blocks readers.
            if (forWriting || readers < 0) return false;
            fileLocks.put(key, readers + 1);
        }
        return true;
    }

    /** Releases one hold on {@code key}; returns false if it was not locked. */
    public static synchronized boolean unlockFile(Object key) {
        Integer readers = fileLocks.get(key);
        if (readers == null) return false;
        int newReaders = 0;
        if (readers < 0) {
            newReaders = readers + 1;
        } else {
            newReaders = readers - 1;
        }
        if (newReaders == 0) {
            fileLocks.remove(key);
        } else {
            fileLocks.put(key, newReaders);
        }
        return true;
    }

    public static void setNonBlockingFD(Channel c, boolean nonblocking) throws IOException {
        if (c instanceof SelectableChannel) {
            ((SelectableChannel) c).configureBlocking(!nonblocking);
        }
    }

    /**
     * lseek(2) emulation over a FileChannel; returns the new position, or
     * -1 for an unknown mode.
     */
    public static long c_lseek(FileChannel fc, long offset, int mode) throws IOException {
        // NOTE(review): mode 0 performs a RELATIVE seek and mode 1 an
        // ABSOLUTE seek here, which is the reverse of POSIX whence values
        // (SEEK_SET=0, SEEK_CUR=1). Confirm how Eta encodes its SeekMode
        // before changing — callers may rely on this mapping.
        switch (mode) {
            case 0:
                fc.position(fc.position() + offset);
                break;
            case 1:
                fc.position(offset);
                break;
            case 2:
                fc.position(fc.size() + offset);
                break;
            default:
                return (-1);
        }
        return fc.position();
    }

    /**
     * Polls whether the channel is ready for read/write within {@code msecs}
     * milliseconds (0 = non-blocking poll). Non-selectable channels and I/O
     * errors are optimistically reported as ready.
     *
     * Fix: the Selector was previously never closed, leaking one file
     * descriptor per call; it is now managed with try-with-resources.
     */
    public static boolean fdReady(Channel c, boolean write, int msecs) {
        if (c instanceof SelectableChannel) {
            try (Selector s = Selector.open()) {
                SelectableChannel sc = (SelectableChannel) c;
                SelectionKey selectKey =
                    sc.register(s, write ? SelectionKey.OP_WRITE : SelectionKey.OP_READ);
                if (msecs > 0) {
                    return (s.select(msecs) > 0);
                } else {
                    return (s.selectNow() > 0);
                }
            } catch (IOException e) {
                return true;
            }
        }
        return true;
    }

    /** rand(3)-like helper: uniform in [0, 32768). */
    public static int c_rand() {
        return (int) (Math.random() * 32768.0);
    }

    /** Orders two runtime threads by id (-1, 0, 1). */
    public static int cmp_thread(TSO t1, TSO t2) {
        int id1 = t1.id;
        int id2 = t2.id;
        if (id1 == id2) return 0;
        else if (id1 > id2) return 1;
        else return -1;
    }

    /**
     * Converts a Java String into a Haskell [Char] closure, iterating by
     * Unicode code points (so supplementary characters become one Czh).
     */
    public static Closure jstringToString(StgContext context, String str) {
        int off = 0;
        int len = str.length();
        int codepoint = 0;
        ZC prevCurrent = null;
        ZC current = new ZC(null, null);
        ZC head = current;
        for (off = 0; off < len; off += Character.charCount(codepoint)) {
            codepoint = str.codePointAt(off);
            current.x1 = new Czh(codepoint);
            ZC next = new ZC(null, null);
            current.x2 = next;
            prevCurrent = current;
            current = next;
        }
        // Empty string: no cons cell was ever filled in — return [].
        if (head.x1 == null) return Types.DZMZN();
        // Terminate the list: drop the trailing empty cell, append [].
        prevCurrent.x2 = Types.DZMZN();
        return head;
    }

    public static Path getPath(String path) {
        return Paths.get(path);
    }

    /**
     * Opens a FileChannel honouring POSIX permissions where the default file
     * system supports them; otherwise emulates the permission bits through
     * java.io.File setters before opening.
     */
    public static FileChannel fileChannelOpen(Path path, Set<OpenOption> options,
        FileAttribute<Set<PosixFilePermission>> attribute) throws IOException {
        // Check whether the default file system understands POSIX attributes.
        Set<String> faViews = FileSystems.getDefault().supportedFileAttributeViews();
        if (faViews.contains("posix")) {
            return FileChannel.open(path, options, attribute);
        }
        // Non-POSIX fallback: create the file (when requested) and apply an
        // approximation of the permission set via File setters.
        File fChan = path.toFile();
        if (options.contains(StandardOpenOption.CREATE_NEW)) {
            // CREATE_NEW demands that the file did not already exist.
            boolean created = fChan.createNewFile();
            if (!created) {
                throw new IOException("Could not create file " + fChan.getAbsolutePath());
            }
        }
        Set<PosixFilePermission> perms = attribute.value();
        fChan.setExecutable(
            perms.contains(PosixFilePermission.OWNER_EXECUTE),
            perms.contains(PosixFilePermission.GROUP_EXECUTE)
                || perms.contains(PosixFilePermission.OTHERS_EXECUTE)
        );
        fChan.setWritable(
            perms.contains(PosixFilePermission.OWNER_WRITE),
            perms.contains(PosixFilePermission.GROUP_WRITE)
                || perms.contains(PosixFilePermission.OTHERS_WRITE)
        );
        fChan.setReadable(
            perms.contains(PosixFilePermission.OWNER_READ),
            perms.contains(PosixFilePermission.GROUP_READ)
                || perms.contains(PosixFilePermission.OTHERS_READ)
        );
        // The file now exists, so CREATE_NEW must be dropped before reopening.
        HashSet<OpenOption> fOpts = new HashSet<>(options);
        fOpts.remove(StandardOpenOption.CREATE_NEW);
        return FileChannel.open(fChan.toPath(), fOpts);
    }

    public static final int pathSeparatorChar = File.pathSeparatorChar;

    public static void puts(String msg) {
        System.out.println(msg);
    }
}
package seedu.addressbook.storage;

import seedu.addressbook.data.AddressBook;
import seedu.addressbook.data.exception.IllegalValueException;
import seedu.addressbook.storage.jaxb.AdaptedAddressBook;

import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;
import javax.xml.bind.Unmarshaller;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Reader;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

/**
 * Represents the file used to store address book data.
 *
 * Fixes over the previous version:
 *  - the constructor preserves the underlying JAXBException as the cause of
 *    the RuntimeException instead of discarding it;
 *  - load()'s generic I/O failure message said "Error writing" on a read path;
 *  - reads and writes now go through java.nio with an explicit UTF-8 charset,
 *    matching the encoding JAXB declares in the XML prolog (FileReader/
 *    FileWriter used the platform default charset).
 */
public class StorageFile {

    /** Default file path used if the user doesn't provide the file name. */
    public static final String DEFAULT_STORAGE_FILEPATH = "addressbook.xml";

    /**
     * Signals that the given file path does not fulfill the storage filepath constraints.
     */
    public static class InvalidStorageFilePathException extends IllegalValueException {
        public InvalidStorageFilePathException(String message) {
            super(message);
        }
    }

    /**
     * Signals that some error has occured while trying to convert and read/write data between the application
     * and the storage file.
     */
    public static class StorageOperationException extends Exception {
        public StorageOperationException(String message) {
            super(message);
        }
    }

    private final JAXBContext jaxbContext;

    public final Path path;

    /**
     * @throws InvalidStorageFilePathException if the default path is invalid
     */
    public StorageFile() throws InvalidStorageFilePathException {
        this(DEFAULT_STORAGE_FILEPATH);
    }

    /**
     * @throws InvalidStorageFilePathException if the given file path is invalid
     */
    public StorageFile(String filePath) throws InvalidStorageFilePathException {
        try {
            jaxbContext = JAXBContext.newInstance(AdaptedAddressBook.class);
        } catch (JAXBException jaxbe) {
            // Keep the original exception as the cause for diagnosability.
            throw new RuntimeException("jaxb initialisation error", jaxbe);
        }

        path = Paths.get(filePath);
        if (!isValidPath(path)) {
            throw new InvalidStorageFilePathException("Storage file should end with '.xml'");
        }
    }

    /**
     * Returns true if the given path is acceptable as a storage file.
     * The file path is considered acceptable if it ends with '.xml'
     */
    private static boolean isValidPath(Path filePath) {
        return filePath.toString().endsWith(".xml");
    }

    /**
     * Saves all data to this storage file.
     *
     * @throws StorageOperationException if there were errors converting and/or storing data to file.
     */
    public void save(AddressBook addressBook) throws StorageOperationException {
        // UTF-8 matches the encoding JAXB declares in the XML prolog.
        try (final Writer fileWriter = Files.newBufferedWriter(path, StandardCharsets.UTF_8)) {
            final AdaptedAddressBook toSave = new AdaptedAddressBook(addressBook);
            final Marshaller marshaller = jaxbContext.createMarshaller();
            marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);
            marshaller.marshal(toSave, fileWriter);
        } catch (IOException ioe) {
            throw new StorageOperationException("Error writing to file: " + path);
        } catch (JAXBException jaxbe) {
            throw new StorageOperationException("Error converting address book into storage format");
        }
    }

    /**
     * Loads data from this storage file.
     *
     * @throws StorageOperationException if there were errors reading and/or converting data from file.
     */
    public AddressBook load() throws StorageOperationException {
        // create empty file if not found
        if (!Files.exists(path) || !Files.isRegularFile(path)) {
            final AddressBook empty = new AddressBook();
            save(empty);
            return empty;
        }

        try (final Reader fileReader = Files.newBufferedReader(path, StandardCharsets.UTF_8)) {
            final Unmarshaller unmarshaller = jaxbContext.createUnmarshaller();
            final AdaptedAddressBook loaded = (AdaptedAddressBook) unmarshaller.unmarshal(fileReader);
            // manual check for missing elements
            if (loaded.isAnyRequiredFieldMissing()) {
                throw new StorageOperationException("File data missing some elements");
            }
            return loaded.toModelType();
        } catch (FileNotFoundException fnfe) {
            throw new AssertionError("A non-existent file scenario is already handled earlier.");
        // other errors
        } catch (IOException ioe) {
            // Previously said "Error writing" — this is the read path.
            throw new StorageOperationException("Error reading from file: " + path);
        } catch (JAXBException jaxbe) {
            throw new StorageOperationException("Error parsing file data format");
        } catch (IllegalValueException ive) {
            throw new StorageOperationException("File contains illegal data values; data type constraints not met");
        }
    }

    public String getPath() {
        return path.toString();
    }
}
package soot.jimple.infoflow.data;

import heros.solver.LinkedNode;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;

import soot.NullType;
import soot.SootField;
import soot.SootMethod;
import soot.Type;
import soot.Unit;
import soot.Value;
import soot.jimple.Stmt;
import soot.jimple.infoflow.solver.IInfoflowCFG.UnitContainer;
import soot.jimple.infoflow.source.SourceInfo;
import soot.jimple.internal.JimpleLocal;

import com.google.common.collect.Sets;

/**
 * The abstraction class contains all information that is necessary to track the taint.
 *
 * @author Steven Arzt
 * @author Christian Fritz
 */
public class Abstraction implements Cloneable, LinkedNode<Abstraction> {

    /** Singleton zero (tautological) fact; created lazily by {@link #getZeroAbstraction}. */
    private static Abstraction zeroValue = null;

    /** Global switch; set once in {@link #getZeroAbstraction} (and forced true for null-original copies). */
    private static boolean flowSensitiveAliasing;

    /**
     * the access path contains the currently tainted variable or field
     */
    private final AccessPath accessPath;

    private Abstraction predecessor = null;
    private Set<Abstraction> neighbors = null;
    private Stmt currentStmt = null;

    private SourceContext sourceContext = null;

    // only used in path generation
    private Set<SourceContextAndPath> pathCache = null;

    /**
     * Unit/Stmt which activates the taint when the abstraction passes it
     */
    private Unit activationUnit = null;

    /**
     * taint is thrown by an exception (is set to false when it reaches the catch-Stmt)
     */
    private boolean exceptionThrown = false;

    // Cached hash (0 = not yet computed). Benign data race: recomputation is idempotent.
    private int hashCode = 0;

    /**
     * The postdominators we need to pass in order to leave the current conditional
     * branch. Do not use the synchronized Stack class here to avoid deadlocks.
     */
    private List<UnitContainer> postdominators = null;

    private boolean isImplicit = false;

    /**
     * Only valid for inactive abstractions. Specifies whether an access paths
     * has been cut during alias analysis.
     */
    private boolean dependsOnCutAP = false;

    public Abstraction(Value taint, SourceInfo sourceInfo,
            Value sourceVal, Stmt sourceStmt,
            boolean exceptionThrown,
            boolean isImplicit) {
        this(taint, sourceInfo.getTaintSubFields(), sourceVal, sourceStmt,
                sourceInfo.getUserData(), exceptionThrown, isImplicit);
    }

    protected Abstraction(Value taint, boolean taintSubFields,
            Value sourceVal, Stmt sourceStmt, Object userData,
            boolean exceptionThrown,
            boolean isImplicit) {
        this(taint, taintSubFields, new SourceContext(sourceVal, sourceStmt, userData),
                exceptionThrown, null, isImplicit);
    }

    protected Abstraction(Value taint, boolean taintSubFields,
            SourceContext sourceContext,
            boolean exceptionThrown,
            Unit activationUnit,
            boolean isImplicit) {
        this.sourceContext = sourceContext;
        this.accessPath = new AccessPath(taint, taintSubFields);
        // Activation units are only meaningful with flow-sensitive aliasing.
        if (flowSensitiveAliasing)
            this.activationUnit = activationUnit;
        else
            this.activationUnit = null;
        this.exceptionThrown = exceptionThrown;
        this.neighbors = null;
        this.isImplicit = isImplicit;
    }

    /**
     * Creates an abstraction as a copy of an existing abstraction,
     * only exchanging the access path. -&gt; only used by AbstractionWithPath
     * @param p The access path for the new abstraction
     * @param original The original abstraction to copy
     */
    protected Abstraction(AccessPath p, Abstraction original) {
        if (original == null) {
            sourceContext = null;
            exceptionThrown = false;
            activationUnit = null;
            // NOTE(review): this writes a STATIC field from an instance constructor;
            // preserved as-is since getZeroAbstraction relies on the current semantics.
            flowSensitiveAliasing = true;
            isImplicit = false;
        }
        else {
            sourceContext = original.sourceContext;
            exceptionThrown = original.exceptionThrown;
            activationUnit = original.activationUnit;
            // Deep-copy the list so later add/remove on the copy cannot alias the original.
            postdominators = original.postdominators == null ? null
                    : new ArrayList<UnitContainer>(original.postdominators);
            dependsOnCutAP = original.dependsOnCutAP;
            isImplicit = original.isImplicit;
        }
        accessPath = p;
        neighbors = null;
    }

    /**
     * Derives an inactive copy of this abstraction that becomes active again
     * once it passes the given activation unit.
     */
    public final Abstraction deriveInactiveAbstraction(Unit activationUnit) {
        if (!flowSensitiveAliasing)
            return this;

        // If this abstraction is already inactive, we keep it
        if (!this.isAbstractionActive())
            return this;

        Abstraction a = deriveNewAbstractionMutable(accessPath, null);
        a.postdominators = null;
        a.activationUnit = activationUnit;
        return a;
    }

    public Abstraction deriveNewAbstraction(AccessPath p, Stmt currentStmt) {
        return deriveNewAbstraction(p, currentStmt, isImplicit);
    }

    public Abstraction deriveNewAbstraction(AccessPath p, Stmt currentStmt, boolean isImplicit) {
        // If the new abstraction looks exactly like the current one, there is
        // no need to create a new object
        if (this.accessPath.equals(p)
                && this.currentStmt == currentStmt
                && this.isImplicit == isImplicit)
            return this;

        Abstraction abs = deriveNewAbstractionMutable(p, currentStmt);
        abs.isImplicit = isImplicit;
        return abs;
    }

    /** Core copy-and-reparent helper; the returned object may still be mutated by callers. */
    private Abstraction deriveNewAbstractionMutable(AccessPath p, Stmt currentStmt) {
        if (this.accessPath.equals(p) && this.currentStmt == currentStmt) {
            Abstraction abs = clone();
            abs.currentStmt = currentStmt;
            return abs;
        }

        Abstraction abs = new Abstraction(p, this);
        abs.predecessor = this;
        abs.currentStmt = currentStmt;

        // A non-empty access path means we left the purely-conditional (implicit) regime.
        if (!abs.getAccessPath().isEmpty())
            abs.postdominators = null;
        if (!abs.isAbstractionActive())
            abs.dependsOnCutAP = abs.dependsOnCutAP || p.isCutOffApproximation();

        abs.sourceContext = null;
        return abs;
    }

    public final Abstraction deriveNewAbstraction(Value taint, boolean cutFirstField, Type baseType) {
        return deriveNewAbstraction(taint, cutFirstField, null, baseType);
    }

    public final Abstraction deriveNewAbstraction(Value taint, boolean cutFirstField,
            Stmt currentStmt, Type baseType) {
        assert !this.getAccessPath().isEmpty();

        // Copy the field chain, optionally dropping the first field.
        SootField[] orgFields = accessPath.getFields();
        SootField[] fields = null;
        if (orgFields != null) {
            fields = new SootField[cutFirstField ? orgFields.length - 1 : orgFields.length];
            for (int i = cutFirstField ? 1 : 0; i < orgFields.length; i++)
                fields[cutFirstField ? i - 1 : i] = orgFields[i];
        }
        Type[] orgTypes = accessPath.getFieldTypes();
        Type[] types = null;
        if (orgTypes != null) {
            types = new Type[cutFirstField ? orgTypes.length - 1 : orgTypes.length];
            for (int i = cutFirstField ? 1 : 0; i < orgTypes.length; i++)
                types[cutFirstField ? i - 1 : i] = orgTypes[i];
        }
        // When cutting, the dropped field's type becomes the new base type.
        if (cutFirstField)
            baseType = accessPath.getFirstFieldType();

        AccessPath newAP = new AccessPath(taint, fields, baseType, types,
                accessPath.getTaintSubFields());
        if (this.getAccessPath().equals(newAP) && this.currentStmt == currentStmt)
            return this;
        return deriveNewAbstractionMutable(newAP, currentStmt);
    }

    /**
     * Derives a new abstraction that models the current local being thrown as
     * an exception
     * @param throwStmt The statement at which the exception was thrown
     * @return The newly derived abstraction
     */
    public final Abstraction deriveNewAbstractionOnThrow(Stmt throwStmt) {
        assert !this.exceptionThrown;
        Abstraction abs = clone();
        abs.currentStmt = throwStmt;
        abs.sourceContext = null;
        abs.exceptionThrown = true;
        return abs;
    }

    /**
     * Derives a new abstraction that models the current local being caught as
     * an exception
     * @param taint The value in which the tainted exception is stored
     * @return The newly derived abstraction
     */
    public final Abstraction deriveNewAbstractionOnCatch(Value taint) {
        assert this.exceptionThrown;
        Abstraction abs = deriveNewAbstractionMutable(new AccessPath(taint, true), null);
        abs.exceptionThrown = false;
        return abs;
    }

    /**
     * Gets the path of statements from the source to the current statement
     * with which this abstraction is associated. If this path is ambiguous,
     * a single path is selected randomly.
     * @return The path from the source to the current statement
     */
    public Set<SourceContextAndPath> getPaths() {
        return getPaths(true, this);
    }

    /**
     * Gets the source contexts reachable from this abstraction, without
     * reconstructing the full statement paths. If the path is ambiguous,
     * a single one is selected randomly.
     * @return The sources for the current statement
     */
    public Set<SourceContextAndPath> getSources() {
        // Fix: removed an explicit Runtime.getRuntime().gc() call here; forcing a
        // full GC inside an accessor is a debug leftover and a severe latency hazard.
        return getPaths(false, this);
    }

    // Marker used for cycle detection in getPaths(): identifies the sink-side
    // traversal this node was last visited by.
    private Abstraction sinkAbs = null;

    /**
     * Recursive worker for {@link #getPaths()} / {@link #getSources()}.
     * @param reconstructPaths whether to extend paths with intermediate statements
     * @param flagAbs traversal marker (the original sink abstraction)
     * @return The path from the source to the current statement
     */
    private Set<SourceContextAndPath> getPaths(boolean reconstructPaths, Abstraction flagAbs) {
        // If we run into a loop, we symbolically save where to continue on the
        // next run and abort for now
        if (sinkAbs == flagAbs) {
            if (pathCache == null)
                return Collections.emptySet();
            return Collections.unmodifiableSet(pathCache);
        }
        this.sinkAbs = flagAbs;
        this.pathCache = Sets.newHashSet();

        if (sourceContext != null) {
            // Construct the path root
            SourceContextAndPath sourceAndPath = new SourceContextAndPath
                    (sourceContext.getValue(), sourceContext.getStmt(),
                     sourceContext.getUserData()).extendPath(sourceContext.getStmt());
            pathCache.add(sourceAndPath);

            // Sources may not have predecessors
            assert predecessor == null;
        }
        else {
            for (SourceContextAndPath curScap : predecessor.getPaths(reconstructPaths, flagAbs)) {
                SourceContextAndPath extendedPath = (currentStmt == null || !reconstructPaths)
                        ? curScap : curScap.extendPath(currentStmt);
                this.pathCache.add(extendedPath);
            }
        }

        // Merge in paths flowing through equivalent neighbor abstractions.
        if (neighbors != null)
            for (Abstraction nb : neighbors)
                this.pathCache.addAll(nb.getPaths(reconstructPaths, flagAbs));

        assert pathCache != null;
        return Collections.unmodifiableSet(pathCache);
    }

    /** An abstraction is active iff it has no pending activation unit. */
    public boolean isAbstractionActive() {
        return activationUnit == null;
    }

    public boolean isImplicit() {
        return isImplicit;
    }

    @Override
    public String toString() {
        return (isAbstractionActive()?"":"_")+accessPath.toString() + " | "+(activationUnit==null?"":activationUnit.toString()) + ">>";
    }

    public AccessPath getAccessPath() {
        return accessPath;
    }

    public Unit getActivationUnit() {
        return this.activationUnit;
    }

    /** Returns an activated copy of this (inactive) abstraction. */
    public Abstraction getActiveCopy() {
        assert !this.isAbstractionActive();

        Abstraction a = clone();
        a.sourceContext = null;
        a.activationUnit = null;
        return a;
    }

    /**
     * Gets whether this value has been thrown as an exception
     * @return True if this value has been thrown as an exception, otherwise
     * false
     */
    public boolean getExceptionThrown() {
        return this.exceptionThrown;
    }

    public final Abstraction deriveConditionalAbstractionEnter(UnitContainer postdom,
            Stmt conditionalUnit) {
        assert this.isAbstractionActive();

        if (postdominators != null && postdominators.contains(postdom))
            return this;

        Abstraction abs = deriveNewAbstractionMutable
                (AccessPath.getEmptyAccessPath(), conditionalUnit);
        if (abs.postdominators == null)
            abs.postdominators = Collections.singletonList(postdom);
        else
            abs.postdominators.add(0, postdom);
        return abs;
    }

    public final Abstraction deriveConditionalAbstractionCall(Unit conditionalCallSite) {
        assert this.isAbstractionActive();
        assert conditionalCallSite != null;

        Abstraction abs = deriveNewAbstractionMutable
                (AccessPath.getEmptyAccessPath(), (Stmt) conditionalCallSite);

        // Postdominators are only kept intraprocedurally in order to not
        // mess up the summary functions with caller-side information
        abs.postdominators = null;

        return abs;
    }

    public final Abstraction dropTopPostdominator() {
        if (postdominators == null || postdominators.isEmpty())
            return this;

        // clone() deep-copies the postdominator list, so remove(0) is safe here.
        Abstraction abs = clone();
        abs.sourceContext = null;
        abs.postdominators.remove(0);
        return abs;
    }

    public UnitContainer getTopPostdominator() {
        if (postdominators == null || postdominators.isEmpty())
            return null;
        return this.postdominators.get(0);
    }

    public boolean isTopPostdominator(Unit u) {
        UnitContainer uc = getTopPostdominator();
        if (uc == null)
            return false;
        return uc.getUnit() == u;
    }

    public boolean isTopPostdominator(SootMethod sm) {
        UnitContainer uc = getTopPostdominator();
        if (uc == null)
            return false;
        return uc.getMethod() == sm;
    }

    @Override
    public Abstraction clone() {
        Abstraction abs = new Abstraction(accessPath, this);
        abs.predecessor = this;
        abs.neighbors = null;
        abs.currentStmt = null;

        assert abs.equals(this);
        return abs;
    }

    @Override
    public boolean equals(Object obj) {
        // super.equals is Object identity -> fast path
        if (super.equals(obj))
            return true;
        if (obj == null || getClass() != obj.getClass())
            return false;
        Abstraction other = (Abstraction) obj;

        if (accessPath == null) {
            if (other.accessPath != null)
                return false;
        } else if (!accessPath.equals(other.accessPath))
            return false;

        return localEquals(other);
    }

    /**
     * Checks whether this object locally equals the given object, i.e. the both
     * are equal modulo the access path
     * @param other The object to compare this object with
     * @return True if this object is locally equal to the given one, otherwise
     * false
     */
    private boolean localEquals(Abstraction other) {
        // deliberately ignore prevAbs
        if (sourceContext == null) {
            if (other.sourceContext != null)
                return false;
        } else if (!sourceContext.equals(other.sourceContext))
            return false;
        if (activationUnit == null) {
            if (other.activationUnit != null)
                return false;
        } else if (!activationUnit.equals(other.activationUnit))
            return false;
        if (this.exceptionThrown != other.exceptionThrown)
            return false;
        if (postdominators == null) {
            if (other.postdominators != null)
                return false;
        } else if (!postdominators.equals(other.postdominators))
            return false;
        if (this.dependsOnCutAP != other.dependsOnCutAP)
            return false;
        if (this.isImplicit != other.isImplicit)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        if (this.hashCode != 0)
            return hashCode;

        final int prime = 31;
        int result = 1;

        // deliberately ignore prevAbs
        result = prime * result + ((sourceContext == null) ? 0 : sourceContext.hashCode());
        result = prime * result + ((accessPath == null) ? 0 : accessPath.hashCode());
        result = prime * result + ((activationUnit == null) ? 0 : activationUnit.hashCode());
        result = prime * result + (exceptionThrown ? 1231 : 1237);
        result = prime * result + ((postdominators == null) ? 0 : postdominators.hashCode());
        result = prime * result + (dependsOnCutAP ? 1231 : 1237);
        result = prime * result + (isImplicit ? 1231 : 1237);
        this.hashCode = result;

        return this.hashCode;
    }

    /**
     * Checks whether this abstraction entails the given abstraction, i.e. this
     * taint also taints everything that is tainted by the given taint.
     * @param other The other taint abstraction
     * @return True if this object at least taints everything that is also tainted
     * by the given object
     */
    public boolean entails(Abstraction other) {
        if (accessPath == null) {
            if (other.accessPath != null)
                return false;
        } else if (!accessPath.entails(other.accessPath))
            return false;
        return localEquals(other);
    }

    /**
     * Gets the context of the taint, i.e. the statement and value of the source
     * @return The statement and value of the source
     */
    public SourceContext getSourceContext() {
        return sourceContext;
    }

    public boolean dependsOnCutAP() {
        return dependsOnCutAP;
    }

    Abstraction getPredecessor() {
        return this.predecessor;
    }

    @Override
    public void addNeighbor(Abstraction originalAbstraction) {
        assert this != zeroValue;
        assert originalAbstraction.equals(this);

        // We should not register ourselves as a neighbor
        if (originalAbstraction == this)
            return;

        synchronized (this) {
            if (neighbors == null)
                neighbors = Sets.newIdentityHashSet();
            // Only equal-but-differently-derived facts are interesting neighbors.
            if (this.predecessor != originalAbstraction.predecessor
                    || this.currentStmt != originalAbstraction.currentStmt)
                this.neighbors.add(originalAbstraction);
        }
    }

    /**
     * Lazily creates and returns the shared zero abstraction, also fixing the
     * global flow-sensitivity setting on first call.
     * NOTE(review): initialisation is not thread-safe — confirm single-threaded setup.
     */
    public static Abstraction getZeroAbstraction(boolean flowSensitiveAliasing) {
        if (zeroValue == null) {
            zeroValue = new Abstraction(new JimpleLocal("zero", NullType.v()),
                    new SourceInfo(false), null, null, false, false);
            Abstraction.flowSensitiveAliasing = flowSensitiveAliasing;
        }
        return zeroValue;
    }
}
package pokemon.modele;

import java.util.Arrays;
import java.util.Random;
import java.util.Scanner;
import java.util.Stack;
import java.util.Vector;

import pokemon.annotations.Tps;
import pokemon.launcher.MyGdxGame;

/**
 * Battle engine running on its own thread. Drives the turn loop in
 * {@link #combatsolo()} and exchanges actions/messages with the view through
 * the synchronized wait/notify accessors at the bottom of the class.
 */
@Tps(nbhours=17)
public class Combat extends Thread {
    protected Terrain terrain;              // battle terrain
    protected Climat climat;                // weather
    protected PokemonCombat[] equipe1;      // team 1 (player side)
    protected PokemonCombat[] equipe2;      // team 2 (opponent side)
    protected PokemonCombat[] pkmListe;     // the two pokemon currently on the field
    protected boolean endOfTurn;            // true while end-of-turn effects are resolving
    protected String buffer;                // message buffer shared with the view
    protected boolean bufferReady;          // set when the buffer may be displayed
    protected PokemonCombat pCourant;       // pokemon currently acting
    protected PokemonCombat cibleCourante;  // current target
    protected Capacite capCur;              // move currently being executed
    protected int actflag;                  // action category chosen by the view (-1 = none yet)
    protected int act;                      // action index chosen by the view (-1 = none yet)
    protected int ind;
    protected boolean freeze;               // model thread blocks while true (view animating)
    // stat indices: 0 level, 1 XP, 2 HP, 3 ATK, 4 DEF, 5 SP.ATK, 6 SP.DEF, 7 SPD, 8 accuracy (100), 9 evasion (5% base)

    /** Initialises a neutral battle with empty buffers and no pending action. */
    public Combat(){
        terrain=Terrain.Plaine;
        climat=Climat.Normal;
        buffer="";
        bufferReady=false;
        actflag=-1;
        act=-1;
        ind=-1;
        freeze=false;
        endOfTurn=false;
    }

    /** Player-vs-player battle. */
    public Combat(Joueur j1,Joueur j2){
        this();
        this.initSolo(j1,j2);
    }

    /** Player-vs-trainer battle. */
    public Combat(Joueur j,Dresseur d){
        this();
        this.initSolo(j,d);
    }

    /**
     * Returns 1 if team 2 is fully KO'd, 2 if team 1 is fully KO'd,
     * 0 while the battle is still undecided (HP is stats[2][0]).
     */
    public int gagnant(){
        int nbko1=0;
        int nbko2=0;
        for(int i=0;i<equipe1.length;i++){
            if(equipe1[i].pkm.stats[2][0]<=0){
                nbko1++;
            }
        }
        for(int i=0;i<equipe2.length;i++){
            if(equipe2[i].pkm.stats[2][0]<=0){
                nbko2++;
            }
        }
        if(nbko2==equipe2.length){
            return 1;
        }
        if(nbko1==equipe1.length){
            return 2;
        }
        return 0;
    }

    public PokemonCombat[] getPkmListe() {
        return pkmListe;
    }

    /** Thread entry point: runs the whole battle loop. */
    public void run(){
        this.combatsolo();
    }

    /** Sets up a 1v1 battle between two players and links each side to its opponent. */
    public void initSolo(Joueur j1,Joueur j2){
        equipe1=new PokemonCombat[j1.teamsize];
        equipe2=new PokemonCombat[j2.teamsize];
        pkmListe=new PokemonCombat[2];
        for(int i=0;i<j1.teamsize;i++){
            equipe1[i]=new PokemonCombat(j1.team[i],false,j1);
            equipe1[i].equipe=equipe1;
        }
        for(int i=0;i<j2.teamsize;i++){
            equipe2[i]=new PokemonCombat(j2.team[i],true,j2);
            equipe2[i].equipe=equipe2;
        }
        pkmListe[0]=equipe1[0];
        pkmListe[1]=equipe2[0];
        pkmListe[0].adv[0]=pkmListe[1];
        pkmListe[0].XpStack.add(pkmListe[0].adv[0].pkm);
        pkmListe[1].adv[0]=pkmListe[0];
        pkmListe[1].XpStack.add(pkmListe[1].adv[0].pkm);
        pkmListe[0].listeIndice=0;
        pkmListe[1].listeIndice=1;
    }

    /** Sets up a 1v1 battle between a player and an AI trainer. */
    public void initSolo(Joueur j,Dresseur d){
        equipe1=new PokemonCombat[j.teamsize];
        equipe2=new PokemonCombat[d.getTeam().size()];
        pkmListe=new PokemonCombat[2];
        for(int i=0;i<j.teamsize;i++){
            equipe1[i]=new PokemonCombat(j.team[i],false,j);
            equipe1[i].equipe=equipe1;
        }
        for(int i=0;i<d.getTeam().size();i++){
            equipe2[i]=new PokemonCombat(d.getTeam().elementAt(i),true,d);
            equipe2[i].equipe=equipe2;
        }
        pkmListe[0]=equipe1[0];
        pkmListe[1]=equipe2[0];
        pkmListe[0].adv[0]=pkmListe[1];
        pkmListe[0].XpStack.add(pkmListe[0].adv[0].pkm);
        pkmListe[1].adv[0]=pkmListe[0];
        pkmListe[1].XpStack.add(pkmListe[1].adv[0].pkm);
        pkmListe[0].listeIndice=0;
        pkmListe[1].listeIndice=1;
    }

    /**
     * Main battle loop: sorts the active pokemon by speed, lets each act,
     * then resolves end-of-turn damage (poison/burn). Blocks on setfreeze(true)
     * until the view releases the model thread. Returns the winner code.
     */
    public int combatsolo(){
        System.out.println("FREEZE DE DEBUT DE COMBAT");
        this.setfreeze(true);
        while(this.gagnant()==0){
            // Arrays.sort determines action order for this turn
            Arrays.sort(pkmListe);
            for(int i=0;i<pkmListe.length;i++){
                this.resetAct();
                this.setBufferState(false);
                pCourant=pkmListe[i];
                this.capCur=null;
                this.cibleCourante=null;
                pkmListe[i].action(pkmListe[i].adv[0],this);
                //System.out.println("FIN DE TOUR \n"+pCourant.pkm.nom+" "+cibleCourante.pkm.nom+" "+capCur.nom);
            }
            // Apply damage-over-time effects (poison, burn)
            endOfTurn=true;
            for(PokemonCombat p:pkmListe){
                if(p.pkm.statut==Statut.Empoisonne || p.pkm.statut==Statut.Brule ){
                    this.pCourant=p;
                    this.capCur=p.pkm.statut.dummy;
                    this.cibleCourante=p;
                    p.pkm.statut.StatEffect(p.pkm,1,this);
                    this.setfreeze(true);
                    for(Statut s: p.pkm.supTemp){
                        this.pCourant=p;
                        this.capCur=s.dummy;
                        this.cibleCourante=p;
                        s.StatEffect(p.pkm,1,this);
                        this.setfreeze(true);
                    }
                    if(p.pkm.stats[2][0]<=0){
                        p.XPreward(this);
                        pokeswap(p,true);
                    }
                }
            }
            endOfTurn=false;
        }
        return this.gagnant();
    }

    /**
     * Resolves one player action for {@code user} against {@code cible}:
     * waits for the view to publish (actflag, act), then executes a move (0),
     * inventory (1, not implemented), a switch (2) or flight (3).
     */
    public void action(PokemonCombat user,PokemonCombat cible){
        int isdone=0;
        int i=0;
        int ch1=0;
        int ch2=1;
        while(isdone==0){
            System.out.println("Debut du tour du joueur");
            this.getAct();   // blocks until the view calls setAct()
            switch(actflag){
            case 0:
                System.out.println("Execution d'une Capacite");
                //while((act=sc.nextInt())<user.cap.max){System.out.println(act); }
                //act=sc.nextInt();
                // Apply statuses that may prevent the action (sleep, paralysis, ...)
                ch1=user.pkm.statut.StatEffect(user.pkm,0,this);
                for(Statut s: user.pkm.supTemp){
                    if(s.StatEffect(user.pkm,0,this)==0){
                        ch2=0;
                    }
                }
                if(ch1==1 && ch2==1){
                    this.capCur=user.pkm.cap.elementAt(act);
                    this.cibleCourante=cible;
                    user.pkm.cap.utiliser(act,user.pkm,cible.pkm,this);
                }
                // Consequences of the action
                this.chercherKO();
                // Held-berry trigger at half HP
                // NOTE(review): this first branch mixes 'user' and 'cible' checks
                // (cible.statut / cible.objTenu guarding user's berry) — looks like a
                // copy-paste of the second branch; confirm intended behavior.
                if(user.pkm.stats[2][0]<=(int)(user.pkm.stats[2][1]/2) && cible.pkm.statut!=Statut.KO){
                    if(user.pkm.objTenu instanceof Medicament && cible.pkm.objTenu!=null){
                        Medicament m=(Medicament)user.pkm.objTenu;
                        this.ajoutBuffer(user.pkm.nom+" utilise sa baie");
                        if(m.baie){
                            m.script(user.pkm,this);
                            user.pkm.objTenu=null;
                        }
                    }
                }
                if(cible.pkm.stats[2][0]<=(int)(cible.pkm.stats[2][1]/2) && cible.pkm.statut!=Statut.KO){
                    if(cible.pkm.objTenu instanceof Medicament && cible.pkm.objTenu!=null){
                        Medicament m=(Medicament)cible.pkm.objTenu;
                        this.ajoutBuffer(cible.pkm.nom+" utilise sa baie");
                        if(m.baie){
                            m.script(cible.pkm,this);
                            cible.pkm.objTenu=null;
                        }
                    }
                }
                isdone=1;
                this.setBufferState(true);
                break;
            case 1:
                // Inventory (not implemented); falls out of the loop only via another choice
                break;
            case 2:
                pokeswap(user,false);
                // passive ability handling goes here
                isdone=1;
                this.setBufferState(true);
                break;
            case 3:
                System.out.println("FUITE");
                break;
            default :
                System.out.println("mauvaise input dans "+user.pkm.nom+" action");
                break;
            }
        }
        System.out.println("FIN D'ACTION DU JOUEUR");
    }

    /** Registers {@code pkmc} as an XP source for every opposing active pokemon. */
    public void ajoutXpStack(PokemonCombat pkmc){
        for(PokemonCombat p:pkmListe){
            if(p.equipe!=pkmc.equipe && !p.XpStack.contains(pkmc.pkm)){
                p.XpStack.push(pkmc.pkm);
            }
        }
    }

    /** Awards XP and forces a switch for any active pokemon that just fainted. */
    public void chercherKO(){
        for(PokemonCombat p: pkmListe){
            if(p.pkm.stats[2][0]<=0){
                p.XPreward(this);
                pokeswap(p,true);
            }
        }
    }

    /**
     * Swaps the active pokemon of {@code user}. For a human player the
     * replacement index comes from user.swap (after a KO) or this.act
     * (voluntary switch); the AI simply sends its first non-KO pokemon.
     */
    public void pokeswap(PokemonCombat user,boolean ko){
        /*int i=0;
        int act=0;*/
        int done=0;
        int select=0;// Pkm pkmRef;
        Stack<Pkm> stackRef;
        if(!user.isIA){
            while(done==0){
                if(ko){
                    user.waitPlswap();   // block until the view supplies a replacement
                    select=user.swap;}
                else{
                    select=this.act;
                }
                if(user.equipe[select].pkm.statut!=Statut.KO){
                    System.out.println(user.equipe[select].pkm.nom+" remplace "+user.pkm.nom);
                    //pkmRef=user.pkm;
                    stackRef=user.XpStack;
                    user.pkm=user.equipe[select].pkm;
                    user.XpStack=user.equipe[select].XpStack;
                    //user.equipe[act].pkm=pkmRef;
                    // NOTE(review): indexes with this.act while the choice above is in
                    // 'select' — after a KO these can differ; confirm this is intended.
                    user.equipe[act].XpStack=stackRef;
                    ajoutXpStack(user);
                    done=1;
                }
                else{
                    System.out.println("Vous ne pouvez pas envoyer un Pokemon K.O au combat !");
                }
            }
        }
        else{
            for(int i=0;i<user.equipe.length;i++){
                if(user.equipe[i].pkm.statut!=Statut.KO){
                    System.out.println(user.prop+" envoie "+user.equipe[i].pkm.nom+" au combat");
                    user.setSwap(i);
                    user.waitIAswap();
                    //pkmRef=user.pkm;
                    stackRef=user.XpStack;
                    user.pkm=user.equipe[i].pkm;
                    user.XpStack=user.equipe[i].XpStack;
                    //p.pkm=pkmRef; p.XpStack=stackRef;
                    ajoutXpStack(user);
                    break;
                }
            }
        }
    }

    public Terrain getTerrain(){
        return terrain;
    }

    public Climat getClimat(){
        return climat;
    }

    public PokemonCombat[] getEquipe1(){ return equipe1;}
    public PokemonCombat[] getEquipe2(){ return equipe2;}

    public Capacite getCapCur() {
        return capCur;
    }

    // Synchronized helpers shared between the model (this thread) and the view

    /** Appends a line to the message buffer and wakes any reader blocked in readBuffer(). */
    public synchronized void ajoutBuffer(String s){
        this.buffer+=s+"\n";
        notify();
    }

    public synchronized boolean bufferIsEmpty(){
        return this.buffer.compareTo("")==0;
    }

    public synchronized boolean bufferIsReady(){
        return bufferReady;
    }

    /** Blocks until the buffer is non-empty, then returns its content (does not clear it). */
    public synchronized String readBuffer(){
        while(this.buffer.compareTo("")==0){
            try {
                wait();
            } catch(InterruptedException ie) {
                ie.printStackTrace();
            }
        }
        return this.buffer;
    }

    public synchronized void resetBuffer(){
        this.buffer="";
        setBufferState(false);}

    /** Marks the buffer displayable and wakes waiters when it becomes ready. */
    public synchronized void setBufferState(boolean st){
        bufferReady=st;
        if(bufferReady){
            notify();
        }
    }

    /** Blocks the model thread until the view publishes an action via setAct(). */
    public synchronized void getAct(){
        while(actflag==-1 && act==-1){
            try {
                wait();
            } catch(InterruptedException ie) {
                ie.printStackTrace();
            }
        }
    }

    /** Called by the view: publishes the chosen action and wakes the model thread. */
    public synchronized void setAct(int aflag,int act){
        actflag=aflag;
        this.act=act;
        notify();
    }

    public synchronized void resetAct(){
        this.actflag=-1;
        this.act=-1;
        notify();
    }

    /**
     * freeze=true blocks the calling (model) thread until the view calls
     * setfreeze(false), which wakes it again.
     */
    public synchronized void setfreeze(boolean f){
        freeze=f;
        if(!freeze){
            System.out.println("SETFREEZE "+f);
            notify();}
        else{
            while(freeze){
                try {
                    System.out.println("SETFREEZE "+f);
                    this.wait();
                } catch(InterruptedException ie) {
                    ie.printStackTrace();
                }
            }
        }
    }

    public synchronized PokemonCombat getPCourant(){ return pCourant;}

    public synchronized PokemonCombat getCibleCourante() {
        return cibleCourante;
    }

    public synchronized void setCible(PokemonCombat cible){
        this.cibleCourante=cible;
    }

    public synchronized boolean getendOfTurn(){ return endOfTurn;}
}
package com.rox.emu.env; import org.junit.Test; import static junit.framework.TestCase.assertNotNull; import static org.junit.Assert.*; import static org.junit.Assert.assertFalse; public class RoxWordTest { @Test public void testEquality(){ assertTrue(RoxWord.ZERO.equals(RoxWord.ZERO)); assertEquals(RoxWord.ZERO, RoxWord.ZERO); assertEquals(RoxWord.ZERO, 0); assertEquals(RoxWord.ZERO.hashCode(), RoxWord.ZERO.hashCode()); assertTrue(RoxWord.fromLiteral(1).equals(1)); assertEquals(RoxWord.fromLiteral(1), 1); assertEquals(RoxWord.fromLiteral(1), RoxWord.fromLiteral(1)); assertEquals(RoxWord.fromLiteral(1).hashCode(), RoxWord.fromLiteral(1).hashCode()); assertTrue(RoxWord.fromLiteral(0b1111111111111110).equals(0b1111111111111110)); assertEquals(RoxWord.fromLiteral(0b1111111111111110), 0b1111111111111110); assertEquals(RoxWord.fromLiteral(99), RoxByte.fromLiteral(99)); assertEquals(RoxWord.fromLiteral(99).hashCode(), RoxByte.fromLiteral(99).hashCode()); } @Test public void testInequality(){ assertFalse(RoxWord.ZERO.equals(RoxWord.fromLiteral(10))); assertNotEquals(RoxWord.ZERO, RoxWord.fromLiteral(1)); assertNotEquals(RoxWord.ZERO, 1);; assertFalse(RoxWord.fromLiteral(1).equals(2)); assertNotEquals(RoxWord.fromLiteral(1), 2); assertNotEquals(RoxWord.fromLiteral(1), RoxWord.fromLiteral(2)); assertNotEquals(RoxWord.fromLiteral(1).hashCode(), RoxWord.fromLiteral(2).hashCode()); assertFalse(RoxWord.fromLiteral(0b1111111111111110).equals(0b1111111111111100)); assertNotEquals(RoxWord.fromLiteral(0b1111111111111110), 0b1111111111111100); assertNotEquals(RoxWord.fromLiteral(99), RoxByte.fromLiteral(98)); assertNotEquals(RoxWord.fromLiteral(99).hashCode(), RoxByte.fromLiteral(98).hashCode()); assertFalse(RoxWord.fromLiteral(23).equals("Test 1")); assertNotEquals(RoxWord.fromLiteral(23), "Test 2"); assertNotEquals(RoxWord.fromLiteral(23).hashCode(), "Test 3".hashCode()); } @Test public void testEmptyWordCreation(){ final RoxWord myWord = RoxWord.ZERO; assertNotNull(myWord); 
assertEquals(0, myWord.getAsInt()); } @Test public void testSingleArgumentLowByteWordCreation(){ final RoxWord myWord = RoxWord.from(RoxByte.fromLiteral(10)); assertNotNull(myWord); assertEquals(10, myWord.getAsInt()); } @Test public void testLowByteWordCreation(){ final RoxWord myWord = RoxWord.from(RoxByte.ZERO, RoxByte.fromLiteral(10)); assertNotNull(myWord); assertEquals(10, myWord.getAsInt()); } @Test public void testHighByteWordCreation(){ final RoxWord myWord = RoxWord.from(RoxByte.fromLiteral(1), RoxByte.ZERO); assertNotNull(myWord); assertEquals(256, myWord.getAsInt()); } @Test public void testTwoByteWordCreation(){ final RoxWord myWord = RoxWord.from(RoxByte.fromLiteral(1), RoxByte.fromLiteral(1)); assertNotNull(myWord); assertEquals(257, myWord.getAsInt()); } @Test public void testGetLowByte(){ final RoxWord myWord = RoxWord.from(RoxByte.fromLiteral(10), RoxByte.fromLiteral(20)); assertEquals(RoxByte.fromLiteral(20), myWord.getLowByte()); } @Test public void testGetHighByte(){ final RoxWord myWord = RoxWord.from(RoxByte.fromLiteral(10), RoxByte.fromLiteral(20)); assertEquals(RoxByte.fromLiteral(10), myWord.getHighByte()); } @Test public void testValidLiteralFrom(){ //Any value above 0xFFFF will just be treated as 'v &= 0xFFFF' for (int i=0x0; i<0x10010; i++) { final RoxWord word = RoxWord.fromLiteral(i); final RoxByte expectedLoByte = RoxByte.fromLiteral(i & 0xFF); final RoxByte expectedHiByte = RoxByte.fromLiteral((i >> 8) & 0xFF); assertEquals(expectedLoByte, word.getLowByte()); assertEquals(expectedHiByte, word.getHighByte()); } } }
package net.nanopool.samples;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.List;

import javax.sql.DataSource;

import net.nanopool.NanoPoolDataSource;

import com.mysql.jdbc.jdbc2.optional.MysqlConnectionPoolDataSource;

/**
 * Minimal end-to-end sample: builds a NanoPool around a MySQL pooled
 * data source, runs a single query, then shuts the pool down and reports
 * any exceptions collected during shutdown.
 */
public class Simple {
    public static void main(String[] args) throws SQLException {
        MysqlConnectionPoolDataSource source = new MysqlConnectionPoolDataSource();
        source.setServerName("localhost");
        source.setPort(3306);
        source.setDatabaseName("test");
        source.setUser("root");
        source.setPassword("");
        // timeouts:
        source.setLoginTimeout(5 /*seconds*/);
        source.setConnectTimeout(5000 /*milliseconds*/);
        source.setSocketTimeout(5000 /*milliseconds*/);

        System.out.println("Creating connection pool");
        DataSource pds = new NanoPoolDataSource(source, 10, 300000);

        System.out.println("Getting new connection");
        Connection con = pds.getConnection();
        try {
            System.out.println("Creating statement");
            Statement st = con.createStatement();
            try {
                System.out.println("Executing query");
                ResultSet rs = st.executeQuery("select now()");
                if (rs.next()) {
                    System.out.println(rs.getString(1));
                }
            } finally {
                // Fix: the Statement (and its ResultSet) previously leaked on the
                // exception path; per JDBC, closing the Statement closes its ResultSet.
                st.close();
            }
        } finally {
            System.out.println("Closing connection");
            con.close();
        }

        System.out.println("Shutting down pool");
        List<SQLException> exceptions = ((NanoPoolDataSource)pds).shutdown();
        if (!exceptions.isEmpty()) {
            System.out.println("Caught these SQLExceptions in shutdown:");
            for (SQLException ex : exceptions)
                ex.printStackTrace(System.out);
        }
        System.out.println("All done.");
    }
}
package org.deft.web;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

import org.apache.http.HttpResponse;
import org.apache.http.ProtocolVersion;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.params.BasicHttpParams;
import org.apache.http.params.HttpParams;
import org.deft.example.AsyncDbHandler;
import org.deft.web.handler.RequestHandler;
import org.junit.BeforeClass;
import org.junit.Test;

/**
 * System tests for the Deft HTTP server: starts one server instance on a
 * local port with a set of RequestHandlers, then exercises GET/POST/PUT/DELETE
 * and the write/flush combinations through a real Apache HttpClient.
 */
public class DeftSystemTest {

    private static final int PORT = 8081;

    public static final String expectedPayload = "hello test";

    /** Handler that writes the shared expected payload on GET. */
    private static class ExampleRequestHandler extends RequestHandler {
        @Override
        public void get(org.deft.web.protocol.HttpRequest request,
                org.deft.web.protocol.HttpResponse response) {
            response.write(expectedPayload);
        }
    }

    /** Single write, no explicit flush. */
    private static class WRequestHandler extends RequestHandler {
        @Override
        public void get(org.deft.web.protocol.HttpRequest request,
                org.deft.web.protocol.HttpResponse response) {
            response.write("1");
        }
    }

    /** Two writes, no explicit flush. */
    private static class WWRequestHandler extends RequestHandler {
        @Override
        public void get(org.deft.web.protocol.HttpRequest request,
                org.deft.web.protocol.HttpResponse response) {
            response.write("1");
            response.write("2");
        }
    }

    /** Write, write, flush, write — payload must still arrive as "123". */
    private static class WWFWRequestHandler extends RequestHandler {
        @Override
        public void get(org.deft.web.protocol.HttpRequest request,
                org.deft.web.protocol.HttpResponse response) {
            response.write("1");
            response.write("2");
            response.flush();
            response.write("3");
        }
    }

    /** Alternating write/flush pairs. */
    private static class WFWFRequestHandler extends RequestHandler {
        @Override
        public void get(org.deft.web.protocol.HttpRequest request,
                org.deft.web.protocol.HttpResponse response) {
            response.write("1");
            response.flush();
            response.write("2");
            response.flush();
        }
    }

    /** DELETE handler writing "delete" across two flushes. */
    private static class DeleteRequestHandler extends RequestHandler {
        @Override
        public void delete(org.deft.web.protocol.HttpRequest request,
                org.deft.web.protocol.HttpResponse response) {
            response.write("del");
            response.flush();
            response.write("ete");
            response.flush();
        }
    }

    /** POST handler writing "post" across two flushes. */
    private static class PostRequestHandler extends RequestHandler {
        @Override
        public void post(org.deft.web.protocol.HttpRequest request,
                org.deft.web.protocol.HttpResponse response) {
            response.write("po");
            response.flush();
            response.write("st");
            response.flush();
        }
    }

    /** PUT handler writing "put" across two flushes. */
    private static class PutRequestHandler extends RequestHandler {
        @Override
        public void put(org.deft.web.protocol.HttpRequest request,
                org.deft.web.protocol.HttpResponse response) {
            response.write("p");
            response.flush();
            response.write("ut");
            response.flush();
        }
    }

    /** Registers all handlers and starts one shared server instance. */
    @BeforeClass
    public static void setup() {
        Map<String, RequestHandler> reqHandlers = new HashMap<String, RequestHandler>();
        reqHandlers.put("/", new ExampleRequestHandler());
        reqHandlers.put("/mySql", new AsyncDbHandler());
        reqHandlers.put("/w", new WRequestHandler());
        reqHandlers.put("/ww", new WWRequestHandler());
        reqHandlers.put("/wwfw", new WWFWRequestHandler());
        reqHandlers.put("/wfwf", new WFWFRequestHandler());
        reqHandlers.put("/delete", new DeleteRequestHandler());
        reqHandlers.put("/post", new PostRequestHandler());
        reqHandlers.put("/put", new PutRequestHandler());

        final Application application = new Application(reqHandlers);

        // start deft instance from a new thread because the start invocation
        // is blocking (invoking thread will be I/O loop thread)
        new Thread(new Runnable() {
            @Override
            public void run() {
                new HttpServer(application).listen(PORT).getIOLoop().start();
            }
        }).start();
    }

    /**
     * Executes the given request with a fresh client.
     * BUG FIX: the parameter name was previously " Connection" (accidental
     * leading space), which could never match the intended "Connection" key.
     */
    private HttpResponse execute(HttpUriRequest request)
            throws ClientProtocolException, IOException {
        HttpParams params = new BasicHttpParams();
        params.setParameter("Connection", "Close");
        HttpClient httpclient = new DefaultHttpClient(params);
        return httpclient.execute(request);
    }

    /** Asserts a 200 OK HTTP/1.1 status line and returns the trimmed body. */
    private String assertOkAndReadPayload(HttpResponse response) throws IOException {
        assertNotNull(response);
        assertEquals(200, response.getStatusLine().getStatusCode());
        assertEquals(new ProtocolVersion("HTTP", 1, 1),
                response.getStatusLine().getProtocolVersion());
        assertEquals("OK", response.getStatusLine().getReasonPhrase());
        return convertStreamToString(response.getEntity().getContent()).trim();
    }

    @Test
    public void simpleGetRequestTest() throws ClientProtocolException, IOException {
        doSimpleGetRequest();
    }

    private void doSimpleGetRequest() throws ClientProtocolException, IOException {
        HttpResponse response = execute(new HttpGet("http://localhost:" + PORT + "/"));
        // the server is expected to add exactly these headers, nothing more
        List<String> expectedHeaders = Arrays.asList(new String[] {"Server", "Date"});
        assertEquals(expectedHeaders.size(), response.getAllHeaders().length);
        for (String header : expectedHeaders) {
            assertTrue(response.getFirstHeader(header) != null);
        }
        assertEquals(expectedPayload, assertOkAndReadPayload(response));
    }

    /**
     * Test a RH that does a single write
     * @throws ClientProtocolException
     * @throws IOException
     */
    @Test
    public void wTest() throws ClientProtocolException, IOException {
        HttpResponse response = execute(new HttpGet("http://localhost:" + PORT + "/w"));
        assertEquals("1", assertOkAndReadPayload(response));
    }

    @Test
    public void wwTest() throws ClientProtocolException, IOException {
        HttpResponse response = execute(new HttpGet("http://localhost:" + PORT + "/ww"));
        assertEquals("12", assertOkAndReadPayload(response));
    }

    @Test
    public void wwfwTest() throws ClientProtocolException, IOException {
        HttpResponse response = execute(new HttpGet("http://localhost:" + PORT + "/wwfw"));
        assertEquals("123", assertOkAndReadPayload(response));
    }

    @Test
    public void wfwfTest() throws ClientProtocolException, IOException {
        HttpResponse response = execute(new HttpGet("http://localhost:" + PORT + "/wfwf"));
        assertEquals("12", assertOkAndReadPayload(response));
    }

    @Test
    public void deleteTest() throws ClientProtocolException, IOException {
        HttpResponse response = execute(new HttpDelete("http://localhost:" + PORT + "/delete"));
        assertEquals("delete", assertOkAndReadPayload(response));
    }

    @Test
    public void PostTest() throws ClientProtocolException, IOException {
        HttpResponse response = execute(new HttpPost("http://localhost:" + PORT + "/post"));
        assertEquals("post", assertOkAndReadPayload(response));
    }

    @Test
    public void putTest() throws ClientProtocolException, IOException {
        HttpResponse response = execute(new HttpPut("http://localhost:" + PORT + "/put"));
        assertEquals("put", assertOkAndReadPayload(response));
    }

    /** Hammers the server with concurrent GETs and requires all to complete in time. */
    @Test
    public void simpleConcurrentGetRequestTest() {
        int nThreads = 8;
        int nRequests = 2048;
        final CountDownLatch latch = new CountDownLatch(nRequests);
        ExecutorService executor = Executors.newFixedThreadPool(nThreads);

        for (int i = 1; i <= nRequests; i++) {
            executor.submit(new Runnable() {
                @Override
                public void run() {
                    try {
                        doSimpleGetRequest();
                        latch.countDown();
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            });
        }
        try {
            latch.await(15 * 1000, TimeUnit.MILLISECONDS);   // max wait time
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        if (latch.getCount() != 0) {
            assertTrue("Did not finish " + nRequests + " # of requests", false);
        }
    }

    /** Reads the stream fully as UTF-8 text (one trailing newline per line); closes it. */
    public String convertStreamToString(InputStream is) throws IOException {
        if (is != null) {
            StringBuilder sb = new StringBuilder();
            String line;
            try {
                BufferedReader reader = new BufferedReader(new InputStreamReader(is, "UTF-8"));
                while ((line = reader.readLine()) != null) {
                    sb.append(line).append("\n");
                }
            } finally {
                is.close();
            }
            return sb.toString();
        } else {
            return "";
        }
    }
}
package org.iq80.snappy;

import com.google.common.base.Throwables;
import com.google.common.io.ByteStreams;
import com.google.common.io.Files;
import com.google.common.primitives.Longs;

import java.io.ByteArrayInputStream;
// BUG FIX: ByteArrayOutputStream is used in verify() but was never imported,
// which made this file fail to compile.
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.concurrent.TimeUnit;

import static java.lang.String.format;
import static org.iq80.snappy.BenchmarkDriver.JAVA_BLOCK;
import static org.iq80.snappy.BenchmarkDriver.JAVA_STREAM;
import static org.iq80.snappy.BenchmarkDriver.JNI_BLOCK;
import static org.iq80.snappy.BenchmarkDriver.JNI_STREAM;

/**
 * Port of the micro-benchmarks for Snappy.
 * <p/>
 * Make sure to run these with the server version of hot spot. I use the following configuration:
 * <pre>
 * {@code
 *   -Dorg.xerial.snappy.lib.name=libsnappyjava.jnilib -server -XX:+UseCompressedOops -Xms128M -Xmx128M -XX:+UseConcMarkSweepGC
 * }
 * </pre>
 */
public class SnappyBench {
    private static final int NUMBER_OF_RUNS = 5;
    private static final int CALIBRATE_ITERATIONS = 100;
    private static final int WARM_UP_SECONDS = 45;
    private static final int SECONDS_PER_RUN = 1;

    public static void main(String[] args) {
        System.err.printf("Running micro-benchmarks.\n");

        SnappyBench snappyBench = new SnappyBench();

        // verify implementation with a round trip for every input
        snappyBench.verify();

        // warm up the code paths so hot spot optimizes the code
        snappyBench.warmUp();

        snappyBench.runCompress("Block Compress", JNI_BLOCK, JAVA_BLOCK);
        snappyBench.runUncompress("Block Uncompress", JNI_BLOCK, JAVA_BLOCK);
        snappyBench.runRoundTrip("Block Round Trip", JNI_BLOCK, JAVA_BLOCK);

        snappyBench.runCompress("Stream Compress", JNI_STREAM, JAVA_STREAM);
        snappyBench.runUncompress("Stream Uncompress", JNI_STREAM, JAVA_STREAM);
        snappyBench.runRoundTrip("Stream RoundTrip", JNI_STREAM, JAVA_STREAM);
    }

    /**
     * Sanity-checks the Java implementation with a compress/uncompress round
     * trip for every test file, through both the block API and the stream API.
     */
    public void verify() {
        // Block API: round-trip twice — once into a zeroed buffer, and once
        // from a compressed array trimmed to its exact size.
        for (TestData testData : TestData.values()) {
            byte[] contents = testData.getContents();
            byte[] compressed = new byte[Snappy.maxCompressedLength(contents.length)];
            int compressedSize = Snappy.compress(contents, 0, contents.length, compressed, 0);
            byte[] uncompressed = new byte[contents.length];

            Snappy.uncompress(compressed, 0, compressedSize, uncompressed, 0);
            if (!Arrays.equals(uncompressed, testData.getContents())) {
                throw new AssertionError("Failed for " + testData);
            }

            Arrays.fill(uncompressed, (byte) 0);
            compressed = Arrays.copyOf(compressed, compressedSize);
            Snappy.uncompress(compressed, 0, compressedSize, uncompressed, 0);
            if (!Arrays.equals(uncompressed, testData.getContents())) {
                throw new AssertionError("Failed for " + testData);
            }
        }

        // Stream API round trip.
        for (TestData testData : TestData.values()) {
            try {
                byte[] contents = testData.getContents();
                ByteArrayOutputStream rawOut =
                        new ByteArrayOutputStream(Snappy.maxCompressedLength(contents.length));
                SnappyOutputStream out = new SnappyOutputStream(rawOut);
                out.write(contents);
                out.close();

                SnappyInputStream in =
                        new SnappyInputStream(new ByteArrayInputStream(rawOut.toByteArray()));
                byte[] uncompressed = ByteStreams.toByteArray(in);
                if (!Arrays.equals(uncompressed, testData.getContents())) {
                    throw new AssertionError("Failed for " + testData);
                }
            } catch (IOException e) {
                throw Throwables.propagate(e);
            }
        }
    }

    /** Runs compress/uncompress in a loop so hot spot compiles the hot paths. */
    public void warmUp() {
        // Warm up the code
        {
            long end = System.nanoTime() + TimeUnit.SECONDS.toNanos(WARM_UP_SECONDS);
            do {
                for (TestData testData : TestData.values()) {
                    benchmarkCompress(testData, JAVA_BLOCK, 100);
                }
            } while (System.nanoTime() < end);
            end = System.nanoTime() + TimeUnit.SECONDS.toNanos(WARM_UP_SECONDS);
            do {
                for (TestData testData : TestData.values()) {
                    benchmarkUncompress(testData, JAVA_BLOCK, 100);
                }
            } while (System.nanoTime() < end);
        }
    }

    private static void printHeader(String benchmarkTitle) {
        System.err.println();
        System.err.println();
        System.err.println(benchmarkTitle);
        System.err.println();
        System.err.printf("%-8s %8s %9s %9s %11s %11s %7s\n",
                "", "", "JNI", "Java", "JNI", "Java", "");
        System.err.printf("%-8s %8s %9s %9s %11s %11s %7s\n",
                "Input", "Size", "Compress", "Compress", "Throughput", "Throughput", "Change");
        // NOTE(review): the third printf was a truncated/garbled string literal
        // in this copy ('System.err.printf(" }'). Reconstructed as a separator
        // row matching the column layout above — confirm against upstream.
        System.err.printf("%-8s %8s %9s %9s %11s %11s %7s\n",
                "--------", "--------", "---------", "---------",
                "-----------", "-----------", "-------");
    }

    public void runCompress(String benchmarkTitle, BenchmarkDriver oldDriver, BenchmarkDriver newDriver) {
        printHeader(benchmarkTitle);
        for (TestData testData : TestData.values()) {
            runCompress(testData, oldDriver, newDriver);
        }
    }

    private void runCompress(TestData testData, BenchmarkDriver oldDriver, BenchmarkDriver newDriver) {
        long iterations = calibrateIterations(testData, oldDriver, true);
        long oldBytesPerSecond = benchmarkCompress(testData, oldDriver, iterations);
        long newBytesPerSecond = benchmarkCompress(testData, newDriver, iterations);
        printResult(testData, oldDriver, newDriver, oldBytesPerSecond, newBytesPerSecond);
    }

    private long benchmarkCompress(TestData testData, BenchmarkDriver driver, long iterations) {
        long[] runs = new long[NUMBER_OF_RUNS];
        for (int run = 0; run < NUMBER_OF_RUNS; ++run) {
            runs[run] = driver.compress(testData, iterations);
        }
        return toBytesPerSecond(testData, iterations, runs);
    }

    public void runUncompress(String benchmarkTitle, BenchmarkDriver oldDriver, BenchmarkDriver newDriver) {
        printHeader(benchmarkTitle);
        for (TestData testData : TestData.values()) {
            runUncompress(testData, oldDriver, newDriver);
        }
    }

    private void runUncompress(TestData testData, BenchmarkDriver oldDriver, BenchmarkDriver newDriver) {
        long iterations = calibrateIterations(testData, oldDriver, false);
        long oldBytesPerSecond = benchmarkUncompress(testData, oldDriver, iterations);
        long newBytesPerSecond = benchmarkUncompress(testData, newDriver, iterations);
        printResult(testData, oldDriver, newDriver, oldBytesPerSecond, newBytesPerSecond);
    }

    private long benchmarkUncompress(TestData testData, BenchmarkDriver driver, long iterations) {
        long[] runs = new long[NUMBER_OF_RUNS];
        for (int run = 0; run < NUMBER_OF_RUNS; ++run) {
            runs[run] = driver.uncompress(testData, iterations);
        }
        return toBytesPerSecond(testData, iterations, runs);
    }

    public void runRoundTrip(String benchmarkTitle, BenchmarkDriver oldDriver, BenchmarkDriver newDriver) {
        printHeader(benchmarkTitle);
        for (TestData testData : TestData.values()) {
            runRoundTrip(testData, oldDriver, newDriver);
        }
    }

    private void runRoundTrip(TestData testData, BenchmarkDriver oldDriver, BenchmarkDriver newDriver) {
        long iterations = calibrateIterations(testData, oldDriver, true);
        long oldBytesPerSecond = benchmarkRoundTrip(testData, oldDriver, iterations);
        long newBytesPerSecond = benchmarkRoundTrip(testData, newDriver, iterations);
        printResult(testData, oldDriver, newDriver, oldBytesPerSecond, newBytesPerSecond);
    }

    private long benchmarkRoundTrip(TestData testData, BenchmarkDriver driver, long iterations) {
        long[] runs = new long[NUMBER_OF_RUNS];
        for (int run = 0; run < NUMBER_OF_RUNS; ++run) {
            runs[run] = driver.roundTrip(testData, iterations);
        }
        return toBytesPerSecond(testData, iterations, runs);
    }

    /** Converts the median of the measured run times into a bytes/second rate. */
    private long toBytesPerSecond(TestData testData, long iterations, long[] runsInNanos) {
        long medianTimeInNanos = getMedianValue(runsInNanos);
        return (long) (1.0 * iterations * testData.size() / nanosToSeconds(medianTimeInNanos));
    }

    /** Prints one result row; shared by compress, uncompress and round-trip runs. */
    private void printResult(TestData testData, BenchmarkDriver oldDriver, BenchmarkDriver newDriver,
            long oldBytesPerSecond, long newBytesPerSecond) {
        String oldHumanReadableSpeed = toHumanReadableSpeed(oldBytesPerSecond);
        String newHumanReadableSpeed = toHumanReadableSpeed(newBytesPerSecond);
        double improvement = 100.0d * (newBytesPerSecond - oldBytesPerSecond) / oldBytesPerSecond;

        System.err.printf(
                "%-8s %8d %8.1f%% %8.1f%% %11s %11s %+6.1f%%  %s\n",
                testData,
                testData.size(),
                oldDriver.getCompressionRatio(testData) * 100.0,
                newDriver.getCompressionRatio(testData) * 100.0,
                oldHumanReadableSpeed,
                newHumanReadableSpeed,
                improvement,
                testData.getInfo());
    }

    private long calibrateIterations(TestData testData, BenchmarkDriver driver, boolean compression) {
        // Run a few iterations first to find out approximately how fast
        // the benchmark is.
        long start = System.nanoTime();
        if (compression) {
            driver.compress(testData, CALIBRATE_ITERATIONS);
        } else {
            driver.uncompress(testData, CALIBRATE_ITERATIONS);
        }
        long timeInNanos = System.nanoTime() - start;

        // Let each test case run for about 200ms, but at least as many
        // as we used to calibrate.
        // Run five times and pick the median.
        long iterations = 0;
        if (timeInNanos > 0) {
            double iterationsPerSecond = CALIBRATE_ITERATIONS / nanosToSeconds(timeInNanos);
            iterations = (long) (SECONDS_PER_RUN * iterationsPerSecond);
        }
        iterations = Math.max(iterations, CALIBRATE_ITERATIONS);
        return iterations;
    }

    private double nanosToSeconds(long nanos) {
        return 1.0 * nanos / TimeUnit.SECONDS.toNanos(1);
    }

    /** Formats a rate with a binary-scaled unit (B/s, kB/s, MB/s, GB/s). */
    private String toHumanReadableSpeed(long bytesPerSecond) {
        String humanReadableSpeed;
        if (bytesPerSecond < 1024) {
            humanReadableSpeed = format("%dB/s", bytesPerSecond);
        } else if (bytesPerSecond < 1024 * 1024) {
            humanReadableSpeed = format("%.1fkB/s", bytesPerSecond / 1024.0f);
        } else if (bytesPerSecond < 1024 * 1024 * 1024) {
            humanReadableSpeed = format("%.1fMB/s", bytesPerSecond / (1024.0f * 1024.0f));
        } else {
            humanReadableSpeed = format("%.1fGB/s", bytesPerSecond / (1024.0f * 1024.0f * 1024.0f));
        }
        return humanReadableSpeed;
    }

    private long getMedianValue(long[] benchmarkRuns) {
        ArrayList<Long> list = new ArrayList<Long>(Longs.asList(benchmarkRuns));
        Collections.sort(list);
        return list.get(benchmarkRuns.length / 2);
    }

    /**
     * Test corpus; each value loads its file from the "testdata" directory
     * and pre-computes a compressed copy outside of the timed sections.
     */
    @SuppressWarnings({"UnusedDeclaration"})
    public enum TestData {
        html("html"),
        urls("urls.10K"),
        jpg("house.jpg", false),
        pdf("mapreduce-osdi-1.pdf"),
        html4("html_x_4"),
        cp("cp.html"),
        c("fields.c"),
        lsp("grammar.lsp"),
        xls("kennedy.xls"),
        txt1("alice29.txt"),
        txt2("asyoulik.txt"),
        txt3("lcet10.txt"),
        txt4("plrabn12.txt"),
        bin("ptt5"),
        sum("sum"),
        man("xargs.1"),
        pb("geo.protodata"),
        gaviota("kppkn.gtb");

        private final String fileName;
        private final boolean compressibleData;
        private final byte[] contents;
        private final byte[] compressed;

        TestData(String fileName) {
            this(fileName, true);
        }

        TestData(String fileName, boolean compressibleData) {
            this.fileName = fileName;
            this.compressibleData = compressibleData;
            try {
                contents = Files.toByteArray(new File("testdata", fileName));
            } catch (IOException e) {
                throw Throwables.propagate(e);
            }

            // Read the file and create buffers out side of timing
            byte[] compressed = new byte[Snappy.maxCompressedLength(contents.length)];
            int compressedSize;
            try {
                compressedSize = org.xerial.snappy.Snappy.compress(contents, 0, contents.length, compressed, 0);
            } catch (IOException e) {
                throw Throwables.propagate(e);
            }
            this.compressed = Arrays.copyOf(compressed, compressedSize);
        }

        public String getFileName() {
            return fileName;
        }

        public boolean isCompressibleData() {
            return compressibleData;
        }

        public String getInfo() {
            if (compressibleData) {
                return name();
            } else {
                return name() + " (not compressible)";
            }
        }

        /** Defensive copy of the raw file contents. */
        public byte[] getContents() {
            return Arrays.copyOf(contents, contents.length);
        }

        public int size() {
            return contents.length;
        }

        /** Defensive copy of the pre-compressed contents. */
        public byte[] getCompressed() {
            return Arrays.copyOf(compressed, compressed.length);
        }

        public int compressedSize() {
            return compressed.length;
        }
    }
}
package picocli;

import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.contrib.java.lang.system.ProvideSystemProperty;
import org.junit.contrib.java.lang.system.RestoreSystemProperties;
import org.junit.rules.TestRule;
import picocli.CommandLine.Command;
import picocli.CommandLine.DuplicateOptionAnnotationsException;
import picocli.CommandLine.Model.CommandSpec;
import picocli.CommandLine.Model.OptionSpec;
import picocli.CommandLine.Model.PositionalParamSpec;
import picocli.CommandLine.Option;
import picocli.CommandLine.ParameterException;

import java.util.*;

import static org.junit.Assert.*;
import static picocli.CommandLine.ScopeType.INHERIT;
import static picocli.CommandLine.ScopeType.LOCAL;

/**
 * Tests for picocli's INHERIT option/positional-parameter scope: options declared
 * with {@code scope = INHERIT} on a command should be copied down to its
 * subcommands (and sub-subcommands), both via annotations and via the
 * programmatic CommandSpec/OptionSpec/PositionalParamSpec model.
 */
public class InheritedOptionTest {
    // force plain (non-ANSI) usage-message output so string comparisons are stable
    @Rule
    public final ProvideSystemProperty ansiOFF = new ProvideSystemProperty("picocli.ansi", "false");
    @Rule // allows tests to set any kind of properties they like, without having to individually roll them back
    public final TestRule restoreSystemProperties = new RestoreSystemProperties();

    // Three-level command hierarchy with a single inherited option on the top command.
    @Command(subcommands = Sub.class)
    static class Top {
        @Option(names = "--verbose", scope = INHERIT)
        boolean verbose;
    }
    @Command(name = "sub", subcommands = SubSub.class)
    static class Sub { }
    @Command(name = "subsub")
    static class SubSub { }

    @Test
    public void testGlobalOptionIsAddedToSubcommand() {
        Top top = new Top();
        CommandLine cmd = new CommandLine(top);
        // --verbose is matched on the subcommand but sets the field on Top
        cmd.parseArgs("sub", "--verbose");
        assertTrue(top.verbose);
    }

    @Test
    public void testGlobalOptionIsAddedToSubSubcommand() {
        Top top = new Top();
        CommandLine cmd = new CommandLine(top);
        // inheritance propagates two levels down
        cmd.parseArgs("sub", "subsub", "--verbose");
        assertTrue(top.verbose);
    }

    @Test
    public void testGlobalOptionDisallowedIfSubcommandAlreadyHasGlobalOptionWithSameName() {
        Top top = new Top();
        CommandLine cmd = new CommandLine(top);
        class Other {
            @Option(names = "--verbose", scope = INHERIT)
            boolean verbose;
        }
        Other other = new Other();
        try {
            // adding a subcommand whose own inherited option clashes with the
            // inherited option pushed down from the parent must fail
            cmd.addSubcommand("other", other);
            fail("Expected exception");
        } catch (DuplicateOptionAnnotationsException ex) {
            String msg = String.format("Option name '--verbose' is used by both field boolean %s.verbose and field boolean %s.verbose", top.getClass().getName(), other.getClass().getName());
            assertEquals(msg, ex.getMessage());
        }
        //cmd.parseArgs("other", "--verbose");
        //assertTrue(top.verbose);
        //assertFalse(other.verbose);
    }

    @Test
    public void testGlobalOptionDisallowedIfSubcommandAlreadyHasNonGlobalOptionWithSameName() {
        Top top = new Top();
        CommandLine cmd = new CommandLine(top);
        class Other {
            @Option(names = "--verbose") // local
            boolean verbose;
        }
        Other other = new Other();
        try {
            // a plain local option on the subcommand also clashes with the inherited one
            cmd.addSubcommand("other", other);
            fail("Expected exception");
        } catch (DuplicateOptionAnnotationsException ex) {
            String msg = String.format("Option name '--verbose' is used by both field boolean %s.verbose and field boolean %s.verbose", top.getClass().getName(), other.getClass().getName());
            assertEquals(msg, ex.getMessage());
        }
    }

    // Inherited option declared in a superclass of the command class.
    static class Base {
        @Option(names = "--verbose", scope = INHERIT)
        boolean verbose;
    }
    @Command(name = "ext", subcommands = ExtSub.class, resourceBundle = "picocli.InheritedOptionTest$MyBundle") //MyBundle.class.getName()
    static class Ext extends Base { }
    @Command(name = "sub")
    static class ExtSub {
        @Command
        void subsub() {}
    }

    @Test
    public void testGlobalOptionInBaseClass() {
        //TestUtil.setTraceLevel("DEBUG");
        // both top-level command and subcommand extend from a base class where global option is defined
        Ext ext = new Ext();
        CommandLine cmd = new CommandLine(ext);
        cmd.parseArgs("sub", "subsub", "--verbose");
        assertTrue(ext.verbose);
    }

    // Resource bundle supplying the description for the inherited option.
    public static class MyBundle extends ListResourceBundle { // used in Ext
        protected Object[][] getContents() {
            return new Object[][] {
                    {"verbose", "VERBOSE DESCRIPTION"},
                    {"sub.verbose", "SUB CUSTOM VERBOSE DESCRIPTION IS IGNORED"}
            };
        }
    }

    @Test
    public void testGlobalOptionDescriptionFromResourceBundle() {
        CommandLine cmd = new CommandLine(new Ext());
        // the bundle description is used at every level of the hierarchy;
        // the sub-specific key is ignored for the inherited copy
        String top = cmd.getUsageMessage();
        String expected = String.format("" + "Usage: ext [--verbose] [COMMAND]%n" + " --verbose VERBOSE DESCRIPTION%n" + "Commands:%n" + " sub%n");
        assertEquals(expected, top);

        CommandLine subCmd = cmd.getSubcommands().get("sub");
        String sub = subCmd.getUsageMessage();
        String expectedSub = String.format("" + "Usage: ext sub [--verbose] [COMMAND]%n" + " --verbose VERBOSE DESCRIPTION%n" + "Commands:%n" + " subsub%n");
        assertEquals(expectedSub, sub);

        CommandLine subsubCmd = subCmd.getSubcommands().get("subsub");
        String subsub = subsubCmd.getUsageMessage();
        String expectedSubSub = String.format("" + "Usage: ext sub subsub [--verbose]%n" + " --verbose VERBOSE DESCRIPTION%n");
        assertEquals(expectedSubSub, subsub);
    }

    // --- programmatic model: OptionSpec scope/inherited flags ---

    @Test
    public void testProgrammaticOptionBuilderScopeLocalByDefault() {
        assertEquals(LOCAL, OptionSpec.builder("-a").scopeType());
    }

    @Test
    public void testProgrammaticOptionBuilderScopeMutable() {
        assertEquals(INHERIT, OptionSpec.builder("-a").scopeType(INHERIT).scopeType());
        assertEquals(INHERIT, OptionSpec.builder("-a").scopeType(INHERIT).build().scopeType());
    }

    @Test
    public void testProgrammaticOptionLocalByDefault() {
        assertEquals(LOCAL, OptionSpec.builder("-a").build().scopeType());
    }

    @Test
    public void testProgrammaticOptionBuilderInheritedFalseByDefault() {
        assertFalse(OptionSpec.builder("-a").inherited());
    }

    @Test
    public void testProgrammaticOptionBuilderInheritedMutable() {
        assertTrue(OptionSpec.builder("-a").inherited(true).inherited());
        assertTrue(OptionSpec.builder("-a").inherited(true).build().inherited());
    }

    @Test
    public void testProgrammaticOptionInheritedFalseByDefault() {
        assertFalse(OptionSpec.builder("-a").build().inherited());
    }

    @Test
    public void testProgrammaticAddOptionBeforeSub() {
        // option added before the subcommand exists: copied on addSubcommand
        OptionSpec optA = OptionSpec.builder("-a").scopeType(INHERIT).build();
        CommandSpec spec = CommandSpec.create();
        spec.add(optA);
        assertFalse(optA.inherited());
        CommandSpec sub = CommandSpec.create();
        spec.addSubcommand("sub", sub);
        assertNotNull(spec.findOption("-a"));
        assertNotNull(sub.findOption("-a"));
        // the original is not marked inherited; the copy in the subcommand is
        assertFalse(spec.findOption("-a").inherited());
        assertTrue(sub.findOption("-a").inherited());
    }

    @Test
    public void testProgrammaticAddOptionAfterSub() {
        // option added after the subcommand exists: pushed down on add
        OptionSpec optA = OptionSpec.builder("-a").scopeType(INHERIT).build();
        CommandSpec spec = CommandSpec.create();
        CommandSpec sub = CommandSpec.create();
        spec.addSubcommand("sub", sub);
        spec.add(optA);
        assertFalse(optA.inherited());
        assertNotNull(spec.findOption("-a"));
        assertNotNull(sub.findOption("-a"));
        assertFalse(spec.findOption("-a").inherited());
        assertTrue(sub.findOption("-a").inherited());
    }

    // --- programmatic model: PositionalParamSpec scope/inherited flags ---

    @Test
    public void testProgrammaticPositionalParamBuilderScopeLocalByDefault() {
        assertEquals(LOCAL, PositionalParamSpec.builder().scopeType());
    }

    @Test
    public void testProgrammaticPositionalParamBuilderScopeMutable() {
        assertEquals(INHERIT, PositionalParamSpec.builder().scopeType(INHERIT).scopeType());
        assertEquals(INHERIT, PositionalParamSpec.builder().scopeType(INHERIT).build().scopeType());
    }

    @Test
    public void testProgrammaticPositionalParamLocalByDefault() {
        assertEquals(LOCAL, PositionalParamSpec.builder().build().scopeType());
    }

    @Test
    public void testProgrammaticPositionalParamBuilderInheritedFalseByDefault() {
        assertFalse(PositionalParamSpec.builder().inherited());
    }

    @Test
    public void testProgrammaticPositionalParamBuilderInheritedMutable() {
        assertTrue(PositionalParamSpec.builder().inherited(true).inherited());
        assertTrue(PositionalParamSpec.builder().inherited(true).build().inherited());
    }

    @Test
    public void testProgrammaticPositionalParamInheritedFalseByDefault() {
        assertFalse(PositionalParamSpec.builder().build().inherited());
    }

    @Test
    public void testProgrammaticAddPositionalParamBeforeSub() {
        // positional added before the subcommand exists: copied on addSubcommand
        PositionalParamSpec positional = PositionalParamSpec.builder().scopeType(INHERIT).build();
        CommandSpec spec = CommandSpec.create();
        spec.add(positional);
        assertFalse(positional.inherited());
        CommandSpec sub = CommandSpec.create();
        spec.addSubcommand("sub", sub);
        assertFalse(spec.positionalParameters().isEmpty());
        assertFalse(sub.positionalParameters().isEmpty());
        assertFalse(spec.positionalParameters().get(0).inherited());
        assertTrue(sub.positionalParameters().get(0).inherited());
    }

    @Test
    public void testProgrammaticAddPositionalParamAfterSub() {
        // positional added after the subcommand exists: pushed down on add
        PositionalParamSpec positional = PositionalParamSpec.builder().scopeType(INHERIT).build();
        CommandSpec spec = CommandSpec.create();
        CommandSpec sub = CommandSpec.create();
        spec.addSubcommand("sub", sub);
        spec.add(positional);
        assertFalse(positional.inherited());
        assertFalse(spec.positionalParameters().isEmpty());
        assertFalse(sub.positionalParameters().isEmpty());
        assertFalse(spec.positionalParameters().get(0).inherited());
        assertTrue(sub.positionalParameters().get(0).inherited());
    }

    // Inherited options combined with default values, via a setter method
    // (-x), a field initializer (-y) and a defaultValue attribute (-z).
    @Command(name = "TopWithDefault", subcommands = SubWithDefault.class)
    static class TopWithDefault {
        List<String> xvalues = new ArrayList<String>();

        @Option(names = "-x", defaultValue = "xxx", scope = INHERIT)
        public void setX(String x) {
            xvalues.add(x);
        }

        @Option(names = "-y", scope = INHERIT)
        String y = "yyy";

        @Option(names = "-z", defaultValue = "zzz", scope = INHERIT)
        String z;
    }
    @Command(name = "sub", subcommands = SubSubWithDefault.class)
    static class SubWithDefault { }
    @Command(name = "subsub")
    static class SubSubWithDefault { }

    @Test
    public void testInheritedOptionsWithDefault() {
        TopWithDefault bean = new TopWithDefault();
        CommandLine cmd = new CommandLine(bean);
        cmd.parseArgs();
        // defaults applied when nothing is specified
        assertEquals(Arrays.asList("xxx"), bean.xvalues);
        assertEquals("yyy", bean.y);
        assertEquals("zzz", bean.z);

        cmd.parseArgs("-y=1", "-z=2", "sub");
        assertEquals("1", bean.y);
        assertEquals("2", bean.z);
        assertEquals(Arrays.asList("xxx", "xxx"), bean.xvalues); // setters cannot be initialized

        cmd.parseArgs("sub", "subsub");
        assertEquals("zzz", bean.z);
        assertEquals(Arrays.asList("xxx", "xxx", "xxx"), bean.xvalues); // setters cannot be initialized
        assertEquals("yyy", bean.y);
    }
}
package ui.console.validation; import ui.console.input.InputHandler; /** * Class that is used to validate strings * @author Benny Lach * */ public class StringValidator { /** * Function to validate a given string * @param input The string to validate * @param cancelSequence The sequence that represents a cancel action * @param shrink Boolean that identifies if a String should be shrinked if it's too long * @return Result of the validation */ public static StringValidationType validateString(String input, String cancelSequence, Boolean shrink) { // string is null if (input == null) { return StringValidationType.Wrong; } // string is empty if (input.length() == 0) { System.out.println("The string must have at least one character!"); return StringValidationType.Wrong; } // check cancel sequence if (cancelSequence != null && input.equals(cancelSequence)) { return StringValidationType.CancelSequence; } // string is too long if (input.length() > 3) { // shrinking is accepted if (shrink) { if (InputHandler.getBooleanInput("The string is to long. Do you want to auto shring the string to 3 characters? [y/n]", "y")) { return StringValidationType.ShrinkInput; } } return StringValidationType.CancelSequence; } return StringValidationType.Correct; } }
package org.raku.rakudo;

import java.util.*;

import org.raku.nqp.runtime.*;
import org.raku.nqp.sixmodel.*;
import org.raku.nqp.sixmodel.reprs.ContextRefInstance;
import org.raku.nqp.sixmodel.reprs.P6int;
import org.raku.nqp.sixmodel.reprs.P6str;
import org.raku.nqp.sixmodel.reprs.P6num;
import org.raku.nqp.sixmodel.reprs.P6OpaqueREPRData;

/**
 * Implements Raku signature binding for the JVM backend: maps the positional
 * and named arguments of a call into the lexicals declared by the signature's
 * parameters, performing type checks, coercions, slurpy collection, and
 * sub-signature recursion as required.
 */
@SuppressWarnings("unused")
public final class Binder {
    /* Possible results of binding. */
    public static final int BIND_RESULT_OK       = 0;
    public static final int BIND_RESULT_FAIL     = 1;
    public static final int BIND_RESULT_JUNCTION = 2; // failure caused by a concrete Junction; caller auto-threads

    /* Compile time trial binding result indicators. */
    public static final int TRIAL_BIND_NOT_SURE =  0;  /* Plausible, but need to check at runtime. */
    public static final int TRIAL_BIND_OK       =  1;  /* Bind will always work out. */
    public static final int TRIAL_BIND_NO_WAY   = -1;  /* Bind could never work out. */

    /* Flags. Bit flags held in a Parameter's $!flags attribute; the numeric
     * values must stay in sync with the ones used by the Rakudo bootstrap
     * (NOTE(review): presumably BOOTSTRAP.nqp — confirm before changing). */
    private static final int SIG_ELEM_BIND_CAPTURE       = 1;
    private static final int SIG_ELEM_BIND_PRIVATE_ATTR  = 2;       // binds to a $!attr
    private static final int SIG_ELEM_BIND_PUBLIC_ATTR   = 4;       // binds to a $.attr
    private static final int SIG_ELEM_BIND_ATTRIBUTIVE   = (SIG_ELEM_BIND_PRIVATE_ATTR | SIG_ELEM_BIND_PUBLIC_ATTR);
    private static final int SIG_ELEM_SLURPY_POS         = 8;       // *@a (flattening)
    private static final int SIG_ELEM_SLURPY_NAMED       = 16;      // *%h
    private static final int SIG_ELEM_SLURPY_LOL         = 32;      // **@a (non-flattening)
    private static final int SIG_ELEM_INVOCANT           = 64;      // also bound to 'self'
    private static final int SIG_ELEM_MULTI_INVOCANT     = 128;
    private static final int SIG_ELEM_IS_RW              = 256;     // requires a writable container
    private static final int SIG_ELEM_IS_COPY            = 512;     // bind a fresh copy
    private static final int SIG_ELEM_IS_RAW             = 1024;    // bind argument as-is, no container
    private static final int SIG_ELEM_IS_OPTIONAL        = 2048;
    private static final int SIG_ELEM_ARRAY_SIGIL        = 4096;    // @-sigil parameter
    private static final int SIG_ELEM_HASH_SIGIL         = 8192;    // %-sigil parameter
    private static final int SIG_ELEM_DEFAULT_FROM_OUTER = 16384;   // default comes from the outer lexical of the same name
    private static final int SIG_ELEM_IS_CAPTURE         = 32768;   // |c parameter
    private static final int SIG_ELEM_UNDEFINED_ONLY     = 65536;   // :U constraint
    private static final int SIG_ELEM_DEFINED_ONLY       = 131072;  // :D constraint
    private static final int SIG_ELEM_DEFINEDNES_CHECK   = (SIG_ELEM_UNDEFINED_ONLY | SIG_ELEM_DEFINED_ONLY); // (sic) name kept for compatibility
    private static final int SIG_ELEM_TYPE_GENERIC       = 524288;  // ::T — type resolved during bind
    private static final int SIG_ELEM_DEFAULT_IS_LITERAL = 1048576; // default is a value, not a thunk to invoke
    private static final int SIG_ELEM_NATIVE_INT_VALUE   = 2097152;
    private static final int SIG_ELEM_NATIVE_NUM_VALUE   = 4194304;
    private static final int SIG_ELEM_NATIVE_STR_VALUE   = 8388608;
    private static final int SIG_ELEM_NATIVE_VALUE       = (SIG_ELEM_NATIVE_INT_VALUE | SIG_ELEM_NATIVE_NUM_VALUE | SIG_ELEM_NATIVE_STR_VALUE);
    private static final int SIG_ELEM_SLURPY_ONEARG      = 16777216; // +@a single-argument rule slurpy
    private static final int SIG_ELEM_SLURPY             = (SIG_ELEM_SLURPY_POS | SIG_ELEM_SLURPY_NAMED | SIG_ELEM_SLURPY_LOL | SIG_ELEM_SLURPY_ONEARG);
    private static final int SIG_ELEM_CODE_SIGIL         = 33554432; // &-sigil parameter
    private static final int SIG_ELEM_IS_COERCIVE        = 67108864;

    /* Hints for Parameter attributes: attribute slot indexes used with
     * get_attribute_native/get_attribute_boxed to avoid name lookups. */
    private static final int HINT_variable_name        = 0;
    private static final int HINT_named_names          = 1;
    private static final int HINT_type_captures        = 2;
    private static final int HINT_flags                = 3;
    private static final int HINT_type                 = 4;
    private static final int HINT_post_constraints     = 5;
    private static final int HINT_sub_signature        = 6;
    private static final int HINT_default_value        = 7;
    private static final int HINT_container_descriptor = 8;
    private static final int HINT_attr_package         = 9;

    /* Other hints.
*/
    private static final int HINT_ENUMMAP_storage = 0; // Map $!storage slot
    private static final int HINT_CAPTURE_list    = 0; // Capture @!list slot
    private static final int HINT_CAPTURE_hash    = 1; // Capture %!hash slot
    private static final int HINT_LIST_reified    = 0; // List $!reified slot
    private static final int HINT_SIG_params      = 0; // Signature @!params slot

    /**
     * Boxes a native argument value into the corresponding Raku type
     * (Int / Num / Str) taken from the global context.
     *
     * @param flag one of CallSiteDescriptor.ARG_INT / ARG_NUM / ARG_STR
     * @throws RuntimeException if flag is not one of the three native kinds
     */
    private static SixModelObject createBox(ThreadContext tc, RakOps.GlobalExt gcx, Object arg, int flag) {
        switch (flag) {
            case CallSiteDescriptor.ARG_INT:
                return Ops.box_i((long)arg, gcx.Int, tc);
            case CallSiteDescriptor.ARG_NUM:
                return Ops.box_n((double)arg, gcx.Num, tc);
            case CallSiteDescriptor.ARG_STR:
                return Ops.box_s((String)arg, gcx.Str, tc);
            default:
                throw new RuntimeException("Impossible case reached in createBox");
        }
    }

    /**
     * Builds the error message for an arity mismatch.
     * Walks the parameters counting required positionals ('arity') and the
     * maximum acceptable positionals ('count'), then formats a message
     * appropriate to whether the range is exact, open-ended, or bounded.
     *
     * @param tooMany true if too many positionals were passed, false if too few
     * @return the formatted error message (not thrown here)
     */
    private static String arityFail(ThreadContext tc, RakOps.GlobalExt gcx, SixModelObject params,
            int numParams, int numPosArgs, boolean tooMany) {
        int arity = 0;
        int count = 0;
        String fail = tooMany ? "Too many" : "Too few";

        /* Work out how many we could have been passed. */
        for (int i = 0; i < numParams; i++) {
            SixModelObject param = params.at_pos_boxed(tc, i);
            param.get_attribute_native(tc, gcx.Parameter, "$!flags", HINT_flags);
            int flags = (int)tc.native_i;
            SixModelObject namedNames = param.get_attribute_boxed(tc,
                gcx.Parameter, "@!named_names", HINT_named_names);

            // named and named-slurpy parameters never consume positionals
            if (namedNames != null)
                continue;
            if ((flags & SIG_ELEM_SLURPY_NAMED) != 0)
                continue;
            if ((flags & SIG_ELEM_SLURPY) != 0) {
                // a positional slurpy makes the upper bound effectively unlimited
                count = -1000; // cargo-culted from BOOTSTRAP.nqp: "in case a pos can sneak past a slurpy somehow"
            }
            else if ((flags & SIG_ELEM_IS_OPTIONAL) != 0) {
                count++;
            }
            else {
                count++;
                arity++;
            }
        }

        /* Now generate decent error. */
        if (arity == count)
            return String.format(
                "%s positionals passed; expected %d arguments but got %d",
                fail, arity, numPosArgs);
        else if (count <= -1) // negative count marks the unlimited (slurpy) case
            return String.format(
                "%s positionals passed; expected at least %d arguments but got only %d",
                fail, arity, numPosArgs);
        else
            return String.format(
                "%s positionals passed; expected %d %s %d arguments but got %d",
                fail, arity, arity + 1 == count ? "or" : "to" , count, numPosArgs);
    }

    /**
     * Binds any type captures (::T) into the call frame's lexicals, making the
     * captured type available under each listed name.
     */
    public static void bindTypeCaptures(ThreadContext tc, SixModelObject typeCaps, CallFrame cf, SixModelObject type) {
        long elems = typeCaps.elems(tc);
        StaticCodeInfo sci = cf.codeRef.staticInfo;
        for (long i = 0; i < elems; i++) {
            typeCaps.at_pos_native(tc, i);
            String name = tc.native_s;
            cf.oLex[sci.oTryGetLexicalIdx(name)] = type;
        }
    }

    /**
     * Assigns an attributive parameter ($!attr / $.attr) to the desired
     * attribute of 'self'. Public ($.attr) binding is not yet implemented.
     *
     * @return BIND_RESULT_OK or BIND_RESULT_FAIL (error[0] set when error != null)
     */
    private static int assignAttributive(ThreadContext tc, CallFrame cf, String varName,
            int paramFlags, SixModelObject attrPackage, SixModelObject value, Object[] error) {
        /* Find self: first as a lexical of this frame, otherwise in an outer frame. */
        StaticCodeInfo sci = cf.codeRef.staticInfo;
        Integer selfIdx = sci.oTryGetLexicalIdx("self");
        SixModelObject self = null;
        if (selfIdx == null) {
            self = Ops.getlexouter("self", tc);
            if (self == null) {
                if (error != null)
                    error[0] = String.format(
                        "Unable to bind attributive parameter '%s' - could not find self",
                        varName);
                return BIND_RESULT_FAIL;
            }
        }
        else {
            self = cf.oLex[selfIdx];
        }

        /* If it's private, just need to fetch the attribute. */
        SixModelObject assignee;
        if ((paramFlags & SIG_ELEM_BIND_PRIVATE_ATTR) != 0) {
            /* If we have a native Attribute we can't get a container for it, and
               since *trying* to get a container would throw already, we first
               check if the target Attribute is native. */
            int hint = -1;
            // scan the per-MRO name->slot maps; map.get returns null (NPE on unboxing)
            // for classes that don't declare the attribute, hence the catch-and-continue
            for (HashMap<String, Integer> map : ((P6OpaqueREPRData) (attrPackage.st.REPRData)).nameToHintMap) {
                try {
                    hint = map.get(varName);
                } catch (Exception e) {
                    continue;
                }
            }
            REPR attrREPR = null;
            if (((P6OpaqueREPRData) (attrPackage.st.REPRData)).flattenedSTables[hint] != null) {
                /* We sometimes don't have flattenedSTables. I'm not sure that's okay, honestly...
*/
                attrREPR = ((P6OpaqueREPRData) (attrPackage.st.REPRData)).flattenedSTables[hint].REPR;
            }
            // Native attributes are bound directly via the typed bindattr ops...
            if (attrREPR instanceof P6int) {
                Ops.bindattr_i(self, attrPackage, varName, Ops.unbox_i(value, tc), tc);
            }
            else if (attrREPR instanceof P6num) {
                Ops.bindattr_n(self, attrPackage, varName, Ops.unbox_n(value, tc), tc);
            }
            else if (attrREPR instanceof P6str) {
                Ops.bindattr_s(self, attrPackage, varName, Ops.unbox_s(value, tc), tc);
            }
            else {
                /* ...but we'll just assume it's probably some boxed Attribute. */
                assignee = self.get_attribute_boxed(tc, attrPackage, varName, STable.NO_HINT);
                RakOps.p6store(assignee, value, tc);
            }
        }
        /* Otherwise if it's public, do a method call to get the assignee. */
        else {
            throw new RuntimeException("$.x parameters NYI");
        }
        return BIND_RESULT_OK;
    }

    /* Returns an appropriate failure mode (junction fail or normal fail).
     * A concrete Junction argument reports BIND_RESULT_JUNCTION so the caller
     * can auto-thread instead of failing outright. */
    private static int juncOrFail(ThreadContext tc, RakOps.GlobalExt gcx, SixModelObject value) {
        if (value.st.WHAT == gcx.Junction && Ops.isconcrete(value, tc) != 0)
            return BIND_RESULT_JUNCTION;
        else
            return BIND_RESULT_FAIL;
    }

    /* Binds a single argument into the lexpad, after doing any checks that are
     * needed. Also handles any type captures. If there is a sub signature, then
     * re-enters the binder. Returns one of the BIND_RESULT_* codes.
*/
    // Callsite shapes reused by bindOneParam, built once.
    // (HOW, type, context) — for instantiate_generic / coerce method calls:
    private static final CallSiteDescriptor genIns = new CallSiteDescriptor(
        new byte[] { CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_OBJ,
                     CallSiteDescriptor.ARG_OBJ }, null);
    // ACCEPTS invocations for post-constraint checks, one per argument kind:
    private static final CallSiteDescriptor ACCEPTS_o = new CallSiteDescriptor(
        new byte[] { CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_OBJ }, null);
    private static final CallSiteDescriptor ACCEPTS_i = new CallSiteDescriptor(
        new byte[] { CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_INT }, null);
    private static final CallSiteDescriptor ACCEPTS_n = new CallSiteDescriptor(
        new byte[] { CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_NUM }, null);
    private static final CallSiteDescriptor ACCEPTS_s = new CallSiteDescriptor(
        new byte[] { CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_STR }, null);
    // Argument shapes for the typed-exception throwers fetched via RakOps.getThrower:
    private static final CallSiteDescriptor bindParamThrower = new CallSiteDescriptor(
        new byte[] { CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_OBJ,
                     CallSiteDescriptor.ARG_STR, CallSiteDescriptor.ARG_OBJ,
                     CallSiteDescriptor.ARG_INT }, null);
    private static final CallSiteDescriptor bindConcreteThrower = new CallSiteDescriptor(
        new byte[] { CallSiteDescriptor.ARG_STR, CallSiteDescriptor.ARG_STR,
                     CallSiteDescriptor.ARG_STR, CallSiteDescriptor.ARG_STR,
                     CallSiteDescriptor.ARG_INT, CallSiteDescriptor.ARG_INT }, null);
    private static final CallSiteDescriptor paramReadWriteThrower = new CallSiteDescriptor(
        new byte[] { CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_STR }, null);

    /**
     * Binds one parameter of a signature from one argument.
     *
     * @param origArg        the raw argument (boxed object or native long/double/String)
     * @param origFlag       the CallSiteDescriptor.ARG_* kind of origArg
     * @param noNomTypeCheck skip the nominal type check when true
     * @param isSlurpyNamed  true when binding the hash for a named slurpy
     * @param error          out-params for error reporting, or null if no message is wanted
     * @return one of the BIND_RESULT_* codes
     */
    private static int bindOneParam(ThreadContext tc, RakOps.GlobalExt gcx, CallFrame cf,
            SixModelObject param, Object origArg, byte origFlag, boolean noNomTypeCheck,
            boolean isSlurpyNamed, Object[] error) {
        /* Get parameter flags and variable name.
*/ param.get_attribute_native(tc, gcx.Parameter, "$!flags", HINT_flags); int paramFlags = (int)tc.native_i; param.get_attribute_native(tc, gcx.Parameter, "$!variable_name", HINT_variable_name); String varName = tc.native_s; boolean hasVarName = true; if (varName == null || varName.isEmpty()) { varName = "<anon>"; hasVarName = false; } if (RakOps.DEBUG_MODE) System.err.println(varName); /* We'll put the value to bind into one of the following locals, and * flag will indicate what type of thing it is. */ int flag; long arg_i = 0; double arg_n = 0.0; String arg_s = null; SixModelObject arg_o = null; /* Check if boxed/unboxed expectations are met. */ int desiredNative = paramFlags & SIG_ELEM_NATIVE_VALUE; boolean is_rw = (paramFlags & SIG_ELEM_IS_RW) != 0; int gotNative = origFlag & (CallSiteDescriptor.ARG_INT | CallSiteDescriptor.ARG_NUM | CallSiteDescriptor.ARG_STR); if (is_rw && desiredNative != 0) { switch (desiredNative) { case SIG_ELEM_NATIVE_INT_VALUE: if (gotNative != 0 || Ops.iscont_i((SixModelObject)origArg) == 0) { if (error != null) error[0] = String.format( "Expected a modifiable native int argument for '%s'", varName); return BIND_RESULT_FAIL; } break; case SIG_ELEM_NATIVE_NUM_VALUE: if (gotNative != 0 || Ops.iscont_n((SixModelObject)origArg) == 0) { if (error != null) error[0] = String.format( "Expected a modifiable native num argument for '%s'", varName); return BIND_RESULT_FAIL; } break; case SIG_ELEM_NATIVE_STR_VALUE: if (gotNative != 0 || Ops.iscont_s((SixModelObject)origArg) == 0) { if (error != null) error[0] = String.format( "Expected a modifiable native str argument for '%s'", varName); return BIND_RESULT_FAIL; } break; } flag = CallSiteDescriptor.ARG_OBJ; arg_o = (SixModelObject)origArg; } else if (desiredNative == 0 && gotNative == CallSiteDescriptor.ARG_OBJ) { flag = gotNative; arg_o = (SixModelObject)origArg; } else if (desiredNative == SIG_ELEM_NATIVE_INT_VALUE && gotNative == CallSiteDescriptor.ARG_INT) { flag = gotNative; arg_i = 
(long)origArg; } else if (desiredNative == SIG_ELEM_NATIVE_NUM_VALUE && gotNative == CallSiteDescriptor.ARG_NUM) { flag = gotNative; arg_n = (double)origArg; } else if (desiredNative == SIG_ELEM_NATIVE_STR_VALUE && gotNative == CallSiteDescriptor.ARG_STR) { flag = gotNative; arg_s = (String)origArg; } else if (desiredNative == 0) { /* We need to do a boxing operation. */ flag = CallSiteDescriptor.ARG_OBJ; arg_o = createBox(tc, gcx, origArg, gotNative); } else { /* We need to do an unboxing operation. */ SixModelObject decontValue = Ops.decont((SixModelObject)origArg, tc); StorageSpec spec = decontValue.st.REPR.get_storage_spec(tc, decontValue.st); switch (desiredNative) { case SIG_ELEM_NATIVE_INT_VALUE: if ((spec.can_box & StorageSpec.CAN_BOX_INT) != 0) { flag = CallSiteDescriptor.ARG_INT; arg_i = decontValue.get_int(tc); } else { if (error != null) error[0] = String.format( "Cannot unbox argument to '%s' as a native int", varName); return BIND_RESULT_FAIL; } break; case SIG_ELEM_NATIVE_NUM_VALUE: if ((spec.can_box & StorageSpec.CAN_BOX_NUM) != 0) { flag = CallSiteDescriptor.ARG_NUM; arg_n = decontValue.get_num(tc); } else { if (error != null) error[0] = String.format( "Cannot unbox argument to '%s' as a native num", varName); return BIND_RESULT_FAIL; } break; case SIG_ELEM_NATIVE_STR_VALUE: if ((spec.can_box & StorageSpec.CAN_BOX_STR) != 0) { flag = CallSiteDescriptor.ARG_STR; arg_s = decontValue.get_str(tc); } else { if (error != null) error[0] = String.format( "Cannot unbox argument to '%s' as a native str", varName); return BIND_RESULT_FAIL; } break; default: if (error != null) error[0] = String.format( "Cannot unbox argument to '%s' as a native type", varName); return BIND_RESULT_FAIL; } } /* By this point, we'll either have an object that we might be able to * bind if it passes the type check, or a native value that needs no * further checking. 
*/ SixModelObject decontValue = null; boolean didHLLTransform = false; SixModelObject paramType = param.get_attribute_boxed(tc, gcx.Parameter, "$!type", HINT_type); SixModelObject ContextRef = null; SixModelObject HOW = null; if (flag == CallSiteDescriptor.ARG_OBJ && !(is_rw && desiredNative != 0)) { /* We need to work on the decontainerized value. */ decontValue = Ops.decont(arg_o, tc); /* HLL map it as needed. */ SixModelObject beforeHLLize = decontValue; decontValue = Ops.hllize(decontValue, tc); if (decontValue != beforeHLLize) didHLLTransform = true; /* Skip nominal type check if not needed. */ if (!noNomTypeCheck) { /* Is the nominal type generic and in need of instantiation? (This * can happen in (::T, T) where we didn't learn about the type until * during the signature bind.) */ if ((paramFlags & SIG_ELEM_TYPE_GENERIC) != 0) { HOW = paramType.st.HOW; SixModelObject ig = Ops.findmethod(HOW, "instantiate_generic", tc); ContextRef = tc.gc.ContextRef; SixModelObject cc = ContextRef.st.REPR.allocate(tc, ContextRef.st); ((ContextRefInstance)cc).context = cf; Ops.invokeDirect(tc, ig, genIns, new Object[] { HOW, paramType, cc }); paramType = Ops.result_o(tc.curFrame); } /* If the expected type is Positional, see if we need to do the * positional bind failover. */ if (paramType == gcx.Positional) { if (Ops.istype_nd(arg_o, gcx.PositionalBindFailover, tc) != 0) { SixModelObject ig = Ops.findmethod(arg_o, "cache", tc); Ops.invokeDirect(tc, ig, Ops.invocantCallSite, new Object[] { arg_o }); arg_o = Ops.result_o(tc.curFrame); decontValue = Ops.decont(arg_o, tc); } else if (Ops.istype_nd(decontValue, gcx.PositionalBindFailover, tc) != 0) { SixModelObject ig = Ops.findmethod(decontValue, "cache", tc); Ops.invokeDirect(tc, ig, Ops.invocantCallSite, new Object[] { decontValue }); decontValue = Ops.result_o(tc.curFrame); } } /* If not, do the check. If the wanted nominal type is Mu, then * anything goes. 
* When binding a slurpy named hash while compiling the setting don't check for Associative. */ if (paramType != gcx.Mu && !(isSlurpyNamed && paramType == gcx.Associative) && Ops.istype_nd(decontValue, paramType, tc) == 0) { /* Type check failed; produce error if needed. */ if (error != null) { SixModelObject thrower = RakOps.getThrower(tc, "X::TypeCheck::Binding::Parameter"); if (thrower != null) { error[0] = thrower; error[1] = bindParamThrower; error[2] = new Object[] { decontValue, paramType.st.WHAT, varName, param, (long)0 }; } else { error[0] = String.format( "Nominal type check failed for parameter '%s'", varName); } } /* Report junction failure mode if it's a junction. */ return juncOrFail(tc, gcx, decontValue); } /* Also enforce definedness check */ if ( (paramFlags & SIG_ELEM_DEFINEDNES_CHECK) != 0) { /* Don't check decontValue for concreteness though, but arg_o, seeing as we don't have a isconcrete_nodecont */ Boolean shouldBeConcrete = (paramFlags & SIG_ELEM_DEFINED_ONLY) != 0 && Ops.isconcrete(arg_o, tc) != 1; if (shouldBeConcrete || ((paramFlags & SIG_ELEM_UNDEFINED_ONLY) != 0 && Ops.isconcrete(arg_o, tc) == 1)) { if (error != null) { String typeName = Ops.typeName(param.get_attribute_boxed(tc, gcx.Parameter, "$!type", HINT_type), tc); String argName = Ops.typeName(arg_o, tc); String methodName = cf.codeRef.name; SixModelObject thrower = RakOps.getThrower(tc, "X::Parameter::InvalidConcreteness"); if (thrower != null) { error[0] = thrower; error[1] = bindConcreteThrower; error[2] = new Object[] { typeName, argName, methodName, varName, (long)(shouldBeConcrete ? 1 : 0), (long)(paramFlags & SIG_ELEM_INVOCANT) }; } else { if (methodName == null || methodName.isEmpty()) methodName = "<anon>"; error[0] = ((paramFlags & SIG_ELEM_INVOCANT) != 0) ? shouldBeConcrete ? String.format( "Invocant of method '%s' must be an object instance of type '%s', not a type object of type '%s'. 
Did you forget a '.new'?", methodName, typeName, argName) : String.format( "Invocant of method '%s' must be a type object of type '%s', not an object instance of type '%s'. Did you forget a 'multi'?", methodName, typeName, argName) : shouldBeConcrete ? String.format( "Parameter '%s' of routine '%s' must be an object instance of type '%s', not a type object of type '%s'. Did you forget a '.new'?", varName, methodName, typeName, argName) : String.format( "Parameter '%s' of routine '%s' must be a type object of type '%s', not an object instance of type '%s'. Did you forget a 'multi'?", varName, methodName, typeName, argName); } } return juncOrFail(tc, gcx, decontValue); } } } } /* Type captures. */ SixModelObject typeCaps = param.get_attribute_boxed(tc, gcx.Parameter, "@!type_captures", HINT_type_captures); if (typeCaps != null) bindTypeCaptures(tc, typeCaps, cf, decontValue.st.WHAT); /* Do a coercion, if one is needed. */ SixModelObject coerciveMeth = Ops.findmethod(param.st.WHAT, "coercive", tc); Ops.invokeDirect(tc, coerciveMeth, Ops.invocantCallSite, new Object[] { param }); if (Ops.istrue(Ops.result_o(tc.curFrame), tc) == 1) { /* Coercing natives not possible - nothing to call a method on. */ if (flag != CallSiteDescriptor.ARG_OBJ) { if (error != null) error[0] = String.format( "Unable to coerce natively typed parameter '%s'", varName); return BIND_RESULT_FAIL; } HOW = paramType.st.HOW; SixModelObject coerceMeth = Ops.findmethod(HOW, "coerce", tc); Ops.invokeDirect(tc, coerceMeth, genIns, new Object[] { HOW, paramType, arg_o }); arg_o = Ops.result_o(tc.curFrame); decontValue = Ops.decont(arg_o, tc); } /* If it's not got attributive binding, we'll go about binding it into the * lex pad. */ StaticCodeInfo sci = cf.codeRef.staticInfo; if ((paramFlags & SIG_ELEM_BIND_ATTRIBUTIVE) == 0) { /* Is it native? If so, just go ahead and bind it. 
*/ if (flag != CallSiteDescriptor.ARG_OBJ) { if (hasVarName) { switch (flag) { case CallSiteDescriptor.ARG_INT: cf.iLex[sci.iTryGetLexicalIdx(varName)] = arg_i; break; case CallSiteDescriptor.ARG_NUM: cf.nLex[sci.nTryGetLexicalIdx(varName)] = arg_n; break; case CallSiteDescriptor.ARG_STR: cf.sLex[sci.sTryGetLexicalIdx(varName)] = arg_s; break; } } } /* Otherwise it's some objecty case. */ else if (is_rw) { if (Ops.isrwcont(arg_o, tc) == 1) { if (hasVarName) cf.oLex[sci.oTryGetLexicalIdx(varName)] = arg_o; } else { SixModelObject thrower = RakOps.getThrower(tc, "X::Parameter::RW"); if (thrower == null) { error[0] = "Parameter expected a writable container"; } else { error[0] = thrower; error[1] = paramReadWriteThrower; error[2] = new Object[] { decontValue, varName}; } return BIND_RESULT_FAIL; } } else if (hasVarName) { if ((paramFlags & SIG_ELEM_IS_RAW) != 0) { /* Just bind the thing as is into the lexpad. */ cf.oLex[sci.oTryGetLexicalIdx(varName)] = didHLLTransform ? decontValue : arg_o; } else { /* If it's an array, copy means make a new one and store, * and a normal bind is a straightforward binding plus * adding a constraint. */ if ((paramFlags & SIG_ELEM_ARRAY_SIGIL) != 0) { SixModelObject bindee = decontValue; if ((paramFlags & SIG_ELEM_IS_COPY) != 0) { SixModelObject BOOTArray = tc.gc.BOOTArray; bindee = gcx.Array.st.REPR.allocate(tc, gcx.Array.st); bindee.bind_attribute_boxed(tc, gcx.List, "$!reified", HINT_LIST_reified, BOOTArray.st.REPR.allocate(tc, BOOTArray.st)); RakOps.p6store(bindee, decontValue, tc); } cf.oLex[sci.oTryGetLexicalIdx(varName)] = bindee; } /* If it's a hash, similar approach to array. 
*/ else if ((paramFlags & SIG_ELEM_HASH_SIGIL) != 0) { SixModelObject bindee = decontValue; if ((paramFlags & SIG_ELEM_IS_COPY) != 0) { SixModelObject BOOTHash = tc.gc.BOOTHash; bindee = gcx.Hash.st.REPR.allocate(tc, gcx.Hash.st); bindee.bind_attribute_boxed(tc, gcx.Map, "$!storage", HINT_ENUMMAP_storage, BOOTHash.st.REPR.allocate(tc, BOOTHash.st)); RakOps.p6store(bindee, decontValue, tc); } cf.oLex[sci.oTryGetLexicalIdx(varName)] = bindee; } /* If it's a scalar, we always need to wrap it into a new * container and store it, for copy or ro case (the rw bit * in the container descriptor takes care of the rest). */ else { boolean wrap = (paramFlags & SIG_ELEM_IS_COPY) != 0; if (!wrap && paramType != null && gcx.Iterable != null) { wrap = Ops.istype(gcx.Iterable, paramType, tc) != 0 || Ops.istype(paramType, gcx.Iterable, tc) != 0; } if (wrap || varName.equals("$_")) { STable stScalar = gcx.Scalar.st; SixModelObject new_cont = stScalar.REPR.allocate(tc, stScalar); SixModelObject desc = param.get_attribute_boxed(tc, gcx.Parameter, "$!container_descriptor", HINT_container_descriptor); new_cont.bind_attribute_boxed(tc, gcx.Scalar, "$!descriptor", RakudoContainerSpec.HINT_descriptor, desc); new_cont.bind_attribute_boxed(tc, gcx.Scalar, "$!value", RakudoContainerSpec.HINT_value, decontValue); cf.oLex[sci.oTryGetLexicalIdx(varName)] = new_cont; } else { cf.oLex[sci.oTryGetLexicalIdx(varName)] = decontValue; } } } } } /* Is it the invocant? If so, also have to bind to self lexical. */ if ((paramFlags & SIG_ELEM_INVOCANT) != 0) cf.oLex[sci.oTryGetLexicalIdx("self")] = decontValue; /* Handle any constraint types (note that they may refer to the parameter by * name, so we need to have bound it already). 
*/ SixModelObject postConstraints = param.get_attribute_boxed(tc, gcx.Parameter, "$!post_contraints", HINT_post_constraints); if (postConstraints != null) { long numConstraints = postConstraints.elems(tc); for (long i = 0; i < numConstraints; i++) { /* Check we meet the constraint. */ SixModelObject consType = postConstraints.at_pos_boxed(tc, i); SixModelObject acceptsMeth = Ops.findmethod(consType, "ACCEPTS", tc); if (Ops.isconcrete(consType, tc) == 1 && Ops.istype(consType, gcx.Code, tc) != 0) RakOps.p6capturelex(consType, tc); switch (flag) { case CallSiteDescriptor.ARG_INT: Ops.invokeDirect(tc, acceptsMeth, ACCEPTS_i, new Object[] { consType, arg_i }); break; case CallSiteDescriptor.ARG_NUM: Ops.invokeDirect(tc, acceptsMeth, ACCEPTS_n, new Object[] { consType, arg_n }); break; case CallSiteDescriptor.ARG_STR: Ops.invokeDirect(tc, acceptsMeth, ACCEPTS_s, new Object[] { consType, arg_s }); break; default: Ops.invokeDirect(tc, acceptsMeth, ACCEPTS_o, new Object[] { consType, arg_o }); break; } if (Ops.istrue(Ops.result_o(tc.curFrame), tc) == 0) { /* Constraint type check failed; produce error if needed. */ if (error != null) { SixModelObject thrower = RakOps.getThrower(tc, "X::TypeCheck::Binding::Parameter"); if (thrower != null) { error[0] = thrower; error[1] = bindParamThrower; error[2] = new Object[] { (SixModelObject)origArg, consType.st.WHAT, varName, param, (long)1 }; } else { error[0] = String.format( "Constraint type check failed for parameter '%s'", varName); } } return BIND_RESULT_FAIL; } } } /* TODO: attributives. 
*/ if ((paramFlags & SIG_ELEM_BIND_ATTRIBUTIVE) != 0) { if (flag != CallSiteDescriptor.ARG_OBJ) { if (error != null) error[0] = "Native attributive binding not yet implemented"; return BIND_RESULT_FAIL; } int result = assignAttributive(tc, cf, varName, paramFlags, param.get_attribute_boxed(tc, gcx.Parameter, "$!attr_package", HINT_attr_package), decontValue, error); if (result != BIND_RESULT_OK) return result; } /* If it has a sub-signature, bind that. */ SixModelObject subSignature = param.get_attribute_boxed(tc, gcx.Parameter, "$!sub_signature", HINT_sub_signature); if (subSignature != null && flag == CallSiteDescriptor.ARG_OBJ) { /* Turn value into a capture, unless we already have one. */ SixModelObject capture = null; int result; if ((paramFlags & SIG_ELEM_IS_CAPTURE) != 0) { capture = decontValue; } else { SixModelObject meth = Ops.findmethodNonFatal(decontValue, "Capture", tc); if (meth == null) { if (error != null) error[0] = "Could not turn argument into capture"; return BIND_RESULT_FAIL; } Ops.invokeDirect(tc, meth, Ops.invocantCallSite, new Object[] { decontValue }); capture = Ops.result_o(tc.curFrame); } SixModelObject subParams = subSignature .get_attribute_boxed(tc, gcx.Signature, "@!params", HINT_SIG_params); /* Recurse into signature binder. */ CallSiteDescriptor subCsd = explodeCapture(tc, gcx, capture); result = bind(tc, gcx, cf, subParams, subCsd, tc.flatArgs, noNomTypeCheck, error); if (result != BIND_RESULT_OK) { if (error != null) { /* Note in the error message that we're in a sub-signature. */ error[0] += " in sub-signature"; /* Have we a variable name? 
*/ if (varName != null) { error[0] += " of parameter " + varName; } } return result; } } if (RakOps.DEBUG_MODE) System.err.println("bindOneParam NYFI"); return BIND_RESULT_OK; } private static final CallSiteDescriptor exploder = new CallSiteDescriptor(new byte[] { CallSiteDescriptor.ARG_OBJ | CallSiteDescriptor.ARG_FLAT, CallSiteDescriptor.ARG_OBJ | CallSiteDescriptor.ARG_FLAT | CallSiteDescriptor.ARG_NAMED }, null); public static CallSiteDescriptor explodeCapture(ThreadContext tc, RakOps.GlobalExt gcx, SixModelObject capture) { capture = Ops.decont(capture, tc); SixModelObject capType = gcx.Capture; SixModelObject list = capture.get_attribute_boxed(tc, capType, "@!list", HINT_CAPTURE_list); SixModelObject hash = capture.get_attribute_boxed(tc, capType, "%!hash", HINT_CAPTURE_hash); if (list == null) list = gcx.EMPTYARR; if (hash == null) hash = gcx.EMPTYHASH; return exploder.explodeFlattening(tc.curFrame, new Object[] { list, hash }); } /* This takes a signature element and either runs the closure to get a default * value if there is one, or creates an appropriate undefined-ish thingy. */ private static SixModelObject handleOptional(ThreadContext tc, RakOps.GlobalExt gcx, int flags, SixModelObject param, CallFrame cf) { /* Is the "get default from outer" flag set? */ if ((flags & SIG_ELEM_DEFAULT_FROM_OUTER) != 0) { param.get_attribute_native(tc, gcx.Parameter, "$!variable_name", HINT_variable_name); String varName = tc.native_s; CallFrame curOuter = cf.outer; while (curOuter != null) { Integer idx = curOuter.codeRef.staticInfo.oTryGetLexicalIdx(varName); if (idx != null) return curOuter.oLex[idx]; curOuter = curOuter.outer; } return null; } /* Do we have a default value or value closure? */ SixModelObject defaultValue = param.get_attribute_boxed(tc, gcx.Parameter, "$!default_value", HINT_default_value); if (defaultValue != null) { if ((flags & SIG_ELEM_DEFAULT_IS_LITERAL) != 0) { return defaultValue; } else { /* Thunk; run it to get a value. 
*/ Ops.invokeArgless(tc, defaultValue); return Ops.result_o(tc.curFrame); } } /* Otherwise, go by sigil to pick the correct default type of value. */ else { if ((flags & SIG_ELEM_ARRAY_SIGIL) != 0) { SixModelObject res = gcx.Array.st.REPR.allocate(tc, gcx.Array.st); return res; } else if ((flags & SIG_ELEM_HASH_SIGIL) != 0) { SixModelObject res = gcx.Hash.st.REPR.allocate(tc, gcx.Hash.st); return res; } else { param.get_attribute_native(tc, gcx.Parameter, "$!flags", HINT_flags); int paramFlags = (int)tc.native_i; switch (paramFlags & SIG_ELEM_NATIVE_VALUE) { case SIG_ELEM_NATIVE_INT_VALUE: return createBox(tc, gcx, (long)0, CallSiteDescriptor.ARG_INT); case SIG_ELEM_NATIVE_NUM_VALUE: return createBox(tc, gcx, (double)0.0, CallSiteDescriptor.ARG_NUM); case SIG_ELEM_NATIVE_STR_VALUE: return createBox(tc, gcx, null, CallSiteDescriptor.ARG_STR); default: return param.get_attribute_boxed(tc, gcx.Parameter, "$!type", HINT_type); } } } } /* Takes a signature along with positional and named arguments and binds them * into the provided callframe. Returns BIND_RESULT_OK if binding works out, * BIND_RESULT_FAIL if there is a failure and BIND_RESULT_JUNCTION if the * failure was because of a Junction being passed (meaning we need to auto-thread). */ private static final CallSiteDescriptor slurpyFromArgs = new CallSiteDescriptor( new byte[] { CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_OBJ }, null); public static int bind(ThreadContext tc, RakOps.GlobalExt gcx, CallFrame cf, SixModelObject params, CallSiteDescriptor csd, Object[] args, boolean noNomTypeCheck, Object[] error) { int bindFail = BIND_RESULT_OK; int curPosArg = 0; /* If we have a |$foo that's followed by slurpies, then we can suppress * any future arity checks. */ boolean suppressArityFail = false; /* If we do have some named args, we want to make a clone of the hash * to work on. 
We'll delete stuff from it as we bind, and what we have * left over can become the slurpy hash or - if we aren't meant to be * taking one - tell us we have a problem. */ HashMap<String, Integer> namedArgsCopy = csd.nameMap == null ? null : new HashMap<String, Integer>(csd.nameMap); /* Now we'll walk through the signature and go about binding things. */ int numPosArgs = csd.numPositionals; long numParams = params.elems(tc); for (long i = 0; i < numParams; i++) { /* Get parameter, its flags and any named names. */ SixModelObject param = params.at_pos_boxed(tc, i); param.get_attribute_native(tc, gcx.Parameter, "$!flags", HINT_flags); int flags = (int)tc.native_i; SixModelObject namedNames = param.get_attribute_boxed(tc, gcx.Parameter, "@!named_names", HINT_named_names); /* Is it looking for us to bind a capture here? */ if ((flags & SIG_ELEM_IS_CAPTURE) != 0) { /* Capture the arguments from this point forwards into a Capture. * Of course, if there's no variable name we can (cheaply) do pretty * much nothing. 
*/ param.get_attribute_native(tc, gcx.Parameter, "$!variable_name", HINT_variable_name); if (tc.native_s == null) { bindFail = BIND_RESULT_OK; } else { SixModelObject posArgs = gcx.EMPTYARR.clone(tc); for (int k = curPosArg; k < numPosArgs; k++) { switch (csd.argFlags[k]) { case CallSiteDescriptor.ARG_OBJ: posArgs.push_boxed(tc, (SixModelObject)args[k]); break; case CallSiteDescriptor.ARG_INT: posArgs.push_boxed(tc, RakOps.p6box_i((long)args[k], tc)); break; case CallSiteDescriptor.ARG_NUM: posArgs.push_boxed(tc, RakOps.p6box_n((double)args[k], tc)); break; case CallSiteDescriptor.ARG_STR: posArgs.push_boxed(tc, RakOps.p6box_s((String)args[k], tc)); break; } } SixModelObject namedArgs = vmHashOfRemainingNameds(tc, gcx, namedArgsCopy, args); SixModelObject capType = gcx.Capture; SixModelObject capSnap = capType.st.REPR.allocate(tc, capType.st); capSnap.bind_attribute_boxed(tc, capType, "@!list", HINT_CAPTURE_list, posArgs); capSnap.bind_attribute_boxed(tc, capType, "%!hash", HINT_CAPTURE_hash, namedArgs); bindFail = bindOneParam(tc, gcx, cf, param, capSnap, CallSiteDescriptor.ARG_OBJ, noNomTypeCheck, false, error); } if (bindFail != 0) { return bindFail; } else if (i + 1 == numParams) { /* Since a capture acts as "the ultimate slurpy" in a sense, if * this is the last parameter in the signature we can return * success right off the bat. */ return BIND_RESULT_OK; } else { SixModelObject nextParam = params.at_pos_boxed(tc, i + 1); nextParam.get_attribute_native(tc, gcx.Parameter, "$!flags", HINT_flags); if (((int)tc.native_i & (SIG_ELEM_SLURPY_POS | SIG_ELEM_SLURPY_NAMED)) != 0) suppressArityFail = true; } } /* Could it be a named slurpy? 
*/ else if ((flags & SIG_ELEM_SLURPY_NAMED) != 0) { SixModelObject slurpy = vmHashOfRemainingNameds(tc, gcx, namedArgsCopy, args); SixModelObject bindee = gcx.Hash.st.REPR.allocate(tc, gcx.Hash.st); bindee.bind_attribute_boxed(tc, gcx.Map, "$!storage", HINT_ENUMMAP_storage, slurpy); bindFail = bindOneParam(tc, gcx, cf, param, bindee, CallSiteDescriptor.ARG_OBJ, noNomTypeCheck, true, error); if (bindFail != 0) return bindFail; /* Nullify named arguments hash now we've consumed it, to mark all * is well. */ namedArgsCopy = null; } /* Otherwise, maybe it's a positional of some kind. */ else if (namedNames == null) { /* Slurpy or LoL-slurpy? */ if ((flags & (SIG_ELEM_SLURPY_POS | SIG_ELEM_SLURPY_LOL | SIG_ELEM_SLURPY_ONEARG)) != 0) { /* Create Raku array, create VM array of all remaining things, * then store it. */ SixModelObject slurpy = gcx.EMPTYARR.clone(tc); while (curPosArg < numPosArgs) { switch (csd.argFlags[curPosArg]) { case CallSiteDescriptor.ARG_OBJ: slurpy.push_boxed(tc, (SixModelObject)args[curPosArg]); break; case CallSiteDescriptor.ARG_INT: slurpy.push_boxed(tc, RakOps.p6box_i((long)args[curPosArg], tc)); break; case CallSiteDescriptor.ARG_NUM: slurpy.push_boxed(tc, RakOps.p6box_n((double)args[curPosArg], tc)); break; case CallSiteDescriptor.ARG_STR: slurpy.push_boxed(tc, RakOps.p6box_s((String)args[curPosArg], tc)); break; } curPosArg++; } SixModelObject slurpyType = (flags & SIG_ELEM_IS_RAW) != 0 ? gcx.List : gcx.Array; SixModelObject sm = Ops.findmethod(slurpyType, (flags & SIG_ELEM_SLURPY_ONEARG) != 0 ? "from-slurpy-onearg" : (flags & SIG_ELEM_SLURPY_POS) != 0 ? "from-slurpy-flat" : "from-slurpy", tc); Ops.invokeDirect(tc, sm, slurpyFromArgs, new Object[] { slurpyType, slurpy }); SixModelObject bindee = Ops.result_o(tc.curFrame); bindFail = bindOneParam(tc, gcx, cf, param, bindee, CallSiteDescriptor.ARG_OBJ, noNomTypeCheck, false, error); if (bindFail != 0) return bindFail; } /* Otherwise, a positional. */ else { /* Do we have a value? 
*/ if (curPosArg < numPosArgs) { /* Easy - just bind that. */ bindFail = bindOneParam(tc, gcx, cf, param, args[curPosArg], csd.argFlags[curPosArg], noNomTypeCheck, false, error); if (bindFail != 0) return bindFail; curPosArg++; } else { /* No value. If it's optional, fetch a default and bind that; * if not, we're screwed. Note that we never nominal type check * an optional with no value passed. */ if ((flags & SIG_ELEM_IS_OPTIONAL) != 0) { bindFail = bindOneParam(tc, gcx, cf, param, handleOptional(tc, gcx, flags, param, cf), CallSiteDescriptor.ARG_OBJ, false, false, error); if (bindFail != 0) return bindFail; } else { if (error != null) error[0] = arityFail(tc, gcx, params, (int)numParams, numPosArgs, false); return BIND_RESULT_FAIL; } } } } /* Else, it's a non-slurpy named. */ else { /* Try and get hold of value. */ Integer lookup = null; if (namedArgsCopy != null) { long numNames = namedNames.elems(tc); for (long j = 0; j < numNames; j++) { namedNames.at_pos_native(tc, j); String name = tc.native_s; lookup = namedArgsCopy.remove(name); if (lookup != null) break; } } /* Did we get one? */ if (lookup == null) { /* Nope. We'd better hope this param was optional... */ if ((flags & SIG_ELEM_IS_OPTIONAL) != 0) { bindFail = bindOneParam(tc, gcx, cf, param, handleOptional(tc, gcx, flags, param, cf), CallSiteDescriptor.ARG_OBJ, false, false, error); } else if (!suppressArityFail) { if (error != null) { namedNames.at_pos_native(tc, 0); error[0] = "Required named argument '" + tc.native_s + "' not passed"; } return BIND_RESULT_FAIL; } } else { bindFail = bindOneParam(tc, gcx, cf, param, args[lookup >> 3], (byte)(lookup & 7), noNomTypeCheck, false, error); } /* If we got a binding failure, return it. */ if (bindFail != 0) return bindFail; } } /* Do we have any left-over args? */ if (curPosArg < numPosArgs && !suppressArityFail) { /* Oh noes, too many positionals passed. 
*/ if (error != null) error[0] = arityFail(tc, gcx, params, (int)numParams, numPosArgs, true); return BIND_RESULT_FAIL; } if (namedArgsCopy != null && namedArgsCopy.size() > 0) { /* Oh noes, unexpected named args. */ if (error != null) { int numExtra = namedArgsCopy.size(); if (numExtra == 1) { for (String name : namedArgsCopy.keySet()) error[0] = "Unexpected named argument '" + name + "' passed"; } else { boolean first = true; error[0] = numExtra + " unexpected named arguments passed ("; for (String name : namedArgsCopy.keySet()) { if (!first) error[0] += ", "; else first = false; error[0] += name; } error[0] += ")"; } } return BIND_RESULT_FAIL; } /* If we get here, we're done. */ return BIND_RESULT_OK; } /* Takes any nameds we didn't capture yet and makes a VM Hash of them. */ private static SixModelObject vmHashOfRemainingNameds(ThreadContext tc, RakOps.GlobalExt gcx, HashMap<String, Integer> namedArgsCopy, Object[] args) { SixModelObject slurpy = gcx.Mu; if (namedArgsCopy != null) { SixModelObject BOOTHash = tc.gc.BOOTHash; slurpy = BOOTHash.st.REPR.allocate(tc, BOOTHash.st); for (String name : namedArgsCopy.keySet()) { int lookup = namedArgsCopy.get(name); switch (lookup & 7) { case CallSiteDescriptor.ARG_OBJ: slurpy.bind_key_boxed(tc, name, (SixModelObject)args[lookup >> 3]); break; case CallSiteDescriptor.ARG_INT: slurpy.bind_key_boxed(tc, name, RakOps.p6box_i((long)args[lookup >> 3], tc)); break; case CallSiteDescriptor.ARG_NUM: slurpy.bind_key_boxed(tc, name, RakOps.p6box_n((double)args[lookup >> 3], tc)); break; case CallSiteDescriptor.ARG_STR: slurpy.bind_key_boxed(tc, name, RakOps.p6box_s((String)args[lookup >> 3], tc)); break; } } } return slurpy; } }
package gnu.expr;
import gnu.bytecode.*;
import gnu.mapping.Procedure;

/** A Type or a Type expression.
 * Can be used for higher-level types that do not map directly to a Type.
 */
public interface TypeValue extends java.lang.reflect.Type
{
  /** The lower-level Type used to represent instances of this type. */
  public Type getImplementationType();

  /** Emit code for
   * <tt>if (incoming instanceof this_type) decl = incoming ...</tt>.
   * This method is designed for <tt>typeswitch</tt> applications, where this
   * call is the first part of a conditional, so it must be followed
   * by calls to <tt>emitElse</tt> and <tt>emitFi</tt>.
   * @param incoming Contains the value we are testing to see if it has the
   *   type of <tt>this</tt>. If null, use top-of-stack.
   *   May not be null if decl is non-null.
   * @param decl If non-null, assign value after coercion to <tt>Declaration</tt>.
   * @param comp The compilation state.
   */
  public void emitTestIf(Variable incoming, Declaration decl, Compilation comp);

  /** Emit code for <tt>incoming instanceof this_type</tt>.
   * The implementation can use
   * {@link gnu.kawa.reflect.InstanceOf#emitIsInstance InstanceOf
   * .emitIsInstance} which is a convenience method that calls
   * {@link #emitTestIf emitTestIf}.
   * @param incoming Contains the value we are testing to see if it has
   *   the type of 'this'. If null, use top-of-stack.
   * @param comp The compilation state.
   * @param target Where to leave the result.
   */
  public void emitIsInstance(Variable incoming, Compilation comp, Target target);

  /** Get the constructor function for this type.
   * Returns null if there is no constructor function.
   * Also returns null if this extends ClassType or ArrayType and
   * standard Java constructors ({@code <init>} methods) should be used.
   */
  public Procedure getConstructor ();

  /** Return converted expression or null. */
  public Expression convertValue (Expression value);

  /* #ifdef JAVA8 */
  // default public String encodeType(Language language) { return null; }
  /* #else */
  public String encodeType(Language language);
  /* #endif */
}
package krasa.frameswitcher;

import com.intellij.openapi.fileChooser.FileChooser;
import com.intellij.openapi.fileChooser.FileChooserDescriptor;
import com.intellij.openapi.fileChooser.FileChooserDescriptorFactory;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.vfs.VirtualFile;
import org.jdesktop.swingx.combobox.EnumComboBoxModel;

import javax.swing.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

/**
 * Swing form backing the FrameSwitcher settings page: a max-recent-projects
 * field, a popup selection-aid combo box, and an editable list of recent
 * project path filters. Field names are bound to the GUI designer form and
 * must not be renamed.
 */
public class FrameSwitcherGui {

	private DefaultListModel listModel;
	private JPanel root;
	private JTextField maxRecentProjects;
	private JComboBox popupAidComboBox;
	private JList recentProjectFiltersList;
	private JButton addButton;
	private JButton remove;
	private FrameSwitcherSettings settings;
	private EnumComboBoxModel<JBPopupFactory.ActionSelectionAid> comboBoxModel;

	public FrameSwitcherGui(FrameSwitcherSettings settings) {
		this.settings = settings;
		// "Add" opens a folder chooser and appends the chosen paths to the list.
		addButton.addActionListener(new ActionListener() {
			@Override
			public void actionPerformed(ActionEvent e) {
				browseForFile();
			}
		});
		// "Remove" drops the currently selected filter entry, if any.
		remove.addActionListener(new ActionListener() {
			@Override
			public void actionPerformed(ActionEvent e) {
				ListSelectionModel selection = recentProjectFiltersList.getSelectionModel();
				int selectedIndex = selection.getLeadSelectionIndex();
				if (!selection.isSelectionEmpty()) {
					listModel.remove(selectedIndex);
				}
			}
		});
		initModel(settings);
	}

	/** Populates the combo box and the filter list from the given settings. */
	private void initModel(FrameSwitcherSettings settings) {
		comboBoxModel = new EnumComboBoxModel<JBPopupFactory.ActionSelectionAid>(JBPopupFactory.ActionSelectionAid.class);
		comboBoxModel.setSelectedItem(settings.getPopupSelectionAid());
		popupAidComboBox.setModel(comboBoxModel);

		listModel = new DefaultListModel();
		for (String path : settings.getRecentProjectPaths()) {
			listModel.addElement(path);
		}
		recentProjectFiltersList.setModel(listModel);
		recentProjectFiltersList.getSelectionModel().setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
	}

	/** Lets the user pick one or more folders and adds their paths to the list. */
	private void browseForFile() {
		final FileChooserDescriptor descriptor = FileChooserDescriptorFactory.createMultipleFoldersDescriptor();
		descriptor.setTitle("Select parent folder");
		// 10.5 does not have #chooseFile
		VirtualFile[] chosen = FileChooser.chooseFiles(descriptor, null, null);
		if (chosen != null) {
			for (VirtualFile file : chosen) {
				listModel.addElement(file.getPath());
			}
		}
	}

	public JPanel getRoot() {
		return root;
	}

	/** Re-initializes the whole form from the given settings snapshot. */
	public void importFrom(FrameSwitcherSettings data) {
		initModel(data);
		setData(data);
		comboBoxModel.setSelectedItem(data.getPopupSelectionAid());
	}

	public void setData(FrameSwitcherSettings data) {
		maxRecentProjects.setText(data.getMaxRecentProjects());
	}

	/** Copies the displayed values back into the settings object and returns it. */
	public FrameSwitcherSettings exportDisplayedSettings() {
		getData(settings);
		settings.setPopupSelectionAid(comboBoxModel.getSelectedItem());
		settings.setRecentProjectPaths(toListStrings(listModel.toArray()));
		return settings;
	}

	private List<String> toListStrings(final Object[] objects) {
		final ArrayList<String> paths = new ArrayList<String>();
		for (Object o : objects) {
			paths.add((String) o);
		}
		return paths;
	}

	public void getData(FrameSwitcherSettings data) {
		data.setMaxRecentProjects(maxRecentProjects.getText());
	}

	/** True when any displayed value differs from the stored settings. */
	public boolean isModified(FrameSwitcherSettings data) {
		if (isModifiedCustom(data)) {
			return true;
		}
		String text = maxRecentProjects.getText();
		return text != null ? !text.equals(data.getMaxRecentProjects()) : data.getMaxRecentProjects() != null;
	}

	private boolean isModifiedCustom(FrameSwitcherSettings data) {
		return !Arrays.equals(listModel.toArray(), data.getRecentProjectPaths().toArray())
				|| comboBoxModel.getSelectedItem() != data.getPopupSelectionAid();
	}
}
package com.powerdata.openpa.tools.psmfmt;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import com.powerdata.openpa.ACBranch;
import com.powerdata.openpa.ACBranchListIfc;
import com.powerdata.openpa.PAModel;
import com.powerdata.openpa.PAModelException;
import com.powerdata.openpa.PhaseShifterList;
import com.powerdata.openpa.TransformerList;

/**
 * Exports case-level transformer winding flows (from/to MW and MVAr) in PSM
 * format, covering both regular transformers and phase shifters from a PAModel.
 */
public class CaseTransformerWindingOPA extends ExportOpenPA<TransformerList>
{
	@Override
	protected String getPsmFmtName()
	{
		return "PsmCase" + PsmCaseFmtObject.TransformerWinding.toString();
	}

	// Record counts for the two branch families written to the CSV.
	int _ntx, _nps;
	// Snapshot of the column formatters configured for transformers, taken
	// before assign() reconfigures _finfo for phase shifters.
	FmtInfo[] _txi;

	public CaseTransformerWindingOPA(PAModel m) throws PAModelException
	{
		super(null, TransformerWinding.values().length);
		TransformerList tlist = m.getTransformers();
		PhaseShifterList plist = m.getPhaseShifters();
		_ntx = tlist.size();
		_nps = plist.size();
		assign(tlist);
		// Preserve the transformer formatters; the second assign() below
		// overwrites _finfo with the phase-shifter versions.
		_txi = _finfo.clone();
		assign(plist);
	}

	/** Wires the per-column formatters to the given AC branch list. */
	void assign(ACBranchListIfc<? extends ACBranch> list) throws PAModelException
	{
		assign(TransformerWinding.ID, new StringWrap(i -> list.getID(i)+"_wnd"));
		assign(CaseTransformerWinding.FromMW, i -> String.valueOf(list.getFromP(i)));
		assign(CaseTransformerWinding.FromMVAr, i -> String.valueOf(list.getFromQ(i)));
		assign(CaseTransformerWinding.ToMW, i -> String.valueOf(list.getToP(i)));
		assign(CaseTransformerWinding.ToMVAr, i -> String.valueOf(list.getToQ(i)));
	}

	@Override
	void export(File outputdir) throws PAModelException, IOException
	{
		// try-with-resources guarantees the writer is closed (and buffered
		// output flushed) even if a print step throws; the original version
		// leaked the stream on any exception before pw.close().
		try (PrintWriter pw = new PrintWriter(new BufferedWriter(
				new FileWriter(new File(outputdir, getPsmFmtName() + ".csv")))))
		{
			printHeader(pw);
			printData(pw, _txi, _ntx);
			printData(pw, _finfo, _nps);
		}
	}
}
package edu.uci.python.runtime.datatype;

import edu.uci.python.runtime.standardtype.*;

/**
 * Immutable complex-number value used by the Python runtime.
 * Arithmetic follows plain IEEE double semantics; ordering comparisons are
 * rejected at runtime, matching Python's behavior for complex numbers.
 */
public final class PComplex extends PythonBuiltinObject {

    private final double real;
    private final double imag;

    /** The zero complex value, 0+0j. */
    public PComplex() {
        this(0, 0);
    }

    public PComplex(double real, double imaginary) {
        this.real = real;
        this.imag = imaginary;
    }

    /** Copy constructor. */
    public PComplex(PComplex c) {
        this(c.getReal(), c.getImag());
    }

    public PComplex add(PComplex c) {
        return new PComplex(real + c.getReal(), imag + c.getImag());
    }

    public PComplex sub(PComplex c) {
        return new PComplex(real - c.getReal(), imag - c.getImag());
    }

    public PComplex mul(PComplex c) {
        final double productReal = real * c.getReal() - imag * c.getImag();
        final double productImag = real * c.getImag() + imag * c.getReal();
        return new PComplex(productReal, productImag);
    }

    /** Division via multiplication by the conjugate over the squared norm. */
    public PComplex div(PComplex c) {
        final double denom = c.getReal() * c.getReal() + c.getImag() * c.getImag();
        final PComplex conj = c.getConjugate();
        final double quotReal = real * conj.getReal() - imag * conj.getImag();
        final double quotImag = real * conj.getImag() + imag * conj.getReal();
        return new PComplex(quotReal / denom, quotImag / denom);
    }

    public PComplex getConjugate() {
        // Negation flips the sign bit, identical to imag * (-1) for all doubles.
        return new PComplex(real, -imag);
    }

    public boolean equals(PComplex c) {
        return real == c.real && imag == c.imag;
    }

    public boolean notEqual(PComplex c) {
        // De Morgan of equals(); identical for NaN operands as well.
        return !(real == c.real && imag == c.imag);
    }

    @SuppressWarnings({"unused", "static-method"})
    public boolean greaterEqual(PComplex c) {
        throw new RuntimeException("cannot compare complex numbers using <, <=, >, >=");
    }

    @SuppressWarnings({"unused", "static-method"})
    public boolean greaterThan(PComplex c) {
        throw new RuntimeException("cannot compare complex numbers using <, <=, >, >=");
    }

    @SuppressWarnings({"unused", "static-method"})
    public boolean lessEqual(PComplex c) {
        throw new RuntimeException("cannot compare complex numbers using <, <=, >, >=");
    }

    @SuppressWarnings({"unused", "static-method"})
    public boolean lessThan(PComplex c) {
        throw new RuntimeException("cannot compare complex numbers using <, <=, >, >=");
    }

    public double getReal() {
        return real;
    }

    public double getImag() {
        return imag;
    }

    @Override
    public String toString() {
        // Pure-imaginary values print without parentheses, like Python.
        if (real == 0.) {
            return toString(imag) + "j";
        }
        if (imag >= 0) {
            return String.format("(%s+%sj)", toString(real), toString(imag));
        }
        return String.format("(%s-%sj)", toString(real), toString(-imag));
    }

    /** Formats integral doubles without a fractional part, others as-is. */
    private static String toString(double value) {
        final boolean integral =
                value == Math.floor(value) && value <= Long.MAX_VALUE && value >= Long.MIN_VALUE;
        return integral ? Long.toString((long) value) : Double.toString(value);
    }
}
import java.io.*; import java.net.*; import java.sql.SQLException; import java.util.ArrayList; import DatabaseModel.DatabaseConnection; import DatabaseModel.Tables.Product; public class Server { public static int PortNumber = 2035; private static ServerSocket MyService; Server() { // TODO Auto-generated method stub try { Run(); } catch (IOException exception) { System.out.println(exception); if (MyService != null && MyService.isClosed() == false) { try { MyService.close(); } catch (IOException ioCloseException) { System.out.println(ioCloseException); } } } } public static void Run() throws IOException { MyService = new ServerSocket(PortNumber); DatabaseConnection db; ArrayList<Product> products; try { db = new DatabaseConnection(true); } catch (SQLException exception) { System.out.println("Failed to connect to database - " + exception); return; } try { products = db.select(Product.class); } catch (SQLException exception) { System.out.println("Failed to get products for client - " + exception); return; } // Next up send the individual OutputFileStream bytes to the client. And parse it there. while(true) { Socket s=MyService.accept(); ObjectOutputStream oos = new ObjectOutputStream(s.getOutputStream()); oos.writeObject(products); oos.flush(); oos.close(); s.close(); } } }
package com.yahoo.vespa.hosted.provision.restapi;

import com.yahoo.component.Version;
import com.yahoo.config.provision.ApplicationId;
import com.yahoo.config.provision.DockerImage;
import com.yahoo.config.provision.Flavor;
import com.yahoo.config.provision.HostFilter;
import com.yahoo.config.provision.NodeFlavors;
import com.yahoo.config.provision.NodeResources;
import com.yahoo.config.provision.NodeType;
import com.yahoo.config.provision.TenantName;
import com.yahoo.container.jdisc.HttpRequest;
import com.yahoo.container.jdisc.HttpResponse;
import com.yahoo.container.jdisc.LoggingRequestHandler;
import com.yahoo.io.IOUtils;
import com.yahoo.restapi.ErrorResponse;
import com.yahoo.restapi.MessageResponse;
import com.yahoo.restapi.Path;
import com.yahoo.restapi.ResourceResponse;
import com.yahoo.restapi.SlimeJsonResponse;
import com.yahoo.slime.ArrayTraverser;
import com.yahoo.slime.Cursor;
import com.yahoo.slime.Inspector;
import com.yahoo.slime.Slime;
import com.yahoo.slime.SlimeUtils;
import com.yahoo.vespa.hosted.provision.NoSuchNodeException;
import com.yahoo.vespa.hosted.provision.Node;
import com.yahoo.vespa.hosted.provision.NodeMutex;
import com.yahoo.vespa.hosted.provision.NodeRepository;
import com.yahoo.vespa.hosted.provision.applications.Application;
import com.yahoo.vespa.hosted.provision.autoscale.Load;
import com.yahoo.vespa.hosted.provision.autoscale.MetricsDb;
import com.yahoo.vespa.hosted.provision.node.Address;
import com.yahoo.vespa.hosted.provision.node.Agent;
import com.yahoo.vespa.hosted.provision.node.IP;
import com.yahoo.vespa.hosted.provision.node.filter.ApplicationFilter;
import com.yahoo.vespa.hosted.provision.node.filter.NodeFilter;
import com.yahoo.vespa.hosted.provision.node.filter.NodeHostFilter;
import com.yahoo.vespa.hosted.provision.node.filter.NodeOsVersionFilter;
import com.yahoo.vespa.hosted.provision.node.filter.NodeTypeFilter;
import com.yahoo.vespa.hosted.provision.node.filter.ParentHostFilter;
import com.yahoo.vespa.hosted.provision.node.filter.StateFilter;
import com.yahoo.vespa.hosted.provision.restapi.NodesResponse.ResponseType;
import com.yahoo.vespa.orchestrator.Orchestrator;
import com.yahoo.yolean.Exceptions;

import javax.inject.Inject;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.time.Duration;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import java.util.logging.Level;
import java.util.stream.Collectors;

import static com.yahoo.slime.SlimeUtils.optionalString;

/**
 * The implementation of the /nodes/v2 API.
 * See NodesV2ApiTest for documentation.
 *
 * @author bratseth
 */
public class NodesV2ApiHandler extends LoggingRequestHandler {

    private final Orchestrator orchestrator;
    private final NodeRepository nodeRepository;
    private final MetricsDb metricsDb;
    private final NodeFlavors nodeFlavors;

    @Inject
    public NodesV2ApiHandler(LoggingRequestHandler.Context parentCtx, Orchestrator orchestrator,
                             NodeRepository nodeRepository, MetricsDb metricsDb, NodeFlavors flavors) {
        super(parentCtx);
        this.orchestrator = orchestrator;
        this.nodeRepository = nodeRepository;
        this.metricsDb = metricsDb;
        this.nodeFlavors = flavors;
    }

    // Top-level dispatch: maps HTTP method to a handler and translates
    // known exception types to the matching HTTP error responses.
    @Override
    public HttpResponse handle(HttpRequest request) {
        try {
            switch (request.getMethod()) {
                case GET: return handleGET(request);
                case PUT: return handlePUT(request);
                // POST doubles as PATCH for clients whose HTTP stack cannot send PATCH.
                case POST: return isPatchOverride(request) ? handlePATCH(request) : handlePOST(request);
                case DELETE: return handleDELETE(request);
                case PATCH: return handlePATCH(request);
                default: return ErrorResponse.methodNotAllowed("Method '" + request.getMethod() + "' is not supported");
            }
        }
        catch (NotFoundException | NoSuchNodeException e) {
            return ErrorResponse.notFoundError(Exceptions.toMessageString(e));
        }
        catch (IllegalArgumentException e) {
            return ErrorResponse.badRequest(Exceptions.toMessageString(e));
        }
        catch (RuntimeException e) {
            log.log(Level.WARNING, "Unexpected error handling '" + request.getUri() + "'", e);
            return ErrorResponse.internalServerError(Exceptions.toMessageString(e));
        }
    }

    // Routes GET requests; single-node and per-state lookups use raw prefix
    // matching on the path string rather than the Path template matcher.
    private HttpResponse handleGET(HttpRequest request) {
        Path path = new Path(request.getUri());
        String pathS = request.getUri().getPath();
        if (path.matches(    "/nodes/v2")) return new ResourceResponse(request.getUri(), "node", "state", "acl", "command", "archive", "locks", "maintenance", "upgrade", "capacity", "application", "stats");
        if (path.matches(    "/nodes/v2/node")) return new NodesResponse(ResponseType.nodeList, request, orchestrator, nodeRepository);
        if (pathS.startsWith("/nodes/v2/node/")) return new NodesResponse(ResponseType.singleNode, request, orchestrator, nodeRepository);
        if (path.matches(    "/nodes/v2/state")) return new NodesResponse(ResponseType.stateList, request, orchestrator, nodeRepository);
        if (pathS.startsWith("/nodes/v2/state/")) return new NodesResponse(ResponseType.nodesInStateList, request, orchestrator, nodeRepository);
        if (path.matches(    "/nodes/v2/acl/{hostname}")) return new NodeAclResponse(request, nodeRepository, path.get("hostname"));
        if (path.matches(    "/nodes/v2/command")) return new ResourceResponse(request.getUri(), "restart", "reboot");
        if (path.matches(    "/nodes/v2/archive")) return new ArchiveResponse(nodeRepository);
        if (path.matches(    "/nodes/v2/locks")) return new LocksResponse();
        if (path.matches(    "/nodes/v2/maintenance")) return new JobsResponse(nodeRepository.jobControl());
        if (path.matches(    "/nodes/v2/upgrade")) return new UpgradeResponse(nodeRepository.infrastructureVersions(), nodeRepository.osVersions(), nodeRepository.containerImages());
        if (path.matches(    "/nodes/v2/capacity")) return new HostCapacityResponse(nodeRepository, request);
        if (path.matches(    "/nodes/v2/application")) return applicationList(request.getUri());
        if (path.matches(    "/nodes/v2/application/{applicationId}")) return application(path.get("applicationId"), request.getUri());
        if (path.matches(    "/nodes/v2/stats")) return stats();
        throw new NotFoundException("Nothing at " + path);
    }

    // PUT moves one node (or a node and its children, for the recursive
    // operations) between repository states, attributed to the operator agent.
    private HttpResponse handlePUT(HttpRequest request) {
        Path path = new Path(request.getUri());
        if (path.matches("/nodes/v2/state/ready/{hostname}")) {
            nodeRepository.nodes().markNodeAvailableForNewAllocation(path.get("hostname"), Agent.operator, "Readied through the nodes/v2 API");
            return new MessageResponse("Moved " + path.get("hostname") + " to " + Node.State.ready);
        }
        else if (path.matches("/nodes/v2/state/failed/{hostname}")) {
            List<Node> failedNodes = nodeRepository.nodes().failRecursively(path.get("hostname"), Agent.operator, "Failed through the nodes/v2 API");
            return new MessageResponse("Moved " + hostnamesAsString(failedNodes) + " to " + Node.State.failed);
        }
        else if (path.matches("/nodes/v2/state/parked/{hostname}")) {
            List<Node> parkedNodes = nodeRepository.nodes().parkRecursively(path.get("hostname"), Agent.operator, "Parked through the nodes/v2 API");
            return new MessageResponse("Moved " + hostnamesAsString(parkedNodes) + " to " + Node.State.parked);
        }
        else if (path.matches("/nodes/v2/state/dirty/{hostname}")) {
            List<Node> dirtiedNodes = nodeRepository.nodes().deallocateRecursively(path.get("hostname"), Agent.operator, "Dirtied through the nodes/v2 API");
            return new MessageResponse("Moved " + hostnamesAsString(dirtiedNodes) + " to " + Node.State.dirty);
        }
        else if (path.matches("/nodes/v2/state/active/{hostname}")) {
            nodeRepository.nodes().reactivate(path.get("hostname"), Agent.operator, "Reactivated through nodes/v2 API");
            return new MessageResponse("Moved " + path.get("hostname") + " to " + Node.State.active);
        }
        else if (path.matches("/nodes/v2/state/breakfixed/{hostname}")) {
            List<Node> breakfixedNodes = nodeRepository.nodes().breakfixRecursively(path.get("hostname"), Agent.operator, "Breakfixed through the nodes/v2 API");
            return new MessageResponse("Moved " + hostnamesAsString(breakfixedNodes) + " to " + Node.State.breakfixed);
        }
        throw new NotFoundException("Cannot put to path '" + path + "'");
    }

    // PATCH updates node fields, application settings, a tenant's archive URI,
    // or per-node-type target versions. Patchers are AutoCloseable: they hold
    // repository locks released by try-with-resources.
    private HttpResponse handlePATCH(HttpRequest request) {
        Path path = new Path(request.getUri());
        if (path.matches("/nodes/v2/node/{hostname}")) {
            try (NodePatcher patcher = new NodePatcher(nodeFlavors, request.getData(), nodeFromHostname(path.get("hostname")), nodeRepository)) {
                var patchedNodes = patcher.apply();
                nodeRepository.nodes().write(patchedNodes, patcher.nodeMutexOfHost());
                return new MessageResponse("Updated " + patcher.nodeMutexOfHost().node().hostname());
            }
        }
        else if (path.matches("/nodes/v2/application/{applicationId}")) {
            try (ApplicationPatcher patcher = new ApplicationPatcher(request.getData(),
                                                                     ApplicationId.fromFullString(path.get("applicationId")),
                                                                     nodeRepository)) {
                nodeRepository.applications().put(patcher.apply(), patcher.lock());
                return new MessageResponse("Updated " + patcher.application());
            }
        }
        else if (path.matches("/nodes/v2/archive/{tenant}")) {
            String uri = requiredField(toSlime(request), "uri", Inspector::asString);
            return setTenantArchiveUri(path.get("tenant"), Optional.of(uri));
        }
        else if (path.matches("/nodes/v2/upgrade/{nodeType}")) {
            return setTargetVersions(path.get("nodeType"), toSlime(request));
        }
        throw new NotFoundException("Nothing at '" + path + "'");
    }

    // POST triggers commands (restart/reboot), adds nodes, runs maintenance
    // jobs, or requests a firmware check.
    private HttpResponse handlePOST(HttpRequest request) {
        Path path = new Path(request.getUri());
        if (path.matches("/nodes/v2/command/restart")) {
            int restartCount = nodeRepository.nodes().restart(toNodeFilter(request)).size();
            return new MessageResponse("Scheduled restart of " + restartCount + " matching nodes");
        }
        if (path.matches("/nodes/v2/command/reboot")) {
            int rebootCount = nodeRepository.nodes().reboot(toNodeFilter(request)).size();
            return new MessageResponse("Scheduled reboot of " + rebootCount + " matching nodes");
        }
        if (path.matches("/nodes/v2/node")) {
            int addedNodes = addNodes(toSlime(request));
            return new MessageResponse("Added " + addedNodes + " nodes to the provisioned state");
        }
        if (path.matches("/nodes/v2/maintenance/run/{job}")) return runJob(path.get("job"));
        if (path.matches("/nodes/v2/upgrade/firmware")) return requestFirmwareCheckResponse();
        throw new NotFoundException("Nothing at path '" + request.getUri().getPath() + "'");
    }

    private HttpResponse handleDELETE(HttpRequest request) {
        Path path = new Path(request.getUri());
        if (path.matches("/nodes/v2/node/{hostname}")) return deleteNode(path.get("hostname"));
        if (path.matches("/nodes/v2/archive/{tenant}")) return setTenantArchiveUri(path.get("tenant"), Optional.empty());
        if (path.matches("/nodes/v2/upgrade/firmware")) return cancelFirmwareCheckResponse();
        throw new NotFoundException("Nothing at path '" + request.getUri().getPath() + "'");
    }

    private HttpResponse runJob(String job) {
        nodeRepository.jobControl().run(job);
        return new MessageResponse("Executed job '" + job + "'");
    }

    // Deprovisioned nodes are forgotten entirely; anything else is removed
    // recursively (children included). The NodeMutex lock covers the whole operation.
    private HttpResponse deleteNode(String hostname) {
        Optional<NodeMutex> nodeMutex = nodeRepository.nodes().lockAndGet(hostname);
        if (nodeMutex.isEmpty()) throw new NotFoundException("No node with hostname '" + hostname + "'");
        try (var lock = nodeMutex.get()) {
            if (lock.node().state() == Node.State.deprovisioned) {
                nodeRepository.nodes().forget(lock.node());
                return new MessageResponse("Permanently removed " + hostname);
            } else {
                List<Node> removedNodes = nodeRepository.nodes().removeRecursively(hostname);
                return new MessageResponse("Removed " + removedNodes.stream().map(Node::hostname).collect(Collectors.joining(", ")));
            }
        }
    }

    private Node nodeFromHostname(String hostname) {
        return nodeRepository.nodes().node(hostname).orElseThrow(() ->
                new NotFoundException("No node found with hostname " + hostname));
    }

    /** Parses the given JSON body into nodes and adds them; returns the count added. */
    public int addNodes(Inspector inspector) {
        List<Node> nodes = createNodesFromSlime(inspector);
        return nodeRepository.nodes().addNodes(nodes, Agent.operator).size();
    }

    // Reads the request body (capped at 1 MB) into a Slime JSON tree.
    private Inspector toSlime(HttpRequest request) {
        try {
            byte[] jsonBytes = IOUtils.readBytes(request.getData(), 1000 * 1000);
            return SlimeUtils.jsonToSlime(jsonBytes).get();
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    }

    private List<Node> createNodesFromSlime(Inspector object) {
        List<Node> nodes = new ArrayList<>();
        object.traverse((ArrayTraverser) (int i, Inspector item) -> nodes.add(createNode(item)));
        return nodes;
    }

    // Builds a Node from one JSON element: IP config, hostname, flavor/resources,
    // node type, plus a handful of optional attributes.
    private Node createNode(Inspector inspector) {
        Set<String> ipAddresses = new HashSet<>();
        inspector.field("ipAddresses").traverse((ArrayTraverser) (i, item) -> ipAddresses.add(item.asString()));
        Set<String> ipAddressPool = new HashSet<>();
        inspector.field("additionalIpAddresses").traverse((ArrayTraverser) (i, item) -> ipAddressPool.add(item.asString()));
        List<Address> addressPool = new ArrayList<>();
        inspector.field("additionalHostnames").traverse((ArrayTraverser) (i, item) -> addressPool.add(new Address(item.asString())));

        Node.Builder builder = Node.create(inspector.field("openStackId").asString(),
                                           IP.Config.of(ipAddresses, ipAddressPool, addressPool),
                                           inspector.field("hostname").asString(),
                                           flavorFromSlime(inspector),
                                           nodeTypeFromSlime(inspector.field("type")));
        optionalString(inspector.field("parentHostname")).ifPresent(builder::parentHostname);
        optionalString(inspector.field("modelName")).ifPresent(builder::modelName);
        optionalString(inspector.field("reservedTo")).map(TenantName::from).ifPresent(builder::reservedTo);
        optionalString(inspector.field("exclusiveTo")).map(ApplicationId::fromSerializedForm).ifPresent(builder::exclusiveTo);
        optionalString(inspector.field("switchHostname")).ifPresent(builder::switchHostname);
        return builder.build();
    }

    // Either a fully specified "resources" object (no named flavor), or a named
    // flavor with optional per-field resource overrides applied on top.
    private Flavor flavorFromSlime(Inspector inspector) {
        Inspector flavorInspector = inspector.field("flavor");
        Inspector resourcesInspector = inspector.field("resources");
        if ( ! flavorInspector.valid()) {
            return new Flavor(new NodeResources(
                    requiredField(resourcesInspector, "vcpu", Inspector::asDouble),
                    requiredField(resourcesInspector, "memoryGb", Inspector::asDouble),
                    requiredField(resourcesInspector, "diskGb", Inspector::asDouble),
                    requiredField(resourcesInspector, "bandwidthGbps", Inspector::asDouble),
                    optionalString(resourcesInspector.field("diskSpeed")).map(NodeResourcesSerializer::diskSpeedFrom).orElse(NodeResources.DiskSpeed.getDefault()),
                    optionalString(resourcesInspector.field("storageType")).map(NodeResourcesSerializer::storageTypeFrom).orElse(NodeResources.StorageType.getDefault())));
        }

        Flavor flavor = nodeFlavors.getFlavorOrThrow(flavorInspector.asString());
        if (resourcesInspector.valid()) {
            if (resourcesInspector.field("vcpu").valid())
                flavor = flavor.with(flavor.resources().withVcpu(resourcesInspector.field("vcpu").asDouble()));
            if (resourcesInspector.field("memoryGb").valid())
                flavor = flavor.with(flavor.resources().withMemoryGb(resourcesInspector.field("memoryGb").asDouble()));
            if (resourcesInspector.field("diskGb").valid())
                flavor = flavor.with(flavor.resources().withDiskGb(resourcesInspector.field("diskGb").asDouble()));
            if (resourcesInspector.field("bandwidthGbps").valid())
                flavor = flavor.with(flavor.resources().withBandwidthGbps(resourcesInspector.field("bandwidthGbps").asDouble()));
            if (resourcesInspector.field("diskSpeed").valid())
                flavor = flavor.with(flavor.resources().with(NodeResourcesSerializer.diskSpeedFrom(resourcesInspector.field("diskSpeed").asString())));
            if (resourcesInspector.field("storageType").valid())
                flavor = flavor.with(flavor.resources().with(NodeResourcesSerializer.storageTypeFrom(resourcesInspector.field("storageType").asString())));
        }
        return flavor;
    }

    private static <T> T requiredField(Inspector inspector, String fieldName, Function<Inspector, T> valueExtractor) {
        Inspector field = inspector.field(fieldName);
        if (!field.valid()) throw new IllegalArgumentException("Required field '" + fieldName + "' is missing");
        return valueExtractor.apply(field);
    }

    private NodeType nodeTypeFromSlime(Inspector object) {
        if (! object.valid()) return NodeType.tenant; // default
        return NodeSerializer.typeFrom(object.asString());
    }

    // Builds a conjunction of node filters from the request's query properties.
    public static NodeFilter toNodeFilter(HttpRequest request) {
        NodeFilter filter = NodeHostFilter.from(HostFilter.from(request.getProperty("hostname"),
                                                                request.getProperty("flavor"),
                                                                request.getProperty("clusterType"),
                                                                request.getProperty("clusterId")));
        filter = ApplicationFilter.from(request.getProperty("application"), filter);
        filter = StateFilter.from(request.getProperty("state"), request.getBooleanProperty("includeDeprovisioned"), filter);
        filter = NodeTypeFilter.from(request.getProperty("type"), filter);
        filter = ParentHostFilter.from(request.getProperty("parentHost"), filter);
        filter = NodeOsVersionFilter.from(request.getProperty("osVersion"), filter);
        return filter;
    }

    private static boolean isPatchOverride(HttpRequest request) {
        // Since Jersey's HttpUrlConnector does not support PATCH we support this by override this on POST requests.
        String override = request.getHeader("X-HTTP-Method-Override");
        if (override != null) {
            if (override.equals("PATCH")) {
                return true;
            } else {
                String msg = String.format("Illegal X-HTTP-Method-Override header for POST request. 
Accepts 'PATCH' but got '%s'", override); throw new IllegalArgumentException(msg); } } return false; } private MessageResponse setTargetVersions(String nodeTypeS, Inspector inspector) { NodeType nodeType = NodeType.valueOf(nodeTypeS.toLowerCase()); List<String> messageParts = new ArrayList<>(4); boolean force = inspector.field("force").asBool(); Inspector versionField = inspector.field("version"); Inspector osVersionField = inspector.field("osVersion"); Inspector containerImageField = inspector.field("dockerImage"); Inspector upgradeBudgetField = inspector.field("upgradeBudget"); if (versionField.valid()) { Version version = Version.fromString(versionField.asString()); nodeRepository.infrastructureVersions().setTargetVersion(nodeType, version, force); messageParts.add("version to " + version.toFullString()); } if (osVersionField.valid()) { String v = osVersionField.asString(); if (v.isEmpty()) { nodeRepository.osVersions().removeTarget(nodeType); messageParts.add("osVersion to null"); } else { Version osVersion = Version.fromString(v); Optional<Duration> upgradeBudget = Optional.of(upgradeBudgetField) .filter(Inspector::valid) .map(Inspector::asString) .map(s -> { try { return Duration.parse(s); } catch (Exception e) { throw new IllegalArgumentException("Invalid duration '" + s + "'", e); } }); nodeRepository.osVersions().setTarget(nodeType, osVersion, upgradeBudget, force); messageParts.add("osVersion to " + osVersion.toFullString()); upgradeBudget.ifPresent(d -> messageParts.add("upgradeBudget to " + d)); } } if (containerImageField.valid()) { Optional<DockerImage> dockerImage = Optional.of(containerImageField.asString()) .filter(s -> !s.isEmpty()) .map(DockerImage::fromString); nodeRepository.containerImages().setImage(nodeType, dockerImage); messageParts.add("container image to " + dockerImage.map(DockerImage::asString).orElse(null)); } if (messageParts.isEmpty()) { throw new IllegalArgumentException("At least one of 'version', 'osVersion' or 'dockerImage' must 
be set"); } return new MessageResponse("Set " + String.join(", ", messageParts) + " for nodes of type " + nodeType); } private MessageResponse cancelFirmwareCheckResponse() { nodeRepository.firmwareChecks().cancel(); return new MessageResponse("Cancelled outstanding requests for firmware checks"); } private MessageResponse requestFirmwareCheckResponse() { nodeRepository.firmwareChecks().request(); return new MessageResponse("Will request firmware checks on all hosts."); } private HttpResponse setTenantArchiveUri(String tenant, Optional<String> archiveUri) { nodeRepository.archiveUris().setArchiveUri(TenantName.from(tenant), archiveUri); return new MessageResponse(archiveUri.map(a -> "Updated").orElse("Removed") + " archive URI for " + tenant); } private static String hostnamesAsString(List<Node> nodes) { return nodes.stream().map(Node::hostname).sorted().collect(Collectors.joining(", ")); } private HttpResponse applicationList(URI uri) { Slime slime = new Slime(); Cursor root = slime.setObject(); Cursor applications = root.setArray("applications"); for (ApplicationId id : nodeRepository.applications().ids()) { Cursor application = applications.addObject(); application.setString("url", withPath("/nodes/v2/application/" + id.toFullString(), uri).toString()); application.setString("id", id.toFullString()); } return new SlimeJsonResponse(slime); } private HttpResponse application(String idString, URI uri) { ApplicationId id = ApplicationId.fromFullString(idString); Optional<Application> application = nodeRepository.applications().get(id); if (application.isEmpty()) return ErrorResponse.notFoundError("No application '" + id + "'"); Slime slime = ApplicationSerializer.toSlime(application.get(), nodeRepository.nodes().list(Node.State.active).owner(id), metricsDb, nodeRepository, withPath("/nodes/v2/applications/" + id, uri)); return new SlimeJsonResponse(slime); } private HttpResponse stats() { var stats = nodeRepository.computeStats(); Slime slime = new Slime(); Cursor 
root = slime.setObject(); toSlime(stats.load(), root.setObject("load")); toSlime(stats.activeLoad(), root.setObject("activeLoad")); Cursor applicationsArray = root.setArray("applications"); for (int i = 0; i <= 5; i++) { if (i >= stats.applicationStats().size()) break; var applicationStats = stats.applicationStats().get(i); Cursor applicationObject = applicationsArray.addObject(); applicationObject.setString("id", applicationStats.id().toFullString()); toSlime(applicationStats.load(), applicationObject.setObject("load")); applicationObject.setDouble("cost", applicationStats.cost()); applicationObject.setDouble("unutilizedCost", applicationStats.unutilizedCost()); } return new SlimeJsonResponse(slime); } private void toSlime(Load load, Cursor object) { object.setDouble("cpu", load.cpu()); object.setDouble("memory", load.memory()); object.setDouble("disk", load.disk()); } /** Returns a copy of the given URI with the host and port from the given URI and the path set to the given path */ private URI withPath(String newPath, URI uri) { try { return new URI(uri.getScheme(), uri.getUserInfo(), uri.getHost(), uri.getPort(), newPath, null, null); } catch (URISyntaxException e) { throw new RuntimeException("Will not happen", e); } } }
package it.unibz.krdb.obda.owlrefplatform.core; import it.unibz.krdb.obda.model.OBDADataFactory; import it.unibz.krdb.obda.model.OBDADataSource; import it.unibz.krdb.obda.model.OBDAMappingAxiom; import it.unibz.krdb.obda.model.OBDAModel; import it.unibz.krdb.obda.model.OBDAQueryReasoner; import it.unibz.krdb.obda.model.OBDAStatement; import it.unibz.krdb.obda.model.Predicate; import it.unibz.krdb.obda.model.impl.OBDADataFactoryImpl; import it.unibz.krdb.obda.model.impl.RDBMSourceParameterConstants; import it.unibz.krdb.obda.owlapi.OBDAOWLReasoner; import it.unibz.krdb.obda.owlapi.ReformulationPlatformPreferences; import it.unibz.krdb.obda.owlrefplatform.core.abox.RDBMSDataRepositoryManager; import it.unibz.krdb.obda.owlrefplatform.core.abox.RDBMSDirectDataRepositoryManager; import it.unibz.krdb.obda.owlrefplatform.core.abox.RDBMSSIRepositoryManager; import it.unibz.krdb.obda.owlrefplatform.core.abox.VirtualABoxMaterializer; import it.unibz.krdb.obda.owlrefplatform.core.abox.VirtualABoxMaterializer.VirtualTriplePredicateIterator; import it.unibz.krdb.obda.owlrefplatform.core.mappingprocessing.MappingVocabularyTranslator; import it.unibz.krdb.obda.owlrefplatform.core.ontology.Assertion; import it.unibz.krdb.obda.owlrefplatform.core.ontology.Axiom; import it.unibz.krdb.obda.owlrefplatform.core.ontology.Description; import it.unibz.krdb.obda.owlrefplatform.core.ontology.Ontology; import it.unibz.krdb.obda.owlrefplatform.core.ontology.OntologyFactory; import it.unibz.krdb.obda.owlrefplatform.core.ontology.imp.OntologyFactoryImpl; import it.unibz.krdb.obda.owlrefplatform.core.queryevaluation.EvaluationEngine; import it.unibz.krdb.obda.owlrefplatform.core.queryevaluation.JDBCEngine; import it.unibz.krdb.obda.owlrefplatform.core.queryevaluation.JDBCUtility; import it.unibz.krdb.obda.owlrefplatform.core.reformulation.DLRPerfectReformulator; import it.unibz.krdb.obda.owlrefplatform.core.reformulation.QueryRewriter; import 
it.unibz.krdb.obda.owlrefplatform.core.reformulation.QueryVocabularyValidator; import it.unibz.krdb.obda.owlrefplatform.core.reformulation.TreeRedReformulator; import it.unibz.krdb.obda.owlrefplatform.core.srcquerygeneration.ComplexMappingSQLGenerator; import it.unibz.krdb.obda.owlrefplatform.core.srcquerygeneration.SourceQueryGenerator; import it.unibz.krdb.obda.owlrefplatform.core.tboxprocessing.EquivalenceTBoxOptimizer; import it.unibz.krdb.obda.owlrefplatform.core.tboxprocessing.SigmaTBoxOptimizer; import it.unibz.krdb.obda.owlrefplatform.core.translator.OWLAPI2ABoxIterator; import it.unibz.krdb.obda.owlrefplatform.core.translator.OWLAPI2Translator; import it.unibz.krdb.obda.owlrefplatform.core.translator.OWLAPI2VocabularyExtractor; import it.unibz.krdb.obda.owlrefplatform.core.unfolding.ComplexMappingUnfolder; import it.unibz.krdb.obda.owlrefplatform.core.unfolding.UnfoldingMechanism; import it.unibz.krdb.obda.owlrefplatform.core.viewmanager.MappingViewManager; import java.net.URI; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.Map; import java.util.Set; import org.semanticweb.owl.inference.MonitorableOWLReasoner; import org.semanticweb.owl.inference.OWLReasonerException; import org.semanticweb.owl.model.OWLClass; import org.semanticweb.owl.model.OWLConstant; import org.semanticweb.owl.model.OWLDataProperty; import org.semanticweb.owl.model.OWLDataPropertyExpression; import org.semanticweb.owl.model.OWLDataRange; import org.semanticweb.owl.model.OWLDescription; import org.semanticweb.owl.model.OWLEntity; import org.semanticweb.owl.model.OWLIndividual; import org.semanticweb.owl.model.OWLObjectProperty; import org.semanticweb.owl.model.OWLObjectPropertyExpression; import org.semanticweb.owl.model.OWLOntology; import org.semanticweb.owl.model.OWLOntologyManager; import org.semanticweb.owl.util.NullProgressMonitor; import org.semanticweb.owl.util.ProgressMonitor; import 
org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * The OBDAOWLReformulationPlatform implements the OWL reasoner interface and is
 * the implementation of the reasoning method in the reformulation project.
 *
 * Most OWLReasoner inference methods are unimplemented stubs; the real work happens in
 * {@link #loadOntologies(Set)} (translation to the internal ontology representation) and
 * {@link #classify()} (setup of the rewriting/unfolding/evaluation pipeline).
 */
public class QuestOWL implements OBDAOWLReasoner, OBDAQueryReasoner, MonitorableOWLReasoner {

	private OWLOntologyManager ontoManager = null;

	/* The merge and translation of all loaded ontologies */
	private Ontology translatedOntologyMerge = null;

	private TechniqueWrapper techwrapper = null;

	private HashSet<OWLOntology> loadedOntologies = null;

	private ProgressMonitor progressMonitor = new NullProgressMonitor();

	private OBDAModel obdaModel = null;

	// Loggers should be cached per class, not per instance
	private static final Logger log = LoggerFactory.getLogger(QuestOWL.class);

	// True once classify() has completed successfully; reset by any model/ontology change
	private boolean isClassified = false;

	private ReformulationPlatformPreferences preferences = null;

	// Assigned by classify(); previously this field was shadowed by a local and never set
	private QueryVocabularyValidator validator = null;

	// private Ontology aboxDependencies = null;

	private Ontology reducedOntology = null;

	OWLAPI2VocabularyExtractor vext = new OWLAPI2VocabularyExtractor();

	private OntologyFactory ofac = OntologyFactoryImpl.getInstance();

	/*** Optimization flags */
	// private boolean optimizeEquivalences = true;
	private boolean optimizeSigma = true;

	public QuestOWL(OWLOntologyManager manager) {
		ontoManager = manager;
	}

	public Ontology getReducedOntology() {
		return reducedOntology;
	}

	public Ontology getABoxDependencies() {
		return null;
	}

	@Override
	public void loadOBDAModel(OBDAModel model) {
		isClassified = false; // a new model invalidates any previous classification
		obdaModel = model;
	}

	public void loadDependencies(Ontology sigma) {
		techwrapper.loadDependencies(sigma);
	}

	/**
	 * Set the technique wrapper which specifies which rewriting, unfolding and
	 * evaluation techniques are used.
	 *
	 * @param newTechnique the technique wrapper
	 */
	public void setTechniqueWrapper(TechniqueWrapper newTechnique) {
		techwrapper = newTechnique;
	}

	public TechniqueWrapper getTechniqueWrapper() {
		return techwrapper;
	}

	public void setPreferences(ReformulationPlatformPreferences preferences) {
		this.preferences = preferences;
	}

	/**
	 * Returns a statement for querying; only available after a successful {@link #classify()}.
	 *
	 * @throws Exception if the engine has not been set up and classified yet
	 */
	@Override
	public OBDAStatement getStatement() throws Exception {
		if (techwrapper != null && isClassified) {
			return techwrapper.getStatement();
		}
		throw new Exception(
				"Error, the technique wrapper has not been setup up yet. Make sure you have loaded the OWL Ontologies and the OBDA model, and classified before calling this method.");
	}

	public boolean isConsistent(OWLOntology ontology) throws OWLReasonerException {
		return true; // consistency checking not implemented
	}

	public Ontology getOntology() {
		return this.translatedOntologyMerge;
	}

	/**
	 * Sets up the complete query answering pipeline from the loaded ontologies, the OBDA model
	 * and the preferences: vocabulary simplification, data repository (classic mode) or mapping
	 * translation (virtual mode), unfolder, SQL generator, and reformulation engine.
	 *
	 * @throws OWLReasonerException wrapping any setup failure (cause preserved)
	 */
	public void classify() throws OWLReasonerException {
		getProgressMonitor().setIndeterminate(true);
		getProgressMonitor().setMessage("Classifying...");
		getProgressMonitor().setStarted();
		if (obdaModel == null) {
			throw new NullPointerException("APIController not set");
		}
		if (preferences == null) {
			throw new NullPointerException("ReformulationPlatformPreferences not set");
		}
		OBDADataFactory fac = OBDADataFactoryImpl.getInstance();

		/* Read all configuration up front */
		String reformulationTechnique = (String) preferences.getCurrentValue(ReformulationPlatformPreferences.REFORMULATION_TECHNIQUE);
		boolean bOptimizeEquivalences = preferences.getCurrentBooleanValueFor(ReformulationPlatformPreferences.OPTIMIZE_EQUIVALENCES);
		boolean bUseInMemoryDB = preferences.getCurrentValue(ReformulationPlatformPreferences.DATA_LOCATION).equals(QuestConstants.INMEMORY);
		boolean bObtainFromOntology = preferences.getCurrentBooleanValueFor(ReformulationPlatformPreferences.OBTAIN_FROM_ONTOLOGY);
		boolean bObtainFromMappings = preferences.getCurrentBooleanValueFor(ReformulationPlatformPreferences.OBTAIN_FROM_MAPPINGS);
		String unfoldingMode = (String) preferences.getCurrentValue(ReformulationPlatformPreferences.ABOX_MODE);
		String dbType = (String) preferences.getCurrentValue(ReformulationPlatformPreferences.DBTYPE);
		// For testing purposes.
		boolean createMappings = preferences.getCurrentBooleanValueFor(ReformulationPlatformPreferences.CREATE_TEST_MAPPINGS);

		log.debug("Initializing Quest query answering engine...");
		log.debug("Active preferences:");
		log.debug("{} = {}", ReformulationPlatformPreferences.REFORMULATION_TECHNIQUE, reformulationTechnique);
		log.debug("{} = {}", ReformulationPlatformPreferences.OPTIMIZE_EQUIVALENCES, bOptimizeEquivalences);
		log.debug("{} = {}", ReformulationPlatformPreferences.DATA_LOCATION, bUseInMemoryDB);
		log.debug("{} = {}", ReformulationPlatformPreferences.OBTAIN_FROM_ONTOLOGY, bObtainFromOntology);
		log.debug("{} = {}", ReformulationPlatformPreferences.OBTAIN_FROM_MAPPINGS, bObtainFromMappings);
		log.debug("{} = {}", ReformulationPlatformPreferences.ABOX_MODE, unfoldingMode);
		log.debug("{} = {}", ReformulationPlatformPreferences.DBTYPE, dbType);
		log.debug("{} = {}", ReformulationPlatformPreferences.CREATE_TEST_MAPPINGS, createMappings);

		QueryRewriter rewriter = null;
		UnfoldingMechanism unfMech = null;
		SourceQueryGenerator gen = null;
		EvaluationEngine eval_engine;
		Ontology sigma = ofac.createOntology(URI.create("sigmaontology"));
		Ontology reformulationOntology = null;
		OBDAModel unfoldingOBDAModel = fac.getOBDAModel();
		Map<Predicate, Description> equivalenceMaps = null;

		/*
		 * PART 0: Simplifying the vocabulary of the ontology
		 */
		if (bOptimizeEquivalences) {
			log.debug("Equivalence optimization. Input ontology: {}", translatedOntologyMerge.toString());
			EquivalenceTBoxOptimizer equiOptimizer = new EquivalenceTBoxOptimizer(translatedOntologyMerge);
			equiOptimizer.optimize();
			/* This generates a new TBox with a simpler vocabulary */
			reformulationOntology = equiOptimizer.getOptimalTBox();
			/* This is used to simplify the vocabulary of ABox assertions and mappings */
			equivalenceMaps = equiOptimizer.getEquivalenceMap();
			log.debug("Equivalence optimization. Output ontology: {}", translatedOntologyMerge.toString());
		} else {
			reformulationOntology = translatedOntologyMerge;
			equivalenceMaps = new HashMap<Predicate, Description>();
		}

		try {
			/*
			 * Preparing the data source
			 */
			if (unfoldingMode.equals(QuestConstants.CLASSIC)) {
				log.debug("Working in classic mode");
				if (bUseInMemoryDB || createMappings) {
					log.debug("Using in an memory database");
					String driver = "org.h2.Driver";
					String url = "jdbc:h2:mem:aboxdump" + System.currentTimeMillis();
					String username = "sa";
					String password = "";
					// NOTE(review): the datasource-URI string literal was truncated in this copy of the
					// file ('URI.create("http:' ...). Reconstructed as a unique per-run identifier URI;
					// confirm against the original source.
					OBDADataSource newsource = fac.getDataSource(URI.create("http://www.obda.org/ABOXDUMP" + System.currentTimeMillis()));
					newsource.setParameter(RDBMSourceParameterConstants.DATABASE_DRIVER, driver);
					newsource.setParameter(RDBMSourceParameterConstants.DATABASE_PASSWORD, password);
					newsource.setParameter(RDBMSourceParameterConstants.DATABASE_URL, url);
					newsource.setParameter(RDBMSourceParameterConstants.DATABASE_USERNAME, username);
					newsource.setParameter(RDBMSourceParameterConstants.IS_IN_MEMORY, "true");
					newsource.setParameter(RDBMSourceParameterConstants.USE_DATASOURCE_FOR_ABOXDUMP, "true");
					// this.translatedOntologyMerge.saturate();

					RDBMSDataRepositoryManager dataRepository;
					if (dbType.equals(QuestConstants.SEMANTIC)) {
						dataRepository = new RDBMSSIRepositoryManager(newsource, reformulationOntology.getVocabulary());
					} else if (dbType.equals(QuestConstants.DIRECT)) {
						dataRepository = new RDBMSDirectDataRepositoryManager(newsource, reformulationOntology.getVocabulary());
					} else {
						throw new Exception(dbType + " is unknown or not yet supported Data Base type. Currently only the direct db type is supported");
					}
					dataRepository.setTBox(reformulationOntology);

					/* Creating the ABox repository */
					getProgressMonitor().setMessage("Creating database schema...");
					dataRepository.createDBSchema(true);
					dataRepository.insertMetadata();

					if (bObtainFromOntology) {
						log.debug("Loading data from Ontology into the database");
						OWLAPI2ABoxIterator aBoxIter = new OWLAPI2ABoxIterator(loadedOntologies, equivalenceMaps);
						dataRepository.insertData(aBoxIter);
					}
					if (bObtainFromMappings) {
						log.debug("Loading data from Mappings into the database");
						VirtualABoxMaterializer materializer = new VirtualABoxMaterializer(obdaModel);
						Iterator<Assertion> assertionIter = materializer.getAssertionIterator();
						dataRepository.insertData(assertionIter);
					}
					dataRepository.createIndexes();

					/* Setting up the OBDA model */
					unfoldingOBDAModel.addSource(newsource);
					unfoldingOBDAModel.addMappings(newsource.getSourceID(), dataRepository.getMappings());
					for (Axiom axiom : dataRepository.getABoxDependencies().getAssertions()) {
						sigma.addEntities(axiom.getReferencedEntities());
						sigma.addAssertion(axiom);
					}
				}
			} else if (unfoldingMode.equals(QuestConstants.VIRTUAL)) {
				log.debug("Working in virtual mode");
				Collection<OBDADataSource> sources = this.obdaModel.getSources();
				if (sources == null || sources.size() == 0) {
					throw new Exception("No datasource has been defined");
				} else if (sources.size() > 1) {
					throw new Exception("Currently the reasoner can only handle one datasource");
				} else {
					/* Setting up the OBDA model */
					OBDADataSource ds = sources.iterator().next();
					unfoldingOBDAModel.addSource(ds);
					/* Processing mappings with respect to the vocabulary simplification */
					MappingVocabularyTranslator mtrans = new MappingVocabularyTranslator();
					Collection<OBDAMappingAxiom> newMappings = mtrans.translateMappings(this.obdaModel.getMappings(ds.getSourceID()), equivalenceMaps);
					unfoldingOBDAModel.addMappings(ds.getSourceID(), newMappings);
				}
			}

			/*
			 * Setting up the unfolder and SQL generation
			 */
			OBDADataSource datasource = unfoldingOBDAModel.getSources().get(0);
			MappingViewManager viewMan = new MappingViewManager(unfoldingOBDAModel.getMappings(datasource.getSourceID()));
			unfMech = new ComplexMappingUnfolder(unfoldingOBDAModel.getMappings(datasource.getSourceID()), viewMan);
			JDBCUtility util = new JDBCUtility(datasource.getParameter(RDBMSourceParameterConstants.DATABASE_DRIVER));
			gen = new ComplexMappingSQLGenerator(viewMan, util);
			log.debug("Setting up the connection;");
			eval_engine = new JDBCEngine(unfoldingOBDAModel.getSources().get(0));

			/*
			 * Setting up the ontology we will use for the reformulation
			 */
			if (optimizeSigma) {
				SigmaTBoxOptimizer reducer = new SigmaTBoxOptimizer(reformulationOntology, sigma);
				reformulationOntology = reducer.getReducedOntology();
			}

			/*
			 * Setting up the reformulation engine
			 */
			if (QuestConstants.PERFECTREFORMULATION.equals(reformulationTechnique)) {
				rewriter = new DLRPerfectReformulator();
			} else if (QuestConstants.UCQBASED.equals(reformulationTechnique)) {
				rewriter = new TreeRedReformulator();
			} else {
				throw new IllegalArgumentException("Invalid value for argument: " + ReformulationPlatformPreferences.REFORMULATION_TECHNIQUE);
			}
			rewriter.setTBox(reformulationOntology);
			rewriter.setCBox(sigma);

			/*
			 * Done, sending a new reasoner with the modules we just configured
			 */
			this.validator = new QueryVocabularyValidator(reformulationOntology, equivalenceMaps);
			this.techwrapper = new QuestTechniqueWrapper(unfMech, rewriter, gen, this.validator, eval_engine, unfoldingOBDAModel);
			log.debug("... Quest has been setup and is ready for querying");
			isClassified = true;
		} catch (Exception e) {
			log.error(e.getMessage(), e);
			// Wrap in an anonymous OWLReasonerException (the class is abstract), keeping 'e' as the
			// cause. The previous code also called e.fillInStackTrace(), which would have discarded
			// the original stack trace of the cause.
			throw new OWLReasonerException(e.getMessage(), e) {
			};
		} finally {
			getProgressMonitor().setFinished();
		}
	}

	public void clearOntologies() throws OWLReasonerException {
		if (loadedOntologies != null) {
			loadedOntologies.clear();
		}
		translatedOntologyMerge = null;
		isClassified = false;
	}

	public void dispose() throws OWLReasonerException {
		techwrapper.dispose();
	}

	public Set<OWLOntology> getLoadedOntologies() {
		return loadedOntologies;
	}

	public boolean isClassified() throws OWLReasonerException {
		return isClassified;
	}

	public boolean isDefined(OWLClass cls) throws OWLReasonerException {
		// TODO implement
		return true;
	}

	public boolean isDefined(OWLObjectProperty prop) throws OWLReasonerException {
		// TODO implement
		return true;
	}

	public boolean isDefined(OWLDataProperty prop) throws OWLReasonerException {
		// TODO implement
		return true;
	}

	public boolean isDefined(OWLIndividual ind) throws OWLReasonerException {
		return true;
	}

	public boolean isRealised() throws OWLReasonerException {
		return isClassified;
	}

	/***
	 * This method loads the given ontologies in the system. This will merge
	 * these new ontologies with the existing ones in a set. Then it will
	 * translate the assertions in all the ontologies into a single one, in our
	 * internal representation.
	 *
	 * The translation is done using our OWLAPITranslator that gets the TBox
	 * part of the ontologies and filters all the DL-Lite axioms (RDFS/OWL2QL
	 * and DL-Lite).
	 *
	 * The original ontologies and the merged/translated ontology are kept and
	 * are used later when classify() is called.
	 */
	public void loadOntologies(Set<OWLOntology> ontologies) throws OWLReasonerException {
		/*
		 * We will keep track of the loaded ontologies and translate the TBox
		 * part of them into our internal representation
		 */
		URI uri = URI.create("http://it.unibz.krdb.obda/Quest/auxiliaryontology");
		if (translatedOntologyMerge == null) {
			translatedOntologyMerge = ofac.createOntology(uri);
		}
		if (loadedOntologies == null) {
			loadedOntologies = new HashSet<OWLOntology>();
		}
		log.debug("Load ontologies called. Translating ontologies.");
		OWLAPI2Translator translator = new OWLAPI2Translator();
		Ontology translation = ofac.createOntology(uri);
		for (OWLOntology onto : ontologies) {
			Ontology aux;
			try {
				aux = translator.translate(onto);
			} catch (Exception e) {
				// OWLReasonerException is abstract; anonymous subclass keeps the cause
				throw new OWLReasonerException("Error translating ontology: " + onto.toString(), e) {
				};
			}
			translation.addConcepts(aux.getConcepts());
			translation.addRoles(aux.getRoles());
			translation.addAssertions(aux.getAssertions());
		}
		/* we translated successfully, now we append the new assertions */
		this.loadedOntologies.addAll(ontologies);
		translatedOntologyMerge = translation; // NOTE: replaces (not merges with) any previous translation
		log.debug("Ontology loaded: {}", translatedOntologyMerge);
		isClassified = false;
	}

	public void realise() throws OWLReasonerException {
		classify();
	}

	public void unloadOntologies(Set<OWLOntology> ontologies) throws OWLReasonerException {
		boolean result = loadedOntologies.removeAll(ontologies);
		// if no ontologies where removed
		if (!result)
			return;
		// otherwise clear everything and rebuild the merge from the remaining ontologies
		Set<OWLOntology> resultSet = new HashSet<OWLOntology>();
		resultSet.addAll(loadedOntologies);
		clearOntologies();
		loadOntologies(resultSet);
	}

	/* Unimplemented OWLReasoner inference stubs follow; they return empty/trivial results. */

	public Set<Set<OWLClass>> getAncestorClasses(OWLDescription clsC) throws OWLReasonerException {
		return new HashSet<Set<OWLClass>>(); // TODO implement owl
	}

	public Set<Set<OWLClass>> getDescendantClasses(OWLDescription clsC) throws OWLReasonerException {
		return new HashSet<Set<OWLClass>>(); // TODO implement owl
	}

	public Set<OWLClass> getEquivalentClasses(OWLDescription clsC) throws OWLReasonerException {
		return new HashSet<OWLClass>(); // TODO implement owl
	}

	public Set<OWLClass> getInconsistentClasses() throws OWLReasonerException {
		return new HashSet<OWLClass>(); // TODO implement owl
	}

	public Set<Set<OWLClass>> getSubClasses(OWLDescription clsC) throws OWLReasonerException {
		return new HashSet<Set<OWLClass>>(); // TODO implement owl
	}

	public Set<Set<OWLClass>> getSuperClasses(OWLDescription clsC) throws OWLReasonerException {
		return new HashSet<Set<OWLClass>>(); // TODO implement owl
	}

	public boolean isEquivalentClass(OWLDescription clsC, OWLDescription clsD) throws OWLReasonerException {
		return true; // TODO implement owl
	}

	public boolean isSubClassOf(OWLDescription clsC, OWLDescription clsD) throws OWLReasonerException {
		return true; // TODO implement owl
	}

	public boolean isSatisfiable(OWLDescription description) throws OWLReasonerException {
		return true; // TODO implement owl
	}

	public Map<OWLDataProperty, Set<OWLConstant>> getDataPropertyRelationships(OWLIndividual individual) throws OWLReasonerException {
		return new HashMap<OWLDataProperty, Set<OWLConstant>>(); // TODO implement owl
	}

	public Set<OWLIndividual> getIndividuals(OWLDescription clsC, boolean direct) throws OWLReasonerException {
		return new HashSet<OWLIndividual>(); // TODO implement owl
	}

	public Map<OWLObjectProperty, Set<OWLIndividual>> getObjectPropertyRelationships(OWLIndividual individual) throws OWLReasonerException {
		return new HashMap<OWLObjectProperty, Set<OWLIndividual>>(); // TODO implement owl
	}

	public Set<OWLIndividual> getRelatedIndividuals(OWLIndividual subject, OWLObjectPropertyExpression property) throws OWLReasonerException {
		return new HashSet<OWLIndividual>(); // TODO implement owl
	}

	public Set<OWLConstant> getRelatedValues(OWLIndividual subject, OWLDataPropertyExpression property) throws OWLReasonerException {
		return new HashSet<OWLConstant>(); // TODO implement owl
	}

	public Set<Set<OWLClass>> getTypes(OWLIndividual individual, boolean direct) throws OWLReasonerException {
		return new HashSet<Set<OWLClass>>(); // TODO implement owl
	}

	public boolean hasDataPropertyRelationship(OWLIndividual subject, OWLDataPropertyExpression property, OWLConstant object) throws OWLReasonerException {
		return false; // TODO implement
	}

	public boolean hasObjectPropertyRelationship(OWLIndividual subject, OWLObjectPropertyExpression property, OWLIndividual object) throws OWLReasonerException {
		return false; // TODO implement
	}

	public boolean hasType(OWLIndividual individual, OWLDescription type, boolean direct) throws OWLReasonerException {
		return false; // TODO implement
	}

	public Set<Set<OWLObjectProperty>> getAncestorProperties(OWLObjectProperty property) throws OWLReasonerException {
		return new HashSet<Set<OWLObjectProperty>>(); // TODO implement owl
	}

	public Set<Set<OWLDataProperty>> getAncestorProperties(OWLDataProperty property) throws OWLReasonerException {
		return new HashSet<Set<OWLDataProperty>>(); // TODO implement owl
	}

	public Set<Set<OWLObjectProperty>> getDescendantProperties(OWLObjectProperty property) throws OWLReasonerException {
		return new HashSet<Set<OWLObjectProperty>>(); // TODO implement owl
	}

	public Set<Set<OWLDataProperty>> getDescendantProperties(OWLDataProperty property) throws OWLReasonerException {
		return new HashSet<Set<OWLDataProperty>>(); // TODO implement owl
	}

	public Set<Set<OWLDescription>> getDomains(OWLObjectProperty property) throws OWLReasonerException {
		return new HashSet<Set<OWLDescription>>(); // TODO implement owl
	}

	public Set<Set<OWLDescription>> getDomains(OWLDataProperty property) throws OWLReasonerException {
		return new HashSet<Set<OWLDescription>>(); // TODO implement owl
	}

	public Set<OWLObjectProperty> getEquivalentProperties(OWLObjectProperty property) throws OWLReasonerException {
		return new HashSet<OWLObjectProperty>(); // TODO implement owl
	}

	public Set<OWLDataProperty> getEquivalentProperties(OWLDataProperty property) throws OWLReasonerException {
		return new HashSet<OWLDataProperty>(); // TODO implement owl
	}

	public Set<Set<OWLObjectProperty>> getInverseProperties(OWLObjectProperty property) throws OWLReasonerException {
		return new HashSet<Set<OWLObjectProperty>>(); // TODO implement owl
	}

	public Set<OWLDescription> getRanges(OWLObjectProperty property) throws OWLReasonerException {
		return new HashSet<OWLDescription>(); // TODO implement owl
	}

	public Set<OWLDataRange> getRanges(OWLDataProperty property) throws OWLReasonerException {
		return new HashSet<OWLDataRange>(); // TODO implement owl
	}

	public Set<Set<OWLObjectProperty>> getSubProperties(OWLObjectProperty property) throws OWLReasonerException {
		return new HashSet<Set<OWLObjectProperty>>(); // TODO implement owl
	}

	public Set<Set<OWLDataProperty>> getSubProperties(OWLDataProperty property) throws OWLReasonerException {
		return new HashSet<Set<OWLDataProperty>>(); // TODO implement owl
	}

	public Set<Set<OWLObjectProperty>> getSuperProperties(OWLObjectProperty property) throws OWLReasonerException {
		return new HashSet<Set<OWLObjectProperty>>(); // TODO implement owl
	}

	public Set<Set<OWLDataProperty>> getSuperProperties(OWLDataProperty property) throws OWLReasonerException {
		return new HashSet<Set<OWLDataProperty>>(); // TODO implement owl
	}

	public boolean isAntiSymmetric(OWLObjectProperty property) throws OWLReasonerException {
		return false; // TODO implement owl
	}

	public boolean isFunctional(OWLObjectProperty property) throws OWLReasonerException {
		return false;
	}

	public boolean isFunctional(OWLDataProperty property) throws OWLReasonerException {
		return false;
	}

	public boolean isInverseFunctional(OWLObjectProperty property) throws OWLReasonerException {
		return false;
	}

	public boolean isIrreflexive(OWLObjectProperty property) throws OWLReasonerException {
		return false;
	}

	public boolean isReflexive(OWLObjectProperty property) throws OWLReasonerException {
		return false;
	}

	public boolean isSymmetric(OWLObjectProperty property) throws OWLReasonerException {
		return false;
	}

	public boolean isTransitive(OWLObjectProperty property) throws OWLReasonerException {
		return false;
	}

	public OWLEntity getCurrentEntity() {
		return null;
		// return ontoManager.getOWLDataFactory().getOWLThing();
	}

	/* The following methods need revision */

	@Override
	public void setProgressMonitor(ProgressMonitor progressMonitor) {
		this.progressMonitor = progressMonitor;
	}

	/** Returns the progress monitor, lazily replacing a null one with a no-op monitor. */
	private ProgressMonitor getProgressMonitor() {
		if (progressMonitor == null) {
			progressMonitor = new NullProgressMonitor();
		}
		return progressMonitor;
	}

	@Override
	public void finishProgressMonitor() {
		getProgressMonitor().setFinished();
	}

	@Override
	public void startProgressMonitor(String msg) {
		getProgressMonitor().setMessage(msg);
		getProgressMonitor().setIndeterminate(true);
		getProgressMonitor().setStarted();
	}

}
package org.eclipse.mylar.bugzilla.search;

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.ArrayList;

import javax.security.auth.login.LoginException;

import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.jface.dialogs.DialogPage;
import org.eclipse.jface.dialogs.IDialogSettings;
import org.eclipse.jface.dialogs.InputDialog;
import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.jface.dialogs.ProgressMonitorDialog;
import org.eclipse.jface.preference.IPreferenceStore;
import org.eclipse.mylar.bugzilla.BugzillaPlugin;
import org.eclipse.mylar.bugzilla.BugzillaPreferences;
import org.eclipse.mylar.bugzilla.IBugzillaConstants;
import org.eclipse.mylar.bugzilla.saveQuery.GetQueryDialog;
import org.eclipse.mylar.bugzilla.saveQuery.SaveQueryDialog;
import org.eclipse.mylar.bugzilla.saveQuery.SavedQueryFile;
import org.eclipse.search.ui.ISearchPage;
import org.eclipse.search.ui.ISearchPageContainer;
import org.eclipse.search.ui.NewSearchUI;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.MouseAdapter;
import org.eclipse.swt.events.MouseEvent;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Combo;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Group;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.List;
import org.eclipse.swt.widgets.Text;
import org.eclipse.ui.internal.help.WorkbenchHelpSystem;

/**
 * Bugzilla search page.
 * <p>
 * Lets the user compose a Bugzilla query (summary/comment/email patterns,
 * product/component/version/milestone, status/resolution/severity/priority/
 * hardware/OS, "changed in the last N days"), remembers previously used
 * patterns per session, and supports saving/recalling whole queries.
 * {@link #performAction()} either opens a bug directly (when the summary field
 * holds a single bug id) or launches a background search against the
 * server-side {@code buglist.cgi} endpoint.
 */
public class BugzillaSearchPage extends DialogPage implements ISearchPage {

    protected Combo summaryPattern = null;

    // Per-session pattern history, shared across page instances (most recent last).
    private static ArrayList<BugzillaSearchData> previousSummaryPatterns = new ArrayList<BugzillaSearchData>(20);

    private static ArrayList<BugzillaSearchData> previousEmailPatterns = new ArrayList<BugzillaSearchData>(20);

    private static ArrayList<BugzillaSearchData> previousCommentPatterns = new ArrayList<BugzillaSearchData>(20);

    protected ISearchPageContainer scontainer = null;

    private boolean firstTime = true;

    private IDialogSettings fDialogSettings;

    // Display labels and the matching CGI parameter values; the two arrays are
    // index-aligned, so a combo's selection index maps directly to its value.
    private static final String[] patternOperationText = {"all words", "any word", "regexp"};

    private static final String[] patternOperationValues = {"allwordssubstr", "anywordssubstr", "regexp"};

    private static final String[] emailOperationText = {"substring", "exact", "regexp"};

    private static final String[] emailOperationValues = {"substring", "exact", "regexp"};

    // CGI flags selecting which email role(s) to match; index-aligned with emailText.
    private static final String[] emailRoleValues = {"emailassigned_to1", "emailreporter1", "emailcc1", "emaillongdesc1"};

    protected IPreferenceStore prefs = BugzillaPlugin.getDefault().getPreferenceStore();

    // Attribute choices cached from the preference store; refreshed from the
    // server by the "Update" button (see createUpdate()).
    private String[] statusValues = BugzillaPreferences.queryOptionsToArray(prefs.getString(IBugzillaConstants.STATUS_VALUES));

    private String[] preselectedStatusValues = BugzillaPreferences.queryOptionsToArray(prefs.getString(IBugzillaConstants.PRESELECTED_STATUS_VALUES));

    private String[] resolutionValues = BugzillaPreferences.queryOptionsToArray(prefs.getString(IBugzillaConstants.RESOLUTION_VALUES));

    private String[] severityValues = BugzillaPreferences.queryOptionsToArray(prefs.getString(IBugzillaConstants.SEVERITY_VALUES));

    private String[] priorityValues = BugzillaPreferences.queryOptionsToArray(prefs.getString(IBugzillaConstants.PRIORITY_VALUES));

    private String[] hardwareValues = BugzillaPreferences.queryOptionsToArray(prefs.getString(IBugzillaConstants.HARDWARE_VALUES));

    private String[] osValues = BugzillaPreferences.queryOptionsToArray(prefs.getString(IBugzillaConstants.OS_VALUES));

    private String[] productValues = BugzillaPreferences.queryOptionsToArray(prefs.getString(IBugzillaConstants.PRODUCT_VALUES));

    private String[] componentValues = BugzillaPreferences.queryOptionsToArray(prefs.getString(IBugzillaConstants.COMPONENT_VALUES));

    private String[] versionValues = BugzillaPreferences.queryOptionsToArray(prefs.getString(IBugzillaConstants.VERSION_VALUES));

    private String[] targetValues = BugzillaPreferences.queryOptionsToArray(prefs.getString(IBugzillaConstants.TARGET_VALUES));

    /** A search pattern together with the matching operation used with it. */
    private static class BugzillaSearchData {
        /** Pattern to match on */
        String pattern;

        /** Pattern matching criterion */
        int operation;

        BugzillaSearchData(String pattern, int operation) {
            this.pattern = pattern;
            this.operation = operation;
        }
    }

    /**
     * The constructor.
     */
    public BugzillaSearchPage() {
        super();
    }

    /**
     * Builds the full search page UI and registers it with the help system.
     *
     * @see DialogPage#createControl
     */
    public void createControl(Composite parent) {
        readConfiguration();

        Composite control = new Composite(parent, SWT.NONE);
        GridLayout layout = new GridLayout(2, false);
        control.setLayout(layout);
        GridData gd = new GridData(GridData.FILL_BOTH);
        control.setLayoutData(gd);

        createTextSearchComposite(control);
        createComment(control);
        createProductAttributes(control);
        createLists(control);
        createLastDays(control);
        createEmail(control);
        createSaveQuery(control);
        // Saved queries live in a file under the plug-in's state location.
        input = new SavedQueryFile(BugzillaPlugin.getDefault().getStateLocation().toString(), "/queries");
        createUpdate(control);
        setControl(control);
        WorkbenchHelpSystem.getInstance().setHelp(control, IBugzillaConstants.SEARCH_PAGE_CONTEXT);
    }

    /**
     * Creates the bug-id/summary pattern row: a history combo plus the
     * matching-operation combo (all words / any word / regexp).
     */
    protected Control createTextSearchComposite(Composite control) {
        GridData gd;
        Label label;

        Composite group = new Composite(control, SWT.NONE);
        GridLayout layout = new GridLayout(2, false);
        group.setLayout(layout);
        group.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
        gd = new GridData(GridData.BEGINNING | GridData.FILL_HORIZONTAL | GridData.GRAB_HORIZONTAL);
        gd.horizontalSpan = 2;
        group.setLayoutData(gd);

        // Info text
        label = new Label(group, SWT.LEFT);
        label.setText("Bug id or summary search terms");
        gd = new GridData(GridData.BEGINNING);
        gd.horizontalSpan = 2;
        label.setLayoutData(gd);

        // Pattern combo
        summaryPattern = new Combo(group, SWT.SINGLE | SWT.BORDER);
        summaryPattern.addModifyListener(new ModifyListener() {
            public void modifyText(ModifyEvent e) {
                scontainer.setPerformActionEnabled(canQuery());
            }
        });
        summaryPattern.addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                handleWidgetSelected(summaryPattern, summaryOperation, previousSummaryPatterns);
            }
        });
        gd = new GridData(GridData.FILL_HORIZONTAL | GridData.GRAB_HORIZONTAL);
        summaryPattern.setLayoutData(gd);

        summaryOperation = new Combo(group, SWT.SINGLE | SWT.READ_ONLY | SWT.BORDER);
        summaryOperation.setItems(patternOperationText);
        summaryOperation.setText(patternOperationText[0]);
        summaryOperation.select(0);

        return group;
    }

    /**
     * Creates the comment-pattern row: operation combo plus a history combo
     * for the comment text to match.
     */
    private Control createComment(Composite control) {
        GridData gd;
        Label label;

        Composite group = new Composite(control, SWT.NONE);
        GridLayout layout = new GridLayout(3, false);
        group.setLayout(layout);
        group.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
        gd = new GridData(GridData.BEGINNING | GridData.FILL_HORIZONTAL | GridData.GRAB_HORIZONTAL);
        gd.horizontalSpan = 2;
        group.setLayoutData(gd);

        // Info text
        label = new Label(group, SWT.LEFT);
        label.setText("Comment contains: ");
        gd = new GridData(GridData.BEGINNING);
        label.setLayoutData(gd);

        commentOperation = new Combo(group, SWT.SINGLE | SWT.READ_ONLY | SWT.BORDER);
        commentOperation.setItems(patternOperationText);
        commentOperation.setText(patternOperationText[0]);
        commentOperation.select(0);

        // Comment pattern combo
        commentPattern = new Combo(group, SWT.SINGLE | SWT.BORDER);
        commentPattern.addModifyListener(new ModifyListener() {
            public void modifyText(ModifyEvent e) {
                scontainer.setPerformActionEnabled(canQuery());
            }
        });
        commentPattern.addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                handleWidgetSelected(commentPattern, commentOperation, previousCommentPatterns);
            }
        });
        gd = new GridData(GridData.FILL_HORIZONTAL | GridData.GRAB_HORIZONTAL);
        commentPattern.setLayoutData(gd);

        return group;
    }

    /**
     * Creates the area for selection on product/component/version.
     */
    protected Control createProductAttributes(Composite control) {
        GridData gd;
        GridLayout layout;

        // Search expression
        Group group = new Group(control, SWT.NONE);
        layout = new GridLayout();
        layout.numColumns = 4;
        group.setLayout(layout);
        gd = new GridData(GridData.FILL_HORIZONTAL);
        gd.horizontalSpan = 5;
        group.setLayoutData(gd);

        // Labels
        Label label = new Label(group, SWT.LEFT);
        label.setText("Product");
        label = new Label(group, SWT.LEFT);
        label.setText("Component");
        label = new Label(group, SWT.LEFT);
        label.setText("Version");
        label = new Label(group, SWT.LEFT);
        label.setText("Milestone");

        // Lists
        product = new List(group, SWT.MULTI | SWT.V_SCROLL | SWT.BORDER);
        gd = new GridData(GridData.FILL_HORIZONTAL);
        gd.heightHint = 40;
        product.setLayoutData(gd);

        component = new List(group, SWT.MULTI | SWT.V_SCROLL | SWT.BORDER);
        gd = new GridData(GridData.FILL_HORIZONTAL);
        gd.heightHint = 40;
        component.setLayoutData(gd);

        version = new List(group, SWT.MULTI | SWT.V_SCROLL | SWT.BORDER);
        gd = new GridData(GridData.FILL_HORIZONTAL);
        gd.heightHint = 40;
        version.setLayoutData(gd);

        target = new List(group, SWT.MULTI | SWT.V_SCROLL | SWT.BORDER);
        gd = new GridData(GridData.FILL_HORIZONTAL);
        gd.heightHint = 40;
        target.setLayoutData(gd);

        return group;
    }

    /**
     * Creates the area for selection of bug attributes (status, etc.)
     */
    protected Control createLists(Composite control) {
        GridData gd;
        GridLayout layout;

        // Search expression
        Group group = new Group(control, SWT.NONE);
        layout = new GridLayout();
        layout.numColumns = 6;
        group.setLayout(layout);
        gd = new GridData(GridData.FILL_HORIZONTAL);
        gd.horizontalSpan = 5;
        group.setLayoutData(gd);

        // Labels
        Label label = new Label(group, SWT.LEFT);
        label.setText("Status");
        label = new Label(group, SWT.LEFT);
        label.setText("Resolution");
        label = new Label(group, SWT.LEFT);
        label.setText("Severity");
        label = new Label(group, SWT.LEFT);
        label.setText("Priority");
        label = new Label(group, SWT.LEFT);
        label.setText("Hardware");
        label = new Label(group, SWT.LEFT);
        label.setText("OS");

        // Lists
        status = new List(group, SWT.MULTI | SWT.V_SCROLL | SWT.BORDER);
        gd = new GridData(GridData.FILL_HORIZONTAL);
        gd.heightHint = 40;
        status.setLayoutData(gd);

        resolution = new List(group, SWT.MULTI | SWT.V_SCROLL | SWT.BORDER);
        gd = new GridData(GridData.FILL_HORIZONTAL);
        gd.heightHint = 40;
        resolution.setLayoutData(gd);

        severity = new List(group, SWT.MULTI | SWT.V_SCROLL | SWT.BORDER);
        gd = new GridData(GridData.FILL_HORIZONTAL);
        gd.heightHint = 40;
        severity.setLayoutData(gd);

        priority = new List(group, SWT.MULTI | SWT.V_SCROLL | SWT.BORDER);
        gd = new GridData(GridData.FILL_HORIZONTAL);
        gd.heightHint = 40;
        priority.setLayoutData(gd);

        hardware = new List(group, SWT.MULTI | SWT.V_SCROLL | SWT.BORDER);
        gd = new GridData(GridData.FILL_HORIZONTAL);
        gd.heightHint = 40;
        hardware.setLayoutData(gd);

        os = new List(group, SWT.MULTI | SWT.V_SCROLL | SWT.BORDER);
        gd = new GridData(GridData.FILL_HORIZONTAL);
        gd.heightHint = 40;
        os.setLayoutData(gd);

        return group;
    }

    private Text daysText;

    /**
     * Creates the "only bugs changed in the last N days" row. The modify
     * listener strips trailing characters until the field parses as a
     * non-negative integer (or is empty).
     */
    protected Control createLastDays(Composite control) {
        GridLayout layout;
        GridData gd;

        Group group = new Group(control, SWT.NONE);
        layout = new GridLayout(3, false);
        group.setLayout(layout);
        group.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
        gd = new GridData(GridData.BEGINNING | GridData.FILL_HORIZONTAL | GridData.GRAB_HORIZONTAL);
        gd.horizontalSpan = 2;
        group.setLayoutData(gd);

        Label label = new Label(group, SWT.LEFT);
        label.setText("Only bugs changed in the last ");

        // operation combo
        daysText = new Text(group, SWT.BORDER);
        daysText.setTextLimit(5);
        daysText.addModifyListener(new ModifyListener() {
            public void modifyText(ModifyEvent e) {
                String days = daysText.getText();
                if (days.length() == 0)
                    return;
                // Chop characters off the end until the remainder is numeric;
                // if the text was already valid, leave the widget untouched.
                for (int i = days.length() - 1; i >= 0; i--) {
                    try {
                        if (days.equals("") || Integer.parseInt(days) > -1) {
                            if (i == days.length() - 1)
                                return;
                            else
                                break;
                        }
                    } catch (NumberFormatException ex) {
                        days = days.substring(0, i);
                    }
                }
                daysText.setText(days);
            }
        });

        label = new Label(group, SWT.LEFT);
        label.setText(" Days.");
        return group;
    }

    // Checkbox labels; index-aligned with emailRoleValues.
    private static final String[] emailText = {"bug owner", "reporter", "CC list", "commenter"};

    /**
     * Creates the email-matching area: role checkboxes (owner/reporter/CC/
     * commenter), the match-operation combo, and the email pattern combo.
     */
    protected Control createEmail(Composite control) {
        GridLayout layout;
        GridData gd;

        Group group = new Group(control, SWT.NONE);
        layout = new GridLayout(3, false);
        group.setLayout(layout);
        group.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
        gd = new GridData(GridData.BEGINNING | GridData.FILL_HORIZONTAL | GridData.GRAB_HORIZONTAL);
        gd.horizontalSpan = 2;
        group.setLayoutData(gd);

        Composite buttons = new Composite(group, SWT.NONE);
        layout = new GridLayout(4, false);
        buttons.setLayout(layout);
        buttons.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
        gd = new GridData(GridData.BEGINNING);
        gd.horizontalSpan = 3;
        buttons.setLayoutData(gd);

        emailButton = new Button[emailText.length];
        for (int i = 0; i < emailButton.length; i++) {
            Button button = new Button(buttons, SWT.CHECK);
            button.setText(emailText[i]);
            emailButton[i] = button;
        }

        Label label = new Label(group, SWT.LEFT);
        label.setText("Email contains: ");

        // operation combo
        emailOperation = new Combo(group, SWT.SINGLE | SWT.READ_ONLY | SWT.BORDER);
        emailOperation.setItems(emailOperationText);
        emailOperation.setText(emailOperationText[0]);
        emailOperation.select(0);

        // pattern combo
        emailPattern = new Combo(group, SWT.SINGLE | SWT.BORDER);
        emailPattern.addModifyListener(new ModifyListener() {
            public void modifyText(ModifyEvent e) {
                scontainer.setPerformActionEnabled(canQuery());
            }
        });
        emailPattern.addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                handleWidgetSelected(emailPattern, emailOperation, previousEmailPatterns);
            }
        });
        gd = new GridData(GridData.FILL_HORIZONTAL | GridData.GRAB_HORIZONTAL);
        emailPattern.setLayoutData(gd);

        return group;
    }

    /**
     * Creates the buttons for remembering a query and accessing previously
     * saved queries.
     */
    protected Control createSaveQuery(Composite control) {
        GridLayout layout;
        GridData gd;

        Group group = new Group(control, SWT.NONE);
        layout = new GridLayout(3, false);
        group.setLayout(layout);
        group.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
        gd = new GridData(GridData.BEGINNING | GridData.FILL_HORIZONTAL | GridData.GRAB_HORIZONTAL);
        gd.horizontalSpan = 2;
        group.setLayoutData(gd);

        loadButton = new Button(group, SWT.PUSH | SWT.LEFT);
        loadButton.setText("Saved Queries...");
        final BugzillaSearchPage bsp = this;
        loadButton.addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent event) {
                GetQueryDialog qd = new GetQueryDialog(getShell(), "Saved Queries", input);
                if (qd.open() == InputDialog.OK) {
                    selIndex = qd.getSelected();
                    if (selIndex != -1) {
                        // Run the remembered query immediately and close the page.
                        rememberedQuery = true;
                        performAction();
                        bsp.getShell().close();
                    }
                }
            }
        });
        loadButton.setEnabled(true);
        loadButton.setLayoutData(new GridData(GridData.HORIZONTAL_ALIGN_BEGINNING));

        saveButton = new Button(group, SWT.PUSH | SWT.LEFT);
        saveButton.setText("Remember...");
        saveButton.addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent event) {
                SaveQueryDialog qd = new SaveQueryDialog(getShell(), "Remember Query");
                if (qd.open() == InputDialog.OK) {
                    String qName = qd.getText();
                    if (qName != null && qName.compareTo("") != 0) {
                        try {
                            input.add(getQueryParameters().toString(), qName, summaryPattern.getText());
                        } catch (UnsupportedEncodingException e) {
                            /*
                             * Do nothing. Every implementation of the Java platform is required
                             * to support the standard charset "UTF-8"
                             */
                        }
                    }
                }
            }
        });
        saveButton.setEnabled(true);
        saveButton.setLayoutData(new GridData(GridData.HORIZONTAL_ALIGN_BEGINNING));

        return group;
    }

    /** Returns the saved-queries file shared by all pages. */
    public static SavedQueryFile getInput() {
        return input;
    }

    /**
     * Creates the "Update" button that refreshes all attribute lists from the
     * server, reporting progress in a modal dialog.
     */
    protected Control createUpdate(final Composite control) {
        GridData gd;
        Label label;

        Composite group = new Composite(control, SWT.NONE);
        GridLayout layout = new GridLayout(2, false);
        group.setLayout(layout);
        group.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
        gd = new GridData(GridData.BEGINNING);
        gd.horizontalSpan = 2;
        group.setLayoutData(gd);

        // Info text
        label = new Label(group, SWT.LEFT);
        label.setText("Update search options from server (may take several seconds):");
        gd = new GridData(GridData.BEGINNING);
        label.setLayoutData(gd);

        updateButton = new Button(group, SWT.LEFT | SWT.PUSH);
        updateButton.setText("Update");
        updateButton.setLayoutData(new GridData());
        updateButton.addMouseListener(new MouseAdapter() {
            @Override
            public void mouseUp(MouseEvent e) {
                monitorDialog.open();
                IProgressMonitor monitor = monitorDialog.getProgressMonitor();
                monitor.beginTask("Updating search options...", 55);
                try {
                    BugzillaPreferences.updateQueryOptions(monitor);

                    product.setItems(BugzillaPreferences.queryOptionsToArray(prefs.getString(IBugzillaConstants.PRODUCT_VALUES)));
                    monitor.worked(1);
                    component.setItems(BugzillaPreferences.queryOptionsToArray(prefs.getString(IBugzillaConstants.COMPONENT_VALUES)));
                    monitor.worked(1);
                    version.setItems(BugzillaPreferences.queryOptionsToArray(prefs.getString(IBugzillaConstants.VERSION_VALUES)));
                    monitor.worked(1);
                    target.setItems(BugzillaPreferences.queryOptionsToArray(prefs.getString(IBugzillaConstants.TARGET_VALUES)));
                    monitor.worked(1);
                    status.setItems(BugzillaPreferences.queryOptionsToArray(prefs.getString(IBugzillaConstants.STATUS_VALUES)));
                    monitor.worked(1);
                    status.setSelection(BugzillaPreferences.queryOptionsToArray(prefs.getString(IBugzillaConstants.PRESELECTED_STATUS_VALUES)));
                    monitor.worked(1);
                    resolution.setItems(BugzillaPreferences.queryOptionsToArray(prefs.getString(IBugzillaConstants.RESOLUTION_VALUES)));
                    monitor.worked(1);
                    severity.setItems(BugzillaPreferences.queryOptionsToArray(prefs.getString(IBugzillaConstants.SEVERITY_VALUES)));
                    monitor.worked(1);
                    priority.setItems(BugzillaPreferences.queryOptionsToArray(prefs.getString(IBugzillaConstants.PRIORITY_VALUES)));
                    monitor.worked(1);
                    hardware.setItems(BugzillaPreferences.queryOptionsToArray(prefs.getString(IBugzillaConstants.HARDWARE_VALUES)));
                    monitor.worked(1);
                    os.setItems(BugzillaPreferences.queryOptionsToArray(prefs.getString(IBugzillaConstants.OS_VALUES)));
                    monitor.worked(1);
                } catch (LoginException exception) {
                    // we had a problem that seems to have been caused from bad login info
                    MessageDialog.openError(null, "Login Error",
                            "Bugzilla could not log you in to get the information you requested since login name or password is incorrect.\nPlease check your settings in the bugzilla preferences. ");
                    BugzillaPlugin.log(exception);
                } finally {
                    monitor.done();
                    monitorDialog.close();
                }
            }
        });

        return group;
    }

    /**
     * Restores the matching-operation combo when an entry is picked from a
     * pattern-history combo. History is stored most-recent-last, while the
     * combo shows it most-recent-first, hence the index reversal.
     */
    private void handleWidgetSelected(Combo widget, Combo operation, ArrayList<BugzillaSearchData> history) {
        if (widget.getSelectionIndex() < 0)
            return;
        int index = history.size() - 1 - widget.getSelectionIndex();
        BugzillaSearchData patternData = history.get(index);
        // Bail out if the widget text no longer corresponds to the history entry.
        if (patternData == null || !widget.getText().equals(patternData.pattern))
            return;
        widget.setText(patternData.pattern);
        operation.setText(operation.getItem(patternData.operation));
    }

    /**
     * Runs the query. A summary consisting of a single bug id opens that bug
     * directly; otherwise the composed query URL is executed as a background
     * search.
     *
     * @see ISearchPage#performAction()
     */
    public boolean performAction() {
        // Update the pattern histories with whatever is currently entered.
        getPatternData(summaryPattern, summaryOperation, previousSummaryPatterns);
        getPatternData(commentPattern, commentOperation, previousCommentPatterns);
        getPatternData(this.emailPattern, emailOperation, previousEmailPatterns);

        String summaryText;
        String url;
        if (rememberedQuery == true) {
            url = getQueryURL(new StringBuffer(input.getQueryParameters(selIndex)));
            summaryText = input.getSummaryText(selIndex);
        } else {
            try {
                StringBuffer params = getQueryParameters();
                url = getQueryURL(params);
                summaryText = summaryPattern.getText();
            } catch (UnsupportedEncodingException e) {
                /*
                 * These statements should never be executed. Every implementation of
                 * the Java platform is required to support the standard charset
                 * "UTF-8"
                 */
                url = "";
                summaryText = "";
            }
        }

        try {
            // if the summary contains a single bug id, open the bug directly
            int id = Integer.parseInt(summaryText);
            return BugzillaSearchHit.show(id);
        } catch (NumberFormatException ignored) {
            // ignore this since this means that the text is not a bug id
        }

        // Don't activate the search result view until it is known that the
        // user is not opening a bug directly -- there is no need to open
        // the view if no searching is going to take place.
        NewSearchUI.activateSearchResultView();
        BugzillaPlugin.getDefault().getPreferenceStore().setValue(IBugzillaConstants.MOST_RECENT_QUERY, summaryText);
        IBugzillaSearchResultCollector collector = new BugzillaSearchResultCollector();
        IBugzillaSearchOperation op = new BugzillaSearchOperation(url, collector);
        BugzillaSearchQuery searchQuery = new BugzillaSearchQuery(op);
        NewSearchUI.runQueryInBackground(searchQuery);
        return true;
    }

    /**
     * @see ISearchPage#setContainer(ISearchPageContainer)
     */
    public void setContainer(ISearchPageContainer container) {
        scontainer = container;
    }

    /**
     * On first display, populates all combos and lists (done here rather than
     * in createControl to prevent the page from resizing), then focuses the
     * summary field.
     */
    @Override
    public void setVisible(boolean visible) {
        if (visible && summaryPattern != null) {
            if (firstTime) {
                firstTime = false;
                // Set item and text here to prevent page from resizing
                summaryPattern.setItems(getPreviousPatterns(previousSummaryPatterns));
                commentPattern.setItems(getPreviousPatterns(previousCommentPatterns));
                emailPattern.setItems(getPreviousPatterns(previousEmailPatterns));
                product.setItems(productValues);
                component.setItems(componentValues);
                version.setItems(versionValues);
                target.setItems(targetValues);
                status.setItems(statusValues);
                status.setSelection(preselectedStatusValues);
                resolution.setItems(resolutionValues);
                severity.setItems(severityValues);
                priority.setItems(priorityValues);
                hardware.setItems(hardwareValues);
                os.setItems(osValues);
            }
            summaryPattern.setFocus();
            scontainer.setPerformActionEnabled(canQuery());
        }
        super.setVisible(visible);
    }

    /**
     * Returns <code>true</code> if at least some parameter is given to query on.
     */
    private boolean canQuery() {
        return product.getSelectionCount() > 0
                || component.getSelectionCount() > 0
                || version.getSelectionCount() > 0
                || target.getSelectionCount() > 0
                || status.getSelectionCount() > 0
                || resolution.getSelectionCount() > 0
                || severity.getSelectionCount() > 0
                || priority.getSelectionCount() > 0
                || hardware.getSelectionCount() > 0
                || os.getSelectionCount() > 0
                || summaryPattern.getText().length() > 0
                || commentPattern.getText().length() > 0
                || emailPattern.getText().length() > 0;
    }

    /**
     * Return search pattern data and update search history list.
     * An existing entry will be updated or a new one created.
     */
    private BugzillaSearchData getPatternData(Combo widget, Combo operation,
            ArrayList<BugzillaSearchData> previousSearchQueryData) {
        String pattern = widget.getText();
        if (pattern == null || pattern.trim().equals("")) {
            return null;
        }
        BugzillaSearchData match = null;
        // Scan history from the most recent entry backwards for this pattern.
        int i = previousSearchQueryData.size() - 1;
        while (i >= 0) {
            match = previousSearchQueryData.get(i);
            if (pattern.equals(match.pattern)) {
                break;
            }
            i--;
        }
        if (i >= 0) {
            match.operation = operation.getSelectionIndex();
            // remove - will be added last (see below)
            previousSearchQueryData.remove(match);
        } else {
            match = new BugzillaSearchData(widget.getText(), operation.getSelectionIndex());
        }
        previousSearchQueryData.add(match);
        return match;
    }

    /**
     * Returns an array of previous summary patterns
     */
    private String[] getPreviousPatterns(ArrayList<BugzillaSearchData> patternHistory) {
        int size = patternHistory.size();
        String[] patterns = new String[size];
        // Reverse so the most recent pattern appears first in the combo.
        for (int i = 0; i < size; i++)
            patterns[i] = (patternHistory.get(size - 1 - i)).pattern;
        return patterns;
    }

    /** Concatenates the server prefix and the query parameters into a full URL. */
    protected String getQueryURL(StringBuffer params) {
        StringBuffer url = new StringBuffer(getQueryURLStart().toString());
        url.append(params);
        return url.toString();
    }

    /**
     * Builds the invariant prefix of the query URL: the server's
     * {@code buglist.cgi} endpoint plus, if configured, URL-encoded login
     * credentials.
     */
    private StringBuffer getQueryURLStart() {
        StringBuffer sb = new StringBuffer(BugzillaPlugin.getDefault().getServerName());
        if (sb.charAt(sb.length() - 1) != '/') {
            sb.append('/');
        }
        sb.append("buglist.cgi?");

        // use the username and password if we have it
        if (BugzillaPreferences.getUserName() != null && !BugzillaPreferences.getUserName().equals("")
                && BugzillaPreferences.getPassword() != null && !BugzillaPreferences.getPassword().equals("")) {
            try {
                sb.append("GoAheadAndLogIn=1&Bugzilla_login="
                        + URLEncoder.encode(BugzillaPreferences.getUserName(), "UTF-8")
                        + "&Bugzilla_password="
                        + URLEncoder.encode(BugzillaPreferences.getPassword(), "UTF-8") + "&");
            } catch (UnsupportedEncodingException e) {
                /*
                 * Do nothing. Every implementation of the Java platform is required
                 * to support the standard charset "UTF-8"
                 */
            }
        }
        return sb;
    }

    /**
     * Goes through the query form and builds up the query parameters.
     *
     * Example: short_desc_type=substring&amp;short_desc=bla&amp; ...
     * @throws UnsupportedEncodingException
     */
    protected StringBuffer getQueryParameters() throws UnsupportedEncodingException {
        StringBuffer sb = new StringBuffer();

        sb.append("short_desc_type=");
        sb.append(patternOperationValues[summaryOperation.getSelectionIndex()]);
        sb.append("&short_desc=");
        sb.append(URLEncoder.encode(summaryPattern.getText(), "UTF-8"));

        int[] selected = product.getSelectionIndices();
        for (int i = 0; i < selected.length; i++) {
            sb.append("&product=");
            sb.append(URLEncoder.encode(product.getItem(selected[i]), "UTF-8"));
        }

        selected = component.getSelectionIndices();
        for (int i = 0; i < selected.length; i++) {
            sb.append("&component=");
            sb.append(URLEncoder.encode(component.getItem(selected[i]), "UTF-8"));
        }

        selected = version.getSelectionIndices();
        for (int i = 0; i < selected.length; i++) {
            sb.append("&version=");
            sb.append(URLEncoder.encode(version.getItem(selected[i]), "UTF-8"));
        }

        selected = target.getSelectionIndices();
        for (int i = 0; i < selected.length; i++) {
            sb.append("&target_milestone=");
            sb.append(URLEncoder.encode(target.getItem(selected[i]), "UTF-8"));
        }

        sb.append("&long_desc_type=");
        sb.append(patternOperationValues[commentOperation.getSelectionIndex()]);
        sb.append("&long_desc=");
        sb.append(URLEncoder.encode(commentPattern.getText(), "UTF-8"));

        selected = status.getSelectionIndices();
        for (int i = 0; i < selected.length; i++) {
            sb.append("&bug_status=");
            sb.append(status.getItem(selected[i]));
        }

        selected = resolution.getSelectionIndices();
        for (int i = 0; i < selected.length; i++) {
            sb.append("&resolution=");
            sb.append(resolution.getItem(selected[i]));
        }

        selected = severity.getSelectionIndices();
        for (int i = 0; i < selected.length; i++) {
            sb.append("&bug_severity=");
            sb.append(severity.getItem(selected[i]));
        }

        selected = priority.getSelectionIndices();
        for (int i = 0; i < selected.length; i++) {
            sb.append("&priority=");
            sb.append(priority.getItem(selected[i]));
        }

        selected = hardware.getSelectionIndices();
        for (int i = 0; i < selected.length; i++) {
            // NOTE(review): standard Bugzilla uses "rep_platform" for the
            // hardware field; "ref_platform" looks like a typo — verify
            // against the server before changing.
            sb.append("&ref_platform=");
            sb.append(URLEncoder.encode(hardware.getItem(selected[i]), "UTF-8"));
        }

        selected = os.getSelectionIndices();
        for (int i = 0; i < selected.length; i++) {
            sb.append("&op_sys=");
            sb.append(URLEncoder.encode(os.getItem(selected[i]), "UTF-8"));
        }

        if (emailPattern.getText() != null) {
            for (int i = 0; i < emailButton.length; i++) {
                if (emailButton[i].getSelection()) {
                    sb.append("&");
                    sb.append(emailRoleValues[i]);
                    sb.append("=1");
                }
            }
            sb.append("&emailtype1=");
            sb.append(emailOperationValues[emailOperation.getSelectionIndex()]);
            sb.append("&email1=");
            sb.append(URLEncoder.encode(emailPattern.getText(), "UTF-8"));
        }

        if (daysText.getText() != null && !daysText.getText().equals("")) {
            try {
                Integer.parseInt(daysText.getText());
                sb.append("&changedin=");
                sb.append(URLEncoder.encode(daysText.getText(), "UTF-8"));
            } catch (NumberFormatException ignored) {
                // this means that the days is not a number, so don't worry
            }
        }

        return sb;
    }

    // Dialog store id constants
    protected final static String PAGE_NAME = "BugzillaSearchPage"; //$NON-NLS-1$

    protected Combo summaryOperation;

    protected List product;

    protected List os;

    protected List hardware;

    protected List priority;

    protected List severity;

    protected List resolution;

    protected List status;

    protected Combo commentOperation;

    protected Combo commentPattern;

    protected List component;

    protected List version;

    protected List target;

    protected Combo emailOperation;

    protected Combo emailPattern;

    protected Button[] emailButton;

    /** File containing saved queries */
    protected static SavedQueryFile input;

    /** "Remember query" button */
    protected Button saveButton;

    /** "Saved queries..." button */
    protected Button loadButton;

    /** Run a remembered query */
    protected boolean rememberedQuery = false;

    /** Index of the saved query to run */
    protected int selIndex;

    protected Button updateButton;

    protected ProgressMonitorDialog monitorDialog = new ProgressMonitorDialog(
            BugzillaPlugin.getDefault().getWorkbench().getActiveWorkbenchWindow().getShell());

    /**
     * Returns the page settings for this Java search page.
     *
     * @return the page settings to be used
     */
    private IDialogSettings getDialogSettings() {
        IDialogSettings settings = BugzillaPlugin.getDefault().getDialogSettings();
        fDialogSettings = settings.getSection(PAGE_NAME);
        if (fDialogSettings == null)
            fDialogSettings = settings.addNewSection(PAGE_NAME);
        return fDialogSettings;
    }

    /**
     * Initializes itself from the stored page settings.
     */
    private void readConfiguration() {
        getDialogSettings();
    }
}
package org.jcryptool.analysis.fleissner.UI; import java.awt.Component; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.UnsupportedEncodingException; import java.lang.reflect.Array; import java.net.URLDecoder; import java.nio.charset.Charset; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Arrays; import java.util.Hashtable; import java.util.Locale; import java.util.concurrent.ThreadLocalRandom; import javax.swing.JDialog; import javax.swing.JFileChooser; import org.bouncycastle.util.encoders.UTF8; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.SashForm; import org.eclipse.swt.custom.ScrolledComposite; import org.eclipse.swt.events.KeyEvent; import org.eclipse.swt.events.ModifyEvent; import org.eclipse.swt.events.ModifyListener; import org.eclipse.swt.events.PaintEvent; import org.eclipse.swt.events.PaintListener; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.events.SelectionListener; import org.eclipse.swt.graphics.Rectangle; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Canvas; import org.eclipse.swt.widgets.Combo; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Event; import org.eclipse.swt.widgets.FileDialog; import org.eclipse.swt.widgets.Group; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Listener; import org.eclipse.swt.widgets.MessageBox; import org.eclipse.swt.widgets.Spinner; import org.eclipse.swt.widgets.Text; import org.eclipse.swt.widgets.Tracker; import org.jcryptool.analysis.fleissner.Activator; import 
org.jcryptool.analysis.fleissner.key.Grille; import org.jcryptool.analysis.fleissner.key.KeySchablone; import org.jcryptool.analysis.fleissner.logic.InvalidParameterCombinationException; import org.jcryptool.analysis.fleissner.logic.MethodApplication; import org.jcryptool.analysis.fleissner.logic.ParameterSettings; import org.jcryptool.core.logging.utils.LogUtil; import org.jcryptool.core.util.colors.ColorService; import org.jcryptool.core.util.constants.IConstants; import org.jcryptool.core.util.directories.DirectoryService; import org.jcryptool.core.util.fonts.FontService; public class FleissnerWindow extends Composite{ private String fileName; private Grille model; private Composite headerComposite; private Composite mainComposite; private Composite analysisOut; private Composite method; private Composite text; private Composite process; private Group key; private Group inOutText; private Group plaintextComposite; private Group ciphertextComposite; private Group analysis; private Group textSelectionGroup; private Group analysisSettingsGroup; private Group methodComposite; private Canvas canvasKey; private KeyListener keyListener; private Text plaintext; private Text ciphertext; private Text analysisOutput; private Text chooseLanguage; private Label statisticNameIdentifier; private Text loadedStatisticName; private Label textNameIdentifier; private Text loadedTextName; private Spinner keySize; private Spinner restarts; private Spinner nGramSize; private Button analyze; private Button encrypt; private Button decrypt; private Button writeText; private Button statistics; private Button start; private Button statisticsLoad; private Button loadStatistics; private Button exampleText; private Button loadOwntext; private Button loadText; private Button deleteHoles; private Button randomKey; private Combo language; private Combo chooseExample; private Combo selectStatistic; private Label numberOfRestarts; private Label chooseNGramSize; private boolean plain = false; 
    // --- UI state flags and method arguments (continued) ---
    private boolean userText = false;          // true when the text was typed or loaded by the user
    private boolean userStatistics = false;    // true when a user-supplied statistic file is active
    private boolean startSettings = true;      // suppresses listener reactions while the UI is built
    private Hashtable<Integer, Integer> htRestarts = new Hashtable<Integer, Integer>();
    private int textState = 0;
    private int languageState = 0;
    private int statisticState = 0;
    private int statisticInputState = 0;
    private int textInputState = 0;
    private int[] keyToLogic = null;
    private String argMethod = null;           // "analyze" | "encrypt" | "decrypt"
    private String argText = "";
    private String argKey = null;
    private double[] argStatistics = null;
    private String argLanguage = null;         // "german" | "english"
    private String[] args;
    private String textName;
    private String statisticName;
    private String oldNgramSize = null;
    private InputStream fis = null;            // current statistics stream
    private InputStream fisOld = null;         // last statistics stream actually loaded
    private LoadFiles lf = new LoadFiles();
    private static final int MIN_WIDTH_LEFT = 800;
    private static final int MIN_WIDTH_RIGHT = 200;

    /**
     * Builds the whole plug-in UI: a vertical sash form with the main controls on
     * top and the analysis output below, each wrapped in a scrolled composite.
     *
     * @param parent parent composite
     * @param style  SWT style bits passed through to {@link Composite}
     */
    public FleissnerWindow(Composite parent, int style) {
        super(parent, style);
        model = new Grille();
        model.setKey(new KeySchablone(7)); // default key length is 7
        SashForm sashForm = new SashForm(this, SWT.BORDER | SWT.BORDER_DASH | SWT.VERTICAL);
        sashForm.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true,1,1));
        GridLayout gridLayoutParent = new GridLayout(3, false);
        GridData gd_txtAlgInformation = new GridData(SWT.FILL, SWT.FILL, true, true);
        ScrolledComposite scrolledMainComposite = new ScrolledComposite(sashForm,/* SWT.H_SCROLL | */SWT.V_SCROLL);
        scrolledMainComposite.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true));
        mainComposite = new Composite(scrolledMainComposite, SWT.NONE);
        mainComposite.setLayout(gridLayoutParent);
        mainComposite.setLayoutData(gd_txtAlgInformation);
        ScrolledComposite scrolledAnalysisOutComposite = new ScrolledComposite(sashForm,/* SWT.H_SCROLL | */SWT.V_SCROLL);
        scrolledAnalysisOutComposite.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true));
        analysisOut = new Composite(scrolledAnalysisOutComposite, SWT.NONE);
        analysisOut.setLayout(gridLayoutParent);
        analysisOut.setLayoutData(gd_txtAlgInformation);
        createHeader(mainComposite);
        createMethod(mainComposite);
        createKey(mainComposite);
        createInOutText(mainComposite);
        Composite platzHalter = new Composite(mainComposite, SWT.NONE); // layout spacer
        platzHalter.setLayout(new GridLayout());
        platzHalter.setLayoutData(new GridData(SWT.FILL, SWT.FILL, false, false, 2,1));
        createText(mainComposite);
        createAnalysisOutput(analysisOut);
        scrolledMainComposite.setContent(mainComposite);
        scrolledMainComposite.setMinSize(mainComposite.computeSize(SWT.DEFAULT, SWT.DEFAULT));
        scrolledMainComposite.setExpandHorizontal(true);
        scrolledMainComposite.setExpandVertical(true);
        scrolledMainComposite.layout();
        scrolledAnalysisOutComposite.setContent(analysisOut);
        scrolledAnalysisOutComposite.setMinSize(analysisOut.computeSize(SWT.DEFAULT, SWT.DEFAULT));
        scrolledAnalysisOutComposite.setExpandHorizontal(true);
        scrolledAnalysisOutComposite.setExpandVertical(true);
        scrolledAnalysisOutComposite.layout();
        int[] weights = { 4,1};
        sashForm.setWeights(weights);
        // Keep the output area's minimum height in sync with its visible client area.
        scrolledAnalysisOutComposite.addListener(SWT.Resize, new Listener(){
            @Override
            public void handleEvent(Event arg0) {
                Rectangle r = scrolledAnalysisOutComposite.getClientArea();
                if (analysisOut.getBounds().height>r.height) {
                    gd_txtAlgInformation.minimumHeight = r.height;
                    analysisOut.setLayoutData(gd_txtAlgInformation);
                    sashForm.setWeights(weights);
                }
            }
        });
        startSettings = false; // construction finished; listeners may react now
        reset("Konstruktor");
    }

    /**
     * Creates the "Methode" group with the three method radio buttons
     * (analyze / encrypt / decrypt) and the start buttons.
     *
     * @param parent parent composite
     */
    private void createMethod(Composite parent) {
        methodComposite = new Group(parent, SWT.NONE);
        methodComposite.setLayout(new GridLayout());
        methodComposite.setLayoutData(new GridData(SWT.FILL, SWT.FILL, false, false, 1, 2));
        methodComposite.setText("Methode");
        method = new Composite(methodComposite, SWT.NONE);
        method.setLayout(new GridLayout());
        method.setLayoutData(new GridData(SWT.FILL, SWT.UP, false, true));
        process = new Composite(methodComposite, SWT.NONE);
        process.setLayout(new GridLayout());
        process.setLayoutData(new GridData(SWT.FILL, SWT.DOWN, false,
false)); analyze = new Button(method, SWT.RADIO); analyze.setText("Analyse"); analyze.addSelectionListener(new SelectionListener() { public void widgetDefaultSelected(SelectionEvent e) { widgetSelected(e); } public void widgetSelected(SelectionEvent e) { if (!startSettings) { if (analyze.getSelection()) { if (!argMethod.equals("analyze")) { System.out.println("Analyze selected. Method 'analyze' will be started."); analyze(); checkArgs("analyze Selection Listener"); reset("'analyze'-Listener"); } } else { // resetAnalysisSettings(); notwendig??? analysisOutput.setText("Gefundene Schablone/ Fortschritt\n"); } } } }); analyze.setSelection(true); argMethod = "analyze"; encrypt = new Button(method, SWT.RADIO); encrypt.setText("Verschlüsselung"); encrypt.addSelectionListener(new SelectionListener() { public void widgetDefaultSelected(SelectionEvent e) { widgetSelected(e); } public void widgetSelected(SelectionEvent e) { if (encrypt.getSelection()) { if (!argMethod.equals("enrypt")) { encrypt(); checkArgs("Encrypt Selection Listener"); reset("'encrypt'-Listener"); } } } }); decrypt = new Button(method, SWT.RADIO); decrypt.setText("Entschlüsselung"); decrypt.addSelectionListener(new SelectionListener() { public void widgetDefaultSelected(SelectionEvent e) { widgetSelected(e); } public void widgetSelected(SelectionEvent e) { if (decrypt.getSelection()) { if (!argMethod.equals("decrypt")) { decrypt(); checkArgs("decrypt Selection Listener"); reset("'decrypt'-Listener"); } } } }); GridData methods = new GridData(SWT.FILL, SWT.TOP, true, true); analyze.setLayoutData(methods); encrypt.setLayoutData(methods); decrypt.setLayoutData(methods); start = new Button(process, SWT.PUSH); start.setText("Start"); start.setEnabled(true); start.setToolTipText("Führt die ausgewählte Methode aus"); start.addSelectionListener(new SelectionListener() { @Override public void widgetDefaultSelected(SelectionEvent e) { widgetSelected(e); } @Override public void widgetSelected(SelectionEvent e) { 
checkArgs("start Selection Listener"); startMethod(); } }); Button example = new Button(process, SWT.PUSH); example.setText("Beispielanalyse"); example.setToolTipText("Führt eine Analyse mit vorgegebenen Daten - Text, Sprache, Sprachstatistik - durch"); example.addSelectionListener(new SelectionListener() { @Override public void widgetDefaultSelected(SelectionEvent e) { widgetSelected(e); } @Override public void widgetSelected(SelectionEvent e) { exampleAnalysis(); } }); GridData startOptions = new GridData(SWT.FILL, SWT.TOP, true, true); start.setLayoutData(startOptions); example.setLayoutData(startOptions); } private void createKey(Composite parent) { key = new Group(parent, SWT.NONE); key.setLayout(new GridLayout(3, false)); key.setLayoutData(new GridData(SWT.FILL, SWT.FILL, false, false, 1, 2)); key.setText("Schlüssel"); canvasKey = new Canvas(key, SWT.DOUBLE_BUFFERED); canvasKey.setBackground(Display.getCurrent().getSystemColor(SWT.COLOR_WHITE)); canvasKey.addPaintListener(new KeyPainter(canvasKey, model)); keyListener = new org.jcryptool.analysis.fleissner.UI.KeyListener(model, this); canvasKey.addMouseListener(keyListener); GridData gridData = new GridData(SWT.FILL, SWT.FILL, false, false, 1, 4); gridData.widthHint = 201; gridData.heightHint = 201; canvasKey.setLayoutData(gridData); canvasKey.setEnabled(false); Label spinner = new Label(key, SWT.NONE); spinner.setText("Schlüssellänge"); //$NON-NLS-1$ GridData gd_spinner = new GridData(SWT.FILL, SWT.TOP, false, true); gd_spinner.horizontalIndent = 20; spinner.setLayoutData(gd_spinner); keySize = new Spinner(key, SWT.NONE); keySize.setMinimum(2); keySize.setMaximum(20); keySize.setIncrement(1); keySize.setSelection(7); keySize.setEnabled(true); GridData gd_keySize = new GridData(SWT.LEFT, SWT.TOP, false, true); gd_keySize.horizontalIndent = 20; keySize.setLayoutData(gd_keySize); keySize.addSelectionListener(new SelectionListener() { public void widgetDefaultSelected(SelectionEvent e) { widgetSelected(e); } 
            public void widgetSelected(SelectionEvent e) {
                // Clamp out-of-range input back to the default length 7.
                if (Integer.parseInt(keySize.getText()) > 20 || Integer.parseInt(keySize.getText()) < 2)
                    keySize.setSelection(7);
                model.setKey(new KeySchablone(Integer.parseInt(keySize.getText())));
                reset("'keySize'-Listener");
                // Re-register the mouse listener for the freshly created key.
                canvasKey.removeMouseListener(keyListener);
                canvasKey.addMouseListener(keyListener);
                if (exampleText.getSelection() && !plain)
                    refreshExampleText();
                reset("keySize");
            }
        });
        randomKey = new Button(key, SWT.PUSH);
        GridData gd_setHoles = new GridData(SWT.FILL, SWT.BOTTOM, false, false,2,1);
        gd_setHoles.horizontalIndent = 20;
        randomKey.setLayoutData(gd_setHoles);
        randomKey.setEnabled(false);
        randomKey.setText("Zufälliger Schlüssel");
        randomKey.setToolTipText("Erzeugt zufällig einen Schlüssel in der ausgewählten Schlüssellänge zur Ver- oder Entschlüsselung");
        randomKey.addSelectionListener(new SelectionListener() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                generateRandomKey();
                reset("'randomKey'-Listener");
            }

            @Override
            public void widgetDefaultSelected(SelectionEvent e) {
                widgetSelected(e);
            }
        });
        deleteHoles = new Button(key, SWT.PUSH);
        GridData gd_deleteHoles = new GridData(SWT.FILL, SWT.BOTTOM, false, false,2,1);
        gd_deleteHoles.horizontalIndent = 20;
        deleteHoles.setLayoutData(gd_deleteHoles);
        deleteHoles.setEnabled(false);
        deleteHoles.setText("Schlüssel zurücksetzen"); //$NON-NLS-1$
        deleteHoles.setToolTipText("Setzt die ausgewählten Schlüsselfelder zurück");
        deleteHoles.addSelectionListener(new SelectionListener() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                deleteHoles();
                reset("'deleteHoles'-Listener");
            }

            @Override
            public void widgetDefaultSelected(SelectionEvent e) {
                widgetSelected(e);
            }
        });
    }

    /**
     * Creates the group holding the plaintext and ciphertext widgets.
     *
     * @param parent parent composite
     */
    private void createInOutText(Composite parent) {
        inOutText = new Group(parent, /*SWT.V_SCROLL*/SWT.NONE);
        inOutText.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, false, 1, 6));
        // Composite should have no border, so it looks like the groups on the left.
        GridLayout gl_inOutText = new GridLayout(1,false);
        inOutText.setLayout(gl_inOutText);
        createPlaintext(inOutText);
        createCiphertext(inOutText);
    }

    /**
     * Creates the plaintext group; the group title mirrors the current text length.
     *
     * @param parent parent composite
     */
    private void createPlaintext(Composite parent) {
        plaintextComposite = new Group(parent, SWT.NONE);
        plaintextComposite.setLayout(new GridLayout());
        plaintextComposite.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true));
        // NOTE(review): reads "Klartext(0)" (no space) while the ciphertext group uses
        // "Geheimtext (0)" — looks inconsistent; the modify listener below rewrites the
        // title with a space anyway.
        plaintextComposite.setText("Klartext" + "(0)");
        plaintext = new Text(plaintextComposite, SWT.MULTI | SWT.WRAP | SWT.V_SCROLL);
        GridData gridData = new GridData(SWT.FILL, SWT.FILL, true, true);
        gridData.widthHint = plaintextComposite.getSize().y;
        gridData.heightHint = plaintextComposite.getSize().x;
        plaintext.setLayoutData(gridData);
        plaintext.setBackground(ColorService.WHITE);
        plaintext.setEditable(false);
        plaintext.setEnabled(false);
        plaintext.addKeyListener(new org.eclipse.swt.events.KeyListener() {
            @Override
            public void keyPressed(KeyEvent e) {
                e.doit = true;
            }

            @Override
            public void keyReleased(KeyEvent e) {
            }
        });
        plaintext.addModifyListener(new ModifyListener() {
            @Override
            public void modifyText(ModifyEvent e) {
                // Keep the character count in the group title up to date.
                plaintextComposite.setText("Klartext (" + plaintext.getText().length() + ")"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
                if (!startSettings) {
                    // Manual plaintext input only feeds argText in encryption mode.
                    if (writeText.getSelection() && encrypt.getSelection()) {
                        argText = plaintext.getText();
                        reset("'plaintext'-ModifyListener");
                    }
                }
            }
        });
    }

    /**
     * Creates the ciphertext group and preloads the default example ciphertext.
     *
     * @param parent parent composite
     */
    private void createCiphertext(Composite parent) {
        ciphertextComposite = new Group(parent, SWT.NONE);
        ciphertextComposite.setLayout(new GridLayout());
        ciphertextComposite.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true));
        ciphertextComposite.setText("Geheimtext" + " (0)");
        ciphertext = new Text(ciphertextComposite, SWT.MULTI | SWT.WRAP | SWT.V_SCROLL);
        GridData gridData = new GridData(SWT.FILL, SWT.FILL, true, true);
        gridData.widthHint = ciphertextComposite.getSize().y;
        gridData.heightHint = ciphertextComposite.getSize().x;
        ciphertext.setLayoutData(gridData);
        ciphertext.setEnabled(true);
        ciphertext.setEditable(false);
        ciphertext.setBackground(ColorService.WHITE);
        // Preload the default example ciphertext (Dawkins, German, key length 7).
        textName = "files/dawkinsGerCiphertext7.txt";
        argText = lf.InputStreamToString(lf.openMyTestStream(textName));
        resetTexts();
        setArgText();
        ciphertext.addKeyListener(new org.eclipse.swt.events.KeyListener() {
            @Override
            public void keyPressed(KeyEvent e) {
                e.doit = true;
            }

            @Override
            public void keyReleased(KeyEvent e) {
            }
        });
        ciphertext.addModifyListener(new ModifyListener() {
            @Override
            public void modifyText(ModifyEvent e) {
                // Keep the character count in the group title up to date.
                ciphertextComposite.setText("Geheimtext (" + ciphertext.getText().length() + ")"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
                if (!startSettings) {
                    // Manual ciphertext input feeds argText in analyze/decrypt mode.
                    if (writeText.getSelection() && !encrypt.getSelection()) {
                        argText =ciphertext.getText();
                        reset("'ciphertext'-ModifyListener");
                    }
                }
                // reset("'ciphertext'-ModifyListener");
            }
        });
        ciphertextComposite.setText("Geheimtext (" + ciphertext.getText().length() + ")");
    }

    /**
     * Creates the text-selection group and the analysis-settings group
     * (language, number of restarts, statistics selection).
     *
     * @param parent parent composite
     */
    private void createText(Composite parent) {
        textSelectionGroup = new Group(parent, SWT.NONE);
        textSelectionGroup.setLayout(new GridLayout(4, false));
        textSelectionGroup.setLayoutData(new GridData(SWT.FILL, SWT.FILL, false, false, 2, 1));
        textSelectionGroup.setText("Textauswahl");
        Composite platzHalter = new Composite(parent, SWT.NONE); // layout spacer
        platzHalter.setLayout(new GridLayout());
        platzHalter.setLayoutData(new GridData(SWT.FILL, SWT.FILL, false, false, 2,1));
        analysisSettingsGroup = new Group(parent, SWT.NONE);
        analysisSettingsGroup.setLayout(new GridLayout(4, false));
        analysisSettingsGroup.setLayoutData(new GridData(SWT.FILL, SWT.UP, false, false, 2, 1));
        analysisSettingsGroup.setText("Analyseeinstellungen");
        analysisSettingsGroup.setEnabled(true);
        createLoadtextComposite(textSelectionGroup);
        String[] items = {"Deutsch", "Englisch"};
        Composite languageAndRestarts = new Composite(analysisSettingsGroup, SWT.NONE);
        languageAndRestarts.setLayout(new GridLayout(4, false));
        languageAndRestarts.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, false, 4, 1));
        Group statisticGroup = new Group(analysisSettingsGroup, SWT.NONE);
        statisticGroup.setLayout(new GridLayout(4,false));
        statisticGroup.setLayoutData(new GridData(SWT.FILL, SWT.FILL, false, false, 4, 1));
        statisticGroup.setText("Statistikauswahl");
        chooseLanguage = new Text(languageAndRestarts, SWT.WRAP | SWT.MULTI);
        chooseLanguage.setText("Sprache");
        chooseLanguage.setLayoutData(new GridData(SWT.LEFT, SWT.FILL, false, false));
        chooseLanguage.setBackground(ColorService.LIGHTGRAY);
        // Create a dropdown Combo
        language = new Combo(languageAndRestarts, SWT.DROP_DOWN | SWT.READ_ONLY);
        language.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, false));
        language.setItems(items);
        language.select(0);
        argLanguage = "german";
        language.addSelectionListener(new SelectionListener() {
            public void widgetDefaultSelected(SelectionEvent e) {
                widgetSelected(e);
            }

            public void widgetSelected(SelectionEvent e) {
                // React only to an actual language change; keep statistic and
                // example text consistent with the new language.
                if (languageState != language.getSelectionIndex()) {
                    updateLanguageSettings("language");
                    if (statistics.getSelection()) {
                        resetStatistics();
                        statisticState = selectStatistic.getSelectionIndex();
                    }
                    if (exampleText.getSelection()) {
                        refreshExampleText();
                        textState = chooseExample.getSelectionIndex();
                    }
                    setArgLanguage();
                    checkArgs("language Selection Listener");
                    languageState = language.getSelectionIndex();
                    reset("'language'-Listener");
                }
            }
        });
        numberOfRestarts = new Label(languageAndRestarts, SWT.NONE);
        numberOfRestarts.setText("Restarts"); //$NON-NLS-1$
        numberOfRestarts.setLayoutData(new GridData(SWT.RIGHT, SWT.FILL, true, false));
        restarts = new Spinner(languageAndRestarts, SWT.NONE);
        restarts.setMinimum(1);
        restarts.setMaximum(1400);
        restarts.setIncrement(1);
        restarts.setSelection(5);
        restarts.setEnabled(true);
        restarts.setLayoutData(new GridData(SWT.LEFT, SWT.TOP, false, true));
        restarts.setToolTipText("Bestimmt die Anzahl die Wiederholungen mit neuen Zufallsschablonen bei der Hill-Climbing Methode");
        createLoadstatisticsComposite(statisticGroup);
    }

    /**
     * Hier findet das laden einer
     * Datei statt.
     * @param firstGroup
     */
    private void createLoadtextComposite(Composite thirdGroup) {
        // TODO Auto-generated method stub
        String[] items = {"Richard Dawkins - Der Gotteswahn", "Wikipedia - Frühchristliche Kunst", "Richard Dawkins - The God Delusion", "Wikipedia - Visual Arts"};
        exampleText = new Button(thirdGroup, SWT.RADIO);
        exampleText.setLayoutData(new GridData(SWT.FILL, SWT.FILL, false, false));
        exampleText.setText("Beispieltext");
        exampleText.addSelectionListener(new SelectionListener() {
            public void widgetDefaultSelected(SelectionEvent e) {
                widgetSelected(e);
            }

            public void widgetSelected(SelectionEvent e) {
                if (exampleText.getSelection()) {
                    // Switch back to the built-in example text source.
                    if (textInputState != 0) {
                        loadedTextName.setText("");
                        userText = false;
                        System.out.println("UserText (in exampleText): "+String.valueOf(userText));
                        textSelection(true, false, false, false);
                        refreshExampleText();
                        checkArgs("Example Text Selection Listener");
                        textInputState = 0;
                    }
                    reset("'exampleText'-Listener");
                }
            }
        });
        exampleText.setSelection(true);
        // Create a dropdown Combo
        chooseExample = new Combo(thirdGroup, SWT.DROP_DOWN | SWT.READ_ONLY);
        chooseExample.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true, 3, 1));
        chooseExample.setItems(items);
        chooseExample.select(0);
        chooseExample.addSelectionListener(new SelectionListener() {
            public void widgetDefaultSelected(SelectionEvent e) {
                widgetSelected(e);
            }

            public void widgetSelected(SelectionEvent e) {
                if (textState!=chooseExample.getSelectionIndex()) {
                    userText = false;
                    refreshExampleText();
                    // Changing the example may change its language; follow suit.
                    if (checkTextLangChange()) {
                        updateLanguageSettings("text");
                        if (statistics.getSelection()) {
                            resetStatistics();
                            statisticState = selectStatistic.getSelectionIndex();
                        }
                        setArgLanguage();
                        languageState = language.getSelectionIndex();
                    }
                    if (argMethod.equals("analyze"))
                        analysisOutput.setText("Gefundene Schablone/ Fortschritt\n");
                    textState = chooseExample.getSelectionIndex();
                    checkArgs("chooseExample SelectionListener");
                    reset("'chooseExample'-Listener");
                }
            }
        });
        writeText = new Button(thirdGroup, SWT.RADIO);
        writeText.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, false, 4, 1));
        writeText.setText("Geheimtext manuell eingeben");
        loadOwntext = new Button(thirdGroup, SWT.RADIO);
        loadOwntext.setLayoutData(new GridData(SWT.FILL, SWT.FILL, false, false));
        loadOwntext.setText("Eigener Text");
        loadOwntext.addSelectionListener(new SelectionListener() {
            public void widgetDefaultSelected(SelectionEvent e) {
                widgetSelected(e);
            }

            public void widgetSelected(SelectionEvent e) {
                if (loadOwntext.getSelection()) {
                    // Switch to the "own text file" source; clears current text.
                    if (textInputState != 1) {
                        deleteHoles();
                        userText = true;
                        argText = "";
                        textName = "";
                        resetTexts();
                        textSelection(false, true, false, false);
                        checkArgs("loadOwnText SelectionListener");
                        textInputState = 1;
                        reset("'loadOwnText'-Listener");
                    }
                }
            }
        });
        loadText = new Button(thirdGroup, SWT.PUSH);
        loadText.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, false, 3, 1));
        loadText.setText("Text laden");
        loadText.setEnabled(false);
        loadText.setToolTipText("Hier kann eine eigene Textdatei geladen werden. Kodierung: UTF-8");
        loadText.addSelectionListener(new SelectionListener() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                // TODO Auto-generated method stub
                String filename = openFileDialog(SWT.OPEN);
                // NOTE(review): splitting on backslashes assumes Windows paths — TODO confirm.
                String[] a = filename.split("\\\\");
                textName = filename;
                System.out.println("Dialog opened. File: "+filename+" choosen");
                argText = loadNormal(filename);
                userText = true;
                loadedTextName.setText(a[a.length-1]);
                if (argMethod.equals("analyze"))
                    analysisOutput.setText("Gefundene Schablone/ Fortschritt\n");
                resetTexts();
                reset("loadText-Listener");
                checkArgs("loadtext SelectionListener");
            }

            @Override
            public void widgetDefaultSelected(SelectionEvent e) {
                // TODO Auto-generated method stub
                widgetSelected(e);
            }
        });
        textNameIdentifier = new Label(thirdGroup, SWT.NONE);
        textNameIdentifier.setText("Geladener Text: "); //$NON-NLS-1$
        textNameIdentifier.setLayoutData(new GridData(SWT.LEFT, SWT.CENTER, false, true));
        // textNameIdentifier.setVisible(false);
        loadedTextName = new Text(thirdGroup, /*SWT.WRAP | SWT.MULTI*/ SWT.NONE);
        loadedTextName.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, true, true));
        loadedTextName.setBackground(ColorService.LIGHTGRAY);
        // loadedTextName.setVisible(false);
        // writeText = new Button(thirdGroup, SWT.RADIO);
        // writeText.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, false, 2, 1));
        // writeText.setText("Geheimtext manuell eingeben");
        writeText.addSelectionListener(new SelectionListener() {
            public void widgetDefaultSelected(SelectionEvent e) {
                widgetSelected(e);
            }

            public void widgetSelected(SelectionEvent e) {
                if (writeText.getSelection()) {
                    loadedTextName.setText("");
                    // analyze.setEnabled(false);
                    boolean editPlaintext = false;
                    boolean editCiphertext = false;
                    userText = true;
                    // In encryption mode the plaintext side is editable, otherwise the
                    // ciphertext side.
                    if (plain) {
                        editPlaintext = true;
                        editCiphertext = false;
                    }else {
                        editPlaintext = false;
                        editCiphertext = true;
                    }
                    if (textInputState != 2) {
                        deleteHoles();
                        argText = "";
                        resetTexts();
                    }
                    textSelection(false, false, editPlaintext, editCiphertext);
                    checkArgs("writeText SelectionListener");
                    reset("'writeText'-Listener");
                    textInputState = 2;
                }
            }
        });
    }

    /**
     * Creates the statistics selection controls (built-in vs. user-loaded statistic).
     *
     * @param thirdGroup parent group
     */
    private void createLoadstatisticsComposite(Group thirdGroup) {
        // TODO Auto-generated method stub
        String[] items = { "de-4-gram-nocs.bin"/*, "de-3-gram-nocs.bin"*/, "en-4-gram-nocs.bin", "en-3-gram-nocs.bin"};
        statistics = new Button(thirdGroup, SWT.RADIO);
        statistics.setSelection(true);
        statistics.setText("Sprachstatistik");
        statistics.addSelectionListener(new SelectionListener() {
            public void widgetDefaultSelected(SelectionEvent e) {
                widgetSelected(e);
            }

            public void widgetSelected(SelectionEvent e) {
                if (statistics.getSelection()) {
                    // Switch back to the built-in statistics source.
                    if (statisticInputState != 0) {
                        // statisticNameIdentifier.setVisible(false);
                        // loadedStatisticName.setVisible(false);
                        loadedStatisticName.setText("");
                        userStatistics = false;
                        statisticSelection();
                        updateLanguageSettings("text");
                        resetStatistics();
                        checkArgs("statistics SelectionListener");
                        reset("'statistics'-Listener");
                        statisticInputState = 0;
                    }
                }
            }
        });
        // Create a dropdown Combo
        selectStatistic = new Combo(thirdGroup, SWT.DROP_DOWN | SWT.READ_ONLY);
        selectStatistic.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true, 3, 1));
        selectStatistic.setItems(items);
        selectStatistic.select(0);
        statisticName = lf.statisticFiles(selectStatistic.getSelectionIndex());
        fis = lf.openMyFileStream(statisticName);
        selectStatistic.addSelectionListener(new SelectionListener() {
            public void widgetDefaultSelected(SelectionEvent e) {
                widgetSelected(e);
            }

            public void widgetSelected(SelectionEvent e) {
                if (statisticState != selectStatistic.getSelectionIndex()) {
                    // Index 2 is the only trigram statistic in the list; others are quadgrams.
                    if (selectStatistic.getSelectionIndex()==2) {
                        nGramSize.setSelection(3);
                    } else {
                        nGramSize.setSelection(4);
                    }
                    if (statistics.getSelection()) {
                        resetStatistics();
                        statisticState = selectStatistic.getSelectionIndex();
                    }
                    // The chosen statistic may imply a different language; follow suit.
                    if (checkStatisticLangChange()) {
                        updateLanguageSettings("statistic");
                        if (exampleText.getSelection()) {
                            refreshExampleText();
                            textState = chooseExample.getSelectionIndex();
                        }
                        setArgLanguage();
                        languageState = language.getSelectionIndex();
                    }
                    checkArgs("selectStatistic SelectionListener");
                    reset("'selectStatistic'-Listener Ende");
                }
            }
        });
        statisticsLoad = new Button(thirdGroup, SWT.RADIO);
        statisticsLoad.setText("Eigene Statistik");
        statisticsLoad.addSelectionListener(new SelectionListener() {
            public void widgetDefaultSelected(SelectionEvent e) {
                widgetSelected(e);
            }

            public void widgetSelected(SelectionEvent e) {
                if (statisticsLoad.getSelection()) {
                    // Switch to a user-supplied statistic; clears the current stream.
                    if (statisticInputState != 1) {
                        fis = null;
                        statisticName = "";
                        userStatistics = true;
                        statisticSelection();
                        checkArgs("statisticsLoad SelectionListener");
                        reset("statisticsLoad'-Listener");
                        statisticInputState = 1;
                    }
                }
            }
        });
        loadStatistics = new Button(thirdGroup, SWT.PUSH);
        loadStatistics.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, false));
        loadStatistics.setText("Statistik laden");
        loadStatistics.setEnabled(false);
        loadStatistics.setToolTipText("Hier kann eine eigene Sprachstatistik geladen werden. Format: Für Quadgramme bspw. sortiert von AAAA bis ZZZZ, wobei jedes Datum ein double (8byte) ist. Die Statistik sollte bereits logarithmisiert sein");
        loadStatistics.addSelectionListener(new SelectionListener() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                // TODO Auto-generated method stub
                String filename = openStatFileDialog(SWT.OPEN);
                try {
                    fis = new FileInputStream(filename);
                    // NOTE(review): splitting on backslashes assumes Windows paths — TODO confirm.
                    String[] a = filename.split("\\\\");
                    statisticName = filename;
                    // loadedStatisticName.setText(filename);
                    loadedStatisticName.setText(a[a.length-1]);
                    // loadedStatisticName.setVisible(true);
                    // statisticNameIdentifier.setVisible(true);
                    userStatistics = true;
                } catch (FileNotFoundException e1) {
                    // TODO Auto-generated catch block
                    e1.printStackTrace();
                    LogUtil.logError(Activator.PLUGIN_ID, "Statistik auswählen", e1, true);
                }
                checkArgs("loadStatistics SelectionListener");
                reset("loadStatistics'-Listener Ende");
            }

            @Override
            public void widgetDefaultSelected(SelectionEvent e) {
                // TODO Auto-generated method stub
            }
        });
        chooseNGramSize = new Label(thirdGroup, SWT.NONE);
        chooseNGramSize.setText("Größe nGram"); //$NON-NLS-1$
        chooseNGramSize.setLayoutData(new GridData(SWT.RIGHT, SWT.FILL, true, false));
        nGramSize = new Spinner(thirdGroup, SWT.NONE);
        nGramSize.setMinimum(3);
        nGramSize.setMaximum(4);
        nGramSize.setIncrement(1);
        nGramSize.setSelection(4);
        nGramSize.setEnabled(false);
        nGramSize.setLayoutData(new GridData(SWT.LEFT, SWT.TOP, false, true));
        statisticNameIdentifier = new Label(thirdGroup, SWT.NONE);
        statisticNameIdentifier.setText("Geladene Statistik: "); //$NON-NLS-1$
        statisticNameIdentifier.setLayoutData(new GridData(SWT.LEFT, SWT.FILL, true, false, 1, 1));
        // statisticNameIdentifier.setVisible(false);
        loadedStatisticName = new Text(thirdGroup, SWT.WRAP | SWT.MULTI);
        loadedStatisticName.setLayoutData(new GridData(SWT.FILL, SWT.FILL, false, false, 3, 1));
        loadedStatisticName.setBackground(ColorService.LIGHTGRAY);
        // loadedStatisticName.setVisible(false);
    }

    /**
     * Creates the group that displays analysis progress and results.
     *
     * @param parent parent composite
     */
    private void createAnalysisOutput(Composite parent) {
        analysis = new Group(parent, /* SWT.V_SCROLL*/SWT.NONE/*SWT.DRAG*/);
        analysis.setLayout(new GridLayout());
        analysis.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true/*, 3, 1*/));
        analysis.setText("Ausgabe Analyse");
        analysisOutput = new Text(analysis, SWT.MULTI | SWT.WRAP | SWT.V_SCROLL);
        GridData gridData = new GridData(SWT.FILL, SWT.FILL, true, true);
        analysisOutput.setLayoutData(gridData);
        analysisOutput.setText("Gefundene Schablone/ Fortschritt\n");
        analysisOutput.setEditable(false);
        analysisOutput.setBackground(ColorService.WHITE);
    }

    /**
     * Opens a file dialog filtered to *.txt files.
     *
     * @param type dialog style, e.g. {@link SWT#OPEN}
     * @return the selected path, or null if the dialog was cancelled
     */
    private String openFileDialog(int type) {
        FileDialog dialog = new FileDialog(getDisplay().getActiveShell(), type);
        dialog.setFilterPath(DirectoryService.getUserHomeDir());
        dialog.setFilterExtensions(new String[] { "*.txt" }); //$NON-NLS-1$
        dialog.setFilterNames(new String[] { "Text Files (*.txt)" }); //$NON-NLS-1$
        dialog.setOverwrite(true);
        return dialog.open();
    }

    /**
     * Opens a file dialog filtered to *.bin statistics files.
     *
     * @param type dialog style, e.g. {@link SWT#OPEN}
     * @return the selected path, or null if the dialog was cancelled
     */
    private String/*File*/ openStatFileDialog(int type/*Component type*/) {
        FileDialog dialog = new FileDialog(getDisplay().getActiveShell(), type);
        dialog.setFilterPath(DirectoryService.getUserHomeDir());
        dialog.setFilterExtensions(new String[] { "*.bin" }); //$NON-NLS-1$
dialog.setFilterNames(new String[] { "Binary Files (*.bin)" }); //$NON-NLS-1$ dialog.setOverwrite(true); return dialog.open(); } private String loadNormal(String fileName) { System.out.println("loadNormalFile geöffnet"); BufferedReader reader = null; String text = ""; System.out.println("Buffered reader geöffnet und 'null' gesetzt"); try { System.out.println("try-Block eröffnet"); // InputStream is = new InputStream(fileName); // reader = new BufferedReader(new InputStreamReader( // new FileInputStream(fileName), Charset.forName("Cp1252"))); reader = new BufferedReader(new InputStreamReader(new FileInputStream(fileName), IConstants.UTF8_ENCODING)); System.out.println("new Buffered Reader von "+fileName+" gesetzt"); String line = reader.readLine(); System.out.println("Erste Zeile ausgelesen als 'line': "+line); int count = 1; while (line != null) { text += line; System.out.println(count+". Zeile in 'text' geschrieben (in reader while schleife): "+text); LogUtil.logInfo(count+". Zeile in 'text' geschrieben (in reader while schleife): "+text); count++; line = reader.readLine(); } } catch (NumberFormatException nfe) { LogUtil.logError(Activator.PLUGIN_ID, nfe); MessageBox brokenFile = new MessageBox(getDisplay().getActiveShell(), SWT.OK); brokenFile.setText("Loading puzzle encountered a problem"); //$NON-NLS-1$ brokenFile.setMessage("Puzzle could not be loaded. There is a wrong character in the loaded file.\n"); //$NON-NLS-1$ brokenFile.open(); } catch (FileNotFoundException e) { LogUtil.logError(Activator.PLUGIN_ID, "text could not be loaded", e, true); return "text could not be loaded"; //Error handling } catch (IOException e) { LogUtil.logError(Activator.PLUGIN_ID, "text could not be loaded", e, true); return "text could not be loaded"; } finally { try { reader.close(); } catch (IOException e) { LogUtil.logError(Activator.PLUGIN_ID, e); } } return text; } /** * Creates the header and short description. 
     * @param parent
     */
    private void createHeader(Composite parent) {
        // TODO Auto-generated method stub
        headerComposite = new Composite(parent, SWT.NONE);
        headerComposite.setBackground(ColorService.WHITE);
        headerComposite.setLayout(new GridLayout());
        headerComposite.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, false, 3, 1));
        Text headerText = new Text(headerComposite, SWT.NONE);
        headerText.setText("Fleißner-Analyse");
        headerText.setLayoutData(new GridData(SWT.LEFT, SWT.UP, false, true));
        headerText.setFont(FontService.getHugeBoldFont());
        headerText.setEditable(false);
        headerText.setBackground(ColorService.WHITE);
        Text descriptionText = new Text(headerComposite, SWT.WRAP | SWT.MULTI);
        descriptionText.setText("Hier kommt eine kurze Beschreibung hin.");
        descriptionText.setLayoutData(new GridData(SWT.FILL, SWT.UP, false, true));
        descriptionText.setEditable(false);
        descriptionText.setBackground(ColorService.WHITE);
    }

    /**
     * Enables/disables the text input widgets according to the chosen text source.
     *
     * @param enableChooseExample enable the example-text combo
     * @param enableLoadText      enable the "load own text" button
     * @param editPlaintext       make the plaintext widget editable
     * @param editCiphertext      make the ciphertext widget editable
     */
    public void textSelection(boolean enableChooseExample, boolean enableLoadText, boolean editPlaintext, boolean editCiphertext) {
        System.out.println("textSelection-Method started\n");
        chooseExample.setEnabled(enableChooseExample);
        loadText.setEnabled(enableLoadText);
        plaintext.setEditable(editPlaintext);
        ciphertext.setEditable(editCiphertext);
        if (argMethod.equals("analyze"))
            analysisOutput.setText("Gefundene Schablone/ Fortschritt\n");
        // if (!enableLoadText)
        //     loadedTextName.setText("");
    }

    /**
     * Toggles between the built-in statistic combo and the user-statistic loader,
     * based on the userStatistics flag.
     */
    public void statisticSelection() {
        System.out.println("statisticSelection-Method started\n");
        // reset("statisticSelection() Anfang");
        if (!userStatistics) {
            selectStatistic.setEnabled(true);
            loadStatistics.setEnabled(false);
            nGramSize.setEnabled(false);
            nGramSize.setSelection(4);
        } else {
            selectStatistic.setEnabled(false);
            loadStatistics.setEnabled(true);
            nGramSize.setEnabled(true);
        }
    }

    /**
     * Maps the language combo selection to the argument string:
     * index 0 -> "german", otherwise "english".
     */
    public void setArgLanguage() {
        System.out.println("setArgLanguage-Method started\n");
        // reset("setArgLanguage() Anfang");
        if (language.getSelectionIndex()==0) {
            argLanguage = "german";
        } else {
            argLanguage = "english";
        }
    }

    /**
     * Switches the UI into analysis mode: key editing disabled, ciphertext active,
     * all analysis settings (language, restarts, statistics) enabled.
     */
    public void analyze() {
        plain = false;
        argMethod = "analyze";
        System.out.println("Methode: 'Analyse' (analyze()) gestartet\n");
        setArgText();
        deleteHoles();
        // key settings
        canvasKey.setEnabled(false);
        randomKey.setEnabled(false);
        deleteHoles.setEnabled(false);
        plaintext.setEnabled(false);
        plaintext.setForeground(null);
        ciphertext.setEnabled(true);
        ciphertext.setForeground(null);
        // analysis settings
        analysis.setEnabled(true);
        analysisSettingsGroup.setEnabled(true);
        analysisOutput.setEnabled(true);
        chooseLanguage.setEnabled(true);
        language.setEnabled(true);
        setArgLanguage();
        restarts.setEnabled(true);
        statistics.setEnabled(true);
        statisticsLoad.setEnabled(true);
        if (statistics.getSelection()) {
            nGramSize.setEnabled(false);
            selectStatistic.setEnabled(true);
            loadStatistics.setEnabled(false);
            resetStatistics();
        }else {
            nGramSize.setEnabled(true);
            selectStatistic.setEnabled(false);
            loadStatistics.setEnabled(true);
            statisticName = "";
            fis = null;
        }
    }

    /**
     * Switches the UI into encryption mode: key editing enabled, plaintext active,
     * analysis settings disabled and the statistic selection cleared.
     */
    public void encrypt() {
        System.out.println("encrypt()-Method entered");
        plain = true;
        argMethod = "encrypt";
        deleteHoles();
        setArgText();
        // key settings
        canvasKey.setEnabled(true);
        randomKey.setEnabled(true);
        deleteHoles.setEnabled(true);
        // text settings
        // writeText.setText("Klartext manuell eingeben");
        // if (textInputState!=1)
        //     loadedTextName.setText("");
        // if (textInputState == 2) {
        //     plaintext.setEditable(true);
        //     ciphertext.setEditable(false);
        plaintext.setEnabled(true);
        plaintext.setForeground(null);
        ciphertext.setEnabled(false);
        ciphertext.setForeground(null);
        // analysis settings
        analysisSettingsGroup.setEnabled(false);
        analysis.setEnabled(false);
        analysisOutput.setEnabled(false);
        statistics.setEnabled(false);
        selectStatistic.setEnabled(false);
        statisticsLoad.setEnabled(false);
        loadStatistics.setEnabled(false);
        // loadedStatisticName.setVisible(false);
        // statisticNameIdentifier.setVisible(false);
        loadedStatisticName.setText("");
        userStatistics = false;
statisticName = ""; fis = null; language.setEnabled(false); restarts.setEnabled(false); nGramSize.setEnabled(false); } public void decrypt() { System.out.println("Decrypt()-Method entered"); checkArgs("decrypt() Anfang"); plain = false; argMethod = "decrypt"; setArgText(); // key settings canvasKey.setEnabled(true); randomKey.setEnabled(true); deleteHoles.setEnabled(true); // text settings // writeText.setEnabled(true); // writeText.setText("Geheimtext manuell eingeben"); // if (textInputState!=1) // loadedTextName.setText(""); // if (textInputState == 2) { // plaintext.setEditable(false); // ciphertext.setEditable(true); plaintext.setEnabled(false); plaintext.setForeground(null); ciphertext.setEnabled(true); ciphertext.setForeground(null); // analysis settings analysisSettingsGroup.setEnabled(false); analysis.setEnabled(false); analysisOutput.setEnabled(false); statistics.setEnabled(false); selectStatistic.setEnabled(false); statisticsLoad.setEnabled(false); loadStatistics.setEnabled(false); // loadedStatisticName.setVisible(false); // statisticNameIdentifier.setVisible(false); loadedStatisticName.setText(""); userStatistics = false; statisticName = ""; fis = null; language.setEnabled(false); restarts.setEnabled(false); nGramSize.setEnabled(false); checkArgs("decrypt() Ende"); } public void startMethod() { // String keyToString; System.out.println("startMethod-Method started\n"); System.out.println("Method (in startMethod): "+argMethod); if (argMethod != null) { switch (argMethod) { case "analyze": userText = true; analysisOutput.setText("Gefundene Schablone/ Fortschritt\n"); System.out.println("Analyze selected?: "+String.valueOf(analyze.getSelection())); args = new String[12]; args[0] = "-method"; args[1] = argMethod; args[2] = "-keyLength"; args[3] = keySize.getText(); args[4] = "-cryptedText"; args[5] = argText; args[6] = "-nGramSize"; args[7] = nGramSize.getText(); args[8] = "-language"; args[9] = argLanguage; args[10] = "-restarts"; args[11] = 
restarts.getText(); if (fisOld != fis || !oldNgramSize.equals(nGramSize.getText())) { try { argStatistics = lf.loadBinNgramFrequencies(fis, argLanguage, nGramSize.getSelection()); // System.out.println("\nStatistik geladen in startMethod(analyze): "+String.valueOf(fis)+"\n"); // analysisOutput.append("\nStatistik geladen in startMethod(analyze): "+String.valueOf(fis)+"\n"); fisOld = fis; oldNgramSize = nGramSize.getText(); } catch (NumberFormatException e) { // TODO Auto-generated catch block e.printStackTrace(); LogUtil.logError(Activator.PLUGIN_ID,"Gültige Statistik eingeben", e, true); } catch (FileNotFoundException e) { // TODO Auto-generated catch block e.printStackTrace(); LogUtil.logError(Activator.PLUGIN_ID,"Datei nicht gefunden", e, true); } catch (InvalidParameterCombinationException e) { e.printStackTrace(); LogUtil.logError(Activator.PLUGIN_ID, "Invalid Parameter Combination", e, true); return; } } // System.out.println("start widget for analysis"); // analysisOutput.append("\nstart widget for analysis\n"); analysisOutput.append("\nLanguage: "+argLanguage+"\nText: "+textName+"\nStatistic: "+statisticName); analysisOutput.append("\nnGramSize: "+nGramSize.getText()+"\nRestarts: "+restarts.getText()+"\nKey length: "+keySize.getText()); break; case "encrypt": System.out.println("Encrypt selected?: "+String.valueOf(encrypt.getSelection())); if (encrypt.getSelection()) { userText = true; System.out.println("userText changed in case encrypt in startMethod"); } args = new String[6]; args[0] = "-method"; args[1] = argMethod; args[2] = "-key"; argKey = model.translateKeyToLogic(); System.out.println("argKey (in start button:encrypt): "+argKey); args[3] = argKey; args[4] = "-plaintext"; args[5] = argText; System.out.println("start widget for encryption"); break; case "decrypt": System.out.println("Decrypt selected?: "+String.valueOf(decrypt.getSelection())); // if (decrypt.getSelection() && exampleText.getSelection()) { userText = true; 
System.out.println("userText: "+String.valueOf(userText)+" in case decrypt in startMethod"); args = new String[6]; args[0] = "-method"; args[1] = argMethod; args[2] = "-key"; argKey = model.translateKeyToLogic(); System.out.println("argKey (in start button: decrypt): "+argKey); args[3] = argKey; args[4] = "-cryptedText"; args[5] = argText; System.out.println("start widget for decryption"); break; default: System.out.println("No method choosen (in 'startMethod')"); break; } }else { System.out.println("'argMethod' not initialized"); analysisOutput.append("'argMethod' not initialized"); exampleAnalysis(); return; } checkArgs("startMethod(), before startApplication()"); startApplication(); System.out.println("startMethod() ended"); } public void startApplication() { System.out.println("Method 'startApplication' started."); ParameterSettings ps = null; MethodApplication ma = null; try { // Configuration of given parameters and selecting and applying one of the three methods ps = new ParameterSettings(args/*, this.analysisOutput*/); ma = new MethodApplication(ps, this.analysisOutput, argStatistics); } catch (InvalidParameterCombinationException ex) { // TODO Auto-generated catch block ex.printStackTrace(); LogUtil.logError(Activator.PLUGIN_ID, "Bitte gültige Parameter eingeben", ex, true); return; }catch(FileNotFoundException ex) { ex.printStackTrace(); // System.out.println("File not found !"); LogUtil.logError(Activator.PLUGIN_ID, "Datei nicht gefunden", ex, true); return; } switch(argMethod) { case "analyze": ma.analyze(); analysisOutput.append(ma.toString()); plaintext.setEnabled(true); plaintext.setForeground(ColorService.GRAY); plaintext.setText("Gefundener Klartext:\n\n"+ma.getBestDecryptedText()); keyToLogic = ma.getBestTemplate(); printFoundKey(); break; case "encrypt": ma.encrypt(); if (encrypt.getSelection()) { ciphertext.setEnabled(true); ciphertext.setForeground(ColorService.GRAY); } ciphertext.setText(ma.getEncryptedText()); break; case "decrypt": 
ma.decrypt(); plaintext.setEnabled(true); plaintext.setForeground(ColorService.GRAY); plaintext.setText(ma.getDecryptedText()); break; } // analysis.setEnabled(true); // analysisOutput.setEnabled(true); // analysisOutput.append(ma.toString()); System.out.println("startApplication() ended"); checkArgs("startApplication() Ende"); } public void exampleAnalysis() { System.out.println("exampleAnalysis() gestartet"); // userText = true; resetAnalysisSettings(); System.out.println("userText (in exmapleAnalysis()): "+String.valueOf(userText)+", restart() starting...\n"); analysisOutput.append("userText (in exmapleAnalysis()): "+String.valueOf(userText)+", restart() starting...\n"); // pretend restart startSettings = true; // Settings for key resetKey(); // Settings for method resetMethod(); // Settings for text selection resetTextSelection(); startSettings = false; System.out.println("...restart done\n"); analysisOutput.append("...restart done\n"); args = new String[12]; args[0] = "-method"; args[1] = argMethod; args[2] = "-keyLength"; args[3] = keySize.getText(); args[4] = "-cryptedText"; args[5] = argText; args[6] = "-nGramSize"; args[7] = nGramSize.getText(); args[8] = "-language"; args[9] = argLanguage; args[10] = "-restarts"; args[11] = restarts.getText(); fis = lf.openMyFileStream("files/de-4gram-nocs.bin"); statisticName = "de-4gram-nocs.bin"; try { argStatistics = lf.loadBinNgramFrequencies(fis, argLanguage, nGramSize.getSelection()); } catch (NumberFormatException e) { // TODO Auto-generated catch block e.printStackTrace(); LogUtil.logError(Activator.PLUGIN_ID,"Gültige Statistik eingeben", e, true); return; } catch (FileNotFoundException e) { // TODO Auto-generated catch block e.printStackTrace(); LogUtil.logError(Activator.PLUGIN_ID,"Datei nicht gefunden", e, true); return; } catch (InvalidParameterCombinationException e) { e.printStackTrace(); LogUtil.logError(Activator.PLUGIN_ID,"Invalid Parameter Combination", e, true); return; } System.out.println("start 
widget for analysis\n"); analysisOutput.append("start widget for analysis\n"); analysisOutput.append("\nBeispielanalyse gestartet...\n"); System.out.println("\nBeispielanalyse gestartet...\n"); checkArgs("exampleAnalysis(), before startApplication()"); startApplication(); analysisOutput.append("\n...Beispielanalyse durchgeführt"); System.out.println("\n...Beispielanalyse durchgeführt"); } public void generateRandomKey() { //reset the old grille System.out.println("begin generate RandomKey"); model.setKey(new KeySchablone(Integer.parseInt(keySize.getText()))); int size = model.getKey().getSize(); System.out.println("Size before random grille: "+size); int x,y; do { do { x = ThreadLocalRandom.current().nextInt(0, size); y = ThreadLocalRandom.current().nextInt(0, size); } while (model.getKey().get(x, y) != '0'); model.getKey().set(x, y); keyToLogic = new int[2]; keyToLogic[0] = y; keyToLogic[1] = x; System.out.println("keyToLogic(y): "+keyToLogic[0]+", keyToLogic(x): "+keyToLogic[1]); System.out.println("sets x: "+x+", sets y: "+y+", setted : "+model.getKey().get(x, y)); } while (!model.getKey().isValid()); System.out.println("Schlüssel nach Zufallserzeugung: "+model.translateKeyToLogic()); canvasKey.redraw(); } public void deleteHoles() { System.out.println("deleteHoles-Method started\n"); model.setKey(new KeySchablone(Integer.parseInt(keySize.getText()))); canvasKey.redraw(); canvasKey.removeMouseListener(keyListener); canvasKey.addMouseListener(keyListener); } public void reset(String methode) { System.out.println("reset-Method in "+methode+" started\n"); checkOkButton(); canvasKey.redraw(); } public void refreshExampleText() { System.out.println("refreshTexts-Method started\n"); userText = false; textName = lf.textFiles(chooseExample.getSelectionIndex()); argText = lf.InputStreamToString(lf.openMyTestStream(textName)); if (!plain) randomEncryption(); resetTexts(); } public void resetTexts() { System.out.println("resetTexts-Method started\n"); if (plain) { 
plaintext.setText(argText); ciphertext.setText(""); }else { ciphertext.setText(argText); plaintext.setText(""); } } public void resetStatistics(){ System.out.println("resetStatistics-Method started\n"); statisticName = lf.statisticFiles(selectStatistic.getSelectionIndex()); fis = lf.openMyFileStream(statisticName); File statFile = new File(statisticName); try { System.out.println("Statisticsize of "+statisticName+": "+fis.available()); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } public void checkOkButton() { System.out.println("checkOkButton-Method started\n"); if (analyze.getSelection()) { if(!argText.equals("") && fis!=null) start.setEnabled(true); else start.setEnabled(false); }else { if (model.getKey().isValid() && !argText.equals("")) { //$NON-NLS-1$ System.out.println("model translatekeyToLogic (in checkOkButton): "+model.translateKeyToLogic()); start.setEnabled(true); } else { start.setEnabled(false); } } } public void resetAnalysisSettings() { System.out.println("resetAnalysisSettings-Method started\n"); language.select(0); argLanguage = "german"; languageState = 0; statistics.setSelection(true); statisticInputState = 0; statisticsLoad.setSelection(false); selectStatistic.select(0); statisticState = 0; restarts.setSelection(5); nGramSize.setSelection(4); analysisOutput.setText("Gefundene Schablone/ Fortschritt\n"); } public void resetKey() { System.out.println("resetKey-Method started\n"); keySize.setSelection(7); deleteHoles(); } public void resetTextSelection() { System.out.println("resetTextSelection-Method started\n"); userText = false; exampleText.setSelection(true); textInputState = 0; loadOwntext.setSelection(false); writeText.setSelection(false); textSelection(true, false, false, false); chooseExample.select(0); textState = 0; plaintext.setEnabled(false); ciphertext.setEnabled(true); ciphertext.setForeground(null); } public void setArgText() { String tempArgText, textInput; boolean edit; if (plain) { 
tempArgText = plaintext.getText(); textInput = "Klartext"; edit = true; }else { tempArgText = ciphertext.getText(); textInput = "Geheimtext"; edit = false; } writeText.setText(textInput+" manuell eingeben"); if (textInputState!=1 || tempArgText.equals("")) loadedTextName.setText(""); if (textInputState == 2) { plaintext.setEditable(edit); ciphertext.setEditable(!edit); } if (startSettings) { argText = lf.InputStreamToString(lf.openMyTestStream("files/dawkinsGerCiphertext7.txt")); System.out.println("startSettings true in setArgText"); resetTexts(); }else if (exampleText.getSelection()) { if (userText && !tempArgText.equals("")) { argText = tempArgText; resetTexts(); }else { refreshExampleText(); } }else { argText = tempArgText; resetTexts(); } } public void updateLanguageSettings(String setting) { // textState und languageState switch(setting) { case "text": if (chooseExample.getSelectionIndex() == 0 || chooseExample.getSelectionIndex() == 1) { language.select(0); selectStatistic.select(0); }else { language.select(1); if (nGramSize.getSelection()==4) selectStatistic.select(1); else selectStatistic.select(2); } break; case "language": if (language.getSelectionIndex() == 0) { chooseExample.select(0); selectStatistic.select(0); }else { chooseExample.select(2); selectStatistic.select(1); } break; case "statistic": if (selectStatistic.getSelectionIndex()==0) { if (exampleText.getSelection() && textState!=1) chooseExample.select(0); language.select(0); }else { if (exampleText.getSelection() && textState!=3) chooseExample.select(2); language.select(1); } break; } } public void randomEncryption() { System.out.println("randomEncryption-Method started\n"); String tempMethod = argMethod; generateRandomKey(); argMethod = "encrypt"; startMethod(); argMethod = tempMethod; argText = ciphertext.getText(); if (argMethod.equals("analyze")) deleteHoles(); } public void printFoundKey() { deleteHoles(); for (int i =0; i<keyToLogic.length/2; i++) { model.getKey().set(keyToLogic[2*i+1], 
keyToLogic[2*i]); } // userText = true; } public void resetMethod() { System.out.println("defaultMethod-Method started\n"); analyze.setSelection(true); encrypt.setSelection(false); decrypt.setSelection(false); analyze(); } public boolean checkTextLangChange() { boolean change; if (languageState == 0 && (chooseExample.getSelectionIndex() == 2 || chooseExample.getSelectionIndex() == 3)) { change = true; }else if (languageState == 1 && (chooseExample.getSelectionIndex() == 0 || chooseExample.getSelectionIndex() == 1)) { change = true; }else { change = false; } return change; } public boolean checkStatisticLangChange() { boolean change; if (languageState == 0 && (selectStatistic.getSelectionIndex() == 1 || selectStatistic.getSelectionIndex() == 2)) { change = true; }else if (languageState == 1 && selectStatistic.getSelectionIndex() == 0) { change = true; }else { change = false; } return change; } public void checkTextInput() { switch(textInputState) { case 0: break; case 1: break; case 2: break; } } public void checkArgs(String methode) { System.out.println("checkArgs started in: "+methode+"\nStartSettings?: "+startSettings); System.out.println("ArgMethod: "+argMethod+"\nTextname: "+textName+"\nArgText: "+argText+"\nArgLanguage: "+argLanguage+"\nArgStatistic: "+statisticName+" \ntextInputState: "+textInputState+"\nstatInoutState: "+statisticInputState+"\nfis: "+fis); System.out.println("Plain?: "+plain+"\nUserText?: "+userText); System.out.println("Plaintext editable: "+plaintext.getEditable()+"\nCiphertext editable: "+ciphertext.getEditable()); } public Canvas getCanvasKey() { return canvasKey; } public void setCanvasKey(Canvas canvasKey) { this.canvasKey = canvasKey; } public Grille getModel() { return model; } public void setModel(Grille model) { this.model = model; } public Spinner getKeySize() { return keySize; } public void setKeySize(Spinner keySize) { this.keySize = keySize; } }
package org.metaborg.runtime.task.primitives; import org.spoofax.interpreter.library.AbstractStrategoOperatorRegistry; public class TaskLibrary extends AbstractStrategoOperatorRegistry { public static final String REGISTRY_NAME = "TASK"; public TaskLibrary() { add(task_api_add_combinator_0_3.instance); add(task_api_add_dependency_0_2.instance); add(task_api_add_message_0_2.instance); add(task_api_add_read_0_2.instance); add(task_api_add_task_0_3.instance); add(task_api_becomes_cyclic_0_2.instance); add(task_api_clear_partition_0_1.instance); add(task_api_evaluate_3_0.instance); add(task_api_exists_0_1.instance); add(task_api_get_dependencies_0_1.instance); add(task_api_get_messages_0_1.instance); add(task_api_get_results_0_1.instance); add(task_api_has_failed_0_1.instance); add(task_api_invalidate_task_reads_0_1.instance); add(task_api_persist_0_0.instance); add(task_api_reset_0_0.instance); add(task_api_setup_0_1.instance); add(task_api_start_collection_0_1.instance); add(task_api_stop_collection_0_1.instance); add(task_api_task_id_0_1.instance); add(task_api_unload_0_1.instance); add(task_api_debug_info_0_1.instance); } public String getOperatorRegistryName() { return REGISTRY_NAME; } }
package org.mwc.cmap.TimeController.recorders;

import java.io.File;

import org.eclipse.core.runtime.IStatus;
import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.jface.window.Window;
import org.eclipse.swt.program.Program;
import org.eclipse.swt.widgets.Display;
import org.mwc.cmap.TimeController.wizards.ExportPPTDialog;
import org.mwc.cmap.core.CorePlugin;
import org.mwc.cmap.core.DataTypes.Temporal.TimeControlPreferences;
import org.mwc.debrief.core.preferences.PrefsPage;

import Debrief.GUI.Frames.Application;
import Debrief.GUI.Views.CoreCoordinateRecorder;
import MWC.Algorithms.PlainProjection;
import MWC.GUI.Layers;
import MWC.GenericData.WorldArea;
import MWC.GenericData.WorldLocation;

/**
 * Eclipse/SWT front-end for {@link CoreCoordinateRecorder}: shows the
 * PowerPoint export dialog, opens the generated file, and reports messages.
 */
public class CoordinateRecorder extends CoreCoordinateRecorder
{
  /**
   * @param timePreferences supplies recording interval, step size and DTG format
   */
  public CoordinateRecorder(final Layers layers,
      final PlainProjection plainProjection,
      final TimeControlPreferences timePreferences)
  {
    super(layers, plainProjection, timePreferences.getAutoInterval()
        .getMillis(), timePreferences.getSmallStep().getMillis(),
        timePreferences.getDTGFormat());
  }

  /**
   * @return the PPT master template path from preferences, falling back to the
   *         preference's default when unset.
   */
  private static String getMasterTemplateFile()
  {
    String templateFile = CorePlugin.getDefault().getPreferenceStore()
        .getString(PrefsPage.PreferenceConstants.PPT_TEMPLATE);
    if (templateFile == null || templateFile.isEmpty())
    {
      templateFile = CorePlugin.getDefault().getPreferenceStore()
          .getDefaultString(PrefsPage.PreferenceConstants.PPT_TEMPLATE);
    }
    return templateFile;
  }

  /**
   * Builds a non-clashing file name: re-stamps the base name with the recording
   * start time and bumps (or appends) a trailing "_n" counter.
   */
  private static String getNewFileName(final String fileName,
      final String recordingStartTime)
  {
    String newName = fileName;
    final String[] fileNameParts = fileName.split("-");
    if (fileNameParts.length > 0)
    {
      newName = fileNameParts[0] + "-" + recordingStartTime;
    }
    if (fileName.matches("^.*_\\d+$"))
    {
      // was Integer.valueOf: parseInt avoids the needless boxing
      int fileNameIncr = Integer.parseInt(fileName.substring(fileName
          .lastIndexOf("_") + 1));
      newName += "_" + (++fileNameIncr);
    }
    else
    {
      newName += "_1";
    }
    return newName;
  }

  /** Launches the exported file with the platform's associated program. */
  @Override
  protected void openFile(final String filename)
  {
    CorePlugin.logError(IStatus.INFO, "Opening file:" + filename, null);
    final boolean worked = Program.launch(filename);
    CorePlugin.logError(IStatus.INFO, "Open file result:" + worked, null);
  }

  /**
   * Shows the export dialog on the UI thread (blocking) and collects the
   * user's choices; on cancel the result carries {@code status == false}.
   */
  @Override
  public ExportDialogResult showExportDialog()
  {
    final ExportDialogResult retVal = new ExportDialogResult();
    Display.getDefault().syncExec(new Runnable()
    {
      @Override
      public void run()
      {
        final ExportPPTDialog exportDialog = new ExportPPTDialog(Display
            .getDefault().getActiveShell());

        // fix the filename
        final String exportLocation = exportDialog.getExportLocation();

        // check we don't get invalid characters in the string
        // we're using for the filename
        final String tidyName = tidyString(startTime);
        String fileName = exportDialog.getFileName() + "-" + tidyName;
        if (exportLocation != null && !"".equals(exportLocation))
        {
          final String filePath = exportDialog.getFileToExport(fileName);
          final File f = new File(filePath);
          if (f.exists())
          {
            // avoid clobbering an earlier export
            fileName = getNewFileName(fileName, startTime);
          }
        }
        exportDialog.setFileName(fileName);

        // clear startTime text, we don't need it any more
        startTime = null;

        // show the dialog
        if (exportDialog.open() == Window.OK)
        {
          final String exportFile = exportDialog.getFileToExport(null);
          final String masterTemplateFile = getMasterTemplateFile();
          retVal.setMasterTemplate(masterTemplateFile);
          retVal.setFileName(fileName);
          retVal.setOpenOnComplete(exportDialog.getOpenOncomplete());
          retVal.setScaleBarVisible(exportDialog.isScaleBarVisible());
          retVal.setScaleBarUnit(exportDialog.getScaleBarUnit());
          retVal.setSelectedFile(exportFile);
          retVal.setStatus(true);
        }
        // if cancelled, then stop recording.
        else
        {
          retVal.setStatus(false);
          retVal.setOpenOnComplete(false);
          retVal.setSelectedFile(null);
        }
      }
    });
    return retVal;
  }

  /** Replaces any character unsafe for a file name with '_'; null-safe. */
  private static String tidyString(String startTime)
  {
    if (startTime != null)
    {
      return startTime.replaceAll("[^a-zA-Z0-9-_\\.]", "_");
    }
    return startTime;
  }

  /** Shows an informational dialog on the UI thread (non-blocking). */
  @Override
  protected void showMessageDialog(final String message)
  {
    Display.getDefault().asyncExec(new Runnable()
    {
      @Override
      public void run()
      {
        MessageDialog.open(MessageDialog.INFORMATION, Display.getDefault()
            .getActiveShell(), "Export", message, MessageDialog.INFORMATION);
      }
    });
  }

  /** Unit tests driving the recorder against the bundled sample scenario. */
  public static class CoordinateRecorderTest extends junit.framework.TestCase
  {
    static public final String TEST_ALL_TEST_TYPE = "UNIT";
    private static final String collingwood = "COLLINGWOOD";
    private static final String nelson = "NELSON";

    /** Builds a recorder over the sample REP file with a fixed flat projection. */
    public CoordinateRecorder getRecorder()
    {
      final String testFilePath =
          "../org.mwc.cmap.combined.feature/root_installs/sample_data/offset_times.rep";
      // ok, now try to read it in
      final Layers _theLayers = new Layers();
      // add the REP importer
      MWC.Utilities.ReaderWriter.ImportManager.importThis(testFilePath,
          _theLayers);
      /*
       * final PlainProjection projection = new org.mwc.cmap.gt2plot.proj.GtProjection();
       */
      final PlainProjection projection =
          new MWC.Algorithms.Projections.FlatProjection();
      projection.setScreenArea(new java.awt.Dimension(1443, 901));
      projection.setDataArea(new WorldArea(new WorldLocation(
          22.238965795584505, -21.928244631862952, 0), new WorldLocation(
          22.238965795584505, -21.43985414608609, 0)));
      final org.mwc.cmap.core.DataTypes.Temporal.TimeControlProperties timePreferences =
          new org.mwc.cmap.core.DataTypes.Temporal.TimeControlProperties();
      CoordinateRecorder recorder = new CoordinateRecorder(_theLayers,
          projection, timePreferences);
      return recorder;
    }

    /**
     * COLLINGWOOD starts after NELSON
     */
    public void testPrimaryStartsFirst()
    {
      CoordinateRecorder recorder = getRecorder();
      MWC.GenericData.HiResDate currentTime = new MWC.GenericData.HiResDate(
          818748540000L);
      final int AMOUNT_OF_STEPS = 3;
      doIteration(recorder, currentTime, AMOUNT_OF_STEPS);
      final java.util.Map<String, Debrief.ReaderWriter.powerPoint.model.Track> track =
          recorder._tracks;
      checkTrackSize(track);
      assertEquals("correct amount of steps skipped in collingwood", 1, track
          .get(collingwood).getStepsToSkip());
      assertEquals("correct amount of segments for collingwood", 2, track.get(
          collingwood).getPoints().size());
      assertEquals("correct amount of steps skipped for nelson", 0, track.get(
          nelson).getStepsToSkip());
      assertEquals("correct amount of tracks for nelson", 3, track.get(nelson)
          .getPoints().size());
      Application.logError2(Application.INFO,
          "Recording Test Passed (Primary Starting First)", null);
    }

    /** Common assertions: exactly the two expected tracks are present. */
    private static void checkTrackSize(
        final java.util.Map<String, Debrief.ReaderWriter.powerPoint.model.Track> track)
    {
      assertEquals("correct amount of tracks", 2, track.size());
      assertTrue("tracks contain collingwood", track.containsKey(collingwood));
      assertTrue("tracks contain nelson", track.containsKey(nelson));
    }

    /** Starts stepping at the given time and advances one minute per step. */
    private static void doIteration(CoordinateRecorder recorder,
        final MWC.GenericData.HiResDate currentTime_in,
        final int AMOUNT_OF_STEPS)
    {
      MWC.GenericData.HiResDate currentTime = currentTime_in;
      recorder.startStepping(currentTime);
      final long timeDelta = 60000; // 1 min.
      for (int i = 0; i < AMOUNT_OF_STEPS; i++)
      {
        recorder.newTime(currentTime);
        // getMicros() is in microseconds; /1000 converts to the millis the ctor expects
        currentTime = new MWC.GenericData.HiResDate(currentTime.getMicros()
            / 1000L + timeDelta);
      }
    }

    /**
     * COLLINGWOOD starts with NELSON
     */
    public void testPrimaryStartsWithSecondary()
    {
      CoordinateRecorder recorder = getRecorder();
      MWC.GenericData.HiResDate currentTime = new MWC.GenericData.HiResDate(
          818748600000L);
      final java.util.Map<String, Debrief.ReaderWriter.powerPoint.model.Track> track =
          twoStepsCheckCollingwood(recorder, currentTime);
      assertEquals("correct amount of steps skipped in nelson's track", 0,
          track.get(nelson).getStepsToSkip());
      // message corrected: this assertion inspects nelson's segment, not collingwood's
      assertEquals("correct amount of points in nelson's segment", 2, track
          .get(nelson).getPoints().size());
      Application.logError2(Application.INFO,
          "Recording Test Passed (Starting at the same time)", null);
    }

    /** Runs two steps and verifies collingwood's skip count and point count. */
    private static java.util.Map<String, Debrief.ReaderWriter.powerPoint.model.Track> twoStepsCheckCollingwood(
        CoordinateRecorder recorder, MWC.GenericData.HiResDate currentTime)
    {
      final int AMOUNT_OF_STEPS = 2;
      doIteration(recorder, currentTime, AMOUNT_OF_STEPS);
      final java.util.Map<String, Debrief.ReaderWriter.powerPoint.model.Track> track =
          recorder._tracks;
      checkTrackSize(track);
      assertEquals("correct number of skipped steps in collingwood's track", 0,
          track.get(collingwood).getStepsToSkip());
      assertEquals("correct number of points in collingwood's segment", 2,
          track.get(collingwood).getPoints().size());
      return track;
    }

    /**
     * COLLINGWOOD ends before NELSON
     */
    public void testPrimaryEndsBeforeSecondary()
    {
      CoordinateRecorder recorder = getRecorder();
      MWC.GenericData.HiResDate currentTime = new MWC.GenericData.HiResDate(
          818764200000L);
      final java.util.Map<String, Debrief.ReaderWriter.powerPoint.model.Track> track =
          twoStepsCheckCollingwood(recorder, currentTime);
      assertEquals("correct number of skipped steps in nelson's track", 0,
          track.get(nelson).getStepsToSkip());
      assertEquals("correct number of points in track's segment", 1, track.get(
          nelson).getPoints().size());
      Application.logError2(Application.INFO,
          "Recording Test Passed (Starting at the same time)", null);
    }
  }
}
package org.mwc.cmap.TimeController.recorders;

import java.io.File;

import org.eclipse.core.runtime.IStatus;
import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.jface.window.Window;
import org.eclipse.swt.program.Program;
import org.eclipse.swt.widgets.Display;
import org.mwc.cmap.TimeController.wizards.ExportPPTDialog;
import org.mwc.cmap.core.CorePlugin;
import org.mwc.cmap.core.DataTypes.Temporal.TimeControlPreferences;
import org.mwc.debrief.core.preferences.PrefsPage;

import Debrief.GUI.Views.CoreCoordinateRecorder;
import MWC.Algorithms.PlainProjection;
import MWC.GUI.Layers;

/**
 * Eclipse/SWT front-end for {@link CoreCoordinateRecorder}: shows the
 * PowerPoint export dialog, opens the generated file, and reports messages.
 */
public class CoordinateRecorder extends CoreCoordinateRecorder
{
  /**
   * @param timePreferences supplies recording interval, step size and DTG format
   */
  public CoordinateRecorder(final Layers layers,
      final PlainProjection plainProjection,
      final TimeControlPreferences timePreferences)
  {
    super(layers, plainProjection, timePreferences.getAutoInterval()
        .getMillis(), timePreferences.getSmallStep().getMillis(),
        timePreferences.getDTGFormat());
  }

  /**
   * @return the PPT master template path from preferences, falling back to the
   *         preference's default when unset.
   */
  // made static: reads no instance state (matches the sibling recorder variant)
  private static String getMasterTemplateFile()
  {
    String templateFile = CorePlugin.getDefault().getPreferenceStore()
        .getString(PrefsPage.PreferenceConstants.PPT_TEMPLATE);
    if (templateFile == null || templateFile.isEmpty())
    {
      templateFile = CorePlugin.getDefault().getPreferenceStore()
          .getDefaultString(PrefsPage.PreferenceConstants.PPT_TEMPLATE);
    }
    return templateFile;
  }

  /**
   * Builds a non-clashing file name: re-stamps the base name with the recording
   * start time and bumps (or appends) a trailing "_n" counter.
   */
  // made static: reads no instance state (matches the sibling recorder variant)
  private static String getNewFileName(final String fileName,
      final String recordingStartTime)
  {
    String newName = fileName;
    final String[] fileNameParts = fileName.split("-");
    if (fileNameParts.length > 0)
    {
      newName = fileNameParts[0] + "-" + recordingStartTime;
    }
    if (fileName.matches("^.*_\\d+$"))
    {
      // was Integer.valueOf: parseInt avoids the needless boxing
      int fileNameIncr = Integer.parseInt(fileName.substring(fileName
          .lastIndexOf("_") + 1));
      newName += "_" + (++fileNameIncr);
    }
    else
    {
      newName += "_1";
    }
    return newName;
  }

  /** Launches the exported file with the platform's associated program. */
  @Override
  protected void openFile(final String filename)
  {
    CorePlugin.logError(IStatus.INFO, "Opening file:" + filename, null);
    final boolean worked = Program.launch(filename);
    CorePlugin.logError(IStatus.INFO, "Open file result:" + worked, null);
  }

  /**
   * Shows the export dialog on the UI thread (blocking) and collects the
   * user's choices; on cancel the result carries {@code status == false}.
   */
  @Override
  public ExportDialogResult showExportDialog()
  {
    final ExportDialogResult retVal = new ExportDialogResult();
    Display.getDefault().syncExec(new Runnable()
    {
      @Override
      public void run()
      {
        final ExportPPTDialog exportDialog = new ExportPPTDialog(Display
            .getDefault().getActiveShell());

        // fix the filename
        final String exportLocation = exportDialog.getExportLocation();

        // check we don't get invalid characters in the string
        // we're using for the filename
        final String tidyName = tidyString(startTime);
        String fileName = exportDialog.getFileName() + "-" + tidyName;
        if (exportLocation != null && !"".equals(exportLocation))
        {
          final String filePath = exportDialog.getFileToExport(fileName);
          final File f = new File(filePath);
          if (f.exists())
          {
            // avoid clobbering an earlier export
            fileName = getNewFileName(fileName, startTime);
          }
        }
        exportDialog.setFileName(fileName);

        // clear startTime text, we don't need it any more
        startTime = null;

        // show the dialog
        if (exportDialog.open() == Window.OK)
        {
          final String exportFile = exportDialog.getFileToExport(null);
          final String masterTemplateFile = getMasterTemplateFile();
          retVal.setMasterTemplate(masterTemplateFile);
          retVal.setFileName(fileName);
          retVal.setOpenOnComplete(exportDialog.getOpenOncomplete());
          retVal.setSelectedFile(exportFile);
          retVal.setStatus(true);
        }
        // if cancelled, then stop recording.
        else
        {
          retVal.setStatus(false);
          retVal.setOpenOnComplete(false);
          retVal.setSelectedFile(null);
        }
      }
    });
    return retVal;
  }

  /** Replaces any character unsafe for a file name with '_'; null-safe. */
  private static String tidyString(String startTime)
  {
    if (startTime != null)
    {
      return startTime.replaceAll("[^a-zA-Z0-9-_\\.]", "_");
    }
    return startTime;
  }

  /** Shows an informational dialog on the UI thread (non-blocking). */
  @Override
  protected void showMessageDialog(final String message)
  {
    Display.getDefault().asyncExec(new Runnable()
    {
      @Override
      public void run()
      {
        MessageDialog.open(MessageDialog.INFORMATION, Display.getDefault()
            .getActiveShell(), "Export", message, MessageDialog.INFORMATION);
      }
    });
  }
}
package org.zstack.network.service.flat;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Transactional;
import org.zstack.compute.vm.StaticIpOperator;
import org.zstack.compute.vm.VmSystemTags;
import org.zstack.core.asyncbatch.While;
import org.zstack.core.cloudbus.CloudBus;
import org.zstack.core.cloudbus.CloudBusCallBack;
import org.zstack.core.cloudbus.MessageSafe;
import org.zstack.core.componentloader.PluginRegistry;
import org.zstack.core.db.*;
import org.zstack.core.defer.Defer;
import org.zstack.core.defer.Deferred;
import org.zstack.core.thread.SyncTask;
import org.zstack.core.thread.ThreadFacade;
import org.zstack.core.workflow.FlowChainBuilder;
import org.zstack.core.workflow.ShareFlow;
import org.zstack.header.AbstractService;
import org.zstack.header.apimediator.ApiMessageInterceptionException;
import org.zstack.header.apimediator.GlobalApiMessageInterceptor;
import org.zstack.header.core.*;
import org.zstack.header.core.workflow.*;
import org.zstack.header.errorcode.ErrorCode;
import org.zstack.header.errorcode.OperationFailureException;
import org.zstack.header.host.HostConstant;
import org.zstack.header.host.HostErrors;
import org.zstack.header.message.APIMessage;
import org.zstack.header.message.Message;
import org.zstack.header.message.MessageReply;
import org.zstack.header.network.l2.L2NetworkVO;
import org.zstack.header.network.l3.*;
import org.zstack.header.network.service.*;
import org.zstack.header.vm.*;
import org.zstack.header.vm.VmAbnormalLifeCycleStruct.VmAbnormalLifeCycleOperation;
import org.zstack.kvm.*;
import org.zstack.kvm.KvmCommandSender.SteppingSendCallback;
import org.zstack.network.service.MtuGetter;
import org.zstack.network.service.NetworkProviderFinder;
import org.zstack.network.service.NetworkServiceProviderLookup;
import org.zstack.tag.SystemTagCreator;
import org.zstack.utils.CollectionUtils;
import org.zstack.utils.DebugUtils;
import org.zstack.utils.TagUtils;
import org.zstack.utils.Utils;
import org.zstack.utils.function.Function;
import org.zstack.utils.logging.CLogger;
import org.zstack.utils.network.IPv6Constants;
import org.zstack.utils.network.IPv6NetworkUtils;
import org.zstack.utils.network.NetworkUtils;

import javax.persistence.Tuple;
import javax.persistence.TypedQuery;
import java.util.*;
import java.util.concurrent.Callable;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

import static org.zstack.core.Platform.*;
import static org.zstack.utils.CollectionDSL.*;

/**
 * DHCP backend of the flat network service provider.
 *
 * Responsibilities visible in this class:
 * <ul>
 * <li>allocates one DHCP server IP per L3 network and records it in the
 * {@code L3_NETWORK_DHCP_IP} system tag;</li>
 * <li>applies/releases per-VM DHCP entries on KVM hosts through the agent
 * HTTP paths declared below (apply/prepare/release/connect/...);</li>
 * <li>rebuilds DHCP state when a KVM host reconnects, and follows VM
 * migration and abnormal life-cycle transitions;</li>
 * <li>cleans up DHCP namespaces and the server IP when an L3 network or its
 * last IP range is deleted.</li>
 * </ul>
 */
public class FlatDhcpBackend extends AbstractService implements NetworkServiceDhcpBackend, KVMHostConnectExtensionPoint, L3NetworkDeleteExtensionPoint,
        VmInstanceMigrateExtensionPoint, VmAbnormalLifeCycleExtensionPoint, IpRangeDeletionExtensionPoint, BeforeStartNewCreatedVmExtensionPoint,
        GlobalApiMessageInterceptor, AfterAddIpRangeExtensionPoint {
    private static final CLogger logger = Utils.getLogger(FlatDhcpBackend.class);

    @Autowired
    private CloudBus bus;          // inter-service message bus
    @Autowired
    private DatabaseFacade dbf;    // DB access (JPA EntityManager)
    @Autowired
    private ThreadFacade thdf;     // used to serialize per-L3 sync tasks
    @Autowired
    private PluginRegistry pluginRgty;  // extension-point lookup

    // HTTP paths understood by the KVM agent's flat-network-provider module
    public static final String APPLY_DHCP_PATH = "/flatnetworkprovider/dhcp/apply";
    public static final String PREPARE_DHCP_PATH = "/flatnetworkprovider/dhcp/prepare";
    public static final String RELEASE_DHCP_PATH = "/flatnetworkprovider/dhcp/release";
    public static final String DHCP_CONNECT_PATH = "/flatnetworkprovider/dhcp/connect";
    public static final String RESET_DEFAULT_GATEWAY_PATH = "/flatnetworkprovider/dhcp/resetDefaultGateway";
    public static final String DHCP_DELETE_NAMESPACE_PATH = "/flatnetworkprovider/dhcp/deletenamespace";

    /**
     * Name of the per-L3 DHCP network namespace on a host:
     * {@code "<bridgeName>_<l3Uuid>"}.
     */
    public static String makeNamespaceName(String brName, String l3Uuid) {
        return String.format("%s_%s", brName, l3Uuid);
    }

    /**
     * Build the DHCP configuration for every eligible user VM running on a
     * (re)connected KVM host, so the host's DHCP namespaces can be rebuilt.
     *
     * @param context the connected-host context (host inventory etc.)
     * @return one {@link DhcpInfo} per (nic, used IP) pair, or {@code null}
     *         when the host has no eligible VMs or nics
     */
    @Transactional(readOnly = true)
    private List<DhcpInfo> getDhcpInfoForConnectedKvmHost(KVMHostConnectedContext context) {
        // user VMs on this host in (or transitioning through) a running state
        String sql = "select vm.uuid, vm.defaultL3NetworkUuid from VmInstanceVO vm where vm.hostUuid = :huuid and vm.state in (:states) and vm.type = :vtype";
        TypedQuery<Tuple> q = dbf.getEntityManager().createQuery(sql, Tuple.class);
        q.setParameter("huuid", context.getInventory().getUuid());
        q.setParameter("states", list(VmInstanceState.Running, VmInstanceState.Unknown, VmInstanceState.Starting, VmInstanceState.Rebooting, VmInstanceState.Resuming, VmInstanceState.Migrating, VmInstanceState.VolumeMigrating));
        q.setParameter("vtype", VmInstanceConstant.USER_VM_TYPE);
        List<Tuple> ts = q.getResultList();
        if (ts.isEmpty()) {
            return null;
        }

        // vmUuid -> default L3 network uuid
        Map<String, String> vmDefaultL3 = new HashMap<String, String>();
        for (Tuple t : ts) {
            vmDefaultL3.put(t.get(0, String.class), t.get(1, String.class));
        }

        // nics of those VMs whose L3 network gets DHCP from the flat provider
        sql = "select nic from VmNicVO nic, L3NetworkVO l3, NetworkServiceL3NetworkRefVO ref, NetworkServiceProviderVO provider, UsedIpVO ip" +
                " where nic.uuid = ip.vmNicUuid and ip.l3NetworkUuid = l3.uuid" +
                " and ref.l3NetworkUuid = l3.uuid and ref.networkServiceProviderUuid = provider.uuid " +
                " and ref.networkServiceType = :dhcpType " +
                " and provider.type = :ptype and nic.vmInstanceUuid in (:vmUuids) group by nic.uuid";
        TypedQuery<VmNicVO> nq = dbf.getEntityManager().createQuery(sql, VmNicVO.class);
        nq.setParameter("ptype", FlatNetworkServiceConstant.FLAT_NETWORK_SERVICE_TYPE_STRING);
        nq.setParameter("dhcpType", NetworkServiceType.DHCP.toString());
        nq.setParameter("vmUuids", vmDefaultL3.keySet());
        List<VmNicVO> nics = nq.getResultList();
        if (nics.isEmpty()) {
            return null;
        }

        // every L3 network referenced by a used IP of those nics
        List<String> l3Uuids = new ArrayList<>();
        for (VmNicVO nic : nics) {
            for (UsedIpVO ip : nic.getUsedIps()) {
                l3Uuids.add(ip.getL3NetworkUuid());
            }
        }
        l3Uuids = l3Uuids.stream().distinct().collect(Collectors.toList());

        // l3Uuid -> L2 bridge-name system tag (the tag lives on the L2 network)
        sql = "select t.tag, l3.uuid from SystemTagVO t, L3NetworkVO l3 where t.resourceType = :ttype and t.tag like :tag" +
                " and t.resourceUuid = l3.l2NetworkUuid and l3.uuid in (:l3Uuids)";
        TypedQuery<Tuple> tq = dbf.getEntityManager().createQuery(sql, Tuple.class);
tq.setParameter("tag", TagUtils.tagPatternToSqlPattern(KVMSystemTags.L2_BRIDGE_NAME.getTagFormat())); tq.setParameter("l3Uuids", l3Uuids); tq.setParameter("ttype", L2NetworkVO.class.getSimpleName()); ts = tq.getResultList(); Map<String, String> bridgeNames = new HashMap<String, String>(); for (Tuple t : ts) { bridgeNames.put(t.get(1, String.class), t.get(0, String.class)); } sql = "select t.tag, vm.uuid from SystemTagVO t, VmInstanceVO vm where t.resourceType = :ttype" + " and t.tag like :tag and t.resourceUuid = vm.uuid and vm.uuid in (:vmUuids)"; tq = dbf.getEntityManager().createQuery(sql, Tuple.class); tq.setParameter("tag", TagUtils.tagPatternToSqlPattern(VmSystemTags.HOSTNAME.getTagFormat())); tq.setParameter("ttype", VmInstanceVO.class.getSimpleName()); tq.setParameter("vmUuids", vmDefaultL3.keySet()); ts = tq.getResultList(); Map<String, String> hostnames = new HashMap<String, String>(); for (Tuple t : ts) { hostnames.put(t.get(1, String.class), VmSystemTags.HOSTNAME.getTokenByTag(t.get(0, String.class), VmSystemTags.HOSTNAME_TOKEN)); } sql = "select l3 from L3NetworkVO l3 where l3.uuid in (:l3Uuids)"; TypedQuery<L3NetworkVO> l3q = dbf.getEntityManager().createQuery(sql, L3NetworkVO.class); l3q.setParameter("l3Uuids", l3Uuids); List<L3NetworkVO> l3s = l3q.getResultList(); Map<String, L3NetworkVO> l3Map = new HashMap<String, L3NetworkVO>(); for (L3NetworkVO l3 : l3s) { l3Map.put(l3.getUuid(), l3); } List<DhcpInfo> dhcpInfoList = new ArrayList<DhcpInfo>(); for (VmNicVO nic : nics) { for (UsedIpVO ip : nic.getUsedIps()) { DhcpInfo info = new DhcpInfo(); info.bridgeName = KVMSystemTags.L2_BRIDGE_NAME.getTokenByTag(bridgeNames.get(nic.getL3NetworkUuid()), KVMSystemTags.L2_BRIDGE_NAME_TOKEN); info.namespaceName = makeNamespaceName( info.bridgeName, ip.getL3NetworkUuid() ); DebugUtils.Assert(info.bridgeName != null, "bridge name cannot be null"); info.mtu = new MtuGetter().getMtu(ip.getL3NetworkUuid()); info.mac = nic.getMac(); info.netmask = ip.getNetmask(); 
info.isDefaultL3Network = ip.getL3NetworkUuid().equals(vmDefaultL3.get(nic.getVmInstanceUuid())); info.ip = ip.getIp(); info.gateway = ip.getGateway(); info.ipVersion = ip.getIpVersion(); L3NetworkVO l3 = l3Map.get(ip.getL3NetworkUuid()); info.dnsDomain = l3.getDnsDomain(); info.dns = getL3NetworkDns(ip.getL3NetworkUuid()); info.firstIp = NetworkUtils.getSmallestIp(l3.getIpRanges().stream().map(IpRangeAO::getStartIp).collect(Collectors.toList())); info.endIp = NetworkUtils.getBiggesttIp(l3.getIpRanges().stream().map(IpRangeAO::getEndIp).collect(Collectors.toList())); info.prefixLength = l3.getIpRanges().stream().findAny().map(IpRangeAO::getPrefixLen).orElse(null); if (info.isDefaultL3Network) { info.hostname = hostnames.get(nic.getVmInstanceUuid()); if (info.hostname == null && ip.getIp() != null) { if (ip.getIpVersion() == IPv6Constants.IPv4) { info.hostname = ip.getIp().replaceAll("\\.", "-"); } else { info.hostname = IPv6NetworkUtils.ipv6AddessToHostname(ip.getIp()); } } if (info.dnsDomain != null) { info.hostname = String.format("%s.%s", info.hostname, info.dnsDomain); } } info.l3NetworkUuid = l3.getUuid(); info.hostRoutes = getL3NetworkHostRoute(ip.getL3NetworkUuid()); dhcpInfoList.add(info); } } return dhcpInfoList; } @Override @MessageSafe public void handleMessage(Message msg) { if (msg instanceof APIMessage) { handleApiMessage((APIMessage) msg); } else { handleLocalMessage(msg); } } private void handleApiMessage(APIMessage msg) { if (msg instanceof APIGetL3NetworkDhcpIpAddressMsg) { handle((APIGetL3NetworkDhcpIpAddressMsg) msg); } else { bus.dealWithUnknownMessage(msg); } } private void handleLocalMessage(Message msg) { if (msg instanceof FlatDhcpAcquireDhcpServerIpMsg) { handle((FlatDhcpAcquireDhcpServerIpMsg) msg); } else if (msg instanceof L3NetworkUpdateDhcpMsg) { handle((L3NetworkUpdateDhcpMsg) msg); } else { bus.dealWithUnknownMessage(msg); } } private void handle(APIGetL3NetworkDhcpIpAddressMsg msg) { APIGetL3NetworkDhcpIpAddressReply reply = new 
APIGetL3NetworkDhcpIpAddressReply(); if (msg.getL3NetworkUuid() == null) { reply.setError(argerr("l3 network uuid cannot be null")); bus.reply(msg, reply); return; } String tag = FlatNetworkSystemTags.L3_NETWORK_DHCP_IP.getTag(msg.getL3NetworkUuid()); if (tag != null) { Map<String, String> tokens = FlatNetworkSystemTags.L3_NETWORK_DHCP_IP.getTokensByTag(tag); String dhcpServerIp = tokens.get(FlatNetworkSystemTags.L3_NETWORK_DHCP_IP_TOKEN); if (dhcpServerIp != null) { reply.setIp(IPv6NetworkUtils.ipv6TagValueToAddress(dhcpServerIp)); bus.reply(msg, reply); return; } } reply.setError(operr("Cannot find DhcpIp for l3 network[uuid:%s]", msg.getL3NetworkUuid())); bus.reply(msg, reply); } String allocateDhcpIp(String l3Uuid) { return allocateDhcpIp(l3Uuid, null); } String allocateDhcpIp(String l3Uuid, String excludedIp) { return allocateDhcpIp(l3Uuid, true, null, excludedIp); } @Deferred private String allocateDhcpIp(String l3Uuid, boolean allocate_ip, String requiredIp, String excludedIp) { L3NetworkVO l3 = Q.New(L3NetworkVO.class).eq(L3NetworkVO_.uuid, l3Uuid).find(); if (!isProvidedByMe(L3NetworkInventory.valueOf(l3))) { return null; } // TODO: static allocate the IP to avoid the lock GLock lock = new GLock(String.format("l3-%s-allocate-dhcp-ip", l3Uuid), TimeUnit.MINUTES.toSeconds(30)); lock.lock(); Defer.defer(lock::unlock); String dhcpServerIp; String tag = FlatNetworkSystemTags.L3_NETWORK_DHCP_IP.getTag(l3Uuid); if (tag != null) { Map<String, String> tokens = FlatNetworkSystemTags.L3_NETWORK_DHCP_IP.getTokensByTag(tag); dhcpServerIp = tokens.get(FlatNetworkSystemTags.L3_NETWORK_DHCP_IP_TOKEN); if (dhcpServerIp != null) { dhcpServerIp = IPv6NetworkUtils.ipv6TagValueToAddress(dhcpServerIp); } return dhcpServerIp; } dhcpServerIp = requiredIp; /* dhcp server IP uuid in L3_NETWORK_DHCP_IP is not used any more, to be compatible with old version, * keep the format of L3_NETWORK_DHCP_IP unchanged, so set it be null temporary, it will be optimized later */ String 
dhcpServerIpUuid = "null"; if (allocate_ip) { AllocateIpMsg amsg = new AllocateIpMsg(); amsg.setL3NetworkUuid(l3Uuid); if (requiredIp != null) { amsg.setRequiredIp(requiredIp); } amsg.setExcludedIp(excludedIp); bus.makeTargetServiceIdByResourceUuid(amsg, L3NetworkConstant.SERVICE_ID, l3Uuid); MessageReply reply = bus.call(amsg); if (!reply.isSuccess()) { throw new OperationFailureException(reply.getError()); } AllocateIpReply r = reply.castReply(); UsedIpInventory ip = r.getIpInventory(); dhcpServerIp = ip.getIp(); dhcpServerIpUuid = ip.getUuid(); } SystemTagCreator creator = FlatNetworkSystemTags.L3_NETWORK_DHCP_IP.newSystemTagCreator(l3Uuid); creator.inherent = true; creator.setTagByTokens( map( e(FlatNetworkSystemTags.L3_NETWORK_DHCP_IP_TOKEN, IPv6NetworkUtils.ipv6AddessToTagValue(dhcpServerIp)), e(FlatNetworkSystemTags.L3_NETWORK_DHCP_IP_UUID_TOKEN, dhcpServerIpUuid) ) ); creator.create(); logger.debug(String.format("allocate DHCP server IP[ip:%s, uuid:%s] for l3 network[uuid:%s]", dhcpServerIp, dhcpServerIpUuid, l3Uuid)); for (DhcpServerExtensionPoint exp : pluginRgty.getExtensionList(DhcpServerExtensionPoint.class)) { exp.afterAllocateDhcpServerIP(l3Uuid, dhcpServerIp); } return dhcpServerIp; } private String allocateDhcpIp(String l3Uuid, boolean allocate_ip, String requiredIp) { return allocateDhcpIp(l3Uuid, allocate_ip, requiredIp, null); } private void handle(final FlatDhcpAcquireDhcpServerIpMsg msg) { thdf.syncSubmit(new SyncTask<Void>() { @Override public Void call() { dealMessage(msg); return null; } @MessageSafe private void dealMessage(FlatDhcpAcquireDhcpServerIpMsg msg) { FlatDhcpAcquireDhcpServerIpReply reply = new FlatDhcpAcquireDhcpServerIpReply(); String ip = allocateDhcpIp(msg.getL3NetworkUuid()); if (ip != null) { List<IpRangeVO> iprs = Q.New(IpRangeVO.class).eq(IpRangeVO_.l3NetworkUuid, msg.getL3NetworkUuid()).list(); if (iprs == null || iprs.isEmpty()) { reply.setError(operr("L3 network[uuid:%s] does not have any iprange", 
msg.getL3NetworkUuid())); bus.reply(msg, reply); return; } reply.setIp(ip); reply.setNetmask(iprs.get(0).getNetmask()); reply.setIpr(IpRangeInventory.valueOf(iprs.get(0))); } bus.reply(msg, reply); } @Override public String getName() { return getSyncSignature(); } @Override public String getSyncSignature() { return String.format("flat-dhcp-get-dhcp-ip-for-l3-network-%s", msg.getL3NetworkUuid()); } @Override public int getSyncLevel() { return 1; } }); } @Override public String getId() { return bus.makeLocalServiceId(FlatNetworkServiceConstant.SERVICE_ID); } @Override public boolean start() { return true; } @Override public boolean stop() { return true; } @Override public String preDeleteL3Network(L3NetworkInventory inventory) { return null; } @Override public void beforeDeleteL3Network(L3NetworkInventory inventory) { } private boolean isProvidedByMe(L3NetworkInventory l3) { String providerType = new NetworkProviderFinder().getNetworkProviderTypeByNetworkServiceType(l3.getUuid(), NetworkServiceType.DHCP.toString()); return FlatNetworkServiceConstant.FLAT_NETWORK_SERVICE_TYPE_STRING.equals(providerType); } @Override public void afterDeleteL3Network(L3NetworkInventory inventory) { if (!isProvidedByMe(inventory)) { return; } String dhchip = getDHCPServerIP(inventory.getUuid()); if (dhchip != null) { deleteDhcpServerIp(inventory.getUuid(), dhchip); logger.debug(String.format("delete DHCP IP[%s] of the flat network[uuid:%s] as the L3 network is deleted", dhchip, inventory.getUuid())); } deleteNameSpace(inventory); } private void deleteNameSpace(L3NetworkInventory inventory) { List<String> huuids = new Callable<List<String>>() { @Override @Transactional(readOnly = true) public List<String> call() { String sql = "select host.uuid from HostVO host, L2NetworkVO l2, L2NetworkClusterRefVO ref where l2.uuid = ref.l2NetworkUuid" + " and ref.clusterUuid = host.clusterUuid and l2.uuid = :uuid"; TypedQuery<String> q = dbf.getEntityManager().createQuery(sql, String.class); 
q.setParameter("uuid", inventory.getL2NetworkUuid()); return q.getResultList(); } }.call(); if (huuids.isEmpty()) { return; } String brName = new BridgeNameFinder().findByL3Uuid(inventory.getUuid()); DeleteNamespaceCmd cmd = new DeleteNamespaceCmd(); cmd.bridgeName = brName; cmd.namespaceName = makeNamespaceName(brName, inventory.getUuid()); new While<>(huuids).all((huuid, comp) -> { new KvmCommandSender(huuid).send(cmd, DHCP_DELETE_NAMESPACE_PATH, wrapper -> { DeleteNamespaceRsp rsp = wrapper.getResponse(DeleteNamespaceRsp.class); return rsp.isSuccess() ? null : operr("operation error, because:%s", rsp.getError()); }, new SteppingSendCallback<KvmResponseWrapper>() { @Override public void success(KvmResponseWrapper w) { logger.debug(String.format("successfully deleted namespace for L3 network[uuid:%s, name:%s] on the " + "KVM host[uuid:%s]", inventory.getUuid(), inventory.getName(), getHostUuid())); } @Override public void fail(ErrorCode errorCode) { if (!errorCode.isError(HostErrors.OPERATION_FAILURE_GC_ELIGIBLE)) { return; } FlatDHCPDeleteNamespaceGC gc = new FlatDHCPDeleteNamespaceGC(); gc.hostUuid = getHostUuid(); gc.command = cmd; gc.NAME = String.format("gc-namespace-on-host-%s", getHostUuid()); gc.submit(); } }); comp.done(); }).run(new NoErrorCompletion(new NopeCompletion()){ @Override public void done() { } }); } private List<DhcpInfo> getVmDhcpInfo(VmInstanceInventory vm) { return getVmDhcpInfo(vm, null); } @Transactional(readOnly = true) private List<DhcpInfo> getVmDhcpInfo(VmInstanceInventory vm, String l3Uuid) { String sql = "select nic from VmNicVO nic, L3NetworkVO l3, NetworkServiceL3NetworkRefVO ref, NetworkServiceProviderVO provider, UsedIpVO ip" + " where nic.uuid = ip.vmNicUuid and ip.l3NetworkUuid = l3.uuid" + " and ref.l3NetworkUuid = l3.uuid and ref.networkServiceProviderUuid = provider.uuid " + " and ref.networkServiceType = :dhcpType " + " and provider.type = :ptype and nic.vmInstanceUuid = :vmUuid group by nic.uuid"; TypedQuery<VmNicVO> nq 
= dbf.getEntityManager().createQuery(sql, VmNicVO.class); nq.setParameter("dhcpType", NetworkServiceType.DHCP.toString()); nq.setParameter("ptype", FlatNetworkServiceConstant.FLAT_NETWORK_SERVICE_TYPE_STRING); nq.setParameter("vmUuid", vm.getUuid()); List<VmNicVO> nics = nq.getResultList(); if (l3Uuid != null) { nics = nics.stream().filter(nic -> nic.getUsedIps().stream().map(UsedIpVO::getL3NetworkUuid).collect(Collectors.toList()).contains(l3Uuid)).collect(Collectors.toList()); } if (nics.isEmpty()) { return new ArrayList<>(); } List<String> l3Uuids = new ArrayList<>(); for (VmNicVO nic : nics) { for (UsedIpVO ip : nic.getUsedIps()) { l3Uuids.add(ip.getL3NetworkUuid()); } } l3Uuids = l3Uuids.stream().distinct().collect(Collectors.toList()); sql = "select t.tag, l3.uuid from SystemTagVO t, L3NetworkVO l3 where t.resourceType = :ttype and t.tag like :tag" + " and t.resourceUuid = l3.l2NetworkUuid and l3.uuid in (:l3Uuids)"; TypedQuery<Tuple> tq = dbf.getEntityManager().createQuery(sql, Tuple.class); tq.setParameter("tag", TagUtils.tagPatternToSqlPattern(KVMSystemTags.L2_BRIDGE_NAME.getTagFormat())); tq.setParameter("l3Uuids", l3Uuids); tq.setParameter("ttype", L2NetworkVO.class.getSimpleName()); List<Tuple> ts = tq.getResultList(); Map<String, String> bridgeNames = new HashMap<String, String>(); for (Tuple t : ts) { bridgeNames.put(t.get(1, String.class), t.get(0, String.class)); } sql = "select t.tag, vm.uuid from SystemTagVO t, VmInstanceVO vm where t.resourceType = :ttype" + " and t.tag like :tag and t.resourceUuid = vm.uuid and vm.uuid = :vmUuid"; tq = dbf.getEntityManager().createQuery(sql, Tuple.class); tq.setParameter("tag", TagUtils.tagPatternToSqlPattern(VmSystemTags.HOSTNAME.getTagFormat())); tq.setParameter("ttype", VmInstanceVO.class.getSimpleName()); tq.setParameter("vmUuid", vm.getUuid()); Map<String, String> hostnames = new HashMap<String, String>(); for (Tuple t : ts) { hostnames.put(t.get(1, String.class), 
VmSystemTags.HOSTNAME.getTokenByTag(t.get(0, String.class), VmSystemTags.HOSTNAME_TOKEN)); } sql = "select l3 from L3NetworkVO l3 where l3.uuid in (:l3Uuids)"; TypedQuery<L3NetworkVO> l3q = dbf.getEntityManager().createQuery(sql, L3NetworkVO.class); l3q.setParameter("l3Uuids", l3Uuids); List<L3NetworkVO> l3s = l3q.getResultList(); Map<String, L3NetworkVO> l3Map = new HashMap<String, L3NetworkVO>(); for (L3NetworkVO l3 : l3s) { l3Map.put(l3.getUuid(), l3); } List<DhcpInfo> dhcpInfoList = new ArrayList<DhcpInfo>(); for (VmNicVO nic : nics) { for (UsedIpVO ip : nic.getUsedIps()) { DhcpInfo info = new DhcpInfo(); info.bridgeName = KVMSystemTags.L2_BRIDGE_NAME.getTokenByTag(bridgeNames.get(ip.getL3NetworkUuid()), KVMSystemTags.L2_BRIDGE_NAME_TOKEN); info.namespaceName = makeNamespaceName( info.bridgeName, ip.getL3NetworkUuid() ); DebugUtils.Assert(info.bridgeName != null, "bridge name cannot be null"); info.mac = nic.getMac(); info.netmask = ip.getNetmask(); info.isDefaultL3Network = ip.getL3NetworkUuid().equals(vm.getDefaultL3NetworkUuid()); info.ip = ip.getIp(); info.ipVersion = ip.getIpVersion(); info.gateway = ip.getGateway(); L3NetworkVO l3 = l3Map.get(ip.getL3NetworkUuid()); info.dnsDomain = l3.getDnsDomain(); info.dns = getL3NetworkDns(ip.getL3NetworkUuid()); info.firstIp = NetworkUtils.getSmallestIp(l3.getIpRanges().stream().map(IpRangeAO::getStartIp).collect(Collectors.toList())); info.endIp = NetworkUtils.getBiggesttIp(l3.getIpRanges().stream().map(IpRangeAO::getEndIp).collect(Collectors.toList())); info.prefixLength = l3.getIpRanges().stream().findAny().map(IpRangeAO::getPrefixLen).orElse(null); if (info.isDefaultL3Network) { info.hostname = hostnames.get(nic.getVmInstanceUuid()); if (info.hostname == null && ip.getIp() != null) { if (ip.getIpVersion() == IPv6Constants.IPv4) { info.hostname = ip.getIp().replaceAll("\\.", "-"); } else { info.hostname = IPv6NetworkUtils.ipv6AddessToHostname(ip.getIp()); } } if (info.dnsDomain != null) { info.hostname = 
String.format("%s.%s", info.hostname, info.dnsDomain); } } info.l3NetworkUuid = l3.getUuid(); info.hostRoutes = getL3NetworkHostRoute(ip.getL3NetworkUuid()); dhcpInfoList.add(info); } } return dhcpInfoList; } @Override public void preMigrateVm(VmInstanceInventory inv, String destHostUuid) { List<DhcpInfo> info = getVmDhcpInfo(inv); if (info == null || info.isEmpty()) { return; } FutureCompletion completion = new FutureCompletion(null); applyDhcpToHosts(info, destHostUuid, false, completion); completion.await(TimeUnit.MINUTES.toMillis(30)); if (!completion.isSuccess()) { throw new OperationFailureException(operr("cannot configure DHCP for vm[uuid:%s] on the destination host[uuid:%s]", inv.getUuid(), destHostUuid).causedBy(completion.getErrorCode())); } } @Override public void beforeMigrateVm(VmInstanceInventory inv, String destHostUuid) { } @Override public void afterMigrateVm(VmInstanceInventory inv, String srcHostUuid) { List<DhcpInfo> info = getVmDhcpInfo(inv); if (info == null || info.isEmpty()) { return; } releaseDhcpService(info, inv.getUuid(), srcHostUuid, new NoErrorCompletion() { @Override public void done() { // ignore } }); } @Override public void failedToMigrateVm(VmInstanceInventory inv, String destHostUuid, ErrorCode reason) { List<DhcpInfo> info = getVmDhcpInfo(inv); if (info == null || info.isEmpty()) { return; } releaseDhcpService(info, inv.getUuid(), destHostUuid, new NoErrorCompletion() { @Override public void done() { // ignore } }); } @Override public Flow createVmAbnormalLifeCycleHandlingFlow(final VmAbnormalLifeCycleStruct struct) { return new Flow() { String __name__ = "flat-network-configure-dhcp"; VmAbnormalLifeCycleOperation operation = struct.getOperation(); VmInstanceInventory vm = struct.getVmInstance(); List<DhcpInfo> info = getVmDhcpInfo(vm); String applyHostUuidForRollback; String releaseHostUuidForRollback; @Override public void run(FlowTrigger trigger, Map data) { if (info == null || info.isEmpty()) { trigger.next(); return; } if 
(operation == VmAbnormalLifeCycleOperation.VmRunningOnTheHost) { vmRunningOnTheHost(trigger); } else if (operation == VmAbnormalLifeCycleOperation.VmStoppedOnTheSameHost) { vmStoppedOnTheSameHost(trigger); } else if (operation == VmAbnormalLifeCycleOperation.VmRunningFromUnknownStateHostChanged) { vmRunningFromUnknownStateHostChanged(trigger); } else if (operation == VmAbnormalLifeCycleOperation.VmRunningFromUnknownStateHostNotChanged) { vmRunningFromUnknownStateHostNotChanged(trigger); } else if (operation == VmAbnormalLifeCycleOperation.VmMigrateToAnotherHost) { vmMigrateToAnotherHost(trigger); } else if (operation == VmAbnormalLifeCycleOperation.VmRunningFromIntermediateState) { vmRunningFromIntermediateState(trigger); } else { trigger.next(); } } private void vmRunningFromIntermediateState(final FlowTrigger trigger) { applyDhcpToHosts(info, struct.getCurrentHostUuid(), false, new Completion(trigger) { @Override public void success() { releaseHostUuidForRollback = struct.getCurrentHostUuid(); trigger.next(); } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } private void vmMigrateToAnotherHost(final FlowTrigger trigger) { releaseDhcpService(info, vm.getUuid(), struct.getOriginalHostUuid(), new NopeNoErrorCompletion()); applyHostUuidForRollback = struct.getOriginalHostUuid(); applyDhcpToHosts(info, struct.getCurrentHostUuid(), false, new Completion(trigger) { @Override public void success() { releaseHostUuidForRollback = struct.getCurrentHostUuid(); trigger.next(); } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } private void vmRunningFromUnknownStateHostNotChanged(final FlowTrigger trigger) { applyDhcpToHosts(info, struct.getCurrentHostUuid(), false, new Completion(trigger) { @Override public void success() { releaseHostUuidForRollback = struct.getCurrentHostUuid(); trigger.next(); } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } private void 
vmRunningFromUnknownStateHostChanged(final FlowTrigger trigger) { releaseDhcpService(info, vm.getUuid(), struct.getOriginalHostUuid(), new NopeNoErrorCompletion()); applyHostUuidForRollback = struct.getCurrentHostUuid(); applyDhcpToHosts(info, struct.getCurrentHostUuid(), false, new Completion(trigger) { @Override public void success() { releaseHostUuidForRollback = struct.getCurrentHostUuid(); trigger.next(); } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } private void vmStoppedOnTheSameHost(final FlowTrigger trigger) { releaseDhcpService(info, vm.getUuid(), struct.getCurrentHostUuid(), new NoErrorCompletion(trigger) { @Override public void done() { applyHostUuidForRollback = struct.getCurrentHostUuid(); trigger.next(); } }); } private void vmRunningOnTheHost(final FlowTrigger trigger) { applyDhcpToHosts(info, struct.getCurrentHostUuid(), false, new Completion(trigger) { @Override public void success() { releaseHostUuidForRollback = struct.getCurrentHostUuid(); trigger.next(); } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } @Override public void rollback(FlowRollback trigger, Map data) { if (info == null) { trigger.rollback(); return; } if (releaseHostUuidForRollback != null) { releaseDhcpService(info, vm.getUuid(), struct.getOriginalHostUuid(), new NopeNoErrorCompletion()); } if (applyHostUuidForRollback != null) { applyDhcpToHosts(info, struct.getCurrentHostUuid(), false, new Completion(null) { @Override public void success() { //ignore } @Override public void fail(ErrorCode errorCode) { logger.warn(String.format("failed to re-apply DHCP configuration of" + " the vm[uuid:%s] to the host[uuid:%s], %s. 
You may need to reboot the VM to" + " make the DHCP works", vm.getUuid(), applyHostUuidForRollback, errorCode)); } }); } trigger.rollback(); } }; } @Override public void preDeleteIpRange(IpRangeInventory ipRange) { } @Override public void beforeDeleteIpRange(IpRangeInventory ipRange) { } private void deleteDhcpServerIp(String l3Uuid, String dhcpServerIp) { FlatNetworkSystemTags.L3_NETWORK_DHCP_IP.deleteInherentTag(l3Uuid); for (DhcpServerExtensionPoint exp : pluginRgty.getExtensionList(DhcpServerExtensionPoint.class)) { exp.afterRemoveDhcpServerIP(l3Uuid, dhcpServerIp); } } private String getDHCPServerIP(String l3Uuid) { String tag = FlatNetworkSystemTags.L3_NETWORK_DHCP_IP.getTag(l3Uuid); if (tag != null) { Map<String, String> tokens = FlatNetworkSystemTags.L3_NETWORK_DHCP_IP.getTokensByTag(tag); String dhcpServerIp = tokens.get(FlatNetworkSystemTags.L3_NETWORK_DHCP_IP_TOKEN); if (dhcpServerIp != null) { dhcpServerIp = IPv6NetworkUtils.ipv6TagValueToAddress(dhcpServerIp); } return dhcpServerIp; } return null; } @Override public void afterDeleteIpRange(IpRangeInventory ipRange) { String dhcpServerIp = getDHCPServerIP(ipRange.getL3NetworkUuid()); boolean ipRangeExisted = Q.New(IpRangeVO.class).eq(IpRangeVO_.l3NetworkUuid, ipRange.getL3NetworkUuid()).isExists(); if (!ipRangeExisted && dhcpServerIp != null) { deleteDhcpServerIp(ipRange.getL3NetworkUuid(), dhcpServerIp); logger.debug(String.format("delete DHCP IP[%s] of the flat network[uuid:%s] as the IP range[uuid:%s] is deleted", dhcpServerIp, ipRange.getL3NetworkUuid(), ipRange.getUuid())); } } @Override public void failedToDeleteIpRange(IpRangeInventory ipRange, ErrorCode errorCode) { } @Override public Flow createKvmHostConnectingFlow(final KVMHostConnectedContext context) { return new NoRollbackFlow() { String __name__ = "prepare-flat-dhcp"; @Override public void run(final FlowTrigger trigger, Map data) { final List<DhcpInfo> dhcpInfoList = getDhcpInfoForConnectedKvmHost(context); if (dhcpInfoList == null) { 
                            trigger.next();
                            return;
                        }

                        // to flush ebtables
                        ConnectCmd cmd = new ConnectCmd();
                        KVMHostAsyncHttpCallMsg msg = new KVMHostAsyncHttpCallMsg();
                        msg.setHostUuid(context.getInventory().getUuid());
                        msg.setCommand(cmd);
                        msg.setNoStatusCheck(true);
                        msg.setPath(DHCP_CONNECT_PATH);
                        bus.makeTargetServiceIdByResourceUuid(msg, HostConstant.SERVICE_ID, context.getInventory().getUuid());
                        bus.send(msg, new CloudBusCallBack(trigger) {
                            @Override
                            public void run(MessageReply reply) {
                                if (!reply.isSuccess()) {
                                    trigger.fail(reply.getError());
                                } else {
                                    // host-side connect succeeded; re-push all DHCP entries (rebuild=true)
                                    applyDhcpToHosts(dhcpInfoList, context.getInventory().getUuid(), true, new Completion(trigger) {
                                        @Override
                                        public void success() {
                                            trigger.next();
                                        }

                                        @Override
                                        public void fail(ErrorCode errorCode) {
                                            trigger.fail(errorCode);
                                        }
                                    });
                                }
                            }
                        });
                    }
                };
            }

    /**
     * Pre-allocates the flat-DHCP server IP for every DHCP-enabled L3 network the new VM attaches to.
     * This runs before the VM starts so that concurrent VM starts do not race the DHCP server's
     * own IP acquisition (the VM IP allocation happens ahead of flat DHCP acquiring its IP).
     */
    @Override
    public void beforeStartNewCreatedVm(VmInstanceSpec spec) {
        String providerUuid = new NetworkServiceProviderLookup().lookupUuidByType(FlatNetworkServiceConstant.FLAT_NETWORK_SERVICE_TYPE_STRING);
        Map<String, String> vmStaticIps = new StaticIpOperator().getStaticIpbyVmUuid(spec.getVmInventory().getUuid());

        // make sure the Flat DHCP acquired DHCP server IP before starting VMs,
        // otherwise it may not be able to get IP when lots of VMs start concurrently
        // because the logic of VM acquiring IP is ahead flat DHCP acquiring IP
        for (L3NetworkInventory l3 : VmNicSpec.getL3NetworkInventoryOfSpec(spec.getL3Networks())) {
            List<String> serviceTypes = l3.getNetworkServiceTypesFromProvider(providerUuid);
            if (serviceTypes.contains(NetworkServiceType.DHCP.toString())) {
                String staticIp = vmStaticIps.get(l3.getUuid());
                allocateDhcpIp(l3.getUuid(), staticIp);
            }
        }
    }

    /** A single host route pushed to the DHCP agent (option 121-style prefix/nexthop pair). */
    public static class HostRouteInfo {
        public String prefix;
        public String nexthop;
    }

    /** Per-NIC DHCP entry sent to the KVM agent; one instance per VM nic on a DHCP-enabled L3. */
    public static class DhcpInfo {
        public int ipVersion;
        public String raMode;
        public String ip;
        public String mac;
        public String netmask;
        public String firstIp;
        public String endIp;
        public Integer prefixLength;
        public String gateway;
        public String hostname;
        public boolean isDefaultL3Network;
        public String dnsDomain;
        public List<String> dns;
        public String bridgeName;
        public String namespaceName;
        public String l3NetworkUuid;
        public Integer mtu;
        public List<HostRouteInfo> hostRoutes;
    }

    /** Agent command: install DHCP entries; {@code rebuild} replaces all entries for the L3. */
    public static class ApplyDhcpCmd extends KVMAgentCommands.AgentCommand {
        public List<DhcpInfo> dhcp;
        public boolean rebuild;
        public String l3NetworkUuid;
    }

    public static class ApplyDhcpRsp extends KVMAgentCommands.AgentResponse {
    }

    /** Agent command: remove DHCP entries for the given nics. */
    public static class ReleaseDhcpCmd extends KVMAgentCommands.AgentCommand {
        public List<DhcpInfo> dhcp;
    }

    public static class ReleaseDhcpRsp extends KVMAgentCommands.AgentResponse {
    }

    /** Agent command: create the per-L3 namespace + DHCP server endpoint on the host. */
    public static class PrepareDhcpCmd extends KVMAgentCommands.AgentCommand {
        public String bridgeName;
        public String dhcpServerIp;
        public String dhcpNetmask;
        public String namespaceName;
        public Integer prefixLen;
        public Integer ipVersion;
        public String addressMode;
    }

    public static class PrepareDhcpRsp extends KVMAgentCommands.AgentResponse {
    }

    /** Agent command sent on host connect (flushes ebtables state on the agent side). */
    public static class ConnectCmd extends KVMAgentCommands.AgentCommand {
    }

    public static class ConnectRsp extends KVMAgentCommands.AgentResponse {
    }

    /** Agent command: swap the default-gateway entry when a VM's default L3 changes. */
    public static class ResetDefaultGatewayCmd extends KVMAgentCommands.AgentCommand {
        public String bridgeNameOfGatewayToRemove;
        public String namespaceNameOfGatewayToRemove;
        public String gatewayToRemove;
        public String macOfGatewayToRemove;
        public String gatewayToAdd;
        public String macOfGatewayToAdd;
        public String bridgeNameOfGatewayToAdd;
        public String namespaceNameOfGatewayToAdd;
    }

    public static class ResetDefaultGatewayRsp extends KVMAgentCommands.AgentResponse {
    }

    /** Agent command: tear down the per-L3 DHCP namespace on a host. */
    public static class DeleteNamespaceCmd extends KVMAgentCommands.AgentCommand {
        public String bridgeName;
        public String namespaceName;
    }

    public static class DeleteNamespaceRsp extends KVMAgentCommands.AgentResponse {
    }

    /** Identifies this backend as the flat network service provider. */
    public NetworkServiceProviderType getProviderType() {
        return FlatNetworkServiceConstant.FLAT_NETWORK_SERVICE_TYPE;
    }

    /**
     * Returns the DNS servers configured on the L3 network (in insertion order).
     * For IPv4 networks, appends the DHCP server IP as a default DNS when the
     * ALLOW_DEFAULT_DNS global config is on.
     */
    private List<String> getL3NetworkDns(String l3NetworkUuid) {
        List<String> dns = Q.New(L3NetworkDnsVO.class).eq(L3NetworkDnsVO_.l3NetworkUuid, l3NetworkUuid)
                .select(L3NetworkDnsVO_.dns).orderBy(L3NetworkDnsVO_.id, SimpleQuery.Od.ASC).listValues();
        if (dns == null) {
            dns = new ArrayList<String>();
        }

        L3NetworkVO l3VO = Q.New(L3NetworkVO.class).eq(L3NetworkVO_.uuid, l3NetworkUuid).find();
        if (FlatNetwordProviderGlobalConfig.ALLOW_DEFAULT_DNS.value(Boolean.class) && l3VO.getIpVersion() == IPv6Constants.IPv4) {
            String dhcpIp = getDHCPServerIP(l3NetworkUuid);
            if (dhcpIp != null) {
                dns.add(dhcpIp);
            }
        }
        return dns;
    }

    /** Loads the host routes configured on the L3 network; empty list when none. */
    private List<HostRouteInfo> getL3NetworkHostRoute(String l3NetworkUuid) {
        List<L3NetworkHostRouteVO> vos = Q.New(L3NetworkHostRouteVO.class).eq(L3NetworkHostRouteVO_.l3NetworkUuid, l3NetworkUuid).list();
        if (vos == null || vos.isEmpty()) {
            return new ArrayList<>();
        }

        List<HostRouteInfo> res = new ArrayList<>();
        for (L3NetworkHostRouteVO vo : vos) {
            HostRouteInfo info = new HostRouteInfo();
            info.prefix = vo.getPrefix();
            info.nexthop = vo.getNexthop();
            res.add(info);
        }
        return res;
    }

    /**
     * Converts DhcpStructs into agent-facing DhcpInfo objects.
     * Entries without an IP, and IPv6/SLAAC entries (address is self-assigned), are skipped.
     * For the VM's default L3, a hostname is synthesized from the IP when none is set,
     * and the DNS domain is appended to it.
     */
    private List<DhcpInfo> toDhcpInfo(List<DhcpStruct> structs) {
        // resolve each L3's bridge name once, from the L2 bridge-name system tag
        final Map<String, String> l3Bridges = new HashMap<String, String>();
        for (DhcpStruct s : structs) {
            if (!l3Bridges.containsKey(s.getL3Network().getUuid())) {
                l3Bridges.put(s.getL3Network().getUuid(),
                        KVMSystemTags.L2_BRIDGE_NAME.getTokenByResourceUuid(s.getL3Network().getL2NetworkUuid(), KVMSystemTags.L2_BRIDGE_NAME_TOKEN));
            }
        }

        return CollectionUtils.transformToList(structs, new Function<DhcpInfo, DhcpStruct>() {
            @Override
            public DhcpInfo call(DhcpStruct arg) {
                if (arg.getIp() == null) {
                    return null;
                }
                if ((arg.getIpVersion() == IPv6Constants.IPv6) && (IPv6Constants.SLAAC.equals(arg.getRaMode()))) {
                    return null;
                }

                DhcpInfo info = new DhcpInfo();
                info.ipVersion = arg.getIpVersion();
                info.raMode = arg.getRaMode();
                info.dnsDomain = arg.getDnsDomain();
                info.gateway = arg.getGateway();
                info.hostname = arg.getHostname();
                info.isDefaultL3Network = arg.isDefaultL3Network();
                if (info.isDefaultL3Network) {
                    if (info.hostname == null && arg.getIp() != null) {
                        if (info.ipVersion == IPv6Constants.IPv4) {
                            // e.g. 10.0.0.5 -> "10-0-0-5"
                            info.hostname = arg.getIp().replaceAll("\\.", "-");
                        } else {
                            info.hostname = IPv6NetworkUtils.ipv6AddessToHostname(arg.getIp());
                        }
                    }
                    if (info.dnsDomain != null) {
                        info.hostname = String.format("%s.%s", info.hostname, info.dnsDomain);
                    }
                }

                info.ip = arg.getIp();
                info.netmask = arg.getNetmask();
                info.firstIp = arg.getFirstIp();
                info.endIp = arg.getEndIP();
                info.prefixLength = arg.getPrefixLength();
                info.mac = arg.getMac();
                info.dns = getL3NetworkDns(arg.getL3Network().getUuid());
                info.l3NetworkUuid = arg.getL3Network().getUuid();
                info.bridgeName = l3Bridges.get(arg.getL3Network().getUuid());
                info.namespaceName = makeNamespaceName(info.bridgeName, arg.getL3Network().getUuid());
                info.mtu = arg.getMtu();
                info.hostRoutes = getL3NetworkHostRoute(arg.getL3Network().getUuid());
                return info;
            }
        });
    }

    /**
     * Applies DHCP entries to one host, grouped per L3 network; L3s are processed
     * sequentially (DhcpApply recursion via the flow chain's done handler).
     * Per L3 the chain: acquire DHCP server IP -> prepare namespace/server on host -> apply entries.
     * First flow-chain error fails the whole completion.
     */
    private void applyDhcpToHosts(List<DhcpInfo> dhcpInfo, final String hostUuid, final boolean rebuild, final Completion completion) {
        // group entries by L3 network uuid
        final Map<String, List<DhcpInfo>> l3DhcpMap = new HashMap<String, List<DhcpInfo>>();
        for (DhcpInfo d : dhcpInfo) {
            List<DhcpInfo> lst = l3DhcpMap.get(d.l3NetworkUuid);
            if (lst == null) {
                lst = new ArrayList<DhcpInfo>();
                l3DhcpMap.put(d.l3NetworkUuid, lst);
            }
            lst.add(d);
        }

        final Iterator<Map.Entry<String, List<DhcpInfo>>> it = l3DhcpMap.entrySet().iterator();

        class DhcpApply {
            void apply() {
                if (!it.hasNext()) {
                    completion.success();
                    return;
                }

                Map.Entry<String, List<DhcpInfo>> e = it.next();
                final String l3Uuid = e.getKey();
                final List<DhcpInfo> info = e.getValue();

                DebugUtils.Assert(!info.isEmpty(), "how can info be empty???");

                FlowChain chain = FlowChainBuilder.newShareFlowChain();
                chain.setName(String.format("flat-dhcp-provider-apply-dhcp-to-l3-network-%s", l3Uuid));
                chain.then(new ShareFlow() {
                    // filled by the first flow, consumed by the second
                    String dhcpServerIp;
                    String dhcpNetmask;
                    Integer prefixLen;

                    @Override
                    public void setup() {
                        flow(new NoRollbackFlow() {
                            String __name__ = "get-dhcp-server-ip";

                            @Override
                            public void run(final FlowTrigger trigger, Map data) {
                                FlatDhcpAcquireDhcpServerIpMsg msg = new FlatDhcpAcquireDhcpServerIpMsg();
                                msg.setL3NetworkUuid(l3Uuid);
                                bus.makeTargetServiceIdByResourceUuid(msg, FlatNetworkServiceConstant.SERVICE_ID, l3Uuid);
                                bus.send(msg, new CloudBusCallBack(trigger) {
                                    @Override
                                    public void run(MessageReply reply) {
                                        if (!reply.isSuccess()) {
                                            trigger.fail(reply.getError());
                                        } else {
                                            FlatDhcpAcquireDhcpServerIpReply r = reply.castReply();
                                            dhcpServerIp = r.getIp();
                                            dhcpNetmask = r.getNetmask();
                                            prefixLen = r.getIpr().getPrefixLen();
                                            trigger.next();
                                        }
                                    }
                                });
                            }
                        });

                        flow(new NoRollbackFlow() {
                            String __name__ = "prepare-distributed-dhcp-server-on-host";

                            @Override
                            public void run(final FlowTrigger trigger, Map data) {
                                // bridge/namespace are identical for all entries of the L3; use the first
                                DhcpInfo i = info.get(0);
                                PrepareDhcpCmd cmd = new PrepareDhcpCmd();
                                cmd.bridgeName = i.bridgeName;
                                cmd.namespaceName = i.namespaceName;
                                cmd.dhcpServerIp = dhcpServerIp;
                                cmd.dhcpNetmask = dhcpNetmask;
                                cmd.prefixLen = prefixLen;
                                cmd.ipVersion = i.ipVersion;
                                // NOTE(review): assumes the L3 has at least one ip range here — confirm callers guarantee it
                                List<IpRangeVO> rangeVOS = Q.New(IpRangeVO.class).eq(IpRangeVO_.l3NetworkUuid, l3Uuid).list();
                                cmd.addressMode = rangeVOS.get(0).getAddressMode();

                                KVMHostAsyncHttpCallMsg msg = new KVMHostAsyncHttpCallMsg();
                                msg.setHostUuid(hostUuid);
                                msg.setNoStatusCheck(true);
                                msg.setCommand(cmd);
                                msg.setPath(PREPARE_DHCP_PATH);
                                bus.makeTargetServiceIdByResourceUuid(msg, HostConstant.SERVICE_ID, hostUuid);
                                bus.send(msg, new CloudBusCallBack(trigger) {
                                    @Override
                                    public void run(MessageReply reply) {
                                        if (!reply.isSuccess()) {
                                            trigger.fail(reply.getError());
                                            return;
                                        }

                                        KVMHostAsyncHttpCallReply ar = reply.castReply();
                                        PrepareDhcpRsp rsp = ar.toResponse(PrepareDhcpRsp.class);
                                        if (!rsp.isSuccess()) {
                                            trigger.fail(operr("operation error, because:%s", rsp.getError()));
                                            return;
                                        }

                                        trigger.next();
                                    }
                                });
                            }
                        });

                        flow(new NoRollbackFlow() {
                            String __name__ = "apply-dhcp";

                            @Override
                            public void run(final FlowTrigger trigger, Map data) {
                                ApplyDhcpCmd cmd = new ApplyDhcpCmd();
                                cmd.dhcp = info;
                                cmd.rebuild = rebuild;
                                cmd.l3NetworkUuid = l3Uuid;

                                KVMHostAsyncHttpCallMsg msg = new KVMHostAsyncHttpCallMsg();
                                msg.setCommand(cmd);
                                msg.setHostUuid(hostUuid);
                                msg.setPath(APPLY_DHCP_PATH);
                                msg.setNoStatusCheck(true);
                                bus.makeTargetServiceIdByResourceUuid(msg, HostConstant.SERVICE_ID, hostUuid);
                                bus.send(msg, new CloudBusCallBack(trigger) {
                                    @Override
                                    public void run(MessageReply reply) {
                                        if (!reply.isSuccess()) {
                                            trigger.fail(reply.getError());
                                            return;
                                        }

                                        KVMHostAsyncHttpCallReply r = reply.castReply();
                                        ApplyDhcpRsp rsp = r.toResponse(ApplyDhcpRsp.class);
                                        if (!rsp.isSuccess()) {
                                            trigger.fail(operr("operation error, because:%s", rsp.getError()));
                                            return;
                                        }

                                        trigger.next();
                                    }
                                });
                            }
                        });

                        done(new FlowDoneHandler(completion) {
                            @Override
                            public void handle(Map data) {
                                // move on to the next L3 network
                                apply();
                            }
                        });

                        error(new FlowErrorHandler(completion) {
                            @Override
                            public void handle(ErrorCode errCode, Map data) {
                                completion.fail(errCode);
                            }
                        });
                    }
                }).start();
            }
        }

        new DhcpApply().apply();
    }

    /** Applies DHCP for a VM being started/migrated on its destination host. */
    @Override
    public void applyDhcpService(List<DhcpStruct> dhcpStructList, VmInstanceSpec spec, final Completion completion) {
        if (dhcpStructList.isEmpty()) {
            completion.success();
            return;
        }

        applyDhcpToHosts(toDhcpInfo(dhcpStructList), spec.getDestHost().getUuid(), false, completion);
    }

    /**
     * Releases DHCP entries on the host. Best-effort: agent failures are logged
     * (TODO: GC + notification) and the completion is still invoked.
     */
    private void releaseDhcpService(List<DhcpInfo> info, final String vmUuid, final String hostUuid, final NoErrorCompletion completion) {
        final ReleaseDhcpCmd cmd = new ReleaseDhcpCmd();
        cmd.dhcp = info;

        KVMHostAsyncHttpCallMsg msg = new KVMHostAsyncHttpCallMsg();
        msg.setCommand(cmd);
        msg.setHostUuid(hostUuid);
        msg.setPath(RELEASE_DHCP_PATH);
        bus.makeTargetServiceIdByResourceUuid(msg, HostConstant.SERVICE_ID, hostUuid);
        bus.send(msg, new CloudBusCallBack(completion) {
            @Override
            public void run(MessageReply reply) {
                if (!reply.isSuccess()) {
                    //TODO: Add GC and notification
                    logger.warn(String.format("failed to release dhcp%s for vm[uuid: %s] on the kvm host[uuid:%s]; %s",
                            cmd.dhcp, vmUuid, hostUuid, reply.getError()));
                    completion.done();
                    return;
                }

                KVMHostAsyncHttpCallReply r = reply.castReply();
                ReleaseDhcpRsp rsp = r.toResponse(ReleaseDhcpRsp.class);
                if (!rsp.isSuccess()) {
                    //TODO Add GC and notification
                    logger.warn(String.format("failed to release dhcp%s for vm[uuid: %s] on the kvm host[uuid:%s]; %s",
                            cmd.dhcp, vmUuid, hostUuid, rsp.getError()));
                    completion.done();
                    return;
                }

                completion.done();
            }
        });
    }

    /** Releases DHCP entries for a VM on its destination host; no-op for an empty struct list. */
    @Override
    public void releaseDhcpService(List<DhcpStruct> dhcpStructsList, final VmInstanceSpec spec, final NoErrorCompletion completion) {
        if (dhcpStructsList.isEmpty()) {
            completion.done();
            return;
        }

        releaseDhcpService(toDhcpInfo(dhcpStructsList), spec.getVmInventory().getUuid(), spec.getDestHost().getUuid(), completion);
    }

    /**
     * Swaps the default-gateway DHCP option on the host when a running VM's default
     * L3 network changes from {@code previousL3} to {@code nowL3}.
     */
    @Override
    public void vmDefaultL3NetworkChanged(VmInstanceInventory vm, String previousL3, String nowL3, final Completion completion) {
        DebugUtils.Assert(previousL3 != null || nowL3 != null, "why I get two NULL L3 networks!!!!");

        // NOTE(review): this early return never invokes `completion` — looks like the caller
        // would wait forever for a non-running VM; confirm whether completion.success() is expected here
        if (!VmInstanceState.Running.toString().equals(vm.getState())) {
            return;
        }

        // locate the nics attached to the old and new default L3s
        VmNicInventory pnic = null;
        VmNicInventory nnic = null;
        for (VmNicInventory nic : vm.getVmNics()) {
            if (VmNicHelper.getL3Uuids(nic).contains(previousL3)) {
                pnic = nic;
            } else if (VmNicHelper.getL3Uuids(nic).contains(nowL3)) {
                nnic = nic;
            }
        }

        ResetDefaultGatewayCmd cmd = new ResetDefaultGatewayCmd();
        if (pnic != null) {
            cmd.gatewayToRemove = pnic.getGateway();
            cmd.macOfGatewayToRemove = pnic.getMac();
            cmd.bridgeNameOfGatewayToRemove = new BridgeNameFinder().findByL3Uuid(previousL3);
            cmd.namespaceNameOfGatewayToRemove = makeNamespaceName(cmd.bridgeNameOfGatewayToRemove, previousL3);
        }
        if (nnic != null) {
            cmd.gatewayToAdd = nnic.getGateway();
            cmd.macOfGatewayToAdd = nnic.getMac();
            cmd.bridgeNameOfGatewayToAdd = new BridgeNameFinder().findByL3Uuid(nowL3);
            cmd.namespaceNameOfGatewayToAdd = makeNamespaceName(cmd.bridgeNameOfGatewayToAdd, nowL3);
        }

        KvmCommandSender sender = new KvmCommandSender(vm.getHostUuid());
        sender.send(cmd, RESET_DEFAULT_GATEWAY_PATH, wrapper -> {
            ResetDefaultGatewayRsp rsp = wrapper.getResponse(ResetDefaultGatewayRsp.class);
            return rsp.isSuccess() ? null : operr("operation error, because:%s", rsp.getError());
        }, new ReturnValueCompletion<KvmResponseWrapper>(completion) {
            @Override
            public void success(KvmResponseWrapper returnValue) {
                completion.success();
            }

            @Override
            public void fail(ErrorCode errorCode) {
                completion.fail(errorCode);
            }
        });
    }

    /**
     * Sequentially applies grouped DHCP info host-by-host.
     * NOTE(review): the fail() branch continues with the next host, silently swallowing
     * the error — presumably intentional best-effort; confirm.
     */
    private void applyDhcpToHosts(Iterator<Map.Entry<String, List<DhcpInfo>>> it, Completion completion) {
        if (!it.hasNext()) {
            completion.success();
            return;
        }

        Map.Entry<String, List<DhcpInfo>> e = it.next();
        final String hostUuid = e.getKey();
        final List<DhcpInfo> infos = e.getValue();
        if (infos == null || infos.isEmpty()) {
            applyDhcpToHosts(it, completion);
            return;
        }

        applyDhcpToHosts(infos, hostUuid, false, new Completion(completion) {
            @Override
            public void success() {
                applyDhcpToHosts(it, completion);
            }

            @Override
            public void fail(ErrorCode errorCode) {
                applyDhcpToHosts(it, completion);
            }
        });
    }

    /**
     * Re-applies DHCP configuration of an L3 network to every host that runs a
     * running VM attached to it (e.g. after the L3's DHCP settings change).
     */
    private void handle(L3NetworkUpdateDhcpMsg msg) {
        L3NetworkUpdateDhcpReply reply = new L3NetworkUpdateDhcpReply();
        Map<String, List<DhcpInfo>> l3DhcpMap = new HashMap<String, List<DhcpInfo>>();

        List<String> vmUuids = Q.New(VmNicVO.class).eq(VmNicVO_.l3NetworkUuid, msg.getL3NetworkUuid()).select(VmNicVO_.vmInstanceUuid)
                .groupBy(VmNicVO_.vmInstanceUuid).listValues();
        for (String uuid : vmUuids) {
            VmInstanceInventory vm = VmInstanceInventory.valueOf(dbf.findByUuid(uuid, VmInstanceVO.class));
            // only running VMs with a known host need a host-side update
            if (!vm.getState().equals(VmInstanceState.Running.toString()) || vm.getHostUuid() == null) {
                continue;
            }

            String hostUuid = vm.getHostUuid();
            List<DhcpInfo> hostInfo = l3DhcpMap.computeIfAbsent(hostUuid, k -> new ArrayList<>());
            hostInfo.addAll(getVmDhcpInfo(vm, msg.getL3NetworkUuid()));
        }

        applyDhcpToHosts(l3DhcpMap.entrySet().iterator(), new Completion(msg) {
            @Override
            public void success() {
                bus.reply(msg, reply);
            }

            @Override
            public void fail(ErrorCode errorCode) {
                reply.setError(errorCode);
                bus.reply(msg, reply);
            }
        });
    }

    /** API messages whose ip-range payload must be validated for a DHCP-server-ip system tag. */
    @Override
    public List<Class> getMessageClassToIntercept() {
        List<Class> ret = new ArrayList<Class>();
        ret.add(APIAddIpRangeMsg.class);
        ret.add(APIAddIpRangeByNetworkCidrMsg.class);
        ret.add(APIAddIpv6RangeMsg.class);
        ret.add(APIAddIpv6RangeByNetworkCidrMsg.class);
        ret.add(APIDeleteIpRangeMsg.class);
        return ret;
    }

    @Override
    public InterceptorPosition getPosition() {
        return InterceptorPosition.END;
    }

    /**
     * Validates a user-supplied DHCP server IP system tag on an add-ip-range API:
     * the IP must match the L3's IP version, fall inside the range's CIDR, not duplicate
     * an existing DHCP server IP, not equal the gateway, and the L3 must not be a system network.
     *
     * @throws ApiMessageInterceptionException on any violation
     */
    private void validateDhcpServerIp(IpRangeInventory inv, List<String> systemTags) {
        if (systemTags == null || systemTags.isEmpty()) {
            return;
        }

        for (String systemTag : systemTags) {
            if (!FlatNetworkSystemTags.L3_NETWORK_DHCP_IP.isMatch(systemTag)) {
                continue;
            }

            Map<String, String> token = TagUtils.parse(FlatNetworkSystemTags.L3_NETWORK_DHCP_IP.getTagFormat(), systemTag);
            String dhcpServerIp = token.get(FlatNetworkSystemTags.L3_NETWORK_DHCP_IP_TOKEN);
            dhcpServerIp = IPv6NetworkUtils.ipv6TagValueToAddress(dhcpServerIp);

            if (inv.getIpVersion() == IPv6Constants.IPv4) {
                if (!NetworkUtils.isIpv4Address(dhcpServerIp)) {
                    throw new ApiMessageInterceptionException(argerr("DHCP server ip [%s] is not a IPv4 address", dhcpServerIp));
                }
                if (!NetworkUtils.isIpv4InCidr(dhcpServerIp, inv.getNetworkCidr())) {
                    throw new ApiMessageInterceptionException(argerr("DHCP server ip [%s] is not in the cidr [%s]", dhcpServerIp, inv.getNetworkCidr()));
                }
            } else {
                if (!IPv6NetworkUtils.isIpv6Address(dhcpServerIp)) {
                    throw new ApiMessageInterceptionException(argerr("DHCP server ip [%s] is not a IPv6 address", dhcpServerIp));
                }
                if (!IPv6NetworkUtils.isIpv6InCidrRange(dhcpServerIp, inv.getNetworkCidr())) {
                    throw new ApiMessageInterceptionException(argerr("DHCP server ip [%s] is not in the cidr [%s]", dhcpServerIp, inv.getNetworkCidr()));
                }
            }

            String oldDhcpServer = FlatNetworkSystemTags.L3_NETWORK_DHCP_IP.getTokenByResourceUuid(inv.getL3NetworkUuid(), FlatNetworkSystemTags.L3_NETWORK_DHCP_IP_TOKEN);
            if (oldDhcpServer != null) {
                throw new ApiMessageInterceptionException(argerr("DHCP server ip [%s] is already existed in l3 network [%s]",
                        IPv6NetworkUtils.ipv6TagValueToAddress(oldDhcpServer), inv.getL3NetworkUuid()));
            }

            if (dhcpServerIp.equals(inv.getGateway())) {
                throw new ApiMessageInterceptionException(argerr("DHCP server ip [%s] can not be equaled to gateway ip", dhcpServerIp));
            }

            L3NetworkVO l3Vo = Q.New(L3NetworkVO.class).eq(L3NetworkVO_.uuid, inv.getL3NetworkUuid()).find();
            if (l3Vo.isSystem()) {
                throw new ApiMessageInterceptionException(argerr("DHCP server ip [%s] can not be configured to system l3", dhcpServerIp));
            }
        }
    }

    /** Dispatches intercepted add-ip-range API messages to {@link #validateDhcpServerIp}. */
    @Override
    public APIMessage intercept(APIMessage msg) throws ApiMessageInterceptionException {
        if (msg instanceof APIAddIpRangeMsg) {
            IpRangeInventory inv = IpRangeInventory.fromMessage((APIAddIpRangeMsg) msg);
            validateDhcpServerIp(inv, msg.getSystemTags());
        } else if (msg instanceof APIAddIpRangeByNetworkCidrMsg) {
            IpRangeInventory inv = IpRangeInventory.fromMessage((APIAddIpRangeByNetworkCidrMsg) msg);
            validateDhcpServerIp(inv, msg.getSystemTags());
        } else if (msg instanceof APIAddIpv6RangeMsg) {
            IpRangeInventory inv = IpRangeInventory.fromMessage((APIAddIpv6RangeMsg) msg);
            validateDhcpServerIp(inv, msg.getSystemTags());
        } else if (msg instanceof APIAddIpv6RangeByNetworkCidrMsg) {
            IpRangeInventory inv = IpRangeInventory.fromMessage((APIAddIpv6RangeByNetworkCidrMsg) msg);
            validateDhcpServerIp(inv, msg.getSystemTags());
        }

        return msg;
    }

    /* when add an iprage, there are 2 cases:
     * #1 include dhcp server ip, it means there is no dhcp server yet. and it include 2 sub-cases:
     *    $1.1 dhcp server ip is in this range, actions:
     *         a) allocate dhcp server ip in db
     *         b) create systemtag L3_NETWORK_DHCP_IP
     *    $1.2 dhcp server ip is not in this range, actions:
     *         b) create systemtag L3_NETWORK_DHCP_IP, but usedIp set to null
     *
     * #2 doesn't include dhcp server ip, it include 2 sub-cases:
     *    $2.1 dhcp server ip is not config, actions: NONE
     *    $2.2 dhcp server ip is configured, but no in this range, actions: None
     *    $2.3 dhcp server ip is configured and in this range, actions:
     *         a) allocate dhcp server ip in db
     * */
    @Override
    public void afterAddIpRange(IpRangeInventory ipr, List<String> systemTags) {
        // extract a DHCP-server-ip system tag, if the request carried one
        String dhcpTag = null;
        String dhcpServerIp = null;
        if (systemTags != null) {
            for (String sysTag : systemTags) {
                if (!FlatNetworkSystemTags.L3_NETWORK_DHCP_IP.isMatch(sysTag)) {
                    continue;
                }

                Map<String, String> token = TagUtils.parse(FlatNetworkSystemTags.L3_NETWORK_DHCP_IP.getTagFormat(), sysTag);
                dhcpServerIp = token.get(FlatNetworkSystemTags.L3_NETWORK_DHCP_IP_TOKEN);
                if (dhcpServerIp == null) {
                    continue;
                }
                dhcpServerIp = IPv6NetworkUtils.ipv6TagValueToAddress(dhcpServerIp);
                dhcpTag = sysTag;
                break;
            }
        }

        if (dhcpServerIp != null) {
            if (NetworkUtils.isInRange(dhcpServerIp, ipr.getStartIp(), ipr.getEndIp())) {
                /* case #1.1 */
                allocateDhcpIp(ipr.getL3NetworkUuid(), true, dhcpServerIp);
            } else {
                /* case #1.2 */
                allocateDhcpIp(ipr.getL3NetworkUuid(), false, dhcpServerIp);
            }
            systemTags.remove(dhcpTag);
        } else {
            String oldDhcpServerIp = getDHCPServerIP(ipr.getL3NetworkUuid());
            if (oldDhcpServerIp != null && NetworkUtils.isInRange(oldDhcpServerIp, ipr.getStartIp(), ipr.getEndIp())) {
                /* case #2.3: re-allocate the configured DHCP ip inside the new range */
                deleteDhcpServerIp(ipr.getL3NetworkUuid(), oldDhcpServerIp);
                allocateDhcpIp(ipr.getL3NetworkUuid(), true, oldDhcpServerIp);
            }
        }
    }
}
package org.zstack.network.service.flat; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.transaction.annotation.Transactional; import org.zstack.compute.vm.VmSystemTags; import org.zstack.core.cloudbus.CloudBus; import org.zstack.core.cloudbus.CloudBusCallBack; import org.zstack.core.cloudbus.MessageSafe; import org.zstack.core.db.DatabaseFacade; import org.zstack.core.db.GLock; import org.zstack.core.defer.Defer; import org.zstack.core.defer.Deferred; import org.zstack.core.errorcode.ErrorFacade; import org.zstack.core.gc.EventBasedGCPersistentContext; import org.zstack.core.gc.GCEventTrigger; import org.zstack.core.gc.GCFacade; import org.zstack.core.logging.Event; import org.zstack.core.logging.Log; import org.zstack.core.thread.SyncTask; import org.zstack.core.thread.ThreadFacade; import org.zstack.core.timeout.ApiTimeoutManager; import org.zstack.core.workflow.FlowChainBuilder; import org.zstack.core.workflow.ShareFlow; import org.zstack.header.AbstractService; import org.zstack.header.core.*; import org.zstack.header.core.workflow.*; import org.zstack.header.errorcode.ErrorCode; import org.zstack.header.errorcode.OperationFailureException; import org.zstack.header.errorcode.SysErrors; import org.zstack.header.exception.CloudRuntimeException; import org.zstack.header.host.HostCanonicalEvents; import org.zstack.header.host.HostConstant; import org.zstack.header.host.HostErrors; import org.zstack.header.host.HostStatus; import org.zstack.header.message.APIMessage; import org.zstack.header.message.Message; import org.zstack.header.message.MessageReply; import org.zstack.header.network.l2.L2NetworkVO; import org.zstack.header.network.l3.*; import org.zstack.header.network.service.DhcpStruct; import org.zstack.header.network.service.NetworkServiceDhcpBackend; import org.zstack.header.network.service.NetworkServiceProviderType; import org.zstack.header.network.service.NetworkServiceType; import org.zstack.header.vm.*; import 
org.zstack.header.vm.VmAbnormalLifeCycleStruct.VmAbnormalLifeCycleOperation; import org.zstack.kvm.*; import org.zstack.kvm.KvmCommandSender.SteppingSendCallback; import org.zstack.network.service.NetworkProviderFinder; import org.zstack.network.service.NetworkServiceProviderLookup; import org.zstack.tag.SystemTagCreator; import org.zstack.utils.CollectionUtils; import org.zstack.utils.DebugUtils; import org.zstack.utils.TagUtils; import org.zstack.utils.Utils; import org.zstack.utils.function.Function; import org.zstack.utils.logging.CLogger; import org.zstack.utils.network.NetworkUtils; import javax.persistence.Tuple; import javax.persistence.TypedQuery; import java.util.*; import java.util.concurrent.Callable; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.TimeUnit; import static org.zstack.utils.CollectionDSL.*; import static org.zstack.utils.StringDSL.ln; public class FlatDhcpBackend extends AbstractService implements NetworkServiceDhcpBackend, KVMHostConnectExtensionPoint, L3NetworkDeleteExtensionPoint, VmInstanceMigrateExtensionPoint, VmAbnormalLifeCycleExtensionPoint, IpRangeDeletionExtensionPoint, BeforeStartNewCreatedVmExtensionPoint { private static final CLogger logger = Utils.getLogger(FlatDhcpBackend.class); @Autowired private CloudBus bus; @Autowired private ErrorFacade errf; @Autowired private DatabaseFacade dbf; @Autowired private ThreadFacade thdf; @Autowired private ApiTimeoutManager timeoutMgr; @Autowired private GCFacade gcf; public static final String APPLY_DHCP_PATH = "/flatnetworkprovider/dhcp/apply"; public static final String PREPARE_DHCP_PATH = "/flatnetworkprovider/dhcp/prepare"; public static final String RELEASE_DHCP_PATH = "/flatnetworkprovider/dhcp/release"; public static final String DHCP_CONNECT_PATH = "/flatnetworkprovider/dhcp/connect"; public static final String RESET_DEFAULT_GATEWAY_PATH = "/flatnetworkprovider/dhcp/resetDefaultGateway"; public static final String DHCP_DELETE_NAMESPACE_PATH = 
"/flatnetworkprovider/dhcp/deletenamespace"; private Map<String, UsedIpInventory> l3NetworkDhcpServerIp = new ConcurrentHashMap<String, UsedIpInventory>(); public static String makeNamespaceName(String brName, String l3Uuid) { return String.format("%s_%s", brName, l3Uuid); } @Transactional(readOnly = true) private List<DhcpInfo> getDhcpInfoForConnectedKvmHost(KVMHostConnectedContext context) { String sql = "select vm.uuid, vm.defaultL3NetworkUuid from VmInstanceVO vm where vm.hostUuid = :huuid and vm.state in (:states) and vm.type = :vtype"; TypedQuery<Tuple> q = dbf.getEntityManager().createQuery(sql, Tuple.class); q.setParameter("huuid", context.getInventory().getUuid()); q.setParameter("states", list(VmInstanceState.Running, VmInstanceState.Unknown)); q.setParameter("vtype", VmInstanceConstant.USER_VM_TYPE); List<Tuple> ts = q.getResultList(); if (ts.isEmpty()) { return null; } Map<String, String> vmDefaultL3 = new HashMap<String, String>(); for (Tuple t : ts) { vmDefaultL3.put(t.get(0, String.class), t.get(1, String.class)); } sql = "select nic from VmNicVO nic, L3NetworkVO l3, NetworkServiceL3NetworkRefVO ref, NetworkServiceProviderVO provider where nic.l3NetworkUuid = l3.uuid" + " and ref.l3NetworkUuid = l3.uuid and ref.networkServiceProviderUuid = provider.uuid " + " and provider.type = :ptype and nic.vmInstanceUuid in (:vmUuids) group by nic.uuid"; TypedQuery<VmNicVO> nq = dbf.getEntityManager().createQuery(sql, VmNicVO.class); nq.setParameter("ptype", FlatNetworkServiceConstant.FLAT_NETWORK_SERVICE_TYPE_STRING); nq.setParameter("vmUuids", vmDefaultL3.keySet()); List<VmNicVO> nics = nq.getResultList(); if (nics.isEmpty()) { return null; } List<String> l3Uuids = CollectionUtils.transformToList(nics, new Function<String, VmNicVO>() { @Override public String call(VmNicVO arg) { return arg.getL3NetworkUuid(); } }); sql = "select t.tag, l3.uuid from SystemTagVO t, L3NetworkVO l3 where t.resourceType = :ttype and t.tag like :tag" + " and t.resourceUuid = 
l3.l2NetworkUuid and l3.uuid in (:l3Uuids)"; TypedQuery<Tuple> tq = dbf.getEntityManager().createQuery(sql, Tuple.class); tq.setParameter("tag", TagUtils.tagPatternToSqlPattern(KVMSystemTags.L2_BRIDGE_NAME.getTagFormat())); tq.setParameter("l3Uuids", l3Uuids); tq.setParameter("ttype", L2NetworkVO.class.getSimpleName()); ts = tq.getResultList(); Map<String, String> bridgeNames = new HashMap<String, String>(); for (Tuple t : ts) { bridgeNames.put(t.get(1, String.class), t.get(0, String.class)); } sql = "select t.tag, vm.uuid from SystemTagVO t, VmInstanceVO vm where t.resourceType = :ttype" + " and t.tag like :tag and t.resourceUuid = vm.uuid and vm.uuid in (:vmUuids)"; tq = dbf.getEntityManager().createQuery(sql, Tuple.class); tq.setParameter("tag", TagUtils.tagPatternToSqlPattern(VmSystemTags.HOSTNAME.getTagFormat())); tq.setParameter("ttype", VmInstanceVO.class.getSimpleName()); tq.setParameter("vmUuids", vmDefaultL3.keySet()); Map<String, String> hostnames = new HashMap<String, String>(); for (Tuple t : ts) { hostnames.put(t.get(1, String.class), t.get(0, String.class)); } sql = "select l3 from L3NetworkVO l3 where l3.uuid in (:l3Uuids)"; TypedQuery<L3NetworkVO> l3q = dbf.getEntityManager().createQuery(sql, L3NetworkVO.class); l3q.setParameter("l3Uuids", l3Uuids); List<L3NetworkVO> l3s = l3q.getResultList(); Map<String, L3NetworkVO> l3Map = new HashMap<String, L3NetworkVO>(); for (L3NetworkVO l3 : l3s) { l3Map.put(l3.getUuid(), l3); } List<DhcpInfo> dhcpInfoList = new ArrayList<DhcpInfo>(); for (VmNicVO nic : nics) { DhcpInfo info = new DhcpInfo(); info.bridgeName = KVMSystemTags.L2_BRIDGE_NAME.getTokenByTag(bridgeNames.get(nic.getL3NetworkUuid()), KVMSystemTags.L2_BRIDGE_NAME_TOKEN); info.namespaceName = makeNamespaceName( info.bridgeName, nic.getL3NetworkUuid() ); DebugUtils.Assert(info.bridgeName != null, "bridge name cannot be null"); info.mac = nic.getMac(); info.netmask = nic.getNetmask(); info.isDefaultL3Network = 
nic.getL3NetworkUuid().equals(vmDefaultL3.get(nic.getVmInstanceUuid())); info.ip = nic.getIp(); info.gateway = nic.getGateway(); L3NetworkVO l3 = l3Map.get(nic.getL3NetworkUuid()); info.dnsDomain = l3.getDnsDomain(); info.dns = CollectionUtils.transformToList(l3.getDns(), new Function<String, L3NetworkDnsVO>() { @Override public String call(L3NetworkDnsVO arg) { return arg.getDns(); } }); if (info.isDefaultL3Network) { info.hostname = hostnames.get(nic.getVmInstanceUuid()); if (info.hostname == null) { info.hostname = nic.getIp().replaceAll("\\.", "-"); } if (info.dnsDomain != null) { info.hostname = String.format("%s.%s", info.hostname, info.dnsDomain); } } info.l3NetworkUuid = l3.getUuid(); dhcpInfoList.add(info); } return dhcpInfoList; } @Override @MessageSafe public void handleMessage(Message msg) { if (msg instanceof APIMessage) { handleApiMessage((APIMessage) msg); } else { handleLocalMessage(msg); } } private void handleApiMessage(APIMessage msg) { if (msg instanceof APIGetL3NetworkDhcpIpAddressMsg) { handle((APIGetL3NetworkDhcpIpAddressMsg) msg); } else { bus.dealWithUnknownMessage(msg); } } private void handleLocalMessage(Message msg) { if (msg instanceof FlatDhcpAcquireDhcpServerIpMsg) { handle((FlatDhcpAcquireDhcpServerIpMsg) msg); } else { bus.dealWithUnknownMessage(msg); } } private void handle(APIGetL3NetworkDhcpIpAddressMsg msg) { APIGetL3NetworkDhcpIpAddressReply reply = new APIGetL3NetworkDhcpIpAddressReply(); if (msg.getL3NetworkUuid() == null) { reply.setError(errf.stringToOperationError("l3 network uuid cannot be null")); bus.reply(msg, reply); return; } UsedIpInventory ip = l3NetworkDhcpServerIp.get(msg.getL3NetworkUuid()); if (ip != null) { reply.setIp(ip.getIp()); bus.reply(msg, reply); return; } String tag = FlatNetworkSystemTags.L3_NETWORK_DHCP_IP.getTag(msg.getL3NetworkUuid()); if (tag != null) { Map<String, String> tokens = FlatNetworkSystemTags.L3_NETWORK_DHCP_IP.getTokensByTag(tag); String ipUuid = 
tokens.get(FlatNetworkSystemTags.L3_NETWORK_DHCP_IP_UUID_TOKEN); UsedIpVO vo = dbf.findByUuid(ipUuid, UsedIpVO.class); if (vo == null) { throw new CloudRuntimeException(String.format("cannot find used ip [uuid:%s]", ipUuid)); } ip = UsedIpInventory.valueOf(vo); l3NetworkDhcpServerIp.put(msg.getL3NetworkUuid(), ip); reply.setIp(ip.getIp()); bus.reply(msg, reply); logger.debug(String.format("APIGetL3NetworkDhcpIpAddressMsg[ip:%s, uuid:%s] for l3 network[uuid:%s]", ip.getIp(), ip.getUuid(), ip.getL3NetworkUuid())); return; } reply.setError(errf.stringToOperationError( String.format("Cannot find DhcpIp for l3 network[uuid:%s]", msg.getL3NetworkUuid()))); bus.reply(msg, reply); } @Deferred public UsedIpInventory allocateDhcpIp(String l3Uuid) { UsedIpInventory ip = l3NetworkDhcpServerIp.get(l3Uuid); if (ip != null) { return ip; } // TODO: static allocate the IP to avoid the lock GLock lock = new GLock(String.format("l3-%s-allocate-dhcp-ip", l3Uuid), TimeUnit.MINUTES.toSeconds(30)); lock.lock(); Defer.defer(lock::unlock); String tag = FlatNetworkSystemTags.L3_NETWORK_DHCP_IP.getTag(l3Uuid); if (tag != null) { Map<String, String> tokens = FlatNetworkSystemTags.L3_NETWORK_DHCP_IP.getTokensByTag(tag); String ipUuid = tokens.get(FlatNetworkSystemTags.L3_NETWORK_DHCP_IP_UUID_TOKEN); UsedIpVO vo = dbf.findByUuid(ipUuid, UsedIpVO.class); if (vo == null) { throw new CloudRuntimeException(String.format("cannot find used ip [uuid:%s]", ipUuid)); } ip = UsedIpInventory.valueOf(vo); l3NetworkDhcpServerIp.put(l3Uuid, ip); return ip; } AllocateIpMsg amsg = new AllocateIpMsg(); amsg.setL3NetworkUuid(l3Uuid); bus.makeTargetServiceIdByResourceUuid(amsg, L3NetworkConstant.SERVICE_ID, l3Uuid); MessageReply reply = bus.call(amsg); if (!reply.isSuccess()) { throw new OperationFailureException(reply.getError()); } AllocateIpReply r = reply.castReply(); ip = r.getIpInventory(); SystemTagCreator creator = FlatNetworkSystemTags.L3_NETWORK_DHCP_IP.newSystemTagCreator(l3Uuid); creator.inherent = 
true; creator.setTagByTokens( map( e(FlatNetworkSystemTags.L3_NETWORK_DHCP_IP_TOKEN, ip.getIp()), e(FlatNetworkSystemTags.L3_NETWORK_DHCP_IP_UUID_TOKEN, ip.getUuid()) ) ); creator.create(); l3NetworkDhcpServerIp.put(l3Uuid, ip); logger.debug(String.format("allocate DHCP server IP[ip:%s, uuid:%s] for l3 network[uuid:%s]", ip.getIp(), ip.getUuid(), ip.getL3NetworkUuid())); return ip; } private void handle(final FlatDhcpAcquireDhcpServerIpMsg msg) { thdf.syncSubmit(new SyncTask<Void>() { @Override public Void call() throws Exception { dealMessage(msg); return null; } @MessageSafe private void dealMessage(FlatDhcpAcquireDhcpServerIpMsg msg) { FlatDhcpAcquireDhcpServerIpReply reply = new FlatDhcpAcquireDhcpServerIpReply(); UsedIpInventory ip = allocateDhcpIp(msg.getL3NetworkUuid()); reply.setIp(ip.getIp()); reply.setNetmask(ip.getNetmask()); reply.setUsedIpUuid(ip.getUuid()); bus.reply(msg, reply); } @Override public String getName() { return getSyncSignature(); } @Override public String getSyncSignature() { return String.format("flat-dhcp-get-dhcp-ip-for-l3-network-%s", msg.getL3NetworkUuid()); } @Override public int getSyncLevel() { return 1; } }); } @Override public String getId() { return bus.makeLocalServiceId(FlatNetworkServiceConstant.SERVICE_ID); } @Override public boolean start() { return true; } @Override public boolean stop() { return true; } @Override public String preDeleteL3Network(L3NetworkInventory inventory) throws L3NetworkException { return null; } @Override public void beforeDeleteL3Network(L3NetworkInventory inventory) { } private boolean isProvidedbyMe(L3NetworkInventory l3) { String providerType = new NetworkProviderFinder().getNetworkProviderTypeByNetworkServiceType(l3.getUuid(), NetworkServiceType.DHCP.toString()); return FlatNetworkServiceConstant.FLAT_NETWORK_SERVICE_TYPE_STRING.equals(providerType); } @Override public void afterDeleteL3Network(L3NetworkInventory inventory) { if (!isProvidedbyMe(inventory)) { return; } UsedIpInventory dhchip = 
getDHCPServerIP(inventory.getUuid()); if (dhchip != null) { deleteDhcpServerIp(dhchip); logger.debug(String.format("delete DHCP IP[%s] of the flat network[uuid:%s] as the L3 network is deleted", dhchip.getIp(), dhchip.getL3NetworkUuid())); } deleteNameSpace(inventory); } private void deleteNameSpace(L3NetworkInventory inventory) { List<String> huuids = new Callable<List<String>>() { @Override @Transactional(readOnly = true) public List<String> call() { String sql = "select host.uuid from HostVO host, L2NetworkVO l2, L2NetworkClusterRefVO ref where l2.uuid = ref.l2NetworkUuid" + " and ref.clusterUuid = host.clusterUuid and l2.uuid = :uuid"; TypedQuery<String> q = dbf.getEntityManager().createQuery(sql, String.class); q.setParameter("uuid", inventory.getL2NetworkUuid()); return q.getResultList(); } }.call(); if (huuids.isEmpty()) { return; } String brName = new BridgeNameFinder().findByL3Uuid(inventory.getUuid()); DeleteNamespaceCmd cmd = new DeleteNamespaceCmd(); cmd.bridgeName = brName; cmd.namespaceName = makeNamespaceName(brName, inventory.getUuid()); new KvmCommandSender(huuids).send(cmd, DHCP_DELETE_NAMESPACE_PATH, wrapper -> { DeleteNamespaceRsp rsp = wrapper.getResponse(DeleteNamespaceRsp.class); return rsp.isSuccess() ? 
null : errf.stringToOperationError(rsp.getError()); }, new SteppingSendCallback<KvmResponseWrapper>() { @Override public void success(KvmResponseWrapper w) { logger.debug(String.format("successfully deleted namespace for L3 network[uuid:%s, name:%s] on the " + "KVM host[uuid:%s]", inventory.getUuid(), inventory.getName(), getHostUuid())); } @Override public void fail(ErrorCode errorCode) { if (!errorCode.isError(HostErrors.OPERATION_FAILURE_GC_ELIGIBLE)) { new Event().log(FlatNetworkLabels.DELETE_NAMESPACE_FAILURE, inventory.getName(), inventory.getUuid(), getHostUuid(), errorCode.toString()); return; } GCFlatDHCPDeleteNamespaceContext c = new GCFlatDHCPDeleteNamespaceContext(); c.setHostUuid(getHostUuid()); c.setCommand(cmd); c.setTriggerHostStatus(HostStatus.Connected.toString()); EventBasedGCPersistentContext<GCFlatDHCPDeleteNamespaceContext> ctx = new EventBasedGCPersistentContext<GCFlatDHCPDeleteNamespaceContext>(); ctx.setRunnerClass(GCFlatDHCPDeleteNamespaceRunner.class); ctx.setContextClass(GCFlatDHCPDeleteNamespaceContext.class); ctx.setName(String.format("delete-namespace-for-l3-%s", inventory.getUuid())); ctx.setContext(c); GCEventTrigger trigger = new GCEventTrigger(); trigger.setCodeName("gc-delete-vm-on-host-connected"); trigger.setEventPath(HostCanonicalEvents.HOST_STATUS_CHANGED_PATH); String code = ln( "import org.zstack.header.host.HostCanonicalEvents.HostStatusChangedData", "import org.zstack.network.service.flat.GCFlatDHCPDeleteNamespaceContext", "HostStatusChangedData d = (HostStatusChangedData) data", "GCFlatDHCPDeleteNamespaceContext c = (GCFlatDHCPDeleteNamespaceContext) context", "return c.hostUuid == d.hostUuid && d.newStatus == c.triggerHostStatus" ).toString(); trigger.setCode(code); ctx.addTrigger(trigger); trigger = new GCEventTrigger(); trigger.setCodeName("gc-delete-vm-on-host-deleted"); trigger.setEventPath(HostCanonicalEvents.HOST_DELETED_PATH); code = ln( "import org.zstack.header.host.HostCanonicalEvents.HostDeletedData", "import 
org.zstack.network.service.flat.GCFlatDHCPDeleteNamespaceContext", "HostDeletedData d = (HostDeletedData) data", "GCFlatDHCPDeleteNamespaceContext c = (GCFlatDHCPDeleteNamespaceContext) context", "return c.hostUuid == d.hostUuid" ).toString(); trigger.setCode(code); ctx.addTrigger(trigger); gcf.schedule(ctx); } }); } @Transactional(readOnly = true) private List<DhcpInfo> getVmDhcpInfo(VmInstanceInventory vm) { String sql = "select nic from VmNicVO nic, L3NetworkVO l3, NetworkServiceL3NetworkRefVO ref, NetworkServiceProviderVO provider where nic.l3NetworkUuid = l3.uuid" + " and ref.l3NetworkUuid = l3.uuid and ref.networkServiceProviderUuid = provider.uuid " + " and provider.type = :ptype and nic.vmInstanceUuid = :vmUuid group by nic.uuid"; TypedQuery<VmNicVO> nq = dbf.getEntityManager().createQuery(sql, VmNicVO.class); nq.setParameter("ptype", FlatNetworkServiceConstant.FLAT_NETWORK_SERVICE_TYPE_STRING); nq.setParameter("vmUuid", vm.getUuid()); List<VmNicVO> nics = nq.getResultList(); if (nics.isEmpty()) { return null; } List<String> l3Uuids = CollectionUtils.transformToList(nics, new Function<String, VmNicVO>() { @Override public String call(VmNicVO arg) { return arg.getL3NetworkUuid(); } }); sql = "select t.tag, l3.uuid from SystemTagVO t, L3NetworkVO l3 where t.resourceType = :ttype and t.tag like :tag" + " and t.resourceUuid = l3.l2NetworkUuid and l3.uuid in (:l3Uuids)"; TypedQuery<Tuple> tq = dbf.getEntityManager().createQuery(sql, Tuple.class); tq.setParameter("tag", TagUtils.tagPatternToSqlPattern(KVMSystemTags.L2_BRIDGE_NAME.getTagFormat())); tq.setParameter("l3Uuids", l3Uuids); tq.setParameter("ttype", L2NetworkVO.class.getSimpleName()); List<Tuple> ts = tq.getResultList(); Map<String, String> bridgeNames = new HashMap<String, String>(); for (Tuple t : ts) { bridgeNames.put(t.get(1, String.class), t.get(0, String.class)); } sql = "select t.tag, vm.uuid from SystemTagVO t, VmInstanceVO vm where t.resourceType = :ttype" + " and t.tag like :tag and 
t.resourceUuid = vm.uuid and vm.uuid = :vmUuid"; tq = dbf.getEntityManager().createQuery(sql, Tuple.class); tq.setParameter("tag", TagUtils.tagPatternToSqlPattern(VmSystemTags.HOSTNAME.getTagFormat())); tq.setParameter("ttype", VmInstanceVO.class.getSimpleName()); tq.setParameter("vmUuid", vm.getUuid()); Map<String, String> hostnames = new HashMap<String, String>(); for (Tuple t : ts) { hostnames.put(t.get(1, String.class), t.get(0, String.class)); } sql = "select l3 from L3NetworkVO l3 where l3.uuid in (:l3Uuids)"; TypedQuery<L3NetworkVO> l3q = dbf.getEntityManager().createQuery(sql, L3NetworkVO.class); l3q.setParameter("l3Uuids", l3Uuids); List<L3NetworkVO> l3s = l3q.getResultList(); Map<String, L3NetworkVO> l3Map = new HashMap<String, L3NetworkVO>(); for (L3NetworkVO l3 : l3s) { l3Map.put(l3.getUuid(), l3); } List<DhcpInfo> dhcpInfoList = new ArrayList<DhcpInfo>(); for (VmNicVO nic : nics) { DhcpInfo info = new DhcpInfo(); info.bridgeName = KVMSystemTags.L2_BRIDGE_NAME.getTokenByTag(bridgeNames.get(nic.getL3NetworkUuid()), KVMSystemTags.L2_BRIDGE_NAME_TOKEN); info.namespaceName = makeNamespaceName( info.bridgeName, nic.getL3NetworkUuid() ); DebugUtils.Assert(info.bridgeName != null, "bridge name cannot be null"); info.mac = nic.getMac(); info.netmask = nic.getNetmask(); info.isDefaultL3Network = nic.getL3NetworkUuid().equals(vm.getDefaultL3NetworkUuid()); info.ip = nic.getIp(); info.gateway = nic.getGateway(); L3NetworkVO l3 = l3Map.get(nic.getL3NetworkUuid()); info.dnsDomain = l3.getDnsDomain(); info.dns = CollectionUtils.transformToList(l3.getDns(), new Function<String, L3NetworkDnsVO>() { @Override public String call(L3NetworkDnsVO arg) { return arg.getDns(); } }); if (info.isDefaultL3Network) { info.hostname = hostnames.get(nic.getVmInstanceUuid()); if (info.hostname == null) { info.hostname = nic.getIp().replaceAll("\\.", "-"); } if (info.dnsDomain != null) { info.hostname = String.format("%s.%s", info.hostname, info.dnsDomain); } } info.l3NetworkUuid = 
l3.getUuid(); dhcpInfoList.add(info); } return dhcpInfoList; } @Override public void preMigrateVm(VmInstanceInventory inv, String destHostUuid) { List<DhcpInfo> info = getVmDhcpInfo(inv); if (info == null) { return; } FutureCompletion completion = new FutureCompletion(); applyDhcpToHosts(info, destHostUuid, false, completion); completion.await(TimeUnit.MINUTES.toMillis(30)); if (!completion.isSuccess()) { throw new OperationFailureException(errf.instantiateErrorCode(SysErrors.OPERATION_ERROR, String.format("cannot configure DHCP for vm[uuid:%s] on the destination host[uuid:%s]", inv.getUuid(), destHostUuid), completion.getErrorCode() )); } } @Override public void beforeMigrateVm(VmInstanceInventory inv, String destHostUuid) { } @Override public void afterMigrateVm(VmInstanceInventory inv, String srcHostUuid) { List<DhcpInfo> info = getVmDhcpInfo(inv); if (info == null) { return; } releaseDhcpService(info, inv.getUuid(), srcHostUuid, new NoErrorCompletion() { @Override public void done() { // ignore } }); } @Override public void failedToMigrateVm(VmInstanceInventory inv, String destHostUuid, ErrorCode reason) { List<DhcpInfo> info = getVmDhcpInfo(inv); if (info == null) { return; } releaseDhcpService(info, inv.getUuid(), destHostUuid, new NoErrorCompletion() { @Override public void done() { // ignore } }); } @Override public Flow createVmAbnormalLifeCycleHandlingFlow(final VmAbnormalLifeCycleStruct struct) { return new Flow() { String __name__ = "flat-network-configure-dhcp"; VmAbnormalLifeCycleOperation operation = struct.getOperation(); VmInstanceInventory vm = struct.getVmInstance(); List<DhcpInfo> info = getVmDhcpInfo(vm); String applyHostUuidForRollback; String releaseHostUuidForRollback; @Override public void run(FlowTrigger trigger, Map data) { if (info == null) { trigger.next(); return; } if (operation == VmAbnormalLifeCycleOperation.VmRunningOnTheHost) { vmRunningOnTheHost(trigger); } else if (operation == 
VmAbnormalLifeCycleOperation.VmStoppedOnTheSameHost) { vmStoppedOnTheSameHost(trigger); } else if (operation == VmAbnormalLifeCycleOperation.VmRunningFromUnknownStateHostChanged) { vmRunningFromUnknownStateHostChanged(trigger); } else if (operation == VmAbnormalLifeCycleOperation.VmRunningFromUnknownStateHostNotChanged) { vmRunningFromUnknownStateHostNotChanged(trigger); } else if (operation == VmAbnormalLifeCycleOperation.VmMigrateToAnotherHost) { vmMigrateToAnotherHost(trigger); } else if (operation == VmAbnormalLifeCycleOperation.VmRunningFromIntermediateState) { vmRunningFromIntermediateState(trigger); } else { trigger.next(); } } private void vmRunningFromIntermediateState(final FlowTrigger trigger) { applyDhcpToHosts(info, struct.getCurrentHostUuid(), false, new Completion(trigger) { @Override public void success() { releaseHostUuidForRollback = struct.getCurrentHostUuid(); trigger.next(); } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } private void vmMigrateToAnotherHost(final FlowTrigger trigger) { releaseDhcpService(info, vm.getUuid(), struct.getOriginalHostUuid(), new NopeNoErrorCompletion()); applyHostUuidForRollback = struct.getOriginalHostUuid(); applyDhcpToHosts(info, struct.getCurrentHostUuid(), false, new Completion(trigger) { @Override public void success() { releaseHostUuidForRollback = struct.getCurrentHostUuid(); trigger.next(); } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } private void vmRunningFromUnknownStateHostNotChanged(final FlowTrigger trigger) { applyDhcpToHosts(info, struct.getCurrentHostUuid(), false, new Completion(trigger) { @Override public void success() { releaseHostUuidForRollback = struct.getCurrentHostUuid(); trigger.next(); } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } private void vmRunningFromUnknownStateHostChanged(final FlowTrigger trigger) { releaseDhcpService(info, vm.getUuid(), 
struct.getOriginalHostUuid(), new NopeNoErrorCompletion()); applyHostUuidForRollback = struct.getCurrentHostUuid(); applyDhcpToHosts(info, struct.getCurrentHostUuid(), false, new Completion(trigger) { @Override public void success() { releaseHostUuidForRollback = struct.getCurrentHostUuid(); trigger.next(); } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } private void vmStoppedOnTheSameHost(final FlowTrigger trigger) { releaseDhcpService(info, vm.getUuid(), struct.getCurrentHostUuid(), new NoErrorCompletion(trigger) { @Override public void done() { applyHostUuidForRollback = struct.getCurrentHostUuid(); trigger.next(); } }); } private void vmRunningOnTheHost(final FlowTrigger trigger) { applyDhcpToHosts(info, struct.getCurrentHostUuid(), false, new Completion(trigger) { @Override public void success() { releaseHostUuidForRollback = struct.getCurrentHostUuid(); trigger.next(); } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } @Override public void rollback(FlowRollback trigger, Map data) { if (info == null) { trigger.rollback(); return; } if (releaseHostUuidForRollback != null) { releaseDhcpService(info, vm.getUuid(), struct.getOriginalHostUuid(), new NopeNoErrorCompletion()); } if (applyHostUuidForRollback != null) { applyDhcpToHosts(info, struct.getCurrentHostUuid(), false, new Completion() { @Override public void success() { //ignore } @Override public void fail(ErrorCode errorCode) { //TODO logger.warn(String.format("failed to re-apply DHCP info of the vm[uuid:%s] to the host[uuid:%s], %s", vm.getUuid(), applyHostUuidForRollback, errorCode)); } }); } trigger.rollback(); } }; } @Override public void preDeleteIpRange(IpRangeInventory ipRange) { } @Override public void beforeDeleteIpRange(IpRangeInventory ipRange) { } private void deleteDhcpServerIp(UsedIpInventory ip) { l3NetworkDhcpServerIp.remove(ip.getL3NetworkUuid()); 
FlatNetworkSystemTags.L3_NETWORK_DHCP_IP.deleteInherentTag(ip.getL3NetworkUuid());
    dbf.removeByPrimaryKey(ip.getUuid(), UsedIpVO.class);
}

/**
 * Looks up the DHCP server IP of an L3 network: first in the in-memory cache,
 * then from the persisted system tag (whose UUID token points at the UsedIpVO).
 *
 * @return the DHCP server IP, or null if none has been allocated
 */
private UsedIpInventory getDHCPServerIP(String l3Uuid) {
    UsedIpInventory dhcpIp = l3NetworkDhcpServerIp.get(l3Uuid);
    if (dhcpIp != null) {
        return dhcpIp;
    }

    String tag = FlatNetworkSystemTags.L3_NETWORK_DHCP_IP.getTag(l3Uuid);
    if (tag != null) {
        Map<String, String> tokens = FlatNetworkSystemTags.L3_NETWORK_DHCP_IP.getTokensByTag(tag);
        String ipUuid = tokens.get(FlatNetworkSystemTags.L3_NETWORK_DHCP_IP_UUID_TOKEN);
        UsedIpVO vo = dbf.findByUuid(ipUuid, UsedIpVO.class);
        if (vo != null) {
            return UsedIpInventory.valueOf(vo);
        }
    }

    return null;
}

/**
 * After an IP range is deleted: if the network's DHCP server IP fell inside the
 * deleted range, release it so a new one can be allocated from a valid range.
 */
@Override
public void afterDeleteIpRange(IpRangeInventory ipRange) {
    UsedIpInventory dhcpIp = getDHCPServerIP(ipRange.getL3NetworkUuid());

    if (dhcpIp != null && NetworkUtils.isIpv4InRange(dhcpIp.getIp(), ipRange.getStartIp(), ipRange.getEndIp())) {
        deleteDhcpServerIp(dhcpIp);
        logger.debug(String.format("delete DHCP IP[%s] of the flat network[uuid:%s] as the IP range[uuid:%s] is deleted",
                dhcpIp.getIp(), ipRange.getL3NetworkUuid(), ipRange.getUuid()));
    }
}

@Override
public void failedToDeleteIpRange(IpRangeInventory ipRange, ErrorCode errorCode) {
}

/**
 * Flow run while a KVM host is (re)connecting: pings the agent's DHCP connect
 * endpoint (which flushes ebtables), then re-applies all DHCP configuration
 * the host should have, in rebuild mode.
 */
@Override
public Flow createKvmHostConnectingFlow(final KVMHostConnectedContext context) {
    return new NoRollbackFlow() {
        String __name__ = "prepare-flat-dhcp";

        @Override
        public void run(final FlowTrigger trigger, Map data) {
            final List<DhcpInfo> dhcpInfoList = getDhcpInfoForConnectedKvmHost(context);
            if (dhcpInfoList == null) {
                // nothing on this host needs flat DHCP
                trigger.next();
                return;
            }

            new Log(context.getInventory().getUuid()).log(FlatNetworkLabel.SYNC_DHCP);

            // to flush ebtables
            ConnectCmd cmd = new ConnectCmd();
            KVMHostAsyncHttpCallMsg msg = new KVMHostAsyncHttpCallMsg();
            msg.setHostUuid(context.getInventory().getUuid());
            msg.setCommand(cmd);
            msg.setCommandTimeout(timeoutMgr.getTimeout(cmd.getClass(), "5m"));
            // the host is still connecting, so skip the status check
            msg.setNoStatusCheck(true);
            msg.setPath(DHCP_CONNECT_PATH);
            bus.makeTargetServiceIdByResourceUuid(msg, HostConstant.SERVICE_ID, context.getInventory().getUuid());
            bus.send(msg, new CloudBusCallBack(trigger) {
                @Override
                public void run(MessageReply reply) {
                    if (!reply.isSuccess()) {
                        trigger.fail(reply.getError());
                    } else {
                        // rebuild=true: the agent reconstructs namespaces/leases from scratch
                        applyDhcpToHosts(dhcpInfoList, context.getInventory().getUuid(), true, new Completion(trigger) {
                            @Override
                            public void success() {
                                trigger.next();
                            }

                            @Override
                            public void fail(ErrorCode errorCode) {
                                trigger.fail(errorCode);
                            }
                        });
                    }
                }
            });
        }
    };
}

/**
 * Pre-allocates the DHCP server IP of every flat-DHCP L3 network the new VM
 * will attach to, before the VM starts.
 */
@Override
public void beforeStartNewCreatedVm(VmInstanceSpec spec) {
    String providerUuid = new NetworkServiceProviderLookup().lookupUuidByType(FlatNetworkServiceConstant.FLAT_NETWORK_SERVICE_TYPE_STRING);

    // make sure the Flat DHCP acquired DHCP server IP before starting VMs,
    // otherwise it may not be able to get IP when lots of VMs start concurrently
    // because the logic of VM acquiring IP is ahead flat DHCP acquiring IP
    for (L3NetworkInventory l3 : spec.getL3Networks()) {
        List<String> serviceTypes = l3.getNetworkServiceTypesFromProvider(providerUuid);
        if (serviceTypes.contains(NetworkServiceType.DHCP.toString())) {
            allocateDhcpIp(l3.getUuid());
        }
    }
}

/** Per-nic DHCP configuration shipped to the KVM agent as JSON. */
public static class DhcpInfo {
    public String ip;
    public String mac;
    public String netmask;
    public String gateway;
    public String hostname;
    public boolean isDefaultL3Network;
    public String dnsDomain;
    public List<String> dns;
    public String bridgeName;
    public String namespaceName;
    public String l3NetworkUuid;
}

/** Agent command: apply DHCP entries; rebuild=true recreates them from scratch. */
public static class ApplyDhcpCmd extends KVMAgentCommands.AgentCommand {
    public List<DhcpInfo> dhcp;
    public boolean rebuild;
    public String l3NetworkUuid;
}

public static class ApplyDhcpRsp extends KVMAgentCommands.AgentResponse {
}

/** Agent command: remove DHCP entries. */
public static class ReleaseDhcpCmd extends KVMAgentCommands.AgentCommand {
    public List<DhcpInfo> dhcp;
}

public static class ReleaseDhcpRsp extends KVMAgentCommands.AgentResponse {
}

/** Agent command: set up the per-L3 DHCP server namespace on a host. */
public static class PrepareDhcpCmd extends KVMAgentCommands.AgentCommand {
    public String bridgeName;
{ info.hostname = String.format("%s.%s", info.hostname, info.dnsDomain); } } info.ip = arg.getIp(); info.netmask = arg.getNetmask(); info.mac = arg.getMac(); info.dns = arg.getL3Network().getDns(); info.l3NetworkUuid = arg.getL3Network().getUuid(); info.bridgeName = l3Bridges.get(arg.getL3Network().getUuid()); info.namespaceName = makeNamespaceName(info.bridgeName, arg.getL3Network().getUuid()); return info; } }); } private void applyDhcpToHosts(List<DhcpInfo> dhcpInfo, final String hostUuid, final boolean rebuild, final Completion completion) { final Map<String, List<DhcpInfo>> l3DhcpMap = new HashMap<String, List<DhcpInfo>>(); for (DhcpInfo d : dhcpInfo) { List<DhcpInfo> lst = l3DhcpMap.get(d.l3NetworkUuid); if (lst == null) { lst = new ArrayList<DhcpInfo>(); l3DhcpMap.put(d.l3NetworkUuid, lst); } lst.add(d); } final Iterator<Map.Entry<String, List<DhcpInfo>>> it = l3DhcpMap.entrySet().iterator(); class DhcpApply { void apply() { if (!it.hasNext()) { completion.success(); return; } Map.Entry<String, List<DhcpInfo>> e = it.next(); final String l3Uuid = e.getKey(); final List<DhcpInfo> info = e.getValue(); DebugUtils.Assert(!info.isEmpty(), "how can info be empty???"); FlowChain chain = FlowChainBuilder.newShareFlowChain(); chain.setName(String.format("flat-dhcp-provider-apply-dhcp-to-l3-network-%s", l3Uuid)); chain.then(new ShareFlow() { String dhcpServerIp; String dhcpNetmask; @Override public void setup() { flow(new NoRollbackFlow() { String __name__ = "get-dhcp-server-ip"; @Override public void run(final FlowTrigger trigger, Map data) { FlatDhcpAcquireDhcpServerIpMsg msg = new FlatDhcpAcquireDhcpServerIpMsg(); msg.setL3NetworkUuid(l3Uuid); bus.makeTargetServiceIdByResourceUuid(msg, FlatNetworkServiceConstant.SERVICE_ID, l3Uuid); bus.send(msg, new CloudBusCallBack(trigger) { @Override public void run(MessageReply reply) { if (!reply.isSuccess()) { trigger.fail(reply.getError()); } else { FlatDhcpAcquireDhcpServerIpReply r = reply.castReply(); dhcpServerIp = 
r.getIp(); dhcpNetmask = r.getNetmask(); trigger.next(); } } }); } }); flow(new NoRollbackFlow() { String __name__ = "prepare-distributed-dhcp-server-on-host"; @Override public void run(final FlowTrigger trigger, Map data) { DhcpInfo i = info.get(0); PrepareDhcpCmd cmd = new PrepareDhcpCmd(); cmd.bridgeName = i.bridgeName; cmd.namespaceName = i.namespaceName; cmd.dhcpServerIp = dhcpServerIp; cmd.dhcpNetmask = dhcpNetmask; KVMHostAsyncHttpCallMsg msg = new KVMHostAsyncHttpCallMsg(); msg.setHostUuid(hostUuid); msg.setNoStatusCheck(true); msg.setCommand(cmd); msg.setPath(PREPARE_DHCP_PATH); msg.setCommandTimeout(timeoutMgr.getTimeout(cmd.getClass(), "5m")); bus.makeTargetServiceIdByResourceUuid(msg, HostConstant.SERVICE_ID, hostUuid); bus.send(msg, new CloudBusCallBack(trigger) { @Override public void run(MessageReply reply) { if (!reply.isSuccess()) { trigger.fail(reply.getError()); return; } KVMHostAsyncHttpCallReply ar = reply.castReply(); PrepareDhcpRsp rsp = ar.toResponse(PrepareDhcpRsp.class); if (!rsp.isSuccess()) { trigger.fail(errf.stringToOperationError(rsp.getError())); return; } trigger.next(); } }); } }); flow(new NoRollbackFlow() { String __name__ = "apply-dhcp"; @Override public void run(final FlowTrigger trigger, Map data) { ApplyDhcpCmd cmd = new ApplyDhcpCmd(); cmd.dhcp = info; cmd.rebuild = rebuild; cmd.l3NetworkUuid = l3Uuid; KVMHostAsyncHttpCallMsg msg = new KVMHostAsyncHttpCallMsg(); msg.setCommand(cmd); msg.setCommandTimeout(timeoutMgr.getTimeout(cmd.getClass(), "5m")); msg.setHostUuid(hostUuid); msg.setPath(APPLY_DHCP_PATH); msg.setNoStatusCheck(true); bus.makeTargetServiceIdByResourceUuid(msg, HostConstant.SERVICE_ID, hostUuid); bus.send(msg, new CloudBusCallBack(trigger) { @Override public void run(MessageReply reply) { if (!reply.isSuccess()) { trigger.fail(reply.getError()); return; } KVMHostAsyncHttpCallReply r = reply.castReply(); ApplyDhcpRsp rsp = r.toResponse(ApplyDhcpRsp.class); if (!rsp.isSuccess()) { 
trigger.fail(errf.stringToOperationError(rsp.getError())); return; } trigger.next(); } }); } }); done(new FlowDoneHandler(completion) { @Override public void handle(Map data) { apply(); } }); error(new FlowErrorHandler(completion) { @Override public void handle(ErrorCode errCode, Map data) { completion.fail(errCode); } }); } }).start(); } } new DhcpApply().apply(); } @Override public void applyDhcpService(List<DhcpStruct> dhcpStructList, VmInstanceSpec spec, final Completion completion) { if (dhcpStructList.isEmpty()) { completion.success(); return; } applyDhcpToHosts(toDhcpInfo(dhcpStructList), spec.getDestHost().getUuid(), false, completion); } private void releaseDhcpService(List<DhcpInfo> info, final String vmUuid, final String hostUuid, final NoErrorCompletion completion) { final ReleaseDhcpCmd cmd = new ReleaseDhcpCmd(); cmd.dhcp = info; KVMHostAsyncHttpCallMsg msg = new KVMHostAsyncHttpCallMsg(); msg.setCommand(cmd); msg.setCommandTimeout(timeoutMgr.getTimeout(cmd.getClass(), "5m")); msg.setHostUuid(hostUuid); msg.setPath(RELEASE_DHCP_PATH); bus.makeTargetServiceIdByResourceUuid(msg, HostConstant.SERVICE_ID, hostUuid); bus.send(msg, new CloudBusCallBack(completion) { @Override public void run(MessageReply reply) { if (!reply.isSuccess()) { //TODO: logger.warn(String.format("failed to release dhcp%s for vm[uuid: %s] on the kvm host[uuid:%s]; %s", cmd.dhcp, vmUuid, hostUuid, reply.getError())); completion.done(); return; } KVMHostAsyncHttpCallReply r = reply.castReply(); ReleaseDhcpRsp rsp = r.toResponse(ReleaseDhcpRsp.class); if (!rsp.isSuccess()) { //TODO logger.warn(String.format("failed to release dhcp%s for vm[uuid: %s] on the kvm host[uuid:%s]; %s", cmd.dhcp, vmUuid, hostUuid, rsp.getError())); completion.done(); return; } completion.done(); } }); } @Override public void releaseDhcpService(List<DhcpStruct> dhcpStructsList, final VmInstanceSpec spec, final NoErrorCompletion completion) { if (dhcpStructsList.isEmpty()) { completion.done(); return; } 
releaseDhcpService(toDhcpInfo(dhcpStructsList), spec.getVmInventory().getUuid(), spec.getDestHost().getUuid(), completion); } @Override public void vmDefaultL3NetworkChanged(VmInstanceInventory vm, String previousL3, String nowL3, final Completion completion) { DebugUtils.Assert(previousL3 != null || nowL3 != null, "why I get two NULL L3 networks!!!!"); if (!VmInstanceState.Running.toString().equals(vm.getState())) { return; } VmNicInventory pnic = null; VmNicInventory nnic = null; for (VmNicInventory nic : vm.getVmNics()) { if (nic.getL3NetworkUuid().equals(previousL3)) { pnic = nic; } else if (nic.getL3NetworkUuid().equals(nowL3)) { nnic = nic; } } ResetDefaultGatewayCmd cmd = new ResetDefaultGatewayCmd(); if (pnic != null) { cmd.gatewayToRemove = pnic.getGateway(); cmd.macOfGatewayToRemove = pnic.getMac(); cmd.bridgeNameOfGatewayToRemove = new BridgeNameFinder().findByL3Uuid(previousL3); cmd.namespaceNameOfGatewayToRemove = makeNamespaceName(cmd.bridgeNameOfGatewayToRemove, previousL3); } if (nnic != null) { cmd.gatewayToAdd = nnic.getGateway(); cmd.macOfGatewayToAdd = nnic.getMac(); cmd.bridgeNameOfGatewayToAdd = new BridgeNameFinder().findByL3Uuid(nowL3); cmd.namespaceNameOfGatewayToAdd = makeNamespaceName(cmd.bridgeNameOfGatewayToAdd, nowL3); } KvmCommandSender sender = new KvmCommandSender(vm.getHostUuid()); sender.send(cmd, RESET_DEFAULT_GATEWAY_PATH, wrapper -> { ResetDefaultGatewayRsp rsp = wrapper.getResponse(ResetDefaultGatewayRsp.class); return rsp.isSuccess() ? null : errf.stringToOperationError(rsp.getError()); }, new ReturnValueCompletion<KvmResponseWrapper>(completion) { @Override public void success(KvmResponseWrapper returnValue) { completion.success(); } @Override public void fail(ErrorCode errorCode) { completion.fail(errorCode); } }); } }
package battlecode.common;

/**
 * Defines game constants used throughout the packages.
 * This class is meant to be used in other classes with
 * an <code>import static battlecode.common.GameConstants.*;</code>.
 *
 */
public final class GameConstants {

    /*
     * By convention, the names of the constants should begin with
     * what they apply to, e.g. GAME_DEFAULT_SEED rather than DEFAULT_GAME_SEED.
     */

    /** Fixed cost to send a message */
    public static final double BROADCAST_FIXED_COST = 0.0002;

    /** Additional cost per byte of message size */
    public static final double BROADCAST_COST_PER_BYTE = 0.000002;

    /** The minimum possible map height. */
    public static final int MAP_MIN_HEIGHT = 20;

    /** The maximum possible map height. */
    public static final int MAP_MAX_HEIGHT = 70;

    /** The minimum possible map width. */
    public static final int MAP_MIN_WIDTH = 20;

    /** The maximum possible map width. */
    public static final int MAP_MAX_WIDTH = 70;

    /** The maximum fraction of upkeep that can be regained by yielding */
    public static final double YIELD_BONUS = 0.998;

    /** The bytecode penalty that is imposed each time an exception is thrown */
    public static final int EXCEPTION_BYTECODE_PENALTY = 500;

    /** The number of indicator strings that a player can associate with a robot */
    public static final int NUMBER_OF_INDICATOR_STRINGS = 3;

    /** The base number of bytecodes a robot can execute each round */
    public static final int BYTECODE_LIMIT_BASE = 10000;

    /** The number of additional bytecodes granted by a processor */
    public static final int BYTECODE_LIMIT_ADDON = 4500;

    /** The number of rounds after a component is equipped before it may be used */
    public static final int EQUIP_WAKE_DELAY = 10;

    /** The number of rounds it takes for components to wake up after a robot is powered on */
    public static final int POWER_WAKE_DELAY = 20;

    /** The maximum amount of damage done to a robot with Hardened from any one attack */
    public static final double HARDENED_MAX_DAMAGE = 2.;

    /** A shield can reduce the damage done no lower than this value */
    public static final double SHIELD_MIN_DAMAGE = .15;

    /** The amount by which each shield reduces the damage from any attack */
    public static final double SHIELD_DAMAGE_REDUCTION = .6;

    /** Number of rounds Iron stays in effect after it is activated */
    public static final int IRON_EFFECT_ROUNDS = 4;

    /** Hp granted by every plate on a robot */
    public static final double PLATING_HP_BONUS = 7.;

    /** The number of longs that your team can remember between games. */
    public static final int TEAM_MEMORY_LENGTH = 32;

    /** The maximum capacity that dropships can transport. Note that
     * a chassis of capacity, say, six will take up six space in the
     * dropship, regardless of its components.
     */
    public static final int TRANSPORT_CAPACITY = 30;

    /** Maximum resources granted per flux mine before it starts to deplete */
    public static final double MINE_RESOURCES = 1.0;

    /** Minimum resources granted per flux mine after it is fully depleted */
    public static final double MINE_DEPLETED_RESOURCES = 0.15;

    /** Rounds that a mine is fully operational */
    public static final int MINE_ROUNDS = 2000;

    /** Rounds for the mine to lose 0.01 */
    public static final int MINE_DEPLETION_RATE = 5;

    /** Damage done by successive attacks of the beam as long as it is focused */
    public static final double[] BEAM_RAMP = {0, 1, 2, 3, 4, 6};

    /** Duration for which bug is in effect after it is planted */
    public static final int BUG_DURATION = 250;

    /** Radius squared that the bug can sense */
    public static final int BUG_SENSOR_RANGE = 8;

    /** Initial flux for each team */
    public static final double INITIAL_FLUX = 30.;

    /** Minimum number of mines allowed on official maps. */
    public static final int MINES_MIN = 8;

    /** Maximum number of mines allowed on official maps. */
    public static final int MINES_MAX = 100;

    /** The maximum lifetime of a DUMMY */
    public static final int DUMMY_LIFETIME = 200;

    /** The square of the maximum distance a message can travel. */
    public static final int BROADCAST_RADIUS_SQUARED = 64;

    /** The amount of flux produced by an archon each turn. */
    public static final double ARCHON_PRODUCTION = 1.;

    /** The cost in flux for a scout to regenerate nearby units. */
    public static final double REGEN_COST = .4;

    /** The amount of energon gained when a unit regenerates. */
    public static final double REGEN_AMOUNT = .2;

    /** The number of archons each team starts with. */
    public static final int NUMBER_OF_ARCHONS = 6;

    // presumably the full health of a power node — TODO confirm against the engine
    public static final double MAX_NODE_HEALTH = 100.0;

    // presumably the damage threshold/cap involved in capturing a node — TODO confirm
    public static final double NODE_CAPTURE_LIMIT = 10.0;

    // This class cannot be instantiated.
    private GameConstants() {
    }
}
package com.opengamma.financial.loader.portfolio; import javax.time.calendar.ZonedDateTime; import com.opengamma.financial.tool.ToolContext; import com.opengamma.id.ExternalIdSearch; import com.opengamma.id.ExternalIdSearchType; import com.opengamma.id.VersionCorrection; import com.opengamma.master.portfolio.ManageablePortfolio; import com.opengamma.master.portfolio.ManageablePortfolioNode; import com.opengamma.master.portfolio.PortfolioDocument; import com.opengamma.master.portfolio.PortfolioMaster; import com.opengamma.master.portfolio.PortfolioSearchRequest; import com.opengamma.master.portfolio.PortfolioSearchResult; import com.opengamma.master.position.ManageablePosition; import com.opengamma.master.position.PositionDocument; import com.opengamma.master.position.PositionMaster; import com.opengamma.master.position.PositionSearchRequest; import com.opengamma.master.position.PositionSearchResult; import com.opengamma.master.security.ManageableSecurity; import com.opengamma.master.security.SecurityDocument; import com.opengamma.master.security.SecurityMaster; import com.opengamma.master.security.SecuritySearchRequest; import com.opengamma.master.security.SecuritySearchResult; import com.opengamma.master.security.SecuritySearchSortOrder; /** * A class that facilitates writing securities and portfolio positions and trades */ public class MasterPortfolioWriter implements PortfolioWriter { private PortfolioMaster _portfolioMaster; private PositionMaster _positionMaster; private SecurityMaster _securityMaster; private PortfolioDocument _portfolioDocument; private ManageablePortfolioNode _currentNode; private ManageablePortfolioNode _originalNode; private ManageablePortfolioNode _originalRoot; public MasterPortfolioWriter(String portfolioName, ToolContext toolContext) { _portfolioMaster = toolContext.getPortfolioMaster(); _positionMaster = toolContext.getPositionMaster(); _securityMaster = toolContext.getSecurityMaster(); _portfolioDocument = 
createPortfolio(portfolioName); } public MasterPortfolioWriter(String portfolioName, PortfolioMaster portfolioMaster, PositionMaster positionMaster, SecurityMaster securityMaster) { _portfolioMaster = portfolioMaster; _positionMaster = positionMaster; _securityMaster = securityMaster; _portfolioDocument = createPortfolio(portfolioName); } /* * writeSecurity searches for an existing security that matches an external id search, and attempts to * reuse/update it wherever possible, instead of creating a new one. */ @Override public ManageableSecurity writeSecurity(ManageableSecurity security) { SecuritySearchRequest searchReq = new SecuritySearchRequest(); ExternalIdSearch idSearch = new ExternalIdSearch(security.getExternalIdBundle()); // match any one of the IDs searchReq.setVersionCorrection(VersionCorrection.ofVersionAsOf(ZonedDateTime.now())); // valid now searchReq.setExternalIdSearch(idSearch); searchReq.setFullDetail(true); searchReq.setSortOrder(SecuritySearchSortOrder.VERSION_FROM_INSTANT_DESC); SecuritySearchResult searchResult = _securityMaster.search(searchReq); for (ManageableSecurity foundSecurity : searchResult.getSecurities()) { if (weakEquals(foundSecurity, security)) { // It's already there, don't update or add it return foundSecurity; } } // Not found, so add it SecurityDocument addDoc = new SecurityDocument(security); SecurityDocument result = _securityMaster.add(addDoc); return result.getSecurity(); } // This weak equals does not actually compare the security's fields, just the type, external ids and attributes :( private boolean weakEquals(ManageableSecurity sec1, ManageableSecurity sec2) { return sec1.getName().equals(sec2.getName()) && sec1.getSecurityType().equals(sec2.getSecurityType()) && sec1.getExternalIdBundle().equals(sec2.getExternalIdBundle()) && sec1.getAttributes().equals(sec2.getAttributes()); } /* * WritePosition checks if the position exists in the previous version of the portfolio. * If so, the existing position is reused. 
*/ @Override public ManageablePosition writePosition(ManageablePosition position) { ManageablePosition existingPosition = null; if (!(_originalNode == null) && !_originalNode.getPositionIds().isEmpty()) { PositionSearchRequest searchReq = new PositionSearchRequest(); // Filter positions in current node of original portfolio searchReq.setPositionObjectIds(_originalNode.getPositionIds()); // Filter positions with same external ids ExternalIdSearch externalIdSearch = new ExternalIdSearch(); externalIdSearch.addExternalIds(position.getSecurityLink().getExternalIds()); externalIdSearch.setSearchType(ExternalIdSearchType.ALL); searchReq.setSecurityIdSearch(externalIdSearch); // Filter positions with the same quantity searchReq.setMinQuantity(position.getQuantity()); searchReq.setMaxQuantity(position.getQuantity()); // Search PositionSearchResult searchResult = _positionMaster.search(searchReq); // Get the first match if found PositionDocument firstDocument = searchResult.getFirstDocument(); if (firstDocument != null) { existingPosition = firstDocument.getPosition(); } // TODO also confirm that all the associated trades are identical } if (existingPosition == null) { // Add the new position to the position master PositionDocument addedDoc = _positionMaster.add(new PositionDocument(position)); // Add the new position to the portfolio _currentNode.addPosition(addedDoc.getUniqueId()); // Return the new position return addedDoc.getPosition(); } else { // Add the existing position to the portfolio _currentNode.addPosition(existingPosition.getUniqueId()); // Return the existing position return existingPosition; } } @Override public ManageablePortfolio getPortfolio() { return _portfolioDocument.getPortfolio(); } @Override public ManageablePortfolioNode getCurrentNode() { return _currentNode; } @Override public ManageablePortfolioNode setCurrentNode(ManageablePortfolioNode node) { // Attempt to find equivalent node in earlier version of portfolio if (_originalRoot != null) { 
_originalNode = _originalRoot.findNodeByName(node.getName()); } _currentNode = node; return _currentNode; } @Override public void flush() { _portfolioMaster.update(_portfolioDocument); } @Override public void close() { flush(); } private PortfolioDocument createPortfolio(String portfolioName) { // Create a new root node ManageablePortfolioNode rootNode = new ManageablePortfolioNode(portfolioName); // Check to see whether the portfolio already exists PortfolioSearchRequest portSearchRequest = new PortfolioSearchRequest(); portSearchRequest.setName(portfolioName); PortfolioSearchResult portSearchResult = _portfolioMaster.search(portSearchRequest); PortfolioDocument portfolioDoc = portSearchResult.getFirstDocument(); // If it doesn't, create it (add) if (portfolioDoc == null) { ManageablePortfolio portfolio = new ManageablePortfolio(portfolioName, rootNode); portfolioDoc = new PortfolioDocument(); portfolioDoc.setPortfolio(portfolio); portfolioDoc = _portfolioMaster.add(portfolioDoc); _originalRoot = null; _originalNode = null; // If it does, create a new version of the existing portfolio (update) with a new root node } else { ManageablePortfolio portfolio = portfolioDoc.getPortfolio(); _originalRoot = portfolio.getRootNode(); _originalNode = _originalRoot; portfolio.setRootNode(rootNode); portfolioDoc.setPortfolio(portfolio); portfolioDoc = _portfolioMaster.update(portfolioDoc); } // Set current node to the root node _currentNode = portfolioDoc.getPortfolio().getRootNode(); return portfolioDoc; } }
// JSBuiltInFunctions.java package ed.js.engine; import java.io.*; import java.util.*; import java.lang.reflect.*; import com.twmacinta.util.*; import ed.log.*; import ed.js.*; import ed.js.func.*; import ed.io.*; import ed.net.*; import ed.util.*; import ed.security.*; /** * @anonymous name : {SYSOUT}, desc : {Dumps a string to standard output.}, param : {type : (string), name : (s), desc : (the string to print)} * @anonymous name : {assert}, desc : {Verifies a given condition and terminates the flow of execution if false.} param : {type : (boolean), name : (cond), desc : (condition to test)} * @anonymous name : {download}, desc : {Downloads the file at a given URL.}, return : {type : (file), desc : (downloaded file)}, param : {type : (string), name : (url), desc : (url of the file to download)} * @anonymous name : {fork}, desc : {Creates a new thread to run a given function. Once created, the thread can be run and it's return data fetched, as shown in the example.}, example : { x = fork(function() { return 3; }); x.run(); x.returnData(); // will print 3 } param : {type : (function), name : (f), desc : (function to execute in a separate thread)}, return : {type : (thread), desc : (the thread generated)} * @anonymous name : {isAlpha} desc : {Determines if the input is a single alphabetic character.} return : {type : (boolean), desc : (if the given string is a single alphabetic character)}, param : { type : (string), name : (ch), desc : (character to check)} * @anonymous name : {isArray} desc : {Checks that a given object is an array.} param : {type : (any), name : (arr), desc : (object to check)}, return : {type : (boolean), desc : (if the object is an array)} * @anonymous name : {isBool} desc : {Checks that a given object is a boolean value.} param : {type : (any), name : (arr), desc : (object to check)}, return : {type : (boolean), desc : (if the object is a boolean value)} * @anonymous name : {isDate} desc : {Checks that a given object is a date object.} param : 
{type : (any), name : (arr), desc : (object to check)}, return : {type : (boolean), desc : (if the object is a date object)} * @anonymous name : {isDigit} desc : {Checks that a given object is a string representing a single digit.} param : {type : (any), name : (arr), desc : (object to check)}, return : {type : (boolean), desc : (if the object is a string representing a single digit)} * @anonymous name : {isFunction} desc : {Checks that a given object is a function.} param : {type : (any), name : (arr), desc : (object to check)}, return : {type : (boolean), desc : (if the object is a function)} * @anonymous name : {isNumber} desc : {Checks that a given object is a number.} param : {type : (any), name : (arr), desc : (object to check)}, return : {type : (boolean), desc : (if the object is a number)} * @anonymous name : {isObject} desc : {Checks that a given object is an object.} param : {type : (any), name : (arr), desc : (object to check)}, return : {type : (boolean), desc : (if the object is an object)} * @anonymous name : {isRegExp} desc : {Checks that a given object is a regular expression.} param : {type : (any), name : (arr), desc : (object to check)}, return : {type : (boolean), desc : (if the object is a regular expression)} * @anonymous name : {isRegex} desc : {Checks that a given object is a regular expression.} param : {type : (any), name : (arr), desc : (object to check)}, return : {type : (boolean), desc : (if the object is a regular expression)} * @anonymous name : {isSpace} desc : {Checks that a given object is a single whitespace character.} param : {type : (any), name : (arr), desc : (object to check)}, return : {type : (boolean), desc : (if the object is a single whitespace character)} * @anonymous name : {isString} desc : {Checks that a given object is a string.} param : {type : (any), name : (arr), desc : (object to check)}, return : {type : (boolean), desc : (if the object is a string)} * @anonymous name : {javaStatic} desc : {Calls a static 
java function.}, param : {type : (string), name : (cls), desc : (Java class to call)}, param : {type : (string), name : (method), desc : (method to call within the class)}, param : {type : (any), name : (params), desc : (some number of arguments to be passed to the java function)} * @anonymous name : {javaStaticProp}, desc : {Returns the value of a given static Java property.}, param : {type : (string), name : (cls), desc : (Java class)}, param : {type : (string), name : (property), desc : (method to call within the class)}, return : {type : (any), desc : (the value of the requested property)} * @anonymous name : {md5} desc : {Returns an md5 encoding of the given object.} param : {type : (any), name : (thing), desc : (object to be encoded)}, return : {type : (string), desc : (md5 hash of the given object)} * @anonymous name : {parseBool} desc : {Converts an object into a boolean value. Objects that, when converted to a string, start with "t", "T", or "1" are true. Everything else is false. } param : {type : (any), name : (obj), desc : (object to be converted into a boolean)}, return : { type : (boolean) desc : (the boolean equivalent of the given object)} * @anonymous name : {parseDate} desc : {Converts a date or string into a date object.} param : {type : (string|Date), name : (d), desc : (object to be converted into a date)}, return : { type : (Date) desc : (the date equivalent of the given object)} * @anonymous name : {parseNumber} desc : {Converts an object into a numeric value. 
} param : {type : (any), name : (obj), desc : (object to be converted into a number)}, return : { type : (number) desc : (the numeric equivalent of the given object)}
 * @anonymous name : {printnoln} desc : {Prints a string with no terminating newline.} param : {type : (string) name : (str), desc : (string to print)}
 * @anonymous name : {processArgs} desc : {Assigns the values passed to a function in the variable <tt>arguments</tt> to a list of given variable names.} param : {type : (any) name : (param), desc : (a series of variable names to which to assign arguments)}
 * @anonymous name : {sleep} desc : {Pauses the thread's execution for a given number of milliseconds.} param : {type : (number) name : (ms), desc : (the number of milliseconds for which to pause)}
 * @anonymous name : {sysexec} desc : {Executes a system command.} param : {type : (string) name : (cmd), desc : (command to execute)}, param : {type : (string), name : (in) desc : (input to command) isOptional : (true)}, param : {isOptional : (true) type : (Object), name : (env), desc : (environmental variables to use)} param : {type : (string) name : (loc) desc : (path from which to execute the command) isOptional : (true)} return : { type : (Object) desc : (the output of the command)}
 * @expose
 */
public class JSBuiltInFunctions {

    // static-init bookkeeping: marks the start of this class's initialization
    static { JS._debugSIStart( "JSBuiltInFunctions" ); }

    /** Returns a new scope in which the builtin functions are defined.
     * @return the new scope */
    public static Scope create(){
        return create( "Built-In" );
    }

    /** Returns a new scope with a given name in which the builtin functions are defined.
     * @return the new scope */
    public static Scope create( String name ){
        Scope s = new Scope( name , null );
        try {
            // copy the shared immutable base scope, then add the per-scope constructors
            s.putAll( _base );
            _setup( s );
        }
        catch ( RuntimeException re ){
            // a failure here means the JS runtime is unusable; bail out hard
            re.printStackTrace();
            System.exit(-1);
        }
        s.setGlobal( true );
        s.lock();
        return s;
    }

    /** The JS "assert" function object, with attached helpers: throws/raises, eq, neq. */
    public static class jsassert extends JSFunctionCalls1 {

        public jsassert(){
            // assert.throws( [exctype ,] fn ): passes iff fn throws (optionally matching exctype)
            JSFunction myThrows = new JSFunctionCalls2(){
                    public Object call( Scope scope , Object exctype, Object f, Object extra[] ){
                        if( ! ( f instanceof JSFunction ) ){
                            // allow the one-argument form: assert.throws( fn )
                            if ( exctype instanceof JSFunction && f == null ){
                                f = exctype;
                                exctype = null;
                            }
                            else
                                throw new RuntimeException( "Second argument to assert.throws must be a function" );
                        }

                        try {
                            ((JSFunction)f).call( scope , null );
                        }
                        catch(JSException e){
                            if ( exctype == null )
                                return true;
                            // expected type given as a string: compare against the thrown JS object
                            if( exctype instanceof JSString || exctype instanceof String ){
                                if( e.getObject().equals( exctype.toString() ) )
                                    return Boolean.TRUE;
                            }
                            Object desc = e.getObject();
                            if( desc instanceof Throwable && match( (Throwable) desc , exctype ) )
                                return Boolean.TRUE;
                            Throwable cause = e.getCause();
                            if( match( cause , exctype ) )
                                return Boolean.TRUE;
                            throw new JSException( "given function threw something else: " + cause.toString() );
                        }
                        catch(Throwable e){
                            if ( exctype == null )
                                return true;
                            if( match( e , exctype.toString() ) ) {
                                return Boolean.TRUE;
                            }
                            // FIXME: what if exctype is a JSFunction (i.e.
                            // an exception type?
                            // Find out how to do instanceof in JS API
                            throw new JSException( "given function threw something else: " + e.toString() );
                        }

                        // Didn't throw anything
                        throw new JSException( "given function did not throw " + exctype );
                    }
                };
            set("throws", myThrows);
            set("raises", myThrows);

            // assert.eq( a , b [, msg] ): fails unless a == b by JS equality
            set( "eq" , new JSFunctionCalls3(){
                    public Object call( Scope scope , Object a , Object b , Object extraMsg , Object extra[] ){
                        if ( JSInternalFunctions.JS_eq( a , b ) )
                            return true;
                        String msg = "not the same [" + a + "] != [" + b + "]";
                        if ( extraMsg != null )
                            msg += " " + extraMsg;
                        throw new JSException( msg );
                    }
                } );

            // assert.neq( a , b [, msg] ): fails when a == b by JS equality
            set( "neq" , new JSFunctionCalls3(){
                    public Object call( Scope scope , Object a , Object b , Object extraMsg , Object extra[] ){
                        if ( ! JSInternalFunctions.JS_eq( a , b ) )
                            return true;
                        String msg = "are the same [" + a + "] != [" + b + "]";
                        if ( extraMsg != null )
                            msg += " " + extraMsg;
                        throw new JSException( msg );
                    }
                } );
        }

        // assert( cond [, msg] ): throws unless cond is truthy
        public Object call( Scope scope , Object foo , Object extra[] ){
            if ( JSInternalFunctions.JS_evalToBool( foo ) )
                return Boolean.TRUE;

            if ( extra != null && extra.length > 0 && extra[0] != null )
                throw new JSException( "assert failed : " + extra[0] );

            throw new JSException( "assert failed" );
        }

        // true iff the throwable e matches the expected exception type name / JS object
        public boolean match( Throwable e , Object exctype ){
            if( exctype instanceof JSString || exctype instanceof String ){
                String s = exctype.toString();
                String gotExc = e.getClass().toString();
                if( gotExc.equals( "class " + s ) )
                    return true;
                if( gotExc.equals( "class java.lang." + s ) )
                    return true;
                if( e instanceof JSException &&
                    ((JSException)e).getObject().equals( s ) )
                    return true;
                // FIXME: check subclasses?
            }
            return false;
        }
    }

    /** Resolves a class by name, supporting "Outer:Inner" syntax for inner classes.
     * @unexpose */
    public static Class _getClass( String name )
        throws Exception {
        final int colon = name.indexOf( ":" );
        if ( colon < 0 )
            return Class.forName( name );

        String base = name.substring( 0 , colon );
        Class c = Class.forName( base );

        // inner classes are named Outer$Inner in the JVM
        String inner = "$" + name.substring( colon + 1 );
        for ( Class child : c.getClasses() ){
            if ( child.getName().endsWith( inner ) )
                return child;
        }

        throw new JSException( "can't find inner class [" + inner + "] on [" + c.getName() + "]" );
    }

    /** javaCreate( className , args... ): instantiates a Java class via reflection.
     *  Only callable from core JS code. */
    public static class javaCreate extends JSFunctionCalls1 {
        public Object call( Scope scope , Object clazzNameJS , Object extra[] ){
            String clazzName = clazzNameJS.toString();

            if ( ! Security.isCoreJS() )
                throw new JSException( "you can't do create a :" + clazzName + " from [" + Security.getTopJS() + "]" );

            Class clazz = null;
            try {
                clazz = _getClass( clazzName );
            }
            catch ( Exception e ){
                throw new JSException( "can't find class for [" + clazzName + "]" );
            }

            Constructor[] allCons = clazz.getConstructors();
            // try constructors in a deterministic order (sorted by NativeBridge's comparator)
            Arrays.sort( allCons , NativeBridge._consLengthComparator );

            for ( int i=0; i<allCons.length; i++ ){
                // null means the JS arguments cannot be coerced to this signature
                Object params[] = NativeBridge.doParamsMatch( allCons[i].getParameterTypes() , extra , scope );
                if ( params != null ){
                    try {
                        return allCons[i].newInstance( params );
                    }
                    catch ( RuntimeException re ){
                        ed.lang.StackTraceHolder.getInstance().fix( re );
                        throw re;
                    }
                    catch ( Exception e ){
                        throw new JSException( "can' instantiate" , e );
                    }
                }
            }

            throw new RuntimeException( "can't find valid constructor" );
        }
    }

    /** javaStatic( className , methodName , args... ): invokes a static Java method.
     *  Only callable from core JS code. */
    public static class javaStatic extends JSFunctionCalls2 {
        public Object call( Scope scope , Object clazzNameJS , Object methodNameJS , Object extra[] ){
            final boolean debug = false;

            String clazzName = clazzNameJS.toString();

            if ( ! Security.isCoreJS() )
                throw new JSException( "you can't use a :" + clazzName + " from [" + Security.getTopJS() + "]" );

            Class clazz = null;
            try {
                clazz = _getClass( clazzName );
            }
            catch ( Exception e ){
                throw new JSException( "can't find class for [" + clazzName + "]" );
            }

            Method[] all = clazz.getMethods();
            Arrays.sort( all , NativeBridge._methodLengthComparator );

            // first method that is static, has the right name and accepts the arguments wins
            for ( int i=0; i<all.length; i++ ){
                Method m = all[i];
                if ( debug ) System.out.println( m.getName() );

                if ( ( m.getModifiers() & Modifier.STATIC ) == 0 ){
                    if ( debug ) System.out.println( "\t not static" );
                    continue;
                }

                if ( ! m.getName().equals( methodNameJS.toString() ) ){
                    if ( debug ) System.out.println( "\t wrong name" );
                    continue;
                }

                Object params[] = NativeBridge.doParamsMatch( m.getParameterTypes() , extra , scope , debug );
                if ( params == null ){
                    if ( debug ) System.out.println( "\t params don't match" );
                    continue;
                }

                try {
                    return m.invoke( null , params );
                }
                catch ( Exception e ){
                    e.printStackTrace();
                    throw new JSException( "can't call" , e );
                }
            }

            throw new RuntimeException( "can't find valid method" );
        }
    }

    /** javaStaticProp( className , fieldName ): reads a static Java field.
     *  Only callable from core JS code. */
    public static class javaStaticProp extends JSFunctionCalls2 {
        public Object call( Scope scope , Object clazzNameJS , Object fieldNameJS , Object extra[] ){
            String clazzName = clazzNameJS.toString();

            if ( ! Security.isCoreJS() )
                throw new JSException( "you can't use a :" + clazzName + " from [" + Security.getTopJS() + "]" );

            Class clazz = null;
            try {
                clazz = _getClass( clazzName );
            }
            catch ( JSException e ){
                throw e;
            }
            catch ( Exception e ){
                throw new JSException( "can't find class for [" + clazzName + "]" );
            }

            try {
                return clazz.getField( fieldNameJS.toString() ).get( null );
            }
            catch ( NoSuchFieldException n ){
                throw new JSException( "can't find field [" + fieldNameJS + "] from [" + clazz.getName() + "]" );
            }
            catch ( Exception e ){
                throw new JSException( "can't get field [" + fieldNameJS + "] from [" + clazz.getName() + "] b/c " + e );
            }
        }
    }

    /** Implementation behind print/printnoln/SYSOUT; newline behavior fixed at construction. */
    public static class print extends JSFunctionCalls1 {
        print(){
            this( true );
        }
        print( boolean newLine ){
            super();
            _newLine = newLine;
        }
        public Object call( Scope scope , Object foo , Object extra[] ){
            if ( _newLine )
                System.out.println( foo );
            else
                System.out.print( foo );
            return null;
        }
        final boolean _newLine;
    }

    /** The JS Object constructor. */
    public static class NewObject extends JSFunctionCalls0{
        public Object call( Scope scope , Object extra[] ){
            return new JSObjectBase();
        }
        public Object get( Object o ){
            if ( o == null )
                return null;
            // expose the shared low-level functions as Object.prototype
            if ( o.toString().equals( "prototype" ) )
                return JSObjectBase._objectLowFunctions;
            return super.get( o );
        }
        protected void init(){
            /**
             * Copies all properties from the source to the destination object.
             * Not in JavaScript spec! Please refer to Prototype docs!
             */
            set( "extend", new Prototype.Object_extend() );
            set( "values", new Prototype.Object_values() );
            set( "keys", new Prototype.Object_keys() );
        }
    };

    /** The JS Date constructor; accepts an optional millisecond timestamp. */
    public static class NewDate extends JSFunctionCalls1 {
        public Object call( Scope scope , Object t , Object extra[] ){
            if ( t == null )
                return new JSDate();
            if ( !
                ( t instanceof Number ) )
                return new JSDate();
            return new JSDate( ((Number)t).longValue() );
        }
    }

    /** ObjectId factory, exposed under CrID/ObjID/ObjId/ObjectID/ObjectId. */
    public static class CrID extends JSFunctionCalls1 {
        public Object call( Scope scope , Object idString , Object extra[] ){
            if ( idString == null )
                return ed.db.ObjectId.get();
            // already an ObjectId: pass it through unchanged
            if ( idString instanceof ed.db.ObjectId )
                return idString;
            return new ed.db.ObjectId( idString.toString() );
        }
        public JSObject newOne(){
            throw new JSException( "ObjectId is not a constructor" );
        }
    }

    /** sleep( ms ): pauses the current thread; returns false on bad input or interrupt. */
    public static class sleep extends JSFunctionCalls1 {
        public Object call( Scope scope , Object timeObj , Object extra[] ){
            if ( ! ( timeObj instanceof Number ) )
                return false;
            try {
                Thread.sleep( ((Number)timeObj).longValue() );
            }
            catch ( Exception e ){
                return false;
            }
            return true;
        }
    }

    /** Generic single-class instanceof test backing isArray, isBool, isNumber, etc. */
    public static class isXXX extends JSFunctionCalls1 {
        isXXX( Class c ){
            _c = c;
        }
        public Object call( Scope scope , Object o , Object extra[] ){
            return _c.isInstance( o );
        }
        final Class _c;
    }

    /** isNaN( o ): true iff o equals Double.NaN. */
    public static class isNaN extends JSFunctionCalls1 {
        public Object call( Scope scope , Object o , Object extra[] ){
            return o.equals(Double.NaN);
        }
    }

    /** Multi-class instanceof test (e.g. isString matches both String and JSString). */
    public static class isXXXs extends JSFunctionCalls1 {
        isXXXs( Class ... c ){
            _c = c;
        }
        public Object call( Scope scope , Object o , Object extra[] ){
            for ( int i=0; i<_c.length; i++ )
                if ( _c[i].isInstance( o ) )
                    return true;
            return false;
        }
        final Class _c[];
    }

    /** fork( fn ): wraps fn in a Thread object exposing returnData() for the result. */
    public static class fork extends JSFunctionCalls1 {
        public Object call( final Scope scope , final Object funcJS , final Object extra[] ){
            if ( ! ( funcJS instanceof JSFunction ) )
                throw new JSException( "fork has to take a function" );

            final JSFunction func = (JSFunction)funcJS;

            final Thread t = new Thread( "fork" ){
                    public void run(){
                        try {
                            _result = func.call( scope , extra );
                        }
                        catch ( Throwable t ){
                            // report through the scope's logger when one is installed
                            if ( scope.get( "log" ) != null )
                                ((Logger)scope.get( "log" ) ).error( "error in fork" , t );
                            else
                                t.printStackTrace();
                        }
                    }

                    // blocks until the thread finishes, then hands back the function's result
                    public Object returnData()
                        throws InterruptedException {
                        join();
                        return _result;
                    }

                    private Object _result;
                };

            return t;
        }
    }

    /** processArgs( name... ): binds the caller's "arguments" array to the given variable names. */
    public static class processArgs extends JSFunctionCalls0 {
        public Object call( Scope scope , Object [] args){
            JSArray a = (JSArray)scope.get("arguments");
            for(int i = 0; i < args.length; i++){
                scope.put(args[i].toString(), a.getInt(i), true);
            }
            return null;
        }
    }

    /** Returns if a given scope is the main scope.
     * @param the scope to check
     * @return if the given scope is the main scope
     */
    public static final boolean isBase( Scope s ){
        return s == _base;
    }

    private static final Scope _base; // these are things that aren't modifiable, so its safe if there is only 1 copy
    //private static final Scope _myScope; // this is the security hole.
    // need to get rid off TODO

    // builds and locks the shared base scope exactly once, at class-init time
    static {
        Scope s = new Scope( "base" , null );
        try {
            _setupBase( s );
        }
        catch ( RuntimeException re ){
            re.printStackTrace();
            System.exit( -1 );
        }
        finally {
            _base = s;
            _base.lock();
            _base.setGlobal( true );
        }
    }

    /**
     * everything that gets put into the scope that is a JSObjetBase gets locked
     */
    private static void _setupBase( Scope s ){
        s.put( "sysexec" , new ed.io.SysExec() , true );
        s.put( "print" , new print() , true );
        s.put( "printnoln" , new print( false ) , true );
        s.put( "SYSOUT" , new print() , true );
        s.put( "sleep" , new sleep() , true );
        s.put( "fork" , new fork() , true );

        // all the ObjectId aliases share a single function object
        CrID crid = new CrID();
        s.put( "CrID" , crid , true );
        s.put( "ObjID" , crid , true );
        s.put( "ObjId" , crid , true );
        s.put( "ObjectID" , crid , true );
        s.put( "ObjectId" , crid , true );

        // truthy iff the string form starts with 't', 'T' or '1'
        s.put( "parseBool" , new JSFunctionCalls1(){
                public Object call( Scope scope , Object b , Object extra[] ){
                    if ( b == null )
                        return false;

                    String s = b.toString();
                    if ( s.length() == 0 )
                        return false;

                    char c = s.charAt( 0 );
                    return c == 't' || c == 'T' || c == '1';
                }
            } , true );

        // NaN on null or unparseable input
        s.put( "parseFloat" , new JSFunctionCalls1(){
                public Object call( Scope scope , Object a , Object extra[] ){
                    if ( a == null )
                        return Double.NaN;
                    try {
                        return Double.parseDouble( a.toString() );
                    }
                    catch ( Exception e ){}
                    return Double.NaN;
                }
            } , true );

        s.put( "parseInt" , new JSFunctionCalls2(){
                public Object call( Scope scope , Object a , Object b , Object extra[] ){
                    if ( a == null )
                        return Double.NaN;

                    if ( a instanceof Number )
                        return ((Number)a).intValue();

                    String s = a.toString();
                    try {
                        // optional second argument is the radix; default is 10
                        if ( b != null && b instanceof Number ){
                            return StringParseUtil.parseIntRadix( s , ((Number)b).intValue() );
                        }
                        return StringParseUtil.parseIntRadix( s , 10 );
                    }
                    catch ( Exception e ){}
                    return Double.NaN;
                }
            } , true );

        // null when the input is not a date/string or doesn't parse
        s.put( "parseDate" , new JSFunctionCalls1(){
                public Object call( Scope scope , Object a , Object extra[] ){
                    if ( a == null )
                        return null;
                    if ( a instanceof JSDate )
                        return a;
                    if ( ! ( a instanceof String || a instanceof JSString ) )
                        return null;
                    long t = JSDate.parseDate( a.toString() , 0 );
                    if ( t == 0 )
                        return null;
                    return new JSDate( t );
                }
            } , true );

        s.put( "NaN" , Double.NaN , true );

        s.put( "md5" , new JSFunctionCalls1(){
                public Object call( Scope scope , Object b , Object extra[] ){
                    // the MD5 instance is shared by all callers, so serialize access
                    synchronized ( _myMd5 ){
                        _myMd5.Init();
                        _myMd5.Update( b.toString() );
                        return new JSString( _myMd5.asHex() );
                    }
                }
                private final MD5 _myMd5 = new MD5();
            } , true );

        s.put( "isArray" , new isXXX( JSArray.class ) , true );
        s.put( "isBool" , new isXXX( Boolean.class ) , true );
        s.put( "isNumber" , new isXXX( Number.class ) , true );
        s.put( "isDate" , new isXXX( JSDate.class ) , true );
        s.put( "isFunction" , new isXXX( JSFunction.class ) , true );
        s.put( "isRegExp" , new isXXX( JSRegex.class ) , true );
        s.put( "isRegex" , new isXXX( JSRegex.class ) , true );
        s.put( "isNaN", new isNaN(), true);
        s.put( "isString" , new isXXXs( String.class , JSString.class ) , true );

        // isObject: any JSObject except strings
        s.put( "isObject" , new JSFunctionCalls1(){
                public Object call( Scope scope , Object o , Object extra[] ){
                    if ( o == null )
                        return false;
                    if ( ! ( o instanceof JSObject ) )
                        return false;
                    if ( o instanceof JSString )
                        return false;
                    return true;
                }
            } , true );

        s.put( "isAlpha" , new JSFunctionCalls1(){
                public Object call( Scope scope , Object o , Object extra[] ){
                    char c = getChar( o );
                    return Character.isLetter( c );
                }
            } , true );

        s.put( "isSpace" , new JSFunctionCalls1(){
                public Object call( Scope scope , Object o , Object extra[] ){
                    char c = getChar( o );
                    return Character.isWhitespace( c );
                }
            } , true );

        s.put( "isDigit" , new JSFunctionCalls1(){
                public Object call( Scope scope , Object o , Object extra[] ){
                    char c = getChar( o );
                    return Character.isDigit( c );
                }
            } , true );

        // identity function
        s.put( "__self" , new JSFunctionCalls1(){
                public Object call( Scope scope , Object o , Object extra[] ){
                    return o;
                }
            } , true );

        s.put( "javaCreate" , new javaCreate() , true );
        s.put( "javaStatic" , new javaStatic() , true );
        s.put( "javaStaticProp" , new javaStaticProp() , true );

        s.put( "JSCaptcha" , new JSCaptcha() , true );
        s.put( "MimeTypes" , new ed.appserver.MimeTypes() , true );
        s.put( "Base64" , new ed.util.Base64() , true );

        s.put( "download" , new HttpDownload.downloadFunc() , true );
        s.put( "XMLHttpRequest" , XMLHttpRequest._cons , true );
        s.put( "processArgs", new processArgs(), true );

        // mail stuff till i'm done
        s.put( "JAVAXMAILTO" , javax.mail.Message.RecipientType.TO , true );

        JSON.init( s );
        Encoding.install( s );

        // lock every JSObjectBase value so the shared base scope stays immutable
        for ( String key : s.keySet() ){
            Object val = s.get( key );
            if ( val instanceof JSObjectBase )
                ((JSObjectBase)val).lock();
        }

        ed.db.migrate.Drivers.init( s );
    }

    // installs the per-scope constructors; these are new objects per scope, unlike _base
    private static void _setup( Scope s ){
        // core js
        s.put( "Object" , new NewObject() , true );
        s.put( "Array" , new JSArray.JSArrayCons() , true );
        s.put( "Date" , new JSDate.Cons() , true );
        s.put( "JSDate" , s.get( "Date" ) , true ); // b/c Eliot always types this
        s.put( "String" , new JSString.JSStringCons() , true );
        s.put( "XML" , new E4X.XML() , true );
        s.put( "Namespace" , new E4X.NamespaceCons() , true );
        s.put( "RegExp" , new JSRegex.Cons() , true );
        s.put( "Regexp" , s.get( "RegExp" ) , true ); // for Ruby technically
        s.put( "Function" , new JSInternalFunctions.FunctionCons() , true );
        s.put( "Math" , new JSMath() , true );
        s.put( "Class", Prototype.newCopy() , true );
        s.put( "Number" , new JSNumber.Cons() , true );
        s.put( "parseNumber" , s.get( "Number" ) , true );

        // extensions
        s.put( "Exception" , new JSException.cons() , true );
        s.put( "Map" , new JSMap.Cons() , true );
        s.put( "assert" , new jsassert() , true );

        s.lock();
    }

    // narrows an object to a single character; 0 when not a Character or 1-char string
    private static char getChar( Object o ){
        if ( o instanceof Character )
            return (Character)o;

        if ( o instanceof JSString )
            o = o.toString();

        if ( o instanceof String ){
            String s = (String)o;
            if ( s.length() == 1 )
                return s.charAt( 0 );
        }

        return 0;
    }

    // static-init bookkeeping: marks the end of this class's initialization
    static { JS._debugSIDone( "JSBuiltInFunctions" ); }
}
package org.jboss.forge.addon.projects.ui;

import java.io.PrintStream;
import java.util.Map;
import java.util.Objects;

import org.jboss.forge.addon.projects.Project;
import org.jboss.forge.addon.projects.ProjectFactory;
import org.jboss.forge.addon.projects.Projects;
import org.jboss.forge.addon.projects.building.BuildMessage;
import org.jboss.forge.addon.projects.building.BuildResult;
import org.jboss.forge.addon.projects.facets.PackagingFacet;
import org.jboss.forge.addon.ui.command.AbstractCommandExecutionListener;
import org.jboss.forge.addon.ui.command.UICommand;
import org.jboss.forge.addon.ui.context.UIContext;
import org.jboss.forge.addon.ui.context.UIExecutionContext;
import org.jboss.forge.addon.ui.output.UIOutput;
import org.jboss.forge.addon.ui.result.Result;
import org.jboss.forge.furnace.container.simple.lifecycle.SimpleContainer;
import org.jboss.forge.furnace.services.Imported;

/**
 * Prematurely builds the {@link Project} (if exists) and warns if it is valid
 *
 * @author <a href="ggastald@redhat.com">George Gastaldi</a>
 */
public class ProjectBuildStatusListener extends AbstractCommandExecutionListener
{
   /** Attribute-map key; a truthy value suppresses the build-status report. */
   private static final String PROJECT_BUILDSTATUS_SKIP_FLAG = "PROJECT_BUILDSTATUS_SKIP";

   /**
    * After a command runs, inspects the currently selected project's last build
    * result and, if the build failed, prints every build message to the error
    * stream with severity-appropriate formatting.
    */
   @Override
   public void postCommandExecuted(UICommand command, UIExecutionContext context, Result result)
   {
      Map<Object, Object> attributes = context.getUIContext().getAttributeMap();
      String skipFlag = Objects.toString(attributes.get(PROJECT_BUILDSTATUS_SKIP_FLAG), null);
      Imported<ProjectFactory> factoryLookup = SimpleContainer
               .getServices(getClass().getClassLoader(), ProjectFactory.class);
      if (Boolean.parseBoolean(skipFlag) || factoryLookup.isUnsatisfied())
      {
         // ProjectFactory is not available or PROJECT_BUILDSTATUS_SKIP_FLAG provided, ignore
         return;
      }
      UIContext ui = context.getUIContext();
      Project project = Projects.getSelectedProject(factoryLookup.get(), ui.getSelection());
      if (project == null || !project.hasFacet(PackagingFacet.class))
      {
         // No packaged project selected — nothing to check
         return;
      }
      BuildResult build = project.getFacet(PackagingFacet.class).getBuildResult();
      if (build == null || build.isSuccess())
      {
         // No build yet, or it succeeded — stay quiet
         return;
      }
      UIOutput output = ui.getProvider().getOutput();
      PrintStream err = output.err();
      output.error(err, String.format("Project '%s' has errors", project.getRoot()));
      for (BuildMessage message : build.getMessages())
      {
         report(output, err, message);
      }
   }

   /** Prints one build message with formatting chosen by its severity. */
   private void report(UIOutput output, PrintStream err, BuildMessage message)
   {
      switch (message.getSeverity())
      {
      case ERROR:
         output.error(err, message.getMessage());
         break;
      case WARN:
         output.warn(err, message.getMessage());
         break;
      case INFO:
      default:
         output.info(err, message.getMessage());
         break;
      }
   }
}
package Application;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseBody;

import com.heroku.sdk.jdbc.DatabaseUrl;

import dataTransfer.LoginData;
import dataTransfer.LoginValidation;
import dataTransfer.RegisterData;
import dataTransfer.ValidationCodes;

/**
 * View routing plus login/registration endpoints backed by the Heroku-provided
 * database.
 *
 * Fixes over the previous revision:
 * - SQL is now parameterized ({@link PreparedStatement}); the old code built
 *   queries by concatenating request fields, which was an injection hole and
 *   also produced invalid SQL for the INSERT (string values were unquoted).
 * - JDBC columns are 1-based; the old {@code getInt(0)} calls always threw,
 *   so every request reported {@code databaseError}.
 * - Connections, statements and result sets are closed via try-with-resources.
 *
 * NOTE(review): passwords are stored and compared in plain text — they should
 * be hashed (bcrypt/scrypt/argon2) before this ships.
 */
@Controller
public class HomeController {

	/** Serves the landing page. */
	@RequestMapping("/")
	public String home() {
		return "index";
	}

	/** Serves the login form. */
	@RequestMapping("/loginPage")
	public String loginPage() {
		return "users/login";
	}

	/** Serves the registration form. */
	@RequestMapping("/registerPage")
	public String registerPage() {
		return "users/register";
	}

	/** Serves the world page. */
	@RequestMapping("/worldPage")
	public String worldPage() {
		return "worldPage";
	}

	/**
	 * Validates the supplied credentials against the Users table.
	 *
	 * @param data username/password from the request body
	 * @return validation result: populated {@code userId}/{@code userName} on
	 *         success, {@code IncorrectUsernameOrPassword} on a miss, or
	 *         {@code databaseError} if the lookup failed
	 */
	@RequestMapping(value = "/login", method = RequestMethod.POST)
	public @ResponseBody LoginValidation loginGet(@RequestBody LoginData data) {
		LoginValidation code = new LoginValidation();
		try (Connection connection = DatabaseUrl.extract().getConnection();
				PreparedStatement stmt = connection.prepareStatement(
						"SELECT * FROM Users WHERE username = ? AND password = ?")) {
			stmt.setString(1, data.userName);
			stmt.setString(2, data.password);
			try (ResultSet userInfo = stmt.executeQuery()) {
				if (userInfo.next()) {
					// Columns assumed: 1 = id, 2 = username (matches the old
					// 0/1-indexed intent) — TODO confirm against the schema.
					code.userId = userInfo.getInt(1);
					code.userName = userInfo.getString(2);
				} else {
					code.IncorrectUsernameOrPassword = true;
				}
			}
		} catch (Exception e) {
			code.databaseError = true;
		}
		return code;
	}

	/**
	 * Registers a new user after checking username/email uniqueness and that
	 * the two password fields match.
	 *
	 * @param data registration form fields from the request body
	 * @return validation flags; {@code counter} ends at -1 on any
	 *         non-exception path (preserved from the previous revision's
	 *         debug behavior), {@code databaseError} on failure
	 */
	@RequestMapping(value = "/register", method = RequestMethod.POST)
	public @ResponseBody ValidationCodes registerTransfer(@RequestBody RegisterData data) {
		ValidationCodes code = new ValidationCodes();
		try (Connection connection = DatabaseUrl.extract().getConnection()) {
			code.UsernameTaken = existsInUsers(connection, "username", data.userName);
			code.EmailTaken = existsInUsers(connection, "email", data.email);
			if (!data.password.equals(data.confirmPassword)) {
				code.PasswordMismatch = true;
			}
			if (!code.UsernameTaken && !code.EmailTaken && !code.PasswordMismatch) {
				try (PreparedStatement insert = connection.prepareStatement(
						"INSERT INTO Users (username, email, password) VALUES (?, ?, ?)")) {
					insert.setString(1, data.userName);
					insert.setString(2, data.email);
					insert.setString(3, data.password);
					insert.execute();
				}
			}
			code.counter = -1;
		} catch (Exception e) {
			code.databaseError = true;
		}
		return code;
	}

	/**
	 * Returns true if any Users row has {@code value} in {@code column}.
	 * {@code column} is a trusted identifier supplied only by this class
	 * ("username"/"email"), never request input, so concatenating it is safe;
	 * the value itself is bound as a parameter.
	 */
	private boolean existsInUsers(Connection connection, String column, String value)
			throws SQLException {
		try (PreparedStatement stmt = connection.prepareStatement(
				"SELECT count(*) FROM Users WHERE " + column + " = ?")) {
			stmt.setString(1, value);
			try (ResultSet rs = stmt.executeQuery()) {
				return rs.next() && rs.getInt(1) != 0;
			}
		}
	}
}
package Controllers;

import Models.dbclasses.Article;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;

/**
 * Shows a single article selected by the numeric {@code id} request parameter,
 * forwarding to {@code /View/article_get.jsp}. A missing or non-numeric id
 * redirects back to the index with a "not found" message.
 *
 * Fix: {@code Integer.parseInt} on raw request input was previously
 * unguarded, so a non-numeric id produced an uncaught
 * {@link NumberFormatException} (HTTP 500) instead of the friendly redirect.
 */
public class ArticleServlet extends HttpServlet {

    @Override
    protected void doGet(HttpServletRequest req, HttpServletResponse resp)
            throws ServletException, IOException {
        String idParam = req.getParameter("id");
        if (idParam == null) {
            redirectNotFound(req, resp);
            return;
        }
        int id;
        try {
            id = Integer.parseInt(idParam.trim());
        } catch (NumberFormatException e) {
            // Garbage id: treat it the same as "no such article".
            redirectNotFound(req, resp);
            return;
        }
        Article article = Article.InitArticle(new Article(id));
        req.getSession().setAttribute("article", article);
        getServletContext().getRequestDispatcher("/View/article_get.jsp").forward(req, resp);
    }

    /** Stores the "not found" message in the session and sends the user home. */
    private void redirectNotFound(HttpServletRequest req, HttpServletResponse resp)
            throws IOException {
        req.getSession().setAttribute("messages", "There is no such article");
        resp.sendRedirect("/index.html");
    }
}
package application;

import javafx.application.Platform;
import javafx.concurrent.Task;
import javafx.fxml.FXML;
import javafx.fxml.Initializable;
import javafx.scene.control.*;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.stage.DirectoryChooser;
import org.apache.commons.net.ftp.FTPClient;
import org.apache.commons.net.ftp.FTPFile;
import org.apache.commons.net.ftp.FTPReply;

import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStream;
import java.net.InetAddress;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.time.Duration;
import java.util.Calendar;
import java.util.ResourceBundle;

/**
 * Controller for the FTP-sync window: logs into an FTP server, displays the
 * remote file tree, and downloads every file newer than a configurable age
 * limit (in days) into a user-chosen local directory.
 *
 * Fixes over the previous revision:
 * - Downloads were written to {@code outputDir.getName()} — a path relative to
 *   the process working directory — instead of the directory the user picked;
 *   now uses the absolute path.
 * - The age-limit text field was parsed with an unguarded
 *   {@code Long.parseLong}; bad input now falls back to the default.
 * - The download stream is a try-with-resources local instead of a field that
 *   leaked when {@code retrieveFile} threw.
 * - Dropped the unused {@code com.sun.org.apache.xpath...Bool} internal-API
 *   import and the raw {@code Task}/{@code TreeItem} types.
 */
public class MainController implements Initializable {

    // UI elements
    @FXML private TreeView<String> fileTreeView;
    @FXML private TextField addressTF;
    @FXML private TextField usernameTF;
    @FXML private PasswordField passwordPF;
    @FXML private Button loginBT;
    @FXML private TextArea logTA;
    @FXML private Button outputDirBT;
    @FXML private Label outputDirLB;
    @FXML private TextField fileAgeLimitTF;

    // variables
    private FTPClient client = new FTPClient();
    private InetAddress address;
    private DirectoryChooser directoryChooser = new DirectoryChooser();
    private File outputDir;                       // download destination chosen by the user
    private SimpleDateFormat ft = new SimpleDateFormat("HH:mm:ss MMM d");
    private File settingsDir;
    private long defaultDaysLimit = 6;            // default maximum file age (days) to download
    private long daysLimit;                       // current age limit, read from fileAgeLimitTF
    private boolean outputDirSelected;
    private boolean isDownloadingFiles;
    private Image dirIcon = new Image(getClass().getResourceAsStream("/icons/directory_icon.png"));

    /** Initializes the settings directory, directory chooser, age limit and file tree. */
    public void initialize(URL location, ResourceBundle resources) {
        // make the settings directory
        settingsDir = new File("settings");
        settingsDir.mkdir();

        // set up directory chooser
        directoryChooser.setTitle("Select Download Location");

        // set default age limit
        fileAgeLimitTF.setText(String.valueOf(defaultDaysLimit));
        daysLimit = defaultDaysLimit;

        // set up file tree
        TreeItem<String> rootItem = new TreeItem<>("Root: /", new ImageView(dirIcon));
        rootItem.setExpanded(true);
        fileTreeView.setRoot(rootItem);

        // set the login details to make testing faster
        // SECURITY NOTE(review): hard-coded server/account details — remove before release.
        addressTF.setText("ftp.vaultfortress.net");
        usernameTF.setText("ross@vaultfortress.net");
    } // initialize()

    /** Login button: validates the form fields, then connects to the server. */
    @FXML
    void loginButtonClick_OnAction() {
        System.out.println("Login Click");

        // clear message label
        logTA.setText("Logging in...");

        // clear the file tree
        TreeItem<String> rootItem = new TreeItem<>("Root: /", new ImageView(dirIcon));
        rootItem.setExpanded(true);
        fileTreeView.setRoot(rootItem);

        // check that server address, username and password are entered
        if (addressTF.getCharacters().length() < 3) {
            logTA.appendText("\nError, enter ftp server address.");
            return;
        }
        if (usernameTF.getCharacters().length() < 1) {
            logTA.appendText("\nError, enter Username.");
            return;
        }
        if (passwordPF.getCharacters().length() < 1) {
            logTA.appendText("\nError, enter Password.");
            return;
        }
        // make sure output directory is selected
        if (!outputDirSelected) {
            logTA.appendText("\nError, please select an output directory for downloaded files.");
            return;
        }

        // try login
        connectToServer(this.addressTF.getText(), this.usernameTF.getText(),
                this.passwordPF.getText());
    } // loginButtonClick()

    /** Opens the directory chooser and records the selected download folder. */
    @FXML
    void outputDirBT_OnAction() {
        // open output directory chooser
        outputDir = directoryChooser.showDialog(null);
        // show selected folder
        if (outputDir != null) {
            outputDirSelected = true;
            outputDirLB.setText(outputDir.getAbsolutePath());
        }
    } // outputDirBT_OnAction()

    /** Sync button: downloads qualifying files on a background thread. */
    @FXML
    void syncFilesBT_OnAction() {
        // check if logged in
        if (!client.isConnected()) {
            logTA.appendText("Error, not logged in. Cannot sync files.");
            return;
        }

        // flag as downloading
        isDownloadingFiles = true;

        // update the days limit; fall back to the default on non-numeric input
        try {
            daysLimit = Long.parseLong(fileAgeLimitTF.getText().trim());
        } catch (NumberFormatException e) {
            daysLimit = defaultDaysLimit;
            fileAgeLimitTF.setText(String.valueOf(defaultDaysLimit));
            logTA.appendText("\nInvalid age limit, using default: " + defaultDaysLimit);
        }

        // download the files, in a separate thread
        Task<Boolean> downloadTask = new Task<Boolean>() {
            protected Boolean call() throws Exception {
                Platform.runLater(() -> logTA.appendText("\nStarting to download files . . ."));
                try {
                    // sync the files
                    syncFiles(client);
                    // disconnect from the server
                    disconnectServer();
                } catch (Exception ex) {
                    Platform.runLater(() -> logTA.appendText("\nError Downloading files!"));
                    disconnectServer();
                    isDownloadingFiles = false;
                    ex.printStackTrace();
                    return false;
                } // try
                Platform.runLater(() -> logTA.appendText("\nFinished downloading files."));
                isDownloadingFiles = false;
                return true;
            } // call()
        };

        // start the thread
        new Thread(downloadTask).start();
    } // syncFilesBT_OnAction()

    /** Connects and logs into the FTP server, then discovers the remote files. */
    private void connectToServer(String serverAddress, String username, String password) {
        try {
            // create a server address and connect on the standard FTP port
            this.address = InetAddress.getByName(serverAddress);
            client.connect(address, 21);

            // try and login
            client.login(username, password);
            if (client.isConnected()) {
                System.out.print(client.getReplyString());
                if (!FTPReply.isPositiveCompletion(client.getReplyCode())) {
                    logTA.appendText("\nError: " + client.getReplyString());
                    client.disconnect();
                    return;
                }
                // passive mode avoids inbound-firewall issues on the client side
                client.enterLocalPassiveMode();

                // logged in ok
                logTA.appendText("\n" + client.getReplyString());

                // display files
                buildFileTree(fileTreeView.getRoot(), client);
            }
        } catch (Exception e) {
            System.out.println("Error: " + e.getMessage());
            logTA.appendText("\nError: " + e.getMessage());
            // disconnect the user from server
            disconnectServer();
        } // try
    } // connectToServer()

    /** Disconnects from the server, logging the outcome. */
    private void disconnectServer() {
        try {
            client.disconnect();
            logTA.appendText("\nDisconnecting.");
            System.out.println("Disconnecting");
        } catch (Exception e) {
            logTA.appendText("\nError Disconnecting.");
        } // try
    } // disconnectServer()

    /**
     * Recursively populates the tree view with the files and directories under
     * the client's current working directory. Hidden entries (leading '.') are
     * skipped. Restores the working directory before returning.
     */
    private void buildFileTree(TreeItem<String> treeNode, FTPClient client) throws Exception {
        // display the files
        FTPFile[] files = client.listFiles("", FTPFile::isFile);
        for (FTPFile file : files) {
            if (!file.getName().startsWith(".")) {
                System.out.println("File: " + file.getName());
                treeNode.getChildren().add(new TreeItem<>(
                        file.getName() + " | " + ft.format(file.getTimestamp().getTime())));
            }
        } // for

        // get the directories
        FTPFile[] directories = client.listDirectories();
        for (FTPFile dir : directories) {
            if (!dir.getName().startsWith(".")) {
                // descend into the detected directory
                client.changeWorkingDirectory(dir.getName());
                TreeItem<String> newDir = new TreeItem<>(dir.getName(), new ImageView(dirIcon));
                treeNode.getChildren().add(newDir);
                logTA.appendText("\nDiscovering Files in: " + client.printWorkingDirectory());
                System.out.println("Discovering Files in: " + client.printWorkingDirectory());
                // recursively add this directory's contents
                buildFileTree(newDir, client);
                // go back to parent directory, once finished in this directory
                client.changeToParentDirectory();
            }
        } // for
    } // buildFileTree()

    /**
     * Recursively downloads every non-hidden file younger than {@code daysLimit}
     * days into the selected output directory.
     *
     * NOTE(review): remote sub-directories are flattened into the single output
     * folder, so same-named files overwrite each other — confirm this is intended.
     */
    private void syncFiles(FTPClient client) throws Exception {
        // display the files
        FTPFile[] files = client.listFiles("", FTPFile::isFile);
        if (files.length > 0) {
            String pwd = client.printWorkingDirectory();
            System.out.println("Downloading Files in: " + client.printWorkingDirectory());
            Platform.runLater(() -> logTA.appendText("\nDownloading Files in: " + pwd));
        }
        for (FTPFile file : files) {
            if (!file.getName().startsWith(".")) {
                // get the number of days old this file is
                long daysOld = Duration.between(file.getTimestamp().toInstant(),
                        Calendar.getInstance().toInstant()).toDays();
                System.out.println("File is " + daysOld + " days old");
                // if file is not older than the limit, download it
                if (daysOld < daysLimit) {
                    System.out.println("Downloading: " + file.getName());
                    Platform.runLater(() -> logTA.appendText("\nDownloading: " + file.getName()));
                    // BUGFIX: use the chosen directory's absolute path — getName()
                    // resolved relative to the process working directory.
                    try (OutputStream outStream = new FileOutputStream(
                            outputDir.getAbsolutePath() + File.separator + file.getName())) {
                        client.retrieveFile(file.getName(), outStream);
                    }
                }
            }
        } // for

        // get the directories
        FTPFile[] directories = client.listDirectories();
        for (FTPFile dir : directories) {
            if (!dir.getName().startsWith(".")) {
                // change working directory to detected directory
                client.changeWorkingDirectory(dir.getName());
                // recursively sync that directory
                syncFiles(client);
                // go back to parent directory, once finished in this directory
                client.changeToParentDirectory();
            }
        } // for
    } // syncFiles()
} // class
package blog.engine;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.TreeSet;

import blog.DBLOGUtil;
import blog.common.Util;
import blog.io.TableWriter;
import blog.model.Evidence;
import blog.model.Model;
import blog.model.Queries;
import blog.model.Query;
import blog.sample.AfterSamplingListener;
import blog.sample.Sampler;
import blog.type.Timestep;
import blog.world.DefaultPartialWorld;

/**
 * A Particle Filter. It works by keeping a set of {@link Particles}, each
 * representing a partial world, weighted by the evidence. It uses the following
 * properties: <code>numParticles</code> or <code>numSamples</code>: number of
 * particles (default is <code>1000</code>).
 *
 * The ParticleFilter is an unusual {@link InferenceEngine} in that it takes
 * evidence and queries additional to the ones taken by
 * {@link #setEvidence(Evidence)} and {@link #setQueries(List)}. The evidence
 * set by {@link #setEvidence(Evidence)} is used in the very beginning of
 * inference (thus keeping the general InferenceEngine semantics for it) and the
 * queries set by {@link #setQueries(List)} are used by {@link #answerQueries()}
 * only (again keeping the original InferenceEngine semantics).
 */
public class ParticleFilter extends InferenceEngine {

  /**
   * Creates a new particle filter for the given BLOG model, with configuration
   * parameters specified by the given properties table.
   *
   * Recognized properties: numParticles/numSamples (particle count, default
   * 1000; it is an error to supply both with different values), idTypes
   * (default "none"), samplerClass (default blog.sample.LWSampler), and
   * queryReportInterval (timesteps between query reports, default 10).
   */
  public ParticleFilter(Model model, Properties properties) {
    super(model);

    String numParticlesStr = properties.getProperty("numParticles");
    String numSamplesStr = properties.getProperty("numSamples");
    if (numParticlesStr != null && numSamplesStr != null
        && !numParticlesStr.equals(numSamplesStr))
      Util.fatalError("ParticleFilter received both numParticles and numSamples properties with distinct values.");
    if (numParticlesStr == null)
      numParticlesStr = numSamplesStr;
    if (numParticlesStr == null)
      numParticlesStr = "1000";
    try {
      numParticles = Integer.parseInt(numParticlesStr);
    } catch (NumberFormatException e) {
      Util.fatalErrorWithoutStack("Invalid number of particles: "
          + numParticlesStr); // do not dump stack.
    }

    String idTypesString = properties.getProperty("idTypes", "none");
    idTypes = model.getListedTypes(idTypesString);
    if (idTypes == null) {
      Util.fatalErrorWithoutStack("Fatal error: invalid idTypes list.");
    }

    String samplerClassName = properties.getProperty("samplerClass",
        "blog.sample.LWSampler");
    System.out.println("Constructing sampler of class " + samplerClassName);
    particleSampler = Sampler.make(samplerClassName, model, properties);

    String queryReportIntervalStr = properties.getProperty(
        "queryReportInterval", "10");
    try {
      queryReportInterval = Integer.parseInt(queryReportIntervalStr);
    } catch (NumberFormatException e) {
      Util.fatalError("Invalid reporting interval: " + queryReportIntervalStr,
          false);
    }

    dataLogLik = 0;
  }

  /** Answers the queries provided at construction time. */
  public void answerQueries() {
    if (Util.verbose()) {
      System.out.println("Evidence: " + evidence);
      System.out.println("Query: " + queries);
    }
    System.out.println("Report every: " + queryReportInterval + " timesteps");
    reset();
    takeEvidenceAndAnswerQuery();
    System.out.println("Log likelihood of data: " + dataLogLik);
  }

  /** Re-creates the particle population (each with a fresh partial world). */
  private void reset() {
    System.out.println("Using " + numParticles + " particles...");
    if (evidence == null) {
      evidence = new Evidence(model);
    }
    if (queries == null) {
      queries = new Queries(model);
    }
    particles = new ArrayList<Particle>();
    for (int i = 0; i < numParticles; i++) {
      Particle newParticle = makeParticle(idTypes);
      particles.add(newParticle);
    }
    needsToBeResampledBeforeFurtherSampling = false;
  }

  /**
   * Processes evidence and queries in timestep order: atemporal evidence
   * first, then each timestep's evidence and queries in lockstep (reporting
   * query results every queryReportInterval slices and pruning variables from
   * earlier timesteps), and finally any atemporal queries.
   */
  private void takeEvidenceAndAnswerQuery() {
    // Split evidence and queries according to the timestep it occurs in.
    Map<Timestep, Evidence> slicedEvidence = DBLOGUtil
        .splitEvidenceInTime(evidence);
    Map<Timestep, Queries> slicedQueries = DBLOGUtil
        .splitQueriesInTime(queries);

    // Process atemporal evidence (if any) before everything else.
    if (slicedEvidence.containsKey(null)) {
      take(slicedEvidence.get(null));
    }

    // Process temporal evidence and queries in lockstep.
    List<Timestep> nonNullTimesteps = new ArrayList<Timestep>();
    nonNullTimesteps.addAll(slicedEvidence.keySet());
    nonNullTimesteps.addAll(slicedQueries.keySet());
    nonNullTimesteps.removeAll(Collections.singleton(null));
    // We use a TreeSet to remove duplicates and to sort the timesteps.
    // (We can't construct a TreeSet directly because it doesn't accept nulls.)
    TreeSet<Timestep> sortedTimesteps = new TreeSet<Timestep>(nonNullTimesteps);
    int querySlicesProcessed = 0;
    for (Timestep timestep : sortedTimesteps) {
      if (slicedEvidence.containsKey(timestep)) {
        take(slicedEvidence.get(timestep));
      }
      if (slicedQueries.containsKey(timestep)) {
        List<Query> currentQueries = slicedQueries.get(timestep);
        for (Particle particle : particles) {
          particle.answer(currentQueries);
        }
        querySlicesProcessed++;
        if (querySlicesProcessed % queryReportInterval == 0) {
          TableWriter tableWriter = new TableWriter(currentQueries);
          tableWriter.setHeader("After timestep " + timestep.intValue());
          tableWriter.writeResults(System.out);
        }
      }
      removePriorTimeSlice(timestep);
    }

    // Process atemporal queries (if any) after all the evidence.
    if (slicedQueries.containsKey(null)) {
      List<Query> currentQueries = slicedQueries.get(null);
      for (Particle particle : particles) {
        particle.answer(currentQueries);
      }
    }
  }

  /**
   * A method making a particle (by default, {@link Particle}). Useful for
   * extensions using specialized particles (don't forget to specialize
   * {@link Particle#copy()} for it to return an object of its own class).
   */
  protected Particle makeParticle(Set idTypes) {
    DefaultPartialWorld world = new DefaultPartialWorld(idTypes);
    return new Particle(particleSampler, world);
  }

  /**
   * remove all the temporal variables prior to the specified timestep
   *
   * @param timestep
   *          Timestep before which the vars should be removed
   */
  public void removePriorTimeSlice(Timestep timestep) {
    for (Particle p : particles) {
      p.removePriorTimeSlice(timestep);
    }
  }

  /**
   * Takes more evidence: resamples if needed, lets every particle absorb the
   * evidence (firing the before/after handlers), drops particles whose weight
   * is negligible, and accumulates the data log-likelihood.
   *
   * @throws IllegalArgumentException if every particle ends with zero weight
   */
  public void take(Evidence evidence) {
    if (evidence.isEmpty()) {
      return;
    }
    if (needsToBeResampledBeforeFurtherSampling) {
      resample();
    }

    if (beforeTakesEvidence != null)
      beforeTakesEvidence.evaluate(evidence, this);

    for (Particle p : particles) {
      if (beforeParticleTakesEvidence != null)
        beforeParticleTakesEvidence.evaluate(p, evidence, this);
      p.take(evidence);
      if (afterParticleTakesEvidence != null)
        afterParticleTakesEvidence.evaluate(p, evidence, this);
    }

    // Drop negligible-weight particles; log-sum the weights of the survivors.
    double logSumWeights = Double.NEGATIVE_INFINITY;
    ListIterator<Particle> particleIt = particles.listIterator();
    while (particleIt.hasNext()) {
      Particle particle = particleIt.next();
      if (particle.getLatestLogWeight() < Sampler.NEGLIGIBLE_LOG_WEIGHT) {
        particleIt.remove();
      } else {
        logSumWeights = Util.logSum(logSumWeights,
            particle.getLatestLogWeight());
      }
    }

    if (particles.isEmpty())
      throw new IllegalArgumentException("All particles have zero weight");
    dataLogLik += logSumWeights;

    needsToBeResampledBeforeFurtherSampling = true;

    if (afterTakesEvidence != null)
      afterTakesEvidence.evaluate(evidence, this);
  }

  /**
   * Systematic (low-variance) resampling: weights are exponentiated relative
   * to the max log-weight for numerical stability, cumulated, and sampled with
   * evenly spaced keys; a particle is copied only when selected more than once.
   *
   * @throws IllegalArgumentException if every particle has zero weight
   */
  protected void resample() {
    double[] logWeights = new double[particles.size()];
    boolean[] alreadySampled = new boolean[particles.size()];
    double maxLogWeight = Double.NEGATIVE_INFINITY;
    double sumWeights = 0;
    double[] normalizedWeights = new double[particles.size()];
    List<Particle> newParticles = new ArrayList<Particle>();

    for (int i = 0; i < particles.size(); i++) {
      logWeights[i] = particles.get(i).getLatestLogWeight();
      maxLogWeight = Math.max(maxLogWeight, logWeights[i]);
    }

    if (maxLogWeight == Double.NEGATIVE_INFINITY) {
      throw new IllegalArgumentException("All particles have zero weight");
    }

    // Build the cumulative distribution of (rescaled) weights.
    for (int i = 0; i < particles.size(); i++) {
      normalizedWeights[i] = Math.exp(logWeights[i] - maxLogWeight);
      if (i > 0)
        normalizedWeights[i] += normalizedWeights[i - 1];
    }
    sumWeights = normalizedWeights[particles.size() - 1];

    // One evenly spaced sample key per new particle (systematic resampling).
    double ratio = sumWeights / numParticles;
    double basis = 0, sampleKey;
    int selection = 0;
    for (int i = 0; i < numParticles; i++) {
      sampleKey = basis + Util.random() * ratio;
      basis += ratio;
      while (normalizedWeights[selection] < sampleKey)
        ++selection;
      if (!alreadySampled[selection]) {
        newParticles.add(particles.get(selection));
        alreadySampled[selection] = true;
      } else {
        // Selected again: copy so the two particles evolve independently.
        newParticles.add(particles.get(selection).copy());
      }
    }
    particles = newParticles;
  }

  // PARTICLE TAKES EVIDENCE EVENT HANDLING

  /**
   * An interface specifying handlers for before and after a particle takes
   * evidence.
   */
  public static interface ParticleTakesEvidenceHandler {
    public void evaluate(Particle particle, Evidence evidence,
        ParticleFilter particleFilter);
  }

  /**
   * The {@link ParticleTakesEvidenceHandler} invoked right before a particle
   * takes evidence.
   */
  public ParticleTakesEvidenceHandler beforeParticleTakesEvidence;

  /**
   * The {@link ParticleTakesEvidenceHandler} invoked right after a particle
   * takes evidence.
   */
  public ParticleTakesEvidenceHandler afterParticleTakesEvidence;

  // FILTER TAKES EVIDENCE EVENT HANDLING

  /**
   * An interface specifying handlers for before and after the particle filter
   * takes evidence.
   */
  public static interface TakesEvidenceHandler {
    public void evaluate(Evidence evidence, ParticleFilter particleFilter);
  }

  /**
   * The {@link TakesEvidenceHandler} invoked right before a particle takes
   * evidence.
   */
  public TakesEvidenceHandler beforeTakesEvidence;

  /**
   * The {@link TakesEvidenceHandler} invoked right after a particle takes
   * evidence.
   */
  public TakesEvidenceHandler afterTakesEvidence;

  // END OF EVENT HANDLING

  public AfterSamplingListener getAfterSamplingListener() {
    return afterSamplingListener;
  }

  public void setAfterSamplingListener(
      AfterSamplingListener afterSamplingListener) {
    this.afterSamplingListener = afterSamplingListener;
    particleSampler.afterSamplingListener = afterSamplingListener;
  }

  private Set idTypes; // of Type
  private int numParticles;
  protected List<Particle> particles;
  private boolean needsToBeResampledBeforeFurtherSampling = false;
  private Sampler particleSampler;
  private AfterSamplingListener afterSamplingListener;
  private int queryReportInterval;
  private double dataLogLik; // log likelihood of the data
}
package org.purl.wf4ever.provtaverna.export; import info.aduna.lang.service.ServiceRegistry; import java.io.BufferedOutputStream; import java.io.File; import java.io.IOException; import java.math.BigDecimal; import java.net.URI; import java.sql.Timestamp; import java.util.Arrays; import java.util.Calendar; import java.util.Collections; import java.util.GregorianCalendar; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.ServiceLoader; import java.util.UUID; import javax.xml.datatype.DatatypeConfigurationException; import javax.xml.datatype.DatatypeFactory; import javax.xml.datatype.XMLGregorianCalendar; import net.sf.taverna.raven.appconfig.ApplicationConfig; import net.sf.taverna.t2.provenance.api.ProvenanceAccess; import net.sf.taverna.t2.provenance.lineageservice.URIGenerator; import net.sf.taverna.t2.provenance.lineageservice.utils.DataflowInvocation; import net.sf.taverna.t2.provenance.lineageservice.utils.Port; import net.sf.taverna.t2.provenance.lineageservice.utils.ProcessorEnactment; import net.sf.taverna.t2.provenance.lineageservice.utils.ProvenanceProcessor; import net.sf.taverna.t2.reference.ErrorDocument; import net.sf.taverna.t2.reference.IdentifiedList; import net.sf.taverna.t2.reference.StackTraceElementBean; import net.sf.taverna.t2.reference.T2Reference; import net.sf.taverna.t2.reference.T2ReferenceType; import org.apache.commons.io.output.FileWriterWithEncoding; import org.apache.log4j.Logger; import org.openrdf.OpenRDFException; import org.openrdf.query.parser.QueryParserRegistry; import org.openrdf.query.parser.sparql.SPARQLParserFactory; import org.openrdf.repository.Repository; import org.openrdf.repository.RepositoryException; import org.openrdf.repository.contextaware.ContextAwareConnection; import org.openrdf.repository.object.ObjectConnection; import org.openrdf.repository.object.ObjectFactory; import org.openrdf.repository.object.ObjectRepository; import 
org.openrdf.repository.object.config.ObjectRepositoryFactory; import org.openrdf.repository.sail.SailRepository; import org.openrdf.rio.RDFHandlerException; import org.openrdf.sail.memory.MemoryStore; import prov.Activity; import prov.Agent; import prov.Association; import prov.AssociationOrEndOrGenerationOrInvalidationOrStartOrUsage; import prov.Bundle; import prov.Collection; import prov.Entity; import prov.Generation; import prov.Plan; import prov.Role; import prov.Usage; import tavernaprov.Content; import tavernaprov.Error; import uk.org.taverna.scufl2.api.common.URITools; import wfprov.Artifact; public class W3ProvenanceExport { private URITools uriTools = new URITools(); private static Logger logger = Logger.getLogger(W3ProvenanceExport.class); protected Map<T2Reference, File> seenReferences = new HashMap<T2Reference, File>(); private static final int NANOSCALE = 9; private ProvenanceAccess provenanceAccess; private DatatypeFactory datatypeFactory; private ProvenanceURIGenerator uriGenerator = new ProvenanceURIGenerator(); private String workflowRunId; private Map<File, T2Reference> fileToT2Reference = Collections.emptyMap(); private File baseFolder; private File intermediatesDirectory; private Saver saver; private ObjectRepository objRepo; private ObjectConnection objCon; private ObjectFactory objFact; private Map<String, Entity> describedEntities = new HashMap<String, Entity>(); public File getIntermediatesDirectory() { return intermediatesDirectory; } public File getBaseFolder() { return baseFolder; } public Map<File, T2Reference> getFileToT2Reference() { return fileToT2Reference; } protected void makeObjectRepository() throws OpenRDFException { Repository myRepository = new SailRepository(new MemoryStore()); myRepository.initialize(); ObjectRepositoryFactory factory = new ObjectRepositoryFactory(); objRepo = factory.createRepository(myRepository); objCon = objRepo.getConnection(); objFact = objCon.getObjectFactory(); } protected void 
initializeRegistries() { // Thanks to info.aduna.lang.service.ServiceRegistry for passing down // the classloader // of the interface (!!) rather than the current thread's context class // loader, we'll // have to do this ourself for these registries to work within Raven or // OSGi. // These are all the subclasses of // info.aduna.lang.service.ServiceRegistry<String, SailFactory> // as far as Eclipse could find.. /* * For some reason this fails with: ERROR 2012-07-04 16:06:22,830 * (net.sf.taverna.t2.workbench.ui.impl.Workbench:115) - Uncaught * exception in Thread[SaveAllResults: Saving results to * /home/stain/Desktop/popopopo.prov.ttl,6,main] java.lang.VerifyError: * (class: no/s11/w3/prov/taverna/ui/W3ProvenanceExport, method: * initializeRegistries signature: ()V) Incompatible argument to * function at * org.purl.wf4ever.provtaverna.export.SaveProvAction.saveData * (SaveProvAction.java:65) at * net.sf.taverna.t2.workbench.views.results. * saveactions.SaveAllResultsSPI$2.run(SaveAllResultsSPI.java:177) * * * or with java -noverify (..) * * ERROR 2012-07-04 16:28:47,814 * (net.sf.taverna.t2.workbench.ui.impl.Workbench:115) - Uncaught * exception in Thread[SaveAllResults: Saving results to * /home/stain/Desktop/ppp.prov.ttl,6,main] * java.lang.AbstractMethodError: * info.aduna.lang.service.ServiceRegistry * .add(Ljava/lang/Object;)Ljava/lang/Object; at * org.purl.wf4ever.provtaverna * .export.W3ProvenanceExport.repopulateRegistry * (W3ProvenanceExport.java:132) at * org.purl.wf4ever.provtaverna.export.W3ProvenanceExport * .initializeRegistries(W3ProvenanceExport.java:111) at * org.purl.wf4ever * .provtaverna.export.W3ProvenanceExport.<init>(W3ProvenanceExport * .java:162) at * org.purl.wf4ever.provtaverna.export.SaveProvAction.saveData * (SaveProvAction.java:65) at * net.sf.taverna.t2.workbench.views.results. 
* saveactions.SaveAllResultsSPI$2.run(SaveAllResultsSPI.java:177) */ // repopulateRegistry(BooleanQueryResultParserRegistry.getInstance(), // BooleanQueryResultParserFactory.class); // repopulateRegistry(BooleanQueryResultWriterRegistry.getInstance(), // BooleanQueryResultWriterFactory.class); // repopulateRegistry(RDFParserRegistry.getInstance(), // RDFParserFactory.class); // repopulateRegistry(RDFWriterRegistry.getInstance(), // RDFWriterFactory.class); // repopulateRegistry(TupleQueryResultParserRegistry.getInstance(), // TupleQueryResultParserFactory.class); // repopulateRegistry(TupleQueryResultWriterRegistry.getInstance(), // TupleQueryResultWriterFactory.class); // repopulateRegistry(FunctionRegistry.getInstance(), Function.class); // repopulateRegistry(QueryParserRegistry.getInstance(), // QueryParserFactory.class); // repopulateRegistry(RepositoryRegistry.getInstance(), // RepositoryFactory.class); // repopulateRegistry(SailRegistry.getInstance(), SailFactory.class); /* So instead we just do a silly, minimal workaround for what we need */ QueryParserRegistry.getInstance().add(new SPARQLParserFactory()); } protected <I> void repopulateRegistry(ServiceRegistry<?, I> registry, Class<I> spi) { ClassLoader cl = classLoaderForServiceLoader(spi); logger.info("Selected classloader " + cl + " for registry of " + spi); for (I service : ServiceLoader.load(spi, cl)) { registry.add(service); } } private ClassLoader classLoaderForServiceLoader(Class<?> mustHave) { List<ClassLoader> possibles = Arrays.asList(Thread.currentThread() .getContextClassLoader(), getClass().getClassLoader(), mustHave .getClassLoader()); for (ClassLoader cl : possibles) { if (cl == null) { continue; } try { if (cl.loadClass(mustHave.getCanonicalName()) == mustHave) { return cl; } } catch (ClassNotFoundException e) { } } // Final fall-back, the old.. 
return ClassLoader.getSystemClassLoader(); } public W3ProvenanceExport(ProvenanceAccess provenanceAccess, String workflowRunId, Saver saver) { this.saver = saver; this.setWorkflowRunId(workflowRunId); this.setProvenanceAccess(provenanceAccess); initializeRegistries(); try { makeObjectRepository(); } catch (OpenRDFException e) { throw new IllegalStateException("Could not make object repository", e); } try { datatypeFactory = DatatypeFactory.newInstance(); } catch (DatatypeConfigurationException e) { throw new IllegalStateException( "Can't find a DatatypeFactory implementation", e); } } private final class ProvenanceURIGenerator extends URIGenerator { // Make URIs match with Scufl2 @Override public String makeWorkflowURI(String workflowID) { return makeWorkflowBundleURI(workflowRunId) + "workflow/" + provenanceAccess.getWorkflowNameByWorkflowID(workflowID) + "/"; } public String makeWorkflowBundleURI(String workflowRunId) { return "http://ns.taverna.org.uk/2010/workflowBundle/" + provenanceAccess.getTopLevelWorkflowID(workflowRunId) + "/"; } public String makePortURI(String wfId, String pName, String vName, boolean inputPort) { String base; if (pName == null) { base = makeWorkflowURI(wfId); } else { base = makeProcessorURI(pName, wfId); } return base + (inputPort ? "in/" : "out/") + escape(vName); } public String makeDataflowInvocationURI(String workflowRunId, String dataflowInvocationId) { return makeWFInstanceURI(workflowRunId) + "workflow/" + dataflowInvocationId + "/"; } public String makeProcessExecution(String workflowRunId, String processEnactmentId) { return makeWFInstanceURI(workflowRunId) + "process/" + processEnactmentId + "/"; } } enum Direction { INPUTS("in"), OUTPUTS("out"); private final String path; Direction(String path) { this.path = path; } public String getPath() { return path; } } public void exportAsW3Prov(BufferedOutputStream outStream) throws RepositoryException, RDFHandlerException, IOException { // TODO: Make this thread safe using contexts? 
objCon.clear(); GregorianCalendar startedProvExportAt = new GregorianCalendar(); String runURI = uriGenerator.makeWFInstanceURI(getWorkflowRunId()); // FIXME: Should this be "" to indicate the current file? // FIXME: Should this not be an Account instead? Bundle bundle = objFact.createObject(runURI + "bundle", Bundle.class); objCon.addObject(bundle); // Mini-provenance about this provenance trace. Unkown URI for // agent/activity Agent tavernaAgent = createObject(Agent.class); Activity storeProvenance = createObject(Activity.class); storeProvenance.getProvStartedAtTime().add( datatypeFactory.newXMLGregorianCalendar(startedProvExportAt)); storeProvenance.getProvWasAssociatedWith().add(tavernaAgent); // The agent is an execution of the Taverna software (e.g. also an // Activity) String versionName = ApplicationConfig.getInstance().getName(); // Qualify it to add the plan Association association = createObject(Association.class); association.getProvAgents_1().add(tavernaAgent); storeProvenance.getProvQualifiedAssociations().add(association); association.getProvHadPlans().add( objFact.createObject("http://ns.taverna.org.uk/2011/software/" + versionName, Plan.class)); bundle.getProvWasGeneratedBy().add(storeProvenance); // The store-provenance-process used the workflow run as input storeProvenance.getProvWasInformedBy().add( objFact.createObject(runURI, Activity.class)); Activity wfProcess = objFact.createObject(runURI, Activity.class); storeProvenance.getProvWasInformedBy().add(wfProcess); DataflowInvocation dataflowInvocation = provenanceAccess .getDataflowInvocation(getWorkflowRunId()); wfProcess.getProvWasAssociatedWith().add(tavernaAgent); association = createObject(Association.class); association.getProvAgents_1().add(tavernaAgent); wfProcess.getProvQualifiedAssociations().add(association); String wfUri = uriGenerator.makeWorkflowURI(dataflowInvocation .getWorkflowId()); // TODO: Also make the recipe a Scufl2 Workflow? 
Plan plan = objFact.createObject(wfUri, Plan.class); association.getProvHadPlans().add(plan); wfProcess.getProvStartedAtTime().add( timestampToXmlGreg(dataflowInvocation.getInvocationStarted())); wfProcess.getProvEndedAtTime().add( timestampToXmlGreg(dataflowInvocation.getInvocationEnded())); // Workflow inputs and outputs storeEntitities(dataflowInvocation.getInputsDataBindingId(), wfProcess, Direction.INPUTS, getIntermediatesDirectory()); // FIXME: These entities come out as "generated" by multiple processes storeEntitities(dataflowInvocation.getOutputsDataBindingId(), wfProcess, Direction.OUTPUTS, getIntermediatesDirectory()); List<ProcessorEnactment> processorEnactments = provenanceAccess .getProcessorEnactments(getWorkflowRunId()); // This will also include processor enactments in nested workflows for (ProcessorEnactment pe : processorEnactments) { String parentURI = pe.getParentProcessorEnactmentId(); if (parentURI == null) { // Top-level workflow parentURI = runURI; } else { // inside nested wf - this will be parent processenactment parentURI = uriGenerator.makeProcessExecution( pe.getWorkflowRunId(), pe.getProcessEnactmentId()); } String processURI = uriGenerator.makeProcessExecution( pe.getWorkflowRunId(), pe.getProcessEnactmentId()); Activity process = objFact.createObject(processURI, Activity.class); Activity parentProcess = objFact.createObject(parentURI, Activity.class); process.getProvWasInformedBy().add(parentProcess); process.getProvStartedAtTime().add( timestampToXmlGreg(pe.getEnactmentStarted())); process.getProvEndedAtTime().add( timestampToXmlGreg(pe.getEnactmentEnded())); // TODO: Linking to the processor in the workflow definition? 
ProvenanceProcessor provenanceProcessor = provenanceAccess .getProvenanceProcessor(pe.getProcessorId()); String processorURI = uriGenerator.makeProcessorURI( provenanceProcessor.getProcessorName(), provenanceProcessor.getWorkflowId()); // TODO: Also make the plan a Scufl2 Processor association = createObject(Association.class); process.getProvQualifiedAssociations().add(association); association.getProvAgents_1().add(tavernaAgent); plan = objFact.createObject(processorURI, Plan.class); association.getProvHadPlans().add(plan); // TODO: How to link together iterations on a single processor and // the collections // they are iterating over and creating? // Need 'virtual' ProcessExecution for iteration? // TODO: Activity/service details from definition? File path = getIntermediatesDirectory(); // Inputs and outputs storeEntitities(pe.getInitialInputsDataBindingId(), process, Direction.INPUTS, path); storeEntitities(pe.getFinalOutputsDataBindingId(), process, Direction.OUTPUTS, path); } storeFileReferences(); GregorianCalendar endedProvExportAt = new GregorianCalendar(); storeProvenance.getProvEndedAtTime().add( datatypeFactory.newXMLGregorianCalendar(endedProvExportAt)); // Save the whole thing ContextAwareConnection connection = objCon; connection.setNamespace("scufl2", "http://ns.taverna.org.uk/2010/scufl2 connection.setNamespace("prov", "http://www.w3.org/ns/prov connection.setNamespace("wfprov", "http://purl.org/wf4ever/wfprov connection.setNamespace("wfdesc", "http://purl.org/wf4ever/wfdesc connection.setNamespace("tavernaprov", "http://ns.taverna.org.uk/2012/tavernaprov/"); connection.setNamespace("owl", "http://www.w3.org/2002/07/owl connection.export(new TurtleWriterWithBase(outStream, getBaseFolder() .toURI())); } private <T> T createObject(Class<T> type) throws RepositoryException { T obj = objCon.addDesignation(objFact.createObject(), type); // A refresh to force set initialization objCon.getObject(objCon.addObject(obj)); return obj; } protected 
XMLGregorianCalendar timestampToXmlGreg( Timestamp invocationStarted) { GregorianCalendar cal = new GregorianCalendar(); cal.setTime(invocationStarted); XMLGregorianCalendar xmlCal = datatypeFactory .newXMLGregorianCalendar(cal); // Chop of the trailing 0-s of non-precission xmlCal.setFractionalSecond(BigDecimal.valueOf( invocationStarted.getNanos() / 1000000, NANOSCALE - 6)); return xmlCal; } protected void storeFileReferences() throws RepositoryException { for (Entry<File, T2Reference> entry : getFileToT2Reference().entrySet()) { File file = entry.getKey(); T2Reference t2Ref = entry.getValue(); String dataURI = uriGenerator.makeT2ReferenceURI(t2Ref.toUri() .toASCIIString()); Entity entity = objFact.createObject(dataURI, Entity.class); Content content = objFact.createObject( file.toURI().toASCIIString(), Content.class); objCon.addDesignation(entity, Artifact.class) .getTavernaprovContents().add(content); } } protected void storeEntitities(String dataBindingId, Activity activity, Direction direction, File path) throws IOException, RepositoryException { Map<Port, T2Reference> bindings = provenanceAccess .getDataBindings(dataBindingId); for (Entry<Port, T2Reference> binding : bindings.entrySet()) { Port port = binding.getKey(); T2Reference t2Ref = binding.getValue(); Entity entity = describeEntity(t2Ref); if (!seenReference(t2Ref)) { saveReference(t2Ref); } String id = t2Ref.getLocalPart(); String prefix = id.substring(0, 2); if (direction == Direction.INPUTS) { activity.getProvUsed().add(entity); } else { entity.getProvWasGeneratedBy().add(activity); } AssociationOrEndOrGenerationOrInvalidationOrStartOrUsage involvement; if (direction == Direction.INPUTS) { Usage usage = createObject(Usage.class); involvement = usage; activity.getProvQualifiedUsages().add(usage); usage.getProvEntities_1().add(entity); } else { Generation generation = createObject(Generation.class); involvement = generation; entity.getProvQualifiedGenerations().add(generation); 
generation.getProvActivities_1().add(activity); } String processerName = null; if (port.getProcessorId() != null) { // Not a workflow port ProvenanceProcessor p = provenanceAccess .getProvenanceProcessor(port.getProcessorId()); processerName = p.getProcessorName(); } port.getProcessorId(); String portURI = uriGenerator.makePortURI(port.getWorkflowId(), processerName, port.getPortName(), port.isInputPort()); Role portRole = objFact.createObject(portURI, Role.class); involvement.getProvHadRoles().add(portRole); } } protected Entity describeEntity(T2Reference t2Ref) throws RepositoryException, IOException { String dataURI = uriGenerator.makeT2ReferenceURI(t2Ref.toUri() .toASCIIString()); Entity entity = describedEntities.get(dataURI); if (entity != null) { return entity; } entity = objFact.createObject(dataURI, Entity.class); describedEntities.put(dataURI, entity); if (t2Ref.getReferenceType() == T2ReferenceType.ErrorDocument) { tavernaprov.Error error = objFact.createObject(dataURI, tavernaprov.Error.class); ErrorDocument errorDoc = saver.getReferenceService() .getErrorDocumentService().getError(t2Ref); addMessageIfNonEmpty(error, errorDoc.getMessage()); // getExceptionMEssage added by addStackTrace addStackTrace(error, errorDoc); } else if (t2Ref.getReferenceType() == T2ReferenceType.IdentifiedList) { IdentifiedList<T2Reference> list = saver.getReferenceService() .getListService().getList(t2Ref); Collection coll = objFact.createObject(dataURI, Collection.class); for (T2Reference ref : list) { String itemURI = uriGenerator.makeT2ReferenceURI(ref.toUri() .toASCIIString()); coll.getProvHadMembers().add( objFact.createObject(itemURI, Entity.class)); describeEntity(ref); // TODO: Record list position as well! 
} } return entity; } private boolean seenReference(T2Reference t2Ref) { return seenReferences.containsKey(t2Ref); } private File saveReference(T2Reference t2Ref) throws IOException, RepositoryException { // Avoid double-saving File f = seenReferences.get(t2Ref); if (f != null) { return f; } File file = referencePath(t2Ref); File parent = file.getParentFile(); parent.mkdirs(); if (t2Ref.getReferenceType() == T2ReferenceType.IdentifiedList) { // Write a kind of text/uri-list (but with relative URIs) IdentifiedList<T2Reference> list = saver.getReferenceService() .getListService().getList(t2Ref); file = new File(file.getParentFile(), file.getName() + ".list"); FileWriterWithEncoding writer = new FileWriterWithEncoding(file, "utf-8"); for (T2Reference ref : list) { File refFile = saveReference(ref).getAbsoluteFile(); URI relRef = uriTools.relativePath(parent.getAbsoluteFile() .toURI(), refFile.getAbsoluteFile().toURI()); writer.append(relRef.toASCIIString() + "\n"); } writer.close(); } else { String extension = ""; if (t2Ref.getReferenceType() == T2ReferenceType.ErrorDocument) { extension = ".err"; } // Capture filename with extension file = saver.writeDataObject(parent, file.getName(), t2Ref, extension); // FIXME: The above will save the same reference every time! 
} seenReference(t2Ref, file); return file; } protected void addStackTrace(Error error, ErrorDocument errorDoc) throws RepositoryException, IOException { StringBuffer sb = new StringBuffer(); addStackTrace(sb, errorDoc); if (sb.length() > 0) { error.getTavernaprovStackTrace().add(sb.toString()); } for (T2Reference errRef : errorDoc.getErrorReferences()) { String errorURI = uriGenerator.makeT2ReferenceURI(errRef.toUri() .toASCIIString()); tavernaprov.Error nested = objFact.createObject(errorURI, tavernaprov.Error.class); Entity errEntity = objCon.addDesignation(error, Entity.class); errEntity.getProvWasDerivedFrom().add( objCon.addDesignation(nested, Entity.class)); describeEntity(errRef); } } protected void addStackTrace(StringBuffer sb, ErrorDocument errorDoc) { if (errorDoc.getExceptionMessage() != null && !errorDoc.getExceptionMessage().isEmpty()) { sb.append(errorDoc.getExceptionMessage()); sb.append("\n"); } if (errorDoc.getStackTraceStrings() == null) { return; } if (sb.length() == 0) { sb.append("Stack trace:\n"); } // Attempt to recreate Java stacktrace style for (StackTraceElementBean trace : errorDoc.getStackTraceStrings()) { sb.append(" at "); sb.append(trace.getClassName()); sb.append("."); sb.append(trace.getMethodName()); sb.append("("); sb.append(trace.getFileName()); sb.append(":"); sb.append(trace.getLineNumber()); sb.append(")"); sb.append("\n"); } } protected void addMessageIfNonEmpty(Error error, String message) { if (message == null || message.isEmpty()) { return; } error.getTavernaprovErrorMessage().add(message); } private File referencePath(T2Reference t2Ref) { String local = t2Ref.getLocalPart(); try { local = UUID.fromString(local).toString(); } catch (IllegalArgumentException ex) { // Fallback - use full namespace/localpart return new File(new File(getIntermediatesDirectory(), t2Ref.getNamespacePart()), t2Ref.getLocalPart()); } return new File(new File(getIntermediatesDirectory(), local.substring( 0, 2)), local); } private boolean 
seenReference(T2Reference t2Ref, File file) { getFileToT2Reference().put(file, t2Ref); if (seenReference(t2Ref)) { return true; } return seenReferences.put(t2Ref, file) != null; } public ProvenanceAccess getProvenanceAccess() { return provenanceAccess; } public void setProvenanceAccess(ProvenanceAccess provenanceAccess) { this.provenanceAccess = provenanceAccess; } public String getWorkflowRunId() { return workflowRunId; } public void setWorkflowRunId(String workflowRunId) { this.workflowRunId = workflowRunId; } public void setFileToT2Reference(Map<File, T2Reference> fileToT2Reference) { this.fileToT2Reference = new HashMap<File, T2Reference>(); for (Entry<File, T2Reference> entry : fileToT2Reference.entrySet()) { seenReference(entry.getValue(), entry.getKey()); } } public void setBaseFolder(File baseFolder) { this.baseFolder = baseFolder; } public void setIntermediatesDirectory(File intermediatesDirectory) { this.intermediatesDirectory = intermediatesDirectory; } }
package br.com.dbsoft.util;

import java.sql.Connection;
import java.sql.Date;
import java.sql.Time;
import java.sql.Timestamp;
import java.text.DateFormat;
import java.text.DateFormatSymbols;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.GregorianCalendar;
import java.util.Locale;
import java.util.concurrent.TimeUnit;

import javax.xml.datatype.DatatypeConfigurationException;
import javax.xml.datatype.DatatypeFactory;
import javax.xml.datatype.XMLGregorianCalendar;

import org.apache.log4j.Logger;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.Days;
import org.joda.time.LocalDate;
import org.joda.time.LocalDateTime;
import org.joda.time.Seconds;

import br.com.dbsoft.core.DBSSDK;
import br.com.dbsoft.error.DBSIOException;
import br.com.dbsoft.io.DBSDAO;

/**
 * Date/time conversion utilities built around java.sql types and Joda-Time.
 */
public class DBSDate{

	/** Billing/scheduling periodicity, with a display name and numeric code. */
	public enum PERIODICIDADE{
		DIARIA 		("Diária", 1),
		MENSAL 		("Mensal", 2),
		ANUAL 		("Anual", 3),
		IRREGULAR 	("Irregular",4),
		EMISSAO 	("Emissão", 5),
		VENCIMENTO 	("Vencimento", 6);

		// Converts an arbitrary object to the enum via its numeric code.
		public static PERIODICIDADE get(Object pCode) {
			Integer xI = DBSNumber.toInteger(pCode, null);
			return get(xI);
		}

		// Maps a numeric code back to the enum constant; null if unknown.
		public static PERIODICIDADE get(Integer pCode) {
			if (pCode == null){
				return null;
			}
			switch (pCode) {
			case 1:
				return PERIODICIDADE.DIARIA;
			case 2:
				return PERIODICIDADE.MENSAL;
			case 3:
				return PERIODICIDADE.ANUAL;
			case 4:
				return PERIODICIDADE.IRREGULAR;
			case 5:
				return PERIODICIDADE.EMISSAO;
			case 6:
				return PERIODICIDADE.VENCIMENTO;
			}
			return null;
		}

		private String wName; // display name
		private int wCode;    // numeric code persisted/parsed by get()

		private PERIODICIDADE(String pName, int pCode) {
			this.wName = pName;
			this.wCode = pCode;
		}

		public String getName() {
			return wName;
		}

		public int getCode() {
			return wCode;
		}
	}

	/** Base-date selection strategies (used elsewhere in this class). */
	public enum BASE{
		PADRAO,
		D_MENOS_UM,
		PRIMEIRO_DIA_DO_MES,
		PRIMEIRO_DIA_DO_ANO,
		PRIMEIRO_DIA_UTIL_DO_MES,
		PRIMEIRO_DIA_UTIL_DO_ANO,
		DIA_UTIL,
		PRIMEIRO_DIA_DO_MES_ANTERIOR;
	}

	protected static Logger wLogger = Logger.getLogger(DBSDate.class);

	//
	//## Public Methods
	//

	/**
	 * Returns the current date and time as a Joda {@link DateTime}.
	 * @return current moment
	 */
	public static DateTime getNowDateTime() {
		return new org.joda.time.DateTime();
	}

	/**
	 * Returns today's date as a {@link java.sql.Date}.
	 * @return today's date, or null if the clock could not be read
	 */
	public static Date getNowDate() {
		try{
			Calendar xCurrentTime = Calendar.getInstance();
			return new Date((xCurrentTime.getTime()).getTime());
		}catch(Exception e){
			//DBSError.showException(e);
			return null;
		}
	}

	/**
	 * Returns the current time of day.
	 * @return current time
	 */
	public static Time getNowTime() {
		Calendar xCurrentTime = Calendar.getInstance();
		return new Time(xCurrentTime.getTimeInMillis());
	}

	/**
	 * Returns the current date and time as a {@link Timestamp}.
	 * @return current timestamp
	 */
	public static Timestamp getNowTimestamp() {
		Calendar xCurrentTime = Calendar.getInstance();
		return new Timestamp(xCurrentTime.getTimeInMillis());
	}

	/**
	 * Computes the number of seconds between two instants.
	 * @param pTimeInicio start instant
	 * @param pTimeFim end instant
	 * @return seconds between the two instants (0 when equal)
	 */
	public static int getTimeDif(DateTime pTimeInicio, DateTime pTimeFim){
		if (pTimeInicio.equals(pTimeFim)){
			return 0;
		}
		int xSeconds = Seconds.secondsBetween(pTimeInicio.toLocalDateTime(),
				pTimeFim.toLocalDateTime()).getSeconds();
		return xSeconds;
	}

	/**
	 * Tests whether the string can be parsed as a date by {@link #toDate(String)}.
	 */
	public static boolean isDate(String pData) {
		if (toDate(pData) != null){
			return true;
		}
		return false;
	}

	/**
	 * Tests whether the string is a valid HH:mm:ss time.
	 */
	public static boolean isTime(String pHora) {
		DateFormat xFormat = new SimpleDateFormat("HH:mm:ss");
		xFormat.setLenient(false);
		try {
			xFormat.parse(pHora);
			return true;
		} catch (ParseException e) {
			//DBSError.showException(e);
			return false;
		}
	}

	/**
	 * Builds a date from day/month/year strings (delegates to {@link #toDate(String)}).
	 */
	public static Date toDate(String pDia, String pMes, String pAno) {
		return toDate(pDia + "/" + pMes + "/"+ pAno);
	}

	/**
	 * Builds a date from numeric day/month/year (delegates to {@link #toDate(String)}).
	 */
	public static Date toDate(int pDia, int pMes, int pAno) {
		return toDate(pDia + "/" + pMes + "/"+ pAno);
	}

	/**
	 * Parses a date string in either ISO (y-M-d) or Brazilian (d/M/y) format.
	 * @param pData date string; null or unparsable input yields null
	 * @return parsed date, or null
	 */
	public static Date toDate(String pData) {
		if (pData == null){
			return null;
		}
		DateFormat xFormat;
		// A '-' in the input selects the ISO pattern; otherwise d/M/y.
		if (pData.contains("-")){
			// ISO format
			xFormat = new SimpleDateFormat("yy-MM-dd");
		}else{
			// ABNT (Brazilian) format
			xFormat = new SimpleDateFormat("dd/MM/yy");
		}
		Date xDate = Date.valueOf("0001-01-01");
		Long xDateTime = 0L;
		xFormat.setLenient(false);
		try {
			xDateTime = xFormat.parse(pData).getTime();
			xDate.setTime(xDateTime);
		} catch (ParseException e) {
			//DBSError.showException(e);
			return null;
		}
		return xDate;
	}

	/**
	 * Builds a date from epoch milliseconds; null input yields null.
	 */
	public static Date toDate(Long pMilliSeconds) {
		if (DBSObject.isNull(pMilliSeconds)) {
			return null;
		}
		Date xData = new Date(pMilliSeconds);
		return xData;
	}

	/**
	 * Converts a {@link Calendar} to a {@link Date}; null input yields null.
	 * @param pData
	 * @return Date value, or null
	 */
	public static Date toDate(Calendar pData) {
		if (DBSObject.isNull(pData)) {
			return null;
		}
		return toDate(pData.getTimeInMillis());
	}

	/**
	 * Converts a {@link Timestamp} to a {@link Date}; null input yields null.
	 * @param pData
	 * @return Date value, or null
	 */
	public static Date toDate(Timestamp pData) {
		if (DBSObject.isNull(pData)) {
			return null;
		}
		return toDate(pData.getTime());
	}

	/**
	 * Converts a generic object (Timestamp, String, Time or Date) to a Date.
	 * Falls through to a plain cast for any other type.
	 * @param pData
	 * @return Date value, or null when empty
	 */
	public static Date toDate(Object pData) {
		if (DBSObject.isEmpty(pData)) {
			return null;
		}
		if (pData instanceof Timestamp) {
			return toDate((Timestamp) pData);
		} else if (pData instanceof String) {
			return toDate((String) pData);
		} else if (pData instanceof Time) {
			return new Date(((Time) pData).getTime());
		} else {
			// NOTE(review): any other type is assumed to already be a
			// java.sql.Date; a ClassCastException escapes otherwise.
			return (Date) pData;
		}
	}

	/**
	 * Retorna uma Data do tipo Date, a partir de uma data do tipo LocalDate.
* @param pData * @return Data no tipo Date */ public static Date toDate(LocalDate pData) { java.util.Date xData0 = pData.toDate(); java.sql.Date xData = new java.sql.Date(xData0.getTime()); return xData; } /** * Retorna uma Data do tipo Date, a partir de uma data do tipo DateTime * @param pData * @return Data no tipo Date */ public static Date toDate(DateTime pData) { java.util.Date xData0 = pData.toDate(); java.sql.Date xData = new java.sql.Date(xData0.getTime()); return xData; } public static Date toDate(XMLGregorianCalendar pData) { if (DBSObject.isEmpty(pData)) { return null; } DateTime xDataTime = new DateTime(pData.getYear(), pData.getMonth(), pData.getDay(), pData.getHour(), pData.getMinute()); return DBSDate.toDate(xDataTime); } public static XMLGregorianCalendar toXMLGregorianCalendar(Object pDate){ return toXMLGregorianCalendar(toDate(pDate)); } public static XMLGregorianCalendar toXMLGregorianCalendar(Date pDate){ GregorianCalendar gCalendar = new GregorianCalendar(); gCalendar.setTime(pDate); XMLGregorianCalendar xmlCalendar = null; try { xmlCalendar = DatatypeFactory.newInstance().newXMLGregorianCalendar(gCalendar); } catch (DatatypeConfigurationException ex) { wLogger.error(ex); } return xmlCalendar; } /** * Retorna a data e hora no tipo Date, a partir de uma string com data e hora. 
* @param pData no formado dd/mm/aaaa hh:mm:ss * @return Retorna a data formatada */ public static Date toDateDMYHMS(String pData) { if (pData.contains("-")) { // Data no formato ISO return pvToDateLong(pData, "dd-MM-yyyy HH:mm:ss"); }else{ // Data no formato ABNT return pvToDateLong(pData, "dd/MM/yyyy HH:mm:ss"); } } public static Date toDateYMDHMS(String pData) { if (pData.contains("-")) { // Data no formato ISO return pvToDateLong(pData, "yyyy-MM-dd HH:mm:ss"); }else if(pData.contains("/")){ // Data no formato ABNT return pvToDateLong(pData, "yyyy/MM/dd HH:mm:ss"); }else { // Data no formato ABNT return pvToDateLong(pData, "yyyyMMddHHmmss"); } } /** * Retorna uma Data do tipo Timestamp, a partir de uma data do tipo Date. * @param pData * @return */ public static Timestamp toTimestamp(Date pData){ if (DBSObject.isEmpty(pData)) { return null; } Timestamp xT = new Timestamp(pData.getTime()); return xT; } /** * Retorna uma Data do tipo <b>Timestamp</b> a partir da quantidade de milisegundos. * @param pMilliSeconds * @return */ public static Timestamp toTimestamp(Long pMilliSeconds){ Timestamp xT = new Timestamp(toDateTime(pMilliSeconds).getMillis()); return xT; } /** * Retorna uma Data do tipo <b>Timestamp</b> a partir da quantidade de milisegundos. * @param pData * @return */ public static Timestamp toTimestamp(Integer pMilliSeconds){ Timestamp xT = new Timestamp(toDateTime(pMilliSeconds.longValue()).getMillis()); return xT; } /** * Retorna uma Data do tipo <b>Timestamp</b> a partir da hora. * @param pData * @return */ public static Timestamp toTimestamp(Time pTime){ Timestamp xT = new Timestamp(pTime.getTime()); return xT; } /** * Retorna uma Data do tipo Timestamp, a partir de uma data do tipo Object. * Se object for ""(vazio), retorna nulo. 
* @param pData * @return */ public static Timestamp toTimestamp(Object pData){ if (DBSObject.isEmpty(pData)) { return null; } if (pData.equals("")){ return null; }else if (pData instanceof Date){ return new Timestamp(((Date) pData).getTime()); } else if (pData instanceof Time) { return new Timestamp(((Time) pData).getTime()); } else if (pData instanceof Timestamp) { return (Timestamp) pData; } else if (pData instanceof Integer) { return new Timestamp((Integer) pData); } else { return (Timestamp) pData; } } /** * Retorna uma Data do tipo Timestamp, a partir de uma data do tipo String * com o formato Ano(4)/Mes/Dia Hora:Minuto:Segundo * @param pData * @return */ public static Timestamp toTimestampYMDHMS(String pData){ Date xData = DBSDate.toDateYMDHMS(pData); return DBSDate.toTimestamp(xData); } /** * Retorna uma Data do tipo Timestamp, a partir de uma data do tipo String * com o formato Dia/Mes/Ano(4) Hora:Minuto:Segundo * @param pData * @return */ public static Timestamp toTimestampDMYHMS(String pData){ Date xData = DBSDate.toDateDMYHMS(pData); return DBSDate.toTimestamp(xData); } /** * Retorna a hora a partir da string no formato HH:MM:SS * @param pHora no formato HH:MM:SS (24hrs) * @return hora */ public static Time toTime(String pHora){ DateFormat xFormat = new SimpleDateFormat("HH:mm:ss"); xFormat.setLenient(true); Time xTime = Time.valueOf("0:0:0"); try { xTime.setTime(xFormat.parse(pHora).getTime()); } catch (ParseException e) { // throw e; //DBSError.showException(e); return null; } return xTime; } /** * Retorna a hora a partir das strings de hora, minuto e segundo. 
* @param pHora * @param pMinuto * @param pSegundo * @return Hora * @throws ParseException */ public static Time toTime(String pHora, String pMinuto, String pSegundo) { DateFormat xFormat = new SimpleDateFormat("HH:mm:ss"); xFormat.setLenient(true); Time xTime = Time.valueOf("0:0:0"); try { xTime.setTime(xFormat.parse(pHora + ":" + pMinuto + ":" + pSegundo).getTime()); } catch (ParseException e) { return null; } return xTime; } /** * Retorna a hora a partir das strings de hora, minuto e segundo * @param pHora * @param pMinuto * @param pSegundo * @return Hora * @throws ParseException */ public static Time toTime(Long pHora, Long pMinuto, Long pSegundo) { return toTime(pHora.toString(),pMinuto.toString(), pSegundo.toString()); } /** * Retorna a hora a partir da quantidade de milisegundos. * @param pMilliseconds * @return hora */ public static Time toTime(Long pMilliseconds){ return toTime(TimeUnit.MILLISECONDS.toHours(pMilliseconds), TimeUnit.MILLISECONDS.toMinutes(pMilliseconds), TimeUnit.MILLISECONDS.toSeconds(pMilliseconds)); } /** * Retorna a hora a partir da quantidade de milisegundos. * @param pMilliseconds * @return hora */ public static Time toTime(Object pMilliseconds){ if (pMilliseconds instanceof Number){ return toTime((Long) pMilliseconds); }else{ return null; } } /** * Retorna a hora a partir da quantidade de timestamp. 
* @param pMilliseconds * @return hora */ public static Time toTime(Timestamp pTimestamp){ return toTime(pTimestamp.getTime()); } /** * Retornar data no tipo Calender a partir de data no tipo Date * @param pData do tipo Date que se seja converte * @return Data convertida para o tipo Calendar */ public static Calendar toCalendar(Date pData){ if (pData == null){ return null; } Calendar xData = Calendar.getInstance(); xData.setTime(pData); return xData; } /** * Retornar data no tipo Calender a partir de data no tipo Timestamp * @param pTime do tipo Date que se seja converte * @return Data convertida para o tipo Calendar */ public static Calendar toCalendar(Timestamp pTime){ if (pTime == null){ return null; } Calendar xData = Calendar.getInstance(); xData.setTime(pTime); return xData; } /** * Retornar data no tipo Calendar, a partir do dia, mes e ano informado * @param pDia * @param pMes * @param pAno * @return Data no formato Calendar */ public static Calendar toCalendar(int pDia, int pMes, int pAno) { Calendar xC = Calendar.getInstance(); xC.set(pAno, pMes, pDia); return xC; } /** * Retornar data no tipo Calendar, a partir da quantidade de milisegundos. * @param pDia * @param pMes * @param pAno * @return Data no formato Calendar */ public static Calendar toCalendar(Long pMilliseconds) { Calendar xC = Calendar.getInstance(); xC.setTimeInMillis(pMilliseconds); return xC; } public static DateTime toDateTime(Long pMilliSeconds) { return new DateTime(pMilliSeconds, DateTimeZone.UTC); } /** * Retorna o dia a partir de uma data. 
 * @param pData source date
 * @return day of month
 */
public static Integer getDia(Date pData){
	Calendar xData = toCalendar(pData);
	return xData.get(Calendar.DAY_OF_MONTH);
}
/**
 * Counts the days between two dates; see the overload below.
 */
public static int getDias(Connection pConexao, Date pDataInicio, Date pDataFim, boolean pUtil, int pCidade) {
	return getDias(pConexao, pDataInicio, pDataFim, pUtil, pCidade, null);
}
/**
 * Counts the days between two dates.
 * @param pConexao database connection used to look up holidays
 * @param pDataInicio start date
 * @param pDataFim end date
 * @param pUtil when true, weekends and holidays are subtracted (business days)
 * @param pCidade city id used to filter city-specific holidays
 * @param pApplicationColumnName optional extra holiday-table filter column
 * @return signed day count; 0 when either date is empty or they are equal
 */
public static int getDias(Connection pConexao, Date pDataInicio, Date pDataFim, boolean pUtil, int pCidade, String pApplicationColumnName) {
	int xDias;
	int xSinal=1;
	if (DBSObject.isEmpty(pDataInicio)
	 || DBSObject.isEmpty(pDataFim)
	 || pDataInicio.equals(pDataFim)) {
		return 0;
	} else {
		if (pUtil){
			xDias = getDateDif(pDataInicio, pDataFim);
			// keep the sign of the raw difference while subtracting on magnitudes
			if (xDias < 0){
				xSinal = -1;
			}
			return xSinal * (Math.abs(xDias)
					- getFinaisDeSemana(pDataInicio, pDataFim)
					- getFeriados(pConexao, pDataInicio, pDataFim, pCidade, pApplicationColumnName));
		} else {
			return getDateDif(pDataInicio, pDataFim);
		}
	}
}
/**
 * Counts the months between two dates.
 * @param pDataInicio start date
 * @param pDataFim end date
 * @return number of months
 */
public static int getMeses(Date pDataInicio, Date pDataFim){
	Calendar xDataInicio = toCalendar(pDataInicio);
	Calendar xDataFim = toCalendar(pDataFim);
	return getMeses(xDataInicio, xDataFim);
}
/**
 * Returns the 1-based month of a date.
 * @param pData source date
 * @return month in 1..12 (Calendar.MONTH is zero-based, hence the +1)
 */
public static Integer getMes(Date pData){
	Calendar xData = toCalendar(pData);
	return xData.get(Calendar.MONTH) + 1;
}
/**
 * Counts the months between two calendars (year delta * 12 + month delta).
 * @param pDataInicio start date
 * @param pDataFim end date
 * @return number of months
 */
public static int getMeses(Calendar pDataInicio, Calendar pDataFim){
	int xAnos = pDataFim.get(Calendar.YEAR) - pDataInicio.get(Calendar.YEAR);
	int xMeses = pDataFim.get(Calendar.MONTH) - pDataInicio.get(Calendar.MONTH);
	return (xAnos * 12) + xMeses;
}
/**
 * Returns the year of a date.
 * @param pData source date
 * @return year
 */
public static
Integer getAno(Date pData){ Calendar xData = toCalendar(pData); return xData.get(Calendar.YEAR); } /** * Retorna a hora(sem minutos ou segundos) a partir de um timestamp * @param pData * @return Ano */ public static Integer getHora(Timestamp pTime){ Calendar xData = toCalendar(pTime); return xData.get(Calendar.HOUR_OF_DAY); } /** * Retorna o minuto a partir de um timestamp * @param pData * @return Ano */ public static Integer getMinuto(Timestamp pTime){ Calendar xData = toCalendar(pTime); return xData.get(Calendar.MINUTE); } /** * Retorna o segundo a partir de um timestamp * @param pData * @return Ano */ public static Integer getSegundo(Timestamp pTime){ Calendar xData = toCalendar(pTime); return xData.get(Calendar.SECOND); } /** * Calcula a quantidade de anos entre duas datas * @param pDataInicio Data inicio * @param pDataFim Data fim * @return Quantidade de anos */ public static int getAnos(Date pDataInicio, Date pDataFim){ Calendar xDataInicio = toCalendar(pDataInicio); Calendar xDataFim = toCalendar(pDataFim); return getAnos(xDataInicio, xDataFim); } /** * Calcula a quantidade de anos entre duas data * @param pDataInicio Data inicio * @param pDataFim Data fim * @return Quantidade de anos */ public static int getAnos(Calendar pDataInicio, Calendar pDataFim){ //Anos entre duas datas return pDataFim.get(Calendar.YEAR) - pDataInicio.get(Calendar.YEAR); } /** * Calcula a data a partir da database adicionada de dias * @param pDataBase Data base * @param pPrazo em dias * @return Data */ public static Date getDateAdd(Date pDataBase, int pPrazo){ if (pDataBase!=null){ if (pPrazo==0){ return pDataBase; }else{ LocalDate xDT = new DateTime(pDataBase).toLocalDate(); xDT = xDT.plusDays(pPrazo); return DBSDate.toDate(xDT); //int xDias = Days.daysBetween(new DateTime(pDataBase).toLocalDate(), new DateTime(pDataFim).toLocalDate()).getDays(); } }else{ return null; } } /** * Calcula a data a partir da database adicionada de meses * @param pDataBase Data base * @param pPrazo em 
 dias
 * NOTE(review): despite the wording above, this overload adds MONTHS.
 * @return the shifted date, or null when the base date is null
 */
public static Date getDateAddMes(Date pDataBase, int pPrazo){
	if (pDataBase!=null){
		if (pPrazo==0){
			return pDataBase;
		}else{
			LocalDate xDT = new DateTime(pDataBase).toLocalDate();
			xDT = xDT.plusMonths(pPrazo);
			return DBSDate.toDate(xDT);
		}
	}else{
		return null;
	}
}
/**
 * Returns the base date shifted by a number of YEARS (not days, as the original
 * comment said).
 * @param pDataBase base date
 * @param pPrazo number of years to add
 * @return the shifted date, or null when the base date is null
 */
public static Date getDateAddAno(Date pDataBase, int pPrazo){
	if (pDataBase!=null){
		if (pPrazo==0){
			return pDataBase;
		}else{
			LocalDate xDT = new DateTime(pDataBase).toLocalDate();
			xDT = xDT.plusYears(pPrazo);
			return DBSDate.toDate(xDT);
		}
	}else{
		return null;
	}
}
/**
 * Counts the days between two dates (signed; negative when pDataFim precedes
 * pDataInicio).
 * @param pDataInicio start date
 * @param pDataFim end date
 * @return day count, 0 when the dates are equal
 */
public static int getDateDif(Date pDataInicio, Date pDataFim){
	if (pDataInicio.equals(pDataFim)){
		return 0;
	}
	return Days.daysBetween(new DateTime(pDataInicio).toLocalDate(), new DateTime(pDataFim).toLocalDate()).getDays();
}
/**
 * Tells whether a date is a business day: not a Saturday/Sunday and not a
 * holiday registered in the window (pData - 1 day, pData].
 * @param pConexao database connection used to look up holidays
 * @param pData date to test
 * @param pCidade city id for city-specific holidays
 * @param pApplicationColumnName optional extra holiday-table filter column
 * @return true when pData is a business day
 */
public static boolean isDiaUtil(Connection pConexao, Date pData, int pCidade, String pApplicationColumnName){
	Calendar xData = Calendar.getInstance();
	xData.setTime(pData);
	// one day back so the holiday query's exclusive lower bound still covers pData
	xData.add(Calendar.DAY_OF_MONTH, -1);
	if (getFeriados(pConexao, toDate(xData), pData, pCidade, pApplicationColumnName) > 0
	 || toCalendar(pData).get(Calendar.DAY_OF_WEEK) == Calendar.SUNDAY
	 || toCalendar(pData).get(Calendar.DAY_OF_WEEK) == Calendar.SATURDAY){
		return false;
	}
	return true;
}
/** Convenience overload: no application-column filter. */
public static boolean isDiaUtil(Connection pConexao, Date pData, int pCidade){
	return isDiaUtil(pConexao, pData, pCidade, null);
}
/** Convenience overload: no city filter. */
public static boolean isDiaUtil(Connection pConexao, Date pData){
	return isDiaUtil(pConexao, pData, -1);
}
public
static int getFeriados(Connection pConexao, Date pDataInicio, Date pDataFim) { return getFeriados(pConexao, pDataInicio, pDataFim, -1); } public static int getFeriados(Connection pConexao, Date pDataInicio, Date pDataFim, int pCidade) { return getFeriados(pConexao, pDataInicio, pDataFim, pCidade, null); } public static int getFeriados(Connection pConexao, Date pDataInicio, Date pDataFim, int pCidade, String pApplicationColumnName) { if (DBSSDK.TABLE.FERIADO.equals("")){ wLogger.error("DBSSDK.TABLE.FERIADO em branco, Favor informar o tarefa que contém o cadastro de feriados."); return 0; } String xSql; String xFiltroCidade= ""; Integer xDias = 0; DBSDAO<Object> xDao = new DBSDAO<Object>(pConexao); xSql = "SELECT * From " + DBSSDK.TABLE.FERIADO + " "; if (pDataInicio.after(pDataFim)) { xSql = xSql + "WHERE DATA >=" + DBSIO.toSQLDate(pConexao, pDataFim) + " AND DATA <" + DBSIO.toSQLDate(pConexao, pDataInicio) ; } else { xSql = xSql + "WHERE DATA >" + DBSIO.toSQLDate(pConexao, pDataInicio) + " AND DATA <=" + DBSIO.toSQLDate(pConexao, pDataFim) ; } if (DBSObject.isIdValid(pCidade)) { xFiltroCidade = " OR CIDADE_ID = " + pCidade; } xSql = xSql + " AND (CIDADE_ID = -1 or CIDADE_ID IS NULL" + xFiltroCidade + ")";// Objetivo: Retorna quantidade de feriados em dias //ALBERTO if (!DBSObject.isEmpty(pApplicationColumnName)) { xSql = xSql + " AND "+ pApplicationColumnName + " = -1"; } try { if (xDao.open(xSql)) { xDao.moveBeforeFirstRow(); while (xDao.moveNextRow()) { if (DBSDate.getNumeroDaSemana(DBSDate.toDate(xDao.getValue("DATA"))) != Calendar.SATURDAY && DBSDate.getNumeroDaSemana(DBSDate.toDate(xDao.getValue("DATA"))) != Calendar.SUNDAY) { xDias = xDias + 1; } } xDao.close(); } } catch (DBSIOException e) { wLogger.error(e); } return xDias; } public static int getFinaisDeSemana(Date pDataInicio, Date pDataFim){ Double xDiasI; Double xDiasF; int xDias; Date xDataBase; xDataBase = DBSDate.toDate(01,01,1900); xDiasI = (((DBSDate.getDateDif(xDataBase,pDataInicio)+1)/7)-0.001); 
//Quantidade de semanas existentes na data inicio xDiasF = (((DBSDate.getDateDif(xDataBase,pDataFim)+1)/7)-0.001); //Quantidade de semanas existenstes na data fim xDias = (xDiasF.intValue() - xDiasI.intValue()) * 2; xDias = Math.abs(xDias); // Ajusta os dias caso as data seja um final de semana if (pDataFim.compareTo(pDataInicio) > 0) { if (DBSDate.toCalendar(pDataInicio).get(Calendar.DAY_OF_WEEK) == 7){ xDias } if (DBSDate.toCalendar(pDataFim).get(Calendar.DAY_OF_WEEK) == 7){ xDias++; } } else { // Ajusta os dias se as datas forem iguais a domingo if (DBSDate.toCalendar(pDataInicio).get(Calendar.DAY_OF_WEEK) == 1){ xDias } if (DBSDate.toCalendar(pDataFim).get(Calendar.DAY_OF_WEEK) == 1){ xDias++; } } return xDias; } public static int getDiasDoAno(Connection pConexao, int pAno, boolean pUtil, int pCidade, String pApplicationColumnName){ Date xInicio = DBSDate.toDate("31","12", String.valueOf(pAno - 1)); Date xFim = DBSDate.toDate("31","12", String.valueOf(pAno)); return DBSDate.getDias(pConexao, xInicio, xFim, pUtil, pCidade, pApplicationColumnName); } public static int getDiasDoAno(Connection pConexao, int pAno, boolean pUtil, int pCidade){ return getDiasDoAno(pConexao, pAno, pUtil, pCidade, null); } public static int getDiasDoAno(Connection pConexao, int pAno, boolean pUtil){ return getDiasDoAno(pConexao, pAno, pUtil, -1); } public static Date getProximaData(Connection pConexao, Date pDataBase, int pPrazo, boolean pUtil){ return getProximaData(pConexao, pDataBase, pPrazo, pUtil, -1, null); } public static Date getProximaData(Connection pConexao, Date pDataBase, int pPrazo, boolean pUtil, int pCidade){ return getProximaData(pConexao, pDataBase, pPrazo, pUtil, pCidade, null); } public static Date getProximaData(Connection pConexao, Date pDataBase, int pPrazo, boolean pUtil, int pCidade, String pApplicationColumnName){ Date xDataFim = pDataBase; if (pDataBase == null){ return null; } if (pPrazo == 0){ if (pUtil){ while (!isDiaUtil(pConexao, xDataFim, pCidade, 
pApplicationColumnName)) { xDataFim = DBSDate.getDateAdd(xDataFim, 1); } } return xDataFim; } if (pUtil){ xDataFim = pvGetProximaDataUtil(pConexao,pDataBase,pPrazo,pCidade, pApplicationColumnName); return xDataFim; }else{ xDataFim = DBSDate.getDateAdd(xDataFim, pPrazo); return xDataFim; } } public static String getNomeDoMesAbreviado(Date pData){ String xRetorno = ""; if (pData == null) { return xRetorno; } DateFormatSymbols xDF= new DateFormatSymbols(new Locale("pt", "BR")); xRetorno = xDF.getShortMonths()[DBSDate.getMes(pData)]; return xRetorno; } public static String getNomeDoMes(Date pData){ String xRetorno = ""; if (pData == null) { return xRetorno; } return getNomeDosMeses()[DBSDate.getMes(pData)-1];// xRetorno; } /** * Retorna array com os nomes de todos os meses * @return */ public static String[] getNomeDosMeses(){ // DateFormatSymbols xDF= new DateFormatSymbols(new Locale("pt", "BR")); DateFormatSymbols xDF = new DateFormatSymbols(); return xDF.getMonths(); } /** * Objetivo: Retorna nome da Semana a partir de uma data * @param pData * @return Nome da semana a partir de uma data. */ public static String getNomeDaSemana(Date pData){ int xDiaSemana = getNumeroDaSemana(pData); String xRetorno = ""; if (xDiaSemana==-1) { return xRetorno; } DateFormatSymbols xDF= new DateFormatSymbols(new Locale("pt", "BR")); xRetorno = xDF.getWeekdays()[xDiaSemana]; return xRetorno; } /** * Objetivo: Retorna nome da Semana abreviado a partir de uma data * @param pData * @return Nome da semana a partir de uma data. 
 */
public static String getNomeDaSemanaAbreviado(Date pData){
	int xDiaSemana = getNumeroDaSemana(pData);
	String xRetorno = "";
	if (xDiaSemana==-1) {
		return xRetorno;
	}
	DateFormatSymbols xDF= new DateFormatSymbols(new Locale("pt", "BR"));
	xRetorno = xDF.getShortWeekdays()[xDiaSemana];
	return xRetorno;
}
/**
 * Returns the first day of the month containing pData; when pUtil, the first
 * business day.
 * @param pConexao database connection used for holiday lookup
 * @param pData reference date; null yields null
 * @param pUtil whether to roll forward to a business day
 * @return first day of the month
 */
public static Date getPrimeiroDiaDoMes(Connection pConexao, Date pData, boolean pUtil){
	if (pData == null){
		return null;
	}
	Calendar xData = toCalendar(pData);
	// toDate takes a 1-based month here (Calendar.MONTH is 0-based)
	pData = toDate(01,xData.get(Calendar.MONTH)+1,xData.get(Calendar.YEAR));
	return getProximaData(pConexao, pData, 0, pUtil);
}
/**
 * Returns the last day of the month containing pData; when pUtil, the last
 * business day.
 * @param pConexao database connection used for holiday lookup
 * @param pData reference date; null yields null
 * @param pUtil whether to roll back to a business day
 * @return last day of the month
 */
public static Date getUltimoDiaDoMes(Connection pConexao, Date pData, boolean pUtil){
	if (pData == null){
		return null;
	}
	Calendar xData = toCalendar(pData);
	// first day of the NEXT month...
	xData.add(Calendar.MONTH, 1);
	pData = getPrimeiroDiaDoMes(pConexao, toDate(xData), false);
	// ...minus one day
	return getProximaData(pConexao, pData, -1, pUtil);
}
/**
 * Counts the days (business days when pUtil) of the month containing pData.
 * @param pConexao database connection used for holiday lookup
 * @param pData reference date; null yields 0
 * @param pUtil whether to count business days only
 * @param pCidade city id for holiday filtering
 * @param pApplicationColumnName optional extra holiday-table filter column
 * @return number of days in the month
 */
public static int getDiasDoMes(Connection pConexao, Date pData, boolean pUtil, int pCidade, String pApplicationColumnName) {
	if (pData == null){
		return 0;
	}
	Date xInicio = getPrimeiroDiaDoMes(pConexao, pData, false);
	Date xFim = getUltimoDiaDoMes(pConexao, pData, false);
	// step back one day so the first day of the month is included in the count
	xInicio = getProximaData(pConexao, xInicio,-1,false, pCidade);
	return getDias(pConexao, xInicio, xFim, pUtil, pCidade, pApplicationColumnName);
}
/** Convenience overload: no application-column filter. */
public static int getDiasDoMes(Connection pConexao, Date pData, boolean pUtil, int pCidade) {
	return getDiasDoMes(pConexao, pData, pUtil, pCidade, null);
}
/** Convenience overload: no city filter. */
public static int getDiasDoMes(Connection pConexao, Date pData, boolean pUtil) {
	return getDiasDoMes(pConexao, pData, pUtil, -1);
}
/**
 * Returns the last day of the year containing pData; when pUtil, the last
 * business day.
 * @param pConexao database connection used for holiday lookup
 * @param pData reference date; null yields null
 * @param pUtil whether to roll back to a business day
 * @return last day of the year
 */
public static Date getUltimoDiaDoAno(Connection pConexao, Date pData, boolean pUtil){
	if (pData == null){
		return null;
	}
	Calendar xData = toCalendar(pData);
	// first day of the following year...
	pData = toDate(1,1,xData.get(Calendar.YEAR)+1);
	// ...minus one day
	return getProximaData(pConexao, pData,-1,pUtil);
}
/**
 * Returns the first day of the year containing pData; when pUtil, the first
 * business day.
 */
public static Date getPrimeiroDiaDoAno(Connection pConexao, Date pData,
boolean pUtil) {
	if (pData == null){
		return null;
	}
	Calendar xData = toCalendar(pData);
	// first day of the year
	pData = toDate(1,1,xData.get(Calendar.YEAR));
	if (pUtil){
		return getProximaData(pConexao, pData, 0, pUtil);
	}
	return pData;
}
/**
 * Returns the week-day number of a date (Calendar.DAY_OF_WEEK: Sunday=1 ..
 * Saturday=7).
 * @param pData source date; null yields -1
 * @return week-day number, or -1 when pData is null
 */
public static int getNumeroDaSemana(Date pData){
	Calendar xCalendar = Calendar.getInstance();
	if (pData == null) {
		return -1;
	}
	xCalendar.setTime(pData);
	return xCalendar.get(Calendar.DAY_OF_WEEK);
}
/**
 * Returns the 1-based month number matching a month name (case- and
 * whitespace-insensitive, compared against the default-locale names from
 * getNomeDosMeses).
 * @param pMes month name
 * @return month number 1..12, or 0 when not found
 */
public static int getNumeroDoMes(String pMes){
	String[] xMeses = DBSDate.getNomeDosMeses();
	int xI = 0;
	// normalize the input for comparison
	pMes = pMes.trim().toUpperCase();
	for (String xMes : xMeses){
		// normalize each candidate the same way
		xMes = xMes.trim().toUpperCase();
		xI++;
		if (!xMes.equals("") && xMes.equals(pMes)){
			return xI;
		}
	}
	return 0;
}
/**
 * Returns the next date that falls on the requested week day, starting roughly
 * pPrazo days from pDataAtual; optionally pushed to a business day.
 * @param pConexao database connection used for holiday lookup
 * @param pDataAtual reference date; null yields null
 * @param pPrazo approximate offset in days
 * @param pDiaDaSemana target week day (1..7, Calendar convention); out of range yields null
 * @param pUtil whether the result must be a business day
 * @param pCidade city id for holiday filtering
 * @param pApplicationColumnName optional extra holiday-table filter column
 * @return the resulting date, or null on invalid input
 */
public static Date getProximaSemana(Connection pConexao, Date pDataAtual, int pPrazo, int pDiaDaSemana, boolean pUtil, int pCidade, String pApplicationColumnName){
	if (pDataAtual == null){
		return null;
	}
	if (pDiaDaSemana <= 0 || pDiaDaSemana > 7){
		return null;
	}
	// approximate target date, then walk forward to the requested week day
	pDataAtual = getProximaData(pConexao, pDataAtual, pPrazo, false, pCidade);
	Calendar xData = toCalendar(pDataAtual);
	while (xData.get(Calendar.DAY_OF_WEEK) != pDiaDaSemana) {
		pDataAtual =
getProximaData(pConexao, toDate(xData), 1, false, pCidade);
		xData.setTime(pDataAtual);
	}
	// if a business day is required and the found day is not one, shift by one
	// day in the direction of pPrazo
	if (pUtil && !isDiaUtil(pConexao, pDataAtual, pCidade, pApplicationColumnName)){
		int xSinal=1;
		if (pPrazo<0){
			xSinal=-1;
		}
		pDataAtual = getProximaData(pConexao, pDataAtual, xSinal, true, pCidade);
	}
	return pDataAtual;
}
/** Convenience overload: no application-column filter. */
public static Date getProximaSemana(Connection pConexao, Date pDataAtual, int pPrazo, int pDiaDaSemana, boolean pUtil, int pCidade){
	return getProximaSemana(pConexao, pDataAtual, pPrazo, pDiaDaSemana, pUtil, pCidade, null);
}
/** Convenience overload: no city filter. */
public static Date getProximaSemana(Connection pConexao, Date pDataAtual, int pPrazo, int pDiaDaSemana, boolean pUtil){
	return getProximaSemana(pConexao, pDataAtual, pPrazo, pDiaDaSemana, pUtil, -1);
}
/**
 * Returns the next "anniversary" of a date: the date shifted by pPrazo
 * days/months/years according to the periodicity, optionally pushed to a
 * business day.
 * @param pConexao database connection used for holiday lookup
 * @param pData base date; null yields null
 * @param pPrazo number of periods to add
 * @param pPeriodicidade DIARIA, MENSAL or ANUAL; null yields null
 * @param pUtil whether the result must be a business day
 * @param pCidade city id for holiday filtering
 * @param pApplicationColumnName optional extra holiday-table filter column
 * @return the resulting date, or null on invalid input
 */
public static Date getProximoAniversario(Connection pConexao, Date pData, int pPrazo, PERIODICIDADE pPeriodicidade, boolean pUtil, int pCidade, String pApplicationColumnName){
	if (pData == null || pPeriodicidade == null){
		return null;
	}
	Calendar xData = toCalendar(pData);
	if (pPeriodicidade == PERIODICIDADE.DIARIA){
		xData.add(Calendar.DAY_OF_MONTH, pPrazo);
	} else if (pPeriodicidade == PERIODICIDADE.MENSAL){
		xData.add(Calendar.MONTH, pPrazo);
	} else if (pPeriodicidade == PERIODICIDADE.ANUAL) {
		xData.add(Calendar.YEAR, pPrazo);
	}
	return getProximaData(pConexao, toDate(xData), 0, pUtil, pCidade, pApplicationColumnName);
}
/** Convenience overload: no application-column filter. */
public static Date getProximoAniversario(Connection pConexao, Date pData, int pPrazo, PERIODICIDADE pPeriodicidade, boolean pUtil, int pCidade){
	return getProximoAniversario(pConexao, pData, pPrazo, pPeriodicidade, pUtil, pCidade, null);
}
/** Convenience overload: no city filter. */
public static Date getProximoAniversario(Connection pConexao, Date pData, int pPrazo, PERIODICIDADE pPeriodicidade, boolean pUtil){
	return getProximoAniversario(pConexao, pData, pPrazo, pPeriodicidade, pUtil, -1);
}
// Commented-out legacy overload, superseded by getVencimento below:
// public static Date getVencimento(Connection pConexao, int pParcela, Date pPrimeiraParcela, PERIODICIDADE pPeriodicidade, int pPrazo, boolean pUtil, String
// pApplicationColumnName){
//	if (pPrimeiraParcela == null || pPeriodicidade == null){
//		return null;
//	return getProximoAniversario(pConexao, pPrimeiraParcela, pPrazo * DBSNumber.toInteger(pParcela-1), pPeriodicidade, pUtil, -1, pApplicationColumnName);
//
//MOVIDO DE DBSFND - era usado em CALCULAPUBEAN
/**
 * Returns the due date of installment number pParcelas, counted from the first
 * installment's date by pPrazo periods per installment.
 * NOTE(review): unlike the commented-out predecessor above, this version does
 * NOT null-check pPeriodicidade before calling equals() — a null periodicity
 * throws NPE; confirm callers guarantee it.
 * @param pConnection database connection used for business-day lookup
 * @param pParcelas installment number (1-based)
 * @param pPrimeiraParcela date of the first installment; empty yields itself
 * @param pPeriodicidade DIARIA, MENSAL or ANUAL
 * @param pPrazo number of periods between installments
 * @param pUtil whether the result must be a business day
 * @param pApplicationColumnName optional extra holiday-table filter column
 * @return the due date
 */
public static Date getVencimento(Connection pConnection, Integer pParcelas, Date pPrimeiraParcela, PERIODICIDADE pPeriodicidade, Integer pPrazo, boolean pUtil, String pApplicationColumnName) {
	Date xVencimento = pPrimeiraParcela;
	if (DBSObject.isEmpty(xVencimento))
		return xVencimento;
	if (pPeriodicidade.equals(PERIODICIDADE.DIARIA)) {
		xVencimento = DBSDate.getProximaData(pConnection, xVencimento, pPrazo * (pParcelas - 1), false, -1, pApplicationColumnName);
		if (pUtil) {
			// daily periodicity: only the final date is pushed to a business day
			if (!DBSDate.isDiaUtil(pConnection, xVencimento, -1)) {
				xVencimento = DBSDate.getProximaData(pConnection, xVencimento, 1, true, -1, pApplicationColumnName);
			}
		}
	} else {
		xVencimento = DBSDate.getProximoAniversario(pConnection, xVencimento, pPrazo * DBSNumber.toInteger(pParcelas - 1), pPeriodicidade, pUtil, -1, pApplicationColumnName);
	}
	return xVencimento;
}
/**
 * Adds minutes to a date (with time-of-day component).
 * @param pDate date and time
 * @param pMinutes minutes to add (may be negative)
 * @return shifted date
 */
public static Date getDateAddMinutes(Date pDate, int pMinutes){
	LocalDateTime xDT = new LocalDateTime(pDate);
	xDT = xDT.plusMinutes(pMinutes);
	return DBSDate.toDate(xDT.toDateTime());
}
/**
 * Adds minutes to a timestamp.
 * @param pDate date and time
 * @param pMinutes minutes to add (may be negative)
 * @return shifted date
 */
public static Date getTimestampAddMinutos(Timestamp pDate, int pMinutes){
	Date xDate = DBSDate.toDate(pDate);
	return getDateAddMinutes(xDate, pMinutes);
}
/**
 * Adds seconds to a date (with time-of-day component).
 * @param pDate date and time
 * @param pSeconds seconds to add (may be negative)
 * @return shifted date
 */
public static Date getDateAddSeconds(Date pDate, int pSeconds){
	LocalDateTime xDT = new LocalDateTime(pDate);
	xDT = xDT.plusSeconds(pSeconds);
	return DBSDate.toDate(xDT.toDateTime());
}
/**
 * Parses a string into a date.
* @param pValue String no formato YYYYMMDD. * @return Date */ public static Date toDateYYYYMMDD(String pValue) { Date xData = null; String xAno = DBSString.getSubString(pValue, 1, 4); String xMes = DBSString.getSubString(pValue, 5, 2); String xDia = DBSString.getSubString(pValue, 7, 2); xData = toDate(xDia, xMes, xAno); return xData; } // privates private static Date pvGetProximaDataUtil(Connection pConexao, Date pDataBase, int pPrazo, int pCidade, String pApplicationColumnName){ Date xDataFim = DBSDate.getDateAdd(pDataBase, pPrazo); int xDiasNaoUteis = getFinaisDeSemana(pDataBase, xDataFim) + getFeriados(pConexao, pDataBase, xDataFim, pCidade, pApplicationColumnName); if (pPrazo < 0){ xDiasNaoUteis = -xDiasNaoUteis; } if (xDiasNaoUteis!=0){ xDataFim = pvGetProximaDataUtil(pConexao,xDataFim,xDiasNaoUteis,pCidade, pApplicationColumnName); } return xDataFim; } private static Date pvToDateLong(String pData, String pDateFormat) { if (pData == null){ return null; } DateFormat xFormat = DateFormat.getDateInstance(DateFormat.LONG, new Locale("pt", "BR")); xFormat = new SimpleDateFormat(pDateFormat); Date xDate = new Date(0); xFormat.setLenient(false); try { xDate.setTime(xFormat.parse(pData).getTime()); } catch (ParseException e) { wLogger.error(e); return null; } return xDate; } }
package org.asynchttpclient.providers.netty.channel; import static org.asynchttpclient.providers.netty.util.HttpUtils.WEBSOCKET; import static org.asynchttpclient.providers.netty.util.HttpUtils.isSecure; import static org.asynchttpclient.providers.netty.util.HttpUtils.isWebSocket; import io.netty.bootstrap.Bootstrap; import io.netty.channel.Channel; import io.netty.channel.ChannelInitializer; import io.netty.channel.ChannelOption; import io.netty.channel.ChannelPipeline; import io.netty.channel.EventLoopGroup; import io.netty.channel.group.ChannelGroup; import io.netty.channel.nio.NioEventLoopGroup; import io.netty.channel.socket.nio.NioSocketChannel; import io.netty.handler.codec.http.HttpClientCodec; import io.netty.handler.codec.http.HttpContentDecompressor; import io.netty.handler.codec.http.websocketx.WebSocket08FrameDecoder; import io.netty.handler.codec.http.websocketx.WebSocket08FrameEncoder; import io.netty.handler.codec.http.websocketx.WebSocketFrameAggregator; import io.netty.handler.ssl.SslHandler; import io.netty.handler.stream.ChunkedWriteHandler; import io.netty.util.Timer; import java.io.IOException; import java.security.GeneralSecurityException; import java.util.Map.Entry; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.Semaphore; import java.util.concurrent.atomic.AtomicBoolean; import javax.net.ssl.SSLContext; import javax.net.ssl.SSLEngine; import org.asynchttpclient.AsyncHttpClientConfig; import org.asynchttpclient.ConnectionPoolPartitioning; import org.asynchttpclient.ProxyServer; import org.asynchttpclient.providers.netty.Callback; import org.asynchttpclient.providers.netty.NettyAsyncHttpProviderConfig; import org.asynchttpclient.providers.netty.channel.pool.ChannelPool; import org.asynchttpclient.providers.netty.channel.pool.ChannelPoolPartitionSelector; import org.asynchttpclient.providers.netty.channel.pool.DefaultChannelPool; import org.asynchttpclient.providers.netty.channel.pool.NoopChannelPool; import 
org.asynchttpclient.providers.netty.future.NettyResponseFuture;
import org.asynchttpclient.providers.netty.handler.HttpProtocol;
import org.asynchttpclient.providers.netty.handler.Processor;
import org.asynchttpclient.providers.netty.handler.WebSocketProtocol;
import org.asynchttpclient.providers.netty.request.NettyRequestSender;
import org.asynchttpclient.uri.Uri;
import org.asynchttpclient.util.SslUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Owns the Netty channel infrastructure for the provider: the four bootstraps
 * (plain/secure HTTP and plain/secure WebSocket), the channel pool, the open
 * channel group, and the global / per-host connection-limit semaphores.
 */
public class ChannelManager {

	private static final Logger LOGGER = LoggerFactory.getLogger(ChannelManager.class);

	// Pipeline handler names, used when building and upgrading pipelines.
	public static final String HTTP_HANDLER = "httpHandler";
	public static final String SSL_HANDLER = "sslHandler";
	public static final String HTTP_PROCESSOR = "httpProcessor";
	public static final String WS_PROCESSOR = "wsProcessor";
	public static final String DEFLATER_HANDLER = "deflater";
	public static final String INFLATER_HANDLER = "inflater";
	public static final String CHUNKED_WRITER_HANDLER = "chunkedWriter";
	public static final String WS_DECODER_HANDLER = "ws-decoder";
	public static final String WS_FRAME_AGGREGATOR = "ws-aggregator";
	public static final String WS_ENCODER_HANDLER = "ws-encoder";

	private final AsyncHttpClientConfig config;
	private final NettyAsyncHttpProviderConfig nettyConfig;
	// event loop may be externally supplied; see allowReleaseEventLoopGroup
	private final EventLoopGroup eventLoopGroup;
	// true when the group was created here and must be shut down on close()
	private final boolean allowReleaseEventLoopGroup;

	// one bootstrap per scheme/protocol combination
	private final Bootstrap plainBootstrap;
	private final Bootstrap secureBootstrap;
	private final Bootstrap webSocketBootstrap;
	private final Bootstrap secureWebSocketBootstrap;

	private final long handshakeTimeout;

	private final ChannelPool channelPool;
	// connection-limit bookkeeping (only populated when the limits are enabled)
	private final boolean maxConnectionsEnabled;
	private final Semaphore freeChannels;
	private final ChannelGroup openChannels;
	private final boolean maxConnectionsPerHostEnabled;
	private final ConcurrentHashMap<String, Semaphore> freeChannelsPerHost;
	// maps a pooled channel back to its partition key so its permit can be released
	private final ConcurrentHashMap<Channel, String> channel2KeyPool;

	// set in configureBootstraps; reused when upgrading a pipeline to WebSocket
	private Processor wsProcessor;

	public
ChannelManager(AsyncHttpClientConfig config, NettyAsyncHttpProviderConfig nettyConfig, Timer nettyTimer) {

	this.config = config;
	this.nettyConfig = nettyConfig;

	// channel pool: explicit > default (when pooling allowed) > no-op
	ChannelPool channelPool = nettyConfig.getChannelPool();
	if (channelPool == null && config.isAllowPoolingConnections()) {
		channelPool = new DefaultChannelPool(config, nettyTimer);
	} else if (channelPool == null) {
		channelPool = new NoopChannelPool();
	}
	this.channelPool = channelPool;

	maxConnectionsEnabled = config.getMaxConnections() > 0;
	maxConnectionsPerHostEnabled = config.getMaxConnectionsPerHost() > 0;

	if (maxConnectionsEnabled) {
		// release the global (and per-host) permits whenever a channel leaves the group
		openChannels = new CleanupChannelGroup("asyncHttpClient") {
			@Override
			public boolean remove(Object o) {
				boolean removed = super.remove(o);
				if (removed) {
					freeChannels.release();
					if (maxConnectionsPerHostEnabled) {
						String poolKey = channel2KeyPool.remove(Channel.class.cast(o));
						if (poolKey != null) {
							Semaphore freeChannelsForHost = freeChannelsPerHost.get(poolKey);
							if (freeChannelsForHost != null)
								freeChannelsForHost.release();
						}
					}
				}
				return removed;
			}
		};
		freeChannels = new Semaphore(config.getMaxConnections());
	} else {
		openChannels = new CleanupChannelGroup("asyncHttpClient");
		freeChannels = null;
	}

	if (maxConnectionsPerHostEnabled) {
		freeChannelsPerHost = new ConcurrentHashMap<String, Semaphore>();
		channel2KeyPool = new ConcurrentHashMap<Channel, String>();
	} else {
		freeChannelsPerHost = null;
		channel2KeyPool = null;
	}

	handshakeTimeout = nettyConfig.getHandshakeTimeout();

	// check if external EventLoopGroup is defined; only shut it down later if we own it
	allowReleaseEventLoopGroup = nettyConfig.getEventLoopGroup() == null;
	eventLoopGroup = allowReleaseEventLoopGroup ? new NioEventLoopGroup() : nettyConfig.getEventLoopGroup();
	if (!(eventLoopGroup instanceof NioEventLoopGroup))
		throw new IllegalArgumentException("Only Nio is supported");

	plainBootstrap = new Bootstrap().channel(NioSocketChannel.class).group(eventLoopGroup);
	secureBootstrap = new Bootstrap().channel(NioSocketChannel.class).group(eventLoopGroup);
	webSocketBootstrap = new Bootstrap().channel(NioSocketChannel.class).group(eventLoopGroup);
	secureWebSocketBootstrap = new Bootstrap().channel(NioSocketChannel.class).group(eventLoopGroup);

	if (config.getConnectionTimeout() > 0)
		nettyConfig.addChannelOption(ChannelOption.CONNECT_TIMEOUT_MILLIS, config.getConnectionTimeout());
	// copy every configured channel option onto all four bootstraps
	for (Entry<ChannelOption<Object>, Object> entry : nettyConfig.propertiesSet()) {
		ChannelOption<Object> key = entry.getKey();
		Object value = entry.getValue();
		plainBootstrap.option(key, value);
		webSocketBootstrap.option(key, value);
		secureBootstrap.option(key, value);
		secureWebSocketBootstrap.option(key, value);
	}
}

/**
 * Wires the protocol processors into the four bootstraps' pipelines. Must be
 * called before any connect; also captures wsProcessor for later pipeline
 * upgrades.
 * @param requestSender sender shared by the protocol handlers
 * @param closed client-closed flag handed to the protocols
 */
public void configureBootstraps(NettyRequestSender requestSender, AtomicBoolean closed) {

	HttpProtocol httpProtocol = new HttpProtocol(this, config, nettyConfig, requestSender);
	final Processor httpProcessor = new Processor(config, this, requestSender, httpProtocol);

	WebSocketProtocol wsProtocol = new WebSocketProtocol(this, config, nettyConfig, requestSender);
	wsProcessor = new Processor(config, this, requestSender, wsProtocol);

	plainBootstrap.handler(new ChannelInitializer<Channel>() {
		@Override
		protected void initChannel(Channel ch) throws Exception {
			ch.pipeline()
				.addLast(HTTP_HANDLER, newHttpClientCodec())
				.addLast(INFLATER_HANDLER, new HttpContentDecompressor())
				.addLast(CHUNKED_WRITER_HANDLER, new ChunkedWriteHandler())
				.addLast(HTTP_PROCESSOR, httpProcessor);
			// user-supplied extra initialization, if any
			if (nettyConfig.getHttpAdditionalChannelInitializer() != null)
				nettyConfig.getHttpAdditionalChannelInitializer().initChannel(ch);
		}
	});

	webSocketBootstrap.handler(new ChannelInitializer<Channel>() {
		@Override
		protected void
initChannel(Channel ch) throws Exception { ch.pipeline() .addLast(HTTP_HANDLER, newHttpClientCodec()) .addLast(WS_PROCESSOR, wsProcessor); if (nettyConfig.getWsAdditionalChannelInitializer() != null) { nettyConfig.getWsAdditionalChannelInitializer().initChannel(ch); } } }); secureBootstrap.handler(new ChannelInitializer<Channel>() { @Override protected void initChannel(Channel ch) throws Exception { ch.pipeline() .addLast(SSL_HANDLER, new SslInitializer(ChannelManager.this)) .addLast(HTTP_HANDLER, newHttpClientCodec()) .addLast(INFLATER_HANDLER, new HttpContentDecompressor()) .addLast(CHUNKED_WRITER_HANDLER, new ChunkedWriteHandler()) .addLast(HTTP_PROCESSOR, httpProcessor); if (nettyConfig.getHttpsAdditionalChannelInitializer() != null) nettyConfig.getHttpsAdditionalChannelInitializer().initChannel(ch); } }); secureWebSocketBootstrap.handler(new ChannelInitializer<Channel>() { @Override protected void initChannel(Channel ch) throws Exception { ch.pipeline() .addLast(SSL_HANDLER, new SslInitializer(ChannelManager.this)) .addLast(HTTP_HANDLER, newHttpClientCodec()) .addLast(WS_PROCESSOR, wsProcessor); if (nettyConfig.getWssAdditionalChannelInitializer() != null) { nettyConfig.getWssAdditionalChannelInitializer().initChannel(ch); } } }); } public final void tryToOfferChannelToPool(Channel channel, boolean keepAlive, String partitionId) { if (channel.isActive() && keepAlive && channel.isActive()) { LOGGER.debug("Adding key: {} for channel {}", partitionId, channel); channelPool.offer(channel, partitionId); if (maxConnectionsPerHostEnabled) channel2KeyPool.putIfAbsent(channel, partitionId); Channels.setDiscard(channel); } else { // not offered closeChannel(channel); } } public Channel poll(Uri uri, ProxyServer proxy, ConnectionPoolPartitioning connectionPoolPartitioning) { String partitionId = connectionPoolPartitioning.getPartitionId(uri, proxy); return channelPool.poll(partitionId); } public boolean removeAll(Channel connection) { return 
channelPool.removeAll(connection); } private boolean tryAcquireGlobal() { return !maxConnectionsEnabled || freeChannels.tryAcquire(); } private Semaphore getFreeConnectionsForHost(String poolKey) { Semaphore freeConnections = freeChannelsPerHost.get(poolKey); if (freeConnections == null) { // lazy create the semaphore Semaphore newFreeConnections = new Semaphore(config.getMaxConnectionsPerHost()); freeConnections = freeChannelsPerHost.putIfAbsent(poolKey, newFreeConnections); if (freeConnections == null) freeConnections = newFreeConnections; } return freeConnections; } private boolean tryAcquirePerHost(String poolKey) { return !maxConnectionsPerHostEnabled || getFreeConnectionsForHost(poolKey).tryAcquire(); } public boolean preemptChannel(String poolKey) { return channelPool.isOpen() && tryAcquireGlobal() && tryAcquirePerHost(poolKey); } public void close() { channelPool.destroy(); openChannels.close(); for (Channel channel : openChannels) { Object attribute = Channels.getAttribute(channel); if (attribute instanceof NettyResponseFuture<?>) { NettyResponseFuture<?> future = (NettyResponseFuture<?>) attribute; future.cancelTimeouts(); } } if (allowReleaseEventLoopGroup) eventLoopGroup.shutdownGracefully(); } public void closeChannel(Channel channel) { LOGGER.debug("Closing Channel {} ", channel); removeAll(channel); Channels.setDiscard(channel); Channels.silentlyCloseChannel(channel); openChannels.remove(channel); } public void abortChannelPreemption(String poolKey) { if (maxConnectionsEnabled) freeChannels.release(); if (maxConnectionsPerHostEnabled) getFreeConnectionsForHost(poolKey).release(); } public void registerOpenChannel(Channel channel) { openChannels.add(channel); } private HttpClientCodec newHttpClientCodec() { return new HttpClientCodec( nettyConfig.getHttpClientCodecMaxInitialLineLength(), nettyConfig.getHttpClientCodecMaxHeaderSize(), nettyConfig.getHttpClientCodecMaxChunkSize(), false); } public SslHandler createSslHandler(String peerHost, int 
peerPort) throws IOException, GeneralSecurityException { SSLEngine sslEngine = null; if (nettyConfig.getSslEngineFactory() != null) { sslEngine = nettyConfig.getSslEngineFactory().newSSLEngine(); } else { SSLContext sslContext = config.getSSLContext(); if (sslContext == null) sslContext = SslUtils.getInstance().getSSLContext(config.isAcceptAnyCertificate()); sslEngine = sslContext.createSSLEngine(peerHost, peerPort); sslEngine.setUseClientMode(true); } SslHandler sslHandler = new SslHandler(sslEngine); if (handshakeTimeout > 0) sslHandler.setHandshakeTimeoutMillis(handshakeTimeout); return sslHandler; } public static SslHandler getSslHandler(ChannelPipeline pipeline) { return (SslHandler) pipeline.get(SSL_HANDLER); } public static boolean isSslHandlerConfigured(ChannelPipeline pipeline) { return pipeline.get(SSL_HANDLER) != null; } public void upgradeProtocol(ChannelPipeline pipeline, String scheme, String host, int port) throws IOException, GeneralSecurityException { if (pipeline.get(HTTP_HANDLER) != null) pipeline.remove(HTTP_HANDLER); if (isSecure(scheme)) if (isSslHandlerConfigured(pipeline)) { pipeline.addAfter(SSL_HANDLER, HTTP_HANDLER, newHttpClientCodec()); } else { pipeline.addFirst(HTTP_HANDLER, newHttpClientCodec()); pipeline.addFirst(SSL_HANDLER, createSslHandler(host, port)); } else pipeline.addFirst(HTTP_HANDLER, newHttpClientCodec()); if (isWebSocket(scheme)) { pipeline.addAfter(HTTP_PROCESSOR, WS_PROCESSOR, wsProcessor); pipeline.remove(HTTP_PROCESSOR); } } public String getPartitionId(NettyResponseFuture<?> future) { return future.getConnectionPoolPartitioning().getPartitionId(future.getUri(), future.getProxyServer()); } /** * Always make sure the channel who got cached support the proper protocol. * It could only occurs when a HttpMethod. CONNECT is used against a proxy * that requires upgrading from http to https. 
*/ public void verifyChannelPipeline(ChannelPipeline pipeline, String scheme) throws IOException, GeneralSecurityException { boolean sslHandlerConfigured = isSslHandlerConfigured(pipeline); if (isSecure(scheme)) { if (!sslHandlerConfigured) pipeline.addFirst(SSL_HANDLER, new SslInitializer(this)); } else if (sslHandlerConfigured) pipeline.remove(SSL_HANDLER); } public Bootstrap getBootstrap(Uri uri, boolean useProxy, boolean useSSl) { return uri.getScheme().startsWith(WEBSOCKET) && !useProxy ? (useSSl ? secureWebSocketBootstrap : webSocketBootstrap) : (useSSl ? secureBootstrap : plainBootstrap); } public void upgradePipelineForWebSockets(ChannelPipeline pipeline) { pipeline.addAfter(HTTP_HANDLER, WS_ENCODER_HANDLER, new WebSocket08FrameEncoder(true)); pipeline.remove(HTTP_HANDLER); pipeline.addBefore(WS_PROCESSOR, WS_DECODER_HANDLER, new WebSocket08FrameDecoder(false, false, nettyConfig.getWebSocketMaxFrameSize())); pipeline.addAfter(WS_DECODER_HANDLER, WS_FRAME_AGGREGATOR, new WebSocketFrameAggregator(nettyConfig.getWebSocketMaxBufferSize())); } public final Callback newDrainCallback(final NettyResponseFuture<?> future, final Channel channel, final boolean keepAlive, final String poolKey) { return new Callback(future) { public void call() throws Exception { tryToOfferChannelToPool(channel, keepAlive, poolKey); } }; } public void drainChannel(final Channel channel, final NettyResponseFuture<?> future) { Channels.setAttribute(channel, newDrainCallback(future, channel, future.isKeepAlive(), getPartitionId(future))); } public void flushPartition(String partitionId) { channelPool.flushPartition(partitionId); } public void flushPartitions(ChannelPoolPartitionSelector selector) { channelPool.flushPartitions(selector); } }
package com.aot.engine.api; import com.aot.engine.Match; import com.aot.engine.api.json.CardPlayedJsonResponseBuilder; import com.aot.engine.api.json.GameApiJson; import com.aot.engine.board.Board; import com.aot.engine.board.Square; import com.aot.engine.cards.movements.MovementsCard; import com.google.gson.Gson; import com.google.gson.JsonObject; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; import javax.websocket.CloseReason; import javax.websocket.OnMessage; import javax.websocket.OnOpen; import javax.websocket.Session; import javax.websocket.server.PathParam; import javax.websocket.server.ServerEndpoint; import redis.clients.jedis.Jedis; import redis.clients.jedis.JedisPool; import redis.clients.jedis.JedisPoolConfig; @ServerEndpoint(value = "/api/game/{id}") public class GameApi { /** * The Match the user is playing. */ protected Match match; /** * The Board he is playing on. */ protected Board board; /** * The last card played. 
*/ protected MovementsCard playableCard; protected String gameId; private Gson gson = new Gson(); private GameApiJson.Move move; @OnOpen public void open(@PathParam("id") String id, Session session) throws IOException { gameId = id; retrieveMatch(); if (match == null) { CloseReason.CloseCode cc = () -> 404; CloseReason cr = new CloseReason(cc, "No match is running"); session.close(cr); } } private void retrieveMatch() { JedisPool pool = new JedisPool(new JedisPoolConfig(), Redis.SERVER_HOST); try (Jedis jedis = pool.getResource()) { String matchJson = jedis.hget(Redis.GAME_KEY_PART + gameId, Redis.MATCH_KEY); match = Match.fromJson(matchJson); } pool.destroy(); } @OnMessage public void gameResponse(@PathParam("id") String id, String message, Session session) throws IOException { move = gson.fromJson(message, GameApiJson.Move.class); String response; switch (move.getRequestType()) { case VIEW_POSSIBLE_SQUARES: response = getPossibleSquares(); break; case PLAY: response = play(); break; default: response = buildBadResponse("Unknow resquest type."); break; } session.getBasicRemote().sendText(response); } private String getPossibleSquares() { if (move.areInputParemetersIncorrect(match)) { String message = String .format("Wrong input parameters. CardName: %s. CardColor: %s. PlayerId: %s.", move.getCardName(), move.getCardColor(), move.getPlayerId()); return buildBadResponse(message); } else { String cardName = move.getCardName(); String cardColor = move.getCardColor(); Square currentSquare = match.getActivePlayerCurrentSquare(); if (currentSquare == null) { return buildBadResponse("Cannot get active player's current square."); } else { currentSquare.setAsOccupied(); // Get the card. 
playableCard = match.getActivePlayerDeck().getCard(cardName, cardColor); if (playableCard == null) { String message = String.format("Cannot get the selected card: %s, %s.", cardName, cardColor); return buildBadResponse(message); } else { List<String> possibleSquaresIds = new ArrayList<>(playableCard.getPossibleMovements(currentSquare)); Collections.sort(possibleSquaresIds); JsonObject jsonResponse = new JsonObject(); jsonResponse.add("possible_squares", gson.toJsonTree(possibleSquaresIds)); return gson.toJson(jsonResponse); } } } } private String buildBadResponse(String message) { return "{\"error\": \"" + message + "\"}"; } private String play() { if (move.pass()) { passThisTurn(); } else if (move.discard() && !move.areInputParemetersIncorrect(match)) { discardCard(move); } else if (incorrectInputParemeters(move)) { String message = String .format("Wrong input parameters. CardName: %s. CardColor: %s. PlayerId: %s. X: %s. Y: %s.", move.getCardName(), move.getCardColor(), move.getPlayerId(), move.getX(), move.getY()); return buildBadResponse(message); } else { String cardName = move.getCardName(); String cardColor = move.getCardColor(); Square currentSquare = match.getActivePlayerCurrentSquare(); if (currentSquare == null) { String message = "Cannot get active player's current square."; return buildBadResponse(message); } currentSquare.setAsOccupied(); // Get the card. 
playableCard = match.getActivePlayerDeck().getCard(cardName, cardColor); if (playableCard == null) { String message = String.format("Cannot get the selected card: %s, %s.", cardName, cardColor); return buildBadResponse(message); } List<String> possibleSquaresIds = new ArrayList<>(playableCard.getPossibleMovements(currentSquare)); int x = move.getX(); int y = move.getY(); String selectedSquareId = String.format("square-%s-%s", x, y); if (!possibleSquaresIds.contains(selectedSquareId)) { String message = "Invalid square."; return buildBadResponse(message); } match.playTurn(x, y, playableCard); return "{\"play\": " + CardPlayedJsonResponseBuilder.build(match, x, y) + "}"; } return null; } private String passThisTurn() { match.passThisTurn(); return CardPlayedJsonResponseBuilder.build(match); } private String discardCard(GameApiJson.Move move) { String cardName = move.getCardName(); String cardColor = move.getCardColor(); MovementsCard cardToDiscard = match.getActivePlayerDeck().getCard(cardName, cardColor); if (cardToDiscard == null) { String message = String.format("Unknown card: %s, %s", cardName, cardColor); return buildBadResponse(message); } match.discard(cardToDiscard); return CardPlayedJsonResponseBuilder.build(match); } private boolean incorrectInputParemeters(GameApiJson.Move move) { return move.areInputParemetersIncorrect(match) || incorrectCoordinates(move); } private boolean incorrectCoordinates(GameApiJson.Move move) { return move.getX() == null || move.getY() == null; } private void saveMatch() { JedisPool pool = new JedisPool(new JedisPoolConfig(), Redis.SERVER_HOST); try (Jedis jedis = pool.getResource()) { String matchJson = match.toJson(); jedis.hset(Redis.GAME_KEY_PART + gameId, Redis.MATCH_KEY, matchJson); jedis.expire(Redis.GAME_KEY_PART + gameId, Redis.GAME_EXPIRE); } pool.destroy(); } }
package nl.idgis.publisher.service.geoserver; import static org.junit.Assert.fail; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.sql.Connection; import java.sql.DriverManager; import java.sql.SQLException; import java.sql.Statement; import java.sql.Types; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Iterator; import java.util.List; import javax.xml.namespace.NamespaceContext; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerFactory; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamResult; import javax.xml.xpath.XPath; import javax.xml.xpath.XPathConstants; import javax.xml.xpath.XPathFactory; import org.apache.commons.dbcp2.BasicDataSource; import org.eclipse.jetty.plus.jndi.Resource; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.webapp.Configuration.ClassList; import org.eclipse.jetty.webapp.WebAppContext; import org.h2.api.AggregateFunction; import org.h2.server.pg.PgServer; import org.postgresql.jdbc2.AbstractJdbc2Connection; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.SAXException; import com.google.common.collect.BiMap; import com.google.common.collect.HashBiMap; import com.ning.http.util.Base64; import com.vividsolutions.jts.geom.Geometry; import akka.event.LoggingAdapter; import nl.idgis.publisher.service.geoserver.rest.DefaultGeoServerRest; import nl.idgis.publisher.service.geoserver.rest.GeoServerRest; import nl.idgis.publisher.service.geoserver.rest.ServiceType; import nl.idgis.publisher.service.geoserver.rest.Workspace; import nl.idgis.publisher.utils.FileUtils; import nl.idgis.publisher.utils.FutureUtils; public class GeoServerTestHelper { public static final int JETTY_PORT = 7000; public static final 
int PG_PORT = PgServer.DEFAULT_PORT; private Thread pgListenThread; private PgServer pgServer; private Server jettyServer; private DocumentBuilder documentBuilder; private XPath xpath; public GeoServerTestHelper() throws Exception { DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); dbf.setNamespaceAware(true); documentBuilder = dbf.newDocumentBuilder(); BiMap<String, String> namespaces = HashBiMap.create(); namespaces.put("wms", "http: namespaces.put("sld", "http: XPathFactory xf = XPathFactory.newInstance(); xpath = xf.newXPath(); xpath.setNamespaceContext(new NamespaceContext() { @Override public String getNamespaceURI(String prefix) { return namespaces.get(prefix); } @Override public String getPrefix(String namespaceURI) { return namespaces.inverse().get(namespaceURI); } @Override public Iterator<?> getPrefixes(String namespaceURI) { return Arrays.asList(getPrefix(namespaceURI)).iterator(); } }); } public static String PostGIS_Lib_Version() { return "2.1.5"; } public static Geometry ST_Force_2D(Geometry geometry) { return geometry; } public static String encode(byte[] b, String method) { return Base64.encode(b); } public static byte[] ST_Estimated_Extent(String schemaName, String tableName, String geocolumnName) { return null; } public static class ST_Extent implements AggregateFunction { @Override public void init(Connection conn) throws SQLException { } @Override public int getType(int[] inputTypes) throws SQLException { return Types.BLOB; } @Override public void add(Object value) throws SQLException { } @Override public Object getResult() throws SQLException { return null; } } public void start() throws Exception { pgServer = new PgServer(); File baseDir = new File("build/geoserver-database"); if(baseDir.exists()) { FileUtils.delete(baseDir); } pgServer.init(/*"-trace", */"-pgPort", "" + PG_PORT, "-baseDir", baseDir.getAbsolutePath()); pgServer.start(); pgListenThread = new Thread() { @Override public void run() { pgServer.listen(); } }; 
pgListenThread.start(); // enable GeoDB Connection connection = DriverManager.getConnection("jdbc:postgresql://localhost:" + GeoServerTestHelper.PG_PORT + "/test", "postgres", "postgres"); Statement stmt = connection.createStatement(); stmt.execute("create alias if not exists init_geo_db for \"geodb.GeoDB.InitGeoDB\""); stmt.execute("call init_geo_db()"); // add missing PostGIS functions for(String function : new String[]{"PostGIS_Lib_Version", "ST_Force_2D", "ST_Estimated_Extent", "encode"}) { stmt.execute("create alias " + function + " for \"" + getClass().getCanonicalName() + "." + function + "\""); } // disable ST_Extent stmt.execute("drop aggregate ST_Extent"); stmt.execute("create aggregate ST_Extent for \"" + getClass().getCanonicalName() + "$ST_Extent\""); // add 'geometry' type to pg_type stmt.execute("merge into pg_catalog.pg_type select 705 oid, 'geometry' typname, " + "(select oid from pg_catalog.pg_namespace where nspname = 'pg_catalog') typnamespace, " + "-1 typlen, 'c' typtype, 0 typbasetype, -1 typtypmod, false typnotnull, null typinput " + "from INFORMATION_SCHEMA.type_info where pos = 0"); // create missing geography_columns table stmt.execute("create table geography_columns (" + "f_table_catalog text, " + "f_table_schema text, " + "f_table_name text, " + "f_geography_column text, " + "coord_dimension integer, " + "srid integer, " + "type text " + ")"); stmt.close(); connection.close(); File dataDir = new File("build/geoserver-data"); if(dataDir.exists()) { FileUtils.delete(dataDir); } dataDir.mkdir(); String geoserverDataDir = dataDir.getAbsolutePath(); System.setProperty("GEOSERVER_DATA_DIR", geoserverDataDir); jettyServer = new Server(JETTY_PORT); ClassList classlist = ClassList.setServerDefault(jettyServer); classlist.addAfter( "org.eclipse.jetty.webapp.FragmentConfiguration", "org.eclipse.jetty.plus.webapp.EnvConfiguration", "org.eclipse.jetty.plus.webapp.PlusConfiguration"); WebAppContext context = new WebAppContext(); File webXml = new 
File("build/geoserver/WEB-INF/web.xml"); FileInputStream fis = new FileInputStream(webXml); DocumentBuilder db = DocumentBuilderFactory.newInstance().newDocumentBuilder(); Document d = db.parse(fis); fis.close(); Element resourceRef = d.createElement("resource-ref"); Element resourceRefName = d.createElement("res-ref-name"); resourceRefName.appendChild(d.createTextNode("jdbc/db")); resourceRef.appendChild(resourceRefName); Element resourceRefType = d.createElement("res-type"); resourceRefType.appendChild(d.createTextNode("javax.sql.DataSource")); resourceRef.appendChild(resourceRefType); Element resourceRefAuth = d.createElement("res-auth"); resourceRefAuth.appendChild(d.createTextNode("Container")); resourceRef.appendChild(resourceRefAuth); d.getDocumentElement().appendChild(resourceRef); Transformer t = TransformerFactory.newInstance().newTransformer(); t.transform(new DOMSource(d), new StreamResult(webXml)); context.setDescriptor(webXml.getAbsolutePath()); context.setResourceBase("build/geoserver"); context.setContextPath("/"); context.setParentLoaderPriority(false); BasicDataSource ds = new BasicDataSource() { @Override public Connection getConnection() throws SQLException { Connection c = super.getConnection(); AbstractJdbc2Connection unwrapped = c.unwrap(AbstractJdbc2Connection.class); unwrapped.getTypeInfo().addCoreType("geometry", 705, 0, "java.lang.String", 0); return c; } }; ds.setDriverClassName("org.postgresql.Driver"); ds.setUrl("jdbc:postgresql://localhost:" + GeoServerTestHelper.PG_PORT + "/test"); ds.setUsername("postgres"); ds.setPassword("postgres"); new Resource(context, "jdbc/db", ds); jettyServer.setHandler(context); jettyServer.start(); } public void stop() throws Exception { jettyServer.stop(); pgServer.stop(); pgListenThread.interrupt(); pgListenThread.join(); } public void processNodeList(NodeList nodeList, Collection<String> retval) { StringBuilder sb = new StringBuilder(); for(int i = 0; i < nodeList.getLength(); i++) { Node node = 
nodeList.item(i); if(node.getNodeType() == Node.TEXT_NODE) { sb.append(node.getTextContent()); } else { processNodeList(node.getChildNodes(), retval); } } String result = sb.toString().trim(); if(!result.isEmpty()) { retval.add(result); } } public List<String> getText(Node node) { return getText(node.getChildNodes()); } public List<String> getText(NodeList nodeList) { List<String> retval = new ArrayList<>(); processNodeList(nodeList, retval); return retval; } public NodeList getNodeList(String expression, Node node) throws Exception { return (NodeList)xpath.evaluate(expression, node, XPathConstants.NODESET); } public void notExists(String expression, Node node) throws Exception { NodeList nodeList = getNodeList(expression, node); if(nodeList.getLength() != 0) { fail("result"); } } public String getText(String expression, Node node) throws Exception { NodeList nodeList = getNodeList(expression, node); if(nodeList.getLength() == 0) { fail("no result"); } if(nodeList.getLength() > 1) { fail("multiple results"); } return nodeList.item(0).getTextContent(); } private String getServiceUrl(String serviceName, ServiceType serviceType) { return "http://localhost:" + JETTY_PORT + "/" + serviceName + "/" + serviceType.name().toLowerCase(); } public Document getCapabilities(String serviceName, ServiceType serviceType, String version) throws SAXException, IOException { return documentBuilder.parse(getServiceUrl(serviceName, serviceType) + "?request=GetCapabilities&service=" + serviceType.name().toUpperCase() + "&version=" + version); } public Document getFeature(String serviceName, String typeName) throws SAXException, IOException { return documentBuilder.parse(getServiceUrl(serviceName, ServiceType.WFS) + "?request=GetFeature&service=WFS&version=1.1.0&typeName=" + typeName); } public GeoServerRest rest(FutureUtils f, LoggingAdapter log) throws Exception { return new DefaultGeoServerRest(f, log, "http://localhost:" + GeoServerTestHelper.JETTY_PORT + "/", "admin", "geoserver"); } 
public void clean(FutureUtils f, LoggingAdapter log) throws Exception { GeoServerRest service = rest(f, log); for(Workspace workspace : service.getWorkspaces().get()) { service.deleteWorkspace(workspace).get(); } service.getStyleNames().get() .forEach(service::deleteStyle); service.close(); } }
package com.conveyal.gtfs; import com.conveyal.gtfs.error.GTFSError; import com.conveyal.gtfs.model.*; import com.conveyal.gtfs.model.Calendar; import com.conveyal.gtfs.validator.DuplicateStopsValidator; import com.conveyal.gtfs.validator.GTFSValidator; import com.conveyal.gtfs.validator.HopSpeedsReasonableValidator; import com.conveyal.gtfs.validator.MisplacedStopValidator; import com.conveyal.gtfs.validator.MissingStopCoordinatesValidator; import com.conveyal.gtfs.validator.NamesValidator; import com.conveyal.gtfs.validator.OverlappingTripsValidator; import com.conveyal.gtfs.validator.ReversedTripsValidator; import com.conveyal.gtfs.validator.TripTimesValidator; import com.conveyal.gtfs.validator.UnusedStopValidator; import com.conveyal.gtfs.stats.FeedStats; import com.conveyal.gtfs.validator.service.GeoUtils; import com.google.common.collect.*; import com.google.common.eventbus.EventBus; import com.vividsolutions.jts.algorithm.ConvexHull; import com.vividsolutions.jts.geom.*; import com.vividsolutions.jts.index.strtree.STRtree; import com.vividsolutions.jts.simplify.DouglasPeuckerSimplifier; import org.geotools.referencing.GeodeticCalculator; import org.mapdb.BTreeMap; import org.mapdb.Bind; import org.mapdb.DB; import org.mapdb.DBMaker; import org.mapdb.Fun; import org.mapdb.Fun.Tuple2; import org.mapdb.Serializer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.Closeable; import java.io.File; import java.io.FileOutputStream; import java.io.OutputStream; import java.time.LocalDate; import java.time.Period; import java.time.ZoneId; import java.time.format.DateTimeFormatter; import java.time.temporal.ChronoUnit; import java.time.temporal.TemporalUnit; import java.util.*; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ConcurrentNavigableMap; import java.util.stream.Collectors; import java.util.stream.IntStream; import java.util.stream.StreamSupport; import java.util.zip.ZipEntry; import java.util.zip.ZipFile; import 
java.util.zip.ZipOutputStream; import static com.conveyal.gtfs.util.Util.human; /** * All entities must be from a single feed namespace. * Composed of several GTFSTables. */ public class GTFSFeed implements Cloneable, Closeable { private static final Logger LOG = LoggerFactory.getLogger(GTFSFeed.class); private static final DateTimeFormatter dateFormatter = DateTimeFormatter.ofPattern("yyyyMMdd"); private DB db; public String feedId = null; // TODO make all of these Maps MapDBs so the entire GTFSFeed is persistent and uses constant memory /* Some of these should be multimaps since they don't have an obvious unique key. */ public final Map<String, Agency> agency; public final Map<String, FeedInfo> feedInfo; public final NavigableSet<Tuple2<String, Frequency>> frequencies; public final Map<String, Route> routes; public final Map<String, Stop> stops; public final Map<String, Transfer> transfers; public final BTreeMap<String, Trip> trips; public final Set<String> transitIds = new HashSet<>(); /** CRC32 of the GTFS file this was loaded from */ public long checksum; /* Map from 2-tuples of (shape_id, shape_pt_sequence) to shape points */ public final ConcurrentNavigableMap<Tuple2<String, Integer>, ShapePoint> shape_points; /* Map from 2-tuples of (trip_id, stop_sequence) to stoptimes. */ public final BTreeMap<Tuple2, StopTime> stop_times; // public final ConcurrentMap<String, Long> stopCountByStopTime; /* Map from stop (stop_id) to stopTimes tuples (trip_id, stop_sequence) */ public final NavigableSet<Tuple2<String, Tuple2>> stopStopTimeSet; public final NavigableSet<Tuple2<String, String>> tripsPerService; public final NavigableSet<Tuple2<String, String>> servicesPerDate; /* A fare is a fare_attribute and all fare_rules that reference that fare_attribute. */ public final Map<String, Fare> fares; /* A service is a calendar entry and all calendar_dates that modify that calendar entry. 
*/ public final BTreeMap<String, Service> services; /* A place to accumulate errors while the feed is loaded. Tolerate as many errors as possible and keep on loading. */ public final NavigableSet<GTFSError> errors; /* Stops spatial index which gets built lazily by getSpatialIndex() */ private transient STRtree spatialIndex; /* Convex hull of feed (based on stops) built lazily by getConvexHull() */ private transient Polygon convexHull; /* Merged stop buffers polygon built lazily by getMergedBuffers() */ private transient Geometry mergedBuffers; /* Create geometry factory to produce LineString geometries. */ GeometryFactory gf = new GeometryFactory(); /* Map routes to associated trip patterns. */ // TODO: Hash Multimapping in guava (might need dependency). public final Map<String, Pattern> patterns; // TODO bind this to map above so that it is kept up to date automatically public final Map<String, String> tripPatternMap; private boolean loaded = false; /* A place to store an event bus that is passed through constructor. */ public transient EventBus eventBus; /** * The order in which we load the tables is important for two reasons. * 1. We must load feed_info first so we know the feed ID before loading any other entities. This could be relaxed * by having entities point to the feed object rather than its ID String. * 2. Referenced entities must be loaded before any entities that reference them. This is because we check * referential integrity while the files are being loaded. This is done on the fly during loading because it allows * us to associate a line number with errors in objects that don't have any other clear identifier. * * Interestingly, all references are resolvable when tables are loaded in alphabetical order. 
*/ public void loadFromFile(ZipFile zip, String fid) throws Exception { if (this.loaded) throw new UnsupportedOperationException("Attempt to load GTFS into existing database"); // NB we don't have a single CRC for the file, so we combine all the CRCs of the component files. NB we are not // simply summing the CRCs because CRCs are (I assume) uniformly randomly distributed throughout the width of a // long, so summing them is a convolution which moves towards a Gaussian with mean 0 (i.e. more concentrated // probability in the center), degrading the quality of the hash. Instead we XOR. Assuming each bit is independent, // this will yield a nice uniformly distributed result, because when combining two bits there is an equal // probability of any input, which means an equal probability of any output. At least I think that's all correct. // Repeated XOR is not commutative but zip.stream returns files in the order they are in the central directory // of the zip file, so that's not a problem. checksum = zip.stream().mapToLong(ZipEntry::getCrc).reduce((l1, l2) -> l1 ^ l2).getAsLong(); db.getAtomicLong("checksum").set(checksum); new FeedInfo.Loader(this).loadTable(zip); // maybe we should just point to the feed object itself instead of its ID, and null out its stoptimes map after loading if (fid != null) { feedId = fid; LOG.info("Feed ID is undefined, pester maintainers to include a feed ID. Using file name {}.", feedId); // TODO log an error, ideally feeds should include a feedID } else if (feedId == null || feedId.isEmpty()) { feedId = new File(zip.getName()).getName().replaceAll("\\.zip$", ""); LOG.info("Feed ID is undefined, pester maintainers to include a feed ID. Using file name {}.", feedId); // TODO log an error, ideally feeds should include a feedID } else { LOG.info("Feed ID is '{}'.", feedId); } db.getAtomicString("feed_id").set(feedId); new Agency.Loader(this).loadTable(zip); // calendars and calendar dates are joined into services. 
This means a lot of manipulating service objects as
// they are loaded; since mapdb keys/values are immutable, load them in memory then copy them to MapDB once
// we're done loading them
Map<String, Service> serviceTable = new HashMap<>();
new Calendar.Loader(this, serviceTable).loadTable(zip);
new CalendarDate.Loader(this, serviceTable).loadTable(zip);
this.services.putAll(serviceTable);
serviceTable = null; // free memory

// Same deal
Map<String, Fare> fares = new HashMap<>();
new FareAttribute.Loader(this, fares).loadTable(zip);
new FareRule.Loader(this, fares).loadTable(zip);
this.fares.putAll(fares);
fares = null; // free memory

new Route.Loader(this).loadTable(zip);
new ShapePoint.Loader(this).loadTable(zip);
new Stop.Loader(this).loadTable(zip);
new Transfer.Loader(this).loadTable(zip);
new Trip.Loader(this).loadTable(zip);
new Frequency.Loader(this).loadTable(zip);
new StopTime.Loader(this).loadTable(zip); // comment out this line for quick testing using NL feed

// Report every error the loaders accumulated while reading the tables above.
LOG.info("{} errors", errors.size());
for (GTFSError error : errors) {
    LOG.info("{}", error);
}

// Build MapDB secondary indexes so later lookups by stop, service and date are cheap.
LOG.info("Building stop to stop times index");
Bind.secondaryKeys(stop_times, stopStopTimeSet, (key, stopTime) -> new String[] {stopTime.stop_id});
LOG.info("Building trips per service index");
Bind.secondaryKeys(trips, tripsPerService, (key, trip) -> new String[] {trip.service_id});
LOG.info("Building services per date index");
Bind.secondaryKeys(services, servicesPerDate, (key, service) -> {
    // Date span comes from the calendar row when present, otherwise from the
    // earliest/latest calendar_dates entries.
    LocalDate startDate = service.calendar != null
            ? LocalDate.parse(String.valueOf(service.calendar.start_date), dateFormatter)
            : service.calendar_dates.keySet().stream().sorted().findFirst().get();
    LocalDate endDate = service.calendar != null
            ? LocalDate.parse(String.valueOf(service.calendar.end_date), dateFormatter)
            : service.calendar_dates.keySet().stream().sorted().reduce((first, second) -> second).get();
    // end date for Period.between is not inclusive
    int daysOfService = (int) ChronoUnit.DAYS.between(startDate, endDate.plus(1, ChronoUnit.DAYS));
    // Index this service under every date on which it is actually active.
    return IntStream.range(0, daysOfService)
            .mapToObj(offset -> startDate.plusDays(offset))
            .filter(service::activeOn)
            .map(date -> date.format(dateFormatter))
            .toArray(size -> new String[size]);
});
loaded = true;
}

/** Load all GTFS tables from the given zip, keeping whatever feed ID the feed itself declares. */
public void loadFromFile(ZipFile zip) throws Exception {
    loadFromFile(zip, null);
}

/**
 * Write this feed out as a GTFS zip file at the given path.
 * Fare attributes, fare rules and shapes are not yet written (see TODO below).
 * Any failure is logged and rethrown wrapped in a RuntimeException.
 * NOTE(review): os/zip are never closed if one of the writers throws — consider try-with-resources.
 */
public void toFile (String file) {
    try {
        File out = new File(file);
        OutputStream os = new FileOutputStream(out);
        ZipOutputStream zip = new ZipOutputStream(os);

        // write everything
        // TODO: fare attributes, fare rules, shapes

        // don't write empty feed_info.txt
        if (!this.feedInfo.isEmpty()) new FeedInfo.Writer(this).writeTable(zip);
        new Agency.Writer(this).writeTable(zip);
        new Calendar.Writer(this).writeTable(zip);
        new CalendarDate.Writer(this).writeTable(zip);
        new Frequency.Writer(this).writeTable(zip);
        new Route.Writer(this).writeTable(zip);
        new Stop.Writer(this).writeTable(zip);
        new ShapePoint.Writer(this).writeTable(zip);
        new Transfer.Writer(this).writeTable(zip);
        new Trip.Writer(this).writeTable(zip);
        new StopTime.Writer(this).writeTable(zip);

        zip.close();

        LOG.info("GTFS file written");
    } catch (Exception e) {
        LOG.error("Error saving GTFS: {}", e.getMessage());
        throw new RuntimeException(e);
    }
}

// public void validate (EventBus eventBus, GTFSValidator... validators) {
// if (eventBus == null) {
// for (GTFSValidator validator : validators) {
// validator.getClass().getSimpleName();
// validator.validate(this, false);

/**
 * Run each supplied validator against this feed, logging how long each one took.
 * A validator that throws is logged and skipped; the remaining validators still run.
 * @param repair forwarded to each validator's validate(feed, repair) call
 */
public void validate (boolean repair, GTFSValidator... validators) {
    long startValidation = System.currentTimeMillis();
    for (GTFSValidator validator : validators) {
        try {
            long startValidator = System.currentTimeMillis();
            validator.validate(this, repair);
            long endValidator = System.currentTimeMillis();
            long diff = endValidator - startValidator;
            LOG.info("{} finished in {} milliseconds.", validator.getClass().getSimpleName(), diff);
        } catch (Exception e) {
            LOG.error("Could not run {} validator.", validator.getClass().getSimpleName());
            // LOG.error(e.toString());
            e.printStackTrace();
        }
    }
    long endValidation = System.currentTimeMillis();
    long total = endValidation - startValidation;
    LOG.info("{} validators completed in {} milliseconds.", validators.length, total);
}

// validate function call that should explicitly list each validator to run on GTFSFeed
/** Run the standard validator suite without repairing anything (repair = false). */
public void validate () {
    validate(false,
            new DuplicateStopsValidator(),
            new HopSpeedsReasonableValidator(),
            new MisplacedStopValidator(),
            new MissingStopCoordinatesValidator(),
            new NamesValidator(),
            new OverlappingTripsValidator(),
            new ReversedTripsValidator(),
            new TripTimesValidator(),
            new UnusedStopValidator()
    );
}

/** Run the same standard validator suite as validate(), but with repair enabled. */
public void validateAndRepair () {
    validate(true,
            new DuplicateStopsValidator(),
            new HopSpeedsReasonableValidator(),
            new MisplacedStopValidator(),
            new MissingStopCoordinatesValidator(),
            new NamesValidator(),
            new OverlappingTripsValidator(),
            new ReversedTripsValidator(),
            new TripTimesValidator(),
            new UnusedStopValidator()
    );
}

/** Compute summary statistics over this feed. */
public FeedStats calculateStats() {
    FeedStats feedStats = new FeedStats(this);
    return feedStats;
}

/** Load a feed from a GTFS zip at the given path, keeping the feed's own feed ID. */
public static GTFSFeed fromFile(String file) {
    return fromFile(file, null);
}

/**
 * Load a feed from a GTFS zip at the given path.
 * @param feedId feed ID to use; when null the ID found in the feed is kept
 */
public static GTFSFeed fromFile(String file, String feedId) {
    GTFSFeed feed = new GTFSFeed();
    ZipFile zip;
    try {
        zip = new ZipFile(file);
        if (feedId == null) {
            feed.loadFromFile(zip);
        }
        else {
            feed.loadFromFile(zip, feedId);
        }
        zip.close();
        return feed;
    } catch (Exception e) {
        LOG.error("Error loading GTFS: {}", e.getMessage());
        throw new RuntimeException(e);
    }
}

/**
* For the given trip ID, fetch all the stop times in order of increasing stop_sequence.
 * This is an efficient iteration over a tree map.
 */
public Iterable<StopTime> getOrderedStopTimesForTrip (String trip_id) {
    // Range query on the (trip_id, stop_sequence) composite key; Fun.HI sorts after any real sequence value.
    Map<Fun.Tuple2, StopTime> tripStopTimes =
        stop_times.subMap(
            Fun.t2(trip_id, null),
            Fun.t2(trip_id, Fun.HI)
        );
    return tripStopTimes.values();
}

/**
 * Lazily build and return an STRtree spatial index over all stops with non-NaN coordinates.
 * NOTE(review): this is double-checked locking on spatialIndex — verify the field is declared
 * volatile where it is defined, otherwise unsynchronized readers may observe a partially built index.
 * NOTE(review): the Coordinate is constructed as (stop_lat, stop_lon) here, while other methods in
 * this class construct (stop_lon, stop_lat) — confirm the intended axis order for index queries.
 */
public STRtree getSpatialIndex () {
    if (this.spatialIndex == null) {
        synchronized (this) {
            if (this.spatialIndex == null) {
                // build spatial index
                STRtree stopIndex = new STRtree();
                for(Stop stop : this.stops.values()) {
                    try {
                        // Skip stops with missing coordinates.
                        if (Double.isNaN(stop.stop_lat) || Double.isNaN(stop.stop_lon)) {
                            continue;
                        }
                        Coordinate stopCoord = new Coordinate(stop.stop_lat, stop.stop_lon);
                        stopIndex.insert(new Envelope(stopCoord), stop);
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
                try {
                    stopIndex.build();
                    this.spatialIndex = stopIndex;
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
    }
    return this.spatialIndex;
}

/** Get the shape for the given shape ID, or null when the shape has no points. */
public Shape getShape (String shape_id) {
    Shape shape = new Shape(this, shape_id);
    return shape.shape_dist_traveled.length > 0 ? shape : null;
}

/**
 * For the given trip ID, fetch all the stop times in order, and interpolate stop-to-stop travel times.
 * Missing times inside the trip are filled proportionally to great-circle distance between stops.
 * @param trip_id trip whose stop times to fetch; the underlying StopTime rows are cloned, not modified
 * @return the trip's stop times with every arrival/departure filled in (empty list for an unknown trip)
 * @throws FirstAndLastStopsDoNotHaveTimes when neither end of the trip has a time to anchor the interpolation
 */
public Iterable<StopTime> getInterpolatedStopTimesForTrip (String trip_id) throws FirstAndLastStopsDoNotHaveTimes {
    // clone stop times so as not to modify base GTFS structures
    StopTime[] stopTimes = StreamSupport.stream(getOrderedStopTimesForTrip(trip_id).spliterator(), false)
            .map(st -> st.clone())
            .toArray(i -> new StopTime[i]);

    // avoid having to make sure that the array has length below.
    if (stopTimes.length == 0) return Collections.emptyList();

    // first pass: set all partially filled stop times
    for (StopTime st : stopTimes) {
        if (st.arrival_time != Entity.INT_MISSING && st.departure_time == Entity.INT_MISSING) {
            st.departure_time = st.arrival_time;
        }

        if (st.arrival_time == Entity.INT_MISSING && st.departure_time != Entity.INT_MISSING) {
            st.arrival_time = st.departure_time;
        }
    }

    // quick check: ensure that first and last stops have times.
    // technically GTFS requires that both arrival_time and departure_time be filled at both the first and last stop,
    // but we are slightly more lenient and only insist that one of them be filled at both the first and last stop.
    // The meaning of the first stop's arrival time is unclear, and same for the last stop's departure time (except
    // in the case of interlining).

    // it's fine to just check departure time, as the above pass ensures that all stop times have either both
    // arrival and departure times, or neither
    if (stopTimes[0].departure_time == Entity.INT_MISSING || stopTimes[stopTimes.length - 1].departure_time == Entity.INT_MISSING) {
        throw new FirstAndLastStopsDoNotHaveTimes();
    }

    // second pass: fill complete stop times
    int startOfInterpolatedBlock = -1;
    for (int stopTime = 0; stopTime < stopTimes.length; stopTime++) {

        if (stopTimes[stopTime].departure_time == Entity.INT_MISSING && startOfInterpolatedBlock == -1) {
            startOfInterpolatedBlock = stopTime;
        }
        else if (stopTimes[stopTime].departure_time != Entity.INT_MISSING && startOfInterpolatedBlock != -1) {
            // we have found the end of the interpolated section
            int nInterpolatedStops = stopTime - startOfInterpolatedBlock;
            double totalLengthOfInterpolatedSection = 0;
            double[] lengthOfInterpolatedSections = new double[nInterpolatedStops];

            GeodeticCalculator calc = new GeodeticCalculator();

            // Measure each hop inside the interpolated section (geodesic distance).
            for (int stopTimeToInterpolate = startOfInterpolatedBlock, i = 0; stopTimeToInterpolate < stopTime; stopTimeToInterpolate++, i++) {
                Stop start = stops.get(stopTimes[stopTimeToInterpolate - 1].stop_id);
                Stop end = stops.get(stopTimes[stopTimeToInterpolate].stop_id);
                calc.setStartingGeographicPoint(start.stop_lon, start.stop_lat);
                calc.setDestinationGeographicPoint(end.stop_lon, end.stop_lat);
                double segLen = calc.getOrthodromicDistance();
                totalLengthOfInterpolatedSection += segLen;
                lengthOfInterpolatedSections[i] = segLen;
            }

            // add the segment post-last-interpolated-stop
            Stop start = stops.get(stopTimes[stopTime - 1].stop_id);
            Stop end = stops.get(stopTimes[stopTime].stop_id);
            calc.setStartingGeographicPoint(start.stop_lon, start.stop_lat);
            calc.setDestinationGeographicPoint(end.stop_lon, end.stop_lat);
            totalLengthOfInterpolatedSection += calc.getOrthodromicDistance();

            int departureBeforeInterpolation = stopTimes[startOfInterpolatedBlock - 1].departure_time;
            int arrivalAfterInterpolation = stopTimes[stopTime].arrival_time;
            int totalTime = arrivalAfterInterpolation - departureBeforeInterpolation;

            // Distribute the known total time across the section in proportion to distance covered.
            double lengthSoFar = 0;
            for (int stopTimeToInterpolate = startOfInterpolatedBlock, i = 0; stopTimeToInterpolate < stopTime; stopTimeToInterpolate++, i++) {
                lengthSoFar += lengthOfInterpolatedSections[i];

                int time = (int) (departureBeforeInterpolation + totalTime * (lengthSoFar / totalLengthOfInterpolatedSection));
                stopTimes[stopTimeToInterpolate].arrival_time = stopTimes[stopTimeToInterpolate].departure_time = time;
            }

            // we're done with this block
            startOfInterpolatedBlock = -1;
        }
    }

    return Arrays.asList(stopTimes);
}

/** Get all frequency entries for the given trip ID. */
public Collection<Frequency> getFrequencies (String trip_id) {
    // IntelliJ tells me all these casts are unnecessary, and that's also my feeling, but the code won't compile
    // without them
    return (List<Frequency>) frequencies.subSet(new Fun.Tuple2(trip_id, null), new Fun.Tuple2(trip_id, Fun.HI)).stream()
            .map(t2 -> ((Tuple2<String, Frequency>) t2).b)
            .collect(Collectors.toList());
}

/** Get the trip's stop IDs in stop_sequence order. */
public List<String> getOrderedStopListForTrip (String trip_id) {
    Iterable<StopTime> orderedStopTimes =
getOrderedStopTimesForTrip(trip_id);
    List<String> stops = Lists.newArrayList();
    // In-order traversal of StopTimes within this trip. The 2-tuple keys determine ordering.
    for (StopTime stopTime : orderedStopTimes) {
        stops.add(stopTime.stop_id);
    }
    return stops;
}

/**
 * Bin all trips by the sequence of stops they visit, give each resulting pattern a unique
 * human-readable name, and store the patterns in this.patterns, recording each trip's pattern
 * in tripPatternMap. Note this method returns void: results are stored on the feed, not returned.
 */
public void findPatterns() {
    int n = 0;

    Multimap<TripPatternKey, String> tripsForPattern = HashMultimap.create();

    for (String trip_id : trips.keySet()) {
        if (++n % 100000 == 0) {
            LOG.info("trip {}", human(n));
        }

        Trip trip = trips.get(trip_id);

        // no need to scope ID here, this is in the context of a single object
        TripPatternKey key = new TripPatternKey(trip.route_id);

        StreamSupport.stream(getOrderedStopTimesForTrip(trip_id).spliterator(), false)
                .forEach(key::addStopTime);

        tripsForPattern.put(key, trip_id);
    }

    // create an in memory list because we will rename them and they need to be immutable once they hit mapdb
    List<Pattern> patterns = tripsForPattern.asMap().entrySet()
            .stream()
            .map((e) -> new Pattern(this, e.getKey().stops, new ArrayList<>(e.getValue())))
            .collect(Collectors.toList());

    namePatterns(patterns);

    patterns.stream().forEach(p -> {
        this.patterns.put(p.pattern_id, p);
        p.associatedTrips.stream().forEach(t -> this.tripPatternMap.put(t, p.pattern_id));
    });

    LOG.info("Total patterns: {}", tripsForPattern.keySet().size());
}

/** destructively rename passed in patterns */
private void namePatterns(Collection<Pattern> patterns) {
    LOG.info("Generating unique names for patterns");

    // First index each route's patterns by headsign, first/last stop name and via-stop names,
    // then use the indexes below to find the shortest description that is unique on that route.
    Map<String, PatternNamingInfo> namingInfoForRoute = new HashMap<>();

    for (Pattern pattern : patterns) {
        if (pattern.associatedTrips.isEmpty() || pattern.orderedStops.isEmpty()) continue;

        Trip trip = trips.get(pattern.associatedTrips.get(0));

        // TODO this assumes there is only one route associated with a pattern
        String route = trip.route_id;

        // names are unique at the route level
        if (!namingInfoForRoute.containsKey(route)) namingInfoForRoute.put(route, new PatternNamingInfo());
        PatternNamingInfo namingInfo = namingInfoForRoute.get(route);

        if (trip.trip_headsign != null)
            namingInfo.headsigns.put(trip.trip_headsign, pattern);

        // use stop names not stop IDs as stops may have duplicate names and we want unique pattern names
        String fromName = stops.get(pattern.orderedStops.get(0)).stop_name;
        String toName = stops.get(pattern.orderedStops.get(pattern.orderedStops.size() - 1)).stop_name;

        namingInfo.fromStops.put(fromName, pattern);
        namingInfo.toStops.put(toName, pattern);

        // Every intermediate stop counts as a potential "via" disambiguator.
        pattern.orderedStops.stream().map(stops::get).forEach(stop -> {
            if (fromName.equals(stop.stop_name) || toName.equals(stop.stop_name)) return;

            namingInfo.vias.put(stop.stop_name, pattern);
        });

        namingInfo.patternsOnRoute.add(pattern);
    }

    // name the patterns on each route
    for (PatternNamingInfo info : namingInfoForRoute.values()) {
        for (Pattern pattern : info.patternsOnRoute) {
            pattern.name = null; // clear this now so we don't get confused later on

            String headsign = trips.get(pattern.associatedTrips.get(0)).trip_headsign;
            String fromName = stops.get(pattern.orderedStops.get(0)).stop_name;
            String toName = stops.get(pattern.orderedStops.get(pattern.orderedStops.size() - 1)).stop_name;

            /* We used to use this code but decided it is better to just always have the from/to info, with via if necessary.
            if (headsign != null && info.headsigns.get(headsign).size() == 1) {
                // easy, unique headsign, we're done
                pattern.name = headsign;
                continue;
            }

            if (info.toStops.get(toName).size() == 1) {
                pattern.name = String.format(Locale.US, "to %s", toName);
                continue;
            }

            if (info.fromStops.get(fromName).size() == 1) {
                pattern.name = String.format(Locale.US, "from %s", fromName);
                continue;
            }
            */

            // check if combination from, to is unique
            Set<Pattern> intersection = new HashSet<>(info.fromStops.get(fromName));
            intersection.retainAll(info.toStops.get(toName));

            if (intersection.size() == 1) {
                pattern.name = String.format(Locale.US, "from %s to %s", fromName, toName);
                continue;
            }

            // check for unique via stop
            pattern.orderedStops.stream().map(stops::get).forEach(stop -> {
                Set<Pattern> viaIntersection = new HashSet<>(intersection);
                viaIntersection.retainAll(info.vias.get(stop.stop_name));

                if (viaIntersection.size() == 1) {
                    pattern.name = String.format(Locale.US, "from %s to %s via %s", fromName, toName, stop.stop_name);
                }
            });

            if (pattern.name == null) {
                // no unique via, one pattern is subset of other.
                if (intersection.size() == 2) {
                    Iterator<Pattern> it = intersection.iterator();
                    Pattern p0 = it.next();
                    Pattern p1 = it.next();

                    if (p0.orderedStops.size() > p1.orderedStops.size()) {
                        p1.name = String.format(Locale.US, "from %s to %s express", fromName, toName);
                        p0.name = String.format(Locale.US, "from %s to %s local", fromName, toName);
                    } else if (p1.orderedStops.size() > p0.orderedStops.size()){
                        p0.name = String.format(Locale.US, "from %s to %s express", fromName, toName);
                        p1.name = String.format(Locale.US, "from %s to %s local", fromName, toName);
                    }
                }
            }

            if (pattern.name == null) {
                // give up
                pattern.name = String.format(Locale.US, "from %s to %s like trip %s", fromName, toName, pattern.associatedTrips.get(0));
            }
        }

        // attach a stop and trip count to each
        for (Pattern pattern : info.patternsOnRoute) {
            pattern.name = String.format(Locale.US, "%s stops %s (%s trips)",
                    pattern.orderedStops.size(), pattern.name, pattern.associatedTrips.size());
        }
    }
}

/**
 * Build a LineString that simply connects the trip's stops in order,
 * or null when the trip has fewer than two stop times.
 */
public LineString getStraightLineForStops(String trip_id) {
    CoordinateList coordinates = new CoordinateList();

    LineString ls = null;
    Trip trip = trips.get(trip_id);

    Iterable<StopTime> stopTimes;
    stopTimes = getOrderedStopTimesForTrip(trip.trip_id);
    if (Iterables.size(stopTimes) > 1) {
        for (StopTime stopTime : stopTimes) {
            Stop stop = stops.get(stopTime.stop_id);
            Double lat = stop.stop_lat;
            Double lon = stop.stop_lon;
            coordinates.add(new Coordinate(lon, lat));
        }
        ls = gf.createLineString(coordinates.toCoordinateArray());
    }
    // set ls equal to null if there is only one stopTime to avoid an exception when creating linestring
    else{
        ls = null;
    }
    return ls;
}

/**
 * Returns a trip geometry object (LineString) for a given trip id.
 * If the trip has a shape reference, this will be used for the geometry.
 * Otherwise, the ordered stoptimes will be used.
 *
 * @param trip_id trip id of desired trip geometry
 * @return the LineString representing the trip geometry.
* @see LineString
 */
public LineString getTripGeometry(String trip_id){

    CoordinateList coordinates = new CoordinateList();
    LineString ls = null;
    Trip trip = trips.get(trip_id);

    // If trip has shape_id, use it to generate geometry.
    if (trip.shape_id != null) {
        Shape shape = getShape(trip.shape_id);
        if (shape != null) ls = shape.geometry;
    }

    // Use the ordered stoptimes.
    if (ls == null) {
        ls = getStraightLineForStops(trip_id);
    }

    return ls;
}

/** Get the length of a trip in meters. */
public double getTripDistance (String trip_id, boolean straightLine) {
    return straightLine
            ? GeoUtils.getDistance(this.getStraightLineForStops(trip_id))
            : GeoUtils.getDistance(this.getTripGeometry(trip_id));
}

/** Get trip speed (using trip shape if available) in meters per second. */
public double getTripSpeed (String trip_id) {
    return getTripSpeed(trip_id, false);
}

/**
 * Get trip speed in meters per second; NaN when the trip has no stop times.
 * NOTE(review): if the last arrival equals the first departure, time is 0 and the
 * division below yields Infinity/NaN — confirm callers tolerate that.
 */
public double getTripSpeed (String trip_id, boolean straightLine) {

    StopTime firstStopTime = this.stop_times.ceilingEntry(Fun.t2(trip_id, null)).getValue();
    StopTime lastStopTime = this.stop_times.floorEntry(Fun.t2(trip_id, Fun.HI)).getValue();

    // ensure that stopTime returned matches trip id (i.e., that the trip has stoptimes)
    if (!firstStopTime.trip_id.equals(trip_id) || !lastStopTime.trip_id.equals(trip_id)) {
        return Double.NaN;
    }

    double distance = getTripDistance(trip_id, straightLine);

    // trip time (in seconds)
    int time = lastStopTime.arrival_time - firstStopTime.departure_time;

    return distance / time; // meters per second
}

/** Get list of stop_times ordered by arrival time for a given stop_id.
 */
public List<StopTime> getStopTimesForStop (String stop_id) {
    SortedSet<Tuple2<String, Tuple2>> index = this.stopStopTimeSet
            .subSet(new Tuple2<>(stop_id, null), new Tuple2(stop_id, Fun.HI));

    return index.stream()
            .map(tuple -> this.stop_times.get(tuple.b))
            .sorted((a, b) -> Integer.compare(a.arrival_time, b.arrival_time))
            .collect(Collectors.toList());
}

/** Get all trips that run on the given service, via the tripsPerService secondary index. */
public List<Trip> getTripsForService (String service_id) {
    SortedSet<Tuple2<String, String>> index = this.tripsPerService
            .subSet(new Tuple2<>(service_id, null), new Tuple2(service_id, Fun.HI));

    return index.stream()
            .map(tuple -> this.trips.get(tuple.b))
            .collect(Collectors.toList());
}

/** Get list of services for each date of service. */
public List<Service> getServicesForDate (LocalDate date) {
    String dateString = date.format(dateFormatter);
    SortedSet<Tuple2<String, String>> index = this.servicesPerDate
            .subSet(new Tuple2<>(dateString, null), new Tuple2(dateString, Fun.HI));

    return index.stream()
            .map(tuple -> this.services.get(tuple.b))
            .collect(Collectors.toList());
}

/** Get list of distinct trips (filters out multiple visits by a trip) a given stop_id. */
public List<Trip> getDistinctTripsForStop (String stop_id) {
    return getStopTimesForStop(stop_id).stream()
            .map(stopTime -> this.trips.get(stopTime.trip_id))
            .distinct()
            .collect(Collectors.toList());
}

/** Get the likely time zone for a stop using the agency of the first stop time encountered for the stop. */
public ZoneId getAgencyTimeZoneForStop (String stop_id) {
    StopTime stopTime = getStopTimesForStop(stop_id).iterator().next();

    Trip trip = this.trips.get(stopTime.trip_id);

    Route route = this.routes.get(trip.route_id);

    // NOTE(review): when agency_id is null this falls back to agency.get(0), which looks up an
    // Integer key — if the agency map is keyed by String that lookup returns null; confirm intent.
    Agency agency = route.agency_id != null ? this.agency.get(route.agency_id) : this.agency.get(0);

    return ZoneId.of(agency.agency_timezone);
}

// TODO: code review
/**
 * Lazily compute the union of small buffers around every served stop (rough service area).
 * NOTE(review): unlike getConvexHull below, this lazy initialization is NOT synchronized
 * (the synchronized block is commented out) — racy if called from multiple threads.
 */
public Geometry getMergedBuffers() {
    if (this.mergedBuffers == null) {
//            synchronized (this) {
        Collection<Geometry> polygons = new ArrayList<>();
        for (Stop stop : this.stops.values()) {
            // Only consider stops that are actually served by at least one stop time.
            if (getStopTimesForStop(stop.stop_id).isEmpty()) {
                continue;
            }
            // Skip coordinates within one degree of (0, 0) — presumably placeholder/bad data; confirm.
            if (stop.stop_lat > -1 && stop.stop_lat < 1 || stop.stop_lon > -1 && stop.stop_lon < 1) {
                continue;
            }
            Point stopPoint = gf.createPoint(new Coordinate(stop.stop_lon, stop.stop_lat));
            Polygon stopBuffer = (Polygon) stopPoint.buffer(.01);
            polygons.add(stopBuffer);
        }
        Geometry multiGeometry = gf.buildGeometry(polygons);
        this.mergedBuffers = multiGeometry.union();
        if (polygons.size() > 100) {
            this.mergedBuffers = DouglasPeuckerSimplifier.simplify(this.mergedBuffers, .001);
        }
    }
    return this.mergedBuffers;
}

/** Lazily compute (with synchronization) the convex hull of all stop coordinates. */
public Polygon getConvexHull() {
    if (this.convexHull == null) {
        synchronized (this) {
            List<Coordinate> coordinates = this.stops.values().stream().map(
                    stop -> new Coordinate(stop.stop_lon, stop.stop_lat)
            ).collect(Collectors.toList());
            Coordinate[] coords = coordinates.toArray(new Coordinate[coordinates.size()]);
            ConvexHull convexHull = new ConvexHull(coords, gf);
            this.convexHull = (Polygon) convexHull.getConvexHull();
        }
    }
    return this.convexHull;
}

/**
 * Cloning can be useful when you want to make only a few modifications to an existing feed.
 * Keep in mind that this is a shallow copy, so you'll have to create new maps in the clone for tables you want
 * to modify.
*/
@Override
public GTFSFeed clone() {
    try {
        return (GTFSFeed) super.clone();
    } catch (CloneNotSupportedException e) {
        throw new RuntimeException(e);
    }
}

/** Close the backing MapDB database; the feed must not be used afterwards. */
public void close () {
    db.close();
}

/** Thrown when we cannot interpolate stop times because the first or last stops do not have times */
public class FirstAndLastStopsDoNotHaveTimes extends Exception {
    /** do nothing */
}

/** Per-route indexes used by namePatterns to find the shortest unique name for each pattern. */
private static class PatternNamingInfo {
    Multimap<String, Pattern> headsigns = HashMultimap.create();
    Multimap<String, Pattern> fromStops = HashMultimap.create();
    Multimap<String, Pattern> toStops = HashMultimap.create();
    Multimap<String, Pattern> vias = HashMultimap.create();
    List<Pattern> patternsOnRoute = new ArrayList<>();
}

/** Create a GTFS feed in a temp file */
public GTFSFeed () {
    // calls to this must be first operation in constructor - why, Java?
    this(DBMaker.newTempFileDB()
            .transactionDisable()
            .mmapFileEnable()
            .asyncWriteEnable()
            .deleteFilesAfterClose()
            .compressionEnable()
            // .cacheSize(1024 * 1024) this bloats memory consumption
            .make()); // TODO db.close();
}

/** Create a GTFS feed connected to a particular DB, which will be created if it does not exist.
 */
public GTFSFeed (String dbFile) {
    this(DBMaker.newFileDB(new File(dbFile))
            .transactionDisable()
            .mmapFileEnable()
            .asyncWriteEnable()
            .compressionEnable()
            // .cacheSize(1024 * 1024) this bloats memory consumption
            .make()); // TODO db.close();
}

/** Wire every table/index field of this feed to its MapDB collection in the given database. */
private GTFSFeed (DB db) {
    this.db = db;

    agency = db.getTreeMap("agency");
    feedInfo = db.getTreeMap("feed_info");
    routes = db.getTreeMap("routes");
    trips = db.getTreeMap("trips");
    stop_times = db.getTreeMap("stop_times");
    frequencies = db.getTreeSet("frequencies");
    transfers = db.getTreeMap("transfers");
    stops = db.getTreeMap("stops");
    fares = db.getTreeMap("fares");
    services = db.getTreeMap("services");
    shape_points = db.getTreeMap("shape_points");

    feedId = db.getAtomicString("feed_id").get();
    checksum = db.getAtomicLong("checksum").get();

    // use Java serialization because MapDB serialization is very slow with JTS as they have a lot of references.
    // nothing else contains JTS objects
    patterns = db.createTreeMap("patterns")
            .valueSerializer(Serializer.JAVA)
            .makeOrGet();

    tripPatternMap = db.getTreeMap("patternForTrip");

    stopStopTimeSet = db.getTreeSet("stopStopTimeSet");
    tripsPerService = db.getTreeSet("tripsPerService");
    servicesPerDate = db.getTreeSet("servicesPerDate");

    errors = db.getTreeSet("errors");
}
}
package com.conveyal.gtfs;

import com.conveyal.gtfs.error.GTFSError;
import com.conveyal.gtfs.model.Agency;
import com.conveyal.gtfs.model.Calendar;
import com.conveyal.gtfs.model.CalendarDate;
import com.conveyal.gtfs.model.Fare;
import com.conveyal.gtfs.model.FareAttribute;
import com.conveyal.gtfs.model.FareRule;
import com.conveyal.gtfs.model.FeedInfo;
import com.conveyal.gtfs.model.Frequency;
import com.conveyal.gtfs.model.Route;
import com.conveyal.gtfs.model.Service;
import com.conveyal.gtfs.model.Shape;
import com.conveyal.gtfs.model.Stop;
import com.conveyal.gtfs.model.StopTime;
import com.conveyal.gtfs.model.Transfer;
import com.conveyal.gtfs.model.Trip;
import com.conveyal.gtfs.validator.GTFSValidator;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.mapdb.DB;
import org.mapdb.DBMaker;
import org.mapdb.Fun;
import org.mapdb.Fun.Tuple2;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStream;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentNavigableMap;
import java.util.zip.ZipFile;
import java.util.zip.ZipOutputStream;

import static com.conveyal.gtfs.util.Util.human;

/**
 * All entities must be from a single feed namespace.
 * Composed of several GTFSTables.
 */
public class GTFSFeed {

    private static final Logger LOG = LoggerFactory.getLogger(GTFSFeed.class);

    // Backing MapDB store for the disk-based tables below (temp file, async, compressed).
    // Must be initialized before the fields that call db.getTreeMap(...).
    DB db = DBMaker.newTempFileDB()
            .transactionDisable()
            .mmapFileEnable()
            .asyncWriteEnable()
            .compressionEnable()
            .cacheSize(200 * 1024 * 1024)
            .make(); // TODO db.close();

    public String feedId = null;

    /* Some of these should be multimaps since they don't have an obvious unique key.
     */
    public final Map<String, Agency> agency = Maps.newHashMap();
    public final Map<String, FeedInfo> feedInfo = Maps.newHashMap();
    public final Map<String, Frequency> frequencies = Maps.newHashMap();
    public final Map<String, Route> routes = Maps.newHashMap();
    public final Map<String, Stop> stops = Maps.newHashMap();
    public final Map<String, Transfer> transfers = Maps.newHashMap();
    public final Map<String, Trip> trips = Maps.newHashMap();

    /* Map from 2-tuples of (shape_id, shape_pt_sequence) to shape points */
    public final ConcurrentNavigableMap<Tuple2<String, Integer>, Shape> shapePoints = db.getTreeMap("shapes");

    /* This represents a bunch of views of the previous, one for each shape */
    public final Map<String, Map<Integer, Shape>> shapes = Maps.newHashMap();

    /* Map from 2-tuples of (trip_id, stop_sequence) to stoptimes. */
    public final ConcurrentNavigableMap<Tuple2, StopTime> stop_times = db.getTreeMap("stop_times");

    /* A fare is a fare_attribute and all fare_rules that reference that fare_attribute. */
    public final Map<String, Fare> fares = Maps.newHashMap();

    /* A service is a calendar entry and all calendar_dates that modify that calendar entry. */
    public final Map<String, Service> services = Maps.newHashMap();

    /* A place to accumulate errors while the feed is loaded. Tolerate as many errors as possible and keep on loading. */
    public List<GTFSError> errors = Lists.newArrayList();

    /**
     * The order in which we load the tables is important for two reasons.
     * 1. We must load feed_info first so we know the feed ID before loading any other entities. This could be relaxed
     * by having entities point to the feed object rather than its ID String.
     * 2. Referenced entities must be loaded before any entities that reference them. This is because we check
     * referential integrity while the files are being loaded. This is done on the fly during loading because it allows
     * us to associate a line number with errors in objects that don't have any other clear identifier.
     *
     * Interestingly, all references are resolvable when tables are loaded in alphabetical order.
     */
    private void loadFromFile(ZipFile zip) throws Exception {
        new FeedInfo.Loader(this).loadTable(zip);
        // maybe we should just point to the feed object itself instead of its ID, and null out its stoptimes map after loading
        if (feedId == null) {
            LOG.info("Feed ID is undefined."); // TODO log an error, ideally feeds should include a feedID
        }
        LOG.info("Feed ID is '{}'.", feedId);
        new Agency.Loader(this).loadTable(zip);
        new Calendar.Loader(this).loadTable(zip);
        new CalendarDate.Loader(this).loadTable(zip);
        new FareAttribute.Loader(this).loadTable(zip);
        new FareRule.Loader(this).loadTable(zip);
        new Route.Loader(this).loadTable(zip);
        new Shape.Loader(this).loadTable(zip);
        new Stop.Loader(this).loadTable(zip);
        new Transfer.Loader(this).loadTable(zip);
        new Trip.Loader(this).loadTable(zip);
        new Frequency.Loader(this).loadTable(zip);
        new StopTime.Loader(this).loadTable(zip); // comment out this line for quick testing using NL feed
        // Report everything the loaders accumulated in errors.
        LOG.info("{} errors", errors.size());
        for (GTFSError error : errors) {
            LOG.info("{}", error);
        }
    }

    /**
     * Write this feed out as a GTFS zip file at the given path.
     * Fare attributes, fare rules and shapes are not yet written (see TODO below).
     * Failures are logged and rethrown wrapped in a RuntimeException.
     */
    public void toFile (String file) {
        try {
            File out = new File(file);
            OutputStream os = new FileOutputStream(out);
            ZipOutputStream zip = new ZipOutputStream(os);

            // write everything
            // TODO: fare attributes, fare rules, shapes
            new Agency.Writer(this).writeTable(zip);
            new Calendar.Writer(this).writeTable(zip);
            new CalendarDate.Writer(this).writeTable(zip);
            new Frequency.Writer(this).writeTable(zip);
            new Route.Writer(this).writeTable(zip);
            new Stop.Writer(this).writeTable(zip);
            new Shape.Writer(this).writeTable(zip);
            new Transfer.Writer(this).writeTable(zip);
            new Trip.Writer(this).writeTable(zip);
            new StopTime.Writer(this).writeTable(zip);

            zip.close();

            LOG.info("GTFS file written");
        } catch (Exception e) {
            LOG.error("Error saving GTFS: {}", e.getMessage());
            throw new RuntimeException(e);
        }
    }

    /** Run each supplied validator against this feed without repairing (repair = false). */
    public void validate (GTFSValidator... validators) {
        for (GTFSValidator validator : validators) {
            validator.validate(this, false);
        }
    }

    /** Load a GTFSFeed from the GTFS zip at the given path; failures become RuntimeExceptions. */
    public static GTFSFeed fromFile(String file) {
        GTFSFeed feed = new GTFSFeed();
        ZipFile zip;
        try {
            zip = new ZipFile(file);
            feed.loadFromFile(zip);
            zip.close();
            return feed;
        } catch (Exception e) {
            LOG.error("Error loading GTFS: {}", e.getMessage());
            throw new RuntimeException(e);
        }
    }

    /**
     * For the given trip ID, fetch all the stop times in order of increasing stop_sequence.
     * This is an efficient iteration over a tree map.
     */
    public Iterable<StopTime> getOrderedStopTimesForTrip (String trip_id) {
        // Range query on the (trip_id, stop_sequence) composite key; Fun.HI sorts after any sequence.
        Map<Fun.Tuple2, StopTime> tripStopTimes =
            stop_times.subMap(
                Fun.t2(trip_id, null),
                Fun.t2(trip_id, Fun.HI)
            );
        return tripStopTimes.values();
    }

    /**
     * Bin all trips by the sequence of stops they visit.
     * @return A map from a list of stop IDs to a list of Trip IDs that visit those stops in that sequence.
     */
    public Map<List<String>, List<String>> findPatterns() {
        // A map from a list of stop IDs (the pattern) to a list of trip IDs which fit that pattern.
        Map<List<String>, List<String>> tripsForPattern = Maps.newHashMap();
        int n = 0;
        for (String trip_id : trips.keySet()) {
            if (++n % 100000 == 0) {
                LOG.info("trip {}", human(n));
            }
            Iterable<StopTime> orderedStopTimes = getOrderedStopTimesForTrip(trip_id);
            List<String> stops = Lists.newArrayList();
            // In-order traversal of StopTimes within this trip. The 2-tuple keys determine ordering.
            for (StopTime stopTime : orderedStopTimes) {
                stops.add(stopTime.stop_id);
            }
            // Fetch or create the tripId list for this stop pattern, then add the current trip to that list.
            List<String> trips = tripsForPattern.get(stops);
            if (trips == null) {
                trips = Lists.newArrayList();
                tripsForPattern.put(stops, trips);
            }
            trips.add(trip_id);
        }
        LOG.info("Total patterns: {}", tripsForPattern.keySet().size());
        return tripsForPattern;
    }

    /** Return the Service for the given ID, creating and registering a new one if absent. */
    public Service getOrCreateService(String serviceId) {
        Service service = services.get(serviceId);
        if (service == null) {
            service = new Service(serviceId);
            services.put(serviceId, service);
        }
        return service;
    }

    /** Return the Fare for the given ID, creating and registering a new one if absent. */
    public Fare getOrCreateFare(String fareId) {
        Fare fare = fares.get(fareId);
        if (fare == null) {
            fare = new Fare(fareId);
            fares.put(fareId, fare);
        }
        return fare;
    }

    // TODO augment with unrolled calendar, patterns, etc. before validation
}
package com.example.helloworld; import com.example.helloworld.resources.HelloWorldResource; import io.dropwizard.Application; import io.dropwizard.Configuration; import io.dropwizard.setup.Bootstrap; import io.dropwizard.setup.Environment; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Hello world! * */ public class App extends Application<Configuration> { private static final Logger LOGGER = LoggerFactory.getLogger(App.class); public static void main( String[] args ) throws Exception { new App().run(args); } @Override public void initialize(Bootstrap<Configuration> configurationBootstrap) { } @Override public void run(Configuration configuration, Environment environment) throws Exception { LOGGER.info("Method App#run() called"); System.out.println( "Hello world, by Dropwizard!" ); final HelloWorldResource resource = new HelloWorldResource(); environment.jersey().register(resource); } }
package com.hh.features;

import android.content.Context;
import android.view.View;
import android.view.inputmethod.InputMethodManager;

/**
 * Thin helpers around the system input-method service for showing and hiding
 * the soft keyboard.
 */
public class PfKeyboard {

    /** Toggle the soft keyboard on using SHOW_FORCED / HIDE_IMPLICIT_ONLY. */
    public static void show(Context pContext) {
        InputMethodManager manager =
                (InputMethodManager) pContext.getSystemService(Context.INPUT_METHOD_SERVICE);
        manager.toggleSoftInput(InputMethodManager.SHOW_FORCED, InputMethodManager.HIDE_IMPLICIT_ONLY);
    }

    /** Hide the soft keyboard attached to the given view's window. */
    public static void hide(Context pContext, View pView) {
        InputMethodManager manager =
                (InputMethodManager) pContext.getSystemService(Context.INPUT_METHOD_SERVICE);
        manager.hideSoftInputFromWindow(pView.getWindowToken(), 0);
    }
}
package com.ninty.runtime; import com.ninty.cmd.base.CmdFatory; import com.ninty.cmd.base.ICmdBase; import com.ninty.runtime.heap.*; import com.sun.jdi.NativeMethodException; public class NiThread { private int level; private NiStack stack; private NiObject currentThread; // java.lang.Thread private static NiObject mainThread; private static final String CLZ_THREAD = "java/lang/Thread"; public NiThread(int maxStackSize) { stack = new NiStack(maxStackSize); } public void generateThread(NiObject threadGroup, NiClassLoader loader, String name) { NiClass clz = loader.loadClass(CLZ_THREAD); NiObject thread = clz.newObject(); thread.setFieldInt("priority", Thread.NORM_PRIORITY); NiMethod constructor = clz.getInitMethod("(Ljava/lang/ThreadGroup;Ljava/lang/String;)V"); currentThread = thread; execMethod(constructor, new Slot(thread), new Slot(threadGroup), new Slot(NiString.newString(loader, name))); } public NiFrame popFrame() { level NiFrame frame = stack.pop(); NiFrame top = topFrame(); if (top != null) { top.restorePostion(); } return frame; } public void pushFrame(NiFrame frame) { level++; NiFrame top = topFrame(); if (top != null) { top.savePosition(); } frame.reset(); frame.setThread(this); stack.push(frame); } public static Slot execMethodDirectly(NiMethod method, Slot... params) { NiThread thread = new NiThread(64); if (mainThread == null) { throw new NullPointerException("mainThread is null"); } thread.setCurrentThread(mainThread); NiFrame returnFrame = NiFrame.RETURN_FRAME; thread.pushFrame(returnFrame); thread.execMethod(method, params); if (returnFrame.getOperandStack().getSize() > 0) { return returnFrame.getOperandStack().popSlot(); } return new Slot(); } public void execMethod(NiMethod method, Slot... 
params) { pushFrameWithParams(method, params); execThread(); } public void invokeMethod(NiMethod method) { int argsCount = method.getArgsCount(); Slot[] params = new Slot[argsCount]; OperandStack stack = topFrame().getOperandStack(); if (argsCount > 0) { for (int i = argsCount - 1; i >= 0; i params[i] = stack.popSlot(); } } pushFrameWithParams(method, params); } private void pushFrameWithParams(NiMethod method, Slot... params) { NiFrame newFrame = new NiFrame(method); pushFrame(newFrame); int argsCount = method.getArgsCount(); LocalVars slots = newFrame.getLocalVars(); if (argsCount > 0) { for (int i = argsCount - 1; i >= 0; i slots.setSlot(i, params[i]); } } } private void execThread() { try { long startTime = System.nanoTime(); System.out.println("start\n"); while (true) { NiFrame frame = topFrame(); if (frame == NiFrame.RETURN_FRAME) { break; } CodeBytes bb = frame.getCode(); byte opCode = frame.getOpCode(); ICmdBase cmd = CmdFatory.getCmd(opCode); try { // System.out.println(getT(getLevel()) + cmd.getClass().getSimpleName()); // System.out.println(getT(getLevel()) + frame); // System.out.println(); cmd.init(bb); cmd.exec(frame); } catch (Exception e) { throwException(frame, e); } if (isEmpty()) { break; } } System.out.println("\nspend " + (System.nanoTime() - startTime) / Math.pow(10, 6) + "ms"); System.out.println("\n**done**"); } catch (Exception e) { e.printStackTrace(); } } private static void throwException(NiFrame frame, Exception e) throws Exception { Class eClz = e.getClass(); if (eClz == NativeMethodException.class) { throw e; } frame.restorePostion(); // new NiClass exClz = frame.getMethod().getClz().getLoader().loadClass(eClz.getName()); // dup if (frame.getOperandStack().getSize() < 2) { frame.setOperandStack(new OperandStack(2)); } OperandStack stack = frame.getOperandStack(); NiObject exObj = exClz.newObject(); stack.clear(); stack.pushRef(exObj); stack.pushRef(exObj); // init NiMethod initMethod = exClz.getMethod("<init>", "()V"); 
frame.getThread().invokeMethod(initMethod); NiFrame topFrame = frame.getThread().topFrame(); while (topFrame != frame) { CodeBytes bb = topFrame.getCode(); byte opCode = topFrame.getOpCode(); ICmdBase cmd = CmdFatory.getCmd(opCode); cmd.init(bb); cmd.exec(topFrame); topFrame = frame.getThread().topFrame(); } // athrow ICmdBase athrow = CmdFatory.getCmd((byte) 0xbf); athrow.exec(frame); } private String getT(int level) { StringBuilder t = new StringBuilder(level); for (int i = 1; i < level; i++) { t.append('\t'); } return t.toString(); } public NiObject getCurrentThread() { return currentThread; } public static void setMainThread(NiObject mainThread) { NiThread.mainThread = mainThread; } public void setCurrentThread(NiObject currentThread) { this.currentThread = currentThread; } public NiFrame topFrame() { return stack.top(); } public boolean isEmpty() { return stack.isEmpty(); } public int getLevel() { return level; } }
package com.nucleo.easybackup; import java.io.File; import java.io.IOException; import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.LinkedList; import java.util.List; import ml.options.OptionData; import ml.options.OptionSet; import ml.options.Options; import ml.options.Options.Multiplicity; import ml.options.Options.Separator; public class Main { private static final String DATE_TIME_FORMAT = "`date '+%Y_%m_%d__%H_%M_%S'`"; public static void main(String[] args) throws IOException { Options options = new Options(args, 2); OptionSet set = options.getSet(); set.addOption("n", Separator.EQUALS, Multiplicity.ONCE); set.addOption("t", Separator.EQUALS, Multiplicity.ONCE); set.addOption("r", Separator.EQUALS, Multiplicity.ONCE); set.addOption("e", Separator.EQUALS, Multiplicity.ZERO_OR_MORE); if (!options.check()) { System.out.println(options.getCheckErrors()); System.exit(1); } String name = set.getOption("n").getResultValue(0); Type type = Type.valueOf(set.getOption("t").getResultValue(0).toUpperCase()); int retain = Integer.parseInt(set.getOption("r").getResultValue(0)); List<String> excludes = new ArrayList<>(); OptionData excludeOption = set.getOption("e"); for (int i = 0; i < excludeOption.getResultCount(); i++) { excludes.add(excludeOption.getResultValue(i)); } Path src = Paths.get(set.getData().get(0)); Path dest = Paths.get(set.getData().get(1)); File destAsFile = dest.toFile(); destAsFile.mkdirs(); String command = buildBackupCommand(type, name, src, dest, excludes); executeBackup(command); cleanOldBackups(retain, dest); } private static boolean executeBackup(String command) { System.out.println("Making backup\n" + command); try { Process p = Runtime.getRuntime().exec(new String[] { "bash", "-c", command }); new StreamPrinter(p.getErrorStream()).start(); new StreamPrinter(p.getInputStream()).start(); int exitVal = p.waitFor(); boolean success = exitVal == 0; 
System.out.println("Exit value: " + exitVal + " (" + (success ? "success" : "failure") + ")"); return success; } catch (Exception e) { e.printStackTrace(); return false; } } private static String buildBackupCommand(Type type, String name, Path src, Path dest, List<String> excludes) { String command = null; String backupFilePath = null; switch (type) { case TAR: backupFilePath = dest.resolve(name + "_" + DATE_TIME_FORMAT + ".tar.gz").toString(); command = "tar cvzf \"" + backupFilePath + "\" \"" + src + "\""; for (String exclude : excludes) { command += " --exclude=\"" + exclude + "\""; } break; case SQSH: backupFilePath = dest.resolve(name + "_" + DATE_TIME_FORMAT + ".sqsh").toString(); command = "mksquashfs \"" + src + "\" \"" + backupFilePath + "\""; if (!excludes.isEmpty()) { command += " -e"; for (String exclude : excludes) { command += " \"" + exclude + "\""; } } break; } return command; } private static void cleanOldBackups(int retain, Path dest) { List<File> backups = new LinkedList<File>(Arrays.asList(dest.toFile().listFiles())); Collections.sort(backups); while (backups.size() > retain) { System.out.println("Removing old backup\n" + backups.get(0)); backups.get(0).delete(); backups.remove(0); } } enum Type { TAR, SQSH } }
package com.qiniu.api.rs; import org.json.JSONException; import org.json.JSONStringer; import com.qiniu.api.auth.AuthException; import com.qiniu.api.auth.digest.DigestAuth; import com.qiniu.api.auth.digest.Mac; /** * The PutPolicy class used to generate a upload token. To upload a file, you * should obtain upload authorization from Qiniu cloud strage platform. By a * pair of valid accesskey and secretkey, we generate a upload token. When * upload a file, the upload token is transmissed as a part of the file stream, * or as an accessory part of the HTTP Headers. */ public class PutPolicy { /** bucketName bucketName:key */ public String scope; public String callbackUrl; public String callbackBody; public String returnUrl; public String returnBody; public String asyncOps; public String endUser; public long expires; public PutPolicy(String scope) { this.scope = scope; } private String marshal() throws JSONException { JSONStringer stringer = new JSONStringer(); stringer.object(); stringer.key("scope").value(this.scope); if (this.callbackUrl != null && this.callbackUrl.length() > 0) { stringer.key("callbackUrl").value(this.callbackUrl); } if (this.callbackBody != null && this.callbackBody.length() > 0) { stringer.key("callbackBody").value(this.callbackBody); } if (this.returnUrl != null && this.returnUrl.length() > 0) { stringer.key("returnUrl").value(this.returnUrl); } if (this.returnBody != null && this.returnBody.length() > 0) { stringer.key("returnBody").value(this.returnBody); } if (this.asyncOps != null && this.asyncOps.length() > 0) { stringer.key("asyncOps").value(this.asyncOps); } stringer.key("deadline").value(this.expires); stringer.endObject(); return stringer.toString(); } /** * makes an upload token. * @param mac * @return * @throws AuthException * @throws JSONException */ public String token(Mac mac) throws AuthException, JSONException { if (this.expires == 0) { this.expires = 3600; // 3600s, default. 
} this.expires = System.currentTimeMillis() / 1000 + expires; byte[] data = this.marshal().getBytes(); return DigestAuth.signWithData(mac, data); } }
package com.rallydev.rest.util; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * Provides utility methods for working with ref URLs. */ public class Ref { private static List<Pattern> patterns = new ArrayList<Pattern>(Arrays.asList( //dynatype collection ref (/portfolioitem/feature/1234 Pattern.compile(".*?/(\\w{2,}/\\w+)/(\\d+/\\w+)(?:\\.js\\??.*)*$"), //dynatype ref (/portfolioitem/feature/1234 Pattern.compile(".*?/(\\w{2,}/\\w+)/(\\d+)(?:\\.js\\??.*)*$"), //collection ref (/defect/1234/tasks) Pattern.compile(".*?/(\\w+/-?\\d+)/(\\w+)(?:\\.js\\??.*)*$"), //basic ref (/defect/1234) Pattern.compile(".*?/(\\w+)/(-?\\d+)(?:\\.js\\??.*)*$"), Pattern.compile(".*?/(\\w+)/(\\d+u\\d+[pw]\\d+)(?:\\.js\\??.*)*$"), //adding UUID regex support in the ref urls //dynatype collection ref (/portfolioitem/feature/81348db8-aacd-447e-8678-2fb910ae9dc3 Pattern.compile(".*?/(\\w{2,}/\\w+)/([0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}/\\w+)(?:\\.js\\??.*)*$"), //dynatype ref (/portfolioitem/feature/81348db8-aacd-447e-8678-2fb910ae9dc3 Pattern.compile(".*?/(\\w{2,}/\\w+)/([0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})(?:\\.js\\??.*)*$"), //collection ref (/defect/81348db8-aacd-447e-8678-2fb910ae9dc3/tasks) Pattern.compile(".*?/(\\w+/-?[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})/(\\w+)(?:\\.js\\??.*)*$"), //basic ref (/defect/81348db8-aacd-447e-8678-2fb910ae9dc3) Pattern.compile(".*?/(\\w+)/([0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})(?:\\.js\\??.*)*$") )); private static Matcher match(String ref) { String test = ref != null ? ref : ""; for(Pattern pattern : patterns) { Matcher m = pattern.matcher(test); if(m.matches()) { return m; } } return null; } /** * Determine whether the specified string is a valid ref url. * * @param ref the string to be tested. May be either absolute or relative, e.g. 
/defect/1234 * * @return whether the specified string is a valid ref url */ public static boolean isRef(String ref) { return match(ref) != null; } /** * Create a relative ref url from the specified ref * * @param ref the ref url to be made relative * * @return the relative ref url or null if the specified ref was not valid */ public static String getRelativeRef(String ref) { Matcher matcher = match(ref); return matcher != null ? String.format("/%s/%s", matcher.group(1), matcher.group(2)) : null; } /** * Get the type from the specified ref url * * @param ref the ref url to extract the type from * * @return the extracted type or null if the specified ref was not valid */ public static String getTypeFromRef(String ref) { Matcher matcher = match(ref); return matcher != null ? matcher.group(1) : null; } /** * Get the ObjectID from the specified ref url * * @param ref the ref url to extract the ObjectID from * * @return the extracted ObjectID or null if the specified ref was not valid */ public static String getOidFromRef(String ref) { Matcher matcher = match(ref); return matcher != null ? matcher.group(2) : null; } }
package com.shippo.net; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.UnsupportedEncodingException; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.net.URL; import java.net.URLEncoder; import java.net.URLStreamHandler; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.security.cert.Certificate; import java.security.cert.CertificateEncodingException; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Scanner; import com.google.gson.FieldNamingPolicy; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.shippo.Shippo; import com.shippo.exception.APIConnectionException; import com.shippo.exception.APIException; import com.shippo.exception.AuthenticationException; import com.shippo.exception.InvalidRequestException; import com.shippo.model.ShippoObject; import com.shippo.model.ShippoRawJsonObject; import com.shippo.model.ShippoRawJsonObjectDeserializer; public abstract class APIResource extends ShippoObject { public static final Gson GSON = new GsonBuilder() .setFieldNamingPolicy(FieldNamingPolicy.LOWER_CASE_WITH_UNDERSCORES) // find a way to not access Batch here //.registerTypeAdapter(Batch.Shipment.class, new Batch.ShipmentDeserializer()) .registerTypeAdapter(ShippoRawJsonObject.class, new ShippoRawJsonObjectDeserializer()).create(); private static String className(Class<?> clazz) { String className = clazz.getSimpleName().toLowerCase() .replace("$", " "); // Special case class names if (className.equals("address")) { return "addresse"; } else if (className.equals("customsitem")) { return "customs/item"; } else if (className.equals("customsdeclaration")) { return "customs/declaration"; } else if (className.equals("carrieraccount")) { return "carrier_account"; } else if (className.equals("batch")) { return "batche"; } else { return 
className; } } protected static String singleClassURL(Class<?> clazz) { return String.format("%s/v1/%s", Shippo.getApiBase(), className(clazz)); } protected static String classURL(Class<?> clazz) { return String.format("%ss", singleClassURL(clazz)); } protected static String classURLWithTrailingSlash(Class<?> clazz) { return String.format("%ss/", singleClassURL(clazz)); } protected static String instanceURL(Class<?> clazz, String id) throws InvalidRequestException { try { return String.format("%s/%s", classURL(clazz), urlEncode(id)); } catch (UnsupportedEncodingException e) { throw new InvalidRequestException("Unable to encode parameters to " + CHARSET + ". Please contact support@goshippo.com for assistance.", null, e); } } public static final String CHARSET = "UTF-8"; private static final String DNS_CACHE_TTL_PROPERTY_NAME = "networkaddress.cache.ttl"; /* * Set this property to override your environment's default * URLStreamHandler; Settings the property should not be needed in most * environments. 
*/ private static final String CUSTOM_URL_STREAM_HANDLER_PROPERTY_NAME = "com.shippo.net.customURLStreamHandler"; protected enum RequestMethod { GET, POST, PUT } protected static String urlEncode(String str) throws UnsupportedEncodingException { // Preserve original behavior that passing null for an object id will // lead // to us actually making a request to /v1/foo/null if (str == null) { return null; } else { return URLEncoder.encode(str, CHARSET); } } private static String urlEncodePair(String k, String v) throws UnsupportedEncodingException { return String.format("%s=%s", urlEncode(k), urlEncode(v)); } static Map<String, String> getHeaders(String apiKey) { Map<String, String> headers = new HashMap<String, String>(); headers.put("Accept-Charset", CHARSET); headers.put("User-Agent", String.format("Shippo/v1 JavaBindings/%s", Shippo.VERSION)); if (apiKey == null) { apiKey = Shippo.apiKey; } headers.put("Authorization", String.format("ShippoToken %s", apiKey)); headers.put("Accept", "application/json"); // debug headers String[] propertyNames = { "os.name", "os.version", "os.arch", "java.version", "java.vendor", "java.vm.version", "java.vm.vendor" }; Map<String, String> propertyMap = new HashMap<String, String>(); for (String propertyName : propertyNames) { propertyMap.put(propertyName, System.getProperty(propertyName)); } // propertyMap.put("bindings.version", Shippo.VERSION); // propertyMap.put("lang", "Java"); // propertyMap.put("publisher", "Shippo"); headers.put("User-Agent", GSON.toJson(propertyMap)); if (Shippo.apiVersion != null) { headers.put("Shippo-API-Version", Shippo.apiVersion); } return headers; } private static java.net.HttpURLConnection createShippoConnection( String url, String apiKey) throws IOException { URL shippoURL; String customURLStreamHandlerClassName = System.getProperty( CUSTOM_URL_STREAM_HANDLER_PROPERTY_NAME, null); if (customURLStreamHandlerClassName != null) { // instantiate the custom handler provided try { 
@SuppressWarnings("unchecked") Class<URLStreamHandler> clazz = (Class<URLStreamHandler>) Class .forName(customURLStreamHandlerClassName); Constructor<URLStreamHandler> constructor = clazz .getConstructor(); URLStreamHandler customHandler = constructor.newInstance(); shippoURL = new URL(null, url, customHandler); } catch (ClassNotFoundException e) { throw new IOException(e); } catch (SecurityException e) { throw new IOException(e); } catch (NoSuchMethodException e) { throw new IOException(e); } catch (IllegalArgumentException e) { throw new IOException(e); } catch (InstantiationException e) { throw new IOException(e); } catch (IllegalAccessException e) { throw new IOException(e); } catch (InvocationTargetException e) { throw new IOException(e); } } else { shippoURL = new URL(url); } java.net.HttpURLConnection conn = (java.net.HttpURLConnection) shippoURL .openConnection(); conn.setConnectTimeout(30 * 1000); conn.setReadTimeout(80 * 1000); conn.setUseCaches(false); for (Map.Entry<String, String> header : getHeaders(apiKey).entrySet()) { conn.setRequestProperty(header.getKey(), header.getValue()); } return conn; } private static void throwInvalidCertificateException() throws APIConnectionException { throw new APIConnectionException( "Invalid server certificate. You tried to connect to a server that has a revoked SSL certificate, which means we cannot securely send data to that server. 
Please email support@goshippo.com if you need help connecting to the correct API server."); } private static void checkSSLCert(java.net.HttpURLConnection hconn) throws IOException, APIConnectionException { if (!Shippo.getVerifySSL() && !hconn.getURL().getHost().equals("api.shippo.com")) { return; } javax.net.ssl.HttpsURLConnection conn = (javax.net.ssl.HttpsURLConnection) hconn; conn.connect(); Certificate[] certs = conn.getServerCertificates(); try { MessageDigest md = MessageDigest.getInstance("SHA-1"); byte[] der = certs[0].getEncoded(); md.update(der); byte[] digest = md.digest(); byte[] revokedCertDigest = {}; if (Arrays.equals(digest, revokedCertDigest)) { throwInvalidCertificateException(); } } catch (NoSuchAlgorithmException e) { throw new RuntimeException(e); } catch (CertificateEncodingException e) { throwInvalidCertificateException(); } } private static String formatURL(String url, String query) { if (query == null || query.isEmpty()) { return url; } else { // In some cases, URL can already contain a question mark (eg, // upcoming invoice lines) String separator = url.contains("?") ? 
"&" : "?"; return String.format("%s%s%s", url, separator, query); } } private static java.net.HttpURLConnection createGetConnection(String url, String query, String apiKey) throws IOException, APIConnectionException { if (Shippo.isDEBUG()) { System.out.println("GET URL: " + url); } String getURL = formatURL(url, query); java.net.HttpURLConnection conn = createShippoConnection(getURL, apiKey); conn.setRequestMethod("GET"); checkSSLCert(conn); return conn; } private static java.net.HttpURLConnection createPostPutConnection(String url, String query, RequestMethod method, String apiKey) throws IOException, APIConnectionException { if (Shippo.isDEBUG()) { System.out.println("POST URL: " + url); } java.net.HttpURLConnection conn = createShippoConnection(url, apiKey); conn.setDoOutput(true); conn.setRequestMethod(method.toString()); conn.setRequestProperty("Content-Type", "application/json"); checkSSLCert(conn); OutputStream output = null; try { output = conn.getOutputStream(); output.write(query.getBytes(CHARSET)); } finally { if (output != null) { output.close(); } } return conn; } private static java.net.HttpURLConnection createPutConnection(String url, String query, String apiKey) throws IOException, APIConnectionException { if (Shippo.isDEBUG()) { System.out.println("PUT URL: " + url); } java.net.HttpURLConnection conn = createShippoConnection(url, apiKey); conn.setDoOutput(true); conn.setRequestMethod("PUT"); conn.setRequestProperty("Content-Type", "application/json"); checkSSLCert(conn); OutputStream output = null; try { output = conn.getOutputStream(); output.write(query.getBytes(CHARSET)); } finally { if (output != null) { output.close(); } } return conn; } private static String mapToJson(Map<String, Object> params) { if (params == null) { return GSON.toJson(new HashMap<String, Object>()); } // hack to serialize list instead of object Object o = params.get("__list"); if (o != null) { return GSON.toJson(o); } return GSON.toJson(params); } private static 
Map<String, String> flattenParams(Map<String, Object> params) throws InvalidRequestException { if (params == null) { return new HashMap<String, String>(); } Map<String, String> flatParams = new HashMap<String, String>(); for (Map.Entry<String, Object> entry : params.entrySet()) { String key = entry.getKey(); Object value = entry.getValue(); if (value instanceof Map<?, ?>) { Map<String, Object> flatNestedMap = new HashMap<String, Object>(); Map<?, ?> nestedMap = (Map<?, ?>) value; for (Map.Entry<?, ?> nestedEntry : nestedMap.entrySet()) { flatNestedMap.put( String.format("%s[%s]", key, nestedEntry.getKey()), nestedEntry.getValue()); } flatParams.putAll(flattenParams(flatNestedMap)); } else if (value == null) { flatParams.put(key, ""); } else { flatParams.put(key, value.toString()); } } return flatParams; } // represents Errors returned as JSON @SuppressWarnings("unused") private static class ErrorContainer { private APIResource.Error error; } private static class Error { @SuppressWarnings("unused") String type; String message; @SuppressWarnings("unused") String code; String param; } @SuppressWarnings("resource") private static String getResponseBody(InputStream responseStream) throws IOException { // \A is the beginning of the stream boundary String rBody = new Scanner(responseStream, CHARSET).useDelimiter("\\A") .next(); responseStream.close(); return rBody; } private static ShippoResponse makeURLConnectionRequest( APIResource.RequestMethod method, String url, String query, String apiKey) throws APIConnectionException { java.net.HttpURLConnection conn = null; // Print Information about the Connection if (Shippo.isDEBUG()) { System.out.println("URL: " + url); System.out.println("Query: " + query); System.out.println("API Key: " + apiKey); } try { if(method.equals(RequestMethod.GET)){ conn = createGetConnection(url, query, apiKey); } else if (method.equals(RequestMethod.POST) || method.equals(RequestMethod.PUT)) { conn = createPostPutConnection(url, query, method, 
apiKey); }else{ throw new APIConnectionException( String.format( "Unrecognized HTTP method %s. " + "This indicates a bug in the Shippo bindings. Please contact " + "support@goshippo.com for assistance.", method)); } // Trigger the Request int rCode = conn.getResponseCode(); String rBody; Map<String, List<String>> headers; if (rCode >= 200 && rCode < 300) { rBody = getResponseBody(conn.getInputStream()); } else { rBody = getResponseBody(conn.getErrorStream()); } headers = conn.getHeaderFields(); // PRINT RESULTS if (Shippo.isDEBUG()) { System.out.println("Headers: "); for (Map.Entry<String, List<String>> entry : headers.entrySet()) { System.out.println(entry.getKey() + " : " + entry.getValue()); } System.out.println("Response Code: " + rCode); System.out.println("Reponse Body: " + rBody); } return new ShippoResponse(rCode, rBody, headers); } catch (IOException e) { throw new APIConnectionException( String.format( "IOException during API request to Shippo (%s): %s " + "Please check your internet connection and try again. 
If this problem persists," + "you should check Shippo's service status at http://status.goshippo.com/," + " or let us know at support@goshippo.com.", Shippo.getApiBase(), e.getMessage()), e); } finally { if (conn != null) { conn.disconnect(); } } } protected static <T> T request(APIResource.RequestMethod method, String url, Map<String, Object> params, Class<T> clazz, String apiKey) throws AuthenticationException, InvalidRequestException, APIConnectionException, APIException { String originalDNSCacheTTL = null; Boolean allowedToSetTTL = true; try { originalDNSCacheTTL = java.security.Security .getProperty(DNS_CACHE_TTL_PROPERTY_NAME); // disable DNS cache java.security.Security .setProperty(DNS_CACHE_TTL_PROPERTY_NAME, "0"); } catch (SecurityException se) { allowedToSetTTL = false; } try { return _request(method, url, params, clazz, apiKey); } finally { if (allowedToSetTTL) { if (originalDNSCacheTTL == null) { // value unspecified by implementation // DNS_CACHE_TTL_PROPERTY_NAME of -1 = cache forever java.security.Security.setProperty( DNS_CACHE_TTL_PROPERTY_NAME, "-1"); } else { java.security.Security.setProperty( DNS_CACHE_TTL_PROPERTY_NAME, originalDNSCacheTTL); } } } } protected static <T> T _request(APIResource.RequestMethod method, String url, Map<String, Object> params, Class<T> clazz, String apiKey) throws AuthenticationException, InvalidRequestException, APIConnectionException, APIException { if ((Shippo.apiKey == null || Shippo.apiKey.length() == 0) && (apiKey == null || apiKey.length() == 0)) { throw new AuthenticationException( "No API key provided. (HINT: set your API key using 'Shippo.apiKey = <API-KEY>'. " + "You can generate API keys from the Shippo web interface. 
" + "See https://goshippo.com/docs for details or email support@goshippo.com if you have questions."); } if (apiKey == null) { apiKey = Shippo.apiKey; } String query; try { query = createQuery(params, method); } catch (UnsupportedEncodingException e) { throw new InvalidRequestException("Unable to encode parameters to " + CHARSET + ". Please contact support@shippo.com for assistance.", null, e); } ShippoResponse response = makeURLConnectionRequest(method, url, query, apiKey); int rCode = response.responseCode; String rBody = response.responseBody; if (rCode < 200 || rCode >= 300) { handleAPIError(rBody, rCode); } return GSON.fromJson(rBody, clazz); } private static void handleAPIError(String rBody, int rCode) throws InvalidRequestException, AuthenticationException, APIException { // Current API does not support JSON Based error response bodies // APIResource.Error error = GSON.fromJson(rBody, // APIResource.ErrorContainer.class).error; APIResource.Error error = new Error(); error.message = rBody; error.code = rCode + ""; switch (rCode) { case 400: throw new InvalidRequestException(error.message, error.param, null); case 404: throw new InvalidRequestException(error.message, error.param, null); case 401: throw new AuthenticationException(error.message); default: throw new APIException(error.message, null); } } private static String createGETQuery(Map<String, Object> params) throws UnsupportedEncodingException, InvalidRequestException { Map<String, String> flatParams = flattenParams(params); StringBuilder queryStringBuffer = new StringBuilder(); for (Map.Entry<String, String> entry : flatParams.entrySet()) { if (queryStringBuffer.length() > 0) { queryStringBuffer.append("&"); } queryStringBuffer.append(urlEncodePair(entry.getKey(), entry.getValue())); } return queryStringBuffer.toString(); } private static String createQuery(Map<String, Object> params, APIResource.RequestMethod method) throws UnsupportedEncodingException, InvalidRequestException { switch (method) { 
case GET: return createGETQuery(params); case POST: return mapToJson(params); default: return mapToJson(params); } } }
package de.independit.scheduler.server.parser.filter;

import java.io.*;
import java.util.*;
import java.util.regex.*;
import java.lang.*;
import de.independit.scheduler.server.*;
import de.independit.scheduler.server.repository.*;
import de.independit.scheduler.server.exception.*;
import de.independit.scheduler.server.util.*;
import de.independit.scheduler.server.parser.*;

/**
 * Filter that matches an object when one of its parameters compares
 * successfully against either a literal value or a boolean expression.
 * The comparison operator is chosen at construction time; resolved
 * parameter values are cached per transaction.
 */
public class ParameterFilter extends Filter
{
	// Name of the parameter/variable to look up on the filtered object.
	final String name;
	// Literal comparison value (null when an expression is used instead).
	final Comparable value;
	// Source text of the comparison expression (null when a literal is used).
	final String expression;
	// Compiled boolean expression, or null for literal comparisons.
	final BoolExpr be;
	// Comparison operator as written by the user ("==", "<", "=~", ...).
	final String cmpop;
	// Strategy object performing the actual comparison.
	Comparer c;
	// Converts the raw (String) parameter value to the comparand's type.
	Caster cst;

	// Shared, stateless caster instances.
	final static Caster strCaster = new StringCaster();
	final static Caster intCaster = new IntegerCaster();
	final static Caster dblCaster = new DoubleCaster();

	/**
	 * Builds the filter from the parsed WITH clause.
	 *
	 * @param sysEnv server environment
	 * @param w      parsed options (name, value/expression, operator)
	 * @throws SDMSException on an unknown operator or invalid expression/pattern
	 */
	public ParameterFilter(SystemEnvironment sysEnv, WithHash w)
		throws SDMSException
	{
		super();
		name = (String) w.get(ParseStr.S_NAME);
		Object o = w.get(ParseStr.S_VALUE);
		if (o instanceof WithItem) {
			// A WithItem wraps an expression; keep its source text and compile it.
			o = ((WithItem) o).value;
			if (o instanceof String)
				expression = (String) o;
			else
				expression = o.toString();
			value = null;
			be = new BoolExpr(expression);
			be.checkConditionSyntax(sysEnv);
		} else {
			// Plain literal comparison value.
			value = (Comparable) o;
			expression = null;
			be = null;
		}
		cmpop = (String) w.get(ParseStr.S_CMPOP);
		cst = null;
		if (cmpop.equals("==")) {
			c = new EQComparer(sysEnv, value);
		} else if (cmpop.equals("!=")) {
			c = new NQComparer(sysEnv, value);
		} else if (cmpop.equals("<>")) {
			c = new NQComparer(sysEnv, value);
		} else if (cmpop.equals(">")) {
			c = new GTComparer(sysEnv, value);
		} else if (cmpop.equals(">=")) {
			c = new GEComparer(sysEnv, value);
		} else if (cmpop.equals("<")) {
			c = new LTComparer(sysEnv, value);
		} else if (cmpop.equals("<=")) {
			c = new LEComparer(sysEnv, value);
		} else if (cmpop.equals("=~")) {
			// Regex match always compares as strings.
			c = new LikeComparer(sysEnv, value != null ? value.toString() : null);
			cst = strCaster;
		} else if (cmpop.equals(ParseStr.S_LIKE)) {
			c = new LikeComparer(sysEnv, value != null ? value.toString() : null);
			cst = strCaster;
		} else if (cmpop.equals("!~")) {
			c = new NotLikeComparer(sysEnv, value != null ? value.toString() : null);
			cst = strCaster;
		} else if (cmpop.equals(ParseStr.S_NOTLIKE)) {
			c = new NotLikeComparer(sysEnv, value != null ? value.toString() : null);
			cst = strCaster;
		} else
			throw new CommonErrorException(new SDMSMessage(sysEnv, "03511031050", "Unknown comparison operator: " + cmpop));
		if (cst == null) {
			// Derive the caster from the literal's runtime type.
			if (value instanceof String) {
				cst = strCaster;
			}
			if (value instanceof Integer) {
				cst = intCaster;
			}
			if (value instanceof Double) {
				cst = dblCaster;
			}
		}
	}

	/**
	 * Evaluates the filter against the given object.
	 * Resolves the parameter value (using a per-transaction cache keyed by
	 * object id), optionally evaluates the comparison expression, and runs
	 * the configured comparison.
	 *
	 * @return true iff the parameter exists and the comparison succeeds;
	 *         false on any resolution/evaluation error (best effort)
	 */
	public boolean valid(SystemEnvironment sysEnv, SDMSProxy p)
		throws SDMSException
	{
		String parmVal = null;
		Long pId = p.getId(sysEnv);
		ParameterFilterCache parameterFilterCache;
		if (sysEnv.tx.txData.containsKey(SystemEnvironment.S_PARAMETERFILTER_CACHE)) {
			parameterFilterCache = (ParameterFilterCache)(sysEnv.tx.txData.get(SystemEnvironment.S_PARAMETERFILTER_CACHE));
			if (! parameterFilterCache.id.equals(pId)) {
				// Cache belongs to another object; repurpose it for this one.
				parameterFilterCache.id = pId;
				parameterFilterCache.parameters.clear();
			} else
				parmVal = parameterFilterCache.parameters.get(name);
		} else {
			parameterFilterCache = new ParameterFilterCache();
			parameterFilterCache.id = pId;
			sysEnv.tx.txData.put(SystemEnvironment.S_PARAMETERFILTER_CACHE, parameterFilterCache);
		}
		try {
			if (parmVal == null) {
				// Resolve the parameter depending on the concrete object type.
				if (p instanceof SDMSSubmittedEntity) {
					SDMSSubmittedEntity sme = (SDMSSubmittedEntity) p;
					try {
						parmVal = sme.getVariableValue(sysEnv, name, true, ParseStr.S_DEFAULT, (be != null));
					} catch (NotFoundException cee) {
						parmVal = null;
					}
				} else if (p instanceof SDMSSchedulingEntity) {
					SDMSSchedulingEntity se = (SDMSSchedulingEntity) p;
					try {
						parmVal = se.getVariableValue(sysEnv, name);
					} catch (NotFoundException cee) {
						parmVal = null;
					}
				} else if (p instanceof SDMSCalendar) {
					// Calendar -> scheduled event -> event -> scheduling entity.
					try {
						SDMSScheduledEvent scev = SDMSScheduledEventTable.getObject(sysEnv, ((SDMSCalendar)p).getScevId(sysEnv));
						SDMSEvent ev = SDMSEventTable.getObject(sysEnv, scev.getEvtId(sysEnv));
						SDMSSchedulingEntity se = SDMSSchedulingEntityTable.getObject(sysEnv, ev.getSeId(sysEnv));
						parmVal = se.getVariableValue(sysEnv, name);
					} catch (NotFoundException cee) {
						parmVal = null;
					}
				} else if (p instanceof SDMSScheduledEvent) {
					// Scheduled event -> event -> scheduling entity.
					try {
						SDMSEvent ev = SDMSEventTable.getObject(sysEnv, ((SDMSScheduledEvent)p).getEvtId(sysEnv));
						SDMSSchedulingEntity se = SDMSSchedulingEntityTable.getObject(sysEnv, ev.getSeId(sysEnv));
						parmVal = se.getVariableValue(sysEnv, name);
					} catch (NotFoundException cee) {
						parmVal = null;
					}
				}
				if (parmVal != null)
					parameterFilterCache.parameters.put(name, parmVal);
			}
			if (parmVal != null) {
				if (be != null) {
					// Expression comparisons only make sense for submitted entities.
					if (! (p instanceof SDMSSubmittedEntity)) {
						return false;
					}
					Object o = be.evalExpression(sysEnv, null, (SDMSSubmittedEntity) p, (SDMSSubmittedEntity) p, null, null, null);
					// Retarget comparer and caster to the evaluated type.
					if (o instanceof String) {
						cst = strCaster;
						c.setValue((String) o);
					} else if (o instanceof Long) {
						cst = intCaster;
						c.setValue((Long) o);
					} else if (o instanceof Double) {
						cst = dblCaster;
						c.setValue((Double) o);
					}
				}
				return c.cmp(cst.cast(parmVal));
			}
		} catch (Exception e) {
			// Deliberate best effort: any failure simply means "does not match".
		}
		return false;
	}

	/**
	 * Two filters are equal when name, value/expression and operator agree.
	 */
	public boolean equals(Object o)
	{
		if (o == this) return true;
		if (!(o instanceof ParameterFilter)) return false;
		ParameterFilter f;
		f = (ParameterFilter) o;
		if (!name.equals(f.name)) return false;
		if (value == null && f.value != null) return false;
		if (value != null && f.value == null) return false;
		if (value != null && value.compareTo(f.value) != 0) return false;
		// Null-safe expression comparison (previously NPE'd when only
		// f.expression was null, and asymmetrically matched when only
		// this.expression was null).
		if (expression == null && f.expression != null) return false;
		if (expression != null && f.expression == null) return false;
		if (expression != null && expression.compareTo(f.expression) != 0) return false;
		if (!cmpop.equals(f.cmpop)) return false;
		return true;
	}

	// Per-transaction cache of resolved parameter values for a single object id.
	class ParameterFilterCache
	{
		protected Long id = null;
		protected HashMap<String, String> parameters = new HashMap<String, String>();
	}
}

// Converts a raw String parameter value into a Comparable of the comparand's type.
abstract class Caster
{
	abstract Comparable cast(String v);
}

class StringCaster extends Caster
{
	Comparable cast(String v)
	{
		return v;
	}
}

class IntegerCaster extends Caster
{
	// Returns null (=> comparison fails) when the value is not a valid long.
	Comparable cast(String v)
	{
		try {
			long i = Long.parseLong(v);
			return Long.valueOf(i);
		} catch (NumberFormatException nfe) {
			return null;
		}
	}
}

class DoubleCaster extends Caster
{
	// Returns null (=> comparison fails) when the value is not a valid double.
	Comparable cast(String v)
	{
		try {
			double x = Double.parseDouble(v);
			return Double.valueOf(x);
		} catch (NumberFormatException nfe) {
			return null;
		}
	}
}

// Base comparison strategy; "wert" is the right-hand comparand.
abstract class Comparer
{
	Comparable wert;
	Comparer(SystemEnvironment sysEnv, Comparable w)
		throws SDMSException
	{
		wert = w;
	}
	public void setValue(Comparable c)
	{
		wert = c;
	}
	abstract boolean cmp(Comparable val);
}

class LTComparer extends Comparer
{
	LTComparer(SystemEnvironment sysEnv, Comparable w)
		throws SDMSException
	{
		super(sysEnv, w);
	}
	boolean cmp(Comparable val)
	{
		if (val == null) return false;
		if (val.compareTo(wert) < 0) return true;
		return false;
	}
}

class LEComparer extends Comparer
{
	LEComparer(SystemEnvironment sysEnv, Comparable w)
		throws SDMSException
	{
		super(sysEnv, w);
	}
	boolean cmp(Comparable val)
	{
		if (val == null) return false;
		if (val.compareTo(wert) <= 0) return true;
		return false;
	}
}

class GTComparer extends Comparer
{
	GTComparer(SystemEnvironment sysEnv, Comparable w)
		throws SDMSException
	{
		super(sysEnv, w);
	}
	boolean cmp(Comparable val)
	{
		if (val == null) return false;
		if (val.compareTo(wert) > 0) return true;
		return false;
	}
}

class GEComparer extends Comparer
{
	GEComparer(SystemEnvironment sysEnv, Comparable w)
		throws SDMSException
	{
		super(sysEnv, w);
	}
	boolean cmp(Comparable val)
	{
		if (val == null) return false;
		if (val.compareTo(wert) >= 0) return true;
		return false;
	}
}

class EQComparer extends Comparer
{
	EQComparer(SystemEnvironment sysEnv, Comparable w)
		throws SDMSException
	{
		super(sysEnv, w);
	}
	boolean cmp(Comparable val)
	{
		if (val == null) return false;
		if (val.compareTo(wert) == 0) return true;
		return false;
	}
}

class NQComparer extends Comparer
{
	NQComparer(SystemEnvironment sysEnv, Comparable w)
		throws SDMSException
	{
		super(sysEnv, w);
	}
	boolean cmp(Comparable val)
	{
		if (val == null) return false;
		if (val.compareTo(wert) != 0) return true;
		return false;
	}
}

class LikeComparer extends Comparer
{
	Pattern p;
	LikeComparer(SystemEnvironment sysEnv, Comparable w)
		throws SDMSException
	{
		super(sysEnv, w);
		// Bugfix: previously w.toString() was called even when w == null,
		// throwing a NullPointerException instead of leaving p == null.
		if (w == null) {
			p = null;
		} else {
			try {
				p = Pattern.compile(w.toString());
			} catch (PatternSyntaxException pse) {
				throw new CommonErrorException(new SDMSMessage(sysEnv, "03511030959", "Error in regular expression"));
			}
		}
	}
	boolean cmp(Comparable val)
	{
		if (val == null) return false;
		if (p == null) return false;
		try {
			Matcher m;
			m = p.matcher((String) val);
			return m.matches();
		} catch (Exception e) {
			// Best effort: non-String values fail the match.
		}
		return false;
	}
}

class NotLikeComparer extends Comparer
{
	Pattern p;
	NotLikeComparer(SystemEnvironment sysEnv, Comparable w)
		throws SDMSException
	{
		super(sysEnv, w);
		// Bugfix: same null guard as in LikeComparer.
		if (w == null) {
			p = null;
		} else {
			try {
				p = Pattern.compile(w.toString());
			} catch (PatternSyntaxException pse) {
				throw new CommonErrorException(new SDMSMessage(sysEnv, "03511031051", "Error in regular expression"));
			}
		}
	}
	boolean cmp(Comparable val)
	{
		if (val == null) return false;
		if (p == null) return false;
		try {
			Matcher m;
			m = p.matcher((String) val);
			return !m.matches();
		} catch (Exception e) {
			// Best effort: non-String values fail the match.
		}
		return false;
	}
}
package de.dakror.arise.net;

import java.io.File;
import java.net.BindException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.SocketException;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.concurrent.CopyOnWriteArrayList;

import de.dakror.arise.AriseServer;
import de.dakror.arise.game.Game;
import de.dakror.arise.net.packet.Packet;
import de.dakror.arise.net.packet.Packet.PacketTypes;
import de.dakror.arise.net.packet.Packet00Handshake;
import de.dakror.arise.net.packet.Packet01Login;
import de.dakror.arise.net.packet.Packet01Login.Response;
import de.dakror.arise.net.packet.Packet02Disconnect;
import de.dakror.arise.net.packet.Packet02Disconnect.Cause;
import de.dakror.arise.net.packet.Packet03World;
import de.dakror.arise.net.packet.Packet04City;
import de.dakror.arise.net.packet.Packet05Resources;
import de.dakror.arise.net.packet.Packet06Building;
import de.dakror.arise.net.packet.Packet07RenameCity;
import de.dakror.arise.net.packet.Packet08PlaceBuilding;
import de.dakror.arise.net.packet.Packet09BuildingStage;
import de.dakror.arise.net.packet.Packet10Attribute;
import de.dakror.arise.net.packet.Packet11DeconstructBuilding;
import de.dakror.arise.net.packet.Packet12UpgradeBuilding;
import de.dakror.arise.server.DBManager;
import de.dakror.arise.server.ServerUpdater;
import de.dakror.arise.settings.CFG;
import de.dakror.gamesetup.util.Helper;

/**
 * UDP game server: receives datagrams, dispatches them by packet type and
 * answers the originating client. Connected clients are tracked in a
 * {@link CopyOnWriteArrayList} so iteration is safe while clients join/leave.
 *
 * @author Dakror
 */
public class Server extends Thread
{
	public static Server currentServer;
	public static final int PORT = 14744;
	public static final int PACKETSIZE = 255; // bytes
	public static File dir;

	// Main-loop flag; cleared by shutdown().
	public boolean running;

	// Currently logged-in users; copy-on-write so packet handlers may
	// add/remove while other threads iterate.
	public CopyOnWriteArrayList<User> clients = new CopyOnWriteArrayList<>();

	ServerUpdater updater;
	DatagramSocket socket;

	/**
	 * Binds the socket, initializes database/config and starts the receive thread.
	 *
	 * @param ip local address to bind to
	 */
	public Server(InetAddress ip)
	{
		currentServer = this;
		try
		{
			dir = new File(CFG.DIR, "Server");
			dir.mkdir();
			socket = new DatagramSocket(new InetSocketAddress(ip, Server.PORT));
			setName("Server-Thread");
			setPriority(MAX_PRIORITY);
			out("Connecting to database");
			DBManager.init();
			updater = new ServerUpdater();
			out("Fetching configuration");
			Game.loadConfig();
			out("Starting server at " + socket.getLocalAddress().getHostAddress() + ":" + socket.getLocalPort());
			start();
		}
		catch (BindException e)
		{
			err("There is a server already running on this machine!");
		}
		catch (SocketException e)
		{
			e.printStackTrace();
		}
	}

	/**
	 * Receive loop: blocks on the socket and hands each datagram to parsePacket.
	 * A SocketException (thrown when shutdown() closes the socket) ends the loop
	 * silently; other errors are logged and the loop continues.
	 */
	@Override
	public void run()
	{
		running = true;

		while (running)
		{
			byte[] data = new byte[PACKETSIZE];
			DatagramPacket packet = new DatagramPacket(data, data.length);
			try
			{
				socket.receive(packet);
				parsePacket(data, packet.getAddress(), packet.getPort());
			}
			catch (SocketException e)
			{}
			catch (Exception e)
			{
				e.printStackTrace();
			}
		}
	}

	/**
	 * Decodes and dispatches one received datagram.
	 *
	 * Bugfix: in the HANDSHAKE, WORLD and RESOURCES cases the {@code break}
	 * used to sit inside the {@code try} block, so any exception fell through
	 * into the next case (LOGIN, CITY, BUILDING) and re-parsed the datagram
	 * as the wrong packet type. The breaks now follow the catch blocks.
	 *
	 * @param data    raw packet payload (first byte is the packet type)
	 * @param address sender address
	 * @param port    sender port
	 */
	public void parsePacket(byte[] data, InetAddress address, int port)
	{
		PacketTypes type = Packet.lookupPacket(data[0]);
		User user = getUserForIP(address, port);
		if (user != null) user.interact();

		// Optional GUI traffic log (server admin window).
		if (AriseServer.trafficLog != null)
		{
			AriseServer.trafficLog.setText(AriseServer.trafficLog.getText() + new SimpleDateFormat("'['HH:mm:ss']: '").format(new Date()) + "< " + address.getHostAddress() + ":" + port + " " + type.name() + "\n");
			AriseServer.trafficLog.setCaretPosition(AriseServer.trafficLog.getDocument().getLength());
		}

		switch (type)
		{
			case INVALID:
			{
				err("Received invalid packet: " + new String(data));
				break;
			}
			case HANDSHAKE:
			{
				try
				{
					sendPacket(new Packet00Handshake(), user == null ? new User(0, 0, address, port) : user);
					if (user == null) out("Shook hands with: " + address.getHostAddress() + ":" + port);
				}
				catch (Exception e)
				{
					e.printStackTrace();
				}
				break; // moved out of the try: no fall-through into LOGIN on error
			}
			case LOGIN:
			{
				try
				{
					Packet01Login p = new Packet01Login(data);
					// Credentials are verified against the external account service.
					String s = Helper.getURLContent(new URL("http://dakror.de/mp-api/login_noip.php?username=" + p.getUsername() + "&password=" + p.getPwdMd5()));
					boolean loggedIn = s.contains("true");
					boolean worldExists = DBManager.getWorldForId(p.getWorldId()).getId() != -1;
					if (loggedIn && worldExists)
					{
						String[] parts = s.split(":");
						User u = new User(Integer.parseInt(parts[1].trim()), p.getWorldId(), address, port);
						boolean alreadyLoggedIn = getUserForId(u.getId()) != null;
						if (alreadyLoggedIn)
						{
							out("Refused login of " + address.getHostAddress() + ":" + port + " (" + Response.ALREADY_LOGGED_IN.name() + ")");
							sendPacket(new Packet01Login(p.getUsername(), 0, p.getWorldId(), Response.ALREADY_LOGGED_IN), u);
						}
						else
						{
							out("User " + parts[2].trim() + " (#" + u.getId() + ")" + " logged in on world #" + p.getWorldId() + ".");
							sendPacket(new Packet01Login(parts[2], u.getId(), p.getWorldId(), Response.LOGIN_OK), u);
							clients.add(u);
						}
					}
					else
					{
						out("Refused login of " + address.getHostAddress() + ":" + port + " (" + (!loggedIn ? Response.BAD_LOGIN : Response.BAD_WORLD_ID).name() + ")");
						sendPacket(new Packet01Login(p.getUsername(), 0, p.getWorldId(), !loggedIn ? Response.BAD_LOGIN : Response.BAD_WORLD_ID), new User(0, 0, address, port));
					}
				}
				catch (Exception e)
				{
					e.printStackTrace();
				}
				break;
			}
			case DISCONNECT:
			{
				Packet02Disconnect p = new Packet02Disconnect(data);
				for (User u : clients)
				{
					if (u.getId() == p.getUserId() && address.equals(u.getIP()))
					{
						try
						{
							sendPacket(new Packet02Disconnect(0, Cause.SERVER_CONFIRMED), u);
							out("User disconnected: #" + u.getId() + " (" + p.getCause().name() + ")");
							// Safe during iteration: clients is copy-on-write.
							clients.remove(u);
						}
						catch (Exception e)
						{
							e.printStackTrace();
						}
					}
				}
				break;
			}
			case WORLD:
			{
				try
				{
					Packet03World p = new Packet03World(data);
					boolean spawn = DBManager.spawnPlayer(p.getId(), user);
					out("Player's first visit on world? " + spawn);
					sendPacket(DBManager.getWorldForId(p.getId()), user);
					sendPacketToAllClientsExceptOne(DBManager.getSpawnCity(p.getId(), user.getId()), user);
				}
				catch (Exception e)
				{
					e.printStackTrace();
				}
				break; // moved out of the try: no fall-through into CITY on error
			}
			case CITY:
			{
				Packet04City p = new Packet04City(data);
				try
				{
					for (Packet04City packet : DBManager.getCities(p.getWorldId()))
						sendPacket(packet, user);
				}
				catch (Exception e)
				{
					e.printStackTrace();
				}
				break;
			}
			case RESOURCES:
			{
				try
				{
					Packet05Resources p = new Packet05Resources(data);
					if (DBManager.isCityFromUser(p.getCityId(), user)) sendPacket(new Packet05Resources(p.getCityId(), DBManager.getCityResources(p.getCityId())), user);
				}
				catch (Exception e)
				{
					e.printStackTrace();
				}
				break; // moved out of the try: no fall-through into BUILDING on error
			}
			case BUILDING:
			{
				Packet06Building p = new Packet06Building(data);
				// Building type 0 is the "send me all buildings" request.
				if (p.getBuildingType() == 0 && DBManager.isCityFromUser(p.getCityId(), user))
				{
					try
					{
						for (Packet06Building packet : DBManager.getCityBuildings(p.getCityId()))
							sendPacket(packet, user);
					}
					catch (Exception e)
					{
						e.printStackTrace();
					}
				}
				break;
			}
			case RENAMECITY:
			{
				Packet07RenameCity p = new Packet07RenameCity(data);
				if (DBManager.isCityFromUser(p.getCityId(), user))
				{
					boolean worked = DBManager.renameCity(p.getCityId(), p.getNewName(), user);
					try
					{
						// "#false#" signals a rejected rename to the client.
						sendPacket(new Packet07RenameCity(p.getCityId(), worked ? p.getNewName() : "#false#"), getUserForIP(address, port));
					}
					catch (Exception e)
					{
						e.printStackTrace();
					}
				}
				break;
			}
			case PLACEBUILDING:
			{
				Packet08PlaceBuilding p = new Packet08PlaceBuilding(data);
				if (DBManager.isCityFromUser(p.getCityId(), user))
				{
					int id = DBManager.placeBuilding(p.getCityId(), p.getBuildingType(), p.getX(), p.getY());
					if (id != 0)
					{
						try
						{
							sendPacket(DBManager.getCityBuilding(p.getCityId(), id), user);
							sendPacket(new Packet05Resources(p.getCityId(), DBManager.getCityResources(p.getCityId())), user);
						}
						catch (Exception e)
						{
							e.printStackTrace();
						}
					}
				}
				break;
			}
			case ATTRIBUTE:
			{
				Packet10Attribute p = new Packet10Attribute(data);
				if (user != null)
				{
					// Client announces which city it is currently viewing.
					if (p.getKey().equals("city")) user.setCity(Integer.parseInt(p.getValue()));
				}
				break;
			}
			case DECONSTRUCTBUILDING:
			{
				Packet11DeconstructBuilding p = new Packet11DeconstructBuilding(data);
				if (DBManager.isCityFromUser(p.getCityId(), user))
				{
					int timeleft = 0;
					// -1 signals that the deconstruction was rejected.
					if ((timeleft = DBManager.deconstructBuilding(p.getCityId(), p.getBuildingId())) > -1)
					{
						try
						{
							sendPacket(new Packet09BuildingStage(p.getBuildingId(), 2, timeleft), user);
						}
						catch (Exception e)
						{
							e.printStackTrace();
						}
					}
				}
				break;
			}
			case UPGRADEBUILDING:
			{
				Packet12UpgradeBuilding p = new Packet12UpgradeBuilding(data);
				if (DBManager.isCityFromUser(p.getCityId(), user))
				{
					int timeleft = 0;
					// -1 signals that the upgrade was rejected.
					if ((timeleft = DBManager.upgradeBuilding(p.getCityId(), p.getBuildingId())) > -1)
					{
						try
						{
							sendPacket(new Packet09BuildingStage(p.getBuildingId(), 3, timeleft), user);
						}
						catch (Exception e)
						{
							e.printStackTrace();
						}
					}
				}
				break;
			}
			default:
				err("Received unhandled packet (" + address.getHostAddress() + ":" + port + ") " + type + " [" + Packet.readData(data) + "]");
		}
	}

	/** Sends the packet to every connected client. */
	public void sendPacketToAllClients(Packet p) throws Exception
	{
		for (User u : clients)
			sendPacket(p, u);
	}

	/**
	 * Sends the packet to every connected client except one. A not-yet
	 * logged-in exception (id 0) is matched by address and port instead.
	 */
	public void sendPacketToAllClientsExceptOne(Packet p, User exception) throws Exception
	{
		for (User u : clients)
		{
			if (exception.getId() == 0)
			{
				if (exception.getIP().equals(u.getIP()) && exception.getPort() == u.getPort()) continue;
			}
			else if (exception.getId() == u.getId()) continue;

			sendPacket(p, u);
		}
	}

	/**
	 * Sends one packet to one user and mirrors it into the GUI traffic log.
	 *
	 * @throws NullPointerException when u is null
	 */
	public void sendPacket(Packet p, User u) throws Exception
	{
		if (u == null) throw new NullPointerException("user = null");

		byte[] data = p.getData();
		DatagramPacket packet = new DatagramPacket(data, data.length, u.getIP(), u.getPort());
		socket.send(packet);

		if (AriseServer.trafficLog != null)
		{
			AriseServer.trafficLog.setText(AriseServer.trafficLog.getText() + new SimpleDateFormat("'['HH:mm:ss']: '").format(new Date()) + "> " + u.getIP().getHostAddress() + ":" + u.getPort() + " " + p.getType().name() + "\n");
			AriseServer.trafficLog.setCaretPosition(AriseServer.trafficLog.getDocument().getLength());
		}
	}

	/** Looks up a connected user by address and port; null when unknown. */
	public User getUserForIP(InetAddress address, int port)
	{
		for (User u : clients)
			if (u.getIP().equals(address) && u.getPort() == port) return u;

		return null;
	}

	/** Looks up a connected user by id; null when not logged in. */
	public User getUserForId(int id)
	{
		for (User u : clients)
			if (u.getId() == id) return u;

		return null;
	}

	/**
	 * Notifies all clients, stops the receive loop and closes the socket
	 * (which unblocks the pending receive() in run()).
	 */
	public void shutdown()
	{
		try
		{
			sendPacketToAllClients(new Packet02Disconnect(0, Packet02Disconnect.Cause.SERVER_CLOSED));
		}
		catch (Exception e)
		{
			e.printStackTrace();
		}
		running = false;
		socket.close();
	}

	/**
	 * Timestamped stdout logging. A fresh SimpleDateFormat per call because
	 * SimpleDateFormat is not thread-safe and this is called from several threads.
	 */
	public static void out(Object... p)
	{
		String timestamp = new SimpleDateFormat("'['HH:mm:ss']: '").format(new Date());
		if (p.length == 1) System.out.println(timestamp + p[0]);
		else System.out.println(timestamp + Arrays.toString(p));
	}

	/** Timestamped stderr logging; see out() for the per-call formatter note. */
	public static void err(Object... p)
	{
		String timestamp = new SimpleDateFormat("'['HH:mm:ss']: '").format(new Date());
		if (p.length == 1) System.err.println(timestamp + p[0]);
		else System.err.println(timestamp + Arrays.toString(p));
	}
}
package com.carrotsearch.hppcrt;

/**
 * Something implementing a map interface (int-int).
 * (or OBJ - int)
 *
 * Benchmark harness base: concrete subclasses wrap a map implementation and
 * expose timed contains/remove/put runs over key sets prepared by setup().
 */
public abstract class MapImplementation<IMPLEM>
{
    /**
     * Hash-quality presets. The shift is applied to ComparableInt.hashCode():
     * NORMAL leaves the hash intact, BAD discards 6 low bits to provoke
     * collisions on purpose.
     */
    public enum HASH_QUALITY
    {
        NORMAL(0),
        BAD(6);

        public final int shift;

        private HASH_QUALITY(final int bitshift)
        {
            this.shift = bitshift;
        }
    }

    /**
     * A Int holder with variable Hash Qualities.
     * @author Vincent
     */
    public static class ComparableInt implements Comparable<ComparableInt>
    {
        public int value;
        public final int bitshift;

        public ComparableInt(final int initValue, final HASH_QUALITY quality)
        {
            this.value = initValue;
            this.bitshift = quality.shift;
        }

        @Override
        public int compareTo(final ComparableInt other)
        {
            // Same -1 / 0 / 1 ordering as a hand-rolled comparison.
            return Integer.compare(this.value, other.value);
        }

        @Override
        public int hashCode()
        {
            // Deliberately shifted to emulate a hash of the configured quality.
            return this.value << this.bitshift;
        }

        @Override
        public boolean equals(final Object obj)
        {
            // Equality is by value only; the hash quality is ignored.
            return (obj instanceof ComparableInt) && ((ComparableInt) obj).value == this.value;
        }
    }

    /** The concrete map object under benchmark. */
    public final IMPLEM instance;

    protected MapImplementation(final IMPLEM instance)
    {
        this.instance = instance;
    }

    /**
     * Contains bench to run, setup() must prepare the K,V set before
     */
    public abstract int benchContainKeys();

    /**
     * removed bench to run, setup() must prepare the K,V set before
     */
    public abstract int benchRemoveKeys();

    /**
     * put bench to run, setup() must prepare the K,V set before
     */
    public abstract int benchPutAll();

    /**
     * Preparation of a set of keys before executing the benchXXX() methods
     * @param keysToInsert the array of int or ComparableInts of HASH_QUALITY hashQ
     * to insert in the map on test
     * @param keysForContainsQuery the array of of int or ComparableInts to which the filled map
     * will be queried for contains()
     * @param keysForRemovalQuery the array of of int or ComparableInts to which the filled map
     * will be queried for remove()
     */
    public abstract void setup(int[] keysToInsert, HASH_QUALITY hashQ, int[] keysForContainsQuery, int[] keysForRemovalQuery);

    //// Convenience methods to implement
    //// to ease setup() implementation.

    //// used for setup()
    public abstract void clear();

    public abstract int size();
}