answer
stringlengths
17
10.2M
package io.compgen.cgpipe.runner;

import io.compgen.cgpipe.exceptions.RunnerException;
import io.compgen.cgpipe.parser.context.ExecContext;
import io.compgen.cgpipe.parser.variable.VarString;
import io.compgen.common.StringUtils;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

/**
 * Template-based job runner for Sun Grid Engine (SGE) clusters.
 * Submits jobs with qsub, releases holds with qrls, and deletes with qdel.
 */
public class SGETemplateRunner extends TemplateRunner {
    // When true, SGE treats h_vmem as a total for the whole job rather than
    // per-slot, so no per-slot conversion of job.mem is performed.
    private boolean hvmemIsTotal = false;
    // Accounting string for submissions; null means "not configured".
    private String account = null;
    // Parallel environment name used for multi-slot jobs.
    private String parallelEnv = "shm";

    @Override
    public String[] getSubCommand() {
        return new String[] {"qsub"};
    }

    @Override
    public String[] getReleaseCommand(String jobId) {
        return new String[] {"qrls", jobId};
    }

    @Override
    public String[] getDelCommand(String jobId) {
        return new String[] {"qdel", jobId};
    }

    /**
     * Returns true if the scheduler still knows about the given job id.
     * {@code qstat -j <id>} exits 0 only for jobs currently known to SGE.
     * This is a best-effort check: any failure to run qstat reports "invalid".
     */
    @Override
    public boolean isJobIdValid(String jobId) throws RunnerException {
        try {
            Process proc = Runtime.getRuntime().exec(new String[] {"qstat", "-j", jobId});
            int retcode = proc.waitFor();
            if (retcode == 0) {
                return true;
            }
        } catch (IOException ignored) {
            // best-effort: if qstat can't be executed, treat the job as invalid
        } catch (InterruptedException e) {
            // restore interrupt status instead of silently swallowing it
            Thread.currentThread().interrupt();
        }
        return false;
    }

    /**
     * Populates the template context with SGE-specific values: accounting
     * string, parallel environment, dependency id list, and a (possibly
     * per-slot-converted) memory request.
     */
    protected void updateTemplateContext(ExecContext cxt, JobDef jobdef) {
        if (this.account != null && !cxt.contains("job.account")) {
            cxt.set("job.account", new VarString(this.account));
        }
        cxt.set("job.sge.parallelenv", new VarString(this.parallelEnv));

        // set the dependency id list (jobs without an assigned id are skipped)
        if (jobdef.getDependencies().size() > 0) {
            List<String> depids = new ArrayList<String>();
            for (JobDependency dep : jobdef.getDependencies()) {
                if (!dep.getJobId().equals("")) {
                    depids.add(dep.getJobId());
                }
            }
            cxt.set("job.sge.depids", new VarString(StringUtils.join(",", depids).replaceAll(",,", ",")));
        }

        // set the proper memory setting
        if (jobdef.hasSetting("job.mem")) {
            if (jobdef.getSettingInt("job.procs", 1) > 1 && hvmemIsTotal) {
                // h_vmem is already a job total; pass it through unchanged
                cxt.set("job.mem", new VarString(jobdef.getSetting("job.mem")));
            } else {
                // convert h_vmem to a per-slot amount: strip trailing unit
                // characters (e.g. "G") until the remainder parses as a number,
                // then divide by the slot count and re-append the units
                String mem = jobdef.getSetting("job.mem");
                String units = "";
                float memVal = 1;
                while (mem.length() > 0) {
                    try {
                        memVal = Float.parseFloat(mem);
                        break;
                    } catch (NumberFormatException e) {
                        units = mem.substring(mem.length() - 1) + units;
                        mem = mem.substring(0, mem.length() - 1);
                    }
                }
                cxt.set("job.mem", new VarString((memVal / jobdef.getSettingInt("job.procs", 1)) + units));
            }
        }
        super.updateTemplateContext(cxt, jobdef);
    }

    @Override
    protected void setConfig(String k, String val) {
        switch (k) {
        case "cgpipe.runner.sge.account":
            this.account = val;
            break;
        case "cgpipe.runner.sge.parallelenv":
            this.parallelEnv = val;
            break;
        case "cgpipe.runner.sge.hvmem_total":
            this.hvmemIsTotal = val.toUpperCase().equals("TRUE");
            break;
        default:
            super.setConfig(k, val);
            break;
        }
    }

    @Override
    public String getConfigPrefix() {
        return "cgpipe.runner.sge";
    }

    /**
     * Builds a short throw-away qsub script used as a global hold job.
     * BUGFIX: the source contained a stray '" ' token before the "#$ -terse"
     * line, which was a compile error; the stray token has been removed.
     */
    protected String buildGlobalHoldScript() {
        return "#!" + shell + "\n" +
               "#$ -terse\n" +
               "#$ -N holding\n" +
               "#$ -o /dev/null\n" +
               "#$ -e /dev/null\n" +
               "#$ -l h_rt=00:00:10\n" +
               "sleep 1\n";
    }
}
//This library is free software; you can redistribute it and/or //modify it under the terms of the GNU Lesser General Public //This library is distributed in the hope that it will be useful, //MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the //You should have received a copy of the GNU Lesser General Public //Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. package opennlp.tools.parser; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Set; import opennlp.maxent.ContextGenerator; import opennlp.tools.ngram.Dictionary; /** * Class to generator predictive contexts for deciding how constituents should be combined together. * @author Tom Morton */ public class BuildContextGenerator implements ContextGenerator { private static final String EOS = "eos"; private boolean zeroBackOff; private Dictionary dict; private String[] unigram; private String[] bigram; private String[] trigram; /** * Creates a new context generator for making decisions about combining constitients togehter. * */ public BuildContextGenerator() { super(); zeroBackOff = false; } public BuildContextGenerator(Dictionary dict) { this(); this.dict = dict; unigram = new String[1]; bigram = new String[2]; trigram = new String[3]; } public String[] getContext(Object o) { Object[] params = (Object[]) o; return getContext((Parse[]) params[0], ((Integer) params[1]).intValue()); } /** * Creates punctuation feature for the specified punctuation at the specfied index. * @param punct The punctuation which is in context. * @param i The index of the punctuation with relative to the parse. * @return Punctuation feature for the specified parse and the specified punctuation at the specfied index. 
*/ private String punct(Parse punct, int i) { StringBuffer feat = new StringBuffer(5); feat.append(i).append("="); feat.append(punct.getType()); return (feat.toString()); } private String cons(Parse p, int i) { StringBuffer feat = new StringBuffer(20); feat.append(i).append("="); if (p != null) { if (i < 0) { feat.append(p.getLabel()).append("|"); } feat.append(p.getType()).append("|").append(p.getHead().toString()); } else { feat.append(EOS); } return (feat.toString()); } private String consbo(Parse p, int i) { //cons back-off StringBuffer feat = new StringBuffer(20); feat.append(i).append("*="); if (p != null) { if (i < 0) { feat.append(p.getLabel()).append("|"); } feat.append(p.getType()); } else { feat.append(EOS); } return (feat.toString()); } /** * Returns the predictive context used to determine how constituent at the specified index * should be combined with other contisuents. * @param constituents The constituents which have yet to be combined into new constituents. * @param index The index of the constituent whcihi is being considered. * @return the context for building constituents at the specified index. 
*/ public String[] getContext(Parse[] constituents, int index) { List features = new ArrayList(100); int ps = constituents.length; //default features.add("default"); // cons(-2), cons(-1), cons(0), cons(1), cons(2) // cons(-2) Parse p_2 = null; Parse p_1 = null; Parse p0 = null; Parse p1 = null; Parse p2 = null; Set punct1s = null; Set punct2s = null; Set punct_1s = null; Set punct_2s = null; if (index - 2 >= 0) { p_2 = constituents[index - 2]; } if (index - 1 >= 0) { p_1 = constituents[index - 1]; punct_2s = p_1.getPreviousPunctuationSet(); } p0 = constituents[index]; punct_1s=p0.getPreviousPunctuationSet(); punct1s=p0.getNextPunctuationSet(); if (index + 1 < ps) { p1 = constituents[index + 1]; punct2s = p1.getNextPunctuationSet(); } if (index + 2 < ps) { p2 = constituents[index + 2]; } boolean u_2 = true; boolean u_1 = true; boolean u0 = true; boolean u1 = true; boolean u2 = true; boolean b_2_1 = true; boolean b_10 = true; boolean b01 = true; boolean b12 = true; boolean t_2_10 = true; boolean t_101 = true; boolean t012 = true; if (dict != null) { if (p_2 != null) { unigram[0] = p_2.toString(); u_2 = dict.contains(unigram); } if (p2 != null) { unigram[0] = p2.toString(); u2 = dict.contains(unigram); } unigram[0] = p0.toString(); u0 = dict.contains(unigram); if (p_2 != null && p_1 != null) { bigram[0] = p_2.toString(); bigram[1] = p_1.toString(); b_2_1 = dict.contains(bigram); trigram[0] = p_2.toString(); trigram[1] = p_1.toString(); trigram[2] = p_1.toString(); t_2_10 = dict.contains(trigram); } if (p_1 != null) { unigram[0] = p_1.toString(); u_1 = dict.contains(unigram); bigram[0] = p_1.toString(); bigram[1] = p0.toString(); b_10 = dict.contains(bigram); } if (p1 != null) { unigram[0] = p1.toString(); u1 = dict.contains(unigram); bigram[0] = p0.toString(); bigram[1] = p1.toString(); b01 = dict.contains(bigram); } if (p1 != null && p2 != null) { bigram[0] = p1.toString(); bigram[1] = p2.toString(); b12 = dict.contains(bigram); trigram[0] = p0.toString(); 
trigram[1] = p1.toString(); trigram[2] = p2.toString(); t012 = dict.contains(trigram); } if (p_1 != null && p1 != null) { trigram[0] = p_1.toString(); trigram[1] = p0.toString(); trigram[2] = p1.toString(); t_101 = dict.contains(trigram); } } String consp_2 = cons(p_2, -2); String consp_1 = cons(p_1, -1); String consp0 = cons(p0, 0); String consp1 = cons(p1, 1); String consp2 = cons(p2, 2); String consbop_2 = consbo(p_2, -2); String consbop_1 = consbo(p_1, -1); String consbop0 = consbo(p0, 0); String consbop1 = consbo(p1, 1); String consbop2 = consbo(p2, 2); // cons(-2), cons(-1), cons(0), cons(1), cons(2) if (u0) features.add(consp0); features.add(consbop0); if (u_2) features.add(consp_2); features.add(consbop_2); if (u_1) features.add(consp_1); features.add(consbop_1); if (u1) features.add(consp1); features.add(consbop1); if (u2) features.add(consp2); features.add(consbop2); //cons(0),cons(1) if (punct1s != null) { for (Iterator pi=punct1s.iterator();pi.hasNext();) { String punct = punct((Parse) pi.next(),1); //punct(1); features.add(punct); //cons(0)punct(1) if (u0) features.add(consp0+","+punct); features.add(consbop0+","+punct); //cons(0)punct(1)cons(1) if (b01) features.add(consp0+","+punct+","+consp1); if (u1) features.add(consbop0+","+punct+","+consp1); if (u0) features.add(consp0+","+punct+","+consbop1); features.add(consbop0+","+punct+","+consbop1); } } else { //cons(0),cons(1) if (b01) features.add(consp0 + "," + consp1); if (u1) features.add(consbop0 + "," + consp1); if (u0) features.add(consp0 + "," + consbop1); features.add(consbop0 + "," + consbop1); } //cons(-1,0) if (punct_1s != null) { for (Iterator pi=punct_1s.iterator();pi.hasNext();) { String punct = punct((Parse) pi.next(),-1); //punct(-1) features.add(punct); //punct(-1)cons(0) if (u0) features.add(punct+","+consp0); features.add(punct+","+consbop0); //cons(-1)punct(-1)cons(0) if (b_10) features.add(consp_1+","+punct+","+consp0); if (u0) features.add(consbop_1+","+punct+","+consp0); if (u_1) 
features.add(consp_1+","+punct+","+consbop0); features.add(consbop_1+","+punct+","+consbop0); } } else { // cons(-1,0) if (b_10) features.add(consp_1 + "," + consp0); if (u0) features.add(consbop_1 + "," + consp0); if (u_1) features.add(consp_1 + "," + consbop0); features.add(consbop_1 + "," + consbop0); } if (punct2s != null) { for (Iterator pi=punct2s.iterator();pi.hasNext();) { String punct = punct((Parse) pi.next(),2); //punct(2) features.add(punct); } if (punct1s != null) { //cons(0),punct(1),cons(1),punct(2),cons(2) for (Iterator pi2=punct2s.iterator();pi2.hasNext();) { String punct2 = punct((Parse) pi2.next(),2); for (Iterator pi1=punct1s.iterator();pi1.hasNext();) { String punct1 = punct((Parse) pi1.next(),1); if (t012) features.add(consp0 + "," + punct1+","+consp1 + "," + punct2+","+consp2); if (b12) features.add(consbop0 + "," + punct1+","+consp1 + "," + punct2+","+consp2); if (u0 && u2) features.add(consp0 + "," + punct1+","+consbop1 + "," + punct2+","+consp2); if (b01) features.add(consp0 + "," + punct1+","+consp1 + "," + punct2+","+consbop2); if (u2) features.add(consbop0 + "," + punct1+","+consbop1 + "," + punct2+","+consp2); if (u1) features.add(consbop0 + "," + punct1+","+consp1 + "," + punct2+","+consbop2); if (u0) features.add(consp0 + "," + punct1+","+consbop1 + "," + punct2+","+consbop2); features.add(consbop0 + "," + punct1+","+consbop1 + "," + punct2+","+consbop2); if (zeroBackOff) { if (b01) features.add(consp0 + "," + punct1+","+consp1 + "," + punct2); if (u1) features.add(consbop0 + "," + punct1+","+consp1 + "," + punct2); if (u0) features.add(consp0 + "," + punct1+","+consbop1 + "," + punct2); features.add(consbop0 + "," + punct1+","+consbop1 + "," + punct2); } } } } else { //cons(0),cons(1),punct(2),cons(2) for (Iterator pi2=punct2s.iterator();pi2.hasNext();) { String punct2 = punct((Parse) pi2.next(),2); if (t012) features.add(consp0 + "," + consp1 + "," + punct2+","+consp2); if (b12) features.add(consbop0 + "," + consp1 +"," + punct2+ 
"," + consp2); if (u0 && u2) features.add(consp0 + "," + consbop1 + "," + punct2+","+consp2); if (b01) features.add(consp0 + "," + consp1 + "," + punct2+","+consbop2); if (u2) features.add(consbop0 + "," + consbop1 + "," + punct2+","+consp2); if (u1) features.add(consbop0 + "," + consp1 + "," + punct2+","+consbop2); if (u0) features.add(consp0 + "," + consbop1 + "," + punct2+","+consbop2); features.add(consbop0 + "," + consbop1 + "," + punct2+","+consbop2); if (zeroBackOff) { if (b01) features.add(consp0 + "," + consp1 + "," + punct2); if (u1) features.add(consbop0 + "," + consp1 + "," + punct2); if (u0) features.add(consp0 + "," + consbop1 + "," + punct2); features.add(consbop0 + "," + consbop1 + "," + punct2); } } } } else { if (punct1s != null) { //cons(0),punct(1),cons(1),cons(2) for (Iterator pi1=punct1s.iterator();pi1.hasNext();) { String punct1 = punct((Parse) pi1.next(),1); if (t012) features.add(consp0 + "," + punct1 +","+ consp1 +","+consp2); if (b12) features.add(consbop0 + "," + punct1 +","+ consp1 +","+consp2); if (u0 && u2) features.add(consp0 + "," + punct1 +","+ consbop1 +","+consp2); if (b01) features.add(consp0 + "," + punct1 +","+ consp1 +","+consbop2); if (u2) features.add(consbop0 + "," + punct1 +","+ consbop1 +","+consp2); if (u1) features.add(consbop0 + "," + punct1 +","+ consp1 +","+consbop2); if (u0) features.add(consp0 + "," + punct1 +","+ consbop1 +","+consbop2); features.add(consbop0 + "," + punct1 +","+ consbop1 +","+consbop2); //zero backoff case covered by cons(0)cons(1) } } else { //cons(0),cons(1),cons(2) if (t012) features.add(consp0 + "," + consp1 + "," + consp2); if (b12) features.add(consbop0 + "," + consp1 + "," + consp2); if (u0 && u2) features.add(consp0 + "," + consbop1 + "," + consp2); if (b01) features.add(consp0 + "," + consp1 + "," + consbop2); if (u2) features.add(consbop0 + "," + consbop1 + "," + consp2); if (u1) features.add(consbop0 + "," + consp1 + "," + consbop2); if (u0) features.add(consp0 + "," + consbop1 + "," 
+ consbop2); features.add(consbop0 + "," + consbop1 + "," + consbop2); } } if (punct_2s != null) { for (Iterator pi=punct_2s.iterator();pi.hasNext();) { String punct = punct((Parse) pi.next(),-2); //punct(-2) features.add(punct); } if (punct_1s != null) { //cons(-2),punct(-2),cons(-1),punct(-1),cons(0) for (Iterator pi_2=punct_2s.iterator();pi_2.hasNext();) { String punct_2 = punct((Parse) pi_2.next(),-2); for (Iterator pi_1=punct_1s.iterator();pi_1.hasNext();) { String punct_1 = punct((Parse) pi_1.next(),-1); if (t_2_10) features.add(consp_2 + "," + punct_2+","+consp_1 + "," + punct_1+","+consp0); if (b_10) features.add(consbop_2 + "," + punct_2+","+consp_1 + "," + punct_1+","+consp0); if (u_2 && u0) features.add(consp_2 + "," + punct_2+","+consbop_1 + "," + punct_1+","+consp0); if (b_2_1) features.add(consp_2 + "," + punct_2+","+consp_1 + "," + punct_1+","+consbop0); if (u0) features.add(consbop_2 + "," + punct_2+","+consbop_1 + "," + punct_1+","+consp0); if (u_1) features.add(consbop_2 + "," + punct_2+","+consp_1 + "," + punct_1+","+consbop0); if (u_2) features.add(consp_2 + "," + punct_2+","+consbop_1 + "," + punct_1+","+consbop0); features.add(consbop_2 + "," + punct_2+","+consbop_1 + "," + punct_1+","+consbop0); if (zeroBackOff) { if (b_10) features.add(punct_2+","+consp_1 + "," + punct_1+","+consp0); if (u0) features.add(punct_2+","+consbop_1 + "," + punct_1+","+consp0); if (u_1) features.add(punct_2+","+consp_1 + "," + punct_1+","+consbop0); features.add(punct_2+","+consbop_1 + "," + punct_1+","+consbop0); } } } } else { //cons(-2),punct(-2),cons(-1),cons(0) for (Iterator pi_2=punct_2s.iterator();pi_2.hasNext();) { String punct_2 = punct((Parse) pi_2.next(),-2); if (t_2_10) features.add(consp_2 + "," + punct_2+","+consp_1 + ","+consp0); if (b_10) features.add(consbop_2 + "," + punct_2+","+consp_1 + ","+consp0); if (u_2 && u0) features.add(consp_2 + "," + punct_2+","+consbop_1 + ","+consp0); if (b_2_1) features.add(consp_2 + "," + punct_2+","+consp_1 + 
","+consbop0); if (u0) features.add(consbop_2 + "," + punct_2+","+consbop_1 + ","+consp0); if (u_1) features.add(consbop_2 + "," + punct_2+","+consp_1 + ","+consbop0); if (u_2) features.add(consp_2 + "," + punct_2+","+consbop_1 + ","+consbop0); features.add(consbop_2 + "," + punct_2+","+consbop_1 + ","+consbop0); if (zeroBackOff) { if (b_10) features.add(punct_2+","+consp_1 + ","+consp0); if (u0) features.add(punct_2+","+consbop_1 + ","+consp0); if (u_1) features.add(punct_2+","+consp_1 + ","+consbop0); features.add(punct_2+","+consbop_1 + ","+consbop0); } } } } else { if (punct_1s != null) { //cons(-2),cons(-1),punct(-1),cons(0) for (Iterator pi_1=punct_1s.iterator();pi_1.hasNext();) { String punct_1 = punct((Parse) pi_1.next(),-1); if (t_2_10) features.add(consp_2 + "," + consp_1 + "," + punct_1+","+consp0); if (b_10) features.add(consbop_2 + "," + consp_1 + "," + punct_1+","+consp0); if (u_2 && u0) features.add(consp_2 + "," + consbop_1 + "," + punct_1+","+consp0); if (b_2_1) features.add(consp_2 + "," + consp_1 + "," + punct_1+","+consbop0); if (u0) features.add(consbop_2 + "," + consbop_1 + "," + punct_1+","+consp0); if (u_1) features.add(consbop_2 + "," + consp_1 + "," + punct_1+","+consbop0); if (u_2) features.add(consp_2 + "," + consbop_1 + "," + punct_1+","+consbop0); features.add(consbop_2 + "," + consbop_1 + "," + punct_1+","+consbop0); //zero backoff case covered by cons(-1)cons(0) } } else { //cons(-2),cons(-1),cons(0) if (t_2_10) features.add(consp_2 + "," + consp_1 + "," +consp0); if (b_10) features.add(consbop_2 + "," + consp_1 + "," +consp0); if (u_2 && u0) features.add(consp_2 + "," + consbop_1 + "," +consp0); if (b_2_1)features.add(consp_2 + "," + consp_1 + "," +consbop0); if (u0) features.add(consbop_2 + "," + consbop_1 + "," +consp0); if (u_1) features.add(consbop_2 + "," + consp_1 + "," +consbop0); if (u_2) features.add(consp_2 + "," + consbop_1 + "," +consbop0); features.add(consbop_2 + "," + consbop_1 + "," +consbop0); } } if (punct_1s 
!=null) { if (punct1s != null) { //cons(-1),punct(-1),cons(0),punct(1),cons(1) for (Iterator pi_1=punct_1s.iterator();pi_1.hasNext();) { String punct_1 = punct((Parse) pi_1.next(),-1); for (Iterator pi1=punct1s.iterator();pi1.hasNext();) { String punct1 = punct((Parse) pi1.next(),1); if (t_101) features.add(consp_1 + "," + punct_1+","+consp0 + "," + punct1+","+consp1); if (b01) features.add(consbop_1 + "," + punct_1+","+consp0 + "," + punct1+","+consp1); if (u_1 && u1) features.add(consp_1 + "," + punct_1+","+consbop0 + "," + punct1+","+consp1); if (b_10) features.add(consp_1 + "," + punct_1+","+consp0 + "," + punct1+","+consbop1); if (u1) features.add(consbop_1 + "," + punct_1+","+consbop0 + "," + punct1+","+consp1); if (u0) features.add(consbop_1 + "," + punct_1+","+consp0 + "," + punct1+","+consbop1); if (u_1) features.add(consp_1 + "," + punct_1+","+consbop0 + "," + punct1+","+consbop1); features.add(consbop_1 + "," + punct_1+","+consbop0 + "," + punct1+","+consbop1); if (zeroBackOff) { if (b_10) features.add(consp_1 + "," + punct_1+","+consp0 + "," + punct1); if (u0) features.add(consbop_1 + "," + punct_1+","+consp0 + "," + punct1); if (u_1) features.add(consp_1 + "," + punct_1+","+consbop0 + "," + punct1); features.add(consbop_1 + "," + punct_1+","+consbop0 + "," + punct1); if (b01) features.add(punct_1+","+consp0 + "," + punct1+","+consp1); if (u1) features.add(punct_1+","+consbop0 + "," + punct1+","+consp1); if (u0) features.add(punct_1+","+consp0 + "," + punct1+","+consbop1); features.add(punct_1+","+consbop0 + "," + punct1+","+consbop1); } } } } else { //cons(-1),punct(-1),cons(0),cons(1) for (Iterator pi_1=punct_1s.iterator();pi_1.hasNext();) { String punct_1 = punct((Parse) pi_1.next(),-1); if (t_101) features.add(consp_1 + "," + punct_1+","+consp0 + "," + consp1); if (b01)features.add(consbop_1 + "," + punct_1+","+consp0 + "," + consp1); if (u_1 && u1) features.add(consp_1 + "," + punct_1+","+consbop0 + "," + consp1); if (u0) features.add(consp_1 + "," 
+ punct_1+","+consp0 + "," + consbop1); if (u1) features.add(consbop_1 + "," + punct_1+","+consbop0 + "," + consp1); if (u0) features.add(consbop_1 + "," + punct_1+","+consp0 + "," + consbop1); if (u_1) features.add(consp_1 + "," + punct_1+","+consbop0 + "," + consbop1); features.add(consbop_1 + "," + punct_1+","+consbop0 + "," + consbop1); if(zeroBackOff) { if (b01) features.add(punct_1+","+consp0 + "," + consp1); if (u1) features.add(punct_1+","+consbop0 + "," + consp1); if (u0) features.add(punct_1+","+consp0 + "," + consbop1); features.add(punct_1+","+consbop0 + "," + consbop1); } } } } else { if (punct1s != null) { //cons(-1),cons(0),punct(1),cons(1) for (Iterator pi1=punct1s.iterator();pi1.hasNext();) { String punct1 = punct((Parse) pi1.next(),1); if (t_101) features.add(consp_1 + "," + consp0 + "," + punct1+","+consp1); if (b01) features.add(consbop_1 + "," + consp0 + "," + punct1+","+consp1); if (u_1 && u1) features.add(consp_1 + "," + consbop0 + "," + punct1+","+consp1); if (b_10) features.add(consp_1 + "," + consp0 + "," + punct1+","+consbop1); if (u1) features.add(consbop_1 + "," + consbop0 + "," + punct1+","+consp1); if (u0) features.add(consbop_1 + "," + consp0 + "," + punct1+","+consbop1); if (u_1) features.add(consp_1 + "," + consbop0 + "," + punct1+","+consbop1); features.add(consbop_1 + "," + consbop0 + "," + punct1+","+consbop1); if (zeroBackOff) { if (b_10) features.add(consp_1 + "," + consp0 + "," + punct1); if (u0) features.add(consbop_1 + "," + consp0 + "," + punct1); if (u_1) features.add(consp_1 + "," + consbop0 + "," + punct1); features.add(consbop_1 + "," + consbop0 + "," + punct1); } } } else { //cons(-1),cons(0),cons(1) if (t_101) features.add(consp_1 + "," + consp0 + "," +consp1); if (b01) features.add(consbop_1 + "," + consp0 + "," +consp1); if (u_1 && u1) features.add(consp_1 + "," + consbop0 + "," +consp1); if (b_10) features.add(consp_1 + "," + consp0 + "," +consbop1); if (u1) features.add(consbop_1 + "," + consbop0 + "," +consp1); 
if (u0) features.add(consbop_1 + "," + consp0 + "," +consbop1); if (u_1) features.add(consp_1 + "," + consbop0 + "," +consbop1); features.add(consbop_1 + "," + consbop0 + "," +consbop1); } } String p0Word = p0.toString(); if (p0Word.equals("-RRB-")) { for (int pi = index - 1; pi >= 0; pi Parse p = constituents[pi]; if (p.toString().equals("-LRB-")) { features.add("bracketsmatch"); break; } if (p.getLabel().startsWith(ParserME.START)) { break; } } } if (p0Word.equals("-RCB-")) { for (int pi = index - 1; pi >= 0; pi Parse p = constituents[pi]; if (p.toString().equals("-LCB-")) { features.add("bracketsmatch"); break; } if (p.getLabel().startsWith(ParserME.START)) { break; } } } if (p0Word.equals("''")) { for (int pi = index - 1; pi >= 0; pi Parse p = constituents[pi]; if (p.toString().equals("``")) { features.add("quotesmatch"); break; } if (p.getLabel().startsWith(ParserME.START)) { break; } } } if (p0Word.equals("'")) { for (int pi = index - 1; pi >= 0; pi Parse p = constituents[pi]; if (p.toString().equals("`")) { features.add("quotesmatch"); break; } if (p.getLabel().startsWith(ParserME.START)) { break; } } } if (p0Word.equals(",")) { for (int pi = index - 1; pi >= 0; pi Parse p = constituents[pi]; if (p.toString().equals(",")) { features.add("iscomma"); break; } if (p.getLabel().startsWith(ParserME.START)) { break; } } } if (p0Word.equals(".") && index == ps - 1) { for (int pi = index - 1; pi >= 0; pi Parse p = constituents[pi]; if (p.getLabel().startsWith(ParserME.START)) { if (pi == 0) { features.add("endofsentence"); } break; } } } return ((String[]) features.toArray(new String[features.size()])); } }
//This library is free software; you can redistribute it and/or
//modify it under the terms of the GNU Lesser General Public
//This library is distributed in the hope that it will be useful,
//MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
//You should have received a copy of the GNU Lesser General Public
//Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
package opennlp.tools.parser;

import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.StringTokenizer;

import opennlp.maxent.io.SuffixSensitiveGISModelReader;
import opennlp.tools.chunker.ChunkerME;
import opennlp.tools.postag.DefaultPOSContextGenerator;
import opennlp.tools.postag.POSDictionary;
import opennlp.tools.postag.POSTaggerME;
import opennlp.tools.util.Sequence;
import opennlp.tools.util.Span;

/**
 * Factory and command-line driver for an English Treebank parser built from
 * GIS models on disk (build/check models, POS tagger, chunker, head rules).
 */
public class EnglishTreebankParser {

  /**
   * Creates a parser from the model files in the given data directory.
   * @param dataDir Directory containing build.bin.gz, check.bin.gz, tag.bin.gz,
   *        chunk.bin.gz, head_rules, and (optionally) tagdict.
   * @param useTagDictionary Whether the POS tagger should consult the tag dictionary.
   * @param useCaseSensitiveTagDictionary Whether tag-dictionary lookups are case sensitive.
   * @throws IOException if any model file cannot be read.
   */
  public static ParserME getParser(String dataDir, boolean useTagDictionary,
      boolean useCaseSensitiveTagDictionary) throws IOException {
    if (useTagDictionary) {
      return new ParserME(
          new SuffixSensitiveGISModelReader(new File(dataDir + "/build.bin.gz")).getModel(),
          new SuffixSensitiveGISModelReader(new File(dataDir + "/check.bin.gz")).getModel(),
          new EnglishTreebankPOSTagger(dataDir + "/tag.bin.gz", dataDir + "/tagdict",
              useCaseSensitiveTagDictionary),
          new EnglishTreebankChunker(dataDir + "/chunk.bin.gz"),
          new HeadRules(dataDir + "/head_rules"));
    }
    else {
      return new ParserME(
          new SuffixSensitiveGISModelReader(new File(dataDir + "/build.bin.gz")).getModel(),
          new SuffixSensitiveGISModelReader(new File(dataDir + "/check.bin.gz")).getModel(),
          new EnglishTreebankPOSTagger(dataDir + "/tag.bin.gz"),
          new EnglishTreebankChunker(dataDir + "/chunk.bin.gz"),
          new HeadRules(dataDir + "/head_rules"));
    }
  }

  /** POS tagger wrapper exposing top-K tag sequences for the parser. */
  private static class EnglishTreebankPOSTagger extends POSTaggerME implements ParserTagger {

    // Number of best sequences requested from the beam search.
    private static final int K = 10;

    public EnglishTreebankPOSTagger(String modelFile) throws IOException {
      // CONSISTENCY: beam size uses K rather than a duplicated literal 10.
      super(K, new SuffixSensitiveGISModelReader(new File(modelFile)).getModel(),
          new DefaultPOSContextGenerator(), null);
    }

    public EnglishTreebankPOSTagger(String modelFile, String tagDictionary, boolean useCase)
        throws IOException {
      super(K, new SuffixSensitiveGISModelReader(new File(modelFile)).getModel(),
          new DefaultPOSContextGenerator(), new POSDictionary(tagDictionary, useCase));
    }

    public Sequence[] topKSequences(List sentence) {
      return beam.bestSequences(K, sentence, null);
    }

    public Sequence[] topKSequences(String[] sentence) {
      return beam.bestSequences(K, Arrays.asList(sentence), null);
    }
  }

  /** Chunker wrapper exposing top-K chunk sequences and BIO-consistency checks. */
  private static class EnglishTreebankChunker extends ChunkerME implements ParserChunker {

    // Number of best sequences requested from the beam search.
    private static final int K = 10;

    public EnglishTreebankChunker(String modelFile) throws IOException {
      // CONSISTENCY: beam size uses K rather than a duplicated literal 10.
      super(new SuffixSensitiveGISModelReader(new File(modelFile)).getModel(),
          new ChunkContextGenerator(), K);
    }

    public Sequence[] topKSequences(List sentence, List tags) {
      return beam.bestSequences(K, sentence, new Object[] { tags });
    }

    public Sequence[] topKSequences(String[] sentence, String[] tags) {
      return beam.bestSequences(K, Arrays.asList(sentence),
          new Object[] { Arrays.asList(tags) });
    }

    public Sequence[] topKSequences(List sentence) {
      return beam.bestSequences(K, sentence, null);
    }

    public Sequence[] topKSequences(String[] sentence) {
      return beam.bestSequences(K, Arrays.asList(sentence), null);
    }

    /**
     * A CONT outcome is only valid when it continues a chunk of the same type
     * started or continued by the previous outcome.
     */
    protected boolean validOutcome(String outcome, Sequence sequence) {
      if (outcome.startsWith(ParserME.CONT)) {
        List tagList = sequence.getOutcomes();
        int lti = tagList.size() - 1;
        if (lti == -1) {
          return (false);
        }
        else {
          String lastTag = (String) tagList.get(lti);
          if (lastTag.equals(ParserME.OTHER)) {
            return (false);
          }
          String pred = outcome.substring(ParserME.CONT.length());
          if (lastTag.startsWith(ParserME.START)) {
            return lastTag.substring(ParserME.START.length()).equals(pred);
          }
          else if (lastTag.startsWith(ParserME.CONT)) {
            return lastTag.substring(ParserME.CONT.length()).equals(pred);
          }
        }
      }
      return (true);
    }
  }

  /** Maps raw bracket tokens to their Treebank escape forms. */
  private static String convertToken(String token) {
    if (token.equals("(")) {
      return "-LRB-";
    }
    else if (token.equals(")")) {
      return "-RRB-";
    }
    else if (token.equals("{")) {
      return "-LCB-";
    }
    else if (token.equals("}")) {
      return "-RCB-";
    }
    return token;
  }

  private static void usage() {
    System.err.println("Usage: EnglishTreebankParser [-i] dataDirectory < tokenized_sentences");
    System.err.println("dataDirectory: Directory containing parser models.");
    System.err.println("-d: Use tag dictionary.");
    System.err.println("-i: Case insensitive tag dictionary.");
    System.exit(1);
  }

  /**
   * Reads whitespace-tokenized sentences from stdin and prints one parse per line.
   */
  public static void main(String[] args) throws IOException {
    if (args.length == 0) {
      usage();
    }
    boolean useTagDictionary = false;
    boolean caseInsensitiveTagDictionary = false;
    int ai = 0;
    // ROBUSTNESS: bounds check prevents running off the end of args; the
    // trailing else-break prevents an infinite loop on an unknown option
    // (equivalent to the original for all valid inputs).
    while (ai < args.length && args[ai].startsWith("-")) {
      if (args[ai].equals("-d")) {
        useTagDictionary = true;
        ai++;
      }
      else if (args[ai].equals("-i")) {
        caseInsensitiveTagDictionary = true;
        ai++;
      }
      // NOTE(review): this option literal was garbled in the source
      // ('args[ai].equals(" ai++; break;'); "--" (end-of-options) assumed — confirm.
      else if (args[ai].equals("--")) {
        ai++;
        break;
      }
      else {
        break;
      }
    }
    ParserME parser;
    if (caseInsensitiveTagDictionary) {
      parser = EnglishTreebankParser.getParser(args[ai++], true, false);
    }
    else if (useTagDictionary) {
      parser = EnglishTreebankParser.getParser(args[ai++], true, true);
    }
    else {
      parser = EnglishTreebankParser.getParser(args[ai++], false, false);
    }
    BufferedReader in = new BufferedReader(new InputStreamReader(System.in));
    String line;
    try {
      while (null != (line = in.readLine())) {
        StringTokenizer str = new StringTokenizer(line);
        StringBuffer sb = new StringBuffer();
        List tokens = new ArrayList();
        while (str.hasMoreTokens()) {
          String tok = convertToken(str.nextToken());
          tokens.add(tok);
          sb.append(tok).append(" ");
        }
        if (sb.length() != 0) {
          String text = sb.substring(0, sb.length() - 1).toString();
          Parse p = new Parse(text, new Span(0, text.length()), "INC", 1, null);
          int start = 0;
          // Insert one token node per token; tokens are separated by single spaces.
          for (Iterator ti = tokens.iterator(); ti.hasNext();) {
            String tok = (String) ti.next();
            p.insert(new Parse(text, new Span(start, start + tok.length()), ParserME.TOK_NODE, 0));
            start += tok.length() + 1;
          }
          p = parser.parse(p);
          //System.out.print(p.getProb()+" ");
          p.show();
        }
        System.out.println();
      }
    }
    catch (IOException e) {
      System.err.println(e);
    }
  }
}
package org.apache.commons.dbcp;

import java.io.PrintStream;
import java.io.PrintWriter;
import java.sql.SQLException;

/**
 * A SQLException subclass containing another Throwable
 *
 * @author Dirk Verbeeck
 * @version $Id: SQLNestedException.java,v 1.4 2003/09/14 00:10:54 dirkv Exp $
 */
public class SQLNestedException extends SQLException {

    /** The nested exception or error that caused this one; never null. */
    private Throwable cause = null;

    /**
     * Constructs a new <code>SQLNestedException</code> with specified
     * detail message and nested <code>Throwable</code>.
     *
     * @param msg the error message
     * @param cause the exception or error that caused this exception to be
     * thrown
     */
    public SQLNestedException(String msg, Throwable cause) {
        super(msg);
        // Substitute a placeholder for a null cause so the accessors and
        // printStackTrace overrides below never need a null check.
        this.cause = (cause == null) ? new Exception("No cause") : cause;
    }

    /** Returns the nested cause (never null). */
    public Throwable getCause() {
        return cause;
    }

    /** Appends the cause's localized message to this exception's own. */
    public String getLocalizedMessage() {
        return super.getLocalizedMessage() + ", cause: " + cause.getLocalizedMessage();
    }

    /** Prints this exception's header followed by the cause's stack trace. */
    public void printStackTrace() {
        System.err.println(getClass().getName() + ": " + getMessage() + ", cause: ");
        cause.printStackTrace();
    }

    /** Prints this exception's header followed by the cause's stack trace. */
    public void printStackTrace(PrintStream s) {
        s.println(getClass().getName() + ": " + getMessage() + ", cause: ");
        cause.printStackTrace(s);
    }

    /** Prints this exception's header followed by the cause's stack trace. */
    public void printStackTrace(PrintWriter s) {
        s.println(getClass().getName() + ": " + getMessage() + ", cause: ");
        cause.printStackTrace(s);
    }
}
package VASSAL.counters;

import java.awt.Component;
import java.awt.Graphics;
import java.awt.Rectangle;
import java.awt.Shape;
import java.awt.Point;
import javax.swing.BoxLayout;
import javax.swing.JPanel;
import javax.swing.KeyStroke;
import java.util.ArrayList;
import java.util.List;
import VASSAL.build.module.BasicCommandEncoder;
import VASSAL.build.module.Map;
import VASSAL.build.module.documentation.HelpFile;
import VASSAL.command.Command;
import VASSAL.configure.BooleanConfigurer;
import VASSAL.configure.NamedHotKeyConfigurer;
import VASSAL.i18n.TranslatablePiece;
import VASSAL.tools.NamedKeyStroke;
import VASSAL.tools.SequenceEncoder;
import VASSAL.i18n.PieceI18nData;
import VASSAL.i18n.Resources;
import VASSAL.configure.StringConfigurer;

/**
 * Implements a trait to allow a piece to be deselected from the KeyBuffer in response to a Key Command.
 * @author Brian Reynolds
 */
public class Deselect extends Decorator implements TranslatablePiece {
  // Field separator used in the serialized type string.
  private static final char DELIMITER = '\t'; //$NON-NLS-1$
  public static final String ID = "deselect" + DELIMITER;
  protected KeyCommand[] command;      // cached key-command array; rebuilt when null
  protected String commandName;        // right-click menu text for the command
  protected NamedKeyStroke key;        // keystroke that triggers the deselect
  protected KeyCommand deselectCommand;
  protected String description;        // user-supplied description shown in the editor
  protected Boolean unstack;           // when true, also pull the piece out of its stack

  public Deselect() {
    commandName = "Deselect";
    key = new NamedKeyStroke(KeyStroke.getKeyStroke("K"));
    description = "";
    unstack = false;
  }

  public Deselect(String type, GamePiece inner) {
    mySetType(type);
    setInner(inner);
  }

  /** Decodes the serialized type string produced by {@link #myGetType}. */
  public void mySetType(String type) {
    type = type.substring(ID.length());
    SequenceEncoder.Decoder st = new SequenceEncoder.Decoder(type, DELIMITER);
    commandName = st.nextToken();
    key = st.nextNamedKeyStroke('K');
    description = st.nextToken("");
    unstack = st.nextBoolean(false);
    command = null; // force the cached key commands to be rebuilt
  }

  /** Serializes this trait's configuration back into its type string. */
  public String myGetType() {
    SequenceEncoder se = new SequenceEncoder(DELIMITER);
    se.append(commandName).append(key).append(description).append(unstack);
    return ID + se.getValue();
  }

  protected KeyCommand[] myGetKeyCommands() {
    if (command == null) {
      deselectCommand = new KeyCommand(commandName, key, Decorator.getOutermost(this), this);
      // Only expose the command when it has both a menu name and a real key.
      if (commandName.length() > 0 && key != null && ! key.isNull()) {
        command = new KeyCommand[]{deselectCommand};
      }
      else {
        command = new KeyCommand[0];
      }
    }
    if (command.length > 0) {
      // Deselecting only makes sense while the piece is on a map.
      command[0].setEnabled(getMap() != null);
    }
    return command;
  }

  /** This trait keeps no mutable state. */
  public String myGetState() {
    return "";
  }

  /**
   * Responds to the deselect keystroke: optionally extracts the piece from
   * its stack, then removes it from the drag and key buffers.
   */
  public Command myKeyEvent(KeyStroke stroke) {
    Command c = null;
    myGetKeyCommands();
    if (deselectCommand.matches(stroke)) {
      GamePiece outer = Decorator.getOutermost(this);
      final Map m = getMap();
      if (unstack) {
        Stack stack = outer.getParent();   //BR// If we're now being dragged around as part of a stack
        if (stack != null) {
          Point pos = outer.getPosition(); //BR// Figure out where stack was/is
          stack.setExpanded(true);         //BR// Expand the stack
          stack.remove(outer);             //BR// Remove our piece from the stack
          c = m.placeAt(outer, pos);       //BR// Put it back on the map so it won't be missing
        }
      }
      outer.setProperty(Properties.SELECTED, false); //BR// Mark as not selected
      DragBuffer.getBuffer().remove(outer);          //BR// Remove from the drag buffer
      KeyBuffer.getBuffer().remove(outer);           //BR// Remove from the key buffer
    }
    return c;
  }

  /** No state to restore. */
  public void mySetState(String newState) {
  }

  // The remaining GamePiece methods simply delegate to the wrapped piece.
  public Rectangle boundingBox() {
    return piece.boundingBox();
  }

  public void draw(Graphics g, int x, int y, Component obs, double zoom) {
    piece.draw(g, x, y, obs, zoom);
  }

  public String getName() {
    return piece.getName();
  }

  public Shape getShape() {
    return piece.getShape();
  }

  public PieceEditor getEditor() {
    return new Ed(this);
  }

  public String getDescription() {
    return (description == null || description.length() == 0) ?
        Resources.getString("Deselect.Deselect") :
        Resources.getString("Deselect.Deselect") + " - " + description;
  }

  public HelpFile getHelpFile() {
    return HelpFile.getReferenceManualPage("Deselect.htm", "");
  }

  public PieceI18nData getI18nData() {
    return getI18nData(commandName, "Deselect command");
  }

  /** Trait editor panel: description, menu command, key, and unstack flag. */
  public static class Ed implements PieceEditor {
    private StringConfigurer nameInput;
    private StringConfigurer descInput;
    private NamedHotKeyConfigurer keyInput;
    private BooleanConfigurer unstackInput;
    private JPanel controls;

    public Ed(Deselect p) {
      controls = new JPanel();
      controls.setLayout(new BoxLayout(controls, BoxLayout.Y_AXIS));
      descInput = new StringConfigurer(null, Resources.getString("Editor.description_label"), p.description);
      controls.add(descInput.getControls());
      nameInput = new StringConfigurer(null, Resources.getString("Editor.menu_command"), p.commandName);
      controls.add(nameInput.getControls());
      keyInput = new NamedHotKeyConfigurer(null, Resources.getString("Editor.keyboard_command"), p.key);
      controls.add(keyInput.getControls());
      unstackInput = new BooleanConfigurer(null, Resources.getString("Editor.Deselect.remove_piece_from_stack"), p.unstack);
      controls.add(unstackInput.getControls());
    }

    public Component getControls() {
      return controls;
    }

    /** Re-encodes the edited values; field order must match mySetType. */
    public String getType() {
      SequenceEncoder se = new SequenceEncoder(DELIMITER);
      se.append(nameInput.getValueString()).append(keyInput.getValueString()).append(descInput.getValueString()).append(unstackInput.getValueString());
      return ID + se.getValue();
    }

    public String getState() {
      return "";
    }
  }

  /**
   * Return Property names exposed by this trait
   */
  public List<String> getPropertyNames() {
    ArrayList<String> l = new ArrayList<String>();
    l.add(Properties.SELECTED);
    return l;
  }
}
package org.apache.velocity.test; import java.util.Properties; import java.util.List; import java.util.ArrayList; import java.util.Iterator; import java.io.IOException; import java.io.FileInputStream; import java.io.BufferedInputStream; import junit.framework.*; import org.apache.velocity.runtime.Runtime; /** * Test suite for Apache Velocity. * * @author <a href="mailto:dlr@finemaltcoding.com">Daniel Rall</a> * @version $Id: VelocityTestSuite.java,v 1.3 2000/10/23 06:17:25 dlr Exp $ */ public class VelocityTestSuite extends TestSuite { /** * The name of the test suite properties file. */ private static final String PROPS_FILE_NAME = "TestSuite.properties"; /** * The test suite properties. */ private Properties props; /** * Creates an instace of the Apache Velocity test suite. */ public VelocityTestSuite () { super("Apache Velocity test suite"); // Read in properties file. props = new Properties(); try { props.load (new BufferedInputStream(new FileInputStream(PROPS_FILE_NAME))); } catch (IOException e) { // TODO: Need to initialize the Runtime first. Runtime.error(e); } // Add test cases here. List templateTestCases = getTemplateTestCases(); for (Iterator iter = templateTestCases.iterator(); iter.hasNext(); ) { addTest(new TemplateTestCase((String)iter.next())); } addTest(new VelocityTest("Apache Velocity")); } /** * Returns a list of the template test cases to run. * * @return A <code>List</code> of <code>String</code> objects naming the * test cases. */ private List getTemplateTestCases () { List testCases = new ArrayList(); // TODO: Parse the template test cases from the properties file. return testCases; } }
package org.jcoderz.phoenix.report;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;

import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.Task;
import org.apache.tools.ant.taskdefs.Execute;
import org.apache.tools.ant.taskdefs.LogStreamHandler;
import org.apache.tools.ant.taskdefs.PumpStreamHandler;
import org.apache.tools.ant.types.CommandlineJava;
import org.apache.tools.ant.types.Path;

import org.jcoderz.commons.types.Date;
import org.jcoderz.commons.util.FileUtils;
import org.jcoderz.commons.util.StringUtil;

/**
 * This is the Ant task for the Jcoderz Report.
 * This task forks all processing steps as separate processes
 * so that memory for each process can be controlled separately.
 *
 * @author Michael Rumpf
 */
public class JcReportAntTask extends Task
{
    // Default maximum heap (in MB) for each forked tool process.
    private static final int DEFAULT_MAX_HEAP = 512;
    // Single timestamp shared by all reports generated in this run.
    private static final Date CREATION_TIMESTAMP = Date.now();

    // Nested configuration elements, populated by Ant via the create*() methods.
    private NestedReportsElement mReports = null;
    private NestedMappingsElement mMappings = null;
    private NestedToolsElement mTools = null;
    private NestedFiltersElement mFilterElements = null;

    // Task attributes set by Ant through the setters below.
    private String mName = null;
    private File mDest = null;
    private String mWikiBase = null;
    private String mWebRcsBase = null;
    // NOTE(review): assigned via setWebRcsSuffix but not read in the visible
    // portion of this file — confirm usage before removing.
    private String mWebRcsSuffix = null;
    private String mPackageBase = null;
    private String mProjectBase = null;
    private String mStylesheet = null;
    private File mTempfolder = null;
    private int mMaxHeap = DEFAULT_MAX_HEAP;
    private boolean mDebug = false;
    private File mWorkingDir = null;

    /** The global Java Commandline instance */
    private final CommandlineJava mCommandline = new CommandlineJava();

    /**
     * Returns the working directory.
     *
     * @return the working directory.
     */
    public File getWorkingDir ()
    {
        return mWorkingDir;
    }

    /**
     * Sets the maximum heap value.
     * If not defined in the Ant task the default value of 512MB will be used.
     *
     * @param maxheap the max heap value.
     */
    public void setMaxheap (String maxheap)
    {
        mMaxHeap = Integer.valueOf(maxheap).intValue();
    }

    /**
     * Sets the name of the report.
     *
     * @param name the report name.
     */
    public void setName (String name)
    {
        mName = name;
    }

    /**
     * Sets the destination of the report.
     * The destination folder is created when it does not exist yet.
     *
     * @param dest the report destination.
     */
    public void setDest (String dest)
    {
        mDest = new File(dest);
        if (!mDest.exists())
        {
            mDest.mkdirs();
        }
    }

    /** @param packageBase the base URL/path used for package links. */
    public void setPackageBase (String packageBase)
    {
        mPackageBase = packageBase;
    }

    /** @param projectBase the project home passed to the HTML generator. */
    public void setProjectBase (String projectBase)
    {
        mProjectBase = projectBase;
    }

    /** @param webRcsBase the web RCS (cvs) base URL. */
    public void setWebRcsBase (String webRcsBase)
    {
        mWebRcsBase = webRcsBase;
    }

    /** @param webRcsSuffix the web RCS URL suffix. */
    public void setWebRcsSuffix (String webRcsSuffix)
    {
        mWebRcsSuffix = webRcsSuffix;
    }

    /** @param wikiBase the wiki base URL. */
    public void setWikiBase (String wikiBase)
    {
        mWikiBase = wikiBase;
    }

    /**
     * Sets the stylesheet to be used for the report.
     *
     * @param stylesheet the report stylesheet.
     */
    public void setStylesheet (String stylesheet)
    {
        mStylesheet = stylesheet;
    }

    /**
     * Sets the temporary folder.
     *
     * @param tempfolder the temporary folder.
     */
    public void setTempfolder (String tempfolder)
    {
        mTempfolder = new File(tempfolder);
    }

    /**
     * Sets the debug parameter.
     *
     * @param debug the debug parameter.
     */
    public void setDebug (Boolean debug)
    {
        mDebug = debug.booleanValue();
    }

    /** @return a new classpath element on the global command line. */
    public Path createClasspath ()
    {
        return mCommandline.createClasspath(getProject()).createPath();
    }

    /**
     * This method is called by Ant for executing this task.
     *
     * @throws BuildException whenever a problem occurs.
     */
    public void execute ()
        throws BuildException
    {
        try
        {
            // Always show this line
            super.log("Executing JcReportAntTask...");

            if (mTempfolder == null)
            {
                throw new BuildException("You must specify a temporary folder!",
                        getLocation());
            }
            mTempfolder.mkdirs();
            if (!mTempfolder.isDirectory())
            {
                throw new BuildException("Temporary folder must be a directory!",
                        getLocation());
            }
            mWorkingDir = new File(mTempfolder, mName);
            mWorkingDir.mkdirs();

            // Check that the names of the reports differ!
            final Set reportNames = new HashSet();
            Iterator iterReport = mReports.getReports().iterator();
            while (iterReport.hasNext())
            {
                final NestedReportElement nre
                        = (NestedReportElement) iterReport.next();
                reportNames.add(nre.getName());
            }
            // A duplicate name collapses in the set, so sizes differ.
            if (reportNames.size() != mReports.getReports().size())
            {
                throw new BuildException("Reports must not have the same names!",
                        getLocation());
            }

            // Delete the dest folder in case it exists so that we don't mix
            // already deleted files. And create a fresh folder afterwards again.
            if (mDest.exists())
            {
                FileUtils.rmdir(mDest);
                mDest.mkdirs();
            }

            // Now start processing the different reports
            log("Processing reports...");
            final List jcReports = new ArrayList();
            iterReport = mReports.getReports().iterator();
            while (iterReport.hasNext())
            {
                final NestedReportElement nre
                        = (NestedReportElement) iterReport.next();
                log("Processing report '" + nre.getName() + "' ...");

                // Create a temp folder for this report
                final File reportTmpDir = new File(mWorkingDir, nre.getName());
                reportTmpDir.mkdirs();

                final File srcDir = new File(nre.getSourcePath());
                final File clsDir = new File(nre.getClassPath());

                // Each tool runs only when its nested element was configured.
                File checkstyleXml = null;
                final NestedCheckstyleElement nce = mTools.getCheckstyle();
                if (nce != null)
                {
                    log("Running checkstyle on '" + nre.getName() + "'...");
                    checkstyleXml = nce.executeCheckstyle(reportTmpDir, srcDir,
                            clsDir);
                }
                File findbugsXml = null;
                final NestedFindbugsElement nfe = mTools.getFindbugs();
                if (nfe != null)
                {
                    log("Running findbugs on '" + nre.getName() + "'...");
                    findbugsXml = nfe.executeFindbugs(reportTmpDir, srcDir,
                            clsDir);
                }
                File pmdXml = null;
                final NestedPmdElement npe = mTools.getPmd();
                if (npe != null)
                {
                    log("Running pmd on '" + nre.getName() + "'...");
                    pmdXml = npe.executePmd(reportTmpDir, srcDir, clsDir);
                }
                File cpdXml = null;
                final NestedCpdElement nde = mTools.getCpd();
                if (nde != null)
                {
                    log("Running cpd on '" + nre.getName() + "'...");
                    cpdXml = nde.executeCpd(reportTmpDir, srcDir, clsDir);
                }
                File coberturaXml = null;
                final NestedCoberturaElement noe = mTools.getCobertura();
                if (noe != null)
                {
                    log("Running cobertura on '" + nre.getName() + "'...");
                    coberturaXml = noe.executeCobertura(reportTmpDir, srcDir,
                            clsDir);
                }

                // Merge the different reports into one jcoderz-report.xml
                // This must be done on a level by level basis
                final File jcReport = executeReportNormalizer(srcDir,
                        reportTmpDir, nre.getLevel(), checkstyleXml,
                        findbugsXml, pmdXml, cpdXml, coberturaXml);
                jcReports.add(jcReport);
            }
            final File jcReport = executeReportMerger(jcReports);
            executeJava2Html(jcReport);
        }
        catch (Throwable ex)
        {
            // NOTE(review): the stack trace is printed AND rethrown wrapped in
            // a BuildException, so Ant reports the failure twice.
            ex.printStackTrace();
            throw new BuildException("An unexpected exception occured!", ex);
        }
    }

    /**
     * Forks the ReportNormalizer on one report's tool outputs and returns
     * the produced jcoderz-report.xml file inside reportDir.
     */
    private File executeReportNormalizer (File srcDir, File reportDir,
            ReportLevel level, File checkstyleXml, File findbugsXml,
            File pmdXml, File cpdXml, File coberturaXml)
    {
        log("Creating report normalizer command line...");
        final CommandlineJava cmd
                = createCommandlineJava(mCommandline, mMaxHeap);
        cmd.setClassname("org.jcoderz.phoenix.report.ReportNormalizer");
        cmd.createArgument().setValue("-srcDir");
        cmd.createArgument().setFile(srcDir);
        cmd.createArgument().setValue("-level");
        cmd.createArgument().setValue(level.toString());
        if (mDebug)
        {
            cmd.createArgument().setValue("-loglevel");
            cmd.createArgument().setValue("FINEST");
        }
        cmd.createArgument().setValue("-projectName");
        cmd.createArgument().setValue(mName);
        cmd.createArgument().setValue("-out");
        cmd.createArgument().setFile(reportDir);
        // Each tool report is passed only when the tool actually ran.
        if (checkstyleXml != null)
        {
            cmd.createArgument().setValue("-checkstyle");
            cmd.createArgument().setFile(checkstyleXml);
        }
        if (findbugsXml != null)
        {
            cmd.createArgument().setValue("-findbugs");
            cmd.createArgument().setFile(findbugsXml);
        }
        if (pmdXml != null)
        {
            cmd.createArgument().setValue("-pmd");
            cmd.createArgument().setFile(pmdXml);
        }
        if (cpdXml != null)
        {
            cmd.createArgument().setValue("-cpd");
            cmd.createArgument().setFile(cpdXml);
        }
        if (coberturaXml != null)
        {
            cmd.createArgument().setValue("-cobertura");
            cmd.createArgument().setFile(coberturaXml);
        }
        forkToolProcess(this, cmd, new LogStreamHandler(this,
                Project.MSG_INFO, Project.MSG_WARN));
        final File outFile
                = new File(reportDir, ReportNormalizer.JCODERZ_REPORT_XML);
        return outFile;
    }

    /**
     * Forks the ReportMerger over all per-report files, applies the
     * configured filters, and copies the merged result to the destination.
     */
    private File executeReportMerger (List jcReports)
    {
        log("Creating report merger command line...");
        final CommandlineJava cmd
                = createCommandlineJava(mCommandline, mMaxHeap);
        cmd.setClassname("org.jcoderz.phoenix.report.ReportMerger");
        if (mDebug)
        {
            cmd.createArgument().setValue("-loglevel");
            cmd.createArgument().setValue("FINEST");
        }
        cmd.createArgument().setValue("-out");
        cmd.createArgument().setFile(mWorkingDir);
        final Iterator jcReportIter = jcReports.iterator();
        while (jcReportIter.hasNext())
        {
            final File jcReport = (File) jcReportIter.next();
            cmd.createArgument().setValue("-jcreport");
            cmd.createArgument().setFile(jcReport);
        }
        final Iterator filterIter = mFilterElements.getFilters().iterator();
        while (filterIter.hasNext())
        {
            final NestedFilterElement filterElement
                    = (NestedFilterElement) filterIter.next();
            cmd.createArgument().setValue("-filter");
            cmd.createArgument().setFile(filterElement.getFile());
        }
        forkToolProcess(this, cmd, new LogStreamHandler(this,
                Project.MSG_INFO, Project.MSG_WARN));
        final File outFile
                = new File(mWorkingDir, ReportNormalizer.JCODERZ_REPORT_XML);
        try
        {
            FileUtils.copy(outFile, mDest);
        }
        catch (IOException e)
        {
            throw new BuildException("Could not copy '" + outFile
                    + "' to destination folder '" + mDest + "'!", e,
                    getLocation());
        }
        return outFile;
    }

    /**
     * Executes the Java2Html tool in a separate process.
     *
     * The following command line parameters are supported:
     * <pre>
     *    -outDir
     *    -report
     *    -projectHome
     *    -projectName
     *    -cvsBase
     *    -timestamp
     *    -wikiBase
     *    -reportStyle
     *    -packageBase
     * </pre>
     */
    private void executeJava2Html (File jcReport)
    {
        log("Creating java2html command line...");
        final CommandlineJava cmd
                = createCommandlineJava(mCommandline, mMaxHeap);
        cmd.setClassname("org.jcoderz.phoenix.report.Java2Html");
        // let it run in headless mode to avoid exceptions because of a missing X
        cmd.createVmArgument().setValue("-Djava.awt.headless=true");
        cmd.createArgument().setValue("-outDir");
        cmd.createArgument().setFile(mDest);
        cmd.createArgument().setValue("-report");
        cmd.createArgument().setFile(jcReport);
        cmd.createArgument().setValue("-timestamp");
        cmd.createArgument().setValue(CREATION_TIMESTAMP.toString(
                "yyyyMMddHHmmss"));
        if (mProjectBase != null)
        {
            cmd.createArgument().setValue("-projectHome");
            cmd.createArgument().setValue(mProjectBase);
        }
        if (mStylesheet != null)
        {
            cmd.createArgument().setValue("-reportStyle");
            cmd.createArgument().setValue(mStylesheet);
        }
        cmd.createArgument().setValue("-projectName");
        cmd.createArgument().setValue(mName);
        cmd.createArgument().setValue("-cvsBase");
        cmd.createArgument().setValue(mWebRcsBase);
        cmd.createArgument().setValue("-wikiBase");
        cmd.createArgument().setValue(mWikiBase);
        if (mPackageBase != null)
        {
            cmd.createArgument().setValue("-packageBase");
            cmd.createArgument().setValue(mPackageBase);
        }
        if (mDebug)
        {
            cmd.createArgument().setValue("-loglevel");
            cmd.createArgument().setValue("FINEST");
        }
        forkToolProcess(this, cmd, new LogStreamHandler(this,
                Project.MSG_INFO, Project.MSG_WARN));
    }

    // Reports section

    /**
     * This method is called by Ant to create an instance of the
     * NestedReportsElement class when the 'reports' tag is read.
     *
     * @return the new instance of type NestedReportsElement.
     */
    public NestedReportsElement createReports ()
    {
        mReports = new NestedReportsElement(this);
        return mReports;
    }

    /** Container for the nested &lt;report&gt; elements. */
    public static class NestedReportsElement
    {
        private List mReports = new ArrayList();
        private JcReportAntTask mTask;

        public NestedReportsElement (JcReportAntTask task)
        {
            mTask = task;
        }

        /** Called by Ant for each nested &lt;report&gt; element. */
        public NestedReportElement createReport ()
        {
            mTask.log("Creating report element...");
            final NestedReportElement nre = new NestedReportElement();
            mReports.add(nre);
            return nre;
        }

        public List getReports ()
        {
            return mReports;
        }
    }

    /**
     * This class represents a &lt;report&gt; tag in an Ant
     * <code>build.xml</code> file.
     *
     * @author Michael Rumpf
     */
    public static class NestedReportElement
    {
        private String mName;
        private ReportLevel mLevel;
        private String mSourcePath;
        private String mClassPath;

        public String getName ()
        {
            return mName;
        }

        public void setName (String name)
        {
            mName = name;
        }

        public ReportLevel getLevel ()
        {
            return mLevel;
        }

        public void setLevel (String level)
        {
            mLevel = ReportLevel.fromString(level);
        }

        public String getClassPath ()
        {
            return mClassPath;
        }

        public void setClassPath (String classPath)
        {
            mClassPath = classPath;
        }

        public String getSourcePath ()
        {
            return mSourcePath;
        }

        public void setSourcePath (String sourcePath)
        {
            mSourcePath = sourcePath;
        }
    }

    // Mappings section

    /**
     * This method is called by Ant to create an instance of the
     * NestedMappingsElement class when the 'mappings' tag is read.
     *
     * @return the new instance of type NestedMappingsElement.
     */
    public NestedMappingsElement createMappings ()
    {
        mMappings = new NestedMappingsElement(this);
        return mMappings;
    }

    /** Container for the nested mapping elements. */
    public static class NestedMappingsElement
    {
        private List mMappings = new ArrayList();
        private JcReportAntTask mTask;

        public NestedMappingsElement (JcReportAntTask task)
        {
            mTask = task;
        }

        /** Called by Ant for each nested &lt;webrcs&gt; mapping element. */
        public NestedMappingElement createWebRcs ()
        {
            mTask.log("Creating mapping element...");
            final NestedMappingElement nme = new NestedMappingElement();
            mMappings.add(nme);
            return nme;
        }

        public List getMappings ()
        {
            return mMappings;
        }
    }

    /** A single pattern-to-URL mapping element. */
    public static class NestedMappingElement
    {
        private String mPattern;
        private String mUrl;
        private String mSuffix;

        public String getPattern ()
        {
            return mPattern;
        }

        public void setPattern (String pattern)
        {
            mPattern = pattern;
        }

        public String getSuffix ()
        {
            return mSuffix;
        }

        public void setSuffix (String suffix)
        {
            mSuffix = suffix;
        }

        public String getUrl ()
        {
            return mUrl;
        }

        public void setUrl (String url)
        {
            mUrl = url;
        }
    }

    // Tools section

    /**
     * This method is called by Ant to create an instance of the
     * NestedToolsElement class when the 'tools' tag is read.
     *
     * @return the new instance of type NestedToolsElement.
     */
    public NestedToolsElement createTools ()
    {
        mTools = new NestedToolsElement(this);
        return mTools;
    }

    /** Container for the per-tool configuration elements. */
    public static class NestedToolsElement
    {
        private JcReportAntTask mTask;
        private NestedPmdElement mPmd = null;
        private NestedCpdElement mCpd = null;
        private NestedFindbugsElement mFindbugs = null;
        private NestedCheckstyleElement mCheckstyle = null;
        private NestedCoberturaElement mCobertura = null;

        public NestedToolsElement (JcReportAntTask task)
        {
            mTask = task;
        }

        public NestedPmdElement createPmd ()
        {
            mTask.log("Creating Pmd element...");
            mPmd = new NestedPmdElement(mTask);
            return mPmd;
        }

        public NestedPmdElement getPmd ()
        {
            return mPmd;
        }

        public NestedCpdElement createCpd ()
        {
            mTask.log("Creating Cpd element...");
            mCpd = new NestedCpdElement(mTask);
            return mCpd;
        }

        public NestedCpdElement getCpd ()
        {
            return mCpd;
        }

        public NestedFindbugsElement createFindbugs ()
        {
            mTask.log("Creating Findbugs element...");
            mFindbugs = new NestedFindbugsElement(mTask);
            return mFindbugs;
        }

        public NestedFindbugsElement getFindbugs ()
        {
            return mFindbugs;
        }

        public NestedCheckstyleElement createCheckstyle ()
        {
            mTask.log("Creating Checkstyle element...");
            mCheckstyle = new NestedCheckstyleElement(mTask);
            return mCheckstyle;
        }

        public NestedCheckstyleElement getCheckstyle ()
        {
            return mCheckstyle;
        }

        public NestedCoberturaElement createCobertura ()
        {
            mTask.log("Creating Cobertura element...");
            mCobertura = new NestedCoberturaElement(mTask);
            return mCobertura;
        }

        public NestedCoberturaElement getCobertura ()
        {
            return mCobertura;
        }
    }

    /**
     * This is the base class for all tool elements.
     * It provides support for the maxheap attribute
     * and the nested classpath element.
     *
     * @author Michael Rumpf
     */
    public static class NestedToolElement
    {
        protected JcReportAntTask mTask;
        protected Path mPath;
        protected int mMaxHeap = DEFAULT_MAX_HEAP;

        /** The global Java Commandline instance */
        protected final CommandlineJava mCommandline = new CommandlineJava();

        /**
         * Sets the maximum heap value.
         * If not defined in the Ant task the default value of 512MB will be used.
         *
         * @param maxheap the max heap value.
         */
        public void setMaxheap (String maxheap)
        {
            mMaxHeap = Integer.valueOf(maxheap).intValue();
        }

        /**
         * Creates a classpath for the tool element.
         *
         * @return the created classpath.
         */
        public Path createClasspath ()
        {
            mPath = mCommandline.createClasspath(mTask.getProject()).createPath();
            return mPath;
        }
    }

    /** Configuration and launcher for the PMD tool. */
    public static class NestedPmdElement extends NestedToolElement
    {
        private String mConfig;
        private String mTargetjdk;
        private String mEncoding;

        public NestedPmdElement (JcReportAntTask task)
        {
            mTask = task;
            mCommandline.setClassname("net.sourceforge.pmd.PMD");
        }

        public void setConfig (String config)
        {
            mConfig = config;
        }

        public void setTargetjdk (String targetjdk)
        {
            mTargetjdk = targetjdk;
        }

        public void setEncoding (String encoding)
        {
            mEncoding = encoding;
        }

        /**
         * Forks PMD over srcDir; stdout is captured into pmd.xml.
         *
         * @return the generated pmd.xml report file.
         */
        public File executePmd (File reportDir, File srcDir, File clsDir)
        {
            mTask.log("Creating pmd command line...");
            final CommandlineJava cmd
                    = createCommandlineJava(mCommandline, mMaxHeap);
            cmd.createArgument().setFile(srcDir);
            // We always write pmd reports in XML format
            cmd.createArgument().setValue("xml");
            if (mConfig != null)
            {
                cmd.createArgument().setFile(new File(mConfig));
            }
            if (mEncoding != null)
            {
                cmd.createArgument().setValue("-encoding");
                cmd.createArgument().setValue(mEncoding);
            }
            if (mTargetjdk != null)
            {
                cmd.createArgument().setValue("-targetjdk");
                cmd.createArgument().setValue(mTargetjdk);
            }
            final File outFile = new File(reportDir, "pmd.xml");
            FileOutputStream fos = null;
            try
            {
                fos = new FileOutputStream(outFile);
            }
            catch (IOException e)
            {
                throw new BuildException("Could not find output file: "
                        + outFile.getAbsolutePath(), e, mTask.getLocation());
            }
            // PMD prints the report to stdout, which is pumped into the file.
            forkToolProcess(mTask, cmd, new PumpStreamHandler(fos, System.err));
            return outFile;
        }
    }

    /** Configuration and launcher for the CPD (copy/paste detector) tool. */
    public static class NestedCpdElement extends NestedToolElement
    {
        private static final int DEFAULT_MINIMUM_TOKENS = 100;
        private int mMinimumtokens = DEFAULT_MINIMUM_TOKENS;

        public NestedCpdElement (JcReportAntTask task)
        {
            mTask = task;
            mCommandline.setClassname("net.sourceforge.pmd.cpd.CPD");
        }

        public void setMinimumtokens (String minimumtokens)
        {
            mMinimumtokens = Integer.valueOf(minimumtokens).intValue();
        }

        /**
         * Executes the cpd tool in a separate process.
         *
         * The following command line switches are supported by this method:
         * <pre>
         *    CPD --minimum-tokens xxx --files xxx
         * </pre>
         */
        public File executeCpd (File reportDir, File srcDir, File clsDir)
        {
            mTask.log("Creating cpd command line...");
            final CommandlineJava cmd
                    = createCommandlineJava(mCommandline, mMaxHeap);
            cmd.createArgument().setFile(srcDir);
            // We always write cpd reports in XML format
            cmd.createArgument().setValue("--format");
            cmd.createArgument().setValue("net.sourceforge.pmd.cpd.XMLRenderer");
            cmd.createArgument().setValue("--language");
            cmd.createArgument().setValue("java");
            cmd.createArgument().setValue("--files");
            cmd.createArgument().setFile(srcDir);
            cmd.createArgument().setValue("--minimum-tokens");
            cmd.createArgument().setValue(String.valueOf(mMinimumtokens));
            final File outFile = new File(reportDir, "cpd.xml");
            FileOutputStream fos = null;
            try
            {
                fos = new FileOutputStream(outFile);
            }
            catch (IOException e)
            {
                throw new BuildException("Could not find output file: "
                        + outFile.getAbsolutePath(), e, mTask.getLocation());
            }
            // CPD prints the report to stdout, which is pumped into the file.
            forkToolProcess(mTask, cmd, new PumpStreamHandler(fos, System.err));
            return outFile;
        }
    }

    /** Configuration and launcher for the FindBugs tool. */
    public static class NestedFindbugsElement extends NestedToolElement
    {
        private String mConfig;
        private String mWarninglevel = "medium";
        private String mEffort = "default";
        private String mOmitVisitors = "";
        private Path mAuxPath;
        private boolean mFindBugsDebug = false;

        /**
         * Path of the findbugs plugin jar files.
         * Must at least contain
         * the coreplugin.jar
         */
        private Path mPluginList;

        public NestedFindbugsElement (JcReportAntTask task)
        {
            mTask = task;
            mCommandline.setClassname("edu.umd.cs.findbugs.FindBugs");
        }

        /**
         * Sets the debug parameter.
         * @param debug the debug parameter.
         */
        public void setDebug (Boolean debug)
        {
            mFindBugsDebug = debug.booleanValue();
        }

        public void setOmitVisitors (String omitVisitors)
        {
            mOmitVisitors = omitVisitors;
        }

        public void setConfig (String config)
        {
            mConfig = config;
        }

        /** Accepts only min/default/max; invalid values are logged and ignored. */
        public void setEffort (String effort)
        {
            if (effort.equals("min") || effort.equals("default")
                    || effort.equals("max"))
            {
                mEffort = effort;
            }
            else
            {
                mTask.log("Invalid effort value '" + effort + "!'");
            }
        }

        /** Accepts only the known levels; invalid values are logged and ignored. */
        public void setWarninglevel (String warninglevel)
        {
            if (warninglevel.equals("experimental") || warninglevel.equals("low")
                    || warninglevel.equals("medium")
                    || warninglevel.equals("high"))
            {
                mWarninglevel = warninglevel;
            }
            else
            {
                mTask.log("Invalid warninglevel value '" + warninglevel + "!'");
            }
        }

        /**
         * The findbugs tool needs a list of jar files where all the plugins are
         * defined in. Minimum plugin list contains the coreplugin.
         *
         * @return the created plugin list path.
         */
        public Path createPluginlist ()
        {
            mPluginList
                    = mCommandline.createClasspath(mTask.getProject()).createPath();
            return mPluginList;
        }

        /**
         * The findbugs tool needs an auxiliary classpath with all the classes,
         * referenced from the project class files.
         *
         * @return the created auxiliary classpath.
         */
        public Path createAuxclasspath ()
        {
            mAuxPath = mCommandline.createClasspath(
                    mTask.getProject()).createPath();
            return mAuxPath;
        }

        /**
         * Executes the findbugs tool in a separate process.
         *
         * <pre>
         * maxheap:
         *   -maxHeap size    Maximum Java heap size in megabytes (default=256)
         *
         * effort:
         *   -effort[:min|default|max]    set analysis effort level
         *
         * warninglevel:
         *   -experimental    report all warnings including experimental bug
         *                    patterns
         *   -low             report all warnings
         *   -medium          report only medium and high priority warnings
         *                    [default]
         *   -high            report only high priority warnings
         *
         * config:
         *   -exclude &lt;filter file>    include only bugs matching given filter
         *
         * internally:
         *   -outputFile &lt;filename>    Save output in named file
         *   -xml[:withMessages]        XML output (optionally with messages)
         *
         * auxclasspath:
         *   -auxclasspath &lt;classpath>    set aux classpath for analysis
         *
         * report: sourcepath
         *   -sourcepath &lt;source path>    set source path for analyzed classes
         * </pre>
         * The target assumes that all libs needed by findbugs are on the
         * classpath and the plugins are set via pluginlist element.
         *
         * @param name the name of the report
         * @param srcDir the source folder
         * @return the exit code of the process.
         */
        public File executeFindbugs (File reportDir, File srcDir, File clsDir)
        {
            mTask.log("Creating findbugs command line...");
            final CommandlineJava cmd
                    = createCommandlineJava(mCommandline, mMaxHeap);
            if (mFindBugsDebug)
            {
                cmd.createVmArgument().setValue("-Dfindbugs.debug=true");
            }
            if (mPluginList == null)
            {
                throw new BuildException("The 'pluginlist' element is mandatory"
                        + " for the findbugs task!", mTask.getLocation());
            }
            cmd.createArgument().setValue("-pluginList");
            cmd.createArgument().setPath(mPluginList);
            if (!StringUtil.isEmptyOrNull(mOmitVisitors))
            {
                cmd.createArgument().setValue("-omitVisitors");
                cmd.createArgument().setValue(mOmitVisitors);
            }
            final File outFile = new File(reportDir, "findbugs.xml");
            cmd.createArgument().setValue("-outputFile");
            cmd.createArgument().setFile(outFile);
            cmd.createArgument().setValue("-sourcepath");
            cmd.createArgument().setFile(srcDir);
            // We always write findbugs reports in XML format
            cmd.createArgument().setValue("-xml:withMessages");
            if (mConfig != null)
            {
                cmd.createArgument().setValue("-exclude");
                cmd.createArgument().setFile(new File(mConfig));
            }
            if (mAuxPath != null)
            {
                cmd.createArgument().setValue("-auxclasspath");
                cmd.createArgument().setPath(mAuxPath);
            }
            cmd.createArgument().setValue("-" + mWarninglevel);
            cmd.createArgument().setValue("-effort:" + mEffort);
            cmd.createArgument().setFile(clsDir);
            // TODO: use PumpStreamHandler to suppress info messages from FindBugs
            forkToolProcess(mTask, cmd, new LogStreamHandler(mTask,
                    Project.MSG_INFO, Project.MSG_WARN));
            return outFile;
        }
    }

    /** Configuration and launcher for the Checkstyle tool. */
    public static class NestedCheckstyleElement extends NestedToolElement
    {
        private String mConfig;
        private String mProperties;

        public NestedCheckstyleElement (JcReportAntTask task)
        {
            mTask = task;
            mCommandline.setClassname("com.puppycrawl.tools.checkstyle.Main");
        }

        public void setConfig (String config)
        {
            mConfig = config;
        }

        public void setProperties (String properties)
        {
            mProperties = properties;
        }

        /**
         * Forks Checkstyle over srcDir; requires the 'config' attribute.
         *
         * @return the generated checkstyle.xml report file.
         */
        public File executeCheckstyle (File reportDir, File srcDir,
                File clsPath)
        {
            mTask.log("Creating checkstyle command line...");
            final CommandlineJava cmd
                    = createCommandlineJava(mCommandline, mMaxHeap);
            cmd.createArgument().setValue("-o");
            final File outFile = new File(reportDir, "checkstyle.xml");
            cmd.createArgument().setFile(outFile);
            if (mConfig == null)
            {
                throw new BuildException("The 'config' attribute is mandatory"
                        + " for the checkstyle task!", mTask.getLocation());
            }
            cmd.createArgument().setValue("-c");
            cmd.createArgument().setFile(new File(mConfig));
            // We always write checkstyle reports in XML format
            cmd.createArgument().setValue("-f");
            cmd.createArgument().setValue("xml");
            if (mProperties != null)
            {
                cmd.createArgument().setValue("-p");
                cmd.createArgument().setFile(new File(mProperties));
            }
            cmd.createArgument().setValue("-r");
            cmd.createArgument().setFile(srcDir);
            forkToolProcess(mTask, cmd, new LogStreamHandler(mTask,
                    Project.MSG_INFO, Project.MSG_WARN));
            return outFile;
        }
    }

    /** Configuration and launcher for the Cobertura coverage report tool. */
    public static class NestedCoberturaElement extends NestedToolElement
    {
        private String mDatafile;

        public NestedCoberturaElement (JcReportAntTask task)
        {
            mTask = task;
            mCommandline.setClassname("net.sourceforge.cobertura.reporting.Main");
        }

        public void setDatafile (String datafile)
        {
            mDatafile = datafile;
        }

        /**
         * Executes the cobertura tool in a separate process.
         *
         * <pre>
         *    [--datafile file]
         *    [--destination dir]
         *    source code directory [...]
* </pre> */ public File executeCobertura (File reportDir, File srcDir, File clsPath) { mTask.log("Creating cobertura command line..."); final CommandlineJava cmd = createCommandlineJava(mCommandline, mMaxHeap); File dataFile = null; if (mDatafile == null) { throw new BuildException("The datafile attribute is mandatory!", mTask.getLocation()); } else { dataFile = new File(mDatafile); if (!dataFile.exists()) { throw new BuildException("The datafile was not found!", mTask.getLocation()); } } cmd.createArgument().setValue("--destination"); final File outFile = new File(reportDir, "coverage.xml"); cmd.createArgument().setFile(reportDir); // We always write checkstyle reports in XML format cmd.createArgument().setValue("--format"); cmd.createArgument().setValue("xml"); cmd.createArgument().setValue("--datafile"); cmd.createArgument().setFile(dataFile); cmd.createArgument().setFile(srcDir); forkToolProcess(mTask, cmd, new LogStreamHandler(mTask, Project.MSG_INFO, Project.MSG_WARN)); return outFile; } } // Filters section /** * This method is called by Ant to create an instance of the * NestedFiltersElement class when the 'filters' tag is read. * * @return the new instance of type NestedFiltersElement. 
*/ public NestedFiltersElement createFilters () { mFilterElements = new NestedFiltersElement(this); return mFilterElements; } public static class NestedFiltersElement { private List mFilters = new ArrayList(); private JcReportAntTask mTask; public NestedFiltersElement (JcReportAntTask task) { mTask = task; } public NestedFilterElement createFilter () { mTask.log("Creating filter element..."); final NestedFilterElement nfe = new NestedFilterElement(); mFilters.add(nfe); return nfe; } public List getFilters () { return mFilters; } } public static class NestedFilterElement { private File mFile; public File getFile () { return mFile; } public void setFile (String file) { mFile = new File(file); } } // Helper methods /** * Creates a copy of the global command line instance * and sets the maximum heap vm parameter. * * @param cmdline the global command line instance. * @param maxHeap the maximum heap size for the process. * @return a copy of the global command line instance. */ private static CommandlineJava createCommandlineJava ( CommandlineJava cmdline, int maxHeap) { final CommandlineJava cmd; try { cmd = (CommandlineJava) cmdline.clone(); } catch (CloneNotSupportedException unexpected) { throw new RuntimeException( "Ups, CommandLineJava doesn't support the method clone()", unexpected); } cmd.createVmArgument().setValue("-Xmx" + maxHeap + "m"); return cmd; } /** * Forks the tool as external process. * * @param cmdline the command line. * @param psh the pump stream handler for redirecting the process streams. 
*/ private static void forkToolProcess (JcReportAntTask task, CommandlineJava cmdline, PumpStreamHandler psh) { final Execute execute = new Execute(psh); execute.setCommandline(cmdline.getCommandline()); try { task.logCommandLine(cmdline.getCommandline()); final int exitCode = execute.execute(); if (exitCode != 0) { task.log("Process returned with exit code: " + exitCode); } } catch (IOException e) { throw new BuildException("Process fork failed.", e, task.getLocation()); } } /** * This is a special logging method to print the array of command * line parameters to the ant logging sub-system. * * @param cmdLine the command line parameter array. */ private void logCommandLine (String[] cmdLine) { log("Command line: "); for (int i = 0; i < cmdLine.length; i++) { log(" " + cmdLine[i]); } } /** * Overwrites the method from the super class in order to * check for debug mode. * * @param msg the message to log. */ public void log (String msg) { if (mDebug) { super.log(msg); } } }
package org.joda.time.field;

import org.joda.time.DateTimeField;
import org.joda.time.DurationField;

/**
 * <code>DecoratedDateTimeField</code> extends {@link AbstractDateTimeField},
 * implementing only the minimum required set of methods. These implemented
 * methods delegate to a wrapped field.
 * <p>
 * This design allows new DateTimeField types to be defined that piggyback on
 * top of another, inheriting all the safe method implementations from
 * AbstractDateTimeField. Should any method require pure delegation to the
 * wrapped field, simply override and use the provided getWrappedField method.
 * <p>
 * DecoratedDateTimeField is thread-safe and immutable, and its subclasses must
 * be as well.
 *
 * @author Brian S O'Neill
 * @since 1.0
 * @see DelegatedDateTimeField
 */
public abstract class DecoratedDateTimeField extends AbstractDateTimeField {

    /** Serialization version */
    private static final long serialVersionUID = 203115783733757597L;

    /** The DateTimeField being wrapped */
    private final DateTimeField iField;

    /**
     * Constructor.
     *
     * @param field  the field being decorated, must be supported and non-null
     * @param name  allow name to be overridden
     * @throws IllegalArgumentException if the field is null or unsupported
     */
    protected DecoratedDateTimeField(DateTimeField field, String name) {
        super(name);
        if (field == null) {
            throw new IllegalArgumentException("The field must not be null");
        }
        if (!field.isSupported()) {
            throw new IllegalArgumentException("The field must be supported");
        }
        iField = field;
    }

    /**
     * Gets the wrapped date time field.
     *
     * @return the wrapped DateTimeField
     */
    public final DateTimeField getWrappedField() {
        return iField;
    }

    /** Forwards to the wrapped field. */
    public boolean isLenient() {
        return getWrappedField().isLenient();
    }

    /** Forwards to the wrapped field. */
    public int get(long instant) {
        return getWrappedField().get(instant);
    }

    /** Forwards to the wrapped field. */
    public long set(long instant, int value) {
        return getWrappedField().set(instant, value);
    }

    /** Forwards to the wrapped field. */
    public DurationField getDurationField() {
        return getWrappedField().getDurationField();
    }

    /** Forwards to the wrapped field. */
    public DurationField getRangeDurationField() {
        return getWrappedField().getRangeDurationField();
    }

    /** Forwards to the wrapped field. */
    public int getMinimumValue() {
        return getWrappedField().getMinimumValue();
    }

    /** Forwards to the wrapped field. */
    public int getMaximumValue() {
        return getWrappedField().getMaximumValue();
    }

    /** Forwards to the wrapped field. */
    public long roundFloor(long instant) {
        return getWrappedField().roundFloor(instant);
    }
}
package org.orbeon.oxf.processor;

import org.apache.log4j.Logger;
import org.orbeon.oxf.common.OXFException;
import org.orbeon.oxf.common.ValidationException;
import org.orbeon.oxf.processor.generator.TidyConfig;
import org.orbeon.oxf.processor.serializer.CachedSerializer;
import org.orbeon.oxf.util.LoggerFactory;
import org.orbeon.oxf.xml.ForwardingContentHandler;
import org.orbeon.oxf.xml.TransformerUtils;
import org.orbeon.oxf.xml.XMLUtils;
import org.orbeon.oxf.xml.dom4j.LocationData;
import org.w3c.dom.Document;
import org.w3c.tidy.Configuration;
import org.w3c.tidy.Tidy;
import org.xml.sax.ContentHandler;
import org.xml.sax.InputSource;
import org.xml.sax.SAXParseException;
import org.xml.sax.XMLReader;

import javax.xml.parsers.SAXParser;
import javax.xml.transform.Transformer;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.sax.SAXResult;
import java.io.*;

/**
 * Intercept either an OutputStream or a Writer.
 *
 * This implementation holds a buffer for either a Writer or an Output Stream. The buffer can then
 * be parsed.
 */
public class StreamInterceptor {

    private static Logger logger = LoggerFactory.createLogger(StreamInterceptor.class);

    // Exactly one of writer / byteStream may be created, depending on which
    // getter the intercepted resource calls first.
    private StringWriter writer;
    private ByteArrayOutputStream byteStream;
    // Defaults used when the caller never sets them explicitly.
    private String encoding = CachedSerializer.DEFAULT_ENCODING;
    private String contentType = ProcessorUtils.DEFAULT_CONTENT_TYPE;

    /**
     * Returns the character buffer, creating it lazily.
     *
     * @throws IllegalStateException if getOutputStream() was already used.
     */
    public Writer getWriter() {
        if (byteStream != null)
            throw new IllegalStateException("getWriter is called after getOutputStream was already called.");
        if (writer == null)
            writer = new StringWriter();
        return writer;
    }

    /**
     * Returns the byte buffer, creating it lazily.
     *
     * @throws IllegalStateException if getWriter() was already used.
     */
    public OutputStream getOutputStream() {
        if (writer != null)
            throw new IllegalStateException("getOutputStream is called after getWriter was already called.");
        if (byteStream == null)
            byteStream = new ByteArrayOutputStream();
        return byteStream;
    }

    /** Sets the encoding used to decode the byte buffer when parsing. */
    public void setEncoding(String encoding) {
        this.encoding = encoding;
    }

    /** Sets the content type; HTML triggers Tidy parsing, anything else is parsed as XML. */
    public void setContentType(String contentType) {
        this.contentType = contentType;
    }

    /** Parses the buffered content as a complete document with no Tidy configuration. */
    public void parse(ContentHandler contentHandler) {
        parse(contentHandler, null, false);
    }

    /** Parses the buffered content with no Tidy configuration. */
    public void parse(ContentHandler contentHandler, boolean fragment) {
        parse(contentHandler, null, fragment);
    }

    /**
     * Parses the buffered content and sends SAX events to the given handler.
     *
     * @param contentHandler receiver of the SAX events.
     * @param tidyConfig optional Tidy settings, used only for HTML content.
     * @param fragment if true, startDocument/endDocument events are suppressed.
     */
    public void parse(ContentHandler contentHandler, TidyConfig tidyConfig, boolean fragment) {
        try {
            // Create InputSource
            InputSource inputSource = null;
            String stringContent = null;
            if (writer != null) {
                stringContent = writer.toString();
                if (stringContent.length() > 0) {
                    if (logger.isDebugEnabled()) {
                        logger.debug("Document to parse in filter: ");
                        logger.debug(stringContent);
                    }
                    inputSource = new InputSource(new StringReader(stringContent));
                }
            } else if (byteStream != null) {
                byte[] byteContent = byteStream.toByteArray();
                if (byteContent.length > 0) {
                    if (logger.isDebugEnabled()) {
                        logger.debug("Document to parse in filter: ");
                        logger.debug(new String(byteContent, encoding));
                    }
                    inputSource = new InputSource(new ByteArrayInputStream(byteContent));
                    if (encoding != null)
                        inputSource.setEncoding(encoding);
                }
            } else {
                // Neither buffer was created: the resource produced nothing at all.
                throw new OXFException("Filtered resource did not call getWriter() or getOutputStream().");
            }
            // Parse the output only if text was generated
            if (inputSource != null) {
                if (ProcessorUtils.HTML_CONTENT_TYPE.equals(contentType)) {
                    // The document contains HTML. Parse it using Tidy.
                    Tidy tidy = new Tidy();
                    if (tidyConfig != null) {
                        tidy.setShowWarnings(tidyConfig.isShowWarnings());
                        tidy.setQuiet(tidyConfig.isQuiet());
                        // In quiet mode, discard Tidy's error output entirely.
                        if (tidyConfig.isQuiet())
                            tidy.setErrout(new PrintWriter(new StringWriter()));
                    }
                    InputStream inputStream;
                    if (writer == null) {
                        // Unfortunately, it doesn't look like tidy support
                        // detecting the encoding from the HTML document, so we
                        // are left to using a default or hope that the source
                        // set a known encoding.
                        inputStream = inputSource.getByteStream();
                        tidy.setCharEncoding(TidyConfig.getTidyEncoding(encoding));
                    } else {
                        // Here we go from characters to bytes to characters
                        // again, which is very suboptimal, but the version of
                        // tidy used does not support a Reader as input.
                        // Use utf-8 both ways and hope for the best
                        inputStream = new ByteArrayInputStream(stringContent.getBytes("utf-8"));
                        tidy.setCharEncoding(Configuration.UTF8);
                    }
                    Document document = tidy.parseDOM(inputStream, null);
                    // Output the result
                    Transformer transformer = TransformerUtils.getIdentityTransformer();
                    if (fragment) {
                        // Do not generate start and end document events
                        transformer.transform(new DOMSource(document), new SAXResult(new ForwardingContentHandler(contentHandler) {
                            public void startDocument() {
                            }

                            public void endDocument() {
                            }
                        }));
                    } else {
                        // Generate a complete document
                        transformer.transform(new DOMSource(document), new SAXResult(contentHandler));
                    }
                } else {
                    // Assume it is XML and parse the output
                    SAXParser parser = XMLUtils.newSAXParser();
                    XMLReader reader = parser.getXMLReader();
                    if (fragment) {
                        // Do not generate start and end document events
                        reader.setContentHandler(new ForwardingContentHandler(contentHandler) {
                            public void startDocument() {
                            }

                            public void endDocument() {
                            }
                        });
                    } else {
                        // Generate a complete document
                        reader.setContentHandler(contentHandler);
                    }
                    //inputSource.setSystemId();
                    reader.parse(inputSource);
                }
            }
        } catch (SAXParseException e) {
            // Preserve the location of the parse error for diagnostics.
            throw new ValidationException(e.getMessage(), new LocationData(e));
        } catch (Exception e) {
            throw new OXFException(e);
        }
    }
}
package lombok.ast.javac; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.EnumMap; import java.util.HashMap; import java.util.Map; import lombok.ast.AlternateConstructorInvocation; import lombok.ast.Annotation; import lombok.ast.AnnotationDeclaration; import lombok.ast.AnnotationElement; import lombok.ast.AnnotationMethodDeclaration; import lombok.ast.ArrayAccess; import lombok.ast.ArrayCreation; import lombok.ast.ArrayDimension; import lombok.ast.ArrayInitializer; import lombok.ast.Assert; import lombok.ast.AstException; import lombok.ast.BinaryExpression; import lombok.ast.BinaryOperator; import lombok.ast.Block; import lombok.ast.BooleanLiteral; import lombok.ast.Break; import lombok.ast.Case; import lombok.ast.Cast; import lombok.ast.Catch; import lombok.ast.CharLiteral; import lombok.ast.ClassDeclaration; import lombok.ast.ClassLiteral; import lombok.ast.Comment; import lombok.ast.CompilationUnit; import lombok.ast.ConstructorDeclaration; import lombok.ast.ConstructorInvocation; import lombok.ast.Continue; import lombok.ast.Default; import lombok.ast.DoWhile; import lombok.ast.EmptyDeclaration; import lombok.ast.EmptyStatement; import lombok.ast.EnumConstant; import lombok.ast.EnumDeclaration; import lombok.ast.EnumTypeBody; import lombok.ast.Expression; import lombok.ast.ExpressionStatement; import lombok.ast.FloatingPointLiteral; import lombok.ast.For; import lombok.ast.ForEach; import lombok.ast.ForwardingAstVisitor; import lombok.ast.Identifier; import lombok.ast.If; import lombok.ast.ImportDeclaration; import lombok.ast.InlineIfExpression; import lombok.ast.InstanceInitializer; import lombok.ast.InstanceOf; import lombok.ast.IntegralLiteral; import lombok.ast.InterfaceDeclaration; import lombok.ast.JavadocContainer; import lombok.ast.KeywordModifier; import lombok.ast.LabelledStatement; import lombok.ast.Literal; import lombok.ast.MethodDeclaration; import lombok.ast.MethodInvocation; import 
lombok.ast.Modifiers; import lombok.ast.Node; import lombok.ast.NullLiteral; import lombok.ast.PackageDeclaration; import lombok.ast.Position; import lombok.ast.Return; import lombok.ast.Select; import lombok.ast.Statement; import lombok.ast.StaticInitializer; import lombok.ast.StrictListAccessor; import lombok.ast.StringLiteral; import lombok.ast.Super; import lombok.ast.SuperConstructorInvocation; import lombok.ast.Switch; import lombok.ast.Synchronized; import lombok.ast.This; import lombok.ast.Throw; import lombok.ast.Try; import lombok.ast.TypeArguments; import lombok.ast.TypeBody; import lombok.ast.TypeReference; import lombok.ast.TypeReferencePart; import lombok.ast.TypeVariable; import lombok.ast.UnaryExpression; import lombok.ast.UnaryOperator; import lombok.ast.VariableDeclaration; import lombok.ast.VariableDefinition; import lombok.ast.VariableDefinitionEntry; import lombok.ast.While; import lombok.ast.WildcardKind; import lombok.ast.grammar.Source; import lombok.ast.grammar.SourceStructure; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; import com.sun.tools.javac.code.BoundKind; import com.sun.tools.javac.code.Flags; import com.sun.tools.javac.code.TypeTags; import com.sun.tools.javac.tree.JCTree; import com.sun.tools.javac.tree.TreeMaker; import com.sun.tools.javac.tree.JCTree.JCAnnotation; import com.sun.tools.javac.tree.JCTree.JCBlock; import com.sun.tools.javac.tree.JCTree.JCCase; import com.sun.tools.javac.tree.JCTree.JCCatch; import com.sun.tools.javac.tree.JCTree.JCClassDecl; import com.sun.tools.javac.tree.JCTree.JCCompilationUnit; import com.sun.tools.javac.tree.JCTree.JCExpression; import com.sun.tools.javac.tree.JCTree.JCExpressionStatement; import com.sun.tools.javac.tree.JCTree.JCIdent; import com.sun.tools.javac.tree.JCTree.JCLiteral; import com.sun.tools.javac.tree.JCTree.JCMethodDecl; import com.sun.tools.javac.tree.JCTree.JCMethodInvocation; import com.sun.tools.javac.tree.JCTree.JCModifiers; 
import com.sun.tools.javac.tree.JCTree.JCNewClass;
import com.sun.tools.javac.tree.JCTree.JCStatement;
import com.sun.tools.javac.tree.JCTree.JCTypeApply;
import com.sun.tools.javac.tree.JCTree.JCTypeParameter;
import com.sun.tools.javac.tree.JCTree.JCVariableDecl;
import com.sun.tools.javac.tree.JCTree.TypeBoundKind;
import com.sun.tools.javac.util.Context;
import com.sun.tools.javac.util.List;
import com.sun.tools.javac.util.Name;
import com.sun.tools.javac.util.Name.Table;

/**
 * Turns {@code lombok.ast} based ASTs into javac's {@code JCTree} model.
 */
public class JcTreeBuilder extends ForwardingAstVisitor {
    // NOTE: helpers referenced below (posOfStructure, setPos, posSet, chain,
    // UNARY_OPERATORS, BINARY_OPERATORS) are declared later in this class.

    private final TreeMaker treeMaker;
    private final Table table;
    // Source structure tokens per node, used for precise position recovery.
    private final Map<Node, Collection<SourceStructure>> sourceStructures;
    // Shared end-position table, installed on the generated JCCompilationUnit.
    private final Map<JCTree, Integer> endPosTable;

    // A visitor instance produces exactly one result list; null until set.
    private List<? extends JCTree> result = null;

    public JcTreeBuilder(Source source, Context context) {
        this(source.getSourceStructures(), TreeMaker.instance(context), Name.Table.instance(context), new HashMap<JCTree, Integer>());
    }

    private JcTreeBuilder(Map<Node, Collection<SourceStructure>> structures, TreeMaker treeMaker, Table table, Map<JCTree, Integer> endPosTable) {
        this.treeMaker = treeMaker;
        this.table = table;
        this.sourceStructures = structures;
        this.endPosTable = endPosTable;
    }

    /** Interns the identifier's text in javac's name table; null-safe. */
    private Name toName(Identifier identifier) {
        if (identifier == null) return null;
        return table.fromString(identifier.getName());
    }

    /** Converts a single node by running a fresh visitor over it; null-safe. */
    private JCTree toTree(Node node) {
        if (node == null) return null;
        JcTreeBuilder visitor = create();
        node.accept(visitor);
        try {
            return visitor.get();
        } catch (RuntimeException e) {
            System.err.printf("Node '%s' (%s) did not produce any results\n", node, node.getClass().getSimpleName());
            throw e;
        }
    }

    /** Converts a node and casts the result to an expression. */
    private JCExpression toExpression(Node node) {
        return (JCExpression)toTree(node);
    }

    /** Converts a node and casts the result to a statement. */
    private JCStatement toStatement(Node node) {
        return (JCStatement)toTree(node);
    }

    /**
     * Converts every node in the accessor, flattening multi-result visits,
     * and checks each produced tree against the requested type.
     */
    private <T extends JCTree> List<T> toList(Class<T> type, StrictListAccessor<?, ?> accessor) {
        List<T> result = List.nil();
        for (Node node : accessor) {
            JcTreeBuilder visitor = create();
            node.accept(visitor);
            List<? extends JCTree> values;
            try {
                values = visitor.getAll();
                if (values.size() == 0) throw new RuntimeException();
            } catch (RuntimeException e) {
                System.err.printf("Node '%s' (%s) did not produce any results\n", node, node.getClass().getSimpleName());
                throw e;
            }
            for (JCTree value : values) {
                if (value != null && !type.isInstance(value)) {
                    throw new ClassCastException(value.getClass().getName() + " cannot be cast to " + type.getName());
                }
                result = result.append(type.cast(value));
            }
        }
        return result;
    }

    /** Converts one node into a (possibly multi-element) list; empty for null. */
    private <T extends JCTree> List<T> toList(Class<T> type, Node node) {
        if (node == null) return List.nil();
        JcTreeBuilder visitor = create();
        node.accept(visitor);
        @SuppressWarnings("unchecked")
        List<T> all = (List<T>)visitor.getAll();
        return List.<T>nil().appendList(all);
    }

    /**
     * Returns the single result of this visitor.
     * NPEs if nothing was produced (caught and reported by toTree callers).
     */
    public JCTree get() {
        if (result.size() > 1) {
            throw new RuntimeException("Expected only one result but got " + result.size());
        }
        return result.head;
    }

    /** Returns all results produced by this visitor (may be null if none). */
    public List<? extends JCTree> getAll() {
        return result;
    }

    /**
     * Records the single result for this node, wrapping it in one JCParens per
     * intended parenthesis level and positioning each wrapper.
     */
    private boolean set(Node node, JCTree value) {
        if (result != null) throw new IllegalStateException("result is already set");
        JCTree actualValue = value;
        if (node instanceof Expression) {
            for (int i = 0; i < ((Expression)node).getIntendedParens(); i++) {
                actualValue = treeMaker.Parens((JCExpression)actualValue);
                posParen(node, i, ((Expression)node).getParensPositions(), actualValue);
            }
        }
        result = List.of(actualValue);
        return true;
    }

    /**
     * Positions a JCParens wrapper: uses the recorded paren position when
     * available, otherwise estimates by widening the node's own position.
     */
    private void posParen(Node node, int iteration, java.util.List<Position> parenPositions, JCTree jcTree) {
        Position p = null;
        if (parenPositions.size() > iteration) p = parenPositions.get(iteration);
        int start = (p == null || p.isUnplaced() || p.getStart() < 0) ?
                node.getPosition().getStart() - 1 - iteration : p.getStart();
        int end = (p == null || p.isUnplaced() || p.getEnd() < 0) ?
                node.getPosition().getEnd() + 1 + iteration : p.getEnd();
        setPos(start, end, jcTree);
    }

    /** Records a multi-element result (no paren handling). */
    private boolean set(List<? extends JCTree> values) {
        if (result != null) throw new IllegalStateException("result is already set");
        result = values;
        return true;
    }

    /** Creates a child visitor sharing all conversion state. */
    private JcTreeBuilder create() {
        return new JcTreeBuilder(sourceStructures, treeMaker, table, endPosTable);
    }

    // Any node type without an explicit visit method below is unsupported.
    @Override public boolean visitNode(Node node) {
        throw new UnsupportedOperationException(String.format("Unhandled node '%s' (%s)", node, node.getClass().getSimpleName()));
    }

    @Override public boolean visitCompilationUnit(CompilationUnit node) {
        List<JCTree> preamble = toList(JCTree.class, node.getPackageDeclaration());
        List<JCTree> imports = toList(JCTree.class, node.importDeclarations());
        List<JCTree> types = toList(JCTree.class, node.typeDeclarations());
        List<JCAnnotation> annotations = List.nil();
        JCExpression pid = null;
        // The package declaration visit yields package annotations plus the
        // package name expression; split them apart here.
        for (JCTree elem : preamble) {
            if (elem instanceof JCAnnotation) {
                annotations = annotations.append((JCAnnotation)elem);
            } else if (elem instanceof JCExpression && pid == null) {
                pid = (JCExpression) elem;
            } else {
                throw new RuntimeException("Unexpected element in preamble: " + elem);
            }
        }
        JCCompilationUnit topLevel = treeMaker.TopLevel(annotations, pid, imports.appendList(types));
        topLevel.endPositions = endPosTable;
        // Start at the earliest of package/imports/types; fall back to the node itself.
        int start = Integer.MAX_VALUE;
        int end = node.getPosition().getEnd();
        if (node.getPackageDeclaration() != null) start = Math.min(start, node.getPackageDeclaration().getPosition().getStart());
        if (!node.importDeclarations().isEmpty()) start = Math.min(start, node.rawImportDeclarations().first().getPosition().getStart());
        if (!node.typeDeclarations().isEmpty()) start = Math.min(start, node.rawTypeDeclarations().first().getPosition().getStart());
        if (start == Integer.MAX_VALUE) start = node.getPosition().getStart();
        return set(node, setPos(start, end, topLevel));
    }

    @Override public boolean visitPackageDeclaration(PackageDeclaration node) {
        List<JCTree> defs = List.nil();
        for (Annotation annotation : node.annotations()) {
            defs = defs.append(toTree(annotation));
        }
        //Actual package declaration
        defs = defs.append(chain(node.parts()));
        set(defs);
        return true;
    }

    @Override public boolean visitImportDeclaration(ImportDeclaration node) {
        JCExpression name = chain(node.parts());
        if (node.isStarImport()) {
            int start = posOfStructure(node, ".", true);
            int end = posOfStructure(node, "*", false);
            name = setPos(start, end, treeMaker.Select(name, table.asterisk));
        }
        return posSet(node, treeMaker.Import(name, node.isStaticImport()));
    }

    @Override public boolean visitClassDeclaration(ClassDeclaration node) {
        // javac positions class declarations at the 'class' keyword.
        int start = posOfStructure(node, "class", true);
        int end = node.getPosition().getEnd();
        return set(node, setPos(start, end, treeMaker.ClassDef(
                (JCModifiers) toTree(node.getModifiers()),
                toName(node.getName()),
                toList(JCTypeParameter.class, node.typeVariables()),
                toTree(node.getExtending()),
                toList(JCExpression.class, node.implementing()),
                node.getBody() == null ? List.<JCTree>nil() : toList(JCTree.class, node.getBody().members())
        )));
    }

    @Override public boolean visitInterfaceDeclaration(InterfaceDeclaration node) {
        // Interfaces are ClassDefs with the INTERFACE flag in javac.
        JCModifiers modifiers = (JCModifiers) toTree(node.getModifiers());
        modifiers.flags |= Flags.INTERFACE;
        int start = posOfStructure(node, "interface", true);
        int end = node.getPosition().getEnd();
        return set(node, setPos(start, end, treeMaker.ClassDef(
                modifiers,
                toName(node.getName()),
                toList(JCTypeParameter.class, node.typeVariables()),
                null,
                toList(JCExpression.class, node.extending()),
                node.getBody() == null ? List.<JCTree>nil() : toList(JCTree.class, node.getBody().members())
        )));
    }

    public boolean visitEmptyStatement(EmptyStatement node) {
        return posSet(node, treeMaker.Skip());
    }

    @Override public boolean visitEnumDeclaration(EnumDeclaration node) {
        // Enums are ClassDefs with the ENUM flag; the body node handles members itself.
        JCModifiers modifiers = (JCModifiers) toTree(node.getModifiers());
        modifiers.flags |= Flags.ENUM;
        int start = posOfStructure(node, "enum", true);
        int end = node.getPosition().getEnd();
        return set(node, setPos(start, end, treeMaker.ClassDef(
                modifiers,
                toName(node.getName()),
                List.<JCTypeParameter>nil(),
                null,
                toList(JCExpression.class, node.implementing()),
                node.getBody() == null ? List.<JCTree>nil() : toList(JCTree.class, node.getBody())
        )));
    }

    @Override public boolean visitEnumTypeBody(EnumTypeBody node) {
        // Constants come first, followed by the remaining members.
        List<JCTree> constants = toList(JCTree.class, node.constants());
        List<JCTree> members = toList(JCTree.class, node.members());
        set(List.<JCTree>nil().appendList(constants).appendList(members));
        return true;
    }

    // Implicit modifiers javac attaches to every enum constant.
    private static final long ENUM_CONSTANT_FLAGS = Flags.PUBLIC | Flags.STATIC | Flags.FINAL | Flags.ENUM;

    @Override public boolean visitEnumConstant(EnumConstant node) {
        // An enum constant becomes a VarDef whose initializer is a NewClass of
        // the enum type, optionally with a constant class body.
        JCIdent parentType = treeMaker.Ident(toName(((EnumDeclaration)node.getParent().getParent()).getName()));
        JCClassDecl body = (JCClassDecl) toTree(node.getBody());
        if (body != null) body.mods.flags |= Flags.STATIC | Flags.ENUM;
        JCNewClass newClass = treeMaker.NewClass(
                null,
                List.<JCExpression>nil(),
                parentType,
                toList(JCExpression.class, node.arguments()),
                body
        );
        int start = posOfStructure(node, "(", true);
        int end = body != null ? node.getPosition().getEnd() : posOfStructure(node, ")", false);
        if (body != null) body.pos = node.getPosition().getStart();
        if (start != node.getPosition().getStart()) {
            setPos(start, end, newClass);
        } else {
            if (body != null) setPos(node.getBody(), newClass);
        }
        return posSet(node, treeMaker.VarDef(
                treeMaker.Modifiers(ENUM_CONSTANT_FLAGS, toList(JCAnnotation.class, node.annotations())),
                toName(node.getName()),
                parentType,
                newClass
        ));
    }

    @Override public boolean visitTypeBody(TypeBody node) {
        // Anonymous class body: a nameless ClassDef with no heritage clauses.
        return posSet(node, treeMaker.ClassDef(treeMaker.Modifiers(0), table.empty, List.<JCTypeParameter>nil(), null, List.<JCExpression>nil(), toList(JCTree.class, node.members())));
    }

    @Override public boolean visitExpressionStatement(ExpressionStatement node) {
        return posSet(node, treeMaker.Exec(toExpression(node.getExpression())));
    }

    @Override public boolean visitIntegralLiteral(IntegralLiteral node) {
        if (node.isMarkedAsLong()) {
            return posSet(node, treeMaker.Literal(TypeTags.LONG, node.longValue()));
        }
        return posSet(node, treeMaker.Literal(TypeTags.INT, node.intValue()));
    }

    @Override public boolean visitFloatingPointLiteral(FloatingPointLiteral node) {
        if (node.isMarkedAsFloat()) {
            return posSet(node, treeMaker.Literal(TypeTags.FLOAT, node.floatValue()));
        }
        return posSet(node, treeMaker.Literal(TypeTags.DOUBLE, node.doubleValue()));
    }

    @Override public boolean visitBooleanLiteral(BooleanLiteral node) {
        // javac encodes boolean literals as int 1/0.
        return posSet(node, treeMaker.Literal(TypeTags.BOOLEAN, node.getValue() ? 1 : 0));
    }

    @Override public boolean visitCharLiteral(CharLiteral node) {
        // javac encodes char literals as their int code point value.
        return posSet(node, treeMaker.Literal(TypeTags.CHAR, (int)node.getValue()));
    }

    @Override public boolean visitNullLiteral(NullLiteral node) {
        return posSet(node, treeMaker.Literal(TypeTags.BOT, null));
    }

    @Override public boolean visitStringLiteral(StringLiteral node) {
        return posSet(node, treeMaker.Literal(TypeTags.CLASS, node.getValue()));
    }

    @Override public boolean visitIdentifier(Identifier node) {
        return posSet(node, treeMaker.Ident(toName(node)));
    }

    @Override public boolean visitCast(Cast node) {
        return posSet(node, treeMaker.TypeCast(toTree(node.getRawTypeReference()), toExpression(node.getOperand())));
    }

    @Override public boolean visitConstructorInvocation(ConstructorInvocation node) {
        return posSet(node, treeMaker.NewClass(
                toExpression(node.getQualifier()),
                toList(JCExpression.class, node.getConstructorTypeArguments()),
                toExpression(node.getTypeReference()),
                toList(JCExpression.class, node.arguments()),
                (JCClassDecl)toTree(node.getAnonymousClassBody())
        ));
    }

    @Override public boolean visitSelect(Select node) {
        // javac positions a Select at the '.' before the selected identifier.
        int start = posOfStructure(node.getIdentifier(), ".", true);
        int end = node.getPosition().getEnd();
        return set(node, setPos(start, end, treeMaker.Select(toExpression(node.getOperand()), toName(node.getIdentifier()))));
    }

    @Override public boolean visitUnaryExpression(UnaryExpression node) {
        Expression operand = node.getOperand();
        UnaryOperator operator = node.getOperator();
        // Fold unary minus on numeric literals into a negative literal,
        // matching how javac represents them. Char literals are excluded.
        if (operator == UnaryOperator.UNARY_MINUS && operand instanceof Literal && !(operand instanceof CharLiteral)) {
            JCLiteral result = (JCLiteral) toTree(operand);
            result.value = negative(result.value);
            return set(node, setPos(operand, result));
        }
        int start = node.getPosition().getStart();
        int end = node.getPosition().getEnd();
        /*
         * The pos of "++x" is the entire thing, but the pos of "x++" is only the symbol.
         * I guess the javac guys think consistency is overrated :(
         */
        switch (operator) {
        case POSTFIX_DECREMENT:
        case POSTFIX_INCREMENT:
            start = posOfStructure(node, node.getOperator().getSymbol(), true);
            end = posOfStructure(node, node.getOperator().getSymbol(), false);
        }
        return set(node, setPos(start, end, treeMaker.Unary(UNARY_OPERATORS.get(operator), toExpression(operand))));
    }

    @Override public boolean visitAlternateConstructorInvocation(AlternateConstructorInvocation node) {
        // this(...) becomes Exec(Apply(Ident(this))).
        int thisStart = posOfStructure(node, "this", true);
        int thisEnd = posOfStructure(node, "this", false);
        if (node.getConstructorTypeArguments() != null && !node.getConstructorTypeArguments().generics().isEmpty()) {
            thisStart = node.getConstructorTypeArguments().getPosition().getStart();
        }
        JCMethodInvocation invoke = treeMaker.Apply(
                toList(JCExpression.class, node.getConstructorTypeArguments()),
                setPos(thisStart, thisEnd, treeMaker.Ident(table._this)),
                toList(JCExpression.class, node.arguments()));
        int start = posOfStructure(node, "(", true);
        int end = posOfStructure(node, ")", false);
        return posSet(node, treeMaker.Exec(setPos(start, end, invoke)));
    }

    @Override public boolean visitSuperConstructorInvocation(SuperConstructorInvocation node) {
        // super(...) or qualifier.super(...) becomes Exec(Apply(...)).
        JCExpression methodId;
        if (node.getQualifier() == null) {
            methodId = treeMaker.Ident(table._super);
            methodId.pos = posOfStructure(node, "super", true);
        } else {
            methodId = treeMaker.Select(
                    toExpression(node.getQualifier()),
                    table._super);
            setPos(posOfStructure(node, ".", true), posOfStructure(node, "super", false), methodId);
        }
        JCMethodInvocation invoke = treeMaker.Apply(
                toList(JCExpression.class, node.getConstructorTypeArguments()),
                methodId,
                toList(JCExpression.class, node.arguments()));
        // Use the last '(' / ')' occurrence to skip parens in the qualifier.
        int start = posOfStructure(node, "(", Integer.MAX_VALUE, true);
        int end = posOfStructure(node, ")", Integer.MAX_VALUE, false);
        return posSet(node, treeMaker.Exec(setPos(start, end, invoke)));
    }

    @Override public boolean visitSuper(Super node) {
        JCTree tree;
        if
(node.getQualifier() != null) { tree = treeMaker.Select((JCExpression) toTree(node.getQualifier()), table._super); setPos(posOfStructure(node, ".", true), posOfStructure(node, "super", false), tree); } else { tree = treeMaker.Ident(table._super); tree.pos = posOfStructure(node, "super", true); } return set(node, tree); } @Override public boolean visitBinaryExpression(BinaryExpression node) { BinaryOperator operator = node.getOperator(); if (posOfStructure(node, node.getRawOperator(), true) == 96 && node.getPosition().getEnd() == 122) { System.out.println("ARRIVED"); } int start = posOfStructure(node, node.getRawOperator(), true); int end = node.getPosition().getEnd(); if (operator == BinaryOperator.PLUS) { if (tryStringCombine(node)) return true; } JCExpression lhs = toExpression(node.getLeft()); JCExpression rhs = toExpression(node.getRight()); if (operator == BinaryOperator.ASSIGN) { return set(node, setPos(start, end, treeMaker.Assign(lhs, rhs))); } if (operator.isAssignment()) { return set(node, setPos(start, end, treeMaker.Assignop(BINARY_OPERATORS.get(operator), lhs, rhs))); } return set(node, setPos(start, end, treeMaker.Binary(BINARY_OPERATORS.get(operator), lhs, rhs))); } private boolean tryStringCombine(BinaryExpression node) { if (node.getParens() > 0) { ; } else if (node.getParent() instanceof BinaryExpression) { try { if (!((BinaryExpression)node.getParent()).getOperator().isAssignment()) return false; } catch (AstException ignore) { return false; } } else if (node.getParent() instanceof InstanceOf) { return false; } java.util.List<String> buffer = new ArrayList<String>(); BinaryExpression current = node; int start = Integer.MAX_VALUE; while (true) { start = Math.min(start, posOfStructure(current, "+", true)); if (current.getRawRight() instanceof StringLiteral && current.getRight().getParens() == 0) { buffer.add(((StringLiteral)current.getRawRight()).getValue()); } else { return false; } if (current.getRawLeft() instanceof BinaryExpression) { current = 
(BinaryExpression) current.getRawLeft(); try { if (current.getOperator() != BinaryOperator.PLUS || current.getParens() > 0) return false; } catch (AstException e) { return false; } } else if (current.getRawLeft() instanceof StringLiteral && current.getLeft().getParens() == 0) { buffer.add(((StringLiteral)current.getRawLeft()).getValue()); break; } else { return false; } } StringBuilder out = new StringBuilder(); for (int i = buffer.size() - 1; i >= 0; i--) out.append(buffer.get(i)); int end = node.getPosition().getEnd(); return set(node, setPos(start, end, treeMaker.Literal(TypeTags.CLASS, out.toString()))); } @Override public boolean visitInstanceOf(InstanceOf node) { int start = posOfStructure(node, "instanceof", true); int end = node.getPosition().getEnd(); return set(node, setPos(start, end, treeMaker.TypeTest( toExpression(node.getObjectReference()), toExpression(node.getTypeReference())))); } @Override public boolean visitInlineIfExpression(InlineIfExpression node) { int start = posOfStructure(node, "?", true); int end = node.getPosition().getEnd(); return set(node, setPos(start, end, treeMaker.Conditional( toExpression(node.getCondition()), toExpression(node.getIfTrue()), toExpression(node.getIfFalse())))); } @Override public boolean visitMethodInvocation(MethodInvocation node) { JCExpression methodId; if (node.getOperand() == null) { methodId = (JCExpression) toTree(node.getName()); } else { int start = posOfStructure(node, ".", true); int end = node.getName().getPosition().getEnd(); methodId = setPos(start, end, treeMaker.Select( toExpression(node.getOperand()), toName(node.getName()))); } int start = posOfStructure(node, "(", true); int end = node.getPosition().getEnd(); return set(node, setPos(start, end, treeMaker.Apply( toList(JCExpression.class, node.getMethodTypeArguments()), methodId, toList(JCExpression.class, node.arguments()) ))); } @Override public boolean visitArrayInitializer(ArrayInitializer node) { return posSet(node, treeMaker.NewArray( 
null, List.<JCExpression>nil(), toList(JCExpression.class, node.expressions()) )); } @Override public boolean visitArrayCreation(ArrayCreation node) { java.util.List<Integer> typeTrees = new ArrayList<Integer>(); int endPosOfTypeTree = 0; List<JCExpression> dims = List.nil(); for (ArrayDimension dim : node.dimensions()) { JCExpression e = toExpression(dim); if (e == null) { Position p = dim.getPosition(); typeTrees.add(p.getStart()); endPosOfTypeTree = Math.max(endPosOfTypeTree, p.getEnd()); } else { dims = dims.append(e); } } Collections.reverse(typeTrees); List<JCExpression> init; if (node.getInitializer() == null) { init = null; } else { init = toList(JCExpression.class, node.getInitializer().expressions()); typeTrees.remove(typeTrees.size()-1); //javac sees this as new TYPE[] {}, with both 'new' and the last [] as structure. } JCExpression elementType = toExpression(node.getComponentTypeReference()); for (Integer start : typeTrees) { elementType = setPos(start, endPosOfTypeTree, treeMaker.TypeArray(elementType)); } return posSet(node, treeMaker.NewArray(elementType, dims, init)); } @Override public boolean visitArrayDimension(ArrayDimension node) { return set(node, toTree(node.getDimension())); } private static Object negative(Object value) { Number num = (Number)value; if (num instanceof Integer) return -num.intValue(); if (num instanceof Long) return -num.longValue(); if (num instanceof Float) return -num.floatValue(); if (num instanceof Double) return -num.doubleValue(); throw new IllegalArgumentException("value should be an Integer, Long, Float or Double, not a " + value.getClass().getSimpleName()); } @Override public boolean visitAssert(Assert node) { return posSet(node, treeMaker.Assert(toExpression(node.getAssertion()), toExpression(node.getMessage()))); } @Override public boolean visitBreak(Break node) { return posSet(node, treeMaker.Break(toName(node.getLabel()))); } @Override public boolean visitContinue(Continue node) { return posSet(node, 
treeMaker.Continue(toName(node.getLabel()))); } @Override public boolean visitDoWhile(DoWhile node) { JCExpression expr = toExpression(node.getCondition()); int start = posOfStructure(node, "(", true); int end = posOfStructure(node, ")", false); expr = setPos(start, end, treeMaker.Parens(expr)); return posSet(node, treeMaker.DoLoop(toStatement(node.getStatement()), expr)); } @Override public boolean visitFor(For node) { List<JCStatement> inits; List<JCExpressionStatement> updates; if (node.isVariableDeclarationBased()) { inits = toList(JCStatement.class, node.getVariableDeclaration()); } else { inits = List.nil(); for (Expression init : node.expressionInits()) { inits = inits.append(setPos(init, treeMaker.Exec(toExpression(init)))); } } updates = List.nil(); for (Expression update : node.updates()) { updates = updates.append(setPos(update, treeMaker.Exec(toExpression(update)))); } return posSet(node, treeMaker.ForLoop(inits, toExpression(node.getCondition()), updates, toStatement(node.getStatement()))); } @Override public boolean visitForEach(ForEach node) { return posSet(node, treeMaker.ForeachLoop((JCVariableDecl) toTree(node.getVariable()), toExpression(node.getIterable()), toStatement(node.getStatement()))); } @Override public boolean visitIf(If node) { JCExpression expr = toExpression(node.getCondition()); int start = posOfStructure(node, "(", true); int end = posOfStructure(node, ")", false); expr = setPos(start, end, treeMaker.Parens(expr)); return posSet(node, treeMaker.If(expr, toStatement(node.getStatement()), toStatement(node.getElseStatement()))); } @Override public boolean visitLabelledStatement(LabelledStatement node) { return posSet(node, treeMaker.Labelled(toName(node.getLabel()), toStatement(node.getStatement()))); } @Override public boolean visitModifiers(Modifiers node) { JCModifiers mods = treeMaker.Modifiers(node.getExplicitModifierFlags(), toList(JCAnnotation.class, node.annotations())); Comment javadoc = null; if (node.getParent() instanceof 
JavadocContainer) { javadoc = ((JavadocContainer)node.getParent()).getJavadoc(); } else if (node.getParent() instanceof VariableDefinition && node.getParent().getParent() instanceof VariableDeclaration) { javadoc = ((VariableDeclaration)node.getParent().getParent()).getJavadoc(); } if (javadoc != null && javadoc.isMarkedDeprecated()) mods.flags |= Flags.DEPRECATED; if (node.isEmpty()) { //Workaround for a javac bug; start (but not end!) gets set of an empty modifiers object, //but only if these represent the modifiers of a constructor or method that has type variables. if ( (node.getParent() instanceof MethodDeclaration && ((MethodDeclaration)node.getParent()).typeVariables().size() > 0) || (node.getParent() instanceof ConstructorDeclaration && ((ConstructorDeclaration)node.getParent()).typeVariables().size() > 0)) { mods.pos = node.getParent().getPosition().getStart(); } return set(node, mods); } else { return posSet(node, mods); } } @Override public boolean visitKeywordModifier(KeywordModifier node) { return set(node, treeMaker.Modifiers(getModifier(node))); } @Override public boolean visitInstanceInitializer(InstanceInitializer node) { return set(node, toTree(node.getBody())); } @Override public boolean visitStaticInitializer(StaticInitializer node) { JCBlock block = (JCBlock) toTree(node.getBody()); block.flags |= Flags.STATIC; return posSet(node, block); } @Override public boolean visitBlock(Block node) { return posSet(node, treeMaker.Block(0, toList(JCStatement.class, node.contents()))); } @Override public boolean visitVariableDeclaration(VariableDeclaration node) { List<JCVariableDecl> list = toList(JCVariableDecl.class, node.getDefinition()); JCVariableDecl last = list.get(list.size() -1); endPosTable.put(last, node.getPosition().getEnd()); return set(list); } @Override public boolean visitVariableDefinition(VariableDefinition node) { JCModifiers mods = (JCModifiers) toTree(node.getModifiers()); JCExpression vartype = toExpression(node.getTypeReference()); 
if (node.isVarargs()) { mods.flags |= Flags.VARARGS; vartype = addDimensions(node, vartype, 1); setPos(posOfStructure(node, "...", true), posOfStructure(node, "...", false), vartype); } List<JCVariableDecl> defs = List.nil(); for (VariableDefinitionEntry e : node.variables()) { defs = defs.append(setPos( e, treeMaker.VarDef(mods, toName(e.getName()), addDimensions(e, vartype, e.getArrayDimensions()), toExpression(e.getInitializer())))); } /* the endpos when multiple nodes are generated is after the comma for all but the last item, for some reason. */ { for (int i = 0; i < defs.size() -1; i++) { endPosTable.put(defs.get(i), posOfStructure(node, ",", i, false)); } } if (defs.isEmpty()) throw new RuntimeException("Empty VariableDefinition node"); set(defs); return true; } @Override public boolean visitAnnotationDeclaration(AnnotationDeclaration node) { JCModifiers modifiers = (JCModifiers) toTree(node.getModifiers()); modifiers.flags |= Flags.INTERFACE | Flags.ANNOTATION; int start = posOfStructure(node, "interface", true); int end = node.getPosition().getEnd(); if (modifiers.pos == -1) modifiers.pos = posOfStructure(node, "@", true); endPosTable.put(modifiers, posOfStructure(node, "@", false)); return set(node, setPos(start, end, treeMaker.ClassDef( modifiers, toName(node.getName()), List.<JCTypeParameter>nil(), null, List.<JCExpression>nil(), node.getBody() == null ? 
List.<JCTree>nil() : toList(JCTree.class, node.getBody().members()) ))); } @Override public boolean visitAnnotationMethodDeclaration(AnnotationMethodDeclaration node) { JCMethodDecl methodDef = treeMaker.MethodDef( (JCModifiers)toTree(node.getModifiers()), toName(node.getMethodName()), toExpression(node.getReturnTypeReference()), List.<JCTypeParameter>nil(), List.<JCVariableDecl>nil(), List.<JCExpression>nil(), null, toExpression(node.getDefaultValue()) ); int start = node.getMethodName().getPosition().getStart(); int end = node.getPosition().getEnd(); return set(node, setPos(start, end, methodDef)); } @Override public boolean visitClassLiteral(ClassLiteral node) { int start = posOfStructure(node, ".", true); int end = node.getPosition().getEnd(); return set(node, setPos(start, end, treeMaker.Select((JCExpression) toTree(node.getTypeReference()), table._class))); } @Override public boolean visitAnnotationElement(AnnotationElement node) { JCExpression arg = toExpression(node.getValue()); if (node.getName() != null) { arg = setPos(node.getValue(), treeMaker.Assign((JCIdent) toTree(node.getName()), arg)); } return set(node, arg); } @Override public boolean visitAnnotation(Annotation node) { int start = node.getPosition().getStart(); int end = node.getPosition().getEnd(); return set(node, setPos(start, end, treeMaker.Annotation(toTree(node.getAnnotationTypeReference()), toList(JCExpression.class, node.elements())))); } @Override public boolean visitTypeReference(TypeReference node) { WildcardKind wildcard = node.getWildcard(); if (wildcard == WildcardKind.UNBOUND) { return posSet(node, treeMaker.Wildcard(treeMaker.TypeBoundKind(BoundKind.UNBOUND), null)); } JCExpression result = plainTypeReference(node); result = addWildcards(node, result, wildcard); result = addDimensions(node, result, node.getArrayDimensions()); return set(node, result); } @Override public boolean visitArrayAccess(ArrayAccess node) { int start = posOfStructure(node, "[", true); int end = 
node.getPosition().getEnd(); return set(node, setPos(start, end, treeMaker.Indexed(toExpression(node.getOperand()), toExpression(node.getIndexExpression())))); } private JCExpression addDimensions(Node node, JCExpression type, int dimensions) { JCExpression resultingType = type; for (int i = 0; i < dimensions; i++) { int start = posOfStructure(node, "[", dimensions - i - 1, true); int end = posOfStructure(node, "]", false); resultingType = setPos(start, end, treeMaker.TypeArray(resultingType)); } return resultingType; } private JCExpression plainTypeReference(TypeReference node) { if (node.isPrimitive() || node.isVoid() || node.parts().size() == 1) { int end = node.getPosition().getEnd(); if (node.getArrayDimensions() > 0) { end = node.parts().last().getPosition().getEnd(); } if (end == node.getPosition().getStart()) end = node.getPosition().getEnd(); Identifier identifier = node.parts().first().getIdentifier(); int typeTag = primitiveTypeTag(identifier.getName()); if (typeTag > 0) return setPos(node.getPosition().getStart(), end, treeMaker.TypeIdent(typeTag)); } JCExpression current = null; for (TypeReferencePart part : node.parts()) { JCExpression expr = (JCExpression) toTree(part); if (current == null) { current = expr; continue; } if (expr instanceof JCIdent) { current = treeMaker.Select(current, ((JCIdent)expr).name); setPos(posOfStructure(part, ".", true), part.getPosition().getEnd(), current); } else if (expr instanceof JCTypeApply) { JCTypeApply apply = (JCTypeApply)expr; apply.clazz = treeMaker.Select(current, ((JCIdent)apply.clazz).name); setPos(posOfStructure(part, ".", true), part.getIdentifier().getPosition().getEnd(), apply.clazz); current = apply; } else { throw new IllegalStateException("Didn't expect a " + expr.getClass().getName() + " in " + node); } } return current; } private JCExpression addWildcards(Node node, JCExpression type, WildcardKind wildcardKind) { TypeBoundKind typeBoundKind; switch (wildcardKind) { case NONE: return type; case 
EXTENDS: typeBoundKind = treeMaker.TypeBoundKind(BoundKind.EXTENDS); setPos(posOfStructure(node, "extends", true), posOfStructure(node, "extends", false), typeBoundKind); return setPos(type.pos, endPosTable.get(type), treeMaker.Wildcard(typeBoundKind, type)); case SUPER: typeBoundKind = treeMaker.TypeBoundKind(BoundKind.SUPER); setPos(posOfStructure(node, "super", true), posOfStructure(node, "super", false), typeBoundKind); return setPos(type.pos, endPosTable.get(type), treeMaker.Wildcard(typeBoundKind, type)); default: throw new IllegalStateException("Unexpected unbound wildcard: " + wildcardKind); } } @Override public boolean visitTypeReferencePart(TypeReferencePart node) { JCIdent ident = (JCIdent) toTree(node.getIdentifier()); List<JCExpression> typeArguments = toList(JCExpression.class, node.getTypeArguments()); if (typeArguments.isEmpty()) { return set(node, ident); } else { JCTypeApply typeApply = treeMaker.TypeApply(ident, typeArguments); return set(node, setPos(node.getTypeArguments(), typeApply)); } } @Override public boolean visitTypeArguments(TypeArguments node) { set(toList(JCExpression.class, node.generics())); return true; } @Override public boolean visitTypeVariable(TypeVariable node) { return posSet(node, treeMaker.TypeParameter(toName(node.getName()), toList(JCExpression.class, node.extending()))); } @Override public boolean visitMethodDeclaration(MethodDeclaration node) { JCMethodDecl methodDef = treeMaker.MethodDef( (JCModifiers)toTree(node.getModifiers()), toName(node.getMethodName()), toExpression(node.getReturnTypeReference()), toList(JCTypeParameter.class, node.typeVariables()), toList(JCVariableDecl.class, node.parameters()), toList(JCExpression.class, node.thrownTypeReferences()), (JCBlock)toTree(node.getBody()), null ); for (JCVariableDecl decl : methodDef.params) { decl.mods.flags |= Flags.PARAMETER; } int start = node.getMethodName().getPosition().getStart(); int end = node.getPosition().getEnd(); return set(node, setPos(start, end, 
methodDef)); } @Override public boolean visitConstructorDeclaration(ConstructorDeclaration node) { JCMethodDecl constrDef = treeMaker.MethodDef( (JCModifiers)toTree(node.getModifiers()), table.init, null, toList(JCTypeParameter.class, node.typeVariables()), toList(JCVariableDecl.class, node.parameters()), toList(JCExpression.class, node.thrownTypeReferences()), (JCBlock)toTree(node.getBody()), null ); for (JCVariableDecl decl : constrDef.params) { decl.mods.flags |= Flags.PARAMETER; } int start = node.getTypeName().getPosition().getStart(); int end = node.getPosition().getEnd(); return set(node, setPos(start, end, constrDef)); } @Override public boolean visitReturn(Return node) { return posSet(node, treeMaker.Return(toExpression(node.getValue()))); } @Override public boolean visitSwitch(Switch node) { List<JCCase> cases = List.nil(); JCExpression currentPat = null; Node currentNode = null; List<JCStatement> stats = null; boolean preamble = true; for (Statement s : node.getBody().contents()) { if (s instanceof Case || s instanceof Default) { JCExpression newPat = (s instanceof Default) ? 
null : toExpression(((Case)s).getCondition()); if (preamble) { preamble = false; } else { cases = addCase(cases, currentPat, currentNode, stats); } stats = List.nil(); currentPat = newPat; currentNode = s; } else { if (preamble) { throw new RuntimeException("switch body does not start with default/case."); } stats = stats.append(toStatement(s)); } } if (!preamble) cases = addCase(cases, currentPat, currentNode, stats); JCExpression expr = toExpression(node.getCondition()); int start = posOfStructure(node, "(", true); int end = posOfStructure(node, ")", false); expr = setPos(start, end, treeMaker.Parens(expr)); return posSet(node, treeMaker.Switch(expr, cases)); } private List<JCCase> addCase(List<JCCase> cases, JCExpression currentPat, Node currentNode, List<JCStatement> stats) { JCStatement last = stats.last(); int start = currentNode.getPosition().getStart(); int end = last == null ? currentNode.getPosition().getEnd() : endPosTable.get(last); cases = cases.append(setPos(start, end, treeMaker.Case(currentPat, stats))); return cases; } @Override public boolean visitSynchronized(Synchronized node) { JCExpression expr = toExpression(node.getLock()); int start = posOfStructure(node, "(", true); int end = posOfStructure(node, ")", false); expr = setPos(start, end, treeMaker.Parens(expr)); return posSet(node, treeMaker.Synchronized(expr, (JCBlock)toTree(node.getBody()))); } @Override public boolean visitThis(This node) { JCTree tree; int end = node.getPosition().getEnd(), start; if (node.getQualifier() != null) { tree = treeMaker.Select((JCExpression) toTree(node.getQualifier()), table._this); start = posOfStructure(node, ".", true); } else { tree = treeMaker.Ident(table._this); start = node.getPosition().getStart(); } return set(node, setPos(start, end, tree)); } @Override public boolean visitTry(Try node) { List<JCCatch> catches = toList(JCCatch.class, node.catches()); return posSet(node, treeMaker.Try((JCBlock) toTree(node.getBody()), catches, (JCBlock) 
toTree(node.getFinally()))); } @Override public boolean visitCatch(Catch node) { JCVariableDecl exceptionDeclaration = (JCVariableDecl) toTree(node.getExceptionDeclaration()); exceptionDeclaration.getModifiers().flags |= Flags.PARAMETER; return posSet(node, treeMaker.Catch(exceptionDeclaration, (JCBlock) toTree(node.getBody()))); } @Override public boolean visitThrow(Throw node) { return posSet(node, treeMaker.Throw(toExpression(node.getThrowable()))); } @Override public boolean visitWhile(While node) { JCExpression expr = toExpression(node.getCondition()); int start = posOfStructure(node, "(", true); int end = posOfStructure(node, ")", false); expr = setPos(start, end, treeMaker.Parens(expr)); return posSet(node, treeMaker.WhileLoop(expr, toStatement(node.getStatement()))); } @Override public boolean visitEmptyDeclaration(EmptyDeclaration node) { if (node.getParent() instanceof CompilationUnit) { return posSet(node, treeMaker.Skip()); } return set(node, posNone(treeMaker.Block(0, List.<JCStatement>nil()))); } private static final EnumMap<UnaryOperator, Integer> UNARY_OPERATORS = Maps.newEnumMap(UnaryOperator.class); static { UNARY_OPERATORS.put(UnaryOperator.BINARY_NOT, JCTree.COMPL); UNARY_OPERATORS.put(UnaryOperator.LOGICAL_NOT, JCTree.NOT); UNARY_OPERATORS.put(UnaryOperator.UNARY_PLUS, JCTree.POS); UNARY_OPERATORS.put(UnaryOperator.PREFIX_INCREMENT, JCTree.PREINC); UNARY_OPERATORS.put(UnaryOperator.UNARY_MINUS, JCTree.NEG); UNARY_OPERATORS.put(UnaryOperator.PREFIX_DECREMENT, JCTree.PREDEC); UNARY_OPERATORS.put(UnaryOperator.POSTFIX_INCREMENT, JCTree.POSTINC); UNARY_OPERATORS.put(UnaryOperator.POSTFIX_DECREMENT, JCTree.POSTDEC); } private static final EnumMap<BinaryOperator, Integer> BINARY_OPERATORS = Maps.newEnumMap(BinaryOperator.class); static { BINARY_OPERATORS.put(BinaryOperator.PLUS_ASSIGN, JCTree.PLUS_ASG); BINARY_OPERATORS.put(BinaryOperator.MINUS_ASSIGN, JCTree.MINUS_ASG); BINARY_OPERATORS.put(BinaryOperator.MULTIPLY_ASSIGN, JCTree.MUL_ASG); 
BINARY_OPERATORS.put(BinaryOperator.DIVIDE_ASSIGN, JCTree.DIV_ASG); BINARY_OPERATORS.put(BinaryOperator.REMAINDER_ASSIGN, JCTree.MOD_ASG); BINARY_OPERATORS.put(BinaryOperator.AND_ASSIGN, JCTree.BITAND_ASG); BINARY_OPERATORS.put(BinaryOperator.XOR_ASSIGN, JCTree.BITXOR_ASG); BINARY_OPERATORS.put(BinaryOperator.OR_ASSIGN, JCTree.BITOR_ASG); BINARY_OPERATORS.put(BinaryOperator.SHIFT_LEFT_ASSIGN, JCTree.SL_ASG); BINARY_OPERATORS.put(BinaryOperator.SHIFT_RIGHT_ASSIGN, JCTree.SR_ASG); BINARY_OPERATORS.put(BinaryOperator.BITWISE_SHIFT_RIGHT_ASSIGN, JCTree.USR_ASG); BINARY_OPERATORS.put(BinaryOperator.LOGICAL_OR, JCTree.OR); BINARY_OPERATORS.put(BinaryOperator.LOGICAL_AND, JCTree.AND); BINARY_OPERATORS.put(BinaryOperator.BITWISE_OR, JCTree.BITOR); BINARY_OPERATORS.put(BinaryOperator.BITWISE_XOR, JCTree.BITXOR); BINARY_OPERATORS.put(BinaryOperator.BITWISE_AND, JCTree.BITAND); BINARY_OPERATORS.put(BinaryOperator.EQUALS, JCTree.EQ); BINARY_OPERATORS.put(BinaryOperator.NOT_EQUALS, JCTree.NE); BINARY_OPERATORS.put(BinaryOperator.GREATER, JCTree.GT); BINARY_OPERATORS.put(BinaryOperator.GREATER_OR_EQUAL, JCTree.GE); BINARY_OPERATORS.put(BinaryOperator.LESS, JCTree.LT); BINARY_OPERATORS.put(BinaryOperator.LESS_OR_EQUAL, JCTree.LE); BINARY_OPERATORS.put(BinaryOperator.SHIFT_LEFT, JCTree.SL); BINARY_OPERATORS.put(BinaryOperator.SHIFT_RIGHT, JCTree.SR); BINARY_OPERATORS.put(BinaryOperator.BITWISE_SHIFT_RIGHT, JCTree.USR); BINARY_OPERATORS.put(BinaryOperator.PLUS, JCTree.PLUS); BINARY_OPERATORS.put(BinaryOperator.MINUS, JCTree.MINUS); BINARY_OPERATORS.put(BinaryOperator.MULTIPLY, JCTree.MUL); BINARY_OPERATORS.put(BinaryOperator.DIVIDE, JCTree.DIV); BINARY_OPERATORS.put(BinaryOperator.REMAINDER, JCTree.MOD); } private static final Map<String, Integer> PRIMITIVES = ImmutableMap.<String, Integer>builder() .put("byte", TypeTags.BYTE) .put("char", TypeTags.CHAR) .put("short", TypeTags.SHORT) .put("int", TypeTags.INT) .put("long", TypeTags.LONG) .put("float", TypeTags.FLOAT) .put("double", 
TypeTags.DOUBLE) .put("boolean", TypeTags.BOOLEAN) .put("void", TypeTags.VOID) .build(); static int primitiveTypeTag(String typeName) { Integer primitive = PRIMITIVES.get(typeName); return primitive == null ? 0 : primitive; } private long getModifier(KeywordModifier keyword) { return keyword.asReflectModifiers(); } private JCExpression chain(Iterable<Identifier> parts) { JCExpression previous = null; for (Identifier part : parts) { Name next = toName(part); if (previous == null) { previous = setPos(part, treeMaker.Ident(next)); } else { previous = setPos(posOfStructure(part, ".", true), part.getPosition().getEnd(), treeMaker.Select(previous, next)); } } return previous; } private int posOfStructure(Node node, String structure, boolean atStart) { return posOfStructure(node, structure, atStart ? 0 : Integer.MAX_VALUE, atStart); } private int posOfStructure(Node node, String structure, int idx, boolean atStart) { int start = node.getPosition().getStart(); if (sourceStructures != null && sourceStructures.containsKey(node)) { for (SourceStructure struct : sourceStructures.get(node)) { if (structure.equals(struct.getContent())) { start = atStart ? struct.getPosition().getStart() : struct.getPosition().getEnd(); if (idx-- <= 0) break; } } } return start; } private boolean posSet(Node node, JCTree jcTree) { return set(node, setPos(node, jcTree)); } private <T extends JCTree> T posNone(T jcTree) { jcTree.pos = -1; endPosTable.remove(jcTree); return jcTree; } private <T extends JCTree> T setPos(Node node, T jcTree) { return setPos(node.getPosition().getStart(), node.getPosition().getEnd(), jcTree); } private <T extends JCTree> T setPos(int start, int end, T jcTree) { jcTree.pos = start; endPosTable.put(jcTree, end); return jcTree; } }
package at.ac.tuwien.kr.alpha.grounder;

/**
 * Immutable identifier of a ground atom.
 *
 * <p>Wraps a plain {@code int} id. Because this is a value type (two
 * {@code AtomId}s wrapping the same int are interchangeable), it overrides
 * {@link #equals(Object)} and {@link #hashCode()} so instances can be used
 * as keys in hash-based collections with value semantics; the original
 * version inherited identity semantics from {@code Object}, which makes
 * distinct wrapper instances of the same id compare unequal.</p>
 */
public class AtomId {
	/** The wrapped atom id; immutable once constructed. */
	public final int atomId;

	/**
	 * @param atomId the numeric id this instance represents
	 */
	public AtomId(int atomId) {
		this.atomId = atomId;
	}

	@Override
	public String toString() {
		return Integer.toString(atomId);
	}

	@Override
	public boolean equals(Object obj) {
		if (this == obj) {
			return true;
		}
		if (!(obj instanceof AtomId)) {
			return false;
		}
		return atomId == ((AtomId) obj).atomId;
	}

	@Override
	public int hashCode() {
		// The id itself is a perfect hash for this single-field value type.
		return atomId;
	}
}
package at.archistar.crypto.math; import de.flexiprovider.common.math.codingtheory.GF2mField; import de.flexiprovider.common.math.linearalgebra.GF2mMatrix; /** * <p>A matrix operating in GF(256).</p> * * <p>Uses {@link GF256} for optimized operations and provides some additional methods in contrary to the flexiprovider- * class.</p> * * @author Elias Frantar <i>(added documentation)</i> * @author Andreas Happe <andreashappe@snikt.net> * @version 2014-7-18 */ public class CustomMatrix extends GF2mMatrix { private static final GF2mField gf256 = new GF2mField(8, 0x11d); // Galois-Field (x^8 + x^4 + x^3 + x + 1 = 0) / 285 /** * Constructor * @param data the data to put into the matrix */ public CustomMatrix(int[][] data) { super(gf256, data); } /** * Constructor * @param encoded the encoded matrix (got via {@link #getEncoded()}) */ public CustomMatrix(byte[] encoded) { super(gf256, encoded); } /** * Performs a matrix * vector multiplication. * * <b>NOTE:</b> Matrix multiplication is not commutative. (A*B != B*A) and so does only work if A(MxN) and B(NxO). * Throws an {@link ArithmeticException} if this condition is violated. * * @param vec the vector to multiply the matrix with (a 1D-matrix) * @return the product of the matrix and the given vector <i>(matrix * vector)</i> */ public int[] rightMultiply(int vec[]) { if (vec.length != matrix.length || vec.length != matrix[0].length) { // multiplication only works if A(MxN) and B(NxO) throw new ArithmeticException("when matrix is MxN, vector must be Nx1"); } int[] result = new int[vec.length]; for (int i = 0; i < vec.length; i++) { int tmp = 0; for (int j = 0; j < vec.length; j++) { tmp = GF256.add(tmp, GF256.mult(matrix[i][j], vec[j])); } result[i] = tmp; } return result; } /** * Returns the <i>i<sup>th</sup></i> row of the matrix. 
* @param i index of the row to return * @return the <i>i<sup>th</sup></i> row of the matrix (starting at 0) */ public int[] getRow(int i) { return matrix[i]; } public void output() { System.err.println("matrix:"); for (int[] tmp : matrix) { for (int i : tmp) { System.err.print(" " + i); } System.err.println(""); } } }
package bdv.img.cache; import java.lang.ref.Reference; import java.lang.ref.ReferenceQueue; import java.lang.ref.SoftReference; import java.lang.ref.WeakReference; import java.util.ArrayList; import java.util.concurrent.ConcurrentHashMap; import bdv.img.cache.CacheIoTiming.IoStatistics; import bdv.img.cache.CacheIoTiming.IoTimeBudget; import bdv.img.cache.VolatileImgCells.CellCache; import net.imglib2.img.basictypeaccess.volatiles.VolatileAccess; public class VolatileGlobalCellCache implements Cache { private final int maxNumLevels; static class Key { private final int timepoint; private final int setup; private final int level; private final long index; private final int[] cellDims; private final long[] cellMin; public Key( final int timepoint, final int setup, final int level, final long index, final int[] cellDims, final long[] cellMin ) { this.timepoint = timepoint; this.setup = setup; this.level = level; this.index = index; this.cellDims = cellDims; this.cellMin = cellMin; int value = Long.hashCode( index ); value = 31 * value + level; value = 31 * value + setup; value = 31 * value + timepoint; hashcode = value; } @Override public boolean equals( final Object other ) { if ( this == other ) return true; if ( !( other instanceof VolatileGlobalCellCache.Key ) ) return false; final Key that = ( Key ) other; return ( this.timepoint == that.timepoint ) && ( this.setup == that.setup ) && ( this.level == that.level ) && ( this.index == that.index ); } final int hashcode; @Override public int hashCode() { return hashcode; } protected int[] getCellDims() { return cellDims; } protected long[] getCellMin() { return cellMin; } } class Entry< K, V extends VolatileCacheValue > { private final K key; private V data; private final VolatileCacheLoader< K, V > loader; /** * When was this entry last enqueued for loading (see * {@link VolatileGlobalCellCache#currentQueueFrame}). This is initialized * to -1. 
When the entry's data becomes valid, it is set to * {@link Long#MAX_VALUE}. */ private long enqueueFrame; public Entry( final K key, final V data, final VolatileCacheLoader< K, V > loader ) { this.key = key; this.data = data; this.loader = loader; enqueueFrame = -1; } private void loadIfNotValid() throws InterruptedException { // TODO: assumption for following synchronisation pattern is that isValid() will never go from true to false. When invalidation API is added, that might change. if ( !data.isValid() ) { synchronized ( this ) { if ( !data.isValid() ) { data = loader.load( key ); enqueueFrame = Long.MAX_VALUE; softReferenceCache.put( key, new MySoftReference< K >( this, finalizeQueue ) ); notifyAll(); } } } } } interface GetKey< K > { public K getKey(); } class MySoftReference< K > extends SoftReference< Entry< ?, ? > > implements GetKey< K > { private final K key; public MySoftReference( final Entry< K, ? > referent, final ReferenceQueue< ? super Entry< ?, ? > > q ) { super( referent, q ); key = referent.key; } @Override public K getKey() { return key; } } class MyWeakReference< K > extends WeakReference< Entry< ?, ? > > implements GetKey< K > { private final K key; public MyWeakReference( final Entry< K, ? > referent, final ReferenceQueue< ? super Entry< ?, ? > > q ) { super( referent, q ); key = referent.key; } @Override public K getKey() { return key; } } protected static final int MAX_PER_FRAME_FINALIZE_ENTRIES = 500; protected void finalizeRemovedCacheEntries() { synchronized ( softReferenceCache ) { for ( int i = 0; i < MAX_PER_FRAME_FINALIZE_ENTRIES; ++i ) { final Reference< ? extends Entry< ?, ? > > poll = finalizeQueue.poll(); if ( poll == null ) break; final Object key = ( ( GetKey< ? > ) poll ).getKey(); final Reference< Entry< ?, ? > > ref = softReferenceCache.get( key ); if ( ref == poll ) softReferenceCache.remove( key ); } } } protected final ConcurrentHashMap< Object, Reference< Entry< ?, ? 
> > > softReferenceCache = new ConcurrentHashMap< Object, Reference< Entry< ?, ? > > >(); protected final ReferenceQueue< Entry< ?, ? > > finalizeQueue = new ReferenceQueue< Entry< ?, ? > >(); protected final BlockingFetchQueues< Key > queue; protected volatile long currentQueueFrame = 0; class Fetcher extends Thread { @Override public final void run() { Key key = null; while ( true ) { while ( key == null ) try { key = queue.take(); } catch ( final InterruptedException e ) {} long waitMillis = pauseUntilTimeMillis - System.currentTimeMillis(); while ( waitMillis > 0 ) { try { synchronized ( lock ) { lock.wait( waitMillis ); } } catch ( final InterruptedException e ) {} waitMillis = pauseUntilTimeMillis - System.currentTimeMillis(); } try { loadIfNotValid( key ); key = null; } catch ( final InterruptedException e ) {} } } private final Object lock = new Object(); private volatile long pauseUntilTimeMillis = 0; public void pauseUntil( final long timeMillis ) { pauseUntilTimeMillis = timeMillis; interrupt(); } public void wakeUp() { pauseUntilTimeMillis = 0; synchronized ( lock ) { lock.notify(); } } } /** * pause all {@link Fetcher} threads for the specified number of milliseconds. */ public void pauseFetcherThreadsFor( final long ms ) { pauseFetcherThreadsUntil( System.currentTimeMillis() + ms ); } /** * pause all {@link Fetcher} threads until the given time (see * {@link System#currentTimeMillis()}). */ public void pauseFetcherThreadsUntil( final long timeMillis ) { for ( final Fetcher f : fetchers ) f.pauseUntil( timeMillis ); } /** * Wake up all Fetcher threads immediately. This ends any * {@link #pauseFetcherThreadsFor(long)} and * {@link #pauseFetcherThreadsUntil(long)} set earlier. 
*/ public void wakeFetcherThreads()
{
	// Wake every fetcher thread so it immediately re-checks its queue.
	for ( final Fetcher f : fetchers )
		f.wakeUp();
}

// Threads that asynchronously load the data of enqueued cells.
private final ArrayList< Fetcher > fetchers;

// Per thread-group IO statistics and time budget.
private final CacheIoTiming cacheIoTiming;

// NOTE(review): maxNumTimepoints and maxNumSetups are ignored; kept only for
// source compatibility with old callers.
@Deprecated
public VolatileGlobalCellCache( final int maxNumTimepoints, final int maxNumSetups, final int maxNumLevels, final int numFetcherThreads )
{
	this( maxNumLevels, numFetcherThreads );
}

/**
 * Create the cache and start {@code numFetcherThreads} daemon fetcher
 * threads that service the fetch queue.
 *
 * @param maxNumLevels
 *            the highest occurring mipmap level plus 1.
 * @param numFetcherThreads
 *            number of asynchronous loader threads to start.
 */
public VolatileGlobalCellCache( final int maxNumLevels, final int numFetcherThreads )
{
	this.maxNumLevels = maxNumLevels;
	cacheIoTiming = new CacheIoTiming();
	// one queue priority level per mipmap level
	queue = new BlockingFetchQueues< Key >( maxNumLevels );
	fetchers = new ArrayList< Fetcher >();
	for ( int i = 0; i < numFetcherThreads; ++i )
	{
		final Fetcher f = new Fetcher();
		// daemon threads: they must not keep the JVM alive on shutdown
		f.setDaemon( true );
		f.setName( "Fetcher-" + i );
		fetchers.add( f );
		f.start();
	}
}

/**
 * Load the data for the {@link VolatileCell} referenced by k, if
 * <ul>
 * <li>the {@link VolatileCell} is in the cache, and
 * <li>the data is not yet loaded (valid).
 * </ul>
 *
 * @param k
 *            key of the cell to load.
 * @throws InterruptedException
 *             if the loading thread was interrupted.
 */
protected void loadIfNotValid( final Key k ) throws InterruptedException
{
	final Reference< Entry< ?, ? > > ref = softReferenceCache.get( k );
	if ( ref != null )
	{
		// the reference may have been cleared by the GC already
		final Entry< ?, ? > entry = ref.get();
		if ( entry != null )
			loadEntryIfNotValid( entry );
	}
}

/**
 * Load the data for the {@link Entry}, if it is not yet loaded (valid).
 *
 * @throws InterruptedException
 *             if the loading thread was interrupted.
 */
protected void loadEntryIfNotValid( final Entry< ?, ? > entry ) throws InterruptedException
{
	entry.loadIfNotValid();
}

/**
 * Enqueue the {@link Entry} if it hasn't been enqueued for this frame
 * already.
 */
protected void enqueueEntry( final Entry< ?, ? > entryTODO, final int priority, final boolean enqueuToFront )
{
	final Entry< Key, ? > entry = ( Entry< Key, ? > ) entryTODO; // TODO: fix generics
	// the frame stamp prevents re-enqueuing the same entry within one frame
	if ( entry.enqueueFrame < currentQueueFrame )
	{
		entry.enqueueFrame = currentQueueFrame;
		final Key k = entry.key;
		queue.put( k, priority, enqueuToFront );
	}
}

/**
 * Load the data for the {@link Entry} if it is not yet loaded (valid) and
 * there is enough {@link IoTimeBudget} left. Otherwise, enqueue the
 * {@link Entry} if it hasn't been enqueued for this frame already.
 */
protected void loadOrEnqueue( final Entry< ?, ? > entry, final int priority, final boolean enqueuToFront )
{
	final IoStatistics stats = cacheIoTiming.getThreadGroupIoStatistics();
	final IoTimeBudget budget = stats.getIoTimeBudget();
	final long timeLeft = budget.timeLeft( priority );
	if ( timeLeft > 0 )
	{
		synchronized ( entry )
		{
			if ( entry.data.isValid() )
				return;
			// enqueue first, then wait (bounded by the remaining budget)
			// for a fetcher thread to load the data and notify the entry
			enqueueEntry( entry, priority, enqueuToFront );
			final long t0 = stats.getIoNanoTime();
			stats.start();
			try
			{
				// timeLeft is nanoseconds; wait takes (millis, nanos)
				entry.wait( timeLeft / 1000000l, 1 );
			}
			catch ( final InterruptedException e )
			{}
			stats.stop();
			// charge the time actually spent waiting against the budget
			final long t = stats.getIoNanoTime() - t0;
			budget.use( t, priority );
		}
	}
	else
		enqueueEntry( entry, priority, enqueuToFront );
}

// Dispatch to the loading behavior selected by the CacheHints.
private void loadEntryWithCacheHints( final Entry< ?, ? > entry, final CacheHints cacheHints )
{
	switch ( cacheHints.getLoadingStrategy() )
	{
	case VOLATILE:
	default:
		// asynchronous: hand the entry to the fetcher threads
		enqueueEntry( entry, cacheHints.getQueuePriority(), cacheHints.isEnqueuToFront() );
		break;
	case BLOCKING:
		// synchronous: retry until loaded, ignoring interrupts
		while ( true )
			try
			{
				loadEntryIfNotValid( entry );
				break;
			}
			catch ( final InterruptedException e )
			{}
		break;
	case BUDGETED:
		if ( !entry.data.isValid() )
			loadOrEnqueue( entry, cacheHints.getQueuePriority(), cacheHints.isEnqueuToFront() );
		break;
	case DONTLOAD:
		break;
	}
}

/**
 * Get a cell if it is in the cache or null. Note, that a cell being in the
 * cache only means that there is a data array, but not necessarily that the
 * data has already been loaded.
 *
 * If the cell data has not been loaded, do the following, depending on the
 * {@link LoadingStrategy}:
 * <ul>
 * <li> {@link LoadingStrategy#VOLATILE}:
 *       Enqueue the cell for asynchronous loading by a fetcher thread, if
 *       it has not been enqueued in the current frame already.
 * <li> {@link LoadingStrategy#BLOCKING}:
 *       Load the cell data immediately.
 * <li> {@link LoadingStrategy#BUDGETED}:
 *       Load the cell data immediately if there is enough
 *       {@link IoTimeBudget} left for the current thread group.
 *       Otherwise enqueue for asynchronous loading, if it has not been
 *       enqueued in the current frame already.
 * <li> {@link LoadingStrategy#DONTLOAD}:
 *       Do nothing.
 * </ul>
 *
 * @param key
 *            coordinates (timepoint, setup, level, index) of the cell.
 * @param cacheHints
 *            {@link LoadingStrategy}, queue priority, and queue order.
 * @return a cell with the specified coordinates or null.
 */
public < V extends VolatileCacheValue > V getGlobalIfCached( final Key key, final CacheHints cacheHints )
{
	final Reference< Entry< ?, ? > > ref = softReferenceCache.get( key );
	if ( ref != null )
	{
		final Entry< ?, ? > entry = ref.get();
		if ( entry != null )
		{
			loadEntryWithCacheHints( entry, cacheHints );
			return ( V ) entry.data;
		}
	}
	return null;
}

/**
 * Create a new cell with the specified coordinates, if it isn't in the
 * cache already. Depending on the {@link LoadingStrategy}, do the
 * following:
 * <ul>
 * <li> {@link LoadingStrategy#VOLATILE}:
 *       Enqueue the cell for asynchronous loading by a fetcher thread.
 * <li> {@link LoadingStrategy#BLOCKING}:
 *       Load the cell data immediately.
 * <li> {@link LoadingStrategy#BUDGETED}:
 *       Load the cell data immediately if there is enough
 *       {@link IoTimeBudget} left for the current thread group.
 *       Otherwise enqueue for asynchronous loading.
 * <li> {@link LoadingStrategy#DONTLOAD}:
 *       Do nothing.
 * </ul>
 *
 * @param key
 *            coordinates (timepoint, setup, level, index) of the cell.
 * @param cacheHints
 *            {@link LoadingStrategy}, queue priority, and queue order.
 * @param cacheLoader
 *            loader used to create an empty value and to load the data.
 * @return a cell with the specified coordinates.
 */
public < K, V extends VolatileCacheValue > V createGlobal( final K key, final CacheHints cacheHints, final VolatileCacheLoader< K, V > cacheLoader )
{
	Entry< K, V > entry = null;
	// guard the get-or-create against concurrent creators
	synchronized ( softReferenceCache )
	{
		final Reference< Entry< ?, ? > > ref = softReferenceCache.get( key );
		if ( ref != null )
			entry = ( Entry< K, V > ) ref.get(); // TODO: try to let softRefCache be Reference< ? extends Entry< ?, ? > > type
		if ( entry == null )
		{
			final V value = cacheLoader.createEmptyValue( key );
			entry = new Entry< K, V >( key, value, cacheLoader );
			softReferenceCache.put( key, new MyWeakReference< K >( entry, finalizeQueue ) );
		}
	}
	loadEntryWithCacheHints( entry, cacheHints );
	return entry.data;
}

/**
 * Prepare the cache for providing data for the "next frame":
 * <ul>
 * <li>the contents of fetch queues is moved to the prefetch.
 * <li>some cleaning up of garbage collected entries ({@link #finalizeRemovedCacheEntries()}).
 * <li>the internal frame counter is incremented, which will enable
 * previously enqueued requests to be enqueued again for the new frame.
 * </ul>
 */
@Override
public void prepareNextFrame()
{
	queue.clear();
	finalizeRemovedCacheEntries();
	++currentQueueFrame;
}

/**
 * (Re-)initialize the IO time budget, that is, the time that can be spent
 * in blocking IO per frame.
 *
 * @param partialBudget
 *            Initial budget (in nanoseconds) for priority levels 0 through
 *            <em>n</em>. The budget for level <em>i&gt;j</em> must always be
 *            smaller-equal the budget for level <em>j</em>. If <em>n</em>
 *            is smaller than the maximum number of mipmap levels, the
 *            remaining priority levels are filled up with budget[n].
 */
@Override
public void initIoTimeBudget( final long[] partialBudget )
{
	final IoStatistics stats = cacheIoTiming.getThreadGroupIoStatistics();
	// lazily create the budget for this thread group
	if ( stats.getIoTimeBudget() == null )
		stats.setIoTimeBudget( new IoTimeBudget( maxNumLevels ) );
	stats.getIoTimeBudget().reset( partialBudget );
}

/**
 * Get the {@link CacheIoTiming} that provides per thread-group IO
 * statistics and budget.
 */
@Override
public CacheIoTiming getCacheIoTiming()
{
	return cacheIoTiming;
}

/**
 * Remove all references to loaded data as well as all enqueued requests
 * from the cache.
 */
public void clearCache()
{
	for ( final Reference< Entry< ?, ? > > ref : softReferenceCache.values() )
		ref.clear();
	softReferenceCache.clear();
	prepareNextFrame();
	// TODO: add a full clear to BlockingFetchQueues.
	// (BlockingFetchQueues.clear() moves stuff to the prefetchQueue.)
}

/** A cached value whose data may or may not be loaded yet. */
public static interface VolatileCacheValue
{
	public boolean isValid();
}

/** Creates empty placeholder values and loads the actual data for a key. */
public static interface VolatileCacheLoader< K, V extends VolatileCacheValue >
{
	public V createEmptyValue( K key );

	public V load( K key ) throws InterruptedException;
}

/** Adapts a {@link CacheArrayLoader} to the {@link VolatileCacheLoader} interface. */
public static class CacheArrayLoaderWrapper< A extends VolatileAccess > implements VolatileCacheLoader< Key, VolatileCell< A > >
{
	private final CacheArrayLoader< A > loader;

	public CacheArrayLoaderWrapper( final CacheArrayLoader< A > loader )
	{
		this.loader = loader;
	}

	@Override
	public VolatileCell< A > createEmptyValue( final Key key )
	{
		// placeholder cell backed by an invalid (empty) access array
		final VolatileCell< A > cell = new VolatileCell< A >( key.cellDims, key.cellMin, loader.emptyArray( key.getCellDims() ) );
		return cell;
	}

	@Override
	public VolatileCell< A > load( final Key key ) throws InterruptedException
	{
		final VolatileCell< A > cell = new VolatileCell< A >( key.cellDims, key.cellMin, loader.loadArray( key.timepoint, key.setup, key.level, key.cellDims, key.cellMin ) );
		return cell;
	}
}

/**
 * A {@link CellCache} view onto this global cache that is fixed to one
 * (timepoint, setup, level) triple.
 */
public class VolatileCellCache< A extends VolatileAccess > implements CellCache< A >
{
	private final int timepoint;

	private final int setup;

	private final int level;

	private CacheHints cacheHints;

	private final CacheArrayLoader< A > loader;

	private final VolatileCacheLoader< Key, VolatileCell< A > > cacheLoader;

	public VolatileCellCache( final int timepoint, final int setup, final int level, final CacheHints cacheHints, final CacheArrayLoader< A > loader )
	{
		this.timepoint = timepoint;
		this.setup = setup;
		this.level = level;
		this.cacheHints = cacheHints;
		this.loader = loader;
		this.cacheLoader = new CacheArrayLoaderWrapper< A >( loader );
	}

	@Override
	public VolatileCell< A > get( final long index )
	{
		// lookup only: cellDims/cellMin are not needed for a cache hit
		final Key key = new Key( timepoint, setup, level, index, null, null );
		return getGlobalIfCached( key, cacheHints );
	}

	@Override
	public VolatileCell< A > load( final long index, final int[] cellDims, final long[] cellMin )
	{
		final Key key = new Key( timepoint, setup, level, index, cellDims, cellMin );
		return createGlobal( key, cacheHints, cacheLoader );
	}

	@Override
	public void setCacheHints( final CacheHints cacheHints )
	{
		this.cacheHints = cacheHints;
	}
} }
package be.bagofwords.db;

import be.bagofwords.application.LateCloseableComponent;
import be.bagofwords.application.memory.MemoryManager;
import be.bagofwords.application.status.StatusViewable;
import be.bagofwords.cache.CachesManager;
import be.bagofwords.db.bloomfilter.BloomFilterDataInterface;
import be.bagofwords.db.bloomfilter.LongBloomFilterWithCheckSum;
import be.bagofwords.db.cached.CachedDataInterface;
import be.bagofwords.db.combinator.Combinator;
import be.bagofwords.db.combinator.LongCombinator;
import be.bagofwords.db.combinator.OverWriteCombinator;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

/**
 * Base factory for {@link DataInterface} instances. Concrete subclasses supply the
 * storage backend via {@link #createBaseDataInterface(String, Class, Combinator)};
 * this class layers caching and bloom-filter wrappers on top, tracks every created
 * interface for flushing/closing, and renders usage statistics for status pages.
 */
public abstract class DataInterfaceFactory implements StatusViewable, LateCloseableComponent {

    private final CachesManager cachesManager;
    private MemoryManager memoryManager;
    /** All interfaces created by this factory; guarded by synchronizing on itself. */
    private final List<DataInterface> allInterfaces;

    /** Lazily created shared store for bloom filters, see {@link #checkInitialisationCachedBloomFilters()}. */
    private DataInterface<LongBloomFilterWithCheckSum> cachedBloomFilters;
    private FlushDataInterfacesThread flushDataInterfacesThread;

    /**
     * NOTE(review): the flush thread is started from the constructor, which leaks a
     * partially-constructed {@code this} to another thread — confirm subclass
     * constructors do no further initialization the thread depends on.
     */
    public DataInterfaceFactory(CachesManager cachesManager, MemoryManager memoryManager) {
        this.cachesManager = cachesManager;
        this.memoryManager = memoryManager;
        this.allInterfaces = new ArrayList<>();
        this.flushDataInterfacesThread = new FlushDataInterfacesThread(this, memoryManager);
        this.flushDataInterfacesThread.start();
    }

    /**
     * Create the backend data interface for the given subset.
     *
     * @param nameOfSubset name identifying the data subset
     * @param objectClass  type of the stored values
     * @param combinator   merges multiple writes for the same key
     */
    protected abstract <T extends Object> DataInterface<T> createBaseDataInterface(String nameOfSubset, Class<T> objectClass, Combinator<T> combinator);

    /** Convenience: a cached + bloom-filtered interface of longs combined by summation. */
    public DataInterface<Long> createCountDataInterface(String subset) {
        return createDataInterface(DatabaseCachingType.CACHED_AND_BLOOM, subset, Long.class, new LongCombinator());
    }

    /** Create an interface with the given caching type; writes overwrite previous values. */
    public <T extends Object> DataInterface<T> createDataInterface(DatabaseCachingType type, String subset, Class<T> objectClass) {
        return createDataInterface(type, subset, objectClass, new OverWriteCombinator<T>());
    }

    /** Create a cached + bloom-filtered interface; writes overwrite previous values. */
    public <T extends Object> DataInterface<T> createDataInterface(String subset, Class<T> objectClass) {
        return createDataInterface(DatabaseCachingType.CACHED_AND_BLOOM, subset, objectClass, new OverWriteCombinator<T>());
    }

    /**
     * Create a data interface, wrapping the backend with a cache and/or bloom filter
     * as requested by {@code type}, and register it for flushing/closing.
     */
    public <T extends Object> DataInterface<T> createDataInterface(final DatabaseCachingType type, final String subset, final Class<T> objectClass, final Combinator<T> combinator) {
        DataInterface<T> result = createBaseDataInterface(subset, objectClass, combinator);
        if (type.useCache()) {
            result = cached(result);
        }
        if (type.useBloomFilter()) {
            result = bloom(result);
        }
        synchronized (allInterfaces) {
            allInterfaces.add(result);
        }
        return result;
    }

    /** Wrap an interface with an in-memory cache layer. */
    protected <T extends Object> DataInterface<T> cached(DataInterface<T> baseDataInterface) {
        return new CachedDataInterface<>(cachesManager, memoryManager, baseDataInterface);
    }

    /** Wrap an interface with a bloom filter backed by the shared bloom-filter store. */
    protected <T extends Object> DataInterface<T> bloom(DataInterface<T> dataInterface) {
        checkInitialisationCachedBloomFilters();
        return new BloomFilterDataInterface<>(dataInterface, cachedBloomFilters);
    }

    /** Lazily create the shared bloom-filter store and register it. */
    private void checkInitialisationCachedBloomFilters() {
        if (cachedBloomFilters == null) {
            cachedBloomFilters = cached(createBaseDataInterface("system/bloomFilter", LongBloomFilterWithCheckSum.class, new OverWriteCombinator<LongBloomFilterWithCheckSum>()));
            synchronized (allInterfaces) {
                allInterfaces.add(cachedBloomFilters);
            }
        }
    }

    public List<DataInterface> getAllInterfaces() {
        return allInterfaces;
    }

    /** Stop the flush thread, then flush and close every created interface. */
    @Override
    public synchronized void close() {
        flushDataInterfacesThread.terminateAndWait();
        closeAllInterfaces();
    }

    /**
     * Flush and close all registered interfaces. The bloom-filter store is closed
     * last because the other interfaces may still write to it while closing.
     */
    public void closeAllInterfaces() {
        synchronized (allInterfaces) {
            for (DataInterface dataI : allInterfaces) {
                if (dataI != cachedBloomFilters) {
                    dataI.flushIfNotClosed();
                    dataI.close();
                }
            }
            if (cachedBloomFilters != null) {
                cachedBloomFilters.flushIfNotClosed();
                cachedBloomFilters.close();
                cachedBloomFilters = null;
            }
            allInterfaces.clear();
        }
    }

    @Override
    public String toString() {
        return getClass().getSimpleName();
    }

    /** Render usage statistics of all interfaces, busiest first. */
    @Override
    public void printHtmlStatus(StringBuilder sb) {
        sb.append("<h1>Data interfaces</h1>");
        List<DataInterface> interfaces = new ArrayList<>(getAllInterfaces());
        Collections.sort(interfaces, new Comparator<DataInterface>() {
            @Override
            public int compare(DataInterface o1, DataInterface o2) {
                long max1 = Math.max(o1.getTotalTimeRead(), o1.getTotalTimeWrite());
                long max2 = Math.max(o2.getTotalTimeRead(), o2.getTotalTimeWrite());
                // Fix: was -Double.compare(max1, max2), which converts longs to
                // doubles (lossy above 2^53) and negates the result instead of
                // swapping arguments. Long.compare(max2, max1) sorts highest first.
                return Long.compare(max2, max1);
            }
        });
        for (DataInterface dataInterface : interfaces) {
            printDataInterfaceUsage("&nbsp;&nbsp;&nbsp;", sb, dataInterface);
        }
    }

    /** Append one interface's counters (and its delegates', recursively) to {@code sb}. */
    protected void printDataInterfaceUsage(String indentation, StringBuilder sb, DataInterface dataInterface) {
        if (dataInterface.getNumberOfReads() + dataInterface.getNumberOfWrites() > 0 || dataInterface.getTotalTimeRead() > 0 || dataInterface.getTotalTimeWrite() > 0) {
            sb.append(indentation + dataInterface.getClass().getSimpleName() + " " + dataInterface.getName() + " reads=" + dataInterface.getNumberOfReads() + " readTime=" + dataInterface.getTotalTimeRead() + " writes=" + dataInterface.getNumberOfWrites() + " writeTime=" + dataInterface.getTotalTimeWrite());
            sb.append("<br>");
            DataInterface implementingDataInterface = dataInterface.getImplementingDataInterface();
            if (implementingDataInterface != null) {
                // NOTE(review): indentation + indentation doubles the indent at each
                // nesting level (3, 6, 12, ...) — presumably intended to be
                // indentation + one fixed step; confirm before changing output.
                printDataInterfaceUsage(indentation + indentation, sb, implementingDataInterface);
            }
        }
    }
}
package ch.ethz.geco.bass.audio.util;

import ch.ethz.geco.bass.Main;
import ch.ethz.geco.bass.audio.AudioManager;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import com.google.gson.reflect.TypeToken;
import com.sedmelluq.discord.lavaplayer.track.AudioTrack;

import java.lang.reflect.Type;
import java.util.*;
import java.util.stream.Collectors;

/**
 * Represents a playlist.
 */
public class Playlist {
    /**
     * Ordering of the playlist.
     * NOTE(review): the trailing .reversed() reverses the whole composite
     * comparator (trackID descending was already reversed once) — confirm the
     * intended order of trackID vs. vote count.
     */
    private final Comparator<AudioTrack> comparator = Comparator.comparing((AudioTrack track) -> ((AudioTrackMetaData) track.getUserData()).getTrackID()).reversed()
            .thenComparing((AudioTrack track) -> ((AudioTrackMetaData) track.getUserData()).getVoteCount()).reversed();

    /**
     * The internal mapping of trackIDs to tracks.
     */
    private final HashMap<Integer, AudioTrack> trackSet = new HashMap<>();

    /**
     * The playlist in a sorted format.
     */
    private ArrayList<AudioTrack> sortedPlaylist = new ArrayList<>();

    /**
     * Adds a track to the playlist if the track is not already in it.
     *
     * @param track the track to add
     * @return true on success, false if the track is already in the playlist
     */
    public boolean add(AudioTrack track) {
        synchronized (this) {
            Integer trackID = ((AudioTrackMetaData) track.getUserData()).getTrackID();
            // putIfAbsent returns null only if the ID was not present yet
            if (trackSet.putIfAbsent(trackID, track) == null) {
                if (sortedPlaylist.add(track)) {
                    resort();
                    return true;
                } else {
                    // keep both structures consistent if the list add failed
                    trackSet.remove(trackID);
                }
            }
            return false;
        }
    }

    /**
     * Returns the next track in order.
     *
     * @return the next track in order, or null if the playlist is empty
     */
    public AudioTrack poll() {
        synchronized (this) {
            if (sortedPlaylist.isEmpty()) {
                // Broadcast to users
                JsonObject jo = new JsonObject();
                JsonObject data = new JsonObject();

                data.addProperty("state", "stopped");
                jo.addProperty("method", "post");
                jo.addProperty("type", "player/control");
                jo.add("data", data);

                Main.server.broadcast(jo);
                return null;
            }

            AudioTrack track = sortedPlaylist.remove(0);
            trackSet.remove(((AudioTrackMetaData) track.getUserData()).getTrackID());
            return track;
        }
    }

    /**
     * Returns a sorted list of all audio tracks.
     * NOTE(review): this exposes the internal list; callers mutating it would
     * corrupt the playlist — consider returning a copy.
     *
     * @return a sorted list of all audio tracks
     */
    public List<AudioTrack> getSortedList() {
        synchronized (this) {
            return sortedPlaylist;
        }
    }

    /**
     * Sets the vote of a user for the given track.
     * <p>
     * vote = 0 means that the vote gets removed for that user.
     *
     * @param trackID the ID of the track
     * @param userID  the ID of the user who voted
     * @param vote    the vote
     */
    public void setVote(Integer trackID, String userID, Byte vote) {
        AudioTrack track = trackSet.get(trackID);
        if (track != null) {
            ((AudioTrackMetaData) track.getUserData()).getVotes().put(userID, vote);
            resort();
        } else {
            // TODO: somehow report an error to the interface that the track wasn't found
        }
    }

    /**
     * Resorts the playlist and broadcasts the new queue to all clients.
     */
    public void resort() {
        synchronized (this) {
            // Fix: the previous hand-rolled insertion sort did not compile (the
            // inner loop never decremented its index) and cast the Object[]
            // returned by toArray() to AudioTrack[], which throws
            // ClassCastException at runtime. List.sort is stable and correct.
            sortedPlaylist.sort(comparator);

            // FIXME: duplicate code, find a way to be able to call specific requests when needed.
            // Broadcast queue/all response
            Type listType = new TypeToken<List<AudioTrack>>() {}.getType();
            JsonArray trackList = (JsonArray) Main.GSON.toJsonTree(AudioManager.getScheduler().getPlaylist().getSortedList(), listType);

            JsonObject response = new JsonObject();
            response.addProperty("method", "post");
            response.addProperty("type", "queue/all");
            response.add("data", trackList);

            Main.server.broadcast(response);
        }
    }
}
package cn.lambdalib.util.version;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.lang.reflect.Type;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import cn.lambdalib.annoreg.core.Registrant;
import cn.lambdalib.annoreg.mc.RegEventHandler;
import cn.lambdalib.annoreg.mc.RegEventHandler.Bus;
import cn.lambdalib.annoreg.mc.RegInitCallback;
import cn.lambdalib.annoreg.mc.RegPostInitCallback;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.util.ChatComponentText;
import net.minecraft.util.ChatComponentTranslation;
import net.minecraft.util.IChatComponent;
import net.minecraftforge.common.MinecraftForge;
import net.minecraftforge.event.entity.player.PlayerEvent;
import net.minecraftforge.event.world.WorldEvent;
import net.minecraftforge.event.world.WorldEvent.Load;
import cpw.mods.fml.common.FMLCommonHandler;
import cpw.mods.fml.common.Mod.EventHandler;
import cpw.mods.fml.common.eventhandler.SubscribeEvent;
import cpw.mods.fml.common.gameevent.PlayerEvent.PlayerLoggedInEvent;

import com.google.gson.Gson;
import com.google.gson.JsonIOException;
import com.google.gson.JsonSyntaxException;
import com.google.gson.reflect.TypeToken;

/**
 * Singleton registry of mods whose published versions should be checked
 * against the locally installed version. One {@link Fetcher} thread per
 * registered mod downloads the release list; players are notified of newer
 * versions on login.
 */
@Registrant
public class CheckManger {
    /**
     * {modid:[modname,localVersion,apiurl]}
     */
    HashMap<String, String[]> modPool = new HashMap<>();
    /** modid -> latest published version, filled in by the fetcher threads. */
    HashMap<String, String> latestVersion = new HashMap<>();

    private static CheckManger instance = new CheckManger();
    // NOTE(review): this flag is never set to true, so the alert repeats on
    // every login — confirm whether a one-shot alert was intended.
    private static boolean alerted = false;

    private CheckManger() {}

    public static CheckManger instance() {
        return instance;
    }

    /**
     * Register a mod for version checking.
     *
     * @param modid the mod's identifier
     * @param pars  String[]{modname, localVersion, apiUrl}
     */
    public void addMod(String modid, String[] pars) {
        this.modPool.put(modid, pars);
        // Fix: was println(modid + pars), which prints the array's identity
        // hash instead of its contents.
        System.out.println(modid + Arrays.toString(pars));
    }

    /** Record a newer published version for a mod (called by fetcher threads). */
    public void addNewVersion(String modid, String newVersion) {
        this.latestVersion.put(modid, newVersion);
    }

    /** On player login, report every mod for which a newer version was found. */
    @SubscribeEvent
    public void enterWorldHandler(PlayerLoggedInEvent e) {
        if (!alerted) {
            EntityPlayer player = e.player;
            String[] pars;
            for (String modid : this.latestVersion.keySet()) {
                pars = this.modPool.get(modid);
                // chat.newversion takes (modname, latest version)
                player.addChatMessage(new ChatComponentTranslation("chat.newversion", pars[0], this.latestVersion.get(modid)));
            }
        }
    }

    /** Register event handlers and spawn one fetcher thread per registered mod. */
    @RegPostInitCallback
    public static void init() {
        MinecraftForge.EVENT_BUS.register(instance);
        FMLCommonHandler.instance().bus().register(instance);
        // run threads
        for (String modid : instance.modPool.keySet()) {
            Fetcher fetcher = new Fetcher(modid);
            Thread thread = new Thread(fetcher);
            thread.start();
        }
    }
}

/**
 * Downloads the release list of one mod from its API URL and, if the local
 * version is known and outdated, reports the newest release to
 * {@link CheckManger}.
 */
class Fetcher implements Runnable {
    final URL api_url;
    final String modid;
    final String localVersion;

    public Fetcher(String modid) {
        this.modid = modid;
        /** modname, localVersion, apiurl */
        String[] pars = CheckManger.instance().modPool.get(modid);
        this.localVersion = pars[1];
        URL url;
        try {
            url = new URL(pars[2]);
        } catch (MalformedURLException e) {
            e.printStackTrace();
            url = null;
        }
        this.api_url = url;
    }

    @Override
    public void run() {
        if (this.api_url == null)
            return;
        Gson gson = new Gson();
        Type dict = new TypeToken<List<Map<String, Object>>>() {}.getType();
        List<Map<String, Object>> releases = null;
        // Fix: close the reader (was leaked on every run).
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(this.api_url.openStream()))) {
            releases = gson.fromJson(reader, dict);
        } catch (Exception e) {
            e.printStackTrace();
        }
        // Fix: previously dereferenced releases without a null check, so any
        // network/parse failure crashed the thread with a NullPointerException.
        if (releases == null || releases.isEmpty())
            return;
        HashMap<String, Integer> versions = new HashMap<>(releases.size());
        for (int i = 0; i < releases.size(); ++i) {
            versions.put((String) releases.get(i).get("tag_name"), i);
        }
        // unknown local version: cannot tell whether it is outdated
        if (!versions.containsKey(this.localVersion))
            return;
        // releases[0] is assumed to be the newest release
        String latestVersion = (String) releases.get(0).get("tag_name");
        // Fix: only report when the latest release actually differs from the
        // installed version (previously users were "alerted" about the version
        // they already had).
        if (!this.localVersion.equals(latestVersion)) {
            CheckManger.instance().addNewVersion(this.modid, latestVersion);
        }
    }
}
package com.adms.mglplanlv.entity;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.NamedNativeQueries;
import javax.persistence.NamedNativeQuery;

import com.adms.common.domain.BaseDomain;

/**
 * Result-row entity for the MGL plan-level stored procedures. Each named
 * native query below EXECs one SQL Server stored procedure whose result set
 * maps onto this entity's columns (ID, PRODUCT, PLAN_TYPE, NUM_OF_FILE, TYP,
 * AMP). The positional '?' placeholders are the procedure parameters supplied
 * by the caller.
 */
@Entity
@NamedNativeQueries({
    // MGL_PLAN_LV_FACTORY
    @NamedNativeQuery(
            name = "execPlanLvAll",
            query = " EXEC [dbo].[MGL_PLAN_LV_FACTORY] ?, ?, ? ",
            resultClass = PlanLvValue.class),
    // MGL_PLAN_LV_FACTORY_MTD
    @NamedNativeQuery(
            name = "execPlanLvAllMTD",
            query = " EXEC [dbo].[MGL_PLAN_LV_FACTORY_MTD] ?, ? ",
            resultClass = PlanLvValue.class),
    @NamedNativeQuery(
            name = "execPlanLvAllYTD",
            query = " EXEC [dbo].[MGL_PLAN_LV_FACTORY_YTD] ?, ? ",
            resultClass = PlanLvValue.class),
    // <!-- MTL Kbank -->
    @NamedNativeQuery(
            name = "execPlanLvValueForMtlKbankMTD",
            query = " EXEC [dbo].[MGL_PLAN_LV_DATA_FOR_MTL_KBANK_MTD] ?, ? ",
            resultClass = PlanLvValue.class),
    @NamedNativeQuery(
            name = "execPlanLvValueForMtlKbankYTD",
            query = " EXEC [dbo].[MGL_PLAN_LV_DATA_FOR_MTL_KBANK_YTD] ?, ? ",
            resultClass = PlanLvValue.class),
    // <!-- MTL Broker -->
    @NamedNativeQuery(
            name = "execPlanLvValueForMTLBrokerMTD",
            query = " EXEC [dbo].[MGL_PLAN_LV_DATA_FOR_MTL_BROKER_MTD] ?, ? ",
            resultClass = PlanLvValue.class),
    @NamedNativeQuery(
            name = "execPlanLvValueForMTLBrokerYTD",
            query = " EXEC [dbo].[MGL_PLAN_LV_DATA_FOR_MTL_BROKER_YTD] ?, ? ",
            resultClass = PlanLvValue.class),
    // <!-- MTI KBank -->
    @NamedNativeQuery(
            name = "execPlanLvValueForMTIKBankMTD",
            query = " EXEC [dbo].[MGL_PLAN_LV_DATA_FOR_MTI_KBANK_MTD] ?, ? ",
            resultClass = PlanLvValue.class),
    @NamedNativeQuery(
            name = "execPlanLvValueForMTIKBankYTD",
            query = " EXEC [dbo].[MGL_PLAN_LV_DATA_FOR_MTI_KBANK_YTD] ?, ? ",
            resultClass = PlanLvValue.class),
    // <!-- MSIG UOB -->
    @NamedNativeQuery(
            name = "execPlanLvValueForMSIGUOBMTD",
            query = " EXEC [dbo].[MGL_PLAN_LV_DATA_FOR_MSIG_UOB_MTD] ?, ? ",
            resultClass = PlanLvValue.class),
    @NamedNativeQuery(
            name = "execPlanLvValueForMSIGUOBYTD",
            query = " EXEC [dbo].[MGL_PLAN_LV_DATA_FOR_MSIG_UOB_YTD] ?, ? ",
            resultClass = PlanLvValue.class),
    // <!-- MSIG Broker -->
    @NamedNativeQuery(
            name = "execPlanLvValueForMSIGBrokerMTD",
            query = " EXEC [dbo].[MGL_PLAN_LV_DATA_FOR_MSIG_BROKER_MTD] ?, ? ",
            resultClass = PlanLvValue.class),
    @NamedNativeQuery(
            name = "execPlanLvValueForMSIGBrokerYTD",
            query = " EXEC [dbo].[MGL_PLAN_LV_DATA_FOR_MSIG_BROKER_YTD] ?, ? ",
            resultClass = PlanLvValue.class),
    // <!-- FWD TVD -->
    @NamedNativeQuery(
            name = "execPlanLvValueForFWDTVDMTD",
            query = " EXEC [dbo].[MGL_PLAN_LV_DATA_FOR_FWD_TVD_MTD] ?, ? ",
            resultClass = PlanLvValue.class),
    @NamedNativeQuery(
            name = "execPlanLvValueForFWDTVDYTD",
            query = " EXEC [dbo].[MGL_PLAN_LV_DATA_FOR_FWD_TVD_YTD] ?, ? ",
            resultClass = PlanLvValue.class),
    // <!-- GEN Health -->
    @NamedNativeQuery(
            name = "execPlanLvValueForGENHEALTHMTD",
            query = " EXEC [dbo].[MGL_PLAN_LV_DATA_FOR_GEN_HEALTH_MTD] ?, ? ",
            resultClass = PlanLvValue.class),
    @NamedNativeQuery(
            name = "execPlanLvValueForGENHEALTHYTD",
            query = " EXEC [dbo].[MGL_PLAN_LV_DATA_FOR_GEN_HEALTH_YTD] ?, ? ",
            resultClass = PlanLvValue.class)
})
public class PlanLvValue extends BaseDomain {

    private static final long serialVersionUID = 6691695767644325655L;

    // Surrogate primary key of the result row.
    @Id
    @Column(name = "ID")
    private Long id;

    // Product code, e.g. MTL/MTI/MSIG/FWD/GEN lines above.
    @Column(name = "PRODUCT")
    private String product;

    @Column(name = "PLAN_TYPE")
    private String planType;

    @Column(name = "NUM_OF_FILE")
    private Integer numOfFile;

    @Column(name = "TYP")
    private Double typ;

    @Column(name = "AMP")
    private Double amp;

    public String getProduct() {
        return product;
    }

    public void setProduct(String product) {
        this.product = product;
    }

    public String getPlanType() {
        return planType;
    }

    public void setPlanType(String planType) {
        this.planType = planType;
    }

    public Integer getNumOfFile() {
        return numOfFile;
    }

    public void setNumOfFile(Integer numOfFile) {
        this.numOfFile = numOfFile;
    }

    public Double getTyp() {
        return typ;
    }

    public void setTyp(Double typ) {
        this.typ = typ;
    }

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public Double getAmp() {
        return amp;
    }

    public void setAmp(Double amp) {
        this.amp = amp;
    }

    @Override
    public String toString() {
        return "PlanLvValue [product=" + product + ", planType=" + planType + ", numOfFile=" + numOfFile + ", typ=" + typ + ", amp=" + amp + "]";
    }

}
package com.amihaiemil.eoyaml;

import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * A plain scalar value read from somewhere.
 * @author Mihai Andronace (amihaiemil@gmail.com)
 * @version $Id$
 * @since 3.1.3
 */
final class ReadPlainScalar extends BaseScalar {

    /**
     * Pattern to match scalars in mappings or sequences.
     *
     * Ignore zero or more spaces and a hyphen (-) followed by one
     * or more spaces.
     *
     * A quoted scalar literal is inside:
     *  - ('(?:[^'\\]|\\.)*') : a single (') quoted string or
     *  - ("(?:[^"\\]|\\.)*") : double (") quoted string
     *
     * A scalar for a mapping are characters after:
     *  - .*:[ ]+(.*) : Any characters before a colon followed by
     *    one or more spaces.
     *
     * The sequence scalar is:
     *  - -[ ]+(.*) : Any characters after a hyphen (-) and one more spaces.
     */
    private static final Pattern QUOTED_LITERAL_MAP_SEQ = Pattern.compile("^(" +
        "[ ]*(-[ ]+)" +
        "(('(?:[^'\\\\]|\\\\.)*')|" +
        "(\"(?:[^\"\\\\]|\\\\.)*\"))|" +
        "(.*:[ ]+(.*))|" +
        "(-[ ]+(.*))" +
        ")$");

    /**
     * Regex group index that matches quoted literals.
     */
    private static final int QUOTED_LITERAL_GROUP = 3;

    /**
     * Regex group index that matches scalar values of mappings.
     */
    private static final int MAPPING_GROUP = 7;

    /**
     * Regex group index that matches scalar value of non-quotes sequences.
     */
    private static final int SEQUENCE_GROUP = 9;

    /**
     * All YAML Lines of the document.
     */
    private final AllYamlLines all;

    /**
     * Line where the plain scalar value is supposed to be found.
     * The Scalar can be either after the ":" character, if this
     * line is from a mapping, or after the "-" character, if
     * this line is from a sequence, or it represents the whole line,
     * if no "-" or ":" are found.
     */
    private final YamlLine scalar;

    /**
     * Constructor.
     * @param all All lines of the document.
     * @param scalar YamlLine containing the scalar.
     */
    ReadPlainScalar(final AllYamlLines all, final YamlLine scalar) {
        this.all = all;
        this.scalar = scalar;
    }

    /**
     * Unescaped String value of this scalar. Pay attention, if the
     * scalar's value is the "null" String, then we return null, because
     * "null" is a reserved keyword in YAML, indicating a null Scalar.
     * @checkstyle ReturnCount (50 lines)
     * @return String or null if the Strings value is "null".
     */
    @Override
    public String value() {
        // Default to the whole trimmed line; a regex match narrows it down
        // to the quoted literal, the mapping value or the sequence value.
        String found = this.scalar.trimmed();
        final Matcher groups = this.escapedSequenceScalar(this.scalar);
        if(groups.matches()) {
            final String quoted = groups.group(QUOTED_LITERAL_GROUP);
            final String mapping = groups.group(MAPPING_GROUP);
            final String sequence = groups.group(SEQUENCE_GROUP);
            if(quoted != null) {
                found = quoted;
            } else if(mapping != null) {
                found = mapping.trim();
            } else if(sequence != null) {
                found = sequence.trim();
            }
        }
        if("null".equals(found)) {
            return null;
        }
        return this.unescape(found);
    }

    /**
     * Comment referring to this scalar, or an empty built comment when the
     * scalar line is a null line.
     */
    @Override
    public Comment comment() {
        if(this.scalar instanceof YamlLine.NullYamlLine) {
            return new BuiltComment(this, "");
        }
        return new ReadComment(
            new Skip(
                this.all,
                line -> line.number() != this.scalar.number()
            ),
            this
        );
    }

    /**
     * Remove the possible escaping quotes or apostrophes surrounding the
     * given value.
     * @param value The value to unescape.
     * @return The value without quotes or apostrophes.
     */
    private String unescape(final String value) {
        if(value != null && value.length() > 2) {
            final boolean doubleQuoted =
                value.startsWith("\"") && value.endsWith("\"");
            final boolean singleQuoted =
                value.startsWith("'") && value.endsWith("'");
            if(doubleQuoted || singleQuoted) {
                return value.substring(1, value.length() - 1);
            }
        }
        return value;
    }

    /**
     * Match the given line against the quoted-literal/mapping/sequence
     * pattern.
     * @param dashLine Line.
     * @return Matcher over the trimmed line.
     */
    private Matcher escapedSequenceScalar(final YamlLine dashLine) {
        return QUOTED_LITERAL_MAP_SEQ.matcher(dashLine.trimmed());
    }
}
package com.avairebot.orion.requests;

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;

import java.io.IOException;

/**
 * Thin wrapper around an okhttp {@link okhttp3.Response} that exposes the body
 * as a String or deserialized via Gson.
 */
public class Response {

    // Gson is thread-safe and relatively expensive to build; share one instance
    // instead of creating a new one per toService() call.
    private static final Gson GSON = new GsonBuilder().serializeNulls().create();

    private final okhttp3.Response response;

    // Cached body text. okhttp's ResponseBody.string() consumes and closes the
    // body, so it can only be called once; without this cache a second
    // toString() (e.g. toService() followed by logging) would throw
    // IllegalStateException. Null until the first successful read.
    private String body;

    public Response(okhttp3.Response response) {
        this.response = response;
    }

    /** @return the underlying okhttp response. */
    public okhttp3.Response getResponse() {
        return response;
    }

    /**
     * Deserialize the response body into the given class.
     *
     * @param clazz target type
     * @return the deserialized object, or null if the body could not be read
     */
    public Object toService(Class<?> clazz) {
        return GSON.fromJson(toString(), clazz);
    }

    /**
     * @return the response body as text, or null if reading it failed
     *         (preserves the original contract of returning null on error).
     */
    @Override
    public String toString() {
        if (body == null) {
            try {
                body = response.body().string();
            } catch (IOException e) {
                e.printStackTrace();
                return null;
            }
        }
        return body;
    }
}
package com.bio4j.model.enzymedb.nodes;

import com.bio4j.model.enzymedb.EnzymeDBGraph.EnzymeType;
import com.bio4j.model.enzymedb.relationships.EnzymaticActivity;
import com.bio4j.model.go.relationships.*;
import com.bio4j.model.uniprot.nodes.Protein;
import com.ohnosequences.typedGraphs.Node;
import com.ohnosequences.typedGraphs.Property;

import java.util.List;

/**
 * A node representing an ExPASy ENZYME database entry. The nested interfaces
 * declare the typed properties of the node (name + value class) and the
 * accessor methods expose their values.
 */
public interface Enzyme<
    N extends Enzyme<N, NT>,
    NT extends EnzymeType<N, NT>
> extends Node<N, NT> {

    public String id();

    // NOTE(review): this accessor returns String while the cofactors property
    // below declares valueClass String[] — confirm which representation the
    // implementations actually use.
    public String cofactors();

    public String officialName();

    //public String alternateNames();

    public String catalyticActivity();

    public String comment();

    // NOTE(review): returns String but the prositeCrossReferences property
    // below declares valueClass String[] — same mismatch as cofactors().
    public String prositeCrossReferences();

    // properties

    // EC number identifier of the enzyme entry.
    public static interface id<
        N extends Enzyme<N, NT>,
        NT extends EnzymeType<N, NT>,
        P extends id<N, NT, P>
    > extends Property<N, NT, P, String> {
        @Override
        public default String name() {
            return "id";
        }

        @Override
        public default Class<String> valueClass() {
            return String.class;
        }
    }

    public static interface cofactors<
        N extends Enzyme<N, NT>,
        NT extends EnzymeType<N, NT>,
        P extends cofactors<N, NT, P>
    > extends Property<N, NT, P, String[]> {
        @Override
        public default String name() {
            return "cofactors";
        }

        @Override
        public default Class<String[]> valueClass() {
            return String[].class;
        }
    }

    public static interface officialName<
        N extends Enzyme<N, NT>,
        NT extends EnzymeType<N, NT>,
        P extends officialName<N, NT, P>
    > extends Property<N, NT, P, String> {
        @Override
        public default String name() {
            return "officialName";
        }

        @Override
        public default Class<String> valueClass() {
            return String.class;
        }
    }

    // Commented-out property kept for reference; re-enable together with the
    // alternateNames() accessor above if needed.
    // public static interface alternateNames<
    //     N extends Enzyme<N, NT>,
    //     NT extends EnzymeType<N, NT>,
    //     P extends alternateNames<N, NT, P>
    // > extends Property<N, NT, P, String> {
    //     @Override
    //     public default String name() {
    //         return "alternateNames";
    //     }
    //     @Override
    //     public default Class<String> valueClass() {
    //         return String.class;
    //     }
    // }

    public static interface catalyticActivity<
        N extends Enzyme<N, NT>,
        NT extends EnzymeType<N, NT>,
        P extends catalyticActivity<N, NT, P>
    > extends Property<N, NT, P, String> {
        @Override
        public default String name() {
            return "catalyticActivity";
        }

        @Override
        public default Class<String> valueClass() {
            return String.class;
        }
    }

    public static interface comment<
        N extends Enzyme<N, NT>,
        NT extends EnzymeType<N, NT>,
        P extends comment<N, NT, P>
    > extends Property<N, NT, P, String> {
        @Override
        public default String name() {
            return "comment";
        }

        @Override
        public default Class<String> valueClass() {
            return String.class;
        }
    }

    public static interface prositeCrossReferences<
        N extends Enzyme<N, NT>,
        NT extends EnzymeType<N, NT>,
        P extends prositeCrossReferences<N, NT, P>
    > extends Property<N, NT, P, String[]> {
        @Override
        public default String name() {
            return "prositeCrossReferences";
        }

        @Override
        public default Class<String[]> valueClass() {
            return String[].class;
        }
    }

    // relationships

    // enzymaticActivity
    // incoming: proteins annotated with this enzymatic activity
    public List<? extends EnzymaticActivity> enzymaticActivity_in();
    public List<? extends Protein> enzymaticActivity_inNodes();
}
package com.buteam3.controller;

import javax.servlet.http.HttpServletRequest;
import javax.validation.Valid;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.ModelMap;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.util.StringUtils;
import com.buteam3.repository.MessageRepository;
import com.buteam3.entity.Message;
import com.stormpath.sdk.account.AccountList;
import com.stormpath.sdk.application.Application;
import com.stormpath.sdk.servlet.application.ApplicationResolver;

/**
 * Class is controller object which handles interaction between
 * front-end and database for chat tool.
 *
 * @author buteam3
 */
@Controller
public class ChatController {

  private MessageRepository repository;

  /**
   * Constructor for home controller. Takes in a repository
   * of messages to be loaded into chat window in UI.
   *
   * @param repository repository of messages
   */
  @Autowired
  public ChatController(MessageRepository repository) {
    this.repository = repository;
  }

  /**
   * Renders the chat view with all Stormpath accounts and the current
   * message list in the model.
   *
   * <p>NOTE(review): this method has no {@code @RequestMapping}, so Spring will
   * never dispatch to it — confirm the intended URL and add the mapping.
   *
   * @param model model map populated with "accounts" and the chat messages
   * @param req   current request, used to resolve the Stormpath application
   * @return the "chat" view name
   */
  public String chat(ModelMap model, HttpServletRequest req) {
    Application application = ApplicationResolver.INSTANCE.getApplication(req);
    AccountList accounts = application.getAccounts();
    model.addAttribute("accounts", accounts);
    chatmsg(model);
    return "chat";
  }

  /**
   * Loads the full list of messages (mid &gt; 0) from the database into the model.
   *
   * @param model model map linking messages to chat UI chatbox
   */
  private void chatmsg(ModelMap model) {
    List<Message> message = repository.findByMidGreaterThan(0);
    model.addAttribute("chatbox", message);
  }

  /**
   * Method called when a new message is entered. Saves the new message
   * to the database through message repository (only when validation passed),
   * then re-renders the chat fragment.
   *
   * @param model   model map for the chat fragment
   * @param message the posted message, bean-validated
   * @param result  validation outcome for {@code message}
   * @return the chat fragment view name
   */
  @RequestMapping(value = "/chat_msg/new", method = RequestMethod.POST)
  public String insertData(ModelMap model, @Valid Message message, BindingResult result) {
    if (!result.hasErrors()) {
      repository.save(message);
    }
    chatmsg(model);
    return "fragments/chat";
  }

  /**
   * Returns all messages newer than the given id as a comma-delimited string.
   *
   * @param mid lower bound (exclusive) on the message id
   * @return comma-delimited string of the matching messages
   */
  @RequestMapping(value = "/chat_msg/read", method = RequestMethod.POST)
  @ResponseBody // FIX: without this, the returned data string would be resolved as a view name
  public String readData(Long mid) {
    List<Message> message = repository.findByMidGreaterThan(mid);
    // FIX: previously passed the class literal `Message` (does not compile);
    // join the fetched list instead.
    String commaDelimitedString = StringUtils.collectionToCommaDelimitedString(message);
    return commaDelimitedString;
  }
}
package com.ejlchina.searcher.util;

import com.ejlchina.searcher.param.Operator;

import java.io.Serializable;
import java.lang.invoke.SerializedLambda;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;

/**
 * Fluent builder for search-parameter maps.
 *
 * <p>Field values are stored under {@code field-0, field-1, ...}; the operator and
 * ignore-case flags for the most recently added field are stored under
 * {@code field-op} / {@code field-ic}. All well-known key fragments can be remapped
 * globally via {@link #config(String, String)}.
 */
public class MapBuilder {

  public static final String SEPARATOR = "-";
  public static final String OP_SUFFIX = "op";
  public static final String IC_SUFFIX = "ic";
  public static final String SORT = "sort";
  public static final String ORDER = "order";
  public static final String PAGE = "page";
  public static final String SIZE = "size";
  public static final String OFFSET = "offset";
  public static final String MAX = "max";

  // global key-fragment overrides, consulted by config(String)
  private static final Map<String, String> CONFIGS = new HashMap<>();

  /** Serializable getter reference, e.g. {@code User::getName}, resolvable to a field name. */
  @FunctionalInterface
  public interface FieldFunction<T, R> extends Function<T, R>, Serializable {
  }

  // caches lambda -> field-name lookups (reflection on writeReplace is costly);
  // NOTE(review): keyed on lambda instances — distinct call sites produce distinct
  // instances, so hits are only guaranteed for a reused reference. Confirm acceptable.
  private final Map<FieldFunction<?, ?>, String> cache = new ConcurrentHashMap<>();

  private final Map<String, Object> map;

  // the field most recently passed to field(...); target of op(...)/ic(...)
  private String lastField = null;

  public MapBuilder(Map<String, Object> map) {
    this.map = map;
  }

  /** Globally remaps a well-known key fragment (e.g. PAGE) to a custom name. */
  public static void config(String key, String value) {
    CONFIGS.put(key, value);
  }

  /** Resolves a key fragment, falling back to the fragment itself when unconfigured. */
  public static String config(String key) {
    String config = CONFIGS.get(key);
    if (config != null) {
      return config;
    }
    return key;
  }

  /** Puts a raw key/value pair into the underlying map. */
  public MapBuilder put(String key, Object value) {
    map.put(key, value);
    return this;
  }

  /** Adds field values, resolving the field name from a getter reference. */
  public <T> MapBuilder field(FieldFunction<T, ?> fieldFn, Object... values) {
    return field(toFieldName(fieldFn), values);
  }

  /** Adds field values under {@code fieldName-0, fieldName-1, ...} and remembers the field. */
  public <T> MapBuilder field(String fieldName, Object... values) {
    String separator = config(SEPARATOR);
    for (int index = 0; index < values.length; index++) {
      map.put(fieldName + separator + index, values[index]);
    }
    lastField = fieldName;
    return this;
  }

  /** Sets the operator for the field added by the preceding {@code field(...)} call. */
  public <T> MapBuilder op(Operator operator) {
    if (lastField == null) {
      // FIX: message previously referenced a nonexistent method "val(...)"
      throw new IllegalStateException(
          "the method [ op(Operator operator) ] must go after [ field(FieldFunction<T, ?> fieldFn, Object... values) ]");
    }
    map.put(lastField + config(SEPARATOR) + config(OP_SUFFIX), operator);
    return this;
  }

  /** Sets the operator (by name) for the field added by the preceding {@code field(...)} call. */
  public <T> MapBuilder op(String operator) {
    if (lastField == null) {
      throw new IllegalStateException(
          "the method [ op(String operator) ] must go after [ field(FieldFunction<T, ?> fieldFn, Object... values) ]");
    }
    map.put(lastField + config(SEPARATOR) + config(OP_SUFFIX), operator);
    return this;
  }

  /** Marks the last field as ignore-case. */
  public <T> MapBuilder ic() {
    if (lastField == null) {
      throw new IllegalStateException(
          "the method [ ic() ] must go after [ field(FieldFunction<T, ?> fieldFn, Object... values) ]");
    }
    return ic(true);
  }

  /** Sets the ignore-case flag for the last field. */
  public <T> MapBuilder ic(boolean ignoreCase) {
    if (lastField == null) {
      throw new IllegalStateException(
          "the method [ ic(boolean ignoreCase) ] must go after [ field(FieldFunction<T, ?> fieldFn, Object... values) ]");
    }
    map.put(lastField + config(SEPARATOR) + config(IC_SUFFIX), ignoreCase);
    return this;
  }

  /** Sets the ignore-case flag (as a string) for the last field. */
  public <T> MapBuilder ic(String ignoreCase) {
    if (lastField == null) {
      throw new IllegalStateException(
          "the method [ ic(String ignoreCase) ] must go after [ field(FieldFunction<T, ?> fieldFn, Object... values) ]");
    }
    map.put(lastField + config(SEPARATOR) + config(IC_SUFFIX), ignoreCase);
    return this;
  }

  /** Sets sort field (from a getter reference) and sort order. */
  public <T> MapBuilder orderBy(FieldFunction<T, ?> fieldFn, String order) {
    return orderBy(toFieldName(fieldFn), order);
  }

  /** Sets sort field and sort order. */
  public <T> MapBuilder orderBy(String fieldName, String order) {
    map.put(config(SORT), fieldName);
    map.put(config(ORDER), order);
    return this;
  }

  /** Sets page-based pagination parameters. */
  public <T> MapBuilder page(int page, int size) {
    map.put(config(PAGE), page);
    map.put(config(SIZE), size);
    return this;
  }

  /** Sets offset-based pagination parameters. */
  public <T> MapBuilder limit(int offset, int max) {
    map.put(config(OFFSET), offset);
    map.put(config(MAX), max);
    return this;
  }

  /**
   * Resolves a serializable getter reference to its property name by reading the
   * lambda's {@code writeReplace} result ({@link SerializedLambda}) and stripping
   * the {@code get}/{@code is} prefix from the implementing method name.
   *
   * @throws IllegalStateException when the method name has no get/is prefix or reflection fails
   */
  private String toFieldName(FieldFunction<?, ?> fieldFn) {
    String fieldName = cache.get(fieldFn);
    if (fieldName != null) {
      return fieldName;
    }
    try {
      Method wrMethod = fieldFn.getClass().getDeclaredMethod("writeReplace");
      boolean isInaccessible = !wrMethod.isAccessible();
      if (isInaccessible) {
        wrMethod.setAccessible(true);
      }
      SerializedLambda sLambda = (SerializedLambda) wrMethod.invoke(fieldFn);
      if (isInaccessible) {
        // restore original accessibility
        wrMethod.setAccessible(false);
      }
      String methodName = sLambda.getImplMethodName();
      // a name can only match one prefix; else-if makes that explicit
      if (methodName.startsWith("get") && methodName.length() > 3) {
        fieldName = StringUtils.firstCharToLoweCase(methodName.substring(3));
      } else if (methodName.startsWith("is") && methodName.length() > 2) {
        fieldName = StringUtils.firstCharToLoweCase(methodName.substring(2));
      }
      if (fieldName != null) {
        cache.put(fieldFn, fieldName);
        return fieldName;
      }
      throw new IllegalStateException("can not convert method [" + methodName + "] to field name");
    } catch (ReflectiveOperationException e) {
      // FIX: previously threw with an empty message
      throw new IllegalStateException("can not resolve field name from lambda", e);
    }
  }

  /** Returns the underlying parameter map. */
  public Map<String, Object> build() {
    return map;
  }
}
package com.emc.rest.smart; import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; import org.glassfish.jersey.apache.connector.ApacheClientProperties; import org.glassfish.jersey.apache.connector.ApacheConnectorProvider; import org.glassfish.jersey.client.ClientConfig; import org.glassfish.jersey.client.ClientProperties; import org.glassfish.jersey.client.RequestEntityProcessing; import org.glassfish.jersey.client.spi.ConnectorProvider; import javax.ws.rs.client.Client; import javax.ws.rs.client.ClientBuilder; public final class SmartClientFactory { public static Client createSmartClient(SmartConfig smartConfig) { return createSmartClient(smartConfig, new ApacheConnectorProvider()); } public static Client createSmartClient(SmartConfig smartConfig, ConnectorProvider baseConnectorProvider) { // init Jersey config ClientConfig clientConfig = new ClientConfig(); // set up multi-threaded connection pool PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager(); // 200 maximum active connections (should be more than enough for any JVM instance) connectionManager.setDefaultMaxPerRoute(200); connectionManager.setMaxTotal(200); clientConfig.property(ApacheClientProperties.CONNECTION_MANAGER, connectionManager); // enable request buffering to ensure content-length is always set clientConfig.property(ClientProperties.REQUEST_ENTITY_PROCESSING, RequestEntityProcessing.BUFFERED); clientConfig.property(ClientProperties.OUTBOUND_CONTENT_LENGTH_BUFFER, 65536); // 64k entity buffer // pass in jersey parameters from calling code (allows customization of client) for (String propName : smartConfig.getProperties().keySet()) { clientConfig.property(propName, smartConfig.property(propName)); } // inject SmartConnector provider (this is the Jersey integration point of the load balancer) clientConfig.connectorProvider(new SmartConnectorProvider(baseConnectorProvider, smartConfig)); // set up polling for updated host list (if polling is 
disabled in smartConfig or there's no host list provider, // nothing will happen) PollingDaemon pollingDaemon = new PollingDaemon(smartConfig); pollingDaemon.start(); // build Jersey client return ClientBuilder.newClient(clientConfig); } private SmartClientFactory() { } }
package com.enderio.core.common.util;

import java.util.ArrayList;
import java.util.List;
import java.util.Random;

import com.enderio.core.EnderCore;
import com.enderio.core.api.common.util.IItemReceptor;

import net.minecraft.block.Block;
import net.minecraft.entity.item.EntityItem;
import net.minecraft.inventory.IInventory;
import net.minecraft.inventory.ISidedInventory;
import net.minecraft.inventory.InventoryLargeChest;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.JsonToNBT;
import net.minecraft.nbt.NBTException;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.tileentity.TileEntityChest;
import net.minecraft.util.MathHelper;
import net.minecraft.world.World;
import net.minecraftforge.common.util.ForgeDirection;
import net.minecraftforge.oredict.OreDictionary;
import net.minecraftforge.oredict.ShapedOreRecipe;
import net.minecraftforge.oredict.ShapelessOreRecipe;
import cofh.api.transport.IItemDuct;
import cpw.mods.fml.common.Loader;

/**
 * Static helpers for parsing, comparing, spawning and inserting {@link ItemStack}s.
 */
public class ItemUtil {

  /** Registered receptors consulted by {@link #doInsertItem(Object, ItemStack, ForgeDirection)}. */
  public static final List<IItemReceptor> receptors = new ArrayList<IItemReceptor>();

  private static final Random rand = new Random();

  static {
    try {
      Class.forName("crazypants.util.BuildcraftUtil");
    } catch (Exception e) {
      if (Loader.isModLoaded("BuildCraft|Transport")) {
        Log.warn("ItemUtil: Could not register Build Craft pipe handler. Machines will not be able to output to BC pipes.");
      }
      //Don't log if BC isn't installed, but we still check in case another mod is using their API
    }
  }

  /**
   * Turns a String into an item that can be used in a recipe. This is one of:
   * <ul>
   * <li>String</li>
   * <li>Item</li>
   * <li>Block</li>
   * <li>ItemStack</li>
   * </ul>
   * Because this method can return a String, it is highly recommended that you
   * use the result in a {@link ShapedOreRecipe} or {@link ShapelessOreRecipe}.
   *
   * @param string
   *          The String to parse.
   * @return An object for use in recipes.
   */
  public static Object parseStringIntoRecipeItem(String string) {
    return parseStringIntoRecipeItem(string, false);
  }

  /**
   * Turns a String into an item that can be used in a recipe. This is one of:
   * <ul>
   * <li>String</li>
   * <li>Item</li>
   * <li>Block</li>
   * <li>ItemStack</li>
   * </ul>
   * Because this method can return a String, it is highly recommended that you
   * use the result in a {@link ShapedOreRecipe} or {@link ShapelessOreRecipe}.
   *
   * @see #parseStringIntoItemStack(String)
   * @param string
   *          The String to parse.
   * @param forceItemStack
   *          True if the result should be forced to be an ItemStack.
   * @return An object for use in recipes.
   */
  public static Object parseStringIntoRecipeItem(String string, boolean forceItemStack) {
    if ("null".equals(string)) {
      return null;
    } else if (OreDictionary.getOres(string).isEmpty()) {
      ItemStack stack = null;

      String[] info = string.split(";");
      Object temp = null;
      int damage = OreDictionary.WILDCARD_VALUE;
      temp = Item.itemRegistry.getObject(info[0]);
      if (info.length > 1) {
        damage = Integer.parseInt(info[1]);
      }

      if (temp instanceof Item) {
        stack = new ItemStack((Item) temp, 1, damage);
      } else if (temp instanceof Block) {
        stack = new ItemStack((Block) temp, 1, damage);
      } else if (temp instanceof ItemStack) {
        stack = ((ItemStack) temp).copy();
        stack.setItemDamage(damage);
      } else {
        throw new IllegalArgumentException(string
            + " is not a valid string. Strings should be either an oredict name, or in the format objectname;damage (damage is optional)");
      }

      return stack;
    } else if (forceItemStack) {
      return OreDictionary.getOres(string).get(0).copy();
    } else {
      return string;
    }
  }

  /**
   * Turns a string into an ItemStack.
   * <p>
   * This is basically a convenience method that casts the result of
   * {@link #parseStringIntoRecipeItem(String, boolean)}, but with one extra
   * feature.
   * <p>
   * A '#' character may be used at the end of the string to signify stack size,
   * e.g. "minecraft:diamond#2" would be a stack of 2 diamonds.
   * <p>
   * A '$' character may likewise introduce a JSON NBT tag.
   * <p>
   * The stack size will automatically be clamped to be below the given Item's
   * max stack size.
   *
   * @param string
   *          The String to parse.
   * @return An ItemStack the string represents.
   */
  public static ItemStack parseStringIntoItemStack(String string) {
    int size = 1;
    // FIX: this statement was truncated in the source; '#' is the documented
    // stack-size delimiter (see the '#'-based replace below).
    int numIdx = string.indexOf('#');
    int nbtIdx = string.indexOf('$');
    NBTTagCompound tag = null;

    if (numIdx > -1) {
      String num = string.substring(numIdx + 1, nbtIdx == -1 ? string.length() : nbtIdx);

      try {
        size = Integer.parseInt(num);
      } catch (NumberFormatException e) {
        throw new IllegalArgumentException(num + " is not a valid stack size", e);
      }

      string = string.replace('#' + num, "");
      // the '#'+num segment was removed, so the '$' position shifts left
      nbtIdx -= num.length() + 1;
    }

    if (nbtIdx > -1) {
      String nbt = string.substring(nbtIdx + 1);
      try {
        tag = (NBTTagCompound) JsonToNBT.func_150315_a(nbt);
      } catch (NBTException e) {
        throw new IllegalArgumentException(nbt + " is not valid NBT json.", e);
      }

      string = string.replace('$' + nbt, "");
    }

    ItemStack stack = (ItemStack) parseStringIntoRecipeItem(string, true);
    stack.stackSize = MathHelper.clamp_int(size, 1, stack.getMaxStackSize());
    stack.setTagCompound(tag);
    return stack;
  }

  /**
   * Returns the appropriate config string for the given {@link ItemStack}
   * <p>
   * This does not take into account ore dict.
   * <p>
   * NOTE(review): NBT ('$') is not serialized here even though
   * {@link #parseStringIntoItemStack(String)} can read it — confirm intended.
   *
   * @param stack
   *          The {@link ItemStack} to serialize
   * @param damage
   *          If damage should be taken into account
   * @param size
   *          If stack size should be taken into account
   * @return A string that will be the equivalent of if {@link ItemStack stack}
   *         was constructed from it using
   *         {@link #parseStringIntoItemStack(String)}
   */
  public static String getStringForItemStack(ItemStack stack, boolean damage, boolean size) {
    if (stack == null) {
      return null;
    }

    String base = Item.itemRegistry.getNameForObject(stack.getItem());

    if (damage) {
      base += ";" + stack.getItemDamage();
    }

    if (size) {
      base += "#" + stack.stackSize;
    }

    return base;
  }

  /**
   * Tests if two {@link ItemStack}s are completely equal, forgoing stack size.
   * This means that for this method to return true, Item type, damage value,
   * and NBT data of both ItemStacks must be identical.
   *
   * @param s1
   *          The first ItemStack to compare.
   * @param s2
   *          The second ItemStack to compare.
   * @author powercrystals
   */
  public static boolean stacksEqual(ItemStack s1, ItemStack s2) {
    if (s1 == null && s2 == null)
      return true;
    if (s1 == null || s2 == null)
      return false;
    if (!s1.isItemEqual(s2))
      return false;
    if (s1.getTagCompound() == null && s2.getTagCompound() == null)
      return true;
    if (s1.getTagCompound() == null || s2.getTagCompound() == null)
      return false;
    return s1.getTagCompound().equals(s2.getTagCompound());
  }

  /**
   * Spawns an ItemStack into the world with motion that simulates a normal
   * block drop.
   *
   * @param world
   *          The world object.
   * @param item
   *          The ItemStack to spawn.
   * @param x
   *          X coordinate of the block in which to spawn the entity.
   * @param y
   *          Y coordinate of the block in which to spawn the entity.
   * @param z
   *          Z coordinate of the block in which to spawn the entity.
   */
  public static void spawnItemInWorldWithRandomMotion(World world, ItemStack item, int x, int y, int z) {
    if (item != null) {
      spawnItemInWorldWithRandomMotion(new EntityItem(world, x + 0.5, y + 0.5, z + 0.5, item));
    }
  }

  /**
   * Spawns an EntityItem into the world with motion that simulates a normal
   * block drop.
   *
   * @param entity
   *          The entity to spawn.
   */
  public static void spawnItemInWorldWithRandomMotion(EntityItem entity) {
    entity.delayBeforeCanPickup = 10;
    // small random nudge in each axis, +/- 0.05
    float f = (rand.nextFloat() * 0.1f) - 0.05f;
    float f1 = (rand.nextFloat() * 0.1f) - 0.05f;
    float f2 = (rand.nextFloat() * 0.1f) - 0.05f;
    entity.motionX += f;
    entity.motionY += f1;
    entity.motionZ += f2;
    entity.worldObj.spawnEntityInWorld(entity);
  }

  /**
   * Returns true if the given stack has the given Ore Dictionary name applied
   * to it.
   *
   * @param stack
   *          The ItemStack to check.
   * @param oredict
   *          The oredict name.
   * @return True if the ItemStack matches the name passed.
   */
  public static boolean itemStackMatchesOredict(ItemStack stack, String oredict) {
    int[] ids = OreDictionary.getOreIDs(stack);
    for (int i : ids) {
      String name = OreDictionary.getOreName(i);
      if (name.equals(oredict)) {
        return true;
      }
    }
    return false;
  }

  /**
   * Gets an NBT tag from an ItemStack, creating it if needed. The tag returned
   * will always be the same as the one on the stack.
   *
   * @param stack
   *          The ItemStack to get the tag from.
   * @return An NBTTagCompound from the stack.
   */
  public static NBTTagCompound getNBTTag(ItemStack stack) {
    if (!stack.hasTagCompound()) {
      stack.stackTagCompound = new NBTTagCompound();
    }
    return stack.stackTagCompound;
  }

  /**
   * Builds a localized "Durability: remaining/max" tooltip line for the stack,
   * or null for a null stack.
   */
  public static String getDurabilityString(ItemStack item) {
    if (item == null) {
      return null;
    }
    return EnderCore.lang.localize("tooltip.durability") + " " + (item.getMaxDamage() - item.getItemDamage()) + "/" + item.getMaxDamage();
  }

  /**
   * Same contract as {@link #getNBTTag(ItemStack)} but checks the field directly.
   * NOTE(review): duplicates getNBTTag — consider consolidating; kept for API compat.
   */
  public static NBTTagCompound getOrCreateNBT(ItemStack stack) {
    if (stack.stackTagCompound == null) {
      stack.stackTagCompound = new NBTTagCompound();
    }
    return stack.stackTagCompound;
  }

  /**
   * Inserts an item into any supported target: sided/plain inventories, item
   * ducts, or any registered {@link IItemReceptor}.
   *
   * @return the number of items actually inserted (0 for unsupported targets).
   */
  public static int doInsertItem(Object into, ItemStack item, ForgeDirection side) {
    if (into == null || item == null) {
      return 0;
    }

    if (into instanceof ISidedInventory) {
      return ItemUtil.doInsertItemInv((ISidedInventory) into, item, side, true);
    } else if (into instanceof IInventory) {
      // getInventory resolves double chests into a single large inventory
      return ItemUtil.doInsertItemInv(getInventory((IInventory) into), item, side, true);
    } else if (into instanceof IItemDuct) {
      return ItemUtil.doInsertItem((IItemDuct) into, item, side);
    }

    for (IItemReceptor rec : receptors) {
      if (rec.canInsertIntoObject(into, side)) {
        return rec.doInsertItem(into, item, side);
      }
    }

    return 0;
  }

  /** Inserts into a conduit; returns the number of items the duct accepted. */
  public static int doInsertItem(IItemDuct con, ItemStack item, ForgeDirection inventorySide) {
    int startedWith = item.stackSize;
    ItemStack remaining = con.insertItem(inventorySide, item);
    if (remaining == null) {
      return startedWith;
    }
    return startedWith - remaining.stackSize;
  }

  /** Inserts into the slot range [startSlot, endSlot) of an inventory. */
  public static int doInsertItem(IInventory inv, int startSlot, int endSlot, ItemStack item) {
    return doInsertItemInv(inv, null, invSlotter.getInstance(startSlot, endSlot), item, ForgeDirection.UNKNOWN, true);
  }

  /** As above; {@code doInsert == false} only simulates the insertion. */
  public static int doInsertItem(IInventory inv, int startSlot, int endSlot, ItemStack item, boolean doInsert) {
    return doInsertItemInv(inv, null, invSlotter.getInstance(startSlot, endSlot), item, ForgeDirection.UNKNOWN, doInsert);
  }

  /*
   * Insert items into an IInventory or an ISidedInventory.
   */
  private static int doInsertItemInv(IInventory inv, ItemStack item, ForgeDirection inventorySide, boolean doInsert) {
    final ISidedInventory sidedInv = inv instanceof ISidedInventory ? (ISidedInventory) inv : null;
    ISlotIterator slots;

    if (sidedInv != null) {
      if (inventorySide == null) {
        inventorySide = ForgeDirection.UNKNOWN;
      }
      // Note: This is not thread-safe. Change getInstance() to constructor when needed (1.8++?).
      slots = sidedSlotter.getInstance(sidedInv.getAccessibleSlotsFromSide(inventorySide.ordinal()));
    } else {
      slots = invSlotter.getInstance(0, inv.getSizeInventory());
    }

    return doInsertItemInv(inv, sidedInv, slots, item, inventorySide, doInsert);
  }

  /**
   * Two-pass insertion: first merge into existing compatible stacks, then fill
   * the first empty slot found. Returns the number of items inserted.
   */
  private static int doInsertItemInv(IInventory inv, ISidedInventory sidedInv, ISlotIterator slots, ItemStack item, ForgeDirection inventorySide,
      boolean doInsert) {
    int numInserted = 0;
    int numToInsert = item.stackSize;
    int firstFreeSlot = -1;

    // PASS1: Try to add to an existing stack
    while (numToInsert > 0 && slots.hasNext()) {
      final int slot = slots.nextSlot();
      if (sidedInv == null || sidedInv.canInsertItem(slot, item, inventorySide.ordinal())) {
        final ItemStack contents = inv.getStackInSlot(slot);
        if (contents != null) {
          if (areStackMergable(contents, item)) {
            // some inventories like using itemstacks with invalid stack sizes
            final int freeSpace = Math.min(inv.getInventoryStackLimit(), contents.getMaxStackSize()) - contents.stackSize;
            if (freeSpace > 0) {
              final int noToInsert = Math.min(numToInsert, freeSpace);
              final ItemStack toInsert = item.copy();
              toInsert.stackSize = contents.stackSize + noToInsert;
              // isItemValidForSlot() may check the stacksize, so give it the number the stack would have in the end.
              // If it does something funny, like "only even numbers", we are screwed.
              if (sidedInv != null || inv.isItemValidForSlot(slot, toInsert)) {
                numInserted += noToInsert;
                numToInsert -= noToInsert;
                if (doInsert) {
                  inv.setInventorySlotContents(slot, toInsert);
                }
              }
            }
          }
        } else if (firstFreeSlot == -1) {
          firstFreeSlot = slot;
        }
      }
    }

    // PASS2: Try to insert into an empty slot
    if (numToInsert > 0 && firstFreeSlot != -1) {
      final ItemStack toInsert = item.copy();
      // some inventories like using itemstacks with invalid stack sizes
      toInsert.stackSize = min(numToInsert, inv.getInventoryStackLimit(), toInsert.getMaxStackSize());
      if (sidedInv != null || inv.isItemValidForSlot(firstFreeSlot, toInsert)) {
        numInserted += toInsert.stackSize;
        numToInsert -= toInsert.stackSize;
        if (doInsert) {
          inv.setInventorySlotContents(firstFreeSlot, toInsert);
        }
      }
    }

    if (numInserted > 0 && doInsert) {
      inv.markDirty();
    }
    return numInserted;
  }

  /** Minimum of three ints. */
  private final static int min(int i1, int i2, int i3) {
    return i1 < i2 ? (i1 < i3 ? i1 : i3) : (i2 < i3 ? i2 : i3);
  }

  /** True when the stack is non-null and at (or above) its max stack size. */
  public static boolean isStackFull(ItemStack contents) {
    if (contents == null) {
      return false;
    }
    return contents.stackSize >= contents.getMaxStackSize();
  }

  /**
   * Resolves a chest tile entity to its double-chest combined inventory when an
   * adjacent chest exists; otherwise returns the inventory unchanged.
   */
  public static IInventory getInventory(IInventory inv) {
    if (inv instanceof TileEntityChest) {
      TileEntityChest chest = (TileEntityChest) inv;
      TileEntityChest neighbour = null;
      if (chest.adjacentChestXNeg != null) {
        neighbour = chest.adjacentChestXNeg;
      } else if (chest.adjacentChestXPos != null) {
        neighbour = chest.adjacentChestXPos;
      } else if (chest.adjacentChestZNeg != null) {
        neighbour = chest.adjacentChestZNeg;
      } else if (chest.adjacentChestZPos != null) {
        neighbour = chest.adjacentChestZPos;
      }
      if (neighbour != null) {
        return new InventoryLargeChest("", inv, neighbour);
      }
      return inv;
    }
    return inv;
  }

  /**
   * Checks if items, damage and NBT are equal and the items are stackable.
   *
   * @param s1
   * @param s2
   * @return True if the two stacks are mergeable, false otherwise.
   */
  public static boolean areStackMergable(ItemStack s1, ItemStack s2) {
    if (s1 == null || s2 == null || !s1.isStackable() || !s2.isStackable()) {
      return false;
    }
    if (!s1.isItemEqual(s2)) {
      return false;
    }
    return ItemStack.areItemStackTagsEqual(s1, s2);
  }

  /**
   * Checks if items, damage and NBT are equal.
   *
   * @param s1
   * @param s2
   * @return True if the two stacks are equal, false otherwise.
   */
  public static boolean areStacksEqual(ItemStack s1, ItemStack s2) {
    if (s1 == null || s2 == null) {
      return false;
    }
    if (!s1.isItemEqual(s2)) {
      return false;
    }
    return ItemStack.areItemStackTagsEqual(s1, s2);
  }

  /** Iteration strategy over inventory slot indices. */
  private interface ISlotIterator {
    int nextSlot();

    boolean hasNext();
  }

  /** Range-based slot iterator. Singleton reuse — not thread-safe (see note above). */
  private final static class invSlotter implements ISlotIterator {
    private static final invSlotter me = new invSlotter();

    private int end;
    private int current;

    public final static invSlotter getInstance(int start, int end) {
      me.end = end;
      me.current = start;
      return me;
    }

    @Override
    public final int nextSlot() {
      return current++;
    }

    @Override
    public final boolean hasNext() {
      return current < end;
    }
  }

  /** Array-based slot iterator for sided inventories. Singleton reuse — not thread-safe. */
  private final static class sidedSlotter implements ISlotIterator {
    private static final sidedSlotter me = new sidedSlotter();

    private int[] slots;
    private int current;

    public final static sidedSlotter getInstance(int[] slots) {
      me.slots = slots;
      me.current = 0;
      return me;
    }

    @Override
    public final int nextSlot() {
      return slots[current++];
    }

    @Override
    public final boolean hasNext() {
      return slots != null && current < slots.length;
    }
  }
}
package com.forgeessentials.mapper; import java.awt.Color; import java.awt.image.BufferedImage; import java.io.IOException; import java.io.InputStream; import java.util.HashMap; import java.util.Map; import java.util.Scanner; import javax.imageio.ImageIO; import net.minecraft.block.Block; import net.minecraft.init.Blocks; import net.minecraft.util.ResourceLocation; import net.minecraft.world.ChunkCoordIntPair; import net.minecraft.world.WorldServer; import net.minecraft.world.chunk.Chunk; import net.minecraft.world.chunk.storage.AnvilChunkLoader; import net.minecraft.world.chunk.storage.RegionFileCache; import com.forgeessentials.core.preloader.api.ServerBlock; import cpw.mods.fml.common.registry.GameData; public final class MapperUtil { public static final int CHUNK_BLOCKS = 1 << 4; public static final int REGION_CHUNKS = 1 << 5; public static final int REGION_CHUNK_COUNT = REGION_CHUNKS * REGION_CHUNKS; public static final int REGION_BLOCKS = CHUNK_BLOCKS << 5; public static final String BASE_PATH = "textures/blocks"; public static Map<Block, Integer[]> colorMap = new HashMap<Block, Integer[]>(); public static Color[][] colors = new Color[4096][]; public static Color[][][] datacolors = new Color[4096][][]; public static Color[][] biomecolors = new Color[BiomeMap.values().length][]; public static Color[][] raincolors = new Color[64][]; public static Color[][] tempcolors = new Color[64][]; public static void renderChunk(BufferedImage image, int offsetX, int offsetY, Chunk chunk) { for (int ix = 0; ix < 16; ix++) { for (int iz = 0; iz < 16; iz++) { int iy = chunk.getHeightValue(ix, iz); for (; iy >= 0; iy { Block block = chunk.getBlock(ix, iy, iz); int meta = chunk.getBlockMetadata(ix, iy, iz); if (block == Blocks.air) continue; image.setRGB(offsetX + ix, offsetY + iz, getBlockColor(block, meta).getRGB()); break; } if (iy < 0) { /* error */ } } } } public static BufferedImage renderChunk(Chunk chunk) { BufferedImage image = new BufferedImage(16, 16, 
BufferedImage.TYPE_3BYTE_BGR); renderChunk(image, 0, 0, chunk); return image; } public static BufferedImage renderRegion(WorldServer world, int regionX, int regionZ) { int chunkStartX = regionX * MapperUtil.REGION_CHUNKS; int chunkStartZ = regionZ * MapperUtil.REGION_CHUNKS; // File regionFile = new File(file2, "r." + regionX + "." + regionZ + ".mca"); BufferedImage image = new BufferedImage(MapperUtil.REGION_BLOCKS, MapperUtil.REGION_BLOCKS, BufferedImage.TYPE_3BYTE_BGR); for (int rx = 0; rx < MapperUtil.REGION_CHUNKS; rx++) { for (int rz = 0; rz < MapperUtil.REGION_CHUNKS; rz++) { int cx = chunkStartX + rx; int cz = chunkStartZ + rz; if (!MapperUtil.chunkExists(world, cx, cz)) continue; Chunk chunk = loadChunk(world, cx, cz); MapperUtil.renderChunk(image, rx * MapperUtil.CHUNK_BLOCKS, rz * MapperUtil.CHUNK_BLOCKS, chunk); } } return image; } public static Chunk loadChunk(WorldServer world, int cx, int cz) { Chunk chunk = (Chunk) world.theChunkProviderServer.loadedChunkHashMap.getValueByKey(ChunkCoordIntPair.chunkXZ2Int(cx, cz)); if (chunk != null) return chunk; try { AnvilChunkLoader loader = (AnvilChunkLoader) world.theChunkProviderServer.currentChunkLoader; Object[] data = loader.loadChunk__Async(world, cx, cz); return (Chunk) data[0]; } catch (IOException e) { return null; } } public static boolean chunkExists(WorldServer world, int cx, int cz) { return RegionFileCache.createOrLoadRegionFile(world.getChunkSaveLocation(), cx, cz).chunkExists(cx & 0x1F, cz & 0x1F); } public static Color getBlockColor(Block block, int meta) { int id = GameData.getBlockRegistry().getId(block); if (id >= colors.length) return Color.BLACK; if (datacolors[id] != null && meta < datacolors[id].length && datacolors[id][meta] != null) return datacolors[id][meta][0]; if (colors[id] != null && colors[id][0] != null) return colors[id][0]; // IIcon icon = block.getIcon(ForgeDirection.UP.ordinal(), chunk.getBlockMetadata(ix, iy, iz)); // if (icon == null) continue; Color color; BufferedImage 
image = getBlockImage(block); if (image != null) color = getAverageColor(image); else if (colors[id] != null && colors[id].length > 0 && colors[id][0] != null) color = colors[id][0]; else color = Color.BLACK; if (id >= colors.length) { Color[][] newColors = new Color[id + 1][]; System.arraycopy(colors, 0, newColors, 0, colors.length); colors = newColors; Color[][][] newDataColors = new Color[id + 1][][]; System.arraycopy(datacolors, 0, newDataColors, 0, datacolors.length); datacolors = newDataColors; } if (meta > 0 || datacolors[id] != null) { if (datacolors[id] == null) datacolors[id] = new Color[16][]; if (datacolors[id][meta] == null) datacolors[id][meta] = new Color[5]; datacolors[id][meta][0] = color; datacolors[id][meta][1] = color; datacolors[id][meta][2] = color; datacolors[id][meta][3] = color; datacolors[id][meta][4] = color; } else { if (colors[id] == null) colors[id] = new Color[5]; colors[id][0] = color; colors[id][1] = color; colors[id][2] = color; colors[id][3] = color; colors[id][4] = color; } return color; } public static InputStream getResourceStream(ResourceLocation p_110605_1_) { return Object.class.getResourceAsStream("/assets/" + p_110605_1_.getResourceDomain() + "/" + p_110605_1_.getResourcePath()); } public static InputStream getInputStream(ResourceLocation loc) throws IOException { InputStream is = getResourceStream(loc); if (is != null) return is; // String fileName = String.format("%s/%s/%s", new Object[] { "assets", loc.getResourceDomain(), // loc.getResourcePath() }); return null; } public static ResourceLocation completeResourceLocation(ResourceLocation loc, int mipmap) { if (mipmap == 0) return new ResourceLocation(loc.getResourceDomain(), String.format("%s/%s%s", new Object[] { BASE_PATH, loc.getResourcePath(), ".png" })); return new ResourceLocation(loc.getResourceDomain(), String.format("%s/mipmaps/%s.%d%s", new Object[] { BASE_PATH, loc.getResourcePath(), Integer.valueOf(mipmap), ".png" })); } public static BufferedImage 
getBlockImage(Block block) { // NOTE(review): modifiers/return type are declared above this view
    try {
        String textureName = ((ServerBlock) block).getTextureNameServer();
        ResourceLocation relLoc = new ResourceLocation(textureName);
        ResourceLocation absLoc = completeResourceLocation(relLoc, 0);
        InputStream is = getInputStream(absLoc);
        // Missing texture resource: report "no image" rather than throwing.
        if (is == null)
            return null;
        return ImageIO.read(is);
    } catch (IOException e) {
        // Unreadable/corrupt image data is treated the same as a missing one.
        return null;
    }
}

/**
 * Computes the per-channel arithmetic mean of R, G and B over every pixel.
 * Alpha is ignored.
 * NOTE(review): assumes width*height > 0 — a zero-sized image divides by zero.
 *
 * @param image source image to average
 * @return opaque color whose channels are the pixel averages
 */
public static Color getAverageColor(BufferedImage image) {
    long r = 0;
    long g = 0;
    long b = 0;
    for (int ix = 0; ix < image.getWidth(); ix++) {
        for (int iy = 0; iy < image.getHeight(); iy++) {
            int rgb = image.getRGB(ix, iy);
            Color color = new Color(rgb);
            r += color.getRed();
            g += color.getGreen();
            b += color.getBlue();
        }
    }
    int count = image.getWidth() * image.getHeight();
    r /= count;
    g /= count;
    b /= count;
    return new Color((int) r, (int) g, (int) b);
}

/**
 * Parses a Dynmap-style color-scheme text file from {@code stream} and populates
 * the static lookup tables: {@code colors} (by block id), {@code datacolors}
 * (by block id + data value), {@code biomecolors}, {@code raincolors} and
 * {@code tempcolors}. Each non-comment line carries an id spec followed by at
 * least 16 integer fields (4x RGBA).
 *
 * NOTE(review): the Scanner is only closed on the success path — an exception
 * mid-parse leaves it open. Also, the catch below silently swallows any
 * RuntimeException (e.g. a malformed integer), aborting the load half-done.
 */
public static void loadColorScheme(InputStream stream) {
    colors = new Color[4096][];
    datacolors = new Color[4096][][];
    // biomecolors = new Color[BiomeMap.values().length][];
    // raincolors = new Color[64][];
    // tempcolors = new Color[64][];
    /* Default the biome color: derive a distinct pastel per biome index from its bits */
    for (int i = 0; i < biomecolors.length; i++) {
        Color[] c = new Color[5];
        int red = 0x80 | (0x40 * ((i >> 0) & 1)) | (0x20 * ((i >> 3) & 1)) | (0x10 * ((i >> 6) & 1));
        int green = 0x80 | (0x40 * ((i >> 1) & 1)) | (0x20 * ((i >> 4) & 1)) | (0x10 * ((i >> 7) & 1));
        int blue = 0x80 | (0x40 * ((i >> 2) & 1)) | (0x20 * ((i >> 5) & 1));
        c[0] = new Color(red, green, blue);
        c[3] = new Color(red * 4 / 5, green * 4 / 5, blue * 4 / 5);
        c[1] = new Color(red / 2, green / 2, blue / 2);
        c[2] = new Color(red * 2 / 5, green * 2 / 5, blue * 2 / 5);
        c[4] = new Color((c[1].getRed() + c[3].getRed()) / 2, (c[1].getGreen() + c[3].getGreen()) / 2, (c[1].getBlue() + c[3].getBlue()) / 2, (c[1].getAlpha() + c[3].getAlpha()) / 2);
        biomecolors[i] = c;
    }
    try {
        Scanner scanner = new Scanner(stream);
        while (scanner.hasNextLine()) {
            String line = scanner.nextLine();
            // Skip comments and blank lines.
            if (line.startsWith("#") || line.equals("")) {
                continue;
            }
            /* Make parser less pedantic - tabs or spaces should be fine */
            String[] split = line.split("[\t ]");
            // Compact out the empty tokens produced by runs of whitespace.
            int cnt = 0;
            for (String s : split) {
                if (s.length() > 0)
                    cnt++;
            }
            String[] nsplit = new String[cnt];
            cnt = 0;
            for (String s : split) {
                if (s.length() > 0) {
                    nsplit[cnt] = s;
                    cnt++;
                }
            }
            split = nsplit;
            // Need the id field plus 16 color components.
            if (split.length < 17) {
                continue;
            }
            Integer id;
            Integer dat = null;
            boolean isbiome = false;
            boolean istemp = false;
            boolean israin = false;
            int idx = split[0].indexOf(':');
            if (idx > 0) { /* ID:data - data color */
                id = new Integer(split[0].substring(0, idx));
                dat = new Integer(split[0].substring(idx + 1));
            } else if (split[0].charAt(0) == '[') { /* Biome color data */
                String bio = split[0].substring(1);
                idx = bio.indexOf(']');
                if (idx >= 0)
                    bio = bio.substring(0, idx);
                isbiome = true;
                id = -1;
                // Match by biome name first, then by the generic "BIOME_<n>" alias.
                BiomeMap[] bm = BiomeMap.values();
                for (int i = 0; i < bm.length; i++) {
                    if (bm[i].toString().equalsIgnoreCase(bio)) {
                        id = i;
                        break;
                    } else if (bio.equalsIgnoreCase("BIOME_" + i)) {
                        id = i;
                        break;
                    }
                }
                if (id < 0) { /* Not biome - check for rain or temp */
                    // RAINFALL-x / TEMPERATURE-x map a [0,1] value onto 64 buckets.
                    if (bio.startsWith("RAINFALL-")) {
                        try {
                            double v = Double.parseDouble(bio.substring(9));
                            if ((v >= 0) && (v <= 1.00)) {
                                id = (int) (v * 63.0);
                                israin = true;
                            }
                        } catch (NumberFormatException nfx) {
                            // Malformed value: ignore this line's rain spec.
                        }
                    } else if (bio.startsWith("TEMPERATURE-")) {
                        try {
                            double v = Double.parseDouble(bio.substring(12));
                            if ((v >= 0) && (v <= 1.00)) {
                                id = (int) (v * 63.0);
                                istemp = true;
                            }
                        } catch (NumberFormatException nfx) {
                            // Malformed value: ignore this line's temperature spec.
                        }
                    }
                }
            } else {
                id = new Integer(split[0]);
            }
            // Grow the block tables on demand for ids beyond the initial 4096.
            if ((!isbiome) && (id >= colors.length)) {
                Color[][] newcolors = new Color[id + 1][];
                System.arraycopy(colors, 0, newcolors, 0, colors.length);
                colors = newcolors;
                Color[][][] newdatacolors = new Color[id + 1][][];
                System.arraycopy(datacolors, 0, newdatacolors, 0, datacolors.length);
                datacolors = newdatacolors;
            }
            Color[] c = new Color[5];
            /* store colors by raycast sequence number */
            c[0] = new Color(Integer.parseInt(split[1]), Integer.parseInt(split[2]), Integer.parseInt(split[3]), Integer.parseInt(split[4]));
            c[3] = new Color(Integer.parseInt(split[5]), Integer.parseInt(split[6]), Integer.parseInt(split[7]), Integer.parseInt(split[8]));
            c[1] = new Color(Integer.parseInt(split[9]), Integer.parseInt(split[10]), Integer.parseInt(split[11]), Integer.parseInt(split[12]));
            c[2] = new Color(Integer.parseInt(split[13]), Integer.parseInt(split[14]), Integer.parseInt(split[15]), Integer.parseInt(split[16]));
            /* Blended color - for 'smooth' option on flat map */
            c[4] = new Color((c[1].getRed() + c[3].getRed()) / 2, (c[1].getGreen() + c[3].getGreen()) / 2, (c[1].getBlue() + c[3].getBlue()) / 2, (c[1].getAlpha() + c[3].getAlpha()) / 2);
            if (isbiome) {
                // NOTE(review): only the biome branch bounds-checks id; the temp/rain
                // branches rely on id having been clamped to 0..63 above.
                if (istemp) {
                    tempcolors[id] = c;
                } else if (israin) {
                    raincolors[id] = c;
                } else if ((id >= 0) && (id < biomecolors.length))
                    biomecolors[id] = c;
            } else if (dat != null) {
                Color[][] dcolor = datacolors[id]; /* Existing list? */
                if (dcolor == null) {
                    dcolor = new Color[16][]; /* Make 16 index long list */
                    datacolors[id] = dcolor;
                }
                if ((dat >= 0) && (dat < 16)) { /* Add color to list */
                    dcolor[dat] = c;
                }
                if (dat == 0) { /* Index zero is base color too */
                    colors[id] = c;
                }
            } else {
                colors[id] = c;
            }
        }
        scanner.close();
        /* Last, push base color into any open slots in data colors list */
        for (int k = 0; k < datacolors.length; k++) {
            Color[][] dc = datacolors[k]; /* see if data colors too */
            if (dc != null) {
                Color[] c = colors[k];
                for (int i = 0; i < 16; i++) {
                    if (dc[i] == null)
                        dc[i] = c;
                }
            }
        }
        /* And interpolate any missing rain and temperature colors */
        interpolateColorTable(tempcolors);
        interpolateColorTable(raincolors);
    } catch (RuntimeException e) {
        // NOTE(review): parse errors are silently swallowed here, leaving the
        // tables partially populated.
        // Log.severe("Could not load colors '" + name + "' ('" + colorSchemeFile + "').", e);
    }
}

/**
 * Fills the {@code null} gaps of a 64-slot color table in place:
 * slots before the first defined entry are back-filled with it, trailing
 * slots are forward-filled with the last defined entry, and interior gaps
 * are linearly interpolated (per RGBA channel) between their neighbors.
 *
 * @param c color table to densify; each non-null entry is a Color[] of equal length
 */
public static void interpolateColorTable(Color[][] c) {
    int idx = -1; // index of the most recent non-null entry seen, -1 if none yet
    for (int k = 0; k < c.length; k++) {
        if (c[k] == null) { /* Missing? */
            if ((idx >= 0) && (k == (c.length - 1))) { /* We're last - so fill forward from last color */
                for (int kk = idx + 1; kk <= k; kk++) {
                    c[kk] = c[idx];
                }
            }
            /* Skip - will backfill when we find next color */
        } else if (idx == -1) { /* No previous color, just backfill this color */
            for (int kk = 0; kk < k; kk++) {
                c[kk] = c[k];
            }
            idx = k; /* This is now last defined color */
        } else { /* Else, interpolate between last idx and this one */
            int cnt = c[k].length;
            for (int kk = idx + 1; kk < k; kk++) {
                double interp = (double) (kk - idx) / (double) (k - idx);
                Color[] cc = new Color[cnt];
                for (int jj = 0; jj < cnt; jj++) {
                    cc[jj] = new Color((int) ((1.0 - interp) * c[idx][jj].getRed() + interp * c[k][jj].getRed()), (int) ((1.0 - interp) * c[idx][jj].getGreen() + interp * c[k][jj].getGreen()), (int) ((1.0 - interp) * c[idx][jj].getBlue() + interp * c[k][jj].getBlue()), (int) ((1.0 - interp) * c[idx][jj].getAlpha() + interp * c[k][jj].getAlpha()));
                }
                c[kk] = cc;
            }
            idx = k;
        }
    }
}

/** Converts a world (block) coordinate to its chunk coordinate (16 blocks per chunk). */
public static int worldToChunk(int v) {
    return v >> 4;
}

/** Converts a world (block) coordinate to its region coordinate (512 blocks per region). */
public static int worldToRegion(int v) {
    return v >> 9;
}

/** Converts a chunk coordinate to its region coordinate (32 chunks per region). */
public static int chunkToRegion(int v) {
    return v >> 5;
}

/** Converts a region coordinate to its first chunk coordinate. */
public static int regionToChunk(int v) {
    return v << 5;
}
}
package com.github.angelndevil2.dsee;

import com.github.angelndevil2.dsee.util.PropertiesUtil;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.ParseException;

import java.io.File;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.nio.charset.Charset;

/**
 * Command-line launcher that attaches the dsee agent jar to a running JVM
 * identified by its pid, using the attach API resolved reflectively so that
 * tools.jar is not a hard compile-time dependency.
 */
@Slf4j
public class Launcher {

    /** Arguments forwarded to the agent on loadAgent; currently never set. */
    private static String vmArgs;

    /**
     * Entry point. Options: -h prints usage, -d overrides the configuration
     * directory, -p &lt;pid&gt; attaches the agent to that JVM.
     *
     * @param args command line arguments
     * @throws IOException    if option processing fails at the I/O level
     * @throws ParseException if the command line cannot be parsed
     */
    public static void main(String[] args) throws IOException, ParseException {
        CmdOptions options = new CmdOptions();
        options.setArgs(args);
        CommandLine cmd = options.getCmd();

        if (cmd.hasOption('h')) {
            options.printUsage();
            return;
        }

        if (cmd.hasOption("d")) {
            try {
                PropertiesUtil.setDirs(cmd.getOptionValue("d").trim());
            } catch (IOException e) {
                System.err.println(PropertiesUtil.getConfDir() + File.separator + PropertiesUtil.AppProperties + " not found. may use -d option" + e);
                return;
            }
        }

        if (cmd.hasOption('p')) {
            String pid = cmd.getOptionValue('p');
            if (pid == null) throw new NullPointerException("pid is null");
            try {
                // Resolve the attach API reflectively: com.sun.tools.attach is
                // only present when tools.jar is on the classpath.
                Class<?> vmClass = Class.forName("com.sun.tools.attach.VirtualMachine");
                Object virtualMachine = vmClass.getMethod("attach", String.class).invoke(null, pid);
                String jarName = Bootstrap.findPathJar(null);
                virtualMachine.getClass().getMethod("loadAgent", String.class, String.class)
                        .invoke(virtualMachine, jarName, vmArgs);
                log.debug(jarName + " registered.");
                virtualMachine.getClass().getMethod("detach").invoke(virtualMachine);
            } catch (Exception e) {
                // Attach failed: exit with a non-zero status. The original used
                // System.exit(0), which incorrectly reported success to scripts.
                e.printStackTrace();
                System.exit(1);
            }
        } else {
            System.out.println("d or p options is required");
            options.printUsage();
        }
    }
}
package com.github.iounit.runner;

import static org.junit.Assert.assertEquals;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.lang.reflect.Method;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.commons.lang3.reflect.MethodUtils;
import org.junit.Test;
import org.junit.runners.Parameterized.Parameter;

import com.github.iounit.annotations.IOInput;
import com.github.iounit.annotations.IOTest;
import com.github.iounit.runner.IOUnitClassRunnerWithParameters.SuiteClass;
import com.github.iounit.util.FileUtils;

/**
 * Base class for file-driven ("golden file") tests: each input file is run
 * through {@link #run(String)} and the result is compared against a sibling
 * ".expected" file, which is created on first run. On mismatch the actual
 * output may be saved next to the input as a ".failed" file.
 */
public abstract class BaseIORunner {

    /** Input file under test, injected by the parameterized runner. */
    @Parameter
    public File file;

    /** The concrete test class, used to read its IOTest/IOInput annotations. */
    @SuiteClass
    public Class<?> sourceTestClass;

    /**
     * Runs the input file through {@link #run(String)} and asserts the output
     * matches the expected file (creating it when absent). Setting the system
     * property {@code IOUnitOverwriteOutput=Y} regenerates the expected file
     * instead of failing.
     */
    @Test
    public void runTest() throws Exception {
        // try-with-resources throughout: the original leaked every stream it opened.
        final String input;
        try (FileInputStream in = new FileInputStream(file)) {
            input = FileUtils.read(in);
        }
        final String output = run(input);
        final File outFile = determineOutFile(file);
        if (!outFile.exists()) {
            // First run: record the current output as the expected baseline.
            try (FileOutputStream out = new FileOutputStream(outFile)) {
                FileUtils.write(output, out);
            }
        }
        final String expected;
        try (FileInputStream in = new FileInputStream(outFile)) {
            expected = FileUtils.read(in);
        }
        // Normalize new lines for compare
        if (!expected.replaceAll("\r\n?", "\n").equals(output.replaceAll("\r\n?", "\n"))) {
            if (saveFailure()) {
                File failureFile = determineFailureOutFile(file);
                try (FileOutputStream out = new FileOutputStream(failureFile)) {
                    FileUtils.write(output, out);
                } catch (Exception e) {
                    System.err.println("Could not write " + failureFile);
                }
            }
            if ("Y".equals(System.getProperty("IOUnitOverwriteOutput"))) {
                try (FileOutputStream out = new FileOutputStream(outFile)) {
                    FileUtils.write(output, out);
                }
            } else {
                assertEquals(normalize(expected), normalize(output));
            }
        }
    }

    /**
     * Normalizes line endings to \n and expands tabs to the next 4-column
     * tab stop, so whitespace differences do not cause spurious failures.
     */
    protected String normalize(final String input) {
        final StringBuilder sb = new StringBuilder(input.replaceAll("\r\n?", "\n"));
        int idx = sb.indexOf("\t");
        while (idx >= 0) {
            final int lineStart = Math.max(0, sb.substring(0, idx).lastIndexOf("\n") + 1);
            final StringBuilder tab = new StringBuilder("    ");
            // Pad only up to the next tab stop relative to the line start.
            if ((idx - lineStart) % 4 != 0) {
                tab.setLength((idx - lineStart) % 4);
            }
            sb.replace(idx, idx + 1, tab.toString());
            idx = sb.indexOf("\t");
        }
        return sb.toString();
    }

    /**
     * Derives a companion file for the given input: regex group 1 (and the
     * optional group 2 extension) from {@link #getMatcher()} when it matches,
     * otherwise the input's own name with {@code suffix} spliced before the
     * extension.
     */
    private File deriveCompanionFile(final File src, final String suffix) {
        final Matcher matcher = Pattern.compile(getMatcher()).matcher(src.getPath());
        if (matcher.matches() && matcher.groupCount() > 0) {
            return new File(matcher.group(1) + "." + suffix + "."
                    + (matcher.groupCount() > 1 ? matcher.group(2) : "txt"));
        }
        return new File(src.getPath().replaceAll("(.*)[.]([^.]+)", "$1." + suffix + ".$2"));
    }

    /** The expected-output file for the given input file. */
    private File determineOutFile(final File file2) {
        return deriveCompanionFile(file2, "expected");
    }

    /** The failed-output file for the given input file. */
    private File determineFailureOutFile(final File file2) {
        return deriveCompanionFile(file2, "failed");
    }

    /**
     * Resolves the input-file regex: IOTest.inputMatches, then
     * IOTest.inputExtension, then the deprecated IOInput equivalents,
     * defaulting to {@code (.*)\.input\.(.*)}.
     */
    private String getMatcher() {
        final Method[] methods = MethodUtils.getMethodsWithAnnotation(sourceTestClass, IOTest.class);
        final IOTest ioInput = methods.length > 0 ? methods[0].getAnnotation(IOTest.class) : null;
        if (ioInput != null) {
            if (!ioInput.inputMatches().trim().isEmpty()) {
                return ioInput.inputMatches();
            } else if (!ioInput.inputExtension().trim().isEmpty()) {
                return "(.*)[.]" + ioInput.inputExtension().replaceFirst("^[.]", "");
            }
        }
        final IOInput ioInputOld = sourceTestClass.getAnnotation(IOInput.class);
        if (ioInputOld != null) {
            if (!ioInputOld.matches().trim().isEmpty()) {
                return ioInputOld.matches();
            } else if (!ioInputOld.extension().trim().isEmpty()) {
                return "(.*)[.]" + ioInputOld.extension().replaceFirst("^[.]", "");
            }
        }
        return "(.*)\\.input\\.(.*)";
    }

    /** Whether failed output should be written to disk; defaults to true. */
    private boolean saveFailure() {
        final Method[] methods = MethodUtils.getMethodsWithAnnotation(sourceTestClass, IOTest.class);
        final IOTest testInfo = methods.length > 0 ? methods[0].getAnnotation(IOTest.class) : null;
        if (testInfo != null) {
            return testInfo.saveFailedOutput();
        }
        return true;
    }

    /**
     * Transforms the raw input-file contents into the output to compare.
     *
     * @param input contents of the input file
     * @return output to compare against the expected file
     */
    public abstract String run(String input) throws Exception;
}
package com.instructure.canvasapi.model;

import android.os.Parcel;

import com.instructure.canvasapi.utilities.APIHelpers;

import java.util.Date;

/**
 * Observer alert returned by the Canvas API. Field names mirror the API's
 * snake_case JSON keys; dates are kept as the raw API strings and parsed on
 * access via {@link APIHelpers#stringToDate(String)}.
 */
public class Alert extends CanvasModel<Alert> {

    //Variables from API
    private String id;
    private boolean marked_read;
    private boolean dismissed;
    private String alert_type;
    private String title;
    private String action_date;
    private String creation_date;
    private String observer_id;
    private String student_id;
    private String course_id;
    private String alert_criteria_id;
    private String asset_url;

    /** The kinds of alert the API can report. */
    public enum ALERT_TYPE {
        COURSE_ANNOUNCEMENT,
        INSTITUTION_ANNOUNCEMENT,
        ASSIGNMENT_GRADE_HIGH,
        ASSIGNMENT_GRADE_LOW,
        ASSIGNMENT_MISSING,
        COURSE_GRADE_HIGH,
        COURSE_GRADE_LOW
    }

    /**
     * The API id is a String; CanvasModel requires a long, so the hash code is
     * used. Guarded against an unset id (the original NPE'd in that case).
     */
    @Override
    public long getId() {
        return id == null ? 0 : id.hashCode();
    }

    @Override
    public Date getComparisonDate() {
        return null;
    }

    @Override
    public String getComparisonString() {
        return null;
    }

    //region Getters/Setters

    /** The raw String id as delivered by the API. */
    public String getStringId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public void setMarkedRead(boolean isRead) {
        this.marked_read = isRead;
    }

    public boolean isMarkedRead() {
        return this.marked_read;
    }

    public void setDismissed(boolean dismissed) {
        this.dismissed = dismissed;
    }

    public boolean isDismissed() {
        return this.dismissed;
    }

    public void setAlertType(ALERT_TYPE alert_type) {
        this.alert_type = alertTypeToAPIString(alert_type);
    }

    public ALERT_TYPE getAlertType() {
        return getAlertTypeFromString(alert_type);
    }

    public void setTitle(String title) {
        this.title = title;
    }

    public String getTitle() {
        return this.title;
    }

    public void setActionDate(String actionDate) {
        this.action_date = actionDate;
    }

    public Date getActionDate() {
        return APIHelpers.stringToDate(this.action_date);
    }

    public void setCreationDate(String creationDate) {
        this.creation_date = creationDate;
    }

    public Date getCreationDate() {
        return APIHelpers.stringToDate(this.creation_date);
    }

    public void setObserverId(String observerId) {
        this.observer_id = observerId;
    }

    public String getObserverId() {
        return this.observer_id;
    }

    public void setStudentId(String studentId) {
        this.student_id = studentId;
    }

    public String getStudentId() {
        return this.student_id;
    }

    public void setCourseId(String courseId) {
        this.course_id = courseId;
    }

    public String getCourseId() {
        return this.course_id;
    }

    public void setAssetUrl(String assetUrl) {
        this.asset_url = assetUrl;
    }

    public String getAssetUrl() {
        return this.asset_url;
    }

    public String getAlertCriteriaId() {
        return alert_criteria_id;
    }

    public void setAlertCriteriaId(String alert_criteria_id) {
        this.alert_criteria_id = alert_criteria_id;
    }
    //endregion

    /**
     * Maps an API alert-type string to the enum.
     *
     * @param alert_type API string; may be null or unrecognized
     * @return the matching type, or null for null/unknown input (the original
     *         threw NullPointerException when the field was never set)
     */
    public static ALERT_TYPE getAlertTypeFromString(String alert_type) {
        if (alert_type == null) {
            return null;
        }
        switch (alert_type) {
            case ("course_announcement"):
                return ALERT_TYPE.COURSE_ANNOUNCEMENT;
            case ("institution_announcement"):
                return ALERT_TYPE.INSTITUTION_ANNOUNCEMENT;
            case ("assignment_grade_high"):
                return ALERT_TYPE.ASSIGNMENT_GRADE_HIGH;
            case ("assignment_grade_low"):
                return ALERT_TYPE.ASSIGNMENT_GRADE_LOW;
            case ("assignment_missing"):
                return ALERT_TYPE.ASSIGNMENT_MISSING;
            case ("course_grade_high"):
                return ALERT_TYPE.COURSE_GRADE_HIGH;
            case ("course_grade_low"):
                return ALERT_TYPE.COURSE_GRADE_LOW;
            default:
                return null;
        }
    }

    /**
     * Maps the enum back to the API's string form.
     *
     * @param alert_type enum value; may be null
     * @return the API string, or null for null input
     */
    public static String alertTypeToAPIString(ALERT_TYPE alert_type) {
        if (alert_type == null) {
            return null;
        }
        switch (alert_type) {
            case COURSE_ANNOUNCEMENT:
                return "course_announcement";
            case INSTITUTION_ANNOUNCEMENT:
                return "institution_announcement";
            case ASSIGNMENT_GRADE_HIGH:
                return "assignment_grade_high";
            case ASSIGNMENT_GRADE_LOW:
                return "assignment_grade_low";
            case ASSIGNMENT_MISSING:
                return "assignment_missing";
            case COURSE_GRADE_HIGH:
                return "course_grade_high";
            case COURSE_GRADE_LOW:
                return "course_grade_low";
            default:
                return null;
        }
    }

    //region Parcel stuffs

    // Field order here MUST match the read order in the Parcel constructor below.
    @Override
    public void writeToParcel(Parcel parcel, int i) {
        parcel.writeString(this.id);
        parcel.writeString(this.alert_criteria_id);
        parcel.writeByte(this.marked_read ? (byte) 1 : (byte) 0);
        parcel.writeByte(this.dismissed ? (byte) 1 : (byte) 0);
        parcel.writeString(this.alert_type);
        parcel.writeString(this.title);
        parcel.writeString(this.action_date);
        parcel.writeString(this.creation_date);
        parcel.writeString(this.observer_id);
        parcel.writeString(this.student_id);
        parcel.writeString(this.course_id);
        parcel.writeString(this.asset_url);
    }

    private Alert(Parcel parcel) {
        this.id = parcel.readString();
        this.alert_criteria_id = parcel.readString();
        this.marked_read = parcel.readByte() != 0;
        this.dismissed = parcel.readByte() != 0;
        this.alert_type = parcel.readString();
        this.title = parcel.readString();
        this.action_date = parcel.readString();
        this.creation_date = parcel.readString();
        this.observer_id = parcel.readString();
        this.student_id = parcel.readString();
        this.course_id = parcel.readString();
        this.asset_url = parcel.readString();
    }

    public static Creator<Alert> CREATOR = new Creator<Alert>() {

        public Alert createFromParcel(Parcel parcel) {
            return new Alert(parcel);
        }

        public Alert[] newArray(int size) {
            return new Alert[size];
        }
    };
    //endregion
}
package com.lothrazar.cyclic.event;

import com.lothrazar.cyclic.base.ItemEntityInteractable;
import com.lothrazar.cyclic.block.cable.CableWrench;
import com.lothrazar.cyclic.block.cable.WrenchActionType;
import com.lothrazar.cyclic.block.scaffolding.ItemScaffolding;
import com.lothrazar.cyclic.item.AntimatterEvaporatorWandItem;
import com.lothrazar.cyclic.item.builder.BuilderActionType;
import com.lothrazar.cyclic.item.builder.BuilderItem;
import com.lothrazar.cyclic.item.carrot.ItemHorseEnder;
import com.lothrazar.cyclic.item.datacard.ShapeCard;
import com.lothrazar.cyclic.item.heart.HeartItem;
import com.lothrazar.cyclic.item.storagebag.StorageBagItem;
import com.lothrazar.cyclic.registry.BlockRegistry;
import com.lothrazar.cyclic.registry.PotionRegistry;
import com.lothrazar.cyclic.registry.SoundRegistry;
import com.lothrazar.cyclic.util.UtilChat;
import com.lothrazar.cyclic.util.UtilItemStack;
import com.lothrazar.cyclic.util.UtilSound;
import com.lothrazar.cyclic.util.UtilWorld;
import java.util.Set;
import net.minecraft.block.BlockState;
import net.minecraft.block.Blocks;
import net.minecraft.entity.LivingEntity;
import net.minecraft.entity.ai.attributes.AttributeModifier;
import net.minecraft.entity.ai.attributes.Attributes;
import net.minecraft.entity.ai.attributes.ModifiableAttributeInstance;
import net.minecraft.entity.player.PlayerEntity;
import net.minecraft.item.ItemStack;
import net.minecraft.potion.EffectInstance;
import net.minecraft.potion.Effects;
import net.minecraft.util.Direction;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.World;
import net.minecraftforge.event.entity.living.LivingEvent.LivingUpdateEvent;
import net.minecraftforge.event.entity.player.BonemealEvent;
import net.minecraftforge.event.entity.player.EntityItemPickupEvent;
import net.minecraftforge.event.entity.player.PlayerEvent;
import net.minecraftforge.event.entity.player.PlayerInteractEvent;
import net.minecraftforge.event.entity.player.PlayerInteractEvent.EntityInteract;
import net.minecraftforge.event.entity.player.PlayerInteractEvent.RightClickBlock;
import net.minecraftforge.event.entity.player.SleepingLocationCheckEvent;
import net.minecraftforge.eventbus.api.Event.Result;
import net.minecraftforge.eventbus.api.SubscribeEvent;

/**
 * Forge event handlers for item-related behavior: heart-item max-health
 * persistence across death, ender-horse-carrot survival buffs, cyan-flower
 * bonemeal interactions, sleeping-mat checks, scaffolding placement, and the
 * left-click mode toggles for builder/wrench/shape-card/antimatter-wand items.
 */
public class ItemEvents {

  /**
   * On player clone (death/respawn), re-applies the heart item's MAX_HEALTH
   * modifier from the original player so bonus hearts survive respawn.
   */
  @SubscribeEvent
  public void onPlayerCloneDeath(PlayerEvent.Clone event) {
    ModifiableAttributeInstance original = event.getOriginal().getAttribute(Attributes.MAX_HEALTH);
    if (original != null) {
      AttributeModifier healthModifier = original.getModifier(HeartItem.ID);
      if (healthModifier != null) {
        event.getPlayer().getAttribute(Attributes.MAX_HEALTH).applyPersistentModifier(healthModifier);
      }
    }
  }

  /**
   * Each living tick, if the entity carries the active ender-horse marker in
   * its persistent NBT, grants emergency buffs (water breathing + swim speed,
   * fire resistance, slow falling, absorption + resistance) and charges the
   * item via ItemHorseEnder.onSuccess for each one triggered.
   */
  @SubscribeEvent
  public void onEntityUpdate(LivingUpdateEvent event) {
    LivingEntity liv = event.getEntityLiving();
    if (liv.getPersistentData().contains(ItemHorseEnder.NBT_KEYACTIVE)
        && liv.getPersistentData().getInt(ItemHorseEnder.NBT_KEYACTIVE) > 0) {
      // Drowning: needs to be in water, unable to breathe there, losing air,
      // and not already covered by an active water-breathing effect.
      if (liv.isInWater() && liv.canBreatheUnderwater() == false
          && liv.getAir() < liv.getMaxAir()
          && !liv.isPotionActive(Effects.WATER_BREATHING)) {
        liv.addPotionEffect(new EffectInstance(Effects.WATER_BREATHING, 20 * 60, 4));
        liv.addPotionEffect(new EffectInstance(PotionRegistry.PotionEffects.swimspeed, 20 * 60, 1));
        ItemHorseEnder.onSuccess(liv);
      }
      // On fire: grant resistance and put the fire out.
      if (liv.isBurning() && !liv.isPotionActive(Effects.FIRE_RESISTANCE)) {
        liv.addPotionEffect(new EffectInstance(Effects.FIRE_RESISTANCE, 20 * 60, 4));
        liv.extinguish();
        ItemHorseEnder.onSuccess(liv);
      }
      // Long fall in progress: slow falling before impact.
      if (liv.fallDistance > 12 && !liv.isPotionActive(Effects.SLOW_FALLING)) {
        liv.addPotionEffect(new EffectInstance(Effects.SLOW_FALLING, 20 * 60, 4));
        // if (liv.getPassengers().size() > 0) {
        // liv.getPassengers().get(0).addPotionEffect(new EffectInstance(Effects.SLOW_FALLING, 20 * 60, 1));
        ItemHorseEnder.onSuccess(liv);
      }
      // Near death (< 3 hearts): absorption and resistance.
      if (liv.getHealth() < 6 && !liv.isPotionActive(Effects.ABSORPTION)) {
        liv.addPotionEffect(new EffectInstance(Effects.ABSORPTION, 20 * 60, 4));
        liv.addPotionEffect(new EffectInstance(Effects.RESISTANCE, 20 * 60, 4));
        ItemHorseEnder.onSuccess(liv);
      }
    }
  }

  //  @SubscribeEvent
  //  public void onLivingDeathEvent(LivingDeathEvent event) {
  /**
   * Bonemeal hooks: podzol with air above grows a cyan flower; bonemealing a
   * cyan flower has a 50% chance of dropping an extra one. Result.ALLOW marks
   * the event as consumed so vanilla handling is skipped.
   */
  @SubscribeEvent
  public void onBonemealEvent(BonemealEvent event) {
    World world = event.getWorld();
    BlockPos pos = event.getPos();
    if (world.getBlockState(pos).getBlock() == Blocks.PODZOL && world.isAirBlock(pos.up())) {
      world.setBlockState(pos.up(), BlockRegistry.flower_cyan.getDefaultState());
      event.setResult(Result.ALLOW);
    }
    else if (world.getBlockState(pos).getBlock() == BlockRegistry.flower_cyan) {
      event.setResult(Result.ALLOW);
      if (world.rand.nextDouble() < 0.5) {
        UtilItemStack.drop(world, pos, new ItemStack(BlockRegistry.flower_cyan));
      }
    }
  }

  /**
   * Lets a player flagged as sleeping on a sleeping mat pass the vanilla
   * "valid bed location" check.
   */
  @SubscribeEvent
  public void onBedCheck(SleepingLocationCheckEvent event) {
    if (event.getEntity() instanceof PlayerEntity) {
      PlayerEntity p = (PlayerEntity) event.getEntity();
      if (p.getPersistentData().getBoolean("cyclic_sleeping")) {
        // TODO: const in sleeping mat
        event.setResult(Result.ALLOW);
      }
    }
  }

  /**
   * Right-click with scaffolding while crouching extends the scaffold line
   * away from the clicked face.
   */
  @SubscribeEvent
  public void onRightClickBlock(RightClickBlock event) {
    if (event.getItemStack().isEmpty()) {
      return;
    }
    if (event.getItemStack().getItem() instanceof ItemScaffolding && event.getPlayer().isCrouching()) {
      scaffoldHit(event);
    }
  }

  /**
   * Places one scaffolding block at the first replaceable position (up to 16
   * blocks) in the direction opposite the clicked face, consuming one item
   * from the player's hand and cancelling the vanilla interaction.
   */
  private void scaffoldHit(RightClickBlock event) {
    ItemScaffolding item = (ItemScaffolding) event.getItemStack().getItem();
    Direction opp = event.getFace().getOpposite();
    BlockPos dest = UtilWorld.nextReplaceableInDirection(event.getWorld(), event.getPos(), opp, 16, item.getBlock());
    if (event.getWorld().isAirBlock(dest)) {
      event.getWorld().setBlockState(dest, item.getBlock().getDefaultState());
      ItemStack stac = event.getPlayer().getHeldItem(event.getHand());
      UtilItemStack.shrink(event.getPlayer(), stac);
      event.setCanceled(true);
    }
  }

  /** Delegates entity right-clicks to items that implement ItemEntityInteractable. */
  @SubscribeEvent
  public void onEntityInteractEvent(EntityInteract event) {
    if (event.getItemStack().getItem() instanceof ItemEntityInteractable) {
      ItemEntityInteractable item = (ItemEntityInteractable) event.getItemStack().getItem();
      item.interactWith(event);
    }
  }

  /**
   * Left-click mode handling for tools: shape card picks the targeted block
   * while crouching; builder items either pick a block (crouching) or cycle
   * their mode; the cable wrench cycles its mode; the antimatter wand toggles
   * its mode. Builder/wrench use a tick-based timeout so a single click does
   * not fire multiple times.
   */
  @SubscribeEvent
  public void onHit(PlayerInteractEvent.LeftClickBlock event) {
    PlayerEntity player = event.getPlayer();
    ItemStack held = player.getHeldItem(event.getHand());
    if (held.isEmpty()) {
      return;
    }
    World world = player.getEntityWorld();
    ///////////// shape
    if (held.getItem() instanceof ShapeCard && player.isCrouching()) {
      BlockState target = world.getBlockState(event.getPos());
      ShapeCard.setBlockState(held, target);
      UtilChat.sendStatusMessage(player, target.getBlock().getTranslationKey());
    }
    ///////////////// builders
    if (held.getItem() instanceof BuilderItem) {
      if (BuilderActionType.getTimeout(held) > 0) {
        //without a timeout, this fires every tick. so you 'hit once' and get this happening 6 times
        return;
      }
      BuilderActionType.setTimeout(held);
      event.setCanceled(true);
      if (player.isCrouching()) {
        //pick out target block
        BlockState target = world.getBlockState(event.getPos());
        BuilderActionType.setBlockState(held, target);
        UtilChat.sendStatusMessage(player, target.getBlock().getTranslationKey());
      }
      else {
        //change size
        // NOTE(review): mutation happens server-side only; sound/chat run on both sides.
        if (!world.isRemote) {
          BuilderActionType.toggle(held);
        }
        UtilSound.playSound(player, SoundRegistry.tool_mode);
        UtilChat.sendStatusMessage(player, UtilChat.lang(BuilderActionType.getName(held)));
      }
    }
    ////////////////////////// wrench
    if (held.getItem() instanceof CableWrench && WrenchActionType.getTimeout(held) == 0) {
      //mode
      if (!world.isRemote) {
        WrenchActionType.toggle(held);
      }
      UtilSound.playSound(player, SoundRegistry.tool_mode);
      WrenchActionType.setTimeout(held);
      UtilChat.sendStatusMessage(player, UtilChat.lang(WrenchActionType.getName(held)));
    }
    if (held.getItem() instanceof AntimatterEvaporatorWandItem) {
      AntimatterEvaporatorWandItem.toggleMode(player, held);
    }
  }

  /**
   * On item pickup, offers the stack to each storage bag in the player's
   * inventory according to that bag's pickup mode (EVERYTHING / FILTER /
   * NOTHING), stopping once a bag absorbs the whole stack. If anything was
   * absorbed the event result is set to ALLOW.
   */
  @SubscribeEvent
  public void onPlayerPickup(EntityItemPickupEvent event) {
    if (event.getEntityLiving() instanceof PlayerEntity) {
      PlayerEntity player = (PlayerEntity) event.getEntityLiving();
      ItemStack stack = event.getItem().getItem();
      ItemStack resultStack = null; // null means no bag touched the stack
      Set<Integer> bagSlots = StorageBagItem.getAllBagSlots(player);
      for (Integer i : bagSlots) {
        ItemStack bag = player.inventory.getStackInSlot(i);
        switch (StorageBagItem.getPickupMode(bag)) {
          case EVERYTHING:
            resultStack = StorageBagItem.tryInsert(bag, stack);
          break;
          case FILTER:
            resultStack = StorageBagItem.tryFilteredInsert(bag, stack);
          break;
          case NOTHING:
          break;
        }
        // NOTE(review): reference comparison — only stops early when tryInsert
        // returns the ItemStack.EMPTY sentinel itself, not any empty stack.
        // Confirm that is what StorageBagItem returns.
        if (resultStack == ItemStack.EMPTY) {
          break;
        }
      }
      if (resultStack != null) {
        event.getItem().setItem(resultStack);
      }
      if (resultStack != null && resultStack.getCount() != stack.getCount()) {
        event.setResult(Result.ALLOW);
      }
      else {
        event.setResult(Result.DEFAULT);
      }
    }
  }
}
package com.magiclabs.restapi;

import java.util.Arrays;
import java.util.Optional;

import com.eclipsesource.json.JsonObject;
import com.eclipsesource.json.JsonValue;

/**
 * Validates a JSON schema document: checks the root object, then recursively
 * checks every property definition against the set of fields allowed for its
 * declared {@code _type}. All failures are reported as
 * {@link InvalidSchemaException} (unchecked).
 */
public class SchemaValidator {

	/** JSON value categories a schema field may be required to have. */
	private enum JsonType {
		OBJECT, ARRAY, BOOLEAN, STRING, NUMBER
	}

	/**
	 * Validates the schema rooted at {@code schema.get(type)}.
	 *
	 * @param type   the schema's type name; must be the only top-level field
	 * @param schema the full schema document
	 * @return the schema unchanged, for chaining
	 * @throws InvalidSchemaException if any field or value is invalid
	 */
	public static JsonObject validate(String type, JsonObject schema)
			throws InvalidSchemaException {

		JsonObject rootObject = checkField(schema, type, true, JsonType.OBJECT)
				.get().asObject();

		// The document must contain nothing but the root type's object.
		checkIfInvalidField(schema, false, type);

		// Missing _type defaults to "object".
		String rootType = checkField(rootObject, "_type", false, JsonType.STRING)
				.orElse(JsonValue.valueOf("object")).asString();

		// if (rootType.equals("stash")) {
		// checkStashProperty(type, rootObject);
		// } else
		if (rootType.equals("object")) {
			checkField(rootObject, "_id", false, JsonType.STRING);
			Optional<JsonValue> opt = checkField(rootObject, "_acl", false,
					JsonType.OBJECT);
			if (opt.isPresent()) {
				checkAcl(type, opt.get().asObject());
			}
			checkIfInvalidField(rootObject, true, "_acl", "_id");
			checkObjectProperties(type, rootObject);
		} else
			throw InvalidSchemaException.invalidObjectType(type, rootType);

		return schema;
	}

	/** Placeholder for ACL validation. */
	private static void checkAcl(String type, JsonObject json)
			throws InvalidSchemaException {
		// TODO implement this
	}

	/**
	 * Requires at least one non-underscore property in {@code json} and
	 * validates each of them.
	 */
	private static void checkObjectProperties(String propertyName,
			JsonObject json) {
		json.names()
				.stream()
				.filter(name -> name.charAt(0) != '_')
				.findFirst()
				.orElseThrow(
						() -> InvalidSchemaException.noProperty(propertyName));

		json.names().stream().filter(name -> name.charAt(0) != '_')
				.forEach(name -> checkProperty(name, json.get(name)));
	}

	/** Validates an {@code object}-typed property and recurses into its fields. */
	private static void checkObjectProperty(String propertyName,
			JsonObject json) throws InvalidSchemaException {
		checkField(json, "_type", false, JsonType.STRING,
				JsonValue.valueOf("object"));
		checkField(json, "_required", false, JsonType.BOOLEAN);
		checkField(json, "_array", false, JsonType.BOOLEAN);
		checkIfInvalidField(json, true, "_type", "_required", "_array");
		checkObjectProperties(propertyName, json);
	}

	/**
	 * Dispatches a property definition to the checker for its declared
	 * {@code _type} (default "object").
	 *
	 * @throws InvalidSchemaException for non-object values or unknown types
	 */
	private static void checkProperty(String propertyName, JsonValue jsonValue)
			throws InvalidSchemaException {

		if (!jsonValue.isObject())
			throw new InvalidSchemaException(String.format(
					"invalid value [%s] for object property [%s]", jsonValue,
					propertyName));

		JsonObject jsonObject = jsonValue.asObject();
		Optional<JsonValue> optType = checkField(jsonObject, "_type", false,
				JsonType.STRING);
		String type = optType.isPresent() ? optType.get().asString() : "object";

		// Switch replaces the original 20-branch equals() chain; the type
		// strings are distinct so dispatch is unchanged.
		switch (type) {
		case "text":
			checkTextProperty(propertyName, jsonObject);
			break;
		case "object":
			checkObjectProperty(propertyName, jsonObject);
			break;
		case "enum":
			checkEnumProperty(propertyName, jsonObject);
			break;
		case "stash":
			checkStashProperty(propertyName, jsonObject);
			break;
		case "string":
		case "date":
		case "time":
		case "timestamp":
		case "integer":
		case "long":
		case "float":
		case "double":
		case "boolean":
		case "geopoint":
			checkSimpleProperty(jsonObject, propertyName, type);
			break;
		default:
			throw new InvalidSchemaException("Invalid field type: " + type);
		}
	}

	/** Scalar property: only _type/_required/_array settings are allowed. */
	private static void checkSimpleProperty(JsonObject json,
			String propertyName, String propertyType)
			throws InvalidSchemaException {
		checkField(json, "_required", false, JsonType.BOOLEAN);
		checkField(json, "_array", false, JsonType.BOOLEAN);
		checkIfInvalidField(json, false, "_type", "_required", "_array");
	}

	/** Stash property: only _type/_required are allowed (no _array). */
	private static void checkStashProperty(String propertyName, JsonObject json) {
		checkField(json, "_required", false, JsonType.BOOLEAN);
		checkIfInvalidField(json, false, "_type", "_required");
	}

	/** Enum property: same allowed settings as a simple property. */
	private static void checkEnumProperty(String propertyName, JsonObject json)
			throws InvalidSchemaException {
		checkField(json, "_required", false, JsonType.BOOLEAN);
		checkField(json, "_array", false, JsonType.BOOLEAN);
		checkIfInvalidField(json, false, "_type", "_required", "_array");
	}

	/** Text property: additionally allows a _language setting. */
	private static void checkTextProperty(String propertyName, JsonObject json)
			throws InvalidSchemaException {
		checkIfInvalidField(json, false, "_type", "_required", "_language",
				"_array");
		checkField(json, "_required", false, JsonType.BOOLEAN);
		checkField(json, "_language", false, JsonType.STRING);
		checkField(json, "_array", false, JsonType.BOOLEAN);
	}

	/**
	 * Throws if {@code json} contains a field not in {@code validFieldNames}.
	 * When {@code checkSettingsOnly} is true, only underscore-prefixed
	 * (setting) fields are examined.
	 */
	private static void checkIfInvalidField(JsonObject json,
			boolean checkSettingsOnly, String... validFieldNames) {
		json.names()
				.stream()
				.filter(name -> !checkSettingsOnly || name.charAt(0) == '_')
				.filter(name -> {
					for (String validName : validFieldNames) {
						if (name.equals(validName))
							return false;
					}
					return true;
				})
				.findFirst()
				.ifPresent(
						name -> {
							throw InvalidSchemaException.invalidField(name,
									validFieldNames);
						});
	}

	/**
	 * Like {@link #checkField(JsonObject, String, boolean, JsonType)} but also
	 * requires the field, when present, to equal {@code anticipatedFieldValue}.
	 */
	private static void checkField(JsonObject jsonObject, String fieldName,
			boolean required, JsonType fieldType, JsonValue anticipatedFieldValue)
			throws InvalidSchemaException {
		checkField(jsonObject, fieldName, required, fieldType)
				.ifPresent(
						fieldValue -> {
							if (!fieldValue.equals(anticipatedFieldValue))
								throw InvalidSchemaException.invalidFieldValue(
										fieldName, fieldValue,
										anticipatedFieldValue);
						});
	}

	/**
	 * Fetches {@code fieldName} from {@code jsonObject} and verifies its JSON
	 * type.
	 *
	 * @return the value, or empty when absent and not required
	 * @throws InvalidSchemaException when absent-but-required or wrongly typed
	 */
	private static Optional<JsonValue> checkField(JsonObject jsonObject,
			String fieldName, boolean required, JsonType fieldType)
			throws InvalidSchemaException {

		JsonValue fieldValue = jsonObject.get(fieldName);

		if (fieldValue == null)
			if (required)
				throw new InvalidSchemaException(
						"This schema field is required: " + fieldName);
			else
				return Optional.empty();

		if ((fieldValue.isObject() && fieldType == JsonType.OBJECT)
				|| (fieldValue.isString() && fieldType == JsonType.STRING)
				|| (fieldValue.isArray() && fieldType == JsonType.ARRAY)
				|| (fieldValue.isBoolean() && fieldType == JsonType.BOOLEAN)
				|| (fieldValue.isNumber() && fieldType == JsonType.NUMBER))
			return Optional.of(fieldValue);

		throw new InvalidSchemaException(String.format(
				"Invalid type [%s] for schema field [%s]. Must be [%s]",
				getJsonType(fieldValue), fieldName, fieldType));
	}

	/** Human-readable JSON type name for error messages. */
	private static String getJsonType(JsonValue value) {
		return value.isString() ? "string" : value.isObject() ? "object"
				: value.isNumber() ? "number" : value.isArray() ? "array"
						: value.isBoolean() ? "boolean" : "null";
	}

	/** Unchecked exception carrying a schema-validation failure message. */
	public static class InvalidSchemaException extends RuntimeException {

		private static final long serialVersionUID = 6335047694807220133L;

		public InvalidSchemaException(String message) {
			super(message);
		}

		public static InvalidSchemaException invalidField(String fieldName,
				String... expectedFiedNames) {
			return new InvalidSchemaException(String.format(
					"invalid field [%s]: expected fields are %s", fieldName,
					Arrays.toString(expectedFiedNames)));
		}

		public static InvalidSchemaException invalidObjectType(String type,
				String rootType) {
			return new InvalidSchemaException(String.format(
					"invalid root object type [%s]", rootType));
		}

		public static InvalidSchemaException invalidFieldValue(
				String fieldName, JsonValue fieldValue,
				JsonValue anticipatedFieldValue) {
			return new InvalidSchemaException(String.format(
					"schema field [%s] equal to [%s] should be equal to [%s]",
					fieldName, fieldValue, anticipatedFieldValue));
		}

		public static InvalidSchemaException noProperty(String propertyName) {
			return new InvalidSchemaException(String.format(
					"property [%s] of type [object] has no properties",
					propertyName));
		}
	}
}
package com.mashape.unirest.http;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.zip.GZIPInputStream;

import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.apache.http.StatusLine;
import org.apache.http.util.EntityUtils;

import com.mashape.unirest.http.utils.ResponseUtils;

/**
 * Typed view over an Apache HttpClient response. The entire entity body is
 * read into memory up front; {@code T} may be {@code String}, {@code JsonNode}
 * or {@code InputStream} — any other type causes a {@link RuntimeException}.
 */
public class HttpResponse<T> {

    private int statusCode;
    private String statusText;
    // Header names are stored lower-cased; each name maps to all of its values.
    private Headers headers = new Headers();
    // Full body backed by a byte array, so it can be re-read independently of 'body'.
    private InputStream rawBody;
    private T body;

    /**
     * Consumes {@code response}, capturing status line, headers and body.
     *
     * @param response      the already-executed Apache response
     * @param responseClass desired body type: String, JsonNode or InputStream
     */
    @SuppressWarnings("unchecked")
    public HttpResponse(org.apache.http.HttpResponse response, Class<T> responseClass) {
        HttpEntity responseEntity = response.getEntity();

        // Collect headers case-insensitively, preserving repeated values.
        Header[] allHeaders = response.getAllHeaders();
        for (Header header : allHeaders) {
            String headerName = header.getName().toLowerCase();
            List<String> list = headers.get(headerName);
            if (list == null)
                list = new ArrayList<String>();
            list.add(header.getValue());
            headers.put(headerName, list);
        }

        StatusLine statusLine = response.getStatusLine();
        this.statusCode = statusLine.getStatusCode();
        this.statusText = statusLine.getReasonPhrase();

        if (responseEntity != null) {
            // Default to UTF-8 unless the Content-Type header names a charset.
            String charset = "UTF-8";
            Header contentType = responseEntity.getContentType();
            if (contentType != null) {
                String responseCharset = ResponseUtils.getCharsetFromContentType(contentType.getValue());
                if (responseCharset != null && !responseCharset.trim().equals("")) {
                    charset = responseCharset;
                }
            }

            try {
                byte[] rawBody;
                try {
                    InputStream responseInputStream = responseEntity.getContent();
                    if (ResponseUtils.isGzipped(responseEntity.getContentEncoding())) {
                        // FIX: wrap the stream we already opened rather than calling
                        // getContent() a second time. For non-repeatable (streamed)
                        // entities a second getContent() is not guaranteed to return
                        // the content, and the first stream was leaked.
                        responseInputStream = new GZIPInputStream(responseInputStream);
                    }
                    rawBody = ResponseUtils.getBytes(responseInputStream);
                } catch (IOException e2) {
                    throw new RuntimeException(e2);
                }
                // Keep the whole body in memory so getRawBody() is always readable.
                InputStream inputStream = new ByteArrayInputStream(rawBody);
                this.rawBody = inputStream;

                if (JsonNode.class.equals(responseClass)) {
                    String jsonString = new String(rawBody, charset).trim();
                    this.body = (T) new JsonNode(jsonString);
                } else if (String.class.equals(responseClass)) {
                    this.body = (T) new String(rawBody, charset);
                } else if (InputStream.class.equals(responseClass)) {
                    this.body = (T) this.rawBody;
                } else {
                    // Caught and re-wrapped below, preserving the historical
                    // RuntimeException(cause=Exception(message)) shape callers may rely on.
                    throw new Exception("Unknown result type. Only String, JsonNode and InputStream are supported.");
                }
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }

        // Release any resources still held by the entity (no-op for null).
        try {
            EntityUtils.consume(responseEntity);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /** @return the HTTP status code, e.g. 200 */
    public int getStatus() {
        return statusCode;
    }

    /** @return the HTTP reason phrase, e.g. "OK" */
    public String getStatusText() {
        return statusText;
    }

    /** @return all response headers; names are lower-cased */
    public Headers getHeaders() {
        return headers;
    }

    /** @return the raw (already decompressed) body as an in-memory stream */
    public InputStream getRawBody() {
        return rawBody;
    }

    /** @return the body converted to the requested type, or null when there was no entity */
    public T getBody() {
        return body;
    }
}
package com.microsoft.aad.msal4j;

import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.Arrays;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import com.nimbusds.oauth2.sdk.ParseException;
import com.nimbusds.oauth2.sdk.SerializeException;
import com.nimbusds.oauth2.sdk.http.CommonContentTypes;
import com.nimbusds.oauth2.sdk.http.HTTPRequest;
import com.nimbusds.oauth2.sdk.http.HTTPResponse;
import com.nimbusds.oauth2.sdk.util.URLUtils;
import com.nimbusds.openid.connect.sdk.token.OIDCTokens;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import lombok.AccessLevel;
import lombok.Getter;

/**
 * Executes a single OAuth2 token request against {@code requestAuthority}'s
 * token endpoint and converts the HTTP response into an
 * {@code AuthenticationResult}, emitting telemetry for the round trip.
 */
@Getter(AccessLevel.PACKAGE)
class TokenRequest {

    Logger log = LoggerFactory.getLogger(TokenRequest.class);

    final Authority requestAuthority;
    private final MsalRequest msalRequest;
    private final ServiceBundle serviceBundle;

    TokenRequest(Authority requestAuthority, MsalRequest msalRequest, ServiceBundle serviceBundle) {
        this.requestAuthority = requestAuthority;
        this.serviceBundle = serviceBundle;
        this.msalRequest = msalRequest;
    }

    /**
     * Sends the token request and parses the response.
     *
     * @return the authentication result built from the returned tokens
     * @throws MsalServiceException when the server replies with a non-200 status
     * @throws ParseException when the success response body cannot be parsed
     * @throws SerializeException when the outgoing request cannot be serialized
     * @throws IOException on transport failure
     */
    AuthenticationResult executeOauthRequestAndProcessResponse()
            throws ParseException, MsalServiceException, SerializeException, IOException {
        HttpEvent httpEvent = createHttpEvent();
        // The telemetry helper is closed (and the event flushed) when this
        // try-with-resources block exits, on both success and failure paths.
        try (TelemetryHelper telemetryHelper = serviceBundle.getTelemetryManager().createTelemetryHelper(
                msalRequest.requestContext().getTelemetryRequestId(),
                msalRequest.application().clientId(),
                httpEvent,
                false)) {
            AuthenticationResult result;
            HTTPResponse httpResponse;
            httpResponse = toOauthHttpRequest().send();
            addResponseHeadersToHttpEvent(httpEvent, httpResponse);
            if (httpResponse.getStatusCode() == HTTPResponse.SC_OK) {
                final TokenResponse response = TokenResponse.parseHttpResponse(httpResponse);

                OIDCTokens tokens = response.getOIDCTokens();
                // Refresh token is optional in the response.
                String refreshToken = null;
                if (tokens.getRefreshToken() != null) {
                    refreshToken = tokens.getRefreshToken().getValue();
                }

                // Only build a cache entry when both an ID token and client
                // info are present in the response.
                AccountCacheEntity accountCacheEntity = null;
                if (tokens.getIDToken() != null) {
                    // Parts index 1 is the JWT payload segment, decoded to JSON text.
                    String idTokenJson = tokens.getIDToken().getParsedParts()[1].decodeToString();
                    IdToken idToken = JsonHelper.convertJsonToObject(idTokenJson, IdToken.class);

                    if (!StringHelper.isBlank(response.getClientInfo())) {
                        AuthorityType type = msalRequest.application().authenticationAuthority.authorityType;
                        if (type == AuthorityType.B2C) {
                            // B2C accounts additionally carry the policy used.
                            B2CAuthority authority = (B2CAuthority) msalRequest.application().authenticationAuthority;
                            accountCacheEntity = AccountCacheEntity.create(
                                    response.getClientInfo(),
                                    requestAuthority.host(),
                                    idToken,
                                    authority.policy);
                        } else {
                            accountCacheEntity = AccountCacheEntity.create(
                                    response.getClientInfo(),
                                    requestAuthority.host(),
                                    idToken);
                        }
                    }
                }
                // Current wall-clock time in epoch seconds; expiry fields below
                // are absolute timestamps derived from the relative lifetimes.
                long currTimestampSec = new Date().getTime() / 1000;

                result = AuthenticationResult.builder().
                        accessToken(tokens.getAccessToken().getValue()).
                        refreshToken(refreshToken).
                        familyId(response.getFoci()).
                        idToken(tokens.getIDTokenString()).
                        environment(requestAuthority.host()).
                        expiresOn(currTimestampSec + response.getExpiresIn()).
                        // 0 signals "no extended expiry" when the server sent none.
                        extExpiresOn(response.getExtExpiresIn() > 0 ?
                                currTimestampSec + response.getExtExpiresIn() : 0).
                        accountCacheEntity(accountCacheEntity).
                        scopes(response.getScope()).
                        build();
            } else {
                // Non-200: surface a service exception and record its OAuth
                // error code in the telemetry event before propagating.
                MsalServiceException exception = MsalServiceExceptionFactory.fromHttpResponse(httpResponse);
                httpEvent.setOauthErrorCode(exception.errorCode());
                throw exception;
            }
            return result;
        }
    }

    // Copies interesting response headers onto the telemetry event;
    // blank headers are skipped.
    private void addResponseHeadersToHttpEvent(HttpEvent httpEvent, HTTPResponse httpResponse) {
        httpEvent.setHttpResponseStatus(httpResponse.getStatusCode());

        if (!StringHelper.isBlank(httpResponse.getHeaderValue("User-Agent"))) {
            httpEvent.setUserAgent(httpResponse.getHeaderValue("User-Agent"));
        }

        if (!StringHelper.isBlank(httpResponse.getHeaderValue("x-ms-request-id"))) {
            httpEvent.setRequestIdHeader(httpResponse.getHeaderValue("x-ms-request-id"));
        }

        if (!StringHelper.isBlank(httpResponse.getHeaderValue("x-ms-clitelem"))) {
            XmsClientTelemetryInfo xmsClientTelemetryInfo =
                    XmsClientTelemetryInfo.parseXmsTelemetryInfo(
                            httpResponse.getHeaderValue("x-ms-clitelem"));
            if (xmsClientTelemetryInfo != null) {
                httpEvent.setXmsClientTelemetryInfo(xmsClientTelemetryInfo);
            }
        }
    }

    // Builds the telemetry event describing the outgoing POST. URL-derived
    // fields are best-effort: a URISyntaxException is logged (PII-scrubbed)
    // rather than propagated.
    private HttpEvent createHttpEvent() throws MalformedURLException {
        HttpEvent httpEvent = new HttpEvent();
        httpEvent.setHttpMethod("POST");
        try {
            httpEvent.setHttpPath(requestAuthority.tokenEndpointUrl().toURI());
            if (!StringHelper.isBlank(requestAuthority.tokenEndpointUrl().getQuery()))
                httpEvent.setQueryParameters(requestAuthority.tokenEndpointUrl().getQuery());
        } catch (URISyntaxException ex) {
            log.warn(LogHelper.createMessage("Setting URL telemetry fields failed: " +
                            LogHelper.getPiiScrubbedDetails(ex),
                    msalRequest.headers().getHeaderCorrelationIdValue()));
        }
        return httpEvent;
    }

    /**
     * Serializes this request into a form-encoded POST to the authority's
     * token endpoint, applying client authentication when configured.
     *
     * @throws SerializeException when the token endpoint URL is missing
     */
    OAuthHttpRequest toOauthHttpRequest() throws SerializeException, MalformedURLException {

        if (requestAuthority.tokenEndpointUrl() == null) {
            throw new SerializeException("The endpoint URI is not specified");
        }

        final OAuthHttpRequest oauthHttpRequest = new OAuthHttpRequest(
                HTTPRequest.Method.POST,
                requestAuthority.tokenEndpointUrl(),
                msalRequest.headers().getReadonlyHeaderMap(),
                this.serviceBundle);
        oauthHttpRequest.setContentType(CommonContentTypes.APPLICATION_URLENCODED);

        final Map<String, List<String>> params = msalRequest.msalAuthorizationGrant().toParameters();
        oauthHttpRequest.setQuery(URLUtils.serializeParameters(params));

        if (msalRequest.application().clientAuthentication != null) {
            msalRequest.application().clientAuthentication.applyTo(oauthHttpRequest);
        }

        return oauthHttpRequest;
    }
}
package com.microsoft.sqlserver.jdbc; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.Reader; import java.io.UnsupportedEncodingException; import java.math.BigDecimal; import java.math.BigInteger; import java.math.RoundingMode; import java.net.Inet4Address; import java.net.Inet6Address; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.Socket; import java.net.SocketAddress; import java.net.SocketException; import java.net.SocketTimeoutException; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.nio.channels.SelectionKey; import java.nio.channels.Selector; import java.nio.channels.SocketChannel; import java.nio.charset.Charset; import java.security.KeyStore; import java.security.Provider; import java.security.Security; import java.security.cert.CertificateException; import java.security.cert.X509Certificate; import java.sql.Timestamp; import java.text.MessageFormat; import java.time.OffsetDateTime; import java.time.OffsetTime; import java.util.Arrays; import java.util.Calendar; import java.util.Collection; import java.util.GregorianCalendar; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.SimpleTimeZone; import java.util.TimeZone; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.SynchronousQueue; import java.util.concurrent.ThreadFactory; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.logging.Level; import java.util.logging.Logger; import javax.net.ssl.SSLContext; import 
javax.net.ssl.SSLSocket; import javax.net.ssl.TrustManager; import javax.net.ssl.TrustManagerFactory; import javax.net.ssl.X509TrustManager; import javax.xml.bind.DatatypeConverter; final class TDS { // TDS protocol versions static final int VER_DENALI = 0x74000004; // TDS 7.4 static final int VER_KATMAI = 0x730B0003; // TDS 7.3B(includes null bit compression) static final int VER_YUKON = 0x72090002; // TDS 7.2 static final int VER_UNKNOWN = 0x00000000; // Unknown/uninitialized static final int TDS_RET_STAT = 0x79; static final int TDS_COLMETADATA = 0x81; static final int TDS_TABNAME = 0xA4; static final int TDS_COLINFO = 0xA5; static final int TDS_ORDER = 0xA9; static final int TDS_ERR = 0xAA; static final int TDS_MSG = 0xAB; static final int TDS_RETURN_VALUE = 0xAC; static final int TDS_LOGIN_ACK = 0xAD; static final int TDS_FEATURE_EXTENSION_ACK = 0xAE; static final int TDS_ROW = 0xD1; static final int TDS_NBCROW = 0xD2; static final int TDS_ENV_CHG = 0xE3; static final int TDS_SSPI = 0xED; static final int TDS_DONE = 0xFD; static final int TDS_DONEPROC = 0xFE; static final int TDS_DONEINPROC = 0xFF; static final int TDS_FEDAUTHINFO = 0xEE; // FedAuth static final int TDS_FEATURE_EXT_FEDAUTH = 0x02; static final int TDS_FEDAUTH_LIBRARY_SECURITYTOKEN = 0x01; static final int TDS_FEDAUTH_LIBRARY_ADAL = 0x02; static final int TDS_FEDAUTH_LIBRARY_RESERVED = 0x7F; static final byte ADALWORKFLOW_ACTIVEDIRECTORYPASSWORD = 0x01; static final byte ADALWORKFLOW_ACTIVEDIRECTORYINTEGRATED = 0x02; static final byte FEDAUTH_INFO_ID_STSURL = 0x01; // FedAuthInfoData is token endpoint URL from which to acquire fed auth token static final byte FEDAUTH_INFO_ID_SPN = 0x02; // FedAuthInfoData is the SPN to use for acquiring fed auth token // AE constants static final int TDS_FEATURE_EXT_AE = 0x04; static final int MAX_SUPPORTED_TCE_VERSION = 0x01; // max version static final int CUSTOM_CIPHER_ALGORITHM_ID = 0; // max version static final int AES_256_CBC = 1; static final int 
AEAD_AES_256_CBC_HMAC_SHA256 = 2; static final int AE_METADATA = 0x08; static final int TDS_TVP = 0xF3; static final int TVP_ROW = 0x01; static final int TVP_NULL_TOKEN = 0xFFFF; static final int TVP_STATUS_DEFAULT = 0x02; static final int TVP_ORDER_UNIQUE_TOKEN = 0x10; // TVP_ORDER_UNIQUE_TOKEN flags static final byte TVP_ORDERASC_FLAG = 0x1; static final byte TVP_ORDERDESC_FLAG = 0x2; static final byte TVP_UNIQUE_FLAG = 0x4; // TVP flags, may be used in other places static final int FLAG_NULLABLE = 0x01; static final int FLAG_TVP_DEFAULT_COLUMN = 0x200; static final int FEATURE_EXT_TERMINATOR = -1; static final String getTokenName(int tdsTokenType) { switch (tdsTokenType) { case TDS_RET_STAT: return "TDS_RET_STAT (0x79)"; case TDS_COLMETADATA: return "TDS_COLMETADATA (0x81)"; case TDS_TABNAME: return "TDS_TABNAME (0xA4)"; case TDS_COLINFO: return "TDS_COLINFO (0xA5)"; case TDS_ORDER: return "TDS_ORDER (0xA9)"; case TDS_ERR: return "TDS_ERR (0xAA)"; case TDS_MSG: return "TDS_MSG (0xAB)"; case TDS_RETURN_VALUE: return "TDS_RETURN_VALUE (0xAC)"; case TDS_LOGIN_ACK: return "TDS_LOGIN_ACK (0xAD)"; case TDS_FEATURE_EXTENSION_ACK: return "TDS_FEATURE_EXTENSION_ACK (0xAE)"; case TDS_ROW: return "TDS_ROW (0xD1)"; case TDS_NBCROW: return "TDS_NBCROW (0xD2)"; case TDS_ENV_CHG: return "TDS_ENV_CHG (0xE3)"; case TDS_SSPI: return "TDS_SSPI (0xED)"; case TDS_DONE: return "TDS_DONE (0xFD)"; case TDS_DONEPROC: return "TDS_DONEPROC (0xFE)"; case TDS_DONEINPROC: return "TDS_DONEINPROC (0xFF)"; case TDS_FEDAUTHINFO: return "TDS_FEDAUTHINFO (0xEE)"; default: return "unknown token (0x" + Integer.toHexString(tdsTokenType).toUpperCase() + ")"; } } // RPC ProcIDs for use with RPCRequest (PKT_RPC) calls static final short PROCID_SP_CURSOR = 1; static final short PROCID_SP_CURSOROPEN = 2; static final short PROCID_SP_CURSORPREPARE = 3; static final short PROCID_SP_CURSOREXECUTE = 4; static final short PROCID_SP_CURSORPREPEXEC = 5; static final short PROCID_SP_CURSORUNPREPARE = 6; static 
final short PROCID_SP_CURSORFETCH = 7; static final short PROCID_SP_CURSOROPTION = 8; static final short PROCID_SP_CURSORCLOSE = 9; static final short PROCID_SP_EXECUTESQL = 10; static final short PROCID_SP_PREPARE = 11; static final short PROCID_SP_EXECUTE = 12; static final short PROCID_SP_PREPEXEC = 13; static final short PROCID_SP_PREPEXECRPC = 14; static final short PROCID_SP_UNPREPARE = 15; // Constants for use with cursor RPCs static final short SP_CURSOR_OP_UPDATE = 1; static final short SP_CURSOR_OP_DELETE = 2; static final short SP_CURSOR_OP_INSERT = 4; static final short SP_CURSOR_OP_REFRESH = 8; static final short SP_CURSOR_OP_LOCK = 16; static final short SP_CURSOR_OP_SETPOSITION = 32; static final short SP_CURSOR_OP_ABSOLUTE = 64; // Constants for server-cursored result sets. // See the Engine Cursors Functional Specification for details. static final int FETCH_FIRST = 1; static final int FETCH_NEXT = 2; static final int FETCH_PREV = 4; static final int FETCH_LAST = 8; static final int FETCH_ABSOLUTE = 16; static final int FETCH_RELATIVE = 32; static final int FETCH_REFRESH = 128; static final int FETCH_INFO = 256; static final int FETCH_PREV_NOADJUST = 512; static final byte RPC_OPTION_NO_METADATA = (byte) 0x02; // Transaction manager request types static final short TM_GET_DTC_ADDRESS = 0; static final short TM_PROPAGATE_XACT = 1; static final short TM_BEGIN_XACT = 5; static final short TM_PROMOTE_PROMOTABLE_XACT = 6; static final short TM_COMMIT_XACT = 7; static final short TM_ROLLBACK_XACT = 8; static final short TM_SAVE_XACT = 9; static final byte PKT_QUERY = 1; static final byte PKT_RPC = 3; static final byte PKT_REPLY = 4; static final byte PKT_CANCEL_REQ = 6; static final byte PKT_BULK = 7; static final byte PKT_DTC = 14; static final byte PKT_LOGON70 = 16; // 0x10 static final byte PKT_SSPI = 17; static final byte PKT_PRELOGIN = 18; // 0x12 static final byte PKT_FEDAUTH_TOKEN_MESSAGE = 8; // Authentication token for federated authentication 
static final byte STATUS_NORMAL = 0x00; static final byte STATUS_BIT_EOM = 0x01; static final byte STATUS_BIT_ATTENTION = 0x02;// this is called ignore bit in TDS spec static final byte STATUS_BIT_RESET_CONN = 0x08; // Various TDS packet size constants static final int INVALID_PACKET_SIZE = -1; static final int INITIAL_PACKET_SIZE = 4096; static final int MIN_PACKET_SIZE = 512; static final int MAX_PACKET_SIZE = 32767; static final int DEFAULT_PACKET_SIZE = 8000; static final int SERVER_PACKET_SIZE = 0; // Accept server's configured packet size // TDS packet header size and offsets static final int PACKET_HEADER_SIZE = 8; static final int PACKET_HEADER_MESSAGE_TYPE = 0; static final int PACKET_HEADER_MESSAGE_STATUS = 1; static final int PACKET_HEADER_MESSAGE_LENGTH = 2; static final int PACKET_HEADER_SPID = 4; static final int PACKET_HEADER_SEQUENCE_NUM = 6; static final int PACKET_HEADER_WINDOW = 7; // Reserved/Not used // MARS header length: // 2 byte header type // 8 byte transaction descriptor // 4 byte outstanding request count static final int MARS_HEADER_LENGTH = 18; // 2 byte header type, 8 byte transaction descriptor, static final int TRACE_HEADER_LENGTH = 26; // header length (4) + header type (2) + guid (16) + Sequence number size (4) static final short HEADERTYPE_TRACE = 3; // trace header type // Message header length static final int MESSAGE_HEADER_LENGTH = MARS_HEADER_LENGTH + 4; // length includes message header itself static final byte B_PRELOGIN_OPTION_VERSION = 0x00; static final byte B_PRELOGIN_OPTION_ENCRYPTION = 0x01; static final byte B_PRELOGIN_OPTION_INSTOPT = 0x02; static final byte B_PRELOGIN_OPTION_THREADID = 0x03; static final byte B_PRELOGIN_OPTION_MARS = 0x04; static final byte B_PRELOGIN_OPTION_TRACEID = 0x05; static final byte B_PRELOGIN_OPTION_FEDAUTHREQUIRED = 0x06; static final byte B_PRELOGIN_OPTION_TERMINATOR = (byte) 0xFF; // Login option byte 1 static final byte LOGIN_OPTION1_ORDER_X86 = 0x00; static final byte 
LOGIN_OPTION1_ORDER_6800 = 0x01; static final byte LOGIN_OPTION1_CHARSET_ASCII = 0x00; static final byte LOGIN_OPTION1_CHARSET_EBCDIC = 0x02; static final byte LOGIN_OPTION1_FLOAT_IEEE_754 = 0x00; static final byte LOGIN_OPTION1_FLOAT_VAX = 0x04; static final byte LOGIN_OPTION1_FLOAT_ND5000 = 0x08; static final byte LOGIN_OPTION1_DUMPLOAD_ON = 0x00; static final byte LOGIN_OPTION1_DUMPLOAD_OFF = 0x10; static final byte LOGIN_OPTION1_USE_DB_ON = 0x00; static final byte LOGIN_OPTION1_USE_DB_OFF = 0x20; static final byte LOGIN_OPTION1_INIT_DB_WARN = 0x00; static final byte LOGIN_OPTION1_INIT_DB_FATAL = 0x40; static final byte LOGIN_OPTION1_SET_LANG_OFF = 0x00; static final byte LOGIN_OPTION1_SET_LANG_ON = (byte) 0x80; // Login option byte 2 static final byte LOGIN_OPTION2_INIT_LANG_WARN = 0x00; static final byte LOGIN_OPTION2_INIT_LANG_FATAL = 0x01; static final byte LOGIN_OPTION2_ODBC_OFF = 0x00; static final byte LOGIN_OPTION2_ODBC_ON = 0x02; static final byte LOGIN_OPTION2_TRAN_BOUNDARY_OFF = 0x00; static final byte LOGIN_OPTION2_TRAN_BOUNDARY_ON = 0x04; static final byte LOGIN_OPTION2_CACHE_CONNECTION_OFF = 0x00; static final byte LOGIN_OPTION2_CACHE_CONNECTION_ON = 0x08; static final byte LOGIN_OPTION2_USER_NORMAL = 0x00; static final byte LOGIN_OPTION2_USER_SERVER = 0x10; static final byte LOGIN_OPTION2_USER_REMUSER = 0x20; static final byte LOGIN_OPTION2_USER_SQLREPL = 0x30; static final byte LOGIN_OPTION2_INTEGRATED_SECURITY_OFF = 0x00; static final byte LOGIN_OPTION2_INTEGRATED_SECURITY_ON = (byte) 0x80; // Login option byte 3 static final byte LOGIN_OPTION3_DEFAULT = 0x00; static final byte LOGIN_OPTION3_CHANGE_PASSWORD = 0x01; static final byte LOGIN_OPTION3_SEND_YUKON_BINARY_XML = 0x02; static final byte LOGIN_OPTION3_USER_INSTANCE = 0x04; static final byte LOGIN_OPTION3_UNKNOWN_COLLATION_HANDLING = 0x08; static final byte LOGIN_OPTION3_FEATURE_EXTENSION = 0x10; // Login type flag (bits 5 - 7 reserved for future use) static final byte LOGIN_SQLTYPE_DEFAULT 
= 0x00; static final byte LOGIN_SQLTYPE_TSQL = 0x01; static final byte LOGIN_SQLTYPE_ANSI_V1 = 0x02; static final byte LOGIN_SQLTYPE_ANSI89_L1 = 0x03; static final byte LOGIN_SQLTYPE_ANSI89_L2 = 0x04; static final byte LOGIN_SQLTYPE_ANSI89_IEF = 0x05; static final byte LOGIN_SQLTYPE_ANSI89_ENTRY = 0x06; static final byte LOGIN_SQLTYPE_ANSI89_TRANS = 0x07; static final byte LOGIN_SQLTYPE_ANSI89_INTER = 0x08; static final byte LOGIN_SQLTYPE_ANSI89_FULL = 0x09; static final byte LOGIN_OLEDB_OFF = 0x00; static final byte LOGIN_OLEDB_ON = 0x10; static final byte LOGIN_READ_ONLY_INTENT = 0x20; static final byte LOGIN_READ_WRITE_INTENT = 0x00; static final byte ENCRYPT_OFF = 0x00; static final byte ENCRYPT_ON = 0x01; static final byte ENCRYPT_NOT_SUP = 0x02; static final byte ENCRYPT_REQ = 0x03; static final byte ENCRYPT_INVALID = (byte) 0xFF; static final String getEncryptionLevel(int level) { switch (level) { case ENCRYPT_OFF: return "OFF"; case ENCRYPT_ON: return "ON"; case ENCRYPT_NOT_SUP: return "NOT SUPPORTED"; case ENCRYPT_REQ: return "REQUIRED"; default: return "unknown encryption level (0x" + Integer.toHexString(level).toUpperCase() + ")"; } } // Prelogin packet length, including the tds header, // version, encrpytion, and traceid data sessions. // For detailed info, please check the definition of // preloginRequest in Prelogin function. static final byte B_PRELOGIN_MESSAGE_LENGTH = 67; static final byte B_PRELOGIN_MESSAGE_LENGTH_WITH_FEDAUTH = 73; // Scroll options and concurrency options lifted out // of the the Yukon cursors spec for sp_cursoropen. 
final static int SCROLLOPT_KEYSET = 1; final static int SCROLLOPT_DYNAMIC = 2; final static int SCROLLOPT_FORWARD_ONLY = 4; final static int SCROLLOPT_STATIC = 8; final static int SCROLLOPT_FAST_FORWARD = 16; final static int SCROLLOPT_PARAMETERIZED_STMT = 4096; final static int SCROLLOPT_AUTO_FETCH = 8192; final static int SCROLLOPT_AUTO_CLOSE = 16384; final static int CCOPT_READ_ONLY = 1; final static int CCOPT_SCROLL_LOCKS = 2; final static int CCOPT_OPTIMISTIC_CC = 4; final static int CCOPT_OPTIMISTIC_CCVAL = 8; final static int CCOPT_ALLOW_DIRECT = 8192; final static int CCOPT_UPDT_IN_PLACE = 16384; // Result set rows include an extra, "hidden" ROWSTAT column which indicates // the overall success or failure of the row fetch operation. With a keyset // cursor, the value in the ROWSTAT column indicates whether the row has been // deleted from the database. static final int ROWSTAT_FETCH_SUCCEEDED = 1; static final int ROWSTAT_FETCH_MISSING = 2; // ColumnInfo status final static int COLINFO_STATUS_EXPRESSION = 0x04; final static int COLINFO_STATUS_KEY = 0x08; final static int COLINFO_STATUS_HIDDEN = 0x10; final static int COLINFO_STATUS_DIFFERENT_NAME = 0x20; final static int MAX_FRACTIONAL_SECONDS_SCALE = 7; final static Timestamp MAX_TIMESTAMP = Timestamp.valueOf("2079-06-06 23:59:59"); final static Timestamp MIN_TIMESTAMP = Timestamp.valueOf("1900-01-01 00:00:00"); static int nanosSinceMidnightLength(int scale) { final int[] scaledLengths = {3, 3, 3, 4, 4, 5, 5, 5}; assert scale >= 0; assert scale <= MAX_FRACTIONAL_SECONDS_SCALE; return scaledLengths[scale]; } final static int DAYS_INTO_CE_LENGTH = 3; final static int MINUTES_OFFSET_LENGTH = 2; // Number of days in a "normal" (non-leap) year according to SQL Server. 
final static int DAYS_PER_YEAR = 365; final static int BASE_YEAR_1900 = 1900; final static int BASE_YEAR_1970 = 1970; final static String BASE_DATE_1970 = "1970-01-01"; static int timeValueLength(int scale) { return nanosSinceMidnightLength(scale); } static int datetime2ValueLength(int scale) { return DAYS_INTO_CE_LENGTH + nanosSinceMidnightLength(scale); } static int datetimeoffsetValueLength(int scale) { return DAYS_INTO_CE_LENGTH + MINUTES_OFFSET_LENGTH + nanosSinceMidnightLength(scale); } // TDS is just a namespace - it can't be instantiated. private TDS() { } } class Nanos { static final int PER_SECOND = 1000000000; static final int PER_MAX_SCALE_INTERVAL = PER_SECOND / (int) Math.pow(10, TDS.MAX_FRACTIONAL_SECONDS_SCALE); static final int PER_MILLISECOND = PER_SECOND / 1000; static final long PER_DAY = 24 * 60 * 60 * (long) PER_SECOND; private Nanos() { } } // Constants relating to the historically accepted Julian-Gregorian calendar cutover date (October 15, 1582). // Used in processing SQL Server temporal data types whose date component may precede that date. // Scoping these constants to a class defers their initialization to first use. class GregorianChange { // Cutover date for a pure Gregorian calendar - that is, a proleptic Gregorian calendar with // Gregorian leap year behavior throughout its entire range. This is the cutover date is used // with temporal server values, which are represented in terms of number of days relative to a // base date. static final java.util.Date PURE_CHANGE_DATE = new java.util.Date(Long.MIN_VALUE); // The standard Julian to Gregorian cutover date (October 15, 1582) that the JDBC temporal // classes (Time, Date, Timestamp) assume when converting to and from their UTC milliseconds // representations. 
// The JVM's default Julian-to-Gregorian calendar change-over date
// (October 15, 1582 for a default-configured GregorianCalendar).
static final java.util.Date STANDARD_CHANGE_DATE = (new GregorianCalendar(Locale.US)).getGregorianChange();

// A hint as to the number of days since 1/1/0001, past which we do not need to
// rationalize the difference between SQL Server behavior (pure Gregorian)
// and Java behavior (standard Gregorian).
// Not having to rationalize the difference has a substantial (measured) performance benefit
// for temporal getters.
// The hint does not need to be exact, as long as it's later than the actual change date.
static final int DAYS_SINCE_BASE_DATE_HINT = DDC.daysSinceBaseDate(1583, 1, 1);

// Extra days that need to be added to a pure gregorian date, post the gregorian
// cut over date, to match the default julian-gregorian calendar date of java.
static final int EXTRA_DAYS_TO_BE_ADDED;

static {
    // This issue refers to the following bugs in java(same issue).
    // The issue is fixed in JRE 1.7
    // and exists in all the older versions.
    // Due to the above bug, in older JVM versions(1.6 and before),
    // the date calculation is incorrect at the Gregorian cut over date.
    // i.e. the next date after Oct 4th 1582 is Oct 17th 1582, where as
    // it should have been Oct 15th 1582.
    // We intentionally do not make a check based on JRE version.
    // If we do so, our code would break if the bug is fixed in a later update
    // to an older JRE. So, we check for the existence of the bug instead.
    GregorianCalendar cal = new GregorianCalendar(Locale.US);
    cal.clear();
    cal.set(1, 1, 577738, 0, 0, 0);// 577738 = 1+577737(no of days since epoch that brings us to oct 15th 1582)
    if (cal.get(Calendar.DAY_OF_MONTH) == 15) {
        // If the date calculation is correct(the above bug is fixed),
        // post the default gregorian cut over date, the pure gregorian date
        // falls short by two days for all dates compared to julian-gregorian date.
        // so, we add two extra days for functional correctness.
        // Note: other ways, in which this issue can be fixed instead of
        // trying to detect the JVM bug is
        // a) use unoptimized code path in the function convertTemporalToObject
        // b) use cal.add api instead of cal.set api in the current optimized code path
        // In both the above approaches, the code is about 6-8 times slower,
        // resulting in an overall perf regression of about (10-30)% for perf test cases
        EXTRA_DAYS_TO_BE_ADDED = 2;
    }
    else
        EXTRA_DAYS_TO_BE_ADDED = 0;
}

// Not instantiable: constants holder only.
private GregorianChange() {
}
}

// UTC/GMT time zone singleton. The enum type delays initialization until first use.
enum UTC {
    INSTANCE;

    static final TimeZone timeZone = new SimpleTimeZone(0, "UTC");
}

final class TDSChannel {
    private static final Logger logger = Logger.getLogger("com.microsoft.sqlserver.jdbc.internals.TDS.Channel");

    final Logger getLogger() {
        return logger;
    }

    // Fixed identifier used to correlate log entries with this channel instance.
    private final String traceID;

    final public String toString() {
        return traceID;
    }

    private final SQLServerConnection con;

    private final TDSWriter tdsWriter;

    final TDSWriter getWriter() {
        return tdsWriter;
    }

    final TDSReader getReader(TDSCommand command) {
        return new TDSReader(this, con, command);
    }

    // Socket for raw TCP/IP communications with SQL Server
    private Socket tcpSocket;

    // Socket for SSL-encrypted communications with SQL Server
    private SSLSocket sslSocket;

    // Socket providing the communications interface to the driver.
    // For SSL-encrypted connections, this is the SSLSocket wrapped
    // around the TCP socket. For unencrypted connections, it is
    // just the TCP socket itself.
    private Socket channelSocket;

    // Implementation of a Socket proxy that can switch from TDS-wrapped I/O
    // (using the TDSChannel itself) during SSL handshake to raw I/O over
    // the TCP/IP socket.
    ProxySocket proxySocket = null;

    // I/O streams for raw TCP/IP communications with SQL Server
    private InputStream tcpInputStream;
    private OutputStream tcpOutputStream;

    // I/O streams providing the communications interface to the driver.
    // For SSL-encrypted connections, these are streams obtained from
    // the SSL socket above. They wrap the underlying TCP streams.
    // For unencrypted connections, they are just the TCP streams themselves.
    private InputStream inputStream;
    private OutputStream outputStream;

    /** TDS packet payload logger */
    private static Logger packetLogger = Logger.getLogger("com.microsoft.sqlserver.jdbc.internals.TDS.DATA");

    // Cached once per channel so per-packet logging checks are cheap.
    private final boolean isLoggingPackets = packetLogger.isLoggable(Level.FINEST);

    final boolean isLoggingPackets() {
        return isLoggingPackets;
    }

    // Number of TDS messages sent to and received from the server
    int numMsgsSent = 0;
    int numMsgsRcvd = 0;

    // Last SPID received from the server. Used for logging and to tag subsequent outgoing
    // packets to facilitate diagnosing problems from the server side.
    private int spid = 0;

    void setSPID(int spid) {
        this.spid = spid;
    }

    int getSPID() {
        return spid;
    }

    void resetPooledConnection() {
        tdsWriter.resetPooledConnection();
    }

    // Constructs an unopened channel for the given connection. All socket and
    // stream references stay null until open() is called.
    TDSChannel(SQLServerConnection con) {
        this.con = con;
        traceID = "TDSChannel (" + con.toString() + ")";
        this.tcpSocket = null;
        this.sslSocket = null;
        this.channelSocket = null;
        this.tcpInputStream = null;
        this.tcpOutputStream = null;
        this.inputStream = null;
        this.outputStream = null;
        this.tdsWriter = new TDSWriter(this, con);
    }

    /**
     * Opens the physical communications channel (TCP/IP socket and I/O streams) to the SQL Server.
 */
final void open(String host, int port, int timeoutMillis, boolean useParallel, boolean useTnir, boolean isTnirFirstAttempt,
        int timeoutMillisForFullTimeout) throws SQLServerException {
    if (logger.isLoggable(Level.FINER))
        logger.finer(this.toString() + ": Opening TCP socket...");

    // SocketFinder encapsulates host resolution / parallel connection attempts.
    SocketFinder socketFinder = new SocketFinder(traceID, con);
    channelSocket = tcpSocket = socketFinder.findSocket(host, port, timeoutMillis, useParallel, useTnir, isTnirFirstAttempt,
            timeoutMillisForFullTimeout);

    try {
        // Set socket options
        tcpSocket.setTcpNoDelay(true);
        tcpSocket.setKeepAlive(true);

        // set SO_TIMEOUT
        int socketTimeout = con.getSocketTimeoutMilliseconds();
        tcpSocket.setSoTimeout(socketTimeout);

        inputStream = tcpInputStream = tcpSocket.getInputStream();
        outputStream = tcpOutputStream = tcpSocket.getOutputStream();
    } catch (IOException ex) {
        // Translate any socket setup failure into a driver connect exception
        // (this call throws; it does not return normally).
        SQLServerException.ConvertConnectExceptionToSQLServerException(host, port, con, ex);
    }
}

/**
 * Disables SSL on this TDS channel.
 */
void disableSSL() {
    if (logger.isLoggable(Level.FINER))
        logger.finer(toString() + " Disabling SSL...");

    /*
     * The mission: To close the SSLSocket and release everything that it is holding onto other than the TCP/IP socket and streams.
     *
     * The challenge: Simply closing the SSLSocket tries to do additional, unnecessary shutdown I/O over the TCP/IP streams that are bound to the
     * socket proxy, resulting in a hang and confusing SQL Server.
     *
     * Solution: Rewire the ProxySocket's input and output streams (one more time) to closed streams. SSLSocket sees that the streams are already
     * closed and does not attempt to do any further I/O on them before closing itself.
     */

    // Create a couple of cheap closed streams
    InputStream is = new ByteArrayInputStream(new byte[0]);
    try {
        is.close();
    } catch (IOException e) {
        // No reason to expect a brand new ByteArrayInputStream not to close,
        // but just in case...
        logger.fine("Ignored error closing InputStream: " + e.getMessage());
    }

    OutputStream os = new ByteArrayOutputStream();
    try {
        os.close();
    } catch (IOException e) {
        // No reason to expect a brand new ByteArrayOutputStream not to close,
        // but just in case...
        logger.fine("Ignored error closing OutputStream: " + e.getMessage());
    }

    // Rewire the proxy socket to the closed streams
    if (logger.isLoggable(Level.FINEST))
        logger.finest(toString() + " Rewiring proxy streams for SSL socket close");
    proxySocket.setStreams(is, os);

    // Now close the SSL socket. It will see that the proxy socket's streams
    // are closed and not try to do any further I/O over them.
    try {
        if (logger.isLoggable(Level.FINER))
            logger.finer(toString() + " Closing SSL socket");

        sslSocket.close();
    } catch (IOException e) {
        // Don't care if we can't close the SSL socket. We're done with it anyway.
        logger.fine("Ignored error closing SSLSocket: " + e.getMessage());
    }

    // Do not close the proxy socket. Doing so would close our TCP socket
    // to which the proxy socket is bound. Instead, just null out the reference
    // to free up the few resources it holds onto.
    proxySocket = null;

    // Finally, with all of the SSL support out of the way, put the TDSChannel
    // back to using the TCP/IP socket and streams directly.
    inputStream = tcpInputStream;
    outputStream = tcpOutputStream;
    channelSocket = tcpSocket;
    sslSocket = null;

    if (logger.isLoggable(Level.FINER))
        logger.finer(toString() + " SSL disabled");
}

/**
 * Used during SSL handshake, this class implements an InputStream that reads SSL handshake response data (framed in TDS messages) from the TDS
 * channel.
 */
private class SSLHandshakeInputStream extends InputStream {
    private final TDSReader tdsReader;

    // Paired output stream; flushed before reading so the server has seen
    // the full handshake request (see ensureSSLPayload).
    private final SSLHandshakeOutputStream sslHandshakeOutputStream;

    private final Logger logger;
    private final String logContext;

    SSLHandshakeInputStream(TDSChannel tdsChannel, SSLHandshakeOutputStream sslHandshakeOutputStream) {
        this.tdsReader = tdsChannel.getReader(null);
        this.sslHandshakeOutputStream = sslHandshakeOutputStream;
        this.logger = tdsChannel.getLogger();
        this.logContext = tdsChannel.toString() + " (SSLHandshakeInputStream):";
    }

    /**
     * If there is no handshake response data available to be read from existing packets then this method ensures that the SSL handshake output
     * stream has been flushed to the server, and reads another packet (starting the next TDS response message).
     *
     * Note that simply using TDSReader.ensurePayload isn't sufficient as it does not automatically start the new response message.
     */
    private void ensureSSLPayload() throws IOException {
        if (0 == tdsReader.available()) {
            if (logger.isLoggable(Level.FINEST))
                logger.finest(logContext + " No handshake response bytes available. Flushing SSL handshake output stream.");

            try {
                sslHandshakeOutputStream.endMessage();
            }
            catch (SQLServerException e) {
                logger.finer(logContext + " Ending TDS message threw exception:" + e.getMessage());
                // Wrap driver exceptions as IOException since InputStream only allows IOException.
                throw new IOException(e.getMessage());
            }

            if (logger.isLoggable(Level.FINEST))
                logger.finest(logContext + " Reading first packet of SSL handshake response");

            try {
                tdsReader.readPacket();
            }
            catch (SQLServerException e) {
                logger.finer(logContext + " Reading response packet threw exception:" + e.getMessage());
                throw new IOException(e.getMessage());
            }
        }
    }

    public long skip(long n) throws IOException {
        if (logger.isLoggable(Level.FINEST))
            logger.finest(logContext + " Skipping " + n + " bytes...");

        if (n <= 0)
            return 0;

        // Clamp to int range; tdsReader.skip takes an int.
        if (n > Integer.MAX_VALUE)
            n = Integer.MAX_VALUE;

        ensureSSLPayload();

        try {
            tdsReader.skip((int) n);
        }
        catch (SQLServerException e) {
            logger.finer(logContext + " Skipping bytes threw exception:" + e.getMessage());
            throw new IOException(e.getMessage());
        }

        return n;
    }

    private final byte oneByte[] = new byte[1];

    public int read() throws IOException {
        int bytesRead;

        // Loop until a byte (1) or end of stream (-1) is produced.
        while (0 == (bytesRead = readInternal(oneByte, 0, oneByte.length)))
            ;

        assert 1 == bytesRead || -1 == bytesRead;
        return 1 == bytesRead ? oneByte[0] : -1;
    }

    public int read(byte[] b) throws IOException {
        return readInternal(b, 0, b.length);
    }

    public int read(byte b[], int offset, int maxBytes) throws IOException {
        return readInternal(b, offset, maxBytes);
    }

    // Reads exactly maxBytes from the TDS reader (tdsReader.readBytes fills
    // the requested range) and returns that count.
    private int readInternal(byte b[], int offset, int maxBytes) throws IOException {
        if (logger.isLoggable(Level.FINEST))
            logger.finest(logContext + " Reading " + maxBytes + " bytes...");

        ensureSSLPayload();

        try {
            tdsReader.readBytes(b, offset, maxBytes);
        }
        catch (SQLServerException e) {
            logger.finer(logContext + " Reading bytes threw exception:" + e.getMessage());
            throw new IOException(e.getMessage());
        }

        return maxBytes;
    }
}

/**
 * Used during SSL handshake, this class implements an OutputStream that writes SSL handshake request data (framed in TDS messages) to the TDS
 * channel.
 */
private class SSLHandshakeOutputStream extends OutputStream {
    private final TDSWriter tdsWriter;

    /** Flag indicating when it is necessary to start a new prelogin TDS message */
    private boolean messageStarted;

    private final Logger logger;
    private final String logContext;

    SSLHandshakeOutputStream(TDSChannel tdsChannel) {
        this.tdsWriter = tdsChannel.getWriter();
        this.messageStarted = false;
        this.logger = tdsChannel.getLogger();
        this.logContext = tdsChannel.toString() + " (SSLHandshakeOutputStream):";
    }

    public void flush() throws IOException {
        // It seems that the security provider implementation in some JVMs
        // (notably SunJSSE in the 6.0 JVM) likes to add spurious calls to
        // flush the SSL handshake output stream during SSL handshaking.
        // We need to ignore these calls because the SSL handshake payload
        // needs to be completely encapsulated in TDS. The SSL handshake
        // input stream always ensures that this output stream has been flushed
        // before trying to read the response.
        if (logger.isLoggable(Level.FINEST))
            logger.finest(logContext + " Ignored a request to flush the stream");
    }

    void endMessage() throws SQLServerException {
        // We should only be asked to end the message if we have started one
        assert messageStarted;

        if (logger.isLoggable(Level.FINEST))
            logger.finest(logContext + " Finishing TDS message");

        // Flush any remaining bytes through the writer. Since there may be fewer bytes
        // ready to send than a full TDS packet, we end the message here and start a new
        // one later if additional handshake data needs to be sent.
        tdsWriter.endMessage();
        messageStarted = false;
    }

    private final byte singleByte[] = new byte[1];

    public void write(int b) throws IOException {
        singleByte[0] = (byte) (b & 0xFF);
        writeInternal(singleByte, 0, singleByte.length);
    }

    public void write(byte[] b) throws IOException {
        writeInternal(b, 0, b.length);
    }

    public void write(byte[] b, int off, int len) throws IOException {
        writeInternal(b, off, len);
    }

    private void writeInternal(byte[] b, int off, int len) throws IOException {
        try {
            // Start out the handshake request in a new prelogin message. Subsequent
            // writes just add handshake data to the request until flushed.
            if (!messageStarted) {
                if (logger.isLoggable(Level.FINEST))
                    logger.finest(logContext + " Starting new TDS packet...");

                tdsWriter.startMessage(null, TDS.PKT_PRELOGIN);
                messageStarted = true;
            }

            if (logger.isLoggable(Level.FINEST))
                logger.finest(logContext + " Writing " + len + " bytes...");

            tdsWriter.writeBytes(b, off, len);
        }
        catch (SQLServerException e) {
            logger.finer(logContext + " Writing bytes threw exception:" + e.getMessage());
            throw new IOException(e.getMessage());
        }
    }
}

/**
 * This class implements an InputStream that just forwards all of its methods to an underlying InputStream.
 *
 * It is more predictable than FilteredInputStream which forwards some of its read methods directly to the underlying stream, but not others.
 */
private final class ProxyInputStream extends InputStream {
    // Target stream; swapped at runtime via setFilteredStream (see ProxySocket.setStreams).
    private InputStream filteredStream;

    ProxyInputStream(InputStream is) {
        filteredStream = is;
    }

    final void setFilteredStream(InputStream is) {
        filteredStream = is;
    }

    public long skip(long n) throws IOException {
        long bytesSkipped;

        if (logger.isLoggable(Level.FINEST))
            logger.finest(toString() + " Skipping " + n + " bytes");

        bytesSkipped = filteredStream.skip(n);

        if (logger.isLoggable(Level.FINEST))
            logger.finest(toString() + " Skipped " + n + " bytes");

        return bytesSkipped;
    }

    public int available() throws IOException {
        int bytesAvailable = filteredStream.available();

        if (logger.isLoggable(Level.FINEST))
            logger.finest(toString() + " " + bytesAvailable + " bytes available");

        return bytesAvailable;
    }

    private final byte oneByte[] = new byte[1];

    public int read() throws IOException {
        int bytesRead;

        // Loop until a byte (1) or end of stream (-1) is produced.
        while (0 == (bytesRead = readInternal(oneByte, 0, oneByte.length)))
            ;

        assert 1 == bytesRead || -1 == bytesRead;
        return 1 == bytesRead ? oneByte[0] : -1;
    }

    public int read(byte[] b) throws IOException {
        return readInternal(b, 0, b.length);
    }

    public int read(byte b[], int offset, int maxBytes) throws IOException {
        return readInternal(b, offset, maxBytes);
    }

    private int readInternal(byte b[], int offset, int maxBytes) throws IOException {
        int bytesRead;

        if (logger.isLoggable(Level.FINEST))
            logger.finest(toString() + " Reading " + maxBytes + " bytes");

        try {
            bytesRead = filteredStream.read(b, offset, maxBytes);
        } catch (IOException e) {
            if (logger.isLoggable(Level.FINER))
                logger.finer(toString() + " " + e.getMessage());

            logger.finer(toString() + " Reading bytes threw exception:" + e.getMessage());
            throw e;
        }

        if (logger.isLoggable(Level.FINEST))
            logger.finest(toString() + " Read " + bytesRead + " bytes");

        return bytesRead;
    }

    public boolean markSupported() {
        boolean markSupported = filteredStream.markSupported();

        if (logger.isLoggable(Level.FINEST))
            logger.finest(toString() + " Returning markSupported: " + markSupported);

        return markSupported;
    }

    public void mark(int readLimit) {
        if (logger.isLoggable(Level.FINEST))
            logger.finest(toString() + " Marking next " + readLimit + " bytes");

        filteredStream.mark(readLimit);
    }

    public void reset() throws IOException {
        if (logger.isLoggable(Level.FINEST))
            logger.finest(toString() + " Resetting to previous mark");

        filteredStream.reset();
    }

    public void close() throws IOException {
        if (logger.isLoggable(Level.FINEST))
            logger.finest(toString() + " Closing");

        filteredStream.close();
    }
}

/**
 * This class implements an OutputStream that just forwards all of its methods to an underlying OutputStream.
 *
 * This class essentially does what FilteredOutputStream does, but is more efficient for our usage. FilteredOutputStream transforms block writes
 * to sequences of single-byte writes.
 */
final class ProxyOutputStream extends OutputStream {
    // Target stream; swapped at runtime via setFilteredStream (see ProxySocket.setStreams).
    private OutputStream filteredStream;

    ProxyOutputStream(OutputStream os) {
        filteredStream = os;
    }

    final void setFilteredStream(OutputStream os) {
        filteredStream = os;
    }

    public void close() throws IOException {
        if (logger.isLoggable(Level.FINEST))
            logger.finest(toString() + " Closing");

        filteredStream.close();
    }

    public void flush() throws IOException {
        if (logger.isLoggable(Level.FINEST))
            logger.finest(toString() + " Flushing");

        filteredStream.flush();
    }

    private final byte singleByte[] = new byte[1];

    public void write(int b) throws IOException {
        singleByte[0] = (byte) (b & 0xFF);
        writeInternal(singleByte, 0, singleByte.length);
    }

    public void write(byte[] b) throws IOException {
        writeInternal(b, 0, b.length);
    }

    public void write(byte[] b, int off, int len) throws IOException {
        writeInternal(b, off, len);
    }

    private void writeInternal(byte[] b, int off, int len) throws IOException {
        if (logger.isLoggable(Level.FINEST))
            logger.finest(toString() + " Writing " + len + " bytes");

        filteredStream.write(b, off, len);
    }
}

/**
 * This class implements a Socket whose I/O streams can be switched from using a TDSChannel for I/O to using
its underlying TCP/IP socket.
 *
 * The SSL socket binds to a ProxySocket. The initial SSL handshake is done over TDSChannel I/O streams so that the handshake payload is framed in
 * TDS packets. The I/O streams are then switched to TCP/IP I/O streams using setStreams, and SSL communications continue directly over the TCP/IP
 * I/O streams.
 *
 * Most methods other than those for getting the I/O streams are simply forwarded to the TDSChannel's underlying TCP/IP socket. Methods that
 * change the socket binding or provide direct channel access are disallowed.
 */
private class ProxySocket extends Socket {
    private final TDSChannel tdsChannel;
    private final Logger logger;
    private final String logContext;
    private final ProxyInputStream proxyInputStream;
    private final ProxyOutputStream proxyOutputStream;

    ProxySocket(TDSChannel tdsChannel) {
        this.tdsChannel = tdsChannel;
        this.logger = tdsChannel.getLogger();
        this.logContext = tdsChannel.toString() + " (ProxySocket):";

        // Create the I/O streams
        SSLHandshakeOutputStream sslHandshakeOutputStream = new SSLHandshakeOutputStream(tdsChannel);
        SSLHandshakeInputStream sslHandshakeInputStream = new SSLHandshakeInputStream(tdsChannel, sslHandshakeOutputStream);
        this.proxyOutputStream = new ProxyOutputStream(sslHandshakeOutputStream);
        this.proxyInputStream = new ProxyInputStream(sslHandshakeInputStream);
    }

    // Rewires the proxy streams; used to switch from TDS-framed handshake I/O
    // to raw TCP I/O (and, in disableSSL, to closed streams).
    void setStreams(InputStream is, OutputStream os) {
        proxyInputStream.setFilteredStream(is);
        proxyOutputStream.setFilteredStream(os);
    }

    public InputStream getInputStream() throws IOException {
        if (logger.isLoggable(Level.FINEST))
            logger.finest(logContext + " Getting input stream");

        return proxyInputStream;
    }

    public OutputStream getOutputStream() throws IOException {
        if (logger.isLoggable(Level.FINEST))
            logger.finest(logContext + " Getting output stream");

        return proxyOutputStream;
    }

    // Allow methods that should just forward to the underlying TCP socket or return fixed values
    public InetAddress getInetAddress() {
        return tdsChannel.tcpSocket.getInetAddress();
    }

    public boolean getKeepAlive() throws SocketException {
        return tdsChannel.tcpSocket.getKeepAlive();
    }

    public InetAddress getLocalAddress() {
        return tdsChannel.tcpSocket.getLocalAddress();
    }

    public int getLocalPort() {
        return tdsChannel.tcpSocket.getLocalPort();
    }

    public SocketAddress getLocalSocketAddress() {
        return tdsChannel.tcpSocket.getLocalSocketAddress();
    }

    public boolean getOOBInline() throws SocketException {
        return tdsChannel.tcpSocket.getOOBInline();
    }

    public int getPort() {
        return tdsChannel.tcpSocket.getPort();
    }

    public int getReceiveBufferSize() throws SocketException {
        return tdsChannel.tcpSocket.getReceiveBufferSize();
    }

    public SocketAddress getRemoteSocketAddress() {
        return tdsChannel.tcpSocket.getRemoteSocketAddress();
    }

    public boolean getReuseAddress() throws SocketException {
        return tdsChannel.tcpSocket.getReuseAddress();
    }

    public int getSendBufferSize() throws SocketException {
        return tdsChannel.tcpSocket.getSendBufferSize();
    }

    public int getSoLinger() throws SocketException {
        return tdsChannel.tcpSocket.getSoLinger();
    }

    public int getSoTimeout() throws SocketException {
        return tdsChannel.tcpSocket.getSoTimeout();
    }

    public boolean getTcpNoDelay() throws SocketException {
        return tdsChannel.tcpSocket.getTcpNoDelay();
    }

    public int getTrafficClass() throws SocketException {
        return tdsChannel.tcpSocket.getTrafficClass();
    }

    public boolean isBound() {
        return true;
    }

    public boolean isClosed() {
        return false;
    }

    public boolean isConnected() {
        return true;
    }

    public boolean isInputShutdown() {
        return false;
    }

    public boolean isOutputShutdown() {
        return false;
    }

    public String toString() {
        return tdsChannel.tcpSocket.toString();
    }

    public SocketChannel getChannel() {
        return null;
    }

    // Disallow calls to methods that would change the underlying TCP socket
    public void bind(SocketAddress bindPoint) throws IOException {
        logger.finer(logContext + " Disallowed call to bind. Throwing IOException.");
        throw new IOException();
    }

    public void connect(SocketAddress endpoint) throws IOException {
        logger.finer(logContext + " Disallowed call to connect (without timeout). Throwing IOException.");
        throw new IOException();
    }

    public void connect(SocketAddress endpoint, int timeout) throws IOException {
        logger.finer(logContext + " Disallowed call to connect (with timeout). Throwing IOException.");
        throw new IOException();
    }

    // Ignore calls to methods that would otherwise allow the SSL socket
    // to directly manipulate the underlying TCP socket
    public void close() throws IOException {
        if (logger.isLoggable(Level.FINER))
            logger.finer(logContext + " Ignoring close");
    }

    public void setReceiveBufferSize(int size) throws SocketException {
        if (logger.isLoggable(Level.FINER))
            logger.finer(toString() + " Ignoring setReceiveBufferSize size:" + size);
    }

    public void setSendBufferSize(int size) throws SocketException {
        if (logger.isLoggable(Level.FINER))
            logger.finer(toString() + " Ignoring setSendBufferSize size:" + size);
    }

    public void setReuseAddress(boolean on) throws SocketException {
        if (logger.isLoggable(Level.FINER))
            logger.finer(toString() + " Ignoring setReuseAddress");
    }

    public void setSoLinger(boolean on, int linger) throws SocketException {
        if (logger.isLoggable(Level.FINER))
            logger.finer(toString() + " Ignoring setSoLinger");
    }

    public void setSoTimeout(int timeout) throws SocketException {
        if (logger.isLoggable(Level.FINER))
            logger.finer(toString() + " Ignoring setSoTimeout");
    }

    public void setTcpNoDelay(boolean on) throws SocketException {
        if (logger.isLoggable(Level.FINER))
            logger.finer(toString() + " Ignoring setTcpNoDelay");
    }

    public void setTrafficClass(int tc) throws SocketException {
        if (logger.isLoggable(Level.FINER))
            logger.finer(toString() + " Ignoring setTrafficClass");
    }

    public void shutdownInput() throws IOException {
        if (logger.isLoggable(Level.FINER))
            logger.finer(toString() + " Ignoring shutdownInput");
    }

    public void shutdownOutput() throws IOException {
        if (logger.isLoggable(Level.FINER))
            logger.finer(toString() + " Ignoring shutdownOutput");
    }

    public void sendUrgentData(int data) throws IOException {
        if (logger.isLoggable(Level.FINER))
            logger.finer(toString() + " Ignoring sendUrgentData");
    }

    public void setKeepAlive(boolean on) throws SocketException {
        if (logger.isLoggable(Level.FINER))
            logger.finer(toString() + " Ignoring setKeepAlive");
    }

    public void setOOBInline(boolean on) throws SocketException {
        if (logger.isLoggable(Level.FINER))
            logger.finer(toString() + " Ignoring setOOBInline");
    }
}

/**
 * This class implements an X509TrustManager that always accepts the X509Certificate chain offered to it.
 *
 * A PermissiveX509TrustManager is used to "verify" the authenticity of the server when the trustServerCertificate connection property is set to
 * true.
 */
private final class PermissiveX509TrustManager extends Object implements X509TrustManager {
    private final TDSChannel tdsChannel;
    private final Logger logger;
    private final String logContext;

    PermissiveX509TrustManager(TDSChannel tdsChannel) {
        this.tdsChannel = tdsChannel;
        this.logger = tdsChannel.getLogger();
        this.logContext = tdsChannel.toString() + " (PermissiveX509TrustManager):";
    }

    public void checkClientTrusted(X509Certificate[] chain, String authType) throws CertificateException {
        if (logger.isLoggable(Level.FINER))
            logger.finer(logContext + " Trusting client certificate (!)");
    }

    public void checkServerTrusted(X509Certificate[] chain, String authType) throws CertificateException {
        if (logger.isLoggable(Level.FINER))
            logger.finer(logContext + " Trusting server certificate");
    }

    public X509Certificate[] getAcceptedIssuers() {
        return new X509Certificate[0];
    }
}

/**
 * This class implements an X509TrustManager that additionally validates the host name.
* * This validates the subject name in the certificate with the host name */ private final class HostNameOverrideX509TrustManager extends Object implements X509TrustManager { private final Logger logger; private final String logContext; private final X509TrustManager defaultTrustManager; private String hostName; HostNameOverrideX509TrustManager(TDSChannel tdsChannel, X509TrustManager tm, String hostName) { this.logger = tdsChannel.getLogger(); this.logContext = tdsChannel.toString() + " (HostNameOverrideX509TrustManager):"; defaultTrustManager = tm; // canonical name is in lower case so convert this to lowercase too. this.hostName = hostName.toLowerCase(); ; } // Parse name in RFC 2253 format // Returns the common name if successful, null if failed to find the common name. // The parser tuned to be safe than sorry so if it sees something it cant parse correctly it returns null private String parseCommonName(String distinguishedName) { int index; // canonical name converts entire name to lowercase index = distinguishedName.indexOf("cn="); if (index == -1) { return null; } distinguishedName = distinguishedName.substring(index + 3); // Parse until a comma or end is reached // Note the parser will handle gracefully (essentially will return empty string) , inside the quotes (e.g cn="Foo, bar") however // RFC 952 says that the hostName cant have commas however the parser should not (and will not) crash if it sees a , within quotes. 
for (index = 0; index < distinguishedName.length(); index++) { if (distinguishedName.charAt(index) == ',') { break; } } String commonName = distinguishedName.substring(0, index); // strip any quotes if (commonName.length() > 1 && ('\"' == commonName.charAt(0))) { if ('\"' == commonName.charAt(commonName.length() - 1)) commonName = commonName.substring(1, commonName.length() - 1); else { // Be safe the name is not ended in " return null so the common Name wont match commonName = null; } } return commonName; } private boolean validateServerName(String nameInCert) throws CertificateException { // Failed to get the common name from DN or empty CN if (null == nameInCert) { if (logger.isLoggable(Level.FINER)) logger.finer(logContext + " Failed to parse the name from the certificate or name is empty."); return false; } // Verify that the name in certificate matches exactly with the host name if (!nameInCert.equals(hostName)) { if (logger.isLoggable(Level.FINER)) logger.finer(logContext + " The name in certificate " + nameInCert + " does not match with the server name " + hostName + "."); return false; } if (logger.isLoggable(Level.FINER)) logger.finer(logContext + " The name in certificate:" + nameInCert + " validated against server name " + hostName + "."); return true; } public void checkClientTrusted(X509Certificate[] chain, String authType) throws CertificateException { if (logger.isLoggable(Level.FINEST)) logger.finest(logContext + " Forwarding ClientTrusted."); defaultTrustManager.checkClientTrusted(chain, authType); } public void checkServerTrusted(X509Certificate[] chain, String authType) throws CertificateException { if (logger.isLoggable(Level.FINEST)) logger.finest(logContext + " Forwarding Trusting server certificate"); defaultTrustManager.checkServerTrusted(chain, authType); if (logger.isLoggable(Level.FINEST)) logger.finest(logContext + " default serverTrusted succeeded proceeding with server name validation"); validateServerNameInCertificate(chain[0]); } 
/**
 * Verifies that the given (leaf) server certificate was issued for the host we connected to.
 *
 * Matching order: first the CN from the subject DN, then each dNSName entry in the
 * subjectAlternativeNames extension. If no name matches, throws CertificateException
 * with the localized R_certNameFailed message.
 *
 * @param cert
 *            the leaf certificate of the server's chain
 * @throws CertificateException
 *             if no certificate name matches the expected server name
 */
private void validateServerNameInCertificate(X509Certificate cert) throws CertificateException {
    String nameInCertDN = cert.getSubjectX500Principal().getName("canonical");
    if (logger.isLoggable(Level.FINER)) {
        logger.finer(logContext + " Validating the server name:" + hostName);
        logger.finer(logContext + " The DN name in certificate:" + nameInCertDN);
    }

    boolean isServerNameValidated;

    // the name in cert is in RFC2253 format parse it to get the actual subject name
    String subjectCN = parseCommonName(nameInCertDN);

    isServerNameValidated = validateServerName(subjectCN);

    if (!isServerNameValidated) {
        // CN did not match; fall back to the subjectAlternativeNames extension.
        Collection<List<?>> sanCollection = cert.getSubjectAlternativeNames();

        if (sanCollection != null) {
            // find a subjectAlternateName entry corresponding to DNS Name
            for (List<?> sanEntry : sanCollection) {
                if (sanEntry != null && sanEntry.size() >= 2) {
                    Object key = sanEntry.get(0);
                    Object value = sanEntry.get(1);

                    if (logger.isLoggable(Level.FINER)) {
                        logger.finer(logContext + "Key: " + key + "; KeyClass:" + (key != null ? key.getClass() : null) + ";value: " + value
                                + "; valueClass:" + (value != null ? value.getClass() : null));
                    }

                    // "Note that the Collection returned may contain
                    // more than one name of the same type."
                    // So, more than one entry of dnsNameType can be present.
                    // Java docs guarantee that the first entry in the list will be an integer.
                    // 2 is the sequence no of a dnsName
                    if ((key != null) && (key instanceof Integer) && ((Integer) key == 2)) {
                        // As per RFC2459, the DNSName will be in the
                        // "preferred name syntax" as specified by RFC
                        // 1034 and the name can be in upper or lower case.
                        // And no significance is attached to case.
                        // Java docs guarantee that the second entry in the list
                        // will be a string for dnsName
                        if (value != null && value instanceof String) {
                            String dnsNameInSANCert = (String) value;

                            // convert to upper case and then to lower case in english locale
                            // to avoid Turkish i issues.
                            // Note that, this conversion was not necessary for
                            // cert.getSubjectX500Principal().getName("canonical");
                            // as the above API already does this by default as per documentation.
                            dnsNameInSANCert = dnsNameInSANCert.toUpperCase(Locale.US);
                            dnsNameInSANCert = dnsNameInSANCert.toLowerCase(Locale.US);

                            isServerNameValidated = validateServerName(dnsNameInSANCert);

                            if (isServerNameValidated) {
                                if (logger.isLoggable(Level.FINER)) {
                                    logger.finer(logContext + " found a valid name in certificate: " + dnsNameInSANCert);
                                }
                                break;
                            }
                        }

                        if (logger.isLoggable(Level.FINER)) {
                            logger.finer(logContext + " the following name in certificate does not match the serverName: " + value);
                        }
                    }
                }
                else {
                    if (logger.isLoggable(Level.FINER)) {
                        logger.finer(logContext + " found an invalid san entry: " + sanEntry);
                    }
                }
            }
        }
    }

    if (!isServerNameValidated) {
        String msg = SQLServerException.getErrString("R_certNameFailed");
        throw new CertificateException(msg);
    }
}

public X509Certificate[] getAcceptedIssuers() {
    return defaultTrustManager.getAcceptedIssuers();
}
}

// Tracks how far the SSL handshake got, so the catch block in enableSSL can distinguish
// the intermittent TLS 1.2 truncation failure (handshake started but not completed)
// from other SSL failures. (Name contains a historical typo kept for compatibility.)
enum SSLHandhsakeState {
    SSL_HANDHSAKE_NOT_STARTED,
    SSL_HANDHSAKE_STARTED,
    SSL_HANDHSAKE_COMPLETE
};

/**
 * Enables SSL Handshake.
 *
 * Builds a TrustManager (permissive when the connection trusts any server certificate,
 * otherwise backed by the configured or default trust store), creates an SSLSocket layered
 * over a ProxySocket that initially encapsulates the handshake in TDS packets, performs the
 * handshake, and finally rewires this channel's streams to the SSL streams.
 *
 * @param host
 *            Server Host Name for SSL Handshake
 * @param port
 *            Server Port for SSL Handshake
 * @throws SQLServerException
 *             if SSL cannot be enabled for any reason (connection is terminated)
 */
void enableSSL(String host, int port) throws SQLServerException {
    // If enabling SSL fails, which it can for a number of reasons, the following items
    // are used in logging information to the TDS channel logger to help diagnose the problem.
    Provider tmfProvider = null; // TrustManagerFactory provider
    Provider sslContextProvider = null; // SSLContext provider
    Provider ksProvider = null; // KeyStore provider
    String tmfDefaultAlgorithm = null; // Default algorithm (typically X.509) used by the TrustManagerFactory
    SSLHandhsakeState handshakeState = SSLHandhsakeState.SSL_HANDHSAKE_NOT_STARTED;

    boolean isFips = false;
    String trustStoreType = null;
    String fipsProvider = null;

    // If anything in here fails, terminate the connection and throw an exception
    try {
        if (logger.isLoggable(Level.FINER))
            logger.finer(toString() + " Enabling SSL...");

        String trustStoreFileName = con.activeConnectionProperties.getProperty(SQLServerDriverStringProperty.TRUST_STORE.toString());
        String trustStorePassword = con.activeConnectionProperties.getProperty(SQLServerDriverStringProperty.TRUST_STORE_PASSWORD.toString());
        String hostNameInCertificate = con.activeConnectionProperties
                .getProperty(SQLServerDriverStringProperty.HOSTNAME_IN_CERTIFICATE.toString());

        trustStoreType = con.activeConnectionProperties.getProperty(SQLServerDriverStringProperty.TRUST_STORE_TYPE.toString());

        if (StringUtils.isEmpty(trustStoreType)) {
            trustStoreType = SQLServerDriverStringProperty.TRUST_STORE_TYPE.getDefaultValue();
        }

        fipsProvider = con.activeConnectionProperties.getProperty(SQLServerDriverStringProperty.FIPS_PROVIDER.toString());
        isFips = Boolean.valueOf(con.activeConnectionProperties.getProperty(SQLServerDriverBooleanProperty.FIPS.toString()));

        // FIPS mode constrains which encryption / trust-store settings are legal; fail fast.
        if (isFips) {
            validateFips(fipsProvider, trustStoreType, trustStoreFileName);
        }

        byte requestedEncryptionLevel = con.getRequestedEncryptionLevel();
        assert TDS.ENCRYPT_OFF == requestedEncryptionLevel || // Login only SSL
                TDS.ENCRYPT_ON == requestedEncryptionLevel; // Full SSL

        byte negotiatedEncryptionLevel = con.getNegotiatedEncryptionLevel();
        assert TDS.ENCRYPT_OFF == negotiatedEncryptionLevel || // Login only SSL
                TDS.ENCRYPT_ON == negotiatedEncryptionLevel || // Full SSL
                TDS.ENCRYPT_REQ == negotiatedEncryptionLevel; // Full SSL

        // If we requested login only SSL or full SSL without server certificate validation,
        // then we'll "validate" the server certificate using a naive TrustManager that trusts
        // everything it sees.
        TrustManager[] tm = null;
        if (TDS.ENCRYPT_OFF == con.getRequestedEncryptionLevel()
                || (TDS.ENCRYPT_ON == con.getRequestedEncryptionLevel() && con.trustServerCertificate())) {
            if (logger.isLoggable(Level.FINER))
                logger.finer(toString() + " SSL handshake will trust any certificate");

            tm = new TrustManager[] {new PermissiveX509TrustManager(this)};
        }
        // Otherwise, we'll validate the certificate using a real TrustManager obtained
        // from the a security provider that is capable of validating X.509 certificates.
        else {
            if (logger.isLoggable(Level.FINER))
                logger.finer(toString() + " SSL handshake will validate server certificate");

            KeyStore ks = null;

            // If we are using the system default trustStore and trustStorePassword
            // then we can skip all of the KeyStore loading logic below.
            // The security provider's implementation takes care of everything for us.
            if (null == trustStoreFileName && null == trustStorePassword) {
                if (logger.isLoggable(Level.FINER))
                    logger.finer(toString() + " Using system default trust store and password");
            }

            // Otherwise either the trustStore, trustStorePassword, or both was specified.
            // In that case, we need to load up a KeyStore ourselves.
            else {
                // First, obtain an interface to a KeyStore that can load trust material
                // stored in Java Key Store (JKS) format.
                if (logger.isLoggable(Level.FINEST))
                    logger.finest(toString() + " Finding key store interface");

                // FIPS requires the key store to come from the configured FIPS provider.
                if (isFips) {
                    ks = KeyStore.getInstance(trustStoreType, fipsProvider);
                }
                else {
                    ks = KeyStore.getInstance(trustStoreType);
                }
                ksProvider = ks.getProvider();

                // Next, load up the trust store file from the specified location.
                // Note: This function returns a null InputStream if the trust store cannot
                // be loaded. This is by design. See the method comment and documentation
                // for KeyStore.load for details.
                InputStream is = loadTrustStore(trustStoreFileName);

                // Finally, load the KeyStore with the trust material (if any) from the
                // InputStream and close the stream.
                if (logger.isLoggable(Level.FINEST))
                    logger.finest(toString() + " Loading key store");

                try {
                    ks.load(is, (null == trustStorePassword) ? null : trustStorePassword.toCharArray());
                }
                finally {
                    // We are done with the trustStorePassword (if set). Clear it for better security.
                    con.activeConnectionProperties.remove(SQLServerDriverStringProperty.TRUST_STORE_PASSWORD.toString());

                    // We are also done with the trust store input stream.
                    if (null != is) {
                        try {
                            is.close();
                        }
                        catch (IOException e) {
                            if (logger.isLoggable(Level.FINE))
                                logger.fine(toString() + " Ignoring error closing trust material InputStream...");
                        }
                    }
                }
            }

            // Either we now have a KeyStore populated with trust material or we are using the
            // default source of trust material (cacerts). Either way, we are now ready to
            // use a TrustManagerFactory to create a TrustManager that uses the trust material
            // to validate the server certificate.

            // Next step is to get a TrustManagerFactory that can produce TrustManagers
            // that understands X.509 certificates.
            TrustManagerFactory tmf = null;

            if (logger.isLoggable(Level.FINEST))
                logger.finest(toString() + " Locating X.509 trust manager factory");

            tmfDefaultAlgorithm = TrustManagerFactory.getDefaultAlgorithm();
            tmf = TrustManagerFactory.getInstance(tmfDefaultAlgorithm);
            tmfProvider = tmf.getProvider();

            // Tell the TrustManagerFactory to give us TrustManagers that we can use to
            // validate the server certificate using the trust material in the KeyStore.
            if (logger.isLoggable(Level.FINEST))
                logger.finest(toString() + " Getting trust manager");

            // Note: a null KeyStore makes tmf fall back to the platform default trust material.
            tmf.init(ks);
            tm = tmf.getTrustManagers();

            // if the host name in cert provided use it or use the host name Only if it is not FIPS
            if (!isFips) {
                if (null != hostNameInCertificate) {
                    tm = new TrustManager[] {new HostNameOverrideX509TrustManager(this, (X509TrustManager) tm[0], hostNameInCertificate)};
                }
                else {
                    tm = new TrustManager[] {new HostNameOverrideX509TrustManager(this, (X509TrustManager) tm[0], host)};
                }
            }
        } // end if (!con.trustServerCertificate())

        // Now, with a real or fake TrustManager in hand, get a context for creating a
        // SSL sockets through a SSL socket factory. We require at least TLS support.
        SSLContext sslContext = null;

        if (logger.isLoggable(Level.FINEST))
            logger.finest(toString() + " Getting TLS or better SSL context");

        sslContext = SSLContext.getInstance("TLS");
        sslContextProvider = sslContext.getProvider();

        if (logger.isLoggable(Level.FINEST))
            logger.finest(toString() + " Initializing SSL context");

        sslContext.init(null, tm, null);

        // Got the SSL context. Now create an SSL socket over our own proxy socket
        // which we can toggle between TDS-encapsulated and raw communications.
        // Initially, the proxy is set to encapsulate the SSL handshake in TDS packets.
        proxySocket = new ProxySocket(this);

        if (logger.isLoggable(Level.FINEST))
            logger.finest(toString() + " Creating SSL socket");

        sslSocket = (SSLSocket) sslContext.getSocketFactory().createSocket(proxySocket, host, port, false); // don't close proxy when SSL socket
                                                                                                            // is closed

        // At long last, start the SSL handshake ...
        if (logger.isLoggable(Level.FINER))
            logger.finer(toString() + " Starting SSL handshake");

        // TLS 1.2 intermittent exception happens here.
        handshakeState = SSLHandhsakeState.SSL_HANDHSAKE_STARTED;
        sslSocket.startHandshake();
        handshakeState = SSLHandhsakeState.SSL_HANDHSAKE_COMPLETE;

        // After SSL handshake is complete, rewire proxy socket to use raw TCP/IP streams ...
        if (logger.isLoggable(Level.FINEST))
            logger.finest(toString() + " Rewiring proxy streams after handshake");

        proxySocket.setStreams(inputStream, outputStream);

        // ... and rewire TDSChannel to use SSL streams.
        if (logger.isLoggable(Level.FINEST))
            logger.finest(toString() + " Getting SSL InputStream");

        inputStream = sslSocket.getInputStream();

        if (logger.isLoggable(Level.FINEST))
            logger.finest(toString() + " Getting SSL OutputStream");

        outputStream = sslSocket.getOutputStream();

        // SSL is now enabled; switch over the channel socket
        channelSocket = sslSocket;

        if (logger.isLoggable(Level.FINER))
            logger.finer(toString() + " SSL enabled");
    }
    catch (Exception e) {
        // Log the original exception and its source at FINER level
        if (logger.isLoggable(Level.FINER))
            logger.log(Level.FINER, e.getMessage(), e);

        // If enabling SSL fails, the following information may help diagnose the problem.
        // Do not use Level INFO or above which is sent to standard output/error streams.
        // This is because due to an intermittent TLS 1.2 connection issue, we will be retrying the connection and
        // do not want to print this message in console.
        if (logger.isLoggable(Level.FINER))
            logger.log(Level.FINER,
                    "java.security path: " + JAVA_SECURITY + "\n" + "Security providers: " + Arrays.asList(Security.getProviders()) + "\n"
                            + ((null != sslContextProvider) ? ("SSLContext provider info: " + sslContextProvider.getInfo() + "\n"
                                    + "SSLContext provider services:\n" + sslContextProvider.getServices() + "\n") : "")
                            + ((null != tmfProvider) ? ("TrustManagerFactory provider info: " + tmfProvider.getInfo() + "\n") : "")
                            + ((null != tmfDefaultAlgorithm) ? ("TrustManagerFactory default algorithm: " + tmfDefaultAlgorithm + "\n") : "")
                            + ((null != ksProvider) ? ("KeyStore provider info: " + ksProvider.getInfo() + "\n") : "") + "java.ext.dirs: "
                            + System.getProperty("java.ext.dirs"));

        MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_sslFailed"));
        Object[] msgArgs = {e.getMessage()};

        // It is important to get the localized message here, otherwise error messages won't match for different locales.
        String errMsg = e.getLocalizedMessage();

        // The error message may have a connection id appended to it. Extract the message only for comparison.
        // This client connection id is appended in method checkAndAppendClientConnId().
        if (errMsg.contains(SQLServerException.LOG_CLIENT_CONNECTION_ID_PREFIX)) {
            errMsg = errMsg.substring(0, errMsg.indexOf(SQLServerException.LOG_CLIENT_CONNECTION_ID_PREFIX));
        }

        // Isolate the TLS1.2 intermittent connection error.
        if (e instanceof IOException && (SSLHandhsakeState.SSL_HANDHSAKE_STARTED == handshakeState)
                && (errMsg.equals(SQLServerException.getErrString("R_truncatedServerResponse")))) {
            con.terminate(SQLServerException.DRIVER_ERROR_INTERMITTENT_TLS_FAILED, form.format(msgArgs), e);
        }
        else {
            con.terminate(SQLServerException.DRIVER_ERROR_SSL_FAILED, form.format(msgArgs), e);
        }
    }
}

/**
 * Validate FIPS if fips set as true
 *
 * Valid FIPS settings:
 * <LI>Encrypt should be true
 * <LI>trustServerCertificate should be false
 * <LI>if certificate is not installed FIPSProvider & TrustStoreType should be present.
* * @param fipsProvider * FIPS Provider * @param trustStoreType * @param trustStoreFileName * @throws SQLServerException * @since 6.1.4 */ private void validateFips(final String fipsProvider, final String trustStoreType, final String trustStoreFileName) throws SQLServerException { boolean isValid = false; boolean isEncryptOn; boolean isValidTrustStoreType; boolean isValidTrustStore; boolean isTrustServerCertificate; boolean isValidFipsProvider; String strError = SQLServerException.getErrString("R_invalidFipsConfig"); isEncryptOn = (TDS.ENCRYPT_ON == con.getRequestedEncryptionLevel()); // Here different FIPS provider supports different KeyStore type along with different JVM Implementation. isValidFipsProvider = !StringUtils.isEmpty(fipsProvider); isValidTrustStoreType = !StringUtils.isEmpty(trustStoreType); isValidTrustStore = !StringUtils.isEmpty(trustStoreFileName); isTrustServerCertificate = con.trustServerCertificate(); if (isEncryptOn && !isTrustServerCertificate) { if (logger.isLoggable(Level.FINER)) logger.finer(toString() + " Found parameters are encrypt is true & trustServerCertificate false"); isValid = true; if (isValidTrustStore) { // In case of valid trust store we need to check fipsProvider and TrustStoreType. 
if (!isValidFipsProvider || !isValidTrustStoreType) { isValid = false; strError = SQLServerException.getErrString("R_invalidFipsProviderConfig"); if (logger.isLoggable(Level.FINER)) logger.finer(toString() + " FIPS provider & TrustStoreType should pass with TrustStore."); } if (logger.isLoggable(Level.FINER)) logger.finer(toString() + " Found FIPS parameters seems to be valid."); } } else { strError = SQLServerException.getErrString("R_invalidFipsEncryptConfig"); } if (!isValid) { throw new SQLServerException(strError, null, 0, null); } } private final static String SEPARATOR = System.getProperty("file.separator"); private final static String JAVA_HOME = System.getProperty("java.home"); private final static String JAVA_SECURITY = JAVA_HOME + SEPARATOR + "lib" + SEPARATOR + "security"; private final static String JSSECACERTS = JAVA_SECURITY + SEPARATOR + "jssecacerts"; private final static String CACERTS = JAVA_SECURITY + SEPARATOR + "cacerts"; /** * Loads the contents of a trust store into an InputStream. * * When a location to a trust store is specified, this method attempts to load that store. Otherwise, it looks for and attempts to load the * default trust store using essentially the same logic (outlined in the JSSE Reference Guide) as the default X.509 TrustManagerFactory. * * @return an InputStream containing the contents of the loaded trust store * @return null if the trust store cannot be loaded. * * Note: It is by design that this function returns null when the trust store cannot be loaded rather than throwing an exception. The * reason is that KeyStore.load, which uses the returned InputStream, interprets a null InputStream to mean that there are no trusted * certificates, which mirrors the behavior of the default (no trust store, no password specified) path. 
*/ final InputStream loadTrustStore(String trustStoreFileName) { FileInputStream is = null; // First case: Trust store filename was specified if (null != trustStoreFileName) { try { if (logger.isLoggable(Level.FINEST)) logger.finest(toString() + " Opening specified trust store: " + trustStoreFileName); is = new FileInputStream(trustStoreFileName); } catch (FileNotFoundException e) { if (logger.isLoggable(Level.FINE)) logger.fine(toString() + " Trust store not found: " + e.getMessage()); // If the trustStoreFileName connection property is set, but the file is not found, // then treat it as if the file was empty so that the TrustManager reports // that no certificate is found. } } // Second case: Trust store filename derived from javax.net.ssl.trustStore system property else if (null != (trustStoreFileName = System.getProperty("javax.net.ssl.trustStore"))) { try { if (logger.isLoggable(Level.FINEST)) logger.finest(toString() + " Opening default trust store (from javax.net.ssl.trustStore): " + trustStoreFileName); is = new FileInputStream(trustStoreFileName); } catch (FileNotFoundException e) { if (logger.isLoggable(Level.FINE)) logger.fine(toString() + " Trust store not found: " + e.getMessage()); // If the javax.net.ssl.trustStore property is set, but the file is not found, // then treat it as if the file was empty so that the TrustManager reports // that no certificate is found. } } // Third case: No trust store specified and no system property set. Use jssecerts/cacerts. else { try { if (logger.isLoggable(Level.FINEST)) logger.finest(toString() + " Opening default trust store: " + JSSECACERTS); is = new FileInputStream(JSSECACERTS); } catch (FileNotFoundException e) { if (logger.isLoggable(Level.FINE)) logger.fine(toString() + " Trust store not found: " + e.getMessage()); } // No jssecerts. Try again with cacerts... 
if (null == is) { try { if (logger.isLoggable(Level.FINEST)) logger.finest(toString() + " Opening default trust store: " + CACERTS); is = new FileInputStream(CACERTS); } catch (FileNotFoundException e) { if (logger.isLoggable(Level.FINE)) logger.fine(toString() + " Trust store not found: " + e.getMessage()); // No jssecerts or cacerts. Treat it as if the trust store is empty so that // the TrustManager reports that no certificate is found. } } } return is; } final int read(byte[] data, int offset, int length) throws SQLServerException { try { return inputStream.read(data, offset, length); } catch (IOException e) { if (logger.isLoggable(Level.FINE)) logger.fine(toString() + " read failed:" + e.getMessage()); if (e instanceof SocketTimeoutException) { con.terminate(SQLServerException.ERROR_SOCKET_TIMEOUT, e.getMessage(), e); } else { con.terminate(SQLServerException.DRIVER_ERROR_IO_FAILED, e.getMessage(), e); } return 0; // Keep the compiler happy. } } final void write(byte[] data, int offset, int length) throws SQLServerException { try { outputStream.write(data, offset, length); } catch (IOException e) { if (logger.isLoggable(Level.FINER)) logger.finer(toString() + " write failed:" + e.getMessage()); con.terminate(SQLServerException.DRIVER_ERROR_IO_FAILED, e.getMessage(), e); } } final void flush() throws SQLServerException { try { outputStream.flush(); } catch (IOException e) { if (logger.isLoggable(Level.FINER)) logger.finer(toString() + " flush failed:" + e.getMessage()); con.terminate(SQLServerException.DRIVER_ERROR_IO_FAILED, e.getMessage(), e); } } final void close() { if (null != sslSocket) disableSSL(); if (null != inputStream) { if (logger.isLoggable(Level.FINEST)) logger.finest(this.toString() + ": Closing inputStream..."); try { inputStream.close(); } catch (IOException e) { if (logger.isLoggable(Level.FINE)) logger.log(Level.FINE, this.toString() + ": Ignored error closing inputStream", e); } } if (null != outputStream) { if 
(logger.isLoggable(Level.FINEST)) logger.finest(this.toString() + ": Closing outputStream..."); try { outputStream.close(); } catch (IOException e) { if (logger.isLoggable(Level.FINE)) logger.log(Level.FINE, this.toString() + ": Ignored error closing outputStream", e); } } if (null != tcpSocket) { if (logger.isLoggable(Level.FINER)) logger.finer(this.toString() + ": Closing TCP socket..."); try { tcpSocket.close(); } catch (IOException e) { if (logger.isLoggable(Level.FINE)) logger.log(Level.FINE, this.toString() + ": Ignored error closing socket", e); } } } /** * Logs TDS packet data to the com.microsoft.sqlserver.jdbc.TDS.DATA logger * * @param data * the buffer containing the TDS packet payload data to log * @param nStartOffset * offset into the above buffer from where to start logging * @param nLength * length (in bytes) of payload * @param messageDetail * other loggable details about the payload */ void logPacket(byte data[], int nStartOffset, int nLength, String messageDetail) { assert 0 <= nLength && nLength <= data.length; assert 0 <= nStartOffset && nStartOffset <= data.length; final char hexChars[] = {'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F'}; final char printableChars[] = {'.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', ' ', '!', '\"', ' '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', ':', ';', '<', '=', '>', '?', '@', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', '[', '\\', ']', '^', '_', '`', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', '{', '|', '}', '~', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', 
'.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.'}; // Log message body lines have this form: // "XX XX XX XX XX XX XX XX XX XX XX XX XX XX XX XX ................" // 012345678911111111112222222222333333333344444444445555555555666666 // 01234567890123456789012345678901234567890123456789012345 final char lineTemplate[] = {' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.'}; char logLine[] = new char[lineTemplate.length]; System.arraycopy(lineTemplate, 0, logLine, 0, lineTemplate.length); // Logging builds up a string buffer for the entire log trace // before writing it out. So use an initial size large enough // that the buffer doesn't have to resize itself. StringBuilder logMsg = new StringBuilder(messageDetail.length() + // Message detail 4 * nLength + // 2-digit hex + space + ASCII, per byte 4 * (1 + nLength / 16) + // 2 extra spaces + CR/LF, per line (16 bytes per line) 80); // Extra fluff: IP:Port, Connection #, SPID, ... // Format the headline like so: // /157.55.121.182:2983 Connection 1, SPID 53, Message info here ... // Note: the log formatter itself timestamps what we write so we don't have // to do it again here. 
logMsg.append(tcpSocket.getLocalAddress().toString() + ":" + tcpSocket.getLocalPort() + " SPID:" + spid + " " + messageDetail + "\r\n"); // Fill in the body of the log message, line by line, 16 bytes per line. int nBytesLogged = 0; int nBytesThisLine; while (true) { // Fill up the line with as many bytes as we can (up to 16 bytes) for (nBytesThisLine = 0; nBytesThisLine < 16 && nBytesLogged < nLength; nBytesThisLine++, nBytesLogged++) { int nUnsignedByteVal = (data[nStartOffset + nBytesLogged] + 256) % 256; logLine[3 * nBytesThisLine] = hexChars[nUnsignedByteVal / 16]; logLine[3 * nBytesThisLine + 1] = hexChars[nUnsignedByteVal % 16]; logLine[50 + nBytesThisLine] = printableChars[nUnsignedByteVal]; } // Pad out the remainder with whitespace for (int nBytesJustified = nBytesThisLine; nBytesJustified < 16; nBytesJustified++) { logLine[3 * nBytesJustified] = ' '; logLine[3 * nBytesJustified + 1] = ' '; } logMsg.append(logLine, 0, 50 + nBytesThisLine); if (nBytesLogged == nLength) break; logMsg.append("\r\n"); } if (packetLogger.isLoggable(Level.FINEST)) { packetLogger.finest(logMsg.toString()); } } /** * Get the current socket SO_TIMEOUT value. * * @return the current socket timeout value * @throws IOException thrown if the socket timeout cannot be read */ final int getNetworkTimeout() throws IOException { return tcpSocket.getSoTimeout(); } /** * Set the socket SO_TIMEOUT value. * * @param timeout the socket timeout in milliseconds * @throws IOException thrown if the socket timeout cannot be set */ final void setNetworkTimeout(int timeout) throws IOException { tcpSocket.setSoTimeout(timeout); } } /** * SocketFinder is used to find a server socket to which a connection can be made. This class abstracts the logic of finding a socket from TDSChannel * class. * * In the case when useParallel is set to true, this is achieved by trying to make parallel connections to multiple IP addresses. 
This class is
 * responsible for spawning multiple threads and keeping track of the search result and the connected socket or exception to be thrown.
 *
 * In the case where multiSubnetFailover is false, we try our old logic of trying to connect to the first ip address
 *
 * Typical usage of this class is SocketFinder sf = new SocketFinder(traceId, conn); Socket = sf.getSocket(hostName, port, timeout);
 */
final class SocketFinder {
    /**
     * Indicates the result of a search
     */
    enum Result {
        UNKNOWN,// search is still in progress
        SUCCESS,// found a socket
        FAILURE// failed in finding a socket
    }

    // Thread pool - the values in the constructor are chosen based on the
    // explanation given in design_connection_director_multisubnet.doc
    // (0 core threads, unbounded max, 5s keep-alive, direct hand-off queue).
    private static final ThreadPoolExecutor threadPoolExecutor = new ThreadPoolExecutor(0, Integer.MAX_VALUE, 5, TimeUnit.SECONDS,
            new SynchronousQueue<Runnable>());

    // When parallel connections are to be used, use minimum timeout slice of 1500 milliseconds.
    private static final int minTimeoutForParallelConnections = 1500;

    // lock used for synchronization while updating
    // data within a socketFinder object
    private final Object socketFinderlock = new Object();

    // lock on which the parent thread would wait
    // after spawning threads.
private final Object parentThreadLock = new Object();

    // indicates whether the socketFinder has succeeded or failed
    // in finding a socket or is still trying to find a socket
    private volatile Result result = Result.UNKNOWN;

    // total no of socket connector threads
    // spawned by a socketFinder object
    private int noOfSpawnedThreads = 0;

    // no of threads that finished their socket connection
    // attempts and notified socketFinder about their result
    private volatile int noOfThreadsThatNotified = 0;

    // If a valid connected socket is found, this value would be non-null,
    // else this would be null
    private volatile Socket selectedSocket = null;

    // This would be one of the exceptions returned by the
    // socketConnector threads
    private volatile IOException selectedException = null;

    // Logging variables
    private static final Logger logger = Logger.getLogger("com.microsoft.sqlserver.jdbc.internals.SocketFinder");
    private final String traceID;

    // maximum number of IP Addresses supported
    private static final int ipAddressLimit = 64;

    // necessary for raising exceptions so that the connection pool can be notified
    private final SQLServerConnection conn;

    /**
     * Constructs a new SocketFinder object with appropriate traceId
     *
     * @param callerTraceID
     *            traceID of the caller
     * @param sqlServerConnection
     *            the SQLServer connection
     */
    SocketFinder(String callerTraceID, SQLServerConnection sqlServerConnection) {
        traceID = "SocketFinder(" + callerTraceID + ")";
        conn = sqlServerConnection;
    }

    /**
     * Used to find a socket to which a connection can be made
     *
     * @param hostName
     *            the server host to connect to
     * @param portNumber
     *            the server port to connect to
     * @param timeoutInMilliSeconds
     *            overall connect timeout (never 0)
     * @param useParallel
     *            true for MultiSubnetFailover-style parallel connection attempts
     * @param useTnir
     *            true for Transparent Network IP Resolution
     * @param isTnirFirstAttempt
     *            true for the first (short-timeout) TNIR attempt
     * @param timeoutInMilliSecondsForFullTimeout
     *            full timeout to fall back to when TNIR is disabled mid-flight
     * @return connected socket
     * @throws SQLServerException
     *             wrapping any IOException/InterruptedException encountered
     */
    Socket findSocket(String hostName, int portNumber, int timeoutInMilliSeconds, boolean useParallel, boolean useTnir, boolean isTnirFirstAttempt,
            int timeoutInMilliSecondsForFullTimeout) throws SQLServerException {
        assert timeoutInMilliSeconds != 0 : "The driver does not allow a time out of 0";

        try {
            InetAddress[] inetAddrs = null;

            // inetAddrs is only used if useParallel is true or TNIR is true. Skip resolving address if that's not the case.
            if (useParallel || useTnir) {
                // Ignore TNIR if host resolves to more than 64 IPs. Make sure we are using original timeout for this.
                inetAddrs = InetAddress.getAllByName(hostName);

                if ((useTnir) && (inetAddrs.length > ipAddressLimit)) {
                    useTnir = false;
                    timeoutInMilliSeconds = timeoutInMilliSecondsForFullTimeout;
                }
            }

            if (!useParallel) {
                // MSF is false. TNIR could be true or false. DBMirroring could be true or false.
                // For TNIR first attempt, we should do existing behavior including how host name is resolved.
                if (useTnir && isTnirFirstAttempt) {
                    return getDefaultSocket(hostName, portNumber, SQLServerConnection.TnirFirstAttemptTimeoutMs);
                }
                else if (!useTnir) {
                    return getDefaultSocket(hostName, portNumber, timeoutInMilliSeconds);
                }
            }

            // Code reaches here only if MSF = true or (TNIR = true and not TNIR first attempt)
            if (logger.isLoggable(Level.FINER)) {
                StringBuilder loggingString = new StringBuilder(this.toString() + " Total no of InetAddresses: " + inetAddrs.length + ". They are: ");
                for (InetAddress inetAddr : inetAddrs) {
                    loggingString.append(inetAddr.toString() + ";");
                }

                logger.finer(loggingString.toString());
            }

            if (inetAddrs.length > ipAddressLimit) {
                MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_ipAddressLimitWithMultiSubnetFailover"));
                Object[] msgArgs = {Integer.toString(ipAddressLimit)};
                String errorStr = form.format(msgArgs);
                // we do not want any retry to happen here. So, terminate the connection
                // as the config is unsupported.
                conn.terminate(SQLServerException.DRIVER_ERROR_UNSUPPORTED_CONFIG, errorStr);
            }

            // IBM JVMs use the NIO path for all addresses; other JVMs split by IP family below.
            if (Util.isIBM()) {
                timeoutInMilliSeconds = Math.max(timeoutInMilliSeconds, minTimeoutForParallelConnections);
                if (logger.isLoggable(Level.FINER)) {
                    logger.finer(this.toString() + "Using Java NIO with timeout:" + timeoutInMilliSeconds);
                }
                findSocketUsingJavaNIO(inetAddrs, portNumber, timeoutInMilliSeconds);
            }
            else {
                LinkedList<Inet4Address> inet4Addrs = new LinkedList<Inet4Address>();
                LinkedList<Inet6Address> inet6Addrs = new LinkedList<Inet6Address>();

                for (InetAddress inetAddr : inetAddrs) {
                    if (inetAddr instanceof Inet4Address) {
                        inet4Addrs.add((Inet4Address) inetAddr);
                    }
                    else {
                        boolean instanceOfIPv6 = inetAddr instanceof Inet6Address;
                        assert instanceOfIPv6 : "Unexpected IP address " + inetAddr.toString();
                        inet6Addrs.add((Inet6Address) inetAddr);
                    }
                }

                // use half timeout only if both IPv4 and IPv6 addresses are present
                int timeoutForEachIPAddressType;
                if ((!inet4Addrs.isEmpty()) && (!inet6Addrs.isEmpty())) {
                    timeoutForEachIPAddressType = Math.max(timeoutInMilliSeconds / 2, minTimeoutForParallelConnections);
                }
                else
                    timeoutForEachIPAddressType = Math.max(timeoutInMilliSeconds, minTimeoutForParallelConnections);

                if (!inet4Addrs.isEmpty()) {
                    if (logger.isLoggable(Level.FINER)) {
                        logger.finer(this.toString() + "Using Java NIO with timeout:" + timeoutForEachIPAddressType);
                    }

                    // inet4Addrs.toArray(new InetAddress[0]) is java style of converting a linked list to an array of reqd size
                    findSocketUsingJavaNIO(inet4Addrs.toArray(new InetAddress[0]), portNumber, timeoutForEachIPAddressType);
                }

                if (!result.equals(Result.SUCCESS)) {
                    // try threading logic
                    if (!inet6Addrs.isEmpty()) {
                        // do not start any threads if there is only one ipv6 address
                        if (inet6Addrs.size() == 1) {
                            return getConnectedSocket(inet6Addrs.get(0), portNumber, timeoutForEachIPAddressType);
                        }

                        if (logger.isLoggable(Level.FINER)) {
                            logger.finer(this.toString() + "Using Threading with timeout:" + timeoutForEachIPAddressType);
                        }

                        findSocketUsingThreading(inet6Addrs, portNumber, timeoutForEachIPAddressType);
                    }
                }
            }

            // If the thread continued execution due to timeout, the result may not be known.
            // In that case, update the result to failure. Note that this case is possible
            // for both IPv4 and IPv6.
            // Using double-checked locking for performance reasons.
            if (result.equals(Result.UNKNOWN)) {
                synchronized (socketFinderlock) {
                    if (result.equals(Result.UNKNOWN)) {
                        result = Result.FAILURE;
                        if (logger.isLoggable(Level.FINER)) {
                            logger.finer(this.toString() + " The parent thread updated the result to failure");
                        }
                    }
                }
            }

            // After we reach this point, there is no need for synchronization any more.
            // Because, the result would be known(success/failure).
            // And no threads would update SocketFinder
            // as their function calls would now be no-ops.
            if (result.equals(Result.FAILURE)) {
                if (selectedException == null) {
                    if (logger.isLoggable(Level.FINER)) {
                        logger.finer(this.toString()
                                + " There is no selectedException. The wait calls timed out before any connect call returned or timed out.");
                    }
                    String message = SQLServerException.getErrString("R_connectionTimedOut");
                    selectedException = new IOException(message);
                }
                throw selectedException;
            }
        }
        catch (InterruptedException ex) {
            // re-interrupt the current thread, in order to restore the thread's interrupt status.
            Thread.currentThread().interrupt();

            close(selectedSocket);
            SQLServerException.ConvertConnectExceptionToSQLServerException(hostName, portNumber, conn, ex);
        }
        catch (IOException ex) {
            close(selectedSocket);
            // The code below has been moved from connectHelper.
            // If we do not move it, the functions open(caller of findSocket)
            // and findSocket will have to
            // declare both IOException and SQLServerException in the throws clause
            // as we throw custom SQLServerExceptions(eg:IPAddressLimit, wrapping other exceptions
            // like interruptedException) in findSocket.
            // That would be a bit awkward, because connecthelper(the caller of open)
            // just wraps IOException into SQLServerException and throws SQLServerException.
            // Instead, it would be good to wrap all exceptions at one place - Right here, their origin.
            SQLServerException.ConvertConnectExceptionToSQLServerException(hostName, portNumber, conn, ex);
        }

        // Reached only on success: the catch blocks above always end in con.terminate /
        // ConvertConnectExceptionToSQLServerException, which throw.
        boolean equalSuccess = result.equals(Result.SUCCESS);

        assert equalSuccess;
        assert selectedSocket != null : "Bug in code. Selected Socket cannot be null here.";

        return selectedSocket;
    }

    /**
     * This function uses java NIO to connect to all the addresses in inetAddrs with in a specified timeout. If it succeeds in connecting, it closes
     * all the other open sockets and updates the result to success.
     *
     * @param inetAddrs
     *            the array of inetAddress to which connection should be made
     * @param portNumber
     *            the port number at which connection should be made
     * @param timeoutInMilliSeconds
     *            the per-attempt connect timeout (at least 500ms by construction; see below)
     * @throws IOException
     */
    private void findSocketUsingJavaNIO(InetAddress[] inetAddrs, int portNumber, int timeoutInMilliSeconds) throws IOException {
        // The driver does not allow a time out of zero.
        // Also, the unit of time the user can specify in the driver is seconds.
        // So, even if the user specifies 1 second(least value), the least possible
        // value that can come here as timeoutInMilliSeconds is 500 milliseconds.
assert timeoutInMilliSeconds != 0 : "The timeout cannot be zero";
assert inetAddrs.length != 0 : "Number of inetAddresses should not be zero in this function";

Selector selector = null;
LinkedList<SocketChannel> socketChannels = new LinkedList<SocketChannel>();
SocketChannel selectedChannel = null;

try {
    selector = Selector.open();

    // Open one non-blocking channel per candidate address and initiate the
    // connect on all of them; the selector multiplexes completion events.
    for (int i = 0; i < inetAddrs.length; i++) {
        SocketChannel sChannel = SocketChannel.open();
        socketChannels.add(sChannel);

        // make the channel non-blocking
        sChannel.configureBlocking(false);

        // register the channel for connect event
        int ops = SelectionKey.OP_CONNECT;
        SelectionKey key = sChannel.register(selector, ops);

        sChannel.connect(new InetSocketAddress(inetAddrs[i], portNumber));

        if (logger.isLoggable(Level.FINER))
            logger.finer(this.toString() + " initiated connection to address: " + inetAddrs[i] + ", portNumber: " + portNumber);
    }

    long timerNow = System.currentTimeMillis();
    long timerExpire = timerNow + timeoutInMilliSeconds;

    // Denotes the no of channels that still need to processed
    int noOfOutstandingChannels = inetAddrs.length;

    while (true) {
        long timeRemaining = timerExpire - timerNow;
        // if the timeout expired or a channel is selected or there are no more channels left to processes
        if ((timeRemaining <= 0) || (selectedChannel != null) || (noOfOutstandingChannels <= 0))
            break;

        // denotes the no of channels that are ready to be processed. i.e. they are either connected
        // or encountered an exception while trying to connect
        int readyChannels = selector.select(timeRemaining);

        if (logger.isLoggable(Level.FINER))
            logger.finer(this.toString() + " no of channels ready: " + readyChannels);

        // There are no real time guarantees on the time out of the select API used above.
        // This check is necessary
        // a) to guard against cases where the select returns faster than expected.
        // b) for cases where no channels could connect with in the time out
        if (readyChannels != 0) {
            Set<SelectionKey> selectedKeys = selector.selectedKeys();
            Iterator<SelectionKey> keyIterator = selectedKeys.iterator();
            while (keyIterator.hasNext()) {
                SelectionKey key = keyIterator.next();
                SocketChannel ch = (SocketChannel) key.channel();

                if (logger.isLoggable(Level.FINER))
                    logger.finer(this.toString() + " processing the channel :" + ch);// this traces the IP by default

                boolean connected = false;
                try {
                    connected = ch.finishConnect();

                    // ch.finishConnect should either return true or throw an exception
                    // as we have subscribed for OP_CONNECT.
                    assert connected == true : "finishConnect on channel:" + ch + " cannot be false";

                    selectedChannel = ch;

                    if (logger.isLoggable(Level.FINER))
                        logger.finer(this.toString() + " selected the channel :" + selectedChannel);
                    break;
                }
                catch (IOException ex) {
                    if (logger.isLoggable(Level.FINER))
                        logger.finer(this.toString() + " the exception: " + ex.getClass() + " with message: " + ex.getMessage()
                                + " occured while processing the channel: " + ch);
                    updateSelectedException(ex, this.toString());
                    // close the channel pro-actively so that we do not
                    // hang on to network resources
                    ch.close();
                }

                // unregister the key and remove from the selector's selectedKeys
                key.cancel();
                keyIterator.remove();
                // FIX: this was a bare `noOfOutstandingChannels` (no effect / not
                // compilable). The counter must be decremented here, once per
                // processed key, or the outer loop's termination condition
                // (noOfOutstandingChannels <= 0) can never be reached and the
                // loop would only exit on timeout or a successful connect.
                noOfOutstandingChannels--;
            }
        }

        timerNow = System.currentTimeMillis();
    }
}
catch (IOException ex) {
    // in case of an exception, close the selected channel.
    // All other channels will be closed in the finally block,
    // as they need to be closed irrespective of a success/failure
    close(selectedChannel);
    throw ex;
}
finally {
    // close the selector
    // As per java docs, on selector.close(), any uncancelled keys still
    // associated with this
    // selector are invalidated, their channels are deregistered, and any other
    // resources associated with this selector are released.
// So, its not necessary to cancel each key again close(selector); // Close all channels except the selected one. // As we close channels pro-actively in the try block, // its possible that we close a channel twice. // Closing a channel second time is a no-op. // This code is should be in the finally block to guard against cases where // we pre-maturely exit try block due to an exception in selector or other places. for (SocketChannel s : socketChannels) { if (s != selectedChannel) { close(s); } } } // if a channel was selected, make the necessary updates if (selectedChannel != null) { //the selectedChannel has the address that is connected successfully //convert it to a java.net.Socket object with the address SocketAddress iadd = selectedChannel.getRemoteAddress(); selectedSocket = new Socket(); selectedSocket.connect(iadd); result = Result.SUCCESS; //close the channel since it is not used anymore selectedChannel.close(); } } // This method contains the old logic of connecting to // a socket of one of the IPs corresponding to a given host name. // In the old code below, the logic around 0 timeout has been removed as // 0 timeout is not allowed. The code has been re-factored so that the logic // is common for hostName or InetAddress. private Socket getDefaultSocket(String hostName, int portNumber, int timeoutInMilliSeconds) throws IOException { // Open the socket, with or without a timeout, throwing an UnknownHostException // if there is a failure to resolve the host name to an InetSocketAddress. // Note that Socket(host, port) throws an UnknownHostException if the host name // cannot be resolved, but that InetSocketAddress(host, port) does not - it sets // the returned InetSocketAddress as unresolved. 
InetSocketAddress addr = new InetSocketAddress(hostName, portNumber);
return getConnectedSocket(addr, timeoutInMilliSeconds);
}

// Blocking connect to a single resolved address; used when only one candidate exists.
private Socket getConnectedSocket(InetAddress inetAddr, int portNumber, int timeoutInMilliSeconds) throws IOException {
    InetSocketAddress addr = new InetSocketAddress(inetAddr, portNumber);
    return getConnectedSocket(addr, timeoutInMilliSeconds);
}

// Common blocking-connect path. Stores the socket in the instance field
// selectedSocket so callers and cleanup code see the same reference.
private Socket getConnectedSocket(InetSocketAddress addr, int timeoutInMilliSeconds) throws IOException {
    assert timeoutInMilliSeconds != 0 : "timeout cannot be zero";
    if (addr.isUnresolved())
        throw new java.net.UnknownHostException();
    selectedSocket = new Socket();
    selectedSocket.connect(addr, timeoutInMilliSeconds);
    return selectedSocket;
}

// Connects to multiple IPv6 addresses in parallel by spawning one
// SocketConnector task per address and waiting (with timeout) to be
// notified of a result via parentThreadLock.
private void findSocketUsingThreading(LinkedList<Inet6Address> inetAddrs, int portNumber, int timeoutInMilliSeconds)
        throws IOException, InterruptedException {
    assert timeoutInMilliSeconds != 0 : "The timeout cannot be zero";

    boolean empty = inetAddrs.isEmpty();
    assert empty == false : "Number of inetAddresses should not be zero in this function";

    LinkedList<Socket> sockets = new LinkedList<Socket>();
    LinkedList<SocketConnector> socketConnectors = new LinkedList<SocketConnector>();

    try {

        // create a socket, inetSocketAddress and a corresponding socketConnector per inetAddress
        noOfSpawnedThreads = inetAddrs.size();
        for (InetAddress inetAddress : inetAddrs) {
            Socket s = new Socket();
            sockets.add(s);

            InetSocketAddress inetSocketAddress = new InetSocketAddress(inetAddress, portNumber);

            SocketConnector socketConnector = new SocketConnector(s, inetSocketAddress, timeoutInMilliSeconds, this);
            socketConnectors.add(socketConnector);
        }

        // acquire parent lock and spawn all threads
        // Note: the lock is taken BEFORE the tasks are submitted so that no
        // child can notify before the parent starts waiting.
        synchronized (parentThreadLock) {
            for (SocketConnector sc : socketConnectors) {
                threadPoolExecutor.execute(sc);
            }

            long timerNow = System.currentTimeMillis();
            long timerExpire = timerNow + timeoutInMilliSeconds;

            // The below loop is to guard against the spurious wake up problem
            while (true) {
                long timeRemaining = timerExpire - timerNow;

                if (logger.isLoggable(Level.FINER)) {
                    logger.finer(this.toString() + " TimeRemaining:" + timeRemaining + "; Result:" + result + "; Max. open thread count: "
                            + threadPoolExecutor.getLargestPoolSize() + "; Current open thread count:" + threadPoolExecutor.getActiveCount());
                }

                // if there is no time left or if the result is determined, break.
                // Note that a dirty read of result is totally fine here.
                // Since this thread holds the parentThreadLock, even if we do a dirty
                // read here, the child thread, after updating the result, would not be
                // able to call notify on the parentThreadLock
                // (and thus finish execution) as it would be waiting on parentThreadLock
                // held by this thread(the parent thread).
                // So, this thread will wait again and then be notified by the childThread.
                // On the other hand, if we try to take socketFinderLock here to avoid
                // dirty read, we would introduce a dead lock due to the
                // reverse order of locking in updateResult method.
                if (timeRemaining <= 0 || (!result.equals(Result.UNKNOWN)))
                    break;

                parentThreadLock.wait(timeRemaining);

                if (logger.isLoggable(Level.FINER)) {
                    logger.finer(this.toString() + " The parent thread wokeup.");
                }

                timerNow = System.currentTimeMillis();
            }
        }
    }
    finally {
        // Close all sockets except the selected one.
        // As we close sockets pro-actively in the child threads,
        // its possible that we close a socket twice.
        // Closing a socket second time is a no-op.
        // If a child thread is waiting on the connect call on a socket s,
        // closing the socket s here ensures that an exception is thrown
        // in the child thread immediately.
This mitigates the problem // of thread explosion by ensuring that unnecessary threads die // quickly without waiting for "min(timeOut, 21)" seconds for (Socket s : sockets) { if (s != selectedSocket) { close(s); } } } } /** * search result */ Result getResult() { return result; } void close(Selector selector) { if (null != selector) { if (logger.isLoggable(Level.FINER)) logger.finer(this.toString() + ": Closing Selector"); try { selector.close(); } catch (IOException e) { if (logger.isLoggable(Level.FINE)) logger.log(Level.FINE, this.toString() + ": Ignored the following error while closing Selector", e); } } } void close(Socket socket) { if (null != socket) { if (logger.isLoggable(Level.FINER)) logger.finer(this.toString() + ": Closing TCP socket:" + socket); try { socket.close(); } catch (IOException e) { if (logger.isLoggable(Level.FINE)) logger.log(Level.FINE, this.toString() + ": Ignored the following error while closing socket", e); } } } void close(SocketChannel socketChannel) { if (null != socketChannel) { if (logger.isLoggable(Level.FINER)) logger.finer(this.toString() + ": Closing TCP socket channel:" + socketChannel); try { socketChannel.close(); } catch (IOException e) { if (logger.isLoggable(Level.FINE)) logger.log(Level.FINE, this.toString() + "Ignored the following error while closing socketChannel", e); } } } /** * Used by socketConnector threads to notify the socketFinder of their connection attempt result(a connected socket or exception). It updates the * result, socket and exception variables of socketFinder object. This method notifies the parent thread if a socket is found or if all the * spawned threads have notified. It also closes a socket if it is not selected for use by socketFinder. 
 *
 * @param socket
 *            the SocketConnector's socket
 * @param exception
 *            Exception that occurred in socket connector thread
 * @param threadId
 *            Id of the calling Thread for diagnosis
 */
void updateResult(Socket socket, IOException exception, String threadId) {
    // First check is a deliberate unsynchronized read (double-checked
    // locking, per the class comments below); the authoritative check is
    // repeated under socketFinderlock.
    if (result.equals(Result.UNKNOWN)) {
        if (logger.isLoggable(Level.FINER)) {
            logger.finer("The following child thread is waiting for socketFinderLock:" + threadId);
        }

        synchronized (socketFinderlock) {
            if (logger.isLoggable(Level.FINER)) {
                logger.finer("The following child thread acquired socketFinderLock:" + threadId);
            }

            if (result.equals(Result.UNKNOWN)) {
                // if the connection was successful and no socket has been
                // selected yet
                if (exception == null && selectedSocket == null) {
                    selectedSocket = socket;
                    result = Result.SUCCESS;
                    if (logger.isLoggable(Level.FINER)) {
                        logger.finer("The socket of the following thread has been chosen:" + threadId);
                    }
                }

                // if an exception occurred
                if (exception != null) {
                    updateSelectedException(exception, threadId);
                }
            }

            noOfThreadsThatNotified++;

            // if all threads notified, but the result is still unknown,
            // update the result to failure
            if ((noOfThreadsThatNotified >= noOfSpawnedThreads) && result.equals(Result.UNKNOWN)) {
                result = Result.FAILURE;
            }

            if (!result.equals(Result.UNKNOWN)) {
                // 1) Note that at any point of time, there is only one
                // thread(parent/child thread) competing for parentThreadLock.
                // 2) The only time where a child thread could be waiting on
                // parentThreadLock is before the wait call in the parentThread
                // 3) After the above happens, the parent thread waits to be
                // notified on parentThreadLock. After being notified,
                // it would be the ONLY thread competing for the lock.
                // for the following reasons
                // a) The parentThreadLock is taken while holding the socketFinderLock.
                // So, all child threads, except one, block on socketFinderLock
                // (not parentThreadLock)
                // b) After parentThreadLock is notified by a child thread, the result
                // would be known(Refer the double-checked locking done at the
                // start of this method). So, all child threads would exit
                // as no-ops and would never compete with parent thread
                // for acquiring parentThreadLock
                // 4) As the parent thread is the only thread that competes for the
                // parentThreadLock, it need not wait to acquire the lock once it wakes
                // up and gets scheduled.
                // This results in better performance as it would close unnecessary
                // sockets and thus help child threads die quickly.

                if (logger.isLoggable(Level.FINER)) {
                    logger.finer("The following child thread is waiting for parentThreadLock:" + threadId);
                }

                synchronized (parentThreadLock) {
                    if (logger.isLoggable(Level.FINER)) {
                        logger.finer("The following child thread acquired parentThreadLock:" + threadId);
                    }

                    parentThreadLock.notify();
                }

                if (logger.isLoggable(Level.FINER)) {
                    logger.finer("The following child thread released parentThreadLock and notified the parent thread:" + threadId);
                }
            }
        }

        if (logger.isLoggable(Level.FINER)) {
            logger.finer("The following child thread released socketFinderLock:" + threadId);
        }
    }
}

/**
 * Updates the selectedException if
 * <p>
 * a) selectedException is null
 * <p>
 * b) ex is a non-socketTimeoutException and selectedException is a socketTimeoutException
 * <p>
 * If there are multiple exceptions, that are not related to socketTimeout the first non-socketTimeout exception is picked. If all exceptions are
 * related to socketTimeout, the first exception is picked. Note: This method is not thread safe. The caller should ensure thread safety.
* * @param ex * the IOException * @param traceId * the traceId of the thread */ public void updateSelectedException(IOException ex, String traceId) { boolean updatedException = false; if (selectedException == null) { selectedException = ex; updatedException = true; } else if ((!(ex instanceof SocketTimeoutException)) && (selectedException instanceof SocketTimeoutException)) { selectedException = ex; updatedException = true; } if (updatedException) { if (logger.isLoggable(Level.FINER)) { logger.finer("The selected exception is updated to the following: ExceptionType:" + ex.getClass() + "; ExceptionMessage:" + ex.getMessage() + "; by the following thread:" + traceId); } } } /** * Used fof tracing * * @return traceID string */ public String toString() { return traceID; } } /** * This is used to connect a socket in a separate thread */ final class SocketConnector implements Runnable { // socket on which connection attempt would be made private final Socket socket; // the socketFinder associated with this connector private final SocketFinder socketFinder; // inetSocketAddress to connect to private final InetSocketAddress inetSocketAddress; // timeout in milliseconds private final int timeoutInMilliseconds; // Logging variables private static final Logger logger = Logger.getLogger("com.microsoft.sqlserver.jdbc.internals.SocketConnector"); private final String traceID; // Id of the thread. used for diagnosis private final String threadID; // a counter used to give unique IDs to each connector thread. // this will have the id of the thread that was last created. 
private static long lastThreadID = 0;

/**
 * Constructs a new SocketConnector object with the associated socket and socketFinder
 */
SocketConnector(Socket socket, InetSocketAddress inetSocketAddress, int timeOutInMilliSeconds, SocketFinder socketFinder) {
    this.socket = socket;
    this.inetSocketAddress = inetSocketAddress;
    this.timeoutInMilliseconds = timeOutInMilliSeconds;
    this.socketFinder = socketFinder;
    this.threadID = Long.toString(nextThreadID());
    this.traceID = "SocketConnector:" + this.threadID + "(" + socketFinder.toString() + ")";
}

/**
 * If search for socket has not finished, this function tries to connect a socket(with a timeout) synchronously. It further notifies the
 * socketFinder the result of the connection attempt
 */
public void run() {
    IOException exception = null;

    // Note that we do not need socketFinder lock here
    // as we update nothing in socketFinder based on the condition.
    // So, its perfectly fine to make a dirty read.
    SocketFinder.Result result = socketFinder.getResult();

    if (result.equals(SocketFinder.Result.UNKNOWN)) {
        try {
            if (logger.isLoggable(Level.FINER)) {
                logger.finer(
                        this.toString() + " connecting to InetSocketAddress:" + inetSocketAddress + " with timeout:" + timeoutInMilliseconds);
            }

            socket.connect(inetSocketAddress, timeoutInMilliseconds);
        }
        catch (IOException ex) {
            if (logger.isLoggable(Level.FINER)) {
                logger.finer(this.toString() + " exception:" + ex.getClass() + " with message:" + ex.getMessage()
                        + " occured while connecting to InetSocketAddress:" + inetSocketAddress);
            }
            exception = ex;
        }

        // Always report back, success or failure, so the finder's
        // thread-accounting (noOfThreadsThatNotified) stays correct.
        socketFinder.updateResult(socket, exception, this.toString());
    }

}

/**
 * Used for tracing
 *
 * @return traceID string
 */
public String toString() {
    return traceID;
}

/**
 * Generates the next unique thread id.
 */
private static synchronized long nextThreadID() {
    if (lastThreadID == Long.MAX_VALUE) {
        if (logger.isLoggable(Level.FINER))
            logger.finer("Resetting the Id count");
        lastThreadID = 1;
    }
    else {
        lastThreadID++;
    }
    return lastThreadID;
}
}

/**
 * TDSWriter implements the client to server TDS data pipe.
 */
final class TDSWriter {
    private static Logger logger = Logger.getLogger("com.microsoft.sqlserver.jdbc.internals.TDS.Writer");
    private final String traceID;

    final public String toString() {
        return traceID;
    }

    private final TDSChannel tdsChannel;
    private final SQLServerConnection con;

    // Flag to indicate whether data written via writeXXX() calls
    // is loggable. Data is normally loggable. But sensitive
    // data, such as user credentials, should never be logged for
    // security reasons.
    private boolean dataIsLoggable = true;

    void setDataLoggable(boolean value) {
        dataIsLoggable = value;
    }

    private TDSCommand command = null;

    // TDS message type (Query, RPC, DTC, etc.) sent at the beginning
    // of every TDS message header. Value is set when starting a new
    // TDS message of the specified type.
    private byte tdsMessageType;

    private volatile int sendResetConnection = 0;

    // Size (in bytes) of the TDS packets to/from the server.
    // This size is normally fixed for the life of the connection,
    // but it can change once after the logon packet because packet
    // size negotiation happens at logon time.
    private int currentPacketSize = 0;

    // Size of the TDS packet header, which is:
    // byte type
    // byte status
    // short length
    // short SPID
    // byte packet
    // byte window
    private final static int TDS_PACKET_HEADER_SIZE = 8;

    private final static byte[] placeholderHeader = new byte[TDS_PACKET_HEADER_SIZE];

    // Intermediate array used to convert typically "small" values such as fixed-length types
    // (byte, int, long, etc.) and Strings from their native form to bytes for sending to
    // the channel buffers.
private byte valueBytes[] = new byte[256]; // Monotonically increasing packet number associated with the current message private volatile int packetNum = 0; // Bytes for sending decimal/numeric data private final static int BYTES4 = 4; private final static int BYTES8 = 8; private final static int BYTES12 = 12; private final static int BYTES16 = 16; public final static int BIGDECIMAL_MAX_LENGTH = 0x11; // is set to true when EOM is sent for the current message. // Note that this variable will never be accessed from multiple threads // simultaneously and so it need not be volatile private boolean isEOMSent = false; boolean isEOMSent() { return isEOMSent; } // Packet data buffers private ByteBuffer stagingBuffer; private ByteBuffer socketBuffer; private ByteBuffer logBuffer; private CryptoMetadata cryptoMeta = null; TDSWriter(TDSChannel tdsChannel, SQLServerConnection con) { this.tdsChannel = tdsChannel; this.con = con; traceID = "TDSWriter@" + Integer.toHexString(hashCode()) + " (" + con.toString() + ")"; } // TDS message start/end operations void preparePacket() throws SQLServerException { if (tdsChannel.isLoggingPackets()) { Arrays.fill(logBuffer.array(), (byte) 0xFE); logBuffer.clear(); } // Write a placeholder packet header. This will be replaced // with the real packet header when the packet is flushed. writeBytes(placeholderHeader); } /** * Start a new TDS message. 
 */
void writeMessageHeader() throws SQLServerException {
    // TDS 7.2 & later:
    // Include ALL_Headers/MARS header in message's first packet
    // Note: The PKT_BULK message does not need this ALL_HEADERS
    if ((TDS.PKT_QUERY == tdsMessageType || TDS.PKT_DTC == tdsMessageType || TDS.PKT_RPC == tdsMessageType)) {
        boolean includeTraceHeader = false;
        int totalHeaderLength = TDS.MESSAGE_HEADER_LENGTH;
        if (TDS.PKT_QUERY == tdsMessageType || TDS.PKT_RPC == tdsMessageType) {
            // Activity-trace header is only added once per activity id
            // (IsSentToServer) and only when tracing is enabled.
            if (con.isDenaliOrLater() && !ActivityCorrelator.getCurrent().IsSentToServer() && Util.IsActivityTraceOn()) {
                includeTraceHeader = true;
                totalHeaderLength += TDS.TRACE_HEADER_LENGTH;
            }
        }
        writeInt(totalHeaderLength); // allHeaders.TotalLength (DWORD)
        writeInt(TDS.MARS_HEADER_LENGTH); // MARS header length (DWORD)
        writeShort((short) 2); // allHeaders.HeaderType(MARS header) (USHORT)
        writeBytes(con.getTransactionDescriptor());
        writeInt(1); // marsHeader.OutstandingRequestCount
        if (includeTraceHeader) {
            writeInt(TDS.TRACE_HEADER_LENGTH); // trace header length (DWORD)
            writeTraceHeaderData();
            ActivityCorrelator.setCurrentActivityIdSentFlag(); // set the flag to indicate this ActivityId is sent
        }
    }
}

// Writes the trace-header payload: header type, activity GUID, sequence number.
void writeTraceHeaderData() throws SQLServerException {
    ActivityId activityId = ActivityCorrelator.getCurrent();
    final byte[] actIdByteArray = Util.asGuidByteArray(activityId.getId());
    long seqNum = activityId.getSequence();
    writeShort(TDS.HEADERTYPE_TRACE); // trace header type
    writeBytes(actIdByteArray, 0, actIdByteArray.length); // guid part of ActivityId
    writeInt((int) seqNum); // sequence number of ActivityId

    if (logger.isLoggable(Level.FINER))
        logger.finer("Send Trace Header - ActivityID: " + activityId.toString());
}

/**
 * Convenience method to prepare the TDS channel for writing and start a new TDS message.
 *
 * @param command
 *            The TDS command
 * @param tdsMessageType
 *            The TDS message type (PKT_QUERY, PKT_RPC, etc.)
 */
void startMessage(TDSCommand command, byte tdsMessageType) throws SQLServerException {
    this.command = command;
    this.tdsMessageType = tdsMessageType;
    this.packetNum = 0;
    this.isEOMSent = false;
    this.dataIsLoggable = true;

    // If the TDS packet size has changed since the last request
    // (which should really only happen after the login packet)
    // then allocate new buffers that are the correct size.
    int negotiatedPacketSize = con.getTDSPacketSize();
    if (currentPacketSize != negotiatedPacketSize) {
        socketBuffer = ByteBuffer.allocate(negotiatedPacketSize).order(ByteOrder.LITTLE_ENDIAN);
        stagingBuffer = ByteBuffer.allocate(negotiatedPacketSize).order(ByteOrder.LITTLE_ENDIAN);
        logBuffer = ByteBuffer.allocate(negotiatedPacketSize).order(ByteOrder.LITTLE_ENDIAN);
        currentPacketSize = negotiatedPacketSize;
    }

    // Mark the socket buffer fully consumed and reset staging for the new message.
    socketBuffer.position(socketBuffer.limit());
    stagingBuffer.clear();

    preparePacket();
    writeMessageHeader();
}

final void endMessage() throws SQLServerException {
    if (logger.isLoggable(Level.FINEST))
        logger.finest(toString() + " Finishing TDS message");
    writePacket(TDS.STATUS_BIT_EOM);
}

// If a complete request has not been sent to the server,
// the client MUST send the next packet with both ignore bit (0x02) and EOM bit (0x01)
// set in the status to cancel the request.
final boolean ignoreMessage() throws SQLServerException { if (packetNum > 0) { assert !isEOMSent; if (logger.isLoggable(Level.FINER)) logger.finest(toString() + " Finishing TDS message by sending ignore bit and end of message"); writePacket(TDS.STATUS_BIT_EOM | TDS.STATUS_BIT_ATTENTION); return true; } return false; } final void resetPooledConnection() { if (logger.isLoggable(Level.FINEST)) logger.finest(toString() + " resetPooledConnection"); sendResetConnection = TDS.STATUS_BIT_RESET_CONN; } // Primitive write operations void writeByte(byte value) throws SQLServerException { if (stagingBuffer.remaining() >= 1) { stagingBuffer.put(value); if (tdsChannel.isLoggingPackets()) { if (dataIsLoggable) logBuffer.put(value); else logBuffer.position(logBuffer.position() + 1); } } else { valueBytes[0] = value; writeWrappedBytes(valueBytes, 1); } } void writeChar(char value) throws SQLServerException { if (stagingBuffer.remaining() >= 2) { stagingBuffer.putChar(value); if (tdsChannel.isLoggingPackets()) { if (dataIsLoggable) logBuffer.putChar(value); else logBuffer.position(logBuffer.position() + 2); } } else { Util.writeShort((short) value, valueBytes, 0); writeWrappedBytes(valueBytes, 2); } } void writeShort(short value) throws SQLServerException { if (stagingBuffer.remaining() >= 2) { stagingBuffer.putShort(value); if (tdsChannel.isLoggingPackets()) { if (dataIsLoggable) logBuffer.putShort(value); else logBuffer.position(logBuffer.position() + 2); } } else { Util.writeShort(value, valueBytes, 0); writeWrappedBytes(valueBytes, 2); } } void writeInt(int value) throws SQLServerException { if (stagingBuffer.remaining() >= 4) { stagingBuffer.putInt(value); if (tdsChannel.isLoggingPackets()) { if (dataIsLoggable) logBuffer.putInt(value); else logBuffer.position(logBuffer.position() + 4); } } else { Util.writeInt(value, valueBytes, 0); writeWrappedBytes(valueBytes, 4); } } /** * Append a real value in the TDS stream. 
 *
 * @param value
 *            the data value
 */
void writeReal(Float value) throws SQLServerException {
    writeInt(Float.floatToRawIntBits(value.floatValue()));
}

/**
 * Append a double value in the TDS stream.
 *
 * @param value
 *            the data value
 */
void writeDouble(double value) throws SQLServerException {
    if (stagingBuffer.remaining() >= 8) {
        stagingBuffer.putDouble(value);
        if (tdsChannel.isLoggingPackets()) {
            if (dataIsLoggable)
                logBuffer.putDouble(value);
            else
                logBuffer.position(logBuffer.position() + 8);
        }
    }
    else {
        // Not enough room in this packet: emit the IEEE-754 bits one byte
        // at a time, least significant byte first (little-endian).
        long bits = Double.doubleToLongBits(value);
        long mask = 0xFF;
        int nShift = 0;
        for (int i = 0; i < 8; i++) {
            writeByte((byte) ((bits & mask) >> nShift));
            nShift += 8;
            mask = mask << 8;
        }
    }
}

/**
 * Append a big decimal in the TDS stream.
 *
 * @param bigDecimalVal
 *            the big decimal data value
 * @param srcJdbcType
 *            the source JDBCType
 * @param precision
 *            the precision of the data value
 * @param scale
 *            the scale of the column
 * @throws SQLServerException
 */
void writeBigDecimal(BigDecimal bigDecimalVal, int srcJdbcType, int precision, int scale) throws SQLServerException {
    /*
     * Length including sign byte One 1-byte unsigned integer that represents the sign of the decimal value (0 => Negative, 1 => positive) One 4-,
     * 8-, 12-, or 16-byte signed integer that represents the decimal value multiplied by 10^scale.
     */

    /*
     * setScale of all BigDecimal value based on metadata as scale is not sent seperately for individual value. Use the rounding used in Server.
     * Say, for BigDecimal("0.1"), if scale in metdadata is 0, then ArithmeticException would be thrown if RoundingMode is not set
     */
    bigDecimalVal = bigDecimalVal.setScale(scale, RoundingMode.HALF_UP);

    // data length + 1 byte for sign
    int bLength = BYTES16 + 1;
    writeByte((byte) (bLength));

    // Byte array to hold all the data and padding bytes.
    byte[] bytes = new byte[bLength];

    byte[] valueBytes = DDC.convertBigDecimalToBytes(bigDecimalVal, scale);
    // removing the precision and scale information from the valueBytes array
    System.arraycopy(valueBytes, 2, bytes, 0, valueBytes.length - 2);
    writeBytes(bytes);
}

// Writes a SMALLDATETIME value: 2-byte day count since 1900-01-01 plus
// 2-byte minutes-since-midnight (seconds rounded to the nearest minute).
void writeSmalldatetime(String value) throws SQLServerException {
    GregorianCalendar calendar = initializeCalender(TimeZone.getDefault());
    long utcMillis; // Value to which the calendar is to be set (in milliseconds 1/1/1970 00:00:00 GMT)
    java.sql.Timestamp timestampValue = java.sql.Timestamp.valueOf(value);
    utcMillis = timestampValue.getTime();

    // Load the calendar with the desired value
    calendar.setTimeInMillis(utcMillis);

    // Number of days since the SQL Server Base Date (January 1, 1900)
    int daysSinceSQLBaseDate = DDC.daysSinceBaseDate(calendar.get(Calendar.YEAR), calendar.get(Calendar.DAY_OF_YEAR), TDS.BASE_YEAR_1900);

    // Next, figure out the number of milliseconds since midnight of the current day.
    int millisSinceMidnight = 1000 * calendar.get(Calendar.SECOND) + // Seconds into the current minute
            60 * 1000 * calendar.get(Calendar.MINUTE) + // Minutes into the current hour
            60 * 60 * 1000 * calendar.get(Calendar.HOUR_OF_DAY); // Hours into the current day

    // The last millisecond of the current day is always rounded to the first millisecond
    // of the next day because DATETIME is only accurate to 1/300th of a second.
    if (1000 * 60 * 60 * 24 - 1 <= millisSinceMidnight) {
        ++daysSinceSQLBaseDate;
        millisSinceMidnight = 0;
    }

    // Number of days since the SQL Server Base Date (January 1, 1900)
    writeShort((short) daysSinceSQLBaseDate);

    int secondsSinceMidnight = (millisSinceMidnight / 1000);
    int minutesSinceMidnight = (secondsSinceMidnight / 60);

    // Values that are 29.998 seconds or less are rounded down to the nearest minute
    minutesSinceMidnight = ((secondsSinceMidnight % 60) > 29.998) ? minutesSinceMidnight + 1 : minutesSinceMidnight;

    // Minutes since midnight
    writeShort((short) minutesSinceMidnight);
}

// Writes a DATETIME value: 4-byte day count since 1900-01-01 plus a 4-byte
// time-of-day expressed in 1/300ths of a second.
void writeDatetime(String value) throws SQLServerException {
    GregorianCalendar calendar = initializeCalender(TimeZone.getDefault());
    long utcMillis; // Value to which the calendar is to be set (in milliseconds 1/1/1970 00:00:00 GMT)
    int subSecondNanos;
    java.sql.Timestamp timestampValue = java.sql.Timestamp.valueOf(value);
    utcMillis = timestampValue.getTime();
    subSecondNanos = timestampValue.getNanos();

    // Load the calendar with the desired value
    calendar.setTimeInMillis(utcMillis);

    // Number of days there have been since the SQL Base Date.
    // These are based on SQL Server algorithms
    int daysSinceSQLBaseDate = DDC.daysSinceBaseDate(calendar.get(Calendar.YEAR), calendar.get(Calendar.DAY_OF_YEAR), TDS.BASE_YEAR_1900);

    // Number of milliseconds since midnight of the current day.
    int millisSinceMidnight = (subSecondNanos + Nanos.PER_MILLISECOND / 2) / Nanos.PER_MILLISECOND + // Millis into the current second
            1000 * calendar.get(Calendar.SECOND) + // Seconds into the current minute
            60 * 1000 * calendar.get(Calendar.MINUTE) + // Minutes into the current hour
            60 * 60 * 1000 * calendar.get(Calendar.HOUR_OF_DAY); // Hours into the current day

    // The last millisecond of the current day is always rounded to the first millisecond
    // of the next day because DATETIME is only accurate to 1/300th of a second.
    if (1000 * 60 * 60 * 24 - 1 <= millisSinceMidnight) {
        ++daysSinceSQLBaseDate;
        millisSinceMidnight = 0;
    }

    // Last-ditch verification that the value is in the valid range for the
    // DATETIMEN TDS data type (1/1/1753 to 12/31/9999). If it's not, then
    // throw an exception now so that statement execution is safely canceled.
    // Attempting to put an invalid value on the wire would result in a TDS
    // exception, which would close the connection.
    // These are based on SQL Server algorithms
    if (daysSinceSQLBaseDate < DDC.daysSinceBaseDate(1753, 1, TDS.BASE_YEAR_1900)
            || daysSinceSQLBaseDate >= DDC.daysSinceBaseDate(10000, 1, TDS.BASE_YEAR_1900)) {
        MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_valueOutOfRange"));
        Object[] msgArgs = {SSType.DATETIME};
        throw new SQLServerException(form.format(msgArgs), SQLState.DATA_EXCEPTION_DATETIME_FIELD_OVERFLOW, DriverError.NOT_SET, null);
    }

    // Number of days since the SQL Server Base Date (January 1, 1900)
    writeInt(daysSinceSQLBaseDate);

    // Milliseconds since midnight (at a resolution of three hundredths of a second)
    writeInt((3 * millisSinceMidnight + 5) / 10);
}

void writeDate(String value) throws SQLServerException {
    GregorianCalendar calendar = initializeCalender(TimeZone.getDefault());
    long utcMillis = 0;
    java.sql.Date dateValue = java.sql.Date.valueOf(value);
    utcMillis = dateValue.getTime();

    // Load the calendar with the desired value
    calendar.setTimeInMillis(utcMillis);

    writeScaledTemporal(calendar, 0, // subsecond nanos (none for a date value)
            0, // scale (dates are not scaled)
            SSType.DATE);
}

void writeTime(java.sql.Timestamp value, int scale) throws SQLServerException {
    GregorianCalendar calendar = initializeCalender(TimeZone.getDefault());
    long utcMillis = 0; // Value to which the calendar is to be set (in milliseconds 1/1/1970 00:00:00 GMT)
    int subSecondNanos = 0;
    utcMillis = value.getTime();
    subSecondNanos = value.getNanos();

    // Load the calendar with the desired value
    calendar.setTimeInMillis(utcMillis);

    writeScaledTemporal(calendar, subSecondNanos, scale, SSType.TIME);
}

void writeDateTimeOffset(Object value, int scale, SSType destSSType) throws SQLServerException {
    GregorianCalendar calendar = null;
    TimeZone timeZone = TimeZone.getDefault(); // Time zone to associate with the value in the Gregorian calendar
    long utcMillis = 0; // Value to which the calendar is to be set (in milliseconds 1/1/1970 00:00:00 GMT)
    int subSecondNanos
= 0;
    int minutesOffset = 0;

    microsoft.sql.DateTimeOffset dtoValue = (microsoft.sql.DateTimeOffset) value;
    utcMillis = dtoValue.getTimestamp().getTime();
    subSecondNanos = dtoValue.getTimestamp().getNanos();
    minutesOffset = dtoValue.getMinutesOffset();

    // If the target data type is DATETIMEOFFSET, then use UTC for the calendar that
    // will hold the value, since writeRPCDateTimeOffset expects a UTC calendar.
    // Otherwise, when converting from DATETIMEOFFSET to other temporal data types,
    // use a local time zone determined by the minutes offset of the value, since
    // the writers for those types expect local calendars.
    timeZone = (SSType.DATETIMEOFFSET == destSSType) ? UTC.timeZone : new SimpleTimeZone(minutesOffset * 60 * 1000, "");

    calendar = new GregorianCalendar(timeZone, Locale.US);
    calendar.setLenient(true);
    calendar.clear();
    calendar.setTimeInMillis(utcMillis);

    writeScaledTemporal(calendar, subSecondNanos, scale, SSType.DATETIMEOFFSET);

    // Trailing 2-byte zone offset in minutes
    writeShort((short) minutesOffset);
}

/**
 * Writes a java.time.OffsetDateTime value as a DATETIMEOFFSET.
 *
 * The value is rebuilt as a Timestamp string, loaded into a UTC calendar,
 * adjusted by the local-zone and value offsets, and then written via
 * writeScaledTemporal followed by the 2-byte minutes offset.
 *
 * @param offsetDateTimeValue the value to write
 * @param scale the TDS scale for the fractional seconds
 * @throws SQLServerException if the zone offset cannot be read or a write fails
 */
void writeOffsetDateTimeWithTimezone(OffsetDateTime offsetDateTimeValue, int scale) throws SQLServerException {
    GregorianCalendar calendar = null;
    TimeZone timeZone;
    long utcMillis = 0;
    int subSecondNanos = 0;
    int minutesOffset = 0;

    try {
        // offsetTimeValue.getOffset() returns a ZoneOffset object which has only hours and minutes
        // components. So the result of the division will be an integer always. SQL Server also supports
        // offsets in minutes precision.
        minutesOffset = offsetDateTimeValue.getOffset().getTotalSeconds() / 60;
    }
    catch (Exception e) {
        throw new SQLServerException(SQLServerException.getErrString("R_zoneOffsetError"), null, // SQLState is null as this error is generated in
                                                                                                 // the driver
                0, // Use 0 instead of DriverError.NOT_SET to use the correct constructor
                e);
    }
    subSecondNanos = offsetDateTimeValue.getNano();

    // writeScaledTemporal() expects subSecondNanos in 9 digits precision
    // but getNano() used in OffsetDateTime returns precision based on nanoseconds read from csv
    // padding zeros to match the expectation of writeScaledTemporal()
    int padding = 9 - String.valueOf(subSecondNanos).length();
    while (padding > 0) {
        subSecondNanos = subSecondNanos * 10;
        padding--; // FIX: source had a bare "padding" statement (invalid Java / non-terminating loop)
    }

    // For TIME_WITH_TIMEZONE, use UTC for the calendar that will hold the value
    timeZone = UTC.timeZone;

    // The behavior is similar to microsoft.sql.DateTimeOffset
    // In Timestamp format, only YEAR needs to have 4 digits. The leading zeros for the rest of the fields can be omitted.
    String offDateTimeStr = String.format("%04d", offsetDateTimeValue.getYear()) + '-' + offsetDateTimeValue.getMonthValue() + '-'
            + offsetDateTimeValue.getDayOfMonth() + ' ' + offsetDateTimeValue.getHour() + ':' + offsetDateTimeValue.getMinute() + ':'
            + offsetDateTimeValue.getSecond();
    utcMillis = Timestamp.valueOf(offDateTimeStr).getTime();

    calendar = initializeCalender(timeZone);
    calendar.setTimeInMillis(utcMillis);

    // Local timezone value in minutes
    int minuteAdjustment = ((TimeZone.getDefault().getRawOffset()) / (60 * 1000));
    // check if date is in day light savings and add daylight saving minutes
    if (TimeZone.getDefault().inDaylightTime(calendar.getTime()))
        minuteAdjustment += (TimeZone.getDefault().getDSTSavings()) / (60 * 1000);
    // If the local time is negative then positive minutesOffset must be subtracted from calendar
    minuteAdjustment += (minuteAdjustment < 0) ? (minutesOffset * (-1)) : minutesOffset;
    calendar.add(Calendar.MINUTE, minuteAdjustment);

    writeScaledTemporal(calendar, subSecondNanos, scale, SSType.DATETIMEOFFSET);
    writeShort((short) minutesOffset);
}

/**
 * Writes a java.time.OffsetTime value as a DATETIMEOFFSET, using the base
 * year 1900 as the date portion. Mirrors writeOffsetDateTimeWithTimezone.
 * (Body continues in the next section.)
 *
 * @param offsetTimeValue the value to write
 * @param scale the TDS scale for the fractional seconds
 * @throws SQLServerException if the zone offset cannot be read or a write fails
 */
void writeOffsetTimeWithTimezone(OffsetTime offsetTimeValue, int scale) throws SQLServerException {
    GregorianCalendar calendar = null;
    TimeZone timeZone;
    long utcMillis = 0;
    int subSecondNanos = 0;
    int minutesOffset = 0;

    try {
        // offsetTimeValue.getOffset() returns a ZoneOffset object which has only hours and minutes
        // components. So the result of the division will be an integer always. SQL Server also supports
        // offsets in minutes precision.
        minutesOffset = offsetTimeValue.getOffset().getTotalSeconds() / 60;
    }
    catch (Exception e) {
        throw new SQLServerException(SQLServerException.getErrString("R_zoneOffsetError"), null, // SQLState is null as this error is generated in
                                                                                                 // the driver
                0, // Use 0 instead of DriverError.NOT_SET to use the correct constructor
                e);
    }
    subSecondNanos = offsetTimeValue.getNano();

    // writeScaledTemporal() expects subSecondNanos in 9 digits precision
    // but getNano() used in OffsetDateTime returns precision based on nanoseconds read from csv
    // padding zeros to match the expectation of writeScaledTemporal()
    int padding = 9 - String.valueOf(subSecondNanos).length();
    while (padding > 0) {
        subSecondNanos = subSecondNanos * 10;
        padding--; // FIX: source had a bare "padding" statement (invalid Java / non-terminating loop)
    }

    // For TIME_WITH_TIMEZONE, use UTC for the calendar that will hold the value
    timeZone = UTC.timeZone;

    // Using TDS.BASE_YEAR_1900, based on SQL server behaviour
    // If date only contains a time part, the return value is 1900, the base year.
    // In Timestamp format, leading zeros for the fields can be omitted.
String offsetTimeStr = TDS.BASE_YEAR_1900 + "-01-01" + ' ' + offsetTimeValue.getHour() + ':' + offsetTimeValue.getMinute() + ':'
        + offsetTimeValue.getSecond();
utcMillis = Timestamp.valueOf(offsetTimeStr).getTime();

calendar = initializeCalender(timeZone);
calendar.setTimeInMillis(utcMillis);

int minuteAdjustment = (TimeZone.getDefault().getRawOffset()) / (60 * 1000);
// check if date is in day light savings and add daylight saving minutes to Local timezone(in minutes)
if (TimeZone.getDefault().inDaylightTime(calendar.getTime()))
    minuteAdjustment += ((TimeZone.getDefault().getDSTSavings()) / (60 * 1000));
// If the local time is negative then positive minutesOffset must be subtracted from calender
minuteAdjustment += (minuteAdjustment < 0) ? (minutesOffset * (-1)) : minutesOffset;
calendar.add(Calendar.MINUTE, minuteAdjustment);

writeScaledTemporal(calendar, subSecondNanos, scale, SSType.DATETIMEOFFSET);
writeShort((short) minutesOffset);
}

/**
 * Writes an 8-byte little-endian long, either directly into the staging
 * buffer or, when fewer than 8 bytes remain, byte-by-byte via
 * writeWrappedBytes so the value can straddle a packet boundary.
 *
 * @param value the long to write
 * @throws SQLServerException if flushing a full packet fails
 */
void writeLong(long value) throws SQLServerException {
    if (stagingBuffer.remaining() >= 8) {
        stagingBuffer.putLong(value);
        if (tdsChannel.isLoggingPackets()) {
            if (dataIsLoggable)
                logBuffer.putLong(value);
            else
                logBuffer.position(logBuffer.position() + 8);
        }
    }
    else {
        // Not enough contiguous room: serialize little-endian into the scratch
        // array and let writeWrappedBytes split it across packets.
        valueBytes[0] = (byte) ((value >> 0) & 0xFF);
        valueBytes[1] = (byte) ((value >> 8) & 0xFF);
        valueBytes[2] = (byte) ((value >> 16) & 0xFF);
        valueBytes[3] = (byte) ((value >> 24) & 0xFF);
        valueBytes[4] = (byte) ((value >> 32) & 0xFF);
        valueBytes[5] = (byte) ((value >> 40) & 0xFF);
        valueBytes[6] = (byte) ((value >> 48) & 0xFF);
        valueBytes[7] = (byte) ((value >> 56) & 0xFF);
        writeWrappedBytes(valueBytes, 8);
    }
}

/**
 * Writes an entire byte array. Convenience overload for
 * {@link #writeBytes(byte[], int, int)}.
 */
void writeBytes(byte[] value) throws SQLServerException {
    writeBytes(value, 0, value.length);
}

/**
 * Writes a byte range, flushing full packets as the staging buffer fills.
 *
 * @param value the source array
 * @param offset start index within value
 * @param length number of bytes to write (must not exceed value.length)
 * @throws SQLServerException if a packet flush fails
 */
void writeBytes(byte[] value, int offset, int length) throws SQLServerException {
    assert length <= value.length;

    int bytesWritten = 0;
    int bytesToWrite;

    if (logger.isLoggable(Level.FINEST))
        logger.finest(toString() + " Writing " + length + " bytes");

    while ((bytesToWrite = length - bytesWritten) > 0) {
        // Flush a full packet before copying more data in
        if (0 == stagingBuffer.remaining())
            writePacket(TDS.STATUS_NORMAL);

        if (bytesToWrite > stagingBuffer.remaining())
            bytesToWrite = stagingBuffer.remaining();

        stagingBuffer.put(value, offset + bytesWritten, bytesToWrite);
        if (tdsChannel.isLoggingPackets()) {
            if (dataIsLoggable)
                logBuffer.put(value, offset + bytesWritten, bytesToWrite);
            else
                logBuffer.position(logBuffer.position() + bytesToWrite);
        }

        bytesWritten += bytesToWrite;
    }
}

void writeWrappedBytes(byte value[], int valueLength) throws SQLServerException {
    // This function should only be used to write a value that is longer than
    // what remains in the current staging buffer. However, the value must
    // be short enough to fit in an empty buffer.
    assert valueLength <= value.length;
    assert stagingBuffer.remaining() < valueLength;
    assert valueLength <= stagingBuffer.capacity();

    // Fill any remaining space in the staging buffer
    int remaining = stagingBuffer.remaining();
    if (remaining > 0) {
        stagingBuffer.put(value, 0, remaining);
        if (tdsChannel.isLoggingPackets()) {
            if (dataIsLoggable)
                logBuffer.put(value, 0, remaining);
            else
                logBuffer.position(logBuffer.position() + remaining);
        }
    }

    writePacket(TDS.STATUS_NORMAL);

    // After swapping, the staging buffer should once again be empty, so the
    // remainder of the value can be written to it.
stagingBuffer.put(value, remaining, valueLength - remaining); if (tdsChannel.isLoggingPackets()) { if (dataIsLoggable) logBuffer.put(value, remaining, valueLength - remaining); else logBuffer.position(logBuffer.position() + remaining); } } void writeString(String value) throws SQLServerException { int charsCopied = 0; int length = value.length(); while (charsCopied < length) { int bytesToCopy = 2 * (length - charsCopied); if (bytesToCopy > valueBytes.length) bytesToCopy = valueBytes.length; int bytesCopied = 0; while (bytesCopied < bytesToCopy) { char ch = value.charAt(charsCopied++); valueBytes[bytesCopied++] = (byte) ((ch >> 0) & 0xFF); valueBytes[bytesCopied++] = (byte) ((ch >> 8) & 0xFF); } writeBytes(valueBytes, 0, bytesCopied); } } void writeStream(InputStream inputStream, long advertisedLength, boolean writeChunkSizes) throws SQLServerException { assert DataTypes.UNKNOWN_STREAM_LENGTH == advertisedLength || advertisedLength >= 0; long actualLength = 0; final byte[] streamByteBuffer = new byte[4 * currentPacketSize]; int bytesRead = 0; int bytesToWrite; do { // Read in next chunk for (bytesToWrite = 0; -1 != bytesRead && bytesToWrite < streamByteBuffer.length; bytesToWrite += bytesRead) { try { bytesRead = inputStream.read(streamByteBuffer, bytesToWrite, streamByteBuffer.length - bytesToWrite); } catch (IOException e) { MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_errorReadingStream")); Object[] msgArgs = {e.toString()}; error(form.format(msgArgs), SQLState.DATA_EXCEPTION_NOT_SPECIFIC, DriverError.NOT_SET); } if (-1 == bytesRead) break; // Check for invalid bytesRead returned from InputStream.read if (bytesRead < 0 || bytesRead > streamByteBuffer.length - bytesToWrite) { MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_errorReadingStream")); Object[] msgArgs = {SQLServerException.getErrString("R_streamReadReturnedInvalidValue")}; error(form.format(msgArgs), SQLState.DATA_EXCEPTION_NOT_SPECIFIC, 
DriverError.NOT_SET); } } // Write it out if (writeChunkSizes) writeInt(bytesToWrite); writeBytes(streamByteBuffer, 0, bytesToWrite); actualLength += bytesToWrite; } while (-1 != bytesRead || bytesToWrite > 0); // If we were given an input stream length that we had to match and // the actual stream length did not match then cancel the request. if (DataTypes.UNKNOWN_STREAM_LENGTH != advertisedLength && actualLength != advertisedLength) { MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_mismatchedStreamLength")); Object[] msgArgs = {Long.valueOf(advertisedLength), Long.valueOf(actualLength)}; error(form.format(msgArgs), SQLState.DATA_EXCEPTION_LENGTH_MISMATCH, DriverError.NOT_SET); } } /* * Adding another function for writing non-unicode reader instead of re-factoring the writeReader() for performance efficiency. As this method * will only be used in bulk copy, it needs to be efficient. Note: Any changes in algorithm/logic should propagate to both writeReader() and * writeNonUnicodeReader(). */ void writeNonUnicodeReader(Reader reader, long advertisedLength, boolean isDestBinary, Charset charSet) throws SQLServerException { assert DataTypes.UNKNOWN_STREAM_LENGTH == advertisedLength || advertisedLength >= 0; long actualLength = 0; char[] streamCharBuffer = new char[currentPacketSize]; // The unicode version, writeReader() allocates a byte buffer that is 4 times the currentPacketSize, not sure why. 
byte[] streamByteBuffer = new byte[currentPacketSize];
int charsRead = 0;
int charsToWrite;
int bytesToWrite;
String streamString;

do {
    // Read in next chunk
    for (charsToWrite = 0; -1 != charsRead && charsToWrite < streamCharBuffer.length; charsToWrite += charsRead) {
        try {
            charsRead = reader.read(streamCharBuffer, charsToWrite, streamCharBuffer.length - charsToWrite);
        }
        catch (IOException e) {
            MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_errorReadingStream"));
            Object[] msgArgs = {e.toString()};
            error(form.format(msgArgs), SQLState.DATA_EXCEPTION_NOT_SPECIFIC, DriverError.NOT_SET);
        }

        if (-1 == charsRead)
            break;

        // Check for invalid bytesRead returned from Reader.read
        if (charsRead < 0 || charsRead > streamCharBuffer.length - charsToWrite) {
            MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_errorReadingStream"));
            Object[] msgArgs = {SQLServerException.getErrString("R_streamReadReturnedInvalidValue")};
            error(form.format(msgArgs), SQLState.DATA_EXCEPTION_NOT_SPECIFIC, DriverError.NOT_SET);
        }
    }

    if (!isDestBinary) {
        // Write it out
        // This also writes the PLP_TERMINATOR token after all the data in the the stream are sent.
        // The Do-While loop goes on one more time as charsToWrite is greater than 0 for the last chunk, and
        // in this last round the only thing that is written is an int value of 0, which is the PLP Terminator token(0x00000000).
        writeInt(charsToWrite);

        // Narrow each char to a single byte, either by truncation (no charset)
        // or by encoding one char at a time with the collation's charset.
        for (int charsCopied = 0; charsCopied < charsToWrite; ++charsCopied) {
            if (null == charSet) {
                streamByteBuffer[charsCopied] = (byte) (streamCharBuffer[charsCopied] & 0xFF);
            }
            else {
                // encoding as per collation
                // NOTE(review): takes only the first encoded byte per char — assumes a
                // single-byte charset; verify for multi-byte collations.
                streamByteBuffer[charsCopied] = new String(streamCharBuffer[charsCopied] + "").getBytes(charSet)[0];
            }
        }
        writeBytes(streamByteBuffer, 0, charsToWrite);
    }
    else {
        // Destination is binary: the chars are a hex string; two hex chars yield one byte.
        bytesToWrite = charsToWrite;
        if (0 != charsToWrite)
            bytesToWrite = charsToWrite / 2;

        streamString = new String(streamCharBuffer);
        byte[] bytes = ParameterUtils.HexToBin(streamString.trim());
        writeInt(bytesToWrite);
        writeBytes(bytes, 0, bytesToWrite);
    }
    actualLength += charsToWrite;
} while (-1 != charsRead || charsToWrite > 0);

// If we were given an input stream length that we had to match and
// the actual stream length did not match then cancel the request.
if (DataTypes.UNKNOWN_STREAM_LENGTH != advertisedLength && actualLength != advertisedLength) {
    MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_mismatchedStreamLength"));
    Object[] msgArgs = {Long.valueOf(advertisedLength), Long.valueOf(actualLength)};
    error(form.format(msgArgs), SQLState.DATA_EXCEPTION_LENGTH_MISMATCH, DriverError.NOT_SET);
}
}

/*
 * Note: There is another method with same code logic for non unicode reader, writeNonUnicodeReader(), implemented for performance efficiency. Any
 * changes in algorithm/logic should propagate to both writeReader() and writeNonUnicodeReader().
 */
void writeReader(Reader reader, long advertisedLength, boolean writeChunkSizes) throws SQLServerException {
    assert DataTypes.UNKNOWN_STREAM_LENGTH == advertisedLength || advertisedLength >= 0;

    long actualLength = 0;
    char[] streamCharBuffer = new char[2 * currentPacketSize];
    byte[] streamByteBuffer = new byte[4 * currentPacketSize];
    int charsRead = 0;
    int charsToWrite;

    do {
        // Read in next chunk
        for (charsToWrite = 0; -1 != charsRead && charsToWrite < streamCharBuffer.length; charsToWrite += charsRead) {
            try {
                charsRead = reader.read(streamCharBuffer, charsToWrite, streamCharBuffer.length - charsToWrite);
            }
            catch (IOException e) {
                MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_errorReadingStream"));
                Object[] msgArgs = {e.toString()};
                error(form.format(msgArgs), SQLState.DATA_EXCEPTION_NOT_SPECIFIC, DriverError.NOT_SET);
            }

            if (-1 == charsRead)
                break;

            // Check for invalid bytesRead returned from Reader.read
            if (charsRead < 0 || charsRead > streamCharBuffer.length - charsToWrite) {
                MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_errorReadingStream"));
                Object[] msgArgs = {SQLServerException.getErrString("R_streamReadReturnedInvalidValue")};
                error(form.format(msgArgs), SQLState.DATA_EXCEPTION_NOT_SPECIFIC, DriverError.NOT_SET);
            }
        }

        // Write it out
        if (writeChunkSizes)
            writeInt(2 * charsToWrite);

        // Convert from Unicode characters to bytes
        //
        // Note: The following inlined code is much faster than the equivalent
        // call to (new String(streamCharBuffer)).getBytes("UTF-16LE") because it
        // saves a conversion to String and use of Charset in that conversion.
        for (int charsCopied = 0; charsCopied < charsToWrite; ++charsCopied) {
            streamByteBuffer[2 * charsCopied] = (byte) ((streamCharBuffer[charsCopied] >> 0) & 0xFF);
            streamByteBuffer[2 * charsCopied + 1] = (byte) ((streamCharBuffer[charsCopied] >> 8) & 0xFF);
        }

        writeBytes(streamByteBuffer, 0, 2 * charsToWrite);
        actualLength += charsToWrite;
    } while (-1 != charsRead || charsToWrite > 0);

    // If we were given an input stream length that we had to match and
    // the actual stream length did not match then cancel the request.
    if (DataTypes.UNKNOWN_STREAM_LENGTH != advertisedLength && actualLength != advertisedLength) {
        MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_mismatchedStreamLength"));
        Object[] msgArgs = {Long.valueOf(advertisedLength), Long.valueOf(actualLength)};
        error(form.format(msgArgs), SQLState.DATA_EXCEPTION_LENGTH_MISMATCH, DriverError.NOT_SET);
    }
}

/**
 * Creates a lenient, cleared GregorianCalendar in the given time zone.
 *
 * @param timeZone the zone the calendar should use
 * @return a fresh calendar with no fields set
 */
GregorianCalendar initializeCalender(TimeZone timeZone) {
    GregorianCalendar calendar = null;

    // Create the calendar that will hold the value. For DateTimeOffset values, the calendar's
    // time zone is UTC. For other values, the calendar's time zone is a local time zone.
    calendar = new GregorianCalendar(timeZone, Locale.US);

    // Set the calendar lenient to allow setting the DAY_OF_YEAR and MILLISECOND fields
    // to roll other fields to their correct values.
    calendar.setLenient(true);

    // Clear the calendar of any existing state. The state of a new Calendar object always
    // reflects the current date, time, DST offset, etc.
    calendar.clear();

    return calendar;
}

/**
 * Interrupts the current command and raises a SQLServerException with the
 * given reason. Always throws; never returns normally.
 *
 * @param reason message for both the interrupt and the exception
 * @param sqlState SQLState to report
 * @param driverError driver error code to report
 * @throws SQLServerException always
 */
final void error(String reason, SQLState sqlState, DriverError driverError) throws SQLServerException {
    assert null != command;
    command.interrupt(reason);
    throw new SQLServerException(reason, sqlState, driverError, null);
}

/**
 * Sends an attention signal to the server, if necessary, to tell it to stop processing the current command on this connection.
 *
 * If no packets of the command's request have yet been sent to the server, then no attention signal needs to be sent. The interrupt will be
 * handled entirely by the driver.
 *
 * This method does not need synchronization as it does not manipulate interrupt state and writing is guaranteed to occur only from one thread at
 * a time.
 *
 * @return true if an attention message was sent; false if nothing had been written yet
 */
final boolean sendAttention() throws SQLServerException {
    // If any request packets were already written to the server then send an
    // attention signal to the server to tell it to ignore the request or
    // cancel its execution.
    if (packetNum > 0) {
        // Ideally, we would want to add the following assert here.
        // But to add that the variable isEOMSent would have to be made
        // volatile as this piece of code would be reached from multiple
        // threads. So, not doing it to avoid perf hit. Note that
        // isEOMSent would be updated in writePacket everytime an EOM is sent
        //
        // assert isEOMSent;

        if (logger.isLoggable(Level.FINE))
            logger.fine(this + ": sending attention...");

        ++tdsChannel.numMsgsSent;

        startMessage(command, TDS.PKT_CANCEL_REQ);
        endMessage();

        return true;
    }

    return false;
}

/**
 * Finalizes and flushes the current staging-buffer packet: checks for an
 * interrupt, stamps the packet header, and flushes (twice at end-of-message
 * so both buffers drain). Also handles login-only SSL teardown and the
 * end-of-request notification to the associated command.
 *
 * @param tdsMessageStatus TDS status bits for this packet
 * @throws SQLServerException if an interrupt is pending or the channel write fails
 */
private void writePacket(int tdsMessageStatus) throws SQLServerException {
    final boolean atEOM = (TDS.STATUS_BIT_EOM == (TDS.STATUS_BIT_EOM & tdsMessageStatus));
    final boolean isCancelled = ((TDS.PKT_CANCEL_REQ == tdsMessageType)
            || ((tdsMessageStatus & TDS.STATUS_BIT_ATTENTION) == TDS.STATUS_BIT_ATTENTION));
    // Before writing each packet to the channel, check if an interrupt has occurred.
    if (null != command && (!isCancelled))
        command.checkForInterrupt();

    writePacketHeader(tdsMessageStatus | sendResetConnection);
    sendResetConnection = 0;

    flush(atEOM);

    // If this is the last packet then flush the remainder of the request
    // through the socket. The first flush() call ensured that data currently
    // waiting in the socket buffer was sent, flipped the buffers, and started
    // sending data from the staging buffer (flipped to be the new socket buffer).
    // This flush() call ensures that all remaining data in the socket buffer is sent.
    if (atEOM) {
        flush(atEOM);
        isEOMSent = true;
        ++tdsChannel.numMsgsSent;
    }

    // If we just sent the first login request packet and SSL encryption was enabled
    // for login only, then disable SSL now.
    if (TDS.PKT_LOGON70 == tdsMessageType && 1 == packetNum && TDS.ENCRYPT_OFF == con.getNegotiatedEncryptionLevel()) {
        tdsChannel.disableSSL();
    }

    // Notify the currently associated command (if any) that we have written the last
    // of the response packets to the channel.
    if (null != command && (!isCancelled) && atEOM)
        command.onRequestComplete();
}

/**
 * Stamps the 8-byte TDS packet header at the front of the staging buffer:
 * message type, status, big-endian length and SPID, sequence number, window.
 *
 * @param tdsMessageStatus TDS status bits for this packet
 */
private void writePacketHeader(int tdsMessageStatus) {
    int tdsMessageLength = stagingBuffer.position();
    ++packetNum;

    // Write the TDS packet header back at the start of the staging buffer
    stagingBuffer.put(TDS.PACKET_HEADER_MESSAGE_TYPE, tdsMessageType);
    stagingBuffer.put(TDS.PACKET_HEADER_MESSAGE_STATUS, (byte) tdsMessageStatus);
    stagingBuffer.put(TDS.PACKET_HEADER_MESSAGE_LENGTH, (byte) ((tdsMessageLength >> 8) & 0xFF)); // Note: message length is 16 bits,
    stagingBuffer.put(TDS.PACKET_HEADER_MESSAGE_LENGTH + 1, (byte) ((tdsMessageLength >> 0) & 0xFF)); // written BIG ENDIAN
    stagingBuffer.put(TDS.PACKET_HEADER_SPID, (byte) ((tdsChannel.getSPID() >> 8) & 0xFF)); // Note: SPID is 16 bits,
    stagingBuffer.put(TDS.PACKET_HEADER_SPID + 1, (byte) ((tdsChannel.getSPID() >> 0) & 0xFF)); // written BIG ENDIAN
    stagingBuffer.put(TDS.PACKET_HEADER_SEQUENCE_NUM, (byte) (packetNum % 256));
    stagingBuffer.put(TDS.PACKET_HEADER_WINDOW, (byte) 0); // Window (Reserved/Not used)

    // Write the header to the log buffer too if logging.
if (tdsChannel.isLoggingPackets()) {
    logBuffer.put(TDS.PACKET_HEADER_MESSAGE_TYPE, tdsMessageType);
    logBuffer.put(TDS.PACKET_HEADER_MESSAGE_STATUS, (byte) tdsMessageStatus);
    logBuffer.put(TDS.PACKET_HEADER_MESSAGE_LENGTH, (byte) ((tdsMessageLength >> 8) & 0xFF)); // Note: message length is 16 bits,
    logBuffer.put(TDS.PACKET_HEADER_MESSAGE_LENGTH + 1, (byte) ((tdsMessageLength >> 0) & 0xFF)); // written BIG ENDIAN
    logBuffer.put(TDS.PACKET_HEADER_SPID, (byte) ((tdsChannel.getSPID() >> 8) & 0xFF)); // Note: SPID is 16 bits,
    logBuffer.put(TDS.PACKET_HEADER_SPID + 1, (byte) ((tdsChannel.getSPID() >> 0) & 0xFF)); // written BIG ENDIAN
    logBuffer.put(TDS.PACKET_HEADER_SEQUENCE_NUM, (byte) (packetNum % 256));
    logBuffer.put(TDS.PACKET_HEADER_WINDOW, (byte) 0); // Window (Reserved/Not used);
}
}

/**
 * Drains the socket buffer to the channel and, if the staging buffer holds a
 * complete packet, swaps the two buffers and starts sending the staged data.
 *
 * @param atEOM true when this is the end of the message (skips preparing a new packet)
 * @throws SQLServerException if the channel write fails
 */
void flush(boolean atEOM) throws SQLServerException {
    // First, flush any data left in the socket buffer.
    tdsChannel.write(socketBuffer.array(), socketBuffer.position(), socketBuffer.remaining());
    socketBuffer.position(socketBuffer.limit());

    // If there is data in the staging buffer that needs to be written
    // to the socket, the socket buffer is now empty, so swap buffers
    // and start writing data from the staging buffer.
    if (stagingBuffer.position() >= TDS_PACKET_HEADER_SIZE) {
        // Swap the packet buffers ...
        ByteBuffer swapBuffer = stagingBuffer;
        stagingBuffer = socketBuffer;
        socketBuffer = swapBuffer;

        // ... and prepare to send data from the from the new socket
        // buffer (the old staging buffer).
        //
        // We need to use flip() rather than rewind() here so that
        // the socket buffer's limit is properly set for the last
        // packet, which may be shorter than the other packets.
        socketBuffer.flip();
        stagingBuffer.clear();

        // If we are logging TDS packets then log the packet we're about
        // to send over the wire now.
        if (tdsChannel.isLoggingPackets()) {
            tdsChannel.logPacket(logBuffer.array(), 0, socketBuffer.limit(),
                    this.toString() + " sending packet (" + socketBuffer.limit() + " bytes)");
        }

        // Prepare for the next packet
        if (!atEOM)
            preparePacket();

        // Finally, start sending data from the new socket buffer.
        tdsChannel.write(socketBuffer.array(), socketBuffer.position(), socketBuffer.remaining());
        socketBuffer.position(socketBuffer.limit());
    }
}

// Composite write operations

/**
 * Write out elements common to all RPC values.
 *
 * @param sName
 *            the optional parameter name
 * @param bOut
 *            boolean true if the value that follows is being registered as an output parameter
 * @param tdsType
 *            TDS type of the value that follows
 */
void writeRPCNameValType(String sName, boolean bOut, TDSType tdsType) throws SQLServerException {
    int nNameLen = 0;

    if (null != sName)
        nNameLen = sName.length() + 1; // The @ prefix is required for the param

    writeByte((byte) nNameLen); // param name len
    if (nNameLen > 0) {
        writeChar('@');
        writeString(sName);
    }

    if (null != cryptoMeta)
        writeByte((byte) (bOut ? 1 | TDS.AE_METADATA : 0 | TDS.AE_METADATA)); // status
    else
        writeByte((byte) (bOut ? 1 : 0)); // status

    writeByte(tdsType.byteValue()); // type
}

/**
 * Append a boolean value in RPC transmission format.
 *
 * @param sName
 *            the optional parameter name
 * @param booleanValue
 *            the data value
 * @param bOut
 *            boolean true if the data value is being registered as an output parameter
 */
void writeRPCBit(String sName, Boolean booleanValue, boolean bOut) throws SQLServerException {
    writeRPCNameValType(sName, bOut, TDSType.BITN);
    writeByte((byte) 1); // max length of datatype
    if (null == booleanValue) {
        writeByte((byte) 0); // len of data bytes
    }
    else {
        writeByte((byte) 1); // length of datatype
        writeByte((byte) (booleanValue.booleanValue() ? 1 : 0));
    }
}

/**
 * Append a byte value in RPC transmission format.
 *
 * @param sName
 *            the optional parameter name
 * @param byteValue
 *            the data value
 * @param bOut
 *            boolean true if the data value is being registered as an output parameter
 */
void writeRPCByte(String sName, Byte byteValue, boolean bOut) throws SQLServerException {
    writeRPCNameValType(sName, bOut, TDSType.INTN);
    writeByte((byte) 1); // max length of datatype
    if (null == byteValue) {
        writeByte((byte) 0); // len of data bytes
    }
    else {
        writeByte((byte) 1); // length of datatype
        writeByte(byteValue.byteValue());
    }
}

/**
 * Append a short value in RPC transmission format.
 *
 * @param sName
 *            the optional parameter name
 * @param shortValue
 *            the data value
 * @param bOut
 *            boolean true if the data value is being registered as an output parameter
 */
void writeRPCShort(String sName, Short shortValue, boolean bOut) throws SQLServerException {
    writeRPCNameValType(sName, bOut, TDSType.INTN);
    writeByte((byte) 2); // max length of datatype
    if (null == shortValue) {
        writeByte((byte) 0); // len of data bytes
    }
    else {
        writeByte((byte) 2); // length of datatype
        writeShort(shortValue.shortValue());
    }
}

/**
 * Append an int value in RPC transmission format.
 *
 * @param sName
 *            the optional parameter name
 * @param intValue
 *            the data value
 * @param bOut
 *            boolean true if the data value is being registered as an output parameter
 */
void writeRPCInt(String sName, Integer intValue, boolean bOut) throws SQLServerException {
    writeRPCNameValType(sName, bOut, TDSType.INTN);
    writeByte((byte) 4); // max length of datatype
    if (null == intValue) {
        writeByte((byte) 0); // len of data bytes
    }
    else {
        writeByte((byte) 4); // length of datatype
        writeInt(intValue.intValue());
    }
}

/**
 * Append a long value in RPC transmission format.
 *
 * @param sName
 *            the optional parameter name
 * @param longValue
 *            the data value
 * @param bOut
 *            boolean true if the data value is being registered as an output parameter
 */
void writeRPCLong(String sName, Long longValue, boolean bOut) throws SQLServerException {
    writeRPCNameValType(sName, bOut, TDSType.INTN);
    writeByte((byte) 8); // max length of datatype
    if (null == longValue) {
        writeByte((byte) 0); // len of data bytes
    }
    else {
        writeByte((byte) 8); // length of datatype
        writeLong(longValue.longValue());
    }
}

/**
 * Append a real value in RPC transmission format.
 *
 * @param sName
 *            the optional parameter name
 * @param floatValue
 *            the data value
 * @param bOut
 *            boolean true if the data value is being registered as an output parameter
 */
void writeRPCReal(String sName, Float floatValue, boolean bOut) throws SQLServerException {
    writeRPCNameValType(sName, bOut, TDSType.FLOATN);

    // Data and length
    if (null == floatValue) {
        writeByte((byte) 4); // max length
        writeByte((byte) 0); // actual length (0 == null)
    }
    else {
        writeByte((byte) 4); // max length
        writeByte((byte) 4); // actual length
        writeInt(Float.floatToRawIntBits(floatValue.floatValue()));
    }
}

/**
 * Append a double value in RPC transmission format.
 *
 * @param sName
 *            the optional parameter name
 * @param doubleValue
 *            the data value
 * @param bOut
 *            boolean true if the data value is being registered as an output parameter
 */
void writeRPCDouble(String sName, Double doubleValue, boolean bOut) throws SQLServerException {
    writeRPCNameValType(sName, bOut, TDSType.FLOATN);

    int l = 8;
    writeByte((byte) l); // max length of datatype

    // Data and length
    if (null == doubleValue) {
        writeByte((byte) 0); // len of data bytes
    }
    else {
        writeByte((byte) l); // len of data bytes
        // Emit the IEEE-754 bits little-endian, one byte at a time
        long bits = Double.doubleToLongBits(doubleValue.doubleValue());
        long mask = 0xFF;
        int nShift = 0;
        for (int i = 0; i < 8; i++) {
            writeByte((byte) ((bits & mask) >> nShift));
            nShift += 8;
            mask = mask << 8;
        }
    }
}

/**
 * Append a big decimal in RPC transmission format.
 *
 * @param sName
 *            the optional parameter name
 * @param bdValue
 *            the data value
 * @param nScale
 *            the desired scale
 * @param bOut
 *            boolean true if the data value is being registered as an ouput parameter
 */
void writeRPCBigDecimal(String sName, BigDecimal bdValue, int nScale, boolean bOut) throws SQLServerException {
    writeRPCNameValType(sName, bOut, TDSType.DECIMALN);
    writeByte((byte) 0x11); // maximum length
    writeByte((byte) SQLServerConnection.maxDecimalPrecision); // precision

    byte[] valueBytes = DDC.convertBigDecimalToBytes(bdValue, nScale);
    writeBytes(valueBytes, 0, valueBytes.length);
}

/**
 * Appends a standard v*max header for RPC parameter transmission.
 *
 * @param headerLength
 *            the total length of the PLP data block.
 * @param isNull
 *            true if the value is NULL.
 * @param collation
 *            The SQL collation associated with the value that follows the v*max header. Null for non-textual types.
 */
void writeVMaxHeader(long headerLength, boolean isNull, SQLCollation collation) throws SQLServerException {
    // Send v*max length indicator 0xFFFF.
    writeShort((short) 0xFFFF);

    // Send collation if requested.
    if (null != collation)
        collation.writeCollation(this);

    // Handle null here and return, we're done here if it's null.
    if (isNull) {
        // Null header for v*max types is 0xFFFFFFFFFFFFFFFF.
        writeLong(0xFFFFFFFFFFFFFFFFL);
    }
    else if (DataTypes.UNKNOWN_STREAM_LENGTH == headerLength) {
        // Append v*max length.
        // UNKNOWN_PLP_LEN is 0xFFFFFFFFFFFFFFFE
        writeLong(0xFFFFFFFFFFFFFFFEL);

        // NOTE: Don't send the first chunk length, this will be calculated by caller.
    }
    else {
        // For v*max types with known length, length is <totallength8><chunklength4>
        // We're sending same total length as chunk length (as we're sending 1 chunk).
        writeLong(headerLength);
    }
}

/**
 * Utility for internal writeRPCString calls: writes an unnamed, non-OUT
 * Unicode string with the default (database) collation.
 */
void writeRPCStringUnicode(String sValue) throws SQLServerException {
    writeRPCStringUnicode(null, sValue, false, null);
}

/**
 * Writes a string value as Unicode for RPC
 *
 * @param sName
 *        the optional parameter name
 * @param sValue
 *        the data value
 * @param bOut
 *        boolean true if the data value is being registered as an output parameter
 * @param collation
 *        the collation of the data value
 */
void writeRPCStringUnicode(String sName, String sValue, boolean bOut, SQLCollation collation) throws SQLServerException {
    boolean bValueNull = (sValue == null);
    // UTF-16 encoding: two bytes per char.
    int nValueLen = bValueNull ? 0 : (2 * sValue.length());
    boolean isShortValue = nValueLen <= DataTypes.SHORT_VARTYPE_MAX_BYTES;

    // Textual RPC requires a collation. If none is provided, as is the case when
    // the SSType is non-textual, then use the database collation by default.
    if (null == collation)
        collation = con.getDatabaseCollation();

    // Use PLP encoding on Yukon and later with long values and OUT parameters
    boolean usePLP = (!isShortValue || bOut);
    if (usePLP) {
        writeRPCNameValType(sName, bOut, TDSType.NVARCHAR);

        // Handle Yukon v*max type header here.
        writeVMaxHeader(nValueLen, // Length
                bValueNull, // Is null?
                collation);

        // Send the data.
        if (!bValueNull) {
            if (nValueLen > 0) {
                // Single PLP chunk: 4-byte chunk length followed by the data.
                writeInt(nValueLen);
                writeString(sValue);
            }

            // Send the terminator PLP chunk.
            writeInt(0);
        }
    }
    else // non-PLP type
    {
        // Write maximum length of data
        if (isShortValue) {
            writeRPCNameValType(sName, bOut, TDSType.NVARCHAR);
            writeShort((short) DataTypes.SHORT_VARTYPE_MAX_BYTES);
        }
        else {
            writeRPCNameValType(sName, bOut, TDSType.NTEXT);
            writeInt(DataTypes.IMAGE_TEXT_MAX_BYTES);
        }

        collation.writeCollation(this);

        // Data and length
        if (bValueNull) {
            writeShort((short) -1); // actual len
        }
        else {
            // Write actual length of data
            if (isShortValue)
                writeShort((short) nValueLen);
            else
                writeInt(nValueLen);

            // If length is zero, we're done.
            if (0 != nValueLen)
                writeString(sValue); // data
        }
    }
}

/**
 * Writes a table-valued parameter: status, type name triple (db/schema/type),
 * column metadata, optional ordering metadata, and finally the row data.
 */
void writeTVP(TVP value) throws SQLServerException {
    if (!value.isNull()) {
        writeByte((byte) 0); // status
    }
    else {
        // Default TVP
        writeByte((byte) TDS.TVP_STATUS_DEFAULT); // default TVP
    }

    writeByte((byte) TDS.TDS_TVP);

    /*
     * TVP_TYPENAME = DbName OwningSchema TypeName
     */
    // Database where TVP type resides
    if (null != value.getDbNameTVP()) {
        writeByte((byte) value.getDbNameTVP().length());
        writeString(value.getDbNameTVP());
    }
    else
        writeByte((byte) 0x00); // empty DB name

    // Schema where TVP type resides
    if (null != value.getOwningSchemaNameTVP()) {
        writeByte((byte) value.getOwningSchemaNameTVP().length());
        writeString(value.getOwningSchemaNameTVP());
    }
    else
        writeByte((byte) 0x00); // empty Schema name

    // TVP type name
    if (null != value.getTVPName()) {
        writeByte((byte) value.getTVPName().length());
        writeString(value.getTVPName());
    }
    else
        writeByte((byte) 0x00); // empty TVP name

    if (!value.isNull()) {
        writeTVPColumnMetaData(value);

        // optional OrderUnique metadata
        writeTvpOrderUnique(value);
    }
    else {
        writeShort((short) TDS.TVP_NULL_TOKEN);
    }

    // TVP_END_TOKEN
    writeByte((byte) 0x00);

    try {
        writeTVPRows(value);
    }
    catch (NumberFormatException e) {
        throw new SQLServerException(SQLServerException.getErrString("R_TVPInvalidColumnValue"), e);
    }
    catch
    (ClassCastException e) {
        throw new SQLServerException(SQLServerException.getErrString("R_TVPInvalidColumnValue"), e);
    }
}

/**
 * Writes the TVP_ROW tokens for a table-valued parameter, one per source row,
 * converting each column value to its TDS representation according to the
 * column's declared JDBC type. When the TVP is fed from a server-cursor
 * result set on the same connection, rows are flushed one packet at a time
 * (see the caching logic below) so that fetching the next source row does not
 * clobber the staging buffer being transmitted.
 */
void writeTVPRows(TVP value) throws SQLServerException {
    boolean isShortValue, isNull;
    int dataLength;

    boolean tdsWritterCached = false;
    ByteBuffer cachedTVPHeaders = null;
    TDSCommand cachedCommand = null;

    boolean cachedRequestComplete = false;
    boolean cachedInterruptsEnabled = false;
    boolean cachedProcessedResponse = false;

    if (!value.isNull()) {
        // is used, the tdsWriter of the calling preparedStatement is overwritten by the SQLServerResultSet#next() method when fetching new rows.
        // Therefore, we need to send TVP data row by row before fetching new row.
        if (TVPType.ResultSet == value.tvpType) {
            if ((null != value.sourceResultSet) && (value.sourceResultSet instanceof SQLServerResultSet)) {
                SQLServerResultSet sourceResultSet = (SQLServerResultSet) value.sourceResultSet;
                SQLServerStatement src_stmt = (SQLServerStatement) sourceResultSet.getStatement();
                int resultSetServerCursorId = sourceResultSet.getServerCursorId();

                // Only cache when the source result set lives on this same connection
                // and uses a server cursor (fetching would reuse this writer).
                if (con.equals(src_stmt.getConnection()) && 0 != resultSetServerCursorId) {
                    // Snapshot the TDS headers written so far so they can be replayed
                    // before each row after the staging buffer is cleared.
                    cachedTVPHeaders = ByteBuffer.allocate(stagingBuffer.capacity()).order(stagingBuffer.order());
                    cachedTVPHeaders.put(stagingBuffer.array(), 0, stagingBuffer.position());

                    cachedCommand = this.command;

                    cachedRequestComplete = command.getRequestComplete();
                    cachedInterruptsEnabled = command.getInterruptsEnabled();
                    cachedProcessedResponse = command.getProcessedResponse();

                    tdsWritterCached = true;

                    if (sourceResultSet.isForwardOnly()) {
                        sourceResultSet.setFetchSize(1);
                    }
                }
            }
        }

        Map<Integer, SQLServerMetaData> columnMetadata = value.getColumnMetadata();
        Iterator<Entry<Integer, SQLServerMetaData>> columnsIterator;

        while (value.next()) {
            // restore command and TDS header, which have been overwritten by value.next()
            if (tdsWritterCached) {
                command = cachedCommand;

                stagingBuffer.clear();
                logBuffer.clear();
                writeBytes(cachedTVPHeaders.array(), 0, cachedTVPHeaders.position());
            }

            Object[] rowData = value.getRowData();

            // ROW
            writeByte((byte) TDS.TVP_ROW);
            columnsIterator = columnMetadata.entrySet().iterator();
            int currentColumn = 0;
            while (columnsIterator.hasNext()) {
                Map.Entry<Integer, SQLServerMetaData> columnPair = columnsIterator.next();

                // If useServerDefault is set, client MUST NOT emit TvpColumnData for the associated column
                if (columnPair.getValue().useServerDefault) {
                    currentColumn++;
                    continue;
                }

                JDBCType jdbcType = JDBCType.of(columnPair.getValue().javaSqlType);
                String currentColumnStringValue = null;

                Object currentObject = null;
                if (null != rowData) {
                    // if rowData has value for the current column, retrieve it. If not, current column will stay null.
                    if (rowData.length > currentColumn) {
                        currentObject = rowData[currentColumn];
                        if (null != currentObject) {
                            // All non-binary types below are converted from this string form.
                            currentColumnStringValue = String.valueOf(currentObject);
                        }
                    }
                }

                try {
                    switch (jdbcType) {
                        case BIGINT:
                            if (null == currentColumnStringValue)
                                writeByte((byte) 0);
                            else {
                                writeByte((byte) 8);
                                writeLong(Long.valueOf(currentColumnStringValue).longValue());
                            }
                            break;

                        case BIT:
                            if (null == currentColumnStringValue)
                                writeByte((byte) 0);
                            else {
                                writeByte((byte) 1);
                                writeByte((byte) (Boolean.valueOf(currentColumnStringValue).booleanValue() ? 1 : 0));
                            }
                            break;

                        case INTEGER:
                            if (null == currentColumnStringValue)
                                writeByte((byte) 0);
                            else {
                                writeByte((byte) 4);
                                writeInt(Integer.valueOf(currentColumnStringValue).intValue());
                            }
                            break;

                        case SMALLINT:
                        case TINYINT:
                            if (null == currentColumnStringValue)
                                writeByte((byte) 0);
                            else {
                                writeByte((byte) 2); // length of datatype
                                writeShort(Short.valueOf(currentColumnStringValue).shortValue());
                            }
                            break;

                        case DECIMAL:
                        case NUMERIC:
                            if (null == currentColumnStringValue)
                                writeByte((byte) 0);
                            else {
                                writeByte((byte) TDSWriter.BIGDECIMAL_MAX_LENGTH); // maximum length
                                BigDecimal bdValue = new BigDecimal(currentColumnStringValue);

                                /*
                                 * setScale of all BigDecimal value based on metadata as scale is not sent seperately for individual value. Use
                                 * the rounding used in Server. Say, for BigDecimal("0.1"), if scale in metdadata is 0, then ArithmeticException
                                 * would be thrown if RoundingMode is not set
                                 */
                                bdValue = bdValue.setScale(columnPair.getValue().scale, RoundingMode.HALF_UP);

                                byte[] valueBytes = DDC.convertBigDecimalToBytes(bdValue, bdValue.scale());

                                // 1-byte for sign and 16-byte for integer
                                byte[] byteValue = new byte[17];

                                // removing the precision and scale information from the valueBytes array
                                System.arraycopy(valueBytes, 2, byteValue, 0, valueBytes.length - 2);
                                writeBytes(byteValue);
                            }
                            break;

                        case DOUBLE:
                            if (null == currentColumnStringValue)
                                writeByte((byte) 0); // len of data bytes
                            else {
                                writeByte((byte) 8); // len of data bytes
                                // Emit the 8 bytes of the IEEE-754 bit pattern, LSB first.
                                long bits = Double.doubleToLongBits(Double.valueOf(currentColumnStringValue).doubleValue());
                                long mask = 0xFF;
                                int nShift = 0;
                                for (int i = 0; i < 8; i++) {
                                    writeByte((byte) ((bits & mask) >> nShift));
                                    nShift += 8;
                                    mask = mask << 8;
                                }
                            }
                            break;

                        case FLOAT:
                        case REAL:
                            if (null == currentColumnStringValue)
                                writeByte((byte) 0); // actual length (0 == null)
                            else {
                                writeByte((byte) 4); // actual length
                                writeInt(Float.floatToRawIntBits(Float.valueOf(currentColumnStringValue).floatValue()));
                            }
                            break;

                        case DATE:
                        case TIME:
                        case TIMESTAMP:
                        case DATETIMEOFFSET:
                        case TIMESTAMP_WITH_TIMEZONE:
                        case TIME_WITH_TIMEZONE:
                        case CHAR:
                        case VARCHAR:
                        case NCHAR:
                        case NVARCHAR:
                        case LONGVARCHAR:
                        case LONGNVARCHAR:
                        case SQLXML:
                            // Sent as Unicode text: 2 bytes per char; long values use PLP chunks.
                            isShortValue = (2L * columnPair.getValue().precision) <= DataTypes.SHORT_VARTYPE_MAX_BYTES;
                            isNull = (null == currentColumnStringValue);
                            dataLength = isNull ? 0 : currentColumnStringValue.length() * 2;
                            if (!isShortValue) {
                                // check null
                                if (isNull)
                                    // Null header for v*max types is 0xFFFFFFFFFFFFFFFF.
                                    writeLong(0xFFFFFFFFFFFFFFFFL);
                                else if (DataTypes.UNKNOWN_STREAM_LENGTH == dataLength)
                                    // Append v*max length.
                                    // UNKNOWN_PLP_LEN is 0xFFFFFFFFFFFFFFFE
                                    writeLong(0xFFFFFFFFFFFFFFFEL);
                                else
                                    // For v*max types with known length, length is <totallength8><chunklength4>
                                    writeLong(dataLength);
                                if (!isNull) {
                                    if (dataLength > 0) {
                                        writeInt(dataLength);
                                        writeString(currentColumnStringValue);
                                    }
                                    // Send the terminator PLP chunk.
                                    writeInt(0);
                                }
                            }
                            else {
                                if (isNull)
                                    writeShort((short) -1); // actual len
                                else {
                                    writeShort((short) dataLength);
                                    writeString(currentColumnStringValue);
                                }
                            }
                            break;

                        case BINARY:
                        case VARBINARY:
                        case LONGVARBINARY:
                            // Handle conversions as done in other types.
                            isShortValue = columnPair.getValue().precision <= DataTypes.SHORT_VARTYPE_MAX_BYTES;
                            isNull = (null == currentObject);
                            // String values are interpreted as hex and decoded to raw bytes.
                            if (currentObject instanceof String)
                                dataLength = isNull ? 0 : (toByteArray(currentObject.toString())).length;
                            else
                                dataLength = isNull ? 0 : ((byte[]) currentObject).length;
                            if (!isShortValue) {
                                // check null
                                if (isNull)
                                    // Null header for v*max types is 0xFFFFFFFFFFFFFFFF.
                                    writeLong(0xFFFFFFFFFFFFFFFFL);
                                else if (DataTypes.UNKNOWN_STREAM_LENGTH == dataLength)
                                    // Append v*max length.
                                    // UNKNOWN_PLP_LEN is 0xFFFFFFFFFFFFFFFE
                                    writeLong(0xFFFFFFFFFFFFFFFEL);
                                else
                                    // For v*max types with known length, length is <totallength8><chunklength4>
                                    writeLong(dataLength);
                                if (!isNull) {
                                    if (dataLength > 0) {
                                        writeInt(dataLength);
                                        if (currentObject instanceof String)
                                            writeBytes(toByteArray(currentObject.toString()));
                                        else
                                            writeBytes((byte[]) currentObject);
                                    }
                                    // Send the terminator PLP chunk.
                                    writeInt(0);
                                }
                            }
                            else {
                                if (isNull)
                                    writeShort((short) -1); // actual len
                                else {
                                    writeShort((short) dataLength);
                                    if (currentObject instanceof String)
                                        writeBytes(toByteArray(currentObject.toString()));
                                    else
                                        writeBytes((byte[]) currentObject);
                                }
                            }
                            break;

                        default:
                            assert false : "Unexpected JDBC type " + jdbcType.toString();
                    }
                }
                catch (IllegalArgumentException e) {
                    // Conversion of the string form to the target numeric/binary type failed.
                    throw new SQLServerException(SQLServerException.getErrString("R_errorConvertingValue"), e);
                }
                catch (ArrayIndexOutOfBoundsException e) {
                    throw new SQLServerException(SQLServerException.getErrString("R_CSVDataSchemaMismatch"), e);
                }
                currentColumn++;
            }

            // send this row, read its response (throw exception in case of errors) and reset command status
            if (tdsWritterCached) {
                // TVP_END_TOKEN
                writeByte((byte) 0x00);

                writePacket(TDS.STATUS_BIT_EOM);

                TDSReader tdsReader = tdsChannel.getReader(command);
                int tokenType = tdsReader.peekTokenType();

                if (TDS.TDS_ERR == tokenType) {
                    StreamError databaseError = new StreamError();
                    databaseError.setFromTDS(tdsReader);

                    SQLServerException.makeFromDatabaseError(con, null, databaseError.getMessage(), databaseError, false);
                }

                command.setInterruptsEnabled(true);
                command.setRequestComplete(false);
            }
        }
    }

    // reset command status which have been overwritten
    if (tdsWritterCached) {
        command.setRequestComplete(cachedRequestComplete);
        command.setInterruptsEnabled(cachedInterruptsEnabled);
        command.setProcessedResponse(cachedProcessedResponse);
    }
    else {
        // TVP_END_TOKEN
        writeByte((byte) 0x00);
    }
}

/**
 * Decodes a hexadecimal string into raw bytes.
 * NOTE(review): javax.xml.bind.DatatypeConverter was removed from the JDK in
 * Java 11 — confirm the build still provides the jaxb-api dependency, or
 * replace with a hand-rolled hex decoder.
 */
private static byte[] toByteArray(String s) {
    return DatatypeConverter.parseHexBinary(s);
}

void
writeTVPColumnMetaData(TVP value) throws SQLServerException {
    // Writes the TVP_COLMETADATA section: column count followed by one
    // TypeColumnMetaData entry (user type, flags, type info, name) per column.
    boolean isShortValue;

    // TVP_COLMETADATA
    writeShort((short) value.getTVPColumnCount());

    Map<Integer, SQLServerMetaData> columnMetadata = value.getColumnMetadata();
    Iterator<Entry<Integer, SQLServerMetaData>> columnsIterator = columnMetadata.entrySet().iterator();
    /*
     * TypeColumnMetaData = UserType Flags TYPE_INFO ColName ;
     */
    while (columnsIterator.hasNext()) {
        Map.Entry<Integer, SQLServerMetaData> pair = columnsIterator.next();
        JDBCType jdbcType = JDBCType.of(pair.getValue().javaSqlType);
        boolean useServerDefault = pair.getValue().useServerDefault;
        // ULONG ; UserType of column
        // The value will be 0x0000 with the exceptions of TIMESTAMP (0x0050) and alias types (greater than 0x00FF).
        writeInt(0);
        /*
         * Flags = fNullable ; Column is nullable - %x01 fCaseSen -- Ignored ; usUpdateable -- Ignored ; fIdentity ; Column is identity column -
         * %x10 fComputed ; Column is computed - %x20 usReservedODBC -- Ignored ; fFixedLenCLRType-- Ignored ; fDefault ; Column is default value
         * - %x200 usReserved -- Ignored ;
         */
        short flags = TDS.FLAG_NULLABLE;
        if (useServerDefault) {
            flags |= TDS.FLAG_TVP_DEFAULT_COLUMN;
        }
        writeShort(flags);

        // Type info
        switch (jdbcType) {
            case BIGINT:
                writeByte(TDSType.INTN.byteValue());
                writeByte((byte) 8); // max length of datatype
                break;
            case BIT:
                writeByte(TDSType.BITN.byteValue());
                writeByte((byte) 1); // max length of datatype
                break;
            case INTEGER:
                writeByte(TDSType.INTN.byteValue());
                writeByte((byte) 4); // max length of datatype
                break;
            case SMALLINT:
            case TINYINT:
                writeByte(TDSType.INTN.byteValue());
                writeByte((byte) 2); // max length of datatype
                break;

            case DECIMAL:
            case NUMERIC:
                writeByte(TDSType.NUMERICN.byteValue());
                writeByte((byte) 0x11); // maximum length
                writeByte((byte) pair.getValue().precision);
                writeByte((byte) pair.getValue().scale);
                break;

            case DOUBLE:
                writeByte(TDSType.FLOATN.byteValue());
                writeByte((byte) 8); // max length of datatype
                break;

            case FLOAT:
            case REAL:
                writeByte(TDSType.FLOATN.byteValue());
                writeByte((byte) 4); // max length of datatype
                break;

            case DATE:
            case TIME:
            case TIMESTAMP:
            case DATETIMEOFFSET:
            case TIMESTAMP_WITH_TIMEZONE:
            case TIME_WITH_TIMEZONE:
            case CHAR:
            case VARCHAR:
            case NCHAR:
            case NVARCHAR:
            case LONGVARCHAR:
            case LONGNVARCHAR:
            case SQLXML:
                // All textual/temporal columns are declared as NVARCHAR.
                writeByte(TDSType.NVARCHAR.byteValue());
                isShortValue = (2L * pair.getValue().precision) <= DataTypes.SHORT_VARTYPE_MAX_BYTES;
                // Use PLP encoding on Yukon and later with long values
                if (!isShortValue) // PLP
                {
                    // Handle Yukon v*max type header here.
                    writeShort((short) 0xFFFF);
                    con.getDatabaseCollation().writeCollation(this);
                }
                else // non PLP
                {
                    writeShort((short) DataTypes.SHORT_VARTYPE_MAX_BYTES);
                    con.getDatabaseCollation().writeCollation(this);
                }
                break;

            case BINARY:
            case VARBINARY:
            case LONGVARBINARY:
                writeByte(TDSType.BIGVARBINARY.byteValue());
                isShortValue = pair.getValue().precision <= DataTypes.SHORT_VARTYPE_MAX_BYTES;
                // Use PLP encoding on Yukon and later with long values
                if (!isShortValue) // PLP
                    // Handle Yukon v*max type header here.
                    writeShort((short) 0xFFFF);
                else // non PLP
                    writeShort((short) DataTypes.SHORT_VARTYPE_MAX_BYTES);
                break;

            default:
                assert false : "Unexpected JDBC type " + jdbcType.toString();
        }
        // Column name - must be null (from TDS - TVP_COLMETADATA)
        writeByte((byte) 0x00);

        // [TVP_ORDER_UNIQUE]
        // [TVP_COLUMN_ORDERING]
    }
}

/**
 * Writes the optional TVP_ORDER_UNIQUE metadata token listing every column
 * that declares a sort order and/or unique-key flag. Nothing is written if
 * no column has any such flag set.
 */
void writeTvpOrderUnique(TVP value) throws SQLServerException {
    /*
     * TVP_ORDER_UNIQUE = TVP_ORDER_UNIQUE_TOKEN (Count <Count>(ColNum OrderUniqueFlags))
     */
    Map<Integer, SQLServerMetaData> columnMetadata = value.getColumnMetadata();
    Iterator<Entry<Integer, SQLServerMetaData>> columnsIterator = columnMetadata.entrySet().iterator();
    LinkedList<TdsOrderUnique> columnList = new LinkedList<TdsOrderUnique>();

    while (columnsIterator.hasNext()) {
        byte flags = 0;
        Map.Entry<Integer, SQLServerMetaData> pair = columnsIterator.next();
        SQLServerMetaData metaData = pair.getValue();

        if (SQLServerSortOrder.Ascending == metaData.sortOrder)
            flags = TDS.TVP_ORDERASC_FLAG;
        else if (SQLServerSortOrder.Descending == metaData.sortOrder)
            flags = TDS.TVP_ORDERDESC_FLAG;
        if (metaData.isUniqueKey)
            flags |= TDS.TVP_UNIQUE_FLAG;

        // Remember this column if any flags were set
        if (0 != flags)
            columnList.add(new TdsOrderUnique(pair.getKey(), flags));
    }

    // Write flagged columns
    if (!columnList.isEmpty()) {
        writeByte((byte) TDS.TVP_ORDER_UNIQUE_TOKEN);
        writeShort((short) columnList.size());
        for (TdsOrderUnique column : columnList) {
            // Wire ordinals are 1-based; the map key is 0-based.
            writeShort((short) (column.columnOrdinal + 1));
            writeByte(column.flags);
        }
    }
}

/**
 * Simple (ordinal, flags) pair recorded for the TVP_ORDER_UNIQUE token.
 */
private class TdsOrderUnique {
    int columnOrdinal;
    byte flags;

    TdsOrderUnique(int ordinal, byte flags) {
        this.columnOrdinal = ordinal;
        this.flags = flags;
    }
}

/** Installs the crypto metadata used when writing Always Encrypted parameters. */
void setCryptoMetaData(CryptoMetadata cryptoMetaForBulk) {
    this.cryptoMeta = cryptoMetaForBulk;
}

/** Returns the crypto metadata previously set via {@link #setCryptoMetaData}. */
CryptoMetadata getCryptoMetaData() {
    return cryptoMeta;
}

/**
 * Writes an encrypted value (already ciphertext) as a BIGVARBINARY-style
 * payload, choosing short, PLP, or image length encoding by size.
 */
void writeEncryptedRPCByteArray(byte bValue[]) throws SQLServerException {
    boolean bValueNull = (bValue == null);
    long nValueLen = bValueNull ?
    0 : bValue.length;
    boolean isShortValue = (nValueLen <= DataTypes.SHORT_VARTYPE_MAX_BYTES);
    // PLP encoding only applies up to the v*max limit; beyond that fall back to image/text length.
    boolean isPLP = (!isShortValue) && (nValueLen <= DataTypes.MAX_VARTYPE_MAX_BYTES);

    // Handle Shiloh types here.
    if (isShortValue) {
        writeShort((short) DataTypes.SHORT_VARTYPE_MAX_BYTES);
    }
    else if (isPLP) {
        writeShort((short) DataTypes.SQL_USHORTVARMAXLEN);
    }
    else {
        writeInt(DataTypes.IMAGE_TEXT_MAX_BYTES);
    }

    // Data and length
    if (bValueNull) {
        writeShort((short) -1); // actual len
    }
    else {
        if (isShortValue) {
            writeShort((short) nValueLen); // actual len
        }
        else if (isPLP) {
            writeLong(nValueLen); // actual length
        }
        else {
            writeInt((int) nValueLen); // actual len
        }

        // If length is zero, we're done.
        if (0 != nValueLen) {
            if (isPLP) {
                // Single PLP chunk header before the data.
                writeInt((int) nValueLen);
            }
            writeBytes(bValue);
        }

        if (isPLP) {
            writeInt(0); // PLP_TERMINATOR, 0x00000000
        }
    }
}

/**
 * Writes an empty (zero-length, non-null) PLP value: v*max length marker,
 * zero actual length, and the PLP terminator.
 */
void writeEncryptedRPCPLP() throws SQLServerException {
    writeShort((short) DataTypes.SQL_USHORTVARMAXLEN);
    writeLong((long) 0); // actual length
    writeInt(0); // PLP_TERMINATOR, 0x00000000
}

/**
 * Writes the encryption metadata block that follows an encrypted parameter
 * value: cipher algorithm, encryption type, the first CEK table entry's
 * identifiers, and the normalization rule version.
 */
void writeCryptoMetaData() throws SQLServerException {
    writeByte(cryptoMeta.cipherAlgorithmId);
    writeByte(cryptoMeta.encryptionType.getValue());
    writeInt(cryptoMeta.cekTableEntry.getColumnEncryptionKeyValues().get(0).databaseId);
    writeInt(cryptoMeta.cekTableEntry.getColumnEncryptionKeyValues().get(0).cekId);
    writeInt(cryptoMeta.cekTableEntry.getColumnEncryptionKeyValues().get(0).cekVersion);
    writeBytes(cryptoMeta.cekTableEntry.getColumnEncryptionKeyValues().get(0).cekMdVersion);
    writeByte(cryptoMeta.normalizationRuleVersion);
}

/**
 * Writes a byte-array parameter in RPC format, selecting the TDS type from
 * the declared JDBC type (binary, char, or nchar families) and from whether
 * the value is short, PLP-eligible, or requires image/text encoding.
 *
 * @param sName
 *        the optional parameter name
 * @param bValue
 *        the data value
 * @param bOut
 *        boolean true if the data value is being registered as an output parameter
 * @param jdbcType
 *        the declared JDBC type of the parameter
 * @param collation
 *        the collation for textual types (database collation used if null)
 */
void writeRPCByteArray(String sName, byte bValue[], boolean bOut, JDBCType jdbcType, SQLCollation collation) throws SQLServerException {
    boolean bValueNull = (bValue == null);
    int nValueLen = bValueNull ?
    0 : bValue.length;
    boolean isShortValue = (nValueLen <= DataTypes.SHORT_VARTYPE_MAX_BYTES);

    // Use PLP encoding on Yukon and later with long values and OUT parameters
    boolean usePLP = (!isShortValue || bOut);

    TDSType tdsType;

    if (null != cryptoMeta) {
        // send encrypted data as BIGVARBINARY
        tdsType = (isShortValue || usePLP) ? TDSType.BIGVARBINARY : TDSType.IMAGE;
        collation = null;
    }
    else
        switch (jdbcType) {
            case BINARY:
            case VARBINARY:
            case LONGVARBINARY:
            case BLOB:
            default:
                tdsType = (isShortValue || usePLP) ? TDSType.BIGVARBINARY : TDSType.IMAGE;
                collation = null;
                break;

            case CHAR:
            case VARCHAR:
            case LONGVARCHAR:
            case CLOB:
                tdsType = (isShortValue || usePLP) ? TDSType.BIGVARCHAR : TDSType.TEXT;
                if (null == collation)
                    collation = con.getDatabaseCollation();
                break;

            case NCHAR:
            case NVARCHAR:
            case LONGNVARCHAR:
            case NCLOB:
                tdsType = (isShortValue || usePLP) ? TDSType.NVARCHAR : TDSType.NTEXT;
                if (null == collation)
                    collation = con.getDatabaseCollation();
                break;
        }

    writeRPCNameValType(sName, bOut, tdsType);

    if (usePLP) {
        // Handle Yukon v*max type header here.
        writeVMaxHeader(nValueLen, bValueNull, collation);

        // Send the data.
        if (!bValueNull) {
            if (nValueLen > 0) {
                writeInt(nValueLen);
                writeBytes(bValue);
            }

            // Send the terminator PLP chunk.
            writeInt(0);
        }
    }
    else // non-PLP type
    {
        // Handle Shiloh types here.
        if (isShortValue) {
            writeShort((short) DataTypes.SHORT_VARTYPE_MAX_BYTES);
        }
        else {
            writeInt(DataTypes.IMAGE_TEXT_MAX_BYTES);
        }

        if (null != collation)
            collation.writeCollation(this);

        // Data and length
        if (bValueNull) {
            writeShort((short) -1); // actual len
        }
        else {
            if (isShortValue)
                writeShort((short) nValueLen); // actual len
            else
                writeInt(nValueLen); // actual len

            // If length is zero, we're done.
            if (0 != nValueLen)
                writeBytes(bValue);
        }
    }
}

/**
 * Append a timestamp in RPC transmission format as a SQL Server DATETIME data type
 *
 * @param sName
 *        the optional parameter name
 * @param cal
 *        Pure Gregorian calendar containing the timestamp, including its associated time zone
 * @param subSecondNanos
 *        the sub-second nanoseconds (0 - 999,999,999)
 * @param bOut
 *        boolean true if the data value is being registered as an output parameter
 */
void writeRPCDateTime(String sName, GregorianCalendar cal, int subSecondNanos, boolean bOut) throws SQLServerException {
    assert (subSecondNanos >= 0) && (subSecondNanos < Nanos.PER_SECOND) : "Invalid subNanoSeconds value: " + subSecondNanos;
    assert (cal != null) || (cal == null && subSecondNanos == 0) : "Invalid subNanoSeconds value when calendar is null: " + subSecondNanos;

    writeRPCNameValType(sName, bOut, TDSType.DATETIMEN);
    writeByte((byte) 8); // max length of datatype

    if (null == cal) {
        writeByte((byte) 0); // len of data bytes
        return;
    }

    writeByte((byte) 8); // len of data bytes

    // We need to extract the Calendar's current date & time in terms
    // of the number of days since the SQL Base Date (1/1/1900) plus
    // the number of milliseconds since midnight in the current day.
    // We cannot rely on any pre-calculated value for the number of
    // milliseconds in a day or the number of milliseconds since the
    // base date to do this because days with DST changes are shorter
    // or longer than "normal" days.
    //
    // ASSUMPTION: We assume we are dealing with a GregorianCalendar here.
    // If not, we have no basis in which to compare dates. E.g. if we
    // are dealing with a Chinese Calendar implementation which does not
    // use the same value for Calendar.YEAR as the GregorianCalendar,
    // we cannot meaningfully compute a value relative to 1/1/1900.

    // First, figure out how many days there have been since the SQL Base Date.
    // These are based on SQL Server algorithms
    int daysSinceSQLBaseDate = DDC.daysSinceBaseDate(cal.get(Calendar.YEAR), cal.get(Calendar.DAY_OF_YEAR), TDS.BASE_YEAR_1900);

    // Next, figure out the number of milliseconds since midnight of the current day.
    int millisSinceMidnight = (subSecondNanos + Nanos.PER_MILLISECOND / 2) / Nanos.PER_MILLISECOND + // Millis into the current second
            1000 * cal.get(Calendar.SECOND) + // Seconds into the current minute
            60 * 1000 * cal.get(Calendar.MINUTE) + // Minutes into the current hour
            60 * 60 * 1000 * cal.get(Calendar.HOUR_OF_DAY); // Hours into the current day

    // The last millisecond of the current day is always rounded to the first millisecond
    // of the next day because DATETIME is only accurate to 1/300th of a second.
    if (millisSinceMidnight >= 1000 * 60 * 60 * 24 - 1) {
        ++daysSinceSQLBaseDate;
        millisSinceMidnight = 0;
    }

    // Last-ditch verification that the value is in the valid range for the
    // DATETIMEN TDS data type (1/1/1753 to 12/31/9999). If it's not, then
    // throw an exception now so that statement execution is safely canceled.
    // Attempting to put an invalid value on the wire would result in a TDS
    // exception, which would close the connection.
    // These are based on SQL Server algorithms
    if (daysSinceSQLBaseDate < DDC.daysSinceBaseDate(1753, 1, TDS.BASE_YEAR_1900)
            || daysSinceSQLBaseDate >= DDC.daysSinceBaseDate(10000, 1, TDS.BASE_YEAR_1900)) {
        MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_valueOutOfRange"));
        Object[] msgArgs = {SSType.DATETIME};
        throw new SQLServerException(form.format(msgArgs), SQLState.DATA_EXCEPTION_DATETIME_FIELD_OVERFLOW, DriverError.NOT_SET, null);
    }

    // And put it all on the wire...

    // Number of days since the SQL Server Base Date (January 1, 1900)
    writeInt(daysSinceSQLBaseDate);

    // Milliseconds since midnight (at a resolution of three hundredths of a second)
    writeInt((3 * millisSinceMidnight + 5) / 10);
}

/**
 * Writes a TIME parameter in RPC format at the given scale.
 */
void writeRPCTime(String sName, GregorianCalendar localCalendar, int subSecondNanos, int scale, boolean bOut) throws SQLServerException {
    writeRPCNameValType(sName, bOut, TDSType.TIMEN);
    writeByte((byte) scale);

    if (null == localCalendar) {
        writeByte((byte) 0);
        return;
    }

    writeByte((byte) TDS.timeValueLength(scale));
    writeScaledTemporal(localCalendar, subSecondNanos, scale, SSType.TIME);
}

/**
 * Writes a DATE parameter in RPC format.
 */
void writeRPCDate(String sName, GregorianCalendar localCalendar, boolean bOut) throws SQLServerException {
    writeRPCNameValType(sName, bOut, TDSType.DATEN);
    if (null == localCalendar) {
        writeByte((byte) 0);
        return;
    }

    writeByte((byte) TDS.DAYS_INTO_CE_LENGTH);
    writeScaledTemporal(localCalendar, 0, // subsecond nanos (none for a date value)
            0, // scale (dates are not scaled)
            SSType.DATE);
}

/**
 * Writes an Always Encrypted TIME parameter: the encrypted scaled-temporal
 * bytes as BIGVARBINARY, followed by the plaintext type info and crypto metadata.
 * Rejects connections configured to send time as datetime (unsupported with AE).
 */
void writeEncryptedRPCTime(String sName, GregorianCalendar localCalendar, int subSecondNanos, int scale, boolean bOut) throws SQLServerException {
    if (con.getSendTimeAsDatetime()) {
        throw new SQLServerException(SQLServerException.getErrString("R_sendTimeAsDateTimeForAE"), null);
    }
    writeRPCNameValType(sName, bOut, TDSType.BIGVARBINARY);

    if (null == localCalendar)
        writeEncryptedRPCByteArray(null);
    else
        writeEncryptedRPCByteArray(writeEncryptedScaledTemporal(localCalendar, subSecondNanos, scale, SSType.TIME, (short) 0));

    writeByte(TDSType.TIMEN.byteValue());
    writeByte((byte) scale);
    writeCryptoMetaData();
}

/**
 * Writes an Always Encrypted DATE parameter: encrypted bytes as BIGVARBINARY
 * followed by the plaintext DATEN type marker and crypto metadata.
 */
void writeEncryptedRPCDate(String sName, GregorianCalendar localCalendar, boolean bOut) throws SQLServerException {
    writeRPCNameValType(sName, bOut, TDSType.BIGVARBINARY);

    if (null == localCalendar)
        writeEncryptedRPCByteArray(null);
    else
        writeEncryptedRPCByteArray(writeEncryptedScaledTemporal(localCalendar, 0, // subsecond nanos (none for a date value)
                0, // scale (dates are not scaled)
                SSType.DATE, (short) 0));

    writeByte(TDSType.DATEN.byteValue());
    writeCryptoMetaData();
}

/**
 * Writes an Always Encrypted DATETIME/SMALLDATETIME parameter: encrypted
 * bytes as BIGVARBINARY followed by the plaintext DATETIMEN type info
 * (length 4 for SMALLDATETIME, 8 for DATETIME) and crypto metadata.
 */
void writeEncryptedRPCDateTime(String sName, GregorianCalendar cal, int subSecondNanos, boolean bOut, JDBCType jdbcType) throws SQLServerException {
    assert (subSecondNanos >= 0) && (subSecondNanos < Nanos.PER_SECOND) : "Invalid subNanoSeconds value: " + subSecondNanos;
    assert (cal != null) || (cal == null && subSecondNanos == 0) : "Invalid subNanoSeconds value when calendar is null: " + subSecondNanos;

    writeRPCNameValType(sName, bOut, TDSType.BIGVARBINARY);

    if (null == cal)
        writeEncryptedRPCByteArray(null);
    else
        writeEncryptedRPCByteArray(getEncryptedDateTimeAsBytes(cal, subSecondNanos, jdbcType));

    if (JDBCType.SMALLDATETIME == jdbcType) {
        writeByte(TDSType.DATETIMEN.byteValue());
        writeByte((byte) 4);
    }
    else {
        writeByte(TDSType.DATETIMEN.byteValue());
        writeByte((byte) 8);
    }
    writeCryptoMetaData();
}

// getEncryptedDateTimeAsBytes is called if jdbcType/ssType is SMALLDATETIME or DATETIME
byte[] getEncryptedDateTimeAsBytes(GregorianCalendar cal, int subSecondNanos, JDBCType jdbcType) throws SQLServerException {
    int daysSinceSQLBaseDate = DDC.daysSinceBaseDate(cal.get(Calendar.YEAR), cal.get(Calendar.DAY_OF_YEAR), TDS.BASE_YEAR_1900);

    // Next, figure out the number of milliseconds since midnight of the current day.
    int millisSinceMidnight = (subSecondNanos + Nanos.PER_MILLISECOND / 2) / Nanos.PER_MILLISECOND + // Millis into the current second
            1000 * cal.get(Calendar.SECOND) + // Seconds into the current minute
            60 * 1000 * cal.get(Calendar.MINUTE) + // Minutes into the current hour
            60 * 60 * 1000 * cal.get(Calendar.HOUR_OF_DAY); // Hours into the current day

    // The last millisecond of the current day is always rounded to the first millisecond
    // of the next day because DATETIME is only accurate to 1/300th of a second.
    if (millisSinceMidnight >= 1000 * 60 * 60 * 24 - 1) {
        ++daysSinceSQLBaseDate;
        millisSinceMidnight = 0;
    }

    if (JDBCType.SMALLDATETIME == jdbcType) {

        int secondsSinceMidnight = (millisSinceMidnight / 1000);
        int minutesSinceMidnight = (secondsSinceMidnight / 60);

        // Values that are 29.998 seconds or less are rounded down to the nearest minute
        minutesSinceMidnight = ((secondsSinceMidnight % 60) > 29.998) ? minutesSinceMidnight + 1 : minutesSinceMidnight;

        // minutesSinceMidnight for (23:59:30)
        int maxMinutesSinceMidnight_SmallDateTime = 1440;
        // Verification for smalldatetime to be within valid range of (1900.01.01) to (2079.06.06)
        // smalldatetime for unencrypted does not allow insertion of 2079.06.06 23:59:59 and it is rounded up
        // to 2079.06.07 00:00:00, therefore, we are checking minutesSinceMidnight for that condition. If it's not within valid range, then
        // throw an exception now so that statement execution is safely canceled.
        // 157 is the calculated day of year from 06-06 , 1440 is minutesince midnight for (23:59:30)
        if ((daysSinceSQLBaseDate < DDC.daysSinceBaseDate(1900, 1, TDS.BASE_YEAR_1900)
                || daysSinceSQLBaseDate > DDC.daysSinceBaseDate(2079, 157, TDS.BASE_YEAR_1900))
                || (daysSinceSQLBaseDate == DDC.daysSinceBaseDate(2079, 157, TDS.BASE_YEAR_1900)
                        && minutesSinceMidnight >= maxMinutesSinceMidnight_SmallDateTime)) {
            MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_valueOutOfRange"));
            Object[] msgArgs = {SSType.SMALLDATETIME};
            throw new SQLServerException(form.format(msgArgs), SQLState.DATA_EXCEPTION_DATETIME_FIELD_OVERFLOW, DriverError.NOT_SET, null);
        }

        // SMALLDATETIME plaintext is 2-byte days + 2-byte minutes, little-endian.
        ByteBuffer days = ByteBuffer.allocate(2).order(ByteOrder.LITTLE_ENDIAN);
        days.putShort((short) daysSinceSQLBaseDate);
        ByteBuffer seconds = ByteBuffer.allocate(2).order(ByteOrder.LITTLE_ENDIAN);
        seconds.putShort((short) minutesSinceMidnight);

        byte[] value = new byte[4];
        System.arraycopy(days.array(), 0, value, 0, 2);
        System.arraycopy(seconds.array(), 0, value, 2, 2);
        return SQLServerSecurityUtility.encryptWithKey(value, cryptoMeta, con);
    }
    else if (JDBCType.DATETIME == jdbcType) {
        // Last-ditch verification that the value is in the valid range for the
        // DATETIMEN TDS data type (1/1/1753 to 12/31/9999). If it's not, then
        // throw an exception now so that statement execution is safely canceled.
        // Attempting to put an invalid value on the wire would result in a TDS
        // exception, which would close the connection.
        // These are based on SQL Server algorithms
        // And put it all on the wire...
        if (daysSinceSQLBaseDate < DDC.daysSinceBaseDate(1753, 1, TDS.BASE_YEAR_1900)
                || daysSinceSQLBaseDate >= DDC.daysSinceBaseDate(10000, 1, TDS.BASE_YEAR_1900)) {
            MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_valueOutOfRange"));
            Object[] msgArgs = {SSType.DATETIME};
            throw new SQLServerException(form.format(msgArgs), SQLState.DATA_EXCEPTION_DATETIME_FIELD_OVERFLOW, DriverError.NOT_SET, null);
        }

        // DATETIME plaintext is 4-byte days + 4-byte 1/300-second ticks, little-endian.
        // Number of days since the SQL Server Base Date (January 1, 1900)
        ByteBuffer days = ByteBuffer.allocate(4).order(ByteOrder.LITTLE_ENDIAN);
        days.putInt(daysSinceSQLBaseDate);
        ByteBuffer seconds = ByteBuffer.allocate(4).order(ByteOrder.LITTLE_ENDIAN);
        seconds.putInt((3 * millisSinceMidnight + 5) / 10);

        byte[] value = new byte[8];
        System.arraycopy(days.array(), 0, value, 0, 4);
        System.arraycopy(seconds.array(), 0, value, 4, 4);
        return SQLServerSecurityUtility.encryptWithKey(value, cryptoMeta, con);
    }

    assert false : "Unexpected JDBCType type " + jdbcType;
    return null;
}

/**
 * Writes an Always Encrypted DATETIME2 parameter: encrypted scaled-temporal
 * bytes as BIGVARBINARY followed by the plaintext DATETIME2N type info and
 * crypto metadata.
 */
void writeEncryptedRPCDateTime2(String sName, GregorianCalendar localCalendar, int subSecondNanos, int scale, boolean bOut) throws SQLServerException {
    writeRPCNameValType(sName, bOut, TDSType.BIGVARBINARY);

    if (null == localCalendar)
        writeEncryptedRPCByteArray(null);
    else
        writeEncryptedRPCByteArray(writeEncryptedScaledTemporal(localCalendar, subSecondNanos, scale, SSType.DATETIME2, (short) 0));

    writeByte(TDSType.DATETIME2N.byteValue());
    writeByte((byte) (scale));
    writeCryptoMetaData();
}

/**
 * Writes an Always Encrypted DATETIMEOFFSET parameter. The calendar must
 * already be normalized to UTC; the zone offset is carried separately in
 * minutesOffset.
 */
void writeEncryptedRPCDateTimeOffset(String sName, GregorianCalendar utcCalendar, int minutesOffset, int subSecondNanos, int scale, boolean bOut) throws SQLServerException {
    writeRPCNameValType(sName, bOut, TDSType.BIGVARBINARY);

    if (null == utcCalendar)
        writeEncryptedRPCByteArray(null);
    else {
        assert 0 == utcCalendar.get(Calendar.ZONE_OFFSET);
        writeEncryptedRPCByteArray(
                writeEncryptedScaledTemporal(utcCalendar, subSecondNanos, scale, SSType.DATETIMEOFFSET, (short) minutesOffset));
    }

    writeByte(TDSType.DATETIMEOFFSETN.byteValue());
    writeByte((byte) (scale));
    writeCryptoMetaData();
}

/**
 * Writes a DATETIME2 parameter in RPC format at the given scale.
 */
void writeRPCDateTime2(String sName, GregorianCalendar localCalendar, int subSecondNanos, int scale, boolean bOut) throws SQLServerException {
    writeRPCNameValType(sName, bOut, TDSType.DATETIME2N);
    writeByte((byte) scale);

    if (null == localCalendar) {
        writeByte((byte) 0);
        return;
    }

    writeByte((byte) TDS.datetime2ValueLength(scale));
    writeScaledTemporal(localCalendar, subSecondNanos, scale, SSType.DATETIME2);
}

/**
 * Writes a DATETIMEOFFSET parameter in RPC format. The calendar must already
 * be normalized to UTC; the offset in minutes is appended after the temporal value.
 */
void writeRPCDateTimeOffset(String sName, GregorianCalendar utcCalendar, int minutesOffset, int subSecondNanos, int scale, boolean bOut) throws SQLServerException {
    writeRPCNameValType(sName, bOut, TDSType.DATETIMEOFFSETN);
    writeByte((byte) scale);

    if (null == utcCalendar) {
        writeByte((byte) 0);
        return;
    }

    assert 0 == utcCalendar.get(Calendar.ZONE_OFFSET);

    writeByte((byte) TDS.datetimeoffsetValueLength(scale));
    writeScaledTemporal(utcCalendar, subSecondNanos, scale, SSType.DATETIMEOFFSET);

    writeShort((short) minutesOffset);
}

/**
 * Returns subSecondNanos rounded to the maximum precision supported. The maximum fractional scale is MAX_FRACTIONAL_SECONDS_SCALE(7). Eg1: if you
 * pass 456,790,123 the function would return 456,790,100 Eg2: if you pass 456,790,150 the function would return 456,790,200 Eg3: if you pass
 * 999,999,951 the function would return 1,000,000,000 This is done to ensure that we have consistent rounding behaviour in setters and getters.
     * Bug #507919
     */
    private int getRoundedSubSecondNanos(int subSecondNanos) {
        // Round half-up to the nearest multiple of the smallest representable interval (100ns).
        int roundedNanos = ((subSecondNanos + (Nanos.PER_MAX_SCALE_INTERVAL / 2)) / Nanos.PER_MAX_SCALE_INTERVAL) * Nanos.PER_MAX_SCALE_INTERVAL;
        return roundedNanos;
    }

    /**
     * Writes to the TDS channel a temporal value as an instance of one of the scaled temporal SQL types: DATE, TIME, DATETIME2, or
     * DATETIMEOFFSET.
     *
     * @param cal
     *            Calendar representing the value to write, except for any sub-second nanoseconds
     * @param subSecondNanos
     *            the sub-second nanoseconds (0 - 999,999,999)
     * @param scale
     *            the scale (in digits: 0 - 7) to use for the sub-second nanos component
     * @param ssType
     *            the SQL Server data type (DATE, TIME, DATETIME2, or DATETIMEOFFSET)
     *
     * @throws SQLServerException
     *             if an I/O error occurs or if the value is not in the valid range
     */
    private void writeScaledTemporal(GregorianCalendar cal, int subSecondNanos, int scale, SSType ssType) throws SQLServerException {
        assert con.isKatmaiOrLater();

        assert SSType.DATE == ssType || SSType.TIME == ssType || SSType.DATETIME2 == ssType
                || SSType.DATETIMEOFFSET == ssType : "Unexpected SSType: " + ssType;

        // First, for types with a time component, write the scaled nanos since midnight
        if (SSType.TIME == ssType || SSType.DATETIME2 == ssType || SSType.DATETIMEOFFSET == ssType) {
            assert subSecondNanos >= 0;
            assert subSecondNanos < Nanos.PER_SECOND;
            assert scale >= 0;
            assert scale <= TDS.MAX_FRACTIONAL_SECONDS_SCALE;

            int secondsSinceMidnight = cal.get(Calendar.SECOND) + 60 * cal.get(Calendar.MINUTE) + 60 * 60 * cal.get(Calendar.HOUR_OF_DAY);

            // Scale nanos since midnight to the desired scale, rounding the value as necessary
            long divisor = Nanos.PER_MAX_SCALE_INTERVAL * (long) Math.pow(10, TDS.MAX_FRACTIONAL_SECONDS_SCALE - scale);

            // The scaledNanos variable represents the fractional seconds of the value at the scale
            // indicated by the scale variable. So, for example, scaledNanos = 3 means 300 nanoseconds
            // at scale TDS.MAX_FRACTIONAL_SECONDS_SCALE, but 3000 nanoseconds at
            // TDS.MAX_FRACTIONAL_SECONDS_SCALE - 1
            long scaledNanos = ((long) Nanos.PER_SECOND * secondsSinceMidnight + getRoundedSubSecondNanos(subSecondNanos) + divisor / 2) / divisor;

            // SQL Server rounding behavior indicates that it always rounds up unless
            // we are at the max value of the type(NOT every day), in which case it truncates.
            // If rounding nanos to the specified scale rolls the value to the next day ...
            if (Nanos.PER_DAY / divisor == scaledNanos) {
                // If the type is time, always truncate
                if (SSType.TIME == ssType) {
                    --scaledNanos;
                }
                // If the type is datetime2 or datetimeoffset, truncate only if its the max value supported
                else {
                    assert SSType.DATETIME2 == ssType || SSType.DATETIMEOFFSET == ssType : "Unexpected SSType: " + ssType;

                    // ... then bump the date, provided that the resulting date is still within
                    // the valid date range.
                    //
                    // Extreme edge case (literally, the VERY edge...):
                    // If nanos overflow rolls the date value out of range (that is, we have a value
                    // a few nanoseconds later than 9999-12-31 23:59:59) then truncate the nanos
                    // instead of rolling.
                    //
                    // This case is very likely never hit by "real world" applications, but exists
                    // here as a security measure to ensure that such values don't result in a
                    // connection-closing TDS exception.
                    cal.add(Calendar.SECOND, 1);

                    if (cal.get(Calendar.YEAR) <= 9999) {
                        scaledNanos = 0;
                    }
                    else {
                        cal.add(Calendar.SECOND, -1);
                        --scaledNanos;
                    }
                }
            }

            // Encode the scaled nanos to TDS
            int encodedLength = TDS.nanosSinceMidnightLength(scale);
            byte[] encodedBytes = scaledNanosToEncodedBytes(scaledNanos, encodedLength);

            writeBytes(encodedBytes);
        }

        // Second, for types with a date component, write the days into the Common Era
        if (SSType.DATE == ssType || SSType.DATETIME2 == ssType || SSType.DATETIMEOFFSET == ssType) {
            // Computation of the number of days into the Common Era assumes that
            // the DAY_OF_YEAR field reflects a pure Gregorian calendar - one that
            // uses Gregorian leap year rules across the entire range of dates.
            //
            // For the DAY_OF_YEAR field to accurately reflect pure Gregorian behavior,
            // we need to use a pure Gregorian calendar for dates that are Julian dates
            // under a standard Gregorian calendar and for (Gregorian) dates later than
            // the cutover date in the cutover year.
            if (cal.getTimeInMillis() < GregorianChange.STANDARD_CHANGE_DATE.getTime()
                    || cal.getActualMaximum(Calendar.DAY_OF_YEAR) < TDS.DAYS_PER_YEAR) {
                int year = cal.get(Calendar.YEAR);
                int month = cal.get(Calendar.MONTH);
                int date = cal.get(Calendar.DATE);

                // Set the cutover as early as possible (pure Gregorian behavior)
                cal.setGregorianChange(GregorianChange.PURE_CHANGE_DATE);

                // Initialize the date field by field (preserving the "wall calendar" value)
                cal.set(year, month, date);
            }

            int daysIntoCE = DDC.daysSinceBaseDate(cal.get(Calendar.YEAR), cal.get(Calendar.DAY_OF_YEAR), 1);

            // Last-ditch verification that the value is in the valid range for the
            // DATE/DATETIME2/DATETIMEOFFSET TDS data type (1/1/0001 to 12/31/9999).
            // If it's not, then throw an exception now so that statement execution
            // is safely canceled. Attempting to put an invalid value on the wire
            // would result in a TDS exception, which would close the connection.
            if (daysIntoCE < 0 || daysIntoCE >= DDC.daysSinceBaseDate(10000, 1, 1)) {
                MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_valueOutOfRange"));
                Object[] msgArgs = {ssType};
                throw new SQLServerException(form.format(msgArgs), SQLState.DATA_EXCEPTION_DATETIME_FIELD_OVERFLOW, DriverError.NOT_SET, null);
            }

            // Date wire format: 3-byte little-endian day count.
            byte encodedBytes[] = new byte[3];
            encodedBytes[0] = (byte) ((daysIntoCE >> 0) & 0xFF);
            encodedBytes[1] = (byte) ((daysIntoCE >> 8) & 0xFF);
            encodedBytes[2] = (byte) ((daysIntoCE >> 16) & 0xFF);

            writeBytes(encodedBytes);
        }
    }

    /**
     * Encrypts and returns (rather than writing to the channel) a temporal value as an instance of one of the scaled temporal SQL types: DATE, TIME,
     * DATETIME2, or DATETIMEOFFSET.
     *
     * @param cal
     *            Calendar representing the value to write, except for any sub-second nanoseconds
     * @param subSecondNanos
     *            the sub-second nanoseconds (0 - 999,999,999)
     * @param scale
     *            the scale (in digits: 0 - 7) to use for the sub-second nanos component
     * @param ssType
     *            the SQL Server data type (DATE, TIME, DATETIME2, or DATETIMEOFFSET)
     * @param minutesOffset
     *            the offset value for DATETIMEOFFSET
     * @throws SQLServerException
     *             if an I/O error occurs or if the value is not in the valid range
     */
    byte[] writeEncryptedScaledTemporal(GregorianCalendar cal, int subSecondNanos, int scale, SSType ssType, short minutesOffset)
            throws SQLServerException {
        assert con.isKatmaiOrLater();

        assert SSType.DATE == ssType || SSType.TIME == ssType || SSType.DATETIME2 == ssType
                || SSType.DATETIMEOFFSET == ssType : "Unexpected SSType: " + ssType;

        // store the time and minutesOffset portion of DATETIME2 and DATETIMEOFFSET to be used with date portion
        byte encodedBytesForEncryption[] = null;

        int secondsSinceMidnight = 0;
        long divisor = 0;
        long scaledNanos = 0;

        // First, for types with a time component, write the scaled nanos since midnight
        if (SSType.TIME == ssType || SSType.DATETIME2 == ssType || SSType.DATETIMEOFFSET == ssType) {
            assert subSecondNanos >= 0;
            assert subSecondNanos <
                    Nanos.PER_SECOND;
            assert scale >= 0;
            assert scale <= TDS.MAX_FRACTIONAL_SECONDS_SCALE;

            secondsSinceMidnight = cal.get(Calendar.SECOND) + 60 * cal.get(Calendar.MINUTE) + 60 * 60 * cal.get(Calendar.HOUR_OF_DAY);

            // Scale nanos since midnight to the desired scale, rounding the value as necessary
            divisor = Nanos.PER_MAX_SCALE_INTERVAL * (long) Math.pow(10, TDS.MAX_FRACTIONAL_SECONDS_SCALE - scale);

            // The scaledNanos variable represents the fractional seconds of the value at the scale
            // indicated by the scale variable. So, for example, scaledNanos = 3 means 300 nanoseconds
            // at scale TDS.MAX_FRACTIONAL_SECONDS_SCALE, but 3000 nanoseconds at
            // TDS.MAX_FRACTIONAL_SECONDS_SCALE - 1
            scaledNanos = (((long) Nanos.PER_SECOND * secondsSinceMidnight + getRoundedSubSecondNanos(subSecondNanos) + divisor / 2) / divisor)
                    * divisor / 100;

            // for encrypted time value, SQL server cannot do rounding or casting,
            // So, driver needs to cast it before encryption.
            if (SSType.TIME == ssType && 864000000000L <= scaledNanos) {
                // 864000000000L is 24:00:00 in 100ns units: truncate instead of rolling to the next day.
                scaledNanos = (((long) Nanos.PER_SECOND * secondsSinceMidnight + getRoundedSubSecondNanos(subSecondNanos)) / divisor) * divisor / 100;
            }

            // SQL Server rounding behavior indicates that it always rounds up unless
            // we are at the max value of the type(NOT every day), in which case it truncates.
            // If rounding nanos to the specified scale rolls the value to the next day ...
            if (Nanos.PER_DAY / divisor == scaledNanos) {
                // If the type is time, always truncate
                if (SSType.TIME == ssType) {
                    --scaledNanos;
                }
                // If the type is datetime2 or datetimeoffset, truncate only if its the max value supported
                else {
                    assert SSType.DATETIME2 == ssType || SSType.DATETIMEOFFSET == ssType : "Unexpected SSType: " + ssType;

                    // ... then bump the date, provided that the resulting date is still within
                    // the valid date range.
                    //
                    // Extreme edge case (literally, the VERY edge...):
                    // If nanos overflow rolls the date value out of range (that is, we have a value
                    // a few nanoseconds later than 9999-12-31 23:59:59) then truncate the nanos
                    // instead of rolling.
                    //
                    // This case is very likely never hit by "real world" applications, but exists
                    // here as a security measure to ensure that such values don't result in a
                    // connection-closing TDS exception.
                    cal.add(Calendar.SECOND, 1);

                    if (cal.get(Calendar.YEAR) <= 9999) {
                        scaledNanos = 0;
                    }
                    else {
                        cal.add(Calendar.SECOND, -1);
                        --scaledNanos;
                    }
                }
            }

            // Encode the scaled nanos to TDS
            int encodedLength = TDS.nanosSinceMidnightLength(TDS.MAX_FRACTIONAL_SECONDS_SCALE);
            byte[] encodedBytes = scaledNanosToEncodedBytes(scaledNanos, encodedLength);

            if (SSType.TIME == ssType) {
                // TIME is just the time portion; encrypt and return it directly.
                byte[] cipherText = SQLServerSecurityUtility.encryptWithKey(encodedBytes, cryptoMeta, con);
                return cipherText;
            }
            else if (SSType.DATETIME2 == ssType) {
                // for DATETIME2 sends both date and time part together for encryption
                encodedBytesForEncryption = new byte[encodedLength + 3];
                System.arraycopy(encodedBytes, 0, encodedBytesForEncryption, 0, encodedBytes.length);
            }
            else if (SSType.DATETIMEOFFSET == ssType) {
                // for DATETIMEOFFSET sends date, time and offset part together for encryption
                encodedBytesForEncryption = new byte[encodedLength + 5];
                System.arraycopy(encodedBytes, 0, encodedBytesForEncryption, 0, encodedBytes.length);
            }
        }

        // Second, for types with a date component, write the days into the Common Era
        if (SSType.DATE == ssType || SSType.DATETIME2 == ssType || SSType.DATETIMEOFFSET == ssType) {
            // Computation of the number of days into the Common Era assumes that
            // the DAY_OF_YEAR field reflects a pure Gregorian calendar - one that
            // uses Gregorian leap year rules across the entire range of dates.
            //
            // For the DAY_OF_YEAR field to accurately reflect pure Gregorian behavior,
            // we need to use a pure Gregorian calendar for dates that are Julian dates
            // under a standard Gregorian calendar and for (Gregorian) dates later than
            // the cutover date in the cutover year.
            if (cal.getTimeInMillis() < GregorianChange.STANDARD_CHANGE_DATE.getTime()
                    || cal.getActualMaximum(Calendar.DAY_OF_YEAR) < TDS.DAYS_PER_YEAR) {
                int year = cal.get(Calendar.YEAR);
                int month = cal.get(Calendar.MONTH);
                int date = cal.get(Calendar.DATE);

                // Set the cutover as early as possible (pure Gregorian behavior)
                cal.setGregorianChange(GregorianChange.PURE_CHANGE_DATE);

                // Initialize the date field by field (preserving the "wall calendar" value)
                cal.set(year, month, date);
            }

            int daysIntoCE = DDC.daysSinceBaseDate(cal.get(Calendar.YEAR), cal.get(Calendar.DAY_OF_YEAR), 1);

            // Last-ditch verification that the value is in the valid range for the
            // DATE/DATETIME2/DATETIMEOFFSET TDS data type (1/1/0001 to 12/31/9999).
            // If it's not, then throw an exception now so that statement execution
            // is safely canceled. Attempting to put an invalid value on the wire
            // would result in a TDS exception, which would close the connection.
            if (daysIntoCE < 0 || daysIntoCE >= DDC.daysSinceBaseDate(10000, 1, 1)) {
                MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_valueOutOfRange"));
                Object[] msgArgs = {ssType};
                throw new SQLServerException(form.format(msgArgs), SQLState.DATA_EXCEPTION_DATETIME_FIELD_OVERFLOW, DriverError.NOT_SET, null);
            }

            // Date wire format: 3-byte little-endian day count.
            byte encodedBytes[] = new byte[3];
            encodedBytes[0] = (byte) ((daysIntoCE >> 0) & 0xFF);
            encodedBytes[1] = (byte) ((daysIntoCE >> 8) & 0xFF);
            encodedBytes[2] = (byte) ((daysIntoCE >> 16) & 0xFF);

            byte[] cipherText;
            if (SSType.DATE == ssType) {
                cipherText = SQLServerSecurityUtility.encryptWithKey(encodedBytes, cryptoMeta, con);
            }
            else if (SSType.DATETIME2 == ssType) {
                // for Max value, does not round up, do casting instead.
                if (3652058 == daysIntoCE) { // 9999-12-31
                    if (864000000000L == scaledNanos) { // 24:00:00 in nanoseconds
                        // does not round up
                        scaledNanos = (((long) Nanos.PER_SECOND * secondsSinceMidnight + getRoundedSubSecondNanos(subSecondNanos)) / divisor)
                                * divisor / 100;

                        int encodedLength = TDS.nanosSinceMidnightLength(TDS.MAX_FRACTIONAL_SECONDS_SCALE);
                        byte[] encodedNanoBytes = scaledNanosToEncodedBytes(scaledNanos, encodedLength);

                        // for DATETIME2 sends both date and time part together for encryption
                        encodedBytesForEncryption = new byte[encodedLength + 3];
                        System.arraycopy(encodedNanoBytes, 0, encodedBytesForEncryption, 0, encodedNanoBytes.length);
                    }
                }
                // Copy the 3 byte date value
                System.arraycopy(encodedBytes, 0, encodedBytesForEncryption, (encodedBytesForEncryption.length - 3), 3);

                cipherText = SQLServerSecurityUtility.encryptWithKey(encodedBytesForEncryption, cryptoMeta, con);
            }
            else {
                // for Max value, does not round up, do casting instead.
                if (3652058 == daysIntoCE) { // 9999-12-31
                    if (864000000000L == scaledNanos) { // 24:00:00 in nanoseconds
                        // does not round up
                        scaledNanos = (((long) Nanos.PER_SECOND * secondsSinceMidnight + getRoundedSubSecondNanos(subSecondNanos)) / divisor)
                                * divisor / 100;

                        int encodedLength = TDS.nanosSinceMidnightLength(TDS.MAX_FRACTIONAL_SECONDS_SCALE);
                        byte[] encodedNanoBytes = scaledNanosToEncodedBytes(scaledNanos, encodedLength);

                        // for DATETIMEOFFSET sends date, time and offset part together for encryption
                        encodedBytesForEncryption = new byte[encodedLength + 5];
                        System.arraycopy(encodedNanoBytes, 0, encodedBytesForEncryption, 0, encodedNanoBytes.length);
                    }
                }
                // Copy the 3 byte date value
                System.arraycopy(encodedBytes, 0, encodedBytesForEncryption, (encodedBytesForEncryption.length - 5), 3);
                // Copy the 2 byte minutesOffset value
                System.arraycopy(ByteBuffer.allocate(Short.SIZE / Byte.SIZE).order(ByteOrder.LITTLE_ENDIAN).putShort(minutesOffset).array(), 0,
                        encodedBytesForEncryption, (encodedBytesForEncryption.length - 2), 2);

                cipherText =
SQLServerSecurityUtility.encryptWithKey(encodedBytesForEncryption, cryptoMeta, con); } return cipherText; } // Invalid type ssType. This condition should never happen. MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_unknownSSType")); Object[] msgArgs = {ssType}; SQLServerException.makeFromDriverError(null, null, form.format(msgArgs), null, true); return null; } private byte[] scaledNanosToEncodedBytes(long scaledNanos, int encodedLength) { byte encodedBytes[] = new byte[encodedLength]; for (int i = 0; i < encodedLength; i++) encodedBytes[i] = (byte) ((scaledNanos >> (8 * i)) & 0xFF); return encodedBytes; } /** * Append the data in a stream in RPC transmission format. * * @param sName * the optional parameter name * @param stream * is the stream * @param streamLength * length of the stream (may be unknown) * @param bOut * boolean true if the data value is being registered as an ouput parameter * @param jdbcType * The JDBC type used to determine whether the value is textual or non-textual. * @param collation * The SQL collation associated with the value. Null for non-textual SQL Server types. * @throws SQLServerException */ void writeRPCInputStream(String sName, InputStream stream, long streamLength, boolean bOut, JDBCType jdbcType, SQLCollation collation) throws SQLServerException { assert null != stream; assert DataTypes.UNKNOWN_STREAM_LENGTH == streamLength || streamLength >= 0; // Send long values and values with unknown length // using PLP chunking on Yukon and later. boolean usePLP = (DataTypes.UNKNOWN_STREAM_LENGTH == streamLength || streamLength > DataTypes.SHORT_VARTYPE_MAX_BYTES); if (usePLP) { assert DataTypes.UNKNOWN_STREAM_LENGTH == streamLength || streamLength <= DataTypes.MAX_VARTYPE_MAX_BYTES; writeRPCNameValType(sName, bOut, jdbcType.isTextual() ? TDSType.BIGVARCHAR : TDSType.BIGVARBINARY); // Handle Yukon v*max type header here. writeVMaxHeader(streamLength, false, jdbcType.isTextual() ? 
collation : null); } // Send non-PLP in all other cases else { // If the length of the InputStream is unknown then we need to buffer the entire stream // in memory so that we can determine its length and send that length to the server // before the stream data itself. if (DataTypes.UNKNOWN_STREAM_LENGTH == streamLength) { // Create ByteArrayOutputStream with initial buffer size of 8K to handle typical // binary field sizes more efficiently. Note we can grow beyond 8000 bytes. ByteArrayOutputStream baos = new ByteArrayOutputStream(8000); streamLength = 0L; // Since Shiloh is limited to 64K TDS packets, that's a good upper bound on the maximum // length of InputStream we should try to handle before throwing an exception. long maxStreamLength = 65535L * con.getTDSPacketSize(); try { byte buff[] = new byte[8000]; int bytesRead; while (streamLength < maxStreamLength && -1 != (bytesRead = stream.read(buff, 0, buff.length))) { baos.write(buff); streamLength += bytesRead; } } catch (IOException e) { throw new SQLServerException(e.getMessage(), SQLState.DATA_EXCEPTION_NOT_SPECIFIC, DriverError.NOT_SET, e); } if (streamLength >= maxStreamLength) { MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_invalidLength")); Object[] msgArgs = {Long.valueOf(streamLength)}; SQLServerException.makeFromDriverError(null, null, form.format(msgArgs), "", true); } assert streamLength <= Integer.MAX_VALUE; stream = new ByteArrayInputStream(baos.toByteArray(), 0, (int) streamLength); } assert 0 <= streamLength && streamLength <= DataTypes.IMAGE_TEXT_MAX_BYTES; boolean useVarType = streamLength <= DataTypes.SHORT_VARTYPE_MAX_BYTES; writeRPCNameValType(sName, bOut, jdbcType.isTextual() ? (useVarType ? TDSType.BIGVARCHAR : TDSType.TEXT) : (useVarType ? 
TDSType.BIGVARBINARY : TDSType.IMAGE)); // Write maximum length, optional collation, and actual length if (useVarType) { writeShort((short) DataTypes.SHORT_VARTYPE_MAX_BYTES); if (jdbcType.isTextual()) collation.writeCollation(this); writeShort((short) streamLength); } else { writeInt(DataTypes.IMAGE_TEXT_MAX_BYTES); if (jdbcType.isTextual()) collation.writeCollation(this); writeInt((int) streamLength); } } // Write the data writeStream(stream, streamLength, usePLP); } /** * Append the XML data in a stream in RPC transmission format. * * @param sName * the optional parameter name * @param stream * is the stream * @param streamLength * length of the stream (may be unknown) * @param bOut * boolean true if the data value is being registered as an ouput parameter * @throws SQLServerException */ void writeRPCXML(String sName, InputStream stream, long streamLength, boolean bOut) throws SQLServerException { assert DataTypes.UNKNOWN_STREAM_LENGTH == streamLength || streamLength >= 0; assert DataTypes.UNKNOWN_STREAM_LENGTH == streamLength || streamLength <= DataTypes.MAX_VARTYPE_MAX_BYTES; writeRPCNameValType(sName, bOut, TDSType.XML); writeByte((byte) 0); // No schema // Handle null here and return, we're done here if it's null. if (null == stream) { // Null header for v*max types is 0xFFFFFFFFFFFFFFFF. writeLong(0xFFFFFFFFFFFFFFFFL); } else if (DataTypes.UNKNOWN_STREAM_LENGTH == streamLength) { // Append v*max length. // UNKNOWN_PLP_LEN is 0xFFFFFFFFFFFFFFFE writeLong(0xFFFFFFFFFFFFFFFEL); // NOTE: Don't send the first chunk length, this will be calculated by caller. } else { // For v*max types with known length, length is <totallength8><chunklength4> // We're sending same total length as chunk length (as we're sending 1 chunk). writeLong(streamLength); } if (null != stream) // Write the data writeStream(stream, streamLength, true); } /** * Append the data in a character reader in RPC transmission format. 
     *
     * @param sName
     *            the optional parameter name
     * @param re
     *            the reader
     * @param reLength
     *            the reader data length (in characters)
     * @param bOut
     *            boolean true if the data value is being registered as an output parameter
     * @param collation
     *            The SQL collation associated with the value. Null for non-textual SQL Server types.
     * @throws SQLServerException
     */
    void writeRPCReaderUnicode(String sName, Reader re, long reLength, boolean bOut, SQLCollation collation) throws SQLServerException {
        assert null != re;
        assert DataTypes.UNKNOWN_STREAM_LENGTH == reLength || reLength >= 0;

        // Textual RPC requires a collation. If none is provided, as is the case when
        // the SSType is non-textual, then use the database collation by default.
        if (null == collation)
            collation = con.getDatabaseCollation();

        // Send long values and values with unknown length
        // using PLP chunking on Yukon and later.
        boolean usePLP = (DataTypes.UNKNOWN_STREAM_LENGTH == reLength || reLength > DataTypes.SHORT_VARTYPE_MAX_CHARS);
        if (usePLP) {
            assert DataTypes.UNKNOWN_STREAM_LENGTH == reLength || reLength <= DataTypes.MAX_VARTYPE_MAX_CHARS;

            writeRPCNameValType(sName, bOut, TDSType.NVARCHAR);

            // Handle Yukon v*max type header here.
            writeVMaxHeader((DataTypes.UNKNOWN_STREAM_LENGTH == reLength) ? DataTypes.UNKNOWN_STREAM_LENGTH : 2 * reLength, // Length (in bytes)
                    false, collation);
        }

        // Send non-PLP in all other cases
        else {
            // Length must be known if we're not sending PLP-chunked data. Yukon is handled above.
            // For Shiloh, this is enforced in DTV by converting the Reader to some other length-
            // prefixed value in the setter.
            assert 0 <= reLength && reLength <= DataTypes.NTEXT_MAX_CHARS;

            // For non-PLP types, use the long TEXT type rather than the short VARCHAR
            // type if the stream is too long to fit in the latter or if we don't know the length up
            // front so we have to assume that it might be too long.
            boolean useVarType = reLength <= DataTypes.SHORT_VARTYPE_MAX_CHARS;

            writeRPCNameValType(sName, bOut, useVarType ? TDSType.NVARCHAR : TDSType.NTEXT);

            // Write maximum length, collation, and actual length of the data
            if (useVarType) {
                writeShort((short) DataTypes.SHORT_VARTYPE_MAX_BYTES);
                collation.writeCollation(this);
                writeShort((short) (2 * reLength));
            }
            else {
                writeInt(DataTypes.NTEXT_MAX_CHARS);
                collation.writeCollation(this);
                writeInt((int) (2 * reLength));
            }
        }

        // Write the data
        writeReader(re, reLength, usePLP);
    }
}

/**
 * TDSPacket provides a mechanism for chaining TDS response packets together in a singly-linked list.
 *
 * Having both the link and the data in the same class allows TDSReader marks (see below) to automatically hold onto exactly as much response data as
 * they need, and no more. Java reference semantics ensure that a mark holds onto its referenced packet and subsequent packets (through next
 * references). When all marked references to a packet go away, the packet, and any linked unmarked packets, can be reclaimed by GC.
 */
final class TDSPacket {
    // Raw TDS packet header bytes (type, status, length, SPID, sequence, window).
    final byte[] header = new byte[TDS.PACKET_HEADER_SIZE];
    // Packet payload; only the first payloadLength bytes are valid.
    final byte[] payload;
    int payloadLength;
    // Link to the next packet in the response chain; volatile so a reader thread sees appends.
    volatile TDSPacket next;

    final public String toString() {
        return "TDSPacket(SPID:" + Util.readUnsignedShortBigEndian(header, TDS.PACKET_HEADER_SPID) + " Seq:" + header[TDS.PACKET_HEADER_SEQUENCE_NUM]
                + ")";
    }

    TDSPacket(int size) {
        payload = new byte[size];
        payloadLength = 0;
        next = null;
    }

    // True if this packet carries the end-of-message status bit.
    final boolean isEOM() {
        return TDS.STATUS_BIT_EOM == (header[TDS.PACKET_HEADER_MESSAGE_STATUS] & TDS.STATUS_BIT_EOM);
    }
};

/**
 * TDSReaderMark encapsulates a fixed position in the response data stream.
 *
 * Response data is quantized into a linked chain of packets. A mark refers to a specific location in a specific packet and relies on Java's reference
 * semantics to automatically keep all subsequent packets accessible until the mark is destroyed.
*/ final class TDSReaderMark { final TDSPacket packet; final int payloadOffset; TDSReaderMark(TDSPacket packet, int payloadOffset) { this.packet = packet; this.payloadOffset = payloadOffset; } } /** * TDSReader encapsulates the TDS response data stream. * * Bytes are read from SQL Server into a FIFO of packets. Reader methods traverse the packets to access the data. */ final class TDSReader { private final static Logger logger = Logger.getLogger("com.microsoft.sqlserver.jdbc.internals.TDS.Reader"); final private String traceID; final public String toString() { return traceID; } private final TDSChannel tdsChannel; private final SQLServerConnection con; private final TDSCommand command; final TDSCommand getCommand() { assert null != command; return command; } final SQLServerConnection getConnection() { return con; } private TDSPacket currentPacket = new TDSPacket(0); private TDSPacket lastPacket = currentPacket; private int payloadOffset = 0; private int packetNum = 0; private boolean isStreaming = true; private boolean useColumnEncryption = false; private boolean serverSupportsColumnEncryption = false; private final byte valueBytes[] = new byte[256]; private static final AtomicInteger lastReaderID = new AtomicInteger(0); private static int nextReaderID() { return lastReaderID.incrementAndGet(); } TDSReader(TDSChannel tdsChannel, SQLServerConnection con, TDSCommand command) { this.tdsChannel = tdsChannel; this.con = con; this.command = command; // may be null // if the logging level is not detailed than fine or more we will not have proper readerids. 
        if (logger.isLoggable(Level.FINE))
            traceID = "TDSReader@" + nextReaderID() + " (" + con.toString() + ")";
        else
            traceID = con.toString();
        if (con.isColumnEncryptionSettingEnabled()) {
            useColumnEncryption = true;
        }
        serverSupportsColumnEncryption = con.getServerSupportsColumnEncryption();
    }

    final boolean isColumnEncryptionSettingEnabled() {
        return useColumnEncryption;
    }

    final boolean getServerSupportsColumnEncryption() {
        return serverSupportsColumnEncryption;
    }

    // Logs the offending offset and delegates to the connection, which throws (closing the connection).
    final void throwInvalidTDS() throws SQLServerException {
        if (logger.isLoggable(Level.SEVERE))
            logger.severe(toString() + " got unexpected value in TDS response at offset:" + payloadOffset);
        con.throwInvalidTDS();
    }

    final void throwInvalidTDSToken(String tokenName) throws SQLServerException {
        if (logger.isLoggable(Level.SEVERE))
            logger.severe(toString() + " got unexpected value in TDS response at offset:" + payloadOffset);
        con.throwInvalidTDSToken(tokenName);
    }

    /**
     * Ensures that payload data is available to be read, automatically advancing to (and possibly reading) the next packet.
     *
     * @return true if additional data is available to be read false if no more data is available
     */
    private boolean ensurePayload() throws SQLServerException {
        if (payloadOffset == currentPacket.payloadLength)
            if (!nextPacket())
                return false;
        assert payloadOffset < currentPacket.payloadLength;
        return true;
    }

    /**
     * Advance (and possibly read) the next packet.
     *
     * @return true if additional data is available to be read false if no more data is available
     */
    private boolean nextPacket() throws SQLServerException {
        assert null != currentPacket;

        // Shouldn't call this function unless we're at the end of the current packet...
        TDSPacket consumedPacket = currentPacket;
        assert payloadOffset == consumedPacket.payloadLength;

        // If no buffered packets are left then maybe we can read one...
        // This action must be synchronized against another thread calling
        // readAllPackets() to read in ALL of the remaining packets of the current response.
        if (null == consumedPacket.next) {
            readPacket();
            if (null == consumedPacket.next)
                return false;
        }

        // Advance to that packet. If we are streaming through the
        // response, then unlink the current packet from the next
        // before moving to allow the packet to be reclaimed.
        TDSPacket nextPacket = consumedPacket.next;
        if (isStreaming) {
            if (logger.isLoggable(Level.FINEST))
                logger.finest(toString() + " Moving to next packet -- unlinking consumed packet");

            consumedPacket.next = null;
        }

        currentPacket = nextPacket;
        payloadOffset = 0;
        return true;
    }

    /**
     * Reads the next packet of the TDS channel.
     *
     * This method is synchronized to guard against simultaneously reading packets from one thread that is processing the response and another thread
     * that is trying to buffer it with TDSCommand.detach().
     */
    synchronized final boolean readPacket() throws SQLServerException {
        if (null != command && !command.readingResponse())
            return false;

        // Number of packets in should always be less than number of packets out.
        // If the server has been notified for an interrupt, it may be less by
        // more than one packet.
        assert tdsChannel.numMsgsRcvd < tdsChannel.numMsgsSent : "numMsgsRcvd:" + tdsChannel.numMsgsRcvd + " should be less than numMsgsSent:"
                + tdsChannel.numMsgsSent;

        TDSPacket newPacket = new TDSPacket(con.getTDSPacketSize());

        // First, read the packet header.
        for (int headerBytesRead = 0; headerBytesRead < TDS.PACKET_HEADER_SIZE;) {
            int bytesRead = tdsChannel.read(newPacket.header, headerBytesRead, TDS.PACKET_HEADER_SIZE - headerBytesRead);
            if (bytesRead < 0) {
                if (logger.isLoggable(Level.FINER))
                    logger.finer(toString() + " Premature EOS in response. packetNum:" + packetNum + " headerBytesRead:" + headerBytesRead);

                con.terminate(SQLServerException.DRIVER_ERROR_IO_FAILED,
                        ((0 == packetNum && 0 == headerBytesRead) ? SQLServerException.getErrString("R_noServerResponse")
                                : SQLServerException.getErrString("R_truncatedServerResponse")));
            }

            headerBytesRead += bytesRead;
        }

        // Header size is a 2 byte unsigned short integer in big-endian order.
        int packetLength = Util.readUnsignedShortBigEndian(newPacket.header, TDS.PACKET_HEADER_MESSAGE_LENGTH);

        // Make sure header size is properly bounded and compute length of the packet payload.
        if (packetLength < TDS.PACKET_HEADER_SIZE || packetLength > con.getTDSPacketSize()) {
            logger.warning(toString() + " TDS header contained invalid packet length:" + packetLength + "; packet size:" + con.getTDSPacketSize());
            throwInvalidTDS();
        }

        newPacket.payloadLength = packetLength - TDS.PACKET_HEADER_SIZE;

        // Just grab the SPID for logging (another big-endian unsigned short).
        tdsChannel.setSPID(Util.readUnsignedShortBigEndian(newPacket.header, TDS.PACKET_HEADER_SPID));

        // Packet header looks good enough.
        // When logging, copy the packet header to the log buffer.
        byte[] logBuffer = null;
        if (tdsChannel.isLoggingPackets()) {
            logBuffer = new byte[packetLength];
            System.arraycopy(newPacket.header, 0, logBuffer, 0, TDS.PACKET_HEADER_SIZE);
        }

        // Now for the payload...
        for (int payloadBytesRead = 0; payloadBytesRead < newPacket.payloadLength;) {
            int bytesRead = tdsChannel.read(newPacket.payload, payloadBytesRead, newPacket.payloadLength - payloadBytesRead);
            if (bytesRead < 0)
                con.terminate(SQLServerException.DRIVER_ERROR_IO_FAILED, SQLServerException.getErrString("R_truncatedServerResponse"));

            payloadBytesRead += bytesRead;
        }

        ++packetNum;

        lastPacket.next = newPacket;
        lastPacket = newPacket;

        // When logging, append the payload to the log buffer and write out the whole thing.
        if (tdsChannel.isLoggingPackets()) {
            System.arraycopy(newPacket.payload, 0, logBuffer, TDS.PACKET_HEADER_SIZE, newPacket.payloadLength);

            tdsChannel.logPacket(logBuffer, 0, packetLength,
                    this.toString() + " received Packet:" + packetNum + " (" + newPacket.payloadLength + " bytes)");
        }

        // If end of message, then bump the count of messages received and disable
        // interrupts. If an interrupt happened prior to disabling, then expect
        // to read the attention ack packet as well.
        if (newPacket.isEOM()) {
            ++tdsChannel.numMsgsRcvd;

            // Notify the command (if any) that we've reached the end of the response.
            if (null != command)
                command.onResponseEOM();
        }

        return true;
    }

    /**
     * Records the current read position and disables streaming so that buffered
     * packets from this point on remain reachable until the mark is released.
     */
    final TDSReaderMark mark() {
        TDSReaderMark mark = new TDSReaderMark(currentPacket, payloadOffset);
        isStreaming = false;

        if (logger.isLoggable(Level.FINEST))
            logger.finest(this.toString() + ": Buffering from: " + mark.toString());

        return mark;
    }

    // Rewinds the reader to a previously taken mark.
    final void reset(TDSReaderMark mark) {
        if (logger.isLoggable(Level.FINEST))
            logger.finest(this.toString() + ": Resetting to: " + mark.toString());

        currentPacket = mark.packet;
        payloadOffset = mark.payloadOffset;
    }

    // Re-enables streaming (consumed packets become eligible for GC again).
    final void stream() {
        isStreaming = true;
    }

    /**
     * Returns the number of bytes that can be read (or skipped over) from this TDSReader without blocking by the next caller of a method for this
     * TDSReader.
     *
     * @return the actual number of bytes available.
     */
    final int available() {
        // The number of bytes that can be read without blocking is just the number
        // of bytes that are currently buffered. That is the number of bytes left
        // in the current packet plus the number of bytes in the remaining packets.
        int available = currentPacket.payloadLength - payloadOffset;
        for (TDSPacket packet = currentPacket.next; null != packet; packet = packet.next)
            available += packet.payloadLength;

        return available;
    }

    /**
     *
     * @return number of bytes available in the current packet
     */
    final int availableCurrentPacket() {
        /*
         * The number of bytes that can be read from the current chunk, without including the next chunk that is buffered. This is so the driver can
         * confirm if the next chunk sent is new packet or just continuation
         */
        int available = currentPacket.payloadLength - payloadOffset;
        return available;
    }

    final int peekTokenType() throws SQLServerException {
        // Check whether we're at EOF
        if (!ensurePayload())
            return -1;

        // Peek at the current byte (don't increment payloadOffset!)
        return currentPacket.payload[payloadOffset] & 0xFF;
    }

    final short peekStatusFlag() throws SQLServerException {
        // skip the current packet(i.e, TDS packet type) and peek into the status flag (USHORT)
        if (payloadOffset + 3 <= currentPacket.payloadLength) {
            short value = Util.readShort(currentPacket.payload, payloadOffset + 1);
            return value;
        }

        // as per TDS protocol, TDS_DONE packet should always be followed by status flag
        // throw exception if status packet is not available
        throwInvalidTDS();
        return 0;
    }

    final int readUnsignedByte() throws SQLServerException {
        // Ensure that we have a packet to read from.
if (!ensurePayload()) throwInvalidTDS(); return currentPacket.payload[payloadOffset++] & 0xFF; } final short readShort() throws SQLServerException { if (payloadOffset + 2 <= currentPacket.payloadLength) { short value = Util.readShort(currentPacket.payload, payloadOffset); payloadOffset += 2; return value; } return Util.readShort(readWrappedBytes(2), 0); } final int readUnsignedShort() throws SQLServerException { if (payloadOffset + 2 <= currentPacket.payloadLength) { int value = Util.readUnsignedShort(currentPacket.payload, payloadOffset); payloadOffset += 2; return value; } return Util.readUnsignedShort(readWrappedBytes(2), 0); } final String readUnicodeString(int length) throws SQLServerException { int byteLength = 2 * length; byte bytes[] = new byte[byteLength]; readBytes(bytes, 0, byteLength); return Util.readUnicodeString(bytes, 0, byteLength, con); } final char readChar() throws SQLServerException { return (char) readShort(); } final int readInt() throws SQLServerException { if (payloadOffset + 4 <= currentPacket.payloadLength) { int value = Util.readInt(currentPacket.payload, payloadOffset); payloadOffset += 4; return value; } return Util.readInt(readWrappedBytes(4), 0); } final int readIntBigEndian() throws SQLServerException { if (payloadOffset + 4 <= currentPacket.payloadLength) { int value = Util.readIntBigEndian(currentPacket.payload, payloadOffset); payloadOffset += 4; return value; } return Util.readIntBigEndian(readWrappedBytes(4), 0); } final long readUnsignedInt() throws SQLServerException { return readInt() & 0xFFFFFFFFL; } final long readLong() throws SQLServerException { if (payloadOffset + 8 <= currentPacket.payloadLength) { long value = Util.readLong(currentPacket.payload, payloadOffset); payloadOffset += 8; return value; } return Util.readLong(readWrappedBytes(8), 0); } final void readBytes(byte[] value, int valueOffset, int valueLength) throws SQLServerException { for (int bytesRead = 0; bytesRead < valueLength;) { // Ensure that we have a 
packet to read from. if (!ensurePayload()) throwInvalidTDS(); // Figure out how many bytes to copy from the current packet // (the lesser of the remaining value bytes and the bytes left in the packet). int bytesToCopy = valueLength - bytesRead; if (bytesToCopy > currentPacket.payloadLength - payloadOffset) bytesToCopy = currentPacket.payloadLength - payloadOffset; // Copy some bytes from the current packet to the destination value. if (logger.isLoggable(Level.FINEST)) logger.finest(toString() + " Reading " + bytesToCopy + " bytes from offset " + payloadOffset); System.arraycopy(currentPacket.payload, payloadOffset, value, valueOffset + bytesRead, bytesToCopy); bytesRead += bytesToCopy; payloadOffset += bytesToCopy; } } final byte[] readWrappedBytes(int valueLength) throws SQLServerException { assert valueLength <= valueBytes.length; readBytes(valueBytes, 0, valueLength); return valueBytes; } final Object readDecimal(int valueLength, TypeInfo typeInfo, JDBCType jdbcType, StreamType streamType) throws SQLServerException { if (valueLength > valueBytes.length) { logger.warning(toString() + " Invalid value length:" + valueLength); throwInvalidTDS(); } readBytes(valueBytes, 0, valueLength); return DDC.convertBigDecimalToObject(Util.readBigDecimal(valueBytes, valueLength, typeInfo.getScale()), jdbcType, streamType); } final Object readMoney(int valueLength, JDBCType jdbcType, StreamType streamType) throws SQLServerException { BigInteger bi; switch (valueLength) { case 8: // money { int intBitsHi = readInt(); int intBitsLo = readInt(); if (JDBCType.BINARY == jdbcType) { byte value[] = new byte[8]; Util.writeIntBigEndian(intBitsHi, value, 0); Util.writeIntBigEndian(intBitsLo, value, 4); return value; } bi = BigInteger.valueOf(((long) intBitsHi << 32) | (intBitsLo & 0xFFFFFFFFL)); break; } case 4: // smallmoney if (JDBCType.BINARY == jdbcType) { byte value[] = new byte[4]; Util.writeIntBigEndian(readInt(), value, 0); return value; } bi = BigInteger.valueOf(readInt()); break; 
default: throwInvalidTDS(); return null; } return DDC.convertBigDecimalToObject(new BigDecimal(bi, 4), jdbcType, streamType); } final Object readReal(int valueLength, JDBCType jdbcType, StreamType streamType) throws SQLServerException { if (4 != valueLength) throwInvalidTDS(); return DDC.convertFloatToObject(Float.intBitsToFloat(readInt()), jdbcType, streamType); } final Object readFloat(int valueLength, JDBCType jdbcType, StreamType streamType) throws SQLServerException { if (8 != valueLength) throwInvalidTDS(); return DDC.convertDoubleToObject(Double.longBitsToDouble(readLong()), jdbcType, streamType); } final Object readDateTime(int valueLength, Calendar appTimeZoneCalendar, JDBCType jdbcType, StreamType streamType) throws SQLServerException { // Build and return the right kind of temporal object. int daysSinceSQLBaseDate; int ticksSinceMidnight; int msecSinceMidnight; switch (valueLength) { case 8: // SQL datetime is 4 bytes for days since SQL Base Date // (January 1, 1900 00:00:00 GMT) and 4 bytes for // the number of three hundredths (1/300) of a second // since midnight. daysSinceSQLBaseDate = readInt(); ticksSinceMidnight = readInt(); if (JDBCType.BINARY == jdbcType) { byte value[] = new byte[8]; Util.writeIntBigEndian(daysSinceSQLBaseDate, value, 0); Util.writeIntBigEndian(ticksSinceMidnight, value, 4); return value; } msecSinceMidnight = (ticksSinceMidnight * 10 + 1) / 3; // Convert to msec (1 tick = 1 300th of a sec = 3 msec) break; case 4: // SQL smalldatetime has less precision. It stores 2 bytes // for the days since SQL Base Date and 2 bytes for minutes // after midnight. 
daysSinceSQLBaseDate = readUnsignedShort(); ticksSinceMidnight = readUnsignedShort(); if (JDBCType.BINARY == jdbcType) { byte value[] = new byte[4]; Util.writeShortBigEndian((short) daysSinceSQLBaseDate, value, 0); Util.writeShortBigEndian((short) ticksSinceMidnight, value, 2); return value; } msecSinceMidnight = ticksSinceMidnight * 60 * 1000; // Convert to msec (1 tick = 1 min = 60,000 msec) break; default: throwInvalidTDS(); return null; } // Convert the DATETIME/SMALLDATETIME value to the desired Java type. return DDC.convertTemporalToObject(jdbcType, SSType.DATETIME, appTimeZoneCalendar, daysSinceSQLBaseDate, msecSinceMidnight, 0); // scale // (ignored // for // fixed-scale // DATETIME/SMALLDATETIME // types) } final Object readDate(int valueLength, Calendar appTimeZoneCalendar, JDBCType jdbcType) throws SQLServerException { if (TDS.DAYS_INTO_CE_LENGTH != valueLength) throwInvalidTDS(); // Initialize the date fields to their appropriate values. int localDaysIntoCE = readDaysIntoCE(); // Convert the DATE value to the desired Java type. return DDC.convertTemporalToObject(jdbcType, SSType.DATE, appTimeZoneCalendar, localDaysIntoCE, 0, // midnight local to app time zone 0); // scale (ignored for DATE) } final Object readTime(int valueLength, TypeInfo typeInfo, Calendar appTimeZoneCalendar, JDBCType jdbcType) throws SQLServerException { if (TDS.timeValueLength(typeInfo.getScale()) != valueLength) throwInvalidTDS(); // Read the value from the server long localNanosSinceMidnight = readNanosSinceMidnight(typeInfo.getScale()); // Convert the TIME value to the desired Java type. 
return DDC.convertTemporalToObject(jdbcType, SSType.TIME, appTimeZoneCalendar, 0, localNanosSinceMidnight, typeInfo.getScale()); } final Object readDateTime2(int valueLength, TypeInfo typeInfo, Calendar appTimeZoneCalendar, JDBCType jdbcType) throws SQLServerException { if (TDS.datetime2ValueLength(typeInfo.getScale()) != valueLength) throwInvalidTDS(); // Read the value's constituent components long localNanosSinceMidnight = readNanosSinceMidnight(typeInfo.getScale()); int localDaysIntoCE = readDaysIntoCE(); // Convert the DATETIME2 value to the desired Java type. return DDC.convertTemporalToObject(jdbcType, SSType.DATETIME2, appTimeZoneCalendar, localDaysIntoCE, localNanosSinceMidnight, typeInfo.getScale()); } final Object readDateTimeOffset(int valueLength, TypeInfo typeInfo, JDBCType jdbcType) throws SQLServerException { if (TDS.datetimeoffsetValueLength(typeInfo.getScale()) != valueLength) throwInvalidTDS(); // The nanos since midnight and days into Common Era parts of DATETIMEOFFSET values // are in UTC. Use the minutes offset part to convert to local. long utcNanosSinceMidnight = readNanosSinceMidnight(typeInfo.getScale()); int utcDaysIntoCE = readDaysIntoCE(); int localMinutesOffset = readShort(); // Convert the DATETIMEOFFSET value to the desired Java type. return DDC.convertTemporalToObject(jdbcType, SSType.DATETIMEOFFSET, new GregorianCalendar(new SimpleTimeZone(localMinutesOffset * 60 * 1000, ""), Locale.US), utcDaysIntoCE, utcNanosSinceMidnight, typeInfo.getScale()); } private int readDaysIntoCE() throws SQLServerException { byte value[] = new byte[TDS.DAYS_INTO_CE_LENGTH]; readBytes(value, 0, value.length); int daysIntoCE = 0; for (int i = 0; i < value.length; i++) daysIntoCE |= ((value[i] & 0xFF) << (8 * i)); // Theoretically should never encounter a value that is outside of the valid date range if (daysIntoCE < 0) throwInvalidTDS(); return daysIntoCE; } // Scale multipliers used to convert variable-scaled temporal values to a fixed 100ns scale. 
    // Using this array is measurably faster than using Math.pow(10, ...)
    private final static int[] SCALED_MULTIPLIERS = {10000000, 1000000, 100000, 10000, 1000, 100, 10, 1};

    /**
     * Reads a variable-scale time-of-day field and returns it as nanoseconds since midnight.
     * The wire value is a little-endian integer counting units of 10^(-scale) seconds;
     * it is normalized to a fixed 100ns scale via SCALED_MULTIPLIERS before conversion.
     */
    private long readNanosSinceMidnight(int scale) throws SQLServerException {
        assert 0 <= scale && scale <= TDS.MAX_FRACTIONAL_SECONDS_SCALE;

        byte value[] = new byte[TDS.nanosSinceMidnightLength(scale)];
        readBytes(value, 0, value.length);

        long hundredNanosSinceMidnight = 0;
        for (int i = 0; i < value.length; i++)
            hundredNanosSinceMidnight |= (value[i] & 0xFFL) << (8 * i);
        hundredNanosSinceMidnight *= SCALED_MULTIPLIERS[scale];

        // Reject values outside [0, one day).
        if (!(0 <= hundredNanosSinceMidnight && hundredNanosSinceMidnight < Nanos.PER_DAY / 100))
            throwInvalidTDS();

        return 100 * hundredNanosSinceMidnight;
    }

    // Shape of the canonical GUID string form (used only to size the StringBuilder below).
    final static String guidTemplate = "NNNNNNNN-NNNN-NNNN-NNNN-NNNNNNNNNNNN";

    /**
     * Reads a 16-byte UNIQUEIDENTIFIER value and converts it to the requested JDBC type.
     * For character-typed targets the GUID is rendered in canonical string form; note that the
     * first three groups are emitted byte-reversed, matching the mixed-endian GUID wire layout
     * implied by the descending indices (3-i, 5-i, 7-i) below.
     */
    final Object readGUID(int valueLength, JDBCType jdbcType, StreamType streamType) throws SQLServerException {
        // GUIDs must be exactly 16 bytes
        if (16 != valueLength)
            throwInvalidTDS();

        // Read in the GUID's binary value
        byte guid[] = new byte[16];
        readBytes(guid, 0, 16);

        switch (jdbcType) {
            case CHAR:
            case VARCHAR:
            case LONGVARCHAR:
            case GUID: {
                StringBuilder sb = new StringBuilder(guidTemplate.length());
                for (int i = 0; i < 4; i++) {
                    sb.append(Util.hexChars[(guid[3 - i] & 0xF0) >> 4]);
                    sb.append(Util.hexChars[guid[3 - i] & 0x0F]);
                }
                sb.append('-');
                for (int i = 0; i < 2; i++) {
                    sb.append(Util.hexChars[(guid[5 - i] & 0xF0) >> 4]);
                    sb.append(Util.hexChars[guid[5 - i] & 0x0F]);
                }
                sb.append('-');
                for (int i = 0; i < 2; i++) {
                    sb.append(Util.hexChars[(guid[7 - i] & 0xF0) >> 4]);
                    sb.append(Util.hexChars[guid[7 - i] & 0x0F]);
                }
                sb.append('-');
                for (int i = 0; i < 2; i++) {
                    sb.append(Util.hexChars[(guid[8 + i] & 0xF0) >> 4]);
                    sb.append(Util.hexChars[guid[8 + i] & 0x0F]);
                }
                sb.append('-');
                for (int i = 0; i < 6; i++) {
                    sb.append(Util.hexChars[(guid[10 + i] & 0xF0) >> 4]);
                    sb.append(Util.hexChars[guid[10 + i] & 0x0F]);
                }

                try {
                    return DDC.convertStringToObject(sb.toString(), Encoding.UNICODE.charset(), jdbcType, streamType);
                }
                catch (UnsupportedEncodingException e) {
                    MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_errorConvertingValue"));
                    throw new SQLServerException(form.format(new Object[] {"UNIQUEIDENTIFIER", jdbcType}), null, 0, e);
                }
            }

            default: {
                if (StreamType.BINARY == streamType || StreamType.ASCII == streamType)
                    return new ByteArrayInputStream(guid);

                return guid;
            }
        }
    }

    /**
     * Reads a multi-part table name from TDS and returns it as an array of Strings.
     */
    final SQLIdentifier readSQLIdentifier() throws SQLServerException {
        // Multi-part names should have between 1 and 4 parts
        int numParts = readUnsignedByte();
        if (!(1 <= numParts && numParts <= 4))
            throwInvalidTDS();

        // Each part is a length-prefixed Unicode string
        String[] nameParts = new String[numParts];
        for (int i = 0; i < numParts; i++)
            nameParts[i] = readUnicodeString(readUnsignedShort());

        // Build the identifier from the name parts
        SQLIdentifier identifier = new SQLIdentifier();
        identifier.setObjectName(nameParts[numParts - 1]);
        if (numParts >= 2)
            identifier.setSchemaName(nameParts[numParts - 2]);
        if (numParts >= 3)
            identifier.setDatabaseName(nameParts[numParts - 3]);
        if (4 == numParts)
            identifier.setServerName(nameParts[numParts - 4]);

        return identifier;
    }

    /** Reads a collation descriptor; terminates the connection if its encoding is unsupported. */
    final SQLCollation readCollation() throws SQLServerException {
        SQLCollation collation = null;

        try {
            collation = new SQLCollation(this);
        }
        catch (UnsupportedEncodingException e) {
            con.terminate(SQLServerException.DRIVER_ERROR_INVALID_TDS, e.getMessage(), e);
            // not reached
        }

        return collation;
    }

    /** Skips over bytesToSkip bytes of response payload, crossing packet boundaries as needed. */
    final void skip(int bytesToSkip) throws SQLServerException {
        assert bytesToSkip >= 0;

        while (bytesToSkip > 0) {
            // Ensure that we have a packet to read from.
            if (!ensurePayload())
                throwInvalidTDS();

            int bytesSkipped = bytesToSkip;
            if (bytesSkipped > currentPacket.payloadLength - payloadOffset)
                bytesSkipped = currentPacket.payloadLength - payloadOffset;

            bytesToSkip -= bytesSkipped;
            payloadOffset += bytesSkipped;
        }
    }

    /**
     * Verifies that the server acknowledged the feature extension when column encryption is enabled.
     * NOTE(review): method name violates Java naming conventions (should start lowercase);
     * kept as-is because callers are outside this view.
     */
    final void TryProcessFeatureExtAck(boolean featureExtAckReceived) throws SQLServerException {
        // in case of redirection, do not check if TDS_FEATURE_EXTENSION_ACK is received or not.
        if (null != this.con.getRoutingInfo()) {
            return;
        }

        if (isColumnEncryptionSettingEnabled() && !featureExtAckReceived)
            throw new SQLServerException(this, SQLServerException.getErrString("R_AE_NotSupportedByServer"), null, 0, false);
    }
}

/**
 * Timer for use with Commands that support a timeout.
 *
 * Once started, the timer runs for the prescribed number of seconds unless stopped. If the timer runs out, it interrupts its associated Command with
 * a reason like "timed out".
 */
final class TimeoutTimer implements Runnable {
    private static final String threadGroupName = "mssql-jdbc-TimeoutTimer";
    private final int timeoutSeconds;
    private final TDSCommand command;
    private volatile Future<?> task;

    // Shared pool of daemon threads on which all timeout timers run.
    private static final ExecutorService executor = Executors.newCachedThreadPool(new ThreadFactory() {
        private final ThreadGroup tg = new ThreadGroup(threadGroupName);
        private final String threadNamePrefix = tg.getName() + "-";
        private final AtomicInteger threadNumber = new AtomicInteger(0);

        @Override
        public Thread newThread(Runnable r) {
            Thread t = new Thread(tg, r, threadNamePrefix + threadNumber.incrementAndGet());
            t.setDaemon(true);
            return t;
        }
    });

    // Set by stop(); checked once per second by run() in addition to the interrupt from task.cancel(true).
    private volatile boolean canceled = false;

    TimeoutTimer(int timeoutSeconds, TDSCommand command) {
        assert timeoutSeconds > 0;
        assert null != command;

        this.timeoutSeconds = timeoutSeconds;
        this.command = command;
    }

    final void start() {
        task = executor.submit(this);
    }

    final void stop() {
        task.cancel(true);
        canceled = true;
    }

    public void run() {
        int secondsRemaining = timeoutSeconds;
        try {
            // Poll every second while time is left on the timer.
            // Return if/when the timer is canceled.
            do {
                if (canceled)
                    return;

                Thread.sleep(1000);
            }
            while (--secondsRemaining > 0);
        }
        catch (InterruptedException e) {
            // re-interrupt the current thread, in order to restore the thread's interrupt status.
            Thread.currentThread().interrupt();
            return;
        }

        // If the timer wasn't canceled before it ran out of
        // time then interrupt the registered command.
        try {
            command.interrupt(SQLServerException.getErrString("R_queryTimedOut"));
        }
        catch (SQLServerException e) {
            // Unfortunately, there's nothing we can do if we
            // fail to time out the request. There is no way
            // to report back what happened.
            command.log(Level.FINE, "Command could not be timed out. Reason: " + e.getMessage());
        }
    }
}

/**
 * TDSCommand encapsulates an interruptable TDS conversation.
 *
 * A conversation may consist of one or more TDS request and response messages. A command may be interrupted at any point, from any thread, and for
 * any reason. Acknowledgement and handling of an interrupt is fully encapsulated by this class.
 *
 * Commands may be created with an optional timeout (in seconds). Timeouts are implemented as a form of interrupt, where the interrupt event occurs
 * when the timeout period expires. Currently, only the time to receive the response from the channel counts against the timeout period.
 */
abstract class TDSCommand {
    // Hook implemented by each concrete command with its execution details.
    abstract boolean doExecute() throws SQLServerException;

    final static Logger logger = Logger.getLogger("com.microsoft.sqlserver.jdbc.internals.TDS.Command");
    private final String logContext;

    final String getLogContext() {
        return logContext;
    }

    private String traceID;

    final public String toString() {
        if (traceID == null)
            traceID = "TDSCommand@" + Integer.toHexString(hashCode()) + " (" + logContext + ")";
        return traceID;
    }

    final void log(Level level, String message) {
        logger.log(level, toString() + ": " + message);
    }

    // Optional timer that is set if the command was created with a non-zero timeout period.
// When the timer expires, the command is interrupted. private final TimeoutTimer timeoutTimer; // TDS channel accessors // These are set/reset at command execution time. // Volatile ensures visibility to execution thread and interrupt thread private volatile TDSWriter tdsWriter; private volatile TDSReader tdsReader; protected TDSWriter getTDSWriter(){ return tdsWriter; } // Lock to ensure atomicity when manipulating more than one of the following // shared interrupt state variables below. private final Object interruptLock = new Object(); // Flag set when this command starts execution, indicating that it is // ready to respond to interrupts; and cleared when its last response packet is // received, indicating that it is no longer able to respond to interrupts. // If the command is interrupted after interrupts have been disabled, then the // interrupt is ignored. private volatile boolean interruptsEnabled = false; protected boolean getInterruptsEnabled() { return interruptsEnabled; } protected void setInterruptsEnabled(boolean interruptsEnabled) { synchronized (interruptLock) { this.interruptsEnabled = interruptsEnabled; } } // Flag set to indicate that an interrupt has happened. private volatile boolean wasInterrupted = false; private boolean wasInterrupted() { return wasInterrupted; } // The reason for the interrupt. private volatile String interruptReason = null; // Flag set when this command's request to the server is complete. // If a command is interrupted before its request is complete, it is the executing // thread's responsibility to send the attention signal to the server if necessary. // After the request is complete, the interrupting thread must send the attention signal. 
private volatile boolean requestComplete; protected boolean getRequestComplete() { return requestComplete; } protected void setRequestComplete(boolean requestComplete) { synchronized (interruptLock) { this.requestComplete = requestComplete; } } // Flag set when an attention signal has been sent to the server, indicating that a // TDS packet containing the attention ack message is to be expected in the response. // This flag is cleared after the attention ack message has been received and processed. private volatile boolean attentionPending = false; boolean attentionPending() { return attentionPending; } // Flag set when this command's response has been processed. Until this flag is set, // there may be unprocessed information left in the response, such as transaction // ENVCHANGE notifications. private volatile boolean processedResponse; protected boolean getProcessedResponse() { return processedResponse; } protected void setProcessedResponse(boolean processedResponse) { synchronized (interruptLock) { this.processedResponse = processedResponse; } } // Flag set when this command's response is ready to be read from the server and cleared // after its response has been received, but not necessarily processed, up to and including // any attention ack. The command's response is read either on demand as it is processed, // or by detaching. private volatile boolean readingResponse; final boolean readingResponse() { return readingResponse; } /** * Creates this command with an optional timeout. * * @param logContext * the string describing the context for this command. * @param timeoutSeconds * (optional) the time before which the command must complete before it is interrupted. A value of 0 means no timeout. */ TDSCommand(String logContext, int timeoutSeconds) { this.logContext = logContext; this.timeoutTimer = (timeoutSeconds > 0) ? (new TimeoutTimer(timeoutSeconds, this)) : null; } /** * Executes this command. 
* * @param tdsWriter * @param tdsReader * @throws SQLServerException * on any error executing the command, including cancel or timeout. */ boolean execute(TDSWriter tdsWriter, TDSReader tdsReader) throws SQLServerException { this.tdsWriter = tdsWriter; this.tdsReader = tdsReader; assert null != tdsReader; try { return doExecute(); // Derived classes implement the execution details } catch (SQLServerException e) { try { // If command execution threw an exception for any reason before the request // was complete then interrupt the command (it may already be interrupted) // and close it out to ensure that any response to the error/interrupt // is processed. // no point in trying to cancel on a closed connection. if (!requestComplete && !tdsReader.getConnection().isClosed()) { interrupt(e.getMessage()); onRequestComplete(); close(); } } catch (SQLServerException interruptException) { if (logger.isLoggable(Level.FINE)) logger.fine(this.toString() + ": Ignoring error in sending attention: " + interruptException.getMessage()); } // throw the original exception even if trying to interrupt fails even in the case // of trying to send a cancel to the server. throw e; } } /** * Provides sane default response handling. * * This default implementation just consumes everything in the response message. */ void processResponse(TDSReader tdsReader) throws SQLServerException { if (logger.isLoggable(Level.FINEST)) logger.finest(this.toString() + ": Processing response"); try { TDSParser.parse(tdsReader, getLogContext()); } catch (SQLServerException e) { if (SQLServerException.DRIVER_ERROR_FROM_DATABASE != e.getDriverErrorCode()) throw e; if (logger.isLoggable(Level.FINEST)) logger.finest(this.toString() + ": Ignoring error from database: " + e.getMessage()); } } /** * Clears this command from the TDS channel so that another command can execute. * * This method does not process the response. It just buffers it in memory, including any attention ack that may be present. 
*/ final void detach() throws SQLServerException { if (logger.isLoggable(Level.FINEST)) logger.finest(this + ": detaching..."); // Read any remaining response packets from the server. // This operation may be timed out or cancelled from another thread. while (tdsReader.readPacket()) ; // Postcondition: the entire response has been read assert !readingResponse; } final void close() { if (logger.isLoggable(Level.FINEST)) logger.finest(this + ": closing..."); if (logger.isLoggable(Level.FINEST)) logger.finest(this + ": processing response..."); while (!processedResponse) { try { processResponse(tdsReader); } catch (SQLServerException e) { if (logger.isLoggable(Level.FINEST)) logger.finest(this + ": close ignoring error processing response: " + e.getMessage()); if (tdsReader.getConnection().isSessionUnAvailable()) { processedResponse = true; attentionPending = false; } } } if (attentionPending) { if (logger.isLoggable(Level.FINEST)) logger.finest(this + ": processing attention ack..."); try { TDSParser.parse(tdsReader, "attention ack"); } catch (SQLServerException e) { if (tdsReader.getConnection().isSessionUnAvailable()) { if (logger.isLoggable(Level.FINEST)) logger.finest(this + ": giving up on attention ack after connection closed by exception: " + e); attentionPending = false; } else { if (logger.isLoggable(Level.FINEST)) logger.finest(this + ": ignored exception: " + e); } } // If the parser returns to us without processing the expected attention ack, // then assume that no attention ack is forthcoming from the server and // terminate the connection to prevent any other command from executing. 
if (attentionPending) { logger.severe(this + ": expected attn ack missing or not processed; terminating connection..."); try { tdsReader.throwInvalidTDS(); } catch (SQLServerException e) { if (logger.isLoggable(Level.FINEST)) logger.finest(this + ": ignored expected invalid TDS exception: " + e); assert tdsReader.getConnection().isSessionUnAvailable(); attentionPending = false; } } } // Postcondition: // Response has been processed and there is no attention pending -- the command is closed. // Of course the connection may be closed too, but the command is done regardless... assert processedResponse && !attentionPending; } /** * Interrupts execution of this command, typically from another thread. * * Only the first interrupt has any effect. Subsequent interrupts are ignored. Interrupts are also ignored until enabled. If interrupting the * command requires an attention signal to be sent to the server, then this method sends that signal if the command's request is already complete. * * Signalling mechanism is "fire and forget". It is up to either the execution thread or, possibly, a detaching thread, to ensure that any pending * attention ack later will be received and processed. * * @param reason * the reason for the interrupt, typically cancel or timeout. * @throws SQLServerException * if interrupting fails for some reason. This call does not throw the reason for the interrupt. */ void interrupt(String reason) throws SQLServerException { // Multiple, possibly simultaneous, interrupts may occur. // Only the first one should be recognized and acted upon. 
synchronized (interruptLock) { if (interruptsEnabled && !wasInterrupted()) { if (logger.isLoggable(Level.FINEST)) logger.finest(this + ": Raising interrupt for reason:" + reason); wasInterrupted = true; interruptReason = reason; if (requestComplete) attentionPending = tdsWriter.sendAttention(); } } } private boolean interruptChecked = false; /** * Checks once whether an interrupt has occurred, and, if it has, throws an exception indicating that fact. * * Any calls after the first to check for interrupts are no-ops. This method is called periodically from this command's execution thread to notify * the app when an interrupt has happened. * * It should only be called from places where consistent behavior can be ensured after the exception is thrown. For example, it should not be * called at arbitrary times while processing the response, as doing so could leave the response token stream in an inconsistent state. Currently, * response processing only checks for interrupts after every result or OUT parameter. * * Request processing checks for interrupts before writing each packet. * * @throws SQLServerException * if this command was interrupted, throws the reason for the interrupt. */ final void checkForInterrupt() throws SQLServerException { // Throw an exception with the interrupt reason if this command was interrupted. // Note that the interrupt reason may be null. Checking whether the // command was interrupted does not require the interrupt lock since only one // of the shared state variables is being manipulated; interruptChecked is not // shared with the interrupt thread. if (wasInterrupted() && !interruptChecked) { interruptChecked = true; if (logger.isLoggable(Level.FINEST)) logger.finest(this + ": throwing interrupt exception, reason: " + interruptReason); throw new SQLServerException(interruptReason, SQLState.STATEMENT_CANCELED, DriverError.NOT_SET, null); } } /** * Notifies this command when no more request packets are to be sent to the server. 
* * After the last packet has been sent, the only way to interrupt the request is to send an attention signal from the interrupt() method. * * Note that this method is called when the request completes normally (last packet sent with EOM bit) or when it completes after being * interrupted (0 or more packets sent with no EOM bit). */ final void onRequestComplete() throws SQLServerException { assert !requestComplete; if (logger.isLoggable(Level.FINEST)) logger.finest(this + ": request complete"); synchronized (interruptLock) { requestComplete = true; // If this command was interrupted before its request was complete then // we need to send the attention signal if necessary. Note that if no // attention signal is sent (i.e. no packets were sent to the server before // the interrupt happened), then don't expect an attention ack or any // other response. if (!interruptsEnabled) { assert !attentionPending; assert !processedResponse; assert !readingResponse; processedResponse = true; } else if (wasInterrupted()) { if (tdsWriter.isEOMSent()) { attentionPending = tdsWriter.sendAttention(); readingResponse = attentionPending; } else { assert !attentionPending; readingResponse = tdsWriter.ignoreMessage(); } processedResponse = !readingResponse; } else { assert !attentionPending; assert !processedResponse; readingResponse = true; } } } /** * Notifies this command when the last packet of the response has been read. * * When the last packet is read, interrupts are disabled. If an interrupt occurred prior to disabling that caused an attention signal to be sent * to the server, then an extra packet containing the attention ack is read. * * This ensures that on return from this method, the TDS channel is clear of all response packets for this command. * * Note that this method is called for the attention ack message itself as well, so we need to be sure not to expect more than one attention * ack... 
*/ final void onResponseEOM() throws SQLServerException { boolean readAttentionAck = false; // Atomically disable interrupts and check for a previous interrupt requiring // an attention ack to be read. synchronized (interruptLock) { if (interruptsEnabled) { if (logger.isLoggable(Level.FINEST)) logger.finest(this + ": disabling interrupts"); // Determine whether we still need to read the attention ack packet. // When a command is interrupted, Yukon (and later) always sends a response // containing at least a DONE(ERROR) token before it sends the attention ack, // even if the command's request was not complete. readAttentionAck = attentionPending; interruptsEnabled = false; } } // If an attention packet needs to be read then read it. This should // be done outside of the interrupt lock to avoid unnecessarily blocking // interrupting threads. Note that it is remotely possible that the call // to readPacket won't actually read anything if the attention ack was // already read by TDSCommand.detach(), in which case this method could // be called from multiple threads, leading to a benign race to clear the // readingResponse flag. if (readAttentionAck) tdsReader.readPacket(); readingResponse = false; } /** * Notifies this command when the end of its response token stream has been reached. * * After this call, we are guaranteed that tokens in the response have been processed. */ final void onTokenEOF() { processedResponse = true; } /** * Notifies this command when the attention ack (a DONE token with a special flag) has been processed. * * After this call, the attention ack should no longer be expected. */ final void onAttentionAck() { assert attentionPending; attentionPending = false; } /** * Starts sending this command's TDS request to the server. * * @param tdsMessageType * the type of the TDS message (RPC, QUERY, etc.) * @return the TDS writer used to write the request. * @throws SQLServerException * on any error, including acknowledgement of an interrupt. 
*/ final TDSWriter startRequest(byte tdsMessageType) throws SQLServerException { if (logger.isLoggable(Level.FINEST)) logger.finest(this + ": starting request..."); // Start this command's request message try { tdsWriter.startMessage(this, tdsMessageType); } catch (SQLServerException e) { if (logger.isLoggable(Level.FINEST)) logger.finest(this + ": starting request: exception: " + e.getMessage()); throw e; } // (Re)initialize this command's interrupt state for its current execution. // To ensure atomically consistent behavior, do not leave the interrupt lock // until interrupts have been (re)enabled. synchronized (interruptLock) { requestComplete = false; readingResponse = false; processedResponse = false; attentionPending = false; wasInterrupted = false; interruptReason = null; interruptsEnabled = true; } return tdsWriter; } /** * Finishes the TDS request and then starts reading the TDS response from the server. * * @return the TDS reader used to read the response. * @throws SQLServerException * if there is any kind of error. */ final TDSReader startResponse() throws SQLServerException { return startResponse(false); } final TDSReader startResponse(boolean isAdaptive) throws SQLServerException { // Finish sending the request message. If this command was interrupted // at any point before endMessage() returns, then endMessage() throws an // exception with the reason for the interrupt. Request interrupts // are disabled by the time endMessage() returns. if (logger.isLoggable(Level.FINEST)) logger.finest(this + ": finishing request"); try { tdsWriter.endMessage(); } catch (SQLServerException e) { if (logger.isLoggable(Level.FINEST)) logger.finest(this + ": finishing request: endMessage threw exception: " + e.getMessage()); throw e; } // If command execution is subject to timeout then start timing until // the server returns the first response packet. 
if (null != timeoutTimer) { if (logger.isLoggable(Level.FINEST)) logger.finest(this.toString() + ": Starting timer..."); timeoutTimer.start(); } if (logger.isLoggable(Level.FINEST)) logger.finest(this.toString() + ": Reading response..."); try { // Wait for the server to execute the request and read the first packet // (responseBuffering=adaptive) or all packets (responseBuffering=full) // of the response. if (isAdaptive) { tdsReader.readPacket(); } else { while (tdsReader.readPacket()) ; } } catch (SQLServerException e) { if (logger.isLoggable(Level.FINEST)) logger.finest(this.toString() + ": Exception reading response: " + e.getMessage()); throw e; } finally { // If command execution was subject to timeout then stop timing as soon // as the server returns the first response packet or errors out. if (null != timeoutTimer) { if (logger.isLoggable(Level.FINEST)) logger.finest(this.toString() + ": Stopping timer..."); timeoutTimer.stop(); } } return tdsReader; } } /** * UninterruptableTDSCommand encapsulates an uninterruptable TDS conversation. * * TDSCommands have interruptability built in. However, some TDSCommands such as DTC commands, connection commands, cursor close and prepared * statement handle close shouldn't be interruptable. This class provides a base implementation for such commands. */ abstract class UninterruptableTDSCommand extends TDSCommand { UninterruptableTDSCommand(String logContext) { super(logContext, 0); } final void interrupt(String reason) throws SQLServerException { // Interrupting an uninterruptable command is a no-op. That is, // it can happen, but it should have no effect. logger.finest(toString() + " Ignoring interrupt of uninterruptable TDS command; Reason:" + reason); } }
package com.relayrides.pushy.apns;

import io.netty.channel.nio.NioEventLoopGroup;

import java.lang.Thread.UncaughtExceptionHandler;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Vector;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

import javax.net.ssl.SSLContext;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * <p>A {@code PushManager} is the main public-facing point of interaction with APNs. Push managers manage the queue of
 * outbound push notifications and manage connections to the various APNs servers. Push managers should always be
 * created using the {@link PushManagerFactory} class.</p>
 *
 * <p>Callers send push notifications by adding them to the push manager's queue. The push manager will send
 * notifications from the queue as quickly as it is able to do so, and will never put notifications back in the queue
 * (push managers maintain a separate, internal queue for notifications that should be re-sent).</p>
 *
 * @author <a href="mailto:jon@relayrides.com">Jon Chambers</a>
 *
 * @see PushManagerFactory
 */
public class PushManager<T extends ApnsPushNotification> implements ApnsConnectionListener<T> {
    // Public-facing queue of new notifications; drained by the dispatch thread.
    private final BlockingQueue<T> queue;
    // Internal queue for notifications that must be re-sent (write failures, unprocessed on close).
    private final LinkedBlockingQueue<T> retryQueue;

    private final ApnsEnvironment environment;
    private final SSLContext sslContext;
    private final int concurrentConnectionCount;
    // All connections that have been opened and not yet fully torn down; guarded by its own monitor.
    private final HashSet<ApnsConnection<T>> activeConnections;
    // Subset of connections currently writable; the dispatch thread pulls from this pool.
    private final ApnsConnectionPool<T> writableConnectionPool;
    private final FeedbackServiceClient feedbackServiceClient;

    // Vector (synchronized) because listeners may be (un)registered from arbitrary threads.
    private final Vector<RejectedNotificationListener<? super T>> rejectedNotificationListeners;
    private final Vector<FailedConnectionListener<? super T>> failedConnectionListeners;

    private Thread dispatchThread;
    private final NioEventLoopGroup eventLoopGroup;
    private final boolean shouldShutDownEventLoopGroup;

    private final ExecutorService listenerExecutorService;
    private final boolean shouldShutDownListenerExecutorService;

    // Shutdown happens in two phases: started (no new work accepted) and finished (all resources released).
    private boolean shutDownStarted = false;
    private boolean shutDownFinished = false;

    private static final Logger log = LoggerFactory.getLogger(PushManager.class);

    /**
     * Restarts the dispatch thread if it dies unexpectedly while the manager is still running.
     */
    private static class DispatchThreadExceptionHandler<T extends ApnsPushNotification> implements UncaughtExceptionHandler {
        private final Logger log = LoggerFactory.getLogger(DispatchThreadExceptionHandler.class);

        final PushManager<T> manager;

        public DispatchThreadExceptionHandler(final PushManager<T> manager) {
            this.manager = manager;
        }

        public void uncaughtException(final Thread t, final Throwable e) {
            log.error("Dispatch thread died unexpectedly. Please file a bug with the exception details.", e);

            if (this.manager.isStarted()) {
                this.manager.createAndStartDispatchThread();
            }
        }
    }

    /**
     * Constructs a push manager. Any of {@code eventLoopGroup}, {@code listenerExecutorService} and {@code queue} may
     * be {@code null}, in which case the manager creates (and later shuts down) its own instance; externally-provided
     * resources are NOT shut down by this manager.
     */
    protected PushManager(final ApnsEnvironment environment, final SSLContext sslContext,
            final int concurrentConnectionCount, final NioEventLoopGroup eventLoopGroup,
            final ExecutorService listenerExecutorService, final BlockingQueue<T> queue) {

        this.queue = queue != null ? queue : new LinkedBlockingQueue<T>();
        this.retryQueue = new LinkedBlockingQueue<T>();

        this.rejectedNotificationListeners = new Vector<RejectedNotificationListener<? super T>>();
        this.failedConnectionListeners = new Vector<FailedConnectionListener<? super T>>();

        this.environment = environment;
        this.sslContext = sslContext;

        this.concurrentConnectionCount = concurrentConnectionCount;
        this.writableConnectionPool = new ApnsConnectionPool<T>();
        this.activeConnections = new HashSet<ApnsConnection<T>>();

        // Track whether we own the event loop group so shutdown() knows whether to stop it.
        if (eventLoopGroup != null) {
            this.eventLoopGroup = eventLoopGroup;
            this.shouldShutDownEventLoopGroup = false;
        } else {
            this.eventLoopGroup = new NioEventLoopGroup();
            this.shouldShutDownEventLoopGroup = true;
        }

        // Same ownership rule for the listener executor service.
        if (listenerExecutorService != null) {
            this.listenerExecutorService = listenerExecutorService;
            this.shouldShutDownListenerExecutorService = false;
        } else {
            this.listenerExecutorService = Executors.newSingleThreadExecutor();
            this.shouldShutDownListenerExecutorService = true;
        }

        this.feedbackServiceClient = new FeedbackServiceClient(this.environment, this.sslContext, this.eventLoopGroup);
    }

    /**
     * Opens the initial set of connections and starts the dispatch thread. May be called at most once; a push manager
     * cannot be restarted after shutdown.
     *
     * @throws IllegalStateException if already started or already shut down
     */
    public synchronized void start() {
        if (this.isStarted()) {
            throw new IllegalStateException("Push manager has already been started.");
        }

        if (this.isShutDown()) {
            throw new IllegalStateException("Push manager has already been shut down and may not be restarted.");
        }

        log.info("Push manager starting.");

        for (int i = 0; i < this.concurrentConnectionCount; i++) {
            this.startNewConnection();
        }

        this.createAndStartDispatchThread();
    }

    // Also invoked by DispatchThreadExceptionHandler to replace a crashed dispatch thread.
    private void createAndStartDispatchThread() {
        this.dispatchThread = createDispatchThread();
        this.dispatchThread.setUncaughtExceptionHandler(new DispatchThreadExceptionHandler<T>(this));
        this.dispatchThread.start();
    }

    /**
     * Creates (but does not start) the thread that moves notifications from the retry queue (preferred) or the public
     * queue onto writable connections. Interrupts are used as a wake-up signal, not a stop signal; the loop exits only
     * when {@code shutDownStarted} becomes true.
     */
    protected Thread createDispatchThread() {
        return new Thread(new Runnable() {

            public void run() {
                while (!shutDownStarted) {
                    try {
                        final ApnsConnection<T> connection = writableConnectionPool.getNextConnection();

                        // Retried notifications always take precedence over new ones.
                        T notification = retryQueue.poll();

                        if (notification == null) {
                            // We'll park here either until a new notification is available from the outside or until
                            // something shows up in the retry queue, at which point we'll be interrupted.
                            notification = queue.take();
                        }

                        connection.sendNotification(notification);
                    } catch (InterruptedException e) {
                        // Interrupt means "re-check state" (retry queue, writability, shutdown) — loop again.
                        continue;
                    }
                }
            }
        });
    }

    /**
     * Indicates whether this push manager has been started and not yet shut down.
     *
     * @return {@code true} if this push manager has been started and has not yet been shut down or {@code false}
     * otherwise
     */
    public boolean isStarted() {
        if (this.isShutDown()) {
            return false;
        } else {
            return this.dispatchThread != null;
        }
    }

    /**
     * Indicates whether this push manager has been shut down (or is in the process of shutting down). Once a push
     * manager has been shut down, it may not be restarted.
     *
     * @return {@code true} if this push manager has been shut down or is in the process of shutting down or
     * {@code false} otherwise
     */
    public boolean isShutDown() {
        return this.shutDownStarted;
    }

    /**
     * Shuts down with no deadline; equivalent to {@code shutdown(0)}.
     *
     * @return the notifications that were never sent (retry queue plus public queue)
     */
    public synchronized List<T> shutdown() throws InterruptedException {
        return this.shutdown(0);
    }

    /**
     * Gracefully shuts down this push manager: closes connections, waits (up to {@code timeout} milliseconds, or
     * indefinitely if {@code timeout <= 0}) for in-flight operations, stops the dispatch thread, and releases any
     * internally-owned executor/event-loop resources.
     *
     * @param timeout maximum time in milliseconds to wait for pending operations; non-positive means wait forever
     * @return the notifications that were never sent (retry queue plus public queue)
     * @throws IllegalStateException if the manager was never started
     */
    public synchronized List<T> shutdown(long timeout) throws InterruptedException {
        if (this.isShutDown()) {
            log.warn("Push manager has already been shut down; shutting down multiple times is harmless, but may " +
                    "indicate a problem elsewhere.");
        } else {
            log.info("Push manager shutting down.");
        }

        if (this.shutDownFinished) {
            // Shutdown has already completed; repeating the tear-down would do nothing, so just re-collect and return
            // the same result without harm.
            final ArrayList<T> unsentNotifications = new ArrayList<T>();

            unsentNotifications.addAll(this.retryQueue);
            unsentNotifications.addAll(this.getQueue());

            return unsentNotifications;
        }

        if (!this.isStarted()) {
            throw new IllegalStateException("Push manager has not yet been started and cannot be shut down.");
        }

        this.shutDownStarted = true;

        // NOTE(review): activeConnections is iterated here without holding its monitor while other threads may still
        // mutate it via startNewConnection/removeActiveConnection — confirm intended.
        for (final ApnsConnection<T> connection : this.activeConnections) {
            connection.shutdownGracefully();
        }

        final Date deadline = timeout > 0 ? new Date(System.currentTimeMillis() + timeout) : null;

        this.waitForAllOperationsToFinish(deadline);

        // shutDownStarted is already true, so the interrupted dispatch thread will fall out of its loop.
        this.dispatchThread.interrupt();
        this.dispatchThread.join();

        this.rejectedNotificationListeners.clear();
        this.failedConnectionListeners.clear();

        // Only shut down resources this manager created itself.
        if (this.shouldShutDownListenerExecutorService) {
            this.listenerExecutorService.shutdown();
        }

        if (this.shouldShutDownEventLoopGroup) {
            if (!this.eventLoopGroup.isShutdown()) {
                this.eventLoopGroup.shutdownGracefully().await();
            }
        }

        this.shutDownFinished = true;

        final ArrayList<T> unsentNotifications = new ArrayList<T>();

        unsentNotifications.addAll(this.retryQueue);
        unsentNotifications.addAll(this.getQueue());

        return unsentNotifications;
    }

    /**
     * Registers a listener for notifications rejected by APNs for specific reasons.
     *
     * @param listener the listener to register
     * @throws IllegalStateException if this push manager has already been shut down
     */
    public void registerRejectedNotificationListener(final RejectedNotificationListener<? super T> listener) {
        if (this.isShutDown()) {
            throw new IllegalStateException("Rejected notification listeners may not be registered after a push manager has been shut down.");
        }

        this.rejectedNotificationListeners.add(listener);
    }

    /**
     * <p>Un-registers a rejected notification listener.</p>
     *
     * @param listener the listener to un-register
     *
     * @return {@code true} if the given listener was registered with this push manager and removed or {@code false} if
     * the listener was not already registered with this push manager
     */
    public boolean unregisterRejectedNotificationListener(final RejectedNotificationListener<? super T> listener) {
        return this.rejectedNotificationListeners.remove(listener);
    }

    /**
     * Registers a listener for failed attempts to connect to the APNs gateway.
     *
     * @param listener the listener to register
     * @throws IllegalStateException if this push manager has already been shut down
     */
    public void registerFailedConnectionListener(final FailedConnectionListener<? super T> listener) {
        if (this.isShutDown()) {
            throw new IllegalStateException("Failed connection listeners may not be registered after a push manager has been shut down.");
        }

        this.failedConnectionListeners.add(listener);
    }

    /**
     * <p>Un-registers a connection failure listener.</p>
     *
     * @param listener the listener to un-register
     *
     * @return {@code true} if the given listener was registered with this push manager and removed or {@code false} if
     * the listener was not already registered with this push manager
     */
    public boolean unregisterFailedConnectionListener(final FailedConnectionListener<? super T> listener) {
        return this.failedConnectionListeners.remove(listener);
    }

    /**
     * <p>Returns the queue of messages to be sent to the APNs gateway. Callers should add notifications to this queue
     * directly to send notifications. Notifications will be removed from this queue by Pushy when a send attempt is
     * started, but no guarantees are made as to when the notification will actually be sent. Successful delivery is
     * neither guaranteed nor acknowledged by the APNs gateway. Notifications rejected by APNs for specific reasons
     * will be passed to registered {@link RejectedNotificationListener}s, and notifications that could not be sent due
     * to temporary I/O problems will be scheduled for re-transmission in a separate, internal queue.</p>
     *
     * <p>Notifications in this queue will only be consumed when the {@code PushManager} is running and has active
     * connections and when the internal &quot;retry queue&quot; is empty.</p>
     *
     * @return the queue of new notifications to send to the APNs gateway
     *
     * @see PushManager#registerRejectedNotificationListener(RejectedNotificationListener)
     */
    public BlockingQueue<T> getQueue() {
        return this.queue;
    }

    /**
     * Queries the APNs feedback service for expired tokens with a default 1-second read timeout.
     */
    public List<ExpiredToken> getExpiredTokens() throws InterruptedException, FeedbackConnectionException {
        return this.getExpiredTokens(1, TimeUnit.SECONDS);
    }

    /**
     * Queries the APNs feedback service for expired tokens.
     *
     * @param timeout read timeout magnitude
     * @param timeoutUnit read timeout unit
     * @throws IllegalStateException if this manager is not started or already shut down
     */
    public List<ExpiredToken> getExpiredTokens(final long timeout, final TimeUnit timeoutUnit) throws InterruptedException, FeedbackConnectionException {
        if (!this.isStarted()) {
            throw new IllegalStateException("Push manager has not been started yet.");
        }

        if (this.isShutDown()) {
            throw new IllegalStateException("Push manager has already been shut down.");
        }

        return this.feedbackServiceClient.getExpiredTokens(timeout, timeoutUnit);
    }

    /*
     * (non-Javadoc)
     * @see com.relayrides.pushy.apns.ApnsConnectionListener#handleConnectionSuccess(com.relayrides.pushy.apns.ApnsConnection)
     */
    public void handleConnectionSuccess(final ApnsConnection<T> connection) {
        if (this.isShutDown()) {
            // We DON'T want to decrement the counter here; we'll do so when handleConnectionClosure fires later
            connection.shutdownImmediately();
        } else {
            this.writableConnectionPool.addConnection(connection);
        }
    }

    /*
     * (non-Javadoc)
     * @see com.relayrides.pushy.apns.ApnsConnectionListener#handleConnectionFailure(com.relayrides.pushy.apns.ApnsConnection, java.lang.Throwable)
     */
    public void handleConnectionFailure(final ApnsConnection<T> connection, final Throwable cause) {
        this.removeActiveConnection(connection);

        // We tried to open a connection, but failed. As long as we're not shut down, try to open a new one.
        final PushManager<T> pushManager = this;

        for (final FailedConnectionListener<? super T> listener : this.failedConnectionListeners) {

            // Handle connection failures in a separate thread in case a handler takes a long time to run
            this.listenerExecutorService.submit(new Runnable() {
                public void run() {
                    listener.handleFailedConnection(pushManager, cause);
                }
            });
        }

        // As long as we're not shut down, keep trying to open a replacement connection.
        if (!this.isShutDown()) {
            this.startNewConnection();
        }
    }

    /*
     * (non-Javadoc)
     * @see com.relayrides.pushy.apns.ApnsConnectionListener#handleConnectionWritabilityChange(com.relayrides.pushy.apns.ApnsConnection, boolean)
     */
    public void handleConnectionWritabilityChange(final ApnsConnection<T> connection, final boolean writable) {
        if (writable) {
            this.writableConnectionPool.addConnection(connection);
        } else {
            this.writableConnectionPool.removeConnection(connection);
            // Wake the dispatch thread so it stops waiting on a now-unwritable connection.
            this.dispatchThread.interrupt();
        }
    }

    /*
     * (non-Javadoc)
     * @see com.relayrides.pushy.apns.ApnsConnectionListener#handleConnectionClosure(com.relayrides.pushy.apns.ApnsConnection)
     */
    public void handleConnectionClosure(final ApnsConnection<T> connection) {

        if (!this.isShutDown()) {
            this.startNewConnection();
        }

        this.writableConnectionPool.removeConnection(connection);
        this.dispatchThread.interrupt();

        // Deactivate the connection only after its in-flight writes have settled, off the I/O thread.
        this.listenerExecutorService.execute(new Runnable() {
            public void run() {
                try {
                    connection.waitForPendingOperationsToFinish();
                    removeActiveConnection(connection);
                } catch (InterruptedException e) {
                    log.warn("Interrupted while waiting for closed connection's pending operations to finish.");
                }
            }
        });
    }

    /*
     * (non-Javadoc)
     * @see com.relayrides.pushy.apns.ApnsConnectionListener#handleWriteFailure(com.relayrides.pushy.apns.ApnsConnection, com.relayrides.pushy.apns.ApnsPushNotification, java.lang.Throwable)
     */
    public void handleWriteFailure(ApnsConnection<T> connection, T notification, Throwable cause) {
        // Transient I/O failure: queue for re-transmission and wake the dispatch thread.
        this.retryQueue.add(notification);
        this.dispatchThread.interrupt();
    }

    /*
     * (non-Javadoc)
     * @see com.relayrides.pushy.apns.ApnsConnectionListener#handleRejectedNotification(com.relayrides.pushy.apns.ApnsConnection, com.relayrides.pushy.apns.ApnsPushNotification, com.relayrides.pushy.apns.RejectedNotificationReason)
     */
    public void handleRejectedNotification(final ApnsConnection<T> connection, final T rejectedNotification,
            final RejectedNotificationReason reason) {

        final PushManager<T> pushManager = this;

        for (final RejectedNotificationListener<? super T> listener : this.rejectedNotificationListeners) {

            // Handle the notifications in a separate thread in case a listener takes a long time to run
            this.listenerExecutorService.execute(new Runnable() {
                public void run() {
                    listener.handleRejectedNotification(pushManager, rejectedNotification, reason);
                }
            });
        }
    }

    /*
     * (non-Javadoc)
     * @see com.relayrides.pushy.apns.ApnsConnectionListener#handleUnprocessedNotifications(com.relayrides.pushy.apns.ApnsConnection, java.util.Collection)
     */
    public void handleUnprocessedNotifications(ApnsConnection<T> connection, Collection<T> unprocessedNotifications) {
        this.retryQueue.addAll(unprocessedNotifications);
        this.dispatchThread.interrupt();
    }

    // Opens a new connection and records it as active; synchronized so the active set stays consistent.
    private void startNewConnection() {
        synchronized (this.activeConnections) {
            final ApnsConnection<T> connection = new ApnsConnection<T>(this.environment, this.sslContext, this.eventLoopGroup, this);
            connection.connect();

            this.activeConnections.add(connection);
        }
    }

    // Removes a connection from the active set and notifies waiters when the set drains (see waitForAllOperationsToFinish).
    private void removeActiveConnection(final ApnsConnection<T> connection) {
        synchronized (this.activeConnections) {
            final boolean removedConnection = this.activeConnections.remove(connection);
            assert removedConnection;

            if (this.activeConnections.isEmpty()) {
                this.activeConnections.notifyAll();
            }
        }
    }

    // Blocks until every active connection has been removed, or until the deadline (if non-null) passes.
    private void waitForAllOperationsToFinish(final Date deadline) throws InterruptedException {
        synchronized (this.activeConnections) {
            while (!this.activeConnections.isEmpty() && (deadline == null || deadline.getTime() > System.currentTimeMillis())) {
                if (deadline != null) {
                    // Wait at least 1 ms to avoid wait(0), which would block indefinitely.
                    this.activeConnections.wait(Math.max(deadline.getTime() - System.currentTimeMillis(), 1));
                } else {
                    this.activeConnections.wait();
                }
            }
        }
    }
}
package com.rox.emu.processor.mos6502;

import com.rox.emu.env.RoxByte;
import com.rox.emu.env.RoxWord;
import com.rox.emu.mem.Memory;
import com.rox.emu.processor.mos6502.op.OpCode;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static com.rox.emu.processor.mos6502.Registers.*;

/**
 * A emulated representation of MOS 6502, 8 bit
 * microprocessor functionality.
 *
 * XXX: At this point, we are only emulating the NES custom version of the 6502
 *
 * @author Ross Drew
 */
public class Mos6502 {
    private final Logger log = LoggerFactory.getLogger(this.getClass());

    private final Memory memory;
    private final Registers registers = new Registers();
    private final Mos6502Alu alu = new Mos6502Alu(registers);

    public Mos6502(Memory memory) {
        this.memory = memory;
    }

    /**
     * Reset the CPU; akin to firing the Reset pin on a 6502.<br/>
     * <br/>
     * This will
     * <ul>
     *     <li>Set Accumulator &rarr; <code>0</code></li>
     *     <li>Set Indexes &rarr; <code>0</code></li>
     *     <li>Status register &rarr; <code>0x34</code></li>
     *     <li>Set PC to the values at <code>0xFFFC</code> and <code>0xFFFD</code></li>
     *     <li>Reset Stack Pointer &rarr; 0xFF</li>
     * </ul>
     * <br/>
     * Note: IRL this takes 6 CPU cycles but we'll cross that bridge IF we come to it-
     */
    public void reset(){
        log.debug("RESETTING...");
        registers.setRegister(Register.ACCUMULATOR, RoxByte.ZERO);
        registers.setRegister(Register.X_INDEX, RoxByte.ZERO);
        registers.setRegister(Register.Y_INDEX, RoxByte.ZERO);
        registers.setRegister(Register.STATUS_FLAGS, RoxByte.fromLiteral(0x34));
        // NOTE(review): a real 6502 stores the LOW byte of the reset vector at 0xFFFC and the HIGH
        // byte at 0xFFFD; here 0xFFFC feeds PROGRAM_COUNTER_HI — confirm against this emulator's
        // Registers/Memory byte-order convention.
        registers.setRegister(Register.PROGRAM_COUNTER_HI, getByteOfMemoryAt(RoxWord.fromLiteral(0xFFFC)));
        registers.setRegister(Register.PROGRAM_COUNTER_LOW, getByteOfMemoryAt(RoxWord.fromLiteral(0xFFFD)));
        registers.setRegister(Register.STACK_POINTER_HI, RoxByte.fromLiteral(0xFF));  //XXX Should maybe be a max
        log.debug("...READY!");
    }

    /**
     * Fire an <b>I</b>nterrupt <b>R</b>e<b>Q</b>uest; akin to setting the IRQ pin on a 6502.<br/>
     * <br>
     * This will stash the PC and Status registers and set the Program Counter to the values at
     * <code>0xFFFE</code> and <code>0xFFFF</code> where the <b>I</b>nterrupt <b>S</b>ervice
     * <b>R</b>outine is expected to be
     */
    public void irq() {
        log.debug("IRQ!");
        registers.setFlag(Flag.IRQ_DISABLE);

        // Stash PC (hi, lo) then status on the stack, then jump through the IRQ vector.
        pushRegister(Register.PROGRAM_COUNTER_HI);
        pushRegister(Register.PROGRAM_COUNTER_LOW);
        pushRegister(Register.STATUS_FLAGS);

        registers.setRegister(Register.PROGRAM_COUNTER_HI, getByteOfMemoryAt(RoxWord.fromLiteral(0xFFFe)));
        registers.setRegister(Register.PROGRAM_COUNTER_LOW, getByteOfMemoryAt(RoxWord.fromLiteral(0xFFFF)));
    }

    /**
     * Fire a <b>N</b>on <b>M</b>askable <b>I</b>nterrupt; akin to setting the NMI pin on a 6502.<br/>
     * <br>
     * This will stash the PC and Status registers and set the Program Counter to the values at <code>0xFFFA</code>
     * and <code>0xFFFB</code> where the <b>I</b>nterrupt <b>S</b>ervice <b>R</b>outine is expected to be
     */
    public void nmi() {
        log.debug("NMI!");
        registers.setFlag(Flag.IRQ_DISABLE);

        pushRegister(Register.PROGRAM_COUNTER_HI);
        pushRegister(Register.PROGRAM_COUNTER_LOW);
        pushRegister(Register.STATUS_FLAGS);

        registers.setRegister(Register.PROGRAM_COUNTER_HI, getByteOfMemoryAt(RoxWord.fromLiteral(0xFFFA)));
        registers.setRegister(Register.PROGRAM_COUNTER_LOW, getByteOfMemoryAt(RoxWord.fromLiteral(0xFFFB)));
    }

    /**
     * @return the {@link Registers} being used
     */
    public Registers getRegisters(){
        return registers;
    }

    /**
     * Execute the next program instruction as per {@link Registers#getNextProgramCounter()}
     *
     * @param steps number of instructions to execute
     */
    public void step(int steps){
        for (int i=0; i<steps; i++)
            step();
    }

    /**
     * Execute the next program instruction as per {@link Registers#getNextProgramCounter()}
     */
    public void step() {
        log.debug("STEP >>>");

        final OpCode opCode = OpCode.from(nextProgramByte().getRawValue());

        //Execute the opcode
        log.debug("Instruction: {}...", opCode.getOpCodeName());
        switch (opCode){
            // The JMP instructions set the PC directly, which the generic per-opcode dispatch
            // (opCode.perform) cannot express, hence the special cases.
            case JMP_ABS:
                //this is hard to deal with using my functional enums approach
                registers.setPC(nextProgramWord());
                break;

            case JMP_IND:
                //this is hard to deal with using my functional enums approach
                registers.setPC(getWordOfMemoryAt(nextProgramWord()));
                break;

            default:
                opCode.perform(alu, registers, memory);
                break;
        }
    }

    private RoxByte getRegisterValue(Register registerID){
        return registers.getRegister(registerID);
    }

    /**
     * Return the next byte from program memory, as defined
     * by the Program Counter.<br/>
     * <br/>
     * <em>Increments the Program Counter by 1</em>
     *
     * @return byte {@code from mem[ PC[0] ]}
     */
    private RoxByte nextProgramByte(){
        return getByteOfMemoryAt(registers.getAndStepProgramCounter());
    }

    /**
     * Combine the next two bytes in program memory, as defined by
     * the Program Counter into a word so that:-
     *
     * PC[0] = high order byte
     * PC[1] = low order byte
     *<br/><br/>
     * <em>Increments the Program Counter by 1</em>
     *
     * @return word made up of both bytes
     */
    private RoxWord nextProgramWord(){
        return RoxWord.from(nextProgramByte(), nextProgramByte());
    }

    private void pushRegister(Register registerID){
        push(getRegisterValue(registerID));
    }

    /**
     * Pop a byte off the descending stack in page 0x01: the stack pointer is
     * incremented first, then the byte at 0x01:SP is read.
     *
     * @return {@link RoxByte} popped from the stack
     */
    private RoxByte pop(){
        registers.setRegister(Register.STACK_POINTER_HI,
                              RoxByte.fromLiteral(getRegisterValue(Register.STACK_POINTER_HI).getRawValue() + 1));
        RoxWord address = RoxWord.from(RoxByte.fromLiteral(0x01), getRegisterValue(Register.STACK_POINTER_HI));
        RoxByte value = getByteOfMemoryAt(address);
        debug("POP {}(0b{}) from mem[0x{}]", value.toString(),
                                             Integer.toBinaryString(value.getRawValue()),
                                             Integer.toHexString(address.getRawValue()).toUpperCase());
        return value;
    }

    /**
     * Push a byte onto the descending stack in page 0x01: the byte is written
     * at 0x01:SP, then the stack pointer is decremented.
     *
     * @param value {@link RoxByte} to push to the stack
     */
    private void push(RoxByte value){
        debug("PUSH {}(0b{}) to mem[0x{}]", value.toString(),
                                            Integer.toBinaryString(value.getRawValue()),
                                            Integer.toHexString(getRegisterValue(Register.STACK_POINTER_HI).getRawValue()).toUpperCase());
        setByteOfMemoryAt(RoxWord.from(RoxByte.fromLiteral(0x01), getRegisterValue(Register.STACK_POINTER_HI)), value);
        registers.setRegister(Register.STACK_POINTER_HI,
                              RoxByte.fromLiteral(getRegisterValue(Register.STACK_POINTER_HI).getRawValue() - 1));
    }

    private RoxByte getByteOfMemoryAt(RoxWord location){
        final RoxByte memoryByte = memory.getByte(RoxWord.fromLiteral(location.getRawValue()));
        debug("Got 0x{} from mem[{}]", Integer.toHexString(memoryByte.getRawValue()), Integer.toString(location.getRawValue()));
        return memoryByte;
    }

    private void setByteOfMemoryAt(RoxWord location, RoxByte newByte){
        memory.setByteAt(RoxWord.fromLiteral(location.getRawValue()), newByte);
        debug("Stored 0x{} at mem[{}]", Integer.toHexString(newByte.getRawValue()), Integer.toString(location.getRawValue()));
    }

    private RoxWord getWordOfMemoryAt(RoxWord location) {
        final RoxWord memoryWord = memory.getWord(location);
        debug("Got 0x{} from mem[{}]", Integer.toHexString(memoryWord.getRawValue()), location.toString());
        return memoryWord;
    }

    // Guarded debug logging helper; avoids argument construction cost when debug is disabled.
    private void debug(final String message, String ... args){
        if (log.isDebugEnabled())
            log.debug(message, args);
    }
}
package com.scottkillen.mod.dendrology;

import com.scottkillen.mod.dendrology.block.ModBlocks;
import com.scottkillen.mod.dendrology.config.ConfigHandler;
import com.scottkillen.mod.dendrology.item.ModItems;
import com.scottkillen.mod.dendrology.world.gen.BeechGenerator;
import cpw.mods.fml.common.FMLCommonHandler;
import cpw.mods.fml.common.Mod;
import cpw.mods.fml.common.event.FMLInitializationEvent;
import cpw.mods.fml.common.event.FMLPostInitializationEvent;
import cpw.mods.fml.common.event.FMLPreInitializationEvent;
import net.minecraft.creativetab.CreativeTabs;
import net.minecraft.init.Blocks;
import net.minecraft.item.Item;

/**
 * Mod entry point for Dendrology. FML discovers this class via the {@link Mod} annotation and
 * invokes the {@code @Mod.EventHandler} methods at the corresponding lifecycle phases.
 */
@Mod(modid = TheMod.MOD_ID, name = TheMod.MOD_NAME, version = TheMod.MOD_VERSION, useMetadata = true,
        guiFactory = TheMod.MOD_GUI_FACTORY)
public class TheMod
{
    public static final String MOD_ID = "dendrology";
    public static final String MOD_NAME = "Dendrology";
    // Prefix used for resource (texture/model) locations, e.g. "dendrology:...".
    public static final String RESOURCE_PREFIX = MOD_ID.toLowerCase() + ':';

    // Creative-mode tab for this mod's blocks/items; icon borrows the vanilla log item.
    @SuppressWarnings("AnonymousInnerClass")
    public static final CreativeTabs CREATIVE_TAB = new CreativeTabs(MOD_ID.toLowerCase())
    {
        @Override
        public Item getTabIconItem()
        {
            return Item.getItemFromBlock(Blocks.log);
        }
    };

    // "@MOD_VERSION@" is substituted by the build at packaging time.
    @SuppressWarnings("WeakerAccess")
    static final String MOD_VERSION = "@MOD_VERSION@";
    @SuppressWarnings("WeakerAccess")
    static final String MOD_GUI_FACTORY = "com.scottkillen.mod.dendrology.config.client.ModGuiFactory";

    /**
     * Pre-init phase: load configuration, then register items and blocks.
     */
    @SuppressWarnings("MethodMayBeStatic")
    @Mod.EventHandler
    public void onFMLPreInitialization(FMLPreInitializationEvent event)
    {
        ConfigHandler.init(event.getSuggestedConfigurationFile());

        ModItems.init();
        ModBlocks.init();
    }

    /**
     * Init phase: hook the config handler into the FML event bus and register world generation.
     */
    @SuppressWarnings({ "UnusedParameters", "MethodMayBeStatic" })
    @Mod.EventHandler
    public void onFMLInitialization(FMLInitializationEvent event)
    {
        FMLCommonHandler.instance().bus().register(ConfigHandler.INSTANCE);
        // Recipes.init();
        BeechGenerator.init();
    }

    @SuppressWarnings("UnusedParameters")
    @Mod.EventHandler
    public void onFMLPostInitialization(FMLPostInitializationEvent event)
    {
        // TODO: Handle interaction with other mods, complete your setup based on this.
    }
}
package com.sdl.selenium.web.utils;

import com.google.common.base.Strings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.List;
import java.util.concurrent.Callable;

/**
 * Helpers that repeatedly invoke an operation until it yields a "useful" result (non-null,
 * non-empty, {@code true}, ...) or a maximum number of attempts is reached. Attempts are spaced
 * by an exponentially growing sleep starting at 10 ms (10, 20, 40, ... milliseconds).
 */
public class RetryUtils {
    private static final Logger LOGGER = LoggerFactory.getLogger(RetryUtils.class);

    /** Supplier of a String that may throw an {@link AssertionError}; used by the string-retry helpers. */
    @FunctionalInterface
    public interface WaitIfIsNullOrEmpty {
        String run() throws AssertionError;
    }

    /**
     * Retries {@code t} until it produces an expected result or {@code maxRetries} attempts
     * have been made. If the final attempt throws, the failure is rethrown as a
     * {@link RuntimeException} with the original cause attached.
     *
     * @param maxRetries maximum number of attempts
     * @param t          operation to retry
     * @param <V>        result type
     * @return the last result produced by {@code t}
     */
    public static <V> V retry(int maxRetries, Callable<V> t) {
        return retry(maxRetries, t, false);
    }

    private static <V> V retry(int maxRetries, Callable<V> t, boolean safe) {
        int count = 0;
        long wait = 0;
        V execute = null;
        do {
            count++;
            // Exponential backoff: 10 ms before the first attempt, doubling thereafter.
            wait = wait == 0 ? 10 : wait * 2;
            Utils.sleep(wait);
            try {
                execute = t.call();
            } catch (Exception | AssertionError e) {
                // In safe mode every failure is swallowed and retried (a null result may be
                // returned after the last attempt). Otherwise only intermediate failures are
                // retried; the failure on the final attempt is propagated with its cause.
                if (!safe) {
                    if (count >= maxRetries) {
                        LOGGER.error("Retry {} and wait {} milliseconds ->{}", count, wait, e);
                        throw new RuntimeException(e.getMessage(), e);
                    }
                }
            }
        } while ((execute == null || isNotExpected(execute)) && count < maxRetries);
        if (count > 1) {
            LOGGER.info("Retry {} and wait {} milliseconds", count, wait);
        }
        return execute;
    }

    /**
     * @deprecated use {@link #retry(int, Callable)} instead.
     */
    @Deprecated
    public static <V> V retryWithSuccess(int maxRetries, Callable<V> t) {
        return retry(maxRetries, t);
    }

    /**
     * Like {@link #retry(int, Callable)}, but never throws on failure: exceptions and assertion
     * errors are swallowed on every attempt and {@code null} may be returned.
     */
    public static <V> V retrySafe(int maxRetries, Callable<V> t) {
        return retry(maxRetries, t, true);
    }

    /**
     * Decides whether a result should trigger another attempt: {@code false} booleans,
     * null/empty strings, empty lists and {@code null} are all "not expected".
     */
    private static <V> boolean isNotExpected(V execute) {
        if (execute instanceof Boolean) {
            return !(Boolean) execute;
        } else if (execute instanceof String) {
            return Strings.isNullOrEmpty((String) execute);
        } else if (execute instanceof List) {
            return ((List<?>) execute).isEmpty();
        }
        return execute == null;
    }

    /**
     * Retries {@code t} (with no sleep between attempts) until it returns a non-null, non-empty
     * string or {@code maxRetries} attempts have been made.
     *
     * @deprecated use {@link #retry(int, Callable)} instead.
     */
    @Deprecated
    public static String waitIfIsNullOrEmpty(int maxRetries, WaitIfIsNullOrEmpty t) {
        int count = 0;
        String text;
        do {
            text = t.run();
            count++;
        } while (Strings.isNullOrEmpty(text) && count < maxRetries);
        return text;
    }

    /**
     * Retries {@code t} (with no sleep between attempts) until it returns {@code expected} or
     * {@code maxRetries} attempts have been made; returns the last value produced.
     *
     * @param maxRetries maximum number of attempts
     * @param expected   the value to wait for (compared with {@link String#equals})
     * @param t          operation producing the string to check
     * @return the last string produced by {@code t} (equal to {@code expected} on success)
     */
    public static String retryIfNotSame(int maxRetries, String expected, WaitIfIsNullOrEmpty t) {
        int count = 0;
        String text;
        do {
            text = t.run();
            count++;
            // BUGFIX: the previous condition (isNullOrEmpty(text) && expected.equals(text))
            // could essentially never be true, so the method never retried. Retry while the
            // result is still missing OR differs from the expected value.
        } while ((Strings.isNullOrEmpty(text) || !expected.equals(text)) && count < maxRetries);
        return text;
    }
}
package com.secret.fastalign.main; import jaligner.Alignment; import jaligner.SmithWatermanGotoh; import jaligner.matrix.MatrixLoader; import jaligner.matrix.MatrixLoaderException; import java.io.BufferedReader; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStreamReader; import java.util.Calendar; import java.util.GregorianCalendar; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Random; import java.util.logging.Level; import java.util.logging.Logger; import com.secret.fastalign.general.FastaData; import com.secret.fastalign.general.Sequence; import com.secret.fastalign.utils.IntervalTree; import com.secret.fastalign.utils.Utils; public class EstimateROC { private static final double MIN_IDENTITY = 0.60; private static final int DEFAULT_NUM_TRIALS = 10000; private static final int DEFAULT_MIN_OVL = 500; private static class Pair { public int first; public int second; public Pair(int startInRef, int endInRef) { this.first = startInRef; this.second = endInRef; } @SuppressWarnings("unused") public int size() { return (Math.max(this.first, this.second) - Math.min(this.first, this.second) + 1); } } private static class Overlap { public int afirst; public int bfirst; public int asecond; public int bsecond; public boolean isFwd; public String id1; public String id2; public Overlap() { // do nothing } @Override public String toString() { StringBuilder stringBuilder = new StringBuilder(); stringBuilder.append("Overlap Aid="); stringBuilder.append(this.id1); stringBuilder.append(" ("); stringBuilder.append(this.afirst); stringBuilder.append(", "); stringBuilder.append(this.asecond); stringBuilder.append("), Bid="); stringBuilder.append(this.id2); stringBuilder.append(" ("); stringBuilder.append(this.bfirst); stringBuilder.append("), "); stringBuilder.append(this.bsecond); return stringBuilder.toString(); } } private static Random generator = null; public static int seed = 
0; private HashMap<String, IntervalTree<Integer>> clusters = new HashMap<String, IntervalTree<Integer>>(); private HashMap<String, String> seqToChr = new HashMap<String, String>(10000000); private HashMap<String, Pair> seqToPosition = new HashMap<String, Pair>(10000000); private HashMap<Integer, String> seqToName = new HashMap<Integer, String>(10000000); private HashSet<String> ovlNames = new HashSet<String>(10000000*100); private HashMap<String, Overlap> ovlInfo = new HashMap<String, Overlap>(10000000*100); private HashMap<Integer, String> ovlToName = new HashMap<Integer, String>(10000000*100); private int minOvlLen = DEFAULT_MIN_OVL; private int numTrials = DEFAULT_NUM_TRIALS; private long tp = 0; private long fn = 0; private long tn = 0; private long fp = 0; private double ppv = 0; private Sequence[] dataSeq = null; public static void printUsage() { System.err .println("This program uses random sampling to estimate PPV/Sensitivity/Specificity"); System.err.println("The program requires 2 arguments:"); System.err .println("\t1. A blasr M4 file mapping sequences to a reference (or reference subset)"); System.err .println("\t2. All-vs-all mappings of same sequences in CA ovl format"); System.err.println("\t3. Minimum overlap length (default: " + DEFAULT_MIN_OVL); System.err.println("\t4. Number of random trials, 0 means full compute (default : " + DEFAULT_NUM_TRIALS); System.err.println("\t5. Sequences in fasta format."); } public static void main(String[] args) throws Exception { if (args.length < 2) { printUsage(); System.exit(1); } EstimateROC g = null; if (args.length > 3) { g = new EstimateROC(Integer.parseInt(args[2]), Integer.parseInt(args[3])); } else if (args.length > 2) { g = new EstimateROC(Integer.parseInt(args[2])); } else { g = new EstimateROC(); } System.err.println("Running, reference: " + args[0] + " matches: " + args[1]); System.err.println("Number trials: " + (g.numTrials == 0 ? 
"all" : g.numTrials)); System.err.println("Minimum ovl: " + g.minOvlLen); // load and cluster reference System.err.print("Loading reference..."); long startTime = System.nanoTime(); long totalTime = startTime; g.processReference(args[0]); System.err.println("done " + (System.nanoTime() - startTime) * 1.0e-9 + "s."); if (args.length > 4) { // load fasta System.err.print("Loading fasta..."); startTime = System.nanoTime(); g.loadFasta(args[4]); System.err.println("done " + (System.nanoTime() - startTime) * 1.0e-9 + "s."); } // load matches System.err.print("Loading matches..."); startTime = System.nanoTime(); g.processOverlaps(args[1]); System.err.println("done " + (System.nanoTime() - startTime) * 1.0e-9 + "s."); if (g.numTrials == 0) { System.err.print("Computing full statistics O(" + g.seqToName.size() + "^2) operations!..."); startTime = System.nanoTime(); g.fullEstimate(); System.err.println("done " + (System.nanoTime() - startTime) * 1.0e-9 + "s."); } else { System.err.print("Computing sensitivity..."); startTime = System.nanoTime(); g.estimateSensitivity(); System.err.println("done " + (System.nanoTime() - startTime) * 1.0e-9 + "s."); // now estimate FP/TN by picking random match and checking reference // mapping System.err.print("Computing specificity..."); startTime = System.nanoTime(); g.estimateSpecificity(); System.err.println("done " + (System.nanoTime() - startTime) * 1.0e-9 + "s."); // last but not least PPV, pick random subset of our matches and see what percentage are true System.err.print("Computing PPV..."); startTime = System.nanoTime(); g.estimatePPV(); System.err.println("done " + (System.nanoTime() - startTime) * 1.0e-9 + "s."); } System.err.println("Total time: " + (System.nanoTime() - totalTime) * 1.0e-9 + "s."); System.out.println("Estimated sensitivity:\t" + Utils.DECIMAL_FORMAT.format((double) g.tp / (g.tp + g.fn))); System.out.println("Estimated specificity:\t" + Utils.DECIMAL_FORMAT.format((double) g.tn / (g.fp + g.tn))); 
System.out.println("Estimated PPV:\t " + Utils.DECIMAL_FORMAT.format(g.ppv)); } public EstimateROC() { this(DEFAULT_MIN_OVL, DEFAULT_NUM_TRIALS); } public EstimateROC(int minOvlLen) { this(minOvlLen, DEFAULT_NUM_TRIALS); } @SuppressWarnings("unused") public EstimateROC(int minOvlLen, int numTrials) { this.minOvlLen = minOvlLen; this.numTrials = numTrials; if (false) { GregorianCalendar t = new GregorianCalendar(); int t1 = t.get(Calendar.SECOND); int t2 = t.get(Calendar.MINUTE); int t3 = t.get(Calendar.HOUR_OF_DAY); int t4 = t.get(Calendar.DAY_OF_MONTH); int t5 = t.get(Calendar.MONTH); int t6 = t.get(Calendar.YEAR); seed = t6 + 65 * (t5 + 12 * (t4 + 31 * (t3 + 24 * (t2 + 60 * t1)))); } generator = new Random(seed); } private String getOvlName(String id, String id2) { return (id.compareTo(id2) <= 0 ? id + "_" + id2 : id2 + "_" + id); } private String pickRandomSequence() { int val = generator.nextInt(this.seqToName.size()); return this.seqToName.get(val); } private String pickRandomMatch() { int val = generator.nextInt(this.ovlToName.size()); return this.ovlToName.get(val); } private int getOverlapSize(String id, String id2) { String chr = this.seqToChr.get(id); String chr2 = this.seqToChr.get(id2); Pair p1 = this.seqToPosition.get(id); Pair p2 = this.seqToPosition.get(id2); if (!chr.equalsIgnoreCase(chr2)) { System.err.println("Error: comparing wrong chromosomes!"); System.exit(1); } return Utils.getRangeOverlap(p1.first, p1.second, p2.first, p2.second); } private HashSet<String> getSequenceMatches(String id, int min) { String chr = this.seqToChr.get(id); Pair p1 = this.seqToPosition.get(id); List<Integer> intersect = this.clusters.get(chr).get(p1.first, p1.second); HashSet<String> result = new HashSet<String>(); Iterator<Integer> it = intersect.iterator(); while (it.hasNext()) { String id2 = this.seqToName.get(it.next()); Pair p2 = this.seqToPosition.get(id2); String chr2 = this.seqToChr.get(id2); if (!chr.equalsIgnoreCase(chr2)) { System.err.println("Error: 
comparing wrong chromosomes!"); System.exit(1); } int overlap = Utils.getRangeOverlap(p1.first, p1.second, p2.first, p2.second); if (overlap >= min && !id.equalsIgnoreCase(id2)) { result.add(id2); } } return result; } @SuppressWarnings("unused") private Overlap getOverlapInfo(String line) { Overlap overlap = new Overlap(); String[] splitLine = line.trim().split("\\s+"); try { if (splitLine.length == 7 || splitLine.length == 6) { overlap.id1 = splitLine[0]; overlap.id2 = splitLine[1]; double score = Double.parseDouble(splitLine[5]) * 5; int aoffset = Integer.parseInt(splitLine[3]); int boffset = Integer.parseInt(splitLine[4]); boolean isFwd = ("N".equals(splitLine[2])); if (this.dataSeq != null) { int alen = this.dataSeq[Integer.parseInt(overlap.id1)-1].length(); int blen = this.dataSeq[Integer.parseInt(overlap.id2)-1].length(); overlap.afirst = Math.max(0, aoffset); overlap.asecond = Math.min(alen, alen + boffset); overlap.bfirst = -1*Math.min(0, aoffset); overlap.bsecond = Math.min(blen, blen - boffset); } } else if (splitLine.length == 13) { overlap.afirst = Integer.parseInt(splitLine[5]); overlap.asecond = Integer.parseInt(splitLine[6]); overlap.bfirst = Integer.parseInt(splitLine[9]); overlap.bsecond = Integer.parseInt(splitLine[10]); overlap.isFwd = (Integer.parseInt(splitLine[8]) == 0); if (!overlap.isFwd) { overlap.bsecond = Integer.parseInt(splitLine[11]) - Integer.parseInt(splitLine[9]); overlap.bfirst = Integer.parseInt(splitLine[11]) - Integer.parseInt(splitLine[10]); } overlap.id1 = splitLine[0]; if (overlap.id1.indexOf("/") != -1) { overlap.id1 = overlap.id1.substring(0, splitLine[0].indexOf("/")); } if (overlap.id1.indexOf(",") != -1) { overlap.id1 = overlap.id1.split(",")[1]; } overlap.id2 = splitLine[1]; if (overlap.id2.indexOf(",") != -1) { overlap.id2 = overlap.id2.split(",")[1]; } } } catch (NumberFormatException e) { System.err.println("Warning: could not parse input line: " + line + " " + e.getMessage()); } return overlap; } private void 
loadFasta(String file) throws IOException { FastaData data = new FastaData(file, 0); data.enqueueFullFile(); this.dataSeq = data.toArray(); } private void processOverlaps(String file) throws Exception { BufferedReader bf = new BufferedReader(new InputStreamReader( new FileInputStream(file))); String line = null; int counter = 0; while ((line = bf.readLine()) != null) { Overlap ovl = getOverlapInfo(line); String id = ovl.id1; String id2 = ovl.id2; if (id == null || id2 == null) { continue; } if (id.equalsIgnoreCase(id2)) { continue; } if (this.seqToChr.get(id) == null || this.seqToChr.get(id2) == null) { continue; } String ovlName = getOvlName(id, id2); if (this.ovlNames.contains(ovlName)) { continue; } this.ovlNames.add(ovlName); this.ovlToName.put(counter, ovlName); this.ovlInfo.put(ovlName, ovl); counter++; if (counter % 100000 == 0) { System.err.println("Loaded " + counter); } } System.err.print("Processed " + this.ovlNames.size() + " overlaps"); if (this.ovlNames.isEmpty()) { System.err .println("Error: No sequence matches to reference loaded!"); System.exit(1); } bf.close(); } /** * We are parsing file of the format 18903/0_100 ref000001|lambda_NEB3011 * -462 96.9697 0 0 99 100 0 2 101 48502 254 21589/0_100 * ref000001|lambda_NEB3011 -500 100 0 0 100 100 1 4 104 48502 254 * 15630/0_100 ref000001|lambda_NEB3011 -478 98 0 0 100 100 0 5 105 48502 * 254 **/ @SuppressWarnings("unused") private void processReference(String file) throws Exception { BufferedReader bf = new BufferedReader(new InputStreamReader( new FileInputStream(file))); String line = null; int counter = 0; while ((line = bf.readLine()) != null) { String[] splitLine = line.trim().split("\\s+"); String id = splitLine[0]; if (id.indexOf("/") != -1) { id = id.substring(0, splitLine[0].indexOf("/")); } int start = Integer.parseInt(splitLine[5]); int end = Integer.parseInt(splitLine[6]); int length = Integer.parseInt(splitLine[7]); int startInRef = Integer.parseInt(splitLine[9]); int endInRef = 
Integer.parseInt(splitLine[10]); String chr = splitLine[1]; if (!this.clusters.containsKey(chr)) { this.clusters.put(chr, new IntervalTree<Integer>()); } this.clusters.get(chr).addInterval(startInRef, endInRef, counter); this.seqToPosition.put(id, new Pair(startInRef, endInRef)); this.seqToChr.put(id, chr); this.seqToName.put(counter, id); counter++; } bf.close(); for (String chr : this.clusters.keySet()) { this.clusters.get(chr).build(); } System.err.print("Processed " + this.clusters.size() + " chromosomes, " + this.seqToPosition.size() + " sequences matching ref"); if (this.seqToPosition.isEmpty()) { System.err .println("Error: No sequence matches to reference loaded!"); System.exit(1); } } private boolean overlapExists(String id, String id2) { return this.ovlNames.contains(getOvlName(id, id2)); } private void checkMatches(String id, HashSet<String> matches) { for (String m : matches) { if (overlapExists(id, m)) { this.tp++; } else { this.fn++; } } } private boolean computeDP(String id, String id2) { if (this.dataSeq == null) { return false; } Logger logger = Logger.getLogger(SmithWatermanGotoh.class.getName()); logger.setLevel(Level.OFF); logger = Logger.getLogger(MatrixLoader.class.getName()); logger.setLevel(Level.OFF); Overlap ovl = this.ovlInfo.get(getOvlName(id, id2)); jaligner.Sequence s1 = new jaligner.Sequence(this.dataSeq[Integer.parseInt(ovl.id1)-1].toString().substring(ovl.afirst, ovl.asecond)); jaligner.Sequence s2 = null; if (ovl.isFwd) { s2 = new jaligner.Sequence(this.dataSeq[Integer.parseInt(ovl.id2)-1].toString().substring(ovl.bfirst, ovl.bsecond)); } else { s2 = new jaligner.Sequence(this.dataSeq[Integer.parseInt(ovl.id2)-1].getReverseCompliment().toString().substring(ovl.bfirst, ovl.bsecond)); } Alignment alignment; try { alignment = SmithWatermanGotoh.align(s1, s2, MatrixLoader.load("IDENTITY"), 2f, 1f); } catch (MatrixLoaderException e) { return false; } return ((double)alignment.getSimilarity()/s1.length() > MIN_IDENTITY); } private void 
estimateSensitivity() { // we estimate TP/FN by randomly picking a sequence, getting its // cluster, and checking our matches for (int i = 0; i < this.numTrials; i++) { // pick cluster String id = pickRandomSequence(); HashSet<String> matches = getSequenceMatches(id, this.minOvlLen); checkMatches(id, matches); } } private void estimateSpecificity() { // we estimate FP/TN by randomly picking two sequences for (int i = 0; i < this.numTrials; i++) { // pick cluster String id = pickRandomSequence(); String other = pickRandomSequence(); while (id.equalsIgnoreCase(other)) { other = pickRandomSequence(); } HashSet<String> matches = getSequenceMatches(id, 0); if (overlapExists(id, other)) { if (!matches.contains(other)) { this.fp++; } } else { if (!matches.contains(other)) { this.tn++; } } } } private void estimatePPV() { int numTP = 0; for (int i = 0; i < this.numTrials; i++) { // pick an overlap String[] ovl = pickRandomMatch().split("_"); String id = ovl[0]; String id2 = ovl[1]; HashSet<String> matches = getSequenceMatches(id, 0); if (matches.contains(id2)) { numTP++; } else { if (computeDP(id, id2)) { numTP++; } } } // now our formula for PPV. Estimate percent of our matches which are true this.ppv = (double)numTP / this.numTrials; } private void fullEstimate() { for (int i = 0; i < this.seqToName.size(); i++) { String id = this.seqToName.get(i); for (int j = i+1; j < this.seqToName.size(); j++) { String id2 = this.seqToName.get(j); if (id == null || id2 == null) { continue; } HashSet<String> matches = getSequenceMatches(id, 0); if (!overlapExists(id, id2)) { if (!matches.contains(id2)) { this.tn++; } else if (getOverlapSize(id, id2) > this.minOvlLen) { this.fn++; } } else { if (matches.contains(id2)) { this.tp++; } else { if (computeDP(id, id2)) { this.tp++; } else { this.fp++; } } } } } this.ppv = (double)this.tp / (this.tp+this.fp); } }
package com.skelril.aurora; import com.google.common.collect.Lists; import com.sk89q.commandbook.CommandBook; import com.sk89q.commandbook.commands.PaginatedResult; import com.sk89q.commandbook.session.PersistentSession; import com.sk89q.commandbook.session.SessionComponent; import com.sk89q.minecraft.util.commands.*; import com.skelril.aurora.util.ChatUtil; import com.skelril.aurora.util.timer.IntegratedRunnable; import com.skelril.aurora.util.timer.TimedRunnable; import com.skelril.aurora.util.timer.TimerUtil; import com.zachsthings.libcomponents.ComponentInformation; import com.zachsthings.libcomponents.Depend; import com.zachsthings.libcomponents.InjectComponent; import com.zachsthings.libcomponents.bukkit.BukkitComponent; import com.zachsthings.libcomponents.config.ConfigurationBase; import com.zachsthings.libcomponents.config.Setting; import org.bukkit.*; import org.bukkit.command.CommandSender; import org.bukkit.entity.Entity; import org.bukkit.entity.EntityType; import org.bukkit.entity.Player; import java.text.DecimalFormat; import java.util.*; import java.util.logging.Logger; /** * @author Turtle9598 */ @ComponentInformation(friendlyName = "Auto Clear", desc = "Automatically clears items on the ground.") @Depend(components = {SessionComponent.class}) public class AutoClearComponent extends BukkitComponent implements Runnable { private final CommandBook inst = CommandBook.inst(); private final Logger log = CommandBook.logger(); private final Server server = CommandBook.server(); @InjectComponent private SessionComponent sessions; private LocalConfiguration config; private HashMap<World, TimedRunnable> worldTimer = new HashMap<>(); private Map<World, Collection<ChunkStats>> lastClear = new HashMap<>(); @Override public void enable() { this.config = configure(new LocalConfiguration()); registerCommands(Commands.class); server.getScheduler().scheduleSyncRepeatingTask(inst, this, 20 * 2, 10); } @Override public void reload() { super.reload(); 
configure(config); } private static class LocalConfiguration extends ConfigurationBase { @Setting("min-item-count") public int itemCountMin = 1000; @Setting("max-delay") public int maxDelay = 120; } private static Set<EntityType> checkedEntities = new HashSet<>(); static { checkedEntities.add(EntityType.DROPPED_ITEM); checkedEntities.add(EntityType.ARROW); checkedEntities.add(EntityType.EXPERIENCE_ORB); } @Override public void run() { for (World world : server.getWorlds()) { int itemCount = checkEntities(world).getEntities().size(); if (itemCount >= config.itemCountMin) { dropClear(world, itemCount >= (config.itemCountMin * 3) ? 0 : 10, false); } } } private CheckProfile checkEntities(World world) { Set<Entity> entities = new HashSet<>(); Set<ChunkStats> stats = new HashSet<>(); Chunk[] loaded = world.getLoadedChunks(); for (Chunk chunk : loaded) { ChunkStats cs = new ChunkStats(chunk); for (Entity e : chunk.getEntities()) { checkedEntities.stream().filter(eType -> eType == e.getType()).forEach(eType -> { cs.increase(eType, 1); entities.add(e); }); } if (cs.total() < 1) continue; stats.add(cs); } return new CheckProfile(entities, stats); } public class Commands { @Command(aliases = {"dropclear", "dc"}, usage = "[seconds] [world] or <world> [seconds]", desc = "Clear all drops", min = 0, max = 2) @CommandPermissions({"aurora.dropclear"}) public void dropClearCmd(CommandContext args, CommandSender sender) throws CommandException { World world; int seconds = 10; if (sender instanceof Player) { world = ((Player) sender).getWorld(); if (args.argsLength() > 1) { world = Bukkit.getWorld(args.getString(1)); seconds = args.getInteger(0); } else if (args.argsLength() == 1) { seconds = args.getInteger(0); } } else { if (args.argsLength() == 0) { throw new CommandException("You are not a player and must specify a world!"); } else if (args.argsLength() == 1) { world = Bukkit.getWorld(args.getString(0)); } else { world = Bukkit.getWorld(args.getString(0)); seconds = 
args.getInteger(1); } } if (world == null) { throw new CommandException("No world by that name found!"); } dropClear(world, Math.max(0, Math.min(seconds, config.maxDelay)), true); } @Command(aliases = {"dropstats", "ds"}, desc = "Drop statistics") @NestedCommand(DropStatsCommands.class) @CommandPermissions({"aurora.dropclear.stats"}) public void dropStatsCmds(CommandContext args, CommandSender sender) { } } public class DropStatsCommands { @Command(aliases = {"update"}, usage = "<world>", desc = "Updates your copy of stats for that world", min = 1, max = 1) public void updateCmd(CommandContext args, CommandSender sender) throws CommandException { World world = Bukkit.getWorld(args.getString(0)); if (world == null) { throw new CommandException("No world by that name found!"); } sessions.getSession(DropClearObserver.class, sender).setStats(world, checkEntities(world).getStats()); ChatUtil.sendNotice(sender, "Stats updated."); } @Command(aliases = {"info"}, usage = "<world> <drop|last> <x> <z>", desc = "View details of a certain chunk", min = 4, max = 4) public void infoCmd(CommandContext args, CommandSender sender) throws CommandException { World world = Bukkit.getWorld(args.getString(0)); if (world == null) { throw new CommandException("No world by that name found!"); } Collection<ChunkStats> stats; String typeString = args.getString(1); if (typeString.equalsIgnoreCase("drop")) { stats = lastClear.get(world); if (stats == null) { throw new CommandException("No recent drop clears on record."); } } else if (typeString.equalsIgnoreCase("last")) { stats = sessions.getSession(DropClearObserver.class, sender).getStats(world); if (stats == null) { throw new CommandException("No snapshots on record."); } } else { throw new CommandException("Unsupported stats specified!"); } int x = args.getInteger(2); int z = args.getInteger(3); ChunkStats target = null; for (ChunkStats cs : stats) { if (cs.getX() == x && cs.getZ() == z) { target = cs; break; } } if (target == null) { throw 
new CommandException("That chunk could not be found in the specified record"); } ChatUtil.sendNotice(sender, ChatColor.GOLD, "Chunk stats: (X: " + ChatColor.WHITE + target.getX() + "%p%, Z: " + ChatColor.WHITE + target.getZ() + "%p%)"); ChatUtil.sendNotice(sender, "Total Drops: " + target.total()); for (Map.Entry<EntityType, Integer> entry : target.getStats().entrySet()) { ChatUtil.sendNotice(sender, entry.getKey().name() + ": " + entry.getValue()); } } @Command(aliases = {"chunks"}, usage = "<world> <drop|last>", desc = "View chunk stats", flags = "p:", min = 2, max = 2) public void chunksCmd(CommandContext args, CommandSender sender) throws CommandException { World world = Bukkit.getWorld(args.getString(0)); if (world == null) { throw new CommandException("No world by that name found!"); } Collection<ChunkStats> stats; String typeString = args.getString(1); if (typeString.equalsIgnoreCase("drop")) { stats = lastClear.get(world); if (stats == null) { throw new CommandException("No recent drop clears on record."); } } else if (typeString.equalsIgnoreCase("last")) { stats = sessions.getSession(DropClearObserver.class, sender).getStats(world); if (stats == null) { throw new CommandException("No snapshots on record."); } } else { throw new CommandException("Unsupported stats specified!"); } List<ChunkStats> statsList = Lists.newArrayList(stats); statsList.sort((o1, o2) -> o2.total() - o1.total()); new PaginatedResult<ChunkStats>(ChatColor.GOLD + "Chunk Stats") { @Override public String format(ChunkStats chunkStats) { int total = chunkStats.total(); ChatColor recordColor = total >= config.itemCountMin / 8 ? 
ChatColor.RED : ChatColor.BLUE; return recordColor + String.valueOf(total) + ChatColor.YELLOW + " (X: " + ChatColor.WHITE + chunkStats.getX() + ChatColor.YELLOW + ", Z: " + ChatColor.WHITE + chunkStats.getZ() + ChatColor.YELLOW + ')'; } }.display(sender, statsList, args.getFlagInteger('p', 1)); } @Command(aliases = {"composition", "comp"}, usage = "<world> <drop|last>", desc = "View composition stats", min = 2, max = 2) public void compositionCmd(CommandContext args, CommandSender sender) throws CommandException { World world = Bukkit.getWorld(args.getString(0)); if (world == null) { throw new CommandException("No world by that name found!"); } Collection<ChunkStats> stats; String typeString = args.getString(1); if (typeString.equalsIgnoreCase("drop")) { stats = lastClear.get(world); if (stats == null) { throw new CommandException("No recent drop clears on record."); } } else if (typeString.equalsIgnoreCase("last")) { stats = sessions.getSession(DropClearObserver.class, sender).getStats(world); if (stats == null) { throw new CommandException("No snapshots on record."); } } else { throw new CommandException("Unsupported stats specified!"); } Map<EntityType, Integer> totals = new HashMap<>(); for (ChunkStats cs : stats) { Map<EntityType, Integer> mapping = cs.getStats(); for (EntityType type : checkedEntities) { Integer newVal = mapping.get(type); if (newVal == null) continue; Integer curVal = totals.get(type); if (curVal != null) { newVal += curVal; } totals.put(type, newVal); } } int total = 0; for (Integer i : totals.values()) { total += i; } ChatUtil.sendNotice(sender, ChatColor.GOLD, "Drop Composition Report"); DecimalFormat formatter = new DecimalFormat(" for (Map.Entry<EntityType, Integer> entry : totals.entrySet()) { ChatUtil.sendNotice(sender, ChatColor.YELLOW, entry.getKey().name() + " (Quantity: " + ChatColor.WHITE + entry.getValue() + "%p% - " + ChatColor.WHITE + formatter.format(((double) entry.getValue() / total) * 100) + "%p%%)"); } } } private void 
dropClear(World world, int seconds, boolean overwrite) { TimedRunnable runnable = worldTimer.get(world); // Check for old task, and overwrite if allowed if (runnable != null && !runnable.isComplete()) { if (overwrite) { runnable.setTimes(seconds); } return; } IntegratedRunnable dropClear = new IntegratedRunnable() { @Override public boolean run(int times) { if (TimerUtil.matchesFilter(times, 10, 5)) { Bukkit.broadcastMessage(ChatColor.RED + "Clearing all " + world.getName() + " drops in " + times + " seconds!"); } return true; } @Override public void end() { Bukkit.broadcastMessage(ChatColor.RED + "Clearing all " + world.getName() + " drops!"); CheckProfile profile = checkEntities(world); lastClear.put(world, profile.getStats()); Collection<Entity> entities = profile.getEntities(); entities.stream().forEach(Entity::remove); Bukkit.broadcastMessage(String.valueOf(ChatColor.GREEN) + entities.size() + " drops cleared!"); } }; // Setup new task runnable = new TimedRunnable(dropClear, seconds); // Offset this by one to prevent the drop clear from triggering twice runnable.setTask(server.getScheduler().runTaskTimer(inst, runnable, 1, 20)); worldTimer.put(world, runnable); } private class CheckProfile { private final Collection<Entity> entities; private final Collection<ChunkStats> stats; private CheckProfile(Collection<Entity> entities, Collection<ChunkStats> stats) { this.entities = entities; this.stats = stats; } public Collection<Entity> getEntities() { return entities; } public Collection<ChunkStats> getStats() { return stats; } } private class ChunkStats { private Map<EntityType, Integer> counterQuantity = new HashMap<>(); private final int x; private final int z; public ChunkStats(Chunk chunk) { x = chunk.getX(); z = chunk.getZ(); } public void increase(EntityType type, int amt) { Integer count = counterQuantity.get(type); if (count != null) { count += amt; } else { count = amt; } counterQuantity.put(type, count); } public int getX() { return x; } public int getZ() 
{ return z; } public int total() { int total = 0; for (Integer i : counterQuantity.values()) { total += i; } return total; } public Map<EntityType, Integer> getStats() { return Collections.unmodifiableMap(counterQuantity); } } private static class DropClearObserver extends PersistentSession { private Map<World, Collection<ChunkStats>> lastSnapshot = new HashMap<>(); public DropClearObserver() { super(THIRTY_MINUTES); } public void setStats(World world, Collection<ChunkStats> stats) { lastSnapshot.put(world, stats); } public boolean hasStats(World world) { return lastSnapshot.containsKey(world); } public Collection<ChunkStats> getStats(World world) { Collection<ChunkStats> stats = lastSnapshot.get(world); return stats == null ? null : Collections.unmodifiableCollection(stats); } } }
package com.skelril.aurora.util.item;

import com.sk89q.util.StringUtil;
import com.sk89q.worldedit.blocks.BlockID;
import com.sk89q.worldedit.blocks.ItemID;

import java.util.*;

/**
 * ItemType types w/ damage value support.
 *
 * <p>Each constant pairs a numeric block/item ID with an optional damage
 * (data) value, a display name, and a list of lowercase lookup aliases.
 * A data value of -1 means "no specific data value" (see {@link #getData()}).
 *
 * <p>Note: aliases are registered into a single {@link LinkedHashMap}, so a
 * key shared by two constants resolves to whichever constant is declared
 * LAST (later {@code put} overwrites earlier). Keep aliases unique unless
 * shadowing is intended.
 */
public enum ItemType {

    // Blocks
    AIR(BlockID.AIR, "Air", "air"),
    STONE(BlockID.STONE, "Stone", "stone", "rock"),
    GRASS(BlockID.GRASS, "Grass", "grass"),
    DIRT(BlockID.DIRT, "Dirt", "dirt"),
    COBBLESTONE(BlockID.COBBLESTONE, "Cobblestone", "cobblestone", "cobble"),
    WOOD(BlockID.WOOD, 0, "Wood", "wood", "woodplank", "plank", "woodplanks", "planks"),
    SPRUCE_WOOD(BlockID.WOOD, 1, "Spruce Wood", "sprucewood", "sprucewoodplank", "sprucewoodplanks", "spruceplanks"),
    // FIX: alias typo "bitchwoodplank" -> "birchwoodplank" (matches spruce/jungle siblings)
    BIRCH_WOOD(BlockID.WOOD, 2, "Birch Wood", "birchwood", "birchwoodplank", "birchwoodplanks", "birchplanks"),
    JUNGLE_WOOD(BlockID.WOOD, 3, "Jungle Wood", "junglewood", "junglewoodplank", "junglewoodplanks", "jungleplanks"),
    SAPLING(BlockID.SAPLING, 0, "Sapling", "sapling", "seedling"),
    SPRUCE_SAPLING(BlockID.SAPLING, 1, "Spruce Sapling", "sprucesapling", "spruceseedling"),
    BIRCH_SAPLING(BlockID.SAPLING, 2, "Birch Sapling", "birchsapling", "birchseedling"),
    // FIX: data was 2 (duplicate of BIRCH_SAPLING); jungle sapling is data 3
    JUNGLE_SAPLING(BlockID.SAPLING, 3, "Jungle Sapling", "junglesapling", "jungleseedling"),
    BEDROCK(BlockID.BEDROCK, "Bedrock", "adminium", "bedrock"),
    WATER(BlockID.WATER, "Water", "watermoving", "movingwater", "flowingwater", "waterflowing"),
    STATIONARY_WATER(BlockID.STATIONARY_WATER, "Water (stationary)", "water", "waterstationary", "stationarywater", "stillwater"),
    LAVA(BlockID.LAVA, "Lava", "lavamoving", "movinglava", "flowinglava", "lavaflowing"),
    STATIONARY_LAVA(BlockID.STATIONARY_LAVA, "Lava (stationary)", "lava", "lavastationary", "stationarylava", "stilllava"),
    SAND(BlockID.SAND, "Sand", "sand"),
    GRAVEL(BlockID.GRAVEL, "Gravel", "gravel"),
    GOLD_ORE(BlockID.GOLD_ORE, "Gold ore", "goldore"),
    IRON_ORE(BlockID.IRON_ORE, "Iron ore", "ironore"),
    COAL_ORE(BlockID.COAL_ORE, "Coal ore", "coalore"),
    LOG(BlockID.LOG, 0, "Log", "log", "logs"),
    SPRUCE_LOG(BlockID.LOG, 1, "Spruce Log", "sprucelog", "sprucelogs"),
    BIRCH_LOG(BlockID.LOG, 2, "Birch Log", "birchlog", "birchlogs"),
    JUNGLE_LOG(BlockID.LOG, 3, "Jungle Log", "junglelog", "junglelogs"),
    LEAVES(BlockID.LEAVES, 0, "Leaves", "leaves", "leaf"),
    SPONGE(BlockID.SPONGE, "Sponge", "sponge"),
    GLASS(BlockID.GLASS, "Glass", "glass"),
    LAPIS_LAZULI_ORE(BlockID.LAPIS_LAZULI_ORE, "Lapis lazuli ore", "lapislazuliore", "blueore", "lapisore"),
    LAPIS_LAZULI(BlockID.LAPIS_LAZULI_BLOCK, "Lapis lazuli", "lapislazuli", "lapislazuliblock", "bluerock"),
    DISPENSER(BlockID.DISPENSER, "Dispenser", "dispenser"),
    SANDSTONE(BlockID.SANDSTONE, 0, "Sandstone", "sandstone"),
    NOTE_BLOCK(BlockID.NOTE_BLOCK, "Note block", "musicblock", "noteblock", "note", "music", "instrument"),
    BED(BlockID.BED, "Bed", "bed"),
    POWERED_RAIL(BlockID.POWERED_RAIL, "Powered Rail", "poweredrail", "boosterrail", "poweredtrack", "boostertrack", "booster"),
    DETECTOR_RAIL(BlockID.DETECTOR_RAIL, "Detector Rail", "detectorrail", "detector"),
    PISTON_STICKY_BASE(BlockID.PISTON_STICKY_BASE, "Sticky Piston", "stickypiston"),
    WEB(BlockID.WEB, "Web", "web", "spiderweb"),
    LONG_GRASS(BlockID.LONG_GRASS, 0, "Long grass", "longgrass", "tallgrass"),
    DEAD_BUSH(BlockID.DEAD_BUSH, "Shrub", "deadbush", "shrub", "deadshrub", "tumbleweed"),
    PISTON_BASE(BlockID.PISTON_BASE, "Piston", "piston"),
    PISTON_EXTENSION(BlockID.PISTON_EXTENSION, "Piston extension", "pistonextension", "pistonhead"),
    CLOTH(BlockID.CLOTH, 0, "Wool", "cloth", "wool"),
    PISTON_MOVING_PIECE(BlockID.PISTON_MOVING_PIECE, "Piston moving piece", "pistonmovingpiece", "movingpiston"),
    YELLOW_FLOWER(BlockID.YELLOW_FLOWER, "Yellow flower", "yellowflower", "flower"),
    RED_FLOWER(BlockID.RED_FLOWER, "Red rose", "redflower", "redrose", "rose"),
    BROWN_MUSHROOM(BlockID.BROWN_MUSHROOM, "Brown mushroom", "brownmushroom", "mushroom"),
    RED_MUSHROOM(BlockID.RED_MUSHROOM, "Red mushroom", "redmushroom"),
    GOLD_BLOCK(BlockID.GOLD_BLOCK, "Gold block", "gold", "goldblock"),
    IRON_BLOCK(BlockID.IRON_BLOCK, "Iron block", "iron", "ironblock"),
    DOUBLE_STEP(BlockID.DOUBLE_STEP, 0, "Double step", "doubleslab", "doublestoneslab", "doublestep"),
    STEP(BlockID.STEP, 0, "Step", "slab", "stoneslab", "step", "halfstep"),
    BRICK(BlockID.BRICK, "Brick", "brick", "brickblock"),
    TNT(BlockID.TNT, "TNT", "tnt", "c4", "explosive"),
    BOOKCASE(BlockID.BOOKCASE, "Bookcase", "bookshelf", "bookshelves", "bookcase", "bookcases"),
    MOSSY_COBBLESTONE(BlockID.MOSSY_COBBLESTONE, "Mossy Cobblestone", "mossycobblestone", "mossstone", "mossystone",
            "mosscobble", "mossycobble", "moss", "mossy", "sossymobblecone"),
    OBSIDIAN(BlockID.OBSIDIAN, "Obsidian", "obsidian"),
    TORCH(BlockID.TORCH, "Torch", "torch", "light", "candle"),
    FIRE(BlockID.FIRE, "Fire", "fire", "flame", "flames"),
    MOB_SPAWNER(BlockID.MOB_SPAWNER, "Mob spawner", "mobspawner", "spawner"),
    WOODEN_STAIRS(BlockID.WOODEN_STAIRS, "Wooden stairs", "woodstair", "woodstairs", "woodenstair", "woodenstairs"),
    CHEST(BlockID.CHEST, "Chest", "chest", "storage", "storagechest"),
    REDSTONE_WIRE(BlockID.REDSTONE_WIRE, "Redstone wire", "redstonewire", "redstone", "redstoneblock"),
    DIAMOND_ORE(BlockID.DIAMOND_ORE, "Diamond ore", "diamondore"),
    DIAMOND_BLOCK(BlockID.DIAMOND_BLOCK, "Diamond block", "diamond", "diamondblock"),
    WORKBENCH(BlockID.WORKBENCH, "Workbench", "workbench", "table", "craftingtable", "crafting"),
    CROPS(BlockID.CROPS, "Crops", "crops", "crop", "plant", "plants"),
    SOIL(BlockID.SOIL, "Soil", "soil", "farmland"),
    FURNACE(BlockID.FURNACE, "Furnace", "furnace"),
    BURNING_FURNACE(BlockID.BURNING_FURNACE, "Furnace (burning)", "furnaceburning", "burningfurnace", "litfurnace"),
    SIGN_POST(BlockID.SIGN_POST, "Sign post", "signpost"),
    WOODEN_DOOR(BlockID.WOODEN_DOOR, "Wooden door", "wooddoor", "woodendoor", "door"),
    LADDER(BlockID.LADDER, "Ladder", "ladder"),
    MINECART_TRACKS(BlockID.MINECART_TRACKS, "Minecart tracks", "track", "tracks", "minecrattrack", "minecarttracks",
            "rails", "rail"),
    COBBLESTONE_STAIRS(BlockID.COBBLESTONE_STAIRS, "Cobblestone stairs", "cobblestonestair", "cobblestonestairs",
            "cobblestair", "cobblestairs"),
    WALL_SIGN(BlockID.WALL_SIGN, "Wall sign", "wallsign"),
    LEVER(BlockID.LEVER, "Lever", "lever", "switch", "stonelever", "stoneswitch"),
    STONE_PRESSURE_PLATE(BlockID.STONE_PRESSURE_PLATE, "Stone pressure plate", "stonepressureplate", "stoneplate"),
    IRON_DOOR(BlockID.IRON_DOOR, "Iron Door", "irondoor"),
    WOODEN_PRESSURE_PLATE(BlockID.WOODEN_PRESSURE_PLATE, "Wooden pressure plate", "woodpressureplate", "woodplate",
            "woodenpressureplate", "woodenplate", "plate", "pressureplate"),
    REDSTONE_ORE(BlockID.REDSTONE_ORE, "Redstone ore", "redstoneore"),
    GLOWING_REDSTONE_ORE(BlockID.GLOWING_REDSTONE_ORE, "Glowing redstone ore", "glowingredstoneore"),
    REDSTONE_TORCH_OFF(BlockID.REDSTONE_TORCH_OFF, "Redstone torch (off)", "redstonetorchoff", "rstorchoff"),
    REDSTONE_TORCH_ON(BlockID.REDSTONE_TORCH_ON, "Redstone torch", "redstonetorch", "redstonetorchon", "rstorchon", "redtorch"),
    STONE_BUTTON(BlockID.STONE_BUTTON, "Stone Button", "stonebutton", "button"),
    SNOW(BlockID.SNOW, "Snow", "snow"),
    ICE(BlockID.ICE, "Ice", "ice"),
    SNOW_BLOCK(BlockID.SNOW_BLOCK, "Snow block", "snowblock"),
    CACTUS(BlockID.CACTUS, "Cactus", "cactus", "cacti"),
    CLAY(BlockID.CLAY, "Clay block", "clayblock"),
    SUGAR_CANE(BlockID.REED, "Reed", "reed", "cane", "sugarcane", "sugarcanes", "vine", "vines"),
    JUKEBOX(BlockID.JUKEBOX, "Jukebox", "jukebox", "stereo", "recordplayer"),
    FENCE(BlockID.FENCE, "Fence", "fence"),
    PUMPKIN(BlockID.PUMPKIN, "Pumpkin", "pumpkin"),
    NETHERRACK(BlockID.NETHERRACK, "Netherrack", "redmossycobblestone", "redcobblestone", "redmosstone", "redcobble",
            "netherstone", "netherrack", "nether", "hellstone"),
    SOUL_SAND(BlockID.SLOW_SAND, "Soul sand", "slowmud", "mud", "soulsand", "hellmud"),
    GLOWSTONE(BlockID.LIGHTSTONE, "Glowstone", "brittlegold", "glowstone", "lightstone", "brimstone", "australium"),
    PORTAL(BlockID.PORTAL, "Portal", "portal"),
    JACK_O_LANTERN(BlockID.JACKOLANTERN, "Jack o' Lantern", "pumpkinlighted", "pumpkinon", "litpumpkin", "jackolantern"),
    CAKE(BlockID.CAKE_BLOCK, "Cake", "cake", "cakeblock"),
    REDSTONE_REPEATER_OFF(BlockID.REDSTONE_REPEATER_OFF, "Redstone repeater", "diodeoff", "redstonerepeater",
            "repeateroff", "delayeroff"),
    REDSTONE_REPEATER_ON(BlockID.REDSTONE_REPEATER_ON, "Redstone repeater (on)", "diodeon", "redstonerepeateron",
            "repeateron", "delayeron"),
    LOCKED_CHEST(BlockID.LOCKED_CHEST, "Locked chest", "lockedchest", "steveco", "supplycrate",
            "valveneedstoworkonep3nottf2kthx"),
    TRAP_DOOR(BlockID.TRAP_DOOR, "Trap door", "trapdoor", "hatch", "floordoor"),
    SILVERFISH_BLOCK(BlockID.SILVERFISH_BLOCK, 0, "Silverfish block", "silverfishblock", "silverfish", "silver"),
    COBBLE_SILVERFISH_BLOCK(BlockID.SILVERFISH_BLOCK, 1, "Cobblestone Silverfish block", "cobblestonesilverfish", "cobblesilver"),
    // FIX: data was 1 (duplicate of COBBLE_SILVERFISH_BLOCK); stone-brick variant is data 2
    STONE_BRICK_SILVER_FISH(BlockID.SILVERFISH_BLOCK, 2, "Stone brick Silverfish block", "stonebricksilverfish", "stonebricksilver"),
    STONE_BRICK(BlockID.STONE_BRICK, 0, "Stone brick", "stonebrick", "sbrick", "smoothstonebrick"),
    MOSSY_STONE_BRICK(BlockID.STONE_BRICK, 1, "Mossy Stone brick", "mossystonebrick", "msbrick"),
    CRACKED_STONE_BRICK(BlockID.STONE_BRICK, 2, "Cracked Stone brick", "crackedstonebrick", "csbrick"),
    RED_MUSHROOM_CAP(BlockID.RED_MUSHROOM_CAP, 0, "Red mushroom cap", "giantmushroomred", "redgiantmushroom", "redmushroomcap"),
    BROWN_MUSHROOM_CAP(BlockID.BROWN_MUSHROOM_CAP, 0, "Brown mushroom cap", "giantmushroombrown", "browngiantmushoom", "brownmushroomcap"),
    IRON_BARS(BlockID.IRON_BARS, "Iron bars", "ironbars", "ironfence"),
    GLASS_PANE(BlockID.GLASS_PANE, "Glass pane", "window", "glasspane", "glasswindow"),
    MELON_BLOCK(BlockID.MELON_BLOCK, "Melon (block)", "melonblock"),
    PUMPKIN_STEM(BlockID.PUMPKIN_STEM, "Pumpkin stem", "pumpkinstem"),
    MELON_STEM(BlockID.MELON_STEM, "Melon stem", "melonstem"),
    VINE(BlockID.VINE, "Vine", "vine", "vines", "creepers"),
    FENCE_GATE(BlockID.FENCE_GATE, "Fence gate", "fencegate", "gate"),
    BRICK_STAIRS(BlockID.BRICK_STAIRS, "Brick stairs", "brickstairs", "bricksteps"),
    STONE_BRICK_STAIRS(BlockID.STONE_BRICK_STAIRS, "Stone brick stairs", "stonebrickstairs", "smoothstonebrickstairs"),
    MYCELIUM(BlockID.MYCELIUM, "Mycelium", "mycelium", "fungus", "mycel"),
    LILY_PAD(BlockID.LILY_PAD, "Lily pad", "lilypad", "waterlily"),
    NETHER_BRICK(BlockID.NETHER_BRICK, "Nether brick", "netherbrick"),
    NETHER_BRICK_FENCE(BlockID.NETHER_BRICK_FENCE, "Nether brick fence", "netherbrickfence", "netherfence"),
    NETHER_BRICK_STAIRS(BlockID.NETHER_BRICK_STAIRS, "Nether brick stairs", "netherbrickstairs", "netherbricksteps",
            "netherstairs", "nethersteps"),
    NETHER_WART(BlockID.NETHER_WART, "Nether wart", "netherwart", "netherstalk"),
    ENCHANTMENT_TABLE(BlockID.ENCHANTMENT_TABLE, "Enchantment table", "enchantmenttable", "enchanttable"),
    BREWING_STAND(BlockID.BREWING_STAND, "Brewing Stand", "brewingstand"),
    CAULDRON(BlockID.CAULDRON, "Cauldron", "cauldron"),
    END_PORTAL(BlockID.END_PORTAL, "End Portal", "endportal", "blackstuff", "airportal", "weirdblackstuff"),
    END_PORTAL_FRAME(BlockID.END_PORTAL_FRAME, "End Portal Frame", "endportalframe", "airportalframe", "crystalblock"),
    END_STONE(BlockID.END_STONE, "End Stone", "endstone", "enderstone", "endersand"),
    DRAGON_EGG(BlockID.DRAGON_EGG, "Dragon Egg", "dragonegg", "dragons"),
    REDSTONE_LAMP_OFF(BlockID.REDSTONE_LAMP_OFF, "Redstone lamp", "redstonelamp", "redstonelampoff", "rslamp",
            "rslampoff", "rsglow", "rsglowoff"),
    REDSTONE_LAMP_ON(BlockID.REDSTONE_LAMP_ON, "Redstone lamp (on)", "redstonelampon", "rslampon", "rsglowon"),
    DOUBLE_WOODEN_STEP(BlockID.DOUBLE_WOODEN_STEP, 0, "Double wood step", "doublewoodslab", "doublewoodstep"),
    WOODEN_STEP(BlockID.WOODEN_STEP, 0, "Wood step", "woodenslab", "woodslab", "woodstep", "woodhalfstep"),
    COCOA_PLANT(BlockID.COCOA_PLANT, "Cocoa plant", "cocoplant", "cocoaplant"),
    SANDSTONE_STAIRS(BlockID.SANDSTONE_STAIRS, "Sandstone stairs", "sandstairs", "sandstonestairs"),
    EMERALD_ORE(BlockID.EMERALD_ORE, "Emerald ore", "emeraldore"),
    ENDER_CHEST(BlockID.ENDER_CHEST, "Ender chest", "enderchest"),
    TRIPWIRE_HOOK(BlockID.TRIPWIRE_HOOK, "Tripwire hook", "tripwirehook"),
    TRIPWIRE(BlockID.TRIPWIRE, "Tripwire", "tripwire", "string"),
    EMERALD_BLOCK(BlockID.EMERALD_BLOCK, "Emerald block", "emeraldblock", "emerald"),
    SPRUCE_WOOD_STAIRS(BlockID.SPRUCE_WOOD_STAIRS, "Spruce wood stairs", "sprucestairs", "sprucewoodstairs"),
    BIRCH_WOOD_STAIRS(BlockID.BIRCH_WOOD_STAIRS, "Birch wood stairs", "birchstairs", "birchwoodstairs"),
    JUNGLE_WOOD_STAIRS(BlockID.JUNGLE_WOOD_STAIRS, "Jungle wood stairs", "junglestairs", "junglewoodstairs"),
    COMMAND_BLOCK(BlockID.COMMAND_BLOCK, "Command block", "commandblock", "cmdblock", "command", "cmd"),
    BEACON(BlockID.BEACON, "Beacon", "beacon", "beaconblock"),
    COBBLESTONE_WALL(BlockID.COBBLESTONE_WALL, 0, "Cobblestone wall", "cobblestonewall", "cobblewall"),
    MOSSY_COBBLESTONE_WALL(BlockID.COBBLESTONE_WALL, 1, "Mossy Cobblestone wall", "mossycobblestonewall", "mossycobblewall"),
    FLOWER_POT_BLOCK(BlockID.FLOWER_POT, "Flower pot", "flowerpot", "plantpot", "pot", "flowerpotblock"),
    CARROTS_BLOCK(BlockID.CARROTS, "Carrots", "carrots", "carrotsplant", "carrotsblock"),
    // FIX: added missing "potatoes" alias; "patatoes" kept for backward compatibility
    POTATOES_BLOCK(BlockID.POTATOES, "Potatoes", "potatoes", "patatoes", "potatoesblock"),
    WOODEN_BUTTON(BlockID.WOODEN_BUTTON, "Wooden button", "woodbutton", "woodenbutton"),
    HEAD_BLOCK(BlockID.HEAD, "Head", "head", "headmount", "mount", "headblock", "mountblock"),
    ANVIL(BlockID.ANVIL, 0, "Anvil", "anvil", "blacksmith"),
    TRAPPED_CHEST(BlockID.TRAPPED_CHEST, "Trapped Chest", "trappedchest", "redstonechest"),
    PRESSURE_PLATE_LIGHT(BlockID.PRESSURE_PLATE_LIGHT, "Weighted Pressure Plate (Light)",
            "weightedpressureplatelight", "lightpressureplate"),
    PRESSURE_PLATE_HEAVY(BlockID.PRESSURE_PLATE_HEAVY, "Weighted Pressure Plate (Heavy)",
            "weightedpressureplateheavy", "heavypressureplate"),
    COMPARATOR_OFF(BlockID.COMPARATOR_OFF, "Redstone Comparator (inactive)", "restonecomparatorinactive",
            "redstonecomparator", "comparator"),
    COMPARATOR_ON(BlockID.COMPARATOR_ON, "Redstone Comparator (active)", "redstonecomparatoractive",
            "redstonecomparatoron", "comparatoron"),
    // FIX: display-name typo "Daylight Sesnor"
    DAYLIGHT_SENSOR(BlockID.DAYLIGHT_SENSOR, "Daylight Sensor", "daylightsensor", "lightsensor"),
    REDSTONE_BLOCK(BlockID.REDSTONE_BLOCK, "Block of Redstone", "redstoneblock", "blockofredstone"),
    QUARTZ_ORE(BlockID.QUARTZ_ORE, "Nether Quartz Ore", "quartzore", "netherquartzore"),
    HOPPER(BlockID.HOPPER, "Hopper", "hopper"),
    QUARTZ_BLOCK(BlockID.QUARTZ_BLOCK, 0, "Block of Quartz", "blockofquartz", "quartzblock"),
    QUARTZ_STAIRS(BlockID.QUARTZ_STAIRS, "Quartz Stairs", "quartzstairs"),
    ACTIVATOR_RAIL(BlockID.ACTIVATOR_RAIL, "Activator Rail", "activatorrail", "tntrail", "activatortrack"),
    DROPPER(BlockID.DROPPER, "Dropper", "dropper"),
    STAINED_CLAY(BlockID.STAINED_CLAY, 0, "Stained clay", "stainedclay"),
    HAY_BLOCK(BlockID.HAY_BLOCK, "Hay Block", "hayblock", "haybale", "wheatbale"),
    CARPET(BlockID.CARPET, 0, "Carpet", "carpet"),
    HARDENED_CLAY(BlockID.HARDENED_CLAY, "Hardened Clay", "hardenedclay", "hardclay"),
    COAL_BLOCK(BlockID.COAL_BLOCK, "Block of Coal", "coalblock", "blockofcoal"),

    // Items
    IRON_SHOVEL(ItemID.IRON_SHOVEL, "Iron shovel", "ironshovel"),
    IRON_PICK(ItemID.IRON_PICK, "Iron pickaxe", "ironpick", "ironpickaxe"),
    IRON_AXE(ItemID.IRON_AXE, "Iron axe", "ironaxe"),
    FLINT_AND_TINDER(ItemID.FLINT_AND_TINDER, "Flint and tinder", "flintandtinder", "lighter", "flintandsteel",
            "flintsteel", "flintandiron", "flintnsteel", "flintniron", "flintntinder"),
    RED_APPLE(ItemID.RED_APPLE, "Red apple", "redapple", "apple"),
    BOW(ItemID.BOW, "Bow", "bow"),
    ARROW(ItemID.ARROW, "Arrow", "arrow"),
    COAL(ItemID.COAL, 0, "Coal", "coal"),
    CHARCOAL(ItemID.COAL, 1, "Charcoal", "charcoal"),
    DIAMOND(ItemID.DIAMOND, "Diamond", "diamond"),
    IRON_BAR(ItemID.IRON_BAR, "Iron bar", "ironbar", "iron"),
    GOLD_BAR(ItemID.GOLD_BAR, "Gold bar", "goldbar", "gold"),
    IRON_SWORD(ItemID.IRON_SWORD, "Iron sword", "ironsword"),
    WOOD_SWORD(ItemID.WOOD_SWORD, "Wooden sword", "woodensword", "woodsword"),
    WOOD_SHOVEL(ItemID.WOOD_SHOVEL, "Wooden shovel", "woodenshovel", "woodshovel"),
    WOOD_PICKAXE(ItemID.WOOD_PICKAXE, "Wooden pickaxe", "woodenpickaxe", "woodpick", "woodpickaxe"),
    WOOD_AXE(ItemID.WOOD_AXE, "Wooden axe", "woodenaxe", "woodaxe"),
    STONE_SWORD(ItemID.STONE_SWORD, "Stone sword", "stonesword"),
    STONE_SHOVEL(ItemID.STONE_SHOVEL, "Stone shovel", "stoneshovel"),
    STONE_PICKAXE(ItemID.STONE_PICKAXE, "Stone pickaxe", "stonepick", "stonepickaxe"),
    // FIX: display name was "Stone pickaxe" and the "stonepickaxe" alias shadowed
    // STONE_PICKAXE's own lookup key (last put wins in the lookup map)
    STONE_AXE(ItemID.STONE_AXE, "Stone axe", "stoneaxe"),
    DIAMOND_SWORD(ItemID.DIAMOND_SWORD, "Diamond sword", "diamondsword"),
    DIAMOND_SHOVEL(ItemID.DIAMOND_SHOVEL, "Diamond shovel", "diamondshovel"),
    DIAMOND_PICKAXE(ItemID.DIAMOND_PICKAXE, "Diamond pickaxe", "diamondpick", "diamondpickaxe"),
    DIAMOND_AXE(ItemID.DIAMOND_AXE, "Diamond axe", "diamondaxe"),
    STICK(ItemID.STICK, "Stick", "stick"),
    BOWL(ItemID.BOWL, "Bowl", "bowl"),
    MUSHROOM_SOUP(ItemID.MUSHROOM_SOUP, "Mushroom soup", "mushroomsoup", "soup", "brbsoup"),
    GOLD_SWORD(ItemID.GOLD_SWORD, "Golden sword", "goldsword", "goldensword"),
    GOLD_SHOVEL(ItemID.GOLD_SHOVEL, "Golden shovel", "goldshovel", "goldenshovel"),
    GOLD_PICKAXE(ItemID.GOLD_PICKAXE, "Golden pickaxe", "goldpick", "goldpickaxe"),
    GOLD_AXE(ItemID.GOLD_AXE, "Golden axe", "goldaxe", "goldenaxe"),
    STRING(ItemID.STRING, "String", "string"),
    FEATHER(ItemID.FEATHER, "Feather", "feather"),
    GUN_POWDER(ItemID.SULPHUR, "Gun Powder", "sulphur", "sulfur", "gunpowder"),
    WOOD_HOE(ItemID.WOOD_HOE, "Wooden hoe", "woodhoe", "woodenhoe"),
    STONE_HOE(ItemID.STONE_HOE, "Stone hoe", "stonehoe"),
    IRON_HOE(ItemID.IRON_HOE, "Iron hoe", "ironhoe"),
    DIAMOND_HOE(ItemID.DIAMOND_HOE, "Diamond hoe", "diamondhoe"),
    GOLD_HOE(ItemID.GOLD_HOE, "Golden hoe", "goldhoe", "goldenhoe"),
    SEEDS(ItemID.SEEDS, "Seeds", "seeds", "seed"),
    WHEAT(ItemID.WHEAT, "Wheat", "wheat"),
    BREAD(ItemID.BREAD, "Bread", "bread"),
    LEATHER_HELMET(ItemID.LEATHER_HELMET, "Leather helmet", "leatherhelmet", "leatherhat"),
    LEATHER_CHEST(ItemID.LEATHER_CHEST, "Leather chestplate", "leatherchest", "leatherchestplate", "leathervest",
            "leatherbreastplate", "leatherplate", "leathercplate", "leatherbody"),
    LEATHER_PANTS(ItemID.LEATHER_PANTS, "Leather pants", "leatherpants", "leathergreaves", "leatherlegs",
            "leatherleggings", "leatherstockings", "leatherbreeches"),
    LEATHER_BOOTS(ItemID.LEATHER_BOOTS, "Leather boots", "leatherboots", "leathershoes", "leatherfoot", "leatherfeet"),
    CHAINMAIL_HELMET(ItemID.CHAINMAIL_HELMET, "Chainmail helmet", "chainmailhelmet", "chainmailhat"),
    CHAINMAIL_CHEST(ItemID.CHAINMAIL_CHEST, "Chainmail chestplate", "chainmailchest", "chainmailchestplate",
            "chainmailvest", "chainmailbreastplate", "chainmailplate", "chainmailcplate", "chainmailbody"),
    CHAINMAIL_PANTS(ItemID.CHAINMAIL_PANTS, "Chainmail pants", "chainmailpants", "chainmailgreaves", "chainmaillegs",
            "chainmailleggings", "chainmailstockings", "chainmailbreeches"),
    CHAINMAIL_BOOTS(ItemID.CHAINMAIL_BOOTS, "Chainmail boots", "chainmailboots", "chainmailshoes", "chainmailfoot",
            "chainmailfeet"),
    IRON_HELMET(ItemID.IRON_HELMET, "Iron helmet", "ironhelmet", "ironhat"),
    IRON_CHEST(ItemID.IRON_CHEST, "Iron chestplate", "ironchest", "ironchestplate", "ironvest", "ironbreastplate",
            "ironplate", "ironcplate", "ironbody"),
    IRON_PANTS(ItemID.IRON_PANTS, "Iron pants", "ironpants", "irongreaves", "ironlegs", "ironleggings",
            "ironstockings", "ironbreeches"),
    IRON_BOOTS(ItemID.IRON_BOOTS, "Iron boots", "ironboots", "ironshoes", "ironfoot", "ironfeet"),
    DIAMOND_HELMET(ItemID.DIAMOND_HELMET, "Diamond helmet", "diamondhelmet", "diamondhat"),
    DIAMOND_CHEST(ItemID.DIAMOND_CHEST, "Diamond chestplate", "diamondchest", "diamondchestplate", "diamondvest",
            "diamondbreastplate", "diamondplate", "diamondcplate", "diamondbody"),
    DIAMOND_PANTS(ItemID.DIAMOND_PANTS, "Diamond pants", "diamondpants", "diamondgreaves", "diamondlegs",
            "diamondleggings", "diamondstockings", "diamondbreeches"),
    DIAMOND_BOOTS(ItemID.DIAMOND_BOOTS, "Diamond boots", "diamondboots", "diamondshoes", "diamondfoot", "diamondfeet"),
    GOLD_HELMET(ItemID.GOLD_HELMET, "Gold helmet", "goldhelmet", "goldhat"),
    GOLD_CHEST(ItemID.GOLD_CHEST, "Gold chestplate", "goldchest", "goldchestplate", "goldvest", "goldbreastplate",
            "goldplate", "goldcplate", "goldbody"),
    GOLD_PANTS(ItemID.GOLD_PANTS, "Gold pants", "goldpants", "goldgreaves", "goldlegs", "goldleggings",
            "goldstockings", "goldbreeches"),
    GOLD_BOOTS(ItemID.GOLD_BOOTS, "Gold boots", "goldboots", "goldshoes", "goldfoot", "goldfeet"),
    FLINT(ItemID.FLINT, "Flint", "flint"),
    RAW_PORKCHOP(ItemID.RAW_PORKCHOP, "Raw porkchop", "rawpork", "rawporkchop", "rawbacon", "baconstrips", "rawmeat"),
    COOKED_PORKCHOP(ItemID.COOKED_PORKCHOP, "Cooked porkchop", "pork", "cookedpork", "cookedporkchop", "cookedbacon",
            "bacon", "meat"),
    PAINTING(ItemID.PAINTING, "Painting", "painting"),
    GOLD_APPLE(ItemID.GOLD_APPLE, 0, "Golden apple", "goldapple", "goldenapple"),
    ENHANCED_GOLD_APPLE(ItemID.GOLD_APPLE, 1, "Enhanced Golden apple", "enhancedgoldenapple", "enhancedgoldapple",
            "notchapple"),
    SIGN(ItemID.SIGN, "Wooden sign", "sign", "woodensign"),
    WOODEN_DOOR_ITEM(ItemID.WOODEN_DOOR_ITEM, "Wooden door", "wooddoor", "door"),
    BUCKET(ItemID.BUCKET, "Bucket", "bucket", "bukkit"),
    WATER_BUCKET(ItemID.WATER_BUCKET, "Water bucket", "waterbucket", "waterbukkit"),
    LAVA_BUCKET(ItemID.LAVA_BUCKET, "Lava bucket", "lavabucket", "lavabukkit"),
    MINECART(ItemID.MINECART, "Minecart", "minecart", "cart"),
    SADDLE(ItemID.SADDLE, "Saddle", "saddle"),
    IRON_DOOR_ITEM(ItemID.IRON_DOOR_ITEM, "Iron door", "irondoor"),
    REDSTONE_DUST(ItemID.REDSTONE_DUST, "Redstone dust", "redstonedust", "reddust", "redstone", "dust", "wire"),
    SNOWBALL(ItemID.SNOWBALL, "Snowball", "snowball"),
    WOOD_BOAT(ItemID.WOOD_BOAT, "Wooden boat", "woodboat", "woodenboat", "boat"),
    LEATHER(ItemID.LEATHER, "Leather", "leather", "cowhide"),
    MILK_BUCKET(ItemID.MILK_BUCKET, "Milk bucket", "milkbucket", "milk", "milkbukkit"),
    BRICK_BAR(ItemID.BRICK_BAR, "Brick", "brickbar"),
    CLAY_BALL(ItemID.CLAY_BALL, "Clay Ball", "clayball", "clay"),
    SUGAR_CANE_ITEM(ItemID.SUGAR_CANE_ITEM, "Sugar cane", "sugarcane", "reed", "reeds"),
    PAPER(ItemID.PAPER, "Paper", "paper"),
    BOOK(ItemID.BOOK, "Book", "book"),
    SLIME_BALL(ItemID.SLIME_BALL, "Slime ball", "slimeball", "slime"),
    STORAGE_MINECART(ItemID.STORAGE_MINECART, "Minecart with Chest", "storageminecart", "storagecart",
            "minecartwithchest", "minecartchest", "chestminecart"),
    POWERED_MINECART(ItemID.POWERED_MINECART, "Minecart with Furnace", "poweredminecart", "poweredcart",
            "minecartwithfurnace", "minecartfurnace", "furnaceminecart"),
    EGG(ItemID.EGG, "Egg", "egg"),
    COMPASS(ItemID.COMPASS, "Compass", "compass"),
    FISHING_ROD(ItemID.FISHING_ROD, "Fishing rod", "fishingrod", "fishingpole"),
    CLOCK(ItemID.WATCH, "Clock", "watch", "clock", "timer"),
    GLOWSTONE_DUST(ItemID.LIGHTSTONE_DUST, "Glowstone dust", "lightstonedust", "glowstonedone", "brightstonedust",
            "brittlegolddust", "brimstonedust"),
    RAW_FISH(ItemID.RAW_FISH, "Raw fish", "rawfish"),
    COOKED_FISH(ItemID.COOKED_FISH, "Cooked fish", "cookedfish", "fish"),
    INK_SACK(ItemID.INK_SACK, 0, "Ink sac", "inksac", "ink", "dye", "inksack"),
    BONE(ItemID.BONE, "Bone", "bone"),
    SUGAR(ItemID.SUGAR, "Sugar", "sugar"),
    CAKE_ITEM(ItemID.CAKE_ITEM, "Cake", "cake"),
    BED_ITEM(ItemID.BED_ITEM, "Bed", "bed"),
    REDSTONE_REPEATER(ItemID.REDSTONE_REPEATER, "Redstone repeater", "redstonerepeater", "diode", "delayer", "repeater"),
    COOKIE(ItemID.COOKIE, "Cookie", "cookie"),
    MAP(ItemID.MAP, 0, "Map", "map"),
    SHEARS(ItemID.SHEARS, "Shears", "shears", "scissors"),
    MELON(ItemID.MELON, "Melon Slice", "melon", "melonslice"),
    PUMPKIN_SEEDS(ItemID.PUMPKIN_SEEDS, "Pumpkin seeds", "pumpkinseed", "pumpkinseeds"),
    MELON_SEEDS(ItemID.MELON_SEEDS, "Melon seeds", "melonseed", "melonseeds"),
    RAW_BEEF(ItemID.RAW_BEEF, "Raw beef", "rawbeef", "rawcow", "beef"),
    COOKED_BEEF(ItemID.COOKED_BEEF, "Steak", "steak", "cookedbeef", "cookedcow"),
    RAW_CHICKEN(ItemID.RAW_CHICKEN, "Raw chicken", "rawchicken"),
    COOKED_CHICKEN(ItemID.COOKED_CHICKEN, "Cooked chicken", "cookedchicken", "chicken", "grilledchicken"),
    ROTTEN_FLESH(ItemID.ROTTEN_FLESH, "Rotten flesh", "rottenflesh", "zombiemeat", "flesh"),
    ENDER_PEARL(ItemID.ENDER_PEARL, "Ender pearl", "pearl", "enderpearl"),
    BLAZE_ROD(ItemID.BLAZE_ROD, "Blaze rod", "blazerod"),
    GHAST_TEAR(ItemID.GHAST_TEAR, "Ghast tear", "ghasttear"),
    // FIX: display-name typo "Gold nuggest"
    GOLD_NUGGET(ItemID.GOLD_NUGGET, "Gold nugget", "goldnugget"),
    NETHER_WART_ITEM(ItemID.NETHER_WART_SEED, "Nether wart", "netherwart", "netherwartseed"),
    POTION(ItemID.POTION, 0, "Potion", "potion"),
    GLASS_BOTTLE(ItemID.GLASS_BOTTLE, "Glass bottle", "glassbottle"),
    SPIDER_EYE(ItemID.SPIDER_EYE, "Spider eye", "spidereye"),
    FERMENTED_SPIDER_EYE(ItemID.FERMENTED_SPIDER_EYE, "Fermented spider eye", "fermentedspidereye", "fermentedeye"),
    BLAZE_POWDER(ItemID.BLAZE_POWDER, "Blaze powder", "blazepowder"),
    MAGMA_CREAM(ItemID.MAGMA_CREAM, "Magma cream", "magmacream"),
    BREWING_STAND_ITEM(ItemID.BREWING_STAND, "Brewing stand", "brewingstand"),
    CAULDRON_ITEM(ItemID.CAULDRON, "Cauldron", "cauldron"),
    EYE_OF_ENDER(ItemID.EYE_OF_ENDER, "Eye of Ender", "eyeofender", "endereye"),
    GLISTERING_MELON(ItemID.GLISTERING_MELON, "Glistering Melon", "glisteringmelon", "goldmelon"),
    SPAWN_EGG(ItemID.SPAWN_EGG, 0, "Spawn Egg", "spawnegg", "spawn", "mobspawnegg"),
    BOTTLE_O_ENCHANTING(ItemID.BOTTLE_O_ENCHANTING, "Bottle o' Enchanting", "xpbottle", "expbottle",
            "bottleoenchanting", "experiencebottle"),
    FIRE_CHARGE(ItemID.FIRE_CHARGE, "Fire Charge", "firecharge", "firestarter", "firerock"),
    BOOK_AND_QUILL(ItemID.BOOK_AND_QUILL, "Book and Quill", "bookandquill", "quill", "writingbook"),
    WRITTEN_BOOK(ItemID.WRITTEN_BOOK, "Written Book", "writtenbook"),
    EMERALD(ItemID.EMERALD, "Emerald", "emeraldingot", "emerald"),
    ITEM_FRAME(ItemID.ITEM_FRAME, "Item frame", "itemframe", "frame", "itempainting"),
    FLOWER_POT(ItemID.FLOWER_POT, "Flower pot", "flowerpot", "plantpot", "pot"),
    CARROT(ItemID.CARROT, "Carrot", "carrot"),
    POTATO(ItemID.POTATO, "Potato", "potato"),
    BAKED_POTATO(ItemID.BAKED_POTATO, "Baked potato", "bakedpotato", "potatobaked"),
    POISONOUS_POTATO(ItemID.POISONOUS_POTATO, "Poisonous potato", "poisonpotato", "poisonouspotato"),
    BLANK_MAP(ItemID.BLANK_MAP, 0, "Blank map", "blankmap", "emptymap"),
    GOLDEN_CARROT(ItemID.GOLDEN_CARROT, "Golden carrot", "goldencarrot", "goldcarrot"),
    SKULL(ItemID.HEAD, 0, "Skull", "skull", "head", "headmount", "mount"),
    WITHER_SKULL(ItemID.HEAD, 1, "Wither Skeleton Skull", "witherskull", "witherskeletonskull", "witherskeletonhead",
            "witherhead"),
    ZOMBIE_SKULL(ItemID.HEAD, 2, "Zombie Skull", "zombieskull", "zombiehead"),
    PLAYER_SKULL(ItemID.HEAD, 3, "Player Skull", "playerskull", "playerhead"),
    CARROT_ON_A_STICK(ItemID.CARROT_ON_A_STICK, "Carrot on a stick", "carrotonastick", "carrotonstick", "stickcarrot",
            "carrotstick"),
    NETHER_STAR(ItemID.NETHER_STAR, "Nether star", "netherstar", "starnether"),
    PUMPKIN_PIE(ItemID.PUMPKIN_PIE, "Pumpkin pie", "pumpkinpie"),
    FIREWORK_ROCKET(ItemID.FIREWORK_ROCKET, "Firework rocket", "fireworkrocket", "firework", "rocket"),
    FIREWORK_STAR(ItemID.FIREWORK_STAR, "Firework star", "fireworkstar", "fireworkcharge"),
    ENCHANTED_BOOK(ItemID.ENCHANTED_BOOK, "Enchanted book", "enchantedbook", "spellbook", "enchantedtome", "tome"),
    COMPARATOR(ItemID.COMPARATOR, "Comparator", "comparator", "capacitor"),
    NETHER_BRICK_ITEM(ItemID.NETHER_BRICK, "Nether Brick (item)", "netherbrickitem"),
    NETHER_QUARTZ(ItemID.NETHER_QUARTZ, "Nether Quartz", "netherquartz", "quartz"),
    TNT_MINECART(ItemID.TNT_MINECART, "Minecart with TNT", "minecraftwithtnt", "tntminecart", "minecarttnt"),
    HOPPER_MINECART(ItemID.HOPPER_MINECART, "Minecart with Hopper", "minecraftwithhopper", "hopperminecart",
            "minecarthopper"),
    HORSE_ARMOR_IRON(ItemID.HORSE_ARMOR_IRON, "Iron Horse Armor", "ironhorsearmor", "ironbarding"),
    HORSE_ARMOR_GOLD(ItemID.HORSE_ARMOR_GOLD, "Gold Horse Armor", "goldhorsearmor", "goldbarding"),
    HORSE_ARMOR_DIAMOND(ItemID.HORSE_ARMOR_DIAMOND, "Diamond Horse Armor", "diamondhorsearmor", "diamondbarding"),
    LEAD(ItemID.LEAD, "Lead", "lead", "leash"),
    NAME_TAG(ItemID.NAME_TAG, "Name Tag", "nametag"),
    DISC_13(ItemID.DISC_13, "Music Disc - 13", "musicdisc13", "disc_13"),
    DISC_CAT(ItemID.DISC_CAT, "Music Disc - Cat", "musicdisccat", "disc_cat"),
    DISC_BLOCKS(ItemID.DISC_BLOCKS, "Music Disc - blocks", "musicdiscblocks", "disc_blocks"),
    DISC_CHIRP(ItemID.DISC_CHIRP, "Music Disc - chirp", "musicdiscchirp", "disc_chirp"),
    DISC_FAR(ItemID.DISC_FAR, "Music Disc - far", "musicdiscfar", "disc_far"),
    DISC_MALL(ItemID.DISC_MALL, "Music Disc - mall", "musicdiscmall", "disc_mall"),
    DISC_MELLOHI(ItemID.DISC_MELLOHI, "Music Disc - mellohi", "musicdiscmellohi", "disc_mellohi"),
    DISC_STAL(ItemID.DISC_STAL, "Music Disc - stal", "musicdiscstal", "disc_stal"),
    DISC_STRAD(ItemID.DISC_STRAD, "Music Disc - strad", "musicdiscstrad", "disc_strad"),
    DISC_WARD(ItemID.DISC_WARD, "Music Disc - ward", "muiscdiscward", "disc_ward"),
    DISC_11(ItemID.DISC_11, "Music Disc - 11", "musicdisc11", "disc_11"),
    DISC_WAIT(ItemID.DISC_WAIT, "Music Disc - wait", "musicdiscwait", "disc_wait");

    /**
     * Stores a map of the IDs for fast access.
     */
    private static final Map<BaseItem, ItemType> ids = new HashMap<>();
    /**
     * Stores a map of the names for fast access. Insertion order follows
     * declaration order; a duplicated alias resolves to the last declared
     * constant.
     */
    private static final Map<String, ItemType> lookup = new LinkedHashMap<>();

    // Numeric block/item ID.
    private final int id;
    // Damage (data) value; -1 means "unspecified" (normalized to 0 by getData()).
    private final int data;
    // Human-readable display name.
    private final String name;
    // Lowercase aliases registered into the lookup map.
    private final String[] lookupKeys;

    static {
        for (ItemType type : EnumSet.allOf(ItemType.class)) {
            ids.put(new BaseItem(type.id, type.data), type);
            for (String key : type.lookupKeys) {
                lookup.put(key, type);
            }
        }
    }

    /**
     * Construct a type with no data value and a single alias.
     *
     * @param id        numeric block/item ID
     * @param name      display name
     * @param lookupKey lookup alias
     */
    ItemType(int id, String name, String lookupKey) {
        this.id = id;
        this.data = -1;
        this.name = name;
        this.lookupKeys = new String[]{lookupKey};
    }

    /**
     * Construct a type with a data value and a single alias.
     *
     * @param id        numeric block/item ID
     * @param data      damage (data) value
     * @param name      display name
     * @param lookupKey lookup alias
     */
    ItemType(int id, int data, String name, String lookupKey) {
        this.id = id;
        this.data = data;
        this.name = name;
        this.lookupKeys = new String[]{lookupKey};
    }

    /**
     * Construct a type with no data value and several aliases.
     *
     * @param id         numeric block/item ID
     * @param name       display name
     * @param lookupKeys lookup aliases
     */
    ItemType(int id, String name, String... lookupKeys) {
        this.id = id;
        this.data = -1;
        this.name = name;
        this.lookupKeys = lookupKeys;
    }

    /**
     * Construct a type with a data value and several aliases.
     *
     * @param id         numeric block/item ID
     * @param data       damage (data) value
     * @param name       display name
     * @param lookupKeys lookup aliases
     */
    ItemType(int id, int data, String name, String... lookupKeys) {
        this.id = id;
        this.data = data;
        this.name = name;
        this.lookupKeys = lookupKeys;
    }

    /**
     * Return type from ID and data value. May return null.
     *
     * <p>Name retains its historical misspelling for source compatibility;
     * prefer {@link #fromNumeric(int, short)}. The linear scan is kept rather
     * than {@code ids.get(...)} because BaseItem's hashCode/equals consistency
     * is defined elsewhere — verify before switching to a direct map lookup.
     *
     * @param id   numeric block/item ID
     * @param data damage (data) value
     * @return matching type, or null if none
     */
    public static ItemType fromNumberic(int id, short data) {
        BaseItem i = new BaseItem(id, data);
        for (Map.Entry<BaseItem, ItemType> entry : ids.entrySet()) {
            if (entry.getKey().equals(i)) return entry.getValue();
        }
        return null;
    }

    /**
     * Correctly spelled alias for {@link #fromNumberic(int, short)}.
     *
     * @param id   numeric block/item ID
     * @param data damage (data) value
     * @return matching type, or null if none
     */
    public static ItemType fromNumeric(int id, short data) {
        return fromNumberic(id, data);
    }

    /**
     * Return type from name (fuzzy matching allowed). May return null.
     *
     * @param name alias or "id[:data]" numeric string
     * @return matching type, or null if none
     */
    public static ItemType lookup(String name) {
        return lookup(name, true);
    }

    /**
     * Return type from name. May return null.
     *
     * <p>Accepts either a numeric "id" / "id:data" string or an alias; the
     * numeric form is tried first and a parse failure falls through to the
     * alias lookup.
     *
     * @param name  alias or numeric string
     * @param fuzzy whether to allow fuzzy alias matching
     * @return matching type, or null if none
     */
    public static ItemType lookup(String name, boolean fuzzy) {
        try {
            String[] split = name.split(":");
            int id = Integer.parseInt(split[0]);
            short data = 0;
            if (split.length > 1) {
                data = Short.parseShort(split[1]);
            }
            return fromNumberic(id, data);
        } catch (NumberFormatException e) {
            return StringUtil.lookup(lookup, name, fuzzy);
        }
    }

    /**
     * Get item numeric ID.
     *
     * @return the block/item ID
     */
    public int getID() {
        return id;
    }

    /**
     * Get item numeric data value; the internal "unspecified" sentinel (-1)
     * is normalized to 0.
     *
     * @return the data value, never negative
     */
    public int getData() {
        return data < 0 ? 0 : data;
    }

    /**
     * Get user-friendly item name.
     *
     * @return display name
     */
    public String getName() {
        return name;
    }

    /**
     * Get a list of aliases.
     *
     * @return the lookup aliases for this type
     */
    public String[] getAliases() {
        return lookupKeys;
    }

    // IDs of items that never stack (tools, armor, vehicles, discs, ...).
    private static final Set<Integer> shouldNotStack = new HashSet<>();

    static {
        shouldNotStack.add(ItemID.IRON_SHOVEL);
        shouldNotStack.add(ItemID.IRON_PICK);
        shouldNotStack.add(ItemID.IRON_AXE);
        shouldNotStack.add(ItemID.FLINT_AND_TINDER);
        shouldNotStack.add(ItemID.BOW);
        shouldNotStack.add(ItemID.IRON_SWORD);
        shouldNotStack.add(ItemID.WOOD_SWORD);
        shouldNotStack.add(ItemID.WOOD_SHOVEL);
        shouldNotStack.add(ItemID.WOOD_PICKAXE);
        shouldNotStack.add(ItemID.WOOD_AXE);
        shouldNotStack.add(ItemID.STONE_SWORD);
        shouldNotStack.add(ItemID.STONE_SHOVEL);
        shouldNotStack.add(ItemID.STONE_PICKAXE);
        shouldNotStack.add(ItemID.STONE_AXE);
        shouldNotStack.add(ItemID.DIAMOND_SWORD);
        shouldNotStack.add(ItemID.DIAMOND_SHOVEL);
        shouldNotStack.add(ItemID.DIAMOND_PICKAXE);
        shouldNotStack.add(ItemID.DIAMOND_AXE);
        shouldNotStack.add(ItemID.BOWL);
        shouldNotStack.add(ItemID.GOLD_SWORD);
        shouldNotStack.add(ItemID.GOLD_SHOVEL);
        shouldNotStack.add(ItemID.GOLD_PICKAXE);
        shouldNotStack.add(ItemID.GOLD_AXE);
        shouldNotStack.add(ItemID.WOOD_HOE);
        shouldNotStack.add(ItemID.STONE_HOE);
        shouldNotStack.add(ItemID.IRON_HOE);
        shouldNotStack.add(ItemID.DIAMOND_HOE);
        shouldNotStack.add(ItemID.GOLD_HOE);
        shouldNotStack.add(ItemID.LEATHER_HELMET);
        shouldNotStack.add(ItemID.LEATHER_CHEST);
        shouldNotStack.add(ItemID.LEATHER_PANTS);
        shouldNotStack.add(ItemID.LEATHER_BOOTS);
        shouldNotStack.add(ItemID.CHAINMAIL_CHEST);
        shouldNotStack.add(ItemID.CHAINMAIL_HELMET);
        shouldNotStack.add(ItemID.CHAINMAIL_BOOTS);
        shouldNotStack.add(ItemID.CHAINMAIL_PANTS);
        shouldNotStack.add(ItemID.IRON_HELMET);
        shouldNotStack.add(ItemID.IRON_CHEST);
        shouldNotStack.add(ItemID.IRON_PANTS);
        shouldNotStack.add(ItemID.IRON_BOOTS);
        shouldNotStack.add(ItemID.DIAMOND_HELMET);
        shouldNotStack.add(ItemID.DIAMOND_PANTS);
        shouldNotStack.add(ItemID.DIAMOND_CHEST);
        shouldNotStack.add(ItemID.DIAMOND_BOOTS);
        shouldNotStack.add(ItemID.GOLD_HELMET);
        shouldNotStack.add(ItemID.GOLD_CHEST);
        shouldNotStack.add(ItemID.GOLD_PANTS);
        shouldNotStack.add(ItemID.GOLD_BOOTS);
        shouldNotStack.add(ItemID.WOODEN_DOOR_ITEM);
        shouldNotStack.add(ItemID.WATER_BUCKET);
        shouldNotStack.add(ItemID.LAVA_BUCKET);
        shouldNotStack.add(ItemID.MINECART);
        shouldNotStack.add(ItemID.SADDLE);
        shouldNotStack.add(ItemID.IRON_DOOR_ITEM);
        shouldNotStack.add(ItemID.WOOD_BOAT);
        shouldNotStack.add(ItemID.MILK_BUCKET);
        shouldNotStack.add(ItemID.STORAGE_MINECART);
        shouldNotStack.add(ItemID.POWERED_MINECART);
        shouldNotStack.add(ItemID.WATCH);
        shouldNotStack.add(ItemID.CAKE_ITEM);
        shouldNotStack.add(ItemID.BED_ITEM);
        shouldNotStack.add(ItemID.MAP);
        shouldNotStack.add(ItemID.SHEARS);
        shouldNotStack.add(ItemID.HEAD);
        shouldNotStack.add(ItemID.FIREWORK_ROCKET);
        shouldNotStack.add(ItemID.FIREWORK_STAR);
        shouldNotStack.add(ItemID.ENCHANTED_BOOK);
        shouldNotStack.add(ItemID.TNT_MINECART);
        shouldNotStack.add(ItemID.HOPPER_MINECART);
        shouldNotStack.add(ItemID.HORSE_ARMOR_IRON);
        shouldNotStack.add(ItemID.HORSE_ARMOR_GOLD);
        shouldNotStack.add(ItemID.HORSE_ARMOR_DIAMOND);
        shouldNotStack.add(ItemID.DISC_13);
        shouldNotStack.add(ItemID.DISC_CAT);
        shouldNotStack.add(ItemID.DISC_BLOCKS);
        shouldNotStack.add(ItemID.DISC_CHIRP);
        shouldNotStack.add(ItemID.DISC_FAR);
        shouldNotStack.add(ItemID.DISC_MALL);
        shouldNotStack.add(ItemID.DISC_MELLOHI);
        shouldNotStack.add(ItemID.DISC_STAL);
        shouldNotStack.add(ItemID.DISC_STRAD);
        shouldNotStack.add(ItemID.DISC_WARD);
        shouldNotStack.add(ItemID.DISC_11);
        shouldNotStack.add(ItemID.DISC_WAIT);
    }

    /**
     * Returns true if an item should not be stacked.
     *
     * @param id numeric item ID
     * @return true if the item never stacks
     */
    public static boolean shouldNotStack(int id) {
        return shouldNotStack.contains(id);
    }

    // IDs whose damage value carries meaning (variants) rather than wear.
    private static final Set<Integer> usesDamageValue = new HashSet<>();

    static {
        usesDamageValue.add(BlockID.WOOD);
        usesDamageValue.add(BlockID.SAPLING);
        usesDamageValue.add(BlockID.LOG);
        usesDamageValue.add(BlockID.LEAVES);
        usesDamageValue.add(BlockID.SANDSTONE);
        usesDamageValue.add(BlockID.LONG_GRASS);
        usesDamageValue.add(BlockID.CLOTH);
        usesDamageValue.add(BlockID.DOUBLE_STEP);
        usesDamageValue.add(BlockID.STEP);
        usesDamageValue.add(BlockID.SILVERFISH_BLOCK);
        usesDamageValue.add(BlockID.STONE_BRICK);
        usesDamageValue.add(BlockID.BROWN_MUSHROOM_CAP);
        usesDamageValue.add(BlockID.RED_MUSHROOM_CAP);
        usesDamageValue.add(BlockID.DOUBLE_WOODEN_STEP);
        usesDamageValue.add(BlockID.WOODEN_STEP);
        usesDamageValue.add(BlockID.COBBLESTONE_WALL);
        usesDamageValue.add(BlockID.ANVIL);
        usesDamageValue.add(BlockID.QUARTZ_BLOCK);
        usesDamageValue.add(BlockID.STAINED_CLAY);
        usesDamageValue.add(BlockID.CARPET);
        usesDamageValue.add(ItemID.COAL);
        usesDamageValue.add(ItemID.INK_SACK);
        usesDamageValue.add(ItemID.POTION);
        usesDamageValue.add(ItemID.SPAWN_EGG);
        usesDamageValue.add(ItemID.MAP);
        usesDamageValue.add(ItemID.HEAD);
        usesDamageValue.add(ItemID.GOLD_APPLE);
    }

    /**
     * Returns true if an item uses its damage value for something
     * other than damage.
     *
     * @param id numeric block/item ID
     * @return true if the data value encodes a variant
     */
    public static boolean usesDamageValue(int id) {
        return usesDamageValue.contains(id);
    }
}
package com.smin.controller;

import com.smin.service.CompanyService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Controller;

import javax.annotation.Resource;

/**
 * Spring MVC controller for company registration/provisioning. Currently only
 * holds configuration values injected from properties; the actual endpoints
 * below are commented out (apparently a work-in-progress install workflow
 * driven over STOMP messaging).
 */
@Controller
public class CompanyController {

    private static final Logger LOGGER = LoggerFactory.getLogger(CompanyController.class);

    // @Resource
    // private SimpMessagingTemplate messagingTemplate;

    // NOTE(review): the sudo password is injected as a plain property and (in the
    // commented-out code below) concatenated into a shell command line — if this
    // workflow is revived, prefer ProcessBuilder with an argument list and avoid
    // echoing the password through "sh -c".
    @Value("${sudo.pwd}")
    private String sudoPwd;

    // Database connection settings (used by the commented-out ActiveJDBC code below).
    @Value("${db.host}")
    private String dbHost;
    @Value("${db.username}")
    private String dbUsername;
    @Value("${db.password}")
    private String dbPassword;

    // Format string for the install command; placeholders are filled with the
    // install name, admin password and public domain (see register() below).
    @Value("${smin.cmd.install}")
    private String installCmd;

    @Resource
    private CompanyService companyService;

    // Disabled endpoint: list all registered companies from the database.
    // @ResponseBody
    // @RequestMapping(value = "/company", method = RequestMethod.GET)
    // public List<CompanyInfo> all() {
    // List<CompanyInfo> companyInfos = new ArrayList<>();
    // Base.open("org.postgresql.Driver", dbHost, dbUsername, dbPassword);
    // List<Company> companies = Company.findAll();
    // for (Company company : companies) {
    // CompanyInfo companyInfo = CompanyInfo.CompanyInfoBuilder.companyInfo()
    // .withInstallName(company.get("_installName").toString())
    // .withPublicDomain(company.get("_publicDomain").toString())
    // .withSysReport(company.get("_sysReport").toString())
    // .withAdminPassword(company.get("_adminPassword").toString()).build();
    // companyInfos.add(companyInfo);
    // Base.close();
    // return companyInfos;

    // Disabled endpoint: run the install command for a new company, streaming
    // process output to a STOMP topic, then persist the result.
    // @MessageMapping("/user/company/register")
    // public void register(CompanyInfo companyInfo) {
    // final String destination = "/topic/" + companyInfo.getInstallName() + "/company/register";
    // String command = String.format(installCmd, companyInfo.getInstallName(), companyInfo.getAdminPassword(), companyInfo.getPublicDomain());
    // try {
    // String[] cmd = {"sh", "-c", "echo '" + sudoPwd + "'| sudo -S " + command};
    // final Process process = Runtime.getRuntime().exec(cmd);
    // StringBuilder sysReportBuilder = new StringBuilder();
    // Thread[] threads = new Thread[]{
    // new InputStreamTask(process.getInputStream(), LOGGER, messagingTemplate, destination, sysReportBuilder),
    // new InputStreamTask(process.getErrorStream(), LOGGER, messagingTemplate, destination, sysReportBuilder)
    // for (Thread t : threads) {
    // t.start();
    // RunningTask warmTask = new RunningTask(">>System running tasks in the background...", LOGGER, messagingTemplate, destination);
    // warmTask.start();
    // for (Thread t : threads) {
    // t.join();
    // LOGGER.debug("All threads stopped");
    // warmTask.setStop(true);
    // companyInfo.setSysReport(sysReportBuilder.toString());
    // companyService.register(companyInfo);
    // messagingTemplate.convertAndSend(destination, "ENDED!!!!");
    // catch (Exception e) {
    // LOGGER.error("Error", e);
    // messagingTemplate.convertAndSend(destination, "Error, see log file for details");
}
package crawlercommons.sitemaps;

import java.io.IOException;
import java.net.URL;
import java.util.Collection;

import org.apache.commons.io.IOUtils;

/** Sitemap Tool for recursively fetching all URL's from a sitemap (and all of it's children) **/
public class SiteMapTester {

    // Shared parser instance; strict mode disabled.
    private static SiteMapParser parser = new SiteMapParser(false);

    /**
     * Entry point. Expects the sitemap URL as the first argument and an
     * optional MIME type as the second; prints usage otherwise.
     */
    public static void main(String[] args) throws IOException, UnknownFormatException {
        if (args.length >= 1) {
            URL target = new URL(args[0]);
            String contentType = (args.length > 1) ? args[1] : null;
            parse(target, contentType);
        } else {
            System.err.println("Usage: SiteMapTester <URL_TO_TEST> [MIME_TYPE]");
        }
    }

    /**
     * Parses a sitemap recursively: when the fetched document is a sitemap
     * index, every referenced child sitemap is parsed in turn; otherwise the
     * contained URLs are printed to stdout.
     */
    private static void parse(URL url, String mt) throws IOException, UnknownFormatException {
        byte[] body = IOUtils.toByteArray(url);

        // With no MIME type supplied, let the parser guess it from the content.
        AbstractSiteMap siteMap = (mt == null || mt.equals(""))
                ? parser.parseSiteMap(body, url)
                : parser.parseSiteMap(mt, body, url);

        if (siteMap.isIndex()) {
            for (AbstractSiteMap child : ((SiteMapIndex) siteMap).getSitemaps()) {
                parse(child.getUrl(), mt); // recurse into each child sitemap
            }
        } else {
            for (SiteMapURL entry : ((SiteMap) siteMap).getSiteMapUrls()) {
                System.out.println(entry.getUrl());
            }
        }
    }
}
package de.domisum.lib.auxilium.util;

import com.google.common.collect.Streams;
import de.domisum.lib.auxilium.util.file.DirectoryCopy;
import de.domisum.lib.auxilium.util.file.FileFilter;
import de.domisum.lib.auxilium.util.java.ThreadUtil;
import de.domisum.lib.auxilium.util.java.annotations.API;
import de.domisum.lib.auxilium.util.java.exceptions.ShouldNeverHappenError;
import lombok.AccessLevel;
import lombok.NoArgsConstructor;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.lang3.Validate;

import javax.imageio.ImageIO;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.channels.ClosedByInterruptException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Collection;

/**
 * Static helpers for reading, writing, copying, listing and deleting files and
 * directories. All checked {@link IOException}s are rethrown as
 * {@link UncheckedIOException}.
 */
@API
@NoArgsConstructor(access = AccessLevel.PRIVATE)
public final class FileUtil
{

	// CONSTANTS
	/** Charset used by the string read/write overloads that take none. */
	@API
	public static final Charset DEFAULT_STRING_ENCODING = StandardCharsets.UTF_8;

	// TEMP
	// Temporary directories registered for deletion by the shutdown hook.
	private static final Collection<File> temporaryDirectories = new ArrayList<>();


	// STRING
	/** Reads the whole file as a string using {@link #DEFAULT_STRING_ENCODING}. */
	@API
	public static String readString(File file)
	{
		return readString(file, DEFAULT_STRING_ENCODING);
	}

	/** Reads the whole file as a string using the given encoding. */
	@API
	public static String readString(File file, Charset encoding)
	{
		try
		{
			byte[] contentBytes = Files.readAllBytes(file.toPath());
			return new String(contentBytes, encoding);
		}
		catch(IOException e)
		{
			throw new UncheckedIOException(e);
		}
	}

	/** Writes the string to the file using {@link #DEFAULT_STRING_ENCODING}, creating parent dirs. */
	@API
	public static void writeString(File file, String toWrite)
	{
		writeString(file, toWrite, DEFAULT_STRING_ENCODING);
	}

	/** Writes the string to the file using the given encoding, creating parent dirs. */
	@API
	public static void writeString(File file, String toWrite, Charset encoding)
	{
		try
		{
			createParentDirectory(file);
			FileUtils.writeStringToFile(file, toWrite, encoding);
		}
		catch(IOException e)
		{
			throw new UncheckedIOException(e);
		}
	}


	// RAW
	/** Reads the whole file as a byte array. */
	@API
	public static byte[] readRaw(File file)
	{
		try
		{
			return Files.readAllBytes(file.toPath());
		}
		catch(IOException e)
		{
			throw new UncheckedIOException(e);
		}
	}

	/** Writes the byte array to the file, creating parent dirs. */
	@API
	public static void writeRaw(File file, byte[] toWrite)
	{
		try
		{
			createParentDirectory(file);
			Files.write(file.toPath(), toWrite);
		}
		catch(ClosedByInterruptException ignored)
		{
			// ignore this, because the thread was interrupted and no result is expected
		}
		catch(IOException e)
		{
			throw new UncheckedIOException(e);
		}
	}


	// IMAGE
	/** Reads the file as an image via {@link ImageIO}. */
	@API
	public static BufferedImage readImage(File file)
	{
		try
		{
			return ImageIO.read(file);
		}
		catch(IOException e)
		{
			throw new UncheckedIOException(e);
		}
	}

	/** Writes the image to the file; the format is derived from the file extension. */
	@API
	public static void writeImage(File file, BufferedImage image)
	{
		Validate.notNull(file, "file was null");
		Validate.notNull(image, "image was null");

		try
		{
			createParentDirectory(file);
			ImageIO.write(image, getExtension(file), file);
		}
		catch(IOException e)
		{
			throw new UncheckedIOException(e);
		}
	}


	// COPY
	/**
	 * Copies a single file, replacing an existing target file. Fails if the
	 * target exists and is a directory.
	 */
	@API
	public static void copyFile(File from, File to)
	{
		if(to.exists() && to.isDirectory())
			throw new UncheckedIOException(new IOException(
					"can't copy to file '"+to+"', it is a directory and already exists"));

		try
		{
			to.getAbsoluteFile().getParentFile().mkdirs();
			Files.copy(from.getAbsoluteFile().toPath(), to.getAbsoluteFile().toPath(), StandardCopyOption.REPLACE_EXISTING);
		}
		catch(IOException e)
		{
			throw new UncheckedIOException(e);
		}
	}

	/** Copies a directory tree, applying the given filters. */
	@API
	public static void copyDirectory(File sourceRootDirectory, File targetRootDirectory, FileFilter... filters)
	{
		DirectoryCopy.fromTo(sourceRootDirectory, targetRootDirectory, filters).copy();
	}


	// DIRECTORY
	/** Creates the parent directory (and any ancestors) of the given file. */
	@API
	public static void createParentDirectory(File file)
	{
		file.getAbsoluteFile().getParentFile().mkdirs();
	}

	/** Deletes the directory and everything inside it; no-op if it doesn't exist. */
	@API
	public static void deleteDirectory(File directory)
	{
		if(!directory.exists())
			return;

		validateIsNotFile(directory);

		deleteDirectoryContents(directory);
		deleteFile(directory);
	}

	/** Deletes everything inside the directory but keeps the directory itself. */
	@API
	public static void deleteDirectoryContents(File directory)
	{
		if(!directory.exists())
			return;

		validateIsNotFile(directory);

		for(File file : listFilesFlat(directory, FileType.FILE))
			deleteFile(file);
		for(File dir : listFilesFlat(directory, FileType.DIRECTORY))
			deleteDirectory(dir);
	}

	private static void validateIsNotFile(File directory)
	{
		if(directory.isFile())
			throw new IllegalArgumentException("given directory is file, not directory");
	}

	/** Lists the directory's direct children of the given type. */
	@API
	public static Collection<File> listFilesFlat(File directory, FileType fileType)
	{
		return listFiles(directory, fileType, false);
	}

	/** Lists all descendants of the directory of the given type. */
	@API
	public static Collection<File> listFilesRecursively(File directory, FileType fileType)
	{
		return listFiles(directory, fileType, true);
	}

	private static Collection<File> listFiles(File directory, FileType fileType, boolean recursive)
	{
		validateIsNotFile(directory);

		Collection<File> directoryContents = new ArrayList<>();

		try(DirectoryStream<Path> stream = Files.newDirectoryStream(directory.toPath()))
		{
			Streams.stream(stream).map(Path::toFile).forEach(f->
			{
				if(fileType.isOfType(f))
					directoryContents.add(f);

				if(recursive && f.isDirectory())
					directoryContents.addAll(listFilesRecursively(f, fileType));
			});
		}
		catch(IOException e)
		{
			throw new UncheckedIOException(e);
		}

		return directoryContents;
	}


	// TEMP
	/** Creates a temporary file (deleted on JVM exit) with no extension. */
	@API
	public static File createTemporaryFile()
	{
		return createTemporaryFile(null);
	}

	/**
	 * Creates a temporary file (deleted on JVM exit). The extension may be
	 * given with or without a leading dot; null means no extension.
	 */
	@API
	public static File createTemporaryFile(String extension)
	{
		String cleanedExtension = extension;
		if((cleanedExtension != null) && !cleanedExtension.startsWith("."))
			cleanedExtension = "."+cleanedExtension;

		try
		{
			File file = File.createTempFile("tempFile", cleanedExtension);
			file.deleteOnExit();
			return file;
		}
		catch(IOException e)
		{
			throw new UncheckedIOException(e);
		}
	}

	/** Creates a temporary directory, registered for recursive deletion on shutdown. */
	@API
	public static File createTemporaryDirectory()
	{
		try
		{
			File directory = Files.createTempDirectory("tempDirectory").toFile();
			deleteDirectoryOnShutdown(directory);
			return directory;
		}
		catch(IOException e)
		{
			throw new UncheckedIOException(e);
		}
	}

	private static void deleteDirectoryOnShutdown(File directory)
	{
		// register the shutdown hook lazily, once, on first use
		if(temporaryDirectories.isEmpty())
			ThreadUtil.registerShutdownHook(()->temporaryDirectories.forEach(FileUtil::deleteDirectory));

		temporaryDirectories.add(directory);
	}


	// GENERAL FILE
	/** Deletes the file; fails with an {@link UncheckedIOException} if it can't. */
	@API
	public static void deleteFile(File file)
	{
		try
		{
			Files.delete(file.toPath());
		}
		catch(IOException e)
		{
			throw new UncheckedIOException(e);
		}
	}

	/** Returns the file's extension without the dot, e.g. "txt". */
	@API
	public static String getExtension(File file)
	{
		return FilenameUtils.getExtension(file.getName());
	}

	/** Returns everything from the first dot on, e.g. ".tar.gz"; "" if no dot. */
	@API
	public static String getCompositeExtension(File file)
	{
		String fileName = file.getName();
		if(!fileName.contains("."))
			return "";

		return fileName.substring(fileName.indexOf('.'));
	}

	/** Returns the file name with the composite extension stripped. */
	@API
	public static String getNameWithoutCompositeExtension(File file)
	{
		String compositeFileExtension = getCompositeExtension(file);

		String fileName = file.getName();
		String fileNameWithout = fileName.substring(0, fileName.length()-compositeFileExtension.length());
		return fileNameWithout;
	}

	/** Returns the absolute path with delimiters unified to forward slashes. */
	@API
	public static String getFilePath(File file)
	{
		String path = file.getAbsoluteFile().getPath();
		path = unifyDelimiters(path);
		return path;
	}

	/** Replaces backslashes with forward slashes. */
	@API
	public static String unifyDelimiters(String path)
	{
		return path.replaceAll(StringUtil.escapeStringForRegex("\\"), "/");
	}

	/** Returns the file's last-modified timestamp as an {@link Instant}. */
	@API
	public static Instant getLastModified(File file)
	{
		return Instant.ofEpochMilli(file.lastModified());
	}


	/** Selector for {@link #listFilesFlat}/{@link #listFilesRecursively}. */
	public enum FileType
	{

		FILE,
		DIRECTORY,
		FILE_AND_DIRECTORY;

		public boolean isOfType(File file)
		{
			if(!file.exists())
				throw new IllegalArgumentException("file does not exist: "+file);

			if(this == FILE_AND_DIRECTORY)
				return true;
			else if(this == FILE)
				return file.isFile();
			else if(this == DIRECTORY)
				return file.isDirectory();

			throw new ShouldNeverHappenError("unknown file type: "+this);
		}

	}

}
package de.mathan.maven.latex;

import org.apache.commons.io.FileUtils;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugin.logging.Log;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.project.MavenProject;
import org.zeroturnaround.exec.ProcessExecutor;
import org.zeroturnaround.exec.stream.LogOutputStream;

import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.util.*;

/**
 * The MathanLatexMojo provides the goal "latex" to generate dvi, ps or pdf out of LaTeX (.tex) documents. Therefore
 * all the LaTeX tools are executed in a defined order. There are pre-defined defaults for all supported output formats.
 * By configuration the arguments for the tool execution can be modified. It is also possible to extend the process to
 * include own tool executions.
 *
 * @author Matthias Hanisch (reallyinsane)
 */
@Mojo(name = "latex")
public class MathanLatexMojo extends AbstractMojo {

    /**
     * Identifier used as placeholder for the exeution of the latex tools to produce the output documment.
     */
    private static final String LaTeX = "LaTeX";

    /**
     * Identifier for a sub directory with common resources for all tex documents.
     */
    private static final String DIRECTORY_COMMONS = "commons";

    /**
     * The default execution chain defines the order of the tool execution. Used when no
     * explicit {@link #buildSteps} are configured.
     */
    private static final String[] DEFAULT_EXECUTION_CHAIN = {
            LaTeX, Step.STEP_BIBTEX.getId(), Step.STEP_MAKEINDEX.getId(), Step.STEP_MAKEINDEXNOMENCL.getId(), LaTeX, LaTeX};

    /**
     * This list includes the predefined execution steps supported by this plugin.
     */
    private static final List<Step> DEFAULT_EXECUTABLES = Arrays.asList(
            Step.STEP_BIBER, Step.STEP_BIBTEX, Step.STEP_DVIPDFM, Step.STEP_DVIPS, Step.STEP_LATEX, Step.STEP_LULATEX,
            Step.STEP_MAKEINDEX, Step.STEP_MAKEINDEXNOMENCL, Step.STEP_PDFLATEX, Step.STEP_PS2PDF, Step.STEP_PSLATEX,
            Step.STEP_XELATEX);

    /**
     * The output format. Supported are dvi, pdf and ps.
     */
    @Parameter(defaultValue = "pdf")
    private String outputFormat;

    /**
     * The bin directory of the LaTeX distribution.
     */
    @Parameter(required = true)
    private String texBin;

    /**
     * The list of tools to be executed to create the output format. (without bibtex, biber, makeindex, etc.)
     */
    @Parameter
    private String[] latexSteps;

    /**
     * The list of tools to be executed in the build. (including bibtex, biber, makeindex, etc.). The step to create
     * the output format is set using the placeholder {@link #LaTeX}.
     */
    @Parameter
    private String[] buildSteps;

    /**
     * User-defined steps which can be included in {@link #buildSteps} or {@link #latexSteps}.
     */
    @Parameter
    private Step[] steps;

    /** Registry mapping step IDs to step definitions (defaults plus user-defined). */
    private Map<String, Step> stepRegistry = new HashMap<>();

    @Parameter(defaultValue = "${project}", required = true, readonly = true)
    private MavenProject project;

    public MathanLatexMojo() {
    }

    /**
     * Splits the given string into tokens so that
     * sections of the string that are enclosed into quotes will
     * form one token (without the quotes).
     * <p>
     * E.g. string = "-editor \"echo %f:%l\" -q"
     * tokens = { "-editor", "echo %f:%l", "-q" }
     *
     * @param args the string
     * @param list tokens will be added to the end of this list
     *             in the order they are extracted
     */
    private static void tokenizeEscapedString(String args, List<String> list) {
        StringTokenizer st = new StringTokenizer(args, " ");
        while (st.hasMoreTokens()) {
            String token = st.nextToken();
            if (token.charAt(0) == '"' && token.charAt(token.length() - 1) == '"') {
                // fully quoted single token: strip the quotes
                list.add(token.substring(1, token.length() - 1));
            } else if (token.charAt(0) == '"') {
                // opening quote: join tokens until the closing quote
                StringBuilder sb = new StringBuilder();
                sb.append(token.substring(1));
                token = st.nextToken();
                while (!token.endsWith("\"") && st.hasMoreTokens()) {
                    sb.append(' ');
                    sb.append(token);
                    token = st.nextToken();
                }
                sb.append(' ');
                sb.append(token.substring(0, token.length() - 1));
                list.add(sb.toString());
            } else {
                list.add(token);
            }
        }
    }

    /**
     * Returns the single file with the given extension in the directory.
     *
     * @throws MojoExecutionException if no such file or more than one exists
     */
    private static File getFile(File directory, String extension) throws MojoExecutionException {
        File[] files = directory.listFiles(new FileFilter() {
            @Override
            public boolean accept(File pathname) {
                return pathname.getName().endsWith("." + extension);
            }
        });
        if (files == null || files.length == 0) {
            throw new MojoExecutionException("No " + extension + " file found");
        } else if (files.length > 1) {
            throw new MojoExecutionException("Multiple " + extension + " files found");
        } else {
            return files[0];
        }
    }

    /** Mojo entry point: resolves the step chain and runs it for each tex source directory. */
    public void execute() throws MojoExecutionException, MojoFailureException {
        List<Step> stepsToExecute = configureSteps();
        getLog().info("[mathan] bin directory of tex distribution: " + texBin);
        getLog().info("[mathan] output format : " + outputFormat);
        getLog().info("[mathan] latex steps: " + String.join(",", latexSteps));
        getLog().info("[mathan] build steps: " + String.join(",", buildSteps));

        File baseDirectory = project.getBasedir();
        File texDirectory = new File(baseDirectory, "src/main/tex");

        List<File> subDirectories = getSubdirectories(texDirectory);
        File commonsDirectory = getCommonsDirectory(texDirectory);
        if (subDirectories.isEmpty()) {
            execute(stepsToExecute, texDirectory, null);
        } else {
            for (File subDirectory : subDirectories) {
                execute(stepsToExecute, subDirectory, commonsDirectory);
            }
        }
    }

    /**
     * Copies the sources (and optional commons) into the target working directory
     * and runs all configured steps on the single .tex file found there.
     */
    private void execute(List<Step> stepsToExecute, File source, File commons) throws MojoExecutionException {
        File targetDirectory = new File(project.getBasedir(), "target/latex/" + source.getName());
        // mkdirs() returns false when the directory already exists (e.g. on a rebuild),
        // so only fail when it is genuinely missing and could not be created
        if (!targetDirectory.exists() && !targetDirectory.mkdirs()) {
            throw new MojoExecutionException(String.format("Could not create directory %s", targetDirectory.getAbsolutePath()));
        }
        if (commons != null) {
            try {
                FileUtils.copyDirectory(commons, targetDirectory);
            } catch (IOException e) {
                throw new MojoExecutionException(String.format("Could not copy context from %s to %s", commons.getAbsolutePath(), targetDirectory.getAbsolutePath()), e);
            }
        }
        try {
            FileUtils.copyDirectory(source, targetDirectory);
        } catch (IOException e) {
            throw new MojoExecutionException(String.format("Could not copy context from %s to %s", source.getAbsolutePath(), targetDirectory.getAbsolutePath()), e);
        }
        File texFile = getFile(targetDirectory, "tex");
        getLog().info(String.format("[mathan] processing %s", texFile.getName()));
        for (Step step : stepsToExecute) {
            getLog().info("[mathan] execution: " + step.getName());
            execute(step, targetDirectory, texFile);
        }
    }

    /** Returns all sub directories of the tex directory except the commons directory. */
    private List<File> getSubdirectories(File texDirectory) {
        File[] files = texDirectory.listFiles(e -> e.isDirectory() && !DIRECTORY_COMMONS.equals(e.getName()));
        if (files == null) {
            return Collections.emptyList();
        } else {
            return Arrays.asList(files);
        }
    }

    /** Returns the commons directory or null if there is none. */
    private File getCommonsDirectory(File texDirectory) {
        File[] files = texDirectory.listFiles(e -> e.isDirectory() && DIRECTORY_COMMONS.equals(e.getName()));
        if (files != null && files.length == 1) {
            return files[0];
        } else {
            return null;
        }
    }

    /**
     * Validates the output format and resolves the configured (or default)
     * latex/build step IDs into concrete {@link Step}s.
     *
     * @return the ordered list of steps to execute
     * @throws MojoExecutionException if the output format or a step ID is unknown
     */
    private List<Step> configureSteps() throws MojoExecutionException {
        // check output format
        if (outputFormat.length() == 0) {
            throw new MojoExecutionException("No outputFormat specified. Supported values are: dvi, pdf, ps.");
        }
        if (!Arrays.asList("dvi", "pdf", "ps").contains(outputFormat)) {
            throw new MojoExecutionException(String.format("Invalid outputFormat '%s' specified. Supported values are: dvi, pdf, ps.", outputFormat));
        }
        // setup step registry
        DEFAULT_EXECUTABLES.forEach(e -> stepRegistry.put(e.getId(), e));
        if (steps != null) {
            Arrays.asList(steps).forEach(e -> stepRegistry.put(e.getId(), e));
        }
        // setup latex steps
        if (latexSteps == null) {
            switch (outputFormat) {
                case "dvi":
                    latexSteps = new String[]{Step.STEP_LATEX.getId()};
                    break;
                case "ps":
                    latexSteps = new String[]{Step.STEP_PSLATEX.getId()};
                    break;
                case "pdf":
                    latexSteps = new String[]{Step.STEP_PDFLATEX.getId()};
                    break;
            }
        }
        List<Step> listLatexSteps = new ArrayList<>();
        for (String latexStep : latexSteps) {
            Step step = stepRegistry.get(latexStep);
            if (step == null) {
                // pass the offending ID — the original format call had no argument and
                // would have thrown MissingFormatArgumentException instead
                throw new MojoExecutionException(String.format("Step '%s' defined in 'latexSteps' is unknown. Consider to provide the definition of the step with the configuration 'steps'.", latexStep));
            }
            listLatexSteps.add(step);
        }
        // setup build steps; fall back to the default execution chain when unconfigured
        if (buildSteps == null) {
            buildSteps = DEFAULT_EXECUTION_CHAIN;
        }
        List<Step> listBuildSteps = new ArrayList<>();
        for (String buildStep : buildSteps) {
            if (LaTeX.equals(buildStep)) {
                listBuildSteps.addAll(listLatexSteps);
            } else {
                Step step = stepRegistry.get(buildStep);
                if (step == null) {
                    throw new MojoExecutionException(String.format("Step '%s' defined in 'buildSteps' is unknown. Consider to provide the definition of the step with the configuration 'steps'.", buildStep));
                }
                listBuildSteps.add(step);
            }
        }
        return listBuildSteps;
    }

    /**
     * Runs a single tool from the tex bin directory in the working directory,
     * streaming its output into the Maven log.
     */
    private void execute(Step executionStep, File texDir, File texFile) throws MojoExecutionException {
        String executableName = executionStep.getName();
        String os = System.getProperty("os.name").toLowerCase();
        if (os.contains("windows")) {
            executableName += ".exe";
        }
        File exec = new File(texBin, executableName);
        // split command into array
        List<String> list = new ArrayList<>();
        list.add(exec.getAbsolutePath());
        tokenizeEscapedString(getArguments(executionStep, texFile), list);
        String[] command = list.toArray(new String[0]);

        String prefix = "[mathan][" + executionStep.getName() + "]";
        try {
            int exitValue = new ProcessExecutor().command(command).directory(texDir).redirectOutput(new LatexPluginLogOutputStream(getLog(), prefix)).redirectError(new LatexPluginLogOutputStream(getLog(), prefix, true)).destroyOnExit().execute().getExitValue();
            // the exit value was previously computed but ignored; surface failures in the log
            if (exitValue != 0) {
                getLog().warn(String.format("%s finished with exit code %d", prefix, exitValue));
            }
        } catch (Exception e) {
            throw new MojoExecutionException("Building the project: ", e);
        }
    }

    /**
     * Resolves the step's argument template, substituting %input, %base and
     * %output with names derived from the resource file.
     */
    private String getArguments(Step executionStep, File resource) {
        String args = executionStep.getArguments();
        if (args == null) {
            return null;
        }
        String name = resource.getName();
        String baseName = name.substring(0, resource.getName().lastIndexOf('.'));
        String inputName = baseName + "." + executionStep.getInputFormat();
        String outputName = baseName + "." + executionStep.getOutputFormat();

        // quote names containing spaces so the tokenizer keeps them intact
        if (baseName.indexOf(' ') >= 0) {
            inputName = "\"" + inputName + "\"";
            outputName = "\"" + outputName + "\"";
        }
        if (args.indexOf("%input") >= 0) {
            args = args.replaceAll("%input", inputName);
        }
        if (args.indexOf("%base") >= 0) {
            args = args.replaceAll("%base", baseName);
        }
        if (args.indexOf("%output") >= 0) {
            args = args.replaceAll("%output", outputName);
        }
        return args;
    }

    /** Routes process output lines into the Maven log with a step prefix. (static: uses no enclosing state) */
    private static class LatexPluginLogOutputStream extends LogOutputStream {
        private final String prefix;
        private final Log log;
        private final boolean error;

        LatexPluginLogOutputStream(Log log, String prefix) {
            this(log, prefix, false);
        }

        LatexPluginLogOutputStream(Log log, String prefix, boolean error) {
            this.log = log;
            this.prefix = prefix;
            this.error = error;
        }

        @Override
        protected void processLine(String line) {
            if (error) {
                log.error(prefix + " " + line);
            } else {
                log.info(prefix + " " + line);
            }
        }
    }
}
package de.retest.configuration;

import static de.retest.util.FileUtil.canonicalPathQuietly;

import java.io.File;
import java.util.Properties;

import de.retest.util.FileUtil;

/**
 * Singleton holder for the retest configuration: the workspace folder and the
 * layered properties (defaults &lt; user config file &lt; console properties).
 * All public accessors lazily initialize the singleton via {@link #ensureLoaded()}.
 */
public class Configuration {

	/**
	 * Package private for tests only! Use {@link RetestWorkspace#getPropertiesFileArgument()} instead.
	 */
	public static final String PROP_CONFIG_FILE_PATH = "de.retest.configFile";
	public static final String RETEST_PROPERTIES_FILE_NAME = "retest.properties";

	private static Configuration instance;

	// dedicated lock object for double-checked lazy init of the singleton
	private static final Object lockField = new Object[0];

	static boolean isLoaded() {
		return instance != null && instance.retestWorkspace != null;
	}

	/** Creates the singleton if needed and re-validates the system properties. */
	public static void ensureLoaded() {
		synchronized ( lockField ) {
			if ( instance == null ) {
				instance = new Configuration();
			}
		}
		instance.systemPropertyHandler.checkSystemPropertyStillIsCorrect();
	}

	private final SystemPropertyHandler systemPropertyHandler;
	private final RetestWorkspace retestWorkspace;

	private Configuration() {
		systemPropertyHandler = new SystemPropertyHandler();
		final File configFile = RetestWorkspace.tryToFindConfigFile();
		if ( configFile != null ) {
			systemPropertyHandler.loadUserPropertiesFile( configFile );
		}
		retestWorkspace = new RetestWorkspace( configFile );
	}

	/**
	 * Re-initializes the singleton with the given config file.
	 *
	 * @throws ConfigurationException if the file doesn't exist or isn't readable
	 */
	public static synchronized void setConfigFile( final File configFile ) throws ConfigurationException {
		ensureLoaded();
		final File verifiedFile = FileUtil.readableCanonicalFileOrNull( configFile );
		if ( verifiedFile != null ) {
			instance.systemPropertyHandler.loadUserPropertiesFile( verifiedFile );
			instance = new Configuration( instance.systemPropertyHandler, new RetestWorkspace( verifiedFile ) );
		} else {
			throw new ConfigurationException( new Property( Configuration.PROP_CONFIG_FILE_PATH ),
					"Configuration file '" + canonicalPathQuietly( configFile ) + "' doesn't exist or isn't readable!" );
		}
	}

	private Configuration( final SystemPropertyHandler systemPropertyHandler, final RetestWorkspace reTestWorkspace ) {
		this.systemPropertyHandler = systemPropertyHandler;
		retestWorkspace = reTestWorkspace;
	}

	/**
	 * Only for tests!!!
	 */
	public static synchronized void resetRetest() {
		if ( instance != null ) {
			instance.systemPropertyHandler.tearDown();
			instance = null;
		}
	}

	// getter

	/** Returns the workspace folder, creating it on demand. */
	public static File getRetestWorkspace() {
		ensureLoaded();
		if ( !instance.retestWorkspace.workspaceFolder.exists() ) {
			instance.retestWorkspace.workspaceFolder.mkdirs();
		}
		return instance.retestWorkspace.workspaceFolder;
	}

	public static String getPropertiesFileArgument() {
		ensureLoaded();
		return instance.retestWorkspace.getPropertiesFileArgument();
	}

	public static File getUserPropertiesFile() {
		ensureLoaded();
		return instance.retestWorkspace.getPropertiesFile();
	}

	/** Returns user-visible properties, console properties taking precedence. */
	public static Properties getUserConfigProps() {
		// was missing ensureLoaded(): calling this first would NPE on 'instance'
		ensureLoaded();
		final Properties result = new Properties();
		// userConfigFileProps overwrites retestDefaultProps
		result.putAll( instance.systemPropertyHandler.userConfigFileProps );
		// originConsoleProps overwrites userConfigFileProps,
		result.putAll( instance.systemPropertyHandler.originConsoleProps );
		return result;
	}
}
package ecse321.fall2014.group3.bomberman;

import org.spout.renderer.lwjgl.LWJGLUtil;

import ecse321.fall2014.group3.bomberman.database.Login;

import javax.swing.*;
import java.awt.*;
import java.awt.event.*;

/**
 * Application entry point: deploys the LWJGL natives and shows a Swing login
 * screen; the game itself is started once login or account creation succeeds.
 */
public class App {
    public static void main(String[] args) {
        LWJGLUtil.deployNatives(null);
        createLoginScreen();
        //new Game().open();
    }

    /** Builds and shows the login window with login/create-account actions. */
    private static void createLoginScreen() {
        final JFrame frame = new JFrame("Login");
        frame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);

        JPanel panel = new JPanel(new GridLayout(4, 4));
        frame.add(panel, BorderLayout.CENTER);

        JLabel userLabel = new JLabel("User");
        panel.add(userLabel);
        final JTextField userText = new JTextField(10);
        panel.add(userText);

        JLabel passwordLabel = new JLabel("Password");
        panel.add(passwordLabel);
        // was a plain JTextField, which displayed the password on screen;
        // JPasswordField masks the input (the later revision of this class does the same)
        final JPasswordField passwordText = new JPasswordField(10);
        panel.add(passwordText);

        JButton loginButton = new JButton("login");
        panel.add(loginButton);
        JButton createButton = new JButton("New");
        panel.add(createButton);

        final JLabel error = new JLabel();
        error.setForeground(Color.red);
        error.setVisible(false);
        panel.add(error);
        error.setText("wrong username or password");

        loginButton.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                if (Login.login(userText.getText(), String.valueOf(passwordText.getPassword()))) {
                    frame.dispose();
                    new Game().open();
                } else {
                    userText.setText("");
                    passwordText.setText("");
                    error.setText("wrong username or password");
                    error.setVisible(true);
                }
            }
        });
        createButton.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                if (Login.createAccount(userText.getText(), String.valueOf(passwordText.getPassword()))) {
                    frame.dispose();
                    new Game().open();
                } else {
                    userText.setText("");
                    passwordText.setText("");
                    error.setText("enter a new username and password");
                    error.setVisible(true);
                }
            }
        });

        frame.pack();
        frame.setVisible(true);
    }
}
package ecse321.fall2014.group3.bomberman;

import javax.swing.JButton;
import java.util.Arrays;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JPasswordField;
import javax.swing.JTextField;
import javax.swing.WindowConstants;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.GridLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.concurrent.Semaphore;

import ecse321.fall2014.group3.bomberman.database.Login;

import org.spout.renderer.lwjgl.LWJGLUtil;

/**
 * Application entry point. Opens the login database, shows a Swing login
 * window, then blocks the main thread on a semaphore until one of the
 * button listeners (running on the Swing event thread) signals a successful
 * login or account creation; the window and the DB connection are then
 * released and the game is started.
 */
public class App {
    // Released (permit count 0 -> 1) by the button listeners on success;
    // main() parks on it until then.
    private static final Semaphore loginWait = new Semaphore(0);

    public static void main(String[] args) {
        LWJGLUtil.deployNatives(null);
        final Connection connection = Login.openDB();
        final JFrame frame = createLoginScreen(connection);
        // Block here until a listener calls loginWait.release().
        loginWait.acquireUninterruptibly();
        frame.dispose();
        try {
            connection.close();
        } catch (SQLException e) {
            e.printStackTrace();
        }
        new Game().open();
    }

    /**
     * Builds and shows the login window (6x2 grid: real name, user, password,
     * verify-password, the two buttons, and a hidden error label). The frame
     * is returned so the caller can dispose it after the semaphore is released.
     * Closing the window exits the JVM (EXIT_ON_CLOSE).
     */
    private static JFrame createLoginScreen(final Connection connection) {
        final JFrame frame = new JFrame("Login");
        frame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
        JPanel panel = new JPanel(new GridLayout(6, 2));
        frame.add(panel, BorderLayout.CENTER);
        // NOTE(review): realNameText is collected but not yet persisted —
        // see the TODOs in both listeners below.
        JLabel realNameLabel = new JLabel("Real Name");
        panel.add(realNameLabel);
        final JTextField realNameText = new JTextField(10);
        panel.add(realNameText);
        JLabel userLabel = new JLabel("User");
        panel.add(userLabel);
        final JTextField userText = new JTextField(10);
        panel.add(userText);
        JLabel passwordLabel = new JLabel("Password");
        panel.add(passwordLabel);
        final JPasswordField passwordText = new JPasswordField(10);
        panel.add(passwordText);
        JLabel verifyLabel = new JLabel("Verify Password");
        panel.add(verifyLabel);
        final JPasswordField verifyText = new JPasswordField(10);
        panel.add(verifyText);
        JButton loginButton = new JButton("login");
        panel.add(loginButton);
        JButton createButton = new JButton("New");
        panel.add(createButton);
        // Error label is created hidden; listeners set its text and show it.
        final JLabel error = new JLabel("user already exists or missing password");
        error.setForeground(Color.RED);
        error.setVisible(false);
        panel.add(error);
        loginButton.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                // NOTE(review): the login path also requires the "verify"
                // field to match the password — confirm this is intentional
                // (it forces users to type the password twice even to log in).
                if (!Arrays.equals(passwordText.getPassword(), verifyText.getPassword())) {
                    passwordText.setText("");
                    verifyText.setText("");
                    error.setText("passwords dont match");
                    error.setVisible(true);
                } else {
                    //TODO: add real name to database
                    //if(Login.login(realNameText.getText(),userText.getText(), String.valueOf(passwordText.getPassword()), connection))
                    if (Login.login(userText.getText(), String.valueOf(passwordText.getPassword()), connection)) {
                        // Wakes main(); the frame is disposed there, not here.
                        loginWait.release();
                    } else {
                        passwordText.setText("");
                        verifyText.setText("");
                        error.setText("wrong username or password");
                        error.setVisible(true);
                    }
                }
            }
        });
        createButton.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                if (!Arrays.equals(passwordText.getPassword(), verifyText.getPassword())) {
                    passwordText.setText("");
                    verifyText.setText("");
                    error.setText("passwords dont match");
                    error.setVisible(true);
                } else {
                    //TODO: add real name to database
                    //if(Login.login(realNameText.getText(),userText.getText(), String.valueOf(passwordText.getPassword()), connection))
                    if (Login.createAccount(userText.getText(), String.valueOf(passwordText.getPassword()), connection)) {
                        loginWait.release();
                    } else {
                        // Account creation failed: reset the whole form.
                        realNameText.setText("");
                        userText.setText("");
                        passwordText.setText("");
                        verifyText.setText("");
                        error.setText("user already exist");
                        error.setVisible(true);
                    }
                }
            }
        });
        frame.pack();
        frame.setLocationRelativeTo(null);
        frame.setVisible(true);
        return frame;
    }
}
package edu.neu.ccs.pyramid.experiment; import edu.neu.ccs.pyramid.configuration.Config; import edu.neu.ccs.pyramid.dataset.*; import edu.neu.ccs.pyramid.eval.*; import edu.neu.ccs.pyramid.multilabel_classification.bmm_variant.BMMClassifier; import edu.neu.ccs.pyramid.multilabel_classification.bmm_variant.BMMInitializer; import edu.neu.ccs.pyramid.multilabel_classification.bmm_variant.BMMOptimizer; import edu.neu.ccs.pyramid.util.Grid; import edu.neu.ccs.pyramid.util.Pair; import org.apache.commons.io.FileUtils; import java.io.BufferedWriter; import java.io.File; import java.io.FileWriter; import java.io.FilenameFilter; import java.nio.file.Paths; import java.util.Arrays; import java.util.List; import java.util.Set; import java.util.regex.Pattern; import java.util.stream.Collectors; public class Exp211 { public static BMMOptimizer getOptimizer(Config config, BMMClassifier bmmClassifier, MultiLabelClfDataSet trainSet){ BMMOptimizer optimizer = new BMMOptimizer(bmmClassifier,trainSet); optimizer.setMeanRegVariance(config.getDouble("lr.meanRegVariance")); optimizer.setMeanRegularization(config.getBoolean("lr.meanRegularization")); optimizer.setPriorVarianceMultiClass(config.getDouble("lr.multiClassVariance")); optimizer.setPriorVarianceBinary(config.getDouble("lr.binaryVariance")); optimizer.setNumIterationsBinary(config.getInt("boost.numIterationsBinary")); optimizer.setNumIterationsMultiClass(config.getInt("boost.numIterationsMultiClass")); optimizer.setShrinkageBinary(config.getDouble("boost.shrinkageBinary")); optimizer.setShrinkageMultiClass(config.getDouble("boost.shrinkageMultiClass")); optimizer.setNumLeavesBinary(config.getInt("boost.numLeavesBinary")); optimizer.setNumLeavesMultiClass(config.getInt("boost.numLeavesMultiClass")); return optimizer; } public static Pair<BMMClassifier,Integer> loadOldBMM(Config config) throws Exception{ BMMClassifier bmmClassifier; int completedIterations = 0; String output = config.getString("output"); String modelName = 
config.getString("modelName"); File folder = Paths.get(output,modelName).toFile(); File[] modeFiles = folder.listFiles((dir, name) -> name.startsWith("iter.") && (name.endsWith(".model"))); File lastFile = null; int lastIter = -1; for (File file: modeFiles){ String[] split = file.getName().split(Pattern.quote(".")); int iter = Integer.parseInt(split[1]); if (iter>lastIter){ lastIter = iter; lastFile = file; completedIterations = lastIter; } } bmmClassifier = BMMClassifier.deserialize(lastFile); System.out.println("bmm loaded, with "+completedIterations+ " iterations completed"); bmmClassifier.setPredictMode(config.getString("predict.mode")); return new Pair<>(bmmClassifier,completedIterations); } public static Pair<BMMClassifier,Integer> loadNewBMM(Config config, MultiLabelClfDataSet trainSet) throws Exception{ BMMClassifier bmmClassifier; int completedIterations = 0; bmmClassifier = BMMClassifier.getBuilder() .setNumClasses(trainSet.getNumClasses()) .setNumFeatures(trainSet.getNumFeatures()) .setNumClusters(config.getInt("mixture.numClusters")) .setMultiClassClassifierType(config.getString("mixture.multiClassClassifierType")) .setBinaryClassifierType(config.getString("mixture.binaryClassifierType")) .build(); bmmClassifier.setPredictMode(config.getString("predict.mode")); bmmClassifier.setNumSample(config.getInt("predict.sampling.numSamples")); String allowEmpty = config.getString("predict.allowEmpty"); switch (allowEmpty){ case "true": bmmClassifier.setAllowEmpty(true); break; case "false": bmmClassifier.setAllowEmpty(false); break; case "auto": Set<MultiLabel> seen = DataSetUtil.gatherMultiLabels(trainSet).stream().collect(Collectors.toSet()); MultiLabel empty = new MultiLabel(); if (seen.contains(empty)){ bmmClassifier.setAllowEmpty(true); System.out.println("training set contains empty labels, automatically set allow empty = true"); } else { bmmClassifier.setAllowEmpty(false); System.out.println("training set does not contain empty labels, automatically set 
allow empty = false"); } break; default: throw new IllegalArgumentException("unknown value for predict.allowEmpty"); } if (config.getBoolean("train.initialize")) { System.out.println("start initialization with temperature "+config.getDouble("em.startTemperature")); BMMOptimizer optimizer = getOptimizer(config,bmmClassifier,trainSet); optimizer.setTemperature(config.getDouble("em.startTemperature")); BMMInitializer.initialize(bmmClassifier, trainSet, optimizer); System.out.println("finish initialization"); } System.out.println("bmm loaded, with "+completedIterations+ " iterations completed"); return new Pair<>(bmmClassifier,completedIterations); } public static Pair<BMMClassifier,Integer> loadBMM(Config config, MultiLabelClfDataSet trainSet) throws Exception{ String mode = config.getString("train.warmStart"); Pair<BMMClassifier,Integer> pair = null; switch (mode){ case "true": pair = loadOldBMM(config); break; case "false": pair = loadNewBMM(config,trainSet); break; case "auto": String output = config.getString("output"); String modelName = config.getString("modelName"); File folder = Paths.get(output,modelName).toFile(); File[] modeFiles = folder.listFiles((dir, name) -> name.startsWith("iter.") && (name.endsWith(".model"))); if (modeFiles.length==0){ pair = loadNewBMM(config,trainSet); } else { pair = loadOldBMM(config); } break; default: throw new IllegalArgumentException("unknown value for train.warmStart"); } return pair; } public static void main(String[] args) throws Exception { if (args.length != 1) { throw new IllegalArgumentException("Please specify a properties file."); } Config config = new Config(args[0]); System.out.println(config); String matrixType = config.getString("input.matrixType"); MultiLabelClfDataSet trainSet; MultiLabelClfDataSet testSet; switch (matrixType){ case "sparse_random": trainSet= TRECFormat.loadMultiLabelClfDataSet(config.getString("input.trainData"), DataSetType.ML_CLF_SPARSE, true); testSet = 
TRECFormat.loadMultiLabelClfDataSet(config.getString("input.testData"), DataSetType.ML_CLF_SPARSE, true); break; case "sparse_sequential": trainSet= TRECFormat.loadMultiLabelClfDataSet(config.getString("input.trainData"), DataSetType.ML_CLF_SEQ_SPARSE, true); testSet = TRECFormat.loadMultiLabelClfDataSet(config.getString("input.testData"), DataSetType.ML_CLF_SEQ_SPARSE, true); break; case "dense": trainSet= TRECFormat.loadMultiLabelClfDataSet(config.getString("input.trainData"), DataSetType.ML_CLF_DENSE, true); testSet = TRECFormat.loadMultiLabelClfDataSet(config.getString("input.testData"), DataSetType.ML_CLF_DENSE, true); break; default: throw new IllegalArgumentException("unknown type"); } int numIterations = config.getInt("em.numIterations"); String output = config.getString("output"); String modelName = config.getString("modelName"); File path = Paths.get(output, modelName).toFile(); path.mkdirs(); Pair<BMMClassifier,Integer> pair = loadBMM(config,trainSet); BMMClassifier bmmClassifier = pair.getFirst(); int completedIterations = pair.getSecond(); BMMOptimizer optimizer = getOptimizer(config,bmmClassifier,trainSet); double startTemperature = config.getDouble("em.startTemperature"); double endTemperature = config.getDouble("em.endTemperature"); int numTemperatures = config.getInt("em.numTemperatures"); List<Double> temperatures = Grid.uniformDecreasing(endTemperature,startTemperature,numTemperatures); int totalIter = completedIterations+1; for (double temperature: temperatures){ System.out.println(" System.out.println("temperature = "+temperature); optimizer.setTemperature(temperature); for (int i=1;i<=numIterations;i++){ System.out.print("iter : "+totalIter + "\t"); optimizer.iterate(); MultiLabel[] trainPredict; MultiLabel[] testPredict; trainPredict = bmmClassifier.predict(trainSet); testPredict = bmmClassifier.predict(testSet); System.out.print("objective: "+optimizer.getTerminator().getLastValue() + "\t"); System.out.print("trainAcc : "+ 
Accuracy.accuracy(trainSet.getMultiLabels(),trainPredict)+ "\t"); System.out.print("trainOver: "+ Overlap.overlap(trainSet.getMultiLabels(), trainPredict)+ "\t"); System.out.print("testAcc : "+ Accuracy.accuracy(testSet.getMultiLabels(),testPredict)+ "\t"); System.out.print("testOver : " + Overlap.overlap(testSet.getMultiLabels(), testPredict) + "\t"); System.out.println("testHam: "+HammingLoss.hammingLoss(testSet.getMultiLabels(), testPredict, bmmClassifier.getNumClasses())); if (config.getBoolean("saveModelForEachIter")) { File serializeModel = new File(path, "iter." + totalIter + ".model"); bmmClassifier.serialize(serializeModel); double[][] gammas = optimizer.getGammas(); double[][] PIs = optimizer.getPIs(); BufferedWriter bw = new BufferedWriter(new FileWriter(new File(path, "iter."+totalIter+".gammas"))); BufferedWriter bw1 = new BufferedWriter(new FileWriter(new File(path, "iter."+totalIter+".PIs"))); for (int n=0; n<gammas.length; n++) { for (int k=0; k<gammas[n].length; k++) { bw.write(gammas[n][k] + "\t"); bw1.write(PIs[n][k] + "\t"); } bw.write("\n"); bw1.write("\n"); } bw.close(); bw1.close(); } totalIter += 1; } } System.out.println("history = "+optimizer.getTerminator().getHistory()); System.out.println(" System.out.println(); MultiLabel[] testPredicts = bmmClassifier.predict(testSet); // System.out.print("trainAcc : " + Accuracy.accuracy(bmmClassifier, trainSet) + "\t"); // System.out.print("trainOver: "+ Overlap.overlap(bmmClassifier, trainSet)+ "\t"); System.out.println("Acc : "+ Accuracy.accuracy(testSet.getMultiLabels(),testPredicts)); System.out.println("Overlap : "+ Overlap.overlap(testSet.getMultiLabels(), testPredicts)); System.out.println("hamming loss: " + HammingLoss.hammingLoss(testSet.getMultiLabels(), testPredicts, testSet.getNumClasses())); System.out.println("F1: " + FMeasure.f1(testSet.getMultiLabels(), testPredicts)); System.out.println(); System.out.println(); // System.out.println(bmmClassifier); if 
(config.getBoolean("saveModel")) { File serializeModel = new File(path, "model"); bmmClassifier.serialize(serializeModel); } } }
package eu.scape_project.pt.fs.util; import java.io.File; import java.io.IOException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; /** * Handles the transportation of files from the local filesystem to HDFS and vice-versa. * * @author Rainer Schmidt [rschmidt13] * @author Matthias Rella [myrho] * @author Martin Schenck [schenck] */ public class HDFSFiler implements Filer{ private static Log LOG = LogFactory.getLog(HDFSFiler.class); protected FileSystem hdfs = null; public HDFSFiler(FileSystem hdfs) { super(); this.hdfs = hdfs; } public boolean exists(String file) throws IOException { Path path = new Path(file); return hdfs.exists(path); } @Override public File copyFile(String strSrc, String strDest) throws IOException { Path path = new Path(strSrc); if(!hdfs.exists(path)) throw new IOException("file does not exist! "+strSrc); //File temp = File.createTempFile(path.getName(), "", tempDir); File temp = new File( strDest ); hdfs.copyToLocalFile(path, new Path(strDest)); return temp; } @Override public void depositDirectoryOrFile(String strSrc, String strDest) throws IOException { File file = new File( strSrc ); if(file.isDirectory()) { depositDirectory(strSrc, strDest); } else { depositFile(strSrc, strDest); } } @Override public void depositDirectory(String strSrc, String strDest) throws IOException { // Get output directory name from strSrc File dir = new File( strSrc ); if(!dir.isDirectory()) { LOG.error("Could not find correct local output directory: " + dir ); return; } LOG.info("Local directory is: " + dir ); // FIXME if strSrc is a directory then strDest should be a directory too for(File file : dir.listFiles()) { depositDirectoryOrFile(file.getCanonicalPath(), strDest + File.separator + file.getName()); } } @Override public void depositFile(String strSrc, String strDest) throws IOException { Path src = new Path(strSrc); Path dest = new 
Path(strDest); LOG.info("local file name is: "+src+" destination path is:" +dest); hdfs.copyFromLocalFile(src, dest); } }
package fgis.server.services; import fgis.server.entity.fgis.Resource; import fgis.server.entity.fgis.ResourceTrack; import fgis.server.entity.fgis.dao.ResourceRepository; import fgis.server.entity.fgis.dao.ResourceTrackRepository; import java.util.Date; import java.util.Random; import javax.ejb.EJB; import javax.ejb.Schedule; import javax.ejb.Stateless; import org.geolatte.geom.DimensionalFlag; import org.geolatte.geom.Point; import org.geolatte.geom.PointSequenceBuilder; import org.geolatte.geom.PointSequenceBuilders; import org.geolatte.geom.crs.CrsId; @javax.ejb.Local @Stateless public class DataGeneratorEJB { @EJB private ResourceRepository _resourceService; @EJB private ResourceTrackRepository _resourceTrackRepository; @Schedule( second = "*/5", minute = "*", hour = "*", persistent = false ) public void insertData() { final String resourceName = "Peter"; Resource resource = _resourceService.findByName( resourceName ); if ( null == resource ) { resource = new Resource( "Person", resourceName ); _resourceService.persist( resource ); } final Random random = new Random(); final PointSequenceBuilder builder = PointSequenceBuilders.variableSized( DimensionalFlag.d2D, CrsId.UNDEFINED ); final double latPosition = 144.92978643046 + random.nextDouble() / 1; final double longPosition = -37.794939500455 + random.nextDouble() / 1; builder.add( latPosition , longPosition ); System.out.println( "Generating point " + latPosition + ", " + longPosition + " for resource " + resource.getName() ); final Point point = new Point( builder.toPointSequence() ); _resourceTrackRepository.persist( new ResourceTrack( resource, new Date(), point ) ); resource.setLocation( point ); } }
package innovimax.mixthem.arguments; import innovimax.mixthem.io.InputResource; import java.io.File; import java.io.InputStream; import java.io.IOException; import java.nio.file.Files; import java.nio.file.LinkOption; import java.nio.file.Path; import java.util.ArrayList; import java.util.Collections; import java.util.Enumeration; import java.util.EnumMap; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.zip.ZipEntry; import java.util.zip.ZipException; import java.util.zip.ZipFile; /** * <p>Mix-them command line arguments management.</p> * @author Innovimax * @version 1.0 */ public class Arguments { private FileMode fileMode = null; private Rule rule = null; private Map<RuleParam, ParamValue> ruleParams = null; private Set<Integer> selection = null; private final List<InputResource> inputs = new ArrayList<InputResource>(); private void setFileMode(final FileMode fileMode) { this.fileMode = fileMode; } public FileMode getFileMode() { return this.fileMode; } private void setRule(final Rule rule) { this.rule = rule; } public Rule getRule() { return this.rule; } void setRuleParameters(final Map<RuleParam, ParamValue> ruleParams) { this.ruleParams = ruleParams; } public Map<RuleParam, ParamValue> getRuleParameters() { return this.ruleParams; } public void setSelection(final Set<Integer> selection) { this.selection = selection; } public Set<Integer> getSelection() { return this.selection; } void addInput(final InputResource input) { this.inputs.add(input); } public List<InputResource> getInputs() { return this.inputs; } public static Arguments checkArguments(final String[] args) throws ArgumentException, IOException, ZipException { final Arguments mixArgs = new Arguments(); int index = 0; // get file mode [char|byte] FileMode fileMode = findFileModeArgument(args, index); if (fileMode != null) { index++; } else { fileMode = FileMode.CHAR; } mixArgs.setFileMode(fileMode); // get 
selection final Set<Integer> selection = findSelectionArgument(args, index); mixArgs.setSelection(selection); if (!selection.isEmpty()) { index += selection.size(); } // get rule & parameters Rule rule = findRuleArgument(args, index, fileMode); Map<RuleParam, ParamValue> ruleParams = null; if (rule != null) { index++; ruleParams = findRuleParameters(args, index, rule); index += ruleParams.size(); } else { rule = Rule.ADD; ruleParams = Collections.emptyMap(); } mixArgs.setRule(rule); mixArgs.setRuleParameters(ruleParams); // get input files final String zipOption = findZipOptionArgument(args, index); if (zipOption == null) { final List<File> files = findFilesArgument(args, index); files.stream().forEach(file -> mixArgs.addInput(InputResource.createFile(file))); } else { final ZipFile zipFile = new ZipFile(findZipFileArgument(args, ++index)); final List<InputStream> inputs = extractZipEntries(zipFile); inputs.stream().forEach(input -> mixArgs.addInput(InputResource.createInputStream(input))); } // check selection vs input file count checkSelection(mixArgs); return mixArgs; } private static FileMode findFileModeArgument(final String[] args, final int index) throws ArgumentException { if (args.length > index) { return FileMode.findByName(args[index]); } return null; } private static Set<Integer> findSelectionArgument(final String[] args, int index) throws ArgumentException { final Set<Integer> selection = new LinkedHashSet<Integer>(); while (args.length > index) { final int fileIndex; try { fileIndex = Integer.parseInt(args[index++]); if (index <= 0) { throw new ArgumentException("Selection index is not valid: " + fileIndex); } selection.add(Integer.valueOf(fileIndex)); } catch(NumberFormatException e) { break; } } return selection; } private static Rule findRuleArgument(final String[] args, final int index, final FileMode fileMode) throws ArgumentException { Rule rule = null; if (args.length > index) { final String ruleString = args[index]; if 
(ruleString.startsWith("-") && !ruleString.startsWith(" rule = Rule.findByName(ruleString.substring(1), fileMode); if (rule == null) { throw new ArgumentException("Rule argument is incorrect: " + ruleString); } } } return rule; } private static Map<RuleParam, ParamValue> findRuleParameters(final String[] args, final int index, final Rule rule) throws ArgumentException { final Map<RuleParam, ParamValue> map = new EnumMap<RuleParam, ParamValue>(RuleParam.class); final Iterator<RuleParam> iterator = rule.getParams().iterator(); if (iterator.hasNext()) { final RuleParam param = iterator.next(); if (args.length > index) { final String arg = args[index]; if (arg.startsWith(" final String paramString = arg.substring(1); try { final ParamValue value = param.createValue(paramString); map.put(param, value); } catch (NumberFormatException e) { throw new ArgumentException("#" + param.getName() + " parameter is incorrect: " + paramString); } } } if (param.isMandatory() && !map.containsKey(param)) { throw new ArgumentException("#" + param.getName() + " parameter is mandatory."); } } return map; } private static List<File> findFilesArgument(final String[] args, int index) throws ArgumentException { final List<File> files = new ArrayList<File>(); while (args.length > index) { final String filepath = args[index++]; final File file = new File(filepath); final Path path = file.toPath(); if (Files.exists(path, LinkOption.NOFOLLOW_LINKS)) { if (Files.isReadable(path)) { files.add(file); } else { throw new ArgumentException("Input file cannot be read: " + filepath); } } else { throw new ArgumentException("Input file not found: " + filepath); } } switch (files.size()) { case 0: throw new ArgumentException("First input file argument missing."); case 1: throw new ArgumentException("Second input file argument missing."); } return files; } private static String findZipOptionArgument(final String[] args, final int index) { if (args.length > index && (args[index].equals("--zip") || 
args[index].equals("--jar"))) { return args[index].substring(2); } return null; } private static File findZipFileArgument(final String[] args, final int index) throws ArgumentException { File file = null; if (args.length > index) { final String filepath = args[index]; file = new File(filepath); final Path path = file.toPath(); if (Files.exists(path, LinkOption.NOFOLLOW_LINKS)) { if (!Files.isReadable(path)) { throw new ArgumentException("Zip/Jar file cannot be read: " + filepath); } } else { throw new ArgumentException("Zip/Jar file not found: " + filepath); } } else { throw new ArgumentException("Zip/Jar argument missing."); } return file; } private static List<InputStream> extractZipEntries(final ZipFile zipFile) throws ArgumentException, IOException, ZipException { final List<InputStream> inputs = new ArrayList<InputStream>(); final Enumeration entries = zipFile.entries(); while (entries.hasMoreElements()) { ZipEntry entry = (ZipEntry) entries.nextElement(); if (entry.getName().toUpperCase().startsWith("META-INF")) { continue; } inputs.add(zipFile.getInputStream(entry)); } switch (inputs.size()) { case 0: throw new ArgumentException("First input entry missing."); case 1: throw new ArgumentException("Second input entry missing."); } return inputs; } private static void checkSelection(Arguments mixArgs) throws ArgumentException { Iterator<Integer> iterator = mixArgs.getSelection().iterator(); while (iterator.hasNext()) { Integer index = iterator.next(); if (index.intValue() > mixArgs.getInputs().size()) { throw new ArgumentException("Selection index is greater than file count: " + index.intValue()); } } } public static void printUsage() { System.out.println(" "); System.out.println("Usage:"); System.out.println(" "); System.out.println(" mix-them [char|byte] <file1> <file2>[ <file3>... 
<fileN>]"); System.out.println(" (will generate on standard out any file based on file1 to fileN)"); System.out.println(" (by default it assumes that all files are character based, not binary)"); System.out.println(" "); System.out.println(" mix-them [char|byte] <rule> <file1> <file2>[ <file3>... <fileN>]"); System.out.println(" (will generate on standard out a file based on the rule)"); System.out.println(" "); System.out.println(" mix-them [char|byte] <index1> <index2>[ <index3>...] <rule> <file1> <file2>[ <file3>... <fileN>]"); System.out.println(" (will generate on standard out a file based on the rule and a selection of files designed by their index)"); System.out.println(" "); System.out.println(" mix-them --zip <zipfile>"); System.out.println(" mix-them --jar <jarfile>"); System.out.println(" (will generate on standard out any file based on entry1 to entryN of zip/jar file)"); System.out.println(" (by default it assumes zip/jar entries are character based, not binary)"); System.out.println(" "); System.out.println(" mix-them <rule> --zip <zipFile>"); System.out.println(" mix-them <rule> --jar <jarFile>"); System.out.println(" (will generate on standard out a file based on the rule)"); System.out.println(" "); System.out.println(" mix-them <index1> <index2>[ <index3>...] <rule> --zip <zipFile>"); System.out.println(" mix-them <index1> <index2>[ <index3>...] 
<rule> --jar <jarFile>"); System.out.println(" (will generate on standard out a file based on the rule and a selection of entries designed by their index)"); System.out.println(" "); System.out.println("Here are the list of rules:"); for(Rule rule : Rule.values()) { System.out.print(" -" + rule.getName()); for(RuleParam param : rule.getParams()) { if (param.isMandatory()) { System.out.print(" #" + param.getName()); } else { System.out.print(" [#" + param.getName() + "]"); } } System.out.println(": " + rule.getDescription()); for(RuleParam param : rule.getParams()) { System.out.println(" (#" +param.getName() + " " + param.getComment() + ")"); } } System.out.println(" "); } }
package innovimax.mixthem.arguments; import innovimax.mixthem.io.InputResource; import java.io.File; import java.io.InputStream; import java.nio.file.Files; import java.nio.file.LinkOption; import java.nio.file.Path; import java.util.EnumMap; import java.util.Iterator; import java.util.Map; /** * <p>Mix-them command line arguments management.</p> * @author Innovimax * @version 1.0 */ public class Arguments { private Rule rule = null; private Map<RuleParam, ParamValue> ruleParams = null; private InputResource input1 = null; private InputResource input2 = null; private void setRule(Rule rule) { this.rule = rule; } public Rule getRule() { return this.rule; } void setRuleParameters(Map<RuleParam, ParamValue> ruleParams) { this.ruleParams = ruleParams; } public Map<RuleParam, ParamValue> getRuleParameters() { return this.ruleParams; } void setFirstInput(InputResource input) { this.input1 = input; } public InputResource getFirstInput() { return this.input1; } void setSecondInput(InputResource input) { this.input2 = input; } public InputResource getSecondInput() { return this.input2; } public static Arguments checkArguments(String[] args) throws ArgumentException { Arguments mixArgs = new Arguments(); int index = 0; Rule rule = findRuleArgument(args, index, "rule"); Map<RuleParam, ParamValue> ruleParams = null; if (rule != null) { index++; ruleParams = findRuleParameters(args, index, rule); index += ruleParams.size(); } else { rule = Rule.ADD; } mixArgs.setRule(rule); mixArgs.setRuleParameters(ruleParams); String zipOption = findZipOptionArgument(args, index); if (zipOption == null) { File file1 = findFileArgument(args, index, "file1"); File file2 = findFileArgument(args, ++index, "file2"); mixArgs.setFirstInput(InputResource.createFile(file1)); mixArgs.setSecondInput(InputResource.createFile(file2)); } else { ZipFile zipFile = new ZipFile(findFileArgument(args, ++index, zipOption)); InputStream input1 = extractFileEntry(zipFile, 1, "file1"); InputStream input2 = 
extractFileEntry(zipFile, 2, "file2"); mixArgs.setFirstInput(InputResource.createInputStream(input1)); mixArgs.setSecondInput(InputResource.createInputStream(input2)); } return mixArgs; } private static Rule findRuleArgument(String[] args, int index, String name) throws ArgumentException { Rule rule = null; if (args.length > index) { final String ruleString = args[index]; if (ruleString.startsWith("-")) { rule = Rule.findByName(ruleString.substring(1)); if (rule == null) { throw new ArgumentException(name + " argument is incorrect: " + ruleString); } } } return rule; } private static Map<RuleParam, ParamValue> findRuleParameters(String[] args, int index, Rule rule) throws ArgumentException { Map<RuleParam, ParamValue> map = new EnumMap<RuleParam, ParamValue>(RuleParam.class); Iterator<RuleParam> iterator = rule.getParams().iterator(); if (iterator.hasNext()) { RuleParam param = iterator.next(); if (args.length > index) { String arg = args[index]; if (arg.startsWith(" final String paramString = arg.substring(1); try { ParamValue value = param.createValue(paramString); map.put(param, value); index++; } catch (NumberFormatException e) { throw new ArgumentException("[" + param.getName() + "] parameter is incorrect: " + paramString); } } } } return map; } private static File findFileArgument(String[] args, int index, String name) throws ArgumentException { File file = null; if (args.length > index) { String filepath = args[index]; file = new File(filepath); final Path path = file.toPath(); if (Files.exists(path, LinkOption.NOFOLLOW_LINKS)) { if (!Files.isReadable(path)) { throw new ArgumentException(name + " cannot be read: " + filepath); } } else { throw new ArgumentException(name + " not found: " + filepath); } } else { throw new ArgumentException(name + " argument missing."); } return file; } private static String findZipOptionArgument(String[] args, int index) { String zipOption = null; if (args.length > index && (args[index].equals("--zip") || 
args[index].equals("--jar"))) { zipOption = args[index].substring(2); } return zipOption; } private static InputStream extractFileEntry(ZipFile zipFile, int index, String name) throws ArgumentException, IOException { InputStream input = null; if (zipFile.size() >= index) { Enumeration<ZipEntry> entries = zipFile.entries(); if (index > 1) { entries.nextElement(); } input = zipFile.getInputStream(entries.nextElement()); } else { throw new ArgumentException(name + " entry missing."); } return input; } public static void printUsage() { System.out.println(" "); System.out.println("Usage:"); System.out.println(" "); System.out.println(" mix-them file1 file2"); System.out.println(" (will generate any file based on file1 and file2)"); System.out.println(" "); System.out.println(" mix-them -[rule] file1 file2"); System.out.println(" (will generate a file based on the rule)"); System.out.println(" "); System.out.println(" Here are the list of rules"); for(Rule rule : Rule.values()) { System.out.print(" - " + rule.getName()); for(RuleParam param : rule.getParams()) { System.out.print(" [#" + param.getName() + "]"); } System.out.println(": " + rule.getDescription()); } System.out.println(" "); } }
package innovimax.mixthem.arguments;

import innovimax.mixthem.io.InputResource;
import java.io.File;
import java.io.InputStream;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.LinkOption;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.EnumMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.zip.ZipEntry;
import java.util.zip.ZipException;
import java.util.zip.ZipFile;

/**
 * <p>Mix-them command line arguments management.</p>
 *
 * <p>Expected argument order: {@code [filemode] [-rule [#param]] (file1 file2... fileN | --zip zipfile | --jar jarfile)}.</p>
 * @author Innovimax
 * @version 1.0
 */
public class Arguments {

    private FileMode fileMode = null;
    private Rule rule = null;
    private Map<RuleParam, ParamValue> ruleParams = null;
    private final List<InputResource> inputs = new ArrayList<InputResource>();

    private void setFileMode(final FileMode fileMode) {
        this.fileMode = fileMode;
    }

    /** Returns the file reading mode (never null after {@link #checkArguments}). */
    public FileMode getFileMode() {
        return this.fileMode;
    }

    private void setRule(final Rule rule) {
        this.rule = rule;
    }

    /** Returns the mixing rule (defaults to {@code Rule.ADD} when none was given). */
    public Rule getRule() {
        return this.rule;
    }

    void setRuleParameters(final Map<RuleParam, ParamValue> ruleParams) {
        this.ruleParams = ruleParams;
    }

    /** Returns the parsed rule parameters, or null when no rule argument was present. */
    public Map<RuleParam, ParamValue> getRuleParameters() {
        return this.ruleParams;
    }

    void addInput(final InputResource input) {
        this.inputs.add(input);
    }

    /** Returns the input resources (plain files, or zip/jar entries). */
    public List<InputResource> getInputs() {
        return this.inputs;
    }

    /**
     * Parses and validates the command line.
     *
     * @param args raw command line arguments
     * @return a fully populated {@code Arguments} instance
     * @throws ArgumentException when an argument is missing, unreadable or malformed
     * @throws IOException when a zip/jar entry cannot be opened
     * @throws ZipException when the zip/jar file is corrupt
     */
    public static Arguments checkArguments(final String[] args) throws ArgumentException, IOException, ZipException {
        final Arguments mixArgs = new Arguments();
        int index = 0;
        // Optional file-mode argument; defaults to CHAR when absent.
        FileMode fileMode = findFileModeArgument(args, index);
        if (fileMode != null) {
            index++;
        } else {
            fileMode = FileMode.CHAR;
        }
        // Optional rule argument; defaults to ADD when absent.
        Rule rule = findRuleArgument(args, index, fileMode);
        Map<RuleParam, ParamValue> ruleParams = null;
        if (rule != null) {
            index++;
            ruleParams = findRuleParameters(args, index, rule);
            index += ruleParams.size();
        } else {
            rule = Rule.ADD;
        }
        mixArgs.setFileMode(fileMode);
        mixArgs.setRule(rule);
        mixArgs.setRuleParameters(ruleParams);
        final String zipOption = findZipOptionArgument(args, index);
        if (zipOption == null) {
            // Plain-file mode: every remaining argument is an input file.
            final List<File> files = findFilesArgument(args, index);
            files.stream().forEach(file -> mixArgs.addInput(InputResource.createFile(file)));
        } else {
            // Archive mode: entries of the zip/jar file become the inputs.
            final ZipFile zipFile = new ZipFile(findZipFileArgument(args, ++index));
            final List<InputStream> entryStreams = extractZipEntries(zipFile);
            entryStreams.stream().forEach(input -> mixArgs.addInput(InputResource.createInputStream(input)));
        }
        return mixArgs;
    }

    /**
     * Returns the file mode named by {@code args[index]}, or null when absent/unrecognized.
     */
    private static FileMode findFileModeArgument(final String[] args, final int index) throws ArgumentException {
        if (args.length > index) {
            return FileMode.findByName(args[index]);
        }
        return null;
    }

    /**
     * Returns the rule named by {@code args[index]} (a "-name" token), or null when
     * the token is not a rule.
     *
     * @throws ArgumentException when a "-name" token names no known rule
     */
    private static Rule findRuleArgument(final String[] args, final int index, final FileMode fileMode) throws ArgumentException {
        Rule rule = null;
        if (args.length > index) {
            final String ruleString = args[index];
            // A rule is a single-dash token; "--..." is reserved for options such as --zip/--jar.
            if (ruleString.startsWith("-") && !ruleString.startsWith("--")) {
                rule = Rule.findByName(ruleString.substring(1), fileMode);
                if (rule == null) {
                    throw new ArgumentException("Rule argument is incorrect: " + ruleString);
                }
            }
        }
        return rule;
    }

    /**
     * Parses the rule's parameter value from {@code args[index]} ("#value" tokens).
     * NOTE(review): only the first declared parameter of the rule is consumed here —
     * confirm no rule declares more than one parameter.
     *
     * @throws ArgumentException when a value cannot be parsed or a mandatory parameter is missing
     */
    private static Map<RuleParam, ParamValue> findRuleParameters(final String[] args, final int index, final Rule rule) throws ArgumentException {
        final Map<RuleParam, ParamValue> map = new EnumMap<RuleParam, ParamValue>(RuleParam.class);
        final Iterator<RuleParam> iterator = rule.getParams().iterator();
        if (iterator.hasNext()) {
            final RuleParam param = iterator.next();
            if (args.length > index) {
                final String arg = args[index];
                // Rule parameter values are passed with a leading '#'.
                if (arg.startsWith("#")) {
                    final String paramString = arg.substring(1);
                    try {
                        final ParamValue value = param.createValue(paramString);
                        map.put(param, value);
                    } catch (NumberFormatException e) {
                        throw new ArgumentException("#" + param.getName() + " parameter is incorrect: " + paramString);
                    }
                }
            }
            if (param.isMandatory() && !map.containsKey(param)) {
                throw new ArgumentException("#" + param.getName() + " parameter is mandatory.");
            }
        }
        return map;
    }

    /**
     * Collects every remaining argument as an existing, readable input file.
     *
     * @throws ArgumentException when a file is missing/unreadable or fewer than two files were given
     */
    private static List<File> findFilesArgument(final String[] args, int index) throws ArgumentException {
        final List<File> files = new ArrayList<File>();
        while (args.length > index) {
            final String filepath = args[index++];
            final File file = new File(filepath);
            final Path path = file.toPath();
            if (!Files.exists(path, LinkOption.NOFOLLOW_LINKS)) {
                throw new ArgumentException("Input file not found: " + filepath);
            }
            if (!Files.isReadable(path)) {
                throw new ArgumentException("Input file cannot be read: " + filepath);
            }
            files.add(file);
        }
        // Mixing needs at least two inputs.
        switch (files.size()) {
            case 0:
                throw new ArgumentException("First input file argument missing.");
            case 1:
                throw new ArgumentException("Second input file argument missing.");
        }
        return files;
    }

    /**
     * Returns "zip" or "jar" when {@code args[index]} is the corresponding option, else null.
     */
    private static String findZipOptionArgument(final String[] args, final int index) {
        if (args.length > index && (args[index].equals("--zip") || args[index].equals("--jar"))) {
            return args[index].substring(2);
        }
        return null;
    }

    /**
     * Returns the zip/jar file named by {@code args[index]}, verified to exist and be readable.
     *
     * @throws ArgumentException when the argument is missing or the file is not usable
     */
    private static File findZipFileArgument(final String[] args, final int index) throws ArgumentException {
        if (args.length <= index) {
            throw new ArgumentException("Zip/Jar argument missing.");
        }
        final String filepath = args[index];
        final File file = new File(filepath);
        final Path path = file.toPath();
        if (!Files.exists(path, LinkOption.NOFOLLOW_LINKS)) {
            throw new ArgumentException("Zip/Jar file not found: " + filepath);
        }
        if (!Files.isReadable(path)) {
            throw new ArgumentException("Zip/Jar file cannot be read: " + filepath);
        }
        return file;
    }

    /**
     * Opens a stream for every archive entry, skipping jar metadata (META-INF).
     *
     * @throws ArgumentException when the archive yields fewer than two usable entries
     */
    private static List<InputStream> extractZipEntries(final ZipFile zipFile) throws ArgumentException, IOException, ZipException {
        final List<InputStream> entryStreams = new ArrayList<InputStream>();
        final Enumeration<? extends ZipEntry> entries = zipFile.entries();
        while (entries.hasMoreElements()) {
            final ZipEntry entry = entries.nextElement();
            // Skip archive metadata such as META-INF/MANIFEST.MF in jars.
            if (entry.getName().toUpperCase().startsWith("META-INF")) {
                continue;
            }
            entryStreams.add(zipFile.getInputStream(entry));
        }
        switch (entryStreams.size()) {
            case 0:
                throw new ArgumentException("First input entry missing.");
            case 1:
                throw new ArgumentException("Second input entry missing.");
        }
        return entryStreams;
    }

    /** Prints command line usage, including the rule and parameter catalog. */
    public static void printUsage() {
        System.out.println(" ");
        System.out.println("Usage:");
        System.out.println(" ");
        System.out.println(" mix-them file1 file2... fileN");
        System.out.println(" (will generate any file based on file1 and file2 to fileN)");
        System.out.println(" ");
        System.out.println(" mix-them -[rule] file1 file2... fileN");
        System.out.println(" (will generate a file based on the rule)");
        System.out.println(" ");
        System.out.println(" Here are the list of rules");
        for (Rule rule : Rule.values()) {
            System.out.print(" - " + rule.getName());
            for (RuleParam param : rule.getParams()) {
                if (param.isMandatory()) {
                    System.out.print(" #" + param.getName());
                } else {
                    System.out.print(" [#" + param.getName() + "]");
                }
            }
            System.out.println(": " + rule.getDescription());
            for (RuleParam param : rule.getParams()) {
                System.out.println(" (#" + param.getName() + " " + param.getComment() + ")");
            }
        }
        System.out.println(" ");
        System.out.println(" mix-them --zip zipfile");
        System.out.println(" mix-them --jar jarfile");
        System.out.println(" (will generate any entry based on zip/jar file first and second to nth entries)");
        System.out.println(" ");
        System.out.println(" mix-them -[rule] --zip zipFile");
        System.out.println(" mix-them -[rule] --jar jarFile");
        System.out.println(" (will generate a file based on the rule)");
        System.out.println(" ");
    }
}
package io.aif.language.semantic;

import io.aif.language.semantic.weights.node.INodeWeightCalculator;
import io.aif.language.semantic.weights.node.word.IWordWeightCalculator;
import io.aif.language.word.IWord;

import java.util.*;
import java.util.stream.Collectors;

/**
 * Semantic-graph node wrapping a single {@link IWord} together with its
 * connections (observed distances) to other word nodes.
 */
class SemanticWord implements ISemanticNode<IWord> {

    // Normalization bound used when turning an average distance into a weight.
    private static final int MAX_DISTANCE_BETWEEN_WORDS = 5;

    private final INodeWeightCalculator<IWord> weightCalculator;
    private final IWord word;
    private final Map<ISemanticNode<IWord>, Connection> connections = new HashMap<>();

    public SemanticWord(final IWord word, final INodeWeightCalculator<IWord> weightCalculator) {
        this.word = word;
        this.weightCalculator = weightCalculator;
    }

    /** Convenience constructor using the default word weight calculator. */
    public SemanticWord(final IWord word) {
        this(word, IWordWeightCalculator.createDefaultWeightCalculator());
    }

    @Override
    public double weight() {
        // Weight of this node is delegated to the configured calculator.
        return weightCalculator.calculateWeight(this);
    }

    @Override
    public double connectionWeight(final ISemanticNode<IWord> semanticNode) {
        // Mean recorded distance to the given node, normalized by the maximum
        // distance, then scaled by the connected node's own weight.
        final Connection connection = connections.get(semanticNode);
        final double meanDistance = connection.getDistances()
                .stream()
                .collect(Collectors.summarizingDouble(Double::doubleValue))
                .getAverage();
        return meanDistance / MAX_DISTANCE_BETWEEN_WORDS * semanticNode.weight();
    }

    @Override
    public Set<ISemanticNode<IWord>> connectedItems() {
        return connections.keySet();
    }

    @Override
    public IWord item() {
        return word;
    }

    /** Registers (or replaces) the connection to the given node. */
    public void addConnection(final ISemanticNode<IWord> node, final Connection connection) {
        connections.put(node, connection);
    }

    /** Bag of distances observed between two words. */
    public static class Connection {

        private final List<Double> distances = new ArrayList<>();

        public void addDistance(final Double distance) {
            distances.add(distance);
        }

        public List<Double> getDistances() {
            return this.distances;
        }
    }
}
package io.sigpipe.sing.query;

import java.io.IOException;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import io.sigpipe.sing.dataset.feature.FeatureType;
import io.sigpipe.sing.graph.GraphMetrics;
import io.sigpipe.sing.graph.Vertex;
import io.sigpipe.sing.serialization.SerializationOutputStream;

/**
 * A query that prunes graph vertices failing the configured expressions and
 * serializes the surviving subgraph.
 */
public class RelationalQuery extends Query {

    /* Vertices eliminated by the last call to execute(). */
    protected Set<Vertex> pruned;
    protected GraphMetrics metrics;

    public RelationalQuery() {

    }

    public RelationalQuery(GraphMetrics metrics) {
        this.metrics = metrics;
    }

    /** Number of vertices removed from the result set by the last execution. */
    public int numPruned() {
        return this.pruned.size();
    }

    @Override
    public void execute(Vertex root) throws IOException, QueryException {
        /* When graph metrics are available, pre-size the pruned set (vertex
         * count divided by the default load factor) so it never resizes. */
        this.pruned = (this.metrics == null)
                ? new HashSet<>()
                : new HashSet<Vertex>((int) (metrics.getVertexCount() / 0.75));
        prune(root, 0);
    }

    /**
     * Recursively writes the vertex and its un-pruned neighbors to the stream.
     */
    public void serializeResults(Vertex vertex, SerializationOutputStream out)
            throws IOException {
        if (pruned.contains(vertex)) {
            return;
        }
        vertex.getLabel().serialize(out);
        out.writeBoolean(vertex.hasData());
        if (vertex.hasData()) {
            vertex.getData().serialize(out);
        }

        /* Count the neighbors that survived pruning before writing them. */
        int survivors = 0;
        for (Vertex neighbor : vertex.getAllNeighbors()) {
            if (!pruned.contains(neighbor)) {
                survivors++;
            }
        }
        out.writeInt(survivors);

        for (Vertex neighbor : vertex.getAllNeighbors()) {
            if (!pruned.contains(neighbor)) {
                serializeResults(neighbor, out);
            }
        }
    }

    /**
     * Depth-first pruning pass. Returns true when this vertex (or one of its
     * descendants) satisfies the remaining expressions; vertices on paths that
     * match nothing are added to the pruned set.
     */
    private boolean prune(Vertex vertex, int expressionsEvaluated)
            throws QueryException {
        if (expressionsEvaluated == this.expressions.size()) {
            /* No expressions remain, so everything below this point is
             * considered relevant to the query. */
            return true;
        }

        boolean foundSubMatch = false;
        String childFeature = vertex.getFirstNeighbor().getLabel().getName();
        List<Expression> expList = this.expressions.get(childFeature);
        if (expList == null) {
            /* No expression operates at this level; descend into all children. */
            for (Vertex neighbor : vertex.getAllNeighbors()) {
                if (prune(neighbor, expressionsEvaluated)) {
                    foundSubMatch = true;
                }
            }
        } else {
            Set<Vertex> matches = evaluate(vertex, expList);
            if (matches.isEmpty()) {
                pruned.add(vertex);
                return false;
            }
            for (Vertex match : matches) {
                if (match == null || match.getLabel().getType() == FeatureType.NULL) {
                    continue;
                }
                if (prune(match, expressionsEvaluated + 1)) {
                    foundSubMatch = true;
                }
            }
            /* Children that failed the expressions are pruned outright. */
            Set<Vertex> nonMatches = new HashSet<>(vertex.getAllNeighbors());
            nonMatches.removeAll(matches);
            pruned.addAll(nonMatches);
        }

        if (!foundSubMatch) {
            pruned.add(vertex);
        }
        return foundSubMatch;
    }
}
package jp.gecko655.fujimiya.bot;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.Base64;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.logging.StreamHandler;

import org.bson.Document;

import twitter4j.Status;

import com.mongodb.MongoClient;
import com.mongodb.MongoClientURI;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import com.mongodb.client.model.Filters;
import com.mongodb.client.model.UpdateOptions;

/**
 * Thin persistence layer over MongoDB (connection string read from the
 * MONGOLAB_URI environment variable). Stores posted image URLs, a URL
 * black list, and the last processed Twitter status.
 */
public class DBConnection {

    private static final Logger logger;
    static {
        logger = Logger.getLogger(DBConnection.class.getName());
        logger.setUseParentHandlers(false);
        // Log to stdout instead of the parent handlers' default destination.
        logger.addHandler(new StreamHandler() {{ setOutputStream(System.out); }});
    }

    private static final MongoClientURI mongoClientURI =
            new MongoClientURI(System.getenv("MONGOLAB_URI"));
    private static final MongoClient client = new MongoClient(mongoClientURI);
    private static final MongoDatabase db = client.getDatabase(mongoClientURI.getDatabase());

    private static final String imageUrlCollectionName = "imageUrl";
    private static final String blackListCollectionName = "blackList";
    private static final String lastStatusCollectionName = "lastStatus";
    private static final String urlKey = "url";
    private static final String reportedUserKey = "reporteduser";
    private static final String statusIdKey = "statusid";
    private static final String lastStatusKey = "laststatus";

    /**
     * Records an image URL together with the id of the tweet it was posted with.
     */
    public static void storeImageUrl(Status succeededStatus, FetchedImage fetchedImage) {
        MongoCollection<Document> collection = db.getCollection(imageUrlCollectionName);
        Document doc = new Document(statusIdKey, succeededStatus.getId());
        doc.put(urlKey, fetchedImage.getUrl());
        collection.insertOne(doc);
    }

    /**
     * Black-lists the image URL of the tweet this reply points at, remembering
     * the screen name of the reporting user. Logs a warning when the replied-to
     * status has no stored image URL.
     */
    public static void storeImageUrlToBlackList(Status reply) {
        MongoCollection<Document> imageUrlCollection = db.getCollection(imageUrlCollectionName);
        Document imageUrlDoc = imageUrlCollection
                .find(Filters.eq(statusIdKey, reply.getInReplyToStatusId()))
                .first();
        if (imageUrlDoc != null) {
            String url = imageUrlDoc.getString(urlKey);
            MongoCollection<Document> blackListCollection = db.getCollection(blackListCollectionName);
            Document doc = new Document(urlKey, url);
            doc.put(reportedUserKey, reply.getUser().getScreenName());
            blackListCollection.insertOne(doc);
        } else {
            logger.log(Level.WARNING, "Image URL was not found in data collection");
        }
    }

    /** Returns true when the given link has been black-listed. */
    public static boolean isInBlackList(String link) {
        MongoCollection<Document> blackListCollection = db.getCollection(blackListCollectionName);
        return blackListCollection.find(Filters.eq(urlKey, link)).iterator().hasNext();
    }

    /** Returns the persisted last status, or null when none has been stored yet. */
    public static Status getLastStatus() {
        MongoCollection<Document> lastStatusCollection = db.getCollection(lastStatusCollectionName);
        Document doc = lastStatusCollection.find(Filters.exists(lastStatusKey)).first();
        if (doc == null) {
            return null;
        }
        return fromBase64(doc.getString(lastStatusKey));
    }

    /** Upserts the given status as the single "last status" document. */
    public static void setLastStatus(Status status) {
        MongoCollection<Document> lastStatusCollection = db.getCollection(lastStatusCollectionName);
        Document document = new Document(lastStatusKey, toBase64(status));
        lastStatusCollection.replaceOne(Filters.exists(lastStatusKey), document,
                new UpdateOptions().upsert(true));
    }

    /**
     * Serializes the status with Java native serialization and Base64-encodes it.
     *
     * @throws Error wrapping the underlying IOException on serialization failure
     */
    private static String toBase64(Status status) {
        final ByteArrayOutputStream bos = new ByteArrayOutputStream();
        // try-with-resources guarantees the ObjectOutputStream is flushed and
        // closed BEFORE the buffer is read; the original leaked the stream and
        // read the buffer while data could still be sitting in its block buffer.
        try (ObjectOutputStream out = new ObjectOutputStream(bos)) {
            out.writeObject(status);
        } catch (IOException e) {
            // Preserve the cause; the original threw a bare Error and lost it.
            throw new Error("Failed to serialize status", e);
        }
        return Base64.getEncoder().encodeToString(bos.toByteArray());
    }

    /**
     * Decodes a Base64 string produced by {@link #toBase64} back into a Status.
     * NOTE(review): this Java-native-deserializes data read back from our own
     * database; never point it at untrusted input.
     *
     * @throws Error wrapping the underlying exception on deserialization failure
     */
    private static Status fromBase64(String s) {
        byte[] bArray = Base64.getDecoder().decode(s);
        try (ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(bArray))) {
            return (Status) in.readObject();
        } catch (IOException | ClassNotFoundException e) {
            throw new Error("Failed to deserialize status", e);
        }
    }
}
package mcjty.deepresonance.setup;

import elec332.core.api.client.IIconRegistrar;
import elec332.core.api.client.ITextureLoader;
import mcjty.deepresonance.DeepResonance;
import mcjty.deepresonance.RadiationOverlayRenderer;
import mcjty.deepresonance.client.sound.GeneratorSoundController;
import mcjty.deepresonance.fluid.DRFluidRegistry;
import mcjty.lib.setup.DefaultClientProxy;
import net.minecraftforge.client.event.RenderGameOverlayEvent;
import net.minecraftforge.client.model.obj.OBJLoader;
import net.minecraftforge.common.MinecraftForge;
import net.minecraftforge.fml.common.event.FMLPreInitializationEvent;
import net.minecraftforge.fml.common.eventhandler.SubscribeEvent;

/**
 * Client-side proxy: performs client-only initialization (event registration,
 * OBJ model domain, generator sound setup) and registers fluid textures.
 */
public class ClientProxy extends DefaultClientProxy implements ITextureLoader {

    @Override
    public void preInit(FMLPreInitializationEvent e) {
        super.preInit(e);
        // Register this instance so the @SubscribeEvent handler below receives
        // forge events.
        MinecraftForge.EVENT_BUS.register(this);
        // Allow OBJ models in this mod's resource domain to be resolved.
        OBJLoader.INSTANCE.addDomain(DeepResonance.MODID);
        GeneratorSoundController.init();
    }

    // Delegates overlay-render events to the radiation overlay renderer, which
    // decides whether/how to draw (presumably filtering on the overlay element
    // type internally — confirm in RadiationOverlayRenderer).
    @SubscribeEvent
    public void renderGameOverlayEvent(RenderGameOverlayEvent evt) {
        RadiationOverlayRenderer.onRender(evt);
    }

    // Texture-loading hook from ITextureLoader: registers the mod's fluid icons.
    @Override
    public void registerTextures(IIconRegistrar iIconRegistrar) {
        DRFluidRegistry.registerIcons(iIconRegistrar);
    }
}
package net.finmath.stochastic; import java.io.Serializable; import java.util.List; import java.util.function.DoubleBinaryOperator; import java.util.function.DoubleUnaryOperator; import java.util.function.IntToDoubleFunction; import java.util.stream.DoubleStream; import net.finmath.functions.DoubleTernaryOperator; /** * This interface describes the methods implemented by an immutable random variable, i.e. * methods that leave a random variable unchanged (immutable). * This is used to ensure that arguments or return values are not changed. * * For C++ guys: In C++ you could achieve this by making a return value const. * * <br> * * <b>IMPORTANT:</b> As of version 1.3 / revision 487 the design of RandomVariableFromDoubleArray, RandomVariable has changed: * All methods of RandomVariableFromDoubleArray leave the object immutable and the interface ImmutableRandomVariableInterface has been renamed * to RandomVariable. Your code remains compatible if you perform the following changes: * <ul> * <li>Change calls to RandomVariable objects value like <code>value.mult(argument);</code> to <code>value = value.mult(argument);</code> * <li>Remove calls to getMutableCopy() since they are no longer needed. * <li>Remove wrapping in RandomVariableMutableClone since they are no longer needed. * </ul> * The change has some performance impact, however, the original performance may be achieved again * via the use of Java 8 lambdas and the concept of the <code>RandomVariableAccumulatorInterface</code>. * <br> * * @author Christian Fries * @version 1.5 */ public interface RandomVariable extends Serializable { /** * Compare this random variable with a given one * * @param randomVariable Random variable to compare with. * @return True if this random variable and the given one are equal, otherwise false */ boolean equals(RandomVariable randomVariable); /** * Returns the filtration time. * * @return The filtration time. 
*/ double getFiltrationTime(); int getTypePriority(); /** * Evaluate at a given path or state. * * @param pathOrState Index of the path or state. * @return Value of this random variable at the given path or state. */ double get(int pathOrState); /** * Returns the number of paths or states. * * @return Number of paths or states. */ int size(); /** * Check if this random variable is deterministic in the sense that it is represented by a single double value. * Note that the methods returns false, if the random variable is represented by a vector where each element has the same value. * * @return True if this random variable is deterministic. */ boolean isDeterministic(); /** * Returns the underlying values and a random variable. * * If the implementation supports an "inner representation", returns the inner representation. Otherwise just returns this. * * @return The underling values. */ default RandomVariable getValues() { return this; } /** * Returns a vector representing the realization of this random variable. * This method is merely useful for analysis. Its interpretation depends on the context (Monte-Carlo or lattice). * The method does not expose an internal data model. * * @return Vector of realizations of this random variable. */ double[] getRealizations(); /** * Returns the double value if isDeterministic() is true. otherwise throws an {@link UnsupportedOperationException}. * * @return The double value if isDeterministic() is true, otherwise throws an an {@link UnsupportedOperationException}. */ Double doubleValue(); /** * Returns the operator path &rarr; this.get(path) corresponding to this random variable. * * @return The operator path &rarr; this.get(path) corresponding to this random variable. */ IntToDoubleFunction getOperator(); /** * Returns a stream of doubles corresponding to the realizations of this random variable. * * @return A stream of doubles corresponding to the realizations of this random variable. 
*/ DoubleStream getRealizationsStream(); /** * Returns the minimum value attained by this random variable. * * @return The minimum value. */ double getMin(); /** * Returns the maximum value attained by this random variable. * * @return The maximum value. */ double getMax(); /** * Returns the expectation of this random variable. * The result of this method has to agrees with <code>average().doubleValue()</code>. * * @return The average assuming equi-distribution. */ double getAverage(); /** * Returns the expectation of this random variable for a given probability measure (weight). * * The result of this method is (mathematically) equivalent to * <br> * <code>this.mult(probabilities).getAverage() / probabilities.getAverage()</code> * <br> * while the internal implementation may differ, e.g. being more efficient by performing multiplication and summation in the same loop. * * @param probabilities The probability weights. * @return The average assuming the given probability weights. */ double getAverage(RandomVariable probabilities); /** * Returns the variance of this random variable, i.e., * V where V = ((X-m)^2).getAverage() and X = this and m = X.getAverage(). * * @return The average assuming equi-distribution. */ double getVariance(); /** * Returns the variance of this random variable, i.e., * V where V = ((X-m)^2).getAverage(probabilities) and X = this and m = X.getAverage(probabilities). * * @param probabilities The probability weights. * @return The average assuming the given probability weights. */ double getVariance(RandomVariable probabilities); /** * Returns the sample variance of this random variable, i.e., * V * size()/(size()-1) where V = getVariance(). * * @return The sample variance. */ double getSampleVariance(); /** * Returns the standard deviation of this random variable, i.e., * sqrt(V) where V = ((X-m)^2).getAverage() and X = this and m = X.getAverage(). * * @return The standard deviation assuming equi-distribution. 
*/ double getStandardDeviation(); /** * Returns the standard deviation of this random variable, i.e., * sqrt(V) where V = ((X-m)^2).getAverage(probabilities) and X = this and m = X.getAverage(probabilities). * * @param probabilities The probability weights. * @return The standard error assuming the given probability weights. */ double getStandardDeviation(RandomVariable probabilities); /** * Returns the standard error (discretization error) of this random variable. * For a Monte-Carlo simulation this is 1/Math.sqrt(n) * {@link #getStandardDeviation() }. * * @return The standard error assuming equi-distribution. */ double getStandardError(); /** * Returns the standard error (discretization error) of this random variable. * For a Monte-Carlo simulation this is 1/Math.sqrt(n) * {@link #getStandardDeviation(RandomVariable) }. * * @param probabilities The probability weights. * @return The standard error assuming the given probability weights. */ double getStandardError(RandomVariable probabilities); /** * Returns the quantile value for this given random variable, i.e., the value x such that P(this &lt; x) = quantile, * where P denotes the probability measure. * The method will consider picewise constant values (with constant extrapolation) in the random variable. * That is getQuantile(0) wiil return the smallest value and getQuantile(1) will return the largest value. * * @param quantile The quantile level. * @return The quantile value assuming equi-distribution. */ double getQuantile(double quantile); /** * Returns the quantile value for this given random variable, i.e., the value x such that P(this &lt; x) = quantile, * where P denotes the probability measure. * * @param quantile The quantile level. * @param probabilities The probability weights. * @return The quantile value assuming the given probability weights. */ double getQuantile(double quantile, RandomVariable probabilities); /** * Returns the expectation over a quantile for this given random variable. 
* The method will consider picewise constant values (with constant extrapolation) in the random variable. * For a &le; b the method returns (&Sigma;<sub>a &le; i &le; b</sub> x[i]) / (b-a+1), where * <ul> * <li>a = min(max((n+1) * quantileStart - 1, 0, 1);</li> * <li>b = min(max((n+1) * quantileEnd - 1, 0, 1);</li> * <li>n = this.size();</li> * </ul> * For quantileStart &gt; quantileEnd the method returns getQuantileExpectation(quantileEnd, quantileStart). * * @param quantileStart Lower bound of the integral. * @param quantileEnd Upper bound of the integral. * @return The (conditional) expectation of the values between two quantile levels assuming equi-distribution. */ double getQuantileExpectation(double quantileStart, double quantileEnd); /** * Generates a Histogram based on the realizations stored in this random variable. * The returned <code>result</code> array's length is <code>intervalPoints.length+1</code>. * <ul> * <li>The value result[0] equals the relative frequency of values observed in the interval ( -infinity, intervalPoints[0] ].</li> * <li>The value result[i] equals the relative frequency of values observed in the interval ( intervalPoints[i-1], intervalPoints[i] ].</li> * <li>The value result[n] equals the relative frequency of values observed in the interval ( intervalPoints[n-1], infinity ).</li> * </ul> * where n = intervalPoints.length. Note that the intervals are open on the left, closed on the right, i.e., * result[i] contains the number of elements x with intervalPoints[i-1] &lt; x &le; intervalPoints[i]. * * Thus, is you have a random variable which only takes values contained in the (sorted) array * <code>possibleValues</code>, then <code>result = getHistogram(possibleValues)</code> returns an * array where <code>result[i]</code> is the relative frequency of occurrence of <code>possibleValues[i]</code>. * * The sum of result[i] over all i is equal to 1, except for uninitialized random * variables where all values are 0. 
* * @param intervalPoints Array of ascending values defining the interval boundaries. * @return A histogram with respect to a provided interval. */ double[] getHistogram(double[] intervalPoints); /** * Generates a histogram based on the realizations stored in this random variable * using interval points calculated from the arguments, see also {@link #getHistogram(double[])}. * The interval points are * set with equal distance over an the interval of the specified standard deviation. * * The interval points used are * <center> * <code>x[i] = mean + alpha[i] * standardDeviations * sigma</code> * </center> * where * <ul> * <li>i = 0,..., numberOfPoints-1,</li> * <li>alpha[i] = (i - (numberOfPoints-1)/2.0) / ((numberOfPoints-1)/2.0),</li> * <li>mean = {@link #getAverage()},</li> * <li>sigma = {@link #getStandardDeviation()}.</li> * </ul> * * The methods <code>result</code> is an array of two vectors, where result[0] are the * intervals center points ('anchor points') and result[1] contains the relative frequency for the interval. * The 'anchor point' for the interval (-infinity, x[0]) is x[0] - 1/2 (x[1]-x[0]) * and the 'anchor point' for the interval (x[n], infinity) is x[n] + 1/2 (x[n]-x[n-1]). * Here n = numberOfPoints is the number of interval points. * * @param numberOfPoints The number of interval points. * @param standardDeviations The number of standard deviations defining the discretization radius. * @return A histogram, given as double[2][], where result[0] are the center point of the intervals and result[1] is the value of {@link #getHistogram(double[])} for the given the interval points. The length of result[0] and result[1] is numberOfPoints+1. */ double[][] getHistogram(int numberOfPoints, double standardDeviations); /** * Return a cacheable version of this object (often a self-reference). 
* This method should be called when you store the object for later use,
	 * i.e., assign it, or when the object is consumed in a function, but later
	 * used also in another function.
	 *
	 * @return A cacheable version of this object (often a self-reference).
	 */
	RandomVariable cache();

	/**
	 * Applies x &rarr; operator(x) to this random variable.
	 * It returns a new random variable with the result.
	 *
	 * @param operator An unary operator/function, mapping double to double.
	 * @return New random variable with the result of the function.
	 */
	RandomVariable apply(DoubleUnaryOperator operator);

	/**
	 * Applies x &rarr; operator(x,y) to this random variable, where x is this random variable and y is a given random variable.
	 * It returns a new random variable with the result.
	 *
	 * @param operator A binary operator/function, mapping (double,double) to double.
	 * @param argument A random variable.
	 * @return New random variable with the result of the function.
	 */
	RandomVariable apply(DoubleBinaryOperator operator, RandomVariable argument);

	/**
	 * Applies x &rarr; operator(x,y,z) to this random variable, where x is this random variable and y and z are given random variables.
	 * It returns a new random variable with the result.
	 *
	 * @param operator A ternary operator/function, mapping (double,double,double) to double.
	 * @param argument1 A random variable representing y.
	 * @param argument2 A random variable representing z.
	 * @return New random variable with the result of the function.
	 */
	RandomVariable apply(DoubleTernaryOperator operator, RandomVariable argument1, RandomVariable argument2);

	/**
	 * Applies x &rarr; min(x,cap) to this random variable.
	 * It returns a new random variable with the result.
	 *
	 * @param cap The cap.
	 * @return New random variable with the result of the function.
	 */
	RandomVariable cap(double cap);

	/**
	 * Applies x &rarr; max(x,floor) to this random variable.
	 * It returns a new random variable with the result.
	 *
	 * @param floor The floor.
	 * @return New random variable with the result of the function.
	 */
	RandomVariable floor(double floor);

	/**
	 * Applies x &rarr; x + value to this random variable.
	 * It returns a new random variable with the result.
	 *
	 * @param value The value to add.
	 * @return New random variable with the result of the function.
	 */
	RandomVariable add(double value);

	/**
	 * Applies x &rarr; x - value to this random variable.
	 *
	 * @param value The value to subtract.
	 * @return New random variable with the result of the function.
	 */
	RandomVariable sub(double value);

	/**
	 * Applies x &rarr; x * value to this random variable.
	 *
	 * @param value The value to multiply.
	 * @return New random variable with the result of the function.
	 */
	RandomVariable mult(double value);

	/**
	 * Applies x &rarr; x / value to this random variable.
	 *
	 * @param value The value to divide.
	 * @return New random variable with the result of the function.
	 */
	RandomVariable div(double value);

	/**
	 * Applies x &rarr; pow(x,exponent) to this random variable.
	 *
	 * @param exponent The exponent.
	 * @return New random variable with the result of the function.
	 */
	RandomVariable pow(double exponent);

	/**
	 * Returns a random variable which is deterministic and corresponds to
	 * the expectation of this random variable.
	 *
	 * @return New random variable being the expectation of this random variable.
	 */
	RandomVariable average();

	/**
	 * Returns the conditional expectation using a given conditional expectation estimator.
	 *
	 * @param conditionalExpectationOperator A given conditional expectation estimator.
	 * @return The conditional expectation of this random variable (as a random variable)
	 */
	default RandomVariable getConditionalExpectation(ConditionalExpectationEstimator conditionalExpectationOperator)
	{
		return conditionalExpectationOperator.getConditionalExpectation(this);
	}

	/**
	 * Applies x &rarr; x * x to this random variable.
	 *
	 * @return New random variable with the result of the function.
	 */
	RandomVariable squared();

	/**
	 * Applies x &rarr; sqrt(x) to this random variable.
	 *
	 * @return New random variable with the result of the function.
	 */
	RandomVariable sqrt();

	/**
	 * Applies x &rarr; exp(x) to this random variable.
	 *
	 * @return New random variable with the result of the function.
	 */
	RandomVariable exp();

	/**
	 * Applies x &rarr; log(x) to this random variable.
	 *
	 * @return New random variable with the result of the function.
	 */
	RandomVariable log();

	/**
	 * Applies x &rarr; sin(x) to this random variable.
	 *
	 * @return New random variable with the result of the function.
	 */
	RandomVariable sin();

	/**
	 * Applies x &rarr; cos(x) to this random variable.
	 *
	 * @return New random variable with the result of the function.
	 */
	RandomVariable cos();

	/**
	 * Applies x &rarr; x+randomVariable to this random variable.
	 *
	 * @param randomVariable A random variable (compatible with this random variable).
	 * @return New random variable with the result of the function.
	 */
	RandomVariable add(RandomVariable randomVariable);

	/**
	 * Applies x &rarr; x-randomVariable to this random variable.
	 *
	 * @param randomVariable A random variable (compatible with this random variable).
	 * @return New random variable with the result of the function.
	 */
	RandomVariable sub(RandomVariable randomVariable);

	/**
	 * Applies x &rarr; randomVariable-x to this random variable.
	 *
	 * @param randomVariable A random variable (compatible with this random variable).
	 * @return New random variable with the result of the function.
	 */
	RandomVariable bus(RandomVariable randomVariable);

	/**
	 * Applies x &rarr; x*randomVariable to this random variable.
	 *
	 * @param randomVariable A random variable (compatible with this random variable).
	 * @return New random variable with the result of the function.
	 */
	RandomVariable mult(RandomVariable randomVariable);

	/**
	 * Applies x &rarr; x/randomVariable to this random variable.
	 *
	 * @param randomVariable A random variable (compatible with this random variable).
	 * @return New random variable with the result of the function.
	 */
	RandomVariable div(RandomVariable randomVariable);

	/**
	 * Applies x &rarr; randomVariable/x to this random variable.
	 *
	 * @param randomVariable A random variable (compatible with this random variable).
	 * @return New random variable with the result of the function.
	 */
	RandomVariable vid(RandomVariable randomVariable);

	/**
	 * Applies x &rarr; min(x,cap) to this random variable.
	 *
	 * @param cap The cap. A random variable (compatible with this random variable).
	 * @return New random variable with the result of the function.
	 */
	RandomVariable cap(RandomVariable cap);

	/**
	 * Applies x &rarr; max(x,floor) to this random variable.
	 *
	 * @param floor The floor. A random variable (compatible with this random variable).
	 * @return New random variable with the result of the function.
	 */
	RandomVariable floor(RandomVariable floor);

	/**
	 * Applies x &rarr; x * (1.0 + rate * periodLength) to this random variable.
	 *
	 * @param rate The accruing rate. A random variable (compatible with this random variable).
	 * @param periodLength The period length
	 * @return New random variable with the result of the function.
	 */
	RandomVariable accrue(RandomVariable rate, double periodLength);

	/**
	 * Applies x &rarr; x / (1.0 + rate * periodLength) to this random variable.
	 *
	 * @param rate The discounting rate. A random variable (compatible with this random variable).
	 * @param periodLength The period length
	 * @return New random variable with the result of the function.
	 */
	RandomVariable discount(RandomVariable rate, double periodLength);

	/**
	 * Applies x &rarr; (x &ge; 0 ? valueIfTriggerNonNegative : valueIfTriggerNegative)
	 *
	 * @param valueIfTriggerNonNegative The value used if this is greater or equal 0
	 * @param valueIfTriggerNegative The value used if this is less than 0
	 * @return New random variable with the result of the function.
	 */
	RandomVariable choose(RandomVariable valueIfTriggerNonNegative, RandomVariable valueIfTriggerNegative);

	/**
	 * Applies x &rarr; 1/x to this random variable.
	 *
	 * @return New random variable with the result of the function.
	 */
	RandomVariable invert();

	/**
	 * Applies x &rarr; Math.abs(x), i.e. x &rarr; |x| to this random variable.
	 *
	 * @return New random variable with the result of the function.
	 */
	RandomVariable abs();

	/**
	 * Applies x &rarr; x + factor1 * factor2
	 *
	 * @param factor1 The factor 1. A random variable (compatible with this random variable).
	 * @param factor2 The factor 2.
	 * @return New random variable with the result of the function.
	 */
	RandomVariable addProduct(RandomVariable factor1, double factor2);

	/**
	 * Applies x &rarr; x + factor1 * factor2
	 *
	 * @param factor1 The factor 1. A random variable (compatible with this random variable).
	 * @param factor2 The factor 2. A random variable (compatible with this random variable).
	 * @return New random variable with the result of the function.
	 */
	RandomVariable addProduct(RandomVariable factor1, RandomVariable factor2);

	/**
	 * Applies x &rarr; x + numerator / denominator
	 *
	 * @param numerator The numerator of the ratio to add. A random variable (compatible with this random variable).
	 * @param denominator The denominator of the ratio to add. A random variable (compatible with this random variable).
	 * @return New random variable with the result of the function.
	 */
	RandomVariable addRatio(RandomVariable numerator, RandomVariable denominator);

	/**
	 * Applies x &rarr; x - numerator / denominator
	 *
	 * @param numerator The numerator of the ratio to sub. A random variable (compatible with this random variable).
	 * @param denominator The denominator of the ratio to sub. A random variable (compatible with this random variable).
	 * @return New random variable with the result of the function.
	 */
	RandomVariable subRatio(RandomVariable numerator, RandomVariable denominator);

	/**
	 * Applies \( x \mapsto x + \sum_{i=0}^{n-1} factor1_{i} * factor2_{i} \).
	 *
	 * @param factor1 The factor 1. A list of random variables (compatible with this random variable).
	 * @param factor2 The factor 2. A list of random variables (compatible with this random variable).
	 * @return New random variable with the result of the function.
	 */
	default RandomVariable addSumProduct(List<RandomVariable> factor1, List<RandomVariable> factor2)
	{
		RandomVariable result = this;
		for(int i=0; i<factor1.size(); i++) {
			result = result.addProduct(factor1.get(i), factor2.get(i));
		}
		return result;
	}

	/**
	 * Applies x &rarr; (Double.isNaN(x) ? 1.0 : 0.0)
	 *
	 * @return A random variable which is 1.0 for all states that are NaN, otherwise 0.0.
	 */
	RandomVariable isNaN();
}
package net.glowstone.inventory; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.ListIterator; import java.util.Objects; import java.util.Set; import lombok.Getter; import lombok.Setter; import net.glowstone.GlowServer; import net.glowstone.entity.GlowPlayer; import net.glowstone.util.InventoryUtil; import org.bukkit.Location; import org.bukkit.Material; import org.bukkit.entity.HumanEntity; import org.bukkit.event.inventory.InventoryType; import org.bukkit.event.inventory.InventoryType.SlotType; import org.bukkit.inventory.Inventory; import org.bukkit.inventory.InventoryHolder; import org.bukkit.inventory.InventoryView; import org.bukkit.inventory.ItemStack; /** * A class which represents an inventory. */ public class GlowInventory implements Inventory { /** * This inventory's slots. */ private List<GlowInventorySlot> slots; /** * The list of humans viewing this inventory. */ private Set<HumanEntity> viewers; /** * The owner of this inventory. */ @Getter private InventoryHolder holder; /** * The type of this inventory. */ @Getter private InventoryType type; /** * The inventory's name. */ @Getter private String title; /** * The inventory's maximum stack size. */ @Getter @Setter private int maxStackSize = 64; protected GlowInventory() { } public GlowInventory(InventoryHolder holder, InventoryType type) { this(holder, type, type.getDefaultSize(), type.getDefaultTitle()); } public GlowInventory(InventoryHolder holder, InventoryType type, int size) { this(holder, type, size, type.getDefaultTitle()); } public GlowInventory(InventoryHolder holder, InventoryType type, int size, String title) { initialize(GlowInventorySlot.createList(size), new HashSet<>(), holder, type, title); } /** * Initializes some key components of this inventory. * * <p>This should be called in the constructor. * * @param slots List of slots this inventory has. 
* @param viewers Set for storage of current inventory viewers. * @param owner InventoryHolder which owns this Inventory. * @param type The inventory type. * @param title Inventory title, displayed in the client. */ protected void initialize(List<GlowInventorySlot> slots, Set<HumanEntity> viewers, InventoryHolder owner, InventoryType type, String title) { this.slots = slots; this.viewers = viewers; this.holder = owner; this.type = type; this.title = title; } // Internals /** * Add a viewer to the inventory. * * @param viewer The HumanEntity to add. */ public void addViewer(HumanEntity viewer) { viewers.add(viewer); } /** * Remove a viewer from the inventory. * * @param viewer The HumanEntity to remove. */ public void removeViewer(HumanEntity viewer) { viewers.remove(viewer); } /** * Returns the set which contains viewers. * * @return Viewers set. */ public Set<HumanEntity> getViewersSet() { return Collections.synchronizedSet(viewers); } // Basic Stuff /** * Returns a certain slot. * * @param slot index. * @return The requested slot. */ public GlowInventorySlot getSlot(int slot) { if (slot < 0 || slot > slots.size()) { GlowServer.logger.info("Out of bound slot: " + slot + " (max " + slots.size() + ")"); return null; } return slots.get(slot); } /** * Get the type of the specified slot. * * @param slot The slot number. * @return The SlotType of the slot. */ public SlotType getSlotType(int slot) { if (slot < 0) { return SlotType.OUTSIDE; } return slots.get(slot).getType(); } /** * Check whether it is allowed for a player to insert the given ItemStack at the slot, * regardless of the slot's current contents. * * <p>Should return false for crafting output slots or armor slots which cannot accept the given * item. * * @param slot The slot number. * @param stack The stack to add. * @return Whether the stack can be added there. 
*/ public boolean itemPlaceAllowed(int slot, ItemStack stack) { return getSlotType(slot) != SlotType.RESULT; } /** * Check whether, in a shift-click operation, an item of the specified type may be placed in the * given slot. * * @param slot The slot number. * @param stack The stack to add. * @return Whether the stack can be added there. */ public boolean itemShiftClickAllowed(int slot, ItemStack stack) { return itemPlaceAllowed(slot, stack); } /** * Handle a shift click in this inventory by the specified player. * * <p>The default implementation distributes items from the right to the left and from the * bottom to the top. * * @param player The player who clicked * @param view The inventory view in which was clicked * @param clickedSlot The slot in the view * @param clickedItem The item at which was clicked */ public void handleShiftClick(GlowPlayer player, InventoryView view, int clickedSlot, ItemStack clickedItem) { clickedItem = player.getInventory().tryToFillSlots(clickedItem, 8, -1, 35, 8); view.setItem(clickedSlot, clickedItem); } /** * Tries to put the given items into the specified slots of this inventory from the start slot * (inclusive) to the end slot (exclusive). * * <p>The slots are supplied in pairs, first the start then the end slots. * * <p>This will first try to fill up all partial slots and if items are still left after doing * so, it places them into the first empty slot. * * <p>If no empty slot was found and there are still items left, they're returned from this * method. * * @param stack The items to place down * @param slots Pairs of start/end slots * @return The remaining items or {@code null} if non are remaining */ public ItemStack tryToFillSlots(ItemStack stack, int... 
slots) { if (slots.length % 2 != 0) { throw new IllegalArgumentException("Slots must be pairs."); } ItemStack maxStack = stack.clone(); maxStack.setAmount(stack.getMaxStackSize()); int firstEmpty = -1; for (int s = 0; s < slots.length && stack.getAmount() > 0; s += 2) { // Iterate through all pairs of start and end slots int start = slots[s]; int end = slots[s + 1]; int delta = start < end ? 1 : -1; for (int i = start; i != end && stack.getAmount() > 0; i += delta) { // Check whether shift clicking is allowed in that slot of the inventory if (!itemShiftClickAllowed(i, stack)) { continue; } ItemStack currentStack = getItem(i); // Store the first empty slot if (firstEmpty == -1 && InventoryUtil.isEmpty(currentStack)) { firstEmpty = i; } else if (currentStack .isSimilar(stack)) { // Non empty slot of similar items, try to fill stack // Calculate the amount of transferable items int amount = currentStack.getAmount(); int maxStackSize = Math.min(currentStack.getMaxStackSize(), getMaxStackSize()); int transfer = Math.min(stack.getAmount(), maxStackSize - amount); if (transfer > 0) { // And if there are any, transfer them currentStack.setAmount(amount + transfer); stack.setAmount(stack.getAmount() - transfer); } setItem(i, currentStack); } } } if (firstEmpty != -1) { // Fill empty slot if (stack.getAmount() > stack.getMaxStackSize()) { setItem(firstEmpty, maxStack); stack.setAmount(stack.getAmount() - stack.getMaxStackSize()); } else { ItemStack finalStack = stack.clone(); setItem(firstEmpty, finalStack); stack.setAmount(0); } } if (stack.getAmount() <= 0) { stack = InventoryUtil.createEmptyStack(); } return stack; } /** * Gets the number of slots in this inventory according to the protocol. * * <p>Some inventories have 0 slots in the protocol, despite having slots. * * @return The numbers of slots */ public int getRawSlots() { return getSize(); } @Override public int getSize() { return slots.size(); } // Basic Stuff /** * Returns the whole slot list. 
* * @return Slot list. */ public List<GlowInventorySlot> getSlots() { return Collections.unmodifiableList(slots); } @Override public final String getName() { // Can't be fully Lombokified because getTitle() is identical return title; } /** * Set the custom title of this inventory or reset it to the default. * * @param title The new title, or null to reset. */ public void setTitle(String title) { if (title == null) { this.title = type.getDefaultTitle(); } else { this.title = title; } } @Override public List<HumanEntity> getViewers() { return new ArrayList<>(viewers); } @Override public ListIterator<ItemStack> iterator() { return new InventoryIterator(this); } @Override public ListIterator<ItemStack> iterator(int index) { if (index < 0) { // negative indices go from back index += getSize() + 1; } return new InventoryIterator(this, index); } @Override public Location getLocation() { return null; } // Get, Set, Add, Remove @Override public ItemStack getItem(int index) { return slots.get(index).getItem(); } @Override public void setItem(int index, ItemStack item) { if (index == -1) { return; } slots.get(index).setItem(item); } @Override public HashMap<Integer, ItemStack> addItem(ItemStack... items) { HashMap<Integer, ItemStack> result = new HashMap<>(); for (int i = 0; i < items.length; ++i) { ItemStack remaining = addItemStack(items[i], false); if (!InventoryUtil.isEmpty(remaining)) { result.put(i, remaining); } } return result; } /** * Adds the contents of the given ItemStack to the inventory. * * @param item the ItemStack to add * @param ignoreMeta if true, can convert to items with different NBT data in order to stack * with existing copies of those items, provided the material and damage value match * @return the items that couldn't be added, or an empty stack if all were added */ public ItemStack addItemStack(ItemStack item, boolean ignoreMeta) { int maxStackSize = item.getType() == null ? 
64 : item.getType().getMaxStackSize(); int toAdd = item.getAmount(); Iterator<GlowInventorySlot> iterator = slots.iterator(); while (toAdd > 0 && iterator.hasNext()) { GlowInventorySlot slot = iterator.next(); // Look for existing stacks to add to ItemStack slotItem = InventoryUtil.itemOrEmpty(slot.getItem()); if (!InventoryUtil.isEmpty(slotItem) && compareItems(item, slotItem, ignoreMeta)) { int space = maxStackSize - slotItem.getAmount(); if (space < 0) { continue; } if (space > toAdd) { space = toAdd; } slotItem.setAmount(slotItem.getAmount() + space); toAdd -= space; } } if (toAdd > 0) { // Look for empty slots to add to iterator = slots.iterator(); while (toAdd > 0 && iterator.hasNext()) { GlowInventorySlot slot = iterator.next(); ItemStack slotItem = slot.getItem(); if (InventoryUtil.isEmpty(slotItem) && itemPlaceAllowed(slots.indexOf(slot), item)) { int num = toAdd > maxStackSize ? maxStackSize : toAdd; slotItem = item.clone(); slotItem.setAmount(num); slot.setItem(slotItem); toAdd -= num; } } } if (toAdd > 0) { ItemStack remaining = new ItemStack(item); remaining.setAmount(toAdd); return remaining; } return InventoryUtil.createEmptyStack(); } @Override public HashMap<Integer, ItemStack> removeItem(ItemStack... items) { HashMap<Integer, ItemStack> result = new HashMap<>(); for (int i = 0; i < items.length; ++i) { ItemStack remaining = removeItemStack(items[i], true); if (!InventoryUtil.isEmpty(remaining)) { result.put(i, remaining); } } return result; } /** * Removes the given ItemStack from the inventory. 
* * @param item the ItemStack to remove * @param ignoreMeta if true, can choose an item with different NBT data, provided the material * and damage value match * @return the items that couldn't be removed, or an empty stack if all were removed */ public ItemStack removeItemStack(ItemStack item, boolean ignoreMeta) { int toRemove = item.getAmount(); Iterator<GlowInventorySlot> iterator = slots.iterator(); while (toRemove > 0 && iterator.hasNext()) { GlowInventorySlot slot = iterator.next(); ItemStack slotItem = slot.getItem(); // Look for stacks to remove from. if (!InventoryUtil.isEmpty(slotItem) && compareItems(item, slotItem, ignoreMeta)) { if (slotItem.getAmount() > toRemove) { slotItem.setAmount(slotItem.getAmount() - toRemove); } else { toRemove -= slotItem.getAmount(); item.setAmount(0); slot.setItem(new ItemStack(Material.AIR, 0)); } } } if (toRemove > 0) { ItemStack remaining = new ItemStack(item); remaining.setAmount(toRemove); return remaining; } return InventoryUtil.createEmptyStack(); } private boolean compareItems(ItemStack a, ItemStack b, boolean ignoreMeta) { if (ignoreMeta) { return a.getTypeId() == b.getTypeId() && a.getDurability() == b.getDurability(); } return a.isSimilar(b); } @Override public ItemStack[] getContents() { ItemStack[] contents = new ItemStack[getSize()]; int i = 0; for (ItemStack itemStack : this) { contents[i] = InventoryUtil.itemOrEmpty(itemStack); i++; } return contents; } @Override public void setContents(ItemStack[] items) { if (items == null) { throw new IllegalArgumentException("Cannot set contents to null array!"); } if (items.length != getSize()) { throw new IllegalArgumentException("Length of items must be " + getSize()); } Iterator<GlowInventorySlot> iterator = slots.iterator(); for (int i = 0; i < getSize(); i++) { iterator.next().setItem(InventoryUtil.itemOrEmpty(items[i])); } } @Override public ItemStack[] getStorageContents() { return getContents(); } @Override public void setStorageContents(ItemStack[] itemStacks) 
throws IllegalArgumentException { setContents(items); } @Override public String toString() { StringBuilder sb = new StringBuilder(getClass().getSimpleName()); sb.append(" for ").append(getHolder()).append(":\n"); for (GlowInventorySlot slot : slots) { ItemStack item = slot.getItem(); SlotType type = slot.getType(); if (type != SlotType.CONTAINER || !InventoryUtil.isEmpty(item)) { sb.append(item).append(" in ").append(slot.getType()).append('\n'); } } return sb.toString(); } // Contains @Override public boolean contains(int materialId) { return first(materialId) >= 0; } @Override public boolean contains(Material material) { return first(material) >= 0; } @Override public boolean contains(ItemStack item) { return first(item) >= 0; } @Override public boolean contains(int materialId, int amount) { HashMap<Integer, ? extends ItemStack> found = all(materialId); int total = 0; for (ItemStack stack : found.values()) { total += stack.getAmount(); } return total >= amount; } @Override public boolean contains(Material material, int amount) { return contains(material.getId(), amount); } @Override public boolean contains(ItemStack item, int amount) { return contains(item.getTypeId(), amount); } @Override public boolean containsAtLeast(ItemStack item, int amount) { return false; // todo } // Find all @Override public HashMap<Integer, ItemStack> all(int materialId) { HashMap<Integer, ItemStack> result = new HashMap<>(); int i = 0; for (ItemStack slotItem : this) { if (!InventoryUtil.isEmpty(slotItem) && slotItem.getTypeId() == materialId) { result.put(i, InventoryUtil.itemOrEmpty(slotItem)); } i++; } return result; } @Override public HashMap<Integer, ItemStack> all(Material material) { return all(material.getId()); } @Override public HashMap<Integer, ItemStack> all(ItemStack item) { HashMap<Integer, ItemStack> result = new HashMap<>(); int i = 0; for (ItemStack slotItem : this) { if (Objects.equals(slotItem, item)) { result.put(i, slotItem); } i++; } return result; } // Find 
first @Override public int first(int materialId) { int i = 0; for (ItemStack slotItem : this) { if (slotItem == null) { if (materialId == 0) { return i; } } else if (slotItem.getTypeId() == materialId) { return i; } i++; } return -1; } @Override public int first(Material material) { return first(material != null ? material.getId() : 0); } @Override public int first(ItemStack item) { int i = 0; for (ItemStack slotItem : this) { if (Objects.equals(slotItem, item)) { return i; } i++; } return -1; } @Override public int firstEmpty() { return first((Material) null); } // Remove @Override public void remove(int materialId) { HashMap<Integer, ? extends ItemStack> stacks = all(materialId); stacks.keySet().forEach(this::clear); } @Override public void remove(Material material) { HashMap<Integer, ? extends ItemStack> stacks = all(material); stacks.keySet().forEach(this::clear); } @Override public void remove(ItemStack item) { HashMap<Integer, ? extends ItemStack> stacks = all(item); stacks.keySet().forEach(this::clear); } // Clear @Override public void clear(int index) { setItem(index, null); } @Override public void clear() { for (GlowInventorySlot slot : slots) { slot.setItem(InventoryUtil.createEmptyStack()); } } /** * Consumes an item or the full stack in the given slot. * @param slot The slot to consume. * @param wholeStack True if we should remove the complete stack. * @return The number of item really consumed. */ public int consumeItem(int slot, boolean wholeStack) { ItemStack item = InventoryUtil.itemOrEmpty(getItem(slot)); if (InventoryUtil.isEmpty(item)) { return 0; } if (wholeStack || item.getAmount() == 1) { setItem(slot, InventoryUtil.createEmptyStack()); } else { item.setAmount(item.getAmount() - 1); setItem(slot, item); } return wholeStack ? item.getAmount() : 1; } /** * Consumes an item in the given slot. * @param slot The slot to consume. * @return The number of item really consumed. 
*/ public int consumeItem(int slot) { return this.consumeItem(slot, false); } }
package net.imagej.ui.swing.script; import java.awt.Dimension; import java.awt.Font; import java.awt.Toolkit; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.ComponentAdapter; import java.awt.event.ComponentEvent; import java.awt.event.ItemEvent; import java.awt.event.ItemListener; import java.awt.event.KeyEvent; import java.awt.event.WindowAdapter; import java.awt.event.WindowEvent; import java.io.BufferedReader; import java.io.CharArrayWriter; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.PipedInputStream; import java.io.PipedOutputStream; import java.io.PrintWriter; import java.io.Reader; import java.io.Writer; import java.net.URL; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeMap; import java.util.Vector; import java.util.concurrent.ExecutionException; import java.util.jar.JarEntry; import java.util.jar.JarOutputStream; import java.util.zip.ZipException; import javax.script.ScriptEngine; import javax.script.ScriptException; import javax.swing.AbstractAction; import javax.swing.BorderFactory; import javax.swing.BoxLayout; import javax.swing.ButtonGroup; import javax.swing.JCheckBoxMenuItem; import javax.swing.JFrame; import javax.swing.JMenu; import javax.swing.JMenuBar; import javax.swing.JMenuItem; import javax.swing.JOptionPane; import javax.swing.JRadioButtonMenuItem; import javax.swing.JTabbedPane; import javax.swing.JTextArea; import javax.swing.KeyStroke; import javax.swing.SwingUtilities; import javax.swing.event.ChangeEvent; import javax.swing.event.ChangeListener; import 
javax.swing.event.DocumentEvent; import javax.swing.event.DocumentListener; import javax.swing.text.BadLocationException; import javax.swing.text.Position; import net.imagej.ui.swing.script.commands.ChooseFontSize; import net.imagej.ui.swing.script.commands.ChooseTabSize; import net.imagej.ui.swing.script.commands.GitGrep; import net.imagej.ui.swing.script.commands.KillScript; import org.fife.ui.rsyntaxtextarea.AbstractTokenMakerFactory; import org.fife.ui.rsyntaxtextarea.RSyntaxTextArea; import org.fife.ui.rsyntaxtextarea.TokenMakerFactory; import org.scijava.Context; import org.scijava.command.CommandService; import org.scijava.event.ContextDisposingEvent; import org.scijava.event.EventHandler; import org.scijava.io.IOService; import org.scijava.log.LogService; import org.scijava.module.ModuleException; import org.scijava.module.ModuleService; import org.scijava.platform.PlatformService; import org.scijava.plugin.Parameter; import org.scijava.plugin.PluginInfo; import org.scijava.plugin.PluginService; import org.scijava.plugins.scripting.java.JavaEngine; import org.scijava.prefs.PrefService; import org.scijava.script.ScriptHeaderService; import org.scijava.script.ScriptInfo; import org.scijava.script.ScriptLanguage; import org.scijava.script.ScriptModule; import org.scijava.script.ScriptService; import org.scijava.ui.CloseConfirmable; import org.scijava.ui.UIService; import org.scijava.util.AppUtils; import org.scijava.util.FileUtils; import org.scijava.util.MiscUtils; import org.scijava.widget.FileWidget; @SuppressWarnings("serial") public class TextEditor extends JFrame implements ActionListener, ChangeListener, CloseConfirmable, DocumentListener { private static final Set<String> TEMPLATE_PATHS = new HashSet<String>(); public static final String AUTO_IMPORT_PREFS = "script.editor.AutoImport"; public static final String WINDOW_HEIGHT = "script.editor.height"; public static final String WINDOW_WIDTH = "script.editor.width"; public static final int 
DEFAULT_WINDOW_WIDTH = 800; public static final int DEFAULT_WINDOW_HEIGHT = 600; static { // Add known script template paths. addTemplatePath("script_templates"); // This path interferes with javadoc generation but is preserved for // backwards compatibility addTemplatePath("script-templates"); } private static AbstractTokenMakerFactory tokenMakerFactory = null; private JTabbedPane tabbed; private JMenuItem newFile, open, save, saveas, compileAndRun, compile, close, undo, redo, cut, copy, paste, find, replace, selectAll, kill, gotoLine, makeJar, makeJarWithSource, removeUnusedImports, sortImports, removeTrailingWhitespace, findNext, findPrevious, openHelp, addImport, clearScreen, nextError, previousError, openHelpWithoutFrames, nextTab, previousTab, runSelection, extractSourceJar, toggleBookmark, listBookmarks, openSourceForClass, openSourceForMenuItem, openMacroFunctions, decreaseFontSize, increaseFontSize, chooseFontSize, chooseTabSize, gitGrep, openInGitweb, replaceTabsWithSpaces, replaceSpacesWithTabs, toggleWhiteSpaceLabeling, zapGremlins, savePreferences; private RecentFilesMenuItem openRecent; private JMenu gitMenu, tabsMenu, fontSizeMenu, tabSizeMenu, toolsMenu, runMenu, whiteSpaceMenu; private int tabsMenuTabsStart; private Set<JMenuItem> tabsMenuItems; private FindAndReplaceDialog findDialog; private JCheckBoxMenuItem autoSave, wrapLines, tabsEmulated, autoImport; private JTextArea errorScreen = new JTextArea(); private int compileStartOffset; private Position compileStartPosition; private ErrorHandler errorHandler; private boolean respectAutoImports; @Parameter private Context context; @Parameter private LogService log; @Parameter private ModuleService moduleService; @Parameter private PlatformService platformService; @Parameter private IOService ioService; @Parameter private CommandService commandService; @Parameter private ScriptService scriptService; @Parameter private PluginService pluginService; @Parameter private ScriptHeaderService 
scriptHeaderService; @Parameter private UIService uiService; @Parameter private PrefService prefService; private Map<ScriptLanguage, JRadioButtonMenuItem> languageMenuItems; private JRadioButtonMenuItem noneLanguageItem; public TextEditor(final Context context) { super("Script Editor"); context.inject(this); initializeTokenMakers(); loadPreferences(); // Initialize menu final int ctrl = Toolkit.getDefaultToolkit().getMenuShortcutKeyMask(); final int shift = ActionEvent.SHIFT_MASK; final JMenuBar mbar = new JMenuBar(); setJMenuBar(mbar); final JMenu file = new JMenu("File"); file.setMnemonic(KeyEvent.VK_F); newFile = addToMenu(file, "New", KeyEvent.VK_N, ctrl); newFile.setMnemonic(KeyEvent.VK_N); open = addToMenu(file, "Open...", KeyEvent.VK_O, ctrl); open.setMnemonic(KeyEvent.VK_O); openRecent = new RecentFilesMenuItem(prefService, this); openRecent.setMnemonic(KeyEvent.VK_R); file.add(openRecent); save = addToMenu(file, "Save", KeyEvent.VK_S, ctrl); save.setMnemonic(KeyEvent.VK_S); saveas = addToMenu(file, "Save as...", 0, 0); saveas.setMnemonic(KeyEvent.VK_A); file.addSeparator(); makeJar = addToMenu(file, "Export as .jar", 0, 0); makeJar.setMnemonic(KeyEvent.VK_E); makeJarWithSource = addToMenu(file, "Export as .jar (with source)", 0, 0); makeJarWithSource.setMnemonic(KeyEvent.VK_X); file.addSeparator(); close = addToMenu(file, "Close", KeyEvent.VK_W, ctrl); mbar.add(file); final JMenu edit = new JMenu("Edit"); edit.setMnemonic(KeyEvent.VK_E); undo = addToMenu(edit, "Undo", KeyEvent.VK_Z, ctrl); redo = addToMenu(edit, "Redo", KeyEvent.VK_Y, ctrl); edit.addSeparator(); selectAll = addToMenu(edit, "Select All", KeyEvent.VK_A, ctrl); cut = addToMenu(edit, "Cut", KeyEvent.VK_X, ctrl); copy = addToMenu(edit, "Copy", KeyEvent.VK_C, ctrl); paste = addToMenu(edit, "Paste", KeyEvent.VK_V, ctrl); edit.addSeparator(); find = addToMenu(edit, "Find...", KeyEvent.VK_F, ctrl); find.setMnemonic(KeyEvent.VK_F); findNext = addToMenu(edit, "Find Next", KeyEvent.VK_F3, 0); 
findNext.setMnemonic(KeyEvent.VK_N); findPrevious = addToMenu(edit, "Find Previous", KeyEvent.VK_F3, shift); findPrevious.setMnemonic(KeyEvent.VK_P); replace = addToMenu(edit, "Find and Replace...", KeyEvent.VK_H, ctrl); gotoLine = addToMenu(edit, "Goto line...", KeyEvent.VK_G, ctrl); gotoLine.setMnemonic(KeyEvent.VK_G); toggleBookmark = addToMenu(edit, "Toggle Bookmark", KeyEvent.VK_B, ctrl); toggleBookmark.setMnemonic(KeyEvent.VK_B); listBookmarks = addToMenu(edit, "List Bookmarks", 0, 0); listBookmarks.setMnemonic(KeyEvent.VK_O); edit.addSeparator(); // Font adjustments decreaseFontSize = addToMenu(edit, "Decrease font size", KeyEvent.VK_MINUS, ctrl); decreaseFontSize.setMnemonic(KeyEvent.VK_D); increaseFontSize = addToMenu(edit, "Increase font size", KeyEvent.VK_PLUS, ctrl); increaseFontSize.setMnemonic(KeyEvent.VK_C); fontSizeMenu = new JMenu("Font sizes"); fontSizeMenu.setMnemonic(KeyEvent.VK_Z); final boolean[] fontSizeShortcutUsed = new boolean[10]; final ButtonGroup buttonGroup = new ButtonGroup(); for (final int size : new int[] { 8, 10, 12, 16, 20, 28, 42 }) { final JRadioButtonMenuItem item = new JRadioButtonMenuItem("" + size + " pt"); item.addActionListener(new ActionListener() { @Override public void actionPerformed(final ActionEvent event) { getEditorPane().setFontSize(size); updateTabAndFontSize(false); } }); for (final char c : ("" + size).toCharArray()) { final int digit = c - '0'; if (!fontSizeShortcutUsed[digit]) { item.setMnemonic(KeyEvent.VK_0 + digit); fontSizeShortcutUsed[digit] = true; break; } } buttonGroup.add(item); fontSizeMenu.add(item); } chooseFontSize = new JRadioButtonMenuItem("Other...", false); chooseFontSize.setMnemonic(KeyEvent.VK_O); chooseFontSize.addActionListener(this); buttonGroup.add(chooseFontSize); fontSizeMenu.add(chooseFontSize); edit.add(fontSizeMenu); // Add tab size adjusting menu tabSizeMenu = new JMenu("Tab sizes"); tabSizeMenu.setMnemonic(KeyEvent.VK_T); final ButtonGroup bg = new ButtonGroup(); for (final int 
size : new int[] { 2, 4, 8 }) { final JRadioButtonMenuItem item = new JRadioButtonMenuItem("" + size); item.addActionListener(new ActionListener() { @Override public void actionPerformed(final ActionEvent event) { getEditorPane().setTabSize(size); updateTabAndFontSize(false); } }); item.setMnemonic(KeyEvent.VK_0 + (size % 10)); bg.add(item); tabSizeMenu.add(item); } chooseTabSize = new JRadioButtonMenuItem("Other...", false); chooseTabSize.setMnemonic(KeyEvent.VK_O); chooseTabSize.addActionListener(this); bg.add(chooseTabSize); tabSizeMenu.add(chooseTabSize); edit.add(tabSizeMenu); wrapLines = new JCheckBoxMenuItem("Wrap lines"); wrapLines.addChangeListener(new ChangeListener() { @Override public void stateChanged(final ChangeEvent e) { getEditorPane().setLineWrap(wrapLines.getState()); } }); edit.add(wrapLines); // Add Tab inserts as spaces tabsEmulated = new JCheckBoxMenuItem("Tab key inserts spaces"); tabsEmulated.addChangeListener(new ChangeListener() { @Override public void stateChanged(final ChangeEvent e) { getEditorPane().setTabsEmulated(tabsEmulated.getState()); } }); edit.add(tabsEmulated); savePreferences = addToMenu(edit, "Save Preferences", 0, 0); edit.addSeparator(); clearScreen = addToMenu(edit, "Clear output panel", 0, 0); clearScreen.setMnemonic(KeyEvent.VK_L); zapGremlins = addToMenu(edit, "Zap Gremlins", 0, 0); edit.addSeparator(); addImport = addToMenu(edit, "Add import...", 0, 0); addImport.setMnemonic(KeyEvent.VK_I); removeUnusedImports = addToMenu(edit, "Remove unused imports", 0, 0); removeUnusedImports.setMnemonic(KeyEvent.VK_U); sortImports = addToMenu(edit, "Sort imports", 0, 0); sortImports.setMnemonic(KeyEvent.VK_S); respectAutoImports = prefService.getBoolean(AUTO_IMPORT_PREFS, false); autoImport = new JCheckBoxMenuItem("Auto-import (deprecated)", respectAutoImports); autoImport.addItemListener(new ItemListener() { @Override public void itemStateChanged(final ItemEvent e) { respectAutoImports = e.getStateChange() == ItemEvent.SELECTED; 
prefService.put(AUTO_IMPORT_PREFS, respectAutoImports); } }); edit.add(autoImport); mbar.add(edit); whiteSpaceMenu = new JMenu("Whitespace"); whiteSpaceMenu.setMnemonic(KeyEvent.VK_W); removeTrailingWhitespace = addToMenu(whiteSpaceMenu, "Remove trailing whitespace", 0, 0); removeTrailingWhitespace.setMnemonic(KeyEvent.VK_W); replaceTabsWithSpaces = addToMenu(whiteSpaceMenu, "Replace tabs with spaces", 0, 0); replaceTabsWithSpaces.setMnemonic(KeyEvent.VK_S); replaceSpacesWithTabs = addToMenu(whiteSpaceMenu, "Replace spaces with tabs", 0, 0); replaceSpacesWithTabs.setMnemonic(KeyEvent.VK_T); toggleWhiteSpaceLabeling = new JRadioButtonMenuItem("Label whitespace"); toggleWhiteSpaceLabeling.setMnemonic(KeyEvent.VK_L); toggleWhiteSpaceLabeling.addActionListener(new ActionListener() { @Override public void actionPerformed(final ActionEvent e) { getTextArea().setWhitespaceVisible( toggleWhiteSpaceLabeling.isSelected()); } }); whiteSpaceMenu.add(toggleWhiteSpaceLabeling); edit.add(whiteSpaceMenu); languageMenuItems = new LinkedHashMap<ScriptLanguage, JRadioButtonMenuItem>(); final Set<Integer> usedShortcuts = new HashSet<Integer>(); final JMenu languages = new JMenu("Language"); languages.setMnemonic(KeyEvent.VK_L); final ButtonGroup group = new ButtonGroup(); // get list of languages, and sort them by name final ArrayList<ScriptLanguage> list = new ArrayList<ScriptLanguage>(scriptService.getLanguages()); Collections.sort(list, new Comparator<ScriptLanguage>() { @Override public int compare(final ScriptLanguage l1, final ScriptLanguage l2) { final String name1 = l1.getLanguageName(); final String name2 = l2.getLanguageName(); return MiscUtils.compare(name1, name2); } }); list.add(null); final Map<String, ScriptLanguage> languageMap = new HashMap<String, ScriptLanguage>(); for (final ScriptLanguage language : list) { final String name = language == null ? 
"None" : language.getLanguageName(); languageMap.put(name, language); final JRadioButtonMenuItem item = new JRadioButtonMenuItem(name); if (language == null) { noneLanguageItem = item; } else { languageMenuItems.put(language, item); } int shortcut = -1; for (final char ch : name.toCharArray()) { final int keyCode = KeyStroke.getKeyStroke(ch, 0).getKeyCode(); if (usedShortcuts.contains(keyCode)) continue; shortcut = keyCode; usedShortcuts.add(shortcut); break; } if (shortcut > 0) item.setMnemonic(shortcut); item.addActionListener(new ActionListener() { @Override public void actionPerformed(final ActionEvent e) { setLanguage(language, true); } }); group.add(item); languages.add(item); } noneLanguageItem.setSelected(true); mbar.add(languages); final JMenu templates = new JMenu("Templates"); templates.setMnemonic(KeyEvent.VK_T); addTemplates(templates); mbar.add(templates); runMenu = new JMenu("Run"); runMenu.setMnemonic(KeyEvent.VK_R); compileAndRun = addToMenu(runMenu, "Compile and Run", KeyEvent.VK_R, ctrl); compileAndRun.setMnemonic(KeyEvent.VK_R); runSelection = addToMenu(runMenu, "Run selected code", KeyEvent.VK_R, ctrl | shift); runSelection.setMnemonic(KeyEvent.VK_S); compile = addToMenu(runMenu, "Compile", KeyEvent.VK_C, ctrl | shift); compile.setMnemonic(KeyEvent.VK_C); autoSave = new JCheckBoxMenuItem("Auto-save before compiling"); runMenu.add(autoSave); runMenu.addSeparator(); nextError = addToMenu(runMenu, "Next Error", KeyEvent.VK_F4, 0); nextError.setMnemonic(KeyEvent.VK_N); previousError = addToMenu(runMenu, "Previous Error", KeyEvent.VK_F4, shift); previousError.setMnemonic(KeyEvent.VK_P); runMenu.addSeparator(); kill = addToMenu(runMenu, "Kill running script...", 0, 0); kill.setMnemonic(KeyEvent.VK_K); kill.setEnabled(false); mbar.add(runMenu); toolsMenu = new JMenu("Tools"); toolsMenu.setMnemonic(KeyEvent.VK_O); openHelpWithoutFrames = addToMenu(toolsMenu, "Open Help for Class...", 0, 0); openHelpWithoutFrames.setMnemonic(KeyEvent.VK_O); openHelp = 
addToMenu(toolsMenu, "Open Help for Class (with frames)...", 0, 0); openHelp.setMnemonic(KeyEvent.VK_P); openMacroFunctions = addToMenu(toolsMenu, "Open Help on Macro Functions...", 0, 0); openMacroFunctions.setMnemonic(KeyEvent.VK_H); extractSourceJar = addToMenu(toolsMenu, "Extract source .jar...", 0, 0); extractSourceJar.setMnemonic(KeyEvent.VK_E); openSourceForClass = addToMenu(toolsMenu, "Open .java file for class...", 0, 0); openSourceForClass.setMnemonic(KeyEvent.VK_J); openSourceForMenuItem = addToMenu(toolsMenu, "Open .java file for menu item...", 0, 0); openSourceForMenuItem.setMnemonic(KeyEvent.VK_M); mbar.add(toolsMenu); gitMenu = new JMenu("Git"); gitMenu.setMnemonic(KeyEvent.VK_G); /* showDiff = addToMenu(gitMenu, "Show diff...", 0, 0); showDiff.setMnemonic(KeyEvent.VK_D); commit = addToMenu(gitMenu, "Commit...", 0, 0); commit.setMnemonic(KeyEvent.VK_C); */ gitGrep = addToMenu(gitMenu, "Grep...", 0, 0); gitGrep.setMnemonic(KeyEvent.VK_G); openInGitweb = addToMenu(gitMenu, "Open in gitweb", 0, 0); openInGitweb.setMnemonic(KeyEvent.VK_W); mbar.add(gitMenu); tabsMenu = new JMenu("Tabs"); tabsMenu.setMnemonic(KeyEvent.VK_A); nextTab = addToMenu(tabsMenu, "Next Tab", KeyEvent.VK_PAGE_DOWN, ctrl); nextTab.setMnemonic(KeyEvent.VK_N); previousTab = addToMenu(tabsMenu, "Previous Tab", KeyEvent.VK_PAGE_UP, ctrl); previousTab.setMnemonic(KeyEvent.VK_P); tabsMenu.addSeparator(); tabsMenuTabsStart = tabsMenu.getItemCount(); tabsMenuItems = new HashSet<JMenuItem>(); mbar.add(tabsMenu); // Add the editor and output area tabbed = new JTabbedPane(); tabbed.addChangeListener(this); open(null); // make sure the editor pane is added tabbed.setBorder(BorderFactory.createEmptyBorder(4, 4, 4, 4)); getContentPane().setLayout( new BoxLayout(getContentPane(), BoxLayout.Y_AXIS)); getContentPane().add(tabbed); // for Eclipse and MS Visual Studio lovers addAccelerator(compileAndRun, KeyEvent.VK_F11, 0, true); addAccelerator(compileAndRun, KeyEvent.VK_F5, 0, true); 
addAccelerator(nextTab, KeyEvent.VK_PAGE_DOWN, ctrl, true); addAccelerator(previousTab, KeyEvent.VK_PAGE_UP, ctrl, true); addAccelerator(increaseFontSize, KeyEvent.VK_EQUALS, ctrl | shift, true); // make sure that the window is not closed by accident addWindowListener(new WindowAdapter() { @Override public void windowClosing(final WindowEvent e) { if (!confirmClose()) return; dispose(); } }); addWindowFocusListener(new WindowAdapter() { @Override public void windowGainedFocus(final WindowEvent e) { checkForOutsideChanges(); } }); final Font font = new Font("Courier", Font.PLAIN, 12); errorScreen.setFont(font); errorScreen.setEditable(false); errorScreen.setLineWrap(true); setDefaultCloseOperation(DO_NOTHING_ON_CLOSE); try { if (SwingUtilities.isEventDispatchThread()) { pack(); } else { SwingUtilities.invokeAndWait(new Runnable() { @Override public void run() { pack(); } }); } } catch (final Exception ie) { /* ignore */ } findDialog = new FindAndReplaceDialog(this); // Save the size of the window in the preferences addComponentListener(new ComponentAdapter() { @Override public void componentResized(final ComponentEvent e) { saveWindowSizeToPrefs(); } }); setLocationRelativeTo(null); // center on screen open(null); final EditorPane editorPane = getEditorPane(); editorPane.requestFocus(); } public LogService log() { return log; } public PlatformService getPlatformService() { return platformService; } public JTextArea getErrorScreen() { return errorScreen; } public void setErrorScreen(final JTextArea errorScreen) { this.errorScreen = errorScreen; } public ErrorHandler getErrorHandler() { return errorHandler; } public void setErrorHandler(final ErrorHandler errorHandler) { this.errorHandler = errorHandler; } private synchronized void initializeTokenMakers() { if (tokenMakerFactory != null) return; tokenMakerFactory = (AbstractTokenMakerFactory) TokenMakerFactory.getDefaultInstance(); for (final PluginInfo<SyntaxHighlighter> info : pluginService 
.getPluginsOfType(SyntaxHighlighter.class)) try { tokenMakerFactory.putMapping("text/" + info.getName(), info .getClassName()); } catch (final Throwable t) { log.warn("Could not register " + info.getName(), t); } } /** * Check whether the file was edited outside of this {@link EditorPane} and * ask the user whether to reload. */ public void checkForOutsideChanges() { final EditorPane editorPane = getEditorPane(); if (editorPane.wasChangedOutside()) { reload("The file " + editorPane.getFile().getName() + "was changed outside of the editor"); } } /** * Adds a script template path that will be scanned by future TextEditor * instances. * * @param path Resource path to scan for scripts. */ public static void addTemplatePath(final String path) { TEMPLATE_PATHS.add(path); } @SuppressWarnings("unused") @EventHandler private void onEvent(final ContextDisposingEvent e) { if (isDisplayable()) dispose(); } /** * Loads the preferences for the JFrame from file */ public void loadPreferences() { final Dimension dim = getSize(); // If a dimension is 0 then use the default dimension size if (0 == dim.width) { dim.width = DEFAULT_WINDOW_WIDTH; } if (0 == dim.height) { dim.height = DEFAULT_WINDOW_HEIGHT; } setPreferredSize(new Dimension(prefService.getInt(WINDOW_WIDTH, dim.width), prefService.getInt(WINDOW_HEIGHT, dim.height))); } /** * Saves the window size to preferences. * <p> * Separated from savePreferences because we always want to save the window * size when it's resized, however, we don't want to automatically save the * font, tab size, etc. without the user pressing "Save Preferences" * </p> */ public void saveWindowSizeToPrefs() { final Dimension dim = getSize(); prefService.put(WINDOW_HEIGHT, dim.height); prefService.put(WINDOW_WIDTH, dim.width); } final public RSyntaxTextArea getTextArea() { return getEditorPane(); } /** * Get the currently selected tab. * * @return The currently selected tab. Never null. 
*/ public TextEditorTab getTab() { int index = tabbed.getSelectedIndex(); if (index < 0) { // should not happen, but safety first. if (tabbed.getTabCount() == 0) { // should not happen either, but, again, safety first. createNewDocument(); } // Ensure the new document is returned - otherwise we would pass // the negative index to the getComponentAt call below. tabbed.setSelectedIndex(0); index = 0; } return (TextEditorTab) tabbed.getComponentAt(index); } /** * Get tab at provided index. * * @param index the index of the tab. * @return the {@link TextEditorTab} at given index or <code>null</code>. */ public TextEditorTab getTab(final int index) { return (TextEditorTab) tabbed.getComponentAt(index); } /** * Return the {@link EditorPane} of the currently selected * {@link TextEditorTab}. * * @return the current {@link EditorPane}. Never <code>null</code>. */ public EditorPane getEditorPane() { return getTab().editorPane; } /** * @return {@link ScriptLanguage} used in the current {@link EditorPane}. 
*/ public ScriptLanguage getCurrentLanguage() { return getEditorPane().getCurrentLanguage(); } public JMenuItem addToMenu(final JMenu menu, final String menuEntry, final int key, final int modifiers) { final JMenuItem item = new JMenuItem(menuEntry); menu.add(item); if (key != 0) item.setAccelerator(KeyStroke.getKeyStroke(key, modifiers)); item.addActionListener(this); return item; } protected static class AcceleratorTriplet { JMenuItem component; int key, modifiers; } protected List<AcceleratorTriplet> defaultAccelerators = new ArrayList<AcceleratorTriplet>(); public void addAccelerator(final JMenuItem component, final int key, final int modifiers) { addAccelerator(component, key, modifiers, false); } public void addAccelerator(final JMenuItem component, final int key, final int modifiers, final boolean record) { if (record) { final AcceleratorTriplet triplet = new AcceleratorTriplet(); triplet.component = component; triplet.key = key; triplet.modifiers = modifiers; defaultAccelerators.add(triplet); } final RSyntaxTextArea textArea = getTextArea(); if (textArea != null) addAccelerator(textArea, component, key, modifiers); } public void addAccelerator(final RSyntaxTextArea textArea, final JMenuItem component, final int key, final int modifiers) { textArea.getInputMap().put(KeyStroke.getKeyStroke(key, modifiers), component); textArea.getActionMap().put(component, new AbstractAction() { @Override public void actionPerformed(final ActionEvent e) { if (!component.isEnabled()) return; final ActionEvent event = new ActionEvent(component, 0, "Accelerator"); TextEditor.this.actionPerformed(event); } }); } public void addDefaultAccelerators(final RSyntaxTextArea textArea) { for (final AcceleratorTriplet triplet : defaultAccelerators) addAccelerator(textArea, triplet.component, triplet.key, triplet.modifiers); } private JMenu getMenu(final JMenu root, final String menuItemPath, final boolean createIfNecessary) { final int slash = menuItemPath.indexOf('/'); if (slash < 0) 
return root; final String menuLabel = menuItemPath.substring(0, slash); final String rest = menuItemPath.substring(slash + 1); for (int i = 0; i < root.getItemCount(); i++) { final JMenuItem item = root.getItem(i); if (item instanceof JMenu && menuLabel.equals(item.getText())) { return getMenu((JMenu) item, rest, createIfNecessary); } } if (!createIfNecessary) return null; final JMenu subMenu = new JMenu(menuLabel); root.add(subMenu); return getMenu(subMenu, rest, createIfNecessary); } /** * Initializes the template menu. * <p> * Other components can add templates simply by providing scripts in their * resources, identified by a path of the form * {@code /script_templates/<menu path>/<menu label>}. * </p> * * @param templatesMenu the top-level menu to populate */ private void addTemplates(final JMenu templatesMenu) { for (final String templatePath : TEMPLATE_PATHS) { for (final Map.Entry<String, URL> entry : new TreeMap<String, URL>( FileFunctions.findResources(null, templatePath)).entrySet()) { final String path = entry.getKey().replace('_', ' '); final String ext = FileUtils.getExtension(path); final JMenu menu = getMenu(templatesMenu, path, true); final int labelIndex = path.lastIndexOf('/') + 1; final String label = ext.isEmpty() ? path.substring(labelIndex) : path.substring(labelIndex, path.length() - ext.length() - 1); final JMenuItem item = new JMenuItem(label); menu.add(item); final URL url = entry.getValue(); item.addActionListener(new ActionListener() { @Override public void actionPerformed(final ActionEvent e) { loadTemplate(url); } }); } } } /** * Loads a template file from the given resource * * @param url The resource to load. 
public void loadTemplate(final String url) {
	try {
		loadTemplate(new URL(url));
	}
	catch (final Exception e) {
		log.error(e);
		error("The template '" + url + "' was not found.");
	}
}

public void loadTemplate(final URL url) {
	final String path = url.getPath();
	final String ext = FileUtils.getExtension(path);
	final ScriptLanguage language =
		ext.isEmpty() ? null : scriptService.getLanguageByExtension(ext);
	loadTemplate(url, language);
}

public void loadTemplate(final URL url, final ScriptLanguage language) {
	createNewDocument();

	try {
		// Load the template; close the reader on every path
		// (the original opened the stream and never closed it).
		final BufferedReader reader =
			new BufferedReader(new InputStreamReader(url.openStream()));
		try {
			getTextArea().read(reader, null);
		}
		finally {
			reader.close();
		}

		if (language != null) {
			setLanguage(language);
		}
		final String path = url.getPath();
		setEditorPaneFileName(path.substring(path.lastIndexOf('/') + 1));
	}
	catch (final Exception e) {
		e.printStackTrace();
		error("The template '" + url + "' was not found.");
	}
}

public void createNewDocument() {
	open(null);
}

public void createNewDocument(final String title, final String text) {
	open(null);
	final EditorPane editorPane = getEditorPane();
	editorPane.setText(text);
	setEditorPaneFileName(title);
	editorPane.setLanguageByFileName(title);
	updateLanguageMenu(editorPane.getCurrentLanguage());
}

/**
 * Open a new editor to edit the given file, with a templateFile if the file
 * does not exist yet
 */
public void createNewFromTemplate(final File file, final File templateFile) {
	open(file.exists() ?
file : templateFile); if (!file.exists()) { final EditorPane editorPane = getEditorPane(); try { editorPane.open(file); } catch (final IOException e) { handleException(e); } editorPane.setLanguageByFileName(file.getName()); updateLanguageMenu(editorPane.getCurrentLanguage()); } } public boolean fileChanged() { return getEditorPane().fileChanged(); } public boolean handleUnsavedChanges() { return handleUnsavedChanges(false); } public boolean handleUnsavedChanges(final boolean beforeCompiling) { if (!fileChanged()) return true; if (beforeCompiling && autoSave.getState()) { save(); return true; } switch (JOptionPane.showConfirmDialog(this, "Do you want to save changes?")) { case JOptionPane.NO_OPTION: // Compiled languages should not progress if their source is unsaved return !beforeCompiling; case JOptionPane.YES_OPTION: if (save()) return true; } return false; } @Override public void actionPerformed(final ActionEvent ae) { final Object source = ae.getSource(); if (source == newFile) createNewDocument(); else if (source == open) { final EditorPane editorPane = getEditorPane(); final File defaultDir = editorPane.getFile() != null ? 
editorPane.getFile().getParentFile() : AppUtils.getBaseDirectory("imagej.dir", TextEditor.class, null); final File file = openWithDialog(defaultDir); if (file != null) new Thread() { @Override public void run() { open(file); } }.start(); return; } else if (source == save) save(); else if (source == saveas) saveAs(); else if (source == makeJar) makeJar(false); else if (source == makeJarWithSource) makeJar(true); else if (source == compileAndRun) runText(); else if (source == compile) compile(); else if (source == runSelection) runText(true); else if (source == nextError) new Thread() { @Override public void run() { nextError(true); } }.start(); else if (source == previousError) new Thread() { @Override public void run() { nextError(false); } }.start(); else if (source == kill) chooseTaskToKill(); else if (source == close) if (tabbed.getTabCount() < 2) processWindowEvent(new WindowEvent( this, WindowEvent.WINDOW_CLOSING)); else { if (!handleUnsavedChanges()) return; int index = tabbed.getSelectedIndex(); removeTab(index); if (index > 0) index switchTo(index); } else if (source == cut) getTextArea().cut(); else if (source == copy) getTextArea().copy(); else if (source == paste) getTextArea().paste(); else if (source == undo) getTextArea().undoLastAction(); else if (source == redo) getTextArea().redoLastAction(); else if (source == find) findOrReplace(false); else if (source == findNext) findDialog.searchOrReplace(false); else if (source == findPrevious) findDialog.searchOrReplace(false, false); else if (source == replace) findOrReplace(true); else if (source == gotoLine) gotoLine(); else if (source == toggleBookmark) toggleBookmark(); else if (source == listBookmarks) listBookmarks(); else if (source == selectAll) { getTextArea().setCaretPosition(0); getTextArea().moveCaretPosition(getTextArea().getDocument().getLength()); } else if (source == chooseFontSize) { commandService.run(ChooseFontSize.class, true, "editor", this); } else if (source == chooseTabSize) { 
commandService.run(ChooseTabSize.class, true, "editor", this); } else if (source == addImport) { addImport(getSelectedClassNameOrAsk()); } else if (source == removeUnusedImports) new TokenFunctions(getTextArea()) .removeUnusedImports(); else if (source == sortImports) new TokenFunctions(getTextArea()) .sortImports(); else if (source == removeTrailingWhitespace) new TokenFunctions( getTextArea()).removeTrailingWhitespace(); else if (source == replaceTabsWithSpaces) getTextArea() .convertTabsToSpaces(); else if (source == replaceSpacesWithTabs) getTextArea() .convertSpacesToTabs(); else if (source == clearScreen) { getTab().getScreen().setText(""); } else if (source == zapGremlins) zapGremlins(); else if (source == savePreferences) { getEditorPane().savePreferences(); } else if (source == openHelp) openHelp(null); else if (source == openHelpWithoutFrames) openHelp(null, false); else if (source == openMacroFunctions) try { new MacroFunctions(this).openHelp(getTextArea().getSelectedText()); } catch (final IOException e) { handleException(e); } else if (source == extractSourceJar) extractSourceJar(); else if (source == openSourceForClass) { final String className = getSelectedClassNameOrAsk(); if (className != null) try { final String path = new FileFunctions(this).getSourcePath(className); if (path != null) open(new File(path)); else { final String url = new FileFunctions(this).getSourceURL(className); try { platformService.open(new URL(url)); } catch (final Throwable e) { handleException(e); } } } catch (final ClassNotFoundException e) { error("Could not open source for class " + className); } } /* TODO else if (source == showDiff) { new Thread() { public void run() { EditorPane pane = getEditorPane(); new FileFunctions(TextEditor.this).showDiff(pane.file, pane.getGitDirectory()); } }.start(); } else if (source == commit) { new Thread() { public void run() { EditorPane pane = getEditorPane(); new FileFunctions(TextEditor.this).commit(pane.file, 
pane.getGitDirectory()); } }.start(); } */ else if (source == gitGrep) { final String searchTerm = getTextArea().getSelectedText(); File searchRoot = getEditorPane().getFile(); if (searchRoot == null) { error("File was not yet saved; no location known!"); return; } searchRoot = searchRoot.getParentFile(); commandService.run(GitGrep.class, true, "editor", this, "searchTerm", searchTerm, "searchRoot", searchRoot); } else if (source == openInGitweb) { final EditorPane editorPane = getEditorPane(); new FileFunctions(this).openInGitweb(editorPane.getFile(), editorPane .getGitDirectory(), editorPane.getCaretLineNumber() + 1); } else if (source == increaseFontSize || source == decreaseFontSize) { getEditorPane().increaseFontSize( (float) (source == increaseFontSize ? 1.2 : 1 / 1.2)); updateTabAndFontSize(false); } else if (source == nextTab) switchTabRelative(1); else if (source == previousTab) switchTabRelative(-1); else if (handleTabsMenu(source)) return; } protected boolean handleTabsMenu(final Object source) { if (!(source instanceof JMenuItem)) return false; final JMenuItem item = (JMenuItem) source; if (!tabsMenuItems.contains(item)) return false; for (int i = tabsMenuTabsStart; i < tabsMenu.getItemCount(); i++) if (tabsMenu.getItem(i) == item) { switchTo(i - tabsMenuTabsStart); return true; } return false; } @Override public void stateChanged(final ChangeEvent e) { final int index = tabbed.getSelectedIndex(); if (index < 0) { setTitle(""); return; } final EditorPane editorPane = getEditorPane(index); editorPane.requestFocus(); checkForOutsideChanges(); toggleWhiteSpaceLabeling.setSelected(editorPane.isWhitespaceVisible()); editorPane.setLanguageByFileName(editorPane.getFileName()); updateLanguageMenu(editorPane.getCurrentLanguage()); setTitle(); } public EditorPane getEditorPane(final int index) { return getTab(index).editorPane; } public void findOrReplace(final boolean doReplace) { findDialog.setLocationRelativeTo(this); // override search pattern only if // 
there is sth. selected final String selection = getTextArea().getSelectedText(); if (selection != null) findDialog.setSearchPattern(selection); findDialog.show(doReplace); } public void gotoLine() { final String line = JOptionPane.showInputDialog(this, "Line:", "Goto line...", JOptionPane.QUESTION_MESSAGE); if (line == null) return; try { gotoLine(Integer.parseInt(line)); } catch (final BadLocationException e) { error("Line number out of range: " + line); } catch (final NumberFormatException e) { error("Invalid line number: " + line); } } public void gotoLine(final int line) throws BadLocationException { getTextArea().setCaretPosition(getTextArea().getLineStartOffset(line - 1)); } public void toggleBookmark() { getEditorPane().toggleBookmark(); } public void listBookmarks() { final Vector<Bookmark> bookmarks = new Vector<Bookmark>(); for (int i = 0; i < tabbed.getTabCount(); i++) { final TextEditorTab tab = (TextEditorTab) tabbed.getComponentAt(i); tab.editorPane.getBookmarks(tab, bookmarks); } final BookmarkDialog dialog = new BookmarkDialog(this, bookmarks); dialog.setVisible(true); } public boolean reload() { return reload("Reload the file?"); } public boolean reload(final String message) { final EditorPane editorPane = getEditorPane(); final File file = editorPane.getFile(); if (file == null || !file.exists()) return true; final boolean modified = editorPane.fileChanged(); final String[] options = { "Reload", "Do not reload" }; if (modified) options[0] = "Reload (discarding changes)"; switch (JOptionPane.showOptionDialog(this, message, "Reload", JOptionPane.DEFAULT_OPTION, JOptionPane.WARNING_MESSAGE, null, options, options[0])) { case 0: try { editorPane.open(file); return true; } catch (final IOException e) { error("Could not reload " + file.getPath()); } updateLanguageMenu(editorPane.getCurrentLanguage()); break; } return false; } public static boolean isBinary(final File file) { if (file == null) return false; // heuristic: read the first up to 8000 bytes, 
and say that it is binary if // it contains a NUL try { final FileInputStream in = new FileInputStream(file); int left = 8000; final byte[] buffer = new byte[left]; while (left > 0) { final int count = in.read(buffer, 0, left); if (count < 0) break; for (int i = 0; i < count; i++) if (buffer[i] == 0) { in.close(); return true; } left -= count; } in.close(); return false; } catch (final IOException e) { return false; } } /** * Open a new tab with some content; the languageExtension is like ".java", * ".py", etc. */ public TextEditorTab newTab(final String content, final String language) { String lang = language; final TextEditorTab tab = open(null); if (null != lang && lang.length() > 0) { lang = lang.trim().toLowerCase(); if ('.' != lang.charAt(0)) { lang = "." + language; } tab.editorPane.setLanguage(scriptService.getLanguageByName(language)); } if (null != content) { tab.editorPane.setText(content); } return tab; } public TextEditorTab open(final File file) { if (isBinary(file)) { // TODO! throw new RuntimeException("TODO: open image using IJ2"); // return null; } try { TextEditorTab tab = (tabbed.getTabCount() == 0) ? null : getTab(); final boolean wasNew = tab != null && tab.editorPane.isNew(); if (!wasNew) { tab = new TextEditorTab(this); context.inject(tab.editorPane); tab.editorPane.loadPreferences(); tab.editorPane.getDocument().addDocumentListener(this); addDefaultAccelerators(tab.editorPane); } synchronized (tab.editorPane) { // tab is never null at this location. 
// NOTE(review): this chunk begins inside open(File) — the method header and the
// start of its try-block lie before this view. The tail below registers the
// opened tab in the UI (tab strip, "Tabs" menu, title), records the path in the
// recent-files list, and reports failures via dialog; returns null on error.
tab.editorPane.open(file);
if (wasNew) {
    final int index = tabbed.getSelectedIndex() + tabsMenuTabsStart;
    tabsMenu.getItem(index).setText(tab.editorPane.getFileName());
}
else {
    tabbed.addTab("", tab);
    switchTo(tabbed.getTabCount() - 1);
    tabsMenuItems.add(addToMenu(tabsMenu, tab.editorPane.getFileName(), 0, 0));
}
setEditorPaneFileName(tab.editorPane.getFile());
try {
    updateTabAndFontSize(true);
}
catch (final NullPointerException e) { /* ignore */ }
}
if (file != null) openRecent.add(file.getAbsolutePath());
updateLanguageMenu(tab.editorPane.getCurrentLanguage());
return tab;
}
catch (final FileNotFoundException e) {
    e.printStackTrace();
    error("The file '" + file + "' was not found.");
}
catch (final Exception e) {
    e.printStackTrace();
    error("There was an error while opening '" + file + "': " + e);
}
return null;
}

/**
 * Asks the user for a destination (defaulting to the pane's file, or to the
 * ImageJ base directory when the pane has no file yet) and saves there.
 *
 * @return false if the chooser was cancelled
 */
public boolean saveAs() {
    final EditorPane editorPane = getEditorPane();
    File file = editorPane.getFile();
    if (file == null) {
        final File ijDir = AppUtils.getBaseDirectory("imagej.dir", TextEditor.class, null);
        file = new File(ijDir, editorPane.getFileName());
    }
    final File fileToSave = uiService.chooseFile(file, FileWidget.SAVE_STYLE);
    if (fileToSave == null) return false;
    return saveAs(fileToSave.getAbsolutePath(), true);
}

/** Saves to the given path, asking before replacing an existing file. */
public void saveAs(final String path) {
    saveAs(path, true);
}

/**
 * Saves the current editor pane to the given path.
 *
 * @param path destination path
 * @param askBeforeReplacing when true, confirm before overwriting an existing file
 * @return true if the file was written
 */
public boolean saveAs(final String path, final boolean askBeforeReplacing) {
    final File file = new File(path);
    if (file.exists() && askBeforeReplacing && JOptionPane.showConfirmDialog(this,
        "Do you want to replace " + path + "?", "Replace " + path + "?",
        JOptionPane.YES_NO_OPTION) != JOptionPane.YES_OPTION) return false;
    if (!write(file)) return false;
    setEditorPaneFileName(file);
    openRecent.add(path);
    return true;
}

/**
 * Saves to the pane's current file, falling back to a save-as dialog when the
 * pane has no file yet. Refreshes the window title on success.
 */
public boolean save() {
    final File file = getEditorPane().getFile();
    if (file == null) {
        return saveAs();
    }
    if (!write(file)) {
        return false;
    }
    setTitle();
    return true;
}

/**
 * Writes the current pane to the given file, turning I/O failures into an
 * error dialog instead of propagating them.
 */
public boolean write(final File file) {
    try {
        getEditorPane().write(file);
        return true;
    }
    catch (final IOException e) {
        error("Could not save " + file.getName());
        e.printStackTrace();
        return false;
    }
}

/**
 * Interactively builds a jar from the current file: derives a "*_.jar" name,
 * asks for the destination, and delegates to {@link #makeJar(File, boolean)}.
 *
 * @param includeSources whether to bundle the sources into the jar
 */
public boolean makeJar(final boolean includeSources) {
    final File file = getEditorPane().getFile();
    if ((file == null || isCompiled()) && !handleUnsavedChanges(true)) {
        return false;
    }
    String name = getEditorPane().getFileName();
    final String ext = FileUtils.getExtension(name);
    if (!"".equals(ext)) name = name.substring(0, name.length() - ext.length());
    // An underscore in the name marks the jar as an ImageJ plugin candidate.
    if (name.indexOf('_') < 0) name += "_";
    name += ".jar";
    final File selectedFile = uiService.chooseFile(file, FileWidget.SAVE_STYLE);
    if (selectedFile == null) return false;
    if (selectedFile.exists() && JOptionPane.showConfirmDialog(this,
        "Do you want to replace " + selectedFile + "?", "Replace " + selectedFile + "?",
        JOptionPane.YES_NO_OPTION) != JOptionPane.YES_OPTION) return false;
    try {
        makeJar(selectedFile, includeSources);
        return true;
    }
    catch (final IOException e) {
        e.printStackTrace();
        error("Could not write " + selectedFile + ": " + e.getMessage());
        return false;
    }
}

/**
 * Compiles the current (Java) file into the given jar on a background thread,
 * streaming compiler output to the error screen.
 *
 * @throws IOException
 */
public void makeJar(final File file, final boolean includeSources) throws IOException {
    if (!handleUnsavedChanges(true)) return;
    final ScriptEngine interpreter = getCurrentLanguage().getScriptEngine();
    // Only the Java engine knows how to build jars; other languages are a no-op.
    if (interpreter instanceof JavaEngine) {
        final JavaEngine java = (JavaEngine) interpreter;
        final JTextAreaWriter errors = new JTextAreaWriter(errorScreen, log);
        markCompileStart();
        getTab().showErrors();
        new Thread() {
            @Override
            public void run() {
                java.makeJar(getEditorPane().getFile(), includeSources, file, errors);
                errorScreen.insert("Compilation finished.\n", errorScreen
                    .getDocument().getLength());
                markCompileEnd();
            }
        }.start();
    }
}

/** Collects class-file paths and jar-entry names under a directory (no prefix). */
static void getClasses(final File directory, final List<String> paths,
    final List<String> names)
{
    getClasses(directory, paths, names, "");
}

/**
 * Recursively collects every file under {@code directory}: absolute paths into
 * {@code paths} and prefix-relative entry names into {@code names}.
 */
static void getClasses(final File directory, final List<String> paths,
    final List<String> names, final String inPrefix)
{
    String prefix = inPrefix;
    if (!prefix.equals("")) prefix += "/";
    for (final File file : directory.listFiles())
        if (file.isDirectory()) getClasses(file, paths, names, prefix + file.getName());
        else {
            paths.add(file.getAbsolutePath());
            names.add(prefix + file.getName());
        }
}

/** Writes one named entry with the given bytes into the jar stream. */
static void writeJarEntry(final JarOutputStream out, final String name,
    final byte[] buf) throws IOException
{
    try {
        final JarEntry entry = new JarEntry(name);
        out.putNextEntry(entry);
        out.write(buf, 0, buf.length);
        out.closeEntry();
    }
    catch (final ZipException e) {
        e.printStackTrace();
        throw new IOException(e.getMessage());
    }
}

// NOTE(review): InputStream.read(byte[]) may read fewer bytes than requested;
// for a local file this usually fills the buffer, but a short read would leave
// trailing zero bytes — consider a read loop. Also no try-finally around close.
static byte[] readFile(final String fileName) throws IOException {
    final File file = new File(fileName);
    final InputStream in = new FileInputStream(file);
    final byte[] buffer = new byte[(int) file.length()];
    in.read(buffer);
    in.close();
    return buffer;
}

/** Deletes a directory tree bottom-up (children first, then the directory). */
static void deleteRecursively(final File directory) {
    for (final File file : directory.listFiles())
        if (file.isDirectory()) deleteRecursively(file);
        else file.delete();
    directory.delete();
}

/** Switches the current pane's language without inserting a header. */
void setLanguage(final ScriptLanguage language) {
    setLanguage(language, false);
}

/** Switches the pane's language and refreshes title, menus and tab/font size. */
void setLanguage(final ScriptLanguage language, final boolean addHeader) {
    getEditorPane().setLanguage(language, addHeader);
    setTitle();
    updateLanguageMenu(language);
    updateTabAndFontSize(true);
}

/**
 * Synchronizes menu state with the given language: selects its radio item and
 * shows/hides run-, compile-, Java- and macro-specific menu entries.
 */
void updateLanguageMenu(final ScriptLanguage language) {
    JMenuItem item = languageMenuItems.get(language);
    if (item == null) item = noneLanguageItem;
    if (!item.isSelected()) {
        item.setSelected(true);
    }
    final boolean isRunnable = item != noneLanguageItem;
    final boolean isCompileable = language != null && language.isCompiledLanguage();
    runMenu.setVisible(isRunnable);
    compileAndRun.setText(isCompileable ? "Compile and Run" : "Run");
    compileAndRun.setEnabled(isRunnable);
    runSelection.setVisible(isRunnable && !isCompileable);
    compile.setVisible(isCompileable);
    autoSave.setVisible(isCompileable);
    makeJar.setVisible(isCompileable);
    makeJarWithSource.setVisible(isCompileable);
    final boolean isJava = language != null && language.getLanguageName().equals("Java");
    addImport.setVisible(isJava);
    removeUnusedImports.setVisible(isJava);
    sortImports.setVisible(isJava);
    openSourceForMenuItem.setVisible(isJava);
    final boolean isMacro = language != null &&
        language.getLanguageName().equals("ImageJ Macro");
    openMacroFunctions.setVisible(isMacro);
    openSourceForClass.setVisible(!isMacro);
    openHelp.setVisible(!isMacro && isRunnable);
    openHelpWithoutFrames.setVisible(!isMacro && isRunnable);
    nextError.setVisible(!isMacro && isRunnable);
    previousError.setVisible(!isMacro && isRunnable);
    final boolean isInGit = getEditorPane().getGitDirectory() != null;
    gitMenu.setVisible(isInGit);
    updateTabAndFontSize(false);
}

/**
 * Refreshes the tab-size and font-size menus to reflect the current pane;
 * when {@code setByLanguage} is true also applies the language's preferred
 * tab size (4 for Python, otherwise the stored preference).
 */
public void updateTabAndFontSize(final boolean setByLanguage) {
    final EditorPane pane = getEditorPane();
    if (pane.getCurrentLanguage() == null) return;
    if (setByLanguage) {
        if (pane.getCurrentLanguage().getLanguageName().equals("Python")) {
            pane.setTabSize(4);
        }
        else {
            // set tab size to current preferences.
            pane.resetTabSize();
        }
    }
    final int tabSize = pane.getTabSize();
    boolean defaultSize = false;
    for (int i = 0; i < tabSizeMenu.getItemCount(); i++) {
        final JMenuItem item = tabSizeMenu.getItem(i);
        if (item == chooseTabSize) {
            // "Other" shows the current size only when no preset matched.
            item.setSelected(!defaultSize);
            item.setText("Other" + (defaultSize ? "" : " (" + tabSize + ")") + "...");
        }
        else if (tabSize == Integer.parseInt(item.getText())) {
            item.setSelected(true);
            defaultSize = true;
        }
    }
    final int fontSize = (int) pane.getFontSize();
    defaultSize = false;
    for (int i = 0; i < fontSizeMenu.getItemCount(); i++) {
        final JMenuItem item = fontSizeMenu.getItem(i);
        if (item == chooseFontSize) {
            item.setSelected(!defaultSize);
            item.setText("Other" + (defaultSize ? "" : " (" + fontSize + ")") + "...");
            continue;
        }
        String label = item.getText();
        if (label.endsWith(" pt")) label = label.substring(0, label.length() - 3);
        if (fontSize == Integer.parseInt(label)) {
            item.setSelected(true);
            defaultSize = true;
        }
    }
    wrapLines.setState(pane.getLineWrap());
    tabsEmulated.setState(pane.getTabsEmulated());
}

/** Sets only the display name of the current pane. */
public void setEditorPaneFileName(final String baseName) {
    getEditorPane().setFileName(baseName);
}

/** Associates the pane with a file and refreshes language menu and git state. */
public void setEditorPaneFileName(final File file) {
    final EditorPane editorPane = getEditorPane();
    editorPane.setFileName(file);
    // update language menu
    updateLanguageMenu(editorPane.getCurrentLanguage());
    updateGitDirectory();
}

/**
 * Rebuilds the window title from the current pane: "*" prefix when modified,
 * " (Running)" suffix while tasks execute; also refreshes every tab's title.
 */
void setTitle() {
    final EditorPane editorPane = getEditorPane();
    final boolean fileChanged = editorPane.fileChanged();
    final String fileName = editorPane.getFileName();
    final String title = (fileChanged ? "*" : "") + fileName +
        (executingTasks.isEmpty() ? "" : " (Running)");
    SwingUtilities.invokeLater(new Runnable() {
        @Override
        public void run() {
            setTitle(title); // to the main window
            // Update all tabs: could have changed
            for (int i = 0; i < tabbed.getTabCount(); i++)
                tabbed.setTitleAt(i, ((TextEditorTab) tabbed.getComponentAt(i))
                    .getTitle());
        }
    });
}

/** Sets the window title and mirrors it onto the matching "Tabs" menu item. */
@Override
public synchronized void setTitle(final String title) {
    super.setTitle(title);
    final int index = tabsMenuTabsStart + tabbed.getSelectedIndex();
    if (index < tabsMenu.getItemCount()) {
        final JMenuItem item = tabsMenu.getItem(index);
        if (item != null) item.setText(title);
    }
}

// Tasks currently running in this editor; drives the kill menu and the title.
private final ArrayList<Executer> executingTasks = new ArrayList<Executer>();

/**
 * Generic Thread that keeps a starting time stamp, sets the priority to
 * normal and starts itself.
 */
public abstract class Executer extends ThreadGroup {

    JTextAreaWriter output, errors;

    // The constructor itself registers the task and spawns the worker thread;
    // callers never call start() explicitly.
    Executer(final JTextAreaWriter output, final JTextAreaWriter errors) {
        super("Script Editor Run :: " + new Date().toString());
        this.output = output;
        this.errors = errors;
        // Store itself for later
        executingTasks.add(this);
        setTitle();
        // Enable kill menu
        kill.setEnabled(true);
        // Fork a task, as a part of this ThreadGroup
        new Thread(this, getName()) {
            {
                setPriority(Thread.NORM_PRIORITY);
                start();
            }

            @Override
            public void run() {
                try {
                    execute();
                    // Wait until any children threads die:
                    int activeCount = getThreadGroup().activeCount();
                    while (activeCount > 1) {
                        if (isInterrupted()) break;
                        try {
                            Thread.sleep(500);
                            final List<Thread> ts = getAllThreads();
                            activeCount = ts.size();
                            if (activeCount <= 1) break;
                            log.debug("Waiting for " + ts.size() + " threads to die");
                            int count_zSelector = 0;
                            for (final Thread t : ts) {
                                if (t.getName().equals("zSelector")) {
                                    count_zSelector++;
                                }
                                log.debug("THREAD: " + t.getName());
                            }
                            if (activeCount == count_zSelector + 1) {
                                // Do not wait on the stack slice selector thread.
                                break;
                            }
                        }
                        catch (final InterruptedException ie) { /* ignore */ }
                    }
                }
                catch (final Throwable t) {
                    handleException(t);
                }
                finally {
                    executingTasks.remove(Executer.this);
                    try {
                        if (null != output) output.shutdown();
                        if (null != errors) errors.shutdown();
                    }
                    catch (final Exception e) {
                        handleException(e);
                    }
                    // Leave kill menu item enabled if other tasks are running
                    kill.setEnabled(executingTasks.size() > 0);
                    setTitle();
                }
            }
        };
    }

    /** The method to extend, that will do the actual work. */
    abstract void execute();

    /** Fetch a list of all threads from all thread subgroups, recursively. */
    List<Thread> getAllThreads() {
        final ArrayList<Thread> threads = new ArrayList<Thread>();
        // From all subgroups:
        // Oversize the arrays: enumerate() silently truncates if they are full.
        final ThreadGroup[] tgs = new ThreadGroup[activeGroupCount() * 2 + 100];
        this.enumerate(tgs, true);
        for (final ThreadGroup tg : tgs) {
            if (null == tg) continue;
            final Thread[] ts = new Thread[tg.activeCount() * 2 + 100];
            tg.enumerate(ts);
            for (final Thread t : ts) {
                if (null == t) continue;
                threads.add(t);
            }
        }
        // And from this group:
        final Thread[] ts = new Thread[activeCount() * 2 + 100];
        this.enumerate(ts);
        for (final Thread t : ts) {
            if (null == t) continue;
            threads.add(t);
        }
        return threads;
    }

    /**
     * Totally destroy/stop all threads in this and all recursive thread
     * subgroups. Will remove itself from the executingTasks list.
     */
    @SuppressWarnings("deprecation")
    void obliterate() {
        try {
            // Stop printing to the screen
            if (null != output) output.shutdownNow();
            if (null != errors) errors.shutdownNow();
        }
        catch (final Exception e) {
            e.printStackTrace();
        }
        for (final Thread thread : getAllThreads()) {
            try {
                thread.interrupt();
                Thread.yield(); // give it a chance
                thread.stop();
            }
            catch (final Throwable t) {
                t.printStackTrace();
            }
        }
        executingTasks.remove(this);
    }

    @Override
    public String toString() {
        return getName();
    }
}

/** Returns a list of currently executing tasks */
public List<Executer> getExecutingTasks() {
    return executingTasks;
}

/** Kills the tab whose executer matches the given one. */
public void kill(final Executer executer) {
    for (int i = 0; i < tabbed.getTabCount(); i++) {
        final TextEditorTab tab = (TextEditorTab) tabbed.getComponentAt(i);
        if (executer == tab.getExecuter()) {
            tab.kill();
            break;
        }
    }
}

/**
 * Query the list of running scripts and provide a dialog to choose one and
 * kill it.
 */
public void chooseTaskToKill() {
    if (executingTasks.size() == 0) {
        error("\nNo running scripts\n");
        return;
    }
    commandService.run(KillScript.class, true, "editor", this);
}

/** Run the text in the textArea without compiling it, only if it's not java. */
public void runText() {
    runText(false);
}

/**
 * Runs the current text (or only the selection). Compiled languages are
 * redirected to {@link #runScript()} after saving; interpreted languages are
 * executed in place.
 */
public void runText(final boolean selectionOnly) {
    if (isCompiled()) {
        if (selectionOnly) {
            error("Cannot run selection of compiled language!");
            return;
        }
        if (handleUnsavedChanges(true)) runScript();
        else write("Compiled languages must be saved before they can be run.");
        return;
    }
    final ScriptLanguage currentLanguage = getCurrentLanguage();
    if (currentLanguage == null) {
        error("Select a language first!");
        // TODO guess the language, if possible.
        return;
    }
    markCompileStart();
    try {
        final TextEditorTab tab = getTab();
        tab.showOutput();
        execute(selectionOnly);
    }
    catch (final Throwable t) {
        t.printStackTrace();
    }
}

/** Invoke in the context of the event dispatch thread.
 */
private void execute(final boolean selectionOnly) throws IOException {
    final TextEditorTab tab = getTab();
    tab.prepare();
    final JTextAreaWriter output = new JTextAreaWriter(tab.screen, log);
    final JTextAreaWriter errors = new JTextAreaWriter(errorScreen, log);
    final File file = getEditorPane().getFile();
    // Pipe current text into the runScript:
    final PipedInputStream pi = new PipedInputStream();
    final PipedOutputStream po = new PipedOutputStream(pi);
    // The Executer creates a Thread that
    // does the reading from PipedInputStream
    tab.setExecutor(new Executer(output, errors) {

        @Override
        public void execute() {
            try {
                evalScript(file == null ? getEditorPane().getFileName() : file
                    .getAbsolutePath(), new InputStreamReader(pi), output, errors);
                output.flush();
                errors.flush();
                markCompileEnd();
            }
            catch (final Throwable t) {
                output.flush();
                errors.flush();
                if (t instanceof ScriptException && t.getCause() != null &&
                    t.getCause().getClass().getName().endsWith("CompileError"))
                {
                    errorScreen.append("Compilation failed");
                    tab.showErrors();
                }
                else {
                    handleException(t);
                }
            }
            finally {
                tab.restore();
            }
        }
    });
    // Write into PipedOutputStream
    // from another Thread
    try {
        final String text;
        if (selectionOnly) {
            final String selected = tab.getEditorPane().getSelectedText();
            if (selected == null) {
                error("Selection required!");
                // NOTE(review): text stays null here and is still handed to
                // pw.write(text) in the writer thread below — verify intent.
                text = null;
            }
            else text = selected + "\n"; // Ensure code blocks are terminated
        }
        else {
            text = tab.getEditorPane().getText();
        }
        new Thread() {
            {
                setPriority(Thread.NORM_PRIORITY);
            }

            @Override
            public void run() {
                final PrintWriter pw = new PrintWriter(po);
                pw.write(text);
                pw.flush(); // will lock and wait in some cases
                try {
                    po.close();
                }
                catch (final Throwable tt) {
                    tt.printStackTrace();
                }
                pw.close();
            }
        }.start();
    }
    catch (final Throwable t) {
        t.printStackTrace();
    }
    finally {
        // Re-enable when all text to send has been sent
        tab.getEditorPane().setEditable(true);
    }
}

/**
 * Runs the saved script file through its language engine; output goes to the
 * tab screen (or the error screen for compiled languages).
 */
public void runScript() {
    if (isCompiled()) getTab().showErrors();
    else getTab().showOutput();
    markCompileStart();
    final JTextAreaWriter output = new JTextAreaWriter(getTab().screen, log);
    final JTextAreaWriter errors = new JTextAreaWriter(errorScreen, log);
    final File file = getEditorPane().getFile();
    // The Executer constructor starts the worker itself (see above), so the
    // anonymous instance is deliberately not assigned or started here.
    new TextEditor.Executer(output, errors) {

        @Override
        public void execute() {
            Reader reader = null;
            try {
                reader = evalScript(getEditorPane().getFile().getPath(),
                    new FileReader(file), output, errors);
                output.flush();
                errors.flush();
                markCompileEnd();
            }
            catch (final Throwable e) {
                handleException(e);
            }
            finally {
                if (reader != null) {
                    try {
                        reader.close();
                    }
                    catch (final IOException exc) {
                        handleException(exc);
                    }
                }
            }
        }
    };
}

/** Compiles the current (Java) file on a background thread. */
public void compile() {
    if (!handleUnsavedChanges(true)) return;
    final ScriptEngine interpreter = getCurrentLanguage().getScriptEngine();
    if (interpreter instanceof JavaEngine) {
        final JavaEngine java = (JavaEngine) interpreter;
        final JTextAreaWriter errors = new JTextAreaWriter(errorScreen, log);
        markCompileStart();
        getTab().showErrors();
        new Thread() {

            @Override
            public void run() {
                java.compile(getEditorPane().getFile(), errors);
                errorScreen.insert("Compilation finished.\n", errorScreen
                    .getDocument().getLength());
                markCompileEnd();
            }
        }.start();
    }
}

/**
 * Returns the current selection, or prompts the user when there is no
 * selection or the selection spans multiple lines; null when cancelled.
 */
public String getSelectedTextOrAsk(final String label) {
    String selection = getTextArea().getSelectedText();
    if (selection == null || selection.indexOf('\n') >= 0) {
        selection = JOptionPane.showInputDialog(this,
            label + ":", label + "...", JOptionPane.QUESTION_MESSAGE);
        if (selection == null) return null;
    }
    return selection;
}

/** Like {@link #getSelectedTextOrAsk} for a class name, trimmed. */
public String getSelectedClassNameOrAsk() {
    String className = getSelectedTextOrAsk("Class name");
    if (className != null) className = className.trim();
    return className;
}

/** Appends text to the text area and scrolls the caret to it. */
private static void append(final JTextArea textArea, final String text) {
    final int length = textArea.getDocument().getLength();
    textArea.insert(text, length);
    textArea.setCaretPosition(length);
}

/**
 * Logs a "Started ..." banner to both screens and remembers the error-screen
 * position so {@link #markCompileEnd()} can detect and scroll to new errors.
 */
public void markCompileStart() {
    errorHandler = null;
    final String started = "Started " + getEditorPane().getFileName() + " at " +
        new Date() + "\n";
    final int offset = errorScreen.getDocument().getLength();
    append(errorScreen, started);
    append(getTab().screen, started);
    compileStartOffset = errorScreen.getDocument().getLength();
    try {
        compileStartPosition = errorScreen.getDocument().createPosition(offset);
    }
    catch (final BadLocationException e) {
        handleException(e);
    }
    ExceptionHandler.addThread(Thread.currentThread(), this);
}

/** Parses errors produced since {@link #markCompileStart()} and shows them. */
public void markCompileEnd() {
    if (errorHandler == null) {
        errorHandler = new ErrorHandler(getCurrentLanguage(), errorScreen,
            compileStartPosition.getOffset());
        if (errorHandler.getErrorCount() > 0) getTab().showErrors();
    }
    if (compileStartOffset != errorScreen.getDocument().getLength()) getTab()
        .showErrors();
    if (getTab().showingErrors) {
        errorHandler.scrollToVisible(compileStartOffset);
    }
}

/**
 * Jumps to the next (or previous) recorded error, opening the offending file
 * and line; returns false when there is no further error.
 */
public boolean nextError(final boolean forward) {
    if (errorHandler != null && errorHandler.nextError(forward)) try {
        File file = new File(errorHandler.getPath());
        if (!file.isAbsolute()) file = getFileForBasename(file.getName());
        errorHandler.markLine();
        switchTo(file, errorHandler.getLine());
        getTab().showErrors();
        errorScreen.invalidate();
        return true;
    }
    catch (final Exception e) {
        handleException(e);
    }
    return false;
}

/** Switches to the given path at the given line (path is canonicalized). */
public void switchTo(final String path, final int lineNumber) throws IOException {
    switchTo(new File(path).getCanonicalFile(), lineNumber);
}

/** Switches to the given file, then moves the caret to the given line. */
public void switchTo(final File file, final int lineNumber) {
    if (!editorPaneContainsFile(getEditorPane(), file)) switchTo(file);
    SwingUtilities.invokeLater(new Runnable() {

        @Override
        public void run() {
            try {
                gotoLine(lineNumber);
            }
            catch (final BadLocationException e) {
                // ignore
            }
        }
    });
}

/** Selects the tab already showing the file, or opens it in a new tab. */
public void switchTo(final File file) {
    for (int i = 0; i < tabbed.getTabCount(); i++)
        if (editorPaneContainsFile(getEditorPane(i), file)) {
            switchTo(i);
            return;
        }
    open(file);
}

/** Selects the tab at the given index (no-op when already selected). */
public void switchTo(final int index) {
    if (index == tabbed.getSelectedIndex()) return;
    tabbed.setSelectedIndex(index);
}

/** Moves the selection by delta tabs, wrapping around both ends. */
private void switchTabRelative(final int delta) {
    final int count = tabbed.getTabCount();
    int index = ((tabbed.getSelectedIndex() + delta) % count);
    if (index < 0) {
        index += count;
    }
    switchTo(index);
}

/** Removes a tab and its corresponding "Tabs" menu entry. */
private void removeTab(final int index) {
    final int menuItemIndex = index + tabsMenuTabsStart;
    tabbed.remove(index);
    tabsMenuItems.remove(tabsMenu.getItem(menuItemIndex));
    tabsMenu.remove(menuItemIndex);
}

/** True when the pane is showing the given file (canonical-path compare). */
boolean editorPaneContainsFile(final EditorPane editorPane, final File file) {
    try {
        return file != null && editorPane != null && editorPane.getFile() != null &&
            file.getCanonicalFile().equals(editorPane.getFile().getCanonicalFile());
    }
    catch (final IOException e) {
        return false;
    }
}

/** The file of the currently selected pane (may be null). */
public File getFile() {
    return getEditorPane().getFile();
}

/** Finds an open file by base name, checking the current tab first. */
public File getFileForBasename(final String baseName) {
    File file = getFile();
    if (file != null && file.getName().equals(baseName)) return file;
    for (int i = 0; i < tabbed.getTabCount(); i++) {
        file = getEditorPane(i).getFile();
        if (file != null && file.getName().equals(baseName)) return file;
    }
    return null;
}

/** Updates the git directory to the git directory of the current file. */
private void updateGitDirectory() {
    final EditorPane editorPane = getEditorPane();
    editorPane.setGitDirectory(new FileFunctions(this)
        .getGitDirectory(editorPane.getFile()));
}

/** Inserts an import statement for the given class into the current text. */
public void addImport(final String className) {
    if (className != null) {
        new TokenFunctions(getTextArea()).addImport(className.trim());
    }
}

/** Opens help for the class, with frames. */
public void openHelp(final String className) {
    openHelp(className, true);
}

/**
 * @param className
 * @param withFrames
 */
public void openHelp(final String className, final boolean withFrames) {
    if (className == null) {
        // FIXME: This cannot be right.
        // NOTE(review): the prompt's result is discarded and withFrames is
        // never used — this method currently has no visible effect.
        getSelectedClassNameOrAsk();
    }
}

/** Prompts for a jar file and extracts its sources into a chosen workspace. */
public void extractSourceJar() {
    final File file = openWithDialog(null);
    if (file != null) extractSourceJar(file);
}

/**
 * Extracts the source files from the given jar into a user-chosen workspace
 * directory, opening each non-binary file and stripping trailing whitespace.
 */
public void extractSourceJar(final File file) {
    try {
        final FileFunctions functions = new FileFunctions(this);
        final File workspace = uiService.chooseFile(
            new File(System.getProperty("user.home")), FileWidget.DIRECTORY_STYLE);
        if (workspace == null) return;
        final List<String> paths =
            functions.extractSourceJar(file.getAbsolutePath(), workspace);
        for (final String path : paths)
            if (!functions.isBinaryFile(path)) {
                open(new File(path));
                final EditorPane pane = getEditorPane();
                new TokenFunctions(pane).removeTrailingWhitespace();
                if (pane.fileChanged()) save();
            }
    }
    catch (final IOException e) {
        error("There was a problem opening " + file + ": " + e.getMessage());
    }
}

/* extensionMustMatch == false means extension must not match */
private File openWithDialog(final File defaultDir) {
    return uiService.chooseFile(defaultDir, FileWidget.OPEN_STYLE);
}

/**
 * Write a message to the output screen
 *
 * @param message The text to write
 */
public void write(String message) {
    final TextEditorTab tab = getTab();
    if (!message.endsWith("\n")) message += "\n";
    tab.screen.insert(message, tab.screen.getDocument().getLength());
}

/** Writes a message to the error screen and brings it to the front. */
public void writeError(String message) {
    getTab().showErrors();
    if (!message.endsWith("\n")) message += "\n";
    errorScreen.insert(message, errorScreen.getDocument().getLength());
}

/** Shows a modal error dialog. */
private void error(final String message) {
    JOptionPane.showMessageDialog(this, message);
}

/** Dumps the throwable to the error screen and shows it. */
public void handleException(final Throwable e) {
    handleException(e, errorScreen);
    getTab().showErrors();
}

/** Appends the full stack trace (including causes) to the given text area. */
public static void handleException(final Throwable e, final JTextArea textArea) {
    final CharArrayWriter writer = new CharArrayWriter();
    final PrintWriter out = new PrintWriter(writer);
    e.printStackTrace(out);
    for (Throwable cause = e.getCause(); cause != null; cause = cause.getCause()) {
        out.write("Caused by: ");
        cause.printStackTrace(out);
    }
    out.close();
    textArea.append(writer.toString());
}

/**
 * Removes invalid characters, shows a dialog.
 *
 * @return The amount of invalid characters found.
 */
public int zapGremlins() {
    final int count = getEditorPane().zapGremlins();
    final String msg = count > 0 ? "Zap Gremlins converted " + count +
        " invalid characters to spaces" : "No invalid characters found!";
    JOptionPane.showMessageDialog(this, msg);
    return count;
}

// -- Helper methods --

/** True when the current language is a compiled one. */
private boolean isCompiled() {
    final ScriptLanguage language = getCurrentLanguage();
    if (language == null) return false;
    return language.isCompiledLanguage();
}

/**
 * Builds a ScriptModule for the given source and runs it synchronously with
 * stdout/stderr mapped to the given writers; returns the (possibly wrapped)
 * reader so the caller can close it.
 */
private Reader evalScript(final String filename, Reader reader, final Writer output,
    final Writer errors) throws ModuleException
{
    final ScriptLanguage language = getCurrentLanguage();
    if (respectAutoImports) {
        reader = DefaultAutoImporters.prefixAutoImports(context, language, reader, errors);
    }
    // create script module for execution
    final ScriptInfo info = new ScriptInfo(context, filename, reader);
    final ScriptModule module = info.createModule();
    context.inject(module);
    // use the currently selected language to execute the script
    module.setLanguage(language);
    // map stdout and stderr to the UI
    module.setOutputWriter(output);
    module.setErrorWriter(errors);
    // execute the script
    try {
        moduleService.run(module, true).get();
    }
    catch (final InterruptedException e) {
        error("Interrupted");
    }
    catch (final ExecutionException e) {
        log.error(e);
    }
    return reader;
}

/** Closes every tab (prompting for unsaved changes); false when cancelled. */
@Override
public boolean confirmClose() {
    while (tabbed.getTabCount() > 0) {
        if (!handleUnsavedChanges()) return false;
        final int index = tabbed.getSelectedIndex();
        removeTab(index);
    }
    return true;
}

@Override
public void insertUpdate(final DocumentEvent e) {
    setTitle();
    checkForOutsideChanges();
}

@Override
public void removeUpdate(final DocumentEvent e) {
    setTitle();
    checkForOutsideChanges();
}

@Override
public void changedUpdate(final DocumentEvent e) {
    setTitle();
}
}
package net.openhft.chronicle.bytes;

import net.openhft.chronicle.core.Jvm;
import net.openhft.chronicle.core.Maths;
import net.openhft.chronicle.core.Memory;
import net.openhft.chronicle.core.annotation.ForceInline;
import net.openhft.chronicle.core.pool.StringBuilderPool;
import net.openhft.chronicle.core.pool.StringInterner;
import net.openhft.chronicle.core.util.StringUtils;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.io.IOException;
import java.io.UTFDataFormatException;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.nio.BufferUnderflowException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.charset.StandardCharsets;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;

// Static utility methods for Bytes: UTF-8 parsing/appending, stop-bit
// encoding, number formatting and debug printing. Enum-with-no-constants is
// used as a non-instantiable utility holder.
public enum BytesUtil {
    ;

    static final char[] HEXI_DECIMAL = "0123456789ABCDEF".toCharArray();
    private static final byte[] MIN_VALUE_TEXT = ("" + Long.MIN_VALUE).getBytes();
    private static final StringBuilderPool SBP = new StringBuilderPool();
    private static final StringInterner SI = new StringInterner(1024);
    private static final byte[] Infinity = "Infinity".getBytes();
    private static final byte[] NaN = "NaN".getBytes();
    private static final long MAX_VALUE_DIVIDE_5 = Long.MAX_VALUE / 5;
    // Scratch buffer for digit extraction; 20 bytes fits any long.
    private static final ThreadLocal<byte[]> NUMBER_BUFFER = ThreadLocal.withInitial(() -> new byte[20]);
    private static final long MAX_VALUE_DIVIDE_10 = Long.MAX_VALUE / 10;
    private static final Constructor<String> STRING_CONSTRUCTOR;
    private static final Field SB_VALUE, SB_COUNT;
    private static final ThreadLocal<DateCache> dateCacheTL = new ThreadLocal<DateCache>();

    // Reflection capture of String's package-private sharing constructor and
    // StringBuilder's internal char[]/count, used to avoid copies below.
    // NOTE(review): relies on pre-Java-9 internals (char[]-backed strings);
    // would fail on compact-strings JDKs — verify target JVM.
    static {
        try {
            STRING_CONSTRUCTOR = String.class.getDeclaredConstructor(char[].class, boolean.class);
            STRING_CONSTRUCTOR.setAccessible(true);
            SB_VALUE = Class.forName("java.lang.AbstractStringBuilder").getDeclaredField("value");
            SB_VALUE.setAccessible(true);
            SB_COUNT = Class.forName("java.lang.AbstractStringBuilder").getDeclaredField("count");
            SB_COUNT.setAccessible(true);
        } catch (Exception e) {
            throw new AssertionError(e);
        }
    }

    // Byte-for-byte equality of the readable regions of two stores.
    // NOTE(review): the second loop restarts at i = 0, re-checking bytes the
    // 8-byte loop already compared — correct but redundant work.
    public static boolean contentEqual(BytesStore a, BytesStore b) {
        if (a == null) return b == null;
        if (b == null) return false;
        if (a.start() != b.start() || a.readRemaining() != b.readRemaining())
            return false;
        long aPos = a.readPosition();
        long bPos = b.readPosition();
        long length = a.readRemaining();
        long i;
        for (i = 0; i < length - 7; i += 8) {
            if (a.readLong(aPos + i) != b.readLong(bPos + i))
                return false;
        }
        for (i = 0; i < length; i++) {
            if (a.readByte(aPos + i) != b.readByte(bPos + i))
                return false;
        }
        return true;
    }

    // Delegates to the input's own region-equality check.
    public static boolean bytesEqual(
            RandomDataInput a, long aOffset,
            RandomDataInput b, long bOffset, long len) {
        return a.bytesEqual(aOffset, b, bOffset, len);
    }

    // Reads utflen bytes of UTF-8 into the appendable, picking the
    // StringBuilder-over-native fast path when both conditions hold.
    public static void parseUTF(StreamingDataInput bytes, Appendable appendable, int utflen)
            throws UTFDataFormatRuntimeException {
        if (((AbstractBytes) bytes).bytesStore() instanceof NativeBytesStore
                && appendable instanceof StringBuilder) {
            parseUTF_SB1((AbstractBytes) bytes, (StringBuilder) appendable, utflen);
        } else {
            parseUTF1(bytes, appendable, utflen);
        }
    }

    // Generic path: consume 7-bit (ASCII) bytes one at a time, then hand any
    // remaining multi-byte sequences to parseUTF2.
    static void parseUTF1(StreamingDataInput bytes, Appendable appendable, int utflen)
            throws UTFDataFormatRuntimeException {
        try {
            int count = 0;
            assert bytes.readRemaining() >= utflen;
            while (count < utflen) {
                int c = bytes.readUnsignedByte();
                if (c >= 128) {
                    bytes.readSkip(-1); // un-read: let parseUTF2 re-decode it
                    break;

                } else if (c < 0) {
                    break;
                }
                count++;
                appendable.append((char) c);
            }

            if (utflen > count)
                parseUTF2(bytes, appendable, utflen, count);
        } catch (IOException e) {
            throw new AssertionError(e);
        }
    }

    // Fast path: copy ASCII bytes straight into the StringBuilder's backing
    // char[] via reflection, bypassing per-char bounds checks.
    static void parseUTF_SB1(AbstractBytes bytes, StringBuilder sb, int utflen)
            throws UTFDataFormatRuntimeException {
        try {
            int count = 0;
            if (utflen > bytes.readRemaining())
                throw new BufferUnderflowException();
            NativeBytesStore nbs = (NativeBytesStore) bytes.bytesStore;
            long address = nbs.address + nbs.translate(bytes.readPosition());
            Memory memory = nbs.memory;
            sb.ensureCapacity(utflen);
            char[] chars = (char[]) SB_VALUE.get(sb);
            while (count < utflen) {
                int c = memory.readByte(address + count);
                if (c < 0) // high bit set: start of a multi-byte sequence
                    break;
                chars[count++] = (char) c;
            }
            bytes.readSkip(count);
            SB_COUNT.setInt(sb, count);
            if (count < utflen)
                parseUTF2(bytes, sb, utflen, count);
        } catch (IOException | IllegalAccessException e) {
            throw new AssertionError(e);
        }
    }

    // Slow path: decode 2- and 3-byte UTF-8 sequences (no surrogate-pair /
    // 4-byte support yet, see TODO) starting from the given count.
    static void parseUTF2(StreamingDataInput bytes, Appendable appendable, int utflen, int count)
            throws IOException {
        while (count < utflen) {
            int c = bytes.readUnsignedByte();
            switch (c >> 4) {
                case 0:
                case 1:
                case 2:
                case 3:
                case 4:
                case 5:
                case 6:
                case 7:
                    /* 0xxxxxxx */
                    count++;
                    appendable.append((char) c);
                    break;

                case 12:
                case 13: {
                    /* 110x xxxx 10xx xxxx */
                    count += 2;
                    if (count > utflen)
                        throw new UTFDataFormatRuntimeException(
                                "malformed input: partial character at end");
                    int char2 = bytes.readUnsignedByte();
                    if ((char2 & 0xC0) != 0x80)
                        throw new UTFDataFormatRuntimeException(
                                "malformed input around byte " + count + " was " + char2);
                    int c2 = (char) (((c & 0x1F) << 6) |
                            (char2 & 0x3F));
                    appendable.append((char) c2);
                    break;
                }

                case 14: {
                    /* 1110 xxxx 10xx xxxx 10xx xxxx */
                    count += 3;
                    if (count > utflen)
                        throw new UTFDataFormatRuntimeException(
                                "malformed input: partial character at end");
                    int char2 = bytes.readUnsignedByte();
                    int char3 = bytes.readUnsignedByte();

                    if (((char2 & 0xC0) != 0x80) || ((char3 & 0xC0) != 0x80))
                        throw new UTFDataFormatRuntimeException(
                                "malformed input around byte " + (count - 1) + " was " + char2 + " " + char3);
                    int c3 = (char) (((c & 0x0F) << 12) |
                            ((char2 & 0x3F) << 6) |
                            (char3 & 0x3F));
                    appendable.append((char) c3);
                    break;
                }
                // TODO add code point of characters > 0xFFFF support.
                default:
                    /* 10xx xxxx, 1111 xxxx */
                    throw new UTFDataFormatRuntimeException(
                            "malformed input around byte " + count);
            }
        }
    }

    // Writes a nullable CharSequence as stop-bit length (-1 for null)
    // followed by UTF-8 bytes.
    @ForceInline
    public static void writeUTF(StreamingDataOutput bytes, CharSequence str) {
        if (str == null) {
            bytes.writeStopBit(-1);

        } else {
            long utfLength = findUTFLength(str);
            bytes.writeStopBit(utfLength);
            appendUTF(bytes, str, 0, str.length());
        }
    }

    // Computes the encoded UTF-8 byte length without allocating.
    private static long findUTFLength(@NotNull CharSequence str) {
        int strlen = str.length();
        long utflen = strlen;/* use charAt instead of copying String to char array */
        for (int i = 0; i < strlen; i++) {
            char c = str.charAt(i);
            if (c <= 0x007F) {
                continue;
            }
            if (c <= 0x07FF) {
                utflen++;

            } else {
                utflen += 2;
            }
        }
        return utflen;
    }

    // Returns a read view over [position, limit) of the given store.
    @NotNull
    public static Bytes asBytes(RandomDataOutput bytes, long position, long limit) {
        Bytes sbytes = bytes.bytesForWrite();
        sbytes.writeLimit(limit);
        sbytes.readLimit(limit);
        sbytes.readPosition(position);
        return sbytes;
    }

    // Appends a CharSequence region as UTF-8, using bulk native writes when
    // both sides support them.
    public static void appendUTF(StreamingDataOutput bytes, @NotNull CharSequence str, int offset, int length) {
        if (((AbstractBytes) bytes).bytesStore() instanceof NativeBytesStore) {
            if (str instanceof VanillaBytes) {
                ((VanillaBytes) bytes).write((VanillaBytes) str, offset, length);
                return;
            }
            if (str instanceof String) {
                ((VanillaBytes) bytes).write((String) str, offset, length);
                return;
            }
        }
        appendUTF0(bytes, str, offset, length);
    }

    // Char-by-char fallback: fast ASCII run first, then full UTF-8 encoding.
    private static void appendUTF0(StreamingDataOutput bytes, CharSequence str, int offset, int length) {
        int i;
        for (i = 0; i < length; i++) {
            char c = str.charAt(offset + i);
            if (c > 0x007F)
                break;
            bytes.writeByte((byte) c);
        }

        for (; i < length; i++) {
            char c = str.charAt(offset + i);
            appendUTF(bytes, c);
        }
    }

    // Appends a region as ISO-8859-1, replacing chars > 255 with '?'.
    // NOTE(review): the String branch casts bytes to NativeBytes although the
    // guard only checked VanillaBytes — confirm the hierarchy makes this safe.
    public static void append8bit(StreamingDataOutput bytes, @NotNull CharSequence str, int offset, int length) {
        if (bytes instanceof VanillaBytes) {
            if (str instanceof VanillaBytes) {
                ((VanillaBytes) bytes).write((VanillaBytes) str, offset, length);
                return;
            }
            if (str instanceof String) {
                ((NativeBytes) bytes).write((String) str, offset, length);
                return;
            }
        }
        for (int i = 0; i < length; i++) {
            char c = str.charAt(offset + i);
            if (c > 255) c = '?';
            bytes.writeUnsignedByte(c);
        }
    }

    // Encodes a single code point (up to 4 UTF-8 bytes).
    public static void appendUTF(StreamingDataOutput bytes, int c) {
        if (c <= 0x007F) {
            bytes.writeByte((byte) c);

        } else if (c <= 0x07FF) {
            bytes.writeByte((byte) (0xC0 | ((c >> 6) & 0x1F)));
            bytes.writeByte((byte) (0x80 | c & 0x3F));

        } else if (c <= 0xFFFF) {
            bytes.writeByte((byte) (0xE0 | ((c >> 12) & 0x0F)));
            bytes.writeByte((byte) (0x80 | ((c >> 6) & 0x3F)));
            bytes.writeByte((byte) (0x80 | (c & 0x3F)));

        } else {
            bytes.writeByte((byte) (0xF0 | ((c >> 18) & 0x07)));
            bytes.writeByte((byte) (0x80 | ((c >> 12) & 0x3F)));
            bytes.writeByte((byte) (0x80 | ((c >> 6) & 0x3F)));
            bytes.writeByte((byte) (0x80 | (c & 0x3F)));
        }
    }

    // Stop-bit encoding: 7 data bits per byte, high bit = "more follows";
    // 1- and 2-byte values are inlined, the rest go to writeStopBit0.
    public static void writeStopBit(StreamingDataOutput out, long n) {
        if ((n & ~0x7F) == 0) {
            out.writeByte((byte) (n & 0x7f));
            return;
        }
        if ((n & ~0x3FFF) == 0) {
            out.writeByte((byte) ((n & 0x7f) | 0x80));
            out.writeByte((byte) (n >> 7));
            return;
        }
        writeStopBit0(out, n);
    }

    // Number of bytes writeStopBit would emit for n.
    public static int stopBitLength(long n) {
        if ((n & ~0x7F) == 0) {
            return 1;
        }
        if ((n & ~0x3FFF) == 0) {
            return 2;
        }
        return stopBitlength0(n);
    }

    // General case; negatives are complemented and flagged by a trailing 0x00.
    static void writeStopBit0(StreamingDataOutput out, long n) {
        boolean neg = false;
        if (n < 0) {
            neg = true;
            n = ~n;
        }

        long n2;
        while ((n2 = n >>> 7) != 0) {
            out.writeByte((byte) (0x80L | n));
            n = n2;
        }
        // final byte
        if (!neg) {
            out.writeByte((byte) n);

        } else {
            out.writeByte((byte) (0x80L | n));
            out.writeByte((byte) 0);
        }
    }

    static int stopBitlength0(long n) {
        int len = 0;
        if (n < 0) {
            len = 1;
            n = ~n;
        }
        while ((n >>>= 7) != 0) len++;
        return len + 1;
    }

    // Debug dump: position/limits header plus printable bytes around the
    // read position (up to maxLength each side, '‖' marks the position).
    public static String toDebugString(RandomDataInput bytes, long maxLength) {
        StringBuilder sb = new StringBuilder(200);
        long position = bytes.readPosition();
        sb.append("[")
                .append("pos: ").append(position)
                .append(", rlim: ").append(bytes.readLimit())
                .append(", wlim: ").append(asSize(bytes.writeLimit()))
                .append(", cap: ").append(asSize(bytes.capacity()))
                .append(" ] ");
        toString(bytes, sb, position - maxLength, position, position + maxLength);

        return sb.toString();
    }

    // Renders the special MAX_CAPACITY sentinel as "8EiB".
    public static Object asSize(long size) {
        return size == Bytes.MAX_CAPACITY ? "8EiB" : size;
    }

    // Decodes the readable region as one char per byte (ISO-8859-1 style).
    public static String to8bitString(BytesStore bytes) {
        long pos = bytes.readPosition();
        int len = Maths.toInt32(bytes.readRemaining());
        char[] chars = new char[len];
        if (bytes instanceof VanillaBytes) {
            ((VanillaBytes) bytes).read8Bit(chars, len);
        } else {
            for (int i = 0; i < len; i++)
                chars[i] = (char) bytes.readUnsignedByte(pos + i);
        }
        return newString(chars);
    }

    // Wraps the char[] without copying via the captured sharing constructor.
    private static String newString(char[] chars) {
        try {
            return STRING_CONSTRUCTOR.newInstance(chars, true);
        } catch (Exception e) {
            throw new AssertionError(e);
        }
    }

    public static String toString(RandomDataInput bytes) {
        StringBuilder sb = new StringBuilder(200);
        toString(bytes, sb);
        return sb.toString();
    }

    // Appends printable bytes in [start, end), inserting '‖' at position.
    private static void toString(RandomDataInput bytes, Appendable sb, long start, long position, long end) {
        try {
            // before
            if (start < 0) start = 0;
            if (position > start) {
                long last = Math.min(position, bytes.readLimit());
                for (long i = start; i < last; i++) {
                    sb.append(bytes.printable(i));
                }
                sb.append('\u2016');
                if (position >= bytes.readLimit()) {
                    return;
                }
            }
            if (end > bytes.readLimit())
                end = bytes.readLimit();
            // after
            for (long i = position; i < end; i++) {
                sb.append(bytes.printable(i));
            }
        } catch (IOException e) {
            try {
                sb.append(e.toString());
            } catch (IOException e1) {
                throw new AssertionError(e);
            }
        }
    }

    // Dumps the full readable region, reserving the store during iteration.
    private static void toString(RandomDataInput bytes, StringBuilder sb) {
        bytes.reserve();
        assert bytes.start() <= bytes.readPosition();
        assert bytes.readPosition() <= bytes.readLimit();
        assert bytes.readLimit() <= bytes.realCapacity();

        for (long i = bytes.readPosition(); i < bytes.readLimit(); i++) {
            sb.append((char) bytes.readUnsignedByte(i));
        }
        bytes.release();
    }

    // Stop-bit decode; single positive byte is the common inlined case.
    @ForceInline
    public static long readStopBit(StreamingDataInput in) {
        long l;
        if ((l =
in.readByte()) >= 0) return l; return readStopBit0(in, l); } static long readStopBit0(StreamingDataInput in, long l) { l &= 0x7FL; long b; int count = 7; while ((b = in.readByte()) < 0) { l |= (b & 0x7FL) << count; count += 7; } if (b != 0) { if (count > 56) throw new IllegalStateException( "Cannot read more than 9 stop bits of positive value"); return l | (b << count); } else { if (count > 63) throw new IllegalStateException( "Cannot read more than 10 stop bits of negative value"); return ~l; } } public static <S extends ByteStringAppender> void append(S out, long num) { if (num < 0) { if (num == Long.MIN_VALUE) { out.write(MIN_VALUE_TEXT); return; } out.writeByte((byte) '-'); num = -num; } if (num == 0) { out.writeByte((byte) '0'); } else { appendLong0(out, num); } } /** * The length of the number must be fixed otherwise short numbers will not overwrite longer numbers */ public static void append(RandomDataOutput out, long offset, long num, int digits) { boolean negative = num < 0; num = Math.abs(num); for (int i = digits - 1; i > 0; i out.writeByte(offset + i, (byte) (num % 10 + '0')); num /= 10; } if (negative) { if (num != 0) numberTooLarge(digits); out.writeByte(offset, '-'); } else { if (num > 9) numberTooLarge(digits); out.writeByte(offset, (byte) (num % 10 + '0')); } } private static void numberTooLarge(int digits) { throw new IllegalArgumentException("Number too large for " + digits + "digits"); } private static void appendLong0(StreamingDataOutput out, long num) { byte[] numberBuffer = NUMBER_BUFFER.get(); // Extract digits into the end of the numberBuffer int endIndex = appendLong1(numberBuffer, num); // Bulk copy the digits into the front of the buffer out.write(numberBuffer, endIndex, numberBuffer.length - endIndex); } private static int appendLong1(byte[] numberBuffer, long num) { numberBuffer[19] = (byte) (num % 10L + '0'); num /= 10; if (num <= 0) return 19; numberBuffer[18] = (byte) (num % 10L + '0'); num /= 10; if (num <= 0) return 18; 
numberBuffer[17] = (byte) (num % 10L + '0'); num /= 10; if (num <= 0) return 17; numberBuffer[16] = (byte) (num % 10L + '0'); num /= 10; if (num <= 0) return 16; numberBuffer[15] = (byte) (num % 10L + '0'); num /= 10; if (num <= 0) return 15; numberBuffer[14] = (byte) (num % 10L + '0'); num /= 10; if (num <= 0) return 14; numberBuffer[13] = (byte) (num % 10L + '0'); num /= 10; if (num <= 0) return 13; numberBuffer[12] = (byte) (num % 10L + '0'); num /= 10; if (num <= 0) return 12; numberBuffer[11] = (byte) (num % 10L + '0'); num /= 10; if (num <= 0) return 11; numberBuffer[10] = (byte) (num % 10L + '0'); num /= 10; if (num <= 0) return 10; numberBuffer[9] = (byte) (num % 10L + '0'); num /= 10; if (num <= 0) return 9; numberBuffer[8] = (byte) (num % 10L + '0'); num /= 10; if (num <= 0) return 8; numberBuffer[7] = (byte) (num % 10L + '0'); num /= 10; if (num <= 0) return 7; numberBuffer[6] = (byte) (num % 10L + '0'); num /= 10; if (num <= 0) return 6; numberBuffer[5] = (byte) (num % 10L + '0'); num /= 10; if (num <= 0) return 5; numberBuffer[4] = (byte) (num % 10L + '0'); num /= 10; if (num <= 0) return 4; numberBuffer[3] = (byte) (num % 10L + '0'); num /= 10; if (num <= 0) return 3; numberBuffer[2] = (byte) (num % 10L + '0'); num /= 10; if (num <= 0) return 2; numberBuffer[1] = (byte) (num % 10L + '0'); return 1; } public static void append(StreamingDataOutput out, double d) { long val = Double.doubleToRawLongBits(d); int sign = (int) (val >>> 63); int exp = (int) ((val >>> 52) & 2047); long mantissa = val & ((1L << 52) - 1); if (sign != 0) { out.writeByte((byte) '-'); } if (exp == 0 && mantissa == 0) { out.writeByte((byte) '0'); return; } else if (exp == 2047) { if (mantissa == 0) { out.write(Infinity); } else { out.write(NaN); } return; } else if (exp > 0) { mantissa += 1L << 52; } final int shift = (1023 + 52) - exp; if (shift > 0) { // integer and faction if (shift < 53) { long intValue = mantissa >> shift; appendLong0(out, intValue); mantissa -= intValue << 
shift; if (mantissa > 0) { out.writeByte((byte) '.'); mantissa <<= 1; mantissa++; int precision = shift + 1; long error = 1; long value = intValue; int decimalPlaces = 0; while (mantissa > error) { // times 5*2 = 10 mantissa *= 5; error *= 5; precision long num = (mantissa >> precision); value = value * 10 + num; out.writeByte((byte) ('0' + num)); mantissa -= num << precision; final double parsedValue = asDouble(value, 0, sign != 0, ++decimalPlaces); if (parsedValue == d) break; } } return; } else { // faction. out.writeByte((byte) '0'); out.writeByte((byte) '.'); mantissa <<= 6; mantissa += (1 << 5); int precision = shift + 6; long error = (1 << 5); long value = 0; int decimalPlaces = 0; while (mantissa > error) { while (mantissa > MAX_VALUE_DIVIDE_5) { mantissa >>>= 1; error = (error + 1) >>> 1; precision } // times 5*2 = 10 mantissa *= 5; error *= 5; precision if (precision >= 64) { decimalPlaces++; out.writeByte((byte) '0'); continue; } long num = (mantissa >>> precision); value = value * 10 + num; final char c = (char) ('0' + num); assert !(c < '0' || c > '9'); out.writeByte((byte) c); mantissa -= num << precision; final double parsedValue = asDouble(value, 0, sign != 0, ++decimalPlaces); if (parsedValue == d) break; } return; } } // large number mantissa <<= 10; int precision = -10 - shift; int digits = 0; while ((precision > 53 || mantissa > Long.MAX_VALUE >> precision) && precision > 0) { digits++; precision long mod = mantissa % 5; mantissa /= 5; int modDiv = 1; while (mantissa < MAX_VALUE_DIVIDE_5 && precision > 1) { precision -= 1; mantissa <<= 1; modDiv <<= 1; } mantissa += modDiv * mod / 5; } long val2 = precision > 0 ? 
mantissa << precision : mantissa >>> -precision; appendLong0(out, val2); for (int i = 0; i < digits; i++) out.writeByte((byte) '0'); } private static double asDouble(long value, int exp, boolean negative, int decimalPlaces) { if (decimalPlaces > 0 && value < Long.MAX_VALUE / 2) { if (value < Long.MAX_VALUE / (1L << 32)) { exp -= 32; value <<= 32; } if (value < Long.MAX_VALUE / (1L << 16)) { exp -= 16; value <<= 16; } if (value < Long.MAX_VALUE / (1L << 8)) { exp -= 8; value <<= 8; } if (value < Long.MAX_VALUE / (1L << 4)) { exp -= 4; value <<= 4; } if (value < Long.MAX_VALUE / (1L << 2)) { exp -= 2; value <<= 2; } if (value < Long.MAX_VALUE / (1L << 1)) { exp -= 1; value <<= 1; } } for (; decimalPlaces > 0; decimalPlaces exp long mod = value % 5; value /= 5; int modDiv = 1; if (value < Long.MAX_VALUE / (1L << 4)) { exp -= 4; value <<= 4; modDiv <<= 4; } if (value < Long.MAX_VALUE / (1L << 2)) { exp -= 2; value <<= 2; modDiv <<= 2; } if (value < Long.MAX_VALUE / (1L << 1)) { exp -= 1; value <<= 1; modDiv <<= 1; } if (decimalPlaces > 1) value += modDiv * mod / 5; else value += (modDiv * mod + 4) / 5; } final double d = Math.scalb((double) value, exp); return negative ? -d : d; } @ForceInline public static String readUTFΔ(StreamingDataInput in) { StringBuilder sb = SBP.acquireStringBuilder(); return in.readUTFΔ(sb) ? 
SI.intern(sb) : null; } @NotNull @ForceInline public static String parseUTF(StreamingDataInput bytes, @NotNull StopCharTester tester) { StringBuilder utfReader = SBP.acquireStringBuilder(); parseUTF(bytes, utfReader, tester); return SI.intern(utfReader); } @ForceInline public static void parseUTF(StreamingDataInput bytes, @NotNull Appendable builder, @NotNull StopCharTester tester) { try { if (builder instanceof StringBuilder && ((AbstractBytes) bytes).bytesStore() instanceof NativeBytesStore) { VanillaBytes vb = (VanillaBytes) bytes; StringBuilder sb = (StringBuilder) builder; sb.setLength(0); readUTF_SB1(vb, sb, tester); } else { setLength(builder, 0); readUTF1(bytes, builder, tester); } } catch (IOException e) { throw Jvm.rethrow(e); } } private static void readUTF_SB1(VanillaBytes bytes, @NotNull StringBuilder appendable, @NotNull StopCharTester tester) throws IOException { NativeBytesStore nb = (NativeBytesStore) bytes.bytesStore; int i = 0, len = Maths.toInt32(bytes.readRemaining()); long address = nb.address + nb.translate(bytes.readPosition()); Memory memory = nb.memory; for (; i < len; i++) { int c = memory.readByte(address + i); if (c < 0) break; if (tester.isStopChar(c)) { bytes.readSkip(i + 1); return; } appendable.append((char) c); } bytes.readSkip(i); if (i < len) { readUTF_SB2(bytes, appendable, tester); } } private static void readUTF_SB2(StreamingDataInput bytes, StringBuilder appendable, StopCharTester tester) throws UTFDataFormatException { while (true) { int c = bytes.readUnsignedByte(); switch (c >> 4) { case 0: case 1: case 2: case 3: case 4: case 5: case 6: case 7: /* 0xxxxxxx */ if (tester.isStopChar(c)) return; appendable.append((char) c); break; case 12: case 13: { /* 110x xxxx 10xx xxxx */ int char2 = bytes.readUnsignedByte(); if ((char2 & 0xC0) != 0x80) throw new UTFDataFormatException( "malformed input around byte"); int c2 = (char) (((c & 0x1F) << 6) | (char2 & 0x3F)); if (tester.isStopChar(c2)) return; appendable.append((char) c2); 
break; } case 14: { /* 1110 xxxx 10xx xxxx 10xx xxxx */ int char2 = bytes.readUnsignedByte(); int char3 = bytes.readUnsignedByte(); if (((char2 & 0xC0) != 0x80) || ((char3 & 0xC0) != 0x80)) throw new UTFDataFormatException( "malformed input around byte "); int c3 = (char) (((c & 0x0F) << 12) | ((char2 & 0x3F) << 6) | (char3 & 0x3F)); if (tester.isStopChar(c3)) return; appendable.append((char) c3); break; } default: /* 10xx xxxx, 1111 xxxx */ throw new UTFDataFormatException( "malformed input around byte "); } } } private static void readUTF1(StreamingDataInput bytes, @NotNull Appendable appendable, @NotNull StopCharTester tester) throws IOException { int len = Maths.toInt32(bytes.readRemaining()); while (len int c = bytes.readUnsignedByte(); if (c >= 128) { bytes.readSkip(-1); break; } if (tester.isStopChar(c)) return; appendable.append((char) c); } if (len <= 0) return; readUTF2(bytes, appendable, tester); } private static void readUTF2(StreamingDataInput bytes, Appendable appendable, StopCharTester tester) throws IOException { while (true) { int c = bytes.readUnsignedByte(); switch (c >> 4) { case 0: case 1: case 2: case 3: case 4: case 5: case 6: case 7: /* 0xxxxxxx */ if (tester.isStopChar(c)) return; appendable.append((char) c); break; case 12: case 13: { /* 110x xxxx 10xx xxxx */ int char2 = bytes.readUnsignedByte(); if ((char2 & 0xC0) != 0x80) throw new UTFDataFormatException( "malformed input around byte"); int c2 = (char) (((c & 0x1F) << 6) | (char2 & 0x3F)); if (tester.isStopChar(c2)) return; appendable.append((char) c2); break; } case 14: { /* 1110 xxxx 10xx xxxx 10xx xxxx */ int char2 = bytes.readUnsignedByte(); int char3 = bytes.readUnsignedByte(); if (((char2 & 0xC0) != 0x80) || ((char3 & 0xC0) != 0x80)) throw new UTFDataFormatException( "malformed input around byte "); int c3 = (char) (((c & 0x0F) << 12) | ((char2 & 0x3F) << 6) | (char3 & 0x3F)); if (tester.isStopChar(c3)) return; appendable.append((char) c3); break; } default: /* 10xx xxxx, 1111 
xxxx */ throw new UTFDataFormatException( "malformed input around byte "); } } } @ForceInline public static void parseUTF(StreamingDataInput bytes, @NotNull Appendable builder, @NotNull StopCharsTester tester) { setLength(builder, 0); try { readUTF0(bytes, builder, tester); } catch (IOException e) { throw new AssertionError(e); } } private static void readUTF0(StreamingDataInput bytes, @NotNull Appendable appendable, @NotNull StopCharsTester tester) throws IOException { while (true) { int c = bytes.readUnsignedByte(); if (c >= 128) { bytes.readSkip(-1); break; } if (tester.isStopChar(c, bytes.peekUnsignedByte())) return; appendable.append((char) c); if (bytes.readRemaining() == 0) return; } while (true) { int c = bytes.readUnsignedByte(); switch (c >> 4) { case 0: case 1: case 2: case 3: case 4: case 5: case 6: case 7: /* 0xxxxxxx */ if (tester.isStopChar(c, bytes.peekUnsignedByte())) return; appendable.append((char) c); break; case 12: case 13: { /* 110x xxxx 10xx xxxx */ int char2 = bytes.readUnsignedByte(); if ((char2 & 0xC0) != 0x80) throw new UTFDataFormatException( "malformed input around byte"); int c2 = (char) (((c & 0x1F) << 6) | (char2 & 0x3F)); if (tester.isStopChar(c2, bytes.peekUnsignedByte())) return; appendable.append((char) c2); break; } case 14: { /* 1110 xxxx 10xx xxxx 10xx xxxx */ int char2 = bytes.readUnsignedByte(); int char3 = bytes.readUnsignedByte(); if (((char2 & 0xC0) != 0x80) || ((char3 & 0xC0) != 0x80)) throw new UTFDataFormatException( "malformed input around byte "); int c3 = (char) (((c & 0x0F) << 12) | ((char2 & 0x3F) << 6) | (char3 & 0x3F)); if (tester.isStopChar(c3, bytes.peekUnsignedByte())) return; appendable.append((char) c3); break; } default: /* 10xx xxxx, 1111 xxxx */ throw new UTFDataFormatException( "malformed input around byte "); } } } @ForceInline public static void parse8bit(StreamingDataInput bytes, @NotNull StringBuilder builder, @NotNull StopCharsTester tester) { builder.setLength(0); read8bit0(bytes, builder, 
tester); } @ForceInline public static void parse8bit(StreamingDataInput bytes, @NotNull Bytes builder, @NotNull StopCharsTester tester) { builder.readPosition(0); read8bit0(bytes, builder, tester); } private static void read8bit0(StreamingDataInput bytes, @NotNull StringBuilder appendable, @NotNull StopCharsTester tester) { while (true) { int c = bytes.readUnsignedByte(); if (tester.isStopChar(c, bytes.peekUnsignedByte())) return; appendable.append((char) c); if (bytes.readRemaining() == 0) return; } } private static void read8bit0(StreamingDataInput bytes, @NotNull Bytes bytes2, @NotNull StopCharsTester tester) { int ch = bytes.readUnsignedByte(); do { int next = bytes.readUnsignedByte(); if (tester.isStopChar(ch, next)) { bytes.readSkip(-1); return; } bytes2.writeUnsignedByte(ch); ch = next; } while (bytes.readRemaining() > 1); if (tester.isStopChar(ch, -1)) { bytes.readSkip(-1); return; } bytes2.writeUnsignedByte(ch); } public static double parseDouble(StreamingDataInput in) { long value = 0; int exp = 0; boolean negative = false; int decimalPlaces = Integer.MIN_VALUE; int ch = in.readUnsignedByte(); switch (ch) { case 'N': if (compareRest(in, "aN")) return Double.NaN; in.readSkip(-1); return Double.NaN; case 'I': //noinspection SpellCheckingInspection if (compareRest(in, "nfinity")) return Double.POSITIVE_INFINITY; in.readSkip(-1); return Double.NaN; case '-': if (compareRest(in, "Infinity")) return Double.NEGATIVE_INFINITY; negative = true; ch = in.readUnsignedByte(); break; } while (true) { if (ch >= '0' && ch <= '9') { while (value >= MAX_VALUE_DIVIDE_10) { value >>>= 1; exp++; } value = value * 10 + (ch - '0'); decimalPlaces++; } else if (ch == '.') { decimalPlaces = 0; } else { break; } if (in.readRemaining() == 0) break; ch = in.readUnsignedByte(); } return asDouble(value, exp, negative, decimalPlaces); } static boolean compareRest(StreamingDataInput in, String s) { if (s.length() > in.readRemaining()) return false; long position = in.readPosition(); for 
(int i = 0; i < s.length(); i++) { if (in.readUnsignedByte() != s.charAt(i)) { in.readPosition(position); return false; } } return true; } @ForceInline public static long parseLong(StreamingDataInput in) { long num = 0; boolean negative = false; while (in.readRemaining() > 0) { int b = in.readUnsignedByte(); // if (b >= '0' && b <= '9') if ((b - ('0' + Integer.MIN_VALUE)) <= 9 + Integer.MIN_VALUE) { num = num * 10 + b - '0'; } else if (b == '-') { negative = true; } else if (b == ']' || b == '}') { in.readSkip(-1); break; } else { break; } } return negative ? -num : num; } public static long parseLong(RandomDataInput in, long offset) { long num = 0; boolean negative = false; while (true) { int b = in.readUnsignedByte(offset++); // if (b >= '0' && b <= '9') if ((b - ('0' + Integer.MIN_VALUE)) <= 9 + Integer.MIN_VALUE) num = num * 10 + b - '0'; else if (b == '-') negative = true; else break; } return negative ? -num : num; } public static boolean skipTo(ByteStringParser parser, StopCharTester tester) { while (parser.readRemaining() > 0) { int ch = parser.readUnsignedByte(); if (tester.isStopChar(ch)) return true; } return false; } public static int getAndAddInt(BytesStore in, long offset, int adding) { for (; ; ) { int value = in.readVolatileInt(offset); if (in.compareAndSwapInt(offset, value, value + adding)) return value; } } public static long getAndAddLong(BytesStore in, long offset, long adding) { for (; ; ) { long value = in.readVolatileLong(offset); if (in.compareAndSwapLong(offset, value, value + adding)) return value; } } public static int asInt(@NotNull String str) { ByteBuffer bb = ByteBuffer.wrap(str.getBytes(StandardCharsets.ISO_8859_1)).order(ByteOrder.nativeOrder()); return bb.getInt(); } public static String toHexString(@NotNull final Bytes bytes) { return toHexString(bytes, bytes.readPosition(), bytes.readRemaining()); } /** * display the hex data of {@link Bytes} from the position() to the limit() * * @param bytes the buffer you wish to toString() * 
@return hex representation of the buffer, from example [0D ,OA, FF] */ public static String toHexString(@NotNull final Bytes bytes, long offset, long len) { if (len == 0) return ""; int width = 16; int[] lastLine = new int[width]; String sep = ""; long position = bytes.readPosition(); long limit = bytes.readLimit(); try { bytes.readLimit(offset + len); bytes.readPosition(offset); final StringBuilder builder = new StringBuilder(); long start = offset / width * width; long end = (offset + len + width - 1) / width * width; for (long i = start; i < end; i += width) { // check for duplicate rows if (i + width < end) { boolean same = true; for (int j = 0; j < width && i + j < offset + len; j++) { int ch = bytes.readUnsignedByte(i + j); same &= (ch == lastLine[j]); lastLine[j] = ch; } if (i > start && same) { sep = "........\n"; continue; } } builder.append(sep); sep = ""; String str = Long.toHexString(i); for (int j = str.length(); j < 8; j++) builder.append('0'); builder.append(str); for (int j = 0; j < width; j++) { if (j == width / 2) builder.append(' '); if (i + j < start || i + j >= offset + len) { builder.append(" "); } else { builder.append(' '); int ch = bytes.readUnsignedByte(i + j); builder.append(HEXI_DECIMAL[ch >> 4]); builder.append(HEXI_DECIMAL[ch & 15]); } } builder.append(' '); for (int j = 0; j < width; j++) { if (j == width / 2) builder.append(' '); if (i + j < start || i + j >= offset + len) { builder.append(' '); } else { int ch = bytes.readUnsignedByte(i + j); if (ch < ' ' || ch > 126) ch = '\u00B7'; builder.append((char) ch); } } builder.append("\n"); } return builder.toString(); } finally { bytes.readLimit(limit); bytes.readPosition(position); } } public static void setCharAt(@NotNull Appendable sb, int index, char ch) { if (sb instanceof StringBuilder) ((StringBuilder) sb).setCharAt(index, ch); else if (sb instanceof Bytes) ((Bytes) sb).writeByte(index, ch); else throw new IllegalArgumentException("" + sb.getClass()); } @ForceInline public static 
void setLength(@NotNull Appendable sb, int newLength) { if (sb instanceof StringBuilder) ((StringBuilder) sb).setLength(newLength); else if (sb instanceof Bytes) ((Bytes) sb).readPosition(newLength); else throw new IllegalArgumentException("" + sb.getClass()); } public static void append(@NotNull Appendable sb, double value) { if (sb instanceof StringBuilder) ((StringBuilder) sb).append(value); else if (sb instanceof Bytes) ((Bytes) sb).append(value); else throw new IllegalArgumentException("" + sb.getClass()); } public static void append(@NotNull Appendable sb, long value) { if (sb instanceof StringBuilder) ((StringBuilder) sb).append(value); else if (sb instanceof Bytes) ((Bytes) sb).append(value); else throw new IllegalArgumentException("" + sb.getClass()); } public static <ACS extends Appendable & CharSequence> void append(ACS sb, String str) { try { sb.append(str); } catch (IOException e) { throw new AssertionError(e); } } public static boolean equals(Object o1, Object o2) { if (o1 == o2) return true; if (o1 instanceof CharSequence && o2 instanceof CharSequence) return StringUtils.isEqual((CharSequence) o1, (CharSequence) o2); return o1 != null && o1.equals(o2); } public static void appendTimeMillis(ByteStringAppender b, long timeInMS) { int hours = (int) (timeInMS / (60 * 60 * 1000)); if (hours > 99) { b.append(hours); // can have over 24 hours. 
} else { b.writeByte((byte) (hours / 10 + '0')); b.writeByte((byte) (hours % 10 + '0')); } b.writeByte((byte) ':'); int minutes = (int) ((timeInMS / (60 * 1000)) % 60); b.writeByte((byte) (minutes / 10 + '0')); b.writeByte((byte) (minutes % 10 + '0')); b.writeByte((byte) ':'); int seconds = (int) ((timeInMS / 1000) % 60); b.writeByte((byte) (seconds / 10 + '0')); b.writeByte((byte) (seconds % 10 + '0')); b.writeByte((byte) '.'); int millis = (int) (timeInMS % 1000); b.writeByte((byte) (millis / 100 + '0')); b.writeByte((byte) (millis / 10 % 10 + '0')); b.writeByte((byte) (millis % 10 + '0')); } public static boolean equalBytesAny(BytesStore b1, BytesStore b2, long remaining) { BytesStore bs1 = b1.bytesStore(); BytesStore bs2 = b2.bytesStore(); long i = 0; for (; i < remaining - 7; i++) { long l1 = bs1.readLong(b1.readPosition() + i); long l2 = bs2.readLong(b2.readPosition() + i); if (l1 != l2) return false; } if (i < remaining - 3) { int i1 = bs1.readInt(b1.readPosition() + i); int i2 = bs2.readInt(b2.readPosition() + i); if (i1 != i2) return false; i += 4; } for (; i < remaining; i++) { byte i1 = bs1.readByte(b1.readPosition() + i); byte i2 = bs2.readByte(b2.readPosition() + i); if (i1 != i2) return false; } return true; } public static void appendDateMillis(ByteStringAppender b, long timeInMS) { DateCache dateCache = dateCacheTL.get(); if (dateCache == null) { dateCacheTL.set(dateCache = new DateCache()); } long date = timeInMS / 86400000; if (dateCache.lastDay != date) { dateCache.lastDateStr = dateCache.dateFormat.format(new Date(timeInMS)).getBytes(StandardCharsets.ISO_8859_1); dateCache.lastDay = date; } else { assert dateCache.lastDateStr != null; } b.write(dateCache.lastDateStr); } static class DateCache { final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMdd"); private long lastDay = Long.MIN_VALUE; @Nullable private byte[] lastDateStr = null; DateCache() { dateFormat.setTimeZone(TimeZone.getTimeZone("GMT")); } } }
package net.spy.memcached; import java.io.IOException; import java.net.ConnectException; import java.net.InetSocketAddress; import java.net.SocketAddress; import java.nio.ByteBuffer; import java.nio.channels.SelectionKey; import java.nio.channels.Selector; import java.nio.channels.SocketChannel; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.IdentityHashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.NoSuchElementException; import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.CountDownLatch; import net.spy.SpyObject; import net.spy.memcached.ops.Operation; import net.spy.memcached.ops.OperationState; /** * Connection to a cluster of memcached servers. */ public final class MemcachedConnection extends SpyObject { // The number of empty selects we'll allow before assuming we may have // missed one and should check the current selectors. This generally // indicates a bug, but we'll check it nonetheless. private static final int DOUBLE_CHECK_EMPTY = 256; // The number of empty selects we'll allow before blowing up. It's too // easy to write a bug that causes it to loop uncontrollably. This helps // find those bugs and often works around them. private static final int EXCESSIVE_EMPTY = 0x1000000; // maximum amount of time to wait between reconnect attempts private static final long MAX_DELAY = 30000; private volatile boolean shutDown=false; // If true, get optimization will collapse multiple sequential get ops private boolean optimizeGets=true; private Selector selector=null; private final NodeLocator locator; private int emptySelects=0; // AddedQueue is used to track the QueueAttachments for which operations // have recently been queued. 
private final ConcurrentLinkedQueue<MemcachedNode> addedQueue; // reconnectQueue contains the attachments that need to be reconnected // The key is the time at which they are eligible for reconnect private final SortedMap<Long, MemcachedNode> reconnectQueue; /** * Construct a memcached connection. * * @param bufSize the size of the buffer used for reading from the server * @param f the factory that will provide an operation queue * @param a the addresses of the servers to connect to * * @throws IOException if a connection attempt fails early */ public MemcachedConnection(int bufSize, ConnectionFactory f, List<InetSocketAddress> a) throws IOException { reconnectQueue=new TreeMap<Long, MemcachedNode>(); addedQueue=new ConcurrentLinkedQueue<MemcachedNode>(); selector=Selector.open(); List<MemcachedNode> connections=new ArrayList<MemcachedNode>(a.size()); for(SocketAddress sa : a) { SocketChannel ch=SocketChannel.open(); ch.configureBlocking(false); MemcachedNode qa=f.createMemcachedNode(sa, ch, bufSize); int ops=0; if(ch.connect(sa)) { getLogger().info("Connected to %s immediately", qa); qa.connected(); assert ch.isConnected(); } else { getLogger().info("Added %s to connect queue", qa); ops=SelectionKey.OP_CONNECT; } qa.setSk(ch.register(selector, ops, qa)); assert ch.isConnected() || qa.getSk().interestOps() == SelectionKey.OP_CONNECT : "Not connected, and not wanting to connect"; connections.add(qa); } locator=f.createLocator(connections); } /** * Enable or disable get optimization. * * When enabled (default), multiple sequential gets are collapsed into one. 
*/ public void setGetOptimization(boolean to) { optimizeGets=to; } private boolean selectorsMakeSense() { for(MemcachedNode qa : locator.getAll()) { if(qa.getSk() != null && qa.getSk().isValid()) { if(qa.getChannel().isConnected()) { int sops=qa.getSk().interestOps(); int expected=0; if(qa.hasReadOp()) { expected |= SelectionKey.OP_READ; } if(qa.hasWriteOp()) { expected |= SelectionKey.OP_WRITE; } if(qa.getBytesRemainingToWrite() > 0) { expected |= SelectionKey.OP_WRITE; } assert sops == expected : "Invalid ops: " + qa + ", expected " + expected + ", got " + sops; } else { int sops=qa.getSk().interestOps(); assert sops == SelectionKey.OP_CONNECT : "Not connected, and not watching for connect: " + sops; } } } getLogger().debug("Checked the selectors."); return true; } /** * MemcachedClient calls this method to handle IO over the connections. */ @SuppressWarnings("unchecked") public void handleIO() throws IOException { if(shutDown) { throw new IOException("No IO while shut down"); } // Deal with all of the stuff that's been added, but may not be marked // writable. 
handleInputQueue(); getLogger().debug("Done dealing with queue."); long delay=0; if(!reconnectQueue.isEmpty()) { long now=System.currentTimeMillis(); long then=reconnectQueue.firstKey(); delay=Math.max(then-now, 1); } getLogger().debug("Selecting with delay of %sms", delay); assert selectorsMakeSense() : "Selectors don't make sense."; int selected=selector.select(delay); Set<SelectionKey> selectedKeys=selector.selectedKeys(); if(selectedKeys.isEmpty() && !shutDown) { getLogger().debug("No selectors ready, interrupted: " + Thread.interrupted()); if(++emptySelects > DOUBLE_CHECK_EMPTY) { for(SelectionKey sk : selector.keys()) { getLogger().info("%s has %s, interested in %s", sk, sk.readyOps(), sk.interestOps()); if(sk.readyOps() != 0) { getLogger().info("%s has a ready op, handling IO", sk); handleIO(sk); } else { queueReconnect((MemcachedNode)sk.attachment()); } } assert emptySelects < EXCESSIVE_EMPTY : "Too many empty selects"; } } else { getLogger().debug("Selected %d, selected %d keys", selected, selectedKeys.size()); emptySelects=0; for(SelectionKey sk : selectedKeys) { handleIO(sk); } // for each selector selectedKeys.clear(); } if(!shutDown && !reconnectQueue.isEmpty()) { attemptReconnects(); } } // Handle any requests that have been made against the client. private void handleInputQueue() { if(!addedQueue.isEmpty()) { getLogger().debug("Handling queue"); // If there's stuff in the added queue. Try to process it. Collection<MemcachedNode> toAdd=new HashSet<MemcachedNode>(); // Transfer the queue into a hashset. There are very likely more // additions than there are nodes. Collection<MemcachedNode> todo=new HashSet<MemcachedNode>(); try { MemcachedNode qa=null; while((qa=addedQueue.remove()) != null) { todo.add(qa); } } catch(NoSuchElementException e) { // Found everything } // Now process the queue. 
for(MemcachedNode qa : todo) { boolean readyForIO=false; if(qa.isActive()) { if(qa.getCurrentWriteOp() != null) { readyForIO=true; getLogger().debug("Handling queued write %s", qa); } } else { toAdd.add(qa); } qa.copyInputQueue(); if(readyForIO) { try { if(qa.getWbuf().hasRemaining()) { handleWrites(qa.getSk(), qa); } } catch(IOException e) { getLogger().warn("Exception handling write", e); queueReconnect(qa); } } qa.fixupOps(); } addedQueue.addAll(toAdd); } } // Handle IO for a specific selector. Any IOException will cause a // reconnect private void handleIO(SelectionKey sk) { MemcachedNode qa=(MemcachedNode)sk.attachment(); try { getLogger().debug( "Handling IO for: %s (r=%s, w=%s, c=%s, op=%s)", sk, sk.isReadable(), sk.isWritable(), sk.isConnectable(), sk.attachment()); if(sk.isConnectable()) { getLogger().info("Connection state changed for %s", sk); final SocketChannel channel=qa.getChannel(); if(channel.finishConnect()) { assert channel.isConnected() : "Not connected."; qa.connected(); addedQueue.offer(qa); if(qa.getWbuf().hasRemaining()) { handleWrites(sk, qa); } } else { assert !channel.isConnected() : "connected"; } } else { if(sk.isReadable()) { handleReads(sk, qa); } if(sk.isWritable()) { handleWrites(sk, qa); } } } catch(Exception e) { // Various errors occur on Linux that wind up here. However, any // particular error processing an item should simply cause us to // reconnect to the server. 
getLogger().info("Reconnecting due to exception on %s", qa, e); queueReconnect(qa); } qa.fixupOps(); } private void handleWrites(SelectionKey sk, MemcachedNode qa) throws IOException { qa.fillWriteBuffer(optimizeGets); boolean canWriteMore=qa.getBytesRemainingToWrite() > 0; while(canWriteMore) { int wrote=qa.writeSome(); qa.fillWriteBuffer(optimizeGets); canWriteMore = wrote > 0 && qa.getBytesRemainingToWrite() > 0; } } private void handleReads(SelectionKey sk, MemcachedNode qa) throws IOException { Operation currentOp = qa.getCurrentReadOp(); ByteBuffer rbuf=qa.getRbuf(); final SocketChannel channel = qa.getChannel(); int read=channel.read(rbuf); while(read > 0) { getLogger().debug("Read %d bytes", read); rbuf.flip(); while(rbuf.remaining() > 0) { if(currentOp == null) { throw new IllegalStateException("No read operation."); } currentOp.readFromBuffer(rbuf); if(currentOp.getState() == OperationState.COMPLETE) { getLogger().debug( "Completed read op: %s and giving the next %d bytes", currentOp, rbuf.remaining()); Operation op=qa.removeCurrentReadOp(); assert op == currentOp : "Expected to pop " + currentOp + " got " + op; currentOp=qa.getCurrentReadOp(); } } rbuf.clear(); read=channel.read(rbuf); } } // Make a debug string out of the given buffer's values static String dbgBuffer(ByteBuffer b, int size) { StringBuilder sb=new StringBuilder(); byte[] bytes=b.array(); for(int i=0; i<size; i++) { char ch=(char)bytes[i]; if(Character.isWhitespace(ch) || Character.isLetterOrDigit(ch)) { sb.append(ch); } else { sb.append("\\x"); sb.append(Integer.toHexString(bytes[i] & 0xff)); } } return sb.toString(); } private void queueReconnect(MemcachedNode qa) { if(!shutDown) { getLogger().warn("Closing, and reopening %s, attempt %d.", qa, qa.getReconnectCount()); if(qa.getSk() != null) { qa.getSk().cancel(); assert !qa.getSk().isValid() : "Cancelled selection key is valid"; } qa.reconnecting(); try { if(qa.getChannel() != null && qa.getChannel().socket() != null) { 
qa.getChannel().socket().close(); } else { getLogger().info("The channel or socket was null for %s", qa); } } catch(IOException e) { getLogger().warn("IOException trying to close a socket", e); } qa.setChannel(null); long delay=Math.min((100*qa.getReconnectCount()) ^ 2, MAX_DELAY); reconnectQueue.put(System.currentTimeMillis() + delay, qa); // Need to do a little queue management. qa.setupResend(); } } private void attemptReconnects() throws IOException { final long now=System.currentTimeMillis(); final Map<MemcachedNode, Boolean> seen= new IdentityHashMap<MemcachedNode, Boolean>(); final List<MemcachedNode> rereQueue=new ArrayList<MemcachedNode>(); for(Iterator<MemcachedNode> i= reconnectQueue.headMap(now).values().iterator(); i.hasNext();) { final MemcachedNode qa=i.next(); i.remove(); try { if(!seen.containsKey(qa)) { seen.put(qa, Boolean.TRUE); getLogger().info("Reconnecting %s", qa); final SocketChannel ch=SocketChannel.open(); ch.configureBlocking(false); int ops=0; if(ch.connect(qa.getSocketAddress())) { getLogger().info("Immediately reconnected to %s", qa); assert ch.isConnected(); } else { ops=SelectionKey.OP_CONNECT; } qa.registerChannel(ch, ch.register(selector, ops, qa)); assert qa.getChannel() == ch : "Channel was lost."; } else { getLogger().debug( "Skipping duplicate reconnect request for %s", qa); } } catch(ConnectException e) { getLogger().warn("Error on reconnect", e); rereQueue.add(qa); } } // Requeue any fast-failed connects. for(MemcachedNode n : rereQueue) { queueReconnect(n); } } /** * Get the node locator used by this connection. */ NodeLocator getLocator() { return locator; } /** * Add an operation to the given connection. * * @param which the connection offset * @param o the operation */ public void addOperation(final String key, final Operation o) { MemcachedNode placeIn=null; MemcachedNode primary = locator.getPrimary(key); if(primary.isActive()) { placeIn=primary; } else { // Look for another node in sequence that is ready. 
for(Iterator<MemcachedNode> i=locator.getSequence(key); placeIn == null && i.hasNext(); ) { MemcachedNode n=i.next(); if(n.isActive()) { placeIn=n; } } // If we didn't find an active node, queue it in the primary node // and wait for it to come back online. if(placeIn == null) { placeIn = primary; } } assert placeIn != null : "No node found for key " + key; addOperation(placeIn, o); } public void addOperation(final MemcachedNode node, final Operation o) { o.initialize(); node.addOp(o); addedQueue.offer(node); Selector s=selector.wakeup(); assert s == selector : "Wakeup returned the wrong selector."; getLogger().debug("Added %s to %s", o, node); } public void addOperations(final Map<MemcachedNode, Operation> ops) { for(Map.Entry<MemcachedNode, Operation> me : ops.entrySet()) { final MemcachedNode node=me.getKey(); Operation o=me.getValue(); o.initialize(); node.addOp(o); addedQueue.offer(node); } Selector s=selector.wakeup(); assert s == selector : "Wakeup returned the wrong selector."; } /** * Broadcast an operation to all nodes. */ public CountDownLatch broadcastOperation(final BroadcastOpFactory of) { final CountDownLatch latch=new CountDownLatch(locator.getAll().size()); for(MemcachedNode node : locator.getAll()) { Operation op = of.newOp(node, latch); op.initialize(); node.addOp(op); addedQueue.offer(node); } Selector s=selector.wakeup(); assert s == selector : "Wakeup returned the wrong selector."; return latch; } /** * Shut down all of the connections. 
*/ public void shutdown() throws IOException { shutDown=true; Selector s=selector.wakeup(); assert s == selector : "Wakeup returned the wrong selector."; for(MemcachedNode qa : locator.getAll()) { if(qa.getChannel() != null) { qa.getChannel().close(); qa.setSk(null); if(qa.getBytesRemainingToWrite() > 0) { getLogger().warn( "Shut down with %d bytes remaining to write", qa.getBytesRemainingToWrite()); } getLogger().debug("Shut down channel %s", qa.getChannel()); } } selector.close(); getLogger().debug("Shut down selector %s", selector); } @Override public String toString() { StringBuilder sb=new StringBuilder(); sb.append("{MemcachedConnection to"); for(MemcachedNode qa : locator.getAll()) { sb.append(" "); sb.append(qa.getSocketAddress()); } sb.append("}"); return sb.toString(); } }
package no.dusken.momus.config; import java.util.Arrays; import java.util.Collections; import java.util.Timer; import org.apache.commons.httpclient.HttpClient; import org.apache.commons.httpclient.MultiThreadedHttpConnectionManager; import org.apache.velocity.app.VelocityEngine; import org.opensaml.saml2.metadata.provider.HTTPMetadataProvider; import org.opensaml.xml.parse.StaticBasicParserPool; import org.opensaml.xml.parse.XMLParserException; import org.springframework.context.annotation.*; import org.springframework.core.env.Environment; import org.springframework.security.authentication.AuthenticationManager; import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder; import org.springframework.security.config.annotation.method.configuration.EnableGlobalMethodSecurity; import org.springframework.security.config.annotation.web.builders.HttpSecurity; import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity; import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter; import org.springframework.security.saml.SAMLAuthenticationProvider; import org.springframework.security.saml.SAMLBootstrap; import org.springframework.security.saml.SAMLEntryPoint; import org.springframework.security.saml.SAMLLogoutFilter; import org.springframework.security.saml.SAMLLogoutProcessingFilter; import org.springframework.security.saml.SAMLProcessingFilter; import org.springframework.security.saml.SAMLWebSSOHoKProcessingFilter; import org.springframework.security.saml.context.SAMLContextProviderLB; import org.springframework.security.saml.key.EmptyKeyManager; import org.springframework.security.saml.key.KeyManager; import org.springframework.security.saml.log.SAMLDefaultLogger; import org.springframework.security.saml.metadata.CachingMetadataManager; import org.springframework.security.saml.metadata.ExtendedMetadata; import 
org.springframework.security.saml.metadata.ExtendedMetadataDelegate; import org.springframework.security.saml.metadata.MetadataDisplayFilter; import org.springframework.security.saml.metadata.MetadataGenerator; import org.springframework.security.saml.metadata.MetadataGeneratorFilter; import org.springframework.security.saml.parser.ParserPoolHolder; import org.springframework.security.saml.processor.HTTPArtifactBinding; import org.springframework.security.saml.processor.HTTPPAOS11Binding; import org.springframework.security.saml.processor.HTTPPostBinding; import org.springframework.security.saml.processor.HTTPRedirectDeflateBinding; import org.springframework.security.saml.processor.HTTPSOAP11Binding; import org.springframework.security.saml.processor.SAMLProcessorImpl; import org.springframework.security.saml.util.VelocityFactory; import org.springframework.security.saml.websso.ArtifactResolutionProfileImpl; import org.springframework.security.saml.websso.SingleLogoutProfileImpl; import org.springframework.security.saml.websso.WebSSOProfileConsumer; import org.springframework.security.saml.websso.WebSSOProfileConsumerHoKImpl; import org.springframework.security.saml.websso.WebSSOProfileConsumerImpl; import org.springframework.security.saml.websso.WebSSOProfileECPImpl; import org.springframework.security.saml.websso.WebSSOProfileImpl; import org.springframework.security.saml.websso.WebSSOProfileOptions; import org.springframework.security.web.DefaultSecurityFilterChain; import org.springframework.security.web.FilterChainProxy; import org.springframework.security.web.access.channel.ChannelProcessingFilter; import org.springframework.security.web.authentication.SavedRequestAwareAuthenticationSuccessHandler; import org.springframework.security.web.authentication.SimpleUrlAuthenticationFailureHandler; import org.springframework.security.web.authentication.logout.LogoutHandler; import org.springframework.security.web.authentication.logout.SecurityContextLogoutHandler; 
import org.springframework.security.web.authentication.logout.SimpleUrlLogoutSuccessHandler; import org.springframework.security.web.authentication.www.BasicAuthenticationFilter; import org.springframework.security.web.util.matcher.AntPathRequestMatcher; import no.dusken.momus.authentication.UserDetailsService; @Configuration @EnableWebSecurity @EnableGlobalMethodSecurity(prePostEnabled = true) @ComponentScan("org.springframework.security.saml") @PropertySource(value = {"classpath:momus.properties","classpath:local.properties"}, ignoreResourceNotFound = true) @Profile("!noAuth") public class SecurityConfig extends WebSecurityConfigurerAdapter { private final Environment env; private final UserDetailsService userDetailsService; public SecurityConfig(Environment env, UserDetailsService userDetailsService) { this.env = env; this.userDetailsService = userDetailsService; } @Override public void configure(HttpSecurity http) throws Exception{ http .csrf().disable() .addFilterBefore(metadataGeneratorFilter(), ChannelProcessingFilter.class) .addFilterAfter(samlFilter(), BasicAuthenticationFilter.class) .authorizeRequests()
package org.animotron.graph;

import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.LinkedList;
import java.util.List;
import java.util.Stack;
import java.util.StringTokenizer;

import org.animotron.Statement;
import org.animotron.Statements;
import org.animotron.instruction.Instruction;
import org.animotron.instruction.InstructionContainer;
import org.animotron.instruction.ml.ELEMENT;
import org.animotron.instruction.ml.TEXT;
import org.animotron.operator.Evaluable;
import org.animotron.operator.External;
import org.animotron.operator.Operator;
import org.animotron.operator.Property;
import org.animotron.operator.Relation;
import org.animotron.operator.THE;
import org.exist.security.MessageDigester;
import org.neo4j.graphdb.Direction;
import org.neo4j.graphdb.Node;
import org.neo4j.graphdb.Relationship;
import org.neo4j.graphdb.Transaction;

/**
 * SAX-style builder that turns a stream of element/text events into an
 * Animo graph.  Each open element pushes a work item onto {@code statements};
 * closing an element pops it, builds the corresponding graph node (via a
 * content-hash cache keyed by SHA-256), and attaches it to its parent's
 * child list.
 *
 * Work item layout (Object[7]):
 *   [0] Statement resolved for the element,
 *   [1] element name (String),
 *   [2] running MessageDigest over this subtree,
 *   [3] List&lt;Node&gt; of already-built children,
 *   [4] THE node (only when [0] is a THE statement),
 *   [5] Boolean "external" flag (inherited from parent),
 *   [6] lazily created scratch Node (or null).
 *
 * NOTE(review): not thread-safe -- a single builder holds one open
 * transaction between startDocument() and endDocument().
 *
 * @author <a href="mailto:shabanovd@gmail.com">Dmitriy Shabanov</a>
 *
 */
public class AnimoGraphBuilder {

	// Relationship of the most recent THE; exposed via getTHE().
	private Relationship the = null;

	// Digest algorithm used for the content cache keys.
	private static final String CACHE_ALGOTHIM = "SHA-256";

	// Transaction opened in startDocument() and closed in endDocument().
	private Transaction tx;

	// Stack of per-element work items; see class javadoc for the layout.
	private Stack<Object[]> statements;

	public Relationship getTHE() {
		return this.the;
	}

	// NOTE(review): private and never called from inside this class.
	private void setTHE(Relationship the) {
		this.the = the;
	}

	/** Fresh SHA-256 digest; the algorithm is guaranteed by the JDK. */
	private MessageDigest md() {
		try {
			return MessageDigest.getInstance(CACHE_ALGOTHIM);
		} catch (NoSuchAlgorithmException e) {
			//can't be, but throw runtime error
			throw new RuntimeException(e);
		}
	}

	/** Reset builder state and open the graph transaction. */
	public void startDocument(){
		statements = new Stack<Object[]>();
		tx = AnimoGraph.beginTx();
		the = null;
	};

	/**
	 * Open an element: resolve its Statement from the namespace (falling
	 * back to a generic ELEMENT), seed the subtree digest with ns+name, and
	 * push a new work item.
	 */
	public void startElement(String ns, String name) {
		Statement statement = Statements.namespace(ns);
		if (statement instanceof InstructionContainer) {
			statement = ((InstructionContainer) statement).getInstruction(name);
		}
		if (statement == null) {
			statement = ELEMENT.getInstance();
		}
		// Local 'the' shadows the field of the same name (intentional?).
		Node the = null;
		//move the instance to GC & create new
		if (statement instanceof THE){
			the = ((THE) statement).build(AnimoGraph.THE, name);
		}
		// External-ness is inherited: true if this statement is External or
		// any enclosing element already was.
		boolean external = statement instanceof External;
		if (!statements.empty())
			external |= (Boolean) statements.peek()[5];
		MessageDigest md = md();
		md.update(ns.getBytes());
		md.update(name.getBytes());
		Object[] item = {statement, name, md, new LinkedList<Node>(), the, external, null};
		statements.push(item);
	}

	/**
	 * Close an element: pop its work item and build the graph node.
	 * THE elements attach children directly; Property/Relation statements
	 * with a parent are built in place (not cachable); everything else is
	 * looked up / created in the CACHE index by its subtree hash.
	 */
	public void endElement(String ns, String name) {
		try {
			Object[] currentItem = statements.pop();
			Statement currentStatement = (Statement) currentItem[0];
			if (currentStatement instanceof THE){
				Node node = (Node) currentItem[4];
				addChildren(node, (List<Node>) currentItem[3]);
				return;
			}
			boolean isProperty = currentStatement instanceof Property;
			boolean isRelation = currentStatement instanceof Relation;
			// Properties/relations nested in a parent are built directly on
			// the parent rather than through the hash cache.
			boolean isCachable = !((isProperty || isRelation) && !statements.empty());
			if (!isCachable){
				Object[] parentItem = statements.peek();
				Statement parentStatement = (Statement) parentItem[0];
				Node tmp = (Node) parentItem[6];
				boolean isTHE = parentStatement instanceof THE;
				if (isTHE) {
					tmp = (Node) parentItem[4];
				} else {
					// Lazily create the parent's scratch node on first use.
					if (tmp == null) {
						tmp = AnimoGraph.createNode();
						parentItem[6] = tmp;
					}
				}
				Operator operator = (Operator) currentStatement;
				Node res = operator.build(tmp, (String) currentItem[1]);
				if (isProperty)
					addChildren(res, (List<Node>) currentItem[3]);
				if (isTHE)
					return;
			}
			MessageDigest md = (MessageDigest) currentItem[2];
			byte [] digest = md.digest();
			String hash = MessageDigester.byteArrayToHex(digest);
			// Local 'the' shadows the field of the same name.
			THE the = THE.getInstance();
			if (isCachable){
				// Reuse an identical subtree if it was built before.
				Node cache = the.node(AnimoGraph.CACHE, hash);
				if (cache == null) {
					cache = the.create(AnimoGraph.CACHE, hash);
					if (currentStatement instanceof Operator) {
						Operator operator = (Operator) currentStatement;
						Node tmp = (Node) currentItem[6];
						Node node = tmp != null ? operator.build(cache, tmp, name) : operator.build(cache, name);
						addChildren(node, (List<Node>) currentItem[3]);
					} else if (currentStatement instanceof ELEMENT) {
						ELEMENT element = ELEMENT.getInstance();
						Node tmp = (Node) currentItem[6];
						Node node = tmp != null ? element.build(cache, tmp, ns, name) : element.build(cache, ns, name);
						addChildren(node, (List<Node>) currentItem[3]);
					} else {
						Instruction instruction = (Instruction) currentStatement;
						Node tmp = (Node) currentItem[6];
						Node node = tmp != null ? instruction.build(cache, tmp) : instruction.build(cache);
						addChildren(node, (List<Node>) currentItem[3]);
					}
					boolean external = false;
					if (!statements.empty()) {
						external = (Boolean) statements.peek()[5];
					}
					// Evaluable, non-external subtrees are scheduled for
					// calculation.
					if (currentStatement instanceof Evaluable && !external){
						AnimoGraph.CALC.createRelationshipTo(cache, RelationshipTypes.CALCULATE);
					}
				}
				if (!statements.empty()) {
					((List<Node>) statements.peek()[3]).add(cache);
				}
			}
			// Fold this subtree's digest into the parent's digest.
			if (!statements.empty()) {
				((MessageDigest) statements.peek()[2]).update(digest);
			}
		} catch (Exception e){
			// NOTE(review): failure is only printed and the transaction is
			// finished here without tx.success(); endDocument() will then
			// call finish() again -- confirm this double-finish is intended.
			e.printStackTrace(System.out);
			tx.finish();
		}
	}

	/**
	 * Attributes are currently ignored; the commented-out code below once
	 * folded them into the parent's digest.
	 */
	public void attribute(String ns, String name, String value) {
		return;
//		try {
//			MessageDigest md = CACHEStack.peek();
//			//CACHE-function depend on namespace, name & value
//			md.update(ns.getBytes());
//			md.update(name.getBytes());
//			md.update(value.getBytes());
//		} catch (Exception e){
//			tx.finish();
	}

	/**
	 * Handle a text node: whitespace-normalize it, hash it, and reuse or
	 * create a cached TEXT node.  The result is also registered with the
	 * enclosing element's digest and child list.
	 *
	 * @return the cached text node, or null for empty/failed input
	 */
	public Node characters(String text) {
		// Collapse runs of whitespace to single spaces.
		StringBuilder buf = new StringBuilder();
		if (text.length() > 0) {
			StringTokenizer tok = new StringTokenizer(text);
			while (tok.hasMoreTokens()) {
				buf.append(tok.nextToken());
				if (tok.hasMoreTokens())
					buf.append(' ');
			}
		}
		// NOTE: braceless if guarding the whole try block below.
		if (buf.length() > 0)
		try {
			String value = buf.toString();
			MessageDigest md = md();
			md.update(value.getBytes());
			byte[] digest = md.digest();
			String hash = MessageDigester.byteArrayToHex(digest);
			THE the = THE.getInstance();
			Node cache = the.node(AnimoGraph.CACHE, hash);
			if (cache == null){
				cache = the.create(AnimoGraph.CACHE, hash);
				TEXT.getInstance().build(cache, value);
			}
			if (!statements.empty()) {
				Object[] item = statements.peek();
				((MessageDigest) item[2]).update(digest);
				((List<Node>) item[3]).add(cache);
			}
			return cache;
		} catch (Exception e){
			// NOTE(review): same swallow-and-finish pattern as endElement.
			e.printStackTrace(System.out);
			tx.finish();
		}
		return null;
	}

	/** CDATA is treated as ordinary text; the CDATA wrapper is unused. */
	public void cdata (String text) {
		Node node = characters(text);
		if (node != null) {
			//CDATA.getInstance().build(parent, node);
		}
	}

	/** Commit and close the transaction opened in startDocument(). */
	public void endDocument(){
		tx.success();
		tx.finish();
	}

	/**
	 * Re-link each child's outgoing relationships so that they also hang off
	 * the given node (the child node itself is not connected directly).
	 */
	private void addChildren(Node node, List<Node> children) {
		for (Node n : children) {
			for (Relationship r : n.getRelationships(Direction.OUTGOING)){
				node.createRelationshipTo(r.getEndNode(), r.getType());
			}
		}
	}
}
package org.arachb.owlbuilder.lib; import java.sql.SQLException; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import org.apache.log4j.Logger; import org.arachb.arachadmin.AbstractConnection; import org.arachb.arachadmin.PElementBean; import org.arachb.arachadmin.ParticipantBean; import org.arachb.arachadmin.PropertyBean; import org.arachb.arachadmin.TermBean; import org.arachb.owlbuilder.Owlbuilder; import org.semanticweb.owlapi.model.AddAxiom; import org.semanticweb.owlapi.model.IRI; import org.semanticweb.owlapi.model.OWLAnnotationAssertionAxiom; import org.semanticweb.owlapi.model.OWLClass; import org.semanticweb.owlapi.model.OWLClassAssertionAxiom; import org.semanticweb.owlapi.model.OWLClassAxiom; import org.semanticweb.owlapi.model.OWLClassExpression; import org.semanticweb.owlapi.model.OWLDataFactory; import org.semanticweb.owlapi.model.OWLIndividual; import org.semanticweb.owlapi.model.OWLObject; import org.semanticweb.owlapi.model.OWLObjectProperty; import org.semanticweb.owlapi.model.OWLObjectPropertyAssertionAxiom; import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.OWLOntologyManager; public class Participant implements GeneratingEntity{ final static String BADTAXQuantifiedParticipant = "Term without IRI referenced as participant taxon: participant QuantifiedParticipantxon id = %s"; final static String BADANATOMYIRI = "Term without IRQuantifiedParticipantd as participant anatomy: participant id = %s; anatomy id = %s"; final static String BADSUBSTRATEIRI = "Term without IRI referenced as participant substrate; participant id = %s; substrate id = %s"; private static Logger log = Logger.getLogger(Participant.class); private final ParticipantBean bean; final Set<ParticipantElement> elements = new HashSet<ParticipantElement>(); private ParticipantElement headElement; private PropertyTerm property; public Participant(ParticipantBean b){ bean = b; property = new 
PropertyTerm(PropertyBean.getCached(b.getProperty())); headElement = ParticipantElement.getElement(PElementBean.getCached(bean.getHeadElement())); } /** * Utility for making Participant Sets from sets of beans */ public static Set<Participant> wrapSet(Set<ParticipantBean> bset){ final Set<Participant>result = new HashSet<Participant>(); for(ParticipantBean b : bset){ result.add(new Participant(b)); } return result; } final static String BADELEMENTMSG = "head Element generated neither a class or a individual: %s"; @Override public OWLObject generateOWL(Owlbuilder builder, Map<String,OWLObject> owlElements) throws Exception{ OWLObject headObject = headElement.generateOWL(builder, owlElements); Set <Integer>children = headElement.getChildren(); switch (children.size()){ case 0: if (headObject instanceof OWLClassExpression){ return generateNoDependentOWL(builder, property, headObject,owlElements); } else if (headObject instanceof OWLIndividual){ return generateNoDependentOWL(builder, property, headObject,owlElements); } else { throw new RuntimeException(String.format(BADELEMENTMSG,headElement)); } case 1: Integer childIndex = children.iterator().next(); ParticipantElement child = headElement.getChildElement(childIndex); PropertyTerm childProperty = headElement.getChildProperty(childIndex); if (headObject instanceof OWLClassExpression){ OWLObject childObject = generateRestrictionClass(builder, owlElements, child, childProperty); return generateDependentOWLClass(builder, childProperty, (OWLClassExpression)headObject, childObject, owlElements); } else if (headObject instanceof OWLIndividual){ OWLObject childObject = generateRestrictionClass(builder, owlElements, child, childProperty); return generateDependentOWLIndividual(builder, childProperty, (OWLIndividual)headObject, childObject, owlElements); } else { throw new RuntimeException(String.format(BADELEMENTMSG,headElement)); } default: throw new RuntimeException(String.format("Didn't expect %d 
children",children.size())); } } final Map<String,OWLObject> defaultElementTable = new HashMap<String,OWLObject>(); @Override public OWLObject generateOWL(Owlbuilder b) throws Exception{ defaultElementTable.clear(); OWLObject result = null; try{ result = generateOWL(b,defaultElementTable); } finally{ defaultElementTable.clear(); } return result; } /** * @param factory * @param headProperty * @param headObject * @param childObject * @param names * @return */ private OWLObject generateDependentOWLClass(Owlbuilder builder, PropertyTerm childProperty, OWLClassExpression headClassExpr, OWLObject childObject, Map<String,OWLObject> names) throws Exception{ final OWLDataFactory factory = builder.getDataFactory(); OWLObjectProperty elementProperty = (OWLObjectProperty)childProperty.generateOWL(builder,names); if (childObject != null){ if (childObject instanceof OWLClassExpression){ final OWLClassExpression childClass = (OWLClassExpression)childObject; OWLClassExpression intersect = factory.getOWLObjectIntersectionOf(headClassExpr,childClass); OWLClassExpression propertyRestriction = factory.getOWLObjectSomeValuesFrom(elementProperty,intersect); log.info("Generated Property restriction(2): " + propertyRestriction); return propertyRestriction; } else if (childObject instanceof OWLIndividual){ log.info("Individual child of class"); } else { throw new RuntimeException("child is neither a class expression or individual: " + childObject); } } OWLClassExpression propertyRestriction = factory.getOWLObjectSomeValuesFrom(elementProperty,headClassExpr); log.info("Generated Property restriction: " + propertyRestriction); return propertyRestriction; } private OWLObject generateDependentOWLIndividual(Owlbuilder builder, PropertyTerm childProperty, OWLIndividual headInd, OWLObject childObject, Map<String,OWLObject> names) throws Exception{ final OWLDataFactory factory = builder.getDataFactory(); OWLObjectProperty elementProperty = 
(OWLObjectProperty)childProperty.generateOWL(builder,names); log.info("Generated Individual reference: " + headInd); if (childObject != null){ if (childObject instanceof OWLIndividual){ OWLIndividual childIndividual = (OWLIndividual)childObject; OWLObjectPropertyAssertionAxiom assertion = factory.getOWLObjectPropertyAssertionAxiom(elementProperty, headInd, childIndividual); // Finally, add the axiom to our ontology and save AddAxiom addAxiomChange = new AddAxiom(builder.getTarget(), assertion); builder.getOntologyManager().applyChange(addAxiomChange); } else { //child is class expression? log.info("class child of individual"); } } return headInd; //TODO finish implementing individual case } /** * @param builder * @param headProperty * @param headObject * @param names * @return */ private OWLObject generateNoDependentOWL(Owlbuilder builder, PropertyTerm headProperty, OWLObject headObject, Map<String,OWLObject> names) throws Exception { final OWLDataFactory factory = builder.getDataFactory(); final OWLObjectProperty elementProperty = (OWLObjectProperty)headProperty.generateOWL(builder,names); if (headObject instanceof OWLClassExpression){ final OWLClassExpression headClass = (OWLClassExpression)headObject; OWLClassExpression propertyRestriction = factory.getOWLObjectSomeValuesFrom(elementProperty,headClass); log.info("Generated Property restriction: " + propertyRestriction); return propertyRestriction; } else if (headObject instanceof OWLIndividual){ log.info("Generated Individual reference: " + headObject); OWLIndividual eventIndividual = factory.getOWLAnonymousIndividual(); //OWLClassAssertionAxiom clAssertion = factory.getOWLClassAssertionAxiom(headObject, eventIndividual); //builder.getOntologyManager().addAxiom(builder.getTarget(), clAssertion); OWLIndividual headIndividual = (OWLIndividual)headObject; OWLObjectPropertyAssertionAxiom assertion = factory.getOWLObjectPropertyAssertionAxiom(elementProperty, eventIndividual, headIndividual); // Finally, add the 
axiom to our ontology and save AddAxiom addAxiomChange = new AddAxiom(builder.getTarget(), assertion); builder.getOntologyManager().applyChange(addAxiomChange); return headObject; //TODO finish implementing individual case } else { throw new RuntimeException("Bad head object in participant: " + headObject); } } /** * @param builder * @param owlElements * @param factory * @param childBean * @return */ private OWLObject generateRestrictionClass(Owlbuilder builder, final Map<String, OWLObject> owlElements, ParticipantElement pe, PropertyTerm pt) throws Exception{ final OWLDataFactory factory = builder.getDataFactory(); //PropertyBean childProp = peb.getParentProperty(parentIndex); IRI childPropIRI = IRI.create(pt.getSourceId()); OWLObjectProperty childProperty = factory.getOWLObjectProperty(childPropIRI); OWLObject childElement = pe.generateOWL(builder, owlElements); int s = pe.getChildren().size(); if (s >0){ log.info("children size: " + s); } if (childElement instanceof OWLClassExpression){ final OWLClassExpression childClass = (OWLClassExpression)childElement; OWLClassExpression childPropertyRestriction = factory.getOWLObjectSomeValuesFrom(childProperty, childClass); log.info("Generated (child) Property restriction: " + childPropertyRestriction); return childPropertyRestriction; } else if (childElement instanceof OWLIndividual){ return childElement; } else { throw new RuntimeException("Bad child element: " + childElement); } } public OWLClassExpression generateTermOWL(TermBean tb, Owlbuilder builder, Map<String, OWLObject> elements){ final OWLDataFactory factory = builder.getDataFactory(); IRI termIRI; try { String termString = tb.checkIRIString(builder.getIRIManager()); if (elements.containsKey(termString)){ return (OWLClassExpression)elements.get(termString); } termIRI = IRI.create(termString); log.info("Creating OWL class: " + termIRI); OWLClass termClass = factory.getOWLClass(termIRI); builder.initializeMiscTermAndParents(termClass); elements.put(termString, 
termClass); return termClass; } catch (SQLException e) { // TODO Auto-generated catch block e.printStackTrace(); return null; } } //TODO should these be merged? public void loadElements(AbstractConnection c) throws Exception{ log.info("In load elements"); // special handling for head participation property if (bean.getProperty() > 0){ property = new PropertyTerm(c.getProperty(bean.getProperty())); } else { throw new IllegalStateException("No participantProperty specified"); } if (bean.getElements().isEmpty()){ throw new RuntimeException("bean " + bean.getId() + " has no elements"); } log.info("Should be listing elements here"); for (Integer id : bean.getElements()){ c.getPElement(id).cache(); final ParticipantElement pe = ParticipantElement.getElement(PElementBean.getCached(id)); log.info(" loading element" + pe); log.info(" id is" + pe.getId()); log.info(" entity is " + pe.entity); elements.add(pe); } for (ParticipantElement pe : elements){ pe.resolveLinks(c); } } public void resolveElements() throws Exception{ assert elements.size() > 0 : "Participant: " + bean.getId() + " has no elements"; log.debug(" pb: " + this.getId() + " element count: " + elements.size()); assert bean.getHeadElement() > 0; assert bean.getProperty() > 0; assert PElementBean.getCached(bean.getHeadElement()) != null; //assert elements.contains(bean.getHeadElement()) : "Participant: " + bean.getId() + " has unregistered head element: " + bean.getHeadElement(); PElementBean head = PElementBean.getCached(bean.getHeadElement()); headElement = ParticipantElement.getElement(head); } void processTaxon(Owlbuilder builder,OWLClass taxon){ final OWLOntologyManager manager = builder.getOntologyManager(); final OWLOntology merged = builder.getMergedSources(); final OWLOntology extracted = builder.getTarget(); if (true){ //add appropriate when figured out //log.info("Need to add taxon: " + taxon.getIRI()); //log.info("Defining Axioms"); manager.addAxioms(extracted, 
merged.getAxioms(taxon,org.semanticweb.owlapi.model.parameters.Imports.INCLUDED)); //log.info("Annotations"); Set<OWLAnnotationAssertionAxiom> taxonAnnotations = merged.getAnnotationAssertionAxioms(taxon.getIRI()); for (OWLAnnotationAssertionAxiom a : taxonAnnotations){ //log.info(" Annotation Axiom: " + a.toString()); if (a.getAnnotation().getProperty().isLabel()){ log.debug("Label is " + a.getAnnotation().getValue().toString()); manager.addAxiom(extracted, a); } } } } public void processAnatomy(Owlbuilder builder, OWLClass anatomyClass) { final OWLOntologyManager manager = builder.getOntologyManager(); final OWLOntology merged = builder.getMergedSources(); final OWLOntology extracted = builder.getTarget(); if (true){ log.info("Need to add anatomy: " + anatomyClass.getIRI()); Set<OWLClassAxiom> anatAxioms = merged.getAxioms(anatomyClass,org.semanticweb.owlapi.model.parameters.Imports.INCLUDED); manager.addAxioms(extracted, anatAxioms); Set<OWLAnnotationAssertionAxiom> anatAnnotations = merged.getAnnotationAssertionAxioms(anatomyClass.getIRI()); for (OWLAnnotationAssertionAxiom a : anatAnnotations){ //log.info(" Annotation Axiom: " + a.toString()); if (a.getAnnotation().getProperty().isLabel()){ log.info("Label is " + a.getAnnotation().getValue().toString()); manager.addAxiom(extracted, a); } } } builder.initializeMiscTermAndParents(anatomyClass); } public void processSubstrate(Owlbuilder builder, OWLClass substrateClass) { builder.initializeMiscTermAndParents(substrateClass); } // /** // * // * @param builder // * @param iri // */ // void processParticipantSubstrateForIndividual(Owlbuilder builder, IRI iri){ // final OWLOntology target = builder.getTarget(); // final OWLOntology merged = builder.getMergedSources(); // final OWLDataFactory factory = builder.getDataFactory(); // boolean substrate_duplicate = target.containsClassInSignature(iri); // if (!substrate_duplicate){ // boolean substrate_exists = merged.containsClassInSignature(iri); // if 
(substrate_exists){ // log.info("Found class in signature of merged ontology for: " + iri); // OWLClass substrateClass = factory.getOWLClass(iri); // processSubstrate(builder,substrateClass); // /** // * @param builder // * @param factory // * @param target // * @param manager // * @param partofProperty // * @param ind // */ // private void generateOWLforAnatomy(Owlbuilder builder, final OWLIndividual ind) { // final OWLDataFactory factory = builder.getDataFactory(); // OWLOntology target = builder.getTarget(); // OWLOntologyManager manager = builder.getOntologyManager(); // final OWLObjectProperty partofProperty = factory.getOWLObjectProperty(Vocabulary.partOfProperty); // if (bean.getAnatomyIri() != null){ // final OWLClass anatomyClass =processParticipantAnatomy(builder,IRI.create(bean.getAnatomyIri())); // log.info("anatomy is " + anatomyClass); // // and the taxon (anatomy w/o taxon should be flagged as a curation error // if (bean.getTaxon() != 0){ // if (bean.getTaxonIri() != null){ // // This will require some more attention - curation should be able to // // label the organisms because different parts of the same organism or // // the same part will be mentioned multiple times - this is why arachb // // uses individuals in the first place // log.info("taxon is " + bean.getTaxonIri()); // OWLIndividual organism = factory.getOWLAnonymousIndividual(); // OWLClass taxon = processParticipantTaxon(builder,IRI.create(bean.getTaxonIri())); // OWLClassAssertionAxiom taxonAssertion = factory.getOWLClassAssertionAxiom(taxon,organism); // log.warn("assert " + organism + " is " + taxon); // manager.addAxiom(target, taxonAssertion); // OWLObjectPropertyAssertionAxiom partofAssertion = // factory.getOWLObjectPropertyAssertionAxiom(partofProperty, organism, ind); // log.warn("assert " + organism + " part of " + ind); // manager.addAxiom(target, partofAssertion); // OWLClassAssertionAxiom anatomyAssertion = factory.getOWLClassAssertionAxiom(anatomyClass, ind); // 
log.warn("assert " + ind + " is " + anatomyClass); // manager.addAxiom(target, anatomyAssertion); // else { // final String msg = String.format("No taxon IRI available; id = %s",getId()); // else { // final String msg = String.format("No taxon specified; id = %s", getId()); // else{ // final String msg = String.format("No anatomy IRI available; id = %s",getId()); // /** // * // * @param builder // * @param iri // * @return // */ // OWLClass processParticipantTaxon(Owlbuilder builder,IRI iri){ // final OWLOntology merged = builder.getMergedSources(); // final OWLDataFactory factory = builder.getDataFactory(); // OWLOntology target = builder.getTarget(); // boolean taxon_duplicate = target.containsClassInSignature(iri); // if (!taxon_duplicate){ // if (merged.containsClassInSignature(iri)) // taxon in merged (so from NCBI) // return processNCBITaxon(builder, iri); // else // return processNonNCBITaxon(builder, iri); // else{ // OWLClass taxonClass = factory.getOWLClass(iri); // return taxonClass; // may not be right // /** // * // * @param builder // * @param iri // * @return class for anatomy // */ // OWLClass processParticipantAnatomy(Owlbuilder builder, IRI iri){ // final OWLOntology target = builder.getTarget(); // final OWLOntology merged = builder.getMergedSources(); // final OWLDataFactory factory = builder.getDataFactory(); // boolean anatomy_duplicate = target.containsClassInSignature(iri); // if (!anatomy_duplicate){ // boolean anatomy_exists = merged.containsClassInSignature(iri); // if (anatomy_exists){ // log.info("Found class in signature of merged ontology for: " + iri); // OWLClass anatomyClass = factory.getOWLClass(iri); // processAnatomyForIndividual(builder,anatomyClass); // return anatomyClass; // else{ // log.info("Did not find class in signature of merged ontology for: " + bean.getTaxonIri()); // return null; // else{ // OWLClass anatomyClass = factory.getOWLClass(iri); // return anatomyClass; // may not be right // OWLClass 
processParticipantTaxonForClass(Owlbuilder builder,IRI iri){ // final OWLOntology target = builder.getTarget(); // final OWLOntology merged = builder.getMergedSources(); // final OWLDataFactory factory = builder.getDataFactory(); // final OWLReasoner reasoner = builder.getPreReasoner(); // boolean taxon_duplicate = target.containsClassInSignature(iri); // if (!taxon_duplicate){ // boolean taxon_exists = merged.containsClassInSignature(iri); // if (taxon_exists){ // log.info("Found class in signature of merged ontology for: " + getTaxonIri()); // OWLClass taxonClass = factory.getOWLClass(iri); // final NodeSet<OWLClass> taxonParents = reasoner.getSuperClasses(taxonClass, false); // log.info("Node count = " + taxonParents.getNodes().size()); // Set<OWLClass>parentList = taxonParents.getFlattened(); // log.info("Flattened parent count = " + parentList.size()); // parentList.add(taxonClass); // for (OWLClass taxon : parentList){ // participantProcessTaxon(builder,taxon); // return taxonClass; // else{ // log.info("Did not find taxon class in signature of merged ontology for: " + getTaxonIri()); // final IRI taxonIri = IRI.create(getTaxonIri()); // final Map<IRI,Taxon> nonNCBITaxa = builder.getNonNCBITaxa(); // final OWLOntologyManager manager = builder.getOntologyManager(); // Taxon t = nonNCBITaxa.get(taxonIri); // if (t == null){ // log.info("Taxon IRI not found in declared non-NCBI taxa"); // final OWLClass taxonClass = factory.getOWLClass(iri); // if (t.getParentSourceId() != null){ // IRI parentIri = IRI.create(t.getParentSourceId()); // OWLClass parentClass = factory.getOWLClass(parentIri); // log.info("Parent IRI is " + parentIri.toString()); // OWLAxiom sc_ax = factory.getOWLSubClassOfAxiom(taxonClass, parentClass); // manager.addAxiom(target, sc_ax); // else{ // log.info("failed to find IRI of parent of " + getTaxonIri()); // if (t.getName() != null){ // OWLAnnotation labelAnno = factory.getOWLAnnotation(factory.getRDFSLabel(), // 
factory.getOWLLiteral(t.getName())); // OWLAxiom ax = factory.getOWLAnnotationAssertionAxiom(iri, labelAnno); // // Add the axiom to the ontology // manager.addAxiom(target,ax); // return taxonClass; // else{ // OWLClass taxonClass = factory.getOWLClass(iri); // return taxonClass; // may not be right // void participantProcessTaxon(Owlbuilder builder,OWLClass taxon){ // final OWLOntologyManager manager = builder.getOntologyManager(); // final OWLOntology merged = builder.getMergedSources(); // final OWLOntology extracted = builder.getTarget(); // if (true){ //add appropriate when figured out // log.info("Need to add taxon: " + taxon.getIRI()); // //log.info("Defining Axioms"); // manager.addAxioms(extracted, merged.getAxioms(taxon)); // //log.info("Annotations"); // Set<OWLAnnotationAssertionAxiom> taxonAnnotations = merged.getAnnotationAssertionAxioms(taxon.getIRI()); // for (OWLAnnotationAssertionAxiom a : taxonAnnotations){ // //log.info(" Annotation Axiom: " + a.toString()); // if (a.getAnnotation().getProperty().isLabel()){ // log.info("Label is " + a.getAnnotation().getValue().toString()); // manager.addAxiom(extracted, a); // /** // * // * @param builder // * @param iri // * @return // */ // OWLClass processParticipantTaxonForIndividual(Owlbuilder builder,IRI iri){ // final OWLOntology merged = builder.getMergedSources(); // final OWLDataFactory factory = builder.getDataFactory(); // OWLOntology target = builder.getTarget(); // boolean taxon_duplicate = target.containsClassInSignature(iri); // if (!taxon_duplicate){ // if (merged.containsClassInSignature(iri)) // taxon in merged (so from NCBI) // return processNCBITaxon(builder, iri); // else // return processNonNCBITaxon(builder, iri); // else{ // OWLClass taxonClass = factory.getOWLClass(iri); // return taxonClass; // may not be right // /** // * // * @param builder // * @param iri // * @return // */ // private OWLClass processNCBITaxon(Owlbuilder builder, IRI iri) { // log.info("Found class in signature 
of merged ontology for: " + iri); // final OWLDataFactory factory = builder.getDataFactory(); // final OWLReasoner reasoner = builder.getPreReasoner(); // OWLClass taxonClass = factory.getOWLClass(iri); // final NodeSet<OWLClass> taxonParents = reasoner.getSuperClasses(taxonClass, false); // log.info("Node count = " + taxonParents.getNodes().size()); // Set<OWLClass>parentList = taxonParents.getFlattened(); // log.info("Flattened parent count = " + parentList.size()); // parentList.add(taxonClass); // for (OWLClass taxon : parentList){ // processTaxon(builder,taxon); // return taxonClass; // void processTaxon(Owlbuilder builder,OWLClass taxon){ // final OWLOntologyManager manager = builder.getOntologyManager(); // final OWLOntology merged = builder.getMergedSources(); // final OWLOntology extracted = builder.getTarget(); // if (true){ //add appropriate when figured out // log.info("Need to add taxon: " + taxon.getIRI()); // //log.info("Defining Axioms"); // manager.addAxioms(extracted, merged.getAxioms(taxon)); // //log.info("Annotations"); // Set<OWLAnnotationAssertionAxiom> taxonAnnotations = merged.getAnnotationAssertionAxioms(taxon.getIRI()); // for (OWLAnnotationAssertionAxiom a : taxonAnnotations){ // //log.info(" Annotation Axiom: " + a.toString()); // if (a.getAnnotation().getProperty().isLabel()){ // log.info("Label is " + a.getAnnotation().getValue().toString()); // manager.addAxiom(extracted, a); // /** // * @param builder // * @param iri // * @return // */ // private OWLClass processNonNCBITaxon(Owlbuilder builder, IRI iri) { // final OWLDataFactory factory = builder.getDataFactory(); // final OWLOntology target = builder.getTarget(); // log.info("Did not find taxon class in signature of merged ontology for: " + bean.getTaxonIri()); // final IRI taxonIri = IRI.create(bean.getTaxonIri()); // final Map<IRI, Taxon> nonNCBITaxa = builder.getNonNCBITaxa(); // final OWLOntologyManager manager = builder.getOntologyManager(); // final Taxon t = 
nonNCBITaxa.get(taxonIri); // if (t == null){ // log.info("Taxon IRI not found in declared non-NCBI taxa"); // final OWLClass taxonClass = factory.getOWLClass(iri); // if (t.getParentSourceId() != null){ // IRI parentIri = IRI.create(t.getParentSourceId()); // OWLClass parentClass = factory.getOWLClass(parentIri); // log.info("Parent IRI is " + parentIri.toString()); // OWLAxiom sc_ax = factory.getOWLSubClassOfAxiom(taxonClass, parentClass); // manager.addAxiom(target, sc_ax); // else{ // log.info("failed to find IRI of parent of " + bean.getTaxonIri()); // if (t.getName() != null){ // OWLAnnotation labelAnno = factory.getOWLAnnotation(factory.getRDFSLabel(), // factory.getOWLLiteral(t.getName())); // OWLAxiom ax = factory.getOWLAnnotationAssertionAxiom(iri, labelAnno); // // Add the axiom to the ontology // manager.addAxiom(target,ax); // return taxonClass; // OWLClass processParticipantAnatomyForClass(Owlbuilder builder, IRI iri){ // final OWLOntology target = builder.getTarget(); // final OWLOntology merged = builder.getMergedSources(); // final OWLDataFactory factory = builder.getDataFactory(); // boolean anatomy_duplicate = target.containsClassInSignature(iri); // if (!anatomy_duplicate){ // boolean anatomy_exists = merged.containsClassInSignature(iri); // if (anatomy_exists){ // log.info("Found class in signature of merged ontology for: " + iri); // OWLClass anatomyClass = factory.getOWLClass(iri); // participantProcessAnatomy(builder,anatomyClass); // return anatomyClass; // else{ // log.info("Did not find class in signature of merged ontology for: " + getAnatomyIri()); // return null; // else{ // OWLClass taxonClass = factory.getOWLClass(iri); // return taxonClass; // may not be right // public void participantProcessAnatomy(Owlbuilder builder, OWLClass anatomyClass) { // final OWLOntologyManager manager = builder.getOntologyManager(); // final OWLOntology merged = builder.getMergedSources(); // final OWLOntology extracted = builder.getTarget(); // if 
(true){ // log.info("Need to add anatomy: " + anatomyClass.getIRI()); // Set<OWLClassAxiom> anatAxioms = merged.getAxioms(anatomyClass); // manager.addAxioms(extracted, anatAxioms); // Set<OWLAnnotationAssertionAxiom> anatAnnotations = // merged.getAnnotationAssertionAxioms(anatomyClass.getIRI()); // for (OWLAnnotationAssertionAxiom a : anatAnnotations){ // //log.info(" Annotation Axiom: " + a.toString()); // if (a.getAnnotation().getProperty().isLabel()){ // log.info("Label is " + a.getAnnotation().getValue().toString()); // manager.addAxiom(extracted, a); // builder.initializeMiscTermAndParents(anatomyClass); // public void processAnatomyForIndividual(Owlbuilder builder, OWLClass anatomyClass) { // final OWLOntologyManager manager = builder.getOntologyManager(); // final OWLOntology merged = builder.getMergedSources(); // final OWLOntology extracted = builder.getTarget(); // if (true){ // log.info("Need to add anatomy: " + anatomyClass.getIRI()); // Set<OWLClassAxiom> anatAxioms = merged.getAxioms(anatomyClass); // manager.addAxioms(extracted, anatAxioms); // Set<OWLAnnotationAssertionAxiom> anatAnnotations = // merged.getAnnotationAssertionAxioms(anatomyClass.getIRI()); // for (OWLAnnotationAssertionAxiom a : anatAnnotations){ // //log.info(" Annotation Axiom: " + a.toString()); // if (a.getAnnotation().getProperty().isLabel()){ // log.info("Label is " + a.getAnnotation().getValue().toString()); // manager.addAxiom(extracted, a); // builder.initializeMiscTermAndParents(anatomyClass); // void processParticipantSubstrateForClass(Owlbuilder builder, IRI iri){ // final OWLOntology target = builder.getTarget(); // final OWLOntology merged = builder.getMergedSources(); // final OWLDataFactory factory = builder.getDataFactory(); // boolean substrate_duplicate = target.containsClassInSignature(iri); // if (!substrate_duplicate){ // boolean substrate_exists = merged.containsClassInSignature(iri); // if (substrate_exists){ // log.info("Found class in signature of merged 
ontology for: " + iri); // OWLClass substrateClass = factory.getOWLClass(iri); // processSubstrate(builder,substrateClass); // public void processSubstrate(Owlbuilder builder, OWLClass substrateClass) { // builder.initializeMiscTermAndParents(substrateClass); public int getId(){ return bean.getId(); } public int getTaxon(){ return bean.getTaxon(); } public int getSubstrate(){ return bean.getSubstrate(); } public int getAnatomy(){ return bean.getAnatomy(); } public String getQuantification(){ return bean.getQuantification(); } public String getPublicationTaxon(){ return bean.getPublicationTaxon(); } public String getLabel(){ return bean.getLabel(); } public String getPublicationAnatomy(){ return bean.getPublicationAnatomy(); } public String getPublicationSubstrate(){ return bean.getPublicationSubstrate(); } public String getTaxonIri(){ return bean.getTaxonIri(); } public String getSubstrateIri(){ return bean.getSubstrateIri(); } public String getAnatomyIri(){ return bean.getAnatomyIri(); } public String getGeneratedId(){ return bean.getGeneratedId(); } public void setGeneratedId(String s){ bean.setGeneratedId(s); } public String getIriString(){ if (getGeneratedId() == null){ throw new IllegalStateException("Individual has neither assigned nor generated id"); } return getGeneratedId(); } }
package org.dynmap.standalone; import java.io.BufferedInputStream; import java.io.ByteArrayInputStream; import java.io.DataInputStream; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.io.RandomAccessFile; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.zip.GZIPInputStream; import java.util.zip.InflaterInputStream; import org.dynmap.DynmapChunk; import org.dynmap.DynmapLocation; import org.dynmap.DynmapWorld; import org.dynmap.Log; import org.dynmap.utils.MapChunkCache; import org.spout.nbt.Tag; import org.spout.nbt.CompoundTag; import org.spout.nbt.CompoundMap; import org.spout.nbt.IntTag; import org.spout.nbt.ByteTag; import org.spout.nbt.LongTag; import org.spout.nbt.TagType; import org.spout.nbt.stream.NBTInputStream; /** * Handler class for standalone worlds (based on Anvil format world data directory) */ public class StandaloneWorld extends DynmapWorld { private CompoundMap level_dat; private File wdir; private File regiondir; private DynmapLocation spawn; private String env; private boolean is_nether; private boolean is_raining; private boolean is_thunder; private long world_time; private RegionMap regions; private Object regionsem = new Object(); private static final int MAX_FILES_ACTIVE = 10; private static final int SECTOR_SIZE = 4096; private static final int VERSION_GZIP = 1; private static final int VERSION_DEFLATE = 2; private static class CoordPair { int x, z; public int hashCode() { return x ^ (z * 71); } public boolean equals(Object o) { if (o instanceof CoordPair) { CoordPair c = (CoordPair) o; return ((c.x == x) && (c.z == z)); } else { return false; } } } private static class RegionFileHandler { private File f; private RandomAccessFile raf; private int[] offsets; private int[] timestamps; public RegionFileHandler(File dir, int x, int z) throws IOException { f = new File(dir, "r." + x + "." 
+ z + ".mca"); if (!f.canRead()) { throw new IOException(); } raf = new RandomAccessFile(f, "rw"); offsets = new int[32 * 32]; timestamps = new int[32 * 32]; /* Load offsets */ raf.seek(0); for (int i = 0; i < offsets.length; i++) { offsets[i] = raf.readInt(); } for (int i = 0; i < timestamps.length; i++) { timestamps[i] = raf.readInt(); } } public Tag readChunk(int x, int z) { byte[] rec = null; int ver = 0; synchronized(this) { int off = offsets[x + (32 * z)]; if((off == 0) || (raf == null)) { return null; } try { raf.seek((off >> 8) * SECTOR_SIZE); int len = raf.readInt(); // Get length if (len > ((off & 0xFF) * SECTOR_SIZE)) { return null; } ver = raf.readByte(); // Get version rec = new byte[len - 1]; raf.read(rec); } catch (IOException iox) { return null; } } ByteArrayInputStream bais = new ByteArrayInputStream(rec); InputStream in = null; if (ver == VERSION_GZIP) { try { in = new GZIPInputStream(bais); } catch (IOException iox) { return null; } } else if (ver == VERSION_DEFLATE) { in = new InflaterInputStream(bais); } else { return null; } Tag t = null; try { NBTInputStream nis = new NBTInputStream(new BufferedInputStream(in), false); t = nis.readTag(); nis.close(); } catch (IOException iox) { return null; } return t; } public synchronized void cleanup() { if(raf != null) { try { raf.close(); } catch (IOException iox) { } raf = null; } } } private static class RegionMap extends LinkedHashMap<CoordPair, RegionFileHandler> { public RegionMap() { super(MAX_FILES_ACTIVE, 0.7F, true); // Make access-order based linking (for LRU) } protected boolean removeEldestEntry(Map.Entry<CoordPair, RegionFileHandler> eldest) { if (this.size() >= MAX_FILES_ACTIVE) { eldest.getValue().cleanup(); // Clean up oldest region handler return true; } return false; } } public static void main(String[] v) { StandaloneWorld w; try { w = new StandaloneWorld("world", new File("/Users/mike/mcpc/world_nether"), "nether"); Tag t = w.getChunk(0, 0); StandaloneChunkSnapshot ss = new 
StandaloneChunkSnapshot(t); } catch (IOException x) { Log.severe("Error: " + x.getMessage()); } } public void updateLevelDat() throws IOException { /* Read level.dat - required */ File lvl = new File(wdir, "level.dat"); FileInputStream fis = null; try { fis = new FileInputStream(lvl); NBTInputStream nis = new NBTInputStream(fis); Tag t = nis.readTag(); if (t.getType() == TagType.TAG_COMPOUND) { CompoundTag ct = (CompoundTag)(((CompoundTag)t).getValue().get("Data")); level_dat = ct.getValue(); /* Initialize spawn location */ spawn.x = ((IntTag)level_dat.get("SpawnX")).getValue(); spawn.y = ((IntTag)level_dat.get("SpawnY")).getValue(); spawn.z = ((IntTag)level_dat.get("SpawnZ")).getValue(); /* Test if raining */ this.is_raining = (((ByteTag)level_dat.get("raining")).getValue() != 0); this.is_thunder = (((ByteTag)level_dat.get("thundering")).getValue() != 0); /* Get world time */ this.world_time = ((LongTag)level_dat.get("Time")).getValue(); } nis.close(); } catch (IOException iox) { Log.info("Error opening level.dat for world " + getName() + " in " + wdir.getPath()); throw iox; } finally { if (fis != null) { try { fis.close(); } catch (IOException iox) {} } } } public StandaloneWorld(String wname, File wdir, String env) throws IOException { super(wname, 256, 64); this.wdir = wdir; this.env = env; this.is_nether = env.equals("nether"); if(env.equals("nether")) this.regiondir = new File(wdir, "DIM-1/region"); else if(env.equals("the_end")) this.regiondir = new File(wdir, "DIM1/region"); else this.regiondir = new File(wdir, "region"); spawn = new DynmapLocation(); spawn.world = this.getName(); regions = new RegionMap(); /* Load level.dat */ updateLevelDat(); } @Override public boolean isNether() { return is_nether; } @Override public DynmapLocation getSpawnLocation() { return spawn; } @Override public long getTime() { return this.world_time; } @Override public boolean hasStorm() { return this.is_raining; } @Override public boolean isThundering() { return 
this.is_thunder; } @Override public boolean isLoaded() { return false; } @Override public int getLightLevel(int x, int y, int z) { return 0; } @Override public int getHighestBlockYAt(int x, int z) { return 0; } @Override public boolean canGetSkyLightLevel() { return true; } @Override public int getSkyLightLevel(int x, int y, int z) { return 0; } @Override public String getEnvironment() { return env; } @Override public MapChunkCache getChunkCache(List<DynmapChunk> chunks) { // TODO Auto-generated method stub return null; } public Tag getChunk(int x, int z) { RegionFileHandler rf = null; CoordPair cp = new CoordPair(); cp.x = (x >> 5); cp.z = (z >> 5); synchronized(regionsem) { rf = regions.get(cp); if (rf == null) { try { rf = new RegionFileHandler(regiondir, cp.x, cp.z); } catch (IOException iox) { return null; } regions.put(cp, rf); } } return rf.readChunk(x & 0x1F, z & 0x1F); } @Override public void setWorldUnloaded() { } }
package org.jtrfp.trcl.beh; import java.util.Collection; import org.jtrfp.trcl.Submitter; import org.jtrfp.trcl.obj.Player; public class DamageableBehavior extends Behavior{ private int maxHealth=65535; private int health=maxHealth; private long invincibilityExpirationTime=System.currentTimeMillis()+100;//Safety time in case init causes damage public DamageableBehavior impactDamage(int dmg){ generalDamage(dmg); return this; } public DamageableBehavior shearDamage(int dmg){ generalDamage(dmg); return this; } protected void generalDamage(int dmg){ if(isInvincible())return; health-=dmg; if(health<=0){ getParent().destroy(); getParent().getBehavior().probeForBehaviors(deathSub, DeathListener.class); }//end if(dead) else{ if(getParent() instanceof Player)addInvincibility(2500);//Safety/Escape }//end (!dead) }//end generalDamage(...) public boolean isInvincible(){ return invincibilityExpirationTime>System.currentTimeMillis(); } public int getHealth(){ return health; } public void unDamage(int amt){ health+=amt; if(health>maxHealth)health=maxHealth; } public void unDamage(){ health=maxHealth; } public DamageableBehavior setHealth(int val){ health=val;return this; } private final Submitter<DeathListener> deathSub = new Submitter<DeathListener>(){ @Override public void submit(DeathListener item) { item.notifyDeath(); } @Override public void submit(Collection<DeathListener> items) { for(DeathListener l:items){submit(l);} } }; public void addInvincibility(int invincibilityTimeDeltaMillis) { ensureIsInvincible(); invincibilityExpirationTime+=invincibilityTimeDeltaMillis; } protected void ensureIsInvincible() { if(!isInvincible())invincibilityExpirationTime=System.currentTimeMillis()+10;//10 for padding } /** * @return the maxHealth */ public int getMaxHealth() { return maxHealth; } /** * @param maxHealth the maxHealth to set */ public void setMaxHealth(int maxHealth) { this.maxHealth = maxHealth; } }//end DamageableBehavior
package org.lightmare.libraries; import java.io.File; import java.io.IOException; import java.lang.reflect.Method; import java.net.URL; import java.net.URLClassLoader; import java.security.AccessController; import java.security.PrivilegedAction; import java.util.Collection; import java.util.HashSet; import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; import java.util.concurrent.FutureTask; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; import org.apache.log4j.Logger; import org.lightmare.libraries.loaders.EjbClassLoader; import org.lightmare.utils.CollectionUtils; import org.lightmare.utils.ObjectUtils; import org.lightmare.utils.fs.FileUtils; import org.lightmare.utils.reflect.MetaUtils; /** * Class for load jar or class files from specified path * * @author levan * @since 0.0.15-SNAPSHOT */ public class LibraryLoader { // Method name to add URL to class loader private static final String ADD_URL_METHOD_NAME = "addURL"; private static final String CLOSE_METHOD_NAME = "close"; // Caches if class URLClassLoader has close method private static Boolean hasCloseMethod; // Name of class loader using thread private static final String LOADER_THREAD_NAME = "library-class-loader-thread"; //Inaccessible method to add URL to existing class loader private static Method addURLMethod; private static final Lock LOCK = new ReentrantLock(); private static final Logger LOG = Logger.getLogger(LibraryLoader.class); /** * implementation of {@link Callable}<ClassLoader> interface to initialize * {@link ClassLoader} in separate thread * * @author levan * */ private static class LibraryLoaderInit implements Callable<ClassLoader> { private URL[] urls; private ClassLoader parent; public LibraryLoaderInit(final URL[] urls, final ClassLoader parent) { this.urls = urls; this.parent = parent; } @Override public ClassLoader call() throws Exception { ClassLoader loader = 
cloneContextClassLoader(urls, parent); return loader; } } /** * Gets {@link URLClassLoader} class addURL method * * @return Method * @throws IOException */ private static Method getURLMethod() throws IOException { if (addURLMethod == null) { ObjectUtils.lock(LOCK); try { if (addURLMethod == null && MetaUtils.hasMethod(URLClassLoader.class, ADD_URL_METHOD_NAME)) { addURLMethod = MetaUtils.getDeclaredMethod( URLClassLoader.class, ADD_URL_METHOD_NAME, URL.class); } } finally { ObjectUtils.unlock(LOCK); } } return addURLMethod; } /** * If passed {@link ClassLoader} is instance of {@link URLClassLoader} then * gets {@link URL}[] of this {@link ClassLoader} calling * {@link URLClassLoader#getURLs()} method * * @param loader * @return {@link URL}[] */ private static URL[] getURLs(ClassLoader loader) { URL[] urls; if (loader instanceof URLClassLoader) { urls = ((URLClassLoader) loader).getURLs(); } else { urls = CollectionUtils.emptyArray(URL.class); } return urls; } /** * Initializes and returns enriched {@link ClassLoader} in separated * {@link Thread} to load bean and library classes * * @param urls * @return {@link ClassLoader} * @throws IOException */ public static ClassLoader initializeLoader(final URL[] urls) throws IOException { ClassLoader ejbLoader; ClassLoader parent = getContextClassLoader(); LibraryLoaderInit initializer = new LibraryLoaderInit(urls, parent); FutureTask<ClassLoader> task = new FutureTask<ClassLoader>(initializer); Thread thread = new Thread(task); thread.setName(LOADER_THREAD_NAME); thread.setPriority(Thread.MAX_PRIORITY); thread.start(); try { ejbLoader = task.get(); } catch (InterruptedException ex) { throw new IOException(ex); } catch (ExecutionException ex) { throw new IOException(ex); } return ejbLoader; } /** * Gets current {@link Thread}'s context {@link ClassLoader} object * * @return {@link ClassLoader} */ public static ClassLoader getContextClassLoader() { PrivilegedAction<ClassLoader> action = new PrivilegedAction<ClassLoader>() { 
public ClassLoader run() { Thread currentThread = Thread.currentThread(); ClassLoader classLoader = currentThread.getContextClassLoader(); return classLoader; } }; ClassLoader loader = AccessController.doPrivileged(action); return loader; } /** * Gets new {@link ClassLoader} enriched with passed {@link URL} array and * parent {@link ClassLoader} classes * * @param urls * @param parent * @return {@link ClassLoader} * @throws IOException */ public static ClassLoader getEnrichedLoader(URL[] urls, ClassLoader parent) { ClassLoader enrichedLoader; if (CollectionUtils.valid(urls)) { if (parent == null) { parent = getContextClassLoader(); } enrichedLoader = EjbClassLoader.newInstance(urls, parent); } else { enrichedLoader = null; } return enrichedLoader; } /** * Gets new {@link ClassLoader} enriched with passed {@link File} and it's * sub files {@link URL}s and parent {@link ClassLoader} classes * * @param file * @param urls * @return {@link ClassLoader} * @throws IOException */ public static ClassLoader getEnrichedLoader(File file, Set<URL> urls) throws IOException { FileUtils.getSubfiles(file, urls); URL[] paths = CollectionUtils.toArray(urls, URL.class); ClassLoader parent = getContextClassLoader(); ClassLoader enrichedLoader = getEnrichedLoader(paths, parent); return enrichedLoader; } /** * Initializes new {@link ClassLoader} from loaded {@link URL}'s from * enriched {@link ClassLoader} for beans and libraries * * @param urls * @return {@link ClassLoader} * @throws IOException */ public static ClassLoader cloneContextClassLoader(final URL[] urls, ClassLoader parent) throws IOException { URLClassLoader loader = (URLClassLoader) getEnrichedLoader(urls, parent); try { // get all resources for cloning URL[] urlArray = loader.getURLs(); URL[] urlClone = urlArray.clone(); if (parent == null) { parent = getContextClassLoader(); } ClassLoader clone = EjbClassLoader.newInstance(urlClone, parent); return clone; } finally { closeClassLoader(loader); // dereference cloned class 
loader instance loader = null; } } /** * Merges two {@link ClassLoader}s in one * * @param newLoader * @param oldLoader * @return {@link ClassLoader} */ public static ClassLoader createCommon(ClassLoader newLoader, ClassLoader oldLoader) { URL[] urls = getURLs(oldLoader); ClassLoader commonLoader = URLClassLoader.newInstance(urls, oldLoader); urls = getURLs(newLoader); commonLoader = getEnrichedLoader(urls, newLoader); return commonLoader; } /** * Sets passed {@link Thread}'s context class loader appropriated * {@link ClassLoader} instance * * @param thread * @param loader */ public static void loadCurrentLibraries(Thread thread, ClassLoader loader) { if (ObjectUtils.notNull(loader)) { thread.setContextClassLoader(loader); } } /** * Sets passed {@link ClassLoader} instance as current {@link Thread}'s * context class loader * * @param loader */ public static void loadCurrentLibraries(ClassLoader loader) { Thread thread = Thread.currentThread(); loadCurrentLibraries(thread, loader); } /** * Adds {@link URL} array to system {@link ClassLoader} instance * * @param urls * @param method * @param urlLoader * @throws IOException */ public static void loadURLToSystem(URL[] urls, Method method, URLClassLoader urlLoader) throws IOException { for (URL url : urls) { MetaUtils.invokePrivate(method, urlLoader, url); } } /** * Loads all files and sub files {@link URL}s to system class loader * * @param libraryPath * @throws IOException */ private static void loadLibraryFromPath(String libraryPath) throws IOException { File file = new File(libraryPath); if (file.exists()) { Set<URL> urls = new HashSet<URL>(); FileUtils.getSubfiles(file, urls); URL[] paths = CollectionUtils.toArray(urls, URL.class); ClassLoader systemLoader = ClassLoader.getSystemClassLoader(); if (systemLoader instanceof URLClassLoader) { URLClassLoader urlLoader = (URLClassLoader) systemLoader; Method method = getURLMethod(); if (ObjectUtils.notNull(method)) { loadURLToSystem(paths, method, urlLoader); } } } } /** 
* Loads jar or <code>.class</code> files to the current thread from * libraryPaths recursively * * @param libraryPaths * @throws IOException */ public static void loadLibraries(String... libraryPaths) throws IOException { if (CollectionUtils.valid(libraryPaths)) { for (String libraryPath : libraryPaths) { loadLibraryFromPath(libraryPath); } } } /** * Loads passed classes to specified {@link ClassLoader} instance * * @param classes * @param loader */ public static void loadClasses(Collection<String> classes, ClassLoader loader) throws IOException { if (CollectionUtils.valid(classes) && ObjectUtils.notNull(loader)) { for (String className : classes) { try { loader.loadClass(className); } catch (ClassNotFoundException ex) { throw new IOException(ex); } } } } /** * Loads passed classes to specified current {@link Thread}'s context class * loader * * @param classes */ public static void loadClasses(Collection<String> classes) throws IOException { ClassLoader loader = getContextClassLoader(); loadClasses(classes, loader); } /** * Checks and caches if passed {@link ClassLoader} implementation or it's * parent class has close method * * @param loader * @throws IOException */ private static void checkOnClose(ClassLoader loader) throws IOException { // Finds if loader associated class or superclass has "close" // method Class<? 
extends ClassLoader> loaderClass = loader.getClass(); if (hasCloseMethod == null) { synchronized (LibraryLoader.class) { if (hasCloseMethod == null) { boolean hasMethod = MetaUtils.hasPublicMethod(loaderClass, CLOSE_METHOD_NAME); hasCloseMethod = hasMethod; } } } } /** * Closes passed {@link ClassLoader} if it is instance of * {@link URLClassLoader} class * * @param loader * @throws IOException */ public static void closeClassLoader(ClassLoader loader) throws IOException { if (ObjectUtils.notNull(loader) && loader instanceof URLClassLoader) { try { URLClassLoader urlClassLoader = ObjectUtils.cast(loader, URLClassLoader.class); urlClassLoader.clearAssertionStatus(); // Finds if loader associated class or superclass has "close" // method checkOnClose(loader); if (hasCloseMethod) { urlClassLoader.close(); } } catch (Throwable th) { LOG.error(th.getMessage(), th); } } } }
package org.minimalj.backend.db;

import java.lang.reflect.Field;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;

import org.minimalj.model.Code;
import org.minimalj.model.EnumUtils;
import org.minimalj.model.Keys;
import org.minimalj.model.View;
import org.minimalj.model.ViewUtil;
import org.minimalj.model.annotation.Required;
import org.minimalj.model.properties.ChainedProperty;
import org.minimalj.model.properties.FieldProperty;
import org.minimalj.model.properties.PropertyInterface;
import org.minimalj.transaction.criteria.CriteriaOperator;
import org.minimalj.util.CloneHelper;
import org.minimalj.util.Codes;
import org.minimalj.util.EqualsHelper;
import org.minimalj.util.FieldUtils;
import org.minimalj.util.GenericUtils;
import org.minimalj.util.IdUtils;
import org.minimalj.util.LoggingRuntimeException;
import org.minimalj.util.StringUtils;

/**
 * Minimal-J internal<p>
 *
 * Base class of all table representing classes in this persistence layer.
 * Normally you should not need to extend from this class directly. Use
 * the existing subclasses or only the methods in DbPersistence.
 */
public abstract class AbstractTable<T> {
    public static final Logger sqlLogger = Logger.getLogger("SQL");

    // column metadata is cached per model class; shared by all table instances
    // NOTE(review): plain HashMap, accessed without synchronization — confirm
    // tables are only built single-threaded at startup
    private static final Map<Class<?>, LinkedHashMap<String, PropertyInterface>> columnsForClass = new HashMap<>(200);

    protected final DbPersistence dbPersistence;
    protected final DbPersistenceHelper helper;
    protected final Class<T> clazz;
    // column name -> property, in declaration order (LinkedHashMap keeps
    // parameter positions stable for the prepared statements)
    protected final LinkedHashMap<String, PropertyInterface> columns;
    protected final LinkedHashMap<String, PropertyInterface> lists;
    protected final String name;
    protected final PropertyInterface idProperty;
    protected final List<String> indexes = new ArrayList<>();
    // PreparedStatements are cached per connection and per query text
    protected final Map<Connection, Map<String, PreparedStatement>> statements = new HashMap<>();

    protected final String selectByIdQuery;
    protected final String insertQuery;
    protected final String clearQuery;

    // TODO: it's a little bit strange to pass the idProperty here. Also because the property
    // is not always a property of clazz. idProperty is only necessary because the clazz AND the
    // size of the idProperty is needed
    protected AbstractTable(DbPersistence dbPersistence, String name, Class<T> clazz, PropertyInterface idProperty) {
        this.dbPersistence = dbPersistence;
        this.helper = new DbPersistenceHelper(dbPersistence);
        // fall back to snake-cased simple class name when no explicit name given
        this.name = buildTableName(dbPersistence, name != null ? name : StringUtils.toSnakeCase(clazz.getSimpleName()));
        this.clazz = clazz;
        this.idProperty = idProperty;
        this.columns = findColumns(clazz);
        this.lists = findLists(clazz);
        this.selectByIdQuery = selectByIdQuery();
        this.insertQuery = insertQuery();
        this.clearQuery = clearQuery();
        findCodes();
        findDependables();
        findIndexes();
    }

    /**
     * Builds a unique, length-limited table name and registers it with the
     * persistence.
     */
    public static String buildTableName(DbPersistence persistence, String name) {
        name = DbPersistenceHelper.buildName(name, persistence.getMaxIdentifierLength(), persistence.getTableNames());
        // the persistence adds the table name too late. For subtables it's important
        // to add the table name here. Note that tableNames is a Set. Multiple
        // adds don't do any harm.
        persistence.getTableNames().add(name);
        return name;
    }

    /**
     * Discovers the (flattened) column -> property mapping for a model class.
     * Inline (final, non-Set, non-Code) fields are recursed into and their
     * columns are prefixed with the field name unless the inline class name
     * is part of the field name. Results are cached in columnsForClass.
     */
    protected LinkedHashMap<String, PropertyInterface> findColumns(Class<?> clazz) {
        if (columnsForClass.containsKey(clazz)) {
            return columnsForClass.get(clazz);
        }

        LinkedHashMap<String, PropertyInterface> columns = new LinkedHashMap<String, PropertyInterface>();
        for (Field field : clazz.getFields()) {
            if (!FieldUtils.isPublic(field) || FieldUtils.isStatic(field) || FieldUtils.isTransient(field)) continue;
            String fieldName = StringUtils.toSnakeCase(field.getName()).toUpperCase();
            // ID and VERSION are handled as special columns, lists live in own tables
            if (StringUtils.equals(fieldName, "ID", "VERSION")) continue;
            if (FieldUtils.isList(field)) continue;
            if (FieldUtils.isFinal(field) && !FieldUtils.isSet(field) && !Codes.isCode(field.getType())) {
                // inline field: flatten its columns into this table
                Map<String, PropertyInterface> inlinePropertys = findColumns(field.getType());
                boolean hasClassName = FieldUtils.hasClassName(field);
                for (String inlineKey : inlinePropertys.keySet()) {
                    String key = inlineKey;
                    if (!hasClassName) {
                        key = fieldName + "_" + inlineKey;
                    }
                    key = DbPersistenceHelper.buildName(key, dbPersistence.getMaxIdentifierLength(), columns.keySet());
                    columns.put(key, new ChainedProperty(clazz, field, inlinePropertys.get(inlineKey)));
                }
            } else {
                fieldName = DbPersistenceHelper.buildName(fieldName, dbPersistence.getMaxIdentifierLength(), columns.keySet());
                columns.put(fieldName, new FieldProperty(field));
            }
        }
        columnsForClass.put(clazz, columns);
        return columns;
    }

    /**
     * Discovers all List fields (direct or inside inline fields) of a model
     * class. Lists are stored in separate sub tables, not as columns.
     */
    protected static LinkedHashMap<String, PropertyInterface> findLists(Class<?> clazz) {
        LinkedHashMap<String, PropertyInterface> properties = new LinkedHashMap<String, PropertyInterface>();

        for (Field field : clazz.getFields()) {
            if (!FieldUtils.isPublic(field) || FieldUtils.isStatic(field) || FieldUtils.isTransient(field)) continue;

            if (FieldUtils.isFinal(field) && !FieldUtils.isList(field)) {
                // This is needed to check if an inline Property contains a List
                Map<String, PropertyInterface> inlinePropertys = findLists(field.getType());
                boolean hasClassName = FieldUtils.hasClassName(field);
                for (String inlineKey : inlinePropertys.keySet()) {
                    String key = inlineKey;
                    if (!hasClassName) {
                        key = field.getName() + StringUtils.upperFirstChar(inlineKey);
                    }
                    properties.put(key, new ChainedProperty(clazz, field, inlinePropertys.get(inlineKey)));
                }
            } else if (FieldUtils.isList(field)) {
                properties.put(field.getName(), new FieldProperty(field));
            }
        }
        return properties;
    }

    protected LinkedHashMap<String, PropertyInterface> getColumns() {
        return columns;
    }

    protected LinkedHashMap<String, PropertyInterface> getLists() {
        return lists;
    }

    protected Collection<String> getIndexes() {
        return indexes;
    }

    /**
     * Returns a cached PreparedStatement for the given connection/query pair,
     * creating and caching it on first use.
     */
    protected PreparedStatement getStatement(Connection connection, String query, boolean returnGeneratedKeys) throws SQLException {
        if (!statements.containsKey(connection)) {
            statements.put(connection, new HashMap<String, PreparedStatement>());
        }
        Map<String, PreparedStatement> statementsForConnection = statements.get(connection);
        if (!statementsForConnection.containsKey(query)) {
            statementsForConnection.put(query, createStatement(connection, query, returnGeneratedKeys));
        }
        return statementsForConnection.get(query);
    }

    /**
     * Creates a PreparedStatement; wraps it in a logging variant when FINE
     * SQL logging is enabled.
     */
    static PreparedStatement createStatement(Connection connection, String query, boolean returnGeneratedKeys) throws SQLException {
        int autoGeneratedKeys = returnGeneratedKeys ? Statement.RETURN_GENERATED_KEYS : Statement.NO_GENERATED_KEYS;
        if (sqlLogger.isLoggable(Level.FINE)) {
            return new LoggingPreparedStatement(connection, query, autoGeneratedKeys, sqlLogger);
        } else {
            return connection.prepareStatement(query, autoGeneratedKeys);
        }
    }

    /**
     * Executes a DDL/DML statement once (not cached); SQLExceptions are
     * rethrown as LoggingRuntimeException.
     */
    protected void execute(String s) {
        try (PreparedStatement statement = createStatement(dbPersistence.getConnection(), s.toString(), false)) {
            statement.execute();
        } catch (SQLException x) {
            throw new LoggingRuntimeException(x, sqlLogger, "Statement failed: \n" + s.toString());
        }
    }

    /**
     * Creates the table: special columns (id/version, subclass-specific),
     * then one column per model field, then the primary key.
     */
    protected void createTable(DbSyntax syntax) {
        StringBuilder s = new StringBuilder();
        syntax.addCreateStatementBegin(s, getTableName());
        addSpecialColumns(syntax, s);
        addFieldColumns(syntax, s);
        addPrimaryKey(syntax, s);
        syntax.addCreateStatementEnd(s);
        execute(s.toString());
    }

    protected abstract void addSpecialColumns(DbSyntax syntax, StringBuilder s);

    protected void addFieldColumns(DbSyntax syntax, StringBuilder s) {
        for (Map.Entry<String, PropertyInterface> column : getColumns().entrySet()) {
            s.append(",\n ").append(column.getKey()).append(" ");
            PropertyInterface property = column.getValue();
            syntax.addColumnDefinition(s, property);
            // @Required on the model field maps to NOT NULL
            boolean isRequired = property.getAnnotation(Required.class) != null;
            s.append(isRequired ? " NOT NULL" : " DEFAULT NULL");
        }
    }

    protected void addPrimaryKey(DbSyntax syntax, StringBuilder s) {
        syntax.addPrimaryKey(s, "ID");
    }

    protected void createIndexes(DbSyntax syntax) {
        for (String index : indexes) {
            String s = syntax.createIndex(getTableName(), index, this instanceof HistorizedTable);
            execute(s.toString());
        }
    }

    /**
     * Creates foreign key constraints for all dependable/reference columns.
     * The syntax may return null to signal "no constraint for this case".
     */
    protected void createConstraints(DbSyntax syntax) {
        for (Map.Entry<String, PropertyInterface> column : getColumns().entrySet()) {
            PropertyInterface property = column.getValue();

            if (DbPersistenceHelper.isDependable(property) || ViewUtil.isReference(property)) {
                Class<?> fieldClass = ViewUtil.resolve(property.getClazz());
                AbstractTable<?> referencedTable = dbPersistence.table(fieldClass);

                String s = syntax.createConstraint(getTableName(), column.getKey(), referencedTable.getTableName(), referencedTable instanceof HistorizedTable);
                if (s != null) {
                    execute(s.toString());
                }
            }
        }
    }

    /** Deletes all rows of this table. */
    public void clear() {
        try {
            PreparedStatement statement = getStatement(dbPersistence.getConnection(), clearQuery, false);
            statement.execute();
        } catch (SQLException x) {
            throw new LoggingRuntimeException(x, sqlLogger, "Clear of Table " + getTableName() + " failed");
        }
    }

    /** Returns the column name for an exact field path, or null if unknown. */
    private String findColumn(String fieldPath) {
        for (Map.Entry<String, PropertyInterface> entry : columns.entrySet()) {
            if (entry.getValue().getPath().equals(fieldPath)) {
                return entry.getKey();
            }
        }
        return null;
    }

    protected String getTableName() {
        return name;
    }

    public Class<T> getClazz() {
        return clazz;
    }

    /** Registers all referenced Code classes with the persistence. */
    private void findCodes() {
        for (Map.Entry<String, PropertyInterface> column : getColumns().entrySet()) {
            PropertyInterface property = column.getValue();
            Class<?> fieldClazz = property.getClazz();
            if (Code.class.isAssignableFrom(fieldClazz) && fieldClazz != clazz) {
                dbPersistence.addClass(fieldClazz);
            }
        }
    }

    /** Registers all dependable (non-View) classes with the persistence. */
    private void findDependables() {
        for (Map.Entry<String, PropertyInterface> column : getColumns().entrySet()) {
            PropertyInterface property = column.getValue();
            Class<?> fieldClazz = property.getClazz();
            if (DbPersistenceHelper.isDependable(property) && fieldClazz != clazz) {
                if (!View.class.isAssignableFrom(property.getClazz())) {
                    dbPersistence.addClass(fieldClazz);
                }
            }
        }
    }

    /** Every reference column gets an index by default. */
    protected void findIndexes() {
        for (Map.Entry<String, PropertyInterface> column : columns.entrySet()) {
            PropertyInterface property = column.getValue();
            if (ViewUtil.isReference(property)) {
                createIndex(property, property.getPath());
            }
        }
    }

    /**
     * Builds the WHERE fragment for a (possibly chained) field path. If only
     * a prefix of the path matches a column of this table, the rest is
     * resolved via a subselect on the referenced table.
     *
     * @throws IllegalArgumentException if no prefix of the path matches
     */
    protected String whereStatement(final String wholeFieldPath, CriteriaOperator criteriaOperator) {
        String fieldPath = wholeFieldPath;
        String column;
        while (true) {
            column = findColumn(fieldPath);
            if (column != null) break;
            // shorten the path from the right until a column matches
            int pos = fieldPath.lastIndexOf('.');
            if (pos < 0) throw new IllegalArgumentException("FieldPath " + wholeFieldPath + " not even partially found in " + getTableName());
            fieldPath = fieldPath.substring(0, pos);
        }
        if (fieldPath.length() < wholeFieldPath.length()) {
            String restOfFieldPath = wholeFieldPath.substring(fieldPath.length() + 1);
            if ("id".equals(restOfFieldPath)) {
                // comparing against the id: the column itself holds the id
                return column + " " + criteriaOperator.getOperatorAsString() + " ?";
            } else {
                PropertyInterface subProperty = columns.get(column);
                AbstractTable<?> subTable = dbPersistence.table(ViewUtil.resolve(subProperty.getClazz()));
                return column + " = (select ID from " + subTable.getTableName() + " where " + subTable.whereStatement(restOfFieldPath, criteriaOperator) + ")";
            }
        } else {
            return column + " " + criteriaOperator.getOperatorAsString() + " ?";
        }
    }

    // execution helpers

    protected T executeSelect(PreparedStatement preparedStatement) throws SQLException {
        return executeSelect(preparedStatement, null);
    }

    /** Executes the select and reads the first row, or null if no row. */
    protected T executeSelect(PreparedStatement preparedStatement, Integer time) throws SQLException {
        try (ResultSet resultSet = preparedStatement.executeQuery()) {
            if (resultSet.next()) {
                return readResultSetRow(resultSet, time);
            } else {
                return null;
            }
        }
    }

    protected List<T> executeSelectAll(PreparedStatement preparedStatement) throws SQLException {
        return executeSelectAll(preparedStatement, Long.MAX_VALUE);
    }

    /**
     * Executes the select and reads up to maxResults rows. For main tables
     * (Table) the list relations of every object are loaded, too.
     */
    protected List<T> executeSelectAll(PreparedStatement preparedStatement, long maxResults) throws SQLException {
        List<T> result = new ArrayList<T>();
        try (ResultSet resultSet = preparedStatement.executeQuery()) {
            while (resultSet.next() && result.size() < maxResults) {
                T object = readResultSetRow(resultSet, null);
                if (this instanceof Table) {
                    Object id = IdUtils.getId(object);
                    ((Table<T>) this).loadRelations(object, id);
                }
                result.add(object);
            }
        }
        return result;
    }

    protected T readResultSetRow(ResultSet resultSet, Integer time) throws SQLException {
        return readResultSetRow(dbPersistence, clazz, resultSet, time);
    }

    /**
     * Materializes one result set row into a new instance of clazz.
     * Codes, references, dependables and enum Sets are resolved to their
     * object form; everything else is converted via the helper.
     */
    protected <R> R readResultSetRow(DbPersistence dbPersistence, Class<R> clazz, ResultSet resultSet, Integer time) throws SQLException {
        R result = CloneHelper.newInstance(clazz);
        DbPersistenceHelper helper = new DbPersistenceHelper(dbPersistence);
        LinkedHashMap<String, PropertyInterface> columns = findColumns(clazz);

        // first read the resultSet completely then resolve references
        // derby db mixes closing of resultSets.
        Map<PropertyInterface, Object> values = new HashMap<>(resultSet.getMetaData().getColumnCount() * 3);
        for (int columnIndex = 1; columnIndex <= resultSet.getMetaData().getColumnCount(); columnIndex++) {
            String columnName = resultSet.getMetaData().getColumnName(columnIndex);
            if ("ID".equalsIgnoreCase(columnName)) {
                IdUtils.setId(result, resultSet.getObject(columnIndex));
                continue;
            } else if ("VERSION".equalsIgnoreCase(columnName)) {
                IdUtils.setVersion(result, resultSet.getInt(columnIndex));
                continue;
            }
            PropertyInterface property = columns.get(columnName);
            if (property == null) continue;
            Object value = resultSet.getObject(columnIndex);
            if (value == null) continue;
            values.put(property, value);
        }

        for (Map.Entry<PropertyInterface, Object> entry : values.entrySet()) {
            Object value = entry.getValue();
            PropertyInterface property = entry.getKey();
            if (value != null) {
                Class<?> fieldClass = property.getClazz();
                if (Code.class.isAssignableFrom(fieldClass)) {
                    @SuppressWarnings("unchecked")
                    Class<? extends Code> codeClass = (Class<? extends Code>) fieldClass;
                    value = dbPersistence.getCode(codeClass, value, false);
                } else if (ViewUtil.isReference(property)) {
                    Class<?> viewedClass = ViewUtil.getReferencedClass(property);
                    Table<?> referenceTable = dbPersistence.getTable(viewedClass);
                    Object referenceObject = referenceTable.read(value, false); // false -> subEntities not loaded
                    value = CloneHelper.newInstance(fieldClass);
                    ViewUtil.view(referenceObject, value);
                } else if (DbPersistenceHelper.isDependable(property)) {
                    value = dbPersistence.getTable(fieldClass).read(value);
                } else if (fieldClass == Set.class) {
                    // Sets of enums are stored as a bit mask int
                    Set<?> set = (Set<?>) property.getValue(result);
                    Class<?> enumClass = GenericUtils.getGenericClass(property.getType());
                    EnumUtils.fillSet((int) value, enumClass, set);
                    continue; // skip setValue, it's final
                } else {
                    value = helper.convertToFieldClass(fieldClass, value);
                }
                property.setValue(result, value);
            }
        }
        return result;
    }

    // INSERT: dependables are inserted; UPDATE/HISTORIZE: dependables are
    // updated, historized or deleted as needed
    protected enum ParameterMode {
        INSERT, UPDATE, HISTORIZE;
    }

    /**
     * Fills the statement parameters from the object's column values.
     * Codes and references are replaced by their ids; dependables are
     * persisted first so their generated id can be stored in this row.
     * The id is appended at the end (twice if doubleValues, as used by
     * historized tables).
     *
     * @return the next free parameter position
     */
    protected int setParameters(PreparedStatement statement, T object, boolean doubleValues, ParameterMode mode, Object id) throws SQLException {
        int parameterPos = 1;
        for (Map.Entry<String, PropertyInterface> column : columns.entrySet()) {
            PropertyInterface property = column.getValue();
            Object value = property.getValue(object);
            if (value instanceof Code) {
                value = findId((Code) value);
            } else if (ViewUtil.isReference(property)) {
                if (value != null) {
                    value = IdUtils.getId(value);
                }
            } else if (DbPersistenceHelper.isDependable(property)) {
                Table dependableTable = dbPersistence.getTable(property.getClazz());
                if (mode == ParameterMode.INSERT) {
                    if (value != null) {
                        value = dependableTable.insert(value);
                    }
                } else { // update
                    String dependableColumnName = column.getKey();
                    Object dependableId = getDependableId(id, dependableColumnName);
                    if (value != null) {
                        value = updateDependable(dependableTable, dependableId, value, mode);
                    } else {
                        if (mode == ParameterMode.UPDATE) {
                            // to delete a dependable the value where its used has to be set
                            // to null first. This problem could also be solved by setting the
                            // reference constraint to 'deferred'. But this 'deferred' is more
                            // expensive for database and doesn't work with maria db (TODO: really?)
                            setColumnToNull(id, dependableColumnName);
                            dependableTable.delete(dependableId);
                        }
                    }
                }
            }
            helper.setParameter(statement, parameterPos++, value, property);
            if (doubleValues) helper.setParameter(statement, parameterPos++, value, property);
        }
        statement.setObject(parameterPos++, id);
        if (doubleValues) statement.setObject(parameterPos++, id);
        return parameterPos;
    }

    /**
     * Updates (or, for HISTORIZE, re-inserts) a dependable object if it
     * changed; inserts it if it had no id yet.
     *
     * @return the dependable (possibly re-inserted with a new id)
     */
    protected Object updateDependable(Table dependableTable, Object dependableId, Object dependableObject, ParameterMode mode) {
        if (dependableId != null) {
            Object objectInDb = dependableTable.read(dependableId);
            if (!EqualsHelper.equals(dependableObject, objectInDb)) {
                if (mode == ParameterMode.HISTORIZE) {
                    // historized: never overwrite, write a new row instead
                    IdUtils.setId(dependableObject, null);
                    dependableObject = dependableTable.insert(dependableObject);
                } else {
                    dependableTable.update(dependableId, dependableObject);
                }
            }
        } else {
            dependableObject = dependableTable.insert(dependableObject);
        }
        return dependableObject;
    }

    // TODO multiple dependables could be get with one (prepared) statement
    private Object getDependableId(Object id, String column) throws SQLException {
        String query = "SELECT " + column + " FROM " + getTableName() + " WHERE ID = ?";
        if (this instanceof HistorizedTable) {
            query += " AND VERSION = 0";
        }
        PreparedStatement preparedStatement = getStatement(dbPersistence.getConnection(), query, false);
        preparedStatement.setObject(1, id);
        try (ResultSet resultSet = preparedStatement.executeQuery()) {
            if (resultSet.next()) {
                return resultSet.getObject(1);
            } else {
                return null;
            }
        }
    }

    /** Clears a dependable reference column before the dependable is deleted. */
    private void setColumnToNull(Object id, String column) throws SQLException {
        String update = "UPDATE " + getTableName() + " SET " + column + " = NULL WHERE ID = ?";
        PreparedStatement preparedStatement = getStatement(dbPersistence.getConnection(), update, false);
        preparedStatement.setObject(1, id);
        preparedStatement.execute();
    }

    /**
     * Returns the id of a Code: directly if set, otherwise by looking up an
     * equal code in the persistence's code list. May return null.
     */
    private Object findId(Code code) {
        Object id = IdUtils.getId(code);
        if (id != null) {
            return id;
        }
        List<?> codes = dbPersistence.getCodes(code.getClass());
        for (Object c : codes) {
            if (code.equals(c)) {
                return IdUtils.getId(c);
            }
        }
        return null;
    }

    protected abstract String insertQuery();

    protected abstract String selectByIdQuery();

    protected String clearQuery() {
        StringBuilder query = new StringBuilder();
        query.append("DELETE FROM ");
        query.append(getTableName());
        return query.toString();
    }

    /** Creates an index for the column belonging to the given $-key. */
    public void createIndex(Object key) {
        PropertyInterface property = Keys.getProperty(key);
        String fieldPath = property.getPath();
        createIndex(property, fieldPath);
    }

    /**
     * Creates an index for the column matching (a prefix of) fieldPath.
     * If the path reaches into a referenced table, the index is created
     * there recursively, too.
     */
    public void createIndex(PropertyInterface property, String fieldPath) {
        Map.Entry<String, PropertyInterface> entry = findX(fieldPath);
        if (indexes.contains(entry.getKey())) {
            return;
        }
        String myFieldPath = entry.getValue().getPath();
        if (fieldPath.length() > myFieldPath.length()) {
            String rest = fieldPath.substring(myFieldPath.length() + 1);
            AbstractTable<?> innerTable = dbPersistence.table(entry.getValue().getClazz());
            innerTable.createIndex(property, rest);
        }
        indexes.add(entry.getKey());
    }

    /**
     * Finds the column entry matching fieldPath, shortening the path from
     * the right until a column matches.
     *
     * @throws IllegalArgumentException if no prefix of the path matches
     */
    protected Entry<String, PropertyInterface> findX(String fieldPath) {
        while (true) {
            for (Map.Entry<String, PropertyInterface> entry : columns.entrySet()) {
                String columnFieldPath = entry.getValue().getPath();
                if (columnFieldPath.equals(fieldPath)) {
                    return entry;
                }
            }
            int index = fieldPath.lastIndexOf('.');
            if (index < 0) throw new IllegalArgumentException();
            fieldPath = fieldPath.substring(0, index);
        }
    }
}
package org.minimalj.util.resources;

import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.ResourceBundle;
import java.util.ResourceBundle.Control;
import java.util.Set;
import java.util.TreeSet;
import java.util.logging.Logger;

import org.minimalj.application.Application;
import org.minimalj.application.Configuration;
import org.minimalj.frontend.page.Page;
import org.minimalj.model.Code;
import org.minimalj.model.View;
import org.minimalj.model.ViewUtil;
import org.minimalj.model.properties.ChainedProperty;
import org.minimalj.model.properties.PropertyInterface;
import org.minimalj.util.LocaleContext;

/**
 * Central access to the translated resources of the application and the
 * framework. Lookups are locale aware (via {@link LocaleContext}); the
 * application bundle takes precedence over the framework bundle.
 */
public class Resources {
    private static final Logger logger = Logger.getLogger(Resources.class.getName());

    // pass as reportIfMissing argument when a missing resource is not an error
    public static final boolean OPTIONAL = false;

    public static final String APPLICATION_NAME = "Application.name";
    public static final String APPLICATION_ICON = "Application.icon";

    // one ResourceBundleAccess per locale, built lazily
    private static final Map<Locale, ResourceBundleAccess> resourcesByLocale = new HashMap<>();

    // NOTE(review): check-then-put on a plain HashMap — not thread-safe if
    // getAccess can be reached from several request threads; confirm.
    private static ResourceBundleAccess getAccess() {
        Locale locale = LocaleContext.getCurrent();
        if (!resourcesByLocale.containsKey(locale)) {
            ResourceBundle resourceBundle = Application.getInstance().getResourceBundle(locale);
            ResourceBundle frameworkResourceBundle = ResourceBundle.getBundle(Resources.class.getPackage().getName() + ".MinimalJ", locale, Control.getNoFallbackControl(Control.FORMAT_PROPERTIES));
            // application bundle first: its entries win over the framework's
            resourcesByLocale.put(locale, new ResourceBundleAccess(new MultiResourceBundle(resourceBundle, frameworkResourceBundle)));
        }
        return resourcesByLocale.get(locale);
    }

    public static boolean isAvailable(String resourceName) {
        return getAccess().isAvailable(resourceName);
    }

    public static Integer getInteger(String resourceName, boolean reportIfMissing) {
        return getAccess().getInteger(resourceName, reportIfMissing);
    }

    public static String getString(String resourceName) {
        return getString(resourceName, true);
    }

    /**
     * @param resourceName the name of the resource. No further prefixes or postfixes are applied
     * @param reportIfMissing Use the constant OPTIONAL if its not an application error when the resource is not available
     * @return the String or 'resourceName' if the resourceName does not exist
     */
    public static String getString(String resourceName, boolean reportIfMissing) {
        return getAccess().getString(resourceName, reportIfMissing);
    }

    public static String getString(Class<?> clazz) {
        return getAccess().getString(clazz);
    }

    public static String getStringOrNull(Class<?> clazz) {
        return getAccess().getStringOrNull(clazz);
    }

    public static String getPropertyName(PropertyInterface property) {
        return getAccess().getPropertyName(property, null);
    }

    public static String getPropertyName(PropertyInterface property, String postfix) {
        return getAccess().getPropertyName(property, postfix);
    }

    public static String getResourceName(Class<?> clazz) {
        return getAccess().getResourceName(clazz);
    }

    public static String getPageTitle(Page page) {
        return getString(page.getClass());
    }

    /* test */ static class ResourceBundleAccess {
        private final ResourceBundle resourceBundle;

        ResourceBundleAccess(ResourceBundle resourceBundle) {
            this.resourceBundle = resourceBundle;
        }

        boolean isAvailable(String resourceName) {
            return resourceBundle.containsKey(resourceName);
        }

        /**
         * @return the resource parsed as Integer, or null if missing or not
         *         a valid number (both cases are logged/reported)
         */
        Integer getInteger(String resourceName, boolean reportIfMissing) {
            if (isAvailable(resourceName)) {
                String integerString = getString(resourceName);
                try {
                    return Integer.parseInt(integerString);
                } catch (NumberFormatException nfe) {
                    logger.warning("Number format wrong for resource " + resourceName + "('" + integerString + "')");
                    return null;
                }
            } else {
                reportMissing(resourceName, reportIfMissing);
                return null;
            }
        }

        String getString(String resourceName) {
            return getString(resourceName, true);
        }

        /** @return the resource, or the quoted resourceName as fallback marker */
        String getString(String resourceName, boolean reportIfMissing) {
            if (isAvailable(resourceName)) {
                return resourceBundle.getString(resourceName);
            } else {
                reportMissing(resourceName, reportIfMissing);
                return "'" + resourceName + "'";
            }
        }

        String getString(Class<?> clazz) {
            String result = getStringOrNull(clazz);
            if (result != null) {
                return result;
            }
            // trigger the regular fallback/reporting path
            return getString(clazz.getSimpleName());
        }

        /**
         * Looks up a class by qualified name, then simple name; for a View
         * (that is not a Code) the viewed class is tried as fallback.
         *
         * @return the resource or null if none of the keys exist
         */
        String getStringOrNull(Class<?> clazz) {
            if (isAvailable(clazz.getName())) {
                return getString(clazz.getName());
            } else if (isAvailable(clazz.getSimpleName())) {
                return getString(clazz.getSimpleName());
            } else if (View.class.isAssignableFrom(clazz) && !Code.class.isAssignableFrom(clazz)) {
                Class<?> viewedClass = ViewUtil.getViewedClass(clazz);
                String byViewedClass = getStringOrNull(viewedClass);
                if (byViewedClass != null) {
                    return byViewedClass;
                }
            }
            return null;
        }

        String getPropertyName(PropertyInterface property, String postfix) {
            if (property instanceof ChainedProperty) {
                ChainedProperty chainedProperty = (ChainedProperty) property;
                return getProperty(chainedProperty, postfix);
            }
            Class<?> fieldClass = property.getClazz();
            String fieldName = property.getName();
            if (postfix != null) fieldName += postfix;
            Class<?> declaringClass = property.getDeclaringClass();
            return getPropertyName(fieldName, declaringClass, fieldClass, postfix != null);
        }

        /**
         * Resolves a chained property by trying the full dotted path against
         * each declaring class along the chain, dropping the leading segment
         * on every miss.
         */
        private String getProperty(ChainedProperty chainedProperty, String postfix) {
            List<PropertyInterface> chain = chainedProperty.getChain();
            String fieldName = chainedProperty.getPath();
            if (postfix != null) fieldName += postfix;
            while (chain.size() > 1) {
                String result = getPropertyName(fieldName, chain.get(0).getDeclaringClass(), chainedProperty.getClazz(), true);
                if (result != null) {
                    return result;
                } else {
                    chain = chain.subList(1, chain.size());
                    fieldName = fieldName.substring(fieldName.indexOf('.') + 1);
                }
            }
            return getPropertyName(chain.get(0), postfix);
        }

        /**
         * Looks up a field name by (in order): fully qualified key, class
         * qualified key, viewed-class redirect, field class, plain field
         * name. Returns null when optional and nothing matched; otherwise
         * reports the miss and returns the quoted key.
         */
        String getPropertyName(String fieldName, Class<?> declaringClass, Class<?> fieldClass, boolean optional) {
            // completeQualifiedKey example: "ch.openech.model.Person.nationality"
            String completeQualifiedKey = declaringClass.getName() + "." + fieldName;
            if (resourceBundle.containsKey(completeQualifiedKey)) {
                return resourceBundle.getString(completeQualifiedKey);
            }

            // qualifiedKey example: "Person.nationality"
            String qualifiedKey = declaringClass.getSimpleName() + "." + fieldName;
            if (resourceBundle.containsKey(qualifiedKey)) {
                return resourceBundle.getString(qualifiedKey);
            }

            // if declaring class is a view check to viewed class
            if (View.class.isAssignableFrom(declaringClass) && !Code.class.isAssignableFrom(declaringClass)) {
                Class<?> viewedClass = ViewUtil.getViewedClass(declaringClass);
                return getPropertyName(fieldName, viewedClass, fieldClass, optional);
            }

            // class of field
            String byFieldClass = getStringOrNull(fieldClass);
            if (byFieldClass != null) {
                return byFieldClass;
            }

            // unqualifiedKey example: "nationality"
            if (resourceBundle.containsKey(fieldName)) {
                return resourceBundle.getString(fieldName);
            }

            // (removed: a second getStringOrNull(fieldClass) lookup stood here;
            // it could never return non-null after the identical lookup above)

            if (!optional) {
                reportMissing(qualifiedKey, true);
                return "'" + qualifiedKey + "'";
            } else {
                return null;
            }
        }

        /**
         * Returns the first key (class name or simple name) available for
         * the class or one of its superclasses; falls back to the simple
         * name of the class itself.
         */
        String getResourceName(Class<?> clazz) {
            if (clazz.isAnonymousClass()) {
                clazz = clazz.getSuperclass();
            }
            Class<?> c = clazz;
            while (c != Object.class) {
                if (isAvailable(c.getName())) {
                    return c.getName();
                }
                if (isAvailable(c.getSimpleName())) {
                    return c.getSimpleName();
                }
                c = c.getSuperclass();
            }
            return clazz.getSimpleName();
        }
    }

    // missing resources collected in dev mode, printable via printMissing
    private static final Set<String> missing = new TreeSet<>();

    private static void reportMissing(String resourceName, boolean reportIfMissing) {
        if (reportIfMissing && Configuration.isDevModeActive()) {
            missing.add(resourceName);
        }
    }

    public static void printMissing() {
        missing.stream().forEach(s -> System.out.println(s + " = "));
    }

    private static Map<String, String> mimeTypeByPostfix = new HashMap<>();

    static {
        mimeTypeByPostfix.put("html", "text/html;charset=UTF-8");
        mimeTypeByPostfix.put("css", "text/css;charset=UTF-8");
        mimeTypeByPostfix.put("js", "application/javascript;charset=UTF-8");
        mimeTypeByPostfix.put("jpg", "image/jpg");
        mimeTypeByPostfix.put("png", "image/png");
        mimeTypeByPostfix.put("woff2", "font/woff2");
    }

    public static void addMimeType(String postfix, String contentType) {
        mimeTypeByPostfix.put(postfix, contentType);
    }

    public static String getMimeType(String postfix) {
        return mimeTypeByPostfix.get(postfix);
    }
}
package org.mitre.synthea.modules;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;

import org.mitre.synthea.engine.Distribution;
import org.mitre.synthea.engine.Distribution.Kind;
import org.mitre.synthea.engine.Module;
import org.mitre.synthea.helpers.SimpleCSV;
import org.mitre.synthea.helpers.Utilities;
import org.mitre.synthea.world.agents.Clinician;
import org.mitre.synthea.world.agents.Person;
import org.mitre.synthea.world.agents.Provider;
import org.mitre.synthea.world.concepts.HealthRecord.Code;
import org.mitre.synthea.world.concepts.HealthRecord.Entry;
import org.mitre.synthea.world.concepts.HealthRecord.Procedure;
import org.mitre.synthea.world.concepts.VitalSign;

/**
 * Submodule that performs a CABG (coronary artery bypass graft) procedure on a person:
 * it picks a surgeon, estimates the surgery duration from a linear regression over
 * patient and surgeon attributes, records the Procedure on the health record, and then
 * "delays" by returning false from process() until the computed stop time is reached.
 */
public class PerformCABG extends Module {

  public PerformCABG() {
    this.name = "PerformCABG";
    this.submodule = true; // make sure this doesn't run except when called
  }

  /** Stateless module: a shared instance is safe, so clone() returns this. */
  public Module clone() {
    return this;
  }

  // SNOMED-CT codes for the elective vs. emergency variants of the procedure.
  private static final Code CABG =
      new Code("SNOMED-CT", "232717009", "Coronary artery bypass grafting (procedure)");
  private static final Code EMERGENCY_CABG =
      new Code("SNOMED-CT", "414088005", "Emergency coronary artery bypass graft (procedure)");

  // Pool of synthetic surgeons loaded once from cabg_surgeons.csv at class-load time.
  private static List<Clinician> cabgSurgeons = loadCabgSurgeons();

  /**
   * Loads the CABG surgeon roster from the cabg_surgeons.csv resource, keeping only rows
   * whose surgery_group_label is "CABG", and attaches each synthetic Clinician to the
   * first known Provider. Any failure is fatal (rethrown as Error) since the module
   * cannot function without its surgeon pool.
   */
  private static List<Clinician> loadCabgSurgeons() {
    try {
      String cabgSurgeonsCsv = Utilities.readResource("cabg_surgeons.csv");
      List<LinkedHashMap<String,String>> surgeons = SimpleCSV.parse(cabgSurgeonsCsv);

      // only keep "CABG" code lines
      surgeons.removeIf(s -> !s.get("surgery_group_label").equals("CABG"));

      if (Provider.getProviderList().isEmpty()) {
        // hack to prevent a crash in the test suite, if this module gets instantiated
        // before providers load. this should never happen when creating a real population
        return Collections.emptyList();
      }

      Provider provider = Provider.getProviderList().get(0);
      // Fixed seed (-1) so the generated surgeon genders are reproducible across runs.
      Random clinicianRand = new Random(-1);

      ArrayList<Clinician> clinicianList = new ArrayList<>();
      int id = 0;
      for (LinkedHashMap<String,String> surgeon : surgeons) {
        Clinician clin = new Clinician(-1, clinicianRand, id++, provider);
        clin.attributes.putAll(surgeon);
        clin.attributes.put(Clinician.SPECIALTY, "CABG");

        // The anonymized surgeon code doubles as the clinician's name fields.
        String surgeonCode = (String)surgeon.get("surgeon_code_final");
        clin.attributes.put(Clinician.FIRST_NAME, surgeonCode);
        clin.attributes.put(Clinician.LAST_NAME, surgeonCode);
        clin.attributes.put(Clinician.NAME, surgeonCode);
        clin.attributes.put(Clinician.NAME_PREFIX, "Dr.");
        clin.attributes.put(Clinician.GENDER, clinicianRand.nextBoolean() ? "F" : "M");

        // Location attributes are copied from the hosting provider.
        clin.attributes.put(Person.ADDRESS, provider.address);
        clin.attributes.put(Person.CITY, provider.city);
        clin.attributes.put(Person.STATE, provider.state);
        clin.attributes.put(Person.ZIP, provider.zip);
        clin.attributes.put(Person.COORDINATE, provider.getLonLat());

        clinicianList.add(clin);
      }

      provider.clinicianMap.put("CABG", clinicianList);

      return clinicianList;
    } catch (Exception e) {
      throw new Error(e);
    }
  }

  // Gaussian noise term added to every duration estimate (mean -10, sd 35 minutes).
  private static Distribution NOISE = buildNoise();

  /** Builds the Gaussian noise distribution used by getCabgDuration. */
  private static Distribution buildNoise() {
    Distribution d = new Distribution();
    d.kind = Kind.GAUSSIAN;
    d.parameters = new HashMap<>();
    d.parameters.put("standardDeviation", 35.0);
    d.parameters.put("mean", -10.0);
    return d;
  }

  /**
   * Runs one step of the submodule. On first entry, selects a surgeon, computes and
   * stores the surgery stop time, and records the Procedure; on subsequent entries,
   * simply waits for that stop time.
   *
   * @return true once the surgery's stop time has been reached (submodule complete),
   *         false while the surgery is still "in progress" (re-process next timestep)
   */
  @Override
  public boolean process(Person person, long time) {
    long stopTime;

    if (person.attributes.containsKey("cabg_stop_time") ) {
      // Surgery already started on a previous timestep; just recover the stop time.
      stopTime = (long) person.attributes.get("cabg_stop_time");
    } else {
      // First entry: pick a surgeon at random and schedule the surgery.
      Clinician surgeon = cabgSurgeons.get(person.randInt(cabgSurgeons.size()));
      stopTime = time + getCabgDuration(person, surgeon, time);
      person.attributes.put("cabg_stop_time", stopTime);

      // "care_score_e" is assumed to be set by the calling module — TODO confirm.
      boolean emergency = (boolean)person.attributes.get("care_score_e");
      Code code = emergency ? EMERGENCY_CABG : CABG;
      String primaryCode = code.code;
      Procedure cabg = person.record.procedure(time, primaryCode);
      cabg.name = this.name;
      cabg.codes.add(code);
      cabg.stop = stopTime;

      cabg.clinician = surgeon;
      surgeon.incrementEncounters();

      // hack this clinician back onto the record?
      person.record.currentEncounter(stopTime).clinician = surgeon;

      String reason = "cardiac_surgery_reason";

      // below copied from Procedure State to make this easier
      if (person.attributes.containsKey(reason)) {
        Entry condition = (Entry) person.attributes.get(reason);
        cabg.reasons.addAll(condition.codes);
      } else if (person.hadPriorState(reason)) {
        // loop through the present conditions, the condition "name" will match
        // the name of the ConditionOnset state (aka "reason")
        for (Entry entry : person.record.present.values()) {
          if (reason.equals(entry.name)) {
            cabg.reasons.addAll(entry.codes);
          }
        }
      }
    }

    // note return options here, see State$CallSubmodule
    // if we return true, the submodule completed and processing continues to the next state
    // if we return false, the submodule did not complete (like with a Delay)
    // and will re-process the next timestep.
    if (time >= stopTime) {
      // remove the stop time so that a second processing can go through correctly
      person.attributes.remove("cabg_stop_time");

      // HACK for ensuring rewind time works. it will get overwritten later
      person.history.get(0).exited = stopTime;
      return true;
    } else {
      return false;
    }
  }

  // Hard clamp on the regression output, in milliseconds (45..926 minutes).
  public static final long MAX_DURATION = Utilities.convertTime("minutes", 926);
  public static final long MIN_DURATION = Utilities.convertTime("minutes", 45);

  // commented out for now - probably easier to just manually code these than make it generic
  // private static final Map<String,Double> COEFFICIENTS;
  // private static final Table<String,String,Double> VALUE_COEFFICIENTS;
  // static {
  //   Map<String, Double> coefficients = new HashMap<>();
  //   coefficients.put("age", 14.0);
  //   COEFFICIENTS = coefficients;
  //   Table<String,String,Double> valueCoefficients = HashBasedTable.create();
  //   valueCoefficients.put(Person.GENDER, "F", 0.3);
  //   valueCoefficients.put(Person.GENDER, "M", 0.3);
  //   VALUE_COEFFICIENTS = valueCoefficients;

  /* Regression specification the constants below were transcribed from:
  Key,Coefficient,Type
  constant,79.23,number
  Age,-0.63,number
  bsa,12.39,number
  surgeon_mean_time,0.93,number
  n_surgeries,-0.03,number
  GENDER.M,4.16,category
  GENDER.F,-3.91,category
  care_score_e.1,-10.99,category
  care_score_e.2,-6.85,category
  care_score_e.3,0.03,category
  care_score_e.3E,-5.61,category
  care_score_e.4,6.54,category
  care_score_e.4E,3.18,category
  care_score_e.5,-19.50,category
  care_score_e.5E,-23.10,category
  Cardiac_Redo.False,-25.37,category
  Cardiac_Redo.True,97.48,category
  Operative_priority.0,-46.84,category
  Operative_priority.1,-15.70,category
  Operative_priority.2,-32.01,category
  Operative_priority.3,-15.82,category
  Operative_priority.4,-60.55,category
  */

  private static final double AGE_COEFFICIENT = -0.63;
  private static final double BSA_COEFFICIENT = 12.39;
  private static final double surgeon_mean_time_COEFFICIENT = 0.93;
  private static final double n_surgeries_COEFFICIENT = -0.03;
  private static final double M_COEFFICIENT = 4.16;
  private static final double F_COEFFICIENT = -3.91;
  private static final double REDO_TRUE_COEFFICIENT = 97.48;
  private static final double REDO_FALSE_COEFFICIENT = -25.37;

  // care score: coefficient keyed by care score string ("1".."5", with "E" suffix
  // for emergency cases with score > 2 — see getCabgDuration).
  private static final Map<String, Double> CARE_SCORE_COEFFICIENTS = createCareScoreCoefficients();

  private static Map<String, Double> createCareScoreCoefficients() {
    Map<String, Double> map = new HashMap<>();
    map.put("1", -10.99);
    map.put("2", -6.85);
    map.put("3", 0.03);
    map.put("3E", -5.61);
    map.put("4", 6.54);
    map.put("4E", 3.18);
    map.put("5", -19.50);
    map.put("5E", -23.10);
    return map;
  }

  // operative priority: indexed directly by the "care_priority_level" attribute (0..4).
  private static final double[] OPER_PRIORITY_COEFFICIENTS = { -46.84, -15.70, -32.01, -15.82, -60.55 };

  /**
   * Estimates the CABG duration (in milliseconds) for the given person and surgeon at
   * the given time, via the linear model transcribed above plus Gaussian noise, clamped
   * to [MIN_DURATION, MAX_DURATION].
   *
   * @throws IllegalStateException if the person's care score maps to no known coefficient
   */
  public static long getCabgDuration(Person person, Clinician surgeon, long time) {
    double duration = 79.23; // baseline, minutes

    duration += (AGE_COEFFICIENT * person.ageInDecimalYears(time));
    duration += (BSA_COEFFICIENT * getBodySurfaceArea(person, time));

    if ("F".equals(person.attributes.get(Person.GENDER))) {
      duration += F_COEFFICIENT;
    } else {
      duration += M_COEFFICIENT;
    }

    // A prior CABG on the record (either code) counts as a cardiac redo.
    boolean cardiacRedo = person.record.present.containsKey(EMERGENCY_CABG.code)
        || person.record.present.containsKey(CABG.code);
    if (cardiacRedo) {
      duration += REDO_TRUE_COEFFICIENT;
    } else {
      duration += REDO_FALSE_COEFFICIENT;
    }

    Integer careScore = (Integer)person.attributes.get("care_score");
    String careScoreString = careScore.toString();
    // Scores above 2 get an "E" suffix when the case is an emergency.
    if (careScore > 2 && (boolean)(person.attributes.get("care_score_e"))) {
      careScoreString += "E";
    }
    if (!CARE_SCORE_COEFFICIENTS.containsKey(careScoreString)) {
      throw new IllegalStateException("Failed to find " + careScoreString);
    }
    duration += CARE_SCORE_COEFFICIENTS.get(careScoreString);

    duration += OPER_PRIORITY_COEFFICIENTS[(int)person.attributes.get("care_priority_level")];

    // these are ints but all have a trailing .0 in the CSV
    int surgeon_n_surgeries = (int)Double.parseDouble((String)surgeon.attributes.get("n_surgeries"));
    double surgeon_mean_time = Double.parseDouble((String)surgeon.attributes.get("mean"));

    duration += (surgeon_mean_time_COEFFICIENT * surgeon_mean_time);
    duration += (n_surgeries_COEFFICIENT * surgeon_n_surgeries);

    duration += NOISE.generate(person);

    long durationInMs = Utilities.convertTime("minutes", duration);
    return bound(durationInMs, MIN_DURATION, MAX_DURATION);
  }

  /** Mosteller formula: BSA = sqrt(height * weight / 3600). */
  private static double getBodySurfaceArea(Person person, long time) {
    double h = person.getVitalSign(VitalSign.HEIGHT, time);
    double w = person.getVitalSign(VitalSign.WEIGHT, time);
    return Math.sqrt(h * w / 3600);
  }

  /** Clamps value into [min, max]. */
  private static long bound(long value, long min, long max) {
    return Math.min(Math.max(value, min), max);
  }
}
package org.ndexbio.rest.services;

import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collection;

import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;

import org.ndexbio.rest.NdexSchemaManager;
import org.ndexbio.rest.domain.IBaseTerm;
import org.ndexbio.rest.domain.IFunctionTerm;
import org.ndexbio.rest.domain.IGroup;
import org.ndexbio.rest.domain.IGroupInvitationRequest;
import org.ndexbio.rest.domain.IGroupMembership;
import org.ndexbio.rest.domain.IJoinGroupRequest;
import org.ndexbio.rest.domain.INetworkAccessRequest;
import org.ndexbio.rest.domain.INetworkMembership;
import org.ndexbio.rest.domain.IUser;
import org.ndexbio.rest.exceptions.NdexException;
import org.ndexbio.rest.models.Status;
import org.ndexbio.rest.models.User;

import com.orientechnologies.orient.core.db.document.ODatabaseDocumentPool;
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.tinkerpop.blueprints.impls.orient.OrientBaseGraph;
import com.tinkerpop.blueprints.impls.orient.OrientGraph;
import com.tinkerpop.frames.FramedGraph;
import com.tinkerpop.frames.FramedGraphFactory;
import com.tinkerpop.frames.modules.gremlingroovy.GremlinGroovyModule;
import com.tinkerpop.frames.modules.typedgraph.TypedGraphModuleBuilder;

/**
 * Base class for NDEx REST services. Provides access to the logged-in user,
 * lifecycle helpers for the OrientDB-backed framed graph, and two generic
 * endpoints (/status and /api) inherited by every concrete service.
 */
public abstract class NdexService
{
    @Context
    HttpServletRequest servletRequest;

    protected FramedGraphFactory _graphFactory = null;
    protected ODatabaseDocumentTx _ndexDatabase = null;
    protected FramedGraph<OrientBaseGraph> _orientDbGraph = null;

    /**
     * Returns the User stashed in the request by the authentication layer,
     * or null if no user is logged in.
     */
    protected User getLoggedInUser()
    {
        final Object user = servletRequest.getAttribute("User");
        if (user != null)
            return (User)user;

        return null;
    }

    /**
     * Acquires a pooled database connection and builds the typed framed graph
     * on top of it. Call teardownDatabase() when the request is finished.
     */
    protected void setupDatabase()
    {
        //When starting up this application, tell OrientDB's global
        //configuration to close the storage; this is required here otherwise
        //OrientDB connection pooling doesn't work as expected
        //OGlobalConfiguration.STORAGE_KEEP_OPEN.setValue(false);

        _graphFactory = new FramedGraphFactory(new GremlinGroovyModule(),
            new TypedGraphModuleBuilder()
                .withClass(IGroup.class)
                .withClass(IUser.class)
                .withClass(IGroupMembership.class)
                .withClass(INetworkMembership.class)
                .withClass(IGroupInvitationRequest.class)
                .withClass(IJoinGroupRequest.class)
                .withClass(INetworkAccessRequest.class)
                .withClass(IBaseTerm.class)
                .withClass(IFunctionTerm.class).build());

        // TODO: Refactor this to connect using a configurable
        // username/password, and database
        _ndexDatabase = ODatabaseDocumentPool.global().acquire("remote:localhost/ndex", "admin", "admin");
        _orientDbGraph = _graphFactory.create((OrientBaseGraph)new OrientGraph(_ndexDatabase));
        NdexSchemaManager.INSTANCE.init(_orientDbGraph.getBaseGraph());
    }

    /**
     * Releases the resources acquired by setupDatabase().
     *
     * FIX: the graph wraps the document database, so the graph must be shut
     * down BEFORE the underlying database is closed — the original code
     * closed the database first and then called shutdown() on a graph whose
     * backing connection was already gone.
     */
    protected void teardownDatabase()
    {
        _graphFactory = null;

        // Shut down the graph wrapper first...
        if (_orientDbGraph != null)
        {
            _orientDbGraph.shutdown();
            _orientDbGraph = null;
        }

        // ...then release the underlying pooled connection.
        if (_ndexDatabase != null)
        {
            _ndexDatabase.close();
            _ndexDatabase = null;
        }
    }

    /**
     * Simple liveness endpoint.
     *
     * @return a Status object whose state is always "RUNNING"
     */
    @GET
    @Path("/status")
    @Produces("application/json")
    public Status getStatus() throws NdexException
    {
        final Status status = new Status();
        status.setState("RUNNING");
        return status;
    }

    /**
     * Reflection-based API self-description: for every public method of the
     * concrete service class that carries at least one annotation, returns the
     * string forms of its annotations.
     */
    @GET
    @Path("/api")
    @Produces("application/json")
    public Collection<Collection<String>> getApi() throws NdexException
    {
        final Collection<Collection<String>> methodAnnotationList = new ArrayList<Collection<String>>();

        for (Method method : this.getClass().getMethods())
        {
            final Collection<String> methodAnnotationStrings = new ArrayList<String>();
            for (Annotation annotation : method.getDeclaredAnnotations())
                methodAnnotationStrings.add(annotation.toString());

            // Skip methods with no annotations at all (e.g. plain Object methods).
            if (methodAnnotationStrings.size() > 0)
                methodAnnotationList.add(methodAnnotationStrings);
        }

        return methodAnnotationList;
    }
}
package org.nnsoft.sameas4j;

import static java.lang.String.format;

import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;
import java.net.URLConnection;

import org.nnsoft.sameas4j.cache.Cache;
import org.nnsoft.sameas4j.cache.CacheKey;

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonParseException;

/**
 * Default implementation of {@link org.nnsoft.sameas4j.SameAsService}.
 */
final class SameAsServiceImpl implements SameAsService {

    /**
     * The sameas.org service for looking up URLs template constant.
     */
    private static final String SERVICE_URL = "http://sameas.org/json?uri=%s";

    /**
     * The sameas.org service for looking up keywords template constant.
     */
    private static final String SERVICE_KEYWORD = "http://sameas.org/json?q=%s";

    /**
     * The GsonBuilder used to create new equivalence JSON parser.
     */
    private final GsonBuilder gsonBuilder = new GsonBuilder();

    /**
     * The {@code Cache} reference, can be null.
     */
    private Cache cache;

    /**
     * Creates a new {@link org.nnsoft.sameas4j.SameAsService} instance,
     * registering the deserializers for both response shapes.
     */
    public SameAsServiceImpl() {
        this.gsonBuilder.registerTypeAdapter(Equivalence.class, new EquivalenceDeserializer());
        this.gsonBuilder.registerTypeAdapter(EquivalenceList.class, new EquivalenceListDeserializer());
    }

    /**
     * {@inheritDoc}
     */
    public void setCache(Cache cache) {
        this.cache = cache;
    }

    /**
     * {@inheritDoc}
     */
    public Equivalence getDuplicates(URI uri) throws SameAsServiceException {
        return invokeURL(format(SERVICE_URL, uri), Equivalence.class);
    }

    /**
     * {@inheritDoc}
     */
    public EquivalenceList getDuplicates(String keyword) throws SameAsServiceException {
        return invokeURL(format(SERVICE_KEYWORD, keyword), EquivalenceList.class);
    }

    /**
     * Invokes a Sameas.org service URL and parses the JSON response.
     *
     * The cache (when configured) is keyed on the URL plus the server-reported
     * Last-Modified timestamp, so a cached entry is only reused while the
     * remote document is unchanged.
     *
     * @param <T> the expected return type.
     * @param toBeInvoked the service URL has to be invoked.
     * @param returnType the type the JSON response has to be bind to.
     * @return the bound object.
     * @throws SameAsServiceException is any error occurs.
     */
    private <T> T invokeURL(String toBeInvoked, Class<T> returnType) throws SameAsServiceException {
        URL url;
        try {
            url = new URL(toBeInvoked);
        } catch (MalformedURLException e) {
            // FIX: preserve the cause — the original dropped the exception.
            throw new SameAsServiceException("An error occurred while building the URL '"
                    + toBeInvoked
                    + "'", e);
        }

        URLConnection connection = null;
        Reader reader = null;
        try {
            connection = url.openConnection();

            long lastModified = connection.getLastModified();
            if (this.cache != null) {
                CacheKey cacheKey = new CacheKey(toBeInvoked, lastModified);
                T cached = this.cache.get(cacheKey, returnType);
                if (cached != null) {
                    return cached;
                }
            }

            if (connection instanceof HttpURLConnection) {
                ((HttpURLConnection) connection).connect();
            }

            // FIX: the service returns JSON, which is UTF-8; the original used
            // the platform default charset and could garble non-ASCII URIs.
            reader = new InputStreamReader(connection.getInputStream(), "UTF-8");
            Gson gson = this.gsonBuilder.create();
            T response = gson.fromJson(reader, returnType);

            if (this.cache != null) {
                CacheKey cacheKey = new CacheKey(toBeInvoked, lastModified);
                this.cache.put(cacheKey, response);
            }

            return response;
        } catch (IOException e) {
            // FIX: keep the original message but preserve the cause as well.
            throw new SameAsServiceException(format("An error occurred while invoking the URL '%s': %s",
                    toBeInvoked, e.getMessage()), e);
        } catch (JsonParseException e) {
            throw new SameAsServiceException("An error occurred while parsing the JSON response", e);
        } finally {
            // instanceof is null-safe, so no separate null check is needed.
            if (connection instanceof HttpURLConnection) {
                ((HttpURLConnection) connection).disconnect();
            }
            if (reader != null) {
                try {
                    reader.close();
                } catch (IOException e) {
                    // close it quietly
                }
            }
        }
    }
}
package org.openhmis.webservice;

import java.io.IOException;
import java.util.List;

import javax.ws.rs.BeanParam;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

import org.apache.log4j.Logger;
import org.openhmis.code.ClientNameDataQuality;
import org.openhmis.dto.ClientDTO;
import org.openhmis.exception.AccessDeniedException;
import org.openhmis.exception.RecordNotFoundException;
import org.openhmis.dto.search.ClientSearchDTO;
import org.openhmis.manager.ClientManager;
import org.openhmis.util.Authentication;
import org.openhmis.util.DateParser;

import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;

/**
 * REST resource exposing CRUD operations on clients at /clients.
 * Every endpoint first checks the Authorization header against the
 * required access level and throws AccessDeniedException on failure.
 */
@Path("/clients")
public class ClientService {

    private static final Logger log = Logger.getLogger(ClientService.class);
    private static final ClientManager clientManager = new ClientManager();

    public ClientService() {}

    /**
     * Lists clients matching the search parameters.
     * Requires READ access.
     */
    @GET
    @Path("/")
    @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
    public List<ClientDTO> getClients(@HeaderParam("Authorization") String authorization,
            @BeanParam ClientSearchDTO searchDTO) throws JsonProcessingException {
        boolean canRead = Authentication.googleAuthenticate(authorization, Authentication.READ);
        if (!canRead) {
            throw new AccessDeniedException();
        }

        // Return clients that match the search terms
        List<ClientDTO> matches = clientManager.getClients(searchDTO);

        /* TBD (issue #65): We need to determine a standard logging format, some
           conventions, and maybe a helper class to enforce it all; would also be nice
           to log which user made the request. But for now, just show that logging works. */
        log.debug("GET /clients/ (" + matches.size() + " results)");

        return matches;
    }

    /**
     * Creates a new client from the request body.
     * Requires WRITE access.
     */
    @POST
    @Path("/")
    @Consumes({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
    @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
    public ClientDTO createClient(@HeaderParam("Authorization") String authorization,
            ClientDTO inputVO) throws JsonParseException, JsonMappingException, IOException {
        boolean canWrite = Authentication.googleAuthenticate(authorization, Authentication.WRITE);
        if (!canWrite) {
            throw new AccessDeniedException();
        }

        return clientManager.addClient(inputVO);
    }

    /**
     * Fetches a single client by personal id.
     * Requires READ access.
     */
    @GET
    @Path("/{personalId}")
    @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
    public ClientDTO getClient(@HeaderParam("Authorization") String authorization,
            @PathParam("personalId") String personalId) throws JsonProcessingException {
        boolean canRead = Authentication.googleAuthenticate(authorization, Authentication.READ);
        if (!canRead) {
            throw new AccessDeniedException();
        }

        return clientManager.getClientByPersonalId(personalId);
    }

    /**
     * Updates the client identified by the path's personal id; the id in the
     * path wins over any id in the request body.
     * Requires WRITE access.
     */
    @PUT
    @Path("/{personalId}")
    @Consumes({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
    @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
    public ClientDTO updateClient(@HeaderParam("Authorization") String authorization,
            @PathParam("personalId") String personalId,
            ClientDTO inputVO) throws JsonParseException, JsonMappingException, IOException {
        boolean canWrite = Authentication.googleAuthenticate(authorization, Authentication.WRITE);
        if (!canWrite) {
            throw new AccessDeniedException();
        }

        inputVO.setPersonalId(personalId);
        return clientManager.updateClient(inputVO);
    }

    /**
     * Deletes the client identified by personal id.
     * Requires WRITE access.
     *
     * @return the literal string "true" on success
     */
    @DELETE
    @Path("/{personalId}")
    @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
    public String deleteClient(@HeaderParam("Authorization") String authorization,
            @PathParam("personalId") String personalId) throws JsonParseException, JsonMappingException, IOException {
        boolean canWrite = Authentication.googleAuthenticate(authorization, Authentication.WRITE);
        if (!canWrite) {
            throw new AccessDeniedException();
        }

        clientManager.deleteClient(personalId);
        return "true";
    }
}
package org.spout.downpour;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;

/**
 * An InputStream implementation that reads from another InputStream while caching the data to an
 * OutputStream.
 *
 * If setExpectedBytes() is called, close() verifies that exactly that many bytes were read and
 * invokes the onFinish/onFailure callbacks accordingly, throwing an IOException on a short read.
 *
 * NOTE(review): mark()/reset() are delegated to the underlying stream, but re-read bytes are
 * cached (and counted) again — avoid mark/reset when an expected byte count or cache integrity
 * matters.
 */
public class CachingInputStream extends InputStream {
	private InputStream readFrom = null;
	private OutputStream writeTo = null;
	private ByteBuffer buffer = ByteBuffer.allocate(1024);
	private Runnable onFinish = null;
	private Runnable onFailure = null;
	private long expectedBytes = -1;
	private long receivedBytes = 0;

	/**
	 * Creates a new caching InputStream
	 * @param readFrom the stream to read data from
	 * @param writeTo the stream to cache the read data to
	 */
	public CachingInputStream(InputStream readFrom, OutputStream writeTo) {
		super();
		this.readFrom = readFrom;
		this.writeTo = writeTo;
	}

	/** Sets the callback invoked by close() when all expected bytes were received. */
	public void setOnFinish(Runnable onFinish) {
		this.onFinish = onFinish;
	}

	/** Sets the callback invoked by close() when the byte count check fails. */
	public void setOnFailure(Runnable onFailure) {
		this.onFailure = onFailure;
	}

	/** Sets the number of bytes the stream is expected to deliver; -1 disables the check. */
	public synchronized void setExpectedBytes(long expectedBytes) {
		this.expectedBytes = expectedBytes;
	}

	/** Returns how many data bytes have actually been read so far. */
	public synchronized long getReceivedBytes() {
		return receivedBytes;
	}

	public long getExpectedBytes() {
		return expectedBytes;
	}

	@Override
	public synchronized int read() throws IOException {
		int data = readFrom.read();
		if (data == -1) {
			// End of stream: nothing was received, so don't cache and don't count.
			// FIX: the original incremented receivedBytes BEFORE this check, so reading
			// to EOF always over-counted by one and the expected-vs-received comparison
			// in close() could never succeed.
			return data;
		}
		receivedBytes++;
		if (!buffer.hasRemaining()) {
			// Buffer is full: flush it to the cache output and start over.
			writeTo.write(buffer.array(), 0, buffer.capacity());
			buffer.position(0);
		}
		buffer.put((byte) data);
		return data;
	}

	/**
	 * Closes the stream it reads from and the stream it caches to, flushing any
	 * buffered bytes to the cache first. If an expected byte count was set, verifies
	 * it and fires onFinish/onFailure; a mismatch raises an IOException.
	 *
	 * FIX: guarded so a second close() is a no-op instead of an NPE on the nulled buffer.
	 */
	@Override
	public void close() throws IOException {
		if (buffer == null) {
			return; // already closed
		}
		readFrom.close();
		super.close();
		// Write remaining stuff to output
		writeTo.write(buffer.array(), 0, buffer.position());
		buffer = null;
		writeTo.close();
		if (expectedBytes != -1) {
			if (expectedBytes == receivedBytes) {
				if (onFinish != null) {
					try {
						onFinish.run();
					} catch (Exception e) {
						e.printStackTrace();
					}
				}
			} else {
				if (onFailure != null) {
					try {
						onFailure.run();
					} catch (Exception e) {
						e.printStackTrace();
					}
				}
				throw new IOException("File was not completely downloaded!");
			}
		}
	}

	@Override
	public int available() throws IOException {
		return readFrom.available();
	}

	@Override
	public synchronized void mark(int readlimit) {
		readFrom.mark(readlimit);
	}

	@Override
	public boolean markSupported() {
		return readFrom.markSupported();
	}

	@Override
	public synchronized void reset() throws IOException {
		readFrom.reset();
	}

	// NOTE(review): skipped bytes bypass the cache entirely, so the cached copy will be
	// missing them — presumably callers never mix skip() with caching; verify.
	@Override
	public long skip(long n) throws IOException {
		return readFrom.skip(n);
	}
}
package org.twdata.pkgscanner;

import java.net.URL;
import java.net.URLDecoder;
import java.net.URISyntaxException;
import java.util.*;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.io.IOException;
import java.io.File;

/**
 * Does the actual work of scanning the classloader
 */
class InternalScanner {
    // Caches the set of package names found in each jar, keyed by jar path,
    // so a jar reachable from several roots is only opened once.
    private Map<String,Set<String>> jarContentCache = new HashMap<String,Set<String>>();

    private ClassLoader classloader;
    private PackageScanner.VersionMapping[] versionMappings;
    private OsgiVersionConverter versionConverter = new DefaultOsgiVersionConverter();
    private final boolean debug;

    /** Filter deciding which packages and which jar files are of interest. */
    static interface Test {
        boolean matchesPackage(String pkg);

        boolean matchesJar(String name);
    }

    InternalScanner(ClassLoader cl, PackageScanner.VersionMapping[] versionMappings, boolean debug) {
        this.classloader = cl;
        // Normalize every mapping's version into OSGi form up front.
        for (PackageScanner.VersionMapping mapping : versionMappings) {
            mapping.toVersion(versionConverter.getVersion(mapping.getVersion()));
        }
        this.versionMappings = versionMappings;
        this.debug = debug;
    }

    void setOsgiVersionConverter(OsgiVersionConverter converter) {
        this.versionConverter = converter;
    }

    /**
     * Scans the given root packages for exported packages, deduplicating by
     * package name (the last one discovered wins) and sorting the result.
     */
    Collection<ExportPackage> findInPackages(Test test, String... roots) {
        // weans out duplicates by choosing the winner as the last one to be discovered
        Map<String, ExportPackage> map = new HashMap<String,ExportPackage>();
        for (String pkg : roots) {
            for (ExportPackage export : findInPackage(test, pkg)) {
                map.put(export.getPackageName(), export);
            }
        }
        // Let's be nice and sort the results by package
        // FIX: was a raw "new TreeSet(...)" — type it properly.
        return new TreeSet<ExportPackage>(map.values());
    }

    /**
     * Scans the given URLs directly (no package prefix), deduplicating and
     * sorting as in {@link #findInPackages}.
     */
    Collection<ExportPackage> findInUrls(Test test, URL... urls) {
        // weans out duplicates by choosing the winner as the last one to be discovered
        Map<String, ExportPackage> map = new HashMap<String,ExportPackage>();
        Enumeration<URL> urlEnum = Collections.enumeration(Arrays.asList(urls));
        for (ExportPackage export : findInPackageWithUrls(test, "", urlEnum)) {
            map.put(export.getPackageName(), export);
        }
        // Let's be nice and sort the results by package
        return new TreeSet<ExportPackage>(map.values());
    }

    /**
     * Scans for classes starting at the package provided and descending into subpackages.
     * Each class is offered up to the Test as it is discovered, and if the Test returns
     * true the class is retained.
     *
     * @param test an instance of {@link Test} that will be used to filter classes
     * @param packageName the name of the package from which to start scanning for
     *        classes, e.g. {@code net.sourceforge.stripes}
     */
    List<ExportPackage> findInPackage(Test test, String packageName) {
        List<ExportPackage> localExports = new ArrayList<ExportPackage>();
        packageName = packageName.replace('.', '/');
        Enumeration<URL> urls;

        try {
            urls = classloader.getResources(packageName);
        } catch (IOException ioe) {
            System.err.println("Could not read package: " + packageName);
            return localExports;
        }

        return findInPackageWithUrls(test, packageName, urls);
    }

    /**
     * Resolves each classpath URL to a directory or jar on disk and delegates to the
     * appropriate loader. Unresolvable URLs are reported to stderr and skipped.
     */
    List<ExportPackage> findInPackageWithUrls(Test test, String packageName, Enumeration<URL> urls) {
        List<ExportPackage> localExports = new ArrayList<ExportPackage>();

        while (urls.hasMoreElements()) {
            try {
                URL url = urls.nextElement();
                String urlPath = url.getPath();

                // it's in a JAR, grab the path to the jar
                if (urlPath.lastIndexOf('!') > 0) {
                    urlPath = urlPath.substring(0, urlPath.lastIndexOf('!'));
                } else if (!urlPath.startsWith("file:")) {
                    urlPath = "file:" + urlPath;
                }

                File file = new File(new URL(urlPath).toURI());
                if (file.isDirectory()) {
                    localExports.addAll(loadImplementationsInDirectory(test, packageName, file));
                } else {
                    if (test.matchesJar(file.getName())) {
                        localExports.addAll(loadImplementationsInJar(test, file));
                    }
                }
            } catch (IOException ioe) {
                System.err.println("could not read entries: " + ioe);
            } catch (URISyntaxException e) {
                System.err.println("Invalid file name: " + e);
            }
        }
        return localExports;
    }

    /**
     * Finds matches in a physical directory on a filesystem. Examines all
     * files within a directory - if the File object is not a directory, and ends with <i>.class</i>
     * the file is loaded and tested to see if it is acceptable according to the Test. Operates
     * recursively to find classes within a folder structure matching the package structure.
     *
     * @param test a Test used to filter the classes that are discovered
     * @param parent the package name up to this directory in the package hierarchy. E.g. if
     *        /classes is in the classpath and we wish to examine files in /classes/org/apache then
     *        the values of <i>parent</i> would be <i>org/apache</i>
     * @param location a File object representing a directory
     */
    List<ExportPackage> loadImplementationsInDirectory(Test test, String parent, File location) {
        List<ExportPackage> localExports = new ArrayList<ExportPackage>();

        File[] files = location.listFiles();
        // FIX: listFiles() returns null on I/O error or when location is not a
        // directory — the original would NPE in the for-each below.
        if (files == null) {
            return localExports;
        }

        Set<String> scanned = new HashSet<String>();

        for (File file : files) {
            StringBuilder builder = new StringBuilder(100);
            builder.append(parent).append("/").append(file.getName());
            String packageOrClass = (parent == null ? file.getName() : builder.toString());

            if (file.isDirectory()) {
                localExports.addAll(loadImplementationsInDirectory(test, packageOrClass, file));
            // If the parent is empty, then assume the directory's jars should be searched
            } else if ("".equals(parent) && file.getName().endsWith(".jar") && test.matchesJar(file.getName())) {
                localExports.addAll(loadImplementationsInJar(test, file));
            } else {
                String pkg = packageOrClass;
                int lastSlash = pkg.lastIndexOf('/');
                if (lastSlash > 0) {
                    pkg = pkg.substring(0, lastSlash);
                }
                pkg = pkg.replace('/', '.');
                if (!scanned.contains(pkg)) {
                    if (test.matchesPackage(pkg)) {
                        localExports.add(new ExportPackage(pkg, determinePackageVersion(null, pkg)));
                    }
                    scanned.add(pkg);
                }
            }
        }
        return localExports;
    }

    /**
     * Finds matching classes within a jar files that contains a folder structure
     * matching the package structure. If the File is not a JarFile or does not exist a warning
     * will be logged, but no error will be raised.
     *
     * @param test a Test used to filter the classes that are discovered
     * @param file the jar file to be examined for classes
     */
    List<ExportPackage> loadImplementationsInJar(Test test, File file) {
        List<ExportPackage> localExports = new ArrayList<ExportPackage>();

        Set<String> packages = jarContentCache.get(file.getPath());
        if (packages == null) {
            packages = new HashSet<String>();
            try {
                JarFile jarFile = new JarFile(file);
                try {
                    for (Enumeration<JarEntry> e = jarFile.entries(); e.hasMoreElements(); ) {
                        JarEntry entry = e.nextElement();
                        String name = entry.getName();
                        if (!entry.isDirectory()) {
                            String pkg = name;
                            int pos = pkg.lastIndexOf('/');
                            if (pos > -1) {
                                pkg = pkg.substring(0, pos);
                            }
                            pkg = pkg.replace('/', '.');
                            packages.add(pkg);
                        }
                    }
                } finally {
                    // FIX: the JarFile was never closed — leaked a file handle per jar.
                    jarFile.close();
                }
            } catch (IOException ioe) {
                System.err.println("Could not search jar file '" + file
                        + "' for classes matching criteria: " + test + " due to an IOException" + ioe);
                return Collections.emptyList();
            } finally {
                // set the cache, even if the scan produced an error
                jarContentCache.put(file.getPath(), packages);
            }
        }

        Set<String> scanned = new HashSet<String>();
        for (String pkg : packages) {
            if (!scanned.contains(pkg)) {
                if (test.matchesPackage(pkg)) {
                    localExports.add(new ExportPackage(pkg, determinePackageVersion(file, pkg)));
                }
                scanned.add(pkg);
            }
        }
        return localExports;
    }

    /**
     * Determines the version for a package: an explicit mapping wins (the LAST matching
     * mapping is used); otherwise the version is guessed from the jar file name; may
     * return null if neither yields anything.
     */
    String determinePackageVersion(File jar, String pkg) {
        // Look for an explicit mapping
        String version = null;
        for (PackageScanner.VersionMapping mapping : versionMappings) {
            if (mapping.matches(pkg)) {
                version = mapping.getVersion();
            }
        }

        if (version == null && jar != null) {
            // TODO: Look for osgi headers
            // Try to guess the version from the jar name
            String name = jar.getName();
            version = extractVersion(name);
        }

        if (version == null && debug) {
            if (jar != null) {
                System.err.println("Unable to determine version for '" + pkg + "' in jar '" + jar.getPath() + "'");
            } else {
                System.err.println("Unable to determine version for '" + pkg + "'");
            }
        }
        return version;
    }

    /**
     * Tries to guess the version by assuming it starts as the first number after a '-' or '_' sign,
     * then converts the version into an OSGi-compatible one.
     *
     * NOTE(review): lastWasSeparator is never reset, so a digit appearing anywhere after
     * the first '-'/'_' starts the version (e.g. "foo-bar2.jar" yields "2") — presumably
     * intentional for names like "lib-v2.jar"; verify before changing.
     */
    String extractVersion(String filename) {
        StringBuilder version = null;
        boolean lastWasSeparator = false;
        for (int x = 0; x < filename.length(); x++) {
            char c = filename.charAt(x);
            if (c == '-' || c == '_') {
                lastWasSeparator = true;
            } else if (Character.isDigit(c) && lastWasSeparator && version == null) {
                version = new StringBuilder();
            }
            if (version != null) {
                version.append(c);
            }
        }
        if (version != null) {
            // FIX: guard the length before substring() — a version shorter than 4
            // characters (e.g. from "foo-1") used to throw StringIndexOutOfBoundsException.
            if (version.length() >= 4 && ".jar".equals(version.substring(version.length() - 4))) {
                version.delete(version.length() - 4, version.length());
            }
            return versionConverter.getVersion(version.toString());
        }
        return null;
    }
}
package org.webbitserver.stub;

import org.webbitserver.HttpRequest;
import org.webbitserver.InboundCookieParser;
import org.webbitserver.helpers.QueryParameters;

import java.net.HttpCookie;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.net.URI;
import java.util.*;

/**
 * Implementation of HttpRequest that is easy to construct manually and populate.
 * Useful for testing.
 */
public class StubHttpRequest extends StubDataHolder implements HttpRequest {

    private String uri = "/";
    private String method = "GET";
    private final List<Map.Entry<String, String>> headerList = new ArrayList<Map.Entry<String, String>>();
    private SocketAddress remoteAddress = new InetSocketAddress("localhost", 0);
    private Object id = "StubID";
    private long timestamp = 0;
    private String body;

    /** Creates a stub request for the default URI "/". */
    public StubHttpRequest() {
    }

    /** Creates a stub request for the given URI. */
    public StubHttpRequest(String uri) {
        this.uri = uri;
    }

    @Override
    public String uri() {
        return uri;
    }

    @Override
    public StubHttpRequest uri(String uri) {
        this.uri = uri;
        return this;
    }

    @Override
    public String header(String name) {
        // First header whose name matches exactly wins; null when absent.
        for (Map.Entry<String, String> candidate : headerList) {
            if (candidate.getKey().equals(name)) {
                return candidate.getValue();
            }
        }
        return null;
    }

    @Override
    public boolean hasHeader(String name) {
        // Present even when the stored value is null, so scan keys rather
        // than delegating to header(name).
        for (Map.Entry<String, String> candidate : headerList) {
            if (candidate.getKey().equals(name)) {
                return true;
            }
        }
        return false;
    }

    @Override
    public List<HttpCookie> cookies() {
        return InboundCookieParser.parse(headers(COOKIE_HEADER));
    }

    @Override
    public HttpCookie cookie(String name) {
        for (HttpCookie candidate : cookies()) {
            if (candidate.getName().equals(name)) {
                return candidate;
            }
        }
        return null;
    }

    @Override
    public String queryParam(String key) {
        return queryParameters().first(key);
    }

    @Override
    public List<String> queryParams(String key) {
        return queryParameters().all(key);
    }

    @Override
    public Set<String> queryParamKeys() {
        return queryParameters().keys();
    }

    @Override
    public String postParam(String key) {
        return postParameters().first(key);
    }

    @Override
    public List<String> postParams(String key) {
        return postParameters().all(key);
    }

    @Override
    public Set<String> postParamKeys() {
        return postParameters().keys();
    }

    @Override
    public String cookieValue(String name) {
        HttpCookie match = cookie(name);
        if (match == null) {
            return null;
        }
        return match.getValue();
    }

    @Override
    public List<String> headers(String name) {
        // All values recorded under the given name, in insertion order.
        List<String> values = new ArrayList<String>();
        for (Map.Entry<String, String> candidate : headerList) {
            if (candidate.getKey().equals(name)) {
                values.add(candidate.getValue());
            }
        }
        return values;
    }

    @Override
    public List<Map.Entry<String, String>> allHeaders() {
        return headerList;
    }

    @Override
    public String method() {
        return method;
    }

    @Override
    public String body() {
        return body;
    }

    public StubHttpRequest body(String body) {
        this.body = body;
        return this;
    }

    public StubHttpRequest method(String method) {
        this.method = method;
        return this;
    }

    public StubHttpRequest header(String name, String value) {
        headerList.add(new AbstractMap.SimpleEntry<String, String>(name, value));
        return this;
    }

    @Override
    public StubHttpRequest data(String key, Object value) {
        super.data(key, value);
        return this;
    }

    @Override
    public SocketAddress remoteAddress() {
        return remoteAddress;
    }

    @Override
    public Object id() {
        return id;
    }

    public StubHttpRequest id(Object id) {
        this.id = id;
        return this;
    }

    @Override
    public long timestamp() {
        return timestamp;
    }

    public StubHttpRequest timestamp(long timestamp) {
        this.timestamp = timestamp;
        return this;
    }

    public StubHttpRequest remoteAddress(SocketAddress remoteAddress) {
        this.remoteAddress = remoteAddress;
        return this;
    }

    /** Fresh parser over the query string of the current request URI. */
    private QueryParameters queryParameters() {
        return new QueryParameters(URI.create(uri()).getQuery());
    }

    /** Fresh parser over the request body. */
    private QueryParameters postParameters() {
        return new QueryParameters(body());
    }
}
package permafrost.tundra.flow;

import com.wm.app.b2b.server.InvokeState;
import com.wm.data.IData;
import com.wm.data.IDataCursor;
import com.wm.data.IDataFactory;
import com.wm.lang.ns.NSField;
import com.wm.lang.ns.NSRecord;
import com.wm.lang.ns.NSService;
import com.wm.lang.schema.Validator;
import com.wm.lang.xml.WMDocumentException;
import permafrost.tundra.content.ValidationResult;
import permafrost.tundra.data.IDataHelper;
import permafrost.tundra.data.IDataJSONParser;
import permafrost.tundra.lang.ExceptionHelper;
import permafrost.tundra.server.ServiceHelper;
import java.io.IOException;

/**
 * Convenience services for working with the invoke pipeline.
 */
public class PipelineHelper {
    /**
     * Disallow instantiation.
     */
    private PipelineHelper() {}

    /**
     * Validates the given pipeline against the calling service's input or output signature.
     *
     * @param pipeline  The pipeline to be validated.
     * @param direction Whether to validate the input or output signature.
     * @return          Whether the validation succeeded or failed.
     */
    public static ValidationResult validate(IData pipeline, InputOutputSignature direction) {
        // ServiceHelper.self() resolves the currently-executing service.
        return validate(ServiceHelper.self(), pipeline, direction);
    }

    /**
     * Validates the given pipeline against the given service's input or output signature.
     *
     * @param service   The service whose signature is used to validate against.
     * @param pipeline  The pipeline to be validated.
     * @param direction Whether to validate the input or output signature.
     * @return          Whether the validation succeeded or failed.
     */
    public static ValidationResult validate(NSService service, IData pipeline, InputOutputSignature direction) {
        ValidationResult result = ValidationResult.VALID;

        // we can only validate the pipeline when not debugging
        if (service != null && !"wm.server.flow:stepFlow".equals(service.getNSName().getFullName())) {
            NSRecord record;
            if (direction == InputOutputSignature.INPUT) {
                record = service.getSignature().getInput();
            } else {
                record = service.getSignature().getOutput();
            }

            // A null record means the service declares no signature for this
            // direction, in which case there is nothing to validate against.
            if (record != null) {
                Validator validator = Validator.create(pipeline, record, Validator.getDefaultOptions());
                validator.setLocale(InvokeState.getCurrentLocale());
                validator.setMaximumErrors(-1); // return all errors

                IDataCursor cursor = null;
                try {
                    IData scope = validator.validate();
                    cursor = scope.getCursor();

                    // Validation outcome is reported inside the returned scope
                    // document as an "isValid" flag plus an "errors" array.
                    boolean isValid = IDataHelper.getOrDefault(cursor, "isValid", Boolean.class, true);
                    if (!isValid) {
                        IData[] errors = IDataHelper.get(cursor, "errors", IData[].class);
                        result = buildResult(direction, isValid, errors);
                    }
                } catch (WMDocumentException ex) {
                    // Convert the checked exception into an unchecked one.
                    ExceptionHelper.raiseUnchecked(ex);
                } finally {
                    // Cursors must always be destroyed to release resources.
                    if (cursor != null) cursor.destroy();
                }
            }
        }

        return result;
    }

    /**
     * Sanitizes the given pipeline against the current service's input or output signature by dropping all undeclared
     * variables from the top-level of the pipeline.
     *
     * @param pipeline  The pipeline to be sanitized.
     * @param direction Whether to sanitize using the input or output signature.
     */
    public static void sanitize(IData pipeline, InputOutputSignature direction) {
        sanitize(ServiceHelper.self(), pipeline, direction);
    }

    /**
     * Sanitizes the given pipeline against the given service's input or output signature by dropping all undeclared
     * variables from the top-level of the pipeline.
     *
     * @param service   The service whose signature is used to sanitize against.
     * @param pipeline  The pipeline to be sanitized.
     * @param direction Whether to sanitize using the input or output signature.
     */
    public static void sanitize(NSService service, IData pipeline, InputOutputSignature direction) {
        // we can only sanitize the pipeline when not debugging
        if (service != null && !"wm.server.flow:stepFlow".equals(service.getNSName().getFullName())) {
            NSRecord record;
            if (direction == InputOutputSignature.INPUT) {
                record = service.getSignature().getInput();
            } else {
                record = service.getSignature().getOutput();
            }

            if (record != null) {
                NSField[] fields = record.getFields();
                if (fields != null) {
                    // Collect the names declared in the signature; IDataHelper.clear
                    // then removes every top-level key NOT in this list.
                    String[] names = new String[fields.length];
                    for (int i = 0; i < fields.length; i++) {
                        names[i] = fields[i].getName();
                    }
                    IDataHelper.clear(pipeline, names);
                }
            }
        }
    }

    /**
     * Specifies whether to use the input or output signature.
     */
    public enum InputOutputSignature {
        INPUT, OUTPUT;
    }

    /**
     * Returns a ValidationResult for the given inputs.
     *
     * @param direction The signature direction that was validated against.
     * @param isValid   Whether the validation succeeded or failed.
     * @param errors    Optional list of errors describing why the validation failed.
     * @return          A ValidationResult object representing the given inputs.
     */
    public static ValidationResult buildResult(InputOutputSignature direction, boolean isValid, IData[] errors) {
        ValidationResult result;
        if (isValid) {
            result = ValidationResult.VALID;
        } else {
            StringBuilder errorMessage = new StringBuilder();
            errorMessage.append("Pipeline validation against ")
                        .append(direction.toString().toLowerCase())
                        .append(" signature failed");

            if (errors != null && errors.length > 0) {
                // Append the raw error documents as JSON for diagnostics;
                // "recordWithNoID" is the wrapper key used for anonymous records.
                errorMessage.append(": ");
                IData errorDetails = IDataFactory.create();
                IDataHelper.put(errorDetails, "recordWithNoID", errors);
                IDataJSONParser parser = new IDataJSONParser(false);
                try {
                    parser.emit(errorMessage, errorDetails);
                } catch(IOException ex) {
                    // ignore exception: the JSON detail is best-effort; the
                    // summary message and errors array are still returned.
                }
            }
            result = new ValidationResult(false, errorMessage.toString(), errors);
        }
        return result;
    }
}
package pl.joegreen.sergeants.framework;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import pl.joegreen.sergeants.api.GeneralsApi;
import pl.joegreen.sergeants.api.listener.NoArgsListener;
import pl.joegreen.sergeants.api.response.ChatMessageApiResponse;
import pl.joegreen.sergeants.api.response.GameLostApiResponse;
import pl.joegreen.sergeants.api.response.GameStartApiResponse;
import pl.joegreen.sergeants.api.response.GameUpdateApiResponse;
import pl.joegreen.sergeants.framework.model.*;
import pl.joegreen.sergeants.framework.model.api.GameStartedApiResponseImpl;
import pl.joegreen.sergeants.framework.model.api.UpdatableGameState;
import pl.joegreen.sergeants.framework.queue.QueueConfiguration;
import pl.joegreen.sergeants.framework.user.UserConfiguration;

import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.function.Function;

/**
 * Drives a bot through a fixed number of games against the generals.io API:
 * joins the configured queue, forwards game events to the bot, and completes
 * a future with the accumulated {@link GameResult}s when all games are done.
 */
public class Games {
    private final static Logger LOGGER = LoggerFactory.getLogger(Games.class);
    private static final String HELLO_MESSAGE = "Bot created with the Sergeants framework: https://git.io/sergeants";

    private GeneralsApi api = null;
    private Function<Actions, Bot> botProvider;
    private QueueConfiguration queueConfiguration;
    // Rebuilt at the start of every game; null between games.
    private UpdatableGameState currentGameState;
    private GameStartApiResponse gameStartApiResponse;
    private Bot bot;
    private Actions actions;
    // Remaining games; decremented each time a game finishes.
    private int gamesToPlay = 0;
    private UserConfiguration userConfiguration;
    private List<GameResult> gameResults = new ArrayList<>();
    private CompletableFuture<List<GameResult>> gameResultsFuture;
    private boolean inGame;

    /**
     * Plays the given number of games asynchronously using a freshly created API client.
     *
     * @param games              number of games to play; must be positive
     * @param botProvider        factory creating a bot for each game
     * @param queueConfiguration which queue to join
     * @param userConfiguration  user identity configuration
     * @return future completed with one result per game played
     */
    public static CompletableFuture<List<GameResult>> playAsynchronously(int games, Function<Actions, Bot> botProvider, QueueConfiguration queueConfiguration, UserConfiguration userConfiguration) {
        if (games <= 0) {
            throw new IllegalArgumentException("Number of games must be positive");
        }
        CompletableFuture<List<GameResult>> resultsFuture = new CompletableFuture<>();
        new Games(GeneralsApi.create(), games, botProvider, queueConfiguration, userConfiguration, resultsFuture).playRound();
        return resultsFuture;
    }

    /**
     * Same as {@link #playAsynchronously} but uses the supplied API instance
     * (useful for testing with a mock/custom client).
     */
    public static CompletableFuture<List<GameResult>> playAsynchronouslyWithCustomApi(GeneralsApi api, int games, Function<Actions, Bot> botProvider, QueueConfiguration queueConfiguration, UserConfiguration userConfiguration) {
        if (games <= 0) {
            throw new IllegalArgumentException("Number of games must be positive");
        }
        CompletableFuture<List<GameResult>> resultsFuture = new CompletableFuture<>();
        new Games(api, games, botProvider, queueConfiguration, userConfiguration, resultsFuture).playRound();
        return resultsFuture;
    }

    /**
     * Blocking variant: plays the games and waits for all results.
     */
    public static List<GameResult> play(int games, Function<Actions, Bot> botProvider, QueueConfiguration queueConfiguration, UserConfiguration userConfiguration) {
        try {
            return playAsynchronously(games, botProvider, queueConfiguration, userConfiguration).get();
        } catch (InterruptedException | ExecutionException e) {
            throw new RuntimeException(e);
        }
    }

    private Games(GeneralsApi api, int games, Function<Actions, Bot> botProvider, QueueConfiguration queueConfiguration, UserConfiguration userConfiguration, CompletableFuture<List<GameResult>> resultsFuture) {
        this.gamesToPlay = games;
        this.botProvider = botProvider;
        this.userConfiguration = userConfiguration;
        this.queueConfiguration = queueConfiguration;
        this.api = configureApi(api);
        this.actions = new BotActions();
        this.gameResultsFuture = resultsFuture;
    }

    /** Registers all event listeners this orchestrator needs on the API client. */
    private GeneralsApi configureApi(GeneralsApi api) {
        return api
                .onDisconnected(this::onDisconnected)
                .onGameStarted(this::gameStarted)
                .onGameUpdated(this::onGameUpdated)
                .onGameLost(this::onGameLost)
                .onGameWon(this::onGameWon)
                .onSetUsernameError(LOGGER::error)
                .onChatMessage(this::onChatMessage);
    }

    private void onChatMessage(String room, ChatMessageApiResponse chatMessageApiResponse) {
        // Chat events can arrive before the first game starts; only forward
        // them once a bot exists.
        if (bot != null) {
            runBotMethodCatchingExceptions(
                    () -> bot.onChatMessage(new ChatMessage(
                            determineRoomType(room, gameStartApiResponse),
                            chatMessageApiResponse.getText(),
                            chatMessageApiResponse.getUsername()
                    ))
            );
        }
    }

    /** Maps a raw room identifier onto the game/team/unknown chat type. */
    private ChatMessage.ChatType determineRoomType(String room, GameStartApiResponse gameStartApiResponse) {
        if (room.equals(gameStartApiResponse.getChatRoom())) {
            return ChatMessage.ChatType.GAME;
        } else if (room.equals(gameStartApiResponse.getTeamChatRoom())) {
            return ChatMessage.ChatType.TEAM;
        } else {
            return ChatMessage.ChatType.UNKNOWN;
        }
    }

    /**
     * Common termination path for won/lost/disconnected games: records the
     * result, notifies the bot, and either queues the next round or completes
     * the results future and disconnects.
     */
    private void onGameFinished(GameResult gameResult) {
        api.leaveGame();
        // Guard against duplicate finish events (e.g. a disconnect arriving
        // after the game was already reported won or lost).
        if (inGame) {
            inGame = false;
            // BUG FIX: the decrement was missing, so the remaining-games counter
            // never reached zero and the play loop never terminated.
            gamesToPlay--;
            // BUG FIX: added the second placeholder so gameResult is actually
            // included in the log message instead of being silently dropped.
            LOGGER.debug("Game finished, {} games left, result: {}", gamesToPlay, gameResult);
            runBotMethodCatchingExceptions(() -> bot.onGameFinished(gameResult));
            gameResults.add(gameResult);
            if (gamesToPlay > 0) {
                playRound();
            } else {
                LOGGER.debug("All games finished, disconnecting from the API");
                gameResultsFuture.complete(gameResults);
                api.disconnect();
            }
        }
    }

    private void onDisconnected() {
        GameResult gameResult = new GameResult(GameResult.Result.DISCONNECTED, currentGameState, Optional.empty());
        onGameFinished(gameResult);
    }

    private void onGameWon() {
        GameResult gameResult = new GameResult(GameResult.Result.WON, currentGameState, Optional.empty());
        onGameFinished(gameResult);
    }

    private void onGameLost(GameLostApiResponse gameLostApiResponse) {
        GameResult gameResult = new GameResult(GameResult.Result.LOST, currentGameState, Optional.of(tryToGetKillerName(gameLostApiResponse)));
        onGameFinished(gameResult);
    }

    /** Best-effort killer-name lookup; never propagates an exception. */
    private String tryToGetKillerName(GameLostApiResponse gameLostApiResponse) {
        try {
            return currentGameState.getPlayers().get(gameLostApiResponse.getKiller()).getUsername();
        } catch (Exception ex) {
            LOGGER.error("Cannot find out the killer name on game lost", ex);
            return "UNKNOWN";
        }
    }

    /** Runs the listener immediately if connected, otherwise after connecting. */
    private void connectIfNeededAndThen(NoArgsListener listener) {
        if (!api.isConnected()) {
            api.onceConnected(() -> {
                LOGGER.debug("Connected to the API");
                userConfiguration.configureUsername(api);
                listener.onEvent();
            });
            api.connect();
        } else {
            listener.onEvent();
        }
    }

    private void playRound() {
        connectIfNeededAndThen(() -> {
            LOGGER.info("Joining game queue with configuration" + queueConfiguration);
            queueConfiguration.joinQueue(api, userConfiguration.getUserId());
        });
    }

    private void onGameUpdated(GameUpdateApiResponse gameUpdateApiResponse) {
        LOGGER.trace("Game update: {}", gameUpdateApiResponse);
        if (currentGameState == null) {
            // First update of a new game must be turn 1; stale updates from a
            // previous game are dropped.
            if (gameUpdateApiResponse.getTurn() != 1) {
                LOGGER.warn("Ignoring game update! Expected new game to start and game turn of received game update is not 1. Ignored update: {}", gameUpdateApiResponse);
                return;
            }
            currentGameState = UpdatableGameState.createInitialGameState(gameStartApiResponse, gameUpdateApiResponse);
        } else {
            // Updates must arrive strictly in turn order; anything else is dropped.
            int expectedTurnOfGameUpdate = currentGameState.getTurn() + 1;
            if (gameUpdateApiResponse.getTurn() != expectedTurnOfGameUpdate) {
                LOGGER.warn("Ignoring game update! Current game state turn is {} and game turn of received game update is not {}. Ignored update: {}", currentGameState.getTurn(), expectedTurnOfGameUpdate, gameUpdateApiResponse);
                return;
            }
            currentGameState = currentGameState.update(gameUpdateApiResponse);
        }
        runBotMethodCatchingExceptions(() -> bot.onGameStateUpdate(currentGameState));
    }

    private void gameStarted(GameStartApiResponse gameStartApiResponse) {
        inGame = true;
        LOGGER.debug("Game started: {}", gameStartApiResponse);
        this.gameStartApiResponse = gameStartApiResponse;
        // A fresh bot instance is created for every game.
        bot = botProvider.apply(actions);
        runBotMethodCatchingExceptions(() -> bot.onGameStarted(new GameStartedApiResponseImpl(gameStartApiResponse)));
        actions.sendChat(HELLO_MESSAGE);
        currentGameState = null;
    }

    /** Bot callbacks must never break the game loop; exceptions are logged only. */
    private void runBotMethodCatchingExceptions(Runnable runnable) {
        try {
            runnable.run();
        } catch (Exception ex) {
            LOGGER.error("Exception while calling bot method. The game will be continued, but bot state may become inconsistent.", ex);
        }
    }

    /** Actions implementation handed to the bot, backed by the live API client. */
    private class BotActions implements Actions {
        private final Logger LOGGER = LoggerFactory.getLogger(BotActions.class);

        @Override
        public void move(int indexFrom, int indexTo) {
            move(indexFrom, indexTo, false);
        }

        @Override
        public void move(Field fieldFrom, Field fieldTo) {
            move(fieldFrom.getIndex(), fieldTo.getIndex(), false);
        }

        @Override
        public void move(Field fieldFrom, Field fieldTo, boolean moveHalf) {
            if (!fieldTo.getPosition().isMovableFrom(fieldFrom.getPosition())) {
                LOGGER.error("Moving between fields that are not neighbours, it probably has no effect. {} ==> {}", fieldFrom, fieldTo);
            }
            move(fieldFrom.getIndex(), fieldTo.getIndex(), moveHalf);
        }

        @Override
        public void move(int indexFrom, int indexTo, boolean moveHalf) {
            LOGGER.trace("Move from {} to {}, half={}", indexFrom, indexTo, moveHalf);
            api.attack(indexFrom, indexTo, moveHalf);
        }

        @Override
        public void sendChat(String message) {
            LOGGER.debug("Sending chat message: " + message);
            api.sendChatMessage(gameStartApiResponse.getChatRoom(), message);
        }

        @Override
        public void sendTeamChat(String message) {
            // NOTE(review): presence is checked on currentGameState but the room
            // is read from gameStartApiResponse — presumably equivalent; confirm.
            if (currentGameState.getTeamChatRoom() != null) {
                LOGGER.debug("Sending team chat message:" + message);
                api.sendChatMessage(gameStartApiResponse.getTeamChatRoom(), message);
            } else {
                LOGGER.error("Cannot send team chat message, there's no team chat");
            }
        }

        @Override
        public void leaveGame() {
            api.leaveGame();
        }

        @Override
        public void ping(int index) {
            LOGGER.trace("Pinging {}", index);
            api.ping(index);
        }

        @Override
        public void ping(Field field) {
            ping(field.getIndex());
        }

        @Override
        public void clearMoves() {
            LOGGER.debug("Clearing moves queue");
            api.clearMoves();
        }

        @Override
        public GeneralsApi getBareApi() {
            return api;
        }
    }
}
package ruke.vrj.translator; import ruke.vrj.Symbol; import ruke.vrj.SymbolFlag; import ruke.vrj.util.NameGenerator; public class VariableStatement implements Expression { public final Symbol variable; public final Expression value; public VariableStatement(final Symbol variable, final Expression value) { this.variable = variable; this.value = value; } @Override public final String toString() { final Symbol type = this.variable.children.resolve(this.variable.type); final boolean isStruct = type.flags.contains(SymbolFlag.STRUCT); final String translatedType = isStruct ? "integer" : this.variable.type; String declaration = translatedType + " "; if (this.variable.flags.contains(SymbolFlag.LOCAL)) { declaration = "local " + declaration; } final boolean isProperty = this.variable.flags.contains(SymbolFlag.PROPERTY); final boolean notStatic = !this.variable.flags.contains(SymbolFlag.STATIC); if (this.variable.flags.contains(SymbolFlag.ARRAY) || (isProperty && notStatic)) { declaration += "array "; } declaration += NameGenerator.to(this.variable); if (this.value != null) { declaration += " = " + this.value.toString(); } return declaration; } }
package se.claremont.autotest.common;

import org.junit.Assert;
import org.junit.Assume;
import se.claremont.autotest.guidriverpluginstructure.swingsupport.festswinggluecode.ApplicationManager;
import se.claremont.autotest.support.SupportMethods;

import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.UUID;
import java.util.stream.Collectors;

/**
 * Represents one test case run: collects its log, test data, known errors,
 * evaluates the result status at the end, and reports it to the test runner.
 */
public class TestCase {
    public final TestCaseLog testCaseLog;
    public String testName;
    String pathToHtmlLog;
    final String testSetName;
    final Date startTime;
    Date stopTime;
    final TestCaseData testCaseData;
    final KnownErrorsList testCaseKnownErrorsList = new KnownErrorsList();
    private final KnownErrorsList testSetKnownErrorsEncounteredInThisTestCase = new KnownErrorsList();
    ResultStatus resultStatus = ResultStatus.UNEVALUATED;
    private boolean reported = false;
    @SuppressWarnings("WeakerAccess")
    final KnownErrorsList testSetKnownErrors;
    private final ArrayList<TestCaseLogReporter> reporters = new ArrayList<>();
    final UUID uid = UUID.randomUUID();
    List<String> processesRunningAtTestCaseStart = new ArrayList<>();

    /**
     * Setting up a new test case run and prepares it for execution
     */
    public TestCase(KnownErrorsList knownErrorsList, String testName){
        if(knownErrorsList == null){
            knownErrorsList = new KnownErrorsList();
        }
        testCaseLog = new TestCaseLog(testName);
        testSetName = SupportMethods.classNameAtStacktraceLevel(4);
        testSetKnownErrors = knownErrorsList;
        this.testName = testName;
        testCaseData = new TestCaseData();
        addTestCaseData("Test case name", testName);
        startTime = new Date();
        testCaseLog.log(LogLevel.INFO, "Starting test execution at " + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(startTime) + ".");
        String memoryInfo = "Total memory available to JVM (bytes): " + Runtime.getRuntime().totalMemory() + ". ";
        long maxMemory = Runtime.getRuntime().maxMemory();
        memoryInfo += "Maximum memory (bytes): " + (maxMemory == Long.MAX_VALUE ? "no limit" : maxMemory) + ". ";
        // BUG FIX: the system property key is "os.name" (property keys are
        // case-sensitive); "OS.name" always returned null.
        testCaseLog.log(LogLevel.INFO, "Running tests on machine with OS " + System.getProperty("os.name") + " and " + Runtime.getRuntime().availableProcessors() + " processors. " + memoryInfo);
        reporters.add(new TestCaseLogReporterPureTextBasedLogFile(this));
        reporters.add(new TestCaseLogReporterHtmlLogFile(this));
        setLogFolderIfNotAlreadySet();
        ApplicationManager applicationManager = new ApplicationManager(this);
        processesRunningAtTestCaseStart = applicationManager.listActiveRunningProcessesOnLocalMachine();
    }

    /**
     * Compares the currently running processes on the executing machine with the ones that
     * were running when the test case started, and reports the difference to the test case
     * log in a log post.
     */
    public void writeProcessListDeviationsFromSystemStartToLog(){
        ApplicationManager applicationManager = new ApplicationManager(this);
        // Take ONE snapshot of the current process list so "added" and "exited"
        // are computed from the same moment (the previous implementation queried
        // the process list twice, so a process starting or stopping between the
        // two calls could be reported inconsistently).
        List<String> currentSnapshot = applicationManager.listActiveRunningProcessesOnLocalMachine();
        List<String> addedProcesses = new ArrayList<>(currentSnapshot);
        addedProcesses.removeAll(processesRunningAtTestCaseStart);
        List<String> exitedProcesses = new ArrayList<>(processesRunningAtTestCaseStart);
        exitedProcesses.removeAll(currentSnapshot);
        StringBuilder sb = new StringBuilder();
        sb.append("Process(es) added since test case start: '" + String.join("', '", addedProcesses) + "'." + SupportMethods.LF);
        sb.append("Process(es) that has exited since test case start: '" + String.join("', '", exitedProcesses) + "'." + SupportMethods.LF);
        if(exitedProcesses.size() > 0 || addedProcesses.size() > 0){
            testCaseLog.logDifferentlyToTextLogAndHtmlLog(LogLevel.INFO,
                    "Running process list deviation since test case start:" + SupportMethods.LF + sb.toString(),
                    "Running process list deviation since test case start:<br>" + SupportMethods.LF + sb.toString().replace(SupportMethods.LF, "<br>" + SupportMethods.LF));
        } else {
            testCaseLog.log(LogLevel.INFO, "No changes to what processes are running, from test case start, could be detected.");
        }
    }

    /**
     * Sets the log folder if no log folder is already set
     */
    private void setLogFolderIfNotAlreadySet(){
        LogFolder.setLogFolder(this.getClass().getName());
        pathToHtmlLog = LogFolder.testRunLogFolder + testName + ".html";
    }

    /**
     * Identity comparison based on the unique run id of this test case instance.
     */
    boolean isSameAs(Object object){
        // Null guard added: a null argument is simply "not the same", not an NPE.
        if(object == null || object.getClass() != TestCase.class){
            return false;
        }
        TestCase otherTestCase = (TestCase) object;
        return otherTestCase.uid.equals(this.uid);
    }

    /**
     * Interpreting test case results and writing to logs
     */
    public void report(){
        // Idempotent: a test case is only reported once.
        if(reported){
            return;
        }
        stopTime = new Date();
        testCaseLog.log(LogLevel.INFO, "Ending test execution at " + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(stopTime) + ".");
        logKnownErrors();
        evaluateResultStatus();
        testCaseLog.log(LogLevel.DEBUG, "Evaluated test result status to '" + SupportMethods.enumCapitalNameToFriendlyString(resultStatus.toString()) + "'.");
        CliTestRunner.testRun.reporters.evaluateTestCase(this);
        reporters.forEach(TestCaseLogReporter::report);
        reported = true;
        // Must come last: this may throw an assertion error to the test runner.
        assertExecutionResultsToTestRunner();
    }

    /** Writes log posts about which known errors were (and were not) encountered. */
    private void logKnownErrors(){
        testCaseLog.log(LogLevel.DEBUG, "Assessing test case testCaseLog for known errors.");
        testCaseKnownErrorsList.assessLogForKnownErrors(this);
        testCaseKnownErrorsList.knownErrors.stream().filter(KnownError::encountered).forEachOrdered(knownError ->
                testCaseLog.log(LogLevel.INFO, "Known error '" + knownError.description + "' encountered during execution."));
        for(KnownError knownError : testCaseKnownErrorsList.nonencounteredKnownErrors().knownErrors){
            testCaseLog.log(LogLevel.INFO, "Known error '" + knownError.description + "' was not encountered during test execution.");
        }
        //testSetKnownErrorsEncounteredInThisTestCase = KnownErrorsList.returnEncounteredKnownErrorsFromKnownErrorsListMatchedToLog(testSetKnownErrors, testCaseLog);
        testSetKnownErrorsEncounteredInThisTestCase.knownErrors.stream().filter(KnownError::encountered).forEachOrdered(knownError ->
                testCaseLog.log(LogLevel.INFO, "Known error from test set '" + knownError.description + "' encountered during execution."));
    }

    /**
     * Used to add a known error to a specific test case. All entered patterns must be matched for the known error to be fulfilled.
     *
     * @param description The friendly text string describing the known error
     * @param regexPatternsForLogRowsThatMustOccur The string patterns to find in the testCaseLog.
     */
    @SuppressWarnings("SameParameterValue")
    public void addKnownError(String description, String[] regexPatternsForLogRowsThatMustOccur){
        testCaseKnownErrorsList.add(new KnownError(description, regexPatternsForLogRowsThatMustOccur));
    }

    /**
     * Used to add a known error to a specific test case.
     *
     * @param description The friendly text string describing the known error
     * @param regexPatternForLogRow The string patterns to find in the testCaseLog.
     */
    public void addKnownError(String description, String regexPatternForLogRow){
        testCaseKnownErrorsList.add(new KnownError(description, regexPatternForLogRow));
    }

    /**
     * Adds some test case DATA that follows the test case and can be retrieved at any point during execution.
     * Test case DATA is used in parameters with a parameter name and a string based parameter value.
     *
     * @param parameterName Parameter name
     * @param parameterValue Parameter value
     */
    public void addTestCaseData(String parameterName, String parameterValue){
        testCaseLog.log(LogLevel.DEBUG, "Adding test case DATA parameter '" + parameterName + "' with parameter value '" + parameterValue + "'.");
        testCaseData.testCaseDataList.add(new ValuePair(parameterName, parameterValue));
    }

    /**
     * Retrieves the parameter value for the given parameter name.
     * @param parameterName The name of the parameter to find the value for
     * @return Returns the parameter value for the given parameter
     */
    @SuppressWarnings("WeakerAccess")
    public List<String> valuesForTestCaseDataParameter(String parameterName){
        List<String> returnStrings = testCaseData.testCaseDataList.stream().filter(valuePair -> valuePair.parameter.equals(parameterName)).map(valuePair -> valuePair.value).collect(Collectors.toList());
        if(returnStrings.size() > 0){
            String logString = "Reading parameter values ";
            for(String returnString : returnStrings){
                logString = logString + "'" + returnString + "', ";
            }
            logString = logString + "for parameter '" + parameterName + "'.";
            testCaseLog.log(LogLevel.DEBUG, logString);
        }else{
            testCaseLog.log(LogLevel.DEBUG, "Reading test case DATA but could not find any values for parameter '" + parameterName + "'.");
        }
        return returnStrings;
    }

    /**
     * Retrieves the parameter value for the given parameter name.
     * @param parameterName The name of the parameter to return value of
     * @return Return the value of the first encountered parameter with stated name
     */
    public String valueForFirstMatchForTestCaseDataParameter(String parameterName){
        String returnString = "";
        List<String> values = valuesForTestCaseDataParameter(parameterName);
        if(values.size() > 0){
            returnString = values.get(0);
            testCaseLog.log(LogLevel.DEBUG, "Reading first match '" + returnString + "' for parameter name '" + parameterName + "'.");
        }else {
            testCaseLog.log(LogLevel.DEBUG, "Reading test case DATA but could not find any values for parameter '" + parameterName + "'.");
        }
        return returnString;
    }

    /**
     * When a test case is evaluated after a test run the result status is set
     */
    enum ResultStatus{
        UNEVALUATED,
        PASSED,
        FAILED_WITH_ONLY_KNOWN_ERRORS,
        FAILED_WITH_BOTH_NEW_AND_KNOWN_ERRORS,
        FAILED_WITH_ONLY_NEW_ERRORS
    }

    /**
     * Evaluates test case execution testCaseLog to assess if the test case ran successfully, if known errors were encountered - or new errors. Return result status.
     */
    void evaluateResultStatus(){
        ArrayList<LogPost> erroneousPosts = testCaseLog.onlyErroneousLogPosts();
        if(erroneousPosts.size() == 0) {
            resultStatus = ResultStatus.PASSED;
            return;
        }
        testCaseKnownErrorsList.assessLogForKnownErrors(this);
        testSetKnownErrors.assessLogForKnownErrors(this);
        //testSetKnownErrorsEncounteredInThisTestCase = KnownErrorsList.returnEncounteredKnownErrorsFromKnownErrorsListMatchedToLog(testSetKnownErrors, testCaseLog);
        boolean knownErrorsEncountered = false;
        boolean newErrorsEncountered = false;
        for(LogPost logPost : testCaseLog.onlyErroneousLogPosts()){
            if(logPost.identifiedToBePartOfKnownError){
                knownErrorsEncountered = true;
                if(newErrorsEncountered) break; //if both are set to true: continue
            }else{
                newErrorsEncountered = true;
                if(knownErrorsEncountered) break;
            }
        }
        if(newErrorsEncountered && knownErrorsEncountered){
            resultStatus = ResultStatus.FAILED_WITH_BOTH_NEW_AND_KNOWN_ERRORS;
        } else if (newErrorsEncountered){
            resultStatus = ResultStatus.FAILED_WITH_ONLY_NEW_ERRORS;
        } else {
            resultStatus = ResultStatus.FAILED_WITH_ONLY_KNOWN_ERRORS;
        }
    }

    /**
     * Marks the test case status in the style of the test runner according to result status.
     * Also halts further test case execution.
     */
    private void assertExecutionResultsToTestRunner(){
        if(resultStatus == ResultStatus.UNEVALUATED) evaluateResultStatus();
        if(resultStatus == ResultStatus.FAILED_WITH_BOTH_NEW_AND_KNOWN_ERRORS || resultStatus == ResultStatus.FAILED_WITH_ONLY_NEW_ERRORS){
            // Fails the test in JUnit, using the log content as the failure message.
            Assert.assertFalse(testCaseLog.toString(), true);
        } else if(resultStatus == ResultStatus.FAILED_WITH_ONLY_KNOWN_ERRORS){
            // Marks the test as skipped/ignored in JUnit rather than failed.
            Assume.assumeTrue(false);
            Assert.assertFalse(testCaseLog.toString(), true);
        }
    }

    /**
     * Writes an entry to the log of the test case
     *
     * @param logLevel Log level for the log post
     * @param message Log message for the log post
     */
    public void log(LogLevel logLevel, String message){
        testCaseLog.log(logLevel, message);
    }

    /**
     * Some log posts is best logged with different strings for html log and text based log.
     *
     * @param logLevel Log level of log post
     * @param pureTestLogMessage Text string for pure text log
     * @param htmlFormattedLogMessage HTML formatted text string for HTML test case log
     */
    @SuppressWarnings("SameParameterValue")
    public void logDifferentlyToTextLogAndHtmlLog(LogLevel logLevel, String pureTestLogMessage, String htmlFormattedLogMessage){
        testCaseLog.logDifferentlyToTextLogAndHtmlLog(logLevel, pureTestLogMessage, htmlFormattedLogMessage);
    }

    /**
     * Friendly representation of the test case as a string.
     *
     * @return A string describing the relevant test case information
     */
    @Override
    public String toString(){
        return testName + " in class " + testSetName + " with testCaseLog " + testCaseLog.toString();
    }

    /** Serializes this test case run to a JSON string. */
    public String toJson(){
        StringBuilder json = new StringBuilder();
        json.append("{\"testCaseRunInstance\": {").append(SupportMethods.LF);
        if(testSetName != null) json.append(" \"testSetName\": \"").append(testSetName).append("\",").append(SupportMethods.LF);
        if(testName != null) json.append(" \"testName\": \"").append(testName).append("\",").append(SupportMethods.LF);
        if(resultStatus != null) json.append(" \"status\": \"").append(SupportMethods.enumCapitalNameToFriendlyString(resultStatus.toString())).append("\",").append(SupportMethods.LF);
        if(uid != null) json.append(" \"guid\": \"").append(uid.toString()).append("\",").append(SupportMethods.LF);
        if(pathToHtmlLog != null) json.append(" \"pathToHtmlLog\": \"").append(pathToHtmlLog.replace("\\", "\\\\")).append("\",").append(SupportMethods.LF);
        if(testCaseLog != null) json.append(testCaseLog.toJson()).append(",").append(SupportMethods.LF);
        json.append(" \"reported\": ").append(String.valueOf(reported)).append(",").append(SupportMethods.LF);
        if(startTime != null) json.append(" \"startTime\": \"").append(new SimpleDateFormat("yyyyMMdd HH:mm:ss").format(startTime)).append("\",").append(SupportMethods.LF);
        if(stopTime != null) json.append(" \"stopTime\": \"").append(new SimpleDateFormat("yyyyMMdd HH:mm:ss").format(stopTime)).append("\",").append(SupportMethods.LF);
        if(testCaseData != null) json.append(testCaseData.toJson()).append(",").append(SupportMethods.LF);
        if(testCaseKnownErrorsList != null) json.append(" \"testCaseKnownErrorsList\": ").append(testCaseKnownErrorsList.toJson()).append(",").append(SupportMethods.LF);
        if(testSetKnownErrorsEncounteredInThisTestCase != null) json.append(" \"testSetKnownErrorsEncounteredInThisTestCase\": ").append(testSetKnownErrorsEncounteredInThisTestCase.toJson()).append(",").append(SupportMethods.LF);
        if(testSetKnownErrors != null) json.append(" \"testSetKnownErrors\": ").append(testSetKnownErrors.toJson()).append(SupportMethods.LF);
        json.append(" }").append(SupportMethods.LF);
        json.append("}").append(SupportMethods.LF);
        return json.toString();
    }

    /** Holder for the name/value pairs attached to this test case run. */
    class TestCaseData {
        List<ValuePair> testCaseDataList = new ArrayList<>();

        public String toJson(){
            StringBuilder json = new StringBuilder();
            List<String> datastrings = new ArrayList<>();
            json.append("\"testCaseData\": [").append(SupportMethods.LF);
            for(ValuePair valuePair : testCaseDataList){
                datastrings.add("{" + SupportMethods.LF + " \"parameter\": \"" + valuePair.parameter + "\"," + SupportMethods.LF + "\"value\": \"" + valuePair.value + "\"" + SupportMethods.LF + "}");
            }
            json.append(String.join("," + SupportMethods.LF, datastrings));
            json.append(" ]").append(SupportMethods.LF);
            return json.toString();
        }
    }
}
package seedu.Tdoo.logic.commands; import seedu.Tdoo.commons.core.Messages; import seedu.Tdoo.commons.core.UnmodifiableObservableList; import seedu.Tdoo.commons.exceptions.IllegalValueException; import seedu.Tdoo.model.task.*; import seedu.Tdoo.model.task.UniqueTaskList.TaskNotFoundException; import seedu.Tdoo.model.task.attributes.*; /** * Edit a task identified using it's last displayed index from the TodoList. */ public class EditCommand extends Command { public static final String COMMAND_WORD = "edit"; public static final String MESSAGE_USAGE = COMMAND_WORD + ": Edits information of the task in the task-list.\n" + "Parameters: TASK_TYPE INDEX_NUMBER name/TASK_NAME p/PRIORITY\n" + "Example: " + COMMAND_WORD + " todo 1 name/Assignment 1 p/2\n" + "Parameters: TASK_TYPE INDEX_NUMBER name/TASK_NAME from/DATE to/ENDDATE at/START_TIME to/END_TIME\n" + "Example: " + COMMAND_WORD + " event 1 name/Time's birthday party from/25-12-2016 to/26-12-2016 at/12:00 to/16:00\n" + "Parameters: TASK_TYPE INDEX_NUMBER name/TASK_NAME on/DATE at/END_TIME\n" + "Example: " + COMMAND_WORD + " deadline 1 name/CS2103 v0.2 on/25-12-2016 at/14:00"; public static final String MESSAGE_EDIT_TASK_SUCCESS = "Edited task: %1$s"; public static final String INVALID_VALUE = "Invalid value"; public static final String MISSING_TASK = "The target task cannot be missing"; public static final String MESSAGE_EDIT_DUPLICATE_TASK = "This task already exists in the Task-list"; public final String dataType; public final int targetIndex; private final Task toEdit; ReadOnlyTask taskToEdit = null; //@@author A0139920A public EditCommand(String name, String date, String endDate, String priority, int targetIndex, String dataType) throws IllegalValueException { this.targetIndex = targetIndex; this.dataType = dataType; this.toEdit = new Todo( new Name(name), new StartDate(date), new EndDate(endDate), new Priority(priority), "false" ); } //@@author A0139920A public EditCommand(String name, String date, String 
endDate, String startTime, String endTime, int targetIndex, String dataType) throws IllegalValueException { this.targetIndex = targetIndex; this.dataType = dataType; this.toEdit = new Event( new Name(name), new StartDate(date), new EndDate(endDate), new StartTime(startTime), new EndTime(endTime), "false" ); } public EditCommand(String name, String date, String endTime, int targetIndex, String dataType) throws IllegalValueException { this.targetIndex = targetIndex; this.dataType = dataType; this.toEdit = new Deadline( new Name(name), new StartDate(date), new EndTime(endTime), "false" ); } //@@author A0139920A public EditCommand(ReadOnlyTask original, String dataType, ReadOnlyTask toEdit) { this.taskToEdit = original; this.toEdit = (Task) toEdit; this.targetIndex = -1; this.dataType = dataType; } //@@author A0139920A @Override public CommandResult execute() { if(this.taskToEdit == null && this.targetIndex != -1) { UnmodifiableObservableList<ReadOnlyTask> lastShownList = null; switch (dataType) { case "todo": lastShownList = model.getFilteredTodoList(); break; case "event": lastShownList = model.getFilteredEventList(); break; case "deadline": lastShownList = model.getFilteredDeadlineList(); break; } if (lastShownList.size() < targetIndex) { indicateAttemptToExecuteIncorrectCommand(); return new CommandResult(Messages.MESSAGE_INVALID_TASK_DISPLAYED_INDEX); } taskToEdit = lastShownList.get(targetIndex - 1); } assert model != null; try { model.editTask(taskToEdit, dataType, toEdit); return new CommandResult(String.format(MESSAGE_EDIT_TASK_SUCCESS, toEdit)); } catch (IllegalValueException ive) { return new CommandResult(INVALID_VALUE); }catch (TaskNotFoundException pnfe) { return new CommandResult(MISSING_TASK); } } }
package seedu.ezdo.logic.commands; import java.util.ArrayList; import seedu.ezdo.commons.core.Messages; import seedu.ezdo.commons.core.UnmodifiableObservableList; import seedu.ezdo.logic.commands.exceptions.CommandException; import seedu.ezdo.model.todo.ReadOnlyTask; import seedu.ezdo.model.todo.Task; //@@author A0141010L /** * Marks a task as identified using its last displayed index from ezDo as done */ public class DoneCommand extends Command implements MultipleIndexCommand { public static final String COMMAND_WORD = "done"; public static final String SHORT_COMMAND_WORD = "d"; public static final String MESSAGE_USAGE = COMMAND_WORD + ": Marks the task identified by the index number used in the last task listing as done\n" + "Parameters: INDEX (must be a positive integer)\n" + "Example: " + COMMAND_WORD + " 1"; public static final String MESSAGE_DONE_TASK_SUCCESS = "Done task: %1$s"; private static final String MESSAGE_UNDONE_TASK_SUCCESS = "Undone task: %1$s"; public static final String MESSAGE_DONE_LISTED = "Done tasks listed"; private final ArrayList<Integer> targetIndexes; private final ArrayList<Task> tasksToToggle; private final boolean requestToViewDoneOnly; public DoneCommand(ArrayList<Integer> indexes) { this.targetIndexes = new ArrayList<Integer>(indexes); this.requestToViewDoneOnly = false; this.tasksToToggle = new ArrayList<Task>(); } public DoneCommand() { this.targetIndexes = null; this.requestToViewDoneOnly = true; this.tasksToToggle = null; } @Override public CommandResult execute() throws CommandException { UnmodifiableObservableList<ReadOnlyTask> lastShownList = model.getFilteredTaskList(); if (requestToViewDoneOnly) { model.updateFilteredDoneList(); return new CommandResult(MESSAGE_DONE_LISTED); } if (!isIndexValid(lastShownList)) { throw new CommandException(Messages.MESSAGE_INVALID_TASK_DISPLAYED_INDEX); } //if (isAnyTaskDone(lastShownList)) { // throw new CommandException(Messages.MESSAGE_WRONG_LIST); for (int i = 0; i < 
targetIndexes.size(); i++) { Task taskToToggle = (Task) lastShownList.get(targetIndexes.get(i) - 1); tasksToToggle.add(taskToToggle); } boolean isDone = model.toggleTasksDone(tasksToToggle); if (isDone) { return new CommandResult(String.format(MESSAGE_DONE_TASK_SUCCESS, tasksToToggle)); } else { return new CommandResult(String.format(MESSAGE_UNDONE_TASK_SUCCESS, tasksToToggle)); } } //@@author A0139248X @Override public boolean isIndexValid(UnmodifiableObservableList<ReadOnlyTask> lastShownList) { return targetIndexes.stream().allMatch(index -> index <= lastShownList.size() && index != 0); } }
package seedu.jimi.logic.commands; import java.util.HashSet; import java.util.Set; import javafx.collections.transformation.FilteredList; import seedu.jimi.commons.core.Messages; import seedu.jimi.commons.core.UnmodifiableObservableList; import seedu.jimi.commons.exceptions.IllegalValueException; import seedu.jimi.model.ModelManager; import seedu.jimi.model.tag.Tag; import seedu.jimi.model.tag.UniqueTagList; import seedu.jimi.model.task.FloatingTask; import seedu.jimi.model.task.Name; import seedu.jimi.model.task.ReadOnlyTask; import seedu.jimi.model.task.UniqueTaskList; import seedu.jimi.model.task.UniqueTaskList.DuplicateTaskException; import seedu.jimi.model.task.UniqueTaskList.TaskNotFoundException; /** * * @author zexuan * * Edits an existing task/event in Jimi. */ public class EditCommand extends Command { public static final String COMMAND_WORD = "edit"; public static final String MESSAGE_USAGE = COMMAND_WORD + ": Edits an existing task/event in Jimi. \n" + "Example: " + COMMAND_WORD + " 2 by 10th July at 12 pm"; public static final String MESSAGE_EDIT_TASK_SUCCESS = "Updated task details: %1$s"; public static final String MESSAGE_DUPLICATE_TASK = "This task already exists in Jimi"; private final int taskIndex; //index of task/event to be edited private UniqueTagList newTagList; private Name newName; public EditCommand(String name, Set<String> tags, int taskIndex) throws IllegalValueException { final Set<Tag> tagSet = new HashSet<>(); for (String tagName : tags) { tagSet.add(new Tag(tagName)); } this.taskIndex = taskIndex; //if new fields are to be edited, instantiate them if (name.length() != 0) { this.newName = new Name(name); } if (!tagSet.isEmpty()) { this.newTagList = new UniqueTagList(tagSet); } } @Override public CommandResult execute() { UnmodifiableObservableList<ReadOnlyTask> lastShownList = model.getFilteredTaskList(); ReadOnlyTask taskToEdit = lastShownList.get(taskIndex - 1); if (lastShownList.size() < taskIndex) { 
indicateAttemptToExecuteIncorrectCommand(); return new CommandResult(Messages.MESSAGE_INVALID_TASK_DISPLAYED_INDEX); } ReadOnlyTask taskToReplace = new FloatingTask( newName == null ? taskToEdit.getName() : newName, newTagList == null ? taskToEdit.getTags() : newTagList); model.editFloatingTask(taskIndex - 1, new FloatingTask(taskToReplace)); return new CommandResult(String.format(MESSAGE_EDIT_TASK_SUCCESS, taskToReplace)); } }
package seedu.task.storage; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.nio.file.FileAlreadyExistsException; import java.util.Optional; import java.util.logging.Logger; import seedu.task.commons.core.LogsCenter; import seedu.task.commons.exceptions.DataConversionException; import seedu.task.commons.util.FileUtil; import seedu.task.model.ReadOnlyTaskList; /** * A class to access TaskList data stored as an xml file on the hard disk. */ public class XmlTaskListStorage implements TaskListStorage { private static final Logger logger = LogsCenter.getLogger(XmlTaskListStorage.class); private String filePath; private File savedFile = null; public XmlTaskListStorage(String filePath) { this.filePath = filePath; } @Override public String getTaskListFilePath() { return filePath; } @Override public Optional<ReadOnlyTaskList> readTaskList() throws DataConversionException, IOException { return readTaskList(filePath); } /** * Similar to {@link #readTaskList()} * @param filePath location of the data. Cannot be null * @throws DataConversionException if the file is not in the correct format. */ @Override public Optional<ReadOnlyTaskList> readTaskList(String filePath) throws DataConversionException, FileNotFoundException { assert filePath != null; File taskListFile = new File(filePath); if (!taskListFile.exists()) { logger.info("TaskList file " + taskListFile + " not found"); return Optional.empty(); } ReadOnlyTaskList taskListOptional = XmlFileStorage.loadDataFromSaveFile(new File(filePath)); return Optional.of(taskListOptional); } @Override public void saveTaskList(ReadOnlyTaskList taskList) throws IOException { saveTaskList(taskList, filePath); } /** * Similar to {@link #saveTaskList(ReadOnlyTaskList)} * @param filePath location of the data. 
Cannot be null */ @Override public void saveTaskList(ReadOnlyTaskList taskList, String filePath) throws IOException { assert taskList != null; assert filePath != null; File file = new File(filePath); FileUtil.createIfMissing(file); assert file != null; XmlFileStorage.saveDataToFile(file, new XmlSerializableTaskList(taskList)); this.savedFile = file; } //@@author A0163559U @Override public void saveTaskListInNewLocation(ReadOnlyTaskList taskList, File newFile) throws IOException { logger.info("Attempting to save taskList in file" + newFile); saveTaskList(taskList, filePath); try { String taskData = FileUtil.readFromFile(savedFile); System.out.println(taskData); FileUtil.writeToFile(newFile, taskData); } catch (FileAlreadyExistsException faee) { logger.warning("FileAlreadyExistsException in saveTaskListInNewLocation"); return; //abort updating state } catch (IOException ioe) { logger.warning("IO Exception in saveTaskListInNewLocation"); return; //abort updating state } updateState(newFile); } public void updateState(File file) { this.savedFile = file; this.filePath = file.toString(); } @Override public Optional<ReadOnlyTaskList> loadTaskListFromNewLocation(File loadFile) throws FileNotFoundException, DataConversionException { Optional<ReadOnlyTaskList> newTaskList = readTaskList(loadFile.toString()); if (newTaskList.isPresent()) { updateState(loadFile); } return newTaskList; } //@@author }
package seedu.todo.controllers; import java.util.ArrayList; import java.util.Arrays; import seedu.todo.ui.UiManager; import seedu.todo.ui.views.HelpView; public class HelpController implements Controller { private static String NAME = "Help"; private static String DESCRIPTION = "Shows documentation for all valid commands."; private static String COMMAND_SYNTAX = "help"; private static CommandDefinition commandDefinition = new CommandDefinition(NAME, DESCRIPTION, COMMAND_SYNTAX); public static CommandDefinition getCommandDefinition() { return commandDefinition; } @Override public float inputConfidence(String input) { return (input.startsWith("help")) ? 1 : 0; } @Override public void process(String input) { HelpView view = UiManager.loadView(HelpView.class); view.commandDefinitions = Arrays.asList(getAllCommandDefinitions()); view.render(); } private CommandDefinition[] getAllCommandDefinitions() { return new CommandDefinition[] { HelpController.getCommandDefinition(), AddController.getCommandDefinition(), ListController.getCommandDefinition(), UpdateController.getCommandDefinition(), DestroyController.getCommandDefinition() }; } }
package uk.ac.edukapp.service; import java.util.Date; import java.util.List; import javax.persistence.EntityManager; import javax.persistence.Query; import javax.servlet.ServletContext; import uk.ac.edukapp.cache.Cache; import uk.ac.edukapp.model.Useraccount; import uk.ac.edukapp.model.Userrating; import uk.ac.edukapp.model.Widgetprofile; import uk.ac.edukapp.util.Message; public class UserRateService extends AbstractService { public UserRateService(ServletContext ctx) { super(ctx); } @SuppressWarnings("unchecked") public List<Userrating> getRatingsForWidgetProfile( Widgetprofile widgetProfile) { EntityManager entityManager = getEntityManagerFactory() .createEntityManager(); Query wpQuery = entityManager .createNamedQuery("Userrating.findForWidgetProfile"); wpQuery.setParameter("widgetprofile", widgetProfile); List<Userrating> ratings = (List<Userrating>) wpQuery.getResultList(); entityManager.close(); return ratings; } public Number getAverageRating(Widgetprofile widgetProfile) { EntityManager entityManager = getEntityManagerFactory() .createEntityManager(); Query q = entityManager.createNamedQuery("Userrating.getAverageValue"); q.setParameter("widgetprofile", widgetProfile); Number average = (Number) q.getSingleResult(); if (average == null) average = 0.0; entityManager.close(); return average; } public Long getRatingCount(Widgetprofile widgetProfile) { EntityManager entityManager = getEntityManagerFactory() .createEntityManager(); Query q = entityManager.createNamedQuery("Userrating.getCount"); q.setParameter("widgetprofile", widgetProfile); Long count = (Long) q.getSingleResult(); entityManager.close(); return count; } public Userrating getUserRatingForWidget ( Widgetprofile widgetProfile, Useraccount user ) { EntityManager entityManager = getEntityManagerFactory().createEntityManager(); Query q = entityManager.createNamedQuery("Userrating.findForWidgetAndUser"); q.setParameter ( "widgetprofile", widgetProfile); q.setParameter("useraccount", user ); Userrating 
ur = (Userrating)q.getSingleResult(); entityManager.close(); return ur; } public Message publishUserRate(String rating, Useraccount userAccount, Widgetprofile widgetProfile) { EntityManager entityManager = getEntityManagerFactory() .createEntityManager(); Message msg = new Message(); try { entityManager.getTransaction().begin(); // check if user rating exists - if yes update Userrating userRating = null; Query q = entityManager .createNamedQuery("Userrating.findForWidgetAndUser"); q.setParameter("widgetprofile", widgetProfile); q.setParameter("useraccount", userAccount); List<Userrating> rats = (List<Userrating>) q.getResultList(); if (rats.size() > 0) { userRating = rats.get(0); } // Create the review if (userRating == null) { userRating = new Userrating(); userRating.setUserAccount(userAccount); userRating.setWidgetProfile(widgetProfile); } int rat = Integer.parseInt(rating); byte rate = (byte) rat; userRating.setRating(rate); userRating.setTime(new Date()); entityManager.persist(userRating); entityManager.getTransaction().commit(); entityManager.close(); msg.setMessage("OK"); // Remove cached stats for this widget profile Cache.getInstance().remove("widgetStats:"+widgetProfile.getId()); } catch (Exception e) { msg.setMessage("error:" + e.getMessage()); e.printStackTrace(); } return msg; } }
package us.myles.ViaVersion.update; import org.bukkit.Bukkit; import org.bukkit.ChatColor; import org.bukkit.entity.Player; import org.bukkit.plugin.Plugin; import org.bukkit.scheduler.BukkitRunnable; import org.json.simple.JSONObject; import org.json.simple.parser.JSONParser; import org.json.simple.parser.ParseException; import us.myles.ViaVersion.api.ViaVersion; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.net.HttpURLConnection; import java.net.MalformedURLException; import java.net.URL; import java.util.UUID; public class UpdateUtil { private final static String URL = "http://api.spiget.org/v1/resources/"; private final static int PLUGIN = 19254; public final static String PREFIX = ChatColor.GREEN + "" + ChatColor.BOLD + "[ViaVersion] " + ChatColor.GREEN; public static void sendUpdateMessage(final UUID uuid, final Plugin plugin) { new BukkitRunnable() { @Override public void run() { final String message = getUpdateMessage(false); if (message != null) { new BukkitRunnable() { @Override public void run() { Player p = Bukkit.getPlayer(uuid); if (p != null) { p.sendMessage(PREFIX + message); } } }.runTask(plugin); } } }.runTaskAsynchronously(plugin); } public static void sendUpdateMessage(final Plugin plugin) { new BukkitRunnable() { @Override public void run() { final String message = getUpdateMessage(true); if (message != null) { new BukkitRunnable() { @Override public void run() { plugin.getLogger().warning(message); } }.runTask(plugin); } } }.runTaskAsynchronously(plugin); } private static String getUpdateMessage(boolean console) { if (ViaVersion.getInstance().getVersion().equals("${project.version}")) { return "You are using a debug/custom version, consider updating."; } String newestString = getNewestVersion(); if (newestString == null) { if (console) { return "Could not check for updates, check your connection."; } else { return null; } } Version current; try { current = new 
Version(ViaVersion.getInstance().getVersion()); } catch (IllegalArgumentException e) { return "You are using a debug/custom version, consider updating."; } Version newest = new Version(newestString); if (current.compareTo(newest) < 0) return "There is a newer version available: " + newest.toString(); else if (console && current.compareTo(newest) != 0) { return "You are running a newer version than is released!"; } return null; } private static String getNewestVersion() { try { URL url = new URL(URL + PLUGIN + "?" + System.currentTimeMillis()); HttpURLConnection connection = (HttpURLConnection) url.openConnection(); connection.setUseCaches(true); connection.addRequestProperty("User-Agent", "ViaVersion " + ViaVersion.getInstance().getVersion()); connection.setDoOutput(true); BufferedReader br = new BufferedReader(new InputStreamReader(connection.getInputStream())); String input; String content = ""; while ((input = br.readLine()) != null) { content = content + input; } br.close(); JSONParser parser = new JSONParser(); JSONObject statistics; try { statistics = (JSONObject) parser.parse(content); } catch (ParseException e) { e.printStackTrace(); return null; } return (String) statistics.get("version"); } catch (MalformedURLException e) { return null; } catch (IOException e) { return null; } } }
package edu.umd.cs.findbugs; import java.util.ArrayList; import java.util.BitSet; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Stack; import org.apache.bcel.Repository; import org.apache.bcel.classfile.Code; import org.apache.bcel.classfile.CodeException; import org.apache.bcel.classfile.Constant; import org.apache.bcel.classfile.ConstantClass; import org.apache.bcel.classfile.ConstantDouble; import org.apache.bcel.classfile.ConstantFloat; import org.apache.bcel.classfile.ConstantInteger; import org.apache.bcel.classfile.ConstantLong; import org.apache.bcel.classfile.ConstantString; import org.apache.bcel.classfile.ConstantUtf8; import org.apache.bcel.classfile.JavaClass; import org.apache.bcel.classfile.LocalVariable; import org.apache.bcel.classfile.LocalVariableTable; import org.apache.bcel.classfile.Method; import org.apache.bcel.generic.BasicType; import org.apache.bcel.generic.Type; import edu.umd.cs.findbugs.visitclass.Constants2; import edu.umd.cs.findbugs.visitclass.DismantleBytecode; import edu.umd.cs.findbugs.visitclass.LVTHelper; import edu.umd.cs.findbugs.visitclass.PreorderVisitor; /** * tracks the types and numbers of objects that are currently on the operand stack * throughout the execution of method. To use, a detector should instantiate one for * each method, and call <p>stack.sawOpcode(this,seen);</p> at the bottom of their sawOpcode method. * at any point you can then inspect the stack and see what the types of objects are on * the stack, including constant values if they were pushed. The types described are of * course, only the static types. 
* There are some outstanding opcodes that have yet to be implemented, I couldn't * find any code that actually generated these, so i didn't put them in because * I couldn't test them: * <ul> * <li>dup2_x2</li> * <li>jsr_w</li> * <li>wide</li> * </ul> */ public class OpcodeStack implements Constants2 { private static final boolean DEBUG = Boolean.getBoolean("ocstack.debug"); private List<Item> stack; private List<Item> lvValues; private int jumpTarget; private Stack<List<Item>> jumpStack; private boolean seenTransferOfControl = false; public static class Item { public static final int BYTE_ARRAY_LOAD = 1; public static final int RANDOM_INT = 2; public static final int LOW_8_BITS_CLEAR = 3; public static final Object UNKNOWN = null; private int specialKind; private String signature; private Object constValue = UNKNOWN; private FieldAnnotation field; private boolean isNull = false; private int registerNumber = -1; private boolean isInitialParameter = false; private Object userValue = null; public int getSize() { if (signature.equals("J") || signature.equals("D")) return 2; return 1; } private static boolean equals(Object o1, Object o2) { if (o1 == o2) return true; if (o1 == null || o2 == null) return false; return o1.equals(o2); } public int hashCode() { int r = 42 + specialKind; if (signature != null) r+= signature.hashCode(); r *= 31; if (constValue != null) r+= constValue.hashCode(); r *= 31; if (field != null) r+= field.hashCode(); r *= 31; if (isInitialParameter) r += 17; r += registerNumber; return r; } public boolean equals(Object o) { if (!(o instanceof Item)) return false; Item that = (Item) o; return equals(this.signature, that.signature) && equals(this.constValue, that.constValue) && equals(this.field, that.field) && this.isNull == that.isNull && this.specialKind == that.specialKind && this.registerNumber == that.registerNumber; } public String toString() { StringBuffer buf = new StringBuffer("< "); buf.append(signature); if (specialKind == BYTE_ARRAY_LOAD) 
buf.append(", byte_array_load"); else if (specialKind == RANDOM_INT) buf.append(", random_int"); else if (specialKind == LOW_8_BITS_CLEAR) buf.append(", low8clear"); if (constValue != UNKNOWN) { buf.append(", "); buf.append(constValue); } if (field!= UNKNOWN) { buf.append(", "); buf.append(field); } if (isInitialParameter) { buf.append(", IP"); } if (isNull) { buf.append(", isNull"); } if (registerNumber != -1) { buf.append(", r"); buf.append(registerNumber); } buf.append(" >"); return buf.toString(); } public static Item merge(Item i1, Item i2) { if (i1 == null) return i2; if (i2 == null) return i1; if (i1.equals(i2)) return i1; Item m = new Item(); m.isNull = false; if (equals(i1.signature,i2.signature)) m.signature = i1.signature; if (equals(i1.constValue,i2.constValue)) m.constValue = i1.constValue; if (equals(i1.field,i2.field)) m.field = i1.field; if (i1.isNull == i2.isNull) m.isNull = i1.isNull; if (i1.registerNumber == i2.registerNumber) m.registerNumber = i1.registerNumber; if (i1.specialKind == i2.specialKind) m.specialKind = i1.specialKind; return m; } public Item(String s, int constValue) { this(s, (Object)(Integer)constValue); } public Item(String s) { this(s, UNKNOWN); } public Item(String s, FieldAnnotation f, int reg) { signature = s; field = f; registerNumber = reg; } public Item(Item it, int reg) { this.signature = it.signature; this.constValue = it.constValue; this.field = it.field; this.isNull = it.isNull; this.registerNumber = reg; } public Item(String s, FieldAnnotation f) { this(s, f, -1); } public Item(String s, Object v) { signature = s; constValue = v; if (v instanceof Integer) { int value = (Integer) v; if (value != 0 && (value & 0xff) == 0) specialKind = LOW_8_BITS_CLEAR; } else if (v instanceof Long) { long value = (Long) v; if (value != 0 && (value & 0xff) == 0) specialKind = LOW_8_BITS_CLEAR; } } public Item() { signature = "Ljava/lang/Object;"; constValue = null; isNull = true; } public JavaClass getJavaClass() throws 
ClassNotFoundException { String baseSig; if (isPrimitive()) return null; if (isArray()) { baseSig = getElementSignature(); } else { baseSig = signature; } if (baseSig.length() == 0) return null; baseSig = baseSig.substring(1, baseSig.length() - 1); baseSig = baseSig.replace('/', '.'); return Repository.lookupClass(baseSig); } public boolean isArray() { return signature.startsWith("["); } public boolean isInitialParameter() { return isInitialParameter; } public String getElementSignature() { if (!isArray()) return signature; else { int pos = 0; int len = signature.length(); while (pos < len) { if (signature.charAt(pos) != '[') break; pos++; } return signature.substring(pos); } } public boolean isPrimitive() { return !signature.startsWith("L"); } public int getRegisterNumber() { return registerNumber; } public String getSignature() { return signature; } public boolean isNull() { return isNull; } public Object getConstant() { return constValue; } public FieldAnnotation getField() { return field; } /** * @param specialKind The specialKind to set. */ public void setSpecialKind(int specialKind) { this.specialKind = specialKind; } /** * @return Returns the specialKind. 
*/ public int getSpecialKind() { return specialKind; } /** * attaches a detector specified value to this item * * @param value the custom value to set */ public void setUserValue(Object value) { userValue = value; } /** * gets the detector specified value for this item * * @return the custom value */ public Object getUserValue() { return userValue; } } public String toString() { return stack.toString() + "::" + lvValues.toString(); } public OpcodeStack() { stack = new ArrayList<Item>(); lvValues = new ArrayList<Item>(); jumpStack = new Stack<List<Item>>(); } boolean needToMerge = true; public void mergeJumps(DismantleBytecode dbc) { if (!needToMerge) return; needToMerge = false; List<Item> jumpEntry = jumpEntries.get(dbc.getPC()); if (jumpEntry != null) { if (DEBUG) { System.out.println("XXXXXXX"); System.out.println("merging lvValues at jump target " + dbc.getPC() + " -> " + jumpEntry); System.out.println(" current lvValues " + lvValues); } mergeLists(lvValues, jumpEntry); if (DEBUG) System.out.println(" merged lvValues " + lvValues); } if (dbc.getPC() == jumpTarget) { jumpTarget = -1; if (!jumpStack.empty()) { List<Item> stackToMerge = jumpStack.pop(); if (DEBUG) { System.out.println("************"); System.out.println("merging stacks at " + dbc.getPC() + " -> " + stackToMerge); System.out.println(" current stack " + stack); } mergeLists(stack, stackToMerge); if (DEBUG) System.out.println(" updated stack " + stack); } } } public void sawOpcode(DismantleBytecode dbc, int seen) { int register; String signature; Item it, it2, it3; Constant cons; mergeJumps(dbc); needToMerge = true; try { switch (seen) { case ALOAD: pushByLocalObjectLoad(dbc, dbc.getRegisterOperand()); break; case ALOAD_0: case ALOAD_1: case ALOAD_2: case ALOAD_3: pushByLocalObjectLoad(dbc, seen - ALOAD_0); break; case DLOAD: pushByLocalLoad("D", dbc.getRegisterOperand()); break; case DLOAD_0: case DLOAD_1: case DLOAD_2: case DLOAD_3: pushByLocalLoad("D", seen - DLOAD_0); break; case FLOAD: 
pushByLocalLoad("F", dbc.getRegisterOperand()); break; case FLOAD_0: case FLOAD_1: case FLOAD_2: case FLOAD_3: pushByLocalLoad("F", seen - FLOAD_0); break; case ILOAD: pushByLocalLoad("I", dbc.getRegisterOperand()); break; case ILOAD_0: case ILOAD_1: case ILOAD_2: case ILOAD_3: pushByLocalLoad("I", seen - ILOAD_0); break; case LLOAD: pushByLocalLoad("J", dbc.getRegisterOperand()); break; case LLOAD_0: case LLOAD_1: case LLOAD_2: case LLOAD_3: pushByLocalLoad("J", seen - LLOAD_0); break; case GETSTATIC: { Item i = new Item(dbc.getSigConstantOperand(), FieldAnnotation.fromReferencedField(dbc)); push(i); break; } case LDC: case LDC_W: case LDC2_W: cons = dbc.getConstantRefOperand(); pushByConstant(dbc, cons); break; case INSTANCEOF: pop(); push(new Item("I")); break; case ARETURN: case DRETURN: case FRETURN: case IFEQ: case IFNE: case IFLT: case IFLE: case IFGT: case IFGE: case IFNONNULL: case IFNULL: case IRETURN: case LOOKUPSWITCH: case LRETURN: case TABLESWITCH: seenTransferOfControl = true; pop(); break; case MONITORENTER: case MONITOREXIT: case POP: case PUTSTATIC: pop(); break; case IF_ACMPEQ: case IF_ACMPNE: case IF_ICMPEQ: case IF_ICMPNE: case IF_ICMPLT: case IF_ICMPLE: case IF_ICMPGT: case IF_ICMPGE: seenTransferOfControl = true; pop(2); break; case POP2: case PUTFIELD: pop(2); break; case IALOAD: case SALOAD: pop(2); push(new Item("I")); break; case DUP: it = pop(); push(it); push(it); break; case DUP2: it = pop(); if (it.getSize() == 2) { push(it); push(it); } else { it2 = pop(); push(it2); push(it); push(it2); push(it); } break; case DUP_X1: it = pop(); it2 = pop(); push(it); push(it2); push(it); break; case DUP_X2: it = pop(); it2 = pop(); signature = it2.getSignature(); if (signature.equals("J") || signature.equals("D")) { push(it); push(it2); push(it); } else { it3 = pop(); push(it); push(it3); push(it2); push(it); } break; case DUP2_X1: it = pop(); it2 = pop(); signature = it.getSignature(); if (signature.equals("J") || signature.equals("D")) { 
push(it); push(it2); push(it); } else { it3 = pop(); push(it2); push(it); push(it3); push(it2); push(it); } break; case IINC: register = dbc.getRegisterOperand(); it = getLVValue( register ); it2 = new Item("I", new Integer(dbc.getIntConstant())); pushByIntMath( IADD, it, it2); pushByLocalStore(register); break; case ATHROW: pop(); break; case CHECKCAST: case NOP: break; case RET: case RETURN: seenTransferOfControl = true; break; case GOTO: case GOTO_W: //It is assumed that no stack items are present when seenTransferOfControl = true; if (getStackDepth() > 0) { jumpStack.push(new ArrayList<Item>(stack)); pop(); jumpTarget = dbc.getBranchTarget(); } break; case SWAP: Item i1 = pop(); Item i2 = pop(); push(i1); push(i2); break; case ICONST_M1: case ICONST_0: case ICONST_1: case ICONST_2: case ICONST_3: case ICONST_4: case ICONST_5: push(new Item("I", new Integer(seen-ICONST_0))); break; case LCONST_0: case LCONST_1: push(new Item("J", new Long(seen-LCONST_0))); break; case DCONST_0: case DCONST_1: push(new Item("D", new Double(seen-DCONST_0))); break; case FCONST_0: case FCONST_1: case FCONST_2: push(new Item("F", new Float(seen-FCONST_0))); break; case ACONST_NULL: push(new Item()); break; case ASTORE: case DSTORE: case FSTORE: case ISTORE: case LSTORE: pushByLocalStore(dbc.getRegisterOperand()); break; case ASTORE_0: case ASTORE_1: case ASTORE_2: case ASTORE_3: pushByLocalStore(seen - ASTORE_0); break; case DSTORE_0: case DSTORE_1: case DSTORE_2: case DSTORE_3: pushByLocalStore(seen - DSTORE_0); break; case FSTORE_0: case FSTORE_1: case FSTORE_2: case FSTORE_3: pushByLocalStore(seen - FSTORE_0); break; case ISTORE_0: case ISTORE_1: case ISTORE_2: case ISTORE_3: pushByLocalStore(seen - ISTORE_0); break; case LSTORE_0: case LSTORE_1: case LSTORE_2: case LSTORE_3: pushByLocalStore(seen - LSTORE_0); break; case GETFIELD: pop(); push(new Item(dbc.getSigConstantOperand(), FieldAnnotation.fromReferencedField(dbc))); break; case ARRAYLENGTH: pop(); push(new Item("I")); 
break; case BALOAD: { pop(2); Item v = new Item("I"); v.setSpecialKind(Item.BYTE_ARRAY_LOAD); push(v); break; } case CALOAD: pop(2); push(new Item("I")); break; case DALOAD: pop(2); push(new Item("D")); break; case FALOAD: pop(2); push(new Item("F")); break; case LALOAD: pop(2); push(new Item("J")); break; case AASTORE: case BASTORE: case CASTORE: case DASTORE: case FASTORE: case IASTORE: case LASTORE: case SASTORE: pop(3); break; case BIPUSH: case SIPUSH: push(new Item("I", new Integer(dbc.getIntConstant()))); break; case IADD: case ISUB: case IMUL: case IDIV: case IAND: case IOR: case IXOR: case ISHL: case ISHR: case IREM: case IUSHR: it = pop(); it2 = pop(); pushByIntMath(seen, it, it2); break; case INEG: it = pop(); if (it.getConstant() != null) { push(new Item("I", new Integer(-(Integer) it.getConstant()))); } else { push(new Item("I")); } break; case LNEG: it = pop(); if (it.getConstant() != null) { push(new Item("J", new Long(-(Long) it.getConstant()))); } else { push(new Item("J")); } break; case DNEG: it = pop(); if (it.getConstant() != null) { push(new Item("D", new Double(-(Double) it.getConstant()))); } else { push(new Item("D")); } break; case LADD: case LSUB: case LMUL: case LDIV: case LAND: case LOR: case LXOR: case LSHL: case LSHR: case LREM: case LUSHR: if (DEBUG) System.out.println("Long math: " + this); it = pop(); it2 = pop(); try { pushByLongMath(seen, it, it2); } catch (Exception e) { e.printStackTrace(); } finally { if (DEBUG) System.out.println("After long math: " + this); } break; case LCMP: it = pop(); it2 = pop(); if ((it.getConstant() != null) && it2.getConstant() != null) { long l = (Long) it.getConstant(); long l2 = (Long) it.getConstant(); if (l2 < l) push(new Item("I", new Integer(-1))); else if (l2 > l) push(new Item("I", new Integer(1))); else push(new Item("I", new Integer(0))); } else { push(new Item("I")); } break; case FCMPG: case FCMPL: it = pop(); it2 = pop(); if ((it.getConstant() != null) && it2.getConstant() != null) { 
float f = (Float) it.getConstant(); float f2 = (Float) it.getConstant(); if (f2 < f) push(new Item("I", new Integer(-1))); else if (f2 > f) push(new Item("I", new Integer(1))); else push(new Item("I", new Integer(0))); } else { push(new Item("I")); } break; case DCMPG: case DCMPL: it = pop(); it2 = pop(); if ((it.getConstant() != null) && it2.getConstant() != null) { double d = (Double) it.getConstant(); double d2 = (Double) it.getConstant(); if (d2 < d) push(new Item("I", new Integer(-1))); else if (d2 > d) push(new Item("I", new Integer(1))); else push(new Item("I", new Integer(0))); } else { push(new Item("I")); } break; case FADD: case FSUB: case FMUL: case FDIV: it = pop(); it2 = pop(); pushByFloatMath(seen, it, it2); break; case DADD: case DSUB: case DMUL: case DDIV: case DREM: it = pop(); it2 = pop(); pushByDoubleMath(seen, it, it2); break; case I2B: it = pop(); if (it.getConstant() != null) { push(new Item("I", new Integer((int)((byte)((Integer)it.getConstant()).intValue())))); } else { push(new Item("I")); } break; case I2C: it = pop(); if (it.getConstant() != null) { push(new Item("I", new Integer((int)((char)((Integer)it.getConstant()).intValue())))); } else { push(new Item("I")); } break; case I2D: it = pop(); if (it.getConstant() != null) { push(new Item("D", new Double((double)((Integer)it.getConstant()).intValue()))); } else { push(new Item("D")); } break; case I2F: it = pop(); if (it.getConstant() != null) { push(new Item("F", new Float((float)((Integer)it.getConstant()).intValue()))); } else { push(new Item("F")); } break; case I2L:{ it = pop(); Item newValue; if (it.getConstant() != null) { newValue = new Item("J", new Long((long)((Integer)it.getConstant()).intValue())); } else { newValue = new Item("J"); } newValue.setSpecialKind(it.getSpecialKind()); push(newValue); } break; case I2S: it = pop(); if (it.getConstant() != null) { push(new Item("I", new Integer((int)((short)((Integer)it.getConstant()).intValue())))); } else { push(new Item("I")); } 
break; case D2I: it = pop(); if (it.getConstant() != null) { push(new Item("I", new Integer((Integer) it.getConstant()))); } else { push(new Item("I")); } break; case D2F: it = pop(); if (it.getConstant() != null) { push(new Item("F", new Float((float)((Double)it.getConstant()).doubleValue()))); } else { push(new Item("F")); } break; case D2L: it = pop(); if (it.getConstant() != null) { push(new Item("J", new Long((long)((Double)it.getConstant()).doubleValue()))); } else { push(new Item("J")); } break; case L2I: it = pop(); if (it.getConstant() != null) { push(new Item("I", new Integer((int)((Long)it.getConstant()).longValue()))); } else { push(new Item("I")); } break; case L2D: it = pop(); if (it.getConstant() != null) { push(new Item("D", new Double((double)((Long)it.getConstant()).longValue()))); } else { push(new Item("D")); } break; case L2F: it = pop(); if (it.getConstant() != null) { push(new Item("F", new Float((float)((Long)it.getConstant()).longValue()))); } else { push(new Item("F")); } break; case F2I: it = pop(); if (it.getConstant() != null) { push(new Item("I", new Integer((int)((Float)it.getConstant()).floatValue()))); } else { push(new Item("I")); } break; case F2D: it = pop(); if (it.getConstant() != null) { push(new Item("D", new Double((double)((Float)it.getConstant()).floatValue()))); } else { push(new Item("D")); } break; case NEW: pushBySignature("L" + dbc.getClassConstantOperand() + ";"); break; case NEWARRAY: pop(); signature = "[" + BasicType.getType((byte)dbc.getIntConstant()).getSignature(); pushBySignature(signature); break; // According to the VM Spec 4.4.1, anewarray and multianewarray // can refer to normal class/interface types (encoded in // "internal form"), or array classes (encoded as signatures // beginning with "["). 
case ANEWARRAY: pop(); signature = dbc.getClassConstantOperand(); if (!signature.startsWith("[")) { signature = "L" + signature + ";"; } pushBySignature(signature); break; case MULTIANEWARRAY: int dims = dbc.getIntConstant(); while ((dims pop(); } signature = dbc.getClassConstantOperand(); if (!signature.startsWith("[")) { signature = "L" + signature + ";"; } pushBySignature(signature); break; case AALOAD: pop(); it = pop(); pushBySignature(it.getElementSignature()); break; case JSR: push(new Item("")); break; case INVOKEINTERFACE: case INVOKESPECIAL: case INVOKESTATIC: case INVOKEVIRTUAL: pushByInvoke(dbc, seen != INVOKESTATIC); if (dbc.getNameConstantOperand().equals("nextInt")) { Item i = pop(); i.setSpecialKind(Item.RANDOM_INT); push(i); } break; default: throw new UnsupportedOperationException("OpCode not supported yet" ); } } catch (RuntimeException e) { //If an error occurs, we clear the stack and locals. one of two things will occur. //Either the client will expect more stack items than really exist, and so they're condition check will fail, //or the stack will resync with the code. 
But hopefully not false positives if (DEBUG) e.printStackTrace(); clear(); } finally { if (exceptionHandlers.get(dbc.getNextPC())) push(new Item()); if (DEBUG) { System.out.println(OPCODE_NAMES[seen] + " stack depth: " + getStackDepth()); System.out.println(this); } } } private void mergeLists(List<Item> mergeInto, List<Item> stackToMerge) { // merge stacks if (mergeInto.size() != stackToMerge.size()) { if (DEBUG) { System.out.println("Bad merging stacks"); System.out.println("current stack: " + mergeInto); System.out.println("jump stack: " + stackToMerge); } } else { if (DEBUG) { System.out.println("Merging stacks"); System.out.println("current stack: " + mergeInto); System.out.println("jump stack: " + stackToMerge); } for (int i = 0; i < mergeInto.size(); i++) mergeInto.set(i, Item.merge(mergeInto.get(i), stackToMerge.get(i))); if (DEBUG) { System.out.println("merged stack: " + mergeInto); } } } public void clear() { stack.clear(); lvValues.clear(); jumpStack.clear(); } BitSet exceptionHandlers = new BitSet(); private Map<Integer, List<Item>> jumpEntries = new HashMap<Integer, List<Item>>(); private void addJumpValue(int target) { List<Item> atTarget = jumpEntries.get(target); if (atTarget == null) { jumpEntries.put(target, new ArrayList(lvValues)); return; } mergeLists(atTarget, lvValues); } public int resetForMethodEntry(final DismantleBytecode v) { jumpEntries.clear(); int result= resetForMethodEntry0(v); Code code = v.getMethod().getCode(); if (code == null) return result; if (false) { // Be clever DismantleBytecode branchAnalysis = new DismantleBytecode() { @Override public void sawOpcode(int seen) { OpcodeStack.this.sawOpcode(this,seen); } @Override public void sawBranchTo(int pc) { addJumpValue(pc); } }; branchAnalysis.setupVisitorForClass(v.getThisClass()); branchAnalysis.doVisitMethod(v.getMethod()); if (DEBUG && !jumpEntries.isEmpty()) { System.out.println("Found dataflow for jumps"); for(Integer pc : jumpEntries.keySet()) System.out.println(pc + " -> " 
+ jumpEntries.get(pc)); } resetForMethodEntry0(v); } return result; } private int resetForMethodEntry0(PreorderVisitor v) { if (DEBUG) System.out.println(" stack.clear(); jumpTarget = -1; lvValues.clear(); jumpStack.clear(); seenTransferOfControl = false; String className = v.getClassName(); String signature = v.getMethodSig(); exceptionHandlers.clear(); Method m = v.getMethod(); Code code = m.getCode(); if (code != null) { CodeException[] exceptionTable = code.getExceptionTable(); if (exceptionTable != null) for(CodeException ex : exceptionTable) exceptionHandlers.set(ex.getHandlerPC()); } if (DEBUG) System.out.println(" --- " + className + " " + m.getName() + " " + signature); Type[] argTypes = Type.getArgumentTypes(signature); int reg = 0; if (!m.isStatic()) { Item it = new Item("L" + className+";"); it.isInitialParameter = true; it.registerNumber = reg; setLVValue( reg++, it); } for (Type argType : argTypes) { Item it = new Item(argType.getSignature()); it.registerNumber = reg; it.isInitialParameter = true; setLVValue(reg++, it); } return reg; } public int getStackDepth() { return stack.size(); } public Item getStackItem(int stackOffset) { if (stackOffset < 0 || stackOffset >= stack.size()) { assert false; return new Item("Lfindbugs/OpcodeStackError;"); } int tos = stack.size() - 1; int pos = tos - stackOffset; try { return stack.get(pos); } catch (ArrayIndexOutOfBoundsException e) { throw new ArrayIndexOutOfBoundsException( "Requested item at offset " + stackOffset + " in a stack of size " + stack.size() +", made request for position " + pos); } } private Item pop() { return stack.remove(stack.size()-1); } private void pop(int count) { while ((count pop(); } private void push(Item i) { stack.add(i); } private void pushByConstant(DismantleBytecode dbc, Constant c) { if (c instanceof ConstantClass) push(new Item("Ljava/lang/Class;", null)); else if (c instanceof ConstantInteger) push(new Item("I", new Integer(((ConstantInteger) c).getBytes()))); else if (c 
instanceof ConstantString) { int s = ((ConstantString) c).getStringIndex(); push(new Item("Ljava/lang/String;", getStringFromIndex(dbc, s))); } else if (c instanceof ConstantFloat) push(new Item("F", new Float(((ConstantFloat) c).getBytes()))); else if (c instanceof ConstantDouble) push(new Item("D", new Double(((ConstantDouble) c).getBytes()))); else if (c instanceof ConstantLong) push(new Item("J", new Long(((ConstantLong) c).getBytes()))); else throw new UnsupportedOperationException("Constant type not expected" ); } private void pushByLocalObjectLoad(DismantleBytecode dbc, int register) { Method m = dbc.getMethod(); LocalVariableTable lvt = m.getLocalVariableTable(); if (lvt != null) { LocalVariable lv = LVTHelper.getLocalVariableAtPC(lvt, register, dbc.getPC()); if (lv != null) { String signature = lv.getSignature(); pushByLocalLoad(signature, register); return; } } pushByLocalLoad("", register); } private void pushByIntMath(int seen, Item it, Item it2) { if (DEBUG) System.out.println("pushByIntMath: " + it.getConstant() + " " + it2.getConstant() ); Item newValue = new Item("I"); try { if ((it.getConstant() != null) && it2.getConstant() != null) { Integer intValue2 = (Integer) it2.getConstant(); Integer intValue1 = (Integer) it.getConstant(); if (seen == IADD) newValue = new Item("I",intValue2 + intValue1); else if (seen == ISUB) newValue = new Item("I",intValue2 - intValue1); else if (seen == IMUL) newValue = new Item("I", intValue2 * intValue1); else if (seen == IDIV) newValue = new Item("I", intValue2 / intValue1); else if (seen == IAND) { newValue = new Item("I", intValue2 & intValue1); if ((intValue1&0xff) == 0 && intValue1 != 0 || (intValue2&0xff) == 0 && intValue2 != 0 ) newValue.specialKind = Item.LOW_8_BITS_CLEAR; } else if (seen == IOR) newValue = new Item("I",intValue2 | intValue1); else if (seen == IXOR) newValue = new Item("I",intValue2 ^ intValue1); else if (seen == ISHL) { newValue = new Item("I",intValue2 << intValue1); if (intValue1 >= 8) 
newValue.specialKind = Item.LOW_8_BITS_CLEAR; } else if (seen == ISHR) newValue = new Item("I",intValue2 >> intValue1); else if (seen == IREM) newValue = new Item("I", intValue2 % intValue1); else if (seen == IUSHR) newValue = new Item("I", intValue2 >>> intValue1); } else if (it2.getConstant() != null && seen == ISHL && (Integer) it2.getConstant() >= 8) newValue.specialKind = Item.LOW_8_BITS_CLEAR; else if (it2.getConstant() != null && seen == IAND && ((Integer) it2.getConstant() & 0xff) == 0) newValue.specialKind = Item.LOW_8_BITS_CLEAR; } catch (RuntimeException e) { // ignore it } if (DEBUG) System.out.println("push: " + newValue); push(newValue); } private void pushByLongMath(int seen, Item it, Item it2) { Item newValue = new Item("J"); try { if ((it.getConstant() != null) && it2.getConstant() != null) { Long longValue2 = ((Long) it2.getConstant()); if (seen == LSHL) { newValue =new Item("J", longValue2 << ((Number) it.getConstant()).intValue()); if (((Number) it.getConstant()).intValue() >= 8) newValue.specialKind = Item.LOW_8_BITS_CLEAR; } else if (seen == LSHR) newValue =new Item("J", longValue2 >> ((Number) it.getConstant()).intValue()); else if (seen == LUSHR) newValue =new Item("J", longValue2 >>> ((Number) it.getConstant()).intValue()); else { Long longValue1 = ((Long) it.getConstant()); if (seen == LADD) newValue = new Item("J", longValue2 + longValue1); else if (seen == LSUB) newValue = new Item("J", longValue2 - longValue1); else if (seen == LMUL) newValue = new Item("J", longValue2 * longValue1); else if (seen == LDIV) newValue =new Item("J", longValue2 / longValue1); else if (seen == LAND) { newValue = new Item("J", longValue2 & longValue1); if ((longValue1&0xff) == 0 && longValue1 != 0 || (longValue2&0xff) == 0 && longValue2 != 0 ) newValue.specialKind = Item.LOW_8_BITS_CLEAR; } else if (seen == LOR) newValue = new Item("J", longValue2 | longValue1); else if (seen == LXOR) newValue =new Item("J", longValue2 ^ longValue1); else if (seen == LREM) 
newValue =new Item("J", longValue2 % longValue1); } } else if (it2.getConstant() != null && seen == LSHR && ((Integer) it2.getConstant()) >= 8) newValue.specialKind = Item.LOW_8_BITS_CLEAR; else if (it2.getConstant() != null && seen == LAND && (((Long) it2.getConstant()) & 0xff) == 0) newValue.specialKind = Item.LOW_8_BITS_CLEAR; } catch (RuntimeException e) { // ignore it } push(newValue); } private void pushByFloatMath(int seen, Item it, Item it2) { if ((it.getConstant() != null) && it2.getConstant() != null) { if (seen == FADD) push(new Item("F", ((Float) it2.getConstant()) + ((Float) it.getConstant()))); else if (seen == FSUB) push(new Item("F", ((Float) it2.getConstant()) - ((Float) it.getConstant()))); else if (seen == FMUL) push(new Item("F", ((Float) it2.getConstant()) * ((Float) it.getConstant()))); else if (seen == FDIV) push(new Item("F", ((Float) it2.getConstant()) / ((Float) it.getConstant()))); } else { push(new Item("F")); } } private void pushByDoubleMath(int seen, Item it, Item it2) { if ((it.getConstant() != null) && it2.getConstant() != null) { if (seen == DADD) push(new Item("D", ((Double) it2.getConstant()) + ((Double) it.getConstant()))); else if (seen == DSUB) push(new Item("D", ((Double) it2.getConstant()) - ((Double) it.getConstant()))); else if (seen == DMUL) push(new Item("D", ((Double) it2.getConstant()) * ((Double) it.getConstant()))); else if (seen == DDIV) push(new Item("D", ((Double) it2.getConstant()) / ((Double) it.getConstant()))); else if (seen == DREM) push(new Item("D")); } else { push(new Item("D")); } } private void pushByInvoke(DismantleBytecode dbc, boolean popThis) { String signature = dbc.getSigConstantOperand(); Type[] argTypes = Type.getArgumentTypes(signature); pop(argTypes.length+(popThis ? 
1 : 0)); pushBySignature(Type.getReturnType(signature).getSignature()); } private String getStringFromIndex(DismantleBytecode dbc, int i) { ConstantUtf8 name = (ConstantUtf8) dbc.getConstantPool().getConstant(i); return name.getBytes(); } private void pushBySignature(String s) { if ("V".equals(s)) return; push(new Item(s, null)); } private void pushByLocalStore(int register) { Item it = pop(); setLVValue( register, it ); } private void pushByLocalLoad(String signature, int register) { Item it = getLVValue(register); if (it == null) { Item item = new Item(signature); item.registerNumber = register; push(item); } else if (it.getRegisterNumber() >= 0) push(it); else { push(new Item(it, register)); } } private void setLVValue(int index, Item value ) { int addCount = index - lvValues.size() + 1; while ((addCount lvValues.add(null); if (seenTransferOfControl) value = Item.merge(value, lvValues.get(index) ); lvValues.set(index, value); } private Item getLVValue(int index) { if (index >= lvValues.size()) return null; return lvValues.get(index); } } // vim:ts=4
package edu.umd.cs.findbugs; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.util.ArrayList; import java.util.BitSet; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.annotation.meta.TypeQualifier; import javax.annotation.meta.When; import org.apache.bcel.Repository; import org.apache.bcel.classfile.Code; import org.apache.bcel.classfile.CodeException; import org.apache.bcel.classfile.Constant; import org.apache.bcel.classfile.ConstantClass; import org.apache.bcel.classfile.ConstantDouble; import org.apache.bcel.classfile.ConstantFloat; import org.apache.bcel.classfile.ConstantInteger; import org.apache.bcel.classfile.ConstantLong; import org.apache.bcel.classfile.ConstantString; import org.apache.bcel.classfile.ConstantUtf8; import org.apache.bcel.classfile.JavaClass; import org.apache.bcel.classfile.LocalVariable; import org.apache.bcel.classfile.LocalVariableTable; import org.apache.bcel.classfile.Method; import org.apache.bcel.generic.BasicType; import org.apache.bcel.generic.Type; //import sun.tools.tree.NewInstanceExpression; import edu.umd.cs.findbugs.annotations.CheckForNull; import edu.umd.cs.findbugs.ba.AnalysisContext; import edu.umd.cs.findbugs.ba.AnalysisFeatures; import edu.umd.cs.findbugs.ba.ClassMember; import edu.umd.cs.findbugs.ba.FieldSummary; import edu.umd.cs.findbugs.ba.XFactory; import edu.umd.cs.findbugs.ba.XField; import edu.umd.cs.findbugs.ba.XMethod; import edu.umd.cs.findbugs.classfile.CheckedAnalysisException; import edu.umd.cs.findbugs.classfile.Global; import edu.umd.cs.findbugs.classfile.IAnalysisCache; import edu.umd.cs.findbugs.classfile.MethodDescriptor; import edu.umd.cs.findbugs.classfile.engine.bcel.AnalysisFactory; import edu.umd.cs.findbugs.internalAnnotations.SlashedClassName; import edu.umd.cs.findbugs.util.ClassName; import edu.umd.cs.findbugs.util.Util; import 
edu.umd.cs.findbugs.visitclass.Constants2; import edu.umd.cs.findbugs.visitclass.DismantleBytecode; import edu.umd.cs.findbugs.visitclass.LVTHelper; import edu.umd.cs.findbugs.visitclass.PreorderVisitor; /** * tracks the types and numbers of objects that are currently on the operand stack * throughout the execution of method. To use, a detector should instantiate one for * each method, and call <p>stack.sawOpcode(this,seen);</p> at the bottom of their sawOpcode method. * at any point you can then inspect the stack and see what the types of objects are on * the stack, including constant values if they were pushed. The types described are of * course, only the static types. * There are some outstanding opcodes that have yet to be implemented, I couldn't * find any code that actually generated these, so i didn't put them in because * I couldn't test them: * <ul> * <li>dup2_x2</li> * <li>jsr_w</li> * <li>wide</li> * </ul> */ public class OpcodeStack implements Constants2 { private static final boolean DEBUG = SystemProperties.getBoolean("ocstack.debug"); private static final boolean DEBUG2 = DEBUG; private List<Item> stack; private List<Item> lvValues; private List<Integer> lastUpdate; private boolean top; static class HttpParameterInjection { HttpParameterInjection(String parameterName, int pc) { this.parameterName = parameterName; this.pc = pc; } String parameterName; int pc; } private boolean seenTransferOfControl = false; private boolean useIterativeAnalysis = AnalysisContext.currentAnalysisContext().getBoolProperty(AnalysisFeatures.INTERATIVE_OPCODE_STACK_ANALYSIS); public static class Item { @Documented @TypeQualifier(applicableTo=Integer.class) @Retention(RetentionPolicy.RUNTIME) public @interface SpecialKind {} public static final @SpecialKind int NOT_SPECIAL = 0; public static final @SpecialKind int SIGNED_BYTE = 1; public static final @SpecialKind int RANDOM_INT = 2; public static final @SpecialKind int LOW_8_BITS_CLEAR = 3; public static final @SpecialKind 
int HASHCODE_INT = 4; public static final @SpecialKind int INTEGER_SUM = 5; public static final @SpecialKind int AVERAGE_COMPUTED_USING_DIVISION = 6; public static final @SpecialKind int FLOAT_MATH = 7; public static final @SpecialKind int RANDOM_INT_REMAINDER = 8; public static final @SpecialKind int HASHCODE_INT_REMAINDER = 9; public static final @SpecialKind int FILE_SEPARATOR_STRING = 10; public static final @SpecialKind int MATH_ABS = 11; public static final @SpecialKind int NON_NEGATIVE = 12; public static final @SpecialKind int NASTY_FLOAT_MATH = 13; public static final @SpecialKind int FILE_OPENED_IN_APPEND_MODE = 14; public static final @SpecialKind int SERVLET_REQUEST_TAINTED = 15; public static final @SpecialKind int NEWLY_ALLOCATED = 16; public static final @SpecialKind int ZERO_MEANS_NULL = 17; public static final @SpecialKind int NONZERO_MEANS_NULL = 18; private static final int IS_INITIAL_PARAMETER_FLAG=1; private static final int COULD_BE_ZERO_FLAG = 2; private static final int IS_NULL_FLAG = 4; public static final Object UNKNOWN = null; private @SpecialKind int specialKind = NOT_SPECIAL; private String signature; private Object constValue = UNKNOWN; private @CheckForNull ClassMember source; private int pc = -1; private int flags; private int registerNumber = -1; private Object userValue = null; private HttpParameterInjection injection = null; private int fieldLoadedFromRegister = -1; public void makeCrossMethod() { pc = -1; registerNumber = -1; fieldLoadedFromRegister = -1; } public int getSize() { if (signature.equals("J") || signature.equals("D")) return 2; return 1; } public int getPC() { return pc; } public void setPC(int pc) { this.pc = pc; } public boolean isWide() { return getSize() == 2; } @Override public int hashCode() { int r = 42 + specialKind; if (signature != null) r+= signature.hashCode(); r *= 31; if (constValue != null) r+= constValue.hashCode(); r *= 31; if (source != null) r+= source.hashCode(); r *= 31; r += flags; r *= 31; r += 
registerNumber; return r; } @Override public boolean equals(Object o) { if (!(o instanceof Item)) return false; Item that = (Item) o; return Util.nullSafeEquals(this.signature, that.signature) && Util.nullSafeEquals(this.constValue, that.constValue) && Util.nullSafeEquals(this.source, that.source) && Util.nullSafeEquals(this.userValue, that.userValue) && Util.nullSafeEquals(this.injection, that.injection) && this.specialKind == that.specialKind && this.registerNumber == that.registerNumber && this.flags == that.flags && this.fieldLoadedFromRegister == that.fieldLoadedFromRegister; } @Override public String toString() { StringBuilder buf = new StringBuilder("< "); buf.append(signature); switch(specialKind) { case SIGNED_BYTE: buf.append(", byte_array_load"); break; case RANDOM_INT: buf.append(", random_int"); break; case LOW_8_BITS_CLEAR: buf.append(", low8clear"); break; case HASHCODE_INT: buf.append(", hashcode_int"); break; case INTEGER_SUM: buf.append(", int_sum"); break; case AVERAGE_COMPUTED_USING_DIVISION: buf.append(", averageComputingUsingDivision"); break; case FLOAT_MATH: buf.append(", floatMath"); break; case NASTY_FLOAT_MATH: buf.append(", nastyFloatMath"); break; case HASHCODE_INT_REMAINDER: buf.append(", hashcode_int_rem"); break; case RANDOM_INT_REMAINDER: buf.append(", random_int_rem"); break; case FILE_SEPARATOR_STRING: buf.append(", file_separator_string"); break; case MATH_ABS: buf.append(", Math.abs"); break; case NON_NEGATIVE: buf.append(", non_negative"); break; case FILE_OPENED_IN_APPEND_MODE: buf.append(", file opened in append mode"); break; case SERVLET_REQUEST_TAINTED: buf.append(", servlet request tainted"); break; case NEWLY_ALLOCATED: buf.append(", new"); break; case ZERO_MEANS_NULL: buf.append(", zero means null"); break; case NONZERO_MEANS_NULL: buf.append(", nonzero means null"); break; case NOT_SPECIAL : break; default: buf.append(", #" + specialKind); break; } if (constValue != UNKNOWN) { if (constValue instanceof String) { 
buf.append(", \""); buf.append(constValue); buf.append("\""); } else { buf.append(", "); buf.append(constValue); } } if (source instanceof XField) { buf.append(", "); if (fieldLoadedFromRegister != -1) buf.append(fieldLoadedFromRegister).append(':'); buf.append(source); } if (source instanceof XMethod) { buf.append(", return value from "); buf.append(source); } if (isInitialParameter()) { buf.append(", IP"); } if (isNull()) { buf.append(", isNull"); } if (registerNumber != -1) { buf.append(", r"); buf.append(registerNumber); } if (isCouldBeZero()) buf.append(", cbz"); buf.append(" >"); return buf.toString(); } public static Item merge(Item i1, Item i2) { if (i1 == null) return i2; if (i2 == null) return i1; if (i1.equals(i2)) return i1; Item m = new Item(); m.flags = i1.flags & i2.flags; m.setCouldBeZero(i1.isCouldBeZero() || i2.isCouldBeZero()); if (i1.pc == i2.pc) m.pc = i1.pc; if (Util.nullSafeEquals(i1.signature, i2.signature)) m.signature = i1.signature; else if (i1.isNull()) m.signature = i2.signature; else if (i2.isNull()) m.signature = i1.signature; if (Util.nullSafeEquals(i1.constValue, i2.constValue)) m.constValue = i1.constValue; if (Util.nullSafeEquals(i1.source, i2.source)) { m.source = i1.source; } else if ("".equals(i1.constValue)) m.source = i2.source; else if ("".equals(i2.constValue)) m.source = i1.source; if (Util.nullSafeEquals(i1.userValue, i2.userValue)) m.userValue = i1.userValue; if (i1.registerNumber == i2.registerNumber) m.registerNumber = i1.registerNumber; if (i1.fieldLoadedFromRegister == i2.fieldLoadedFromRegister) m.fieldLoadedFromRegister = i1.fieldLoadedFromRegister; if (i1.specialKind == SERVLET_REQUEST_TAINTED) { m.specialKind = SERVLET_REQUEST_TAINTED; m.injection = i1.injection; } else if (i2.specialKind == SERVLET_REQUEST_TAINTED) { m.specialKind = SERVLET_REQUEST_TAINTED; m.injection = i2.injection; } else if (i1.specialKind == i2.specialKind) m.specialKind = i1.specialKind; else if (i1.specialKind == NASTY_FLOAT_MATH || 
i2.specialKind == NASTY_FLOAT_MATH) m.specialKind = NASTY_FLOAT_MATH; else if (i1.specialKind == FLOAT_MATH || i2.specialKind == FLOAT_MATH) m.specialKind = FLOAT_MATH; if (DEBUG) System.out.println("Merge " + i1 + " and " + i2 + " gives " + m); return m; } public Item(String signature, int constValue) { this(signature, Integer.valueOf(constValue)); } public Item(String signature) { this(signature, UNKNOWN); } public Item(Item it) { this.signature = it.signature; this.constValue = it.constValue; this.source = it.source; this.registerNumber = it.registerNumber; this.userValue = it.userValue; this.injection = it.injection; this.flags = it.flags; this.specialKind = it.specialKind; this.pc = it.pc; } public Item(Item it, int reg) { this(it); this.registerNumber = reg; } public Item(String signature, FieldAnnotation f) { this.signature = signature; if (f != null) source = XFactory.createXField(f); fieldLoadedFromRegister = -1; } public Item(String signature, FieldAnnotation f, int fieldLoadedFromRegister) { this.signature = signature; if (f != null) source = XFactory.createXField(f); this.fieldLoadedFromRegister = fieldLoadedFromRegister; } public int getFieldLoadedFromRegister() { return fieldLoadedFromRegister; } public void setLoadedFromField(XField f, int fieldLoadedFromRegister) { source = f; this.fieldLoadedFromRegister = fieldLoadedFromRegister; this.registerNumber = -1; } public @CheckForNull String getHttpParameterName() { if (!isServletParameterTainted()) throw new IllegalStateException(); if (injection == null) return null; return injection.parameterName; } public int getInjectionPC() { if (!isServletParameterTainted()) throw new IllegalStateException(); if (injection == null) return -1; return injection.pc; } public Item(String signature, Object constantValue) { this.signature = signature; constValue = constantValue; if (constantValue instanceof Integer) { int value = (Integer) constantValue; if (value != 0 && (value & 0xff) == 0) specialKind = 
LOW_8_BITS_CLEAR; if (value == 0) setCouldBeZero(true); } else if (constantValue instanceof Long) { long value = (Long) constantValue; if (value != 0 && (value & 0xff) == 0) specialKind = LOW_8_BITS_CLEAR; if (value == 0) setCouldBeZero(true); } } public Item() { signature = "Ljava/lang/Object;"; constValue = null; setNull(true); } public static Item nullItem(String signature) { Item item = new Item(signature); item.constValue = null; item.setNull(true); return item; } /** Returns null for primitive and arrays */ public @CheckForNull JavaClass getJavaClass() throws ClassNotFoundException { String baseSig; if (isPrimitive() || isArray()) return null; baseSig = signature; if (baseSig.length() == 0) return null; baseSig = baseSig.substring(1, baseSig.length() - 1); baseSig = baseSig.replace('/', '.'); return Repository.lookupClass(baseSig); } public boolean isArray() { return signature.startsWith("["); } @Deprecated public String getElementSignature() { if (!isArray()) return signature; else { int pos = 0; int len = signature.length(); while (pos < len) { if (signature.charAt(pos) != '[') break; pos++; } return signature.substring(pos); } } public boolean isNonNegative() { if (specialKind == NON_NEGATIVE) return true; if (constValue instanceof Number) { double value = ((Number) constValue).doubleValue(); return value >= 0; } return false; } public boolean isPrimitive() { return !signature.startsWith("L") && !signature.startsWith("["); } public int getRegisterNumber() { return registerNumber; } public String getSignature() { return signature; } /** * Returns a constant value for this Item, if known. * NOTE: if the value is a constant Class object, the constant value returned is the name of the class. 
*/ public Object getConstant() { return constValue; } /** Use getXField instead */ @Deprecated public FieldAnnotation getFieldAnnotation() { return FieldAnnotation.fromXField(getXField()); } public XField getXField() { if (source instanceof XField) return (XField) source; return null; } /** * @param specialKind The specialKind to set. */ public void setSpecialKind(@SpecialKind int specialKind) { this.specialKind = specialKind; } /** * @return Returns the specialKind. */ public @SpecialKind int getSpecialKind() { return specialKind; } /** * @return Returns the specialKind. */ public boolean isBooleanNullnessValue() { return specialKind == ZERO_MEANS_NULL || specialKind == NONZERO_MEANS_NULL; } /** * attaches a detector specified value to this item * * @param value the custom value to set */ public void setUserValue(Object value) { userValue = value; } /** * * @return if this value is the return value of a method, give the method * invoked */ public @CheckForNull XMethod getReturnValueOf() { if (source instanceof XMethod) return (XMethod) source; return null; } public boolean couldBeZero() { return isCouldBeZero(); } public boolean mustBeZero() { Object value = getConstant(); return value instanceof Number && ((Number)value).intValue() == 0; } /** * gets the detector specified value for this item * * @return the custom value */ public Object getUserValue() { return userValue; } public boolean isServletParameterTainted() { return getSpecialKind() == Item.SERVLET_REQUEST_TAINTED; } public void setServletParameterTainted() { setSpecialKind(Item.SERVLET_REQUEST_TAINTED); } public boolean valueCouldBeNegative() { return !isNonNegative() && (getSpecialKind() == Item.RANDOM_INT || getSpecialKind() == Item.SIGNED_BYTE || getSpecialKind() == Item.HASHCODE_INT || getSpecialKind() == Item.RANDOM_INT_REMAINDER || getSpecialKind() == Item.HASHCODE_INT_REMAINDER); } public boolean checkForIntegerMinValue() { return !isNonNegative() && (getSpecialKind() == Item.RANDOM_INT || 
getSpecialKind() == Item.HASHCODE_INT ); } /** * @param isInitialParameter The isInitialParameter to set. */ private void setInitialParameter(boolean isInitialParameter) { setFlag(isInitialParameter, IS_INITIAL_PARAMETER_FLAG); } /** * @return Returns the isInitialParameter. */ public boolean isInitialParameter() { return (flags & IS_INITIAL_PARAMETER_FLAG) != 0; } /** * @param couldBeZero The couldBeZero to set. */ private void setCouldBeZero(boolean couldBeZero) { setFlag(couldBeZero, COULD_BE_ZERO_FLAG); } /** * @return Returns the couldBeZero. */ private boolean isCouldBeZero() { return (flags & COULD_BE_ZERO_FLAG) != 0; } /** * @param isNull The isNull to set. */ private void setNull(boolean isNull) { setFlag(isNull, IS_NULL_FLAG); } private void setFlag(boolean value, int flagBit) { if (value) flags |= flagBit; else flags &= ~flagBit; } /** * @return Returns the isNull. */ public boolean isNull() { return (flags & IS_NULL_FLAG) != 0; } public void clearNewlyAllocated() { if (specialKind == NEWLY_ALLOCATED) { if (signature.startsWith("Ljava/lang/StringB")) constValue = null; specialKind = NOT_SPECIAL; } } public boolean isNewlyAllocated() { return specialKind == NEWLY_ALLOCATED; } /** * @param i * @return */ public boolean hasConstantValue(int value) { if (constValue instanceof Number) return ((Number) constValue).intValue() == value; return false; } } @Override public String toString() { if (isTop()) return "TOP"; return stack.toString() + "::" + lvValues.toString(); } public OpcodeStack() { stack = new ArrayList<Item>(); lvValues = new ArrayList<Item>(); lastUpdate = new ArrayList<Integer>(); } boolean needToMerge = true; private boolean reachOnlyByBranch = false; public static String getExceptionSig(DismantleBytecode dbc, CodeException e) { if (e.getCatchType() == 0) return "Ljava/lang/Throwable;"; Constant c = dbc.getConstantPool().getConstant(e.getCatchType()); if (c instanceof ConstantClass) return 
"L"+((ConstantClass)c).getBytes(dbc.getConstantPool())+";"; return "Ljava/lang/Throwable;"; } public void mergeJumps(DismantleBytecode dbc) { if (!needToMerge) return; needToMerge = false; if (dbc.getPC() == zeroOneComing) { pop(); top = false; OpcodeStack.Item item = new Item("I"); if (oneMeansNull) item.setSpecialKind(Item.NONZERO_MEANS_NULL); else item.setSpecialKind(Item.ZERO_MEANS_NULL); item.setPC(dbc.getPC() - 8); item.setCouldBeZero(true); push(item); zeroOneComing= -1; if (DEBUG) System.out.println("Updated to " + this); return; } boolean stackUpdated = false; if (!isTop() && (convertJumpToOneZeroState == 3 || convertJumpToZeroOneState == 3)) { pop(); Item top = new Item("I"); top.setCouldBeZero(true); push(top); convertJumpToOneZeroState = convertJumpToZeroOneState = 0; stackUpdated = true; } List<Item> jumpEntry = null; if (jumpEntryLocations.get(dbc.getPC())) jumpEntry = jumpEntries.get(dbc.getPC()); if (jumpEntry != null) { setReachOnlyByBranch(false); List<Item> jumpStackEntry = jumpStackEntries.get(dbc.getPC()); if (DEBUG2) { System.out.println("XXXXXXX " + isReachOnlyByBranch()); System.out.println("merging lvValues at jump target " + dbc.getPC() + " -> " + jumpEntry); System.out.println(" current lvValues " + lvValues); System.out.println(" merging stack entry " + jumpStackEntry); System.out.println(" current stack values " + stack); } if (isTop()) { lvValues = new ArrayList<Item>(jumpEntry); if (jumpStackEntry != null) stack = new ArrayList<Item>(jumpStackEntry); else stack.clear(); setTop(false); return; } if (isReachOnlyByBranch()) { setTop(false); lvValues = new ArrayList<Item>(jumpEntry); if (!stackUpdated) { if (jumpStackEntry != null) stack = new ArrayList<Item>(jumpStackEntry); else stack.clear(); } } else { setTop(false); mergeLists(lvValues, jumpEntry, false); if (!stackUpdated && jumpStackEntry != null) mergeLists(stack, jumpStackEntry, false); } if (DEBUG) System.out.println(" merged lvValues " + lvValues); } else if 
(isReachOnlyByBranch() && !stackUpdated) { stack.clear(); for(CodeException e : dbc.getCode().getExceptionTable()) { if (e.getHandlerPC() == dbc.getPC()) { push(new Item(getExceptionSig(dbc, e))); setReachOnlyByBranch(false); setTop(false); return; } } setTop(true); } } int convertJumpToOneZeroState = 0; int convertJumpToZeroOneState = 0; private void setLastUpdate(int reg, int pc) { while (lastUpdate.size() <= reg) lastUpdate.add(0); lastUpdate.set(reg, pc); } public int getLastUpdate(int reg) { if (lastUpdate.size() <= reg) return 0; return lastUpdate.get(reg); } public int getNumLastUpdates() { return lastUpdate.size(); } int zeroOneComing = -1; boolean oneMeansNull; public void sawOpcode(DismantleBytecode dbc, int seen) { int register; String signature; Item it, it2, it3; Constant cons; if (dbc.isRegisterStore()) setLastUpdate(dbc.getRegisterOperand(), dbc.getPC()); mergeJumps(dbc); needToMerge = true; try { if (isTop()) { encountedTop = true; return; } if (seen == GOTO) { int nextPC = dbc.getPC() + 3; if (nextPC <= dbc.getMaxPC()) { int prevOpcode1 = dbc.getPrevOpcode(1); int prevOpcode2 = dbc.getPrevOpcode(2); try { int nextOpcode = dbc.getCodeByte(dbc.getPC() + 3); if ((prevOpcode1 == ICONST_0 || prevOpcode1 == ICONST_1) && (prevOpcode2 == IFNULL || prevOpcode2 == IFNONNULL) && (nextOpcode == ICONST_0 || nextOpcode == ICONST_1) && prevOpcode1 != nextOpcode) { oneMeansNull = prevOpcode1 == ICONST_0; if (prevOpcode2 != IFNULL) oneMeansNull = !oneMeansNull; zeroOneComing = nextPC+1; convertJumpToOneZeroState = convertJumpToZeroOneState = 0; } } catch(ArrayIndexOutOfBoundsException e) { throw e; // throw new ArrayIndexOutOfBoundsException(nextPC + " " + dbc.getMaxPC()); } } } switch (seen) { case ICONST_1: convertJumpToOneZeroState = 1; break; case GOTO: if (convertJumpToOneZeroState == 1 && dbc.getBranchOffset() == 4) convertJumpToOneZeroState = 2; else convertJumpToOneZeroState = 0; break; case ICONST_0: if (convertJumpToOneZeroState == 2) 
convertJumpToOneZeroState = 3; else convertJumpToOneZeroState = 0; break; default:convertJumpToOneZeroState = 0; } switch (seen) { case ICONST_0: convertJumpToZeroOneState = 1; break; case GOTO: if (convertJumpToZeroOneState == 1 && dbc.getBranchOffset() == 4) convertJumpToZeroOneState = 2; else convertJumpToZeroOneState = 0; break; case ICONST_1: if (convertJumpToZeroOneState == 2) convertJumpToZeroOneState = 3; else convertJumpToZeroOneState = 0; break; default:convertJumpToZeroOneState = 0; } switch (seen) { case ALOAD: pushByLocalObjectLoad(dbc, dbc.getRegisterOperand()); break; case ALOAD_0: case ALOAD_1: case ALOAD_2: case ALOAD_3: pushByLocalObjectLoad(dbc, seen - ALOAD_0); break; case DLOAD: pushByLocalLoad("D", dbc.getRegisterOperand()); break; case DLOAD_0: case DLOAD_1: case DLOAD_2: case DLOAD_3: pushByLocalLoad("D", seen - DLOAD_0); break; case FLOAD: pushByLocalLoad("F", dbc.getRegisterOperand()); break; case FLOAD_0: case FLOAD_1: case FLOAD_2: case FLOAD_3: pushByLocalLoad("F", seen - FLOAD_0); break; case ILOAD: pushByLocalLoad("I", dbc.getRegisterOperand()); break; case ILOAD_0: case ILOAD_1: case ILOAD_2: case ILOAD_3: pushByLocalLoad("I", seen - ILOAD_0); break; case LLOAD: pushByLocalLoad("J", dbc.getRegisterOperand()); break; case LLOAD_0: case LLOAD_1: case LLOAD_2: case LLOAD_3: pushByLocalLoad("J", seen - LLOAD_0); break; case GETSTATIC: { FieldSummary fieldSummary = AnalysisContext.currentAnalysisContext().getFieldSummary(); XField fieldOperand = dbc.getXFieldOperand(); if (fieldOperand != null && fieldSummary.isComplete() && !fieldOperand.isPublic()) { OpcodeStack.Item item = fieldSummary.getSummary(fieldOperand); if (item != null) { Item itm = new Item(item); itm.setLoadedFromField(fieldOperand, Integer.MAX_VALUE); push(itm); break; } } FieldAnnotation field = FieldAnnotation.fromReferencedField(dbc); Item i = new Item(dbc.getSigConstantOperand(), field, Integer.MAX_VALUE); if (field.getFieldName().equals("separator") && 
field.getClassName().equals("java.io.File")) { i.setSpecialKind(Item.FILE_SEPARATOR_STRING); } push(i); break; } case LDC: case LDC_W: case LDC2_W: cons = dbc.getConstantRefOperand(); pushByConstant(dbc, cons); break; case INSTANCEOF: pop(); push(new Item("I")); break; case IFEQ: case IFNE: case IFLT: case IFLE: case IFGT: case IFGE: case IFNONNULL: case IFNULL: seenTransferOfControl = true; { Item top = pop(); // if we see a test comparing a special negative value with 0, // reset all other such values on the opcode stack if (top.valueCouldBeNegative() && (seen == IFLT || seen == IFLE || seen == IFGT || seen == IFGE)) { int specialKind = top.getSpecialKind(); for(Item item : stack) if (item != null && item.getSpecialKind() == specialKind) item.setSpecialKind(0); for(Item item : lvValues) if (item != null && item.getSpecialKind() == specialKind) item.setSpecialKind(0); } } addJumpValue(dbc.getPC(), dbc.getBranchTarget()); break; case LOOKUPSWITCH: case TABLESWITCH: seenTransferOfControl = true; setReachOnlyByBranch(true); pop(); addJumpValue(dbc.getPC(), dbc.getBranchTarget()); int pc = dbc.getBranchTarget() - dbc.getBranchOffset(); for(int offset : dbc.getSwitchOffsets()) addJumpValue(dbc.getPC(), offset+pc); break; case ARETURN: case DRETURN: case FRETURN: case IRETURN: case LRETURN: seenTransferOfControl = true; setReachOnlyByBranch(true); pop(); break; case MONITORENTER: case MONITOREXIT: case POP: case PUTSTATIC: pop(); break; case IF_ACMPEQ: case IF_ACMPNE: case IF_ICMPEQ: case IF_ICMPNE: case IF_ICMPLT: case IF_ICMPLE: case IF_ICMPGT: case IF_ICMPGE: { seenTransferOfControl = true; Item right = pop(); Item left = pop(); if (right.hasConstantValue(Integer.MIN_VALUE) && left.checkForIntegerMinValue() || left.hasConstantValue(Integer.MIN_VALUE) && right.checkForIntegerMinValue() ) { for(Item i : stack) if (i != null && i.checkForIntegerMinValue()) i.setSpecialKind(Item.NOT_SPECIAL); for(Item i : lvValues) if (i != null && i.checkForIntegerMinValue()) 
i.setSpecialKind(Item.NOT_SPECIAL); } int branchTarget = dbc.getBranchTarget(); addJumpValue(dbc.getPC(), branchTarget); break; } case POP2: it = pop(); if (it.getSize() == 1) pop(); break; case PUTFIELD: pop(2); break; case IALOAD: case SALOAD: pop(2); push(new Item("I")); break; case DUP: handleDup(); break; case DUP2: handleDup2(); break; case DUP_X1: handleDupX1(); break; case DUP_X2: handleDupX2(); break; case DUP2_X1: handleDup2X1(); break; case DUP2_X2: handleDup2X2(); break; case IINC: register = dbc.getRegisterOperand(); it = getLVValue( register ); it2 = new Item("I", dbc.getIntConstant()); pushByIntMath(dbc, IADD, it2, it); pushByLocalStore(register); break; case ATHROW: pop(); seenTransferOfControl = true; setReachOnlyByBranch(true); setTop(true); break; case CHECKCAST: { String castTo = dbc.getClassConstantOperand(); if (castTo.charAt(0) != '[') castTo = "L" + castTo + ";"; it = new Item(pop()); it.signature = castTo; push(it); break; } case NOP: break; case RET: case RETURN: seenTransferOfControl = true; setReachOnlyByBranch(true); break; case GOTO: case GOTO_W: seenTransferOfControl = true; setReachOnlyByBranch(true); addJumpValue(dbc.getPC(), dbc.getBranchTarget()); stack.clear(); setTop(true); break; case SWAP: handleSwap(); break; case ICONST_M1: case ICONST_0: case ICONST_1: case ICONST_2: case ICONST_3: case ICONST_4: case ICONST_5: push(new Item("I", (seen-ICONST_0))); break; case LCONST_0: case LCONST_1: push(new Item("J", (long)(seen-LCONST_0))); break; case DCONST_0: case DCONST_1: push(new Item("D", (double)(seen-DCONST_0))); break; case FCONST_0: case FCONST_1: case FCONST_2: push(new Item("F", (float)(seen-FCONST_0))); break; case ACONST_NULL: push(new Item()); break; case ASTORE: case DSTORE: case FSTORE: case ISTORE: case LSTORE: pushByLocalStore(dbc.getRegisterOperand()); break; case ASTORE_0: case ASTORE_1: case ASTORE_2: case ASTORE_3: pushByLocalStore(seen - ASTORE_0); break; case DSTORE_0: case DSTORE_1: case DSTORE_2: case 
DSTORE_3: pushByLocalStore(seen - DSTORE_0); break; case FSTORE_0: case FSTORE_1: case FSTORE_2: case FSTORE_3: pushByLocalStore(seen - FSTORE_0); break; case ISTORE_0: case ISTORE_1: case ISTORE_2: case ISTORE_3: pushByLocalStore(seen - ISTORE_0); break; case LSTORE_0: case LSTORE_1: case LSTORE_2: case LSTORE_3: pushByLocalStore(seen - LSTORE_0); break; case GETFIELD: { FieldSummary fieldSummary = AnalysisContext.currentAnalysisContext().getFieldSummary(); XField fieldOperand = dbc.getXFieldOperand(); if (fieldOperand != null && fieldSummary.isComplete() && !fieldOperand.isPublic()) { OpcodeStack.Item item = fieldSummary.getSummary(fieldOperand); if (item != null) { Item addr = pop(); Item itm = new Item(item); itm.setLoadedFromField(fieldOperand, addr.getRegisterNumber()); push(itm); break; } } Item item = pop(); int reg = item.getRegisterNumber(); push(new Item(dbc.getSigConstantOperand(), FieldAnnotation.fromReferencedField(dbc), reg)); } break; case ARRAYLENGTH: { pop(); Item v = new Item("I"); v.setSpecialKind(Item.NON_NEGATIVE); push(v); } break; case BALOAD: { pop(2); Item v = new Item("I"); v.setSpecialKind(Item.SIGNED_BYTE); push(v); break; } case CALOAD: pop(2); push(new Item("I")); break; case DALOAD: pop(2); push(new Item("D")); break; case FALOAD: pop(2); push(new Item("F")); break; case LALOAD: pop(2); push(new Item("J")); break; case AASTORE: case BASTORE: case CASTORE: case DASTORE: case FASTORE: case IASTORE: case LASTORE: case SASTORE: pop(3); break; case BIPUSH: case SIPUSH: push(new Item("I", (Integer)dbc.getIntConstant())); break; case IADD: case ISUB: case IMUL: case IDIV: case IAND: case IOR: case IXOR: case ISHL: case ISHR: case IREM: case IUSHR: it = pop(); it2 = pop(); pushByIntMath(dbc, seen, it2, it); break; case INEG: it = pop(); if (it.getConstant() instanceof Integer) { push(new Item("I", ( Integer)(-(Integer) it.getConstant()))); } else { push(new Item("I")); } break; case LNEG: it = pop(); if (it.getConstant() instanceof Long) { 
push(new Item("J", ( Long)(-(Long) it.getConstant()))); } else { push(new Item("J")); } break; case FNEG: it = pop(); if (it.getConstant() instanceof Float) { push(new Item("F", ( Float)(-(Float) it.getConstant()))); } else { push(new Item("F")); } break; case DNEG: it = pop(); if (it.getConstant() instanceof Double) { push(new Item("D", ( Double)(-(Double) it.getConstant()))); } else { push(new Item("D")); } break; case LADD: case LSUB: case LMUL: case LDIV: case LAND: case LOR: case LXOR: case LSHL: case LSHR: case LREM: case LUSHR: it = pop(); it2 = pop(); pushByLongMath(seen, it2, it); break; case LCMP: handleLcmp(); break; case FCMPG: case FCMPL: handleFcmp(seen); break; case DCMPG: case DCMPL: handleDcmp(seen); break; case FADD: case FSUB: case FMUL: case FDIV: case FREM: it = pop(); it2 = pop(); pushByFloatMath(seen, it, it2); break; case DADD: case DSUB: case DMUL: case DDIV: case DREM: it = pop(); it2 = pop(); pushByDoubleMath(seen, it, it2); break; case I2B: it = pop(); if (it.getConstant() != null) { it =new Item("I", (byte)constantToInt(it)); } else { it = new Item("I"); } it.setSpecialKind(Item.SIGNED_BYTE); push(it); break; case I2C: it = pop(); if (it.getConstant() != null) { it = new Item("I", (char)constantToInt(it)); } else { it = new Item("I"); } it.setSpecialKind(Item.NON_NEGATIVE); push(it); break; case I2L: case D2L: case F2L:{ it = pop(); Item newValue; if (it.getConstant() != null) { newValue = new Item("J", constantToLong(it)); } else { newValue = new Item("J"); } newValue.setSpecialKind(it.getSpecialKind()); push(newValue); } break; case I2S: it = pop(); if (it.getConstant() != null) { push(new Item("I", (short)constantToInt(it))); } else { push(new Item("I")); } break; case L2I: case D2I: case F2I: it = pop(); if (it.getConstant() != null) { push(new Item("I",constantToInt(it))); } else { push(new Item("I")); } break; case L2F: case D2F: case I2F: it = pop(); if (it.getConstant() != null) { push(new Item("F", constantToFloat(it))); } else 
{ push(new Item("F")); } break; case F2D: case I2D: case L2D: it = pop(); if (it.getConstant() != null) { push(new Item("D", constantToDouble(it))); } else { push(new Item("D")); } break; case NEW: { Item item = new Item("L" + dbc.getClassConstantOperand() + ";", (Object) null); item.setSpecialKind(Item.NEWLY_ALLOCATED); push(item); } break; case NEWARRAY: pop(); signature = "[" + BasicType.getType((byte)dbc.getIntConstant()).getSignature(); pushBySignature(signature, dbc); break; // According to the VM Spec 4.4.1, anewarray and multianewarray // can refer to normal class/interface types (encoded in // "internal form"), or array classes (encoded as signatures // beginning with "["). case ANEWARRAY: pop(); signature = dbc.getClassConstantOperand(); if (signature.charAt(0) == '[') signature = "[" + signature; else signature = "[L" + signature + ";"; pushBySignature(signature, dbc); break; case MULTIANEWARRAY: int dims = dbc.getIntConstant(); for(int i = 0; i < dims; i++) pop(); signature = dbc.getClassConstantOperand(); pushBySignature(signature, dbc); break; case AALOAD: { pop(); it = pop(); String arraySig = it.getSignature(); if ( arraySig.charAt(0) == '[') pushBySignature(arraySig.substring(1), dbc); else push(new Item()); } break; case JSR: seenTransferOfControl = true; setReachOnlyByBranch(false); push(new Item("")); // push return address on stack addJumpValue(dbc.getPC(), dbc.getBranchTarget()); pop(); if (dbc.getBranchOffset() < 0) { // OK, backwards JSRs are weird; reset the stack. int stackSize = stack.size(); stack.clear(); for(int i = 0; i < stackSize; i++) stack.add(new Item()); } setTop(false); break; case INVOKEINTERFACE: case INVOKESPECIAL: case INVOKESTATIC: case INVOKEVIRTUAL: processMethodCall(dbc, seen); break; default: throw new UnsupportedOperationException("OpCode " + OPCODE_NAMES[seen] + " not supported " ); } } catch (RuntimeException e) { //If an error occurs, we clear the stack and locals. one of two things will occur. 
//Either the client will expect more stack items than really exist, and so they're condition check will fail, //or the stack will resync with the code. But hopefully not false positives String msg = "Error processing opcode " + OPCODE_NAMES[seen] + " @ " + dbc.getPC() + " in " + dbc.getFullyQualifiedMethodName(); AnalysisContext.logError(msg , e); if (DEBUG) e.printStackTrace(); clear(); } finally { if (DEBUG) { System.out.println(dbc.getNextPC() + "pc : " + OPCODE_NAMES[seen] + " stack depth: " + getStackDepth()); System.out.println(this); } } } /** * @param it * @return */ private int constantToInt(Item it) { return ((Number)it.getConstant()).intValue(); } /** * @param it * @return */ private float constantToFloat(Item it) { return ((Number)it.getConstant()).floatValue(); } /** * @param it * @return */ private double constantToDouble(Item it) { return ((Number)it.getConstant()).doubleValue(); } /** * @param it * @return */ private long constantToLong(Item it) { return ((Number)it.getConstant()).longValue(); } /** * handle dcmp * */ private void handleDcmp(int opcode) { Item it; Item it2; it = pop(); it2 = pop(); if ((it.getConstant() != null) && it2.getConstant() != null) { double d = (Double) it.getConstant(); double d2 = (Double) it.getConstant(); if (Double.isNaN(d) || Double.isNaN(d2)) { if (opcode == DCMPG) push(new Item("I", (Integer)(1))); else push(new Item("I", (Integer)(-1))); } if (d2 < d) push(new Item("I", (Integer) (-1) )); else if (d2 > d) push(new Item("I", (Integer)1)); else push(new Item("I", (Integer)0)); } else { push(new Item("I")); } } /** * handle fcmp * */ private void handleFcmp(int opcode) { Item it; Item it2; it = pop(); it2 = pop(); if ((it.getConstant() != null) && it2.getConstant() != null) { float f = (Float) it.getConstant(); float f2 = (Float) it.getConstant(); if (Float.isNaN(f) || Float.isNaN(f2)) { if (opcode == FCMPG) push(new Item("I", (Integer)(1))); else push(new Item("I", (Integer)(-1))); } if (f2 < f) push(new Item("I", 
(Integer)(-1))); else if (f2 > f) push(new Item("I", (Integer)(1))); else push(new Item("I", (Integer)(0))); } else { push(new Item("I")); } } /** * handle lcmp */ private void handleLcmp() { Item it; Item it2; it = pop(); it2 = pop(); if ((it.getConstant() != null) && it2.getConstant() != null) { long l = (Long) it.getConstant(); long l2 = (Long) it.getConstant(); if (l2 < l) push(new Item("I", (Integer)(-1))); else if (l2 > l) push(new Item("I", (Integer)(1))); else push(new Item("I", (Integer)(0))); } else { push(new Item("I")); } } /** * handle swap */ private void handleSwap() { Item i1 = pop(); Item i2 = pop(); push(i1); push(i2); } /** * handleDup */ private void handleDup() { Item it; it = pop(); push(it); push(it); } /** * handle dupX1 */ private void handleDupX1() { Item it; Item it2; it = pop(); it2 = pop(); push(it); push(it2); push(it); } /** * handle dup2 */ private void handleDup2() { Item it, it2; it = pop(); if (it.getSize() == 2) { push(it); push(it); } else { it2 = pop(); push(it2); push(it); push(it2); push(it); } } /** * handle Dup2x1 */ private void handleDup2X1() { String signature; Item it; Item it2; Item it3; it = pop(); it2 = pop(); signature = it.getSignature(); if (signature.equals("J") || signature.equals("D")) { push(it); push(it2); push(it); } else { it3 = pop(); push(it2); push(it); push(it3); push(it2); push(it); } } private void handleDup2X2() { String signature; Item it = pop(); Item it2 = pop(); if (it.isWide()) { if (it2.isWide()) { push(it); push(it2); push(it); } else { Item it3 = pop(); push(it); push(it3); push(it2); push(it); } } else { Item it3 = pop(); if (it3.isWide()) { push(it2); push(it); push(it3); push(it2); push(it); } else { Item it4 = pop(); push(it2); push(it); push(it4); push(it3); push(it2); push(it); } } } /** * Handle DupX2 */ private void handleDupX2() { String signature; Item it; Item it2; Item it3; it = pop(); it2 = pop(); signature = it2.getSignature(); if (signature.equals("J") || signature.equals("D")) 
{ push(it); push(it2); push(it); } else { it3 = pop(); push(it); push(it3); push(it2); push(it); } } private void processMethodCall(DismantleBytecode dbc, int seen) { String clsName = dbc.getClassConstantOperand(); String methodName = dbc.getNameConstantOperand(); String signature = dbc.getSigConstantOperand(); String appenderValue = null; boolean servletRequestParameterTainted = false; boolean sawUnknownAppend = false; Item sbItem = null; Item topItem = null; if (getStackDepth() > 0) topItem = getStackItem(0); boolean topIsTainted = topItem!= null && topItem.isServletParameterTainted(); HttpParameterInjection injection = null; if (topIsTainted) injection = topItem.injection; //TODO: stack merging for trinaries kills the constant.. would be nice to maintain. if ("java/lang/StringBuffer".equals(clsName) || "java/lang/StringBuilder".equals(clsName)) { if ("<init>".equals(methodName)) { if ("(Ljava/lang/String;)V".equals(signature)) { Item i = getStackItem(0); appenderValue = (String)i.getConstant(); if (i.isServletParameterTainted()) servletRequestParameterTainted = true; } else if ("()V".equals(signature)) { appenderValue = ""; } } else if ("toString".equals(methodName) && getStackDepth() >= 1) { Item i = getStackItem(0); appenderValue = (String)i.getConstant(); if (i.isServletParameterTainted()) servletRequestParameterTainted = true; } else if ("append".equals(methodName)) { if (signature.indexOf("II)") == -1 && getStackDepth() >= 2) { sbItem = getStackItem(1); Item i = getStackItem(0); if (i.isServletParameterTainted() || sbItem.isServletParameterTainted()) servletRequestParameterTainted = true; Object sbVal = sbItem.getConstant(); Object sVal = i.getConstant(); if ((sbVal != null) && (sVal != null)) { appenderValue = sbVal + sVal.toString(); } else if (sbItem.registerNumber >= 0) { OpcodeStack.Item item = getLVValue(sbItem.registerNumber); if (item != null) item.constValue = null; } } else if (signature.startsWith("([CII)")) { sawUnknownAppend = true; sbItem = 
getStackItem(3); if (sbItem.registerNumber >= 0) { OpcodeStack.Item item = getLVValue(sbItem.registerNumber); if (item != null) item.constValue = null; } } else { sawUnknownAppend = true; } } } else if (seen == INVOKESPECIAL && clsName.equals("java/io/FileOutputStream") && methodName.equals("<init>") && (signature.equals("(Ljava/io/File;Z)V") || signature.equals("(Ljava/lang/String;Z)V"))) { OpcodeStack.Item item = getStackItem(0); Object value = item.getConstant(); if ( value instanceof Integer && ((Integer)value).intValue() == 1) { pop(3); Item top = getStackItem(0); if (top.signature.equals("Ljava/io/FileOutputStream;")) { top.setSpecialKind(Item.FILE_OPENED_IN_APPEND_MODE); top.source = XFactory.createReferencedXMethod(dbc); top.setPC(dbc.getPC()); } return; } } else if (seen == INVOKESPECIAL && clsName.equals("java/io/BufferedOutputStream") && methodName.equals("<init>") && signature.equals("(Ljava/io/OutputStream;)V")) { OpcodeStack.Item item = getStackItem(0); if (getStackItem(0).getSpecialKind() == Item.FILE_OPENED_IN_APPEND_MODE && getStackItem(2).signature.equals("Ljava/io/BufferedOutputStream;")) { pop(2); Item top = getStackItem(0); top.setSpecialKind(Item.FILE_OPENED_IN_APPEND_MODE); top.source = XFactory.createReferencedXMethod(dbc); top.setPC(dbc.getPC()); return; } } else if (seen == INVOKEINTERFACE && methodName.equals("getParameter") && clsName.equals("javax/servlet/http/HttpServletRequest") || clsName.equals("javax/servlet/http/ServletRequest")) { Item requestParameter = pop(); pop(); Item result = new Item("Ljava/lang/String;"); result.setServletParameterTainted(); result.source = XFactory.createReferencedXMethod(dbc); String parameterName = null; if (requestParameter.getConstant() instanceof String) parameterName = (String) requestParameter.getConstant(); result.injection = new HttpParameterInjection(parameterName, dbc.getPC()); result.setPC(dbc.getPC()); push(result); return; } else if (seen == INVOKEINTERFACE && 
methodName.equals("getQueryString") && clsName.equals("javax/servlet/http/HttpServletRequest") || clsName.equals("javax/servlet/http/ServletRequest")) { pop(); Item result = new Item("Ljava/lang/String;"); result.setServletParameterTainted(); result.source = XFactory.createReferencedXMethod(dbc); result.setPC(dbc.getPC()); push(result); return; } else if (seen == INVOKEINTERFACE && methodName.equals("getHeader") && clsName.equals("javax/servlet/http/HttpServletRequest") || clsName.equals("javax/servlet/http/ServletRequest")) { Item requestParameter = pop(); pop(); Item result = new Item("Ljava/lang/String;"); result.setServletParameterTainted(); result.source = XFactory.createReferencedXMethod(dbc); result.setPC(dbc.getPC()); push(result); return; } pushByInvoke(dbc, seen != INVOKESTATIC); if ((sawUnknownAppend || appenderValue != null || servletRequestParameterTainted) && getStackDepth() > 0) { Item i = this.getStackItem(0); i.constValue = appenderValue; if (!sawUnknownAppend && servletRequestParameterTainted) { i.injection = topItem.injection; i.setServletParameterTainted(); } if (sbItem != null) { i.registerNumber = sbItem.registerNumber; i.source = sbItem.source; if (i.injection == null) i.injection = sbItem.injection; if (sbItem.registerNumber >= 0) setLVValue(sbItem.registerNumber, i ); } return; } if ((clsName.equals("java/util/Random") || clsName.equals("java/security/SecureRandom")) && methodName.equals("nextInt") && signature.equals("()I")) { Item i = pop(); i.setSpecialKind(Item.RANDOM_INT); push(i); } else if (clsName.equals("java/lang/Math") && methodName.equals("abs")) { Item i = pop(); i.setSpecialKind(Item.MATH_ABS); push(i); } else if (seen == INVOKEVIRTUAL && methodName.equals("hashCode") && signature.equals("()I") || seen == INVOKESTATIC && clsName.equals("java/lang/System") && methodName.equals("identityHashCode") && signature.equals("(Ljava/lang/Object;)I")) { Item i = pop(); i.setSpecialKind(Item.HASHCODE_INT); push(i); } else if (topIsTainted 
&& ( methodName.startsWith("encode") && clsName.equals("javax/servlet/http/HttpServletResponse") || methodName.equals("trim") && clsName.equals("java/lang/String")) ) { Item i = pop(); i.setSpecialKind(Item.SERVLET_REQUEST_TAINTED); i.injection = injection; push(i); } if (!signature.endsWith(")V")) { Item i = pop(); i.source = XFactory.createReferencedXMethod(dbc); push(i); } } private void mergeLists(List<Item> mergeInto, List<Item> mergeFrom, boolean errorIfSizesDoNotMatch) { // merge stacks int intoSize = mergeInto.size(); int fromSize = mergeFrom.size(); if (errorIfSizesDoNotMatch && intoSize != fromSize) { if (DEBUG2) { System.out.println("Bad merging items"); System.out.println("current items: " + mergeInto); System.out.println("jump items: " + mergeFrom); } } else { if (DEBUG2) { if (intoSize == fromSize) System.out.println("Merging items"); else System.out.println("Bad merging items"); System.out.println("current items: " + mergeInto); System.out.println("jump items: " + mergeFrom); } for (int i = 0; i < Math.min(intoSize, fromSize); i++) mergeInto.set(i, Item.merge(mergeInto.get(i), mergeFrom.get(i))); if (DEBUG2) { System.out.println("merged items: " + mergeInto); } } } public void clear() { stack.clear(); lvValues.clear(); } boolean encountedTop; boolean backwardsBranch; BitSet exceptionHandlers = new BitSet(); private Map<Integer, List<Item>> jumpEntries = new HashMap<Integer, List<Item>>(); private Map<Integer, List<Item>> jumpStackEntries = new HashMap<Integer, List<Item>>(); private BitSet jumpEntryLocations = new BitSet(); static class JumpInfo { final Map<Integer, List<Item>> jumpEntries; final Map<Integer, List<Item>> jumpStackEntries; final BitSet jumpEntryLocations; JumpInfo(Map<Integer, List<Item>> jumpEntries, Map<Integer, List<Item>> jumpStackEntries, BitSet jumpEntryLocations) { this.jumpEntries = jumpEntries; this.jumpStackEntries = jumpStackEntries; this.jumpEntryLocations = jumpEntryLocations; } } public static class JumpInfoFactory 
extends AnalysisFactory<JumpInfo> { public JumpInfoFactory() { super("Jump info for opcode stack", JumpInfo.class); } public JumpInfo analyze(IAnalysisCache analysisCache, MethodDescriptor descriptor) throws CheckedAnalysisException { Method method = analysisCache.getMethodAnalysis(Method.class, descriptor); JavaClass jclass = getJavaClass(analysisCache, descriptor.getClassDescriptor()); Code code = method.getCode(); final OpcodeStack stack = new OpcodeStack(); if (code == null) { return null; } DismantleBytecode branchAnalysis = new DismantleBytecode() { @Override public void sawOpcode(int seen) { stack.sawOpcode(this, seen); } }; branchAnalysis.setupVisitorForClass(jclass); int oldCount = 0; while (true) { stack.resetForMethodEntry0(ClassName.toSlashedClassName(jclass.getClassName()), method); branchAnalysis.doVisitMethod(method); int newCount = stack.jumpEntries.size(); if (newCount == oldCount || !stack.encountedTop || !stack.backwardsBranch) break; oldCount = newCount; } return new JumpInfo(stack.jumpEntries, stack.jumpStackEntries, stack.jumpEntryLocations); }} public boolean isJumpTarget(int pc) { return jumpEntryLocations.get(pc); } private void addJumpValue(int from, int target) { if (DEBUG) System.out.println("Set jump entry at " + methodName + ":" + target + "pc to " + stack + " : " + lvValues ); if (from >= target) backwardsBranch = true; List<Item> atTarget = jumpEntries.get(target); if (atTarget == null) { if (DEBUG) System.out.println("Was null"); jumpEntries.put(target, new ArrayList<Item>(lvValues)); jumpEntryLocations.set(target); if (stack.size() > 0) { jumpStackEntries.put(target, new ArrayList<Item>(stack)); } return; } mergeLists(atTarget, lvValues, false); List<Item> stackAtTarget = jumpStackEntries.get(target); if (stack.size() > 0 && stackAtTarget != null) mergeLists(stackAtTarget, stack, false); if (DEBUG) System.out.println("merge target for " + methodName + ":" + target + "pc is " + atTarget); } private String methodName; 
DismantleBytecode v; public void learnFrom(JumpInfo info) { jumpEntries = new HashMap<Integer, List<Item>>(info.jumpEntries); jumpStackEntries = new HashMap<Integer, List<Item>>(info.jumpStackEntries); jumpEntryLocations = (BitSet) info.jumpEntryLocations.clone(); } public void initialize() { setTop(false); jumpEntries.clear(); jumpStackEntries.clear(); jumpEntryLocations.clear(); encountedTop = false; backwardsBranch = false; lastUpdate.clear(); convertJumpToOneZeroState = convertJumpToZeroOneState = 0; zeroOneComing = -1; setReachOnlyByBranch(false); } public int resetForMethodEntry(final DismantleBytecode v) { this.v = v; initialize(); int result = resetForMethodEntry0(v); Code code = v.getMethod().getCode(); if (code == null) return result; if (useIterativeAnalysis) { IAnalysisCache analysisCache = Global.getAnalysisCache(); XMethod xMethod = XFactory.createXMethod(v.getThisClass(), v.getMethod()); try { JumpInfo jump = analysisCache.getMethodAnalysis(JumpInfo.class, xMethod.getMethodDescriptor()); if (jump != null) { learnFrom(jump); } } catch (CheckedAnalysisException e) { AnalysisContext.logError("Error getting jump information", e); } } return result; } private int resetForMethodEntry0(PreorderVisitor v) { return resetForMethodEntry0(v.getClassName(), v.getMethod()); } private int resetForMethodEntry0(@SlashedClassName String className, Method m) { methodName = m.getName(); if (DEBUG) System.out.println(" String signature = m.getSignature(); stack.clear(); lvValues.clear(); top = false; encountedTop = false; backwardsBranch = false; setReachOnlyByBranch(false); seenTransferOfControl = false; exceptionHandlers.clear(); Code code = m.getCode(); if (code != null) { CodeException[] exceptionTable = code.getExceptionTable(); if (exceptionTable != null) for(CodeException ex : exceptionTable) exceptionHandlers.set(ex.getHandlerPC()); } if (DEBUG) System.out.println(" --- " + className + " " + m.getName() + " " + signature); Type[] argTypes = 
Type.getArgumentTypes(signature); int reg = 0; if (!m.isStatic()) { Item it = new Item("L" + className+";"); it.setInitialParameter(true); it.registerNumber = reg; setLVValue( reg, it); reg += it.getSize(); } for (Type argType : argTypes) { Item it = new Item(argType.getSignature()); it.registerNumber = reg; it.setInitialParameter(true); setLVValue(reg, it); reg += it.getSize(); } return reg; } public int getStackDepth() { return stack.size(); } public Item getStackItem(int stackOffset) { if (stackOffset < 0 || stackOffset >= stack.size()) { AnalysisContext.logError("Can't get stack offset " + stackOffset + " from " + stack.toString() +" @ " + v.getPC() + " in " + v.getFullyQualifiedMethodName(), new IllegalArgumentException(stackOffset + " is not a value stack offset")); return new Item("Lfindbugs/OpcodeStackError;"); } int tos = stack.size() - 1; int pos = tos - stackOffset; try { return stack.get(pos); } catch (ArrayIndexOutOfBoundsException e) { throw new ArrayIndexOutOfBoundsException( "Requested item at offset " + stackOffset + " in a stack of size " + stack.size() +", made request for position " + pos); } } private Item pop() { return stack.remove(stack.size()-1); } public void replaceTop(Item newTop) { pop(); push(newTop); } private void pop(int count) { while ((count pop(); } private void push(Item i) { stack.add(i); } private void pushByConstant(DismantleBytecode dbc, Constant c) { if (c instanceof ConstantClass) push(new Item("Ljava/lang/Class;", ((ConstantClass)c).getConstantValue(dbc.getConstantPool()))); else if (c instanceof ConstantInteger) push(new Item("I", (Integer)(((ConstantInteger) c).getBytes()))); else if (c instanceof ConstantString) { int s = ((ConstantString) c).getStringIndex(); push(new Item("Ljava/lang/String;", getStringFromIndex(dbc, s))); } else if (c instanceof ConstantFloat) push(new Item("F", ((ConstantFloat) c).getBytes())); else if (c instanceof ConstantDouble) push(new Item("D", ((ConstantDouble) c).getBytes())); else if (c 
instanceof ConstantLong) push(new Item("J", ((ConstantLong) c).getBytes())); else throw new UnsupportedOperationException("Constant type not expected" ); } private void pushByLocalObjectLoad(DismantleBytecode dbc, int register) { Method m = dbc.getMethod(); LocalVariableTable lvt = m.getLocalVariableTable(); if (lvt != null) { LocalVariable lv = LVTHelper.getLocalVariableAtPC(lvt, register, dbc.getPC()); if (lv != null) { String signature = lv.getSignature(); pushByLocalLoad(signature, register); return; } } pushByLocalLoad("Ljava/lang/Object;", register); } private void pushByIntMath(DismantleBytecode dbc, int seen, Item lhs, Item rhs) { Item newValue = new Item("I"); if (lhs == null || rhs == null) { push(newValue); return; } try { if (DEBUG) System.out.println("pushByIntMath: " + rhs.getConstant() + " " + lhs.getConstant() ); if ((rhs.getConstant() != null) && lhs.getConstant() != null) { Integer lhsValue = (Integer) lhs.getConstant(); Integer rhsValue = (Integer) rhs.getConstant(); if (seen == IADD) newValue = new Item("I",lhsValue + rhsValue); else if (seen == ISUB) newValue = new Item("I",lhsValue - rhsValue); else if (seen == IMUL) newValue = new Item("I", lhsValue * rhsValue); else if (seen == IDIV) newValue = new Item("I", lhsValue / rhsValue); else if (seen == IAND) { newValue = new Item("I", lhsValue & rhsValue); if ((rhsValue&0xff) == 0 && rhsValue != 0 || (lhsValue&0xff) == 0 && lhsValue != 0 ) newValue.setSpecialKind(Item.LOW_8_BITS_CLEAR); } else if (seen == IOR) newValue = new Item("I",lhsValue | rhsValue); else if (seen == IXOR) newValue = new Item("I",lhsValue ^ rhsValue); else if (seen == ISHL) { newValue = new Item("I",lhsValue << rhsValue); if (rhsValue >= 8) newValue.setSpecialKind(Item.LOW_8_BITS_CLEAR); } else if (seen == ISHR) newValue = new Item("I",lhsValue >> rhsValue); else if (seen == IREM) newValue = new Item("I", lhsValue % rhsValue); else if (seen == IUSHR) newValue = new Item("I", lhsValue >>> rhsValue); } else if 
(rhs.getConstant() != null && seen == ISHL && (Integer) rhs.getConstant() >= 8) newValue.setSpecialKind(Item.LOW_8_BITS_CLEAR); else if (lhs.getConstant() != null && seen == IAND) { int value = (Integer) lhs.getConstant(); if (value == 0) newValue = new Item("I", 0); else if ((value & 0xff) == 0) newValue.setSpecialKind(Item.LOW_8_BITS_CLEAR); else if (value >= 0) newValue.setSpecialKind(Item.NON_NEGATIVE); } else if (rhs.getConstant() != null && seen == IAND) { int value = (Integer) rhs.getConstant(); if (value == 0) newValue = new Item("I", 0); else if ((value & 0xff) == 0) newValue.setSpecialKind(Item.LOW_8_BITS_CLEAR); else if (value >= 0) newValue.setSpecialKind(Item.NON_NEGATIVE); } else if (seen == IAND && lhs.getSpecialKind() == Item.ZERO_MEANS_NULL) { newValue.setSpecialKind(Item.ZERO_MEANS_NULL); newValue.setPC(lhs.getPC()); } else if (seen == IAND && rhs.getSpecialKind() == Item.ZERO_MEANS_NULL) { newValue.setSpecialKind(Item.ZERO_MEANS_NULL); newValue.setPC(rhs.getPC()); } else if (seen == IOR && lhs.getSpecialKind() == Item.NONZERO_MEANS_NULL) { newValue.setSpecialKind(Item.NONZERO_MEANS_NULL); newValue.setPC(lhs.getPC()); } else if (seen == IOR && rhs.getSpecialKind() == Item.NONZERO_MEANS_NULL) { newValue.setSpecialKind(Item.NONZERO_MEANS_NULL); newValue.setPC(rhs.getPC()); } } catch (ArithmeticException e) { assert true; // ignore it } catch (RuntimeException e) { String msg = "Error processing2 " + lhs + OPCODE_NAMES[seen] + rhs + " @ " + dbc.getPC() + " in " + dbc.getFullyQualifiedMethodName(); AnalysisContext.logError(msg , e); } if (lhs.getSpecialKind() == Item.INTEGER_SUM && rhs.getConstant() != null ) { int rhsValue = (Integer) rhs.getConstant(); if (seen == IDIV && rhsValue ==2 || seen == ISHR && rhsValue == 1) newValue.setSpecialKind(Item.AVERAGE_COMPUTED_USING_DIVISION); } if (seen == IADD && newValue.getSpecialKind() == Item.NOT_SPECIAL && lhs.getConstant() == null && rhs.getConstant() == null ) newValue.setSpecialKind(Item.INTEGER_SUM); 
if (seen == IREM && lhs.getSpecialKind() == Item.HASHCODE_INT) newValue.setSpecialKind(Item.HASHCODE_INT_REMAINDER); if (seen == IREM && lhs.getSpecialKind() == Item.RANDOM_INT) newValue.setSpecialKind(Item.RANDOM_INT_REMAINDER); if (DEBUG) System.out.println("push: " + newValue); newValue.setPC(dbc.getPC()); push(newValue); } private void pushByLongMath(int seen, Item lhs, Item rhs) { Item newValue = new Item("J"); try { if ((rhs.getConstant() != null) && lhs.getConstant() != null) { Long lhsValue = ((Long) lhs.getConstant()); if (seen == LSHL) { newValue =new Item("J", lhsValue << ((Number) rhs.getConstant()).intValue()); if (((Number) rhs.getConstant()).intValue() >= 8) newValue.setSpecialKind(Item.LOW_8_BITS_CLEAR); } else if (seen == LSHR) newValue =new Item("J", lhsValue >> ((Number) rhs.getConstant()).intValue()); else if (seen == LUSHR) newValue =new Item("J", lhsValue >>> ((Number) rhs.getConstant()).intValue()); else { Long rhsValue = ((Long) rhs.getConstant()); if (seen == LADD) newValue = new Item("J", lhsValue + rhsValue); else if (seen == LSUB) newValue = new Item("J", lhsValue - rhsValue); else if (seen == LMUL) newValue = new Item("J", lhsValue * rhsValue); else if (seen == LDIV) newValue =new Item("J", lhsValue / rhsValue); else if (seen == LAND) { newValue = new Item("J", lhsValue & rhsValue); if ((rhsValue&0xff) == 0 && rhsValue != 0 || (lhsValue&0xff) == 0 && lhsValue != 0 ) newValue.setSpecialKind(Item.LOW_8_BITS_CLEAR); } else if (seen == LOR) newValue = new Item("J", lhsValue | rhsValue); else if (seen == LXOR) newValue =new Item("J", lhsValue ^ rhsValue); else if (seen == LREM) newValue =new Item("J", lhsValue % rhsValue); } } else if (rhs.getConstant() != null && seen == LSHL && ((Integer) rhs.getConstant()) >= 8) newValue.setSpecialKind(Item.LOW_8_BITS_CLEAR); else if (lhs.getConstant() != null && seen == LAND && (((Long) lhs.getConstant()) & 0xff) == 0) newValue.setSpecialKind(Item.LOW_8_BITS_CLEAR); else if (rhs.getConstant() != null && 
seen == LAND && (((Long) rhs.getConstant()) & 0xff) == 0) newValue.setSpecialKind(Item.LOW_8_BITS_CLEAR); } catch (RuntimeException e) { // ignore it } push(newValue); } private void pushByFloatMath(int seen, Item it, Item it2) { Item result; int specialKind = Item.FLOAT_MATH; if ((it.getConstant() instanceof Float) && it2.getConstant() instanceof Float) { if (seen == FADD) result =new Item("F", ((Float) it2.getConstant()) + ((Float) it.getConstant())); else if (seen == FSUB) result =new Item("F", ((Float) it2.getConstant()) - ((Float) it.getConstant())); else if (seen == FMUL) result =new Item("F", ((Float) it2.getConstant()) * ((Float) it.getConstant())); else if (seen == FDIV) result =new Item("F", ((Float) it2.getConstant()) / ((Float) it.getConstant())); else if (seen == FREM) result =new Item("F", ((Float) it2.getConstant()) % ((Float) it.getConstant())); else result =new Item("F"); } else { result =new Item("F"); if (seen == DDIV) specialKind = Item.NASTY_FLOAT_MATH; } result.setSpecialKind(specialKind); push(result); } private void pushByDoubleMath(int seen, Item it, Item it2) { Item result; int specialKind = Item.FLOAT_MATH; if ((it.getConstant() instanceof Double) && it2.getConstant() instanceof Double) { if (seen == DADD) result = new Item("D", ((Double) it2.getConstant()) + ((Double) it.getConstant())); else if (seen == DSUB) result = new Item("D", ((Double) it2.getConstant()) - ((Double) it.getConstant())); else if (seen == DMUL) result = new Item("D", ((Double) it2.getConstant()) * ((Double) it.getConstant())); else if (seen == DDIV) result = new Item("D", ((Double) it2.getConstant()) / ((Double) it.getConstant())); else if (seen == DREM) result = new Item("D", ((Double) it2.getConstant()) % ((Double) it.getConstant())); else result = new Item("D"); } else { result = new Item("D"); if (seen == DDIV) specialKind = Item.NASTY_FLOAT_MATH; } result.setSpecialKind(specialKind); push(result); } private void pushByInvoke(DismantleBytecode dbc, boolean 
popThis) { String signature = dbc.getSigConstantOperand(); if (dbc.getNameConstantOperand().equals("<init>") && signature.endsWith(")V") && popThis) { pop(PreorderVisitor.getNumberArguments(signature)); Item constructed = pop(); if (getStackDepth() > 0) { Item next = getStackItem(0); if (constructed.equals(next)) next.source = XFactory.createReferencedXMethod(dbc); } return; } pop(PreorderVisitor.getNumberArguments(signature)+(popThis ? 1 : 0)); pushBySignature(Type.getReturnType(signature).getSignature(), dbc); } private String getStringFromIndex(DismantleBytecode dbc, int i) { ConstantUtf8 name = (ConstantUtf8) dbc.getConstantPool().getConstant(i); return name.getBytes(); } private void pushBySignature(String s, DismantleBytecode dbc) { if ("V".equals(s)) return; Item item = new Item(s, (Object) null); if (dbc != null) item.setPC(dbc.getPC()); push(item); } private void pushByLocalStore(int register) { Item it = pop(); if (it.getRegisterNumber() != register) { for(Item i : lvValues) if (i != null) { if (i.registerNumber == register) i.registerNumber = -1; if (i.fieldLoadedFromRegister == register) i.fieldLoadedFromRegister = -1; } for(Item i : stack) if (i != null) { if (i.registerNumber == register) i.registerNumber = -1; if (i.fieldLoadedFromRegister == register) i.fieldLoadedFromRegister = -1; } } setLVValue( register, it ); } private void pushByLocalLoad(String signature, int register) { Item oldItem = getLVValue(register); Item newItem; if (oldItem == null) { newItem = new Item(signature); newItem.registerNumber = register; } else { newItem = oldItem; if (newItem.signature.equals("Ljava/lang/Object;") && !signature.equals("Ljava/lang/Object;")) { newItem = new Item(oldItem); newItem.signature = signature; } if (newItem.getRegisterNumber() < 0) { if (newItem == oldItem) newItem = new Item(oldItem); newItem.registerNumber = register; } } push(newItem); } private void setLVValue(int index, Item value ) { int addCount = index - lvValues.size() + 1; while 
((addCount lvValues.add(null); if (!useIterativeAnalysis && seenTransferOfControl) value = Item.merge(value, lvValues.get(index) ); lvValues.set(index, value); } private Item getLVValue(int index) { if (index >= lvValues.size()) return new Item(); return lvValues.get(index); } /** * @param top The top to set. */ private void setTop(boolean top) { if (top) { if (!this.top) this.top = true; } else if (this.top) this.top = false; } /** * @return Returns the top. */ public boolean isTop() { if (top) return true; return false; } /** * @param reachOnlyByBranch The reachOnlyByBranch to set. */ void setReachOnlyByBranch(boolean reachOnlyByBranch) { if (reachOnlyByBranch) setTop(true); this.reachOnlyByBranch = reachOnlyByBranch; } /** * @return Returns the reachOnlyByBranch. */ boolean isReachOnlyByBranch() { return reachOnlyByBranch; } } // vim:ts=4
package org.datacommons.util;

import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import java.io.IOException;
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.util.*;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.util.Strings;
import org.datacommons.proto.Mcf;

// This class checks the existence of typically schema-related, nodes or (select types of)
// triples in the KG or local graph.
// Users of this class submit checks for node (submitNodeCheck) or triple (submitTripleCheck)
// along with a logging callback (LogCb). The implementation batches calls to DC, and on
// completion invokes the callback to notify on existence failures. At the very end, users
// need to issue a final drain call (drainRemoteCalls).
// This class is thread-safe.
public class ExistenceChecker {
  private static final Logger logger = LogManager.getLogger(ExistenceChecker.class);

  // Use the autopush end-point so we get more recent schema additions that
  // haven't rolled out.
  private static final String API_ROOT = "https://autopush.api.datacommons.org/node/property-values";

  // For now we only need checks for certain Property/Class props.
  private static final Set<String> SCHEMA_PROPERTIES =
      Set.of(Vocabulary.DOMAIN_INCLUDES, Vocabulary.RANGE_INCLUDES, Vocabulary.SUB_CLASS_OF);

  // Batching thresholds. Allow tests to set this.
  public static int DC_CALL_BATCH_LIMIT = 1000;
  public static int MAX_PENDING_CALLS = 100000;

  // Useful for mocking.
  private final HttpClient httpClient;

  // Logging stuff.
  private final boolean verbose;
  private final LogWrapper logCtx;

  // This is a combination of local KG data and prior cached checks.
  // Node is just the DCID. Triple is "s,p,o" and the property just includes SCHEMA_PROPERTIES.
  private final Set<String> existingNodesOrTriples; // Existence cache
  private final Set<String> missingNodesOrTriples; // Absence cache

  // To amortize DC call latency we batch calls up to DC_CALL_BATCH_LIMIT. The batching happens
  // per (triple) predicate.
  // Batch map: predicate -> subject -> object -> list of pending call-contexts
  // We batch based on the number of subjects in a predicate. To avoid worst case memory
  // usage, if all checks are for the same node, we have a global limit of max pending calls.
  private final Map<String, Map<String, Map<String, List<LogCb>>>> remoteBatchMap;
  private int totalPendingCallCount = 0;

  public ExistenceChecker(HttpClient httpClient, boolean verbose, LogWrapper logCtx) {
    this.httpClient = httpClient;
    this.logCtx = logCtx;
    this.verbose = verbose;
    existingNodesOrTriples = new HashSet<>();
    missingNodesOrTriples = new HashSet<>();
    remoteBatchMap = new HashMap<>();
  }

  // Checks that the given node exists, answering from local caches when possible and
  // otherwise queueing a batched remote call. On a miss, logCb is invoked.
  public synchronized void submitNodeCheck(String node, LogCb logCb)
      throws IOException, InterruptedException {
    logCtx.incrementInfoCounterBy("Existence_NumChecks", 1);
    if (checkLocal(node, Vocabulary.TYPE_OF, "", logCb)) {
      return;
    }
    batchRemoteCall(node, Vocabulary.TYPE_OF, "", logCb);
  }

  // Checks that the triple (sub, pred, obj) exists; same caching/batching behavior as
  // submitNodeCheck. Domain checks for schema-less properties are skipped.
  public synchronized void submitTripleCheck(String sub, String pred, String obj, LogCb logCb)
      throws IOException, InterruptedException {
    if (pred.equals(Vocabulary.DOMAIN_INCLUDES) && (sub.contains("/") || sub.equals("count"))) {
      // Don't bother with domain checks for schema-less properties.
      // Measured property 'count' is an aggregate that is not a property of an instance, but
      // of a set.
      return;
    }
    logCtx.incrementInfoCounterBy("Existence_NumChecks", 1);
    if (checkLocal(sub, pred, obj, logCb)) {
      return;
    }
    batchRemoteCall(sub, pred, obj, logCb);
  }

  // Seeds the existence cache from a local MCF graph, so local definitions satisfy checks
  // without hitting the DC API. Also clears any prior "missing" verdicts they override.
  public synchronized void addLocalGraph(Mcf.McfGraph graph) {
    for (Map.Entry<String, Mcf.McfGraph.PropertyValues> node : graph.getNodesMap().entrySet()) {
      // Skip doing anything with StatVarObs.
      String typeOf = McfUtil.getPropVal(node.getValue(), Vocabulary.TYPE_OF);
      if (typeOf.equals(Vocabulary.STAT_VAR_OBSERVATION_TYPE)
          || typeOf.equals(Vocabulary.LEGACY_OBSERVATION_TYPE_SUFFIX)) {
        continue;
      }
      String dcid = McfUtil.getPropVal(node.getValue(), Vocabulary.DCID);
      if (dcid.isEmpty()) {
        continue;
      }
      existingNodesOrTriples.add(dcid);
      // Set.remove() is a no-op when absent; no need for a contains() pre-check.
      missingNodesOrTriples.remove(dcid);
      if (!typeOf.equals(Vocabulary.CLASS_TYPE) && !typeOf.equals(Vocabulary.PROPERTY_TYPE)) {
        continue;
      }
      for (Map.Entry<String, Mcf.McfGraph.Values> pv : node.getValue().getPvsMap().entrySet()) {
        if (SCHEMA_PROPERTIES.contains(pv.getKey())) {
          for (Mcf.McfGraph.TypedValue tv : pv.getValue().getTypedValuesList()) {
            var key = makeKey(dcid, pv.getKey(), tv.getValue());
            existingNodesOrTriples.add(key);
            missingNodesOrTriples.remove(key);
          }
        }
      }
    }
  }

  // Flushes every pending batched check to the DC API. Must be called once at the end.
  public synchronized void drainRemoteCalls() throws IOException, InterruptedException {
    // To avoid mutating map while iterating, get the keys first.
    List<String> preds = new ArrayList<>(remoteBatchMap.keySet());
    for (var pred : preds) {
      if (verbose) {
        logger.info(
            "Draining " + remoteBatchMap.get(pred).size() + " dcids for predicate " + pred);
      }
      drainRemoteCallsForPredicate(pred, remoteBatchMap.get(pred));
      remoteBatchMap.remove(pred);
    }
  }

  // Queues one pending check in the per-predicate batch map, draining eagerly when either
  // the global pending-call cap or the per-predicate subject cap is reached.
  private void batchRemoteCall(String sub, String pred, String obj, LogCb logCb)
      throws IOException, InterruptedException {
    var subMap = remoteBatchMap.computeIfAbsent(pred, k -> new HashMap<>());
    var objMap = subMap.computeIfAbsent(sub, k -> new HashMap<>());
    // Add pending call.
    objMap.computeIfAbsent(obj, k -> new ArrayList<>()).add(logCb);
    totalPendingCallCount++;

    // Maybe drain the batch.
    if (totalPendingCallCount >= MAX_PENDING_CALLS) {
      if (verbose) logger.info("Draining remote calls due to MAX_PENDING_CALLS");
      drainRemoteCalls();
    } else if (subMap.size() >= DC_CALL_BATCH_LIMIT) {
      if (verbose) {
        logger.info(
            "Draining due to batching limit with "
                + subMap.size()
                + " dcids for "
                + "predicate "
                + pred);
      }
      drainRemoteCallsForPredicate(pred, subMap);
      remoteBatchMap.remove(pred);
    }
  }

  private void drainRemoteCallsForPredicate(
      String pred, Map<String, Map<String, List<LogCb>>> subMap)
      throws IOException, InterruptedException {
    performDcCall(pred, new ArrayList<>(subMap.keySet()), subMap);
  }

  // Issues one property-values call for (subs, pred), updates the caches from the response,
  // and logs failures via the pending callbacks. Falls back to per-subject calls on failure.
  private void performDcCall(
      String pred, List<String> subs, Map<String, Map<String, List<LogCb>>> subMap)
      throws IOException, InterruptedException {
    logCtx.incrementInfoCounterBy("Existence_NumDcCalls", 1);
    var dataJson = callDc(subs, pred);
    if (dataJson == null) {
      if (verbose) {
        logger.info("DC call failed for - " + Strings.join(subs, ',') + ", " + pred);
      }
      // Important: If the dcid is malformed, Mixer can return failure. Also, if the URI is too
      // long, then too this happens. So issue independent RPCs now. If this happens often enough,
      // we can revisit.
      logger.warn("DC Call failed (bad DCID or URI length). Issuing individual calls now.");
      for (String sub : subs) {
        performDcCall(pred, List.of(sub), subMap);
      }
      return;
    }
    if (dataJson.entrySet().size() != subs.size()) {
      // Should not really happen, so throw exception
      throw new IOException(
          "Invalid results payload from Staging DC API endpoint for: '"
              + Strings.join(subs, ',')
              + "',"
              + " '"
              + pred
              + "': "
              + dataJson);
    }
    for (var entry : dataJson.entrySet()) {
      var sub = entry.getKey();
      var nodeJson = entry.getValue().getAsJsonObject();
      var objMap = subMap.get(sub);
      for (var kv : objMap.entrySet()) {
        var obj = kv.getKey();
        var cbs = kv.getValue();
        var key = makeKey(sub, pred, obj);
        if (checkOneResult(obj, nodeJson)) {
          existingNodesOrTriples.add(key);
        } else {
          if (verbose) {
            logger.info("Missing " + (obj.isEmpty() ? "node" : "triple") + " in DC " + key);
          }
          missingNodesOrTriples.add(key);
          // Log the missing details.
          for (var cb : cbs) {
            logEntry(cb, obj);
          }
        }
        totalPendingCallCount -= cbs.size();
      }
      subMap.remove(sub);
    }
  }

  // Returns true when the API response proves existence: any outgoing value for a node
  // check, or a matching "dcid" for a triple check.
  private boolean checkOneResult(String obj, JsonObject nodeJson) {
    if (nodeJson.has("out")) {
      if (obj.isEmpty()) {
        // Node existence case.
        if (nodeJson.getAsJsonArray("out").size() > 0) {
          return true;
        }
      } else {
        // Triple existence case.
        for (var objVal : nodeJson.getAsJsonArray("out")) {
          if (objVal.getAsJsonObject().getAsJsonPrimitive("dcid").getAsString().equals(obj)) {
            return true;
          }
        }
      }
    }
    return false;
  }

  // Returns true if we were able to complete the check locally.
  private boolean checkLocal(String sub, String pred, String obj, LogCb logCb) {
    String key = makeKey(sub, pred, obj);
    if (existingNodesOrTriples.contains(key)) {
      return true;
    }
    if (missingNodesOrTriples.contains(key)) {
      logEntry(logCb, obj);
      return true;
    }
    return false;
  }

  // POSTs a property-values request and returns the parsed payload, or null when the
  // response carries no payload (treated by callers as a failed call).
  private JsonObject callDc(List<String> nodes, String property)
      throws IOException, InterruptedException {
    JsonObject arg = new JsonObject();
    JsonArray dcids = new JsonArray();
    for (var node : nodes) {
      dcids.add(node);
    }
    arg.add("dcids", dcids);
    arg.addProperty("property", property);
    arg.addProperty("direction", "out");
    var request =
        HttpRequest.newBuilder(URI.create(API_ROOT))
            .header("accept", "application/json")
            .POST(HttpRequest.BodyPublishers.ofString(arg.toString()))
            .build();
    var response = httpClient.send(request, HttpResponse.BodyHandlers.ofString());
    var payloadJson = new JsonParser().parse(response.body().trim()).getAsJsonObject();
    if (payloadJson == null || !payloadJson.has("payload")) return null;
    // The payload itself is a JSON-encoded string; parse it a second time.
    return new JsonParser().parse(payloadJson.get("payload").getAsString()).getAsJsonObject();
  }

  private static void logEntry(LogCb logCb, String obj) {
    String message, counter;
    if (obj.isEmpty()) {
      counter = "Existence_MissingReference";
      message = "Failed reference existence check";
    } else {
      counter = "Existence_MissingTriple";
      message = "Failed triple existence check";
    }
    logCb.logError(counter, message);
  }

  // Cache key: bare DCID for node checks, "s,p,o" for triple checks.
  private static String makeKey(String s, String p, String o) {
    if (o.isEmpty()) {
      return s;
    }
    return s + "," + p + "," + o;
  }
}
package org.vx68k.bitbucket;

import java.util.logging.Logger;
import javax.enterprise.context.ApplicationScoped;
import javax.enterprise.event.Observes;
import org.vx68k.bitbucket.BitbucketRepositoryPush;

/**
 * Logs the JSON object for each push notification.
 *
 * @author Kaz Nishimura
 * @since 1.0
 */
@ApplicationScoped
public class LoggingObserver {

    /** Logger named after the runtime class (so subclasses log under their own name). */
    private final Logger logger = Logger.getLogger(getClass().getName());

    /**
     * Observes repository push events, logging the actor at INFO and the
     * raw JSON payload at FINE.
     *
     * @param push the repository push event being observed
     */
    public void commitNotified(@Observes final BitbucketRepositoryPush push) {
        final BitbucketUser actor = push.getActor();
        // Guard against a payload without an actor so logging never throws NPE
        // inside the CDI event dispatch.
        final String username = actor != null ? actor.getUsername() : "(unknown)";
        logger.info(username + " pushed");
        logger.fine(push.getJson().toString());
    }
}
package yuku.alkitab.base.widget; import android.app.Activity; import android.content.BroadcastReceiver; import android.content.ClipData; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.graphics.Typeface; import androidx.annotation.NonNull; import androidx.core.content.res.ResourcesCompat; import androidx.core.view.GravityCompat; import androidx.drawerlayout.widget.DrawerLayout; import androidx.core.widget.NestedScrollView; import androidx.appcompat.widget.SwitchCompat; import android.text.SpannableStringBuilder; import android.text.style.RelativeSizeSpan; import android.util.AttributeSet; import android.view.DragEvent; import android.view.HapticFeedbackConstants; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.AdapterView; import android.widget.Button; import android.widget.CompoundButton; import android.widget.PopupMenu; import android.widget.Spinner; import android.widget.TextView; import yuku.afw.storage.Preferences; import yuku.afw.widget.EasyAdapter; import yuku.alkitab.base.App; import yuku.alkitab.base.IsiActivity; import yuku.alkitab.base.S; import yuku.alkitab.base.ac.AboutActivity; import yuku.alkitab.base.ac.DevotionActivity; import yuku.alkitab.base.ac.ReadingPlanActivity; import yuku.alkitab.base.ac.SettingsActivity; import yuku.alkitab.songs.SongViewActivity; import yuku.alkitab.base.config.AppConfig; import yuku.alkitab.base.storage.Prefkey; import yuku.alkitab.base.util.CurrentReading; import yuku.alkitab.songs.SongBookUtil; import yuku.alkitab.debug.R; import yuku.alkitab.tracking.Tracker; import java.util.ArrayList; import java.util.List; public abstract class LeftDrawer extends NestedScrollView { public static final String PROGRESS_MARK_DRAG_MIME_TYPE = "application/vnd.yuku.alkitab.progress_mark.drag"; // mandatory TextView bBible; TextView bDevotion; TextView bReadingPlan; TextView bSongs; View bSettings; View 
bHelp; // for launching other activities final Activity activity; // for closing drawer DrawerLayout drawerLayout; public LeftDrawer(final Context context, final AttributeSet attrs) { super(context, attrs); activity = isInEditMode() ? null : (Activity) context; } @Override protected void onFinishInflate() { super.onFinishInflate(); setClickable(true); for (int i = 0, len = getChildCount(); i < len; i++) { getChildAt(i).setDuplicateParentStateEnabled(false); } bBible = findViewById(R.id.bBible); bDevotion = findViewById(R.id.bDevotion); bReadingPlan = findViewById(R.id.bReadingPlan); bSongs = findViewById(R.id.bSongs); bSettings = findViewById(R.id.bSettings); bHelp = findViewById(R.id.bHelp); if (this instanceof Text) setDrawerItemSelected(bBible); if (this instanceof Devotion) setDrawerItemSelected(bDevotion); if (this instanceof ReadingPlan) setDrawerItemSelected(bReadingPlan); if (this instanceof Songs) setDrawerItemSelected(bSongs); // hide and show according to app config if (!isInEditMode()) { bSongs.setVisibility(AppConfig.get().menuSongs ? VISIBLE : GONE); bDevotion.setVisibility(AppConfig.get().menuDevotion ? 
VISIBLE : GONE); } bBible.setOnClickListener(v -> { bBible_click(); closeDrawer(); }); bDevotion.setOnClickListener(v -> { bDevotion_click(); closeDrawer(); }); bReadingPlan.setOnClickListener(v -> { bReadingPlan_click(); closeDrawer(); }); bSongs.setOnClickListener(v -> { bSongs_click(); closeDrawer(); }); bSettings.setOnClickListener(v -> { bSettings_click(); closeDrawer(); }); bHelp.setOnClickListener(v -> { bHelp_click(); closeDrawer(); }); } void setDrawerItemSelected(@NonNull TextView drawerItem) { final int selectedTextColor = ResourcesCompat.getColor(getResources(), R.color.accent, getContext().getTheme()); drawerItem.setTextColor(selectedTextColor); drawerItem.setTypeface(Typeface.create("sans-serif-medium", Typeface.NORMAL)); } @Override public boolean onDragEvent(final DragEvent event) { if (event.getAction() == DragEvent.ACTION_DRAG_STARTED) { Tracker.trackEvent("pin_drag_started"); // Just so that the progress pin is not dropped to the verses return event.getClipDescription().hasMimeType(PROGRESS_MARK_DRAG_MIME_TYPE); } return false; } public void toggleDrawer() { if (drawerLayout.isDrawerOpen(GravityCompat.START)) { drawerLayout.closeDrawer(GravityCompat.START); } else { drawerLayout.openDrawer(GravityCompat.START); } } public void closeDrawer() { drawerLayout.closeDrawer(GravityCompat.START); } void bHelp_click() { activity.startActivity(AboutActivity.createIntent()); } void bSettings_click() { activity.startActivity(SettingsActivity.createIntent()); } /** * When the current activity is not {@link yuku.alkitab.base.IsiActivity}, * this clears all activity on this stack, * starts {@link yuku.alkitab.base.IsiActivity} on the background, * and then starts {@link yuku.alkitab.base.ac.ReadingPlanActivity}. 
*/ void bReadingPlan_click() { if (getContext() instanceof IsiActivity) { activity.startActivity(ReadingPlanActivity.createIntent()); } else { final Intent baseIntent = IsiActivity.createIntent(); baseIntent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TASK); baseIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK); final Intent intent = ReadingPlanActivity.createIntent(); activity.startActivities(new Intent[]{baseIntent, intent}); } } /** * When the current activity is not {@link yuku.alkitab.base.IsiActivity}, * this clears all activity on this stack, * starts {@link yuku.alkitab.base.IsiActivity} on the background, * and then starts {@link SongViewActivity}. */ void bSongs_click() { if (getContext() instanceof IsiActivity) { activity.startActivity(SongViewActivity.createIntent()); } else { final Intent baseIntent = IsiActivity.createIntent(); baseIntent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TASK); baseIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK); final Intent intent = SongViewActivity.createIntent(); activity.startActivities(new Intent[]{baseIntent, intent}); } } /** * When the current activity is not {@link yuku.alkitab.base.IsiActivity}, * this clears all activity on this stack, * starts {@link yuku.alkitab.base.IsiActivity} on the background, * and then starts {@link yuku.alkitab.base.ac.DevotionActivity}. */ void bDevotion_click() { if (getContext() instanceof IsiActivity) { activity.startActivity(DevotionActivity.createIntent()); } else { final Intent baseIntent = IsiActivity.createIntent(); baseIntent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TASK); baseIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK); final Intent intent = DevotionActivity.createIntent(); activity.startActivities(new Intent[]{baseIntent, intent}); } } /** * This clears all activity on this stack and starts {@link yuku.alkitab.base.IsiActivity}. 
*/ void bBible_click() { final Intent intent = IsiActivity.createIntent(); intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TASK); intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK); activity.startActivity(intent); } public static class Text extends LeftDrawer { public interface Listener { void bMarkers_click(); void bDisplay_click(); void cFullScreen_checkedChange(boolean isChecked); void cNightMode_checkedChange(boolean isChecked); void cSplitVersion_checkedChange(final SwitchCompat cSplitVersion, boolean isChecked); void bProgressMarkList_click(); void bProgress_click(int preset_id); void bCurrentReadingClose_click(); void bCurrentReadingReference_click(); } public interface Handle { void setFullScreen(boolean fullScreen); void setSplitVersion(boolean splitVersion); } View bMarkers; View bDisplay; SwitchCompat cFullScreen; SwitchCompat cNightMode; SwitchCompat cSplitVersion; View bProgressMarkList; View bProgress1; View bProgress2; View bProgress3; View bProgress4; View bProgress5; View panelCurrentReadingHeader; View bCurrentReadingClose; TextView bCurrentReadingReference; Listener listener; Handle handle = new Handle() { @Override public void setFullScreen(final boolean fullScreen) { cFullScreen.setOnCheckedChangeListener(null); cFullScreen.setChecked(fullScreen); cFullScreen.setOnCheckedChangeListener(cFullScreen_checkedChange); } @Override public void setSplitVersion(final boolean splitVersion) { cSplitVersion.setOnCheckedChangeListener(null); cSplitVersion.setChecked(splitVersion); cSplitVersion.setOnCheckedChangeListener(cSplitVersion_checkedChange); } }; public Text(final Context context, final AttributeSet attrs) { super(context, attrs); } public Handle getHandle() { return handle; } @Override protected void onFinishInflate() { super.onFinishInflate(); bMarkers = findViewById(R.id.bMarkers); bDisplay = findViewById(R.id.bDisplay); cFullScreen = findViewById(R.id.cFullScreen); cNightMode = findViewById(R.id.cNightMode); cSplitVersion = 
findViewById(R.id.cSplitVersion); bProgressMarkList = findViewById(R.id.bProgressMarkList); bProgress1 = findViewById(R.id.bProgress1); bProgress2 = findViewById(R.id.bProgress2); bProgress3 = findViewById(R.id.bProgress3); bProgress4 = findViewById(R.id.bProgress4); bProgress5 = findViewById(R.id.bProgress5); panelCurrentReadingHeader = findViewById(R.id.panelCurrentReadingHeader); bCurrentReadingClose = findViewById(R.id.bCurrentReadingClose); bCurrentReadingReference = findViewById(R.id.bCurrentReadingReference); cNightMode.setChecked(!isInEditMode() && Preferences.getBoolean(Prefkey.is_night_mode, false)); bProgressMarkList.setOnClickListener(v -> listener.bProgressMarkList_click()); final View[] views = new View[]{bProgress1, bProgress2, bProgress3, bProgress4, bProgress5}; for (int i = 0; i < views.length; i++) { final View b = views[i]; final int preset_id = i; b.setOnClickListener(v -> { listener.bProgress_click(preset_id); closeDrawer(); }); b.setOnLongClickListener(v -> { final ClipData dragData = new ClipData("progress_mark", new String[]{PROGRESS_MARK_DRAG_MIME_TYPE}, new ClipData.Item("" + preset_id)); b.setPressed(false); final DragShadowBuilder dragShadowBuilder = new DragShadowBuilder(b); performHapticFeedback(HapticFeedbackConstants.LONG_PRESS); closeDrawer(); v.startDrag(dragData, dragShadowBuilder, null, 0); return true; }); } bMarkers.setOnClickListener(v -> { listener.bMarkers_click(); closeDrawer(); }); bDisplay.setOnClickListener(v -> { listener.bDisplay_click(); closeDrawer(); }); cFullScreen.setOnCheckedChangeListener(cFullScreen_checkedChange); cNightMode.setOnCheckedChangeListener(cNightMode_checkedChange); cSplitVersion.setOnCheckedChangeListener(cSplitVersion_checkedChange); bCurrentReadingClose.setOnClickListener(v -> listener.bCurrentReadingClose_click()); bCurrentReadingReference.setOnClickListener(v -> listener.bCurrentReadingReference_click()); displayCurrentReading(); // The following is not in onAttachedFromWindow, because we 
need to listen to // ACTION_ACTIVE_VERSION_CHANGED as early as possible, so we do not end up with // a verse reference from a version that was not actually selected during app startup. final IntentFilter filter = new IntentFilter(); filter.addAction(CurrentReading.ACTION_CURRENT_READING_CHANGED); filter.addAction(IsiActivity.ACTION_ACTIVE_VERSION_CHANGED); App.getLbm().registerReceiver(currentReadingChangeReceiver, filter); } @Override protected void onDetachedFromWindow() { super.onDetachedFromWindow(); App.getLbm().unregisterReceiver(currentReadingChangeReceiver); } final BroadcastReceiver currentReadingChangeReceiver = new BroadcastReceiver() { @Override public void onReceive(final Context context, final Intent intent) { displayCurrentReading(); } }; void displayCurrentReading() { if (isInEditMode()) return; final int[] aris = CurrentReading.get(); if (aris == null) { panelCurrentReadingHeader.setVisibility(GONE); bCurrentReadingReference.setVisibility(GONE); } else { panelCurrentReadingHeader.setVisibility(VISIBLE); bCurrentReadingReference.setVisibility(VISIBLE); bCurrentReadingReference.setText(S.activeVersion().referenceRange(aris[0], aris[1])); } } CompoundButton.OnCheckedChangeListener cFullScreen_checkedChange = new CompoundButton.OnCheckedChangeListener() { @Override public void onCheckedChanged(final CompoundButton buttonView, final boolean isChecked) { listener.cFullScreen_checkedChange(isChecked); } }; CompoundButton.OnCheckedChangeListener cNightMode_checkedChange = new CompoundButton.OnCheckedChangeListener() { @Override public void onCheckedChanged(final CompoundButton buttonView, final boolean isChecked) { listener.cNightMode_checkedChange(isChecked); } }; CompoundButton.OnCheckedChangeListener cSplitVersion_checkedChange = new CompoundButton.OnCheckedChangeListener() { @Override public void onCheckedChanged(final CompoundButton buttonView, final boolean isChecked) { listener.cSplitVersion_checkedChange(cSplitVersion, isChecked); closeDrawer(); } 
}; public <T extends Activity & Listener> void configure(T listener, DrawerLayout drawerLayout) { this.listener = listener; this.drawerLayout = drawerLayout; } @Override void bBible_click() { closeDrawer(); } } public static class Devotion extends LeftDrawer { DevotionKindAdapter adapter; public interface Listener { void bPrev_click(); void bNext_click(); void bReload_click(); void cbKind_itemSelected(DevotionActivity.DevotionKind kind); } public interface Handle { void setDevotionDate(CharSequence date); void setDevotionKind(DevotionActivity.DevotionKind kind); } Spinner cbKind; TextView tCurrentDate; View bPrev; View bNext; View bReload; Listener listener; Handle handle = new Handle() { @Override public void setDevotionDate(final CharSequence date) { tCurrentDate.setText(date); } @Override public void setDevotionKind(final DevotionActivity.DevotionKind kind) { final AdapterView.OnItemSelectedListener backup = cbKind.getOnItemSelectedListener(); cbKind.setOnItemSelectedListener(null); cbKind.setSelection(adapter.getPositionForKind(kind)); cbKind.setOnItemSelectedListener(backup); } }; public Devotion(final Context context, final AttributeSet attrs) { super(context, attrs); } public Handle getHandle() { return handle; } @Override protected void onFinishInflate() { super.onFinishInflate(); cbKind = findViewById(R.id.cbKind); tCurrentDate = findViewById(R.id.tCurrentDate); bPrev = findViewById(R.id.bPrev); bNext = findViewById(R.id.bNext); bReload = findViewById(R.id.bReload); cbKind.setAdapter(adapter = new DevotionKindAdapter()); cbKind.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() { @Override public void onItemSelected(final AdapterView<?> parent, final View view, final int position, final long id) { listener.cbKind_itemSelected(adapter.getItem(position)); } @Override public void onNothingSelected(final AdapterView<?> parent) { } }); bPrev.setOnClickListener(v -> listener.bPrev_click()); bNext.setOnClickListener(v -> listener.bNext_click()); 
bReload.setOnClickListener(v -> { listener.bReload_click(); closeDrawer(); }); } public <T extends Activity & Listener> void configure(T listener, DrawerLayout drawerLayout) { this.listener = listener; this.drawerLayout = drawerLayout; } @Override void bDevotion_click() { closeDrawer(); } class DevotionKindAdapter extends EasyAdapter { final List<DevotionActivity.DevotionKind> kinds = new ArrayList<>(); { // gather available devotions based on app config for (String devotionName : AppConfig.get().devotionNames) { kinds.add(DevotionActivity.DevotionKind.getByName(devotionName)); } } @Override public View newView(final int position, final ViewGroup parent) { return LayoutInflater.from(getContext()).inflate(android.R.layout.simple_spinner_item, parent, false); } @Override public DevotionActivity.DevotionKind getItem(final int position) { return kinds.get(position); } @Override public void bindView(final View view, final int position, final ViewGroup parent) { final DevotionActivity.DevotionKind kind = getItem(position); final SpannableStringBuilder sb = new SpannableStringBuilder(); sb.append(kind.title); sb.append("\n"); final int sb_len = sb.length(); sb.append(kind.subtitle); sb.setSpan(new RelativeSizeSpan(0.7f), sb_len, sb.length(), 0); ((TextView) view).setText(sb); } @Override public View newDropDownView(final int position, final ViewGroup parent) { final TextView res = (TextView) LayoutInflater.from(getContext()).inflate(android.R.layout.simple_spinner_dropdown_item, parent, false); res.setSingleLine(false); return res; } @Override public int getCount() { return kinds.size(); } public int getPositionForKind(final DevotionActivity.DevotionKind kind) { return kinds.indexOf(kind); } } } public static class ReadingPlan extends LeftDrawer { public interface Listener { void bRestart_click(); } public interface Handle { void setDescription(CharSequence description); } NestedScrollView scrollDescription; TextView tDescription; View bRestart; Listener listener; Handle 
handle = new Handle() { @Override public void setDescription(final CharSequence description) { if (description == null) { bRestart.setVisibility(GONE); scrollDescription.setVisibility(GONE); tDescription.setText(""); } else { bRestart.setVisibility(VISIBLE); scrollDescription.setVisibility(VISIBLE); tDescription.setText(description); } } }; public ReadingPlan(final Context context, final AttributeSet attrs) { super(context, attrs); } public Handle getHandle() { return handle; } @Override protected void onFinishInflate() { super.onFinishInflate(); scrollDescription = findViewById(R.id.scrollDescription); tDescription = findViewById(R.id.tDescription); bRestart = findViewById(R.id.bRestart); bRestart.setOnClickListener(v -> listener.bRestart_click()); } @Override void bReadingPlan_click() { closeDrawer(); } public <T extends Activity & Listener> void configure(T listener, DrawerLayout drawerLayout) { this.listener = listener; this.drawerLayout = drawerLayout; } } public static class Songs extends LeftDrawer { public interface Listener { void songKeypadButton_click(View v); void songBookSelected(String name); void moreSelected(); } public interface Handle { void setOkButtonEnabled(boolean enabled); void setAButtonEnabled(boolean enabled); void setBButtonEnabled(boolean enabled); void setCButtonEnabled(boolean enabled); void setBookName(CharSequence bookName); void setCode(String code); } Listener listener; Handle handle = new Handle() { @Override public void setOkButtonEnabled(boolean enabled) { bOk.setEnabled(enabled); } @Override public void setAButtonEnabled(boolean enabled) { bDigitA.setEnabled(enabled); } @Override public void setBButtonEnabled(boolean enabled) { bDigitB.setEnabled(enabled); } @Override public void setCButtonEnabled(boolean enabled) { bDigitC.setEnabled(enabled); } @Override public void setBookName(final CharSequence bookName) { bChangeBook.setText(bookName); } @Override public void setCode(final String code) { bChangeCode.setText(code); } }; 
public Songs(final Context context, final AttributeSet attrs) { super(context, attrs); } TextView bChangeBook; TextView bChangeCode; Button bOk; Button bDigitA; Button bDigitB; Button bDigitC; public Handle getHandle() { return handle; } @Override protected void onFinishInflate() { super.onFinishInflate(); bChangeBook = findViewById(R.id.bChangeBook); bChangeCode = findViewById(R.id.bChangeCode); bOk = findViewById(R.id.bOk); bDigitA = findViewById(R.id.bDigitA); bDigitB = findViewById(R.id.bDigitB); bDigitC = findViewById(R.id.bDigitC); bChangeBook.setOnClickListener(v -> { final PopupMenu popupChangeBook = SongBookUtil.getSongBookPopupMenu(activity, false, true, bChangeBook); popupChangeBook.setOnMenuItemClickListener(SongBookUtil.getSongBookOnMenuItemClickListener(new SongBookUtil.DefaultOnSongBookSelectedListener() { @Override public void onSongBookSelected(final String name) { listener.songBookSelected(name); } @Override public void onMoreSelected() { listener.moreSelected(); } })); popupChangeBook.show(); }); // all buttons for (int buttonId : new int[]{ R.id.bDigit0, R.id.bDigit1, R.id.bDigit2, R.id.bDigit3, R.id.bDigit4, R.id.bDigit5, R.id.bDigit6, R.id.bDigit7, R.id.bDigit8, R.id.bDigit9, R.id.bDigitA, R.id.bDigitB, R.id.bDigitC, R.id.bOk, R.id.bBackspace, }) { findViewById(buttonId).setOnClickListener(button_click); } } final OnClickListener button_click = v -> { if (listener != null) { listener.songKeypadButton_click(v); } }; @Override void bSongs_click() { closeDrawer(); } public <T extends Activity & Listener> void configure(T listener, DrawerLayout drawerLayout) { this.listener = listener; this.drawerLayout = drawerLayout; } } }
package edu.vu.isis.ammo.core.provider;

import java.util.ArrayList;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Set;

import android.provider.BaseColumns;
import edu.vu.isis.ammo.util.EnumUtils;

/**
 * Column schema for the presence content provider.
 * Each enum constant names one column and carries its SQL type.
 */
public enum PresenceSchema {
    /** This is a locally unique identifier for the request */
    ID(BaseColumns._ID, "TEXT"),

    /** This is a universally unique identifier for the request */
    UUID("TEXT"),

    /** Device originating the request */
    ORIGIN("TEXT"),

    /** Who last modified the request */
    OPERATOR("TEXT"),

    /** Presence state: 1=available, 2=not available, etc. */
    STATE("INTEGER"),

    /** The time when first observed (millisec); indicates the first time the peer was observed. */
    FIRST("INTEGER"),

    /**
     * When last observed (millisec);
     * when the operator was last seen "speaking" on the channel.
     * The latest field indicates the last time the peer was observed.
     */
    LATEST("INTEGER"),

    /**
     * How many times seen since first.
     * Each time LATEST is changed this COUNT should be incremented.
     */
    COUNT("INTEGER"),

    /**
     * The time when no longer relevant (millisec);
     * the request becomes stale and may be discarded.
     */
    EXPIRATION("INTEGER");

    /**
     * Valid values for the STATE field.
     * The codes are independent bit flags; they may be combined to form the
     * state and teased apart again with the encode/decode methods.
     */
    public enum State {
        /** There is every reason to believe the device is present */
        PRESENT(0x01),
        /** The device is seen regularly but intermittently */
        RARE(0x02),
        /** The device is probably not currently present */
        MISSED(0x04),
        /** The device is almost certainly not present */
        LOST(0x08),
        /** There is no record for that device */
        ABSENT(0x10);

        /** The single-bit flag value of this state. */
        public final long code;

        private State(int code) {
            this.code = code;
        }

        /**
         * Look up the state whose flag equals the given single-bit mask.
         *
         * @param lowMask a value with exactly one bit set
         * @return the matching state, or null if the bit is not a known flag
         */
        static public State lookup(long lowMask) {
            return State.lookupMap.get(lowMask);
        }

        private static final HashMap<Long, State> lookupMap;
        static {
            final EnumSet<State> set = EnumSet.allOf(State.class);
            lookupMap = new HashMap<Long, State>(set.size());
            for (final State state : set) {
                lookupMap.put(Long.valueOf(state.code), state);
            }
        }
    }

    /**
     * Provide a set of states to be encoded into a long integer.
     *
     * @param stateSet the states to combine
     * @return the bitwise-OR of the states' codes
     */
    public long encodeState(Set<State> stateSet) {
        long encodedState = 0;
        for (final State state : stateSet) {
            encodedState |= state.code;
        }
        return encodedState;
    }

    /**
     * Produce a set of states from an encoded long integer.
     * <p>
     * Fix: the previous implementation walked every bit position between the
     * lowest and the highest set bit, so it also "decoded" flags that were
     * never set (e.g. PRESENT|LOST came back including RARE and MISSED) and
     * threw a NullPointerException when an in-between bit had no State
     * mapping. This version visits only the bits that are actually set and
     * silently skips unknown flags.
     *
     * @param encodedState a bitwise-OR of state codes
     * @return the decoded states, or null if no bits are set
     */
    public Set<State> decodeStates(long encodedState) {
        if (encodedState == 0) {
            return null; // preserve the original "nothing encoded" contract
        }
        final EnumSet<State> decodedState = EnumSet.noneOf(State.class);
        long remaining = encodedState;
        while (remaining != 0) {
            final long lowMask = Long.lowestOneBit(remaining);
            final State state = State.lookup(lowMask);
            if (state != null) {
                decodedState.add(state);
            }
            remaining &= ~lowMask; // clear the bit just handled
        }
        return decodedState;
    }

    /** textual field name */
    final public String field;

    /** SQL type of the column */
    final public String type;

    // Constant whose column name equals its enum name.
    private PresenceSchema(String type) {
        this.field = this.name();
        this.type = type;
    }

    // Constant whose column name differs from its enum name (e.g. ID -> _id).
    private PresenceSchema(String field, String type) {
        this.field = field;
        this.type = type;
    }

    /**
     * an array of all field names
     */
    public static final String[] FIELD_NAMES = EnumUtils.buildFieldNames(PresenceSchema.class);

    /**
     * map an array of field names to fields.
     *
     * @param names an array of field names
     * @return an array of fields
     */
    public static ArrayList<PresenceSchema> mapFields(final String[] names) {
        return EnumUtils.getFields(PresenceSchema.class, names);
    }

    @Override
    public String toString() {
        return this.field;
    }
}
package br.odb.disksofdoom;

import java.util.LinkedList;

import br.odb.disksofdoom.DisksOfDoomMainApp.Disk;
import br.odb.gameapp.ConsoleApplication;
import br.odb.gameapp.UserMetaCommandLineAction;

/**
 * Console command that solves the disk puzzle (Towers of Hanoi) recursively,
 * moving every disk from pole 0 to pole 2 and printing each move.
 */
public class SolveCommand extends UserMetaCommandLineAction {

    /** Number of disks assumed to be stacked on the starting pole. */
    private static final int PROBLEM_SIZE = 6;

    /**
     * One step of an iterative (explicit-stack) solution.
     * Currently unused; retained for a future non-recursive solver.
     */
    class SolutionMove {

        /**
         * @param n  size of the sub-problem
         * @param p0 pole to move from
         * @param p2 pole to move to
         * @param p1 auxiliary pole
         */
        public SolutionMove(int n, int p0, int p2, int p1) {
            problemSize = n;
            fromPole = p0;
            toPole = p2;
            usingPole = p1;
        }

        int problemSize;
        int fromPole;
        int toPole;
        int usingPole;
    }

    public SolveCommand(ConsoleApplication app) {
        super(app);
    }

    @Override
    public String getHelp() {
        return "Solves the puzzle, moving every disk from the first pole to the last one.";
    }

    @Override
    public int requiredOperands() {
        return 1;
    }

    /**
     * Recursively move {@code n} disks from one pole to another, printing a
     * trace line for every recursive step and every individual disk move.
     *
     * @param n      number of disks to move
     * @param from   pole the disks are taken from
     * @param index0 display index of the source pole
     * @param to     pole the disks are stacked onto
     * @param index1 display index of the destination pole
     * @param using  auxiliary pole
     * @param index2 display index of the auxiliary pole
     */
    public void moveFrom(int n, LinkedList<Disk> from, int index0,
                         LinkedList<Disk> to, int index1,
                         LinkedList<Disk> using, int index2) {

        System.out.println("solving from n = " + n + " from " + index0 + " to " + index1 + " using " + index2);

        if (n > 1) {
            // park the n-1 smaller disks on the auxiliary pole
            moveFrom(n - 1, from, index0, using, index2, to, index1);
        }

        final Disk d = from.pop();
        System.out.println("moving disk = " + d.size + " from " + index0 + " to " + index1);
        to.push(d);

        if (n > 1) {
            // bring the parked disks back on top of the moved disk
            moveFrom(n - 1, using, index2, to, index1, from, index0);
        }
    }

    @Override
    public void run(ConsoleApplication app, String arg1) throws Exception {
        final DisksOfDoomMainApp game = (DisksOfDoomMainApp) app;
        // assumes PROBLEM_SIZE disks sit on pole[0] - TODO confirm initial setup
        moveFrom(PROBLEM_SIZE, game.pole[0], 0, game.pole[2], 2, game.pole[1], 1);
    }

    @Override
    public String toString() {
        return "solve";
    }
}
package umich.ms.fileio.filetypes.mzml; import java.io.InputStream; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.text.DecimalFormat; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeMap; import javolution.xml.internal.stream.XMLStreamReaderImpl; import org.apache.commons.pool2.ObjectPool; import umich.ms.datatypes.LCMSDataSubset; import umich.ms.datatypes.lcmsrun.LCMSRunInfo; import umich.ms.datatypes.scan.IScan; import umich.ms.datatypes.scancollection.IScanCollection; import umich.ms.datatypes.scancollection.impl.ScanCollectionDefault; import umich.ms.datatypes.spectrum.ISpectrum; import umich.ms.fileio.Opts; import umich.ms.fileio.exceptions.FileParsingException; import umich.ms.fileio.filetypes.xmlbased.AbstractXMLBasedDataSource; import umich.ms.fileio.filetypes.xmlbased.IndexBuilder; import umich.ms.fileio.filetypes.xmlbased.IndexBuilderInfo; import umich.ms.util.IntervalST; /** * * @author Dmitry Avtonomov <dmitriy.avtonomov@gmail.com> */ public class MZMLFile extends AbstractXMLBasedDataSource<MZMLIndexElement, MZMLIndex> { private MZMLIndex index; public MZMLFile(String path) { super(path); } public ObjectPool<XMLStreamReaderImpl> getReaderPool() { return readerPool; } @Override public MZMLIndex getIndex() { return index; } @Override public MZMLIndex fetchIndex() throws FileParsingException { MZMLIndex tmp = index; if (tmp == null) { synchronized (this) { tmp = getIndex(); if (tmp == null) { tmp = parseIndex(); index = tmp; } } } return tmp; } @Override public MZMLIndex parseIndex() throws FileParsingException { MZMLIndexParser parser = new MZMLIndexParser(this); return parser.parse(); } @Override public LCMSRunInfo parseRunInfo() throws FileParsingException { MZMLRunHeaderParser parser = new MZMLRunHeaderParser(this); return parser.parse(); } @Override protected void releaseResources() { index = null; } @Override 
public MZMLMultiSpectraParser getSpectraParser(InputStream inputStream, LCMSDataSubset subset, ObjectPool<XMLStreamReaderImpl> readerPool, Integer numSpectra) { MZMLMultiSpectraParser parser; try { parser = new MZMLMultiSpectraParser(inputStream, subset, this); } catch (FileParsingException ex) { throw new IllegalStateException(ex); } parser.setNumScansToProcess(numSpectra); parser.setReaderPool(readerPool); return parser; } @Override public IndexBuilder<MZMLIndexElement> getIndexBuilder(IndexBuilderInfo info) { MZMLMultiSpectraParser parser = getSpectraParser(info.is, LCMSDataSubset.STRUCTURE_ONLY, getReaderPool(), null); MZMLMultiSpectraParser.MZMLIndexBuilder builder = parser.getIndexBuilder(info); return builder; } public static void main(String[] args) throws FileParsingException { if (args.length == 0) { System.out.println("Give me a dollar. And a list of mzML files. E.g.: "
package ru.matevosyan.start; import ru.matevosyan.models.Comments; import ru.matevosyan.models.Item; public class MenuTracker { /** * Input instance variable input. */ private Input input; /** * Input instance variable tracker. */ private Tracker tracker; /** * Maximum user action. */ private final int maxUserAction = 9; /** * Instance variable for saving all user action. * And use it for run specific class, in dependence users selection action. */ private UserAction[] userAction = new UserAction[maxUserAction]; /** * instance availableRange for menu number range. */ private int[] availableRange = new int[this.userAction.length]; /** * Number of elements in userAction. * Variable one to use in userAction array in 0 position * */ private int position = 0; /** * Constructor MenuTracker. * @since 1.0 * @param input for getting input state * @param tracker for getting tracker */ public MenuTracker(Input input, Tracker tracker) { this.input = input; this.tracker = tracker; } /** * Method fillAction fot fill out user action which invoking new class instance. */ public void fillAction() { addAction(new AddItem(1, "Add new item")); addAction(new ShowItems(2, "Show all item")); addAction(new EditItem(3, "Edit item")); addAction(new DeleteItem(4, "Delete item")); addAction(new AddCommentToItem(5, "Add comment to item")); addAction(new FindItemById(6, "Find item by id")); addAction(new FindItemByName(7, "Find item by name")); addAction(new FindItemByDate(8, "Find item by date")); addAction(new ShowItemComments(9, "Show item comments")); /** * fill availableRange out */ for (int i = 0; i < this.userAction.length; i++) { availableRange[i] = this.userAction[i].key(); } } /** * Method addAction use to add action to userAction array. * @param action concrete class is use as menu point */ public void addAction(BaseAction action) { this.userAction[position++] = action; } /** * method to return concrete key from availableRange. 
* @return availableRange array for getting to menu */ public int[] getKeys() { return availableRange; } /** * Method select created to execute concrete action method execute that contains in array position that user had invoked. * @param key user selection * @throws NullPointerException fo select which invoke execute method */ public void select(int key) throws NullPointerException { boolean invalid = true; do { try { this.userAction[key - 1].execute(this.input, this.tracker); invalid = false; } catch (NullPointerException npe) { System.out.println("Does not exist or invalid data, please ry again"); } } while (invalid); } /** * Method show created for showing the list of user actions and action description. */ public void show() { System.out.println(" M-E-N-U"); for (UserAction userAction : this.userAction) { if (userAction != null) { System.out.println(userAction.info()); } } } private class AddItem extends BaseAction { /** * use BaseAction constructor to assign the variable value created own AddItem constructor. * @param key use to assign userAction key to return it. * @param name use to assign userAction menu point to return it. * */ private AddItem(int key, String name) { super(key, name); } @Override public void execute(Input input, Tracker tracker) { String name = input.ask("Please enter the Task's name "); String description = input.ask("Please enter the Task's description "); tracker.add(new Item(name, description)); } } private class ShowItems extends BaseAction { /** * use BaseAction constructor to assign the variable value created own ShowItems constructor. * @param key use to assign userAction key to return it. * @param name use to assign userAction menu point to return it. * */ private ShowItems(int key, String name) { super(key, name); } @Override public void execute(Input input, Tracker tracker) { for (Item item : tracker.getAll()) { if (item != null) { System.out.println(String.format("\r\n Id: %s. \r\n Name: %s. \r\n Description: %s. \r\n Date: %s. 
\r\n" + " item.getDescription(), item.getCreate())); } } } } private class EditItem extends BaseAction { /** * use BaseAction constructor to assign the variable value created own EditItem constructor. * @param key use to assign userAction key to return it. * @param name use to assign userAction menu point to return it. * */ private EditItem(int key, String name) { super(key, name); } @Override public void execute(Input input, Tracker tracker) { String id = String.valueOf(input.ask("Please enter the Task's id: ", tracker.fillRangeOfId())); String name = input.ask("Please enter the Task's name: "); String description = input.ask("Please enter the Task's description: "); Item item = new Item(name, description); item.setId(id); tracker.editItem(item); } } private class DeleteItem extends BaseAction { /** * use BaseAction constructor to assign the variable value created own DeleteItem constructor. * @param key use to assign userAction key to return it. * @param name use to assign userAction menu point to return it. * */ private DeleteItem(int key, String name) { super(key, name); } @Override public void execute(Input input, Tracker tracker) { String id = String.valueOf(input.ask("Please enter the Task's id: ", tracker.fillRangeOfId())); tracker.deleteItem(id); } } private class AddCommentToItem extends BaseAction { /** * use BaseAction constructor to assign the variable value created own AddCommentToItem constructor. * @param key use to assign userAction key to return it. * @param name use to assign userAction menu point to return it. 
* */ private AddCommentToItem(int key, String name) { super(key, name); } @Override public void execute(Input input, Tracker tracker) { String id = String.valueOf(input.ask("Please enter the Task's id: ", tracker.fillRangeOfId())); String comment = input.ask("Please enter the Task's comment: "); Item findItem = tracker.findById(id); tracker.addComment(findItem, comment); } } public class FindItemById extends BaseAction { /** * use BaseAction constructor to assign the variable value created own FindItemById constructor. * @param key use to assign userAction key to return it. * @param name use to assign userAction menu point to return it. * */ private FindItemById(int key, String name) { super(key, name); } @Override public void execute(Input input, Tracker tracker) { String id = String.valueOf(input.ask("Please enter the Task's id: ", tracker.fillRangeOfId())); Item itemFindById = tracker.findById(id); System.out.println(String.format("\r\n Id: %s. \r\n Name: %s. \r\n Description: %s. \r\n Date: %s. \r\n" + " itemFindById.getDescription(), itemFindById.getCreate())); } } private class FindItemByName extends BaseAction { /** * use BaseAction constructor to assign the variable value created own FindItemByName constructor. * @param key use to assign userAction key to return it. * @param name use to assign userAction menu point to return it. * */ private FindItemByName(int key, String name) { super(key, name); } @Override public void execute(Input input, Tracker tracker) { String name = input.ask("Please enter the Task's name: "); Item itemFindByName = tracker.findByName(name); System.out.println(String.format("\r\n Id: %s. \r\n Name: %s. \r\n Description: %s. \r\n Date: %s. \r\n" + " itemFindByName.getDescription(), itemFindByName.getCreate())); } } private class FindItemByDate extends BaseAction { /** * use BaseAction constructor to assign the variable value created own FindItemByDate constructor. * @param key use to assign userAction key to return it. 
* @param name use to assign userAction menu point to return it. * */ private FindItemByDate(int key, String name) { super(key, name); } @Override public void execute(Input input, Tracker tracker) { String date = input.ask("Please enter the Task's date: "); Item itemFindByDate = tracker.findByDate(date); System.out.println(String.format("\r\n Id: %s. \r\n Name: %s. \r\n Description: %s. \r\n Date: %s. \r\n" + " itemFindByDate.getDescription(), itemFindByDate.getCreate())); } } private class ShowItemComments extends BaseAction { /** * use BaseAction constructor to assign the variable value created own ShowItemComments constructor. * @param key use to assign userAction key to return it. * @param name use to assign userAction menu point to return it. * */ private ShowItemComments(int key, String name) { super(key, name); } @Override public void execute(Input input, Tracker tracker) { String id = String.valueOf(input.ask("Please enter the Task's id: ", tracker.fillRangeOfId())); Item itemForComment = tracker.findById(id); final int maxCommentLength = 5; Comments[] comment = itemForComment.getAllComment(); System.out.println("\r\n Comments: \r\n boolean check = true; for (int i = 0; i < maxCommentLength; i++) { if (comment[i] != null) { check = false; System.out.println(String.format(" |%s } else if (comment[i] == null && check) { i = 1999999999; System.out.println("In this item no comments"); } } } } }
package org.azavea.otm.fields;

import android.app.Activity;
import android.app.DatePickerDialog;
import android.content.Context;
import android.util.Log;
import android.widget.Button;

import org.azavea.otm.App;
import org.azavea.otm.R;
import org.azavea.otm.data.Model;
import org.json.JSONObject;

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;

/**
 * Button-backed field that stores a server-formatted timestamp and lets the
 * user pick a new date through a {@link DatePickerDialog}.
 */
public class DateField extends ButtonField {

    DateField(JSONObject fieldDef) {
        super(fieldDef);
    }

    /**
     * Format the value with any units, if provided in the definition
     */
    @Override
    protected String formatValue(Object value) {
        return formatTimestampForDisplay((String) value);
    }

    @Override
    protected void setupButton(final Button choiceButton, Object value, Model model, Activity activity) {
        if (JSONObject.NULL.equals(value)) {
            choiceButton.setText(R.string.unspecified_field_value);
        } else {
            final String timestamp = (String) value;
            choiceButton.setText(formatTimestampForDisplay(timestamp));
            choiceButton.setTag(R.id.choice_button_value_tag, timestamp);
        }

        choiceButton.setOnClickListener(v -> {
            // Seed the picker from whatever timestamp is currently on the button.
            final String currentTimestamp = (String) choiceButton.getTag(R.id.choice_button_value_tag);
            final Calendar current = getCalendarForTimestamp(activity, currentTimestamp);

            final DatePickerDialog.OnDateSetListener onDateSet = (view, year, month, day) -> {
                final String picked = getTimestamp(activity, year, month, day);
                choiceButton.setText(formatTimestampForDisplay(picked));
                choiceButton.setTag(R.id.choice_button_value_tag, picked);
            };

            new DatePickerDialog(activity, onDateSet,
                    current.get(Calendar.YEAR),
                    current.get(Calendar.MONTH),
                    current.get(Calendar.DAY_OF_MONTH)).show();
        });
    }

    /**
     * Parse a server-formatted timestamp into a calendar. Falls back to the
     * current date/time when the timestamp is null or unparseable.
     */
    public static Calendar getCalendarForTimestamp(Context context, String setTimestamp) {
        final Calendar result = new GregorianCalendar();
        final SimpleDateFormat serverFormat =
                new SimpleDateFormat(context.getString(R.string.server_date_format));
        if (setTimestamp != null) {
            try {
                result.setTime(serverFormat.parse(setTimestamp));
            } catch (ParseException e) {
                Log.e(App.LOG_TAG, "Error parsing date stored on tag.", e);
            }
        }
        return result;
    }

    /**
     * Build a server-formatted timestamp string for the given calendar date.
     */
    public static String getTimestamp(Context context, int year, int month, int day) {
        final SimpleDateFormat serverFormat =
                new SimpleDateFormat(context.getString(R.string.server_date_format));
        final Calendar picked = new GregorianCalendar();
        picked.set(Calendar.YEAR, year);
        picked.set(Calendar.MONTH, month);
        picked.set(Calendar.DAY_OF_MONTH, day);
        return serverFormat.format(picked.getTime());
    }

    /**
     * Convert a server-formatted timestamp into the current instance's short
     * display format; yields the "unspecified" placeholder on parse failure.
     */
    public static String formatTimestampForDisplay(String timestamp) {
        final String displayPattern = App.getCurrentInstance().getShortDateFormat();
        final String serverPattern = App.getAppInstance().getString(R.string.server_date_format);
        final SimpleDateFormat serverFormat = new SimpleDateFormat(serverPattern);
        final SimpleDateFormat displayFormat = new SimpleDateFormat(displayPattern);
        try {
            final Date parsed = serverFormat.parse(timestamp);
            return displayFormat.format(parsed);
        } catch (ParseException e) {
            return App.getAppInstance().getResources().getString(R.string.unspecified_field_value);
        }
    }
}
package uk.jamierocks.zinc.plugin;

import com.google.inject.Inject;
import org.slf4j.Logger;
import org.spongepowered.api.Game;
import org.spongepowered.api.event.Listener;
import org.spongepowered.api.event.game.state.GamePreInitializationEvent;
import org.spongepowered.api.plugin.Plugin;
import org.spongepowered.api.service.ProviderExistsException;
import uk.jamierocks.zinc.CommandService;

/**
 * Sponge plugin entry point for Zinc. During pre-initialization it registers
 * a {@link CommandService} instance with the game's service manager.
 */
@Plugin(id = "zinc", name = "ZincPlugin", version = "@project.version@")
public class ZincPlugin {

    @Inject
    private Game game;

    @Inject
    private Logger logger;

    /**
     * Registers the Zinc command service; logs an error if another provider
     * has already claimed the service slot.
     */
    @Listener
    public void onInit(GamePreInitializationEvent event) {
        try {
            final CommandService service = new CommandService(this.game);
            this.game.getServiceManager().setProvider(this, CommandService.class, service);
        } catch (ProviderExistsException e) {
            this.logger.error("Failed to register command service!", e);
        }
    }
}
package c5db.log;

import c5db.C5ServerConstants;
import c5db.generated.OLogHeader;
import c5db.replication.QuorumConfiguration;
import c5db.util.C5Iterators;
import c5db.util.CheckedSupplier;
import c5db.util.KeySerializingExecutor;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.collect.Iterators;
import com.google.common.collect.Lists;
import com.google.common.io.CountingInputStream;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import org.jetbrains.annotations.NotNull;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.util.ArrayList;
import java.util.Deque;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import static c5db.log.EntryEncodingUtil.decodeAndCheckCrc;
import static c5db.log.EntryEncodingUtil.encodeWithLengthAndCrc;
import static c5db.log.LogPersistenceService.BytePersistence;
import static c5db.log.LogPersistenceService.PersistenceNavigator;
import static c5db.log.LogPersistenceService.PersistenceNavigatorFactory;
import static c5db.log.LogPersistenceService.PersistenceReader;
import static c5db.log.OLogEntryOracle.OLogEntryOracleFactory;
import static c5db.log.OLogEntryOracle.QuorumConfigurationWithSeqNum;
import static c5db.log.SequentialLog.LogEntryNotFound;
import static c5db.log.SequentialLog.LogEntryNotInSequence;

/**
 * OLog that delegates each quorum's logging tasks to a separate log record for that quorum, executing
 * any blocking tasks on a KeySerializingExecutor, with quorumId as the key. It is safe for use
 * by multiple threads, but each quorum's sequence numbers must be ascending with no gaps within
 * that quorum; so having multiple unsynchronized threads writing for the same quorum is unlikely
 * to work.
 * <p>
 * Each quorum's log record is a sequence of SequentialLogs, each based on its own persistence (e.g.,
 * a file) served from the LogPersistenceService injected on creation.
 */
public class QuorumDelegatingLog implements OLog, AutoCloseable {
  // Supplies the byte-level persistences (e.g. files) backing each quorum's logs.
  private final LogPersistenceService<?> persistenceService;
  // Serializes blocking work per quorumId, so tasks for one quorum run in submission order.
  private final KeySerializingExecutor taskExecutor;
  // One PerQuorum structure per opened quorum; concurrent because callers may race on open.
  private final Map<String, PerQuorum> quorumMap = new ConcurrentHashMap<>();
  // Field deliberately shares its name with its type (see constructor parameter).
  private final OLogEntryOracleFactory OLogEntryOracleFactory;
  private final PersistenceNavigatorFactory persistenceNavigatorFactory;

  /**
   * @param persistenceService          source of BytePersistence objects per quorum
   * @param taskExecutor                executor serializing tasks keyed by quorumId
   * @param OLogEntryOracleFactory      creates the per-quorum term/config oracle
   * @param persistenceNavigatorFactory creates the per-log seek index
   */
  public QuorumDelegatingLog(LogPersistenceService<?> persistenceService,
                             KeySerializingExecutor taskExecutor,
                             OLogEntryOracleFactory OLogEntryOracleFactory,
                             PersistenceNavigatorFactory persistenceNavigatorFactory
  ) {
    this.persistenceService = persistenceService;
    this.taskExecutor = taskExecutor;
    this.OLogEntryOracleFactory = OLogEntryOracleFactory;
    this.persistenceNavigatorFactory = persistenceNavigatorFactory;
  }

  /**
   * Creates the quorum's in-memory structure (if absent) and asynchronously opens its
   * current log, creating a new persistence if none exists yet.
   */
  @Override
  public ListenableFuture<Void> openAsync(String quorumId) {
    quorumMap.computeIfAbsent(quorumId, q -> new PerQuorum(quorumId));
    return submitQuorumTask(quorumId, () -> {
      getQuorumStructure(quorumId).open();
      return null;
    });
  }

  /**
   * Appends entries for a quorum. Sequence numbers must continue consecutively from the
   * quorum's expected next sequence number, or IllegalArgumentException is thrown.
   * Validation and oracle updates happen synchronously on the calling thread; only the
   * actual append is deferred to the quorum's task queue.
   */
  @Override
  public ListenableFuture<Boolean> logEntry(List<OLogEntry> passedInEntries, String quorumId) {
    List<OLogEntry> entries = validateAndMakeDefensiveCopy(passedInEntries);

    getQuorumStructure(quorumId).ensureEntriesAreConsecutive(entries);
    updateOracleWithNewEntries(entries, quorumId);

    // TODO group commit / sync

    return submitQuorumTask(quorumId, () -> {
      currentLog(quorumId).append(entries);
      return true;
    });
  }

  /**
   * Retrieves entries in the range from {@code start} to {@code end}; an empty list when
   * {@code end == start}. (Appears to be the half-open range [start, end) — confirm
   * against SequentialLog#subSequence.) Falls back to a multi-log read when {@code start}
   * precedes the current log's base sequence number.
   *
   * @throws IllegalArgumentException if end &lt; start
   */
  @Override
  public ListenableFuture<List<OLogEntry>> getLogEntries(long start, long end, String quorumId) {
    if (end < start) {
      throw new IllegalArgumentException("getLogEntries: end < start");
    } else if (end == start) {
      return Futures.immediateFuture(new ArrayList<>());
    }

    return submitQuorumTask(quorumId, () -> {
      if (!seqNumPrecedesLog(start, getQuorumStructure(quorumId).currentLogWithHeader())) {
        return currentLog(quorumId).subSequence(start, end);
      } else {
        return multiLogGet(start, end, quorumId);
      }
    });
  }

  /**
   * Truncates the quorum's log back to {@code seqNum}. The expected-next-seqNum and the
   * oracle are updated eagerly on the calling thread; whole logs that lie entirely at or
   * after the truncation point are deleted before the surviving log is truncated.
   */
  @Override
  public ListenableFuture<Boolean> truncateLog(long seqNum, String quorumId) {
    getQuorumStructure(quorumId).setExpectedNextSequenceNumber(seqNum);
    oLogEntryOracle(quorumId).notifyTruncation(seqNum);

    return submitQuorumTask(quorumId, () -> {
      while (seqNumPrecedesLog(seqNum, getQuorumStructure(quorumId).currentLogWithHeader())) {
        getQuorumStructure(quorumId).deleteCurrentLog();
      }
      currentLog(quorumId).truncate(seqNum);
      return true;
    });
  }

  /** Returns the sequence number the quorum's next logged entry must carry. */
  @Override
  public long getNextSeqNum(String quorumId) {
    return getQuorumStructure(quorumId).getExpectedNextSequenceNumber();
  }

  /** Returns the term of the most recently logged entry, per the quorum's oracle. */
  @Override
  public long getLastTerm(String quorumId) {
    return oLogEntryOracle(quorumId).getLastTerm();
  }

  /** Returns the election term in effect at the given sequence number. */
  @Override
  public long getLogTerm(long seqNum, String quorumId) {
    return oLogEntryOracle(quorumId).getTermAtSeqNum(seqNum);
  }

  /** Returns the most recent quorum configuration together with its sequence number. */
  @Override
  public QuorumConfigurationWithSeqNum getLastQuorumConfig(String quorumId) {
    return oLogEntryOracle(quorumId).getLastQuorumConfig();
  }

  /**
   * Rolls the quorum over to a fresh log whose header snapshots the current term,
   * last sequence number, and configuration. The header is built synchronously so it
   * reflects state as of the call, not as of task execution.
   */
  @Override
  public ListenableFuture<Void> roll(String quorumId) throws IOException {
    final OLogHeader newLogHeader = buildRollHeader(quorumId);
    return submitQuorumTask(quorumId, () -> {
      getQuorumStructure(quorumId).roll(newLogHeader);
      return null;
    });
  }

  /**
   * Shuts down the task executor, waiting up to WAL_CLOSE_TIMEOUT_SECONDS, then closes
   * every quorum's logs.
   * <p>
   * NOTE(review): InterruptedException is wrapped in RuntimeException without re-interrupting
   * the current thread, and a timeout skips closing the per-quorum logs entirely — both
   * look worth revisiting.
   */
  @Override
  public void close() throws IOException {
    try {
      taskExecutor.shutdownAndAwaitTermination(C5ServerConstants.WAL_CLOSE_TIMEOUT_SECONDS, TimeUnit.SECONDS);
    } catch (InterruptedException | TimeoutException e) {
      throw new RuntimeException(e);
    }

    for (PerQuorum quorumStructure : quorumMap.values()) {
      quorumStructure.close();
    }
  }

  /** Pairs a SequentialLog with the header that was written at the start of its persistence. */
  private static class SequentialLogWithHeader {
    public final SequentialLog<OLogEntry> log;
    public final OLogHeader header;

    private SequentialLogWithHeader(SequentialLog<OLogEntry> log, OLogHeader header) {
      this.log = log;
      this.header = header;
    }
  }

  /**
   * All per-quorum state: the stack of logs (newest first), the expected next sequence
   * number, and the term/configuration oracle. Mutating methods are intended to run on
   * the quorum's serialized task queue, except the volatile seqNum accessors, which the
   * public methods above touch directly on the calling thread.
   */
  private class PerQuorum {
    private final String quorumId;
    private final SequentialEntryCodec<OLogEntry> codec = new OLogEntry.Codec();
    // Newest log is at the head (push/pop/peek); older logs follow in order.
    private final Deque<SequentialLogWithHeader> logDeque = new LinkedList<>();
    // Volatile: read/written from caller threads as well as quorum tasks.
    private volatile long expectedNextSequenceNumber = 1;

    public final OLogEntryOracle oLogEntryOracle = OLogEntryOracleFactory.create();

    public PerQuorum(String quorumId) {
      this.quorumId = quorumId;
    }

    /** Loads the quorum's current log from persistence, or creates a brand-new one. */
    public void open() throws IOException {
      loadCurrentOrNewLog();
    }

    /**
     * Verifies the entries continue exactly from expectedNextSequenceNumber, advancing
     * it per entry; throws IllegalArgumentException on any gap or repeat.
     */
    public void ensureEntriesAreConsecutive(List<OLogEntry> entries) {
      for (OLogEntry e : entries) {
        long entrySeqNum = e.getSeqNum();
        long expectedSeqNum = expectedNextSequenceNumber;
        if (entrySeqNum != expectedSeqNum) {
          throw new IllegalArgumentException("Unexpected sequence number in entries requested to be logged");
        }
        expectedNextSequenceNumber++;
      }
    }

    public void setExpectedNextSequenceNumber(long seqNum) {
      expectedNextSequenceNumber = seqNum;
    }

    public long getExpectedNextSequenceNumber() {
      return expectedNextSequenceNumber;
    }

    /** Returns the newest log, lazily loading or creating one if none is in memory. */
    @NotNull
    public SequentialLogWithHeader currentLogWithHeader() throws IOException {
      if (logDeque.isEmpty()) {
        loadCurrentOrNewLog();
      }
      return logDeque.peek();
    }

    /** Starts a new log (with the given header) and makes it current. */
    public void roll(OLogHeader newLogHeader) throws IOException {
      SequentialLogWithHeader newLog = writeNewLog(persistenceService, newLogHeader);
      logDeque.push(newLog);
    }

    /** Drops the current log from both the persistence service and the in-memory deque. */
    public void deleteCurrentLog() throws IOException {
      persistenceService.truncate(quorumId);
      logDeque.pop();
    }

    /**
     * Iterates this quorum's logs newest-first, continuing past the in-memory deque into
     * persistences read lazily from the persistence service.
     */
    public Iterator<SequentialLogWithHeader> getLogIterator() {
      final Iterator<SequentialLogWithHeader> dequeIterator = logDeque.iterator();
      final int dequeSize = logDeque.size();

      // First return the log(s) already in memory, then read additional logs from the persistence.
      return Iterators.concat(
          dequeIterator,
          Iterators.transform(C5Iterators.advanced(persistenceService.iterator(quorumId), dequeSize),
              (persistenceSupplier) -> {
                try {
                  return readLogFromPersistence(persistenceSupplier.get());
                } catch (IOException e) {
                  // Iterators can't throw checked exceptions; tunnel IOException out.
                  throw new LogPersistenceService.IteratorIOException(e);
                }
              }));
    }

    public void close() throws IOException {
      // TODO if one log fails to close, it won't attempt to close any after that one.
      for (SequentialLogWithHeader logWithHeader : logDeque) {
        logWithHeader.log.close();
      }
    }

    /** Wraps the reader in a CountingInputStream so the header's byte length can be measured. */
    private CountingInputStream getCountingInputStream(PersistenceReader reader) {
      return new CountingInputStream(Channels.newInputStream(reader));
    }

    /**
     * Loads the current persistence (or writes a fresh empty-quorum log if none exists),
     * pushes it onto the deque, replays it into the oracle, and advances the expected
     * next sequence number past the greatest seqNum seen.
     */
    private void loadCurrentOrNewLog() throws IOException {
      final BytePersistence persistence = persistenceService.getCurrent(quorumId);
      final SequentialLogWithHeader logWithHeader;

      if (persistence == null) {
        logWithHeader = writeNewLog(persistenceService, newQuorumHeader());
      } else {
        logWithHeader = readLogFromPersistence(persistence);
      }

      logDeque.push(logWithHeader);
      prepareLogOracle(logWithHeader);
      increaseExpectedNextSeqNumTo(oLogEntryOracle.getGreatestSeqNum() + 1);
    }

    /**
     * Create a new log and header and write them to a new persistence. The header corresponds to the
     * current position and state of the current log (if there is one).
     *
     * @param persistenceService The LogPersistenceService, included as a parameter here in order to
     *                           capture a wildcard
     * @param <P>                Captured wildcard; the type of the BytePersistence to create and write
     * @throws IOException
     */
    private <P extends BytePersistence> SequentialLogWithHeader writeNewLog(
        LogPersistenceService<P> persistenceService, OLogHeader header) throws IOException {
      final P persistence = persistenceService.create(quorumId);
      final List<ByteBuffer> serializedHeader = encodeWithLengthAndCrc(OLogHeader.getSchema(), header);

      persistence.append(Iterables.toArray(serializedHeader, ByteBuffer.class));
      persistenceService.append(quorumId, persistence);

      // Persistence contains only the header so far, so its size is the header size.
      final long headerSize = persistence.size();
      return createSequentialLogWithHeader(persistence, header, headerSize);
    }

    /** Reads and CRC-checks the header at the front of an existing persistence. */
    private SequentialLogWithHeader readLogFromPersistence(BytePersistence persistence) throws IOException {
      try (CountingInputStream input = getCountingInputStream(persistence.getReader())) {
        final OLogHeader header = decodeAndCheckCrc(input, OLogHeader.getSchema());
        final long headerSize = input.getCount();

        return createSequentialLogWithHeader(persistence, header, headerSize);
      }
    }

    /**
     * Builds the navigator (seeded so the first entry after the header is indexed at
     * headerSize) and wraps persistence + codec + navigator into a SequentialLog.
     */
    private SequentialLogWithHeader createSequentialLogWithHeader(BytePersistence persistence,
                                                                  OLogHeader header,
                                                                  long headerSize) throws IOException {
      final PersistenceNavigator navigator = persistenceNavigatorFactory.create(persistence, codec, headerSize);
      navigator.addToIndex(header.getBaseSeqNum() + 1, headerSize);
      final SequentialLog<OLogEntry> log = new EncodedSequentialLog<>(persistence, codec, navigator);
      return new SequentialLogWithHeader(log, header);
    }

    /**
     * Replays the header's base entry and then every entry in the log into the oracle so
     * term/configuration queries reflect the persisted state.
     */
    private void prepareLogOracle(SequentialLogWithHeader logWithHeader) throws IOException {
      SequentialLog<OLogEntry> log = logWithHeader.log;
      final OLogHeader header = logWithHeader.header;

      oLogEntryOracle.notifyLogging(new OLogEntry(header.getBaseSeqNum(), header.getBaseTerm(),
          new OLogProtostuffContent<>(header.getBaseConfiguration())));

      // TODO it isn't necessary to read the content of every entry; only those which refer to configurations.
      // TODO Also should the navigator be updated on the last entry?
      log.forEach(oLogEntryOracle::notifyLogging);
    }

    /** Advances expectedNextSequenceNumber monotonically; never moves it backward. */
    private void increaseExpectedNextSeqNumTo(long seqNum) {
      if (seqNum > expectedNextSequenceNumber) {
        setExpectedNextSequenceNumber(seqNum);
      }
    }

    /** Header for a brand-new quorum: base term 0, base seqNum 0, empty configuration. */
    private OLogHeader newQuorumHeader() {
      return new OLogHeader(0, 0, QuorumConfiguration.EMPTY.toProtostuff());
    }
  }

  /** Rejects empty lists and snapshots the caller's list so later mutation can't corrupt the log. */
  private List<OLogEntry> validateAndMakeDefensiveCopy(List<OLogEntry> entries) {
    if (entries.isEmpty()) {
      throw new IllegalArgumentException("Attempting to log an empty entry list");
    }

    return ImmutableList.copyOf(entries);
  }

  /**
   * Reads a range that spans multiple logs: walks logs newest-first, taking each log's
   * slice of the range and pushing it so the concatenated result ends up in ascending
   * sequence order.
   *
   * @throws LogEntryNotFound if the logs run out before the range is satisfied
   */
  private List<OLogEntry> multiLogGet(long start, long end, String quorumId)
      throws IOException, LogEntryNotFound, LogEntryNotInSequence {
    final Iterator<SequentialLogWithHeader> logIterator = getQuorumStructure(quorumId).getLogIterator();
    final Deque<List<OLogEntry>> entries = new LinkedList<>();
    long remainingEnd = end;

    while (remainingEnd > start) {
      if (!logIterator.hasNext()) {
        throw new LogEntryNotFound("Unable to locate a log containing the requested entries");
      }

      SequentialLogWithHeader logWithHeader = logIterator.next();
      // First sequence number actually stored in this log (the header holds baseSeqNum).
      long firstSeqNumInLog = logWithHeader.header.getBaseSeqNum() + 1;

      if (remainingEnd > firstSeqNumInLog) {
        entries.push(logWithHeader.log.subSequence(Math.max(firstSeqNumInLog, start), remainingEnd));
        remainingEnd = firstSeqNumInLog;
      }
    }

    return Lists.newArrayList(Iterables.concat(entries));
  }

  /** Convenience: the newest SequentialLog for a quorum. */
  private SequentialLog<OLogEntry> currentLog(String quorumId) throws IOException {
    return getQuorumStructure(quorumId).currentLogWithHeader().log;
  }

  private OLogEntryOracle oLogEntryOracle(String quorumId) {
    return getQuorumStructure(quorumId).oLogEntryOracle;
  }

  /** Looks up the quorum's structure; throws QuorumNotOpen if openAsync was never called. */
  private PerQuorum getQuorumStructure(String quorumId) {
    PerQuorum perQuorum = quorumMap.get(quorumId);
    if (perQuorum == null) {
      quorumNotOpen(quorumId);
    }
    return perQuorum;
  }

  private void updateOracleWithNewEntries(List<OLogEntry> entries, String quorumId) {
    OLogEntryOracle oLogEntryOracle = oLogEntryOracle(quorumId);
    for (OLogEntry e : entries) {
      oLogEntryOracle.notifyLogging(e);
    }
  }

  /** Snapshots current term, last seqNum, and configuration into a header for roll(). */
  private OLogHeader buildRollHeader(String quorumId) {
    final long baseTerm = getLastTerm(quorumId);
    final long baseSeqNum = getNextSeqNum(quorumId) - 1;
    final QuorumConfiguration baseConfiguration = getLastQuorumConfig(quorumId).quorumConfiguration;

    return new OLogHeader(baseTerm, baseSeqNum, baseConfiguration.toProtostuff());
  }

  /** True when seqNum falls before the given log's first stored entry. */
  private boolean seqNumPrecedesLog(long seqNum, @NotNull SequentialLogWithHeader logWithHeader) {
    return seqNum <= logWithHeader.header.getBaseSeqNum();
  }

  private void quorumNotOpen(String quorumId) {
    throw new QuorumNotOpen("QuorumDelegatingLog#getQuorumStructure: quorum " + quorumId + " not open");
  }

  private <T> ListenableFuture<T> submitQuorumTask(String quorumId, CheckedSupplier<T, Exception> task) {
    return taskExecutor.submit(quorumId, task);
  }
}