gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/* * [The "BSD license"] * Copyright (c) 2012 Terence Parr * Copyright (c) 2012 Sam Harwell * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. The name of the author may not be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package org.antlr.v4; import org.antlr.runtime.ANTLRFileStream; import org.antlr.runtime.ANTLRStringStream; import org.antlr.runtime.CharStream; import org.antlr.runtime.CommonTokenStream; import org.antlr.runtime.ParserRuleReturnScope; import org.antlr.runtime.RecognitionException; import org.antlr.v4.analysis.AnalysisPipeline; import org.antlr.v4.automata.ATNFactory; import org.antlr.v4.automata.LexerATNFactory; import org.antlr.v4.automata.ParserATNFactory; import org.antlr.v4.codegen.CodeGenPipeline; import org.antlr.v4.codegen.CodeGenerator; import org.antlr.v4.misc.Graph; import org.antlr.v4.parse.ANTLRParser; import org.antlr.v4.parse.GrammarASTAdaptor; import org.antlr.v4.parse.GrammarTreeVisitor; import org.antlr.v4.parse.ToolANTLRLexer; import org.antlr.v4.parse.ToolANTLRParser; import org.antlr.v4.parse.v3TreeGrammarException; import org.antlr.v4.runtime.RuntimeMetaData; import org.antlr.v4.runtime.misc.LogManager; import org.antlr.v4.runtime.misc.Nullable; import org.antlr.v4.semantics.SemanticPipeline; import org.antlr.v4.tool.ANTLRMessage; import org.antlr.v4.tool.ANTLRToolListener; import org.antlr.v4.tool.BuildDependencyGenerator; import org.antlr.v4.tool.DOTGenerator; import org.antlr.v4.tool.DefaultToolListener; import org.antlr.v4.tool.ErrorManager; import org.antlr.v4.tool.ErrorType; import org.antlr.v4.tool.Grammar; import org.antlr.v4.tool.GrammarTransformPipeline; import org.antlr.v4.tool.LexerGrammar; import org.antlr.v4.tool.Rule; import org.antlr.v4.tool.ast.ActionAST; import org.antlr.v4.tool.ast.GrammarAST; import org.antlr.v4.tool.ast.GrammarASTErrorNode; import org.antlr.v4.tool.ast.GrammarRootAST; import org.antlr.v4.tool.ast.RuleAST; import org.antlr.v4.tool.ast.TerminalAST; import org.stringtemplate.v4.STGroup; import java.io.BufferedWriter; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStreamWriter; import java.io.StringWriter; import java.io.Writer; import 
java.lang.reflect.Field;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;

/**
 * Command-line entry point for the ANTLR tool: parses options, loads and
 * sorts grammars by tokenVocab dependency, runs the semantic/analysis/codegen
 * pipelines, and reports errors through an {@link ErrorManager}.
 */
public class Tool {
    public static final String VERSION;
    static {
        // Assigned in a static{} block to prevent the field from becoming a
        // compile-time constant
        VERSION = RuntimeMetaData.VERSION;
    }

    public static final String GRAMMAR_EXTENSION = ".g4";
    public static final String LEGACY_GRAMMAR_EXTENSION = ".g";

    public static final List<String> ALL_GRAMMAR_EXTENSIONS =
        Collections.unmodifiableList(Arrays.asList(GRAMMAR_EXTENSION, LEGACY_GRAMMAR_EXTENSION));

    public static enum OptionArgType { NONE, STRING } // NONE implies boolean

    /** Describes one command-line option: the Tool field it sets (via
     *  reflection), its flag name, whether it takes a value, and help text.
     */
    public static class Option {
        String fieldName;
        String name;
        OptionArgType argType;
        String description;

        public Option(String fieldName, String name, String description) {
            this(fieldName, name, OptionArgType.NONE, description);
        }

        public Option(String fieldName, String name, OptionArgType argType, String description) {
            this.fieldName = fieldName;
            this.name = name;
            this.argType = argType;
            this.description = description;
        }
    }

    // fields set by option manager
    public File inputDirectory; // used by mvn plugin but not set by tool itself.
    public String outputDirectory;
    public String libDirectory;
    public boolean generate_ATN_dot = false;
    public String grammarEncoding = null; // use default locale's encoding
    public String msgFormat = "antlr";
    public boolean launch_ST_inspector = false;
    public boolean ST_inspector_wait_for_close = false;
    public boolean force_atn = false;
    public boolean log = false;
    public boolean gen_listener = true;
    public boolean gen_visitor = false;
    public boolean gen_dependencies = false;
    public String genPackage = null;
    public Map<String, String> grammarOptions = null;
    public boolean warnings_are_errors = false;
    public boolean longMessages = false;

    public static Option[] optionDefs = {
        new Option("outputDirectory",             "-o", OptionArgType.STRING, "specify output directory where all output is generated"),
        new Option("libDirectory",                "-lib", OptionArgType.STRING, "specify location of grammars, tokens files"),
        new Option("generate_ATN_dot",            "-atn", "generate rule augmented transition network diagrams"),
        new Option("grammarEncoding",             "-encoding", OptionArgType.STRING, "specify grammar file encoding; e.g., euc-jp"),
        new Option("msgFormat",                   "-message-format", OptionArgType.STRING, "specify output style for messages in antlr, gnu, vs2005"),
        new Option("longMessages",                "-long-messages", "show exception details when available for errors and warnings"),
        new Option("gen_listener",                "-listener", "generate parse tree listener (default)"),
        new Option("gen_listener",                "-no-listener", "don't generate parse tree listener"),
        new Option("gen_visitor",                 "-visitor", "generate parse tree visitor"),
        new Option("gen_visitor",                 "-no-visitor", "don't generate parse tree visitor (default)"),
        new Option("genPackage",                  "-package", OptionArgType.STRING, "specify a package/namespace for the generated code"),
        new Option("gen_dependencies",            "-depend", "generate file dependencies"),
        new Option("",                            "-D<option>=value", "set/override a grammar-level option"),
        new Option("warnings_are_errors",         "-Werror", "treat warnings as errors"),
        new Option("launch_ST_inspector",         "-XdbgST", "launch StringTemplate visualizer on generated code"),
        new Option("ST_inspector_wait_for_close", "-XdbgSTWait", "wait for STViz to close before continuing"),
        new Option("force_atn",                   "-Xforce-atn", "use the ATN simulator for all predictions"),
        new Option("log",                         "-Xlog", "dump lots of logging info to antlr-timestamp.log"),
    };

    // helper vars for option management
    protected boolean haveOutputDir = false;
    protected boolean return_dont_exit = false;

    // The internal options are for my use on the command line during dev
    public static boolean internalOption_PrintGrammarTree = false;
    public static boolean internalOption_ShowATNConfigsInDFA = false;

    public final String[] args;

    protected List<String> grammarFiles = new ArrayList<String>();

    public ErrorManager errMgr;
    public LogManager logMgr = new LogManager();

    List<ANTLRToolListener> listeners = new CopyOnWriteArrayList<ANTLRToolListener>();

    /** Track separately so if someone adds a listener, it's the only one
     *  instead of it and the default stderr listener.
     */
    DefaultToolListener defaultListener = new DefaultToolListener(this);

    /** CLI entry point: process all grammars named on the command line,
     *  optionally save the log, and exit with 1 if any errors occurred.
     */
    public static void main(String[] args) {
        Tool antlr = new Tool(args);
        if ( args.length == 0 ) { antlr.help(); antlr.exit(0); }

        try {
            antlr.processGrammarsOnCommandLine();
        }
        finally {
            if ( antlr.log ) {
                try {
                    String logname = antlr.logMgr.save();
                    System.out.println("wrote "+logname);
                }
                catch (IOException ioe) {
                    antlr.errMgr.toolError(ErrorType.INTERNAL_ERROR, ioe);
                }
            }
        }
        if ( antlr.return_dont_exit ) return;

        if (antlr.errMgr.getNumErrors() > 0) {
            antlr.exit(1);
        }
        antlr.exit(0);
    }

    public Tool() { this(null); }

    public Tool(String[] args) {
        this.args = args;
        errMgr = new ErrorManager(this);
        // msgFormat has its default value at this point; -message-format is
        // applied later inside handleArgs() via reflection.
        errMgr.setFormat(msgFormat);
        handleArgs();
    }

    /** Walk the raw args: -D options, grammar file names, and flag/value
     *  options (set on this object via reflection); then normalize the
     *  output and lib directories.
     */
    protected void handleArgs() {
        int i=0;
        while ( args!=null && i<args.length ) {
            String arg = args[i];
            i++;
            if ( arg.startsWith("-D") ) { // -Dlanguage=Java syntax
                handleOptionSetArg(arg);
                continue;
            }
            if ( arg.charAt(0)!='-' ) { // file name
                if ( !grammarFiles.contains(arg) ) grammarFiles.add(arg);
                continue;
            }
            boolean found = false;
            for (Option o : optionDefs) {
                if ( arg.equals(o.name) ) {
                    found = true;
                    String argValue = null;
                    if ( o.argType==OptionArgType.STRING ) {
                        argValue = args[i];
                        i++;
                    }
                    // use reflection to set field
                    Class<? extends Tool> c = this.getClass();
                    try {
                        Field f = c.getField(o.fieldName);
                        if ( argValue==null ) {
                            // "-no-xxx" flags clear their boolean field
                            if ( arg.startsWith("-no-") ) f.setBoolean(this, false);
                            else f.setBoolean(this, true);
                        }
                        else f.set(this, argValue);
                    }
                    catch (Exception e) {
                        errMgr.toolError(ErrorType.INTERNAL_ERROR, "can't access field "+o.fieldName);
                    }
                }
            }
            if ( !found ) {
                errMgr.toolError(ErrorType.INVALID_CMDLINE_ARG, arg);
            }
        }
        if ( outputDirectory!=null ) {
            if (outputDirectory.endsWith("/") || outputDirectory.endsWith("\\")) {
                outputDirectory = outputDirectory.substring(0, outputDirectory.length() - 1);
            }
            File outDir = new File(outputDirectory);
            haveOutputDir = true;
            if (outDir.exists() && !outDir.isDirectory()) {
                errMgr.toolError(ErrorType.OUTPUT_DIR_IS_FILE, outputDirectory);
                // BUG FIX: this previously reset libDirectory (copy/paste from
                // the -lib branch below), leaving outputDirectory pointing at a
                // plain file. Fall back to the current dir for OUTPUT instead.
                outputDirectory = ".";
            }
        }
        else {
            outputDirectory = ".";
        }
        if ( libDirectory!=null ) {
            if (libDirectory.endsWith("/") || libDirectory.endsWith("\\")) {
                libDirectory = libDirectory.substring(0, libDirectory.length() - 1);
            }
            File outDir = new File(libDirectory);
            if (!outDir.exists()) {
                errMgr.toolError(ErrorType.DIR_NOT_FOUND, libDirectory);
                libDirectory = ".";
            }
        }
        else {
            libDirectory = ".";
        }
        if ( launch_ST_inspector ) {
            STGroup.trackCreationEvents = true;
            return_dont_exit = true; // wait for inspector before exiting
        }
    }

    /** Handle a -Doption=value grammar-level option override. Only options
     *  known to Grammar.parserOptions/lexerOptions are accepted.
     */
    protected void handleOptionSetArg(String arg) {
        int eq = arg.indexOf('=');
        if ( eq>0 && arg.length()>3 ) {
            String option = arg.substring("-D".length(), eq);
            String value = arg.substring(eq+1);
            if ( value.length()==0 ) {
                errMgr.toolError(ErrorType.BAD_OPTION_SET_SYNTAX, arg);
                return;
            }
            if ( Grammar.parserOptions.contains(option) ||
                 Grammar.lexerOptions.contains(option) )
            {
                if ( grammarOptions==null ) grammarOptions = new HashMap<String, String>();
                grammarOptions.put(option, value);
            }
            else {
                errMgr.grammarError(ErrorType.ILLEGAL_OPTION,
                                    null,
                                    null,
                                    option);
            }
        }
        else {
            errMgr.toolError(ErrorType.BAD_OPTION_SET_SYNTAX, arg);
        }
    }

    /** Process each grammar on the command line in tokenVocab-dependency
     *  order; -depend only prints dependencies instead of generating code.
     */
    public void processGrammarsOnCommandLine() {
        List<GrammarRootAST> sortedGrammars = sortGrammarByTokenVocab(grammarFiles);

        for (GrammarRootAST t : sortedGrammars) {
            final Grammar g = createGrammar(t);
            g.fileName = t.fileName;
            if ( gen_dependencies ) {
                BuildDependencyGenerator dep =
                    new BuildDependencyGenerator(this, g);
                /*
                List outputFiles = dep.getGeneratedFileList();
                List dependents = dep.getDependenciesFileList();
                System.out.println("output: "+outputFiles);
                System.out.println("dependents: "+dependents);
                 */
                System.out.println(dep.getDependencies().render());
            }
            else if (errMgr.getNumErrors() == 0) {
                process(g, true);
            }
        }
    }

    /** To process a grammar, we load all of its imported grammars into
        subordinate grammar objects. Then we merge the imported rules
        into the root grammar. If a root grammar is a combined grammar,
        we have to extract the implicit lexer. Once all this is done, we
        process the lexer first, if present, and then the parser grammar
     */
    public void process(Grammar g, boolean gencode) {
        g.loadImportedGrammars();

        GrammarTransformPipeline transform = new GrammarTransformPipeline(g, this);
        transform.process();

        LexerGrammar lexerg;
        GrammarRootAST lexerAST;
        if ( g.ast!=null && g.ast.grammarType== ANTLRParser.COMBINED &&
             !g.ast.hasErrors )
        {
            lexerAST = transform.extractImplicitLexer(g); // alters g.ast
            if ( lexerAST!=null ) {
                if (grammarOptions != null) {
                    lexerAST.cmdLineOptions = grammarOptions;
                }

                lexerg = new LexerGrammar(this, lexerAST);
                lexerg.fileName = g.fileName;
                lexerg.originalGrammar = g;
                g.implicitLexer = lexerg;
                lexerg.implicitLexerOwner = g;
                processNonCombinedGrammar(lexerg, gencode);
//                System.out.println("lexer tokens="+lexerg.tokenNameToTypeMap);
//                System.out.println("lexer strings="+lexerg.stringLiteralToTypeMap);
            }
        }
        if ( g.implicitLexer!=null ) g.importVocab(g.implicitLexer);
//        System.out.println("tokens="+g.tokenNameToTypeMap);
//        System.out.println("strings="+g.stringLiteralToTypeMap);
        processNonCombinedGrammar(g, gencode);
    }

    /** Run a single (non-combined) grammar through rule checks, the semantic
     *  pipeline, ATN construction, analysis, and (optionally) code generation.
     *  Bails out as soon as a stage introduces new errors.
     */
    public void processNonCombinedGrammar(Grammar g, boolean gencode) {
        if ( g.ast==null || g.ast.hasErrors ) return;
        if ( internalOption_PrintGrammarTree ) System.out.println(g.ast.toStringTree());

        boolean ruleFail = checkForRuleIssues(g);
        if ( ruleFail ) return;

        int prevErrors = errMgr.getNumErrors();
        // MAKE SURE GRAMMAR IS SEMANTICALLY CORRECT (FILL IN GRAMMAR OBJECT)
        SemanticPipeline sem = new SemanticPipeline(g);
        sem.process();

        String language = g.getOptionString("language");
        if ( !CodeGenerator.targetExists(language) ) {
            errMgr.toolError(ErrorType.CANNOT_CREATE_TARGET_GENERATOR, language);
            return;
        }

        if ( errMgr.getNumErrors()>prevErrors ) return;

        // BUILD ATN FROM AST
        ATNFactory factory;
        if ( g.isLexer() ) factory = new LexerATNFactory((LexerGrammar)g);
        else factory = new ParserATNFactory(g);
        g.atn = factory.createATN();

        if ( generate_ATN_dot ) generateATNs(g);

        // PERFORM GRAMMAR ANALYSIS ON ATN: BUILD DECISION DFAs
        AnalysisPipeline anal = new AnalysisPipeline(g);
        anal.process();

        //if ( generate_DFA_dot ) generateDFAs(g);

        if ( g.tool.getNumErrors()>prevErrors ) return;

        // GENERATE CODE
        if ( gencode ) {
            CodeGenPipeline gen = new CodeGenPipeline(g);
            gen.process();
        }
    }

    /**
     * Important enough to avoid multiple definitions that we do very early,
     * right after AST construction. Also check for undefined rules in
     * parser/lexer to avoid exceptions later. Return true if we find multiple
     * definitions of the same rule or a reference to an undefined rule or
     * parser rule ref in lexer rule.
     */
    public boolean checkForRuleIssues(final Grammar g) {
        // check for redefined rules
        GrammarAST RULES = (GrammarAST)g.ast.getFirstChildWithType(ANTLRParser.RULES);
        List<GrammarAST> rules = new ArrayList<GrammarAST>(RULES.getAllChildrenWithType(ANTLRParser.RULE));
        for (GrammarAST mode : g.ast.getAllChildrenWithType(ANTLRParser.MODE)) {
            rules.addAll(mode.getAllChildrenWithType(ANTLRParser.RULE));
        }

        boolean redefinition = false;
        final Map<String, RuleAST> ruleToAST = new HashMap<String, RuleAST>();
        for (GrammarAST r : rules) {
            RuleAST ruleAST = (RuleAST)r;
            GrammarAST ID = (GrammarAST)ruleAST.getChild(0);
            String ruleName = ID.getText();
            RuleAST prev = ruleToAST.get(ruleName);
            if ( prev !=null ) {
                GrammarAST prevChild = (GrammarAST)prev.getChild(0);
                g.tool.errMgr.grammarError(ErrorType.RULE_REDEFINITION,
                                           g.fileName,
                                           ID.getToken(),
                                           ruleName,
                                           prevChild.getToken().getLine());
                redefinition = true;
                continue;
            }
            ruleToAST.put(ruleName, ruleAST);
        }

        // check for undefined rules
        class UndefChecker extends GrammarTreeVisitor {
            public boolean badref = false;
            @Override
            public void tokenRef(TerminalAST ref) {
                if ("EOF".equals(ref.getText())) {
                    // this is a special predefined reference
                    return;
                }

                // in lexer grammars a token ref is really a rule ref
                if ( g.isLexer() ) ruleRef(ref, null);
            }

            @Override
            public void ruleRef(GrammarAST ref, ActionAST arg) {
                RuleAST ruleAST = ruleToAST.get(ref.getText());
                if (Character.isUpperCase(currentRuleName.charAt(0)) &&
                    Character.isLowerCase(ref.getText().charAt(0)))
                {
                    // lexer rule (uppercase name) referencing a parser rule
                    badref = true;
                    String fileName = ref.getToken().getInputStream().getSourceName();
                    errMgr.grammarError(ErrorType.PARSER_RULE_REF_IN_LEXER_RULE,
                                        fileName, ref.getToken(), ref.getText(), currentRuleName);
                }
                else if ( ruleAST==null ) {
                    badref = true;
                    errMgr.grammarError(ErrorType.UNDEFINED_RULE_REF,
                                        g.fileName, ref.token, ref.getText());
                }
            }
            @Override
            public ErrorManager getErrorManager() { return errMgr; }
        }

        UndefChecker chk = new UndefChecker();
        chk.visitGrammar(g.ast);

        return redefinition || chk.badref;
    }

    /** Parse all grammar files and return their roots topologically sorted so
     *  that any grammar appears after the grammar its tokenVocab names.
     *  Files that fail to parse are dropped.
     */
    public List<GrammarRootAST> sortGrammarByTokenVocab(List<String> fileNames) {
//        System.out.println(fileNames);
        Graph<String> g = new Graph<String>();
        List<GrammarRootAST> roots = new ArrayList<GrammarRootAST>();
        for (String fileName : fileNames) {
            GrammarAST t = parseGrammar(fileName);
            if ( t==null || t instanceof GrammarASTErrorNode) continue; // came back as error node
            if ( ((GrammarRootAST)t).hasErrors ) continue;
            GrammarRootAST root = (GrammarRootAST)t;
            roots.add(root);
            root.fileName = fileName;
            String grammarName = root.getChild(0).getText();

            GrammarAST tokenVocabNode = findOptionValueAST(root, "tokenVocab");
            // Make grammars depend on any tokenVocab options
            if ( tokenVocabNode!=null ) {
                String vocabName = tokenVocabNode.getText();
                g.addEdge(grammarName, vocabName);
            }
            // add cycle to graph so we always process a grammar if no error
            // even if no dependency
            g.addEdge(grammarName, grammarName);
        }

        List<String> sortedGrammarNames = g.sort();
//        System.out.println("sortedGrammarNames="+sortedGrammarNames);

        List<GrammarRootAST> sortedRoots = new ArrayList<GrammarRootAST>();
        for (String grammarName : sortedGrammarNames) {
            for (GrammarRootAST root : roots) {
                if ( root.getGrammarName().equals(grammarName) ) {
                    sortedRoots.add(root);
                    break;
                }
            }
        }

        return sortedRoots;
    }

    /** Manually get option node from tree; return null if no defined. */
    public static GrammarAST findOptionValueAST(GrammarRootAST root, String option) {
        GrammarAST options = (GrammarAST)root.getFirstChildWithType(ANTLRParser.OPTIONS);
        if ( options!=null && options.getChildCount() > 0 ) {
            for (Object o : options.getChildren()) {
                GrammarAST c = (GrammarAST)o;
                if ( c.getType() == ANTLRParser.ASSIGN &&
                     c.getChild(0).getText().equals(option) )
                {
                    return (GrammarAST)c.getChild(1);
                }
            }
        }
        return null;
    }

    /** Given the raw AST of a grammar, create a grammar object
        associated with the AST. Once we have the grammar object, ensure
        that all nodes in tree referred to this grammar. Later, we will
        use it for error handling and generally knowing from where a rule
        comes from.
     */
    public Grammar createGrammar(GrammarRootAST ast) {
        final Grammar g;
        if ( ast.grammarType==ANTLRParser.LEXER ) g = new LexerGrammar(this, ast);
        else g = new Grammar(this, ast);

        // ensure each node has pointer to surrounding grammar
        GrammarTransformPipeline.setGrammarPtr(g, ast);
        return g;
    }

    /** Parse one grammar file (resolved against inputDirectory when relative)
     *  and return its root AST, or null on I/O error.
     */
    public GrammarRootAST parseGrammar(String fileName) {
        try {
            File file = new File(fileName);
            if (!file.isAbsolute()) {
                file = new File(inputDirectory, fileName);
            }

            ANTLRFileStream in = new ANTLRFileStream(file.getAbsolutePath(), grammarEncoding);
            GrammarRootAST t = parse(fileName, in);
            return t;
        }
        catch (IOException ioe) {
            errMgr.toolError(ErrorType.CANNOT_OPEN_FILE, ioe, fileName);
        }
        return null;
    }

    /** Convenience method to load and process an ANTLR grammar. Useful
     *  when creating interpreters.  If you need to access to the lexer
     *  grammar created while processing a combined grammar, use
     *  getImplicitLexer() on returned grammar.
     */
    public Grammar loadGrammar(String fileName) {
        GrammarRootAST grammarRootAST = parseGrammar(fileName);
        final Grammar g = createGrammar(grammarRootAST);
        g.fileName = fileName;
        process(g, false);
        return g;
    }

    private final Map<String, Grammar> importedGrammars = new HashMap<String, Grammar>();

    /**
     * Try current dir then dir of g then lib dir
     * @param g
     * @param nameNode The node associated with the imported grammar name.
     */
    public Grammar loadImportedGrammar(Grammar g, GrammarAST nameNode) throws IOException {
        String name = nameNode.getText();
        Grammar imported = importedGrammars.get(name);
        if (imported == null) {
            g.tool.log("grammar", "load " + name + " from " + g.fileName);
            File importedFile = null;
            // try .g4 first, then legacy .g
            for (String extension : ALL_GRAMMAR_EXTENSIONS) {
                importedFile = getImportedGrammarFile(g, name + extension);
                if (importedFile != null) {
                    break;
                }
            }

            if ( importedFile==null ) {
                errMgr.grammarError(ErrorType.CANNOT_FIND_IMPORTED_GRAMMAR, g.fileName, nameNode.getToken(), name);
                return null;
            }

            String absolutePath = importedFile.getAbsolutePath();
            ANTLRFileStream in = new ANTLRFileStream(absolutePath, grammarEncoding);
            GrammarRootAST root = parse(g.fileName, in);
            if (root == null) {
                return null;
            }

            imported = createGrammar(root);
            imported.fileName = absolutePath;
            importedGrammars.put(root.getGrammarName(), imported);
        }

        return imported;
    }

    public GrammarRootAST parseGrammarFromString(String grammar) {
        return parse("<string>", new ANTLRStringStream(grammar));
    }

    /** Parse a grammar from a char stream; returns null when the result is
     *  not a proper grammar root or a v3-style tree grammar is detected.
     */
    public GrammarRootAST parse(String fileName, CharStream in) {
        try {
            GrammarASTAdaptor adaptor = new GrammarASTAdaptor(in);
            ToolANTLRLexer lexer = new ToolANTLRLexer(in, this);
            CommonTokenStream tokens = new CommonTokenStream(lexer);
            lexer.tokens = tokens;
            ToolANTLRParser p = new ToolANTLRParser(tokens, this);
            p.setTreeAdaptor(adaptor);
            try {
                ParserRuleReturnScope r = p.grammarSpec();
                GrammarAST root = (GrammarAST)r.getTree();
                if ( root instanceof GrammarRootAST) {
                    ((GrammarRootAST)root).hasErrors = lexer.getNumberOfSyntaxErrors()>0 || p.getNumberOfSyntaxErrors()>0;
                    assert ((GrammarRootAST)root).tokenStream == tokens;
                    if ( grammarOptions!=null ) {
                        ((GrammarRootAST)root).cmdLineOptions = grammarOptions;
                    }
                    return ((GrammarRootAST)root);
                }
            }
            catch (v3TreeGrammarException e) {
                errMgr.grammarError(ErrorType.V3_TREE_GRAMMAR, fileName, e.location);
            }
            return null;
        }
        catch (RecognitionException re) {
            // TODO: do we gen errors now?
            ErrorManager.internalError("can't generate this message at moment; antlr recovers");
        }
        return null;
    }

    /** Write one DOT file per rule (for g and all of its imports) showing the
     *  rule's ATN start state.
     */
    public void generateATNs(Grammar g) {
        DOTGenerator dotGenerator = new DOTGenerator(g);
        List<Grammar> grammars = new ArrayList<Grammar>();
        grammars.add(g);
        List<Grammar> imported = g.getAllImportedGrammars();
        if ( imported!=null ) grammars.addAll(imported);

        for (Grammar ig : grammars) {
            for (Rule r : ig.rules.values()) {
                try {
                    String dot = dotGenerator.getDOT(g.atn.ruleToStartState[r.index], g.isLexer());
                    if (dot != null) {
                        writeDOTFile(g, r, dot);
                    }
                } catch (IOException ioe) {
                    errMgr.toolError(ErrorType.CANNOT_WRITE_FILE, ioe);
                }
            }
        }
    }

    /** This method is used by all code generators to create new output
     *  files. If the outputDir set by -o is not present it will be created.
     *  The final filename is sensitive to the output directory and
     *  the directory where the grammar file was found.  If -o is /tmp
     *  and the original grammar file was foo/t.g4 then output files
     *  go in /tmp/foo.
     *
     *  The output dir -o spec takes precedence if it's absolute.
     *  E.g., if the grammar file dir is absolute the output dir is given
     *  precedence. "-o /tmp /usr/lib/t.g4" results in "/tmp/T.java" as
     *  output (assuming t.g4 holds T.java).
     *
     *  If no -o is specified, then just write to the directory where the
     *  grammar file was found.
     *
     *  If outputDirectory==null then write a String.
     */
    public Writer getOutputFileWriter(Grammar g, String fileName) throws IOException {
        if (outputDirectory == null) {
            return new StringWriter();
        }
        // output directory is a function of where the grammar file lives
        // for subdir/T.g4, you get subdir here.  Well, depends on -o etc...
        File outputDir = getOutputDirectory(g.fileName);
        File outputFile = new File(outputDir, fileName);

        if (!outputDir.exists()) {
            outputDir.mkdirs();
        }
        FileOutputStream fos = new FileOutputStream(outputFile);
        OutputStreamWriter osw;
        if ( grammarEncoding!=null ) {
            osw = new OutputStreamWriter(fos, grammarEncoding);
        }
        else {
            osw = new OutputStreamWriter(fos);
        }
        return new BufferedWriter(osw);
    }

    /** Look for an imported grammar file in the input dir, then next to the
     *  importing grammar, then in the lib dir; null when not found anywhere.
     */
    public File getImportedGrammarFile(Grammar g, String fileName) {
        File importedFile = new File(inputDirectory, fileName);
        if ( !importedFile.exists() ) {
            File gfile = new File(g.fileName);
            String parentDir = gfile.getParent();
            importedFile = new File(parentDir, fileName);
            if ( !importedFile.exists() ) { // try in lib dir
                importedFile = new File(libDirectory, fileName);
                if ( !importedFile.exists() ) {
                    return null;
                }
            }
        }
        return importedFile;
    }

    /**
     * Return the location where ANTLR will generate output files for a given
     * file. This is a base directory and output files will be relative to
     * here in some cases such as when -o option is used and input files are
     * given relative to the input directory.
     *
     * @param fileNameWithPath path to input source
     */
    public File getOutputDirectory(String fileNameWithPath) {
        File outputDir;
        String fileDirectory;

        // Some files are given to us without a PATH but should still
        // be written to the output directory in the relative path of
        // the output directory. The file directory is either the set of sub directories
        // or just or the relative path recorded for the parent grammar. This means
        // that when we write the tokens files, or the .java files for imported grammars
        // that we will write them in the correct place.
        if (fileNameWithPath.lastIndexOf(File.separatorChar) == -1) {
            // No path is included in the file name, so make the file
            // directory the same as the parent grammar (which might still be just ""
            // but when it is not, we will write the file in the correct place.
            fileDirectory = ".";
        }
        else {
            fileDirectory = fileNameWithPath.substring(0, fileNameWithPath.lastIndexOf(File.separatorChar));
        }
        if ( haveOutputDir ) {
            // -o /tmp /var/lib/t.g4 => /tmp/T.java
            // -o subdir/output /usr/lib/t.g4 => subdir/output/T.java
            // -o . /usr/lib/t.g4 => ./T.java
            if (fileDirectory != null &&
                (new File(fileDirectory).isAbsolute() ||
                 fileDirectory.startsWith("~"))) { // isAbsolute doesn't count this :(
                // somebody set the dir, it takes precedence; write new file there
                outputDir = new File(outputDirectory);
            }
            else {
                // -o /tmp subdir/t.g4 => /tmp/subdir/t.g4
                if (fileDirectory != null) {
                    outputDir = new File(outputDirectory, fileDirectory);
                }
                else {
                    outputDir = new File(outputDirectory);
                }
            }
        }
        else {
            // they didn't specify a -o dir so just write to location
            // where grammar is, absolute or relative, this will only happen
            // with command line invocation as build tools will always
            // supply an output directory.
            outputDir = new File(fileDirectory);
        }
        return outputDir;
    }

    protected void writeDOTFile(Grammar g, Rule r, String dot) throws IOException {
        writeDOTFile(g, r.g.name + "." + r.name, dot);
    }

    protected void writeDOTFile(Grammar g, String name, String dot) throws IOException {
        Writer fw = getOutputFileWriter(g, name + ".dot");
        try {
            fw.write(dot);
        }
        finally {
            fw.close();
        }
    }

    public void help() {
        info("ANTLR Parser Generator  Version " + Tool.VERSION);
        for (Option o : optionDefs) {
            String name = o.name + (o.argType!=OptionArgType.NONE? " ___" : "");
            String s = String.format(" %-19s %s", name, o.description);
            info(s);
        }
    }

    public void log(@Nullable String component, String msg) { logMgr.log(component, msg); }
    public void log(String msg) { log(null, msg); }

    public int getNumErrors() { return errMgr.getNumErrors(); }

    public void addListener(ANTLRToolListener tl) {
        if ( tl!=null ) listeners.add(tl);
    }
    public void removeListener(ANTLRToolListener tl) { listeners.remove(tl); }
    public void removeListeners() { listeners.clear(); }
    public List<ANTLRToolListener> getListeners() { return listeners; }

    public void info(String msg) {
        if ( listeners.isEmpty() ) {
            defaultListener.info(msg);
            return;
        }
        for (ANTLRToolListener l : listeners) l.info(msg);
    }
    public void error(ANTLRMessage msg) {
        if ( listeners.isEmpty() ) {
            defaultListener.error(msg);
            return;
        }
        for (ANTLRToolListener l : listeners) l.error(msg);
    }
    public void warning(ANTLRMessage msg) {
        if ( listeners.isEmpty() ) {
            defaultListener.warning(msg);
        }
        else {
            for (ANTLRToolListener l : listeners) l.warning(msg);
        }

        if (warnings_are_errors) {
            errMgr.emit(ErrorType.WARNING_TREATED_AS_ERROR, new ANTLRMessage(ErrorType.WARNING_TREATED_AS_ERROR));
        }
    }

    public void version() {
        info("ANTLR Parser Generator  Version " + VERSION);
    }

    public void exit(int e) { System.exit(e); }

    public void panic() { throw new Error("ANTLR panic"); }
}
/*
 * Copyright 2000-2014 Vaadin Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package com.vaadin.client.ui.dd;

import com.google.gwt.dom.client.DivElement;
import com.google.gwt.dom.client.Element;
import com.google.gwt.user.client.DOM;
import com.google.gwt.user.client.Event;
import com.google.gwt.user.client.EventListener;
import com.vaadin.client.WidgetUtil;
import com.vaadin.client.ui.dd.DragAndDropHandler.DragAndDropCallback;

/**
 * Drag handle implementation. Drag handles are used for moving or resizing
 * widgets. This is a minimal-case component, meant to be used specifically as a
 * drag handle attached to another widget or element. As such, it does
 * <b>not</b> provide access to the events it's listening to (from the point of
 * view of this component, there really is no use for that). For the more
 * general, event-providing interface that this component is based on, see
 * {@link DragAndDropHandler}.
 *
 * @since 7.6
 */
public class DragHandle {

    /**
     * Callback interface for the DragHandle event life cycle
     */
    public interface DragHandleCallback {

        /**
         * Called when dragging starts
         */
        public void onStart();

        /**
         * Called when the drag handle has moved.
         *
         * @param deltaX
         *            change in X direction since start
         * @param deltaY
         *            change in Y direction since start
         */
        public void onUpdate(double deltaX, double deltaY);

        /**
         * Called when the drag operation has been cancelled (usually by
         * pressing ESC)
         */
        public void onCancel();

        /**
         * Called when the drag operation completes successfully
         */
        public void onComplete();
    }

    // Element this handle is currently attached to; null when detached.
    private Element parent;
    // The <div> that acts as the visible, clickable handle.
    private DivElement element;
    // Base CSS class; "-dragged" is appended to it during an active drag.
    private String baseClassName;
    private DragAndDropHandler dndHandler;
    private DragAndDropCallback dndCallback;
    // User-supplied life-cycle callback; events are forwarded to it.
    private DragHandleCallback userCallback;

    /**
     * Creates a new DragHandle.
     *
     * @param baseName
     *            CSS style name to use for this DragHandle element. This
     *            parameter is supplied to the constructor (rather than added
     *            later) both to provide the "-dragged" style and to make sure
     *            that the drag handle can be properly styled (it's otherwise
     *            invisible)
     * @param callback
     *            Callback object allows hooking up the drag handle to the rest
     *            of the program logic
     */
    public DragHandle(String baseName, DragHandleCallback callback) {
        parent = null;
        element = DivElement.as(DOM.createElement("div"));
        baseClassName = baseName;
        userCallback = callback;

        addStyleName(baseClassName);

        // Adapter: translates low-level DragAndDropCallback events into the
        // simpler DragHandleCallback life cycle, tracking the drag start
        // coordinates so deltas can be reported.
        dndCallback = new DragAndDropCallback() {

            // Pointer position at drag start; used to compute deltas.
            private double startX;
            private double startY;

            @Override
            public void onDrop() {
                removeDraggingStyle();
                userCallback.onComplete();
            }

            @Override
            public void onDragUpdate(Event e) {
                double dx = WidgetUtil.getTouchOrMouseClientX(e) - startX;
                double dy = WidgetUtil.getTouchOrMouseClientY(e) - startY;
                userCallback.onUpdate(dx, dy);
            }

            @Override
            public boolean onDragStart(Event e) {
                addDraggingStyle();
                startX = WidgetUtil.getTouchOrMouseClientX(e);
                startY = WidgetUtil.getTouchOrMouseClientY(e);
                userCallback.onStart();
                // returning true accepts the drag
                return true;
            }

            @Override
            public void onDragEnd() {
                // NOP, handled in onDrop and onDragCancel
            }

            @Override
            public void onDragCancel() {
                removeDraggingStyle();
                userCallback.onCancel();
            }

            private void addDraggingStyle() {
                addStyleName(baseClassName + "-dragged");
            }

            private void removeDraggingStyle() {
                removeStyleName(baseClassName + "-dragged");
            }
        };
        dndHandler = new DragAndDropHandler();

        // Listen for mouse/touch press on the handle element and hand the
        // event over to the drag-and-drop handler to start a possible drag.
        DOM.sinkEvents(element, Event.ONMOUSEDOWN | Event.ONTOUCHSTART);
        DOM.setEventListener(element, new EventListener() {
            @Override
            public void onBrowserEvent(Event event) {
                dndHandler.onDragStartOnDraggableElement(event, dndCallback);
                event.stopPropagation();
            }
        });
    }

    /**
     * Returns the current parent element for this drag handle. May be null.
     *
     * @return an Element or null
     */
    public Element getParent() {
        return parent;
    }

    /**
     * Gets the element used as actual drag handle.
     *
     * @return an Element
     */
    public Element getElement() {
        return element;
    }

    /**
     * Adds this drag handle to an HTML element.
     *
     * @param elem
     *            an element
     */
    public void addTo(Element elem) {
        // detach from any previous parent first
        removeFromParent();
        parent = elem;
        parent.appendChild(element);
    }

    /**
     * Removes this drag handle from whatever it was attached to.
     */
    public void removeFromParent() {
        if (parent != null) {
            parent.removeChild(element);
            parent = null;
        }
    }

    /**
     * Adds CSS style name to the drag handle element.
     *
     * @param styleName
     *            a CSS style name
     */
    public void addStyleName(String styleName) {
        element.addClassName(styleName);
    }

    /**
     * Removes existing style name from drag handle element.
     *
     * @param styleName
     *            a CSS style name
     */
    public void removeStyleName(String styleName) {
        element.removeClassName(styleName);
    }
}
/*
 * Copyright (C) 2013 readyState Software Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.notrace.systembar;

import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.app.Activity;
import android.content.Context;
import android.content.res.Configuration;
import android.content.res.Resources;
import android.content.res.TypedArray;
import android.graphics.Color;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.util.DisplayMetrics;
import android.util.TypedValue;
import android.view.Gravity;
import android.view.View;
import android.view.ViewConfiguration;
import android.view.ViewGroup;
import android.view.Window;
import android.view.WindowManager;
import android.widget.FrameLayout.LayoutParams;

import java.lang.reflect.Field;
import java.lang.reflect.Method;

/**
 * Class to manage status and navigation bar tint effects when using KitKat
 * translucent system UI modes.
 */
public class SystemBarTintManager {

    /**
     * The default system bar tint color value.
     */
    public static final int DEFAULT_TINT_COLOR = Color.TRANSPARENT;

    private final SystemBarConfig mConfig;
    private boolean mStatusBarAvailable;
    private boolean mNavBarAvailable;
    private boolean mStatusBarTintEnabled;
    private boolean mNavBarTintEnabled;
    private View mStatusBarTintView;
    private View mNavBarTintView;

    // True when running on MIUI V6, which exposes a proprietary API for
    // switching the status bar into dark mode (see setStatusBarDarkMode).
    private static boolean sIsMiuiV6;

    static {
        // Detect MIUI V6 via the (hidden) android.os.SystemProperties API.
        // Reflection failure simply leaves sIsMiuiV6 false.
        try {
            Class<?> sysClass = Class.forName("android.os.SystemProperties");
            Method getStringMethod = sysClass.getDeclaredMethod("get", String.class);
            sIsMiuiV6 = "V6".equals((String) getStringMethod.invoke(sysClass, "ro.miui.ui.version.name"));
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Constructor. Call this in the host activity onCreate method after its
     * content view has been set. You should always create new instances when
     * the host activity is recreated.
     *
     * @param activity The host activity.
     */
    @TargetApi(19)
    public SystemBarTintManager(Activity activity) {

        Window win = activity.getWindow();
        ViewGroup decorViewGroup = (ViewGroup) win.getDecorView();

        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
            // check theme attrs
            int[] attrs = {android.R.attr.windowTranslucentStatus,
                    android.R.attr.windowTranslucentNavigation};
            TypedArray a = activity.obtainStyledAttributes(attrs);
            try {
                mStatusBarAvailable = a.getBoolean(0, false);
                mNavBarAvailable = a.getBoolean(1, false);
            } finally {
                a.recycle();
            }

            // check window flags (translucency may also be requested
            // programmatically rather than via the theme)
            WindowManager.LayoutParams winParams = win.getAttributes();
            int bits = WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS;
            if ((winParams.flags & bits) != 0) {
                mStatusBarAvailable = true;
            }
            bits = WindowManager.LayoutParams.FLAG_TRANSLUCENT_NAVIGATION;
            if ((winParams.flags & bits) != 0) {
                mNavBarAvailable = true;
            }
        }

        mConfig = new SystemBarConfig(activity, mStatusBarAvailable, mNavBarAvailable);
        // device might not have virtual navigation keys
        if (!mConfig.hasNavigtionBar()) {
            mNavBarAvailable = false;
        }

        if (mStatusBarAvailable) {
            setupStatusBarView(activity, decorViewGroup);
        }
        if (mNavBarAvailable) {
            setupNavBarView(activity, decorViewGroup);
        }
    }

    /**
     * Get the height of the system status bar for the current configuration.
     *
     * @return The status bar height (in pixels).
     */
    public int getStatusBarHeight() {
        return mConfig.getStatusBarHeight();
    }

    /**
     * Get the height of the system navigation bar for the current configuration.
     *
     * @return The navigation bar height (in pixels); 0 without soft keys.
     */
    public int getNavigationBarHeight() {
        return mConfig.getNavigationBarHeight();
    }

    /**
     * Enable tinting of the system status bar.
     *
     * If the platform is running Jelly Bean or earlier, or translucent system
     * UI modes have not been enabled in either the theme or via window flags,
     * then this method does nothing.
     *
     * @param enabled True to enable tinting, false to disable it (default).
     */
    public void setStatusBarTintEnabled(boolean enabled) {
        mStatusBarTintEnabled = enabled;
        if (mStatusBarAvailable) {
            mStatusBarTintView.setVisibility(enabled ? View.VISIBLE : View.GONE);
        }
    }

    /**
     * Set status bar dark mode. Only has an effect on MIUI V6 devices, which
     * expose a proprietary setExtraFlags window API; a no-op elsewhere.
     *
     * @param darkmode True to use dark status bar icons, false for light.
     * @param activity The host activity.
     */
    public void setStatusBarDarkMode(boolean darkmode, Activity activity) {
        if (sIsMiuiV6) {
            Class<? extends Window> clazz = activity.getWindow().getClass();
            try {
                int darkModeFlag = 0;
                Class<?> layoutParams = Class.forName("android.view.MiuiWindowManager$LayoutParams");
                Field field = layoutParams.getField("EXTRA_FLAG_STATUS_BAR_DARK_MODE");
                darkModeFlag = field.getInt(layoutParams);
                Method extraFlagField = clazz.getMethod("setExtraFlags", int.class, int.class);
                extraFlagField.invoke(activity.getWindow(), darkmode ? darkModeFlag : 0, darkModeFlag);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Enable tinting of the system navigation bar.
     *
     * If the platform does not have soft navigation keys, is running Jelly Bean
     * or earlier, or translucent system UI modes have not been enabled in either
     * the theme or via window flags, then this method does nothing.
     *
     * @param enabled True to enable tinting, false to disable it (default).
     */
    public void setNavigationBarTintEnabled(boolean enabled) {
        mNavBarTintEnabled = enabled;
        if (mNavBarAvailable) {
            mNavBarTintView.setVisibility(enabled ? View.VISIBLE : View.GONE);
        }
    }

    /**
     * Apply the specified color tint to all system UI bars.
     *
     * @param color The color of the background tint.
     */
    public void setTintColor(int color) {
        setStatusBarTintColor(color);
        setNavigationBarTintColor(color);
    }

    /**
     * Apply the specified drawable or color resource to all system UI bars.
     *
     * @param res The identifier of the resource.
     */
    public void setTintResource(int res) {
        setStatusBarTintResource(res);
        setNavigationBarTintResource(res);
    }

    /**
     * Apply the specified drawable to all system UI bars.
     *
     * @param drawable The drawable to use as the background, or null to remove it.
     */
    public void setTintDrawable(Drawable drawable) {
        setStatusBarTintDrawable(drawable);
        setNavigationBarTintDrawable(drawable);
    }

    /**
     * Apply the specified alpha to all system UI bars.
     *
     * @param alpha The alpha to use
     */
    public void setTintAlpha(float alpha) {
        setStatusBarAlpha(alpha);
        setNavigationBarAlpha(alpha);
    }

    /**
     * Apply the specified color tint to the system status bar.
     *
     * @param color The color of the background tint.
     */
    public void setStatusBarTintColor(int color) {
        if (mStatusBarAvailable) {
            mStatusBarTintView.setBackgroundColor(color);
        }
    }

    /**
     * Apply the specified drawable or color resource to the system status bar.
     *
     * @param res The identifier of the resource.
     */
    public void setStatusBarTintResource(int res) {
        if (mStatusBarAvailable) {
            mStatusBarTintView.setBackgroundResource(res);
        }
    }

    /**
     * Apply the specified drawable to the system status bar.
     *
     * @param drawable The drawable to use as the background, or null to remove it.
     */
    @SuppressWarnings("deprecation")
    public void setStatusBarTintDrawable(Drawable drawable) {
        if (mStatusBarAvailable) {
            mStatusBarTintView.setBackgroundDrawable(drawable);
        }
    }

    /**
     * Apply the specified alpha to the system status bar.
     *
     * @param alpha The alpha to use
     */
    @TargetApi(11)
    public void setStatusBarAlpha(float alpha) {
        if (mStatusBarAvailable && Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
            mStatusBarTintView.setAlpha(alpha);
        }
    }

    /**
     * Apply the specified color tint to the system navigation bar.
     *
     * @param color The color of the background tint.
     */
    public void setNavigationBarTintColor(int color) {
        if (mNavBarAvailable) {
            mNavBarTintView.setBackgroundColor(color);
        }
    }

    /**
     * Apply the specified drawable or color resource to the system navigation bar.
     *
     * @param res The identifier of the resource.
     */
    public void setNavigationBarTintResource(int res) {
        if (mNavBarAvailable) {
            mNavBarTintView.setBackgroundResource(res);
        }
    }

    /**
     * Apply the specified drawable to the system navigation bar.
     *
     * @param drawable The drawable to use as the background, or null to remove it.
     */
    @SuppressWarnings("deprecation")
    public void setNavigationBarTintDrawable(Drawable drawable) {
        if (mNavBarAvailable) {
            mNavBarTintView.setBackgroundDrawable(drawable);
        }
    }

    /**
     * Apply the specified alpha to the system navigation bar.
     *
     * @param alpha The alpha to use
     */
    @TargetApi(11)
    public void setNavigationBarAlpha(float alpha) {
        if (mNavBarAvailable && Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
            mNavBarTintView.setAlpha(alpha);
        }
    }

    /**
     * Get the system bar configuration.
     *
     * @return The system bar configuration for the current device configuration.
     */
    public SystemBarConfig getConfig() {
        return mConfig;
    }

    /**
     * Is tinting enabled for the system status bar?
     *
     * @return True if enabled, False otherwise.
     */
    public boolean isStatusBarTintEnabled() {
        return mStatusBarTintEnabled;
    }

    /**
     * Is tinting enabled for the system navigation bar?
     *
     * @return True if enabled, False otherwise.
     */
    public boolean isNavBarTintEnabled() {
        return mNavBarTintEnabled;
    }

    // Adds an initially-hidden tint view behind the (translucent) status bar.
    private void setupStatusBarView(Context context, ViewGroup decorViewGroup) {
        mStatusBarTintView = new View(context);
        LayoutParams params = new LayoutParams(LayoutParams.MATCH_PARENT, mConfig.getStatusBarHeight());
        params.gravity = Gravity.TOP;
        if (mNavBarAvailable && !mConfig.isNavigationAtBottom()) {
            // Leave room for a vertically-placed navigation bar on the right.
            params.rightMargin = mConfig.getNavigationBarWidth();
        }
        mStatusBarTintView.setLayoutParams(params);
        mStatusBarTintView.setBackgroundColor(DEFAULT_TINT_COLOR);
        mStatusBarTintView.setVisibility(View.GONE);
        decorViewGroup.addView(mStatusBarTintView);
    }

    // Adds an initially-hidden tint view behind the (translucent) navigation
    // bar, placed at the bottom or on the right depending on configuration.
    private void setupNavBarView(Context context, ViewGroup decorViewGroup) {
        mNavBarTintView = new View(context);
        LayoutParams params;
        if (mConfig.isNavigationAtBottom()) {
            params = new LayoutParams(LayoutParams.MATCH_PARENT, mConfig.getNavigationBarHeight());
            params.gravity = Gravity.BOTTOM;
        } else {
            params = new LayoutParams(mConfig.getNavigationBarWidth(), LayoutParams.MATCH_PARENT);
            params.gravity = Gravity.RIGHT;
        }
        mNavBarTintView.setLayoutParams(params);
        mNavBarTintView.setBackgroundColor(DEFAULT_TINT_COLOR);
        mNavBarTintView.setVisibility(View.GONE);
        decorViewGroup.addView(mNavBarTintView);
    }

    /**
     * Class which describes system bar sizing and other characteristics for the current
     * device configuration.
     */
    public static class SystemBarConfig {

        private static final String STATUS_BAR_HEIGHT_RES_NAME = "status_bar_height";
        private static final String NAV_BAR_HEIGHT_RES_NAME = "navigation_bar_height";
        private static final String NAV_BAR_HEIGHT_LANDSCAPE_RES_NAME = "navigation_bar_height_landscape";
        private static final String NAV_BAR_WIDTH_RES_NAME = "navigation_bar_width";

        private final boolean mTranslucentStatusBar;
        private final boolean mTranslucentNavBar;
        private final int mStatusBarHeight;
        private final int mActionBarHeight;
        private final boolean mHasNavigationBar;
        private final int mNavigationBarHeight;
        private final int mNavigationBarWidth;
        private final boolean mInPortrait;
        private final float mSmallestWidthDp;

        private SystemBarConfig(Activity activity, boolean translucentStatusBar, boolean traslucentNavBar) {
            Resources res = activity.getResources();
            mInPortrait = (res.getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT);
            mSmallestWidthDp = getSmallestWidthDp(activity);
            mStatusBarHeight = getInternalDimensionSize(res, STATUS_BAR_HEIGHT_RES_NAME);
            mActionBarHeight = getActionBarHeight(activity);
            mNavigationBarHeight = getNavigationBarHeight(activity);
            mNavigationBarWidth = getNavigationBarWidth(activity);
            // a zero height is taken to mean "no soft navigation keys"
            mHasNavigationBar = (mNavigationBarHeight > 0);
            mTranslucentStatusBar = translucentStatusBar;
            mTranslucentNavBar = traslucentNavBar;
        }

        @TargetApi(14)
        private int getActionBarHeight(Context context) {
            int result = 0;
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
                TypedValue tv = new TypedValue();
                // resolveAttribute() returns false when the theme does not
                // define actionBarSize; blindly reading tv.resourceId (0)
                // would throw Resources.NotFoundException, so only read it
                // when the attribute resolved to a valid resource.
                if (context.getTheme().resolveAttribute(android.R.attr.actionBarSize, tv, true)
                        && tv.resourceId != 0) {
                    result = context.getResources().getDimensionPixelSize(tv.resourceId);
                }
            }
            return result;
        }

        @TargetApi(14)
        private int getNavigationBarHeight(Context context) {
            Resources res = context.getResources();
            int result = 0;
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
                // a permanent menu key implies hardware navigation keys
                if (!ViewConfiguration.get(context).hasPermanentMenuKey()) {
                    String key;
                    if (mInPortrait) {
                        key = NAV_BAR_HEIGHT_RES_NAME;
                    } else {
                        key = NAV_BAR_HEIGHT_LANDSCAPE_RES_NAME;
                    }
                    return getInternalDimensionSize(res, key);
                }
            }
            return result;
        }

        @TargetApi(14)
        private int getNavigationBarWidth(Context context) {
            Resources res = context.getResources();
            int result = 0;
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
                if (!ViewConfiguration.get(context).hasPermanentMenuKey()) {
                    return getInternalDimensionSize(res, NAV_BAR_WIDTH_RES_NAME);
                }
            }
            return result;
        }

        // Looks up a framework-internal "dimen" resource by name; 0 if absent.
        private int getInternalDimensionSize(Resources res, String key) {
            int result = 0;
            int resourceId = res.getIdentifier(key, "dimen", "android");
            if (resourceId > 0) {
                result = res.getDimensionPixelSize(resourceId);
            }
            return result;
        }

        @SuppressLint("NewApi")
        private float getSmallestWidthDp(Activity activity) {
            DisplayMetrics metrics = new DisplayMetrics();
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
                activity.getWindowManager().getDefaultDisplay().getRealMetrics(metrics);
            } else {
                // TODO this is not correct, but we don't really care pre-kitkat
                activity.getWindowManager().getDefaultDisplay().getMetrics(metrics);
            }
            float widthDp = metrics.widthPixels / metrics.density;
            float heightDp = metrics.heightPixels / metrics.density;
            return Math.min(widthDp, heightDp);
        }

        /**
         * Should a navigation bar appear at the bottom of the screen in the current
         * device configuration? A navigation bar may appear on the right side of
         * the screen in certain configurations.
         *
         * @return True if navigation should appear at the bottom of the screen, False otherwise.
         */
        public boolean isNavigationAtBottom() {
            return (mSmallestWidthDp >= 600 || mInPortrait);
        }

        /**
         * Get the height of the system status bar.
         *
         * @return The height of the status bar (in pixels).
         */
        public int getStatusBarHeight() {
            return mStatusBarHeight;
        }

        /**
         * Get the height of the action bar.
         *
         * @return The height of the action bar (in pixels).
         */
        public int getActionBarHeight() {
            return mActionBarHeight;
        }

        /**
         * Does this device have a system navigation bar?
         *
         * @return True if this device uses soft key navigation, False otherwise.
         */
        public boolean hasNavigtionBar() {
            return mHasNavigationBar;
        }

        /**
         * Get the height of the system navigation bar.
         *
         * @return The height of the navigation bar (in pixels). If the device does not have
         * soft navigation keys, this will always return 0.
         */
        public int getNavigationBarHeight() {
            return mNavigationBarHeight;
        }

        /**
         * Get the width of the system navigation bar when it is placed vertically on the screen.
         *
         * @return The width of the navigation bar (in pixels). If the device does not have
         * soft navigation keys, this will always return 0.
         */
        public int getNavigationBarWidth() {
            return mNavigationBarWidth;
        }

        /**
         * Get the layout inset for any system UI that appears at the top of the screen.
         *
         * @param withActionBar True to include the height of the action bar, False otherwise.
         * @return The layout inset (in pixels).
         */
        public int getPixelInsetTop(boolean withActionBar) {
            return (mTranslucentStatusBar ? mStatusBarHeight : 0) + (withActionBar ? mActionBarHeight : 0);
        }

        /**
         * Get the layout inset for any system UI that appears at the bottom of the screen.
         *
         * @return The layout inset (in pixels).
         */
        public int getPixelInsetBottom() {
            if (mTranslucentNavBar && isNavigationAtBottom()) {
                return mNavigationBarHeight;
            } else {
                return 0;
            }
        }

        /**
         * Get the layout inset for any system UI that appears at the right of the screen.
         *
         * @return The layout inset (in pixels).
         */
        public int getPixelInsetRight() {
            if (mTranslucentNavBar && !isNavigationAtBottom()) {
                return mNavigationBarWidth;
            } else {
                return 0;
            }
        }
    }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.orc.reader;

import com.facebook.presto.memory.context.AggregatedMemoryContext;
import com.facebook.presto.memory.context.LocalMemoryContext;
import com.facebook.presto.orc.StreamDescriptor;
import com.facebook.presto.orc.TupleDomainFilter;
import com.facebook.presto.orc.metadata.ColumnEncoding;
import com.facebook.presto.orc.stream.BooleanInputStream;
import com.facebook.presto.orc.stream.InputStreamSource;
import com.facebook.presto.orc.stream.InputStreamSources;
import com.facebook.presto.spi.Subfield;
import com.facebook.presto.spi.block.Block;
import com.facebook.presto.spi.block.BlockLease;
import com.facebook.presto.spi.block.ClosingBlockLease;
import com.facebook.presto.spi.block.RowBlock;
import com.facebook.presto.spi.block.RunLengthEncodedBlock;
import com.facebook.presto.spi.type.Type;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Maps;
import org.joda.time.DateTimeZone;
import org.openjdk.jol.info.ClassLayout;

import javax.annotation.Nullable;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Optional;
import java.util.Set;

import static com.facebook.presto.array.Arrays.ensureCapacity;
import static com.facebook.presto.orc.TupleDomainFilter.IS_NOT_NULL;
import static com.facebook.presto.orc.TupleDomainFilter.IS_NULL;
import static com.facebook.presto.orc.metadata.Stream.StreamKind.PRESENT;
import static com.facebook.presto.orc.stream.MissingInputStreamSource.missingStreamSource;
import static com.google.common.base.MoreObjects.toStringHelper;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.collect.ImmutableMap.toImmutableMap;
import static com.google.common.collect.ImmutableSet.toImmutableSet;
import static io.airlift.slice.SizeOf.sizeOf;
import static java.util.Objects.requireNonNull;

/**
 * Selective ORC stream reader for ROW (struct) columns. Delegates reading of
 * the individual fields to per-field nested readers, applies any top-level
 * IS NULL / IS NOT NULL filter itself, and combines the surviving positions
 * into a RowBlock.
 */
public class StructSelectiveStreamReader
        implements SelectiveStreamReader
{
    private static final int INSTANCE_SIZE = ClassLayout.parseClass(StructSelectiveStreamReader.class).instanceSize();

    private final StreamDescriptor streamDescriptor;
    // Derived from the filters in the constructor: which of null / non-null
    // rows may appear in the output.
    private final boolean nullsAllowed;
    private final boolean nonNullsAllowed;
    // True when the caller asked for this column's values (outputType present).
    private final boolean outputRequired;
    @Nullable
    private final Type outputType;
    // Per-field readers keyed by lower-cased field name.
    private final Map<String, SelectiveStreamReader> nestedReaders;
    // Same readers, ordered so that filtered fields are read first (they can
    // eliminate positions before the unfiltered fields are read).
    private final SelectiveStreamReader[] orderedNestedReaders;
    private final LocalMemoryContext systemMemoryContext;

    // Top-level stream offset consumed so far within the current row group.
    private int readOffset;
    // Offset into the nested (non-null-row) streams consumed so far.
    private int nestedReadOffset;

    private InputStreamSource<BooleanInputStream> presentStreamSource = missingStreamSource(BooleanInputStream.class);
    // Null when the column has no nulls in this row group.
    @Nullable
    private BooleanInputStream presentStream;

    private boolean rowGroupOpen;
    private boolean[] nulls;
    private int[] outputPositions;
    private int outputPositionCount;
    // True while outputPositions aliases the caller's array and must be
    // copied before any in-place mutation.
    private boolean outputPositionsReadOnly;
    private boolean allNulls;

    // Positions translated into the nested readers' coordinate space
    // (top-level position minus preceding null count).
    private int[] nestedPositions;
    private int[] nestedOutputPositions;
    private int nestedOutputPositionCount;

    // Guards against reading while a BlockLease from getBlockView is open.
    private boolean valuesInUse;

    public StructSelectiveStreamReader(
            StreamDescriptor streamDescriptor,
            Map<Subfield, TupleDomainFilter> filters,
            List<Subfield> requiredSubfields,
            Optional<Type> outputType,
            DateTimeZone hiveStorageTimeZone,
            AggregatedMemoryContext systemMemoryContext)
    {
        this.streamDescriptor = requireNonNull(streamDescriptor, "streamDescriptor is null");
        this.systemMemoryContext = requireNonNull(systemMemoryContext, "systemMemoryContext is null").newLocalMemoryContext(StructSelectiveStreamReader.class.getSimpleName());
        this.outputRequired = requireNonNull(outputType, "outputType is null").isPresent();
        this.outputType = outputType.orElse(null);

        if (filters.isEmpty()) {
            nullsAllowed = true;
            nonNullsAllowed = true;
        }
        else {
            Optional<TupleDomainFilter> topLevelFilter = getTopLevelFilter(filters);
            if (topLevelFilter.isPresent()) {
                // A top-level filter is IS NULL or IS NOT NULL; exactly one of
                // the two row kinds survives.
                nullsAllowed = topLevelFilter.get() == IS_NULL;
                nonNullsAllowed = !nullsAllowed;
            }
            else {
                // Only field-level filters: a null struct survives only if
                // every field filter accepts null.
                nullsAllowed = filters.values().stream().allMatch(TupleDomainFilter::testNull);
                nonNullsAllowed = true;
            }
        }

        Optional<List<Type>> nestedTypes = outputType.map(type -> type.getTypeParameters());
        List<StreamDescriptor> nestedStreams = streamDescriptor.getNestedStreams();

        Optional<Map<String, List<Subfield>>> requiredFields = getRequiredFields(requiredSubfields);

        // TODO streamDescriptor may be missing some fields (due to schema evolution, e.g. add field?)
        // TODO fields in streamDescriptor may be out of order (due to schema evolution, e.g. remove field?)

        // Names of fields that carry a filter; these must be read even when
        // the field itself is not part of the requested output.
        Set<String> fieldsWithFilters = filters.keySet().stream()
                .map(Subfield::getPath)
                .filter(path -> path.size() > 0)
                .map(path -> path.get(0))
                .filter(Subfield.NestedField.class::isInstance)
                .map(Subfield.NestedField.class::cast)
                .map(Subfield.NestedField::getName)
                .collect(toImmutableSet());

        if (outputRequired || !fieldsWithFilters.isEmpty()) {
            ImmutableMap.Builder<String, SelectiveStreamReader> nestedReaders = ImmutableMap.builder();
            for (int i = 0; i < nestedStreams.size(); i++) {
                StreamDescriptor nestedStream = nestedStreams.get(i);
                String fieldName = nestedStream.getFieldName().toLowerCase(Locale.ENGLISH);

                Optional<Type> fieldOutputType = nestedTypes.isPresent() ? Optional.of(nestedTypes.get().get(i)) : Optional.empty();

                // Absent requiredFields means "all fields are required".
                boolean requiredField = requiredFields.map(names -> names.containsKey(fieldName)).orElse(true);
                if (requiredField || fieldsWithFilters.contains(fieldName)) {
                    // Filters addressed to this field, re-rooted at the field.
                    Map<Subfield, TupleDomainFilter> nestedFilters = filters.entrySet().stream()
                            .filter(entry -> entry.getKey().getPath().size() > 0)
                            .filter(entry -> ((Subfield.NestedField) entry.getKey().getPath().get(0)).getName().equalsIgnoreCase(fieldName))
                            .collect(toImmutableMap(entry -> entry.getKey().tail(fieldName), Map.Entry::getValue));
                    List<Subfield> nestedRequiredSubfields = requiredFields.map(names -> names.get(fieldName)).orElse(ImmutableList.of());
                    SelectiveStreamReader nestedReader = SelectiveStreamReaders.createStreamReader(
                            nestedStream,
                            nestedFilters,
                            fieldOutputType,
                            nestedRequiredSubfields,
                            hiveStorageTimeZone,
                            systemMemoryContext.newAggregatedMemoryContext());
                    nestedReaders.put(fieldName, nestedReader);
                }
                else {
                    // Field neither required nor filtered: stand in a reader
                    // that produces null blocks without touching the data.
                    nestedReaders.put(fieldName, new PruningStreamReader(nestedStream, fieldOutputType));
                }
            }
            this.nestedReaders = nestedReaders.build();
            this.orderedNestedReaders = orderNestedReaders(this.nestedReaders, fieldsWithFilters);
        }
        else {
            // No need to read the elements when output is not required and the filter is a simple IS [NOT] NULL
            this.nestedReaders = ImmutableMap.of();
            this.orderedNestedReaders = new SelectiveStreamReader[0];
        }
    }

    // Returns the nested readers ordered with filtered fields first, so that
    // filtering fields can shrink the position set before other fields read.
    private static SelectiveStreamReader[] orderNestedReaders(Map<String, SelectiveStreamReader> nestedReaders, Set<String> fieldsWithFilters)
    {
        SelectiveStreamReader[] order = new SelectiveStreamReader[nestedReaders.size()];
        int index = 0;
        for (String fieldName : fieldsWithFilters) {
            order[index++] = nestedReaders.get(fieldName);
        }
        for (Map.Entry<String, SelectiveStreamReader> entry : nestedReaders.entrySet()) {
            if (!fieldsWithFilters.contains(entry.getKey())) {
                order[index++] = entry.getValue();
            }
        }
        return order;
    }

    @Override
    public int read(int offset, int[] positions, int positionCount)
            throws IOException
    {
        checkArgument(positionCount > 0, "positionCount must be greater than zero");
        checkState(!valuesInUse, "BlockLease hasn't been closed yet");

        if (!rowGroupOpen) {
            openRowGroup();
        }

        allNulls = false;

        if (!nullsAllowed || !nonNullsAllowed) {
            // Some positions may be dropped, so we need a mutable copy.
            outputPositions = ensureCapacity(outputPositions, positionCount);
        }
        else {
            // Everything passes the null filter; alias the caller's array.
            outputPositions = positions;
            outputPositionsReadOnly = true;
        }

        systemMemoryContext.setBytes(getRetainedSizeInBytes());

        if (presentStream == null) {
            // no nulls
            if (nonNullsAllowed) {
                if (nestedReaders.isEmpty()) {
                    outputPositions = positions;
                    outputPositionCount = positionCount;
                    outputPositionsReadOnly = true;
                }
                else {
                    readNestedStreams(offset, positions, positionCount);
                    outputPositions = nestedOutputPositions;
                    outputPositionCount = nestedOutputPositionCount;
                }
                readOffset = offset + positions[positionCount - 1];
            }
            else {
                // IS NULL filter on a null-free range: nothing qualifies.
                outputPositionCount = 0;
            }
        }
        else {
            // some or all nulls
            if (readOffset < offset) {
                // Skip rows between the last read and this one; only non-null
                // rows advance the nested streams.
                nestedReadOffset += presentStream.countBitsSet(offset - readOffset);
            }

            nulls = ensureCapacity(nulls, positionCount);
            nestedPositions = ensureCapacity(nestedPositions, positionCount);
            outputPositionCount = 0;

            int streamPosition = 0;
            int nestedPositionCount = 0;
            int nullCount = 0;

            for (int i = 0; i < positionCount; i++) {
                int position = positions[i];
                if (position > streamPosition) {
                    // Skip over unselected rows, tracking how many were null.
                    int nonNullCount = presentStream.countBitsSet(position - streamPosition);
                    nullCount += position - streamPosition - nonNullCount;
                    streamPosition = position;
                }
                streamPosition++;

                if (presentStream.nextBit()) {
                    // not null
                    if (nonNullsAllowed) {
                        nulls[outputPositionCount] = false;
                        if (!nullsAllowed) {
                            outputPositions[outputPositionCount] = position;
                        }
                        outputPositionCount++;
                        // Translate into nested-stream coordinates.
                        nestedPositions[nestedPositionCount++] = position - nullCount;
                    }
                }
                else {
                    // null
                    if (nullsAllowed) {
                        nulls[outputPositionCount] = true;
                        if (!nonNullsAllowed) {
                            outputPositions[outputPositionCount] = position;
                        }
                        outputPositionCount++;
                    }
                    nullCount++;
                }
            }

            if (!nestedReaders.isEmpty()) {
                if (nestedPositionCount == 0) {
                    allNulls = true;
                }
                else {
                    readNestedStreams(nestedReadOffset, nestedPositions, nestedPositionCount);
                    // Drop output positions whose non-null rows were filtered
                    // out by the nested readers.
                    pruneOutputPositions(nestedPositionCount);
                }
                nestedReadOffset += streamPosition - nullCount;
            }
            readOffset = offset + streamPosition;
        }
        return outputPositionCount;
    }

    // Removes from outputPositions/nulls the non-null rows that the nested
    // readers rejected, keeping the arrays parallel. nestedPositions holds the
    // positions that were offered; nestedOutputPositions those that survived.
    private void pruneOutputPositions(int nestedPositionCount)
    {
        if (nestedOutputPositionCount == 0) {
            allNulls = true;
        }

        if (nestedOutputPositionCount < nestedPositionCount) {
            if (outputPositionsReadOnly) {
                // About to mutate in place; stop aliasing the caller's array.
                outputPositions = Arrays.copyOf(outputPositions, outputPositionCount);
                outputPositionsReadOnly = false;
            }
            int nestedIndex = 0;
            int skipped = 0;
            int nestedOutputIndex = 0;
            for (int i = 0; i < outputPositionCount; i++) {
                outputPositions[i - skipped] = outputPositions[i];
                if (nullsAllowed) {
                    nulls[i - skipped] = nulls[i];
                    if (nulls[i]) {
                        // Null rows are never rejected by nested readers.
                        continue;
                    }
                }
                if (nestedOutputIndex >= nestedOutputPositionCount) {
                    skipped++;
                }
                else if (nestedPositions[nestedIndex] < nestedOutputPositions[nestedOutputIndex]) {
                    skipped++;
                }
                else {
                    nestedOutputIndex++;
                }
                nestedIndex++;
            }
        }

        outputPositionCount -= nestedPositionCount - nestedOutputPositionCount;
    }

    // Runs the nested readers in order; each reader further narrows the
    // position set. The surviving positions end up in nestedOutputPositions.
    private void readNestedStreams(int offset, int[] positions, int positionCount)
            throws IOException
    {
        int[] readPositions = positions;
        int readPositionCount = positionCount;
        for (SelectiveStreamReader reader : orderedNestedReaders) {
            readPositionCount = reader.read(offset, readPositions, readPositionCount);
            if (readPositionCount == 0) {
                break;
            }
            readPositions = reader.getReadPositions();
        }
        if (readPositionCount > 0) {
            nestedOutputPositions = ensureCapacity(nestedOutputPositions, positionCount);
            System.arraycopy(readPositions, 0, nestedOutputPositions, 0, readPositionCount);
        }
        nestedOutputPositionCount = readPositionCount;
    }

    private void openRowGroup()
            throws IOException
    {
        presentStream = presentStreamSource.openStream();
        rowGroupOpen = true;
    }

    @Override
    public int[] getReadPositions()
    {
        return outputPositions;
    }

    @Override
    public Block getBlock(int[] positions, int positionCount)
    {
        checkArgument(outputPositionCount > 0, "outputPositionCount must be greater than zero");
        checkState(outputRequired, "This stream reader doesn't produce output");
        checkState(positionCount <= outputPositionCount, "Not enough values");
        checkState(!valuesInUse, "BlockLease hasn't been closed yet");

        if (allNulls) {
            return createNullBlock(outputType, positionCount);
        }

        boolean includeNulls = nullsAllowed && presentStream != null;
        if (outputPositionCount == positionCount) {
            // Caller wants every read position: no compaction needed.
            Block block = RowBlock.fromFieldBlocks(positionCount, Optional.ofNullable(includeNulls ? nulls : null), getFieldBlocks());
            nulls = null;
            return block;
        }

        boolean[] nullsCopy = null;
        if (includeNulls) {
            nullsCopy = new boolean[positionCount];
        }

        // Walk outputPositions and the caller's (sorted) subset in lock step,
        // compacting nestedOutputPositions to match the requested subset.
        int positionIndex = 0;
        int nextPosition = positions[positionIndex];
        int nestedIndex = 0;
        nestedOutputPositionCount = 0;
        for (int i = 0; i < outputPositionCount; i++) {
            if (outputPositions[i] < nextPosition) {
                if (!includeNulls || !nulls[i]) {
                    nestedIndex++;
                }
                continue;
            }

            assert outputPositions[i] == nextPosition;

            if (!includeNulls || !nulls[i]) {
                nestedOutputPositions[nestedOutputPositionCount++] = nestedOutputPositions[nestedIndex];
                nestedIndex++;
            }
            if (nullsCopy != null) {
                nullsCopy[positionIndex] = this.nulls[i];
            }

            positionIndex++;
            if (positionIndex >= positionCount) {
                break;
            }
            nextPosition = positions[positionIndex];
        }

        if (nestedOutputPositionCount == 0) {
            // Only nulls remain in the requested subset.
            return createNullBlock(outputType, positionCount);
        }

        return RowBlock.fromFieldBlocks(positionCount, Optional.ofNullable(includeNulls ? nullsCopy : null), getFieldBlocks());
    }

    // Collects one block per field from the nested readers, restricted to the
    // surviving nested positions.
    private Block[] getFieldBlocks()
    {
        Block[] blocks = new Block[nestedReaders.size()];
        int i = 0;
        for (SelectiveStreamReader reader : nestedReaders.values()) {
            blocks[i++] = reader.getBlock(nestedOutputPositions, nestedOutputPositionCount);
        }
        return blocks;
    }

    // An RLE block of positionCount nulls of the given type.
    private static RunLengthEncodedBlock createNullBlock(Type type, int positionCount)
    {
        return new RunLengthEncodedBlock(type.createBlockBuilder(null, 1).appendNull().build(), positionCount);
    }

    @Override
    public BlockLease getBlockView(int[] positions, int positionCount)
    {
        checkArgument(outputPositionCount > 0, "outputPositionCount must be greater than zero");
        checkState(outputRequired, "This stream reader doesn't produce output");
        checkState(positionCount <= outputPositionCount, "Not enough values");
        checkState(!valuesInUse, "BlockLease hasn't been closed yet");

        if (allNulls) {
            return newLease(createNullBlock(outputType, positionCount));
        }

        boolean includeNulls = nullsAllowed && presentStream != null;
        if (positionCount != outputPositionCount) {
            // Compact internal state in place to the requested subset.
            compactValues(positions, positionCount, includeNulls);
            if (nestedOutputPositionCount == 0) {
                allNulls = true;
                return newLease(createNullBlock(outputType, positionCount));
            }
        }

        BlockLease[] fieldBlockLeases = new BlockLease[nestedReaders.size()];
        Block[] fieldBlocks = new Block[nestedReaders.size()];
        int i = 0;
        for (SelectiveStreamReader reader : nestedReaders.values()) {
            fieldBlockLeases[i] = reader.getBlockView(nestedOutputPositions, nestedOutputPositionCount);
            fieldBlocks[i] = fieldBlockLeases[i].get();
            i++;
        }
        // The returned lease closes all field leases and clears valuesInUse.
        return newLease(RowBlock.fromFieldBlocks(positionCount, Optional.ofNullable(includeNulls ? nulls : null), fieldBlocks), fieldBlockLeases);
    }

    // In-place variant of the compaction in getBlock: shrinks outputPositions,
    // nulls and nestedOutputPositions to the caller's (sorted) subset.
    private void compactValues(int[] positions, int positionCount, boolean compactNulls)
    {
        if (outputPositionsReadOnly) {
            outputPositions = Arrays.copyOf(outputPositions, outputPositionCount);
            outputPositionsReadOnly = false;
        }

        int positionIndex = 0;
        int nextPosition = positions[positionIndex];
        int nestedIndex = 0;
        nestedOutputPositionCount = 0;
        for (int i = 0; i < outputPositionCount; i++) {
            if (outputPositions[i] < nextPosition) {
                if (!compactNulls || !nulls[i]) {
                    nestedIndex++;
                }
                continue;
            }

            assert outputPositions[i] == nextPosition;

            if (!compactNulls || !nulls[i]) {
                nestedOutputPositions[nestedOutputPositionCount++] = nestedOutputPositions[nestedIndex];
                nestedIndex++;
            }
            if (compactNulls) {
                nulls[positionIndex] = nulls[i];
            }
            outputPositions[positionIndex] = nextPosition;

            positionIndex++;
            if (positionIndex >= positionCount) {
                break;
            }
            nextPosition = positions[positionIndex];
        }

        outputPositionCount = positionCount;
    }

    // Wraps a block in a lease that closes the field leases and releases the
    // valuesInUse guard when the caller is done with it.
    private BlockLease newLease(Block block, BlockLease... fieldBlockLeases)
    {
        valuesInUse = true;
        return ClosingBlockLease.newLease(block, () -> {
            for (BlockLease lease : fieldBlockLeases) {
                lease.close();
            }
            valuesInUse = false;
        });
    }

    @Override
    public void throwAnyError(int[] positions, int positionCount)
    {
        // Struct reader itself produces no deferred errors.
    }

    @Override
    public String toString()
    {
        return toStringHelper(this)
                .addValue(streamDescriptor)
                .toString();
    }

    @Override
    public void close()
    {
        systemMemoryContext.close();
    }

    @Override
    public void startStripe(InputStreamSources dictionaryStreamSources, List<ColumnEncoding> encoding)
            throws IOException
    {
        presentStreamSource = missingStreamSource(BooleanInputStream.class);
        readOffset = 0;
        nestedReadOffset = 0;
        presentStream = null;
        rowGroupOpen = false;
        for (SelectiveStreamReader reader : nestedReaders.values()) {
            reader.startStripe(dictionaryStreamSources, encoding);
        }
    }

    @Override
    public void startRowGroup(InputStreamSources dataStreamSources)
            throws IOException
    {
        presentStreamSource = dataStreamSources.getInputStreamSource(streamDescriptor, PRESENT, BooleanInputStream.class);
        readOffset = 0;
        nestedReadOffset = 0;
        presentStream = null;
        rowGroupOpen = false;
        for (SelectiveStreamReader reader : nestedReaders.values()) {
            reader.startRowGroup(dataStreamSources);
        }
    }

    @Override
    public long getRetainedSizeInBytes()
    {
        return INSTANCE_SIZE + sizeOf(outputPositions) + sizeOf(nestedPositions) + sizeOf(nestedOutputPositions) + sizeOf(nulls) + nestedReaders.values().stream()
                .mapToLong(SelectiveStreamReader::getRetainedSizeInBytes)
                .sum();
    }

    // Extracts the single empty-path (whole-column) filter, if any; such a
    // filter must be IS NULL or IS NOT NULL.
    private static Optional<TupleDomainFilter> getTopLevelFilter(Map<Subfield, TupleDomainFilter> filters)
    {
        Map<Subfield, TupleDomainFilter> topLevelFilters = Maps.filterEntries(filters, entry -> entry.getKey().getPath().isEmpty());
        if (topLevelFilters.isEmpty()) {
            return Optional.empty();
        }

        checkArgument(topLevelFilters.size() == 1, "ROW column may have at most one top-level range filter");
        TupleDomainFilter filter = Iterables.getOnlyElement(topLevelFilters.values());
        checkArgument(filter == IS_NULL || filter == IS_NOT_NULL, "Top-level range filter on ROW column must be IS NULL or IS NOT NULL");
        return Optional.of(filter);
    }

    // Groups required subfields by their first path element (the field name);
    // deeper path suffixes are re-rooted under a placeholder column "c".
    // Empty input means "all fields required" and maps to Optional.empty().
    private static Optional<Map<String, List<Subfield>>> getRequiredFields(List<Subfield> requiredSubfields)
    {
        if (requiredSubfields.isEmpty()) {
            return Optional.empty();
        }

        Map<String, List<Subfield>> fields = new HashMap<>();
        for (Subfield subfield : requiredSubfields) {
            List<Subfield.PathElement> path = subfield.getPath();
            String name = ((Subfield.NestedField) path.get(0)).getName();
            fields.computeIfAbsent(name, k -> new ArrayList<>());
            if (path.size() > 1) {
                fields.get(name).add(new Subfield("c", path.subList(1, path.size())));
            }
        }
        return Optional.of(ImmutableMap.copyOf(fields));
    }

    /**
     * Stand-in reader for struct fields that are neither projected nor
     * filtered: accepts every position without touching the data stream and
     * produces all-null blocks when asked for output.
     */
    private static final class PruningStreamReader
            implements SelectiveStreamReader
    {
        private static final int INSTANCE_SIZE = ClassLayout.parseClass(PruningStreamReader.class).instanceSize();

        private final StreamDescriptor streamDescriptor;
        @Nullable
        private final Type outputType;
        private int[] outputPositions;
        private int outputPositionCount;

        private PruningStreamReader(StreamDescriptor streamDescriptor, Optional<Type> outputType)
        {
            this.streamDescriptor = requireNonNull(streamDescriptor, "streamDescriptor is null");
            this.outputType = requireNonNull(outputType, "outputType is null").orElse(null);
        }

        @Override
        public int read(int offset, int[] positions, int positionCount)
        {
            // Accept all positions verbatim; nothing is read from disk.
            outputPositions = positions;
            outputPositionCount = positionCount;
            return outputPositionCount;
        }

        @Override
        public int[] getReadPositions()
        {
            return outputPositions;
        }

        @Override
        public Block getBlock(int[] positions, int positionCount)
        {
            checkState(outputType != null, "This stream reader doesn't produce output");
            return createNullBlock(outputType, positionCount);
        }

        @Override
        public BlockLease getBlockView(int[] positions, int positionCount)
        {
            checkState(outputType != null, "This stream reader doesn't produce output");
            return ClosingBlockLease.newLease(createNullBlock(outputType, positionCount));
        }

        @Override
        public void throwAnyError(int[] positions, int positionCount)
        {
        }

        @Override
        public String toString()
        {
            return toStringHelper(this)
                    .addValue(streamDescriptor)
                    .toString();
        }

        @Override
        public void close()
        {
        }

        @Override
        public void startStripe(InputStreamSources dictionaryStreamSources, List<ColumnEncoding> encoding)
        {
        }

        @Override
        public void startRowGroup(InputStreamSources dataStreamSources)
        {
        }

        @Override
        public long getRetainedSizeInBytes()
        {
            return INSTANCE_SIZE + sizeOf(outputPositions);
        }
    }
}
/*_##########################################################################
  _##
  _##  Copyright (C) 2011-2015  Pcap4J.org
  _##
  _##########################################################################
*/

package org.pcap4j.core;

import java.lang.reflect.Method;
import java.nio.ByteOrder;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.sun.jna.Callback;
import com.sun.jna.Function;
import com.sun.jna.FunctionMapper;
import com.sun.jna.Library;
import com.sun.jna.Native;
import com.sun.jna.NativeLibrary;
import com.sun.jna.NativeLong;
import com.sun.jna.Platform;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import com.sun.jna.ptr.IntByReference;
import com.sun.jna.ptr.PointerByReference;

/**
 * JNA bindings for libpcap/WinPcap: direct-mapped native functions, an
 * interface mapping for functions that are not available on every platform,
 * and Structure subclasses mirroring the pcap C structs.
 *
 * @author Kaito Yamada
 * @since pcap4j 0.9.1
 */
final class NativeMappings {

  // Library name is overridable via the "<package>.pcapLibName" system property.
  static final String PCAP_LIB_NAME = System.getProperty(
    NativeMappings.class.getPackage().getName() + ".pcapLibName",
    Platform.isWindows() ? "wpcap" : "pcap"
  );

  // Raw Function handle for pcap_dump, usable as a pcap_handler callback target.
  static final Function PCAP_DUMP = Function.getFunction(
    PCAP_LIB_NAME, "pcap_dump"
  );

  static final Map<String, Object> NATIVE_LOAD_LIBRARY_OPTIONS = new HashMap<String, Object>();

  // BIG_ENDIAN: SPARC, network byte order
  // LITTLE_ENDIAN: x86, JVM-hosted commodity hardware
  // (NOTE(review): the original comment had these two swapped.)
  static final ByteOrder NATIVE_BYTE_ORDER = ByteOrder.nativeOrder();
  static final int SBIOCSTIME = 0x4201;

  // On Solaris, errno is resolved as a global symbol from the pcap library.
  static final Pointer ERRNO_P
    = Platform.isSolaris()
        ? NativeLibrary.getInstance(PCAP_LIB_NAME)
            .getGlobalVariableAddress("errno")
        : null;

  // see pcap-int.h: struct pcap
  // Reads the file descriptor stored at offset 0 of the opaque pcap_t;
  // not meaningful on Windows, hence -1 there.
  static int getFdFromPcapT(Pointer p) {
    if (Platform.isWindows()) { return -1; }
    return p.getInt(0);
  }

  static {
    // Direct mapping of the static native methods declared below.
    Native.register(
      NativeMappings.class,
      NativeLibrary.getInstance(PCAP_LIB_NAME)
    );

    // for interface mapping: renames Java-side methods to their native symbols.
    // Both dos_pcap_stats_ex and win_pcap_stats_ex map onto pcap_stats_ex,
    // whose signature differs per platform.
    final Map<String, String> funcMap = new HashMap<String, String>();
    funcMap.put("pcap_set_rfmon", "pcap_set_rfmon");
    funcMap.put("strioctl", "strioctl");
    funcMap.put("dos_pcap_stats_ex", "pcap_stats_ex");
    funcMap.put("win_pcap_stats_ex", "pcap_stats_ex");
    funcMap.put(
      "pcap_open_offline_with_tstamp_precision",
      "pcap_open_offline_with_tstamp_precision"
    );
    funcMap.put(
      "pcap_open_dead_with_tstamp_precision",
      "pcap_open_dead_with_tstamp_precision"
    );
    funcMap.put("pcap_set_tstamp_precision", "pcap_set_tstamp_precision");

    NATIVE_LOAD_LIBRARY_OPTIONS.put(
      Library.OPTION_FUNCTION_MAPPER,
      new FunctionMapper() {
        @Override
        public String getFunctionName(NativeLibrary library, Method method) {
          return funcMap.get(method.getName());
        }
      }
    );
  }

  // direct mappings

  // int pcap_findalldevs(pcap_if_t **alldevsp, char *errbuf)
  static native int pcap_findalldevs(PointerByReference alldevsp, PcapErrbuf errbuf);

  // TODO WinPcap: int pcap_findalldevs_ex(char *host, char *port, SOCKET sockctrl, struct pcap_rmtauth *auth, pcap_if_t **alldevs, char *errbuf)

  // void pcap_freealldevs (pcap_if_t *alldevsp)
  static native void pcap_freealldevs(Pointer alldevsp);

  // char *pcap_lookupdev(char *errbuf)
  static native Pointer pcap_lookupdev(PcapErrbuf errbuf);

  // int pcap_lookupnet(char *device, bpf_u_int32 *netp, bpf_u_int32 *maskp, char *errbuf)
  static native int pcap_lookupnet(String device, IntByReference netp, IntByReference maskp, PcapErrbuf errbuf);

  // pcap_t *pcap_open_live(
  //   const char *device, int snaplen, int promisc, int to_ms, char *errbuf
  // )
  static native Pointer pcap_open_live(
    String device, int snaplen, int promisc, int to_ms, PcapErrbuf errbuf
  );

  // pcap_t *pcap_open_dead (int linktype, int snaplen)
  static native Pointer pcap_open_dead(int linktype, int snaplen);

  // pcap_t *pcap_open_offline(const char *fname, char *errbuf)
  static native Pointer pcap_open_offline(String fname, PcapErrbuf errbuf);

  // TODO WinPcap: pcap_t *pcap_open(const char *source, int snaplen, int flags, int read_timeout, struct pcap_rmtauth *auth, char *errbuf)

  // int pcap_setnonblock(pcap_t *p, int nonblock, char *errbuf)
  static native int pcap_setnonblock(Pointer p, int nonblock, PcapErrbuf errbuf);

  // int pcap_getnonblock(pcap_t *p, char *errbuf)
  static native int pcap_getnonblock(Pointer p, PcapErrbuf errbuf);

  // pcap_dumper_t *pcap_dump_open(pcap_t *p, const char *fname)
  static native Pointer pcap_dump_open(Pointer p, String fname);

  // void pcap_dump(u_char *user, const struct pcap_pkthdr *h, const u_char *sp)
  static native void pcap_dump(Pointer user, pcap_pkthdr header, byte[] packet);

  // int pcap_dump_flush(pcap_dumper_t *p)
  static native int pcap_dump_flush(Pointer p);

  // long pcap_dump_ftell(pcap_dumper_t *)
  static native NativeLong pcap_dump_ftell(Pointer dumper);

  // void pcap_dump_close(pcap_dumper_t *p)
  static native void pcap_dump_close(Pointer p);

  // TODO WinPcap: int pcap_live_dump(pcap_t *p, char *filename, int maxsize, int maxpacks)

  // int pcap_dispatch(pcap_t *p, int cnt, pcap_handler callback, u_char *user)
  static native int pcap_dispatch(Pointer p, int cnt, pcap_handler callback, Pointer user);

  // u_char *pcap_next(pcap_t *p, struct pcap_pkthdr *h)
  static native Pointer pcap_next(Pointer p, pcap_pkthdr h);

  // int pcap_next_ex(pcap_t *p, struct pcap_pkthdr **h, const u_char **data)
  static native int pcap_next_ex(Pointer p, PointerByReference h, PointerByReference data);

  // int pcap_loop(pcap_t *p, int cnt, pcap_handler callback, u_char *user)
  static native int pcap_loop(Pointer p, int cnt, pcap_handler callback, Pointer user);
  // Overload taking a raw Function (e.g. PCAP_DUMP) instead of a Java callback.
  static native int pcap_loop(Pointer p, int cnt, Function callback, Pointer user);

  // void pcap_breakloop(pcap_t *p)
  static native void pcap_breakloop(Pointer p);

  // int pcap_compile(
  //   pcap_t *p, struct bpf_program *fp, char *str,
  //   int optimize, bpf_u_int32 netmask
  // )
  static native int pcap_compile(
    Pointer p, bpf_program fp, String str, int optimize, int netmask
  );

  // int pcap_compile_nopcap(
  //   int snaplen_arg, int linktype_arg, struct bpf_program *program, char *buf,
  //   int optimize, bpf_u_int32 mask
  // )
  static native int pcap_compile_nopcap(
    int snaplen_arg, int linktype_arg, bpf_program fp, String buf, int optimize, int mask
  );

  // int pcap_setfilter(pcap_t *p, struct bpf_program *fp)
  static native int pcap_setfilter(Pointer p, bpf_program fp);

  // void pcap_freecode(struct bpf_program *fp)
  static native void pcap_freecode(bpf_program fp);

  // int pcap_sendpacket(pcap_t *p, const u_char *buf, int size)
  static native int pcap_sendpacket(Pointer p, byte buf[], int size);

  // void pcap_close(pcap_t *p)
  static native void pcap_close(Pointer p);

  // int pcap_datalink(pcap_t *p)
  static native int pcap_datalink(Pointer p);

  // int pcap_list_datalinks(pcap_t *p, int **dlt_buf)
  static native int pcap_list_datalinks(Pointer p, PointerByReference dlt_buf);

  // void pcap_free_datalinks(int *dlt_list)
  static native void pcap_free_datalinks(Pointer dlt_list);

  // int pcap_set_datalink(pcap_t *p, int dlt)
  static native int pcap_set_datalink(Pointer p, int dlt);

  // int pcap_datalink_name_to_val(const char *name)
  static native int pcap_datalink_name_to_val(String name);

  // const char * pcap_datalink_val_to_name(int dlt)
  static native String pcap_datalink_val_to_name(int dlt);

  // const char* pcap_datalink_val_to_description(int dlt)
  static native String pcap_datalink_val_to_description(int dlt);

  // int pcap_snapshot(pcap_t *p)
  static native int pcap_snapshot(Pointer p);

  // int pcap_is_swapped(pcap_t *p)
  static native int pcap_is_swapped(Pointer p);

  // int pcap_major_version(pcap_t *p)
  static native int pcap_major_version(Pointer p);

  // int pcap_minor_version(pcap_t *p)
  static native int pcap_minor_version(Pointer p);

  // int pcap_stats(pcap_t *p, struct pcap_stat *ps)
  static native int pcap_stats(Pointer p, pcap_stat ps);

  // char *pcap_geterr(pcap_t *p)
  static native Pointer pcap_geterr(Pointer p);

  // char *pcap_strerror(int errno)
  static native Pointer pcap_strerror(int errno);

  // const char * pcap_lib_version(void)
  static native String pcap_lib_version();

  // pcap_t *pcap_create (const char *device, char *ebuf)
  static native Pointer pcap_create(String device, PcapErrbuf ebuf);

  // int pcap_set_snaplen(pcap_t *p, int snaplen)
  static native int pcap_set_snaplen(Pointer p, int snaplen);

  // int pcap_set_promisc(pcap_t *p, int promisc)
  static native int pcap_set_promisc(Pointer p, int promisc);

  // int pcap_set_timeout(pcap_t *p, int timeout_ms)
  static native int pcap_set_timeout(Pointer p, int timeout_ms);

  // int pcap_set_buffer_size(pcap_t *p, int buffer_size)
  static native int pcap_set_buffer_size(Pointer p, int buffer_size);

  // int pcap_activate(pcap_t *p)
  static native int pcap_activate(Pointer p);

  // interface mappings
  interface PcapLibrary extends Library {
    static final PcapLibrary INSTANCE
      = (PcapLibrary)Native.loadLibrary(
          PCAP_LIB_NAME,
          PcapLibrary.class,
          NATIVE_LOAD_LIBRARY_OPTIONS
        );

    // The following functions can't be mapped directly because they are supported by not all OSes
    // or by only very new versions of pcap libraries.
    // If you add a method here you need to put the method to funcMap in static initialization
    // block above.

    // int pcap_set_rfmon(pcap_t *p, int rfmon)
    int pcap_set_rfmon(Pointer p, int rfmon);

    // int strioctl(int fd, int cmd, int len, char *dp)
    int strioctl(int fd, int cmd, int len, Pointer dp);

    // int pcap_stats_ex(pcap_t *p, struct pcap_stat_ex *ps)
    int dos_pcap_stats_ex(Pointer p, pcap_stat_ex ps);

    // struct pcap_stat* pcap_stats_ex(pcap_t *p, int *pcap_stat_size)
    Pointer win_pcap_stats_ex(Pointer p, IntByReference pcap_stat_size);

    // pcap_t *pcap_open_offline_with_tstamp_precision(const char *fname, u_int precision, char *errbuf);
    Pointer pcap_open_offline_with_tstamp_precision(String fname, int precision, PcapErrbuf errbuf);

    // pcap_t *pcap_open_dead_with_tstamp_precision(int linktype, int snaplen, u_int precision);
    Pointer pcap_open_dead_with_tstamp_precision(int linkType, int snaplen, int precision);

    // int pcap_set_tstamp_precision(pcap_t *p, int tstamp_precision)
    int pcap_set_tstamp_precision(Pointer p, int tstamp_precision);
  }

  // Java-side callback invoked by pcap_dispatch/pcap_loop for each packet.
  static interface pcap_handler extends Callback {
    // void got_packet(
    //   u_char *args, const struct pcap_pkthdr *header, const u_char *packet
    // );
    public void got_packet(Pointer args, Pointer header, Pointer packet);
  }

  public static class pcap_if extends Structure {

    public pcap_if.ByReference next;           // struct pcap_if *
    public String name;                        // char *
    public String description;                 // char *
    public pcap_addr.ByReference addresses;    // struct pcap_addr *
    public int flags;                          // bpf_u_int32

    public pcap_if() {}

    public pcap_if(Pointer p) {
      super(p);
      read();
    }

    public static class ByReference extends pcap_if implements Structure.ByReference {}

    @Override
    protected List<String> getFieldOrder() {
      List<String> list = new ArrayList<String>();
      list.add("next");
      list.add("name");
      list.add("description");
      list.add("addresses");
      list.add("flags");
      return list;
    }
  }

  public static class pcap_addr extends Structure {

    public pcap_addr.ByReference next;       // struct pcap_addr *
    public sockaddr.ByReference addr;        // struct sockaddr *
    public sockaddr.ByReference netmask;     // struct sockaddr *
    public sockaddr.ByReference broadaddr;   // struct sockaddr *
    public sockaddr.ByReference dstaddr;     // struct sockaddr *

    public pcap_addr() {}

    public pcap_addr(Pointer p) {
      super(p);
      read();
    }

    public static class ByReference extends pcap_addr implements Structure.ByReference {}

    @Override
    protected List<String> getFieldOrder() {
      List<String> list = new ArrayList<String>();
      list.add("next");
      list.add("addr");
      list.add("netmask");
      list.add("broadaddr");
      list.add("dstaddr");
      return list;
    }
  }

  public static class sockaddr extends Structure {

    public short sa_family;                 // u_short
    public byte[] sa_data = new byte[14];   // char[14]

    public sockaddr() {}

    public sockaddr(Pointer p) {
      super(p);
      read();
    }

    public static class ByReference extends sockaddr implements Structure.ByReference {}

    @Override
    protected List<String> getFieldOrder() {
      List<String> list = new ArrayList<String>();
      list.add("sa_family");
      list.add("sa_data");
      return list;
    }

    // On BSD-style platforms struct sockaddr begins with a one-byte sa_len
    // followed by a one-byte sa_family, so the family is extracted from the
    // short according to native byte order; Windows/Linux use a full u_short.
    short getSaFamily() {
      if (isWindowsType()) {
        return sa_family;
      }
      else {
        if (NATIVE_BYTE_ORDER.equals(ByteOrder.BIG_ENDIAN)) {
          return (short)(0xFF & sa_family);
        }
        else {
          return (short)(0xFF & (sa_family >> 8));
        }
      }
    }

    // "Windows type" here means a 2-byte sa_family with no sa_len byte
    // (true everywhere except Mac/FreeBSD/OpenBSD/kFreeBSD).
    static boolean isWindowsType() {
      if (
           Platform.isMac()
        || Platform.isFreeBSD()
        || Platform.isOpenBSD()
        || Platform.iskFreeBSD()
      ) {
        return false;
      }
      else {
        return true;
      }
    }
  }

  public static class sockaddr_in extends Structure {

    public short sin_family;                 // short
    public short sin_port;                   // u_short
    public in_addr sin_addr;                 // struct in_addr
    public byte[] sin_zero = new byte[8];    // char[8]

    public sockaddr_in() {}

    public sockaddr_in(Pointer p) {
      super(p);
      read();
    }

    @Override
    protected List<String> getFieldOrder() {
      List<String> list = new ArrayList<String>();
      list.add("sin_family");
      list.add("sin_port");
      list.add("sin_addr");
      list.add("sin_zero");
      return list;
    }

    short getSaFamily() {
      if (sockaddr.isWindowsType()) {
        return sin_family;
      }
      else {
        if (NATIVE_BYTE_ORDER.equals(ByteOrder.BIG_ENDIAN)) {
          return (short)(0xFF & sin_family);
        }
        else {
          return (short)(0xFF & (sin_family >> 8));
        }
      }
    }
  }

  public static class in_addr extends Structure {

    public int s_addr;   // in_addr_t = uint32_t

    public in_addr() {}

    @Override
    protected List<String> getFieldOrder() {
      List<String> list = new ArrayList<String>();
      list.add("s_addr");
      return list;
    }
  }

  public static class sockaddr_in6 extends Structure {

    public short sin6_family;     // u_int16_t
    public short sin6_port;       // u_int16_t
    public int sin6_flowinfo;     // u_int32_t
    public in6_addr sin6_addr;    // struct in6_addr
    public int sin6_scope_id;     // u_int32_t

    public sockaddr_in6() {}

    public sockaddr_in6(Pointer p) {
      super(p);
      read();
    }

    @Override
    protected List<String> getFieldOrder() {
      List<String> list = new ArrayList<String>();
      list.add("sin6_family");
      list.add("sin6_port");
      list.add("sin6_flowinfo");
      list.add("sin6_addr");
      list.add("sin6_scope_id");
      return list;
    }

    short getSaFamily() {
      if (sockaddr.isWindowsType()) {
        return sin6_family;
      }
      else {
        if (NATIVE_BYTE_ORDER.equals(ByteOrder.BIG_ENDIAN)) {
          return (short)(0xFF & sin6_family);
        }
        else {
          return (short)(0xFF & (sin6_family >> 8));
        }
      }
    }
  }

  public static class in6_addr extends Structure {

    public byte[] s6_addr = new byte[16];   // unsigned char[16]

    public in6_addr() {}

    @Override
    protected List<String> getFieldOrder() {
      List<String> list = new ArrayList<String>();
      list.add("s6_addr");
      return list;
    }
  }

  // Linux specific
  public static class sockaddr_ll extends Structure {

    public short sll_family;                // unsigned short
    public short sll_protocol;              // __be16
    public int sll_ifindex;                 // int
    public short sll_hatype;;               // unsigned short
    public byte sll_pkttype;                // unsigned char
    public byte sll_halen;                  // unsigned char
    public byte[] sll_addr = new byte[8];   // unsigned char[8]

    public sockaddr_ll() {}

    public sockaddr_ll(Pointer p) {
      super(p);
      read();
    }

    @Override
    protected List<String> getFieldOrder() {
      List<String> list = new ArrayList<String>();
      list.add("sll_family");
      list.add("sll_protocol");
      list.add("sll_ifindex");
      list.add("sll_hatype");
      list.add("sll_pkttype");
      list.add("sll_halen");
      list.add("sll_addr");
      return list;
    }

    short getSaFamily() {
      if (sockaddr.isWindowsType()) {
        return sll_family;
      }
      else {
        if (NATIVE_BYTE_ORDER.equals(ByteOrder.BIG_ENDIAN)) {
          return (short)(0xFF & sll_family);
        }
        else {
          return (short)(0xFF & (sll_family >> 8));
        }
      }
    }
  }

  // Mac OS X and BSD specific
  public static class sockaddr_dl extends Structure {

    public byte sdl_len;                     // u_char
    public byte sdl_family;                  // u_char
    public short sdl_index;                  // u_short
    public byte sdl_type;                    // u_char
    public byte sdl_nlen;;                   // u_char
    public byte sdl_alen;                    // u_char
    public byte sdl_slen;                    // u_char
    public byte[] sdl_data = new byte[46];   // unsigned char[46]
                                             // minimum work area, can be larger;
                                             // contains both if name and ll address

    public sockaddr_dl() {}

    public sockaddr_dl(Pointer p) {
      super(p);
      read();
    }

    @Override
    protected List<String> getFieldOrder() {
      List<String> list = new ArrayList<String>();
      list.add("sdl_len");
      list.add("sdl_family");
      list.add("sdl_index");
      list.add("sdl_type");
      list.add("sdl_nlen");
      list.add("sdl_alen");
      list.add("sdl_slen");
      list.add("sdl_data");
      return list;
    }

    // Link-layer address follows the (sdl_nlen-byte) interface name in
    // sdl_data; 8 is the fixed offset of sdl_data within the struct.
    byte[] getAddress() {
      return getPointer().getByteArray(8 + (0xFF & sdl_nlen), 0xFF & sdl_alen);
    }
  }

  public static class pcap_pkthdr extends Structure {

    // Field offsets cached once so headers can be read from raw Pointers
    // without instantiating a Structure per packet.
    public static final int TS_OFFSET;
    public static final int CAPLEN_OFFSET;

    public timeval ts;   // struct timeval
    public int caplen;   // bpf_u_int32
    public int len;      // bpf_u_int32

    static {
      pcap_pkthdr ph = new pcap_pkthdr();
      TS_OFFSET = ph.fieldOffset("ts");
      CAPLEN_OFFSET = ph.fieldOffset("caplen");
    }

    public pcap_pkthdr() {}

    public pcap_pkthdr(Pointer p) {
      super(p);
      read();
    }

    public static class ByReference extends pcap_pkthdr implements Structure.ByReference {}

    @Override
    protected List<String> getFieldOrder() {
      List<String> list = new ArrayList<String>();
      list.add("ts");
      list.add("caplen");
      list.add("len");
      return list;
    }

    static NativeLong getTvSec(Pointer p) {
      return p.getNativeLong(TS_OFFSET + timeval.TV_SEC_OFFSET);
    }

    static NativeLong getTvUsec(Pointer p) {
      return p.getNativeLong(TS_OFFSET + timeval.TV_USEC_OFFSET);
    }

    static int getCaplen(Pointer p) {
      return p.getInt(CAPLEN_OFFSET);
    }
  }

  public static class timeval extends Structure {

    public static final int TV_SEC_OFFSET;
    public static final int TV_USEC_OFFSET;

    public NativeLong tv_sec;    // long
    public NativeLong tv_usec;   // long

    static {
      timeval tv = new timeval();
      TV_SEC_OFFSET = tv.fieldOffset("tv_sec");
      TV_USEC_OFFSET = tv.fieldOffset("tv_usec");
    }

    public timeval() {}

    @Override
    protected List<String> getFieldOrder() {
      List<String> list = new ArrayList<String>();
      list.add("tv_sec");
      list.add("tv_usec");
      return list;
    }
  }

  public static class bpf_program extends Structure {

    public int bf_len;                     // u_int
    public bpf_insn.ByReference bf_insns;  // struct bpf_insn *

    public bpf_program() {}

    @Override
    protected List<String> getFieldOrder() {
      List<String> list = new ArrayList<String>();
      list.add("bf_len");
      list.add("bf_insns");
      return list;
    }
  }

  public static class bpf_insn extends Structure {

    public short code;   // u_short
    public byte jt;      // u_char
    public byte jf;      // u_char
    public int k;        // bpf_u_int32

    public bpf_insn() {}

    public static class ByReference extends bpf_insn implements Structure.ByReference {}

    @Override
    protected List<String> getFieldOrder() {
      List<String> list = new ArrayList<String>();
      list.add("code");
      list.add("jt");
      list.add("jf");
      list.add("k");
      return list;
    }
  };

  public static class pcap_stat extends Structure {

    // Offsets cached for reading stats from raw Pointers.
    public static final int PS_RECV_OFFSET;
    public static final int PS_DROP_OFFSET;
    public static final int PS_IFDROP_OFFSET;

    public int ps_recv;    // u_int
    public int ps_drop;    // u_int
    public int ps_ifdrop;  // u_int

    static {
      pcap_stat ph = new pcap_stat();
      PS_RECV_OFFSET = ph.fieldOffset("ps_recv");
      PS_DROP_OFFSET = ph.fieldOffset("ps_drop");
      PS_IFDROP_OFFSET = ph.fieldOffset("ps_ifdrop");
    }

    public pcap_stat() {}

    public pcap_stat(Pointer p) {
      super(p);
      read();
    }

    public static class ByReference extends pcap_stat implements Structure.ByReference {}

    @Override
    protected List<String> getFieldOrder() {
      List<String> list = new ArrayList<String>();
      list.add("ps_recv");
      list.add("ps_drop");
      list.add("ps_ifdrop");
      return list;
    }

    static int getPsRecv(Pointer p) {
      return p.getInt(PS_RECV_OFFSET);
    }

    static int getPsDrop(Pointer p) {
      return p.getInt(PS_DROP_OFFSET);
    }

    static int getPsIfdrop(Pointer p) {
      return p.getInt(PS_IFDROP_OFFSET);
    }
  };

  // WinPcap's pcap_stat carries an extra bs_capt field.
  public static class win_pcap_stat extends pcap_stat {

    public static final int BS_CAPT_OFFSET;

    public int bs_capt;  // u_int

    static {
      win_pcap_stat ph = new win_pcap_stat();
      BS_CAPT_OFFSET = ph.fieldOffset("bs_capt");
    }

    public win_pcap_stat() {}

    public win_pcap_stat(Pointer p) {
      super(p);
      read();
    }

    public static class ByReference extends win_pcap_stat implements Structure.ByReference {}

    @Override
    protected List<String> getFieldOrder() {
      List<String> list = super.getFieldOrder();
      list.add("bs_capt");
      return list;
    }

    static int getBsCapt(Pointer p) {
      return p.getInt(BS_CAPT_OFFSET);
    }
  };

  public static class pcap_stat_ex extends Structure {

    public NativeLong rx_packets;        /* total packets received       */ // u_long
    public NativeLong tx_packets;        /* total packets transmitted    */ // u_long
    public NativeLong rx_bytes;          /* total bytes received         */ // u_long
    public NativeLong tx_bytes;          /* total bytes transmitted      */ // u_long
    public NativeLong rx_errors;         /* bad packets received         */ // u_long
    public NativeLong tx_errors;         /* packet transmit problems     */ // u_long
    public NativeLong rx_dropped;        /* no space in Rx buffers       */ // u_long
    public NativeLong tx_dropped;        /* no space available for Tx    */ // u_long
    public NativeLong multicast;         /* multicast packets received   */ // u_long
    public NativeLong collisions;        // u_long

    /* detailed rx_errors: */
    public NativeLong rx_length_errors;  // u_long
    public NativeLong rx_over_errors;    /* receiver ring buff overflow  */ // u_long
    public NativeLong rx_crc_errors;     /* recv'd pkt with crc error    */ // u_long
    public NativeLong rx_frame_errors;   /* recv'd frame alignment error */ // u_long
    public NativeLong rx_fifo_errors;    /* recv'r fifo overrun          */ // u_long
    public NativeLong rx_missed_errors;  /* recv'r missed packet         */ // u_long

    /* detailed tx_errors */
    public NativeLong tx_aborted_errors;   // u_long
    public NativeLong tx_carrier_errors;   // u_long
    public NativeLong tx_fifo_errors;      // u_long
    public NativeLong tx_heartbeat_errors; // u_long
    public NativeLong tx_window_errors;    // u_long

    public pcap_stat_ex() {}

    public static class ByReference extends pcap_stat_ex implements Structure.ByReference {}

    @Override
    protected List<String> getFieldOrder() {
      List<String> list = new ArrayList<String>();
      list.add("rx_packets");
      list.add("tx_packets");
      list.add("rx_bytes");
      list.add("tx_bytes");
      list.add("rx_errors");
      list.add("tx_errors");
      list.add("rx_dropped");
      list.add("tx_dropped");
      list.add("multicast");
      list.add("collisions");
      list.add("rx_length_errors");
      list.add("rx_over_errors");
      list.add("rx_crc_errors");
      list.add("rx_frame_errors");
      list.add("rx_fifo_errors");
      list.add("rx_missed_errors");
      list.add("tx_aborted_errors");
      list.add("tx_carrier_errors");
      list.add("tx_fifo_errors");
      list.add("tx_heartbeat_errors");
      list.add("tx_window_errors");
      return list;
    }
  };

  // Fixed-size char buffer libpcap writes error messages into.
  public static class PcapErrbuf extends Structure {

    public byte[] buf = new byte[PCAP_ERRBUF_SIZE()];

    public PcapErrbuf() {}

    private static int PCAP_ERRBUF_SIZE() {
      return 256;
    }

    public int length() {
      return toString().length();
    }

    @Override
    protected List<String> getFieldOrder() {
      List<String> list = new ArrayList<String>();
      list.add("buf");
      return list;
    }

    @Override
    public String toString() {
      return Native.toString(buf);
    }
  }
}
/* * Copyright (c) 2007-present, Stephen Colebourne & Michael Nascimento Santos * * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * * Neither the name of JSR-310 nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/
package org.threeten.extra;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.time.DateTimeException;
import java.time.LocalDate;
import java.time.Period;
import java.time.format.DateTimeParseException;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;

import com.google.common.collect.Range;
import com.tngtech.junit.dataprovider.DataProvider;
import com.tngtech.junit.dataprovider.UseDataProvider;

/**
 * Test date range.
 */
public class TestLocalDateRange {

    // Dates near the edges of the LocalDate domain; ranges touching MIN/MAX
    // are treated as unbounded, which these constants probe.
    private static final LocalDate MINP1 = LocalDate.MIN.plusDays(1);
    private static final LocalDate MINP2 = LocalDate.MIN.plusDays(2);
    private static final LocalDate MINP3 = LocalDate.MIN.plusDays(3);
    private static final LocalDate MAXM1 = LocalDate.MAX.minusDays(1);
    private static final LocalDate MAXM2 = LocalDate.MAX.minusDays(2);
    // Fixed mid-range dates used throughout the tests.
    private static final LocalDate DATE_2012_07_01 = LocalDate.of(2012, 7, 1);
    private static final LocalDate DATE_2012_07_27 = LocalDate.of(2012, 7, 27);
    private static final LocalDate DATE_2012_07_28 = LocalDate.of(2012, 7, 28);
    private static final LocalDate DATE_2012_07_29 = LocalDate.of(2012, 7, 29);
    private static final LocalDate DATE_2012_07_30 = LocalDate.of(2012, 7, 30);
    private static final LocalDate DATE_2012_07_31 = LocalDate.of(2012, 7, 31);
    private static final LocalDate DATE_2012_08_01 = LocalDate.of(2012, 8, 1);
    private static final LocalDate DATE_2012_08_31 = LocalDate.of(2012, 8, 31);

    //-----------------------------------------------------------------------
    // ALL: the fully unbounded range.
    @Test
    public void test_ALL() {
        LocalDateRange test = LocalDateRange.ALL;
        assertEquals(LocalDate.MIN, test.getStart());
        assertEquals(LocalDate.MAX, test.getEndInclusive());
        assertEquals(LocalDate.MAX, test.getEnd());
        assertEquals(false, test.isEmpty());
        assertEquals(true, test.isUnboundedStart());
        assertEquals(true, test.isUnboundedEnd());
        assertEquals("-999999999-01-01/+999999999-12-31", test.toString());
    }

    //-----------------------------------------------------------------------
    // of(start, end): end is exclusive, so getEndInclusive() is end minus one day.
    @Test
    public void test_of() {
        LocalDateRange test = LocalDateRange.of(DATE_2012_07_28, DATE_2012_07_31);
        assertEquals(DATE_2012_07_28, test.getStart());
        assertEquals(DATE_2012_07_30, test.getEndInclusive());
        assertEquals(DATE_2012_07_31, test.getEnd());
        assertEquals(false, test.isEmpty());
        assertEquals(false, test.isUnboundedStart());
        assertEquals(false, test.isUnboundedEnd());
        assertEquals(3, test.lengthInDays());
        assertEquals("2012-07-28/2012-07-31", test.toString());
    }

    // A range starting at LocalDate.MIN is unbounded-start and reports
    // Integer.MAX_VALUE length.
    @Test
    public void test_of_MIN() {
        LocalDateRange test = LocalDateRange.of(LocalDate.MIN, DATE_2012_07_31);
        assertEquals(LocalDate.MIN, test.getStart());
        assertEquals(DATE_2012_07_30, test.getEndInclusive());
        assertEquals(DATE_2012_07_31, test.getEnd());
        assertEquals(false, test.isEmpty());
        assertEquals(true, test.isUnboundedStart());
        assertEquals(false, test.isUnboundedEnd());
        assertEquals(Integer.MAX_VALUE, test.lengthInDays());
        assertEquals("-999999999-01-01/2012-07-31", test.toString());
    }

    // A range ending at LocalDate.MAX is unbounded-end.
    @Test
    public void test_of_MAX() {
        LocalDateRange test = LocalDateRange.of(DATE_2012_07_28, LocalDate.MAX);
        assertEquals(DATE_2012_07_28, test.getStart());
        assertEquals(LocalDate.MAX, test.getEndInclusive());
        assertEquals(LocalDate.MAX, test.getEnd());
        assertEquals(false, test.isEmpty());
        assertEquals(false, test.isUnboundedStart());
        assertEquals(true, test.isUnboundedEnd());
        assertEquals(Integer.MAX_VALUE, test.lengthInDays());
        assertEquals("2012-07-28/+999999999-12-31", test.toString());
    }

    @Test
    public void test_of_MIN_MAX() {
        LocalDateRange test = LocalDateRange.of(LocalDate.MIN, LocalDate.MAX);
        assertEquals(LocalDate.MIN, test.getStart());
        assertEquals(LocalDate.MAX, test.getEndInclusive());
        assertEquals(LocalDate.MAX, test.getEnd());
        assertEquals(false, test.isEmpty());
        assertEquals(true, test.isUnboundedStart());
        assertEquals(true, test.isUnboundedEnd());
        assertEquals(Integer.MAX_VALUE, test.lengthInDays());
        assertEquals("-999999999-01-01/+999999999-12-31", test.toString());
    }

    // Ranges too close to MIN to represent an exclusive end are rejected.
    @Test
    public void test_of_MIN_MIN() {
        assertThrows(DateTimeException.class, () -> LocalDateRange.of(LocalDate.MIN, LocalDate.MIN));
    }

    @Test
    public void test_of_MIN_MINP1() {
        assertThrows(DateTimeException.class, () -> LocalDateRange.of(LocalDate.MIN, MINP1));
    }

    @Test
    public void test_of_MINP1_MINP1() {
        assertThrows(DateTimeException.class, () -> LocalDateRange.of(MINP1, MINP1));
    }

    @Test
    public void test_of_MIN_MINP2() {
        LocalDateRange test = LocalDateRange.of(LocalDate.MIN, MINP2);
        assertEquals(LocalDate.MIN, test.getStart());
        assertEquals(MINP1, test.getEndInclusive());
        assertEquals(MINP2, test.getEnd());
        assertEquals(false, test.isEmpty());
        assertEquals(true, test.isUnboundedStart());
        assertEquals(false, test.isUnboundedEnd());
        assertEquals(Integer.MAX_VALUE, test.lengthInDays());
        assertEquals("-999999999-01-01/-999999999-01-03", test.toString());
    }

    @Test
    public void test_of_MINP1_MINP2() {
        LocalDateRange test = LocalDateRange.of(MINP1, MINP2);
        assertEquals(MINP1, test.getStart());
        assertEquals(MINP1, test.getEndInclusive());
        assertEquals(MINP2, test.getEnd());
        assertEquals(false, test.isEmpty());
        assertEquals(false, test.isUnboundedStart());
        assertEquals(false, test.isUnboundedEnd());
        assertEquals(1, test.lengthInDays());
        assertEquals("-999999999-01-02/-999999999-01-03", test.toString());
    }

    // NOTE(review): this chunk ends mid-method — the remainder of
    // test_of_MINP2_MINP2 is outside this view.
    @Test
    public void test_of_MINP2_MINP2() {
        LocalDateRange test = LocalDateRange.of(MINP2, MINP2);
        assertEquals(MINP2,
test.getStart()); assertEquals(MINP1, test.getEndInclusive()); assertEquals(MINP2, test.getEnd()); assertEquals(true, test.isEmpty()); assertEquals(false, test.isUnboundedStart()); assertEquals(false, test.isUnboundedEnd()); assertEquals(0, test.lengthInDays()); assertEquals("-999999999-01-03/-999999999-01-03", test.toString()); } @Test public void test_of_MAX_MAX() { assertThrows(DateTimeException.class, () -> LocalDateRange.of(LocalDate.MAX, LocalDate.MAX)); } @Test public void test_of_MAXM1_MAX() { assertThrows(DateTimeException.class, () -> LocalDateRange.of(MAXM1, LocalDate.MAX)); } @Test public void test_of_MAXM1_MAXM1() { assertThrows(DateTimeException.class, () -> LocalDateRange.of(MAXM1, MAXM1)); } @Test public void test_of_empty() { LocalDateRange test = LocalDateRange.of(DATE_2012_07_30, DATE_2012_07_30); assertEquals(DATE_2012_07_30, test.getStart()); assertEquals(DATE_2012_07_29, test.getEndInclusive()); assertEquals(DATE_2012_07_30, test.getEnd()); assertEquals(true, test.isEmpty()); assertEquals(false, test.isUnboundedStart()); assertEquals(false, test.isUnboundedEnd()); assertEquals(0, test.lengthInDays()); assertEquals("2012-07-30/2012-07-30", test.toString()); } @Test public void test_of_badOrder() { assertThrows(DateTimeException.class, () -> LocalDateRange.of(DATE_2012_07_31, DATE_2012_07_30)); } //----------------------------------------------------------------------- @Test public void test_ofClosed() { LocalDateRange test = LocalDateRange.ofClosed(DATE_2012_07_28, DATE_2012_07_30); assertEquals(DATE_2012_07_28, test.getStart()); assertEquals(DATE_2012_07_30, test.getEndInclusive()); assertEquals(DATE_2012_07_31, test.getEnd()); assertEquals(false, test.isEmpty()); assertEquals(false, test.isUnboundedStart()); assertEquals(false, test.isUnboundedEnd()); assertEquals(3, test.lengthInDays()); assertEquals("2012-07-28/2012-07-31", test.toString()); } @Test public void test_ofClosed_MIN() { LocalDateRange test = 
LocalDateRange.ofClosed(LocalDate.MIN, DATE_2012_07_30); assertEquals(LocalDate.MIN, test.getStart()); assertEquals(DATE_2012_07_30, test.getEndInclusive()); assertEquals(DATE_2012_07_31, test.getEnd()); assertEquals(false, test.isEmpty()); assertEquals(true, test.isUnboundedStart()); assertEquals(false, test.isUnboundedEnd()); assertEquals(Integer.MAX_VALUE, test.lengthInDays()); assertEquals("-999999999-01-01/2012-07-31", test.toString()); } @Test public void test_ofClosed_MAX() { LocalDateRange test = LocalDateRange.ofClosed(DATE_2012_07_28, LocalDate.MAX); assertEquals(DATE_2012_07_28, test.getStart()); assertEquals(LocalDate.MAX, test.getEndInclusive()); assertEquals(LocalDate.MAX, test.getEnd()); assertEquals(false, test.isEmpty()); assertEquals(false, test.isUnboundedStart()); assertEquals(true, test.isUnboundedEnd()); assertEquals(Integer.MAX_VALUE, test.lengthInDays()); assertEquals("2012-07-28/+999999999-12-31", test.toString()); } @Test public void test_ofClosed_MIN_MAX() { LocalDateRange test = LocalDateRange.ofClosed(LocalDate.MIN, LocalDate.MAX); assertEquals(LocalDate.MIN, test.getStart()); assertEquals(LocalDate.MAX, test.getEndInclusive()); assertEquals(LocalDate.MAX, test.getEnd()); assertEquals(false, test.isEmpty()); assertEquals(true, test.isUnboundedStart()); assertEquals(true, test.isUnboundedEnd()); assertEquals(Integer.MAX_VALUE, test.lengthInDays()); assertEquals("-999999999-01-01/+999999999-12-31", test.toString()); } @Test public void test_ofClosed_MIN_MIN() { assertThrows(DateTimeException.class, () -> LocalDateRange.ofClosed(LocalDate.MIN, LocalDate.MIN)); } @Test public void test_ofClosed_MIN_MINP1() { LocalDateRange test = LocalDateRange.ofClosed(LocalDate.MIN, MINP1); assertEquals(LocalDate.MIN, test.getStart()); assertEquals(MINP1, test.getEndInclusive()); assertEquals(MINP2, test.getEnd()); assertEquals(false, test.isEmpty()); assertEquals(true, test.isUnboundedStart()); assertEquals(false, test.isUnboundedEnd()); 
assertEquals(Integer.MAX_VALUE, test.lengthInDays()); assertEquals("-999999999-01-01/-999999999-01-03", test.toString()); } @Test public void test_ofClosed_MINP1_MINP1() { LocalDateRange test = LocalDateRange.ofClosed(MINP1, MINP1); assertEquals(MINP1, test.getStart()); assertEquals(MINP1, test.getEndInclusive()); assertEquals(MINP2, test.getEnd()); assertEquals(false, test.isEmpty()); assertEquals(false, test.isUnboundedStart()); assertEquals(false, test.isUnboundedEnd()); assertEquals(1, test.lengthInDays()); assertEquals("-999999999-01-02/-999999999-01-03", test.toString()); } @Test public void test_ofClosed_MIN_MINP2() { LocalDateRange test = LocalDateRange.ofClosed(LocalDate.MIN, MINP2); assertEquals(LocalDate.MIN, test.getStart()); assertEquals(MINP2, test.getEndInclusive()); assertEquals(MINP3, test.getEnd()); assertEquals(false, test.isEmpty()); assertEquals(true, test.isUnboundedStart()); assertEquals(false, test.isUnboundedEnd()); assertEquals(Integer.MAX_VALUE, test.lengthInDays()); assertEquals("-999999999-01-01/-999999999-01-04", test.toString()); } @Test public void test_ofClosed_MINP1_MINP2() { LocalDateRange test = LocalDateRange.ofClosed(MINP1, MINP2); assertEquals(MINP1, test.getStart()); assertEquals(MINP2, test.getEndInclusive()); assertEquals(MINP3, test.getEnd()); assertEquals(false, test.isEmpty()); assertEquals(false, test.isUnboundedStart()); assertEquals(false, test.isUnboundedEnd()); assertEquals(2, test.lengthInDays()); assertEquals("-999999999-01-02/-999999999-01-04", test.toString()); } @Test public void test_ofClosed_MINP2_MINP2() { LocalDateRange test = LocalDateRange.ofClosed(MINP2, MINP2); assertEquals(MINP2, test.getStart()); assertEquals(MINP2, test.getEndInclusive()); assertEquals(MINP3, test.getEnd()); assertEquals(false, test.isEmpty()); assertEquals(false, test.isUnboundedStart()); assertEquals(false, test.isUnboundedEnd()); assertEquals(1, test.lengthInDays()); assertEquals("-999999999-01-03/-999999999-01-04", 
test.toString()); } @Test public void test_ofClosed_MAX_MAX() { assertThrows(DateTimeException.class, () -> LocalDateRange.ofClosed(LocalDate.MAX, LocalDate.MAX)); } @Test public void test_ofClosed_MAXM1_MAX() { assertThrows(DateTimeException.class, () -> LocalDateRange.ofClosed(MAXM1, LocalDate.MAX)); } @Test public void test_ofClosed_MAXM1_MAXM1() { assertThrows(DateTimeException.class, () -> LocalDateRange.ofClosed(MAXM1, MAXM1)); } @Test public void test_ofClosed_badOrder() { assertThrows(DateTimeException.class, () -> LocalDateRange.ofClosed(DATE_2012_07_31, DATE_2012_07_30)); } //----------------------------------------------------------------------- @Test public void test_ofEmpty() { LocalDateRange test = LocalDateRange.ofEmpty(DATE_2012_07_30); assertEquals(DATE_2012_07_30, test.getStart()); assertEquals(DATE_2012_07_29, test.getEndInclusive()); assertEquals(DATE_2012_07_30, test.getEnd()); assertEquals(true, test.isEmpty()); assertEquals(false, test.isUnboundedStart()); assertEquals(false, test.isUnboundedEnd()); assertEquals("2012-07-30/2012-07-30", test.toString()); } @Test public void test_ofEmpty_MIN() { assertThrows(DateTimeException.class, () -> LocalDateRange.ofEmpty(LocalDate.MIN)); } @Test public void test_ofEmpty_MINP1() { assertThrows(DateTimeException.class, () -> LocalDateRange.ofEmpty(MINP1)); } @Test public void test_ofEmpty_MAX() { assertThrows(DateTimeException.class, () -> LocalDateRange.ofEmpty(LocalDate.MAX)); } @Test public void test_ofEmpty_MAXM1() { assertThrows(DateTimeException.class, () -> LocalDateRange.ofEmpty(MAXM1)); } //----------------------------------------------------------------------- @Test public void test_ofUnbounded() { LocalDateRange test = LocalDateRange.ofUnbounded(); assertEquals(LocalDate.MIN, test.getStart()); assertEquals(LocalDate.MAX, test.getEndInclusive()); assertEquals(LocalDate.MAX, test.getEnd()); assertEquals(false, test.isEmpty()); assertEquals(true, test.isUnboundedStart()); assertEquals(true, 
test.isUnboundedEnd()); assertEquals("-999999999-01-01/+999999999-12-31", test.toString()); } //----------------------------------------------------------------------- @Test public void test_ofUnboundedStart() { LocalDateRange test = LocalDateRange.ofUnboundedStart(DATE_2012_07_30); assertEquals(LocalDate.MIN, test.getStart()); assertEquals(DATE_2012_07_29, test.getEndInclusive()); assertEquals(DATE_2012_07_30, test.getEnd()); assertEquals(false, test.isEmpty()); assertEquals(true, test.isUnboundedStart()); assertEquals(false, test.isUnboundedEnd()); assertEquals("-999999999-01-01/2012-07-30", test.toString()); } @Test public void test_ofUnboundedStart_MIN() { assertThrows(DateTimeException.class, () -> LocalDateRange.ofUnboundedStart(LocalDate.MIN)); } @Test public void test_ofUnboundedStart_MINP1() { assertThrows(DateTimeException.class, () -> LocalDateRange.ofUnboundedStart(MINP1)); } //----------------------------------------------------------------------- @Test public void test_ofUnboundedEnd() { LocalDateRange test = LocalDateRange.ofUnboundedEnd(DATE_2012_07_30); assertEquals(DATE_2012_07_30, test.getStart()); assertEquals(LocalDate.MAX, test.getEndInclusive()); assertEquals(LocalDate.MAX, test.getEnd()); assertEquals(false, test.isEmpty()); assertEquals(false, test.isUnboundedStart()); assertEquals(true, test.isUnboundedEnd()); assertEquals("2012-07-30/+999999999-12-31", test.toString()); } @Test public void test_ofUnboundedEnd_MAX() { assertThrows(DateTimeException.class, () -> LocalDateRange.ofUnboundedEnd(LocalDate.MAX)); } @Test public void test_ofUnboundedEnd_MAXM1() { assertThrows(DateTimeException.class, () -> LocalDateRange.ofUnboundedEnd(MAXM1)); } //----------------------------------------------------------------------- @Test public void test_of_period() { LocalDateRange test = LocalDateRange.of(DATE_2012_07_28, Period.ofDays(3)); assertEquals(DATE_2012_07_28, test.getStart()); assertEquals(DATE_2012_07_30, test.getEndInclusive()); 
assertEquals(DATE_2012_07_31, test.getEnd()); assertEquals(false, test.isUnboundedStart()); assertEquals(false, test.isUnboundedEnd()); assertEquals("2012-07-28/2012-07-31", test.toString()); } @Test public void test_of_period_negative() { assertThrows(DateTimeException.class, () -> LocalDateRange.of(DATE_2012_07_31, Period.ofDays(-1))); } @Test public void test_of_period_atMIN() { assertThrows(DateTimeException.class, () -> LocalDateRange.of(LocalDate.MIN, Period.ofDays(0))); } @Test public void test_of_period_atMAX() { assertThrows(DateTimeException.class, () -> LocalDateRange.of(LocalDate.MAX, Period.ofDays(0))); } @Test public void test_of_period_atMAXM1_0D() { assertThrows(DateTimeException.class, () -> LocalDateRange.of(MAXM1, Period.ofDays(0))); } @Test public void test_of_period_atMAXM1_1D() { assertThrows(DateTimeException.class, () -> LocalDateRange.of(MAXM1, Period.ofDays(1))); } //----------------------------------------------------------------------- @Test public void test_parse_CharSequence() { LocalDateRange test = LocalDateRange.parse(DATE_2012_07_27 + "/" + DATE_2012_07_29); assertEquals(DATE_2012_07_27, test.getStart()); assertEquals(DATE_2012_07_29, test.getEnd()); } @Test public void test_parse_CharSequence_PeriodLocalDate() { LocalDateRange test = LocalDateRange.parse("P2D/" + DATE_2012_07_29); assertEquals(DATE_2012_07_27, test.getStart()); assertEquals(DATE_2012_07_29, test.getEnd()); } @Test public void test_parse_CharSequence_PeriodLocalDate_case() { LocalDateRange test = LocalDateRange.parse("p2d/" + DATE_2012_07_29); assertEquals(DATE_2012_07_27, test.getStart()); assertEquals(DATE_2012_07_29, test.getEnd()); } @Test public void test_parse_CharSequence_LocalDatePeriod() { LocalDateRange test = LocalDateRange.parse(DATE_2012_07_27 + "/P2D"); assertEquals(DATE_2012_07_27, test.getStart()); assertEquals(DATE_2012_07_29, test.getEnd()); } @Test public void test_parse_CharSequence_LocalDatePeriod_case() { LocalDateRange test = 
LocalDateRange.parse(DATE_2012_07_27 + "/p2d"); assertEquals(DATE_2012_07_27, test.getStart()); assertEquals(DATE_2012_07_29, test.getEnd()); } @Test public void test_parse_CharSequence_empty() { LocalDateRange test = LocalDateRange.parse(DATE_2012_07_27 + "/" + DATE_2012_07_27); assertEquals(DATE_2012_07_27, test.getStart()); assertEquals(DATE_2012_07_27, test.getEnd()); } @Test public void test_parse_CharSequence_badOrder() { assertThrows(DateTimeException.class, () -> LocalDateRange.parse(DATE_2012_07_29 + "/" + DATE_2012_07_27)); } @Test public void test_parse_CharSequence_badFormat() { assertThrows(DateTimeParseException.class, () -> LocalDateRange.parse(DATE_2012_07_29 + "-" + DATE_2012_07_27)); } @Test public void test_parse_CharSequence_null() { assertThrows(NullPointerException.class, () -> LocalDateRange.parse(null)); } //----------------------------------------------------------------------- @Test public void test_isSerializable() { assertTrue(Serializable.class.isAssignableFrom(LocalDateRange.class)); } @Test public void test_serialization() throws Exception { LocalDateRange test = LocalDateRange.of(DATE_2012_07_28, DATE_2012_07_31); ByteArrayOutputStream baos = new ByteArrayOutputStream(); try (ObjectOutputStream oos = new ObjectOutputStream(baos)) { oos.writeObject(test); } try (ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray()))) { assertEquals(test, ois.readObject()); } } //----------------------------------------------------------------------- @Test public void test_withStart() { LocalDateRange base = LocalDateRange.of(DATE_2012_07_28, DATE_2012_07_31); LocalDateRange test = base.withStart(DATE_2012_07_27); assertEquals(DATE_2012_07_27, test.getStart()); assertEquals(DATE_2012_07_30, test.getEndInclusive()); assertEquals(DATE_2012_07_31, test.getEnd()); } @Test public void test_withStart_adjuster() { LocalDateRange base = LocalDateRange.of(DATE_2012_07_28, DATE_2012_07_31); LocalDateRange test = 
base.withStart(date -> date.minus(1, ChronoUnit.WEEKS)); assertEquals(DATE_2012_07_28.minusWeeks(1), test.getStart()); assertEquals(DATE_2012_07_30, test.getEndInclusive()); assertEquals(DATE_2012_07_31, test.getEnd()); } @Test public void test_withStart_min() { LocalDateRange base = LocalDateRange.of(DATE_2012_07_28, DATE_2012_07_31); LocalDateRange test = base.withStart(LocalDate.MIN); assertEquals(LocalDate.MIN, test.getStart()); assertEquals(DATE_2012_07_30, test.getEndInclusive()); assertEquals(DATE_2012_07_31, test.getEnd()); } @Test public void test_withStart_empty() { LocalDateRange base = LocalDateRange.of(DATE_2012_07_28, DATE_2012_07_31); LocalDateRange test = base.withStart(DATE_2012_07_31); assertEquals(DATE_2012_07_31, test.getStart()); assertEquals(DATE_2012_07_30, test.getEndInclusive()); assertEquals(DATE_2012_07_31, test.getEnd()); } @Test public void test_withStart_invalid() { LocalDateRange base = LocalDateRange.of(DATE_2012_07_28, DATE_2012_07_30); assertThrows(DateTimeException.class, () -> base.withStart(DATE_2012_07_31)); } //----------------------------------------------------------------------- @Test public void test_withEnd() { LocalDateRange base = LocalDateRange.of(DATE_2012_07_28, DATE_2012_07_31); LocalDateRange test = base.withEnd(DATE_2012_07_30); assertEquals(DATE_2012_07_28, test.getStart()); assertEquals(DATE_2012_07_29, test.getEndInclusive()); assertEquals(DATE_2012_07_30, test.getEnd()); } @Test public void test_withEnd_adjuster() { LocalDateRange base = LocalDateRange.of(DATE_2012_07_28, DATE_2012_07_31); LocalDateRange test = base.withEnd(date -> date.plus(1, ChronoUnit.WEEKS)); assertEquals(DATE_2012_07_28, test.getStart()); assertEquals(DATE_2012_07_30.plusWeeks(1), test.getEndInclusive()); assertEquals(DATE_2012_07_31.plusWeeks(1), test.getEnd()); } @Test public void test_withEnd_max() { LocalDateRange base = LocalDateRange.of(DATE_2012_07_28, DATE_2012_07_31); LocalDateRange test = base.withEnd(LocalDate.MAX); 
assertEquals(DATE_2012_07_28, test.getStart()); assertEquals(LocalDate.MAX, test.getEndInclusive()); assertEquals(LocalDate.MAX, test.getEnd()); } @Test public void test_withEnd_empty() { LocalDateRange base = LocalDateRange.of(DATE_2012_07_30, DATE_2012_07_31); LocalDateRange test = base.withEnd(DATE_2012_07_30); assertEquals(DATE_2012_07_30, test.getStart()); assertEquals(DATE_2012_07_29, test.getEndInclusive()); assertEquals(DATE_2012_07_30, test.getEnd()); } @Test public void test_withEnd_invalid() { LocalDateRange base = LocalDateRange.of(DATE_2012_07_28, DATE_2012_07_31); assertThrows(DateTimeException.class, () -> base.withEnd(DATE_2012_07_27)); } //----------------------------------------------------------------------- @Test public void test_contains() { LocalDateRange test = LocalDateRange.of(DATE_2012_07_28, DATE_2012_07_31); assertEquals(false, test.contains(LocalDate.MIN)); assertEquals(false, test.contains(DATE_2012_07_27)); assertEquals(true, test.contains(DATE_2012_07_28)); assertEquals(true, test.contains(DATE_2012_07_29)); assertEquals(true, test.contains(DATE_2012_07_30)); assertEquals(false, test.contains(DATE_2012_07_31)); assertEquals(false, test.contains(DATE_2012_08_01)); assertEquals(false, test.contains(LocalDate.MAX)); } @Test public void test_contains_baseEmpty() { LocalDateRange test = LocalDateRange.of(DATE_2012_07_28, DATE_2012_07_28); assertEquals(false, test.contains(LocalDate.MIN)); assertEquals(false, test.contains(DATE_2012_07_27)); assertEquals(false, test.contains(DATE_2012_07_28)); assertEquals(false, test.contains(DATE_2012_07_29)); assertEquals(false, test.contains(LocalDate.MAX)); } @Test public void test_contains_max() { LocalDateRange test = LocalDateRange.of(DATE_2012_07_28, LocalDate.MAX); assertEquals(false, test.contains(LocalDate.MIN)); assertEquals(false, test.contains(DATE_2012_07_27)); assertEquals(true, test.contains(DATE_2012_07_28)); assertEquals(true, test.contains(DATE_2012_07_29)); assertEquals(true, 
test.contains(DATE_2012_07_30)); assertEquals(true, test.contains(DATE_2012_07_31)); assertEquals(true, test.contains(DATE_2012_08_01)); assertEquals(true, test.contains(LocalDate.MAX)); } //----------------------------------------------------------------------- @DataProvider public static Object[][] data_queries() { return new Object[][] { // before start { DATE_2012_07_01, DATE_2012_07_27, false, false, false, false }, { DATE_2012_07_01, DATE_2012_07_28, false, true, true, false }, // before end { DATE_2012_07_27, DATE_2012_07_30, false, false, true, true }, { DATE_2012_07_28, DATE_2012_07_30, true, false, true, true }, { DATE_2012_07_29, DATE_2012_07_30, true, false, true, true }, // same end { DATE_2012_07_27, DATE_2012_07_31, false, false, true, true }, { DATE_2012_07_28, DATE_2012_07_31, true, false, true, true }, { DATE_2012_07_29, DATE_2012_07_31, true, false, true, true }, { DATE_2012_07_30, DATE_2012_07_31, true, false, true , true}, // past end { DATE_2012_07_27, DATE_2012_08_01, false, false, true, true }, { DATE_2012_07_28, DATE_2012_08_01, false, false, true, true }, { DATE_2012_07_29, DATE_2012_08_01, false, false, true, true }, { DATE_2012_07_30, DATE_2012_08_01, false, false, true, true }, // start past end { DATE_2012_07_31, DATE_2012_08_01, false, true, true, false }, { DATE_2012_07_31, DATE_2012_08_31, false, true, true, false }, { DATE_2012_08_01, DATE_2012_08_31, false, false, false, false }, // empty { DATE_2012_07_27, DATE_2012_07_27, false, false, false, false }, { DATE_2012_07_28, DATE_2012_07_28, true, true, true, false }, { DATE_2012_07_29, DATE_2012_07_29, true, false, true, true }, { DATE_2012_07_30, DATE_2012_07_30, true, false, true, true }, { DATE_2012_07_31, DATE_2012_07_31, true, true, true, false }, { DATE_2012_08_31, DATE_2012_08_31, false, false, false, false }, // min { LocalDate.MIN, DATE_2012_07_27, false, false, false, false }, { LocalDate.MIN, DATE_2012_07_28, false, true, true, false }, { LocalDate.MIN, DATE_2012_07_29, 
false, false, true, true }, { LocalDate.MIN, DATE_2012_07_30, false, false, true, true }, { LocalDate.MIN, DATE_2012_07_31, false, false, true, true }, { LocalDate.MIN, DATE_2012_08_01, false, false, true, true }, { LocalDate.MIN, LocalDate.MAX, false, false, true, true }, // max { DATE_2012_07_27, LocalDate.MAX, false, false, true, true }, { DATE_2012_07_28, LocalDate.MAX, false, false, true, true }, { DATE_2012_07_29, LocalDate.MAX, false, false, true, true }, { DATE_2012_07_30, LocalDate.MAX, false, false, true, true }, { DATE_2012_07_31, LocalDate.MAX, false, true, true, false }, { DATE_2012_08_01, LocalDate.MAX, false, false, false, false }, }; } @ParameterizedTest @UseDataProvider("data_queries") public void test_encloses( LocalDate start, LocalDate end, boolean isEnclosedBy, boolean abuts, boolean isConnected, boolean overlaps) { LocalDateRange test = LocalDateRange.of(DATE_2012_07_28, DATE_2012_07_31); assertEquals(isEnclosedBy, test.encloses(LocalDateRange.of(start, end))); } @ParameterizedTest @UseDataProvider("data_queries") public void test_abuts( LocalDate start, LocalDate end, boolean isEnclosedBy, boolean abuts, boolean isConnected, boolean overlaps) { LocalDateRange test = LocalDateRange.of(DATE_2012_07_28, DATE_2012_07_31); assertEquals(abuts, test.abuts(LocalDateRange.of(start, end))); } @ParameterizedTest @UseDataProvider("data_queries") public void test_isConnected( LocalDate start, LocalDate end, boolean isEnclosedBy, boolean abuts, boolean isConnected, boolean overlaps) { LocalDateRange test = LocalDateRange.of(DATE_2012_07_28, DATE_2012_07_31); assertEquals(isConnected, test.isConnected(LocalDateRange.of(start, end))); } @ParameterizedTest @UseDataProvider("data_queries") public void test_overlaps( LocalDate start, LocalDate end, boolean isEnclosedBy, boolean abuts, boolean isConnected, boolean overlaps) { LocalDateRange test = LocalDateRange.of(DATE_2012_07_28, DATE_2012_07_31); assertEquals(overlaps, test.overlaps(LocalDateRange.of(start, 
end))); } @ParameterizedTest @UseDataProvider("data_queries") public void test_crossCheck( LocalDate start, LocalDate end, boolean isEnclosedBy, boolean abuts, boolean isConnected, boolean overlaps) { LocalDateRange test = LocalDateRange.of(DATE_2012_07_28, DATE_2012_07_31); LocalDateRange input = LocalDateRange.of(start, end); assertEquals(test.overlaps(input) || test.abuts(input), test.isConnected(input)); assertEquals(test.isConnected(input) && !test.abuts(input), test.overlaps(input)); } @Test public void test_encloses_max() { LocalDateRange test = LocalDateRange.of(DATE_2012_07_28, LocalDate.MAX); assertEquals(true, test.encloses(LocalDateRange.of(DATE_2012_07_28, DATE_2012_07_28))); assertEquals(true, test.encloses(LocalDateRange.of(DATE_2012_07_28, DATE_2012_07_29))); assertEquals(true, test.encloses(LocalDateRange.of(DATE_2012_07_28, LocalDate.MAX))); assertEquals(false, test.encloses(LocalDateRange.of(DATE_2012_07_01, DATE_2012_07_27))); assertEquals(false, test.encloses(LocalDateRange.of(DATE_2012_07_27, DATE_2012_07_29))); assertEquals(false, test.encloses(LocalDateRange.of(DATE_2012_07_27, LocalDate.MAX))); } @Test public void test_encloses_baseEmpty() { LocalDateRange test = LocalDateRange.of(DATE_2012_07_28, DATE_2012_07_28); assertEquals(false, test.encloses(LocalDateRange.of(DATE_2012_07_27, DATE_2012_07_27))); assertEquals(true, test.encloses(LocalDateRange.of(DATE_2012_07_28, DATE_2012_07_28))); assertEquals(false, test.encloses(LocalDateRange.of(DATE_2012_07_29, DATE_2012_07_29))); assertEquals(false, test.encloses(LocalDateRange.of(DATE_2012_07_27, DATE_2012_07_27))); assertEquals(false, test.encloses(LocalDateRange.of(DATE_2012_07_27, DATE_2012_07_28))); assertEquals(false, test.encloses(LocalDateRange.of(DATE_2012_07_28, DATE_2012_07_29))); assertEquals(false, test.encloses(LocalDateRange.of(DATE_2012_07_27, LocalDate.MAX))); assertEquals(false, test.encloses(LocalDateRange.of(DATE_2012_07_28, LocalDate.MAX))); } @Test public void 
test_abuts_baseEmpty() { LocalDateRange test = LocalDateRange.of(DATE_2012_07_28, DATE_2012_07_28); assertEquals(false, test.abuts(LocalDateRange.of(DATE_2012_07_27, DATE_2012_07_27))); assertEquals(false, test.abuts(LocalDateRange.of(DATE_2012_07_28, DATE_2012_07_28))); assertEquals(false, test.abuts(LocalDateRange.of(DATE_2012_07_29, DATE_2012_07_29))); assertEquals(true, test.abuts(LocalDateRange.of(DATE_2012_07_27, DATE_2012_07_28))); assertEquals(true, test.abuts(LocalDateRange.of(DATE_2012_07_28, DATE_2012_07_29))); } @Test public void test_isConnected_baseEmpty() { LocalDateRange test = LocalDateRange.of(DATE_2012_07_28, DATE_2012_07_28); assertEquals(false, test.isConnected(LocalDateRange.of(DATE_2012_07_27, DATE_2012_07_27))); assertEquals(true, test.isConnected(LocalDateRange.of(DATE_2012_07_28, DATE_2012_07_28))); assertEquals(false, test.isConnected(LocalDateRange.of(DATE_2012_07_29, DATE_2012_07_29))); } @Test public void test_overlaps_baseEmpty() { LocalDateRange test = LocalDateRange.of(DATE_2012_07_28, DATE_2012_07_28); assertEquals(false, test.overlaps(LocalDateRange.of(DATE_2012_07_27, DATE_2012_07_27))); assertEquals(true, test.overlaps(LocalDateRange.of(DATE_2012_07_28, DATE_2012_07_28))); assertEquals(false, test.overlaps(LocalDateRange.of(DATE_2012_07_29, DATE_2012_07_29))); } //----------------------------------------------------------------------- @DataProvider public static Object[][] data_intersection() { return new Object[][] { // adjacent { DATE_2012_07_01, DATE_2012_07_28, DATE_2012_07_28, DATE_2012_07_30, DATE_2012_07_28, DATE_2012_07_28 }, // adjacent empty { DATE_2012_07_01, DATE_2012_07_30, DATE_2012_07_30, DATE_2012_07_30, DATE_2012_07_30, DATE_2012_07_30 }, // overlap { DATE_2012_07_01, DATE_2012_07_29, DATE_2012_07_28, DATE_2012_07_30, DATE_2012_07_28, DATE_2012_07_29 }, // encloses { DATE_2012_07_01, DATE_2012_07_30, DATE_2012_07_28, DATE_2012_07_29, DATE_2012_07_28, DATE_2012_07_29 }, // encloses empty { DATE_2012_07_01, 
DATE_2012_07_30, DATE_2012_07_28, DATE_2012_07_28, DATE_2012_07_28, DATE_2012_07_28 }, }; } @ParameterizedTest @UseDataProvider("data_intersection") public void test_intersection( LocalDate start1, LocalDate end1, LocalDate start2, LocalDate end2, LocalDate expStart, LocalDate expEnd) { LocalDateRange test1 = LocalDateRange.of(start1, end1); LocalDateRange test2 = LocalDateRange.of(start2, end2); LocalDateRange expected = LocalDateRange.of(expStart, expEnd); assertTrue(test1.isConnected(test2)); assertEquals(expected, test1.intersection(test2)); } @ParameterizedTest @UseDataProvider("data_intersection") public void test_intersection_reverse( LocalDate start1, LocalDate end1, LocalDate start2, LocalDate end2, LocalDate expStart, LocalDate expEnd) { LocalDateRange test1 = LocalDateRange.of(start1, end1); LocalDateRange test2 = LocalDateRange.of(start2, end2); LocalDateRange expected = LocalDateRange.of(expStart, expEnd); assertTrue(test2.isConnected(test1)); assertEquals(expected, test2.intersection(test1)); } @Test public void test_intersectionBad() { LocalDateRange test1 = LocalDateRange.of(DATE_2012_07_01, DATE_2012_07_28); LocalDateRange test2 = LocalDateRange.of(DATE_2012_07_29, DATE_2012_07_30); assertEquals(false, test1.isConnected(test2)); assertThrows(DateTimeException.class, () -> test1.intersection(test2)); } @Test public void test_intersection_same() { LocalDateRange test = LocalDateRange.of(DATE_2012_07_28, DATE_2012_07_31); assertEquals(test, test.intersection(test)); } //----------------------------------------------------------------------- @DataProvider public static Object[][] data_union() { return new Object[][] { // adjacent { DATE_2012_07_01, DATE_2012_07_28, DATE_2012_07_28, DATE_2012_07_30, DATE_2012_07_01, DATE_2012_07_30 }, // adjacent empty { DATE_2012_07_01, DATE_2012_07_30, DATE_2012_07_30, DATE_2012_07_30, DATE_2012_07_01, DATE_2012_07_30 }, // overlap { DATE_2012_07_01, DATE_2012_07_29, DATE_2012_07_28, DATE_2012_07_30, DATE_2012_07_01, 
            DATE_2012_07_30 },
        // encloses (tail of the data_union provider; its opening rows are above this chunk)
        { DATE_2012_07_01, DATE_2012_07_30, DATE_2012_07_28, DATE_2012_07_29, DATE_2012_07_01, DATE_2012_07_30 },
        // encloses empty
        { DATE_2012_07_01, DATE_2012_07_30, DATE_2012_07_28, DATE_2012_07_28, DATE_2012_07_01, DATE_2012_07_30 },
    };
}

// union() and span() agree and are connected for every data_union row (ranges overlap or abut).
@ParameterizedTest
@UseDataProvider("data_union")
public void test_unionAndSpan(
        LocalDate start1, LocalDate end1, LocalDate start2, LocalDate end2,
        LocalDate expStart, LocalDate expEnd) {
    LocalDateRange test1 = LocalDateRange.of(start1, end1);
    LocalDateRange test2 = LocalDateRange.of(start2, end2);
    LocalDateRange expected = LocalDateRange.of(expStart, expEnd);
    assertTrue(test1.isConnected(test2));
    assertEquals(expected, test1.union(test2));
    assertEquals(expected, test1.span(test2));
}

// Same as test_unionAndSpan but with the operands swapped: union/span are symmetric.
@ParameterizedTest
@UseDataProvider("data_union")
public void test_unionAndSpan_reverse(
        LocalDate start1, LocalDate end1, LocalDate start2, LocalDate end2,
        LocalDate expStart, LocalDate expEnd) {
    LocalDateRange test1 = LocalDateRange.of(start1, end1);
    LocalDateRange test2 = LocalDateRange.of(start2, end2);
    LocalDateRange expected = LocalDateRange.of(expStart, expEnd);
    assertTrue(test2.isConnected(test1));
    assertEquals(expected, test2.union(test1));
    assertEquals(expected, test2.span(test1));
}

// The expected union/span result must enclose both of its inputs.
@ParameterizedTest
@UseDataProvider("data_union")
public void test_span_enclosesInputs(
        LocalDate start1, LocalDate end1, LocalDate start2, LocalDate end2,
        LocalDate expStart, LocalDate expEnd) {
    LocalDateRange test1 = LocalDateRange.of(start1, end1);
    LocalDateRange test2 = LocalDateRange.of(start2, end2);
    LocalDateRange expected = LocalDateRange.of(expStart, expEnd);
    assertEquals(true, expected.encloses(test1));
    assertEquals(true, expected.encloses(test2));
}

// union() of two disconnected ranges (gap on 07-28..07-29 boundary) must throw.
@Test
public void test_union_disconnected() {
    LocalDateRange test1 = LocalDateRange.of(DATE_2012_07_01, DATE_2012_07_28);
    LocalDateRange test2 = LocalDateRange.of(DATE_2012_07_29, DATE_2012_07_30);
    assertFalse(test1.isConnected(test2));
    assertThrows(DateTimeException.class, () -> test1.union(test2));
}

// span() of the same disconnected pair succeeds, covering the gap.
@Test
public void test_span_disconnected() {
    LocalDateRange test1 = LocalDateRange.of(DATE_2012_07_01, DATE_2012_07_28);
    LocalDateRange test2 = LocalDateRange.of(DATE_2012_07_29, DATE_2012_07_30);
    assertFalse(test1.isConnected(test2));
    assertEquals(LocalDateRange.of(DATE_2012_07_01, DATE_2012_07_30), test1.span(test2));
}

// union/span of a range with itself is the range itself.
@Test
public void test_unionAndSpan_same() {
    LocalDateRange test = LocalDateRange.of(DATE_2012_07_28, DATE_2012_07_31);
    assertEquals(test, test.union(test));
    assertEquals(test, test.span(test));
}

//-----------------------------------------------------------------------
// stream() yields each date from start (inclusive) to end (exclusive).
@Test
public void test_stream() {
    LocalDateRange test = LocalDateRange.of(DATE_2012_07_28, DATE_2012_07_31);
    List<LocalDate> result = test.stream().collect(Collectors.toList());
    assertEquals(3, result.size());
    assertEquals(DATE_2012_07_28, result.get(0));
    assertEquals(DATE_2012_07_29, result.get(1));
    assertEquals(DATE_2012_07_30, result.get(2));
}

// stream() works at the LocalDate.MIN boundary.
@Test
public void test_stream_MIN_MINP3() {
    LocalDateRange test = LocalDateRange.of(LocalDate.MIN, MINP3);
    List<LocalDate> result = test.stream().collect(Collectors.toList());
    assertEquals(3, result.size());
    assertEquals(LocalDate.MIN, result.get(0));
    assertEquals(MINP1, result.get(1));
    assertEquals(MINP2, result.get(2));
}

// stream() works at the LocalDate.MAX boundary.
@Test
public void test_stream_MAXM2_MAX() {
    LocalDateRange test = LocalDateRange.of(MAXM2, LocalDate.MAX);
    List<LocalDate> result = test.stream().collect(Collectors.toList());
    assertEquals(3, result.size());
    assertEquals(MAXM2, result.get(0));
    assertEquals(MAXM1, result.get(1));
    assertEquals(LocalDate.MAX, result.get(2));
}

//-----------------------------------------------------------------------
// Rows: { start, end, expected isBefore } tested against range 07-28..07-31.
@DataProvider
public static Object[][] data_isBefore() {
    return new Object[][] {
        // before start
        { DATE_2012_07_01, DATE_2012_07_27, false },
        // before end
        { DATE_2012_07_27, DATE_2012_07_30, false },
        { DATE_2012_07_28, DATE_2012_07_30, false },
        { DATE_2012_07_29, DATE_2012_07_30, false },
        // same end
        { DATE_2012_07_27, DATE_2012_07_31, false },
        { DATE_2012_07_28, DATE_2012_07_31, false },
        { DATE_2012_07_29, DATE_2012_07_31, false },
        { DATE_2012_07_30, DATE_2012_07_31, false },
        // past end
        { DATE_2012_07_27, DATE_2012_08_01, false },
        { DATE_2012_07_28, DATE_2012_08_01, false },
        { DATE_2012_07_29, DATE_2012_08_01, false },
        { DATE_2012_07_30, DATE_2012_08_01, false },
        // start past end
        { DATE_2012_07_31, DATE_2012_08_01, true },
        { DATE_2012_07_31, DATE_2012_08_31, true },
        // empty
        { DATE_2012_07_30, DATE_2012_07_30, false },
        { DATE_2012_07_31, DATE_2012_07_31, true },
        // min
        { LocalDate.MIN, DATE_2012_07_27, false },
        { LocalDate.MIN, DATE_2012_07_28, false },
        { LocalDate.MIN, DATE_2012_07_29, false },
        { LocalDate.MIN, DATE_2012_07_30, false },
        { LocalDate.MIN, DATE_2012_07_31, false },
        { LocalDate.MIN, DATE_2012_08_01, false },
        { LocalDate.MIN, LocalDate.MAX, false },
        // max
        { DATE_2012_07_27, LocalDate.MAX, false },
        { DATE_2012_07_28, LocalDate.MAX, false },
        { DATE_2012_07_29, LocalDate.MAX, false },
        { DATE_2012_07_30, LocalDate.MAX, false },
        { DATE_2012_07_31, LocalDate.MAX, true },
        { DATE_2012_08_01, LocalDate.MAX, true },
    };
}

// isBefore(range) against the fixed range 07-28..07-31.
@ParameterizedTest
@UseDataProvider("data_isBefore")
public void test_isBefore_range(LocalDate start, LocalDate end, boolean before) {
    LocalDateRange test = LocalDateRange.of(DATE_2012_07_28, DATE_2012_07_31);
    assertEquals(before, test.isBefore(LocalDateRange.of(start, end)));
}

// isBefore(date) reuses the same rows: only the row's start date matters here.
@ParameterizedTest
@UseDataProvider("data_isBefore")
public void test_isBefore_date(LocalDate start, LocalDate end, boolean before) {
    LocalDateRange test = LocalDateRange.of(DATE_2012_07_28, DATE_2012_07_31);
    assertEquals(before, test.isBefore(start));
}

// isBefore(range) semantics for an empty range at 07-29.
@Test
public void test_isBefore_range_empty() {
    LocalDateRange test = LocalDateRange.of(DATE_2012_07_29, DATE_2012_07_29);
    assertEquals(false, test.isBefore(LocalDateRange.of(DATE_2012_07_27, DATE_2012_07_28)));
    assertEquals(false, test.isBefore(LocalDateRange.of(DATE_2012_07_27, DATE_2012_07_29)));
    assertEquals(false, test.isBefore(LocalDateRange.of(DATE_2012_07_29, DATE_2012_07_29)));
    assertEquals(true, test.isBefore(LocalDateRange.of(DATE_2012_07_29, DATE_2012_07_30)));
    assertEquals(true, test.isBefore(LocalDateRange.of(DATE_2012_07_30, DATE_2012_07_30)));
    assertEquals(true, test.isBefore(LocalDateRange.of(DATE_2012_07_30, DATE_2012_07_31)));
}

// isBefore(date) semantics for an empty range at 07-29.
@Test
public void test_isBefore_date_empty() {
    LocalDateRange test = LocalDateRange.of(DATE_2012_07_29, DATE_2012_07_29);
    assertEquals(false, test.isBefore(DATE_2012_07_28));
    assertEquals(false, test.isBefore(DATE_2012_07_29));
    assertEquals(true, test.isBefore(DATE_2012_07_30));
}

//-----------------------------------------------------------------------
// Rows: { start, end, expected isAfter } tested against range 07-28..07-31.
@DataProvider
public static Object[][] data_isAfter() {
    return new Object[][] {
        // before start
        { DATE_2012_07_01, DATE_2012_07_27, true },
        // to start
        { DATE_2012_07_01, DATE_2012_07_28, true },
        // before end
        { DATE_2012_07_01, DATE_2012_07_29, false },
        { DATE_2012_07_27, DATE_2012_07_30, false },
        { DATE_2012_07_28, DATE_2012_07_30, false },
        { DATE_2012_07_29, DATE_2012_07_30, false },
        // same end
        { DATE_2012_07_27, DATE_2012_07_31, false },
        { DATE_2012_07_28, DATE_2012_07_31, false },
        { DATE_2012_07_29, DATE_2012_07_31, false },
        { DATE_2012_07_30, DATE_2012_07_31, false },
        // past end
        { DATE_2012_07_27, DATE_2012_08_01, false },
        { DATE_2012_07_28, DATE_2012_08_01, false },
        { DATE_2012_07_29, DATE_2012_08_01, false },
        { DATE_2012_07_30, DATE_2012_08_01, false },
        // start past end
        { DATE_2012_07_31, DATE_2012_08_01, false },
        { DATE_2012_07_31, DATE_2012_08_31, false },
        // empty
        { DATE_2012_07_28, DATE_2012_07_28, true },
        { DATE_2012_07_29, DATE_2012_07_29, false },
        // min
        { LocalDate.MIN, DATE_2012_07_27, true },
        { LocalDate.MIN, DATE_2012_07_28, true },
        { LocalDate.MIN, DATE_2012_07_29, false },
        { LocalDate.MIN, DATE_2012_07_30, false },
        { LocalDate.MIN, DATE_2012_07_31, false },
        { LocalDate.MIN, DATE_2012_08_01, false },
        { LocalDate.MIN, LocalDate.MAX, false },
        // max
        { DATE_2012_07_27, LocalDate.MAX, false },
        { DATE_2012_07_28, LocalDate.MAX, false },
        { DATE_2012_07_29, LocalDate.MAX, false },
        { DATE_2012_07_30, LocalDate.MAX, false },
        { DATE_2012_07_31, LocalDate.MAX, false },
        { DATE_2012_08_01, LocalDate.MAX, false },
    };
}

// isAfter(range) against the fixed range 07-28..07-31.
@ParameterizedTest
@UseDataProvider("data_isAfter")
public void test_isAfter_range(LocalDate start, LocalDate end, boolean before) {
    LocalDateRange test = LocalDateRange.of(DATE_2012_07_28, DATE_2012_07_31);
    assertEquals(before, test.isAfter(LocalDateRange.of(start, end)));
}

// isAfter(date) reuses the rows; the probe date is the row's last contained day (end - 1).
@ParameterizedTest
@UseDataProvider("data_isAfter")
public void test_isAfter_date(LocalDate start, LocalDate end, boolean before) {
    LocalDateRange test = LocalDateRange.of(DATE_2012_07_28, DATE_2012_07_31);
    assertEquals(before, test.isAfter(end.minusDays(1)));
}

// isAfter(range) semantics for an empty range at 07-29.
@Test
public void test_isAfter_range_empty() {
    LocalDateRange test = LocalDateRange.of(DATE_2012_07_29, DATE_2012_07_29);
    assertEquals(true, test.isAfter(LocalDateRange.of(DATE_2012_07_27, DATE_2012_07_28)));
    assertEquals(true, test.isAfter(LocalDateRange.of(DATE_2012_07_27, DATE_2012_07_29)));
    assertEquals(true, test.isAfter(LocalDateRange.of(DATE_2012_07_28, DATE_2012_07_28)));
    assertEquals(false, test.isAfter(LocalDateRange.of(DATE_2012_07_29, DATE_2012_07_29)));
    assertEquals(false, test.isAfter(LocalDateRange.of(DATE_2012_07_29, DATE_2012_07_30)));
    assertEquals(false, test.isAfter(LocalDateRange.of(DATE_2012_07_30, DATE_2012_07_30)));
    assertEquals(false, test.isAfter(LocalDateRange.of(DATE_2012_07_30, DATE_2012_07_31)));
}

// isAfter(date) semantics for an empty range at 07-29.
@Test
public void test_isAfter_date_empty() {
    LocalDateRange test = LocalDateRange.of(DATE_2012_07_29, DATE_2012_07_29);
    assertEquals(true, test.isAfter(DATE_2012_07_28));
    assertEquals(false, test.isAfter(DATE_2012_07_29));
    assertEquals(false, test.isAfter(DATE_2012_07_30));
}

//-----------------------------------------------------------------------
// lengthInDays(): normal, one-day and empty ranges, plus Integer.MAX_VALUE for unbounded ends.
@Test
public void test_lengthInDays() {
    assertEquals(2, LocalDateRange.of(DATE_2012_07_27, DATE_2012_07_29).lengthInDays());
    assertEquals(1, LocalDateRange.of(DATE_2012_07_28, DATE_2012_07_29).lengthInDays());
    assertEquals(0, LocalDateRange.of(DATE_2012_07_29, DATE_2012_07_29).lengthInDays());
    assertEquals(Integer.MAX_VALUE, LocalDateRange.of(LocalDate.MIN, DATE_2012_07_29).lengthInDays());
    assertEquals(Integer.MAX_VALUE, LocalDateRange.of(DATE_2012_07_29, LocalDate.MAX).lengthInDays());
    assertEquals(Integer.MAX_VALUE, LocalDateRange.of(MINP1, MAXM1).lengthInDays());
}

// toPeriod() for bounded ranges.
@Test
public void test_toPeriod() {
    assertEquals(Period.ofDays(2), LocalDateRange.of(DATE_2012_07_27, DATE_2012_07_29).toPeriod());
    assertEquals(Period.ofDays(1), LocalDateRange.of(DATE_2012_07_28, DATE_2012_07_29).toPeriod());
    assertEquals(Period.ofDays(0), LocalDateRange.of(DATE_2012_07_29, DATE_2012_07_29).toPeriod());
}

// toPeriod() overflows for a LocalDate.MIN-based range.
@Test
public void test_toPeriod_unbounded_MIN() {
    assertThrows(ArithmeticException.class, () -> LocalDateRange.of(LocalDate.MIN, DATE_2012_07_29).toPeriod());
}

// toPeriod() overflows for a LocalDate.MAX-based range.
@Test
public void test_toPeriod_unbounded_MAX() {
    assertThrows(ArithmeticException.class, () -> LocalDateRange.of(DATE_2012_07_29, LocalDate.MAX).toPeriod());
}

//-----------------------------------------------------------------------
// equals()/hashCode() contract: reflexive, equal pair, differing end, differing start, null, other type.
@Test
public void test_equals() {
    LocalDateRange a = LocalDateRange.of(DATE_2012_07_27, DATE_2012_07_29);
    LocalDateRange a2 = LocalDateRange.of(DATE_2012_07_27, DATE_2012_07_29);
    LocalDateRange b = LocalDateRange.of(DATE_2012_07_27, DATE_2012_07_30);
    LocalDateRange c = LocalDateRange.of(DATE_2012_07_28, DATE_2012_07_29);
    assertEquals(true, a.equals(a));
    assertEquals(true, a.equals(a2));
    assertEquals(false, a.equals(b));
    assertEquals(false, a.equals(c));
    assertEquals(false, a.equals(null));
    assertEquals(false, a.equals((Object) ""));
    assertEquals(true, a.hashCode() == a2.hashCode());
}

//-----------------------------------------------------------------------
// All pairs of ranges over 2016-01-01..04, each paired with the equivalent Guava
// closed-open Range, for cross-checking behaviour against Guava.
@DataProvider
public static List<List<Object>> data_crossCheckGuava() {
    List<List<Object>> list = new ArrayList<>();
    for (int i1 = 1; i1 < 5; i1++) {
        for (int j1 = i1; j1 < 5; j1++) {
            LocalDate date11 = LocalDate.of(2016, 1, i1);
            LocalDate date12 = LocalDate.of(2016, 1, j1);
            LocalDateRange extraRange1 = LocalDateRange.of(date11, date12);
            Range<LocalDate> guavaRange1 = Range.closedOpen(date11, date12);
            for (int i2 = 1; i2 < 5; i2++) {
                for (int j2 = i2; j2 < 5; j2++) {
                    LocalDate date21 = LocalDate.of(2016, 1, i2);
                    LocalDate date22 = LocalDate.of(2016, 1, j2);
                    LocalDateRange extraRange2 = LocalDateRange.of(date21, date22);
                    Range<LocalDate> guavaRange2 = Range.closedOpen(date21, date22);
                    list.add(Arrays.asList(extraRange1, extraRange2, guavaRange1, guavaRange2));
                }
            }
        }
    }
    return list;
}

// encloses() must agree with Guava's Range.encloses().
@ParameterizedTest
@UseDataProvider("data_crossCheckGuava")
public void crossCheckGuava_encloses(
        LocalDateRange extraRange1, LocalDateRange extraRange2,
        Range<LocalDate> guavaRange1, Range<LocalDate> guavaRange2) {
    boolean extra = extraRange1.encloses(extraRange2);
    boolean guava = guavaRange1.encloses(guavaRange2);
    assertEquals(guava, extra);
}

// isConnected() must agree with Guava's Range.isConnected().
@ParameterizedTest
@UseDataProvider("data_crossCheckGuava")
public void crossCheckGuava_isConnected(
        LocalDateRange extraRange1, LocalDateRange extraRange2,
        Range<LocalDate> guavaRange1, Range<LocalDate> guavaRange2) {
    boolean extra = extraRange1.isConnected(extraRange2);
    boolean guava = guavaRange1.isConnected(guavaRange2);
    assertEquals(guava, extra);
}

// intersection() must agree with Guava; both sides may throw for disconnected inputs,
// which is treated as a null result and compared.
@ParameterizedTest
@UseDataProvider("data_crossCheckGuava")
public void crossCheckGuava_intersection(
        LocalDateRange extraRange1, LocalDateRange extraRange2,
        Range<LocalDate> guavaRange1, Range<LocalDate> guavaRange2) {
    LocalDateRange extra = null;
    try {
        extra = extraRange1.intersection(extraRange2);
    } catch (DateTimeException ex) {
        // continue
    }
    Range<LocalDate> guava = null;
    try {
        guava = guavaRange1.intersection(guavaRange2);
    } catch (IllegalArgumentException ex) {
        // continue
    }
    if (extra == null) {
        assertEquals(guava, extra);
    } else if (guava != null) {
        assertEquals(guava.lowerEndpoint(), extra.getStart());
        assertEquals(guava.upperEndpoint(), extra.getEnd());
    }
}

// span() must agree with Guava's Range.span() on both endpoints.
@ParameterizedTest
@UseDataProvider("data_crossCheckGuava")
public void crossCheckGuava_span(
        LocalDateRange extraRange1, LocalDateRange extraRange2,
        Range<LocalDate> guavaRange1, Range<LocalDate> guavaRange2) {
    LocalDateRange extra = extraRange1.span(extraRange2);
    Range<LocalDate> guava = guavaRange1.span(guavaRange2);
    assertEquals(guava.lowerEndpoint(), extra.getStart());
    assertEquals(guava.upperEndpoint(), extra.getEnd());
}

}
/* * Copyright 2002-2012 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.test.web.client.samples.matchers; import java.net.URI; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlElementWrapper; import javax.xml.bind.annotation.XmlRootElement; import org.junit.Before; import org.junit.Test; import org.springframework.http.converter.HttpMessageConverter; import org.springframework.http.converter.xml.Jaxb2RootElementHttpMessageConverter; import org.springframework.test.web.Person; import org.springframework.test.web.client.MockRestServiceServer; import org.springframework.web.client.RestTemplate; import static org.hamcrest.Matchers.*; import static org.springframework.test.web.client.match.MockRestRequestMatchers.*; import static org.springframework.test.web.client.response.MockRestResponseCreators.*; /** * Examples of defining expectations on XML request content with XPath expressions. 
 *
 * @author Rossen Stoyanchev
 *
 * @see ContentRequestMatcherTests
 * @see XmlContentRequestMatcherTests
 */
public class XpathRequestMatcherTests {

    // Namespace map binding prefix "ns" for the XPath expressions below.
    private static final Map<String, String> NS =
            Collections.singletonMap("ns", "http://example.org/music/people");

    private MockRestServiceServer mockServer;

    private RestTemplate restTemplate;

    // Payload sent by each test; marshalled to XML by JAXB.
    private PeopleWrapper people;

    // Builds the test payload and a RestTemplate backed by a mock server
    // with a JAXB converter so PUT bodies are serialized as XML.
    @Before
    public void setup() {
        List<Person> composers = Arrays.asList(
                new Person("Johann Sebastian Bach").setSomeDouble(21),
                new Person("Johannes Brahms").setSomeDouble(.0025),
                new Person("Edvard Grieg").setSomeDouble(1.6035),
                new Person("Robert Schumann").setSomeDouble(Double.NaN));

        List<Person> performers = Arrays.asList(
                new Person("Vladimir Ashkenazy").setSomeBoolean(false),
                new Person("Yehudi Menuhin").setSomeBoolean(true));

        this.people = new PeopleWrapper(composers, performers);

        List<HttpMessageConverter<?>> converters = new ArrayList<HttpMessageConverter<?>>();
        converters.add(new Jaxb2RootElementHttpMessageConverter());

        this.restTemplate = new RestTemplate();
        this.restTemplate.setMessageConverters(converters);

        this.mockServer = MockRestServiceServer.createServer(this.restTemplate);
    }

    // xpath(...).exists(): every indexed composer/performer node is present.
    // The %s in the expression is filled by the trailing varargs index.
    @Test
    public void testExists() throws Exception {
        String composer = "/ns:people/composers/composer[%s]";
        String performer = "/ns:people/performers/performer[%s]";

        this.mockServer.expect(requestTo("/composers"))
            .andExpect(content().contentType("application/xml"))
            .andExpect(xpath(composer, NS, 1).exists())
            .andExpect(xpath(composer, NS, 2).exists())
            .andExpect(xpath(composer, NS, 3).exists())
            .andExpect(xpath(composer, NS, 4).exists())
            .andExpect(xpath(performer, NS, 1).exists())
            .andExpect(xpath(performer, NS, 2).exists())
            .andRespond(withSuccess());

        this.restTemplate.put(new URI("/composers"), this.people);
        this.mockServer.verify();
    }

    // xpath(...).doesNotExist(): indices outside the populated lists match nothing.
    @Test
    public void testDoesNotExist() throws Exception {
        String composer = "/ns:people/composers/composer[%s]";
        String performer = "/ns:people/performers/performer[%s]";

        this.mockServer.expect(requestTo("/composers"))
            .andExpect(content().contentType("application/xml"))
            .andExpect(xpath(composer, NS, 0).doesNotExist())
            .andExpect(xpath(composer, NS, 5).doesNotExist())
            .andExpect(xpath(performer, NS, 0).doesNotExist())
            .andExpect(xpath(performer, NS, 3).doesNotExist())
            .andRespond(withSuccess());

        this.restTemplate.put(new URI("/composers"), this.people);
        this.mockServer.verify();
    }

    // xpath(...).string(...): exact values plus Hamcrest matcher variants.
    @Test
    public void testString() throws Exception {
        String composerName = "/ns:people/composers/composer[%s]/name";
        String performerName = "/ns:people/performers/performer[%s]/name";

        this.mockServer.expect(requestTo("/composers"))
            .andExpect(content().contentType("application/xml"))
            .andExpect(xpath(composerName, NS, 1).string("Johann Sebastian Bach"))
            .andExpect(xpath(composerName, NS, 2).string("Johannes Brahms"))
            .andExpect(xpath(composerName, NS, 3).string("Edvard Grieg"))
            .andExpect(xpath(composerName, NS, 4).string("Robert Schumann"))
            .andExpect(xpath(performerName, NS, 1).string("Vladimir Ashkenazy"))
            .andExpect(xpath(performerName, NS, 2).string("Yehudi Menuhin"))
            .andExpect(xpath(composerName, NS, 1).string(equalTo("Johann Sebastian Bach"))) // Hamcrest..
            .andExpect(xpath(composerName, NS, 1).string(startsWith("Johann"))) // Hamcrest..
            .andExpect(xpath(composerName, NS, 1).string(notNullValue())) // Hamcrest..
            .andRespond(withSuccess());

        this.restTemplate.put(new URI("/composers"), this.people);
        this.mockServer.verify();
    }

    // xpath(...).number(...): numeric content, including NaN and a closeTo matcher.
    @Test
    public void testNumber() throws Exception {
        String composerDouble = "/ns:people/composers/composer[%s]/someDouble";

        this.mockServer.expect(requestTo("/composers"))
            .andExpect(content().contentType("application/xml"))
            .andExpect(xpath(composerDouble, NS, 1).number(21d))
            .andExpect(xpath(composerDouble, NS, 2).number(.0025))
            .andExpect(xpath(composerDouble, NS, 3).number(1.6035))
            .andExpect(xpath(composerDouble, NS, 4).number(Double.NaN))
            .andExpect(xpath(composerDouble, NS, 1).number(equalTo(21d))) // Hamcrest..
            .andExpect(xpath(composerDouble, NS, 3).number(closeTo(1.6, .01))) // Hamcrest..
            .andRespond(withSuccess());

        this.restTemplate.put(new URI("/composers"), this.people);
        this.mockServer.verify();
    }

    // xpath(...).booleanValue(...): boolean element content.
    @Test
    public void testBoolean() throws Exception {
        String performerBooleanValue = "/ns:people/performers/performer[%s]/someBoolean";

        this.mockServer.expect(requestTo("/composers"))
            .andExpect(content().contentType("application/xml"))
            .andExpect(xpath(performerBooleanValue, NS, 1).booleanValue(false))
            .andExpect(xpath(performerBooleanValue, NS, 2).booleanValue(true))
            .andRespond(withSuccess());

        this.restTemplate.put(new URI("/composers"), this.people);
        this.mockServer.verify();
    }

    // xpath(...).nodeCount(...): node counts, exact and via Hamcrest matchers.
    @Test
    public void testNodeCount() throws Exception {
        this.mockServer.expect(requestTo("/composers"))
            .andExpect(content().contentType("application/xml"))
            .andExpect(xpath("/ns:people/composers/composer", NS).nodeCount(4))
            .andExpect(xpath("/ns:people/performers/performer", NS).nodeCount(2))
            .andExpect(xpath("/ns:people/composers/composer", NS).nodeCount(equalTo(4))) // Hamcrest..
            .andExpect(xpath("/ns:people/performers/performer", NS).nodeCount(equalTo(2))) // Hamcrest..
            .andRespond(withSuccess());

        this.restTemplate.put(new URI("/composers"), this.people);
        this.mockServer.verify();
    }

    // JAXB root element wrapping the two lists; field access so no setters are needed.
    @SuppressWarnings("unused")
    @XmlRootElement(name="people", namespace="http://example.org/music/people")
    @XmlAccessorType(XmlAccessType.FIELD)
    private static class PeopleWrapper {

        @XmlElementWrapper(name="composers")
        @XmlElement(name="composer")
        private List<Person> composers;

        @XmlElementWrapper(name="performers")
        @XmlElement(name="performer")
        private List<Person> performers;

        // No-arg constructor required by JAXB.
        public PeopleWrapper() {
        }

        public PeopleWrapper(List<Person> composers, List<Person> performers) {
            this.composers = composers;
            this.performers = performers;
        }

        public List<Person> getComposers() {
            return this.composers;
        }

        public List<Person> getPerformers() {
            return this.performers;
        }
    }

}
/*
 * Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.kms.model;

import java.io.Serializable;
import javax.annotation.Generated;

/**
 * <p>
 * A structure for specifying the conditions under which the operations permitted by the grant are allowed.
 * </p>
 * <p>
 * You can use this structure to allow the operations permitted by the grant only when a specified encryption context is
 * present. For more information about encryption context, see <a
 * href="http://docs.aws.amazon.com/kms/latest/developerguide/encryption-context.html">Encryption Context</a> in the
 * <i>AWS Key Management Service Developer Guide</i>.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/kms-2014-11-01/GrantConstraints" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class GrantConstraints implements Serializable, Cloneable {

    /**
     * <p>
     * Key-value pairs, a subset of which must be present in the encryption context of a subsequent operation permitted
     * by the grant. The grant allows the operation when its encryption context matches this list or a subset of it.
     * </p>
     */
    private com.amazonaws.internal.SdkInternalMap<String, String> encryptionContextSubset;

    /**
     * <p>
     * Key-value pairs that must all be present in the encryption context of a subsequent operation permitted by the
     * grant. The grant allows the operation only when its encryption context matches this list exactly.
     * </p>
     */
    private com.amazonaws.internal.SdkInternalMap<String, String> encryptionContextEquals;

    /**
     * Returns the subset constraint map, lazily creating an empty map on first access (never returns null).
     *
     * @return Key-value pairs, a subset of which must be present in the encryption context of a subsequent operation
     *         permitted by the grant.
     */
    public java.util.Map<String, String> getEncryptionContextSubset() {
        if (encryptionContextSubset == null) {
            encryptionContextSubset = new com.amazonaws.internal.SdkInternalMap<String, String>();
        }
        return encryptionContextSubset;
    }

    /**
     * Sets the subset constraint map. The supplied map is defensively copied; {@code null} clears the constraint.
     *
     * @param encryptionContextSubset
     *        Key-value pairs, a subset of which must be present in the encryption context of a subsequent operation
     *        permitted by the grant.
     */
    public void setEncryptionContextSubset(java.util.Map<String, String> encryptionContextSubset) {
        this.encryptionContextSubset = encryptionContextSubset == null ? null
                : new com.amazonaws.internal.SdkInternalMap<String, String>(encryptionContextSubset);
    }

    /**
     * Fluent variant of {@link #setEncryptionContextSubset(java.util.Map)}.
     *
     * @param encryptionContextSubset
     *        Key-value pairs, a subset of which must be present in the encryption context of a subsequent operation
     *        permitted by the grant.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GrantConstraints withEncryptionContextSubset(java.util.Map<String, String> encryptionContextSubset) {
        setEncryptionContextSubset(encryptionContextSubset);
        return this;
    }

    /**
     * Adds a single entry to the subset constraint map.
     *
     * @param key the entry key
     * @param value the entry value
     * @return Returns a reference to this object so that method calls can be chained together.
     * @throws IllegalArgumentException if the key is already present.
     */
    public GrantConstraints addEncryptionContextSubsetEntry(String key, String value) {
        if (null == this.encryptionContextSubset) {
            this.encryptionContextSubset = new com.amazonaws.internal.SdkInternalMap<String, String>();
        }
        if (this.encryptionContextSubset.containsKey(key)) {
            // fix: concatenate the key directly instead of key.toString(), which threw NPE for a duplicate null key
            throw new IllegalArgumentException("Duplicated keys (" + key + ") are provided.");
        }
        this.encryptionContextSubset.put(key, value);
        return this;
    }

    /**
     * Removes all the entries added into EncryptionContextSubset.
     *
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GrantConstraints clearEncryptionContextSubsetEntries() {
        this.encryptionContextSubset = null;
        return this;
    }

    /**
     * Returns the equality constraint map, lazily creating an empty map on first access (never returns null).
     *
     * @return Key-value pairs that must all be present in the encryption context of a subsequent operation permitted
     *         by the grant.
     */
    public java.util.Map<String, String> getEncryptionContextEquals() {
        if (encryptionContextEquals == null) {
            encryptionContextEquals = new com.amazonaws.internal.SdkInternalMap<String, String>();
        }
        return encryptionContextEquals;
    }

    /**
     * Sets the equality constraint map. The supplied map is defensively copied; {@code null} clears the constraint.
     *
     * @param encryptionContextEquals
     *        Key-value pairs that must all be present in the encryption context of a subsequent operation permitted by
     *        the grant.
     */
    public void setEncryptionContextEquals(java.util.Map<String, String> encryptionContextEquals) {
        this.encryptionContextEquals = encryptionContextEquals == null ? null
                : new com.amazonaws.internal.SdkInternalMap<String, String>(encryptionContextEquals);
    }

    /**
     * Fluent variant of {@link #setEncryptionContextEquals(java.util.Map)}.
     *
     * @param encryptionContextEquals
     *        Key-value pairs that must all be present in the encryption context of a subsequent operation permitted by
     *        the grant.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GrantConstraints withEncryptionContextEquals(java.util.Map<String, String> encryptionContextEquals) {
        setEncryptionContextEquals(encryptionContextEquals);
        return this;
    }

    /**
     * Adds a single entry to the equality constraint map.
     *
     * @param key the entry key
     * @param value the entry value
     * @return Returns a reference to this object so that method calls can be chained together.
     * @throws IllegalArgumentException if the key is already present.
     */
    public GrantConstraints addEncryptionContextEqualsEntry(String key, String value) {
        if (null == this.encryptionContextEquals) {
            this.encryptionContextEquals = new com.amazonaws.internal.SdkInternalMap<String, String>();
        }
        if (this.encryptionContextEquals.containsKey(key)) {
            // fix: concatenate the key directly instead of key.toString(), which threw NPE for a duplicate null key
            throw new IllegalArgumentException("Duplicated keys (" + key + ") are provided.");
        }
        this.encryptionContextEquals.put(key, value);
        return this;
    }

    /**
     * Removes all the entries added into EncryptionContextEquals.
     *
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GrantConstraints clearEncryptionContextEqualsEntries() {
        this.encryptionContextEquals = null;
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getEncryptionContextSubset() != null) {
            sb.append("EncryptionContextSubset: ").append(getEncryptionContextSubset()).append(",");
        }
        if (getEncryptionContextEquals() != null) {
            sb.append("EncryptionContextEquals: ").append(getEncryptionContextEquals());
        }
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // idiom fix: !(obj instanceof ...) instead of "instanceof ... == false"; also covers obj == null
        if (!(obj instanceof GrantConstraints)) {
            return false;
        }
        GrantConstraints other = (GrantConstraints) obj;
        if (other.getEncryptionContextSubset() == null ^ this.getEncryptionContextSubset() == null) {
            return false;
        }
        if (other.getEncryptionContextSubset() != null
                && !other.getEncryptionContextSubset().equals(this.getEncryptionContextSubset())) {
            return false;
        }
        if (other.getEncryptionContextEquals() == null ^ this.getEncryptionContextEquals() == null) {
            return false;
        }
        if (other.getEncryptionContextEquals() != null
                && !other.getEncryptionContextEquals().equals(this.getEncryptionContextEquals())) {
            return false;
        }
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getEncryptionContextSubset() == null) ? 0 : getEncryptionContextSubset().hashCode());
        hashCode = prime * hashCode + ((getEncryptionContextEquals() == null) ? 0 : getEncryptionContextEquals().hashCode());
        return hashCode;
    }

    @Override
    public GrantConstraints clone() {
        try {
            return (GrantConstraints) super.clone();
        } catch (CloneNotSupportedException e) {
            // Cannot happen: the class implements Cloneable.
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
package com.zfgc.model.users.profile; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.List; import com.fasterxml.jackson.annotation.JsonIgnore; import com.zfgc.model.BaseZfgcModel; import com.zfgc.model.avatar.AvatarStaging; import com.zfgc.model.lkup.LkupMemberGroup; import com.zfgc.model.users.Hostname; import com.zfgc.model.users.IpAddress; import com.zfgc.model.users.SecondaryMemberGroups; import com.zfgc.model.users.UserContactInfo; import com.zfgc.model.users.UserPermissionView; import com.zfgc.model.users.UserSecurityInfo; import com.zfgc.util.time.ZfgcTimeUtils; public class UserProfileView extends BaseZfgcModel { private Integer usersId; private String displayName; private String loginName; @JsonIgnore private Date dateRegistered; private ProfileSummary profileSummary = new ProfileSummary(); private UserContactInfo userContactInfo = new UserContactInfo(); private UserSecurityInfo userSecurityInfo = new UserSecurityInfo(); private PersonalInfo personalInfo = new PersonalInfo(); private NotificationSettings notificationSettings = new NotificationSettings(); private PersonalMessagingSettings personalMessagingSettings = new PersonalMessagingSettings(); private List<Buddy> buddyList = new ArrayList<>(); private List<Buddy> ignoreList = new ArrayList(); private Avatar avatar = new Avatar(); private AvatarStaging stagedAvatar; private LkupMemberGroup primaryMemberGroup; private SecondaryMemberGroups secondaryMemberGroups; private IpAddress primaryIpAddress; private Hostname primaryHostname; private Boolean hidePm = false; private Date lastLogin; private Boolean isOnlineFlag; private Boolean activeFlag; private String timeZone; private Integer timeOffset; private UserPermissionView userPermissionView; @JsonIgnore private UserProfileView savedProfile; public String getLastLoginAsString(){ SimpleDateFormat sdf = ZfgcTimeUtils.getZfgcSimpleDateTimeFormat(super.getUserTimeZone()); 
if(lastLogin == null){ return ""; } return sdf.format(lastLogin); } public UserProfileView getSavedProfile() { return savedProfile; } public void setSavedSummary(UserProfileView savedProfile) { this.savedProfile = savedProfile; } public Integer getUsersId() { return usersId; } public void setUsersId(Integer usersId) { this.usersId = usersId; } public ProfileSummary getProfileSummary() { return profileSummary; } public void setProfileSummary(ProfileSummary profileSummary) { this.profileSummary = profileSummary; } @Override public String getHMAC() { // TODO Auto-generated method stub return null; } @Override public BaseZfgcModel copy(BaseZfgcModel other) { // TODO Auto-generated method stub return null; } public UserContactInfo getUserContactInfo() { return userContactInfo; } public void setUserContactInfo(UserContactInfo userContactInfo) { this.userContactInfo = userContactInfo; } public String getLoginName() { return loginName; } public void setLoginName(String loginName) { this.loginName = loginName; } public String getDisplayName() { return displayName; } public void setDisplayName(String displayName) { this.displayName = displayName; } public Date getDateRegistered() { return dateRegistered; } public void setDateRegistered(Date dateRegistered) { this.dateRegistered = dateRegistered; } public String getDateRegisteredAsString(){ if(dateRegistered == null){ return ""; } SimpleDateFormat sdf = ZfgcTimeUtils.getZfgcSimpleDateFormat(super.getUserTimeZone()); return sdf.format(dateRegistered); } public String getUserLocalTimeAsString(){ if(getTimeZone() == null){ return ""; } Date now = new Date(); SimpleDateFormat sdf = ZfgcTimeUtils.getZfgcSimpleDateTimeFormat(getTimeZone()); return sdf.format(now); } public Avatar getAvatar() { return avatar; } public void setAvatar(Avatar avatar) { this.avatar = avatar; } public PersonalInfo getPersonalInfo() { return personalInfo; } public void setPersonalInfo(PersonalInfo personalInfo) { this.personalInfo = personalInfo; } public 
UserSecurityInfo getUserSecurityInfo() { return userSecurityInfo; } public void setUserSecurityInfo(UserSecurityInfo userSecurityInfo) { this.userSecurityInfo = userSecurityInfo; } public NotificationSettings getNotificationSettings() { return notificationSettings; } public void setNotificationSettings(NotificationSettings notificationSettings) { this.notificationSettings = notificationSettings; } public PersonalMessagingSettings getPersonalMessagingSettings() { return personalMessagingSettings; } public void setPersonalMessagingSettings(PersonalMessagingSettings personalMessagingSettings) { this.personalMessagingSettings = personalMessagingSettings; } public List<Buddy> getBuddyList() { return buddyList; } public void setBuddyList(List<Buddy> buddyList) { this.buddyList = buddyList; } public Date getLastLogin() { return lastLogin; } public void setLastLogin(Date lastLogin) { this.lastLogin = lastLogin; } public Boolean getIsOnlineFlag() { return isOnlineFlag; } public void setIsOnlineFlag(Boolean isOnlineFlag) { this.isOnlineFlag = isOnlineFlag; } public Boolean getActiveFlag() { return activeFlag; } public void setActiveFlag(Boolean activeFlag) { this.activeFlag = activeFlag; } public LkupMemberGroup getPrimaryMemberGroup() { return primaryMemberGroup; } public void setPrimaryMemberGroup(LkupMemberGroup primaryMemberGroup) { this.primaryMemberGroup = primaryMemberGroup; } public IpAddress getPrimaryIpAddress() { return primaryIpAddress; } public void setPrimaryIpAddress(IpAddress primaryIpAddress) { this.primaryIpAddress = primaryIpAddress; } public String getTimeZone() { return timeZone; } public void setTimeZone(String timeZone) { this.timeZone = timeZone; } public Integer getTimeOffset() { return timeOffset; } public void setTimeOffset(Integer timeOffset) { this.timeOffset = timeOffset; } public SecondaryMemberGroups getSecondaryMemberGroups() { return secondaryMemberGroups; } public void setSecondaryMemberGroups(SecondaryMemberGroups secondaryMemberGroups) { 
this.secondaryMemberGroups = secondaryMemberGroups; } public Boolean getHidePm() { return hidePm; } public void setHidePm(Boolean hidePm) { this.hidePm = hidePm; } public Hostname getPrimaryHostname() { return primaryHostname; } public void setPrimaryHostname(Hostname primaryHostname) { this.primaryHostname = primaryHostname; } public List<Buddy> getIgnoreList() { return ignoreList; } public void setIgnoreList(List<Buddy> ignoreList) { this.ignoreList = ignoreList; } public UserPermissionView getUserPermissionView() { return userPermissionView; } public void setUserPermissionView(UserPermissionView userPermissionView) { this.userPermissionView = userPermissionView; } public AvatarStaging getStagedAvatar() { return stagedAvatar; } public void setStagedAvatar(AvatarStaging stagedAvatar) { this.stagedAvatar = stagedAvatar; } }
/* * Copyright 2015 OpenMarket Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package im.vector.adapters; import android.annotation.SuppressLint; import android.content.ClipData; import android.content.ClipboardManager; import android.content.Context; import android.text.TextUtils; import android.util.Log; import android.view.LayoutInflater; import android.view.MotionEvent; import android.view.View; import android.view.ViewGroup; import android.widget.BaseExpandableListAdapter; import android.widget.CheckBox; import android.widget.ImageView; import android.widget.RelativeLayout; import android.widget.TextView; import android.widget.Toast; import org.matrix.androidsdk.MXSession; import org.matrix.androidsdk.data.Room; import org.matrix.androidsdk.db.MXMediasCache; import org.matrix.androidsdk.rest.model.PowerLevels; import org.matrix.androidsdk.rest.model.RoomMember; import org.matrix.androidsdk.rest.model.RoomThirdPartyInvite; import org.matrix.androidsdk.rest.model.User; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import im.vector.R; import im.vector.activity.CommonActivityUtils; import im.vector.util.VectorUtils; /** * An adapter which can display room information. 
*/
public class VectorRoomDetailsMembersAdapter extends BaseExpandableListAdapter {

    /** Callbacks fired when the user interacts with a member cell. */
    public interface OnParticipantsListener {
        /**
         * The user taps on the dedicated "Remove" button
         * @param aRoomParticipant the participant to remove
         */
        void onRemoveClick(final ParticipantAdapterItem aRoomParticipant);

        /**
         * The user taps on "Leave" button
         */
        void onLeaveClick();

        /**
         * The user selects / deselects a member.
         * @param userId the toggled member's user id
         */
        void onSelectUserId(String userId);

        /**
         * The user taps on a cell.
         * The standard onClickListener might not work because
         * the upper view is scrollable.
         * @param aRoomParticipant the clicked participant
         */
        void onClick(final ParticipantAdapterItem aRoomParticipant);

        // group expanding state management
        void onGroupCollapsedNotif(int aGroupPosition);
        void onGroupExpandedNotif(int aGroupPosition);
    }

    // search events listener
    public interface OnRoomMembersSearchListener {
        /**
         * The search is ended.
         * @param aSearchCountResult the number of matched members
         * @param aIsSearchPerformed true if the search has been performed, false otherwise
         */
        void onSearchEnd(final int aSearchCountResult, final boolean aIsSearchPerformed);
    }

    private final String LOG_TAG ="VectorRoomDlsMemAdapt";

    private final Context mContext;
    private final LayoutInflater mLayoutInflater;
    // cell currently swiped open (translated); reset when another swipe starts
    private View mSwipingCellView;

    private final MXSession mSession;
    private Room mRoom;
    private final int mChildLayoutResourceId;
    private final int mGroupLayoutResourceId;

    // multi-selection state
    private boolean mIsMultiSelectionMode;
    private ArrayList<String> mSelectedUserIds = new ArrayList<String>();

    // one member list per expandable-list group position
    private ArrayList<ArrayList<ParticipantAdapterItem>> mRoomMembersListByGroupPosition;
    // display names collected while building the model; used to disambiguate duplicates
    private ArrayList<String> mDisplaynamesList = new ArrayList<String>();

    private int mGroupIndexInvitedMembers = -1;  // "Invited" index
    private int mGroupIndexPresentMembers = -1;  // "Present members" index (original comment said "Favourites"; see getGroupTitle())

    // search list view: list view displaying the result of the search based on "mSearchPattern"
    private String mSearchPattern = "";
    //ParticipantAdapterItem mFirstEntry;
    private OnParticipantsListener mOnParticipantsListener;

    /**
     * Recycle view holder class.
     * Used in the group views of the expandable list view.
     */
    private static class GroupViewHolder {
        final TextView mTitleTxtView;
        final ImageView mExpanderLogoImageView;

        GroupViewHolder(View aView){
            mTitleTxtView = (TextView) aView.findViewById(org.matrix.androidsdk.R.id.heading);
            mExpanderLogoImageView = (ImageView)aView.findViewById(org.matrix.androidsdk.R.id.heading_image);
        }
    }

    /**
     * Recycle view holder class.
     * Used in the child views of the expandable list view.
     */
    private static class ChildMemberViewHolder {
        final ImageView mMemberAvatarImageView;
        final ImageView mMemberAvatarBadgeImageView;
        final TextView mMemberNameTextView;
        final TextView mMemberStatusTextView;
        final View mHiddenListActionsView;
        final View mDeleteActionsView;
        final RelativeLayout mSwipeCellLayout;
        final CheckBox mMultipleSelectionCheckBox;

        ChildMemberViewHolder(View aParentView){
            mMemberAvatarImageView = (ImageView)aParentView.findViewById(R.id.filtered_list_avatar);
            mMemberAvatarBadgeImageView = (ImageView) aParentView.findViewById(R.id.filtered_list_avatar_badge);
            mMemberNameTextView = (TextView) aParentView.findViewById(R.id.filtered_list_name);
            mMemberStatusTextView = (TextView) aParentView.findViewById(R.id.filtered_list_status);
            mHiddenListActionsView = aParentView.findViewById(R.id.filtered_list_actions);
            mSwipeCellLayout = (RelativeLayout) aParentView.findViewById(R.id.filtered_list_cell);
            mMultipleSelectionCheckBox = (CheckBox)aParentView.findViewById(R.id.filtered_list_checkbox);
            mDeleteActionsView = aParentView.findViewById(R.id.filtered_list_delete_action);
        }
    }

    /**
     * Constructor.
     * @param aContext the context.
     * @param aChildLayoutResourceId the child layout of the expandable list view
     * @param aGroupHeaderLayoutResourceId the group layout of the expandable list view
     * @param aSession the session
     * @param aRoomId the room id
     * @param aMediasCache the medias cache
     */
    public VectorRoomDetailsMembersAdapter(Context aContext, int aChildLayoutResourceId, int aGroupHeaderLayoutResourceId, MXSession aSession, String aRoomId, MXMediasCache aMediasCache) {
        mContext = aContext;
        mLayoutInflater = LayoutInflater.from(aContext);
        mChildLayoutResourceId = aChildLayoutResourceId;// R.layout.adapter_item_vector_add_participants
        mGroupLayoutResourceId = aGroupHeaderLayoutResourceId; // R.layout.adapter_item_vector_recent_header
        mSession = aSession;
        mRoom = mSession.getDataHandler().getRoom(aRoomId);

        // display check box to select multiple items
        // by default, they are not displayed
        mIsMultiSelectionMode = false;
    }

    /**
     * Search a pattern in the known members list.
     * @param aPattern the pattern to search
     * @param searchListener the search listener
     * @param aIsRefreshForced set to true to force the refresh
     */
    @SuppressLint("LongLogTag")
    public void setSearchedPattern(String aPattern, final OnRoomMembersSearchListener searchListener, boolean aIsRefreshForced) {
        if (TextUtils.isEmpty(aPattern)) {
            // refresh list members without any pattern filter (nominal display)
            mSearchPattern = null;
            updateRoomMembersDataModel(searchListener);
        } else {
            // new pattern different from previous one?
            if (!aPattern.trim().equals(mSearchPattern) || aIsRefreshForced) {
                mSearchPattern = aPattern.trim().toLowerCase();
                updateRoomMembersDataModel(searchListener);
            } else {
                // search pattern is identical, notify listener and exit
                if (null != searchListener) {
                    int searchItemsCount = getItemsCount();
                    searchListener.onSearchEnd(searchItemsCount, false/*search not updated*/);
                }
            }
        }
    }

    /**
     * @return the total number of items (present + invited members)
     */
    public int getItemsCount() {
        int itemsCount = getChildrenCount(mGroupIndexInvitedMembers);
        itemsCount += getChildrenCount(mGroupIndexPresentMembers);

        return itemsCount;
    }

    /**
     * Update the participants listener.
     * @param onParticipantsListener the new listener
     */
    public void setOnParticipantsListener(OnParticipantsListener onParticipantsListener) {
        mOnParticipantsListener = onParticipantsListener;
    }

    /**
     * @return the list of selected user ids
     */
    public ArrayList<String> getSelectedUserIds() {
        return mSelectedUserIds;
    }

    /**
     * @param isMultiSelectionMode the new selection mode
     */
    public void setMultiSelectionMode(boolean isMultiSelectionMode) {
        // toggling the mode always clears the current selection
        mIsMultiSelectionMode = isMultiSelectionMode;
        mSelectedUserIds = new ArrayList<String>();
    }

    /**
     * Test if the adapter data model is filtered with the search pattern.
     * @return true if search mode is enabled, false otherwise
     */
    private boolean isSearchModeEnabled() {
        return (!TextUtils.isEmpty(mSearchPattern));
    }

    /**
     * Compare 2 strings and return their sort order; a null string sorts first.
     * @param s1 string 1.
     * @param s2 string 2.
     * @return the sort order.
     */
    private int alphaComparator(String s1, String s2) {
        if (s1 == null) {
            return -1;
        } else if (s2 == null) {
            return 1;
        }

        return String.CASE_INSENSITIVE_ORDER.compare(s1, s2);
    }

    /**
     * Update the data model of the adapter which is based on a set of ParticipantAdapterItem objects.
     * @param aSearchListener search events listener, set to null if search not enabled
     */
    public void updateRoomMembersDataModel(final OnRoomMembersSearchListener aSearchListener) {
        if (!mSession.isAlive()) {
            Log.e(LOG_TAG, "updateRoomMembersDataModel the session is not anymore valid");
            return;
        }

        boolean isSearchEnabled = false;
        int groupIndex = 0;
        ParticipantAdapterItem participantItem;

        ArrayList<ParticipantAdapterItem> presentMembersList = new ArrayList<ParticipantAdapterItem>();

        if (isSearchModeEnabled()) {
            isSearchEnabled = true;
        }

        if (null == mRoomMembersListByGroupPosition) {
            mRoomMembersListByGroupPosition = new ArrayList<ArrayList<ParticipantAdapterItem>>();
        } else {
            mRoomMembersListByGroupPosition.clear();
        }

        mDisplaynamesList = new ArrayList<String>();

        // reset group indexes
        mGroupIndexPresentMembers = -1;
        mGroupIndexInvitedMembers = -1;

        // retrieve the room members
        ArrayList<ParticipantAdapterItem> actualParticipants = new ArrayList<ParticipantAdapterItem>();
        ArrayList<ParticipantAdapterItem> invitedMembers = new ArrayList<ParticipantAdapterItem>();

        Collection<RoomMember> activeMembers = mRoom.getActiveMembers();
        String myUserId = mSession.getMyUserId();
        final PowerLevels powerLevels = mRoom.getLiveState().getPowerLevels();

        // search loop to extract the following members: current user, invited, administrator and others
        for (RoomMember member : activeMembers) {
            participantItem = new ParticipantAdapterItem(member);

            // if search is enabled, just skip the member if pattern does not match
            if(isSearchEnabled && (!participantItem.contains(mSearchPattern))){
                continue;
            }

            // oneself member ("You") is displayed on the first row
            if (member.getUserId().equals(myUserId)) {
                presentMembersList.add(participantItem);
            } else {
                if (RoomMember.MEMBERSHIP_INVITE.equals(member.membership)) {
                    // invited members
                    invitedMembers.add(participantItem);
                } else {
                    // the other members..
                    actualParticipants.add(participantItem);
                }
            }

            if (!TextUtils.isEmpty(participantItem.mDisplayName)) {
                mDisplaynamesList.add(participantItem.mDisplayName);
            }
        }

        // add 3rd party invite
        Collection<RoomThirdPartyInvite> thirdPartyInvites = mRoom.getLiveState().thirdPartyInvites();

        for (RoomThirdPartyInvite invite : thirdPartyInvites) {
            // If the home server has converted the 3pid invite into a room member, do not show it
            if (null == mRoom.getLiveState().memberWithThirdPartyInviteToken(invite.token)) {
                ParticipantAdapterItem participant = new ParticipantAdapterItem(invite.display_name, "", null);

                if ((!isSearchEnabled) || participant.contains(mSearchPattern)) {
                    invitedMembers.add(participant);
                }
            }
        }

        // Comparator to order members alphabetically.
        // Ordering: known users before unknown ones; among currently-active users,
        // higher power level first, then name; otherwise most recently active first.
        Comparator<ParticipantAdapterItem> comparator = new Comparator<ParticipantAdapterItem>() {
            @Override
            public int compare(ParticipantAdapterItem part1, ParticipantAdapterItem part2) {
                User userA = mSession.getDataHandler().getUser(part1.mUserId);
                User userB = mSession.getDataHandler().getUser(part2.mUserId);

                String userADisplayName = part1.getComparisonDisplayName();
                String userBDisplayName = part2.getComparisonDisplayName();

                boolean isUserA_Active = false;
                boolean isUserB_Active = false;

                if ((null != userA) && (null != userA.currently_active)) {
                    isUserA_Active = userA.currently_active;
                }

                if ((null != userB) && (null != userB.currently_active)) {
                    isUserB_Active = userB.currently_active;
                }

                int powerLevelA = 0;
                int powerLevelB = 0;

                if (null != powerLevels) {
                    if ((null != userA) && (null != userA.user_id)) {
                        powerLevelA = powerLevels.getUserPowerLevel(userA.user_id);
                    }

                    if ((null != userB) && (null != userB.user_id)) {
                        powerLevelB = powerLevels.getUserPowerLevel(userB.user_id);
                    }
                }

                if ((null == userA) && (null == userB)) {
                    return alphaComparator(userADisplayName, userBDisplayName);
                } else if ((null != userA) && (null == userB)) {
                    return +1;
                } else if ((null == userA) && (null != userB)) {
                    return -1;
                } else if (isUserA_Active && isUserB_Active) {
                    if (powerLevelA == powerLevelB) {
                        return alphaComparator(userADisplayName, userBDisplayName);
                    } else {
                        return (powerLevelB - powerLevelA) > 0 ? +1 : -1;
                    }
                }

                if (isUserA_Active && !isUserB_Active) {
                    return -1;
                }
                if (!isUserA_Active && isUserB_Active) {
                    return +1;
                }

                // Finally, compare the timestamps
                long lastActiveAgoA = (null != userA) ? userA.getAbsoluteLastActiveAgo() : 0;
                long lastActiveAgoB = (null != userB) ? userB.getAbsoluteLastActiveAgo() : 0;

                long diff = lastActiveAgoA - lastActiveAgoB;

                if (diff == 0) {
                    return alphaComparator(userADisplayName, userBDisplayName);
                }

                // if only one member has a lastActiveAgo, prefer it
                if (0 == lastActiveAgoA) {
                    return +1;
                } else if (0 == lastActiveAgoB) {
                    return -1;
                }

                return (diff > 0) ? +1 : -1;
            }
        };

        // create "members present in the room" list
        Collections.sort(actualParticipants, comparator);
        presentMembersList.addAll(actualParticipants);

        // first group: members present in the room
        if (0 != presentMembersList.size()) {
            mRoomMembersListByGroupPosition.add(presentMembersList);
            mGroupIndexPresentMembers = groupIndex;
            groupIndex++;
        }

        // second group: invited members only
        if (0 != invitedMembers.size()) {
            Collections.sort(invitedMembers, comparator);
            mRoomMembersListByGroupPosition.add(invitedMembers);
            mGroupIndexInvitedMembers = groupIndex;
        }

        // notify end of search if listener is provided
        if (null != aSearchListener) {
            aSearchListener.onSearchEnd(getItemsCount(), isSearchEnabled);
        }

        // force UI rendering update
        notifyDataSetChanged();
    }

    /**
     *
     * @return the participant User Ids except oneself.
     */
    public ArrayList<String> getUserIdsList() {
        ArrayList<String> idsListRetValue = new ArrayList<String>();

        if (mGroupIndexPresentMembers >= 0) {
            int listSize = mRoomMembersListByGroupPosition.get(mGroupIndexPresentMembers).size();

            // the first item is always oneself, so skip the first element
            for (int index = 1; index < listSize; index++) {
                ParticipantAdapterItem item = mRoomMembersListByGroupPosition.get(mGroupIndexPresentMembers).get(index);

                // sanity check
                if (null != item.mUserId) {
                    idsListRetValue.add(item.mUserId);
                }
            }
        }

        return idsListRetValue;
    }

    /**
     * Compute the name of the group according to its position.
     * @param aGroupPosition index of the section
     * @return group title corresponding to the index
     */
    private String getGroupTitle(int aGroupPosition) {
        String retValue;

        if (mGroupIndexInvitedMembers == aGroupPosition) {
            retValue = mContext.getResources().getString(R.string.room_details_people_invited_group_name);
        } else if (mGroupIndexPresentMembers== aGroupPosition) {
            retValue = mContext.getResources().getString(R.string.room_details_people_present_group_name);
        } else {
            // unknown section - should not happen
            retValue = "??";
        }

        return retValue;
    }

    // =============================================================================================
    // BaseExpandableListAdapter implementation

    @Override
    public void onGroupCollapsed(int aGroupPosition) {
        super.onGroupCollapsed(aGroupPosition);
        // forward the collapse event so callers can persist the expanded state
        if (null != mOnParticipantsListener) {
            mOnParticipantsListener.onGroupCollapsedNotif(aGroupPosition);
        }
    }

    @Override
    public void onGroupExpanded(int aGroupPosition) {
        super.onGroupExpanded(aGroupPosition);
        if (null != mOnParticipantsListener) {
            mOnParticipantsListener.onGroupExpandedNotif(aGroupPosition);
        }
    }

    @Override
    public int getGroupCount() {
        if (null != mRoomMembersListByGroupPosition) {
            return mRoomMembersListByGroupPosition.size();
        } else {
            return 0;
        }
    }

    @SuppressLint("LongLogTag")
    @Override
    public int getChildrenCount(int aGroupPosition) {
        int countRetValue = 0;
        try {
            // -1 is the "group not present" sentinel used by the group index fields
            if ((null != mRoomMembersListByGroupPosition) && (-1 != aGroupPosition)) {
                countRetValue = mRoomMembersListByGroupPosition.get(aGroupPosition).size();
            }
        } catch(Exception ex) {
            Log.e(LOG_TAG,"## getChildrenCount(): Exception Msg=" + ex.getMessage());
        }

        return countRetValue;
    }

    @Override
    public Object getGroup(int aGroupPosition) {
        return getGroupTitle(aGroupPosition);
    }

    @Override
    public Object getChild(int aGroupPosition, int aChildPosition) {
        Object reValueObject = null;
        if(null != mRoomMembersListByGroupPosition) {
            reValueObject = mRoomMembersListByGroupPosition.get(aGroupPosition).get(aChildPosition);
        }
        return reValueObject;
    }

    @Override
    public long getGroupId(int aGroupPosition) {
        return getGroupTitle(aGroupPosition).hashCode();
    }

    @Override
    public long getChildId(int groupPosition, int childPosition) {
        return 0L;
    }

    @Override
    public boolean hasStableIds() {
        return false;
    }

    @Override
    public View getGroupView(int aGroupPosition, boolean aIsExpanded, View aConvertView, ViewGroup aParentView) {
        GroupViewHolder viewHolder;

        // standard view-holder recycling
        if (aConvertView == null) {
            aConvertView = mLayoutInflater.inflate(mGroupLayoutResourceId, null);
            viewHolder = new GroupViewHolder(aConvertView);
            aConvertView.setTag(viewHolder);
        } else {
            viewHolder = (GroupViewHolder)aConvertView.getTag();
        }

        // set the group title
        String titleValue = getGroupTitle(aGroupPosition);
        viewHolder.mTitleTxtView.setText(titleValue);

        // set the expander logo
        int expanderLogoResId = aIsExpanded?R.drawable.ic_material_expand_more_black:R.drawable.ic_material_expand_less_black;
        viewHolder.mExpanderLogoImageView.setImageResource(expanderLogoResId);

        return aConvertView;
    }

    /**
     * Builds/recycles one member cell: avatar, name (disambiguated with the user id
     * when the display name is duplicated), admin/mod badge, online status, and the
     * remove/leave, tap, long-press (clipboard copy), swipe and multi-selection handlers.
     */
    @Override
    public View getChildView(final int aGroupPosition, final int aChildPosition, boolean isLastChild, View aConvertView, ViewGroup aParentView) {
        final ChildMemberViewHolder viewHolder;
        boolean isActionsMenuHidden;
        final ParticipantAdapterItem participant;
        boolean isSearchMode = isSearchModeEnabled();
        // position 0 of the "present" group is always the logged-in user (see updateRoomMembersDataModel)
        final boolean isLoggedUserPosition = ((0==aChildPosition) && (mGroupIndexPresentMembers==aGroupPosition));

        participant = mRoomMembersListByGroupPosition.get(aGroupPosition).get(aChildPosition);

        // set group/child positions
        participant.mReferenceGroupPosition = aGroupPosition;
        participant.mReferenceChildPosition = aChildPosition;

        if (aConvertView == null) {
            aConvertView = mLayoutInflater.inflate(mChildLayoutResourceId, aParentView, false);
            viewHolder = new ChildMemberViewHolder(aConvertView);
            aConvertView.setTag(viewHolder);
        } else {
            viewHolder = (ChildMemberViewHolder)aConvertView.getTag();
        }

        if (!mSession.isAlive()) {
            Log.e(LOG_TAG, "getChildView : the session is not anymore valid");
            return aConvertView;
        }

        // 1 - display member avatar
        if (null != participant.mAvatarBitmap) {
            viewHolder.mMemberAvatarImageView.setImageBitmap(participant.mAvatarBitmap);
        } else {
            if (TextUtils.isEmpty(participant.mUserId)) {
                VectorUtils.loadUserAvatar(mContext, mSession, viewHolder.mMemberAvatarImageView, participant.mAvatarUrl, participant.mDisplayName, participant.mDisplayName);
            } else {
                // try to provide a better display for a participant when the user is known.
                if (TextUtils.equals(participant.mUserId, participant.mDisplayName) || TextUtils.isEmpty(participant.mAvatarUrl)) {
                    User user = mSession.getDataHandler().getStore().getUser(participant.mUserId);

                    if (null != user) {
                        if (TextUtils.equals(participant.mUserId, participant.mDisplayName) && !TextUtils.isEmpty(user.displayname)) {
                            participant.mDisplayName = user.displayname;
                        }

                        if (null == participant.mAvatarUrl) {
                            participant.mAvatarUrl = user.avatar_url;
                        }
                    }
                }

                VectorUtils.loadUserAvatar(mContext, mSession, viewHolder.mMemberAvatarImageView, participant.mAvatarUrl, participant.mUserId, participant.mDisplayName);
            }
        }

        // 2 - display member name
        // Specific member name: member is "You" - at 0 position we must find the logged user, we then do not display its name, but R.string.you
        String memberName = (isLoggedUserPosition && !isSearchMode) ? (String)mContext.getText(R.string.you) : participant.mDisplayName;

        // detect if the displayname is used several times
        if (!TextUtils.isEmpty(memberName)) {
            int pos = mDisplaynamesList.indexOf(memberName);
            if (pos >= 0) {
                // a unique name has identical first and last indexes
                if (pos == mDisplaynamesList.lastIndexOf(memberName)) {
                    pos = -1;
                }
            }

            if ((pos >= 0) && !TextUtils.isEmpty(participant.mUserId)) {
                memberName += " (" + participant.mUserId + ")";
            }
        }
        viewHolder.mMemberNameTextView.setText(memberName);

        // 2b admin badge
        viewHolder.mMemberAvatarBadgeImageView.setVisibility(View.GONE);

        PowerLevels powerLevels = null;
        if (null != mRoom) {
            if (null != (powerLevels = mRoom.getLiveState().getPowerLevels())) {
                if (powerLevels.getUserPowerLevel(participant.mUserId) >= CommonActivityUtils.UTILS_POWER_LEVEL_ADMIN) {
                    viewHolder.mMemberAvatarBadgeImageView.setVisibility(View.VISIBLE);
                    viewHolder.mMemberAvatarBadgeImageView.setImageResource(R.drawable.admin_icon);
                } else if (powerLevels.getUserPowerLevel(participant.mUserId) >= CommonActivityUtils.UTILS_POWER_LEVEL_MODERATOR) {
                    viewHolder.mMemberAvatarBadgeImageView.setVisibility(View.VISIBLE);
                    viewHolder.mMemberAvatarBadgeImageView.setImageResource(R.drawable.mod_icon);
                }
            }
        }

        // 3 - display member status
        viewHolder.mMemberStatusTextView.setText(VectorUtils.getUserOnlineStatus(mContext, mSession, participant.mUserId, null));

        // add "remove member from room" action
        viewHolder.mDeleteActionsView.setOnClickListener(new View.OnClickListener() {
            @SuppressLint("LongLogTag")
            @Override
            public void onClick(View v) {
                if (null != mOnParticipantsListener) {
                    try {
                        if (isLoggedUserPosition) {
                            // logged user's leaving the room..
                            mOnParticipantsListener.onLeaveClick();
                        } else {
                            mOnParticipantsListener.onRemoveClick(participant);
                        }
                    } catch (Exception e) {
                        Log.e(LOG_TAG,"## Delete action listener: Exception Msg="+e.getMessage());
                    }
                }
            }
        });

        // manage the swipe to display actions
        if (null != mSwipingCellView) {
            mSwipingCellView.setTranslationX(0);
            mSwipingCellView = null;
        }

        // cancel any translation
        viewHolder.mSwipeCellLayout.setTranslationX(0);

        // during a room creation, there is no dedicated power level
        if (null != powerLevels) {
            int myPowerLevel;
            int memberPowerLevel;
            int kickPowerLevel;

            myPowerLevel = powerLevels.getUserPowerLevel(mSession.getCredentials().userId);
            memberPowerLevel = powerLevels.getUserPowerLevel(participant.mUserId);
            kickPowerLevel = powerLevels.kick;

            if(isLoggedUserPosition) {
                // always offer possibility to leave the room to the logged member
                isActionsMenuHidden = false;
            } else {
                // hide actions menu if my power level is lower or equal than the member's one
                isActionsMenuHidden = (((myPowerLevel <= memberPowerLevel) || (myPowerLevel < kickPowerLevel)));
            }
        } else {
            isActionsMenuHidden = (null == mRoom);
        }

        // set swipe layout click handler: notify the listener of the adapter
        viewHolder.mSwipeCellLayout.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                if (null != mOnParticipantsListener) {
                    if (!TextUtils.isEmpty(participant.mUserId)) {
                        String userId = participant.mUserId;

                        // check if the userId is valid (an email 3pid or a "@local:domain" matrix id)
                        if (android.util.Patterns.EMAIL_ADDRESS.matcher(userId).matches() || (userId.startsWith("@") && (userId.indexOf(":") > 1))) {
                            mOnParticipantsListener.onClick(participant);
                        } else {
                            Toast.makeText(mContext, R.string.malformed_id, Toast.LENGTH_LONG).show();
                        }
                    }
                }
            }
        });

        // set long click handler: copy member name to clipboard
        View.OnLongClickListener onLongClickListener = new View.OnLongClickListener() {
            @Override
            public boolean onLongClick(View v) {
                VectorUtils.copyToClipboard(mContext, viewHolder.mMemberNameTextView.getText());
                return true;
            }
        };

        // the cellLayout setOnLongClickListener might be trapped by the scroll management
        // so add it to some UI items.
        viewHolder.mSwipeCellLayout.setOnLongClickListener(new View.OnLongClickListener() {
            @Override
            public boolean onLongClick(View v) {
                return true;
            }
        });

        // long tap on the avatar or the display name copies it into the clipboard.
        viewHolder.mMemberNameTextView.setOnLongClickListener(onLongClickListener);
        viewHolder.mMemberAvatarImageView.setOnLongClickListener(onLongClickListener);

        // SWIPE: the swipe should be enabled when there is no search and the user can kick other members
        if (isSearchMode || isActionsMenuHidden || (null == participant.mRoomMember)) {
            viewHolder.mSwipeCellLayout.setOnTouchListener(null);
        } else {
            viewHolder.mSwipeCellLayout.setOnTouchListener(new View.OnTouchListener() {
                private float mStartX = 0;

                @Override
                public boolean onTouch(final View v, MotionEvent event) {
                    final int hiddenViewWidth = viewHolder.mHiddenListActionsView.getWidth();
                    boolean isMotionTrapped = true;

                    switch (event.getAction()) {
                        case MotionEvent.ACTION_DOWN: {
                            // cancel hidden view display
                            if (null == mSwipingCellView) {
                                mSwipingCellView = viewHolder.mSwipeCellLayout;
                            }

                            mStartX = event.getX();
                            break;
                        }
                        case MotionEvent.ACTION_MOVE: {
                            // drag the cell left, clamped to the hidden actions width
                            float x = event.getX() + v.getTranslationX();
                            float deltaX = Math.max(Math.min(x - mStartX, 0), -hiddenViewWidth);
                            viewHolder.mSwipeCellLayout.setTranslationX(deltaX);
                        }
                        break;
                        case MotionEvent.ACTION_CANCEL:
                        case MotionEvent.ACTION_UP: {
                            float x = event.getX() + v.getTranslationX();

                            // assume it is a tap (moved less than 10px)
                            if (Math.abs(x - mStartX) < 10) {
                                // ignore the cancel event
                                if (event.getAction() == MotionEvent.ACTION_UP) {
                                    if (null != mOnParticipantsListener) {
                                        mOnParticipantsListener.onClick(participant);
                                    }
                                }
                                isMotionTrapped = false;
                            } else {
                                // snap open past the halfway point, otherwise snap closed
                                float deltaX = -Math.max(Math.min(x - mStartX, 0), -hiddenViewWidth);

                                if (deltaX > (hiddenViewWidth / 2)) {
                                    viewHolder.mSwipeCellLayout.setTranslationX(-hiddenViewWidth);
                                } else {
                                    viewHolder.mSwipeCellLayout.setTranslationX(0);
                                    mSwipingCellView = null;
                                }
                            }
                            break;
                        }
                        default:
                            isMotionTrapped = false;
                    }

                    return isMotionTrapped;
                }
            });
        }

        int backgroundColor = mContext.getResources().getColor(android.R.color.white);

        // multi selections mode
        // do not display a checkbox for oneself
        if (mIsMultiSelectionMode && !TextUtils.equals(mSession.getMyUserId(), participant.mUserId) && (null != participant.mRoomMember)) {
            viewHolder.mMultipleSelectionCheckBox.setVisibility(View.VISIBLE);
            viewHolder.mMultipleSelectionCheckBox.setChecked(mSelectedUserIds.indexOf(participant.mUserId) >= 0);

            if (viewHolder.mMultipleSelectionCheckBox.isChecked()) {
                backgroundColor = mContext.getResources().getColor(R.color.vector_05_gray);
            }

            viewHolder.mMultipleSelectionCheckBox.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    if (viewHolder.mMultipleSelectionCheckBox.isChecked()) {
                        mSelectedUserIds.add(participant.mUserId);
                        viewHolder.mSwipeCellLayout.setBackgroundColor(mContext.getResources().getColor(R.color.vector_05_gray));
                    } else {
                        mSelectedUserIds.remove(participant.mUserId);
                        viewHolder.mSwipeCellLayout.setBackgroundColor(mContext.getResources().getColor(android.R.color.white));
                    }

                    if (null != mOnParticipantsListener) {
                        mOnParticipantsListener.onSelectUserId(participant.mUserId);
                    }
                }
            });
        } else {
            viewHolder.mMultipleSelectionCheckBox.setVisibility(View.GONE);
        }

        viewHolder.mSwipeCellLayout.setBackgroundColor(backgroundColor);

        return aConvertView;
    }

    @Override
    public boolean isChildSelectable(int i, int i1) {
        return false;
    }

    // =============================================================================================
}
package org.batfish.common.bdd;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;

import com.google.common.collect.ImmutableList;
import java.util.Arrays;
import java.util.BitSet;
import java.util.List;
import java.util.Optional;
import javax.annotation.Nonnull;
import net.sf.javabdd.BDD;
import net.sf.javabdd.BDDException;
import net.sf.javabdd.BDDFactory;

/**
 * A fixed-width unsigned integer represented as a vector of {@link BDD}s, one per bit.
 *
 * <p>Bit ordering: {@code _bitvec[0]} is the most-significant bit and
 * {@code _bitvec[length - 1]} is the least-significant bit (see {@link #value(long)} and
 * {@link #setValue(long)}, which fill the array from the end while shifting the value right).
 */
public class BDDInteger {
  private final BDDFactory _factory;
  // One BDD per bit; index 0 is the MSB, index length-1 the LSB.
  private final BDD[] _bitvec;
  // Largest value representable in _bitvec.length bits, i.e. 2^length - 1.
  private final long _maxVal;

  /** Certain API calls are only valid when this BDD has only variables in it. */
  private boolean _hasVariablesOnly;

  // Lazily-computed conjunction of all variables; only valid when _hasVariablesOnly is true.
  private BDD _vars;

  /*
   * Create an integer, but don't initialize its bit values
   */
  private BDDInteger(BDDFactory factory, int length) {
    checkArgument(length < 64, "Only lengths up to 63 are supported");
    _factory = factory;
    _bitvec = new BDD[length];
    // Unsigned shift leaves exactly `length` low-order 1 bits.
    _maxVal = 0xFFFF_FFFF_FFFF_FFFFL >>> (64 - length);
    _hasVariablesOnly = false;
  }

  /** Copy constructor: duplicates {@code other}'s bits (see {@link #setValue(BDDInteger)}). */
  public BDDInteger(BDDInteger other) {
    this(other._factory, other._bitvec.length);
    setValue(other);
  }

  /**
   * Returns true if this {@link BDDInteger} has only variables in its bitvec, as opposed to
   * zero/one and more complex BDDs.
   *
   * <p>This is typically true only for {@link BDDInteger} values constructed from {@link
   * BDDInteger#makeFromIndex(BDDFactory, int, int, boolean)} or copied from them.
   */
  public boolean hasVariablesOnly() {
    return _hasVariablesOnly;
  }

  /** Returns the number of bits in this {@link BDDInteger}. */
  public int size() {
    return _bitvec.length;
  }

  /*
   * Create an integer, and initialize its values as "don't care"
   * This requires knowing the start index variables the bitvector
   * will use.
   */
  public static BDDInteger makeFromIndex(
      BDDFactory factory, int length, int start, boolean reverse) {
    assert factory.varNum() >= start + length;
    BDDInteger bdd = new BDDInteger(factory, length);
    for (int i = 0; i < length; i++) {
      int idx;
      if (reverse) {
        // Assign variables back-to-front so bit i uses variable start+length-i-1.
        idx = start + length - i - 1;
      } else {
        idx = start + i;
      }
      bdd._bitvec[i] = bdd._factory.ithVar(idx);
    }
    // Every bit is a bare variable, so variable-only APIs (getVars, BitSet overloads) are legal.
    bdd._hasVariablesOnly = true;
    return bdd;
  }

  /** Find a representative value of the represented integer that satisfies a given constraint. */
  public Optional<Long> getValueSatisfying(BDD bdd) {
    if (bdd.isZero()) {
      // Unsatisfiable constraint: no value exists.
      return Optional.empty();
    }
    if (_hasVariablesOnly) {
      // Fast path: read the minimal assignment directly as a BitSet.
      return Optional.of(satAssignmentToLong(bdd.minAssignmentBits()));
    }
    return Optional.of(satAssignmentToLong(bdd.satOne()));
  }

  /**
   * Returns the smallest long produced when evaluating the given assignment {@link BDD} over the
   * representative bits in {@link #getBitvec()}.
   *
   * <p>When this {@link BDDInteger#hasVariablesOnly()} is {@code false}, this function will perform
   * better if the assignment {@link BDD} is smaller, i.e., is produced by {@link BDD#satOne()}
   * instead of {@link BDD#fullSatOne()}.
   */
  public Long satAssignmentToLong(BDD satAssignment) {
    checkArgument(satAssignment.isAssignment(), "not a satisfying assignment");
    if (_hasVariablesOnly) {
      // Shortcut for performance.
      return satAssignmentToLong(satAssignment.minAssignmentBits());
    }
    if (_bitvec.length > Long.SIZE) {
      throw new IllegalArgumentException(
          "Can't get a representative of a BDDInteger with more than Long.SIZE bits");
    }
    long value = 0;
    for (int i = 0; i < _bitvec.length; i++) {
      // i counts from the LSB; _bitvec is MSB-first, so index from the end.
      BDD bitBDD = _bitvec[_bitvec.length - i - 1];
      // a.diff(b) is a.and(b.not()). When the input is only a partial assignment (like satOne),
      // this biases towards lexicographically smaller solutions: set a 1 only if you can't set 0.
      if (!satAssignment.diffSat(bitBDD)) {
        value |= 1L << i;
      }
    }
    return value;
  }

  /**
   * Decodes a satisfying assignment given as a {@link BitSet} of variable levels into a long.
   * Only legal when {@link #hasVariablesOnly()} is true, since it reads each bit's variable level.
   */
  public Long satAssignmentToLong(BitSet bits) {
    checkState(
        _hasVariablesOnly,
        "satAssignmentToLong can only be called on a BDDInteger with hasVariablesOnly() true");
    if (_bitvec.length > Long.SIZE) {
      throw new IllegalArgumentException(
          "Can't get a representative of a BDDInteger with more than Long.SIZE bits");
    }
    long value = 0;
    for (int i = 0; i < _bitvec.length; i++) {
      BDD bitBDD = _bitvec[_bitvec.length - i - 1];
      // bitBDD is a single variable (hasVariablesOnly), so level() identifies its position.
      if (bits.get(bitBDD.level())) {
        value |= 1L << i;
      }
    }
    return value;
  }

  /**
   * Return a list of values satisfying the input {@link BDD}, up to some maximum number.
   *
   * @param bdd A constraint on this.
   * @param max The maximum number of values desired.
   * @return The satisfying values.
   */
  public List<Long> getValuesSatisfying(BDD bdd, int max) {
    ImmutableList.Builder<Long> values = new ImmutableList.Builder<>();
    checkArgument(max > 0, "max must be > 0");
    int num = 0;
    BDD pred = bdd;
    while (num < max) {
      if (pred.isZero()) {
        // No satisfying values remain.
        break;
      }
      long val = satAssignmentToLong(pred.satOne());
      values.add(val);
      // Exclude the value just found and keep enumerating.
      pred = pred.diff(value(val));
      num++;
    }
    return values.build();
  }

  /*
   * Create an integer and initialize it to a concrete value
   */
  public static BDDInteger makeFromValue(BDDFactory factory, int length, long value) {
    BDDInteger bdd = new BDDInteger(factory, length);
    bdd.setValue(value);
    bdd._hasVariablesOnly = false;
    return bdd;
  }

  /*
   * Map an if-then-else over each bit in the bitvector
   */
  public BDDInteger ite(BDD b, BDDInteger other) {
    BDDInteger val = new BDDInteger(this);
    for (int i = 0; i < _bitvec.length; i++) {
      // Bitwise: result bit i is this bit if b holds, else other's bit.
      val._bitvec[i] = b.ite(_bitvec[i], other._bitvec[i]);
    }
    val._hasVariablesOnly = false;
    return val;
  }

  /*
   * Create a BDD representing the exact value
   */
  public BDD value(long val) {
    checkArgument(val >= 0, "value is negative");
    checkArgument(val <= _maxVal, "value %s is out of range [0, %s]", val, _maxVal);
    long currentVal = val;
    BDD[] bits = new BDD[_bitvec.length];
    // Walk from LSB (end of array) to MSB, consuming one bit of val per step.
    for (int i = _bitvec.length - 1; i >= 0; i--) {
      BDD b = _bitvec[i];
      if ((currentVal & 1) != 0) {
        bits[i] = b.id();
      } else {
        bits[i] = b.not();
      }
      currentVal >>= 1;
    }
    BDD ret = _factory.andAll(bits);
    // andAll does not consume its inputs; free the per-bit temporaries.
    for (BDD b : bits) {
      b.free();
    }
    return ret;
  }

  // Helper function to compute leq on the last N bits of the input value.
  private BDD leqN(long val, int n) {
    assert n <= _bitvec.length;
    long currentVal = val;
    BDD acc = _factory.one(); // whether the suffix of BDD is leq suffix of val.
    for (int i = 0; i < n; ++i) {
      BDD bit = _bitvec[_bitvec.length - i - 1];
      if ((currentVal & 1) != 0) {
        // since this bit of val is 1: 0 implies lt OR 1 and suffix leq. ('1 and' is redundant).
        acc = bit.imp(acc); // "not i or acc" rewritten "i implies acc".
      } else {
        // since this bit of val is 0: must be 0 and have leq suffix.
        acc = bit.less(acc); // "not i and acc" rewritten "i less acc"
      }
      currentVal >>= 1;
    }
    return acc;
  }

  /*
   * Less than or equal to on integers
   */
  public BDD leq(long val) {
    checkArgument(val >= 0, "value is negative");
    checkArgument(val <= _maxVal, "value %s is out of range [0, %s]", val, _maxVal);
    return leqN(val, _bitvec.length);
  }

  // Helper function to compute geq on the last N bits of the input value.
  private BDD geqN(long val, int n) {
    assert n <= _bitvec.length;
    long currentVal = val;
    BDD acc = _factory.one(); // whether the suffix of BDD is geq suffix of val.
    for (int i = 0; i < n; ++i) {
      BDD bit = _bitvec[_bitvec.length - i - 1];
      if ((currentVal & 1) != 0) {
        // since this bit of val is 1: must be 1 and have geq suffix.
        acc = bit.and(acc);
      } else {
        // since this bit of val is 0: 1 implies gt OR 0 and suffix geq. ('0 and' is redundant.)
        acc = bit.or(acc);
      }
      currentVal >>= 1;
    }
    return acc;
  }

  /*
   * Greater than or equal to on integers
   */
  public BDD geq(long val) {
    checkArgument(val >= 0, "value is negative");
    checkArgument(val <= _maxVal, "value %s is out of range [0, %s]", val, _maxVal);
    return geqN(val, _bitvec.length);
  }

  /*
   * Integers in the given range, inclusive, where {@code a} is less than or equal to {@code b}.
   */
  // This is basically this.geq(a).and(this.leq(b)). Differences:
  // 1. Short-circuit a == b
  // 2. Save work in the case where a and b have a common prefix, including when a and/or b is the
  //    start/end of the prefix.
  public BDD range(long a, long b) {
    checkArgument(a <= b, "range is not ordered correctly");
    checkArgument(a >= 0, "value is negative");
    checkArgument(b <= _maxVal, "value %s is out of range [0, %s]", b, _maxVal);
    if (a == b) {
      return value(a);
    }
    // Only the suffix below (and including) the first differing bit needs geq/leq reasoning.
    long bitOfFirstDifference = Long.highestOneBit(a ^ b);
    int sizeOfDifferentSuffix = Long.numberOfTrailingZeros(bitOfFirstDifference) + 1;
    assert sizeOfDifferentSuffix < 64;
    long suffixMask = 0xFFFF_FFFF_FFFF_FFFFL >>> (64 - sizeOfDifferentSuffix);
    // If a's suffix is all zeros, any suffix is >= it; similarly for b's suffix of all ones.
    BDD lower = ((a & suffixMask) == 0) ? _factory.one() : geqN(a, sizeOfDifferentSuffix);
    BDD upper = ((b & suffixMask) == suffixMask) ? _factory.one() : leqN(b, sizeOfDifferentSuffix);
    BDD between = lower.and(upper);
    // Constrain the shared prefix bits to their exact (common) values.
    long currentVal = a >> sizeOfDifferentSuffix;
    for (int i = sizeOfDifferentSuffix; i < _bitvec.length; ++i) {
      BDD bit = _bitvec[_bitvec.length - i - 1];
      if ((currentVal & 1) != 0) {
        between = bit.and(between);
      } else {
        between = bit.less(between); // "not i and x" rewritten "i less x"
      }
      currentVal >>= 1;
    }
    return between;
  }

  /*
   * Set this BDD to have an exact value
   */
  public void setValue(long val) {
    checkArgument(val >= 0, "Cannot set a negative value");
    checkArgument(val <= _maxVal, "value %s is out of range [0, %s]", val, _maxVal);
    long currentVal = val;
    // Fill LSB-to-MSB with the constant zero/one BDDs matching each bit of val.
    for (int i = _bitvec.length - 1; i >= 0; i--) {
      if ((currentVal & 1) != 0) {
        _bitvec[i] = _factory.one();
      } else {
        _bitvec[i] = _factory.zero();
      }
      currentVal >>= 1;
    }
    _hasVariablesOnly = false;
  }

  /*
   * Set this BDD to be equal to another BDD
   */
  public void setValue(BDDInteger other) {
    for (int i = 0; i < _bitvec.length; ++i) {
      // id() takes an independent reference so later frees on either copy are safe.
      _bitvec[i] = other._bitvec[i].id();
    }
    _hasVariablesOnly = other._hasVariablesOnly;
  }

  /*
   * Add two BDDs bitwise to create a new BDD
   */
  public BDDInteger add(BDDInteger other) {
    BDD[] as = _bitvec;
    BDD[] bs = other._bitvec;
    checkArgument(as.length > 0, "Cannot add BDDIntegers of length 0");
    checkArgument(as.length == bs.length, "Cannot add BDDIntegers of different length");
    BDD carry = _factory.zero();
    BDDInteger sum = new BDDInteger(_factory, as.length);
    BDD[] cs = sum._bitvec;
    // Ripple-carry from LSB up; sum bit = a xor b xor carry, carry = ab + carry(a+b).
    for (int i = cs.length - 1; i > 0; --i) {
      cs[i] = as[i].xor(bs[i]).xor(carry);
      carry = as[i].and(bs[i]).or(carry.and(as[i].or(bs[i])));
    }
    // MSB: final carry out is dropped, so addition wraps modulo 2^length.
    cs[0] = as[0].xor(bs[0]).xor(carry);
    sum._hasVariablesOnly = false;
    return sum;
  }

  /*
   * Subtract one BDD from another bitwise to create a new BDD
   */
  public BDDInteger sub(BDDInteger var1) {
    if (_bitvec.length != var1._bitvec.length) {
      throw new BDDException();
    } else {
      // var3 is the running borrow, from LSB upward.
      BDD var3 = _factory.zero();
      BDDInteger var4 = new BDDInteger(_factory, _bitvec.length);
      for (int var5 = var4._bitvec.length - 1; var5 >= 0; --var5) {
        // difference bit = a xor b xor borrow
        var4._bitvec[var5] = _bitvec[var5].xor(var1._bitvec[var5]);
        var4._bitvec[var5] = var4._bitvec[var5].xor(var3.id());
        // new borrow = (!a and (b or borrow)) or (a and b and borrow)
        BDD var6 = var1._bitvec[var5].or(var3);
        BDD var7 = _bitvec[var5].less(var6);
        var6.free();
        var6 = _bitvec[var5].and(var1._bitvec[var5]);
        var6 = var6.and(var3);
        var6 = var6.or(var7);
        var3 = var6;
      }
      // Final borrow out is discarded: subtraction wraps modulo 2^length.
      var3.free();
      var4._hasVariablesOnly = false;
      return var4;
    }
  }

  /** Returns the underlying bit array (MSB first). Callers must not mutate it. */
  public BDD[] getBitvec() {
    return _bitvec;
  }

  /** Returns a {@link BDD} containing all the variables of this {@link BDDInteger}. */
  public @Nonnull BDD getVars() {
    checkState(
        _hasVariablesOnly,
        "getVars can only be called on a BDDInteger with hasVariablesOnly() true");
    if (_vars == null) {
      // Cache: the conjunction of variables is stable for this object.
      _vars = _factory.andAll(_bitvec);
    }
    return _vars;
  }

  public BDDFactory getFactory() {
    return _factory;
  }

  @Override
  public boolean equals(Object o) {
    if (!(o instanceof BDDInteger)) {
      return false;
    }
    BDDInteger other = (BDDInteger) o;
    // No need to check _factory, and _hasVariablesOnly is 1-1 with _bitvec.
    return Arrays.equals(_bitvec, other._bitvec);
  }

  @Override
  public int hashCode() {
    return Arrays.hashCode(_bitvec);
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.activemq.usecases;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;

import javax.jms.Connection;
import javax.jms.Message;
import javax.jms.MessageConsumer;
import javax.jms.MessageListener;
import javax.jms.MessageProducer;
import javax.jms.Session;

import junit.framework.Test;

import org.apache.activemq.ActiveMQConnectionFactory;
import org.apache.activemq.broker.BrokerFactory;
import org.apache.activemq.broker.BrokerService;
import org.apache.activemq.command.ActiveMQTopic;
import org.apache.activemq.store.PersistenceAdapter;
import org.apache.activemq.store.kahadb.KahaDBPersistenceAdapter;
import org.apache.activemq.store.kahadb.KahaDBStore;
import org.apache.activemq.util.Wait;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Verifies that offline durable topic subscriptions with selectors do not leak
 * KahaDB index pages: after all messages are delivered and both subscriptions
 * are removed, the store should be back to its baseline page usage.
 */
public class DurableSubsOfflineSelectorIndexUseTest extends org.apache.activemq.TestSupport {

    private static final Logger LOG = LoggerFactory.getLogger(DurableSubsOfflineSelectorIndexUseTest.class);
    // Public so the JUnit3 combination framework (initCombosFor...) can vary it.
    public int messageCount = 400;
    private BrokerService broker;
    private ActiveMQTopic topic;
    // Collects failures thrown on the sender thread so the test can surface them.
    private List<Throwable> exceptions = new ArrayList<>();

    /** Connects over VM transport to the embedded broker created in setUp(). */
    @Override
    protected ActiveMQConnectionFactory createConnectionFactory() throws Exception {
        ActiveMQConnectionFactory connectionFactory = new ActiveMQConnectionFactory("vm://" + getName(true));
        connectionFactory.setWatchTopicAdvisories(false);
        return connectionFactory;
    }

    @Override
    protected Connection createConnection() throws Exception {
        return createConnection("id");
    }

    /** Creates and starts a connection with the given client id (required for durable subs). */
    protected Connection createConnection(String name) throws Exception {
        Connection con = super.createConnection();
        con.setClientID(name);
        con.start();
        return con;
    }

    public static Test suite() {
        return suite(DurableSubsOfflineSelectorIndexUseTest.class);
    }

    @Override
    protected void setUp() throws Exception {
        exceptions.clear();
        topic = (ActiveMQTopic) createDestination();
        createBroker();
        super.setUp();
    }

    @Override
    protected void tearDown() throws Exception {
        super.tearDown();
        destroyBroker();
    }

    private void createBroker() throws Exception {
        createBroker(true);
    }

    /** Starts an embedded broker with advisories off and a clean (optional) KahaDB store. */
    private void createBroker(boolean deleteAllMessages) throws Exception {
        broker = BrokerFactory.createBroker("broker:(vm://" + getName(true) + ")");
        broker.setBrokerName(getName(true));
        broker.setDeleteAllMessagesOnStartup(deleteAllMessages);
        broker.getManagementContext().setCreateConnector(false);
        broker.setAdvisorySupport(false);
        broker.addConnector("tcp://0.0.0.0:0");
        setDefaultPersistenceAdapter(broker);
        broker.start();
    }

    private void destroyBroker() throws Exception {
        if (broker != null)
            broker.stop();
    }

    /** JUnit3 combination hook: re-runs testIndexPageUsage with each messageCount value. */
    public void initCombosForTestIndexPageUsage() {
        addCombinationValues("messageCount", new Integer[]{890, 900, 400});
    }

    public void testIndexPageUsage() throws Exception {
        // Register two durable subscriptions (named "true"/"false", matching selectors),
        // then close them so they go offline and the broker must spool messages for them.
        Connection con = createConnection();
        Session session = con.createSession(false, Session.AUTO_ACKNOWLEDGE);
        session.createDurableSubscriber(topic, "true", "filter = 'true'", true);
        session.close();

        session = con.createSession(false, Session.AUTO_ACKNOWLEDGE);
        session.createDurableSubscriber(topic, "false", "filter = 'false'", true);
        session.close();

        con.close();

        // send messages
        final Connection sendCon = createConnection("send");
        final Session sendSession = sendCon.createSession(false, Session.AUTO_ACKNOWLEDGE);
        final MessageProducer producer = sendSession.createProducer(null);

        // Alternate the "filter" property so each subscription matches exactly half the messages.
        Thread sendThread = new Thread() {
            @Override
            public void run() {
                try {
                    for (int i = 0; i < messageCount; i++) {
                        boolean filter = i % 2 == 1;
                        Message message = sendSession.createMessage();
                        message.setStringProperty("filter", filter ? "true" : "false");
                        producer.send(topic, message);

                        if (i > 0 && i % 1000 == 0) {
                            LOG.info("Sent:" + i);
                        }
                    }
                    sendSession.close();
                    sendCon.close();
                } catch (Exception e) {
                    // Recorded for later inspection; thrown assertions would be lost on this thread.
                    exceptions.add(e);
                }
            }
        };
        sendThread.start();

        sendThread.join();

        // settle with sent messages
        TimeUnit.SECONDS.sleep(4);

        // consume messages: reactivate both durable subscriptions and drain them.
        con = createConnection();
        session = con.createSession(false, Session.AUTO_ACKNOWLEDGE);
        MessageConsumer consumerTrue = session.createDurableSubscriber(topic, "true", "filter = 'true'", true);
        Listener listenerT = new Listener();
        consumerTrue.setMessageListener(listenerT);

        waitFor(listenerT, messageCount / 2);

        MessageConsumer consumerFalse = session.createDurableSubscriber(topic, "false", "filter = 'false'", true);
        Listener listenerF = new Listener();
        consumerFalse.setMessageListener(listenerF);

        waitFor(listenerF, messageCount / 2);

        assertEquals(messageCount / 2, listenerT.count);
        assertEquals(messageCount / 2, listenerF.count);

        // Remove both subscriptions so the store can reclaim their index pages.
        consumerTrue.close();
        session.unsubscribe("true");

        consumerFalse.close();
        session.unsubscribe("false");

        session.close();
        con.close();

        // The leak check is KahaDB-specific; skip silently for other persistence adapters.
        PersistenceAdapter persistenceAdapter = broker.getPersistenceAdapter();
        if (persistenceAdapter instanceof KahaDBPersistenceAdapter) {
            final KahaDBStore store = ((KahaDBPersistenceAdapter) persistenceAdapter).getStore();
            LOG.info("Store page count: " + store.getPageFile().getPageCount());
            LOG.info("Store free page count: " + store.getPageFile().getFreePageCount());
            LOG.info("Store page in-use: " + (store.getPageFile().getPageCount() - store.getPageFile().getFreePageCount()));

            assertTrue("no leak of pages, always use just 10", Wait.waitFor(new Wait.Condition() {
                @Override
                public boolean isSatisified() throws Exception {
                    return 10 == store.getPageFile().getPageCount() -
                            store.getPageFile().getFreePageCount();
                }
            }, TimeUnit.SECONDS.toMillis(10)));
        }
    }

    /** Blocks (up to 10 minutes) until the listener has received {@code count} messages. */
    private void waitFor(final Listener listener, final int count) throws Exception {
        assertTrue("got all messages on time", Wait.waitFor(new Wait.Condition() {
            @Override
            public boolean isSatisified() throws Exception {
                return listener.count == count;
            }
        }, TimeUnit.MINUTES.toMillis(10)));
    }

    /** Counts received messages; optionally logs each message id when {@code id} is set. */
    public static class Listener implements MessageListener {
        // NOTE(review): count is read from the test thread while written on the JMS dispatch
        // thread without synchronization; Wait.waitFor polling makes this work in practice.
        int count = 0;
        String id = null;

        Listener() {
        }

        @Override
        public void onMessage(Message message) {
            count++;
            if (id != null) {
                try {
                    LOG.info(id + ", " + message.getJMSMessageID());
                } catch (Exception ignored) {
                }
            }
        }
    }
}
/*
 * Copyright (C) 2014 LinuxTek, Inc. All Rights Reserved.
 */
package com.linuxtek.kona.util;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;

import org.apache.log4j.Logger;

/**
 * A {@link LinkedHashMap} with convenience helpers: defaulted lookups, reverse
 * (value-to-key) lookups, sorted/reversed copies, and comma-list rendering.
 *
 * <p>Review fixes relative to the previous revision:
 * <ul>
 *   <li>{@link #getString} no longer throws NPE for missing/null values and now honors
 *       the previously-unused {@code convertNullToEmptyString} flag.</li>
 *   <li>{@link #getKey(Object)}, {@link #getKeyByValue} and {@link #getKeysByValue} use
 *       {@link Objects#equals} so a {@code null} value no longer throws NPE.</li>
 *   <li>String building in loops uses {@link StringBuilder} instead of {@code +=}.</li>
 * </ul>
 */
public class KMap<K extends Comparable<? super K>,V> extends LinkedHashMap<K,V> {
    private static final long serialVersionUID = 1L;
    private static Logger logger = Logger.getLogger(KMap.class);

    // When true, getString() returns "" instead of null for a missing/null value.
    private boolean convertNullToEmptyString = false;

    // Set only on the copies produced by sort()/sort(Comparator).
    private boolean isSorted = false;

    public KMap() {
        super();
    }

    public KMap(boolean convertNullToEmptyString) {
        this();
        this.convertNullToEmptyString = convertNullToEmptyString;
    }

    public KMap(int initialCapacity) {
        super(initialCapacity);
    }

    public KMap(int initialCapacity, float loadFactor) {
        super(initialCapacity, loadFactor);
    }

    public KMap(int initialCapacity, float loadFactor, boolean accessOrder) {
        super(initialCapacity, loadFactor, accessOrder);
    }

    public KMap(Map<K,V> m) {
        super(m);
    }

    /** Static factory for an empty map. */
    public static <K extends Comparable<? super K>,V> KMap<K,V> getInstance() {
        return (new KMap<K,V>());
    }

    /** Static factory copying an existing map. */
    public static <K extends Comparable<? super K>,V> KMap<K,V> getInstance(Map<K,V> m) {
        return (new KMap<K,V>(m));
    }

    /** True only for maps returned by {@link #sort()} / {@link #sort(Comparator)}. */
    public boolean isSorted() {
        return (isSorted);
    }

    /** Returns the mapped value, or {@code defaultValue} when absent or mapped to null. */
    public V get(K key, V defaultValue) {
        V value = super.get(key);
        if (value == null) {
            value = defaultValue;
        }
        return value;
    }

    /**
     * Returns the mapped value rendered via {@code toString()}.
     *
     * <p>Fix: previously this dereferenced a missing/null value and threw NPE. It now
     * returns {@code ""} when {@code convertNullToEmptyString} is set, else {@code null}.
     */
    public String getString(K key) {
        V value = get(key);
        if (value == null) {
            return convertNullToEmptyString ? "" : null;
        }
        return value.toString();
    }

    /**
     * Returns the first key (in iteration order) mapped to {@code value}, or null.
     *
     * <p>Fix: single pass over entrySet (the old containsValue + rescan was two passes)
     * and null-safe via {@link Objects#equals}.
     */
    public K getKey(V value) {
        for (Map.Entry<K,V> entry : entrySet()) {
            if (Objects.equals(value, entry.getValue())) {
                return entry.getKey();
            }
        }
        return (null);
    }

    /** Like {@link #getKey(Object)} but returns {@code defaultKey} when no key matches. */
    public K getKey(V value, K defaultKey) {
        K key = getKey(value);
        if (key == null)
            key = defaultKey;
        return key;
    }

    /** Iterator over the values, in map iteration order. */
    public Iterator<V> iterator() {
        return (values().iterator());
    }

    /** Iterator over the keys, in map iteration order. */
    public Iterator<K> keys() {
        return (keySet().iterator());
    }

    // returns keys sorted by "natural ordering"
    public Iterator<K> sortedKeys() {
        List<K> list = new ArrayList<K>(keySet());
        Collections.sort(list);
        return (list.iterator());
    }

    // returns keys sorted by using the comparator
    public Iterator<K> sortedKeys(Comparator<K> c) {
        List<K> list = new ArrayList<K>(keySet());
        Collections.sort(list, c);
        return (list.iterator());
    }

    // returns keys sorted by values
    // assumes 1:1 (duplicate values yield the first matching key repeatedly)
    public Iterator<K> keysSortedByValue(Comparator<V> c) {
        List<V> list = new ArrayList<V>(values());
        List<K> keys = new ArrayList<K>(list.size());
        Collections.sort(list, c);
        for (V v : list) {
            K key = getKeyByValue(this, v);
            keys.add(key);
        }
        return keys.iterator();
    }

    /** Returns the first key in {@code map} mapped to {@code value}, or null. Null-safe. */
    public static <K, V> K getKeyByValue(Map<K, V> map, V value) {
        for (Map.Entry<K, V> entry : map.entrySet()) {
            if (Objects.equals(value, entry.getValue())) {
                return entry.getKey();
            }
        }
        return null;
    }

    /** Returns all keys in {@code map} mapped to {@code value}. Null-safe. */
    public static <K, V> Set<K> getKeysByValue(Map<K, V> map, V value) {
        Set<K> keys = new HashSet<K>();
        for (Map.Entry<K, V> entry : map.entrySet()) {
            if (Objects.equals(value, entry.getValue())) {
                keys.add(entry.getKey());
            }
        }
        return keys;
    }

    /** Returns a copy of this map with entries ordered by the keys' natural ordering. */
    public KMap<K,V> sort() {
        KMap<K,V> map = new KMap<K,V>();
        Iterator<K> it = sortedKeys();
        while (it.hasNext()) {
            K key = it.next();
            V value = get(key);
            map.put(key, value);
        }
        map.isSorted = true;
        return (map);
    }

    /** Returns a copy of this map with entries ordered by the given comparator. */
    public KMap<K,V> sort(Comparator<K> c) {
        KMap<K,V> map = new KMap<K,V>();
        Iterator<K> it = sortedKeys(c);
        while (it.hasNext()) {
            K key = it.next();
            V value = get(key);
            map.put(key, value);
        }
        map.isSorted = true;
        return (map);
    }

    /** Returns a copy of this map with the current iteration order reversed. */
    public KMap<K,V> reverse() {
        KMap<K,V> map = new KMap<K,V>();
        List<K> list = new ArrayList<K>(keySet());
        Collections.reverse(list);
        Iterator<K> it = list.iterator();
        while (it.hasNext()) {
            K key = it.next();
            V value = get(key);
            map.put(key, value);
        }
        return (map);
    }

    /** Renders this map's values as a comma-separated list (null when empty). */
    public String toCommaList() {
        return (toCommaList(this.values()));
    }

    /**
     * Renders the items as a comma-separated list of their {@code toString()} values.
     * Returns null for a null or empty collection (preserved legacy contract).
     *
     * <p>Fix: uses StringBuilder instead of O(n^2) string concatenation, and renders a
     * null element as "null" instead of throwing NPE.
     */
    public static <V> String toCommaList(Collection<V> items) {
        if (items == null || items.isEmpty())
            return (null);

        StringBuilder sb = new StringBuilder();
        Iterator<V> it = items.iterator();
        while (it.hasNext()) {
            sb.append(it.next());
            if (it.hasNext()) {
                sb.append(',');
            }
        }
        return sb.toString();
    }

    /** Debug listing of all entries, keys in natural order. */
    public String toString() {
        StringBuilder s = new StringBuilder("---- Begin KMap Listing ----\n");
        Iterator<K> it = sortedKeys();
        while (it.hasNext()) {
            K key = it.next();
            V value = get(key);
            s.append("[").append(key).append(",").append(value).append("]\n");
        }
        s.append("---- End KMap Listing ----\n");
        return s.toString();
    }
}
package ij.io;
import ij.VirtualStack;
import ij.IJ;
import java.io.*;
import java.util.Properties;

/** This class consists of public fields that describe an image file. */
public class FileInfo implements Cloneable {

    /* ---- Pixel/file type codes (values of the fileType field) ---- */

    /** 8-bit unsigned integer (0-255). */
    public static final int GRAY8 = 0;

    /** 16-bit signed integer (-32768-32767). Imported signed images
        are converted to unsigned by adding 32768. */
    public static final int GRAY16_SIGNED = 1;

    /** 16-bit unsigned integer (0-65535). */
    public static final int GRAY16_UNSIGNED = 2;

    /** 32-bit signed integer. Imported 32-bit integer images are
        converted to floating-point. */
    public static final int GRAY32_INT = 3;

    /** 32-bit floating-point. */
    public static final int GRAY32_FLOAT = 4;

    /** 8-bit unsigned integer with color lookup table. */
    public static final int COLOR8 = 5;

    /** 24-bit interleaved RGB. Import/export only. */
    public static final int RGB = 6;

    /** 24-bit planar RGB. Import only. */
    public static final int RGB_PLANAR = 7;

    /** 1-bit black and white. Import only. */
    public static final int BITMAP = 8;

    /** 32-bit interleaved ARGB. Import only. */
    public static final int ARGB = 9;

    /** 24-bit interleaved BGR. Import only. */
    public static final int BGR = 10;

    /** 32-bit unsigned integer. Imported 32-bit integer images are
        converted to floating-point. */
    public static final int GRAY32_UNSIGNED = 11;

    /** 48-bit interleaved RGB. */
    public static final int RGB48 = 12;

    /** 12-bit unsigned integer (0-4095). Import only. */
    public static final int GRAY12_UNSIGNED = 13;

    /** 24-bit unsigned integer. Import only. */
    public static final int GRAY24_UNSIGNED = 14;

    /** 32-bit interleaved BARG (MCID). Import only. */
    public static final int BARG = 15;

    /** 64-bit floating-point. Import only.*/
    public static final int GRAY64_FLOAT = 16;

    /** 48-bit planar RGB. Import only. */
    public static final int RGB48_PLANAR = 17;

    /** 32-bit interleaved ABGR. Import only. */
    public static final int ABGR = 18;

    /** 32-bit interleaved CMYK. Import only. */
    public static final int CMYK = 19;

    // File formats (values of the fileFormat field)
    public static final int UNKNOWN = 0;
    public static final int RAW = 1;
    public static final int TIFF = 2;
    public static final int GIF_OR_JPG = 3;
    public static final int FITS = 4;
    public static final int BMP = 5;
    public static final int DICOM = 6;
    public static final int ZIP_ARCHIVE = 7;
    public static final int PGM = 8;
    public static final int IMAGEIO = 9;

    // Compression modes (values of the compression field)
    public static final int COMPRESSION_UNKNOWN = 0;
    public static final int COMPRESSION_NONE= 1;
    public static final int LZW = 2;
    public static final int LZW_WITH_DIFFERENCING = 3;
    public static final int JPEG = 4;
    public static final int PACK_BITS = 5;
    public static final int ZIP = 6;

    /* File format (TIFF, GIF_OR_JPG, BMP, etc.). Used by the File/Revert command */
    public int fileFormat;

    /* File type (GRAY8, GRAY_16_UNSIGNED, RGB, etc.) */
    public int fileType;
    public String fileName;
    public String directory;
    public String url;
    public int width;
    public int height;
    public int offset=0;  // Use getOffset() to read
    public int nImages;
    public int gapBetweenImages;  // Use getGap() to read
    public boolean whiteIsZero;          // true for inverted (white-is-zero) grayscale LUTs
    public boolean intelByteOrder;       // true when pixel data is little-endian
    public int compression;
    public int[] stripOffsets;           // TIFF strip byte offsets
    public int[] stripLengths;           // TIFF strip byte counts
    public int rowsPerStrip;
    public int lutSize;                  // number of LUT entries (reds/greens/blues)
    public byte[] reds;
    public byte[] greens;
    public byte[] blues;
    public Object pixels;                // preloaded pixel data, if any
    public String debugInfo;
    public String[] sliceLabels;
    public String info;
    public InputStream inputStream;      // read pixels from here instead of a file, if set
    public VirtualStack virtualStack;
    public int sliceNumber;              // used by FileInfoVirtualStack
    // Spatial calibration; defaults of 1.0 mean "uncalibrated".
    public double pixelWidth=1.0;
    public double pixelHeight=1.0;
    public double pixelDepth=1.0;
    public String unit;
    public int calibrationFunction;
    public double[] coefficients;
    public String valueUnit;
    public double frameInterval;
    public String description;
    // Use <i>longOffset</i> instead of <i>offset</i> when offset>2147483647.
    public long longOffset;  // Use getOffset() to read
    // Use <i>longGap</i> instead of <i>gapBetweenImages</i> when gap>2147483647.
    public long longGap;  // Use getGap() to read
    // Extra metadata to be stored in the TIFF header
    public int[] metaDataTypes; // must be < 0xffffff
    public byte[][] metaData;
    public double[] displayRanges;       // per-channel min/max display range pairs
    public byte[][] channelLuts;
    public byte[] plot;            // serialized plot
    public byte[] roi;            // serialized roi
    public byte[][] overlay;    // serialized overlay objects
    public int samplesPerPixel;
    public String openNextDir, openNextName;
    public String[] properties; // {key,value,key,value,...}
    public boolean imageSaved;

    /** Creates a FileInfo object with all of its fields set to their default value. */
     public FileInfo() {
        // assign default values
        fileFormat = UNKNOWN;
        fileType = GRAY8;
        fileName = "Untitled";
        directory = "";
        url = "";
        nImages = 1;
        compression = COMPRESSION_NONE;
        samplesPerPixel = 1;
    }

    /** Returns the file path (directory, with separator appended if needed, plus name). */
    public String getFilePath() {
        String dir = directory;
        if (dir==null)
            dir = "";
        dir = IJ.addSeparator(dir);
        return dir + fileName;
    }

    /** Returns the offset as a long. Prefers longOffset; otherwise treats the
        int offset as unsigned 32-bit. */
    public final long getOffset() {
        return longOffset>0L?longOffset:((long)offset)&0xffffffffL;
    }

    /** Returns the gap between images as a long. Prefers longGap; otherwise treats
        the int gapBetweenImages as unsigned 32-bit. */
    public final long getGap() {
        return longGap>0L?longGap:((long)gapBetweenImages)&0xffffffffL;
    }

    /** Returns the number of bytes used per pixel (0 for an unknown fileType).
        Note: GRAY12_UNSIGNED reports 2 (stored in 16 bits) and GRAY24_UNSIGNED
        reports 4, matching how these types are read. */
    public int getBytesPerPixel() {
        switch (fileType) {
            case GRAY8: case COLOR8: case BITMAP: return 1;
            case GRAY16_SIGNED: case GRAY16_UNSIGNED: case GRAY12_UNSIGNED: return 2;
            case GRAY32_INT: case GRAY32_UNSIGNED: case GRAY32_FLOAT: case ARGB:
                case GRAY24_UNSIGNED: case BARG: case ABGR: case CMYK: return 4;
            case RGB: case RGB_PLANAR: case BGR: return 3;
            case RGB48: case RGB48_PLANAR: return 6;
            case GRAY64_FLOAT : return 8;
            default: return 0;
        }
    }

    /** Returns a one-line human-readable summary of this FileInfo (for debugging). */
    public String toString() {
        return
            "name=" + fileName
            + ", dir=" + directory
            + ", width=" + width
            + ", height=" + height
            + ", nImages=" + nImages
            + ", offset=" + getOffset()
            + ", gap=" + getGap()
            + ", type=" + getType()
            + ", byteOrder=" + (intelByteOrder?"little":"big")
            + ", format=" + fileFormat
            + ", url=" + url
            + ", whiteIsZero=" + (whiteIsZero?"t":"f")
            + ", lutSize=" + lutSize
            + ", comp=" + compression
            + ", ranges=" + (displayRanges!=null?""+displayRanges.length/2:"null")
            + ", samples=" + samplesPerPixel;
    }

    /** Returns JavaScript code that can be used to recreate this FileInfo.
        Only the four most common file types are emitted; others fall back to
        the default (GRAY8). */
    public String getCode() {
        String code = "fi = new FileInfo();\n";
        String type = null;
        if (fileType==GRAY8)
            type = "GRAY8";
        else if (fileType==GRAY16_UNSIGNED)
            type = "GRAY16_UNSIGNED";
        else if (fileType==GRAY32_FLOAT)
            type = "GRAY32_FLOAT";
        else if (fileType==RGB)
            type = "RGB";
        if (type!=null)
            code += "fi.fileType = FileInfo."+type+";\n";
        code += "fi.width = "+width+";\n";
        code += "fi.height = "+height+";\n";
        if (nImages>1)
            code += "fi.nImages = "+nImages+";\n";
        if (getOffset()>0)
            code += "fi.longOffset = "+getOffset()+";\n";
        if (intelByteOrder)
            code += "fi.intelByteOrder = true;\n";
        return code;
    }

    /** Returns a short name for the current fileType, used by toString(). */
    private String getType() {
        switch (fileType) {
            case GRAY8: return "byte";
            case GRAY16_SIGNED: return "short";
            case GRAY16_UNSIGNED: return "ushort";
            case GRAY32_INT: return "int";
            case GRAY32_UNSIGNED: return "uint";
            case GRAY32_FLOAT: return "float";
            case COLOR8: return "byte(lut)";
            case RGB: return "RGB";
            case RGB_PLANAR: return "RGB(p)";
            case RGB48: return "RGB48";
            case BITMAP: return "bitmap";
            case ARGB: return "ARGB";
            case ABGR: return "ABGR";
            case BGR: return "BGR";
            case BARG: return "BARG";
            case CMYK: return "CMYK";
            case GRAY64_FLOAT: return "double";
            case RGB48_PLANAR: return "RGB48(p)";
            default: return "";
        }
    }

    /** Shallow copy; array/object fields are shared with the clone.
        Returns null if cloning fails (CloneNotSupportedException cannot
        occur since this class implements Cloneable). */
    public synchronized Object clone() {
        try {return super.clone();}
        catch (CloneNotSupportedException e) {return null;}
    }

}
/* * Copyright (c) 2008-2014 MongoDB, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.mongodb.binding; import com.mongodb.MongoInternalException; import com.mongodb.MongoTimeoutException; import com.mongodb.ReadPreference; import com.mongodb.async.SingleResultCallback; import com.mongodb.connection.AsyncConnection; import com.mongodb.connection.Cluster; import com.mongodb.connection.Server; import com.mongodb.connection.ServerDescription; import com.mongodb.selector.PrimaryServerSelector; import com.mongodb.selector.ReadPreferenceServerSelector; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import static com.mongodb.ReadPreference.primary; import static com.mongodb.assertions.Assertions.isTrue; import static com.mongodb.assertions.Assertions.notNull; /** * An asynchronous binding that ensures that all reads use the same connection, and all writes use the same connection. * * <p>If the readPreference is {#link ReadPreference.primary()} then all reads and writes will use the same connection.</p> */ public class AsyncSingleConnectionBinding extends AbstractReferenceCounted implements AsyncReadWriteBinding { private final ReadPreference readPreference; private AsyncConnection readConnection; private AsyncConnection writeConnection; private volatile Server readServer; private volatile Server writeServer; /** * Create a new binding with the given cluster. 
* * @param cluster a non-null Cluster which will be used to select a server to bind to * @param maxWaitTime the maximum time to wait for a connection to become available. * @param timeUnit a non-null TimeUnit for the maxWaitTime */ public AsyncSingleConnectionBinding(final Cluster cluster, final long maxWaitTime, final TimeUnit timeUnit) { this(cluster, primary(), maxWaitTime, timeUnit); } /** * Create a new binding with the given cluster. * * @param cluster a non-null Cluster which will be used to select a server to bind to * @param readPreference the readPreference for reads, if not primary a separate connection will be used for reads * @param maxWaitTime the maximum time to wait for a connection to become available. * @param timeUnit a non-null TimeUnit for the maxWaitTime */ public AsyncSingleConnectionBinding(final Cluster cluster, final ReadPreference readPreference, final long maxWaitTime, final TimeUnit timeUnit) { notNull("cluster", cluster); this.readPreference = notNull("readPreference", readPreference); final CountDownLatch latch = new CountDownLatch(2); cluster.selectServerAsync(new PrimaryServerSelector(), new SingleResultCallback<Server>() { @Override public void onResult(final Server result, final Throwable t) { if (t == null) { writeServer = result; latch.countDown(); } } }); cluster.selectServerAsync(new ReadPreferenceServerSelector(readPreference), new SingleResultCallback<Server>() { @Override public void onResult(final Server result, final Throwable t) { if (t == null) { readServer = result; latch.countDown(); } } }); awaitLatch(maxWaitTime, timeUnit, latch); if (writeServer == null || readServer == null) { throw new MongoInternalException("Failure to select server"); } final CountDownLatch writeServerLatch = new CountDownLatch(1); writeServer.getConnectionAsync(new SingleResultCallback<AsyncConnection>() { @Override public void onResult(final AsyncConnection result, final Throwable t) { writeConnection = result; writeServerLatch.countDown(); } 
}); awaitLatch(maxWaitTime, timeUnit, writeServerLatch); if (writeConnection == null) { throw new MongoInternalException("Failure to get connection"); } final CountDownLatch readServerLatch = new CountDownLatch(1); readServer.getConnectionAsync(new SingleResultCallback<AsyncConnection>() { @Override public void onResult(final AsyncConnection result, final Throwable t) { readConnection = result; readServerLatch.countDown(); } }); awaitLatch(maxWaitTime, timeUnit, readServerLatch); if (readConnection == null) { throw new MongoInternalException("Failure to get connection"); } } private void awaitLatch(final long maxWaitTime, final TimeUnit timeUnit, final CountDownLatch latch) { try { if (!latch.await(maxWaitTime, timeUnit)) { throw new MongoTimeoutException("Failed to get servers"); } } catch (InterruptedException e) { throw new MongoInternalException(e.getMessage(), e); } } @Override public AsyncReadWriteBinding retain() { super.retain(); return this; } @Override public ReadPreference getReadPreference() { return readPreference; } @Override public void getReadConnectionSource(final SingleResultCallback<AsyncConnectionSource> callback) { isTrue("open", getCount() > 0); if (readPreference == primary()) { getWriteConnectionSource(callback); } else { callback.onResult(new SingleAsyncConnectionSource(readServer, readConnection), null); } } @Override public void getWriteConnectionSource(final SingleResultCallback<AsyncConnectionSource> callback) { isTrue("open", getCount() > 0); callback.onResult(new SingleAsyncConnectionSource(writeServer, writeConnection), null); } @Override public void release() { super.release(); if (getCount() == 0) { readConnection.release(); writeConnection.release(); } } private final class SingleAsyncConnectionSource extends AbstractReferenceCounted implements AsyncConnectionSource { private final Server server; private final AsyncConnection connection; private SingleAsyncConnectionSource(final Server server, final AsyncConnection connection) { 
this.server = server; this.connection = connection; AsyncSingleConnectionBinding.this.retain(); } @Override public ServerDescription getServerDescription() { return server.getDescription(); } @Override public void getConnection(final SingleResultCallback<AsyncConnection> callback) { isTrue("open", super.getCount() > 0); callback.onResult(connection.retain(), null); } public AsyncConnectionSource retain() { super.retain(); return this; } @Override public void release() { super.release(); if (super.getCount() == 0) { AsyncSingleConnectionBinding.this.release(); } } } }
/**
 */
package gluemodel.CIM.IEC61970.Core.impl;

import gluemodel.CIM.IEC61970.Core.BasicIntervalSchedule;
import gluemodel.CIM.IEC61970.Core.CorePackage;

import gluemodel.CIM.IEC61970.Domain.UnitMultiplier;
import gluemodel.CIM.IEC61970.Domain.UnitSymbol;

import org.eclipse.emf.common.notify.Notification;

import org.eclipse.emf.ecore.EClass;

import org.eclipse.emf.ecore.impl.ENotificationImpl;

/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Basic Interval Schedule</b></em>'.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * </p>
 * <ul>
 *   <li>{@link gluemodel.CIM.IEC61970.Core.impl.BasicIntervalScheduleImpl#getValue2Multiplier <em>Value2 Multiplier</em>}</li>
 *   <li>{@link gluemodel.CIM.IEC61970.Core.impl.BasicIntervalScheduleImpl#getValue1Multiplier <em>Value1 Multiplier</em>}</li>
 *   <li>{@link gluemodel.CIM.IEC61970.Core.impl.BasicIntervalScheduleImpl#getStartTime <em>Start Time</em>}</li>
 *   <li>{@link gluemodel.CIM.IEC61970.Core.impl.BasicIntervalScheduleImpl#getValue2Unit <em>Value2 Unit</em>}</li>
 *   <li>{@link gluemodel.CIM.IEC61970.Core.impl.BasicIntervalScheduleImpl#getValue1Unit <em>Value1 Unit</em>}</li>
 * </ul>
 *
 * @generated
 */
public class BasicIntervalScheduleImpl extends IdentifiedObjectImpl implements BasicIntervalSchedule {
	// NOTE(review): EMF-generated class.  Members tagged @generated are rewritten on
	// model regeneration; hand edits there are lost.  Each setter fires an
	// ENotificationImpl so EMF adapters observe changes; enum setters coerce a
	// null argument to the generated default literal.

	/**
	 * The default value of the '{@link #getValue2Multiplier() <em>Value2 Multiplier</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getValue2Multiplier()
	 * @generated
	 * @ordered
	 */
	protected static final UnitMultiplier VALUE2_MULTIPLIER_EDEFAULT = UnitMultiplier.SMALL_M;

	/**
	 * The cached value of the '{@link #getValue2Multiplier() <em>Value2 Multiplier</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getValue2Multiplier()
	 * @generated
	 * @ordered
	 */
	protected UnitMultiplier value2Multiplier = VALUE2_MULTIPLIER_EDEFAULT;

	/**
	 * The default value of the '{@link #getValue1Multiplier() <em>Value1 Multiplier</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getValue1Multiplier()
	 * @generated
	 * @ordered
	 */
	protected static final UnitMultiplier VALUE1_MULTIPLIER_EDEFAULT = UnitMultiplier.SMALL_M;

	/**
	 * The cached value of the '{@link #getValue1Multiplier() <em>Value1 Multiplier</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getValue1Multiplier()
	 * @generated
	 * @ordered
	 */
	protected UnitMultiplier value1Multiplier = VALUE1_MULTIPLIER_EDEFAULT;

	/**
	 * The default value of the '{@link #getStartTime() <em>Start Time</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getStartTime()
	 * @generated
	 * @ordered
	 */
	protected static final String START_TIME_EDEFAULT = null;

	/**
	 * The cached value of the '{@link #getStartTime() <em>Start Time</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getStartTime()
	 * @generated
	 * @ordered
	 */
	protected String startTime = START_TIME_EDEFAULT;

	/**
	 * The default value of the '{@link #getValue2Unit() <em>Value2 Unit</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getValue2Unit()
	 * @generated
	 * @ordered
	 */
	protected static final UnitSymbol VALUE2_UNIT_EDEFAULT = UnitSymbol.A;

	/**
	 * The cached value of the '{@link #getValue2Unit() <em>Value2 Unit</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getValue2Unit()
	 * @generated
	 * @ordered
	 */
	protected UnitSymbol value2Unit = VALUE2_UNIT_EDEFAULT;

	/**
	 * The default value of the '{@link #getValue1Unit() <em>Value1 Unit</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getValue1Unit()
	 * @generated
	 * @ordered
	 */
	protected static final UnitSymbol VALUE1_UNIT_EDEFAULT = UnitSymbol.A;

	/**
	 * The cached value of the '{@link #getValue1Unit() <em>Value1 Unit</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getValue1Unit()
	 * @generated
	 * @ordered
	 */
	protected UnitSymbol value1Unit = VALUE1_UNIT_EDEFAULT;

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected BasicIntervalScheduleImpl() {
		super();
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected EClass eStaticClass() {
		return CorePackage.Literals.BASIC_INTERVAL_SCHEDULE;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public UnitMultiplier getValue2Multiplier() {
		return value2Multiplier;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setValue2Multiplier(UnitMultiplier newValue2Multiplier) {
		UnitMultiplier oldValue2Multiplier = value2Multiplier;
		// null is coerced to the generated default; observers are then notified.
		value2Multiplier = newValue2Multiplier == null ? VALUE2_MULTIPLIER_EDEFAULT : newValue2Multiplier;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, CorePackage.BASIC_INTERVAL_SCHEDULE__VALUE2_MULTIPLIER, oldValue2Multiplier, value2Multiplier));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public UnitMultiplier getValue1Multiplier() {
		return value1Multiplier;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setValue1Multiplier(UnitMultiplier newValue1Multiplier) {
		UnitMultiplier oldValue1Multiplier = value1Multiplier;
		value1Multiplier = newValue1Multiplier == null ? VALUE1_MULTIPLIER_EDEFAULT : newValue1Multiplier;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, CorePackage.BASIC_INTERVAL_SCHEDULE__VALUE1_MULTIPLIER, oldValue1Multiplier, value1Multiplier));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public String getStartTime() {
		return startTime;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setStartTime(String newStartTime) {
		String oldStartTime = startTime;
		startTime = newStartTime;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, CorePackage.BASIC_INTERVAL_SCHEDULE__START_TIME, oldStartTime, startTime));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public UnitSymbol getValue2Unit() {
		return value2Unit;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setValue2Unit(UnitSymbol newValue2Unit) {
		UnitSymbol oldValue2Unit = value2Unit;
		value2Unit = newValue2Unit == null ? VALUE2_UNIT_EDEFAULT : newValue2Unit;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, CorePackage.BASIC_INTERVAL_SCHEDULE__VALUE2_UNIT, oldValue2Unit, value2Unit));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public UnitSymbol getValue1Unit() {
		return value1Unit;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setValue1Unit(UnitSymbol newValue1Unit) {
		UnitSymbol oldValue1Unit = value1Unit;
		value1Unit = newValue1Unit == null ? VALUE1_UNIT_EDEFAULT : newValue1Unit;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, CorePackage.BASIC_INTERVAL_SCHEDULE__VALUE1_UNIT, oldValue1Unit, value1Unit));
	}

	/**
	 * Reflective feature read used by the EMF runtime.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Object eGet(int featureID, boolean resolve, boolean coreType) {
		switch (featureID) {
			case CorePackage.BASIC_INTERVAL_SCHEDULE__VALUE2_MULTIPLIER:
				return getValue2Multiplier();
			case CorePackage.BASIC_INTERVAL_SCHEDULE__VALUE1_MULTIPLIER:
				return getValue1Multiplier();
			case CorePackage.BASIC_INTERVAL_SCHEDULE__START_TIME:
				return getStartTime();
			case CorePackage.BASIC_INTERVAL_SCHEDULE__VALUE2_UNIT:
				return getValue2Unit();
			case CorePackage.BASIC_INTERVAL_SCHEDULE__VALUE1_UNIT:
				return getValue1Unit();
		}
		return super.eGet(featureID, resolve, coreType);
	}

	/**
	 * Reflective feature write used by the EMF runtime.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eSet(int featureID, Object newValue) {
		switch (featureID) {
			case CorePackage.BASIC_INTERVAL_SCHEDULE__VALUE2_MULTIPLIER:
				setValue2Multiplier((UnitMultiplier)newValue);
				return;
			case CorePackage.BASIC_INTERVAL_SCHEDULE__VALUE1_MULTIPLIER:
				setValue1Multiplier((UnitMultiplier)newValue);
				return;
			case CorePackage.BASIC_INTERVAL_SCHEDULE__START_TIME:
				setStartTime((String)newValue);
				return;
			case CorePackage.BASIC_INTERVAL_SCHEDULE__VALUE2_UNIT:
				setValue2Unit((UnitSymbol)newValue);
				return;
			case CorePackage.BASIC_INTERVAL_SCHEDULE__VALUE1_UNIT:
				setValue1Unit((UnitSymbol)newValue);
				return;
		}
		super.eSet(featureID, newValue);
	}

	/**
	 * Resets a feature to its generated default.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eUnset(int featureID) {
		switch (featureID) {
			case CorePackage.BASIC_INTERVAL_SCHEDULE__VALUE2_MULTIPLIER:
				setValue2Multiplier(VALUE2_MULTIPLIER_EDEFAULT);
				return;
			case CorePackage.BASIC_INTERVAL_SCHEDULE__VALUE1_MULTIPLIER:
				setValue1Multiplier(VALUE1_MULTIPLIER_EDEFAULT);
				return;
			case CorePackage.BASIC_INTERVAL_SCHEDULE__START_TIME:
				setStartTime(START_TIME_EDEFAULT);
				return;
			case CorePackage.BASIC_INTERVAL_SCHEDULE__VALUE2_UNIT:
				setValue2Unit(VALUE2_UNIT_EDEFAULT);
				return;
			case CorePackage.BASIC_INTERVAL_SCHEDULE__VALUE1_UNIT:
				setValue1Unit(VALUE1_UNIT_EDEFAULT);
				return;
		}
		super.eUnset(featureID);
	}

	/**
	 * Reports whether a feature differs from its default.  Enum fields use
	 * identity comparison (safe for enum literals); the String field uses equals.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public boolean eIsSet(int featureID) {
		switch (featureID) {
			case CorePackage.BASIC_INTERVAL_SCHEDULE__VALUE2_MULTIPLIER:
				return value2Multiplier != VALUE2_MULTIPLIER_EDEFAULT;
			case CorePackage.BASIC_INTERVAL_SCHEDULE__VALUE1_MULTIPLIER:
				return value1Multiplier != VALUE1_MULTIPLIER_EDEFAULT;
			case CorePackage.BASIC_INTERVAL_SCHEDULE__START_TIME:
				return START_TIME_EDEFAULT == null ? startTime != null : !START_TIME_EDEFAULT.equals(startTime);
			case CorePackage.BASIC_INTERVAL_SCHEDULE__VALUE2_UNIT:
				return value2Unit != VALUE2_UNIT_EDEFAULT;
			case CorePackage.BASIC_INTERVAL_SCHEDULE__VALUE1_UNIT:
				return value1Unit != VALUE1_UNIT_EDEFAULT;
		}
		return super.eIsSet(featureID);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public String toString() {
		if (eIsProxy()) return super.toString();

		StringBuffer result = new StringBuffer(super.toString());
		result.append(" (value2Multiplier: ");
		result.append(value2Multiplier);
		result.append(", value1Multiplier: ");
		result.append(value1Multiplier);
		result.append(", startTime: ");
		result.append(startTime);
		result.append(", value2Unit: ");
		result.append(value2Unit);
		result.append(", value1Unit: ");
		result.append(value1Unit);
		result.append(')');
		return result.toString();
	}

} //BasicIntervalScheduleImpl
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.felix.ipojo.test.scenarios.service.dependency;

import java.util.Properties;

import org.apache.felix.ipojo.ComponentInstance;
import org.apache.felix.ipojo.architecture.Architecture;
import org.apache.felix.ipojo.architecture.InstanceDescription;
import org.apache.felix.ipojo.junit4osgi.OSGiTestCase;
import org.apache.felix.ipojo.test.scenarios.service.dependency.service.CheckService;
import org.apache.felix.ipojo.test.scenarios.util.Utils;
import org.osgi.framework.ServiceReference;

/**
 * JUnit3-style OSGi test exercising proxied Set-based multiple-cardinality
 * dependencies.  Two FooService providers are started/stopped while the
 * instance state and the CheckService counters ("voidB", "int", ...) are
 * observed.  With proxied dependencies the bind/unbind callback counters
 * stay 0 here; the "int"/"long"/"double" values track the number of
 * providers currently invoked.
 */
public class ProxiedSetMultipleDependencies extends OSGiTestCase {

    ComponentInstance instance1, instance2;      // "Simple" (mandatory) and "Optional" consumers
    ComponentInstance fooProvider1, fooProvider2; // two FooService providers, created stopped

    public void setUp() {
        try {
            // Both providers are created and immediately stopped so each test
            // controls exactly when a provider becomes available.
            Properties prov = new Properties();
            prov.put("instance.name","FooProvider1");
            fooProvider1 = Utils.getFactoryByName(getContext(), "FooProviderType-1").createComponentInstance(prov);
            fooProvider1.stop();

            Properties prov2 = new Properties();
            prov2.put("instance.name","FooProvider2");
            fooProvider2 = Utils.getFactoryByName(getContext(), "FooProviderType-1").createComponentInstance(prov2);
            fooProvider2.stop();

            Properties i1 = new Properties();
            i1.put("instance.name","Simple");
            instance1 = Utils.getFactoryByName(getContext(), "ProxiedSimpleSetCheckServiceProvider").createComponentInstance(i1);

            Properties i2 = new Properties();
            i2.put("instance.name","Optional");
            instance2 = Utils.getFactoryByName(getContext(), "ProxiedOptionalSetCheckServiceProvider").createComponentInstance(i2);
        } catch(Exception e) {
            // NOTE(review): only the message survives here; the stack trace of the
            // original failure is lost.
            fail(e.getMessage());
        }
    }

    public void tearDown() {
        instance1.dispose();
        instance2.dispose();
        fooProvider1.dispose();
        fooProvider2.dispose();
        instance1 = null;
        instance2 = null;
        fooProvider1 = null;
        fooProvider2 = null;
    }

    /** Mandatory set dependency: instance is INVALID until a provider appears. */
    public void testSimple() {
        ServiceReference arch_ref = Utils.getServiceReferenceByName(getContext(), Architecture.class.getName(), instance1.getInstanceName());
        assertNotNull("Check architecture availability", arch_ref);
        InstanceDescription id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
        // Phase 1: no provider started yet -> mandatory dependency unsatisfied.
        assertTrue("Check instance invalidity - 1", id.getState() == ComponentInstance.INVALID);

        // Phase 2: first provider up -> instance becomes VALID.
        fooProvider1.start();
        id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
        assertTrue("Check instance validity - 2", id.getState() == ComponentInstance.VALID);
        ServiceReference cs_ref = Utils.getServiceReferenceByName(getContext(), CheckService.class.getName(), instance1.getInstanceName());
        assertNotNull("Check CheckService availability", cs_ref);
        CheckService cs = (CheckService) getContext().getService(cs_ref);
        Properties props = cs.getProps();
        //Check properties
        assertTrue("check CheckService invocation - 1", ((Boolean)props.get("result")).booleanValue()); // True, a provider is here
        assertEquals("check void bind invocation - 1", ((Integer)props.get("voidB")).intValue(), 0);
        assertEquals("check void unbind callback invocation - 1", ((Integer)props.get("voidU")).intValue(), 0);
        assertEquals("check object bind callback invocation - 1", ((Integer)props.get("objectB")).intValue(), 0);
        assertEquals("check object unbind callback invocation - 1", ((Integer)props.get("objectU")).intValue(), 0);
        assertEquals("check ref bind callback invocation - 1", ((Integer)props.get("refB")).intValue(), 0);
        assertEquals("check ref unbind callback invocation - 1", ((Integer)props.get("refU")).intValue(), 0);
        assertEquals("Check FS invocation (int) - 1", ((Integer)props.get("int")).intValue(), 1);
        assertEquals("Check FS invocation (long) - 1", ((Long)props.get("long")).longValue(), 1);
        assertEquals("Check FS invocation (double) - 1", ((Double)props.get("double")).doubleValue(), 1.0);

        // Phase 3: second provider up -> both members of the set are invoked.
        fooProvider2.start();
        id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
        assertTrue("Check instance validity - 3", id.getState() == ComponentInstance.VALID);
        cs = (CheckService) getContext().getService(cs_ref);
        props = cs.getProps();
        //Check properties
        assertTrue("check CheckService invocation - 2", ((Boolean)props.get("result")).booleanValue()); // True, two providers are here
        assertEquals("check void bind invocation - 2", ((Integer)props.get("voidB")).intValue(), 0);
        assertEquals("check void unbind callback invocation - 2", ((Integer)props.get("voidU")).intValue(), 0);
        assertEquals("check object bind callback invocation - 2", ((Integer)props.get("objectB")).intValue(), 0);
        assertEquals("check object unbind callback invocation - 2", ((Integer)props.get("objectU")).intValue(), 0);
        assertEquals("check ref bind callback invocation - 2", ((Integer)props.get("refB")).intValue(), 0);
        assertEquals("check ref unbind callback invocation - 2", ((Integer)props.get("refU")).intValue(), 0);
        assertEquals("Check FS invocation (int) - 2", ((Integer)props.get("int")).intValue(), 2);
        assertEquals("Check FS invocation (long) - 2", ((Long)props.get("long")).longValue(), 2);
        assertEquals("Check FS invocation (double) - 2", ((Double)props.get("double")).doubleValue(), 2.0);

        // Phase 4: first provider stops -> still VALID with one provider left.
        fooProvider1.stop();
        id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
        assertTrue("Check instance validity - 4", id.getState() == ComponentInstance.VALID);
        cs = (CheckService) getContext().getService(cs_ref);
        props = cs.getProps();
        //Check properties
        assertTrue("check CheckService invocation - 3", ((Boolean)props.get("result")).booleanValue()); // True, one provider is still here
        assertEquals("check void bind invocation - 3", ((Integer)props.get("voidB")).intValue(), 0);
        assertEquals("check void unbind callback invocation - 3", ((Integer)props.get("voidU")).intValue(), 0);
        assertEquals("check object bind callback invocation - 3", ((Integer)props.get("objectB")).intValue(), 0);
        assertEquals("check object unbind callback invocation - 3", ((Integer)props.get("objectU")).intValue(), 0);
        assertEquals("check ref bind callback invocation - 3", ((Integer)props.get("refB")).intValue(), 0);
        assertEquals("check ref unbind callback invocation - 3", ((Integer)props.get("refU")).intValue(), 0);
        assertEquals("Check FS invocation (int) - 3", ((Integer)props.get("int")).intValue(), 1);
        assertEquals("Check FS invocation (long) - 3", ((Long)props.get("long")).longValue(), 1);
        assertEquals("Check FS invocation (double) - 3", ((Double)props.get("double")).doubleValue(), 1.0);

        // Phase 5: last provider stops -> mandatory dependency broken, INVALID again.
        fooProvider2.stop();
        id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
        assertTrue("Check instance validity - 5", id.getState() == ComponentInstance.INVALID);
        id = null;
        cs = null;
        getContext().ungetService(arch_ref);
        getContext().ungetService(cs_ref);
    }

    /** Optional set dependency: instance stays VALID with zero providers. */
    public void testOptional() {
        ServiceReference arch_ref = Utils.getServiceReferenceByName(getContext(), Architecture.class.getName(), instance2.getInstanceName());
        assertNotNull("Check architecture availability", arch_ref);
        InstanceDescription id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
        // Phase 0: no provider, but the dependency is optional -> VALID.
        assertTrue("Check instance validity - 1", id.getState() == ComponentInstance.VALID);
        ServiceReference cs_ref = Utils.getServiceReferenceByName(getContext(), CheckService.class.getName(), instance2.getInstanceName());
        assertNotNull("Check CheckService availability", cs_ref);
        CheckService cs = (CheckService) getContext().getService(cs_ref);
        Properties props = cs.getProps();
        //Check properties
        assertFalse("check CheckService invocation - 0", ((Boolean)props.get("result")).booleanValue()); // False : no provider
        assertEquals("check void bind invocation - 0", ((Integer)props.get("voidB")).intValue(), 0);
        assertEquals("check void unbind callback invocation - 0", ((Integer)props.get("voidU")).intValue(), 0);
        assertEquals("check object bind callback invocation - 0", ((Integer)props.get("objectB")).intValue(), 0);
        assertEquals("check object unbind callback invocation - 0", ((Integer)props.get("objectU")).intValue(), 0);
        assertEquals("check ref bind callback invocation - 0", ((Integer)props.get("refB")).intValue(), 0);
        assertEquals("check ref unbind callback invocation - 0", ((Integer)props.get("refU")).intValue(), 0);
        assertEquals("Check FS invocation (int) - 0", ((Integer)props.get("int")).intValue(), 0);
        assertEquals("Check FS invocation (long) - 0", ((Long)props.get("long")).longValue(), 0);
        assertEquals("Check FS invocation (double) - 0", ((Double)props.get("double")).doubleValue(), 0.0);

        // Phase 1: first provider up.
        fooProvider1.start();
        id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
        assertTrue("Check instance validity - 2", id.getState() == ComponentInstance.VALID);
        cs = (CheckService) getContext().getService(cs_ref);
        props = cs.getProps();
        //Check properties
        assertTrue("check CheckService invocation - 1", ((Boolean)props.get("result")).booleanValue()); // True, a provider is here
        assertEquals("check void bind invocation - 1", ((Integer)props.get("voidB")).intValue(), 0);
        assertEquals("check void unbind callback invocation - 1", ((Integer)props.get("voidU")).intValue(), 0);
        assertEquals("check object bind callback invocation - 1", ((Integer)props.get("objectB")).intValue(), 0);
        assertEquals("check object unbind callback invocation - 1", ((Integer)props.get("objectU")).intValue(), 0);
        assertEquals("check ref bind callback invocation - 1", ((Integer)props.get("refB")).intValue(), 0);
        assertEquals("check ref unbind callback invocation - 1", ((Integer)props.get("refU")).intValue(), 0);
        assertEquals("Check FS invocation (int) - 1", ((Integer)props.get("int")).intValue(), 1);
        assertEquals("Check FS invocation (long) - 1", ((Long)props.get("long")).longValue(), 1);
        assertEquals("Check FS invocation (double) - 1", ((Double)props.get("double")).doubleValue(), 1.0);

        // Phase 2: both providers up.
        fooProvider2.start();
        id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
        assertTrue("Check instance validity - 3", id.getState() == ComponentInstance.VALID);
        cs = (CheckService) getContext().getService(cs_ref);
        props = cs.getProps();
        //Check properties
        assertTrue("check CheckService invocation - 2", ((Boolean)props.get("result")).booleanValue()); // True, two providers are here
        assertEquals("check void bind invocation - 2", ((Integer)props.get("voidB")).intValue(), 0);
        assertEquals("check void unbind callback invocation - 2", ((Integer)props.get("voidU")).intValue(), 0);
        assertEquals("check object bind callback invocation - 2", ((Integer)props.get("objectB")).intValue(), 0);
        assertEquals("check object unbind callback invocation - 2", ((Integer)props.get("objectU")).intValue(), 0);
        assertEquals("check ref bind callback invocation - 2", ((Integer)props.get("refB")).intValue(), 0);
        assertEquals("check ref unbind callback invocation - 2", ((Integer)props.get("refU")).intValue(), 0);
        assertEquals("Check FS invocation (int) - 2", ((Integer)props.get("int")).intValue(), 2);
        assertEquals("Check FS invocation (long) - 2", ((Long)props.get("long")).longValue(), 2);
        assertEquals("Check FS invocation (double) - 2", ((Double)props.get("double")).doubleValue(), 2.0);

        // Phase 3: first provider stops -> one provider remains.
        fooProvider1.stop();
        id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
        assertTrue("Check instance validity - 4", id.getState() == ComponentInstance.VALID);
        cs = (CheckService) getContext().getService(cs_ref);
        props = cs.getProps();
        //Check properties
        assertTrue("check CheckService invocation - 3", ((Boolean)props.get("result")).booleanValue()); // True, it still one provider.
        assertEquals("check void bind invocation - 3", ((Integer)props.get("voidB")).intValue(), 0);
        assertEquals("check void unbind callback invocation - 3", ((Integer)props.get("voidU")).intValue(), 0);
        assertEquals("check object bind callback invocation - 3", ((Integer)props.get("objectB")).intValue(), 0);
        assertEquals("check object unbind callback invocation - 3", ((Integer)props.get("objectU")).intValue(), 0);
        assertEquals("check ref bind callback invocation - 3", ((Integer)props.get("refB")).intValue(), 0);
        assertEquals("check ref unbind callback invocation - 3", ((Integer)props.get("refU")).intValue(), 0);
        assertEquals("Check FS invocation (int) - 3", ((Integer)props.get("int")).intValue(), 1);
        assertEquals("Check FS invocation (long) - 3", ((Long)props.get("long")).longValue(), 1);
        assertEquals("Check FS invocation (double) - 3", ((Double)props.get("double")).doubleValue(), 1.0);

        // Phase 4: no providers left -> still VALID (optional), but result is false.
        fooProvider2.stop();
        id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
        assertTrue("Check instance validity - 5", id.getState() == ComponentInstance.VALID);
        cs = (CheckService) getContext().getService(cs_ref);
        props = cs.getProps();
        //Check properties
        assertFalse("check CheckService invocation - 4", ((Boolean)props.get("result")).booleanValue()); // False, no more provider.
        assertEquals("check void bind invocation - 4", ((Integer)props.get("voidB")).intValue(), 0);
        assertEquals("check void unbind callback invocation - 4", ((Integer)props.get("voidU")).intValue(), 0);
        assertEquals("check object bind callback invocation - 4", ((Integer)props.get("objectB")).intValue(), 0);
        assertEquals("check object unbind callback invocation - 4", ((Integer)props.get("objectU")).intValue(), 0);
        assertEquals("check ref bind callback invocation - 4", ((Integer)props.get("refB")).intValue(), 0);
        assertEquals("check ref unbind callback invocation - 4", ((Integer)props.get("refU")).intValue(), 0);
        assertEquals("Check FS invocation (int) - 4", ((Integer)props.get("int")).intValue(), 0);
        assertEquals("Check FS invocation (long) - 4", ((Long)props.get("long")).longValue(), 0);
        assertEquals("Check FS invocation (double) - 4", ((Double)props.get("double")).doubleValue(), 0.0);
        id = null;
        cs = null;
        getContext().ungetService(arch_ref);
        getContext().ungetService(cs_ref);
    }
}
// This file is part of OpenTSDB. // Copyright (C) 2010-2012 The OpenTSDB Authors. // // This program is free software: you can redistribute it and/or modify it // under the terms of the GNU Lesser General Public License as published by // the Free Software Foundation, either version 2.1 of the License, or (at your // option) any later version. This program is distributed in the hope that it // will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty // of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser // General Public License for more details. You should have received a copy // of the GNU Lesser General Public License along with this program. If not, // see <http://www.gnu.org/licenses/>. package net.opentsdb.tools; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; /** * A dead simple command-line argument parser. * Because I couldn't find any one in Java that wasn't horribly bloated. * <p/> * Example: * <pre>{@literal * public static void main(String[] args) { * final ArgP argp = new ArgP(); * argp.addOption("--verbose", "Whether or not to be verbose."); * argp.addOption("--path", "PATH", "The input path to read."); * try { * args = argp.parse(args); * } catch (IllegalArgumentException e) { * System.err.println(e.getMessage()); * System.err.print(argp.usage()); // Note: usage already ends with \n. * System.exit(1); * } * final boolean verbose = argp.has("--verbose"); * final String path = argp.get("--path"); // Check that it's non-null. * ... * } * }</pre> * This parser honors the convention that argument {@code --} means * "stop parsing options". * <p/> * This class is not thread-safe. */ public final class ArgP { /** * Maps an option name (e.g, {@code "--foo"}) to a 2-element array * {@code ["META", "Help string"]} */ private final HashMap<String, String[]> options = new HashMap<String, String[]>(); /** * Maps an option name to the value parsed for this option. 
* The value can be {@code null}. */ private HashMap<String, String> parsed; /** * Constructor. */ public ArgP() { } /** * Registers an option in this argument parser. * * @param name The name of the option to recognize (e.g. {@code --foo}). * @param meta The meta-variable to associate with the value of the option. * @param help A short description of this option. * @throws IllegalArgumentException if the given name was already used. * @throws IllegalArgumentException if the name doesn't start with a dash. * @throws IllegalArgumentException if any of the given strings is empty. */ public void addOption(final String name, final String meta, final String help) { if (name.isEmpty()) { throw new IllegalArgumentException("empty name"); } else if (name.charAt(0) != '-') { throw new IllegalArgumentException("name must start with a `-': " + name); } else if (meta != null && meta.isEmpty()) { throw new IllegalArgumentException("empty meta"); } else if (help.isEmpty()) { throw new IllegalArgumentException("empty help"); } final String[] prev = options.put(name, new String[]{meta, help}); if (prev != null) { options.put(name, prev); // Undo the `put' above. throw new IllegalArgumentException("Option " + name + " already defined" + " in " + this); } } /** * Registers an option that doesn't take a value in this argument parser. * * @param name The name of the option to recognize (e.g. {@code --foo}). * @param help A short description of this option. * @throws IllegalArgumentException if the given name was already used. * @throws IllegalArgumentException if the name doesn't start with a dash. * @throws IllegalArgumentException if any of the given strings is empty. */ public void addOption(final String name, final String help) { addOption(name, null, help); } /** * Returns whether or not the given option name exists. * <p>Calling * {@link #addOption(String, String, String) addOption}{@code (foo, ...)} * entails that {@code optionExists(foo)} returns {@code true}. 
* * @param name The name of the option to recognize (e.g. {@code --foo}). */ public boolean optionExists(final String name) { return options.containsKey(name); } /** * Parses the command line given in argument. * * @return The remaining words that weren't options (i.e. that didn't start * with a dash). * @throws IllegalArgumentException if the given command line wasn't valid. */ public String[] parse(final String[] args) { parsed = new HashMap<String, String>(options.size()); ArrayList<String> unparsed = null; for (int i = 0; i < args.length; i++) { final String arg = args[i]; String[] opt = options.get(arg); if (opt != null) { // Perfect match: got --foo if (opt[0] != null) { // This option requires an argument. if (++i < args.length) { parsed.put(arg, args[i]); } else { throw new IllegalArgumentException("Missing argument for " + arg); } } else { parsed.put(arg, null); } continue; } // Is it a --foo=blah? final int equal = arg.indexOf('=', 1); if (equal > 0) { // Looks like so. final String name = arg.substring(0, equal); opt = options.get(name); if (opt != null) { parsed.put(name, arg.substring(equal + 1, arg.length())); continue; } } // Not a flag. if (unparsed == null) { unparsed = new ArrayList<String>(args.length - i); } if (!arg.isEmpty() && arg.charAt(0) == '-') { if (arg.length() == 2 && arg.charAt(1) == '-') { // `--' for (i++; i < args.length; i++) { unparsed.add(args[i]); } break; } throw new IllegalArgumentException("Unrecognized option " + arg); } unparsed.add(arg); } if (unparsed != null) { return unparsed.toArray(new String[unparsed.size()]); } else { return new String[0]; } } /** * Returns the value of the given option, if it was given. * Returns {@code null} if the option wasn't given, or if the option doesn't * take a value (in which case you should use {@link #has} instead). * * @param name The name of the option to recognize (e.g. {@code --foo}). * @throws IllegalArgumentException if this option wasn't registered with * {@link #addOption}. 
* @throws IllegalStateException if {@link #parse} wasn't called. */ public String get(final String name) { if (!options.containsKey(name)) { throw new IllegalArgumentException("Unknown option " + name); } else if (parsed == null) { throw new IllegalStateException("parse() wasn't called"); } return parsed.get(name); } /** * Returns the value of the given option, or a default value. * * @param name The name of the option to recognize (e.g. {@code --foo}). * @param defaultv The default value to return if the option wasn't given. * @throws IllegalArgumentException if this option wasn't registered with * {@link #addOption}. * @throws IllegalStateException if {@link #parse} wasn't called. */ public String get(final String name, final String defaultv) { final String value = get(name); return value == null ? defaultv : value; } /** * Returns whether or not the given option was given. * * @param name The name of the option to recognize (e.g. {@code --foo}). * @throws IllegalArgumentException if this option wasn't registered with * {@link #addOption}. * @throws IllegalStateException if {@link #parse} wasn't called. */ public boolean has(final String name) { if (!options.containsKey(name)) { throw new IllegalArgumentException("Unknown option " + name); } else if (parsed == null) { throw new IllegalStateException("parse() wasn't called"); } return parsed.containsKey(name); } /** * Appends the usage to the given buffer. * * @param buf The buffer to write to. */ public void addUsageTo(final StringBuilder buf) { final ArrayList<String> names = new ArrayList<String>(options.keySet()); Collections.sort(names); int max_length = 0; for (final String name : names) { final String[] opt = options.get(name); final int length = name.length() + (opt[0] == null ? 
0 : opt[0].length() + 1); if (length > max_length) { max_length = length; } } for (final String name : names) { final String[] opt = options.get(name); int length = name.length(); buf.append(" ").append(name); if (opt[0] != null) { length += opt[0].length() + 1; buf.append('=').append(opt[0]); } for (int i = length; i <= max_length; i++) { buf.append(' '); } buf.append(opt[1]).append('\n'); } } /** * Returns a usage string. */ public String usage() { final StringBuilder buf = new StringBuilder(16 * options.size()); addUsageTo(buf); return buf.toString(); } public String toString() { final StringBuilder buf = new StringBuilder(16 * options.size()); buf.append("ArgP("); for (final String name : options.keySet()) { final String[] opt = options.get(name); buf.append(name) .append("=(").append(opt[0]).append(", ").append(opt[1]).append(')') .append(", "); } buf.setLength(buf.length() - 2); buf.append(')'); return buf.toString(); } }
/* * Copyright 2013-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.cxx; import com.facebook.buck.io.ProjectFilesystem; import com.facebook.buck.json.JsonConcatenate; import com.facebook.buck.log.Logger; import com.facebook.buck.model.BuildTarget; import com.facebook.buck.model.BuildTargets; import com.facebook.buck.model.Flavor; import com.facebook.buck.model.FlavorDomain; import com.facebook.buck.model.InternalFlavor; import com.facebook.buck.parser.NoSuchBuildTargetException; import com.facebook.buck.rules.BuildRule; import com.facebook.buck.rules.BuildRuleParams; import com.facebook.buck.rules.BuildRuleResolver; import com.facebook.buck.rules.CellPathResolver; import com.facebook.buck.rules.CommandTool; import com.facebook.buck.rules.DefaultSourcePathResolver; import com.facebook.buck.rules.RuleKeyObjectSink; import com.facebook.buck.rules.SourcePath; import com.facebook.buck.rules.SourcePathResolver; import com.facebook.buck.rules.SourcePathRuleFinder; import com.facebook.buck.rules.SourceWithFlags; import com.facebook.buck.rules.SymlinkTree; import com.facebook.buck.rules.TargetGraph; import com.facebook.buck.rules.args.Arg; import com.facebook.buck.rules.args.FileListableLinkerInputArg; import com.facebook.buck.rules.args.RuleKeyAppendableFunction; import com.facebook.buck.rules.args.SourcePathArg; import com.facebook.buck.rules.args.StringArg; import com.facebook.buck.rules.args.StringWithMacrosArg; import 
com.facebook.buck.rules.coercer.FrameworkPath; import com.facebook.buck.rules.coercer.PatternMatchedCollection; import com.facebook.buck.rules.coercer.SourceList; import com.facebook.buck.rules.macros.LocationMacroExpander; import com.facebook.buck.rules.macros.MacroHandler; import com.facebook.buck.rules.macros.StringWithMacros; import com.facebook.buck.rules.query.QueryUtils; import com.facebook.buck.util.HumanReadableException; import com.facebook.buck.util.MoreCollectors; import com.facebook.buck.util.RichStream; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Functions; import com.google.common.base.Joiner; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import com.google.common.collect.ImmutableCollection; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableListMultimap; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMultimap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSortedMap; import com.google.common.collect.ImmutableSortedSet; import com.google.common.collect.Multimaps; import com.google.common.io.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.HashMap; import java.util.Map; import java.util.Optional; import java.util.SortedSet; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Stream; import java.util.stream.StreamSupport; public class CxxDescriptionEnhancer { private static final Logger LOG = Logger.get(CxxDescriptionEnhancer.class); public static final Flavor SANDBOX_TREE_FLAVOR = InternalFlavor.of("sandbox"); public static final Flavor HEADER_SYMLINK_TREE_FLAVOR = InternalFlavor.of("private-headers"); public static final Flavor EXPORTED_HEADER_SYMLINK_TREE_FLAVOR = InternalFlavor.of("headers"); public static final Flavor STATIC_FLAVOR = InternalFlavor.of("static"); public static final 
Flavor STATIC_PIC_FLAVOR = InternalFlavor.of("static-pic");
  public static final Flavor SHARED_FLAVOR = InternalFlavor.of("shared");
  public static final Flavor MACH_O_BUNDLE_FLAVOR = InternalFlavor.of("mach-o-bundle");
  public static final Flavor SHARED_LIBRARY_SYMLINK_TREE_FLAVOR =
      InternalFlavor.of("shared-library-symlink-tree");

  public static final Flavor CXX_LINK_BINARY_FLAVOR = InternalFlavor.of("binary");

  // Handles `$(location ...)` macros in user-supplied strings.
  protected static final MacroHandler MACRO_HANDLER =
      new MacroHandler(ImmutableMap.of("location", new LocationMacroExpander()));

  // Matches the `$(ext)` / `$(ext <version>)` macro recognized in declared sonames
  // (see getNonDefaultSharedLibrarySoname, defined later in this file).
  private static final Pattern SONAME_EXT_MACRO_PATTERN =
      Pattern.compile("\\$\\(ext(?: ([.0-9]+))?\\)");

  // Static-utility class: not instantiable.
  private CxxDescriptionEnhancer() {}

  /**
   * Picks the header mode for a platform: a plain symlink tree when the platform's
   * preprocessors can't use header maps, otherwise a header map (optionally backed
   * by a symlink tree when {@code shouldCreateHeadersSymlinks} is set).
   */
  public static CxxPreprocessables.HeaderMode getHeaderModeForPlatform(
      BuildRuleResolver resolver, CxxPlatform cxxPlatform, boolean shouldCreateHeadersSymlinks) {
    // Header maps are only usable when BOTH the C and C++ preprocessors support them.
    boolean useHeaderMap =
        (cxxPlatform.getCpp().resolve(resolver).supportsHeaderMaps()
            && cxxPlatform.getCxxpp().resolve(resolver).supportsHeaderMaps());
    return !useHeaderMap
        ? CxxPreprocessables.HeaderMode.SYMLINK_TREE_ONLY
        : (shouldCreateHeadersSymlinks
            ? CxxPreprocessables.HeaderMode.SYMLINK_TREE_WITH_HEADER_MAP
            : CxxPreprocessables.HeaderMode.HEADER_MAP_ONLY);
  }

  /** Creates the header symlink tree build rule for the given headers and mode. */
  public static HeaderSymlinkTree createHeaderSymlinkTree(
      BuildTarget buildTarget,
      ProjectFilesystem projectFilesystem,
      CxxPreprocessables.HeaderMode mode,
      ImmutableMap<Path, SourcePath> headers,
      HeaderVisibility headerVisibility,
      Flavor... flavors) {
    BuildTarget headerSymlinkTreeTarget =
        CxxDescriptionEnhancer.createHeaderSymlinkTreeTarget(
            buildTarget, headerVisibility, flavors);
    Path headerSymlinkTreeRoot =
        CxxDescriptionEnhancer.getHeaderSymlinkTreePath(
            projectFilesystem, buildTarget, headerVisibility, flavors);
    return CxxPreprocessables.createHeaderSymlinkTreeBuildRule(
        headerSymlinkTreeTarget, projectFilesystem, headerSymlinkTreeRoot, headers, mode);
  }

  /** Convenience overload that derives the header mode from the platform. */
  public static HeaderSymlinkTree createHeaderSymlinkTree(
      BuildTarget buildTarget,
      ProjectFilesystem projectFilesystem,
      BuildRuleResolver resolver,
      CxxPlatform cxxPlatform,
      ImmutableMap<Path, SourcePath> headers,
      HeaderVisibility headerVisibility,
      boolean shouldCreateHeadersSymlinks) {
    return createHeaderSymlinkTree(
        buildTarget,
        projectFilesystem,
        getHeaderModeForPlatform(resolver, cxxPlatform, shouldCreateHeadersSymlinks),
        headers,
        headerVisibility,
        cxxPlatform.getFlavor());
  }

  /** Creates the sandbox symlink tree rule for the given platform and link map. */
  public static SymlinkTree createSandboxSymlinkTree(
      BuildTarget baseBuildTarget,
      ProjectFilesystem projectFilesystem,
      CxxPlatform cxxPlatform,
      ImmutableMap<Path, SourcePath> map) {
    BuildTarget sandboxSymlinkTreeTarget =
        CxxDescriptionEnhancer.createSandboxSymlinkTreeTarget(
            baseBuildTarget, cxxPlatform.getFlavor());
    Path sandboxSymlinkTreeRoot =
        CxxDescriptionEnhancer.getSandboxSymlinkTreePath(
            projectFilesystem, sandboxSymlinkTreeTarget);
    return new SymlinkTree(
        sandboxSymlinkTreeTarget, projectFilesystem, sandboxSymlinkTreeRoot, map);
  }

  /**
   * Returns the header symlink tree for the (untyped) build target, creating and
   * caching it in the resolver on first request.
   */
  public static HeaderSymlinkTree requireHeaderSymlinkTree(
      BuildTarget buildTarget,
      ProjectFilesystem projectFilesystem,
      BuildRuleResolver ruleResolver,
      CxxPlatform cxxPlatform,
      ImmutableMap<Path, SourcePath> headers,
      HeaderVisibility headerVisibility,
      boolean shouldCreateHeadersSymlinks) {
    BuildTarget untypedTarget = CxxLibraryDescription.getUntypedBuildTarget(buildTarget);
    return (HeaderSymlinkTree)
        ruleResolver.computeIfAbsent(
            // TODO(yiding): this build target gets recomputed in createHeaderSymlinkTree, it should
            // be passed down instead.
            CxxDescriptionEnhancer.createHeaderSymlinkTreeTarget(
                untypedTarget, headerVisibility, cxxPlatform.getFlavor()),
            (ignored) ->
                createHeaderSymlinkTree(
                    untypedTarget,
                    projectFilesystem,
                    ruleResolver,
                    cxxPlatform,
                    headers,
                    headerVisibility,
                    shouldCreateHeadersSymlinks));
  }

  /** Requires the sandbox symlink tree rule, asserting it is in fact a SymlinkTree. */
  private static SymlinkTree requireSandboxSymlinkTree(
      BuildTarget buildTarget, BuildRuleResolver ruleResolver, CxxPlatform cxxPlatform)
      throws NoSuchBuildTargetException {
    BuildTarget untypedTarget = CxxLibraryDescription.getUntypedBuildTarget(buildTarget);
    BuildTarget headerSymlinkTreeTarget =
        CxxDescriptionEnhancer.createSandboxSymlinkTreeTarget(
            untypedTarget, cxxPlatform.getFlavor());
    BuildRule rule = ruleResolver.requireRule(headerSymlinkTreeTarget);
    Preconditions.checkState(
        rule instanceof SymlinkTree, rule.getBuildTarget() + " " + rule.getClass().toString());
    return (SymlinkTree) rule;
  }

  /**
   * @return the {@link BuildTarget} to use for the {@link BuildRule} generating the symlink tree of
   *     headers.
   */
  @VisibleForTesting
  public static BuildTarget createHeaderSymlinkTreeTarget(
      BuildTarget target, HeaderVisibility headerVisibility, Flavor... flavors) {
    return BuildTarget.builder(target)
        .addFlavors(getHeaderSymlinkTreeFlavor(headerVisibility))
        .addFlavors(flavors)
        .build();
  }

  @VisibleForTesting
  public static BuildTarget createSandboxSymlinkTreeTarget(BuildTarget target, Flavor platform) {
    return BuildTarget.builder(target).addFlavors(platform).addFlavors(SANDBOX_TREE_FLAVOR).build();
  }

  /** @return the absolute {@link Path} to use for the symlink tree of headers. */
  public static Path getHeaderSymlinkTreePath(
      ProjectFilesystem filesystem,
      BuildTarget target,
      HeaderVisibility headerVisibility,
      Flavor...
flavors) { return BuildTargets.getGenPath( filesystem, createHeaderSymlinkTreeTarget(target, headerVisibility, flavors), "%s"); } public static Path getSandboxSymlinkTreePath(ProjectFilesystem filesystem, BuildTarget target) { return BuildTargets.getGenPath(filesystem, target, "%s"); } public static Flavor getHeaderSymlinkTreeFlavor(HeaderVisibility headerVisibility) { switch (headerVisibility) { case PUBLIC: return EXPORTED_HEADER_SYMLINK_TREE_FLAVOR; case PRIVATE: return HEADER_SYMLINK_TREE_FLAVOR; default: throw new RuntimeException("Unexpected value of enum ExportMode"); } } static ImmutableMap<String, SourcePath> parseOnlyHeaders( BuildTarget buildTarget, SourcePathRuleFinder ruleFinder, SourcePathResolver sourcePathResolver, String parameterName, SourceList exportedHeaders) { return exportedHeaders.toNameMap( buildTarget, sourcePathResolver, parameterName, path -> !CxxGenruleDescription.wrapsCxxGenrule(ruleFinder, path), path -> path); } static ImmutableMap<String, SourcePath> parseOnlyPlatformHeaders( BuildTarget buildTarget, BuildRuleResolver resolver, SourcePathRuleFinder ruleFinder, SourcePathResolver sourcePathResolver, CxxPlatform cxxPlatform, String headersParameterName, SourceList headers, String platformHeadersParameterName, PatternMatchedCollection<SourceList> platformHeaders) throws NoSuchBuildTargetException { ImmutableMap.Builder<String, SourcePath> parsed = ImmutableMap.builder(); java.util.function.Function<SourcePath, SourcePath> fixup = path -> { try { return CxxGenruleDescription.fixupSourcePath(resolver, ruleFinder, cxxPlatform, path); } catch (NoSuchBuildTargetException e) { throw new RuntimeException(e); } }; // Include all normal exported headers that are generated by `cxx_genrule`. parsed.putAll( headers.toNameMap( buildTarget, sourcePathResolver, headersParameterName, path -> CxxGenruleDescription.wrapsCxxGenrule(ruleFinder, path), fixup)); // Include all platform specific headers. 
for (SourceList sourceList : platformHeaders.getMatchingValues(cxxPlatform.getFlavor().toString())) { parsed.putAll( sourceList.toNameMap( buildTarget, sourcePathResolver, platformHeadersParameterName, path -> true, fixup)); } return parsed.build(); } /** * @return a map of header locations to input {@link SourcePath} objects formed by parsing the * input {@link SourcePath} objects for the "headers" parameter. */ public static ImmutableMap<Path, SourcePath> parseHeaders( BuildTarget buildTarget, BuildRuleResolver resolver, SourcePathRuleFinder ruleFinder, SourcePathResolver sourcePathResolver, Optional<CxxPlatform> cxxPlatform, CxxConstructorArg args) throws NoSuchBuildTargetException { ImmutableMap.Builder<String, SourcePath> headers = ImmutableMap.builder(); // Add platform-agnostic headers. headers.putAll( parseOnlyHeaders( buildTarget, ruleFinder, sourcePathResolver, "headers", args.getHeaders())); // Add platform-specific headers. if (cxxPlatform.isPresent()) { headers.putAll( parseOnlyPlatformHeaders( buildTarget, resolver, ruleFinder, sourcePathResolver, cxxPlatform.get(), "headers", args.getHeaders(), "platform_headers", args.getPlatformHeaders())); } return CxxPreprocessables.resolveHeaderMap( args.getHeaderNamespace().map(Paths::get).orElse(buildTarget.getBasePath()), headers.build()); } /** * @return a map of header locations to input {@link SourcePath} objects formed by parsing the * input {@link SourcePath} objects for the "exportedHeaders" parameter. */ public static ImmutableMap<Path, SourcePath> parseExportedHeaders( BuildTarget buildTarget, BuildRuleResolver resolver, SourcePathRuleFinder ruleFinder, SourcePathResolver sourcePathResolver, Optional<CxxPlatform> cxxPlatform, CxxLibraryDescription.CommonArg args) throws NoSuchBuildTargetException { ImmutableMap.Builder<String, SourcePath> headers = ImmutableMap.builder(); // Include platform-agnostic headers. 
headers.putAll( parseOnlyHeaders( buildTarget, ruleFinder, sourcePathResolver, "exported_headers", args.getExportedHeaders())); // If a platform is specific, include platform-specific headers. if (cxxPlatform.isPresent()) { headers.putAll( parseOnlyPlatformHeaders( buildTarget, resolver, ruleFinder, sourcePathResolver, cxxPlatform.get(), "exported_headers", args.getExportedHeaders(), "exported_platform_headers", args.getExportedPlatformHeaders())); } return CxxPreprocessables.resolveHeaderMap( args.getHeaderNamespace().map(Paths::get).orElse(buildTarget.getBasePath()), headers.build()); } /** * @return a map of header locations to input {@link SourcePath} objects formed by parsing the * input {@link SourcePath} objects for the "exportedHeaders" parameter. */ public static ImmutableMap<Path, SourcePath> parseExportedPlatformHeaders( BuildTarget buildTarget, BuildRuleResolver resolver, SourcePathRuleFinder ruleFinder, SourcePathResolver sourcePathResolver, CxxPlatform cxxPlatform, CxxLibraryDescription.CommonArg args) throws NoSuchBuildTargetException { return CxxPreprocessables.resolveHeaderMap( args.getHeaderNamespace().map(Paths::get).orElse(buildTarget.getBasePath()), parseOnlyPlatformHeaders( buildTarget, resolver, ruleFinder, sourcePathResolver, cxxPlatform, "exported_headers", args.getExportedHeaders(), "exported_platform_headers", args.getExportedPlatformHeaders())); } /** * @return a list {@link CxxSource} objects formed by parsing the input {@link SourcePath} objects * for the "srcs" parameter. 
*/
  public static ImmutableMap<String, CxxSource> parseCxxSources(
      BuildTarget buildTarget,
      BuildRuleResolver resolver,
      SourcePathRuleFinder ruleFinder,
      SourcePathResolver pathResolver,
      CxxPlatform cxxPlatform,
      CxxConstructorArg args) {
    // Convenience overload that reads `srcs` / `platform_srcs` from the rule args.
    return parseCxxSources(
        buildTarget,
        resolver,
        ruleFinder,
        pathResolver,
        cxxPlatform,
        args.getSrcs(),
        args.getPlatformSrcs());
  }

  public static ImmutableMap<String, CxxSource> parseCxxSources(
      BuildTarget buildTarget,
      BuildRuleResolver resolver,
      SourcePathRuleFinder ruleFinder,
      SourcePathResolver pathResolver,
      CxxPlatform cxxPlatform,
      ImmutableSortedSet<SourceWithFlags> srcs,
      PatternMatchedCollection<ImmutableSortedSet<SourceWithFlags>> platformSrcs) {
    ImmutableMap.Builder<String, SourceWithFlags> sources = ImmutableMap.builder();
    // Platform-agnostic sources first.
    putAllSources(buildTarget, resolver, ruleFinder, pathResolver, cxxPlatform, srcs, sources);
    // Then any source groups whose pattern matches the current platform flavor.
    for (ImmutableSortedSet<SourceWithFlags> sourcesWithFlags :
        platformSrcs.getMatchingValues(cxxPlatform.getFlavor().toString())) {
      putAllSources(
          buildTarget, resolver, ruleFinder, pathResolver, cxxPlatform, sourcesWithFlags, sources);
    }
    return resolveCxxSources(sources.build());
  }

  // Derives names for the given sources and adds them to `sources`, fixing up
  // paths that wrap `cxx_genrule` outputs for the given platform.
  private static void putAllSources(
      BuildTarget buildTarget,
      BuildRuleResolver resolver,
      SourcePathRuleFinder ruleFinder,
      SourcePathResolver pathResolver,
      CxxPlatform cxxPlatform,
      ImmutableSortedSet<SourceWithFlags> sourcesWithFlags,
      ImmutableMap.Builder<String, SourceWithFlags> sources) {
    sources.putAll(
        pathResolver.getSourcePathNames(
            buildTarget,
            "srcs",
            sourcesWithFlags
                .stream()
                .map(
                    s -> {
                      try {
                        return s.withSourcePath(
                            CxxGenruleDescription.fixupSourcePath(
                                resolver,
                                ruleFinder,
                                cxxPlatform,
                                Preconditions.checkNotNull(s.getSourcePath())));
                      } catch (NoSuchBuildTargetException e) {
                        // Lambdas can't throw checked exceptions; preserve the cause.
                        throw new RuntimeException(e);
                      }
                    })
                .collect(MoreCollectors.toImmutableList()),
            x -> true,
            SourceWithFlags::getSourcePath));
  }

  /**
   * Assembles the preprocessor input for a rule: local flags/includes/frameworks,
   * preprocessor input from deps, and the private includes of rules that declare
   * this target as a test.
   */
  public static ImmutableList<CxxPreprocessorInput> collectCxxPreprocessorInput(
      BuildTarget target,
      CxxPlatform cxxPlatform,
      Iterable<BuildRule> deps,
      ImmutableMultimap<CxxSource.Type, ? extends Arg> preprocessorFlags,
      ImmutableList<HeaderSymlinkTree> headerSymlinkTrees,
      ImmutableSet<FrameworkPath> frameworks,
      Iterable<CxxPreprocessorInput> cxxPreprocessorInputFromDeps,
      ImmutableList<String> includeDirs,
      Optional<SymlinkTree> symlinkTree)
      throws NoSuchBuildTargetException {
    // Add the private includes of any rules which this rule depends on, and which list this rule as
    // a test.
    BuildTarget targetWithoutFlavor = BuildTarget.of(target.getUnflavoredBuildTarget());
    ImmutableList.Builder<CxxPreprocessorInput> cxxPreprocessorInputFromTestedRulesBuilder =
        ImmutableList.builder();
    for (BuildRule rule : deps) {
      if (rule instanceof NativeTestable) {
        NativeTestable testable = (NativeTestable) rule;
        if (testable.isTestedBy(targetWithoutFlavor)) {
          LOG.debug(
              "Adding private includes of tested rule %s to testing rule %s",
              rule.getBuildTarget(), target);
          cxxPreprocessorInputFromTestedRulesBuilder.add(
              testable.getPrivateCxxPreprocessorInput(cxxPlatform));

          // Add any dependent headers
          cxxPreprocessorInputFromTestedRulesBuilder.addAll(
              CxxPreprocessables.getTransitiveCxxPreprocessorInput(
                  cxxPlatform, ImmutableList.of(rule)));
        }
      }
    }

    ImmutableList<CxxPreprocessorInput> cxxPreprocessorInputFromTestedRules =
        cxxPreprocessorInputFromTestedRulesBuilder.build();
    LOG.verbose(
        "Rules tested by target %s added private includes %s",
        target, cxxPreprocessorInputFromTestedRules);

    ImmutableList.Builder<CxxHeaders> allIncludes = ImmutableList.builder();
    for (HeaderSymlinkTree headerSymlinkTree : headerSymlinkTrees) {
      allIncludes.add(
          CxxSymlinkTreeHeaders.from(headerSymlinkTree, CxxPreprocessables.IncludeType.LOCAL));
    }

    CxxPreprocessorInput.Builder builder = CxxPreprocessorInput.builder();
    builder.putAllPreprocessorFlags(preprocessorFlags);

    // headers from #sandbox are put before #private-headers and #headers on purpose
    // this is the only way to control windows behavior
    if (symlinkTree.isPresent()) {
      for (String includeDir : includeDirs) {
        builder.addIncludes(
            CxxSandboxInclude.from(
                symlinkTree.get(), includeDir, CxxPreprocessables.IncludeType.LOCAL));
      }
    }

    builder.addAllIncludes(allIncludes.build()).addAllFrameworks(frameworks);

    CxxPreprocessorInput localPreprocessorInput = builder.build();
    return ImmutableList.<CxxPreprocessorInput>builder()
        .add(localPreprocessorInput)
        .addAll(cxxPreprocessorInputFromDeps)
        .addAll(cxxPreprocessorInputFromTestedRules)
        .build();
  }

  /** @return the build target for the static archive flavor (PDC vs PIC). */
  public static BuildTarget createStaticLibraryBuildTarget(
      BuildTarget target, Flavor platform, CxxSourceRuleFactory.PicType pic) {
    return BuildTarget.builder(target)
        .addFlavors(platform)
        .addFlavors(pic == CxxSourceRuleFactory.PicType.PDC ? STATIC_FLAVOR : STATIC_PIC_FLAVOR)
        .build();
  }

  /** @return the build target for the shared-library/bundle flavor of {@code target}. */
  public static BuildTarget createSharedLibraryBuildTarget(
      BuildTarget target, Flavor platform, Linker.LinkType linkType) {
    Flavor linkFlavor;
    switch (linkType) {
      case SHARED:
        linkFlavor = SHARED_FLAVOR;
        break;
      case MACH_O_BUNDLE:
        linkFlavor = MACH_O_BUNDLE_FLAVOR;
        break;
      case EXECUTABLE:
      default:
        throw new IllegalStateException(
            "Only SHARED and MACH_O_BUNDLE types expected, got: " + linkType);
    }
    return BuildTarget.builder(target).addFlavors(platform).addFlavors(linkFlavor).build();
  }

  public static Path getStaticLibraryPath(
      ProjectFilesystem filesystem,
      BuildTarget target,
      Flavor platform,
      CxxSourceRuleFactory.PicType pic,
      String extension) {
    return getStaticLibraryPath(filesystem, target, platform, pic, extension, "");
  }

  /** @return the output path of the static archive, named "lib<short-name><suffix>.<ext>". */
  public static Path getStaticLibraryPath(
      ProjectFilesystem filesystem,
      BuildTarget target,
      Flavor platform,
      CxxSourceRuleFactory.PicType pic,
      String extension,
      String suffix) {
    String name = String.format("lib%s%s.%s", target.getShortName(), suffix, extension);
    return BuildTargets.getGenPath(
            filesystem, createStaticLibraryBuildTarget(target, platform, pic), "%s")
        .resolve(name);
  }

  /** @return the soname: the declared one (with $(ext) macros expanded) or a default. */
  public static String getSharedLibrarySoname(
      Optional<String> declaredSoname, BuildTarget target, CxxPlatform platform) {
    if
(!declaredSoname.isPresent()) { return getDefaultSharedLibrarySoname(target, platform); } return getNonDefaultSharedLibrarySoname( declaredSoname.get(), platform.getSharedLibraryExtension(), platform.getSharedLibraryVersionedExtensionFormat()); } @VisibleForTesting static String getNonDefaultSharedLibrarySoname( String declared, String sharedLibraryExtension, String sharedLibraryVersionedExtensionFormat) { Matcher match = SONAME_EXT_MACRO_PATTERN.matcher(declared); if (!match.find()) { return declared; } String version = match.group(1); if (version == null) { return match.replaceFirst(sharedLibraryExtension); } return match.replaceFirst(String.format(sharedLibraryVersionedExtensionFormat, version)); } public static String getDefaultSharedLibrarySoname(BuildTarget target, CxxPlatform platform) { String libName = Joiner.on('_') .join( ImmutableList.builder() .addAll( StreamSupport.stream(target.getBasePath().spliterator(), false) .map(Object::toString) .filter(x -> !x.isEmpty()) .iterator()) .add(target.getShortName()) .build()); String extension = platform.getSharedLibraryExtension(); return String.format("lib%s.%s", libName, extension); } public static Path getSharedLibraryPath( ProjectFilesystem filesystem, BuildTarget sharedLibraryTarget, String soname) { return BuildTargets.getGenPath(filesystem, sharedLibraryTarget, "%s/" + soname); } private static Path getBinaryOutputPath( BuildTarget target, ProjectFilesystem filesystem, Optional<String> extension) { String format = extension.map(ext -> "%s." 
+ ext).orElse("%s"); return BuildTargets.getGenPath(filesystem, target, format); } @VisibleForTesting public static BuildTarget createCxxLinkTarget( BuildTarget target, Optional<LinkerMapMode> flavoredLinkerMapMode) { if (flavoredLinkerMapMode.isPresent()) { target = target.withAppendedFlavors(flavoredLinkerMapMode.get().getFlavor()); } return target.withAppendedFlavors(CXX_LINK_BINARY_FLAVOR); } /** * @return a function that transforms the {@link FrameworkPath} to search paths with any embedded * macros expanded. */ public static RuleKeyAppendableFunction<FrameworkPath, Path> frameworkPathToSearchPath( final CxxPlatform cxxPlatform, final SourcePathResolver resolver) { return new RuleKeyAppendableFunction<FrameworkPath, Path>() { private RuleKeyAppendableFunction<String, String> translateMacrosFn = CxxFlags.getTranslateMacrosFn(cxxPlatform); @Override public void appendToRuleKey(RuleKeyObjectSink sink) { sink.setReflectively("translateMacrosFn", translateMacrosFn); } @Override public Path apply(FrameworkPath input) { String pathAsString = FrameworkPath.getUnexpandedSearchPath( resolver::getAbsolutePath, Functions.identity(), input) .toString(); return Paths.get(translateMacrosFn.apply(pathAsString)); } }; } public static CxxLinkAndCompileRules createBuildRulesForCxxBinaryDescriptionArg( TargetGraph targetGraph, BuildTarget target, ProjectFilesystem projectFilesystem, BuildRuleResolver resolver, CellPathResolver cellRoots, CxxBuckConfig cxxBuckConfig, CxxPlatform cxxPlatform, CxxBinaryDescription.CommonArg args, ImmutableSet<BuildTarget> extraDeps, Optional<StripStyle> stripStyle, Optional<LinkerMapMode> flavoredLinkerMapMode) throws NoSuchBuildTargetException { SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(resolver); SourcePathResolver pathResolver = DefaultSourcePathResolver.from(ruleFinder); ImmutableMap<String, CxxSource> srcs = parseCxxSources(target, resolver, ruleFinder, pathResolver, cxxPlatform, args); ImmutableMap<Path, SourcePath> headers = 
parseHeaders(target, resolver, ruleFinder, pathResolver, Optional.of(cxxPlatform), args);

    // Build the binary deps.
    ImmutableSortedSet.Builder<BuildRule> depsBuilder = ImmutableSortedSet.naturalOrder();

    // Add original declared and extra deps.
    args.getCxxDeps().get(resolver, cxxPlatform).forEach(depsBuilder::add);

    // Add in deps found via deps query.
    ImmutableList<BuildRule> depQueryDeps =
        args.getDepsQuery()
            .map(
                query ->
                    QueryUtils.resolveDepQuery(
                        target, query, resolver, cellRoots, targetGraph, args.getDeps()))
            .orElse(Stream.of())
            .collect(MoreCollectors.toImmutableList());
    depsBuilder.addAll(depQueryDeps);

    // Add any extra deps passed in.
    extraDeps.stream().map(resolver::getRule).forEach(depsBuilder::add);

    ImmutableSortedSet<BuildRule> deps = depsBuilder.build();

    // Delegate to the fully-expanded overload; link style defaults to STATIC when unspecified.
    return createBuildRulesForCxxBinary(
        target,
        projectFilesystem,
        resolver,
        cellRoots,
        cxxBuckConfig,
        cxxPlatform,
        srcs,
        headers,
        deps,
        // Only the query-derived deps are eligible for whole-archive linking, and only when
        // the arg requests it.
        args.getLinkDepsQueryWhole()
            ? RichStream.from(depQueryDeps).map(BuildRule::getBuildTarget).toImmutableSet()
            : ImmutableSet.of(),
        stripStyle,
        flavoredLinkerMapMode,
        args.getLinkStyle().orElse(Linker.LinkableDepType.STATIC),
        args.getThinLto(),
        args.getPreprocessorFlags(),
        args.getPlatformPreprocessorFlags(),
        args.getLangPreprocessorFlags(),
        args.getFrameworks(),
        args.getLibraries(),
        args.getCompilerFlags(),
        args.getLangCompilerFlags(),
        args.getPlatformCompilerFlags(),
        args.getPrefixHeader(),
        args.getPrecompiledHeader(),
        args.getLinkerFlags(),
        args.getPlatformLinkerFlags(),
        args.getCxxRuntimeType(),
        args.getIncludeDirs(),
        Optional.empty());
  }

  /**
   * Creates all the build rules (preprocess/compile, link, optional strip) for a cxx binary from
   * fully-expanded inputs. Returns the bundle of rules plus an executable tool for callers.
   */
  public static CxxLinkAndCompileRules createBuildRulesForCxxBinary(
      BuildTarget target,
      ProjectFilesystem projectFilesystem,
      BuildRuleResolver resolver,
      CellPathResolver cellRoots,
      CxxBuckConfig cxxBuckConfig,
      CxxPlatform cxxPlatform,
      ImmutableMap<String, CxxSource> srcs,
      ImmutableMap<Path, SourcePath> headers,
      SortedSet<BuildRule> deps,
      ImmutableSet<BuildTarget> linkWholeDeps,
      Optional<StripStyle> stripStyle,
Optional<LinkerMapMode> flavoredLinkerMapMode,
      Linker.LinkableDepType linkStyle,
      boolean thinLto,
      ImmutableList<StringWithMacros> preprocessorFlags,
      PatternMatchedCollection<ImmutableList<StringWithMacros>> platformPreprocessorFlags,
      ImmutableMap<CxxSource.Type, ImmutableList<StringWithMacros>> langPreprocessorFlags,
      ImmutableSortedSet<FrameworkPath> frameworks,
      ImmutableSortedSet<FrameworkPath> libraries,
      ImmutableList<StringWithMacros> compilerFlags,
      ImmutableMap<CxxSource.Type, ImmutableList<StringWithMacros>> langCompilerFlags,
      PatternMatchedCollection<ImmutableList<StringWithMacros>> platformCompilerFlags,
      Optional<SourcePath> prefixHeader,
      Optional<SourcePath> precompiledHeader,
      ImmutableList<StringWithMacros> linkerFlags,
      PatternMatchedCollection<ImmutableList<StringWithMacros>> platformLinkerFlags,
      Optional<Linker.CxxRuntimeType> cxxRuntimeType,
      ImmutableList<String> includeDirs,
      Optional<Boolean> xcodePrivateHeadersSymlinks)
      throws NoSuchBuildTargetException {
    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(resolver);
    SourcePathResolver sourcePathResolver = DefaultSourcePathResolver.from(ruleFinder);
    // TODO(beefon): should be:
    // Path linkOutput = getLinkOutputPath(
    //     createCxxLinkTarget(params.getBuildTarget(), flavoredLinkerMapMode),
    //     projectFilesystem);

    // Output path is keyed on the target plus the linker-map-mode flavor when present.
    Path linkOutput =
        getBinaryOutputPath(
            flavoredLinkerMapMode.isPresent()
                ? target.withAppendedFlavors(flavoredLinkerMapMode.get().getFlavor())
                : target,
            projectFilesystem,
            cxxPlatform.getBinaryExtension());
    ImmutableList.Builder<Arg> argsBuilder = ImmutableList.builder();
    CommandTool.Builder executableBuilder = new CommandTool.Builder();

    // Setup the header symlink tree and combine all the preprocessor input from this rule
    // and all dependencies.
    boolean shouldCreatePrivateHeadersSymlinks =
        xcodePrivateHeadersSymlinks.orElse(cxxBuckConfig.getPrivateHeadersSymlinksEnabled());
    HeaderSymlinkTree headerSymlinkTree =
        requireHeaderSymlinkTree(
            target,
            projectFilesystem,
            resolver,
            cxxPlatform,
            headers,
            HeaderVisibility.PRIVATE,
            shouldCreatePrivateHeadersSymlinks);
    Optional<SymlinkTree> sandboxTree = Optional.empty();
    if (cxxBuckConfig.sandboxSources()) {
      sandboxTree = createSandboxTree(target, resolver, cxxPlatform);
    }
    ImmutableList<CxxPreprocessorInput> cxxPreprocessorInput =
        collectCxxPreprocessorInput(
            target,
            cxxPlatform,
            deps,
            // Expand macros in the per-language preprocessor flags before collecting input.
            ImmutableListMultimap.copyOf(
                Multimaps.transformValues(
                    CxxFlags.getLanguageFlagsWithMacros(
                        preprocessorFlags,
                        platformPreprocessorFlags,
                        langPreprocessorFlags,
                        cxxPlatform),
                    f -> toStringWithMacrosArgs(target, cellRoots, resolver, cxxPlatform, f))),
            ImmutableList.of(headerSymlinkTree),
            frameworks,
            CxxPreprocessables.getTransitiveCxxPreprocessorInput(
                cxxPlatform,
                RichStream.from(deps)
                    .filter(CxxPreprocessorDep.class::isInstance)
                    .toImmutableList()),
            includeDirs,
            sandboxTree);

    // Merge declared compiler flags (with macros expanded) and, for ThinLTO builds, the
    // "-flto=thin" flag for every source language.
    ImmutableListMultimap.Builder<CxxSource.Type, Arg> allCompilerFlagsBuilder =
        ImmutableListMultimap.builder();
    allCompilerFlagsBuilder.putAll(
        Multimaps.transformValues(
            CxxFlags.getLanguageFlagsWithMacros(
                compilerFlags, platformCompilerFlags, langCompilerFlags, cxxPlatform),
            f -> toStringWithMacrosArgs(target, cellRoots, resolver, cxxPlatform, f)));
    if (thinLto) {
      allCompilerFlagsBuilder.putAll(CxxFlags.toLanguageFlags(StringArg.from("-flto=thin")));
    }
    ImmutableListMultimap<CxxSource.Type, Arg> allCompilerFlags = allCompilerFlagsBuilder.build();

    // Generate and add all the build rules to preprocess and compile the source to the
    // resolver and get the `SourcePath`s representing the generated object files.
    ImmutableMap<CxxPreprocessAndCompile, SourcePath> objects =
        CxxSourceRuleFactory.requirePreprocessAndCompileRules(
            projectFilesystem,
            target,
            resolver,
            sourcePathResolver,
            ruleFinder,
            cxxBuckConfig,
            cxxPlatform,
            cxxPreprocessorInput,
            allCompilerFlags,
            prefixHeader,
            precompiledHeader,
            srcs,
            // Position-dependent code is fine for static links; otherwise compile PIC.
            linkStyle == Linker.LinkableDepType.STATIC
                ? CxxSourceRuleFactory.PicType.PDC
                : CxxSourceRuleFactory.PicType.PIC,
            sandboxTree);

    // Build up the linker flags, which support macro expansion.
    CxxFlags.getFlagsWithMacrosWithPlatformMacroExpansion(
            linkerFlags, platformLinkerFlags, cxxPlatform)
        .stream()
        .map(f -> toStringWithMacrosArgs(target, cellRoots, resolver, cxxPlatform, f))
        .forEach(argsBuilder::add);

    // Special handling for dynamically linked binaries.
    if (linkStyle == Linker.LinkableDepType.SHARED) {

      // Create a symlink tree for all shared libraries needed by this binary.
      SymlinkTree sharedLibraries =
          requireSharedLibrarySymlinkTree(
              target,
              projectFilesystem,
              resolver,
              cxxPlatform,
              deps,
              NativeLinkable.class::isInstance);

      // Embed an origin-relative library path into the binary so it can find the shared libraries.
      // The shared libraries root is absolute. Also need an absolute path to the linkOutput
      Path absLinkOut = target.getCellPath().resolve(linkOutput);
      argsBuilder.addAll(
          StringArg.from(
              Linkers.iXlinker(
                  "-rpath",
                  String.format(
                      "%s/%s",
                      cxxPlatform.getLd().resolve(resolver).origin(),
                      absLinkOut.getParent().relativize(sharedLibraries.getRoot()).toString()))));

      // Add all the shared libraries and the symlink tree as inputs to the tool that represents
      // this binary, so that users can attach the proper deps.
      executableBuilder.addDep(sharedLibraries);
      executableBuilder.addInputs(sharedLibraries.getLinks().values());
    }

    // Add object files into the args.
ImmutableList<SourcePathArg> objectArgs =
        SourcePathArg.from(objects.values())
            .stream()
            .map(
                input -> {
                  // Defensive check: every object-file arg must be a SourcePathArg so it can be
                  // wrapped as a file-listable linker input below.
                  Preconditions.checkArgument(input instanceof SourcePathArg);
                  return (SourcePathArg) input;
                })
            .collect(MoreCollectors.toImmutableList());
    argsBuilder.addAll(FileListableLinkerInputArg.from(objectArgs));

    BuildTarget linkRuleTarget = createCxxLinkTarget(target, flavoredLinkerMapMode);

    CxxLink cxxLink =
        createCxxLinkRule(
            projectFilesystem,
            resolver,
            cxxBuckConfig,
            cxxPlatform,
            RichStream.from(deps).filter(NativeLinkable.class).toImmutableList(),
            linkStyle,
            thinLto,
            frameworks,
            libraries,
            cxxRuntimeType,
            sourcePathResolver,
            ruleFinder,
            linkOutput,
            argsBuilder,
            linkRuleTarget,
            linkWholeDeps);

    // When a strip style is requested, the stripped binary (not the raw link output) is what
    // the executable tool points at.
    BuildRule binaryRuleForExecutable;
    Optional<CxxStrip> cxxStrip = Optional.empty();
    if (stripStyle.isPresent()) {
      BuildTarget cxxTarget = target;
      if (flavoredLinkerMapMode.isPresent()) {
        cxxTarget = cxxTarget.withAppendedFlavors(flavoredLinkerMapMode.get().getFlavor());
      }
      CxxStrip stripRule =
          createCxxStripRule(
              cxxTarget, projectFilesystem, resolver, stripStyle.get(), cxxLink, cxxPlatform);
      cxxStrip = Optional.of(stripRule);
      binaryRuleForExecutable = stripRule;
    } else {
      binaryRuleForExecutable = cxxLink;
    }

    // Add the output of the link as the lone argument needed to invoke this binary as a tool.
    executableBuilder.addArg(SourcePathArg.of(binaryRuleForExecutable.getSourcePathToOutput()));

    return new CxxLinkAndCompileRules(
        cxxLink,
        cxxStrip,
        ImmutableSortedSet.copyOf(objects.keySet()),
        executableBuilder.build(),
        deps);
  }

  /**
   * Creates (or fetches from the resolver, keyed by {@code linkRuleTarget}) the final executable
   * link rule for a cxx binary.
   */
  private static CxxLink createCxxLinkRule(
      ProjectFilesystem projectFilesystem,
      BuildRuleResolver resolver,
      CxxBuckConfig cxxBuckConfig,
      CxxPlatform cxxPlatform,
      Iterable<? extends NativeLinkable> deps,
      Linker.LinkableDepType linkStyle,
      boolean thinLto,
      ImmutableSortedSet<FrameworkPath> frameworks,
      ImmutableSortedSet<FrameworkPath> libraries,
      Optional<Linker.CxxRuntimeType> cxxRuntimeType,
      SourcePathResolver sourcePathResolver,
      SourcePathRuleFinder ruleFinder,
      Path linkOutput,
      ImmutableList.Builder<Arg> argsBuilder,
      BuildTarget linkRuleTarget,
      ImmutableSet<BuildTarget> linkWholeDeps)
      throws NoSuchBuildTargetException {
    return (CxxLink)
        resolver.computeIfAbsentThrowing(
            linkRuleTarget,
            ignored ->
                // Generate the final link rule.  We use the top-level target as the link rule's
                // target, so that it corresponds to the actual binary we build.
                CxxLinkableEnhancer.createCxxLinkableBuildRule(
                    cxxBuckConfig,
                    cxxPlatform,
                    projectFilesystem,
                    resolver,
                    sourcePathResolver,
                    ruleFinder,
                    linkRuleTarget,
                    Linker.LinkType.EXECUTABLE,
                    Optional.empty(),
                    linkOutput,
                    linkStyle,
                    thinLto,
                    deps,
                    cxxRuntimeType,
                    Optional.empty(),
                    ImmutableSet.of(),
                    linkWholeDeps,
                    NativeLinkableInput.builder()
                        .setArgs(argsBuilder.build())
                        .setFrameworks(frameworks)
                        .setLibraries(libraries)
                        .build(),
                    Optional.empty()));
  }

  /**
   * Creates (or fetches from the resolver) a {@link CxxStrip} rule that strips the given
   * unstripped binary according to {@code stripStyle}. Fails if the binary rule has no output.
   */
  public static CxxStrip createCxxStripRule(
      BuildTarget baseBuildTarget,
      ProjectFilesystem projectFilesystem,
      BuildRuleResolver resolver,
      StripStyle stripStyle,
      BuildRule unstrippedBinaryRule,
      CxxPlatform cxxPlatform) {
    return (CxxStrip)
        resolver.computeIfAbsent(
            baseBuildTarget.withAppendedFlavors(CxxStrip.RULE_FLAVOR, stripStyle.getFlavor()),
            stripBuildTarget ->
                new CxxStrip(
                    stripBuildTarget,
                    projectFilesystem,
                    Preconditions.checkNotNull(
                        unstrippedBinaryRule.getSourcePathToOutput(),
                        "Cannot strip BuildRule with no output (%s)",
                        unstrippedBinaryRule.getBuildTarget()),
                    new SourcePathRuleFinder(resolver),
                    stripStyle,
                    cxxPlatform.getStrip(),
                    CxxDescriptionEnhancer.getBinaryOutputPath(
                        stripBuildTarget, projectFilesystem, cxxPlatform.getBinaryExtension())));
  }

  /**
   * Creates a rule that concatenates the compilation databases of this target's dependencies
   * into a single uber compile_commands.json.
   */
  public static BuildRule createUberCompilationDatabase(
      BuildTarget buildTarget,
      ProjectFilesystem
projectFilesystem, BuildRuleResolver ruleResolver)
      throws NoSuchBuildTargetException {
    // Look up the compilation-database metadata for this target (swapping the uber flavor for
    // the plain compilation-database flavor).
    Optional<CxxCompilationDatabaseDependencies> compilationDatabases =
        ruleResolver.requireMetadata(
            buildTarget
                .withoutFlavors(CxxCompilationDatabase.UBER_COMPILATION_DATABASE)
                .withAppendedFlavors(CxxCompilationDatabase.COMPILATION_DATABASE),
            CxxCompilationDatabaseDependencies.class);
    Preconditions.checkState(compilationDatabases.isPresent());
    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(ruleResolver);
    SourcePathResolver pathResolver = DefaultSourcePathResolver.from(ruleFinder);
    return new JsonConcatenate(
        buildTarget,
        projectFilesystem,
        new BuildRuleParams(
            () ->
                ImmutableSortedSet.copyOf(
                    ruleFinder.filterBuildRuleInputs(compilationDatabases.get().getSourcePaths())),
            () -> ImmutableSortedSet.of(),
            ImmutableSortedSet.of()),
        pathResolver.getAllAbsolutePaths(compilationDatabases.get().getSourcePaths()),
        "compilation-database-concatenate",
        "Concatenate compilation databases",
        "uber-compilation-database",
        "compile_commands.json");
  }

  /**
   * Collects the source paths of the compilation databases of this target's deps (plus this
   * target's own compilation database) so they can be concatenated by the uber rule.
   */
  public static Optional<CxxCompilationDatabaseDependencies> createCompilationDatabaseDependencies(
      BuildTarget buildTarget,
      FlavorDomain<CxxPlatform> platforms,
      BuildRuleResolver resolver,
      CxxConstructorArg args)
      throws NoSuchBuildTargetException {
    Preconditions.checkState(
        buildTarget.getFlavors().contains(CxxCompilationDatabase.COMPILATION_DATABASE));
    Optional<Flavor> cxxPlatformFlavor = platforms.getFlavor(buildTarget);
    Preconditions.checkState(
        cxxPlatformFlavor.isPresent(),
        "Could not find cxx platform in:\n%s",
        Joiner.on(", ").join(buildTarget.getFlavors()));
    ImmutableSet.Builder<SourcePath> sourcePaths = ImmutableSet.builder();
    for (BuildTarget dep : args.getDeps()) {
      // Each dep's compilation database is requested for the same cxx platform flavor.
      Optional<CxxCompilationDatabaseDependencies> compilationDatabases =
          resolver.requireMetadata(
              BuildTarget.builder(dep)
                  .addFlavors(CxxCompilationDatabase.COMPILATION_DATABASE)
                  .addFlavors(cxxPlatformFlavor.get())
                  .build(),
              CxxCompilationDatabaseDependencies.class);
      if
(compilationDatabases.isPresent()) {
        sourcePaths.addAll(compilationDatabases.get().getSourcePaths());
      }
    }
    // Not all parts of Buck use require yet, so require the rule here so it's available in the
    // resolver for the parts that don't.
    BuildRule buildRule = resolver.requireRule(buildTarget);
    sourcePaths.add(buildRule.getSourcePathToOutput());
    return Optional.of(CxxCompilationDatabaseDependencies.of(sourcePaths.build()));
  }

  /** @return the sandbox symlink tree for the given target, creating it if necessary. */
  public static Optional<SymlinkTree> createSandboxTree(
      BuildTarget buildTarget, BuildRuleResolver ruleResolver, CxxPlatform cxxPlatform)
      throws NoSuchBuildTargetException {
    return Optional.of(requireSandboxSymlinkTree(buildTarget, ruleResolver, cxxPlatform));
  }

  /**
   * @return the {@link BuildTarget} to use for the {@link BuildRule} generating the symlink tree of
   *     shared libraries.
   */
  public static BuildTarget createSharedLibrarySymlinkTreeTarget(
      BuildTarget target, Flavor platform) {
    return BuildTarget.builder(target)
        .addFlavors(SHARED_LIBRARY_SYMLINK_TREE_FLAVOR)
        .addFlavors(platform)
        .build();
  }

  /** @return the {@link Path} to use for the symlink tree of shared libraries. */
  public static Path getSharedLibrarySymlinkTreePath(
      ProjectFilesystem filesystem, BuildTarget target, Flavor platform) {
    return BuildTargets.getGenPath(
        filesystem, createSharedLibrarySymlinkTreeTarget(target, platform), "%s");
  }

  /**
   * Build a {@link HeaderSymlinkTree} of all the shared libraries found via the top-level rule's
   * transitive dependencies.
   */
  public static SymlinkTree createSharedLibrarySymlinkTree(
      BuildTarget baseBuildTarget,
      ProjectFilesystem filesystem,
      CxxPlatform cxxPlatform,
      Iterable<?
extends BuildRule> deps,
      Predicate<Object> traverse,
      Predicate<Object> skip)
      throws NoSuchBuildTargetException {

    BuildTarget symlinkTreeTarget =
        createSharedLibrarySymlinkTreeTarget(baseBuildTarget, cxxPlatform.getFlavor());
    Path symlinkTreeRoot =
        getSharedLibrarySymlinkTreePath(filesystem, baseBuildTarget, cxxPlatform.getFlavor());

    // Map each transitive shared library's soname to its source path.
    ImmutableSortedMap<String, SourcePath> libraries =
        NativeLinkables.getTransitiveSharedLibraries(cxxPlatform, deps, traverse, skip);

    ImmutableMap.Builder<Path, SourcePath> links = ImmutableMap.builder();
    for (Map.Entry<String, SourcePath> ent : libraries.entrySet()) {
      links.put(Paths.get(ent.getKey()), ent.getValue());
    }
    return new SymlinkTree(symlinkTreeTarget, filesystem, symlinkTreeRoot, links.build());
  }

  /** Convenience overload of the above with no skip predicate (nothing is skipped). */
  public static SymlinkTree createSharedLibrarySymlinkTree(
      BuildTarget baseBuildTarget,
      ProjectFilesystem filesystem,
      CxxPlatform cxxPlatform,
      Iterable<? extends BuildRule> deps,
      Predicate<Object> traverse)
      throws NoSuchBuildTargetException {
    return createSharedLibrarySymlinkTree(
        baseBuildTarget, filesystem, cxxPlatform, deps, traverse, x -> false);
  }

  /**
   * Fetches the shared-library symlink tree for the target from the resolver, creating and
   * registering it on first use.
   */
  public static SymlinkTree requireSharedLibrarySymlinkTree(
      BuildTarget buildTarget,
      ProjectFilesystem filesystem,
      BuildRuleResolver resolver,
      CxxPlatform cxxPlatform,
      Iterable<? extends BuildRule> deps,
      Predicate<Object> traverse)
      throws NoSuchBuildTargetException {
    BuildTarget target =
        createSharedLibrarySymlinkTreeTarget(buildTarget, cxxPlatform.getFlavor());
    SymlinkTree tree = resolver.getRuleOptionalWithType(target, SymlinkTree.class).orElse(null);
    if (tree == null) {
      tree =
          resolver.addToIndex(
              createSharedLibrarySymlinkTree(buildTarget, filesystem, cxxPlatform, deps, traverse));
    }
    return tree;
  }

  /** @return the flavor corresponding to the given link style. */
  public static Flavor flavorForLinkableDepType(Linker.LinkableDepType linkableDepType) {
    switch (linkableDepType) {
      case STATIC:
        return STATIC_FLAVOR;
      case STATIC_PIC:
        return STATIC_PIC_FLAVOR;
      case SHARED:
        return SHARED_FLAVOR;
    }
    // Unreachable for current enum values; guards against future additions.
    throw new RuntimeException(String.format("Unsupported LinkableDepType: '%s'", linkableDepType));
  }

  /**
   * Builds a sandbox symlink tree containing this target's private headers, its sources, and
   * (for libraries) its exported headers, each linked under its source-path name.
   */
  public static SymlinkTree createSandboxTreeBuildRule(
      BuildRuleResolver resolver,
      CxxConstructorArg args,
      CxxPlatform platform,
      BuildTarget buildTarget,
      ProjectFilesystem projectFilesystem)
      throws NoSuchBuildTargetException {
    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(resolver);
    SourcePathResolver sourcePathResolver = DefaultSourcePathResolver.from(ruleFinder);
    ImmutableCollection<SourcePath> privateHeaders =
        parseHeaders(
                buildTarget, resolver, ruleFinder, sourcePathResolver, Optional.of(platform), args)
            .values();
    ImmutableCollection<CxxSource> sources =
        parseCxxSources(buildTarget, resolver, ruleFinder, sourcePathResolver, platform, args)
            .values();
    HashMap<Path, SourcePath> links = new HashMap<>();
    for (SourcePath headerPath : privateHeaders) {
      links.put(
          Paths.get(sourcePathResolver.getSourcePathName(buildTarget, headerPath)), headerPath);
    }
    if (args instanceof CxxLibraryDescription.CommonArg) {
      ImmutableCollection<SourcePath> publicHeaders =
          CxxDescriptionEnhancer.parseExportedHeaders(
                  buildTarget,
                  resolver,
                  ruleFinder,
                  sourcePathResolver,
                  Optional.of(platform),
                  (CxxLibraryDescription.CommonArg) args)
              .values();
      for (SourcePath headerPath : publicHeaders) {
        links.put(
Paths.get(sourcePathResolver.getSourcePathName(buildTarget, headerPath)), headerPath);
      }
    }
    for (CxxSource source : sources) {
      SourcePath sourcePath = source.getPath();
      links.put(
          Paths.get(sourcePathResolver.getSourcePathName(buildTarget, sourcePath)), sourcePath);
    }
    return createSandboxSymlinkTree(
        buildTarget, projectFilesystem, platform, ImmutableMap.copyOf(links));
  }

  /** Resolve the map of names to SourcePaths to a map of names to CxxSource objects. */
  private static ImmutableMap<String, CxxSource> resolveCxxSources(
      ImmutableMap<String, SourceWithFlags> sources) {

    ImmutableMap.Builder<String, CxxSource> cxxSources = ImmutableMap.builder();

    // For each entry in the input C/C++ source, build a CxxSource object to wrap
    // its name, input path, and output object file path.
    for (ImmutableMap.Entry<String, SourceWithFlags> ent : sources.entrySet()) {
      String extension = Files.getFileExtension(ent.getKey());
      // The file extension determines the source language; unknown extensions are an error.
      Optional<CxxSource.Type> type = CxxSource.Type.fromExtension(extension);
      if (!type.isPresent()) {
        throw new HumanReadableException("invalid extension \"%s\": %s", extension, ent.getKey());
      }
      cxxSources.put(
          ent.getKey(),
          CxxSource.of(type.get(), ent.getValue().getSourcePath(), ent.getValue().getFlags()));
    }

    return cxxSources.build();
  }

  /** Wraps a {@link StringWithMacros} flag as an {@link Arg} with cxx location-macro expansion. */
  public static Arg toStringWithMacrosArgs(
      BuildTarget target,
      CellPathResolver cellPathResolver,
      BuildRuleResolver resolver,
      CxxPlatform cxxPlatform,
      StringWithMacros flag) {
    return StringWithMacrosArg.of(
        flag,
        ImmutableList.of(new CxxLocationMacroExpander(cxxPlatform)),
        target,
        cellPathResolver,
        resolver);
  }

  /** @return the module name with every non-alphanumeric character replaced by an underscore. */
  public static String normalizeModuleName(String moduleName) {
    return moduleName.replaceAll("[^A-Za-z0-9]", "_");
  }
}
package com.codeborne.selenide;

import com.codeborne.selenide.proxy.SelenideProxyServer;
import org.junit.jupiter.api.Test;
import org.openqa.selenium.StaleElementReferenceException;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;

import static com.codeborne.selenide.CheckResult.Verdict.ACCEPT;
import static com.codeborne.selenide.CheckResult.Verdict.REJECT;
import static com.codeborne.selenide.Condition.and;
import static com.codeborne.selenide.Condition.attribute;
import static com.codeborne.selenide.Condition.attributeMatching;
import static com.codeborne.selenide.Condition.be;
import static com.codeborne.selenide.Condition.checked;
import static com.codeborne.selenide.Condition.cssClass;
import static com.codeborne.selenide.Condition.cssValue;
import static com.codeborne.selenide.Condition.disabled;
import static com.codeborne.selenide.Condition.enabled;
import static com.codeborne.selenide.Condition.exist;
import static com.codeborne.selenide.Condition.have;
import static com.codeborne.selenide.Condition.hidden;
import static com.codeborne.selenide.Condition.id;
import static com.codeborne.selenide.Condition.name;
import static com.codeborne.selenide.Condition.not;
import static com.codeborne.selenide.Condition.or;
import static com.codeborne.selenide.Condition.selected;
import static com.codeborne.selenide.Condition.text;
import static com.codeborne.selenide.Condition.type;
import static com.codeborne.selenide.Condition.visible;
import static com.codeborne.selenide.Mocks.elementWithAttribute;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/**
 * Unit tests for the built-in {@link Condition}s, exercising their {@code check} verdicts,
 * actual-value reporting and {@code toString} output against mocked {@link WebElement}s.
 */
final class ConditionTest {
  // A stub driver is enough: conditions only read element state, never navigate.
  private final WebDriver webDriver = new DummyWebDriver();
  private final SelenideProxyServer proxy = mock(SelenideProxyServer.class);
  private final SelenideConfig config = new SelenideConfig();
  private final Driver driver = new DriverStub(config, new Browser("opera", false), webDriver, proxy);

  @Test
  void displaysHumanReadableName() {
    assertThat(visible).hasToString("visible");
    assertThat(hidden).hasToString("hidden");
    assertThat(attribute("lastName", "Malkovich")).hasToString("attribute lastName=\"Malkovich\"");
  }

  @Test
  void value() {
    // "value" matches case-insensitively on substrings of the element's value attribute.
    WebElement element = elementWithAttribute("value", "John Malkovich");
    assertThat(Condition.value("Peter").check(driver, element).verdict).isEqualTo(REJECT);
    assertThat(Condition.value("john").check(driver, element).verdict).isEqualTo(ACCEPT);
    assertThat(Condition.value("john malkovich").check(driver, element).verdict).isEqualTo(ACCEPT);
    assertThat(Condition.value("John").check(driver, element).verdict).isEqualTo(ACCEPT);
    assertThat(Condition.value("John Malkovich").check(driver, element).verdict).isEqualTo(ACCEPT);
    assertThat(Condition.value("malko").check(driver, element).verdict).isEqualTo(ACCEPT);
  }

  @Test
  void valueToString() {
    assertThat(Condition.value("John Malkovich"))
      .hasToString("value=\"John Malkovich\"");
  }

  @Test
  void elementIsVisible() {
    assertThat(visible.check(driver, elementWithVisibility(true)).verdict).isEqualTo(ACCEPT);
    assertThat(visible.check(driver, elementWithVisibility(false)).verdict).isEqualTo(REJECT);
  }

  // Mocks an element whose isDisplayed() returns the given flag.
  private WebElement elementWithVisibility(boolean isVisible) {
    WebElement element = mock(WebElement.class);
    when(element.isDisplayed()).thenReturn(isVisible);
    return element;
  }

  @Test
  void elementExists() {
    // "exist" accepts any element regardless of visibility.
    assertThat(exist.check(driver, elementWithVisibility(true)).verdict).isEqualTo(ACCEPT);
    assertThat(exist.check(driver, elementWithVisibility(false)).verdict).isEqualTo(ACCEPT);
  }

  @Test
  void elementExists_returnsFalse_ifItThrowsException() {
    WebElement element = mock(WebElement.class);
    when(element.isDisplayed()).thenThrow(new StaleElementReferenceException("ups"));
    assertThat(exist.check(driver, element).verdict).isEqualTo(REJECT);
  }

  @Test
  void elementIsHidden() {
    assertThat(hidden.check(driver, elementWithVisibility(false)).verdict).isEqualTo(ACCEPT);
    assertThat(hidden.check(driver, elementWithVisibility(true)).verdict).isEqualTo(REJECT);
  }

  @Test
  void elementIsHiddenWithStaleElementException() {
    // A stale (disappeared) element counts as hidden.
    WebElement element = mock(WebElement.class);
    doThrow(new StaleElementReferenceException("Oooops")).when(element).isDisplayed();
    assertThat(hidden.check(driver, element).verdict).isEqualTo(ACCEPT);
  }

  @Test
  void elementHasAttribute() {
    // An empty attribute value still counts as "has attribute".
    assertThat(attribute("name").check(driver, elementWithAttribute("name", "selenide")).verdict).isEqualTo(ACCEPT);
    assertThat(attribute("name").check(driver, elementWithAttribute("name", "")).verdict).isEqualTo(ACCEPT);
    assertThat(attribute("name").check(driver, elementWithAttribute("id", "id3")).verdict).isEqualTo(REJECT);
  }

  @Test
  void elementHasAttributeWithGivenValue() {
    // Attribute-with-value matches the whole value exactly, not substrings.
    Condition condition = attribute("name", "selenide");
    assertThat(condition.check(driver, elementWithAttribute("name", "selenide")).verdict).isEqualTo(ACCEPT);
    assertThat(condition.check(driver, elementWithAttribute("name", "selenide is great")).verdict).isEqualTo(REJECT);
    assertThat(condition.check(driver, elementWithAttribute("id", "id2")).verdict).isEqualTo(REJECT);
  }

  @Test
  void matchingAttributeWithRegex() {
    // The regex must match the full attribute value.
    Condition condition = attributeMatching("name", "selenide.*");
    assertThat(condition.check(driver, elementWithAttribute("name", "selenide")).verdict).isEqualTo(ACCEPT);
    assertThat(condition.check(driver, elementWithAttribute("name", "selenide is great")).verdict).isEqualTo(ACCEPT);
    assertThat(condition.check(driver, elementWithAttribute("id", "selenide")).verdict).isEqualTo(REJECT);
    assertThat(condition.check(driver, elementWithAttribute("name", "another selenide")).verdict).isEqualTo(REJECT);
  }

  @Test
  void elementHasValue() {
    // "value" matches prefixes/substrings starting at the beginning of the attribute value.
    assertThat(Condition.value("selenide").check(driver, elementWithAttribute("value", "selenide")).verdict).isEqualTo(ACCEPT);
    assertThat(Condition.value("selenide").check(driver, elementWithAttribute("value", "selenide is great")).verdict).isEqualTo(ACCEPT);
    assertThat(Condition.value("selenide").check(driver, elementWithAttribute("value", "is great")).verdict).isEqualTo(REJECT);
  }

  @Test
  void elementHasName() {
    assertThat(name("selenide").check(driver, elementWithAttribute("name", "selenide")).verdict).isEqualTo(ACCEPT);
    assertThat(name("selenide").check(driver, elementWithAttribute("name", "selenide is great")).verdict).isEqualTo(REJECT);
  }

  @Test
  void checksValueOfTypeAttribute() {
    assertThat(type("radio").check(driver, elementWithAttribute("type", "radio")).verdict).isEqualTo(ACCEPT);
    assertThat(type("radio").check(driver, elementWithAttribute("type", "radio-button")).verdict).isEqualTo(REJECT);
  }

  @Test
  void checksValueOfIdAttribute() {
    assertThat(id("selenide").check(driver, elementWithAttribute("id", "selenide")).verdict).isEqualTo(ACCEPT);
    assertThat(id("selenide").check(driver, elementWithAttribute("id", "selenide is great")).verdict).isEqualTo(REJECT);
  }

  @Test
  void checksValueOfClassAttribute() {
    // cssClass matches a single whole token of the space-separated class list.
    assertThat(cssClass("btn").check(driver, elementWithAttribute("class", "btn btn-warning")).verdict).isEqualTo(ACCEPT);
    assertThat(cssClass("btn-warning").check(driver, elementWithAttribute("class", "btn btn-warning")).verdict).isEqualTo(ACCEPT);
    assertThat(cssClass("active").check(driver, elementWithAttribute("class", "btn btn-warning")).verdict).isEqualTo(REJECT);
    assertThat(cssClass("").check(driver, elementWithAttribute("class", "btn btn-warning active")).verdict).isEqualTo(REJECT);
    assertThat(cssClass("active").check(driver, elementWithAttribute("href", "no-class")).verdict).isEqualTo(REJECT);
  }

  @Test
  void elementHasCssValue() {
    assertThat(cssValue("display", "none").check(driver, elementWithCssStyle("display", "none")).verdict).isEqualTo(ACCEPT);
    assertThat(cssValue("font-size", "24").check(driver, elementWithCssStyle("font-size", "20")).verdict).isEqualTo(REJECT);
  }

  // Mocks an element returning the given value for getCssValue(propertyName).
  private WebElement elementWithCssStyle(String propertyName, String value) {
    WebElement element = mock(WebElement.class);
    when(element.getCssValue(propertyName)).thenReturn(value);
    return element;
  }

  @Test
  void elementHasClassToString() {
    assertThat(cssClass("Foo")).hasToString("css class \"Foo\"");
  }

  @Test
  void elementEnabled() {
    assertThat(enabled.check(driver, elementWithEnabled(true)).verdict).isEqualTo(ACCEPT);
    assertThat(enabled.check(driver, elementWithEnabled(false)).verdict).isEqualTo(REJECT);
  }

  // Mocks an element whose isEnabled() returns the given flag.
  private WebElement elementWithEnabled(boolean isEnabled) {
    WebElement element = mock(WebElement.class);
    when(element.isEnabled()).thenReturn(isEnabled);
    return element;
  }

  @Test
  void elementEnabledActualValue() {
    assertThat(enabled.check(driver, elementWithEnabled(true)).actualValue).isEqualTo("enabled");
    assertThat(enabled.check(driver, elementWithEnabled(false)).actualValue).isEqualTo("disabled");
  }

  @Test
  void elementDisabled() {
    assertThat(disabled.check(driver, elementWithEnabled(false)).verdict).isEqualTo(ACCEPT);
    assertThat(disabled.check(driver, elementWithEnabled(true)).verdict).isEqualTo(REJECT);
  }

  @Test
  void elementDisabledActualValue() {
    assertThat(disabled.check(driver, elementWithEnabled(true)).actualValue).isEqualTo("enabled");
    assertThat(disabled.check(driver, elementWithEnabled(false)).actualValue).isEqualTo("disabled");
  }

  @Test
  void elementSelected() {
    assertThat(selected.check(driver, elementWithSelected(true)).verdict).isEqualTo(ACCEPT);
    assertThat(selected.check(driver, elementWithSelected(false)).verdict).isEqualTo(REJECT);
  }

  // Mocks an element whose isSelected() returns the given flag.
  private WebElement elementWithSelected(boolean isSelected) {
    WebElement element = mock(WebElement.class);
    when(element.isSelected()).thenReturn(isSelected);
    return element;
  }

  @Test
  void elementSelectedActualValue() {
    assertThat(selected.check(driver, elementWithSelected(true)).actualValue).isEqualTo("selected");
    assertThat(selected.check(driver, elementWithSelected(false)).actualValue).isEqualTo("not selected");
  }

  @Test
  void elementChecked() {
    // "checked" is also backed by isSelected().
    assertThat(checked.check(driver, elementWithSelected(true)).verdict).isEqualTo(ACCEPT);
    assertThat(checked.check(driver, elementWithSelected(false)).verdict).isEqualTo(REJECT);
  }

  @Test
  void elementCheckedActualValue() {
    assertThat(checked.check(driver, elementWithSelected(true)).actualValue).isEqualTo("checked");
    assertThat(checked.check(driver, elementWithSelected(false)).actualValue).isEqualTo("unchecked");
  }

  @Test
  void elementNotCondition() {
    assertThat(not(checked).check(driver, elementWithSelected(false)).verdict).isEqualTo(ACCEPT);
    assertThat(not(checked).check(driver, elementWithSelected(true)).verdict).isEqualTo(REJECT);
  }

  @Test
  void elementNotConditionActualValue() {
    // not() reports the wrapped condition's actual value unchanged.
    assertThat(not(checked).check(driver, elementWithSelected(false)).actualValue).isEqualTo("unchecked");
    assertThat(not(checked).check(driver, elementWithSelected(true)).actualValue).isEqualTo("checked");
  }

  @Test
  void elementAndCondition() {
    WebElement element = mockElement(true, "text");
    assertThat(and("selected with text", be(selected), have(text("text"))).check(driver, element).verdict).isEqualTo(ACCEPT);
    assertThat(and("selected with text", not(be(selected)), have(text("text"))).check(driver, element).verdict).isEqualTo(REJECT);
    assertThat(and("selected with text", be(selected), have(text("incorrect"))).check(driver, element).verdict).isEqualTo(REJECT);
  }

  @Test
  void elementAndConditionActualValue() {
    // and() reports the actual value of the first failing member.
    WebElement element = mockElement(false, "text");
    Condition condition = and("selected with text", be(selected), have(text("text")));
    assertThat(condition.check(driver, element).actualValue).isEqualTo("not selected");
    assertThat(condition.check(driver, element).verdict).isEqualTo(REJECT);
  }

  @Test
  void elementAndConditionToString() {
    // toString is stable before and after a check() call.
    WebElement element = mockElement(false, "text");
    Condition condition = and("selected with text", be(selected), have(text("text")));
    assertThat(condition).hasToString("selected with text: be selected and have text \"text\"");
    assertThat(condition.check(driver, element).verdict).isEqualTo(REJECT);
    assertThat(condition).hasToString("selected with text: be selected and have text \"text\"");
  }

  @Test
  void elementOrCondition() {
    WebElement element = mockElement(false, "text");
    when(element.isDisplayed()).thenReturn(true);
    assertThat(or("Visible, not Selected", visible, checked).check(driver, element).verdict).isEqualTo(ACCEPT);
    assertThat(or("Selected with text", checked, text("incorrect")).check(driver, element).verdict).isEqualTo(REJECT);
  }

  @Test
  void elementOrConditionActualValue() {
    // or() reports the actual value of the member that finally accepted.
    WebElement element = mockElement(false, "some text");
    Condition condition = or("selected with text", be(selected), have(text("some text")));
    assertThat(condition.check(driver, element).actualValue).isEqualTo("text=\"some text\"");
    assertThat(condition.check(driver, element).verdict).isEqualTo(ACCEPT);
  }

  @Test
  void elementOrConditionToString() {
    WebElement element = mockElement(false, "text");
    Condition condition = or("selected with text", be(selected), have(text("text")));
    assertThat(condition).hasToString("selected with text: be selected or have text \"text\"");
    assertThat(condition.check(driver, element).verdict).isEqualTo(ACCEPT);
  }

  @Test
  void conditionBe() {
    Condition condition = be(visible);
    assertThat(condition).hasToString("be visible");
  }

  @Test
  void conditionHave() {
    Condition condition = have(attribute("name"));
    assertThat(condition).hasToString("have attribute name");
  }

  @Test
  void conditionMissingElementSatisfiesCondition() {
    Condition condition = attribute("name");
    assertThat(condition.missingElementSatisfiesCondition()).isFalse();
  }

  @Test
  void conditionToString() {
    Condition condition = attribute("name").because("it's awesome");
    assertThat(condition).hasToString("attribute name (because it's awesome)");
  }

  @Test
  void shouldHaveText_doesNotAccept_nullParameter() {
    //noinspection ConstantConditions
    assertThatThrownBy(() -> text(null))
      .isInstanceOf(IllegalArgumentException.class)
      .hasMessage("Argument must not be null or empty string. Use $.shouldBe(empty) or $.shouldHave(exactText(\"\").");
  }

  @Test
  void shouldHaveText_doesNotAccept_emptyString() {
    assertThatThrownBy(() -> text(""))
      .isInstanceOf(IllegalArgumentException.class)
      .hasMessage("Argument must not be null or empty string. Use $.shouldBe(empty) or $.shouldHave(exactText(\"\").");
  }

  @Test
  void shouldHaveText_accepts_blankNonEmptyString() {
    // Whitespace-only strings are valid arguments (only null/empty are rejected).
    text(" ");
    text(" ");
    text("\t");
    text("\n");
  }

  // Mocks an element with the given selected state and text content.
  private WebElement mockElement(boolean isSelected, String text) {
    WebElement element = mock(WebElement.class);
    when(element.isSelected()).thenReturn(isSelected);
    when(element.getText()).thenReturn(text);
    return element;
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.management.internal.configuration;

import static java.util.stream.Collectors.joining;
import static org.apache.geode.distributed.ConfigurationProperties.ENABLE_CLUSTER_CONFIGURATION;
import static org.apache.geode.distributed.ConfigurationProperties.LOAD_CLUSTER_CONFIGURATION_FROM_DIR;
import static org.apache.geode.distributed.ConfigurationProperties.LOCATORS;
import static org.apache.geode.distributed.ConfigurationProperties.LOG_FILE;
import static org.apache.geode.distributed.ConfigurationProperties.MCAST_PORT;
import static org.apache.geode.distributed.ConfigurationProperties.NAME;
import static org.apache.geode.distributed.ConfigurationProperties.USE_CLUSTER_CONFIGURATION;
import static org.apache.geode.internal.AvailablePortHelper.getRandomAvailableTCPPorts;
import static org.apache.geode.test.awaitility.GeodeAwaitility.await;
import static org.apache.geode.test.dunit.Host.getHost;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;

import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.Arrays;
import java.util.Properties;

import org.junit.Test;

import org.apache.geode.cache.Region;
import org.apache.geode.distributed.Locator;
import org.apache.geode.distributed.internal.InternalConfigurationPersistenceService;
import org.apache.geode.distributed.internal.InternalLocator;
import org.apache.geode.test.dunit.VM;
import org.apache.geode.test.dunit.cache.internal.JUnit4CacheTestCase;
import org.apache.geode.util.test.TestUtil;

/**
 * Distributed tests for loading cluster configuration from a directory
 * ({@code LOAD_CLUSTER_CONFIGURATION_FROM_DIR=true}): a {@code cluster.xml} is copied into
 * {@code cluster_config/cluster/} on the locator VMs, locators are started, and cache servers
 * are then expected to pick up (or not pick up) the region defined in that XML.
 *
 * <p>VM layout convention used throughout: VMs {@code 0..locatorCount-1} host locators,
 * VMs {@code 2} and {@code 3} host cache servers.
 */
public class ConfigurationPersistenceServiceUsingDirDUnitTest extends JUnit4CacheTestCase {

  /**
   * After each test, destroy the shared configuration on the (up to two) locator VMs so state
   * does not leak into the next test. A VM without a running locator is simply skipped.
   */
  @Override
  public final void preTearDownCacheTestCase() throws Exception {
    for (int i = 0; i < 2; i++) {
      VM vm = getHost(0).getVM(i);
      vm.invoke("Removing shared configuration", () -> {
        InternalLocator locator = InternalLocator.getLocator();
        if (locator == null) {
          // No locator was started in this VM during the test; nothing to clean up.
          return;
        }
        InternalConfigurationPersistenceService sharedConfig =
            locator.getConfigurationPersistenceService();
        if (sharedConfig != null) {
          sharedConfig.destroySharedConfiguration();
        }
      });
    }
  }

  /**
   * One locator loads cluster config (containing a region) from disk; both servers must see
   * the region after connecting.
   */
  @Test
  public void basicClusterConfigDirWithOneLocator() throws Exception {
    final int[] ports = getRandomAvailableTCPPorts(1);
    final int locatorCount = ports.length;

    // Stage cluster-region.xml and start the locator; wait until shared config is served.
    for (int i = 0; i < locatorCount; i++) {
      VM vm = getHost(0).getVM(i);
      copyClusterXml(vm, "cluster-region.xml");
      startLocator(vm, i, ports);
      waitForSharedConfiguration(vm);
    }

    // Servers run in VMs 2 and 3; the region from cluster-region.xml must appear.
    for (int i = 2; i < 4; i++) {
      VM vm = getHost(0).getVM(i);
      restartCache(vm, i, ports);
      vm.invoke("Checking for region presence", () -> {
        await().until(() -> getRootRegion("newReplicatedRegion") != null);
      });
    }
  }

  /**
   * Same as {@link #basicClusterConfigDirWithOneLocator()} but with two locators that know
   * about each other.
   */
  @Test
  public void basicClusterConfigDirWithTwoLocators() throws Exception {
    final int[] ports = getRandomAvailableTCPPorts(2);
    final int locatorCount = ports.length;

    for (int i = 0; i < locatorCount; i++) {
      VM vm = getHost(0).getVM(i);
      copyClusterXml(vm, "cluster-region.xml");
      startLocator(vm, i, ports);
      waitForSharedConfiguration(vm);
    }

    for (int i = 2; i < 4; i++) {
      VM vm = getHost(0).getVM(i);
      restartCache(vm, i, ports);
      vm.invoke("Checking for region presence", () -> {
        await().until(() -> getRootRegion("newReplicatedRegion") != null);
      });
    }
  }

  /**
   * Start with an empty cluster config (no region), then swap in cluster-region.xml and
   * restart everything at once: all locators are stopped, restarted with the new XML, the
   * servers are fully disconnected, and the region must be present after they reconnect.
   */
  @Test
  public void updateClusterConfigDirWithTwoLocatorsNoRollingServerRestart() throws Exception {
    final int[] ports = getRandomAvailableTCPPorts(2);
    final int locatorCount = ports.length;

    // Phase 1: empty config — servers must NOT see the region.
    for (int i = 0; i < locatorCount; i++) {
      VM vm = getHost(0).getVM(i);
      copyClusterXml(vm, "cluster-empty.xml");
      startLocator(vm, i, ports);
      waitForSharedConfiguration(vm);
    }

    for (int i = 2; i < 4; i++) {
      VM vm = getHost(0).getVM(i);
      restartCache(vm, i, ports);
      vm.invoke("Checking for region absence", () -> {
        Region r = getRootRegion("newReplicatedRegion");
        assertNull("Region does exist", r);
      });
    }

    // Shut down the locators in reverse order to how we will start them up in the next step.
    // Unless we start them asynchronously, the older one will want to wait for a new diskstore
    // to become available and will time out.
    for (int i = locatorCount; i > 0; i--) {
      VM vm = getHost(0).getVM(i - 1);
      stopLocator(vm);
    }

    // Phase 2: restart locators with the region-bearing config.
    for (int i = 0; i < locatorCount; i++) {
      VM vm = getHost(0).getVM(i);
      copyClusterXml(vm, "cluster-region.xml");
      startLocator(vm, i, ports);
      waitForSharedConfiguration(vm);
    }

    // Fully disconnect the servers first (non-rolling restart) ...
    for (int i = 2; i < 4; i++) {
      VM vm = getHost(0).getVM(i);
      vm.invoke(() -> disconnectFromDS());
    }

    // ... then reconnect; the new region must now be present.
    for (int i = 2; i < 4; i++) {
      VM vm = getHost(0).getVM(i);
      restartCache(vm, i, ports);
      vm.invoke("Checking for region presence", () -> {
        await().until(() -> getRootRegion("newReplicatedRegion") != null);
      });
    }
  }

  /**
   * Like the non-rolling variant above, but the servers are restarted one at a time (no
   * explicit disconnect pass): each server restart must pick up the updated configuration.
   */
  @Test
  public void updateClusterConfigDirWithTwoLocatorsAndRollingServerRestart() throws Exception {
    final int[] ports = getRandomAvailableTCPPorts(2);
    final int locatorCount = ports.length;

    // Phase 1: empty config — servers must NOT see the region.
    for (int i = 0; i < locatorCount; i++) {
      VM vm = getHost(0).getVM(i);
      copyClusterXml(vm, "cluster-empty.xml");
      startLocator(vm, i, ports);
      waitForSharedConfiguration(vm);
    }

    for (int i = 2; i < 4; i++) {
      VM vm = getHost(0).getVM(i);
      restartCache(vm, i, ports);
      vm.invoke("Checking for region absence", () -> {
        Region r = getRootRegion("newReplicatedRegion");
        assertNull("Region does exist", r);
      });
    }

    // Shut down the locators in reverse order to how we will start them up in the next step.
    // Unless we start them asynchronously, the older one will want to wait for a new diskstore
    // to become available and will time out.
    for (int i = locatorCount; i > 0; i--) {
      VM vm = getHost(0).getVM(i - 1);
      stopLocator(vm);
    }

    // Phase 2: restart locators with the region-bearing config.
    for (int i = 0; i < locatorCount; i++) {
      VM vm = getHost(0).getVM(i);
      copyClusterXml(vm, "cluster-region.xml");
      startLocator(vm, i, ports);
      waitForSharedConfiguration(vm);
    }

    // Roll each server and verify it sees the new region.
    for (int i = 2; i < 4; i++) {
      VM vm = getHost(0).getVM(i);
      restartCache(vm, i, ports);
      vm.invoke("Checking for region presence", () -> {
        await().until(() -> getRootRegion("newReplicatedRegion") != null);
      });
    }
  }

  /**
   * Rolling restart of BOTH tiers: each locator is stopped, given the new XML, and restarted
   * in turn (so one locator is always up), then each server is rolled and must see the region.
   */
  @Test
  public void updateClusterConfigDirWithTwoLocatorsRollingRestartAndRollingServerRestart()
      throws Exception {
    final int[] ports = getRandomAvailableTCPPorts(2);
    final int locatorCount = ports.length;

    // Phase 1: empty config — servers must NOT see the region.
    for (int i = 0; i < locatorCount; i++) {
      VM vm = getHost(0).getVM(i);
      copyClusterXml(vm, "cluster-empty.xml");
      startLocator(vm, i, ports);
      waitForSharedConfiguration(vm);
    }

    for (int i = 2; i < 4; i++) {
      VM vm = getHost(0).getVM(i);
      restartCache(vm, i, ports);
      vm.invoke("Checking for region absence", () -> {
        Region r = getRootRegion("newReplicatedRegion");
        assertNull("Region does exist", r);
      });
    }

    // Roll the locators
    for (int i = locatorCount - 1; i >= 0; i--) {
      VM vm = getHost(0).getVM(i);
      stopLocator(vm);
      copyClusterXml(vm, "cluster-region.xml");
      startLocator(vm, i, ports);
      waitForSharedConfiguration(vm);
    }

    // Roll the servers
    for (int i = 2; i < 4; i++) {
      VM vm = getHost(0).getVM(i);
      restartCache(vm, i, ports);
      vm.invoke("Checking for region presence", () -> {
        await().until(() -> getRootRegion("newReplicatedRegion") != null);
      });
    }
  }

  /**
   * Copies the named test-resource XML into {@code cluster_config/cluster/cluster.xml} inside
   * the given VM's working directory — the location the locator reads when
   * {@code LOAD_CLUSTER_CONFIGURATION_FROM_DIR} is set.
   */
  private void copyClusterXml(final VM vm, final String clusterXml) {
    vm.invoke("Copying new cluster.xml from " + clusterXml, () -> {
      String clusterXmlPath =
          TestUtil.getResourcePath(ConfigurationPersistenceServiceUsingDirDUnitTest.class,
              clusterXml);
      InputStream cacheXml = new FileInputStream(clusterXmlPath);
      assertNotNull("Could not create InputStream from " + clusterXmlPath, cacheXml);
      Files.createDirectories(Paths.get("cluster_config", "cluster"));
      Files.copy(cacheXml, Paths.get("cluster_config", "cluster", "cluster.xml"),
          StandardCopyOption.REPLACE_EXISTING);
    });
  }

  /**
   * Starts locator {@code i} in the given VM on {@code locatorPorts[i]}, with cluster
   * configuration enabled and loaded from directory. With more than one locator, each one is
   * pointed at the other via the LOCATORS property ({@code (i + 1) % length}).
   */
  private void startLocator(final VM vm, final int i, final int[] locatorPorts) {
    vm.invoke("Creating locator on " + vm, () -> {
      final String locatorName = "locator" + i;
      final File logFile = new File("locator-" + i + ".log");
      final Properties props = new Properties();

      props.setProperty(NAME, locatorName);
      props.setProperty(MCAST_PORT, "0");
      props.setProperty(ENABLE_CLUSTER_CONFIGURATION, "true");
      props.setProperty(LOAD_CLUSTER_CONFIGURATION_FROM_DIR, "true");

      if (locatorPorts.length > 1) {
        // Point this locator at the other one so the two form a single cluster.
        int otherLocatorPort = locatorPorts[(i + 1) % locatorPorts.length];
        props.setProperty(LOCATORS, "localhost[" + otherLocatorPort + "]");
      }

      Locator.startLocatorAndDS(locatorPorts[i], logFile, props);
    });
  }

  /** Blocks until the shared-configuration service is running in the VM's locator. */
  private void waitForSharedConfiguration(final VM vm) {
    vm.invoke("Waiting for shared configuration", () -> {
      final InternalLocator locator = InternalLocator.getLocator();
      await().until(() -> {
        return locator.isSharedConfigurationRunning();
      });
    });
  }

  /** Stops the locator in the given VM and disconnects every VM from the distributed system. */
  private void stopLocator(final VM vm) {
    vm.invoke("Stopping locator on " + vm, () -> {
      InternalLocator locator = InternalLocator.getLocator();
      assertNotNull("No locator found", locator);
      locator.stop();
      disconnectAllFromDS();
    });
  }

  /**
   * (Re)creates a cache server named {@code member<i>} in the given VM, connected to all of the
   * supplied locators with USE_CLUSTER_CONFIGURATION enabled.
   */
  private void restartCache(final VM vm, final int i, final int[] locatorPorts) {
    vm.invoke("Creating cache on VM " + i, () -> {
      disconnectFromDS();

      final Properties props = new Properties();
      props.setProperty(NAME, "member" + i);
      props.setProperty(MCAST_PORT, "0");
      props.setProperty(LOCATORS, getLocatorStr(locatorPorts));
      props.setProperty(LOG_FILE, "server-" + i + ".log");
      props.setProperty(USE_CLUSTER_CONFIGURATION, "true");
      props.setProperty(ENABLE_CLUSTER_CONFIGURATION, "true");

      getSystem(props);
      getCache();
    });
  }

  /** Builds the LOCATORS property value: {@code localhost[p1],localhost[p2],...}. */
  private String getLocatorStr(final int[] locatorPorts) {
    return Arrays.stream(locatorPorts).mapToObj(p -> "localhost[" + p + "]").collect(joining(","));
  }
}
/* Copyright 2013-2015 Immutables Authors and Contributors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.immutables.mongo.repository.internal; import com.google.common.base.CharMatcher; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Range; import java.util.regex.Pattern; import javax.annotation.Nullable; import static com.google.common.base.Preconditions.*; public final class Constraints { private Constraints() {} private static final CharMatcher NON_LITERAL_REGEX_CHARACTERS = CharMatcher.anyOf("\\^$[]().*+").precomputed(); /** * This "host" could accepts {@link ConstraintVisitor}s. 
*/ public interface ConstraintHost { <V extends ConstraintVisitor<V>> V accept(V visitor); } public interface ConstraintVisitor<V extends ConstraintVisitor<V>> { V in(String name, boolean negate, Iterable<?> values); V equal(String name, boolean negate, @Nullable Object value); V range(String name, boolean negate, Range<?> range); V size(String name, boolean negate, int size); V present(String name, boolean negate); V match(String name, boolean negate, Pattern pattern); V nested(String name, ConstraintHost constraints); V disjunction(); } private static final class PatternConstraint extends ConsConstraint { PatternConstraint(Constraint tail, String name, boolean negate, Pattern pattern) { super(tail, name, negate, checkNotNull(pattern)); } @Override <V extends ConstraintVisitor<V>> V dispatch(V visitor) { return visitor.match(name, negate, (Pattern) value); } } private static final class InConstraint extends ConsConstraint { InConstraint(Constraint tail, String name, boolean negate, Iterable<?> value) { super(tail, name, negate, ImmutableSet.copyOf(value)); } @Override <V extends ConstraintVisitor<V>> V dispatch(V visitor) { return visitor.in(name, negate, (Iterable<?>) value); } } private static final class EqualToConstraint extends ConsConstraint { EqualToConstraint(Constraint tail, String name, boolean negate, Object value) { super(tail, name, negate, value); } @Override <V extends ConstraintVisitor<V>> V dispatch(V visitor) { return visitor.equal(name, negate, value); } } private static final class RangeConstraint extends ConsConstraint { RangeConstraint(Constraint tail, String name, boolean negate, Range<?> value) { super(tail, name, negate, checkNotNull(value)); } @Override <V extends ConstraintVisitor<V>> V dispatch(V visitor) { return visitor.range(name, negate, (Range<?>) value); } } private static final class SizeConstraint extends ConsConstraint { SizeConstraint(Constraint tail, String name, boolean negate, int value) { super(tail, name, negate, value); 
} @Override <V extends ConstraintVisitor<V>> V dispatch(V visitor) { return visitor.size(name, negate, (Integer) value); } } private static final class PresenseConstraint extends ConsConstraint { PresenseConstraint(Constraint tail, String name, boolean negate) { super(tail, name, negate, null); } @Override <V extends ConstraintVisitor<V>> V dispatch(V visitor) { return visitor.present(name, negate); } } private static final class NestedConstraint extends ConsConstraint { NestedConstraint(Constraint tail, String name, ConstraintHost value) { super(tail, name, false, value); } @Override <V extends ConstraintVisitor<V>> V dispatch(V visitor) { return visitor.nested(name, (ConstraintHost) value); } } private static final class DisjunctionConstraint extends Constraint { private final Constraint tail; DisjunctionConstraint(Constraint tail) { this.tail = tail; } @Override public <V extends ConstraintVisitor<V>> V accept(V visitor) { return tail.accept(visitor).disjunction(); } } private static final Constraint NIL = new Constraint() { @Override public final <V extends ConstraintVisitor<V>> V accept(V visitor) { return visitor; } @Override public boolean isNil() { return true; } }; public static Constraint nilConstraint() { return NIL; } public abstract static class Constraint implements ConstraintVisitor<Constraint>, ConstraintHost { public boolean isNil() { return false; } @Override public Constraint in(String name, boolean negate, Iterable<?> values) { return new InConstraint(this, name, negate, values); } @Override public Constraint equal(String name, boolean negate, @Nullable Object value) { return new EqualToConstraint(this, name, negate, value); } @Override public Constraint range(String name, boolean negate, Range<?> range) { return new RangeConstraint(this, name, negate, range); } @Override public Constraint size(String name, boolean negate, int size) { return new SizeConstraint(this, name, negate, size); } @Override public Constraint present(String name, boolean 
negate) { return new PresenseConstraint(this, name, negate); } @Override public Constraint match(String name, boolean negate, Pattern pattern) { return new PatternConstraint(this, name, negate, pattern); } @Override public Constraint disjunction() { return new DisjunctionConstraint(this); } @Override public Constraint nested(String name, ConstraintHost constraints) { return new NestedConstraint(this, name, constraints); } } private abstract static class ConsConstraint extends Constraint { final Constraint tail; final String name; final boolean negate; @Nullable final Object value; ConsConstraint( Constraint tail, String name, boolean negate, @Nullable Object value) { this.tail = checkNotNull(tail); this.name = checkNotNull(name); this.value = value; this.negate = negate; } @Override public final <V extends ConstraintVisitor<V>> V accept(V visitor) { return dispatch(tail.accept(visitor)); } abstract <V extends ConstraintVisitor<V>> V dispatch(V visitor); } public static Pattern prefixPatternOf(String prefix) { checkArgument(NON_LITERAL_REGEX_CHARACTERS.matchesNoneOf(prefix), "Prefix [%s] should be literal, otherwise use constructed regex Pattern", prefix); return Pattern.compile("^" + prefix); } }
/* * Autopsy Forensic Browser * * Copyright 2017-2019 Basis Technology Corp. * Contact: carrier <at> sleuthkit <dot> org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.autopsy.communications; import org.sleuthkit.autopsy.guiutils.RefreshThrottler; import com.google.common.collect.ImmutableSet; import com.google.common.eventbus.Subscribe; import java.awt.event.ItemListener; import java.beans.PropertyChangeListener; import java.time.Instant; import java.time.LocalDate; import java.time.LocalDateTime; import java.time.ZoneId; import java.util.ArrayList; import java.util.Collection; import java.util.Comparator; import java.util.EnumSet; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.stream.Collectors; import javax.swing.Box; import javax.swing.BoxLayout; import javax.swing.Icon; import javax.swing.ImageIcon; import javax.swing.JCheckBox; import javax.swing.JLabel; import javax.swing.JPanel; import org.openide.util.NbBundle; import org.sleuthkit.autopsy.casemodule.Case; import static org.sleuthkit.autopsy.casemodule.Case.Events.CURRENT_CASE; import org.sleuthkit.autopsy.core.UserPreferences; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.ThreadConfined; import org.sleuthkit.autopsy.ingest.IngestManager; import static org.sleuthkit.autopsy.ingest.IngestManager.IngestJobEvent.COMPLETED; import static 
org.sleuthkit.autopsy.ingest.IngestManager.IngestModuleEvent.DATA_ADDED; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import org.sleuthkit.datamodel.Account; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.CommunicationsFilter; import org.sleuthkit.datamodel.CommunicationsFilter.AccountTypeFilter; import org.sleuthkit.datamodel.CommunicationsFilter.DateRangeFilter; import org.sleuthkit.datamodel.CommunicationsFilter.DeviceFilter; import org.sleuthkit.datamodel.CommunicationsFilter.MostRecentFilter; import org.sleuthkit.datamodel.DataSource; import static org.sleuthkit.datamodel.Relationship.Type.CALL_LOG; import static org.sleuthkit.datamodel.Relationship.Type.CONTACT; import static org.sleuthkit.datamodel.Relationship.Type.MESSAGE; /** * Panel that holds the Filter control widgets and triggers queries against the * CommunicationsManager on user filtering changes. */ @SuppressWarnings("PMD.SingularField") // UI widgets cause lots of false positives final public class FiltersPanel extends JPanel { private static final long serialVersionUID = 1L; private static final Logger logger = Logger.getLogger(FiltersPanel.class.getName()); private static final Set<IngestManager.IngestJobEvent> INGEST_JOB_EVENTS_OF_INTEREST = EnumSet.of(IngestManager.IngestJobEvent.COMPLETED); private static final Set<IngestManager.IngestModuleEvent> INGEST_MODULE_EVENTS_OF_INTEREST = EnumSet.of(DATA_ADDED); /** * Map from Account.Type to the checkbox for that account type's filter. */ @ThreadConfined(type = ThreadConfined.ThreadType.AWT) private final Map<Account.Type, JCheckBox> accountTypeMap = new HashMap<>(); /** * Map from datasource device id to the checkbox for that datasource. 
*/ @ThreadConfined(type = ThreadConfined.ThreadType.AWT) private final Map<String, JCheckBox> devicesMap = new HashMap<>(); /** * Listens to ingest events to enable refresh button */ private final PropertyChangeListener ingestListener; private final PropertyChangeListener ingestJobListener; /** * Flag that indicates the UI is not up-to-date with respect to the case DB * and it should be refreshed (by reapplying the filters). */ private boolean needsRefresh; /** * Listen to check box state changes and validates that at least one box is * selected for device and account type ( other wise there will be no * results) */ private final ItemListener validationListener; private final RefreshThrottler refreshThrottler; /** * Is the device account type filter enabled or not. It should be enabled * when the Table/Brows mode is active and disabled when the visualization * is active. Initially false since the browse/table mode is active * initially. */ private boolean deviceAccountTypeEnabled; private Case openCase = null; @NbBundle.Messages({"refreshText=Refresh Results", "applyText=Apply"}) public FiltersPanel() { initComponents(); initalizeDeviceAccountType(); setDateTimeFiltersToDefault(); deviceRequiredLabel.setVisible(false); accountTypeRequiredLabel.setVisible(false); startDatePicker.setDate(LocalDate.now().minusWeeks(3)); endDatePicker.setDateToToday(); startDatePicker.getSettings().setVetoPolicy( //no end date, or start is before end startDate -> endCheckBox.isSelected() == false || startDate.compareTo(endDatePicker.getDate()) <= 0 ); endDatePicker.getSettings().setVetoPolicy( //no start date, or end is after start endDate -> startCheckBox.isSelected() == false || endDate.compareTo(startDatePicker.getDate()) >= 0 ); updateTimeZone(); validationListener = itemEvent -> validateFilters(); UserPreferences.addChangeListener(preferenceChangeEvent -> { if (preferenceChangeEvent.getKey().equals(UserPreferences.DISPLAY_TIMES_IN_LOCAL_TIME) || 
preferenceChangeEvent.getKey().equals(UserPreferences.TIME_ZONE_FOR_DISPLAYS)) { updateTimeZone(); } }); this.ingestListener = pce -> { String eventType = pce.getPropertyName(); if (eventType.equals(DATA_ADDED.toString())) { // Indicate that a refresh may be needed, unless the data added is Keyword or Hashset hits ModuleDataEvent eventData = (ModuleDataEvent) pce.getOldValue(); if (!needsRefresh && null != eventData && (eventData.getBlackboardArtifactType().getTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_MESSAGE.getTypeID() || eventData.getBlackboardArtifactType().getTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT.getTypeID() || eventData.getBlackboardArtifactType().getTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_CALLLOG.getTypeID() || eventData.getBlackboardArtifactType().getTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_EMAIL_MSG.getTypeID())) { needsRefresh = true; validateFilters(); } } }; refreshThrottler = new RefreshThrottler(new FilterPanelRefresher(false, false)); this.ingestJobListener = pce -> { String eventType = pce.getPropertyName(); if (eventType.equals(COMPLETED.toString()) && !needsRefresh) { needsRefresh = true; validateFilters(); } }; applyFiltersButton.addActionListener(e -> applyFilters()); refreshButton.addActionListener(e -> applyFilters()); } /** * Validate that filters are in a consistent state and will result in some * results. Checks that at least one device and at least one account type is * selected. Disables the apply and refresh button and shows warnings if the * filters are not valid. 
*/ private void validateFilters() { boolean someDevice = devicesMap.values().stream().anyMatch(JCheckBox::isSelected); boolean someAccountType = accountTypeMap.values().stream().anyMatch(JCheckBox::isSelected); boolean validLimit = validateLimitValue(); deviceRequiredLabel.setVisible(someDevice == false); accountTypeRequiredLabel.setVisible(someAccountType == false); limitErrorMsgLabel.setVisible(!validLimit); applyFiltersButton.setEnabled(someDevice && someAccountType && validLimit); refreshButton.setEnabled(someDevice && someAccountType && needsRefresh && validLimit); needsRefreshLabel.setVisible(needsRefresh); } private boolean validateLimitValue() { String selectedValue = (String) limitComboBox.getSelectedItem(); if (selectedValue.trim().equalsIgnoreCase("all")) { return true; } else { try { int value = Integer.parseInt(selectedValue); return value > 0; } catch (NumberFormatException ex) { return false; } } } void initalizeFilters() { applyFiltersButton.setEnabled(false); refreshButton.setEnabled(true); needsRefreshLabel.setText("Loading filters..."); needsRefreshLabel.setVisible(true); (new Thread(new Runnable(){ @Override public void run() { new FilterPanelRefresher(true, true).refresh(); } })).start(); } private void updateTimeZone() { dateRangeLabel.setText("Date Range (" + Utils.getUserPreferredZoneId().toString() + "):"); } @Override public void addNotify() { super.addNotify(); refreshThrottler.registerForIngestModuleEvents(); IngestManager.getInstance().addIngestModuleEventListener(INGEST_MODULE_EVENTS_OF_INTEREST, ingestListener); IngestManager.getInstance().addIngestJobEventListener(INGEST_JOB_EVENTS_OF_INTEREST, ingestJobListener); Case.addEventTypeSubscriber(EnumSet.of(CURRENT_CASE), evt -> { //clear the device filter widget when the case changes. 
devicesMap.clear(); devicesListPane.removeAll(); accountTypeMap.clear(); accountTypeListPane.removeAll(); initalizeDeviceAccountType(); }); } @Override public void removeNotify() { super.removeNotify(); refreshThrottler.unregisterEventListener(); IngestManager.getInstance().removeIngestModuleEventListener(ingestListener); IngestManager.getInstance().removeIngestJobEventListener(ingestJobListener); } private void initalizeDeviceAccountType() { CheckBoxIconPanel panel = createAccoutTypeCheckBoxPanel(Account.Type.DEVICE, true); accountTypeMap.put(Account.Type.DEVICE, panel.getCheckBox()); accountTypeListPane.add(panel); } /** * Populate the Account Types filter widgets. * * @param accountTypesInUse List of accountTypes currently in use * @param checkNewOnes * * @return True, if a new accountType was found */ private boolean updateAccountTypeFilter(List<Account.Type> accountTypesInUse, boolean checkNewOnes) { boolean newOneFound = false; for (Account.Type type : accountTypesInUse) { if (!accountTypeMap.containsKey(type) && !type.equals(Account.Type.CREDIT_CARD)) { CheckBoxIconPanel panel = createAccoutTypeCheckBoxPanel(type, checkNewOnes); accountTypeMap.put(type, panel.getCheckBox()); accountTypeListPane.add(panel); newOneFound = true; } } if (newOneFound) { accountTypeListPane.validate(); } return newOneFound; } /** * Helper function to create a new instance of the CheckBoxIconPanel base on * the Account.Type and initalState (check box state). * * @param type Account.Type to display on the panel * @param initalState initial check box state * * @return instance of the CheckBoxIconPanel */ private CheckBoxIconPanel createAccoutTypeCheckBoxPanel(Account.Type type, boolean initalState) { CheckBoxIconPanel panel = new CheckBoxIconPanel( type.getDisplayName(), new ImageIcon(FiltersPanel.class.getResource(Utils.getIconFilePath(type)))); panel.setSelected(initalState); panel.addItemListener(validationListener); return panel; } /** * Populate the devices filter widgets. 
* * @param dataSourceMap * @param checkNewOnes * * @return true if a new device was found */ private void updateDeviceFilterPanel(Map<String, DataSource> dataSourceMap, boolean checkNewOnes) { boolean newOneFound = false; for (Entry<String, DataSource> entry : dataSourceMap.entrySet()) { if (devicesMap.containsKey(entry.getValue().getDeviceId())) { continue; } final JCheckBox jCheckBox = new JCheckBox(entry.getKey(), checkNewOnes); jCheckBox.addItemListener(validationListener); jCheckBox.setToolTipText(entry.getKey()); devicesListPane.add(jCheckBox); devicesMap.put(entry.getValue().getDeviceId(), jCheckBox); newOneFound = true; } if (newOneFound) { devicesListPane.removeAll(); List<JCheckBox> checkList = new ArrayList<>(devicesMap.values()); checkList.sort(new DeviceCheckBoxComparator()); for (JCheckBox cb : checkList) { devicesListPane.add(cb); } devicesListPane.revalidate(); } } private void updateDateTimePicker(Integer start, Integer end) { if (start != null && start != 0) { startDatePicker.setDate(LocalDateTime.ofInstant(Instant.ofEpochSecond(start), Utils.getUserPreferredZoneId()).toLocalDate()); } if (end != null && end != 0) { endDatePicker.setDate(LocalDateTime.ofInstant(Instant.ofEpochSecond(end), Utils.getUserPreferredZoneId()).toLocalDate()); } } /** * Given a list of subFilters, set the states of the panel controls * accordingly. 
* * @param commFilter Contains a list of subFilters */ public void setFilters(CommunicationsFilter commFilter) { List<CommunicationsFilter.SubFilter> subFilters = commFilter.getAndFilters(); subFilters.forEach(subFilter -> { if (subFilter instanceof DeviceFilter) { setDeviceFilter((DeviceFilter) subFilter); } else if (subFilter instanceof AccountTypeFilter) { setAccountTypeFilter((AccountTypeFilter) subFilter); } else if (subFilter instanceof MostRecentFilter) { setMostRecentFilter((MostRecentFilter) subFilter); } }); } /** * Sets the state of the device filter check boxes * * @param deviceFilter Selected devices */ private void setDeviceFilter(DeviceFilter deviceFilter) { Collection<String> deviceIDs = deviceFilter.getDevices(); devicesMap.forEach((type, cb) -> { cb.setSelected(deviceIDs.contains(type)); }); } /** * Set the state of the account type checkboxes to match the passed in * filter * * @param typeFilter Account Types to be selected */ private void setAccountTypeFilter(AccountTypeFilter typeFilter) { accountTypeMap.forEach((type, cb) -> { cb.setSelected(typeFilter.getAccountTypes().contains(type)); }); } /** * Set up the startDatePicker and startCheckBox based on the passed in * DateControlState. * * @param state new control state */ private void setStartDateControlState(DateControlState state) { startDatePicker.setDate(state.getDate()); startCheckBox.setSelected(state.isEnabled()); startDatePicker.setEnabled(state.isEnabled()); } /** * Set up the endDatePicker and endCheckBox based on the passed in * DateControlState. * * @param state new control state */ private void setEndDateControlState(DateControlState state) { endDatePicker.setDate(state.getDate()); endCheckBox.setSelected(state.isEnabled()); endDatePicker.setEnabled(state.isEnabled()); } /** * Sets the state of the most recent UI controls based on the current values * in MostRecentFilter. 
* * @param filter The MostRecentFilter state to be set */ private void setMostRecentFilter(MostRecentFilter filter) { int limit = filter.getLimit(); if (limit > 0) { limitComboBox.setSelectedItem(filter.getLimit()); } else { limitComboBox.setSelectedItem("All"); } } @Subscribe void filtersBack(CVTEvents.StateChangeEvent event) { if (event.getCommunicationsState().getCommunicationsFilter() != null) { setFilters(event.getCommunicationsState().getCommunicationsFilter()); setStartDateControlState(event.getCommunicationsState().getStartControlState()); setEndDateControlState(event.getCommunicationsState().getEndControlState()); needsRefresh = false; validateFilters(); } } /** * This method is called from within the constructor to initialize the form. * WARNING: Do NOT modify this code. The content of this method is always * regenerated by the Form Editor. */ @SuppressWarnings("unchecked") // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents private void initComponents() { java.awt.GridBagConstraints gridBagConstraints; setLayout(new java.awt.GridBagLayout()); scrollPane.setHorizontalScrollBarPolicy(javax.swing.ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER); scrollPane.setAutoscrolls(true); scrollPane.setBorder(null); mainPanel.setLayout(new java.awt.GridBagLayout()); limitPane.setLayout(new java.awt.GridBagLayout()); mostRecentLabel.setText(org.openide.util.NbBundle.getMessage(FiltersPanel.class, "FiltersPanel.mostRecentLabel.text")); // NOI18N gridBagConstraints = new java.awt.GridBagConstraints(); gridBagConstraints.gridx = 0; gridBagConstraints.gridy = 1; gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; gridBagConstraints.insets = new java.awt.Insets(0, 9, 0, 9); limitPane.add(mostRecentLabel, gridBagConstraints); limitComboBox.setEditable(true); limitComboBox.setModel(new javax.swing.DefaultComboBoxModel<>(new String[] { "All", "10000", "5000", "1000", "500", "100" })); limitComboBox.addActionListener(new 
java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            limitComboBoxActionPerformed(evt);
        }
    });
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 1;
    gridBagConstraints.gridy = 1;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
    limitPane.add(limitComboBox, gridBagConstraints);

    limitTitlePanel.setLayout(new java.awt.GridBagLayout());

    limitHeaderLabel.setText(org.openide.util.NbBundle.getMessage(FiltersPanel.class, "FiltersPanel.limitHeaderLabel.text")); // NOI18N
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 0;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
    gridBagConstraints.weightx = 1.0;
    limitTitlePanel.add(limitHeaderLabel, gridBagConstraints);

    limitErrorMsgLabel.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/images/error-icon-16.png"))); // NOI18N
    limitErrorMsgLabel.setText(org.openide.util.NbBundle.getMessage(FiltersPanel.class, "FiltersPanel.limitErrorMsgLabel.text")); // NOI18N
    limitErrorMsgLabel.setForeground(new java.awt.Color(255, 0, 0));
    limitErrorMsgLabel.setHorizontalTextPosition(javax.swing.SwingConstants.LEADING);
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 1;
    gridBagConstraints.gridy = 0;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHEAST;
    limitTitlePanel.add(limitErrorMsgLabel, gridBagConstraints);

    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 0;
    gridBagConstraints.gridwidth = 2;
    gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
    gridBagConstraints.weightx = 1.0;
    gridBagConstraints.insets = new java.awt.Insets(0, 0, 9, 0);
    limitPane.add(limitTitlePanel, gridBagConstraints);

    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 4;
    gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
    gridBagConstraints.weightx = 1.0;
    gridBagConstraints.weighty = 1.0;
    gridBagConstraints.insets = new java.awt.Insets(15, 0, 15, 25);
    mainPanel.add(limitPane, gridBagConstraints);

    startDatePicker.setEnabled(false);

    dateRangeLabel.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/images/calendar.png"))); // NOI18N
    dateRangeLabel.setText(org.openide.util.NbBundle.getMessage(FiltersPanel.class, "FiltersPanel.dateRangeLabel.text")); // NOI18N

    startCheckBox.setText(org.openide.util.NbBundle.getMessage(FiltersPanel.class, "FiltersPanel.startCheckBox.text")); // NOI18N
    startCheckBox.addChangeListener(new javax.swing.event.ChangeListener() {
        public void stateChanged(javax.swing.event.ChangeEvent evt) {
            startCheckBoxStateChanged(evt);
        }
    });

    endCheckBox.setText(org.openide.util.NbBundle.getMessage(FiltersPanel.class, "FiltersPanel.endCheckBox.text")); // NOI18N
    endCheckBox.addChangeListener(new javax.swing.event.ChangeListener() {
        public void stateChanged(javax.swing.event.ChangeEvent evt) {
            endCheckBoxStateChanged(evt);
        }
    });

    endDatePicker.setEnabled(false);

    javax.swing.GroupLayout dateRangePaneLayout = new javax.swing.GroupLayout(dateRangePane);
    dateRangePane.setLayout(dateRangePaneLayout);
    dateRangePaneLayout.setHorizontalGroup(
        dateRangePaneLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
        .addGroup(dateRangePaneLayout.createSequentialGroup()
            .addGroup(dateRangePaneLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                .addComponent(dateRangeLabel)
                .addGroup(dateRangePaneLayout.createSequentialGroup()
                    .addContainerGap()
                    .addGroup(dateRangePaneLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                        .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, dateRangePaneLayout.createSequentialGroup()
                            .addComponent(endCheckBox)
                            .addGap(12, 12, 12)
                            .addComponent(endDatePicker, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
                        .addGroup(dateRangePaneLayout.createSequentialGroup()
                            .addComponent(startCheckBox)
                            .addGap(12, 12, 12)
                            .addComponent(startDatePicker, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)))))
            .addGap(0, 0, Short.MAX_VALUE))
    );

    dateRangePaneLayout.linkSize(javax.swing.SwingConstants.HORIZONTAL, new java.awt.Component[] {endCheckBox, startCheckBox});

    dateRangePaneLayout.setVerticalGroup(
        dateRangePaneLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
        .addGroup(dateRangePaneLayout.createSequentialGroup()
            .addComponent(dateRangeLabel)
            .addGap(6, 6, 6)
            .addGroup(dateRangePaneLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                .addComponent(startDatePicker, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addComponent(startCheckBox))
            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
            .addGroup(dateRangePaneLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                .addComponent(endDatePicker, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addComponent(endCheckBox)))
    );

    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 3;
    gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
    gridBagConstraints.weightx = 1.0;
    gridBagConstraints.insets = new java.awt.Insets(15, 0, 0, 25);
    mainPanel.add(dateRangePane, gridBagConstraints);

    devicesPane.setPreferredSize(new java.awt.Dimension(300, 300));
    devicesPane.setLayout(new java.awt.GridBagLayout());

    unCheckAllDevicesButton.setText(org.openide.util.NbBundle.getMessage(FiltersPanel.class, "FiltersPanel.unCheckAllDevicesButton.text")); // NOI18N
    unCheckAllDevicesButton.addActionListener(new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            unCheckAllDevicesButtonActionPerformed(evt);
        }
    });
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 2;
    gridBagConstraints.gridwidth = 2;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHEAST;
    gridBagConstraints.weightx = 1.0;
    gridBagConstraints.insets = new java.awt.Insets(9, 0, 0, 9);
    devicesPane.add(unCheckAllDevicesButton, gridBagConstraints);

    devicesLabel.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/images/image.png"))); // NOI18N
    devicesLabel.setText(org.openide.util.NbBundle.getMessage(FiltersPanel.class, "FiltersPanel.devicesLabel.text")); // NOI18N
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 0;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
    gridBagConstraints.insets = new java.awt.Insets(0, 0, 9, 0);
    devicesPane.add(devicesLabel, gridBagConstraints);

    checkAllDevicesButton.setText(org.openide.util.NbBundle.getMessage(FiltersPanel.class, "FiltersPanel.checkAllDevicesButton.text")); // NOI18N
    checkAllDevicesButton.addActionListener(new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            checkAllDevicesButtonActionPerformed(evt);
        }
    });
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 2;
    gridBagConstraints.gridy = 2;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHEAST;
    gridBagConstraints.insets = new java.awt.Insets(9, 0, 0, 0);
    devicesPane.add(checkAllDevicesButton, gridBagConstraints);

    devicesScrollPane.setMaximumSize(new java.awt.Dimension(32767, 30));
    devicesScrollPane.setMinimumSize(new java.awt.Dimension(27, 30));
    devicesScrollPane.setPreferredSize(new java.awt.Dimension(3, 30));

    devicesListPane.setMinimumSize(new java.awt.Dimension(4, 100));
    devicesListPane.setLayout(new javax.swing.BoxLayout(devicesListPane, javax.swing.BoxLayout.Y_AXIS));
    devicesScrollPane.setViewportView(devicesListPane);

    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 1;
    gridBagConstraints.gridwidth = 3;
    gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
    gridBagConstraints.weightx = 1.0;
    gridBagConstraints.weighty = 1.0;
    devicesPane.add(devicesScrollPane, gridBagConstraints);

    deviceRequiredLabel.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/images/error-icon-16.png"))); // NOI18N
    deviceRequiredLabel.setText(org.openide.util.NbBundle.getMessage(FiltersPanel.class, "FiltersPanel.deviceRequiredLabel.text")); // NOI18N
    deviceRequiredLabel.setForeground(new java.awt.Color(255, 0, 0));
    deviceRequiredLabel.setHorizontalTextPosition(javax.swing.SwingConstants.LEFT);
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 1;
    gridBagConstraints.gridy = 0;
    gridBagConstraints.gridwidth = 2;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHEAST;
    gridBagConstraints.insets = new java.awt.Insets(0, 0, 9, 0);
    devicesPane.add(deviceRequiredLabel, gridBagConstraints);

    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 2;
    gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
    gridBagConstraints.weightx = 1.0;
    gridBagConstraints.weighty = 1.0;
    gridBagConstraints.insets = new java.awt.Insets(15, 0, 0, 25);
    mainPanel.add(devicesPane, gridBagConstraints);

    accountTypesPane.setLayout(new java.awt.GridBagLayout());

    unCheckAllAccountTypesButton.setText(org.openide.util.NbBundle.getMessage(FiltersPanel.class, "FiltersPanel.unCheckAllAccountTypesButton.text")); // NOI18N
    unCheckAllAccountTypesButton.addActionListener(new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            unCheckAllAccountTypesButtonActionPerformed(evt);
        }
    });
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 2;
    gridBagConstraints.gridwidth = 2;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHEAST;
    gridBagConstraints.weightx = 1.0;
    gridBagConstraints.insets = new java.awt.Insets(9, 0, 0, 9);
    accountTypesPane.add(unCheckAllAccountTypesButton, gridBagConstraints);

    accountTypesLabel.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/images/accounts.png"))); // NOI18N
    accountTypesLabel.setText(org.openide.util.NbBundle.getMessage(FiltersPanel.class, "FiltersPanel.accountTypesLabel.text")); // NOI18N
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 0;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
    accountTypesPane.add(accountTypesLabel, gridBagConstraints);

    checkAllAccountTypesButton.setText(org.openide.util.NbBundle.getMessage(FiltersPanel.class, "FiltersPanel.checkAllAccountTypesButton.text")); // NOI18N
    checkAllAccountTypesButton.addActionListener(new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            checkAllAccountTypesButtonActionPerformed(evt);
        }
    });
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 2;
    gridBagConstraints.gridy = 2;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHEAST;
    gridBagConstraints.insets = new java.awt.Insets(9, 0, 0, 0);
    accountTypesPane.add(checkAllAccountTypesButton, gridBagConstraints);

    accountTypesScrollPane.setMaximumSize(new java.awt.Dimension(32767, 210));
    accountTypesScrollPane.setMinimumSize(new java.awt.Dimension(20, 210));
    accountTypesScrollPane.setName(""); // NOI18N
    accountTypesScrollPane.setPreferredSize(new java.awt.Dimension(2, 210));

    accountTypeListPane.setLayout(new javax.swing.BoxLayout(accountTypeListPane, javax.swing.BoxLayout.PAGE_AXIS));
    accountTypesScrollPane.setViewportView(accountTypeListPane);

    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 1;
    gridBagConstraints.gridwidth = 3;
    gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
    gridBagConstraints.weightx = 1.0;
    gridBagConstraints.weighty = 1.0;
    gridBagConstraints.insets = new java.awt.Insets(9, 0, 0, 0);
    accountTypesPane.add(accountTypesScrollPane, gridBagConstraints);

    accountTypeRequiredLabel.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/images/error-icon-16.png"))); // NOI18N
    accountTypeRequiredLabel.setText(org.openide.util.NbBundle.getMessage(FiltersPanel.class, "FiltersPanel.accountTypeRequiredLabel.text")); // NOI18N
    accountTypeRequiredLabel.setForeground(new java.awt.Color(255, 0, 0));
    accountTypeRequiredLabel.setHorizontalTextPosition(javax.swing.SwingConstants.LEFT);
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 1;
    gridBagConstraints.gridy = 0;
    gridBagConstraints.gridwidth = 2;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHEAST;
    accountTypesPane.add(accountTypeRequiredLabel, gridBagConstraints);

    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 1;
    gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
    gridBagConstraints.weightx = 1.0;
    gridBagConstraints.insets = new java.awt.Insets(15, 0, 0, 25);
    mainPanel.add(accountTypesPane, gridBagConstraints);

    topPane.setLayout(new java.awt.GridBagLayout());

    filtersTitleLabel.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/communications/images/funnel.png"))); // NOI18N
    filtersTitleLabel.setText(org.openide.util.NbBundle.getMessage(FiltersPanel.class, "FiltersPanel.filtersTitleLabel.text")); // NOI18N
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 0;
    gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
    gridBagConstraints.weightx = 1.0;
    topPane.add(filtersTitleLabel, gridBagConstraints);

    refreshButton.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/communications/images/arrow-circle-double-135.png"))); // NOI18N
    refreshButton.setText(org.openide.util.NbBundle.getMessage(FiltersPanel.class, "FiltersPanel.refreshButton.text")); // NOI18N
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 2;
    gridBagConstraints.gridy = 0;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHEAST;
    topPane.add(refreshButton, gridBagConstraints);

    applyFiltersButton.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/communications/images/tick.png"))); // NOI18N
    applyFiltersButton.setText(org.openide.util.NbBundle.getMessage(FiltersPanel.class, "FiltersPanel.applyFiltersButton.text")); // NOI18N
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 1;
    gridBagConstraints.gridy = 0;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHEAST;
    gridBagConstraints.weightx = 1.0;
    gridBagConstraints.insets = new java.awt.Insets(0, 0, 0, 5);
    topPane.add(applyFiltersButton, gridBagConstraints);

    needsRefreshLabel.setText(org.openide.util.NbBundle.getMessage(FiltersPanel.class, "FiltersPanel.needsRefreshLabel.text")); // NOI18N
    needsRefreshLabel.setForeground(new java.awt.Color(255, 0, 0));
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 1;
    gridBagConstraints.gridwidth = 3;
    gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
    gridBagConstraints.weightx = 1.0;
    topPane.add(needsRefreshLabel, gridBagConstraints);

    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 0;
    gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.FIRST_LINE_END;
    gridBagConstraints.weightx = 1.0;
    gridBagConstraints.insets = new java.awt.Insets(0, 0, 0, 25);
    mainPanel.add(topPane, gridBagConstraints);

    scrollPane.setViewportView(mainPanel);

    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 0;
    gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
    gridBagConstraints.weightx = 1.0;
    gridBagConstraints.weighty = 1.0;
    gridBagConstraints.insets = new java.awt.Insets(9, 15, 0, 0);
    add(scrollPane, gridBagConstraints);
}// </editor-fold>//GEN-END:initComponents

/**
 * Post an event with the new filters.
 */
void applyFilters() {
    needsRefresh = false;
    validateFilters();
    // Publish the complete filter plus date-control state so listeners can re-query.
    CVTEvents.getCVTEventBus().post(new CVTEvents.FilterChangeEvent(getFilter(), getStartControlState(), getEndControlState()));
}

/**
 * Get an instance of CommunicationsFilter based on the current panel state.
* * @return an instance of CommunicationsFilter */ private CommunicationsFilter getFilter() { CommunicationsFilter commsFilter = new CommunicationsFilter(); commsFilter.addAndFilter(getDeviceFilter()); commsFilter.addAndFilter(getAccountTypeFilter()); commsFilter.addAndFilter(getDateRangeFilter()); commsFilter.addAndFilter(new CommunicationsFilter.RelationshipTypeFilter( ImmutableSet.of(CALL_LOG, MESSAGE, CONTACT))); commsFilter.addAndFilter(getMostRecentFilter()); return commsFilter; } /** * Get a DeviceFilter that matches the state of the UI widgets. * * @return a DeviceFilter */ private DeviceFilter getDeviceFilter() { DeviceFilter deviceFilter = new DeviceFilter( devicesMap.entrySet().stream() .filter(entry -> entry.getValue().isSelected()) .map(Entry::getKey) .collect(Collectors.toSet())); return deviceFilter; } /** * Get an AccountTypeFilter that matches the state of the UI widgets * * @return an AccountTypeFilter */ private AccountTypeFilter getAccountTypeFilter() { AccountTypeFilter accountTypeFilter = new AccountTypeFilter( accountTypeMap.entrySet().stream() .filter(entry -> entry.getValue().isSelected()) .map(entry -> entry.getKey()) .collect(Collectors.toSet())); return accountTypeFilter; } /** * Get an DateRangeFilter that matches the state of the UI widgets * * @return an DateRangeFilter */ private DateRangeFilter getDateRangeFilter() { ZoneId zone = Utils.getUserPreferredZoneId(); return new DateRangeFilter(startCheckBox.isSelected() ? startDatePicker.getDate().atStartOfDay(zone).toEpochSecond() : 0, endCheckBox.isSelected() ? endDatePicker.getDate().atStartOfDay(zone).toEpochSecond() : 0); } /** * Get a MostRecentFilter that based on the current state of the ui * controls. 
* * @return A new instance of MostRecentFilter */ private MostRecentFilter getMostRecentFilter() { String value = (String) limitComboBox.getSelectedItem(); if (value.trim().equalsIgnoreCase("all")) { return new MostRecentFilter(-1); } else { try { int count = Integer.parseInt(value); return new MostRecentFilter(count); } catch (NumberFormatException ex) { return null; } } } private DateControlState getStartControlState() { return new DateControlState(startDatePicker.getDate(), startCheckBox.isSelected()); } private DateControlState getEndControlState() { return new DateControlState(endDatePicker.getDate(), endCheckBox.isSelected()); } /** * Set the selection state of all the account type check boxes * * @param selected The selection state to set the check boxes to. */ @ThreadConfined(type = ThreadConfined.ThreadType.AWT) private void setAllAccountTypesSelected(boolean selected) { setAllSelected(accountTypeMap, selected); } /** * Set the selection state of all the device check boxes * * @param selected The selection state to set the check boxes to. */ @ThreadConfined(type = ThreadConfined.ThreadType.AWT) private void setAllDevicesSelected(boolean selected) { setAllSelected(devicesMap, selected); } /** * Helper method that sets all the check boxes in the given map to the given * selection state. * * @param map A map from anything to JCheckBoxes. * @param selected The selection state to set all the check boxes to. 
*/
@ThreadConfined(type = ThreadConfined.ThreadType.AWT)
private void setAllSelected(Map<?, JCheckBox> map, boolean selected) {
    map.values().forEach(box -> box.setSelected(selected));
}

// Default date range: the last three weeks, ending today.
private void setDateTimeFiltersToDefault() {
    startDatePicker.setDate(LocalDate.now().minusWeeks(3));
    endDatePicker.setDate(LocalDate.now());
}

// The handlers below are wired by the NetBeans Form Editor; the GEN-FIRST/
// GEN-LAST marker comments must stay intact so the editor can re-locate them.
private void unCheckAllAccountTypesButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_unCheckAllAccountTypesButtonActionPerformed
    setAllAccountTypesSelected(false);
}//GEN-LAST:event_unCheckAllAccountTypesButtonActionPerformed

private void checkAllAccountTypesButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_checkAllAccountTypesButtonActionPerformed
    setAllAccountTypesSelected(true);
}//GEN-LAST:event_checkAllAccountTypesButtonActionPerformed

private void unCheckAllDevicesButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_unCheckAllDevicesButtonActionPerformed
    setAllDevicesSelected(false);
}//GEN-LAST:event_unCheckAllDevicesButtonActionPerformed

private void checkAllDevicesButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_checkAllDevicesButtonActionPerformed
    setAllDevicesSelected(true);
}//GEN-LAST:event_checkAllDevicesButtonActionPerformed

private void startCheckBoxStateChanged(javax.swing.event.ChangeEvent evt) {//GEN-FIRST:event_startCheckBoxStateChanged
    // The picker is only usable while its check box is ticked.
    startDatePicker.setEnabled(startCheckBox.isSelected());
    validateFilters();
}//GEN-LAST:event_startCheckBoxStateChanged

private void endCheckBoxStateChanged(javax.swing.event.ChangeEvent evt) {//GEN-FIRST:event_endCheckBoxStateChanged
    endDatePicker.setEnabled(endCheckBox.isSelected());
    validateFilters();
}//GEN-LAST:event_endCheckBoxStateChanged

private void limitComboBoxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_limitComboBoxActionPerformed
    validateFilters();
}//GEN-LAST:event_limitComboBoxActionPerformed

/**
 * A class to wrap the state of the date controls that consist of a
date
 * picker and a checkbox.
 *
 */
final class DateControlState {

    // LocalDate currently shown by the picker.
    private final LocalDate date;
    // Whether the associated check box was ticked (i.e. the picker was enabled).
    private final boolean enabled;

    /**
     * Wraps the state of the date controls that consist of a date picker
     * and checkbox
     *
     * @param date    LocalDate value of the datepicker
     * @param enabled State of the checkbox
     */
    protected DateControlState(LocalDate date, boolean enabled) {
        this.date = date;
        this.enabled = enabled;
    }

    /**
     * Returns the given LocalDate from the datepicker
     *
     * @return Current state LocalDate
     */
    public LocalDate getDate() {
        return date;
    }

    /**
     * Returns the given state of the datepicker checkbox
     *
     * @return boolean, whether or not the datepicker was enabled
     */
    public boolean isEnabled() {
        return enabled;
    }
}

// Variables declaration - do not modify//GEN-BEGIN:variables
private final javax.swing.JPanel accountTypeListPane = new javax.swing.JPanel();
private final javax.swing.JLabel accountTypeRequiredLabel = new javax.swing.JLabel();
private final javax.swing.JLabel accountTypesLabel = new javax.swing.JLabel();
private final javax.swing.JPanel accountTypesPane = new javax.swing.JPanel();
private final javax.swing.JScrollPane accountTypesScrollPane = new javax.swing.JScrollPane();
private final javax.swing.JButton applyFiltersButton = new javax.swing.JButton();
private final javax.swing.JButton checkAllAccountTypesButton = new javax.swing.JButton();
private final javax.swing.JButton checkAllDevicesButton = new javax.swing.JButton();
private final javax.swing.JLabel dateRangeLabel = new javax.swing.JLabel();
private final javax.swing.JPanel dateRangePane = new javax.swing.JPanel();
private final javax.swing.JLabel deviceRequiredLabel = new javax.swing.JLabel();
private final javax.swing.JLabel devicesLabel = new javax.swing.JLabel();
private final javax.swing.JPanel devicesListPane = new javax.swing.JPanel();
private final javax.swing.JPanel devicesPane = new javax.swing.JPanel();
private final javax.swing.JScrollPane devicesScrollPane = new
javax.swing.JScrollPane();
// Form Editor generated component fields (continued) — do not modify.
private final javax.swing.JCheckBox endCheckBox = new javax.swing.JCheckBox();
private final com.github.lgooddatepicker.components.DatePicker endDatePicker = new com.github.lgooddatepicker.components.DatePicker();
private final javax.swing.JLabel filtersTitleLabel = new javax.swing.JLabel();
private final javax.swing.JComboBox<String> limitComboBox = new javax.swing.JComboBox<>();
private final javax.swing.JLabel limitErrorMsgLabel = new javax.swing.JLabel();
private final javax.swing.JLabel limitHeaderLabel = new javax.swing.JLabel();
private final javax.swing.JPanel limitPane = new javax.swing.JPanel();
private final javax.swing.JPanel limitTitlePanel = new javax.swing.JPanel();
private final javax.swing.JPanel mainPanel = new javax.swing.JPanel();
private final javax.swing.JLabel mostRecentLabel = new javax.swing.JLabel();
private final javax.swing.JLabel needsRefreshLabel = new javax.swing.JLabel();
private final javax.swing.JButton refreshButton = new javax.swing.JButton();
private final javax.swing.JScrollPane scrollPane = new javax.swing.JScrollPane();
private final javax.swing.JCheckBox startCheckBox = new javax.swing.JCheckBox();
private final com.github.lgooddatepicker.components.DatePicker startDatePicker = new com.github.lgooddatepicker.components.DatePicker();
private final javax.swing.JPanel topPane = new javax.swing.JPanel();
private final javax.swing.JButton unCheckAllAccountTypesButton = new javax.swing.JButton();
private final javax.swing.JButton unCheckAllDevicesButton = new javax.swing.JButton();
// End of variables declaration//GEN-END:variables

/**
 * This class is a small panel that appears to just be a checkbox but adds
 * the functionality of being able to show an icon between the checkbox and
 * label.
*/
final class CheckBoxIconPanel extends JPanel {

    private static final long serialVersionUID = 1L;

    private final JCheckBox checkbox;
    private final JLabel label;

    /**
     * Creates a JPanel instance with the specified label and image: an
     * unlabeled check box followed by an icon+text label, left-aligned with
     * trailing horizontal glue.
     *
     * @param text The text to be displayed by the checkbox label.
     * @param icon The image to be displayed by the label.
     */
    private CheckBoxIconPanel(String text, Icon icon) {
        checkbox = new JCheckBox();
        label = new JLabel(text);
        label.setIcon(icon);
        setLayout(new BoxLayout(this, BoxLayout.X_AXIS));
        add(checkbox);
        add(label);
        add(Box.createHorizontalGlue());
    }

    /**
     * Returns the instance of the JCheckBox.
     *
     * @return JCheckbox instance
     */
    JCheckBox getCheckBox() {
        return checkbox;
    }

    /**
     * Adds an ItemListener to the checkbox.
     *
     * @param l the ItemListener to be added.
     */
    void addItemListener(ItemListener l) {
        checkbox.addItemListener(l);
    }

    /**
     * Sets the state of the checkbox.
     *
     * @param selected true if the button is selected, otherwise false
     */
    void setSelected(boolean selected) {
        checkbox.setSelected(selected);
    }

    /**
     * Enables/disables only the inner check box.
     *
     * NOTE(review): does not call super.setEnabled(), so the panel's own
     * enabled flag and the label are left untouched — confirm intended.
     */
    @Override
    public void setEnabled(boolean enabled) {
        checkbox.setEnabled(enabled);
    }
}

/**
 * Extends the CVTFilterRefresher abstract class to add the calls to update
 * the ui controls with the data found. Note that updateFilterPanel is run
 * in the EDT.
*/
final class FilterPanelRefresher extends CVTFilterRefresher {

    // Passed through to the update*Filter methods; presumably controls whether
    // newly discovered options start out checked — confirm against those methods.
    private final boolean selectNewOption;
    // When true, applyFilters() is invoked after the panel has been updated.
    private final boolean refreshAfterUpdate;

    FilterPanelRefresher(boolean selectNewOptions, boolean refreshAfterUpdate) {
        this.selectNewOption = selectNewOptions;
        this.refreshAfterUpdate = refreshAfterUpdate;
    }

    @Override
    void updateFilterPanel(CVTFilterRefresher.FilterPanelData data) {
        // Push the freshly-queried data into each group of UI controls.
        updateDateTimePicker(data.getStartTime(), data.getEndTime());
        updateDeviceFilterPanel(data.getDataSourceMap(), selectNewOption);
        updateAccountTypeFilter(data.getAccountTypesInUse(), selectNewOption);
        FiltersPanel.this.repaint();

        if (refreshAfterUpdate) {
            applyFilters();
        }

        if (!isEnabled()) {
            setEnabled(true);
        }

        needsRefreshLabel.setText(org.openide.util.NbBundle.getMessage(FiltersPanel.class, "FiltersPanel.needsRefreshLabel.text")); // NOI18N
        validateFilters();
        repaint();
    }
}

/**
 * Sorts a list of JCheckBoxes in alphabetical order of the text field
 * value.
 */
class DeviceCheckBoxComparator implements Comparator<JCheckBox> {

    @Override
    public int compare(JCheckBox e1, JCheckBox e2) {
        // NOTE(review): toLowerCase() uses the default locale; for a
        // locale-stable ordering consider String.CASE_INSENSITIVE_ORDER.
        return e1.getText().toLowerCase().compareTo(e2.getText().toLowerCase());
    }
}
}
/* * Copyright 2013 The Netty Project * * The Netty Project licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package io.netty.handler.ssl; import io.netty.bootstrap.Bootstrap; import io.netty.bootstrap.ServerBootstrap; import io.netty.buffer.ByteBuf; import io.netty.buffer.ByteBufAllocator; import io.netty.buffer.Unpooled; import io.netty.buffer.UnpooledByteBufAllocator; import io.netty.channel.Channel; import io.netty.channel.ChannelDuplexHandler; import io.netty.channel.ChannelFuture; import io.netty.channel.ChannelFutureListener; import io.netty.channel.ChannelHandler; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelInboundHandlerAdapter; import io.netty.channel.ChannelInitializer; import io.netty.channel.ChannelOutboundHandlerAdapter; import io.netty.channel.ChannelPromise; import io.netty.channel.DefaultChannelId; import io.netty.channel.DefaultEventLoopGroup; import io.netty.channel.EventLoopGroup; import io.netty.channel.SimpleChannelInboundHandler; import io.netty.channel.embedded.EmbeddedChannel; import io.netty.channel.local.LocalAddress; import io.netty.channel.local.LocalChannel; import io.netty.channel.local.LocalServerChannel; import io.netty.channel.nio.NioEventLoopGroup; import io.netty.channel.socket.nio.NioServerSocketChannel; import io.netty.channel.socket.nio.NioSocketChannel; import io.netty.handler.codec.ByteToMessageDecoder; import io.netty.handler.codec.CodecException; import 
io.netty.handler.codec.DecoderException; import io.netty.handler.codec.UnsupportedMessageTypeException; import io.netty.handler.ssl.util.InsecureTrustManagerFactory; import io.netty.handler.ssl.util.SelfSignedCertificate; import io.netty.util.AbstractReferenceCounted; import io.netty.util.IllegalReferenceCountException; import io.netty.util.ReferenceCountUtil; import io.netty.util.ReferenceCounted; import io.netty.util.concurrent.Future; import io.netty.util.concurrent.FutureListener; import io.netty.util.concurrent.ImmediateEventExecutor; import io.netty.util.concurrent.ImmediateExecutor; import io.netty.util.concurrent.Promise; import io.netty.util.internal.EmptyArrays; import io.netty.util.internal.PlatformDependent; import org.hamcrest.CoreMatchers; import org.hamcrest.Matchers; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Timeout; import org.junit.jupiter.api.function.Executable; import java.net.InetSocketAddress; import java.net.Socket; import java.nio.channels.ClosedChannelException; import java.security.NoSuchAlgorithmException; import java.security.cert.CertificateException; import java.security.cert.X509Certificate; import java.util.Collections; import java.util.List; import java.util.Queue; import java.util.concurrent.BlockingQueue; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.Executor; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import javax.net.ssl.SSLContext; import javax.net.ssl.SSLEngine; import javax.net.ssl.SSLException; import javax.net.ssl.SSLProtocolException; import javax.net.ssl.X509ExtendedTrustManager; import static io.netty.buffer.Unpooled.wrappedBuffer; import static 
org.hamcrest.CoreMatchers.*;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertSame;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assumptions.assumeFalse;
import static org.junit.jupiter.api.Assumptions.assumeTrue;

/**
 * Unit tests for {@link SslHandler}.
 */
public class SslHandlerTest {

    // Executor that runs every submitted task inline on the calling thread.
    private static final Executor DIRECT_EXECUTOR = new Executor() {
        @Override
        public void execute(Runnable command) {
            command.run();
        }
    };

    // Verifies that when a non-application (handshake) write fails, queued
    // application-data writes are failed with an SSLException as well.
    @Test
    @Timeout(value = 5000, unit = TimeUnit.MILLISECONDS)
    public void testNonApplicationDataFailureFailsQueuedWrites()
            throws NoSuchAlgorithmException, InterruptedException {
        final CountDownLatch writeLatch = new CountDownLatch(1);
        final Queue<ChannelPromise> writesToFail = new ConcurrentLinkedQueue<ChannelPromise>();
        SSLEngine engine = newClientModeSSLEngine();
        SslHandler handler = new SslHandler(engine) {
            @Override
            public void write(final ChannelHandlerContext ctx, Object msg, ChannelPromise promise)
                    throws Exception {
                super.write(ctx, msg, promise);
                // Signal that the SslHandler has forwarded at least one write.
                writeLatch.countDown();
            }
        };
        EmbeddedChannel ch = new EmbeddedChannel(new ChannelDuplexHandler() {
            @Override
            public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) {
                if (msg instanceof ByteBuf) {
                    if (((ByteBuf) msg).isReadable()) {
                        // Capture promises of non-empty writes so the test can fail them later.
                        writesToFail.add(promise);
                    } else {
                        promise.setSuccess();
                    }
                }
                ReferenceCountUtil.release(msg);
            }
        }, handler);
        try {
            final CountDownLatch writeCauseLatch = new CountDownLatch(1);
            final AtomicReference<Throwable> failureRef = new AtomicReference<Throwable>();
            ch.write(Unpooled.wrappedBuffer(new byte[]{1})).addListener(new ChannelFutureListener() {
                @Override
                public void
operationComplete(ChannelFuture future) {
                    // Record only the first failure cause.
                    failureRef.compareAndSet(null, future.cause());
                    writeCauseLatch.countDown();
                }
            });
            writeLatch.await();
            // Simulate failing the SslHandler non-application writes after there are application writes queued.
            ChannelPromise promiseToFail;
            while ((promiseToFail = writesToFail.poll()) != null) {
                promiseToFail.setFailure(new RuntimeException("fake exception"));
            }
            writeCauseLatch.await();
            Throwable writeCause = failureRef.get();
            assertNotNull(writeCause);
            assertThat(writeCause, is(CoreMatchers.<Throwable>instanceOf(SSLException.class)));
            Throwable cause = handler.handshakeFuture().cause();
            assertNotNull(cause);
            assertThat(cause, is(CoreMatchers.<Throwable>instanceOf(SSLException.class)));
        } finally {
            assertFalse(ch.finishAndReleaseAll());
        }
    }

    // Verifies that no SslHandshakeCompletionEvent is fired when the channel is
    // closed before a handshake ever starts.
    @Test
    public void testNoSslHandshakeEventWhenNoHandshake() throws Exception {
        final AtomicBoolean inActive = new AtomicBoolean(false);
        SSLEngine engine = SSLContext.getDefault().createSSLEngine();
        EmbeddedChannel ch = new EmbeddedChannel(
                DefaultChannelId.newInstance(), false, false,
                new ChannelInboundHandlerAdapter() {
                    @Override
                    public void channelActive(ChannelHandlerContext ctx) throws Exception {
                        // Do not forward the event to the SslHandler but just close the Channel.
                        ctx.close();
                    }
                },
                new SslHandler(engine) {
                    @Override
                    public void handlerAdded(ChannelHandlerContext ctx) throws Exception {
                        // We want to override what Channel.isActive() will return as otherwise it will
                        // return true and so trigger a handshake.
inActive.set(true);
                        super.handlerAdded(ctx);
                        inActive.set(false);
                    }
                },
                new ChannelInboundHandlerAdapter() {
                    @Override
                    public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {
                        if (evt instanceof SslHandshakeCompletionEvent) {
                            // A completion event here would mean a handshake happened; surface its cause.
                            throw (Exception) ((SslHandshakeCompletionEvent) evt).cause();
                        }
                    }
                }) {
            @Override
            public boolean isActive() {
                return !inActive.get() && super.isActive();
            }
        };
        ch.register();
        assertFalse(ch.finishAndReleaseAll());
    }

    // Client-side handshake must time out with SslHandshakeTimeoutException.
    @Test
    @Timeout(value = 3000, unit = TimeUnit.MILLISECONDS)
    public void testClientHandshakeTimeout() throws Exception {
        assertThrows(SslHandshakeTimeoutException.class, new Executable() {
            @Override
            public void execute() throws Throwable {
                testHandshakeTimeout(true);
            }
        });
    }

    // Server-side handshake must time out with SslHandshakeTimeoutException.
    @Test
    @Timeout(value = 3000, unit = TimeUnit.MILLISECONDS)
    public void testServerHandshakeTimeout() throws Exception {
        assertThrows(SslHandshakeTimeoutException.class, new Executable() {
            @Override
            public void execute() throws Throwable {
                testHandshakeTimeout(false);
            }
        });
    }

    // Creates a JDK SSLEngine configured for server mode.
    private static SSLEngine newServerModeSSLEngine() throws NoSuchAlgorithmException {
        SSLEngine engine = SSLContext.getDefault().createSSLEngine();
        // Set the mode before we try to do the handshake as otherwise it may throw an IllegalStateException.
        // See:
        // - https://docs.oracle.com/javase/10/docs/api/javax/net/ssl/SSLEngine.html#beginHandshake()
        // - https://mail.openjdk.java.net/pipermail/security-dev/2018-July/017715.html
        engine.setUseClientMode(false);
        return engine;
    }

    // Creates a JDK SSLEngine configured for client mode.
    private static SSLEngine newClientModeSSLEngine() throws NoSuchAlgorithmException {
        SSLEngine engine = SSLContext.getDefault().createSSLEngine();
        // Set the mode before we try to do the handshake as otherwise it may throw an IllegalStateException.
// See:
        // - https://docs.oracle.com/javase/10/docs/api/javax/net/ssl/SSLEngine.html#beginHandshake()
        // - https://mail.openjdk.java.net/pipermail/security-dev/2018-July/017715.html
        engine.setUseClientMode(true);
        return engine;
    }

    // Shared implementation for the client/server handshake-timeout tests: uses a
    // 1ms handshake timeout and waits for the handshake future to complete (with failure).
    private static void testHandshakeTimeout(boolean client) throws Exception {
        SSLEngine engine = SSLContext.getDefault().createSSLEngine();
        engine.setUseClientMode(client);
        SslHandler handler = new SslHandler(engine);
        handler.setHandshakeTimeoutMillis(1);
        EmbeddedChannel ch = new EmbeddedChannel(handler);
        try {
            while (!handler.handshakeFuture().isDone()) {
                Thread.sleep(10);
                // We need to run all pending tasks as the handshake timeout is scheduled on the EventLoop.
                ch.runPendingTasks();
            }
            handler.handshakeFuture().syncUninterruptibly();
        } finally {
            ch.finishAndReleaseAll();
        }
    }

    // Removing the SslHandler from the pipeline must fail both the handshake and
    // close futures and fire the corresponding completion events.
    @Test
    @Timeout(value = 5000, unit = TimeUnit.MILLISECONDS)
    public void testHandshakeAndClosePromiseFailedOnRemoval() throws Exception {
        SSLEngine engine = SSLContext.getDefault().createSSLEngine();
        engine.setUseClientMode(true);
        SslHandler handler = new SslHandler(engine);
        final AtomicReference<Throwable> handshakeRef = new AtomicReference<Throwable>();
        final AtomicReference<Throwable> closeRef = new AtomicReference<Throwable>();
        EmbeddedChannel ch = new EmbeddedChannel(handler, new ChannelInboundHandlerAdapter() {
            @Override
            public void userEventTriggered(ChannelHandlerContext ctx, Object evt) {
                if (evt instanceof SslHandshakeCompletionEvent) {
                    handshakeRef.set(((SslHandshakeCompletionEvent) evt).cause());
                } else if (evt instanceof SslCloseCompletionEvent) {
                    closeRef.set(((SslCloseCompletionEvent) evt).cause());
                }
            }
        });
        assertFalse(handler.handshakeFuture().isDone());
        assertFalse(handler.sslCloseFuture().isDone());
        ch.pipeline().remove(handler);
        try {
            while (!handler.handshakeFuture().isDone() || handshakeRef.get() == null
                    || !handler.sslCloseFuture().isDone() || closeRef.get() == null) {
                Thread.sleep(10);
                // Continue running all pending tasks until we are notified for everything.
                ch.runPendingTasks();
            }
            assertSame(handler.handshakeFuture().cause(), handshakeRef.get());
            assertSame(handler.sslCloseFuture().cause(), closeRef.get());
        } finally {
            ch.finishAndReleaseAll();
        }
    }

    // A partial TLS record must decode nothing; completing it with invalid bytes
    // must raise a DecoderException wrapping an SSLProtocolException.
    @Test
    public void testTruncatedPacket() throws Exception {
        SSLEngine engine = newServerModeSSLEngine();
        final EmbeddedChannel ch = new EmbeddedChannel(new SslHandler(engine));
        // Push the first part of a 5-byte handshake message.
        ch.writeInbound(wrappedBuffer(new byte[]{22, 3, 1, 0, 5}));
        // Should decode nothing yet.
        assertThat(ch.readInbound(), is(nullValue()));
        DecoderException e = assertThrows(DecoderException.class, new Executable() {
            @Override
            public void execute() throws Throwable {
                // Push the second part of the 5-byte handshake message.
                ch.writeInbound(wrappedBuffer(new byte[]{2, 0, 0, 1, 0}));
            }
        });
        // Be sure we cleanup the channel and release any pending messages that may have been generated because
        // of an alert.
        // See https://github.com/netty/netty/issues/6057.
        ch.finishAndReleaseAll();
        // The pushed message is invalid, so it should raise an exception if it decoded the message correctly.
assertThat(e.getCause(), is(instanceOf(SSLProtocolException.class)));
    }

    // Writing a non-ByteBuf message must fail with UnsupportedMessageTypeException
    // and must still release the message (refCnt drops to 0).
    @Test
    public void testNonByteBufWriteIsReleased() throws Exception {
        SSLEngine engine = newServerModeSSLEngine();
        final EmbeddedChannel ch = new EmbeddedChannel(new SslHandler(engine));
        final AbstractReferenceCounted referenceCounted = new AbstractReferenceCounted() {
            @Override
            public ReferenceCounted touch(Object hint) {
                return this;
            }

            @Override
            protected void deallocate() {
            }
        };
        ExecutionException e = assertThrows(ExecutionException.class, new Executable() {
            @Override
            public void execute() throws Throwable {
                ch.write(referenceCounted).get();
            }
        });
        assertThat(e.getCause(), is(instanceOf(UnsupportedMessageTypeException.class)));
        assertEquals(0, referenceCounted.refCnt());
        assertTrue(ch.finishAndReleaseAll());
    }

    // A non-ByteBuf message must not be passed through the SslHandler outbound.
    @Test
    public void testNonByteBufNotPassThrough() throws Exception {
        SSLEngine engine = newServerModeSSLEngine();
        final EmbeddedChannel ch = new EmbeddedChannel(new SslHandler(engine));
        assertThrows(UnsupportedMessageTypeException.class, new Executable() {
            @Override
            public void execute() throws Throwable {
                ch.writeOutbound(new Object());
            }
        });
        ch.finishAndReleaseAll();
    }

    // A write that cannot complete yet must leave its promise pending until the
    // channel is torn down, and then fail it with an SSLException.
    @Test
    public void testIncompleteWriteDoesNotCompletePromisePrematurely() throws NoSuchAlgorithmException {
        SSLEngine engine = newServerModeSSLEngine();
        EmbeddedChannel ch = new EmbeddedChannel(new SslHandler(engine));
        ChannelPromise promise = ch.newPromise();
        ByteBuf buf = Unpooled.buffer(10).writeZero(10);
        ch.writeAndFlush(buf, promise);
        assertFalse(promise.isDone());
        assertTrue(ch.finishAndReleaseAll());
        assertTrue(promise.isDone());
        assertThat(promise.cause(), is(instanceOf(SSLException.class)));
    }

    // Closing the channel must release the reference-counted OpenSSL engine and
    // return the context's reference count to its initial value.
    @Test
    public void testReleaseSslEngine() throws Exception {
        OpenSsl.ensureAvailability();
        SelfSignedCertificate cert = new SelfSignedCertificate();
        try {
            SslContext sslContext = SslContextBuilder.forServer(cert.certificate(), cert.privateKey())
                .sslProvider(SslProvider.OPENSSL)
                .build();
            try {
                assertEquals(1,
((ReferenceCounted) sslContext).refCnt());
                SSLEngine sslEngine = sslContext.newEngine(ByteBufAllocator.DEFAULT);
                EmbeddedChannel ch = new EmbeddedChannel(new SslHandler(sslEngine));
                // Creating the engine retains the context; the engine itself starts at refCnt 1.
                assertEquals(2, ((ReferenceCounted) sslContext).refCnt());
                assertEquals(1, ((ReferenceCounted) sslEngine).refCnt());
                assertTrue(ch.finishAndReleaseAll());
                ch.close().syncUninterruptibly();
                assertEquals(1, ((ReferenceCounted) sslContext).refCnt());
                assertEquals(0, ((ReferenceCounted) sslEngine).refCnt());
            } finally {
                ReferenceCountUtil.release(sslContext);
            }
        } finally {
            cert.delete();
        }
    }

    // Helper that records whether a read(...) was issued through the pipeline and
    // drives the two testIssueRead* scenarios below.
    private static final class TlsReadTest extends ChannelOutboundHandlerAdapter {
        // Set whenever the pipeline issues a read; reset between checks.
        private volatile boolean readIssued;

        @Override
        public void read(ChannelHandlerContext ctx) throws Exception {
            readIssued = true;
            super.read(ctx);
        }

        public void test(final boolean dropChannelActive) throws Exception {
            SSLEngine engine = SSLContext.getDefault().createSSLEngine();
            engine.setUseClientMode(true);
            EmbeddedChannel ch = new EmbeddedChannel(false, false,
                    this,
                    new SslHandler(engine),
                    new ChannelInboundHandlerAdapter() {
                        @Override
                        public void channelActive(ChannelHandlerContext ctx) throws Exception {
                            if (!dropChannelActive) {
                                ctx.fireChannelActive();
                            }
                        }
                    }
            );
            // Auto-read off: any read must be issued explicitly by the SslHandler.
            ch.config().setAutoRead(false);
            assertFalse(ch.config().isAutoRead());
            ch.register();
            assertTrue(readIssued);
            readIssued = false;
            assertTrue(ch.writeOutbound(Unpooled.EMPTY_BUFFER));
            assertTrue(readIssued);
            assertTrue(ch.finishAndReleaseAll());
        }
    }

    @Test
    public void testIssueReadAfterActiveWriteFlush() throws Exception {
        // the handshake is initiated by channelActive
        new TlsReadTest().test(false);
    }

    @Test
    public void testIssueReadAfterWriteFlushActive() throws Exception {
        // the handshake is initiated by flush
        new TlsReadTest().test(true);
    }

    // Removing the SslHandler after the handshake completes must not trigger an
    // IllegalReferenceCountException on either side of a real socket connection.
    @Test
    @Timeout(value = 30000, unit = TimeUnit.MILLISECONDS)
    public void testRemoval() throws Exception {
        NioEventLoopGroup group = new NioEventLoopGroup();
        Channel sc = null;
        Channel cc = null;
        try {
            final Promise<Void>
clientPromise = group.next().newPromise();
            Bootstrap bootstrap = new Bootstrap()
                    .group(group)
                    .channel(NioSocketChannel.class)
                    .handler(newHandler(SslContextBuilder.forClient().trustManager(
                            InsecureTrustManagerFactory.INSTANCE).build(), clientPromise));
            SelfSignedCertificate ssc = new SelfSignedCertificate();
            final Promise<Void> serverPromise = group.next().newPromise();
            ServerBootstrap serverBootstrap = new ServerBootstrap()
                    .group(group, group)
                    .channel(NioServerSocketChannel.class)
                    .childHandler(newHandler(SslContextBuilder.forServer(ssc.certificate(), ssc.privateKey()).build(),
                            serverPromise));
            sc = serverBootstrap.bind(new InetSocketAddress(0)).syncUninterruptibly().channel();
            cc = bootstrap.connect(sc.localAddress()).syncUninterruptibly().channel();
            serverPromise.syncUninterruptibly();
            clientPromise.syncUninterruptibly();
        } finally {
            if (cc != null) {
                cc.close().syncUninterruptibly();
            }
            if (sc != null) {
                sc.close().syncUninterruptibly();
            }
            group.shutdownGracefully();
        }
    }

    // Builds a ChannelInitializer that installs an SslHandler, removes it once the
    // handshake future completes, fails the promise if an
    // IllegalReferenceCountException is observed, and succeeds it on channelInactive.
    private static ChannelHandler newHandler(final SslContext sslCtx, final Promise<Void> promise) {
        return new ChannelInitializer() {
            @Override
            protected void initChannel(final Channel ch) {
                final SslHandler sslHandler = sslCtx.newHandler(ch.alloc());
                sslHandler.setHandshakeTimeoutMillis(1000);
                ch.pipeline().addFirst(sslHandler);
                sslHandler.handshakeFuture().addListener(new FutureListener<Channel>() {
                    @Override
                    public void operationComplete(final Future<Channel> future) {
                        ch.pipeline().remove(sslHandler);
                        // Schedule the close so removal has time to propagate exception if any.
ch.eventLoop().execute(new Runnable() {
                            @Override
                            public void run() {
                                ch.close();
                            }
                        });
                    }
                });
                ch.pipeline().addLast(new ChannelInboundHandlerAdapter() {
                    @Override
                    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
                        // Unwrap codec exceptions to inspect the underlying cause.
                        if (cause instanceof CodecException) {
                            cause = cause.getCause();
                        }
                        if (cause instanceof IllegalReferenceCountException) {
                            promise.setFailure(cause);
                        }
                    }

                    @Override
                    public void channelInactive(ChannelHandlerContext ctx) {
                        promise.trySuccess(null);
                    }
                });
            }
        };
    }

    // Closing the channel must fail both the handshake and close futures with
    // ClosedChannelException and emit an empty outbound buffer.
    @Test
    public void testCloseFutureNotified() throws Exception {
        SSLEngine engine = newServerModeSSLEngine();
        SslHandler handler = new SslHandler(engine);
        EmbeddedChannel ch = new EmbeddedChannel(handler);
        ch.close();
        // When the channel is closed the SslHandler will write an empty buffer to the channel.
        ByteBuf buf = ch.readOutbound();
        assertFalse(buf.isReadable());
        buf.release();
        assertFalse(ch.finishAndReleaseAll());
        assertTrue(handler.handshakeFuture().cause() instanceof ClosedChannelException);
        assertTrue(handler.sslCloseFuture().cause() instanceof ClosedChannelException);
    }

    // On close without a handshake the handler must fire, in order, a failed
    // SslHandshakeCompletionEvent and then a failed SslCloseCompletionEvent.
    @Test
    @Timeout(value = 5000, unit = TimeUnit.MILLISECONDS)
    public void testEventsFired() throws Exception {
        SSLEngine engine = newServerModeSSLEngine();
        final BlockingQueue<SslCompletionEvent> events = new LinkedBlockingQueue<SslCompletionEvent>();
        EmbeddedChannel channel = new EmbeddedChannel(new SslHandler(engine), new ChannelInboundHandlerAdapter() {
            @Override
            public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {
                if (evt instanceof SslCompletionEvent) {
                    events.add((SslCompletionEvent) evt);
                }
            }
        });
        assertTrue(events.isEmpty());
        assertTrue(channel.finishAndReleaseAll());
        SslCompletionEvent evt = events.take();
        assertTrue(evt instanceof SslHandshakeCompletionEvent);
        assertTrue(evt.cause() instanceof ClosedChannelException);
        evt = events.take();
        assertTrue(evt instanceof SslCloseCompletionEvent);
        assertTrue(evt.cause() instanceof ClosedChannelException);
assertTrue(events.isEmpty());
    }

    // A server write issued before the (failing) handshake completes must fail its
    // future with an SSLException, matching the handshake completion event.
    @Test
    @Timeout(value = 5000, unit = TimeUnit.MILLISECONDS)
    public void testHandshakeFailBeforeWritePromise() throws Exception {
        SelfSignedCertificate ssc = new SelfSignedCertificate();
        final SslContext sslServerCtx = SslContextBuilder.forServer(ssc.certificate(), ssc.privateKey()).build();
        final CountDownLatch latch = new CountDownLatch(2);
        final CountDownLatch latch2 = new CountDownLatch(2);
        final BlockingQueue<Object> events = new LinkedBlockingQueue<Object>();
        Channel serverChannel = null;
        Channel clientChannel = null;
        EventLoopGroup group = new DefaultEventLoopGroup();
        try {
            ServerBootstrap sb = new ServerBootstrap();
            sb.group(group)
              .channel(LocalServerChannel.class)
              .childHandler(new ChannelInitializer<Channel>() {
                  @Override
                  protected void initChannel(Channel ch) {
                      ch.pipeline().addLast(sslServerCtx.newHandler(ch.alloc()));
                      ch.pipeline().addLast(new ChannelInboundHandlerAdapter() {
                          @Override
                          public void channelActive(ChannelHandlerContext ctx) {
                              // Write before the handshake has a chance to complete.
                              ByteBuf buf = ctx.alloc().buffer(10);
                              buf.writeZero(buf.capacity());
                              ctx.writeAndFlush(buf).addListener(new ChannelFutureListener() {
                                  @Override
                                  public void operationComplete(ChannelFuture future) {
                                      events.add(future);
                                      latch.countDown();
                                  }
                              });
                          }

                          @Override
                          public void userEventTriggered(ChannelHandlerContext ctx, Object evt) {
                              if (evt instanceof SslCompletionEvent) {
                                  events.add(evt);
                                  latch.countDown();
                                  latch2.countDown();
                              }
                          }
                      });
                  }
              });
            Bootstrap cb = new Bootstrap();
            cb.group(group)
              .channel(LocalChannel.class)
              .handler(new ChannelInitializer<Channel>() {
                  @Override
                  protected void initChannel(Channel ch) {
                      // No SslHandler on the client: the zero-filled bytes are not a
                      // valid TLS ClientHello, so the server handshake fails.
                      ch.pipeline().addFirst(new ChannelInboundHandlerAdapter() {
                          @Override
                          public void channelActive(ChannelHandlerContext ctx) {
                              ByteBuf buf = ctx.alloc().buffer(1000);
                              buf.writeZero(buf.capacity());
                              ctx.writeAndFlush(buf);
                          }
                      });
                  }
              });
            serverChannel = sb.bind(new LocalAddress("SslHandlerTest")).sync().channel();
            clientChannel = cb.connect(serverChannel.localAddress()).sync().channel();
latch.await();
            SslCompletionEvent evt = (SslCompletionEvent) events.take();
            assertTrue(evt instanceof SslHandshakeCompletionEvent);
            assertThat(evt.cause(), is(instanceOf(SSLException.class)));
            ChannelFuture future = (ChannelFuture) events.take();
            assertThat(future.cause(), is(instanceOf(SSLException.class)));
            serverChannel.close().sync();
            serverChannel = null;
            clientChannel.close().sync();
            clientChannel = null;
            latch2.await();
            evt = (SslCompletionEvent) events.take();
            assertTrue(evt instanceof SslCloseCompletionEvent);
            assertThat(evt.cause(), is(instanceOf(ClosedChannelException.class)));
            assertTrue(events.isEmpty());
        } finally {
            if (serverChannel != null) {
                serverChannel.close();
            }
            if (clientChannel != null) {
                clientChannel.close();
            }
            group.shutdownGracefully();
        }
    }

    // Writing a read-only ByteBuf must not break the SslHandler's outbound
    // aggregation; all bytes must still arrive at the server.
    @Test
    public void writingReadOnlyBufferDoesNotBreakAggregation() throws Exception {
        SelfSignedCertificate ssc = new SelfSignedCertificate();
        final SslContext sslServerCtx = SslContextBuilder.forServer(ssc.certificate(), ssc.privateKey()).build();
        final SslContext sslClientCtx = SslContextBuilder.forClient()
                .trustManager(InsecureTrustManagerFactory.INSTANCE).build();
        EventLoopGroup group = new NioEventLoopGroup();
        Channel sc = null;
        Channel cc = null;
        final CountDownLatch serverReceiveLatch = new CountDownLatch(1);
        try {
            final int expectedBytes = 11;
            sc = new ServerBootstrap()
                    .group(group)
                    .channel(NioServerSocketChannel.class)
                    .childHandler(new ChannelInitializer<Channel>() {
                        @Override
                        protected void initChannel(Channel ch) throws Exception {
                            ch.pipeline().addLast(sslServerCtx.newHandler(ch.alloc()));
                            ch.pipeline().addLast(new SimpleChannelInboundHandler<ByteBuf>() {
                                // Running total of decrypted bytes received.
                                private int readBytes;

                                @Override
                                protected void channelRead0(ChannelHandlerContext ctx, ByteBuf msg) throws Exception {
                                    readBytes += msg.readableBytes();
                                    if (readBytes >= expectedBytes) {
                                        serverReceiveLatch.countDown();
                                    }
                                }
                            });
                        }
                    }).bind(new InetSocketAddress(0)).syncUninterruptibly().channel();
            cc = new Bootstrap()
                    .group(group)
.channel(NioSocketChannel.class)
                    .handler(new ChannelInitializer<Channel>() {
                        @Override
                        protected void initChannel(Channel ch) throws Exception {
                            ch.pipeline().addLast(sslClientCtx.newHandler(ch.alloc()));
                        }
                    }).connect(sc.localAddress()).syncUninterruptibly().channel();
            // We first write a ReadOnlyBuffer because SslHandler will attempt to take the first buffer and append to it
            // until there is no room, or the aggregation size threshold is exceeded. We want to verify that we don't
            // throw when a ReadOnlyBuffer is used and just verify that we don't aggregate in this case.
            ByteBuf firstBuffer = Unpooled.buffer(10);
            firstBuffer.writeByte(0);
            firstBuffer = firstBuffer.asReadOnly();
            ByteBuf secondBuffer = Unpooled.buffer(10);
            secondBuffer.writeZero(secondBuffer.capacity());
            cc.write(firstBuffer);
            cc.writeAndFlush(secondBuffer).syncUninterruptibly();
            serverReceiveLatch.countDown();
        } finally {
            if (cc != null) {
                cc.close().syncUninterruptibly();
            }
            if (sc != null) {
                sc.close().syncUninterruptibly();
            }
            group.shutdownGracefully();
            ReferenceCountUtil.release(sslServerCtx);
            ReferenceCountUtil.release(sslClientCtx);
        }
    }

    // When the client trusts a different certificate than the one the server
    // presents, the handshake must fail and the connection must be closed.
    @Test
    @Timeout(value = 10000, unit = TimeUnit.MILLISECONDS)
    public void testCloseOnHandshakeFailure() throws Exception {
        final SelfSignedCertificate ssc = new SelfSignedCertificate();
        final SslContext sslServerCtx = SslContextBuilder.forServer(ssc.key(), ssc.cert()).build();
        final SslContext sslClientCtx = SslContextBuilder.forClient()
                .trustManager(new SelfSignedCertificate().cert())
                .build();
        EventLoopGroup group = new NioEventLoopGroup(1);
        Channel sc = null;
        Channel cc = null;
        try {
            LocalAddress address = new LocalAddress(getClass().getSimpleName() + ".testCloseOnHandshakeFailure");
            ServerBootstrap sb = new ServerBootstrap()
                    .group(group)
                    .channel(LocalServerChannel.class)
                    .childHandler(new ChannelInitializer<Channel>() {
                        @Override
                        protected void initChannel(Channel ch) {
                            ch.pipeline().addLast(sslServerCtx.newHandler(ch.alloc()));
                        }
                    });
            sc =
sb.bind(address).syncUninterruptibly().channel();
            final AtomicReference<SslHandler> sslHandlerRef = new AtomicReference<SslHandler>();
            Bootstrap b = new Bootstrap()
                    .group(group)
                    .channel(LocalChannel.class)
                    .handler(new ChannelInitializer<Channel>() {
                        @Override
                        protected void initChannel(Channel ch) {
                            SslHandler handler = sslClientCtx.newHandler(ch.alloc());
                            // We propagate the SslHandler via an AtomicReference to the outer-scope as using
                            // pipeline.get(...) may return null if the pipeline was torn down by the time we call it.
                            // This will happen if the channel was closed in the meantime.
                            sslHandlerRef.set(handler);
                            ch.pipeline().addLast(handler);
                        }
                    });
            cc = b.connect(sc.localAddress()).syncUninterruptibly().channel();
            SslHandler handler = sslHandlerRef.get();
            handler.handshakeFuture().awaitUninterruptibly();
            assertFalse(handler.handshakeFuture().isSuccess());
            cc.closeFuture().syncUninterruptibly();
        } finally {
            if (cc != null) {
                cc.close().syncUninterruptibly();
            }
            if (sc != null) {
                sc.close().syncUninterruptibly();
            }
            group.shutdownGracefully();
            ReferenceCountUtil.release(sslServerCtx);
            ReferenceCountUtil.release(sslClientCtx);
        }
    }

    // Closing the channel must mark the engine's outbound side as done even when
    // the channel never became active.
    @Test
    public void testOutboundClosedAfterChannelInactive() throws Exception {
        SslContext context = SslContextBuilder.forClient().build();
        SSLEngine engine = context.newEngine(UnpooledByteBufAllocator.DEFAULT);
        EmbeddedChannel channel = new EmbeddedChannel();
        assertFalse(channel.finish());
        channel.pipeline().addLast(new SslHandler(engine));
        assertFalse(engine.isOutboundDone());
        channel.close().syncUninterruptibly();
        assertTrue(engine.isOutboundDone());
    }

    // A write issued before channelActive must trigger (and here fail) the
    // handshake without tripping any internal AssertionError.
    @Test
    @Timeout(value = 10000, unit = TimeUnit.MILLISECONDS)
    public void testHandshakeFailedByWriteBeforeChannelActive() throws Exception {
        final SslContext sslClientCtx = SslContextBuilder.forClient()
                .protocols(SslProtocols.SSL_v3)
                .trustManager(InsecureTrustManagerFactory.INSTANCE)
                .sslProvider(SslProvider.JDK).build();
        EventLoopGroup group = new NioEventLoopGroup();
        Channel sc =
null;
        Channel cc = null;
        final CountDownLatch activeLatch = new CountDownLatch(1);
        final AtomicReference<AssertionError> errorRef = new AtomicReference<AssertionError>();
        final SslHandler sslHandler = sslClientCtx.newHandler(UnpooledByteBufAllocator.DEFAULT);
        try {
            sc = new ServerBootstrap()
                    .group(group)
                    .channel(NioServerSocketChannel.class)
                    .childHandler(new ChannelInboundHandlerAdapter())
                    .bind(new InetSocketAddress(0)).syncUninterruptibly().channel();
            cc = new Bootstrap()
                    .group(group)
                    .channel(NioSocketChannel.class)
                    .handler(new ChannelInitializer<Channel>() {
                        @Override
                        protected void initChannel(Channel ch) throws Exception {
                            ch.pipeline().addLast(sslHandler);
                            ch.pipeline().addLast(new ChannelInboundHandlerAdapter() {
                                @Override
                                public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause)
                                        throws Exception {
                                    if (cause instanceof AssertionError) {
                                        errorRef.set((AssertionError) cause);
                                    }
                                }

                                @Override
                                public void channelActive(ChannelHandlerContext ctx) throws Exception {
                                    activeLatch.countDown();
                                }
                            });
                        }
                    }).connect(sc.localAddress()).addListener(new ChannelFutureListener() {
                        @Override
                        public void operationComplete(ChannelFuture future) throws Exception {
                            // Write something to trigger the handshake before fireChannelActive is called.
                            future.channel().writeAndFlush(wrappedBuffer(new byte [] { 1, 2, 3, 4 }));
                        }
                    }).syncUninterruptibly().channel();
            // Ensure there is no AssertionError thrown by having the handshake failed by the writeAndFlush(...) before
            // channelActive(...) was called. Let's first wait for the activeLatch countdown to happen and after this
            // check if we saw an AssertionError (even if we timed out waiting).
activeLatch.await(5, TimeUnit.SECONDS);
            AssertionError error = errorRef.get();
            if (error != null) {
                throw error;
            }
            assertThat(sslHandler.handshakeFuture().await().cause(),
                    CoreMatchers.<Throwable>instanceOf(SSLException.class));
        } finally {
            if (cc != null) {
                cc.close().syncUninterruptibly();
            }
            if (sc != null) {
                sc.close().syncUninterruptibly();
            }
            group.shutdownGracefully();
            ReferenceCountUtil.release(sslClientCtx);
        }
    }

    @Test
    @Timeout(value = 10000, unit = TimeUnit.MILLISECONDS)
    public void testHandshakeTimeoutFlushStartsHandshake() throws Exception {
        testHandshakeTimeout0(false);
    }

    @Test
    @Timeout(value = 10000, unit = TimeUnit.MILLISECONDS)
    public void testHandshakeTimeoutStartTLS() throws Exception {
        testHandshakeTimeout0(true);
    }

    // Shared implementation: the server-side pipeline does nothing, so the client
    // handshake must fail with a "timed out" SSLException after the 500ms timeout.
    private static void testHandshakeTimeout0(final boolean startTls) throws Exception {
        final SslContext sslClientCtx = SslContextBuilder.forClient()
                .startTls(true)
                .trustManager(InsecureTrustManagerFactory.INSTANCE)
                .sslProvider(SslProvider.JDK).build();
        EventLoopGroup group = new NioEventLoopGroup();
        Channel sc = null;
        Channel cc = null;
        final SslHandler sslHandler = sslClientCtx.newHandler(UnpooledByteBufAllocator.DEFAULT);
        sslHandler.setHandshakeTimeout(500, TimeUnit.MILLISECONDS);
        try {
            sc = new ServerBootstrap()
                    .group(group)
                    .channel(NioServerSocketChannel.class)
                    .childHandler(new ChannelInboundHandlerAdapter())
                    .bind(new InetSocketAddress(0)).syncUninterruptibly().channel();
            ChannelFuture future = new Bootstrap()
                    .group(group)
                    .channel(NioSocketChannel.class)
                    .handler(new ChannelInitializer<Channel>() {
                        @Override
                        protected void initChannel(Channel ch) throws Exception {
                            ch.pipeline().addLast(sslHandler);
                            if (startTls) {
                                ch.pipeline().addLast(new ChannelInboundHandlerAdapter() {
                                    @Override
                                    public void channelActive(ChannelHandlerContext ctx) throws Exception {
                                        // With startTls, write plaintext first to kick off the handshake.
                                        ctx.writeAndFlush(wrappedBuffer(new byte[] { 1, 2, 3, 4 }));
                                    }
                                });
                            }
                        }
                    }).connect(sc.localAddress());
            if (!startTls) {
                future.addListener(new ChannelFutureListener() {
@Override public void operationComplete(ChannelFuture future) throws Exception { // Write something to trigger the handshake before fireChannelActive is called. future.channel().writeAndFlush(wrappedBuffer(new byte [] { 1, 2, 3, 4 })); } }); } cc = future.syncUninterruptibly().channel(); Throwable cause = sslHandler.handshakeFuture().await().cause(); assertThat(cause, CoreMatchers.<Throwable>instanceOf(SSLException.class)); assertThat(cause.getMessage(), containsString("timed out")); } finally { if (cc != null) { cc.close().syncUninterruptibly(); } if (sc != null) { sc.close().syncUninterruptibly(); } group.shutdownGracefully(); ReferenceCountUtil.release(sslClientCtx); } } @Test public void testHandshakeWithExecutorThatExecuteDirectlyJDK() throws Throwable { testHandshakeWithExecutor(DIRECT_EXECUTOR, SslProvider.JDK, false); } @Test public void testHandshakeWithImmediateExecutorJDK() throws Throwable { testHandshakeWithExecutor(ImmediateExecutor.INSTANCE, SslProvider.JDK, false); } @Test public void testHandshakeWithImmediateEventExecutorJDK() throws Throwable { testHandshakeWithExecutor(ImmediateEventExecutor.INSTANCE, SslProvider.JDK, false); } @Test public void testHandshakeWithExecutorJDK() throws Throwable { ExecutorService executorService = Executors.newCachedThreadPool(); try { testHandshakeWithExecutor(executorService, SslProvider.JDK, false); } finally { executorService.shutdown(); } } @Test public void testHandshakeWithExecutorThatExecuteDirectlyOpenSsl() throws Throwable { OpenSsl.ensureAvailability(); testHandshakeWithExecutor(DIRECT_EXECUTOR, SslProvider.OPENSSL, false); } @Test public void testHandshakeWithImmediateExecutorOpenSsl() throws Throwable { OpenSsl.ensureAvailability(); testHandshakeWithExecutor(ImmediateExecutor.INSTANCE, SslProvider.OPENSSL, false); } @Test public void testHandshakeWithImmediateEventExecutorOpenSsl() throws Throwable { OpenSsl.ensureAvailability(); testHandshakeWithExecutor(ImmediateEventExecutor.INSTANCE, 
SslProvider.OPENSSL, false); } @Test public void testHandshakeWithExecutorOpenSsl() throws Throwable { OpenSsl.ensureAvailability(); ExecutorService executorService = Executors.newCachedThreadPool(); try { testHandshakeWithExecutor(executorService, SslProvider.OPENSSL, false); } finally { executorService.shutdown(); } } @Test public void testHandshakeMTLSWithExecutorThatExecuteDirectlyJDK() throws Throwable { testHandshakeWithExecutor(DIRECT_EXECUTOR, SslProvider.JDK, true); } @Test public void testHandshakeMTLSWithImmediateExecutorJDK() throws Throwable { testHandshakeWithExecutor(ImmediateExecutor.INSTANCE, SslProvider.JDK, true); } @Test public void testHandshakeMTLSWithImmediateEventExecutorJDK() throws Throwable { testHandshakeWithExecutor(ImmediateEventExecutor.INSTANCE, SslProvider.JDK, true); } @Test public void testHandshakeMTLSWithExecutorJDK() throws Throwable { ExecutorService executorService = Executors.newCachedThreadPool(); try { testHandshakeWithExecutor(executorService, SslProvider.JDK, true); } finally { executorService.shutdown(); } } @Test public void testHandshakeMTLSWithExecutorThatExecuteDirectlyOpenSsl() throws Throwable { OpenSsl.ensureAvailability(); testHandshakeWithExecutor(DIRECT_EXECUTOR, SslProvider.OPENSSL, true); } @Test public void testHandshakeMTLSWithImmediateExecutorOpenSsl() throws Throwable { OpenSsl.ensureAvailability(); testHandshakeWithExecutor(ImmediateExecutor.INSTANCE, SslProvider.OPENSSL, true); } @Test public void testHandshakeMTLSWithImmediateEventExecutorOpenSsl() throws Throwable { OpenSsl.ensureAvailability(); testHandshakeWithExecutor(ImmediateEventExecutor.INSTANCE, SslProvider.OPENSSL, true); } @Test public void testHandshakeMTLSWithExecutorOpenSsl() throws Throwable { OpenSsl.ensureAvailability(); ExecutorService executorService = Executors.newCachedThreadPool(); try { testHandshakeWithExecutor(executorService, SslProvider.OPENSSL, true); } finally { executorService.shutdown(); } } private static void 
testHandshakeWithExecutor(Executor executor, SslProvider provider, boolean mtls) throws Throwable { final SelfSignedCertificate cert = new SelfSignedCertificate(); final SslContext sslClientCtx; final SslContext sslServerCtx; if (mtls) { sslClientCtx = SslContextBuilder.forClient().protocols(SslProtocols.TLS_v1_2) .trustManager(InsecureTrustManagerFactory.INSTANCE).keyManager(cert.key(), cert.cert()) .sslProvider(provider).build(); sslServerCtx = SslContextBuilder.forServer(cert.key(), cert.cert()).protocols(SslProtocols.TLS_v1_2) .trustManager(InsecureTrustManagerFactory.INSTANCE) .clientAuth(ClientAuth.REQUIRE) .sslProvider(provider).build(); } else { sslClientCtx = SslContextBuilder.forClient() .trustManager(InsecureTrustManagerFactory.INSTANCE) .sslProvider(provider).build(); sslServerCtx = SslContextBuilder.forServer(cert.key(), cert.cert()) .sslProvider(provider).build(); } EventLoopGroup group = new NioEventLoopGroup(); Channel sc = null; Channel cc = null; final SslHandler clientSslHandler = new SslHandler( sslClientCtx.newEngine(UnpooledByteBufAllocator.DEFAULT), executor); final SslHandler serverSslHandler = new SslHandler( sslServerCtx.newEngine(UnpooledByteBufAllocator.DEFAULT), executor); final AtomicReference<Throwable> causeRef = new AtomicReference<Throwable>(); try { sc = new ServerBootstrap() .group(group) .channel(NioServerSocketChannel.class) .childHandler(new ChannelInitializer<Channel>() { @Override protected void initChannel(Channel ch) { ch.pipeline().addLast(serverSslHandler); ch.pipeline().addLast(new ChannelInboundHandlerAdapter() { @Override public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) { causeRef.compareAndSet(null, cause); } }); } }) .bind(new InetSocketAddress(0)).syncUninterruptibly().channel(); ChannelFuture future = new Bootstrap() .group(group) .channel(NioSocketChannel.class) .handler(new ChannelInitializer<Channel>() { @Override protected void initChannel(Channel ch) { 
ch.pipeline().addLast(clientSslHandler); ch.pipeline().addLast(new ChannelInboundHandlerAdapter() { @Override public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) { causeRef.compareAndSet(null, cause); } }); } }).connect(sc.localAddress()); cc = future.syncUninterruptibly().channel(); assertTrue(clientSslHandler.handshakeFuture().await().isSuccess()); assertTrue(serverSslHandler.handshakeFuture().await().isSuccess()); Throwable cause = causeRef.get(); if (cause != null) { throw cause; } } finally { if (cc != null) { cc.close().syncUninterruptibly(); } if (sc != null) { sc.close().syncUninterruptibly(); } group.shutdownGracefully(); ReferenceCountUtil.release(sslClientCtx); } } @Test public void testClientHandshakeTimeoutBecauseExecutorNotExecute() throws Exception { testHandshakeTimeoutBecauseExecutorNotExecute(true); } @Test public void testServerHandshakeTimeoutBecauseExecutorNotExecute() throws Exception { testHandshakeTimeoutBecauseExecutorNotExecute(false); } private static void testHandshakeTimeoutBecauseExecutorNotExecute(final boolean client) throws Exception { final SslContext sslClientCtx = SslContextBuilder.forClient() .trustManager(InsecureTrustManagerFactory.INSTANCE) .sslProvider(SslProvider.JDK).build(); final SelfSignedCertificate cert = new SelfSignedCertificate(); final SslContext sslServerCtx = SslContextBuilder.forServer(cert.key(), cert.cert()) .sslProvider(SslProvider.JDK).build(); EventLoopGroup group = new NioEventLoopGroup(); Channel sc = null; Channel cc = null; final SslHandler clientSslHandler = sslClientCtx.newHandler(UnpooledByteBufAllocator.DEFAULT, new Executor() { @Override public void execute(Runnable command) { if (!client) { command.run(); } // Do nothing to simulate slow execution. 
} }); if (client) { clientSslHandler.setHandshakeTimeout(100, TimeUnit.MILLISECONDS); } final SslHandler serverSslHandler = sslServerCtx.newHandler(UnpooledByteBufAllocator.DEFAULT, new Executor() { @Override public void execute(Runnable command) { if (client) { command.run(); } // Do nothing to simulate slow execution. } }); if (!client) { serverSslHandler.setHandshakeTimeout(100, TimeUnit.MILLISECONDS); } try { sc = new ServerBootstrap() .group(group) .channel(NioServerSocketChannel.class) .childHandler(serverSslHandler) .bind(new InetSocketAddress(0)).syncUninterruptibly().channel(); ChannelFuture future = new Bootstrap() .group(group) .channel(NioSocketChannel.class) .handler(new ChannelInitializer<Channel>() { @Override protected void initChannel(Channel ch) { ch.pipeline().addLast(clientSslHandler); } }).connect(sc.localAddress()); cc = future.syncUninterruptibly().channel(); if (client) { Throwable cause = clientSslHandler.handshakeFuture().await().cause(); assertThat(cause, CoreMatchers.<Throwable>instanceOf(SslHandshakeTimeoutException.class)); assertFalse(serverSslHandler.handshakeFuture().await().isSuccess()); } else { Throwable cause = serverSslHandler.handshakeFuture().await().cause(); assertThat(cause, CoreMatchers.<Throwable>instanceOf(SslHandshakeTimeoutException.class)); assertFalse(clientSslHandler.handshakeFuture().await().isSuccess()); } } finally { if (cc != null) { cc.close().syncUninterruptibly(); } if (sc != null) { sc.close().syncUninterruptibly(); } group.shutdownGracefully(); ReferenceCountUtil.release(sslClientCtx); } } @Test @Timeout(value = 5000, unit = TimeUnit.MILLISECONDS) public void testSessionTicketsWithTLSv12() throws Throwable { testSessionTickets(SslProvider.OPENSSL, SslProtocols.TLS_v1_2, true); } @Test @Timeout(value = 5000, unit = TimeUnit.MILLISECONDS) public void testSessionTicketsWithTLSv13() throws Throwable { assumeTrue(SslProvider.isTlsv13Supported(SslProvider.OPENSSL)); testSessionTickets(SslProvider.OPENSSL, 
SslProtocols.TLS_v1_3, true); } @Test @Timeout(value = 5000, unit = TimeUnit.MILLISECONDS) public void testSessionTicketsWithTLSv12AndNoKey() throws Throwable { testSessionTickets(SslProvider.OPENSSL, SslProtocols.TLS_v1_2, false); } @Test @Timeout(value = 5000, unit = TimeUnit.MILLISECONDS) public void testSessionTicketsWithTLSv13AndNoKey() throws Throwable { assumeTrue(OpenSsl.isTlsv13Supported()); testSessionTickets(SslProvider.OPENSSL, SslProtocols.TLS_v1_3, false); } private static void testSessionTickets(SslProvider provider, String protocol, boolean withKey) throws Throwable { OpenSsl.ensureAvailability(); final SslContext sslClientCtx = SslContextBuilder.forClient() .trustManager(InsecureTrustManagerFactory.INSTANCE) .sslProvider(provider) .protocols(protocol) .build(); // Explicit enable session cache as it's disabled by default atm. ((OpenSslContext) sslClientCtx).sessionContext() .setSessionCacheEnabled(true); final SelfSignedCertificate cert = new SelfSignedCertificate(); final SslContext sslServerCtx = SslContextBuilder.forServer(cert.key(), cert.cert()) .sslProvider(provider) .protocols(protocol) .build(); if (withKey) { OpenSslSessionTicketKey key = new OpenSslSessionTicketKey(new byte[OpenSslSessionTicketKey.NAME_SIZE], new byte[OpenSslSessionTicketKey.HMAC_KEY_SIZE], new byte[OpenSslSessionTicketKey.AES_KEY_SIZE]); ((OpenSslSessionContext) sslClientCtx.sessionContext()).setTicketKeys(key); ((OpenSslSessionContext) sslServerCtx.sessionContext()).setTicketKeys(key); } else { ((OpenSslSessionContext) sslClientCtx.sessionContext()).setTicketKeys(); ((OpenSslSessionContext) sslServerCtx.sessionContext()).setTicketKeys(); } EventLoopGroup group = new NioEventLoopGroup(); Channel sc = null; final byte[] bytes = new byte[96]; PlatformDependent.threadLocalRandom().nextBytes(bytes); try { final AtomicReference<AssertionError> assertErrorRef = new AtomicReference<AssertionError>(); sc = new ServerBootstrap() .group(group) 
.channel(NioServerSocketChannel.class) .childHandler(new ChannelInitializer<Channel>() { @Override protected void initChannel(Channel ch) { final SslHandler sslHandler = sslServerCtx.newHandler(ch.alloc()); ch.pipeline().addLast(sslServerCtx.newHandler(UnpooledByteBufAllocator.DEFAULT)); ch.pipeline().addLast(new ChannelInboundHandlerAdapter() { private int handshakeCount; @Override public void userEventTriggered(ChannelHandlerContext ctx, Object evt) { if (evt instanceof SslHandshakeCompletionEvent) { handshakeCount++; ReferenceCountedOpenSslEngine engine = (ReferenceCountedOpenSslEngine) sslHandler.engine(); // This test only works for non TLSv1.3 as TLSv1.3 will establish sessions after // the handshake is done. // See https://www.openssl.org/docs/man1.1.1/man3/SSL_CTX_sess_set_get_cb.html if (!SslProtocols.TLS_v1_3.equals(engine.getSession().getProtocol())) { // First should not re-use the session try { assertEquals(handshakeCount > 1, engine.isSessionReused()); } catch (AssertionError error) { assertErrorRef.set(error); return; } } ctx.writeAndFlush(Unpooled.wrappedBuffer(bytes)); } } }); } }) .bind(new InetSocketAddress(0)).syncUninterruptibly().channel(); InetSocketAddress serverAddr = (InetSocketAddress) sc.localAddress(); testSessionTickets(serverAddr, group, sslClientCtx, bytes, false); testSessionTickets(serverAddr, group, sslClientCtx, bytes, true); AssertionError error = assertErrorRef.get(); if (error != null) { throw error; } } finally { if (sc != null) { sc.close().syncUninterruptibly(); } group.shutdownGracefully(); ReferenceCountUtil.release(sslClientCtx); } } private static void testSessionTickets(InetSocketAddress serverAddress, EventLoopGroup group, SslContext sslClientCtx, final byte[] bytes, boolean isReused) throws Throwable { Channel cc = null; final BlockingQueue<Object> queue = new LinkedBlockingQueue<Object>(); try { final SslHandler clientSslHandler = sslClientCtx.newHandler(UnpooledByteBufAllocator.DEFAULT, 
serverAddress.getAddress().getHostAddress(), serverAddress.getPort()); ChannelFuture future = new Bootstrap() .group(group) .channel(NioSocketChannel.class) .handler(new ChannelInitializer<Channel>() { @Override protected void initChannel(Channel ch) { ch.pipeline().addLast(clientSslHandler); ch.pipeline().addLast(new ByteToMessageDecoder() { @Override protected void decode(ChannelHandlerContext ctx, ByteBuf in, List<Object> out) { if (in.readableBytes() == bytes.length) { queue.add(in.readBytes(bytes.length)); } } @Override public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) { queue.add(cause); } }); } }).connect(serverAddress); cc = future.syncUninterruptibly().channel(); assertTrue(clientSslHandler.handshakeFuture().sync().isSuccess()); ReferenceCountedOpenSslEngine engine = (ReferenceCountedOpenSslEngine) clientSslHandler.engine(); // This test only works for non TLSv1.3 as TLSv1.3 will establish sessions after // the handshake is done. // See https://www.openssl.org/docs/man1.1.1/man3/SSL_CTX_sess_set_get_cb.html if (!SslProtocols.TLS_v1_3.equals(engine.getSession().getProtocol())) { assertEquals(isReused, engine.isSessionReused()); } Object obj = queue.take(); if (obj instanceof ByteBuf) { ByteBuf buffer = (ByteBuf) obj; ByteBuf expected = Unpooled.wrappedBuffer(bytes); try { assertEquals(expected, buffer); } finally { expected.release(); buffer.release(); } } else { throw (Throwable) obj; } } finally { if (cc != null) { cc.close().syncUninterruptibly(); } } } @Test @Timeout(value = 10000, unit = TimeUnit.MILLISECONDS) public void testHandshakeFailureOnlyFireExceptionOnce() throws Exception { final SslContext sslClientCtx = SslContextBuilder.forClient() .trustManager(new X509ExtendedTrustManager() { @Override public void checkClientTrusted(X509Certificate[] chain, String authType, Socket socket) throws CertificateException { failVerification(); } @Override public void checkServerTrusted(X509Certificate[] chain, String authType, Socket 
socket) throws CertificateException { failVerification(); } @Override public void checkClientTrusted(X509Certificate[] chain, String authType, SSLEngine engine) throws CertificateException { failVerification(); } @Override public void checkServerTrusted(X509Certificate[] chain, String authType, SSLEngine engine) throws CertificateException { failVerification(); } @Override public void checkClientTrusted(X509Certificate[] chain, String authType) throws CertificateException { failVerification(); } @Override public void checkServerTrusted(X509Certificate[] chain, String authType) throws CertificateException { failVerification(); } @Override public X509Certificate[] getAcceptedIssuers() { return EmptyArrays.EMPTY_X509_CERTIFICATES; } private void failVerification() throws CertificateException { throw new CertificateException(); } }) .sslProvider(SslProvider.JDK).build(); final SelfSignedCertificate cert = new SelfSignedCertificate(); final SslContext sslServerCtx = SslContextBuilder.forServer(cert.key(), cert.cert()) .sslProvider(SslProvider.JDK).build(); EventLoopGroup group = new NioEventLoopGroup(); Channel sc = null; final SslHandler clientSslHandler = sslClientCtx.newHandler(UnpooledByteBufAllocator.DEFAULT); final SslHandler serverSslHandler = sslServerCtx.newHandler(UnpooledByteBufAllocator.DEFAULT); try { final Object terminalEvent = new Object(); final BlockingQueue<Object> errorQueue = new LinkedBlockingQueue<Object>(); sc = new ServerBootstrap() .group(group) .channel(NioServerSocketChannel.class) .childHandler(new ChannelInitializer<Channel>() { @Override protected void initChannel(Channel ch) { ch.pipeline().addLast(serverSslHandler); ch.pipeline().addLast(new ChannelInboundHandlerAdapter() { @Override public void exceptionCaught(final ChannelHandlerContext ctx, Throwable cause) { errorQueue.add(cause); } @Override public void channelInactive(ChannelHandlerContext ctx) { errorQueue.add(terminalEvent); } }); } }) .bind(new 
InetSocketAddress(0)).syncUninterruptibly().channel(); final ChannelFuture future = new Bootstrap() .group(group) .channel(NioSocketChannel.class) .handler(new ChannelInitializer<Channel>() { @Override protected void initChannel(Channel ch) { ch.pipeline().addLast(clientSslHandler); } }).connect(sc.localAddress()); future.syncUninterruptibly(); clientSslHandler.handshakeFuture().addListener(new FutureListener<Channel>() { @Override public void operationComplete(Future<Channel> f) { future.channel().close(); } }); assertFalse(clientSslHandler.handshakeFuture().await().isSuccess()); assertFalse(serverSslHandler.handshakeFuture().await().isSuccess()); Object error = errorQueue.take(); assertThat(error, Matchers.instanceOf(DecoderException.class)); assertThat(((Throwable) error).getCause(), Matchers.<Throwable>instanceOf(SSLException.class)); Object terminal = errorQueue.take(); assertSame(terminalEvent, terminal); assertNull(errorQueue.poll(1, TimeUnit.MILLISECONDS)); } finally { if (sc != null) { sc.close().syncUninterruptibly(); } group.shutdownGracefully(); } } @Test public void testHandshakeFailureCipherMissmatchTLSv12Jdk() throws Exception { testHandshakeFailureCipherMissmatch(SslProvider.JDK, false); } @Test public void testHandshakeFailureCipherMissmatchTLSv13Jdk() throws Exception { assumeTrue(SslProvider.isTlsv13Supported(SslProvider.JDK)); testHandshakeFailureCipherMissmatch(SslProvider.JDK, true); } @Test public void testHandshakeFailureCipherMissmatchTLSv12OpenSsl() throws Exception { OpenSsl.ensureAvailability(); testHandshakeFailureCipherMissmatch(SslProvider.OPENSSL, false); } @Test public void testHandshakeFailureCipherMissmatchTLSv13OpenSsl() throws Exception { OpenSsl.ensureAvailability(); assumeTrue(SslProvider.isTlsv13Supported(SslProvider.OPENSSL)); assumeFalse(OpenSsl.isBoringSSL(), "BoringSSL does not support setting ciphers for TLSv1.3 explicit"); testHandshakeFailureCipherMissmatch(SslProvider.OPENSSL, true); } private static void 
testHandshakeFailureCipherMissmatch(SslProvider provider, boolean tls13) throws Exception { final String clientCipher; final String serverCipher; final String protocol; if (tls13) { clientCipher = "TLS_AES_128_GCM_SHA256"; serverCipher = "TLS_AES_256_GCM_SHA384"; protocol = SslProtocols.TLS_v1_3; } else { clientCipher = "TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256"; serverCipher = "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384"; protocol = SslProtocols.TLS_v1_2; } final SslContext sslClientCtx = SslContextBuilder.forClient() .trustManager(InsecureTrustManagerFactory.INSTANCE) .protocols(protocol) .ciphers(Collections.singleton(clientCipher)) .sslProvider(provider).build(); final SelfSignedCertificate cert = new SelfSignedCertificate(); final SslContext sslServerCtx = SslContextBuilder.forServer(cert.key(), cert.cert()) .protocols(protocol) .ciphers(Collections.singleton(serverCipher)) .sslProvider(provider).build(); EventLoopGroup group = new NioEventLoopGroup(); Channel sc = null; Channel cc = null; final SslHandler clientSslHandler = sslClientCtx.newHandler(UnpooledByteBufAllocator.DEFAULT); final SslHandler serverSslHandler = sslServerCtx.newHandler(UnpooledByteBufAllocator.DEFAULT); class SslEventHandler extends ChannelInboundHandlerAdapter { private final AtomicReference<SslHandshakeCompletionEvent> ref; SslEventHandler(AtomicReference<SslHandshakeCompletionEvent> ref) { this.ref = ref; } @Override public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception { if (evt instanceof SslHandshakeCompletionEvent) { ref.set((SslHandshakeCompletionEvent) evt); } super.userEventTriggered(ctx, evt); } } final AtomicReference<SslHandshakeCompletionEvent> clientEvent = new AtomicReference<SslHandshakeCompletionEvent>(); final AtomicReference<SslHandshakeCompletionEvent> serverEvent = new AtomicReference<SslHandshakeCompletionEvent>(); try { sc = new ServerBootstrap() .group(group) .channel(NioServerSocketChannel.class) .childHandler(new 
ChannelInitializer<Channel>() { @Override protected void initChannel(Channel ch) throws Exception { ch.pipeline().addLast(serverSslHandler); ch.pipeline().addLast(new SslEventHandler(serverEvent)); } }) .bind(new InetSocketAddress(0)).syncUninterruptibly().channel(); ChannelFuture future = new Bootstrap() .group(group) .channel(NioSocketChannel.class) .handler(new ChannelInitializer<Channel>() { @Override protected void initChannel(Channel ch) { ch.pipeline().addLast(clientSslHandler); ch.pipeline().addLast(new SslEventHandler(clientEvent)); } }).connect(sc.localAddress()); cc = future.syncUninterruptibly().channel(); Throwable clientCause = clientSslHandler.handshakeFuture().await().cause(); assertThat(clientCause, CoreMatchers.<Throwable>instanceOf(SSLException.class)); assertThat(clientCause.getCause(), not(CoreMatchers.<Throwable>instanceOf(ClosedChannelException.class))); Throwable serverCause = serverSslHandler.handshakeFuture().await().cause(); assertThat(serverCause, CoreMatchers.<Throwable>instanceOf(SSLException.class)); assertThat(serverCause.getCause(), not(CoreMatchers.<Throwable>instanceOf(ClosedChannelException.class))); cc.close().syncUninterruptibly(); sc.close().syncUninterruptibly(); Throwable eventClientCause = clientEvent.get().cause(); assertThat(eventClientCause, CoreMatchers.<Throwable>instanceOf(SSLException.class)); assertThat(eventClientCause.getCause(), not(CoreMatchers.<Throwable>instanceOf(ClosedChannelException.class))); Throwable serverEventCause = serverEvent.get().cause(); assertThat(serverEventCause, CoreMatchers.<Throwable>instanceOf(SSLException.class)); assertThat(serverEventCause.getCause(), not(CoreMatchers.<Throwable>instanceOf(ClosedChannelException.class))); } finally { group.shutdownGracefully(); ReferenceCountUtil.release(sslClientCtx); } } @Test public void testHandshakeEventsTls12JDK() throws Exception { testHandshakeEvents(SslProvider.JDK, SslProtocols.TLS_v1_2); } @Test public void testHandshakeEventsTls12Openssl() 
throws Exception { OpenSsl.ensureAvailability(); testHandshakeEvents(SslProvider.OPENSSL, SslProtocols.TLS_v1_2); } @Test public void testHandshakeEventsTls13JDK() throws Exception { assumeTrue(SslProvider.isTlsv13Supported(SslProvider.JDK)); testHandshakeEvents(SslProvider.JDK, SslProtocols.TLS_v1_3); } @Test public void testHandshakeEventsTls13Openssl() throws Exception { OpenSsl.ensureAvailability(); assumeTrue(SslProvider.isTlsv13Supported(SslProvider.OPENSSL)); testHandshakeEvents(SslProvider.OPENSSL, SslProtocols.TLS_v1_3); } private void testHandshakeEvents(SslProvider provider, String protocol) throws Exception { final SslContext sslClientCtx = SslContextBuilder.forClient() .trustManager(InsecureTrustManagerFactory.INSTANCE) .protocols(protocol) .sslProvider(provider).build(); final SelfSignedCertificate cert = new SelfSignedCertificate(); final SslContext sslServerCtx = SslContextBuilder.forServer(cert.key(), cert.cert()) .protocols(protocol) .sslProvider(provider).build(); EventLoopGroup group = new NioEventLoopGroup(); final LinkedBlockingQueue<SslHandshakeCompletionEvent> serverCompletionEvents = new LinkedBlockingQueue<SslHandshakeCompletionEvent>(); final LinkedBlockingQueue<SslHandshakeCompletionEvent> clientCompletionEvents = new LinkedBlockingQueue<SslHandshakeCompletionEvent>(); try { Channel sc = new ServerBootstrap() .group(group) .channel(NioServerSocketChannel.class) .childHandler(new ChannelInitializer<Channel>() { @Override protected void initChannel(Channel ch) throws Exception { ch.pipeline().addLast(sslServerCtx.newHandler(UnpooledByteBufAllocator.DEFAULT)); ch.pipeline().addLast(new SslHandshakeCompletionEventHandler(serverCompletionEvents)); } }) .bind(new InetSocketAddress(0)).syncUninterruptibly().channel(); Bootstrap bs = new Bootstrap() .group(group) .channel(NioSocketChannel.class) .handler(new ChannelInitializer<Channel>() { @Override protected void initChannel(Channel ch) { ch.pipeline().addLast(sslClientCtx.newHandler( 
UnpooledByteBufAllocator.DEFAULT, "netty.io", 9999)); ch.pipeline().addLast(new SslHandshakeCompletionEventHandler(clientCompletionEvents)); } }) .remoteAddress(sc.localAddress()); Channel cc1 = bs.connect().sync().channel(); Channel cc2 = bs.connect().sync().channel(); // We expect 4 events as we have 2 connections and for each connection there should be one event // on the server-side and one on the client-side. for (int i = 0; i < 2; i++) { SslHandshakeCompletionEvent event = clientCompletionEvents.take(); assertTrue(event.isSuccess()); } for (int i = 0; i < 2; i++) { SslHandshakeCompletionEvent event = serverCompletionEvents.take(); assertTrue(event.isSuccess()); } cc1.close().sync(); cc2.close().sync(); sc.close().sync(); assertEquals(0, clientCompletionEvents.size()); assertEquals(0, serverCompletionEvents.size()); } finally { group.shutdownGracefully(); ReferenceCountUtil.release(sslClientCtx); ReferenceCountUtil.release(sslServerCtx); } } private static class SslHandshakeCompletionEventHandler extends ChannelInboundHandlerAdapter { private final Queue<SslHandshakeCompletionEvent> completionEvents; SslHandshakeCompletionEventHandler(Queue<SslHandshakeCompletionEvent> completionEvents) { this.completionEvents = completionEvents; } @Override public void userEventTriggered(ChannelHandlerContext ctx, Object evt) { if (evt instanceof SslHandshakeCompletionEvent) { completionEvents.add((SslHandshakeCompletionEvent) evt); } } @Override public boolean isSharable() { return true; } } }
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
package org.elasticsearch.xpack.core.ml.action;

import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.ActionType;
import org.elasticsearch.common.xcontent.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.StatusToXContentObject;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot;
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshotField;
import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;

import java.io.IOException;
import java.util.Objects;

/**
 * Action that updates the mutable fields (description, retain flag) of a
 * persisted ML model snapshot, identified by job id + snapshot id.
 */
public class UpdateModelSnapshotAction extends ActionType<UpdateModelSnapshotAction.Response> {

    public static final UpdateModelSnapshotAction INSTANCE = new UpdateModelSnapshotAction();
    public static final String NAME = "cluster:admin/xpack/ml/job/model_snapshots/update";

    private UpdateModelSnapshotAction() {
        super(NAME, Response::new);
    }

    /**
     * Request carrying the target job/snapshot ids plus the optional fields to
     * update. {@code description} and {@code retain} are nullable: a null value
     * means "leave unchanged".
     */
    public static class Request extends ActionRequest implements ToXContentObject {

        private static final ObjectParser<Request, Void> PARSER = new ObjectParser<>(NAME, Request::new);

        static {
            PARSER.declareString((request, jobId) -> request.jobId = jobId, Job.ID);
            PARSER.declareString((request, snapshotId) -> request.snapshotId = snapshotId, ModelSnapshotField.SNAPSHOT_ID);
            PARSER.declareString(Request::setDescription, ModelSnapshot.DESCRIPTION);
            PARSER.declareBoolean(Request::setRetain, ModelSnapshot.RETAIN);
        }

        /**
         * Parses a request body; ids supplied as arguments (typically from the
         * REST path) override any ids present in the parsed content.
         */
        public static Request parseRequest(String jobId, String snapshotId, XContentParser parser) {
            Request request = PARSER.apply(parser, null);
            if (jobId != null) {
                request.jobId = jobId;
            }
            if (snapshotId != null) {
                request.snapshotId = snapshotId;
            }
            return request;
        }

        private String jobId;
        private String snapshotId;
        private String description;   // optional; null = do not change
        private Boolean retain;       // optional; null = do not change

        public Request() {
        }

        public Request(StreamInput in) throws IOException {
            super(in);
            jobId = in.readString();
            snapshotId = in.readString();
            description = in.readOptionalString();
            retain = in.readOptionalBoolean();
        }

        public Request(String jobId, String snapshotId) {
            this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName());
            this.snapshotId = ExceptionsHelper.requireNonNull(snapshotId, ModelSnapshotField.SNAPSHOT_ID.getPreferredName());
        }

        public String getJobId() {
            return jobId;
        }

        public String getSnapshotId() {
            return snapshotId;
        }

        public String getDescription() {
            return description;
        }

        public void setDescription(String description) {
            this.description = description;
        }

        public Boolean getRetain() {
            return retain;
        }

        public void setRetain(Boolean retain) {
            this.retain = retain;
        }

        @Override
        public ActionRequestValidationException validate() {
            // Ids are enforced non-null at construction/parse time; nothing further to validate.
            return null;
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeString(jobId);
            out.writeString(snapshotId);
            out.writeOptionalString(description);
            out.writeOptionalBoolean(retain);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            builder.field(Job.ID.getPreferredName(), jobId);
            builder.field(ModelSnapshotField.SNAPSHOT_ID.getPreferredName(), snapshotId);
            // Only emit the optional fields when they were explicitly set.
            if (description != null) {
                builder.field(ModelSnapshot.DESCRIPTION.getPreferredName(), description);
            }
            if (retain != null) {
                builder.field(ModelSnapshot.RETAIN.getPreferredName(), retain);
            }
            builder.endObject();
            return builder;
        }

        @Override
        public int hashCode() {
            return Objects.hash(jobId, snapshotId, description, retain);
        }

        @Override
        public boolean equals(Object obj) {
            // Reflexive short-circuit per the Object.equals contract.
            if (this == obj) {
                return true;
            }
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            Request other = (Request) obj;
            return Objects.equals(jobId, other.jobId)
                && Objects.equals(snapshotId, other.snapshotId)
                && Objects.equals(description, other.description)
                && Objects.equals(retain, other.retain);
        }
    }

    /**
     * Response wrapping the updated {@link ModelSnapshot}; rendered as
     * {@code {"acknowledged": true, "model": {...}}} with HTTP 200.
     */
    public static class Response extends ActionResponse implements StatusToXContentObject {

        private static final ParseField ACKNOWLEDGED = new ParseField("acknowledged");
        private static final ParseField MODEL = new ParseField("model");

        private final ModelSnapshot model;

        public Response(StreamInput in) throws IOException {
            super(in);
            model = new ModelSnapshot(in);
        }

        public Response(ModelSnapshot modelSnapshot) {
            model = modelSnapshot;
        }

        public ModelSnapshot getModel() {
            return model;
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            model.writeTo(out);
        }

        @Override
        public RestStatus status() {
            return RestStatus.OK;
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            builder.field(ACKNOWLEDGED.getPreferredName(), true);
            builder.field(MODEL.getPreferredName());
            builder = model.toXContent(builder, params);
            builder.endObject();
            return builder;
        }

        @Override
        public int hashCode() {
            return Objects.hash(model);
        }

        @Override
        public boolean equals(Object obj) {
            // Reflexive short-circuit per the Object.equals contract.
            if (this == obj) {
                return true;
            }
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            Response other = (Response) obj;
            return Objects.equals(model, other.model);
        }

        @Override
        public final String toString() {
            return Strings.toString(this);
        }
    }
}
package objects; import screens.GameScreen; import utils.AssetLord; import utils.LevelGenerate; import com.badlogic.gdx.graphics.Color; import com.badlogic.gdx.graphics.Texture; import com.badlogic.gdx.graphics.Texture.TextureFilter; import com.badlogic.gdx.graphics.g2d.ParticleEffect; import com.badlogic.gdx.graphics.g2d.Sprite; import com.badlogic.gdx.graphics.g2d.SpriteBatch; import com.badlogic.gdx.graphics.g2d.TextureRegion; import com.badlogic.gdx.math.Vector2; import com.badlogic.gdx.physics.box2d.Body; import com.badlogic.gdx.physics.box2d.BodyDef; import com.badlogic.gdx.physics.box2d.Fixture; import com.badlogic.gdx.physics.box2d.FixtureDef; import com.badlogic.gdx.physics.box2d.PolygonShape; import com.badlogic.gdx.physics.box2d.World; import com.badlogic.gdx.physics.box2d.BodyDef.BodyType; import com.softnuke.biosleep.MyGame; public class Lasers { //TODO: under construction, incomplete float width, height; private Vector2 position; Body body; World world; PolygonShape shape; Fixture bodyFixture; TextureRegion texRegion; Sprite onSprite, offSprite; static final Vector2 Center = new Vector2(0,0); float bHEIGHT = MyGame.bHEIGHT; float bWIDTH = MyGame.bWIDTH; ParticleEffect effect; Light lightE, lightD; //false means not visible public boolean visible = false; //current type public boolean STATE_ENABLED = false; Sprite glow, ray; public Color color = new Color(Color.RED); public Color rayColor = new Color(Color.WHITE); public Lasers(World wor, Vector2 pos, Light le, Light ld){ position = pos; height = 0.6f; width = 0.6f; lightE = le; lightD = ld; init(wor); } private void init(World w){ world = w; visible = true; // if(POWER_TYPE == SCORE_BONUS) // texRegion = new TextureRegion(GameScreen.getInstance().getAssetLord().manager.get(AssetLord.color_blocks_texture, Texture.class), 32*3, 32*1, 32, 32); // else if(POWER_TYPE == SHIELD) // texRegion = new TextureRegion(GameScreen.getInstance().getAssetLord().manager.get(AssetLord.color_blocks_texture, 
Texture.class), 32*2, 32*1, 32, 32); //TextureAtlas atlas = GameScreen.getInstance().getAssetLord().manager.get(AssetLord.game_atlas, TextureAtlas.class); Texture texE = new Texture("level/switch-enabled.png"); texE.setFilter(TextureFilter.Nearest, TextureFilter.Nearest); Texture texD = new Texture("level/switch-disabled.png"); texD.setFilter(TextureFilter.Nearest, TextureFilter.Nearest); onSprite = new Sprite(texE); onSprite.setSize(width, width * onSprite.getHeight()/onSprite.getWidth()); offSprite = new Sprite(texD); offSprite.setSize(width, width * offSprite.getHeight()/offSprite.getWidth()); onSprite.setPosition(position.x - width/2, position.y - height/2); offSprite.setPosition(position.x - width/2, position.y - height/2); if(GameScreen.PLAYER_PARTICLES){ //TODO:change this later effect = new ParticleEffect(GameScreen.getInstance().getAssetLord().manager.get(AssetLord.portal_particle, ParticleEffect.class)); effect.scaleEffect(0.5f); effect.setPosition(position.x - width/6, position.y + height/2); //effect.start(); //effect.setEmittersCleanUpBlendFunction(false); } //reset lights lightE.enable(); lightD.disable(); create(); } private void create(){ BodyDef bodyDef = new BodyDef(); bodyDef.type = BodyType.StaticBody; bodyDef.position.set(position); shape = new PolygonShape(); shape.setAsBox(width/2, height/2, Enemy.CENTER_VECTOR, 0); FixtureDef fixtureDef = new FixtureDef(); fixtureDef.shape = shape; fixtureDef.density = 0.0f; fixtureDef.friction = 0.2f; fixtureDef.restitution = 0; fixtureDef.isSensor = true; fixtureDef.filter.categoryBits = LevelGenerate.CATEGORY_WALL; fixtureDef.filter.maskBits = (short) (LevelGenerate.CATEGORY_PLAYER); body = world.createBody(bodyDef); bodyFixture = body.createFixture(fixtureDef); body.setUserData("switch"); shape.dispose(); } public void render(SpriteBatch batch){ //effect.draw(batch); if(!visible) return; //check if it is gone off the screen without user consumption //if(body.getPosition().y+height < -bWIDTH) // 
setOffScreen(false); //batch.draw(texRegion, position.x-width/2, position.y-height/2, 0, 0, width, height, 1f, 1f, 0); if(STATE_ENABLED) onSprite.draw(batch); else offSprite.draw(batch); } public void renderParticles(SpriteBatch batch){ if(!visible) return; } public void update(float delta, float viewportWidth){ position = body.getPosition(); if(position.x > viewportWidth-bWIDTH*0.8 && position.x < viewportWidth+bWIDTH*0.8) visible = true; else visible = false; //effect.update(delta); } public void reset(){ visible = true; //effect.start(); STATE_ENABLED = false; lightE.disable(); lightD.enable(); if(true) return; } public Fixture getFixture(){ return bodyFixture; } public void setOffScreen(boolean collision){ //hide bodies visible = false; if(GameScreen.PLAYER_PARTICLES){ effect.allowCompletion(); } } public void toggle(){ STATE_ENABLED = !STATE_ENABLED; if(STATE_ENABLED){ lightE.enable(); lightD.disable(); } else{ lightE.disable(); lightD.enable(); } } // public void updatePreviousPos() { // previous = body.getPosition(); // } // // public void interpolate(float alpha, float invAlpha){ // Vector2 pos = body.getPosition(); // posx = pos.x * alpha + previous.x * invAlpha; // posy = pos.y * alpha + previous.y * invAlpha; // } public float getY(){ return position.y; } public float getX(){ return position.x; } public float getWidth(){ return width; } public float getHeight(){ return height; } public void dispose(){ world.destroyBody(body); if(GameScreen.PLAYER_PARTICLES){ effect.dispose(); } } }
package org.apache.lucene.index;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.IOException;
import java.util.Comparator;
import java.util.Map;

import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
import org.apache.lucene.codecs.FieldsConsumer;
import org.apache.lucene.codecs.PostingsConsumer;
import org.apache.lucene.codecs.TermStats;
import org.apache.lucene.codecs.TermsConsumer;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.RamUsageEstimator;

// TODO: break into separate freq and prox writers as
// codecs; make separate container (tii/tis/skip/*) that can
// be configured as any number of files 1..N

/**
 * Per-field consumer that accumulates postings (doc IDs, term frequencies,
 * positions, payloads, offsets) for one field in an in-RAM byte-slice format,
 * then serializes them to the codec in {@code flush}. Stream 0 holds
 * docID/freq data; stream 1 holds prox/offset data when enabled.
 */
final class FreqProxTermsWriterPerField extends TermsHashConsumerPerField implements Comparable<FreqProxTermsWriterPerField> {

  final FreqProxTermsWriter parent;
  final TermsHashPerField termsHashPerField;
  final FieldInfo fieldInfo;
  final DocumentsWriterPerThread.DocState docState;
  final FieldInvertState fieldState;

  // Decomposed view of the field's IndexOptions at the time we last saw it;
  // refreshed in reset() because options can change between flushes.
  private boolean hasFreq;
  private boolean hasProx;
  private boolean hasOffsets;

  PayloadAttribute payloadAttribute;
  OffsetAttribute offsetAttribute;

  public FreqProxTermsWriterPerField(TermsHashPerField termsHashPerField, FreqProxTermsWriter parent, FieldInfo fieldInfo) {
    this.termsHashPerField = termsHashPerField;
    this.parent = parent;
    this.fieldInfo = fieldInfo;
    docState = termsHashPerField.docState;
    fieldState = termsHashPerField.fieldState;
    setIndexOptions(fieldInfo.getIndexOptions());
  }

  /** Two byte-slice streams when positions are recorded, otherwise one. */
  @Override
  int getStreamCount() {
    if (!hasProx) {
      return 1;
    } else {
      return 2;
    }
  }

  /** Called when the field is done: record on FieldInfo whether payloads were seen. */
  @Override
  void finish() {
    if (hasPayloads) {
      fieldInfo.setStorePayloads();
    }
  }

  // Set to true in writeProx the first time a non-empty payload is written.
  boolean hasPayloads;

  @Override
  void skippingLongTerm() {}

  /** Orders per-field writers by field name. */
  @Override
  public int compareTo(FreqProxTermsWriterPerField other) {
    return fieldInfo.name.compareTo(other.fieldInfo.name);
  }

  // Called after flush
  void reset() {
    // Record, up front, whether our in-RAM format will be
    // with or without term freqs:
    setIndexOptions(fieldInfo.getIndexOptions());
    payloadAttribute = null;
  }

  private void setIndexOptions(IndexOptions indexOptions) {
    if (indexOptions == null) {
      // field could later be updated with indexed=true, so set everything on
      hasFreq = hasProx = hasOffsets = true;
    } else {
      // IndexOptions is ordered, so each flag implies the previous ones.
      hasFreq = indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS) >= 0;
      hasProx = indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0;
      hasOffsets = indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0;
    }
  }

  /** Returns true if at least one of the document's instances of this field is indexed. */
  @Override
  boolean start(IndexableField[] fields, int count) {
    for(int i=0;i<count;i++) {
      if (fields[i].fieldType().indexed()) {
        return true;
      }
    }
    return false;
  }

  /** Caches the payload/offset attributes for the field instance about to be inverted. */
  @Override
  void start(IndexableField f) {
    if (fieldState.attributeSource.hasAttribute(PayloadAttribute.class)) {
      payloadAttribute = fieldState.attributeSource.getAttribute(PayloadAttribute.class);
    } else {
      payloadAttribute = null;
    }
    if (hasOffsets) {
      offsetAttribute = fieldState.attributeSource.addAttribute(OffsetAttribute.class);
    } else {
      offsetAttribute = null;
    }
  }

  /**
   * Writes one position entry to stream 1. The delta-coded position is shifted
   * left one bit; the low bit flags a following payload (length + bytes).
   */
  void writeProx(final int termID, int proxCode) {
    //System.out.println("writeProx termID=" + termID + " proxCode=" + proxCode);
    assert hasProx;
    final BytesRef payload;
    if (payloadAttribute == null) {
      payload = null;
    } else {
      payload = payloadAttribute.getPayload();
    }

    if (payload != null && payload.length > 0) {
      termsHashPerField.writeVInt(1, (proxCode<<1)|1);
      termsHashPerField.writeVInt(1, payload.length);
      termsHashPerField.writeBytes(1, payload.bytes, payload.offset, payload.length);
      hasPayloads = true;
    } else {
      termsHashPerField.writeVInt(1, proxCode<<1);
    }

    FreqProxPostingsArray postings = (FreqProxPostingsArray) termsHashPerField.postingsArray;
    postings.lastPositions[termID] = fieldState.position;
  }

  /**
   * Writes one offset entry to stream 1 as (delta start, length); remembers
   * the absolute start so the next entry can be delta-coded.
   */
  void writeOffsets(final int termID, int offsetAccum) {
    assert hasOffsets;
    final int startOffset = offsetAccum + offsetAttribute.startOffset();
    final int endOffset = offsetAccum + offsetAttribute.endOffset();
    //System.out.println("writeOffsets termID=" + termID + " prevOffset=" + prevOffset + " startOff=" + startOffset + " endOff=" + endOffset);
    FreqProxPostingsArray postings = (FreqProxPostingsArray) termsHashPerField.postingsArray;
    assert startOffset - postings.lastOffsets[termID] >= 0;
    termsHashPerField.writeVInt(1, startOffset - postings.lastOffsets[termID]);
    termsHashPerField.writeVInt(1, endOffset - startOffset);

    postings.lastOffsets[termID] = startOffset;
  }

  @Override
  void newTerm(final int termID) {
    // First time we're seeing this term since the last
    // flush
    assert docState.testPoint("FreqProxTermsWriterPerField.newTerm start");

    FreqProxPostingsArray postings = (FreqProxPostingsArray) termsHashPerField.postingsArray;
    postings.lastDocIDs[termID] = docState.docID;
    if (!hasFreq) {
      postings.lastDocCodes[termID] = docState.docID;
    } else {
      // Low bit of the doc code later flags "freq == 1", hence the shift.
      postings.lastDocCodes[termID] = docState.docID << 1;
      postings.termFreqs[termID] = 1;
      if (hasProx) {
        writeProx(termID, fieldState.position);
        if (hasOffsets) {
          writeOffsets(termID, fieldState.offset);
        }
      } else {
        assert !hasOffsets;
      }
    }
    fieldState.maxTermFrequency = Math.max(1, fieldState.maxTermFrequency);
    fieldState.uniqueTermCount++;
  }

  @Override
  void addTerm(final int termID) {

    assert docState.testPoint("FreqProxTermsWriterPerField.addTerm start");

    FreqProxPostingsArray postings = (FreqProxPostingsArray) termsHashPerField.postingsArray;

    assert !hasFreq || postings.termFreqs[termID] > 0;

    if (!hasFreq) {
      assert postings.termFreqs == null;
      if (docState.docID != postings.lastDocIDs[termID]) {
        assert docState.docID > postings.lastDocIDs[termID];
        termsHashPerField.writeVInt(0, postings.lastDocCodes[termID]);
        postings.lastDocCodes[termID] = docState.docID - postings.lastDocIDs[termID];
        postings.lastDocIDs[termID] = docState.docID;
        fieldState.uniqueTermCount++;
      }
    } else if (docState.docID != postings.lastDocIDs[termID]) {
      assert docState.docID > postings.lastDocIDs[termID]:"id: "+docState.docID + " postings ID: "+ postings.lastDocIDs[termID] + " termID: "+termID;
      // Term not yet seen in the current doc but previously
      // seen in other doc(s) since the last flush

      // Now that we know doc freq for previous doc,
      // write it & lastDocCode
      if (1 == postings.termFreqs[termID]) {
        termsHashPerField.writeVInt(0, postings.lastDocCodes[termID]|1);
      } else {
        termsHashPerField.writeVInt(0, postings.lastDocCodes[termID]);
        termsHashPerField.writeVInt(0, postings.termFreqs[termID]);
      }
      postings.termFreqs[termID] = 1;
      fieldState.maxTermFrequency = Math.max(1, fieldState.maxTermFrequency);
      postings.lastDocCodes[termID] = (docState.docID - postings.lastDocIDs[termID]) << 1;
      postings.lastDocIDs[termID] = docState.docID;
      if (hasProx) {
        writeProx(termID, fieldState.position);
        if (hasOffsets) {
          // offsets restart at 0 for each new doc
          postings.lastOffsets[termID] = 0;
          writeOffsets(termID, fieldState.offset);
        }
      } else {
        assert !hasOffsets;
      }
      fieldState.uniqueTermCount++;
    } else {
      // Same doc as the last occurrence: bump freq and write position delta.
      fieldState.maxTermFrequency = Math.max(fieldState.maxTermFrequency, ++postings.termFreqs[termID]);
      if (hasProx) {
        writeProx(termID, fieldState.position-postings.lastPositions[termID]);
      }
      if (hasOffsets) {
        writeOffsets(termID, fieldState.offset);
      }
    }
  }

  @Override
  ParallelPostingsArray createPostingsArray(int size) {
    return new FreqProxPostingsArray(size, hasFreq, hasProx, hasOffsets);
  }

  /** Parallel arrays of per-term bookkeeping; which arrays exist depends on the index options. */
  static final class FreqProxPostingsArray extends ParallelPostingsArray {
    public FreqProxPostingsArray(int size, boolean writeFreqs, boolean writeProx, boolean writeOffsets) {
      super(size);
      if (writeFreqs) {
        termFreqs = new int[size];
      }
      lastDocIDs = new int[size];
      lastDocCodes = new int[size];
      if (writeProx) {
        lastPositions = new int[size];
        if (writeOffsets) {
          lastOffsets = new int[size];
        }
      } else {
        assert !writeOffsets;
      }
      //System.out.println("PA init freqs=" + writeFreqs + " pos=" + writeProx + " offs=" + writeOffsets);
    }

    int termFreqs[];                                   // # times this term occurs in the current doc
    int lastDocIDs[];                                  // Last docID where this term occurred
    int lastDocCodes[];                                // Code for prior doc
    int lastPositions[];                               // Last position where this term occurred
    int lastOffsets[];                                 // Last endOffset where this term occurred

    @Override
    ParallelPostingsArray newInstance(int size) {
      // Presence of each optional array encodes the original write flags.
      return new FreqProxPostingsArray(size, termFreqs != null, lastPositions != null, lastOffsets != null);
    }

    @Override
    void copyTo(ParallelPostingsArray toArray, int numToCopy) {
      assert toArray instanceof FreqProxPostingsArray;
      FreqProxPostingsArray to = (FreqProxPostingsArray) toArray;

      super.copyTo(toArray, numToCopy);

      System.arraycopy(lastDocIDs, 0, to.lastDocIDs, 0, numToCopy);
      System.arraycopy(lastDocCodes, 0, to.lastDocCodes, 0, numToCopy);
      if (lastPositions != null) {
        assert to.lastPositions != null;
        System.arraycopy(lastPositions, 0, to.lastPositions, 0, numToCopy);
      }
      if (lastOffsets != null) {
        assert to.lastOffsets != null;
        System.arraycopy(lastOffsets, 0, to.lastOffsets, 0, numToCopy);
      }
      if (termFreqs != null) {
        assert to.termFreqs != null;
        System.arraycopy(termFreqs, 0, to.termFreqs, 0, numToCopy);
      }
    }

    /** RAM accounting: base cost plus one int per optional array present. */
    @Override
    int bytesPerPosting() {
      int bytes = ParallelPostingsArray.BYTES_PER_POSTING + 2 * RamUsageEstimator.NUM_BYTES_INT;
      if (lastPositions != null) {
        bytes += RamUsageEstimator.NUM_BYTES_INT;
      }
      if (lastOffsets != null) {
        bytes += RamUsageEstimator.NUM_BYTES_INT;
      }
      if (termFreqs != null) {
        bytes += RamUsageEstimator.NUM_BYTES_INT;
      }

      return bytes;
    }
  }

  public void abort() {}

  // Scratch buffer reused across flush() calls when decoding payloads.
  BytesRef payload;

  /* Walk through all unique text tokens (Posting
   * instances) found in this field and serialize them
   * into a single RAM segment. */
  void flush(String fieldName, FieldsConsumer consumer, final SegmentWriteState state)
    throws IOException {

    if (!fieldInfo.isIndexed()) {
      return; // nothing to flush, don't bother the codec with the unindexed field
    }

    final TermsConsumer termsConsumer = consumer.addField(fieldInfo);
    final Comparator<BytesRef> termComp = termsConsumer.getComparator();

    // CONFUSING: this.indexOptions holds the index options
    // that were current when we first saw this field.  But
    // it's possible this has changed, eg when other
    // documents are indexed that cause a "downgrade" of the
    // IndexOptions.  So we must decode the in-RAM buffer
    // according to this.indexOptions, but then write the
    // new segment to the directory according to
    // currentFieldIndexOptions:
    final IndexOptions currentFieldIndexOptions = fieldInfo.getIndexOptions();
    assert currentFieldIndexOptions != null;

    final boolean writeTermFreq = currentFieldIndexOptions.compareTo(IndexOptions.DOCS_AND_FREQS) >= 0;
    final boolean writePositions = currentFieldIndexOptions.compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0;
    final boolean writeOffsets = currentFieldIndexOptions.compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0;

    final boolean readTermFreq = this.hasFreq;
    final boolean readPositions = this.hasProx;
    final boolean readOffsets = this.hasOffsets;

    //System.out.println("flush readTF=" + readTermFreq + " readPos=" + readPositions + " readOffs=" + readOffsets);

    // Make sure FieldInfo.update is working correctly!:
    assert !writeTermFreq || readTermFreq;
    assert !writePositions || readPositions;
    assert !writeOffsets || readOffsets;
    assert !writeOffsets || writePositions;

    final Map<Term,Integer> segDeletes;
    if (state.segUpdates != null && state.segUpdates.terms.size() > 0) {
      segDeletes = state.segUpdates.terms;
    } else {
      segDeletes = null;
    }

    final int[] termIDs = termsHashPerField.sortPostings(termComp);
    final int numTerms = termsHashPerField.bytesHash.size();
    final BytesRef text = new BytesRef();
    final FreqProxPostingsArray postings = (FreqProxPostingsArray) termsHashPerField.postingsArray;
    final ByteSliceReader freq = new ByteSliceReader();
    final ByteSliceReader prox = new ByteSliceReader();

    FixedBitSet visitedDocs = new FixedBitSet(state.segmentInfo.getDocCount());
    long sumTotalTermFreq = 0;
    long sumDocFreq = 0;

    Term protoTerm = new Term(fieldName);
    for (int i = 0; i < numTerms; i++) {
      final int termID = termIDs[i];
      //System.out.println("term=" + termID);
      // Get BytesRef
      final int textStart = postings.textStarts[termID];
      termsHashPerField.bytePool.setBytesRef(text, textStart);

      termsHashPerField.initReader(freq, termID, 0);
      if (readPositions || readOffsets) {
        termsHashPerField.initReader(prox, termID, 1);
      }

      // TODO: really TermsHashPerField should take over most
      // of this loop, including merge sort of terms from
      // multiple threads and interacting with the
      // TermsConsumer, only calling out to us (passing us the
      // DocsConsumer) to handle delivery of docs/positions

      final PostingsConsumer postingsConsumer = termsConsumer.startTerm(text);

      // Docs below delDocLimit for this term are marked deleted (update-by-term).
      final int delDocLimit;
      if (segDeletes != null) {
        protoTerm.bytes = text;
        final Integer docIDUpto = segDeletes.get(protoTerm);
        if (docIDUpto != null) {
          delDocLimit = docIDUpto;
        } else {
          delDocLimit = 0;
        }
      } else {
        delDocLimit = 0;
      }

      // Now termStates has numToMerge FieldMergeStates
      // which all share the same term.  Now we must
      // interleave the docID streams.
      int docFreq = 0;
      long totalTermFreq = 0;
      int docID = 0;

      while(true) {
        //System.out.println("  cycle");
        final int termFreq;
        if (freq.eof()) {
          // The final doc's data lives in the parallel arrays, not the stream;
          // lastDocCodes == -1 marks it as already emitted.
          if (postings.lastDocCodes[termID] != -1) {
            // Return last doc
            docID = postings.lastDocIDs[termID];
            if (readTermFreq) {
              termFreq = postings.termFreqs[termID];
            } else {
              termFreq = -1;
            }
            postings.lastDocCodes[termID] = -1;
          } else {
            // EOF
            break;
          }
        } else {
          final int code = freq.readVInt();
          if (!readTermFreq) {
            docID += code;
            termFreq = -1;
          } else {
            docID += code >>> 1;
            if ((code & 1) != 0) {
              termFreq = 1;
            } else {
              termFreq = freq.readVInt();
            }
          }

          assert docID != postings.lastDocIDs[termID];
        }

        docFreq++;
        assert docID < state.segmentInfo.getDocCount(): "doc=" + docID + " maxDoc=" + state.segmentInfo.getDocCount();

        // NOTE: we could check here if the docID was
        // deleted, and skip it.  However, this is somewhat
        // dangerous because it can yield non-deterministic
        // behavior since we may see the docID before we see
        // the term that caused it to be deleted.  This
        // would mean some (but not all) of its postings may
        // make it into the index, which'd alter the docFreq
        // for those terms.  We could fix this by doing two
        // passes, ie first sweep marks all del docs, and
        // 2nd sweep does the real flush, but I suspect
        // that'd add too much time to flush.
        visitedDocs.set(docID);
        postingsConsumer.startDoc(docID, writeTermFreq ? termFreq : -1);
        if (docID < delDocLimit) {
          // Mark it deleted.  TODO: we could also skip
          // writing its postings; this would be
          // deterministic (just for this Term's docs).

          // TODO: can we do this reach-around in a cleaner way????
          if (state.liveDocs == null) {
            state.liveDocs = docState.docWriter.codec.liveDocsFormat().newLiveDocs(state.segmentInfo.getDocCount());
          }
          if (state.liveDocs.get(docID)) {
            state.delCountOnFlush++;
            state.liveDocs.clear(docID);
          }
        }

        totalTermFreq += termFreq;

        // Carefully copy over the prox + payload info,
        // changing the format to match Lucene's segment
        // format.

        if (readPositions || readOffsets) {
          // we did record positions (& maybe payload) and/or offsets
          int position = 0;
          int offset = 0;
          for(int j=0;j<termFreq;j++) {
            final BytesRef thisPayload;

            if (readPositions) {
              final int code = prox.readVInt();
              position += code >>> 1;

              if ((code & 1) != 0) {
                // This position has a payload
                final int payloadLength = prox.readVInt();

                if (payload == null) {
                  payload = new BytesRef();
                  payload.bytes = new byte[payloadLength];
                } else if (payload.bytes.length < payloadLength) {
                  payload.grow(payloadLength);
                }

                prox.readBytes(payload.bytes, 0, payloadLength);
                payload.length = payloadLength;
                thisPayload = payload;

              } else {
                thisPayload = null;
              }

              if (readOffsets) {
                final int startOffset = offset + prox.readVInt();
                final int endOffset = startOffset + prox.readVInt();
                if (writePositions) {
                  if (writeOffsets) {
                    assert startOffset >=0 && endOffset >= startOffset : "startOffset=" + startOffset + ",endOffset=" + endOffset + ",offset=" + offset;
                    postingsConsumer.addPosition(position, thisPayload, startOffset, endOffset);
                  } else {
                    postingsConsumer.addPosition(position, thisPayload, -1, -1);
                  }
                }
                offset = startOffset;
              } else if (writePositions) {
                postingsConsumer.addPosition(position, thisPayload, -1, -1);
              }
            }
          }
        }
        postingsConsumer.finishDoc();
      }
      termsConsumer.finishTerm(text, new TermStats(docFreq, writeTermFreq ? totalTermFreq : -1));
      sumTotalTermFreq += totalTermFreq;
      sumDocFreq += docFreq;
    }

    termsConsumer.finish(writeTermFreq ? sumTotalTermFreq : -1, sumDocFreq, visitedDocs.cardinality());
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import org.apache.ignite.compute.ComputeJobSibling;
import org.apache.ignite.compute.ComputeTaskSession;
import org.apache.ignite.compute.ComputeTaskSessionAttributeListener;
import org.apache.ignite.compute.ComputeTaskSessionScope;
import org.apache.ignite.lang.IgniteFuture;
import org.apache.ignite.lang.IgniteUuid;
import org.jetbrains.annotations.Nullable;

/**
 * Minimal {@link ComputeTaskSession} stub for tests. Attribute storage and
 * listener registration are backed by plain in-memory collections; every
 * operation that a test is not expected to exercise fails with an assertion.
 */
public class GridTestTaskSession implements ComputeTaskSession {
    /** Task name, may be {@code null}. */
    private String taskName;

    /** Job type ID, may be {@code null}. */
    private String jobTypeId;

    /** Session ID. */
    private IgniteUuid sesId;

    /** Random node ID standing in for the task originator. */
    private UUID taskNodeId = UUID.randomUUID();

    /** In-memory session attributes. */
    private Map<Object, Object> attrs = new HashMap<>();

    /** Registered attribute listeners. */
    private Collection<ComputeTaskSessionAttributeListener> lsnrs = new ArrayList<>();

    /** Class loader of this test class. */
    private ClassLoader clsLdr = getClass().getClassLoader();

    /** */
    public GridTestTaskSession() {
        /* No-op. */
    }

    /**
     * @param sesId Session ID.
     */
    public GridTestTaskSession(IgniteUuid sesId) {
        this(null, null, sesId);
    }

    /**
     * @param taskName Task name.
     * @param jobTypeId Job type ID.
     * @param sesId Session ID.
     */
    public GridTestTaskSession(String taskName, String jobTypeId, IgniteUuid sesId) {
        this.taskName = taskName;
        this.jobTypeId = jobTypeId;
        this.sesId = sesId;
    }

    /** {@inheritDoc} */
    @Override public UUID getTaskNodeId() {
        return taskNodeId;
    }

    /** {@inheritDoc} */
    @Override public String getTaskName() {
        return taskName;
    }

    /** {@inheritDoc} */
    @Override public IgniteUuid getId() {
        return sesId;
    }

    /** {@inheritDoc} */
    @Override public long getEndTime() {
        return Long.MAX_VALUE;
    }

    /** {@inheritDoc} */
    @Override public long getStartTime() {
        return 0;
    }

    /** {@inheritDoc} */
    @Override public ClassLoader getClassLoader() {
        return clsLdr;
    }

    /** {@inheritDoc} */
    @Override public <K, V> V waitForAttribute(K key, long timeout) {
        assert false : "Not implemented";

        return null;
    }

    /** {@inheritDoc} */
    @Override public <K, V> boolean waitForAttribute(K key, @Nullable V val, long timeout) throws InterruptedException {
        assert false : "Not implemented";

        return false;
    }

    /** {@inheritDoc} */
    @Override public Map<?, ?> waitForAttributes(Collection<?> keys, long timeout) {
        assert false : "Not implemented";

        return null;
    }

    /** {@inheritDoc} */
    @Override public boolean waitForAttributes(Map<?, ?> attrs, long timeout) throws InterruptedException {
        assert false : "Not implemented";

        return false;
    }

    /** {@inheritDoc} */
    @Nullable @Override public Collection<ComputeJobSibling> getJobSiblings() {
        return null;
    }

    /** {@inheritDoc} */
    @Nullable @Override public Collection<ComputeJobSibling> refreshJobSiblings() {
        return getJobSiblings();
    }

    /** {@inheritDoc} */
    @Override public ComputeJobSibling getJobSibling(IgniteUuid jobId) {
        return null;
    }

    /** {@inheritDoc} */
    @Override public void setAttribute(Object key, Object val) {
        attrs.put(key, val);
    }

    /** {@inheritDoc} */
    @Override public <K, V> V getAttribute(K key) {
        return (V)attrs.get(key);
    }

    /** {@inheritDoc} */
    @Override public void setAttributes(Map<?, ?> attrs) {
        this.attrs.putAll(attrs);
    }

    /** {@inheritDoc} */
    @Override public Map<Object, Object> getAttributes() {
        return Collections.unmodifiableMap(attrs);
    }

    /** {@inheritDoc} */
    @Override public void addAttributeListener(ComputeTaskSessionAttributeListener lsnr, boolean rewind) {
        lsnrs.add(lsnr);
    }

    /** {@inheritDoc} */
    @Override public boolean removeAttributeListener(ComputeTaskSessionAttributeListener lsnr) {
        return lsnrs.remove(lsnr);
    }

    /** {@inheritDoc} */
    @Override public void saveCheckpoint(String key, Object state) {
        assert false : "Not implemented";
    }

    /** {@inheritDoc} */
    @Override public void saveCheckpoint(String key, Object state, ComputeTaskSessionScope scope, long timeout) {
        assert false : "Not implemented";
    }

    /** {@inheritDoc} */
    @Override public void saveCheckpoint(String key, Object state, ComputeTaskSessionScope scope, long timeout,
        boolean overwrite) {
        assert false : "Not implemented";
    }

    /** {@inheritDoc} */
    @Override public <T> T loadCheckpoint(String key) {
        assert false : "Not implemented";

        return null;
    }

    /** {@inheritDoc} */
    @Override public boolean removeCheckpoint(String key) {
        assert false : "Not implemented";

        return false;
    }

    /** {@inheritDoc} */
    @Nullable @Override public Collection<UUID> getTopology() {
        return null;
    }

    /** {@inheritDoc} */
    @Override public IgniteFuture<?> mapFuture() {
        assert false : "Not implemented";

        return null;
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        // Same output as the original StringBuilder-based implementation.
        return getClass().getName() +
            " [taskName='" + taskName + '\'' +
            ", jobTypeId='" + jobTypeId + '\'' +
            ", sesId=" + sesId +
            ", clsLdr=" + clsLdr +
            ']';
    }
}
/*
 * Copyright (c) 2015 Yannic Siebenhaar
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

package bge.math;

/**
 * A vector with four components (x, y, z, w), commonly used for calculations
 * with matrices. Arithmetic methods return new instances; {@code normalize()}
 * and {@code negate()} mutate this instance and return it for chaining.
 *
 * Created by Yannic Siebenhaar on 20.07.2015.
 */
public class Vector4
{
    /** The x component. */
    public float x;

    /** The y component. */
    public float y;

    /** The z component. */
    public float z;

    /** The w component. */
    public float w;

    /**
     * Constructs a new Vector with the given component values.
     *
     * @param x The x component.
     * @param y The y component.
     * @param z The z component.
     * @param w The w component.
     */
    public Vector4(final float x, final float y, final float z, final float w)
    {
        this.x = x;
        this.y = y;
        this.z = z;
        this.w = w;
    }

    /**
     * Constructs a zero Vector (all components 0.0f).
     */
    public Vector4()
    {
        this(0.0f, 0.0f, 0.0f, 0.0f);
    }

    /**
     * Copy constructor.
     *
     * @param vector The Vector to copy from.
     */
    public Vector4(Vector4 vector)
    {
        this(vector.x, vector.y, vector.z, vector.w);
    }

    /**
     * Sets all four components at once. To change a single component, assign
     * the public field directly.
     *
     * @param x The x component.
     * @param y The y component.
     * @param z The z component.
     * @param w The w component.
     */
    public void set(final float x, final float y, final float z, final float w)
    {
        this.x = x;
        this.y = y;
        this.z = z;
        this.w = w;
    }

    /**
     * Component-wise addition; this instance is left untouched.
     *
     * @param rhs Right hand side operand.
     * @return A new Vector holding the sum.
     */
    public Vector4 add(final Vector4 rhs)
    {
        return new Vector4(x + rhs.x, y + rhs.y, z + rhs.z, w + rhs.w);
    }

    /**
     * Component-wise subtraction; this instance is left untouched.
     *
     * @param rhs Right hand side operand.
     * @return A new Vector holding the difference.
     */
    public Vector4 sub(final Vector4 rhs)
    {
        return new Vector4(x - rhs.x, y - rhs.y, z - rhs.z, w - rhs.w);
    }

    /**
     * Component-wise multiplication; this instance is left untouched.
     *
     * @param rhs Right hand side operand.
     * @return A new Vector holding the product.
     */
    public Vector4 mul(final Vector4 rhs)
    {
        return new Vector4(x * rhs.x, y * rhs.y, z * rhs.z, w * rhs.w);
    }

    /**
     * Scalar multiplication; every component is scaled by {@code rhs}.
     *
     * @param rhs The scalar factor.
     * @return A new scaled Vector.
     */
    public Vector4 mul(final float rhs)
    {
        return new Vector4(x * rhs, y * rhs, z * rhs, w * rhs);
    }

    /**
     * Component-wise division; this instance is left untouched.
     *
     * @param rhs Right hand side operand.
     * @return A new Vector holding the quotient.
     */
    public Vector4 div(final Vector4 rhs)
    {
        return new Vector4(x / rhs.x, y / rhs.y, z / rhs.z, w / rhs.w);
    }

    /**
     * The Euclidean length of this Vector.
     *
     * @return The length as a floating point number.
     */
    public float length()
    {
        return (float) Math.sqrt(dot(this));
    }

    /**
     * The dot product of this Vector with {@code rhs}.
     *
     * @param rhs Right hand side operand.
     * @return The dot product.
     */
    public float dot(final Vector4 rhs)
    {
        return x * rhs.x + y * rhs.y + z * rhs.z + w * rhs.w;
    }

    /**
     * Scales this Vector to unit length by dividing every component by the
     * current length. Mutates this instance.
     *
     * @return This instance, for chaining.
     */
    public Vector4 normalize()
    {
        final float len = length();

        x /= len;
        y /= len;
        z /= len;
        w /= len;

        return this;
    }

    /**
     * Flips the sign of every component. Mutates this instance.
     *
     * @return This instance, for chaining.
     */
    public Vector4 negate()
    {
        x = -x;
        y = -y;
        z = -z;
        w = -w;

        return this;
    }

    /**
     * Returns a negated copy; this instance is left untouched.
     *
     * @return A new Vector with all components sign-flipped.
     */
    public Vector4 getNegated()
    {
        return new Vector4(this).negate();
    }

    /**
     * Returns a unit-length copy; this instance is left untouched.
     *
     * @return A new normalized Vector.
     */
    public Vector4 getNormalized()
    {
        return new Vector4(this).normalize();
    }

    /**
     * The Euclidean distance between this Vector and {@code rhs}.
     *
     * @param rhs The other point.
     * @return The distance as a floating point number.
     */
    public float distance(final Vector4 rhs)
    {
        return sub(rhs).length();
    }

    /**
     * Widens a Vector3 to a Vector4; the w component becomes 0.0f.
     *
     * @param rhs Vector to convert.
     * @return A new Vector4 carrying the given Vector's data.
     */
    public static Vector4 parseVector(final Vector3 rhs)
    {
        return new Vector4(rhs.x, rhs.y, rhs.z, 0.0f);
    }

    /**
     * Widens a Vector2 to a Vector4; the z and w components become 0.0f.
     *
     * @param rhs Vector to convert.
     * @return A new Vector4 carrying the given Vector's data.
     */
    public static Vector4 parseVector(final Vector2 rhs)
    {
        return new Vector4(rhs.x, rhs.y, 0.0f, 0.0f);
    }

    /**
     * Human-readable representation, useful for debugging.
     *
     * @return The converted String.
     */
    public String toString()
    {
        return "Vector4: (" + this.x + "), (" + this.y + "), (" + this.z + "), (" + this.w + ")";
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.qjournal.server; import java.io.Closeable; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStreamWriter; import java.net.URL; import java.nio.file.Files; import java.nio.file.StandardCopyOption; import java.security.PrivilegedExceptionAction; import java.util.Iterator; import java.util.List; import java.util.concurrent.TimeUnit; import org.apache.commons.lang.math.LongRange; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.hdfs.qjournal.protocol.JournalNotFormattedException; import org.apache.hadoop.hdfs.qjournal.protocol.JournalOutOfSyncException; import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocol; import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos; import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.NewEpochResponseProto; import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.PersistedRecoveryPaxosData; import 
org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.PrepareRecoveryResponseProto; import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.SegmentStateProto; import org.apache.hadoop.hdfs.qjournal.protocol.RequestInfo; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption; import org.apache.hadoop.hdfs.server.common.StorageErrorReporter; import org.apache.hadoop.hdfs.server.common.StorageInfo; import org.apache.hadoop.hdfs.server.namenode.EditLogOutputStream; import org.apache.hadoop.hdfs.server.namenode.FileJournalManager; import org.apache.hadoop.hdfs.server.namenode.FileJournalManager.EditLogFile; import org.apache.hadoop.hdfs.server.namenode.JournalManager; import org.apache.hadoop.hdfs.server.namenode.TransferFsImage; import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo; import org.apache.hadoop.hdfs.server.protocol.RemoteEditLog; import org.apache.hadoop.hdfs.server.protocol.RemoteEditLogManifest; import org.apache.hadoop.hdfs.util.AtomicFileOutputStream; import org.apache.hadoop.hdfs.util.BestEffortLongFile; import org.apache.hadoop.hdfs.util.PersistentLongFile; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.ipc.Server; import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.StopWatch; import org.apache.hadoop.util.Time; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Charsets; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.protobuf.TextFormat; /** * A JournalNode can manage journals for several clusters at once. * Each such journal is entirely independent despite being hosted by * the same JVM. 
 */
public class Journal implements Closeable {
  static final Log LOG = LogFactory.getLog(Journal.class);


  // Current writing state
  private EditLogOutputStream curSegment;  // open segment stream, or null if none is open
  private long curSegmentTxId = HdfsServerConstants.INVALID_TXID; // first txid of the open segment
  private long nextTxId = HdfsServerConstants.INVALID_TXID;       // txid expected on the next journal() call
  private long highestWrittenTxId = 0;     // highest txid ever written to this journal

  // Identifies which cluster's journal this is (a JournalNode hosts one
  // Journal instance per cluster; see class javadoc).
  private final String journalId;

  // Manages the on-disk layout of this journal's storage directory.
  private final JNStorage storage;

  /**
   * When a new writer comes along, it asks each node to promise
   * to ignore requests from any previous writer, as identified
   * by epoch number. In order to make such a promise, the epoch
   * number of that writer is stored persistently on disk.
   */
  private PersistentLongFile lastPromisedEpoch;

  /**
   * Each IPC that comes from a given client contains a serial number
   * which only increases from the client's perspective. Whenever
   * we switch epochs, we reset this back to -1. Whenever an IPC
   * comes from a client, we ensure that it is strictly higher
   * than any previous IPC. This guards against any bugs in the IPC
   * layer that would re-order IPCs or cause a stale retry from an old
   * request to resurface and confuse things.
   */
  private long currentEpochIpcSerial = -1;

  /**
   * The epoch number of the last writer to actually write a transaction.
   * This is used to differentiate log segments after a crash at the very
   * beginning of a segment. See the 'testNewerVersionOfSegmentWins'
   * test case.
   */
  private PersistentLongFile lastWriterEpoch;

  /**
   * Lower-bound on the last committed transaction ID. This is not
   * depended upon for correctness, but acts as a sanity check
   * during the recovery procedures, and as a visibility mark
   * for clients reading in-progress logs.
   */
  private BestEffortLongFile committedTxnId;

  // File names (under the storage "current" directory) used to persist
  // the epoch/committed-txid state above.
  public static final String LAST_PROMISED_FILENAME = "last-promised-epoch";
  public static final String LAST_WRITER_EPOCH = "last-writer-epoch";
  private static final String COMMITTED_TXID_FILENAME = "committed-txid";

  // Manages the actual edit log segment files on disk.
  private final FileJournalManager fjm;

  private final JournalMetrics metrics;

  // Time.now() of the last successful journal() write; 0 until then.
  private long lastJournalTimestamp = 0;

  /**
   * Time threshold for sync calls, beyond which a warning should be logged to the console.
   */
  private static final int WARN_SYNC_MILLIS_THRESHOLD = 1000;

  /**
   * Opens the journal rooted at the given directory, reloads the persisted
   * epoch/committed-txid state, and scans local storage so that the
   * highest written txid reflects what is already on disk.
   *
   * @param conf configuration forwarded to the underlying {@link JNStorage}
   * @param logDir directory holding this journal's files
   * @param journalId identifier of this journal
   * @param startOpt startup option forwarded to storage initialization
   * @param errorReporter callback used to report storage errors
   * @throws IOException if the storage cannot be initialized or scanned
   */
  Journal(Configuration conf, File logDir, String journalId,
      StartupOption startOpt, StorageErrorReporter errorReporter)
      throws IOException {
    storage = new JNStorage(conf, logDir, startOpt, errorReporter);
    this.journalId = journalId;

    refreshCachedData();

    this.fjm = storage.getJournalManager();

    this.metrics = JournalMetrics.create(this);

    EditLogFile latest = scanStorageForLatestEdits();
    if (latest != null) {
      updateHighestWrittenTxId(latest.getLastTxId());
    }
  }

  /**
   * Reload any data that may have been cached. This is necessary
   * when we first load the Journal, but also after any formatting
   * operation, since the cached data is no longer relevant.
   */
  private synchronized void refreshCachedData() {
    // Close the old handle (no-op on first call when committedTxnId is null)
    // before re-opening files under the (possibly re-created) current dir.
    IOUtils.closeStream(committedTxnId);

    File currentDir = storage.getSingularStorageDir().getCurrentDir();
    this.lastPromisedEpoch = new PersistentLongFile(
        new File(currentDir, LAST_PROMISED_FILENAME), 0);
    this.lastWriterEpoch = new PersistentLongFile(
        new File(currentDir, LAST_WRITER_EPOCH), 0);
    this.committedTxnId = new BestEffortLongFile(
        new File(currentDir, COMMITTED_TXID_FILENAME),
        HdfsServerConstants.INVALID_TXID);
  }

  /**
   * Scan the local storage directory, and return the segment containing
   * the highest transaction.
   * @return the EditLogFile with the highest transactions, or null
   * if no files exist.
*/ private synchronized EditLogFile scanStorageForLatestEdits() throws IOException { if (!fjm.getStorageDirectory().getCurrentDir().exists()) { return null; } LOG.info("Scanning storage " + fjm); List<EditLogFile> files = fjm.getLogFiles(0); while (!files.isEmpty()) { EditLogFile latestLog = files.remove(files.size() - 1); latestLog.scanLog(Long.MAX_VALUE, false); LOG.info("Latest log is " + latestLog); if (latestLog.getLastTxId() == HdfsServerConstants.INVALID_TXID) { // the log contains no transactions LOG.warn("Latest log " + latestLog + " has no transactions. " + "moving it aside and looking for previous log"); latestLog.moveAsideEmptyFile(); } else { return latestLog; } } LOG.info("No files in " + fjm); return null; } /** * Format the local storage with the given namespace. */ void format(NamespaceInfo nsInfo) throws IOException { Preconditions.checkState(nsInfo.getNamespaceID() != 0, "can't format with uninitialized namespace info: %s", nsInfo); LOG.info("Formatting " + this + " with namespace info: " + nsInfo); storage.format(nsInfo); refreshCachedData(); } /** * Unlock and release resources. */ @Override // Closeable public void close() throws IOException { storage.close(); IOUtils.closeStream(committedTxnId); IOUtils.closeStream(curSegment); } JNStorage getStorage() { return storage; } String getJournalId() { return journalId; } /** * @return the last epoch which this node has promised not to accept * any lower epoch, or 0 if no promises have been made. 
   */
  synchronized long getLastPromisedEpoch() throws IOException {
    checkFormatted();
    return lastPromisedEpoch.get();
  }

  /** @return the persisted epoch of the last writer that wrote a transaction. */
  synchronized public long getLastWriterEpoch() throws IOException {
    checkFormatted();
    return lastWriterEpoch.get();
  }

  /** @return the persisted lower bound on the committed transaction ID. */
  synchronized long getCommittedTxnId() throws IOException {
    return committedTxnId.get();
  }

  /** @return the Time.now() value of the last successful write, or 0. */
  synchronized long getLastJournalTimestamp() {
    return lastJournalTimestamp;
  }

  /**
   * @return how far (in transactions) the locally written data lags behind
   * the committed txid lower bound; never negative, and 0 if nothing has
   * been committed yet.
   */
  synchronized long getCurrentLagTxns() throws IOException {
    long committed = committedTxnId.get();
    if (committed == 0) {
      return 0;
    }

    return Math.max(committed - highestWrittenTxId, 0L);
  }

  /** @return the highest transaction ID written to this journal. */
  synchronized long getHighestWrittenTxId() {
    return highestWrittenTxId;
  }

  /**
   * Update the highest Tx ID that has been written to the journal. Also update
   * the {@link FileJournalManager#lastReadableTxId} of the underlying fjm.
   * @param val The new value
   */
  private void updateHighestWrittenTxId(long val) {
    highestWrittenTxId = val;
    fjm.setLastReadableTxId(val);
  }

  @VisibleForTesting
  JournalMetrics getMetricsForTests() {
    return metrics;
  }

  /**
   * Try to create a new epoch for this journal.
   * @param nsInfo the namespace, which is verified for consistency or used to
   * format, if the Journal has not yet been written to.
   * @param epoch the epoch to start
   * @return the status information necessary to begin recovery
   * @throws IOException if the node has already made a promise to another
   * writer with a higher epoch number, if the namespace is inconsistent,
   * or if a disk error occurs.
   */
  synchronized NewEpochResponseProto newEpoch(
      NamespaceInfo nsInfo, long epoch) throws IOException {
    checkFormatted();
    storage.checkConsistentNamespace(nsInfo);

    // Check that the new epoch being proposed is in fact newer than
    // any other that we've promised.
    if (epoch <= getLastPromisedEpoch()) {
      throw new IOException("Proposed epoch " + epoch + " <= last promise " +
          getLastPromisedEpoch());
    }

    updateLastPromisedEpoch(epoch);
    // Any segment the previous writer left open is now fenced off.
    abortCurSegment();

    NewEpochResponseProto.Builder builder =
        NewEpochResponseProto.newBuilder();

    EditLogFile latestFile = scanStorageForLatestEdits();

    if (latestFile != null) {
      builder.setLastSegmentTxId(latestFile.getFirstTxId());
    }

    return builder.build();
  }

  /**
   * Persist a new promised epoch and reset the per-epoch IPC serial so the
   * new writer's IPCs start counting from scratch.
   */
  private void updateLastPromisedEpoch(long newEpoch) throws IOException {
    LOG.info("Updating lastPromisedEpoch from " + lastPromisedEpoch.get() +
        " to " + newEpoch + " for client " + Server.getRemoteIp());
    lastPromisedEpoch.set(newEpoch);

    // Since we have a new writer, reset the IPC serial - it will start
    // counting again from 0 for this writer.
    currentEpochIpcSerial = -1;
  }

  /** Abort the currently open segment, if any, and reset the writing state. */
  private void abortCurSegment() throws IOException {
    if (curSegment == null) {
      return;
    }

    curSegment.abort();
    curSegment = null;
    curSegmentTxId = HdfsServerConstants.INVALID_TXID;
  }

  /**
   * Write a batch of edits to the journal.
   * {@see QJournalProtocol#journal(RequestInfo, long, long, int, byte[])}
   */
  synchronized void journal(RequestInfo reqInfo,
      long segmentTxId, long firstTxnId,
      int numTxns, byte[] records) throws IOException {
    checkFormatted();
    checkWriteRequest(reqInfo);

    // If numTxns is 0, it's actually a fake send which aims at updating
    // committedTxId only. So we can return early.
    if (numTxns == 0) {
      return;
    }

    checkSync(curSegment != null,
        "Can't write, no segment open");

    if (curSegmentTxId != segmentTxId) {
      // Sanity check: it is possible that the writer will fail IPCs
      // on both the finalize() and then the start() of the next segment.
      // This could cause us to continue writing to an old segment
      // instead of rolling to a new one, which breaks one of the
      // invariants in the design. If it happens, abort the segment
      // and throw an exception.
JournalOutOfSyncException e = new JournalOutOfSyncException( "Writer out of sync: it thinks it is writing segment " + segmentTxId + " but current segment is " + curSegmentTxId); abortCurSegment(); throw e; } checkSync(nextTxId == firstTxnId, "Can't write txid " + firstTxnId + " expecting nextTxId=" + nextTxId); long lastTxnId = firstTxnId + numTxns - 1; if (LOG.isTraceEnabled()) { LOG.trace("Writing txid " + firstTxnId + "-" + lastTxnId); } // If the edit has already been marked as committed, we know // it has been fsynced on a quorum of other nodes, and we are // "catching up" with the rest. Hence we do not need to fsync. boolean isLagging = lastTxnId <= committedTxnId.get(); boolean shouldFsync = !isLagging; curSegment.writeRaw(records, 0, records.length); curSegment.setReadyToFlush(); StopWatch sw = new StopWatch(); sw.start(); curSegment.flush(shouldFsync); sw.stop(); long nanoSeconds = sw.now(); metrics.addSync( TimeUnit.MICROSECONDS.convert(nanoSeconds, TimeUnit.NANOSECONDS)); long milliSeconds = TimeUnit.MILLISECONDS.convert( nanoSeconds, TimeUnit.NANOSECONDS); if (milliSeconds > WARN_SYNC_MILLIS_THRESHOLD) { LOG.warn("Sync of transaction range " + firstTxnId + "-" + lastTxnId + " took " + milliSeconds + "ms"); } if (isLagging) { // This batch of edits has already been committed on a quorum of other // nodes. So, we are in "catch up" mode. This gets its own metric. metrics.batchesWrittenWhileLagging.incr(1); } metrics.batchesWritten.incr(1); metrics.bytesWritten.incr(records.length); metrics.txnsWritten.incr(numTxns); updateHighestWrittenTxId(lastTxnId); nextTxId = lastTxnId + 1; lastJournalTimestamp = Time.now(); } public void heartbeat(RequestInfo reqInfo) throws IOException { checkRequest(reqInfo); } /** * Ensure that the given request is coming from the correct writer and in-order. * @param reqInfo the request info * @throws IOException if the request is invalid. 
   */
  private synchronized void checkRequest(RequestInfo reqInfo) throws IOException {
    // Invariant 25 from ZAB paper
    if (reqInfo.getEpoch() < lastPromisedEpoch.get()) {
      throw new IOException("IPC's epoch " + reqInfo.getEpoch() +
          " is less than the last promised epoch " +
          lastPromisedEpoch.get());
    } else if (reqInfo.getEpoch() > lastPromisedEpoch.get()) {
      // A newer client has arrived. Fence any previous writers by updating
      // the promise.
      updateLastPromisedEpoch(reqInfo.getEpoch());
    }

    // Ensure that the IPCs are arriving in-order as expected.
    checkSync(reqInfo.getIpcSerialNumber() > currentEpochIpcSerial,
        "IPC serial %s from client %s was not higher than prior highest " +
        "IPC serial %s", reqInfo.getIpcSerialNumber(),
        Server.getRemoteIp(),
        currentEpochIpcSerial);
    currentEpochIpcSerial = reqInfo.getIpcSerialNumber();

    if (reqInfo.hasCommittedTxId()) {
      // The committed txid is a monotonically increasing lower bound;
      // a client must never move it backwards.
      Preconditions.checkArgument(
          reqInfo.getCommittedTxId() >= committedTxnId.get(),
          "Client trying to move committed txid backward from " +
          committedTxnId.get() + " to " + reqInfo.getCommittedTxId());

      committedTxnId.set(reqInfo.getCommittedTxId());
    }
  }

  /**
   * Like {@link #checkRequest(RequestInfo)}, but additionally requires that
   * the caller is the current writer (same epoch as the last writer epoch).
   */
  private synchronized void checkWriteRequest(RequestInfo reqInfo) throws IOException {
    checkRequest(reqInfo);

    if (reqInfo.getEpoch() != lastWriterEpoch.get()) {
      throw new IOException("IPC's epoch " + reqInfo.getEpoch() +
          " is not the current writer epoch " +
          lastWriterEpoch.get());
    }
  }

  /** @return true if the underlying storage has been formatted. */
  public synchronized boolean isFormatted() {
    return storage.isFormatted();
  }

  /** @throws JournalNotFormattedException if this journal is not yet formatted. */
  private void checkFormatted() throws JournalNotFormattedException {
    if (!isFormatted()) {
      throw new JournalNotFormattedException("Journal " +
          storage.getSingularStorageDir() + " not formatted");
    }
  }

  /**
   * @throws JournalOutOfSyncException if the given expression is not true.
   * The message of the exception is formatted using the 'msg' and
   * 'formatArgs' parameters.
   */
  private void checkSync(boolean expression, String msg,
      Object... formatArgs) throws JournalOutOfSyncException {
    if (!expression) {
      throw new JournalOutOfSyncException(String.format(msg, formatArgs));
    }
  }

  /**
   * @throws AssertionError if the given expression is not true.
   * The message of the exception is formatted using the 'msg' and
   * 'formatArgs' parameters.
   *
   * This should be used in preference to Java's built-in assert in
   * non-performance-critical paths, where a failure of this invariant
   * might cause the protocol to lose data.
   */
  private void alwaysAssert(boolean expression, String msg,
      Object... formatArgs) {
    if (!expression) {
      throw new AssertionError(String.format(msg, formatArgs));
    }
  }

  /**
   * Start a new segment at the given txid. The previous segment
   * must have already been finalized.
   */
  public synchronized void startLogSegment(RequestInfo reqInfo, long txid,
      int layoutVersion) throws IOException {
    assert fjm != null;
    checkFormatted();
    checkRequest(reqInfo);

    if (curSegment != null) {
      LOG.warn("Client is requesting a new log segment " + txid +
          " though we are already writing " + curSegment + ". " +
          "Aborting the current segment in order to begin the new one.");
      // The writer may have lost a connection to us and is now
      // re-connecting after the connection came back.
      // We should abort our own old segment.
      abortCurSegment();
    }

    // Paranoid sanity check: we should never overwrite a finalized log file.
    // Additionally, if it's in-progress, it should have at most 1 transaction.
    // This can happen if the writer crashes exactly at the start of a segment.
    EditLogFile existing = fjm.getLogFile(txid);
    if (existing != null) {
      if (!existing.isInProgress()) {
        throw new IllegalStateException("Already have a finalized segment " +
            existing + " beginning at " + txid);
      }

      // If it's in-progress, it should only contain one transaction,
      // because the "startLogSegment" transaction is written alone at the
      // start of each segment.
      existing.scanLog(Long.MAX_VALUE, false);
      if (existing.getLastTxId() != existing.getFirstTxId()) {
        throw new IllegalStateException("The log file " +
            existing + " seems to contain valid transactions");
      }
    }

    long curLastWriterEpoch = lastWriterEpoch.get();
    if (curLastWriterEpoch != reqInfo.getEpoch()) {
      LOG.info("Updating lastWriterEpoch from " + curLastWriterEpoch +
          " to " + reqInfo.getEpoch() + " for client " +
          Server.getRemoteIp());
      lastWriterEpoch.set(reqInfo.getEpoch());
    }

    // The fact that we are starting a segment at this txid indicates
    // that any previous recovery for this same segment was aborted.
    // Otherwise, no writer would have started writing. So, we can
    // remove the record of the older segment here.
    purgePaxosDecision(txid);

    curSegment = fjm.startLogSegment(txid, layoutVersion);
    curSegmentTxId = txid;
    nextTxId = txid;
  }

  /**
   * Finalize the log segment at the given transaction ID.
   */
  public synchronized void finalizeLogSegment(RequestInfo reqInfo, long startTxId,
      long endTxId) throws IOException {
    checkFormatted();
    checkRequest(reqInfo);

    boolean needsValidation = true;

    // Finalizing the log that the writer was just writing.
    if (startTxId == curSegmentTxId) {
      if (curSegment != null) {
        curSegment.close();
        curSegment = null;
        curSegmentTxId = HdfsServerConstants.INVALID_TXID;
      }

      checkSync(nextTxId == endTxId + 1,
          "Trying to finalize in-progress log segment %s to end at " +
          "txid %s but only written up to txid %s",
          startTxId, endTxId, nextTxId - 1);
      // No need to validate the edit log if the client is finalizing
      // the log segment that it was just writing to.
      needsValidation = false;
    }

    FileJournalManager.EditLogFile elf = fjm.getLogFile(startTxId);
    if (elf == null) {
      throw new JournalOutOfSyncException("No log file to finalize at " +
          "transaction ID " + startTxId);
    }

    if (elf.isInProgress()) {
      if (needsValidation) {
        // Scan the on-disk segment to confirm it really ends at the
        // txid the client claims before marking it immutable.
        LOG.info("Validating log segment " + elf.getFile() + " about to be " +
            "finalized");
        elf.scanLog(Long.MAX_VALUE, false);

        checkSync(elf.getLastTxId() == endTxId,
            "Trying to finalize in-progress log segment %s to end at " +
            "txid %s but log %s on disk only contains up to txid %s",
            startTxId, endTxId, elf.getFile(), elf.getLastTxId());
      }
      fjm.finalizeLogSegment(startTxId, endTxId);
    } else {
      // Already finalized: re-finalizing is only legal with the same endTxId.
      Preconditions.checkArgument(endTxId == elf.getLastTxId(),
          "Trying to re-finalize already finalized log " +
              elf + " with different endTxId " + endTxId);
    }

    // Once logs are finalized, a different length will never be decided.
    // During recovery, we treat a finalized segment the same as an accepted
    // recovery. Thus, we no longer need to keep track of the previously-
    // accepted decision. The existence of the finalized log segment is enough.
    purgePaxosDecision(elf.getFirstTxId());
  }

  /**
   * @see JournalManager#purgeLogsOlderThan(long)
   */
  public synchronized void purgeLogsOlderThan(RequestInfo reqInfo,
      long minTxIdToKeep) throws IOException {
    checkFormatted();
    checkRequest(reqInfo);

    storage.purgeDataOlderThan(minTxIdToKeep);
  }

  /**
   * Remove the previously-recorded 'accepted recovery' information
   * for a given log segment, once it is no longer necessary.
   * @param segmentTxId the transaction ID to purge
   * @throws IOException if the file could not be deleted
   */
  private void purgePaxosDecision(long segmentTxId) throws IOException {
    File paxosFile = storage.getPaxosFile(segmentTxId);
    if (paxosFile.exists()) {
      if (!paxosFile.delete()) {
        throw new IOException("Unable to delete paxos file " +
            paxosFile);
      }
    }
  }

  /**
   * @see QJournalProtocol#getEditLogManifest(String, long, boolean)
   */
  public RemoteEditLogManifest getEditLogManifest(long sinceTxId,
      boolean inProgressOk) throws IOException {
    // No need to checkRequest() here - anyone may ask for the list
    // of segments.
    checkFormatted();

    List<RemoteEditLog> logs = fjm.getRemoteEditLogs(sinceTxId, inProgressOk);

    if (inProgressOk) {
      // Replace the (first) in-progress entry with one whose end txid is
      // the highest txid actually written so far.
      RemoteEditLog log = null;
      for (Iterator<RemoteEditLog> iter = logs.iterator(); iter.hasNext();) {
        log = iter.next();
        if (log.isInProgress()) {
          iter.remove();
          break;
        }
      }
      if (log != null && log.isInProgress()) {
        logs.add(new RemoteEditLog(log.getStartTxId(), getHighestWrittenTxId(),
            true));
      }
    }

    return new RemoteEditLogManifest(logs, getCommittedTxnId());
  }

  /**
   * @return the current state of the given segment, or null if the
   * segment does not exist.
   */
  @VisibleForTesting
  SegmentStateProto getSegmentInfo(long segmentTxId) throws IOException {
    EditLogFile elf = fjm.getLogFile(segmentTxId);
    if (elf == null) {
      return null;
    }
    if (elf.isInProgress()) {
      // In-progress segments have no recorded end txid; scan to find it.
      elf.scanLog(Long.MAX_VALUE, false);
    }
    if (elf.getLastTxId() == HdfsServerConstants.INVALID_TXID) {
      LOG.info("Edit log file " + elf + " appears to be empty. " +
          "Moving it aside...");
      elf.moveAsideEmptyFile();
      return null;
    }
    SegmentStateProto ret = SegmentStateProto.newBuilder()
        .setStartTxId(segmentTxId)
        .setEndTxId(elf.getLastTxId())
        .setIsInProgress(elf.isInProgress())
        .build();
    LOG.info("getSegmentInfo(" + segmentTxId + "): " + elf + " -> " +
        TextFormat.shortDebugString(ret));
    return ret;
  }

  /**
   * @see QJournalProtocol#prepareRecovery(RequestInfo, long)
   */
  public synchronized PrepareRecoveryResponseProto prepareRecovery(
      RequestInfo reqInfo, long segmentTxId) throws IOException {
    checkFormatted();
    checkRequest(reqInfo);

    abortCurSegment();

    PrepareRecoveryResponseProto.Builder builder =
        PrepareRecoveryResponseProto.newBuilder();

    // Roll forward any half-completed accept from a previous recovery
    // attempt before reporting our state.
    PersistedRecoveryPaxosData previouslyAccepted = getPersistedPaxosData(segmentTxId);
    completeHalfDoneAcceptRecovery(previouslyAccepted);

    SegmentStateProto segInfo = getSegmentInfo(segmentTxId);
    boolean hasFinalizedSegment = segInfo != null && !segInfo.getIsInProgress();

    if (previouslyAccepted != null && !hasFinalizedSegment) {
      SegmentStateProto acceptedState = previouslyAccepted.getSegmentState();
      assert acceptedState.getEndTxId() == segInfo.getEndTxId() :
          "prev accepted: " + TextFormat.shortDebugString(previouslyAccepted)+ "\n" +
          "on disk: " + TextFormat.shortDebugString(segInfo);

      builder.setAcceptedInEpoch(previouslyAccepted.getAcceptedInEpoch())
          .setSegmentState(previouslyAccepted.getSegmentState());
    } else {
      if (segInfo != null) {
        builder.setSegmentState(segInfo);
      }
    }

    builder.setLastWriterEpoch(lastWriterEpoch.get());
    if (committedTxnId.get() != HdfsServerConstants.INVALID_TXID) {
      builder.setLastCommittedTxId(committedTxnId.get());
    }

    PrepareRecoveryResponseProto resp = builder.build();
    LOG.info("Prepared recovery for segment " + segmentTxId + ": " +
        TextFormat.shortDebugString(resp));
    return resp;
  }

  /**
   * @see QJournalProtocol#acceptRecovery(RequestInfo, QJournalProtocolProtos.SegmentStateProto, URL)
   */
  public synchronized void acceptRecovery(RequestInfo reqInfo,
      SegmentStateProto segment,
      URL fromUrl) throws IOException {
    checkFormatted();
    checkRequest(reqInfo);

    abortCurSegment();

    long segmentTxId = segment.getStartTxId();

    // Basic sanity checks that the segment is well-formed and contains
    // at least one transaction.
    Preconditions.checkArgument(segment.getEndTxId() > 0 &&
        segment.getEndTxId() >= segmentTxId,
        "bad recovery state for segment %s: %s",
        segmentTxId, TextFormat.shortDebugString(segment));

    PersistedRecoveryPaxosData oldData = getPersistedPaxosData(segmentTxId);
    PersistedRecoveryPaxosData newData = PersistedRecoveryPaxosData.newBuilder()
        .setAcceptedInEpoch(reqInfo.getEpoch())
        .setSegmentState(segment)
        .build();

    // If we previously acted on acceptRecovery() from a higher-numbered writer,
    // this call is out of sync. We should never actually trigger this, since the
    // checkRequest() call above should filter non-increasing epoch numbers.
    if (oldData != null) {
      alwaysAssert(oldData.getAcceptedInEpoch() <= reqInfo.getEpoch(),
          "Bad paxos transition, out-of-order epochs.\nOld: %s\nNew: %s\n",
          oldData, newData);
    }

    File syncedFile = null;

    SegmentStateProto currentSegment = getSegmentInfo(segmentTxId);
    if (currentSegment == null ||
        currentSegment.getEndTxId() != segment.getEndTxId()) {
      if (currentSegment == null) {
        LOG.info("Synchronizing log " + TextFormat.shortDebugString(segment) +
            ": no current segment in place");

        // Update the highest txid for lag metrics
        updateHighestWrittenTxId(Math.max(segment.getEndTxId(),
            highestWrittenTxId));
      } else {
        LOG.info("Synchronizing log " + TextFormat.shortDebugString(segment) +
            ": old segment " + TextFormat.shortDebugString(currentSegment) +
            " is not the right length");

        // Paranoid sanity check: if the new log is shorter than the log we
        // currently have, we should not end up discarding any transactions
        // which are already Committed.
        if (txnRange(currentSegment).containsLong(committedTxnId.get()) &&
            !txnRange(segment).containsLong(committedTxnId.get())) {
          throw new AssertionError(
              "Cannot replace segment " +
              TextFormat.shortDebugString(currentSegment) +
              " with new segment " +
              TextFormat.shortDebugString(segment) +
              ": would discard already-committed txn " +
              committedTxnId.get());
        }

        // Another paranoid check: we should not be asked to synchronize a log
        // on top of a finalized segment.
        alwaysAssert(currentSegment.getIsInProgress(),
            "Should never be asked to synchronize a different log on top of an " +
            "already-finalized segment");

        // If we're shortening the log, update our highest txid
        // used for lag metrics.
        if (txnRange(currentSegment).containsLong(highestWrittenTxId)) {
          updateHighestWrittenTxId(segment.getEndTxId());
        }
      }
      syncedFile = syncLog(reqInfo, segment, fromUrl);
    } else {
      LOG.info("Skipping download of log " +
          TextFormat.shortDebugString(segment) +
          ": already have up-to-date logs");
    }

    // This is one of the few places in the protocol where we have a single
    // RPC that results in two distinct actions:
    //
    // - 1) Downloads the new log segment data (above)
    // - 2) Records the new Paxos data about the synchronized segment (below)
    //
    // These need to be treated as a transaction from the perspective
    // of any external process. We do this by treating the persistPaxosData()
    // success as the "commit" of an atomic transaction. If we fail before
    // this point, the downloaded edit log will only exist at a temporary
    // path, and thus not change any externally visible state. If we fail
    // after this point, then any future prepareRecovery() call will see
    // the Paxos data, and by calling completeHalfDoneAcceptRecovery() will
    // roll forward the rename of the referenced log file.
    //
    // See also: HDFS-3955
    //
    // The fault points here are exercised by the randomized fault injection
    // test case to ensure that this atomic "transaction" operates correctly.
JournalFaultInjector.get().beforePersistPaxosData(); persistPaxosData(segmentTxId, newData); JournalFaultInjector.get().afterPersistPaxosData(); if (syncedFile != null) { FileUtil.replaceFile(syncedFile, storage.getInProgressEditLog(segmentTxId)); } LOG.info("Accepted recovery for segment " + segmentTxId + ": " + TextFormat.shortDebugString(newData)); } private LongRange txnRange(SegmentStateProto seg) { Preconditions.checkArgument(seg.hasEndTxId(), "invalid segment: %s", seg); return new LongRange(seg.getStartTxId(), seg.getEndTxId()); } /** * Synchronize a log segment from another JournalNode. The log is * downloaded from the provided URL into a temporary location on disk, * which is named based on the current request's epoch. * * @return the temporary location of the downloaded file */ private File syncLog(RequestInfo reqInfo, final SegmentStateProto segment, final URL url) throws IOException { final File tmpFile = storage.getSyncLogTemporaryFile( segment.getStartTxId(), reqInfo.getEpoch()); final List<File> localPaths = ImmutableList.of(tmpFile); LOG.info("Synchronizing log " + TextFormat.shortDebugString(segment) + " from " + url); SecurityUtil.doAsLoginUser( new PrivilegedExceptionAction<Void>() { @Override public Void run() throws IOException { // We may have lost our ticket since last checkpoint, log in again, just in case if (UserGroupInformation.isSecurityEnabled()) { UserGroupInformation.getCurrentUser().checkTGTAndReloginFromKeytab(); } boolean success = false; try { TransferFsImage.doGetUrl(url, localPaths, storage, true); assert tmpFile.exists(); success = true; } finally { if (!success) { if (!tmpFile.delete()) { LOG.warn("Failed to delete temporary file " + tmpFile); } } } return null; } }); return tmpFile; } /** * In the case the node crashes in between downloading a log segment * and persisting the associated paxos recovery data, the log segment * will be left in its temporary location on disk. 
Given the paxos data, * we can check if this was indeed the case, and &quot;roll forward&quot; * the atomic operation. * * See the inline comments in * {@link #acceptRecovery(RequestInfo, SegmentStateProto, URL)} for more * details. * * @throws IOException if the temporary file is unable to be renamed into * place */ private void completeHalfDoneAcceptRecovery( PersistedRecoveryPaxosData paxosData) throws IOException { if (paxosData == null) { return; } long segmentId = paxosData.getSegmentState().getStartTxId(); long epoch = paxosData.getAcceptedInEpoch(); File tmp = storage.getSyncLogTemporaryFile(segmentId, epoch); if (tmp.exists()) { File dst = storage.getInProgressEditLog(segmentId); LOG.info("Rolling forward previously half-completed synchronization: " + tmp + " -> " + dst); FileUtil.replaceFile(tmp, dst); } } /** * Retrieve the persisted data for recovering the given segment from disk. */ private PersistedRecoveryPaxosData getPersistedPaxosData(long segmentTxId) throws IOException { File f = storage.getPaxosFile(segmentTxId); if (!f.exists()) { // Default instance has no fields filled in (they're optional) return null; } InputStream in = new FileInputStream(f); try { PersistedRecoveryPaxosData ret = PersistedRecoveryPaxosData.parseDelimitedFrom(in); Preconditions.checkState(ret != null && ret.getSegmentState().getStartTxId() == segmentTxId, "Bad persisted data for segment %s: %s", segmentTxId, ret); return ret; } finally { IOUtils.closeStream(in); } } /** * Persist data for recovering the given segment from disk. */ private void persistPaxosData(long segmentTxId, PersistedRecoveryPaxosData newData) throws IOException { File f = storage.getPaxosFile(segmentTxId); boolean success = false; AtomicFileOutputStream fos = new AtomicFileOutputStream(f); try { newData.writeDelimitedTo(fos); fos.write('\n'); // Write human-readable data after the protobuf. This is only // to assist in debugging -- it's not parsed at all. 
OutputStreamWriter writer = new OutputStreamWriter(fos, Charsets.UTF_8); writer.write(String.valueOf(newData)); writer.write('\n'); writer.flush(); fos.flush(); success = true; } finally { if (success) { IOUtils.closeStream(fos); } else { fos.abort(); } } } public synchronized void doPreUpgrade() throws IOException { // Do not hold file lock on committedTxnId, because the containing // directory will be renamed. It will be reopened lazily on next access. IOUtils.cleanup(LOG, committedTxnId); storage.getJournalManager().doPreUpgrade(); } public synchronized void doUpgrade(StorageInfo sInfo) throws IOException { long oldCTime = storage.getCTime(); storage.cTime = sInfo.cTime; int oldLV = storage.getLayoutVersion(); storage.layoutVersion = sInfo.layoutVersion; LOG.info("Starting upgrade of edits directory: " + ".\n old LV = " + oldLV + "; old CTime = " + oldCTime + ".\n new LV = " + storage.getLayoutVersion() + "; new CTime = " + storage.getCTime()); storage.getJournalManager().doUpgrade(storage); storage.createPaxosDir(); // Copy over the contents of the epoch data files to the new dir. 
File currentDir = storage.getSingularStorageDir().getCurrentDir(); File previousDir = storage.getSingularStorageDir().getPreviousDir(); PersistentLongFile prevLastPromisedEpoch = new PersistentLongFile( new File(previousDir, LAST_PROMISED_FILENAME), 0); PersistentLongFile prevLastWriterEpoch = new PersistentLongFile( new File(previousDir, LAST_WRITER_EPOCH), 0); BestEffortLongFile prevCommittedTxnId = new BestEffortLongFile( new File(previousDir, COMMITTED_TXID_FILENAME), HdfsServerConstants.INVALID_TXID); lastPromisedEpoch = new PersistentLongFile( new File(currentDir, LAST_PROMISED_FILENAME), 0); lastWriterEpoch = new PersistentLongFile( new File(currentDir, LAST_WRITER_EPOCH), 0); committedTxnId = new BestEffortLongFile( new File(currentDir, COMMITTED_TXID_FILENAME), HdfsServerConstants.INVALID_TXID); try { lastPromisedEpoch.set(prevLastPromisedEpoch.get()); lastWriterEpoch.set(prevLastWriterEpoch.get()); committedTxnId.set(prevCommittedTxnId.get()); } finally { IOUtils.cleanup(LOG, prevCommittedTxnId); } } public synchronized void doFinalize() throws IOException { LOG.info("Finalizing upgrade for journal " + storage.getRoot() + "." + (storage.getLayoutVersion()==0 ? "" : "\n cur LV = " + storage.getLayoutVersion() + "; cur CTime = " + storage.getCTime())); storage.getJournalManager().doFinalize(); } public Boolean canRollBack(StorageInfo storage, StorageInfo prevStorage, int targetLayoutVersion) throws IOException { return this.storage.getJournalManager().canRollBack(storage, prevStorage, targetLayoutVersion); } public synchronized void doRollback() throws IOException { // Do not hold file lock on committedTxnId, because the containing // directory will be renamed. It will be reopened lazily on next access. 
IOUtils.cleanup(LOG, committedTxnId); storage.getJournalManager().doRollback(); } synchronized void discardSegments(long startTxId) throws IOException { storage.getJournalManager().discardSegments(startTxId); // we delete all the segments after the startTxId. let's reset committedTxnId committedTxnId.set(startTxId - 1); } synchronized boolean moveTmpSegmentToCurrent(File tmpFile, File finalFile, long endTxId) throws IOException { final boolean success; if (endTxId <= committedTxnId.get()) { if (!finalFile.getParentFile().exists()) { LOG.error(finalFile.getParentFile() + " doesn't exist. Aborting tmp " + "segment move to current directory"); return false; } Files.move(tmpFile.toPath(), finalFile.toPath(), StandardCopyOption.ATOMIC_MOVE); if (finalFile.exists() && FileUtil.canRead(finalFile)) { success = true; } else { success = false; LOG.warn("Unable to move edits file from " + tmpFile + " to " + finalFile); } } else { success = false; LOG.error("The endTxId of the temporary file is not less than the " + "last committed transaction id. Aborting move to final file" + finalFile); } return success; } public Long getJournalCTime() throws IOException { return storage.getJournalManager().getJournalCTime(); } }
/*
Copyright 2021 The Kubernetes Authors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package io.kubernetes.client.openapi.models;

import com.google.gson.annotations.SerializedName;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.time.OffsetDateTime;
import java.util.Objects;

/**
 * ManagedFieldsEntry is a workflow-id, a FieldSet and the group version of the resource that the
 * fieldset applies to.
 */
@ApiModel(
    description =
        "ManagedFieldsEntry is a workflow-id, a FieldSet and the group version of the resource that the fieldset applies to.")
@javax.annotation.Generated(
    value = "org.openapitools.codegen.languages.JavaClientCodegen",
    date = "2021-12-10T19:11:23.904Z[Etc/UTC]")
public class V1ManagedFieldsEntry {
  public static final String SERIALIZED_NAME_API_VERSION = "apiVersion";

  @SerializedName(SERIALIZED_NAME_API_VERSION)
  private String apiVersion;

  public static final String SERIALIZED_NAME_FIELDS_TYPE = "fieldsType";

  @SerializedName(SERIALIZED_NAME_FIELDS_TYPE)
  private String fieldsType;

  public static final String SERIALIZED_NAME_FIELDS_V1 = "fieldsV1";

  @SerializedName(SERIALIZED_NAME_FIELDS_V1)
  private Object fieldsV1;

  public static final String SERIALIZED_NAME_MANAGER = "manager";

  @SerializedName(SERIALIZED_NAME_MANAGER)
  private String manager;

  public static final String SERIALIZED_NAME_OPERATION = "operation";

  @SerializedName(SERIALIZED_NAME_OPERATION)
  private String operation;

  public static final String SERIALIZED_NAME_SUBRESOURCE = "subresource";

  @SerializedName(SERIALIZED_NAME_SUBRESOURCE)
  private String subresource;

  public static final String SERIALIZED_NAME_TIME = "time";

  @SerializedName(SERIALIZED_NAME_TIME)
  private OffsetDateTime time;

  /** Fluent variant of {@link #setApiVersion(String)}; returns this entry for chaining. */
  public V1ManagedFieldsEntry apiVersion(String apiVersion) {
    setApiVersion(apiVersion);
    return this;
  }

  /**
   * APIVersion defines the version of this resource that this field set applies to. The format is
   * \&quot;group/version\&quot; just like the top-level APIVersion field. It is necessary to track
   * the version of a field set because it cannot be automatically converted.
   *
   * @return apiVersion
   */
  @javax.annotation.Nullable
  @ApiModelProperty(
      value =
          "APIVersion defines the version of this resource that this field set applies to. The format is \"group/version\" just like the top-level APIVersion field. It is necessary to track the version of a field set because it cannot be automatically converted.")
  public String getApiVersion() {
    return apiVersion;
  }

  public void setApiVersion(String apiVersion) {
    this.apiVersion = apiVersion;
  }

  /** Fluent variant of {@link #setFieldsType(String)}; returns this entry for chaining. */
  public V1ManagedFieldsEntry fieldsType(String fieldsType) {
    setFieldsType(fieldsType);
    return this;
  }

  /**
   * FieldsType is the discriminator for the different fields format and version. There is currently
   * only one possible value: \&quot;FieldsV1\&quot;
   *
   * @return fieldsType
   */
  @javax.annotation.Nullable
  @ApiModelProperty(
      value =
          "FieldsType is the discriminator for the different fields format and version. There is currently only one possible value: \"FieldsV1\"")
  public String getFieldsType() {
    return fieldsType;
  }

  public void setFieldsType(String fieldsType) {
    this.fieldsType = fieldsType;
  }

  /** Fluent variant of {@link #setFieldsV1(Object)}; returns this entry for chaining. */
  public V1ManagedFieldsEntry fieldsV1(Object fieldsV1) {
    setFieldsV1(fieldsV1);
    return this;
  }

  /**
   * FieldsV1 holds the first JSON version format as described in the \&quot;FieldsV1\&quot; type.
   *
   * @return fieldsV1
   */
  @javax.annotation.Nullable
  @ApiModelProperty(
      value =
          "FieldsV1 holds the first JSON version format as described in the \"FieldsV1\" type.")
  public Object getFieldsV1() {
    return fieldsV1;
  }

  public void setFieldsV1(Object fieldsV1) {
    this.fieldsV1 = fieldsV1;
  }

  /** Fluent variant of {@link #setManager(String)}; returns this entry for chaining. */
  public V1ManagedFieldsEntry manager(String manager) {
    setManager(manager);
    return this;
  }

  /**
   * Manager is an identifier of the workflow managing these fields.
   *
   * @return manager
   */
  @javax.annotation.Nullable
  @ApiModelProperty(value = "Manager is an identifier of the workflow managing these fields.")
  public String getManager() {
    return manager;
  }

  public void setManager(String manager) {
    this.manager = manager;
  }

  /** Fluent variant of {@link #setOperation(String)}; returns this entry for chaining. */
  public V1ManagedFieldsEntry operation(String operation) {
    setOperation(operation);
    return this;
  }

  /**
   * Operation is the type of operation which lead to this ManagedFieldsEntry being created. The
   * only valid values for this field are &#39;Apply&#39; and &#39;Update&#39;.
   *
   * @return operation
   */
  @javax.annotation.Nullable
  @ApiModelProperty(
      value =
          "Operation is the type of operation which lead to this ManagedFieldsEntry being created. The only valid values for this field are 'Apply' and 'Update'.")
  public String getOperation() {
    return operation;
  }

  public void setOperation(String operation) {
    this.operation = operation;
  }

  /** Fluent variant of {@link #setSubresource(String)}; returns this entry for chaining. */
  public V1ManagedFieldsEntry subresource(String subresource) {
    setSubresource(subresource);
    return this;
  }

  /**
   * Subresource is the name of the subresource used to update that object, or empty string if the
   * object was updated through the main resource. The value of this field is used to distinguish
   * between managers, even if they share the same name. For example, a status update will be
   * distinct from a regular update using the same manager name. Note that the APIVersion field is
   * not related to the Subresource field and it always corresponds to the version of the main
   * resource.
   *
   * @return subresource
   */
  @javax.annotation.Nullable
  @ApiModelProperty(
      value =
          "Subresource is the name of the subresource used to update that object, or empty string if the object was updated through the main resource. The value of this field is used to distinguish between managers, even if they share the same name. For example, a status update will be distinct from a regular update using the same manager name. Note that the APIVersion field is not related to the Subresource field and it always corresponds to the version of the main resource.")
  public String getSubresource() {
    return subresource;
  }

  public void setSubresource(String subresource) {
    this.subresource = subresource;
  }

  /** Fluent variant of {@link #setTime(OffsetDateTime)}; returns this entry for chaining. */
  public V1ManagedFieldsEntry time(OffsetDateTime time) {
    setTime(time);
    return this;
  }

  /**
   * Time is timestamp of when these fields were set. It should always be empty if Operation is
   * &#39;Apply&#39;
   *
   * @return time
   */
  @javax.annotation.Nullable
  @ApiModelProperty(
      value =
          "Time is timestamp of when these fields were set. It should always be empty if Operation is 'Apply'")
  public OffsetDateTime getTime() {
    return time;
  }

  public void setTime(OffsetDateTime time) {
    this.time = time;
  }

  @Override
  public boolean equals(java.lang.Object o) {
    if (o == this) {
      return true;
    }
    if (o == null || !getClass().equals(o.getClass())) {
      return false;
    }
    V1ManagedFieldsEntry that = (V1ManagedFieldsEntry) o;
    return Objects.equals(apiVersion, that.apiVersion)
        && Objects.equals(fieldsType, that.fieldsType)
        && Objects.equals(fieldsV1, that.fieldsV1)
        && Objects.equals(manager, that.manager)
        && Objects.equals(operation, that.operation)
        && Objects.equals(subresource, that.subresource)
        && Objects.equals(time, that.time);
  }

  @Override
  public int hashCode() {
    return Objects.hash(apiVersion, fieldsType, fieldsV1, manager, operation, subresource, time);
  }

  @Override
  public String toString() {
    return "class V1ManagedFieldsEntry {\n"
        + fieldLine("apiVersion", apiVersion)
        + fieldLine("fieldsType", fieldsType)
        + fieldLine("fieldsV1", fieldsV1)
        + fieldLine("manager", manager)
        + fieldLine("operation", operation)
        + fieldLine("subresource", subresource)
        + fieldLine("time", time)
        + "}";
  }

  /** Formats one "name: value" line of {@link #toString()} output. */
  private String fieldLine(String name, java.lang.Object value) {
    return "    " + name + ": " + toIndentedString(value) + "\n";
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces (except the first line).
   */
  private String toIndentedString(java.lang.Object o) {
    if (o == null) {
      return "null";
    }
    return o.toString().replace("\n", "\n    ");
  }
}
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.openapi.vfs;

import com.intellij.openapi.roots.ContentIterator;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.io.BufferExposingByteArrayInputStream;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.io.FileUtilRt;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.Function;
import com.intellij.util.PathUtil;
import com.intellij.util.Processor;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.Convertor;
import com.intellij.util.containers.DistinctRootsCollection;
import com.intellij.util.io.URLUtil;
import com.intellij.util.text.StringFactory;
import consulo.logging.Logger;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.io.*;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.*;

import static com.intellij.openapi.vfs.VirtualFileVisitor.VisitorException;

/**
 * Core VFS utilities: ancestry checks, relative-path computation, URL/path
 * conversion, content loading, and recursive traversal of VirtualFile trees.
 */
public class VfsUtilCore {
  private static final Logger LOG = Logger.getInstance(VfsUtilCore.class);

  private static final String MAILTO = "mailto";
  public static final String LOCALHOST_URI_PATH_PREFIX = URLUtil.LOCALHOST_URI_PATH_PREFIX;
  public static final char VFS_SEPARATOR_CHAR = '/';

  private static final String PROTOCOL_DELIMITER = ":";

  /**
   * Checks whether the <code>ancestor {@link VirtualFile}</code> is parent of <code>file
   * {@link VirtualFile}</code>.
   *
   * @param ancestor the file
   * @param file     the file
   * @param strict   if <code>false</code> then this method returns <code>true</code> if <code>ancestor</code>
   *                 and <code>file</code> are equal
   * @return <code>true</code> if <code>ancestor</code> is parent of <code>file</code>; <code>false</code> otherwise
   */
  public static boolean isAncestor(@Nonnull VirtualFile ancestor, @Nonnull VirtualFile file, boolean strict) {
    // Files from different file systems can never be ancestors of each other.
    if (!file.getFileSystem().equals(ancestor.getFileSystem())) return false;
    VirtualFile parent = strict ? file.getParent() : file;
    while (true) {
      if (parent == null) return false;
      if (parent.equals(ancestor)) return true;
      parent = parent.getParent();
    }
  }

  /**
   * @return {@code true} if {@code file} is located under one of {@code roots} or equal to one of them
   */
  public static boolean isUnder(@Nonnull VirtualFile file, @Nullable Set<VirtualFile> roots) {
    if (roots == null || roots.isEmpty()) return false;

    VirtualFile parent = file;
    while (parent != null) {
      if (roots.contains(parent)) {
        return true;
      }
      parent = parent.getParent();
    }
    return false;
  }

  /**
   * @return {@code true} if {@code url} is located under one of {@code rootUrls} or equal to one of them
   */
  public static boolean isUnder(@Nonnull String url, @Nullable Collection<String> rootUrls) {
    if (rootUrls == null || rootUrls.isEmpty()) return false;

    for (String excludesUrl : rootUrls) {
      if (isEqualOrAncestor(excludesUrl, url)) {
        return true;
      }
    }
    return false;
  }

  /**
   * String-level ancestry test: true when {@code fileUrl} equals {@code ancestorUrl}
   * or lies under it (URLs compared textually, '/'-separated).
   */
  public static boolean isEqualOrAncestor(@Nonnull String ancestorUrl, @Nonnull String fileUrl) {
    if (ancestorUrl.equals(fileUrl)) return true;
    if (StringUtil.endsWithChar(ancestorUrl, '/')) {
      return fileUrl.startsWith(ancestorUrl);
    }
    else {
      return StringUtil.startsWithConcatenation(fileUrl, ancestorUrl, "/");
    }
  }

  /** java.io.File counterpart of {@link #isAncestor(VirtualFile, VirtualFile, boolean)}. */
  public static boolean isAncestor(@Nonnull File ancestor, @Nonnull File file, boolean strict) {
    return FileUtil.isAncestor(ancestor, file, strict);
  }

  /**
   * Gets relative path of <code>file</code> to <code>root</code> when it's possible
   * This method is designed to be used for file descriptions (in trees, lists etc.)
   *
   * @param file the file
   * @param root candidate to be parent file (Project base dir, any content roots etc.)
   * @return relative path of {@code file} or full path if {@code root} is not actual ancestor of {@code file}
   */
  @Nullable
  public static String getRelativeLocation(@Nullable VirtualFile file, @Nonnull VirtualFile root) {
    if (file == null) return null;
    String path = getRelativePath(file, root);
    return path != null ? path : file.getPresentableUrl();
  }

  /** Shorthand for {@link #getRelativePath(VirtualFile, VirtualFile, char)} with the VFS separator. */
  @Nullable
  public static String getRelativePath(@Nonnull VirtualFile file, @Nonnull VirtualFile ancestor) {
    return getRelativePath(file, ancestor, VFS_SEPARATOR_CHAR);
  }

  /**
   * Gets the relative path of <code>file</code> to its <code>ancestor</code>. Uses <code>separator</code> for
   * separating files.
   *
   * @param file      the file
   * @param ancestor  parent file
   * @param separator character to use as files separator
   * @return the relative path or {@code null} if {@code ancestor} is not ancestor for {@code file}
   */
  @Nullable
  public static String getRelativePath(@Nonnull VirtualFile file, @Nonnull VirtualFile ancestor, char separator) {
    if (!file.getFileSystem().equals(ancestor.getFileSystem())) {
      return null;
    }

    // First pass: measure the resulting string length (names + separators).
    int length = 0;
    VirtualFile parent = file;
    while (true) {
      if (parent == null) return null;
      if (parent.equals(ancestor)) break;
      if (length > 0) {
        length++;
      }
      length += parent.getNameSequence().length();
      parent = parent.getParent();
    }

    // Second pass: fill the buffer from the end, walking up from the file.
    char[] chars = new char[length];
    int index = chars.length;
    parent = file;

    while (true) {
      if (parent.equals(ancestor)) break;
      if (index < length) {
        chars[--index] = separator;
      }
      CharSequence name = parent.getNameSequence();
      for (int i = name.length() - 1; i >= 0; i--) {
        chars[--index] = name.charAt(i);
      }
      parent = parent.getParent();
    }
    return StringFactory.createShared(chars);
  }

  /**
   * Returns the local-file-system file that contains the given jar entry,
   * or null if the path contains no "!/" jar separator.
   */
  @Nullable
  public static VirtualFile getVirtualFileForJar(@Nullable VirtualFile entryVFile) {
    if (entryVFile == null) return null;
    final String path = entryVFile.getPath();
    final int separatorIndex = path.indexOf("!/");
    if (separatorIndex < 0) return null;

    String localPath = path.substring(0, separatorIndex);
    return VirtualFileManager.getInstance().findFileByUrl("file://" + localPath);
  }

  /**
   * Makes a copy of the <code>file</code> in the <code>toDir</code> folder and returns it.
   *
   * @param requestor any object to control who called this method. Note that
   *                  it is considered to be an external change if <code>requestor</code> is <code>null</code>.
   *                  See {@link VirtualFileEvent#getRequestor}
   * @param file      file to make a copy of
   * @param toDir     directory to make a copy in
   * @return a copy of the file
   * @throws IOException if file failed to be copied
   */
  @Nonnull
  public static VirtualFile copyFile(Object requestor, @Nonnull VirtualFile file, @Nonnull VirtualFile toDir) throws IOException {
    return copyFile(requestor, file, toDir, file.getName());
  }

  /**
   * Makes a copy of the <code>file</code> in the <code>toDir</code> folder with the <code>newName</code> and returns it.
   *
   * @param requestor any object to control who called this method. Note that
   *                  it is considered to be an external change if <code>requestor</code> is <code>null</code>.
   *                  See {@link VirtualFileEvent#getRequestor}
   * @param file      file to make a copy of
   * @param toDir     directory to make a copy in
   * @param newName   new name of the file
   * @return a copy of the file
   * @throws IOException if file failed to be copied
   */
  @Nonnull
  public static VirtualFile copyFile(Object requestor, @Nonnull VirtualFile file, @Nonnull VirtualFile toDir, @Nonnull String newName)
          throws IOException {
    final VirtualFile newChild = toDir.createChildData(requestor, newName);
    newChild.setBinaryContent(file.contentsToByteArray());
    return newChild;
  }

  /** Wraps a byte array in a stream positioned past any BOM the file declares. */
  @Nonnull
  public static InputStream byteStreamSkippingBOM(@Nonnull byte[] buf, @Nonnull VirtualFile file) throws IOException {
    @SuppressWarnings("IOResourceOpenedButNotSafelyClosed") BufferExposingByteArrayInputStream stream = new BufferExposingByteArrayInputStream(buf);
    return inputStreamSkippingBOM(stream, file);
  }

  /** Returns a stream positioned past any byte-order mark at the start of {@code stream}. */
  @Nonnull
  public static InputStream inputStreamSkippingBOM(@Nonnull InputStream stream, @SuppressWarnings("UnusedParameters") @Nonnull VirtualFile file) throws IOException {
    return CharsetToolkit.inputStreamSkippingBOM(stream);
  }

  /** Writes the file's BOM (if any) to {@code stream} before returning it. */
  @Nonnull
  public static OutputStream outputStreamAddingBOM(@Nonnull OutputStream stream, @Nonnull VirtualFile file) throws IOException {
    byte[] bom = file.getBOM();
    if (bom != null) {
      stream.write(bom);
    }
    return stream;
  }

  /**
   * Recursively feeds files under {@code root} to {@code iterator}; subtrees rejected
   * by {@code filter} are skipped. Returns false if iteration was aborted by the iterator.
   */
  public static boolean iterateChildrenRecursively(@Nonnull final VirtualFile root,
                                                   @Nullable final VirtualFileFilter filter,
                                                   @Nonnull final ContentIterator iterator) {
    final VirtualFileVisitor.Result result = visitChildrenRecursively(root, new VirtualFileVisitor() {
      @Nonnull
      @Override
      public Result visitFileEx(@Nonnull VirtualFile file) {
        if (filter != null && !filter.accept(file)) return SKIP_CHILDREN;
        if (!iterator.processFile(file)) return skipTo(root);
        return CONTINUE;
      }
    });
    // skipToParent == root means the iterator asked to abort the whole walk.
    return !Comparing.equal(result.skipToParent, root);
  }

  /**
   * Depth-first traversal of {@code file} driven by {@code visitor}; the visitor can
   * prune children, supply a custom child iterable, or skip up to an ancestor.
   */
  @SuppressWarnings({"UnsafeVfsRecursion", "Duplicates"})
  @Nonnull
  public static VirtualFileVisitor.Result visitChildrenRecursively(@Nonnull VirtualFile file, @Nonnull VirtualFileVisitor<?> visitor) throws VisitorException {
    boolean pushed = false;
    try {
      final boolean visited = visitor.allowVisitFile(file);
      if (visited) {
        VirtualFileVisitor.Result result = visitor.visitFileEx(file);
        if (result.skipChildren) return result;
      }

      Iterable<VirtualFile> childrenIterable = null;
      VirtualFile[] children = null;

      try {
        if (file.isValid() && visitor.allowVisitChildren(file) && !visitor.depthLimitReached()) {
          childrenIterable = visitor.getChildrenIterable(file);
          if (childrenIterable == null) {
            children = file.getChildren();
          }
        }
      }
      catch (InvalidVirtualFileAccessException e) {
        // File disappeared mid-walk; treat as an empty subtree.
        LOG.info("Ignoring: " + e.getMessage());
        return VirtualFileVisitor.CONTINUE;
      }

      if (childrenIterable != null) {
        visitor.saveValue();
        pushed = true;
        for (VirtualFile child : childrenIterable) {
          VirtualFileVisitor.Result result = visitChildrenRecursively(child, visitor);
          // Propagate "skip to ancestor" results until we reach that ancestor.
          if (result.skipToParent != null && !Comparing.equal(result.skipToParent, child)) return result;
        }
      }
      else if (children != null && children.length != 0) {
        visitor.saveValue();
        pushed = true;
        for (VirtualFile child : children) {
          VirtualFileVisitor.Result result = visitChildrenRecursively(child, visitor);
          if (result.skipToParent != null && !Comparing.equal(result.skipToParent, child)) return result;
        }
      }

      if (visited) {
        visitor.afterChildrenVisited(file);
      }

      return VirtualFileVisitor.CONTINUE;
    }
    finally {
      visitor.restoreValue(pushed);
    }
  }

  /**
   * Same as {@link #visitChildrenRecursively(VirtualFile, VirtualFileVisitor)}, but
   * unwraps a {@link VisitorException} whose cause is of type {@code eClass} and rethrows it.
   */
  public static <E extends Exception> VirtualFileVisitor.Result visitChildrenRecursively(@Nonnull VirtualFile file,
                                                                                         @Nonnull VirtualFileVisitor visitor,
                                                                                         @Nonnull Class<E> eClass) throws E {
    try {
      return visitChildrenRecursively(file, visitor);
    }
    catch (VisitorException e) {
      final Throwable cause = e.getCause();
      if (eClass.isInstance(cause)) {
        throw eClass.cast(cause);
      }
      throw e;
    }
  }

  /**
   * Returns {@code true} if given virtual file represents broken symbolic link (which points to non-existent file).
   */
  public static boolean isBrokenLink(@Nonnull VirtualFile file) {
    return file.is(VFileProperty.SYMLINK) && file.getCanonicalPath() == null;
  }

  /**
   * Returns {@code true} if given virtual file represents broken or recursive symbolic link.
   */
  public static boolean isInvalidLink(@Nonnull VirtualFile link) {
    final VirtualFile target = link.getCanonicalFile();
    return target == null || target.equals(link) || isAncestor(target, link, true);
  }

  /** Loads the whole text content of {@code file} using its declared charset. */
  @Nonnull
  public static String loadText(@Nonnull VirtualFile file) throws IOException {
    return loadText(file, (int)file.getLength());
  }

  /** Loads up to {@code length} characters of {@code file}'s text using its declared charset. */
  @Nonnull
  public static String loadText(@Nonnull VirtualFile file, int length) throws IOException {
    InputStreamReader reader = new InputStreamReader(file.getInputStream(), file.getCharset());
    try {
      return StringFactory.createShared(FileUtil.loadText(reader, length));
    }
    finally {
      reader.close();
    }
  }

  /**
   * Loads the file's bytes; for files over the large-file threshold only a preview-sized
   * prefix is read.
   */
  @Nonnull
  public static byte[] loadBytes(@Nonnull VirtualFile file) throws IOException {
    return FileUtilRt.isTooLarge(file.getLength()) ?
           FileUtil.loadFirstAndClose(file.getInputStream(), FileUtilRt.LARGE_FILE_PREVIEW_SIZE) :
           file.contentsToByteArray();
  }

  /** Converts a collection of files to an array, sharing {@link VirtualFile#EMPTY_ARRAY} when empty. */
  @Nonnull
  public static VirtualFile[] toVirtualFileArray(@Nonnull Collection<? extends VirtualFile> files) {
    int size = files.size();
    if (size == 0) return VirtualFile.EMPTY_ARRAY;
    //noinspection SSBasedInspection
    return files.toArray(new VirtualFile[size]);
  }

  /** Extracts the path part of a VFS URL; returns "" for null input. */
  @Nonnull
  public static String urlToPath(@Nullable String url) {
    if (url == null) return "";
    return VirtualFileManager.extractPath(url);
  }

  /** Converts a VirtualFile to the corresponding java.io.File (by presentable URL). */
  @Nonnull
  public static File virtualToIoFile(@Nonnull VirtualFile file) {
    return new File(PathUtil.toPresentableUrl(file.getUrl()));
  }

  /** Builds a file:// VFS URL for the given path. */
  @Nonnull
  public static String pathToUrl(@Nonnull String path) {
    return VirtualFileManager.constructUrl(URLUtil.FILE_PROTOCOL, path);
  }

  /** Maps each VirtualFile in {@code scope} to its java.io.File counterpart. */
  public static List<File> virtualToIoFiles(@Nonnull Collection<VirtualFile> scope) {
    return ContainerUtil.map2List(scope, new Function<VirtualFile, File>() {
      @Override
      public File fun(VirtualFile file) {
        return virtualToIoFile(file);
      }
    });
  }

  /** Normalizes an external URL to IDEA's internal URL form (localhost prefix removed). */
  @Nonnull
  public static String toIdeaUrl(@Nonnull String url) {
    return toIdeaUrl(url, true);
  }

  /** Normalizes an external URL to IDEA's internal URL form. */
  @Nonnull
  public static String toIdeaUrl(@Nonnull String url, boolean removeLocalhostPrefix) {
    return URLUtil.toIdeaUrl(url, removeLocalhostPrefix);
  }

  @Nonnull
  public static String fixURLforIDEA(@Nonnull String url) {
    // removeLocalhostPrefix - false due to backward compatibility reasons
    return toIdeaUrl(url, false);
  }

  /**
   * Converts a {@link URL} to an IDEA-style protocol://path string, unwrapping
   * jar:file:... URLs and stripping leading slashes on Windows/OS2 drive paths.
   */
  @Nonnull
  public static String convertFromUrl(@Nonnull URL url) {
    String protocol = url.getProtocol();
    String path = url.getPath();
    if (protocol.equals(URLUtil.JAR_PROTOCOL)) {
      if (StringUtil.startsWithConcatenation(path, URLUtil.FILE_PROTOCOL, PROTOCOL_DELIMITER)) {
        try {
          URL subURL = new URL(path);
          path = subURL.getPath();
        }
        catch (MalformedURLException e) {
          throw new RuntimeException(VfsBundle.message("url.parse.unhandled.exception"), e);
        }
      }
      else {
        throw new RuntimeException(new IOException(VfsBundle.message("url.parse.error", url.toExternalForm())));
      }
    }
    if (SystemInfo.isWindows || SystemInfo.isOS2) {
      while (!path.isEmpty() && path.charAt(0) == '/') {
        path = path.substring(1, path.length());
      }
    }

    path = URLUtil.unescapePercentSequences(path);
    return protocol + "://" + path;
  }

  /**
   * Converts VsfUrl info {@link URL}.
   *
   * @param vfsUrl VFS url (as constructed by {@link VirtualFile#getUrl()}
   * @return converted URL or null if error has occurred.
   */
  @Nullable
  public static URL convertToURL(@Nonnull String vfsUrl) {
    if (vfsUrl.startsWith("jar://") || vfsUrl.startsWith(StandardFileSystems.ZIP_PROTOCOL_PREFIX)) {
      try {
        // jar:// and zip:// have the same length
        return new URL("jar:file:///" + vfsUrl.substring(StandardFileSystems.ZIP_PROTOCOL_PREFIX.length()));
      }
      catch (MalformedURLException e) {
        return null;
      }
    }

    if (vfsUrl.startsWith(MAILTO)) {
      try {
        return new URL(vfsUrl);
      }
      catch (MalformedURLException e) {
        return null;
      }
    }

    String[] split = vfsUrl.split("://");

    if (split.length != 2) {
      LOG.debug("Malformed VFS URL: " + vfsUrl);
      return null;
    }

    String protocol = split[0];
    String path = split[1];

    try {
      if (protocol.equals(StandardFileSystems.FILE_PROTOCOL)) {
        return new URL(StandardFileSystems.FILE_PROTOCOL, "", path);
      }
      else {
        return URLUtil.internProtocol(new URL(vfsUrl));
      }
    }
    catch (MalformedURLException e) {
      LOG.debug("MalformedURLException occurred:" + e.getMessage());
      return null;
    }
  }

  /** Rewrites jar:// and zip:// IDEA URLs into standard jar:file: URL syntax. */
  @Nonnull
  public static String fixIDEAUrl(@Nonnull String ideaUrl) {
    final String ideaProtocolMarker = "://";
    int idx = ideaUrl.indexOf(ideaProtocolMarker);
    if (idx >= 0) {
      String s = ideaUrl.substring(0, idx);

      if (s.equals("jar") || s.equals(StandardFileSystems.ZIP_PROTOCOL)) {
        s = "jar:file";
      }
      final String urlWithoutProtocol = ideaUrl.substring(idx + ideaProtocolMarker.length());
      ideaUrl = s + ":" + (urlWithoutProtocol.startsWith("/") ? "" : "/") + urlWithoutProtocol;
    }

    return ideaUrl;
  }

  /**
   * Resolves {@code uri} (which may be a file:, jar:file: or relative path)
   * against the optional {@code base} file; returns null when nothing matches.
   */
  @Nullable
  public static VirtualFile findRelativeFile(@Nonnull String uri, @Nullable VirtualFile base) {
    if (base != null) {
      if (!base.isValid()) {
        LOG.error("Invalid file name: " + base.getName() + ", url: " + uri);
      }
    }

    uri = uri.replace('\\', '/');

    if (uri.startsWith("file:///")) {
      uri = uri.substring("file:///".length());
      if (!SystemInfo.isWindows) uri = "/" + uri;
    }
    else if (uri.startsWith("file:/")) {
      uri = uri.substring("file:/".length());
      if (!SystemInfo.isWindows) uri = "/" + uri;
    }
    else {
      uri = StringUtil.trimStart(uri, "file:");
    }

    VirtualFile file = null;

    if (uri.startsWith("jar:file:/")) {
      uri = uri.substring("jar:file:/".length());
      if (!SystemInfo.isWindows) uri = "/" + uri;
      file = VirtualFileManager.getInstance().findFileByUrl(StandardFileSystems.ZIP_PROTOCOL_PREFIX + uri);
    }
    else {
      // Absolute unix path, or Windows drive-letter path ("C:...").
      if (!SystemInfo.isWindows && StringUtil.startsWithChar(uri, '/') ||
          SystemInfo.isWindows && uri.length() >= 2 && Character.isLetter(uri.charAt(0)) && uri.charAt(1) == ':') {
        file = StandardFileSystems.local().findFileByPath(uri);
      }
    }

    if (file == null && uri.contains(URLUtil.ARCHIVE_SEPARATOR)) {
      file = StandardFileSystems.zip().findFileByPath(uri);
      if (file == null && base == null) {
        file = VirtualFileManager.getInstance().findFileByUrl(uri);
      }
    }

    if (file == null) {
      if (base == null) return StandardFileSystems.local().findFileByPath(uri);
      if (!base.isDirectory()) base = base.getParent();
      if (base == null) return StandardFileSystems.local().findFileByPath(uri);
      file = VirtualFileManager.getInstance().findFileByUrl(base.getUrl() + "/" + uri);
      if (file == null) return null;
    }

    return file;
  }

  /**
   * Iteratively applies {@code processor} to {@code root} and every file below it;
   * stops and returns false as soon as the processor returns false.
   * NOTE(review): method continues beyond this chunk — the remainder of its body
   * is outside the visible region.
   */
  public static boolean processFilesRecursively(@Nonnull VirtualFile root, @Nonnull Processor<VirtualFile> processor) {
    if (!processor.process(root)) return false;

    if (root.isDirectory()) {
      final LinkedList<VirtualFile[]> queue = new LinkedList<VirtualFile[]>();

      queue.add(root.getChildren());

      do {
        final VirtualFile[] files = queue.removeFirst();
for (VirtualFile file : files) { if (!processor.process(file)) return false; if (file.isDirectory()) { queue.add(file.getChildren()); } } } while (!queue.isEmpty()); } return true; } /** * Gets the common ancestor for passed files, or null if the files do not have common ancestors. * * @param file1 fist file * @param file2 second file * @return common ancestor for the passed files. Returns <code>null</code> if * the files do not have common ancestor */ @Nullable public static VirtualFile getCommonAncestor(@Nonnull VirtualFile file1, @Nonnull VirtualFile file2) { if (!file1.getFileSystem().equals(file2.getFileSystem())) { return null; } VirtualFile[] path1 = getPathComponents(file1); VirtualFile[] path2 = getPathComponents(file2); int lastEqualIdx = -1; for (int i = 0; i < path1.length && i < path2.length; i++) { if (path1[i].equals(path2[i])) { lastEqualIdx = i; } else { break; } } return lastEqualIdx == -1 ? null : path1[lastEqualIdx]; } /** * Gets an array of files representing paths from root to the passed file. * * @param file the file * @return virtual files which represents paths from root to the passed file */ @Nonnull static VirtualFile[] getPathComponents(@Nonnull VirtualFile file) { ArrayList<VirtualFile> componentsList = new ArrayList<VirtualFile>(); while (file != null) { componentsList.add(file); file = file.getParent(); } int size = componentsList.size(); VirtualFile[] components = new VirtualFile[size]; for (int i = 0; i < size; i++) { components[i] = componentsList.get(size - i - 1); } return components; } public static boolean hasInvalidFiles(@Nonnull Iterable<VirtualFile> files) { for (VirtualFile file : files) { if (!file.isValid()) { return true; } } return false; } @Nullable public static VirtualFile findContainingDirectory(@Nonnull VirtualFile file, @Nonnull CharSequence name) { VirtualFile parent = file.isDirectory() ? 
file : file.getParent(); while (parent != null) { if (Comparing.equal(parent.getNameSequence(), name, SystemInfo.isFileSystemCaseSensitive)) { return parent; } parent = parent.getParent(); } return null; } /** * this collection will keep only distinct files/folders, e.g. C:\foo\bar will be removed when C:\foo is added */ public static class DistinctVFilesRootsCollection extends DistinctRootsCollection<VirtualFile> { public DistinctVFilesRootsCollection() { } public DistinctVFilesRootsCollection(Collection<VirtualFile> virtualFiles) { super(virtualFiles); } public DistinctVFilesRootsCollection(VirtualFile[] collection) { super(collection); } @Override protected boolean isAncestor(@Nonnull VirtualFile ancestor, @Nonnull VirtualFile virtualFile) { return VfsUtilCore.isAncestor(ancestor, virtualFile, false); } } public static void processFilesRecursively(@Nonnull VirtualFile root, @Nonnull Processor<VirtualFile> processor, @Nonnull Convertor<VirtualFile, Boolean> directoryFilter) { if (!processor.process(root)) return; if (root.isDirectory() && directoryFilter.convert(root)) { final LinkedList<VirtualFile[]> queue = new LinkedList<VirtualFile[]>(); queue.add(root.getChildren()); do { final VirtualFile[] files = queue.removeFirst(); for (VirtualFile file : files) { if (!processor.process(file)) return; if (file.isDirectory() && directoryFilter.convert(file)) { queue.add(file.getChildren()); } } } while (!queue.isEmpty()); } } }
package com.box.sdk;

import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;

import com.box.sdk.BoxGroupMembership.Role;
import com.eclipsesource.json.JsonArray;
import com.eclipsesource.json.JsonObject;
import com.eclipsesource.json.JsonValue;

/**
 * Represents a set of Box users.
 *
 * <p>Unless otherwise noted, the methods in this class can throw an unchecked {@link BoxAPIException} (unchecked
 * meaning that the compiler won't force you to handle it) if an error occurs. If you wish to implement custom error
 * handling for errors related to the Box REST API, you should capture this exception explicitly.</p>
 */
public class BoxGroup extends BoxCollaborator {
    // REST endpoint templates; %s placeholders are filled by URLTemplate.build.
    private static final URLTemplate GROUPS_URL_TEMPLATE = new URLTemplate("groups");
    private static final URLTemplate GROUP_URL_TEMPLATE = new URLTemplate("groups/%s");
    private static final URLTemplate MEMBERSHIPS_URL_TEMPLATE = new URLTemplate("groups/%s/memberships");
    private static final URLTemplate ADD_MEMBERSHIP_URL_TEMPLATE = new URLTemplate("group_memberships");
    private static final URLTemplate COLLABORATIONS_URL_TEMPLATE = new URLTemplate("groups/%s/collaborations");

    /**
     * Constructs a BoxGroup for a group with a given ID.
     * @param api the API connection to be used by the group.
     * @param id the ID of the group.
     */
    public BoxGroup(BoxAPIConnection api, String id) {
        super(api, id);
    }

    /**
     * Creates a new group with a specified name.
     * @param api the API connection to be used by the group.
     * @param name the name of the new group.
     * @return info about the created group.
     */
    public static BoxGroup.Info createGroup(BoxAPIConnection api, String name) {
        return createGroup(api, name, null, null, null, null, null);
    }

    /**
     * Creates a new group with a specified name.
     * @param api the API connection to be used by the group.
     * @param name the name of the new group.
     * @param provenance the provenance of the new group
     * @param externalSyncIdentifier the external_sync_identifier of the new group
     * @param description the description of the new group
     * @param invitabilityLevel the invitability_level of the new group
     * @param memberViewabilityLevel the member_viewability_level of the new group
     * @return info about the created group.
     */
    public static BoxGroup.Info createGroup(BoxAPIConnection api, String name, String provenance,
                                            String externalSyncIdentifier, String description,
                                            String invitabilityLevel, String memberViewabilityLevel) {
        // Only non-null optional attributes are serialized into the request body.
        JsonObject requestJSON = new JsonObject();
        requestJSON.add("name", name);

        if (provenance != null) {
            requestJSON.add("provenance", provenance);
        }
        if (externalSyncIdentifier != null) {
            requestJSON.add("external_sync_identifier", externalSyncIdentifier);
        }
        if (description != null) {
            requestJSON.add("description", description);
        }
        if (invitabilityLevel != null) {
            requestJSON.add("invitability_level", invitabilityLevel);
        }
        if (memberViewabilityLevel != null) {
            requestJSON.add("member_viewability_level", memberViewabilityLevel);
        }

        URL url = GROUPS_URL_TEMPLATE.build(api.getBaseURL());
        BoxJSONRequest request = new BoxJSONRequest(api, url, "POST");
        request.setBody(requestJSON.toString());
        BoxJSONResponse response = (BoxJSONResponse) request.send();
        JsonObject responseJSON = JsonObject.readFrom(response.getJSON());

        BoxGroup group = new BoxGroup(api, responseJSON.get("id").asString());
        return group.new Info(responseJSON);
    }

    /**
     * Gets an iterable of all the groups in the enterprise.
     * @param api the API connection to be used when retrieving the groups.
     * @return an iterable containing info about all the groups.
     */
    public static Iterable<BoxGroup.Info> getAllGroups(final BoxAPIConnection api) {
        return new Iterable<BoxGroup.Info>() {
            public Iterator<BoxGroup.Info> iterator() {
                URL url = GROUPS_URL_TEMPLATE.build(api.getBaseURL());
                return new BoxGroupIterator(api, url);
            }
        };
    }

    /**
     * Gets information about this group.
     * @return info about this group.
     */
    public Info getInfo() {
        URL url = GROUP_URL_TEMPLATE.build(this.getAPI().getBaseURL(), this.getID());
        BoxAPIRequest request = new BoxAPIRequest(this.getAPI(), url, "GET");
        BoxJSONResponse response = (BoxJSONResponse) request.send();
        JsonObject responseJSON = JsonObject.readFrom(response.getJSON());
        return new Info(responseJSON);
    }

    /**
     * Gets information about all of the group memberships for this group.
     * @return a collection of information about the group memberships for this group.
     */
    public Collection<BoxGroupMembership.Info> getMemberships() {
        BoxAPIConnection api = this.getAPI();
        URL url = MEMBERSHIPS_URL_TEMPLATE.build(api.getBaseURL(), this.getID());

        BoxAPIRequest request = new BoxAPIRequest(api, url, "GET");
        BoxJSONResponse response = (BoxJSONResponse) request.send();
        JsonObject responseJSON = JsonObject.readFrom(response.getJSON());

        // total_count is used only to presize the result list; the loop iterates "entries".
        // NOTE(review): only this single response's "entries" are read — looks like large
        // groups could be truncated by server-side pagination; confirm against the Box API.
        int entriesCount = responseJSON.get("total_count").asInt();
        Collection<BoxGroupMembership.Info> memberships = new ArrayList<BoxGroupMembership.Info>(entriesCount);
        JsonArray entries = responseJSON.get("entries").asArray();
        for (JsonValue entry : entries) {
            JsonObject entryObject = entry.asObject();
            BoxGroupMembership membership = new BoxGroupMembership(api, entryObject.get("id").asString());
            BoxGroupMembership.Info info = membership.new Info(entryObject);
            memberships.add(info);
        }

        return memberships;
    }

    /**
     * Adds a member to this group with the default role.
     * @param user the member to be added to this group.
     * @return info about the new group membership.
     */
    public BoxGroupMembership.Info addMembership(BoxUser user) {
        return this.addMembership(user, null);
    }

    /**
     * Adds a member to this group with the specified role.
     * @param user the member to be added to this group.
     * @param role the role of the user in this group. Can be null to assign the default role.
     * @return info about the new group membership.
     */
    public BoxGroupMembership.Info addMembership(BoxUser user, Role role) {
        BoxAPIConnection api = this.getAPI();

        JsonObject requestJSON = new JsonObject();
        requestJSON.add("user", new JsonObject().add("id", user.getID()));
        requestJSON.add("group", new JsonObject().add("id", this.getID()));
        if (role != null) {
            requestJSON.add("role", role.toJSONString());
        }

        URL url = ADD_MEMBERSHIP_URL_TEMPLATE.build(api.getBaseURL());
        BoxJSONRequest request = new BoxJSONRequest(api, url, "POST");
        request.setBody(requestJSON.toString());
        BoxJSONResponse response = (BoxJSONResponse) request.send();
        JsonObject responseJSON = JsonObject.readFrom(response.getJSON());

        BoxGroupMembership membership = new BoxGroupMembership(api, responseJSON.get("id").asString());
        return membership.new Info(responseJSON);
    }

    /**
     * Gets information about all of the collaborations for this group.
     * @return a collection of information about the collaborations for this group.
     */
    public Collection<BoxCollaboration.Info> getCollaborations() {
        BoxAPIConnection api = this.getAPI();
        URL url = COLLABORATIONS_URL_TEMPLATE.build(api.getBaseURL(), this.getID());

        BoxAPIRequest request = new BoxAPIRequest(api, url, "GET");
        BoxJSONResponse response = (BoxJSONResponse) request.send();
        JsonObject responseJSON = JsonObject.readFrom(response.getJSON());

        // Same presize-then-iterate pattern as getMemberships(); see pagination note there... 
        // NOTE(review): only the first page of "entries" is consumed — verify against Box API.
        int entriesCount = responseJSON.get("total_count").asInt();
        Collection<BoxCollaboration.Info> collaborations = new ArrayList<BoxCollaboration.Info>(entriesCount);
        JsonArray entries = responseJSON.get("entries").asArray();
        for (JsonValue entry : entries) {
            JsonObject entryObject = entry.asObject();
            BoxCollaboration collaboration = new BoxCollaboration(api, entryObject.get("id").asString());
            BoxCollaboration.Info info = collaboration.new Info(entryObject);
            collaborations.add(info);
        }

        return collaborations;
    }

    /**
     * Deletes this group.
     */
    public void delete() {
        URL url = GROUP_URL_TEMPLATE.build(this.getAPI().getBaseURL(), this.getID());
        BoxAPIRequest request = new BoxAPIRequest(this.getAPI(), url, "DELETE");
        BoxAPIResponse response = request.send();
        response.disconnect();
    }

    /**
     * Contains information about a BoxGroup.
     */
    public class Info extends BoxCollaborator.Info {
        // Optional group attributes; populated lazily by parseJSONMember.
        private String provenance;
        private String externalSyncIdentifier;
        private String description;
        private String invitabilityLevel;
        private String memberViewabilityLevel;

        /**
         * Constructs an empty Info object.
         */
        public Info() {
            super();
        }

        /**
         * Constructs an Info object by parsing information from a JSON string.
         * @param json the JSON string to parse.
         */
        public Info(String json) {
            super(json);
        }

        /**
         * Constructs an Info object using an already parsed JSON object.
         * @param jsonObject the parsed JSON object.
         */
        Info(JsonObject jsonObject) {
            super(jsonObject);
        }

        @Override
        public BoxGroup getResource() {
            return BoxGroup.this;
        }

        @Override
        protected void parseJSONMember(JsonObject.Member member) {
            super.parseJSONMember(member);
            String memberName = member.getName();
            JsonValue value = member.getValue();
            if (memberName.equals("description")) {
                this.description = value.asString();
            } else if (memberName.equals("external_sync_identifier")) {
                this.externalSyncIdentifier = value.asString();
            } else if (memberName.equals("invitability_level")) {
                this.invitabilityLevel = value.asString();
            } else if (memberName.equals("member_viewability_level")) {
                this.memberViewabilityLevel = value.asString();
            } else if (memberName.equals("provenance")) {
                this.provenance = value.asString();
            }
        }

        /**
         * Gets the description for the group.
         * @return the description for the group.
         */
        public String getDescription() {
            return this.description;
        }

        /**
         * Sets the description for the group.
         * @param description the description for the group.
         */
        public void setDescription(String description) {
            this.description = description;
            addPendingChange("description", description);
        }

        /**
         * Gets the external_sync_identifier for the group.
         * @return the external_sync_identifier for the group.
         */
        public String getExternalSyncIdentifier() {
            return this.externalSyncIdentifier;
        }

        /**
         * Sets the external_sync_identifier for the group.
         * @param externalSyncIdentifier the external_sync_identifier for the group.
         */
        public void setExternalSyncIdentifier(String externalSyncIdentifier) {
            this.externalSyncIdentifier = externalSyncIdentifier;
            addPendingChange("external_sync_identifier", externalSyncIdentifier);
        }

        /**
         * Gets the invitability_level for the group.
         * @return the invitability_level for the group.
         */
        public String getInvitabilityLevel() {
            return this.invitabilityLevel;
        }

        /**
         * Sets the invitability_level for the group.
         * @param invitabilityLevel the invitability_level for the group.
         */
        public void setInvitabilityLevel(String invitabilityLevel) {
            this.invitabilityLevel = invitabilityLevel;
            addPendingChange("invitability_level", invitabilityLevel);
        }

        /**
         * Gets the member_viewability_level for the group.
         * @return the member_viewability_level for the group.
         */
        public String getMemberViewabilityLevel() {
            return this.memberViewabilityLevel;
        }

        /**
         * Sets the member_viewability_level for the group.
         * @param memberViewabilityLevel the member_viewability_level for the group.
         */
        public void setMemberViewabilityLevel(String memberViewabilityLevel) {
            this.memberViewabilityLevel = memberViewabilityLevel;
            addPendingChange("member_viewability_level", memberViewabilityLevel);
        }

        /**
         * Gets the provenance for the group.
         * @return the provenance for the group.
         */
        public String getProvenance() {
            return this.provenance;
        }

        /**
         * Sets the provenance for the group.
         * @param provenance the provenance for the group.
         */
        public void setProvenance(String provenance) {
            this.provenance = provenance;
            addPendingChange("provenance", provenance);
        }
    }
}
package com.demigodsrpg.demigods.engine.deity; import com.demigodsrpg.demigods.engine.Demigods; import com.demigodsrpg.demigods.engine.battle.Battle; import com.demigodsrpg.demigods.engine.battle.Participant; import com.demigodsrpg.demigods.engine.entity.DemigodsTameable; import com.demigodsrpg.demigods.engine.entity.player.DemigodsCharacter; import com.demigodsrpg.demigods.engine.entity.player.DemigodsPlayer; import com.demigodsrpg.demigods.engine.entity.player.attribute.Skill; import com.demigodsrpg.demigods.engine.language.English; import com.demigodsrpg.demigods.engine.util.Configs; import com.demigodsrpg.demigods.engine.util.Strings; import com.google.common.base.Predicate; import com.google.common.collect.Collections2; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import org.apache.commons.lang.StringUtils; import org.bukkit.*; import org.bukkit.block.Block; import org.bukkit.entity.Entity; import org.bukkit.entity.LivingEntity; import org.bukkit.entity.Player; import org.bukkit.entity.Tameable; import org.bukkit.event.Listener; import org.bukkit.event.block.Action; import org.bukkit.event.entity.EntityDamageByEntityEvent; import org.bukkit.event.entity.EntityDamageEvent; import org.bukkit.event.player.PlayerInteractEvent; import org.bukkit.inventory.ItemStack; import org.bukkit.material.MaterialData; import org.bukkit.util.BlockIterator; import java.util.Collection; import java.util.List; import java.util.Random; import java.util.Set; public interface Ability { String getDeity(); String getName(); String getCommand(); int getCost(); int getDelay(); int getRepeat(); List<String> getDetails(); Skill.Type getType(); MaterialData getWeapon(); boolean hasWeapon(); Predicate<Player> getActionPredicate(); Listener getListener(); Runnable getRunnable(); public static class Util { public static final int TARGET_OFFSET = 5; public static boolean preProcessAbility(Player player, Ability 
ability) { // Define variables DemigodsCharacter character = DemigodsCharacter.of(player); if (!Battle.canTarget(character)) { player.sendMessage(ChatColor.YELLOW + "You can't do that from a no-PVP zone."); return false; } else if (character.getMeta().getFavor() < ability.getCost()) { player.sendMessage(ChatColor.YELLOW + "You do not have enough favor."); return false; } else return DemigodsCharacter.isCooledDown(character, ability.getName()); } /** * Returns true if the <code>target</code> can be attacked by the <code>player</code> with the defined <code>ability</code>. * * @param player the Player doing the ability * @param target the LivingEntity being targeted * @param ability the ability itself * @return true/false depending on if all pre-process tests have passed */ public static boolean preProcessAbility(Player player, LivingEntity target, Ability ability) { // Define variables DemigodsCharacter character = DemigodsCharacter.of(player); if (preProcessAbility(player, ability)) { if (target == null) { player.sendMessage(ChatColor.YELLOW + "No target found."); return false; } else if (Battle.canParticipate(target) && !Battle.canTarget(Battle.defineParticipant(target))) { player.sendMessage(ChatColor.YELLOW + "Target is in a no-PVP zone."); return false; } Participant attacked = Battle.defineParticipant(target); return !(attacked != null && character.alliedTo(attacked)); } return false; } public static Set<LivingEntity> preProcessAbility(Player player, Collection<Entity> targets, Ability ability) { // Define variables DemigodsCharacter character = DemigodsCharacter.of(player); Set<LivingEntity> set = Sets.newHashSet(); if (preProcessAbility(player, ability)) { for (Entity target : targets) { if (target == null) continue; if (!(target instanceof LivingEntity)) continue; else if (Battle.canParticipate(target) && !Battle.canTarget(Battle.defineParticipant(target))) continue; else if (target instanceof Player) { Participant attacked = 
Battle.defineParticipant(target); if (character.alliedTo(attacked)) continue; if (Battle.isInBattle(character) && !Battle.isInBattle(attacked)) continue; } set.add((LivingEntity) target); } } if (set.isEmpty()) player.sendMessage(ChatColor.YELLOW + "No target found."); return set; } /** * Processes the ability by removing its cost from the <code>character</code>'s * current favor and then setting the players cooldown. * * @param character the character to manipulate. * @param ability the ability whose details to use. */ public static void postProcessAbility(DemigodsCharacter character, Ability ability) { if (ability.getDelay() > 0) DemigodsCharacter.setCooldown(character, ability.getName(), ability.getDelay()); character.getMeta().subtractFavor(ability.getCost()); } /** * Returns true if the event <code>event</code> is caused by a left click. * * @param event the interact event * @return true/false depending on if the event is caused by a left click or not */ public static boolean isLeftClick(PlayerInteractEvent event) { Action action = event.getAction(); return action == Action.LEFT_CLICK_AIR || action == Action.LEFT_CLICK_BLOCK; } /** * Returns the LivingEntity that <code>player</code> is target. * * @param player the player * @return the targeted LivingEntity */ public static LivingEntity autoTarget(Player player) { // Define variables int range = Configs.getSettingInt("caps.target_range") > 140 ? 
140 : Configs.getSettingInt("caps.target_range"); final int correction = 3; Location target = null; try { target = player.getTargetBlock((Set) null, range).getLocation(); } catch (Exception ignored) { } if (target == null) return null; BlockIterator iterator = new BlockIterator(player, range); List<Entity> targets = Lists.newArrayList(); final DemigodsCharacter looking = DemigodsCharacter.of(player); // Iterate through the blocks and find the target while (iterator.hasNext()) { final Block block = iterator.next(); targets.addAll(Collections2.filter(player.getNearbyEntities(range, range, range), new Predicate<Entity>() { @Override public boolean apply(Entity entity) { if (entity instanceof LivingEntity && entity.getLocation().distance(block.getLocation()) <= correction) { if (entity instanceof Tameable && ((Tameable) entity).isTamed() && DemigodsTameable.of((LivingEntity) entity) != null) { DemigodsTameable wrapper = DemigodsTameable.of((LivingEntity) entity); if (looking.alliedTo(wrapper)) return false; } else if (entity instanceof Player && DemigodsPlayer.of(((Player) entity)).isACharacter()) { DemigodsCharacter character = DemigodsCharacter.of((Player) entity); if (looking.alliedTo(character) || ((Player) entity).getGameMode().equals(GameMode.CREATIVE)) return false; } return true; } return false; } })); } // Attempt to return the closest entity to the cursor for (Entity entity : targets) if (entity.getLocation().distance(target) <= correction) return (LivingEntity) entity; // If it failed to do that then just return the first entity try { return (LivingEntity) targets.get(0); } catch (Exception ignored) { } return null; } public static Location directTarget(Player player) { return player.getTargetBlock((Set) null, Configs.getSettingInt("caps.target_range")).getLocation(); } /** * Returns true if the <code>player</code> ability hits <code>target</code>. 
* * @param player the player using the ability * @param target the targeted LivingEntity * @return true/false depending on if the ability hits or misses */ public static boolean target(Player player, Location target, boolean notify) { DemigodsCharacter character = DemigodsCharacter.of(player); Location toHit = adjustedAimLocation(character, target); if (isHit(target, toHit)) return true; if (notify) player.sendMessage(ChatColor.RED + "Missed..."); // TODO Better message. return false; } /** * Returns the location that <code>character</code> is actually aiming * at when target <code>target</code>. * * @param character the character triggering the ability callAbilityEvent * @param target the location the character is target at * @return the aimed at location */ public static Location adjustedAimLocation(DemigodsCharacter character, Location target) { // FIXME: This needs major work. int accuracy = character.getDeity().getAccuracy(); if (accuracy < 3) accuracy = 3; int offset = (int) (TARGET_OFFSET + character.getBukkitOfflinePlayer().getPlayer().getLocation().distance(target)); int adjustedOffset = offset / accuracy; if (adjustedOffset < 1) adjustedOffset = 1; Random random = new Random(); World world = target.getWorld(); int randomInt = random.nextInt(adjustedOffset); int sampleSpace = random.nextInt(3); double X = target.getX(); double Z = target.getZ(); double Y = target.getY(); if (sampleSpace == 0) { X += randomInt; Z += randomInt; } else if (sampleSpace == 1) { X -= randomInt; Z -= randomInt; } else if (sampleSpace == 2) { X -= randomInt; Z += randomInt; } else if (sampleSpace == 3) { X += randomInt; Z -= randomInt; } return new Location(world, X, Y, Z); } /** * Returns true if <code>target</code> is hit at <code>hit</code>. 
* * @param target the LivingEntity being targeted * @param hit the location actually hit * @return true/false if <code>target</code> is hit */ public static boolean isHit(Location target, Location hit) { return hit.distance(target) <= 2; } public static boolean bindAbility(Player player, String command) { // Define character and ability DemigodsCharacter character = DemigodsCharacter.of(player); Ability ability = getAbilityByCommand(character.getDeity().getName(), command); // Return if it isn't an ability if (ability == null) return false; // Handle enabling the command String abilityName = ability.getName(); ItemStack itemInHand = player.getItemInHand(); if (!character.getMeta().isBound(ability)) { if (itemInHand == null || itemInHand.getType().equals(Material.AIR)) { // Slot must not be empty player.sendMessage(ChatColor.RED + English.ERROR_EMPTY_SLOT.getLine()); return true; } else if (character.getMeta().isBound(itemInHand.getType())) { // Material already bound player.sendMessage(ChatColor.RED + English.ERROR_MATERIAL_BOUND.getLine()); return true; } else if (ability.hasWeapon() && !itemInHand.getData().equals(ability.getWeapon())) { // Weapon required player.sendMessage(ChatColor.RED + English.ERROR_BIND_WEAPON_REQUIRED.getLine().replace("{weapon}", Strings.beautify(ability.getWeapon().getItemType().name()).toLowerCase()).replace("{ability}", abilityName)); return true; } // Save the bind character.getMeta().setBind(ability, itemInHand.getType()); // Let them know player.sendMessage(ChatColor.GREEN + English.SUCCESS_ABILITY_BOUND.getLine().replace("{ability}", StringUtils.capitalize(abilityName)).replace("{material}", (Strings.beginsWithVowel(itemInHand.getType().name()) ? 
"an " : "a ") + Strings.beautify(itemInHand.getType().name()).toLowerCase())); return true; } else { // Remove the bind character.getMeta().removeBind(ability); // Let them know player.sendMessage(ChatColor.GREEN + English.SUCCESS_ABILITY_UNBOUND.getLine().replace("{ability}", StringUtils.capitalize(abilityName))); return true; } } public static void dealDamage(LivingEntity source, LivingEntity target, double amount, EntityDamageEvent.DamageCause cause) { if (source instanceof Player) { DemigodsPlayer owner = DemigodsPlayer.of(((Player) source)); if (owner != null) { Participant participant = Battle.defineParticipant(target); if (participant != null && owner.getCharacter().alliedTo(participant)) return; } } /** * This code below MAY cause issues, it should be watched whenever something is changed. */ EntityDamageByEntityEvent event = new EntityDamageByEntityEvent(source, target, cause, amount); Bukkit.getPluginManager().callEvent(event); if (amount >= 1 && !event.isCancelled()) { target.setLastDamageCause(event); target.damage(amount); } } /** * Returns the instance of an ability with a deity matching <code>deityName</code> and command matching <code>commandName</code>. * * @param deityName the deity to look for. * @param commandName the command name to look for. * @return the ability found */ public static Ability getAbilityByCommand(final String deityName, final String commandName) { try { return Iterables.find(getLoadedAbilities(), new Predicate<Ability>() { @Override public boolean apply(Ability ability) { return ability.getCommand() != null && ability.getCommand().equalsIgnoreCase(commandName) && ability.getDeity().equalsIgnoreCase(deityName); } }); } catch (Exception ignored) { // ignored } return null; } /** * Returns the instance of an ability with a deity matching <code>deityName</code> and name matching <code>abilityName</code>. * * @param deityName the deity to look for. * @param abilityName the ability name to look for. 
* @return the ability found */ public static Ability getAbilityByName(final String deityName, final String abilityName) { try { return Iterables.find(getLoadedAbilities(), new Predicate<Ability>() { @Override public boolean apply(Ability ability) { return ability.getCommand() != null && ability.getName().equalsIgnoreCase(abilityName) && ability.getDeity().equalsIgnoreCase(deityName); } }); } catch (Exception ignored) { // ignored } return null; } /** * Finds all ability instances for the currently loaded deities. * * @return a collection of abilities. */ public static Collection<Ability> getLoadedAbilities() { Set<Ability> abilities = Sets.newHashSet(); for (Deity deity : Demigods.getMythos().getDeities()) { abilities.addAll(deity.getAbilities()); } return abilities; } } }
package net.mgsx.game.tutorials.chapters; import com.badlogic.ashley.core.Engine; import com.badlogic.ashley.core.EntitySystem; import com.badlogic.gdx.Gdx; import com.badlogic.gdx.Input; import com.badlogic.gdx.backends.lwjgl.LwjglApplication; import com.badlogic.gdx.backends.lwjgl.LwjglApplicationConfiguration; import com.badlogic.gdx.graphics.Color; import com.badlogic.gdx.graphics.GL20; import com.badlogic.gdx.graphics.glutils.ShapeRenderer; import com.badlogic.gdx.graphics.glutils.ShapeRenderer.ShapeType; import net.mgsx.game.core.GameApplication; import net.mgsx.game.core.GamePipeline; import net.mgsx.game.core.GameRegistry; import net.mgsx.game.core.GameScreen; import net.mgsx.game.core.annotations.Inject; import net.mgsx.game.core.plugins.Plugin; import net.mgsx.game.tutorials.Tutorial; /**@md In this tutorial we will learn how systems works in ECS. This little example is a very basic game out of interest. But you'll learn how ordering systems with {@link GamePipeline} and setup inter system communication with {@link Inject} annotation. **Design consideration** : * following design is for example purpose, you typically use entities and components to store the hero state so this example is a perfect anti-pattern. * system inter dependency should be avoided but could be required in some case. This can be done by externalize shared systems data in a dedicated class. @md*/ // TODO allow injection of shared data : register something other than systems and do injection // of any registered type... @Tutorial(id="ecs-basic", group="ecs", title="ECS Basics", order= 1) public class ECSBasicTutorial extends GameApplication { /**@md Our first system is our game logic : It own the hero position and a hero status. Logic is very simple here : hero is falling down continuously and dying when exiting the screen. 
@md*/ static //@code public class MyLogicSystem extends EntitySystem { public float position; boolean playerAlive = true; public MyLogicSystem() { super(GamePipeline.LOGIC); } @Override public void addedToEngine(Engine engine) { super.addedToEngine(engine); position = Gdx.graphics.getHeight()/2; } @Override public void update(float deltaTime) { if(position < 0 || position > Gdx.graphics.getHeight()){ playerAlive = false; }else{ position -= deltaTime * 10; } } } //@code /**@md Now we need to render our hero on the screen to see what's happen. Instead of drawing in the logic system, we will create another system for this purpose. Our render system will be responsible of just drawing hero at its position and with a color related to his status. Since position and status are owned by logic system, we will inject our logic system in order to read hero state. Thanks to {@link Inject} annotation, logic system will be automatically set by kit for you. Rendering is very simple here, we're using the LibGDX {@link ShapeRenderer} to draw a gree or red X depending on the hero status. @md*/ static //@code public class MyRenderSystem extends EntitySystem { @Inject MyLogicSystem logic; private ShapeRenderer renderer; public MyRenderSystem() { super(GamePipeline.RENDER); } @Override public void addedToEngine(Engine engine) { super.addedToEngine(engine); renderer = new ShapeRenderer(4); } @Override public void update(float deltaTime) { Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT); if(logic.playerAlive){ renderer.setColor(Color.GREEN); }else{ renderer.setColor(Color.RED); } renderer.begin(ShapeType.Line); renderer.x(Gdx.graphics.getWidth()/2, logic.position, 10); renderer.end(); } } //@code /**@md Now we need to control the player. Again instead of doing this in our logic system, we will create a input system for this purpose. Its responsability is to move hero position upward when player press up key. Again our logic system is injected in order to update hero position value. 
@md*/ static //@code public class MyInputSystem extends EntitySystem { @Inject MyLogicSystem logic; private float moveSpeed; public MyInputSystem() { super(GamePipeline.INPUT); } @Override public void update(float deltaTime) { if(Gdx.input.isKeyPressed(Input.Keys.UP)){ moveSpeed = 50; }else{ moveSpeed = 0; } logic.position += moveSpeed * deltaTime; } } //@code /**@md Now it's time to put all systems together. As seen before, systems are created and added to the engine through Kit plugins. So let create a plugin for our awesome game and add our systems. @md*/ static //@code class MyPlugin implements Plugin { @Override public void initialize(GameScreen engine) { engine.entityEngine.addSystem(new MyLogicSystem()); engine.entityEngine.addSystem(new MyRenderSystem()); engine.entityEngine.addSystem(new MyInputSystem()); } } //@code /**@md Note that since we're using {@link GamePipeline} to define system ordering, there is no matter in which order we add our systems to the engine. Systems will be executed in the order difined by the pipeline (except when systems have same pipeline stage). Ordering is predefined as follow : * {@link GamePipeline#INPUT} * {@link GamePipeline#PHYSICS} * {@link GamePipeline#LOGIC} * {@link GamePipeline#RENDER} There is some intermediate stages, for instance : * {@link GamePipeline#BEFORE_LOGIC} * {@link GamePipeline#AFTER_LOGIC} It is not mandatory to use {@link GamePipeline}. You could extend it to add your own stages or write your own game pipeline, these constants are just integers. @md*/ /**@md It just missing our Game and the game screen. Let's create it. All we have to do is to register our plugin. 
@md*/
static
//@code
public class MyGame extends GameApplication
{
	@Override
	public void create() {
		super.create();
		// Register our plugin so its systems get installed into the screen's engine.
		GameRegistry registry = new GameRegistry();
		registry.registerPlugin(new MyPlugin());
		setScreen(new GameScreen(this, assets, registry));
	}
}
//@code
/**@md

Finally we can test our game by creating a desktop launcher.

@md*/
static
//@code
public class MyGameDesktopLauncher
{
	public static void main(String[] args) {
		// Boot the game on the LWJGL desktop backend with the default configuration.
		new LwjglApplication(new MyGame(), new LwjglApplicationConfiguration());
	}
}
//@code
}
// Copyright 2000-2022 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license. package org.jetbrains.kotlin.idea.refactoring.move; import com.intellij.testFramework.TestDataPath; import org.jetbrains.kotlin.idea.test.JUnit3RunnerWithInners; import org.jetbrains.kotlin.idea.test.KotlinTestUtils; import org.jetbrains.kotlin.test.TestMetadata; import org.jetbrains.kotlin.idea.test.TestRoot; import org.junit.runner.RunWith; /** * This class is generated by {@link org.jetbrains.kotlin.testGenerator.generator.TestGenerator}. * DO NOT MODIFY MANUALLY. */ @SuppressWarnings("all") @TestRoot("idea/tests") @TestDataPath("$CONTENT_ROOT") @RunWith(JUnit3RunnerWithInners.class) @TestMetadata("testData/refactoring/move") public class MoveTestGenerated extends AbstractMoveTest { private void runTest(String testDataFilePath) throws Exception { KotlinTestUtils.runTest(this::doTest, this, testDataFilePath); } @TestMetadata("java/moveClass/callableReferences/nestedToAnotherClassAndAnotherPackage/nestedToAnotherClassAndAnotherPackage.test") public void testJava_moveClass_callableReferences_nestedToAnotherClassAndAnotherPackage_NestedToAnotherClassAndAnotherPackage() throws Exception { runTest("testData/refactoring/move/java/moveClass/callableReferences/nestedToAnotherClassAndAnotherPackage/nestedToAnotherClassAndAnotherPackage.test"); } @TestMetadata("java/moveClass/callableReferences/nestedToAnotherClass/nestedToAnotherClass.test") public void testJava_moveClass_callableReferences_nestedToAnotherClass_NestedToAnotherClass() throws Exception { runTest("testData/refactoring/move/java/moveClass/callableReferences/nestedToAnotherClass/nestedToAnotherClass.test"); } @TestMetadata("java/moveClass/callableReferences/nestedToTopLevelAndAnotherPackage/nestedToTopLevelAndAnotherPackage.test") public void testJava_moveClass_callableReferences_nestedToTopLevelAndAnotherPackage_NestedToTopLevelAndAnotherPackage() throws Exception { 
runTest("testData/refactoring/move/java/moveClass/callableReferences/nestedToTopLevelAndAnotherPackage/nestedToTopLevelAndAnotherPackage.test"); } @TestMetadata("java/moveClass/callableReferences/nestedToTopLevel/nestedToTopLevel.test") public void testJava_moveClass_callableReferences_nestedToTopLevel_NestedToTopLevel() throws Exception { runTest("testData/refactoring/move/java/moveClass/callableReferences/nestedToTopLevel/nestedToTopLevel.test"); } @TestMetadata("java/moveClass/moveAsMember/moveClassToExternalNestedClass/moveClassToExternalNestedClass.test") public void testJava_moveClass_moveAsMember_moveClassToExternalNestedClass_MoveClassToExternalNestedClass() throws Exception { runTest("testData/refactoring/move/java/moveClass/moveAsMember/moveClassToExternalNestedClass/moveClassToExternalNestedClass.test"); } @TestMetadata("java/moveClass/moveAsMember/moveClassToNestedSiblingClass/moveClassToNestedSiblingClass.test") public void testJava_moveClass_moveAsMember_moveClassToNestedSiblingClass_MoveClassToNestedSiblingClass() throws Exception { runTest("testData/refactoring/move/java/moveClass/moveAsMember/moveClassToNestedSiblingClass/moveClassToNestedSiblingClass.test"); } @TestMetadata("java/moveClass/moveAsMember/moveClassToTopLevelClassAndMakePackageLocal/moveClassToTopLevelClassAndMakePackageLocal.test") public void testJava_moveClass_moveAsMember_moveClassToTopLevelClassAndMakePackageLocal_MoveClassToTopLevelClassAndMakePackageLocal() throws Exception { runTest("testData/refactoring/move/java/moveClass/moveAsMember/moveClassToTopLevelClassAndMakePackageLocal/moveClassToTopLevelClassAndMakePackageLocal.test"); } @TestMetadata("java/moveClass/moveAsMember/moveClassToTopLevelClassAndMakePrivate/moveClassToTopLevelClassAndMakePrivate.test") public void testJava_moveClass_moveAsMember_moveClassToTopLevelClassAndMakePrivate_MoveClassToTopLevelClassAndMakePrivate() throws Exception { 
runTest("testData/refactoring/move/java/moveClass/moveAsMember/moveClassToTopLevelClassAndMakePrivate/moveClassToTopLevelClassAndMakePrivate.test"); } @TestMetadata("java/moveClass/moveAsMember/moveClassToTopLevelClassOfAnotherPackage/moveClassToTopLevelClassOfAnotherPackage.test") public void testJava_moveClass_moveAsMember_moveClassToTopLevelClassOfAnotherPackage_MoveClassToTopLevelClassOfAnotherPackage() throws Exception { runTest("testData/refactoring/move/java/moveClass/moveAsMember/moveClassToTopLevelClassOfAnotherPackage/moveClassToTopLevelClassOfAnotherPackage.test"); } @TestMetadata("java/moveClass/moveAsMember/moveClassToTopLevelClass/moveClassToTopLevelClass.test") public void testJava_moveClass_moveAsMember_moveClassToTopLevelClass_MoveClassToTopLevelClass() throws Exception { runTest("testData/refactoring/move/java/moveClass/moveAsMember/moveClassToTopLevelClass/moveClassToTopLevelClass.test"); } @TestMetadata("java/moveClass/moveInnerToTop/moveNestedClassToTopLevelInAnotherPackage/moveNestedClassToTopLevelInAnotherPackage.test") public void testJava_moveClass_moveInnerToTop_moveNestedClassToTopLevelInAnotherPackage_MoveNestedClassToTopLevelInAnotherPackage() throws Exception { runTest("testData/refactoring/move/java/moveClass/moveInnerToTop/moveNestedClassToTopLevelInAnotherPackage/moveNestedClassToTopLevelInAnotherPackage.test"); } @TestMetadata("java/moveClass/moveInnerToTop/moveNestedClassToTopLevelInTheSamePackageAndAddOuterInstanceWithLambda/moveNestedClassToTopLevelInTheSamePackageAndAddOuterInstanceWithLambda.test") public void testJava_moveClass_moveInnerToTop_moveNestedClassToTopLevelInTheSamePackageAndAddOuterInstanceWithLambda_MoveNestedClassToTopLevelInTheSamePackageAndAddOuterInstanceWithLambda() throws Exception { runTest("testData/refactoring/move/java/moveClass/moveInnerToTop/moveNestedClassToTopLevelInTheSamePackageAndAddOuterInstanceWithLambda/moveNestedClassToTopLevelInTheSamePackageAndAddOuterInstanceWithLambda.test"); } 
@TestMetadata("java/moveClass/moveInnerToTop/moveNestedClassToTopLevelInTheSamePackageAndAddOuterInstance/moveNestedClassToTopLevelInTheSamePackageAndAddOuterInstance.test") public void testJava_moveClass_moveInnerToTop_moveNestedClassToTopLevelInTheSamePackageAndAddOuterInstance_MoveNestedClassToTopLevelInTheSamePackageAndAddOuterInstance() throws Exception { runTest("testData/refactoring/move/java/moveClass/moveInnerToTop/moveNestedClassToTopLevelInTheSamePackageAndAddOuterInstance/moveNestedClassToTopLevelInTheSamePackageAndAddOuterInstance.test"); } @TestMetadata("java/moveClass/moveInnerToTop/moveNestedClassToTopLevelInTheSamePackageAndRename/moveNestedClassToTopLevelInTheSamePackageAndRename.test") public void testJava_moveClass_moveInnerToTop_moveNestedClassToTopLevelInTheSamePackageAndRename_MoveNestedClassToTopLevelInTheSamePackageAndRename() throws Exception { runTest("testData/refactoring/move/java/moveClass/moveInnerToTop/moveNestedClassToTopLevelInTheSamePackageAndRename/moveNestedClassToTopLevelInTheSamePackageAndRename.test"); } @TestMetadata("java/moveClass/moveInnerToTop/moveNestedClassToTopLevelInTheSamePackage/moveNestedClassToTopLevelInTheSamePackage.test") public void testJava_moveClass_moveInnerToTop_moveNestedClassToTopLevelInTheSamePackage_MoveNestedClassToTopLevelInTheSamePackage() throws Exception { runTest("testData/refactoring/move/java/moveClass/moveInnerToTop/moveNestedClassToTopLevelInTheSamePackage/moveNestedClassToTopLevelInTheSamePackage.test"); } @TestMetadata("java/moveClass/moveTopToInner/moveTopLevelClassToNestedClass/moveTopLevelClassToNestedClass.test") public void testJava_moveClass_moveTopToInner_moveTopLevelClassToNestedClass_MoveTopLevelClassToNestedClass() throws Exception { runTest("testData/refactoring/move/java/moveClass/moveTopToInner/moveTopLevelClassToNestedClass/moveTopLevelClassToNestedClass.test"); } 
@TestMetadata("java/moveClass/moveTopToInner/moveTopLevelClassToTopLevelClassOfAnotherPackage/moveTopLevelClassToTopLevelClassOfAnotherPackage.test") public void testJava_moveClass_moveTopToInner_moveTopLevelClassToTopLevelClassOfAnotherPackage_MoveTopLevelClassToTopLevelClassOfAnotherPackage() throws Exception { runTest("testData/refactoring/move/java/moveClass/moveTopToInner/moveTopLevelClassToTopLevelClassOfAnotherPackage/moveTopLevelClassToTopLevelClassOfAnotherPackage.test"); } @TestMetadata("java/moveClass/moveTopToInner/moveTopLevelClassToTopLevelClass/moveTopLevelClassToTopLevelClass.test") public void testJava_moveClass_moveTopToInner_moveTopLevelClassToTopLevelClass_MoveTopLevelClassToTopLevelClass() throws Exception { runTest("testData/refactoring/move/java/moveClass/moveTopToInner/moveTopLevelClassToTopLevelClass/moveTopLevelClassToTopLevelClass.test"); } @TestMetadata("java/moveClass/moveTop/moveTopLevelClassToAnotherPackage/moveTopLevelClassToAnotherPackage.test") public void testJava_moveClass_moveTop_moveTopLevelClassToAnotherPackage_MoveTopLevelClassToAnotherPackage() throws Exception { runTest("testData/refactoring/move/java/moveClass/moveTop/moveTopLevelClassToAnotherPackage/moveTopLevelClassToAnotherPackage.test"); } @TestMetadata("java/moveField/moveFieldToExternalNestedClass/moveFieldToExternalNestedClass.test") public void testJava_moveField_moveFieldToExternalNestedClass_MoveFieldToExternalNestedClass() throws Exception { runTest("testData/refactoring/move/java/moveField/moveFieldToExternalNestedClass/moveFieldToExternalNestedClass.test"); } @TestMetadata("java/moveField/moveFieldToNestedSiblingClass/moveFieldToNestedSiblingClass.test") public void testJava_moveField_moveFieldToNestedSiblingClass_MoveFieldToNestedSiblingClass() throws Exception { runTest("testData/refactoring/move/java/moveField/moveFieldToNestedSiblingClass/moveFieldToNestedSiblingClass.test"); } 
@TestMetadata("java/moveField/moveFieldToTopLevelClassAndMakePackageLocal/moveFieldToTopLevelClassAndMakePackageLocal.test") public void testJava_moveField_moveFieldToTopLevelClassAndMakePackageLocal_MoveFieldToTopLevelClassAndMakePackageLocal() throws Exception { runTest("testData/refactoring/move/java/moveField/moveFieldToTopLevelClassAndMakePackageLocal/moveFieldToTopLevelClassAndMakePackageLocal.test"); } @TestMetadata("java/moveField/moveFieldToTopLevelClassAndMakePrivate/moveFieldToTopLevelClassAndMakePrivate.test") public void testJava_moveField_moveFieldToTopLevelClassAndMakePrivate_MoveFieldToTopLevelClassAndMakePrivate() throws Exception { runTest("testData/refactoring/move/java/moveField/moveFieldToTopLevelClassAndMakePrivate/moveFieldToTopLevelClassAndMakePrivate.test"); } @TestMetadata("java/moveField/moveFieldToTopLevelClassOfAnotherPackage/moveFieldToTopLevelClassOfAnotherPackage.test") public void testJava_moveField_moveFieldToTopLevelClassOfAnotherPackage_MoveFieldToTopLevelClassOfAnotherPackage() throws Exception { runTest("testData/refactoring/move/java/moveField/moveFieldToTopLevelClassOfAnotherPackage/moveFieldToTopLevelClassOfAnotherPackage.test"); } @TestMetadata("java/moveField/moveFieldToTopLevelClass/moveFieldToTopLevelClass.test") public void testJava_moveField_moveFieldToTopLevelClass_MoveFieldToTopLevelClass() throws Exception { runTest("testData/refactoring/move/java/moveField/moveFieldToTopLevelClass/moveFieldToTopLevelClass.test"); } @TestMetadata("java/moveFile/moveFileToAnotherPackage/moveFileToAnotherPackage.test") public void testJava_moveFile_moveFileToAnotherPackage_MoveFileToAnotherPackage() throws Exception { runTest("testData/refactoring/move/java/moveFile/moveFileToAnotherPackage/moveFileToAnotherPackage.test"); } @TestMetadata("java/moveMethod/callableReferences/staticMethodToTopLevelClassInAnotherPackage/staticMethodToTopLevelClassInAnotherPackage.test") public void 
testJava_moveMethod_callableReferences_staticMethodToTopLevelClassInAnotherPackage_StaticMethodToTopLevelClassInAnotherPackage() throws Exception { runTest("testData/refactoring/move/java/moveMethod/callableReferences/staticMethodToTopLevelClassInAnotherPackage/staticMethodToTopLevelClassInAnotherPackage.test"); } @TestMetadata("java/moveMethod/callableReferences/staticMethodToTopLevelClass/staticMethodToTopLevelClass.test") public void testJava_moveMethod_callableReferences_staticMethodToTopLevelClass_StaticMethodToTopLevelClass() throws Exception { runTest("testData/refactoring/move/java/moveMethod/callableReferences/staticMethodToTopLevelClass/staticMethodToTopLevelClass.test"); } @TestMetadata("java/moveMethod/moveMethodToExternalNestedClass/moveMethodToExternalNestedClass.test") public void testJava_moveMethod_moveMethodToExternalNestedClass_MoveMethodToExternalNestedClass() throws Exception { runTest("testData/refactoring/move/java/moveMethod/moveMethodToExternalNestedClass/moveMethodToExternalNestedClass.test"); } @TestMetadata("java/moveMethod/moveMethodToNestedSiblingClass/moveMethodToNestedSiblingClass.test") public void testJava_moveMethod_moveMethodToNestedSiblingClass_MoveMethodToNestedSiblingClass() throws Exception { runTest("testData/refactoring/move/java/moveMethod/moveMethodToNestedSiblingClass/moveMethodToNestedSiblingClass.test"); } @TestMetadata("java/moveMethod/moveMethodToTopLevelClassAndMakePackageLocal/moveMethodToTopLevelClassAndMakePackageLocal.test") public void testJava_moveMethod_moveMethodToTopLevelClassAndMakePackageLocal_MoveMethodToTopLevelClassAndMakePackageLocal() throws Exception { runTest("testData/refactoring/move/java/moveMethod/moveMethodToTopLevelClassAndMakePackageLocal/moveMethodToTopLevelClassAndMakePackageLocal.test"); } @TestMetadata("java/moveMethod/moveMethodToTopLevelClassAndMakePrivate/moveMethodToTopLevelClassAndMakePrivate.test") public void 
testJava_moveMethod_moveMethodToTopLevelClassAndMakePrivate_MoveMethodToTopLevelClassAndMakePrivate() throws Exception { runTest("testData/refactoring/move/java/moveMethod/moveMethodToTopLevelClassAndMakePrivate/moveMethodToTopLevelClassAndMakePrivate.test"); } @TestMetadata("java/moveMethod/moveMethodToTopLevelClassOfAnotherPackage/moveMethodToTopLevelClassOfAnotherPackage.test") public void testJava_moveMethod_moveMethodToTopLevelClassOfAnotherPackage_MoveMethodToTopLevelClassOfAnotherPackage() throws Exception { runTest("testData/refactoring/move/java/moveMethod/moveMethodToTopLevelClassOfAnotherPackage/moveMethodToTopLevelClassOfAnotherPackage.test"); } @TestMetadata("java/moveMethod/moveMethodToTopLevelClass/moveMethodToTopLevelClass.test") public void testJava_moveMethod_moveMethodToTopLevelClass_MoveMethodToTopLevelClass() throws Exception { runTest("testData/refactoring/move/java/moveMethod/moveMethodToTopLevelClass/moveMethodToTopLevelClass.test"); } @TestMetadata("java/movePackage/movePackageToAnotherPackage/movePackageToAnotherPackage.test") public void testJava_movePackage_movePackageToAnotherPackage_MovePackageToAnotherPackage() throws Exception { runTest("testData/refactoring/move/java/movePackage/movePackageToAnotherPackage/movePackageToAnotherPackage.test"); } @TestMetadata("kotlin/changePackage/addExtensionImport/addExtensionImport.test") public void testKotlin_changePackage_addExtensionImport_AddExtensionImport() throws Exception { runTest("testData/refactoring/move/kotlin/changePackage/addExtensionImport/addExtensionImport.test"); } @TestMetadata("kotlin/changePackage/changeToDefaultPackage/changeToDefaultPackage.test") public void testKotlin_changePackage_changeToDefaultPackage_ChangeToDefaultPackage() throws Exception { runTest("testData/refactoring/move/kotlin/changePackage/changeToDefaultPackage/changeToDefaultPackage.test"); } @TestMetadata("kotlin/changePackage/changeToNonDefaultPackage/changeToNonDefaultPackage.test") public void 
testKotlin_changePackage_changeToNonDefaultPackage_ChangeToNonDefaultPackage() throws Exception { runTest("testData/refactoring/move/kotlin/changePackage/changeToNonDefaultPackage/changeToNonDefaultPackage.test"); } @TestMetadata("kotlin/changePackage/headerClasses/headerClasses.test") public void testKotlin_changePackage_headerClasses_HeaderClasses() throws Exception { runTest("testData/refactoring/move/kotlin/changePackage/headerClasses/headerClasses.test"); } @TestMetadata("kotlin/changePackage/longName/longName.test") public void testKotlin_changePackage_longName_LongName() throws Exception { runTest("testData/refactoring/move/kotlin/changePackage/longName/longName.test"); } @TestMetadata("kotlin/moveDirectoryWithQuotation/moveDirectoryWithQuotation.test") public void testKotlin_moveDirectoryWithQuotation_MoveDirectoryWithQuotation() throws Exception { runTest("testData/refactoring/move/kotlin/moveDirectoryWithQuotation/moveDirectoryWithQuotation.test"); } @TestMetadata("kotlin/moveDirectory/moveDirectoryWithPackageDirectiveChange/moveDirectoryWithPackageDirectiveChange.test") public void testKotlin_moveDirectory_moveDirectoryWithPackageDirectiveChange_MoveDirectoryWithPackageDirectiveChange() throws Exception { runTest("testData/refactoring/move/kotlin/moveDirectory/moveDirectoryWithPackageDirectiveChange/moveDirectoryWithPackageDirectiveChange.test"); } @TestMetadata("kotlin/moveFile/addExtensionImport/addExtensionImport.test") public void testKotlin_moveFile_addExtensionImport_AddExtensionImport() throws Exception { runTest("testData/refactoring/move/kotlin/moveFile/addExtensionImport/addExtensionImport.test"); } @TestMetadata("kotlin/moveFile/callableReferences/callableReferences.test") public void testKotlin_moveFile_callableReferences_CallableReferences() throws Exception { runTest("testData/refactoring/move/kotlin/moveFile/callableReferences/callableReferences.test"); } @TestMetadata("kotlin/moveFile/internalReferences/internalReferences.test") public 
void testKotlin_moveFile_internalReferences_InternalReferences() throws Exception { runTest("testData/refactoring/move/kotlin/moveFile/internalReferences/internalReferences.test"); } @TestMetadata("kotlin/moveFile/keepImportAliasRefs/keepImportAliasRefs.test") public void testKotlin_moveFile_keepImportAliasRefs_KeepImportAliasRefs() throws Exception { runTest("testData/refactoring/move/kotlin/moveFile/keepImportAliasRefs/keepImportAliasRefs.test"); } @TestMetadata("kotlin/moveFile/longName/longName.test") public void testKotlin_moveFile_longName_LongName() throws Exception { runTest("testData/refactoring/move/kotlin/moveFile/longName/longName.test"); } @TestMetadata("kotlin/moveFile/moveFileAndDirWithJavaFileReferringToPackageFragementWithUnmatchedDir/moveFileAndDirWithJavaFileReferringToPackageFragementWithUnmatchedDir.test") public void testKotlin_moveFile_moveFileAndDirWithJavaFileReferringToPackageFragementWithUnmatchedDir_MoveFileAndDirWithJavaFileReferringToPackageFragementWithUnmatchedDir() throws Exception { runTest("testData/refactoring/move/kotlin/moveFile/moveFileAndDirWithJavaFileReferringToPackageFragementWithUnmatchedDir/moveFileAndDirWithJavaFileReferringToPackageFragementWithUnmatchedDir.test"); } @TestMetadata("kotlin/moveFile/moveFileToFile/moveFileToFile.test") public void testKotlin_moveFile_moveFileToFile_MoveFileToFile() throws Exception { runTest("testData/refactoring/move/kotlin/moveFile/moveFileToFile/moveFileToFile.test"); } @TestMetadata("kotlin/moveFile/moveFileWithDotsAsFileReferences/moveFileWithDotsAsFileReferences.test") public void testKotlin_moveFile_moveFileWithDotsAsFileReferences_MoveFileWithDotsAsFileReferences() throws Exception { runTest("testData/refactoring/move/kotlin/moveFile/moveFileWithDotsAsFileReferences/moveFileWithDotsAsFileReferences.test"); } @TestMetadata("kotlin/moveFile/moveFileWithPackageRename/moveFileWithPackageRename.test") public void 
testKotlin_moveFile_moveFileWithPackageRename_MoveFileWithPackageRename() throws Exception { runTest("testData/refactoring/move/kotlin/moveFile/moveFileWithPackageRename/moveFileWithPackageRename.test"); } @TestMetadata("kotlin/moveFile/moveFileWithoutDeclarations/moveFileWithoutDeclarations.test") public void testKotlin_moveFile_moveFileWithoutDeclarations_MoveFileWithoutDeclarations() throws Exception { runTest("testData/refactoring/move/kotlin/moveFile/moveFileWithoutDeclarations/moveFileWithoutDeclarations.test"); } @TestMetadata("kotlin/moveFile/moveFileWithoutPackageRename/moveFileWithoutPackageRename.test") public void testKotlin_moveFile_moveFileWithoutPackageRename_MoveFileWithoutPackageRename() throws Exception { runTest("testData/refactoring/move/kotlin/moveFile/moveFileWithoutPackageRename/moveFileWithoutPackageRename.test"); } @TestMetadata("kotlin/moveFile/moveLastFileInPackageWithPackageRename/moveLastFileInPackageWithPackageRename.test") public void testKotlin_moveFile_moveLastFileInPackageWithPackageRename_MoveLastFileInPackageWithPackageRename() throws Exception { runTest("testData/refactoring/move/kotlin/moveFile/moveLastFileInPackageWithPackageRename/moveLastFileInPackageWithPackageRename.test"); } @TestMetadata("kotlin/moveFile/moveMultipleFilesWithImplicitPrefix/moveMultipleFilesWithImplicitPrefix.test") public void testKotlin_moveFile_moveMultipleFilesWithImplicitPrefix_MoveMultipleFilesWithImplicitPrefix() throws Exception { runTest("testData/refactoring/move/kotlin/moveFile/moveMultipleFilesWithImplicitPrefix/moveMultipleFilesWithImplicitPrefix.test"); } @TestMetadata("kotlin/moveFile/moveMultipleFiles/moveMultipleFiles.test") public void testKotlin_moveFile_moveMultipleFiles_MoveMultipleFiles() throws Exception { runTest("testData/refactoring/move/kotlin/moveFile/moveMultipleFiles/moveMultipleFiles.test"); } @TestMetadata("kotlin/moveFile/packageWithQuotation/packageWithQuotation.test") public void 
testKotlin_moveFile_packageWithQuotation_PackageWithQuotation() throws Exception { runTest("testData/refactoring/move/kotlin/moveFile/packageWithQuotation/packageWithQuotation.test"); } @TestMetadata("kotlin/moveFile/selfReferenceInImport/selfReferenceInImport.test") public void testKotlin_moveFile_selfReferenceInImport_SelfReferenceInImport() throws Exception { runTest("testData/refactoring/move/kotlin/moveFile/selfReferenceInImport/selfReferenceInImport.test"); } @TestMetadata("kotlin/moveFile/typeRefWithArguments/typeRefWithArguments.test") public void testKotlin_moveFile_typeRefWithArguments_TypeRefWithArguments() throws Exception { runTest("testData/refactoring/move/kotlin/moveFile/typeRefWithArguments/typeRefWithArguments.test"); } @TestMetadata("kotlin/moveMethod/moveToClass/companionHasReference/companionHasReference.test") public void testKotlin_moveMethod_moveToClass_companionHasReference_CompanionHasReference() throws Exception { runTest("testData/refactoring/move/kotlin/moveMethod/moveToClass/companionHasReference/companionHasReference.test"); } @TestMetadata("kotlin/moveMethod/moveToClass/parameterAsReference/parameterAsReference.test") public void testKotlin_moveMethod_moveToClass_parameterAsReference_ParameterAsReference() throws Exception { runTest("testData/refactoring/move/kotlin/moveMethod/moveToClass/parameterAsReference/parameterAsReference.test"); } @TestMetadata("kotlin/moveMethod/moveToClass/propertyAsReference/propertyAsReference.test") public void testKotlin_moveMethod_moveToClass_propertyAsReference_PropertyAsReference() throws Exception { runTest("testData/refactoring/move/kotlin/moveMethod/moveToClass/propertyAsReference/propertyAsReference.test"); } @TestMetadata("kotlin/moveMethod/moveToObject/moveToObject.test") public void testKotlin_moveMethod_moveToObject_MoveToObject() throws Exception { runTest("testData/refactoring/move/kotlin/moveMethod/moveToObject/moveToObject.test"); } 
@TestMetadata("kotlin/moveNestedClass/callableReferences/nestedToAnotherClass/nestedToAnotherClass.test") public void testKotlin_moveNestedClass_callableReferences_nestedToAnotherClass_NestedToAnotherClass() throws Exception { runTest("testData/refactoring/move/kotlin/moveNestedClass/callableReferences/nestedToAnotherClass/nestedToAnotherClass.test"); } @TestMetadata("kotlin/moveNestedClass/callableReferences/nestedToTopLevel/nestedToTopLevel.test") public void testKotlin_moveNestedClass_callableReferences_nestedToTopLevel_NestedToTopLevel() throws Exception { runTest("testData/refactoring/move/kotlin/moveNestedClass/callableReferences/nestedToTopLevel/nestedToTopLevel.test"); } @TestMetadata("kotlin/moveNestedClass/deepInnerToTopLevelWithOuterOuterThis/deepInnerToTopLevelWithOuterOuterThis.test") public void testKotlin_moveNestedClass_deepInnerToTopLevelWithOuterOuterThis_DeepInnerToTopLevelWithOuterOuterThis() throws Exception { runTest("testData/refactoring/move/kotlin/moveNestedClass/deepInnerToTopLevelWithOuterOuterThis/deepInnerToTopLevelWithOuterOuterThis.test"); } @TestMetadata("kotlin/moveNestedClass/deepInnerToTopLevelWithThis/deepInnerToTopLevelWithThis.test") public void testKotlin_moveNestedClass_deepInnerToTopLevelWithThis_DeepInnerToTopLevelWithThis() throws Exception { runTest("testData/refactoring/move/kotlin/moveNestedClass/deepInnerToTopLevelWithThis/deepInnerToTopLevelWithThis.test"); } @TestMetadata("kotlin/moveNestedClass/deepNonInnerToTopLevel/deepNonInnerToTopLevel.test") public void testKotlin_moveNestedClass_deepNonInnerToTopLevel_DeepNonInnerToTopLevel() throws Exception { runTest("testData/refactoring/move/kotlin/moveNestedClass/deepNonInnerToTopLevel/deepNonInnerToTopLevel.test"); } @TestMetadata("kotlin/moveNestedClass/deepPrivateClass/deepPrivateClass.test") public void testKotlin_moveNestedClass_deepPrivateClass_DeepPrivateClass() throws Exception { 
runTest("testData/refactoring/move/kotlin/moveNestedClass/deepPrivateClass/deepPrivateClass.test"); } @TestMetadata("kotlin/moveNestedClass/deepProtectedClass/deepProtectedClass.test") public void testKotlin_moveNestedClass_deepProtectedClass_DeepProtectedClass() throws Exception { runTest("testData/refactoring/move/kotlin/moveNestedClass/deepProtectedClass/deepProtectedClass.test"); } @TestMetadata("kotlin/moveNestedClass/dropEmptyCompanion/dropEmptyCompanion.test") public void testKotlin_moveNestedClass_dropEmptyCompanion_DropEmptyCompanion() throws Exception { runTest("testData/refactoring/move/kotlin/moveNestedClass/dropEmptyCompanion/dropEmptyCompanion.test"); } @TestMetadata("kotlin/moveNestedClass/innerToTopLevelAddInstanceParam/innerToTopLevelAddInstanceParam.test") public void testKotlin_moveNestedClass_innerToTopLevelAddInstanceParam_InnerToTopLevelAddInstanceParam() throws Exception { runTest("testData/refactoring/move/kotlin/moveNestedClass/innerToTopLevelAddInstanceParam/innerToTopLevelAddInstanceParam.test"); } @TestMetadata("kotlin/moveNestedClass/innerToTopLevelNoThis/innerToTopLevelNoThis.test") public void testKotlin_moveNestedClass_innerToTopLevelNoThis_InnerToTopLevelNoThis() throws Exception { runTest("testData/refactoring/move/kotlin/moveNestedClass/innerToTopLevelNoThis/innerToTopLevelNoThis.test"); } @TestMetadata("kotlin/moveNestedClass/innerToTopLevelWithThisOuterRefConflicts/innerToTopLevelWithThisOuterRefConflicts.test") public void testKotlin_moveNestedClass_innerToTopLevelWithThisOuterRefConflicts_InnerToTopLevelWithThisOuterRefConflicts() throws Exception { runTest("testData/refactoring/move/kotlin/moveNestedClass/innerToTopLevelWithThisOuterRefConflicts/innerToTopLevelWithThisOuterRefConflicts.test"); } @TestMetadata("kotlin/moveNestedClass/innerToTopLevelWithThis/innerToTopLevelWithThis.test") public void testKotlin_moveNestedClass_innerToTopLevelWithThis_InnerToTopLevelWithThis() throws Exception { 
runTest("testData/refactoring/move/kotlin/moveNestedClass/innerToTopLevelWithThis/innerToTopLevelWithThis.test"); } @TestMetadata("kotlin/moveNestedClass/nonInnerToOuterClass1/nonInnerToOuterClass1.test") public void testKotlin_moveNestedClass_nonInnerToOuterClass1_NonInnerToOuterClass1() throws Exception { runTest("testData/refactoring/move/kotlin/moveNestedClass/nonInnerToOuterClass1/nonInnerToOuterClass1.test"); } @TestMetadata("kotlin/moveNestedClass/nonInnerToOuterClass2/nonInnerToOuterClass2.test") public void testKotlin_moveNestedClass_nonInnerToOuterClass2_NonInnerToOuterClass2() throws Exception { runTest("testData/refactoring/move/kotlin/moveNestedClass/nonInnerToOuterClass2/nonInnerToOuterClass2.test"); } @TestMetadata("kotlin/moveNestedClass/nonInnerToSubclassWithCompanionAsImplicitDispatchReceiver/nonInnerToSubclassWithCompanionAsImplicitDispatchReceiver.test") public void testKotlin_moveNestedClass_nonInnerToSubclassWithCompanionAsImplicitDispatchReceiver_NonInnerToSubclassWithCompanionAsImplicitDispatchReceiver() throws Exception { runTest("testData/refactoring/move/kotlin/moveNestedClass/nonInnerToSubclassWithCompanionAsImplicitDispatchReceiver/nonInnerToSubclassWithCompanionAsImplicitDispatchReceiver.test"); } @TestMetadata("kotlin/moveNestedClass/nonInnerToTopLevelClass/nonInnerToTopLevelClass.test") public void testKotlin_moveNestedClass_nonInnerToTopLevelClass_NonInnerToTopLevelClass() throws Exception { runTest("testData/refactoring/move/kotlin/moveNestedClass/nonInnerToTopLevelClass/nonInnerToTopLevelClass.test"); } @TestMetadata("kotlin/moveNestedClass/nonInnerToTopLevelCompanionConflict/nonInnerToTopLevelCompanionConflict.test") public void testKotlin_moveNestedClass_nonInnerToTopLevelCompanionConflict_NonInnerToTopLevelCompanionConflict() throws Exception { runTest("testData/refactoring/move/kotlin/moveNestedClass/nonInnerToTopLevelCompanionConflict/nonInnerToTopLevelCompanionConflict.test"); } 
// NOTE(review): the methods below look machine-generated (uniform
// @TestMetadata/name/path pattern, each delegating to runTest(...) with the
// matching testData path) — TODO confirm the generator; prefer regenerating
// over hand-editing. This group covers Kotlin "move" refactorings for nested
// classes, whole packages, and the first calls/callable-refs scenarios.
@TestMetadata("kotlin/moveNestedClass/nonInnerToTopLevelPrivateClass/nonInnerToTopLevelPrivateClass.test")
public void testKotlin_moveNestedClass_nonInnerToTopLevelPrivateClass_NonInnerToTopLevelPrivateClass() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveNestedClass/nonInnerToTopLevelPrivateClass/nonInnerToTopLevelPrivateClass.test");
}

@TestMetadata("kotlin/moveNestedClass/nonInnerToTopLevel/nonInnerToTopLevel.test")
public void testKotlin_moveNestedClass_nonInnerToTopLevel_NonInnerToTopLevel() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveNestedClass/nonInnerToTopLevel/nonInnerToTopLevel.test");
}

@TestMetadata("kotlin/moveNestedClass/objectToTopLevel/objectToTopLevel.test")
public void testKotlin_moveNestedClass_objectToTopLevel_ObjectToTopLevel() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveNestedClass/objectToTopLevel/objectToTopLevel.test");
}

@TestMetadata("kotlin/moveNestedClass/privateClass/privateClass.test")
public void testKotlin_moveNestedClass_privateClass_PrivateClass() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveNestedClass/privateClass/privateClass.test");
}

@TestMetadata("kotlin/moveNestedClass/protectedClassNoConflicts/protectedClassNoConflicts.test")
public void testKotlin_moveNestedClass_protectedClassNoConflicts_ProtectedClassNoConflicts() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveNestedClass/protectedClassNoConflicts/protectedClassNoConflicts.test");
}

@TestMetadata("kotlin/moveNestedClass/protectedClass/protectedClass.test")
public void testKotlin_moveNestedClass_protectedClass_ProtectedClass() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveNestedClass/protectedClass/protectedClass.test");
}

@TestMetadata("kotlin/movePackage/movePackage/movePackage.test")
public void testKotlin_movePackage_movePackage_MovePackage() throws Exception {
    runTest("testData/refactoring/move/kotlin/movePackage/movePackage/movePackage.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/callsAndCallableRefs/internalUsages/differentSourceAndTargetWithFqNames/differentSourceAndTargetWithFqNames.test")
public void testKotlin_moveTopLevelDeclarations_callsAndCallableRefs_internalUsages_differentSourceAndTargetWithFqNames_DifferentSourceAndTargetWithFqNames() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/callsAndCallableRefs/internalUsages/differentSourceAndTargetWithFqNames/differentSourceAndTargetWithFqNames.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/callsAndCallableRefs/internalUsages/differentSourceAndTargetWithImports/differentSourceAndTargetWithImports.test")
public void testKotlin_moveTopLevelDeclarations_callsAndCallableRefs_internalUsages_differentSourceAndTargetWithImports_DifferentSourceAndTargetWithImports() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/callsAndCallableRefs/internalUsages/differentSourceAndTargetWithImports/differentSourceAndTargetWithImports.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/callsAndCallableRefs/internalUsages/differentSourceWithFqNames/differentSourceWithFqNames.test")
public void testKotlin_moveTopLevelDeclarations_callsAndCallableRefs_internalUsages_differentSourceWithFqNames_DifferentSourceWithFqNames() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/callsAndCallableRefs/internalUsages/differentSourceWithFqNames/differentSourceWithFqNames.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/callsAndCallableRefs/internalUsages/differentSourceWithImports/differentSourceWithImports.test")
public void testKotlin_moveTopLevelDeclarations_callsAndCallableRefs_internalUsages_differentSourceWithImports_DifferentSourceWithImports() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/callsAndCallableRefs/internalUsages/differentSourceWithImports/differentSourceWithImports.test");
}
// NOTE(review): the methods below look machine-generated (uniform
// @TestMetadata/name/path pattern, each delegating to runTest(...) with the
// matching testData path) — TODO confirm the generator; prefer regenerating
// over hand-editing. This group covers Kotlin "move" refactorings of
// top-level declarations: calls/callable refs, implicit invoke calls,
// miscellaneous scenarios, and moving declarations to files/packages.
@TestMetadata("kotlin/moveTopLevelDeclarations/callsAndCallableRefs/internalUsages/differentTargetWithFqNames/differentTargetWithFqNames.test")
public void testKotlin_moveTopLevelDeclarations_callsAndCallableRefs_internalUsages_differentTargetWithFqNames_DifferentTargetWithFqNames() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/callsAndCallableRefs/internalUsages/differentTargetWithFqNames/differentTargetWithFqNames.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/callsAndCallableRefs/internalUsages/differentTarget/differentTarget.test")
public void testKotlin_moveTopLevelDeclarations_callsAndCallableRefs_internalUsages_differentTarget_DifferentTarget() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/callsAndCallableRefs/internalUsages/differentTarget/differentTarget.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/classAlreadyInaccessible/classAlreadyInaccessible.test")
public void testKotlin_moveTopLevelDeclarations_classAlreadyInaccessible_ClassAlreadyInaccessible() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/classAlreadyInaccessible/classAlreadyInaccessible.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/functionAlreadyInaccessible/functionAlreadyInaccessible.test")
public void testKotlin_moveTopLevelDeclarations_functionAlreadyInaccessible_FunctionAlreadyInaccessible() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/functionAlreadyInaccessible/functionAlreadyInaccessible.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/implicitInvokeCalls/differentSourceAndTarget/differentSourceAndTarget.test")
public void testKotlin_moveTopLevelDeclarations_implicitInvokeCalls_differentSourceAndTarget_DifferentSourceAndTarget() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/implicitInvokeCalls/differentSourceAndTarget/differentSourceAndTarget.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/implicitInvokeCalls/differentSource/differentSource.test")
public void testKotlin_moveTopLevelDeclarations_implicitInvokeCalls_differentSource_DifferentSource() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/implicitInvokeCalls/differentSource/differentSource.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/implicitInvokeCalls/differentTarget/differentTarget.test")
public void testKotlin_moveTopLevelDeclarations_implicitInvokeCalls_differentTarget_DifferentTarget() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/implicitInvokeCalls/differentTarget/differentTarget.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/misc/classFromKtToKts/classFromKtToKts.test")
public void testKotlin_moveTopLevelDeclarations_misc_classFromKtToKts_ClassFromKtToKts() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/misc/classFromKtToKts/classFromKtToKts.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/misc/classFromKtsToKt/classFromKtsToKt.test")
public void testKotlin_moveTopLevelDeclarations_misc_classFromKtsToKt_ClassFromKtsToKt() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/misc/classFromKtsToKt/classFromKtsToKt.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/misc/classFromKtsToKts/classFromKtsToKts.test")
public void testKotlin_moveTopLevelDeclarations_misc_classFromKtsToKts_ClassFromKtsToKts() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/misc/classFromKtsToKts/classFromKtsToKts.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/misc/classWithInitializer/classWithInitializer.test")
public void testKotlin_moveTopLevelDeclarations_misc_classWithInitializer_ClassWithInitializer() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/misc/classWithInitializer/classWithInitializer.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/misc/companionExtensionMemberRef/companionExtensionMemberRef.test")
public void testKotlin_moveTopLevelDeclarations_misc_companionExtensionMemberRef_CompanionExtensionMemberRef() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/misc/companionExtensionMemberRef/companionExtensionMemberRef.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/misc/companionMemberRefDelegation/companionMemberRefDelegation.test")
public void testKotlin_moveTopLevelDeclarations_misc_companionMemberRefDelegation_CompanionMemberRefDelegation() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/misc/companionMemberRefDelegation/companionMemberRefDelegation.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/misc/companionMemberRefFakeOverride/companionMemberRefFakeOverride.test")
public void testKotlin_moveTopLevelDeclarations_misc_companionMemberRefFakeOverride_CompanionMemberRefFakeOverride() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/misc/companionMemberRefFakeOverride/companionMemberRefFakeOverride.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/misc/companionMemberRefWithJavaParent/companionMemberRefWithJavaParent.test")
public void testKotlin_moveTopLevelDeclarations_misc_companionMemberRefWithJavaParent_CompanionMemberRefWithJavaParent() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/misc/companionMemberRefWithJavaParent/companionMemberRefWithJavaParent.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/misc/delegateInObject/delegateInObject.test")
public void testKotlin_moveTopLevelDeclarations_misc_delegateInObject_DelegateInObject() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/misc/delegateInObject/delegateInObject.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/misc/facadeClassChangeInTheSamePackage/facadeClassChangeInTheSamePackage.test")
public void testKotlin_moveTopLevelDeclarations_misc_facadeClassChangeInTheSamePackage_FacadeClassChangeInTheSamePackage() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/misc/facadeClassChangeInTheSamePackage/facadeClassChangeInTheSamePackage.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/misc/fakeOverrideInObject/fakeOverrideInObject.test")
public void testKotlin_moveTopLevelDeclarations_misc_fakeOverrideInObject_FakeOverrideInObject() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/misc/fakeOverrideInObject/fakeOverrideInObject.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/misc/functionAndPropertyWithSameName/functionAndPropertyWithSameName.test")
public void testKotlin_moveTopLevelDeclarations_misc_functionAndPropertyWithSameName_FunctionAndPropertyWithSameName() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/misc/functionAndPropertyWithSameName/functionAndPropertyWithSameName.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/misc/internalReferences/internalReferences.test")
public void testKotlin_moveTopLevelDeclarations_misc_internalReferences_InternalReferences() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/misc/internalReferences/internalReferences.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/misc/kt17032/kt17032.test")
public void testKotlin_moveTopLevelDeclarations_misc_kt17032_Kt17032() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/misc/kt17032/kt17032.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/misc/moveClassFromDefaultPackage/moveClassFromDefaultPackage.test")
public void testKotlin_moveTopLevelDeclarations_misc_moveClassFromDefaultPackage_MoveClassFromDefaultPackage() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/misc/moveClassFromDefaultPackage/moveClassFromDefaultPackage.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/misc/moveClassToDefaultPackage/moveClassToDefaultPackage.test")
public void testKotlin_moveTopLevelDeclarations_misc_moveClassToDefaultPackage_MoveClassToDefaultPackage() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/misc/moveClassToDefaultPackage/moveClassToDefaultPackage.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/misc/moveClassWithImportsRetained/moveClassWithImportsRetained.test")
public void testKotlin_moveTopLevelDeclarations_misc_moveClassWithImportsRetained_MoveClassWithImportsRetained() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/misc/moveClassWithImportsRetained/moveClassWithImportsRetained.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/misc/moveFromDefaultPackage/moveFromDefaultPackage.test")
public void testKotlin_moveTopLevelDeclarations_misc_moveFromDefaultPackage_MoveFromDefaultPackage() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/misc/moveFromDefaultPackage/moveFromDefaultPackage.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/misc/moveFunctionWithImportsRetained/moveFunctionWithImportsRetained.test")
public void testKotlin_moveTopLevelDeclarations_misc_moveFunctionWithImportsRetained_MoveFunctionWithImportsRetained() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/misc/moveFunctionWithImportsRetained/moveFunctionWithImportsRetained.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/misc/mutualDependency/mutualDependency.test")
public void testKotlin_moveTopLevelDeclarations_misc_mutualDependency_MutualDependency() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/misc/mutualDependency/mutualDependency.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/misc/nonCodeUsagesWithJavaFacadeMethod/nonCodeUsagesWithJavaFacadeMethod.test")
public void testKotlin_moveTopLevelDeclarations_misc_nonCodeUsagesWithJavaFacadeMethod_NonCodeUsagesWithJavaFacadeMethod() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/misc/nonCodeUsagesWithJavaFacadeMethod/nonCodeUsagesWithJavaFacadeMethod.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/misc/nonCodeUsagesWithQuotedName/nonCodeUsagesWithQuotedName.test")
public void testKotlin_moveTopLevelDeclarations_misc_nonCodeUsagesWithQuotedName_NonCodeUsagesWithQuotedName() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/misc/nonCodeUsagesWithQuotedName/nonCodeUsagesWithQuotedName.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/misc/protectedConstructorRefInSuperListEntry/protectedConstructorRefInSuperListEntry.test")
public void testKotlin_moveTopLevelDeclarations_misc_protectedConstructorRefInSuperListEntry_ProtectedConstructorRefInSuperListEntry() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/misc/protectedConstructorRefInSuperListEntry/protectedConstructorRefInSuperListEntry.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/misc/protectedMembersExternalRefs/protectedMembersExternalRefs.test")
public void testKotlin_moveTopLevelDeclarations_misc_protectedMembersExternalRefs_ProtectedMembersExternalRefs() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/misc/protectedMembersExternalRefs/protectedMembersExternalRefs.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/misc/protectedMembersInternalRefs/protectedMembersInternalRefs.test")
public void testKotlin_moveTopLevelDeclarations_misc_protectedMembersInternalRefs_ProtectedMembersInternalRefs() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/misc/protectedMembersInternalRefs/protectedMembersInternalRefs.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/misc/sealedClassWithAllSubclasses/sealedClassWithAllSubclasses.test")
public void testKotlin_moveTopLevelDeclarations_misc_sealedClassWithAllSubclasses_SealedClassWithAllSubclasses() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/misc/sealedClassWithAllSubclasses/sealedClassWithAllSubclasses.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/misc/sealedClassWithSkippedSubclasses/sealedClassWithSkippedSubclasses.test")
public void testKotlin_moveTopLevelDeclarations_misc_sealedClassWithSkippedSubclasses_SealedClassWithSkippedSubclasses() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/misc/sealedClassWithSkippedSubclasses/sealedClassWithSkippedSubclasses.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/misc/sealedSubclassWithSkippedRoot/sealedSubclassWithSkippedRoot.test")
public void testKotlin_moveTopLevelDeclarations_misc_sealedSubclassWithSkippedRoot_SealedSubclassWithSkippedRoot() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/misc/sealedSubclassWithSkippedRoot/sealedSubclassWithSkippedRoot.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/misc/selfReferences/selfReferences.test")
public void testKotlin_moveTopLevelDeclarations_misc_selfReferences_SelfReferences() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/misc/selfReferences/selfReferences.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/misc/shortenCompanionObject2/shortenCompanionObject2.test")
public void testKotlin_moveTopLevelDeclarations_misc_shortenCompanionObject2_ShortenCompanionObject2() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/misc/shortenCompanionObject2/shortenCompanionObject2.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/misc/shortenStringTemplateEntry/shortenStringTemplateEntry.test")
public void testKotlin_moveTopLevelDeclarations_misc_shortenStringTemplateEntry_ShortenStringTemplateEntry() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/misc/shortenStringTemplateEntry/shortenStringTemplateEntry.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/misc/singletonsAndStatics/singletonsAndStatics.test")
public void testKotlin_moveTopLevelDeclarations_misc_singletonsAndStatics_SingletonsAndStatics() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/misc/singletonsAndStatics/singletonsAndStatics.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/misc/superReferences/superReferences.test")
public void testKotlin_moveTopLevelDeclarations_misc_superReferences_SuperReferences() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/misc/superReferences/superReferences.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/misc/syntheticMembers/syntheticMembers.test")
public void testKotlin_moveTopLevelDeclarations_misc_syntheticMembers_SyntheticMembers() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/misc/syntheticMembers/syntheticMembers.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/misc/unaffectedQualifiedReferences/unaffectedQualifiedReferences.test")
public void testKotlin_moveTopLevelDeclarations_misc_unaffectedQualifiedReferences_UnaffectedQualifiedReferences() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/misc/unaffectedQualifiedReferences/unaffectedQualifiedReferences.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/moveClassToFile/moveClassToFile.test")
public void testKotlin_moveTopLevelDeclarations_moveClassToFile_MoveClassToFile() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/moveClassToFile/moveClassToFile.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/moveClassToNewFileAndQuotePackage/moveClassToNewFileAndQuotePackage.test")
public void testKotlin_moveTopLevelDeclarations_moveClassToNewFileAndQuotePackage_MoveClassToNewFileAndQuotePackage() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/moveClassToNewFileAndQuotePackage/moveClassToNewFileAndQuotePackage.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/moveClassToPackage/moveClassToPackage.test")
public void testKotlin_moveTopLevelDeclarations_moveClassToPackage_MoveClassToPackage() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/moveClassToPackage/moveClassToPackage.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/moveClassWithLongFunction/moveClassWithLongFunction.test")
public void testKotlin_moveTopLevelDeclarations_moveClassWithLongFunction_MoveClassWithLongFunction() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/moveClassWithLongFunction/moveClassWithLongFunction.test");
}

// NOTE(review): "moveExtensionFunctionToFil" mirrors the testData directory
// name exactly — likely a typo in the data directory, not in this file.
@TestMetadata("kotlin/moveTopLevelDeclarations/moveExtensionFunctionToFil/moveExtensionFunctionToFil.test")
public void testKotlin_moveTopLevelDeclarations_moveExtensionFunctionToFil_MoveExtensionFunctionToFil() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/moveExtensionFunctionToFil/moveExtensionFunctionToFil.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/moveExtensionPropertyToFile/moveExtensionPropertyToFile.test")
public void testKotlin_moveTopLevelDeclarations_moveExtensionPropertyToFile_MoveExtensionPropertyToFile() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/moveExtensionPropertyToFile/moveExtensionPropertyToFile.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/moveFunctionToFile/moveFunctionToFile.test")
public void testKotlin_moveTopLevelDeclarations_moveFunctionToFile_MoveFunctionToFile() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/moveFunctionToFile/moveFunctionToFile.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/moveFunctionToPackageUsedInJava/moveFunctionToPackageUsedInJava.test")
public void testKotlin_moveTopLevelDeclarations_moveFunctionToPackageUsedInJava_MoveFunctionToPackageUsedInJava() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/moveFunctionToPackageUsedInJava/moveFunctionToPackageUsedInJava.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/moveObjectToFile/moveObjectToFile.test")
public void testKotlin_moveTopLevelDeclarations_moveObjectToFile_MoveObjectToFile() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/moveObjectToFile/moveObjectToFile.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/moveObjectToPackage/moveObjectToPackage.test")
public void testKotlin_moveTopLevelDeclarations_moveObjectToPackage_MoveObjectToPackage() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/moveObjectToPackage/moveObjectToPackage.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/moveOnlyFunctionToPackageUsedInJava/moveOnlyFunctionToPackageUsedInJava.test")
public void testKotlin_moveTopLevelDeclarations_moveOnlyFunctionToPackageUsedInJava_MoveOnlyFunctionToPackageUsedInJava() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/moveOnlyFunctionToPackageUsedInJava/moveOnlyFunctionToPackageUsedInJava.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/movePrivateClassWithUsagesInBothAccessors/movePrivateClassWithUsagesInBothAccessors.test")
public void testKotlin_moveTopLevelDeclarations_movePrivateClassWithUsagesInBothAccessors_MovePrivateClassWithUsagesInBothAccessors() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/movePrivateClassWithUsagesInBothAccessors/movePrivateClassWithUsagesInBothAccessors.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/movePrivateFun/movePrivateFun.test")
public void testKotlin_moveTopLevelDeclarations_movePrivateFun_MovePrivateFun() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/movePrivateFun/movePrivateFun.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/movePrivatePropertyWithDefaultAccessors/movePrivatePropertyWithDefaultAccessors.test")
public void testKotlin_moveTopLevelDeclarations_movePrivatePropertyWithDefaultAccessors_MovePrivatePropertyWithDefaultAccessors() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/movePrivatePropertyWithDefaultAccessors/movePrivatePropertyWithDefaultAccessors.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/movePrivateProperty/movePrivateProperty.test")
public void testKotlin_moveTopLevelDeclarations_movePrivateProperty_MovePrivateProperty() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/movePrivateProperty/movePrivateProperty.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/movePropertyToFile/movePropertyToFile.test")
public void testKotlin_moveTopLevelDeclarations_movePropertyToFile_MovePropertyToFile() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/movePropertyToFile/movePropertyToFile.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/movePropertyToPackage/movePropertyToPackage.test")
public void testKotlin_moveTopLevelDeclarations_movePropertyToPackage_MovePropertyToPackage() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/movePropertyToPackage/movePropertyToPackage.test");
}
// NOTE(review): the methods below look machine-generated (uniform
// @TestMetadata/name/path pattern, each delegating to runTest(...) with the
// matching testData path) — TODO confirm the generator; prefer regenerating
// over hand-editing. Final group: sealed-class moves, typealias moves, and
// already-inaccessible / protected-member-usage scenarios.
@TestMetadata("kotlin/moveTopLevelDeclarations/moveSealedClassWithImplsToAnotherPackage/moveSealedClassWithImplsToAnotherPackage.test")
public void testKotlin_moveTopLevelDeclarations_moveSealedClassWithImplsToAnotherPackage_MoveSealedClassWithImplsToAnotherPackage() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/moveSealedClassWithImplsToAnotherPackage/moveSealedClassWithImplsToAnotherPackage.test");
}

// NOTE(review): this metadata path points into the *WithImpls* directory but
// names the *WithNestedImpls* test file — presumably intentional test-data
// layout; verify against the testData tree.
@TestMetadata("kotlin/moveTopLevelDeclarations/moveSealedClassWithImplsToAnotherPackage/moveSealedClassWithNestedImplsToAnotherPackage.test")
public void testKotlin_moveTopLevelDeclarations_moveSealedClassWithImplsToAnotherPackage_MoveSealedClassWithNestedImplsToAnotherPackage() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/moveSealedClassWithImplsToAnotherPackage/moveSealedClassWithNestedImplsToAnotherPackage.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/moveSealedClassWithNestedImplsToAnotherPackage/moveSealedClassWithNestedImplsToAnotherPackage.test")
public void testKotlin_moveTopLevelDeclarations_moveSealedClassWithNestedImplsToAnotherPackage_MoveSealedClassWithNestedImplsToAnotherPackage() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/moveSealedClassWithNestedImplsToAnotherPackage/moveSealedClassWithNestedImplsToAnotherPackage.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/moveTypealiasToFile/moveTypealiasToFile.test")
public void testKotlin_moveTopLevelDeclarations_moveTypealiasToFile_MoveTypealiasToFile() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/moveTypealiasToFile/moveTypealiasToFile.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/objectAlreadyInaccessible/objectAlreadyInaccessible.test")
public void testKotlin_moveTopLevelDeclarations_objectAlreadyInaccessible_ObjectAlreadyInaccessible() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/objectAlreadyInaccessible/objectAlreadyInaccessible.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/propertyAlreadyInaccessible/propertyAlreadyInaccessible.test")
public void testKotlin_moveTopLevelDeclarations_propertyAlreadyInaccessible_PropertyAlreadyInaccessible() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/propertyAlreadyInaccessible/propertyAlreadyInaccessible.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/protectedMemberUsageWithSuper/protectedMemberUsageWithSuper.test")
public void testKotlin_moveTopLevelDeclarations_protectedMemberUsageWithSuper_ProtectedMemberUsageWithSuper() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/protectedMemberUsageWithSuper/protectedMemberUsageWithSuper.test");
}

@TestMetadata("kotlin/moveTopLevelDeclarations/protectedMemberUsageWithThis/protectedMemberUsageWithThis.test")
public void testKotlin_moveTopLevelDeclarations_protectedMemberUsageWithThis_ProtectedMemberUsageWithThis() throws Exception {
    runTest("testData/refactoring/move/kotlin/moveTopLevelDeclarations/protectedMemberUsageWithThis/protectedMemberUsageWithThis.test");
}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.beam.sdk.extensions.sql.meta.provider.kafka.thrift;

/**
 * Test fixture struct with two required fields: {@code i32 id} and {@code string name}.
 *
 * <p>NOTE: this class is machine-generated (see the {@code @Generated} annotation below, Thrift
 * Compiler 0.13.0). Do not edit by hand; regenerate from the .thrift IDL instead.
 */
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
@javax.annotation.Generated(
    value = "Autogenerated by Thrift Compiler (0.13.0)",
    date = "2020-12-20")
public class SimpleThriftMessage
    implements org.apache.thrift.TBase<SimpleThriftMessage, SimpleThriftMessage._Fields>,
        java.io.Serializable,
        Cloneable,
        Comparable<SimpleThriftMessage> {
  // Wire-level descriptors: struct name plus (field-name, thrift-type, field-id) triples.
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC =
      new org.apache.thrift.protocol.TStruct("SimpleThriftMessage");

  private static final org.apache.thrift.protocol.TField ID_FIELD_DESC =
      new org.apache.thrift.protocol.TField("id", org.apache.thrift.protocol.TType.I32, (short) 1);
  private static final org.apache.thrift.protocol.TField NAME_FIELD_DESC =
      new org.apache.thrift.protocol.TField(
          "name", org.apache.thrift.protocol.TType.STRING, (short) 2);

  // Two serialization strategies: field-tagged "standard" and compact positional "tuple".
  private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY =
      new SimpleThriftMessageStandardSchemeFactory();
  private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY =
      new SimpleThriftMessageTupleSchemeFactory();

  private int id; // required
  private @org.apache.thrift.annotation.Nullable java.lang.String name; // required

  /**
   * The set of fields this struct contains, along with convenience methods for finding and
   * manipulating them.
   */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    ID((short) 1, "id"),
    NAME((short) 2, "name");

    private static final java.util.Map<java.lang.String, _Fields> byName =
        new java.util.HashMap<java.lang.String, _Fields>();

    static {
      for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }

    /** Find the _Fields constant that matches fieldId, or null if its not found. */
    @org.apache.thrift.annotation.Nullable
    public static _Fields findByThriftId(int fieldId) {
      switch (fieldId) {
        case 1: // ID
          return ID;
        case 2: // NAME
          return NAME;
        default:
          return null;
      }
    }

    /** Find the _Fields constant that matches fieldId, throwing an exception if it is not found. */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null)
        throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }

    /** Find the _Fields constant that matches name, or null if its not found. */
    @org.apache.thrift.annotation.Nullable
    public static _Fields findByName(java.lang.String name) {
      return byName.get(name);
    }

    private final short _thriftId;
    private final java.lang.String _fieldName;

    _Fields(short thriftId, java.lang.String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }

    public short getThriftFieldId() {
      return _thriftId;
    }

    public java.lang.String getFieldName() {
      return _fieldName;
    }
  }

  // isset id assignments
  // Primitive fields can't signal "unset" via null, so presence is tracked in a bitfield;
  // bit __ID_ISSET_ID records whether 'id' has been assigned.
  private static final int __ID_ISSET_ID = 0;
  private byte __isset_bitfield = 0;

  public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData>
      metaDataMap;

  static {
    java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap =
        new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(
        _Fields.ID,
        new org.apache.thrift.meta_data.FieldMetaData(
            "id",
            org.apache.thrift.TFieldRequirementType.REQUIRED,
            new org.apache.thrift.meta_data.FieldValueMetaData(
                org.apache.thrift.protocol.TType.I32)));
    tmpMap.put(
        _Fields.NAME,
        new org.apache.thrift.meta_data.FieldMetaData(
            "name",
            org.apache.thrift.TFieldRequirementType.REQUIRED,
            new org.apache.thrift.meta_data.FieldValueMetaData(
                org.apache.thrift.protocol.TType.STRING)));
    metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(
        SimpleThriftMessage.class, metaDataMap);
  }

  public SimpleThriftMessage() {}

  public SimpleThriftMessage(int id, java.lang.String name) {
    this();
    this.id = id;
    setIdIsSet(true);
    this.name = name;
  }

  /** Performs a deep copy on <i>other</i>. */
  public SimpleThriftMessage(SimpleThriftMessage other) {
    __isset_bitfield = other.__isset_bitfield;
    this.id = other.id;
    if (other.isSetName()) {
      this.name = other.name;
    }
  }

  public SimpleThriftMessage deepCopy() {
    return new SimpleThriftMessage(this);
  }

  @Override
  public void clear() {
    setIdIsSet(false);
    this.id = 0;
    this.name = null;
  }

  public int getId() {
    return this.id;
  }

  public SimpleThriftMessage setId(int id) {
    this.id = id;
    setIdIsSet(true);
    return this;
  }

  public void unsetId() {
    __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __ID_ISSET_ID);
  }

  /** Returns true if field id is set (has been assigned a value) and false otherwise */
  public boolean isSetId() {
    return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __ID_ISSET_ID);
  }

  public void setIdIsSet(boolean value) {
    __isset_bitfield =
        org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __ID_ISSET_ID, value);
  }

  @org.apache.thrift.annotation.Nullable
  public java.lang.String getName() {
    return this.name;
  }

  public SimpleThriftMessage setName(
      @org.apache.thrift.annotation.Nullable java.lang.String name) {
    this.name = name;
    return this;
  }

  public void unsetName() {
    this.name = null;
  }

  /** Returns true if field name is set (has been assigned a value) and false otherwise */
  public boolean isSetName() {
    return this.name != null;
  }

  public void setNameIsSet(boolean value) {
    if (!value) {
      this.name = null;
    }
  }

  // Reflective setter used by the generic TBase machinery; null clears the field.
  public void setFieldValue(
      _Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {
    switch (field) {
      case ID:
        if (value == null) {
          unsetId();
        } else {
          setId((java.lang.Integer) value);
        }
        break;

      case NAME:
        if (value == null) {
          unsetName();
        } else {
          setName((java.lang.String) value);
        }
        break;
    }
  }

  @org.apache.thrift.annotation.Nullable
  public java.lang.Object getFieldValue(_Fields field) {
    switch (field) {
      case ID:
        return getId();

      case NAME:
        return getName();
    }
    throw new java.lang.IllegalStateException();
  }

  /**
   * Returns true if field corresponding to fieldID is set (has been assigned a value) and false
   * otherwise
   */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new java.lang.IllegalArgumentException();
    }

    switch (field) {
      case ID:
        return isSetId();
      case NAME:
        return isSetName();
    }
    throw new java.lang.IllegalStateException();
  }

  @Override
  public boolean equals(java.lang.Object that) {
    if (that == null) return false;
    if (that instanceof SimpleThriftMessage) return this.equals((SimpleThriftMessage) that);
    return false;
  }

  public boolean equals(SimpleThriftMessage that) {
    if (that == null) return false;
    if (this == that) return true;

    // 'id' is a required primitive, so both "present" flags are constant true here.
    boolean this_present_id = true;
    boolean that_present_id = true;
    if (this_present_id || that_present_id) {
      if (!(this_present_id && that_present_id)) return false;
      if (this.id != that.id) return false;
    }

    boolean this_present_name = true && this.isSetName();
    boolean that_present_name = true && that.isSetName();
    if (this_present_name || that_present_name) {
      if (!(this_present_name && that_present_name)) return false;
      if (!this.name.equals(that.name)) return false;
    }

    return true;
  }

  @Override
  public int hashCode() {
    int hashCode = 1;

    hashCode = hashCode * 8191 + id;

    // Distinct primes distinguish "name set" from "name unset" before mixing in its hash.
    hashCode = hashCode * 8191 + ((isSetName()) ? 131071 : 524287);
    if (isSetName()) hashCode = hashCode * 8191 + name.hashCode();

    return hashCode;
  }

  @Override
  public int compareTo(SimpleThriftMessage other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }

    int lastComparison = 0;

    // Compare isSet flags first, then values, field by field in id order.
    lastComparison = java.lang.Boolean.valueOf(isSetId()).compareTo(other.isSetId());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetId()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.id, other.id);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.valueOf(isSetName()).compareTo(other.isSetName());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetName()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.name, other.name);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }

  @org.apache.thrift.annotation.Nullable
  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }

  public void read(org.apache.thrift.protocol.TProtocol iprot)
      throws org.apache.thrift.TException {
    scheme(iprot).read(iprot, this);
  }

  public void write(org.apache.thrift.protocol.TProtocol oprot)
      throws org.apache.thrift.TException {
    scheme(oprot).write(oprot, this);
  }

  @Override
  public java.lang.String toString() {
    java.lang.StringBuilder sb = new java.lang.StringBuilder("SimpleThriftMessage(");
    boolean first = true;

    sb.append("id:");
    sb.append(this.id);
    first = false;
    if (!first) sb.append(", ");
    sb.append("name:");
    if (this.name == null) {
      sb.append("null");
    } else {
      sb.append(this.name);
    }
    first = false;
    sb.append(")");
    return sb.toString();
  }

  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    // alas, we cannot check 'id' because it's a primitive and you chose the non-beans generator.
    if (name == null) {
      throw new org.apache.thrift.protocol.TProtocolException(
          "Required field 'name' was not present! Struct: " + toString());
    }
    // check for sub-struct validity
  }

  // Java-serialization hook: delegate to the Thrift compact protocol for the byte format.
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(
          new org.apache.thrift.protocol.TCompactProtocol(
              new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private void readObject(java.io.ObjectInputStream in)
      throws java.io.IOException, java.lang.ClassNotFoundException {
    try {
      // it doesn't seem like you should have to do this, but java serialization is wacky, and
      // doesn't call the default constructor.
      __isset_bitfield = 0;
      read(
          new org.apache.thrift.protocol.TCompactProtocol(
              new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private static class SimpleThriftMessageStandardSchemeFactory
      implements org.apache.thrift.scheme.SchemeFactory {
    public SimpleThriftMessageStandardScheme getScheme() {
      return new SimpleThriftMessageStandardScheme();
    }
  }

  // Standard scheme: self-describing wire format with per-field type/id headers;
  // tolerates unknown or out-of-order fields by skipping them.
  private static class SimpleThriftMessageStandardScheme
      extends org.apache.thrift.scheme.StandardScheme<SimpleThriftMessage> {

    public void read(org.apache.thrift.protocol.TProtocol iprot, SimpleThriftMessage struct)
        throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true) {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 1: // ID
            if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
              struct.id = iprot.readI32();
              struct.setIdIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 2: // NAME
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.name = iprot.readString();
              struct.setNameIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();

      // check for required fields of primitive type, which can't be checked in the validate method
      if (!struct.isSetId()) {
        throw new org.apache.thrift.protocol.TProtocolException(
            "Required field 'id' was not found in serialized data! Struct: " + toString());
      }
      struct.validate();
    }

    public void write(org.apache.thrift.protocol.TProtocol oprot, SimpleThriftMessage struct)
        throws org.apache.thrift.TException {
      struct.validate();

      oprot.writeStructBegin(STRUCT_DESC);
      oprot.writeFieldBegin(ID_FIELD_DESC);
      oprot.writeI32(struct.id);
      oprot.writeFieldEnd();
      if (struct.name != null) {
        oprot.writeFieldBegin(NAME_FIELD_DESC);
        oprot.writeString(struct.name);
        oprot.writeFieldEnd();
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }
  }

  private static class SimpleThriftMessageTupleSchemeFactory
      implements org.apache.thrift.scheme.SchemeFactory {
    public SimpleThriftMessageTupleScheme getScheme() {
      return new SimpleThriftMessageTupleScheme();
    }
  }

  // Tuple scheme: positional, no field headers. Both fields are required, so they are
  // written/read unconditionally and in declaration order.
  private static class SimpleThriftMessageTupleScheme
      extends org.apache.thrift.scheme.TupleScheme<SimpleThriftMessage> {

    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, SimpleThriftMessage struct)
        throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TTupleProtocol oprot =
          (org.apache.thrift.protocol.TTupleProtocol) prot;
      oprot.writeI32(struct.id);
      oprot.writeString(struct.name);
    }

    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, SimpleThriftMessage struct)
        throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TTupleProtocol iprot =
          (org.apache.thrift.protocol.TTupleProtocol) prot;
      struct.id = iprot.readI32();
      struct.setIdIsSet(true);
      struct.name = iprot.readString();
      struct.setNameIsSet(true);
    }
  }

  // Picks the scheme implementation matching the protocol's preferred scheme class.
  private static <S extends org.apache.thrift.scheme.IScheme> S scheme(
      org.apache.thrift.protocol.TProtocol proto) {
    return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme())
            ? STANDARD_SCHEME_FACTORY
            : TUPLE_SCHEME_FACTORY)
        .getScheme();
  }
}
/** * Copyright (C) 2014-2015 LinkedIn Corp. (pinot-core@linkedin.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.linkedin.pinot.common.metadata; import java.util.ArrayList; import java.util.List; import org.apache.helix.AccessOption; import org.apache.helix.ZNRecord; import org.apache.helix.store.zk.ZkHelixPropertyStore; import com.linkedin.pinot.common.config.AbstractTableConfig; import com.linkedin.pinot.common.config.TableNameBuilder; import com.linkedin.pinot.common.metadata.instance.InstanceZKMetadata; import com.linkedin.pinot.common.metadata.segment.OfflineSegmentZKMetadata; import com.linkedin.pinot.common.metadata.segment.RealtimeSegmentZKMetadata; import com.linkedin.pinot.common.utils.StringUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class ZKMetadataProvider { private static final Logger LOGGER = LoggerFactory.getLogger(ZKMetadataProvider.class); private static final String CLUSTER_TENANT_ISOLATION_ENABLED_KEY = "tenantIsolationEnabled"; private static String PROPERTYSTORE_SEGMENTS_PREFIX = "/SEGMENTS"; private static String PROPERTYSTORE_TABLE_CONFIGS_PREFIX = "/CONFIGS/TABLE"; private static String PROPERTYSTORE_INSTANCE_CONFIGS_PREFIX = "/CONFIGS/INSTANCE"; private static String PROPERTYSTORE_CLUSTER_CONFIGS_PREFIX = "/CONFIGS/CLUSTER"; public static void setRealtimeTableConfig(ZkHelixPropertyStore<ZNRecord> propertyStore, String realtimeTableName, ZNRecord znRecord) { 
propertyStore.set(constructPropertyStorePathForResourceConfig(realtimeTableName), znRecord, AccessOption.PERSISTENT); } public static void setOfflineTableConfig(ZkHelixPropertyStore<ZNRecord> propertyStore, String offlineTableName, ZNRecord znRecord) { propertyStore.set(constructPropertyStorePathForResourceConfig(offlineTableName), znRecord, AccessOption.PERSISTENT); } public static void setInstanceZKMetadata(ZkHelixPropertyStore<ZNRecord> propertyStore, InstanceZKMetadata instanceZKMetadata) { ZNRecord znRecord = instanceZKMetadata.toZNRecord(); propertyStore.set(StringUtil.join("/", PROPERTYSTORE_INSTANCE_CONFIGS_PREFIX, instanceZKMetadata.getId()), znRecord, AccessOption.PERSISTENT); } public static InstanceZKMetadata getInstanceZKMetadata(ZkHelixPropertyStore<ZNRecord> propertyStore, String instanceId) { ZNRecord znRecord = propertyStore.get(StringUtil.join("/", PROPERTYSTORE_INSTANCE_CONFIGS_PREFIX, instanceId), null, AccessOption.PERSISTENT); if (znRecord == null) { return null; } return new InstanceZKMetadata(znRecord); } public static String constructPropertyStorePathForSegment(String resourceName, String segmentName) { return StringUtil.join("/", PROPERTYSTORE_SEGMENTS_PREFIX, resourceName, segmentName); } public static String constructPropertyStorePathForResource(String resourceName) { return StringUtil.join("/", PROPERTYSTORE_SEGMENTS_PREFIX, resourceName); } public static String constructPropertyStorePathForResourceConfig(String resourceName) { return StringUtil.join("/", PROPERTYSTORE_TABLE_CONFIGS_PREFIX, resourceName); } public static String constructPropertyStorePathForControllerConfig(String controllerConfigKey) { return StringUtil.join("/", PROPERTYSTORE_CLUSTER_CONFIGS_PREFIX, controllerConfigKey); } public static boolean isSegmentExisted(ZkHelixPropertyStore<ZNRecord> propertyStore, String resourceNameForResource, String segmentName) { return propertyStore.exists(constructPropertyStorePathForSegment(resourceNameForResource, segmentName), 
AccessOption.PERSISTENT); } public static void removeResourceSegmentsFromPropertyStore(ZkHelixPropertyStore<ZNRecord> propertyStore, String resourceName) { String propertyStorePath = constructPropertyStorePathForResource(resourceName); if (propertyStore.exists(propertyStorePath, AccessOption.PERSISTENT)) { propertyStore.remove(propertyStorePath, AccessOption.PERSISTENT); } } public static void removeResourceConfigFromPropertyStore(ZkHelixPropertyStore<ZNRecord> propertyStore, String resourceName) { String propertyStorePath = constructPropertyStorePathForResourceConfig(resourceName); if (propertyStore.exists(propertyStorePath, AccessOption.PERSISTENT)) { propertyStore.remove(propertyStorePath, AccessOption.PERSISTENT); } } public static void setOfflineSegmentZKMetadata(ZkHelixPropertyStore<ZNRecord> propertyStore, OfflineSegmentZKMetadata offlineSegmentZKMetadata) { propertyStore.set(constructPropertyStorePathForSegment( TableNameBuilder.OFFLINE_TABLE_NAME_BUILDER.forTable(offlineSegmentZKMetadata.getTableName()), offlineSegmentZKMetadata.getSegmentName()), offlineSegmentZKMetadata.toZNRecord(), AccessOption.PERSISTENT); } public static void setRealtimeSegmentZKMetadata(ZkHelixPropertyStore<ZNRecord> propertyStore, RealtimeSegmentZKMetadata realtimeSegmentZKMetadata) { propertyStore.set(constructPropertyStorePathForSegment( TableNameBuilder.REALTIME_TABLE_NAME_BUILDER.forTable(realtimeSegmentZKMetadata.getTableName()), realtimeSegmentZKMetadata.getSegmentName()), realtimeSegmentZKMetadata.toZNRecord(), AccessOption.PERSISTENT); } public static OfflineSegmentZKMetadata getOfflineSegmentZKMetadata(ZkHelixPropertyStore<ZNRecord> propertyStore, String tableName, String segmentName) { String offlineTableName = TableNameBuilder.OFFLINE_TABLE_NAME_BUILDER.forTable(tableName); return new OfflineSegmentZKMetadata(propertyStore.get(constructPropertyStorePathForSegment(offlineTableName, segmentName), null, AccessOption.PERSISTENT)); } public static RealtimeSegmentZKMetadata 
getRealtimeSegmentZKMetadata(ZkHelixPropertyStore<ZNRecord> propertyStore, String tableName, String segmentName) { String realtimeTableName = TableNameBuilder.REALTIME_TABLE_NAME_BUILDER.forTable(tableName); return new RealtimeSegmentZKMetadata(propertyStore.get(constructPropertyStorePathForSegment(realtimeTableName, segmentName), null, AccessOption.PERSISTENT)); } public static AbstractTableConfig getOfflineTableConfig(ZkHelixPropertyStore<ZNRecord> propertyStore, String tableName) { String offlineTableName = TableNameBuilder.OFFLINE_TABLE_NAME_BUILDER.forTable(tableName); ZNRecord znRecord = propertyStore.get(constructPropertyStorePathForResourceConfig(offlineTableName), null, AccessOption.PERSISTENT); if (znRecord == null) { return null; } try { return AbstractTableConfig.fromZnRecord(znRecord); } catch (Exception e) { LOGGER.warn("Caught exception while getting offline table configuration", e); return null; } } public static AbstractTableConfig getRealtimeTableConfig(ZkHelixPropertyStore<ZNRecord> propertyStore, String tableName) { String realtimeTableName = TableNameBuilder.REALTIME_TABLE_NAME_BUILDER.forTable(tableName); ZNRecord znRecord = propertyStore.get(constructPropertyStorePathForResourceConfig(realtimeTableName), null, AccessOption.PERSISTENT); if (znRecord == null) { return null; } try { return AbstractTableConfig.fromZnRecord(znRecord); } catch (Exception e) { LOGGER.warn("Caught exception while getting realtime table configuration", e); return null; } } public static List<OfflineSegmentZKMetadata> getOfflineSegmentZKMetadataListForTable(ZkHelixPropertyStore<ZNRecord> propertyStore, String tableName) { List<OfflineSegmentZKMetadata> resultList = new ArrayList<OfflineSegmentZKMetadata>(); if (propertyStore == null) { return resultList; } String offlineTableName = TableNameBuilder.OFFLINE_TABLE_NAME_BUILDER.forTable(tableName); if (propertyStore.exists(constructPropertyStorePathForResource(offlineTableName), AccessOption.PERSISTENT)) { List<ZNRecord> 
znRecordList = propertyStore.getChildren(constructPropertyStorePathForResource(offlineTableName), null, AccessOption.PERSISTENT); if (znRecordList != null) { for (ZNRecord record : znRecordList) { resultList.add(new OfflineSegmentZKMetadata(record)); } } } return resultList; } public static List<RealtimeSegmentZKMetadata> getRealtimeSegmentZKMetadataListForTable(ZkHelixPropertyStore<ZNRecord> propertyStore, String resourceName) { List<RealtimeSegmentZKMetadata> resultList = new ArrayList<RealtimeSegmentZKMetadata>(); if (propertyStore == null) { return resultList; } String realtimeTableName = TableNameBuilder.REALTIME_TABLE_NAME_BUILDER.forTable(resourceName); if (propertyStore.exists(constructPropertyStorePathForResource(realtimeTableName), AccessOption.PERSISTENT)) { List<ZNRecord> znRecordList = propertyStore.getChildren(constructPropertyStorePathForResource(realtimeTableName), null, AccessOption.PERSISTENT); if (znRecordList != null) { for (ZNRecord record : znRecordList) { resultList.add(new RealtimeSegmentZKMetadata(record)); } } } return resultList; } public static void setClusterTenantIsolationEnabled(ZkHelixPropertyStore<ZNRecord> propertyStore, boolean isSingleTenantCluster) { if (!propertyStore.exists(constructPropertyStorePathForControllerConfig(CLUSTER_TENANT_ISOLATION_ENABLED_KEY), AccessOption.PERSISTENT)) { ZNRecord znRecord = new ZNRecord(CLUSTER_TENANT_ISOLATION_ENABLED_KEY); znRecord.setBooleanField(CLUSTER_TENANT_ISOLATION_ENABLED_KEY, isSingleTenantCluster); propertyStore.set(constructPropertyStorePathForControllerConfig(CLUSTER_TENANT_ISOLATION_ENABLED_KEY), znRecord, AccessOption.PERSISTENT); } } public static Boolean getClusterTenantIsolationEnabled(ZkHelixPropertyStore<ZNRecord> propertyStore) { String controllerConfigPath = constructPropertyStorePathForControllerConfig(CLUSTER_TENANT_ISOLATION_ENABLED_KEY); if (propertyStore.exists(controllerConfigPath, AccessOption.PERSISTENT)) { ZNRecord znRecord = propertyStore.get(controllerConfigPath, 
null, AccessOption.PERSISTENT); if (znRecord.getSimpleFields().keySet().contains(CLUSTER_TENANT_ISOLATION_ENABLED_KEY)) { return znRecord.getBooleanField(CLUSTER_TENANT_ISOLATION_ENABLED_KEY, true); } else { return true; } } else { return true; } } }
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.application.options.codeStyle; import com.intellij.openapi.diagnostic.Logger; import com.intellij.psi.codeStyle.CodeStyleSettings; import com.intellij.psi.codeStyle.CommonCodeStyleSettings; import com.intellij.psi.codeStyle.CustomCodeStyleSettings; import com.intellij.ui.ClickListener; import com.intellij.ui.SpeedSearchComparator; import com.intellij.ui.TreeSpeedSearch; import com.intellij.ui.components.JBScrollPane; import com.intellij.ui.treeStructure.Tree; import com.intellij.util.containers.MultiMap; import com.intellij.util.ui.JBUI; import com.intellij.util.ui.UIUtil; import com.intellij.util.ui.tree.TreeUtil; import gnu.trove.THashMap; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.tree.*; import java.awt.*; import java.awt.event.KeyAdapter; import java.awt.event.KeyEvent; import java.awt.event.MouseEvent; import java.lang.reflect.Field; import java.util.*; import java.util.List; /** * @author max */ public abstract class OptionTreeWithPreviewPanel extends CustomizableLanguageCodeStylePanel { private static final Logger LOG = Logger.getInstance("#com.intellij.application.options.CodeStyleSpacesPanel"); protected JTree myOptionsTree; protected final ArrayList<BooleanOptionKey> myKeys = new ArrayList<>(); protected final JPanel myPanel = new 
JPanel(new GridBagLayout()); private boolean myShowAllStandardOptions = false; private final Set<String> myAllowedOptions = new HashSet<>(); protected MultiMap<String, CustomBooleanOptionInfo> myCustomOptions = new MultiMap<>(); protected boolean isFirstUpdate = true; private final Map<String, String> myRenamedFields = new THashMap<>(); private final Map<String, String> myRemappedGroups = new THashMap<>(); public OptionTreeWithPreviewPanel(CodeStyleSettings settings) { super(settings); } @Override protected void init() { super.init(); initTables(); myOptionsTree = createOptionsTree(); myOptionsTree.setCellRenderer(new MyTreeCellRenderer()); myOptionsTree.setBackground(UIUtil.getPanelBackground()); myOptionsTree.setBorder(JBUI.Borders.emptyRight(10)); JScrollPane scrollPane = new JBScrollPane(myOptionsTree) { @Override public Dimension getMinimumSize() { return super.getPreferredSize(); } }; myPanel.add(scrollPane, new GridBagConstraints(0, 0, 1, 1, 0, 1, GridBagConstraints.CENTER, GridBagConstraints.BOTH, JBUI.emptyInsets(), 0, 0)); JPanel previewPanel = createPreviewPanel(); myPanel.add(previewPanel, new GridBagConstraints(1, 0, 1, 1, 1, 1, GridBagConstraints.CENTER, GridBagConstraints.BOTH, JBUI.emptyInsets(), 0, 0)); installPreviewPanel(previewPanel); addPanelToWatch(myPanel); isFirstUpdate = false; } @Override public void showAllStandardOptions() { myShowAllStandardOptions = true; updateOptions(true); } @Override public void showStandardOptions(String... optionNames) { if (isFirstUpdate) { Collections.addAll(myAllowedOptions, optionNames); } updateOptions(false, optionNames); } @Override public void showCustomOption(Class<? extends CustomCodeStyleSettings> settingsClass, String fieldName, String title, String groupName, Object... options) { showCustomOption(settingsClass, fieldName, title, groupName, null, null, options); } @Override public void showCustomOption(Class<? 
extends CustomCodeStyleSettings> settingsClass, String fieldName, String title, @Nullable String groupName, @Nullable OptionAnchor anchor, @Nullable String anchorFieldName, Object... options) { if (isFirstUpdate) { myCustomOptions.putValue(groupName, new CustomBooleanOptionInfo(settingsClass, fieldName, title, groupName, anchor, anchorFieldName)); } enableOption(fieldName); } @Override public void renameStandardOption(String fieldName, String newTitle) { if (isFirstUpdate) { myRenamedFields.put(fieldName, newTitle); } } protected void updateOptions(boolean showAllStandardOptions, String... allowedOptions) { for (BooleanOptionKey key : myKeys) { String fieldName = key.field.getName(); if (key instanceof CustomBooleanOptionKey) { key.setEnabled(false); } else if (showAllStandardOptions) { key.setEnabled(true); } else { key.setEnabled(false); for (String optionName : allowedOptions) { if (fieldName.equals(optionName)) { key.setEnabled(true); break; } } } } } protected void enableOption(String optionName) { for (BooleanOptionKey key : myKeys) { if (key.field.getName().equals(optionName)) { key.setEnabled(true); } } } protected JTree createOptionsTree() { DefaultMutableTreeNode rootNode = new DefaultMutableTreeNode(); String groupName = ""; DefaultMutableTreeNode groupNode = null; List<BooleanOptionKey> result = sortOptions(orderByGroup(myKeys)); for (BooleanOptionKey key : result) { String newGroupName = key.groupName; if (!newGroupName.equals(groupName) || groupNode == null) { groupName = newGroupName; groupNode = new DefaultMutableTreeNode(newGroupName); rootNode.add(groupNode); } if (isOptionVisible(key)) { groupNode.add(new MyToggleTreeNode(key, key.title)); } } DefaultTreeModel model = new DefaultTreeModel(rootNode); final Tree optionsTree = new Tree(model); new TreeSpeedSearch(optionsTree).setComparator(new SpeedSearchComparator(false)); TreeUtil.installActions(optionsTree); optionsTree.setRootVisible(false); UIUtil.setLineStyleAngled(optionsTree); 
optionsTree.setShowsRootHandles(true); optionsTree.addKeyListener(new KeyAdapter() { @Override public void keyPressed(KeyEvent e) { if (!optionsTree.isEnabled()) return; if (e.getKeyCode() == KeyEvent.VK_SPACE) { TreePath treePath = optionsTree.getLeadSelectionPath(); selectCheckbox(treePath); e.consume(); } } }); new ClickListener() { @Override public boolean onClick(@NotNull MouseEvent e, int clickCount) { if (!optionsTree.isEnabled()) return false; TreePath treePath = optionsTree.getPathForLocation(e.getX(), e.getY()); selectCheckbox(treePath); return true; } }.installOn(optionsTree); int row = 0; while (row < optionsTree.getRowCount()) { optionsTree.expandRow(row); row++; } return optionsTree; } private List<BooleanOptionKey> orderByGroup(final List<BooleanOptionKey> options) { final List<String> groupOrder = getGroupOrder(options); List<BooleanOptionKey> result = new ArrayList<>(options.size()); result.addAll(options); Collections.sort(result, (key1, key2) -> { String group1 = key1.groupName; String group2 = key2.groupName; if (group1 == null) { return group2 == null ? 
0 : 1; } if (group2 == null) { return -1; } Integer index1 = groupOrder.indexOf(group1); Integer index2 = groupOrder.indexOf(group2); if (index1 == -1 || index2 == -1) return group1.compareToIgnoreCase(group2); return index1.compareTo(index2); }); return result; } protected List<String> getGroupOrder(List<BooleanOptionKey> options) { List<String> groupOrder = new ArrayList<>(); for (BooleanOptionKey each : options) { if (each.groupName != null && !groupOrder.contains(each.groupName)) { groupOrder.add(each.groupName); } } return groupOrder; } private void selectCheckbox(TreePath treePath) { if (treePath == null) { return; } Object o = treePath.getLastPathComponent(); if (o instanceof MyToggleTreeNode) { MyToggleTreeNode node = (MyToggleTreeNode)o; if (!node.isEnabled()) return; node.setSelected(!node.isSelected()); int row = myOptionsTree.getRowForPath(treePath); myOptionsTree.repaint(myOptionsTree.getRowBounds(row)); //updatePreview(); somethingChanged(); } } protected abstract void initTables(); @Override protected void resetImpl(final CodeStyleSettings settings) { TreeModel treeModel = myOptionsTree.getModel(); TreeNode root = (TreeNode)treeModel.getRoot(); resetNode(root, settings); ((DefaultTreeModel)treeModel).nodeChanged(root); } private void resetNode(TreeNode node, final CodeStyleSettings settings) { if (node instanceof MyToggleTreeNode) { resetMyTreeNode((MyToggleTreeNode)node, settings); return; } for (int j = 0; j < node.getChildCount(); j++) { TreeNode child = node.getChildAt(j); resetNode(child, settings); } } private void resetMyTreeNode(MyToggleTreeNode childNode, final CodeStyleSettings settings) { try { BooleanOptionKey key = (BooleanOptionKey)childNode.getKey(); childNode.setSelected(key.getValue(settings)); childNode.setEnabled(key.isEnabled()); } catch (IllegalArgumentException | IllegalAccessException e) { LOG.error(e); } } @Override public void apply(CodeStyleSettings settings) { TreeModel treeModel = myOptionsTree.getModel(); TreeNode root = 
(TreeNode)treeModel.getRoot(); applyNode(root, settings); } private static void applyNode(TreeNode node, final CodeStyleSettings settings) { if (node instanceof MyToggleTreeNode) { applyToggleNode((MyToggleTreeNode)node, settings); return; } for (int j = 0; j < node.getChildCount(); j++) { TreeNode child = node.getChildAt(j); applyNode(child, settings); } } private static void applyToggleNode(MyToggleTreeNode childNode, final CodeStyleSettings settings) { BooleanOptionKey key = (BooleanOptionKey)childNode.getKey(); key.setValue(settings, childNode.isSelected()); } @Override public boolean isModified(CodeStyleSettings settings) { TreeModel treeModel = myOptionsTree.getModel(); TreeNode root = (TreeNode)treeModel.getRoot(); if (isModified(root, settings)) { return true; } return false; } private static boolean isModified(TreeNode node, final CodeStyleSettings settings) { if (node instanceof MyToggleTreeNode) { if (isToggleNodeModified((MyToggleTreeNode)node, settings)) { return true; } } for (int j = 0; j < node.getChildCount(); j++) { TreeNode child = node.getChildAt(j); if (isModified(child, settings)) { return true; } } return false; } private static boolean isToggleNodeModified(MyToggleTreeNode childNode, final CodeStyleSettings settings) { try { BooleanOptionKey key = (BooleanOptionKey)childNode.getKey(); return childNode.isSelected() != key.getValue(settings); } catch (IllegalArgumentException | IllegalAccessException e) { LOG.error(e); } return false; } protected void initBooleanField(@NonNls String fieldName, String title, String groupName) { if (myShowAllStandardOptions || myAllowedOptions.contains(fieldName)) { doInitBooleanField(fieldName, title, groupName); } } private void doInitBooleanField(@NonNls String fieldName, String title, String groupName) { try { Class styleSettingsClass = CommonCodeStyleSettings.class; Field field = styleSettingsClass.getField(fieldName); String actualGroupName = getRemappedGroup(fieldName, groupName); BooleanOptionKey key = 
new BooleanOptionKey(fieldName, getRenamedTitle(actualGroupName, actualGroupName), getRenamedTitle(fieldName, title), field); myKeys.add(key); } catch (NoSuchFieldException | SecurityException e) { LOG.error(e); } } protected void initCustomOptions(String groupName) { for (CustomBooleanOptionInfo option : myCustomOptions.get(groupName)) { try { Field field = option.settingClass.getField(option.fieldName); myKeys.add(new CustomBooleanOptionKey(option.fieldName, getRenamedTitle(groupName, groupName), getRenamedTitle(option.fieldName, option.title), option.anchor, option.anchorFieldName, option.settingClass, field)); } catch (NoSuchFieldException | SecurityException e) { LOG.error(e); } } } private String getRenamedTitle(String fieldName, String defaultTitle) { String renamed = myRenamedFields.get(fieldName); return renamed == null ? defaultTitle : renamed; } protected static class MyTreeCellRenderer implements TreeCellRenderer { private final JLabel myLabel; private final JCheckBox myCheckBox; public MyTreeCellRenderer() { myLabel = new JLabel(); myCheckBox = new JCheckBox(); myCheckBox.setMargin(JBUI.emptyInsets()); } @Override public Component getTreeCellRendererComponent(JTree tree, Object value, boolean isSelected, boolean expanded, boolean leaf, int row, boolean hasFocus) { if (value instanceof MyToggleTreeNode) { MyToggleTreeNode treeNode = (MyToggleTreeNode)value; JToggleButton button = myCheckBox; button.setText(treeNode.getText()); button.setSelected(treeNode.isSelected); if (isSelected) { button.setForeground(UIUtil.getTreeSelectionForeground()); button.setBackground(UIUtil.getTreeSelectionBackground()); } else { button.setForeground(UIUtil.getTreeTextForeground()); button.setBackground(tree.getBackground()); } button.setEnabled(tree.isEnabled() && treeNode.isEnabled()); return button; } else { myLabel.setText(value.toString()); myLabel.setFont(myLabel.getFont().deriveFont(Font.BOLD)); myLabel.setOpaque(true); if (isSelected) { 
myLabel.setForeground(UIUtil.getTreeSelectionForeground()); myLabel.setBackground(UIUtil.getTreeSelectionBackground()); } else { myLabel.setForeground(UIUtil.getTreeTextForeground()); myLabel.setBackground(tree.getBackground()); } myLabel.setEnabled(tree.isEnabled()); return myLabel; } } } private class BooleanOptionKey extends OrderedOption { final String groupName; String title; final Field field; private boolean enabled = true; public BooleanOptionKey(String fieldName, String groupName, String title, Field field) { this(fieldName, groupName, title, null, null, field); } public BooleanOptionKey(String fieldName, String groupName, String title, @Nullable OptionAnchor anchor, @Nullable String anchorFiledName, Field field) { super(fieldName, anchor, anchorFiledName); this.groupName = groupName; this.title = title; this.field = field; } public void setValue(CodeStyleSettings settings, Boolean aBoolean) { try { CommonCodeStyleSettings commonSettings = settings.getCommonSettings(getDefaultLanguage()); field.set(commonSettings, aBoolean); } catch (IllegalAccessException e) { LOG.error(e); } } public boolean getValue(CodeStyleSettings settings) throws IllegalAccessException { CommonCodeStyleSettings commonSettings = settings.getCommonSettings(getDefaultLanguage()); return field.getBoolean(commonSettings); } public void setEnabled(boolean enabled) { this.enabled = enabled; } public boolean isEnabled() { return this.enabled; } } private static class CustomBooleanOptionInfo { @NotNull final Class<? extends CustomCodeStyleSettings> settingClass; @NotNull final String fieldName; @NotNull final String title; @Nullable final String groupName; @Nullable final OptionAnchor anchor; @Nullable final String anchorFieldName; private CustomBooleanOptionInfo(@NotNull Class<? 
extends CustomCodeStyleSettings> settingClass, @NotNull String fieldName, @NotNull String title, @Nullable String groupName, @Nullable OptionAnchor anchor, @Nullable String anchorFieldName) { this.settingClass = settingClass; this.fieldName = fieldName; this.title = title; this.groupName = groupName; this.anchor = anchor; this.anchorFieldName = anchorFieldName; } } private class CustomBooleanOptionKey<T extends CustomCodeStyleSettings> extends BooleanOptionKey { private final Class<T> mySettingsClass; public CustomBooleanOptionKey(String fieldName, String groupName, String title, OptionAnchor anchor, String anchorFieldName, Class<T> settingsClass, Field field) { super(fieldName, groupName, title, anchor, anchorFieldName, field); mySettingsClass = settingsClass; } @Override public void setValue(CodeStyleSettings settings, Boolean aBoolean) { final CustomCodeStyleSettings customSettings = settings.getCustomSettings(mySettingsClass); try { field.set(customSettings, aBoolean); } catch (IllegalAccessException e) { LOG.error(e); } } @Override public boolean getValue(CodeStyleSettings settings) throws IllegalAccessException { final CustomCodeStyleSettings customSettings = settings.getCustomSettings(mySettingsClass); return field.getBoolean(customSettings); } } private static class MyToggleTreeNode extends DefaultMutableTreeNode { private final Object myKey; private final String myText; private boolean isSelected; private boolean isEnabled = true; public MyToggleTreeNode(Object key, String text) { myKey = key; myText = text; } public Object getKey() { return myKey; } public String getText() { return myText; } public void setSelected(boolean val) { isSelected = val; } public boolean isSelected() { return isSelected; } public void setEnabled(boolean val) { isEnabled = val; } public boolean isEnabled() { return isEnabled; } } @Override public JComponent getPanel() { return myPanel; } @Override public Set<String> processListOptions() { Set<String> result = new HashSet<>(); for 
(BooleanOptionKey key : myKeys) { result.add(key.title); if (key.groupName != null) { result.add(key.groupName); } } result.addAll(myRenamedFields.values()); for (String groupName : myCustomOptions.keySet()) { result.add(groupName); for (CustomBooleanOptionInfo trinity : myCustomOptions.get(groupName)) { result.add(trinity.title); } } return result; } protected boolean shouldHideOptions() { return false; } private boolean isOptionVisible(BooleanOptionKey key) { if (!shouldHideOptions()) return true; if (myShowAllStandardOptions || myAllowedOptions.contains(key.getOptionName())) return true; for (CustomBooleanOptionInfo customOption : myCustomOptions.get(key.groupName)) { if (customOption.fieldName.equals(key.getOptionName())) return true; } return false; } @Override public void moveStandardOption(String fieldName, String newGroup) { myRemappedGroups.put(fieldName, newGroup); } private String getRemappedGroup(String fieldName, String defaultName) { return myRemappedGroups.containsKey(fieldName) ? myRemappedGroups.get(fieldName) : defaultName; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.wicket.examples.upload; import java.io.File; import java.util.Arrays; import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.wicket.Application; import org.apache.wicket.PageParameters; import org.apache.wicket.examples.WicketExamplePage; import org.apache.wicket.extensions.ajax.markup.html.form.upload.UploadProgressBar; import org.apache.wicket.markup.html.basic.Label; import org.apache.wicket.markup.html.form.Form; import org.apache.wicket.markup.html.form.upload.FileUpload; import org.apache.wicket.markup.html.form.upload.FileUploadField; import org.apache.wicket.markup.html.link.Link; import org.apache.wicket.markup.html.list.ListItem; import org.apache.wicket.markup.html.list.ListView; import org.apache.wicket.markup.html.panel.FeedbackPanel; import org.apache.wicket.model.IModel; import org.apache.wicket.model.LoadableDetachableModel; import org.apache.wicket.util.file.Files; import org.apache.wicket.util.file.Folder; import org.apache.wicket.util.lang.Bytes; /** * Upload example. * * @author Eelco Hillenius */ public class UploadPage extends WicketExamplePage { /** * List view for files in upload folder. 
*/ private class FileListView extends ListView<File> { /** * Construct. * * @param name * Component name * @param files * The file list model */ public FileListView(String name, final IModel<List<File>> files) { super(name, files); } /** * @see ListView#populateItem(ListItem) */ @Override protected void populateItem(ListItem<File> listItem) { final File file = listItem.getModelObject(); listItem.add(new Label("file", file.getName())); listItem.add(new Link("delete") { @Override public void onClick() { Files.remove(file); info("Deleted " + file); } }); } } /** * Form for uploads. */ private class FileUploadForm extends Form<Void> { private FileUploadField fileUploadField; /** * Construct. * * @param name * Component name */ public FileUploadForm(String name) { super(name); // set this form to multipart mode (allways needed for uploads!) setMultiPart(true); // Add one file input field add(fileUploadField = new FileUploadField("fileInput")); // Set maximum size to 100K for demo purposes setMaxSize(Bytes.kilobytes(100)); } /** * @see org.apache.wicket.markup.html.form.Form#onSubmit() */ @Override protected void onSubmit() { final FileUpload upload = fileUploadField.getFileUpload(); if (upload != null) { // Create a new file File newFile = new File(getUploadFolder(), upload.getClientFileName()); // Check new file, delete if it allready existed checkFileExists(newFile); try { // Save to new file newFile.createNewFile(); upload.writeTo(newFile); UploadPage.this.info("saved file: " + upload.getClientFileName()); } catch (Exception e) { throw new IllegalStateException("Unable to write file"); } } } } /** Log. */ private static final Log log = LogFactory.getLog(UploadPage.class); /** Reference to listview for easy access. */ private final FileListView fileListView; /** * Constructor. 
* * @param parameters * Page parameters */ public UploadPage(final PageParameters parameters) { Folder uploadFolder = getUploadFolder(); // Create feedback panels final FeedbackPanel uploadFeedback = new FeedbackPanel("uploadFeedback"); // Add uploadFeedback to the page itself add(uploadFeedback); // Add simple upload form, which is hooked up to its feedback panel by // virtue of that panel being nested in the form. final FileUploadForm simpleUploadForm = new FileUploadForm("simpleUpload"); add(simpleUploadForm); // Add folder view add(new Label("dir", uploadFolder.getAbsolutePath())); fileListView = new FileListView("fileList", new LoadableDetachableModel<List<File>>() { @Override protected List<File> load() { return Arrays.asList(getUploadFolder().listFiles()); } }); add(fileListView); // Add upload form with ajax progress bar final FileUploadForm ajaxSimpleUploadForm = new FileUploadForm("ajax-simpleUpload"); ajaxSimpleUploadForm.add(new UploadProgressBar("progress", ajaxSimpleUploadForm)); add(ajaxSimpleUploadForm); } /** * Check whether the file allready exists, and if so, try to delete it. * * @param newFile * the file to check */ private void checkFileExists(File newFile) { if (newFile.exists()) { // Try to delete the file if (!Files.remove(newFile)) { throw new IllegalStateException("Unable to overwrite " + newFile.getAbsolutePath()); } } } private Folder getUploadFolder() { return ((UploadApplication)Application.get()).getUploadFolder(); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.spi.discovery.tcp.ipfinder.s3; import com.amazonaws.AmazonClientException; import com.amazonaws.ClientConfiguration; import com.amazonaws.auth.AWSCredentials; import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.AmazonS3Client; import com.amazonaws.services.s3.model.ObjectListing; import com.amazonaws.services.s3.model.ObjectMetadata; import com.amazonaws.services.s3.model.S3ObjectSummary; import java.io.ByteArrayInputStream; import java.net.InetSocketAddress; import java.util.Collection; import java.util.LinkedList; import java.util.StringTokenizer; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicBoolean; import org.apache.ignite.IgniteLogger; import org.apache.ignite.internal.IgniteInterruptedCheckedException; import org.apache.ignite.internal.util.tostring.GridToStringExclude; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.internal.S; import org.apache.ignite.internal.util.typedef.internal.SB; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.resources.LoggerResource; import org.apache.ignite.spi.IgniteSpiConfiguration; import 
org.apache.ignite.spi.IgniteSpiException; import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinderAdapter; /** * AWS S3-based IP finder. * <p> * For information about Amazon S3 visit <a href="http://aws.amazon.com">aws.amazon.com</a>. * <h1 class="header">Configuration</h1> * <h2 class="header">Mandatory</h2> * <ul> * <li>AWS credentials (see {@link #setAwsCredentials(AWSCredentials)})</li> * <li>Bucket name (see {@link #setBucketName(String)})</li> * </ul> * <h2 class="header">Optional</h2> * <ul> * <li>Client configuration (see {@link #setClientConfiguration(ClientConfiguration)})</li> * <li>Shared flag (see {@link #setShared(boolean)})</li> * </ul> * <p> * The finder will create S3 bucket with configured name. The bucket will contain entries named * like the following: {@code 192.168.1.136#1001}. * <p> * Note that storing data in AWS S3 service will result in charges to your AWS account. * Choose another implementation of {@link org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder} for local * or home network tests. * <p> * Note that this finder is shared by default (see {@link org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder#isShared()}. */ public class TcpDiscoveryS3IpFinder extends TcpDiscoveryIpFinderAdapter { /** Delimiter to use in S3 entries name. */ public static final String DELIM = "#"; /** Entry content. */ private static final byte[] ENTRY_CONTENT = new byte[] {1}; /** Entry metadata with content length set. */ private static final ObjectMetadata ENTRY_METADATA; static { ENTRY_METADATA = new ObjectMetadata(); ENTRY_METADATA.setContentLength(ENTRY_CONTENT.length); } /** Grid logger. */ @LoggerResource private IgniteLogger log; /** Client to interact with S3 storage. */ @GridToStringExclude private AmazonS3 s3; /** Bucket name. */ private String bucketName; /** Init guard. */ @GridToStringExclude private final AtomicBoolean initGuard = new AtomicBoolean(); /** Init latch. 
*/ @GridToStringExclude private final CountDownLatch initLatch = new CountDownLatch(1); /** Amazon client configuration. */ private ClientConfiguration cfg; /** AWS Credentials. */ @GridToStringExclude private AWSCredentials cred; /** * Constructor. */ public TcpDiscoveryS3IpFinder() { setShared(true); } /** {@inheritDoc} */ @Override public Collection<InetSocketAddress> getRegisteredAddresses() throws IgniteSpiException { initClient(); Collection<InetSocketAddress> addrs = new LinkedList<>(); try { ObjectListing list = s3.listObjects(bucketName); while (true) { for (S3ObjectSummary sum : list.getObjectSummaries()) { String key = sum.getKey(); StringTokenizer st = new StringTokenizer(key, DELIM); if (st.countTokens() != 2) U.error(log, "Failed to parse S3 entry due to invalid format: " + key); else { String addrStr = st.nextToken(); String portStr = st.nextToken(); int port = -1; try { port = Integer.parseInt(portStr); } catch (NumberFormatException e) { U.error(log, "Failed to parse port for S3 entry: " + key, e); } if (port != -1) try { addrs.add(new InetSocketAddress(addrStr, port)); } catch (IllegalArgumentException e) { U.error(log, "Failed to parse port for S3 entry: " + key, e); } } } if (list.isTruncated()) list = s3.listNextBatchOfObjects(list); else break; } } catch (AmazonClientException e) { throw new IgniteSpiException("Failed to list objects in the bucket: " + bucketName, e); } return addrs; } /** {@inheritDoc} */ @Override public void registerAddresses(Collection<InetSocketAddress> addrs) throws IgniteSpiException { assert !F.isEmpty(addrs); initClient(); for (InetSocketAddress addr : addrs) { String key = key(addr); try { s3.putObject(bucketName, key, new ByteArrayInputStream(ENTRY_CONTENT), ENTRY_METADATA); } catch (AmazonClientException e) { throw new IgniteSpiException("Failed to put entry [bucketName=" + bucketName + ", entry=" + key + ']', e); } } } /** {@inheritDoc} */ @Override public void unregisterAddresses(Collection<InetSocketAddress> 
addrs) throws IgniteSpiException { assert !F.isEmpty(addrs); initClient(); for (InetSocketAddress addr : addrs) { String key = key(addr); try { s3.deleteObject(bucketName, key); } catch (AmazonClientException e) { throw new IgniteSpiException("Failed to delete entry [bucketName=" + bucketName + ", entry=" + key + ']', e); } } } /** * Gets S3 key for provided address. * * @param addr Node address. * @return Key. */ private String key(InetSocketAddress addr) { assert addr != null; SB sb = new SB(); sb.a(addr.getAddress().getHostAddress()) .a(DELIM) .a(addr.getPort()); return sb.toString(); } /** * Amazon s3 client initialization. * * @throws org.apache.ignite.spi.IgniteSpiException In case of error. */ @SuppressWarnings({"BusyWait"}) private void initClient() throws IgniteSpiException { if (initGuard.compareAndSet(false, true)) try { if (cred == null) throw new IgniteSpiException("AWS credentials are not set."); if (cfg == null) U.warn(log, "Amazon client configuration is not set (will use default)."); if (F.isEmpty(bucketName)) throw new IgniteSpiException("Bucket name is null or empty (provide bucket name and restart)."); s3 = cfg != null ? 
new AmazonS3Client(cred, cfg) : new AmazonS3Client(cred); if (!s3.doesBucketExist(bucketName)) { try { s3.createBucket(bucketName); if (log.isDebugEnabled()) log.debug("Created S3 bucket: " + bucketName); while (!s3.doesBucketExist(bucketName)) try { U.sleep(200); } catch (IgniteInterruptedCheckedException e) { throw new IgniteSpiException("Thread has been interrupted.", e); } } catch (AmazonClientException e) { if (!s3.doesBucketExist(bucketName)) { s3 = null; throw new IgniteSpiException("Failed to create bucket: " + bucketName, e); } } } } finally { initLatch.countDown(); } else { try { U.await(initLatch); } catch (IgniteInterruptedCheckedException e) { throw new IgniteSpiException("Thread has been interrupted.", e); } if (s3 == null) throw new IgniteSpiException("Ip finder has not been initialized properly."); } } /** * Sets bucket name for IP finder. * * @param bucketName Bucket name. */ @IgniteSpiConfiguration(optional = false) public void setBucketName(String bucketName) { this.bucketName = bucketName; } /** * Sets Amazon client configuration. * <p> * For details refer to Amazon S3 API reference. * * @param cfg Amazon client configuration. */ @IgniteSpiConfiguration(optional = true) public void setClientConfiguration(ClientConfiguration cfg) { this.cfg = cfg; } /** * Sets AWS credentials. * <p> * For details refer to Amazon S3 API reference. * * @param cred AWS credentials. */ @IgniteSpiConfiguration(optional = false) public void setAwsCredentials(AWSCredentials cred) { this.cred = cred; } /** {@inheritDoc} */ @Override public String toString() { return S.toString(TcpDiscoveryS3IpFinder.class, this, "super", super.toString()); } }
/** * Copyright (c) 2011, University of Konstanz, Distributed Systems Group * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the University of Konstanz nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package org.treetank.axis; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; import java.util.Iterator; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import org.treetank.api.INodeReadTrx; import org.treetank.data.AtomicValue; import org.treetank.data.interfaces.ITreeData; import org.treetank.exception.TTException; import org.treetank.exception.TTIOException; /** * <h1>AbstractAxis</h1> * * <p> * Provide standard Java iterator capability compatible with the new enhanced for loop available since Java 5. * </p> * * <p> * All implementations must make sure to call super.hasNext() as the first thing in hasNext(). * </p> * * <p> * All users must make sure to call next() after hasNext() evaluated to true. * </p> */ public abstract class AbsAxis implements Iterator<Long>, Iterable<Long> { /** Iterate over transaction exclusive to this step. */ private final INodeReadTrx mRTX; /** Key of last found node. */ private long mKey; /** Make sure next() can only be called after hasNext(). */ private boolean mNext; /** Key of node where axis started. */ private long mStartKey; /** Include self? */ private final boolean mIncludeSelf; /** Map with ItemList to each transaction. */ private final static Map<INodeReadTrx, ItemList> atomics = new ConcurrentHashMap<INodeReadTrx, ItemList>(); /** Map with ItemList to each transaction. */ private final static Map<INodeReadTrx, Long> lastPointer = new ConcurrentHashMap<INodeReadTrx, Long>(); /** * Bind axis step to transaction. * * @param paramRtx * transaction to operate with */ public AbsAxis(final INodeReadTrx paramRtx) { this(paramRtx, false); } /** * Bind axis step to transaction. 
* * @param paramRtx * transaction to operate with * @param paramIncludeSelf * determines if self is included */ public AbsAxis(final INodeReadTrx paramRtx, final boolean paramIncludeSelf) { checkNotNull(paramRtx); mRTX = paramRtx; mIncludeSelf = paramIncludeSelf; reset(paramRtx.getNode().getDataKey()); } /** * {@inheritDoc} */ @Override public final Iterator<Long> iterator() { return this; } /** * {@inheritDoc} */ @Override public final Long next() { checkState(mNext, "IAxis.next() must be called exactely once after hasNext()" + " evaluated to true."); mKey = mRTX.getNode().getDataKey(); mNext = false; return mKey; } /** * {@inheritDoc} */ @Override public final void remove() { throw new UnsupportedOperationException(); } /** * Resetting the nodekey of this axis to a given nodekey. * * @param paramNodeKey * the nodekey where the reset should occur to. */ public void reset(final long paramNodeKey) { mStartKey = paramNodeKey; mKey = paramNodeKey; mNext = false; lastPointer.remove(mRTX); } /** * Move cursor to a node by its node key. * * @param pKey * Key of node to select. * @return True if the node with the given node key is selected. */ public boolean moveTo(final long pKey) { try { if (pKey < 0 || mRTX.moveTo(pKey)) { lastPointer.put(mRTX, pKey); return true; } else { return false; } } catch (TTIOException exc) { throw new RuntimeException(exc); } } /** * Closing the Transaction * * @throws TTException */ public void close() throws TTException { atomics.remove(mRTX); lastPointer.remove(mRTX); mRTX.close(); } /** * Getting the current node (including items from the ItemList) * * @return Getting the node. */ public ITreeData getNode() { if (lastPointer.get(mRTX) != null && lastPointer.get(mRTX) < 0) { return atomics.get(mRTX).getItem(lastPointer.get(mRTX)); } else { return mRTX.getNode(); } } /** * Make sure the transaction points to the node it started with. This must * be called just before hasNext() == false. 
* * @return Key of node where transaction was before the first call of * hasNext(). */ protected final long resetToStartKey() { // No check because of IAxis Convention 4. moveTo(mStartKey); mNext = false; return mStartKey; } /** * Make sure the transaction points to the node after the last hasNext(). * This must be called first in hasNext(). * * @return Key of node where transaction was after the last call of * hasNext(). */ protected final long resetToLastKey() { // No check because of IAxis Convention 4. moveTo(mKey); mNext = true; return mKey; } /** * Get start key. * * @return Start key. */ protected final long getStartKey() { return mStartKey; } /** * Is self included? * * @return True if self is included. False else. */ protected final boolean isSelfIncluded() { return mIncludeSelf; } /** * {@inheritDoc} */ @Override public abstract boolean hasNext(); /** * Getting the ItemList. * * @return the Itemlist */ public ItemList getItemList() { if (!atomics.containsKey(mRTX)) { atomics.put(mRTX, new ItemList()); } return atomics.get(mRTX); } /** * Adding any AtomicVal to any ItemList staticly. * * @param pRtx * as key * @param pVal * to be added * @return the index in the ItemList */ public static int addAtomicToItemList(final INodeReadTrx pRtx, final AtomicValue pVal) { if (!atomics.containsKey(pRtx)) { atomics.put(pRtx, new ItemList()); } return atomics.get(pRtx).addItem(pVal); } }
/* * Copyright 2014 Google Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.uribeacon.scan.compat; import android.app.AlarmManager; import android.app.PendingIntent; import android.bluetooth.BluetoothAdapter; import android.bluetooth.BluetoothDevice; import android.bluetooth.BluetoothManager; import android.content.Context; import android.content.Intent; import android.os.Build; import org.uribeacon.scan.util.Clock; import org.uribeacon.scan.util.Logger; import org.uribeacon.scan.util.SystemClock; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map.Entry; import java.util.Map; import java.util.Set; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; /** * Implements Bluetooth LE scan related API on top of * {@link android.os.Build.VERSION_CODES#JELLY_BEAN_MR2} and later. * <p> * This class delivers a callback on found, updated, and lost for devices matching a * {@link ScanFilter} filter during scan cycles. * <p> * A scan cycle comprises a period when the Bluetooth Adapter is active and a period when the * Bluetooth adapter is idle. Having an idle period is energy efficient for long lived scans. 
* <p> * This class can be accessed on multiple threads: * <ul> * <li> main thread (user) can call any of the BluetoothLeScanner APIs * <li> IntentService worker thread can call {@link #blockingScanCycle} * <li> AIDL binder thread can call {@link #leScanCallback.onLeScan} * </ul> * * @see <a href="http://go/ble-glossary">BLE Glossary</a> */ class JbBluetoothLeScannerCompat extends BluetoothLeScannerCompat { // Number of cycles before a sighted device is considered lost. /* @VisibleForTesting */ static final int SCAN_LOST_CYCLES = 4; // Constants for Scan Cycle // Low Power: 2.5 minute period with 1.5 seconds active (1% duty cycle) /* @VisibleForTesting */ static final int LOW_POWER_IDLE_MILLIS = 148500; /* @VisibleForTesting */ static final int LOW_POWER_ACTIVE_MILLIS = 1500; // Balanced: 15 second period with 1.5 second active (10% duty cycle) /* @VisibleForTesting */ static final int BALANCED_IDLE_MILLIS = 13500; /* @VisibleForTesting */ static final int BALANCED_ACTIVE_MILLIS = 1500; // Low Latency: 1.67 second period with 1.5 seconds active (90% duty cycle) /* @VisibleForTesting */ static final int LOW_LATENCY_IDLE_MILLIS = 167; /* @VisibleForTesting */ static final int LOW_LATENCY_ACTIVE_MILLIS = 1500; /** * Wraps user requests and stores the list of filters and callbacks. Also saves a set of * addresses for which any of the filters have matched in order to do lost processing. */ private static class ScanClient { final List<ScanFilter> filtersList; final Set<String> addressesSeen; final ScanCallback callback; final ScanSettings settings; ScanClient(ScanSettings settings, List<ScanFilter> filters, ScanCallback callback) { this.settings = settings; this.filtersList = filters; this.addressesSeen = new HashSet<String>(); this.callback = callback; } } // Alarm Scan variables private final Clock clock; private final AlarmManager alarmManager; private final PendingIntent alarmIntent; private long alarmIntervalMillis; // Variable to hold a scheduled task. 
Only used in Android 5.1. ScheduledFuture scheduledTask; // Map of BD_ADDR->ScanResult for replay to new registrations. // Entries are evicted after SCAN_LOST_CYCLES cycles. /* @VisibleForTesting */ final HashMap<String, ScanResult> recentScanResults; // Default Scan Constants = Balanced private int scanIdleMillis = BALANCED_IDLE_MILLIS; private int scanActiveMillis = BALANCED_ACTIVE_MILLIS; // Override values for scan window private int overrideScanActiveMillis = -1; private int overrideScanIdleMillis; // Milliseconds to wait before considering a device lost. If set to a negative number // SCAN_LOST_CYCLES is used to determine when to inform clients about lost events. private long scanLostOverrideMillis = -1; private final BluetoothAdapter bluetoothAdapter; /* @VisibleForTesting */ final HashMap<ScanCallback, ScanClient> serialClients; /** * The Bluetooth LE callback which will be registered with the OS, * to be fired on device discovery. */ private final BluetoothAdapter.LeScanCallback leScanCallback = new BluetoothAdapter.LeScanCallback() { /** * Callback method called from the OS on each BLE device sighting. * This method is invoked on the AIDL handler thread, so all methods * called here must be synchronized. * * @param device The device discovered * @param rssi The signal strength in dBm it was received at * @param scanRecordBytes The raw byte payload buffer */ @Override public void onLeScan(BluetoothDevice device, int rssi, byte[] scanRecordBytes) { long currentTimeInNanos = TimeUnit.MILLISECONDS.toNanos(clock.currentTimeMillis()); ScanResult result = new ScanResult(device, ScanRecord.parseFromBytes(scanRecordBytes), rssi, currentTimeInNanos); onScanResult(device.getAddress(), result); } }; /** * Package constructor, called from {@link BluetoothLeScannerCompatProvider}. 
*/ JbBluetoothLeScannerCompat( Context context, BluetoothManager manager, AlarmManager alarmManager) { this(manager, alarmManager, new SystemClock(), PendingIntent.getBroadcast(context, 0 /* requestCode */, new Intent(context, ScanWakefulBroadcastReceiver.class), 0 /* flags */)); } /** * Testing constructor for the scanner. * * @VisibleForTesting */ JbBluetoothLeScannerCompat(BluetoothManager manager, AlarmManager alarmManager, Clock clock, PendingIntent alarmIntent) { this.bluetoothAdapter = manager.getAdapter(); this.serialClients = new HashMap<ScanCallback, ScanClient>(); this.recentScanResults = new HashMap<String, ScanResult>(); this.alarmManager = alarmManager; this.alarmIntent = alarmIntent; this.clock = clock; } /** * The entry point blockingScanCycle executes a BLE Scan cycle and is called from the * ScanWakefulService. When this method ends, the service will signal the ScanWakefulBroadcast * receiver to release its wakelock and the phone will enter a sleep phase for the remainder of * the BLE scan cycle. * <p> * This is called on the IntentService handler thread and hence is synchronized. * <p> * Suppresses the experimental 'wait not in loop' warning because we don't mind exiting early. * Suppresses deprecation because this is the compatibility support. */ @SuppressWarnings({"WaitNotInLoop", "deprecation"}) synchronized void blockingScanCycle() { Logger.logDebug("Starting BLE Active Scan Cycle."); int activeMillis = getScanActiveMillis(); if (activeMillis > 0) { bluetoothAdapter.startLeScan(leScanCallback); // Sleep for the duration of the scan. No wakeups are expected, but catch is required. try { wait(activeMillis); } catch (InterruptedException e) { Logger.logError("Exception in ScanCycle Sleep", e); } finally { try { bluetoothAdapter.stopLeScan(leScanCallback); } catch (NullPointerException e) { // An NPE is thrown if Bluetooth has been reset since this blocking scan began. 
Logger.logDebug("NPE thrown in BlockingScanCycle"); } // Active BLE scan ends // Execute cycle complete to 1) detect lost devices onScanCycleComplete(); } } Logger.logDebug("Stopping BLE Active Scan Cycle."); } private void callbackLostLeScanClients(String address, ScanResult result) { for (ScanClient client : serialClients.values()) { int wantAny = client.settings.getCallbackType() & ScanSettings.CALLBACK_TYPE_ALL_MATCHES; int wantLost = client.settings.getCallbackType() & ScanSettings.CALLBACK_TYPE_MATCH_LOST; if (client.addressesSeen.remove(address) && (wantAny | wantLost) != 0) { // Catch any exceptions and log them but continue processing other scan results. try { client.callback.onScanResult(ScanSettings.CALLBACK_TYPE_MATCH_LOST, result); } catch (Exception e) { Logger.logError("Failure while sending 'lost' scan result to listener", e); } } } } /** * Process a single scan result, sending it directly * to any active listeners who want to know. * * @VisibleForTesting */ void onScanResult(String address, ScanResult result) { callbackLeScanClients(address, result); } /** * Distribute each scan record to registered clients. When a "found" event occurs record the * address in the client filter so we can later send the "lost" event to that same client. * <P> * This method will be called by the AIDL handler thread from onLeScan. */ private synchronized void callbackLeScanClients(String address, ScanResult result) { for (ScanClient client : serialClients.values()) { if (matchesAnyFilter(client.filtersList, result)) { boolean seenItBefore = client.addressesSeen.contains(address); int clientFlags = client.settings.getCallbackType(); int firstMatchBit = clientFlags & ScanSettings.CALLBACK_TYPE_FIRST_MATCH; int allMatchesBit = clientFlags & ScanSettings.CALLBACK_TYPE_ALL_MATCHES; // Catch any exceptions and log them but continue processing other listeners. 
if ((firstMatchBit | allMatchesBit) != 0) { try { if (!seenItBefore) { client.callback.onScanResult(ScanSettings.CALLBACK_TYPE_FIRST_MATCH, result); } else if (allMatchesBit != 0) { client.callback.onScanResult(ScanSettings.CALLBACK_TYPE_ALL_MATCHES, result); } } catch (Exception e) { Logger.logError("Failure while handling scan result", e); } } if (!seenItBefore) { client.addressesSeen.add(address); } } } recentScanResults.put(address, result); } @Override public synchronized boolean startScan(List<ScanFilter> filterList, ScanSettings settings, ScanCallback callback) { return startSerialScan(settings, filterList, callback); } private boolean startSerialScan(ScanSettings settings, List<ScanFilter> filterList, ScanCallback callback) { ScanClient client = new ScanClient(settings, filterList, callback); serialClients.put(callback, client); int clientFlags = client.settings.getCallbackType(); int firstMatchBit = clientFlags & ScanSettings.CALLBACK_TYPE_FIRST_MATCH; int allMatchesBit = clientFlags & ScanSettings.CALLBACK_TYPE_ALL_MATCHES; // Process new registrations by immediately invoking the "found" callback // with all previously sighted devices. if ((firstMatchBit | allMatchesBit) != 0) { for (Entry<String, ScanResult> entry : recentScanResults.entrySet()) { String address = entry.getKey(); ScanResult savedResult = entry.getValue(); if (matchesAnyFilter(filterList, savedResult)) { // Catch any exceptions and log them but continue processing other scan results. try { client.callback.onScanResult(ScanSettings.CALLBACK_TYPE_FIRST_MATCH, savedResult); } catch (Exception e) { Logger.logError("Failure while handling scan result for new listener", e); } client.addressesSeen.add(address); } } } updateRepeatingAlarm(); return true; } /** * Global override for scan window. This separately supersedes settings from all scan clients. 
* * @param scanMillis -1 to remove override, 0 to pause scan, or a positive number * @param idleMillis a positive number * @param serialScanDurationMillis not used in this scanner */ @Override public synchronized void setCustomScanTiming( int scanMillis, int idleMillis, long serialScanDurationMillis) { overrideScanActiveMillis = scanMillis; overrideScanIdleMillis = idleMillis; // reset scanner so it picks up new scan window values updateRepeatingAlarm(); } /** * Sets the time after which a sighted device will be marked as lost. */ @Override public synchronized void setScanLostOverride(long scanLostOverrideMillis) { this.scanLostOverrideMillis = scanLostOverrideMillis; } /** * Stop scanning. * * @see JbBluetoothLeScannerCompat#startScan */ @Override public synchronized void stopScan(ScanCallback callback) { serialClients.remove(callback); updateRepeatingAlarm(); } /** * Test for lost tags by periodically checking the found devices * for any that haven't been seen recently. * * @VisibleForTesting */ void onScanCycleComplete() { Iterator<Map.Entry<String, ScanResult>> iter = recentScanResults.entrySet().iterator(); long lostTimestampMillis = getLostTimestampMillis(); // Clear out any expired notifications from the "old sightings" record. 
while (iter.hasNext()) { Map.Entry<String, ScanResult> entry = iter.next(); String address = entry.getKey(); ScanResult savedResult = entry.getValue(); if (TimeUnit.NANOSECONDS.toMillis(savedResult.getTimestampNanos()) < lostTimestampMillis) { callbackLostLeScanClients(address, savedResult); iter.remove(); } } } /** * Sets parameters for the various scan modes * * @param scanMode the ScanMode in BluetoothLeScanner Settings */ private void setScanMode(int scanMode) { switch (scanMode) { case ScanSettings.SCAN_MODE_LOW_LATENCY: scanIdleMillis = LOW_LATENCY_IDLE_MILLIS; scanActiveMillis = LOW_LATENCY_ACTIVE_MILLIS; break; case ScanSettings.SCAN_MODE_LOW_POWER: scanIdleMillis = LOW_POWER_IDLE_MILLIS; scanActiveMillis = LOW_POWER_ACTIVE_MILLIS; break; // Fall through and be balanced when there's nothing saying not to. default: case ScanSettings.SCAN_MODE_BALANCED: scanIdleMillis = BALANCED_IDLE_MILLIS; scanActiveMillis = BALANCED_ACTIVE_MILLIS; break; } } private int getScanModePriority(int mode) { switch (mode) { case ScanSettings.SCAN_MODE_LOW_LATENCY: return 2; case ScanSettings.SCAN_MODE_BALANCED: return 1; case ScanSettings.SCAN_MODE_LOW_POWER: return 0; default: Logger.logError("Unknown scan mode " + mode); return 0; } } private int getMaxPriorityScanMode() { int maxPriority = -1; for (ScanClient scanClient : serialClients.values()) { ScanSettings settings = scanClient.settings; if (maxPriority == -1 || getScanModePriority(settings.getScanMode()) > getScanModePriority(maxPriority)) { maxPriority = settings.getScanMode(); } } return maxPriority; } /** * Update the repeating alarm wake-up based on the period defined for the scanner If there are * no clients, or a batch scan running, it will cancel the alarm. */ private void updateRepeatingAlarm() { Logger.logDebug("updateRepeatingAlarm, getMaxPriorityScanMode = " + getMaxPriorityScanMode()); // Apply Scan Mode (Cycle Parameters) setScanMode(getMaxPriorityScanMode()); if (serialClients.isEmpty()) { // No listeners. 
Remove the repeating alarm, if there is one. alarmManager.cancel(alarmIntent); alarmIntervalMillis = 0; Logger.logInfo("Scan : No clients left, canceling alarm."); } else { int idleMillis = getScanIdleMillis(); int scanPeriod = idleMillis + getScanActiveMillis(); if ((idleMillis != 0) && (alarmIntervalMillis != scanPeriod)) { alarmIntervalMillis = scanPeriod; Logger.logDebug("Setting repeating alarm with interval: " + alarmIntervalMillis); // In Android 5.1 the shortest interval for repeating alarm is 60 seconds: // http://code.google.com/p/android/issues/detail?id=161244 if (Build.VERSION.SDK_INT > Build.VERSION_CODES.LOLLIPOP && alarmIntervalMillis < 60000) { Logger.logDebug("Using LOLLIPOP_MR1 workaround."); alarmManager.cancel(alarmIntent); cancelScheduledTask(); scheduledTask = Executors.newSingleThreadScheduledExecutor().scheduleWithFixedDelay(new Runnable() { @Override public void run() { blockingScanCycle(); } }, 0, // initial delay alarmIntervalMillis, TimeUnit.MILLISECONDS); } else { cancelScheduledTask(); // Specifies a repeating alarm at the scanPeriod, starting immediately. alarmManager.setRepeating(AlarmManager.RTC_WAKEUP, 0, alarmIntervalMillis, alarmIntent); Logger.logInfo("Scan alarm setup complete @ " + System.currentTimeMillis()); } } } } private void cancelScheduledTask() { if (scheduledTask != null) { scheduledTask.cancel(false); } } private static boolean matchesAnyFilter(List<ScanFilter> filters, ScanResult result) { if (filters == null || filters.isEmpty()) { return true; } for (ScanFilter filter : filters) { if (filter.matches(result)) { return true; } } return false; } /** * Calculates the number of milliseconds since this device was booted up. * (Not a value that can be used as a real timestamp, but suitable for comparisons.) 
*/ private long millisecondsSinceBoot() { return TimeUnit.NANOSECONDS.toMillis(clock.elapsedRealtimeNanos()); } /** * Compute the timestamp in the past which is the earliest that a sighting can have been * seen; sightings last seen before this timestamp will be deemed to be too old. * Then the Sandmen come. * * @VisibleForTesting */ long getLostTimestampMillis() { if (scanLostOverrideMillis >= 0) { return clock.currentTimeMillis() - scanLostOverrideMillis; } return clock.currentTimeMillis() - (SCAN_LOST_CYCLES * getScanCycleMillis()); } /** * Returns the length of a single scan cycle, comprising both active and idle time. * * @VisibleForTesting */ long getScanCycleMillis() { return getScanActiveMillis() + getScanIdleMillis(); } /** * Get the current active ble scan time that has been set * * @VisibleForTesting */ int getScanActiveMillis() { return (overrideScanActiveMillis != -1) ? overrideScanActiveMillis : scanActiveMillis; } /** * Get the current idle ble scan time that has been set * * @VisibleForTesting */ int getScanIdleMillis() { return (overrideScanActiveMillis != -1) ? overrideScanIdleMillis : scanIdleMillis; } }
/*
 * Copyright (C) 2014, United States Government, as represented by the
 * Administrator of the National Aeronautics and Space Administration.
 * All rights reserved.
 *
 * The Java Pathfinder core (jpf-core) platform is licensed under the
 * Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 *        http://www.apache.org/licenses/LICENSE-2.0.
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package gov.nasa.jpf.util;

import java.io.File;
import java.util.ArrayList;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * utility to perform pathname expansion
 * the following patterns are supported so far:
 *
 * (1) brace expansion ala bash: foo{Boo,Shoo} => fooBoo, fooShoo
 *     (this doesn't check for existence, its simply lexical)
 *
 * (2) '*' wildcard pathname expansion ala bash: "*.java" | "*\Main*.java"
 *     (supports wildcards in mutiple path elements and within file/dir name)
 *
 * (3) recursive dir expansion ala Ant: "**\*.jar"
 */
public class PathnameExpander {

  /**
   * Expands a path spec: first lexical brace expansion, then (if '*' occurs) wildcard
   * expansion against the filesystem.
   *
   * @param s the path spec; may contain "{a,b}" alternatives and '*' wildcards
   * @return the expanded paths, null for a null/empty input, or a single-element
   *         array with s itself when nothing needs expanding
   * @throws IllegalArgumentException if a '{' has no matching '}'
   */
  public String[] expandPath (String s) {
    if (s == null || s.length() == 0) {
      return null;
    }

    boolean hasWildcards = (s.indexOf('*') >= 0);

    int i = s.indexOf('{');
    if (i >= 0){
      ArrayList<String> list = new ArrayList<String>();
      int j=0, jLast = s.length();

      // lexical brace expansion: each {a,b,..} multiplies the current result list
      for (; (i = s.indexOf('{', j)) >= 0;) {
        if ((j = s.indexOf('}', i)) > 0) {
          String[] choices = s.substring(i + 1, j).split(",");
          if (list.isEmpty()) {
            String prefix = s.substring(0, i);
            for (String c : choices) {
              list.add(prefix + c);
            }
          } else {
            // prefix is the literal text between the previous '}' and this '{'
            String prefix = s.substring(jLast, i);
            ArrayList<String> newList = new ArrayList<String>();
            for (String e : list) {
              for (String c : choices) {
                newList.add(e + prefix + c);
              }
            }
            list = newList;
          }
          jLast = j+1;
        } else {
          throw new IllegalArgumentException("illegal path spec (missing '}'): " + s);
        }
      }

      // append any trailing literal text after the last '}'
      if (jLast < s.length()) {
        String postfix = s.substring(jLast);
        ArrayList<String> newList = new ArrayList<String>();
        for (String e : list) {
          newList.add(e + postfix);
        }
        list = newList;
      }

      if (hasWildcards){
        ArrayList<String> newList = new ArrayList<String>();
        for (String p : list) {
          for (String c : expandWildcards(p)) {
            newList.add(c);
          }
        }
        list = newList;
      }

      return list.toArray(new String[list.size()]);

    } else { // no bracket expansion required
      if (hasWildcards){
        return expandWildcards(s);
      } else { // nothing to expand at all
        return (new String[] {s});
      }
    }
  }

  /**
   * Expands '*' and "**" path elements against the filesystem, one path element at a time.
   * Returns an empty array if nothing matches, or {s} when s has no wildcard.
   */
  protected String[] expandWildcards (String s){
    int i = s.indexOf('*');
    if (i >= 0){ // Ok, we have at least one wildcard
      String[] a = s.split("\\/");
      ArrayList<File> list = new ArrayList<File>();

      int j= initializeMatchList(list, a[0]);

      for (; j<a.length; j++){
        ArrayList<File> newList = new ArrayList<File>();
        String e = a[j];
        if (e.indexOf('*') >= 0){
          if (e.equals("**")){ // matches all subdirs recursively
            collectDirs(list, newList);
          } else { // file/dir name match
            collectMatchingNames(list, newList, getPattern(e));
          }
        } else { // no wildcard
          collectExistingFile(list, newList, e);
        }

        if (newList.isEmpty()){ // shortcut, nothing more to match
          return new String[0];
        }
        list = newList;
      }

      return getPaths(list);

    } else { // no wildcards, nothing to expand
      return new String[] {s};
    }
  }

  /** Seeds the match list from the first path element; returns the index to resume from. */
  private int initializeMatchList (ArrayList<File> list, String path){
    if (path.isEmpty()){ // absolute pathname (ignoring drive letters for now)
      list.add(new File(File.separator));
      return 1;
    } else if (path.equals("..") || path.equals(".")){
      list.add(new File(path));
      return 1;
    } else {
      list.add(new File("."));
      return 0;
    }
  }

  /** Adds every entry of each dir in list whose name matches pattern. */
  private void collectMatchingNames(ArrayList<File> list, ArrayList<File> newList,
                                    Pattern pattern){
    for (File dir : list) {
      if (dir.isDirectory()){
        String[] names = dir.list();
        if (names == null) { // list() returns null on I/O error or permission denial
          continue;
        }
        for (String c : names){
          Matcher m = pattern.matcher(c);
          if (m.matches()){
            newList.add(new File(dir,c));
          }
        }
      }
    }
  }

  /** Adds dir/fname for each dir in list where that file exists. */
  private void collectExistingFile(ArrayList<File> list, ArrayList<File> newList,
                                   String fname) {
    for (File dir : list) {
      if (dir.isDirectory()){
        File nf = new File(dir, fname);
        if (nf.exists()) {
          newList.add(nf);
        }
      }
    }
  }

  /** Adds each dir in list plus all of its subdirectories, recursively. */
  private void collectDirs(ArrayList<File> list, ArrayList<File> newList){
    for (File dir : list) {
      if (dir.isDirectory()){
        newList.add(dir); // this includes the dir itself!
        collectSubdirs(newList,dir);
      }
    }
  }

  private void collectSubdirs(ArrayList<File> newList, File dir) {
    File[] entries = dir.listFiles();
    if (entries == null) { // listFiles() returns null on I/O error or permission denial
      return;             // (previously this caused a NullPointerException)
    }
    for (File f : entries){
      if (f.isDirectory()){
        newList.add(f);
        collectSubdirs(newList, f);
      }
    }
  }

  /** Converts the File list to path strings, stripping a leading "./" where present. */
  protected String[] getPaths(ArrayList<File> list) {
    String[] result = new String[list.size()];
    int k=0;
    for (File f : list){
      String p = f.getPath();
      if ((p.length() > 1) && (p.charAt(0) == '.')){ // remove leading "./"
        char c = p.charAt(1);
        if (c == '\\' || c == '/'){
          p = p.substring(2);
        }
      }
      result[k++] = p;
    }
    return result;
  }

  /**
   * Compiles a shell-style name pattern into a regex: '*' becomes ".*" and every other
   * regex metacharacter is escaped so it matches literally (previously only '.', '$',
   * '[' and ']' were escaped, so names containing e.g. '(' or '+' mis-matched).
   */
  protected Pattern getPattern(String s){
    StringBuilder sb = new StringBuilder();
    int len = s.length();
    for (int j=0; j<len; j++){
      char c = s.charAt(j);
      if (c == '*'){
        sb.append(".*");
      } else if ("\\.[]{}()<>+-=!?^$|".indexOf(c) >= 0){
        sb.append('\\').append(c);
      } else {
        sb.append(c);
      }
    }
    return Pattern.compile(sb.toString());
  }
}
package com.sequenceiq.cloudbreak.util;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import java.time.Duration;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

import org.junit.Before;
import org.junit.Test;
import org.junit.jupiter.api.Assertions;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;

import com.sequenceiq.cloudbreak.api.endpoint.v4.stacks.base.InstanceStatus;
import com.sequenceiq.cloudbreak.cloud.model.CloudCredential;
import com.sequenceiq.cloudbreak.cloud.model.VolumeSetAttributes;
import com.sequenceiq.cloudbreak.cluster.util.ResourceAttributeUtil;
import com.sequenceiq.cloudbreak.common.json.Json;
import com.sequenceiq.cloudbreak.common.orchestration.Node;
import com.sequenceiq.cloudbreak.converter.spi.CredentialToCloudCredentialConverter;
import com.sequenceiq.cloudbreak.domain.Resource;
import com.sequenceiq.cloudbreak.domain.Template;
import com.sequenceiq.cloudbreak.domain.stack.Stack;
import com.sequenceiq.cloudbreak.domain.stack.cluster.Cluster;
import com.sequenceiq.cloudbreak.domain.stack.instance.InstanceGroup;
import com.sequenceiq.cloudbreak.domain.stack.instance.InstanceMetaData;
import com.sequenceiq.cloudbreak.dto.credential.Credential;
import com.sequenceiq.cloudbreak.orchestrator.host.HostOrchestrator;
import com.sequenceiq.cloudbreak.orchestrator.model.NodeReachabilityResult;
import com.sequenceiq.cloudbreak.service.GatewayConfigService;
import com.sequenceiq.cloudbreak.service.environment.credential.CredentialClientService;
import com.sequenceiq.common.api.type.ResourceType;

/**
 * Unit tests for {@link StackUtil}: cluster uptime computation, credential conversion,
 * volume-set mapping, and node-reachability collection/checking.
 */
public class StackUtilTest {

    @Mock
    private CredentialToCloudCredentialConverter credentialToCloudCredentialConverter;

    @Mock
    private CredentialClientService credentialClientService;

    @Mock
    private ResourceAttributeUtil resourceAttributeUtil;

    @Mock
    private HostOrchestrator hostOrchestrator;

    @Mock
    private GatewayConfigService gatewayConfigService;

    @Captor
    private ArgumentCaptor<Set<Node>> nodesCaptor;

    @InjectMocks
    private final StackUtil stackUtil = new StackUtil();

    @Before
    public void setUp() {
        MockitoAnnotations.initMocks(this);
    }

    @Test
    public void testGetUptimeForClusterZero() {
        // a cluster with no uptime recorded reports zero
        Cluster cluster = new Cluster();
        long uptime = stackUtil.getUptimeForCluster(cluster, true);
        assertEquals(0L, uptime);
    }

    @Test
    public void testGetUptimeForClusterNoGetUpSince() {
        // without "up since", only the persisted uptime counts
        Cluster cluster = new Cluster();
        int minutes = 10;
        cluster.setUptime(Duration.ofMinutes(minutes).toString());
        long uptime = stackUtil.getUptimeForCluster(cluster, false);
        assertEquals(Duration.ofMinutes(minutes).toMillis(), uptime);
    }

    @Test
    public void testGetUptimeForCluster() {
        // with "up since" set, the running time is added on top of the persisted uptime
        Cluster cluster = new Cluster();
        int minutes = 10;
        cluster.setUptime(Duration.ofMinutes(minutes).toString());
        cluster.setUpSince(new Date().getTime());
        long uptime = stackUtil.getUptimeForCluster(cluster, true);
        assertTrue(uptime >= Duration.ofMinutes(minutes).toMillis());
    }

    @Test
    public void testGetCloudCredential() {
        Stack stack = new Stack();
        stack.setEnvironmentCrn("envCrn");
        CloudCredential cloudCredential = new CloudCredential("123", "CloudCred", "account");
        when(credentialClientService.getByEnvironmentCrn(anyString()))
                .thenReturn(Credential.builder().build());
        when(credentialToCloudCredentialConverter.convert(any(Credential.class)))
                .thenReturn(cloudCredential);
        CloudCredential result = stackUtil.getCloudCredential(stack);
        assertEquals(result.getId(), cloudCredential.getId());
        assertEquals(result.getName(), cloudCredential.getName());
    }

    @Test
    public void testCreateInstanceToVolumeInfoMapWhenEveryVolumeSetAreAttachedToInstance() {
        List<Resource> volumeSets = new ArrayList<>();
        volumeSets.add(getVolumeSetResource("anInstanceId"));
        volumeSets.add(getVolumeSetResource("secInstanceId"));
        volumeSets.add(getVolumeSetResource("thirdInstanceId"));

        Map<String, Map<String, Object>> actual = stackUtil.createInstanceToVolumeInfoMap(volumeSets);

        assertEquals(volumeSets.size(), actual.size());
    }

    @Test
    public void testCreateInstanceToVolumeInfoMapWhenNotEveryVolumeSetAreAttachedToInstance() {
        List<Resource> volumeSets = new ArrayList<>();
        volumeSets.add(getVolumeSetResource("anInstanceId"));
        volumeSets.add(getVolumeSetResource("secInstanceId"));
        volumeSets.add(getVolumeSetResource("thirdInstanceId"));
        // volume sets without an instance reference must be excluded from the map
        volumeSets.add(getVolumeSetResource(null));
        volumeSets.add(getVolumeSetResource(null));

        Map<String, Map<String, Object>> actual = stackUtil.createInstanceToVolumeInfoMap(volumeSets);

        int numberOfVolumeSetsWithoutInstanceReference = 2;
        assertEquals(volumeSets.size() - numberOfVolumeSetsWithoutInstanceReference, actual.size());
    }

    @Test
    public void collectAndCheckReachableNodes() throws NodesUnreachableException {
        Stack stack = createThreeNodeStack();
        ArrayList<String> necessaryNodes = new ArrayList<>();
        necessaryNodes.add("node1.example.com");
        necessaryNodes.add("node3.example.com");

        Set<Node> nodes = new HashSet<>();
        nodes.add(new Node("1.1.1.1", "1.1.1.1", "1", "m5.xlarge", "node1.example.com", "worker"));
        nodes.add(new Node("1.1.1.3", "1.1.1.3", "3", "m5.xlarge", "node3.example.com", "worker"));
        when(hostOrchestrator.getResponsiveNodes(nodesCaptor.capture(), any()))
                .thenReturn(new NodeReachabilityResult(nodes, Set.of()));

        stackUtil.collectAndCheckReachableNodes(stack, necessaryNodes);

        verify(hostOrchestrator).getResponsiveNodes(nodesCaptor.capture(), any());
        List<String> fqdns =
                nodesCaptor.getValue().stream().map(Node::getHostname).collect(Collectors.toList());
        assertTrue(fqdns.contains("node1.example.com"));
        assertFalse("Terminated node should be filtered out", fqdns.contains("node2.example.com"));
        assertTrue(fqdns.contains("node3.example.com"));
    }

    @Test
    public void collectAndCheckReachableNodesButSomeNodeMissing() {
        Stack stack = createThreeNodeStack();
        ArrayList<String> necessaryNodes = new ArrayList<>();
        necessaryNodes.add("node1.example.com");
        necessaryNodes.add("node3.example.com");

        // only node1 responds, so required node3 must be reported unreachable
        Set<Node> nodes = new HashSet<>();
        nodes.add(new Node("1.1.1.1", "1.1.1.1", "1", "m5.xlarge", "node1.example.com", "worker"));
        when(hostOrchestrator.getResponsiveNodes(nodesCaptor.capture(), any()))
                .thenReturn(new NodeReachabilityResult(nodes, Set.of()));

        NodesUnreachableException nodesUnreachableException =
                Assertions.assertThrows(NodesUnreachableException.class,
                        () -> stackUtil.collectAndCheckReachableNodes(stack, necessaryNodes));

        assertEquals(1, nodesUnreachableException.getUnreachableNodes().size());
        assertEquals("node3.example.com",
                nodesUnreachableException.getUnreachableNodes().iterator().next());
    }

    @Test
    public void collectReachableNodesTest() {
        Stack stack = createThreeNodeStack();

        Set<Node> nodes = new HashSet<>();
        nodes.add(new Node("1.1.1.1", "1.1.1.1", "1", "m5.xlarge", "node1.example.com", "worker"));
        when(hostOrchestrator.getResponsiveNodes(nodesCaptor.capture(), any()))
                .thenReturn(new NodeReachabilityResult(nodes, Set.of()));

        stackUtil.collectReachableNodes(stack);

        verify(hostOrchestrator).getResponsiveNodes(nodesCaptor.capture(), any());
        List<String> fqdns =
                nodesCaptor.getValue().stream().map(Node::getHostname).collect(Collectors.toList());
        assertTrue(fqdns.contains("node1.example.com"));
        assertFalse("Terminated node should be filtered out", fqdns.contains("node2.example.com"));
        assertTrue(fqdns.contains("node3.example.com"));
    }

    /**
     * Shared fixture for the reachability tests: one instance group ("m5.xlarge") with
     * node1..node3 where node2 is TERMINATED and must be filtered out by StackUtil.
     * (This setup was previously duplicated verbatim in three tests.)
     */
    private Stack createThreeNodeStack() {
        InstanceGroup instanceGroup = new InstanceGroup();
        Set<InstanceMetaData> instanceMetaDataSet = new HashSet<>();
        instanceMetaDataSet.add(createInstanceMetaData(instanceGroup, "node1.example.com", null));
        instanceMetaDataSet.add(
                createInstanceMetaData(instanceGroup, "node2.example.com", InstanceStatus.TERMINATED));
        instanceMetaDataSet.add(createInstanceMetaData(instanceGroup, "node3.example.com", null));
        instanceGroup.setInstanceMetaData(instanceMetaDataSet);
        Template template = new Template();
        template.setInstanceType("m5.xlarge");
        instanceGroup.setTemplate(template);
        Set<InstanceGroup> instanceGroupSet = new HashSet<>();
        instanceGroupSet.add(instanceGroup);
        Stack stack = new Stack();
        stack.setInstanceGroups(instanceGroupSet);
        return stack;
    }

    /** Builds one InstanceMetaData; {@code status == null} leaves the default status untouched. */
    private InstanceMetaData createInstanceMetaData(InstanceGroup group, String fqdn,
            InstanceStatus status) {
        InstanceMetaData instanceMetaData = new InstanceMetaData();
        if (status != null) {
            instanceMetaData.setInstanceStatus(status);
        }
        instanceMetaData.setInstanceGroup(group);
        instanceMetaData.setDiscoveryFQDN(fqdn);
        return instanceMetaData;
    }

    /** Builds an AZURE_VOLUMESET resource, optionally attached to {@code instanceID}. */
    private Resource getVolumeSetResource(String instanceID) {
        Resource resource = new Resource();
        resource.setResourceType(ResourceType.AZURE_VOLUMESET);
        resource.setInstanceId(instanceID);
        VolumeSetAttributes volumeSetAttributes = new VolumeSetAttributes.Builder()
                .build();
        resource.setAttributes(new Json(volumeSetAttributes));
        return resource;
    }
}
/* * The MIT License * * Copyright (c) 2004-2009, Sun Microsystems, Inc., Kohsuke Kawaguchi * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/
package hudson.model;

import hudson.ExtensionList;
import jenkins.model.Jenkins;
import jenkins.model.item_category.ItemCategory;
import org.acegisecurity.AccessDeniedException;
import org.apache.commons.jelly.Script;
import org.apache.commons.jelly.XMLOutput;
import org.apache.commons.lang.StringUtils;
import org.jenkins.ui.icon.Icon;
import org.jenkins.ui.icon.IconSet;
import org.jenkins.ui.icon.IconSpec;
import org.kohsuke.stapler.MetaClass;
import org.kohsuke.stapler.Stapler;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.WebApp;
import org.kohsuke.stapler.jelly.DefaultScriptInvoker;
import org.kohsuke.stapler.jelly.JellyClassTearOff;

import javax.annotation.CheckForNull;
import javax.annotation.Nonnull;
import java.io.StringWriter;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * {@link Descriptor} for {@link TopLevelItem}s.
 *
 * @author Kohsuke Kawaguchi
 */
public abstract class TopLevelItemDescriptor extends Descriptor<TopLevelItem> implements IconSpec {

    private static final Logger LOGGER = Logger.getLogger(TopLevelItemDescriptor.class.getName());

    protected TopLevelItemDescriptor(Class<? extends TopLevelItem> clazz) {
        super(clazz);
    }

    /**
     * Infers the type of the corresponding {@link TopLevelItem} from the outer class.
     * Use this form when following the common convention of declaring the descriptor
     * as a static nested class of the describable class.
     *
     * @since 1.278
     */
    protected TopLevelItemDescriptor() {
    }

    /**
     * {@link TopLevelItemDescriptor}s often use other descriptors to decorate themselves;
     * subtypes can override this to filter those descriptors out.
     *
     * <p>
     * Useful for a workflow/company specific item type that wants to trim down the
     * options presented to the user.
     *
     * @since 1.294
     */
    public boolean isApplicable(Descriptor descriptor) {
        return true;
    }

    /**
     * Lets a subtype restrict the item groups in which items of this type may be created.
     *
     * @since 1.607
     */
    public boolean isApplicableIn(ItemGroup parent) {
        return true;
    }

    /**
     * Convenience check that throws if this item type may not be created in
     * {@code parent}.
     *
     * @throws AccessDeniedException if {@link #isApplicableIn(ItemGroup)} is false.
     * @since 1.607
     */
    public final void checkApplicableIn(ItemGroup parent) {
        if (isApplicableIn(parent)) {
            return;
        }
        throw new AccessDeniedException(
                Messages.TopLevelItemDescriptor_NotApplicableIn(getDisplayName(), parent.getFullDisplayName()));
    }

    /**
     * Tests whether the given instance belongs to this descriptor, in the sense that
     * this descriptor can produce items like it.
     *
     * <p>
     * Wizard-like {@link TopLevelItemDescriptor}s that produce object types other than
     * {@link #clazz} can override this to augment the instance-descriptor relationship.
     *
     * @since 1.410
     */
    public boolean testInstance(TopLevelItem i) {
        return clazz.isInstance(i);
    }

    /**
     * {@inheritDoc}
     *
     * <p>
     * Used as the caption when the user chooses what item type to create.
     * Implementations also need a {@code newInstanceDetail.jelly} script, rendered
     * below the caption to explain the item type.
     */
    @Override
    public String getDisplayName() {
        return super.getDisplayName();
    }

    /**
     * A description of this kind of item type. It may contain HTML, but plain text is
     * recommended for consistency with the rest of Jenkins.
     *
     * <p>
     * Must be called on a thread where Stapler is handling an HTTP request; otherwise
     * the empty string is returned.
     *
     * @return by default, the rendered output of the {@code newInstanceDetail} view,
     *         or {@code ""} when unavailable.
     * @since 2.0
     */
    @Nonnull
    public String getDescription() {
        Stapler stapler = Stapler.getCurrent();
        if (stapler == null) {
            // Not in an HTTP request; the Jelly view cannot be rendered.
            return "";
        }
        try {
            WebApp webapp = WebApp.getCurrent();
            MetaClass meta = webapp.getMetaClass(this);
            Script s = meta.loadTearOff(JellyClassTearOff.class).findScript("newInstanceDetail");
            if (s == null) {
                return "";
            }
            DefaultScriptInvoker dsi = new DefaultScriptInvoker();
            StringWriter sw = new StringWriter();
            XMLOutput xml = dsi.createXMLOutput(sw, true);
            dsi.invokeScript(Stapler.getCurrentRequest(), Stapler.getCurrentResponse(), s, this, xml);
            return sw.toString();
        } catch (Exception e) {
            // Rendering failure should degrade to "no description", not break the page.
            LOGGER.log(Level.WARNING, null, e);
            return "";
        }
    }

    /**
     * Used to categorize this kind of item type. @see {@link ItemCategory}
     *
     * @return the category identifier, {@link ItemCategory.UncategorizedCategory#getId()}
     *         by default.
     * @since 2.0
     */
    @Nonnull
    public String getCategoryId() {
        return ItemCategory.UncategorizedCategory.ID;
    }

    /**
     * A file path pattern for the item icon in different sizes, e.g.
     * {@code plugin/plugin-shortname/images/:size/item.png}, where {@code :size}
     * stands for the sizes commonly used in Jenkins: 16x16, 24x24, 32x32 or 48x48.
     *
     * @see FreeStyleProject.DescriptorImpl#getIconFilePathPattern()
     * @return a string, or null if not defined.
     * @since 2.0
     * @deprecated prefer {@link #getIconClassName()}
     */
    @CheckForNull
    @Deprecated
    public String getIconFilePathPattern() {
        return null;
    }

    /**
     * An icon file path for a specific size.
     *
     * @param size one of the common sizes: 16x16, 24x24, 32x32 or 48x48
     * @return a string, or null if not defined.
     * @since 2.0
     * @deprecated prefer {@link #getIconClassName()}
     */
    @CheckForNull
    @Deprecated
    public String getIconFilePath(String size) {
        if (!StringUtils.isBlank(getIconFilePathPattern())) {
            return getIconFilePathPattern().replace(":size", size);
        }
        return null;
    }

    /**
     * Get the Item's Icon class specification e.g. 'icon-notepad'.
     * <p>
     * Note: do <strong>NOT</strong> include icon size specifications (such as 'icon-sm').
     *
     * @return The Icon class specification e.g. 'icon-notepad'.
     */
    @Override
    public String getIconClassName() {
        // Bridge from the legacy file-path-pattern way of referencing images.
        String pattern = getIconFilePathPattern();
        if (pattern == null) {
            return null;
        }
        // Resolve the pattern at a fixed size, then map it back through the IconSet.
        String path = pattern.replace(":size", "24x24");
        if (path.indexOf('/') == -1) {
            // A bare file name maps directly to a normalized icon class name.
            return IconSet.toNormalizedIconNameClass(path);
        }
        if (Jenkins.RESOURCE_PATH.length() > 0 && path.startsWith(Jenkins.RESOURCE_PATH)) {
            // Strip the resource prefix so the IconSet lookup sees a relative URL.
            path = path.substring(Jenkins.RESOURCE_PATH.length());
        }
        Icon icon = IconSet.icons.getIconByUrl(path);
        if (icon == null) {
            return null;
        }
        // Drop the size class ('icon-md') and collapse whitespace to get the bare class spec.
        return icon.getClassSpec().replaceAll("\\s*icon-md\\s*", " ").replaceAll("\\s+", " ");
    }

    /**
     * @deprecated since 2007-01-19.
     *     This is not a valid operation for {@link Item}s.
     */
    @Deprecated
    public TopLevelItem newInstance(StaplerRequest req) throws FormException {
        throw new UnsupportedOperationException();
    }

    /**
     * Creates a new {@link TopLevelItem}.
     *
     * @deprecated as of 1.390
     *     Use {@link #newInstance(ItemGroup, String)}
     */
    @Deprecated
    public TopLevelItem newInstance(String name) {
        return newInstance(Jenkins.get(), name);
    }

    /**
     * Creates a new {@link TopLevelItem} for the specified parent.
     *
     * @since 1.390
     */
    public abstract TopLevelItem newInstance(ItemGroup parent, String name);

    /**
     * Returns all the registered {@link TopLevelItem} descriptors.
     */
    public static ExtensionList<TopLevelItemDescriptor> all() {
        return Items.all();
    }
}
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package com.cloud.service; import javax.persistence.Column; import javax.persistence.DiscriminatorValue; import javax.persistence.Entity; import javax.persistence.PrimaryKeyJoinColumn; import javax.persistence.Table; import javax.persistence.Transient; import com.cloud.offering.ServiceOffering; import com.cloud.storage.DiskOfferingVO; import com.cloud.vm.VirtualMachine; @Entity @Table(name="service_offering") @DiscriminatorValue(value="Service") @PrimaryKeyJoinColumn(name="id") public class ServiceOfferingVO extends DiskOfferingVO implements ServiceOffering { @Column(name="cpu") private int cpu; @Column(name="speed") private int speed; @Column(name="ram_size") private int ramSize; @Column(name="nw_rate") private Integer rateMbps; @Column(name="mc_rate") private Integer multicastRateMbps; @Column(name="ha_enabled") private boolean offerHA; @Column(name="limit_cpu_use") private boolean limitCpuUse; @Column(name="host_tag") private String hostTag; @Column(name="default_use") private boolean default_use; @Column(name="vm_type") private String vm_type; @Column(name="sort_key") int sortKey; protected ServiceOfferingVO() { super(); } public ServiceOfferingVO(String name, int cpu, int ramSize, 
int speed, Integer rateMbps, Integer multicastRateMbps, boolean offerHA, String displayText, boolean useLocalStorage, boolean recreatable, String tags, boolean systemUse, VirtualMachine.Type vm_type, boolean defaultUse) { super(name, displayText, false, tags, recreatable, useLocalStorage, systemUse, true); this.cpu = cpu; this.ramSize = ramSize; this.speed = speed; this.rateMbps = rateMbps; this.multicastRateMbps = multicastRateMbps; this.offerHA = offerHA; this.limitCpuUse = false; this.default_use = defaultUse; this.vm_type = vm_type == null ? null : vm_type.toString().toLowerCase(); } public ServiceOfferingVO(String name, int cpu, int ramSize, int speed, Integer rateMbps, Integer multicastRateMbps, boolean offerHA, boolean limitCpuUse, String displayText, boolean useLocalStorage, boolean recreatable, String tags, boolean systemUse, VirtualMachine.Type vm_type, Long domainId) { super(name, displayText, false, tags, recreatable, useLocalStorage, systemUse, true, domainId); this.cpu = cpu; this.ramSize = ramSize; this.speed = speed; this.rateMbps = rateMbps; this.multicastRateMbps = multicastRateMbps; this.offerHA = offerHA; this.limitCpuUse = limitCpuUse; this.vm_type = vm_type == null ? 
null : vm_type.toString().toLowerCase(); } public ServiceOfferingVO(String name, int cpu, int ramSize, int speed, Integer rateMbps, Integer multicastRateMbps, boolean offerHA, boolean limitResourceUse, String displayText, boolean useLocalStorage, boolean recreatable, String tags, boolean systemUse, VirtualMachine.Type vm_type, Long domainId, String hostTag) { this(name, cpu, ramSize, speed, rateMbps, multicastRateMbps, offerHA, limitResourceUse, displayText, useLocalStorage, recreatable, tags, systemUse, vm_type, domainId); this.hostTag = hostTag; } @Override public boolean getOfferHA() { return offerHA; } public void setOfferHA(boolean offerHA) { this.offerHA = offerHA; } @Override public boolean getLimitCpuUse() { return limitCpuUse; } public void setLimitResourceUse(boolean limitCpuUse) { this.limitCpuUse = limitCpuUse; } @Override public boolean getDefaultUse() { return default_use; } @Override @Transient public String[] getTagsArray() { String tags = getTags(); if (tags == null || tags.length() == 0) { return new String[0]; } return tags.split(","); } @Override public int getCpu() { return cpu; } public void setCpu(int cpu) { this.cpu = cpu; } public void setSpeed(int speed) { this.speed = speed; } public void setRamSize(int ramSize) { this.ramSize = ramSize; } @Override public int getSpeed() { return speed; } @Override public int getRamSize() { return ramSize; } public void setRateMbps(Integer rateMbps) { this.rateMbps = rateMbps; } @Override public Integer getRateMbps() { return rateMbps; } public void setMulticastRateMbps(Integer multicastRateMbps) { this.multicastRateMbps = multicastRateMbps; } @Override public Integer getMulticastRateMbps() { return multicastRateMbps; } public void setHostTag(String hostTag) { this.hostTag = hostTag; } public String getHostTag() { return hostTag; } public String getSystemVmType(){ return vm_type; } public void setSortKey(int key) { sortKey = key; } public int getSortKey() { return sortKey; } }
/*
    Copyright 2013 Rustici Software

    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.
    You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

    Unless required by applicable law or agreed to in writing, software
    distributed under the License is distributed on an "AS IS" BASIS,
    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    See the License for the specific language governing permissions and
    limitations under the License.
*/
package com.rusticisoftware.tincan.v095;

import com.rusticisoftware.tincan.internal.DateTime;
import com.rusticisoftware.tincan.Agent;
import com.rusticisoftware.tincan.QueryableStatementTarget;
import com.rusticisoftware.tincan.StatementsQueryInterface;
import com.rusticisoftware.tincan.TCAPIVersion;
import com.rusticisoftware.tincan.Verb;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.HashMap;
import java.util.UUID;

/**
 * Query model class used for building query parameters passed to get statements from LRS.
 * Targets the 0.95 version of the TCAPI; every filter field is optional and only
 * non-null fields contribute to the generated parameter map.
 */
public class StatementsQuery implements StatementsQueryInterface {
    private TCAPIVersion version = TCAPIVersion.V095;

    private URI verbID;
    private QueryableStatementTarget object;
    private UUID registration;
    private Boolean context;
    private Agent actor;
    private DateTime since;
    private DateTime until;
    private Integer limit;
    private Boolean authoritative;
    private Boolean sparse;
    private Agent instructor;
    private Boolean ascending;

    public StatementsQuery() {
    }

    /** Sets the verb filter from a string, validating it as a URI. */
    public void setVerbID(String verbID) throws URISyntaxException {
        this.setVerbID(new URI(verbID));
    }

    /** Sets the verb filter from a Verb object's id. */
    public void setVerbID(Verb verb) throws URISyntaxException {
        this.setVerbID(verb.getId().toString());
    }

    /**
     * Serializes every non-null filter field into the string key/value map
     * expected by the LRS query endpoint.
     *
     * @throws IOException if serializing the agent/object to JSON fails
     */
    public HashMap<String,String> toParameterMap() throws IOException {
        HashMap<String,String> params = new HashMap<String,String>();

        URI verb = getVerbID();
        if (verb != null) {
            params.put("verb", verb.toString());
        }

        QueryableStatementTarget target = getObject();
        if (target != null) {
            params.put("object", target.toJSON(getVersion()));
        }

        UUID reg = getRegistration();
        if (reg != null) {
            params.put("registration", reg.toString());
        }

        Boolean ctx = getContext();
        if (ctx != null) {
            params.put("context", ctx.toString());
        }

        Agent who = getActor();
        if (who != null) {
            params.put("actor", who.toJSON(getVersion()));
        }

        DateTime from = getSince();
        if (from != null) {
            params.put("since", from.toString());
        }

        DateTime to = getUntil();
        if (to != null) {
            params.put("until", to.toString());
        }

        Integer max = getLimit();
        if (max != null) {
            params.put("limit", max.toString());
        }

        Boolean auth = getAuthoritative();
        if (auth != null) {
            params.put("authoritative", auth.toString());
        }

        Boolean sparseFlag = getSparse();
        if (sparseFlag != null) {
            params.put("sparse", sparseFlag.toString());
        }

        Agent teacher = getInstructor();
        if (teacher != null) {
            params.put("instructor", teacher.toJSON(getVersion()));
        }

        Boolean asc = getAscending();
        if (asc != null) {
            params.put("ascending", asc.toString());
        }

        return params;
    }

    public TCAPIVersion getVersion() {
        return version;
    }

    public void setVersion(TCAPIVersion version) {
        this.version = version;
    }

    public URI getVerbID() {
        return verbID;
    }

    public void setVerbID(URI verbID) {
        this.verbID = verbID;
    }

    public QueryableStatementTarget getObject() {
        return object;
    }

    public void setObject(QueryableStatementTarget object) {
        this.object = object;
    }

    public UUID getRegistration() {
        return registration;
    }

    public void setRegistration(UUID registration) {
        this.registration = registration;
    }

    public Boolean getContext() {
        return context;
    }

    public void setContext(Boolean context) {
        this.context = context;
    }

    public Agent getActor() {
        return actor;
    }

    public void setActor(Agent actor) {
        this.actor = actor;
    }

    public DateTime getSince() {
        return since;
    }

    public void setSince(DateTime since) {
        this.since = since;
    }

    public DateTime getUntil() {
        return until;
    }

    public void setUntil(DateTime until) {
        this.until = until;
    }

    public Integer getLimit() {
        return limit;
    }

    public void setLimit(Integer limit) {
        this.limit = limit;
    }

    public Boolean getAuthoritative() {
        return authoritative;
    }

    public void setAuthoritative(Boolean authoritative) {
        this.authoritative = authoritative;
    }

    public Boolean getSparse() {
        return sparse;
    }

    public void setSparse(Boolean sparse) {
        this.sparse = sparse;
    }

    public Agent getInstructor() {
        return instructor;
    }

    public void setInstructor(Agent instructor) {
        this.instructor = instructor;
    }

    public Boolean getAscending() {
        return ascending;
    }

    public void setAscending(Boolean ascending) {
        this.ascending = ascending;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache.distributed.dht.preloader; import org.apache.ignite.*; import org.apache.ignite.cache.*; import org.apache.ignite.cluster.*; import org.apache.ignite.events.*; import org.apache.ignite.internal.*; import org.apache.ignite.internal.cluster.*; import org.apache.ignite.internal.processors.affinity.*; import org.apache.ignite.internal.processors.cache.*; import org.apache.ignite.internal.processors.cache.distributed.dht.*; import org.apache.ignite.internal.processors.timeout.*; import org.apache.ignite.internal.util.*; import org.apache.ignite.internal.util.future.*; import org.apache.ignite.internal.util.tostring.*; import org.apache.ignite.internal.util.typedef.*; import org.apache.ignite.internal.util.typedef.internal.*; import org.apache.ignite.internal.util.worker.*; import org.apache.ignite.lang.*; import org.apache.ignite.thread.*; import org.jetbrains.annotations.*; import java.util.*; import java.util.concurrent.*; import java.util.concurrent.atomic.*; import java.util.concurrent.locks.*; import static java.util.concurrent.TimeUnit.*; import static org.apache.ignite.events.EventType.*; import static org.apache.ignite.internal.GridTopic.*; 
import static org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtPartitionState.*; import static org.apache.ignite.internal.processors.dr.GridDrType.*; /** * Thread pool for requesting partitions from other nodes * and populating local cache. */ @SuppressWarnings("NonConstantFieldWithUpperCaseName") public class GridDhtPartitionDemandPool { /** Dummy message to wake up a blocking queue if a node leaves. */ private final SupplyMessage DUMMY_TOP = new SupplyMessage(); /** */ private final GridCacheContext<?, ?> cctx; /** */ private final IgniteLogger log; /** */ private final ReadWriteLock busyLock; /** */ @GridToStringInclude private final Collection<DemandWorker> dmdWorkers; /** Preload predicate. */ private IgnitePredicate<GridCacheEntryInfo> preloadPred; /** Future for preload mode {@link CacheRebalanceMode#SYNC}. */ @GridToStringInclude private SyncFuture syncFut; /** Preload timeout. */ private final AtomicLong timeout; /** Allows demand threads to synchronize their step. */ private CyclicBarrier barrier; /** Demand lock. */ private final ReadWriteLock demandLock = new ReentrantReadWriteLock(); /** */ private int poolSize; /** Last timeout object. */ private AtomicReference<GridTimeoutObject> lastTimeoutObj = new AtomicReference<>(); /** Last exchange future. */ private volatile GridDhtPartitionsExchangeFuture lastExchangeFut; /** * @param cctx Cache context. * @param busyLock Shutdown lock. */ public GridDhtPartitionDemandPool(GridCacheContext<?, ?> cctx, ReadWriteLock busyLock) { assert cctx != null; assert busyLock != null; this.cctx = cctx; this.busyLock = busyLock; log = cctx.logger(getClass()); boolean enabled = cctx.rebalanceEnabled() && !cctx.kernalContext().clientNode(); poolSize = enabled ? 
cctx.config().getRebalanceThreadPoolSize() : 0; if (enabled) { barrier = new CyclicBarrier(poolSize); dmdWorkers = new ArrayList<>(poolSize); for (int i = 0; i < poolSize; i++) dmdWorkers.add(new DemandWorker(i)); syncFut = new SyncFuture(dmdWorkers); } else { dmdWorkers = Collections.emptyList(); syncFut = new SyncFuture(dmdWorkers); // Calling onDone() immediately since preloading is disabled. syncFut.onDone(); } timeout = new AtomicLong(cctx.config().getRebalanceTimeout()); } /** * */ void start() { if (poolSize > 0) { for (DemandWorker w : dmdWorkers) new IgniteThread(cctx.gridName(), "preloader-demand-worker", w).start(); } } /** * */ void stop() { U.cancel(dmdWorkers); if (log.isDebugEnabled()) log.debug("Before joining on demand workers: " + dmdWorkers); U.join(dmdWorkers, log); if (log.isDebugEnabled()) log.debug("After joining on demand workers: " + dmdWorkers); lastExchangeFut = null; lastTimeoutObj.set(null); } /** * @return Future for {@link CacheRebalanceMode#SYNC} mode. */ IgniteInternalFuture<?> syncFuture() { return syncFut; } /** * Sets preload predicate for demand pool. * * @param preloadPred Preload predicate. */ void preloadPredicate(IgnitePredicate<GridCacheEntryInfo> preloadPred) { this.preloadPred = preloadPred; } /** * @return Size of this thread pool. */ int poolSize() { return poolSize; } /** * Wakes up demand workers when new exchange future was added. */ void onExchangeFutureAdded() { synchronized (dmdWorkers) { for (DemandWorker w : dmdWorkers) w.addMessage(DUMMY_TOP); } } /** * Force preload. 
*/ void forcePreload() { GridTimeoutObject obj = lastTimeoutObj.getAndSet(null); if (obj != null) cctx.time().removeTimeoutObject(obj); final GridDhtPartitionsExchangeFuture exchFut = lastExchangeFut; if (exchFut != null) { if (log.isDebugEnabled()) log.debug("Forcing rebalance event for future: " + exchFut); exchFut.listen(new CI1<IgniteInternalFuture<AffinityTopologyVersion>>() { @Override public void apply(IgniteInternalFuture<AffinityTopologyVersion> t) { cctx.shared().exchange().forcePreloadExchange(exchFut); } }); } else if (log.isDebugEnabled()) log.debug("Ignoring force rebalance request (no topology event happened yet)."); } /** * @return {@code true} if entered to busy state. */ private boolean enterBusy() { if (busyLock.readLock().tryLock()) return true; if (log.isDebugEnabled()) log.debug("Failed to enter to busy state (demander is stopping): " + cctx.nodeId()); return false; } /** * */ private void leaveBusy() { busyLock.readLock().unlock(); } /** * @param type Type. * @param discoEvt Discovery event. */ private void preloadEvent(int type, DiscoveryEvent discoEvt) { preloadEvent(-1, type, discoEvt); } /** * @param part Partition. * @param type Type. * @param discoEvt Discovery event. */ private void preloadEvent(int part, int type, DiscoveryEvent discoEvt) { assert discoEvt != null; cctx.events().addPreloadEvent(part, type, discoEvt.eventNode(), discoEvt.type(), discoEvt.timestamp()); } /** * @param msg Message to check. * @return {@code True} if dummy message. */ private boolean dummyTopology(SupplyMessage msg) { return msg == DUMMY_TOP; } /** * @param deque Deque to poll from. * @param time Time to wait. * @param w Worker. * @return Polled item. * @throws InterruptedException If interrupted. */ @Nullable private <T> T poll(BlockingQueue<T> deque, long time, GridWorker w) throws InterruptedException { assert w != null; // There is currently a case where {@code interrupted} // flag on a thread gets flipped during stop which causes the pool to hang. 
This check // will always make sure that interrupted flag gets reset before going into wait conditions. // The true fix should actually make sure that interrupted flag does not get reset or that // interrupted exception gets propagated. Until we find a real fix, this method should // always work to make sure that there is no hanging during stop. if (w.isCancelled()) Thread.currentThread().interrupt(); return deque.poll(time, MILLISECONDS); } /** * @param p Partition. * @param topVer Topology version. * @return Picked owners. */ private Collection<ClusterNode> pickedOwners(int p, AffinityTopologyVersion topVer) { Collection<ClusterNode> affNodes = cctx.affinity().nodes(p, topVer); int affCnt = affNodes.size(); Collection<ClusterNode> rmts = remoteOwners(p, topVer); int rmtCnt = rmts.size(); if (rmtCnt <= affCnt) return rmts; List<ClusterNode> sorted = new ArrayList<>(rmts); // Sort in descending order, so nodes with higher order will be first. Collections.sort(sorted, CU.nodeComparator(false)); // Pick newest nodes. return sorted.subList(0, affCnt); } /** * @param p Partition. * @param topVer Topology version. * @return Nodes owning this partition. */ private Collection<ClusterNode> remoteOwners(int p, AffinityTopologyVersion topVer) { return F.view(cctx.dht().topology().owners(p, topVer), F.remoteNodes(cctx.nodeId())); } /** * @param assigns Assignments. * @param force {@code True} if dummy reassign. 
*/ void addAssignments(final GridDhtPreloaderAssignments assigns, boolean force) { if (log.isDebugEnabled()) log.debug("Adding partition assignments: " + assigns); long delay = cctx.config().getRebalanceDelay(); if (delay == 0 || force) { assert assigns != null; synchronized (dmdWorkers) { for (DemandWorker w : dmdWorkers) { w.addAssignments(assigns); w.addMessage(DUMMY_TOP); } } } else if (delay > 0) { assert !force; GridTimeoutObject obj = lastTimeoutObj.get(); if (obj != null) cctx.time().removeTimeoutObject(obj); final GridDhtPartitionsExchangeFuture exchFut = lastExchangeFut; assert exchFut != null : "Delaying rebalance process without topology event."; obj = new GridTimeoutObjectAdapter(delay) { @Override public void onTimeout() { exchFut.listen(new CI1<IgniteInternalFuture<AffinityTopologyVersion>>() { @Override public void apply(IgniteInternalFuture<AffinityTopologyVersion> f) { cctx.shared().exchange().forcePreloadExchange(exchFut); } }); } }; lastTimeoutObj.set(obj); cctx.time().addTimeoutObject(obj); } } /** * */ void unwindUndeploys() { demandLock.writeLock().lock(); try { cctx.deploy().unwind(cctx); } finally { demandLock.writeLock().unlock(); } } /** {@inheritDoc} */ @Override public String toString() { return S.toString(GridDhtPartitionDemandPool.class, this); } /** * */ private class DemandWorker extends GridWorker { /** Worker ID. */ private int id; /** Partition-to-node assignments. */ private final LinkedBlockingDeque<GridDhtPreloaderAssignments> assignQ = new LinkedBlockingDeque<>(); /** Message queue. */ private final LinkedBlockingDeque<SupplyMessage> msgQ = new LinkedBlockingDeque<>(); /** Counter. */ private long cntr; /** Hide worker logger and use cache logger instead. */ private IgniteLogger log = GridDhtPartitionDemandPool.this.log; /** * @param id Worker ID. 
*/ private DemandWorker(int id) { super(cctx.gridName(), "preloader-demand-worker", GridDhtPartitionDemandPool.this.log); assert id >= 0; this.id = id; } /** * @param assigns Assignments. */ void addAssignments(GridDhtPreloaderAssignments assigns) { assert assigns != null; assignQ.offer(assigns); if (log.isDebugEnabled()) log.debug("Added assignments to worker: " + this); } /** * @return {@code True} if topology changed. */ private boolean topologyChanged() { return !assignQ.isEmpty() || cctx.shared().exchange().topologyChanged(); } /** * @param msg Message. */ private void addMessage(SupplyMessage msg) { if (!enterBusy()) return; try { assert dummyTopology(msg) || msg.supply().workerId() == id; msgQ.offer(msg); } finally { leaveBusy(); } } /** * @param timeout Timed out value. */ private void growTimeout(long timeout) { long newTimeout = (long)(timeout * 1.5D); // Account for overflow. if (newTimeout < 0) newTimeout = Long.MAX_VALUE; // Grow by 50% only if another thread didn't do it already. if (GridDhtPartitionDemandPool.this.timeout.compareAndSet(timeout, newTimeout)) U.warn(log, "Increased rebalancing message timeout from " + timeout + "ms to " + newTimeout + "ms."); } /** * @param pick Node picked for preloading. * @param p Partition. * @param entry Preloaded entry. * @param topVer Topology version. * @return {@code False} if partition has become invalid during preloading. * @throws IgniteInterruptedCheckedException If interrupted. 
 */
// NOTE(review): the javadoc opened above this chunk is closed by the '*/' line; the method below
// is the per-entry preload step invoked from demandFromNode() for every received cache entry.
private boolean preloadEntry(
    ClusterNode pick,
    int p,
    GridCacheEntryInfo entry,
    AffinityTopologyVersion topVer
) throws IgniteCheckedException {
    try {
        GridCacheEntryEx cached = null;

        try {
            // Obtain (or create) the local DHT entry for the rebalanced key.
            cached = cctx.dht().entryEx(entry.key());

            if (log.isDebugEnabled())
                log.debug("Rebalancing key [key=" + entry.key() + ", part=" + p + ", node=" + pick.id() + ']');

            // IGFS data caches enforce a space cap: if it is exceeded, drop the entry
            // instead of storing it (returning true so the caller keeps the partition valid).
            if (cctx.dht().isIgfsDataCache() &&
                cctx.dht().igfsDataSpaceUsed() > cctx.dht().igfsDataSpaceMax()) {
                LT.error(log, null, "Failed to rebalance IGFS data cache (IGFS space size exceeded maximum " +
                    "value, will ignore rebalance entries): " + name());

                if (cached.markObsoleteIfEmpty(null))
                    cached.context().cache().removeIfObsolete(cached.key());

                return true;
            }

            // Optional user-configured rebalance filter; null means "accept everything".
            if (preloadPred == null || preloadPred.apply(entry)) {
                // initialValue() only succeeds when the entry is not already present with a
                // newer value; on success we register it with the eviction policy and fire
                // the rebalance event.
                if (cached.initialValue(
                    entry.value(),
                    entry.version(),
                    entry.ttl(),
                    entry.expireTime(),
                    true,
                    topVer,
                    cctx.isDrEnabled() ? DR_PRELOAD : DR_NONE
                )) {
                    cctx.evicts().touch(cached, topVer); // Start tracking.

                    if (cctx.events().isRecordable(EVT_CACHE_REBALANCE_OBJECT_LOADED) && !cached.isInternal())
                        cctx.events().addEvent(cached.partition(), cached.key(), cctx.localNodeId(),
                            (IgniteUuid)null, null, EVT_CACHE_REBALANCE_OBJECT_LOADED, entry.value(), true, null,
                            false, null, null, null);
                }
                else if (log.isDebugEnabled())
                    log.debug("Rebalancing entry is already in cache (will ignore) [key=" + cached.key() +
                        ", part=" + p + ']');
            }
            else if (log.isDebugEnabled())
                log.debug("Rebalance predicate evaluated to false for entry (will ignore): " + entry);
        }
        catch (GridCacheEntryRemovedException ignored) {
            // Benign race: the entry was removed concurrently; skip it.
            if (log.isDebugEnabled())
                log.debug("Entry has been concurrently removed while rebalancing (will ignore) [key=" +
                    cached.key() + ", part=" + p + ']');
        }
        catch (GridDhtInvalidPartitionException ignored) {
            // The partition was re-assigned away from this node mid-rebalance;
            // 'false' tells the caller to mark partition 'p' invalid.
            if (log.isDebugEnabled())
                log.debug("Partition became invalid during rebalancing (will ignore): " + p);

            return false;
        }
    }
    catch (IgniteInterruptedCheckedException e) {
        // Propagate interruption untouched so the worker can stop promptly.
        throw e;
    }
    catch (IgniteCheckedException e) {
        // Wrap with enough context to identify the failing key/partition/peer; cause preserved.
        throw new IgniteCheckedException("Failed to cache rebalanced entry (will stop rebalancing) [local=" +
            cctx.nodeId() + ", node=" + pick.id() + ", key=" + entry.key() + ", part=" + p + ']', e);
    }

    return true;
}

/**
 * @param idx Unique index for this topic.
 * @return Per-worker ordered communication topic for partition supply messages.
 */
public Object topic(long idx) {
    return TOPIC_CACHE.topic(cctx.namexx(), cctx.nodeId(), id, idx);
}

/**
 * Demands (pulls) the partitions listed in {@code d} from a single supplier node,
 * processing supply messages on an ordered topic until all partitions are owned,
 * the topology changes, or the worker is cancelled.
 *
 * @param node Node to demand from.
 * @param topVer Topology version.
 * @param d Demand message.
 * @param exchFut Exchange future.
 * @return Missed partitions (still local under {@code topVer} but not supplied).
 * @throws InterruptedException If interrupted.
 * @throws ClusterTopologyCheckedException If node left.
 * @throws IgniteCheckedException If failed to send message.
 */
private Set<Integer> demandFromNode(
    ClusterNode node,
    final AffinityTopologyVersion topVer,
    GridDhtPartitionDemandMessage d,
    GridDhtPartitionsExchangeFuture exchFut
) throws InterruptedException, IgniteCheckedException {
    GridDhtPartitionTopology top = cctx.dht().topology();

    // Each demand round gets a fresh topic index so stale supply messages
    // from a previous round cannot be confused with the current one.
    cntr++;

    d.topic(topic(cntr));
    d.workerId(id);

    Set<Integer> missed = new HashSet<>();

    // Get the same collection that will be sent in the message.
    Collection<Integer> remaining = d.partitions();

    // Drain queue before processing a new node.
    drainQueue();

    if (isCancelled() || topologyChanged())
        return missed;

    // Ordered handler guarantees supply messages are applied in send order.
    cctx.io().addOrderedHandler(d.topic(), new CI2<UUID, GridDhtPartitionSupplyMessage>() {
        @Override public void apply(UUID nodeId, GridDhtPartitionSupplyMessage msg) {
            addMessage(new SupplyMessage(nodeId, msg));
        }
    });

    try {
        boolean retry;

        // DoWhile.
        // =======
        do {
            retry = false;

            // Create copy.
            d = new GridDhtPartitionDemandMessage(d, remaining);

            long timeout = GridDhtPartitionDemandPool.this.timeout.get();

            d.timeout(timeout);

            if (log.isDebugEnabled())
                log.debug("Sending demand message [node=" + node.id() + ", demand=" + d + ']');

            // Send demand message.
            cctx.io().send(node, d, cctx.ioPolicy());

            // While.
            // =====
            while (!isCancelled() && !topologyChanged()) {
                SupplyMessage s = poll(msgQ, timeout, this);

                // If timed out.
                if (s == null) {
                    if (msgQ.isEmpty()) { // Safety check.
                        U.warn(log, "Timed out waiting for partitions to load, will retry in " + timeout +
                            " ms (you may need to increase 'networkTimeout' or 'rebalanceBatchSize'" +
                            " configuration properties).");

                        growTimeout(timeout);

                        // Ordered listener was removed if timeout expired.
                        cctx.io().removeOrderedHandler(d.topic());

                        // Must create copy to be able to work with IO manager thread local caches.
                        d = new GridDhtPartitionDemandMessage(d, remaining);

                        // Create new topic.
                        d.topic(topic(++cntr));

                        // Create new ordered listener.
                        cctx.io().addOrderedHandler(d.topic(), new CI2<UUID, GridDhtPartitionSupplyMessage>() {
                            @Override public void apply(UUID nodeId, GridDhtPartitionSupplyMessage msg) {
                                addMessage(new SupplyMessage(nodeId, msg));
                            }
                        });

                        // Resend message with larger timeout.
                        retry = true;

                        break; // While.
                    }
                    else
                        continue; // While.
                }

                // If topology changed.
                if (dummyTopology(s)) {
                    if (topologyChanged())
                        break; // While.
                    else
                        continue; // While.
                }

                // Check that message was received from expected node.
                if (!s.senderId().equals(node.id())) {
                    U.warn(log, "Received supply message from unexpected node [expectedId=" + node.id() +
                        ", rcvdId=" + s.senderId() + ", msg=" + s + ']');

                    continue; // While.
                }

                if (log.isDebugEnabled())
                    log.debug("Received supply message: " + s);

                GridDhtPartitionSupplyMessage supply = s.supply();

                // Check whether there were class loading errors on unmarshal
                if (supply.classError() != null) {
                    if (log.isDebugEnabled())
                        log.debug("Class got undeployed during preloading: " + supply.classError());

                    retry = true;

                    // Quit preloading.
                    break;
                }

                // Preload.
                for (Map.Entry<Integer, CacheEntryInfoCollection> e : supply.infos().entrySet()) {
                    int p = e.getKey();

                    if (cctx.affinity().localNode(p, topVer)) {
                        GridDhtLocalPartition part = top.localPartition(p, topVer, true);

                        assert part != null;

                        if (part.state() == MOVING) {
                            // Reserve + lock the partition for the whole batch so it cannot be
                            // evicted or concurrently modified while entries are applied.
                            boolean reserved = part.reserve();

                            assert reserved : "Failed to reserve partition [gridName=" + cctx.gridName() +
                                ", cacheName=" + cctx.namex() + ", part=" + part + ']';

                            part.lock();

                            try {
                                Collection<Integer> invalidParts = new GridLeanSet<>();

                                // Loop through all received entries and try to preload them.
                                for (GridCacheEntryInfo entry : e.getValue().infos()) {
                                    if (!invalidParts.contains(p)) {
                                        if (!part.preloadingPermitted(entry.key(), entry.version())) {
                                            if (log.isDebugEnabled())
                                                log.debug("Preloading is not permitted for entry due to " +
                                                    "evictions [key=" + entry.key() +
                                                    ", ver=" + entry.version() + ']');

                                            continue;
                                        }

                                        if (!preloadEntry(node, p, entry, topVer)) {
                                            invalidParts.add(p);

                                            if (log.isDebugEnabled())
                                                log.debug("Got entries for invalid partition during " +
                                                    "preloading (will skip) [p=" + p + ", entry=" + entry + ']');
                                        }
                                    }
                                }

                                boolean last = supply.last().contains(p);

                                // If message was last for this partition,
                                // then we take ownership.
                                if (last) {
                                    remaining.remove(p);

                                    top.own(part);

                                    if (log.isDebugEnabled())
                                        log.debug("Finished rebalancing partition: " + part);

                                    if (cctx.events().isRecordable(EVT_CACHE_REBALANCE_PART_LOADED))
                                        preloadEvent(p, EVT_CACHE_REBALANCE_PART_LOADED,
                                            exchFut.discoveryEvent());
                                }
                            }
                            finally {
                                part.unlock();
                                part.release();
                            }
                        }
                        else {
                            remaining.remove(p);

                            if (log.isDebugEnabled())
                                log.debug("Skipping rebalancing partition (state is not MOVING): " + part);
                        }
                    }
                    else {
                        remaining.remove(p);

                        if (log.isDebugEnabled())
                            log.debug("Skipping rebalancing partition (it does not belong on current node): " + p);
                    }
                }

                remaining.removeAll(s.supply().missed());

                // Only request partitions based on latest topology version.
                for (Integer miss : s.supply().missed())
                    if (cctx.affinity().localNode(miss, topVer))
                        missed.add(miss);

                if (remaining.isEmpty())
                    break; // While.

                // Supplier asked for an acknowledging re-demand before sending more batches.
                if (s.supply().ack()) {
                    retry = true;

                    break;
                }
            }
        }
        while (retry && !isCancelled() && !topologyChanged());

        return missed;
    }
    finally {
        // Always unregister the ordered handler, even on error/cancellation paths.
        cctx.io().removeOrderedHandler(d.topic());
    }
}

/**
 * Discards any supply messages left over from a previous demand round.
 *
 * @throws InterruptedException If interrupted.
 */
private void drainQueue() throws InterruptedException {
    while (!msgQ.isEmpty()) {
        SupplyMessage msg = msgQ.take();

        if (log.isDebugEnabled())
            log.debug("Drained supply message: " + msg);
    }
}

/** {@inheritDoc} */
@Override protected void body() throws InterruptedException, IgniteInterruptedCheckedException {
    try {
        int rebalanceOrder = cctx.config().getRebalanceOrder();

        // The marshaller cache must be rebalanced first: unmarshalling supply
        // messages for other caches may depend on it.
        if (!CU.isMarshallerCache(cctx.name())) {
            if (log.isDebugEnabled())
                log.debug("Waiting for marshaller cache preload [cacheName=" + cctx.name() + ']');

            try {
                cctx.kernalContext().cache().marshallerCache().preloader().syncFuture().get();
            }
            catch (IgniteInterruptedCheckedException ignored) {
                if (log.isDebugEnabled())
                    log.debug("Failed to wait for marshaller cache preload future (grid is stopping): " +
                        "[cacheName=" + cctx.name() + ']');

                return;
            }
            catch (IgniteCheckedException e) {
                throw new Error("Ordered preload future should never fail: " + e.getMessage(), e);
            }
        }

        // Honor configured rebalance ordering between caches.
        if (rebalanceOrder > 0) {
            IgniteInternalFuture<?> fut = cctx.kernalContext().cache().orderedPreloadFuture(rebalanceOrder);

            try {
                if (fut != null) {
                    if (log.isDebugEnabled())
                        log.debug("Waiting for dependant caches rebalance [cacheName=" + cctx.name() +
                            ", rebalanceOrder=" + rebalanceOrder + ']');

                    fut.get();
                }
            }
            catch (IgniteInterruptedCheckedException ignored) {
                if (log.isDebugEnabled())
                    log.debug("Failed to wait for ordered rebalance future (grid is stopping): " +
                        "[cacheName=" + cctx.name() + ", rebalanceOrder=" + rebalanceOrder + ']');

                return;
            }
            catch (IgniteCheckedException e) {
                throw new Error("Ordered rebalance future should never fail: " + e.getMessage(), e);
            }
        }

        GridDhtPartitionsExchangeFuture exchFut = null;

        boolean stopEvtFired = false;

        while (!isCancelled()) {
            try {
                barrier.await();

                // Worker 0 is elected to fire the REBALANCE_STOPPED event exactly once
                // per assignment round (once overall for replicated caches).
                if (id == 0 && exchFut != null && !exchFut.dummy() &&
                    cctx.events().isRecordable(EVT_CACHE_REBALANCE_STOPPED)) {
                    if (!cctx.isReplicated() || !stopEvtFired) {
                        preloadEvent(EVT_CACHE_REBALANCE_STOPPED, exchFut.discoveryEvent());

                        stopEvtFired = true;
                    }
                }
            }
            catch (BrokenBarrierException ignore) {
                // A peer worker was cancelled; convert to interruption to stop this worker too.
                throw new InterruptedException("Demand worker stopped.");
            }

            // Sync up all demand threads at this step.
            GridDhtPreloaderAssignments assigns = null;

            while (assigns == null)
                assigns = poll(assignQ, cctx.gridConfig().getNetworkTimeout(), this);

            demandLock.readLock().lock();

            try {
                exchFut = assigns.exchangeFuture();

                // Assignments are empty if preloading is disabled.
                if (assigns.isEmpty())
                    continue;

                boolean resync = false;

                // While.
                // =====
                while (!isCancelled() && !topologyChanged() && !resync) {
                    Collection<Integer> missed = new HashSet<>();

                    // For.
                    // ===
                    for (ClusterNode node : assigns.keySet()) {
                        if (topologyChanged() || isCancelled())
                            break; // For.

                        // assigns.remove() atomically claims this node's demand message.
                        GridDhtPartitionDemandMessage d = assigns.remove(node);

                        // If another thread is already processing this message,
                        // move to the next node.
                        if (d == null)
                            continue; // For.

                        try {
                            Set<Integer> set = demandFromNode(node, assigns.topologyVersion(), d, exchFut);

                            if (!set.isEmpty()) {
                                if (log.isDebugEnabled())
                                    log.debug("Missed partitions from node [nodeId=" + node.id() +
                                        ", missed=" + set + ']');

                                missed.addAll(set);
                            }
                        }
                        catch (IgniteInterruptedCheckedException e) {
                            throw e;
                        }
                        catch (ClusterTopologyCheckedException e) {
                            if (log.isDebugEnabled())
                                log.debug("Node left during rebalancing (will retry) [node=" + node.id() +
                                    ", msg=" + e.getMessage() + ']');

                            resync = true;

                            break; // For.
                        }
                        catch (IgniteCheckedException e) {
                            U.error(log, "Failed to receive partitions from node (rebalancing will not " +
                                "fully finish) [node=" + node.id() + ", msg=" + d + ']', e);
                        }
                    }

                    // Processed missed entries.
                    if (!missed.isEmpty()) {
                        if (log.isDebugEnabled())
                            log.debug("Reassigning partitions that were missed: " + missed);

                        assert exchFut.exchangeId() != null;

                        // Force a dummy exchange so missed partitions get re-assigned.
                        cctx.shared().exchange().forceDummyExchange(true, exchFut);
                    }
                    else
                        break; // While.
                }
            }
            finally {
                demandLock.readLock().unlock();

                syncFut.onWorkerDone(this);
            }

            cctx.shared().exchange().scheduleResendPartitions();
        }
    }
    finally {
        // Safety.
        syncFut.onWorkerDone(this);
    }
}

/** {@inheritDoc} */
@Override public String toString() {
    return S.toString(DemandWorker.class, this, "assignQ", assignQ, "msgQ", msgQ, "super", super.toString());
}
}

/**
 * Sets last exchange future.
 *
 * @param lastFut Last future to set.
 */
void updateLastExchangeFuture(GridDhtPartitionsExchangeFuture lastFut) {
    lastExchangeFut = lastFut;
}

/**
 * Builds the demand assignments (which partitions to pull from which node)
 * for the topology resulting from the given exchange.
 *
 * @param exchFut Exchange future.
 * @return Assignments of partitions to nodes.
 */
GridDhtPreloaderAssignments assign(GridDhtPartitionsExchangeFuture exchFut) {
    // No assignments for disabled preloader.
    GridDhtPartitionTopology top = cctx.dht().topology();

    if (!cctx.rebalanceEnabled())
        return new GridDhtPreloaderAssignments(exchFut, top.topologyVersion());

    int partCnt = cctx.affinity().partitions();

    assert exchFut.forcePreload() || exchFut.dummyReassign() ||
        exchFut.exchangeId().topologyVersion().equals(top.topologyVersion()) :
        "Topology version mismatch [exchId=" + exchFut.exchangeId() +
            ", topVer=" + top.topologyVersion() + ']';

    GridDhtPreloaderAssignments assigns = new GridDhtPreloaderAssignments(exchFut, top.topologyVersion());

    AffinityTopologyVersion topVer = assigns.topologyVersion();

    for (int p = 0; p < partCnt; p++) {
        // A newer pending exchange supersedes these assignments; stop early.
        if (cctx.shared().exchange().hasPendingExchange()) {
            if (log.isDebugEnabled())
                log.debug("Skipping assignments creation, exchange worker has pending assignments: " +
                    exchFut.exchangeId());

            break;
        }

        // If partition belongs to local node.
        if (cctx.affinity().localNode(p, topVer)) {
            GridDhtLocalPartition part = top.localPartition(p, topVer, true);

            assert part != null;
            assert part.id() == p;

            if (part.state() != MOVING) {
                if (log.isDebugEnabled())
                    log.debug("Skipping partition assignment (state is not MOVING): " + part);

                continue; // For.
            }

            Collection<ClusterNode> picked = pickedOwners(p, topVer);

            if (picked.isEmpty()) {
                // No owner left to pull from: this node takes ownership of the
                // (possibly empty) partition and reports data loss if recordable.
                top.own(part);

                if (cctx.events().isRecordable(EVT_CACHE_REBALANCE_PART_DATA_LOST)) {
                    DiscoveryEvent discoEvt = exchFut.discoveryEvent();

                    cctx.events().addPreloadEvent(p, EVT_CACHE_REBALANCE_PART_DATA_LOST,
                        discoEvt.eventNode(), discoEvt.type(), discoEvt.timestamp());
                }

                if (log.isDebugEnabled())
                    log.debug("Owning partition as there are no other owners: " + part);
            }
            else {
                // Demand each partition from the first picked owner.
                ClusterNode n = F.first(picked);

                GridDhtPartitionDemandMessage msg = assigns.get(n);

                if (msg == null) {
                    assigns.put(n, msg = new GridDhtPartitionDemandMessage(
                        top.updateSequence(),
                        exchFut.exchangeId().topologyVersion(),
                        cctx.cacheId()));
                }

                msg.addPartition(p);
            }
        }
    }

    return assigns;
}

/**
 * Future that completes when every demand worker has finished a full partition iteration.
 */
private class SyncFuture extends GridFutureAdapter<Object> {
    /** */
    private static final long serialVersionUID = 0L;

    /** Remaining workers. */
    private Collection<DemandWorker> remaining;

    /**
     * @param workers List of workers.
     */
    private SyncFuture(Collection<DemandWorker> workers) {
        assert workers.size() == poolSize();

        // Synchronized list: workers from multiple threads call onWorkerDone().
        remaining = Collections.synchronizedList(new LinkedList<>(workers));
    }

    /**
     * @param w Worker who iterated through all partitions.
     */
    void onWorkerDone(DemandWorker w) {
        if (isDone())
            return;

        if (remaining.remove(w))
            if (log.isDebugEnabled())
                log.debug("Completed full partition iteration for worker [worker=" + w + ']');

        if (remaining.isEmpty()) {
            if (log.isDebugEnabled())
                log.debug("Completed sync future.");

            onDone();
        }
    }
}

/**
 * Supply message wrapper.
 */
private static class SupplyMessage {
    /** Sender ID. */
    private UUID sndId;

    /** Supply message. */
    private GridDhtPartitionSupplyMessage supply;

    /**
     * Dummy constructor.
     */
    private SupplyMessage() {
        // No-op.
    }

    /**
     * @param sndId Sender ID.
     * @param supply Supply message.
     */
    SupplyMessage(UUID sndId, GridDhtPartitionSupplyMessage supply) {
        this.sndId = sndId;
        this.supply = supply;
    }

    /**
     * @return Sender ID.
     */
    UUID senderId() {
        return sndId;
    }

    /**
     * @return Message.
     */
    GridDhtPartitionSupplyMessage supply() {
        return supply;
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(SupplyMessage.class, this);
    }
}
}
/*****************************
 * DeviceListActivity
 * ------------------
 *
 * @author Marco Rinalducci
 * @version 1.0.0
 *
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *****************************/
package ch.rinalducci.DroneRemote;

import android.app.Activity;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.ListView;
import android.widget.TextView;

import java.util.Set;

/**
 * Activity that lists already-paired Bluetooth devices and lets the user scan for
 * new ones. When a device row is tapped, its MAC address is returned to the caller
 * via the result Intent under {@link #EXTRA_DEVICE_ADDRESS}.
 */
@SuppressWarnings("ConstantConditions")
public class DeviceListActivity extends Activity {
    // Debugging
    private static final String TAG = "DeviceListActivity";
    private static final boolean D = false;

    // Return Intent extra
    public static String EXTRA_DEVICE_ADDRESS = "device_address";

    // Member fields
    private BluetoothAdapter mBtAdapter;
    // Adapter backing the "new devices" list; filled by mReceiver during discovery.
    private ArrayAdapter<String> mNewDevicesArrayAdapter;

    /*********************************************************************************
     *
     * @category Smartphone override method
     *
     *********************************************************************************/

    /**
     * Called when the activity is first created.
     * Wires up both device lists, registers the discovery BroadcastReceiver,
     * and pre-populates the paired-devices list.
     */
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // Setup the window
        //requestWindowFeature(Window.FEATURE_LEFT_ICON);
        setContentView(R.layout.device_list);
        //getWindow().setFeatureDrawableResource(Window.FEATURE_LEFT_ICON, android.R.drawable.ic_menu_search);

        // Set result CANCELED incase the user backs out
        setResult(Activity.RESULT_CANCELED);

        // Initialize the button to perform device discovery
        Button scanButton = (Button) findViewById(R.id.button_scan);
        scanButton.setOnClickListener(new OnClickListener() {
            public void onClick(View v) {
                doDiscovery();
                // Hide the scan button once discovery starts.
                v.setVisibility(View.GONE);
            }
        });

        // Initialize array adapters. One for already paired devices and
        // one for newly discovered devices
        ArrayAdapter<String> mPairedDevicesArrayAdapter =
            new ArrayAdapter<String>(this, R.layout.device_name);
        mNewDevicesArrayAdapter = new ArrayAdapter<String>(this, R.layout.device_name);

        // Find and set up the ListView for paired devices
        ListView pairedListView = (ListView) findViewById(R.id.paired_devices);
        pairedListView.setAdapter(mPairedDevicesArrayAdapter);
        pairedListView.setOnItemClickListener(mDeviceClickListener);

        // Find and set up the ListView for newly discovered devices
        ListView newDevicesListView = (ListView) findViewById(R.id.new_devices);
        newDevicesListView.setAdapter(mNewDevicesArrayAdapter);
        newDevicesListView.setOnItemClickListener(mDeviceClickListener);

        // Register for broadcasts when a device is discovered
        IntentFilter filter = new IntentFilter(BluetoothDevice.ACTION_FOUND);
        this.registerReceiver(mReceiver, filter);

        // Register for broadcasts when discovery has finished
        filter = new IntentFilter(BluetoothAdapter.ACTION_DISCOVERY_FINISHED);
        this.registerReceiver(mReceiver, filter);

        // Get the local Bluetooth adapter
        mBtAdapter = BluetoothAdapter.getDefaultAdapter();

        // Get a set of currently paired devices
        Set<BluetoothDevice> pairedDevices = mBtAdapter.getBondedDevices();

        // If there are paired devices, add each one to the ArrayAdapter
        if (pairedDevices != null) {
            if (pairedDevices.size() > 0) {
                findViewById(R.id.title_paired_devices).setVisibility(View.VISIBLE);
                for (BluetoothDevice device : pairedDevices) {
                    // Row text format "<name>\n<address>"; the click listener relies on
                    // the address being the last 17 characters.
                    mPairedDevicesArrayAdapter.add(device.getName() + "\n" + device.getAddress());
                }
            } else {
                String noDevices = getResources().getText(R.string.none_paired).toString();
                mPairedDevicesArrayAdapter.add(noDevices);
            }
        }
    }

    /**
     * Called when the activity will be destroyed.
     * Stops any in-flight discovery and unregisters the BroadcastReceiver
     * registered in {@link #onCreate}.
     */
    @Override
    protected void onDestroy() {
        super.onDestroy();

        // Make sure we're not doing discovery anymore
        if (mBtAdapter != null) {
            mBtAdapter.cancelDiscovery();
        }

        // Unregister broadcast listeners
        this.unregisterReceiver(mReceiver);
    }

    /*********************************************************************************
     *
     * @category Bluetooth
     *
     *********************************************************************************/

    /**
     * Start device discover with the BluetoothAdapter
     */
    private void doDiscovery() {
        if (D) Log.d(TAG, "doDiscovery()");

        // Indicate scanning in the title
        findViewById(R.id.progressBar).setVisibility(View.VISIBLE);
        setTitle(R.string.scanning);

        // Turn on sub-title for new devices
        findViewById(R.id.title_new_devices).setVisibility(View.VISIBLE);

        // If we're already discovering, stop it
        if (mBtAdapter.isDiscovering()) {
            mBtAdapter.cancelDiscovery();
        }

        // Request discover from BluetoothAdapter
        mBtAdapter.startDiscovery();
    }

    /**
     * The on-click listener for all devices in the ListViews.
     * Shared by both the paired and the newly-discovered lists.
     */
    private OnItemClickListener mDeviceClickListener = new OnItemClickListener() {
        public void onItemClick(AdapterView<?> av, View v, int arg2, long arg3) {
            // Cancel discovery because it's costly and we're about to connect
            mBtAdapter.cancelDiscovery();

            // Get the device MAC address, which is the last 17 chars in the View
            String info = ((TextView) v).getText().toString();
            // Ignore taps on the "none found" placeholder row.
            if (!info.equals(getResources().getText(R.string.none_found).toString())) {
                String address = info.substring(info.length() - 17);

                // Create the result Intent and include the MAC address
                Intent intent = new Intent();
                intent.putExtra(EXTRA_DEVICE_ADDRESS, address);

                // Set result and finish this Activity
                setResult(Activity.RESULT_OK, intent);
                finish();
            }
        }
    };

    /**
     * The BroadcastReceiver that listens for discovered devices and
     * changes the title when discovery is finished
     */
    private final BroadcastReceiver mReceiver = new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            String action = intent.getAction();

            // When discovery finds a device
            if (BluetoothDevice.ACTION_FOUND.equals(action)) {
                // Get the BluetoothDevice object from the Intent
                BluetoothDevice device = intent.getParcelableExtra(BluetoothDevice.EXTRA_DEVICE);
                // If it's already paired, skip it, because it's been listed already
                if (device.getBondState() != BluetoothDevice.BOND_BONDED) {
                    mNewDevicesArrayAdapter.add(device.getName() + "\n" + device.getAddress());
                }
            // When discovery is finished, change the Activity title
            } else if (BluetoothAdapter.ACTION_DISCOVERY_FINISHED.equals(action)) {
                findViewById(R.id.progressBar).setVisibility(View.GONE);
                setTitle(R.string.select_device);
                if (mNewDevicesArrayAdapter.getCount() == 0) {
                    String noDevices = getResources().getText(R.string.none_found).toString();
                    mNewDevicesArrayAdapter.add(noDevices);
                }
                //findViewById(R.id.button_scan).setVisibility(View.VISIBLE);
            }
        }
    };
}
/** * The MIT License (MIT) * <p> * Copyright (c) 2017 LiangMaYong ( ibeam@qq.com ) * <p> * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/ or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * <p> * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * <p> * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. **/ package com.liangmayong.apkbox.core.context; import android.app.Application; import android.content.ComponentName; import android.content.ContentResolver; import android.content.Context; import android.content.Intent; import android.content.ServiceConnection; import android.content.res.AssetManager; import android.content.res.Resources; import android.os.Bundle; import android.view.LayoutInflater; import com.liangmayong.apkbox.core.ApkLoaded; import com.liangmayong.apkbox.core.constant.ApkConstant; import com.liangmayong.apkbox.core.resources.ApkLayoutInflater; import com.liangmayong.apkbox.hook.activity.HookActivity_Component; import com.liangmayong.apkbox.hook.service.HookService_Component; import com.liangmayong.apkbox.reflect.ApkMethod; /** * Created by liangmayong on 2016/9/18. 
*/ public final class ApkContext extends Application { /** * get * * @param baseContext baseContext * @param loaded loaded * @return context */ public static Context get(Context baseContext, ApkLoaded loaded) { ApkContext context = new ApkContext(baseContext); context.loaded = loaded; ApkContextModifier.setOuterContext(baseContext, context); return context; } /** * get * * @param baseContext baseContext * @param apkPath apkPath * @return context */ public static Context get(Context baseContext, String apkPath) { ApkContext context = new ApkContext(baseContext); context.loaded = ApkLoaded.get(baseContext, apkPath); ApkContextModifier.setOuterContext(baseContext, context); return context; } private ApkLoaded loaded = null; private Resources.Theme mTheme = null; private ContentResolver contentResolver = null; private ApkContext(Context base) { try { ApkMethod method = new ApkMethod(getClass(), this, "attach", Context.class); method.invoke(base); } catch (Exception e) { } } @Override public Context getApplicationContext() { if (isApkLoaded()) { return this; } return super.getApplicationContext(); } @Override public ClassLoader getClassLoader() { if (isApkLoaded()) { return loaded.getClassLoader(); } return super.getClassLoader(); } @Override public AssetManager getAssets() { if (isApkLoaded()) { return loaded.getAssets(getBaseContext()); } return super.getAssets(); } @Override public Resources getResources() { if (isApkLoaded()) { return loaded.getResources(getBaseContext()); } return super.getResources(); } @Override public String getPackageName() { return getBaseContext().getPackageName(); } @Override public Resources.Theme getTheme() { if (isApkLoaded()) { if (mTheme == null) { mTheme = getResources().newTheme(); mTheme.setTo(getBaseContext().getTheme()); } return mTheme; } return super.getTheme(); } public boolean isApkLoaded() { if (loaded != null) { return true; } return false; } @Override public void startActivity(Intent intent) { if (isApkLoaded()) { 
intent.putExtra(ApkConstant.EXTRA_APK_PATH, loaded.getApkPath()); intent = HookActivity_Component.modify(intent); } super.startActivity(intent); } @Override public void startActivity(Intent intent, Bundle options) { if (isApkLoaded()) { intent.putExtra(ApkConstant.EXTRA_APK_PATH, loaded.getApkPath()); intent = HookActivity_Component.modify(intent); } super.startActivity(intent, options); } @Override public ComponentName startService(Intent service) { if (isApkLoaded()) { service.putExtra(ApkConstant.EXTRA_APK_PATH, loaded.getApkPath()); service = HookService_Component.modify(service); } return super.startService(service); } @Override public boolean stopService(Intent name) { if (isApkLoaded()) { name.putExtra(ApkConstant.EXTRA_APK_PATH, loaded.getApkPath()); name = HookService_Component.modify(name); } return super.stopService(name); } @Override public boolean bindService(Intent service, ServiceConnection conn, int flags) { if (isApkLoaded()) { service.putExtra(ApkConstant.EXTRA_APK_PATH, loaded.getApkPath()); service = HookService_Component.modify(service); } return super.bindService(service, conn, flags); } @Override public void sendBroadcast(Intent intent, String receiverPermission) { if (isApkLoaded()) { receiverPermission = getPackageName() + ".permission.APK_RECEIVE"; intent.setAction(intent.getAction().replaceAll(loaded.getApkInfo().packageName, getPackageName())); } super.sendBroadcast(intent, receiverPermission); } @Override public void sendBroadcast(Intent intent) { if (isApkLoaded()) { sendBroadcast(intent, ""); } else { super.sendBroadcast(intent); } } @Override public Object getSystemService(String name) { if (isApkLoaded()) { if (Context.LAYOUT_INFLATER_SERVICE.equals(name)) { return new ApkLayoutInflater((LayoutInflater) super.getSystemService(name)); } else if (ApkConstant.SERVICE_APK_BOX_MANAGER.equals(name)) { return null; } } return super.getSystemService(name); } }
/* * Swift Parallel Scripting Language (http://swift-lang.org) * Code from Java CoG Kit Project (see notice below) with modifications. * * Copyright 2005-2014 University of Chicago * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // ---------------------------------------------------------------------- // This code is developed as part of the Java CoG Kit project // The terms of the license can be found at http://www.cogkit.org/license // This message may not be removed or altered. // ---------------------------------------------------------------------- /** * Copyright (c) 2003, National Research Council of Canada * All rights reserved. * * Permission is hereby granted, free of charge, to any person obtaining a copy of this * software and associated documentation files (the "Software"), to deal in the Software * without restriction, including without limitation the rights to use, copy, modify, merge, * publish, distribute, and/or sell copies of the Software, and to permit persons to whom the * Software is furnished to do so, subject to the following conditions: * * The above copyright notice(s) and this licence appear in all copies of the Software or * substantial portions of the Software, and that both the above copyright notice(s) and this * license appear in supporting documentation. 
* * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND * NONINFRINGEMENT OF THIRD PARTY RIGHTS. IN NO EVENT SHALL THE * COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS NOTICE BE LIABLE * FOR ANY CLAIM, OR ANY DIRECT, INDIRECT, SPECIAL OR CONSEQUENTIAL * DAMAGES, OR ANY DAMAGES WHATSOEVER (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWSOEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN AN ACTION OF * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR * OTHERWISE) ARISING IN ANY WAY OUT OF OR IN CONNECTION WITH THE * SOFTWARE OR THE USE OF THE SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. * * Except as contained in this notice, the name of a copyright holder shall NOT be used in * advertising or otherwise to promote the sale, use or other dealings in this Software * without specific prior written authorization. Title to copyright in this software and any * associated documentation will at all times remain with copyright holders. 
*/
package org.globus.cog.security.cert.management;

import java.awt.Button;
import java.awt.FlowLayout;
import java.awt.Font;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.GridLayout;
import java.awt.Label;
import java.awt.Panel;
import java.awt.TextField;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.StringTokenizer;

import org.globus.cog.security.cert.request.GridCertRequest;

/**
 * Applet that lets a user generate a grid certificate request (user or host
 * certificate) and mail it to the CA.  UI, localized strings and file
 * locations (userCertFile, userKeyFile, userCertReqFile, emailAddressOfCA,
 * status, bgColor, appletTitle, ...) are inherited from {@link GIPApplet}.
 *
 * @author Jean-Claude Cote
 */
public class CertReqApplet extends GIPApplet implements ActionListener {

    /** Name of the property file this applet is configured from. */
    private static final String PROPERTY_FILE = "CertRequestApplet";

    // Values configured by property file.
    private String emailSubject = null;
    private String genAction = null;
    private String mailRequest = null;
    private String countryNameLabel = "Country Name";
    private String organizationLabel = "Organization";
    private String organizationalUnitLabel = "Organizational Unit";
    private String nameLabel = "Name";
    private String passPhraseLabel = "PassPhrase";
    private String confirmPassPhraseLabel = "Confirm PassPhrase";
    private String yourEmailAddressLabel = "Your e-mail address";
    private String serviceLabel = "Service";
    private String hostLabel = "Host";

    // UI elements.
    private Button mailButton = null;
    private Button genButton = null;
    private TextField passwordField = new TextField();
    private TextField passwordConfField = new TextField();
    private TextField countryField = new TextField("CA");
    private TextField organizationField = new TextField("Grid");
    private TextField organizationUnitField = new TextField();
    private TextField nameField = new TextField();
    private TextField hostField = new TextField();
    private TextField serviceField = new TextField();
    private String certReqFileContent = "";
    private TextField fromField = new TextField();

    // Current values of the request fields, captured on each action.
    String country = "";
    String organization = "";
    String organizationUnit = "";
    String name = "";
    String host = "";
    String service = "host"; // ldap
    // True when requesting a host certificate instead of a user certificate.
    private boolean bHostCertReq = false;

    /**
     * Initializes the applet: reads the "certificateRequestType" parameter,
     * pre-fills host/organizational-unit from the local FQDN when possible,
     * and builds the UI.
     */
    public void init() {
        super.init();

        // Get param values: set cert type (host vs. user).
        String type = getParameter("certificateRequestType");
        if (type != null && type.length() > 0) {
            if (type.equalsIgnoreCase("host")) {
                bHostCertReq = true;
            }
        }

        // Try to find the FQDN of the machine running the applet.
        InetAddress inetAdd = null;
        try {
            inetAdd = InetAddress.getLocalHost();
        } catch (UnknownHostException e) {
            e.printStackTrace();
        }
        // FIX: original dereferenced inetAdd unconditionally and threw an NPE
        // when the local host lookup failed; fall back to null and skip the
        // FQDN-based defaults in that case.
        String hostName = (inetAdd != null) ? inetAdd.getCanonicalHostName() : null;
        System.out.println("GetCanonicalHostName returned: " + hostName);
        if (hostName != null && hostName.length() > 0) {
            StringTokenizer tokens = new StringTokenizer(hostName, ".");
            // Only trust the name if it really looks fully qualified
            // (more than 3 dot-separated components).
            if (tokens.countTokens() > 3) {
                if (bHostCertReq) {
                    host = hostName;
                } else {
                    // Default the OU to the host's domain (everything after
                    // the first dot).
                    String hostDomain = hostName.substring(hostName.indexOf(".") + 1, hostName.length());
                    organizationUnit = hostDomain;
                }
            }
        }

        genAction = getLocString("GenerateRequestAction");
        mailRequest = getLocString("MailRequestAction");

        // Setup UI.
        mailButton = new Button(mailRequest);
        genButton = new Button(genAction);

        Panel titlePanel = null;
        if (appletTitle.length() > 0) {
            titlePanel = new Panel();
            titlePanel.add(new Label(appletTitle));
            titlePanel.setFont(new Font("Arial", Font.BOLD, 24));
            titlePanel.setBackground(bgColor);
        }

        // Input fields differ for host vs. user certificate requests.
        Panel inputPanel = new Panel();
        inputPanel.add(new Label(countryNameLabel));
        inputPanel.add(countryField);
        inputPanel.add(new Label(organizationLabel));
        inputPanel.add(organizationField);
        if (bHostCertReq) {
            inputPanel.add(new Label(hostLabel));
            hostField.setText(host);
            inputPanel.add(hostField);
            inputPanel.add(new Label(serviceLabel));
            serviceField.setText(service);
            inputPanel.add(serviceField);
        } else {
            inputPanel.add(new Label(organizationalUnitLabel));
            organizationUnitField.setText(organizationUnit);
            inputPanel.add(organizationUnitField);
            inputPanel.add(new Label(nameLabel));
            nameField.setText(name);
            inputPanel.add(nameField);
            inputPanel.add(new Label(passPhraseLabel));
            passwordField.setEchoChar('*');
            inputPanel.add(passwordField);
            inputPanel.add(new Label(confirmPassPhraseLabel));
            inputPanel.add(passwordConfField);
            passwordConfField.setEchoChar('*');
        }
        inputPanel.add(new Label(yourEmailAddressLabel));
        inputPanel.add(fromField);
        inputPanel.setLayout(new GridLayout(0, 2));
        inputPanel.setBackground(bgColor);

        Panel buttonPanel = new Panel();
        genButton.addActionListener(this);
        buttonPanel.add(genButton);
        mailButton.addActionListener(this);
        buttonPanel.add(mailButton);
        buttonPanel.setLayout(new FlowLayout());

        Panel statusPanel = new Panel();
        Font font = new Font("Courier", Font.PLAIN, 12);
        status.setFont(font);
        statusPanel.add(status);

        // Stack title / input / buttons / status vertically.
        Panel mainPanel = new Panel();
        GridBagLayout gridbag = new GridBagLayout();
        GridBagConstraints c = new GridBagConstraints();
        if (titlePanel != null) {
            c.weightx = 1.0;
            c.gridwidth = GridBagConstraints.REMAINDER; // end row
            gridbag.setConstraints(titlePanel, c);
            mainPanel.add(titlePanel);
        }
        c.weightx = 1.0;
        c.gridwidth = GridBagConstraints.REMAINDER; // end row
        gridbag.setConstraints(inputPanel, c);
        mainPanel.add(inputPanel);
        c.weightx = 1.0;
        c.gridwidth = GridBagConstraints.REMAINDER; // end row
        gridbag.setConstraints(buttonPanel, c);
        mainPanel.add(buttonPanel);
        c.weightx = 1.0;
        c.gridwidth = GridBagConstraints.REMAINDER; // end row
        gridbag.setConstraints(statusPanel, c);
        mainPanel.add(statusPanel);
        mainPanel.setLayout(gridbag);

        this.add(mainPanel);
        this.setBackground(bgColor);
    }

    /**
     * Handles both buttons: "generate request" creates the key pair and
     * certificate request files; "mail request" sends the request file to
     * the CA.  All errors are appended to the inherited status area.
     */
    public void actionPerformed(ActionEvent e) {
        boolean bOk = true;

        if (debug) {
            this.doDebugTests();
        }

        // Snapshot the current field values.
        country = countryField.getText();
        organization = organizationField.getText();
        organizationUnit = organizationUnitField.getText();
        name = nameField.getText();
        host = hostField.getText();
        service = serviceField.getText();
        String from = fromField.getText();

        try {
            if (bOk) {
                if (bHostCertReq) {
                    // Reset the cert file loc to user loc; need to do this
                    // since we may have changed the loc based on the type of
                    // service.
                    resetCertFileLoc();
                    if (service.length() == 0) {
                        service = "host";
                    }
                    name = service + "/" + host;
                    organizationUnit = name.substring(name.indexOf(".") + 1, name.length());
                    // Rename "user*" files to "<service>*" files.
                    int i = userCertFile.lastIndexOf("user");
                    userCertFile = userCertFile.substring(0, i) + service + userCertFile.substring(i + 4, userCertFile.length());
                    userKeyFile = userKeyFile.substring(0, i) + service + userKeyFile.substring(i + 4, userKeyFile.length());
                    userCertReqFile = userCertReqFile.substring(0, i) + service + userCertReqFile.substring(i + 4, userCertReqFile.length());
                }
            }

            // FIX: original compared action-command Strings with "==", which
            // only worked because AWT Buttons happen to return the label
            // reference; use equals() for value comparison.
            String command = e.getActionCommand();
            if (genAction != null && genAction.equals(command)) {
                bOk = checkCertDir();
                // Check not to overwrite any of these files.
                if (bOk) {
                    bOk = checkCertsDoNotExists();
                }
                String password = "";
                if (bOk) {
                    if (!bHostCertReq) {
                        password = passwordField.getText();
                        String password2 = passwordConfField.getText();
                        bOk = verifyPassword(password, password2);
                    }
                }
                // Generate cert request.
                if (bOk) {
                    String cn = "C=" + country + ",O=" + organization + ",OU=" + organizationUnit + ",CN=" + name;
                    appendToStatus("Generating cert request for: " + cn);
                    appendToStatus("Writing new private key to " + userKeyFile);
                    GridCertRequest.genCertificateRequest(
                        cn,
                        emailAddressOfCA,
                        password,
                        userKeyFile,
                        userCertFile,
                        userCertReqFile);
                    certReqFileContent = readCertReqFile(userCertReqFile);
                    appendToStatus(certReqFileContent);
                }
            } else if (mailRequest != null && mailRequest.equals(command)) {
                // Get recipient's email address.
                if (bOk) {
                    if (from.length() == 0) {
                        appendToStatus("Please specify your e-mail address.");
                        bOk = false;
                    }
                }
                // Get request from file if we generated it at an earlier date.
                if (bOk && certReqFileContent.length() == 0) {
                    certReqFileContent = readCertReqFile(userCertReqFile);
                }
                // Send the request to the CA.
                if (bOk && certReqFileContent.length() != 0) {
                    if (sendMail(from, certReqFileContent)) {
                        appendToStatus("Your request has been mailed to " + emailAddressOfCA);
                    }
                }
            } else {
                appendToStatus("Error: Unknown action " + command);
            }
        } catch (Exception ex) {
            // Write exception to Java console.
            ex.printStackTrace();
            // Write exception to status area.  FIX: build the trace with a
            // StringBuffer instead of repeated String concatenation.
            StringBuffer message = new StringBuffer();
            message.append(ex.getMessage()).append("\n");
            StackTraceElement[] stackElements = ex.getStackTrace();
            for (int i = 0; i < stackElements.length; i++) {
                message.append(stackElements[i].toString()).append("\n");
            }
            appendToStatus(message.toString());
        }
    }

    /**
     * Returns true when none of the key / cert / request files already exist;
     * otherwise reports each existing file to the status area and returns
     * false so existing credentials are never overwritten.
     */
    private boolean checkCertsDoNotExists() {
        boolean bFileExists = false;
        File f = new File(userKeyFile);
        if (f.exists()) {
            appendToStatus(userKeyFile + " exists");
            bFileExists = true;
        }
        f = new File(userCertFile);
        if (f.exists()) {
            appendToStatus(userCertFile + " exists");
            bFileExists = true;
        }
        f = new File(userCertReqFile);
        if (f.exists()) {
            appendToStatus(userCertReqFile + " exists");
            bFileExists = true;
        }
        if (bFileExists) {
            appendToStatus("Looks like you already have credential.");
            appendToStatus("If you wish to create new ones you will need to move them to another location.");
        }
        return !bFileExists;
    }

    /**
     * Reads the whole certificate request file into a String (lines joined
     * with "\n").  Returns "" and reports to the status area when the file
     * is missing or unreadable.
     *
     * @param userCertReqFile path of the request file to read
     * @throws FileNotFoundException if the file vanishes between the check and the open
     * @throws IOException on read errors
     */
    private String readCertReqFile(String userCertReqFile)
        throws FileNotFoundException, IOException {
        File fUserCertReqFile = new File(userCertReqFile);
        if (!fUserCertReqFile.exists() || !fUserCertReqFile.canRead()) {
            appendToStatus("Can't read certificate request file: " + userCertReqFile);
            return "";
        }
        // FIX: use a StringBuffer (was O(n^2) String concatenation) and make
        // sure the reader is closed even when readLine() throws.
        StringBuffer certReqData = new StringBuffer();
        BufferedReader in = new BufferedReader(
            new InputStreamReader(new FileInputStream(userCertReqFile)));
        try {
            String sLine = in.readLine();
            while (sLine != null) {
                certReqData.append(sLine).append("\n");
                sLine = in.readLine();
            }
        } finally {
            in.close();
        }
        return certReqData.toString();
    }

    /* (non-Javadoc)
     * @see ca.gc.nrc.gip.applets.GIPApplet#getPropertyFileLoc()
     */
    protected String getPropertyFileName() {
        return PROPERTY_FILE;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.cassandra.db.compaction;

import java.io.*;
import java.nio.ByteBuffer;
import java.util.*;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;

import com.google.common.base.Function;
import com.google.common.collect.Iterables;
import com.google.common.collect.Sets;
import org.junit.Test;
import org.junit.runner.RunWith;

import org.apache.cassandra.OrderedJUnit4ClassRunner;
import org.apache.cassandra.SchemaLoader;
import org.apache.cassandra.Util;
import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.db.*;
import org.apache.cassandra.db.columniterator.IdentityQueryFilter;
import org.apache.cassandra.db.columniterator.OnDiskAtomIterator;
import org.apache.cassandra.db.filter.QueryFilter;
import org.apache.cassandra.dht.*;
import org.apache.cassandra.io.sstable.*;
import org.apache.cassandra.io.sstable.metadata.MetadataCollector;
import org.apache.cassandra.io.sstable.metadata.StatsMetadata;
import org.apache.cassandra.service.StorageService;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.cassandra.utils.FBUtilities;
import org.apache.cassandra.utils.Pair;

import static org.junit.Assert.*;

/**
 * Integration tests for the compaction subsystem: single-sstable tombstone
 * compaction, supercolumn tombstones, EchoedRow handling, user-defined
 * compaction, range tombstones, the compaction log, purge safety and
 * cleanup-need detection.  Tests run against the schema loaded by
 * {@link SchemaLoader} and depend on real flush/compaction behavior,
 * so statement order and the sleeps below are load-bearing.
 */
@RunWith(OrderedJUnit4ClassRunner.class)
public class CompactionsTest extends SchemaLoader
{
    private static final String STANDARD1 = "Standard1";
    public static final String KEYSPACE1 = "Keyspace1";

    /**
     * Shared driver: writes one sstable whose columns carry a short TTL,
     * waits past expiry, then verifies that a background compaction under
     * {@code strategyClassName} rewrites the sstable smaller (expired data
     * dropped) while keeping the max timestamp metadata correct.
     */
    public ColumnFamilyStore testSingleSSTableCompaction(String strategyClassName) throws Exception
    {
        Keyspace keyspace = Keyspace.open(KEYSPACE1);
        ColumnFamilyStore store = keyspace.getColumnFamilyStore(STANDARD1);
        store.clearUnsafe();
        store.metadata.gcGraceSeconds(1);
        store.setCompactionStrategyClass(strategyClassName);

        // disable compaction while flushing
        store.disableAutoCompaction();

        long timestamp = populate(KEYSPACE1, STANDARD1, 0, 9, 3); //ttl=3s

        store.forceBlockingFlush();
        assertEquals(1, store.getSSTables().size());
        long originalSize = store.getSSTables().iterator().next().uncompressedLength();

        // wait enough to force single compaction
        TimeUnit.SECONDS.sleep(5);

        // enable compaction, submit background and wait for it to complete
        store.enableAutoCompaction();
        FBUtilities.waitOnFutures(CompactionManager.instance.submitBackground(store));
        while (CompactionManager.instance.getPendingTasks() > 0 || CompactionManager.instance.getActiveCompactions() > 0)
            TimeUnit.SECONDS.sleep(1);

        // and sstable with ttl should be compacted
        assertEquals(1, store.getSSTables().size());
        long size = store.getSSTables().iterator().next().uncompressedLength();
        assertTrue("should be less than " + originalSize + ", but was " + size, size < originalSize);

        // make sure max timestamp of compacted sstables is recorded properly after compaction.
        assertMaxTimestamp(store, timestamp);

        return store;
    }

    /**
     * Inserts rows [startRowKey..endRowKey] with 10 columns each into ks.cf
     * and returns the write timestamp used for all of them.
     */
    private long populate(String ks, String cf, int startRowKey, int endRowKey, int ttl)
    {
        long timestamp = System.currentTimeMillis();
        for (int i = startRowKey; i <= endRowKey; i++)
        {
            DecoratedKey key = Util.dk(Integer.toString(i));
            Mutation rm = new Mutation(ks, key.getKey());
            for (int j = 0; j < 10; j++)
                rm.add(cf, Util.cellname(Integer.toString(j)),
                       ByteBufferUtil.EMPTY_BYTE_BUFFER,
                       timestamp,
                       j > 0 ? ttl : 0); // let first column never expire, since deleting all columns does not produce sstable
            rm.apply();
        }
        return timestamp;
    }

    /**
     * Test to see if sstable has enough expired columns, it is compacted itself.
     */
    @Test
    public void testSingleSSTableCompactionWithSizeTieredCompaction() throws Exception
    {
        testSingleSSTableCompaction(SizeTieredCompactionStrategy.class.getCanonicalName());
    }

    /** Same as above under LCS; also checks the result stays in level 0. */
    @Test
    public void testSingleSSTableCompactionWithLeveledCompaction() throws Exception
    {
        ColumnFamilyStore store = testSingleSSTableCompaction(LeveledCompactionStrategy.class.getCanonicalName());
        WrappingCompactionStrategy strategy = (WrappingCompactionStrategy) store.getCompactionStrategy();
        // tombstone removal compaction should not promote level
        assert strategy.getSSTableCountPerLevel()[0] == 1;
    }

    /**
     * Verifies that a supercolumn range tombstone written after a subcolumn
     * shadows it through a major compaction: the compacted sstable contains
     * the tombstone but not the subcolumn.
     */
    @Test
    public void testSuperColumnTombstones() throws ExecutionException, InterruptedException
    {
        Keyspace keyspace = Keyspace.open(KEYSPACE1);
        ColumnFamilyStore cfs = keyspace.getColumnFamilyStore("Super1");
        cfs.disableAutoCompaction();

        DecoratedKey key = Util.dk("tskey");
        ByteBuffer scName = ByteBufferUtil.bytes("TestSuperColumn");

        // a subcolumn
        Mutation rm = new Mutation(KEYSPACE1, key.getKey());
        rm.add("Super1", Util.cellname(scName, ByteBufferUtil.bytes(0)),
               ByteBufferUtil.EMPTY_BYTE_BUFFER,
               FBUtilities.timestampMicros());
        rm.apply();
        cfs.forceBlockingFlush();

        // shadow the subcolumn with a supercolumn tombstone
        rm = new Mutation(KEYSPACE1, key.getKey());
        rm.deleteRange("Super1", SuperColumns.startOf(scName), SuperColumns.endOf(scName), FBUtilities.timestampMicros());
        rm.apply();
        cfs.forceBlockingFlush();

        CompactionManager.instance.performMaximal(cfs);
        assertEquals(1, cfs.getSSTables().size());

        // check that the shadowed column is gone
        SSTableReader sstable = cfs.getSSTables().iterator().next();
        AbstractBounds<RowPosition> bounds = new Bounds<RowPosition>(key, sstable.partitioner.getMinimumToken().maxKeyBound());
        ISSTableScanner scanner = sstable.getScanner(new DataRange(bounds, new IdentityQueryFilter()));
        OnDiskAtomIterator iter = scanner.next();
        assertEquals(key, iter.getKey());
        assert iter.next() instanceof RangeTombstone;
        assert !iter.hasNext();
    }

    /**
     * With two sstables whose token ranges overlap, tombstone compaction must
     * be skipped unless {@code unchecked_tombstone_compaction} is enabled;
     * once enabled, each sstable is tombstone-compacted independently.
     */
    @Test
    public void testUncheckedTombstoneSizeTieredCompaction() throws Exception
    {
        Keyspace keyspace = Keyspace.open(KEYSPACE1);
        ColumnFamilyStore store = keyspace.getColumnFamilyStore(STANDARD1);
        store.clearUnsafe();
        store.metadata.gcGraceSeconds(1);
        store.metadata.compactionStrategyOptions.put("tombstone_compaction_interval", "1");
        store.metadata.compactionStrategyOptions.put("unchecked_tombstone_compaction", "false");
        store.reload();
        store.setCompactionStrategyClass(SizeTieredCompactionStrategy.class.getName());

        // disable compaction while flushing
        store.disableAutoCompaction();

        //Populate sstable1 with with keys [0..9]
        populate(KEYSPACE1, STANDARD1, 0, 9, 3); //ttl=3s
        store.forceBlockingFlush();

        //Populate sstable2 with with keys [10..19] (keys do not overlap with SSTable1)
        long timestamp2 = populate(KEYSPACE1, STANDARD1, 10, 19, 3); //ttl=3s
        store.forceBlockingFlush();

        assertEquals(2, store.getSSTables().size());

        Iterator<SSTableReader> it = store.getSSTables().iterator();
        long originalSize1 = it.next().uncompressedLength();
        long originalSize2 = it.next().uncompressedLength();

        // wait enough to force single compaction
        TimeUnit.SECONDS.sleep(5);

        // enable compaction, submit background and wait for it to complete
        store.enableAutoCompaction();
        FBUtilities.waitOnFutures(CompactionManager.instance.submitBackground(store));
        while (CompactionManager.instance.getPendingTasks() > 0 || CompactionManager.instance.getActiveCompactions() > 0)
            TimeUnit.SECONDS.sleep(1);

        // even though both sstables were candidate for tombstone compaction
        // it was not executed because they have an overlapping token range
        assertEquals(2, store.getSSTables().size());
        it = store.getSSTables().iterator();
        long newSize1 = it.next().uncompressedLength();
        long newSize2 = it.next().uncompressedLength();
        assertEquals("candidate sstable should not be tombstone-compacted because its key range overlap with other sstable",
                     originalSize1, newSize1);
        assertEquals("candidate sstable should not be tombstone-compacted because its key range overlap with other sstable",
                     originalSize2, newSize2);

        // now let's enable the magic property
        store.metadata.compactionStrategyOptions.put("unchecked_tombstone_compaction", "true");
        store.reload();

        //submit background task again and wait for it to complete
        FBUtilities.waitOnFutures(CompactionManager.instance.submitBackground(store));
        while (CompactionManager.instance.getPendingTasks() > 0 || CompactionManager.instance.getActiveCompactions() > 0)
            TimeUnit.SECONDS.sleep(1);

        //we still have 2 sstables, since they were not compacted against each other
        assertEquals(2, store.getSSTables().size());
        it = store.getSSTables().iterator();
        newSize1 = it.next().uncompressedLength();
        newSize2 = it.next().uncompressedLength();
        assertTrue("should be less than " + originalSize1 + ", but was " + newSize1, newSize1 < originalSize1);
        assertTrue("should be less than " + originalSize2 + ", but was " + newSize2, newSize2 < originalSize2);

        // make sure max timestamp of compacted sstables is recorded properly after compaction.
        assertMaxTimestamp(store, timestamp2);
    }

    /** Asserts the max timestamp over all live sstables equals the expected one. */
    public static void assertMaxTimestamp(ColumnFamilyStore cfs, long maxTimestampExpected)
    {
        long maxTimestampObserved = Long.MIN_VALUE;
        for (SSTableReader sstable : cfs.getSSTables())
            maxTimestampObserved = Math.max(sstable.getMaxTimestamp(), maxTimestampObserved);
        assertEquals(maxTimestampExpected, maxTimestampObserved);
    }

    @Test
    public void testEchoedRow()
    {
        // This test check that EchoedRow doesn't skipp rows: see CASSANDRA-2653
        Keyspace keyspace = Keyspace.open(KEYSPACE1);
        ColumnFamilyStore cfs = keyspace.getColumnFamilyStore("Standard2");

        // disable compaction while flushing
        cfs.disableAutoCompaction();

        // Insert 4 keys in two sstables. We need the sstables to have 2 rows
        // at least to trigger what was causing CASSANDRA-2653
        for (int i=1; i < 5; i++)
        {
            DecoratedKey key = Util.dk(String.valueOf(i));
            Mutation rm = new Mutation(KEYSPACE1, key.getKey());
            rm.add("Standard2", Util.cellname(String.valueOf(i)), ByteBufferUtil.EMPTY_BYTE_BUFFER, i);
            rm.apply();

            if (i % 2 == 0)
                cfs.forceBlockingFlush();
        }
        Collection<SSTableReader> toCompact = cfs.getSSTables();
        assert toCompact.size() == 2;

        // Reinserting the same keys. We will compact only the previous sstable, but we need those new ones
        // to make sure we use EchoedRow, otherwise it won't be used because purge can be done.
        for (int i=1; i < 5; i++)
        {
            DecoratedKey key = Util.dk(String.valueOf(i));
            Mutation rm = new Mutation(KEYSPACE1, key.getKey());
            rm.add("Standard2", Util.cellname(String.valueOf(i)), ByteBufferUtil.EMPTY_BYTE_BUFFER, i);
            rm.apply();
        }
        cfs.forceBlockingFlush();
        SSTableReader tmpSSTable = null;
        for (SSTableReader sstable : cfs.getSSTables())
            if (!toCompact.contains(sstable))
                tmpSSTable = sstable;
        assert tmpSSTable != null;

        // Force compaction on first sstables. Since each row is in only one sstable, we will be using EchoedRow.
        Util.compact(cfs, toCompact);
        assertEquals(2, cfs.getSSTables().size());

        // Now, we remove the sstable that was just created to force the use of EchoedRow (so that it doesn't hide the problem)
        cfs.markObsolete(Collections.singleton(tmpSSTable), OperationType.UNKNOWN);
        assertEquals(1, cfs.getSSTables().size());

        // Now assert we do have the 4 keys
        assertEquals(4, Util.getRangeSlice(cfs).size());
    }

    @Test
    public void testDontPurgeAccidentaly() throws InterruptedException
    {
        testDontPurgeAccidentaly("test1", "Super5");

        // Use CF with gc_grace=0, see last bug of CASSANDRA-2786
        testDontPurgeAccidentaly("test1", "SuperDirectGC");
    }

    /**
     * Submits a user-defined compaction on a single flushed sstable and
     * checks the CF ends up with exactly one sstable whose generation
     * advanced by one.
     */
    @Test
    public void testUserDefinedCompaction() throws Exception
    {
        Keyspace keyspace = Keyspace.open(KEYSPACE1);
        final String cfname = "Standard3"; // use clean(no sstable) CF
        ColumnFamilyStore cfs = keyspace.getColumnFamilyStore(cfname);

        // disable compaction while flushing
        cfs.disableAutoCompaction();

        final int ROWS_PER_SSTABLE = 10;
        for (int i = 0; i < ROWS_PER_SSTABLE; i++) {
            DecoratedKey key = Util.dk(String.valueOf(i));
            Mutation rm = new Mutation(KEYSPACE1, key.getKey());
            rm.add(cfname, Util.cellname("col"),
                   ByteBufferUtil.EMPTY_BYTE_BUFFER,
                   System.currentTimeMillis());
            rm.apply();
        }
        cfs.forceBlockingFlush();
        Collection<SSTableReader> sstables = cfs.getSSTables();

        assert sstables.size() == 1;
        SSTableReader sstable = sstables.iterator().next();

        int prevGeneration = sstable.descriptor.generation;
        String file = new File(sstable.descriptor.filenameFor(Component.DATA)).getName();
        // submit user defined compaction on flushed sstable
        CompactionManager.instance.forceUserDefinedCompaction(file);
        // wait until user defined compaction finishes
        do
        {
            Thread.sleep(100);
        } while (CompactionManager.instance.getPendingTasks() > 0 || CompactionManager.instance.getActiveCompactions() > 0);
        // CF should have only one sstable with generation number advanced
        sstables = cfs.getSSTables();
        assert sstables.size() == 1;
        assert sstables.iterator().next().descriptor.generation == prevGeneration + 1;
    }

    /**
     * Builds two sstables directly with SSTableWriter, including a range
     * tombstone that must shadow column "01" but not "a", compacts them,
     * and checks both the surviving data and the min/max column-name
     * metadata of the result.
     */
    @Test
    public void testRangeTombstones() throws IOException, ExecutionException, InterruptedException
    {
        Keyspace keyspace = Keyspace.open(KEYSPACE1);
        ColumnFamilyStore cfs = keyspace.getColumnFamilyStore("Standard2");
        cfs.clearUnsafe();

        // disable compaction while flushing
        cfs.disableAutoCompaction();

        final CFMetaData cfmeta = cfs.metadata;
        Directories dir = cfs.directories;

        ArrayList<DecoratedKey> keys = new ArrayList<DecoratedKey>();

        for (int i=0; i < 4; i++)
        {
            keys.add(Util.dk(""+i));
        }

        ArrayBackedSortedColumns cf = ArrayBackedSortedColumns.factory.create(cfmeta);
        cf.addColumn(Util.column("01", "a", 1)); // this must not resurrect
        cf.addColumn(Util.column("a", "a", 3));
        cf.deletionInfo().add(new RangeTombstone(Util.cellname("0"), Util.cellname("b"), 2, (int) (System.currentTimeMillis()/1000)),cfmeta.comparator);

        SSTableWriter writer = new SSTableWriter(cfs.getTempSSTablePath(dir.getDirectoryForNewSSTables()),
                                                 0,
                                                 0,
                                                 cfs.metadata,
                                                 StorageService.getPartitioner(),
                                                 new MetadataCollector(cfs.metadata.comparator));

        writer.append(Util.dk("0"), cf);
        writer.append(Util.dk("1"), cf);
        writer.append(Util.dk("3"), cf);

        cfs.addSSTable(writer.closeAndOpenReader());
        writer = new SSTableWriter(cfs.getTempSSTablePath(dir.getDirectoryForNewSSTables()),
                                   0,
                                   0,
                                   cfs.metadata,
                                   StorageService.getPartitioner(),
                                   new MetadataCollector(cfs.metadata.comparator));

        writer.append(Util.dk("0"), cf);
        writer.append(Util.dk("1"), cf);
        writer.append(Util.dk("2"), cf);
        writer.append(Util.dk("3"), cf);
        cfs.addSSTable(writer.closeAndOpenReader());

        Collection<SSTableReader> toCompact = cfs.getSSTables();
        assert toCompact.size() == 2;

        // Force compaction on first sstables. Since each row is in only one sstable, we will be using EchoedRow.
        Util.compact(cfs, toCompact);
        assertEquals(1, cfs.getSSTables().size());

        // Now assert we do have the 4 keys
        assertEquals(4, Util.getRangeSlice(cfs).size());

        ArrayList<DecoratedKey> k = new ArrayList<DecoratedKey>();
        for (Row r : Util.getRangeSlice(cfs))
        {
            k.add(r.key);
            assertEquals(ByteBufferUtil.bytes("a"),r.cf.getColumn(Util.cellname("a")).value());
            assertNull(r.cf.getColumn(Util.cellname("01")));
            assertEquals(3,r.cf.getColumn(Util.cellname("a")).timestamp());
        }

        for (SSTableReader sstable : cfs.getSSTables())
        {
            StatsMetadata stats = sstable.getSSTableMetadata();
            assertEquals(ByteBufferUtil.bytes("0"), stats.minColumnNames.get(0));
            assertEquals(ByteBufferUtil.bytes("b"), stats.maxColumnNames.get(0));
        }

        assertEquals(keys, k);
    }

    /**
     * Checks the system compaction log: started compactions are recorded per
     * (keyspace, cf) with their sstable generations, and finishing a
     * compaction removes the entry.
     */
    @Test
    public void testCompactionLog() throws Exception
    {
        SystemKeyspace.discardCompactionsInProgress();

        String cf = "Standard4";
        ColumnFamilyStore cfs = Keyspace.open(KEYSPACE1).getColumnFamilyStore(cf);
        insertData(KEYSPACE1, cf, 0, 1);
        cfs.forceBlockingFlush();

        Collection<SSTableReader> sstables = cfs.getSSTables();
        assert !sstables.isEmpty();
        Set<Integer> generations = Sets.newHashSet(Iterables.transform(sstables, new Function<SSTableReader, Integer>()
        {
            public Integer apply(SSTableReader sstable)
            {
                return sstable.descriptor.generation;
            }
        }));
        UUID taskId = SystemKeyspace.startCompaction(cfs, sstables);
        Map<Pair<String, String>, Map<Integer, UUID>> compactionLogs = SystemKeyspace.getUnfinishedCompactions();
        Set<Integer> unfinishedCompactions = compactionLogs.get(Pair.create(KEYSPACE1, cf)).keySet();
        assert unfinishedCompactions.containsAll(generations);

        SystemKeyspace.finishCompaction(taskId);
        compactionLogs = SystemKeyspace.getUnfinishedCompactions();
        assert !compactionLogs.containsKey(Pair.create(KEYSPACE1, cf));
    }

    /**
     * Regression driver for CASSANDRA-2786: compacting only the sstable that
     * holds a row deletion (but not the sstable with the original data) must
     * not purge the tombstone, or the deleted row would resurrect.
     */
    private void testDontPurgeAccidentaly(String k, String cfname) throws InterruptedException
    {
        // This test catches the regression of CASSANDRA-2786
        Keyspace keyspace = Keyspace.open(KEYSPACE1);
        ColumnFamilyStore cfs = keyspace.getColumnFamilyStore(cfname);

        // disable compaction while flushing
        cfs.clearUnsafe();
        cfs.disableAutoCompaction();

        // Add test row
        DecoratedKey key = Util.dk(k);
        Mutation rm = new Mutation(KEYSPACE1, key.getKey());
        rm.add(cfname, Util.cellname(ByteBufferUtil.bytes("sc"), ByteBufferUtil.bytes("c")), ByteBufferUtil.EMPTY_BYTE_BUFFER, 0);
        rm.apply();

        cfs.forceBlockingFlush();

        Collection<SSTableReader> sstablesBefore = cfs.getSSTables();

        QueryFilter filter = QueryFilter.getIdentityFilter(key, cfname, System.currentTimeMillis());
        assertTrue(cfs.getColumnFamily(filter).hasColumns());

        // Remove key
        rm = new Mutation(KEYSPACE1, key.getKey());
        rm.delete(cfname, 2);
        rm.apply();

        ColumnFamily cf = cfs.getColumnFamily(filter);
        assertTrue("should be empty: " + cf, cf == null || !cf.hasColumns());

        // Sleep one second so that the removal is indeed purgeable even with gcgrace == 0
        Thread.sleep(1000);

        cfs.forceBlockingFlush();

        Collection<SSTableReader> sstablesAfter = cfs.getSSTables();
        Collection<SSTableReader> toCompact = new ArrayList<SSTableReader>();
        for (SSTableReader sstable : sstablesAfter)
            if (!sstablesBefore.contains(sstable))
                toCompact.add(sstable);

        Util.compact(cfs, toCompact);

        cf = cfs.getColumnFamily(filter);
        assertTrue("should be empty: " + cf, cf == null || !cf.hasColumns());
    }

    /** Token range over zero-padded 3-digit string keys [start, end]. */
    private static Range<Token> rangeFor(int start, int end)
    {
        return new Range<Token>(new BytesToken(String.format("%03d", start).getBytes()),
                                new BytesToken(String.format("%03d", end).getBytes()));
    }

    /** Builds ranges from consecutive (start, end) pairs in {@code keys}. */
    private static Collection<Range<Token>> makeRanges(int ... keys)
    {
        Collection<Range<Token>> ranges = new ArrayList<Range<Token>>(keys.length / 2);
        for (int i = 0; i < keys.length; i += 2)
            ranges.add(rangeFor(keys[i], keys[i + 1]));
        return ranges;
    }

    /** Inserts one row keyed by the zero-padded 3-digit form of {@code key}. */
    private static void insertRowWithKey(int key)
    {
        long timestamp = System.currentTimeMillis();
        DecoratedKey decoratedKey = Util.dk(String.format("%03d", key));
        Mutation rm = new Mutation(KEYSPACE1, decoratedKey.getKey());
        rm.add("Standard1", Util.cellname("col"), ByteBufferUtil.EMPTY_BYTE_BUFFER, timestamp, 1000);
        rm.apply();
    }

    /**
     * Exercises CompactionManager.needsCleanup: cleanup is needed exactly
     * when the owned ranges fail to cover some key present in the sstable.
     */
    @Test
    public void testNeedsCleanup()
    {
        Keyspace keyspace = Keyspace.open(KEYSPACE1);
        ColumnFamilyStore store = keyspace.getColumnFamilyStore("Standard1");
        store.clearUnsafe();

        // disable compaction while flushing
        store.disableAutoCompaction();

        // write three groups of 9 keys: 001, 002, ... 008, 009
        //                               101, 102, ... 108, 109
        //                               201, 202, ... 208, 209
        for (int i = 1; i < 10; i++)
        {
            insertRowWithKey(i);
            insertRowWithKey(i + 100);
            insertRowWithKey(i + 200);
        }
        store.forceBlockingFlush();

        assertEquals(1, store.getSSTables().size());
        SSTableReader sstable = store.getSSTables().iterator().next();

        // contiguous range spans all data
        assertFalse(CompactionManager.needsCleanup(sstable, makeRanges(0, 209)));
        assertFalse(CompactionManager.needsCleanup(sstable, makeRanges(0, 210)));

        // separate ranges span all data
        assertFalse(CompactionManager.needsCleanup(sstable, makeRanges(0, 9,
                                                                      100, 109,
                                                                      200, 209)));
        assertFalse(CompactionManager.needsCleanup(sstable, makeRanges(0, 109,
                                                                      200, 210)));
        assertFalse(CompactionManager.needsCleanup(sstable, makeRanges(0, 9,
                                                                      100, 210)));

        // one range is missing completely
        assertTrue(CompactionManager.needsCleanup(sstable, makeRanges(100, 109,
                                                                     200, 209)));
        assertTrue(CompactionManager.needsCleanup(sstable, makeRanges(0, 9,
                                                                     200, 209)));
        assertTrue(CompactionManager.needsCleanup(sstable, makeRanges(0, 9,
                                                                     100, 109)));

        // the beginning of one range is missing
        assertTrue(CompactionManager.needsCleanup(sstable, makeRanges(1, 9,
                                                                     100, 109,
                                                                     200, 209)));
        assertTrue(CompactionManager.needsCleanup(sstable, makeRanges(0, 9,
                                                                     101, 109,
                                                                     200, 209)));
        assertTrue(CompactionManager.needsCleanup(sstable, makeRanges(0, 9,
                                                                     100, 109,
                                                                     201, 209)));

        // the end of one range is missing
        assertTrue(CompactionManager.needsCleanup(sstable, makeRanges(0, 8,
                                                                     100, 109,
                                                                     200, 209)));
        assertTrue(CompactionManager.needsCleanup(sstable, makeRanges(0, 9,
                                                                     100, 108,
                                                                     200, 209)));
        assertTrue(CompactionManager.needsCleanup(sstable, makeRanges(0, 9,
                                                                     100, 109,
                                                                     200, 208)));

        // some ranges don't contain any data
        assertFalse(CompactionManager.needsCleanup(sstable, makeRanges(0, 0,
                                                                      0, 9,
                                                                      50, 51,
                                                                      100, 109,
                                                                      150, 199,
                                                                      200, 209,
                                                                      300, 301)));
        // same case, but with a middle range not covering some of the existing data
        assertFalse(CompactionManager.needsCleanup(sstable, makeRanges(0, 0,
                                                                      0, 9,
                                                                      50, 51,
                                                                      100, 103,
                                                                      150, 199,
                                                                      200, 209,
                                                                      300, 301)));
    }
}
/**
 * OLAT - Online Learning and Training<br>
 * http://www.olat.org
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); <br>
 * you may not use this file except in compliance with the License.<br>
 * You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing,<br>
 * software distributed under the License is distributed on an "AS IS" BASIS, <br>
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
 * See the License for the specific language governing permissions and <br>
 * limitations under the License.
 * <p>
 * Copyright (c) since 2004 at Multimedia- & E-Learning Services (MELS),<br>
 * University of Zurich, Switzerland.
 * <hr>
 * <a href="http://www.openolat.org">
 * OpenOLAT - Online Learning and Training</a><br>
 * This file has been modified by the OpenOLAT community. Changes are licensed
 * under the Apache 2.0 license as the original file.
 */
package org.olat.group.ui.portlet;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.olat.NewControllerFactory;
import org.olat.core.CoreSpringFactory;
import org.olat.core.gui.UserRequest;
import org.olat.core.gui.components.Component;
import org.olat.core.gui.components.link.Link;
import org.olat.core.gui.components.link.LinkFactory;
import org.olat.core.gui.components.table.DefaultColumnDescriptor;
import org.olat.core.gui.components.table.Table;
import org.olat.core.gui.components.table.TableController;
import org.olat.core.gui.components.table.TableEvent;
import org.olat.core.gui.components.table.TableGuiConfiguration;
import org.olat.core.gui.components.velocity.VelocityContainer;
import org.olat.core.gui.control.Controller;
import org.olat.core.gui.control.Event;
import org.olat.core.gui.control.WindowControl;
import org.olat.core.gui.control.generic.portal.AbstractPortletRunController;
import org.olat.core.gui.control.generic.portal.PortletDefaultTableDataModel;
import org.olat.core.gui.control.generic.portal.PortletEntry;
import org.olat.core.gui.control.generic.portal.PortletToolSortingControllerImpl;
import org.olat.core.gui.control.generic.portal.SortingCriteria;
import org.olat.core.gui.translator.Translator;
import org.olat.core.util.coordinate.CoordinatorManager;
import org.olat.core.util.event.GenericEventListener;
import org.olat.core.util.filter.FilterFactory;
import org.olat.core.util.resource.OresHelper;
import org.olat.group.BusinessGroup;
import org.olat.group.BusinessGroupLazy;
import org.olat.group.BusinessGroupOrder;
import org.olat.group.BusinessGroupService;
import org.olat.group.model.SearchBusinessGroupParams;
import org.olat.group.ui.edit.BusinessGroupModifiedEvent;

/**
 * Description:<br>
 * Run view controller for the groups list portlet. Renders the user's business
 * groups in a small table, supports automatic (alphabetical/date) and manual
 * sorting via the inherited portlet sorting infrastructure, and listens on the
 * {@link BusinessGroup} event channel so the list shrinks when the user is
 * removed from a group.
 * <P>
 * Initial Date: 11.07.2005 <br>
 *
 * @author gnaegi
 */
public class GroupsPortletRunController extends AbstractPortletRunController<BusinessGroupEntry> implements GenericEventListener {

	// table row-action command fired when a group name is clicked
	private static final String CMD_LAUNCH = "cmd.launch";

	private final TableController tableCtr;
	private final GroupTableDataModel groupListModel;
	private VelocityContainer groupsVC;
	private Link showAllLink;
	private final BusinessGroupService businessGroupService;

	/**
	 * Constructor. Builds the portlet table (header-less, non-sortable, no
	 * paging), loads the initial model according to the persisted sorting
	 * configuration, and registers this controller on the BusinessGroup event
	 * bus channel (deregistered again in {@link #doDispose()}).
	 *
	 * @param wControl window control
	 * @param ureq user request
	 * @param trans translator for table/link i18n keys
	 * @param portletName name under which sorting preferences are persisted
	 * @param defaultMaxEntries default maximum number of rows to show
	 */
	public GroupsPortletRunController(WindowControl wControl, UserRequest ureq, Translator trans, String portletName, int defaultMaxEntries) {
		super(wControl, ureq, trans, portletName, defaultMaxEntries);
		businessGroupService = CoreSpringFactory.getImpl(BusinessGroupService.class);

		// offer both auto-sorting terms in the portlet tool
		sortingTermsList.add(SortingCriteria.ALPHABETICAL_SORTING);
		sortingTermsList.add(SortingCriteria.DATE_SORTING);

		groupsVC = createVelocityContainer("groupsPortlet");
		showAllLink = LinkFactory.createLink("groupsPortlet.showAll", groupsVC, this);

		TableGuiConfiguration tableConfig = new TableGuiConfiguration();
		tableConfig.setTableEmptyMessage(trans.translate("groupsPortlet.nogroups"));
		tableConfig.setDisplayTableHeader(false);
		tableConfig.setCustomCssClass("b_portlet_table");
		tableConfig.setDisplayRowCount(false);
		tableConfig.setPageingEnabled(false);
		tableConfig.setDownloadOffered(false);
		//disable the default sorting for this table; sorting is handled by the portlet tool
		tableConfig.setSortingEnabled(false);
		tableCtr = new TableController(tableConfig, ureq, getWindowControl(), trans);
		listenTo(tableCtr);
		// dummy header key, won't be used since setDisplayTableHeader is set to
		// false
		tableCtr.addColumnDescriptor(new DefaultColumnDescriptor("groupsPortlet.bgname", 0, CMD_LAUNCH, trans.getLocale()));

		sortingCriteria = getPersistentSortingConfiguration(ureq);
		groupListModel = new GroupTableDataModel(Collections.<PortletEntry<BusinessGroupEntry>>emptyList());
		tableCtr.setTableDataModel(groupListModel);
		reloadModel(sortingCriteria);

		groupsVC.put("table", tableCtr.getInitialComponent());
		putInitialPanel(groupsVC);

		// register for businessgroup type events
		CoordinatorManager.getInstance().getCoordinator().getEventBus().registerFor(this, ureq.getIdentity(), OresHelper.lookupType(BusinessGroup.class));
	}

	/**
	 * Wraps full {@link BusinessGroup} objects into portlet entries.
	 *
	 * @param groups groups to wrap
	 * @param withDescription if true, copy the group description into the entry
	 * @return a new mutable list of portlet entries, in input order
	 */
	private List<PortletEntry<BusinessGroupEntry>> convertBusinessGroupToPortletEntryList(List<BusinessGroup> groups, boolean withDescription) {
		List<PortletEntry<BusinessGroupEntry>> convertedList = new ArrayList<PortletEntry<BusinessGroupEntry>>();
		for(BusinessGroup group:groups) {
			GroupPortletEntry entry = new GroupPortletEntry(group);
			if(withDescription) {
				entry.getValue().setDescription(group.getDescription());
			}
			convertedList.add(entry);
		}
		return convertedList;
	}

	/**
	 * Wraps lazily-loaded {@link BusinessGroupLazy} objects into portlet
	 * entries. Same contract as
	 * {@link #convertBusinessGroupToPortletEntryList(List, boolean)}.
	 *
	 * @param groups lazy groups to wrap
	 * @param withDescription if true, copy the group description into the entry
	 * @return a new mutable list of portlet entries, in input order
	 */
	private List<PortletEntry<BusinessGroupEntry>> convertShortBusinessGroupToPortletEntryList(List<BusinessGroupLazy> groups, boolean withDescription) {
		List<PortletEntry<BusinessGroupEntry>> convertedList = new ArrayList<PortletEntry<BusinessGroupEntry>>();
		for(BusinessGroupLazy group:groups) {
			GroupPortletEntry entry = new GroupPortletEntry(group);
			if(withDescription) {
				entry.getValue().setDescription(group.getDescription());
			}
			convertedList.add(entry);
		}
		return convertedList;
	}

	/**
	 * Reloads the table model according to the given sorting criteria. For
	 * AUTO_SORTING the groups are fetched pre-ordered from the service;
	 * otherwise the persisted manually-sorted list is used.
	 *
	 * @param sortingCriteria criteria to apply (never applied partially)
	 */
	protected void reloadModel(SortingCriteria sortingCriteria) {
		if (sortingCriteria.getSortingType() == SortingCriteria.AUTO_SORTING) {
			BusinessGroupOrder order = null;
			if(sortingCriteria.getSortingTerm()==SortingCriteria.ALPHABETICAL_SORTING) {
				order = sortingCriteria.isAscending() ? BusinessGroupOrder.nameAsc : BusinessGroupOrder.nameDesc;
			} else if(sortingCriteria.getSortingTerm()==SortingCriteria.DATE_SORTING) {
				order = sortingCriteria.isAscending() ? BusinessGroupOrder.creationDateAsc : BusinessGroupOrder.creationDateDesc;
			}

			int maxEntries = sortingCriteria.getMaxEntries();
			// over-fetch (2x) so that enough rows survive the duplicate filter below
			// NOTE(review): presumably the query can return the same group twice
			// (e.g. via multiple memberships) — verify against the service impl
			List<BusinessGroupLazy> groupList = businessGroupService.findBusinessGroups(getIdentity(), maxEntries * 2, order);

			// remove duplicates in place while preserving the service's ordering
			Set<BusinessGroupLazy> removeDuplicates = new HashSet<BusinessGroupLazy>(maxEntries);
			for(Iterator<BusinessGroupLazy> it=groupList.iterator(); it.hasNext(); ) {
				BusinessGroupLazy group = it.next();
				if(removeDuplicates.contains(group)) {
					it.remove();
				} else {
					removeDuplicates.add(group);
				}
			}

			// cap the deduplicated list at the configured maximum
			List<BusinessGroupLazy> uniqueList = groupList.subList(0, Math.min(maxEntries, groupList.size()));
			List<PortletEntry<BusinessGroupEntry>> entries = convertShortBusinessGroupToPortletEntryList(uniqueList, false);

			groupListModel.setObjects(entries);
			tableCtr.modelChanged();
		} else {
			// manual sorting: model comes from the persisted preference
			reloadModel(getPersistentManuallySortedItems());
		}
	}

	/**
	 * Replaces the table model content with an already-sorted item list.
	 *
	 * @param sortedItems the rows to display, in display order
	 */
	protected void reloadModel(List<PortletEntry<BusinessGroupEntry>> sortedItems) {
		groupListModel.setObjects(sortedItems);
		tableCtr.modelChanged();
	}

	/**
	 * @see org.olat.core.gui.control.DefaultController#event(org.olat.core.gui.UserRequest,
	 *      org.olat.core.gui.components.Component, org.olat.core.gui.control.Event)
	 */
	public void event(UserRequest ureq, Component source, Event event) {
		// "show all" jumps to the full group list site
		if (source == showAllLink) {
			String businessPath = "[GroupsSite:0][MyGroups:0][AllGroups:0]";
			NewControllerFactory.getInstance().launch(businessPath, ureq, getWindowControl());
		}
	}

	/**
	 * @see org.olat.core.gui.control.ControllerEventListener#dispatchEvent(org.olat.core.gui.UserRequest,
	 *      org.olat.core.gui.control.Controller, org.olat.core.gui.control.Event)
	 */
	public void event(UserRequest ureq, Controller source, Event event) {
		super.event(ureq, source, event);
		if (source == tableCtr) {
			// a row click launches the corresponding business group
			if (event.getCommand().equals(Table.COMMANDLINK_ROWACTION_CLICKED)) {
				TableEvent te = (TableEvent) event;
				String actionid = te.getActionId();
				if (actionid.equals(CMD_LAUNCH)) {
					PortletEntry<BusinessGroupEntry> entry = groupListModel.getObject(te.getRowId());
					String businessPath = "[BusinessGroup:" + entry.getKey() + "]";
					NewControllerFactory.getInstance().launch(businessPath, ureq, getWindowControl());
				}
			}
		}
	}

	/**
	 * @see org.olat.core.gui.control.DefaultController#doDispose(boolean)
	 */
	protected void doDispose() {
		super.doDispose();
		// de-register for businessgroup type events (registered in the constructor)
		CoordinatorManager.getInstance().getCoordinator().getEventBus().deregisterFor(this, OresHelper.lookupType(BusinessGroup.class));
		// POST: all firing event for the source just deregistered are finished
		// (listeners lock in EventAgency)
	}

	/**
	 * Event-bus callback: removes the affected group row from the model when
	 * this user has been removed from that group. Other group events are
	 * ignored.
	 */
	public void event(Event event) {
		if (event instanceof BusinessGroupModifiedEvent) {
			BusinessGroupModifiedEvent mev = (BusinessGroupModifiedEvent) event;
			// only react when *this* identity was removed from the group
			if(BusinessGroupModifiedEvent.IDENTITY_REMOVED_EVENT.equals(event.getCommand())
					&& getIdentity().getKey().equals(mev.getAffectedIdentityKey())) {
				Long modifiedKey = mev.getModifiedGroupKey();
				for(PortletEntry<BusinessGroupEntry> portlet:groupListModel.getObjects()) {
					// NOTE(review): stray empty statement after '{' kept as-is (harmless);
					// removal during iteration is safe here only because of the
					// immediate break
					if(modifiedKey.equals(portlet.getKey())) {;
						groupListModel.getObjects().remove(portlet);
						tableCtr.modelChanged();
						break;
					}
				}
			}
		}
	}

	/**
	 * Retrieves the persistent sortingCriteria and the persistent manually sorted, if any,
	 * creates the table model for the manual sorting, and instantiates the
	 * PortletToolSortingControllerImpl. The controller is created lazily and
	 * cached in the inherited {@code portletToolsController} field.
	 *
	 * @param ureq
	 * @param wControl
	 * @return a PortletToolSortingControllerImpl instance.
	 */
	protected PortletToolSortingControllerImpl<BusinessGroupEntry> createSortingTool(UserRequest ureq, WindowControl wControl) {
		if(portletToolsController==null) {
			// -1 means "no limit" — the sorting tool shows every group
			List<BusinessGroupLazy> groupList = businessGroupService.findBusinessGroups(getIdentity(), -1);
			List<PortletEntry<BusinessGroupEntry>> portletEntryList = convertShortBusinessGroupToPortletEntryList(groupList, true);
			GroupsManualSortingTableDataModel tableDataModel = new GroupsManualSortingTableDataModel(portletEntryList);
			List<PortletEntry<BusinessGroupEntry>> sortedItems = getPersistentManuallySortedItems();

			portletToolsController = new PortletToolSortingControllerImpl<BusinessGroupEntry>(ureq, wControl, getTranslator(), sortingCriteria, tableDataModel, sortedItems);
			portletToolsController.setConfigManualSorting(true);
			portletToolsController.setConfigAutoSorting(true);
			portletToolsController.addControllerListener(this);
		}
		return portletToolsController;
	}

	/**
	 * Retrieves the persistent manually sorted items for the current portlet.
	 * Loads the stored group keys from the GUI preferences (if any), resolves
	 * them to groups, and delegates the actual ordering to the inherited
	 * {@code getPersistentManuallySortedItems(List)}.
	 *
	 * @return the manually sorted portlet entries (possibly empty, never null)
	 */
	private List<PortletEntry<BusinessGroupEntry>> getPersistentManuallySortedItems() {
		@SuppressWarnings("unchecked")
		// preference value maps group key -> manual sort position
		Map<Long, Integer> storedPrefs = (Map<Long, Integer>)guiPreferences.get(Map.class, getPreferenceKey(SORTED_ITEMS_PREF));

		List<PortletEntry<BusinessGroupEntry>> portletEntryList;
		if(storedPrefs != null) {
			SearchBusinessGroupParams params = new SearchBusinessGroupParams(getIdentity(), true, true);
			params.setGroupKeys(storedPrefs.keySet());
			List<BusinessGroup> groups = businessGroupService.findBusinessGroups(params, null, 0, -1);
			portletEntryList = convertBusinessGroupToPortletEntryList(groups, false);
		} else {
			// nothing persisted yet: start from an empty list
			List<BusinessGroupLazy> groups = new ArrayList<BusinessGroupLazy>();
			portletEntryList = convertShortBusinessGroupToPortletEntryList(groups, false);
		}
		return getPersistentManuallySortedItems(portletEntryList);
	}

	/**
	 * Comparator implementation used for sorting BusinessGroup entries according with the
	 * input sortingCriteria.
	 * <p>
	 * @param sortingCriteria
	 * @return a Comparator for the input sortingCriteria
	 */
	protected Comparator<BusinessGroupEntry> getComparator(final SortingCriteria sortingCriteria) {
		return new Comparator<BusinessGroupEntry>(){
			public int compare(final BusinessGroupEntry group1, final BusinessGroupEntry group2) {
				int comparisonResult = 0;
				if(sortingCriteria.getSortingTerm()==SortingCriteria.ALPHABETICAL_SORTING) {
					// locale-aware name comparison via the inherited collator
					comparisonResult = collator.compare(group1.getName(), group2.getName());
				} else if(sortingCriteria.getSortingTerm()==SortingCriteria.DATE_SORTING) {
					comparisonResult = group1.getCreationDate().compareTo(group2.getCreationDate());
				}
				if(!sortingCriteria.isAscending()) {
					//if not isAscending return (-comparisonResult)
					return -comparisonResult;
				}
				return comparisonResult;
			}
		};
	}

	/**
	 * PortletDefaultTableDataModel implementation for the current portlet:
	 * single column containing the group name.
	 * <P>
	 * Initial Date: 10.12.2007 <br>
	 * @author Lavinia Dumitrescu
	 */
	private static class GroupTableDataModel extends PortletDefaultTableDataModel<BusinessGroupEntry> {

		public GroupTableDataModel(List<PortletEntry<BusinessGroupEntry>> objects) {
			// 1 == column count
			super(objects, 1);
		}

		public Object getValueAt(int row, int col) {
			PortletEntry<BusinessGroupEntry> entry = getObject(row);
			BusinessGroupEntry businessGroup = entry.getValue();
			switch (col) {
				case 0:
					return businessGroup.getName();
				default:
					// defensive fallback for an out-of-range column index
					return "ERROR";
			}
		}

		@Override
		public Object createCopyWithEmptyList() {
			return new GroupTableDataModel(new ArrayList<PortletEntry<BusinessGroupEntry>>());
		}
	}

	/**
	 * PortletDefaultTableDataModel implementation for the manual sorting
	 * component: three columns (name, HTML-stripped description, creation date).
	 * <P>
	 * Initial Date: 10.12.2007 <br>
	 * @author Lavinia Dumitrescu
	 */
	private static class GroupsManualSortingTableDataModel extends PortletDefaultTableDataModel<BusinessGroupEntry> {

		/**
		 * @param objects rows to display
		 */
		public GroupsManualSortingTableDataModel(List<PortletEntry<BusinessGroupEntry>> objects) {
			// 3 == column count
			super(objects, 3);
		}

		/**
		 * @see org.olat.core.gui.components.table.TableDataModel#getValueAt(int, int)
		 */
		public final Object getValueAt(int row, int col) {
			PortletEntry<BusinessGroupEntry> portletEntry = getObject(row);
			BusinessGroupEntry group = portletEntry.getValue();
			switch (col) {
				case 0:
					return group.getName();
				case 1:
					// strip HTML tags before display; the filter presumably
					// tolerates a null input (TODO confirm)
					String description = group.getDescription();
					description = FilterFactory.getHtmlTagsFilter().filter(description);
					return (description == null ? "n/a" : description);
				case 2:
					Date date = group.getCreationDate();
					return date;
				default:
					return "error";
			}
		}

		@Override
		public GroupsManualSortingTableDataModel createCopyWithEmptyList() {
			return new GroupsManualSortingTableDataModel(new ArrayList<PortletEntry<BusinessGroupEntry>>());
		}
	}

	/**
	 * PortletEntry adapter holding a {@link BusinessGroupEntry} value keyed by
	 * the business group's primary key.
	 */
	private static class GroupPortletEntry implements PortletEntry<BusinessGroupEntry> {
		private BusinessGroupEntry value;
		private Long key;

		public GroupPortletEntry(BusinessGroup group) {
			value = new BusinessGroupEntry(group);
			key = group.getKey();
		}

		public GroupPortletEntry(BusinessGroupLazy group) {
			value = new BusinessGroupEntry(group);
			key = group.getKey();
		}

		public Long getKey() {
			return key;
		}

		public BusinessGroupEntry getValue() {
			return value;
		}
	}
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.operator.aggregation; import com.facebook.presto.bytecode.DynamicClassLoader; import com.facebook.presto.metadata.BoundVariables; import com.facebook.presto.metadata.FunctionRegistry; import com.facebook.presto.metadata.SqlAggregationFunction; import com.facebook.presto.operator.aggregation.state.BlockState; import com.facebook.presto.operator.aggregation.state.BlockStateSerializer; import com.facebook.presto.operator.aggregation.state.NullableBooleanState; import com.facebook.presto.operator.aggregation.state.NullableDoubleState; import com.facebook.presto.operator.aggregation.state.NullableLongState; import com.facebook.presto.operator.aggregation.state.SliceState; import com.facebook.presto.operator.aggregation.state.StateCompiler; import com.facebook.presto.spi.block.Block; import com.facebook.presto.spi.block.BlockBuilder; import com.facebook.presto.spi.function.AccumulatorState; import com.facebook.presto.spi.function.AccumulatorStateSerializer; import com.facebook.presto.spi.type.Type; import com.facebook.presto.spi.type.TypeManager; import com.google.common.collect.ImmutableList; import io.airlift.slice.Slice; import java.lang.invoke.MethodHandle; import java.util.List; import static com.facebook.presto.metadata.Signature.typeVariable; import static com.facebook.presto.operator.aggregation.AggregationMetadata.ParameterMetadata; import static 
com.facebook.presto.operator.aggregation.AggregationMetadata.ParameterMetadata.ParameterType.BLOCK_INDEX; import static com.facebook.presto.operator.aggregation.AggregationMetadata.ParameterMetadata.ParameterType.BLOCK_INPUT_CHANNEL; import static com.facebook.presto.operator.aggregation.AggregationMetadata.ParameterMetadata.ParameterType.STATE; import static com.facebook.presto.operator.aggregation.AggregationUtils.generateAggregationName; import static com.facebook.presto.spi.type.TypeSignature.parseTypeSignature; import static com.facebook.presto.util.Reflection.methodHandle; import static com.google.common.collect.ImmutableList.toImmutableList; public class ArbitraryAggregationFunction extends SqlAggregationFunction { public static final ArbitraryAggregationFunction ARBITRARY_AGGREGATION = new ArbitraryAggregationFunction(); private static final String NAME = "arbitrary"; private static final MethodHandle LONG_INPUT_FUNCTION = methodHandle(ArbitraryAggregationFunction.class, "input", Type.class, NullableLongState.class, Block.class, int.class); private static final MethodHandle DOUBLE_INPUT_FUNCTION = methodHandle(ArbitraryAggregationFunction.class, "input", Type.class, NullableDoubleState.class, Block.class, int.class); private static final MethodHandle SLICE_INPUT_FUNCTION = methodHandle(ArbitraryAggregationFunction.class, "input", Type.class, SliceState.class, Block.class, int.class); private static final MethodHandle BOOLEAN_INPUT_FUNCTION = methodHandle(ArbitraryAggregationFunction.class, "input", Type.class, NullableBooleanState.class, Block.class, int.class); private static final MethodHandle BLOCK_INPUT_FUNCTION = methodHandle(ArbitraryAggregationFunction.class, "input", Type.class, BlockState.class, Block.class, int.class); private static final MethodHandle LONG_OUTPUT_FUNCTION = methodHandle(NullableLongState.class, "write", Type.class, NullableLongState.class, BlockBuilder.class); private static final MethodHandle DOUBLE_OUTPUT_FUNCTION = 
methodHandle(NullableDoubleState.class, "write", Type.class, NullableDoubleState.class, BlockBuilder.class); private static final MethodHandle SLICE_OUTPUT_FUNCTION = methodHandle(SliceState.class, "write", Type.class, SliceState.class, BlockBuilder.class); private static final MethodHandle BOOLEAN_OUTPUT_FUNCTION = methodHandle(NullableBooleanState.class, "write", Type.class, NullableBooleanState.class, BlockBuilder.class); private static final MethodHandle BLOCK_OUTPUT_FUNCTION = methodHandle(BlockState.class, "write", Type.class, BlockState.class, BlockBuilder.class); private static final MethodHandle LONG_COMBINE_FUNCTION = methodHandle(ArbitraryAggregationFunction.class, "combine", NullableLongState.class, NullableLongState.class); private static final MethodHandle DOUBLE_COMBINE_FUNCTION = methodHandle(ArbitraryAggregationFunction.class, "combine", NullableDoubleState.class, NullableDoubleState.class); private static final MethodHandle SLICE_COMBINE_FUNCTION = methodHandle(ArbitraryAggregationFunction.class, "combine", SliceState.class, SliceState.class); private static final MethodHandle BOOLEAN_COMBINE_FUNCTION = methodHandle(ArbitraryAggregationFunction.class, "combine", NullableBooleanState.class, NullableBooleanState.class); private static final MethodHandle BLOCK_COMBINE_FUNCTION = methodHandle(ArbitraryAggregationFunction.class, "combine", BlockState.class, BlockState.class); protected ArbitraryAggregationFunction() { super(NAME, ImmutableList.of(typeVariable("T")), ImmutableList.of(), parseTypeSignature("T"), ImmutableList.of(parseTypeSignature("T"))); } @Override public String getDescription() { return "return an arbitrary non-null input value"; } @Override public InternalAggregationFunction specialize(BoundVariables boundVariables, int arity, TypeManager typeManager, FunctionRegistry functionRegistry) { Type valueType = boundVariables.getTypeVariable("T"); return generateAggregation(valueType); } private static InternalAggregationFunction 
generateAggregation(Type type) { DynamicClassLoader classLoader = new DynamicClassLoader(ArbitraryAggregationFunction.class.getClassLoader()); List<Type> inputTypes = ImmutableList.of(type); MethodHandle inputFunction; MethodHandle combineFunction; MethodHandle outputFunction; Class<? extends AccumulatorState> stateInterface; AccumulatorStateSerializer<?> stateSerializer; if (type.getJavaType() == long.class) { stateInterface = NullableLongState.class; stateSerializer = StateCompiler.generateStateSerializer(stateInterface, classLoader); inputFunction = LONG_INPUT_FUNCTION; combineFunction = LONG_COMBINE_FUNCTION; outputFunction = LONG_OUTPUT_FUNCTION; } else if (type.getJavaType() == double.class) { stateInterface = NullableDoubleState.class; stateSerializer = StateCompiler.generateStateSerializer(stateInterface, classLoader); inputFunction = DOUBLE_INPUT_FUNCTION; combineFunction = DOUBLE_COMBINE_FUNCTION; outputFunction = DOUBLE_OUTPUT_FUNCTION; } else if (type.getJavaType() == Slice.class) { stateInterface = SliceState.class; stateSerializer = StateCompiler.generateStateSerializer(stateInterface, classLoader); inputFunction = SLICE_INPUT_FUNCTION; combineFunction = SLICE_COMBINE_FUNCTION; outputFunction = SLICE_OUTPUT_FUNCTION; } else if (type.getJavaType() == boolean.class) { stateInterface = NullableBooleanState.class; stateSerializer = StateCompiler.generateStateSerializer(stateInterface, classLoader); inputFunction = BOOLEAN_INPUT_FUNCTION; combineFunction = BOOLEAN_COMBINE_FUNCTION; outputFunction = BOOLEAN_OUTPUT_FUNCTION; } else { stateInterface = BlockState.class; stateSerializer = new BlockStateSerializer(type); inputFunction = BLOCK_INPUT_FUNCTION; combineFunction = BLOCK_COMBINE_FUNCTION; outputFunction = BLOCK_OUTPUT_FUNCTION; } inputFunction = inputFunction.bindTo(type); Type intermediateType = stateSerializer.getSerializedType(); List<ParameterMetadata> inputParameterMetadata = createInputParameterMetadata(type); AggregationMetadata metadata = new 
AggregationMetadata( generateAggregationName(NAME, type.getTypeSignature(), inputTypes.stream().map(Type::getTypeSignature).collect(toImmutableList())), inputParameterMetadata, inputFunction, combineFunction, outputFunction.bindTo(type), stateInterface, stateSerializer, StateCompiler.generateStateFactory(stateInterface, classLoader), type); GenericAccumulatorFactoryBinder factory = AccumulatorCompiler.generateAccumulatorFactoryBinder(metadata, classLoader); return new InternalAggregationFunction(NAME, inputTypes, intermediateType, type, true, factory); } private static List<ParameterMetadata> createInputParameterMetadata(Type value) { return ImmutableList.of(new ParameterMetadata(STATE), new ParameterMetadata(BLOCK_INPUT_CHANNEL, value), new ParameterMetadata(BLOCK_INDEX)); } public static void input(Type type, NullableDoubleState state, Block block, int position) { if (!state.isNull()) { return; } state.setNull(false); state.setDouble(type.getDouble(block, position)); } public static void input(Type type, NullableLongState state, Block block, int position) { if (!state.isNull()) { return; } state.setNull(false); state.setLong(type.getLong(block, position)); } public static void input(Type type, SliceState state, Block block, int position) { if (state.getSlice() != null) { return; } state.setSlice(type.getSlice(block, position)); } public static void input(Type type, NullableBooleanState state, Block block, int position) { if (!state.isNull()) { return; } state.setNull(false); state.setBoolean(type.getBoolean(block, position)); } public static void input(Type type, BlockState state, Block block, int position) { if (state.getBlock() != null) { return; } state.setBlock((Block) type.getObject(block, position)); } public static void combine(NullableLongState state, NullableLongState otherState) { if (!state.isNull()) { return; } state.setNull(false); state.setLong(otherState.getLong()); } public static void combine(NullableDoubleState state, NullableDoubleState 
otherState) { if (!state.isNull()) { return; } state.setNull(false); state.setDouble(otherState.getDouble()); } public static void combine(NullableBooleanState state, NullableBooleanState otherState) { if (!state.isNull()) { return; } state.setNull(false); state.setBoolean(otherState.getBoolean()); } public static void combine(SliceState state, SliceState otherState) { if (state.getSlice() != null) { return; } state.setSlice(otherState.getSlice()); } public static void combine(BlockState state, BlockState otherState) { if (state.getBlock() != null) { return; } state.setBlock(otherState.getBlock()); } }
package proj.me.bitframe.shading_two; import android.content.Context; import android.graphics.Bitmap; import android.graphics.Color; import androidx.palette.graphics.Palette; import android.view.LayoutInflater; import android.view.View; import android.widget.ImageView; import com.squareup.picasso.Callback; import com.squareup.picasso.MemoryPolicy; import com.squareup.picasso.NetworkPolicy; import com.squareup.picasso.Picasso; import java.util.List; import proj.me.bitframe.BeanBitFrame; import proj.me.bitframe.BeanImage; import proj.me.bitframe.FrameModel; import proj.me.bitframe.ImageShades; import proj.me.bitframe.ImageType; import proj.me.bitframe.R; import proj.me.bitframe.dimentions.BeanShade2; import proj.me.bitframe.dimentions.ImageOrder; import proj.me.bitframe.dimentions.LayoutType; import proj.me.bitframe.exceptions.FrameException; import proj.me.bitframe.helper.Utils; /** * Created by Deepak.Tiwari on 29-09-2015. */ public final class ImageShadingTwo extends ImageShades { LayoutInflater inflater; Context context; int totalImages; BindingShadeTwo bindingShadeTwo; String imageLink1, imageLink2; BeanBitFrame beanBitFrame1, beanBitFrame2; FrameModel frameModel; int resultColor; public ImageShadingTwo(Context context, int totalImages, FrameModel frameModel) { this.context = context; inflater = LayoutInflater.from(context); this.totalImages = totalImages; bindingShadeTwo = new BindingShadeTwo(); beanBitFrame1 = new BeanBitFrame(); beanBitFrame2 = new BeanBitFrame(); beanBitFrame1.setLoaded(true); beanBitFrame2.setLoaded(true); this.frameModel = frameModel; } @Override protected void updateFrameUi(List<Bitmap> images, List<BeanImage> beanImages, boolean hasImageProperties) throws FrameException{ BeanBitFrame beanBitFrameFirst = null, beanBitFrameSecond = null; if (hasImageProperties) { beanBitFrameFirst = (BeanBitFrame) beanImages.get(0); beanBitFrameSecond = (BeanBitFrame) beanImages.get(1); } int width1 = hasImageProperties ? 
(int) beanBitFrameFirst.getWidth() : images.get(0).getWidth(); int height1 = hasImageProperties ? (int) beanBitFrameFirst.getHeight() : images.get(0).getHeight(); int width2 = hasImageProperties ? (int) beanBitFrameSecond.getWidth() : images.get(1).getWidth(); int height2 = hasImageProperties ? (int) beanBitFrameSecond.getHeight() : images.get(1).getHeight(); BeanShade2 beanShade2 = ShadeTwo.calculateDimentions(frameModel, width1, height1, width2, height2); boolean isFirstImageLeftOrTop = beanShade2.getImageOrderList().get(0) == ImageOrder.FIRST; boolean isVertLayout = beanShade2.getLayoutType() == LayoutType.VERT; Utils.logMessage("getWidth1 : " + width1); Utils.logMessage("getHeight1 : " + height1); Utils.logMessage("getWidth2 : " + width2); Utils.logMessage("getHeight2 : " + height2); Utils.logMessage("Start++++++++++++++++++++++++++++++++++++Start"); Utils.logMessage("getWidth1 : " + beanShade2.getWidth1()); Utils.logMessage("getHeight1 : " + beanShade2.getHeight1()); Utils.logMessage("getWidth2 : " + beanShade2.getWidth2()); Utils.logMessage("getHeight2 : " + beanShade2.getHeight2()); Utils.logMessage("isFirstImageLeftOrTop : " + isFirstImageLeftOrTop); Utils.logMessage("isVertLayout : " + isVertLayout); Utils.logMessage("End++++++++++++++++++++++++++++++++++++End"); View root = null; if (isVertLayout) { root = bindingShadeTwo.bind(inflater.inflate(R.layout.view_double_vert, null), this); } else { root = bindingShadeTwo.bind(inflater.inflate(R.layout.view_double_horz, null), this); } Bitmap bitmap1 = hasImageProperties ? null : images.get(0); Bitmap bitmap2 = hasImageProperties ? 
null : images.get(1); boolean isAddViewVisible = false; final ImageView imageView1 = (ImageView) root.findViewById(R.id.view_double_image1); final ImageView imageView2 = (ImageView) root.findViewById(R.id.view_double_image2); BindingShadeTwo.setLayoutHeight(imageView1, beanShade2.getHeight1()); BindingShadeTwo.setLayoutWidth(imageView1, beanShade2.getWidth1()); if (!hasImageProperties) { Palette.from(isFirstImageLeftOrTop ? bitmap1 : bitmap2).generate(new PaletteListener(1, this)); BindingShadeTwo.setBitmap(imageView1, isFirstImageLeftOrTop ? bitmap1 : bitmap2); } else { //set bean properties int resultColor = 0; beanBitFrame1.setHasGreaterVibrantPopulation(isFirstImageLeftOrTop ? beanBitFrameFirst.isHasGreaterVibrantPopulation() : beanBitFrameSecond.isHasGreaterVibrantPopulation()); beanBitFrame1.setMutedColor(isFirstImageLeftOrTop ? beanBitFrameFirst.getMutedColor() : beanBitFrameSecond.getMutedColor()); beanBitFrame1.setVibrantColor(isFirstImageLeftOrTop ? beanBitFrameFirst.getVibrantColor() : beanBitFrameSecond.getVibrantColor()); switch (frameModel.getColorCombination()) { case VIBRANT_TO_MUTED: if (beanBitFrame1.isHasGreaterVibrantPopulation()) resultColor = beanBitFrame1.getVibrantColor(); else resultColor = beanBitFrame1.getMutedColor(); break; case MUTED_TO_VIBRANT: if (beanBitFrame1.isHasGreaterVibrantPopulation()) resultColor = beanBitFrame1.getMutedColor(); else resultColor = beanBitFrame1.getVibrantColor(); break; default: throw new FrameException("could not found color combination"); } bindingShadeTwo.setFirstImageBgColor(resultColor); bindingShadeTwo.setFirstCommentBgColor(Utils.getColorWithTransparency(resultColor, frameModel.getCommentTransparencyPercent())); beanBitFrame1.setPrimaryCount(isFirstImageLeftOrTop ? beanBitFrameFirst.getPrimaryCount() : beanBitFrameSecond.getPrimaryCount()); beanBitFrame1.setSecondaryCount(isFirstImageLeftOrTop ? 
beanBitFrameFirst.getSecondaryCount() : beanBitFrameSecond.getSecondaryCount()); } BindingShadeTwo.setLayoutWidth(imageView2, beanShade2.getWidth2()); BindingShadeTwo.setLayoutHeight(imageView2, beanShade2.getHeight2()); if (!hasImageProperties) { Palette.from(isFirstImageLeftOrTop ? bitmap2 : bitmap1).generate(new PaletteListener(2, this)); BindingShadeTwo.setBitmap(imageView2, isFirstImageLeftOrTop ? bitmap2 : bitmap1); } else { //set bean properties int resultColor = 0; beanBitFrame2.setHasGreaterVibrantPopulation(isFirstImageLeftOrTop ? beanBitFrameSecond.isHasGreaterVibrantPopulation() : beanBitFrameFirst.isHasGreaterVibrantPopulation()); beanBitFrame2.setMutedColor(isFirstImageLeftOrTop ? beanBitFrameSecond.getMutedColor() : beanBitFrameFirst.getMutedColor()); beanBitFrame2.setVibrantColor(isFirstImageLeftOrTop ? beanBitFrameSecond.getVibrantColor() : beanBitFrameFirst.getVibrantColor()); switch (frameModel.getColorCombination()) { case VIBRANT_TO_MUTED: if (beanBitFrame2.isHasGreaterVibrantPopulation()) resultColor = beanBitFrame2.getVibrantColor(); else resultColor = beanBitFrame2.getMutedColor(); break; case MUTED_TO_VIBRANT: if (beanBitFrame2.isHasGreaterVibrantPopulation()) resultColor = beanBitFrame2.getMutedColor(); else resultColor = beanBitFrame2.getVibrantColor(); break; default: throw new FrameException("could not found color combination"); } bindingShadeTwo.setSecondImageBgColor(resultColor); bindingShadeTwo.setSecondCommentBgColor(Utils.getColorWithTransparency(resultColor, frameModel.getCommentTransparencyPercent())); beanBitFrame2.setPrimaryCount(isFirstImageLeftOrTop ? beanBitFrameSecond.getPrimaryCount() : beanBitFrameFirst.getPrimaryCount()); beanBitFrame2.setSecondaryCount(isFirstImageLeftOrTop ? 
beanBitFrameSecond.getSecondaryCount() : beanBitFrameFirst.getSecondaryCount()); } bindingShadeTwo.setAddVisibility(true); if (beanShade2.isAddInLayout()) { if (totalImages > 2) { bindingShadeTwo.setAddAsCounter(true); bindingShadeTwo.setAddText("+" + (totalImages - 2)); //text color //bg } else { bindingShadeTwo.setAddAsCounter(false); bindingShadeTwo.setAddText("+"); //text color //bg isAddViewVisible = true; } } else { if (totalImages > 2) { bindingShadeTwo.setCounterVisibility(true); bindingShadeTwo.setCounterText((totalImages - 2 > frameModel.getMaxExtraCount() ? (frameModel.getMaxExtraCount() + "+") : ("+" + (totalImages - 2)))); } } bindingShadeTwo.setFirstImageScaleType(frameModel.getScaleType()); //bg bindingShadeTwo.setSecondImageScaleType(frameModel.getScaleType()); //bg bindingShadeTwo.setFirstComment(isFirstImageLeftOrTop ? beanImages.get(0).getImageComment() : beanImages.get(1).getImageComment()); bindingShadeTwo.setFirstCommentVisibility(frameModel.isShouldShowComment()); //bg bindingShadeTwo.setSecondComment(isFirstImageLeftOrTop ? beanImages.get(1).getImageComment() : beanImages.get(0).getImageComment()); bindingShadeTwo.setSecondCommentVisibility(frameModel.isShouldShowComment()); //bg //no need to add or remove from list /*imageLink1 = isFirstImageLeftOrTop ? beanImages.get(0).getImageLink() : beanImages.get(1).getImageLink(); imageLink2 = isFirstImageLeftOrTop ? 
beanImages.get(1).getImageLink() : beanImages.get(0).getImageLink();*/ BeanImage beanImage1 = null; BeanImage beanImage2 = null; if (isFirstImageLeftOrTop) { beanImage1 = beanImages.get(0); beanImage2 = beanImages.get(1); } else { beanImage1 = beanImages.get(1); beanImage2 = beanImages.get(0); } imageLink1 = beanImage1.getImageLink(); int firstPrimaryCount = beanImage1.getPrimaryCount(); int firstSecondaryCount = beanImage1.getSecondaryCount(); imageLink2 = beanImage2.getImageLink(); int secondPrimaryCount = beanImage2.getPrimaryCount(); int secondSecondaryCount = beanImage2.getSecondaryCount(); if (isVertLayout) addImageView(root, isFirstImageLeftOrTop ? beanShade2.getWidth1() : beanShade2.getWidth2(), beanShade2.getHeight1() + beanShade2.getHeight2(), isAddViewVisible); else addImageView(root, isFirstImageLeftOrTop ? beanShade2.getHeight1() : beanShade2.getHeight2(), beanShade2.getWidth1() + beanShade2.getWidth2(), isAddViewVisible); beanBitFrame1.setWidth(/*beanShade2.getWidth1()*/hasImageProperties ? (isFirstImageLeftOrTop ? beanBitFrameFirst.getWidth() : beanBitFrameSecond.getWidth()) : isFirstImageLeftOrTop ? bitmap1.getWidth() : bitmap2.getWidth()); beanBitFrame1.setHeight(/*beanShade2.getHeight1()*/hasImageProperties ? (isFirstImageLeftOrTop ? beanBitFrameFirst.getHeight() : beanBitFrameSecond.getHeight()) : isFirstImageLeftOrTop ? bitmap1.getHeight() : bitmap2.getHeight()); beanBitFrame1.setImageLink(imageLink1); beanBitFrame1.setImageComment(bindingShadeTwo.getFirstComment()); beanBitFrame1.setPrimaryCount(firstPrimaryCount); beanBitFrame1.setSecondaryCount(firstSecondaryCount); beanBitFrame2.setWidth(/*beanShade2.getWidth2()*/hasImageProperties ? (isFirstImageLeftOrTop ? beanBitFrameSecond.getWidth() : beanBitFrameFirst.getWidth()) : isFirstImageLeftOrTop ? bitmap2.getWidth() : bitmap1.getWidth()); beanBitFrame2.setHeight(/*beanShade2.getHeight2()*/hasImageProperties ? (isFirstImageLeftOrTop ? 
beanBitFrameSecond.getHeight() : beanBitFrameFirst.getHeight()) : isFirstImageLeftOrTop ? bitmap2.getHeight() : bitmap1.getHeight()); beanBitFrame2.setImageLink(imageLink2); beanBitFrame2.setImageComment(bindingShadeTwo.getSecondComment()); beanBitFrame2.setPrimaryCount(secondPrimaryCount); beanBitFrame2.setSecondaryCount(secondSecondaryCount); if (hasImageProperties) { //extra properties int defaultColor = Color.parseColor("#ffffffff"); int commentColor = Color.parseColor("#33000000"); int mixedColor = Utils.getMixedArgbColor(bindingShadeTwo.getFirstImageBgColor(), bindingShadeTwo.getSecondImageBgColor()); int inverseColor = Utils.getInverseColor(mixedColor); setColorsToAddMoreView(bindingShadeTwo.getSecondImageBgColor(), mixedColor, inverseColor); frameResult(beanBitFrame1, beanBitFrame2); //bindingShadeTwo.setDividerVisible(Utils.showShowDivider()); bindingShadeTwo.setDividerColor(inverseColor); if (bindingShadeTwo.isAddVisibility()) { if (bindingShadeTwo.isAddAsCounter()) { bindingShadeTwo.setAddTextColor(defaultColor); bindingShadeTwo.setAddBgColor(commentColor); } else { bindingShadeTwo.setAddBgColor(mixedColor); bindingShadeTwo.setAddTextColor(inverseColor); } } final Picasso picasso = getCurrentFramePicasso(); //need to notify ImageShading too, to load image via picasso Utils.logVerbose("IMAGE_LOADING : " + " going to load two image"); if (frameModel.isShouldStoreImages()) { Utils.getPicassoRequestCreator(picasso, imageLink1).fit().centerInside().noPlaceholder().into(imageView1, new Callback() { @Override public void onSuccess() { //do nothing Utils.logVerbose("IMAGE_LOADING success"); } @Override public void onError() { Utils.logVerbose("IMAGE_LOADING error"); Utils.getPicassoRequestCreator(picasso, imageLink1 + "?" 
+ System.currentTimeMillis()).fit().centerInside().noPlaceholder().into(imageView1); } }); Utils.getPicassoRequestCreator(picasso, imageLink2).fit().centerInside().noPlaceholder().into(imageView2, new Callback() { @Override public void onSuccess() { //do nothing Utils.logVerbose("IMAGE_LOADING success"); } @Override public void onError() { Utils.logVerbose("IMAGE_LOADING error"); Utils.getPicassoRequestCreator(picasso, imageLink2 + "?" + System.currentTimeMillis()).fit().centerInside().noPlaceholder().into(imageView2); } }); } else { Utils.getPicassoRequestCreator(picasso, imageLink1).memoryPolicy(MemoryPolicy.NO_STORE) .networkPolicy(NetworkPolicy.NO_STORE).fit().centerInside().noPlaceholder().into(imageView1, new Callback() { @Override public void onSuccess() { //do nothing Utils.logVerbose("IMAGE_LOADING success"); } @Override public void onError() { Utils.logVerbose("IMAGE_LOADING error"); Utils.getPicassoRequestCreator(picasso, imageLink1 + "?" + System.currentTimeMillis()).memoryPolicy(MemoryPolicy.NO_STORE) .networkPolicy(NetworkPolicy.NO_STORE).fit().centerInside().noPlaceholder().into(imageView1); } }); Utils.getPicassoRequestCreator(picasso, imageLink2).memoryPolicy(MemoryPolicy.NO_STORE) .networkPolicy(NetworkPolicy.NO_STORE).fit().centerInside().noPlaceholder().into(imageView2, new Callback() { @Override public void onSuccess() { //do nothing Utils.logVerbose("IMAGE_LOADING success"); } @Override public void onError() { Utils.logVerbose("IMAGE_LOADING error"); Utils.getPicassoRequestCreator(picasso, imageLink2 + "?" 
+ System.currentTimeMillis()).memoryPolicy(MemoryPolicy.NO_STORE) .networkPolicy(NetworkPolicy.NO_STORE).fit().centerInside().noPlaceholder().into(imageView2); } }); } } } @Override public void onImageShadeClick(View view) { switch ((String) view.getTag()) { case "img1": imageClicked(ImageType.VIEW_DOUBLE, 1, imageLink1); break; case "img2": imageClicked(ImageType.VIEW_DOUBLE, 2, imageLink2); break; case "add": addMore(); break; } } @Override protected void onPaletteGenerated(Palette palette, int viewId) throws FrameException { int defaultColor = Color.parseColor("#ffffffff"); int commentColor = Color.parseColor("#33000000"); int resultColor = 0; Palette.Swatch vibrantSwatch = palette.getVibrantSwatch(); Palette.Swatch mutedSwatch = palette.getMutedSwatch(); int vibrantPopulation = vibrantSwatch == null ? 0 : vibrantSwatch.getPopulation(); int mutedPopulation = mutedSwatch == null ? 0 : mutedSwatch.getPopulation(); int vibrantColor = palette.getVibrantColor(defaultColor); int mutedColor = palette.getMutedColor(defaultColor); boolean hasGreaterVibrantPopulation = vibrantPopulation > mutedPopulation; switch (frameModel.getColorCombination()) { case VIBRANT_TO_MUTED: if (hasGreaterVibrantPopulation && vibrantColor > 0) resultColor = vibrantColor; else resultColor = mutedColor; break; case MUTED_TO_VIBRANT: if (hasGreaterVibrantPopulation && mutedColor > 0) resultColor = mutedColor; else resultColor = vibrantColor; break; default: throw new FrameException("could not found color combination"); } Utils.logMessage("vibrant pop = " + vibrantPopulation + " muted pop" + mutedPopulation); switch (viewId) { case 1: bindingShadeTwo.setFirstImageBgColor(resultColor); bindingShadeTwo.setFirstCommentBgColor(Utils.getColorWithTransparency(resultColor, frameModel.getCommentTransparencyPercent())); beanBitFrame1.setMutedColor(mutedColor); beanBitFrame1.setVibrantColor(vibrantColor); beanBitFrame1.setHasGreaterVibrantPopulation(hasGreaterVibrantPopulation); break; case 2: 
bindingShadeTwo.setSecondImageBgColor(resultColor); bindingShadeTwo.setSecondCommentBgColor(Utils.getColorWithTransparency(resultColor, frameModel.getCommentTransparencyPercent())); beanBitFrame2.setMutedColor(mutedColor); beanBitFrame2.setVibrantColor(vibrantColor); beanBitFrame2.setHasGreaterVibrantPopulation(hasGreaterVibrantPopulation); break; default: throw new FrameException("invalid view counter"); } if (ImageShadingTwo.this.resultColor == 0) ImageShadingTwo.this.resultColor = resultColor; else { int mixedColor = Utils.getMixedArgbColor(ImageShadingTwo.this.resultColor, resultColor); beanBitFrame1.setMutedColor(beanBitFrame1.getMutedColor() <= 0 ? mixedColor : beanBitFrame1.getMutedColor()); beanBitFrame1.setVibrantColor(beanBitFrame1.getVibrantColor() <= 0 ? mixedColor : beanBitFrame1.getVibrantColor()); beanBitFrame2.setMutedColor(beanBitFrame2.getMutedColor() <= 0 ? mixedColor : beanBitFrame2.getMutedColor()); beanBitFrame2.setVibrantColor(beanBitFrame2.getVibrantColor() <= 0 ? mixedColor : beanBitFrame2.getVibrantColor()); int inverseColor = Utils.getInverseColor(mixedColor); setColorsToAddMoreView(resultColor, mixedColor, inverseColor); frameResult(beanBitFrame1, beanBitFrame2); //bindingShadeTwo.setDividerVisible(Utils.showShowDivider()); bindingShadeTwo.setDividerColor(inverseColor); if (bindingShadeTwo.isAddVisibility()) { if (bindingShadeTwo.isAddAsCounter()) { bindingShadeTwo.setAddTextColor(defaultColor); bindingShadeTwo.setAddBgColor(commentColor); } else { bindingShadeTwo.setAddBgColor(mixedColor); bindingShadeTwo.setAddTextColor(inverseColor); } } } } }
package org.get.oxicam.clinicalguide.ui;

import java.io.File;

import org.get.oxicam.clinicalguide.ClinicalGuideActivity;
import org.get.oxicam.clinicalguide.FileUtils;
import org.get.oxicam.clinicalguide.R;

import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.graphics.Point;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.support.v4.app.Fragment;
import android.util.Log;
import android.util.TypedValue;
import android.view.Display;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.EditText;
import android.widget.RelativeLayout;
import android.widget.TextView;
import android.widget.Toast;

/**
 * Screen showing the summary of a filled-in form together with a free-text
 * comment field and four actions: view the full detail, cancel, save the
 * report to external storage, and save-and-email it.
 *
 * The layout is built programmatically inside the RelativeLayout inflated from
 * {@code R.layout.summaryscreen_list_item}. Small literal view ids are
 * assigned so that layout rules can reference sibling views and so the click
 * listeners can find the comment field again via
 * {@code mActivity.findViewById(...)}.
 *
 * Expected fragment arguments (set by the caller — TODO confirm against the
 * launching code): "summary" (display text), "details" (plain-text report),
 * "html" (HTML report body) and "filename" (report file name).
 */
public class SummaryScreenFragment extends Fragment {

    /** View id assigned to the comment EditText; listeners look it up by this id. */
    private static final int COMMENT_VIEW_ID = 4;

    private ClinicalGuideActivity mActivity;

    // private SimpleAdapter mAdapter;

    public View onCreateView(LayoutInflater inflater, ViewGroup container,
            Bundle savedInstanceState) {
        mActivity = (ClinicalGuideActivity) getActivity();
        mActivity.setTitle("Summary");

        View v = inflater.inflate(R.layout.summaryscreen_list_item, container, false);
        RelativeLayout rl = (RelativeLayout) v.findViewById(R.id.layout);
        rl.setPadding(10, 10, 10, 10); // setPadding for entire screen

        Object arg = getArguments().get("summary"); // get the contents

        Display display = mActivity.getWindowManager().getDefaultDisplay();
        Point size = new Point();
        display.getSize(size);

        // Summary text view at the top of the screen.
        TextView summaryTv = new TextView(mActivity);
        summaryTv.setId(1);
        summaryTv.setHint("This is Summary of a form");
        summaryTv.setText((String) arg); // show the summary data passed in the arguments
        summaryTv.setTextSize(TypedValue.COMPLEX_UNIT_SP, 18); // setTextSize
        rl.addView(summaryTv);

        // Comment input placed below the summary, capped at roughly half the
        // screen height so the buttons stay visible.
        EditText commentET = new EditText(mActivity);
        commentET.setId(COMMENT_VIEW_ID);
        commentET.setHint("Comments");
        commentET.setMaxHeight(size.y / 2 - 100);
        rl.addView(commentET);
        RelativeLayout.LayoutParams params =
                (RelativeLayout.LayoutParams) commentET.getLayoutParams();
        params.addRule(RelativeLayout.BELOW, summaryTv.getId());
        params.topMargin = 10;
        params.width = RelativeLayout.LayoutParams.MATCH_PARENT;

        // "View Detail" button: appends the current comment to the details text
        // and opens the detail screen.
        Button viewDetailBtn = new Button(mActivity);
        viewDetailBtn.setId(3);
        viewDetailBtn.setText("View Detail");
        viewDetailBtn.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Bundle b = new Bundle();
                String details = getArguments().getString("details");
                details += "\n\nComments";
                details += "\n============";
                EditText comment = (EditText) mActivity.findViewById(COMMENT_VIEW_ID);
                details += "\n" + comment.getText().toString();
                b.putString("detail", details);
                Fragment frag = Fragment.instantiate(mActivity,
                        ViewDetailScreenFragment.class.getName());
                frag.setArguments(b);
                mActivity.setContent(frag);
            }
        });
        rl.addView(viewDetailBtn);
        params = (RelativeLayout.LayoutParams) viewDetailBtn.getLayoutParams();
        params.addRule(RelativeLayout.BELOW, commentET.getId());
        params.topMargin = 10;
        params.bottomMargin = 10;
        params.addRule(RelativeLayout.ALIGN_PARENT_RIGHT);

        // Cancel button: confirm before navigating back.
        Button cancelBtn = new Button(mActivity);
        cancelBtn.setId(5);
        cancelBtn.setText("Cancel");
        cancelBtn.setCompoundDrawablesWithIntrinsicBounds(
                R.drawable.navigation_cancel, 0, 0, 0);
        cancelBtn.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                AlertDialog.Builder alert = new AlertDialog.Builder(mActivity);
                alert.setTitle("Confirmation");
                alert.setMessage("Do you really want to cancel?");
                alert.setPositiveButton("Ok", new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        mActivity.onBackPressed();
                    }
                });
                alert.setNegativeButton("Cancel", new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        dialog.cancel();
                    }
                });
                AlertDialog alertDialog = alert.create();
                alertDialog.show();
            }
        });
        rl.addView(cancelBtn);
        params = (RelativeLayout.LayoutParams) cancelBtn.getLayoutParams();
        params.addRule(RelativeLayout.BELOW, viewDetailBtn.getId());
        params.addRule(RelativeLayout.ALIGN_PARENT_RIGHT);

        // Save button: write the report file (shared logic in saveReport()).
        Button okBtn = new Button(mActivity);
        okBtn.setId(6);
        okBtn.setText("Save");
        okBtn.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                saveReport();
            }
        });
        okBtn.setCompoundDrawablesWithIntrinsicBounds(R.drawable.content_save, 0, 0, 0);
        rl.addView(okBtn);
        params = (RelativeLayout.LayoutParams) okBtn.getLayoutParams();
        params.addRule(RelativeLayout.BELOW, viewDetailBtn.getId());
        params.addRule(RelativeLayout.LEFT_OF, cancelBtn.getId());

        // "Save and Email" button: save first, then share the written file.
        Button emailButton = new Button(mActivity);
        emailButton.setId(7);
        emailButton.setText("Save and Email");
        emailButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                String reportName = saveReport();
                Intent emailIntent = new Intent(Intent.ACTION_SEND);
                emailIntent.setType("application/octet-stream");
                File file = new File(Environment.getExternalStorageDirectory()
                        + "/oxicam/" + reportName);
                Log.w("hi", file.exists() + "");
                emailIntent.putExtra(Intent.EXTRA_STREAM, Uri.fromFile(file));
                emailIntent.putExtra(Intent.EXTRA_SUBJECT, reportName);
                mActivity.startActivity(Intent.createChooser(emailIntent, "Send With..."));
            }
        });
        emailButton.setCompoundDrawablesWithIntrinsicBounds(
                R.drawable.content_new_attachment, 0, 0, 0);
        rl.addView(emailButton);
        params = (RelativeLayout.LayoutParams) emailButton.getLayoutParams();
        params.addRule(RelativeLayout.BELOW, viewDetailBtn.getId());
        params.addRule(RelativeLayout.LEFT_OF, okBtn.getId());

        return v;
    }

    /**
     * Appends the user's comment to the HTML report from the fragment
     * arguments, writes it to external storage via {@link FileUtils#writeFile}
     * and shows a confirmation toast.
     *
     * This logic previously existed twice, verbatim, in the Save and the
     * Save-and-Email click listeners; both also built an unused plain-text
     * "data" string which has been removed as dead code.
     *
     * @return the report file name from the "filename" argument
     */
    private String saveReport() {
        Bundle b = getArguments();
        String html = b.getString("html");
        html += "<div title=\"comments\">\n";
        EditText comment = (EditText) mActivity.findViewById(COMMENT_VIEW_ID);
        String reportName = b.getString("filename");
        html += comment.getText().toString();
        html += "\n</div>\n";
        Log.w("hi", Environment.getExternalStorageDirectory() + "/oxicam/" + reportName);
        FileUtils.writeFile(html, reportName);
        Toast.makeText(mActivity, "Success saving files", Toast.LENGTH_SHORT).show();
        return reportName;
    }
}
/*
 *  Copyright 2001-2005 Stephen Colebourne
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */
package org.joda.time.field;

import java.math.RoundingMode;

import junit.framework.TestCase;
import junit.framework.TestSuite;

/**
 * Unit tests for the overflow-safe arithmetic helpers in FieldUtils
 * (safeAdd, safeSubtract, safeMultiply, safeDivide). Each test exercises
 * normal values, the extreme int/long boundaries, and verifies that
 * operations which would overflow (or divide by zero) throw
 * ArithmeticException instead of silently wrapping.
 *
 * @author Brian S O'Neill
 */
public class TestFieldUtils extends TestCase {
    public static void main(String[] args) {
        junit.textui.TestRunner.run(suite());
    }

    public static TestSuite suite() {
        return new TestSuite(TestFieldUtils.class);
    }

    public TestFieldUtils(String name) {
        super(name);
    }

    /** safeAdd(int, int): plain sums, boundary sums, and overflow rejection. */
    public void testSafeAddInt() {
        assertEquals(0, FieldUtils.safeAdd(0, 0));
        assertEquals(5, FieldUtils.safeAdd(2, 3));
        assertEquals(-1, FieldUtils.safeAdd(2, -3));
        assertEquals(1, FieldUtils.safeAdd(-2, 3));
        assertEquals(-5, FieldUtils.safeAdd(-2, -3));

        // Sums that graze the int boundaries but do not overflow.
        assertEquals(Integer.MAX_VALUE - 1, FieldUtils.safeAdd(Integer.MAX_VALUE, -1));
        assertEquals(Integer.MIN_VALUE + 1, FieldUtils.safeAdd(Integer.MIN_VALUE, 1));
        assertEquals(-1, FieldUtils.safeAdd(Integer.MIN_VALUE, Integer.MAX_VALUE));
        assertEquals(-1, FieldUtils.safeAdd(Integer.MAX_VALUE, Integer.MIN_VALUE));

        // Sums that overflow must throw rather than wrap around.
        try {
            FieldUtils.safeAdd(Integer.MAX_VALUE, 1);
            fail();
        } catch (ArithmeticException e) {
        }

        try {
            FieldUtils.safeAdd(Integer.MAX_VALUE, 100);
            fail();
        } catch (ArithmeticException e) {
        }

        try {
            FieldUtils.safeAdd(Integer.MAX_VALUE, Integer.MAX_VALUE);
            fail();
        } catch (ArithmeticException e) {
        }

        try {
            FieldUtils.safeAdd(Integer.MIN_VALUE, -1);
            fail();
        } catch (ArithmeticException e) {
        }

        try {
            FieldUtils.safeAdd(Integer.MIN_VALUE, -100);
            fail();
        } catch (ArithmeticException e) {
        }

        try {
            FieldUtils.safeAdd(Integer.MIN_VALUE, Integer.MIN_VALUE);
            fail();
        } catch (ArithmeticException e) {
        }
    }

    /** safeAdd(long, long): same pattern as the int variant at long range. */
    public void testSafeAddLong() {
        assertEquals(0L, FieldUtils.safeAdd(0L, 0L));
        assertEquals(5L, FieldUtils.safeAdd(2L, 3L));
        assertEquals(-1L, FieldUtils.safeAdd(2L, -3L));
        assertEquals(1L, FieldUtils.safeAdd(-2L, 3L));
        assertEquals(-5L, FieldUtils.safeAdd(-2L, -3L));

        // Boundary sums that remain representable.
        assertEquals(Long.MAX_VALUE - 1, FieldUtils.safeAdd(Long.MAX_VALUE, -1L));
        assertEquals(Long.MIN_VALUE + 1, FieldUtils.safeAdd(Long.MIN_VALUE, 1L));
        assertEquals(-1, FieldUtils.safeAdd(Long.MIN_VALUE, Long.MAX_VALUE));
        assertEquals(-1, FieldUtils.safeAdd(Long.MAX_VALUE, Long.MIN_VALUE));

        // Overflowing sums must throw.
        try {
            FieldUtils.safeAdd(Long.MAX_VALUE, 1L);
            fail();
        } catch (ArithmeticException e) {
        }

        try {
            FieldUtils.safeAdd(Long.MAX_VALUE, 100L);
            fail();
        } catch (ArithmeticException e) {
        }

        try {
            FieldUtils.safeAdd(Long.MAX_VALUE, Long.MAX_VALUE);
            fail();
        } catch (ArithmeticException e) {
        }

        try {
            FieldUtils.safeAdd(Long.MIN_VALUE, -1L);
            fail();
        } catch (ArithmeticException e) {
        }

        try {
            FieldUtils.safeAdd(Long.MIN_VALUE, -100L);
            fail();
        } catch (ArithmeticException e) {
        }

        try {
            FieldUtils.safeAdd(Long.MIN_VALUE, Long.MIN_VALUE);
            fail();
        } catch (ArithmeticException e) {
        }
    }

    /** safeSubtract(long, long): plain differences, boundaries, and overflow. */
    public void testSafeSubtractLong() {
        assertEquals(0L, FieldUtils.safeSubtract(0L, 0L));
        assertEquals(-1L, FieldUtils.safeSubtract(2L, 3L));
        assertEquals(5L, FieldUtils.safeSubtract(2L, -3L));
        assertEquals(-5L, FieldUtils.safeSubtract(-2L, 3L));
        assertEquals(1L, FieldUtils.safeSubtract(-2L, -3L));

        // Boundary differences that remain representable.
        assertEquals(Long.MAX_VALUE - 1, FieldUtils.safeSubtract(Long.MAX_VALUE, 1L));
        assertEquals(Long.MIN_VALUE + 1, FieldUtils.safeSubtract(Long.MIN_VALUE, -1L));
        assertEquals(0, FieldUtils.safeSubtract(Long.MIN_VALUE, Long.MIN_VALUE));
        assertEquals(0, FieldUtils.safeSubtract(Long.MAX_VALUE, Long.MAX_VALUE));

        // Overflowing differences must throw.
        try {
            FieldUtils.safeSubtract(Long.MIN_VALUE, 1L);
            fail();
        } catch (ArithmeticException e) {
        }

        try {
            FieldUtils.safeSubtract(Long.MIN_VALUE, 100L);
            fail();
        } catch (ArithmeticException e) {
        }

        try {
            FieldUtils.safeSubtract(Long.MIN_VALUE, Long.MAX_VALUE);
            fail();
        } catch (ArithmeticException e) {
        }

        try {
            FieldUtils.safeSubtract(Long.MAX_VALUE, -1L);
            fail();
        } catch (ArithmeticException e) {
        }

        try {
            FieldUtils.safeSubtract(Long.MAX_VALUE, -100L);
            fail();
        } catch (ArithmeticException e) {
        }

        try {
            FieldUtils.safeSubtract(Long.MAX_VALUE, Long.MIN_VALUE);
            fail();
        } catch (ArithmeticException e) {
        }
    }

    //-----------------------------------------------------------------------
    /** safeMultiply(long, long): sign combinations, identities, and overflow. */
    public void testSafeMultiplyLongLong() {
        assertEquals(0L, FieldUtils.safeMultiply(0L, 0L));
        assertEquals(1L, FieldUtils.safeMultiply(1L, 1L));
        assertEquals(3L, FieldUtils.safeMultiply(1L, 3L));
        assertEquals(3L, FieldUtils.safeMultiply(3L, 1L));
        assertEquals(6L, FieldUtils.safeMultiply(2L, 3L));
        assertEquals(-6L, FieldUtils.safeMultiply(2L, -3L));
        assertEquals(-6L, FieldUtils.safeMultiply(-2L, 3L));
        assertEquals(6L, FieldUtils.safeMultiply(-2L, -3L));

        // Multiplying by +/-1 at the boundaries; note MIN_VALUE * -1 overflows.
        assertEquals(Long.MAX_VALUE, FieldUtils.safeMultiply(Long.MAX_VALUE, 1L));
        assertEquals(Long.MIN_VALUE, FieldUtils.safeMultiply(Long.MIN_VALUE, 1L));
        assertEquals(-Long.MAX_VALUE, FieldUtils.safeMultiply(Long.MAX_VALUE, -1L));

        try {
            FieldUtils.safeMultiply(Long.MIN_VALUE, -1L);
            fail();
        } catch (ArithmeticException e) {
        }

        try {
            FieldUtils.safeMultiply(-1L, Long.MIN_VALUE);
            fail();
        } catch (ArithmeticException e) {
        }

        try {
            FieldUtils.safeMultiply(Long.MIN_VALUE, 100L);
            fail();
        } catch (ArithmeticException e) {
        }

        try {
            FieldUtils.safeMultiply(Long.MIN_VALUE, Long.MAX_VALUE);
            fail();
        } catch (ArithmeticException e) {
        }

        try {
            FieldUtils.safeMultiply(Long.MAX_VALUE, Long.MIN_VALUE);
            fail();
        } catch (ArithmeticException e) {
        }
    }

    //-----------------------------------------------------------------------
    /** safeMultiply(long, int): mixed-width overload, including Integer.MIN_VALUE. */
    public void testSafeMultiplyLongInt() {
        assertEquals(0L, FieldUtils.safeMultiply(0L, 0));
        assertEquals(1L, FieldUtils.safeMultiply(1L, 1));
        assertEquals(3L, FieldUtils.safeMultiply(1L, 3));
        assertEquals(3L, FieldUtils.safeMultiply(3L, 1));
        assertEquals(6L, FieldUtils.safeMultiply(2L, 3));
        assertEquals(-6L, FieldUtils.safeMultiply(2L, -3));
        assertEquals(-6L, FieldUtils.safeMultiply(-2L, 3));
        assertEquals(6L, FieldUtils.safeMultiply(-2L, -3));

        // -1L * Integer.MIN_VALUE fits in a long (unlike the pure-int case).
        assertEquals(-1L * Integer.MIN_VALUE, FieldUtils.safeMultiply(-1L, Integer.MIN_VALUE));

        assertEquals(Long.MAX_VALUE, FieldUtils.safeMultiply(Long.MAX_VALUE, 1));
        assertEquals(Long.MIN_VALUE, FieldUtils.safeMultiply(Long.MIN_VALUE, 1));
        assertEquals(-Long.MAX_VALUE, FieldUtils.safeMultiply(Long.MAX_VALUE, -1));

        try {
            FieldUtils.safeMultiply(Long.MIN_VALUE, -1);
            fail();
        } catch (ArithmeticException e) {
        }

        try {
            FieldUtils.safeMultiply(Long.MIN_VALUE, 100);
            fail();
        } catch (ArithmeticException e) {
        }

        try {
            FieldUtils.safeMultiply(Long.MIN_VALUE, Integer.MAX_VALUE);
            fail();
        } catch (ArithmeticException e) {
        }

        try {
            FieldUtils.safeMultiply(Long.MAX_VALUE, Integer.MIN_VALUE);
            fail();
        } catch (ArithmeticException e) {
        }
    }

    //-----------------------------------------------------------------------
    /** safeDivide(long, long): truncating division, sign handling, and errors. */
    public void testSafeDivideLongLong() {
        assertEquals(1L, FieldUtils.safeDivide(1L, 1L));
        assertEquals(1L, FieldUtils.safeDivide(3L, 3L));
        assertEquals(0L, FieldUtils.safeDivide(1L, 3L));
        assertEquals(3L, FieldUtils.safeDivide(3L, 1L));

        // Truncation toward zero for all sign combinations.
        assertEquals(1L, FieldUtils.safeDivide(5L, 3L));
        assertEquals(-1L, FieldUtils.safeDivide(5L, -3L));
        assertEquals(-1L, FieldUtils.safeDivide(-5L, 3L));
        assertEquals(1L, FieldUtils.safeDivide(-5L, -3L));

        assertEquals(2L, FieldUtils.safeDivide(6L, 3L));
        assertEquals(-2L, FieldUtils.safeDivide(6L, -3L));
        assertEquals(-2L, FieldUtils.safeDivide(-6L, 3L));
        assertEquals(2L, FieldUtils.safeDivide(-6L, -3L));

        assertEquals(2L, FieldUtils.safeDivide(7L, 3L));
        assertEquals(-2L, FieldUtils.safeDivide(7L, -3L));
        assertEquals(-2L, FieldUtils.safeDivide(-7L, 3L));
        assertEquals(2L, FieldUtils.safeDivide(-7L, -3L));

        // Boundaries; MIN_VALUE / -1 is the one overflowing division.
        assertEquals(Long.MAX_VALUE, FieldUtils.safeDivide(Long.MAX_VALUE, 1L));
        assertEquals(Long.MIN_VALUE, FieldUtils.safeDivide(Long.MIN_VALUE, 1L));
        assertEquals(-Long.MAX_VALUE, FieldUtils.safeDivide(Long.MAX_VALUE, -1L));

        try {
            FieldUtils.safeDivide(Long.MIN_VALUE, -1L);
            fail();
        } catch (ArithmeticException e) {
        }

        try {
            FieldUtils.safeDivide(1L, 0L);
            fail();
        } catch (ArithmeticException e) {
        }
    }

    //-----------------------------------------------------------------------
    /** safeDivide(long, long, RoundingMode): rounding behaviour and errors. */
    public void testSafeDivideRoundingModeLong() {
        assertEquals(3L, FieldUtils.safeDivide(15L, 5L, RoundingMode.UNNECESSARY));

        // 179 / 3 = 59.666...: FLOOR truncates down, the others round up.
        assertEquals(59L, FieldUtils.safeDivide(179L, 3L, RoundingMode.FLOOR));
        assertEquals(60L, FieldUtils.safeDivide(179L, 3L, RoundingMode.CEILING));
        assertEquals(60L, FieldUtils.safeDivide(179L, 3L, RoundingMode.HALF_UP));
        assertEquals(-60L, FieldUtils.safeDivide(-179L, 3L, RoundingMode.HALF_UP));
        assertEquals(60L, FieldUtils.safeDivide(179L, 3L, RoundingMode.HALF_DOWN));
        assertEquals(-60L, FieldUtils.safeDivide(-179L, 3L, RoundingMode.HALF_DOWN));

        assertEquals(Long.MAX_VALUE, FieldUtils.safeDivide(Long.MAX_VALUE, 1L, RoundingMode.UNNECESSARY));
        assertEquals(Long.MIN_VALUE, FieldUtils.safeDivide(Long.MIN_VALUE, 1L, RoundingMode.UNNECESSARY));
        assertEquals(-Long.MAX_VALUE, FieldUtils.safeDivide(Long.MAX_VALUE, -1L, RoundingMode.UNNECESSARY));

        try {
            FieldUtils.safeDivide(Long.MIN_VALUE, -1L, RoundingMode.UNNECESSARY);
            fail();
        } catch (ArithmeticException e) {
        }

        try {
            FieldUtils.safeDivide(1L, 0L, RoundingMode.UNNECESSARY);
            fail();
        } catch (ArithmeticException e) {
        }
    }
}
/* * Copyright (C) 2017 RTAndroid Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package rtandroid.benchmark.ui.dialogs; import android.app.Activity; import android.app.Dialog; import android.content.DialogInterface; import android.os.Bundle; import android.support.annotation.NonNull; import android.support.v4.app.DialogFragment; import android.support.v7.app.AlertDialog; import android.view.LayoutInflater; import android.view.View; import android.widget.ArrayAdapter; import android.widget.EditText; import android.widget.SeekBar; import android.widget.Spinner; import android.widget.SpinnerAdapter; import android.widget.TextView; import com.google.gson.Gson; import rtandroid.benchmark.utils.RealTimeUtils; import rtandroid.benchmark.R; import rtandroid.benchmark.data.TestCase; /** * Dialog which allows creation and editing of test cases */ public class TestCaseDialog extends DialogFragment implements SeekBar.OnSeekBarChangeListener { private static final String ARG_CASE = "name"; private OnTestCaseUpdateListener mListener; private EditText mName; private SeekBar mPriority; private TextView mPriorityText; private SeekBar mPowerLevel; private TextView mPowerLevelText; private Spinner mCpuLock; private TestCase mOldTestCase; /** * @return New instance of fragment TestCaseDialog to create a test case. */ public static TestCaseDialog newInstance() { return new TestCaseDialog(); } /** * @return New instance of fragment TestCaseDialog filled with given test case. 
*/ public static TestCaseDialog newInstance(TestCase testCase) { // Create argument bundle Gson gson = new Gson(); Bundle args = new Bundle(); args.putString(ARG_CASE, gson.toJson(testCase)); TestCaseDialog fragment = new TestCaseDialog(); fragment.setArguments(args); return fragment; } @NonNull @Override public Dialog onCreateDialog(Bundle savedInstanceState) { LayoutInflater inflater = getActivity().getLayoutInflater(); View v = inflater.inflate(R.layout.dialog_test_case, null); mName = (EditText) v.findViewById(R.id.input_name); mPriority = (SeekBar) v.findViewById(R.id.input_priority); mPriority.setMax(TestCase.PRIORITY_MAX); mPriority.setOnSeekBarChangeListener(this); mPriorityText = (TextView) v.findViewById(R.id.txt_priority); mPowerLevel = (SeekBar) v.findViewById(R.id.input_power_level); mPowerLevel.setMax(TestCase.POWER_LEVEL_MAX); mPowerLevel.setOnSeekBarChangeListener(this); mPowerLevelText = (TextView) v.findViewById(R.id.txt_power_level); Integer[] isolatedCpuIDs = RealTimeUtils.getIsolatedCpus(); String[] values = new String[1 + isolatedCpuIDs.length]; values[0] = "Disabled"; for (int i = 0; i < isolatedCpuIDs.length; i++) { values[i+1] = "Core " + isolatedCpuIDs[i]; } SpinnerAdapter adapter = new ArrayAdapter<String>(getActivity(), android.R.layout.simple_spinner_dropdown_item, values); mCpuLock = (Spinner) v.findViewById(R.id.input_cpu_core); mCpuLock.setAdapter(adapter); // fill with values Bundle args = getArguments(); if (args != null) { Gson gson = new Gson(); String jsonTestCase = args.getString(ARG_CASE); mOldTestCase = gson.fromJson(jsonTestCase, TestCase.class); mName.setText(mOldTestCase.getName()); mPriority.setProgress(mOldTestCase.getRealtimePriority()); mPowerLevel.setProgress(mOldTestCase.getPowerLevel()); int core = mOldTestCase.getCpuCore(); for (int i = 0; i < isolatedCpuIDs.length; i++) { if (isolatedCpuIDs[i] == core) { mCpuLock.setSelection(i+1); // core[0] is disabled break; } } } onProgressChanged(mPriority, 
mPriority.getProgress(), false); onProgressChanged(mPowerLevel, mPowerLevel.getProgress(), false); // Build dialog final AlertDialog dialog = new AlertDialog.Builder(getActivity()) .setTitle(R.string.dialog_test_case_title) .setView(v) .setPositiveButton(android.R.string.ok, null) .setNegativeButton(android.R.string.cancel, null) .create(); dialog.setOnShowListener(new DialogInterface.OnShowListener() { @Override public void onShow(DialogInterface dialogInterface) { dialog.getButton(AlertDialog.BUTTON_POSITIVE).setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { onSubmit(); } }); } }); return dialog; } @Override public void onAttach(Activity activity) { super.onAttach(activity); try { mListener = (OnTestCaseUpdateListener) getTargetFragment(); } catch (ClassCastException e) { throw new ClassCastException(getTargetFragment().toString() + " must implement onTestCaseSaveListener"); } } @Override public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { if (seekBar == mPriority) { String text = (progress == 0) ? "Disabled" : Integer.toString(progress); mPriorityText.setText(text); } else if (seekBar == mPowerLevel) { String text = (progress == 0) ? 
"Disabled" : Integer.toString(progress) + "%"; mPowerLevelText.setText(text); } else { throw new RuntimeException("Unknown seek bar event received"); } } @Override public void onStartTrackingTouch(SeekBar seekBar) { // Nothing to do } @Override public void onStopTrackingTouch(SeekBar seekBar) { // Nothing to do } private void onSubmit() { if (mListener != null) { if (mName.getText().toString().isEmpty()) { mName.setError("Please enter a name"); return; } // Pass a value to listener TestCase newTestCase = new TestCase(mName.getText().toString(), TestCase.NO_PRIORITY, TestCase.NO_POWER_LEVEL, TestCase.NO_CORE_LOCK); newTestCase.setCpuCore((int) mCpuLock.getSelectedItemId()); if (mPriority.getProgress() != 0) { newTestCase.setPriority(mPriority.getProgress()); } if (mPowerLevel.getProgress() != 0) { newTestCase.setPowerLevel(mPowerLevel.getProgress()); } mListener.onTestCaseUpdated(mOldTestCase, newTestCase); } dismiss(); } /** * This interface must be implemented by target fragments that show this dialog to allow an passing of chosen value. */ public interface OnTestCaseUpdateListener { void onTestCaseUpdated(TestCase oldTestCase, TestCase newTestCase); } }
package denominator.route53;

import com.squareup.okhttp.mockwebserver.MockResponse;
import org.junit.Rule;
import org.junit.Test;
import java.util.Iterator;
import denominator.model.ResourceRecordSet;
import denominator.model.profile.Weighted;
import denominator.model.rdata.CNAMEData;
import denominator.profile.WeightedResourceRecordSetApi;
import static denominator.assertj.ModelAssertions.assertThat;

/**
 * Mock-server tests for {@link WeightedResourceRecordSetApi} against Route53.
 *
 * <p>Each test enqueues canned Route53 XML responses on {@link MockRoute53Server}, drives the
 * API under test, then verifies both the returned model objects and the exact HTTP requests
 * (path, method, and change-batch XML body) the implementation issued.
 */
public class Route53WeightedResourceRecordSetApiMockTest {

  /** Fresh mock Route53 endpoint for every test; also records requests for assertions. */
  @Rule
  public MockRoute53Server server = new MockRoute53Server();

  /** Iterating by name yields both weighted record sets for that name, in response order. */
  @Test
  public void iterateByNameWhenPresent() throws Exception {
    server.enqueue(new MockResponse().setBody(twoRecords));

    WeightedResourceRecordSetApi api = server.connect().api().weightedRecordSetsInZone("Z1PA6795");
    Iterator<ResourceRecordSet<?>> iterator = api.iterateByName("www.denominator.io.");

    // First element equals the shared fixture exactly; second is checked field-by-field.
    assertThat(iterator.next()).isEqualTo(rrset1);
    assertThat(iterator.next())
        .hasName("www.denominator.io.")
        .hasType("CNAME")
        .hasQualifier("MyService-West")
        .hasTtl(0)
        .hasWeight(5)
        .containsExactlyRecords(CNAMEData.create("www2.denominator.io."));

    server.assertRequest()
        .hasPath("/2012-12-12/hostedzone/Z1PA6795/rrset?name=www.denominator.io.");
  }

  /** An empty listing produces an empty iterator (never null). */
  @Test
  public void iterateByNameWhenAbsent() throws Exception {
    server.enqueue(new MockResponse().setBody(noRecords));

    WeightedResourceRecordSetApi api = server.connect().api().weightedRecordSetsInZone("Z1PA6795");
    assertThat(api.iterateByName("www.denominator.io.")).isEmpty();

    server.assertRequest()
        .hasPath("/2012-12-12/hostedzone/Z1PA6795/rrset?name=www.denominator.io.");
  }

  /** Name+type iteration filters by type and includes the matching fixture. */
  @Test
  public void iterateByNameAndTypeWhenPresent() throws Exception {
    server.enqueue(new MockResponse().setBody(twoRecords));

    WeightedResourceRecordSetApi api = server.connect().api().weightedRecordSetsInZone("Z1PA6795");
    assertThat(api.iterateByNameAndType("www.denominator.io.", "CNAME")).contains(rrset1);

    server.assertRequest()
        .hasPath("/2012-12-12/hostedzone/Z1PA6795/rrset?name=www.denominator.io.&type=CNAME");
  }

  /** Name+type iteration on an empty listing is empty. */
  @Test
  public void iterateByNameAndTypeWhenAbsent() throws Exception {
    server.enqueue(new MockResponse().setBody(noRecords));

    WeightedResourceRecordSetApi api = server.connect().api().weightedRecordSetsInZone("Z1PA6795");
    assertThat(api.iterateByNameAndType("www.denominator.io.", "CNAME")).isEmpty();

    server.assertRequest()
        .hasPath("/2012-12-12/hostedzone/Z1PA6795/rrset?name=www.denominator.io.&type=CNAME");
  }

  /** Lookup by name+type+qualifier returns the exact matching record set. */
  @Test
  public void getByNameTypeAndQualifierWhenPresent() throws Exception {
    server.enqueue(new MockResponse().setBody(twoRecords));

    WeightedResourceRecordSetApi api = server.connect().api().weightedRecordSetsInZone("Z1PA6795");
    assertThat(api.getByNameTypeAndQualifier("www.denominator.io.", "CNAME", "MyService-East"))
        .isEqualTo(rrset1);

    server.assertRequest()
        .hasPath(
            "/2012-12-12/hostedzone/Z1PA6795/rrset?name=www.denominator.io.&type=CNAME&identifier=MyService-East");
  }

  /** Lookup by name+type+qualifier returns null (not an exception) when absent. */
  @Test
  public void getByNameTypeAndQualifierWhenAbsent() throws Exception {
    server.enqueue(new MockResponse().setBody(noRecords));

    WeightedResourceRecordSetApi api = server.connect().api().weightedRecordSetsInZone("Z1PA6795");
    assertThat(api.getByNameTypeAndQualifier("www.denominator.io.", "CNAME", "MyService-East"))
        .isNull();

    server.assertRequest()
        .hasPath(
            "/2012-12-12/hostedzone/Z1PA6795/rrset?name=www.denominator.io.&type=CNAME&identifier=MyService-East");
  }

  /** put on an absent record set issues a CREATE change batch after the existence check. */
  @Test
  public void putRecordSet() throws Exception {
    server.enqueue(new MockResponse().setBody(noRecords));
    server.enqueue(new MockResponse().setBody(changeSynced));

    WeightedResourceRecordSetApi api = server.connect().api().weightedRecordSetsInZone("Z1PA6795");
    api.put(rrset1);

    // Request 1: read-before-write existence check.
    server.assertRequest()
        .hasPath(
            "/2012-12-12/hostedzone/Z1PA6795/rrset?name=www.denominator.io.&type=CNAME&identifier=MyService-East");
    // Request 2: the CREATE change batch.
    server.assertRequest()
        .hasMethod("POST")
        .hasPath("/2012-12-12/hostedzone/Z1PA6795/rrset")
        .hasXMLBody(
            "<ChangeResourceRecordSetsRequest xmlns=\"https://route53.amazonaws.com/doc/2012-12-12/\">\n"
            + " <ChangeBatch>\n"
            + " <Changes>\n"
            + " <Change>\n"
            + " <Action>CREATE</Action>\n"
            + " <ResourceRecordSet>\n"
            + " <Name>www.denominator.io.</Name>\n"
            + " <Type>CNAME</Type>\n"
            + " <SetIdentifier>MyService-East</SetIdentifier>\n"
            + " <Weight>1</Weight>\n"
            + " <TTL>0</TTL>\n"
            + " <ResourceRecords>\n"
            + " <ResourceRecord>\n"
            + " <Value>www1.denominator.io.</Value>\n"
            + " </ResourceRecord>\n"
            + " </ResourceRecords>\n"
            + " </ResourceRecordSet>\n"
            + " </Change>\n"
            + " </Changes>\n"
            + " </ChangeBatch>\n"
            + "</ChangeResourceRecordSetsRequest>");
  }

  /** put is a no-op (single GET, no POST) when the stored record set already equals the input. */
  @Test
  public void putRecordSetSkipsWhenEqual() throws Exception {
    server.enqueue(new MockResponse().setBody(oneRecord));

    WeightedResourceRecordSetApi api = server.connect().api().weightedRecordSetsInZone("Z1PA6795");
    api.put(rrset1);

    server.assertRequest()
        .hasPath(
            "/2012-12-12/hostedzone/Z1PA6795/rrset?name=www.denominator.io.&type=CNAME&identifier=MyService-East");
  }

  /** Deleting one qualifier issues a DELETE batch for only that record set. */
  @Test
  public void deleteDoesntAffectOtherQualifiers() throws Exception {
    server.enqueue(new MockResponse().setBody(twoRecords));
    server.enqueue(new MockResponse().setBody(changeSynced));

    WeightedResourceRecordSetApi api = server.connect().api().weightedRecordSetsInZone("Z1PA6795");
    api.deleteByNameTypeAndQualifier("www.denominator.io.", "CNAME", "MyService-East");

    server.assertRequest()
        .hasPath(
            "/2012-12-12/hostedzone/Z1PA6795/rrset?name=www.denominator.io.&type=CNAME&identifier=MyService-East");
    server.assertRequest()
        .hasMethod("POST")
        .hasPath("/2012-12-12/hostedzone/Z1PA6795/rrset")
        .hasXMLBody(
            "<ChangeResourceRecordSetsRequest xmlns=\"https://route53.amazonaws.com/doc/2012-12-12/\">\n"
            + " <ChangeBatch>\n"
            + " <Changes>\n"
            + " <Change>\n"
            + " <Action>DELETE</Action>\n"
            + " <ResourceRecordSet>\n"
            + " <Name>www.denominator.io.</Name>\n"
            + " <Type>CNAME</Type>\n"
            + " <SetIdentifier>MyService-East</SetIdentifier>\n"
            + " <Weight>1</Weight>\n"
            + " <TTL>0</TTL>\n"
            + " <ResourceRecords>\n"
            + " <ResourceRecord>\n"
            + " <Value>www1.denominator.io.</Value>\n"
            + " </ResourceRecord>\n"
            + " </ResourceRecords>\n"
            + " </ResourceRecordSet>\n"
            + " </Change>\n"
            + " </Changes>\n"
            + " </ChangeBatch>\n"
            + "</ChangeResourceRecordSetsRequest>");
  }

  /** Deleting a qualifier that is not present does nothing beyond the existence check. */
  @Test
  public void deleteAbsentRRSDoesNothing() throws Exception {
    server.enqueue(new MockResponse().setBody(oneRecord));

    WeightedResourceRecordSetApi api = server.connect().api().weightedRecordSetsInZone("Z1PA6795");
    api.deleteByNameTypeAndQualifier("www.denominator.io.", "CNAME", "MyService-West");

    server.assertRequest()
        .hasPath(
            "/2012-12-12/hostedzone/Z1PA6795/rrset?name=www.denominator.io.&type=CNAME&identifier=MyService-West");
  }

  // Canned Route53 responses. These are constants shared by all tests, so they are
  // static final rather than mutable per-instance fields.

  /** ListResourceRecordSets response with no record sets. */
  private static final String noRecords =
      "<ListResourceRecordSetsResponse><ResourceRecordSets></ResourceRecordSets></ListResourceRecordSetsResponse>";

  /** ListResourceRecordSets response containing only the MyService-East record set. */
  private static final String oneRecord =
      "<ListResourceRecordSetsResponse><ResourceRecordSets><ResourceRecordSet><Name>www.denominator.io.</Name><Type>CNAME</Type><SetIdentifier>MyService-East</SetIdentifier><Weight>1</Weight><TTL>0</TTL><ResourceRecords><ResourceRecord><Value>www1.denominator.io.</Value></ResourceRecord></ResourceRecords></ResourceRecordSet></ResourceRecordSets></ListResourceRecordSetsResponse>";

  /** ListResourceRecordSets response with both MyService-East (weight 1) and MyService-West (weight 5). */
  private static final String twoRecords =
      "<ListResourceRecordSetsResponse><ResourceRecordSets><ResourceRecordSet><Name>www.denominator.io.</Name><Type>CNAME</Type><SetIdentifier>MyService-East</SetIdentifier><Weight>1</Weight><TTL>0</TTL><ResourceRecords><ResourceRecord><Value>www1.denominator.io.</Value></ResourceRecord></ResourceRecords></ResourceRecordSet><ResourceRecordSet><Name>www.denominator.io.</Name><Type>CNAME</Type><SetIdentifier>MyService-West</SetIdentifier><Weight>5</Weight><TTL>0</TTL><ResourceRecords><ResourceRecord><Value>www2.denominator.io.</Value></ResourceRecord></ResourceRecords></ResourceRecordSet></ResourceRecordSets></ListResourceRecordSetsResponse>";

  /** GetChange response reporting the change batch as fully propagated (INSYNC). */
  private static final String changeSynced =
      "<GetChangeResponse><ChangeInfo><Id>/change/C2682N5HXP0BZ4</Id><Status>INSYNC</Status><SubmittedAt>2011-09-10T01:36:41.958Z</SubmittedAt></ChangeInfo></GetChangeResponse>";

  /** Model-side twin of the MyService-East record set embedded in {@link #oneRecord}/{@link #twoRecords}. */
  private static final ResourceRecordSet<CNAMEData> rrset1 = ResourceRecordSet.<CNAMEData>builder()
      .name("www.denominator.io.")
      .type("CNAME")
      .qualifier("MyService-East")
      .ttl(0)
      .weighted(Weighted.create(1))
      .add(CNAMEData.create("www1.denominator.io.")).build();
}
package com.psddev.dari.db;

import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.UUID;

import com.psddev.dari.util.ObjectMap;
import com.psddev.dari.util.ObjectUtils;
import com.psddev.dari.util.StorageItem;

/**
 * State value utility methods: conversion of raw, serialized state values
 * (maps, primitives, reference maps) into the Java types declared by an
 * {@link ObjectField}, plus batched resolution of {@code _ref} object
 * references with circular-reference protection.
 */
abstract class StateValueUtils {

    /** Key for the embedded object's unique ID. */
    public static final String ID_KEY = "_id";

    /** Key for the embedded object's type. */
    public static final String TYPE_KEY = "_type";

    /**
     * Key for the reference to the object that should replace the
     * embedded object map.
     */
    public static final String REFERENCE_KEY = "_ref";

    /**
     * Thread local map used for detecting circular references in
     * {@link #resolveReferences}. The outermost call on a thread creates the
     * map and is responsible for removing it in its {@code finally} block;
     * nested calls (including the REFERENTIAL_TEXT converter below) reuse it.
     */
    private static final ThreadLocal<Map<UUID, Object>> CIRCULAR_REFERENCES = new ThreadLocal<Map<UUID, Object>>();

    /**
     * Converts the given {@code object} into an ID if it's a reference.
     *
     * @return the UUID under {@code _ref} when {@code object} is a reference
     *         map, or {@code null} when it isn't a map or has no usable ref.
     */
    public static UUID toIdIfReference(Object object) {
        return object instanceof Map
                ? ObjectUtils.to(UUID.class, ((Map<?, ?>) object).get(REFERENCE_KEY))
                : null;
    }

    /**
     * Converts the given reference map into a lazily-resolvable object, or
     * returns {@code null} when {@code object} is not a reference map.
     *
     * <p>When the referenced type is known and concrete, a placeholder
     * instance flagged {@code REFERENCE_ONLY} is returned without hitting the
     * database; otherwise the object is read eagerly via a query.
     */
    public static Object toObjectIfReference(Database database, Object object) {
        if (object instanceof Map) {
            Map<?, ?> objectMap = (Map<?, ?>) object;
            UUID id = ObjectUtils.to(UUID.class, objectMap.get(REFERENCE_KEY));

            if (id != null) {
                UUID typeId = ObjectUtils.to(UUID.class, objectMap.get(TYPE_KEY));
                ObjectType type;

                if (typeId != null) {
                    type = database.getEnvironment().getTypeById(typeId);
                } else {
                    // _type wasn't a UUID; fall back to treating it as a type name.
                    type = database.getEnvironment().getTypeByName(ObjectUtils.to(String.class, objectMap.get(TYPE_KEY)));
                }

                // Unknown or abstract type: can't create a typed placeholder,
                // so read the real object now.
                if (type == null || type.isAbstract()) {
                    return database.readFirst(Query.from(Object.class).where("_id = ?", id));
                }

                Object reference = type.createObject(id);
                State referenceState = State.getInstance(reference);

                referenceState.setStatus(StateStatus.REFERENCE_ONLY);
                referenceState.setResolveToReferenceOnly(true);
                return reference;
            }
        }

        return null;
    }

    /**
     * Resolves all object references within the given {@code items}.
     *
     * @param parent object whose field contains {@code items}; used for
     *        circular-reference bookkeeping and resolution options.
     * @param field name of the field being resolved; passed through as a
     *        query option (may be {@code null}).
     * @return map of referenced ID to resolved object; entries may map to
     *         {@code null} when the object is missing or filtered out.
     */
    public static Map<UUID, Object> resolveReferences(Database database, Object parent, Iterable<?> items, String field) {
        State parentState = State.getInstance(parent);

        // Reference-only parents get shallow placeholders instead of real reads.
        if (parentState != null && parentState.isResolveToReferenceOnly()) {
            Map<UUID, Object> references = new HashMap<UUID, Object>();

            for (Object item : items) {
                Object itemReference = toObjectIfReference(database, item);

                if (itemReference != null) {
                    references.put(State.getInstance(itemReference).getId(), itemReference);
                }
            }

            return references;
        }

        // For modifications, attribute the resolution to the underlying
        // (non-modification) object sharing the same state, if any.
        if (parent instanceof Modification) {
            for (Object item : parentState.getObjects()) {
                if (!(item instanceof Modification)) {
                    parent = item;
                    break;
                }
            }

            if (parent instanceof Modification) {
                parent = null;
            }
        }

        boolean isFirst = false;

        try {
            // Outermost call on this thread owns (creates and later removes)
            // the circular-reference map.
            Map<UUID, Object> circularReferences = CIRCULAR_REFERENCES.get();

            if (circularReferences == null) {
                isFirst = true;
                circularReferences = new HashMap<UUID, Object>();
                CIRCULAR_REFERENCES.set(circularReferences);
            }

            if (parentState != null) {
                circularReferences.put(parentState.getId(), parent);
            }

            // Find IDs that have not been resolved yet.
            Map<UUID, Object> references = new HashMap<UUID, Object>();
            Set<UUID> unresolvedIds = new HashSet<UUID>();
            Set<UUID> unresolvedTypeIds = new HashSet<UUID>();

            for (Object item : items) {
                UUID id = toIdIfReference(item);

                if (id != null) {
                    if (circularReferences.containsKey(id)) {
                        // Already seen on this thread (possibly a cycle) - reuse.
                        references.put(id, circularReferences.get(id));

                    } else if (parentState != null && parentState.getExtras().containsKey(State.SUB_DATA_STATE_EXTRA_PREFIX + id)) {
                        // Pre-fetched sub-data cached on the parent state.
                        references.put(id, parentState.getExtras().get(State.SUB_DATA_STATE_EXTRA_PREFIX + id));

                    } else {
                        unresolvedIds.add(id);
                        unresolvedTypeIds.add(ObjectUtils.to(UUID.class, ((Map<?, ?>) item).get(TYPE_KEY)));
                    }
                }
            }

            // Fetch unresolved objects and cache them.
            if (!unresolvedIds.isEmpty()) {
                Query<?> query = Query
                        .from(Object.class)
                        .where("_id = ?", unresolvedIds)
                        .using(database)
                        .option(State.REFERENCE_RESOLVING_QUERY_OPTION, parent)
                        .option(State.REFERENCE_FIELD_QUERY_OPTION, field)
                        .option(State.UNRESOLVED_TYPE_IDS_QUERY_OPTION, unresolvedTypeIds);

                if (parentState != null) {
                    if (!parentState.isResolveUsingCache()) {
                        query.setCache(false);
                    }

                    if (parentState.isResolveUsingMaster()) {
                        query.setMaster(true);
                    }
                }

                for (Object object : query.selectAll()) {
                    UUID id = State.getInstance(object).getId();

                    unresolvedIds.remove(id);
                    circularReferences.put(id, object);
                    references.put(id, object);
                }

                // Negative-cache IDs that didn't come back, so repeated
                // references to missing objects don't re-query.
                for (UUID id : unresolvedIds) {
                    circularReferences.put(id, null);
                }
            }

            // Null out objects with visibility restrictions unless the parent
            // explicitly resolves invisible objects.
            for (Iterator<Map.Entry<UUID, Object>> i = references.entrySet().iterator(); i.hasNext();) {
                Map.Entry<UUID, Object> entry = i.next();
                Object object = entry.getValue();

                if ((parentState == null || !parentState.isResolveInvisible())
                        && object != null
                        && !ObjectUtils.isBlank(State.getInstance(object).getRawValue("dari.visibilities"))) {
                    entry.setValue(null);
                }
            }

            return references;

        } finally {
            if (isFirst) {
                CIRCULAR_REFERENCES.remove();
            }
        }
    }

    /** Variant of {@link #resolveReferences(Database, Object, Iterable, String)} without a field name. */
    public static Map<UUID, Object> resolveReferences(Database database, Object parent, Iterable<?> items) {
        return resolveReferences(database, parent, items, null);
    }

    /**
     * Converts the given {@code value} to an instance of the type that
     * matches the given {@code field} and {@code type} and is most
     * commonly used in Java.
     *
     * <p>Conversion failures are not propagated: the offending value and the
     * error are stashed on the object's state under {@code dari.trash*} keys
     * and {@code null} is returned.
     */
    public static Object toJavaValue(
            Database database,
            Object object,
            ObjectField field,
            String type,
            Object value) {

        // null stays null, except metric fields which materialize a Metric below.
        if (value == null && (field == null || !field.isMetric())) {
            return null;
        }

        // Resolve a single reference map in place before converting.
        UUID valueId = toIdIfReference(value);

        if (valueId != null) {
            Map<UUID, Object> references = resolveReferences(database, object, Collections.singleton(value));
            value = references.get(valueId);

            if (value == null) {
                return null;
            }
        }

        if (field == null || type == null) {
            return value;
        }

        // Type strings may be compound, e.g. "list/text": head selects the
        // converter, tail is passed along as the sub-type.
        int slashAt = type.indexOf('/');
        String firstType;
        String subType;

        if (slashAt > -1) {
            firstType = type.substring(0, slashAt);
            subType = type.substring(slashAt + 1);
        } else {
            firstType = type;
            subType = null;
        }

        Converter converter = CONVERTERS.get(firstType);

        if (converter == null) {
            return value;
        }

        try {
            return converter.toJavaValue(database, object, field, subType, value);

        } catch (Exception error) {
            // Don't fail the whole state load; preserve the bad value and the
            // error for later inspection.
            if (object != null) {
                State state = State.getInstance(object);
                String name = field.getInternalName();

                state.put("dari.trash." + name, value);
                state.put("dari.trashError." + name, error.getClass().getName());
                state.put("dari.trashErrorMessage." + name, error.getMessage());
            }

            return null;
        }
    }

    /**
     * Interface that defines how to convert between various
     * representations of a state value.
     */
    private interface Converter {

        Object toJavaValue(
                Database database,
                Object object,
                ObjectField field,
                String subType,
                Object value)
                throws Exception;
    }

    /** Converters keyed by the head segment of the field's internal type string. */
    private static final Map<String, Converter> CONVERTERS;

    static {
        Map<String, Converter> m = new HashMap<String, Converter>();

        // Dates: pass through, interpret numbers as epoch millis, else coerce.
        m.put(ObjectField.DATE_TYPE, new Converter() {
            @Override
            public Object toJavaValue(
                    Database database,
                    Object object,
                    ObjectField field,
                    String subType,
                    Object value) {

                if (value instanceof Date) {
                    return value;

                } else if (value instanceof Number) {
                    return new Date(((Number) value).longValue());

                } else {
                    return ObjectUtils.to(Date.class, value);
                }
            }
        });

        // Files: accept StorageItem, a URL string, or a serialized map
        // (whose "storage" entry selects the storage backend).
        m.put(ObjectField.FILE_TYPE, new Converter() {
            @Override
            public Object toJavaValue(
                    Database database,
                    Object object,
                    ObjectField field,
                    String subType,
                    Object value) {

                if (value instanceof StorageItem) {
                    return value;

                } else if (value instanceof String) {
                    return StorageItem.Static.createUrl((String) value);

                } else if (value instanceof Map) {
                    @SuppressWarnings("unchecked")
                    Map<String, Object> map = (Map<String, Object>) value;
                    StorageItem item = StorageItem.Static.createIn(ObjectUtils.to(String.class, map.get("storage")));

                    new ObjectMap(item).putAll(map);
                    return item;

                } else {
                    throw new IllegalArgumentException();
                }
            }
        });

        // Lists: wrap in a lazily-converting StateValueList.
        m.put(ObjectField.LIST_TYPE, new Converter() {
            @Override
            public Object toJavaValue(
                    Database database,
                    Object object,
                    ObjectField field,
                    String subType,
                    Object value) {

                if (value instanceof StateValueList) {
                    return value;

                } else {
                    Iterable<?> iterable = ObjectUtils.to(Iterable.class, value);
                    return new StateValueList(database, object, field, subType, iterable);
                }
            }
        });

        // Locations: accept Location or an {x, y} map.
        m.put(ObjectField.LOCATION_TYPE, new Converter() {
            @Override
            public Object toJavaValue(
                    Database database,
                    Object object,
                    ObjectField field,
                    String subType,
                    Object value) {

                if (value instanceof Location) {
                    return value;

                } else if (value instanceof Map) {
                    Map<?, ?> map = (Map<?, ?>) value;
                    Double x = ObjectUtils.to(Double.class, map.get("x"));
                    Double y = ObjectUtils.to(Double.class, map.get("y"));

                    if (x != null && y != null) {
                        return new Location(x, y);
                    }
                }

                throw new IllegalArgumentException();
            }
        });

        // Regions: parse GeoJSON plus the non-standard "circles" extension.
        m.put(ObjectField.REGION_TYPE, new Converter() {
            @Override
            public Object toJavaValue(
                    Database database,
                    Object object,
                    ObjectField field,
                    String subType,
                    Object value) {

                if (value instanceof Region) {
                    return value;

                } else if (value instanceof Map) {
                    @SuppressWarnings("unchecked")
                    Map<String, Object> map = (Map<String, Object>) value;
                    Region region = Region.parseGeoJson(map);

                    Region.parseCircles(region, (List<List<Double>>) map.get("circles"));
                    return region;
                }

                throw new IllegalArgumentException();
            }
        });

        // Maps: wrap in a lazily-converting StateValueMap.
        m.put(ObjectField.MAP_TYPE, new Converter() {
            @Override
            public Object toJavaValue(
                    Database database,
                    Object object,
                    ObjectField field,
                    String subType,
                    Object value) {

                if (value instanceof StateValueMap) {
                    return value;

                } else if (value instanceof Map) {
                    @SuppressWarnings("unchecked")
                    Map<String, Object> map = (Map<String, Object>) value;
                    return new StateValueMap(database, object, field, subType, map);

                } else {
                    throw new IllegalArgumentException();
                }
            }
        });

        // Metrics: the stored value is ignored; a Metric view over the
        // owning state/field is materialized instead.
        m.put(ObjectField.METRIC_TYPE, new Converter() {
            @Override
            public Object toJavaValue(
                    Database database,
                    Object object,
                    ObjectField field,
                    String subType,
                    Object value) {

                if (value instanceof Metric) {
                    return value;

                } else {
                    Metric metric = new Metric(State.getInstance(object), field);
                    return metric;
                }
            }
        });

        // Records: embedded object map (typed via _type/_id) or a bare ID.
        m.put(ObjectField.RECORD_TYPE, new Converter() {
            @Override
            public Object toJavaValue(
                    Database database,
                    Object object,
                    ObjectField field,
                    String subType,
                    Object value) {

                if (value instanceof Recordable) {
                    return value;

                } else if (value instanceof Map) {
                    @SuppressWarnings("unchecked")
                    Map<String, Object> valueMap = (Map<String, Object>) value;
                    Object typeId = valueMap.get(TYPE_KEY);

                    if (typeId != null) {
                        State objectState = State.getInstance(object);
                        DatabaseEnvironment environment = objectState.getDatabase().getEnvironment();
                        ObjectType valueType = environment.getTypeById(ObjectUtils.to(UUID.class, typeId));

                        if (valueType == null) {
                            // _type wasn't a UUID; try it as a type name.
                            valueType = environment.getTypeByName(ObjectUtils.to(String.class, typeId));
                        }

                        if (valueType != null) {
                            value = valueType.createObject(ObjectUtils.to(UUID.class, valueMap.get(ID_KEY)));
                            State valueState = State.getInstance(value);

                            // Embedded record inherits the parent's resolution flags.
                            valueState.setDatabase(database);
                            valueState.setResolveToReferenceOnly(objectState.isResolveToReferenceOnly());
                            valueState.setResolveInvisible(objectState.isResolveInvisible());
                            valueState.putAll(valueMap);
                            return value;
                        }
                    }

                } else {
                    UUID id = ObjectUtils.to(UUID.class, value);

                    if (id != null) {
                        return Query.findById(Object.class, id);
                    }
                }

                throw new IllegalArgumentException();
            }
        });

        // Referential text: item-by-item add, sharing the thread-local
        // circular-reference map with resolveReferences so embedded
        // references inside the text don't recurse infinitely.
        m.put(ObjectField.REFERENTIAL_TEXT_TYPE, new Converter() {
            @Override
            public Object toJavaValue(
                    Database database,
                    Object object,
                    ObjectField field,
                    String subType,
                    Object value) {

                if (value instanceof ReferentialText) {
                    return value;

                } else {
                    ReferentialText text = new ReferentialText();

                    text.setResolveInvisible(State.getInstance(object).isResolveInvisible());

                    if (value instanceof Iterable) {
                        boolean isFirst = false;

                        try {
                            // Same ownership protocol as resolveReferences:
                            // create the map only if this is the outermost user.
                            Map<UUID, Object> circularReferences = StateValueUtils.CIRCULAR_REFERENCES.get();

                            if (circularReferences == null) {
                                isFirst = true;
                                circularReferences = new HashMap<UUID, Object>();
                                StateValueUtils.CIRCULAR_REFERENCES.set(circularReferences);
                            }

                            if (object != null) {
                                State objectState = State.getInstance(object);
                                circularReferences.put(objectState.getId(), object);
                            }

                            for (Object item : (Iterable<?>) value) {
                                text.add(item);
                            }

                        } finally {
                            if (isFirst) {
                                StateValueUtils.CIRCULAR_REFERENCES.remove();
                            }
                        }

                    } else {
                        text.add(value.toString());
                    }

                    return text;
                }
            }
        });

        // Sets: wrap in a lazily-converting StateValueSet.
        m.put(ObjectField.SET_TYPE, new Converter() {
            @Override
            public Object toJavaValue(
                    Database database,
                    Object object,
                    ObjectField field,
                    String subType,
                    Object value) {

                if (value instanceof StateValueSet) {
                    return value;

                } else {
                    Iterable<?> iterable = ObjectUtils.to(Iterable.class, value);
                    return new StateValueSet(database, object, field, subType, iterable);
                }
            }
        });

        // Text: decode byte[] as UTF-8, coerce to the field's enum class if
        // one is declared, else stringify.
        m.put(ObjectField.TEXT_TYPE, new Converter() {
            @Override
            public Object toJavaValue(
                    Database database,
                    Object object,
                    ObjectField field,
                    String subType,
                    Object value) {

                if (value instanceof byte[]) {
                    value = new String((byte[]) value, StandardCharsets.UTF_8);
                }

                String enumClassName = field.getJavaEnumClassName();
                Class<?> enumClass = ObjectUtils.getClassByName(enumClassName);

                if (enumClass != null && Enum.class.isAssignableFrom(enumClass)) {
                    return ObjectUtils.to(enumClass, value);
                }

                return value.toString();
            }
        });

        m.put(ObjectField.URI_TYPE, new Converter() {
            @Override
            public Object toJavaValue(
                    Database database,
                    Object object,
                    ObjectField field,
                    String subType,
                    Object value)
                    throws URISyntaxException {

                if (value instanceof URI) {
                    return value;

                } else {
                    return new URI(value.toString());
                }
            }
        });

        m.put(ObjectField.URL_TYPE, new Converter() {
            @Override
            public Object toJavaValue(
                    Database database,
                    Object object,
                    ObjectField field,
                    String subType,
                    Object value)
                    throws MalformedURLException {

                if (value instanceof URL) {
                    return value;

                } else {
                    return new URL(value.toString());
                }
            }
        });

        m.put(ObjectField.UUID_TYPE, new Converter() {
            @Override
            public Object toJavaValue(
                    Database database,
                    Object object,
                    ObjectField field,
                    String subType,
                    Object value) {

                if (value instanceof UUID) {
                    return value;

                } else {
                    UUID uuid = ObjectUtils.to(UUID.class, value);

                    if (uuid != null) {
                        return uuid;
                    }
                }

                throw new IllegalArgumentException();
            }
        });

        m.put(ObjectField.LOCALE_TYPE, new Converter() {
            @Override
            public Object toJavaValue(
                    Database database,
                    Object object,
                    ObjectField field,
                    String subType,
                    Object value) {

                if (value instanceof Locale) {
                    return value;

                } else {
                    Locale locale = ObjectUtils.to(Locale.class, value);

                    if (locale != null) {
                        return locale;
                    }
                }

                throw new IllegalArgumentException();
            }
        });

        // "any": pass through, except the sentinel for a missing value.
        m.put(ObjectField.ANY_TYPE, new Converter() {
            @Override
            public Object toJavaValue(
                    Database database,
                    Object object,
                    ObjectField field,
                    String subType,
                    Object value) {

                if (Query.SERIALIZED_MISSING_VALUE.equals(value)) {
                    return Query.MISSING_VALUE;
                }

                return value;
            }
        });

        CONVERTERS = m;
    }
}
/**
 * Copyright (c) 2011 Yahoo! Inc. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License. See accompanying LICENSE file.
 */

package com.yahoo.omid.tso;

import java.io.IOException;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelFuture;
import org.jboss.netty.channel.ChannelFutureListener;
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.channel.ChannelStateEvent;

import com.yahoo.omid.client.SyncAbortCompleteCallback;
import com.yahoo.omid.client.SyncCommitCallback;
import com.yahoo.omid.client.SyncCommitQueryCallback;
import com.yahoo.omid.client.SyncCreateCallback;
import com.yahoo.omid.client.TSOClient;
import com.yahoo.omid.tso.messages.CommitResponse;
import com.yahoo.omid.tso.messages.TimestampResponse;

/**
 * Example of ChannelHandler for the Transaction Client.
 *
 * <p>Drives a benchmark load against the TSO: requests timestamps, turns each
 * timestamp response into a commit request over randomly chosen rows, and
 * keeps up to MAX_IN_FLIGHT transactions outstanding until nbMessage
 * transactions have been issued, printing throughput statistics along the way.
 *
 * <p>NOTE(review): the plain int counters and the commented-out atomic
 * variants suggest all mutation is expected to happen on the single Netty I/O
 * thread for this channel — confirm before calling these methods elsewhere.
 *
 * @author maysam
 */
public class ClientHandler extends TSOClient {

    private static final Log LOG = LogFactory.getLog(ClientHandler.class);

    /**
     * Maximum number of modified rows in each transaction
     */
    static final int MAX_ROW = 20;

    /**
     * The number of rows in database
     */
    static final int DB_SIZE = 20000000;

    private static final long PAUSE_LENGTH = 50; // in ms

    /**
     * Maximum number if outstanding message
     */
    private final int MAX_IN_FLIGHT;

    /**
     * Number of message to do
     */
    private final int nbMessage;

    /**
     * Current rank (decreasing, 0 is the end of the game)
     */
    private int curMessage;

    /**
     * number of outstanding commit requests
     */
    private int outstandingTransactions = 0;

    /**
     * Start date
     */
    private Date startDate = null;

    /**
     * Stop date
     */
    private Date stopDate = null;

    /**
     * Return value for the caller; a single Boolean is offered when the
     * channel closes after all work is done.
     */
    final BlockingQueue<Boolean> answer = new LinkedBlockingQueue<Boolean>();

    private Committed committed = new Committed();
    private Set<Long> aborted = Collections.synchronizedSet(new HashSet<Long>(100000));

    /*
     * For statistial purposes
     */
    // Maps start timestamp -> System.nanoTime() at commit-request submission.
    public ConcurrentHashMap<Long, Long> wallClockTime = new ConcurrentHashMap<Long, Long>();
    public long totalNanoTime = 0;
    public long totalTx = 0;

    private Channel channel;

    // Percentage (0-100) of transactions issued as read-only (no rows).
    private float percentReads;

    /**
     * Method to wait for the final response
     *
     * @return success or not
     */
    public boolean waitForAll() {
        for (;;) {
            try {
                return answer.take();
            } catch (InterruptedException e) {
                // Ignore: keep waiting for the final result.
            }
        }
    }

    /**
     * Constructor
     *
     * @param nbMessage total number of transactions to issue; must be >= 0
     * @param inflight maximum number of concurrently outstanding transactions
     * @throws IOException
     */
    public ClientHandler(Configuration conf, int nbMessage, int inflight, boolean pauseClient, float percentReads)
            throws IOException {
        super(conf);
        if (nbMessage < 0) {
            throw new IllegalArgumentException("nbMessage: " + nbMessage);
        }
        this.MAX_IN_FLIGHT = inflight;
        this.nbMessage = nbMessage;
        this.curMessage = nbMessage;
        this.pauseClient = pauseClient;
        this.percentReads = percentReads;
    }

    /**
     * Starts the traffic
     */
    @Override
    public void channelConnected(ChannelHandlerContext ctx, ChannelStateEvent e) {
        super.channelConnected(ctx, e);
        startDate = new Date();
        channel = e.getChannel();
        outstandingTransactions = 0;
        startTransaction();
    }

    /**
     * If write of Commit Request was not possible before, just do it now
     */
    @Override
    public void channelInterestChanged(ChannelHandlerContext ctx, ChannelStateEvent e) {
        startTransaction();
    }

    /**
     * When the channel is closed, print result
     */
    @Override
    public void channelClosed(ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception {
        super.channelClosed(ctx, e);
        stopDate = new Date();
        String MB = String.format("Memory Used: %8.3f MB",
                (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / 1048576.0);
        String Mbs = String.format("%9.3f TPS",
                ((nbMessage - curMessage) * 1000 / (float) (stopDate.getTime() - (startDate != null ? startDate.getTime() : 0))));
        System.out.println(MB + " " + Mbs);
    }

    /**
     * When a message is received, handle it based on its type. Only commit
     * and timestamp responses are acted on here.
     */
    @Override
    protected void processMessage(TSOMessage msg) {
        if (msg instanceof CommitResponse) {
            handle((CommitResponse) msg);
        } else if (msg instanceof TimestampResponse) {
            handle((TimestampResponse) msg);
        }
    }

    /**
     * Handle the TimestampResponse message: a fresh timestamp starts the
     * commit phase of that transaction.
     */
    public void handle(TimestampResponse timestampResponse) {
        sendCommitRequest(timestampResponse.timestamp);
    }

    /**
     * Handle the CommitRequest message
     */
    // Snapshot of the stats at the last periodic printout.
    private long lasttotalTx = 0;
    private long lasttotalNanoTime = 0;
    private long lastTimeout = System.currentTimeMillis();

    public void handle(CommitResponse msg) {
        // outstandingTransactions.decrementAndGet();
        outstandingTransactions--;
        long finishNanoTime = System.nanoTime();
        long startNanoTime = wallClockTime.remove(msg.startTimestamp);
        if (msg.committed) {
            totalNanoTime += (finishNanoTime - startNanoTime);
            totalTx++;
            long timeout = System.currentTimeMillis();
            // if (totalTx % 10000 == 0) {//print out
            if (timeout - lastTimeout > 60 * 1000) { // print out once a minute
                long difftx = totalTx - lasttotalTx;
                long difftime = totalNanoTime - lasttotalNanoTime;
                System.out.format(
                        " CLIENT: totalTx: %d totalNanoTime: %d microtime/tx: %4.3f tx/s %4.3f "
                                + "Size Com: %d Size Aborted: %d Memory Used: %8.3f KB TPS: %9.3f \n",
                        difftx,
                        difftime,
                        (difftime / (double) difftx / 1000),
                        1000 * difftx / ((double) (timeout - lastTimeout)),
                        getSizeCom(),
                        getSizeAborted(),
                        // largestDeletedTimestamp - _decoder.lastStartTimestamp,
                        (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / 1024.0,
                        ((nbMessage - curMessage) * 1000 / (float) (new Date().getTime() - (startDate != null ? startDate.getTime() : 0))));
                lasttotalTx = totalTx;
                lasttotalNanoTime = totalNanoTime;
                lastTimeout = timeout;
            }
        } else { // aborted
            try {
                super.completeAbort(msg.startTimestamp, new SyncAbortCompleteCallback());
            } catch (IOException e) {
                LOG.error("Couldn't send abort", e);
            }
        }
        // One slot freed: try to keep the pipe full.
        startTransaction();
    }

    private long getSizeCom() {
        return committed.getSize();
    }

    // Rough memory estimate of the aborted set (entries * 8 * 8 bytes).
    private long getSizeAborted() {
        return aborted.size() * 8 * 8;
    }

    // Lazily seeded per-channel RNG; see sendCommitRequest.
    private java.util.Random rnd;

    // When true, each commit request is delayed by PAUSE_LENGTH ms.
    private boolean pauseClient;

    /**
     * Sends the CommitRequest message to the channel
     *
     * @param timestamp start timestamp of the transaction to commit
     */
    private void sendCommitRequest(final long timestamp) {
        // NOTE(review): in Netty 3, OP_WRITE in interestOps signals the
        // channel is NOT writable; bail out and rely on
        // channelInterestChanged to resume — confirm this reading.
        if (!((channel.getInterestOps() & Channel.OP_WRITE) == 0))
            return;

        // initialize rnd if it is not yet
        if (rnd == null) {
            long seed = System.currentTimeMillis();
            seed *= channel.getId(); // to make it channel dependent
            rnd = new java.util.Random(seed);
        }

        // Decide read-only vs. write transaction, then pick random rows.
        boolean readOnly = (rnd.nextFloat() * 100) < percentReads;

        int size = readOnly ? 0 : rnd.nextInt(MAX_ROW);
        final RowKey[] rows = new RowKey[size];
        for (byte i = 0; i < rows.length; i++) {
            long l = rnd.nextInt(DB_SIZE);
            byte[] b = new byte[8];
            // Big-endian encoding of the row number.
            for (int iii = 0; iii < 8; iii++) {
                b[7 - iii] = (byte) (l >>> (iii * 8));
            }
            byte[] tableId = new byte[8];
            rows[i] = new RowKey(b, tableId);
        }

        // send a query once in a while
        totalCommitRequestSent++;
        if (totalCommitRequestSent % QUERY_RATE == 0 && rows.length > 0) {
            long queryTimeStamp = rnd.nextInt(Math.abs((int) timestamp));
            try {
                isCommitted(timestamp, queryTimeStamp, new SyncCommitQueryCallback());
            } catch (IOException e) {
                LOG.error("Couldn't send commit query", e);
            }
        }

        // Submit the commit (optionally delayed to simulate a paused client).
        executor.schedule(new Runnable() {
            @Override
            public void run() {
                // keep statistics
                wallClockTime.put(timestamp, System.nanoTime());
                try {
                    commit(timestamp, rows, new SyncCommitCallback());
                } catch (IOException e) {
                    LOG.error("Couldn't send commit", e);
                    e.printStackTrace();
                }
            }
        }, pauseClient ? PAUSE_LENGTH : 0, TimeUnit.MILLISECONDS);
    }

    // Shared scheduler for (optionally delayed) commit submissions across all handlers.
    private static ScheduledExecutorService executor = Executors.newScheduledThreadPool(20);

    private long totalCommitRequestSent; // just to keep the total number of
                                         // commitreqeusts sent
    private int QUERY_RATE = 100; // send a query after this number of commit
                                  // requests

    /**
     * Start a new transaction
     *
     * <p>Keeps requesting timestamps until the channel is unwritable, the
     * in-flight limit is reached, or all nbMessage transactions have been
     * issued; when the last outstanding transaction finishes, closes the
     * channel and releases waitForAll().
     */
    private void startTransaction() {
        while (true) { // fill the pipe with as much as request you can
            // Same Netty-3 writability check as in sendCommitRequest.
            if (!((channel.getInterestOps() & Channel.OP_WRITE) == 0))
                return;

            // if (outstandingTransactions.intValue() >= MAX_IN_FLIGHT)
            if (outstandingTransactions >= MAX_IN_FLIGHT)
                return;

            if (curMessage == 0) {
                LOG.warn("No more message");
                // wait for all outstanding msgs and then close the channel
                // if (outstandingTransactions.intValue() == 0) {
                if (outstandingTransactions == 0) {
                    LOG.warn("Close channel");
                    channel.close().addListener(new ChannelFutureListener() {
                        public void operationComplete(ChannelFuture future) {
                            answer.offer(true);
                        }
                    });
                }
                return;
            }
            curMessage--;
            // TimestampRequest tr = new TimestampRequest();
            outstandingTransactions++;
            // outstandingTransactions.incrementAndGet();
            // Channels.write(channel, tr);
            try {
                super.getNewTimestamp(new SyncCreateCallback());
            } catch (IOException e) {
                LOG.error("Couldn't start transaction", e);
            }
            Thread.yield();
        }
    }
}
/*
 * Copyright 2000-2015 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.jetbrains.python.packaging;

import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.intellij.execution.ExecutionException;
import com.intellij.execution.RunCanceledByUserException;
import com.intellij.execution.configurations.GeneralCommandLine;
import com.intellij.execution.process.CapturingProcessHandler;
import com.intellij.execution.process.ProcessOutput;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.jetbrains.python.sdk.PythonSdkType;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;

/**
 * Package manager for conda-based SDKs. When {@link #useConda} is {@code true} all
 * install/uninstall/list operations are routed through the {@code conda} executable;
 * otherwise they fall through to the pip-based {@link PyPackageManagerImpl} implementation.
 * Whichever backend is not active has its package list kept in {@link #mySideCache} so that
 * {@link #getPackages()} can report the union of both.
 */
public class PyCondaPackageManagerImpl extends PyPackageManagerImpl {
  // Cache of the packages from the *inactive* backend (pip packages when useConda is true,
  // conda packages otherwise); refreshed by collectPackages().
  @Nullable private volatile List<PyPackage> mySideCache = null;

  public static final String PYTHON = "python";

  // Selects the active backend; toggled externally via useConda(boolean).
  public boolean useConda = true;

  /** @return whether conda is the active backend. */
  public boolean useConda() {
    return useConda;
  }

  /** Switches the active backend (true = conda, false = pip). */
  public void useConda(boolean conda) {
    useConda = conda;
  }

  PyCondaPackageManagerImpl(@NotNull final Sdk sdk) {
    super(sdk);
  }

  /**
   * Installs the given requirements with the active backend.
   * For conda, "-U" in {@code extraArgs} maps to {@code conda update}; other extra
   * arguments are forwarded to {@code conda install}.
   */
  @Override
  public void install(@NotNull List<PyRequirement> requirements, @NotNull List<String> extraArgs) throws ExecutionException {
    if (useConda) {
      final ArrayList<String> arguments = new ArrayList<>();
      for (PyRequirement requirement : requirements) {
        arguments.add(requirement.toString());
      }
      arguments.add("-y"); // never prompt
      if (extraArgs.contains("-U")) {
        getCondaOutput("update", arguments);
      }
      else {
        arguments.addAll(extraArgs);
        getCondaOutput("install", arguments);
      }
    }
    else {
      super.install(requirements, extraArgs);
    }
  }

  /**
   * Runs {@code conda <command> -p <env path> <arguments>} and returns its output.
   *
   * @throws ExecutionException if the conda executable or environment path cannot be
   *                            determined, or the process exits with a non-zero code.
   */
  private ProcessOutput getCondaOutput(@NotNull final String command, List<String> arguments) throws ExecutionException {
    final Sdk sdk = getSdk();
    final String condaExecutable = PyCondaPackageService.getCondaExecutable(sdk.getHomeDirectory());
    if (condaExecutable == null) throw new PyExecutionException("Cannot find conda", "Conda", Collections.emptyList(), new ProcessOutput());

    final String path = getCondaDirectory();
    if (path == null) throw new PyExecutionException("Empty conda name for " + sdk.getHomePath(), command, arguments);

    final ArrayList<String> parameters = Lists.newArrayList(condaExecutable, command, "-p", path);
    parameters.addAll(arguments);

    final GeneralCommandLine commandLine = new GeneralCommandLine(parameters);
    final CapturingProcessHandler handler = new CapturingProcessHandler(commandLine);
    final ProcessOutput result = handler.runProcess();
    final int exitCode = result.getExitCode();
    if (exitCode != 0) {
      // No output at all usually indicates the process could not even start/write.
      final String message = StringUtil.isEmptyOrSpaces(result.getStdout()) && StringUtil.isEmptyOrSpaces(result.getStderr()) ?
                             "Permission denied" : "Non-zero exit code";
      throw new PyExecutionException(message, "Conda", parameters, result);
    }
    return result;
  }

  /**
   * @return the conda environment directory for this SDK, or null if the SDK has no home.
   *         On Windows the interpreter lives directly in the env root; elsewhere in env/bin.
   */
  @Nullable
  private String getCondaDirectory() {
    final VirtualFile homeDirectory = getSdk().getHomeDirectory();
    if (homeDirectory == null) return null;
    if (SystemInfo.isWindows) return homeDirectory.getParent().getPath();
    return homeDirectory.getParent().getParent().getPath();
  }

  /**
   * Installs a single requirement with the active backend.
   * BUGFIX: the useConda condition was inverted — with conda active this method previously
   * delegated to pip (super.install) and used conda when pip was selected, the opposite of
   * every sibling method. Now consistent with install(List, List) and uninstall(List).
   */
  @Override
  public void install(@NotNull String requirementString) throws ExecutionException {
    if (useConda) {
      getCondaOutput("install", Lists.newArrayList(requirementString, "-y"));
    }
    else {
      super.install(requirementString);
    }
  }

  /** Uninstalls the given packages with the active backend ({@code conda remove -y} for conda). */
  @Override
  public void uninstall(@NotNull List<PyPackage> packages) throws ExecutionException {
    if (useConda) {
      final ArrayList<String> arguments = new ArrayList<>();
      for (PyPackage aPackage : packages) {
        arguments.add(aPackage.getName());
      }
      arguments.add("-y");
      getCondaOutput("remove", arguments);
    }
    else {
      super.uninstall(packages);
    }
  }

  /**
   * @return packages installed using 'conda' manager only.
   * Use 'useConda' flag to retrieve 'pip' packages
   */
  @NotNull
  @Override
  protected List<PyPackage> collectPackages() throws ExecutionException {
    final List<PyPackage> pipPackages = super.collectPackages();

    final ProcessOutput output = getCondaOutput("list", Lists.newArrayList("-e"));
    final Set<PyPackage> condaPackages = Sets.newConcurrentHashSet(parseCondaToolOutput(output.getStdout()));

    if (useConda) {
      mySideCache = pipPackages;
      return Lists.newArrayList(condaPackages);
    }
    else {
      mySideCache = Lists.newArrayList(condaPackages);
      // PERF FIX: return the already-collected pip list instead of invoking
      // super.collectPackages() a second time (it spawns an external process).
      return pipPackages;
    }
  }

  /**
   * Parses {@code conda list -e} output ("name=version=build" lines, '#' comments skipped).
   * The fictional "Python" entry is excluded; an optional 4th field is treated as a
   * ':'-separated requirements spec.
   *
   * @throws ExecutionException on lines with fewer than three '='-separated fields
   */
  @NotNull
  protected static List<PyPackage> parseCondaToolOutput(@NotNull String s) throws ExecutionException {
    final String[] lines = StringUtil.splitByLines(s);
    final List<PyPackage> packages = new ArrayList<>();
    for (String line : lines) {
      if (line.startsWith("#")) continue; // comment/header line
      final List<String> fields = StringUtil.split(line, "=");
      if (fields.size() < 3) {
        throw new PyExecutionException("Invalid conda output format", "conda", Collections.emptyList());
      }
      final String name = fields.get(0);
      final String version = fields.get(1);
      final List<PyRequirement> requirements = new ArrayList<>();
      if (fields.size() >= 4) {
        final String requiresLine = fields.get(3);
        final String requiresSpec = StringUtil.join(StringUtil.split(requiresLine, ":"), "\n");
        requirements.addAll(PyPackageUtil.fix(PyRequirement.fromText(requiresSpec)));
      }
      if (!"Python".equals(name)) {
        packages.add(new PyPackage(name, version, "", requirements));
      }
    }
    return packages;
  }

  /**
   * @return true if the SDK points into a conda *virtual* environment: its root has a
   *         conda-meta directory but no envs directory (the base install has both).
   */
  public static boolean isCondaVEnv(@NotNull final Sdk sdk) {
    final String condaName = "conda-meta";
    final VirtualFile homeDirectory = sdk.getHomeDirectory();
    if (homeDirectory == null) return false;
    final VirtualFile condaParent = SystemInfo.isWindows ? homeDirectory.getParent()
                                                         : homeDirectory.getParent().getParent();
    final VirtualFile condaMeta = condaParent.findChild(condaName);
    final VirtualFile envs = condaParent.findChild("envs");
    return condaMeta != null && envs == null;
  }

  // Conda virtual environment and system conda
  public static boolean isConda(@NotNull final Sdk sdk) {
    final String condaName = "conda-meta";
    final VirtualFile homeDirectory = sdk.getHomeDirectory();
    if (homeDirectory == null) return false;
    final VirtualFile condaParent = SystemInfo.isWindows ? homeDirectory.getParent()
                                                         : homeDirectory.getParent().getParent();
    final VirtualFile condaMeta = condaParent.findChild(condaName);
    return condaMeta != null;
  }

  /**
   * Creates a conda environment via {@code conda create -p <dir> -y python=<version>}.
   *
   * @return the path of the new environment's python interpreter
   * @throws ExecutionException          if conda is missing or exits with an error
   * @throws RunCanceledByUserException  if the user cancels the progress indicator
   */
  @NotNull
  public static String createVirtualEnv(@Nullable String condaExecutable,
                                        @NotNull String destinationDir,
                                        @NotNull String version) throws ExecutionException {
    if (condaExecutable == null) throw new PyExecutionException("Cannot find conda", "Conda", Collections.emptyList(), new ProcessOutput());

    final ArrayList<String> parameters = Lists.newArrayList(condaExecutable, "create", "-p", destinationDir, "-y",
                                                            "python=" + version);

    final GeneralCommandLine commandLine = new GeneralCommandLine(parameters);
    final CapturingProcessHandler handler = new CapturingProcessHandler(commandLine);
    final ProgressIndicator indicator = ProgressManager.getInstance().getProgressIndicator();
    final ProcessOutput result = handler.runProcessWithProgressIndicator(indicator);
    if (result.isCancelled()) {
      throw new RunCanceledByUserException();
    }
    final int exitCode = result.getExitCode();
    if (exitCode != 0) {
      final String message = StringUtil.isEmptyOrSpaces(result.getStdout()) && StringUtil.isEmptyOrSpaces(result.getStderr()) ?
                             "Permission denied" : "Non-zero exit code";
      throw new PyExecutionException(message, "Conda", parameters, result);
    }
    // Fall back to the conventional env layout when the SDK helper cannot locate python.
    final String binary = PythonSdkType.getPythonExecutable(destinationDir);
    final String binaryFallback = destinationDir + File.separator + "bin" + File.separator + "python";
    return (binary != null) ? binary : binaryFallback;
  }

  /**
   * @return the union of the active backend's packages (from super) and the cached
   *         inactive-backend packages, or null if either list is unavailable yet.
   */
  @Nullable
  @Override
  public List<PyPackage> getPackages() {
    final List<PyPackage> packagesCache = mySideCache;
    if (packagesCache == null) return null;
    final List<PyPackage> packages = Lists.newArrayList(packagesCache);
    final List<PyPackage> condaPackages = super.getPackages();
    if (condaPackages == null) return null;
    packages.addAll(condaPackages);
    return Collections.unmodifiableList(packages);
  }
}
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.codeInspection.deadCode;

import com.intellij.codeInspection.HTMLJavaHTMLComposer;
import com.intellij.codeInspection.InspectionsBundle;
import com.intellij.codeInspection.ex.DescriptorComposer;
import com.intellij.codeInspection.ex.HTMLComposerImpl;
import com.intellij.codeInspection.reference.*;
import com.intellij.codeInspection.ui.InspectionToolPresentation;
import com.intellij.codeInspection.ui.InspectionTreeNode;
import com.intellij.codeInspection.ui.RefElementNode;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;

import javax.swing.tree.TreeNode;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;

/**
 * Builds the HTML description shown for elements reported by the dead-code inspection:
 * a problem synopsis (picked from message-bundle keys based on usage counts), quick-fix
 * links, reference lists, and a recursive "callees" list.
 */
public class DeadHTMLComposer extends HTMLComposerImpl {
  private final InspectionToolPresentation myToolPresentation;
  // Java-specific composer extension used for class/method reference sections.
  private final HTMLJavaHTMLComposer myComposer;

  public DeadHTMLComposer(@NotNull InspectionToolPresentation presentation) {
    myToolPresentation = presentation;
    myComposer = getExtension(HTMLJavaHTMLComposer.COMPOSER);
  }

  @Override
  public void compose(final StringBuffer buf, RefEntity refEntity) {
    compose(buf, refEntity, true);
  }

  /**
   * Composes the full HTML report for {@code refEntity} into {@code buf}.
   *
   * @param toExternalHtml when true, also emits the page header and quick-fix resolution
   *                       section (used for exported HTML as opposed to the tool window)
   */
  public void compose(final StringBuffer buf, RefEntity refEntity, boolean toExternalHtml) {
    if (toExternalHtml) {
      genPageHeader(buf, refEntity);
    }
    if (refEntity instanceof RefElement) {
      RefElementImpl refElement = (RefElementImpl)refEntity;
      // Only suspicious, non-entry-point elements get a problem description.
      if (refElement.isSuspicious() && !refElement.isEntry()) {
        appendHeading(buf, InspectionsBundle.message("inspection.problem.synopsis"));
        //noinspection HardCodedStringLiteral
        buf.append("<br>");
        buf.append("<div class=\"problem-description\">");
        appendProblemSynopsis(refElement, buf);
        buf.append("</div>");
        if (toExternalHtml) {
          buf.append("<br><br>");
          appendResolution(buf, refElement, DescriptorComposer.quickFixTexts(refElement, myToolPresentation));
        }
        // Emit element-kind-specific reference sections.
        refElement.accept(new RefJavaVisitor() {
          @Override
          public void visitClass(@NotNull RefClass aClass) {
            appendClassInstantiations(buf, aClass);
            myComposer.appendDerivedClasses(buf, aClass);
            myComposer.appendClassExtendsImplements(buf, aClass);
            myComposer.appendLibraryMethods(buf, aClass);
            myComposer.appendTypeReferences(buf, aClass);
          }

          @Override
          public void visitMethod(@NotNull RefMethod method) {
            appendElementInReferences(buf, method);
            appendElementOutReferences(buf, method);
            myComposer.appendDerivedMethods(buf, method);
            myComposer.appendSuperMethods(buf, method);
          }

          @Override
          public void visitField(@NotNull RefField field) {
            appendElementInReferences(buf, field);
            appendElementOutReferences(buf, field);
          }
        });
      }
      else {
        appendNoProblems(buf);
      }
      appendCallesList(refElement, buf, new HashSet<>(), true);
    }
  }

  /**
   * Appends a one-line synopsis for the element, choosing a message-bundle key from the
   * element kind and its usage statistics (read/write flags for fields, instantiation and
   * implementation counts for classes, own/super/derived reference counts for methods).
   */
  public static void appendProblemSynopsis(final RefElement refElement, final StringBuffer buf) {
    refElement.accept(new RefJavaVisitor() {
      @Override
      public void visitField(@NotNull RefField field) {
        if (field.isUsedForReading() && !field.isUsedForWriting()) {
          buf.append(InspectionsBundle.message("inspection.dead.code.problem.synopsis"));
          return;
        }
        if (!field.isUsedForReading() && field.isUsedForWriting()) {
          if (field.isOnlyAssignedInInitializer()) {
            buf.append(InspectionsBundle.message("inspection.dead.code.problem.synopsis1"));
            return;
          }
          buf.append(InspectionsBundle.message("inspection.dead.code.problem.synopsis2"));
          return;
        }
        int nUsages = field.getInReferences().size();
        if (nUsages == 0) {
          buf.append(InspectionsBundle.message("inspection.dead.code.problem.synopsis1"));
        }
        else if (nUsages == 1) {
          buf.append(InspectionsBundle.message("inspection.dead.code.problem.synopsis3"));
        }
        else {
          buf.append(InspectionsBundle.message("inspection.dead.code.problem.synopsis4", nUsages));
        }
      }

      @Override
      public void visitClass(@NotNull RefClass refClass) {
        if (refClass.isAnonymous()) {
          buf.append(InspectionsBundle.message("inspection.dead.code.problem.synopsis10"));
        }
        else if (refClass.isInterface() || refClass.isAbstract()) {
          String classOrInterface = HTMLJavaHTMLComposer.getClassOrInterface(refClass, true);
          //noinspection HardCodedStringLiteral
          buf.append("&nbsp;");
          int nDerived = getImplementationsCount(refClass);
          if (nDerived == 0) {
            buf.append(InspectionsBundle.message("inspection.dead.code.problem.synopsis23", classOrInterface));
          }
          else if (nDerived == 1) {
            buf.append(InspectionsBundle.message("inspection.dead.code.problem.synopsis24", classOrInterface));
          }
          else {
            buf.append(InspectionsBundle.message("inspection.dead.code.problem.synopsis25", classOrInterface, nDerived));
          }
        }
        else if (refClass.isUtilityClass()) {
          buf.append(InspectionsBundle.message("inspection.dead.code.problem.synopsis11"));
        }
        else {
          int nInstantiationsCount = getInstantiationsCount(refClass);
          if (nInstantiationsCount == 0) {
            int nImplementations = getImplementationsCount(refClass);
            if (nImplementations != 0) {
              buf.append(InspectionsBundle.message("inspection.dead.code.problem.synopsis19", nImplementations));
            }
            else {
              buf.append(InspectionsBundle.message("inspection.dead.code.problem.synopsis13"));
            }
          }
          else if (nInstantiationsCount == 1) {
            buf.append(InspectionsBundle.message("inspection.dead.code.problem.synopsis12"));
          }
          else {
            buf.append(InspectionsBundle.message("inspection.dead.code.problem.synopsis20", nInstantiationsCount));
          }
        }
      }

      @Override
      public void visitMethod(@NotNull RefMethod method) {
        RefClass refClass = method.getOwnerClass();
        if (method.isExternalOverride()) {
          String classOrInterface = HTMLJavaHTMLComposer.getClassOrInterface(refClass, false);
          buf.append(InspectionsBundle.message("inspection.dead.code.problem.synopsis22", classOrInterface));
        }
        else if (method.isStatic() || method.isConstructor()) {
          int nRefs = method.getInReferences().size();
          if (method.isConstructor()) {
            if (nRefs == 0) {
              buf.append(InspectionsBundle.message("inspection.dead.code.problem.synopsis26.constructor"));
            }
            else if (method.isConstructor() && ((RefMethodImpl)method).isSuspiciousRecursive()) {
              buf.append(InspectionsBundle.message("inspection.dead.code.problem.synopsis27.constructor"));
            }
            else if (nRefs == 1) {
              buf.append(InspectionsBundle.message("inspection.dead.code.problem.synopsis28.constructor"));
            }
            else {
              buf.append(InspectionsBundle.message("inspection.dead.code.problem.synopsis29.constructor", nRefs) );
            }
          }
          else {
            if (nRefs == 0) {
              buf.append(InspectionsBundle.message("inspection.dead.code.problem.synopsis26.method"));
            }
            else if (method.isConstructor() && ((RefMethodImpl)method).isSuspiciousRecursive()) {
              buf.append(InspectionsBundle.message("inspection.dead.code.problem.synopsis27.method"));
            }
            else if (nRefs == 1) {
              buf.append(InspectionsBundle.message("inspection.dead.code.problem.synopsis28.method"));
            }
            else {
              buf.append(InspectionsBundle.message("inspection.dead.code.problem.synopsis29.method", nRefs) );
            }
          }
        }
        else if (((RefClassImpl)refClass).isSuspicious()) {
          if (method.isAbstract()) {
            buf.append(InspectionsBundle.message("inspection.dead.code.problem.synopsis14"));
          }
          else {
            buf.append(InspectionsBundle.message("inspection.dead.code.problem.synopsis15"));
          }
        }
        else {
          int nOwnRefs = method.getInReferences().size();
          int nSuperRefs = getSuperRefsCount(method);
          int nDerivedRefs = getDerivedRefsCount(method);
          if (nOwnRefs == 0 && nSuperRefs == 0 && nDerivedRefs == 0) {
            buf.append(InspectionsBundle.message("inspection.dead.code.problem.synopsis16"));
          }
          else if (nDerivedRefs > 0 && nSuperRefs == 0 && nOwnRefs == 0) {
            String classOrInterface = HTMLJavaHTMLComposer.getClassOrInterface(refClass, false);
            buf.append(InspectionsBundle.message("inspection.dead.code.problem.synopsis21", classOrInterface));
          }
          else if (((RefMethodImpl)method).isSuspiciousRecursive()) {
            buf.append(InspectionsBundle.message("inspection.dead.code.problem.synopsis17"));
          }
          else {
            buf.append(InspectionsBundle.message("inspection.dead.code.problem.synopsis18"));
          }
        }
      }
    });
  }

  /** Appends a short per-item usage summary for list entries (tool-window item tooltips). */
  @Override
  protected void appendAdditionalListItemInfo(StringBuffer buf, RefElement refElement) {
    // Implicit constructors are reported via their owning class.
    if (refElement instanceof RefImplicitConstructor) {
      refElement = ((RefImplicitConstructor)refElement).getOwnerClass();
    }
    //noinspection HardCodedStringLiteral
    buf.append("<br>");
    if (refElement instanceof RefClass) {
      RefClassImpl refClass = (RefClassImpl)refElement;
      if (refClass.isSuspicious()) {
        if (refClass.isUtilityClass()) {
          // Append nothing.
        }
        else if (refClass.isAnonymous()) {
          buf.append(InspectionsBundle.message("inspection.dead.code.problem.synopsis9.suspicious", getInstantiationsCount(refClass)));
        }
        else if (refClass.isInterface() || refClass.isAbstract()) {
          buf.append(InspectionsBundle.message("inspection.dead.code.problem.synopsis8.suspicious", getInstantiationsCount(refClass)));
        }
        else {
          buf.append(InspectionsBundle.message("inspection.dead.code.problem.synopsis7.suspicious", getInstantiationsCount(refClass)));
        }
      }
      else {
        if (refClass.isUtilityClass()) {
          // Append nothing.
        }
        else if (refClass.isAnonymous()) {
          buf.append(InspectionsBundle.message("inspection.dead.code.problem.synopsis9", getInstantiationsCount(refClass)));
        }
        else if (refClass.isInterface() || refClass.isAbstract()) {
          buf.append(InspectionsBundle.message("inspection.dead.code.problem.synopsis8", getInstantiationsCount(refClass)));
        }
        else {
          buf.append(InspectionsBundle.message("inspection.dead.code.problem.synopsis7", getInstantiationsCount(refClass)));
        }
      }
    }
    else {
      int nUsageCount = refElement.getInReferences().size();
      if (refElement instanceof RefMethod) {
        // Usages of overriding methods count towards the base method.
        nUsageCount += getDerivedRefsCount((RefMethod) refElement);
      }
      if (((RefElementImpl)refElement).isSuspicious()) {
        buf.append(InspectionsBundle.message("inspection.dead.code.problem.synopsis6.suspicious", nUsageCount));
      }
      else {
        buf.append(InspectionsBundle.message("inspection.dead.code.problem.synopsis6", nUsageCount));
      }
    }
  }

  // Total incoming references across the whole override hierarchy below refMethod (recursive).
  private static int getDerivedRefsCount(RefMethod refMethod) {
    int count = 0;
    for (RefMethod refDerived : refMethod.getDerivedMethods()) {
      count += refDerived.getInReferences().size() + getDerivedRefsCount(refDerived);
    }
    return count;
  }

  // Total incoming references across the super-method chain above refMethod (recursive).
  private static int getSuperRefsCount(RefMethod refMethod) {
    int count = 0;
    for (RefMethod refSuper : refMethod.getSuperMethods()) {
      count += refSuper.getInReferences().size() + getSuperRefsCount(refSuper);
    }
    return count;
  }

  // Counts constructor call sites for the class and its subclasses; each subclass's own
  // constructor count is subtracted (presumably to cancel implicit super() calls — verify).
  // Anonymous classes count as exactly one instantiation.
  private static int getInstantiationsCount(RefClass aClass) {
    if (!aClass.isAnonymous()) {
      int count = 0;
      for (RefMethod refConstructor : aClass.getConstructors()) {
        count += refConstructor.getInReferences().size();
      }
      for (RefClass subClass : aClass.getSubClasses()) {
        count += getInstantiationsCount(subClass);
        count -= subClass.getConstructors().size();
      }
      return count;
    }
    return 1;
  }

  // Counts concrete (non-interface, non-abstract) classes in the subtree below refClass.
  private static int getImplementationsCount(RefClass refClass) {
    int count = 0;
    for (RefClass subClass : refClass.getSubClasses()) {
      if (!subClass.isInterface() && !subClass.isAbstract()) {
        count++;
      }
      count += getImplementationsCount(subClass);
    }
    return count;
  }

  // Lists the call sites of the class's constructors (only for concrete, non-utility classes).
  private void appendClassInstantiations(StringBuffer buf, RefClass refClass) {
    if (!refClass.isInterface() && !refClass.isAbstract() && !refClass.isUtilityClass()) {
      boolean found = false;

      appendHeading(buf, InspectionsBundle.message("inspection.dead.code.export.results.instantiated.from.heading"));

      startList(buf);
      for (RefMethod refMethod : refClass.getConstructors()) {
        for (RefElement refCaller : refMethod.getInReferences()) {
          appendListItem(buf, refCaller);
          found = true;
        }
      }

      if (!found) {
        startListItem(buf);
        buf.append(InspectionsBundle.message("inspection.dead.code.export.results.no.instantiations.found"));
        doneListItem(buf);
      }

      doneList(buf);
    }
  }

  // Recursively emits a nested <ul> of suspicious callees; mentionedElements prevents
  // emitting the same element twice across the whole recursion.
  private void appendCallesList(RefElement element, StringBuffer buf, Set<? super RefElement> mentionedElements, boolean appendCallees){
    final Set<RefElement> possibleChildren = getPossibleChildren(new RefElementNode(element, myToolPresentation), element);
    if (!possibleChildren.isEmpty()) {
      if (appendCallees){
        appendHeading(buf, InspectionsBundle.message("inspection.export.results.callees"));
        buf.append("<div class=\"problem-description\">");
      }
      @NonNls final String ul = "<ul>";
      buf.append(ul);
      for (RefElement refElement : possibleChildren) {
        if (!mentionedElements.contains(refElement)) {
          mentionedElements.add(refElement);
          @NonNls final String li = "<li>";
          buf.append(li);
          appendElementReference(buf, refElement, true);
          @NonNls final String closeLi = "</li>";
          buf.append(closeLi);
          appendCallesList(refElement, buf, mentionedElements, false);
        }
      }
      @NonNls final String closeUl = "</ul>";
      buf.append(closeUl);
      if (appendCallees) {
        buf.append("</div>");
      }
    }
  }

  /**
   * Collects suspicious elements reachable from {@code refElement} (out-references, derived
   * methods, abstract/interface subclasses, and the implicit default constructor's children),
   * excluding any element already on the tree path to the root (cycle guard).
   */
  public static Set<RefElement> getPossibleChildren(final RefElementNode refElementNode, RefElement refElement) {
    final TreeNode[] pathToRoot = refElementNode.getPath();

    final HashSet<RefElement> newChildren = new HashSet<>();

    if (!refElement.isValid()) return newChildren;

    for (RefElement refCallee : refElement.getOutReferences()) {
      if (((RefElementImpl)refCallee).isSuspicious()) {
        if (notInPath(pathToRoot, refCallee)) newChildren.add(refCallee);
      }
    }

    if (refElement instanceof RefMethod) {
      RefMethod refMethod = (RefMethod) refElement;

      if (!refMethod.isStatic() && !refMethod.isConstructor() && !refMethod.getOwnerClass().isAnonymous()) {
        for (RefMethod refDerived : refMethod.getDerivedMethods()) {
          if (((RefMethodImpl)refDerived).isSuspicious()) {
            if (notInPath(pathToRoot, refDerived)) newChildren.add(refDerived);
          }
        }
      }
    }
    else if (refElement instanceof RefClass) {
      RefClass refClass = (RefClass) refElement;
      for (RefClass subClass : refClass.getSubClasses()) {
        if ((subClass.isInterface() || subClass.isAbstract()) && ((RefClassImpl)subClass).isSuspicious()) {
          if (notInPath(pathToRoot, subClass)) newChildren.add(subClass);
        }
      }

      if (refClass.getDefaultConstructor() instanceof RefImplicitConstructor) {
        Set<RefElement> fromConstructor = getPossibleChildren(refElementNode, refClass.getDefaultConstructor());
        newChildren.addAll(fromConstructor);
      }
    }

    return newChildren;
  }

  // True when refChild does not already appear as a RefElementNode on the path to the root.
  private static boolean notInPath(TreeNode[] pathToRoot, RefElement refChild) {
    for (TreeNode aPathToRoot : pathToRoot) {
      InspectionTreeNode node = (InspectionTreeNode)aPathToRoot;
      if (node instanceof RefElementNode && ((RefElementNode)node).getElement() == refChild) return false;
    }

    return true;
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.rya.rdftriplestore.inference;

import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.mock.MockInstance;
import org.apache.accumulo.core.client.security.tokens.PasswordToken;
import org.apache.rya.accumulo.AccumuloRdfConfiguration;
import org.apache.rya.accumulo.AccumuloRyaDAO;
import org.apache.rya.rdftriplestore.RdfCloudTripleStore;
import org.apache.tinkerpop.gremlin.structure.Graph;
import org.junit.After;
import org.junit.Assert;
import org.junit.Test;
import org.openrdf.model.Resource;
import org.openrdf.model.URI;
import org.openrdf.model.Value;
import org.openrdf.model.ValueFactory;
import org.openrdf.model.impl.ValueFactoryImpl;
import org.openrdf.query.QueryLanguage;
import org.openrdf.repository.sail.SailRepository;
import org.openrdf.repository.sail.SailRepositoryConnection;

import com.google.common.collect.Sets;

import junit.framework.TestCase;

/**
 * Tests for Rya's {@code InferenceEngine}: loads schema triples into a mock-Accumulo-backed
 * triple store via SPARQL updates, refreshes the inference graph, and asserts the derived
 * subclass/subproperty/domain-range/restriction relationships.
 */
public class InferenceEngineTest extends TestCase {
    private Connector connector;
    private AccumuloRyaDAO dao;
    private final ValueFactory vf = new ValueFactoryImpl();
    private AccumuloRdfConfiguration conf;
    private RdfCloudTripleStore store;
    private InferenceEngine inferenceEngine;
    private SailRepository repository;
    private SailRepositoryConnection conn;

    /** Wires a mock Accumulo instance to a fresh DAO, store, inference engine, and connection. */
    @Override
    public void setUp() throws Exception {
        super.setUp();
        dao = new AccumuloRyaDAO();
        connector = new MockInstance().getConnector("", new PasswordToken(""));
        dao.setConnector(connector);
        conf = new AccumuloRdfConfiguration();
        dao.setConf(conf);
        dao.init();
        store = new RdfCloudTripleStore();
        store.setConf(conf);
        store.setRyaDAO(dao);
        inferenceEngine = new InferenceEngine();
        inferenceEngine.setRyaDAO(dao);
        store.setInferenceEngine(inferenceEngine);
        inferenceEngine.refreshGraph();
        store.initialize();
        repository = new SailRepository(store);
        conn = repository.getConnection();
    }

    /** Closes the connection and tears the store and mock tables down (purge + destroy). */
    @Override
    @After
    public void tearDown() throws Exception {
        conn.close();
        repository.shutDown();
        store.shutDown();
        dao.purge(conf);
        dao.destroy();
    }

    /** Verifies transitive rdfs:subClassOf closure, including owl:equivalentClass links. */
    @Test
    public void testSubClassGraph() throws Exception {
        final String insert = "INSERT DATA { GRAPH <http://updated/test> {\n"
                + " <urn:A> rdfs:subClassOf <urn:C> . \n"
                + " <urn:B> rdfs:subClassOf <urn:C> . \n"
                + " <urn:C> rdfs:subClassOf <urn:D> . \n"
                + " <urn:E> owl:equivalentClass <urn:D> . \n"
                + " <urn:E> rdfs:subClassOf <urn:G> . \n"
                + " <urn:Z> a owl:Class . \n"
                + " <urn:F> owl:equivalentClass <urn:G> . \n"
                + "}}";
        conn.prepareUpdate(QueryLanguage.SPARQL, insert).execute();
        inferenceEngine.refreshGraph();
        final URI a = vf.createURI("urn:A");
        final URI b = vf.createURI("urn:B");
        final URI c = vf.createURI("urn:C");
        final URI d = vf.createURI("urn:D");
        final URI e = vf.createURI("urn:E");
        final URI f = vf.createURI("urn:F");
        final URI g = vf.createURI("urn:G");
        final URI z = vf.createURI("urn:Z");
        final URI missing = vf.createURI("urn:Missing");
        final Set<URI> empty = new HashSet<>();
        final Set<URI> belowLevel2 = new HashSet<>(Arrays.asList(new URI[] { a, b }));
        final Set<URI> belowLevel3 = new HashSet<>(Arrays.asList(new URI[] { a, b, c, d, e }));
        final Set<URI> belowLevel4 = new HashSet<>(Arrays.asList(new URI[] { a, b, c, d, e, f, g }));
        // Leaves, unrelated classes, and unknown URIs have no subclasses.
        Assert.assertEquals(empty, inferenceEngine.getSubClasses(a));
        Assert.assertEquals(empty, inferenceEngine.getSubClasses(b));
        Assert.assertEquals(empty, inferenceEngine.getSubClasses(z));
        Assert.assertEquals(empty, inferenceEngine.getSubClasses(missing));
        Assert.assertEquals(belowLevel2, inferenceEngine.getSubClasses(c));
        Assert.assertEquals(belowLevel3, inferenceEngine.getSubClasses(d));
        // Equivalent classes (D ~ E, F ~ G) yield identical subclass sets.
        Assert.assertEquals(belowLevel3, inferenceEngine.getSubClasses(e));
        Assert.assertEquals(belowLevel4, inferenceEngine.getSubClasses(f));
        Assert.assertEquals(belowLevel4, inferenceEngine.getSubClasses(g));
    }

    /** Verifies transitive rdfs:subPropertyOf closure via the property graph + findParents. */
    @Test
    public void testSubPropertyGraph() throws Exception {
        final String insert = "INSERT DATA { GRAPH <http://updated/test> {\n"
                + " <urn:p> rdfs:subPropertyOf <urn:q> . \n"
                + " <urn:p> rdfs:subPropertyOf <urn:r> . \n"
                + " <urn:r> owl:equivalentProperty <urn:s> . \n"
                + " <urn:q> rdfs:subPropertyOf <urn:t> . \n"
                + " <urn:t> rdfs:subPropertyOf <urn:u> . \n"
                + " <urn:s> rdfs:subPropertyOf <urn:u> . \n"
                + " <urn:v> owl:equivalentProperty <urn:u> . \n"
                + " <urn:w> a owl:FunctionalProperty . \n"
                + "}}";
        conn.prepareUpdate(QueryLanguage.SPARQL, insert).execute();
        inferenceEngine.refreshGraph();
        final Graph graph = inferenceEngine.getSubPropertyOfGraph();
        final URI p = vf.createURI("urn:p");
        final URI q = vf.createURI("urn:q");
        final URI r = vf.createURI("urn:r");
        final URI s = vf.createURI("urn:s");
        final URI t = vf.createURI("urn:t");
        final URI u = vf.createURI("urn:u");
        final URI v = vf.createURI("urn:v");
        final URI w = vf.createURI("urn:w");
        final URI missing = vf.createURI("urn:Missing");
        final Set<URI> empty = new HashSet<>();
        final Set<URI> belowQ = new HashSet<>(Arrays.asList(new URI[] { p }));
        final Set<URI> belowR = new HashSet<>(Arrays.asList(new URI[] { p, r, s }));
        final Set<URI> belowT = new HashSet<>(Arrays.asList(new URI[] { p, q }));
        final Set<URI> belowU = new HashSet<>(Arrays.asList(new URI[] { p, q, r, s, t, u, v }));
        Assert.assertEquals(empty, InferenceEngine.findParents(graph, p));
        Assert.assertEquals(empty, InferenceEngine.findParents(graph, w));
        Assert.assertEquals(empty, InferenceEngine.findParents(graph, missing));
        Assert.assertEquals(belowQ, InferenceEngine.findParents(graph, q));
        Assert.assertEquals(belowR, InferenceEngine.findParents(graph, r));
        Assert.assertEquals(belowR, InferenceEngine.findParents(graph, s));
        Assert.assertEquals(belowT, InferenceEngine.findParents(graph, t));
        Assert.assertEquals(belowU, InferenceEngine.findParents(graph, u));
        Assert.assertEquals(belowU, InferenceEngine.findParents(graph, v));
    }

    /**
     * Verifies rdfs:domain/rdfs:range propagation through subproperty chains, subclass
     * chains, and owl:inverseOf (inverse properties swap domain and range).
     */
    @Test
    public void testDomainRange() throws Exception {
        final String insert = "INSERT DATA { GRAPH <http://updated/test> {\n"
                + " <urn:p1> rdfs:subPropertyOf <urn:p2> . \n"
                + " <urn:p2> rdfs:subPropertyOf <urn:p3> . \n"
                + " <urn:q1> rdfs:subPropertyOf <urn:q2> . \n"
                + " <urn:q2> rdfs:subPropertyOf <urn:q3> . \n"
                + " <urn:i1> rdfs:subPropertyOf <urn:i2> . \n"
                + " <urn:i2> rdfs:subPropertyOf <urn:i3> . \n"
                + " <urn:j1> rdfs:subPropertyOf <urn:j2> . \n"
                + " <urn:j2> rdfs:subPropertyOf <urn:j3> . \n"
                + " <urn:p2> owl:inverseOf <urn:i2> . \n"
                + " <urn:i1> owl:inverseOf <urn:q2> . \n"
                + " <urn:q1> owl:inverseOf <urn:j2> . \n"
                + " <urn:D1> rdfs:subClassOf <urn:D2> . \n"
                + " <urn:D2> rdfs:subClassOf <urn:D3> . \n"
                + " <urn:R1> rdfs:subClassOf <urn:R2> . \n"
                + " <urn:R2> rdfs:subClassOf <urn:R3> . \n"
                + " <urn:p2> rdfs:domain <urn:D2> . \n"
                + " <urn:p2> rdfs:range <urn:R2> . \n"
                + "}}";
        conn.prepareUpdate(QueryLanguage.SPARQL, insert).execute();
        inferenceEngine.refreshGraph();
        final Set<URI> hasDomainD1 = inferenceEngine.getPropertiesWithDomain(vf.createURI("urn:D1"));
        final Set<URI> hasDomainD2 = inferenceEngine.getPropertiesWithDomain(vf.createURI("urn:D2"));
        final Set<URI> hasDomainD3 = inferenceEngine.getPropertiesWithDomain(vf.createURI("urn:D3"));
        final Set<URI> hasRangeD1 = inferenceEngine.getPropertiesWithRange(vf.createURI("urn:D1"));
        final Set<URI> hasRangeD2 = inferenceEngine.getPropertiesWithRange(vf.createURI("urn:D2"));
        final Set<URI> hasRangeD3 = inferenceEngine.getPropertiesWithRange(vf.createURI("urn:D3"));
        final Set<URI> hasDomainR1 = inferenceEngine.getPropertiesWithDomain(vf.createURI("urn:R1"));
        final Set<URI> hasDomainR2 = inferenceEngine.getPropertiesWithDomain(vf.createURI("urn:R2"));
        final Set<URI> hasDomainR3 = inferenceEngine.getPropertiesWithDomain(vf.createURI("urn:R3"));
        final Set<URI> hasRangeR1 = inferenceEngine.getPropertiesWithRange(vf.createURI("urn:R1"));
        final Set<URI> hasRangeR2 = inferenceEngine.getPropertiesWithRange(vf.createURI("urn:R2"));
        final Set<URI> hasRangeR3 = inferenceEngine.getPropertiesWithRange(vf.createURI("urn:R3"));
        final Set<URI> empty = new HashSet<>();
        // Forward properties inherit p2's domain/range through the subproperty chain.
        final Set<URI> expectedForward = new HashSet<>();
        expectedForward.add(vf.createURI("urn:p2"));
        expectedForward.add(vf.createURI("urn:p1"));
        expectedForward.add(vf.createURI("urn:q2"));
        expectedForward.add(vf.createURI("urn:q1"));
        // Inverse properties get the swapped domain/range.
        final Set<URI> expectedInverse = new HashSet<>();
        expectedInverse.add(vf.createURI("urn:i1"));
        expectedInverse.add(vf.createURI("urn:i2"));
        expectedInverse.add(vf.createURI("urn:j1"));
        expectedInverse.add(vf.createURI("urn:j2"));
        // Subclasses of the declared domain/range don't qualify...
        Assert.assertEquals(empty, hasDomainD1);
        Assert.assertEquals(empty, hasRangeD1);
        Assert.assertEquals(empty, hasDomainR1);
        Assert.assertEquals(empty, hasRangeR1);
        // ...but the declared classes and their superclasses do.
        Assert.assertEquals(expectedForward, hasDomainD2);
        Assert.assertEquals(expectedInverse, hasRangeD2);
        Assert.assertEquals(expectedInverse, hasDomainR2);
        Assert.assertEquals(expectedForward, hasRangeR2);
        Assert.assertEquals(expectedForward, hasDomainD3);
        Assert.assertEquals(expectedInverse, hasRangeD3);
        Assert.assertEquals(expectedInverse, hasDomainR3);
        Assert.assertEquals(expectedForward, hasRangeR3);
    }

    /** Verifies owl:someValuesFrom restriction lookup, including sub-class/-property expansion. */
    @Test
    public void testSomeValuesFrom() throws Exception {
        final String insert = "INSERT DATA { GRAPH <http://updated/test> {\n"
                // base restrictions
                + " <urn:Chair> owl:onProperty <urn:headOf> ; owl:someValuesFrom <urn:Department> .\n"
                + " <urn:Dean> owl:onProperty <urn:headOf> ; owl:someValuesFrom <urn:College> .\n"
                // classes related to the restriction type
                + " <urn:ScienceDepartmentChair> rdfs:subClassOf <urn:Chair> .\n"
                + " <urn:Chair> rdfs:subClassOf <urn:Person> .\n"
                + " <urn:Dean> rdfs:subClassOf <urn:Person> .\n"
                + " <urn:Student> rdfs:subClassOf <urn:Person> .\n"
                // classes related to the value type
                + " <urn:ScienceDepartment> rdfs:subClassOf <urn:Department> .\n"
                + " <urn:HumanitiesDepartment> rdfs:subClassOf <urn:Department> .\n"
                + " <urn:Department> rdfs:subClassOf <urn:Organization> .\n"
                + " <urn:College> rdfs:subClassOf <urn:Organization> .\n"
                // properties related to the restriction property
                + " <urn:temporaryHeadOf> rdfs:subPropertyOf <urn:headOf> .\n"
                + " <urn:headOf> rdfs:subPropertyOf <urn:worksFor> .\n"
                + "}}";
        conn.prepareUpdate(QueryLanguage.SPARQL, insert).execute();
        inferenceEngine.refreshGraph();
        final Set<URI> properties = new HashSet<>();
        properties.add(vf.createURI("urn:headOf"));
        properties.add(vf.createURI("urn:temporaryHeadOf"));
        // Expected value-type -> properties maps per restriction class.
        final Map<Resource, Set<URI>> chairDerivations = new HashMap<>();
        chairDerivations.put(vf.createURI("urn:Department"), properties);
        chairDerivations.put(vf.createURI("urn:ScienceDepartment"), properties);
        chairDerivations.put(vf.createURI("urn:HumanitiesDepartment"), properties);
        final Map<Resource, Set<URI>> deanDerivations = new HashMap<>();
        deanDerivations.put(vf.createURI("urn:College"), properties);
        final Map<Resource, Set<URI>> combinedDerivations = new HashMap<>(chairDerivations);
        combinedDerivations.put(vf.createURI("urn:College"), properties);
        // Get someValuesFrom restrictions given the direct types
        Assert.assertEquals(deanDerivations, inferenceEngine.getSomeValuesFromByRestrictionType(vf.createURI("urn:Dean")));
        Assert.assertEquals(chairDerivations, inferenceEngine.getSomeValuesFromByRestrictionType(vf.createURI("urn:Chair")));
        // Finds the subtype's restrictions given the supertype
        Assert.assertEquals(combinedDerivations, inferenceEngine.getSomeValuesFromByRestrictionType(vf.createURI("urn:Person")));
        // Finds nothing if given a subtype which is not a restriction
        Assert.assertEquals(new HashMap<>(), inferenceEngine.getSomeValuesFromByRestrictionType(vf.createURI("urn:ScienceDepartmentChair")));
    }

    /** Verifies owl:allValuesFrom restriction lookup. NOTE(review): method continues past this chunk. */
    @Test
    public void testAllValuesFrom() throws Exception {
        final String insert = "INSERT DATA { GRAPH <http://updated/test> {\n"
                + " <urn:Dog> owl:onProperty <urn:relative> ; owl:allValuesFrom <urn:Dog> .\n"
                + " <urn:Retriever> rdfs:subClassOf <urn:Dog> .\n"
                + " <urn:Terrier> rdfs:subClassOf <urn:Dog> .\n"
                + " <urn:Terrier> owl:onProperty <urn:relative> ; owl:allValuesFrom <urn:Terrier> .\n"
                + " <urn:Cairn_Terrier> rdfs:subClassOf <urn:Terrier> .\n"
                + " <urn:parent> rdfs:subPropertyOf <urn:relative> .\n"
                + " <urn:Dog> rdfs:subClassOf <urn:Mammal> .\n"
                + " <urn:Person> rdfs:subClassOf <urn:Mammal> .\n"
                + " <urn:Person> owl:onProperty <urn:relative> ; owl:allValuesFrom <urn:Person> .\n"
                + "}}";
        conn.prepareUpdate(QueryLanguage.SPARQL, insert).execute();
inferenceEngine.refreshGraph(); final Map<Resource, Set<URI>> restrictionsImplyingTerrier = new HashMap<>(); final Set<URI> properties = new HashSet<>(); properties.add(vf.createURI("urn:parent")); properties.add(vf.createURI("urn:relative")); restrictionsImplyingTerrier.put(vf.createURI("urn:Terrier"), properties); restrictionsImplyingTerrier.put(vf.createURI("urn:Cairn_Terrier"), properties); Assert.assertEquals(restrictionsImplyingTerrier, inferenceEngine.getAllValuesFromByValueType(vf.createURI("urn:Terrier"))); final Map<Resource, Set<URI>> restrictionsImplyingDog = new HashMap<>(restrictionsImplyingTerrier); restrictionsImplyingDog.put(vf.createURI("urn:Dog"), properties); restrictionsImplyingDog.put(vf.createURI("urn:Retriever"), properties); Assert.assertEquals(restrictionsImplyingDog, inferenceEngine.getAllValuesFromByValueType(vf.createURI("urn:Dog"))); final Map<Resource, Set<URI>> restrictionsImplyingMammal = new HashMap<>(restrictionsImplyingDog); restrictionsImplyingMammal.put(vf.createURI("urn:Person"), properties); Assert.assertEquals(restrictionsImplyingMammal, inferenceEngine.getAllValuesFromByValueType(vf.createURI("urn:Mammal"))); } @Test public void testHasValueGivenProperty() throws Exception { final String insert = "INSERT DATA { GRAPH <http://updated/test> {\n" + " <urn:Biped> owl:onProperty <urn:walksUsingLegs> . \n" + " <urn:Biped> owl:hasValue \"2\" . \n" + " <urn:Mammal> owl:onProperty <urn:taxon> . \n" + " <urn:Mammal> owl:hasValue <urn:Mammalia> . \n" + " <urn:Vertebrate> owl:onProperty <urn:taxon> . \n" + " <urn:Vertebrate> owl:hasValue <urn:Vertebrata> . \n" + " <urn:Tunicate> owl:onProperty <urn:taxon> . \n" + " <urn:Tunicate> owl:hasValue <urn:Tunicata> . \n" + " <urn:Mammal> rdfs:subClassOf <urn:Vertebrate> . \n" + " <urn:Vertebrate> rdfs:subClassOf <urn:Animal> . \n" + " <urn:Tunicate> rdfs:subClassOf <urn:Animal> . \n" + " <urn:Biped> rdfs:subClassOf <urn:Animal> . 
\n" + "}}"; conn.prepareUpdate(QueryLanguage.SPARQL, insert).execute(); inferenceEngine.refreshGraph(); final Map<Resource, Set<Value>> typeToValueImplications = new HashMap<>(); final Set<Value> vertebrateTaxa = new HashSet<>(); final Set<Value> tunicateTaxa = new HashSet<>(); vertebrateTaxa.add(vf.createURI("urn:Vertebrata")); tunicateTaxa.add(vf.createURI("urn:Tunicata")); final Set<Value> mammalTaxa = new HashSet<>(vertebrateTaxa); mammalTaxa.add(vf.createURI("urn:Mammalia")); typeToValueImplications.put(vf.createURI("urn:Vertebrate"), vertebrateTaxa); typeToValueImplications.put(vf.createURI("urn:Tunicate"), tunicateTaxa); typeToValueImplications.put(vf.createURI("urn:Mammal"), mammalTaxa); Assert.assertEquals(typeToValueImplications, inferenceEngine.getHasValueByProperty(vf.createURI("urn:taxon"))); } @Test public void testHasValueGivenType() throws Exception { final String insert = "INSERT DATA { GRAPH <http://updated/test> {\n" + " <urn:Biped> owl:onProperty <urn:walksUsingLegs> . \n" + " <urn:Biped> owl:hasValue \"2\" . \n" + " <urn:Mammal> owl:onProperty <urn:taxon> . \n" + " <urn:Mammal> owl:hasValue <urn:Mammalia> . \n" + " <urn:Vertebrate> owl:onProperty <urn:taxon> . \n" + " <urn:Vertebrate> owl:hasValue <urn:Vertebrata> . \n" + " <urn:Tunicate> owl:onProperty <urn:taxon> . \n" + " <urn:Tunicate> owl:hasValue <urn:Tunicata> . \n" + " <urn:Plant> owl:onProperty <urn:taxon> . \n" + " <urn:Plant> owl:hasValue <urn:Plantae> . \n" + " <urn:Mammal> rdfs:subClassOf <urn:Vertebrate> . \n" + " <urn:Vertebrate> rdfs:subClassOf <urn:Animal> . \n" + " <urn:Tunicate> rdfs:subClassOf <urn:Animal> . \n" + " <urn:Biped> rdfs:subClassOf <urn:Animal> . 
\n" + "}}"; conn.prepareUpdate(QueryLanguage.SPARQL, insert).execute(); inferenceEngine.refreshGraph(); final URI legs = vf.createURI("urn:walksUsingLegs"); final URI taxon = vf.createURI("urn:taxon"); // Verify direct restrictions: final Map<URI, Set<Value>> valuesImplyingBiped = new HashMap<>(); valuesImplyingBiped.put(legs, new HashSet<>()); valuesImplyingBiped.get(legs).add(vf.createLiteral("2")); Assert.assertEquals(valuesImplyingBiped, inferenceEngine.getHasValueByType(vf.createURI("urn:Biped"))); final Map<URI, Set<Value>> valuesImplyingMammal = new HashMap<>(); valuesImplyingMammal.put(taxon, new HashSet<>()); valuesImplyingMammal.get(taxon).add(vf.createURI("urn:Mammalia")); Assert.assertEquals(valuesImplyingMammal, inferenceEngine.getHasValueByType(vf.createURI("urn:Mammal"))); final Map<URI, Set<Value>> valuesImplyingTunicate = new HashMap<>(); valuesImplyingTunicate.put(taxon, new HashSet<>()); valuesImplyingTunicate.get(taxon).add(vf.createURI("urn:Tunicata")); Assert.assertEquals(valuesImplyingTunicate, inferenceEngine.getHasValueByType(vf.createURI("urn:Tunicate"))); final Map<URI, Set<Value>> valuesImplyingPlant = new HashMap<>(); valuesImplyingPlant.put(taxon, new HashSet<>()); valuesImplyingPlant.get(taxon).add(vf.createURI("urn:Plantae")); Assert.assertEquals(valuesImplyingPlant, inferenceEngine.getHasValueByType(vf.createURI("urn:Plant"))); // Verify indirect restrictions given a supertype, including multiple properties where relevant: final Map<URI, Set<Value>> valuesImplyingVertebrate = new HashMap<>(); valuesImplyingVertebrate.put(taxon, new HashSet<>(valuesImplyingMammal.get(taxon))); valuesImplyingVertebrate.get(taxon).add(vf.createURI("urn:Vertebrata")); Assert.assertEquals(valuesImplyingVertebrate, inferenceEngine.getHasValueByType(vf.createURI("urn:Vertebrate"))); final Map<URI, Set<Value>> valuesImplyingAnimal = new HashMap<>(); valuesImplyingAnimal.put(legs, valuesImplyingBiped.get(legs)); valuesImplyingAnimal.put(taxon, new 
HashSet<>(valuesImplyingVertebrate.get(taxon))); valuesImplyingAnimal.get(taxon).addAll(valuesImplyingTunicate.get(taxon)); Assert.assertEquals(valuesImplyingAnimal, inferenceEngine.getHasValueByType(vf.createURI("urn:Animal"))); } @Test public void testUnionOf() throws Exception { final String ontology = "INSERT DATA { GRAPH <http://updated/test> {\n" + " <urn:A> owl:unionOf <urn:list1> . \n" + " <urn:B> owl:unionOf <urn:list2> . \n" + " <urn:list1> rdf:first <urn:X> . \n" + " <urn:list1> rdf:rest <urn:list2> . \n" + " <urn:list2> rdf:first <urn:Y> . \n" + " <urn:list2> rdf:rest <urn:list3> . \n" + " <urn:list3> rdf:first <urn:Z> . \n" + " <urn:Y> rdfs:subClassOf <urn:SuperY> . \n" + " <urn:SubY> rdfs:subClassOf <urn:Y> . \n" + "}}"; conn.prepareUpdate(QueryLanguage.SPARQL, ontology).execute(); inferenceEngine.refreshGraph(); final Set<URI> subClassesA = inferenceEngine.getSubClasses(vf.createURI("urn:A")); final Set<URI> subClassesB = inferenceEngine.getSubClasses(vf.createURI("urn:B")); final Set<URI> expectedA = new HashSet<>(); final Set<URI> expectedB = new HashSet<>(); expectedB.add(vf.createURI("urn:Y")); expectedB.add(vf.createURI("urn:SubY")); expectedB.add(vf.createURI("urn:Z")); expectedA.addAll(expectedB); expectedA.add(vf.createURI("urn:X")); Assert.assertEquals(expectedA, subClassesA); Assert.assertEquals(expectedB, subClassesB); } public void testIntersectionOf() throws Exception { final String ontology = "INSERT DATA { GRAPH <http://updated/test> {\n" + " <urn:Mother> owl:intersectionOf _:bnode1 . \n" + " _:bnode1 rdf:first <urn:Woman> . \n" + " _:bnode1 rdf:rest _:bnode2 . \n" + " _:bnode2 rdf:first <urn:Parent> . \n" + " _:bnode2 rdf:rest rdf:nil . \n" + " <urn:Father> owl:intersectionOf _:bnode3 . \n" + " _:bnode3 rdf:first <urn:Man> . \n" + " _:bnode3 rdf:rest _:bnode4 . \n" + " _:bnode4 rdf:first <urn:Parent> . \n" + " _:bnode4 rdf:rest rdf:nil . \n" + " <urn:Mom> owl:intersectionOf _:bnode5 . \n" + " _:bnode5 rdf:first <urn:Woman> . 
\n" + " _:bnode5 rdf:rest _:bnode6 . \n" + " _:bnode6 rdf:first <urn:Parent> . \n" + " _:bnode6 rdf:rest rdf:nil . \n" + " <urn:Mother> rdfs:subClassOf <urn:ImmediateFamilyMember> . \n" + " <urn:ImmediateFamilyMember> rdfs:subClassOf <urn:Relative> . \n" + "}}"; conn.prepareUpdate(QueryLanguage.SPARQL, ontology).execute(); inferenceEngine.refreshGraph(); final URI mother = vf.createURI("urn:Mother"); final URI father = vf.createURI("urn:Father"); final URI woman = vf.createURI("urn:Woman"); final URI parent = vf.createURI("urn:Parent"); final URI man = vf.createURI("urn:Man"); final URI mom = vf.createURI("urn:Mom"); final URI immediateFamilyMember = vf.createURI("urn:ImmediateFamilyMember"); final URI relative = vf.createURI("urn:Relative"); final List<Set<Resource>> intersectionsImplyingMother = Arrays.asList(Sets.newHashSet(woman, parent)); Assert.assertEquals(intersectionsImplyingMother, inferenceEngine.getIntersectionsImplying(mother)); final List<Set<Resource>> intersectionsImplyingFather = Arrays.asList(Sets.newHashSet(man, parent)); Assert.assertEquals(intersectionsImplyingFather, inferenceEngine.getIntersectionsImplying(father)); // Check that Mother is a subclassOf Parent and Woman and // ImmediateFamilyMember and Relative. Also, Mother is a subclassOf // Mother and Mom through inferring equivalentClass. 
final Set<URI> motherSuperClassUris = inferenceEngine.getSuperClasses(mother); Assert.assertNotNull(motherSuperClassUris); Assert.assertEquals(6, motherSuperClassUris.size()); Assert.assertTrue(motherSuperClassUris.contains(parent)); Assert.assertTrue(motherSuperClassUris.contains(woman)); Assert.assertTrue(motherSuperClassUris.contains(immediateFamilyMember)); Assert.assertTrue(motherSuperClassUris.contains(relative)); Assert.assertTrue(motherSuperClassUris.contains(mother)); Assert.assertTrue(motherSuperClassUris.contains(mom)); // Check that Father is a subclassOf Parent and Man final Set<URI> fatherSuperClassUris = inferenceEngine.getSuperClasses(father); Assert.assertNotNull(fatherSuperClassUris); Assert.assertEquals(2, fatherSuperClassUris.size()); Assert.assertTrue(fatherSuperClassUris.contains(parent)); Assert.assertTrue(fatherSuperClassUris.contains(man)); // Check that Mom is a subclassOf Parent and Woman and // ImmediateFamilyMember and Relative. The last 2 should be inferred // from having the same intersection as Mother. Also, Mom is a // subclassOf Mother and Mom through inferring equivalentClass. final Set<URI> momSuperClassUris = inferenceEngine.getSuperClasses(mom); Assert.assertNotNull(momSuperClassUris); Assert.assertEquals(6, momSuperClassUris.size()); Assert.assertTrue(momSuperClassUris.contains(parent)); Assert.assertTrue(momSuperClassUris.contains(woman)); Assert.assertTrue(momSuperClassUris.contains(immediateFamilyMember)); Assert.assertTrue(momSuperClassUris.contains(relative)); Assert.assertTrue(momSuperClassUris.contains(mother)); Assert.assertTrue(momSuperClassUris.contains(mom)); } @Test public void testOneOf() throws Exception { final String ontology = "INSERT DATA { GRAPH <http://updated/test> {\n" + " <urn:Suits> owl:oneOf _:bnodeS1 . \n" + " _:bnodeS1 rdf:first <urn:Clubs> . \n" + " _:bnodeS1 rdf:rest _:bnodeS2 . \n" + " _:bnodeS2 rdf:first <urn:Diamonds> . \n" + " _:bnodeS2 rdf:rest _:bnodeS3 . 
\n" + " _:bnodeS3 rdf:first <urn:Hearts> . \n" + " _:bnodeS3 rdf:rest _:bnodeS4 . \n" + " _:bnodeS4 rdf:first <urn:Spades> . \n" + " _:bnodeS4 rdf:rest rdf:nil . \n" + " <urn:Ranks> owl:oneOf _:bnodeR1 . \n" + " _:bnodeR1 rdf:first <urn:Ace> . \n" + " _:bnodeR1 rdf:rest _:bnodeR2 . \n" + " _:bnodeR2 rdf:first <urn:2> . \n" + " _:bnodeR2 rdf:rest _:bnodeR3 . \n" + " _:bnodeR3 rdf:first <urn:3> . \n" + " _:bnodeR3 rdf:rest _:bnodeR4 . \n" + " _:bnodeR4 rdf:first <urn:4> . \n" + " _:bnodeR4 rdf:rest _:bnodeR5 . \n" + " _:bnodeR5 rdf:first <urn:5> . \n" + " _:bnodeR5 rdf:rest _:bnodeR6 . \n" + " _:bnodeR6 rdf:first <urn:6> . \n" + " _:bnodeR6 rdf:rest _:bnodeR7 . \n" + " _:bnodeR7 rdf:first <urn:7> . \n" + " _:bnodeR7 rdf:rest _:bnodeR8 . \n" + " _:bnodeR8 rdf:first <urn:8> . \n" + " _:bnodeR8 rdf:rest _:bnodeR9 . \n" + " _:bnodeR9 rdf:first <urn:9> . \n" + " _:bnodeR9 rdf:rest _:bnodeR10 . \n" + " _:bnodeR10 rdf:first <urn:10> . \n" + " _:bnodeR10 rdf:rest _:bnodeR11 . \n" + " _:bnodeR11 rdf:first <urn:Jack> . \n" + " _:bnodeR11 rdf:rest _:bnodeR12 . \n" + " _:bnodeR12 rdf:first <urn:Queen> . \n" + " _:bnodeR12 rdf:rest _:bnodeR13 . \n" + " _:bnodeR13 rdf:first <urn:King> . \n" + " _:bnodeR13 rdf:rest rdf:nil . 
\n" + "}}"; conn.prepareUpdate(QueryLanguage.SPARQL, ontology).execute(); inferenceEngine.refreshGraph(); final URI suits = vf.createURI("urn:Suits"); final URI ranks = vf.createURI("urn:Ranks"); final URI clubs = vf.createURI("urn:Clubs"); final URI diamonds = vf.createURI("urn:Diamonds"); final URI hearts = vf.createURI("urn:Hearts"); final URI spades = vf.createURI("urn:Spades"); final URI ace = vf.createURI("urn:Ace"); final URI two = vf.createURI("urn:2"); final URI three = vf.createURI("urn:3"); final URI four = vf.createURI("urn:4"); final URI five = vf.createURI("urn:5"); final URI six = vf.createURI("urn:6"); final URI seven = vf.createURI("urn:7"); final URI eight = vf.createURI("urn:8"); final URI nine = vf.createURI("urn:9"); final URI ten = vf.createURI("urn:10"); final URI jack = vf.createURI("urn:Jack"); final URI queen = vf.createURI("urn:Queen"); final URI king = vf.createURI("urn:King"); final URI joker = vf.createURI("urn:Joker"); final boolean isJokerEnumeratedType = inferenceEngine.isEnumeratedType(joker); Assert.assertFalse(isJokerEnumeratedType); final boolean isSuitsEnumeratedType = inferenceEngine.isEnumeratedType(suits); Assert.assertTrue(isSuitsEnumeratedType); final Set<Resource> enumerationImplyingSuits = Sets.newHashSet(clubs, diamonds, hearts, spades); final Set<Resource> actualCardSuits = inferenceEngine.getEnumeration(suits); Assert.assertEquals(enumerationImplyingSuits, actualCardSuits); final boolean isRanksEnumeratedType = inferenceEngine.isEnumeratedType(ranks); Assert.assertTrue(isRanksEnumeratedType); final Set<Resource> enumerationImplyingRanks = Sets.newHashSet(ace, two, three, four, five, six, seven, eight, nine, ten, jack, queen, king); final Set<Resource> actualCardRanks = inferenceEngine.getEnumeration(ranks); Assert.assertEquals(enumerationImplyingRanks, actualCardRanks); } @Test public void hasSelfTest() throws Exception { final String ontology = "INSERT DATA { GRAPH <http://updated/test> {\n" + " <urn:Narcissist> 
owl:onProperty <urn:love> ; owl:hasSelf \"true\" . \n" + "}}"; conn.prepareUpdate(QueryLanguage.SPARQL, ontology).execute(); inferenceEngine.refreshGraph(); final Set<Resource> expectedTypes = new HashSet<>(); expectedTypes.add(vf.createURI("urn:Narcissist")); Assert.assertEquals(expectedTypes, inferenceEngine.getHasSelfImplyingProperty(vf.createURI("urn:love"))); final Set<URI> expectedProperties = new HashSet<>(); expectedProperties.add(vf.createURI("urn:love")); Assert.assertEquals(expectedProperties, inferenceEngine.getHasSelfImplyingType(vf.createURI("urn:Narcissist"))); } @Test public void testPropertyTypes() throws Exception { final String ontology = "INSERT DATA { GRAPH <http://updated/test> {\n" + " <urn:comment> a owl:AnnotationProperty .\n" + " <urn:olderThan> a owl:TransitiveProperty, owl:IrreflexiveProperty, owl:AsymmetricProperty .\n" + " <urn:notYoungerThan> a owl:TransitiveProperty, owl:ReflexiveProperty .\n" + " <urn:related> a owl:Property, owl:SymmetricProperty, owl:TransitiveProperty .\n" + " <urn:knows> a owl:SymmetricProperty, owl:ObjectProperty, owl:ReflexiveProperty .\n" + " <urn:sameAgeAs> a owl:SymmetricProperty, owl:ReflexiveProperty, owl:TransitiveProperty .\n" + "}}"; conn.prepareUpdate(QueryLanguage.SPARQL, ontology).execute(); inferenceEngine.refreshGraph(); final URI comment = vf.createURI("urn:comment"); // none of the three supported types final URI older = vf.createURI("urn:olderThan"); // transitive only final URI notYounger = vf.createURI("urn:notYoungerThan"); // transitive and reflexive final URI related = vf.createURI("urn:related"); // transitive and symmetric final URI knows = vf.createURI("urn:knows"); // reflexive and symmetric final URI sameAge = vf.createURI("urn:sameAgeAs"); // all three // symmetry Assert.assertFalse(inferenceEngine.isSymmetricProperty(comment)); Assert.assertFalse(inferenceEngine.isSymmetricProperty(older)); Assert.assertFalse(inferenceEngine.isSymmetricProperty(notYounger)); 
Assert.assertTrue(inferenceEngine.isSymmetricProperty(related)); Assert.assertTrue(inferenceEngine.isSymmetricProperty(knows)); Assert.assertTrue(inferenceEngine.isSymmetricProperty(sameAge)); // transitivity Assert.assertFalse(inferenceEngine.isTransitiveProperty(comment)); Assert.assertTrue(inferenceEngine.isTransitiveProperty(older)); Assert.assertTrue(inferenceEngine.isTransitiveProperty(notYounger)); Assert.assertTrue(inferenceEngine.isTransitiveProperty(related)); Assert.assertFalse(inferenceEngine.isTransitiveProperty(knows)); Assert.assertTrue(inferenceEngine.isTransitiveProperty(sameAge)); // reflexivity Assert.assertFalse(inferenceEngine.isReflexiveProperty(comment)); Assert.assertFalse(inferenceEngine.isReflexiveProperty(older)); Assert.assertTrue(inferenceEngine.isReflexiveProperty(notYounger)); Assert.assertFalse(inferenceEngine.isReflexiveProperty(related)); Assert.assertTrue(inferenceEngine.isReflexiveProperty(knows)); Assert.assertTrue(inferenceEngine.isReflexiveProperty(sameAge)); } }
/*
 * Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you
 * may not use this file except in compliance with the License. You
 * may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied. See the License for the specific language governing
 * permissions and limitations under the License. See accompanying
 * LICENSE file.
 */
package management.util;

import hydra.Log;
import hydra.Prms;
import hydra.TestConfig;

import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.io.Writer;
import java.lang.management.ManagementFactory;
import java.lang.management.ThreadInfo;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Random;
import java.util.Set;

import javax.management.Attribute;
import javax.management.AttributeList;
import javax.management.openmbean.CompositeData;
import javax.management.openmbean.CompositeType;
import javax.management.openmbean.TabularDataSupport;

import management.jmx.JMXPrms;
import util.TestException;
import util.TestHelper;

import com.gemstone.gemfire.LogWriter;
import com.gemstone.gemfire.cache.query.CqEvent;
import com.gemstone.gemfire.management.internal.ManagementConstants;
import com.gemstone.gemfire.management.internal.cli.json.GfJsonException;
import com.gemstone.gemfire.management.internal.cli.result.CommandResult;

/**
 * Miscellaneous helpers for hydra-driven JMX/gfsh tests: logging shortcuts,
 * human-readable dumps of JMX values, random-selection utilities, sleep
 * helpers and name generators.
 */
@SuppressWarnings({"unchecked", "rawtypes"})
public class HydraUtil {

  public static final String NEW_LINE = System.getProperty("line.separator");
  public static final String TAB = "\t";

  /** Creates the standalone hydra log writer used when running outside a full test. */
  public static void createLogWriter() {
    Log.createLogWriter("hydrautil", "fine");
  }

  /** Logs {@code message} at info level. */
  public static void logInfo(String message) {
    LogWriter logger = Log.getLogWriter();
    logger.info(message);
  }

  /** Logs {@code message} at fine level. */
  public static void logFine(String message) {
    LogWriter logger = Log.getLogWriter();
    logger.fine(message);
  }

  /** Logs {@code message} at finest level. */
  public static void logFinest(String message) {
    LogWriter logger = Log.getLogWriter();
    logger.finest(message);
  }

  /** Logs {@code message} at error level. */
  public static void logError(String message) {
    LogWriter logger = Log.getLogWriter();
    logger.error(message);
  }

  /** Logs {@code message} and {@code e} at error level. */
  public static void logError(String message, Throwable e) {
    LogWriter logger = Log.getLogWriter();
    logger.error(message, e);
  }

  /** Logs the error, then fails the test by throwing a {@link TestException}. */
  public static void logErrorAndRaiseException(String message, Throwable e) throws TestException {
    LogWriter logger = Log.getLogWriter();
    logger.error(message, e);
    throw new TestException(message, e);
  }

  /** Logs the error, then fails the test by throwing a {@link TestException}. */
  public static void logErrorAndRaiseException(String string) {
    logError(string);
    throw new TestException(string);
  }

  /** Logs {@code message} and {@code e} at info level. */
  public static void logInfo(String message, Throwable e) {
    LogWriter logger = Log.getLogWriter();
    logger.info(message, e);
  }

  /**
   * Renders an arbitrary value as a readable string, with special handling for
   * arrays (object and primitive), {@link CommandResult}, JMX attribute lists,
   * maps, CQ events, tabular/composite open-mbean data and lists. Returns
   * {@code "<<NULL>>"} for null.
   */
  public static String ObjectToString(Object object) {
    if (object == null)
      return "<<NULL>>";
    if (object.getClass().isArray()) {
      // Class.toString() yields "int", "long", ... for primitive component types.
      String type = object.getClass().getComponentType().toString();
      if (!isPrimitive(type)) {
        StringBuilder sb = new StringBuilder();
        sb.append("Array [");
        Object a[] = (Object[]) object;
        for (Object aa : a)
          sb.append(ObjectToString(aa)).append(", ");
        sb.append("]");
        return sb.toString();
      } else {
        // primitive
        return handlePrimitiveArray(object, type);
      }
    } else if (object instanceof CommandResult) {
      return handleCommandResult((CommandResult) object);
    } else if (object instanceof AttributeList) {
      AttributeList list = (AttributeList) object;
      StringBuilder sb = new StringBuilder();
      sb.append(" JMXAttributeList [").append(NEW_LINE);
      for (Object a : list) {
        Attribute attr = (Attribute) a;
        Object key = attr.getName();
        Object value = attr.getValue();
        sb.append(TAB).append("[").append("Attribute:").append(ObjectToString(key))
            .append(" Value:").append(ObjectToString(value)).append("]").append(NEW_LINE);
      }
      sb.append("]");
      return sb.toString();
    } else if (object instanceof Map) {
      Map map = (Map) object;
      StringBuilder sb = new StringBuilder();
      sb.append(" Map [").append(NEW_LINE);
      Set<Entry> set = map.entrySet();
      for (Map.Entry e : set) {
        Object key = e.getKey();
        Object value = e.getValue();
        sb.append(TAB).append("[").append("K:").append(ObjectToString(key))
            .append(" V:").append(ObjectToString(value)).append("]").append(NEW_LINE);
      }
      sb.append("]");
      return sb.toString();
    } else if (object instanceof CqEvent) {
      CqEvent event = (CqEvent) object;
      StringBuilder sb = new StringBuilder();
      sb.append("CqEvent [");
      sb.append(" QueryName=<").append(event.getCq().getName());
      sb.append("> BaseOperation=").append(event.getBaseOperation());
      sb.append(" QueryOperation=").append(event.getQueryOperation());
      sb.append(" Key=").append(event.getKey());
      sb.append(" NewValue=").append(event.getNewValue());
      sb.append(" ]");
      return sb.toString();
    } else if (object instanceof TabularDataSupport) {
      // TabularDataSupport implements Map; reuse the map rendering.
      Map map = (Map) object;
      return ObjectToString(map);
    } else if (object instanceof CompositeData) {
      StringBuilder sb = new StringBuilder();
      CompositeData data = (CompositeData) object;
      CompositeType type = data.getCompositeType();
      Set<String> keys = type.keySet();
      sb.append("CompositeData type - " + type.getTypeName()).append(NEW_LINE)
          .append(" description " + type.getDescription()).append(NEW_LINE)
          .append(" data ").append(NEW_LINE);
      for (String key : keys) {
        if (data.containsKey(key)) {
          sb.append(TAB).append("[ K: " + key).append(" V :" + ObjectToString(data.get(key))).append("]").append(NEW_LINE);
        }
      }
      sb.append(NEW_LINE);
      return sb.toString();
    } else if (object instanceof List) { // FIX: was "}if (...)" — restored the else-if chain
      List list = (List) object;
      StringBuilder sb = new StringBuilder();
      sb.append(" List [").append(NEW_LINE);
      for (Object a : list) {
        sb.append(TAB).append(ObjectToString(a)).append(NEW_LINE);
      }
      sb.append("]");
      return sb.toString();
    } else
      return object.toString();
  }

  /** Returns true if {@code type} names a primitive component type we can render. */
  private static boolean isPrimitive(String type) {
    // FIX: added "long" and "short"; previously long[]/short[] fell through to the
    // Object[] cast in ObjectToString and threw ClassCastException.
    String primitiveTypes[] = { "int", "long", "short", "double", "byte", "boolean", "char", "float" };
    for (String s : primitiveTypes)
      if (s.equals(type))
        return true;
    return false;
  }

  /** Renders a gfsh {@link CommandResult}: status plus indented raw JSON. */
  private static String handleCommandResult(CommandResult object) {
    StringBuilder sb = new StringBuilder();
    sb.append(" Status : " + object.getStatus()).append(NEW_LINE);
    try {
      sb.append(" Raw JSON : " + object.getContent().toIndentedString(5));
      sb.append(NEW_LINE);
    } catch (GfJsonException e) {
      sb.append(" Error getting raw JSON : " + e.getMessage());
      logError(" Error getting raw JSON : ", e);
    }
    return sb.toString();
  }

  /** Renders a primitive array of the given component type. */
  private static String handlePrimitiveArray(Object object, String type) {
    StringBuilder sb = new StringBuilder();
    if ("int".equals(type)) {
      int a[] = (int[]) object;
      sb.append(" intArray[");
      for (int i : a) {
        sb.append(i).append(",");
      }
      sb.append("]");
    } else if ("long".equals(type)) { // FIX: long[] was previously unhandled
      long a[] = (long[]) object;
      sb.append(" longArray[");
      for (long i : a) {
        sb.append(i).append(",");
      }
      sb.append("]");
    } else if ("short".equals(type)) { // FIX: short[] was previously unhandled
      short a[] = (short[]) object;
      sb.append(" shortArray[");
      for (short i : a) {
        sb.append(i).append(",");
      }
      sb.append("]");
    } else if ("double".equals(type)) {
      double a[] = (double[]) object;
      sb.append(" doubleArray[");
      for (double i : a) {
        sb.append(i).append(",");
      }
      sb.append("]");
    } else if ("byte".equals(type)) {
      byte a[] = (byte[]) object;
      sb.append(" byteArray[");
      for (byte i : a) {
        sb.append(i).append(",");
      }
      sb.append("]");
    } else if ("boolean".equals(type)) {
      boolean a[] = (boolean[]) object;
      sb.append(" booleanArray[");
      for (boolean i : a) {
        sb.append(i).append(",");
      }
      sb.append("]");
    } else if ("char".equals(type)) {
      char a[] = (char[]) object;
      sb.append(" charArray[");
      for (char i : a) {
        sb.append(i).append(",");
      }
      sb.append("]");
    } else if ("float".equals(type)) { // FIX: isPrimitive listed float but no branch existed (returned "")
      float a[] = (float[]) object;
      sb.append(" floatArray[");
      for (float i : a) {
        sb.append(i).append(",");
      }
      sb.append("]");
    }
    return sb.toString();
  }

  /** True when running under hydra with serialExecution disabled. */
  public static boolean isConcurrentTest() {
    if (runninghydra()) {
      return !TestConfig.tab().booleanAt(Prms.serialExecution);
    } else
      return false;
  }

  /** True when running under hydra with serialExecution enabled, or outside hydra. */
  public static boolean isSerialTest() {
    if (runninghydra()) {
      return TestConfig.tab().booleanAt(Prms.serialExecution);
    } else
      return true;
  }

  /** Ad-hoc manual check of {@link #ObjectToString(Object)}. */
  public static void main(String[] args) {
    Object a[] = { "sgjdkfg", 1232, 1234.5454, true };
    System.out.println(ObjectToString(a));
  }

  /**
   * Instantiates {@code klass} via its no-arg constructor, converting any
   * reflection failure into a {@link TestException}.
   */
  public static Object getInstanceOfClass(String klass) {
    try {
      Class cklass = Class.forName(klass);
      Object object;
      object = cklass.newInstance();
      return object;
    } catch (InstantiationException e) {
      throw new TestException(TestHelper.getStackTrace(e));
    } catch (IllegalAccessException e) {
      throw new TestException(TestHelper.getStackTrace(e));
    } catch (ClassNotFoundException e) {
      throw new TestException(TestHelper.getStackTrace(e));
    }
  }

  /** Returns the throwable's stack trace as a string, or {@code "<No StackTrace>"} for null. */
  public static String getStackTraceAsString(Throwable throwable) {
    final Writer result = new StringWriter();
    final PrintWriter printWriter = new PrintWriter(result);
    if (throwable != null)
      throwable.printStackTrace(printWriter);
    else {
      try {
        result.append("<No StackTrace>");
      } catch (IOException e) {
        // ignore its just in-memory writer
      }
    }
    return result.toString();
  }

  // Fallback RNG used when not running under hydra.
  private static Random localRandom = new Random();

  /** Hydra's shared random generator when available, otherwise a local one. */
  private static Random getRandomGen() {
    if (runninghydra())
      return TestConfig.tab().getRandGen();
    else {
      return localRandom;
    }
  }

  /**
   * Best-effort check for a hydra environment.
   * NOTE(review): catches Exception only; a missing hydra jar raises
   * NoClassDefFoundError, which would escape — confirm class path assumptions.
   */
  public static boolean runninghydra() {
    try {
      hydra.TestConfig.tab();
      return true;
    } catch (Exception e) {
      return false;
    }
  }

  /**
   * Returns a uniformly random element of {@code coll}.
   * FIX: the hydra/non-hydra branches were byte-identical (collapsed), and the
   * old {@code nextInt(size - 1)} could never select the last element.
   */
  public static <V> V getRandomElement(List<V> coll) {
    int size = coll.size();
    int randomElement = (size > 1) ? getRandomGen().nextInt(size) : 0;
    return coll.get(randomElement);
  }

  /**
   * Returns a uniformly random element of {@code coll}.
   * FIX: same dead duplication and last-element bias as the List overload.
   */
  public static <V> V getRandomElement(V[] coll) {
    int size = coll.length;
    int randomElement = (size > 1) ? getRandomGen().nextInt(size) : 0;
    return coll[randomElement];
  }

  /**
   * Returns a uniformly random element of {@code coll}, or null if empty.
   * FIX: same dead duplication and last-element bias as the List overload.
   */
  public static <V> V getRandomElement(Set<V> coll) {
    int size = coll.size();
    int randomElement = (size > 1) ? getRandomGen().nextInt(size) : 0;
    int i = 0;
    for (V v : coll) {
      if (i == randomElement)
        return v;
      else
        i++;
    }
    return null;
  }

  /**
   * Returns a uniformly random element of {@code coll}, or null if empty.
   * FIX: same dead duplication and last-element bias as the List overload.
   */
  public static <V> V getRandomElement(Collection<V> coll) {
    int size = coll.size();
    int randomElement = (size > 1) ? getRandomGen().nextInt(size) : 0;
    int i = 0;
    for (V v : coll) {
      if (i == randomElement)
        return v;
      else
        i++;
    }
    return null;
  }

  /**
   * Returns a random int in {@code [0, maxExclusive)}.
   * FIX: previously {@code nextInt(maxExclusive - 1)}, which excluded
   * {@code maxExclusive - 1} and made getnextNonZeroRandomInt(2) loop forever.
   */
  public static int getnextRandomInt(int maxExclusive) {
    return getRandomGen().nextInt(maxExclusive);
  }

  /** Returns a random int in {@code [1, maxExclusive)}. */
  public static int getnextNonZeroRandomInt(int maxExclusive) {
    int randomInt = 0;
    while (randomInt == 0)
      randomInt = getnextRandomInt(maxExclusive);
    return randomInt;
  }

  /** Returns a random boolean from the shared generator. */
  public static boolean getRandomBoolean() {
    return getRandomGen().nextBoolean();
  }

  /** Sleeps long enough for JMX federation to replicate mbean state. */
  public static void sleepForReplicationJMX() {
    try {
      long waitTime = ManagementConstants.REFRESH_TIME * TestConfig.tab().longAt(JMXPrms.sleepTimeFactor);
      Log.getLogWriter().info("Sleeping for " + waitTime + " ms for JMX Replication ");
      Thread.sleep(waitTime);
    } catch (InterruptedException e) {
      // FIX: restore the interrupt flag instead of swallowing it.
      Thread.currentThread().interrupt();
    }
  }

  /** Sleeps {@code time} seconds (scaled by the configured factor) for mbean updates. */
  public static void sleepForDataUpdaterJMX(long time) {
    try {
      long waitTime = time * 1000 * TestConfig.tab().longAt(JMXPrms.sleepTimeFactor);
      Log.getLogWriter().info("Sleeping for " + waitTime + " ms for mbean to update ");
      Thread.sleep(waitTime);
    } catch (InterruptedException e) {
      // FIX: restore the interrupt flag instead of swallowing it.
      Thread.currentThread().interrupt();
    }
  }

  /** Logs a full thread dump (with locks and monitors) of the current JVM. */
  public static void threadDump() {
    java.lang.management.ThreadMXBean mxBean = ManagementFactory.getThreadMXBean();
    ThreadInfo infos[] = mxBean.dumpAllThreads(true, true);
    StringBuilder sb = new StringBuilder();
    for (ThreadInfo info : infos) {
      sb.append(info);
    }
    logInfo(sb.toString());
  }

  /**
   * Builds names of the form {@code prefix_suffix_index} (or
   * {@code prefix_index_suffix} when {@code varyFirst} is false), separated by
   * spaces and optionally commas. The separator is omitted only after the very
   * last name (i*j == m*n holds exactly when i==n and j==m).
   */
  public static String generateNamedDoubleSuffixedNames(String prefix, int n, int m, String s[], boolean varyFirst, boolean useComma) {
    String v = "";
    if (varyFirst) {
      for (int j = 1; j <= m; j++) {
        for (int i = 1; i <= n; i++) {
          v += prefix + "_" + s[i - 1] + "_" + j;
          if (i * j < m * n) {
            if (useComma)
              v += ",";
            v += " ";
          }
        }
      }
    } else {
      for (int i = 1; i <= n; i++) {
        for (int j = 1; j <= m; j++) {
          v += prefix + "_" + i + "_" + s[j - 1];
          if (i * j < m * n) {
            if (useComma)
              v += ",";
            v += " ";
          }
        }
      }
    }
    return v;
  }

  /** Convenience overload: suffixes given as a single {@code "|"}-separated string. */
  public static String generateNamedDoubleSuffixedNames(String prefix, int n, int m, String s, boolean varyFirst, boolean useComma) {
    String array[] = s.split("\\|");
    return generateNamedDoubleSuffixedNames(prefix, n, m, array, varyFirst, useComma);
  }

  /** Returns a shallow copy of {@code map} as a new HashMap. */
  @SuppressWarnings({ "rawtypes", "unchecked" })
  public static Object copyMap(Map map) {
    Map map2 = new HashMap();
    Set<Map.Entry> set = map.entrySet();
    for (Map.Entry e : set) {
      map2.put(e.getKey(), e.getValue());
    }
    return map2;
  }
}
package net.craftstars.general.util; import java.lang.reflect.Field; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import com.ensifera.animosity.craftirc.CommandEndPoint; import com.ensifera.animosity.craftirc.CraftIRC; import org.bukkit.Bukkit; import org.bukkit.command.Command; import org.bukkit.command.CommandMap; import org.bukkit.command.CommandSender; import org.bukkit.command.PluginCommand; import org.bukkit.command.SimpleCommandMap; import org.bukkit.configuration.Configuration; import org.bukkit.craftbukkit.CraftServer; import org.bukkit.plugin.Plugin; import org.bukkit.plugin.PluginDescriptionFile; import net.craftstars.general.General; import net.craftstars.general.command.CommandBase; import net.craftstars.general.text.LanguageText; public final class CommandManager { public static boolean setAliases = false; private static SimpleCommandMap commandMap = null; private static Map<String,Command> knownCommands = null; private static Method register = null; public static String[] compassAliases; public static String[] posAliases; public static HashMap<CommandEndPoint, String> cmdTags = null; private CommandManager() {} public static void setup(Configuration config) { if(setAliases) return; getCommandMap(); if(!config.getKeys(false).contains("aliases")) General.logger.warn(LanguageText.LOG_COMMAND_NO_ALIASES.value()); Plugin chat = Bukkit.getPluginManager().getPlugin("CraftIRC"); boolean foundIRC = isCraftIRC3(chat); PluginDescriptionFile plug = General.plugin.getDescription(); try { Map<String,Map<String,Object>> commands = plug.getCommands(); for(String key : commands.keySet()) { PluginCommand generalCommand = General.plugin.getCommand(key); //General.logger.debug("Registering aliases for command: " + key); try { Class<? 
extends CommandBase> clazz = General.class.getClassLoader() .loadClass("net.craftstars.general.command." + generalCommand.getName() + "Command") .asSubclass(CommandBase.class); CommandBase commandInstance = clazz.getConstructor(General.class, Command.class) .newInstance(General.plugin, generalCommand); generalCommand.setExecutor(commandInstance); if(foundIRC) { if(cmdTags == null) cmdTags = new HashMap<CommandEndPoint, String>(); CraftIRC irc = (CraftIRC) chat; String tag = generalCommand.getLabel(); try { CommandEndPoint ep = commandInstance.new CraftIRCForwarder(irc, tag); cmdTags.put(ep, tag); } catch(Exception e) { General.logger.warn(LanguageText.LOG_COMMAND_IRC_REG_ERROR.value("command", generalCommand.getName())); } } } catch(ClassNotFoundException e) { General.logger.error(LanguageText.LOG_COMMAND_REG_ERROR.value("command", generalCommand.getName()),e); } catch(IllegalArgumentException e) { General.logger.error(LanguageText.LOG_COMMAND_REG_ERROR.value("command", generalCommand.getName()),e); } catch(SecurityException e) { General.logger.error(LanguageText.LOG_COMMAND_REG_ERROR.value("command", generalCommand.getName()),e); } catch(InstantiationException e) { General.logger.error(LanguageText.LOG_COMMAND_REG_ERROR.value("command", generalCommand.getName()),e); } catch(IllegalAccessException e) { General.logger.error(LanguageText.LOG_COMMAND_REG_ERROR.value("command", generalCommand.getName()),e); } catch(InvocationTargetException e) { General.logger.error(LanguageText.LOG_COMMAND_REG_ERROR.value("command", generalCommand.getName()),e); } catch(NoSuchMethodException e) { General.logger.error(LanguageText.LOG_COMMAND_REG_ERROR.value("command", generalCommand.getName()),e); } if(register != null && key.contains(".")) register(key.split("\\.")[1], generalCommand); if(knownCommands != null) { Iterator<Entry<String,Command>> iter = knownCommands.entrySet().iterator(); while(iter.hasNext()) { Entry<String,Command> cmd = iter.next(); if(cmd.getValue() != 
generalCommand) continue; cmd.setValue(new GeneralCommand(generalCommand)); } } List<String> aliases = config.getStringList("aliases." + key); if(aliases == null) { //General.logger.warn("No aliases defined for " + key + " command; skipping."); continue; } for(String alias : aliases) register(alias, generalCommand); } } catch(NullPointerException e) { e.printStackTrace(); return; } catch(ClassCastException e) { General.logger.error("Commands are of wrong type!",e); } } private static boolean isCraftIRC3(Plugin irc) { if(irc != null && irc instanceof CraftIRC && irc.getDescription().getVersion().startsWith("3")) return true; return false; } public static boolean register(String label, Command command) { try { boolean success = (Boolean) register.invoke(commandMap, label, "General.dynalias", command, true); if(!success) { Command cmd = Bukkit.getPluginCommand(label); String claimant; if(cmd instanceof PluginCommand) claimant = ((PluginCommand) cmd).getPlugin().getDescription().getName(); else claimant = Bukkit.getName(); General.logger.info(LanguageText.LOG_COMMAND_TAKEN.value("alias", label, "plugin", claimant)); } return success; } catch(IllegalArgumentException e) { General.logger.warn(e.getMessage()); } catch(IllegalAccessException e) { General.logger.warn(e.getMessage()); } catch(InvocationTargetException e) { General.logger.warn(e.getMessage()); } return false; } @SuppressWarnings("unchecked") private static boolean getCommandMap() { CraftServer cs = (CraftServer) Bukkit.getServer(); Field cm; try { cm = CraftServer.class.getDeclaredField("commandMap"); } catch(SecurityException e) { General.logger.warn(e.getMessage()); return false; } catch(NoSuchFieldException e) { General.logger.warn(e.getMessage()); return false; } cm.setAccessible(true); try { commandMap = (SimpleCommandMap) cm.get(cs); } catch(IllegalArgumentException e) { General.logger.warn(e.getMessage()); return false; } catch(IllegalAccessException e) { General.logger.warn(e.getMessage()); return 
false; } if(commandMap == null) return false; try { register = SimpleCommandMap.class.getDeclaredMethod("register", String.class, String.class, Command.class, boolean.class); } catch(SecurityException e) { General.logger.warn(e.getMessage()); return false; } catch(NoSuchMethodException e) { General.logger.warn(e.getMessage()); return false; } register.setAccessible(true); try { Field commands = SimpleCommandMap.class.getDeclaredField("knownCommands"); commands.setAccessible(true); knownCommands = (Map<String,Command>)commands.get(commandMap); } catch(SecurityException e) { return false; } catch(NoSuchFieldException e) { General.logger.warn(e.getMessage()); return false; } catch(IllegalArgumentException e) { General.logger.warn(e.getMessage()); return false; } catch(IllegalAccessException e) { General.logger.warn(e.getMessage()); return false; } return true; } public static class GeneralCommand extends Command { private PluginCommand command; public GeneralCommand(PluginCommand cmd) { super(cmd.getName(), cmd.getDescription(), cmd.getUsage(), cmd.getAliases()); command = cmd; } @Override public boolean execute(CommandSender sender, String commandLabel, String[] args) { return command.execute(sender, commandLabel, args); } @Override public String getName() { return command.getName(); } @Override public String getPermission() { return command.getPermission(); } @Override public void setPermission(String permission) { command.setPermission(permission); } @Override public boolean testPermission(CommandSender target) { return command.testPermission(target); } @Override public boolean testPermissionSilent(CommandSender target) { return command.testPermissionSilent(target); } @Override public String getLabel() { return command.getLabel(); } @Override public boolean setLabel(String name) { return command.setLabel(name); } @Override public boolean register(CommandMap map) { return command.register(map); } @Override public boolean unregister(CommandMap map) { return 
command.unregister(map); } @Override public boolean isRegistered() { return command.isRegistered(); } @Override public List<String> getAliases() { return command.getAliases(); } @Override public String getPermissionMessage() { return command.getPermissionMessage(); } @Override public String getDescription() { return command.getDescription(); } @Override public String getUsage() { return command.getUsage(); } @Override public GeneralCommand setAliases(List<String> aliases) { command.setAliases(aliases); return this; } @Override public GeneralCommand setDescription(String descr) { command.setDescription(descr); return this; } @Override public GeneralCommand setPermissionMessage(String permissionMessage) { command.setPermissionMessage(permissionMessage); return this; } @Override public GeneralCommand setUsage(String usage) { command.setUsage(usage); return this; } public CommandBase getExecutor() { return (CommandBase)command.getExecutor(); } } }
package io.swagger.client.api;

import com.sun.jersey.api.client.GenericType;

import io.swagger.client.ApiException;
import io.swagger.client.ApiClient;
import io.swagger.client.Configuration;
import io.swagger.client.Pair;

import io.swagger.client.model.CallControlUser;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;

/**
 * Client for the Enterprise API endpoints.
 * <p>
 * NOTE: originally swagger-generated; hand-edited to quote path-parameter
 * replacements (see {@link Matcher#quoteReplacement}) so that escaped values
 * containing '$' or '\' cannot corrupt the request path. Re-generating will
 * lose these fixes.
 */
@javax.annotation.Generated(value = "class io.swagger.codegen.languages.JavaClientCodegen", date = "2016-04-22T07:24:15.167Z")
public class EnterpriseApiApi {
  /** Accept header candidates shared by all endpoints. */
  private static final String[] JSON_XML_ACCEPTS = {
    "application/json", "text/json", "application/xml", "text/xml"
  };

  private ApiClient apiClient;

  public EnterpriseApiApi() {
    this(Configuration.getDefaultApiClient());
  }

  public EnterpriseApiApi(ApiClient apiClient) {
    this.apiClient = apiClient;
  }

  public ApiClient getApiClient() {
    return apiClient;
  }

  public void setApiClient(ApiClient apiClient) {
    this.apiClient = apiClient;
  }

  /**
   * Substitutes a path template variable with its URL-escaped value.
   * Matcher.quoteReplacement protects against '$'/'\' in the escaped value,
   * which String.replaceAll would otherwise interpret as replacement syntax.
   */
  private String substitutePathParam(String path, String name, String value) {
    return path.replaceAll("\\{" + name + "\\}",
        Matcher.quoteReplacement(apiClient.escapeString(value)));
  }

  /**
   * Enterprise GET: GetBlockList. Returns the current and complete list of numbers
   * that the network is blocking.
   * Try with api_key &#39;demo&#39; for the demo block list (which will block
   * 18008472911, 13157244022, 17275567300, 18008276655) but not 12061231234.
   *
   * @param cached (optional)
   * @return List&lt;String&gt; the blocked numbers
   * @throws ApiException if fails to make API call
   */
  public List<String> enterpriseApiGetBlockList(Boolean cached) throws ApiException {
    Object localVarPostBody = null;

    String localVarPath = "/api/2015-11-01/Enterprise/GetBlockList".replaceAll("\\{format\\}","json");

    List<Pair> localVarQueryParams = new ArrayList<Pair>();
    Map<String, String> localVarHeaderParams = new HashMap<String, String>();
    Map<String, Object> localVarFormParams = new HashMap<String, Object>();

    localVarQueryParams.addAll(apiClient.parameterToPairs("", "cached", cached));

    final String localVarAccept = apiClient.selectHeaderAccept(JSON_XML_ACCEPTS);

    final String[] localVarContentTypes = { };
    final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes);

    String[] localVarAuthNames = new String[] { };

    GenericType<List<String>> localVarReturnType = new GenericType<List<String>>() {};
    return apiClient.invokeAPI(localVarPath, "GET", localVarQueryParams, localVarPostBody,
        localVarHeaderParams, localVarFormParams, localVarAccept, localVarContentType,
        localVarAuthNames, localVarReturnType);
  }

  /**
   * Enterprise GET: GetUser. Returns the current information for the user.
   *
   * @param phoneNumber (required)
   * @return CallControlUser
   * @throws ApiException if fails to make API call
   */
  public CallControlUser enterpriseApiGetUser(String phoneNumber) throws ApiException {
    Object localVarPostBody = null;

    if (phoneNumber == null) {
      throw new ApiException(400, "Missing the required parameter 'phoneNumber' when calling enterpriseApiGetUser");
    }

    String localVarPath = substitutePathParam(
        "/api/2015-11-01/Enterprise/GetUser/{phoneNumber}".replaceAll("\\{format\\}","json"),
        "phoneNumber", phoneNumber);

    List<Pair> localVarQueryParams = new ArrayList<Pair>();
    Map<String, String> localVarHeaderParams = new HashMap<String, String>();
    Map<String, Object> localVarFormParams = new HashMap<String, Object>();

    final String localVarAccept = apiClient.selectHeaderAccept(JSON_XML_ACCEPTS);

    final String[] localVarContentTypes = { };
    final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes);

    String[] localVarAuthNames = new String[] { };

    GenericType<CallControlUser> localVarReturnType = new GenericType<CallControlUser>() {};
    return apiClient.invokeAPI(localVarPath, "GET", localVarQueryParams, localVarPostBody,
        localVarHeaderParams, localVarFormParams, localVarAccept, localVarContentType,
        localVarAuthNames, localVarReturnType);
  }

  /**
   * Enterprise GET: ShouldBlock. Returns a call-block proceed decision with the
   * information required to perform basic call blocking.
   * Try with api_key &#39;demo&#39; and phone numbers 18008472911, 13157244022,
   * 17275567300, 18008276655, and 12061231234 (last one not spam).
   *
   * @param phoneNumber phone number to search (required)
   * @param userPhoneNumber (OPTIONAL) phone number of user to look up block rules (required)
   * @return String
   * @throws ApiException if fails to make API call
   */
  public String enterpriseApiShouldBlock(String phoneNumber, String userPhoneNumber) throws ApiException {
    Object localVarPostBody = null;

    if (phoneNumber == null) {
      throw new ApiException(400, "Missing the required parameter 'phoneNumber' when calling enterpriseApiShouldBlock");
    }
    if (userPhoneNumber == null) {
      throw new ApiException(400, "Missing the required parameter 'userPhoneNumber' when calling enterpriseApiShouldBlock");
    }

    String localVarPath = "/api/2015-11-01/Enterprise/ShouldBlock/{phoneNumber}/{userPhoneNumber}"
        .replaceAll("\\{format\\}","json");
    localVarPath = substitutePathParam(localVarPath, "phoneNumber", phoneNumber);
    localVarPath = substitutePathParam(localVarPath, "userPhoneNumber", userPhoneNumber);

    List<Pair> localVarQueryParams = new ArrayList<Pair>();
    Map<String, String> localVarHeaderParams = new HashMap<String, String>();
    Map<String, Object> localVarFormParams = new HashMap<String, Object>();

    final String localVarAccept = apiClient.selectHeaderAccept(JSON_XML_ACCEPTS);

    final String[] localVarContentTypes = { };
    final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes);

    String[] localVarAuthNames = new String[] { };

    GenericType<String> localVarReturnType = new GenericType<String>() {};
    return apiClient.invokeAPI(localVarPath, "GET", localVarQueryParams, localVarPostBody,
        localVarHeaderParams, localVarFormParams, localVarAccept, localVarContentType,
        localVarAuthNames, localVarReturnType);
  }

  /**
   * Enterprise POST: UpsertUser. Creates or updates the given user.
   *
   * @param user (required)
   * @return Object
   * @throws ApiException if fails to make API call
   */
  public Object enterpriseApiUpsertUser(CallControlUser user) throws ApiException {
    Object localVarPostBody = user;

    if (user == null) {
      throw new ApiException(400, "Missing the required parameter 'user' when calling enterpriseApiUpsertUser");
    }

    String localVarPath = "/api/2015-11-01/Enterprise/UpsertUser".replaceAll("\\{format\\}","json");

    List<Pair> localVarQueryParams = new ArrayList<Pair>();
    Map<String, String> localVarHeaderParams = new HashMap<String, String>();
    Map<String, Object> localVarFormParams = new HashMap<String, Object>();

    final String localVarAccept = apiClient.selectHeaderAccept(JSON_XML_ACCEPTS);

    final String[] localVarContentTypes = {
      "application/json", "text/json", "application/xml", "text/xml", "application/x-www-form-urlencoded"
    };
    final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes);

    String[] localVarAuthNames = new String[] { };

    GenericType<Object> localVarReturnType = new GenericType<Object>() {};
    return apiClient.invokeAPI(localVarPath, "POST", localVarQueryParams, localVarPostBody,
        localVarHeaderParams, localVarFormParams, localVarAccept, localVarContentType,
        localVarAuthNames, localVarReturnType);
  }
}
package org.jetbrains.plugins.groovy.extensions;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.StringTokenizer;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;

import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrMethodCall;
import org.jetbrains.plugins.groovy.lang.psi.impl.GroovyNamesUtil;
import org.jetbrains.plugins.groovy.lang.psi.impl.statements.expressions.TypesUtil;
import org.jetbrains.plugins.groovy.lang.psi.impl.synthetic.GrLightMethodBuilder;
import org.jetbrains.plugins.groovy.util.ClassInstanceCache;
import com.intellij.psi.PsiClass;
import com.intellij.psi.PsiMethod;
import com.intellij.psi.PsiParameter;
import com.intellij.psi.PsiParameterList;
import com.intellij.psi.PsiType;
import com.intellij.util.PairFunction;

/**
 * Metadata describing an externally-contributed Groovy method: its parameter types,
 * return type (literal or computed), and named-argument information.
 * <p>
 * The static registries are built lazily from the {@code GroovyClassDescriptor} and
 * {@code GroovyMethodDescriptorExtension} extension points. Both maps are keyed first
 * by class name (or light-method key), then by method name; a {@code null} method-name
 * key holds descriptors that apply to every method of the class.
 *
 * @author Sergey Evdokimov
 */
public class GroovyMethodInfo {

  // METHOD_INFOS is volatile and written LAST in ensureInit(): its volatile write
  // publishes LIGHT_METHOD_INFOS (written just before) to readers that check
  // METHOD_INFOS != null first. Concurrent callers may duplicate the build work,
  // but each builds into local maps, so the result is consistent either way.
  private static volatile Map<String, Map<String, List<GroovyMethodInfo>>> METHOD_INFOS;
  private static Map<String, Map<String, List<GroovyMethodInfo>>> LIGHT_METHOD_INFOS;

  private final List<String> myParams;
  private final ClassLoader myClassLoader;

  private final String myReturnType;
  private final String myReturnTypeCalculatorClassName;
  private PairFunction<GrMethodCall, PsiMethod, PsiType> myReturnTypeCalculatorInstance;

  private final Map<String, NamedArgumentDescriptor> myNamedArguments;
  private final String myNamedArgProviderClassName;
  private GroovyNamedArgumentProvider myNamedArgProviderInstance;

  /** Builds both registries on first use; subsequent calls are cheap no-ops. */
  private static void ensureInit() {
    if (METHOD_INFOS != null) return;

    Map<String, Map<String, List<GroovyMethodInfo>>> methodInfos = new HashMap<String, Map<String, List<GroovyMethodInfo>>>();
    Map<String, Map<String, List<GroovyMethodInfo>>> lightMethodInfos = new HashMap<String, Map<String, List<GroovyMethodInfo>>>();

    for (GroovyClassDescriptor classDescriptor : GroovyClassDescriptor.EP_NAME.getExtensions()) {
      ClassLoader classLoader = classDescriptor.getLoaderForClass();
      for (GroovyMethodDescriptor method : classDescriptor.methods) {
        addMethodDescriptor(methodInfos, method, classLoader, classDescriptor.className);
      }
    }

    for (GroovyMethodDescriptorExtension methodDescriptor : GroovyMethodDescriptorExtension.EP_NAME.getExtensions()) {
      // A descriptor is keyed EITHER by class name OR by light-method key, never both.
      if (methodDescriptor.className != null) {
        assert methodDescriptor.lightMethodKey == null;
        addMethodDescriptor(methodInfos, methodDescriptor, methodDescriptor.getLoaderForClass(),
                            methodDescriptor.className);
      }
      else {
        // FIX: was "assert methodDescriptor.className == null", a tautology in this
        // branch; the meaningful invariant is that the alternative key is present.
        assert methodDescriptor.lightMethodKey != null;
        addMethodDescriptor(lightMethodInfos, methodDescriptor, methodDescriptor.getLoaderForClass(),
                            methodDescriptor.lightMethodKey);
      }
    }

    processUnnamedDescriptors(lightMethodInfos);
    processUnnamedDescriptors(methodInfos);

    LIGHT_METHOD_INFOS = lightMethodInfos;
    METHOD_INFOS = methodInfos; // volatile write: publishes both maps
  }

  /**
   * Merges descriptors registered without a method name (stored under the
   * {@code null} key) into every named method's descriptor list.
   */
  private static void processUnnamedDescriptors(Map<String, Map<String, List<GroovyMethodInfo>>> map) {
    for (Map<String, List<GroovyMethodInfo>> methodMap : map.values()) {
      List<GroovyMethodInfo> unnamedMethodDescriptors = methodMap.get(null);
      if (unnamedMethodDescriptors != null) {
        for (Map.Entry<String, List<GroovyMethodInfo>> entry : methodMap.entrySet()) {
          if (entry.getKey() != null) {
            entry.getValue().addAll(unnamedMethodDescriptors);
          }
        }
      }
    }
  }

  /**
   * Looks up descriptors for {@code method} under {@code key}, falling back to the
   * unnamed ({@code null}-keyed) descriptors when no name-specific entry exists.
   */
  @Nullable
  private static List<GroovyMethodInfo> getInfos(Map<String, Map<String, List<GroovyMethodInfo>>> map,
                                                String key,
                                                PsiMethod method) {
    Map<String, List<GroovyMethodInfo>> methodMap = map.get(key);
    if (methodMap == null) return null;

    List<GroovyMethodInfo> res = methodMap.get(method.getName());
    if (res == null) {
      res = methodMap.get(null);
    }
    return res;
  }

  /**
   * Returns all descriptors applicable to {@code method}: light-method descriptors
   * (when the method is a {@link GrLightMethodBuilder} with a String kind) combined
   * with descriptors keyed by the containing class's qualified name. Never null.
   */
  public static List<GroovyMethodInfo> getInfos(PsiMethod method) {
    ensureInit();

    List<GroovyMethodInfo> lightMethodInfos = null;
    if (method instanceof GrLightMethodBuilder) {
      Object methodKind = ((GrLightMethodBuilder)method).getMethodKind();
      if (methodKind instanceof String) {
        lightMethodInfos = getInfos(LIGHT_METHOD_INFOS, (String)methodKind, method);
      }
    }

    List<GroovyMethodInfo> methodInfos = null;
    PsiClass containingClass = method.getContainingClass();
    if (containingClass != null) {
      methodInfos = getInfos(METHOD_INFOS, containingClass.getQualifiedName(), method);
    }

    if (methodInfos == null) {
      return lightMethodInfos == null ? Collections.<GroovyMethodInfo>emptyList() : lightMethodInfos;
    }
    if (lightMethodInfos == null) {
      return methodInfos;
    }
    List<GroovyMethodInfo> res = new ArrayList<GroovyMethodInfo>(lightMethodInfos.size() + methodInfos.size());
    res.addAll(lightMethodInfos);
    res.addAll(methodInfos);
    return res;
  }

  public GroovyMethodInfo(GroovyMethodDescriptor method, @Nonnull ClassLoader classLoader) {
    myClassLoader = classLoader;
    myParams = method.getParams();

    myReturnType = method.returnType;
    myReturnTypeCalculatorClassName = method.returnTypeCalculator;
    // A descriptor supplies either a literal return type or a calculator, not both.
    assert myReturnType == null || myReturnTypeCalculatorClassName == null;

    myNamedArguments = method.getArgumentsMap();
    myNamedArgProviderClassName = method.namedArgsProvider;
    // Likewise: either a static named-arguments map or a provider class, not both.
    assert myNamedArguments == null || myNamedArgProviderClassName == null;
  }

  /**
   * Registers {@code method} under {@code key}. {@code methodName} may list several
   * names separated by whitespace, commas or semicolons; a missing name registers
   * the descriptor for all methods of the class (null method-name key).
   */
  private static void addMethodDescriptor(Map<String, Map<String, List<GroovyMethodInfo>>> res,
                                          GroovyMethodDescriptor method,
                                          @Nonnull ClassLoader classLoader,
                                          @Nonnull String key) {
    if (method.methodName == null) {
      addMethodDescriptor(res, method, classLoader, null, key);
    }
    else {
      for (StringTokenizer st = new StringTokenizer(method.methodName, " \t,;"); st.hasMoreTokens(); ) {
        String name = st.nextToken();
        assert GroovyNamesUtil.isIdentifier(name);
        addMethodDescriptor(res, method, classLoader, name, key);
      }
    }
  }

  private static void addMethodDescriptor(Map<String, Map<String, List<GroovyMethodInfo>>> res,
                                          GroovyMethodDescriptor method,
                                          @Nonnull ClassLoader classLoader,
                                          @Nullable String methodName,
                                          @Nonnull String key) {
    Map<String, List<GroovyMethodInfo>> methodMap = res.get(key);
    if (methodMap == null) {
      methodMap = new HashMap<String, List<GroovyMethodInfo>>();
      res.put(key, methodMap);
    }

    List<GroovyMethodInfo> methodsList = methodMap.get(methodName);
    if (methodsList == null) {
      methodsList = new ArrayList<GroovyMethodInfo>();
      methodMap.put(methodName, methodsList);
    }

    methodsList.add(new GroovyMethodInfo(method, classLoader));
  }

  /** Literal return type, or null when a calculator is configured instead. */
  @Nullable
  public String getReturnType() {
    return myReturnType;
  }

  public boolean isReturnTypeCalculatorDefined() {
    return myReturnTypeCalculatorClassName != null;
  }

  /**
   * Lazily instantiates the configured return-type calculator.
   * NOTE(review): the unsynchronized lazy init can race; presumably
   * ClassInstanceCache returns the same instance for a given class name, making
   * the race benign — confirm.
   */
  @Nonnull
  public PairFunction<GrMethodCall, PsiMethod, PsiType> getReturnTypeCalculator() {
    if (myReturnTypeCalculatorInstance == null) {
      myReturnTypeCalculatorInstance = ClassInstanceCache.getInstance(myReturnTypeCalculatorClassName, myClassLoader);
    }
    return myReturnTypeCalculatorInstance;
  }

  /** Static named-argument map, or null when a provider class is configured instead. */
  @Nullable
  public Map<String, NamedArgumentDescriptor> getNamedArguments() {
    return myNamedArguments;
  }

  public boolean isNamedArgumentProviderDefined() {
    return myNamedArgProviderClassName != null;
  }

  /** Lazily instantiates the configured named-argument provider (same benign race as above). */
  public GroovyNamedArgumentProvider getNamedArgProvider() {
    if (myNamedArgProviderInstance == null) {
      myNamedArgProviderInstance = ClassInstanceCache.getInstance(myNamedArgProviderClassName, myClassLoader);
    }
    return myNamedArgProviderInstance;
  }

  /**
   * True when this descriptor applies to {@code method}: either no parameter
   * signature was declared, or the declared types match positionally.
   */
  public boolean isApplicable(@Nonnull PsiMethod method) {
    if (myParams == null) {
      return true;
    }

    PsiParameterList parameterList = method.getParameterList();
    if (parameterList.getParametersCount() != myParams.size()) return false;

    PsiParameter[] parameters = parameterList.getParameters();
    for (int i = 0; i < parameters.length; i++) {
      if (!TypesUtil.isClassType(parameters[i].getType(), myParams.get(i))) {
        return false;
      }
    }

    return true;
  }
}
package com.hevelian.olastic.core.api.uri.queryoption.expression.member; import static com.hevelian.olastic.core.elastic.ElasticConstants.NESTED_PATH_SEPARATOR; import static com.hevelian.olastic.core.utils.ProcessorUtils.throwNotImplemented; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.stream.Collectors; import org.apache.commons.lang3.StringUtils; import org.apache.olingo.commons.api.edm.EdmAnnotation; import org.apache.olingo.commons.api.edm.EdmEntityType; import org.apache.olingo.commons.api.edm.EdmProperty; import org.apache.olingo.commons.api.edm.constants.EdmTypeKind; import org.apache.olingo.server.api.ODataApplicationException; import org.apache.olingo.server.api.uri.UriResource; import org.apache.olingo.server.api.uri.UriResourceComplexProperty; import org.apache.olingo.server.api.uri.UriResourceKind; import org.apache.olingo.server.api.uri.UriResourceLambdaAll; import org.apache.olingo.server.api.uri.UriResourceLambdaAny; import org.apache.olingo.server.api.uri.UriResourceLambdaVariable; import org.apache.olingo.server.api.uri.UriResourceNavigation; import org.apache.olingo.server.api.uri.UriResourcePartTyped; import org.apache.olingo.server.api.uri.UriResourcePrimitiveProperty; import org.apache.olingo.server.api.uri.UriResourceProperty; import org.apache.olingo.server.api.uri.queryoption.expression.Binary; import org.apache.olingo.server.api.uri.queryoption.expression.Expression; import org.apache.olingo.server.api.uri.queryoption.expression.ExpressionVisitException; import org.apache.olingo.server.api.uri.queryoption.expression.ExpressionVisitor; import org.apache.olingo.server.api.uri.queryoption.expression.Member; import org.apache.olingo.server.api.uri.queryoption.expression.Method; import org.apache.olingo.server.core.uri.UriInfoImpl; import com.hevelian.olastic.core.api.uri.queryoption.expression.member.impl.ChildMember; import 
com.hevelian.olastic.core.api.uri.queryoption.expression.member.impl.ExpressionResult; import com.hevelian.olastic.core.api.uri.queryoption.expression.member.impl.NestedMember; import com.hevelian.olastic.core.api.uri.queryoption.expression.member.impl.ParentWrapperMember; import com.hevelian.olastic.core.api.uri.queryoption.expression.member.impl.ParentPrimitiveMember; import com.hevelian.olastic.core.api.uri.queryoption.expression.member.impl.PrimitiveMember; import com.hevelian.olastic.core.edm.ElasticEdmEntityType; import com.hevelian.olastic.core.edm.ElasticEdmProperty; /** * Processes raw olingo expression member data. * * @author Taras Kohut * @author rdidyk */ public class MemberHandler { private UriResource firstPart; private UriResource lastPart; private List<UriResource> resourceParts; /** * represents the path to current member. Is useful in lambdas, because * member contains no information about its parent members */ private String pathToMember; private Map<String, UriResource> collectionResourceCache; private ExpressionVisitor<?> visitor; /** * Initializes member handler using raw olingo expression member. * * @param member * raw olingo expression member * @param visitor * visitor instance */ public MemberHandler(Member member, ExpressionVisitor<?> visitor) { this.visitor = visitor; UriInfoImpl resource = (UriInfoImpl) member.getResourcePath(); resourceParts = resource.getUriResourceParts(); firstPart = resourceParts.get(0); lastPart = resourceParts.get(resourceParts.size() - 1); String parentPath = resource.getFragment(); pathToMember = collectPathToMember(parentPath); } /** * Collects path to member. This path is helpful for complex lambdas, like * this one: $filter=info/pages/any(p:p/words/any(w:w eq 'word')) We need to * store this path manually because Member inside lambda doesn't contain * full path to itself. 
* * @param parentPath * path to parent member * @return path to current member */ private String collectPathToMember(String parentPath) { String parentPathPrefix = parentPath != null ? parentPath : ""; List<String> resourceNames = null; if (resourceParts.size() > 1) { // we need only parts that shows path to property // the last part is either lambda or name of the property we want to // filter by, so we ignore it resourceNames = resourceParts.subList(0, resourceParts.size() - 1).stream() .filter(resource -> resource instanceof UriResourceComplexProperty || resource instanceof UriResourcePrimitiveProperty) .map(part -> ((UriResourceProperty) part).getProperty().getName()) .collect(Collectors.toList()); } boolean namesListIsNotEmpty = resourceNames != null && !resourceNames.isEmpty(); if (namesListIsNotEmpty && !parentPathPrefix.isEmpty()) { parentPathPrefix += NESTED_PATH_SEPARATOR; } return namesListIsNotEmpty ? parentPathPrefix + String.join(NESTED_PATH_SEPARATOR, resourceNames) : parentPath; } /** * Processes raw olingo expression member. * * @param collectionResourceCache * cache with parent members collection resources. Used for * nested lambdas * @return expression member * @throws ODataApplicationException * OData app exception * @throws ExpressionVisitException * expression visitor exception */ public ExpressionMember handle(Map<String, UriResource> collectionResourceCache) throws ODataApplicationException, ExpressionVisitException { this.collectionResourceCache = collectionResourceCache; if (lastPart instanceof UriResourceLambdaAll) { return throwNotImplemented("All lambda is not implemented"); } else if (lastPart instanceof UriResourceLambdaAny) { return handleLambdaAny(); } else if (lastPart instanceof UriResourcePrimitiveProperty || lastPart instanceof UriResourceLambdaVariable) { return handlePrimitive(); } else { return throwNotImplemented(); } } /** * Analyzes uri parts and creates a member. 
Lambda has expression that * should be executed to get the inner query. * * @return nested or child expression member */ private ExpressionMember handleLambdaAny() throws ODataApplicationException, ExpressionVisitException { UriResourceLambdaAny lambda = (UriResourceLambdaAny) lastPart; Expression expression = lambda.getExpression(); boolean isNavigationLambdaVar = firstPart instanceof UriResourcePartTyped && ((UriResourcePartTyped) firstPart).getType() instanceof EdmEntityType; if (firstPart instanceof UriResourceNavigation || isNavigationLambdaVar) { boolean isParentNestedLambdaVar = resourceParts.stream() .anyMatch(part -> part instanceof UriResourceComplexProperty); List<String> navigationTypes = collectNavigationTypes(); if (isParentNestedLambdaVar) { // navigation parent nested collection // book?$filter=author/_dimension/any(d:d/name eq 'Validity') ExpressionResult lambdaResult = handleLambdaAny(expression); return new ParentWrapperMember(navigationTypes, lambdaResult.getQueryBuilder()) .any(); } else { if (resourceParts.size() > 2) { // navigation parent to another child // book?$filter=author/address/any(a:a/city eq 'New York')) List<String> parentTypes = navigationTypes.subList(0, navigationTypes.size() - 1); return new ParentWrapperMember(parentTypes, handleChildLambda(lambda).getQueryBuilder()).any(); } else { // navigation child property collection // author?$filter=book/any(b:b/character/any(c:c/name eq // 'Oliver')) return handleChildLambda(lambda); } } } else { // complex or primitive type collection return handleLambdaAny(expression); } } private ExpressionResult handleLambdaAny(Expression lambdaExpression) throws ODataApplicationException, ExpressionVisitException { setPath(lambdaExpression); // if any lambda uses primitive property // Books?$filter=property/any(p:p eq 'value') // than parent path already contains path and property name // that's why we need to retrieve only nested path String nestedPath = isPreLastResourcePrimitive() ? 
StringUtils.substringBeforeLast(pathToMember, NESTED_PATH_SEPARATOR) : pathToMember; ExpressionResult expressionResult = (ExpressionResult) lambdaExpression.accept(visitor); return isPreLastResourcePrimitive() ? expressionResult : new NestedMember(nestedPath, expressionResult.getQueryBuilder()).any(); } private ExpressionResult handleChildLambda(UriResourceLambdaAny lambda) throws ExpressionVisitException, ODataApplicationException { ExpressionResult lambdaResult = (ExpressionResult) lambda.getExpression().accept(visitor); // pre-last resource - before lambda; it's always a collection type UriResourceNavigation preLastNavResource = (UriResourceNavigation) resourceParts .get(resourceParts.size() - 2); ElasticEdmEntityType entityType = (ElasticEdmEntityType) preLastNavResource.getProperty() .getType(); return new ChildMember(entityType.getESType(), lambdaResult.getQueryBuilder()).any(); } private boolean isPreLastResourcePrimitive() { UriResource preLastResource = resourceParts.get(resourceParts.size() - 2); return preLastResource.getKind() == UriResourceKind.primitiveProperty; } private void setPath(Expression expression) { if (expression instanceof Member) { setPath((Member) expression); } else if (expression instanceof Binary) { Binary binaryExpression = (Binary) expression; setPath(binaryExpression.getLeftOperand()); setPath(binaryExpression.getRightOperand()); } else if (expression instanceof Method) { Method method = (Method) expression; method.getParameters().forEach(this::setPath); } } private void setPath(Member member) { UriInfoImpl memberUriInfo = (UriInfoImpl) member.getResourcePath(); memberUriInfo.setFragment(pathToMember); } /** * Analyzes uri parts and creates primitive or parent expression member. * Also handles primitive expressions inside lambda's expression. 
* * @return primitive or parent expression member */ private ExpressionMember handlePrimitive() { // filter by parent's property // Books?$filter=Author/Name eq 'Dawkins' if (firstPart instanceof UriResourceNavigation) { EdmProperty lastProperty = ((UriResourceProperty) lastPart).getProperty(); PrimitiveMember primitiveMember = new PrimitiveMember( ((ElasticEdmProperty) lastProperty).getEField(), lastProperty.getAnnotations()); return new ParentPrimitiveMember(collectNavigationTypes(), primitiveMember); } else if (firstPart instanceof UriResourceLambdaVariable && ((UriResourcePartTyped) firstPart).getType().getKind() == EdmTypeKind.COMPLEX) { // filtering by complex type collection // Books?$filter=nested/any(n:n/state eq true) EdmProperty lastProperty = ((UriResourceProperty) lastPart).getProperty(); String parentPathPrefix = pathToMember != null ? pathToMember + NESTED_PATH_SEPARATOR : ""; String nestedPath = parentPathPrefix + lastProperty.getName(); return new PrimitiveMember(nestedPath, lastProperty.getAnnotations()); } else if (firstPart instanceof UriResourceLambdaVariable && ((UriResourcePartTyped) firstPart).getType() .getKind() == EdmTypeKind.PRIMITIVE) { // filtering by primitive type collection // Books?$filter=nested/property/any(p:p/tags/any(t:t eq 'Tag')) String nestedPath = pathToMember != null ? 
pathToMember : ""; UriResource parentResource = collectionResourceCache.get(pathToMember); return new PrimitiveMember(nestedPath, getAnnotations(parentResource)); } else { // simple primitive expression or expression inside lambda for // retrieving children EdmProperty lastProperty = ((UriResourceProperty) lastPart).getProperty(); return new PrimitiveMember(((ElasticEdmProperty) lastProperty).getEField(), lastProperty.getAnnotations()); } } private List<String> collectNavigationTypes() { return resourceParts.stream().filter(UriResourceNavigation.class::isInstance) .map(part -> ((ElasticEdmEntityType) ((UriResourceNavigation) part).getProperty() .getType()).getESType()) .collect(Collectors.toList()); } private List<EdmAnnotation> getAnnotations(UriResource uriResource) { if (uriResource instanceof UriResourceNavigation) { return ((UriResourceNavigation) uriResource).getProperty().getAnnotations(); } else if (uriResource instanceof UriResourceProperty) { return ((UriResourceProperty) uriResource).getProperty().getAnnotations(); } else { return Collections.emptyList(); } } /** * Returns collection URI resource. * * @return collection URI resource */ public UriResource getCollectionResource() { UriResource collectionResource = null; if (lastPart.getKind() == UriResourceKind.lambdaAll || lastPart.getKind() == UriResourceKind.lambdaAny) { collectionResource = resourceParts.get(resourceParts.size() - 2); } return collectionResource; } public String getPath() { return pathToMember; } }
// @@@ START COPYRIGHT @@@ // // Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. // // @@@ END COPYRIGHT @@@ package org.trafodion.ci; import java.io.File; import java.io.FileNotFoundException; import java.io.FilenameFilter; import java.io.IOException; import java.sql.SQLException; import java.sql.Statement; import java.text.DateFormat; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.Properties; import java.sql.ResultSet; import java.util.Vector; import sun.misc.Signal; import sun.misc.SignalHandler; public class ParallelRun extends Thread { String scriptsDir=null; String scriptsExt=null; String logsDir=null; boolean overwriteLogs=false; int connectionCnt=2; ConsoleReader crObj=null; ConsoleWriter cwObj=null; String summaryFile=null; PrunUserInterruption pui=null; Properties lfProps=null; final String SCRIPT_EXT="sql"; int MAX_THREADS=64; int MIN_THREADS=2; List<String> scriptsList=null; Session sessObj=null; FileWriter summaryWriter=null; int threadNumber=0; int totalScriptFiles=0; int totalScriptFilesProcessed=0; int totalSQLsProcessed=0; int totalSQLErrors=0; int totalSQLWarnings=0; int totalConnections=0; int seqNo=0; Utils utils=null; SignalHandler CTRLCHandler=null; Signal 
INTSignal=null; boolean isUserInterrupt=false; ThreadGroup prunGroup=null; List<Session> activeSessionLists=null; StatusThread status=null; int noOfArgs=0; String[] args=null; HTMLObject htmlObj = null; XMLObject xmlObj = null; boolean errorMsg = false; Writer writeObj = null; PrunSummary summaryObj = new PrunSummary(); Query qryObj=null; final String TIMER_DEFAULT="60"; int timerValue = 60; long endTime = 0; long nowTime = 0; boolean timerMode = false; List<String> scriptsListBak=null; int connDelta = 0; //Added for debugging prun boolean prunDebug=false; ParallelRun() { } ParallelRun(ConsoleReader crObj, ConsoleWriter cwObj,Properties lfProps,Session sessObj) throws IOException { threadNumber=0; this.lfProps=lfProps; this.sessObj=sessObj; this.htmlObj = sessObj.getHtmlObj(); this.xmlObj = sessObj.getXmlObj(); this.crObj=crObj; this.cwObj=cwObj; this.summaryWriter=new FileWriter(); pui=new PrunUserInterruption(); utils=new Utils(); writeObj = sessObj.getWriter(); qryObj = sessObj.getQuery(); CTRLCHandler =new SignalHandler () { public void handle(Signal sig) { isUserInterrupt=true; if (prunGroup != null) { status.beforeStatus="Cancelling"; status.afterStatus="Cancelled"; } while (activeSessionLists !=null && activeSessionLists.size() > 0) { ((Session)(activeSessionLists.get(0))).setQueryInterrupted(true); ((Session)(activeSessionLists.get(0))).setDBConnExists(false); if (((Session)(activeSessionLists.get(0))).getCurrentStmtObj() != null) { try { ((Statement)((Session)(activeSessionLists.get(0))).getCurrentStmtObj()).cancel(); } catch (Exception e) { } } activeSessionLists.remove(0); } } }; try { INTSignal=new Signal("INT"); } catch (Exception e) {} // get the maxservers allowed for the current datasource // undocumented arg to disable calling getMaxServers(), -Dtrafci.prun.maxconn=n int maxThreads=0; String maxConn=System.getProperty("trafci.prun.maxconn"); try { if ((maxConn!=null) && ((maxThreads = Integer.parseInt(maxConn.trim() )) > MAX_THREADS )) 
MAX_THREADS=maxThreads; else MAX_THREADS=getMaxServers(); } catch (NumberFormatException nfe) { this.writePrunErrors(SessionError.INVALID_MAXCONN); throw nfe; } MIN_THREADS = MAX_THREADS == 1 ? MAX_THREADS:MIN_THREADS; String strTimerMode = System.getProperty("trafci.prun.timermode"); if ((strTimerMode!=null) && strTimerMode.equalsIgnoreCase("y")) { timerMode=true; String strConnDelta = System.getProperty("trafci.prun.connection.delta"); if (strConnDelta != null) connDelta = Integer.parseInt(strConnDelta); } } public void execute() throws IOException, PrunUserInterruption, InvalidNumberOfArguments, UserInterruption { //Signal.handle(INTSignal, CTRLCHandler); String queryStr = sessObj.getQuery().getQueryText(); queryStr = queryStr.replaceAll("\\s+"," ").trim(); boolean okay = validateArgs(queryStr); if (!okay) { throw new PrunUserInterruption(); } if (noOfArgs == 1) getInputs(); try { Signal.handle(INTSignal, CTRLCHandler); }catch (Exception e) {} scriptsList=null; scriptsList=new ArrayList<String>(); activeSessionLists=new ArrayList<Session>(); FilenameFilter filter = new FilenameFilter() { public boolean accept(File dir, String name) { if (System.getProperty("os.name").toUpperCase() .startsWith("WINDOW")) { return name.matches("(?i).*\\." 
+ scriptsExt +"$" ); } else { return name.endsWith("."+scriptsExt); } } }; File dir=new File(scriptsDir); File[] fl=dir.listFiles(filter); //String separator=File.separator; for (int i=0;i< fl.length; i++) { scriptsList.add(fl[i].getName().toString()); } prunDebug = Boolean.getBoolean("trafci.prun.debug"); if (scriptsList.size() == 0) { if (sessObj.getDisplayFormat() == SessionDefaults.HTML_FORMAT) { htmlObj.init(); cwObj.println(htmlObj._beginTableTag); cwObj.println(htmlObj._startCommentTag); cwObj.println("No files present with this extension."); cwObj.println(htmlObj._endCommentTag); cwObj.println(htmlObj._endTableTag); } else if (sessObj.getDisplayFormat() == SessionDefaults.XML_FORMAT) { xmlObj.init(); xmlObj.handleStartTags(); cwObj.println(xmlObj._beginStatusTag); cwObj.println(xmlObj._beginCdataTag + "No files present with this extension." + xmlObj._endCdataTag); cwObj.println(xmlObj._endStatusTag); xmlObj.handleEndTags(); } else { cwObj.println(); cwObj.println("No files present with this extension."); } return; } scriptsListBak=new ArrayList<String>(scriptsList); totalScriptFiles=this.scriptsList.size(); try { String summaryDir=logsDir+File.separator+ "error" ; if (!isValid(summaryDir,"dir","write")) { if (new File(summaryDir).mkdir()) { if (!isValid(summaryDir,"dir","write")) { summaryDir=logsDir; } } else { summaryDir=logsDir; } } summaryFile=summaryDir+File.separator+"prun.err.log"; String summaryTitle = "PRUN started at "+DateFormat.getDateTimeInstance().format(new Date()); if (!this.overwriteLogs) summaryWriter.setAppend(true); summaryWriter.initialize(summaryFile); if (sessObj.getDisplayFormat() == SessionDefaults.HTML_FORMAT) { summaryWriter.writeln(htmlObj._beginTableTag); summaryWriter.writeln(htmlObj._startCommentTag + summaryTitle + htmlObj._endCommentTag); summaryWriter.writeln(htmlObj._beginRowTag); summaryWriter.writeln(htmlObj._beginTblHeadTag +"Scripts directory " + htmlObj._endTblHeadTag); summaryWriter.writeln(htmlObj._beginTblDataTag + 
this.scriptsDir + htmlObj._endTblDataTag); summaryWriter.writeln(htmlObj._endRowTag); summaryWriter.writeln(htmlObj._beginRowTag); summaryWriter.writeln(htmlObj._beginTblHeadTag +"Logs directory " + htmlObj._endTblHeadTag); summaryWriter.writeln(htmlObj._beginTblDataTag + this.logsDir + htmlObj._endTblDataTag); summaryWriter.writeln(htmlObj._endRowTag); summaryWriter.writeln(htmlObj._beginRowTag); summaryWriter.writeln(htmlObj._beginTblHeadTag +"Logs overwritten " + htmlObj._endTblHeadTag); summaryWriter.writeln(htmlObj._beginTblDataTag + (this.overwriteLogs ? "y":"n") + htmlObj._endTblDataTag); summaryWriter.writeln(htmlObj._endRowTag); summaryWriter.writeln(htmlObj._beginRowTag); summaryWriter.writeln(htmlObj._beginTblHeadTag +"Number of connections " + htmlObj._endTblHeadTag); summaryWriter.writeln(htmlObj._beginTblDataTag + this.connectionCnt + htmlObj._endTblDataTag); summaryWriter.writeln(htmlObj._endRowTag); if (timerMode) { summaryWriter.writeln(htmlObj._beginRowTag); summaryWriter.writeln(htmlObj._beginTblHeadTag +"Time to run (mins) " + htmlObj._endTblHeadTag); summaryWriter.writeln(htmlObj._beginTblDataTag + this.timerValue + htmlObj._endTblDataTag); summaryWriter.writeln(htmlObj._endRowTag); } } else if (sessObj.getDisplayFormat() == SessionDefaults.XML_FORMAT) { summaryWriter.writeln(xmlObj._xmlNameSpaceTag); summaryWriter.writeln(xmlObj._beginRootTag); summaryWriter.writeln(xmlObj._beginCdataTag + summaryTitle + xmlObj._endCdataTag); summaryWriter.writeln(xmlObj._beginScriptsDirTag + this.scriptsDir + xmlObj._endStriptsDirTag); summaryWriter.writeln(xmlObj._beginLogsDirTag + this.logsDir + xmlObj._endLogsDirTag); summaryWriter.writeln(xmlObj._beginLogsO + (this.overwriteLogs ? 
"y":"n") + xmlObj._endLogsO); summaryWriter.writeln(xmlObj._beginConnTag + this.connectionCnt + xmlObj._endConnTag); if (timerMode) summaryWriter.writeln(xmlObj._beginMinTag + this.timerValue + xmlObj._endMinTag); } else { summaryWriter.write((utils.formatString("=",80,'='))); summaryWriter.writeln(); summaryWriter.write(summaryTitle); summaryWriter.writeln(); summaryWriter.write((utils.formatString("=",80,'='))); summaryWriter.writeln(); summaryWriter.writeln(); summaryWriter.writeln("Scripts directory: "+this.scriptsDir); summaryWriter.writeln("Logs directory: "+this.logsDir); summaryWriter.writeln("Logs overwritten: "+ (this.overwriteLogs ? "y":"n") ); summaryWriter.writeln("Number of connections: "+this.connectionCnt); if (timerMode) { summaryWriter.writeln("Time to run (mins): "+this.timerValue); summaryWriter.writeln("Connection delay (secs): "+this.connDelta); } if (prunDebug) { summaryWriter.writeln(); } } } catch (IOException ioe) { cwObj.println("Could not create the summary file."); return; } prunGroup=new ThreadGroup("prgroup"); Thread prunThread=null; status = new StatusThread(cwObj, sessObj); // start the status thread if (cwObj.getConsoleOut()) status.start(); if (this.connectionCnt > totalScriptFiles) { this.connectionCnt=totalScriptFiles; } sessObj.setQryStartTime(); for (int i=1; i <= this.connectionCnt ; i++) { if (this.scriptsList.size()== 0 || this.isUserInterrupt) break; prunThread =new Thread(prunGroup,this); prunThread.setName("Thread"+i); prunThread.start(); try { Thread.sleep(connDelta * 1000); } catch (InterruptedException ie) { } } //this.start(); //Lets wait for all child threads to complete before proceeding further Thread[] activeLists= new Thread[prunGroup.activeCount()]; int activeCounts=prunGroup.enumerate(activeLists); for (int t=0;t < activeCounts; t++) { try { activeLists[t].join(); } catch (InterruptedException e) { } } // this loop is not required but need while (prunGroup.activeCount() > 0) { status.stop=false; } 
status.stop=true; if (cwObj.getConsoleOut()) { try { status.join(); } catch (InterruptedException e) { // TODO Auto-generated catch block //e.printStackTrace(); } while (status.isAlive()) { } } summaryObj.setTotalScriptFiles(this.totalScriptFiles); summaryObj.setTotalScriptFilesProcessed(this.totalScriptFilesProcessed); summaryObj.setTotalSQLsProcessed(this.totalSQLsProcessed); summaryObj.setTotalSQLErrors(this.totalSQLErrors); summaryObj.setTotalSQLWarnings(this.totalSQLWarnings); summaryObj.setTotalConnections(this.totalConnections); summaryObj.setTotalSQLSuccess(this.totalSQLsProcessed - this.totalSQLErrors); summaryObj.setTotalConnectionFailures(this.connectionCnt - this.totalConnections); sessObj.setQryEndTime(); String elapsedTime = writeObj.getElapsedTime(sessObj,qryObj,utils); if (sessObj.getDisplayFormat() == SessionDefaults.XML_FORMAT) { sessObj.getXmlObj().handlePrunSummary(cwObj,summaryObj, elapsedTime); } else if (sessObj.getDisplayFormat() == SessionDefaults.HTML_FORMAT) { sessObj.getHtmlObj().handlePrunSummary(cwObj,summaryObj, elapsedTime); } else { cwObj.println(); cwObj.println("\t"+utils.formatString("_",45,'_')); cwObj.println("\t"+utils.formatString(" PARALLELRUN(PRUN) SUMMARY",45,' ')); cwObj.println("\t"+utils.formatString("_",45,'_')); cwObj.println("\t"+utils.formatString("Total files present ",45,'.',""+this.totalScriptFiles)); cwObj.println("\t"+utils.formatString("Total files processed ",45,'.',""+(this.totalScriptFilesProcessed))); cwObj.println("\t"+utils.formatString("Total queries processed ",45,'.',""+this.totalSQLsProcessed)); cwObj.println("\t"+utils.formatString("Total errors ",45,'.',""+this.totalSQLErrors)); cwObj.println("\t"+utils.formatString("Total warnings ",45,'.',""+this.totalSQLWarnings)); cwObj.println("\t"+utils.formatString("Total successes ",45,'.',""+(this.totalSQLsProcessed-this.totalSQLErrors))); cwObj.println("\t"+utils.formatString("Total connections ",45,'.',""+ (this.totalConnections) )); 
cwObj.println("\t"+utils.formatString("Total connection failures ",45,'.',""+ (connectionCnt-this.totalConnections) )); cwObj.println(SessionDefaults.lineSeperator + writeObj.getElapsedTime(sessObj,qryObj,utils)); cwObj.println(); } if (this.totalSQLErrors > 0) { String errorLogInfo = "Please verify the error log file " + summaryFile + " for error summary."; cwObj.println(formatSummaryStr(errorLogInfo)); } endPrunConsoleTags(); if ((this.totalScriptFiles - this.scriptsList.size()) == 0) { String errorMsgStr = ""; if (errorMsg) errorMsgStr = "No script files have been processed. See preceeding error message(s)."; else errorMsgStr = "No script files have been processed successfully."; summaryWriter.writeln(formatSummaryStr(errorMsgStr)); } else if ((this.totalScriptFiles - this.scriptsList.size()) < this.totalScriptFiles) { String scriptFileMsg =""; if (errorMsg) scriptFileMsg = "Not all script files have been processed successfully. See preceeding error message(s)."; else scriptFileMsg = "Not all script files have been processed successfully."; summaryWriter.writeln(formatSummaryStr(scriptFileMsg)); } else { String finalMsg = ""; if (errorMsg) { finalMsg = "All the script files have been processed successfully. 
See preceeding error message(s)."; } else if (this.totalSQLErrors == 0) { finalMsg = "All the script files have been processed successfully."; } summaryWriter.writeln(formatSummaryStr( finalMsg)); } String summaryEnd = "PRUN completed at " + DateFormat.getDateTimeInstance().format(new Date()); if (sessObj.getDisplayFormat() == SessionDefaults.XML_FORMAT) { sessObj.getXmlObj().handlePrunSummary(summaryWriter, summaryObj, summaryEnd, elapsedTime); } else if (sessObj.getDisplayFormat() == SessionDefaults.HTML_FORMAT) { sessObj.getHtmlObj().handlePrunSummary(summaryWriter,summaryObj, summaryEnd, elapsedTime); } else { summaryWriter.writeln(); summaryWriter.writeln("\t"+utils.formatString("_",45,'_')); summaryWriter.writeln("\t"+utils.formatString(" PARALLELRUN(PRUN) SUMMARY",45,' ')); summaryWriter.writeln("\t"+utils.formatString("_",45,'_')); summaryWriter.writeln("\t"+utils.formatString("Total files present ",45,'.',""+this.totalScriptFiles)); summaryWriter.writeln("\t"+utils.formatString("Total files processed ",45,'.',""+(this.totalScriptFilesProcessed))); summaryWriter.writeln("\t"+utils.formatString("Total queries processed ",45,'.',""+this.totalSQLsProcessed)); summaryWriter.writeln("\t"+utils.formatString("Total errors ",45,'.',""+this.totalSQLErrors)); summaryWriter.writeln("\t"+utils.formatString("Total warnings ",45,'.',""+this.totalSQLWarnings)); summaryWriter.writeln("\t"+utils.formatString("Total successes ",45,'.',""+(this.totalSQLsProcessed-this.totalSQLErrors))); summaryWriter.writeln("\t"+utils.formatString("Total connections ",45,'.',""+ (this.totalConnections) )); summaryWriter.writeln("\t"+utils.formatString("Total connection failures ",45,'.',""+ (connectionCnt-this.totalConnections) )); summaryWriter.writeln(); summaryWriter.write((utils.formatString("=",80,'='))); summaryWriter.writeln(); summaryWriter.write(summaryEnd); summaryWriter.write(" " + elapsedTime); summaryWriter.writeln(); summaryWriter.write((utils.formatString("=",80,'='))); 
summaryWriter.writeln(); } this.summaryWriter.close(); errorMsg = false; } private void getInputs() throws IOException, PrunUserInterruption, UserInterruption { cwObj.print(utils.formatString("Enter * as input to stop the current prun session",50,' ')); cwObj.println(); cwObj.println(utils.formatString("-",50,'-')); cwObj.println(); while (true) { this.scriptsDir=getInput("Enter the scripts directory :",null); if (isValid(scriptsDir,"dir","read")) { scriptsDir = this.getCanonicalPath(scriptsDir); break; } cwObj.println(SessionError.DIR_NOT_FOUND.errorMessage()); cwObj.println(); } this.scriptsExt=getInput("Enter the script file extension["+SCRIPT_EXT+"] :",SCRIPT_EXT); while (true) { this.logsDir=getInput("Enter the logs directory[scripts dir] :",this.scriptsDir); if (isValid(logsDir,"dir","write")) { logsDir = this.getCanonicalPath(logsDir); break; } cwObj.println(SessionError.DIR_NOT_FOUND.errorMessage()); cwObj.println(); } while (true) { String overWriteVal=getInput("Overwrite the log files (y/n)[n]? 
:","n"); if (overWriteVal.equalsIgnoreCase("y")) { this.overwriteLogs=true; break; } else if (overWriteVal.equalsIgnoreCase("n")) { this.overwriteLogs=false; break; } else { cwObj.println(SessionError.INCORRECT_OVERWRITE_OPTION.errorMessage()); cwObj.println(); } } while (true) { String connValue=getInput("Enter the number of connections("+ MIN_THREADS + "-" + utils.formatString(MAX_THREADS+")["+MIN_THREADS+"]",8,' ')+":",""+MIN_THREADS); try { this.connectionCnt=Integer.parseInt(connValue); if (this.connectionCnt >= MIN_THREADS && this.connectionCnt <= MAX_THREADS) { break; } cwObj.println(SessionError.PRUN_CONN_CNT_ERR.errorMessage()); cwObj.println(); }catch (NumberFormatException nfe) { cwObj.println("Invalid value specified for connections."+SessionDefaults.lineSeperator); } } if (timerMode) { while (true) { String strTimerValue=getInput("Enter the time to run in minutes (0=single interation) ["+TIMER_DEFAULT+"]:",TIMER_DEFAULT); //0=run to completion try { timerValue= Integer.parseInt(strTimerValue); if (timerValue >= 0) { if (timerValue > 0) { this.endTime = (timerValue*1000*60) + System.currentTimeMillis(); timerMode=true; } else timerMode=false; break; } cwObj.println("Invalid timer value"); cwObj.println(); } catch (NumberFormatException nfe) { } } //end while } } private boolean isValid(String input,String type, String permissions) { if (input != null) { File file=new File(input); if (!file.exists()) { return false; } if ("dir".equals(type) && !file.isDirectory()) { return false; } else if ("file".equals(type) && file.isDirectory()) { return false; } if (("read".equals(permissions)) && !file.canRead()) { return false; } if (("write".equals(permissions)) && !file.canWrite()) { return false; } return true; } return false; } private String getInput(String requestString,String defaultVal) throws IOException, PrunUserInterruption, UserInterruption { cwObj.print((utils.formatString(requestString,46,' '))); String input=crObj.getLine(); if ((input == null || 
input.trim().equals("")) && defaultVal != null) { input=defaultVal; } else if (input != null) { input=input.trim(); } if ((input != null && input.equals("*")) || isUserInterrupt) { throw pui; } return input; } /* * Gets the number of connection available for parallelload * the maxthread value is set by the return value of this method */ int getMaxServers() { boolean moreResults=false; int defaultservercnt=MAX_THREADS; Statement statement=null; try { if ((statement=sessObj.getStmtObj()) != null) { moreResults=statement.execute("cfgcmd:INFO DS "+sessObj.getSessionDsn()); } if (moreResults) { ResultSet resultSet=statement.getResultSet(); if (resultSet.next()) defaultservercnt=resultSet.getInt("MAX_SRVR_CNT"); } } catch (SQLException sqle) { //printAllExceptions(sqle); } return defaultservercnt; } public void run() { SessionInterface shareObj=new SessionInterface(); Session sessObjt=null; ErrorObject errObj=null; StringBuffer errBuf = new StringBuffer(); String threadName=currentThread().getName(); String logFile=null; ArrayList<Vector<String>> errArr = new ArrayList<Vector<String>>(); Vector<String> errVec=null; try { sessObjt=new Session( this.sessObj.getSessionUser(), this.sessObj.getSessionPass(), this.sessObj.getSessionRole(), this.sessObj.getSessionServer(), this.sessObj.getSessionPort(), this.sessObj.getSessionDsn(), crObj, cwObj); // copy the required values from the main session sessObjt.setSessionCtlg(this.sessObj.getSessionCtlg()); sessObjt.setSessionSchema(this.sessObj.getSessionSchema()); sessObjt.setSessionSQLTerminator(this.sessObj.getSessionSQLTerminator()); sessObjt.setSessionPrompt(this.sessObj.getPrompt()); sessObjt.setSessionTiming(this.sessObj.isSessionTimingOn()); sessObjt.setSessionTime(this.sessObj.isSessionTimeOn()); sessObjt.setSessionColSep(this.sessObj.getSessionColSep()); sessObjt.setListCount(this.sessObj.getListCount()); sessObjt.setStrDisplayFormat(this.sessObj.getStrDisplayFormat()); 
sessObjt.setMxosrvrVersion(this.sessObj.getMxosrvrVersion()); sessObjt.setSutVersion(this.sessObj.getSutVersion()); sessObjt.setT4verNum(sessObj.getT4verNum()); sessObjt.setSessionAutoPrepare(sessObj.isSessionAutoPrepare()); sessObjt.setSessionStatsEnabled(sessObj.isSessionStatsEnabled()); sessObjt= shareObj.createSession(sessObjt, 1, false); } catch (FileNotFoundException e) { errObj=SessionError.SCRIPT_FILE_NOT_FOUND; return; } catch (SQLException e) { if (e.getErrorCode() == SessionDefaults.SQL_ERR_CONN_MAX_LIMIT) { errObj=SessionError.CONN_MAX_LIMIT_ERR; }else { errObj = new ErrorObject(e.toString(), e.getErrorCode()); } return; } catch (InstantiationException e) { errObj=SessionError.DRIVER_INIT_ERR; return; } catch (IllegalAccessException e) { errObj=SessionError.DRIVER_INIT_ILLEGAL_ERR; return; } catch (ClassNotFoundException e) { errObj=SessionError.DRIVER_CLASS_ERR; return; } catch (IOException e) { errObj=new ErrorObject(SessionError.INTERNAL_ERR, "", " "+e.toString()); return; } finally { if (this.isUserInterrupt) { try { if (sessObjt.getConnObj() != null) { sessObjt.getConnObj().close(); } } catch (SQLException e) { // } sessObjt.setConnObj(null); shareObj=null; return; } if (errObj != null) { synchronized(this) { //if(this.summaryWriter !=null) //this.summaryWriter.write(" Failed"); try { this.summaryWriter.writeln(); this.summaryWriter.writeln(DateFormat.getDateTimeInstance().format(new Date()) + " An error occurred in "+threadName+" when connecting to the database: "+errObj.errorMessage()); errorMsg = true; } catch (IOException e) { } } } synchronized(this) { activeSessionLists.add(sessObjt); } } //end finally this.totalConnections++; //cwObj.println("\nConnection made for "+threadName); while (true) { if (timerMode && ((nowTime=System.currentTimeMillis()) >= endTime)) { //cwObj.println("\n" + threadName + " run time reached, terminating ..."); break; } logFile=null; synchronized(this.scriptsList) { if (this.scriptsList.size() > 0) { if 
(this.isUserInterrupt) { break; } String fileName=this.scriptsList.get(0).toString(); logFile=fileName+".log"; shareObj.setLogFile(this.logsDir+File.separator+logFile,this.overwriteLogs); shareObj.setScriptFile(this.scriptsDir+File.separator+fileName); this.scriptsList.remove(0); this.totalScriptFilesProcessed++; //debug information, prints the thread name and the script file it picks up for processing if (prunDebug ){ try { String debugMsg = DateFormat.getDateTimeInstance().format(new Date()) + ": Thread " + threadName + " processing " + shareObj.getScriptFile(); if (sessObj.getDisplayFormat() == SessionDefaults.XML_FORMAT) this.summaryWriter.writeln(xmlObj._beginCdataTag + debugMsg + xmlObj._endCdataTag); else if (sessObj.getDisplayFormat() == SessionDefaults.HTML_FORMAT) this.summaryWriter.writeln(htmlObj._startCommentTag + debugMsg + htmlObj._endCommentTag); else this.summaryWriter.writeln( debugMsg ); } catch (IOException ioEx) { } } } else { // no more files to process if (timerMode) { this.scriptsList.addAll(this.scriptsListBak); //cwObj.println("List addAll"); continue; } break; } } try { int sqlerrCnt=shareObj.getSqlErrorCount(); int sqlQueryCnt=shareObj.getSqlQueryCount(); int sqlWarningsCnt=shareObj.getSqlWarningsCount(); shareObj.invokeSession(sessObjt); //if there are any error occurred in the current file ..write them in to the errBuf if (shareObj.getSqlErrorCount() > sqlerrCnt || shareObj.getSqlWarningsCount() > sqlWarningsCnt) { errVec = new Vector<String>(); String scriptSqls=""+(shareObj.getSqlQueryCount() - sqlQueryCnt); String scriptErrs=""+(shareObj.getSqlErrorCount() - sqlerrCnt); String scriptWarns=""+(shareObj.getSqlWarningsCount() - sqlWarningsCnt); if (sessObj.getDisplayFormat() == SessionDefaults.RAW_FORMAT || sessObj.getDisplayFormat() == SessionDefaults.CSV_FORMAT) { errBuf.append(utils.formatString(logFile,65,' ',"")+" "+utils.formatString("",10,' ',""+scriptSqls)+" "+utils.formatString("",7,' ',""+scriptErrs)+" 
"+utils.formatString("",10,' ',""+scriptWarns)); errBuf.append("####"); } else { errVec.add(0,logFile); errVec.add(1,scriptSqls); errVec.add(2, scriptErrs); errVec.add(3, scriptWarns); errArr.add(errVec); } } } catch (IOException e) { // where to write this exception } } try { //if(sessObjt.getConnObj() != null) sessObjt.getConnObj().close(); sessObjt.setConnObj(null); }catch (SQLException sqle) { }finally { sessObjt.setConnObj(null); synchronized(this) { //this.totalScriptFilesProcessed++; int sqlerrors=this.totalSQLErrors; int sqlwarnings=this.totalSQLWarnings; boolean isErrHeadPrinted=true; this.totalSQLErrors+=shareObj.getSqlErrorCount(); this.totalSQLWarnings+=shareObj.getSqlWarningsCount(); if ((sqlerrors == 0 && this.totalSQLErrors != 0) || (sqlwarnings == 0 && this.totalSQLWarnings !=0) ) { isErrHeadPrinted=false; } this.totalSQLsProcessed+=shareObj.getSqlQueryCount(); try { if (!isErrHeadPrinted) { if (sessObj.getDisplayFormat() == SessionDefaults.RAW_FORMAT || sessObj.getDisplayFormat() == SessionDefaults.CSV_FORMAT) { this.summaryWriter.writeln(); this.summaryWriter.writeln("Check the following log files for the detailed error message."); this.summaryWriter.writeln(utils.formatString("-",4+65+27+8,'-')); // 65 - size of the log file name + 27 - size of the remaining fields + 6 - spaces between the heading columns this.summaryWriter.writeln(utils.formatString("Seq#",4,' ',"")+" "+utils.formatString("Log File Name",65,' ',"")+" "+utils.formatString("",10,' ',"Total Qrys")+" "+utils.formatString("",7,' ',"Errors")+" "+utils.formatString("",10,' ',""+"Warnings")); this.summaryWriter.writeln(utils.formatString("-",4+65+27+8,'-',"")); } else if (sessObj.getDisplayFormat() == SessionDefaults.HTML_FORMAT) { this.summaryWriter.writeln(htmlObj._beginRowTag); this.summaryWriter.writeln(htmlObj._beginTblHeadTag + "Seq # " + htmlObj._endTblHeadTag); this.summaryWriter.writeln(htmlObj._beginTblHeadTag + "Logfile " + htmlObj._endTblHeadTag); 
// ---- tail of run(): emits the per-script error/warning summary table ----
// NOTE(review): run() begins before this chunk; statements below are unchanged, comments only.
                    this.summaryWriter.writeln(htmlObj._beginTblHeadTag + "Total Qrys" + htmlObj._endTblHeadTag);
                    this.summaryWriter.writeln(htmlObj._beginTblHeadTag + "Errors" + htmlObj._endTblHeadTag);
                    this.summaryWriter.writeln(htmlObj._beginTblHeadTag + "Warnings" + htmlObj._endTblHeadTag);
                    this.summaryWriter.writeln(htmlObj._endRowTag);
                }
                // header row is printed only once for the whole summary
                isErrHeadPrinted=true;
            }
            if (errBuf.length() > 0)
            {
                int newLineIdx=-1;
                // errBuf holds "####"-delimited error records; drain and number them one by one
                while ((newLineIdx=errBuf.indexOf("####")) != -1)
                {
                    this.seqNo++;
                    this.summaryWriter.writeln(utils.formatString("",4,' ',this.seqNo+"")+" "+errBuf.substring(0,newLineIdx).toString());
                    errBuf.delete(0,newLineIdx+4);
                }
            }
            else
            {
                // one summary row per log file; vect layout (from producer): 0=log file name,
                // 1=total SQLs, 2=errors, 3=warnings — TODO confirm against the code that fills errArr
                for (int i=0; i< errArr.size();i++)
                {
                    Vector<String> vect = errArr.get(i);
                    this.seqNo++;
                    if (sessObj.getDisplayFormat() == SessionDefaults.XML_FORMAT)
                    {
                        this.summaryWriter.writeln(xmlObj._beginSeqIdTag + this.seqNo + xmlObj._endAttributeTag);
                        this.summaryWriter.writeln(xmlObj._beginLogFileNameTag + vect.get(0).trim() + xmlObj._endLogFileNameTag);
                        this.summaryWriter.writeln(xmlObj._beginTotalSqlsTag + vect.get(1).trim() + xmlObj._endTotalSqlsTag);
                        this.summaryWriter.writeln(xmlObj._beginTotalErrors + vect.get(2).trim() + xmlObj._endTotalErrors);
                        this.summaryWriter.writeln(xmlObj._beginTotalWarnings + vect.get(3).trim() +xmlObj._endTotalWarnings);
                        this.summaryWriter.writeln(xmlObj._endSeqIdTag);
                    }
                    else if ((sessObj.getDisplayFormat() == SessionDefaults.HTML_FORMAT))
                    {
                        this.summaryWriter.writeln(htmlObj._beginRowTag);
                        this.summaryWriter.writeln(htmlObj._beginTblDataAlignTag + this.seqNo + htmlObj._endTblDataTag);
                        this.summaryWriter.writeln(htmlObj._beginTblDataTag + vect.get(0) + htmlObj._endTblDataTag);
                        this.summaryWriter.writeln(htmlObj._beginTblDataAlignTag +vect.get(1) + htmlObj._endTblDataTag);
                        this.summaryWriter.writeln(htmlObj._beginTblDataAlignTag + vect.get(2)+ htmlObj._endTblDataTag);
                        this.summaryWriter.writeln(htmlObj._beginTblDataAlignTag + vect.get(3) + htmlObj._endTblDataTag);
                        this.summaryWriter.writeln(htmlObj._endRowTag);
                    }
                }
            }
            errBuf=null;
        }
        catch (IOException e) { } // NOTE(review): summary-write failures are silently swallowed — confirm intentional
    }
    }
    sessObjt=null; // NOTE(review): 'sessObjt' looks like a typo for 'sessObj' — confirm field name
    } //end run

    /**
     * Parses and validates the options of a PRUN command line.
     *
     * Recognized options (each takes a value unless noted): -sd/-scriptsdir, -e/-extension,
     * -ld/-logsdir, -o/-overwrite {y|n}, -c/-connections, -m/-minutes (timer mode only),
     * and -d/-defaults (no value, must be the only option).
     *
     * @param query the full command line, possibly ending with the session SQL terminator
     * @return true when all options are valid (or none were given); false when an error
     *         was reported via writePrunErrors/console
     * @throws InvalidNumberOfArguments when too many options are given or a value is missing
     * @throws IOException if writing an error message fails
     */
    public boolean validateArgs(String query) throws InvalidNumberOfArguments, IOException
    {
        // strip the trailing SQL terminator, if present
        if (query.endsWith(sessObj.getSessionSQLTerminator()))
        {
            query = query.substring(0, query.length()-sessObj.getSessionSQLTerminator().length());
        }
        setArgs(query.split(" "));
        // bare "prun" with no options: nothing to validate
        if (noOfArgs == 1)
            return true;
        if (sessObj.isDebugOn())
            System.out.println("No.of args :: " + noOfArgs);
        if (noOfArgs > 11)
        {
            //writeObj.writeln();
            throw new InvalidNumberOfArguments();
        }
        //Set defaults for non-interactive mode
        scriptsDir=System.getProperty("user.dir");
        scriptsExt = SCRIPT_EXT;
        connectionCnt = MIN_THREADS;
        // options come in "-opt value" pairs; i is advanced past the option name here
        // and past the value by the for-loop increment
        for (int i=1; i < noOfArgs; i++)
        {
            String value = null;
            String option=this.args[i++].trim();
            if (i < noOfArgs)
            {
                value=this.args[i].trim();
            }
            else
            {
                // only -d/-defaults may appear without a value
                if (!option.equalsIgnoreCase("-d") && !option.equalsIgnoreCase("-defaults"))
                {
                    //writeObj.writeln();
                    throw new InvalidNumberOfArguments();
                }
            }
            if (option.equalsIgnoreCase("-sd")|| option.equalsIgnoreCase("-scriptsdir"))
            {
                scriptsDir=this.getCanonicalPath(value);
            }
            else if (option.equalsIgnoreCase("-e")|| option.equalsIgnoreCase("-extension"))
            {
                scriptsExt=value;
            }
            else if (option.equalsIgnoreCase("-ld")|| option.equalsIgnoreCase("-logsdir"))
            {
                logsDir=this.getCanonicalPath(value);
            }
            else if (option.equalsIgnoreCase("-o")|| option.equalsIgnoreCase("-overwrite"))
            {
                if (value.equalsIgnoreCase("y"))
                {
                    this.overwriteLogs=true;
                }
                else if (value.equalsIgnoreCase("n"))
                {
                    this.overwriteLogs=false;
                }
                else
                {
                    this.writePrunErrors(SessionError.INCORRECT_OVERWRITE_OPTION);
                    return false;
                }
            }
            else if (option.equalsIgnoreCase("-c")|| option.equalsIgnoreCase("-connections"))
            {
                try
                {
                    connectionCnt = Integer.parseInt(value);
                }
                catch (NumberFormatException nfe)
                {
                    this.writePrunErrors(SessionError.INVALID_CONN_VALUE);
                    return false;
                }
            }
            else if (option.equalsIgnoreCase("-m")|| option.equalsIgnoreCase("-minutes"))
            {
                // -m is only legal when timer mode is enabled
                if (!timerMode)
                {
                    this.writePrunErrors(new ErrorObject(SessionError.UNKOWN_OPTION, "" ,option));
                    //If not in XML, HTML, or CSV markup mode, display help usage.
                    if(sessObj.getDisplayFormat() == SessionDefaults.RAW_FORMAT)
                        printUsage();
                    return (false);
                }
                // NOTE(review): unlike -c, a non-numeric value here is not caught — confirm intended
                timerValue = Integer.parseInt(value);
            }
            else if (option.equalsIgnoreCase("-d")|| option.equalsIgnoreCase("-defaults"))
            {
                // -defaults must be the only option on the line
                if (noOfArgs > 2)
                {
                    this.writePrunErrors(SessionError.DEFAULT_OPTION_ERR);
                    //If not in XML, HTML, or CSV markup mode, display help usage.
                    if(sessObj.getDisplayFormat() == SessionDefaults.RAW_FORMAT)
                        printUsage();
                    return (false);
                }
            }
            else
            {
                this.writePrunErrors(new ErrorObject(SessionError.UNKOWN_OPTION.errorCode(),SessionError.UNKOWN_OPTION.errorMessage()+ option));
                //If not in XML, HTML, or CSV markup mode, display help usage.
                if(sessObj.getDisplayFormat() == SessionDefaults.RAW_FORMAT)
                    printUsage();
                return (false);
            }
        } //end for
        if (!isValid(scriptsDir,"dir","read"))
        {
            this.writePrunErrors(SessionError.SCRIPTS_DIR_NOT_FOUND);
            return (false);
        }
        // logs directory defaults to the scripts directory
        if (logsDir == null)
            logsDir = scriptsDir;
        else if (!isValid(logsDir,"dir","read"))
        {
            this.writePrunErrors(SessionError.LOGS_DIR_NOT_FOUND);
            return (false);
        }
        if (connectionCnt < MIN_THREADS || connectionCnt > MAX_THREADS)
        {
            this.writePrunErrors(SessionError.PRUN_CONN_CNT_ERR);
            return (false);
        }
        if (timerMode)
        {
            if (timerValue > 0)
            {
                // absolute wall-clock deadline for the run, in milliseconds
                endTime= (timerValue * 1000*60) + System.currentTimeMillis();
            }
            else if (timerValue < 0)
            {
                cwObj.println("Invalid timer value");
                cwObj.println();
                return (false);
            }
            else //timerValue=0
            {
                cwObj.println(SessionDefaults.lineSeperator+"Timer mode ignored, -m is 0"+SessionDefaults.lineSeperator);
                timerMode = false;
            }
        }
        if (cwObj.getConsoleOut())
        {
            // echo the effective option values back to the console
            cwObj.println(SessionDefaults.lineSeperator+"PRUN options are -scriptsdir " + scriptsDir +
                SessionDefaults.lineSeperator+" -logsdir " + logsDir +
                SessionDefaults.lineSeperator+" -extension " + scriptsExt +
                SessionDefaults.lineSeperator+" -overwrite " + (this.overwriteLogs ? "y":"n") +
                SessionDefaults.lineSeperator+" -connections " + connectionCnt);
            if (timerMode)
                cwObj.println(SessionDefaults.lineSeperator+" -minutes " + timerValue );
        }
        return (true);
    } //end validateArgs

    /** Stores the tokenized command-line arguments and records their count. */
    private void setArgs(String[] args)
    {
        noOfArgs=args.length;
        this.args=args;
    }

    /** Returns the canonical form of fileName, or fileName unchanged if resolution fails. */
    private String getCanonicalPath(String fileName)
    {
        File file = new File(fileName);
        try
        {
            return (file.getCanonicalPath());
        }catch (IOException io)
        {
            // best effort: fall through and return the name as given
        }
        return (fileName);
    }

    /**
     * Prints the PRUN usage/help text. The -minutes option is only shown
     * when timer mode is enabled.
     * @throws IOException if writing to the output fails
     */
    public void printUsage() throws IOException
    {
        writeObj.writeln();
        writeObj.writeln("prun -sd|scriptsdir <directory-name> -e|extension <extension> ");
        writeObj.writeln(" -ld|logsdir <directory-name> -o|overwrite {y|n} ");
        writeObj.writeln(" -c|connections <no-of-connections>" );
        if (timerMode)
            writeObj.writeln(" -m|minutes <no-of-minutes>" );
        writeObj.writeln();
        writeObj.writeln("\t----- OR -----");
        writeObj.writeln();
        writeObj.writeln("prun -d|defaults");
        writeObj.writeln();
        writeObj.writeln("where:");
        writeObj.writeln("\t-defaults \tspecifies default values for all the options.");
        writeObj.writeln("\t-scriptsdir \tspecifies the directory containing the script files.");
        writeObj.writeln("\t-extension \tspecifies the extension of script files.");
        writeObj.writeln("\t-logsdir \tspecifies the directory where logs files are created.");
        writeObj.writeln("\t-overwrite \tspecifies if the log files have to be overwritten.");
        writeObj.writeln("\t-connections \tspecifies the number of connections.");
        if (timerMode)
        {
            writeObj.writeln("\t-minutes \tspecifies the time to run test.");
            writeObj.writeln();
            writeObj.writeln("\t[ optional property -Dtrafci.prun.connection.delta=n ]");
        }
    }

    /**
     * Wraps a summary message in the markup appropriate for the current
     * display format (XML CDATA, HTML comment, or plain indented text).
     */
    private String formatSummaryStr(String msg)
    {
        String summaryStr = null;
        if (sessObj.getDisplayFormat() == SessionDefaults.XML_FORMAT)
        {
            summaryStr = xmlObj._beginCdataTag + msg + xmlObj._endCdataTag;
        }
        else if (sessObj.getDisplayFormat() == SessionDefaults.HTML_FORMAT)
        {
            summaryStr = htmlObj._startCommentTag + msg + htmlObj._endCommentTag;
        }
        else
        {
            summaryStr = "\t" + msg + SessionDefaults.lineSeperator;
        }
        return summaryStr;
    }

    /**
     * Reports a PRUN validation error through the session's interface-error writer.
     * A leading blank line is emitted except in XML format.
     */
    private void writePrunErrors(ErrorObject prunError) throws IOException
    {
        if(sessObj.getDisplayFormat() != SessionDefaults.XML_FORMAT)
            writeObj.writeln();
        writeObj.writeInterfaceErrors(this.sessObj, prunError);
    }

    /** Closes any open XML root / HTML table markup on the console and resets the open-tag flags. */
    private void endPrunConsoleTags()
    {
        if (sessObj.getDisplayFormat() == SessionDefaults.XML_FORMAT)
        {
            cwObj.println(xmlObj._endRootTag);
            xmlObj._beginRootElement = false;
        }
        else if (sessObj.getDisplayFormat() == SessionDefaults.HTML_FORMAT)
        {
            cwObj.println(htmlObj._endTableTag);
            htmlObj._startTags = false;
        }
    }
} //end class
/* * Copyright 2015 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kie.server.remote.rest.jbpm.admin; import java.text.MessageFormat; import java.util.Arrays; import java.util.List; import javax.ws.rs.DELETE; import javax.ws.rs.DefaultValue; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.PUT; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.HttpHeaders; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.Variant; import org.jbpm.services.api.DeploymentNotFoundException; import org.jbpm.services.api.NodeInstanceNotFoundException; import org.jbpm.services.api.NodeNotFoundException; import org.jbpm.services.api.ProcessInstanceNotFoundException; import org.jbpm.services.api.admin.ExecutionErrorNotFoundException; import org.kie.server.api.model.admin.ExecutionErrorInstance; import org.kie.server.api.model.admin.ExecutionErrorInstanceList; import org.kie.server.api.model.admin.MigrationReportInstance; import org.kie.server.api.model.admin.MigrationReportInstanceList; import org.kie.server.api.model.admin.ProcessNodeList; import org.kie.server.api.model.admin.TimerInstanceList; import org.kie.server.api.model.instance.NodeInstanceList; import org.kie.server.remote.rest.common.Header; import org.kie.server.services.api.KieServerRegistry; import org.kie.server.services.jbpm.admin.ProcessAdminServiceBase; import org.slf4j.Logger; 
import org.slf4j.LoggerFactory;

import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import io.swagger.annotations.ApiResponse;
import io.swagger.annotations.ApiResponses;

import static org.kie.server.api.rest.RestURI.*;
import static org.kie.server.remote.rest.common.util.RestUtils.*;
import static org.kie.server.remote.rest.jbpm.resources.Messages.*;

/**
 * REST endpoint for administrative operations on process instances:
 * migration, node/timer manipulation, and execution-error management.
 * All work is delegated to {@link ProcessAdminServiceBase}; each handler
 * maps service exceptions to 404/500 responses and propagates the
 * conversation-id header.
 */
@Api(value="Process instances administration :: BPM")
@Path("server/" + ADMIN_PROCESS_URI)
public class ProcessAdminResource {

    private static final Logger logger = LoggerFactory.getLogger(ProcessAdminResource.class);

    // delegate performing the actual admin operations
    private ProcessAdminServiceBase processAdminServiceBase;
    // server registry, used to build conversation-id headers
    private KieServerRegistry context;

    /** No-arg constructor required by the JAX-RS runtime. */
    public ProcessAdminResource() {
    }

    public ProcessAdminResource(ProcessAdminServiceBase processAdminServiceBase, KieServerRegistry context) {
        this.processAdminServiceBase = processAdminServiceBase;
        this.context = context;
    }

    /**
     * Migrates a single process instance to another container/process definition,
     * optionally remapping node ids via the request payload.
     */
    @ApiOperation(value="Migrates process instance to new container and process definition with optional node mapping", response=MigrationReportInstance.class, code=201)
    @ApiResponses(value = { @ApiResponse(code = 500, message = "Unexpected error"), @ApiResponse(code = 404, message = "Process instance or Container Id not found") })
    @PUT
    @Path(MIGRATE_PROCESS_INST_PUT_URI)
    @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
    public Response migrateProcessInstance(@javax.ws.rs.core.Context HttpHeaders headers,
            @ApiParam(value = "container id that process instance belongs to", required = true) @PathParam("id") String containerId,
            @ApiParam(value = "identifier of process instance to be migrated", required = true) @PathParam("pInstanceId") Long processInstanceId,
            @ApiParam(value = "container id that new process definition belongs to", required = true) @QueryParam("targetContainerId") String targetContainerId,
            @ApiParam(value = "process definition that process instance should be migrated to", required = true)
            @QueryParam("targetProcessId") String targetProcessId,
            @ApiParam(value = "node mapping - unique ids of old definition to new definition given as Map", required = false) String payload) {
        Variant v = getVariant(headers);
        String type = getContentType(headers);
        Header conversationIdHeader = buildConversationIdHeader(containerId, context, headers);
        try {
            MigrationReportInstance reportInstance = processAdminServiceBase.migrateProcessInstance(containerId, processInstanceId, targetContainerId, targetProcessId, payload, type);
            return createCorrectVariant(reportInstance, headers, Response.Status.CREATED, conversationIdHeader);
        } catch (ProcessInstanceNotFoundException e) {
            return notFound( MessageFormat.format(PROCESS_INSTANCE_NOT_FOUND, processInstanceId), v, conversationIdHeader);
        } catch (DeploymentNotFoundException e) {
            return notFound( MessageFormat.format(CONTAINER_NOT_FOUND, containerId), v, conversationIdHeader);
        } catch (Exception e) {
            logger.error("Unexpected error during processing {}", e.getMessage(), e);
            return internalServerError(MessageFormat.format(UNEXPECTED_ERROR, e.getMessage()), v, conversationIdHeader);
        }
    }

    /**
     * Bulk variant of migrateProcessInstance: migrates all listed process
     * instances to the target container/process definition and returns one
     * migration report per instance.
     */
    @ApiOperation(value="Migrates process instances to new container and process definition with optional node mapping", response=MigrationReportInstanceList.class, code=201)
    @ApiResponses(value = { @ApiResponse(code = 500, message = "Unexpected error"), @ApiResponse(code = 404, message = "Process instance or Container Id not found") })
    @PUT
    @Path(MIGRATE_PROCESS_INSTANCES_PUT_URI)
    @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
    public Response migrateProcessInstances(@javax.ws.rs.core.Context HttpHeaders headers,
            @ApiParam(value = "container id that process instances belongs to", required = true) @PathParam("id") String containerId,
            @ApiParam(value = "list of identifiers of process instance to be migrated", required = true) @QueryParam("pInstanceId") List<Long> processInstanceIds,
            @ApiParam(value = "container id that new process definition belongs to", required = true) @QueryParam("targetContainerId") String targetContainerId,
            @ApiParam(value = "process definition that process instances should be migrated to", required = true) @QueryParam("targetProcessId") String targetProcessId,
            @ApiParam(value = "node mapping - unique ids of old definition to new definition given as Map", required = false) String payload) {
        Variant v = getVariant(headers);
        String type = getContentType(headers);
        Header conversationIdHeader = buildConversationIdHeader(containerId, context, headers);
        try {
            MigrationReportInstanceList reportInstances = processAdminServiceBase.migrateProcessInstances(containerId, processInstanceIds, targetContainerId, targetProcessId, payload, type);
            return createCorrectVariant(reportInstances, headers, Response.Status.CREATED, conversationIdHeader);
        } catch (ProcessInstanceNotFoundException e) {
            return notFound( MessageFormat.format(PROCESS_INSTANCE_NOT_FOUND, processInstanceIds), v, conversationIdHeader);
        } catch (DeploymentNotFoundException e) {
            return notFound( MessageFormat.format(CONTAINER_NOT_FOUND, containerId), v, conversationIdHeader);
        } catch (Exception e) {
            logger.error("Unexpected error during processing {}", e.getMessage(), e);
            return internalServerError(MessageFormat.format(UNEXPECTED_ERROR, e.getMessage()), v, conversationIdHeader);
        }
    }

    /** Cancels a single node instance inside an active process instance; 204 on success. */
    @ApiOperation(value="Cancels given node instance within process instance and container", response=Void.class, code=204)
    @ApiResponses(value = { @ApiResponse(code = 500, message = "Unexpected error"), @ApiResponse(code = 404, message = "Process instance, node instance or Container Id not found") })
    @DELETE
    @Path(CANCEL_NODE_INST_PROCESS_INST_DELETE_URI)
    @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
    public Response cancelNodeInstance(@javax.ws.rs.core.Context HttpHeaders headers,
            @ApiParam(value = "container id that process instance belongs to", required = true) @PathParam("id") String containerId,
            @ApiParam(value = "identifier of process instance that node instance belongs to", required = true) @PathParam("pInstanceId") Long processInstanceId,
            @ApiParam(value = "identifier of node instance that should be canceled", required = true) @PathParam("nodeInstanceId") Long nodeInstanceId) {
        Variant v = getVariant(headers);
        Header conversationIdHeader = buildConversationIdHeader(containerId, context, headers);
        try {
            processAdminServiceBase.cancelNodeInstance(containerId, processInstanceId, nodeInstanceId);
            return noContent(v, conversationIdHeader);
        } catch (NodeInstanceNotFoundException e) {
            return notFound( MessageFormat.format(NODE_INSTANCE_NOT_FOUND, nodeInstanceId, processInstanceId), v, conversationIdHeader);
        } catch (ProcessInstanceNotFoundException e) {
            return notFound( MessageFormat.format(PROCESS_INSTANCE_NOT_FOUND, processInstanceId), v, conversationIdHeader);
        } catch (DeploymentNotFoundException e) {
            return notFound( MessageFormat.format(CONTAINER_NOT_FOUND, containerId), v, conversationIdHeader);
        } catch (Exception e) {
            logger.error("Unexpected error during processing {}", e.getMessage(), e);
            return internalServerError(MessageFormat.format(UNEXPECTED_ERROR, e.getMessage()), v, conversationIdHeader);
        }
    }

    /** Re-triggers (re-executes) a node instance inside an active process instance; 201 on success. */
    @ApiOperation(value="Retriggers given node instance within process instance and container", response=Void.class, code=201)
    @ApiResponses(value = { @ApiResponse(code = 500, message = "Unexpected error"), @ApiResponse(code = 404, message = "Process instance, node instance or Container Id not found") })
    @PUT
    @Path(RETRIGGER_NODE_INST_PROCESS_INST_PUT_URI)
    @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
    public Response retriggerNodeInstance(@javax.ws.rs.core.Context HttpHeaders headers,
            @ApiParam(value = "container id that process instance belongs to", required = true) @PathParam("id") String containerId,
            @ApiParam(value = "identifier of process instance that node instance belongs to", required = true) @PathParam("pInstanceId") Long processInstanceId,
@ApiParam(value = "optional flag that indicates if the time expression is relative to the current date or not, defaults to true", required = false) @QueryParam("relative") @DefaultValue("true") boolean relative, @ApiParam(value = "Map of timer expressions - deplay, perios and repeat are allowed values in the map", required = true) String payload) { Variant v = getVariant(headers); String type = getContentType(headers); Header conversationIdHeader = buildConversationIdHeader(containerId, context, headers); try { processAdminServiceBase.updateTimer(containerId, processInstanceId, timerId, relative, payload, type); return createResponse("", v, Response.Status.CREATED, conversationIdHeader); } catch (NodeInstanceNotFoundException e) { return notFound( MessageFormat.format(TIMER_INSTANCE_NOT_FOUND, timerId, processInstanceId), v, conversationIdHeader); } catch (ProcessInstanceNotFoundException e) { return notFound( MessageFormat.format(PROCESS_INSTANCE_NOT_FOUND, processInstanceId), v, conversationIdHeader); } catch (DeploymentNotFoundException e) { return notFound( MessageFormat.format(CONTAINER_NOT_FOUND, containerId), v, conversationIdHeader); } catch (Exception e) { logger.error("Unexpected error during processing {}", e.getMessage(), e); return internalServerError(MessageFormat.format(UNEXPECTED_ERROR, e.getMessage()), v, conversationIdHeader); } } @ApiOperation(value="Triggers node within process instance and container", response=Void.class, code=201) @ApiResponses(value = { @ApiResponse(code = 500, message = "Unexpected error"), @ApiResponse(code = 404, message = "Process instance, node instance or Container Id not found") }) @POST @Path(TRIGGER_NODE_PROCESS_INST_POST_URI) @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON}) public Response triggerNode(@javax.ws.rs.core.Context HttpHeaders headers, @ApiParam(value = "container id that process instance belongs to", required = true) @PathParam("id") String containerId, @ApiParam(value = "identifier of 
process instance where node should be triggered", required = true) @PathParam("pInstanceId") Long processInstanceId, @ApiParam(value = "identifier of the node to be triggered", required = true) @PathParam("nodeId") Long nodeId) { Variant v = getVariant(headers); Header conversationIdHeader = buildConversationIdHeader(containerId, context, headers); try { processAdminServiceBase.triggerNode(containerId, processInstanceId, nodeId); return createResponse("", v, Response.Status.CREATED, conversationIdHeader); } catch (NodeNotFoundException e) { return notFound( MessageFormat.format(NODE_NOT_FOUND, nodeId, processInstanceId), v, conversationIdHeader); } catch (ProcessInstanceNotFoundException e) { return notFound( MessageFormat.format(PROCESS_INSTANCE_NOT_FOUND, processInstanceId), v, conversationIdHeader); } catch (DeploymentNotFoundException e) { return notFound( MessageFormat.format(CONTAINER_NOT_FOUND, containerId), v, conversationIdHeader); } catch (Exception e) { logger.error("Unexpected error during processing {}", e.getMessage(), e); return internalServerError(MessageFormat.format(UNEXPECTED_ERROR, e.getMessage()), v, conversationIdHeader); } } @ApiOperation(value="Retrieves all active timer instance from process instance and container", response=TimerInstanceList.class, code=200) @ApiResponses(value = { @ApiResponse(code = 500, message = "Unexpected error"), @ApiResponse(code = 404, message = "Process instance or Container Id not found") }) @GET @Path(TIMERS_PROCESS_INST_GET_URI) @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON}) public Response getTimerInstances(@javax.ws.rs.core.Context HttpHeaders headers, @ApiParam(value = "container id that process instance belongs to", required = true) @PathParam("id") String containerId, @ApiParam(value = "identifier of process instance that timer instances should be collected for", required = true) @PathParam("pInstanceId") Long processInstanceId) { Variant v = getVariant(headers); Header 
        conversationIdHeader = buildConversationIdHeader(containerId, context, headers);
        try {
            TimerInstanceList timerInstanceList = processAdminServiceBase.getTimerInstances(containerId, processInstanceId);
            return createCorrectVariant(timerInstanceList, headers, Response.Status.OK, conversationIdHeader);
        } catch (ProcessInstanceNotFoundException e) {
            return notFound( MessageFormat.format(PROCESS_INSTANCE_NOT_FOUND, processInstanceId), v, conversationIdHeader);
        } catch (DeploymentNotFoundException e) {
            return notFound( MessageFormat.format(CONTAINER_NOT_FOUND, containerId), v, conversationIdHeader);
        } catch (Exception e) {
            logger.error("Unexpected error during processing {}", e.getMessage(), e);
            return internalServerError(MessageFormat.format(UNEXPECTED_ERROR, e.getMessage()), v, conversationIdHeader);
        }
    }

    /** Lists the currently active node instances of a process instance. */
    @ApiOperation(value="Retrieves all active node instances from process instance and container", response=NodeInstanceList.class, code=200)
    @ApiResponses(value = { @ApiResponse(code = 500, message = "Unexpected error"), @ApiResponse(code = 404, message = "Process instance or Container Id not found") })
    @GET
    @Path(NODE_INSTANCES_PROCESS_INST_GET_URI)
    @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
    public Response getActiveNodeInstances(@javax.ws.rs.core.Context HttpHeaders headers,
            @ApiParam(value = "container id that process instance belongs to", required = true) @PathParam("id") String containerId,
            @ApiParam(value = "identifier of process instance that active nodes instances should be collected for", required = true) @PathParam("pInstanceId") Long processInstanceId) {
        Variant v = getVariant(headers);
        Header conversationIdHeader = buildConversationIdHeader(containerId, context, headers);
        try {
            NodeInstanceList nodeInstanceList = processAdminServiceBase.getActiveNodeInstances(containerId, processInstanceId);
            return createCorrectVariant(nodeInstanceList, headers, Response.Status.OK, conversationIdHeader);
        } catch (ProcessInstanceNotFoundException e) {
            return notFound( MessageFormat.format(PROCESS_INSTANCE_NOT_FOUND, processInstanceId), v, conversationIdHeader);
        } catch (DeploymentNotFoundException e) {
            return notFound( MessageFormat.format(CONTAINER_NOT_FOUND, containerId), v, conversationIdHeader);
        } catch (Exception e) {
            logger.error("Unexpected error during processing {}", e.getMessage(), e);
            return internalServerError(MessageFormat.format(UNEXPECTED_ERROR, e.getMessage()), v, conversationIdHeader);
        }
    }

    /** Lists all node definitions of the process a given process instance is based on. */
    @ApiOperation(value="Retrieves all nodes from process instance and container", response=ProcessNodeList.class, code=200)
    @ApiResponses(value = { @ApiResponse(code = 500, message = "Unexpected error"), @ApiResponse(code = 404, message = "Process instance or Container Id not found") })
    @GET
    @Path(NODES_PROCESS_INST_GET_URI)
    @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
    public Response getNodes(@javax.ws.rs.core.Context HttpHeaders headers,
            @ApiParam(value = "container id that process instance belongs to", required = true) @PathParam("id") String containerId,
            @ApiParam(value = "identifier of process instance that process nodes should be collected from", required = true) @PathParam("pInstanceId") Long processInstanceId) {
        Variant v = getVariant(headers);
        Header conversationIdHeader = buildConversationIdHeader(containerId, context, headers);
        try {
            ProcessNodeList processNodeList = processAdminServiceBase.getProcessNodes(containerId, processInstanceId);
            return createCorrectVariant(processNodeList, headers, Response.Status.OK, conversationIdHeader);
        } catch (ProcessInstanceNotFoundException e) {
            return notFound( MessageFormat.format(PROCESS_INSTANCE_NOT_FOUND, processInstanceId), v, conversationIdHeader);
        } catch (DeploymentNotFoundException e) {
            return notFound( MessageFormat.format(CONTAINER_NOT_FOUND, containerId), v, conversationIdHeader);
        } catch (Exception e) {
            logger.error("Unexpected error during processing {}", e.getMessage(), e);
            return internalServerError(MessageFormat.format(UNEXPECTED_ERROR, e.getMessage()), v, conversationIdHeader);
        }
    }

    /** Acknowledges a single execution error (delegates to the bulk acknowledge with a one-element list); 201 on success. */
    @ApiOperation(value="Acknowledge execution error by given id", response=Void.class, code=201)
    @ApiResponses(value = { @ApiResponse(code = 500, message = "Unexpected error"), @ApiResponse(code = 404, message = "Execution error or Container Id not found") })
    @PUT
    @Path(ACK_ERROR_PUT_URI)
    @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
    public Response acknowledgeError(@javax.ws.rs.core.Context HttpHeaders headers,
            @ApiParam(value = "container id that error belongs to", required = true) @PathParam("id") String containerId,
            @ApiParam(value = "identifier of error to be acknowledged", required = true) @PathParam("errorId") String errorId) {
        Variant v = getVariant(headers);
        Header conversationIdHeader = buildConversationIdHeader(containerId, context, headers);
        try {
            processAdminServiceBase.acknowledgeError(containerId, Arrays.asList(errorId));
            return createCorrectVariant("", headers, Response.Status.CREATED, conversationIdHeader);
        } catch (ExecutionErrorNotFoundException e) {
            return notFound(e.getMessage(), v, conversationIdHeader);
        } catch (DeploymentNotFoundException e) {
            return notFound(MessageFormat.format(CONTAINER_NOT_FOUND, containerId), v, conversationIdHeader);
        } catch (Exception e) {
            logger.error("Unexpected error during processing {}", e.getMessage(), e);
            return internalServerError(MessageFormat.format(UNEXPECTED_ERROR, e.getMessage()), v, conversationIdHeader);
        }
    }

    /** Acknowledges several execution errors at once; 201 on success. */
    @ApiOperation(value="Acknowledges given execution errors", response=Void.class, code=201)
    @ApiResponses(value = { @ApiResponse(code = 500, message = "Unexpected error"), @ApiResponse(code = 404, message = "Execution error or Container Id not found") })
    @PUT
    @Path(ACK_ERRORS_PUT_URI)
    @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
    public Response acknowledgeErrors(@javax.ws.rs.core.Context HttpHeaders headers,
            @ApiParam(value = "container id that errors belong to", required = true) @PathParam("id") String containerId,
            @ApiParam(value = "list of error identifiers to be acknowledged", required = true) @QueryParam("errorId") List<String> errorIds) {
        Variant v = getVariant(headers);
        Header conversationIdHeader = buildConversationIdHeader(containerId, context, headers);
        try {
            processAdminServiceBase.acknowledgeError(containerId, errorIds);
            return createCorrectVariant("", headers, Response.Status.CREATED, conversationIdHeader);
        } catch (ExecutionErrorNotFoundException e) {
            return notFound(e.getMessage(), v, conversationIdHeader);
        } catch (DeploymentNotFoundException e) {
            return notFound(MessageFormat.format(CONTAINER_NOT_FOUND, containerId), v, conversationIdHeader);
        } catch (Exception e) {
            logger.error("Unexpected error during processing {}", e.getMessage(), e);
            return internalServerError(MessageFormat.format(UNEXPECTED_ERROR, e.getMessage()), v, conversationIdHeader);
        }
    }

    /** Loads a single execution error by its identifier. */
    @ApiOperation(value="Retrieve execution error by its identifier", response=ExecutionErrorInstance.class, code=200)
    @ApiResponses(value = { @ApiResponse(code = 500, message = "Unexpected error"), @ApiResponse(code = 404, message = "Process instance or Container Id not found") })
    @GET
    @Path(ERROR_GET_URI)
    @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
    public Response getExecutionErrorById(@javax.ws.rs.core.Context HttpHeaders headers,
            @ApiParam(value = "container id that process error belongs to", required = true) @PathParam("id") String containerId,
            @ApiParam(value = "identifier of error to be loaded", required = true) @PathParam("errorId") String errorId) {
        Variant v = getVariant(headers);
        Header conversationIdHeader = buildConversationIdHeader(containerId, context, headers);
        try {
            ExecutionErrorInstance executionErrorInstance = processAdminServiceBase.getError(containerId, errorId);
            return createCorrectVariant(executionErrorInstance, headers, Response.Status.OK, conversationIdHeader);
        } catch (ExecutionErrorNotFoundException e) {
            return notFound(e.getMessage(), v, conversationIdHeader);
        } catch (DeploymentNotFoundException e) {
            return notFound( MessageFormat.format(CONTAINER_NOT_FOUND, containerId), v, conversationIdHeader);
        } catch (Exception e) {
            logger.error("Unexpected error during processing {}", e.getMessage(), e);
            return internalServerError(MessageFormat.format(UNEXPECTED_ERROR, e.getMessage()), v, conversationIdHeader);
        }
    }

    /** Pages through execution errors of one process instance, optionally filtered by node name and including acknowledged ones. */
    @ApiOperation(value="Retrieves execution errors for process instance and container, applies pagination", response=ExecutionErrorInstanceList.class, code=200)
    @ApiResponses(value = { @ApiResponse(code = 500, message = "Unexpected error"), @ApiResponse(code = 404, message = "Container Id not found") })
    @GET
    @Path(ERRORS_BY_PROCESS_INST_GET_URI)
    @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
    public Response getExecutionErrorsByProcessInstance(@javax.ws.rs.core.Context HttpHeaders headers,
            @ApiParam(value = "container id that process instance belongs to", required = true) @PathParam("id") String containerId,
            @ApiParam(value = "identifier of process instance that errors should be collected for", required = true) @PathParam("pInstanceId") Long processInstanceId,
            @ApiParam(value = "optional flag that indicates if acknowledged errors should also be collected, defaults to false", required = false) @QueryParam("includeAck") @DefaultValue("false") boolean includeAcknowledged,
            @ApiParam(value = "optional name of the node in the process instance to filter by", required = false) @QueryParam("node") String nodeName,
            @ApiParam(value = "optional pagination - at which page to start, defaults to 0 (meaning first)", required = false) @QueryParam("page") @DefaultValue("0") Integer page,
            @ApiParam(value = "optional pagination - size of the result, defaults to 10", required = false) @QueryParam("pageSize") @DefaultValue("10") Integer pageSize,
            @ApiParam(value = "optional sort column, no default", required = false) @QueryParam("sort") String sort,
            // NOTE(review): sortOrder is a boolean whose default "true" appears to mean ascending — confirm
            @ApiParam(value = "optional sort direction (asc, desc) - defaults to asc", required = false) @QueryParam("sortOrder") @DefaultValue("true") boolean sortOrder) {
        Variant v = getVariant(headers);
        Header conversationIdHeader = buildConversationIdHeader(containerId, context, headers);
        try {
            ExecutionErrorInstanceList executionErrorInstanceList = processAdminServiceBase.getExecutionErrorsByProcessInstance(containerId, processInstanceId, nodeName, includeAcknowledged, page, pageSize, sort, sortOrder);
            return createCorrectVariant(executionErrorInstanceList, headers, Response.Status.OK, conversationIdHeader);
        } catch (ExecutionErrorNotFoundException e) {
            return notFound(e.getMessage(), v, conversationIdHeader);
        } catch (DeploymentNotFoundException e) {
            return notFound( MessageFormat.format(CONTAINER_NOT_FOUND, containerId), v, conversationIdHeader);
        } catch (Exception e) {
            logger.error("Unexpected error during processing {}", e.getMessage(), e);
            return internalServerError(MessageFormat.format(UNEXPECTED_ERROR, e.getMessage()), v, conversationIdHeader);
        }
    }

    // NOTE(review): response=NodeInstanceList.class below looks wrong — probably ExecutionErrorInstanceList; confirm
    @ApiOperation(value="Retrieves execution errors for container, applies pagination", response=NodeInstanceList.class, code=200)
    @ApiResponses(value = { @ApiResponse(code = 500, message = "Unexpected error"), @ApiResponse(code = 404, message = "Process instance or Container Id not found") })
    @GET
    @Path(ERRORS_GET_URI)
    @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
    public Response getExecutionErrors(@javax.ws.rs.core.Context HttpHeaders headers,
            @ApiParam(value = "container id that errors belong to", required = true) @PathParam("id") String containerId,
            @ApiParam(value = "optional flag that indicates if acknowledged errors should also be collected, defaults to false", required = false) @QueryParam("includeAck") @DefaultValue("false") boolean includeAcknowledged,
            @ApiParam(value = "optional pagination - at which page to start, defaults to 0 (meaning first)", required = false) @QueryParam("page") @DefaultValue("0") Integer
page, @ApiParam(value = "optional pagination - size of the result, defaults to 10", required = false) @QueryParam("pageSize") @DefaultValue("10") Integer pageSize, @ApiParam(value = "optional sort column, no default", required = false) @QueryParam("sort") String sort, @ApiParam(value = "optional sort direction (asc, desc) - defaults to asc", required = false) @QueryParam("sortOrder") @DefaultValue("true") boolean sortOrder) { Variant v = getVariant(headers); Header conversationIdHeader = buildConversationIdHeader(containerId, context, headers); try { ExecutionErrorInstanceList executionErrorInstanceList = processAdminServiceBase.getExecutionErrors(containerId, includeAcknowledged, page, pageSize, sort, sortOrder); return createCorrectVariant(executionErrorInstanceList, headers, Response.Status.OK, conversationIdHeader); } catch (ExecutionErrorNotFoundException e) { return notFound(e.getMessage(), v, conversationIdHeader); } catch (DeploymentNotFoundException e) { return notFound( MessageFormat.format(CONTAINER_NOT_FOUND, containerId), v, conversationIdHeader); } catch (Exception e) { logger.error("Unexpected error during processing {}", e.getMessage(), e); return internalServerError(MessageFormat.format(UNEXPECTED_ERROR, e.getMessage()), v, conversationIdHeader); } } }
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.debugger.engine; import com.intellij.debugger.DebuggerBundle; import com.intellij.debugger.SourcePosition; import com.intellij.debugger.actions.JavaReferringObjectsValue; import com.intellij.debugger.actions.JumpToObjectAction; import com.intellij.debugger.engine.evaluation.EvaluateException; import com.intellij.debugger.engine.evaluation.EvaluationContextImpl; import com.intellij.debugger.engine.evaluation.TextWithImportsImpl; import com.intellij.debugger.engine.evaluation.expression.Modifier; import com.intellij.debugger.engine.events.DebuggerCommandImpl; import com.intellij.debugger.engine.events.SuspendContextCommandImpl; import com.intellij.debugger.impl.DebuggerContextImpl; import com.intellij.debugger.impl.DebuggerUtilsEx; import com.intellij.debugger.ui.impl.DebuggerTreeRenderer; import com.intellij.debugger.ui.impl.watch.*; import com.intellij.debugger.ui.tree.*; import com.intellij.debugger.ui.tree.render.*; import com.intellij.debugger.ui.tree.render.Renderer; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Computable; import com.intellij.openapi.util.Ref; import com.intellij.openapi.util.text.StringUtil; import com.intellij.psi.CommonClassNames; import com.intellij.psi.PsiElement; import 
com.intellij.psi.util.TypeConversionUtil;
import com.intellij.util.ThreeState;
import com.intellij.xdebugger.XExpression;
import com.intellij.xdebugger.evaluation.XDebuggerEvaluator;
import com.intellij.xdebugger.evaluation.XInstanceEvaluator;
import com.intellij.xdebugger.frame.*;
import com.intellij.xdebugger.frame.presentation.XErrorValuePresentation;
import com.intellij.xdebugger.frame.presentation.XValuePresentation;
import com.intellij.xdebugger.impl.breakpoints.XExpressionImpl;
import com.intellij.xdebugger.impl.evaluate.XValueCompactPresentation;
import com.intellij.xdebugger.impl.ui.XValueTextProvider;
import com.intellij.xdebugger.impl.ui.tree.XValueExtendedPresentation;
import com.intellij.xdebugger.impl.ui.tree.nodes.XValueNodeImpl;
import com.sun.jdi.ArrayReference;
import com.sun.jdi.ArrayType;
import com.sun.jdi.Value;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.concurrency.AsyncPromise;
import org.jetbrains.concurrency.Promise;
import javax.swing.*;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;

/**
 * Bridges the Java debugger's {@code ValueDescriptorImpl} model into the XDebugger
 * {@link XNamedValue} tree: presentation, children, source navigation, modification
 * and expression reconstruction all delegate to the wrapped descriptor.
 *
 * <p>Threading: almost every operation here schedules a command on the debugger
 * manager thread (via {@code myEvaluationContext.getManagerThread()}); cancelled
 * commands report "context has changed" back to the UI callback.
 *
 * @author egor
 */
public class JavaValue extends XNamedValue implements NodeDescriptorProvider, XValueTextProvider {
  private static final Logger LOG = Logger.getInstance(JavaValue.class);

  // Parent value in the variables tree (null for a root), plus the descriptor and
  // evaluation context this node renders from.
  private final JavaValue myParent;
  private final ValueDescriptorImpl myValueDescriptor;
  private final EvaluationContextImpl myEvaluationContext;
  private final NodeManagerImpl myNodeManager;
  // True when the descriptor's context was already set by the caller, so
  // computePresentation() must not overwrite it.
  private final boolean myContextSet;

  protected JavaValue(JavaValue parent,
                      @NotNull ValueDescriptorImpl valueDescriptor,
                      @NotNull EvaluationContextImpl evaluationContext,
                      NodeManagerImpl nodeManager,
                      boolean contextSet) {
    // Node name shown in the tree comes from the descriptor.
    super(valueDescriptor.calcValueName());
    myParent = parent;
    myValueDescriptor = valueDescriptor;
    myEvaluationContext = evaluationContext;
    myNodeManager = nodeManager;
    myContextSet = contextSet;
  }

  // Factory; must be called on the debugger manager thread.
  static JavaValue create(JavaValue parent,
                          @NotNull ValueDescriptorImpl valueDescriptor,
                          @NotNull EvaluationContextImpl evaluationContext,
                          NodeManagerImpl nodeManager,
                          boolean contextSet) {
    DebuggerManagerThreadImpl.assertIsManagerThread();
    return new JavaValue(parent, valueDescriptor, evaluationContext, nodeManager, contextSet);
  }

  // Convenience factory for a root value (no parent, context not pre-set).
  static JavaValue create(@NotNull ValueDescriptorImpl valueDescriptor,
                          @NotNull EvaluationContextImpl evaluationContext,
                          NodeManagerImpl nodeManager) {
    return create(null, valueDescriptor, evaluationContext, nodeManager, false);
  }

  public JavaValue getParent() {
    return myParent;
  }

  @Override
  @NotNull
  public ValueDescriptorImpl getDescriptor() {
    return myValueDescriptor;
  }

  @NotNull
  public EvaluationContextImpl getEvaluationContext() {
    return myEvaluationContext;
  }

  public NodeManagerImpl getNodeManager() {
    return myNodeManager;
  }

  /**
   * Computes icon/value text asynchronously on the manager thread and pushes the
   * result to {@code node}; installs a full-value evaluator for custom renderers
   * or when the value text exceeds {@code XValueNode.MAX_VALUE_LENGTH}.
   */
  @Override
  public void computePresentation(@NotNull final XValueNode node, @NotNull XValuePlace place) {
    final SuspendContextImpl suspendContext = myEvaluationContext.getSuspendContext();
    myEvaluationContext.getManagerThread().schedule(new SuspendContextCommandImpl(suspendContext) {
      @Override
      public Priority getPriority() {
        return Priority.NORMAL;
      }

      @Override
      protected void commandCancelled() {
        node.setPresentation(null, new XErrorValuePresentation(DebuggerBundle.message("error.context.has.changed")), false);
      }

      @Override
      public void contextAction() throws Exception {
        if (node.isObsolete()) {
          return;
        }
        if (!myContextSet) {
          myValueDescriptor.setContext(myEvaluationContext);
        }
        // The label is produced asynchronously; the listener fires once it is ready.
        myValueDescriptor.updateRepresentation(myEvaluationContext, new DescriptorLabelListener() {
          @Override
          public void labelChanged() {
            Icon nodeIcon = DebuggerTreeRenderer.getValueIcon(myValueDescriptor);
            final String value = getValueString();
            XValuePresentation presentation;
            @SuppressWarnings("ThrowableResultOfMethodCallIgnored")
            EvaluateException exception = myValueDescriptor.getEvaluateException();
            presentation = new JavaValuePresentation(value, myValueDescriptor.getIdLabel(),
                                                     exception != null ? exception.getMessage() : null, myValueDescriptor);
            if (myValueDescriptor.getLastRenderer() instanceof FullValueEvaluatorProvider) {
              // Renderer supplies its own "show full value" link.
              XFullValueEvaluator evaluator = ((FullValueEvaluatorProvider)myValueDescriptor.getLastRenderer())
                .getFullValueEvaluator(myEvaluationContext, myValueDescriptor);
              if (evaluator != null) {
                node.setFullValueEvaluator(evaluator);
              }
            }
            else if (value.length() > XValueNode.MAX_VALUE_LENGTH) {
              // Value text was truncated: lazily compute the untruncated text on demand.
              node.setFullValueEvaluator(new JavaFullValueEvaluator(myEvaluationContext) {
                @Override
                public void evaluate(@NotNull final XFullValueEvaluationCallback callback) {
                  final ValueDescriptorImpl fullValueDescriptor = myValueDescriptor.getFullValueDescriptor();
                  fullValueDescriptor.updateRepresentation(myEvaluationContext, new DescriptorLabelListener() {
                    @Override
                    public void labelChanged() {
                      callback.evaluated(fullValueDescriptor.getValueText());
                    }
                  });
                }
              });
            }
            node.setPresentation(nodeIcon, presentation, myValueDescriptor.isExpandable());
          }
        });
      }
    });
  }

  /**
   * Base class for "show full value" evaluators that must run on the debugger
   * manager thread within the current suspend context.
   */
  public abstract static class JavaFullValueEvaluator extends XFullValueEvaluator {
    protected final EvaluationContextImpl myEvaluationContext;

    public JavaFullValueEvaluator(@NotNull String linkText, EvaluationContextImpl evaluationContext) {
      super(linkText);
      myEvaluationContext = evaluationContext;
    }

    public JavaFullValueEvaluator(EvaluationContextImpl evaluationContext) {
      myEvaluationContext = evaluationContext;
    }

    // Subclass hook; invoked on the manager thread by startEvaluation().
    public abstract void evaluate(@NotNull XFullValueEvaluationCallback callback) throws Exception;

    protected EvaluationContextImpl getEvaluationContext() {
      return myEvaluationContext;
    }

    @Override
    public void startEvaluation(@NotNull final XFullValueEvaluationCallback callback) {
      if (callback.isObsolete()) return;
      myEvaluationContext.getManagerThread().schedule(new SuspendContextCommandImpl(myEvaluationContext.getSuspendContext()) {
        @Override
        public Priority getPriority() {
          return Priority.NORMAL;
        }

        @Override
        protected void commandCancelled() {
          callback.errorOccurred(DebuggerBundle.message("error.context.has.changed"));
        }

        @Override
        public void contextAction() throws Exception {
          if (callback.isObsolete()) return;
          evaluate(callback);
        }
      });
    }
  }

  // Clips a value string to the UI display limit.
  private static String truncateToMaxLength(@NotNull String value) {
    return value.substring(0, Math.min(value.length(), XValueNode.MAX_VALUE_LENGTH));
  }

  /**
   * Renders a descriptor's value/type/error text for the XDebugger tree, including
   * the compact inline preview used for primitive and String arrays.
   */
  private static class JavaValuePresentation extends XValueExtendedPresentation implements XValueCompactPresentation {
    private final String myValue;
    private final String myType;
    private final String myError;
    private final ValueDescriptorImpl myValueDescriptor;

    public JavaValuePresentation(@NotNull String value, @Nullable String type, @Nullable String error, ValueDescriptorImpl valueDescriptor) {
      myValue = value;
      myType = type;
      myError = error;
      myValueDescriptor = valueDescriptor;
    }

    @Nullable
    @Override
    public String getType() {
      return StringUtil.nullize(myType);
    }

    @Override
    public void renderValue(@NotNull XValueTextRenderer renderer) {
      renderValue(renderer, null);
    }

    @Override
    public void renderValue(@NotNull XValueTextRenderer renderer, @Nullable XValueNodeImpl node) {
      // A non-null node means compact (inline) rendering was requested.
      boolean compact = node != null;
      if (myError != null) {
        // Value text may already contain the error as a suffix; render only the prefix.
        if (myValue.endsWith(myError)) {
          renderer.renderValue(myValue.substring(0, myValue.length() - myError.length()));
        }
        renderer.renderError(myError);
      }
      else {
        if (compact && node.getValueContainer() instanceof JavaValue) {
          final JavaValue container = (JavaValue)node.getValueContainer();
          if (container.getDescriptor().isArray()) {
            // Inline preview: show the first few elements of a primitive/String array.
            final ArrayReference value = (ArrayReference)container.getDescriptor().getValue();
            final ArrayType type = (ArrayType)container.getDescriptor().getType();
            if (type != null) {
              final String typeName = type.componentTypeName();
              if (TypeConversionUtil.isPrimitive(typeName) || CommonClassNames.JAVA_LANG_STRING.equals(typeName)) {
                int size = value.length();
                // Strings get 5 preview elements, primitives get 10.
                int max = Math.min(size, CommonClassNames.JAVA_LANG_STRING.equals(typeName) ? 5 : 10);
                //TODO [eu]: this is a quick fix for IDEA-136606, need to move this away from EDT!!!
                final List<Value> values = value.getValues(0, max);
                int i = 0;
                final List<String> vals = new ArrayList<>(max);
                while (i < values.size()) {
                  // Each element is clipped to 15 chars (with ellipsis).
                  vals.add(StringUtil.first(values.get(i).toString(), 15, true));
                  i++;
                }
                String more = "";
                if (vals.size() < size) {
                  more = ", + " + (size - vals.size()) + " more";
                }
                renderer.renderValue("{" + StringUtil.join(vals, ", ") + more + "}");
                return;
              }
            }
          }
        }
        if (myValueDescriptor.isString()) {
          renderer.renderStringValue(myValue, "\"", XValueNode.MAX_VALUE_LENGTH);
          return;
        }
        String value = truncateToMaxLength(myValue);
        Renderer lastRenderer = myValueDescriptor.getLastRenderer();
        if (lastRenderer instanceof CompoundTypeRenderer) {
          lastRenderer = ((CompoundTypeRenderer)lastRenderer).getLabelRenderer();
        }
        if (lastRenderer instanceof ToStringRenderer) {
          // toString()-rendered values are displayed quoted like strings.
          value = StringUtil.wrapWithDoubleQuote(value);
        }
        renderer.renderValue(value);
      }
    }

    @NotNull
    @Override
    public String getSeparator() {
      // Omit the "=" separator when nothing will follow it (no id label, empty value);
      // prepend the declared type label when one is available.
      boolean emptyAfterSeparator = !myValueDescriptor.isShowIdLabel() && StringUtil.isEmpty(myValue);
      String declaredType = myValueDescriptor.getDeclaredTypeLabel();
      if (!StringUtil.isEmpty(declaredType)) {
        return emptyAfterSeparator ? declaredType : declaredType + " " + DEFAULT_SEPARATOR;
      }
      return emptyAfterSeparator ? "" : DEFAULT_SEPARATOR;
    }

    @Override
    public boolean isModified() {
      return myValueDescriptor.isDirty();
    }
  }

  @NotNull
  String getValueString() {
    return myValueDescriptor.getValueText();
  }

  // Start index of the next page of array children (see initChildrenArrayRenderer below).
  private int myCurrentChildrenStart = 0;

  /**
   * Builds this node's children on the manager thread via the descriptor's renderer,
   * paging array children in chunks of {@code XCompositeNode.MAX_CHILDREN_TO_SHOW}.
   */
  @Override
  public void computeChildren(@NotNull final XCompositeNode node) {
    scheduleCommand(myEvaluationContext, node, new SuspendContextCommandImpl(myEvaluationContext.getSuspendContext()) {
      @Override
      public Priority getPriority() {
        return Priority.NORMAL;
      }

      @Override
      public void contextAction() throws Exception {
        final XValueChildrenList children = new XValueChildrenList();
        final NodeRenderer renderer = myValueDescriptor.getRenderer(myEvaluationContext.getDebugProcess());
        final Ref<Integer> remainingNum = new Ref<>(0);
        renderer.buildChildren(myValueDescriptor.getValue(), new ChildrenBuilder() {
          @Override
          public NodeDescriptorFactory getDescriptorManager() {
            return myNodeManager;
          }

          @Override
          public NodeManager getNodeManager() {
            return myNodeManager;
          }

          @Override
          public ValueDescriptor getParentDescriptor() {
            return myValueDescriptor;
          }

          @Override
          public void setRemaining(int remaining) {
            remainingNum.set(remaining);
          }

          @Override
          public void initChildrenArrayRenderer(ArrayRenderer renderer) {
            // Window the array renderer over the next page and advance the cursor.
            renderer.START_INDEX = myCurrentChildrenStart;
            renderer.END_INDEX = myCurrentChildrenStart + XCompositeNode.MAX_CHILDREN_TO_SHOW - 1;
            myCurrentChildrenStart += XCompositeNode.MAX_CHILDREN_TO_SHOW;
          }

          @Override
          public void setChildren(List<DebuggerTreeNode> nodes) {
            // NOTE: the loop variable `node` shadows the outer XCompositeNode parameter.
            for (DebuggerTreeNode node : nodes) {
              final NodeDescriptor descriptor = node.getDescriptor();
              if (descriptor instanceof ValueDescriptorImpl) {
                // Value is calculated already in NodeManagerImpl
                children.add(create(JavaValue.this, (ValueDescriptorImpl)descriptor, myEvaluationContext, myNodeManager, false));
              }
              else if (descriptor instanceof MessageDescriptor) {
                children.add(new JavaStackFrame.DummyMessageValueNode(descriptor.getLabel(), null));
              }
            }
          }
        }, myEvaluationContext);
        node.addChildren(children, true);
        if (remainingNum.get() > 0) {
          node.tooManyChildren(remainingNum.get());
        }
      }
    });
  }

  /**
   * Schedules {@code command} on the manager thread, skipping it when the target
   * node is already obsolete; returns false if nothing was scheduled.
   */
  protected static boolean scheduleCommand(EvaluationContextImpl evaluationContext,
                                           @NotNull final XCompositeNode node,
                                           final SuspendContextCommandImpl command) {
    if (node.isObsolete()) {
      return false;
    }
    evaluationContext.getManagerThread().schedule(new SuspendContextCommandImpl(command.getSuspendContext()) {
      @Override
      public void contextAction() throws Exception {
        // Re-check obsolescence right before delegating: the node may have gone
        // stale between scheduling and execution.
        if (node.isObsolete()) {
          return;
        }
        command.contextAction();
      }

      @Override
      protected void commandCancelled() {
        node.setErrorMessage(DebuggerBundle.message("error.context.has.changed"));
      }
    });
    return true;
  }

  @Override
  public void computeSourcePosition(@NotNull final XNavigatable navigatable) {
    computeSourcePosition(navigatable, false);
  }

  // When `inline` is true, also resolves the (possibly different) inline position
  // and runs at lowest priority so interactive requests win.
  private void computeSourcePosition(@NotNull final XNavigatable navigatable, final boolean inline) {
    myEvaluationContext.getManagerThread().schedule(new SuspendContextCommandImpl(myEvaluationContext.getSuspendContext()) {
      @Override
      public Priority getPriority() {
        return inline ? Priority.LOWEST : Priority.NORMAL;
      }

      @Override
      protected void commandCancelled() {
        navigatable.setSourcePosition(null);
      }

      @Override
      public void contextAction() throws Exception {
        // PSI access requires a read action.
        ApplicationManager.getApplication().runReadAction(() -> {
          SourcePosition position = SourcePositionProvider.getSourcePosition(myValueDescriptor, getProject(), getDebuggerContext(), false);
          if (position != null) {
            navigatable.setSourcePosition(DebuggerUtilsEx.toXSourcePosition(position));
          }
          if (inline) {
            position = SourcePositionProvider.getSourcePosition(myValueDescriptor, getProject(), getDebuggerContext(), true);
            if (position != null) {
              navigatable.setSourcePosition(DebuggerUtilsEx.toXSourcePosition(position));
            }
          }
        });
      }
    });
  }

  @NotNull
  @Override
  public ThreeState computeInlineDebuggerData(@NotNull final XInlineDebuggerDataCallback callback) {
    computeSourcePosition(callback::computed, true);
    return ThreeState.YES;
  }

  private DebuggerContextImpl getDebuggerContext() {
    return myEvaluationContext.getDebugProcess().getDebuggerContext();
  }

  public Project getProject() {
    return myValueDescriptor.getProject();
  }

  @Override
  public boolean canNavigateToTypeSource() {
    return true;
  }

  @Override
  public void computeTypeSourcePosition(@NotNull final XNavigatable navigatable) {
    if (myEvaluationContext.getSuspendContext().isResumed()) return;
    DebugProcessImpl debugProcess = myEvaluationContext.getDebugProcess();
    debugProcess.getManagerThread().schedule(new JumpToObjectAction.NavigateCommand(getDebuggerContext(), myValueDescriptor, debugProcess, null) {
      @Override
      public Priority getPriority() {
        return Priority.HIGH;
      }

      @Override
      protected void doAction(@Nullable final SourcePosition sourcePosition) {
        if (sourcePosition != null) {
          ApplicationManager.getApplication().runReadAction(() -> navigatable.setSourcePosition(DebuggerUtilsEx.toXSourcePosition(sourcePosition)));
        }
      }
    });
  }

  @Nullable
  @Override
  public XValueModifier getModifier() {
    // Only settable values (per the descriptor) expose a modifier.
    return myValueDescriptor.canSetValue() ? myValueDescriptor.getModifier(this) : null;
  }

  // Cached result of calculateEvaluationExpression(); written on the manager thread.
  private volatile XExpression evaluationExpression = null;

  /**
   * Reconstructs (and caches) an evaluatable expression for this value, merging in
   * any additional runtime imports attached to the PSI expression.
   */
  @NotNull
  @Override
  public Promise<XExpression> calculateEvaluationExpression() {
    if (evaluationExpression != null) {
      return Promise.resolve(evaluationExpression);
    }
    else {
      final AsyncPromise<XExpression> res = new AsyncPromise<>();
      myEvaluationContext.getManagerThread().schedule(new SuspendContextCommandImpl(myEvaluationContext.getSuspendContext()) {
        @Override
        public Priority getPriority() {
          return Priority.HIGH;
        }

        @Override
        public void contextAction() throws Exception {
          evaluationExpression = ApplicationManager.getApplication().runReadAction(new Computable<XExpression>() {
            @Override
            public XExpression compute() {
              try {
                PsiElement psiExpression = getDescriptor().getTreeEvaluation(JavaValue.this, getDebuggerContext());
                if (psiExpression != null) {
                  XExpression res = TextWithImportsImpl.toXExpression(new TextWithImportsImpl(psiExpression));
                  // add runtime imports if any
                  Set<String> imports = psiExpression.getUserData(DebuggerTreeNodeExpression.ADDITIONAL_IMPORTS_KEY);
                  if (imports != null && res != null) {
                    if (res.getCustomInfo() != null) {
                      imports.add(res.getCustomInfo());
                    }
                    res = new XExpressionImpl(res.getExpression(), res.getLanguage(), StringUtil.join(imports, ","), res.getMode());
                  }
                  return res;
                }
              }
              catch (EvaluateException e) {
                LOG.info(e);
              }
              return null;
            }
          });
          res.setResult(evaluationExpression);
        }
      });
      return res;
    }
  }

  @Override
  public String getValueText() {
    return myValueDescriptor.getValueText();
  }

  @Nullable
  @Override
  public XReferrersProvider getReferrersProvider() {
    return new XReferrersProvider() {
      @Override
      public XValue getReferringObjectsValue() {
        return new JavaReferringObjectsValue(JavaValue.this, false);
      }
    };
  }

  @Nullable
  @Override
  public XInstanceEvaluator getInstanceEvaluator() {
    return new XInstanceEvaluator() {
      @Override
      public void evaluate(@NotNull final XDebuggerEvaluator.XEvaluationCallback callback, @NotNull final XStackFrame frame) {
        myEvaluationContext.getManagerThread().schedule(new DebuggerCommandImpl() {
          @Override
          protected void commandCancelled() {
            callback.errorOccurred(DebuggerBundle.message("error.context.has.changed"));
          }

          @Override
          protected void action() throws Exception {
            // Watches may carry a modifier that points at the actual item to inspect.
            ValueDescriptorImpl inspectDescriptor = myValueDescriptor;
            if (myValueDescriptor instanceof WatchItemDescriptor) {
              Modifier modifier = ((WatchItemDescriptor)myValueDescriptor).getModifier();
              if (modifier != null) {
                NodeDescriptor item = modifier.getInspectItem(getProject());
                if (item != null) {
                  inspectDescriptor = (ValueDescriptorImpl)item;
                }
              }
            }
            // NOTE(review): frame is cast unconditionally — assumes only JavaStackFrame
            // instances reach this evaluator.
            EvaluationContextImpl evaluationContext = ((JavaStackFrame)frame).getFrameDebuggerContext(null).createEvaluationContext();
            if (evaluationContext != null) {
              callback.evaluated(create(inspectDescriptor, evaluationContext, myNodeManager));
            }
            else {
              callback.errorOccurred("Context is not available");
            }
          }
        });
      }
    };
  }

  // Replaces the renderer and rebuilds the node; manager thread only.
  public void setRenderer(NodeRenderer nodeRenderer, final XValueNodeImpl node) {
    DebuggerManagerThreadImpl.assertIsManagerThread();
    myValueDescriptor.setRenderer(nodeRenderer);
    reBuild(node);
  }

  // Resets array-children paging and re-renders the node; manager thread only.
  public void reBuild(final XValueNodeImpl node) {
    DebuggerManagerThreadImpl.assertIsManagerThread();
    myCurrentChildrenStart = 0;
    node.getTree().getLaterInvocator().offer(() -> {
      node.clearChildren();
      computePresentation(node, XValuePlace.TREE);
    });
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.streaming.connectors.kafka.shuffle; import org.apache.flink.annotation.Experimental; import org.apache.flink.api.common.operators.Keys; import org.apache.flink.api.common.serialization.TypeInformationSerializationSchema; import org.apache.flink.api.common.typeinfo.BasicArrayTypeInfo; import org.apache.flink.api.common.typeinfo.PrimitiveArrayTypeInfo; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.common.typeutils.TypeSerializer; import org.apache.flink.api.java.functions.KeySelector; import org.apache.flink.api.java.tuple.Tuple; import org.apache.flink.runtime.state.KeyGroupRangeAssignment; import org.apache.flink.streaming.api.TimeCharacteristic; import org.apache.flink.streaming.api.datastream.DataStream; import org.apache.flink.streaming.api.datastream.DataStreamUtils; import org.apache.flink.streaming.api.datastream.KeyedStream; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; import org.apache.flink.streaming.api.functions.source.SourceFunction; import org.apache.flink.streaming.api.transformations.LegacySinkTransformation; import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer; import 
org.apache.flink.streaming.util.keys.KeySelectorUtil; import org.apache.flink.util.Preconditions; import org.apache.flink.util.PropertiesUtil; import java.util.Properties; /** * {@link FlinkKafkaShuffle} uses Kafka as a message bus to shuffle and persist data at the same * time. * * <p>Persisting shuffle data is useful when - you would like to reuse the shuffle data and/or, - * you would like to avoid a full restart of a pipeline during failure recovery * * <p>Persisting shuffle is achieved by wrapping a {@link FlinkKafkaShuffleProducer} and a {@link * FlinkKafkaShuffleConsumer} together into a {@link FlinkKafkaShuffle}. Here is an example how to * use a {@link FlinkKafkaShuffle}. * * <pre>{@code * StreamExecutionEnvironment env = ... // create execution environment * DataStream<X> source = env.addSource(...) // add data stream source * DataStream<Y> dataStream = ... // some transformation(s) based on source * * KeyedStream<Y, KEY> keyedStream = FlinkKafkaShuffle * .persistentKeyBy( // keyBy shuffle through kafka * dataStream, // data stream to be shuffled * topic, // Kafka topic written to * producerParallelism, // the number of tasks of a Kafka Producer * numberOfPartitions, // the number of partitions of the Kafka topic written to * kafkaProperties, // kafka properties for Kafka Producer and Consumer * keySelector<Y, KEY>); // key selector to retrieve key from `dataStream' * * keyedStream.transform... // some other transformation(s) * * KeyedStream<Y, KEY> keyedStreamReuse = FlinkKafkaShuffle * .readKeyBy( // Read the Kafka shuffle data again for other usages * topic, // the topic of Kafka where data is persisted * env, // execution environment, and it can be a new environment * typeInformation<Y>, // type information of the data persisted in Kafka * kafkaProperties, // kafka properties for Kafka Consumer * keySelector<Y, KEY>); // key selector to retrieve key * * keyedStreamReuse.transform... 
* // some other transformation(s)
 * }</pre>
 *
 * <p>Usage of {@link FlinkKafkaShuffle#persistentKeyBy} is similar to {@link
 * DataStream#keyBy(KeySelector)}. The differences are:
 *
 * <p>1). Partitioning is done through {@link FlinkKafkaShuffleProducer}. {@link
 * FlinkKafkaShuffleProducer} decides which partition a key goes when writing to Kafka
 *
 * <p>2). Shuffle data can be reused through {@link FlinkKafkaShuffle#readKeyBy}, as shown in the
 * example above.
 *
 * <p>3). Job execution is decoupled by the persistent Kafka message bus. In the example, the job
 * execution graph is decoupled to three regions: `KafkaShuffleProducer', `KafkaShuffleConsumer' and
 * `KafkaShuffleConsumerReuse' through `PERSISTENT DATA` as shown below. If any region fails the
 * execution, the other two keep progressing.
 *
 * <pre>
 *     source -> ... KafkaShuffleProducer -> PERSISTENT DATA -> KafkaShuffleConsumer -> ...
 *                                                 |
 *                                                 | ----------> KafkaShuffleConsumerReuse -> ...
 * </pre>
 */
@Experimental
public class FlinkKafkaShuffle {

    // Keys under which the write side smuggles its configuration to the read side: both values
    // are stored into the (flattened) kafkaProperties by persistentKeyBy and re-read by
    // writeKeyBy/readKeyBy, so the two halves can be wired up independently.
    static final String PRODUCER_PARALLELISM = "producer parallelism";
    static final String PARTITION_NUMBER = "partition number";

    /**
     * Uses Kafka as a message bus to persist keyBy shuffle.
     *
     * <p>Persisting keyBy shuffle is achieved by wrapping a {@link FlinkKafkaShuffleProducer} and
     * {@link FlinkKafkaShuffleConsumer} together.
     *
     * <p>On the producer side, {@link FlinkKafkaShuffleProducer} is similar to {@link
     * DataStream#keyBy(KeySelector)}. They use the same key group assignment function {@link
     * KeyGroupRangeAssignment#assignKeyToParallelOperator} to decide which partition a key goes.
     * Hence, each producer task can potentially write to each Kafka partition based on where the
     * key goes. Here, `numberOfPartitions` equals to the key group size. In the case of using
     * {@link TimeCharacteristic#EventTime}, each producer task broadcasts its watermark to ALL of
     * the Kafka partitions to make sure watermark information is propagated correctly.
     *
     * <p>On the consumer side, each consumer task should read partitions equal to the key group
     * indices it is assigned. `numberOfPartitions` is the maximum parallelism of the consumer. This
     * version only supports numberOfPartitions = consumerParallelism. In the case of using {@link
     * TimeCharacteristic#EventTime}, a consumer task is responsible to emit watermarks. Watermarks
     * are read from the corresponding Kafka partitions. Notice that a consumer task only starts to
     * emit a watermark after reading at least one watermark from each producer task to make sure
     * watermarks are monotonically increasing. Hence a consumer task needs to know
     * `producerParallelism` as well.
     *
     * @see FlinkKafkaShuffle#writeKeyBy
     * @see FlinkKafkaShuffle#readKeyBy
     * @param dataStream Data stream to be shuffled
     * @param topic Kafka topic written to
     * @param producerParallelism Parallelism of producer
     * @param numberOfPartitions Number of partitions
     * @param properties Kafka properties
     * @param keySelector Key selector to retrieve key from `dataStream'
     * @param <T> Type of the input data stream
     * @param <K> Type of key
     * @return A keyed stream that reads the shuffled data back from Kafka
     */
    public static <T, K> KeyedStream<T, K> persistentKeyBy(
            DataStream<T> dataStream,
            String topic,
            int producerParallelism,
            int numberOfPartitions,
            Properties properties,
            KeySelector<T, K> keySelector) {
        // KafkaProducer#propsToMap uses Properties purely as a HashMap without considering the
        // default properties
        // So we have to flatten the default property to first level elements.
        Properties kafkaProperties = PropertiesUtil.flatten(properties);
        // Record the two sizing parameters inside the properties so writeKeyBy/readKeyBy
        // (and the shuffle producer/consumer they create) can recover them.
        kafkaProperties.setProperty(PRODUCER_PARALLELISM, String.valueOf(producerParallelism));
        kafkaProperties.setProperty(PARTITION_NUMBER, String.valueOf(numberOfPartitions));

        StreamExecutionEnvironment env = dataStream.getExecutionEnvironment();
        writeKeyBy(dataStream, topic, kafkaProperties, keySelector);
        return readKeyBy(topic, env, dataStream.getType(), kafkaProperties, keySelector);
    }

    /**
     * Uses Kafka as a message bus to persist keyBy shuffle.
     *
     * <p>Persisting keyBy shuffle is achieved by wrapping a {@link FlinkKafkaShuffleProducer} and
     * {@link FlinkKafkaShuffleConsumer} together.
     *
     * <p>On the producer side, {@link FlinkKafkaShuffleProducer} is similar to {@link
     * DataStream#keyBy(KeySelector)}. They use the same key group assignment function {@link
     * KeyGroupRangeAssignment#assignKeyToParallelOperator} to decide which partition a key goes.
     * Hence, each producer task can potentially write to each Kafka partition based on where the
     * key goes. Here, `numberOfPartitions` equals to the key group size. In the case of using
     * {@link TimeCharacteristic#EventTime}, each producer task broadcasts its watermark to ALL of
     * the Kafka partitions to make sure watermark information is propagated correctly.
     *
     * <p>On the consumer side, each consumer task should read partitions equal to the key group
     * indices it is assigned. `numberOfPartitions` is the maximum parallelism of the consumer. This
     * version only supports numberOfPartitions = consumerParallelism. In the case of using {@link
     * TimeCharacteristic#EventTime}, a consumer task is responsible to emit watermarks. Watermarks
     * are read from the corresponding Kafka partitions. Notice that a consumer task only starts to
     * emit a watermark after reading at least one watermark from each producer task to make sure
     * watermarks are monotonically increasing. Hence a consumer task needs to know
     * `producerParallelism` as well.
     *
     * @see FlinkKafkaShuffle#writeKeyBy
     * @see FlinkKafkaShuffle#readKeyBy
     * @param dataStream Data stream to be shuffled
     * @param topic Kafka topic written to
     * @param producerParallelism Parallelism of producer
     * @param numberOfPartitions Number of partitions
     * @param properties Kafka properties
     * @param fields Key positions from the input data stream
     * @param <T> Type of the input data stream
     * @return A keyed stream that reads the shuffled data back from Kafka
     */
    public static <T> KeyedStream<T, Tuple> persistentKeyBy(
            DataStream<T> dataStream,
            String topic,
            int producerParallelism,
            int numberOfPartitions,
            Properties properties,
            int... fields) {
        // Positional-field variant: translate the field indices into a KeySelector and delegate.
        return persistentKeyBy(
                dataStream,
                topic,
                producerParallelism,
                numberOfPartitions,
                properties,
                keySelector(dataStream, fields));
    }

    /**
     * The write side of {@link FlinkKafkaShuffle#persistentKeyBy}.
     *
     * <p>This function contains a {@link FlinkKafkaShuffleProducer} to shuffle and persist data in
     * Kafka. {@link FlinkKafkaShuffleProducer} uses the same key group assignment function {@link
     * KeyGroupRangeAssignment#assignKeyToParallelOperator} to decide which partition a key goes.
     * Hence, each producer task can potentially write to each Kafka partition based on the key.
     * Here, the number of partitions equals to the key group size. In the case of using {@link
     * TimeCharacteristic#EventTime}, each producer task broadcasts each watermark to all of the
     * Kafka partitions to make sure watermark information is propagated properly.
     *
     * <p>Attention: make sure kafkaProperties include {@link
     * FlinkKafkaShuffle#PRODUCER_PARALLELISM} and {@link FlinkKafkaShuffle#PARTITION_NUMBER}
     * explicitly. {@link FlinkKafkaShuffle#PRODUCER_PARALLELISM} is the parallelism of the
     * producer. {@link FlinkKafkaShuffle#PARTITION_NUMBER} is the number of partitions. They are
     * not necessarily the same and allowed to be set independently.
     *
     * @see FlinkKafkaShuffle#persistentKeyBy
     * @see FlinkKafkaShuffle#readKeyBy
     * @param dataStream Data stream to be shuffled
     * @param topic Kafka topic written to
     * @param kafkaProperties Kafka properties for Kafka Producer
     * @param keySelector Key selector to retrieve key from `dataStream'
     * @param <T> Type of the input data stream
     * @param <K> Type of key
     */
    public static <T, K> void writeKeyBy(
            DataStream<T> dataStream,
            String topic,
            Properties kafkaProperties,
            KeySelector<T, K> keySelector) {
        StreamExecutionEnvironment env = dataStream.getExecutionEnvironment();
        TypeSerializer<T> typeSerializer = dataStream.getType().createSerializer(env.getConfig());

        // write data to Kafka
        // EXACTLY_ONCE semantics: the producer participates in checkpointing so the shuffle
        // data is written transactionally.
        FlinkKafkaShuffleProducer<T, K> kafkaProducer =
                new FlinkKafkaShuffleProducer<>(
                        topic,
                        typeSerializer,
                        kafkaProperties,
                        env.clean(keySelector),
                        FlinkKafkaProducer.Semantic.EXACTLY_ONCE,
                        FlinkKafkaProducer.DEFAULT_KAFKA_PRODUCERS_POOL_SIZE);

        // make sure the sink parallelism is set to producerParallelism
        Preconditions.checkArgument(
                kafkaProperties.getProperty(PRODUCER_PARALLELISM) != null,
                "Missing producer parallelism for Kafka Shuffle");
        int producerParallelism =
                PropertiesUtil.getInt(kafkaProperties, PRODUCER_PARALLELISM, Integer.MIN_VALUE);

        addKafkaShuffle(dataStream, kafkaProducer, producerParallelism);
    }

    /**
     * The write side of {@link FlinkKafkaShuffle#persistentKeyBy}.
     *
     * <p>This function contains a {@link FlinkKafkaShuffleProducer} to shuffle and persist data in
     * Kafka. {@link FlinkKafkaShuffleProducer} uses the same key group assignment function {@link
     * KeyGroupRangeAssignment#assignKeyToParallelOperator} to decide which partition a key goes.
     *
     * <p>Hence, each producer task can potentially write to each Kafka partition based on the key.
     * Here, the number of partitions equals to the key group size. In the case of using {@link
     * TimeCharacteristic#EventTime}, each producer task broadcasts each watermark to all of the
     * Kafka partitions to make sure watermark information is propagated properly.
     *
     * <p>Attention: make sure kafkaProperties include {@link
     * FlinkKafkaShuffle#PRODUCER_PARALLELISM} and {@link FlinkKafkaShuffle#PARTITION_NUMBER}
     * explicitly. {@link FlinkKafkaShuffle#PRODUCER_PARALLELISM} is the parallelism of the
     * producer. {@link FlinkKafkaShuffle#PARTITION_NUMBER} is the number of partitions. They are
     * not necessarily the same and allowed to be set independently.
     *
     * @see FlinkKafkaShuffle#persistentKeyBy
     * @see FlinkKafkaShuffle#readKeyBy
     * @param dataStream Data stream to be shuffled
     * @param topic Kafka topic written to
     * @param kafkaProperties Kafka properties for Kafka Producer
     * @param fields Key positions from the input data stream
     * @param <T> Type of the input data stream
     */
    public static <T> void writeKeyBy(
            DataStream<T> dataStream, String topic, Properties kafkaProperties, int... fields) {
        // Positional-field variant: translate the field indices into a KeySelector and delegate.
        writeKeyBy(dataStream, topic, kafkaProperties, keySelector(dataStream, fields));
    }

    /**
     * The read side of {@link FlinkKafkaShuffle#persistentKeyBy}.
     *
     * <p>Each consumer task should read kafka partitions equal to the key group indices it is
     * assigned. The number of kafka partitions is the maximum parallelism of the consumer. This
     * version only supports numberOfPartitions = consumerParallelism. In the case of using {@link
     * TimeCharacteristic#EventTime}, a consumer task is responsible to emit watermarks. Watermarks
     * are read from the corresponding Kafka partitions. Notice that a consumer task only starts to
     * emit a watermark after receiving at least one watermark from each producer task to make sure
     * watermarks are monotonically increasing. Hence a consumer task needs to know
     * `producerParallelism` as well.
     *
     * <p>Attention: make sure kafkaProperties include {@link
     * FlinkKafkaShuffle#PRODUCER_PARALLELISM} and {@link FlinkKafkaShuffle#PARTITION_NUMBER}
     * explicitly. {@link FlinkKafkaShuffle#PRODUCER_PARALLELISM} is the parallelism of the
     * producer. {@link FlinkKafkaShuffle#PARTITION_NUMBER} is the number of partitions. They are
     * not necessarily the same and allowed to be set independently.
     *
     * @see FlinkKafkaShuffle#persistentKeyBy
     * @see FlinkKafkaShuffle#writeKeyBy
     * @param topic The topic of Kafka where data is persisted
     * @param env Execution environment. readKeyBy's environment can be different from writeKeyBy's
     * @param typeInformation Type information of the data persisted in Kafka
     * @param kafkaProperties kafka properties for Kafka Consumer
     * @param keySelector key selector to retrieve key
     * @param <T> Schema type
     * @param <K> Key type
     * @return Keyed data stream
     */
    public static <T, K> KeyedStream<T, K> readKeyBy(
            String topic,
            StreamExecutionEnvironment env,
            TypeInformation<T> typeInformation,
            Properties kafkaProperties,
            KeySelector<T, K> keySelector) {

        TypeSerializer<T> typeSerializer = typeInformation.createSerializer(env.getConfig());
        TypeInformationSerializationSchema<T> schema =
                new TypeInformationSerializationSchema<>(typeInformation, typeSerializer);

        SourceFunction<T> kafkaConsumer =
                new FlinkKafkaShuffleConsumer<>(topic, schema, typeSerializer, kafkaProperties);

        // TODO: consider situations where numberOfPartitions != consumerParallelism
        Preconditions.checkArgument(
                kafkaProperties.getProperty(PARTITION_NUMBER) != null,
                "Missing partition number for Kafka Shuffle");
        int numberOfPartitions =
                PropertiesUtil.getInt(kafkaProperties, PARTITION_NUMBER, Integer.MIN_VALUE);
        // Source parallelism must equal the partition count so each consumer task owns exactly
        // the partitions matching its key group indices; only then is
        // reinterpretAsKeyedStream's "data is already correctly partitioned" assumption valid.
        DataStream<T> outputDataStream =
                env.addSource(kafkaConsumer).setParallelism(numberOfPartitions);

        return DataStreamUtils.reinterpretAsKeyedStream(outputDataStream, keySelector);
    }

    /**
     * Adds a {@link StreamKafkaShuffleSink} to {@link DataStream}.
     *
     * <p>{@link StreamKafkaShuffleSink} is associated a {@link FlinkKafkaShuffleProducer}.
     *
     * @param inputStream Input data stream connected to the shuffle
     * @param kafkaShuffleProducer Kafka shuffle sink function that can handle both records and
     *     watermark
     * @param producerParallelism The number of tasks writing to the kafka shuffle
     */
    private static <T, K> void addKafkaShuffle(
            DataStream<T> inputStream,
            FlinkKafkaShuffleProducer<T, K> kafkaShuffleProducer,
            int producerParallelism) {

        // read the output type of the input Transform to coax out errors about MissingTypeInfo
        inputStream.getTransformation().getOutputType();

        StreamKafkaShuffleSink<T> shuffleSinkOperator =
                new StreamKafkaShuffleSink<>(kafkaShuffleProducer);
        // The transformation is first created with the environment's default parallelism and
        // only overridden to producerParallelism after registration.
        LegacySinkTransformation<T> transformation =
                new LegacySinkTransformation<>(
                        inputStream.getTransformation(),
                        "kafka_shuffle",
                        shuffleSinkOperator,
                        inputStream.getExecutionEnvironment().getParallelism());
        inputStream.getExecutionEnvironment().addOperator(transformation);
        transformation.setParallelism(producerParallelism);
    }

    // A better place to put this function is DataStream; but put it here for now to avoid changing
    // DataStream
    // Builds a Tuple KeySelector from positional field indices, mirroring DataStream#keyBy(int...):
    // array-typed streams need the dedicated array selector, everything else goes through
    // ExpressionKeys.
    private static <T> KeySelector<T, Tuple> keySelector(DataStream<T> source, int... fields) {
        KeySelector<T, Tuple> keySelector;
        if (source.getType() instanceof BasicArrayTypeInfo
                || source.getType() instanceof PrimitiveArrayTypeInfo) {
            keySelector = KeySelectorUtil.getSelectorForArray(fields, source.getType());
        } else {
            Keys<T> keys = new Keys.ExpressionKeys<>(fields, source.getType());
            keySelector =
                    KeySelectorUtil.getSelectorForKeys(
                            keys, source.getType(), source.getExecutionEnvironment().getConfig());
        }
        return keySelector;
    }
}
package algorithms.imageProcessing.features;

import algorithms.imageProcessing.GreyscaleImage;
import algorithms.imageProcessing.GreyscaleImage.Type;
import java.security.SecureRandom;
import java.util.Arrays;
import junit.framework.TestCase;
import static junit.framework.TestCase.assertEquals;

/**
 * Tests for GradientIntegralHistograms: builds small gradient/theta image
 * fixtures, creates integral histograms over them, and checks that
 * extractWindow returns the expected per-bin sums and pixel counts.
 *
 * @author nichole
 */
public class GradientIntegralHistogramsTest extends TestCase {

    public GradientIntegralHistogramsTest() {
    }

    /*
    Fixture layout for test0 (gradient values by (x, y)):
     10  220
    100   10
     0 1 2 3 4 5 6 7 8   <-- the 9 orientation bins
    */
    public void test0() {
        // 2x2 gradient image g and matching orientation image t (degrees).
        GreyscaleImage g = new GreyscaleImage(2, 2);
        GreyscaleImage t = new GreyscaleImage(2, 2);
        g.setValue(0, 0, 10);
        t.setValue(0, 0, 20);
        g.setValue(0, 1, 220);
        t.setValue(0, 1, 90);
        g.setValue(1, 0, 100);
        t.setValue(1, 0, 130);
        g.setValue(1, 1, 10);
        t.setValue(1, 1, 170);

        GradientIntegralHistograms gh = new GradientIntegralHistograms();

        int[][] histograms = gh.createHistograms(g, t, 9);

        int[] outHist = new int[9];
        int[] outN = new int[1];

        // shared assertions over the full 2x2 sub-fixture
        assertTest0(gh, outHist, outN, histograms, g.getWidth(), g.getHeight());
    }

    // Asserts windowed extractions over the common 2x2 fixture (int[] histogram variant).
    // outN[0] is the number of pixels in the extracted window.
    private void assertTest0(GradientIntegralHistograms gh, int[] outHist,
        int[] outN, int[][] histograms, int w, int h) {

        // full window (0:1, 0:1) -> all 4 pixels
        gh.extractWindow(histograms, 0, 1, 0, 1, w, h, outHist, outN);

        // 10 220 100 10
        // 0 1 2 3 4 5 6 7 8
        assertEquals(5, outHist[0]);
        assertEquals(5, outHist[1]);
        assertEquals(220, outHist[4]);
        assertEquals(100, outHist[6]);
        assertEquals(10, outHist[8]);
        assertEquals(4, outN[0]);

        // 10 220 100 10
        // 0 1 2 3 4 5 6 7 8
        // top row only (y == 0): 2 pixels
        gh.extractWindow(histograms, 0, 1, 0, 0, w, h, outHist, outN);
        //HOGUtil._printHistograms_xy(histograms, w, h);
        //System.out.format("extract(%d:%d, %d:%d) = %s\n", 0, 1, 0, 0,
        //    Arrays.toString(outHist));
        assertEquals(5, outHist[0]);
        assertEquals(5, outHist[1]);
        assertEquals(0, outHist[4]);
        assertEquals(100, outHist[6]);
        assertEquals(0, outHist[8]);
        assertEquals(2, outN[0]);

        // 10 220 100 10
        // 0 1 2 3 4 5 6 7 8
        // left column only (x == 0): 2 pixels
        gh.extractWindow(histograms, 0, 0, 0, 1, w, h, outHist, outN);
        /*
        g.setValue(0, 0, 10);   t.setValue(0, 0, 30);
        g.setValue(0, 1, 220);  t.setValue(0, 1, 90);
        g.setValue(1, 0, 100);  t.setValue(1, 0, 130);
        g.setValue(1, 1, 10);   t.setValue(1, 1, 170);
        */
        assertEquals(5, outHist[0]);
        assertEquals(5, outHist[1]);
        assertEquals(220, outHist[4]);
        assertEquals(0, outHist[6]);
        assertEquals(0, outHist[8]);
        assertEquals(2, outN[0]);

        // single pixel (0, 0)
        gh.extractWindow(histograms, 0, 0, 0, 0, w, h, outHist, outN);
        /*
        g.setValue(0, 0, 10);   t.setValue(0, 0, 30);
        g.setValue(0, 1, 220);  t.setValue(0, 1, 90);
        g.setValue(1, 0, 100);  t.setValue(1, 0, 130);
        g.setValue(1, 1, 10);   t.setValue(1, 1, 170);
        */
        assertEquals(5, outHist[0]);
        assertEquals(5, outHist[1]);
        assertEquals(0, outHist[4]);
        assertEquals(0, outHist[6]);
        assertEquals(0, outHist[8]);
        assertEquals(1, outN[0]);

        // single pixel (1, 1)
        gh.extractWindow(histograms, 1, 1, 1, 1, w, h, outHist, outN);
        /*
        g.setValue(0, 0, 10);   t.setValue(0, 0, 30);
        g.setValue(0, 1, 220);  t.setValue(0, 1, 90);
        g.setValue(1, 0, 100);  t.setValue(1, 0, 130);
        g.setValue(1, 1, 10);   t.setValue(1, 1, 170);
        */
        assertEquals(0, outHist[1]);
        assertEquals(0, outHist[4]);
        assertEquals(0, outHist[6]);
        assertEquals(10, outHist[8]);
        assertEquals(1, outN[0]);
    }

    // Same windowed-extraction assertions as above, exercising the long[] overload
    // of extractWindow.
    private void assertTest0(GradientIntegralHistograms gh, long[] outHist,
        int[] outN, int[][] histograms, int w, int h) {

        gh.extractWindow(histograms, 0, 1, 0, 1, w, h, outHist, outN);

        // 10 220 100 10
        // 0 1 2 3 4 5 6 7 8
        assertEquals(5, outHist[0]);
        assertEquals(5, outHist[1]);
        assertEquals(220, outHist[4]);
        assertEquals(100, outHist[6]);
        assertEquals(10, outHist[8]);
        assertEquals(4, outN[0]);

        // 10 220 100 10
        // 0 1 2 3 4 5 6 7 8
        gh.extractWindow(histograms, 0, 1, 0, 0, w, h, outHist, outN);
        assertEquals(5, outHist[0]);
        assertEquals(5, outHist[1]);
        assertEquals(0, outHist[4]);
        assertEquals(100, outHist[6]);
        assertEquals(0, outHist[8]);
        assertEquals(2, outN[0]);

        // 10 220 100 10
        // 0 1 2 3 4 5 6 7 8
        gh.extractWindow(histograms, 0, 0, 0, 1, w, h, outHist, outN);
        /*
        g.setValue(0, 0, 10);   t.setValue(0, 0, 30);
        g.setValue(0, 1, 220);  t.setValue(0, 1, 90);
        g.setValue(1, 0, 100);  t.setValue(1, 0, 130);
        g.setValue(1, 1, 10);   t.setValue(1, 1, 170);
        */
        assertEquals(5, outHist[0]);
        assertEquals(5, outHist[1]);
        assertEquals(220, outHist[4]);
        assertEquals(0, outHist[6]);
        assertEquals(0, outHist[8]);
        assertEquals(2, outN[0]);

        gh.extractWindow(histograms, 0, 0, 0, 0, w, h, outHist, outN);
        /*
        g.setValue(0, 0, 10);   t.setValue(0, 0, 30);
        g.setValue(0, 1, 220);  t.setValue(0, 1, 90);
        g.setValue(1, 0, 100);  t.setValue(1, 0, 130);
        g.setValue(1, 1, 10);   t.setValue(1, 1, 170);
        */
        assertEquals(5, outHist[0]);
        assertEquals(5, outHist[1]);
        assertEquals(0, outHist[4]);
        assertEquals(0, outHist[6]);
        assertEquals(0, outHist[8]);
        assertEquals(1, outN[0]);

        gh.extractWindow(histograms, 1, 1, 1, 1, w, h, outHist, outN);
        /*
        g.setValue(0, 0, 10);   t.setValue(0, 0, 30);
        g.setValue(0, 1, 220);  t.setValue(0, 1, 90);
        g.setValue(1, 0, 100);  t.setValue(1, 0, 130);
        g.setValue(1, 1, 10);   t.setValue(1, 1, 170);
        */
        assertEquals(0, outHist[1]);
        assertEquals(0, outHist[4]);
        assertEquals(0, outHist[6]);
        assertEquals(10, outHist[8]);
        assertEquals(1, outN[0]);
    }

    /*
    Fixture layout for test1 (2x3, gradient values by (x, y)):
     10  225
     10  220
    100   10
     0 1 2 3 4 5 6 7 8
    */
    public void test1() {

        GreyscaleImage g = new GreyscaleImage(2, 3);
        GreyscaleImage t = new GreyscaleImage(2, 3);

        /*
        binWidth = 20
         10 30 50 70 90 110 130 150 170
         0  20 40 60 80 100 120 140 160 180   <-- bin edges in degrees
        */
        g.setValue(0, 0, 10);
        t.setValue(0, 0, 20);
        g.setValue(0, 1, 220);
        t.setValue(0, 1, 90);
        g.setValue(1, 0, 100);
        t.setValue(1, 0, 130);
        g.setValue(1, 1, 10);
        t.setValue(1, 1, 170);
        g.setValue(0, 2, 10);
        t.setValue(0, 2, 30);
        g.setValue(1, 2, 225);
        t.setValue(1, 2, 90);

        int w = g.getWidth();
        int h = g.getHeight();
        int nBins = 9;

        GradientIntegralHistograms gh = new GradientIntegralHistograms();

        int[][] histograms = gh.createHistograms(g, t, nBins);

        int[] outHist = new int[nBins];
        int[] outN = new int[1];

        // the 2x2 sub-window assertions still hold on the taller image
        assertTest0(gh, outHist, outN, histograms, w, h);

        // full 2x3 window
        gh.extractWindow(histograms, 0, 1, 0, 2, w, h, outHist, outN);
        /*
        g.setValue(0, 0, 10);   t.setValue(0, 0, 30);
        g.setValue(0, 1, 220);  t.setValue(0, 1, 90);
        g.setValue(1, 0, 100);  t.setValue(1, 0, 130);
        g.setValue(1, 1, 10);   t.setValue(1, 1, 170);
        g.setValue(0, 2, 10);   t.setValue(0, 2, 30);
        g.setValue(1, 2, 225);  t.setValue(1, 2, 90);
         10  225
         10  220
        100   10
         0 1 2 3 4 5 6 7 8
        */
        assertEquals(5, outHist[0]);
        assertEquals(15, outHist[1]);
        assertEquals(225 + 220, outHist[4]);
        assertEquals(100, outHist[6]);
        assertEquals(10, outHist[8]);
        assertEquals(6, outN[0]);

        // bottom row only (y == 2)
        gh.extractWindow(histograms, 0, 1, 2, 2, w, h, outHist, outN);
        /*
        g.setValue(0, 0, 10);   t.setValue(0, 0, 30);
        g.setValue(0, 1, 220);  t.setValue(0, 1, 90);
        g.setValue(1, 0, 100);  t.setValue(1, 0, 130);
        g.setValue(1, 1, 10);   t.setValue(1, 1, 170);
        g.setValue(0, 2, 10);   t.setValue(0, 2, 30);
        g.setValue(1, 2, 225);  t.setValue(1, 2, 90);
         10  225
         10  220
        100   10
         0 1 2 3 4 5 6 7 8
        */
        assertEquals(10, outHist[1]);
        assertEquals(225, outHist[4]);
        assertEquals(0, outHist[6]);
        assertEquals(0, outHist[8]);
        assertEquals(2, outN[0]);
    }

    /*
    Same fixture as test1, exercising the long[] extraction overload.
     10  225
     10  220
    100   10
     0 1 2 3 4 5 6 7 8
    */
    public void test2() {

        GreyscaleImage g = new GreyscaleImage(2, 3);
        GreyscaleImage t = new GreyscaleImage(2, 3);

        /*
        binWidth = 20
         10 30 50 70 90 110 130 150 170
         0  20 40 60 80 100 120 140 160 180
        */
        g.setValue(0, 0, 10);
        t.setValue(0, 0, 20);
        g.setValue(0, 1, 220);
        t.setValue(0, 1, 90);
        g.setValue(1, 0, 100);
        t.setValue(1, 0, 130);
        g.setValue(1, 1, 10);
        t.setValue(1, 1, 170);
        g.setValue(0, 2, 10);
        t.setValue(0, 2, 30);
        g.setValue(1, 2, 225);
        t.setValue(1, 2, 90);

        int w = g.getWidth();
        int h = g.getHeight();
        int nBins = 9;

        GradientIntegralHistograms gh = new GradientIntegralHistograms();

        int[][] histograms = gh.createHistograms(g, t, nBins);

        long[] outHist = new long[nBins];
        int[] outN = new int[1];

        assertTest0(gh, outHist, outN, histograms, w, h);

        gh.extractWindow(histograms, 0, 1, 0, 2, w, h, outHist, outN);
        /*
        g.setValue(0, 0, 10);   t.setValue(0, 0, 30);
        g.setValue(0, 1, 220);  t.setValue(0, 1, 90);
        g.setValue(1, 0, 100);  t.setValue(1, 0, 130);
        g.setValue(1, 1, 10);   t.setValue(1, 1, 170);
        g.setValue(0, 2, 10);   t.setValue(0, 2, 30);
        g.setValue(1, 2, 225);  t.setValue(1, 2, 90);
         10  225
         10  220
        100   10
         0 1 2 3 4 5 6 7 8
        */
        assertEquals(5, outHist[0]);
        assertEquals(15, outHist[1]);
        assertEquals(225 + 220, outHist[4]);
        assertEquals(100, outHist[6]);
        assertEquals(10, outHist[8]);
        assertEquals(6, outN[0]);

        gh.extractWindow(histograms, 0, 1, 2, 2, w, h, outHist, outN);
        /*
        g.setValue(0, 0, 10);   t.setValue(0, 0, 30);
        g.setValue(0, 1, 220);  t.setValue(0, 1, 90);
        g.setValue(1, 0, 100);  t.setValue(1, 0, 130);
        g.setValue(1, 1, 10);   t.setValue(1, 1, 170);
        g.setValue(0, 2, 10);   t.setValue(0, 2, 30);
        g.setValue(1, 2, 225);  t.setValue(1, 2, 90);
         10  225
         10  220
        100   10
         0 1 2 3 4 5 6 7 8
        */
        assertEquals(10, outHist[1]);
        assertEquals(225, outHist[4]);
        assertEquals(0, outHist[6]);
        assertEquals(0, outHist[8]);
        assertEquals(2, outN[0]);
    }

    /**
     * Property-style test: for random sparse gradient/theta images and random
     * windows, the histogram extracted via the integral-image path must match
     * (within +-1 per bin, for rounding) a brute-force per-pixel accumulation
     * using calculateBinsAndFractions's bilinear bin split.
     */
    public void testRandomExtract() throws Exception {

        SecureRandom random = SecureRandom.getInstance("SHA1PRNG");
        long seed = System.currentTimeMillis();
        //seed = 1525577808222L;
        // seed is printed so a failing run can be reproduced
        System.out.println("SEED=" + seed);
        random.setSeed(seed);

        int nBins = 9;
        int w = 16;
        int h = 25;
        int nT = 100;//1<<29;

        // -255 to 255
        GreyscaleImage gradient = new GreyscaleImage(w, h, Type.Bits64Signed);
        // 0 to 180
        GreyscaleImage theta = new GreyscaleImage(w, h);

        // populate roughly half of the pixels with random gradient/orientation values
        for (int i = 0; i < theta.getNPixels(); ++i) {
            if (random.nextBoolean()) { continue;}
            //int r = random.nextInt(512);
            //r -= 256;
            int r = random.nextInt(256);
            gradient.setValue(i, r);
            r = random.nextInt(180);
            theta.setValue(i, r);
        }

        GradientIntegralHistograms gih = new GradientIntegralHistograms();

        int[][] hists = gih.createHistograms(gradient, theta, nBins);

        int[] outN = new int[1];
        int[] outH = new int[nBins];
        int[] sumH = new int[nBins];
        double[] bilinearParams = new double[5];
        double f0, f1;
        int t, v, b0, b1;

        //HOGUtil._printHistograms_xy(hists, w, h);

        for (int i = 0; i < nT; ++i) {
            // random non-empty window [x1, x2] x [y1, y2] with x1 <= x2 and y1 <= y2
            int x2 = random.nextInt(w - 1);
            x2++;
            int x1 = random.nextInt(x2);

            int y2 = random.nextInt(h - 1);
            y2++;
            int y1 = random.nextInt(y2);

            gih.extractWindow(hists, x1, x2, y1, y2, w, h, outH, outN);

            // brute-force expected histogram: split each pixel's gradient magnitude
            // across its two neighboring orientation bins
            Arrays.fill(sumH, 0);

            for (int ii = x1; ii <= x2; ++ii) {
                for (int jj = y1; jj <= y2; ++jj) {
                    v = gradient.getValue(ii, jj);
                    // t, b0, b1, f0, f1;
                    gih.calculateBinsAndFractions(ii, jj, theta, nBins, bilinearParams);
                    t = (int)bilinearParams[0];
                    b0 = (int)bilinearParams[1];
                    b1 = (int)bilinearParams[2];
                    f0 = bilinearParams[3];
                    f1 = bilinearParams[4];
                    sumH[b0] += (f0 * v);
                    sumH[b1] += (f1 * v);
                }
            }

            //System.out.format("(%d:%d, %d:%d) testN=%d\n", x1, x2, y1, y2, i);
            //System.out.println("   extracted H=" + Arrays.toString(outH));
            //System.out.println("    expected H=" + Arrays.toString(sumH));

            // tolerance of 1 per bin allows for integer truncation differences
            for (int kk = 0; kk < sumH.length; ++kk) {
                assertTrue(Math.abs(sumH[kk] - outH[kk]) <= 1);
            }
        }
    }
}
/*
 * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
 * Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.config.model;

import java.io.Serializable;

/**
 * Result object for a ListDiscoveredResources request: holds the identifiers
 * of the resources AWS Config discovered, plus an optional pagination token
 * for retrieving the next page.
 */
public class ListDiscoveredResourcesResult implements Serializable, Cloneable {

    /**
     * <p>
     * The details that identify a resource that is discovered by AWS Config,
     * including the resource type, ID, and (if available) the custom resource
     * name.
     * </p>
     */
    private com.amazonaws.internal.SdkInternalList<ResourceIdentifier> resourceIdentifiers;

    /**
     * <p>
     * The string that you use in a subsequent request to get the next page of
     * results in a paginated response.
     * </p>
     */
    private String nextToken;

    /**
     * <p>
     * The details that identify a resource that is discovered by AWS Config,
     * including the resource type, ID, and (if available) the custom resource
     * name.
     * </p>
     *
     * @return The details that identify a resource that is discovered by AWS
     *         Config, including the resource type, ID, and (if available) the
     *         custom resource name.
     */
    public java.util.List<ResourceIdentifier> getResourceIdentifiers() {
        // Lazily create the backing list so callers never see null here.
        if (resourceIdentifiers == null) {
            resourceIdentifiers = new com.amazonaws.internal.SdkInternalList<ResourceIdentifier>();
        }
        return resourceIdentifiers;
    }

    /**
     * <p>
     * The details that identify a resource that is discovered by AWS Config,
     * including the resource type, ID, and (if available) the custom resource
     * name.
     * </p>
     *
     * @param resourceIdentifiers
     *        The details that identify a resource that is discovered by AWS
     *        Config, including the resource type, ID, and (if available) the
     *        custom resource name.
     */
    public void setResourceIdentifiers(
            java.util.Collection<ResourceIdentifier> resourceIdentifiers) {
        // A null collection clears the field; otherwise snapshot it into a new list.
        this.resourceIdentifiers =
                (resourceIdentifiers == null)
                        ? null
                        : new com.amazonaws.internal.SdkInternalList<ResourceIdentifier>(
                                resourceIdentifiers);
    }

    /**
     * <p>
     * The details that identify a resource that is discovered by AWS Config,
     * including the resource type, ID, and (if available) the custom resource
     * name.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if
     * any). Use {@link #setResourceIdentifiers(java.util.Collection)} or
     * {@link #withResourceIdentifiers(java.util.Collection)} if you want to
     * override the existing values.
     * </p>
     *
     * @param resourceIdentifiers
     *        The details that identify a resource that is discovered by AWS
     *        Config, including the resource type, ID, and (if available) the
     *        custom resource name.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public ListDiscoveredResourcesResult withResourceIdentifiers(
            ResourceIdentifier... resourceIdentifiers) {
        if (this.resourceIdentifiers == null) {
            // Pre-size the list to the number of values being appended.
            setResourceIdentifiers(
                    new com.amazonaws.internal.SdkInternalList<ResourceIdentifier>(
                            resourceIdentifiers.length));
        }
        java.util.Collections.addAll(this.resourceIdentifiers, resourceIdentifiers);
        return this;
    }

    /**
     * <p>
     * The details that identify a resource that is discovered by AWS Config,
     * including the resource type, ID, and (if available) the custom resource
     * name.
     * </p>
     *
     * @param resourceIdentifiers
     *        The details that identify a resource that is discovered by AWS
     *        Config, including the resource type, ID, and (if available) the
     *        custom resource name.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public ListDiscoveredResourcesResult withResourceIdentifiers(
            java.util.Collection<ResourceIdentifier> resourceIdentifiers) {
        setResourceIdentifiers(resourceIdentifiers);
        return this;
    }

    /**
     * <p>
     * The string that you use in a subsequent request to get the next page of
     * results in a paginated response.
     * </p>
     *
     * @param nextToken
     *        The string that you use in a subsequent request to get the next
     *        page of results in a paginated response.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * <p>
     * The string that you use in a subsequent request to get the next page of
     * results in a paginated response.
     * </p>
     *
     * @return The string that you use in a subsequent request to get the next
     *         page of results in a paginated response.
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * <p>
     * The string that you use in a subsequent request to get the next page of
     * results in a paginated response.
     * </p>
     *
     * @param nextToken
     *        The string that you use in a subsequent request to get the next
     *        page of results in a paginated response.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public ListDiscoveredResourcesResult withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getResourceIdentifiers() != null) {
            sb.append("ResourceIdentifiers: ").append(getResourceIdentifiers()).append(",");
        }
        if (getNextToken() != null) {
            sb.append("NextToken: ").append(getNextToken());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // Also rejects null, matching the original null/instanceof checks.
        if (!(obj instanceof ListDiscoveredResourcesResult)) {
            return false;
        }
        ListDiscoveredResourcesResult other = (ListDiscoveredResourcesResult) obj;
        // Objects.equals reproduces the original xor-null + equals comparison.
        return java.util.Objects.equals(
                        other.getResourceIdentifiers(), this.getResourceIdentifiers())
                && java.util.Objects.equals(other.getNextToken(), this.getNextToken());
    }

    @Override
    public int hashCode() {
        // Same 31-based accumulation (null contributes 0) as the generated original.
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + java.util.Objects.hashCode(getResourceIdentifiers());
        hashCode = prime * hashCode + java.util.Objects.hashCode(getNextToken());
        return hashCode;
    }

    @Override
    public ListDiscoveredResourcesResult clone() {
        try {
            return (ListDiscoveredResourcesResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                            + "even though we're Cloneable!", e);
        }
    }
}
package org.apache.solr.handler.component; /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.IOException; import java.net.URL; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.lucene.document.Document; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.StorableField; import org.apache.lucene.index.StoredDocument; import org.apache.lucene.index.Term; import org.apache.lucene.util.BytesRef; import org.apache.solr.client.solrj.SolrResponse; import org.apache.solr.cloud.CloudDescriptor; import org.apache.solr.cloud.ZkController; import org.apache.solr.common.SolrDocument; import org.apache.solr.common.SolrDocumentList; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.cloud.ClusterState; import org.apache.solr.common.cloud.DocCollection; import org.apache.solr.common.cloud.Slice; import org.apache.solr.common.cloud.ZkStateReader; import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.params.ShardParams; import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.util.NamedList; import 
org.apache.solr.common.util.StrUtils; import org.apache.solr.core.SolrCore; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.response.SolrQueryResponse; import org.apache.solr.response.transform.DocTransformer; import org.apache.solr.response.transform.TransformContext; import org.apache.solr.schema.FieldType; import org.apache.solr.schema.IndexSchema; import org.apache.solr.schema.SchemaField; import org.apache.solr.search.ReturnFields; import org.apache.solr.search.SolrIndexSearcher; import org.apache.solr.search.SolrReturnFields; import org.apache.solr.update.DocumentBuilder; import org.apache.solr.update.PeerSync; import org.apache.solr.update.UpdateLog; import org.apache.solr.util.RefCounted; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class RealTimeGetComponent extends SearchComponent { public static Logger log = LoggerFactory.getLogger(UpdateLog.class); public static final String COMPONENT_NAME = "get"; @Override public void prepare(ResponseBuilder rb) throws IOException { // Set field flags ReturnFields returnFields = new SolrReturnFields( rb.req ); rb.rsp.setReturnFields( returnFields ); } @Override public void process(ResponseBuilder rb) throws IOException { SolrQueryRequest req = rb.req; SolrQueryResponse rsp = rb.rsp; SolrParams params = req.getParams(); if (!params.getBool(COMPONENT_NAME, true)) { return; } String val = params.get("getVersions"); if (val != null) { processGetVersions(rb); return; } val = params.get("getUpdates"); if (val != null) { processGetUpdates(rb); return; } String id[] = params.getParams("id"); String ids[] = params.getParams("ids"); if (id == null && ids == null) { return; } String[] allIds = id==null ? 
new String[0] : id;

    if (ids != null) {
      List<String> lst = new ArrayList<String>();
      for (String s : allIds) {
        lst.add(s);
      }
      for (String idList : ids) {
        lst.addAll( StrUtils.splitSmart(idList, ",", true) );
      }
      allIds = lst.toArray(new String[lst.size()]);
    }

    SolrCore core = req.getCore();
    SchemaField idField = core.getLatestSchema().getUniqueKeyField();
    FieldType fieldType = idField.getType();

    SolrDocumentList docList = new SolrDocumentList();
    UpdateLog ulog = core.getUpdateHandler().getUpdateLog();

    RefCounted<SolrIndexSearcher> searcherHolder = null;

    DocTransformer transformer = rsp.getReturnFields().getTransformer();
    if (transformer != null) {
      TransformContext context = new TransformContext();
      context.req = req;
      transformer.setContext(context);
    }
    try {
      SolrIndexSearcher searcher = null;

      BytesRef idBytes = new BytesRef();
      for (String idStr : allIds) {
        fieldType.readableToIndexed(idStr, idBytes);
        // Check the update log first so uncommitted updates are visible.
        if (ulog != null) {
          Object o = ulog.lookup(idBytes);
          if (o != null) {
            // should currently be a List<Oper,Ver,Doc/Id>
            List entry = (List)o;
            assert entry.size() >= 3;
            int oper = (Integer)entry.get(0) & UpdateLog.OPERATION_MASK;
            switch (oper) {
              case UpdateLog.ADD:
                SolrDocument doc = toSolrDoc((SolrInputDocument)entry.get(entry.size()-1), core.getLatestSchema());
                if(transformer!=null) {
                  transformer.transform(doc, -1); // unknown docID
                }
                docList.add(doc);
                break;
              case UpdateLog.DELETE:
                break;
              default:
                throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unknown Operation! " + oper);
            }
            continue;
          }
        }

        // didn't find it in the update log, so it should be in the newest searcher opened
        if (searcher == null) {
          searcherHolder = core.getRealtimeSearcher();
          searcher = searcherHolder.get();
        }

        // SolrCore.verbose("RealTimeGet using searcher ", searcher);
        int docid = searcher.getFirstMatch(new Term(idField.getName(), idBytes));
        if (docid < 0) continue;
        StoredDocument luceneDocument = searcher.doc(docid);
        SolrDocument doc = toSolrDoc(luceneDocument, core.getLatestSchema());
        if( transformer != null ) {
          transformer.transform(doc, docid);
        }
        docList.add(doc);
      }

    } finally {
      if (searcherHolder != null) {
        searcherHolder.decref();
      }
    }

    // if the client specified a single id=foo, then use "doc":{
    // otherwise use a standard doclist
    if (ids == null && allIds.length <= 1) {
      // if the doc was not found, then use a value of null.
      rsp.add("doc", docList.size() > 0 ? docList.get(0) : null);
    } else {
      docList.setNumFound(docList.size());
      rsp.add("response", docList);
    }
  }

  // Sentinel returned by getInputDocumentFromTlog() to distinguish
  // "deleted in the tlog" from "not present in the tlog" (null).
  public static SolrInputDocument DELETED = new SolrInputDocument();

  /** returns the SolrInputDocument from the current tlog, or DELETED if it has been deleted, or
   * null if there is no record of it in the current update log.  If null is returned, it could
   * still be in the latest index.
   */
  public static SolrInputDocument getInputDocumentFromTlog(SolrCore core, BytesRef idBytes) {

    UpdateLog ulog = core.getUpdateHandler().getUpdateLog();

    if (ulog != null) {
      Object o = ulog.lookup(idBytes);
      if (o != null) {
        // should currently be a List<Oper,Ver,Doc/Id>
        List entry = (List)o;
        assert entry.size() >= 3;
        int oper = (Integer)entry.get(0) & UpdateLog.OPERATION_MASK;
        switch (oper) {
          case UpdateLog.ADD:
            return (SolrInputDocument)entry.get(entry.size()-1);
          case UpdateLog.DELETE:
            return DELETED;
          default:
            throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unknown Operation! " + oper);
        }
      }
    }

    return null;
  }

  /**
   * Resolves the id to a SolrInputDocument: tlog first, then the realtime
   * searcher; returns null if the document is deleted or absent.
   */
  public static SolrInputDocument getInputDocument(SolrCore core, BytesRef idBytes) throws IOException {
    SolrInputDocument sid = null;
    RefCounted<SolrIndexSearcher> searcherHolder = null;
    try {
      SolrIndexSearcher searcher = null;
      sid = getInputDocumentFromTlog(core, idBytes);
      if (sid == DELETED) {
        return null;
      }

      if (sid == null) {
        // didn't find it in the update log, so it should be in the newest searcher opened
        if (searcher == null) {
          searcherHolder = core.getRealtimeSearcher();
          searcher = searcherHolder.get();
        }

        // SolrCore.verbose("RealTimeGet using searcher ", searcher);
        SchemaField idField = core.getLatestSchema().getUniqueKeyField();

        int docid = searcher.getFirstMatch(new Term(idField.getName(), idBytes));
        if (docid < 0) return null;
        StoredDocument luceneDocument = searcher.doc(docid);
        sid = toSolrInputDocument(luceneDocument, core.getLatestSchema());
      }
    } finally {
      if (searcherHolder != null) {
        searcherHolder.decref();
      }
    }

    return sid;
  }

  // Converts a stored Lucene document back into a SolrInputDocument,
  // skipping copyField targets and un-stored fields.
  private static SolrInputDocument toSolrInputDocument(StoredDocument doc, IndexSchema schema) {
    SolrInputDocument out = new SolrInputDocument();
    for( StorableField f : doc.getFields() ) {
      String fname = f.name();
      SchemaField sf = schema.getFieldOrNull(f.name());
      Object val = null;
      if (sf != null) {
        if (!sf.stored() || schema.isCopyFieldTarget(sf)) continue;
        val = sf.getType().toObject(f);   // object or external string?
      } else {
        val = f.stringValue();
        if (val == null) val = f.numericValue();
        if (val == null) val = f.binaryValue();
        if (val == null) val = f;
      }

      // todo: how to handle targets of copy fields (including polyfield sub-fields)?
      out.addField(fname, val);
    }
    return out;
  }

  private static SolrDocument toSolrDoc(StoredDocument doc, IndexSchema schema) {
    SolrDocument out = new SolrDocument();
    for( StorableField f : doc.getFields() ) {
      // Make sure multivalued fields are represented as lists
      Object existing = out.get(f.name());
      if (existing == null) {
        SchemaField sf = schema.getFieldOrNull(f.name());

        // don't return copyField targets
        if (sf != null && schema.isCopyFieldTarget(sf)) continue;

        if (sf != null && sf.multiValued()) {
          List<Object> vals = new ArrayList<Object>();
          vals.add( f );
          out.setField( f.name(), vals );
        }
        else{
          out.setField( f.name(), f );
        }
      }
      else {
        out.addField( f.name(), f );
      }
    }
    return out;
  }

  private static SolrDocument toSolrDoc(SolrInputDocument sdoc, IndexSchema schema) {
    // TODO: do something more performant than this double conversion
    Document doc = DocumentBuilder.toDocument(sdoc, schema);

    // copy the stored fields only
    StoredDocument out = new StoredDocument();
    for (IndexableField f : doc.getFields()) {
      if (f.fieldType().stored() ) {
        out.add((StorableField) f);
      }
    }

    return toSolrDoc(out, schema);
  }

  @Override
  public int distributedProcess(ResponseBuilder rb) throws IOException {
    if (rb.stage < ResponseBuilder.STAGE_GET_FIELDS)
      return ResponseBuilder.STAGE_GET_FIELDS;
    if (rb.stage == ResponseBuilder.STAGE_GET_FIELDS) {
      return createSubRequests(rb);
    }
    return ResponseBuilder.STAGE_DONE;
  }

  // Fans a distributed /get out to the shards that own the requested ids.
  public int createSubRequests(ResponseBuilder rb) throws IOException {
    SolrParams params = rb.req.getParams();
    String id1[] = params.getParams("id");
    String ids[] = params.getParams("ids");

    if (id1 == null && ids == null) {
      return ResponseBuilder.STAGE_DONE;
    }

    List<String> allIds = new ArrayList<String>();
    if (id1 != null) {
      for (String s : id1) {
        allIds.add(s);
      }
    }
    if (ids != null) {
      for (String s : ids) {
        allIds.addAll( StrUtils.splitSmart(s, ",", true) );
      }
    }

    // TODO: handle collection=...?
ZkController zkController = rb.req.getCore().getCoreDescriptor().getCoreContainer().getZkController();

    // if shards=... then use that
    if (zkController != null && params.get("shards") == null) {
      // SolrCloud mode: route each id to its owning slice via the doc router.
      CloudDescriptor cloudDescriptor = rb.req.getCore().getCoreDescriptor().getCloudDescriptor();

      String collection = cloudDescriptor.getCollectionName();

      ClusterState clusterState = zkController.getClusterState();
      DocCollection coll = clusterState.getCollection(collection);

      Map<String, List<String>> sliceToId = new HashMap<String, List<String>>();
      for (String id : allIds) {
        Slice slice = coll.getRouter().getTargetSlice(id, null, params, coll);

        List<String> idsForShard = sliceToId.get(slice.getName());
        if (idsForShard == null) {
          idsForShard = new ArrayList<String>(2);
          sliceToId.put(slice.getName(), idsForShard);
        }
        idsForShard.add(id);
      }

      for (Map.Entry<String,List<String>> entry : sliceToId.entrySet()) {
        String shard = entry.getKey();
        String shardIdList = StrUtils.join(entry.getValue(), ',');
        ShardRequest sreq = new ShardRequest();

        sreq.purpose = 1;
        // sreq.shards = new String[]{shard};    // TODO: would be nice if this would work...
        sreq.shards = sliceToShards(rb, collection, shard);
        sreq.actualShards = sreq.shards;
        sreq.params = new ModifiableSolrParams();
        sreq.params.set(ShardParams.SHARDS_QT,"/get");      // TODO: how to avoid hardcoding this and hit the same handler?
        sreq.params.set("distrib",false);
        sreq.params.set("ids", shardIdList);

        rb.addRequest(this, sreq);
      }
    } else {
      // Non-cloud (or explicit shards=...) mode: send all ids to every shard.
      String shardIdList = StrUtils.join(allIds, ',');
      ShardRequest sreq = new ShardRequest();

      sreq.purpose = 1;
      sreq.shards = null;  // ALL
      sreq.actualShards = sreq.shards;
      sreq.params = new ModifiableSolrParams();
      sreq.params.set(ShardParams.SHARDS_QT,"/get");      // TODO: how to avoid hardcoding this and hit the same handler?
      sreq.params.set("distrib",false);
      sreq.params.set("ids", shardIdList);

      rb.addRequest(this, sreq);
    }

    return ResponseBuilder.STAGE_DONE;
  }

  private String[] sliceToShards(ResponseBuilder rb, String collection, String slice) {
    String lookup = collection + '_' + slice;  // seems either form may be filled in rb.slices?

    // We use this since the shard handler already filled in the slice to shards mapping.
    // A better approach would be to avoid filling out every slice each time, or to cache
    // the mappings.

    for (int i=0; i<rb.slices.length; i++) {
      log.info("LOOKUP_SLICE:" + rb.slices[i] + "=" + rb.shards[i]);
      if (lookup.equals(rb.slices[i]) || slice.equals(rb.slices[i])) {
        return new String[]{rb.shards[i]};
      }
    }

    throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Can't find shard '" + lookup + "'");
  }

  /***
  private void handleRegularResponses(ResponseBuilder rb, ShardRequest sreq) {
  }
  ***/

  @Override
  public void finishStage(ResponseBuilder rb) {
    if (rb.stage != ResponseBuilder.STAGE_GET_FIELDS) {
      return;
    }

    mergeResponses(rb);
  }

  // Flattens the per-shard doc lists into a single response section.
  private void mergeResponses(ResponseBuilder rb) {
    SolrDocumentList docList = new SolrDocumentList();

    for (ShardRequest sreq : rb.finished) {
      // if shards=shard1,shard2 was used, then we query both shards for each id and
      // can get more than one response
      for (ShardResponse srsp : sreq.responses) {
        SolrResponse sr = srsp.getSolrResponse();
        NamedList nl = sr.getResponse();
        SolrDocumentList subList = (SolrDocumentList)nl.get("response");
        docList.addAll(subList);
      }
    }

    if (docList.size() <= 1 && rb.req.getParams().getParams("ids")==null) {
      // if the doc was not found, then use a value of null.
      rb.rsp.add("doc", docList.size() > 0 ? docList.get(0) : null);
    } else {
      docList.setNumFound(docList.size());
      rb.rsp.add("response", docList);
    }
  }

  ////////////////////////////////////////////
  ///  SolrInfoMBean
  ////////////////////////////////////////////

  @Override
  public String getDescription() {
    return "query";
  }

  @Override
  public String getSource() {
    return "$URL: http://svn.apache.org/repos/asf/lucene/dev/trunk/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java $";
  }

  @Override
  public URL[] getDocs() {
    return null;
  }

  ///////////////////////////////////////////////////////////////////////////////////
  // Returns last versions added to index
  ///////////////////////////////////////////////////////////////////////////////////

  public void processGetVersions(ResponseBuilder rb) throws IOException
  {
    SolrQueryRequest req = rb.req;
    SolrQueryResponse rsp = rb.rsp;
    SolrParams params = req.getParams();

    if (!params.getBool(COMPONENT_NAME, true)) {
      return;
    }

    int nVersions = params.getInt("getVersions", -1);
    if (nVersions == -1) return;

    String sync = params.get("sync");
    if (sync != null) {
      processSync(rb, nVersions, sync);
      return;
    }

    UpdateLog ulog = req.getCore().getUpdateHandler().getUpdateLog();
    if (ulog == null) return;

    UpdateLog.RecentUpdates recentUpdates = ulog.getRecentUpdates();
    try {
      rb.rsp.add("versions", recentUpdates.getVersions(nVersions));
    } finally {
      recentUpdates.close();  // cache this somehow?
    }
  }

  // Triggers a PeerSync with the replicas named in the "sync" parameter.
  public void processSync(ResponseBuilder rb, int nVersions, String sync) {
    boolean onlyIfActive = rb.req.getParams().getBool("onlyIfActive", false);
    if (onlyIfActive) {
      if (!rb.req.getCore().getCoreDescriptor().getCloudDescriptor().getLastPublished().equals(ZkStateReader.ACTIVE)) {
        log.info("Last published state was not ACTIVE, cannot sync.");
        rb.rsp.add("sync", "false");
        return;
      }
    }

    List<String> replicas = StrUtils.splitSmart(sync, ",", true);

    boolean cantReachIsSuccess = rb.req.getParams().getBool("cantReachIsSuccess", false);

    PeerSync peerSync = new PeerSync(rb.req.getCore(), replicas, nVersions, cantReachIsSuccess, true);
    boolean success = peerSync.sync();

    // TODO: more complex response?
    rb.rsp.add("sync", success);
  }

  // Returns the raw tlog entries for the requested update versions.
  public void processGetUpdates(ResponseBuilder rb) throws IOException
  {
    SolrQueryRequest req = rb.req;
    SolrQueryResponse rsp = rb.rsp;
    SolrParams params = req.getParams();

    if (!params.getBool(COMPONENT_NAME, true)) {
      return;
    }

    String versionsStr = params.get("getUpdates");
    if (versionsStr == null) return;

    UpdateLog ulog = req.getCore().getUpdateHandler().getUpdateLog();
    if (ulog == null) return;

    List<String> versions = StrUtils.splitSmart(versionsStr, ",", true);

    List<Object> updates = new ArrayList<Object>(versions.size());

    long minVersion = Long.MAX_VALUE;

    // TODO: get this from cache instead of rebuilding?
    UpdateLog.RecentUpdates recentUpdates = ulog.getRecentUpdates();
    try {
      for (String versionStr : versions) {
        long version = Long.parseLong(versionStr);
        try {
          Object o = recentUpdates.lookup(version);
          if (o == null) continue;

          if (version > 0) {
            minVersion = Math.min(minVersion, version);
          }

          // TODO: do any kind of validation here?
          updates.add(o);

        } catch (SolrException e) {
          log.warn("Exception reading log for updates", e);
        } catch (ClassCastException e) {
          log.warn("Exception reading log for updates", e);
        }
      }

      // Must return all delete-by-query commands that occur after the first add requested
      // since they may apply.
      updates.addAll( recentUpdates.getDeleteByQuery(minVersion));

      rb.rsp.add("updates", updates);

    } finally {
      recentUpdates.close();  // cache this somehow?
    }
  }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.action.bulk; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.LatchedActionListener; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; import org.elasticsearch.action.support.replication.TransportWriteAction.WritePrimaryResult; import org.elasticsearch.action.update.UpdateHelper; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.Requests; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.engine.VersionConflictEngineException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.Mapping;
import org.elasticsearch.index.mapper.MetadataFieldMapper;
import org.elasticsearch.index.mapper.RootObjectMapper;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.IndexShardTestCase;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.translog.Translog;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;

import java.io.IOException;
import java.util.Collections;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.atomic.AtomicInteger;

import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.Matchers.arrayWithSize;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.nullValue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

/**
 * Unit tests for primary-side execution of shard-level bulk requests
 * (TransportShardBulkAction): indexing, deletes, mapping updates and
 * failure handling.
 */
public class TransportShardBulkActionTests extends IndexShardTestCase {

    // Listener for tests that only require the operation to complete without failure.
    private static final ActionListener<Void> ASSERTING_DONE_LISTENER =
            ActionTestUtils.assertNoFailureListener(r -> {});

    private final ShardId shardId = new ShardId("index", "_na_", 0);
    private final Settings idxSettings = Settings.builder()
            .put("index.number_of_shards", 1)
            .put("index.number_of_replicas", 0)
            .put("index.version.created", Version.CURRENT.id)
            .build();

    // Minimal index metadata: a text field "foo" plus a keyword sub-field.
    private IndexMetadata indexMetadata() throws IOException {
        return IndexMetadata.builder("index")
                .putMapping("{\"properties\":{\"foo\":{\"type\":\"text\",\"fields\":" +
                        "{\"keyword\":{\"type\":\"keyword\",\"ignore_above\":256}}}}}")
                .settings(idxSettings)
                .primaryTerm(0, 1).build();
    }

    public void testExecuteBulkIndexRequest() throws Exception {
        IndexShard shard = newStartedShard(true);

        BulkItemRequest[] items = new BulkItemRequest[1];
        boolean create = randomBoolean();
        DocWriteRequest<IndexRequest> writeRequest = new IndexRequest("index").id("id").source(Requests.INDEX_CONTENT_TYPE)
            .create(create);
        BulkItemRequest primaryRequest = new BulkItemRequest(0, writeRequest);
        items[0] = primaryRequest;
        BulkShardRequest bulkShardRequest = new BulkShardRequest(shardId, RefreshPolicy.NONE, items);

        randomlySetIgnoredPrimaryResponse(primaryRequest);

        BulkPrimaryExecutionContext context = new BulkPrimaryExecutionContext(bulkShardRequest, shard);
        TransportShardBulkAction.executeBulkItemRequest(context, null, threadPool::absoluteTimeInMillis,
            new NoopMappingUpdatePerformer(), listener -> {}, ASSERTING_DONE_LISTENER);
        assertFalse(context.hasMoreOperationsToExecute());

        // Translog should change, since there were no problems
        assertNotNull(context.getLocationToSync());

        BulkItemResponse primaryResponse = bulkShardRequest.items()[0].getPrimaryResponse();
        assertThat(primaryResponse.getItemId(), equalTo(0));
        assertThat(primaryResponse.getId(), equalTo("id"));
        assertThat(primaryResponse.getOpType(), equalTo(create ?
DocWriteRequest.OpType.CREATE : DocWriteRequest.OpType.INDEX)); assertFalse(primaryResponse.isFailed()); // Assert that the document actually made it there assertDocCount(shard, 1); writeRequest = new IndexRequest("index").id("id").source(Requests.INDEX_CONTENT_TYPE).create(true); primaryRequest = new BulkItemRequest(0, writeRequest); items[0] = primaryRequest; bulkShardRequest = new BulkShardRequest(shardId, RefreshPolicy.NONE, items); randomlySetIgnoredPrimaryResponse(primaryRequest); BulkPrimaryExecutionContext secondContext = new BulkPrimaryExecutionContext(bulkShardRequest, shard); TransportShardBulkAction.executeBulkItemRequest(secondContext, null, threadPool::absoluteTimeInMillis, new ThrowingMappingUpdatePerformer(new RuntimeException("fail")), listener -> {}, ASSERTING_DONE_LISTENER); assertFalse(context.hasMoreOperationsToExecute()); assertNull(secondContext.getLocationToSync()); BulkItemRequest replicaRequest = bulkShardRequest.items()[0]; primaryResponse = bulkShardRequest.items()[0].getPrimaryResponse(); assertThat(primaryResponse.getItemId(), equalTo(0)); assertThat(primaryResponse.getId(), equalTo("id")); assertThat(primaryResponse.getOpType(), equalTo(DocWriteRequest.OpType.CREATE)); // Should be failed since the document already exists assertTrue(primaryResponse.isFailed()); BulkItemResponse.Failure failure = primaryResponse.getFailure(); assertThat(failure.getIndex(), equalTo("index")); assertThat(failure.getId(), equalTo("id")); assertThat(failure.getCause().getClass(), equalTo(VersionConflictEngineException.class)); assertThat(failure.getCause().getMessage(), containsString("version conflict, document already exists (current version [1])")); assertThat(failure.getStatus(), equalTo(RestStatus.CONFLICT)); assertThat(replicaRequest, equalTo(primaryRequest)); // Assert that the document count is still 1 assertDocCount(shard, 1); closeShards(shard); } public void testSkipBulkIndexRequestIfAborted() throws Exception { IndexShard shard = 
newStartedShard(true);

        BulkItemRequest[] items = new BulkItemRequest[randomIntBetween(2, 5)];
        for (int i = 0; i < items.length; i++) {
            DocWriteRequest<IndexRequest> writeRequest = new IndexRequest("index").id("id_" + i)
                .source(Requests.INDEX_CONTENT_TYPE)
                .opType(DocWriteRequest.OpType.INDEX);
            items[i] = new BulkItemRequest(i, writeRequest);
        }
        BulkShardRequest bulkShardRequest = new BulkShardRequest(shardId, RefreshPolicy.NONE, items);

        // Preemptively abort one of the bulk items, but allow the others to proceed
        BulkItemRequest rejectItem = randomFrom(items);
        RestStatus rejectionStatus = randomFrom(RestStatus.BAD_REQUEST, RestStatus.CONFLICT, RestStatus.FORBIDDEN, RestStatus.LOCKED);
        final ElasticsearchStatusException rejectionCause = new ElasticsearchStatusException("testing rejection", rejectionStatus);
        rejectItem.abort("index", rejectionCause);

        final CountDownLatch latch = new CountDownLatch(1);
        TransportShardBulkAction.performOnPrimary(
            bulkShardRequest, shard, null, threadPool::absoluteTimeInMillis, new NoopMappingUpdatePerformer(),
            listener -> {}, ActionListener.runAfter(
                ActionTestUtils.assertNoFailureListener(result -> {
                    // since at least 1 item passed, the tran log location should exist,
                    assertThat(((WritePrimaryResult<BulkShardRequest, BulkShardResponse>) result).location, notNullValue());
                    // and the response should exist and match the item count
                    assertThat(result.finalResponseIfSuccessful, notNullValue());
                    assertThat(result.finalResponseIfSuccessful.getResponses(), arrayWithSize(items.length));

                    // check each response matches the input item, including the rejection
                    for (int i = 0; i < items.length; i++) {
                        BulkItemResponse response = result.finalResponseIfSuccessful.getResponses()[i];
                        assertThat(response.getItemId(), equalTo(i));
                        assertThat(response.getIndex(), equalTo("index"));
                        assertThat(response.getId(), equalTo("id_" + i));
                        assertThat(response.getOpType(), equalTo(DocWriteRequest.OpType.INDEX));
                        if (response.getItemId() == rejectItem.id()) {
                            assertTrue(response.isFailed());
                            assertThat(response.getFailure().getCause(), equalTo(rejectionCause));
                            assertThat(response.status(), equalTo(rejectionStatus));
                        } else {
                            assertFalse(response.isFailed());
                        }
                    }

                    // Check that the non-rejected updates made it to the shard
                    try {
                        assertDocCount(shard, items.length - 1);
                        closeShards(shard);
                    } catch (IOException e) {
                        throw new AssertionError(e);
                    }
                }), latch::countDown), threadPool);

        latch.await();
    }

    public void testExecuteBulkIndexRequestWithMappingUpdates() throws Exception {

        BulkItemRequest[] items = new BulkItemRequest[1];
        DocWriteRequest<IndexRequest> writeRequest = new IndexRequest("index").id("id")
            .source(Requests.INDEX_CONTENT_TYPE, "foo", "bar");
        items[0] = new BulkItemRequest(0, writeRequest);
        BulkShardRequest bulkShardRequest = new BulkShardRequest(shardId, RefreshPolicy.NONE, items);

        Engine.IndexResult mappingUpdate =
            new Engine.IndexResult(new Mapping(null, mock(RootObjectMapper.class), new MetadataFieldMapper[0], Collections.emptyMap()));
        Translog.Location resultLocation = new Translog.Location(42, 42, 42);
        Engine.IndexResult success = new FakeIndexResult(1, 1, 13, true, resultLocation);

        IndexShard shard = mock(IndexShard.class);
        when(shard.shardId()).thenReturn(shardId);
        when(shard.applyIndexOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyLong(), anyLong(), anyBoolean()))
            .thenReturn(mappingUpdate);
        when(shard.mapperService()).thenReturn(mock(MapperService.class));

        randomlySetIgnoredPrimaryResponse(items[0]);

        // Pretend the mappings haven't made it to the node yet
        BulkPrimaryExecutionContext context = new BulkPrimaryExecutionContext(bulkShardRequest, shard);
        AtomicInteger updateCalled = new AtomicInteger();
        TransportShardBulkAction.executeBulkItemRequest(context, null, threadPool::absoluteTimeInMillis,
            (update, shardId, listener) -> {
                // There should indeed be a mapping update
                assertNotNull(update);
                updateCalled.incrementAndGet();
                listener.onResponse(null);
            }, listener -> listener.onResponse(null),
            ASSERTING_DONE_LISTENER);
        assertTrue(context.isInitial());
        assertTrue(context.hasMoreOperationsToExecute());

        assertThat("mappings were \"updated\" once", updateCalled.get(), equalTo(1));

        // Verify that the shard "executed" the operation once
        verify(shard, times(1)).applyIndexOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyLong(), anyLong(), anyBoolean());

        when(shard.applyIndexOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyLong(), anyLong(), anyBoolean()))
            .thenReturn(success);

        TransportShardBulkAction.executeBulkItemRequest(context, null, threadPool::absoluteTimeInMillis,
            (update, shardId, listener) -> fail("should not have had to update the mappings"), listener -> {},
            ASSERTING_DONE_LISTENER);

        // Verify that the shard "executed" the operation only once (1 for previous invocations plus
        // 1 for this execution)
        verify(shard, times(2)).applyIndexOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyLong(), anyLong(), anyBoolean());

        BulkItemResponse primaryResponse = bulkShardRequest.items()[0].getPrimaryResponse();
        assertThat(primaryResponse.getItemId(), equalTo(0));
        assertThat(primaryResponse.getId(), equalTo("id"));
        assertThat(primaryResponse.getOpType(), equalTo(writeRequest.opType()));
        assertFalse(primaryResponse.isFailed());

        closeShards(shard);
    }

    public void testExecuteBulkIndexRequestWithErrorWhileUpdatingMapping() throws Exception {
        IndexShard shard = newStartedShard(true);

        BulkItemRequest[] items = new BulkItemRequest[1];
        DocWriteRequest<IndexRequest> writeRequest = new IndexRequest("index").id("id")
            .source(Requests.INDEX_CONTENT_TYPE, "foo", "bar");
        items[0] = new BulkItemRequest(0, writeRequest);
        BulkShardRequest bulkShardRequest = new BulkShardRequest(shardId, RefreshPolicy.NONE, items);

        // Return an exception when trying to update the mapping, or when waiting for it to come
        RuntimeException err = new RuntimeException("some kind of exception");

        boolean errorOnWait = randomBoolean();

        randomlySetIgnoredPrimaryResponse(items[0]);

        BulkPrimaryExecutionContext context = new BulkPrimaryExecutionContext(bulkShardRequest, shard);
        final CountDownLatch latch = new CountDownLatch(1);
        TransportShardBulkAction.executeBulkItemRequest(
            context, null, threadPool::absoluteTimeInMillis,
            errorOnWait == false ? new ThrowingMappingUpdatePerformer(err) : new NoopMappingUpdatePerformer(),
            errorOnWait ? listener -> listener.onFailure(err) : listener -> listener.onResponse(null),
            new LatchedActionListener<>(new ActionListener<Void>() {
                @Override
                public void onResponse(Void aVoid) {
                }

                @Override
                public void onFailure(final Exception e) {
                    assertEquals(err, e);
                }
            }, latch));
        latch.await();
        assertFalse(context.hasMoreOperationsToExecute());

        // Translog shouldn't be synced, as there were conflicting mappings
        assertThat(context.getLocationToSync(), nullValue());

        BulkItemResponse primaryResponse = bulkShardRequest.items()[0].getPrimaryResponse();

        // Since this was not a conflict failure, the primary response
        // should be filled out with the failure information
        assertThat(primaryResponse.getItemId(), equalTo(0));
        assertThat(primaryResponse.getId(), equalTo("id"));
        assertThat(primaryResponse.getOpType(), equalTo(DocWriteRequest.OpType.INDEX));
        assertTrue(primaryResponse.isFailed());
        assertThat(primaryResponse.getFailureMessage(), containsString("some kind of exception"));
        BulkItemResponse.Failure failure = primaryResponse.getFailure();
        assertThat(failure.getIndex(), equalTo("index"));
        assertThat(failure.getId(), equalTo("id"));
        assertThat(failure.getCause(), equalTo(err));

        closeShards(shard);
    }

    public void testExecuteBulkDeleteRequest() throws Exception {
        IndexShard shard = newStartedShard(true);

        BulkItemRequest[] items = new BulkItemRequest[1];
        DocWriteRequest<DeleteRequest> writeRequest = new DeleteRequest("index").id("id");
        items[0] = new BulkItemRequest(0, writeRequest);
        BulkShardRequest bulkShardRequest = new BulkShardRequest(shardId, RefreshPolicy.NONE, items);

        Translog.Location location = new Translog.Location(0, 0, 0);

        randomlySetIgnoredPrimaryResponse(items[0]);

        BulkPrimaryExecutionContext context = new BulkPrimaryExecutionContext(bulkShardRequest, shard);
        TransportShardBulkAction.executeBulkItemRequest(context, null, threadPool::absoluteTimeInMillis,
            new NoopMappingUpdatePerformer(), listener -> {}, ASSERTING_DONE_LISTENER);
        assertFalse(context.hasMoreOperationsToExecute());

        // Translog changes, even though the document didn't exist
        assertThat(context.getLocationToSync(), not(location));

        BulkItemRequest replicaRequest = bulkShardRequest.items()[0];
        DocWriteRequest<?> replicaDeleteRequest = replicaRequest.request();
        BulkItemResponse primaryResponse = replicaRequest.getPrimaryResponse();
        DeleteResponse response = primaryResponse.getResponse();

        // Any version can be matched on replica
        assertThat(replicaDeleteRequest.version(), equalTo(Versions.MATCH_ANY));
        assertThat(replicaDeleteRequest.versionType(), equalTo(VersionType.INTERNAL));

        assertThat(primaryResponse.getItemId(), equalTo(0));
        assertThat(primaryResponse.getId(), equalTo("id"));
        assertThat(primaryResponse.getOpType(), equalTo(DocWriteRequest.OpType.DELETE));
        assertFalse(primaryResponse.isFailed());

        assertThat(response.getResult(), equalTo(DocWriteResponse.Result.NOT_FOUND));
        assertThat(response.getShardId(), equalTo(shard.shardId()));
        assertThat(response.getIndex(), equalTo("index"));
        assertThat(response.getId(), equalTo("id"));
        assertThat(response.getVersion(), equalTo(1L));
        assertThat(response.getSeqNo(), equalTo(0L));
        assertThat(response.forcedRefresh(), equalTo(false));

        // Now do the same after indexing the document, it should now find and delete the document
        indexDoc(shard, "_doc", "id", "{}");

        writeRequest = new DeleteRequest("index", "id");
        items[0] = new BulkItemRequest(0, writeRequest);
        bulkShardRequest = new BulkShardRequest(shardId, RefreshPolicy.NONE, items);

        location = context.getLocationToSync();

        randomlySetIgnoredPrimaryResponse(items[0]);

        context = new BulkPrimaryExecutionContext(bulkShardRequest, shard);
        TransportShardBulkAction.executeBulkItemRequest(context, null, threadPool::absoluteTimeInMillis,
            new NoopMappingUpdatePerformer(), listener -> {}, ASSERTING_DONE_LISTENER);
        assertFalse(context.hasMoreOperationsToExecute());

        // Translog changes, because the document was deleted
        assertThat(context.getLocationToSync(), not(location));

        replicaRequest = bulkShardRequest.items()[0];
        replicaDeleteRequest = replicaRequest.request();
        primaryResponse = replicaRequest.getPrimaryResponse();
        response = primaryResponse.getResponse();

        // Any version can be matched on replica
        assertThat(replicaDeleteRequest.version(), equalTo(Versions.MATCH_ANY));
        assertThat(replicaDeleteRequest.versionType(), equalTo(VersionType.INTERNAL));

        assertThat(primaryResponse.getItemId(), equalTo(0));
        assertThat(primaryResponse.getId(), equalTo("id"));
        assertThat(primaryResponse.getOpType(), equalTo(DocWriteRequest.OpType.DELETE));
        assertFalse(primaryResponse.isFailed());

        assertThat(response.getResult(), equalTo(DocWriteResponse.Result.DELETED));
        assertThat(response.getShardId(), equalTo(shard.shardId()));
        assertThat(response.getIndex(), equalTo("index"));
        assertThat(response.getId(), equalTo("id"));
        assertThat(response.getVersion(), equalTo(3L));
        assertThat(response.getSeqNo(), equalTo(2L));
        assertThat(response.forcedRefresh(), equalTo(false));

        assertDocCount(shard, 0);
        closeShards(shard);
    }

    public void testNoopUpdateRequest() throws Exception {
        DocWriteRequest<UpdateRequest> writeRequest = new UpdateRequest("index", "id")
            .doc(Requests.INDEX_CONTENT_TYPE, "field", "value");
        BulkItemRequest primaryRequest = new BulkItemRequest(0, writeRequest);

        DocWriteResponse noopUpdateResponse = new UpdateResponse(shardId, "id", 0, 2, 1, DocWriteResponse.Result.NOOP);

        IndexShard shard = mock(IndexShard.class);

        UpdateHelper updateHelper = mock(UpdateHelper.class);
        when(updateHelper.prepare(any(), eq(shard), any())).thenReturn(
            new UpdateHelper.Result(noopUpdateResponse, DocWriteResponse.Result.NOOP,
            Collections.singletonMap("field", "value"), Requests.INDEX_CONTENT_TYPE));

    BulkItemRequest[] items = new BulkItemRequest[]{primaryRequest};
    BulkShardRequest bulkShardRequest = new BulkShardRequest(shardId, RefreshPolicy.NONE, items);

    randomlySetIgnoredPrimaryResponse(primaryRequest);

    BulkPrimaryExecutionContext context = new BulkPrimaryExecutionContext(bulkShardRequest, shard);
    TransportShardBulkAction.executeBulkItemRequest(context, updateHelper, threadPool::absoluteTimeInMillis,
        new NoopMappingUpdatePerformer(), listener -> {}, ASSERTING_DONE_LISTENER);
    assertFalse(context.hasMoreOperationsToExecute());

    // Basically nothing changes in the request since it's a noop
    assertThat(context.getLocationToSync(), nullValue());
    BulkItemResponse primaryResponse = bulkShardRequest.items()[0].getPrimaryResponse();
    assertThat(primaryResponse.getItemId(), equalTo(0));
    assertThat(primaryResponse.getId(), equalTo("id"));
    assertThat(primaryResponse.getOpType(), equalTo(DocWriteRequest.OpType.UPDATE));
    assertThat(primaryResponse.getResponse(), equalTo(noopUpdateResponse));
    assertThat(primaryResponse.getResponse().getResult(), equalTo(DocWriteResponse.Result.NOOP));
    assertThat(bulkShardRequest.items().length, equalTo(1));
    assertEquals(primaryRequest, bulkShardRequest.items()[0]); // check that bulk item was not mutated
    assertThat(primaryResponse.getResponse().getSeqNo(), equalTo(0L));
}

/**
 * A non-conflict engine failure during an update must produce a failed item
 * response carrying the original cause, with INTERNAL_SERVER_ERROR status.
 */
public void testUpdateRequestWithFailure() throws Exception {
    IndexSettings indexSettings = new IndexSettings(indexMetadata(), Settings.EMPTY);
    DocWriteRequest<UpdateRequest> writeRequest = new UpdateRequest("index", "id")
        .doc(Requests.INDEX_CONTENT_TYPE, "field", "value");
    BulkItemRequest primaryRequest = new BulkItemRequest(0, writeRequest);

    IndexRequest updateResponse = new IndexRequest("index").id("id").source(Requests.INDEX_CONTENT_TYPE, "field", "value");

    Exception err = new ElasticsearchException("I'm dead <(x.x)>");
    Engine.IndexResult indexResult = new Engine.IndexResult(err, 0, 0, 0);
    IndexShard shard = mock(IndexShard.class);
    when(shard.applyIndexOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyLong(), anyLong(), anyBoolean()))
        .thenReturn(indexResult);
    when(shard.indexSettings()).thenReturn(indexSettings);

    UpdateHelper updateHelper = mock(UpdateHelper.class);
    when(updateHelper.prepare(any(), eq(shard), any())).thenReturn(
        new UpdateHelper.Result(updateResponse, randomBoolean() ? DocWriteResponse.Result.CREATED : DocWriteResponse.Result.UPDATED,
            Collections.singletonMap("field", "value"), Requests.INDEX_CONTENT_TYPE));

    BulkItemRequest[] items = new BulkItemRequest[]{primaryRequest};
    BulkShardRequest bulkShardRequest = new BulkShardRequest(shardId, RefreshPolicy.NONE, items);

    randomlySetIgnoredPrimaryResponse(primaryRequest);

    BulkPrimaryExecutionContext context = new BulkPrimaryExecutionContext(bulkShardRequest, shard);
    TransportShardBulkAction.executeBulkItemRequest(context, updateHelper, threadPool::absoluteTimeInMillis,
        new NoopMappingUpdatePerformer(), listener -> {}, ASSERTING_DONE_LISTENER);
    assertFalse(context.hasMoreOperationsToExecute());

    // Since this was not a conflict failure, the primary response
    // should be filled out with the failure information
    assertNull(context.getLocationToSync());
    BulkItemResponse primaryResponse = bulkShardRequest.items()[0].getPrimaryResponse();
    assertThat(primaryResponse.getItemId(), equalTo(0));
    assertThat(primaryResponse.getId(), equalTo("id"));
    assertThat(primaryResponse.getOpType(), equalTo(DocWriteRequest.OpType.UPDATE));
    assertTrue(primaryResponse.isFailed());
    assertThat(primaryResponse.getFailureMessage(), containsString("I'm dead <(x.x)>"));
    BulkItemResponse.Failure failure = primaryResponse.getFailure();
    assertThat(failure.getIndex(), equalTo("index"));
    assertThat(failure.getId(), equalTo("id"));
    assertThat(failure.getCause(), equalTo(err));
    assertThat(failure.getStatus(), equalTo(RestStatus.INTERNAL_SERVER_ERROR));
}

/**
 * A version-conflict failure during an update must produce a failed item
 * response with CONFLICT status (and no translog location to sync).
 */
public void testUpdateRequestWithConflictFailure() throws Exception {
    IndexSettings indexSettings = new IndexSettings(indexMetadata(), Settings.EMPTY);
    DocWriteRequest<UpdateRequest> writeRequest = new UpdateRequest("index", "id")
        .doc(Requests.INDEX_CONTENT_TYPE, "field", "value");
    BulkItemRequest primaryRequest = new BulkItemRequest(0, writeRequest);

    IndexRequest updateResponse = new IndexRequest("index").id("id").source(Requests.INDEX_CONTENT_TYPE, "field", "value");

    Exception err = new VersionConflictEngineException(shardId, "id", "I'm conflicted <(;_;)>");
    Engine.IndexResult indexResult = new Engine.IndexResult(err, 0, 0, 0);
    IndexShard shard = mock(IndexShard.class);
    when(shard.applyIndexOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyLong(), anyLong(), anyBoolean()))
        .thenReturn(indexResult);
    when(shard.indexSettings()).thenReturn(indexSettings);

    UpdateHelper updateHelper = mock(UpdateHelper.class);
    when(updateHelper.prepare(any(), eq(shard), any())).thenReturn(
        new UpdateHelper.Result(updateResponse, randomBoolean() ? DocWriteResponse.Result.CREATED : DocWriteResponse.Result.UPDATED,
            Collections.singletonMap("field", "value"), Requests.INDEX_CONTENT_TYPE));

    BulkItemRequest[] items = new BulkItemRequest[]{primaryRequest};
    BulkShardRequest bulkShardRequest = new BulkShardRequest(shardId, RefreshPolicy.NONE, items);

    randomlySetIgnoredPrimaryResponse(primaryRequest);

    BulkPrimaryExecutionContext context = new BulkPrimaryExecutionContext(bulkShardRequest, shard);
    TransportShardBulkAction.executeBulkItemRequest(context, updateHelper, threadPool::absoluteTimeInMillis,
        new NoopMappingUpdatePerformer(), listener -> listener.onResponse(null), ASSERTING_DONE_LISTENER);
    assertFalse(context.hasMoreOperationsToExecute());

    assertNull(context.getLocationToSync());
    BulkItemResponse primaryResponse = bulkShardRequest.items()[0].getPrimaryResponse();
    assertThat(primaryResponse.getItemId(), equalTo(0));
    assertThat(primaryResponse.getId(), equalTo("id"));
    assertThat(primaryResponse.getOpType(), equalTo(DocWriteRequest.OpType.UPDATE));
    assertTrue(primaryResponse.isFailed());
    assertThat(primaryResponse.getFailureMessage(), containsString("I'm conflicted <(;_;)>"));
    BulkItemResponse.Failure failure = primaryResponse.getFailure();
    assertThat(failure.getIndex(), equalTo("index"));
    assertThat(failure.getId(), equalTo("id"));
    assertThat(failure.getCause(), equalTo(err));
    assertThat(failure.getStatus(), equalTo(RestStatus.CONFLICT));
}

/**
 * A successful update that maps to an index operation must advance the
 * translog, rewrite the bulk item to the prepared IndexRequest, and report
 * CREATED or OK depending on whether the doc was created.
 */
public void testUpdateRequestWithSuccess() throws Exception {
    IndexSettings indexSettings = new IndexSettings(indexMetadata(), Settings.EMPTY);
    DocWriteRequest<UpdateRequest> writeRequest = new UpdateRequest("index", "id")
        .doc(Requests.INDEX_CONTENT_TYPE, "field", "value");
    BulkItemRequest primaryRequest = new BulkItemRequest(0, writeRequest);

    IndexRequest updateResponse = new IndexRequest("index").id("id").source(Requests.INDEX_CONTENT_TYPE, "field", "value");

    boolean created = randomBoolean();
    Translog.Location resultLocation = new Translog.Location(42, 42, 42);
    Engine.IndexResult indexResult = new FakeIndexResult(1, 1, 13, created, resultLocation);
    IndexShard shard = mock(IndexShard.class);
    when(shard.applyIndexOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyLong(), anyLong(), anyBoolean()))
        .thenReturn(indexResult);
    when(shard.indexSettings()).thenReturn(indexSettings);
    when(shard.shardId()).thenReturn(shardId);

    UpdateHelper updateHelper = mock(UpdateHelper.class);
    when(updateHelper.prepare(any(), eq(shard), any())).thenReturn(
        new UpdateHelper.Result(updateResponse, created ? DocWriteResponse.Result.CREATED : DocWriteResponse.Result.UPDATED,
            Collections.singletonMap("field", "value"), Requests.INDEX_CONTENT_TYPE));

    BulkItemRequest[] items = new BulkItemRequest[]{primaryRequest};
    BulkShardRequest bulkShardRequest = new BulkShardRequest(shardId, RefreshPolicy.NONE, items);

    randomlySetIgnoredPrimaryResponse(primaryRequest);

    BulkPrimaryExecutionContext context = new BulkPrimaryExecutionContext(bulkShardRequest, shard);
    TransportShardBulkAction.executeBulkItemRequest(context, updateHelper, threadPool::absoluteTimeInMillis,
        new NoopMappingUpdatePerformer(), listener -> {}, ASSERTING_DONE_LISTENER);
    assertFalse(context.hasMoreOperationsToExecute());

    // Check that the translog is successfully advanced
    assertThat(context.getLocationToSync(), equalTo(resultLocation));
    assertThat(bulkShardRequest.items()[0].request(), equalTo(updateResponse));
    // Since this was not a conflict failure, the primary response
    // should be filled out with the failure information
    BulkItemResponse primaryResponse = bulkShardRequest.items()[0].getPrimaryResponse();
    assertThat(primaryResponse.getItemId(), equalTo(0));
    assertThat(primaryResponse.getId(), equalTo("id"));
    assertThat(primaryResponse.getOpType(), equalTo(DocWriteRequest.OpType.UPDATE));
    DocWriteResponse response = primaryResponse.getResponse();
    assertThat(response.status(), equalTo(created ? RestStatus.CREATED : RestStatus.OK));
    assertThat(response.getSeqNo(), equalTo(13L));
}

/**
 * An update whose prepared result is a delete must execute the delete on the
 * shard, advance the translog, and report OK with the delete's seq-no.
 */
public void testUpdateWithDelete() throws Exception {
    IndexSettings indexSettings = new IndexSettings(indexMetadata(), Settings.EMPTY);
    DocWriteRequest<UpdateRequest> writeRequest = new UpdateRequest("index", "id")
        .doc(Requests.INDEX_CONTENT_TYPE, "field", "value");
    BulkItemRequest primaryRequest = new BulkItemRequest(0, writeRequest);

    DeleteRequest updateResponse = new DeleteRequest("index", "id");

    boolean found = randomBoolean();
    Translog.Location resultLocation = new Translog.Location(42, 42, 42);
    final long resultSeqNo = 13;
    Engine.DeleteResult deleteResult = new FakeDeleteResult(1, 1, resultSeqNo, found, resultLocation);
    IndexShard shard = mock(IndexShard.class);
    when(shard.applyDeleteOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyLong())).thenReturn(deleteResult);
    when(shard.indexSettings()).thenReturn(indexSettings);
    when(shard.shardId()).thenReturn(shardId);

    UpdateHelper updateHelper = mock(UpdateHelper.class);
    when(updateHelper.prepare(any(), eq(shard), any())).thenReturn(
        new UpdateHelper.Result(updateResponse, DocWriteResponse.Result.DELETED,
            Collections.singletonMap("field", "value"), Requests.INDEX_CONTENT_TYPE));

    BulkItemRequest[] items = new BulkItemRequest[]{primaryRequest};
    BulkShardRequest bulkShardRequest = new BulkShardRequest(shardId, RefreshPolicy.NONE, items);

    randomlySetIgnoredPrimaryResponse(primaryRequest);

    BulkPrimaryExecutionContext context = new BulkPrimaryExecutionContext(bulkShardRequest, shard);
    TransportShardBulkAction.executeBulkItemRequest(context, updateHelper, threadPool::absoluteTimeInMillis,
        new NoopMappingUpdatePerformer(), listener -> listener.onResponse(null), ASSERTING_DONE_LISTENER);
    assertFalse(context.hasMoreOperationsToExecute());

    // Check that the translog is successfully advanced
    assertThat(context.getLocationToSync(), equalTo(resultLocation));
    assertThat(bulkShardRequest.items()[0].request(), equalTo(updateResponse));
    BulkItemResponse primaryResponse = bulkShardRequest.items()[0].getPrimaryResponse();
    assertThat(primaryResponse.getItemId(), equalTo(0));
    assertThat(primaryResponse.getId(), equalTo("id"));
    assertThat(primaryResponse.getOpType(), equalTo(DocWriteRequest.OpType.UPDATE));
    DocWriteResponse response = primaryResponse.getResponse();
    assertThat(response.status(), equalTo(RestStatus.OK));
    assertThat(response.getSeqNo(), equalTo(resultSeqNo));
}

/**
 * When UpdateHelper.prepare itself throws, the item must be marked failed
 * with that exception and INTERNAL_SERVER_ERROR, and nothing reaches the
 * translog.
 */
public void testFailureDuringUpdateProcessing() throws Exception {
    DocWriteRequest<UpdateRequest> writeRequest = new UpdateRequest("index", "id")
        .doc(Requests.INDEX_CONTENT_TYPE, "field", "value");
    BulkItemRequest primaryRequest = new BulkItemRequest(0, writeRequest);

    IndexShard shard = mock(IndexShard.class);

    UpdateHelper updateHelper = mock(UpdateHelper.class);
    final ElasticsearchException err = new ElasticsearchException("oops");
    when(updateHelper.prepare(any(), eq(shard), any())).thenThrow(err);

    BulkItemRequest[] items = new BulkItemRequest[]{primaryRequest};
    BulkShardRequest bulkShardRequest = new BulkShardRequest(shardId, RefreshPolicy.NONE, items);

    randomlySetIgnoredPrimaryResponse(primaryRequest);

    BulkPrimaryExecutionContext context = new BulkPrimaryExecutionContext(bulkShardRequest, shard);
    TransportShardBulkAction.executeBulkItemRequest(context, updateHelper, threadPool::absoluteTimeInMillis,
        new NoopMappingUpdatePerformer(), listener -> {}, ASSERTING_DONE_LISTENER);
    assertFalse(context.hasMoreOperationsToExecute());

    assertNull(context.getLocationToSync());
    BulkItemResponse primaryResponse = bulkShardRequest.items()[0].getPrimaryResponse();
    assertThat(primaryResponse.getItemId(), equalTo(0));
    assertThat(primaryResponse.getId(), equalTo("id"));
    assertThat(primaryResponse.getOpType(), equalTo(DocWriteRequest.OpType.UPDATE));
    assertTrue(primaryResponse.isFailed());
    assertThat(primaryResponse.getFailureMessage(), containsString("oops"));
    BulkItemResponse.Failure failure = primaryResponse.getFailure();
    assertThat(failure.getIndex(), equalTo("index"));
    assertThat(failure.getId(), equalTo("id"));
    assertThat(failure.getCause(), equalTo(err));
    assertThat(failure.getStatus(), equalTo(RestStatus.INTERNAL_SERVER_ERROR));
}

/**
 * After executing all items, syncing the context's returned translog
 * location must leave nothing unsynced on the shard.
 */
public void testTranslogPositionToSync() throws Exception {
    IndexShard shard = newStartedShard(true);

    BulkItemRequest[] items = new BulkItemRequest[randomIntBetween(2, 5)];
    for (int i = 0; i < items.length; i++) {
        DocWriteRequest<IndexRequest> writeRequest = new IndexRequest("index").id("id_" + i)
            .source(Requests.INDEX_CONTENT_TYPE)
            .opType(DocWriteRequest.OpType.INDEX);
        items[i] = new BulkItemRequest(i, writeRequest);
    }
    BulkShardRequest bulkShardRequest = new BulkShardRequest(shardId, RefreshPolicy.NONE, items);

    BulkPrimaryExecutionContext context = new BulkPrimaryExecutionContext(bulkShardRequest, shard);
    while (context.hasMoreOperationsToExecute()) {
        TransportShardBulkAction.executeBulkItemRequest(context, null, threadPool::absoluteTimeInMillis,
            new NoopMappingUpdatePerformer(), listener -> {}, ASSERTING_DONE_LISTENER);
    }

    assertTrue(shard.isSyncNeeded());

    // if we sync the location, nothing else is unsynced
    CountDownLatch latch = new CountDownLatch(1);
    shard.sync(context.getLocationToSync(), e -> {
        if (e != null) {
            throw new AssertionError(e);
        }
        latch.countDown();
    });

    latch.await();
    assertFalse(shard.isSyncNeeded());

    closeShards(shard);
}

/**
 * A primary item that already failed must be replicated as a no-op
 * (markSeqNoAsNoop) rather than re-applied on the replica.
 */
public void testNoOpReplicationOnPrimaryDocumentFailure() throws Exception {
    final IndexShard shard = spy(newStartedShard(false));
    BulkItemRequest itemRequest = new BulkItemRequest(0, new IndexRequest("index").source(Requests.INDEX_CONTENT_TYPE));
    final String failureMessage = "simulated primary failure";
    final IOException exception = new IOException(failureMessage);
    itemRequest.setPrimaryResponse(new BulkItemResponse(0, randomFrom(
        DocWriteRequest.OpType.CREATE,
        DocWriteRequest.OpType.DELETE,
        DocWriteRequest.OpType.INDEX
    ), new BulkItemResponse.Failure("index", "1", exception, 1L, 1L)
    ));
    BulkItemRequest[] itemRequests = new BulkItemRequest[1];
    itemRequests[0] = itemRequest;
    BulkShardRequest bulkShardRequest = new BulkShardRequest(
        shard.shardId(), RefreshPolicy.NONE, itemRequests);
    TransportShardBulkAction.performOnReplica(bulkShardRequest, shard);
    verify(shard, times(1)).markSeqNoAsNoop(1, 1, exception.toString());
    closeShards(shard);
}

/**
 * With retryOnConflict set to MAX_VALUE, the primary keeps retrying through
 * randomly interleaved conflicts and mapping updates until the operation
 * succeeds, ending at the expected translog location and seq-no.
 */
public void testRetries() throws Exception {
    IndexSettings indexSettings = new IndexSettings(indexMetadata(), Settings.EMPTY);
    UpdateRequest writeRequest = new UpdateRequest("index", "id")
        .doc(Requests.INDEX_CONTENT_TYPE, "field", "value");
    // the beating will continue until success has come.
    writeRequest.retryOnConflict(Integer.MAX_VALUE);
    BulkItemRequest primaryRequest = new BulkItemRequest(0, writeRequest);

    IndexRequest updateResponse = new IndexRequest("index").id("id").source(Requests.INDEX_CONTENT_TYPE, "field", "value");

    Exception err = new VersionConflictEngineException(shardId, "id", "I'm conflicted <(;_;)>");
    Engine.IndexResult conflictedResult = new Engine.IndexResult(err, 0);
    Engine.IndexResult mappingUpdate =
        new Engine.IndexResult(new Mapping(null, mock(RootObjectMapper.class), new MetadataFieldMapper[0], Collections.emptyMap()));
    Translog.Location resultLocation = new Translog.Location(42, 42, 42);
    Engine.IndexResult success = new FakeIndexResult(1, 1, 13, true, resultLocation);

    IndexShard shard = mock(IndexShard.class);
    when(shard.applyIndexOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyLong(), anyLong(), anyBoolean())).thenAnswer(ir -> {
        // randomly conflict, demand a mapping update, or succeed on each attempt
        if (randomBoolean()) {
            return conflictedResult;
        }
        if (randomBoolean()) {
            return mappingUpdate;
        } else {
            return success;
        }
    });
    when(shard.indexSettings()).thenReturn(indexSettings);
    when(shard.shardId()).thenReturn(shardId);
    when(shard.mapperService()).thenReturn(mock(MapperService.class));

    UpdateHelper updateHelper = mock(UpdateHelper.class);
    when(updateHelper.prepare(any(), eq(shard), any())).thenReturn(
        new UpdateHelper.Result(updateResponse, randomBoolean() ?
            DocWriteResponse.Result.CREATED : DocWriteResponse.Result.UPDATED,
            Collections.singletonMap("field", "value"), Requests.INDEX_CONTENT_TYPE));

    BulkItemRequest[] items = new BulkItemRequest[]{primaryRequest};
    BulkShardRequest bulkShardRequest = new BulkShardRequest(shardId, RefreshPolicy.NONE, items);

    final CountDownLatch latch = new CountDownLatch(1);
    TransportShardBulkAction.performOnPrimary(
        bulkShardRequest, shard, updateHelper, threadPool::absoluteTimeInMillis,
        new NoopMappingUpdatePerformer(),
        listener -> listener.onResponse(null),
        new LatchedActionListener<>(
            ActionTestUtils.assertNoFailureListener(result -> {
                // eventual success must land at the fake result's location / seq-no
                assertThat(((WritePrimaryResult<BulkShardRequest, BulkShardResponse>) result).location,
                    equalTo(resultLocation));
                BulkItemResponse primaryResponse = result.replicaRequest().items()[0].getPrimaryResponse();
                assertThat(primaryResponse.getItemId(), equalTo(0));
                assertThat(primaryResponse.getId(), equalTo("id"));
                assertThat(primaryResponse.getOpType(), equalTo(DocWriteRequest.OpType.UPDATE));
                DocWriteResponse response = primaryResponse.getResponse();
                assertThat(response.status(), equalTo(RestStatus.CREATED));
                assertThat(response.getSeqNo(), equalTo(13L));
            }), latch),
        threadPool);

    latch.await();
}

/**
 * With a single-thread, single-slot-queue write pool saturated, the
 * post-mapping-update continuation is rejected; the first (already written)
 * item stays successful and the second fails with TOO_MANY_REQUESTS.
 */
public void testForceExecutionOnRejectionAfterMappingUpdate() throws Exception {
    TestThreadPool rejectingThreadPool = new TestThreadPool(
        "TransportShardBulkActionTests#testForceExecutionOnRejectionAfterMappingUpdate",
        Settings.builder()
            .put("thread_pool." + ThreadPool.Names.WRITE + ".size", 1)
            .put("thread_pool." + ThreadPool.Names.WRITE + ".queue_size", 1)
            .build());
    CyclicBarrier cyclicBarrier = new CyclicBarrier(2);
    // occupy the single write thread until the test releases the barrier again
    rejectingThreadPool.executor(ThreadPool.Names.WRITE).execute(() -> {
        try {
            cyclicBarrier.await();
            logger.info("blocking the write executor");
            cyclicBarrier.await();
            logger.info("unblocked the write executor");
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    });
    try {
        cyclicBarrier.await();
        // Place a task in the queue to block next enqueue
        rejectingThreadPool.executor(ThreadPool.Names.WRITE).execute(() -> {});

        BulkItemRequest[] items = new BulkItemRequest[2];
        DocWriteRequest<IndexRequest> writeRequest1 = new IndexRequest("index").id("id")
            .source(Requests.INDEX_CONTENT_TYPE, "foo", 1);
        DocWriteRequest<IndexRequest> writeRequest2 = new IndexRequest("index").id("id")
            .source(Requests.INDEX_CONTENT_TYPE, "foo", "bar");
        items[0] = new BulkItemRequest(0, writeRequest1);
        items[1] = new BulkItemRequest(1, writeRequest2);
        BulkShardRequest bulkShardRequest = new BulkShardRequest(shardId, RefreshPolicy.NONE, items);

        Engine.IndexResult mappingUpdate =
            new Engine.IndexResult(new Mapping(null, mock(RootObjectMapper.class), new MetadataFieldMapper[0], Collections.emptyMap()));
        Translog.Location resultLocation1 = new Translog.Location(42, 36, 36);
        Translog.Location resultLocation2 = new Translog.Location(42, 42, 42);
        Engine.IndexResult success1 = new FakeIndexResult(1, 1, 10, true, resultLocation1);
        Engine.IndexResult success2 = new FakeIndexResult(1, 1, 13, true, resultLocation2);

        IndexShard shard = mock(IndexShard.class);
        when(shard.shardId()).thenReturn(shardId);
        // first item succeeds, second first demands a mapping update, then would succeed
        when(shard.applyIndexOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyLong(), anyLong(), anyBoolean()))
            .thenReturn(success1, mappingUpdate, success2);
        when(shard.getFailedIndexResult(any(EsRejectedExecutionException.class), anyLong())).thenCallRealMethod();
        when(shard.mapperService()).thenReturn(mock(MapperService.class));

        randomlySetIgnoredPrimaryResponse(items[0]);

        AtomicInteger updateCalled = new AtomicInteger();

        final CountDownLatch latch = new CountDownLatch(1);
        TransportShardBulkAction.performOnPrimary(
            bulkShardRequest, shard, null, rejectingThreadPool::absoluteTimeInMillis,
            (update, shardId, listener) -> {
                // There should indeed be a mapping update
                assertNotNull(update);
                updateCalled.incrementAndGet();
                listener.onResponse(null);
                try {
                    // Release blocking task now that the continue write execution has been rejected and
                    // the finishRequest execution has been force enqueued
                    cyclicBarrier.await();
                } catch (InterruptedException | BrokenBarrierException e) {
                    throw new IllegalStateException(e);
                }
            },
            listener -> listener.onResponse(null),
            new LatchedActionListener<>(
                ActionTestUtils.assertNoFailureListener(result ->
                    // Assert that we still need to fsync the location that was successfully written
                    assertThat(((WritePrimaryResult<BulkShardRequest, BulkShardResponse>) result).location,
                        equalTo(resultLocation1))), latch),
            rejectingThreadPool);
        latch.await();

        assertThat("mappings were \"updated\" once", updateCalled.get(), equalTo(1));

        // Verify that the shard "executed" the operation twice
        verify(shard, times(2)).applyIndexOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyLong(), anyLong(), anyBoolean());

        BulkItemResponse primaryResponse1 = bulkShardRequest.items()[0].getPrimaryResponse();
        assertThat(primaryResponse1.getItemId(), equalTo(0));
        assertThat(primaryResponse1.getId(), equalTo("id"));
        assertThat(primaryResponse1.getOpType(), equalTo(DocWriteRequest.OpType.INDEX));
        assertFalse(primaryResponse1.isFailed());
        assertThat(primaryResponse1.getResponse().status(), equalTo(RestStatus.CREATED));
        assertThat(primaryResponse1.getResponse().getSeqNo(), equalTo(10L));

        BulkItemResponse primaryResponse2 = bulkShardRequest.items()[1].getPrimaryResponse();
        assertThat(primaryResponse2.getItemId(), equalTo(1));
        assertThat(primaryResponse2.getId(), equalTo("id"));
        assertThat(primaryResponse2.getOpType(), equalTo(DocWriteRequest.OpType.INDEX));
        assertTrue(primaryResponse2.isFailed());
        assertNull(primaryResponse2.getResponse());
        assertEquals(primaryResponse2.status(), RestStatus.TOO_MANY_REQUESTS);
        assertThat(primaryResponse2.getFailure().getCause(), instanceOf(EsRejectedExecutionException.class));

        closeShards(shard);
    } finally {
        rejectingThreadPool.shutdownNow();
    }
}

/**
 * Half the time, pre-populates the item with a bogus primary response so
 * tests also verify that a stale response is ignored on the primary.
 */
private void randomlySetIgnoredPrimaryResponse(BulkItemRequest primaryRequest) {
    if (randomBoolean()) {
        // add a response to the request and thereby check that it is ignored for the primary.
        primaryRequest.setPrimaryResponse(new BulkItemResponse(0, DocWriteRequest.OpType.INDEX,
            new IndexResponse(shardId, "ignore-primary-response-on-primary", 42, 42, 42, false)));
    }
}

/**
 * Fake IndexResult that has a settable translog location
 */
static class FakeIndexResult extends Engine.IndexResult {

    private final Translog.Location location;

    protected FakeIndexResult(long version, long term, long seqNo, boolean created, Translog.Location location) {
        super(version, term, seqNo, created);
        this.location = location;
    }

    @Override
    public Translog.Location getTranslogLocation() {
        return this.location;
    }
}

/**
 * Fake DeleteResult that has a settable translog location
 */
static class FakeDeleteResult extends Engine.DeleteResult {

    private final Translog.Location location;

    protected FakeDeleteResult(long version, long term, long seqNo, boolean found, Translog.Location location) {
        super(version, term, seqNo, found);
        this.location = location;
    }

    @Override
    public Translog.Location getTranslogLocation() {
        return this.location;
    }
}

/** Doesn't perform any mapping updates */
public static class NoopMappingUpdatePerformer implements MappingUpdatePerformer {
    @Override
    public void updateMappings(Mapping update, ShardId shardId, ActionListener<Void> listener) {
        listener.onResponse(null);
    }
}

/** Always throw the given exception */
private class ThrowingMappingUpdatePerformer implements MappingUpdatePerformer {
    private final RuntimeException e;

    ThrowingMappingUpdatePerformer(RuntimeException e) {
        this.e = e;
    }

    @Override
    public void updateMappings(Mapping update, ShardId shardId, ActionListener<Void> listener) {
        listener.onFailure(e);
    }
}
}
/*
 * Copyright (c) 2007, 2016, Oracle and/or its affiliates. All rights reserved.
 * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
 */
/*
 * Copyright 2002-2004 The Apache Software Foundation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/*
 * $Id: XPathNamespaceImpl.java,v 1.2.4.1 2005/09/10 04:10:02 jeffsuttor Exp $
 */

package com.sun.org.apache.xpath.internal.domapi;

import org.w3c.dom.Attr;
import org.w3c.dom.DOMException;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.w3c.dom.xpath.XPathNamespace;

import org.w3c.dom.UserDataHandler;

/**
 *
 *
 * The <code>XPathNamespace</code> interface is returned by
 * <code>XPathResult</code> interfaces to represent the XPath namespace node
 * type that DOM lacks. There is no public constructor for this node type.
 * Attempts to place it into a hierarchy or a NamedNodeMap result in a
 * <code>DOMException</code> with the code <code>HIERARCHY_REQUEST_ERR</code>
 * . This node is read only, so methods or setting of attributes that would
 * mutate the node result in a DOMException with the code
 * <code>NO_MODIFICATION_ALLOWED_ERR</code>.
 * <p>The core specification describes attributes of the <code>Node</code>
 * interface that are different for different node node types but does not
 * describe <code>XPATH_NAMESPACE_NODE</code>, so here is a description of
 * those attributes for this node type. All attributes of <code>Node</code>
 * not described in this section have a <code>null</code> or
 * <code>false</code> value.
 * <p><code>ownerDocument</code> matches the <code>ownerDocument</code> of the
 * <code>ownerElement</code> even if the element is later adopted.
 * <p><code>prefix</code> is the prefix of the namespace represented by the
 * node.
 * <p><code>nodeName</code> is the same as <code>prefix</code>.
 * <p><code>nodeType</code> is equal to <code>XPATH_NAMESPACE_NODE</code>.
 * <p><code>namespaceURI</code> is the namespace URI of the namespace
 * represented by the node.
 * <p><code>adoptNode</code>, <code>cloneNode</code>, and
 * <code>importNode</code> fail on this node type by raising a
 * <code>DOMException</code> with the code <code>NOT_SUPPORTED_ERR</code>.In
 * future versions of the XPath specification, the definition of a namespace
 * node may be changed incomatibly, in which case incompatible changes to
 * field values may be required to implement versions beyond XPath 1.0.
 * <p>See also the <a href='http://www.w3.org/TR/2004/NOTE-DOM-Level-3-XPath-20040226'>Document Object Model (DOM) Level 3 XPath Specification</a>.
 *
 * This implementation wraps the DOM attribute node that contained the
 * namespace declaration.
 *
 * NOTE(review): several mutating methods below (insertBefore, replaceChild,
 * removeChild, appendChild, setNodeValue, setPrefix) silently return null or
 * do nothing instead of raising the DOMExceptions promised above — presumably
 * long-standing shipped behavior; confirm before relying on the javadoc.
 * @xsl.usage internal
 */
class XPathNamespaceImpl implements XPathNamespace {

    // Node that XPathNamespaceImpl wraps (the Attr holding the namespace declaration)
    final private Node m_attributeNode;

    /**
     * Constructor for XPathNamespaceImpl.
     */
    XPathNamespaceImpl(Node node) {
        m_attributeNode = node;
    }

    /**
     * @see com.sun.org.apache.xalan.internal.dom3.xpath.XPathNamespace#getOwnerElement()
     */
    public Element getOwnerElement() {
        // delegate to the wrapped Attr's owner element
        return ((Attr)m_attributeNode).getOwnerElement();
    }

    /**
     * @see Node#getNodeName()
     */
    public String getNodeName() {
        return "#namespace";
    }

    /**
     * @see Node#getNodeValue()
     */
    public String getNodeValue() throws DOMException {
        return m_attributeNode.getNodeValue();
    }

    /**
     * @see Node#setNodeValue(String)
     */
    public void setNodeValue(String arg0) throws DOMException {
        // intentionally a no-op: this node type is read-only
    }

    /**
     * @see Node#getNodeType()
     */
    public short getNodeType() {
        return XPathNamespace.XPATH_NAMESPACE_NODE;
    }

    /**
     * @see Node#getParentNode()
     */
    public Node getParentNode() {
        return m_attributeNode.getParentNode();
    }

    /**
     * @see Node#getChildNodes()
     */
    public NodeList getChildNodes() {
        return m_attributeNode.getChildNodes();
    }

    /**
     * @see Node#getFirstChild()
     */
    public Node getFirstChild() {
        return m_attributeNode.getFirstChild();
    }

    /**
     * @see Node#getLastChild()
     */
    public Node getLastChild() {
        return m_attributeNode.getLastChild();
    }

    /**
     * @see Node#getPreviousSibling()
     */
    public Node getPreviousSibling() {
        return m_attributeNode.getPreviousSibling();
    }

    /**
     * @see Node#getNextSibling()
     */
    public Node getNextSibling() {
        return m_attributeNode.getNextSibling();
    }

    /**
     * @see Node#getAttributes()
     */
    public NamedNodeMap getAttributes() {
        return m_attributeNode.getAttributes();
    }

    /**
     * @see Node#getOwnerDocument()
     */
    public Document getOwnerDocument() {
        return m_attributeNode.getOwnerDocument();
    }

    /**
     * @see Node#insertBefore(Node, Node)
     */
    public Node insertBefore(Node arg0, Node arg1) throws DOMException {
        return null;
    }

    /**
     * @see Node#replaceChild(Node, Node)
     */
    public Node replaceChild(Node arg0, Node arg1) throws DOMException {
        return null;
    }

    /**
     * @see Node#removeChild(Node)
     */
    public Node removeChild(Node arg0) throws DOMException {
        return null;
    }

    /**
     * @see Node#appendChild(Node)
     */
    public Node appendChild(Node arg0) throws DOMException {
        return null;
    }

    /**
     * @see Node#hasChildNodes()
     */
    public boolean hasChildNodes() {
        return false;
    }

    /**
     * @see Node#cloneNode(boolean)
     */
    public Node cloneNode(boolean arg0) {
        // cloning a namespace node is explicitly unsupported by the spec
        throw new DOMException(DOMException.NOT_SUPPORTED_ERR,null);
    }

    /**
     * @see Node#normalize()
     */
    public void normalize() {
        m_attributeNode.normalize();
    }

    /**
     * @see Node#isSupported(String, String)
     */
    public boolean isSupported(String arg0, String arg1) {
        return m_attributeNode.isSupported(arg0, arg1);
    }

    /**
     * @see Node#getNamespaceURI()
     */
    public String getNamespaceURI() {
        // For namespace node, the namespaceURI is the namespace URI
        // of the namespace represented by the node.
        return m_attributeNode.getNodeValue();
    }

    /**
     * @see Node#getPrefix()
     */
    public String getPrefix() {
        return m_attributeNode.getPrefix();
    }

    /**
     * @see Node#setPrefix(String)
     */
    public void setPrefix(String arg0) throws DOMException {
        // intentionally a no-op: this node type is read-only
    }

    /**
     * @see Node#getLocalName()
     */
    public String getLocalName() {
        // For namespace node, the local name is the same as the prefix
        return m_attributeNode.getPrefix();
    }

    /**
     * @see Node#hasAttributes()
     */
    public boolean hasAttributes() {
        return m_attributeNode.hasAttributes();
    }

    public String getBaseURI() {
        return null;
    }

    public short compareDocumentPosition(Node other) throws DOMException {
        return 0;
    }

    // NOTE(review): a mutable field on a node documented as read-only; kept as shipped
    private String textContent;

    public String getTextContent() throws DOMException {
        return textContent;
    }

    public void setTextContent(String textContent) throws DOMException {
        this.textContent = textContent;
    }

    public boolean isSameNode(Node other) {
        return false;
    }

    public String lookupPrefix(String namespaceURI) {
        return ""; //PENDING
    }

    public boolean isDefaultNamespace(String namespaceURI) {
        return false;
    }

    public String lookupNamespaceURI(String prefix) {
        return null;
    }

    public boolean isEqualNode(Node arg) {
        return false;
    }

    public Object getFeature(String feature, String version) {
        return null; //PENDING
    }

    public Object setUserData(String key, Object data, UserDataHandler handler) {
        return null; //PENDING
    }

    public Object getUserData(String key) {
        return null;
    }
}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.frauddetector.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;

/**
 * <p>
 * The details of the rule.
 * </p>
 * <p>
 * NOTE: this class is code-generated by the AWS Java SDK code generator; do not hand-edit the
 * accessor/equals/hashCode bodies, as regeneration will overwrite them.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/frauddetector-2019-11-15/RuleDetail" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class RuleDetail implements Serializable, Cloneable, StructuredPojo {

    /**
     * <p>
     * The rule ID.
     * </p>
     */
    private String ruleId;
    /**
     * <p>
     * The rule description.
     * </p>
     */
    private String description;
    /**
     * <p>
     * The detector for which the rule is associated.
     * </p>
     */
    private String detectorId;
    /**
     * <p>
     * The rule version.
     * </p>
     */
    private String ruleVersion;
    /**
     * <p>
     * The rule expression. Treated as sensitive data: redacted in {@link #toString()}.
     * </p>
     */
    private String expression;
    /**
     * <p>
     * The rule language.
     * </p>
     */
    private String language;
    /**
     * <p>
     * The rule outcomes.
     * </p>
     */
    private java.util.List<String> outcomes;
    /**
     * <p>
     * Timestamp of the last time the rule was updated.
     * </p>
     */
    private String lastUpdatedTime;
    /**
     * <p>
     * The timestamp of when the rule was created.
     * </p>
     */
    private String createdTime;
    /**
     * <p>
     * The rule ARN.
     * </p>
     */
    private String arn;

    /**
     * <p>
     * The rule ID.
     * </p>
     *
     * @param ruleId
     *        The rule ID.
     */
    public void setRuleId(String ruleId) {
        this.ruleId = ruleId;
    }

    /**
     * <p>
     * The rule ID.
     * </p>
     *
     * @return The rule ID.
     */
    public String getRuleId() {
        return this.ruleId;
    }

    /**
     * <p>
     * The rule ID.
     * </p>
     *
     * @param ruleId
     *        The rule ID.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public RuleDetail withRuleId(String ruleId) {
        setRuleId(ruleId);
        return this;
    }

    /**
     * <p>
     * The rule description.
     * </p>
     *
     * @param description
     *        The rule description.
     */
    public void setDescription(String description) {
        this.description = description;
    }

    /**
     * <p>
     * The rule description.
     * </p>
     *
     * @return The rule description.
     */
    public String getDescription() {
        return this.description;
    }

    /**
     * <p>
     * The rule description.
     * </p>
     *
     * @param description
     *        The rule description.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public RuleDetail withDescription(String description) {
        setDescription(description);
        return this;
    }

    /**
     * <p>
     * The detector for which the rule is associated.
     * </p>
     *
     * @param detectorId
     *        The detector for which the rule is associated.
     */
    public void setDetectorId(String detectorId) {
        this.detectorId = detectorId;
    }

    /**
     * <p>
     * The detector for which the rule is associated.
     * </p>
     *
     * @return The detector for which the rule is associated.
     */
    public String getDetectorId() {
        return this.detectorId;
    }

    /**
     * <p>
     * The detector for which the rule is associated.
     * </p>
     *
     * @param detectorId
     *        The detector for which the rule is associated.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public RuleDetail withDetectorId(String detectorId) {
        setDetectorId(detectorId);
        return this;
    }

    /**
     * <p>
     * The rule version.
     * </p>
     *
     * @param ruleVersion
     *        The rule version.
     */
    public void setRuleVersion(String ruleVersion) {
        this.ruleVersion = ruleVersion;
    }

    /**
     * <p>
     * The rule version.
     * </p>
     *
     * @return The rule version.
     */
    public String getRuleVersion() {
        return this.ruleVersion;
    }

    /**
     * <p>
     * The rule version.
     * </p>
     *
     * @param ruleVersion
     *        The rule version.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public RuleDetail withRuleVersion(String ruleVersion) {
        setRuleVersion(ruleVersion);
        return this;
    }

    /**
     * <p>
     * The rule expression.
     * </p>
     *
     * @param expression
     *        The rule expression.
     */
    public void setExpression(String expression) {
        this.expression = expression;
    }

    /**
     * <p>
     * The rule expression.
     * </p>
     *
     * @return The rule expression.
     */
    public String getExpression() {
        return this.expression;
    }

    /**
     * <p>
     * The rule expression.
     * </p>
     *
     * @param expression
     *        The rule expression.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public RuleDetail withExpression(String expression) {
        setExpression(expression);
        return this;
    }

    /**
     * <p>
     * The rule language.
     * </p>
     *
     * @param language
     *        The rule language.
     * @see Language
     */
    public void setLanguage(String language) {
        this.language = language;
    }

    /**
     * <p>
     * The rule language.
     * </p>
     *
     * @return The rule language.
     * @see Language
     */
    public String getLanguage() {
        return this.language;
    }

    /**
     * <p>
     * The rule language.
     * </p>
     *
     * @param language
     *        The rule language.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see Language
     */
    public RuleDetail withLanguage(String language) {
        setLanguage(language);
        return this;
    }

    /**
     * <p>
     * The rule language.
     * </p>
     *
     * @param language
     *        The rule language.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see Language
     */
    public RuleDetail withLanguage(Language language) {
        // Enum overload stores the enum's string form directly (no setter call).
        this.language = language.toString();
        return this;
    }

    /**
     * <p>
     * The rule outcomes.
     * </p>
     *
     * @return The rule outcomes.
     */
    public java.util.List<String> getOutcomes() {
        return outcomes;
    }

    /**
     * <p>
     * The rule outcomes.
     * </p>
     *
     * @param outcomes
     *        The rule outcomes.
     */
    public void setOutcomes(java.util.Collection<String> outcomes) {
        if (outcomes == null) {
            this.outcomes = null;
            return;
        }

        // Defensive copy so later mutation of the caller's collection has no effect.
        this.outcomes = new java.util.ArrayList<String>(outcomes);
    }

    /**
     * <p>
     * The rule outcomes.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setOutcomes(java.util.Collection)} or {@link #withOutcomes(java.util.Collection)} if you want to override
     * the existing values.
     * </p>
     *
     * @param outcomes
     *        The rule outcomes.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public RuleDetail withOutcomes(String... outcomes) {
        if (this.outcomes == null) {
            setOutcomes(new java.util.ArrayList<String>(outcomes.length));
        }
        for (String ele : outcomes) {
            this.outcomes.add(ele);
        }
        return this;
    }

    /**
     * <p>
     * The rule outcomes.
     * </p>
     *
     * @param outcomes
     *        The rule outcomes.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public RuleDetail withOutcomes(java.util.Collection<String> outcomes) {
        setOutcomes(outcomes);
        return this;
    }

    /**
     * <p>
     * Timestamp of the last time the rule was updated.
     * </p>
     *
     * @param lastUpdatedTime
     *        Timestamp of the last time the rule was updated.
     */
    public void setLastUpdatedTime(String lastUpdatedTime) {
        this.lastUpdatedTime = lastUpdatedTime;
    }

    /**
     * <p>
     * Timestamp of the last time the rule was updated.
     * </p>
     *
     * @return Timestamp of the last time the rule was updated.
     */
    public String getLastUpdatedTime() {
        return this.lastUpdatedTime;
    }

    /**
     * <p>
     * Timestamp of the last time the rule was updated.
     * </p>
     *
     * @param lastUpdatedTime
     *        Timestamp of the last time the rule was updated.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public RuleDetail withLastUpdatedTime(String lastUpdatedTime) {
        setLastUpdatedTime(lastUpdatedTime);
        return this;
    }

    /**
     * <p>
     * The timestamp of when the rule was created.
     * </p>
     *
     * @param createdTime
     *        The timestamp of when the rule was created.
     */
    public void setCreatedTime(String createdTime) {
        this.createdTime = createdTime;
    }

    /**
     * <p>
     * The timestamp of when the rule was created.
     * </p>
     *
     * @return The timestamp of when the rule was created.
     */
    public String getCreatedTime() {
        return this.createdTime;
    }

    /**
     * <p>
     * The timestamp of when the rule was created.
     * </p>
     *
     * @param createdTime
     *        The timestamp of when the rule was created.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public RuleDetail withCreatedTime(String createdTime) {
        setCreatedTime(createdTime);
        return this;
    }

    /**
     * <p>
     * The rule ARN.
     * </p>
     *
     * @param arn
     *        The rule ARN.
     */
    public void setArn(String arn) {
        this.arn = arn;
    }

    /**
     * <p>
     * The rule ARN.
     * </p>
     *
     * @return The rule ARN.
     */
    public String getArn() {
        return this.arn;
    }

    /**
     * <p>
     * The rule ARN.
     * </p>
     *
     * @param arn
     *        The rule ARN.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public RuleDetail withArn(String arn) {
        setArn(arn);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getRuleId() != null)
            sb.append("RuleId: ").append(getRuleId()).append(",");
        if (getDescription() != null)
            sb.append("Description: ").append(getDescription()).append(",");
        if (getDetectorId() != null)
            sb.append("DetectorId: ").append(getDetectorId()).append(",");
        if (getRuleVersion() != null)
            sb.append("RuleVersion: ").append(getRuleVersion()).append(",");
        // Expression is the only member treated as sensitive and therefore redacted.
        if (getExpression() != null)
            sb.append("Expression: ").append("***Sensitive Data Redacted***").append(",");
        if (getLanguage() != null)
            sb.append("Language: ").append(getLanguage()).append(",");
        if (getOutcomes() != null)
            sb.append("Outcomes: ").append(getOutcomes()).append(",");
        if (getLastUpdatedTime() != null)
            sb.append("LastUpdatedTime: ").append(getLastUpdatedTime()).append(",");
        if (getCreatedTime() != null)
            sb.append("CreatedTime: ").append(getCreatedTime()).append(",");
        if (getArn() != null)
            sb.append("Arn: ").append(getArn());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof RuleDetail == false)
            return false;
        RuleDetail other = (RuleDetail) obj;
        // Generated pattern: for each member, XOR detects one-side-null, then
        // equals() compares the values when both are non-null.
        if (other.getRuleId() == null ^ this.getRuleId() == null)
            return false;
        if (other.getRuleId() != null && other.getRuleId().equals(this.getRuleId()) == false)
            return false;
        if (other.getDescription() == null ^ this.getDescription() == null)
            return false;
        if (other.getDescription() != null && other.getDescription().equals(this.getDescription()) == false)
            return false;
        if (other.getDetectorId() == null ^ this.getDetectorId() == null)
            return false;
        if (other.getDetectorId() != null && other.getDetectorId().equals(this.getDetectorId()) == false)
            return false;
        if (other.getRuleVersion() == null ^ this.getRuleVersion() == null)
            return false;
        if (other.getRuleVersion() != null && other.getRuleVersion().equals(this.getRuleVersion()) == false)
            return false;
        if (other.getExpression() == null ^ this.getExpression() == null)
            return false;
        if (other.getExpression() != null && other.getExpression().equals(this.getExpression()) == false)
            return false;
        if (other.getLanguage() == null ^ this.getLanguage() == null)
            return false;
        if (other.getLanguage() != null && other.getLanguage().equals(this.getLanguage()) == false)
            return false;
        if (other.getOutcomes() == null ^ this.getOutcomes() == null)
            return false;
        if (other.getOutcomes() != null && other.getOutcomes().equals(this.getOutcomes()) == false)
            return false;
        if (other.getLastUpdatedTime() == null ^ this.getLastUpdatedTime() == null)
            return false;
        if (other.getLastUpdatedTime() != null && other.getLastUpdatedTime().equals(this.getLastUpdatedTime()) == false)
            return false;
        if (other.getCreatedTime() == null ^ this.getCreatedTime() == null)
            return false;
        if (other.getCreatedTime() != null && other.getCreatedTime().equals(this.getCreatedTime()) == false)
            return false;
        if (other.getArn() == null ^ this.getArn() == null)
            return false;
        if (other.getArn() != null && other.getArn().equals(this.getArn()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        // Combines the same members equals() compares, keeping the two consistent.
        hashCode = prime * hashCode + ((getRuleId() == null) ? 0 : getRuleId().hashCode());
        hashCode = prime * hashCode + ((getDescription() == null) ? 0 : getDescription().hashCode());
        hashCode = prime * hashCode + ((getDetectorId() == null) ? 0 : getDetectorId().hashCode());
        hashCode = prime * hashCode + ((getRuleVersion() == null) ? 0 : getRuleVersion().hashCode());
        hashCode = prime * hashCode + ((getExpression() == null) ? 0 : getExpression().hashCode());
        hashCode = prime * hashCode + ((getLanguage() == null) ? 0 : getLanguage().hashCode());
        hashCode = prime * hashCode + ((getOutcomes() == null) ? 0 : getOutcomes().hashCode());
        hashCode = prime * hashCode + ((getLastUpdatedTime() == null) ? 0 : getLastUpdatedTime().hashCode());
        hashCode = prime * hashCode + ((getCreatedTime() == null) ? 0 : getCreatedTime().hashCode());
        hashCode = prime * hashCode + ((getArn() == null) ? 0 : getArn().hashCode());
        return hashCode;
    }

    @Override
    public RuleDetail clone() {
        try {
            // Shallow Object.clone() is sufficient: all members are immutable Strings
            // except the outcomes list, which setters already copy defensively.
            return (RuleDetail) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.frauddetector.model.transform.RuleDetailMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.nifi.processors.mongodb; import com.mongodb.MongoClient; import com.mongodb.MongoClientOptions; import com.mongodb.MongoClientOptions.Builder; import com.mongodb.MongoClientURI; import com.mongodb.WriteConcern; import com.mongodb.client.MongoCollection; import com.mongodb.client.MongoDatabase; import org.apache.commons.lang3.StringUtils; import org.apache.nifi.annotation.lifecycle.OnScheduled; import org.apache.nifi.annotation.lifecycle.OnStopped; import org.apache.nifi.authentication.exception.ProviderCreationException; import org.apache.nifi.components.PropertyDescriptor; import org.apache.nifi.flowfile.FlowFile; import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.util.StandardValidators; import org.apache.nifi.security.util.SslContextFactory; import org.apache.nifi.ssl.SSLContextService; import org.bson.Document; import javax.net.ssl.SSLContext; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.util.ArrayList; import 
java.util.List;
import java.util.Map;

/**
 * Base class for NiFi MongoDB processors. Declares the shared property
 * descriptors (URI, database, collection, SSL, write concern, batching) and
 * manages the lifecycle of the shared {@link MongoClient}.
 */
public abstract class AbstractMongoProcessor extends AbstractProcessor {
    // Allowable values for the Write Concern property; mapped to
    // com.mongodb.WriteConcern constants in getWriteConcern().
    static final String WRITE_CONCERN_ACKNOWLEDGED = "ACKNOWLEDGED";
    static final String WRITE_CONCERN_UNACKNOWLEDGED = "UNACKNOWLEDGED";
    static final String WRITE_CONCERN_FSYNCED = "FSYNCED";
    static final String WRITE_CONCERN_JOURNALED = "JOURNALED";
    static final String WRITE_CONCERN_REPLICA_ACKNOWLEDGED = "REPLICA_ACKNOWLEDGED";
    static final String WRITE_CONCERN_MAJORITY = "MAJORITY";

    protected static final PropertyDescriptor URI = new PropertyDescriptor.Builder()
        .name("Mongo URI")
        .displayName("Mongo URI")
        .description("MongoURI, typically of the form: mongodb://host1[:port1][,host2[:port2],...]")
        .required(true)
        .expressionLanguageSupported(true)
        .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
        .build();

    protected static final PropertyDescriptor DATABASE_NAME = new PropertyDescriptor.Builder()
        .name("Mongo Database Name")
        .displayName("Mongo Database Name")
        .description("The name of the database to use")
        .required(true)
        .expressionLanguageSupported(true)
        .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
        .build();

    protected static final PropertyDescriptor COLLECTION_NAME = new PropertyDescriptor.Builder()
        .name("Mongo Collection Name")
        .description("The name of the collection to use")
        .required(true)
        .expressionLanguageSupported(true)
        .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
        .build();

    public static final PropertyDescriptor SSL_CONTEXT_SERVICE = new PropertyDescriptor.Builder()
        .name("ssl-context-service")
        .displayName("SSL Context Service")
        .description("The SSL Context Service used to provide client certificate information for TLS/SSL "
                + "connections.")
        .required(false)
        .identifiesControllerService(SSLContextService.class)
        .build();

    public static final PropertyDescriptor CLIENT_AUTH = new PropertyDescriptor.Builder()
        .name("ssl-client-auth")
        .displayName("Client Auth")
        .description("Client authentication policy when connecting to secure (TLS/SSL) cluster. "
                + "Possible values are REQUIRED, WANT, NONE. This property is only used when an SSL Context "
                + "has been defined and enabled.")
        .required(false)
        .allowableValues(SSLContextService.ClientAuth.values())
        .defaultValue("REQUIRED")
        .build();

    public static final PropertyDescriptor WRITE_CONCERN = new PropertyDescriptor.Builder()
        .name("Write Concern")
        .displayName("Write Concern")
        .description("The write concern to use")
        .required(true)
        .allowableValues(WRITE_CONCERN_ACKNOWLEDGED, WRITE_CONCERN_UNACKNOWLEDGED, WRITE_CONCERN_FSYNCED,
                WRITE_CONCERN_JOURNALED, WRITE_CONCERN_REPLICA_ACKNOWLEDGED, WRITE_CONCERN_MAJORITY)
        .defaultValue(WRITE_CONCERN_ACKNOWLEDGED)
        .build();

    static final PropertyDescriptor RESULTS_PER_FLOWFILE = new PropertyDescriptor.Builder()
        .name("results-per-flowfile")
        .displayName("Results Per FlowFile")
        .description("How many results to put into a flowfile at once. The whole body will be treated as a JSON array of results.")
        .required(false)
        .addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR)
        .defaultValue("1")
        .build();

    static final PropertyDescriptor BATCH_SIZE = new PropertyDescriptor.Builder()
        .name("Batch Size")
        .displayName("Batch Size")
        .description("The number of elements returned from the server in one batch.")
        .required(false)
        .addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR)
        .defaultValue("100")
        .build();

    static final PropertyDescriptor QUERY_ATTRIBUTE = new PropertyDescriptor.Builder()
        .name("mongo-query-attribute")
        .displayName("Query Output Attribute")
        .description("If set, the query will be written to a specified attribute on the output flowfiles.")
        .expressionLanguageSupported(true)
        .addValidator(StandardValidators.ATTRIBUTE_KEY_PROPERTY_NAME_VALIDATOR)
        .required(false)
        .build();

    static final PropertyDescriptor CHARSET = new PropertyDescriptor.Builder()
        .name("mongo-charset")
        .displayName("Character Set")
        .description("Specifies the character set of the document data.")
        .required(true)
        .defaultValue("UTF-8")
        .addValidator(StandardValidators.CHARACTER_SET_VALIDATOR)
        .expressionLanguageSupported(true)
        .build();

    // Shared base descriptors; subclasses copy/extend this list in their own
    // getSupportedPropertyDescriptors(). NOTE(review): intentionally mutable and
    // package-visible — do not make final without checking subclasses.
    static List<PropertyDescriptor> descriptors = new ArrayList<>();

    static {
        descriptors.add(URI);
        descriptors.add(DATABASE_NAME);
        descriptors.add(COLLECTION_NAME);
        descriptors.add(SSL_CONTEXT_SERVICE);
        descriptors.add(CLIENT_AUTH);
    }

    // Created in createClient() on schedule, closed in closeClient() on stop.
    protected MongoClient mongoClient;

    /**
     * Creates the shared {@link MongoClient} when the processor is scheduled,
     * optionally configured for TLS/SSL via the configured SSLContextService.
     * Any previously created client is closed first.
     *
     * @param context the process context supplying the configured properties
     * @throws IOException declared for subclass/scheduling compatibility
     */
    @OnScheduled
    public final void createClient(ProcessContext context) throws IOException {
        if (mongoClient != null) {
            closeClient();
        }

        getLogger().info("Creating MongoClient");

        // Set up the client for secure (SSL/TLS communications) if configured to do so
        final SSLContextService sslService = context.getProperty(SSL_CONTEXT_SERVICE).asControllerService(SSLContextService.class);
        final String rawClientAuth = context.getProperty(CLIENT_AUTH).getValue();
        final SSLContext sslContext;

        if (sslService != null) {
            final SSLContextService.ClientAuth clientAuth;
            if (StringUtils.isBlank(rawClientAuth)) {
                clientAuth = SSLContextService.ClientAuth.REQUIRED;
            } else {
                try {
                    clientAuth = SSLContextService.ClientAuth.valueOf(rawClientAuth);
                } catch (final IllegalArgumentException iae) {
                    // BUGFIX: the message previously listed SslContextFactory.ClientAuth
                    // values; the property is parsed as SSLContextService.ClientAuth, so
                    // list that enum's values instead.
                    throw new ProviderCreationException(String.format("Unrecognized client auth '%s'. Possible values are [%s]",
                            rawClientAuth, StringUtils.join(SSLContextService.ClientAuth.values(), ", ")));
                }
            }
            sslContext = sslService.createSSLContext(clientAuth);
        } else {
            sslContext = null;
        }

        try {
            if(sslContext == null) {
                mongoClient = new MongoClient(new MongoClientURI(getURI(context)));
            } else {
                mongoClient = new MongoClient(new MongoClientURI(getURI(context), getClientOptions(sslContext)));
            }
        } catch (Exception e) {
            getLogger().error("Failed to schedule {} due to {}", new Object[] { this.getClass().getName(), e }, e);
            throw e;
        }
    }

    /**
     * Builds client options with SSL enabled using the supplied context's
     * socket factory.
     *
     * @param sslContext the SSL context to derive the socket factory from
     * @return a builder with SSL enabled
     */
    protected Builder getClientOptions(final SSLContext sslContext) {
        MongoClientOptions.Builder builder = MongoClientOptions.builder();
        builder.sslEnabled(true);
        builder.socketFactory(sslContext.getSocketFactory());
        return builder;
    }

    /**
     * Closes and clears the shared client when the processor is stopped.
     */
    @OnStopped
    public final void closeClient() {
        if (mongoClient != null) {
            getLogger().info("Closing MongoClient");
            mongoClient.close();
            mongoClient = null;
        }
    }

    protected MongoDatabase getDatabase(final ProcessContext context) {
        return getDatabase(context, null);
    }

    /**
     * Resolves the database name (with expression language evaluated against
     * the given flow file, which may be null) and returns the database handle.
     */
    protected MongoDatabase getDatabase(final ProcessContext context, final FlowFile flowFile) {
        final String databaseName = context.getProperty(DATABASE_NAME).evaluateAttributeExpressions(flowFile).getValue();
        return mongoClient.getDatabase(databaseName);
    }

    protected MongoCollection<Document> getCollection(final ProcessContext context) {
        return getCollection(context, null);
    }

    /**
     * Resolves the collection name (with expression language evaluated against
     * the given flow file, which may be null) and returns the collection handle.
     */
    protected MongoCollection<Document> getCollection(final ProcessContext context, final FlowFile flowFile) {
        final String collectionName = context.getProperty(COLLECTION_NAME).evaluateAttributeExpressions(flowFile).getValue();
        return getDatabase(context, flowFile).getCollection(collectionName);
    }

    protected String getURI(final ProcessContext context) {
        return context.getProperty(URI).evaluateAttributeExpressions().getValue();
    }

    /**
     * Maps the configured Write Concern property value to the corresponding
     * {@link WriteConcern} constant; unknown values fall back to ACKNOWLEDGED.
     */
    protected WriteConcern getWriteConcern(final ProcessContext context) {
        final String writeConcernProperty = context.getProperty(WRITE_CONCERN).getValue();
        WriteConcern writeConcern = null;
        switch (writeConcernProperty) {
            case WRITE_CONCERN_ACKNOWLEDGED:
                writeConcern = WriteConcern.ACKNOWLEDGED;
                break;
            case WRITE_CONCERN_UNACKNOWLEDGED:
                writeConcern = WriteConcern.UNACKNOWLEDGED;
                break;
            case WRITE_CONCERN_FSYNCED:
                writeConcern = WriteConcern.FSYNCED;
                break;
            case WRITE_CONCERN_JOURNALED:
                writeConcern = WriteConcern.JOURNALED;
                break;
            case WRITE_CONCERN_REPLICA_ACKNOWLEDGED:
                writeConcern = WriteConcern.REPLICA_ACKNOWLEDGED;
                break;
            case WRITE_CONCERN_MAJORITY:
                writeConcern = WriteConcern.MAJORITY;
                break;
            default:
                writeConcern = WriteConcern.ACKNOWLEDGED;
        }
        return writeConcern;
    }

    /**
     * Writes one batch payload to a new flow file (a child of {@code parent}
     * when one is provided) and routes it to {@code rel}.
     *
     * @param payload         serialized batch content written as the flow file body
     * @param parent          optional parent flow file; also used to evaluate the charset EL
     * @param context         the process context
     * @param session         the process session
     * @param extraAttributes attributes to put on the output flow file
     *                        (raw Map kept for subclass source compatibility)
     * @param rel             the relationship to transfer the flow file to
     * @throws UnsupportedEncodingException if the configured charset is not supported
     */
    protected void writeBatch(String payload, FlowFile parent, ProcessContext context, ProcessSession session,
            Map extraAttributes, Relationship rel) throws UnsupportedEncodingException {
        String charset = parent != null
                ? context.getProperty(CHARSET).evaluateAttributeExpressions(parent).getValue()
                : context.getProperty(CHARSET).evaluateAttributeExpressions().getValue();

        FlowFile flowFile = parent != null ? session.create(parent) : session.create();
        flowFile = session.importFrom(new ByteArrayInputStream(payload.getBytes(charset)), flowFile);
        flowFile = session.putAllAttributes(flowFile, extraAttributes);
        session.getProvenanceReporter().receive(flowFile, getURI(context));
        session.transfer(flowFile, rel);
    }
}
/*******************************************************************************
 *
 *  Copyright FUJITSU LIMITED 2017
 *
 *  Creation Date: Dec 10, 2012
 *
 *******************************************************************************/

package org.oscm.billingservice.business.calculation.revenue;

import java.math.BigDecimal;
import java.util.Calendar;
import java.util.List;
import java.util.Set;

import org.oscm.billingservice.business.calculation.BigDecimals;
import org.oscm.billingservice.business.calculation.revenue.model.UsageDetails;
import org.oscm.billingservice.business.calculation.revenue.model.UserAssignmentDetails;
import org.oscm.billingservice.business.calculation.revenue.model.UserAssignmentFactors;
import org.oscm.billingservice.dao.BillingDataRetrievalServiceLocal;
import org.oscm.billingservice.dao.model.XParameterData;
import org.oscm.billingservice.dao.model.XParameterIdData;
import org.oscm.billingservice.dao.model.XParameterPeriodValue;
import org.oscm.billingservice.service.model.BillingInput;
import org.oscm.domobjects.ParameterHistory;
import org.oscm.domobjects.PriceModelHistory;
import org.oscm.domobjects.SubscriptionHistory;
import org.oscm.domobjects.UsageLicenseHistory;
import org.oscm.domobjects.enums.ModificationType;
import org.oscm.internal.types.enumtypes.PricingPeriod;

/**
 * Pro-rata cost calculator: charges usage fractionally by the exact time used
 * within a pricing period, rather than rounding up to whole time units.
 *
 * @author afschar
 *
 */
public class CostCalculatorProRata extends CostCalculator {

    CostCalculatorProRata() {
        super();
    }

    /**
     * Returns the date representing the first day of the previous month, 0:00.
     *
     * @param baseTime
     *            The base time. Based on it the previous month will be
     *            determined.
     * @return The long representation of the date for the first day of the
     *         previous month.
     */
    public static final long getStartDateOfLastMonth(long baseTime) {
        // Uses the JVM default time zone — TODO(review) confirm that is intended.
        Calendar cal = Calendar.getInstance();
        cal.setTimeInMillis(baseTime);
        cal.add(Calendar.MONTH, -1);
        cal.set(Calendar.DAY_OF_MONTH, 1);
        cal.set(Calendar.HOUR_OF_DAY, 0);
        cal.set(Calendar.MINUTE, 0);
        cal.set(Calendar.SECOND, 0);
        cal.set(Calendar.MILLISECOND, 0);
        return cal.getTimeInMillis();
    }

    /**
     * Returns the date representing the first day of the current month, 0:00.
     * As this allows equivalent usage as taking the last day of the previous
     * month, 23:59:59, this proceeding is okay.
     *
     * @param baseTime
     *            The current time, based on which the time will be determined.
     * @return The long representation of the first day of this month.
     */
    public static final long getEndDateOfLastMonth(long baseTime) {
        Calendar cal = Calendar.getInstance();
        cal.setTimeInMillis(getStartDateOfLastMonth(baseTime));
        cal.add(Calendar.MONTH, 1);
        return cal.getTimeInMillis();
    }

    /**
     * Computes the pro-rata factor for a usage interval, after clamping the
     * interval to the billing period boundaries taken from the billing input.
     */
    @Override
    public double computeFactorForUsageTime(PricingPeriod pricingPeriod,
            BillingInput billingInput, long usagePeriodStart,
            long usagePeriodEnd) {
        long billingPeriodStart = billingInput.getCutOffDate();
        long billingPeriodEnd = billingInput.getBillingPeriodEnd();
        // Clamp the usage interval into [billingPeriodStart, billingPeriodEnd].
        if (usagePeriodStart < billingPeriodStart) {
            usagePeriodStart = billingPeriodStart;
        }
        if (usagePeriodEnd > billingPeriodEnd) {
            usagePeriodEnd = billingPeriodEnd;
        }
        return computeFractionalFactor(usagePeriodStart, usagePeriodEnd,
                pricingPeriod);
    }

    /**
     * Derives per-user (and per-role) pro-rata usage factors from the usage
     * license history entries of a subscription.
     *
     * NOTE(review): the algorithm relies on ulHistList being grouped by user
     * and ordered newest-first within each user — each entry's mod date
     * becomes the end time of the next (older) entry's usage interval.
     * Confirm this ordering against the DAO that loads the history.
     */
    @Override
    public UserAssignmentFactors computeUserAssignmentsFactors(
            List<UsageLicenseHistory> ulHistList,
            PriceModelHistory referencePMHistory, BillingInput billingInput,
            long periodStart, long periodEnd) {

        // now determine the billable period for every user
        final UserAssignmentFactors result = new UserAssignmentFactors();
        Long referenceUserKey = null;
        long periodEndTime = periodEnd;
        for (UsageLicenseHistory ulHist : ulHistList) {
            if (referenceUserKey == null
                    || referenceUserKey.longValue() != ulHist.getUserObjKey()) {
                // if a new user has to be handled, only reset the
                // temporary variables
                referenceUserKey = Long.valueOf(ulHist.getUserObjKey());
                periodEndTime = periodEnd;
            }
            // if the status is not deleted, register the used period,
            // which is from mod date to period end time
            if (ulHist.getModtype() != ModificationType.DELETE) {
                long entryModTime = ulHist.getModdate().getTime();
                UsageDetails ud = new UsageDetails();
                // Interval start is clamped to the billing period start.
                ud.addUsagePeriod(Math.max(entryModTime, periodStart),
                        periodEndTime);
                if (ulHist.getRoleDefinitionObjKey() != null) {
                    result.addUsageDataForUserAndRole(referenceUserKey, ulHist
                            .getDataContainer().getApplicationUserId(), ulHist
                            .getRoleDefinitionObjKey(), ud);
                } else {
                    result.addUsageDataForUser(referenceUserKey, ulHist
                            .getDataContainer().getApplicationUserId(), ud);
                }
            }
            // The current entry's mod date bounds the next (older) entry's
            // usage interval for the same user.
            periodEndTime = ulHist.getModdate().getTime();
        }

        // the periods for each user of this subscription have been
        // determined. Now handle them according to the settings in the
        // price model
        Set<Long> userKeys = result.getUserKeys();
        if (userKeys != null) {
            for (Long userKey : userKeys) {
                UserAssignmentDetails userAssignmentDetails = result
                        .getUserAssignmentDetails(userKey);
                final double modifiedFactorForDefinedHandling = computeFactorForUsageDetails(
                        userAssignmentDetails.getUsageDetails(),
                        referencePMHistory.getPeriod());

                // update in user table for later xml representation
                UsageDetails ud = new UsageDetails();
                ud.setFactor(modifiedFactorForDefinedHandling);
                result.addUsageDataForUser(userKey,
                        userAssignmentDetails.getUserId(), ud);

                // the periods for each user of this subscription have been
                // determined. Now handle them according to the settings in the
                // price model
                for (Long roleKey : userAssignmentDetails.getRoleKeys()) {
                    final UsageDetails usageDetails = userAssignmentDetails
                            .getUsageDetails(roleKey);
                    final double roleAssignmentFactor = computeFactorForUsageDetails(
                            usageDetails, referencePMHistory.getPeriod());
                    userAssignmentDetails.addRoleFactor(roleKey,
                            roleAssignmentFactor);
                }
            }
        }
        return result;
    }

    /**
     * Sums the fractional factors of all usage periods recorded in the given
     * usage details.
     */
    private double computeFactorForUsageDetails(UsageDetails usageDetails,
            PricingPeriod period) {
        double factor = 0.0D;
        for (UsageDetails.UsagePeriod usagePeriod : usageDetails
                .getUsagePeriods()) {
            factor += computeFractionalFactor(usagePeriod.getStartTime(),
                    usagePeriod.getEndTime(), period);
        }
        return factor;
    }

    // Pro-rata: the parameter value's own start time is used unchanged (no
    // alignment to time-unit boundaries).
    @Override
    public long computeUserAssignmentStartTimeForParameters(
            PricingPeriod period, long paramValueEndTime,
            ParameterHistory paramHist, PriceModelHistory pmh,
            long paramValueStartTime) {
        return paramValueStartTime;
    }

    // Pro-rata: no rounding of the period start to a time-unit boundary.
    @Override
    public long determineStartTime(long startTimeForPeriod,
            long endTimeForPeriod, PricingPeriod period) {
        return startTimeForPeriod;
    }

    // Pro-rata: payment preview ends exactly at the period end.
    @Override
    public long computeEndTimeForPaymentPreview(long endTimeForPeriod,
            long billingPeriodEnd, PricingPeriod period) {
        return endTimeForPeriod;
    }

    /**
     * Assigns each parameter period value its fractional period factor based
     * on its own start/end times and the parameter data's pricing period.
     */
    @Override
    public void computeParameterPeriodFactor(BillingInput billingInput,
            XParameterData parameterData, long startTimeForPeriod,
            long endTimeForPeriod) {
        if (parameterData != null) {
            for (XParameterIdData parameterIdData : parameterData.getIdData()) {
                for (XParameterPeriodValue parameterPeriodValue : parameterIdData
                        .getPeriodValues()) {
                    double periodFactor = computeFractionalFactor(
                            parameterPeriodValue.getStartTime(),
                            parameterPeriodValue.getEndTime(),
                            parameterData.getPeriod());
                    parameterPeriodValue.setPeriodFactor(periodFactor);
                }
            }
        }
    }

    @Override
    public boolean isSuspendedAndResumedInSameTimeUnit(
            SubscriptionHistory current, SubscriptionHistory next,
            PriceModelHistory pm) {
        // All history entries are relevant in pro rata calculation
        return false;
    }

    /**
     * Parameter user costs = price per user * value multiplier, then scaled by
     * the user assignment factor (BigDecimal arithmetic throughout).
     */
    @Override
    public BigDecimal calculateParameterUserCosts(
            XParameterPeriodValue parameterPeriodValue,
            BigDecimal valueMultplier) {
        BigDecimal costs = parameterPeriodValue.getPricePerUser().multiply(
                valueMultplier);
        costs = BigDecimals.multiply(costs,
                parameterPeriodValue.getUserAssignmentFactor());
        return costs;
    }

    @Override
    public void computeParameterUserFactorAndRoleFactor(
            BillingDataRetrievalServiceLocal billingDao, BillingInput input,
            XParameterData parameterData, long startTimeForPeriod,
            long endTimeForPeriod) {
        // already calculated by billing data retrieval service when loading
        // parameter data
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.benchmarks.jmh.notify;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import javax.cache.configuration.Factory;
import javax.cache.expiry.Duration;
import javax.cache.expiry.ExpiryPolicy;
import org.apache.ignite.IgniteDataStreamer;
import org.apache.ignite.Ignition;
import org.apache.ignite.cache.CacheAtomicityMode;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.internal.benchmarks.jmh.cache.JmhCacheAbstractBenchmark;
import org.apache.ignite.internal.benchmarks.model.IntValue;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.TearDown;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.infra.BenchmarkParams;
import org.openjdk.jmh.results.RunResult;
import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.RunnerException;
import org.openjdk.jmh.runner.options.ChainedOptionsBuilder;
import org.openjdk.jmh.runner.options.OptionsBuilder;
import org.openjdk.jmh.runner.options.TimeValue;

/**
 * Benchmark of cache put throughput under different TTL expiry policies.
 * <p>
 * The policy is selected through the {@code bench.exp.policy} system property:
 * {@code "inc"} (ever-increasing durations), {@code "dec"} (ever-decreasing
 * durations), or {@code "rNN"} where {@code NN} is the percentage of
 * operations that shorten the duration (e.g. {@code "r25"}).
 */
@OutputTimeUnit(TimeUnit.MICROSECONDS)
@BenchmarkMode({/*Mode.AverageTime,*/ Mode.Throughput})
@Warmup(iterations = 5, time = 5, timeUnit = TimeUnit.SECONDS)
@Measurement(iterations = 5, time = 10, timeUnit = TimeUnit.SECONDS)
@Fork(1)
@State(Scope.Benchmark)
public class JmhWaitStategyBenchmark extends JmhCacheAbstractBenchmark {
    /** Name of the system property that selects the expiry policy. */
    private static final String POLICY_PROP = "bench.exp.policy";

    /**
     * Expiry policy that shortens the duration on a random fraction of
     * operations and leaves it unchanged otherwise.
     */
    private static class RandomExpiryPolicy implements ExpiryPolicy {
        /** Probability that a given operation decreases the duration. */
        private final double rate;

        /** Current duration, in milliseconds. */
        private final AtomicLong duration = new AtomicLong(1_000_000_000);

        /** @param rate Probability the duration decreases on each call. */
        RandomExpiryPolicy(double rate) {
            this.rate = rate;
        }

        /**
         * Decrements the shared duration with probability {@code rate} and
         * returns the (possibly updated) duration.
         */
        private Duration nextDuration() {
            boolean generateEvt = ThreadLocalRandom.current().nextDouble() < rate;

            return new Duration(TimeUnit.MILLISECONDS,
                generateEvt ? duration.decrementAndGet() : duration.get());
        }

        /** {@inheritDoc} */
        @Override public Duration getExpiryForCreation() {
            return nextDuration();
        }

        /** {@inheritDoc} */
        @Override public Duration getExpiryForAccess() {
            return nextDuration();
        }

        /** {@inheritDoc} */
        @Override public Duration getExpiryForUpdate() {
            return nextDuration();
        }
    }

    /**
     * @param rate Probability the duration decreases on each operation.
     * @return Factory producing a {@link RandomExpiryPolicy} with the given rate.
     */
    private static Factory<ExpiryPolicy> getExpiryPolicyFactoryWithDecreasingRate(final double rate) {
        return new Factory<ExpiryPolicy>() {
            @Override public ExpiryPolicy create() {
                return new RandomExpiryPolicy(rate);
            }
        };
    }

    /** Decreasing expiry policy: every call shortens the shared duration. */
    private static final ExpiryPolicy DECREASING_EXPIRY_POLICY = new ExpiryPolicy() {
        AtomicLong duration = new AtomicLong(1_000_000_000);

        @Override public Duration getExpiryForCreation() {
            return new Duration(TimeUnit.MILLISECONDS, duration.decrementAndGet());
        }

        @Override public Duration getExpiryForAccess() {
            return new Duration(TimeUnit.MILLISECONDS, duration.decrementAndGet());
        }

        @Override public Duration getExpiryForUpdate() {
            return new Duration(TimeUnit.MILLISECONDS, duration.decrementAndGet());
        }
    };

    /** Increasing expiry policy: every call lengthens the shared duration. */
    private static final ExpiryPolicy INCREASING_EXPIRY_POLICY = new ExpiryPolicy() {
        AtomicLong duration = new AtomicLong(1_000_000);

        @Override public Duration getExpiryForCreation() {
            return new Duration(TimeUnit.MILLISECONDS, duration.incrementAndGet());
        }

        @Override public Duration getExpiryForAccess() {
            return new Duration(TimeUnit.MILLISECONDS, duration.incrementAndGet());
        }

        @Override public Duration getExpiryForUpdate() {
            return new Duration(TimeUnit.MILLISECONDS, duration.incrementAndGet());
        }
    };

    /** Decreasing policy factory. */
    private static final Factory<ExpiryPolicy> DECREASING_POLICY_FACTORY = new Factory<ExpiryPolicy>() {
        @Override public ExpiryPolicy create() {
            return DECREASING_EXPIRY_POLICY;
        }
    };

    /** Increasing policy factory. */
    private static final Factory<ExpiryPolicy> INCREASING_POLICY_FACTORY = new Factory<ExpiryPolicy>() {
        @Override public ExpiryPolicy create() {
            return INCREASING_EXPIRY_POLICY;
        }
    };

    /** {@inheritDoc} */
    @Setup(Level.Iteration)
    @Override public void setup() throws Exception {
        Ignition.stopAll(true);

        super.setup();

        CacheConfiguration<Object, Object> cfg = new CacheConfiguration<>();

        cfg.setName("cache");
        cfg.setEagerTtl(true);
        cfg.setAtomicityMode(CacheAtomicityMode.TRANSACTIONAL);

        String prop = System.getProperty(POLICY_PROP);

        // Fail fast with a clear message instead of the NPE a switch on null
        // would raise when the property is not supplied.
        if (prop == null) {
            throw new IllegalStateException("System property '" + POLICY_PROP +
                "' is not set (expected \"inc\", \"dec\" or \"r<percent>\").");
        }

        switch (prop) {
            case "inc":
                cfg.setExpiryPolicyFactory(INCREASING_POLICY_FACTORY);

                break;

            case "dec":
                cfg.setExpiryPolicyFactory(DECREASING_POLICY_FACTORY);

                break;

            default:
                assert prop.charAt(0) == 'r';

                // "rNN" -> NN percent chance of shortening the duration.
                double rate = Double.parseDouble(prop.trim().substring(1)) / 100;

                cfg.setExpiryPolicyFactory(getExpiryPolicyFactoryWithDecreasingRate(rate));

                break;
        }

        node.createCache(cfg);

        cache = node.getOrCreateCache("cache");

        // try-with-resources guarantees the streamer is flushed and closed
        // even if addData throws (IgniteDataStreamer is AutoCloseable).
        try (IgniteDataStreamer<Integer, IntValue> dataLdr = node.dataStreamer(cache.getName())) {
            for (int i = 0; i < CNT; i++)
                dataLdr.addData(i, new IntValue(i));
        }

        System.out.println("Cache populated.");
    }

    /** {@inheritDoc} */
    @TearDown
    @Override public void tearDown() throws Exception {
        Ignition.stopAll(true);
    }

    /**
     * Test PUT operation.
     *
     * @throws Exception If failed.
     */
    @Benchmark
    public void put() throws Exception {
        int key = ThreadLocalRandom.current().nextInt(CNT);

        cache.put(key, new IntValue(key));
    }

    /**
     * Benchmark runner: runs every policy/thread-count combination in a forked
     * JVM and prints a compact summary line per run.
     *
     * @param args Ignored.
     * @throws RunnerException If a benchmark run fails.
     */
    public static void main(String[] args) throws RunnerException {
        List<String> policies = Arrays.asList("inc", "dec", "r25", "r50", "r75");
        int[] threads = {2, 4, 8, 16, 32};

        List<RunResult> results = new ArrayList<>();

        for (String policy : policies) {
            for (int thread : threads) {
                // Note: JMH's jvmArgs() REPLACES previously set JVM args, so a
                // single call carrying the policy property is sufficient.
                ChainedOptionsBuilder builder = new OptionsBuilder()
                    .timeUnit(TimeUnit.MILLISECONDS)
                    .measurementIterations(10)
                    .measurementTime(TimeValue.seconds(20))
                    .warmupIterations(5)
                    .warmupTime(TimeValue.seconds(10))
                    .jvmArgs("-D" + POLICY_PROP + "=" + policy)
                    .forks(1)
                    .threads(thread)
                    .mode(Mode.Throughput)
                    .include(JmhWaitStategyBenchmark.class.getSimpleName());

                results.addAll(new Runner(builder.build()).run());
            }
        }

        for (RunResult result : results) {
            BenchmarkParams params = result.getParams();

            Collection<String> jvmArgs = params.getJvmArgs();

            // Last three characters of each JVM arg identify the policy
            // (e.g. "inc", "dec", "r25").
            for (String arg : jvmArgs) {
                System.out.print(arg.substring(arg.length() - 3));
                System.out.print(" x ");
            }

            System.out.print(params.getThreads());
            System.out.print("\t\t");
            System.out.println(result.getPrimaryResult().toString());
        }
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.http; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InterruptedIOException; import java.io.PrintStream; import java.net.BindException; import java.net.InetSocketAddress; import java.net.URI; import java.net.URISyntaxException; import java.net.URL; import java.util.ArrayList; import java.util.Collections; import java.util.Enumeration; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.servlet.Filter; import javax.servlet.FilterChain; import javax.servlet.FilterConfig; import javax.servlet.ServletContext; import javax.servlet.ServletException; import javax.servlet.ServletRequest; import javax.servlet.ServletResponse; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequestWrapper; import javax.servlet.http.HttpServletResponse; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; import org.apache.hadoop.conf.Configuration; import 
org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.http.conf.ConfServlet; import org.apache.hadoop.hbase.http.jmx.JMXJsonServlet; import org.apache.hadoop.hbase.http.log.LogLevel; import org.apache.hadoop.hbase.util.Threads; import org.apache.hadoop.hbase.util.ReflectionUtils; import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.authentication.server.AuthenticationFilter; import org.apache.hadoop.security.authorize.AccessControlList; import org.apache.hadoop.util.Shell; import org.eclipse.jetty.http.HttpVersion; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.Handler; import org.eclipse.jetty.server.HttpConfiguration; import org.eclipse.jetty.server.HttpConnectionFactory; import org.eclipse.jetty.server.ServerConnector; import org.eclipse.jetty.server.SecureRequestCustomizer; import org.eclipse.jetty.server.SslConnectionFactory; import org.eclipse.jetty.server.handler.ContextHandlerCollection; import org.eclipse.jetty.server.handler.HandlerCollection; import org.eclipse.jetty.server.RequestLog; import org.eclipse.jetty.server.handler.RequestLogHandler; import org.eclipse.jetty.servlet.FilterMapping; import org.eclipse.jetty.servlet.ServletHandler; import org.eclipse.jetty.servlet.FilterHolder; import org.eclipse.jetty.servlet.ServletContextHandler; import org.eclipse.jetty.servlet.DefaultServlet; import org.eclipse.jetty.servlet.ServletHolder; import org.eclipse.jetty.util.MultiException; import org.eclipse.jetty.util.ssl.SslContextFactory; import org.eclipse.jetty.util.thread.QueuedThreadPool; import org.eclipse.jetty.webapp.WebAppContext; import org.glassfish.jersey.server.ResourceConfig; import org.glassfish.jersey.servlet.ServletContainer; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * Create a Jetty embedded server to answer http requests. The primary goal * is to serve up status information for the server. * There are three contexts: * "/logs/" -&gt; points to the log directory * "/static/" -&gt; points to common static files (src/webapps/static) * "/" -&gt; the jsp server code from (src/webapps/&lt;name&gt;) */ @InterfaceAudience.Private @InterfaceStability.Evolving public class HttpServer implements FilterContainer { private static final Log LOG = LogFactory.getLog(HttpServer.class); private static final String EMPTY_STRING = ""; private static final int DEFAULT_MAX_HEADER_SIZE = 64 * 1024; // 64K static final String FILTER_INITIALIZERS_PROPERTY = "hbase.http.filter.initializers"; static final String HTTP_MAX_THREADS = "hbase.http.max.threads"; public static final String HTTP_UI_AUTHENTICATION = "hbase.security.authentication.ui"; static final String HTTP_AUTHENTICATION_PREFIX = "hbase.security.authentication."; static final String HTTP_SPNEGO_AUTHENTICATION_PREFIX = HTTP_AUTHENTICATION_PREFIX + "spnego."; static final String HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_SUFFIX = "kerberos.principal"; public static final String HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_KEY = HTTP_SPNEGO_AUTHENTICATION_PREFIX + HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_SUFFIX; static final String HTTP_SPNEGO_AUTHENTICATION_KEYTAB_SUFFIX = "kerberos.keytab"; public static final String HTTP_SPNEGO_AUTHENTICATION_KEYTAB_KEY = HTTP_SPNEGO_AUTHENTICATION_PREFIX + HTTP_SPNEGO_AUTHENTICATION_KEYTAB_SUFFIX; static final String HTTP_SPNEGO_AUTHENTICATION_KRB_NAME_SUFFIX = "kerberos.name.rules"; public static final String HTTP_SPNEGO_AUTHENTICATION_KRB_NAME_KEY = HTTP_SPNEGO_AUTHENTICATION_PREFIX + HTTP_SPNEGO_AUTHENTICATION_KRB_NAME_SUFFIX; static final String HTTP_AUTHENTICATION_SIGNATURE_SECRET_FILE_SUFFIX = "signature.secret.file"; public static final String HTTP_AUTHENTICATION_SIGNATURE_SECRET_FILE_KEY = 
HTTP_AUTHENTICATION_PREFIX + HTTP_AUTHENTICATION_SIGNATURE_SECRET_FILE_SUFFIX; // The ServletContext attribute where the daemon Configuration // gets stored. public static final String CONF_CONTEXT_ATTRIBUTE = "hbase.conf"; public static final String ADMINS_ACL = "admins.acl"; public static final String BIND_ADDRESS = "bind.address"; public static final String SPNEGO_FILTER = "SpnegoFilter"; public static final String NO_CACHE_FILTER = "NoCacheFilter"; public static final String APP_DIR = "webapps"; private final AccessControlList adminsAcl; protected final Server webServer; protected String appDir; protected String logDir; private static class ListenerInfo { /** * Boolean flag to determine whether the HTTP server should clean up the * listener in stop(). */ private final boolean isManaged; private final ServerConnector listener; private ListenerInfo(boolean isManaged, ServerConnector listener) { this.isManaged = isManaged; this.listener = listener; } } private final List<ListenerInfo> listeners = Lists.newArrayList(); protected final WebAppContext webAppContext; protected final boolean findPort; protected final Map<ServletContextHandler, Boolean> defaultContexts = new HashMap<>(); protected final List<String> filterNames = new ArrayList<>(); static final String STATE_DESCRIPTION_ALIVE = " - alive"; static final String STATE_DESCRIPTION_NOT_LIVE = " - not live"; /** * Class to construct instances of HTTP server with specific options. 
*/ public static class Builder { private ArrayList<URI> endpoints = Lists.newArrayList(); private Configuration conf; private String[] pathSpecs; private AccessControlList adminsAcl; private boolean securityEnabled = false; private String usernameConfKey; private String keytabConfKey; private boolean needsClientAuth; private String hostName; private String appDir = APP_DIR; private String logDir; private boolean findPort; private String trustStore; private String trustStorePassword; private String trustStoreType; private String keyStore; private String keyStorePassword; private String keyStoreType; // The -keypass option in keytool private String keyPassword; private String kerberosNameRulesKey; private String signatureSecretFileKey; @Deprecated private String name; @Deprecated private String bindAddress; @Deprecated private int port = -1; /** * Add an endpoint that the HTTP server should listen to. * * @param endpoint * the endpoint of that the HTTP server should listen to. The * scheme specifies the protocol (i.e. HTTP / HTTPS), the host * specifies the binding address, and the port specifies the * listening port. Unspecified or zero port means that the server * can listen to any port. */ public Builder addEndpoint(URI endpoint) { endpoints.add(endpoint); return this; } /** * Set the hostname of the http server. The host name is used to resolve the * _HOST field in Kerberos principals. The hostname of the first listener * will be used if the name is unspecified. 
*/ public Builder hostName(String hostName) { this.hostName = hostName; return this; } public Builder trustStore(String location, String password, String type) { this.trustStore = location; this.trustStorePassword = password; this.trustStoreType = type; return this; } public Builder keyStore(String location, String password, String type) { this.keyStore = location; this.keyStorePassword = password; this.keyStoreType = type; return this; } public Builder keyPassword(String password) { this.keyPassword = password; return this; } /** * Specify whether the server should authorize the client in SSL * connections. */ public Builder needsClientAuth(boolean value) { this.needsClientAuth = value; return this; } /** * Use setAppDir() instead. */ @Deprecated public Builder setName(String name){ this.name = name; return this; } /** * Use addEndpoint() instead. */ @Deprecated public Builder setBindAddress(String bindAddress){ this.bindAddress = bindAddress; return this; } /** * Use addEndpoint() instead. 
*/ @Deprecated public Builder setPort(int port) { this.port = port; return this; } public Builder setFindPort(boolean findPort) { this.findPort = findPort; return this; } public Builder setConf(Configuration conf) { this.conf = conf; return this; } public Builder setPathSpec(String[] pathSpec) { this.pathSpecs = pathSpec; return this; } public Builder setACL(AccessControlList acl) { this.adminsAcl = acl; return this; } public Builder setSecurityEnabled(boolean securityEnabled) { this.securityEnabled = securityEnabled; return this; } public Builder setUsernameConfKey(String usernameConfKey) { this.usernameConfKey = usernameConfKey; return this; } public Builder setKeytabConfKey(String keytabConfKey) { this.keytabConfKey = keytabConfKey; return this; } public Builder setKerberosNameRulesKey(String kerberosNameRulesKey) { this.kerberosNameRulesKey = kerberosNameRulesKey; return this; } public Builder setSignatureSecretFileKey(String signatureSecretFileKey) { this.signatureSecretFileKey = signatureSecretFileKey; return this; } public Builder setAppDir(String appDir) { this.appDir = appDir; return this; } public Builder setLogDir(String logDir) { this.logDir = logDir; return this; } public HttpServer build() throws IOException { // Do we still need to assert this non null name if it is deprecated? 
if (this.name == null) { throw new HadoopIllegalArgumentException("name is not set"); } // Make the behavior compatible with deprecated interfaces if (bindAddress != null && port != -1) { try { endpoints.add(0, new URI("http", "", bindAddress, port, "", "", "")); } catch (URISyntaxException e) { throw new HadoopIllegalArgumentException("Invalid endpoint: "+ e); } } if (endpoints.isEmpty()) { throw new HadoopIllegalArgumentException("No endpoints specified"); } if (hostName == null) { hostName = endpoints.get(0).getHost(); } if (this.conf == null) { conf = new Configuration(); } HttpServer server = new HttpServer(this); if (this.securityEnabled) { server.initSpnego(conf, hostName, usernameConfKey, keytabConfKey, kerberosNameRulesKey, signatureSecretFileKey); } for (URI ep : endpoints) { ServerConnector listener = null; String scheme = ep.getScheme(); HttpConfiguration httpConfig = new HttpConfiguration(); httpConfig.setSecureScheme("https"); httpConfig.setHeaderCacheSize(DEFAULT_MAX_HEADER_SIZE); httpConfig.setResponseHeaderSize(DEFAULT_MAX_HEADER_SIZE); httpConfig.setRequestHeaderSize(DEFAULT_MAX_HEADER_SIZE); if ("http".equals(scheme)) { listener = new ServerConnector(server.webServer, new HttpConnectionFactory(httpConfig)); } else if ("https".equals(scheme)) { HttpConfiguration httpsConfig = new HttpConfiguration(httpConfig); httpsConfig.addCustomizer(new SecureRequestCustomizer()); SslContextFactory sslCtxFactory = new SslContextFactory(); sslCtxFactory.setNeedClientAuth(needsClientAuth); sslCtxFactory.setKeyManagerPassword(keyPassword); if (keyStore != null) { sslCtxFactory.setKeyStorePath(keyStore); sslCtxFactory.setKeyStoreType(keyStoreType); sslCtxFactory.setKeyStorePassword(keyStorePassword); } if (trustStore != null) { sslCtxFactory.setTrustStorePath(trustStore); sslCtxFactory.setTrustStoreType(trustStoreType); sslCtxFactory.setTrustStorePassword(trustStorePassword); } listener = new ServerConnector(server.webServer, new SslConnectionFactory(sslCtxFactory, 
HttpVersion.HTTP_1_1.toString()), new HttpConnectionFactory(httpsConfig)); } else { throw new HadoopIllegalArgumentException( "unknown scheme for endpoint:" + ep); } // default settings for connector listener.setAcceptQueueSize(128); if (Shell.WINDOWS) { // result of setting the SO_REUSEADDR flag is different on Windows // http://msdn.microsoft.com/en-us/library/ms740621(v=vs.85).aspx // without this 2 NN's can start on the same machine and listen on // the same port with indeterminate routing of incoming requests to them listener.setReuseAddress(false); } listener.setHost(ep.getHost()); listener.setPort(ep.getPort() == -1 ? 0 : ep.getPort()); server.addManagedListener(listener); } server.loadListeners(); return server; } } /** Same as this(name, bindAddress, port, findPort, null); */ @Deprecated public HttpServer(String name, String bindAddress, int port, boolean findPort ) throws IOException { this(name, bindAddress, port, findPort, new Configuration()); } /** * Create a status server on the given port. Allows you to specify the * path specifications that this server will be serving so that they will be * added to the filters properly. * * @param name The name of the server * @param bindAddress The address for this server * @param port The port to use on the server * @param findPort whether the server should start at the given port and * increment by 1 until it finds a free port. * @param conf Configuration * @param pathSpecs Path specifications that this httpserver will be serving. * These will be added to any filters. */ @Deprecated public HttpServer(String name, String bindAddress, int port, boolean findPort, Configuration conf, String[] pathSpecs) throws IOException { this(name, bindAddress, port, findPort, conf, null, pathSpecs); } /** * Create a status server on the given port. * The jsp scripts are taken from src/webapps/&lt;name&gt;. 
* @param name The name of the server * @param port The port to use on the server * @param findPort whether the server should start at the given port and * increment by 1 until it finds a free port. * @param conf Configuration */ @Deprecated public HttpServer(String name, String bindAddress, int port, boolean findPort, Configuration conf) throws IOException { this(name, bindAddress, port, findPort, conf, null, null); } @Deprecated public HttpServer(String name, String bindAddress, int port, boolean findPort, Configuration conf, AccessControlList adminsAcl) throws IOException { this(name, bindAddress, port, findPort, conf, adminsAcl, null); } /** * Create a status server on the given port. * The jsp scripts are taken from src/webapps/&lt;name&gt;. * @param name The name of the server * @param bindAddress The address for this server * @param port The port to use on the server * @param findPort whether the server should start at the given port and * increment by 1 until it finds a free port. * @param conf Configuration * @param adminsAcl {@link AccessControlList} of the admins * @param pathSpecs Path specifications that this httpserver will be serving. * These will be added to any filters. */ @Deprecated public HttpServer(String name, String bindAddress, int port, boolean findPort, Configuration conf, AccessControlList adminsAcl, String[] pathSpecs) throws IOException { this(new Builder().setName(name) .addEndpoint(URI.create("http://" + bindAddress + ":" + port)) .setFindPort(findPort).setConf(conf).setACL(adminsAcl) .setPathSpec(pathSpecs)); } private HttpServer(final Builder b) throws IOException { this.appDir = b.appDir; this.logDir = b.logDir; final String appDir = getWebAppsPath(b.name); int maxThreads = b.conf.getInt(HTTP_MAX_THREADS, 16); // If HTTP_MAX_THREADS is less than or equal to 0, QueueThreadPool() will use the // default value (currently 200). QueuedThreadPool threadPool = maxThreads <= 0 ? 
new QueuedThreadPool() : new QueuedThreadPool(maxThreads); threadPool.setDaemon(true); this.webServer = new Server(threadPool); this.adminsAcl = b.adminsAcl; this.webAppContext = createWebAppContext(b.name, b.conf, adminsAcl, appDir); this.findPort = b.findPort; initializeWebServer(b.name, b.hostName, b.conf, b.pathSpecs); } private void initializeWebServer(String name, String hostName, Configuration conf, String[] pathSpecs) throws FileNotFoundException, IOException { Preconditions.checkNotNull(webAppContext); HandlerCollection handlerCollection = new HandlerCollection(); ContextHandlerCollection contexts = new ContextHandlerCollection(); RequestLog requestLog = HttpRequestLog.getRequestLog(name); if (requestLog != null) { RequestLogHandler requestLogHandler = new RequestLogHandler(); requestLogHandler.setRequestLog(requestLog); handlerCollection.addHandler(requestLogHandler); } final String appDir = getWebAppsPath(name); handlerCollection.addHandler(contexts); handlerCollection.addHandler(webAppContext); webServer.setHandler(handlerCollection); addDefaultApps(contexts, appDir, conf); addGlobalFilter("safety", QuotingInputFilter.class.getName(), null); Map<String, String> params = new HashMap<>(); params.put("xframeoptions", conf.get("hbase.http.filter.xframeoptions.mode", "DENY")); addGlobalFilter("clickjackingprevention", ClickjackingPreventionFilter.class.getName(), params); final FilterInitializer[] initializers = getFilterInitializers(conf); if (initializers != null) { conf = new Configuration(conf); conf.set(BIND_ADDRESS, hostName); for (FilterInitializer c : initializers) { c.initFilter(this, conf); } } addDefaultServlets(); if (pathSpecs != null) { for (String path : pathSpecs) { LOG.info("adding path spec: " + path); addFilterPathMapping(path, webAppContext); } } } private void addManagedListener(ServerConnector connector) { listeners.add(new ListenerInfo(true, connector)); } private static WebAppContext createWebAppContext(String name, Configuration 
conf, AccessControlList adminsAcl, final String appDir) { WebAppContext ctx = new WebAppContext(); ctx.setDisplayName(name); ctx.setContextPath("/"); ctx.setWar(appDir + "/" + name); ctx.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf); ctx.getServletContext().setAttribute(ADMINS_ACL, adminsAcl); addNoCacheFilter(ctx); return ctx; } private static void addNoCacheFilter(WebAppContext ctxt) { defineFilter(ctxt, NO_CACHE_FILTER, NoCacheFilter.class.getName(), Collections.<String, String> emptyMap(), new String[] { "/*" }); } /** Get an array of FilterConfiguration specified in the conf */ private static FilterInitializer[] getFilterInitializers(Configuration conf) { if (conf == null) { return null; } Class<?>[] classes = conf.getClasses(FILTER_INITIALIZERS_PROPERTY); if (classes == null) { return null; } FilterInitializer[] initializers = new FilterInitializer[classes.length]; for(int i = 0; i < classes.length; i++) { initializers[i] = (FilterInitializer)ReflectionUtils.newInstance(classes[i]); } return initializers; } /** * Add default apps. * @param appDir The application directory * @throws IOException */ protected void addDefaultApps(ContextHandlerCollection parent, final String appDir, Configuration conf) throws IOException { // set up the context for "/logs/" if "hadoop.log.dir" property is defined. 
String logDir = this.logDir; if (logDir == null) { logDir = System.getProperty("hadoop.log.dir"); } if (logDir != null) { ServletContextHandler logContext = new ServletContextHandler(parent, "/logs"); logContext.addServlet(AdminAuthorizedServlet.class, "/*"); logContext.setResourceBase(logDir); if (conf.getBoolean( ServerConfigurationKeys.HBASE_JETTY_LOGS_SERVE_ALIASES, ServerConfigurationKeys.DEFAULT_HBASE_JETTY_LOGS_SERVE_ALIASES)) { Map<String, String> params = logContext.getInitParams(); params.put( "org.mortbay.jetty.servlet.Default.aliases", "true"); } logContext.setDisplayName("logs"); setContextAttributes(logContext, conf); addNoCacheFilter(webAppContext); defaultContexts.put(logContext, true); } // set up the context for "/static/*" ServletContextHandler staticContext = new ServletContextHandler(parent, "/static"); staticContext.setResourceBase(appDir + "/static"); staticContext.addServlet(DefaultServlet.class, "/*"); staticContext.setDisplayName("static"); setContextAttributes(staticContext, conf); defaultContexts.put(staticContext, true); } private void setContextAttributes(ServletContextHandler context, Configuration conf) { context.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf); context.getServletContext().setAttribute(ADMINS_ACL, adminsAcl); } /** * Add default servlets. */ protected void addDefaultServlets() { // set up default servlets addServlet("stacks", "/stacks", StackServlet.class); addServlet("logLevel", "/logLevel", LogLevel.Servlet.class); // Hadoop3 has moved completely to metrics2, and dropped support for Metrics v1's // MetricsServlet (see HADOOP-12504). We'll using reflection to load if against hadoop2. // Remove when we drop support for hbase on hadoop2.x. 
try { Class clz = Class.forName("org.apache.hadoop.metrics.MetricsServlet"); addServlet("metrics", "/metrics", clz); } catch (Exception e) { // do nothing } addServlet("jmx", "/jmx", JMXJsonServlet.class); addServlet("conf", "/conf", ConfServlet.class); } /** * Set a value in the webapp context. These values are available to the jsp * pages as "application.getAttribute(name)". * @param name The name of the attribute * @param value The value of the attribute */ public void setAttribute(String name, Object value) { webAppContext.setAttribute(name, value); } /** * Add a Jersey resource package. * @param packageName The Java package name containing the Jersey resource. * @param pathSpec The path spec for the servlet */ public void addJerseyResourcePackage(final String packageName, final String pathSpec) { LOG.info("addJerseyResourcePackage: packageName=" + packageName + ", pathSpec=" + pathSpec); ResourceConfig application = new ResourceConfig().packages(packageName); final ServletHolder sh = new ServletHolder(new ServletContainer(application)); webAppContext.addServlet(sh, pathSpec); } /** * Add a servlet in the server. * @param name The name of the servlet (can be passed as null) * @param pathSpec The path spec for the servlet * @param clazz The servlet class */ public void addServlet(String name, String pathSpec, Class<? extends HttpServlet> clazz) { addInternalServlet(name, pathSpec, clazz, false); addFilterPathMapping(pathSpec, webAppContext); } /** * Add an internal servlet in the server. * Note: This method is to be used for adding servlets that facilitate * internal communication and not for user facing functionality. For * servlets added using this method, filters are not enabled. * * @param name The name of the servlet (can be passed as null) * @param pathSpec The path spec for the servlet * @param clazz The servlet class */ public void addInternalServlet(String name, String pathSpec, Class<? 
  extends HttpServlet> clazz) {
    addInternalServlet(name, pathSpec, clazz, false);
  }

  /**
   * Add an internal servlet in the server, specifying whether or not to
   * protect with Kerberos authentication.
   * Note: This method is to be used for adding servlets that facilitate
   * internal communication and not for user facing functionality. For
   * servlets added using this method, filters (except internal Kerberos
   * filters) are not enabled.
   *
   * @param name The name of the servlet (can be passed as null)
   * @param pathSpec The path spec for the servlet
   * @param clazz The servlet class
   * @param requireAuth Require Kerberos authenticate to access servlet
   */
  public void addInternalServlet(String name, String pathSpec,
      Class<? extends HttpServlet> clazz, boolean requireAuth) {
    ServletHolder holder = new ServletHolder(clazz);
    if (name != null) {
      holder.setName(name);
    }
    webAppContext.addServlet(holder, pathSpec);

    // Only wire up the SPNEGO filter when the caller asked for auth AND
    // security is actually enabled in this deployment.
    if(requireAuth && UserGroupInformation.isSecurityEnabled()) {
      LOG.info("Adding Kerberos (SPNEGO) filter to " + name);
      ServletHandler handler = webAppContext.getServletHandler();
      FilterMapping fmap = new FilterMapping();
      fmap.setPathSpec(pathSpec);
      fmap.setFilterName(SPNEGO_FILTER);
      fmap.setDispatches(FilterMapping.ALL);
      handler.addFilterMapping(fmap);
    }
  }

  @Override
  public void addFilter(String name, String classname,
      Map<String, String> parameters) {
    // On the main webapp context the filter is mapped only to user-facing
    // content (HTML/JSP) ...
    final String[] USER_FACING_URLS = { "*.html", "*.jsp" };
    defineFilter(webAppContext, name, classname, parameters, USER_FACING_URLS);
    LOG.info("Added filter " + name + " (class=" + classname
        + ") to context " + webAppContext.getDisplayName());
    // ... while on the auxiliary (default) contexts that opted in it is
    // mapped to every URL.
    final String[] ALL_URLS = { "/*" };
    for (Map.Entry<ServletContextHandler, Boolean> e
        : defaultContexts.entrySet()) {
      if (e.getValue()) {
        ServletContextHandler handler = e.getKey();
        defineFilter(handler, name, classname, parameters, ALL_URLS);
        LOG.info("Added filter " + name + " (class=" + classname
            + ") to context " + handler.getDisplayName());
      }
    }
    // Remember the name so later servlet registrations can be mapped to
    // this filter via addFilterPathMapping().
    filterNames.add(name);
  }

  @Override
  public void addGlobalFilter(String name, String classname,
      Map<String, String> parameters) {
    // Global filters apply to every URL on the main context and on all
    // default contexts, unconditionally.
    final String[] ALL_URLS = { "/*" };
    defineFilter(webAppContext, name, classname, parameters, ALL_URLS);
    for (ServletContextHandler ctx : defaultContexts.keySet()) {
      defineFilter(ctx, name, classname, parameters, ALL_URLS);
    }
    LOG.info("Added global filter '" + name + "' (class=" + classname + ")");
  }

  /**
   * Define a filter for a context and set up default url mappings.
   */
  public static void defineFilter(ServletContextHandler handler, String name,
      String classname, Map<String,String> parameters, String[] urls) {
    FilterHolder holder = new FilterHolder();
    holder.setName(name);
    holder.setClassName(classname);
    if (parameters != null) {
      holder.setInitParameters(parameters);
    }
    FilterMapping fmap = new FilterMapping();
    fmap.setPathSpecs(urls);
    fmap.setDispatches(FilterMapping.ALL);
    fmap.setFilterName(name);
    handler.getServletHandler().addFilter(holder, fmap);
  }

  /**
   * Add the path spec to the filter path mapping.
   * @param pathSpec The path spec
   * @param webAppCtx The WebApplicationContext to add to
   */
  protected void addFilterPathMapping(String pathSpec,
      WebAppContext webAppCtx) {
    // Map every previously registered (non-global) filter onto this path.
    for(String name : filterNames) {
      FilterMapping fmap = new FilterMapping();
      fmap.setPathSpec(pathSpec);
      fmap.setFilterName(name);
      fmap.setDispatches(FilterMapping.ALL);
      webAppCtx.getServletHandler().addFilterMapping(fmap);
    }
  }

  /**
   * Get the value in the webapp context.
   * @param name The name of the attribute
   * @return The value of the attribute
   */
  public Object getAttribute(String name) {
    return webAppContext.getAttribute(name);
  }

  public WebAppContext getWebAppContext(){
    return this.webAppContext;
  }

  public String getWebAppsPath(String appName) throws FileNotFoundException {
    return getWebAppsPath(this.appDir, appName);
  }

  /**
   * Get the pathname to the webapps files.
* @param appName eg "secondary" or "datanode" * @return the pathname as a URL * @throws FileNotFoundException if 'webapps' directory cannot be found on CLASSPATH. */ protected String getWebAppsPath(String webapps, String appName) throws FileNotFoundException { URL url = getClass().getClassLoader().getResource(webapps + "/" + appName); if (url == null) throw new FileNotFoundException(webapps + "/" + appName + " not found in CLASSPATH"); String urlString = url.toString(); return urlString.substring(0, urlString.lastIndexOf('/')); } /** * Get the port that the server is on * @return the port */ @Deprecated public int getPort() { return ((ServerConnector)webServer.getConnectors()[0]).getLocalPort(); } /** * Get the address that corresponds to a particular connector. * * @return the corresponding address for the connector, or null if there's no * such connector or the connector is not bounded. */ public InetSocketAddress getConnectorAddress(int index) { Preconditions.checkArgument(index >= 0); if (index > webServer.getConnectors().length) return null; ServerConnector c = (ServerConnector)webServer.getConnectors()[index]; if (c.getLocalPort() == -1 || c.getLocalPort() == -2) { // -1 if the connector has not been opened // -2 if it has been closed return null; } return new InetSocketAddress(c.getHost(), c.getLocalPort()); } /** * Set the min, max number of worker threads (simultaneous connections). 
   */
  public void setThreads(int min, int max) {
    QueuedThreadPool pool = (QueuedThreadPool) webServer.getThreadPool();
    pool.setMinThreads(min);
    pool.setMaxThreads(max);
  }

  /**
   * Build the SPNEGO (Kerberos) authentication parameters from configuration
   * and install the authentication filter as a global filter.
   *
   * @throws IllegalArgumentException if the principal or keytab is missing
   *           from the configuration
   */
  private void initSpnego(Configuration conf, String hostName,
      String usernameConfKey, String keytabConfKey, String kerberosNameRuleKey,
      String signatureSecretKeyFileKey) throws IOException {
    Map<String, String> params = new HashMap<>();
    String principalInConf = getOrEmptyString(conf, usernameConfKey);
    if (!principalInConf.isEmpty()) {
      // NOTE(review): presumably substitutes this host's name into the
      // configured principal pattern — verify against SecurityUtil docs.
      params.put(HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_SUFFIX,
          SecurityUtil.getServerPrincipal(principalInConf, hostName));
    }
    String httpKeytab = getOrEmptyString(conf, keytabConfKey);
    if (!httpKeytab.isEmpty()) {
      params.put(HTTP_SPNEGO_AUTHENTICATION_KEYTAB_SUFFIX, httpKeytab);
    }
    String kerberosNameRule = getOrEmptyString(conf, kerberosNameRuleKey);
    if (!kerberosNameRule.isEmpty()) {
      params.put(HTTP_SPNEGO_AUTHENTICATION_KRB_NAME_SUFFIX, kerberosNameRule);
    }
    String signatureSecretKeyFile =
        getOrEmptyString(conf, signatureSecretKeyFileKey);
    if (!signatureSecretKeyFile.isEmpty()) {
      params.put(HTTP_AUTHENTICATION_SIGNATURE_SECRET_FILE_SUFFIX,
          signatureSecretKeyFile);
    }
    params.put(AuthenticationFilter.AUTH_TYPE, "kerberos");

    // Verify that the required options were provided
    if (isMissing(params.get(HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_SUFFIX)) ||
        isMissing(params.get(HTTP_SPNEGO_AUTHENTICATION_KEYTAB_SUFFIX))) {
      throw new IllegalArgumentException(usernameConfKey + " and "
          + keytabConfKey + " are both required in the configuration "
          + "to enable SPNEGO/Kerberos authentication for the Web UI");
    }

    addGlobalFilter(SPNEGO_FILTER, AuthenticationFilter.class.getName(),
        params);
  }

  /**
   * Returns true if the argument is null or contains only whitespace, i.e.
   * the value is "missing". (The previous javadoc described the opposite of
   * what the method does.)
   */
  private boolean isMissing(String value) {
    if (null == value) {
      return true;
    }
    return value.trim().isEmpty();
  }

  /**
   * Extracts the value for the given key from the configuration or returns a
   * string of
   * zero length.
   */
  private String getOrEmptyString(Configuration conf, String key) {
    if (null == key) {
      return EMPTY_STRING;
    }
    final String value = conf.get(key.trim());
    return null == value ? EMPTY_STRING : value;
  }

  /**
   * Start the server. Does not wait for the server to start.
   */
  public void start() throws IOException {
    try {
      try {
        openListeners();
        webServer.start();
      } catch (IOException ex) {
        LOG.info("HttpServer.start() threw a non Bind IOException", ex);
        throw ex;
      } catch (MultiException ex) {
        LOG.info("HttpServer.start() threw a MultiException", ex);
        throw ex;
      }
      // Make sure there is no handler failures.
      Handler[] handlers = webServer.getHandlers();
      for (int i = 0; i < handlers.length; i++) {
        if (handlers[i].isFailed()) {
          throw new IOException(
              "Problem in starting http server. Server handlers failed");
        }
      }
      // Make sure there are no errors initializing the context.
      Throwable unavailableException = webAppContext.getUnavailableException();
      if (unavailableException != null) {
        // Have to stop the webserver, or else its non-daemon threads
        // will hang forever.
        webServer.stop();
        throw new IOException("Unable to initialize WebAppContext",
            unavailableException);
      }
    } catch (IOException e) {
      throw e;
    } catch (InterruptedException e) {
      // Surface the interrupt as an IOException so callers only deal with
      // one checked exception type; the cause is preserved.
      throw (IOException) new InterruptedIOException(
          "Interrupted while starting HTTP server").initCause(e);
    } catch (Exception e) {
      throw new IOException("Problem starting http server", e);
    }
  }

  // Register every configured listener with the Jetty server.
  private void loadListeners() {
    for (ListenerInfo li : listeners) {
      webServer.addConnector(li.listener);
    }
  }

  /**
   * Open the main listener for the server
   * @throws Exception
   */
  void openListeners() throws Exception {
    for (ListenerInfo li : listeners) {
      ServerConnector listener = li.listener;
      if (!li.isManaged || (li.listener.getLocalPort() != -1
          && li.listener.getLocalPort() != -2)) {
        // This listener is either started externally, or has not been opened,
        // or has been closed
        continue;
      }
      int port = listener.getPort();
      while (true) {
        // jetty has a bug where you can't reopen a listener that previously
        // failed to open w/o issuing a close first, even if the port is changed
        try {
          listener.close();
          listener.open();
          LOG.info("Jetty bound to port " + listener.getLocalPort());
          break;
        } catch (BindException ex) {
          if (port == 0 || !findPort) {
            // Either an ephemeral port was requested (retrying the same
            // request is pointless) or port scanning is disabled.
            BindException be = new BindException("Port in use: "
                + listener.getHost() + ":" + listener.getPort());
            be.initCause(ex);
            throw be;
          }
        }
        // try the next port number
        listener.setPort(++port);
        Thread.sleep(100);
      }
    }
  }

  /**
   * stop the server
   */
  public void stop() throws Exception {
    // Best-effort shutdown: every step is attempted, failures are collected
    // into a MultiException and thrown at the end.
    MultiException exception = null;
    for (ListenerInfo li : listeners) {
      if (!li.isManaged) {
        continue;
      }
      try {
        li.listener.close();
      } catch (Exception e) {
        LOG.error(
            "Error while stopping listener for webapp"
                + webAppContext.getDisplayName(), e);
        exception = addMultiException(exception, e);
      }
    }
    try {
      // clear & stop webAppContext attributes to avoid memory leaks.
      webAppContext.clearAttributes();
      webAppContext.stop();
    } catch (Exception e) {
      LOG.error("Error while stopping web app context for webapp "
          + webAppContext.getDisplayName(), e);
      exception = addMultiException(exception, e);
    }
    try {
      webServer.stop();
    } catch (Exception e) {
      LOG.error("Error while stopping web server for webapp "
          + webAppContext.getDisplayName(), e);
      exception = addMultiException(exception, e);
    }
    if (exception != null) {
      exception.ifExceptionThrow();
    }
  }

  // Lazily create the MultiException on the first failure and accumulate.
  private MultiException addMultiException(MultiException exception, Exception e) {
    if(exception == null){
      exception = new MultiException();
    }
    exception.add(e);
    return exception;
  }

  public void join() throws InterruptedException {
    webServer.join();
  }

  /**
   * Test for the availability of the web server
   * @return true if the web server is started, false otherwise
   */
  public boolean isAlive() {
    return webServer != null && webServer.isStarted();
  }

  /**
   * Return the host and port of the HttpServer, if live
   * @return the classname and any HTTP URL
   */
  @Override
  public String toString() {
    if (listeners.isEmpty()) {
      return "Inactive HttpServer";
    } else {
      StringBuilder sb = new StringBuilder("HttpServer (")
          .append(isAlive() ? STATE_DESCRIPTION_ALIVE
              : STATE_DESCRIPTION_NOT_LIVE).append("), listening at:");
      for (ListenerInfo li : listeners) {
        ServerConnector l = li.listener;
        sb.append(l.getHost()).append(":").append(l.getPort()).append("/,");
      }
      return sb.toString();
    }
  }

  /**
   * Checks the user has privileges to access to instrumentation servlets.
   * <p>
   * If <code>hadoop.security.instrumentation.requires.admin</code> is set to FALSE
   * (default value) it always returns TRUE.
   * </p><p>
   * If <code>hadoop.security.instrumentation.requires.admin</code> is set to TRUE
   * it will check that if the current user is in the admin ACLS. If the user is
   * in the admin ACLs it returns TRUE, otherwise it returns FALSE.
   * </p>
   *
   * @param servletContext the servlet context.
   * @param request the servlet request.
   * @param response the servlet response.
   * @return TRUE/FALSE based on the logic described above.
   */
  public static boolean isInstrumentationAccessAllowed(
      ServletContext servletContext, HttpServletRequest request,
      HttpServletResponse response) throws IOException {
    Configuration conf =
        (Configuration) servletContext.getAttribute(CONF_CONTEXT_ATTRIBUTE);

    boolean access = true;
    boolean adminAccess = conf.getBoolean(
        CommonConfigurationKeys.HADOOP_SECURITY_INSTRUMENTATION_REQUIRES_ADMIN,
        false);
    if (adminAccess) {
      access = hasAdministratorAccess(servletContext, request, response);
    }
    return access;
  }

  /**
   * Does the user sending the HttpServletRequest have the administrator ACLs?
   * If it isn't the case, response will be modified to send an error to the
   * user.
   *
   * @param servletContext the servlet context.
   * @param request the servlet request.
   * @param response used to send the error response if user does not have
   *          admin access.
   * @return true if admin-authorized, false otherwise
   * @throws IOException if sending the error response fails
   */
  public static boolean hasAdministratorAccess(
      ServletContext servletContext, HttpServletRequest request,
      HttpServletResponse response) throws IOException {
    Configuration conf =
        (Configuration) servletContext.getAttribute(CONF_CONTEXT_ATTRIBUTE);
    // If there is no authorization, anybody has administrator access.
    if (!conf.getBoolean(
        CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, false)) {
      return true;
    }

    String remoteUser = request.getRemoteUser();
    if (remoteUser == null) {
      response.sendError(HttpServletResponse.SC_UNAUTHORIZED,
          "Unauthenticated users are not "
              + "authorized to access this page.");
      return false;
    }

    // An absent ADMINS_ACL attribute means no ACL restriction is configured.
    if (servletContext.getAttribute(ADMINS_ACL) != null
        && !userHasAdministratorAccess(servletContext, remoteUser)) {
      response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "User "
          + remoteUser + " is unauthorized to access this page.");
      return false;
    }

    return true;
  }

  /**
   * Get the admin ACLs from the given ServletContext and check if the given
   * user is in the ACL.
   *
   * @param servletContext the context containing the admin ACL.
   * @param remoteUser the remote user to check for.
   * @return true if the user is present in the ACL, false if no ACL is set or
   *         the user is not present
   */
  public static boolean userHasAdministratorAccess(ServletContext servletContext,
      String remoteUser) {
    AccessControlList adminsAcl = (AccessControlList) servletContext
        .getAttribute(ADMINS_ACL);
    UserGroupInformation remoteUserUGI =
        UserGroupInformation.createRemoteUser(remoteUser);
    return adminsAcl != null && adminsAcl.isUserAllowed(remoteUserUGI);
  }

  /**
   * A very simple servlet to serve up a text representation of the current
   * stack traces. It both returns the stacks to the caller and logs them.
   * Currently the stack traces are done sequentially rather than exactly
   * the same instant.
   */
  public static class StackServlet extends HttpServlet {
    private static final long serialVersionUID = -6284183679759467039L;

    @Override
    public void doGet(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
      // Instrumentation pages may be restricted to admins; bail out early if
      // the caller is not allowed (the helper already sent the error).
      if (!HttpServer.isInstrumentationAccessAllowed(getServletContext(),
          request, response)) {
        return;
      }
      response.setContentType("text/plain; charset=UTF-8");
      try (PrintStream out = new PrintStream(
          response.getOutputStream(), false, "UTF-8")) {
        Threads.printThreadInfo(out, "");
        out.flush();
      }
      ReflectionUtils.logThreadInfo(LOG, "jsp requested", 1);
    }
  }

  /**
   * A Servlet input filter that quotes all HTML active characters in the
   * parameter names and values. The goal is to quote the characters to make
   * all of the servlets resistant to cross-site scripting attacks.
   */
  @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
  public static class QuotingInputFilter implements Filter {
    private FilterConfig config;

    /**
     * Request wrapper that HTML-quotes parameter names/values and the
     * request URL / server name as they are read.
     */
    public static class RequestQuoter extends HttpServletRequestWrapper {
      private final HttpServletRequest rawRequest;

      public RequestQuoter(HttpServletRequest rawRequest) {
        super(rawRequest);
        this.rawRequest = rawRequest;
      }

      /**
       * Return the set of parameter names, quoting each name.
       */
      @Override
      public Enumeration<String> getParameterNames() {
        return new Enumeration<String>() {
          private Enumeration<String> rawIterator =
              rawRequest.getParameterNames();

          @Override
          public boolean hasMoreElements() {
            return rawIterator.hasMoreElements();
          }

          @Override
          public String nextElement() {
            return HtmlQuoting.quoteHtmlChars(rawIterator.nextElement());
          }
        };
      }

      /**
       * Unquote the name and quote the value.
       */
      @Override
      public String getParameter(String name) {
        return HtmlQuoting.quoteHtmlChars(rawRequest.getParameter
            (HtmlQuoting.unquoteHtmlChars(name)));
      }

      @Override
      public String[] getParameterValues(String name) {
        String unquoteName = HtmlQuoting.unquoteHtmlChars(name);
        String[] unquoteValue = rawRequest.getParameterValues(unquoteName);
        if (unquoteValue == null) {
          return null;
        }
        String[] result = new String[unquoteValue.length];
        for(int i=0; i < result.length; ++i) {
          result[i] = HtmlQuoting.quoteHtmlChars(unquoteValue[i]);
        }
        return result;
      }

      @Override
      public Map<String, String[]> getParameterMap() {
        Map<String, String[]> result = new HashMap<>();
        Map<String, String[]> raw = rawRequest.getParameterMap();
        for (Map.Entry<String,String[]> item: raw.entrySet()) {
          String[] rawValue = item.getValue();
          String[] cookedValue = new String[rawValue.length];
          for(int i=0; i< rawValue.length; ++i) {
            cookedValue[i] = HtmlQuoting.quoteHtmlChars(rawValue[i]);
          }
          result.put(HtmlQuoting.quoteHtmlChars(item.getKey()), cookedValue);
        }
        return result;
      }

      /**
       * Quote the url so that users specifying the HOST HTTP header
       * can't inject attacks.
       */
      @Override
      public StringBuffer getRequestURL(){
        String url = rawRequest.getRequestURL().toString();
        return new StringBuffer(HtmlQuoting.quoteHtmlChars(url));
      }

      /**
       * Quote the server name so that users specifying the HOST HTTP header
       * can't inject attacks.
       */
      @Override
      public String getServerName() {
        return HtmlQuoting.quoteHtmlChars(rawRequest.getServerName());
      }
    }

    @Override
    public void init(FilterConfig config) throws ServletException {
      this.config = config;
    }

    @Override
    public void destroy() {
    }

    @Override
    public void doFilter(ServletRequest request, ServletResponse response,
        FilterChain chain) throws IOException, ServletException {
      HttpServletRequestWrapper quoted =
          new RequestQuoter((HttpServletRequest) request);
      HttpServletResponse httpResponse = (HttpServletResponse) response;

      String mime = inferMimeType(request);
      if (mime == null) {
        httpResponse.setContentType("text/plain; charset=utf-8");
      } else if (mime.startsWith("text/html")) {
        // HTML with unspecified encoding, we want to
        // force HTML with utf-8 encoding
        // This is to avoid the following security issue:
        // http://openmya.hacker.jp/hasegawa/security/utf7cs.html
        httpResponse.setContentType("text/html; charset=utf-8");
      } else if (mime.startsWith("application/xml")) {
        httpResponse.setContentType("text/xml; charset=utf-8");
      }
      chain.doFilter(quoted, httpResponse);
    }

    /**
     * Infer the mime type for the response based on the extension of the
     * request URI. Returns null if unknown.
     */
    private String inferMimeType(ServletRequest request) {
      String path = ((HttpServletRequest)request).getRequestURI();
      ServletContext context = config.getServletContext();
      return context.getMimeType(path);
    }
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.tez.dag.api; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceAudience.Public; import org.apache.hadoop.yarn.api.records.LocalResource; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.tez.common.TezCommonUtils; import org.apache.tez.dag.api.VertexGroup.GroupInfo; import org.apache.tez.dag.api.TaskLocationHint; import org.apache.tez.runtime.api.LogicalIOProcessor; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import com.google.common.collect.Maps; /** * Defines a vertex in the DAG. It represents the application logic that * processes and transforms the input data to create the output data. The * vertex represents the template from which tasks are created to execute * the application in parallel across a distributed execution environment. 
 */
@Public
public class Vertex {

  private final String vertexName;
  private final ProcessorDescriptor processorDescriptor;
  // Number of tasks; -1 means "decided at runtime".
  private int parallelism;
  private VertexLocationHint locationHint;
  private Resource taskResource;
  private final Map<String, LocalResource> taskLocalResources =
      new HashMap<String, LocalResource>();
  private Map<String, String> taskEnvironment = new HashMap<String, String>();
  private final List<RootInputLeafOutput<InputDescriptor, InputInitializerDescriptor>>
      additionalInputs =
          new ArrayList<RootInputLeafOutput<InputDescriptor, InputInitializerDescriptor>>();
  private final List<RootInputLeafOutput<OutputDescriptor, OutputCommitterDescriptor>>
      additionalOutputs =
          new ArrayList<RootInputLeafOutput<OutputDescriptor, OutputCommitterDescriptor>>();
  private VertexManagerPluginDescriptor vertexManagerPlugin;

  // DAG topology; populated as edges/groups are added to the enclosing DAG.
  private final List<Vertex> inputVertices = new ArrayList<Vertex>();
  private final List<Vertex> outputVertices = new ArrayList<Vertex>();
  private final List<Edge> inputEdges = new ArrayList<Edge>();
  private final List<Edge> outputEdges = new ArrayList<Edge>();
  private final Map<String, GroupInfo> groupInputs = Maps.newHashMap();
  private final List<DataSourceDescriptor> dataSources = Lists.newLinkedList();
  private final List<DataSinkDescriptor> dataSinks = Lists.newLinkedList();

  private String taskLaunchCmdOpts = "";

  @InterfaceAudience.Private
  Vertex(String vertexName,
      ProcessorDescriptor processorDescriptor,
      int parallelism,
      Resource taskResource) {
    this(vertexName, processorDescriptor, parallelism, taskResource, false);
  }

  private Vertex(String vertexName,
      ProcessorDescriptor processorDescriptor,
      int parallelism) {
    this(vertexName, processorDescriptor, parallelism, null, true);
  }

  private Vertex(String vertexName, ProcessorDescriptor processorDescriptor) {
    this(vertexName, processorDescriptor, -1);
  }

  // allowIncomplete==true permits a null task resource (to be filled in
  // later, e.g. from configuration); parallelism -1 means "determined at
  // runtime".
  private Vertex(String vertexName,
      ProcessorDescriptor processorDescriptor,
      int parallelism,
      Resource taskResource,
      boolean allowIncomplete) {
    this.vertexName = vertexName;
    this.processorDescriptor = processorDescriptor;
    this.parallelism = parallelism;
    this.taskResource = taskResource;
    if (parallelism < -1) {
      throw new IllegalArgumentException(
          "Parallelism should be -1 if determined by the AM"
              + ", otherwise should be >= 0");
    }
    if (!allowIncomplete && taskResource == null) {
      throw new IllegalArgumentException("Resource cannot be null");
    }
  }

  /**
   * Create a new vertex with the given name.
   *
   * @param vertexName
   *          Name of the vertex
   * @param processorDescriptor
   *          Description of the processor that is executed in every task of
   *          this vertex
   * @param parallelism
   *          Number of tasks in this vertex. Set to -1 if this is going to be
   *          decided at runtime. Parallelism may change at runtime due to
   *          graph reconfigurations.
   * @param taskResource
   *          Physical resources like memory/cpu that are used by each task of
   *          this vertex.
   * @return a new Vertex with the given parameters
   */
  public static Vertex create(String vertexName,
      ProcessorDescriptor processorDescriptor,
      int parallelism,
      Resource taskResource) {
    return new Vertex(vertexName, processorDescriptor, parallelism,
        taskResource);
  }

  /**
   * Create a new vertex with the given name. <br>
   * The vertex task resource will be picked from configuration <br>
   * The vertex parallelism will be inferred. If it cannot be inferred then an
   * error will be reported. This constructor may be used for vertices that
   * have data sources, or connected via 1-1 edges or have runtime parallelism
   * estimation via data source initializers or vertex managers. Calling this
   * constructor is equivalent to calling
   * {@link Vertex#Vertex(String, ProcessorDescriptor, int)} with the
   * parallelism set to -1.
   *
   * @param vertexName
   *          Name of the vertex
   * @param processorDescriptor
   *          Description of the processor that is executed in every task of
   *          this vertex
   * @return a new Vertex with the given parameters
   */
  public static Vertex create(String vertexName,
      ProcessorDescriptor processorDescriptor) {
    return new Vertex(vertexName, processorDescriptor);
  }

  /**
   * Create a new vertex with the given name and parallelism. <br>
   * The vertex task resource will be picked from configuration
   * {@link TezConfiguration#TEZ_TASK_RESOURCE_MEMORY_MB} and
   * {@link TezConfiguration#TEZ_TASK_RESOURCE_CPU_VCORES}. Applications that
   * want more control over their task resource specification may create their
   * own logic to determine task resources and use
   * {@link Vertex#Vertex(String, ProcessorDescriptor, int, Resource)} to
   * create the Vertex.
   *
   * @param vertexName
   *          Name of the vertex
   * @param processorDescriptor
   *          Description of the processor that is executed in every task of
   *          this vertex
   * @param parallelism
   *          Number of tasks in this vertex. Set to -1 if this is going to be
   *          decided at runtime. Parallelism may change at runtime due to
   *          graph reconfigurations.
   * @return a new Vertex with the given parameters
   */
  public static Vertex create(String vertexName,
      ProcessorDescriptor processorDescriptor,
      int parallelism) {
    return new Vertex(vertexName, processorDescriptor, parallelism);
  }

  /**
   * Get the vertex name
   * @return vertex name
   */
  public String getName() {
    return vertexName;
  }

  /**
   * Get the vertex task processor descriptor
   * @return process descriptor
   */
  public ProcessorDescriptor getProcessorDescriptor() {
    return this.processorDescriptor;
  }

  /**
   * Get the specified number of tasks specified to run in this vertex. It may
   * be -1 if the parallelism is defined at runtime.
   Parallelism may change at
   * runtime
   * @return vertex parallelism
   */
  public int getParallelism() {
    return parallelism;
  }

  /**
   * Set the number of tasks for this vertex
   * @param parallelism Parallelism for this vertex
   */
  void setParallelism(int parallelism) {
    this.parallelism = parallelism;
  }

  /**
   * Get the resources for the vertex
   * @return the physical resources like cpu/memory of each vertex task
   */
  public Resource getTaskResource() {
    return taskResource;
  }

  /**
   * Specify location hints for the tasks of this vertex. Hints must be
   * specified for all tasks as defined by the parallelism
   * @param locationHint list of locations for each task in the vertex
   * @return this Vertex
   */
  public Vertex setLocationHint(VertexLocationHint locationHint) {
    List<TaskLocationHint> locations = locationHint.getTaskLocationHints();
    if (locations == null) {
      // No per-task hints supplied; nothing to validate or record.
      return this;
    }
    Preconditions.checkArgument((locations.size() == parallelism),
        "Locations array length must match the parallelism set for the vertex");
    this.locationHint = locationHint;
    return this;
  }

  // used internally to create parallelism location resource file
  VertexLocationHint getLocationHint() {
    return locationHint;
  }

  /**
   * Set the files etc that must be provided to the tasks of this vertex
   * @param localFiles
   *          files that must be available locally for each task. These files
   *          may be regular files, archives etc. as specified by the value
   *          elements of the map.
   * @return this Vertex
   */
  public Vertex addTaskLocalFiles(Map<String, LocalResource> localFiles) {
    if (localFiles != null) {
      TezCommonUtils.addAdditionalLocalResources(localFiles, taskLocalResources);
    }
    return this;
  }

  /**
   * Get the files etc that must be provided by the tasks of this vertex
   * @return local files of the vertex. Key is the file name.
   */
  public Map<String, LocalResource> getTaskLocalFiles() {
    return taskLocalResources;
  }

  /**
   * Set the Key-Value pairs of environment variables for tasks of this vertex.
   * This method should be used if different vertices need different env. Else,
   * set environment for all vertices via Tezconfiguration#TEZ_TASK_LAUNCH_ENV
   * @param environment environment variables to add for the vertex tasks
   * @return this Vertex
   */
  public Vertex setTaskEnvironment(Map<String, String> environment) {
    Preconditions.checkArgument(environment != null);
    // Entries are merged into any previously set environment rather than
    // replacing it wholesale.
    this.taskEnvironment.putAll(environment);
    return this;
  }

  /**
   * Get the environment variables of the tasks
   * @return environment variable map
   */
  public Map<String, String> getTaskEnvironment() {
    return taskEnvironment;
  }

  /**
   * Set the command opts for tasks of this vertex. This method should be used
   * when different vertices have different opts. Else, set the launch opts for
   * all vertices via Tezconfiguration#TEZ_TASK_LAUNCH_CMD_OPTS
   * @param cmdOpts launch command options for the vertex tasks
   * @return this Vertex
   */
  public Vertex setTaskLaunchCmdOpts(String cmdOpts){
    this.taskLaunchCmdOpts = cmdOpts;
    return this;
  }

  /**
   * Specifies an external data source for a Vertex. This is meant to be used
   * when a Vertex reads Input directly from an external source </p>
   *
   * For vertices which read data generated by another vertex - use the
   * {@link DAG addEdge} method.
   *
   * If a vertex needs to use data generated by another vertex in the DAG and
   * also from an external source, a combination of this API and the
   * DAG.addEdge API can be used. </p>
   *
   * Note: If more than one RootInput exists on a vertex, which generates
   * events which need to be routed, or generates information to set
   * parallelism, a custom vertex manager should be setup to handle this. Not
   * using a custom vertex manager for such a scenario will lead to a runtime
   * failure.
   *
   * @param inputName
   *          the name of the input. This will be used when accessing the input
   *          in the {@link LogicalIOProcessor}
   * @param dataSourceDescriptor
   *          the {@link DataSourceDescriptor} for this input.
   * @return this Vertex
   */
  public Vertex addDataSource(String inputName, DataSourceDescriptor dataSourceDescriptor) {
    additionalInputs
        .add(new RootInputLeafOutput<InputDescriptor, InputInitializerDescriptor>(
            inputName, dataSourceDescriptor.getInputDescriptor(),
            dataSourceDescriptor.getInputInitializerDescriptor()));
    this.dataSources.add(dataSourceDescriptor);
    return this;
  }

  /**
   * Specifies an external data sink for a Vertex. This is meant to be used when
   * a Vertex writes Output directly to an external destination. </p>
   *
   * If an output of the vertex is meant to be consumed by another Vertex in the
   * DAG - use the {@link DAG addEdge} method.
   *
   * If a vertex needs generate data to an external source as well as for
   * another Vertex in the DAG, a combination of this API and the DAG.addEdge
   * API can be used.
   *
   * @param outputName
   *          the name of the output. This will be used when accessing the
   *          output in the {@link LogicalIOProcessor}
   * @param dataSinkDescriptor
   *          the {@link DataSinkDescriptor} for this output
   * @return this Vertex
   */
  public Vertex addDataSink(String outputName, DataSinkDescriptor dataSinkDescriptor) {
    additionalOutputs
        .add(new RootInputLeafOutput<OutputDescriptor, OutputCommitterDescriptor>(
            outputName, dataSinkDescriptor.getOutputDescriptor(),
            dataSinkDescriptor.getOutputCommitterDescriptor()));
    this.dataSinks.add(dataSinkDescriptor);
    return this;
  }

  // Internal variant used when the output/committer pair is already built.
  Vertex addAdditionalDataSink(RootInputLeafOutput<OutputDescriptor, OutputCommitterDescriptor> output) {
    additionalOutputs.add(output);
    return this;
  }

  /**
   * Specifies a {@link VertexManagerPlugin} for the vertex.
   This plugin can be
   * used to modify the parallelism or reconfigure the vertex at runtime using
   * user defined code embedded in the plugin
   *
   * @param vertexManagerPluginDescriptor descriptor for the plugin
   * @return this Vertex
   */
  public Vertex setVertexManagerPlugin(
      VertexManagerPluginDescriptor vertexManagerPluginDescriptor) {
    this.vertexManagerPlugin = vertexManagerPluginDescriptor;
    return this;
  }

  /**
   * Get the launch command opts for tasks in this vertex
   * @return launch command opts
   */
  public String getTaskLaunchCmdOpts(){
    return taskLaunchCmdOpts;
  }

  @Override
  public String toString() {
    return "[" + vertexName + " : " + processorDescriptor.getClassName() + "]";
  }

  VertexManagerPluginDescriptor getVertexManagerPlugin() {
    return vertexManagerPlugin;
  }

  Map<String, GroupInfo> getGroupInputs() {
    return groupInputs;
  }

  // Registers a group input; duplicate names are a programming error.
  void addGroupInput(String groupName, GroupInfo groupInputInfo) {
    if (groupInputs.put(groupName, groupInputInfo) != null) {
      throw new IllegalStateException(
          "Vertex: " + getName() + " already has group input with name:"
              + groupName);
    }
  }

  void addInputVertex(Vertex inputVertex, Edge edge) {
    inputVertices.add(inputVertex);
    inputEdges.add(edge);
  }

  void addOutputVertex(Vertex outputVertex, Edge edge) {
    outputVertices.add(outputVertex);
    outputEdges.add(edge);
  }

  /**
   * Get the input vertices for this vertex
   * @return List of input vertices
   */
  public List<Vertex> getInputVertices() {
    return Collections.unmodifiableList(inputVertices);
  }

  /**
   * Get the output vertices for this vertex
   * @return List of output vertices
   */
  public List<Vertex> getOutputVertices() {
    return Collections.unmodifiableList(outputVertices);
  }

  /**
   * Set the cpu/memory etc resources used by tasks of this vertex
   * @param resource {@link Resource} for the tasks of this vertex
   */
  void setTaskResource(Resource resource) {
    this.taskResource = resource;
  }

  @Private
  public List<DataSourceDescriptor> getDataSources() {
    return dataSources;
  }

  @Private
  public List<DataSinkDescriptor> getDataSinks() {
    return dataSinks;
  }

  List<Edge> getInputEdges() {
    return inputEdges;
  }

  List<Edge> getOutputEdges() {
    return outputEdges;
  }

  List<RootInputLeafOutput<InputDescriptor, InputInitializerDescriptor>> getInputs() {
    return additionalInputs;
  }

  List<RootInputLeafOutput<OutputDescriptor, OutputCommitterDescriptor>> getOutputs() {
    return additionalOutputs;
  }
}
package ankit.account.manager.util;

import java.security.SecureRandom;

import javax.crypto.Cipher;
import javax.crypto.KeyGenerator;
import javax.crypto.SecretKey;
import javax.crypto.spec.SecretKeySpec;

import android.util.Base64;

/**
 * The SecurityUtils class contains methods used for password
 * encryption/decryption, plus the hex and base64 conversions used with them.
 *
 * <p>NOTE(review): {@link #getRawKey(byte[])} derives the AES key by seeding a
 * "SHA1PRNG" SecureRandom with a hard-coded string. That is not a real key
 * derivation function: it is insecure, and its output is not guaranteed to be
 * reproducible across JVM/Android versions, so data encrypted on one device
 * may fail to decrypt on another. The scheme is left unchanged here only for
 * backward compatibility with already-encrypted data; migrate to PBKDF2 (with
 * a stored random salt) when a data migration is feasible.
 */
public final class SecurityUtils {

    /** Digits used in hex format (uppercase; used by {@link #toHex(byte[])}). */
    private static final String HEX = "0123456789ABCDEF";

    /** Seed used to derive the encryption key (hard-coded app identifier). */
    private static final String SEED = "com.jiahaoliuliu.android.sampleaccountandserver";

    /**
     * The SecurityUtils class should not be instantiated, so its constructor
     * is private to prevent instantiation by other objects.
     */
    private SecurityUtils() {
    }

    /**
     * Appends one byte to the buffer as two uppercase hex digits.
     *
     * @param sb buffer to append to
     * @param b byte to append
     */
    private static void appendHex(StringBuilder sb, byte b) {
        sb.append(HEX.charAt((b >> 4) & 0x0f)).append(HEX.charAt(b & 0x0f));
    }

    /**
     * Derives a raw 128-bit AES key from the given seed.
     *
     * <p>WARNING: seeding SHA1PRNG is not a supported way to derive keys (see
     * class-level note); kept as-is so existing ciphertext stays decryptable.
     *
     * @param seed seed used to derive the raw key
     * @return byte array with the raw key
     * @throws Exception if the algorithm is unavailable
     */
    private static byte[] getRawKey(byte[] seed) throws Exception {
        KeyGenerator kgen = KeyGenerator.getInstance("AES");
        SecureRandom sr = SecureRandom.getInstance("SHA1PRNG");
        sr.setSeed(seed);
        kgen.init(128, sr); // 192 and 256 bits may not be available
        SecretKey skey = kgen.generateKey();
        return skey.getEncoded();
    }

    /**
     * Runs an AES cipher in the given mode over {@code data}. Shared helper
     * for {@link #encrypt(byte[], byte[])} and {@link #decrypt(byte[], byte[])}.
     *
     * @param mode {@link Cipher#ENCRYPT_MODE} or {@link Cipher#DECRYPT_MODE}
     * @param raw raw AES key bytes
     * @param data input to transform
     * @return cipher output
     * @throws Exception on any crypto failure
     */
    private static byte[] runCipher(int mode, byte[] raw, byte[] data) throws Exception {
        SecretKeySpec skeySpec = new SecretKeySpec(raw, "AES");
        Cipher cipher = Cipher.getInstance("AES");
        cipher.init(mode, skeySpec);
        return cipher.doFinal(data);
    }

    /**
     * This method encrypts text with the given raw byte array.
     * The algorithm used is AES.
     *
     * @param raw raw AES key bytes
     * @param clear byte representation of text to encrypt
     * @return byte array with encrypted text
     * @throws Exception on any crypto failure
     */
    private static byte[] encrypt(byte[] raw, byte[] clear) throws Exception {
        return runCipher(Cipher.ENCRYPT_MODE, raw, clear);
    }

    /**
     * This method decrypts encrypted text with the given raw byte array.
     *
     * @param raw raw AES key bytes
     * @param encrypted text encrypted for decryption
     * @return byte array with decrypted text
     * @throws Exception on any crypto failure
     */
    private static byte[] decrypt(byte[] raw, byte[] encrypted) throws Exception {
        return runCipher(Cipher.DECRYPT_MODE, raw, encrypted);
    }

    /**
     * This method converts a byte array into a lowercase hexadecimal string.
     *
     * @param value value to convert
     * @return string with hexadecimal format
     */
    private static String toHexadecimal(byte[] value) {
        StringBuilder result = new StringBuilder(2 * value.length);
        for (byte aux : value) {
            int b = aux & 0xff;
            // Pad single-digit values so each byte renders as exactly two chars.
            if (b < 0x10) {
                result.append('0');
            }
            result.append(Integer.toHexString(b));
        }
        return result.toString();
    }

    /**
     * This method encrypts text.
     *
     * <p>NOTE(review): getBytes() uses the platform default charset, so results
     * can differ across devices for non-ASCII input. Kept for compatibility.
     *
     * @param cleartext text to encrypt
     * @return string in hex format with |cleartext| encrypted
     * @throws Exception on any crypto failure
     */
    public static String encryptToHex(String cleartext) throws Exception {
        byte[] rawKey = getRawKey(SEED.getBytes());
        byte[] result = encrypt(rawKey, cleartext.getBytes());
        return toHex(result);
    }

    /**
     * This method encrypts text.
     *
     * @param cleartext text to encrypt
     * @return |cleartext| encrypted into byte array
     * @throws Exception on any crypto failure
     */
    public static byte[] encryptToBytes(String cleartext) throws Exception {
        byte[] rawKey = getRawKey(SEED.getBytes());
        return encrypt(rawKey, cleartext.getBytes());
    }

    /**
     * This method decrypts text previously produced by {@link #encryptToHex}.
     *
     * @param encrypted hex-encoded ciphertext to decrypt
     * @return |encrypted| decrypted into a string (platform default charset)
     * @throws Exception on any crypto failure
     */
    public static String decrypt(String encrypted) throws Exception {
        byte[] rawKey = getRawKey(SEED.getBytes());
        byte[] enc = toByte(encrypted);
        byte[] result = decrypt(rawKey, enc);
        return new String(result);
    }

    /**
     * This method encodes a byte array into a string with base64 format.
     *
     * @param bytes byte array to encode
     * @return string with |bytes| encoded into base64
     */
    public static String base64Encode(byte[] bytes) {
        return Base64.encodeToString(bytes, Base64.NO_WRAP);
    }

    /**
     * This method decodes a base64 encoded string and returns the decoded
     * bytes rendered as a lowercase hex string — NOT the raw decoded text.
     *
     * @param base64 string encoded in base64 format
     * @return hex representation of the decoded bytes
     */
    public static String base64Decode(String base64) {
        return toHexadecimal(Base64.decode(base64, Base64.NO_WRAP));
    }

    /**
     * This method converts a string to its hex representation
     * (uppercase, platform default charset).
     */
    public static String toHex(String txt) {
        return toHex(txt.getBytes());
    }

    /**
     * This method converts a string in hex format back to a plain string.
     */
    public static String fromHex(String hex) {
        return new String(toByte(hex));
    }

    /**
     * This method converts a string in hex format to a byte array.
     * Assumes |hexString| has even length and only hex digits; an odd
     * trailing character is silently dropped.
     *
     * @param hexString string in hex format to convert
     * @return byte array parsed from |hexString|
     */
    public static byte[] toByte(String hexString) {
        int len = hexString.length() / 2;
        byte[] result = new byte[len];
        for (int i = 0; i < len; i++) {
            result[i] = (byte) Integer.parseInt(hexString.substring(2 * i, 2 * i + 2), 16);
        }
        return result;
    }

    /**
     * This method gives a hex conversion of a byte array.
     *
     * @param buf byte array to get its hex representation; null yields ""
     * @return string with |buf| in uppercase hex format
     */
    public static String toHex(byte[] buf) {
        if (buf == null) {
            return "";
        }
        StringBuilder result = new StringBuilder(2 * buf.length);
        for (byte b : buf) {
            appendHex(result, b);
        }
        return result.toString();
    }
}
/*
 * Copyright 2020 Kyuhyen Hwang
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.github.resilience4j.spelresolver;

import io.github.resilience4j.DummySpelBean;
import io.github.resilience4j.TestApplication;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.ConfigurableBeanFactory;
import org.springframework.beans.factory.config.EmbeddedValueResolver;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.core.StandardReflectionParameterNameDiscoverer;
import org.springframework.expression.spel.standard.SpelExpressionParser;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

import java.lang.reflect.Method;

import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.BDDMockito.given;
import static org.mockito.BDDMockito.then;
import static org.mockito.Mockito.times;

/**
 * Tests for {@link DefaultSpelResolver}: plain strings, SpEL root/parameter
 * references, template expressions, property placeholders, and bean references.
 */
@RunWith(SpringJUnit4ClassRunner.class)
@SpringBootTest(classes = TestApplication.class, properties = "property=backend")
public class DefaultSpelResolverTest {
    private DefaultSpelResolver sut;

    @Autowired
    private ConfigurableBeanFactory configurableBeanFactory;

    @MockBean(name="dummySpelBean")
    DummySpelBean dummySpelBean;

    @Before
    public void setUp() {
        sut = new DefaultSpelResolver(new SpelExpressionParser(),
            new StandardReflectionParameterNameDiscoverer(), configurableBeanFactory);
        sut.setEmbeddedValueResolver(new EmbeddedValueResolver(configurableBeanFactory));
    }

    /**
     * Shared lookup of the reflective {@link #testMethod(String)} handle used
     * by every test; avoids duplicating the getMethod boilerplate (and the
     * pointless instantiation of the test class) in each case.
     */
    private static Method testMethod() throws NoSuchMethodException {
        return DefaultSpelResolverTest.class.getMethod("testMethod", String.class);
    }

    @Test
    public void givenNonSpelExpression_whenParse_returnsItself() throws Exception {
        String testExpression = "backendA";

        String result = sut.resolve(testMethod(), new Object[]{}, testExpression);

        assertThat(result).isEqualTo(testExpression);
    }

    /**
     * #root.args[0]
     */
    @Test
    public void testRootArgs() throws Exception {
        String testExpression = "#root.args[0]";
        String firstArgument = "test";

        String result = sut.resolve(testMethod(), new Object[]{firstArgument}, testExpression);

        assertThat(result).isEqualTo(firstArgument);
    }

    /**
     * #root.methodName
     */
    @Test
    public void testRootMethodName() throws Exception {
        String testExpression = "#root.methodName";

        String result = sut.resolve(testMethod(), new Object[]{}, testExpression);

        assertThat(result).isEqualTo("testMethod");
    }

    /**
     * #p0
     */
    @Test
    public void testP0() throws Exception {
        String testExpression = "#p0";
        String firstArgument = "test";

        String result = sut.resolve(testMethod(), new Object[]{firstArgument}, testExpression);

        assertThat(result).isEqualTo(firstArgument);
    }

    /**
     * #a0
     */
    @Test
    public void testA0() throws Exception {
        String testExpression = "#a0";
        String firstArgument = "test";

        String result = sut.resolve(testMethod(), new Object[]{firstArgument}, testExpression);

        assertThat(result).isEqualTo(firstArgument);
    }

    /**
     * #{'recover'}
     */
    @Test
    public void stringSpelTest() throws Exception {
        String testExpression = "#{'recover'}";

        String result = sut.resolve(testMethod(), new Object[]{}, testExpression);

        assertThat(result).isEqualTo("recover");
    }

    /**
     * ${missingProperty:default}
     */
    @Test
    public void placeholderSpelTest() throws Exception {
        String testExpression = "${missingProperty:default}";

        String result = sut.resolve(testMethod(), new Object[]{}, testExpression);

        assertThat(result).isEqualTo("default");
    }

    /**
     * ${property:default}
     */
    @Test
    public void placeholderSpelTest2() throws Exception {
        String testExpression = "${property:default}";

        String result = sut.resolve(testMethod(), new Object[]{}, testExpression);

        assertThat(result).isEqualTo("backend");
    }

    @Test
    public void beanMethodSpelTest() throws Exception {
        String testExpression = "@dummySpelBean.getBulkheadName(#parameter)";
        String testMethodArg = "argg";
        String bulkheadName = "sgt. bulko";
        given(dummySpelBean.getBulkheadName(testMethodArg)).willReturn(bulkheadName);

        String result = sut.resolve(testMethod(), new Object[]{testMethodArg}, testExpression);

        then(dummySpelBean).should(times(1)).getBulkheadName(testMethodArg);
        assertThat(result).isEqualTo(bulkheadName);
    }

    @Test
    public void atTest() throws Exception {
        String result = sut.resolve(testMethod(), new Object[]{}, "@");

        assertThat(result).isEqualTo("@");
    }

    @Test
    public void nullTest() throws Exception {
        String result = sut.resolve(testMethod(), new Object[]{}, null);

        assertThat(result).isNull();
    }

    @Test
    public void emptyStringTest() throws Exception {
        String result = sut.resolve(testMethod(), new Object[]{}, "");

        assertThat(result).isEqualTo("");
    }

    @Test
    public void dollarTest() throws Exception {
        String result = sut.resolve(testMethod(), new Object[]{}, "$");

        assertThat(result).isEqualTo("$");
    }

    /** Target method resolved reflectively by the tests above. */
    public String testMethod(String parameter) {
        return "test";
    }
}
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.user;

import java.util.List;
import java.util.Map;

import javax.ejb.Local;
import javax.naming.ConfigurationException;

import org.apache.cloudstack.acl.ControlledEntity;
import org.apache.cloudstack.acl.RoleType;
import org.apache.cloudstack.acl.SecurityChecker.AccessType;
import org.apache.cloudstack.api.command.admin.account.UpdateAccountCmd;
import org.apache.cloudstack.api.command.admin.user.DeleteUserCmd;
import org.apache.cloudstack.api.command.admin.user.RegisterCmd;
import org.apache.cloudstack.api.command.admin.user.UpdateUserCmd;
import org.springframework.stereotype.Component;

import com.cloud.api.query.vo.ControlledViewEntity;
import com.cloud.domain.Domain;
import com.cloud.exception.ConcurrentOperationException;
import com.cloud.exception.PermissionDeniedException;
import com.cloud.exception.ResourceUnavailableException;
import com.cloud.projects.Project.ListProjectResourcesCriteria;
import com.cloud.utils.Pair;
import com.cloud.utils.Ternary;
import com.cloud.utils.component.Manager;
import com.cloud.utils.component.ManagerBase;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;

/**
 * Test double for {@link AccountManager}: every operation is a stub. Lookups
 * return null, predicates return false, and lifecycle methods succeed, except
 * where noted below ({@code getSystemAccount}/{@code getSystemUser} hand back
 * fresh empty VOs, and {@code stop()} reports failure).
 */
@Component
@Local(value = { AccountManager.class, AccountService.class })
public class MockAccountManagerImpl extends ManagerBase implements Manager, AccountManager {

    // ---- user lifecycle stubs ------------------------------------------------

    @Override
    public boolean deleteUserAccount(long accountId) {
        return false; // stub: deletion never reported as successful
    }

    @Override
    public UserAccount disableUser(long userId) {
        return null; // stub
    }

    @Override
    public UserAccount enableUser(long userId) {
        return null; // stub
    }

    @Override
    public UserAccount lockUser(long userId) {
        return null; // stub
    }

    @Override
    public UserAccount updateUser(UpdateUserCmd cmd) {
        return null; // stub
    }

    // ---- account lifecycle stubs ---------------------------------------------

    @Override
    public Account disableAccount(String accountName, Long domainId, Long accountId)
            throws ConcurrentOperationException, ResourceUnavailableException {
        return null; // stub
    }

    @Override
    public Account enableAccount(String accountName, Long domainId, Long accountId) {
        return null; // stub
    }

    @Override
    public Account lockAccount(String accountName, Long domainId, Long accountId) {
        return null; // stub
    }

    @Override
    public Account updateAccount(UpdateAccountCmd cmd) {
        return null; // stub
    }

    @Override
    public Account getSystemAccount() {
        // Fresh, empty VO rather than null so callers can dereference it.
        return new AccountVO();
    }

    @Override
    public User getSystemUser() {
        // Fresh, empty VO rather than null so callers can dereference it.
        return new UserVO();
    }

    @Override
    public boolean deleteUser(DeleteUserCmd deleteUserCmd) {
        return false; // stub
    }

    @Override
    public boolean isAdmin(short accountType) {
        return false; // stub: nobody is an admin in this mock
    }

    @Override
    public Account finalizeOwner(Account caller, String accountName, Long domainId, Long projectId) {
        return null; // stub
    }

    // ---- lookup stubs --------------------------------------------------------

    @Override
    public Account getActiveAccountByName(String accountName, Long domainId) {
        return null; // stub
    }

    @Override
    public Account getActiveAccountById(long accountId) {
        return null; // stub
    }

    @Override
    public Account getAccount(long accountId) {
        return null; // stub
    }

    @Override
    public User getActiveUser(long userId) {
        return null; // stub
    }

    @Override
    public User getUserIncludingRemoved(long userId) {
        return null; // stub
    }

    @Override
    public boolean isRootAdmin(short accountType) {
        return false; // stub
    }

    @Override
    public User getActiveUserByRegistrationToken(String registrationToken) {
        return null; // stub
    }

    @Override
    public void markUserRegistered(long userId) {
        // no-op in this mock
    }

    @Override
    public boolean disableAccount(long accountId) throws ConcurrentOperationException, ResourceUnavailableException {
        return false; // stub
    }

    // ---- access-control stubs ------------------------------------------------

    @Override
    public void checkAccess(Account account, Domain domain) throws PermissionDeniedException {
        // no-op: access is never denied by this mock
    }

    @Override
    public Long checkAccessAndSpecifyAuthority(Account caller, Long zoneId) {
        return null; // stub
    }

    // ---- Manager lifecycle ---------------------------------------------------

    @Override
    public boolean configure(String name, Map<String, Object> params) throws ConfigurationException {
        return true; // mock always configures successfully
    }

    @Override
    public boolean start() {
        return true; // mock always starts successfully
    }

    @Override
    public boolean stop() {
        return false; // stub
    }

    @Override
    public String getName() {
        return null; // stub
    }

    @Override
    public void checkAccess(Account account, AccessType accessType, boolean sameOwner, ControlledEntity... entities)
            throws PermissionDeniedException {
        // no-op: access is never denied by this mock
    }

    @Override
    public void logoutUser(long userId) {
        // no-op in this mock
    }

    @Override
    public UserAccount authenticateUser(String username, String password, Long domainId, String loginIpAddress,
            Map<String, Object[]> requestParameters) {
        return null; // stub
    }

    @Override
    public Pair<User, Account> findUserByApiKey(String apiKey) {
        return null; // stub
    }

    @Override
    public String[] createApiKeyAndSecretKey(RegisterCmd cmd) {
        return null; // stub
    }

    @Override
    public boolean enableAccount(long accountId) {
        return false; // stub
    }

    // ---- ACL search builder stubs --------------------------------------------

    @Override
    public void buildACLSearchBuilder(SearchBuilder<? extends ControlledEntity> sb, Long domainId,
            boolean isRecursive, List<Long> permittedAccounts,
            ListProjectResourcesCriteria listProjectResourcesCriteria) {
        // no-op in this mock
    }

    @Override
    public void buildACLSearchCriteria(SearchCriteria<? extends ControlledEntity> sc, Long domainId,
            boolean isRecursive, List<Long> permittedAccounts,
            ListProjectResourcesCriteria listProjectResourcesCriteria) {
        // no-op in this mock
    }

    @Override
    public void buildACLSearchParameters(Account caller, Long id, String accountName, Long projectId,
            List<Long> permittedAccounts,
            Ternary<Long, Boolean, ListProjectResourcesCriteria> domainIdRecursiveListProject, boolean listAll,
            boolean forProjectInvitation) {
        // no-op in this mock
    }

    @Override
    public void buildACLViewSearchBuilder(SearchBuilder<? extends ControlledViewEntity> sb, Long domainId,
            boolean isRecursive, List<Long> permittedAccounts,
            ListProjectResourcesCriteria listProjectResourcesCriteria) {
        // no-op in this mock
    }

    @Override
    public void buildACLViewSearchCriteria(SearchCriteria<? extends ControlledViewEntity> sc, Long domainId,
            boolean isRecursive, List<Long> permittedAccounts,
            ListProjectResourcesCriteria listProjectResourcesCriteria) {
        // no-op in this mock
    }

    /* (non-Javadoc)
     * @see com.cloud.user.AccountService#getUserByApiKey(java.lang.String)
     */
    @Override
    public UserAccount getUserByApiKey(String apiKey) {
        return null; // stub
    }

    // ---- creation stubs ------------------------------------------------------

    @Override
    public UserAccount createUserAccount(String userName, String password, String firstName, String lastName,
            String email, String timezone, String accountName, short accountType, Long domainId, String networkDomain,
            Map<String, String> details, String accountUUID, String userUUID) {
        return null; // stub
    }

    @Override
    public User createUser(String userName, String password, String firstName, String lastName, String email,
            String timeZone, String accountName, Long domainId, String userUUID) {
        return null; // stub
    }

    @Override
    public RoleType getRoleType(Account account) {
        return null; // stub
    }

    @Override
    public boolean deleteAccount(AccountVO account, long callerUserId, Account caller) {
        return false; // stub
    }

    @Override
    public Account createAccount(String accountName, short accountType, Long domainId, String networkDomain,
            Map<String, String> details, String uuid) {
        return null; // stub
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.hyracks.storage.am.lsm.invertedindex.impls;

import java.util.List;

import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
import org.apache.hyracks.storage.am.bloomfilter.impls.BloomFilter;
import org.apache.hyracks.storage.am.btree.impls.RangePredicate;
import org.apache.hyracks.storage.am.common.api.ILSMIndexCursor;
import org.apache.hyracks.storage.am.common.impls.NoOpIndexAccessParameters;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent.LSMComponentType;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponentFilter;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMHarness;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
import org.apache.hyracks.storage.common.EnforcedIndexCursor;
import org.apache.hyracks.storage.common.ICursorInitialState;
import org.apache.hyracks.storage.common.IIndexCursor;
import org.apache.hyracks.storage.common.IIndexAccessor;
import org.apache.hyracks.storage.common.ISearchOperationCallback;
import org.apache.hyracks.storage.common.ISearchPredicate;
import org.apache.hyracks.storage.common.MultiComparator;

/**
 * Searches the components one-by-one, completely consuming a cursor before moving on to the next one.
 * Therefore, there are no guarantees about sort order of the results.
 *
 * State machine: doHasNext() advances to the next live (not-deleted) tuple and
 * caches it; doNext() merely marks that cached tuple as consumed so the next
 * doHasNext() advances again.
 */
public class LSMInvertedIndexSearchCursor extends EnforcedIndexCursor implements ILSMIndexCursor {

    // Accessor/cursor over the component currently being scanned.
    private IIndexAccessor currentAccessor;
    private IIndexCursor currentCursor;
    // Index into indexAccessors of the component being scanned; -1 before open.
    private int accessorIndex = -1;
    // True once the caller has taken the cached tuple (or before the first tuple).
    private boolean tupleConsumed = true;
    private ILSMHarness harness;
    private IIndexAccessor[] indexAccessors;
    private ISearchPredicate searchPred;
    private ISearchOperationCallback searchCallback;

    // Assuming the cursor for all deleted-keys indexes are of the same type.
    private IIndexCursor[] deletedKeysBTreeCursors;
    private BloomFilter[] deletedKeysBTreeBloomFilters;
    private IIndexAccessor[] deletedKeysBTreeAccessors;
    private RangePredicate keySearchPred;
    private ILSMIndexOperationContext opCtx;

    // True if component 0 is a memory component (set during doOpen).
    private boolean includeMemoryComponents;
    private List<ILSMComponent> operationalComponents;
    private ITupleReference currentTuple = null;
    private boolean resultOfSearchCallBackProceed = false;

    // Scratch array reused for bloom-filter probes to avoid per-call allocation.
    private final long[] hashes = BloomFilter.createHashArray();

    /**
     * Wires up one accessor per operational component plus, per component, an
     * accessor/cursor (and, for disk components, a bloom filter) over its
     * deleted-keys buddy BTree.
     */
    @Override
    public void doOpen(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
        LSMInvertedIndexSearchCursorInitialState lsmInitState = (LSMInvertedIndexSearchCursorInitialState) initialState;
        LSMInvertedIndexOpContext lsmOpCtx = (LSMInvertedIndexOpContext) lsmInitState.getOpContext();
        harness = lsmInitState.getLSMHarness();
        operationalComponents = lsmInitState.getOperationalComponents();
        indexAccessors = new IIndexAccessor[operationalComponents.size()];
        opCtx = lsmInitState.getOpContext();
        accessorIndex = 0;
        this.searchPred = searchPred;
        this.searchCallback = lsmInitState.getSearchOperationCallback();
        includeMemoryComponents = false;
        // For searching the deleted-keys BTrees.
        deletedKeysBTreeAccessors = new IIndexAccessor[operationalComponents.size()];
        deletedKeysBTreeCursors = new IIndexCursor[operationalComponents.size()];
        deletedKeysBTreeBloomFilters = new BloomFilter[operationalComponents.size()];
        for (int i = 0; i < operationalComponents.size(); i++) {
            ILSMComponent component = operationalComponents.get(i);
            indexAccessors[i] = component.getIndex().createAccessor(lsmOpCtx.getIndexAccessParameters());
            if (component.getType() == LSMComponentType.MEMORY) {
                // No need for a bloom filter for the in-memory BTree.
                deletedKeysBTreeAccessors[i] = ((LSMInvertedIndexMemoryComponent) component).getBuddyIndex()
                        .createAccessor(NoOpIndexAccessParameters.INSTANCE);
                deletedKeysBTreeBloomFilters[i] = null;
                includeMemoryComponents = true;
            } else {
                deletedKeysBTreeAccessors[i] = ((LSMInvertedIndexDiskComponent) component).getBuddyIndex()
                        .createAccessor(NoOpIndexAccessParameters.INSTANCE);
                deletedKeysBTreeBloomFilters[i] = ((LSMInvertedIndexDiskComponent) component).getBloomFilter();
            }
            deletedKeysBTreeCursors[i] = deletedKeysBTreeAccessors[i].createSearchCursor(false);
        }
        MultiComparator keyCmp = lsmInitState.getKeyComparator();
        // Point lookup: low key == high key, both inclusive.
        keySearchPred = new RangePredicate(null, null, true, true, keyCmp, keyCmp);
    }

    /**
     * Returns true if {@code key} appears in a deleted-keys BTree of a NEWER
     * component, i.e. one with index < accessorIndex (components older than the
     * one currently being scanned cannot invalidate its tuples).
     */
    protected boolean isDeleted(ITupleReference key) throws HyracksDataException {
        keySearchPred.setLowKey(key, true);
        keySearchPred.setHighKey(key, true);
        for (int i = 0; i < accessorIndex; i++) {
            // NOTE(review): this close() appears to put the reused cursor back
            // into a searchable state before the next search() — confirm that
            // is the intended reset idiom for these cursors.
            deletedKeysBTreeCursors[i].close();
            // Bloom filter says "definitely absent" -> skip the BTree probe.
            if (deletedKeysBTreeBloomFilters[i] != null && !deletedKeysBTreeBloomFilters[i].contains(key, hashes)) {
                continue;
            }
            try {
                deletedKeysBTreeAccessors[i].search(deletedKeysBTreeCursors[i], keySearchPred);
                if (deletedKeysBTreeCursors[i].hasNext()) {
                    return true;
                }
            } finally {
                deletedKeysBTreeCursors[i].close();
            }
        }
        return false;
    }

    // Move to the next tuple that has not been deleted.
    // Returns true and caches the tuple (tupleConsumed = false) on success.
    private boolean nextValidTuple() throws HyracksDataException {
        while (currentCursor.hasNext()) {
            currentCursor.next();
            currentTuple = currentCursor.getTuple();
            // The proceed/reconcile callback protocol only applies to the
            // memory component (component 0, when present).
            resultOfSearchCallBackProceed =
                    includeMemoryComponents && accessorIndex == 0 ? searchCallback.proceed(currentTuple) : true;
            if (!resultOfSearchCallBackProceed) {
                // We assume that the underlying cursors materialize their results such that
                // there is no need to reposition the result cursor after reconciliation.
                searchCallback.reconcile(currentTuple);
            }
            if (!isDeleted(currentTuple)) {
                tupleConsumed = false;
                return true;
            } else if (!resultOfSearchCallBackProceed) {
                // reconcile & tuple deleted case: needs to cancel the effect of reconcile().
                searchCallback.cancel(currentTuple);
            }
        }
        return false;
    }

    /**
     * True if a live tuple is available. Exhausts the current component's
     * cursor, then falls through to the next component, closing cursors as it
     * goes.
     */
    @Override
    public boolean doHasNext() throws HyracksDataException {
        if (!tupleConsumed) {
            // A tuple is already cached and not yet taken by the caller.
            return true;
        }
        if (currentCursor != null) {
            if (nextValidTuple()) {
                return true;
            }
            currentCursor.close();
            accessorIndex++;
        }
        while (accessorIndex < indexAccessors.length) {
            // Current cursor has been exhausted, switch to next accessor/cursor.
            currentAccessor = indexAccessors[accessorIndex];
            currentCursor = currentAccessor.createSearchCursor(false);
            currentAccessor.search(currentCursor, searchPred);
            if (nextValidTuple()) {
                return true;
            }
            // Close as we go to release resources.
            currentCursor.close();
            accessorIndex++;
        }
        return false;
    }

    @Override
    public void doNext() throws HyracksDataException {
        // Mark the tuple as consumed, so hasNext() can move on.
        tupleConsumed = true;
    }

    @Override
    public void doDestroy() throws HyracksDataException {
        doClose();
    }

    /**
     * Closes the active cursor and resets scan position; always ends the LSM
     * search on the harness, even if closing the cursor throws.
     */
    @Override
    public void doClose() throws HyracksDataException {
        try {
            if (currentCursor != null) {
                currentCursor.close();
                currentCursor = null;
            }
            accessorIndex = 0;
        } finally {
            if (harness != null) {
                harness.endSearch(opCtx);
            }
        }
    }

    @Override
    public ITupleReference doGetTuple() {
        // Valid only after doHasNext() returned true; currentCursor is
        // positioned on the cached tuple.
        return currentCursor.getTuple();
    }

    @Override
    public ITupleReference getFilterMinTuple() {
        ILSMComponentFilter filter = getComponentFilter();
        return filter == null ? null : filter.getMinTuple();
    }

    @Override
    public ITupleReference getFilterMaxTuple() {
        ILSMComponentFilter filter = getComponentFilter();
        return filter == null ? null : filter.getMaxTuple();
    }

    // Filter of the component currently being scanned; null before open.
    private ILSMComponentFilter getComponentFilter() {
        if (accessorIndex < 0) {
            return null;
        }
        return operationalComponents.get(accessorIndex).getLSMComponentFilter();
    }

    @Override
    public boolean getSearchOperationCallbackProceedResult() {
        // NOTE(review): always false here even though
        // resultOfSearchCallBackProceed is tracked above — confirm this is
        // intentional for this cursor type.
        return false;
    }
}
package com.intellij.dupLocator.index;

import com.intellij.codeInspection.*;
import com.intellij.dupLocator.DuplicatesProfile;
import com.intellij.dupLocator.DuplocatorState;
import com.intellij.dupLocator.LightDuplicateProfile;
import com.intellij.dupLocator.treeHash.FragmentsCollector;
import com.intellij.dupLocator.util.PsiFragment;
import com.intellij.lang.FileASTNode;
import com.intellij.lang.LighterAST;
import com.intellij.lang.LighterASTNode;
import com.intellij.lang.TreeBackedLighterAST;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ProjectFileIndex;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileWithId;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.tree.ILightStubFileElementType;
import com.intellij.util.SmartList;
import com.intellij.util.indexing.FileBasedIndex;
import gnu.trove.TIntArrayList;
import gnu.trove.TIntIntHashMap;
import gnu.trove.TIntObjectHashMap;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.Iterator;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;

/**
 * Local inspection that reports code fragments in the inspected file which are
 * duplicated elsewhere in the project, backed by {@code DuplicatesIndex}.
 * Supports two collection paths: a "light" lighter-AST profile and the older
 * PSI-fragment based profile.
 */
public class DuplicatesInspectionBase extends LocalInspectionTool {
  // Fragments spanning fewer merged sub-fragments than this are not reported
  // (only enforced for the non-light profile; see checkFile).
  private static final int MIN_FRAGMENT_SIZE = 3;

  /**
   * Scans {@code psiFile} for duplicated fragments and returns one problem per
   * reported range, or null when nothing qualifies.
   */
  @Nullable
  @Override
  public ProblemDescriptor[] checkFile(@NotNull final PsiFile psiFile, @NotNull final InspectionManager manager, final boolean isOnTheFly) {
    final VirtualFile virtualFile = psiFile.getVirtualFile();
    // Only files that are in the index (VirtualFileWithId) and with the index enabled are inspected.
    if (!(virtualFile instanceof VirtualFileWithId) || /*!isOnTheFly || */!DuplicatesIndex.ourEnabled) return ProblemDescriptor.EMPTY_ARRAY;
    final DuplicatesProfile profile = DuplicatesIndex.findDuplicatesProfile(psiFile.getFileType());
    if (profile == null) return ProblemDescriptor.EMPTY_ARRAY;

    // The processor is created lazily inside the callbacks; the Ref carries it out.
    final Ref<DuplicatedCodeProcessor> myProcessorRef = new Ref<DuplicatedCodeProcessor>();

    final FileASTNode node = psiFile.getNode();
    // Light path requires a light profile, a light-stub file element type, and the feature flag.
    boolean usingLightProfile = profile instanceof LightDuplicateProfile &&
                                node.getElementType() instanceof ILightStubFileElementType &&
                                DuplicatesIndex.ourEnabledLightProfiles;
    if (usingLightProfile) {
      LighterAST ast = node.getLighterAST();
      assert ast != null;
      ((LightDuplicateProfile)profile).process(ast, new LightDuplicateProfile.Callback() {
        DuplicatedCodeProcessor<LighterASTNode> myProcessor;

        @Override
        public void process(@NotNull final LighterAST ast, @NotNull final LighterASTNode node, int hash) {
          // Adapter mapping LighterASTNode to the generic processor contract.
          class LightDuplicatedCodeProcessor extends DuplicatedCodeProcessor<LighterASTNode> {
            LightDuplicatedCodeProcessor(VirtualFile file, Project project) {
              super(file, project);
            }

            // Light nodes carry no in-element sub-range.
            @Override
            protected TextRange getRangeInElement(LighterASTNode node) {
              return null;
            }

            @Override
            protected PsiElement getPsi(LighterASTNode node) {
              return ((TreeBackedLighterAST)ast).unwrap(node).getPsi();
            }

            @Override
            protected int getStartOffset(LighterASTNode node) {
              return node.getStartOffset();
            }

            @Override
            protected int getEndOffset(LighterASTNode node) {
              return node.getEndOffset();
            }

            @Override
            protected boolean isLightProfile() {
              return true;
            }
          }
          if (myProcessor == null) {
            myProcessor = new LightDuplicatedCodeProcessor(virtualFile, psiFile.getProject());
            myProcessorRef.set(myProcessor);
          }
          myProcessor.process(hash, node);
        }
      });
    } else {
      final DuplocatorState state = profile.getDuplocatorState(psiFile.getLanguage());
      profile.createVisitor(new FragmentsCollector() {
        DuplicatedCodeProcessor<PsiFragment> myProcessor;

        @Override
        public void add(int hash, final int cost, @Nullable final PsiFragment frag) {
          // Skip fragments that the index itself would not have recorded.
          if (!DuplicatesIndex.isIndexedFragment(frag, cost, profile, state)) {
            return;
          }

          // Adapter mapping PsiFragment to the generic processor contract.
          class OldDuplicatedCodeProcessor extends DuplicatedCodeProcessor<PsiFragment> {
            OldDuplicatedCodeProcessor(VirtualFile file, Project project) {
              super(file, project);
            }

            // Multi-element fragments report the sub-range covering the first
            // through last element, relative to their common parent.
            @Override
            protected TextRange getRangeInElement(PsiFragment node) {
              PsiElement[] elements = node.getElements();
              TextRange rangeInElement = null;
              if (elements.length > 1) {
                PsiElement lastElement = elements[elements.length - 1];
                rangeInElement = new TextRange(
                  elements[0].getStartOffsetInParent(),
                  lastElement.getStartOffsetInParent() + lastElement.getTextLength()
                );
              }
              return rangeInElement;
            }

            @Override
            protected PsiElement getPsi(PsiFragment node) {
              PsiElement[] elements = node.getElements();
              return elements.length > 1 ? elements[0].getParent() : elements[0];
            }

            @Override
            protected int getStartOffset(PsiFragment node) {
              return node.getStartOffset();
            }

            @Override
            protected int getEndOffset(PsiFragment node) {
              return node.getEndOffset();
            }

            @Override
            protected boolean isLightProfile() {
              return false;
            }
          }
          if (myProcessor == null) {
            myProcessor = new OldDuplicatedCodeProcessor(virtualFile, psiFile.getProject());
            myProcessorRef.set(myProcessor);
          }
          myProcessor.process(hash, frag);
        }
      }, true).visitNode(psiFile);
    }

    DuplicatedCodeProcessor<?> processor = myProcessorRef.get();

    final SmartList<ProblemDescriptor> descriptors = new SmartList<ProblemDescriptor>();
    if (processor != null) {
      for(Map.Entry<Integer, TextRange> entry:processor.reportedRanges.entrySet()) {
        final Integer offset = entry.getKey();
        // todo 3 statements constant
        // Size threshold applies only to the old (non-light) profile.
        if (!usingLightProfile && processor.fragmentSize.get(offset) < MIN_FRAGMENT_SIZE) continue;
        final VirtualFile file = processor.reportedFiles.get(offset);
        String message = "Found duplicated code in " + file.getPath();

        PsiElement targetElement = processor.reportedPsi.get(offset);
        TextRange rangeInElement = entry.getValue();
        final int offsetInOtherFile = processor.reportedOffsetInOtherFiles.get(offset);

        LocalQuickFix fix = createNavigateToDupeFix(file, offsetInOtherFile);
        int hash = processor.fragmentHash.get(offset);
        LocalQuickFix viewAllDupesFix = hash != 0 ? createShowOtherDupesFix(virtualFile, offset, hash, psiFile.getProject()) : null;

        ProblemDescriptor descriptor = manager
          .createProblemDescriptor(targetElement, rangeInElement, message, ProblemHighlightType.WEAK_WARNING, isOnTheFly,
                                   fix, viewAllDupesFix);
        descriptors.add(descriptor);
      }
    }
    return descriptors.isEmpty() ? null : descriptors.toArray(new ProblemDescriptor[descriptors.size()]);
  }

  // Hook for subclasses: quick fix navigating to the duplicate; null = no fix.
  protected LocalQuickFix createNavigateToDupeFix(@NotNull VirtualFile file, int offsetInOtherFile) {
    return null;
  }

  // Hook for subclasses: quick fix listing all duplicates of a hash; null = no fix.
  protected LocalQuickFix createShowOtherDupesFix(VirtualFile file, int offset, int hash, Project project) {
    return null;
  }

  /**
   * Accumulates duplicate reports per fragment-start offset while walking index
   * values. Overlapping previously-reported ranges are merged (removed and their
   * sizes summed) so only the widest fragment at an offset survives.
   */
  static abstract class DuplicatedCodeProcessor<T> implements FileBasedIndex.ValueProcessor<TIntArrayList> {
    final TreeMap<Integer, TextRange> reportedRanges = new TreeMap<Integer, TextRange>();
    final TIntObjectHashMap<VirtualFile> reportedFiles = new TIntObjectHashMap<VirtualFile>();
    final TIntObjectHashMap<PsiElement> reportedPsi = new TIntObjectHashMap<PsiElement>();
    final TIntIntHashMap reportedOffsetInOtherFiles = new TIntIntHashMap();
    final TIntIntHashMap fragmentSize = new TIntIntHashMap();
    final TIntIntHashMap fragmentHash = new TIntIntHashMap();
    final VirtualFile virtualFile;
    final Project project;
    final ProjectFileIndex myProjectFileIndex;
    // Node/hash currently being processed; set by process(int, T) before index lookup.
    T myNode;
    int myHash;

    DuplicatedCodeProcessor(VirtualFile file, Project project) {
      virtualFile = file;
      this.project = project;
      myProjectFileIndex = ProjectFileIndex.SERVICE.getInstance(project);
    }

    // Queries the duplicates index for other occurrences of this hash in the project.
    void process(int hash, T node) {
      ProgressManager.checkCanceled();

      myNode = node;
      myHash = hash;

      FileBasedIndex.getInstance().processValues(DuplicatesIndex.NAME, hash, null, this, GlobalSearchScope.projectScope(project));
    }

    /**
     * Index callback: invoked per file containing the hash. Returns false to stop
     * iteration once a duplicate has been recorded for this node.
     */
    @Override
    public boolean process(VirtualFile file, TIntArrayList list) {
      for(int i = 0, len = list.size(); i < len; ++i) {
        ProgressManager.checkCanceled();
        int value = list.getQuick(i);

        // Only pair source-with-source or non-source-with-non-source files.
        if (myProjectFileIndex.isInSource(virtualFile) && !myProjectFileIndex.isInSource(file)) return true;
        if (!myProjectFileIndex.isInSource(virtualFile) && myProjectFileIndex.isInSource(file)) return true;
        final int startOffset = getStartOffset(myNode);
        final int endOffset = getEndOffset(myNode);
        // Skip the occurrence that is this very fragment.
        if (file.equals(virtualFile) && value >= startOffset && value < endOffset) continue;

        PsiElement target = getPsi(myNode);
        TextRange rangeInElement = getRangeInElement(myNode);

        Integer fragmentStartOffsetInteger = startOffset;
        // Remove reports nested inside this fragment and fold their sizes into ours.
        SortedMap<Integer,TextRange> map = reportedRanges.subMap(fragmentStartOffsetInteger, endOffset);
        int newFragmentSize = !map.isEmpty() ? 0:1;

        Iterator<Integer> iterator = map.keySet().iterator();
        while(iterator.hasNext()) {
          Integer next = iterator.next();
          iterator.remove();
          reportedFiles.remove(next);
          reportedOffsetInOtherFiles.remove(next);
          reportedPsi.remove(next);
          newFragmentSize += fragmentSize.remove(next);
        }

        reportedRanges.put(fragmentStartOffsetInteger, rangeInElement);
        reportedFiles.put(fragmentStartOffsetInteger, file);
        reportedOffsetInOtherFiles.put(fragmentStartOffsetInteger, value);
        reportedPsi.put(fragmentStartOffsetInteger, target);
        fragmentSize.put(fragmentStartOffsetInteger, newFragmentSize);
        if (newFragmentSize >= MIN_FRAGMENT_SIZE || isLightProfile()) fragmentHash.put(fragmentStartOffsetInteger, myHash);
        return false;
      }
      return true;
    }

    protected abstract TextRange getRangeInElement(T node);
    protected abstract PsiElement getPsi(T node);
    protected abstract int getStartOffset(T node);
    protected abstract int getEndOffset(T node);
    protected abstract boolean isLightProfile();
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.http; import java.io.IOException; import java.io.PrintWriter; import java.net.BindException; import java.net.InetSocketAddress; import java.net.URL; import java.util.ArrayList; import java.util.Enumeration; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.servlet.Filter; import javax.servlet.FilterChain; import javax.servlet.FilterConfig; import javax.servlet.ServletContext; import javax.servlet.ServletException; import javax.servlet.ServletRequest; import javax.servlet.ServletResponse; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequestWrapper; import javax.servlet.http.HttpServletResponse; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.log.LogLevel; import org.apache.hadoop.security.Krb5AndCertsSslSocketConnector; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.Krb5AndCertsSslSocketConnector.MODE; import org.apache.hadoop.security.authorize.AccessControlList; import org.apache.hadoop.util.ReflectionUtils; 
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.mortbay.jetty.Connector;
import org.mortbay.jetty.Handler;
import org.mortbay.jetty.Server;
import org.mortbay.jetty.handler.ContextHandlerCollection;
import org.mortbay.jetty.nio.SelectChannelConnector;
import org.mortbay.jetty.security.SslSocketConnector;
import org.mortbay.jetty.servlet.Context;
import org.mortbay.jetty.servlet.DefaultServlet;
import org.mortbay.jetty.servlet.FilterHolder;
import org.mortbay.jetty.servlet.FilterMapping;
import org.mortbay.jetty.servlet.ServletHandler;
import org.mortbay.jetty.servlet.ServletHolder;
import org.mortbay.jetty.webapp.WebAppContext;
import org.mortbay.thread.QueuedThreadPool;
import org.mortbay.util.MultiException;

/**
 * Create a Jetty embedded server to answer http requests. The primary goal
 * is to serve up status information for the server.
 * There are three contexts:
 *   "/logs/" -> points to the log directory
 *   "/static/" -> points to common static files (src/webapps/static)
 *   "/" -> the jsp server code from (src/webapps/<name>)
 */
public class HttpServer implements FilterContainer {
  public static final Log LOG = LogFactory.getLog(HttpServer.class);

  static final String FILTER_INITIALIZER_PROPERTY
      = "hadoop.http.filter.initializers";

  // The ServletContext attribute where the daemon Configuration
  // gets stored.
  static final String CONF_CONTEXT_ATTRIBUTE = "hadoop.conf";
  static final String ADMINS_ACL = "admins.acl";

  private AccessControlList adminsAcl;
  protected final Server webServer;
  protected final Connector listener;
  protected final WebAppContext webAppContext;
  protected final boolean findPort;
  // Extra contexts beyond the main webapp; value = whether filters apply to it.
  protected final Map<Context, Boolean> defaultContexts =
      new HashMap<Context, Boolean>();
  protected final List<String> filterNames = new ArrayList<String>();
  // Cap on listener.getLocalPort() retries in start() (HADOOP-4744 workaround).
  private static final int MAX_RETRIES = 10;
  private final Configuration conf;
  // True when the caller supplied an already-configured Connector; start() then
  // expects it to have been opened externally.
  private boolean listenerStartedExternally = false;

  /** Same as this(name, bindAddress, port, findPort, null); */
  public HttpServer(String name, String bindAddress, int port, boolean findPort
      ) throws IOException {
    this(name, bindAddress, port, findPort, new Configuration());
  }

  public HttpServer(String name, String bindAddress, int port,
      boolean findPort, Configuration conf) throws IOException {
    this(name, bindAddress, port, findPort, conf, null, null);
  }

  public HttpServer(String name, String bindAddress, int port,
      boolean findPort, Configuration conf, Connector connector) throws IOException {
    this(name, bindAddress, port, findPort, conf, null, connector);
  }

  /**
   * Create a status server on the given port.
   * The jsp scripts are taken from src/webapps/<name>.
   * @param name The name of the server
   * @param port The port to use on the server
   * @param findPort whether the server should start at the given port and
   *        increment by 1 until it finds a free port.
   * @param conf Configuration
   * @param adminsAcl {@link AccessControlList} of the admins
   */
  public HttpServer(String name, String bindAddress, int port,
      boolean findPort, Configuration conf, AccessControlList adminsAcl)
      throws IOException {
    this(name, bindAddress, port, findPort, conf, adminsAcl, null);
  }

  // Primary constructor: builds the Jetty server, the main webapp context,
  // default apps, the Kerberos and safety filters, and default servlets.
  public HttpServer(String name, String bindAddress, int port,
      boolean findPort, Configuration conf, AccessControlList adminsAcl,
      Connector connector) throws IOException{
    webServer = new Server();
    this.findPort = findPort;
    this.conf = conf;
    this.adminsAcl = adminsAcl;

    if(connector == null) {
      listenerStartedExternally = false;
      listener = createBaseListener(conf);
      listener.setHost(bindAddress);
      listener.setPort(port);
    } else {
      listenerStartedExternally = true;
      listener = connector;
    }

    webServer.addConnector(listener);
    webServer.setThreadPool(new QueuedThreadPool());

    final String appDir = getWebAppsPath();
    ContextHandlerCollection contexts = new ContextHandlerCollection();
    webServer.setHandler(contexts);

    webAppContext = new WebAppContext();
    // NOTE(review): "WepAppsContext" is a long-standing typo in the display name;
    // left as-is since it is a runtime string.
    webAppContext.setDisplayName("WepAppsContext");
    webAppContext.setContextPath("/");
    webAppContext.setWar(appDir + "/" + name);
    webAppContext.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf);
    webAppContext.getServletContext().setAttribute(ADMINS_ACL, adminsAcl);
    webServer.addHandler(webAppContext);

    addDefaultApps(contexts, appDir);

    // krb5Filter is defined (no URL mapping yet); addInternalServlet maps it on demand.
    defineFilter(webAppContext, "krb5Filter",
        Krb5AndCertsSslSocketConnector.Krb5SslFilter.class.getName(),
        null, null);

    addGlobalFilter("safety", QuotingInputFilter.class.getName(), null);
    final FilterInitializer[] initializers = getFilterInitializers(conf);
    if (initializers != null) {
      for(FilterInitializer c : initializers) {
        c.initFilter(this, conf);
      }
    }
    addDefaultServlets();
  }

  /**
   * Create a required listener for the Jetty instance listening on the port
   * provided. This wrapper and all subclasses must create at least one
   * listener.
   */
  public Connector createBaseListener(Configuration conf)
      throws IOException {
    return HttpServer.createDefaultChannelConnector();
  }

  // LimitedPrivate for creating secure datanodes
  public static Connector createDefaultChannelConnector() {
    SelectChannelConnector ret = new SelectChannelConnector();
    ret.setLowResourceMaxIdleTime(10000);
    ret.setAcceptQueueSize(128);
    ret.setResolveNames(false);
    ret.setUseDirectBuffers(false);
    return ret;
  }

  /** Get an array of FilterConfiguration specified in the conf */
  private static FilterInitializer[] getFilterInitializers(Configuration conf) {
    if (conf == null) {
      return null;
    }

    Class<?>[] classes = conf.getClasses(FILTER_INITIALIZER_PROPERTY);
    if (classes == null) {
      return null;
    }

    FilterInitializer[] initializers = new FilterInitializer[classes.length];
    for(int i = 0; i < classes.length; i++) {
      initializers[i] = (FilterInitializer)ReflectionUtils.newInstance(
          classes[i], conf);
    }
    return initializers;
  }

  /**
   * Add default apps.
   * @param appDir The application directory
   * @throws IOException
   */
  protected void addDefaultApps(ContextHandlerCollection parent,
      final String appDir) throws IOException {
    // set up the context for "/logs/" if "hadoop.log.dir" property is defined.
    String logDir = System.getProperty("hadoop.log.dir");
    if (logDir != null) {
      Context logContext = new Context(parent, "/logs");
      logContext.setResourceBase(logDir);
      // Log browsing requires admin access (see AdminAuthorizedServlet).
      logContext.addServlet(AdminAuthorizedServlet.class, "/");
      logContext.setDisplayName("logs");
      setContextAttributes(logContext);
      defaultContexts.put(logContext, true);
    }
    // set up the context for "/static/*"
    Context staticContext = new Context(parent, "/static");
    staticContext.setResourceBase(appDir + "/static");
    staticContext.addServlet(DefaultServlet.class, "/*");
    staticContext.setDisplayName("static");
    setContextAttributes(staticContext);
    defaultContexts.put(staticContext, true);
  }

  // Publishes the daemon Configuration and admin ACL on a context.
  private void setContextAttributes(Context context) {
    context.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf);
    context.getServletContext().setAttribute(ADMINS_ACL, adminsAcl);
  }

  /**
   * Add default servlets.
   */
  protected void addDefaultServlets() {
    // set up default servlets
    addServlet("stacks", "/stacks", StackServlet.class);
    addServlet("logLevel", "/logLevel", LogLevel.Servlet.class);
  }

  public void addContext(Context ctxt, boolean isFiltered)
      throws IOException {
    webServer.addHandler(ctxt);
    defaultContexts.put(ctxt, isFiltered);
  }

  /**
   * Add a context
   * @param pathSpec The path spec for the context
   * @param dir The directory containing the context
   * @param isFiltered if true, the servlet is added to the filter path mapping
   * @throws IOException
   */
  protected void addContext(String pathSpec, String dir, boolean isFiltered) throws IOException {
    if (0 == webServer.getHandlers().length) {
      throw new RuntimeException("Couldn't find handler");
    }
    WebAppContext webAppCtx = new WebAppContext();
    webAppCtx.setContextPath(pathSpec);
    webAppCtx.setWar(dir);
    // NOTE(review): the isFiltered parameter is ignored here; true is always passed.
    addContext(webAppCtx, true);
  }

  /**
   * Set a value in the webapp context. These values are available to the jsp
   * pages as "application.getAttribute(name)".
   * @param name The name of the attribute
   * @param value The value of the attribute
   */
  public void setAttribute(String name, Object value) {
    setAttribute(webAppContext, name, value);
  }

  /**
   * Set a value in the webapp context. These values are available to the jsp
   * pages as "application.getAttribute(name)".
   * @param context Context to add attribute
   * @param name The name of the attribute
   * @param value The value of the attribute
   */
  public void setAttribute(Context context, String name, Object value) {
    context.setAttribute(name, value);
  }

  /**
   * Add a servlet in the server.
   * @param name The name of the servlet (can be passed as null)
   * @param pathSpec The path spec for the servlet
   * @param clazz The servlet class
   */
  public void addServlet(String name, String pathSpec,
      Class<? extends HttpServlet> clazz) {
    addInternalServlet(name, pathSpec, clazz, false);
    addFilterPathMapping(pathSpec, webAppContext);
  }

  /**
   * Add an internal servlet in the server.
   * @param name The name of the servlet (can be passed as null)
   * @param pathSpec The path spec for the servlet
   * @param clazz The servlet class
   * @deprecated this is a temporary method
   */
  @Deprecated
  public void addInternalServlet(String name, String pathSpec,
      Class<? extends HttpServlet> clazz) {
    addInternalServlet(name, pathSpec, clazz, false);
  }

  /**
   * Add an internal servlet in the server, specifying whether or not to
   * protect with Kerberos authentication.
   * Note: This method is to be used for adding servlets that facilitate
   * internal communication and not for user facing functionality. For
   * servlets added using this method, filters (except internal Kerberized
   * filters) are not enabled.
   *
   * @param name The name of the servlet (can be passed as null)
   * @param pathSpec The path spec for the servlet
   * @param clazz The servlet class
   */
  public void addInternalServlet(String name, String pathSpec,
      Class<? extends HttpServlet> clazz, boolean requireAuth) {
    ServletHolder holder = new ServletHolder(clazz);
    if (name != null) {
      holder.setName(name);
    }
    webAppContext.addServlet(holder, pathSpec);

    if(requireAuth && UserGroupInformation.isSecurityEnabled()) {
       LOG.info("Adding Kerberos filter to " + name);
       ServletHandler handler = webAppContext.getServletHandler();
       FilterMapping fmap = new FilterMapping();
       fmap.setPathSpec(pathSpec);
       fmap.setFilterName("krb5Filter");
       fmap.setDispatches(Handler.ALL);
       handler.addFilterMapping(fmap);
    }
  }

  /** {@inheritDoc} */
  public void addFilter(String name, String classname,
      Map<String, String> parameters) {
    // User-facing URLs get the filter on the main webapp; filtered default
    // contexts get it on every URL.
    final String[] USER_FACING_URLS = { "*.html", "*.jsp" };
    defineFilter(webAppContext, name, classname, parameters, USER_FACING_URLS);
    LOG.info("Added filter " + name + " (class=" + classname
        + ") to context " + webAppContext.getDisplayName());
    final String[] ALL_URLS = { "/*" };
    for (Map.Entry<Context, Boolean> e : defaultContexts.entrySet()) {
      if (e.getValue()) {
        Context ctx = e.getKey();
        defineFilter(ctx, name, classname, parameters, ALL_URLS);
        LOG.info("Added filter " + name + " (class=" + classname
            + ") to context " + ctx.getDisplayName());
      }
    }
    filterNames.add(name);
  }

  /** {@inheritDoc} */
  public void addGlobalFilter(String name, String classname,
      Map<String, String> parameters) {
    final String[] ALL_URLS = { "/*" };
    defineFilter(webAppContext, name, classname, parameters, ALL_URLS);
    for (Context ctx : defaultContexts.keySet()) {
      defineFilter(ctx, name, classname, parameters, ALL_URLS);
    }
    LOG.info("Added global filter" + name + " (class=" + classname + ")");
  }

  /**
   * Define a filter for a context and set up default url mappings.
   */
  protected void defineFilter(Context ctx, String name,
      String classname, Map<String,String> parameters, String[] urls) {

    FilterHolder holder = new FilterHolder();
    holder.setName(name);
    holder.setClassName(classname);
    holder.setInitParameters(parameters);
    FilterMapping fmap = new FilterMapping();
    fmap.setPathSpecs(urls);
    fmap.setDispatches(Handler.ALL);
    fmap.setFilterName(name);
    ServletHandler handler = ctx.getServletHandler();
    handler.addFilter(holder, fmap);
  }

  /**
   * Add the path spec to the filter path mapping.
   * @param pathSpec The path spec
   * @param webAppCtx The WebApplicationContext to add to
   */
  protected void addFilterPathMapping(String pathSpec,
      Context webAppCtx) {
    ServletHandler handler = webAppCtx.getServletHandler();
    // Map every previously registered (non-global) filter onto this path.
    for(String name : filterNames) {
      FilterMapping fmap = new FilterMapping();
      fmap.setPathSpec(pathSpec);
      fmap.setFilterName(name);
      fmap.setDispatches(Handler.ALL);
      handler.addFilterMapping(fmap);
    }
  }

  /**
   * Get the value in the webapp context.
   * @param name The name of the attribute
   * @return The value of the attribute
   */
  public Object getAttribute(String name) {
    return webAppContext.getAttribute(name);
  }

  /**
   * Get the pathname to the webapps files.
   * @return the pathname as a URL
   * @throws IOException if 'webapps' directory cannot be found on CLASSPATH.
   */
  protected String getWebAppsPath() throws IOException {
    URL url = getClass().getClassLoader().getResource("webapps");
    if (url == null)
      throw new IOException("webapps not found in CLASSPATH");
    return url.toString();
  }

  /**
   * Get the port that the server is on
   * @return the port
   */
  public int getPort() {
    // Reports the first connector's bound port.
    return webServer.getConnectors()[0].getLocalPort();
  }

  /**
   * Set the min, max number of worker threads (simultaneous connections).
   */
  public void setThreads(int min, int max) {
    QueuedThreadPool pool = (QueuedThreadPool) webServer.getThreadPool() ;
    pool.setMinThreads(min);
    pool.setMaxThreads(max);
  }

  /**
   * Configure an ssl listener on the server.
   * @param addr address to listen on
   * @param keystore location of the keystore
   * @param storPass password for the keystore
   * @param keyPass password for the key
   * @deprecated Use {@link #addSslListener(InetSocketAddress, Configuration, boolean)}
   */
  @Deprecated
  public void addSslListener(InetSocketAddress addr, String keystore,
      String storPass, String keyPass) throws IOException {
    if (webServer.isStarted()) {
      throw new IOException("Failed to add ssl listener");
    }
    SslSocketConnector sslListener = new SslSocketConnector();
    sslListener.setHost(addr.getHostName());
    sslListener.setPort(addr.getPort());
    sslListener.setKeystore(keystore);
    sslListener.setPassword(storPass);
    sslListener.setKeyPassword(keyPass);
    webServer.addConnector(sslListener);
  }

  /**
   * Configure an ssl listener on the server.
   * @param addr address to listen on
   * @param sslConf conf to retrieve ssl options
   * @param needClientAuth whether client authentication is required
   */
  public void addSslListener(InetSocketAddress addr, Configuration sslConf,
      boolean needClientAuth) throws IOException {
    addSslListener(addr, sslConf, needClientAuth, false);
  }

  /**
   * Configure an ssl listener on the server.
   * @param addr address to listen on
   * @param sslConf conf to retrieve ssl options
   * @param needCertsAuth whether x509 certificate authentication is required
   * @param needKrbAuth whether to allow kerberos auth
   */
  public void addSslListener(InetSocketAddress addr, Configuration sslConf,
      boolean needCertsAuth, boolean needKrbAuth) throws IOException {
    if (webServer.isStarted()) {
      throw new IOException("Failed to add ssl listener");
    }
    if (needCertsAuth) {
      // setting up SSL truststore for authenticating clients
      System.setProperty("javax.net.ssl.trustStore", sslConf.get(
          "ssl.server.truststore.location", ""));
      System.setProperty("javax.net.ssl.trustStorePassword", sslConf.get(
          "ssl.server.truststore.password", ""));
      System.setProperty("javax.net.ssl.trustStoreType", sslConf.get(
          "ssl.server.truststore.type", "jks"));
    }
    Krb5AndCertsSslSocketConnector.MODE mode;
    if(needCertsAuth && needKrbAuth)
      mode = MODE.BOTH;
    else if (!needCertsAuth && needKrbAuth)
      mode = MODE.KRB;
    else // Default to certificates
      mode = MODE.CERTS;

    SslSocketConnector sslListener = new Krb5AndCertsSslSocketConnector(mode);
    sslListener.setHost(addr.getHostName());
    sslListener.setPort(addr.getPort());
    sslListener.setKeystore(sslConf.get("ssl.server.keystore.location"));
    sslListener.setPassword(sslConf.get("ssl.server.keystore.password", ""));
    sslListener.setKeyPassword(sslConf.get("ssl.server.keystore.keypassword", ""));
    sslListener.setKeystoreType(sslConf.get("ssl.server.keystore.type", "jks"));
    sslListener.setNeedClientAuth(needCertsAuth);
    webServer.addConnector(sslListener);
  }

  /**
   * Start the server. Does not wait for the server to start.
   * For an internally created listener this includes the HADOOP-4744 retry/bounce
   * workaround for getLocalPort() returning a negative value, and (when findPort
   * is set) incrementing the port on BindException until one binds.
   */
  public void start() throws IOException {
    try {
      if(listenerStartedExternally) { // Expect that listener was started securely
        if(listener.getLocalPort() == -1) // ... and verify
          throw new Exception("Exepected webserver's listener to be started" +
             "previously but wasn't");
        // And skip all the port rolling issues.
        webServer.start();
      } else {
        int port = 0;
        int oriPort = listener.getPort(); // The original requested port
        while (true) {
          try {
            port = webServer.getConnectors()[0].getLocalPort();
            LOG.info("Port returned by webServer.getConnectors()[0]." +
            		"getLocalPort() before open() is "+ port + 
            		". Opening the listener on " + oriPort);
            listener.open();
            port = listener.getLocalPort();
            LOG.info("listener.getLocalPort() returned " + listener.getLocalPort() + 
                  " webServer.getConnectors()[0].getLocalPort() returned " +
                  webServer.getConnectors()[0].getLocalPort());
            //Workaround to handle the problem reported in HADOOP-4744
            if (port < 0) {
              Thread.sleep(100);
              int numRetries = 1;
              while (port < 0) {
                LOG.warn("listener.getLocalPort returned " + port);
                if (numRetries++ > MAX_RETRIES) {
                  throw new Exception(" listener.getLocalPort is returning " +
                  		"less than 0 even after " +numRetries+" resets");
                }
                for (int i = 0; i < 2; i++) {
                  LOG.info("Retrying listener.getLocalPort()");
                  port = listener.getLocalPort();
                  if (port > 0) {
                    break;
                  }
                  Thread.sleep(200);
                }
                if (port > 0) {
                  break;
                }
                // Reopen the listener on a (possibly incremented) port.
                LOG.info("Bouncing the listener");
                listener.close();
                Thread.sleep(1000);
                listener.setPort(oriPort == 0 ? 0 : (oriPort += 1));
                listener.open();
                Thread.sleep(100);
                port = listener.getLocalPort();
              }
            } //Workaround end
            LOG.info("Jetty bound to port " + port);
            webServer.start();
            break;
          } catch (IOException ex) {
            // if this is a bind exception,
            // then try the next port number.
            if (ex instanceof BindException) {
              if (!findPort) {
                throw (BindException) ex;
              }
            } else {
              LOG.info("HttpServer.start() threw a non Bind IOException"); 
              throw ex;
            }
          } catch (MultiException ex) {
            LOG.info("HttpServer.start() threw a MultiException"); 
            throw ex;
          }
          listener.setPort((oriPort += 1));
        }
      }
    } catch (IOException e) {
      throw e;
    } catch (Exception e) {
      // Wrap InterruptedException / the verification Exception above.
      throw new IOException("Problem starting http server", e);
    }
  }

  /**
   * stop the server
   */
  public void stop() throws Exception {
    listener.close();
    webServer.stop();
  }

  // Blocks until the web server's threads exit.
  public void join() throws InterruptedException {
    webServer.join();
  }

  /**
   * Does the user sending the HttpServletRequest has the administrator ACLs? If
   * it isn't the case, response will be modified to send an error to the user.
   *
   * @param servletContext
   * @param request
   * @param response
   * @return true if admin-authorized, false otherwise
   * @throws IOException
   */
  public static boolean hasAdministratorAccess(
      ServletContext servletContext, HttpServletRequest request,
      HttpServletResponse response) throws IOException {
    Configuration conf =
        (Configuration) servletContext.getAttribute(CONF_CONTEXT_ATTRIBUTE);

    // If there is no authorization, anybody has administrator access.
    if (!conf.getBoolean(
        CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, false)) {
      return true;
    }

    // NOTE(review): a request with no remote user is granted access here —
    // confirm this is the intended open-by-default behavior.
    String remoteUser = request.getRemoteUser();
    if (remoteUser == null) {
      return true;
    }
    AccessControlList adminsAcl = (AccessControlList) servletContext
        .getAttribute(ADMINS_ACL);
    UserGroupInformation remoteUserUGI =
        UserGroupInformation.createRemoteUser(remoteUser);
    if (adminsAcl != null) {
      if (!adminsAcl.isUserAllowed(remoteUserUGI)) {
        response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "User "
            + remoteUser + " is unauthorized to access this page. "
            + "AccessControlList for accessing this page : "
            + adminsAcl.toString());
        return false;
      }
    }
    return true;
  }

  /**
   * A very simple servlet to serve up a text representation of the current
   * stack traces.
   It both returns the stacks to the caller and logs them.
   * Currently the stack traces are done sequentially rather than exactly the
   * same data.
   */
  public static class StackServlet extends HttpServlet {
    private static final long serialVersionUID = -6284183679759467039L;

    @Override
    public void doGet(HttpServletRequest request, HttpServletResponse response)
      throws ServletException, IOException {
      // Do the authorization
      if (!HttpServer.hasAdministratorAccess(getServletContext(), request,
          response)) {
        return;
      }
      // Output is HTML-quoted to keep the stack dump safe to render.
      PrintWriter out = new PrintWriter
                    (HtmlQuoting.quoteOutputStream(response.getOutputStream()));
      ReflectionUtils.printThreadInfo(out, "");
      out.close();
      ReflectionUtils.logThreadInfo(LOG, "jsp requested", 1);      
    }
  }

  /**
   * A Servlet input filter that quotes all HTML active characters in the
   * parameter names and values. The goal is to quote the characters to make
   * all of the servlets resistant to cross-site scripting attacks.
   */
  public static class QuotingInputFilter implements Filter {

    // Wraps a request so every parameter name/value read through it is HTML-quoted.
    public static class RequestQuoter extends HttpServletRequestWrapper {
      private final HttpServletRequest rawRequest;
      public RequestQuoter(HttpServletRequest rawRequest) {
        super(rawRequest);
        this.rawRequest = rawRequest;
      }

      /**
       * Return the set of parameter names, quoting each name.
       */
      @SuppressWarnings("unchecked")
      @Override
      public Enumeration<String> getParameterNames() {
        return new Enumeration<String>() {
          private Enumeration<String> rawIterator = 
            rawRequest.getParameterNames();
          @Override
          public boolean hasMoreElements() {
            return rawIterator.hasMoreElements();
          }

          @Override
          public String nextElement() {
            return HtmlQuoting.quoteHtmlChars(rawIterator.nextElement());
          }
        };
      }

      /**
       * Unquote the name and quote the value.
       */
      @Override
      public String getParameter(String name) {
        // The incoming name is quoted (callers see quoted names), so unquote
        // before the raw lookup, then quote the returned value.
        return HtmlQuoting.quoteHtmlChars(rawRequest.getParameter
                                     (HtmlQuoting.unquoteHtmlChars(name)));
      }

      @Override
      public String[] getParameterValues(String name) {
        String unquoteName = HtmlQuoting.unquoteHtmlChars(name);
        String[] unquoteValue = rawRequest.getParameterValues(unquoteName);
        // NOTE(review): unquoteValue may be null for an unknown parameter —
        // confirm callers never hit that path, as this would NPE.
        String[] result = new String[unquoteValue.length];
        for(int i=0; i < result.length; ++i) {
          result[i] = HtmlQuoting.quoteHtmlChars(unquoteValue[i]);
        }
        return result;
      }

      @SuppressWarnings("unchecked")
      @Override
      public Map<String, String[]> getParameterMap() {
        Map<String, String[]> result = new HashMap<String,String[]>();
        Map<String, String[]> raw = rawRequest.getParameterMap();
        for (Map.Entry<String,String[]> item: raw.entrySet()) {
          String[] rawValue = item.getValue();
          String[] cookedValue = new String[rawValue.length];
          for(int i=0; i< rawValue.length; ++i) {
            cookedValue[i] = HtmlQuoting.quoteHtmlChars(rawValue[i]);
          }
          result.put(HtmlQuoting.quoteHtmlChars(item.getKey()), cookedValue);
        }
        return result;
      }

      /**
       * Quote the url so that users specifying the HOST HTTP header
       * can't inject attacks.
       */
      @Override
      public StringBuffer getRequestURL(){
        String url = rawRequest.getRequestURL().toString();
        return new StringBuffer(HtmlQuoting.quoteHtmlChars(url));
      }

      /**
       * Quote the server name so that users specifying the HOST HTTP header
       * can't inject attacks.
       */
      @Override
      public String getServerName() {
        return HtmlQuoting.quoteHtmlChars(rawRequest.getServerName());
      }
    }

    @Override
    public void init(FilterConfig config) throws ServletException {
    }

    @Override
    public void destroy() {
    }

    @Override
    public void doFilter(ServletRequest request, 
                         ServletResponse response,
                         FilterChain chain
                         ) throws IOException, ServletException {
      HttpServletRequestWrapper quoted = 
        new RequestQuoter((HttpServletRequest) request);
      final HttpServletResponse httpResponse = (HttpServletResponse) response;
      // set the default to UTF-8 so that we don't need to worry about IE7
      // choosing to interpret the special characters as UTF-7
      httpResponse.setContentType("text/html;charset=utf-8");
      chain.doFilter(quoted, response);
    }

  }
}
/*
 * Copyright 2014-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.facebook.buck.python;

import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargets;
import com.facebook.buck.model.Pair;
import com.facebook.buck.rules.AbstractBuildRule;
import com.facebook.buck.rules.AddToRuleKey;
import com.facebook.buck.rules.BinaryBuildRule;
import com.facebook.buck.rules.BuildContext;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleParams;
import com.facebook.buck.rules.BuildableContext;
import com.facebook.buck.rules.ExternalTestRunnerRule;
import com.facebook.buck.rules.ExternalTestRunnerTestSpec;
import com.facebook.buck.rules.ForwardingBuildTargetSourcePath;
import com.facebook.buck.rules.HasRuntimeDeps;
import com.facebook.buck.rules.Label;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.SourcePathRuleFinder;
import com.facebook.buck.rules.TestRule;
import com.facebook.buck.rules.Tool;
import com.facebook.buck.step.ExecutionContext;
import com.facebook.buck.step.Step;
import com.facebook.buck.step.fs.MakeCleanDirectoryStep;
import com.facebook.buck.test.TestCaseSummary;
import com.facebook.buck.test.TestResultSummary;
import com.facebook.buck.test.TestResults;
import com.facebook.buck.test.TestRunningOptions;
import com.facebook.buck.util.MoreCollectors;
import com.facebook.buck.util.ObjectMappers;
import com.facebook.buck.util.RichStream;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import java.nio.file.Path;
import java.util.Optional;
import java.util.concurrent.Callable;
import java.util.stream.Stream;

/**
 * A python_test build rule. The rule itself builds nothing (see
 * {@link #getBuildSteps}); it wraps a {@link PythonBinary} that is the actual
 * test executable, and supplies the steps and metadata needed to run it as a
 * test.
 */
@SuppressWarnings("PMD.TestClassWithoutTestCases")
public class PythonTest
    extends AbstractBuildRule
    implements TestRule, HasRuntimeDeps, ExternalTestRunnerRule, BinaryBuildRule {

  private final SourcePathRuleFinder ruleFinder;
  // Deps as declared on the original target, before they were replaced by the
  // wrapped binary in from(); kept only for runtime-dep computation.
  private final Supplier<ImmutableSortedSet<BuildRule>> originalDeclaredDeps;
  // Extra environment for the test process; part of the rule key.
  @AddToRuleKey
  private final Supplier<ImmutableMap<String, String>> env;
  // The underlying test executable; part of the rule key.
  @AddToRuleKey
  private final PythonBinary binary;
  private final ImmutableSet<Label> labels;
  private final Optional<Long> testRuleTimeoutMs;
  private final ImmutableSet<String> contacts;
  private final ImmutableList<Pair<Float, ImmutableSet<Path>>> neededCoverage;

  private PythonTest(
      BuildRuleParams params,
      SourcePathRuleFinder ruleFinder,
      Supplier<ImmutableSortedSet<BuildRule>> originalDeclaredDeps,
      Supplier<ImmutableMap<String, String>> env,
      PythonBinary binary,
      ImmutableSet<Label> labels,
      ImmutableList<Pair<Float, ImmutableSet<Path>>> neededCoverage,
      Optional<Long> testRuleTimeoutMs,
      ImmutableSet<String> contacts) {
    super(params);
    this.ruleFinder = ruleFinder;
    this.originalDeclaredDeps = originalDeclaredDeps;
    this.env = env;
    this.binary = binary;
    this.labels = labels;
    this.neededCoverage = neededCoverage;
    this.testRuleTimeoutMs = testRuleTimeoutMs;
    this.contacts = contacts;
  }

  /**
   * Factory: rewrites the rule's declared deps to just the wrapped binary
   * (the original declared deps are preserved separately for
   * {@link #getRuntimeDeps}).
   */
  public static PythonTest from(
      BuildRuleParams params,
      SourcePathRuleFinder ruleFinder,
      Supplier<ImmutableMap<String, String>> env,
      PythonBinary binary,
      ImmutableSet<Label> labels,
      ImmutableList<Pair<Float, ImmutableSet<Path>>> neededCoverage,
      Optional<Long> testRuleTimeoutMs,
      ImmutableSet<String> contacts) {
    return new PythonTest(
        params.copyReplacingDeclaredAndExtraDeps(
            Suppliers.ofInstance(ImmutableSortedSet.of(binary)),
            Suppliers.ofInstance(ImmutableSortedSet.of())),
        ruleFinder,
        params.getDeclaredDeps(),
        env,
        binary,
        labels,
        neededCoverage,
        testRuleTimeoutMs,
        contacts);
  }

  /** This rule builds nothing itself; the wrapped binary does the work. */
  @Override
  public ImmutableList<Step> getBuildSteps(
      BuildContext context,
      BuildableContext buildableContext) {
    return ImmutableList.of();
  }

  /** Forwards to the wrapped binary's output. */
  @Override
  public SourcePath getSourcePathToOutput() {
    return new ForwardingBuildTargetSourcePath(getBuildTarget(), binary.getSourcePathToOutput());
  }

  /**
   * Steps to run the test: clean the output dir, then execute the binary via
   * {@code PythonRunTestsStep}, writing results to
   * {@link #getPathToTestOutputResult()}.
   */
  @Override
  public ImmutableList<Step> runTests(
      ExecutionContext executionContext,
      TestRunningOptions options,
      SourcePathResolver pathResolver,
      TestReportingCallback testReportingCallback) {
    return new ImmutableList.Builder<Step>()
        .addAll(MakeCleanDirectoryStep.of(getProjectFilesystem(), getPathToTestOutputDirectory()))
        .add(new PythonRunTestsStep(
            getProjectFilesystem().getRootPath(),
            getBuildTarget().getFullyQualifiedName(),
            binary.getExecutableCommand().getCommandPrefix(pathResolver),
            getMergedEnv(pathResolver),
            options.getTestSelectorList(),
            testRuleTimeoutMs,
            getProjectFilesystem().resolve(getPathToTestOutputResult())))
        .build();
  }

  // Environment = binary's required env overlaid with the user-specified env.
  private ImmutableMap<String, String> getMergedEnv(SourcePathResolver pathResolver) {
    return new ImmutableMap.Builder<String, String>()
        .putAll(binary.getExecutableCommand().getEnvironment(pathResolver))
        .putAll(env.get())
        .build();
  }

  @Override
  public ImmutableSet<String> getContacts() {
    return contacts;
  }

  @Override
  public Path getPathToTestOutputDirectory() {
    return BuildTargets.getGenPath(
        getProjectFilesystem(),
        getBuildTarget(),
        "__test_%s_output__");
  }

  /** JSON results file written by the run-tests step. */
  public Path getPathToTestOutputResult() {
    return getPathToTestOutputDirectory().resolve("results.json");
  }

  @Override
  public boolean hasTestResultFiles() {
    return getProjectFilesystem().isFile(getPathToTestOutputResult());
  }

  @Override
  public ImmutableSet<Label> getLabels() {
    return labels;
  }

  /**
   * Parses the results.json written by the test run into {@link TestResults}.
   */
  @Override
  public Callable<TestResults> interpretTestResults(
      final ExecutionContext executionContext,
      boolean isUsingTestSelectors) {
    return () -> {
      Optional<String> resultsFileContents =
          getProjectFilesystem().readFileIfItExists(
              getPathToTestOutputResult());
      // NOTE(review): unchecked Optional.get() — throws NoSuchElementException
      // if the results file was never written (e.g. the test crashed before
      // reporting); confirm callers only invoke this after a run.
      TestResultSummary[] testResultSummaries =
          ObjectMappers.readValue(
              resultsFileContents.get(),
              TestResultSummary[].class);
      return TestResults.of(
          getBuildTarget(),
          ImmutableList.of(
              new TestCaseSummary(
                  getBuildTarget().getFullyQualifiedName(),
                  ImmutableList.copyOf(testResultSummaries))),
          contacts,
          labels.stream()
              .map(Object::toString)
              .collect(MoreCollectors.toImmutableSet()));
    };
  }

  @Override
  public boolean runTestSeparately() {
    return false;
  }

  // A python test rule is actually just a {@link NoopBuildRule} which contains a references to
  // a {@link PythonBinary} rule, which is the actual test binary.  Therefore, we *need* this
  // rule around to run this test, so model this via the {@link HasRuntimeDeps} interface.
  @Override
  public Stream<BuildTarget> getRuntimeDeps() {
    return RichStream.<BuildTarget>empty()
        .concat(originalDeclaredDeps.get().stream().map(BuildRule::getBuildTarget))
        .concat(binary.getRuntimeDeps())
        .concat(
            binary.getExecutableCommand().getDeps(ruleFinder).stream()
                .map(BuildRule::getBuildTarget));
  }

  @Override
  public boolean supportsStreamingTests() {
    return false;
  }

  @VisibleForTesting
  protected PythonBinary getBinary() {
    return binary;
  }

  @Override
  public Tool getExecutableCommand() {
    return binary.getExecutableCommand();
  }

  /** Spec consumed by an external test runner instead of {@link #runTests}. */
  @Override
  public ExternalTestRunnerTestSpec getExternalTestRunnerSpec(
      ExecutionContext executionContext,
      TestRunningOptions testRunningOptions,
      SourcePathResolver pathResolver) {
    return ExternalTestRunnerTestSpec.builder()
        .setTarget(getBuildTarget())
        .setType("pyunit")
        .setNeededCoverage(neededCoverage)
        .addAllCommand(binary.getExecutableCommand().getCommandPrefix(pathResolver))
        .putAllEnv(getMergedEnv(pathResolver))
        .addAllLabels(getLabels())
        .addAllContacts(getContacts())
        .build();
  }
}
/*
 * Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.redshift.model;

import java.io.Serializable;

/**
 * <p>
 * Describes a subnet group.
 * </p>
 * <p>
 * NOTE(review): generated AWS SDK model class — keep edits consistent with
 * the code generator's conventions.
 * </p>
 */
public class ClusterSubnetGroup implements Serializable, Cloneable {

    /**
     * The name of the cluster subnet group.
     */
    private String clusterSubnetGroupName;

    /**
     * The description of the cluster subnet group.
     */
    private String description;

    /**
     * The VPC ID of the cluster subnet group.
     */
    private String vpcId;

    /**
     * The status of the cluster subnet group. Possible values are
     * <code>Complete</code>, <code>Incomplete</code> and
     * <code>Invalid</code>.
     */
    private String subnetGroupStatus;

    /**
     * A list of the VPC <a>Subnet</a> elements.
     */
    private com.amazonaws.internal.ListWithAutoConstructFlag<Subnet> subnets;

    /**
     * The list of tags for the cluster subnet group.
     */
    private com.amazonaws.internal.ListWithAutoConstructFlag<Tag> tags;

    /**
     * The name of the cluster subnet group.
     *
     * @return The name of the cluster subnet group.
     */
    public String getClusterSubnetGroupName() {
        return clusterSubnetGroupName;
    }

    /**
     * The name of the cluster subnet group.
     *
     * @param clusterSubnetGroupName The name of the cluster subnet group.
     */
    public void setClusterSubnetGroupName(String clusterSubnetGroupName) {
        this.clusterSubnetGroupName = clusterSubnetGroupName;
    }

    /**
     * The name of the cluster subnet group.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param clusterSubnetGroupName The name of the cluster subnet group.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public ClusterSubnetGroup withClusterSubnetGroupName(String clusterSubnetGroupName) {
        this.clusterSubnetGroupName = clusterSubnetGroupName;
        return this;
    }

    /**
     * The description of the cluster subnet group.
     *
     * @return The description of the cluster subnet group.
     */
    public String getDescription() {
        return description;
    }

    /**
     * The description of the cluster subnet group.
     *
     * @param description The description of the cluster subnet group.
     */
    public void setDescription(String description) {
        this.description = description;
    }

    /**
     * The description of the cluster subnet group.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param description The description of the cluster subnet group.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public ClusterSubnetGroup withDescription(String description) {
        this.description = description;
        return this;
    }

    /**
     * The VPC ID of the cluster subnet group.
     *
     * @return The VPC ID of the cluster subnet group.
     */
    public String getVpcId() {
        return vpcId;
    }

    /**
     * The VPC ID of the cluster subnet group.
     *
     * @param vpcId The VPC ID of the cluster subnet group.
     */
    public void setVpcId(String vpcId) {
        this.vpcId = vpcId;
    }

    /**
     * The VPC ID of the cluster subnet group.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param vpcId The VPC ID of the cluster subnet group.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public ClusterSubnetGroup withVpcId(String vpcId) {
        this.vpcId = vpcId;
        return this;
    }

    /**
     * The status of the cluster subnet group. Possible values are
     * <code>Complete</code>, <code>Incomplete</code> and
     * <code>Invalid</code>.
     *
     * @return The status of the cluster subnet group. Possible values are
     *         <code>Complete</code>, <code>Incomplete</code> and
     *         <code>Invalid</code>.
     */
    public String getSubnetGroupStatus() {
        return subnetGroupStatus;
    }

    /**
     * The status of the cluster subnet group. Possible values are
     * <code>Complete</code>, <code>Incomplete</code> and
     * <code>Invalid</code>.
     *
     * @param subnetGroupStatus The status of the cluster subnet group. Possible values are
     *         <code>Complete</code>, <code>Incomplete</code> and
     *         <code>Invalid</code>.
     */
    public void setSubnetGroupStatus(String subnetGroupStatus) {
        this.subnetGroupStatus = subnetGroupStatus;
    }

    /**
     * The status of the cluster subnet group. Possible values are
     * <code>Complete</code>, <code>Incomplete</code> and
     * <code>Invalid</code>.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param subnetGroupStatus The status of the cluster subnet group. Possible values are
     *         <code>Complete</code>, <code>Incomplete</code> and
     *         <code>Invalid</code>.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public ClusterSubnetGroup withSubnetGroupStatus(String subnetGroupStatus) {
        this.subnetGroupStatus = subnetGroupStatus;
        return this;
    }

    /**
     * A list of the VPC <a>Subnet</a> elements.
     *
     * @return A list of the VPC <a>Subnet</a> elements.
     */
    public java.util.List<Subnet> getSubnets() {
        // Lazily auto-construct so callers never observe a null list.
        if (subnets == null) {
              subnets = new com.amazonaws.internal.ListWithAutoConstructFlag<Subnet>();
              subnets.setAutoConstruct(true);
        }
        return subnets;
    }

    /**
     * A list of the VPC <a>Subnet</a> elements.
     *
     * @param subnets A list of the VPC <a>Subnet</a> elements.
     */
    public void setSubnets(java.util.Collection<Subnet> subnets) {
        if (subnets == null) {
            this.subnets = null;
            return;
        }
        // Defensive copy: do not alias the caller's collection.
        com.amazonaws.internal.ListWithAutoConstructFlag<Subnet> subnetsCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<Subnet>(subnets.size());
        subnetsCopy.addAll(subnets);
        this.subnets = subnetsCopy;
    }

    /**
     * A list of the VPC <a>Subnet</a> elements.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if
     * any). Use {@link #setSubnets(java.util.Collection)} or {@link
     * #withSubnets(java.util.Collection)} if you want to override the
     * existing values.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param subnets A list of the VPC <a>Subnet</a> elements.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public ClusterSubnetGroup withSubnets(Subnet... subnets) {
        // NOTE(review): getSubnets() auto-constructs and never returns null,
        // so this presizing branch is effectively dead (generated idiom).
        if (getSubnets() == null) setSubnets(new java.util.ArrayList<Subnet>(subnets.length));
        for (Subnet value : subnets) {
            getSubnets().add(value);
        }
        return this;
    }

    /**
     * A list of the VPC <a>Subnet</a> elements.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param subnets A list of the VPC <a>Subnet</a> elements.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public ClusterSubnetGroup withSubnets(java.util.Collection<Subnet> subnets) {
        if (subnets == null) {
            this.subnets = null;
        } else {
            com.amazonaws.internal.ListWithAutoConstructFlag<Subnet> subnetsCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<Subnet>(subnets.size());
            subnetsCopy.addAll(subnets);
            this.subnets = subnetsCopy;
        }
        return this;
    }

    /**
     * The list of tags for the cluster subnet group.
     *
     * @return The list of tags for the cluster subnet group.
     */
    public java.util.List<Tag> getTags() {
        // Lazily auto-construct so callers never observe a null list.
        if (tags == null) {
              tags = new com.amazonaws.internal.ListWithAutoConstructFlag<Tag>();
              tags.setAutoConstruct(true);
        }
        return tags;
    }

    /**
     * The list of tags for the cluster subnet group.
     *
     * @param tags The list of tags for the cluster subnet group.
     */
    public void setTags(java.util.Collection<Tag> tags) {
        if (tags == null) {
            this.tags = null;
            return;
        }
        // Defensive copy: do not alias the caller's collection.
        com.amazonaws.internal.ListWithAutoConstructFlag<Tag> tagsCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<Tag>(tags.size());
        tagsCopy.addAll(tags);
        this.tags = tagsCopy;
    }

    /**
     * The list of tags for the cluster subnet group.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if
     * any). Use {@link #setTags(java.util.Collection)} or {@link
     * #withTags(java.util.Collection)} if you want to override the existing
     * values.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param tags The list of tags for the cluster subnet group.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public ClusterSubnetGroup withTags(Tag... tags) {
        // NOTE(review): getTags() auto-constructs and never returns null,
        // so this presizing branch is effectively dead (generated idiom).
        if (getTags() == null) setTags(new java.util.ArrayList<Tag>(tags.length));
        for (Tag value : tags) {
            getTags().add(value);
        }
        return this;
    }

    /**
     * The list of tags for the cluster subnet group.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param tags The list of tags for the cluster subnet group.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public ClusterSubnetGroup withTags(java.util.Collection<Tag> tags) {
        if (tags == null) {
            this.tags = null;
        } else {
            com.amazonaws.internal.ListWithAutoConstructFlag<Tag> tagsCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<Tag>(tags.size());
            tagsCopy.addAll(tags);
            this.tags = tagsCopy;
        }
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getClusterSubnetGroupName() != null) sb.append("ClusterSubnetGroupName: " + getClusterSubnetGroupName() + ",");
        if (getDescription() != null) sb.append("Description: " + getDescription() + ",");
        if (getVpcId() != null) sb.append("VpcId: " + getVpcId() + ",");
        if (getSubnetGroupStatus() != null) sb.append("SubnetGroupStatus: " + getSubnetGroupStatus() + ",");
        if (getSubnets() != null) sb.append("Subnets: " + getSubnets() + ",");
        if (getTags() != null) sb.append("Tags: " + getTags() );
        sb.append("}");
        return sb.toString();
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getClusterSubnetGroupName() == null) ? 0 : getClusterSubnetGroupName().hashCode());
        hashCode = prime * hashCode + ((getDescription() == null) ? 0 : getDescription().hashCode());
        hashCode = prime * hashCode + ((getVpcId() == null) ? 0 : getVpcId().hashCode());
        hashCode = prime * hashCode + ((getSubnetGroupStatus() == null) ? 0 : getSubnetGroupStatus().hashCode());
        hashCode = prime * hashCode + ((getSubnets() == null) ? 0 : getSubnets().hashCode());
        hashCode = prime * hashCode + ((getTags() == null) ? 0 : getTags().hashCode());
        return hashCode;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        if (obj == null) return false;

        if (obj instanceof ClusterSubnetGroup == false) return false;
        ClusterSubnetGroup other = (ClusterSubnetGroup)obj;

        // Generated pattern: XOR detects exactly-one-null; the second check
        // compares values when both are non-null.
        if (other.getClusterSubnetGroupName() == null ^ this.getClusterSubnetGroupName() == null) return false;
        if (other.getClusterSubnetGroupName() != null && other.getClusterSubnetGroupName().equals(this.getClusterSubnetGroupName()) == false) return false;
        if (other.getDescription() == null ^ this.getDescription() == null) return false;
        if (other.getDescription() != null && other.getDescription().equals(this.getDescription()) == false) return false;
        if (other.getVpcId() == null ^ this.getVpcId() == null) return false;
        if (other.getVpcId() != null && other.getVpcId().equals(this.getVpcId()) == false) return false;
        if (other.getSubnetGroupStatus() == null ^ this.getSubnetGroupStatus() == null) return false;
        if (other.getSubnetGroupStatus() != null && other.getSubnetGroupStatus().equals(this.getSubnetGroupStatus()) == false) return false;
        if (other.getSubnets() == null ^ this.getSubnets() == null) return false;
        if (other.getSubnets() != null && other.getSubnets().equals(this.getSubnets()) == false) return false;
        if (other.getTags() == null ^ this.getTags() == null) return false;
        if (other.getTags() != null && other.getTags().equals(this.getTags()) == false) return false;
        return true;
    }

    @Override
    public ClusterSubnetGroup clone() {
        try {
            // Shallow clone is sufficient for the generated model contract.
            return (ClusterSubnetGroup) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                    + "even though we're Cloneable!",
                    e);
        }
    }
}
/*
 * AnnotationHandler.java December 2009
 *
 * Copyright (C) 2009, Niall Gallagher <niallg@users.sf.net>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package org.simpleframework.xml.core;

import java.lang.annotation.Annotation;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;

/**
 * The <code>AnnotationHandler</code> object is used to handle all
 * invocation made on a synthetic annotation. This is required so
 * that annotations can be created without an implementation. The
 * <code>java.lang.reflect.Proxy</code> object is used to wrap this
 * invocation handler with the annotation interface.
 *
 * @author Niall Gallagher
 */
class AnnotationHandler implements InvocationHandler {

   /**
    * This is the method used to acquire the associated type.
    */
   private static final String CLASS = "annotationType";

   /**
    * This is used to acquire a string value for the annotation.
    */
   private static final String STRING = "toString";

   /**
    * This is used to acquire a hash code for the annotation.
    */
   private static final String HASH = "hashCode";

   /**
    * This is used to determine if annotations are optional.
    */
   private static final String REQUIRED = "required";

   /**
    * This is used to perform a comparison of the annotations.
    */
   private static final String EQUAL = "equals";

   /**
    * This is used to perform a comparison of the annotations.
    */
   private final Comparer comparer;

   /**
    * This is annotation type associated with this handler.
    */
   private final Class type;

   /**
    * This is used to determine if the annotation is required.
    */
   private final boolean required;

   /**
    * Constructor for the <code>AnnotationHandler</code> object. This
    * is used to create a handler for invocations on a synthetic
    * annotation. The annotation type wrapped must be provided. By
    * default the requirement of the annotations is true.
    *
    * @param type this is the annotation type that this is wrapping
    */
   public AnnotationHandler(Class type) {
      this(type, true);
   }

   /**
    * Constructor for the <code>AnnotationHandler</code> object. This
    * is used to create a handler for invocations on a synthetic
    * annotation. The annotation type wrapped must be provided.
    *
    * @param type this is the annotation type that this is wrapping
    * @param required this is used to determine if its required
    */
   public AnnotationHandler(Class type, boolean required) {
      this.comparer = new Comparer();
      this.required = required;
      this.type = type;
   }

   /**
    * This is used to handle all invocations on the wrapped annotation.
    * Typically the response to an invocation will result in the
    * default value of the annotation attribute being returned. If the
    * method is an <code>equals</code>, <code>toString</code>, or
    * <code>hashCode</code> then this will be handled by an internal
    * implementation.
    *
    * @param proxy this is the proxy object the invocation was made on
    * @param method this is the method that was invoked on the proxy
    * @param list this is the list of parameters to be used
    *
    * @return this is used to return the result of the invocation
    */
   public Object invoke(Object proxy, Method method, Object[] list) throws Throwable {
      String name = method.getName();

      if(name.equals(STRING)) {
         return toString();
      }
      if(name.equals(EQUAL)) {
         return equals(proxy, list);
      }
      if(name.equals(HASH)) {
         return hash();
      }
      if(name.equals(CLASS)) {
         return type;
      }
      if(name.equals(REQUIRED)) {
         return required;
      }
      return method.getDefaultValue();
   }

   /**
    * This is used to calculate a hash code for the synthetic
    * annotation. The calculation follows the contract documented on
    * <code>java.lang.annotation.Annotation</code>: the sum over all
    * attributes of 127 times the attribute name hash XORed with the
    * attribute value hash. Without this branch a <code>hashCode</code>
    * call on the proxy fell through to <code>getDefaultValue</code>,
    * which is null for <code>hashCode</code>, and the caller's unboxing
    * of the result to an int threw a NullPointerException.
    *
    * @return a hash code derived from the annotation attributes
    */
   private int hash() {
      Method[] list = type.getDeclaredMethods();
      int hash = 0;

      for(Method entry : list) {
         Object value = value(entry);
         int seed = 127 * entry.getName().hashCode();

         // Null defaults contribute only the name hash; array valued
         // defaults hash by identity, which is stable for the single
         // default instance held by the annotation type.
         if(value != null) {
            hash += seed ^ value.hashCode();
         } else {
            hash += seed;
         }
      }
      return hash;
   }

   /**
    * This is used to determine if two annotations are equals based
    * on the attributes of the annotation. The comparison done can
    * ignore specific attributes, for instance the name attribute.
    *
    * @param proxy this is the annotation the invocation was made on
    * @param list this is the parameters provided to the invocation
    *
    * @return this returns true if the annotations are equals
    */
   private boolean equals(Object proxy, Object[] list) throws Throwable {
      Annotation left = (Annotation) proxy;
      Annotation right = (Annotation) list[0];

      if(left.annotationType() != right.annotationType()) {
         throw new PersistenceException("Annotation %s is not the same as %s", left, right);
      }
      return comparer.equals(left, right);
   }

   /**
    * This is used to build a string from the annotation. The string
    * produces adheres to the typical string representation of a
    * normal annotation. This ensures that an exceptions that are
    * thrown with a string representation of the annotation are
    * identical to those thrown with a normal annotation.
    *
    * @return returns a string representation of the annotation
    */
   public String toString() {
      StringBuilder builder = new StringBuilder();

      if(type != null) {
         name(builder);
         attributes(builder);
      }
      return builder.toString();
   }

   /**
    * This is used to build a string from the annotation. The string
    * produces adheres to the typical string representation of a
    * normal annotation. This ensures that an exceptions that are
    * thrown with a string representation of the annotation are
    * identical to those thrown with a normal annotation.
    *
    * @param builder this is the builder used to compose the text
    */
   private void name(StringBuilder builder) {
      String name = type.getName();

      if(name != null) {
         builder.append('@');
         builder.append(name);
         builder.append('(');
      }
   }

   /**
    * This is used to build a string from the annotation. The string
    * produces adheres to the typical string representation of a
    * normal annotation. This ensures that an exceptions that are
    * thrown with a string representation of the annotation are
    * identical to those thrown with a normal annotation. Note that
    * the attribute order follows <code>getDeclaredMethods</code>,
    * which is not guaranteed to be stable across JVM runs.
    *
    * @param builder this is the builder used to compose the text
    */
   private void attributes(StringBuilder builder) {
      Method[] list = type.getDeclaredMethods();

      for(int i = 0; i < list.length; i++) {
         String attribute = list[i].getName();
         Object value = value(list[i]);

         if(i > 0) {
            builder.append(',');
            builder.append(' ');
         }
         builder.append(attribute);
         builder.append('=');
         builder.append(value);
      }
      builder.append(')');
   }

   /**
    * This is used to extract the default value used for the provided
    * annotation attribute. This will return the default value for
    * all attributes except that it makes the requirement optional.
    * Making the requirement optional provides better functionality.
    *
    * @param method this is the annotation representing the attribute
    *
    * @return this returns the default value for the attribute
    */
   private Object value(Method method) {
      String name = method.getName();

      if(name.equals(REQUIRED)) {
         return required;
      }
      return method.getDefaultValue();
   }
}
/** * Licensed to Apereo under one or more contributor license * agreements. See the NOTICE file distributed with this work * for additional information regarding copyright ownership. * Apereo licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file * except in compliance with the License. You may obtain a * copy of the License at the following location: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.jasig.portal.groups.smartldap; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.concurrent.ConcurrentException; import org.apache.commons.lang3.concurrent.LazyInitializer; import org.danann.cernunnos.Task; import org.danann.cernunnos.runtime.RuntimeRequestResponse; import org.danann.cernunnos.runtime.ScriptRunner; import org.jasig.portal.EntityIdentifier; import org.jasig.portal.groups.ComponentGroupServiceDescriptor; import org.jasig.portal.groups.EntityGroupImpl; import org.jasig.portal.groups.EntityTestingGroupImpl; import org.jasig.portal.groups.GroupsException; import org.jasig.portal.groups.IEntityGroup; import org.jasig.portal.groups.IEntityGroupStore; import org.jasig.portal.groups.IEntityGroupStoreFactory; import org.jasig.portal.groups.IGroupConstants; import org.jasig.portal.groups.IGroupMember; import org.jasig.portal.groups.ILockableEntityGroup; import org.jasig.portal.security.IPerson; import 
org.jasig.portal.security.PersonFactory;
import org.jasig.services.persondir.IPersonAttributeDao;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Required;
import org.springframework.ldap.core.AttributesMapper;
import org.springframework.ldap.core.ContextSource;

import javax.annotation.PostConstruct;
import javax.annotation.Resource;

/**
 * Read-only {@link IEntityGroupStore} backed by LDAP.  The complete groups
 * tree is gathered from the directory (via the Cernunnos 'init.crn' task),
 * indexed in memory as a {@link GroupsTree}, and rebuilt after a configurable
 * interval.  All mutating operations (create/update/delete/lock) throw
 * {@link UnsupportedOperationException}.
 */
public final class SmartLdapGroupStore implements IEntityGroupStore {

    // Instance Members.

    /** Name of the LDAP attribute that lists a person's group memberships. */
    private String memberOfAttributeName = "memberOf";  // default

    public void setMemberOfAttributeName(String memberOfAttributeName) {
        this.memberOfAttributeName = memberOfAttributeName;
    }

    /** Base DN under which groups are searched; must be set -- validated in buildGroupsTree(). */
    private String baseGroupDn = null;

    public void setBaseGroupDn(String baseGroupDn) {
        this.baseGroupDn = baseGroupDn;
    }

    /** LDAP filter used to select group objects. */
    private String filter = "(objectCategory=group)";  // default

    public void setFilter(String filter) {
        this.filter = filter;
    }

    /** LDAP context; must be set if used -- validated in buildGroupsTree(). */
    private ContextSource ldapContext = null;

    public void setLdapContext(ContextSource ldapContext) {
        this.ldapContext = ldapContext;
    }

    /** Whether member groups should themselves be resolved recursively. */
    private boolean resolveMemberGroups = false;  // default

    public void setResolveMemberGroups(boolean resolveMemberGroups) {
        this.resolveMemberGroups = resolveMemberGroups;
    }

    /** DNs within which member groups are resolved; used with resolveMemberGroups. */
    private List<String> resolveDnList = Collections.emptyList();  // default

    public void setResolveDn(String resolveDn) {
        this.resolveDnList = Collections.singletonList(resolveDn);
    }

    @SuppressWarnings("unused")
    public void setResolveDnList(List<String> resolveDnList) {
        this.resolveDnList = Collections.unmodifiableList(resolveDnList);
    }

    /** Maps LDAP attributes onto group objects; required. */
    private AttributesMapper attributesMapper;

    @Required
    public void setAttributesMapper(AttributesMapper attributesMapper) {
        this.attributesMapper = attributesMapper;
    }

    /**
     * Period after which SmartLdap will drop and rebuild the groups tree.  May
     * be overridden in SmartLdapGroupStoreConfix.xml.  A value of zero or less
     * (negative) disables this feature.
     */
    private long groupsTreeRefreshIntervalSeconds = 900;  // default

    public void setGroupsTreeRefreshIntervalSeconds(long groupsTreeRefreshIntervalSeconds) {
        this.groupsTreeRefreshIntervalSeconds = groupsTreeRefreshIntervalSeconds;
    }

    /**
     * Timestamp (milliseconds) of the last tree refresh.  Volatile so reads
     * from request threads see the value written by refreshTree().
     */
    private volatile long lastTreeRefreshTime = 0;

    // Cernunnos tech...
    private final ScriptRunner runner = new ScriptRunner();
    private final Task initTask =
            runner.compileTask(getClass().getResource("init.crn").toExternalForm());

    @Resource(name="personAttributeDao")
    private IPersonAttributeDao personAttributeDao;

    private final Logger log = LoggerFactory.getLogger(getClass());

    /*
     * Indexed Collections.
     */

    /**
     * Single-object abstraction that contains all knowledge of SmartLdap groups:
     * <ul>
     *   <li>Map of all groups keyed by 'key' (DN).  Includes ROOT_GROUP.</li>
     *   <li>Map of all parent relationships keyed by the 'key' (DN) of the child;
     *   the values are lists of the 'keys' (DNs) of its parents.
     *   Includes ROOT_GROUP.</li>
     *   <li>Map of all child relationships keyed by the 'key' (DN) of the parent;
     *   the values are lists of the 'keys' (DNs) of its children.
     *   Includes ROOT_GROUP.</li>
     *   <li>Map of all 'keys' (DNs) of SmartLdap managed groups indexed by group
     *   name in upper case.  Includes ROOT_GROUP.</li>
     * </ul>
     */
    private GroupsTree groupsTree;

    /*
     * Public API.
     */

    public static final String UNSUPPORTED_MESSAGE =
            "The SmartLdap implementation of JA-SIG Groups and Permissions (GaP) " +
            "does not support this operation.";

    public static final String ROOT_KEY = "SmartLdap ROOT";
    public static final String ROOT_DESC = "A root group provided for the SmartLdapGroupStore.";

    /** Lazily-created singleton root group; all parentless groups hang beneath it. */
    private static final LazyInitializer<IEntityGroup> rootGroupInitializer =
            new LazyInitializer<IEntityGroup>() {
        @Override
        protected IEntityGroup initialize() {
            IEntityGroup rslt = new EntityTestingGroupImpl(ROOT_KEY, IPerson.class);
            rslt.setCreatorID("System");
            rslt.setName(ROOT_KEY);
            rslt.setDescription(ROOT_DESC);
            return rslt;
        }
    };

    /** Not supported by SmartLdap. */
    public boolean contains(IEntityGroup group, IGroupMember member) throws GroupsException {
        log.warn("Unsupported method accessed: SmartLdapGroupStore.contains");
        throw new UnsupportedOperationException(UNSUPPORTED_MESSAGE);
    }

    /** Not supported by SmartLdap. */
    public void delete(IEntityGroup group) throws GroupsException {
        log.warn("Unsupported method accessed: SmartLdapGroupStore.delete");
        throw new UnsupportedOperationException(UNSUPPORTED_MESSAGE);
    }

    /**
     * Returns an instance of the <code>IEntityGroup</code> from the data store.
     * @return org.jasig.portal.groups.IEntityGroup
     * @param key java.lang.String
     */
    public IEntityGroup find(String key) throws GroupsException {

        if (isTreeRefreshRequired()) {
            refreshTree();
        }

        log.debug("Invoking find() for key: {}", key);

        // All of our groups (incl. ROOT_GROUP)
        // are indexed in the 'groups' map by key...
        return groupsTree.getGroups().get(key);

    }

    /**
     * Returns an <code>Iterator</code> over the <code>Collection</code> of
     * <code>IEntityGroups</code> that the <code>IGroupMember</code> belongs to.
     * @return java.util.Iterator
     * @param gm org.jasig.portal.groups.IEntityGroup
     */
    public Iterator findContainingGroups(IGroupMember gm) throws GroupsException {

        if (isTreeRefreshRequired()) {
            refreshTree();
        }

        List<IEntityGroup> rslt = new LinkedList<>();
        final IEntityGroup root = getRootGroup();
        if (gm.isGroup()) {
            // Check the local indices...
            IEntityGroup group = (IEntityGroup) gm;
            List<String> list = groupsTree.getParents().get(group.getLocalKey());
            if (list != null) {
                // should only reach this code if its a SmartLdap managed group...
                for (String s : list) {
                    rslt.add(groupsTree.getGroups().get(s));
                }
            }
        } else if (gm.isEntity() && gm.getEntityType().equals(root.getEntityType())) {

            // Ask the individual...
            EntityIdentifier ei = gm.getUnderlyingEntityIdentifier();
            Map<String,List<Object>> seed = new HashMap<>();
            List<Object> seedValue = new LinkedList<>();
            seedValue.add(ei.getKey());
            seed.put(IPerson.USERNAME, seedValue);
            Map<String,List<Object>> attr = personAttributeDao.getMultivaluedUserAttributes(seed);
            // avoid NPEs and unnecessary IPerson creation
            if (attr != null && !attr.isEmpty()) {
                IPerson p = PersonFactory.createPerson();
                p.setAttributes(attr);

                // Analyze its memberships...
                Object[] groupKeys = p.getAttributeValues(memberOfAttributeName);
                // IPerson returns null if no value is defined for this attribute...
                if (groupKeys != null) {
                    List<String> list = new LinkedList<>();
                    for (Object o : groupKeys) {
                        list.add((String) o);
                    }
                    for (String s : list) {
                        if (groupsTree.getGroups().containsKey(s)) {
                            rslt.add(groupsTree.getGroups().get(s));
                        }
                    }
                }
            }
        }

        return rslt.iterator();

    }

    /**
     * Returns an <code>Iterator</code> over the <code>Collection</code> of
     * <code>IEntities</code> that are members of this <code>IEntityGroup</code>.
     * @return java.util.Iterator
     * @param group org.jasig.portal.groups.IEntityGroup
     */
    public Iterator findEntitiesForGroup(IEntityGroup group) throws GroupsException {

        if (isTreeRefreshRequired()) {
            refreshTree();
        }

        log.debug("Invoking findEntitiesForGroup() for group: {}", group.getLocalKey());

        // We only deal w/ group-group relationships here...
        return findMemberGroups(group);

    }

    /** Not supported by SmartLdap. */
    public ILockableEntityGroup findLockable(String key) throws GroupsException {
        log.warn("Unsupported method accessed: SmartLdapGroupStore.findLockable");
        throw new UnsupportedOperationException(UNSUPPORTED_MESSAGE);
    }

    /**
     * Returns a <code>String[]</code> containing the keys of <code>IEntityGroups</code>
     * that are members of this <code>IEntityGroup</code>.  In a composite group
     * system, a group may contain a member group from a different service.  This is
     * called a foreign membership, and is only possible in an internally-managed
     * service.  A group store in such a service can return the key of a foreign member
     * group, but not the group itself, which can only be returned by its local store.
     *
     * @return String[]
     * @param group org.jasig.portal.groups.IEntityGroup
     */
    public String[] findMemberGroupKeys(IEntityGroup group) throws GroupsException {

        if (isTreeRefreshRequired()) {
            refreshTree();
        }

        log.debug("Invoking findMemberGroupKeys() for group: {}", group.getLocalKey());

        List<String> rslt = new LinkedList<>();
        for (Iterator it=findMemberGroups(group); it.hasNext();) {
            IEntityGroup g = (IEntityGroup) it.next();
            // Return composite keys here...
            rslt.add(g.getKey());
        }

        return rslt.toArray(new String[rslt.size()]);

    }

    /**
     * Returns an <code>Iterator</code> over the <code>Collection</code> of
     * <code>IEntityGroups</code> that are members of this <code>IEntityGroup</code>.
     * @return java.util.Iterator
     * @param group org.jasig.portal.groups.IEntityGroup
     */
    public Iterator findMemberGroups(IEntityGroup group) throws GroupsException {

        if (isTreeRefreshRequired()) {
            refreshTree();
        }

        log.debug("Invoking findMemberGroups() for group: {}", group.getLocalKey());

        List<IEntityGroup> rslt = new LinkedList<>();

        List<String> list = groupsTree.getChildren().get(group.getLocalKey());
        if (list != null) {
            // should only reach this code if its a SmartLdap managed group...
            for (String s : list) {
                rslt.add(groupsTree.getGroups().get(s));
            }
        }

        return rslt.iterator();

    }

    /** Not supported by SmartLdap. */
    public IEntityGroup newInstance(Class entityType) throws GroupsException {
        log.warn("Unsupported method accessed: SmartLdapGroupStore.newInstance");
        throw new UnsupportedOperationException(UNSUPPORTED_MESSAGE);
    }

    /**
     * Matches SmartLdap-managed groups by name (upper-cased) against the query,
     * honoring the IGroupConstants search method (IS/STARTS_WITH/ENDS_WITH/CONTAINS).
     * Only the IPerson leaf type is supported; other leaf types yield no results.
     */
    public EntityIdentifier[] searchForGroups(String query, int method, Class leaftype) throws GroupsException {

        if (isTreeRefreshRequired()) {
            refreshTree();
        }

        // FIX: format string previously lacked the third '{}' placeholder, and
        // 'leaftype.getClass().getName()' logged java.lang.Class instead of the
        // leaf type itself.
        log.debug("Invoking searchForGroups():  query={}, method={}, leaftype={}",
                query, method, leaftype.getName());

        // We only match the IPerson leaf type...
        final IEntityGroup root = getRootGroup();
        if (!leaftype.equals(root.getEntityType())) {
            return new EntityIdentifier[0];
        }

        // We need to escape regex special characters that appear in the query string...
        final String[][] specials = new String[][] {
            /* backslash must come first! */
            new String[] { "\\", "\\\\"},
            new String[] { "[", "\\[" },
            /* closing ']' isn't needed b/c it's a normal character w/o a preceding '[' */
            new String[] { "{", "\\{" },
            /* closing '}' isn't needed b/c it's a normal character w/o a preceding '{' */
            new String[] { "^", "\\^" },
            new String[] { "$", "\\$" },
            new String[] { ".", "\\." },
            new String[] { "|", "\\|" },
            new String[] { "?", "\\?" },
            new String[] { "*", "\\*" },
            new String[] { "+", "\\+" },
            new String[] { "(", "\\(" },
            new String[] { ")", "\\)" }
        };
        for (String[] s : specials) {
            query = query.replace(s[0], s[1]);
        }

        // Establish the regex pattern to match on...
        String regex;
        switch (method) {
            case IGroupConstants.IS:
                regex = query.toUpperCase();
                break;
            case IGroupConstants.STARTS_WITH:
                regex = query.toUpperCase() + ".*";
                break;
            case IGroupConstants.ENDS_WITH:
                regex = ".*" + query.toUpperCase();
                break;
            case IGroupConstants.CONTAINS:
                regex = ".*" + query.toUpperCase() + ".*";
                break;
            default:
                String msg = "Unsupported search method:  " + method;
                throw new GroupsException(msg);
        }

        List<EntityIdentifier> rslt = new LinkedList<>();
        for (Map.Entry<String,List<String>> y : groupsTree.getKeysByUpperCaseName().entrySet()) {
            if (y.getKey().matches(regex)) {
                List<String> keys = y.getValue();
                for (String k : keys) {
                    rslt.add(new EntityIdentifier(k, IEntityGroup.class));
                }
            }
        }

        return rslt.toArray(new EntityIdentifier[rslt.size()]);

    }

    /** Not supported by SmartLdap. */
    public void update(IEntityGroup group) throws GroupsException {
        log.warn("Unsupported method accessed: SmartLdapGroupStore.update");
        throw new UnsupportedOperationException(UNSUPPORTED_MESSAGE);
    }

    /** Not supported by SmartLdap. */
    public void updateMembers(IEntityGroup group) throws GroupsException {
        log.warn("Unsupported method accessed: SmartLdapGroupStore.updateMembers");
        throw new UnsupportedOperationException(UNSUPPORTED_MESSAGE);
    }

    /**
     * Removes from the record any child keys already present in the supplied
     * group chain (i.e. circular references), returning a filtered copy when
     * at least one reference was removed; otherwise the original record.
     */
    @SuppressWarnings("unused")
    public LdapRecord detectAndEliminateGroupReferences(LdapRecord record, List<String> groupChain) {

        LdapRecord rslt = record;  // default

        List<String> keysOfChildren = record.getKeysOfChildren();
        List<String> filteredChildren = new ArrayList<>();
        for (String key : keysOfChildren) {
            if (!groupChain.contains(key)) {
                filteredChildren.add(key);
            } else {
                // Circular reference detected!
                log.warn("Circular reference detected and removed for the following groups:  '{}' and '{}'",
                                                    key, record.getGroup().getLocalKey());
            }
        }
        if (filteredChildren.size() < keysOfChildren.size()) {
            rslt = new LdapRecord(record.getGroup(), filteredChildren);
        }

        return rslt;

    }

    /**
     * Reports whether the record has at least one child within the reference DN
     * that is not already present in the supplied groups set.
     */
    @SuppressWarnings("unused")
    public boolean hasUndiscoveredChildrenWithinDn(LdapRecord record, String referenceDn, Set<LdapRecord> groupsSet) {

        boolean rslt = false;  // default

        for (String childKey : record.getKeysOfChildren()) {
            if (childKey.endsWith(referenceDn)) {
                // Make sure the one we found isn't already in the groupsSet;
                // NOTE!... this test takes advantage of the implementation of
                // equals() on LdapRecord, which states that 2 records with the
                // same group key are equal.
                IEntityGroup group = new EntityGroupImpl(childKey, IPerson.class);
                List<String> list = Collections.emptyList();
                LdapRecord proxy = new LdapRecord(group, list);
                if (!groupsSet.contains(proxy)) {
                    rslt = true;
                    break;
                } else {
                    log.trace("Child group is already in collection:  {}", childKey);
                }
            }
        }

        log.trace("Query for children of parent group '{}':  {}", record.getGroup().getLocalKey(), rslt);

        return rslt;

    }

    /*
     * Implementation.
     */

    @PostConstruct
    private void postConstruct() {
        Factory.setInstance(this);
    }

    /** Obtains the lazily-initialized root group, wrapping init failures. */
    private IEntityGroup getRootGroup() {
        try {
            return rootGroupInitializer.get();
        } catch (ConcurrentException ce) {
            throw new RuntimeException("Failed to obtain the SmartLdap root group", ce);
        }
    }

    /** True when the tree is absent or its refresh interval has elapsed. */
    private boolean isTreeRefreshRequired() {

        if (groupsTree == null) {
            // Of course we need it
            return true;
        }

        if (groupsTreeRefreshIntervalSeconds <= 0) {
            // SmartLdap refresh feature may be disabled by setting
            // groupsTreeRefreshIntervalSeconds to zero or negative.
            return false;
        }

        // The 'lastTreeRefreshTime' member variable is volatile.  As of JDK 5,
        // this fact should make reads of this variable dependable in a multi-
        // threaded environment.
        final long treeExpiresTimestamp = lastTreeRefreshTime + (groupsTreeRefreshIntervalSeconds * 1000L);
        return System.currentTimeMillis() > treeExpiresTimestamp;

    }

    /**
     * Verifies that the collection of groups needs rebuilding and, if so,
     * spawns a new worker <code>Thread</code> for that purpose.
     */
    private synchronized void refreshTree() {

        if (!isTreeRefreshRequired()) {
            // The groupsTree was already re-built while
            // we were waiting to enter this method.
            return;
        }

        log.info("Refreshing groups tree for SmartLdap");

        // We must join the builder thread if
        // we don't have an existing groupsTree.
        final boolean doJoin = groupsTree == null;

        // In most cases, re-build the tree in a separate thread;  the current
        // request can proceed with the newly-expired groupsTree.
        Thread refresh = new Thread("SmartLdap Refresh Worker") {
            public void run() {
                // Replace the old with the new...
                try {
                    groupsTree = buildGroupsTree();
                } catch (Throwable t) {
                    log.error("SmartLdapGroupStore failed to build the groups tree", t);
                }
            }
        };
        refresh.setDaemon(true);
        refresh.start();
        if (doJoin) {
            try {
                log.info("Joining the SmartLdap Refresh Worker Thread");
                refresh.join();
            } catch (InterruptedException ie) {
                throw new RuntimeException(ie);
            }
        }

        // Even if the refresh thread failed, don't try
        // again for another groupsTreeRefreshIntervalSeconds.
        lastTreeRefreshTime = System.currentTimeMillis();

    }

    /**
     * Queries LDAP (via the Cernunnos init task) and builds the four indices
     * that make up a {@link GroupsTree}, inserting the synthetic ROOT_GROUP as
     * the parent of every group that has no other parent.
     *
     * @throws IllegalStateException if baseGroupDn or ldapContext is unset
     */
    private GroupsTree buildGroupsTree() {

        long timestamp = System.currentTimeMillis();

        // Prepare the new local indices...
        Map<String,IEntityGroup> new_groups = Collections.synchronizedMap(new HashMap<String,IEntityGroup>());
        Map<String,List<String>> new_parents = Collections.synchronizedMap(new HashMap<String,List<String>>());
        Map<String,List<String>> new_children = Collections.synchronizedMap(new HashMap<String,List<String>>());
        Map<String,List<String>> new_keysByUpperCaseName = Collections.synchronizedMap(new HashMap<String,List<String>>());

        // Gather IEntityGroup objects from LDAP...
        RuntimeRequestResponse req = new RuntimeRequestResponse();
        Set<LdapRecord> set = new HashSet<>();
        req.setAttribute("GROUPS", set);
        req.setAttribute("smartLdapGroupStore", this);
        SubQueryCounter queryCounter = new SubQueryCounter();
        req.setAttribute("queryCounter", queryCounter);
        req.setAttribute("filter", filter);      // This one changes iteratively...
        req.setAttribute("baseFilter", filter);  // while this one stays the same.
        if (StringUtils.isBlank(baseGroupDn)) {
            throw new IllegalStateException("baseGroupDn property not set");
        }
        req.setAttribute("baseGroupDn", baseGroupDn);
        if (ldapContext == null) {
            throw new IllegalStateException("ldapContext property not set");
        }
        req.setAttribute("ldapContext", ldapContext);
        req.setAttribute("resolveMemberGroups", resolveMemberGroups);
        req.setAttribute("resolveDnList", resolveDnList);
        req.setAttribute("memberOfAttributeName", memberOfAttributeName);
        req.setAttribute("attributesMapper", attributesMapper);
        runner.run(initTask, req);

        log.info("init() found {} records", set.size());

        // Do a first loop to build the main catalog (new_groups)...
        for (LdapRecord r : set) {

            // new_groups (me)...
            IEntityGroup g = r.getGroup();
            new_groups.put(g.getLocalKey(), g);

        }

        // Do a second loop to build local indices...
        for (LdapRecord r : set) {

            IEntityGroup g = r.getGroup();

            // new_parents (I am a parent for all my children)...
            for (String childKey : r.getKeysOfChildren()) {

                // NB:  We're only interested in relationships between
                // objects in the main catalog (i.e. new_groups);
                // discard everything else...
                // FIX: was 'break', which abandoned ALL remaining children of
                // this group as soon as one out-of-catalog child was seen,
                // leaving new_parents inconsistent with new_children below.
                if (!new_groups.containsKey(childKey)) {
                    continue;
                }

                List<String> parentsList = new_parents.get(childKey);
                if (parentsList == null) {
                    // first parent for this child...
                    parentsList = Collections.synchronizedList(new LinkedList<String>());
                    new_parents.put(childKey, parentsList);
                }
                parentsList.add(g.getLocalKey());

            }

            // new_children...
            List<String> childrenList = Collections.synchronizedList(new LinkedList<String>());
            for (String childKey : r.getKeysOfChildren()) {
                // NB:  We're only interested in relationships between
                // objects in the main catalog (i.e. new_groups);
                // discard everything else...
                if (new_groups.containsKey(childKey)) {
                    childrenList.add(childKey);
                }
            }
            new_children.put(g.getLocalKey(), childrenList);

            // new_keysByUpperCaseName...
            List<String> groupsWithMyName = new_keysByUpperCaseName.get(g.getName().toUpperCase());
            if (groupsWithMyName == null) {
                // I am the first group with my name (pretty likely)...
                groupsWithMyName = Collections.synchronizedList(new LinkedList<String>());
                new_keysByUpperCaseName.put(g.getName().toUpperCase(), groupsWithMyName);
            }
            groupsWithMyName.add(g.getLocalKey());

        }

        /*
         * Now load the ROOT_GROUP into the collections...
         */

        // new_groups (me)...
        final IEntityGroup root = getRootGroup();
        new_groups.put(root.getLocalKey(), root);

        // new_parents (I am a parent for all groups that have no other parent)...
        List<String> childrenOfRoot = Collections.synchronizedList(new LinkedList<String>());  // for later...
        for (String possibleChildKey : new_groups.keySet()) {
            if (!possibleChildKey.equals(root.getLocalKey()) && !new_parents.containsKey(possibleChildKey)) {
                List<String> p = Collections.synchronizedList(new LinkedList<String>());
                p.add(root.getLocalKey());
                new_parents.put(possibleChildKey, p);
                childrenOfRoot.add(possibleChildKey);  // for later...
            }
        }

        // new_children...
        new_children.put(root.getLocalKey(), childrenOfRoot);

        // new_keysByUpperCaseName...
        List<String> groupsWithMyName = new_keysByUpperCaseName.get(root.getName().toUpperCase());
        if (groupsWithMyName == null) {
            // I am the first group with my name (pretty likely)...
            groupsWithMyName = Collections.synchronizedList(new LinkedList<String>());
            new_keysByUpperCaseName.put(root.getName().toUpperCase(), groupsWithMyName);
        }
        groupsWithMyName.add(root.getLocalKey());

        final long benchmark = System.currentTimeMillis() - timestamp;
        log.info("Refresh of groups tree completed in {} milliseconds", benchmark);
        log.info("Total number of LDAP queries:  {}", queryCounter.getCount() + 1);
        final String msg = "init() :: final size of each collection is as follows..."
                + "\n\tgroups={}"
                + "\n\tparents={}"
                + "\n\tchildren={}"
                + "\n\tkeysByUpperCaseName={}";
        log.info(msg, new_groups.size(), new_parents.size(), new_children.size(), new_keysByUpperCaseName.size());

        if (log.isTraceEnabled()) {

            StringBuilder sbuilder = new StringBuilder();

            // new_groups...
            sbuilder.setLength(0);
            sbuilder.append("Here are the keys of the new_groups collection:");
            for (String s : new_groups.keySet()) {
                sbuilder.append("\n\t").append(s);
            }
            log.trace(sbuilder.toString());

            // new_parents...
            sbuilder.setLength(0);
            sbuilder.append("Here are the parents of each child in the new_parents collection:");
            for (Map.Entry<String,List<String>> y : new_parents.entrySet()) {
                sbuilder.append("\n\tchild=").append(y.getKey());
                for (String s : y.getValue()) {
                    sbuilder.append("\n\t\tparent=").append(s);
                }
            }
            log.trace(sbuilder.toString());

            // new_children...
            sbuilder.setLength(0);
            sbuilder.append("Here are the children of each parent in the new_children collection:");
            for (Map.Entry<String,List<String>> y : new_children.entrySet()) {
                sbuilder.append("\n\tparent=").append(y.getKey());
                for (String s : y.getValue()) {
                    sbuilder.append("\n\t\tchild=").append(s);
                }
            }
            log.trace(sbuilder.toString());

            // new_keysByUpperCaseName...
            // FIX: reset was missing here, so this trace message previously
            // repeated the entire new_children dump before its own content.
            sbuilder.setLength(0);
            sbuilder.append("Here are the groups that have each name in the new_keysByUpperCaseName collection:");
            for (Map.Entry<String,List<String>> y : new_keysByUpperCaseName.entrySet()) {
                sbuilder.append("\n\tname=").append(y.getKey());
                for (String s : y.getValue()) {
                    sbuilder.append("\n\t\tgroup=").append(s);
                }
            }
            log.trace(sbuilder.toString());

        }

        return new GroupsTree(new_groups, new_parents, new_children, new_keysByUpperCaseName);

    }

    /*
     * Nested Types.
     */

    /** Factory that hands out the single Spring-managed store instance. */
    public static final class Factory implements IEntityGroupStoreFactory {

        private static IEntityGroupStore instance;

        private static void setInstance(IEntityGroupStore smartLdapGroupStore) {
            instance = smartLdapGroupStore;
        }

        /*
         * Public API.
         */

        public IEntityGroupStore newGroupStore() throws GroupsException {
            return instance;
        }

        public IEntityGroupStore newGroupStore(ComponentGroupServiceDescriptor svcDescriptor)
                throws GroupsException {
            return instance;
        }

    }

    /** Immutable holder for the four group indices; see field-level javadoc on groupsTree. */
    private static final class GroupsTree {

        // Instance Members.
        private final Map<String,IEntityGroup> groups;
        private final Map<String,List<String>> parents;
        private final Map<String,List<String>> children;
        private final Map<String,List<String>> keysByUpperCaseName;

        /*
         * Public API.
         */

        public GroupsTree(Map<String,IEntityGroup> groups, Map<String,List<String>> parents,
                Map<String,List<String>> children, Map<String,List<String>> keysByUpperCaseName) {

            // Assertions.
            if (groups == null) {
                String msg = "Argument 'groups' cannot be null.";
                throw new IllegalArgumentException(msg);
            }
            if (parents == null) {
                String msg = "Argument 'parents' cannot be null.";
                throw new IllegalArgumentException(msg);
            }
            if (children == null) {
                String msg = "Argument 'children' cannot be null.";
                throw new IllegalArgumentException(msg);
            }
            if (keysByUpperCaseName == null) {
                String msg = "Argument 'keysByUpperCaseName' cannot be null.";
                throw new IllegalArgumentException(msg);
            }

            // Instance Members.
            this.groups = groups;
            this.parents = parents;
            this.children = children;
            this.keysByUpperCaseName = keysByUpperCaseName;

        }

        public Map<String,IEntityGroup> getGroups() {
            return groups;
        }

        public Map<String,List<String>> getParents() {
            return parents;
        }

        public Map<String,List<String>> getChildren() {
            return children;
        }

        public Map<String,List<String>> getKeysByUpperCaseName() {
            return keysByUpperCaseName;
        }

    }

    /** Simple mutable counter used to tally LDAP sub-queries during a refresh. */
    private static final class SubQueryCounter {

        private int count = 0;

        public void increment() {
            ++count;
        }

        public int getCount() {
            return count;
        }

    }

}
package it.unibz.krdb.obda.model.impl; /* * #%L * ontop-obdalib-core * %% * Copyright (C) 2009 - 2014 Free University of Bozen-Bolzano * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import it.unibz.krdb.obda.model.BooleanOperationPredicate; import it.unibz.krdb.obda.model.Predicate; import it.unibz.krdb.obda.model.Predicate.COL_TYPE; import it.unibz.krdb.obda.model.StringOperationPredicate; import it.unibz.krdb.obda.model.ValueConstant; public class OBDAVocabulary { /*GeoSPARQL namespaces*/ public static final String GEOSPARQL_FUNCTION_NS = "<http://www.opengis.net/def/function/geosparql/"; /* Constants */ public static final ValueConstant NULL = new ValueConstantImpl("null", COL_TYPE.STRING); public static final ValueConstant TRUE = new ValueConstantImpl("true", COL_TYPE.BOOLEAN); public static final ValueConstant FALSE = new ValueConstantImpl("false", COL_TYPE.BOOLEAN); /* Numeric operations */ public static final String MINUS_STR = "minus"; public static final String ADD_STR = "add"; public static final String SUBSTRACT_STR = "substract"; public static final String MULTIPLY_STR = "multiply"; /* Numeric operation predicates */ public static final Predicate MINUS = new NumericalOperationPredicateImpl("minus", 1); // TODO (ROMAN): check -- never used public static final Predicate ADD = new NumericalOperationPredicateImpl("add", 2); public static final Predicate SUBTRACT = new NumericalOperationPredicateImpl("subtract", 2); public static final Predicate MULTIPLY = new 
NumericalOperationPredicateImpl("multiply", 2); /* Boolean predicate URIs */ public static final String strAND = "AND"; public static final String strEQ = "EQ"; public static final String strGTE = "GTE"; public static final String strGT = "GT"; public static final String strLTE = "LTE"; public static final String strLT = "LT"; public static final String strNEQ = "NEQ"; public static final String strNOT = "NOT"; public static final String strOR = "OR"; public static final String strIS_NULL = "IS_NULL"; public static final String strIS_NOT_NULL = "IS_NOT_NULL"; public static final String strIS_TRUE = "IS_TRUE"; /*GeoSPARQL geometry topology functions*/ public static final String strOverlaps = "OVERLAPS"; public static final String sfEquals = "SF-EQUALS"; public static final String sfDisjoint = "SF-DISJOINT"; public static final String sfIntersects = "SF-INTERSECTS"; public static final String sfTouches = "SF-TOUCHES"; public static final String sfWithin = "SF-WITHIN"; public static final String sfContains = "SF-CONTAINS"; public static final String sfCrosses = "SF-CROSSES"; public static final String ehEquals = "EH-EQUALS"; public static final String ehDisjoint = "EH-DISJOINT"; public static final String ehOverlap = "EH-OVERLAP"; public static final String ehCovers = "EH-COVERS"; public static final String ehCoveredBy = "EH-COVEREDBY"; public static final String ehInside = "EH-INSIDE"; public static final String ehContains = "EH-CONTAINS"; public static final String sfdistance = "SF-DISTANCE"; public static final String sfbuffer = "SF-BUFFER"; public static final String sfconvexHull = "SF-CONVEXHULL"; public static final String sfIntersection = "SF-INTERSECTION"; public static final String sfUnion = "SF-UNION"; public static final String sfDifference = "SF-DIFFERENCE"; public static final String sfSymDifference = "SF-SYMDIFFERENCE"; public static final String sfEnvelope = "SF-ENVELOPE"; public static final String sfBoundary = "SF-BOUNDARY"; public static final String 
sfGetSRID = "SF-SRID"; public static final String strGeomFromWKT = "GEOMFROMWKT"; /* Boolean predicates */ public static final BooleanOperationPredicate AND = new BooleanOperationPredicateImpl("AND", 2); public static final BooleanOperationPredicate OR = new BooleanOperationPredicateImpl("OR", 2); public static final BooleanOperationPredicate NOT = new BooleanOperationPredicateImpl("NOT", 1); public static final Predicate OVERLAPS = new BooleanOperationPredicateImpl( strOverlaps, 2); public static final Predicate SFCONTAINS = new BooleanOperationPredicateImpl( sfContains, 2); public static final Predicate SFCROSSES = new BooleanOperationPredicateImpl( sfCrosses, 2); public static final Predicate SFDISJOINT = new BooleanOperationPredicateImpl( sfDisjoint, 2); public static final Predicate SFINTERSECTS = new BooleanOperationPredicateImpl( sfIntersects, 2); public static final Predicate SFTOUCHES = new BooleanOperationPredicateImpl( sfTouches, 2); public static final Predicate SFWITHIN = new BooleanOperationPredicateImpl( sfWithin, 2); public static final Predicate SFEQUALS = new BooleanOperationPredicateImpl( sfEquals, 2); public static final Predicate EHCOVEREDBY = new BooleanOperationPredicateImpl( ehCoveredBy, 2); public static final Predicate EHCOVERS = new BooleanOperationPredicateImpl( ehCovers, 2); public static final Predicate EHDISJOINT = new BooleanOperationPredicateImpl( ehDisjoint, 2); public static final Predicate EHEQUALS = new BooleanOperationPredicateImpl( ehEquals, 2); public static final Predicate EHINSIDE = new BooleanOperationPredicateImpl( ehInside, 2); public static final Predicate EHOVERLAPS = new BooleanOperationPredicateImpl( ehOverlap, 2); public static final Predicate EHCONTAINS = new BooleanOperationPredicateImpl( ehContains, 2); public static final Predicate SFDISTANCE = new BooleanOperationPredicateImpl( sfdistance, 2); public static final Predicate SFBUFFER = new BooleanOperationPredicateImpl( sfbuffer, 2); public static final Predicate 
SFCONVEXHULL = new BooleanOperationPredicateImpl( sfconvexHull, 2); public static final Predicate SFINTERSECTION = new BooleanOperationPredicateImpl( sfIntersection, 2); public static final Predicate SFUNION = new BooleanOperationPredicateImpl( sfUnion, 2); public static final Predicate SFDIFFERENCE = new BooleanOperationPredicateImpl( sfDifference, 2); public static final Predicate SFSYMDIFFERENCE = new BooleanOperationPredicateImpl( sfSymDifference, 2); public static final Predicate SFENVELOPE = new BooleanOperationPredicateImpl( sfEnvelope, 2); public static final Predicate SFBOUNDARY = new BooleanOperationPredicateImpl( sfBoundary, 2); public static final Predicate SFGETSRID = new BooleanOperationPredicateImpl( sfGetSRID, 2); public static final Predicate GEOMFROMWKT = new BooleanOperationPredicateImpl( strGeomFromWKT, 1); public static final BooleanOperationPredicate EQ = new BooleanOperationPredicateImpl("EQ", 2); public static final BooleanOperationPredicate NEQ = new BooleanOperationPredicateImpl("NEQ", 2); public static final BooleanOperationPredicate GTE = new BooleanOperationPredicateImpl("GTE", 2); public static final BooleanOperationPredicate GT = new BooleanOperationPredicateImpl("GT", 2); public static final BooleanOperationPredicate LTE = new BooleanOperationPredicateImpl("LTE", 2); public static final BooleanOperationPredicate LT = new BooleanOperationPredicateImpl("LT", 2); public static final BooleanOperationPredicate IS_NULL = new BooleanOperationPredicateImpl("IS_NULL", 1); public static final BooleanOperationPredicate IS_NOT_NULL = new BooleanOperationPredicateImpl("IS_NOT_NULL", 1); public static final BooleanOperationPredicate IS_TRUE = new BooleanOperationPredicateImpl("IS_TRUE", 1); /* String predicates */ public static final StringOperationPredicate REPLACE = new StringOperationPredicateImpl( "REPLACE", 3, new COL_TYPE[]{COL_TYPE.LITERAL, COL_TYPE.LITERAL, COL_TYPE.LITERAL}); public static final StringOperationPredicate CONCAT = new 
StringOperationPredicateImpl( "CONCAT", 2, new COL_TYPE[]{COL_TYPE.LITERAL, COL_TYPE.LITERAL}); public static final String RDF_TYPE = "http://www.w3.org/1999/02/22-rdf-syntax-ns#type"; public static final String GEOSPARQL_WKT_LITERAL_DATATYPE = "http://www.opengis.net/ont/geosparql#wktLiteral"; public static final Predicate GEOSPARQL_WKT_LITERAL = new DatatypePredicateImpl( GEOSPARQL_WKT_LITERAL_DATATYPE, COL_TYPE.GEOMETRY); /* Common namespaces and prefixes */ public static final String NS_XSD = "http://www.w3.org/2001/XMLSchema#"; public static final String NS_RDF = "http://www.w3.org/1999/02/22-rdf-syntax-ns#"; public static final String NS_RDFS = "http://www.w3.org/2000/01/rdf-schema#"; public static final String NS_OWL = "http://www.w3.org/2002/07/owl#"; public static final String NS_QUEST = "http://obda.org/quest#"; public static final String PREFIX_XSD = "xsd:"; public static final String PREFIX_RDF = "rdf:"; public static final String PREFIX_RDFS = "rdfs:"; public static final String PREFIX_OWL = "owl:"; public static final String PREFIX_QUEST = "quest:"; // TODO: to be removed public static final String RDFS_LITERAL_URI = "http://www.w3.org/2000/01/rdf-schema#Literal"; /* Built-in function URIs */ // The name of the function that creates URI's in Quest public static final String QUEST_URI = "URI"; public static final Predicate QUEST_CAST = new PredicateImpl("cast", 2, new COL_TYPE[2]); public static final String QUEST_QUERY = "ans1"; /* SPARQL algebra operations */ public static final Predicate SPARQL_JOIN = new AlgebraOperatorPredicateImpl("Join"); public static final Predicate SPARQL_LEFTJOIN = new AlgebraOperatorPredicateImpl("LeftJoin"); /*SPARQL spatial functions*/ public static final String NS_STRDF = "<http://strdf.di.uoa.gr/ontology#>"; public static final String overlap = "<http://strdf.di.uoa.gr/ontology#overlap>"; /* SPARQL built-in functions */ public static final Predicate SPARQL_STR = new NonBooleanOperationPredicateImpl("str"); public static 
final Predicate SPARQL_DATATYPE = new NonBooleanOperationPredicateImpl("datatype"); public static final Predicate SPARQL_LANG = new NonBooleanOperationPredicateImpl("lang"); /* SPARQL built-in predicates */ public static final BooleanOperationPredicate SPARQL_IS_LITERAL = new BooleanOperationPredicateImpl("isLiteral", 1); public static final BooleanOperationPredicate SPARQL_IS_URI = new BooleanOperationPredicateImpl("isURI", 1); public static final BooleanOperationPredicate SPARQL_IS_IRI = new BooleanOperationPredicateImpl("isIRI", 1); public static final BooleanOperationPredicate SPARQL_IS_BLANK = new BooleanOperationPredicateImpl("isBlank", 1); public static final BooleanOperationPredicate SPARQL_LANGMATCHES = new BooleanOperationPredicateImpl("LangMatches", 2); public static final BooleanOperationPredicate SPARQL_REGEX = new BooleanOperationPredicateImpl("regex", 3); public static final BooleanOperationPredicate SPARQL_LIKE = new BooleanOperationPredicateImpl("like", 2); }
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.refactoring.ui;

import com.intellij.codeInsight.editorActions.SelectWordUtil;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.SelectionModel;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.editor.event.DocumentListener;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.StdFileTypes;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.ComboBox;
import com.intellij.openapi.util.TextRange;
import com.intellij.ui.EditorComboBoxEditor;
import com.intellij.ui.EditorComboBoxRenderer;
import com.intellij.ui.EditorTextField;
import com.intellij.ui.StringComboboxEditor;
import com.intellij.util.ArrayUtil;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.event.EventListenerList;
import java.awt.*;
import java.awt.event.ItemEvent;
import java.awt.event.ItemListener;
import java.util.ArrayList;
import java.util.EventListener;
import java.util.List;

/**
 * A panel used in refactoring dialogs that lets the user enter a name, optionally
 * choosing from a list of suggestions.
 *
 * <p>Depending on how it is constructed, the inner component ({@code myComponent})
 * is either an editable {@link ComboBox} (when more than one suggestion is given,
 * or in the model-backed constructor) or a plain {@link EditorTextField} (when zero
 * or one suggestion is given). All other methods branch on
 * {@code myComponent instanceof JComboBox} to handle both cases.</p>
 *
 * <p>Registered {@link DataChanged} listeners are notified whenever the entered
 * text changes, via a document listener on the editor field and, for the combo box
 * variant, an item listener as well.</p>
 */
public class NameSuggestionsField extends JPanel {
  // Either a ComboBox or an EditorTextField; chosen in the constructor and never replaced.
  private final JComponent myComponent;
  private final EventListenerList myListenerList = new EventListenerList();
  // Non-null only for the model-backed constructor; null when suggestions were passed directly.
  private final MyComboBoxModel myComboBoxModel;
  private final Project myProject;

  // Lazily attached when the first DataChanged listener is registered; see attachListeners().
  private MyDocumentListener myDocumentListener;
  private MyComboBoxItemListener myComboBoxItemListener;

  // Set to true when the combo editor's setItem() changes the text programmatically,
  // so documentChanged() can tell a programmatic change from a user keystroke.
  private boolean myNonHumanChange = false;

  /**
   * Creates the field with an initially empty, mutable suggestion model
   * (populated later via {@link #setSuggestions(String[])}). Uses a Java editor.
   */
  public NameSuggestionsField(Project project) {
    super(new BorderLayout());
    myProject = project;
    myComboBoxModel = new MyComboBoxModel();
    final ComboBox comboBox = new ComboBox(myComboBoxModel, -1);
    myComponent = comboBox;
    add(myComponent, BorderLayout.CENTER);
    setupComboBox(comboBox, StdFileTypes.JAVA);
  }

  /** Creates the field with fixed suggestions, editing as Java code. */
  public NameSuggestionsField(String[] nameSuggestions, Project project) {
    this(nameSuggestions, project, StdFileTypes.JAVA);
  }

  /**
   * Creates the field with fixed suggestions. With zero or one suggestion a plain
   * text field is used; with two or more, an editable combo box with the first
   * suggestion preselected. In this variant {@code myComboBoxModel} stays null, so
   * {@link #setSuggestions(String[])} becomes a no-op.
   */
  public NameSuggestionsField(String[] nameSuggestions, Project project, FileType fileType) {
    super(new BorderLayout());
    myProject = project;
    if (nameSuggestions == null || nameSuggestions.length <= 1) {
      myComponent = createTextFieldForName(nameSuggestions, fileType);
    }
    else {
      final ComboBox combobox = new ComboBox(nameSuggestions, -1);
      combobox.setSelectedIndex(0);
      setupComboBox(combobox, fileType);
      myComponent = combobox;
    }
    add(myComponent, BorderLayout.CENTER);
    myComboBoxModel = null;
  }

  /**
   * Same as the three-argument constructor, but additionally tries to mirror the
   * caret position and selection from the given source editor into this field,
   * when the word at the source caret equals the currently entered name.
   */
  public NameSuggestionsField(final String[] suggestedNames,
                              final Project project,
                              final FileType fileType,
                              @Nullable final Editor editor) {
    this(suggestedNames, project, fileType);
    if (editor == null) return;
    // later here because EditorTextField creates Editor during addNotify()
    SwingUtilities.invokeLater(new Runnable() {
      @Override
      public void run() {
        final int offset = editor.getCaretModel().getOffset();
        List<TextRange> ranges = new ArrayList<TextRange>();
        SelectWordUtil.addWordSelection(editor.getSettings().isCamelWords(), editor.getDocument().getCharsSequence(), offset, ranges);
        Editor myEditor = getEditor();
        if (myEditor == null) return;
        for (TextRange wordRange : ranges) {
          String word = editor.getDocument().getText(wordRange);
          // Only mirror the selection when the word under the source caret matches
          // what this field currently contains.
          if (!word.equals(getEnteredName())) continue;
          final SelectionModel selectionModel = editor.getSelectionModel();
          myEditor.getSelectionModel().removeSelection();
          // Translate the source-editor offset into this field's local coordinates.
          int myOffset = offset - wordRange.getStartOffset();
          myEditor.getCaretModel().moveToOffset(myOffset);
          TextRange selected = new TextRange(selectionModel.getSelectionStart(), selectionModel.getSelectionEnd()).shiftRight(-wordRange.getStartOffset());
          // Clamp to this field's document; intersection() may return null.
          selected = selected.intersection(new TextRange(0, myEditor.getDocument().getTextLength()));
          if (selectionModel.hasSelection() && selected != null && !selected.isEmpty()) {
            myEditor.getSelectionModel().setSelection(selected.getStartOffset(), selected.getEndOffset());
          }
          else if (shouldSelectAll()) {
            myEditor.getSelectionModel().setSelection(0, myEditor.getDocument().getTextLength());
          }
          break;
        }
      }
    });
  }

  /** Whether to select the whole text when no matching word selection could be mirrored. */
  protected boolean shouldSelectAll() {
    return true;
  }

  /**
   * Selects the portion of the entered name before the last {@code '.'} (e.g. a
   * file name without its extension) and places the caret there. Runs later
   * because the editor may not exist until the field is shown.
   */
  public void selectNameWithoutExtension() {
    SwingUtilities.invokeLater(new Runnable() {
      @Override
      public void run() {
        Editor editor = getEditor();
        if (editor == null) return;
        final int pos = editor.getDocument().getText().lastIndexOf('.');
        if (pos > 0) {
          editor.getSelectionModel().setSelection(0, pos);
          editor.getCaretModel().moveToOffset(pos);
        }
      }
    });
  }

  /**
   * Replaces the suggestion list. Only works for the model-backed variant; no-op
   * otherwise. Keeps the user's manually entered text unless it matched the old
   * selection or was blank, in which case the selection is refreshed.
   */
  public void setSuggestions(final String[] suggestions) {
    if (myComboBoxModel == null) return;
    JComboBox comboBox = (JComboBox)myComponent;
    final String oldSelectedItem = (String)comboBox.getSelectedItem();
    final String oldItemFromTextField = (String)comboBox.getEditor().getItem();
    // Only overwrite the visible text when the user hasn't typed something of their own.
    final boolean shouldUpdateTextField = oldItemFromTextField.equals(oldSelectedItem) || oldItemFromTextField.trim().length() == 0;
    myComboBoxModel.setSuggestions(suggestions);
    if (suggestions.length > 0 && shouldUpdateTextField) {
      if (oldSelectedItem != null) {
        comboBox.setSelectedItem(oldSelectedItem);
      }
      else {
        comboBox.setSelectedIndex(0);
      }
    }
    else {
      comboBox.getEditor().setItem(oldItemFromTextField);
    }
  }

  /** Returns this panel itself as the component to embed in a dialog. */
  public JComponent getComponent() {
    return this;
  }

  /** Returns the component that should receive focus: the combo's editor component, or the text field. */
  public JComponent getFocusableComponent() {
    if (myComponent instanceof JComboBox) {
      return (JComponent)((JComboBox)myComponent).getEditor().getEditorComponent();
    }
    else {
      return myComponent;
    }
  }

  /** Returns the name currently entered by the user. */
  public String getEnteredName() {
    if (myComponent instanceof JComboBox) {
      return (String)((JComboBox)myComponent).getEditor().getItem();
    }
    else {
      return ((EditorTextField)myComponent).getText();
    }
  }

  /** Builds the single-suggestion variant: an EditorTextField prefilled with the first suggestion (if any), fully selected. */
  private JComponent createTextFieldForName(String[] nameSuggestions, FileType fileType) {
    final String text;
    if (nameSuggestions != null && nameSuggestions.length > 0 && nameSuggestions[0] != null) {
      text = nameSuggestions[0];
    }
    else {
      text = "";
    }
    EditorTextField field = new EditorTextField(text, myProject, fileType);
    field.selectAll();
    return field;
  }

  /**
   * Combo box model backed by a replaceable string array. setSuggestions() fires
   * full removal/insertion events so the popup repaints correctly.
   */
  private static class MyComboBoxModel extends DefaultComboBoxModel {
    private String[] mySuggestions;

    MyComboBoxModel() {
      mySuggestions = ArrayUtil.EMPTY_STRING_ARRAY;
    }

    // implements javax.swing.ListModel
    @Override
    public int getSize() {
      return mySuggestions.length;
    }

    // implements javax.swing.ListModel
    @Override
    public Object getElementAt(int index) {
      return mySuggestions[index];
    }

    public void setSuggestions(String[] suggestions) {
      fireIntervalRemoved(this, 0, mySuggestions.length);
      mySuggestions = suggestions;
      fireIntervalAdded(this, 0, mySuggestions.length);
    }
  }

  /**
   * Installs an editor-backed editor/renderer pair on the combo box. The editor
   * override flags programmatic setItem() calls via myNonHumanChange so the
   * document listener can distinguish them from user typing.
   */
  private void setupComboBox(final ComboBox combobox, FileType fileType) {
    final EditorComboBoxEditor comboEditor = new StringComboboxEditor(myProject, fileType, combobox) {
      @Override
      public void setItem(Object anObject) {
        myNonHumanChange = true;
        super.setItem(anObject);
      }
    };
    combobox.setEditor(comboEditor);
    combobox.setRenderer(new EditorComboBoxRenderer(comboEditor));
    combobox.setEditable(true);
    combobox.setMaximumRowCount(8);
    comboEditor.selectAll();
  }

  /**
   * Returns the underlying IDE Editor of whichever component variant is in use.
   * May be null before the field is shown (the editor is created in addNotify()).
   */
  public Editor getEditor() {
    if (myComponent instanceof EditorTextField) {
      return ((EditorTextField)myComponent).getEditor();
    }
    else {
      return ((EditorTextField)((JComboBox)myComponent).getEditor().getEditorComponent()).getEditor();
    }
  }

  /** Listener notified whenever the entered name changes. */
  public interface DataChanged extends EventListener {
    void dataChanged();
  }

  public void addDataChangedListener(DataChanged listener) {
    myListenerList.add(DataChanged.class, listener);
    attachListeners();
  }

  public void removeDataChangedListener(DataChanged listener) {
    myListenerList.remove(DataChanged.class, listener);
    if (myListenerList.getListenerCount() == 0) {
      detachListeners();
    }
  }

  // Attach the internal document/item listeners on first external subscription;
  // guarded by null checks so repeated calls don't double-register.
  private void attachListeners() {
    if (myDocumentListener == null) {
      myDocumentListener = new MyDocumentListener();
      ((EditorTextField)getFocusableComponent()).addDocumentListener(myDocumentListener);
    }
    if (myComboBoxItemListener == null && myComponent instanceof JComboBox) {
      myComboBoxItemListener = new MyComboBoxItemListener();
      ((JComboBox)myComponent).addItemListener(myComboBoxItemListener);
    }
  }

  // Detach and null out the internal listeners once the last subscriber is removed.
  private void detachListeners() {
    if (myDocumentListener != null) {
      ((EditorTextField)getFocusableComponent()).removeDocumentListener(myDocumentListener);
      myDocumentListener = null;
    }
    if (myComboBoxItemListener != null) {
      ((JComboBox)myComponent).removeItemListener(myComboBoxItemListener);
      myComboBoxItemListener = null;
    }
  }

  private void fireDataChanged() {
    Object[] list = myListenerList.getListenerList();
    for (Object aList : list) {
      if (aList instanceof DataChanged) {
        ((DataChanged)aList).dataChanged();
      }
    }
  }

  @Override
  public boolean requestFocusInWindow() {
    if (myComponent instanceof JComboBox) {
      return ((JComboBox)myComponent).getEditor().getEditorComponent().requestFocusInWindow();
    }
    else {
      return myComponent.requestFocusInWindow();
    }
  }

  @Override
  public void setEnabled(boolean enabled) {
    myComponent.setEnabled(enabled);
  }

  /**
   * Forwards document changes to DataChanged listeners. A genuine user edit
   * (myNonHumanChange == false) also hides an open combo popup.
   */
  private class MyDocumentListener implements DocumentListener {
    @Override
    public void beforeDocumentChange(DocumentEvent event) {
    }

    @Override
    public void documentChanged(DocumentEvent event) {
      if (!myNonHumanChange && myComponent instanceof JComboBox && ((JComboBox)myComponent).isPopupVisible()) {
        ((JComboBox)myComponent).hidePopup();
      }
      // Reset the flag after every change so the next change is treated as human by default.
      myNonHumanChange = false;
      fireDataChanged();
    }
  }

  /** Forwards combo box selection changes to DataChanged listeners. */
  private class MyComboBoxItemListener implements ItemListener {
    @Override
    public void itemStateChanged(ItemEvent e) {
      fireDataChanged();
    }
  }
}
package com.example.avjindersinghsekhon.minimaltodo;

import android.animation.Animator;
import android.content.Intent;
import android.graphics.Color;
import android.graphics.PorterDuff;
import android.graphics.drawable.Drawable;
import android.os.Bundle;
import android.support.design.widget.FloatingActionButton;
import android.support.v4.app.NavUtils;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.SwitchCompat;
import android.support.v7.widget.Toolbar;
import android.text.Editable;
import android.text.TextWatcher;
import android.text.format.DateFormat;
import android.util.Log;
import android.view.MenuItem;
import android.view.View;
import android.view.inputmethod.InputMethodManager;
import android.widget.Button;
import android.widget.CompoundButton;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.Toast;

import com.wdullaer.materialdatetimepicker.date.DatePickerDialog;
import com.wdullaer.materialdatetimepicker.time.RadialPickerLayout;
import com.wdullaer.materialdatetimepicker.time.TimePickerDialog;

import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;

/**
 * Activity for creating or editing a single {@code ToDoItem}.
 *
 * <p>The item to edit arrives via the intent extra {@code MainActivity.TODOITEM};
 * the (possibly modified) item is handed back to the caller through
 * {@link #makeResult(int)} / {@code setResult}. The user can toggle a reminder
 * switch and pick a date and time through the material date/time picker dialogs
 * (this activity implements their {@code OnDateSetListener} / {@code OnTimeSetListener}
 * callbacks). A light or dark theme is applied based on shared preferences.</p>
 */
public class AddToDoActivity extends AppCompatActivity implements DatePickerDialog.OnDateSetListener, TimePickerDialog.OnTimeSetListener {
    private Date mLastEdited;
    private EditText mToDoTextBodyEditText;
    private SwitchCompat mToDoDateSwitch;
    //    private TextView mLastSeenTextView;
    private LinearLayout mUserDateSpinnerContainingLinearLayout;
    private TextView mReminderTextView;
    private EditText mDateEditText;
    private EditText mTimeEditText;
    // NOTE(review): these two arrays and the two "choose" buttons are only referenced
    // from commented-out code below; they appear to be leftovers from an older layout.
    private String mDefaultTimeOptions12H[];
    private String mDefaultTimeOptions24H[];
    private Button mChooseDateButton;
    private Button mChooseTimeButton;
    private ToDoItem mUserToDoItem;
    private FloatingActionButton mToDoSendFloatingActionButton;
    public static final String DATE_FORMAT = "MMM d, yyyy";
    public static final String DATE_FORMAT_MONTH_DAY = "MMM d";
    public static final String DATE_FORMAT_TIME = "H:m";
    private String mUserEnteredText;
    private boolean mUserHasReminder;
    private Toolbar mToolbar;
    // The reminder date/time being edited; nulled when the switch is turned off and
    // re-seeded by setDateAndTimeEditText().
    private Date mUserReminderDate;
    private int mUserColor;
    private boolean setDateButtonClickedOnce = false;
    private boolean setTimeButtonClickedOnce = false;
    private LinearLayout mContainerLayout;
    // Theme name read from shared preferences; compared against MainActivity.LIGHTTHEME/DARKTHEME.
    private String theme;

    @Override
    protected void onResume() {
        super.onResume();
    }

    /**
     * Builds the UI: applies the saved theme, reads the ToDoItem from the intent,
     * wires up the text field, reminder switch, date/time pickers and the save FAB.
     */
    @SuppressWarnings("deprecation")
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        //Need references to these to change them during light/dark mode
        ImageButton reminderIconImageButton;
        TextView reminderRemindMeTextView;

        // The theme must be set before super.onCreate()/setContentView().
        theme = getSharedPreferences(MainActivity.THEME_PREFERENCES, MODE_PRIVATE).getString(MainActivity.THEME_SAVED, MainActivity.LIGHTTHEME);
        if (theme.equals(MainActivity.LIGHTTHEME)) {
            setTheme(R.style.CustomStyle_LightTheme);
            Log.d("OskarSchindler", "Light Theme");
        } else {
            setTheme(R.style.CustomStyle_DarkTheme);
        }
        super.onCreate(savedInstanceState);
//        setContentView(R.layout.activity_add_to_do);
        //Testing out a new layout
        setContentView(R.layout.activity_todo_test);

        //Show an X in place of <-
        final Drawable cross = getResources().getDrawable(R.drawable.ic_clear_white_24dp);
        if (cross != null) {
            cross.setColorFilter(getResources().getColor(R.color.icons), PorterDuff.Mode.SRC_ATOP);
        }

        mToolbar = (Toolbar) findViewById(R.id.toolbar);
        setSupportActionBar(mToolbar);
        if (getSupportActionBar() != null) {
            getSupportActionBar().setElevation(0);
            getSupportActionBar().setDisplayShowTitleEnabled(false);
            getSupportActionBar().setDisplayHomeAsUpEnabled(true);
            getSupportActionBar().setHomeAsUpIndicator(cross);
        }

        // Pull the item being edited (and its current state) out of the launching intent.
        mUserToDoItem = (ToDoItem) getIntent().getSerializableExtra(MainActivity.TODOITEM);

        mUserEnteredText = mUserToDoItem.getToDoText();
        mUserHasReminder = mUserToDoItem.hasReminder();
        mUserReminderDate = mUserToDoItem.getToDoDate();
        mUserColor = mUserToDoItem.getTodoColor();
//        if(mUserToDoItem.getLastEdited()==null) {
//            mLastEdited = new Date();
//        }
//        else{
//            mLastEdited = mUserToDoItem.getLastEdited();
//        }

        reminderIconImageButton = (ImageButton) findViewById(R.id.userToDoReminderIconImageButton);
        reminderRemindMeTextView = (TextView) findViewById(R.id.userToDoRemindMeTextView);
        // Dark theme needs a white alarm icon and white label text.
        if (theme.equals(MainActivity.DARKTHEME)) {
            reminderIconImageButton.setImageDrawable(getResources().getDrawable(R.drawable.ic_alarm_add_white_24dp));
            reminderRemindMeTextView.setTextColor(Color.WHITE);
        }

        mContainerLayout = (LinearLayout) findViewById(R.id.todoReminderAndDateContainerLayout);
        mUserDateSpinnerContainingLinearLayout = (LinearLayout) findViewById(R.id.toDoEnterDateLinearLayout);
        mToDoTextBodyEditText = (EditText) findViewById(R.id.userToDoEditText);
        mToDoDateSwitch = (SwitchCompat) findViewById(R.id.toDoHasDateSwitchCompat);
//        mLastSeenTextView = (TextView)findViewById(R.id.toDoLastEditedTextView);
        mToDoSendFloatingActionButton = (FloatingActionButton) findViewById(R.id.makeToDoFloatingActionButton);
        mReminderTextView = (TextView) findViewById(R.id.newToDoDateTimeReminderTextView);

        // Tapping the container dismisses the soft keyboard.
        mContainerLayout.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                hideKeyboard(mToDoTextBodyEditText);
            }
        });

        if (mUserHasReminder && (mUserReminderDate != null)) {
//            mUserDateSpinnerContainingLinearLayout.setVisibility(View.VISIBLE);
            setReminderTextView();
            setEnterDateLayoutVisibleWithAnimations(true);
        }

        if (mUserReminderDate == null) {
            mToDoDateSwitch.setChecked(false);
            mReminderTextView.setVisibility(View.INVISIBLE);
        }

//        TextInputLayout til = (TextInputLayout)findViewById(R.id.toDoCustomTextInput);
//        til.requestFocus();
        mToDoTextBodyEditText.requestFocus();
        mToDoTextBodyEditText.setText(mUserEnteredText);
        InputMethodManager imm = (InputMethodManager) this.getSystemService(INPUT_METHOD_SERVICE);
//        imm.showSoftInput(mToDoTextBodyEditText, InputMethodManager.SHOW_IMPLICIT);
        imm.toggleSoftInput(InputMethodManager.SHOW_FORCED, InputMethodManager.HIDE_IMPLICIT_ONLY);
        mToDoTextBodyEditText.setSelection(mToDoTextBodyEditText.length());
        // Keep mUserEnteredText in sync with whatever the user types.
        mToDoTextBodyEditText.addTextChangedListener(new TextWatcher() {
            @Override
            public void beforeTextChanged(CharSequence s, int start, int count, int after) {
            }

            @Override
            public void onTextChanged(CharSequence s, int start, int before, int count) {
                mUserEnteredText = s.toString();
            }

            @Override
            public void afterTextChanged(Editable s) {
            }
        });

//        String lastSeen = formatDate(DATE_FORMAT, mLastEdited);
//        mLastSeenTextView.setText(String.format(getResources().getString(R.string.last_edited), lastSeen));

        setEnterDateLayoutVisible(mToDoDateSwitch.isChecked());
        mToDoDateSwitch.setChecked(mUserHasReminder && (mUserReminderDate != null));
        mToDoDateSwitch.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
            @Override
            public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
                // Turning the switch off discards the current reminder date;
                // setDateAndTimeEditText() then re-seeds a default (next full hour).
                if (!isChecked) {
                    mUserReminderDate = null;
                }
                mUserHasReminder = isChecked;
                setDateAndTimeEditText();
                setEnterDateLayoutVisibleWithAnimations(isChecked);
                hideKeyboard(mToDoTextBodyEditText);
            }
        });

        // Save button: reject empty text; a reminder in the past cancels the edit.
        mToDoSendFloatingActionButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                if (mToDoTextBodyEditText.length() <= 0) {
                    mToDoTextBodyEditText.setError(getString(R.string.todo_error));
                } else if (mUserReminderDate != null && mUserReminderDate.before(new Date())) {
                    makeResult(RESULT_CANCELED);
                } else {
                    makeResult(RESULT_OK);
                    finish();
                }
                hideKeyboard(mToDoTextBodyEditText);
            }
        });

        mDateEditText = (EditText) findViewById(R.id.newTodoDateEditText);
        mTimeEditText = (EditText) findViewById(R.id.newTodoTimeEditText);

        // Tapping the date field opens the material DatePickerDialog seeded from
        // the current reminder date (or "now" when the item has no date yet).
        mDateEditText.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Date date;
                hideKeyboard(mToDoTextBodyEditText);
                if (mUserToDoItem.getToDoDate() != null) {
//                    date = mUserToDoItem.getToDoDate();
                    date = mUserReminderDate;
                } else {
                    date = new Date();
                }
                Calendar calendar = Calendar.getInstance();
                calendar.setTime(date);
                int year = calendar.get(Calendar.YEAR);
                int month = calendar.get(Calendar.MONTH);
                int day = calendar.get(Calendar.DAY_OF_MONTH);
                DatePickerDialog datePickerDialog = DatePickerDialog.newInstance(AddToDoActivity.this, year, month, day);
                if (theme.equals(MainActivity.DARKTHEME)) {
                    datePickerDialog.setThemeDark(true);
                }
                datePickerDialog.show(getFragmentManager(), "DateFragment");
            }
        });

        // Tapping the time field opens the material TimePickerDialog, honoring the
        // device's 12/24-hour setting.
        mTimeEditText.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Date date;
                hideKeyboard(mToDoTextBodyEditText);
                if (mUserToDoItem.getToDoDate() != null) {
//                    date = mUserToDoItem.getToDoDate();
                    date = mUserReminderDate;
                } else {
                    date = new Date();
                }
                Calendar calendar = Calendar.getInstance();
                calendar.setTime(date);
                int hour = calendar.get(Calendar.HOUR_OF_DAY);
                int minute = calendar.get(Calendar.MINUTE);
                TimePickerDialog timePickerDialog = TimePickerDialog.newInstance(AddToDoActivity.this, hour, minute, DateFormat.is24HourFormat(AddToDoActivity.this));
                if (theme.equals(MainActivity.DARKTHEME)) {
                    timePickerDialog.setThemeDark(true);
                }
                timePickerDialog.show(getFragmentManager(), "TimeFragment");
            }
        });

//        mDefaultTimeOptions12H = new String[]{"9:00 AM", "12:00 PM", "3:00 PM", "6:00 PM", "9:00 PM", "12:00 AM"};
//        mDefaultTimeOptions24H = new String[]{"9:00", "12:00", "15:00", "18:00", "21:00", "24:00"};

        setDateAndTimeEditText();
//
//        mChooseDateButton = (Button)findViewById(R.id.newToDoChooseDateButton);
//        mChooseTimeButton = (Button)findViewById(R.id.newToDoChooseTimeButton);
//
//        mChooseDateButton.setOnClickListener(new View.OnClickListener() {
//            @Override
//            public void onClick(View v) {
//                Date date;
//                hideKeyboard(mToDoTextBodyEditText);
//                if(mUserToDoItem.getToDoDate()!=null){
//                    date = mUserToDoItem.getToDoDate();
//                }
//                else{
//                    date = new Date();
//                }
//                Calendar calendar = Calendar.getInstance();
//                calendar.setTime(date);
//                int year = calendar.get(Calendar.YEAR);
//                int month = calendar.get(Calendar.MONTH);
//                int day = calendar.get(Calendar.DAY_OF_MONTH);
//
//
//                DatePickerDialog datePickerDialog = DatePickerDialog.newInstance(AddToDoActivity.this, year, month, day);
//                if(theme.equals(MainActivity.DARKTHEME)){
//                    datePickerDialog.setThemeDark(true);
//                }
//                datePickerDialog.show(getFragmentManager(), "DateFragment");
//            }
//        });
//
//        mChooseTimeButton.setOnClickListener(new View.OnClickListener() {
//            @Override
//            public void onClick(View v) {
//                Date date;
//                hideKeyboard(mToDoTextBodyEditText);
//                if(mUserToDoItem.getToDoDate()!=null){
//                    date = mUserToDoItem.getToDoDate();
//                }
//                else{
//                    date = new Date();
//                }
//                Calendar calendar = Calendar.getInstance();
//                calendar.setTime(date);
//                int hour = calendar.get(Calendar.HOUR_OF_DAY);
//                int minute = calendar.get(Calendar.MINUTE);
//
//                TimePickerDialog timePickerDialog = TimePickerDialog.newInstance(AddToDoActivity.this, hour, minute, DateFormat.is24HourFormat(AddToDoActivity.this));
//                if(theme.equals(MainActivity.DARKTHEME)){
//                    timePickerDialog.setThemeDark(true);
//                }
//                timePickerDialog.show(getFragmentManager(), "TimeFragment");
//            }
//        });
    }

    /**
     * Refreshes the date and time EditTexts. With an active reminder, formats the
     * stored date; otherwise shows the default date prompt and seeds
     * {@code mUserReminderDate} with the next full hour.
     */
    private void setDateAndTimeEditText() {
        if (mUserToDoItem.hasReminder() && mUserReminderDate != null) {
            String userDate = formatDate("d MMM, yyyy", mUserReminderDate);
            String formatToUse;
            if (DateFormat.is24HourFormat(this)) {
                formatToUse = "k:mm";
            } else {
                formatToUse = "h:mm a";
            }
            String userTime = formatDate(formatToUse, mUserReminderDate);
            mTimeEditText.setText(userTime);
            mDateEditText.setText(userDate);
        } else {
            mDateEditText.setText(getString(R.string.date_reminder_default));
//            mUserReminderDate = new Date();
            boolean time24 = DateFormat.is24HourFormat(this);
            Calendar cal = Calendar.getInstance();
            // Default reminder: top of the next hour.
            if (time24) {
                cal.set(Calendar.HOUR_OF_DAY, cal.get(Calendar.HOUR_OF_DAY) + 1);
            } else {
                cal.set(Calendar.HOUR, cal.get(Calendar.HOUR) + 1);
            }
            cal.set(Calendar.MINUTE, 0);
            mUserReminderDate = cal.getTime();
            Log.d("OskarSchindler", "Imagined Date: " + mUserReminderDate);
            String timeString;
            if (time24) {
                timeString = formatDate("k:mm", mUserReminderDate);
            } else {
                timeString = formatDate("h:mm a", mUserReminderDate);
            }
            mTimeEditText.setText(timeString);
//            int hour = calendar.get(Calendar.HOUR_OF_DAY);
//            if(hour<9){
//                timeOption = time24?mDefaultTimeOptions24H[0]:mDefaultTimeOptions12H[0];
//            }
//            else if(hour < 12){
//                timeOption = time24?mDefaultTimeOptions24H[1]:mDefaultTimeOptions12H[1];
//            }
//            else if(hour < 15){
//                timeOption = time24?mDefaultTimeOptions24H[2]:mDefaultTimeOptions12H[2];
//            }
//            else if(hour < 18){
//                timeOption = time24?mDefaultTimeOptions24H[3]:mDefaultTimeOptions12H[3];
//            }
//            else if(hour < 21){
//                timeOption = time24?mDefaultTimeOptions24H[4]:mDefaultTimeOptions12H[4];
//            }
//            else{
//                timeOption = time24?mDefaultTimeOptions24H[5]:mDefaultTimeOptions12H[5];
//            }
//            mTimeEditText.setText(timeOption);
        }
    }

    /** Reads the saved theme name from shared preferences (defaults to light). */
    private String getThemeSet() {
        return getSharedPreferences(MainActivity.THEME_PREFERENCES, MODE_PRIVATE).getString(MainActivity.THEME_SAVED, MainActivity.LIGHTTHEME);
    }

    /** Hides the soft keyboard attached to the given EditText. */
    public void hideKeyboard(EditText et) {
        InputMethodManager imm = (InputMethodManager) getSystemService(INPUT_METHOD_SERVICE);
        imm.hideSoftInputFromWindow(et.getWindowToken(), 0);
    }

    /**
     * Applies a date chosen in the DatePickerDialog, keeping the previously chosen
     * time-of-day. Dates in the past are rejected with a toast.
     */
    public void setDate(int year, int month, int day) {
        Calendar calendar = Calendar.getInstance();
        int hour, minute;
//        int currentYear = calendar.get(Calendar.YEAR);
//        int currentMonth = calendar.get(Calendar.MONTH);
//        int currentDay = calendar.get(Calendar.DAY_OF_MONTH);
        Calendar reminderCalendar = Calendar.getInstance();
        reminderCalendar.set(year, month, day);
        if (reminderCalendar.before(calendar)) {
            Toast.makeText(this, "My time-machine is a bit rusty", Toast.LENGTH_SHORT).show();
            return;
        }
        // Carry over the existing reminder's time-of-day, if any.
        if (mUserReminderDate != null) {
            calendar.setTime(mUserReminderDate);
        }
        if (DateFormat.is24HourFormat(this)) {
            hour = calendar.get(Calendar.HOUR_OF_DAY);
        } else {
            hour = calendar.get(Calendar.HOUR);
        }
        minute = calendar.get(Calendar.MINUTE);
        calendar.set(year, month, day, hour, minute);
        mUserReminderDate = calendar.getTime();
        setReminderTextView();
//        setDateAndTimeEditText();
        setDateEditText();
    }

    /**
     * Applies a time chosen in the TimePickerDialog, keeping the previously
     * chosen calendar date. Seconds are zeroed.
     */
    public void setTime(int hour, int minute) {
        Calendar calendar = Calendar.getInstance();
        if (mUserReminderDate != null) {
            calendar.setTime(mUserReminderDate);
        }
//        if(DateFormat.is24HourFormat(this) && hour == 0){
//            //done for 24h time
//            hour = 24;
//        }
        int year = calendar.get(Calendar.YEAR);
        int month = calendar.get(Calendar.MONTH);
        int day = calendar.get(Calendar.DAY_OF_MONTH);
        Log.d("OskarSchindler", "Time set: " + hour);
        calendar.set(year, month, day, hour, minute, 0);
        mUserReminderDate = calendar.getTime();
        setReminderTextView();
//        setDateAndTimeEditText();
        setTimeEditText();
    }

    /** Shows the current reminder date in the date EditText. */
    public void setDateEditText() {
        String dateFormat = "d MMM, yyyy";
        mDateEditText.setText(formatDate(dateFormat, mUserReminderDate));
    }

    /** Shows the current reminder time in the time EditText (12/24h aware). */
    public void setTimeEditText() {
        String dateFormat;
        if (DateFormat.is24HourFormat(this)) {
            dateFormat = "k:mm";
        } else {
            dateFormat = "h:mm a";
        }
        mTimeEditText.setText(formatDate(dateFormat, mUserReminderDate));
    }

    /**
     * Updates the summary label under the pickers: hidden when there is no
     * reminder, a red error message for past dates, otherwise a formatted
     * "remind on <date> at <time>" string.
     */
    public void setReminderTextView() {
        if (mUserReminderDate != null) {
            mReminderTextView.setVisibility(View.VISIBLE);
            if (mUserReminderDate.before(new Date())) {
                Log.d("OskarSchindler", "DATE is " + mUserReminderDate);
                mReminderTextView.setText(getString(R.string.date_error_check_again));
                mReminderTextView.setTextColor(Color.RED);
                return;
            }
            Date date = mUserReminderDate;
            String dateString = formatDate("d MMM, yyyy", date);
            String timeString;
            String amPmString = "";
            if (DateFormat.is24HourFormat(this)) {
                timeString = formatDate("k:mm", date);
            } else {
                timeString = formatDate("h:mm", date);
                amPmString = formatDate("a", date);
            }
            String finalString = String.format(getResources().getString(R.string.remind_date_and_time), dateString, timeString, amPmString);
            mReminderTextView.setTextColor(getResources().getColor(R.color.secondary_text));
            mReminderTextView.setText(finalString);
        } else {
            mReminderTextView.setVisibility(View.INVISIBLE);
        }
    }

    /**
     * Writes the edited state back into the ToDoItem (capitalizing the first
     * letter of non-empty text, zeroing reminder seconds) and sets it as the
     * activity result under MainActivity.TODOITEM.
     */
    public void makeResult(int result) {
        Intent i = new Intent();
        if (mUserEnteredText.length() > 0) {
            String capitalizedString = Character.toUpperCase(mUserEnteredText.charAt(0)) + mUserEnteredText.substring(1);
            mUserToDoItem.setToDoText(capitalizedString);
        } else {
            mUserToDoItem.setToDoText(mUserEnteredText);
        }
//        mUserToDoItem.setLastEdited(mLastEdited);
        if (mUserReminderDate != null) {
            Calendar calendar = Calendar.getInstance();
            calendar.setTime(mUserReminderDate);
            calendar.set(Calendar.SECOND, 0);
            mUserReminderDate = calendar.getTime();
        }
        mUserToDoItem.setHasReminder(mUserHasReminder);
        mUserToDoItem.setToDoDate(mUserReminderDate);
        mUserToDoItem.setTodoColor(mUserColor);
        i.putExtra(MainActivity.TODOITEM, mUserToDoItem);
        setResult(result, i);
    }

    /** Back press saves the item (dropping a past-dated reminder) with RESULT_OK. */
    @Override
    public void onBackPressed() {
        // NOTE(review): mUserReminderDate is dereferenced without a null check here.
        // setDateAndTimeEditText() appears to always re-seed it during onCreate, but
        // if a path exists where it remains null this would NPE — confirm.
        if (mUserReminderDate.before(new Date())) {
            mUserToDoItem.setToDoDate(null);
        }
        makeResult(RESULT_OK);
        super.onBackPressed();
    }

    /** The toolbar "X" (home) cancels the edit and navigates up. */
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case android.R.id.home:
                if (NavUtils.getParentActivityName(this) != null) {
                    makeResult(RESULT_CANCELED);
                    NavUtils.navigateUpFromSameTask(this);
                }
                hideKeyboard(mToDoTextBodyEditText);
                return true;
            default:
                return super.onOptionsItemSelected(item);
        }
    }

    /** Formats a Date with the given SimpleDateFormat pattern (default locale). */
    public static String formatDate(String formatString, Date dateToFormat) {
        SimpleDateFormat simpleDateFormat = new SimpleDateFormat(formatString);
        return simpleDateFormat.format(dateToFormat);
    }

    // TimePickerDialog.OnTimeSetListener callback.
    @Override
    public void onTimeSet(RadialPickerLayout radialPickerLayout, int hour, int minute) {
        setTime(hour, minute);
    }

    // DatePickerDialog.OnDateSetListener callback.
    @Override
    public void onDateSet(DatePickerDialog datePickerDialog, int year, int month, int day) {
        setDate(year, month, day);
    }

    /** Shows or hides the date/time entry row without animation. */
    public void setEnterDateLayoutVisible(boolean checked) {
        if (checked) {
            mUserDateSpinnerContainingLinearLayout.setVisibility(View.VISIBLE);
        } else {
            mUserDateSpinnerContainingLinearLayout.setVisibility(View.INVISIBLE);
        }
    }

    /**
     * Fades the date/time entry row in or out over 500 ms. When showing, the row
     * becomes VISIBLE at animation start; when hiding, INVISIBLE at animation end.
     */
    public void setEnterDateLayoutVisibleWithAnimations(boolean checked) {
        if (checked) {
            setReminderTextView();
            mUserDateSpinnerContainingLinearLayout.animate().alpha(1.0f).setDuration(500).setListener(
                    new Animator.AnimatorListener() {
                        @Override
                        public void onAnimationStart(Animator animation) {
                            mUserDateSpinnerContainingLinearLayout.setVisibility(View.VISIBLE);
                        }

                        @Override
                        public void onAnimationEnd(Animator animation) {
                        }

                        @Override
                        public void onAnimationCancel(Animator animation) {
                        }

                        @Override
                        public void onAnimationRepeat(Animator animation) {
                        }
                    }
            );
        } else {
            mUserDateSpinnerContainingLinearLayout.animate().alpha(0.0f).setDuration(500).setListener(
                    new Animator.AnimatorListener() {
                        @Override
                        public void onAnimationStart(Animator animation) {
                        }

                        @Override
                        public void onAnimationEnd(Animator animation) {
                            mUserDateSpinnerContainingLinearLayout.setVisibility(View.INVISIBLE);
                        }

                        @Override
                        public void onAnimationCancel(Animator animation) {
                        }

                        @Override
                        public void onAnimationRepeat(Animator animation) {
                        }
                    }
            );
        }
    }
}
/* Copyright (c) 2013-2014 Boundless and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Distribution License v1.0 * which accompanies this distribution, and is available at * https://www.eclipse.org/org/documents/edl-v10.html * * Contributors: * Victor Olaya (Boundless) - initial implementation */ package org.locationtech.geogig.api.porcelain; import static com.google.common.base.Preconditions.checkState; import java.util.Collection; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Map; import org.eclipse.jdt.annotation.Nullable; import org.locationtech.geogig.api.AbstractGeoGigOp; import org.locationtech.geogig.api.CommitBuilder; import org.locationtech.geogig.api.ObjectId; import org.locationtech.geogig.api.Platform; import org.locationtech.geogig.api.Ref; import org.locationtech.geogig.api.RevCommit; import org.locationtech.geogig.api.SymRef; import org.locationtech.geogig.api.plumbing.FindCommonAncestor; import org.locationtech.geogig.api.plumbing.ForEachRef; import org.locationtech.geogig.api.plumbing.RefParse; import org.locationtech.geogig.api.plumbing.UpdateRef; import org.locationtech.geogig.api.plumbing.UpdateSymRef; import org.locationtech.geogig.api.porcelain.ResetOp.ResetMode; import org.locationtech.geogig.repository.Repository; import org.locationtech.geogig.storage.GraphDatabase; import com.google.common.base.Function; import com.google.common.base.Optional; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import com.google.common.base.Suppliers; import com.google.common.collect.Collections2; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.common.collect.Maps; /** * Operation to squash commits into one. 
*/
public class SquashOp extends AbstractGeoGigOp<ObjectId> {

    // Oldest commit of the range to squash (inclusive).
    private RevCommit since;

    // Most recent commit of the range to squash (inclusive).
    private RevCommit until;

    // Message for the resulting commit; when null, since.getMessage() is used.
    private String message;

    /**
     * Indicates the first commit to squash. If no message is provided, the message from this commit
     * will be used
     *
     * @param since the first (oldest) commit to squash
     * @return {@code this}
     */
    public SquashOp setSince(final RevCommit since) {
        this.since = since;
        return this;
    }

    /**
     * Indicates the last commit to squash
     *
     * @param until the last (most recent) commit to squash
     * @return {@code this}
     */
    public SquashOp setUntil(RevCommit until) {
        this.until = until;
        return this;
    }

    /**
     * Indicates the message to use for the commit. If null, the message from the 'since' commit
     * will be used
     *
     * @param message the message to use for the commit
     * @return {@code this}
     */
    public SquashOp setMessage(String message) {
        this.message = message;
        return this;
    }

    /**
     * Executes the squash operation.
     *
     * <p>
     * Preconditions enforced below: both {@code since} and {@code until} are set, HEAD exists and
     * is not detached, the index and working tree are clean, {@code since} is an ancestor of
     * {@code until} reachable through first parents, and no commit involved is a branch starting
     * point (has more than one child, or is pointed at by another branch ref).
     *
     * @return the new head after modifying the history squashing commits
     * @see org.locationtech.geogig.api.AbstractGeoGigOp#call()
     */
    @Override
    protected ObjectId _call() {
        Preconditions.checkNotNull(since);
        Preconditions.checkNotNull(until);

        GraphDatabase graphDb = graphDatabase();
        Repository repository = repository();
        Platform platform = platform();

        final Optional<Ref> currHead = command(RefParse.class).setName(Ref.HEAD).call();
        Preconditions.checkState(currHead.isPresent(), "Repository has no HEAD, can't squash.");
        Preconditions.checkState(currHead.get() instanceof SymRef,
                "Can't squash from detached HEAD");
        final SymRef headRef = (SymRef) currHead.get();
        final String currentBranch = headRef.getTarget();

        Preconditions.checkState(index().isClean() && workingTree().isClean(),
                "You must have a clean working tree and index to perform a squash.");

        // The common ancestor of 'since' and 'until' must be 'since' itself, i.e. 'since' is an
        // ancestor of 'until'.
        // NOTE(review): the message below reads "'since' and 'until' command"; presumably
        // "commits" was intended — left unchanged since it is a runtime string.
        Optional<ObjectId> ancestor = command(FindCommonAncestor.class).setLeft(since)
                .setRight(until).call();
        Preconditions.checkArgument(ancestor.isPresent(),
                "'since' and 'until' command do not have a common ancestor");
        Preconditions.checkArgument(ancestor.get().equals(since.getId()),
                "Commits provided in wrong order");

        // The head is later rewound to the first parent of 'since', so 'since' cannot be the
        // root commit.
        Preconditions.checkArgument(!since.getParentIds().isEmpty(),
                "'since' commit has no parents");

        // we get a a list of commits to apply on top of the squashed commits
        List<RevCommit> commits = getCommitsAfterUntil();

        ImmutableSet<Ref> refs = command(ForEachRef.class).setPrefixFilter(Ref.HEADS_PREFIX).call();

        // we create a list of all parents of those squashed commits, in case they are
        // merge commits. The resulting commit will have all these parents
        //
        // While iterating the set of commits to squash, we check that there are no branch starting
        // points among them. Any commit with more than one child causes an exception to be thrown,
        // since the squash operation does not support squashing those commits

        Iterator<RevCommit> toSquash = command(LogOp.class).setSince(since.getParentIds().get(0))
                .setUntil(until.getId()).setFirstParentOnly(true).call();
        List<ObjectId> firstParents = Lists.newArrayList();
        List<ObjectId> secondaryParents = Lists.newArrayList();
        final List<ObjectId> squashedIds = Lists.newArrayList();
        RevCommit commitToSquash = until;
        while (toSquash.hasNext()) {
            commitToSquash = toSquash.next();
            squashedIds.add(commitToSquash.getId());
            Preconditions
                    .checkArgument(
                            graphDb.getChildren(commitToSquash.getId()).size() < 2,
                            "The commits to squash include a branch starting point. Squashing that type of commit is not supported.");
            for (Ref ref : refs) {
                // In case a branch has been created but no commit has been made on it and the
                // starting commit has just one child
                Preconditions
                        .checkArgument(
                                !ref.getObjectId().equals(commitToSquash.getId())
                                        || ref.getObjectId().equals(currHead.get().getObjectId())
                                        || commitToSquash.getParentIds().size() > 1,
                                "The commits to squash include a branch starting point. Squashing that type of commit is not supported.");
            }
            // Collect every parent: first parents form the linear history; any additional
            // parents come from merge commits and are preserved on the squashed commit.
            ImmutableList<ObjectId> parentIds = commitToSquash.getParentIds();
            for (int i = 1; i < parentIds.size(); i++) {
                secondaryParents.add(parentIds.get(i));
            }
            firstParents.add(parentIds.get(0));
        }
        // The first-parent walk above must have ended exactly at 'since'.
        Preconditions.checkArgument(since.equals(commitToSquash),
                "Cannot reach 'since' from 'until' commit through first parentage");

        // We do the same check in the children commits
        for (RevCommit commit : commits) {
            Preconditions
                    .checkArgument(
                            graphDb.getChildren(commit.getId()).size() < 2,
                            "The commits after the ones to squash include a branch starting point. This scenario is not supported.");
            for (Ref ref : refs) {
                // In case a branch has been created but no commit has been made on it
                Preconditions
                        .checkArgument(
                                !ref.getObjectId().equals(commit.getId())
                                        || ref.getObjectId().equals(currHead.get().getObjectId())
                                        || commit.getParentIds().size() > 1,
                                "The commits after the ones to squash include a branch starting point. This scenario is not supported.");
            }
        }

        ObjectId newHead;
        // rewind the head
        newHead = since.getParentIds().get(0);
        command(ResetOp.class).setCommit(Suppliers.ofInstance(newHead)).setMode(ResetMode.HARD)
                .call();

        // add the current HEAD as first parent of the resulting commit
        // parents.add(0, newHead);

        // Create new commit
        List<ObjectId> parents = Lists.newArrayList();
        parents.addAll(firstParents);
        parents.addAll(secondaryParents);
        // The squashed commit keeps the tree of 'until', so the final repository content is
        // unchanged.
        ObjectId endTree = until.getTreeId();
        CommitBuilder builder = new CommitBuilder(until);
        // Drop parents that are themselves being squashed away.
        Collection<ObjectId> filteredParents = Collections2.filter(parents,
                new Predicate<ObjectId>() {
                    @Override
                    public boolean apply(@Nullable ObjectId id) {
                        return !squashedIds.contains(id);
                    }
                });

        builder.setParentIds(Lists.newArrayList(filteredParents));
        builder.setTreeId(endTree);
        if (message == null) {
            // NOTE(review): this assigns the field, not a local, so the op instance is not
            // reusable with a different commit range after the first call — confirm intended.
            message = since.getMessage();
        }
        long timestamp = platform.currentTimeMillis();
        builder.setMessage(message);
        builder.setCommitter(resolveCommitter());
        builder.setCommitterEmail(resolveCommitterEmail());
        builder.setCommitterTimestamp(timestamp);
        builder.setCommitterTimeZoneOffset(platform.timeZoneOffset(timestamp));
        builder.setAuthorTimestamp(until.getAuthor().getTimestamp());

        RevCommit newCommit = builder.build();
        repository.objectDatabase().put(newCommit);

        newHead = newCommit.getId();
        ObjectId newTreeId = newCommit.getTreeId();

        // Point the current branch at the squashed commit and restore HEAD as a symref to it,
        // then sync working tree and index to the new tree.
        command(UpdateRef.class).setName(currentBranch).setNewValue(newHead).call();
        command(UpdateSymRef.class).setName(Ref.HEAD).setNewValue(currentBranch).call();

        workingTree().updateWorkHead(newTreeId);
        index().updateStageHead(newTreeId);

        // now put the other commits after the squashed one
        newHead = addCommits(commits, currentBranch, newHead);

        return newHead;
    }

    /**
     * Re-creates the given commits on top of the squashed commit, rewriting parent ids that
     * referred to replaced commits and advancing the branch/HEAD after each one.
     *
     * @param commits the commits that came after 'until', oldest first
     * @param currentBranch the ref name of the branch being rewritten
     * @param squashedId the id of the squashed commit that replaces 'until'
     * @return the id of the last commit written (the new head)
     */
    private ObjectId addCommits(List<RevCommit> commits, String currentBranch,
            final ObjectId squashedId) {

        final Platform platform = platform();
        // Maps original commit ids to their rewritten replacements so child commits can be
        // re-parented onto the new history.
        final Map<ObjectId, ObjectId> replacedCommits = Maps.newHashMap();
        replacedCommits.put(until.getId(), squashedId);
        ObjectId head = squashedId;
        for (RevCommit commit : commits) {
            CommitBuilder builder = new CommitBuilder(commit);
            Collection<ObjectId> parents = Collections2.transform(commit.getParentIds(),
                    new Function<ObjectId, ObjectId>() {
                        @Override
                        @Nullable
                        public ObjectId apply(@Nullable ObjectId id) {
                            // Substitute rewritten parents; untouched parents pass through.
                            if (replacedCommits.containsKey(id)) {
                                return replacedCommits.get(id);
                            } else {
                                return id;
                            }
                        }
                    });
            builder.setParentIds(Lists.newArrayList(parents));
            builder.setTreeId(commit.getTreeId());
            // Committer info is refreshed to "now"; author info is carried over by the builder.
            long timestamp = platform.currentTimeMillis();
            builder.setCommitterTimestamp(timestamp);
            builder.setCommitterTimeZoneOffset(platform.timeZoneOffset(timestamp));

            RevCommit newCommit = builder.build();
            replacedCommits.put(commit.getId(), newCommit.getId());
            objectDatabase().put(newCommit);
            head = newCommit.getId();
            ObjectId newTreeId = newCommit.getTreeId();
            command(UpdateRef.class).setName(currentBranch).setNewValue(head).call();
            command(UpdateSymRef.class).setName(Ref.HEAD).setNewValue(currentBranch).call();
            workingTree().updateWorkHead(newTreeId);
            index().updateStageHead(newTreeId);
        }
        return head;
    }

    /**
     * Returns the commits that come after 'until' on the current history, oldest first (the log
     * iterator yields newest-first, hence the reverse).
     */
    private List<RevCommit> getCommitsAfterUntil() {
        Iterator<RevCommit> commitIterator = command(LogOp.class).setSince(until.getId()).call();
        List<RevCommit> commits = Lists.newArrayList(commitIterator);
        Collections.reverse(commits);
        return commits;
    }

    /**
     * Resolves the committer name from the {@code user.name} config entry, failing with an
     * instructive message when it is not set.
     */
    private String resolveCommitter() {
        final String key = "user.name";
        Optional<String> name = command(ConfigGet.class).setName(key).call();

        checkState(
                name.isPresent(),
                "%s not found in config. Use geogig config [--global] %s <your name> to configure it.",
                key, key);

        return name.get();
    }

    /**
     * Resolves the committer email from the {@code user.email} config entry, failing with an
     * instructive message when it is not set.
     */
    private String resolveCommitterEmail() {
        final String key = "user.email";
        Optional<String> email = command(ConfigGet.class).setName(key).call();

        checkState(
                email.isPresent(),
                "%s not found in config. Use geogig config [--global] %s <your email> to configure it.",
                key, key);

        return email.get();
    }

}
/*
 * Copyright 2015-2017 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.funtl.framework.alipay.trade.pay.protocol.downloadbill.util;

import java.io.IOException;
import java.io.OutputStream;
import java.util.HashSet;
import java.util.Vector;
import java.util.zip.CRC32;
import java.util.zip.Deflater;
import java.util.zip.ZipException;

/**
 * This class implements an output stream filter for writing files in the
 * ZIP file format. Includes support for both compressed and uncompressed
 * entries.
 *
 * <p>NOTE(review): {@code ZipEntry}, {@code ZipConstants} and
 * {@code DeflaterOutputStream} are unqualified and therefore resolve to
 * package-local copies, not {@code java.util.zip} — only {@code CRC32},
 * {@code Deflater} and {@code ZipException} are imported from the JDK.
 *
 * @author David Connelly
 * @version 1.35, 07/31/06
 */
public class ZipOutputStream extends DeflaterOutputStream implements ZipConstants {

    /**
     * Pairs a ZIP entry with its LOC-header offset and the general-purpose
     * bit flag chosen for it.
     */
    private static class XEntry {
        public final ZipEntry entry;
        // Byte offset of this entry's LOC header within the stream; written
        // later into the CEN record as the "relative offset of local header".
        public final long offset;
        public final int flag;

        public XEntry(ZipEntry entry, long offset) {
            this.entry = entry;
            this.offset = offset;
            // Bit 3 is set when the entry is DEFLATED and any of size /
            // compressed size / crc-32 is still unknown at LOC-write time.
            this.flag = (entry.method == DEFLATED
                    && (entry.size == -1 || entry.csize == -1 || entry.crc == -1))
                    // store size, compressed size, and crc-32 in data descriptor
                    // immediately following the compressed entry data
                    ? 8
                    // store size, compressed size, and crc-32 in LOC header
                    : 0;
        }
    }

    // Entry currently being written, or null between entries.
    private XEntry current;
    // All entries written so far, in order; replayed when writing the CEN.
    private Vector<XEntry> xentries = new Vector<XEntry>();
    // Entry names seen so far, used to reject duplicates.
    private HashSet<String> names = new HashSet<String>();
    // Running CRC-32 of the current entry's uncompressed data.
    private CRC32 crc = new CRC32();
    // Total bytes emitted to the underlying stream (headers + data).
    private long written = 0;
    // Stream offset where the current entry's data begins (after its LOC header).
    private long locoff = 0;
    private String comment;
    private int method = DEFLATED;
    private boolean finished;
    private boolean closed = false;

    /**
     * Returns the "version needed to extract" value for the entry's
     * compression method.
     */
    private static int version(ZipEntry e) throws ZipException {
        switch (e.method) {
            case DEFLATED:
                return 20;
            case STORED:
                return 10;
            default:
                throw new ZipException("unsupported compression method");
        }
    }

    /**
     * Checks to make sure that this stream has not been closed.
     */
    private void ensureOpen() throws IOException {
        if (closed) {
            throw new IOException("Stream closed");
        }
    }

    /**
     * Compression method for uncompressed (STORED) entries.
     */
    public static final int STORED = ZipEntry.STORED;

    /**
     * Compression method for compressed (DEFLATED) entries.
     */
    public static final int DEFLATED = ZipEntry.DEFLATED;

    /**
     * Creates a new ZIP output stream.
     *
     * @param out the actual output stream
     */
    public ZipOutputStream(OutputStream out) {
        // "true" = raw deflate (no zlib wrapper), as the ZIP format requires.
        super(out, new Deflater(Deflater.DEFAULT_COMPRESSION, true));
        // usesDefaultDeflater is inherited from the package-local
        // DeflaterOutputStream.
        usesDefaultDeflater = true;
    }

    /**
     * Sets the ZIP file comment.
     *
     * @param comment the comment string
     * @throws IllegalArgumentException if the length of the specified
     *                                  ZIP file comment is greater than 0xFFFF bytes
     */
    public void setComment(String comment) {
        // Cheap char-count check first; only compute the exact UTF-8 length
        // when the comment could plausibly exceed the 0xFFFF-byte limit.
        if (comment != null && comment.length() > 0xffff / 3 && getUTF8Length(comment) > 0xffff) {
            throw new IllegalArgumentException("ZIP file comment too long.");
        }
        this.comment = comment;
    }

    /**
     * Sets the default compression method for subsequent entries. This
     * default will be used whenever the compression method is not specified
     * for an individual ZIP file entry, and is initially set to DEFLATED.
     *
     * @param method the default compression method
     * @throws IllegalArgumentException if the specified compression method
     *                                  is invalid
     */
    public void setMethod(int method) {
        if (method != DEFLATED && method != STORED) {
            throw new IllegalArgumentException("invalid compression method");
        }
        this.method = method;
    }

    /**
     * Sets the compression level for subsequent entries which are DEFLATED.
     * The default setting is DEFAULT_COMPRESSION.
     *
     * @param level the compression level (0-9)
     * @throws IllegalArgumentException if the compression level is invalid
     */
    public void setLevel(int level) {
        def.setLevel(level);
    }

    /**
     * Begins writing a new ZIP file entry and positions the stream to the
     * start of the entry data. Closes the current entry if still active.
     * The default compression method will be used if no compression method
     * was specified for the entry, and the current time will be used if
     * the entry has no set modification time.
     *
     * @param e the ZIP entry to be written
     * @throws ZipException if a ZIP format error has occurred
     * @throws IOException  if an I/O error has occurred
     */
    public void putNextEntry(ZipEntry e) throws IOException {
        ensureOpen();
        if (current != null) {
            closeEntry(); // close previous entry
        }
        if (e.time == -1) {
            e.setTime(System.currentTimeMillis());
        }
        if (e.method == -1) {
            e.method = method; // use default method
        }
        switch (e.method) {
            case DEFLATED:
                break;
            case STORED:
                // compressed size, uncompressed size, and crc-32 must all be
                // set for entries using STORED compression method
                if (e.size == -1) {
                    e.size = e.csize;
                } else if (e.csize == -1) {
                    e.csize = e.size;
                } else if (e.size != e.csize) {
                    throw new ZipException("STORED entry where compressed != uncompressed size");
                }
                if (e.size == -1 || e.crc == -1) {
                    throw new ZipException("STORED entry missing size, compressed size, or crc-32");
                }
                break;
            default:
                throw new ZipException("unsupported compression method");
        }
        if (!names.add(e.name)) {
            throw new ZipException("duplicate entry: " + e.name);
        }
        // Record the LOC offset before writing the header, then emit the header.
        current = new XEntry(e, written);
        xentries.add(current);
        writeLOC(current);
    }

    /**
     * Closes the current ZIP entry and positions the stream for writing
     * the next entry.
     *
     * @throws ZipException if a ZIP format error has occurred
     * @throws IOException  if an I/O error has occurred
     */
    public void closeEntry() throws IOException {
        ensureOpen();
        if (current != null) {
            ZipEntry e = current.entry;
            switch (e.method) {
                case DEFLATED:
                    // Flush all pending compressed data for this entry.
                    def.finish();
                    while (!def.finished()) {
                        deflate();
                    }
                    if ((current.flag & 8) == 0) {
                        // verify size, compressed size, and crc-32 settings
                        if (e.size != def.getBytesRead()) {
                            throw new ZipException("invalid entry size (expected " + e.size
                                    + " but got " + def.getBytesRead() + " bytes)");
                        }
                        if (e.csize != def.getBytesWritten()) {
                            throw new ZipException("invalid entry compressed size (expected "
                                    + e.csize + " but got " + def.getBytesWritten() + " bytes)");
                        }
                        if (e.crc != crc.getValue()) {
                            throw new ZipException("invalid entry CRC-32 (expected 0x"
                                    + Long.toHexString(e.crc) + " but got 0x"
                                    + Long.toHexString(crc.getValue()) + ")");
                        }
                    } else {
                        // Sizes/crc were unknown at LOC time: take them from the
                        // deflater and emit the EXT data descriptor.
                        e.size = def.getBytesRead();
                        e.csize = def.getBytesWritten();
                        e.crc = crc.getValue();
                        writeEXT(e);
                    }
                    def.reset();
                    written += e.csize;
                    break;
                case STORED:
                    // we already know that both e.size and e.csize are the same
                    if (e.size != written - locoff) {
                        throw new ZipException("invalid entry size (expected " + e.size
                                + " but got " + (written - locoff) + " bytes)");
                    }
                    if (e.crc != crc.getValue()) {
                        throw new ZipException("invalid entry crc-32 (expected 0x"
                                + Long.toHexString(e.crc) + " but got 0x"
                                + Long.toHexString(crc.getValue()) + ")");
                    }
                    break;
                default:
                    throw new ZipException("invalid compression method");
            }
            crc.reset();
            current = null;
        }
    }

    /**
     * Writes an array of bytes to the current ZIP entry data. This method
     * will block until all the bytes are written.
     *
     * @param b   the data to be written
     * @param off the start offset in the data
     * @param len the number of bytes that are written
     * @throws ZipException if a ZIP file error has occurred
     * @throws IOException  if an I/O error has occurred
     */
    public synchronized void write(byte[] b, int off, int len) throws IOException {
        ensureOpen();
        if (off < 0 || len < 0 || off > b.length - len) {
            throw new IndexOutOfBoundsException();
        } else if (len == 0) {
            return;
        }

        if (current == null) {
            throw new ZipException("no current ZIP entry");
        }
        ZipEntry entry = current.entry;
        switch (entry.method) {
            case DEFLATED:
                super.write(b, off, len);
                break;
            case STORED:
                written += len;
                if (written - locoff > entry.size) {
                    throw new ZipException("attempt to write past end of STORED entry");
                }
                out.write(b, off, len);
                break;
            default:
                throw new ZipException("invalid compression method");
        }
        crc.update(b, off, len);
    }

    /**
     * Finishes writing the contents of the ZIP output stream without closing
     * the underlying stream. Use this method when applying multiple filters
     * in succession to the same output stream.
     *
     * @throws ZipException if a ZIP file error has occurred
     * @throws IOException  if an I/O exception has occurred
     */
    public void finish() throws IOException {
        ensureOpen();
        if (finished) {
            return;
        }
        if (current != null) {
            closeEntry();
        }
        if (xentries.size() < 1) {
            throw new ZipException("ZIP file must have at least one entry");
        }
        // write central directory
        long off = written;
        for (XEntry xentry : xentries)
            writeCEN(xentry);
        writeEND(off, written - off);
        finished = true;
    }

    /**
     * Closes the ZIP output stream as well as the stream being filtered.
     *
     * @throws ZipException if a ZIP file error has occurred
     * @throws IOException  if an I/O error has occurred
     */
    public void close() throws IOException {
        if (!closed) {
            // super.close() is expected to invoke finish(); inherited from the
            // package-local DeflaterOutputStream — TODO confirm against that class.
            super.close();
            closed = true;
        }
    }

    /*
     * Writes local file (LOC) header for specified entry.
     */
    private void writeLOC(XEntry xentry) throws IOException {
        ZipEntry e = xentry.entry;
        int flag = xentry.flag;
        writeInt(LOCSIG); // LOC header signature
        writeShort(version(e)); // version needed to extract
        writeShort(flag); // general purpose bit flag
        writeShort(e.method); // compression method
        writeInt(e.time); // last modification time
        if ((flag & 8) == 8) {
            // store size, compressed size, and crc-32 in data descriptor
            // immediately following compressed entry data; write zeros here
            writeInt(0);
            writeInt(0);
            writeInt(0);
        } else {
            writeInt(e.crc); // crc-32
            writeInt(e.csize); // compressed size
            writeInt(e.size); // uncompressed size
        }
        byte[] nameBytes = getUTF8Bytes(e.name);
        writeShort(nameBytes.length);
        writeShort(e.extra != null ? e.extra.length : 0);
        writeBytes(nameBytes, 0, nameBytes.length);
        if (e.extra != null) {
            writeBytes(e.extra, 0, e.extra.length);
        }
        // Entry data starts here; used by STORED size checks in closeEntry()/write().
        locoff = written;
    }

    /*
     * Writes extra data descriptor (EXT) for specified entry.
     */
    private void writeEXT(ZipEntry e) throws IOException {
        writeInt(EXTSIG); // EXT header signature
        writeInt(e.crc); // crc-32
        writeInt(e.csize); // compressed size
        writeInt(e.size); // uncompressed size
    }

    /*
     * Write central directory (CEN) header for specified entry.
     * REMIND: add support for file attributes
     */
    private void writeCEN(XEntry xentry) throws IOException {
        ZipEntry e = xentry.entry;
        int flag = xentry.flag;
        int version = version(e);
        writeInt(CENSIG); // CEN header signature
        writeShort(version); // version made by
        writeShort(version); // version needed to extract
        writeShort(flag); // general purpose bit flag
        writeShort(e.method); // compression method
        writeInt(e.time); // last modification time
        writeInt(e.crc); // crc-32
        writeInt(e.csize); // compressed size
        writeInt(e.size); // uncompressed size
        byte[] nameBytes = getUTF8Bytes(e.name);
        writeShort(nameBytes.length);
        writeShort(e.extra != null ? e.extra.length : 0);
        byte[] commentBytes;
        if (e.comment != null) {
            commentBytes = getUTF8Bytes(e.comment);
            writeShort(commentBytes.length);
        } else {
            commentBytes = null;
            writeShort(0);
        }
        writeShort(0); // starting disk number
        writeShort(0); // internal file attributes (unused)
        writeInt(0); // external file attributes (unused)
        writeInt(xentry.offset); // relative offset of local header
        writeBytes(nameBytes, 0, nameBytes.length);
        if (e.extra != null) {
            writeBytes(e.extra, 0, e.extra.length);
        }
        if (commentBytes != null) {
            writeBytes(commentBytes, 0, commentBytes.length);
        }
    }

    /*
     * Writes end of central directory (END) header.
     */
    private void writeEND(long off, long len) throws IOException {
        int count = xentries.size();
        writeInt(ENDSIG); // END record signature
        writeShort(0); // number of this disk
        writeShort(0); // central directory start disk
        writeShort(count); // number of directory entries on disk
        writeShort(count); // total number of directory entries
        writeInt(len); // length of central directory
        writeInt(off); // offset of central directory
        if (comment != null) { // zip file comment
            byte[] b = getUTF8Bytes(comment);
            writeShort(b.length);
            writeBytes(b, 0, b.length);
        } else {
            writeShort(0);
        }
    }

    /*
     * Writes a 16-bit short to the output stream in little-endian byte order.
     */
    private void writeShort(int v) throws IOException {
        OutputStream out = this.out;
        out.write((v >>> 0) & 0xff);
        out.write((v >>> 8) & 0xff);
        written += 2;
    }

    /*
     * Writes a 32-bit int to the output stream in little-endian byte order.
     */
    private void writeInt(long v) throws IOException {
        OutputStream out = this.out;
        out.write((int) ((v >>> 0) & 0xff));
        out.write((int) ((v >>> 8) & 0xff));
        out.write((int) ((v >>> 16) & 0xff));
        out.write((int) ((v >>> 24) & 0xff));
        written += 4;
    }

    /*
     * Writes an array of bytes to the output stream.
     */
    private void writeBytes(byte[] b, int off, int len) throws IOException {
        super.out.write(b, off, len);
        written += len;
    }

    /*
     * Returns the length of String's UTF8 encoding.
     * Note: counts per char, so surrogate halves are counted as 3 bytes each
     * (matching getUTF8Bytes below), not as a single 4-byte sequence.
     */
    static int getUTF8Length(String s) {
        int count = 0;
        for (int i = 0; i < s.length(); i++) {
            char ch = s.charAt(i);
            if (ch <= 0x7f) {
                count++;
            } else if (ch <= 0x7ff) {
                count += 2;
            } else {
                count += 3;
            }
        }
        return count;
    }

    /*
     * Returns an array of bytes representing the UTF8 encoding
     * of the specified String.
     * Note: each char is encoded independently, so supplementary characters
     * (surrogate pairs) come out as two 3-byte sequences rather than one
     * 4-byte UTF-8 sequence — legacy behavior preserved here.
     */
    private static byte[] getUTF8Bytes(String s) {
        char[] c = s.toCharArray();
        int len = c.length;
        // Count the number of encoded bytes...
        int count = 0;
        for (int i = 0; i < len; i++) {
            int ch = c[i];
            if (ch <= 0x7f) {
                count++;
            } else if (ch <= 0x7ff) {
                count += 2;
            } else {
                count += 3;
            }
        }
        // Now return the encoded bytes...
        byte[] b = new byte[count];
        int off = 0;
        for (int i = 0; i < len; i++) {
            int ch = c[i];
            if (ch <= 0x7f) {
                b[off++] = (byte) ch;
            } else if (ch <= 0x7ff) {
                b[off++] = (byte) ((ch >> 6) | 0xc0);
                b[off++] = (byte) ((ch & 0x3f) | 0x80);
            } else {
                b[off++] = (byte) ((ch >> 12) | 0xe0);
                b[off++] = (byte) (((ch >> 6) & 0x3f) | 0x80);
                b[off++] = (byte) ((ch & 0x3f) | 0x80);
            }
        }
        return b;
    }
}