gt
stringclasses
1 value
context
stringlengths
2.05k
161k
package org.hl7.fhir.instance.model.valuesets; /* Copyright (c) 2011+, HL7, Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of HL7 nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ // Generated on Sat, Aug 22, 2015 23:00-0400 for FHIR v0.5.0 public enum ObjectRole { /** * This object is the patient that is the subject of care related to this event. It is identifiable by patient ID or equivalent. The patient may be either human or animal */ _1, /** * This is a location identified as related to the event. This is usually the location where the event took place. Note that for shipping, the usual events are arrival at a location or departure from a location. 
*/ _2, /** * This object is any kind of persistent document created as a result of the event. This could be a paper report, film, electronic report, DICOM Study, etc. Issues related to medical records life cycle management are conveyed elsewhere. */ _3, /** * A logical object related to a health record event. This is any healthcare specific resource (object) not restricted to FHIR defined Resources. */ _4, /** * This is any configurable file used to control creation of documents. Examples include the objects maintained by the HL7 Master File transactions, Value Sets, etc. */ _5, /** * A human participant not otherwise identified by some other category */ _6, /** * (deprecated) */ _7, /** * Typically a licensed person who is providing or performing care related to the event, generally a physician. The key distinction between doctor and practitioner is with regards to their role, not the licensing. The doctor is the human who actually performed the work. The practitioner is the human or organization that is responsible for the work. */ _8, /** * A person or system that is being notified as part of the event. This is relevant in situations where automated systems provide notifications to other parties when an event took place. */ _9, /** * Insurance company, or any other organization who accepts responsibility for paying for the healthcare event. */ _10, /** * A person or active system object involved in the event with a security role. */ _11, /** * A person or system object involved in the event with the authority to modify security roles of other objects. */ _12, /** * A passive object, such as a role table, that is relevant to the event. */ _13, /** * (deprecated) Relevant to certain RBAC security methodologies. */ _14, /** * Any person or organization responsible for providing care. This encompasses all forms of care, licensed or otherwise, and all sorts of teams and care groups. 
Note, the distinction between practitioners and the doctor that actually provided the care to the patient. */ _15, /** * The source or destination for data transfer, when it does not match some other role. */ _16, /** * A source or destination for data transfer, that acts as an archive, database, or similar role. */ _17, /** * An object that holds schedule information. This could be an appointment book, availability information, etc. */ _18, /** * An organization or person that is the recipient of services. This could be an organization that is buying services for a patient, or a person that is buying services for an animal. */ _19, /** * An order, task, work item, procedure step, or other description of work to be performed. E.g., a particular instance of an MPPS. */ _20, /** * A list of jobs or a system that provides lists of jobs. E.g., an MWL SCP. */ _21, /** * (Deprecated) */ _22, /** * An object that specifies or controls the routing or delivery of items. For example, a distribution list is the routing criteria for mail. The items delivered may be documents, jobs, or other objects. */ _23, /** * The contents of a query. This is used to capture the contents of any kind of query. For security surveillance purposes knowing the queries being made is very important. 
*/ _24, /** * added to help the parsers */ NULL; public static ObjectRole fromCode(String codeString) throws Exception { if (codeString == null || "".equals(codeString)) return null; if ("1".equals(codeString)) return _1; if ("2".equals(codeString)) return _2; if ("3".equals(codeString)) return _3; if ("4".equals(codeString)) return _4; if ("5".equals(codeString)) return _5; if ("6".equals(codeString)) return _6; if ("7".equals(codeString)) return _7; if ("8".equals(codeString)) return _8; if ("9".equals(codeString)) return _9; if ("10".equals(codeString)) return _10; if ("11".equals(codeString)) return _11; if ("12".equals(codeString)) return _12; if ("13".equals(codeString)) return _13; if ("14".equals(codeString)) return _14; if ("15".equals(codeString)) return _15; if ("16".equals(codeString)) return _16; if ("17".equals(codeString)) return _17; if ("18".equals(codeString)) return _18; if ("19".equals(codeString)) return _19; if ("20".equals(codeString)) return _20; if ("21".equals(codeString)) return _21; if ("22".equals(codeString)) return _22; if ("23".equals(codeString)) return _23; if ("24".equals(codeString)) return _24; throw new Exception("Unknown ObjectRole code '"+codeString+"'"); } public String toCode() { switch (this) { case _1: return "1"; case _2: return "2"; case _3: return "3"; case _4: return "4"; case _5: return "5"; case _6: return "6"; case _7: return "7"; case _8: return "8"; case _9: return "9"; case _10: return "10"; case _11: return "11"; case _12: return "12"; case _13: return "13"; case _14: return "14"; case _15: return "15"; case _16: return "16"; case _17: return "17"; case _18: return "18"; case _19: return "19"; case _20: return "20"; case _21: return "21"; case _22: return "22"; case _23: return "23"; case _24: return "24"; default: return "?"; } } public String getSystem() { return "http://hl7.org/fhir/object-role"; } public String getDefinition() { switch (this) { case _1: return "This object is the patient that is the subject 
of care related to this event. It is identifiable by patient ID or equivalent. The patient may be either human or animal"; case _2: return "This is a location identified as related to the event. This is usually the location where the event took place. Note that for shipping, the usual events are arrival at a location or departure from a location."; case _3: return "This object is any kind of persistent document created as a result of the event. This could be a paper report, film, electronic report, DICOM Study, etc. Issues related to medical records life cycle management are conveyed elsewhere."; case _4: return "A logical object related to a health record event. This is any healthcare specific resource (object) not restricted to FHIR defined Resources."; case _5: return "This is any configurable file used to control creation of documents. Examples include the objects maintained by the HL7 Master File transactions, Value Sets, etc."; case _6: return "A human participant not otherwise identified by some other category"; case _7: return "(deprecated)"; case _8: return "Typically a licensed person who is providing or performing care related to the event, generally a physician. The key distinction between doctor and practitioner is with regards to their role, not the licensing. The doctor is the human who actually performed the work. The practitioner is the human or organization that is responsible for the work."; case _9: return "A person or system that is being notified as part of the event. 
This is relevant in situations where automated systems provide notifications to other parties when an event took place."; case _10: return "Insurance company, or any other organization who accepts responsibility for paying for the healthcare event."; case _11: return "A person or active system object involved in the event with a security role."; case _12: return "A person or system object involved in the event with the authority to modify security roles of other objects."; case _13: return "A passive object, such as a role table, that is relevant to the event."; case _14: return "(deprecated) Relevant to certain RBAC security methodologies."; case _15: return "Any person or organization responsible for providing care. This encompasses all forms of care, licensed or otherwise, and all sorts of teams and care groups. Note, the distinction between practitioners and the doctor that actually provided the care to the patient."; case _16: return "The source or destination for data transfer, when it does not match some other role."; case _17: return "A source or destination for data transfer, that acts as an archive, database, or similar role."; case _18: return "An object that holds schedule information. This could be an appointment book, availability information, etc."; case _19: return "An organization or person that is the recipient of services. This could be an organization that is buying services for a patient, or a person that is buying services for an animal."; case _20: return "An order, task, work item, procedure step, or other description of work to be performed. E.g., a particular instance of an MPPS."; case _21: return "A list of jobs or a system that provides lists of jobs. E.g., an MWL SCP."; case _22: return "(Deprecated)"; case _23: return "An object that specifies or controls the routing or delivery of items. For example, a distribution list is the routing criteria for mail. 
The items delivered may be documents, jobs, or other objects."; case _24: return "The contents of a query. This is used to capture the contents of any kind of query. For security surveillance purposes knowing the queries being made is very important."; default: return "?"; } } public String getDisplay() { switch (this) { case _1: return "Patient"; case _2: return "Location"; case _3: return "Report"; case _4: return "DomainResource"; case _5: return "Master file"; case _6: return "User"; case _7: return "List"; case _8: return "Doctor"; case _9: return "Subscriber"; case _10: return "Guarantor"; case _11: return "Security User Entity"; case _12: return "Security User Group"; case _13: return "Security Resource"; case _14: return "Security Granularity Definition"; case _15: return "Practitioner"; case _16: return "Data Destination"; case _17: return "Data Repository"; case _18: return "Schedule"; case _19: return "Customer"; case _20: return "Job"; case _21: return "Job Stream"; case _22: return "Table"; case _23: return "Routing Criteria"; case _24: return "Query"; default: return "?"; } } }
/* * Copyright 2014-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.jvm.java; import com.facebook.buck.core.util.log.Logger; import com.google.common.base.CharMatcher; import com.google.common.base.Joiner; import com.google.common.base.Preconditions; import com.google.common.base.Splitter; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSortedSet; import java.lang.reflect.Modifier; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import javax.annotation.Nullable; import org.eclipse.jdt.core.JavaCore; import org.eclipse.jdt.core.dom.AST; import org.eclipse.jdt.core.dom.ASTNode; import org.eclipse.jdt.core.dom.ASTParser; import org.eclipse.jdt.core.dom.ASTVisitor; import org.eclipse.jdt.core.dom.AbstractTypeDeclaration; import org.eclipse.jdt.core.dom.Annotation; import org.eclipse.jdt.core.dom.AnnotationTypeDeclaration; import org.eclipse.jdt.core.dom.AnonymousClassDeclaration; import org.eclipse.jdt.core.dom.ArrayType; import org.eclipse.jdt.core.dom.BodyDeclaration; import org.eclipse.jdt.core.dom.CompilationUnit; import org.eclipse.jdt.core.dom.EnumDeclaration; import org.eclipse.jdt.core.dom.FieldDeclaration; import 
org.eclipse.jdt.core.dom.ImportDeclaration; import org.eclipse.jdt.core.dom.MarkerAnnotation; import org.eclipse.jdt.core.dom.MethodDeclaration; import org.eclipse.jdt.core.dom.MethodInvocation; import org.eclipse.jdt.core.dom.Name; import org.eclipse.jdt.core.dom.NormalAnnotation; import org.eclipse.jdt.core.dom.PackageDeclaration; import org.eclipse.jdt.core.dom.ParameterizedType; import org.eclipse.jdt.core.dom.QualifiedName; import org.eclipse.jdt.core.dom.SimpleType; import org.eclipse.jdt.core.dom.SingleMemberAnnotation; import org.eclipse.jdt.core.dom.SingleVariableDeclaration; import org.eclipse.jdt.core.dom.Type; import org.eclipse.jdt.core.dom.TypeDeclaration; import org.eclipse.jdt.core.dom.TypeDeclarationStatement; /** * Extracts the set of exported symbols (class and enum names) from a Java code file, using the * ASTParser from Eclipse. */ public class JavaFileParser { private static final Logger LOG = Logger.get(JavaFileParser.class); private final int jlsLevel; private final String javaVersion; private static final ImmutableMap<String, String> javaVersionMap = ImmutableMap.<String, String>builder() .put("1", JavaCore.VERSION_1_1) .put("2", JavaCore.VERSION_1_2) .put("3", JavaCore.VERSION_1_3) .put("4", JavaCore.VERSION_1_4) .put("5", JavaCore.VERSION_1_5) .put("6", JavaCore.VERSION_1_6) .put("7", JavaCore.VERSION_1_7) .put("8", JavaCore.VERSION_1_8) .build(); /** * Types that are in java.lang. These can be used without being imported. 
* * <p>Current as of Java 8: * https://docs.oracle.com/javase/8/docs/api/java/lang/package-summary.html */ private static final Set<String> JAVA_LANG_TYPES = ImmutableSet.of( // Interface "Appendable", "AutoCloseable", "CharSequence", "Cloneable", "Comparable", "Iterable", "Readable", "Runnable", "Thread.UncaughtExceptionHandler", // Class "Boolean", "Byte", "Character", "Character.Subset", "Class", "ClassLoader", "ClassValue", "Compiler", "Double", "Enum", "Float", "InheritableThreadLocal", "Integer", "Long", "Math", "Number", "Object", "Package", "Process", "ProcessBuilder", "ProcessBuilder.Redirect", "Runtime", "RuntimePermission", "SecurityManager", "Short", "StackTraceElement", "StrictMath", "String", "StringBuffer", "StringBuilder", "System", "Thread", "ThreadGroup", "ThreadLocal", "Throwable", "Void", // Enum "Character.UnicodeScript", "ProcessBuilder.Redirect.Type", "Thread.State", // Exception "ArithmeticException", "ArrayIndexOutOfBoundsException", "ArrayStoreException", "ClassCastException", "ClassNotFoundException", "CloneNotSupportedException", "EnumConstantNotPresentException", "Exception", "IllegalAccessException", "IllegalArgumentException", "IllegalMonitorStateException", "IllegalStateException", "IllegalThreadStateException", "IndexOutOfBoundsException", "InstantiationException", "InterruptedException", "NegativeArraySizeException", "NoSuchFieldException", "NoSuchMethodException", "NullPointerException", "NumberFormatException", "ReflectiveOperationException", "RuntimeException", "StringIndexOutOfBoundsException", "TypeNotPresentException", "UnsupportedOperationException", // Error "AbstractMethodError", "AssertionError", "BootstrapMethodError", "ClassCircularityError", "ClassFormatError", "Error", "ExceptionInInitializerError", "IllegalAccessError", "IncompatibleClassChangeError", "InstantiationError", "InternalError", "LinkageError", "NoClassDefFoundError", "NoSuchFieldError", "NoSuchMethodError", "OutOfMemoryError", "StackOverflowError", 
"ThreadDeath", "UnknownError", "UnsatisfiedLinkError", "UnsupportedClassVersionError", "VerifyError", "VirtualMachineError", // Annotation Types "Deprecated", "FunctionalInheritance", "Override", "SafeVarargs", "SuppressWarnings"); /** * Symbols provided by {@code import java.io.*}. * * <p>This collection was created by running the following on an OS X laptop with Java 8 * installed: * * <pre> * jar tf /Library/Java/JavaVirtualMachines/jdk1.8.0_74.jdk/Contents/Home/src.zip \ * | grep -E 'java/io/[^/]+$' \ * | sed -e 's#java/io/\(.*\)\.java#"\1",#' \ * | sort * </pre> */ private static final ImmutableSet<String> JAVA_IO_TYPES = ImmutableSet.of( "Bits", "BufferedInputStream", "BufferedOutputStream", "BufferedReader", "BufferedWriter", "ByteArrayInputStream", "ByteArrayOutputStream", "CharArrayReader", "CharArrayWriter", "CharConversionException", "Closeable", "Console", "DataInput", "DataInputStream", "DataOutput", "DataOutputStream", "DefaultFileSystem", "DeleteOnExitHook", "EOFException", "ExpiringCache", "Externalizable", "File", "FileDescriptor", "FileFilter", "FileInputStream", "FileNotFoundException", "FileOutputStream", "FilePermission", "FileReader", "FileSystem", "FileWriter", "FilenameFilter", "FilterInputStream", "FilterOutputStream", "FilterReader", "FilterWriter", "Flushable", "IOError", "IOException", "InputStream", "InputStreamReader", "InterruptedIOException", "InvalidClassException", "InvalidObjectException", "LineNumberInputStream", "LineNumberReader", "NotActiveException", "NotSerializableException", "ObjectInput", "ObjectInputStream", "ObjectInputValidation", "ObjectOutput", "ObjectOutputStream", "ObjectStreamClass", "ObjectStreamConstants", "ObjectStreamException", "ObjectStreamField", "OptionalDataException", "OutputStream", "OutputStreamWriter", "PipedInputStream", "PipedOutputStream", "PipedReader", "PipedWriter", "PrintStream", "PrintWriter", "PushbackInputStream", "PushbackReader", "RandomAccessFile", "Reader", "SequenceInputStream", 
"SerialCallbackContext", "Serializable", "SerializablePermission", "StreamCorruptedException", "StreamTokenizer", "StringBufferInputStream", "StringReader", "StringWriter", "SyncFailedException", "UTFDataFormatException", "UncheckedIOException", "UnixFileSystem", "UnsupportedEncodingException", "WriteAbortedException", "Writer"); /** * Symbols provided by {@code import java.util.*}. * * <p>This collection was created by running the following on an OS X laptop with Java 8 * installed: * * <pre> * jar tf /Library/Java/JavaVirtualMachines/jdk1.8.0_74.jdk/Contents/Home/src.zip \ * | grep -E 'java/util/[^/]+$' \ * | sed -e 's#java/util/\(.*\)\.java#"\1",#' \ * | sort * </pre> */ private static final ImmutableSet<String> JAVA_UTIL_TYPES = ImmutableSet.of( "AbstractCollection", "AbstractList", "AbstractMap", "AbstractQueue", "AbstractSequentialList", "AbstractSet", "ArrayDeque", "ArrayList", "ArrayPrefixHelpers", "Arrays", "ArraysParallelSortHelpers", "Base64", "BitSet", "Calendar", "Collection", "Collections", "ComparableTimSort", "Comparator", "Comparators", "ConcurrentModificationException", "Currency", "Date", "Deque", "Dictionary", "DoubleSummaryStatistics", "DualPivotQuicksort", "DuplicateFormatFlagsException", "EmptyStackException", "EnumMap", "EnumSet", "Enumeration", "EventListener", "EventListenerProxy", "EventObject", "FormatFlagsConversionMismatchException", "Formattable", "FormattableFlags", "Formatter", "FormatterClosedException", "GregorianCalendar", "HashMap", "HashSet", "Hashtable", "IdentityHashMap", "IllegalFormatCodePointException", "IllegalFormatConversionException", "IllegalFormatException", "IllegalFormatFlagsException", "IllegalFormatPrecisionException", "IllegalFormatWidthException", "IllformedLocaleException", "InputMismatchException", "IntSummaryStatistics", "InvalidPropertiesFormatException", "Iterator", "JapaneseImperialCalendar", "JumboEnumSet", "LinkedHashMap", "LinkedHashSet", "LinkedList", "List", "ListIterator", "ListResourceBundle", 
"Locale", "LocaleISOData", "LongSummaryStatistics", "Map", "MissingFormatArgumentException", "MissingFormatWidthException", "MissingResourceException", "NavigableMap", "NavigableSet", "NoSuchElementException", "Objects", "Observable", "Observer", "Optional", "OptionalDouble", "OptionalInt", "OptionalLong", "PrimitiveIterator", "PriorityQueue", "Properties", "PropertyPermission", "PropertyResourceBundle", "Queue", "Random", "RandomAccess", "RegularEnumSet", "ResourceBundle", "Scanner", "ServiceConfigurationError", "ServiceLoader", "Set", "SimpleTimeZone", "SortedMap", "SortedSet", "Spliterator", "Spliterators", "SplittableRandom", "Stack", "StringJoiner", "StringTokenizer", "TimSort", "TimeZone", "Timer", "TimerTask", "TooManyListenersException", "TreeMap", "TreeSet", "Tripwire", "UUID", "UnknownFormatConversionException", "UnknownFormatFlagsException", "Vector", "WeakHashMap"); private static final ImmutableMap<String, ImmutableSet<String>> SUPPORTED_WILDCARD_IMPORTS = ImmutableMap.of( "java.util", JAVA_UTIL_TYPES, "java.io", JAVA_IO_TYPES); private JavaFileParser(int jlsLevel, String javaVersion) { this.jlsLevel = jlsLevel; this.javaVersion = javaVersion; } public static JavaFileParser createJavaFileParser(AbstractJavacLanguageLevelOptions options) { String javaVersion = Objects.requireNonNull(javaVersionMap.get(options.getSourceLevel())); return new JavaFileParser(AST.JLS8, javaVersion); } public ImmutableSortedSet<String> getExportedSymbolsFromString(String code) { return extractFeaturesFromJavaCode(code).providedSymbols; } public Optional<String> getPackageNameFromSource(String code) { CompilationUnit compilationUnit = makeCompilationUnitFromSource(code); // A Java file might not have a package. Hopefully all of ours do though... 
PackageDeclaration packageDecl = compilationUnit.getPackage(); if (packageDecl != null) { return Optional.of(packageDecl.getName().toString()); } return Optional.empty(); } private enum DependencyType { REQUIRED, EXPORTED, } public JavaFileFeatures extractFeaturesFromJavaCode(String code) { // For now, we will harcode this. Ultimately, we probably want to make this configurable via // .buckconfig. For example, the Buck project itself is diligent about disallowing wildcard // imports, but the one exception is the Java code generated via Thrift in src-gen. boolean shouldThrowForUnsupportedWildcardImport = false; AtomicBoolean isPoisonedByUnsupportedWildcardImport = new AtomicBoolean(false); CompilationUnit compilationUnit = makeCompilationUnitFromSource(code); ImmutableSortedSet.Builder<String> providedSymbols = ImmutableSortedSet.naturalOrder(); ImmutableSortedSet.Builder<String> requiredSymbols = ImmutableSortedSet.naturalOrder(); ImmutableSortedSet.Builder<String> exportedSymbols = ImmutableSortedSet.naturalOrder(); ImmutableSortedSet.Builder<String> requiredSymbolsFromExplicitImports = ImmutableSortedSet.naturalOrder(); compilationUnit.accept( new ASTVisitor() { @Nullable private String packageName; /** Maps simple name to fully-qualified name. */ private Map<String, String> simpleImportedTypes = new HashMap<>(); /** * Maps wildcard import prefixes, such as {@code "java.util"} to the types in the * respective package if a wildcard import such as {@code import java.util.*} is used. */ private Map<String, ImmutableSet<String>> wildcardImports = new HashMap<>(); @Override public boolean visit(PackageDeclaration node) { Preconditions.checkState( packageName == null, "There should be at most one package declaration"); packageName = node.getName().getFullyQualifiedName(); return false; } // providedSymbols @Override public boolean visit(TypeDeclaration node) { // Local classes can be declared inside of methods. Skip over these. 
if (node.getParent() instanceof TypeDeclarationStatement) { return true; } String fullyQualifiedName = getFullyQualifiedTypeName(node); if (fullyQualifiedName != null) { providedSymbols.add(fullyQualifiedName); } @SuppressWarnings("unchecked") List<Type> interfaceTypes = node.superInterfaceTypes(); for (Type interfaceType : interfaceTypes) { tryAddType(interfaceType, DependencyType.EXPORTED); } Type superclassType = node.getSuperclassType(); if (superclassType != null) { tryAddType(superclassType, DependencyType.EXPORTED); } return true; } @Override public boolean visit(EnumDeclaration node) { String fullyQualifiedName = getFullyQualifiedTypeName(node); if (fullyQualifiedName != null) { providedSymbols.add(fullyQualifiedName); } return true; } @Override public boolean visit(AnnotationTypeDeclaration node) { String fullyQualifiedName = getFullyQualifiedTypeName(node); if (fullyQualifiedName != null) { providedSymbols.add(fullyQualifiedName); } return true; } // requiredSymbols /** * Uses heuristics to try to figure out what type of QualifiedName this is. Returns a * non-null value if this is believed to be a reference that qualifies as a "required * symbol" relationship. */ @Override public boolean visit(QualifiedName node) { QualifiedName ancestor = findMostQualifiedAncestor(node); ASTNode parent = ancestor.getParent(); if (!(parent instanceof PackageDeclaration) && !(parent instanceof ImportDeclaration)) { String symbol = ancestor.getFullyQualifiedName(); // If it does not start with an uppercase letter, it is probably because it is a // property lookup. if (CharMatcher.javaUpperCase().matches(symbol.charAt(0))) { addTypeFromDotDelimitedSequence(symbol, DependencyType.REQUIRED); } } return false; } /** * @param expr could be "Example", "Example.field", "com.example.Example". Note it could * also be a built-in type, such as "java.lang.Integer", in which case it will not be * added to the set of required symbols. 
*/ private void addTypeFromDotDelimitedSequence(String expr, DependencyType dependencyType) { // At this point, symbol could be `System.out`. We want to reduce it to `System` and // then check it against JAVA_LANG_TYPES. if (startsWithUppercaseChar(expr)) { int index = expr.indexOf('.'); if (index >= 0) { String leftmostComponent = expr.substring(0, index); if (JAVA_LANG_TYPES.contains(leftmostComponent)) { return; } } } expr = qualifyWithPackageNameIfNecessary(expr); addSymbol(expr, dependencyType); } @Override public boolean visit(ImportDeclaration node) { String fullyQualifiedName = node.getName().getFullyQualifiedName(); // Apparently, "on demand" means "uses a wildcard," such as "import java.util.*". // Although we can choose to prohibit these in our own code, it is much harder to // enforce for third-party code. As such, we will tolerate these for some of the common // cases. if (node.isOnDemand()) { ImmutableSet<String> value = SUPPORTED_WILDCARD_IMPORTS.get(fullyQualifiedName); if (value != null) { wildcardImports.put(fullyQualifiedName, value); return false; } else if (shouldThrowForUnsupportedWildcardImport) { throw new RuntimeException( String.format( "Use of wildcard 'import %s.*' makes it impossible to statically determine " + "required symbols in this file. Please enumerate explicit imports.", fullyQualifiedName)); } else { isPoisonedByUnsupportedWildcardImport.set(true); return false; } } // Only worry about the dependency on the enclosing type. Optional<String> simpleName = getSimpleNameFromFullyQualifiedName(fullyQualifiedName); if (simpleName.isPresent()) { String name = simpleName.get(); int index = fullyQualifiedName.indexOf("." 
+ name);
          String enclosingType = fullyQualifiedName.substring(0, index + name.length() + 1);
          // Importing a nested type also pulls in its enclosing type, and lets later
          // references to the simple name resolve to that enclosing type.
          requiredSymbolsFromExplicitImports.add(enclosingType);
          simpleImportedTypes.put(name, enclosingType);
        } else {
          LOG.info("Suspicious import lacks obvious enclosing type: %s", fullyQualifiedName);
          // The one example we have seen of this in the wild is
          // "org.whispersystems.curve25519.java.curve_sigs". In practice, we still need to add
          // it as a required symbol in this case.
          requiredSymbols.add(fullyQualifiedName);
        }
        return false;
      }

      @Override
      public boolean visit(MethodInvocation node) {
        if (node.getExpression() == null) {
          return true;
        }

        // A dot-delimited receiver that looks like a type (e.g. "Foo.bar()") implies the
        // receiver type is a required symbol.
        String receiver = node.getExpression().toString();
        if (looksLikeAType(receiver)) {
          addTypeFromDotDelimitedSequence(receiver, DependencyType.REQUIRED);
        }
        return true;
      }

      /** An annotation on a member with zero arguments. */
      @Override
      public boolean visit(MarkerAnnotation node) {
        DependencyType dependencyType = findDependencyTypeForAnnotation(node);
        addSimpleTypeName(node.getTypeName(), dependencyType);
        return true;
      }

      /** An annotation on a member with named arguments. */
      @Override
      public boolean visit(NormalAnnotation node) {
        DependencyType dependencyType = findDependencyTypeForAnnotation(node);
        addSimpleTypeName(node.getTypeName(), dependencyType);
        return true;
      }

      /** An annotation on a member with a single, unnamed argument. */
      @Override
      public boolean visit(SingleMemberAnnotation node) {
        DependencyType dependencyType = findDependencyTypeForAnnotation(node);
        addSimpleTypeName(node.getTypeName(), dependencyType);
        return true;
      }

      /**
       * @return {@code EXPORTED} when the annotation sits on a non-private declaration,
       *     {@code REQUIRED} otherwise.
       */
      private DependencyType findDependencyTypeForAnnotation(Annotation annotation) {
        ASTNode parentNode = annotation.getParent();
        if (parentNode == null) {
          return DependencyType.REQUIRED;
        }
        if (parentNode instanceof BodyDeclaration) {
          // Note that BodyDeclaration is an abstract class. Its subclasses are things like
          // FieldDeclaration and MethodDeclaration. We want to be sure that an annotation on
          // any non-private declaration is considered an exported symbol.
          BodyDeclaration declaration = (BodyDeclaration) parentNode;
          int modifiers = declaration.getModifiers();
          if ((modifiers & Modifier.PRIVATE) == 0) {
            return DependencyType.EXPORTED;
          }
        }
        return DependencyType.REQUIRED;
      }

      @Override
      public boolean visit(SimpleType node) {
        // This method is responsible for finding the overwhelming majority of the required
        // symbols in the AST.
        tryAddType(node, DependencyType.REQUIRED);
        return true;
      }

      // exportedSymbols

      @Override
      public boolean visit(MethodDeclaration node) {
        // Types from private method signatures need not be exported.
        if ((node.getModifiers() & Modifier.PRIVATE) != 0) {
          return true;
        }

        Type returnType = node.getReturnType2();
        if (returnType != null) {
          tryAddType(returnType, DependencyType.EXPORTED);
        }

        @SuppressWarnings("unchecked")
        List<SingleVariableDeclaration> params = node.parameters();
        for (SingleVariableDeclaration decl : params) {
          tryAddType(decl.getType(), DependencyType.EXPORTED);
        }

        @SuppressWarnings("unchecked")
        List<Type> exceptions = node.thrownExceptionTypes();
        for (Type exception : exceptions) {
          tryAddType(exception, DependencyType.EXPORTED);
        }

        return true;
      }

      @Override
      public boolean visit(FieldDeclaration node) {
        // Types from private fields need not be exported.
        if ((node.getModifiers() & Modifier.PRIVATE) == 0) {
          tryAddType(node.getType(), DependencyType.EXPORTED);
        }
        return true;
      }

      // Recursively records the named type(s) making up `type`, unwrapping array element
      // types and generic parameterizations, under the given dependency type.
      private void tryAddType(Type type, DependencyType dependencyType) {
        if (type.isSimpleType()) {
          SimpleType simpleType = (SimpleType) type;
          Name simpleTypeName = simpleType.getName();
          String simpleName = simpleTypeName.toString();

          // For a Type such as IExample<T>, both "IExample" and "T" will be submitted here as
          // simple types. As such, we use this imperfect heuristic to filter out "T" from being
          // added. Note that this will erroneously exclude "URI". In practice, this should
          // generally be OK. For example, assuming "URI" is also imported, then at least it
          // will end up in the set of required symbols. To this end, we perform a second check
          // for "all caps" types to see if there is a corresponding import and if it should be
          // exported rather than simply required.
          if (!CharMatcher.javaUpperCase().matchesAllOf(simpleName)
              || (dependencyType == DependencyType.EXPORTED
                  && simpleImportedTypes.containsKey(simpleName))) {
            addSimpleTypeName(simpleTypeName, dependencyType);
          }
        } else if (type.isArrayType()) {
          ArrayType arrayType = (ArrayType) type;
          tryAddType(arrayType.getElementType(), dependencyType);
        } else if (type.isParameterizedType()) {
          ParameterizedType parameterizedType = (ParameterizedType) type;
          tryAddType(parameterizedType.getType(), dependencyType);
          @SuppressWarnings("unchecked")
          List<Type> argTypes = parameterizedType.typeArguments();
          for (Type argType : argTypes) {
            tryAddType(argType, dependencyType);
          }
        }
      }

      // Resolves a simple type name against explicit imports, then wildcard imports, and
      // finally the current package, recording it as required or exported.
      private void addSimpleTypeName(Name simpleTypeName, DependencyType dependencyType) {
        String simpleName = simpleTypeName.toString();
        if (JAVA_LANG_TYPES.contains(simpleName)) {
          return;
        }

        String fullyQualifiedNameForSimpleName = simpleImportedTypes.get(simpleName);
        if (fullyQualifiedNameForSimpleName != null) {
          // May need to promote from required to exported in this case.
          if (dependencyType == DependencyType.EXPORTED) {
            addSymbol(fullyQualifiedNameForSimpleName, DependencyType.EXPORTED);
          }
          return;
        }

        // For well-behaved source code, this will always be empty, so don't even bother to
        // create the iterator most of the time.
        if (!wildcardImports.isEmpty()) {
          for (Map.Entry<String, ImmutableSet<String>> entry : wildcardImports.entrySet()) {
            Set<String> types = entry.getValue();
            if (types.contains(simpleName)) {
              String packageName = entry.getKey();
              addSymbol(packageName + "." + simpleName, dependencyType);
              return;
            }
          }
        }

        String symbol = simpleTypeName.getFullyQualifiedName();
        symbol = qualifyWithPackageNameIfNecessary(symbol);
        addSymbol(symbol, dependencyType);
      }

      // Routes the symbol to the appropriate accumulator.
      private void addSymbol(String symbol, DependencyType dependencyType) {
        ((dependencyType == DependencyType.REQUIRED) ? requiredSymbols : exportedSymbols)
            .add(symbol);
      }

      private String qualifyWithPackageNameIfNecessary(String symbol) {
        if (!startsWithUppercaseChar(symbol)) {
          return symbol;
        }

        // If the symbol starts with a capital letter, then we assume that it is a reference to
        // a type in the same package.
        int index = symbol.indexOf('.');
        if (index >= 0) {
          symbol = symbol.substring(0, index);
        }
        if (packageName != null) {
          symbol = packageName + "." + symbol;
        }

        return symbol;
      }
    });

    // TODO(mbolin): Special treatment for exportedSymbols when poisoned by wildcard import.
    ImmutableSortedSet<String> totalExportedSymbols = exportedSymbols.build();

    // If we were poisoned by an unsupported wildcard import, then we should rely exclusively on
    // the explicit imports to determine the required symbols.
    Set<String> totalRequiredSymbols = new HashSet<>();
    if (isPoisonedByUnsupportedWildcardImport.get()) {
      totalRequiredSymbols.addAll(requiredSymbolsFromExplicitImports.build());
    } else {
      totalRequiredSymbols.addAll(requiredSymbolsFromExplicitImports.build());
      totalRequiredSymbols.addAll(requiredSymbols.build());
    }
    // Make sure that required and exported symbols are disjoint sets.
    totalRequiredSymbols.removeAll(totalExportedSymbols);

    return new JavaFileFeatures(
        providedSymbols.build(),
        ImmutableSortedSet.copyOf(totalRequiredSymbols),
        totalExportedSymbols);
  }

  // Walks up from `node` to the outermost enclosing QualifiedName, e.g. returns the node for
  // the whole "a.b.C" when given "a".
  private static QualifiedName findMostQualifiedAncestor(QualifiedName node) {
    ASTNode parent = node.getParent();
    if (parent instanceof QualifiedName) {
      return (QualifiedName) parent;
    } else {
      return node;
    }
  }

  /**
   * @return {@link Optional#empty()} if there are no uppercase components in the {@code
   *     fullyQualifiedName}, such as {@code import org.whispersystems.curve25519.java.curve_sigs;}.
   */
  private static Optional<String> getSimpleNameFromFullyQualifiedName(String fullyQualifiedName) {
    int dotIndex = fullyQualifiedName.indexOf('.');
    if (dotIndex < 0) {
      return Optional.of(fullyQualifiedName);
    }

    int startIndex = 0;
    while (dotIndex <= fullyQualifiedName.length()) {
      String component = fullyQualifiedName.substring(startIndex, dotIndex);
      // In practice, if there is an uppercase character in the component, it should be the first
      // character, but we have found some exceptions, in practice.
      if (CharMatcher.javaUpperCase().matchesAnyOf(component)) {
        return Optional.of(component);
      } else {
        startIndex = dotIndex + 1;
        dotIndex = fullyQualifiedName.indexOf('.', startIndex);
        if (dotIndex < 0) {
          // No more dots: let the loop examine the trailing component once, then exit.
          int length = fullyQualifiedName.length();
          if (startIndex <= length) {
            dotIndex = length;
          } else {
            break;
          }
        }
      }
    }
    return Optional.empty();
  }

  private static boolean startsWithUppercaseChar(String str) {
    return CharMatcher.javaUpperCase().matches(str.charAt(0));
  }

  // Heuristic: a dot-delimited sequence "looks like" a type reference when every segment is
  // alphanumeric and at least one segment starts with an uppercase letter.
  private static boolean looksLikeAType(String str) {
    Iterable<String> parts = Splitter.on('.').split(str);
    Iterator<String> iter = parts.iterator();
    boolean hasPartThatStartsWithUppercaseLetter = false;
    while (iter.hasNext()) {
      String part = iter.next();
      Preconditions.checkState(
          !part.isEmpty(), "Dot delimited string should not have an empty segment: '%s'", str);

      // Don't let it start with a digit?
      if (!CharMatcher.javaLetterOrDigit().matchesAllOf(part)) {
        return false;
      } else if (!hasPartThatStartsWithUppercaseLetter) {
        hasPartThatStartsWithUppercaseLetter =
            CharMatcher.javaUpperCase().matches(part.charAt(0));
      }
    }
    return hasPartThatStartsWithUppercaseLetter;
  }

  /** Immutable value object summarizing the symbols a Java file provides, requires, and exports. */
  public static class JavaFileFeatures {
    public final ImmutableSortedSet<String> providedSymbols;
    public final ImmutableSortedSet<String> requiredSymbols;

    /**
     * Exported symbols are those that need to be on the classpath when compiling against the
     * providedSymbols. These include:
     *
     * <ul>
     *   <li>Parameter types for non-private methods of non-private types.
     *   <li>Return types for non-private methods of non-private types.
     *   <li>Parent classes of non-private provided types.
     *   <li>Parent interfaces of non-private provided types.
     *   <li>Types of non-private fields of non-private types.
     * </ul>
     */
    public final ImmutableSortedSet<String> exportedSymbols;

    private JavaFileFeatures(
        ImmutableSortedSet<String> providedSymbols,
        ImmutableSortedSet<String> requiredSymbols,
        ImmutableSortedSet<String> exportedSymbols) {
      this.providedSymbols = providedSymbols;
      this.requiredSymbols = requiredSymbols;
      this.exportedSymbols = exportedSymbols;
    }

    @Override
    public String toString() {
      return String.format(
          "providedSymbols=%s; requiredSymbols=%s; exportedSymbols=%s",
          providedSymbols, requiredSymbols, exportedSymbols);
    }
  }

  // Parses `code` into an Eclipse JDT CompilationUnit at the configured JLS/compliance level.
  private CompilationUnit makeCompilationUnitFromSource(String code) {
    ASTParser parser = ASTParser.newParser(jlsLevel);
    parser.setSource(code.toCharArray());
    parser.setKind(ASTParser.K_COMPILATION_UNIT);
    Map<String, String> options = JavaCore.getOptions();
    JavaCore.setComplianceOptions(javaVersion, options);
    parser.setCompilerOptions(options);
    return (CompilationUnit) parser.createAST(/* monitor */ null);
  }

  /**
   * @return the dot-joined package + enclosing types + type name for {@code node}, or {@code
   *     null} when the type is declared inside an anonymous class (no meaningful qualified name).
   */
  @Nullable
  private String getFullyQualifiedTypeName(AbstractTypeDeclaration node) {
    LinkedList<String> nameParts = new LinkedList<>();
    nameParts.add(node.getName().toString());
    ASTNode parent = node.getParent();
    while (!(parent instanceof CompilationUnit)) {
      if (parent instanceof AbstractTypeDeclaration) {
        nameParts.addFirst(((AbstractTypeDeclaration) parent).getName().toString());
        parent = parent.getParent();
      } else if (parent instanceof AnonymousClassDeclaration) {
        // If this is defined in an anonymous class, then there is no meaningful fully qualified
        // name.
        return null;
      } else {
        throw new RuntimeException("Unexpected parent " + parent + " for " + node);
      }
    }

    // A Java file might not have a package. Hopefully all of ours do though...
    PackageDeclaration packageDecl = ((CompilationUnit) parent).getPackage();
    if (packageDecl != null) {
      nameParts.addFirst(packageDecl.getName().toString());
    }
    return Joiner.on(".").join(nameParts);
  }
}
package server; import client.HttpClient; import java.io.File; import java.io.IOException; import java.io.InterruptedIOException; import java.net.InetSocketAddress; import java.net.URL; import java.security.KeyStore; import java.util.Locale; import javax.net.ssl.KeyManager; import javax.net.ssl.KeyManagerFactory; import javax.net.ssl.SSLContext; import messages.DiffieHellmanMessage; import messages.EncryptedMessage; import messages.ErrorMessage; import messages.LoginMessage; import messages.Message; import messages.MessageTypes; import messages.RegisterMessage; import messages.SuccessMessage; import messages.SuccessorPredecessorInfo; import messages.TextMessage; import messages.chord.ChordMessage; import network_component.NetworkComponent; import org.apache.commons.codec.binary.Base64; import org.apache.http.HttpException; import org.apache.http.HttpRequest; import org.apache.http.HttpResponse; import org.apache.http.MethodNotSupportedException; import org.apache.http.config.ConnectionConfig; import org.apache.http.impl.nio.DefaultHttpServerIODispatch; import org.apache.http.impl.nio.DefaultNHttpServerConnection; import org.apache.http.impl.nio.DefaultNHttpServerConnectionFactory; import org.apache.http.impl.nio.SSLNHttpServerConnectionFactory; import org.apache.http.impl.nio.reactor.DefaultListeningIOReactor; import org.apache.http.impl.nio.reactor.IOReactorConfig; import org.apache.http.nio.NHttpConnectionFactory; import org.apache.http.nio.NHttpServerConnection; import org.apache.http.nio.protocol.BasicAsyncRequestConsumer; import org.apache.http.nio.protocol.BasicAsyncResponseProducer; import org.apache.http.nio.protocol.HttpAsyncExchange; import org.apache.http.nio.protocol.HttpAsyncRequestConsumer; import org.apache.http.nio.protocol.HttpAsyncRequestHandler; import org.apache.http.nio.protocol.HttpAsyncService; import org.apache.http.nio.protocol.UriHttpAsyncRequestHandlerMapper; import org.apache.http.nio.reactor.IOEventDispatch; import 
org.apache.http.nio.reactor.IOReactorException; import org.apache.http.nio.reactor.ListeningIOReactor; import org.apache.http.protocol.HttpContext; import org.apache.http.protocol.HttpCoreContext; import org.apache.http.protocol.HttpProcessor; import org.apache.http.protocol.HttpProcessorBuilder; import org.apache.http.protocol.ResponseConnControl; import org.apache.http.protocol.ResponseContent; import org.apache.http.protocol.ResponseDate; import org.apache.http.protocol.ResponseServer; public class HttpServer implements Runnable { private final int port; private final HttpClient httpClient; private final boolean isCentralServer; private final NetworkComponent component; public HttpServer(NetworkComponent component, int port ,HttpClient httpClient, boolean isCentralServer) { this.port = port; this.httpClient = httpClient; this.isCentralServer = isCentralServer; this.component = component; } public int getPort() { return this.port; } public boolean IsCentralServer() { return this.isCentralServer; } @Override public void run() { // HTTP Processing chain HttpProcessor httpProc = HttpProcessorBuilder.create() .add(new ResponseDate()) .add(new ResponseServer("Server/1.1")) .add(new ResponseContent()) .add(new ResponseConnControl()).build(); // Request Handler Registry UriHttpAsyncRequestHandlerMapper registry = new UriHttpAsyncRequestHandlerMapper(); // Register the default handler for all URIs registry.register("*", new HttpMessageHandler(component, port, httpClient, isCentralServer)); // Server-Side HTTP Handler HttpAsyncService protocolHandler = new HttpAsyncService(httpProc, registry) { @Override public void connected(final NHttpServerConnection conn) { System.out.println(conn + ": connection established"); super.connected(conn); } @Override public void closed(final NHttpServerConnection conn) { System.out.println(conn + ": connection closed"); super.closed(conn); } }; NHttpConnectionFactory<DefaultNHttpServerConnection> connFactory = null; // SSL if (port == 
8443) { // Initialize SSL context //ClassLoader cl = server.HttpServer.class.getClassLoader(); try { URL url = new File("my.keystore").toURI().toURL();//cl.getResource("my.keystore"); if (url == null) { System.err.println("Keystore not found"); System.exit(1); } KeyStore keyStore = KeyStore.getInstance("jks"); keyStore.load(url.openStream(), "ucrucr".toCharArray()); KeyManagerFactory kmFactory = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm()); kmFactory.init(keyStore, "ucrucr".toCharArray()); KeyManager[] keyManagers = kmFactory.getKeyManagers(); SSLContext sslContext = SSLContext.getInstance("TLS"); sslContext.init(keyManagers, null, null); connFactory = new SSLNHttpServerConnectionFactory(sslContext, null, ConnectionConfig.DEFAULT); } catch (Exception ex) { System.err.println(ex.getMessage()); System.exit(1); } } else { connFactory = new DefaultNHttpServerConnectionFactory(ConnectionConfig.DEFAULT); } // Server-side I/O event dispatch IOEventDispatch ioEventDispatch = new DefaultHttpServerIODispatch(protocolHandler, connFactory); // I/O reactor defaults IOReactorConfig config = IOReactorConfig.custom() .setIoThreadCount(5) .setSoTimeout(3000) .setConnectTimeout(3000) .build(); try { // Create Server-side I/O reactor ListeningIOReactor ioReactor = new DefaultListeningIOReactor(config); ioReactor.listen(new InetSocketAddress(port)); ioReactor.execute(ioEventDispatch); } catch (IOReactorException ex) { System.err.println(ex.getMessage()); System.exit(1); } catch (InterruptedIOException ex) { System.err.println("Interrupted"); } catch (IOException ex) { System.err.println("I/O error: " + ex.getMessage()); } System.out.println("Shutdown"); } static class HttpMessageHandler implements HttpAsyncRequestHandler<HttpRequest> { private final HttpClient httpClient; private final int port; private final String protocol; private final boolean isCentralServer; private final NetworkComponent component; public HttpMessageHandler(NetworkComponent component, 
int port, HttpClient httpClient, boolean isCentralServer) { this.httpClient = httpClient; this.port = port; this.isCentralServer = isCentralServer; this.component = component; if (port == 8443) { protocol = "https"; } else { protocol = "http"; } } @Override public HttpAsyncRequestConsumer<HttpRequest> processRequest(HttpRequest request, HttpContext context) throws HttpException, IOException { // Buffer request content in memory return new BasicAsyncRequestConsumer(); } @Override public void handle(HttpRequest request, HttpAsyncExchange httpExchange, HttpContext context) throws HttpException, IOException { HttpResponse response = httpExchange.getResponse(); handleInternal(request, response, context); httpExchange.submitResponse(new BasicAsyncResponseProducer(response)); } private void handleInternal(final HttpRequest request, final HttpResponse response, final HttpContext context) throws MethodNotSupportedException { HttpCoreContext coreContext = HttpCoreContext.adapt(context); String method = request.getRequestLine().getMethod().toUpperCase(Locale.ENGLISH); if (!method.equals("GET") && !method.equals("HEAD") && !method.equals("POST")) { throw new MethodNotSupportedException(method + " method not supported"); } /*NHttpConnection conn = coreContext.getConnection(NHttpConnection.class); HttpInetConnection connection = (HttpInetConnection) conn; InetAddress remoteAddr = connection.getRemoteAddress(); String rcvdIP = remoteAddr.toString().substring(1);*/ String rcvd = request.getRequestLine().getUri(); System.out.println("Received (Base64 Representation): " + rcvd); byte[] bRcvd = Base64.decodeBase64(rcvd); byte messageType = bRcvd[0]; Message msg; switch (messageType) { case MessageTypes.LOGIN_MESSAGE_TYPE: System.out.println("Received: LOGIN MESSAGE"); if (!isCentralServer) { System.err.println("I am not the central server. 
Throw it"); break; } msg = LoginMessage.parseArray(bRcvd); component.handleLogin((LoginMessage)msg); break; case MessageTypes.REGISTER_MESSAGE_TYPE: System.out.println("Received: REGISTER MESSAGE"); if (!isCentralServer) { System.err.println("I am not the central server. Throw it"); break; } msg = RegisterMessage.parseArray(bRcvd); component.handleRegister((RegisterMessage)msg); break; case MessageTypes.SUCCESSOR_PREDECESSOR_INFO: System.out.println("Received: SUCCESSOR_PREDECESSOR INFO MESSAGE"); if (isCentralServer) { System.err.println("I am the central server. Throw it"); break; } msg = SuccessorPredecessorInfo.parseArray(bRcvd); component.handleSuccessorPredecessorInfo((SuccessorPredecessorInfo) msg); break; case MessageTypes.DIFFIE_HELLMAN_MESSAGE: System.out.println("Received: DIFFIE HELLMAN MESSAGE"); if (isCentralServer) { System.err.println("I am the central server. Throw it"); break; } msg = DiffieHellmanMessage.parseArray(bRcvd); component.handleDiffieHellman((DiffieHellmanMessage) msg); break; case MessageTypes.ERROR_MESSAGE: System.out.println("Received: ERROR MESSAGE"); msg = ErrorMessage.parseArray(bRcvd); component.handleError((ErrorMessage) msg); break; case MessageTypes.SUCCESS_MESSAGE: System.out.println("Received: SUCCESS MESSAGE"); msg = SuccessMessage.parseArray(bRcvd); component.handleSuccess((SuccessMessage) msg); break; case MessageTypes.TEXT_MESSAGE: System.out.println("Received: TEXT MESSAGE"); if (isCentralServer) { System.err.println("I am the central server. Throw it"); break; } msg = TextMessage.parseArray(bRcvd); component.handleTextMessage((TextMessage) msg); break; case MessageTypes.CHORD_MESSAGE: System.out.println("Received: CHORD MESSAGE"); if (isCentralServer) { System.err.println("I am the central server. 
Throw it"); break; } msg = ChordMessage.parseArray(bRcvd); System.out.println("HandleChordMessage"); component.handleChordMessage((ChordMessage) msg); break; case MessageTypes.ENCRYPTED_MESSAGE: System.out.println("Received: ENCRYPTED MESSAGE"); if (isCentralServer) { System.err.println("I am the central server. Throw it"); break; } msg = EncryptedMessage.parseArray(bRcvd); component.handleEncryptedMessage((EncryptedMessage) msg); break; default: System.err.println("Unknown Message: " + messageType); } } } }
/* * The MIT License * * Copyright 2014 SBPrime. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package org.PrimeSoft.MCPainter.Commands; import java.awt.image.BufferedImage; import org.PrimeSoft.MCPainter.blocksplacer.BlockLoger; import org.PrimeSoft.MCPainter.blocksplacer.BlockPlacer; import org.PrimeSoft.MCPainter.Configuration.ConfigProvider; import org.PrimeSoft.MCPainter.Drawing.Filters.CropFilter; import org.PrimeSoft.MCPainter.Drawing.Filters.FilterManager; import org.PrimeSoft.MCPainter.Drawing.ImageHelper; import org.PrimeSoft.MCPainter.FoundManager; import org.PrimeSoft.MCPainter.Help; import org.PrimeSoft.MCPainter.blocksplacer.ILoggerCommand; import org.PrimeSoft.MCPainter.MapDrawer.MapHelper; import org.PrimeSoft.MCPainter.PermissionManager; import org.PrimeSoft.MCPainter.MCPainterMain; import org.PrimeSoft.MCPainter.worldEdit.ICuboidSelection; import org.PrimeSoft.MCPainter.worldEdit.IEditSession; import org.PrimeSoft.MCPainter.worldEdit.ILocalPlayer; import org.PrimeSoft.MCPainter.worldEdit.ILocalSession; import org.PrimeSoft.MCPainter.worldEdit.IWorldEdit; import org.bukkit.Bukkit; import org.bukkit.ChatColor; import org.bukkit.Chunk; import org.bukkit.Location; import org.bukkit.Material; import org.bukkit.Rotation; import org.bukkit.World; import org.bukkit.block.Block; import org.bukkit.block.BlockFace; import org.bukkit.entity.ItemFrame; import org.bukkit.entity.Player; import org.bukkit.inventory.ItemStack; import org.bukkit.map.MapView; /** * @author SBPrime */ public class HdImageCommand { private final MapHelper m_mapHelper; public HdImageCommand(MapHelper mapHelper) { m_mapHelper = mapHelper; } public void Execute(MCPainterMain sender, Player player, IWorldEdit worldEdit, String[] args) { if (args.length != 2) { Help.ShowHelp(player, Commands.COMMAND_IMAGEHD); return; } String url = args[1]; final ICuboidSelection selection = worldEdit.getSelection(player); if (selection == null) { MCPainterMain.say(player, ChatColor.RED + "No selection."); return; } sender.getServer().getScheduler().runTaskAsynchronously(sender, new 
CommandThread(this, sender, player, url, worldEdit, selection)); } private static boolean isSolid(Material m) { if (m == Material.AIR) { return false; } if (m == Material.LAVA) { return false; } if (m == Material.STATIONARY_LAVA) { return false; } if (m == Material.WATER) { return false; } if (m == Material.STATIONARY_WATER) { return false; } if (m == Material.ENDER_PORTAL) { return false; } if (m == Material.PORTAL) { return false; } return m.isSolid() && m.isBlock(); } private class DrawMapCommand implements ILoggerCommand { private final Location m_location; private final BufferedImage m_img; private final MapHelper m_mapHelper; private final BlockFace m_rotation; private Material m_oldMaterial; private ItemFrame m_frame; private MapView m_mapView; private DrawMapCommand(Location location, BlockFace face, int offsetX, int offsetY, BufferedImage img, MapHelper mapHelper) { m_location = location; int x = Math.min(offsetX + 127, img.getWidth() - 1); int y = Math.min(offsetY + 127, img.getHeight() - 1); m_img = CropFilter.crop(img, offsetX, offsetY, x, y, false); m_mapHelper = mapHelper; m_rotation = face; } @Override public void execute(BlockPlacer blockPlacer, BlockLoger loger) { Chunk chunk = m_location.getChunk(); if (!chunk.isLoaded()) { if (!chunk.load()) { return; } } World w = m_location.getWorld(); Block block = w.getBlockAt(m_location); Material material = block.getType(); if (!isSolid(material)) { m_oldMaterial = material; block.setType(Material.BARRIER); } else { m_oldMaterial = null; } m_frame = (ItemFrame) w.spawn(block.getRelative(m_rotation).getLocation(), ItemFrame.class); m_frame.setFacingDirection(m_rotation, true); m_frame.setRotation(Rotation.NONE); m_mapView = Bukkit.createMap(w); m_mapHelper.storeMap(m_mapView, m_img); m_mapHelper.drawImage(m_mapView, m_img); m_frame.setItem(new ItemStack(Material.MAP, 1, m_mapView.getId())); } @Override public Location getLocation() { return m_location; } } private class CommandThread implements Runnable { 
private final ICuboidSelection m_selection; private final String m_url; private final Player m_player; private final MCPainterMain m_sender; private final HdImageCommand m_this; private final IEditSession m_session; private final ILocalSession m_lSession; private final BlockFace m_rotation; private CommandThread(HdImageCommand command, MCPainterMain sender, Player player, String url, IWorldEdit worldEdit, ICuboidSelection selection) { m_this = command; m_sender = sender; m_player = player; m_url = url; m_selection = selection; ILocalPlayer localPlayer = worldEdit.wrapPlayer(player); m_rotation = calcHeading(localPlayer.getYaw()); m_lSession = worldEdit.getSession(player); m_session = m_lSession.createEditSession(localPlayer); } @Override public void run() { FilterManager fm = FilterManager.getFilterManager(m_player); double price = ConfigProvider.getCommandPrice("imagehd") + fm.getPrice(); synchronized (FoundManager.getMutex()) { if (price > 0 && FoundManager.getMoney(m_player) < price) { MCPainterMain.say(m_player, ChatColor.RED + "You don't have sufficient funds to apply all the filters and draw the map."); return; } MCPainterMain.say(m_player, "Loading image..."); BufferedImage img = ImageHelper.downloadImage(m_url); if (img == null) { MCPainterMain.say(m_player, ChatColor.RED + "Error downloading image " + ChatColor.WHITE + m_url); return; } img = fm.applyFilters(img, null); final BufferedImage fImg = img; int imgH = img.getHeight(); int imgW = img.getWidth(); if (!PermissionManager.checkImage(m_player, imgW, imgH)) { return; } Location minPoint = m_selection.getMinimumPoint(); Location maxPoint = m_selection.getMaximumPoint(); int l = m_selection.getLength(); int w = m_selection.getWidth(); int h = m_selection.getHeight(); int kx, kz; if (w > 1 && l > 1) { MCPainterMain.say(m_player, ChatColor.RED + "Invalid selection area."); return; } else if (w > l) { kx = 1; kz = 0; } else { kx = 0; kz = 1; } int bHeight = imgH / 128 + (imgH % 128 != 0 ? 
1 : 0); int bWidth = imgW / 128 + (imgW % 128 != 0 ? 1 : 0); if (h < bHeight || (w < bWidth && l < bWidth)) { MCPainterMain.say(m_player, ChatColor.RED + "The selection is to smal, required: " + bWidth + "x" + bHeight); return; } Location pos = new Location(minPoint.getWorld(), minPoint.getBlockX(), maxPoint.getBlockY(), minPoint.getBlockZ()); if (m_rotation == BlockFace.NORTH || m_rotation == BlockFace.EAST) { pos = pos.add(kx * (bWidth - 1), 0, kz * (bWidth - 1)); kx *= -1; kz *= -1; } MCPainterMain.say(m_player, "Drawing image..."); BlockLoger loger = new BlockLoger(m_player, m_lSession, m_session, m_sender); for (int py = 0; py < bHeight; py++) { Location tmp = pos.clone(); for (int px = 0; px < bWidth; px++) { loger.logCommand(new DrawMapCommand(tmp.clone(), m_rotation, px * 128, py * 128, fImg, m_mapHelper)); tmp = tmp.add(kx, 0, kz); } pos = pos.add(0, -1, 0); } loger.logMessage("Drawing image done."); loger.logEndSession(); loger.flush(); FoundManager.subtractMoney(m_player, price); } } private BlockFace calcHeading(double yaw) { yaw = (yaw + 360) % 360; if (yaw < 45) { return BlockFace.NORTH; } else if (yaw < 135) { return BlockFace.EAST; } else if (yaw < 225) { return BlockFace.SOUTH; } else if (yaw < 315) { return BlockFace.WEST; } else { return BlockFace.NORTH; } } } }
/* * Copyright (c) 2013-2017 Cinchapi Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.cinchapi.concourse.http; import java.util.Map; import java.util.Map.Entry; import org.junit.Assert; import org.junit.Test; import com.cinchapi.concourse.Timestamp; import com.cinchapi.concourse.time.Time; import com.cinchapi.concourse.util.TestData; import com.google.gson.reflect.TypeToken; import com.squareup.okhttp.Response; /** * Unit tests for the audit functionality in the REST API. * * @author Jeff Nelson */ public class RestAuditTest extends RestTest { @Test public void testAuditRecord() { long record = TestData.getLong(); int count = TestData.getScaleCount(); for (int i = 0; i < count; i++) { client.add(TestData.getSimpleString(), i, record); } Map<Long, String> resp = bodyAsJava(get("/{0}/audit", record), new TypeToken<Map<Long, String>>() {}); Map<Timestamp, String> expected = client.audit(record); for (Entry<Timestamp, String> entry : expected.entrySet()) { long timestamp = entry.getKey().getMicros(); Assert.assertEquals(entry.getValue(), resp.get(timestamp)); } } @Test public void testAuditKeyReturns400Error() { Response resp = get("/foo/audit"); Assert.assertEquals(400, resp.code()); } @Test public void testAuditRecordStart() { long record = TestData.getLong(); int count = TestData.getScaleCount(); for (int i = 0; i < count; i++) { client.add(TestData.getSimpleString(), i, record); } long start = Time.now(); for (int i = 0; i < count; i++) { 
client.add(TestData.getSimpleString(), i, record); } Map<Long, String> resp = bodyAsJava( get("/{0}/audit?start={1}", record, start), new TypeToken<Map<Long, String>>() {}); Map<Timestamp, String> expected = client.audit(record, Timestamp.fromMicros(start)); Assert.assertEquals(expected.size(), resp.size()); for (Entry<Timestamp, String> entry : expected.entrySet()) { long timestamp = entry.getKey().getMicros(); Assert.assertTrue(timestamp >= start); Assert.assertEquals(entry.getValue(), resp.get(timestamp)); } } @Test public void testAuditRecordStartEnd() { long record = TestData.getLong(); int count = TestData.getScaleCount(); for (int i = 0; i < count; i++) { client.add(TestData.getSimpleString(), i, record); } long start = Time.now(); for (int i = 0; i < count; i++) { client.add(TestData.getSimpleString(), i, record); } long end = Time.now(); for (int i = 0; i < count; i++) { client.add(TestData.getSimpleString(), i, record); } Map<Long, String> resp = bodyAsJava( get("/{0}/audit?start={1}&end={2}", record, start, end), new TypeToken<Map<Long, String>>() {}); Map<Timestamp, String> expected = client.audit(record, Timestamp.fromMicros(start), Timestamp.fromMicros(end)); Assert.assertEquals(expected.size(), resp.size()); for (Entry<Timestamp, String> entry : expected.entrySet()) { long timestamp = entry.getKey().getMicros(); Assert.assertTrue(timestamp >= start); Assert.assertTrue(timestamp <= end); Assert.assertEquals(entry.getValue(), resp.get(timestamp)); } } @Test public void testAuditKeyRecord() { long record = TestData.getLong(); String key = TestData.getSimpleString(); int count = TestData.getScaleCount(); for (int i = 0; i < count; i++) { client.add(key, i, record); } Map<Long, String> resp = bodyAsJava(get("/{0}/{1}/audit", key, record), new TypeToken<Map<Long, String>>() {}); Map<Timestamp, String> expected = client.audit(key, record); for (Entry<Timestamp, String> entry : expected.entrySet()) { long timestamp = entry.getKey().getMicros(); 
Assert.assertEquals(entry.getValue(), resp.get(timestamp)); } } @Test public void testAuditRecordKey() { long record = TestData.getLong(); String key = TestData.getSimpleString(); int count = TestData.getScaleCount(); for (int i = 0; i < count; i++) { client.add(key, i, record); } Map<Long, String> resp = bodyAsJava(get("/{0}/{1}/audit", record, key), new TypeToken<Map<Long, String>>() {}); Map<Timestamp, String> expected = client.audit(key, record); for (Entry<Timestamp, String> entry : expected.entrySet()) { long timestamp = entry.getKey().getMicros(); Assert.assertEquals(entry.getValue(), resp.get(timestamp)); } } @Test public void testAuditKeyRecordStart() { long record = TestData.getLong(); String key = TestData.getSimpleString(); int count = TestData.getScaleCount(); for (int i = 0; i < count; i++) { client.add(key, i, record); } long start = Time.now(); for (int i = 0; i < count; i++) { client.add(key, i, record); } Map<Long, String> resp = bodyAsJava( get("/{0}/{1}/audit?start={2}", key, record, start), new TypeToken<Map<Long, String>>() {}); Map<Timestamp, String> expected = client.audit(key, record, Timestamp.fromMicros(start)); Assert.assertEquals(expected.size(), resp.size()); for (Entry<Timestamp, String> entry : expected.entrySet()) { long timestamp = entry.getKey().getMicros(); Assert.assertTrue(timestamp >= start); Assert.assertEquals(entry.getValue(), resp.get(timestamp)); } } @Test public void testAuditKeyRecordStartEnd() { long record = TestData.getLong(); String key = TestData.getSimpleString(); int count = TestData.getScaleCount(); for (int i = 0; i < count; i++) { client.add(key, i, record); } long start = Time.now(); for (int i = 0; i < count; i++) { client.add(key, i, record); } long end = Time.now(); for (int i = 0; i < count; i++) { client.add(key, i, record); } Map<Long, String> resp = bodyAsJava( get("/{0}/{1}/audit?start={2}&end={3}", key, record, start, end), new TypeToken<Map<Long, String>>() {}); Map<Timestamp, String> expected = 
client.audit(key, record, Timestamp.fromMicros(start), Timestamp.fromMicros(end)); Assert.assertEquals(expected.size(), resp.size()); for (Entry<Timestamp, String> entry : expected.entrySet()) { long timestamp = entry.getKey().getMicros(); Assert.assertTrue(timestamp >= start); Assert.assertTrue(timestamp <= end); Assert.assertEquals(entry.getValue(), resp.get(timestamp)); } } }
/************************************************************************************************ * _________ _ ____ _ __ __ _ _ _ _ _ ___ * |__ / ___|__ _ ___| |__ / ___|_ _(_)_ __ __ \ \ / /_ _| | | ___| |_| | | |_ _| * / / | / _` / __| '_ \\___ \ \ /\ / / | '_ \ / _` \ \ /\ / / _` | | |/ _ \ __| | | || | * / /| |__| (_| \__ \ | | |___) \ V V /| | | | | (_| |\ V V / (_| | | | __/ |_| |_| || | * /____\____\__,_|___/_| |_|____/ \_/\_/ |_|_| |_|\__, | \_/\_/ \__,_|_|_|\___|\__|\___/|___| * |___/ * * Copyright (c) 2016 Ivan Vaklinov <ivan@vaklinov.com> * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
**********************************************************************************/ package com.vaklinov.zcashui; import java.awt.BorderLayout; import java.awt.Desktop; import java.awt.FlowLayout; import java.awt.Toolkit; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.KeyEvent; import java.io.UnsupportedEncodingException; import java.net.URL; import java.util.Arrays; import java.util.Comparator; import java.util.Map; import java.util.Map.Entry; import java.util.logging.Level; import java.util.logging.Logger; import javax.swing.BorderFactory; import javax.swing.JButton; import javax.swing.JDialog; import javax.swing.JFrame; import javax.swing.JLabel; import javax.swing.JMenuItem; import javax.swing.JPanel; import javax.swing.JScrollPane; import javax.swing.JTable; import javax.swing.KeyStroke; import javax.swing.border.EtchedBorder; /** * Table to be used for transactions - specifically. * * @author Ivan Vaklinov <ivan@vaklinov.com> */ public class TransactionTable extends DataTable { private static final Logger LOG = Logger.getLogger(TransactionTable.class.getName()); public TransactionTable(final Object[][] rowData, final Object[] columnNames, final JFrame parent, final ZCashClientCaller caller) { super(rowData, columnNames); int accelaratorKeyMask = Toolkit.getDefaultToolkit().getMenuShortcutKeyMask(); JMenuItem showDetails = new JMenuItem("Show details..."); showDetails.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_S, accelaratorKeyMask)); popupMenu.add(showDetails); showDetails.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { if ((lastRow >= 0) && (lastColumn >= 0)) { try { String txID = TransactionTable.this.getModel().getValueAt(lastRow, 6).toString(); txID = txID.replaceAll("\"", ""); // In case it has quotes LOG.fine("Transaction ID for detail dialog is: " + txID); Map<String, String> details = caller.getRawTransactionDetails(txID); String rawTrans = 
caller.getRawTransaction(txID); DetailsDialog dd = new DetailsDialog(parent, details); dd.setVisible(true); } catch (Exception ex) { LOG.log(Level.WARNING, "", ex); // TODO: report exception to user } } else { // Log perhaps } } }); JMenuItem showInExplorer = new JMenuItem("Show in block explorer"); showInExplorer.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_X, accelaratorKeyMask)); popupMenu.add(showInExplorer); showInExplorer.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { if ((lastRow >= 0) && (lastColumn >= 0)) { try { String txID = TransactionTable.this.getModel().getValueAt(lastRow, 6).toString(); txID = txID.replaceAll("\"", ""); // In case it has quotes LOG.fine("Transaction ID for block explorer is: " + txID); // https://explorer.zcha.in/transactions/<ID> Desktop.getDesktop().browse( new URL("https://explorer.zcha.in/transactions/" + txID).toURI()); } catch (Exception ex) { LOG.log(Level.WARNING, "", ex); // TODO: report exception to user } } else { // Log perhaps } } }); } // End constructor private static class DetailsDialog extends JDialog { public DetailsDialog(JFrame parent, Map<String, String> details) throws UnsupportedEncodingException { this.setTitle("Transaction details..."); this.setSize(600, 310); this.setLocation(100, 100); this.setLocationRelativeTo(parent); this.setModal(true); this.setDefaultCloseOperation(DISPOSE_ON_CLOSE); this.getContentPane().setLayout(new BorderLayout(0, 0)); JPanel tempPanel = new JPanel(new BorderLayout(0, 0)); tempPanel.setBorder(BorderFactory.createEmptyBorder(4, 4, 4, 4)); JLabel infoLabel = new JLabel( "<html><span style=\"font-size:9px;\">" + "The table shows the information about the transaction with technical details as " + "they appear at ZCash network level." 
+ "</span>"); infoLabel.setBorder(BorderFactory.createEtchedBorder(EtchedBorder.LOWERED)); tempPanel.add(infoLabel, BorderLayout.CENTER); this.getContentPane().add(tempPanel, BorderLayout.NORTH); String[] columns = new String[] { "Name", "Value" }; String[][] data = new String[details.size()][2]; int i = 0; int maxPreferredWidht = 400; for (Entry<String, String> ent : details.entrySet()) { if (maxPreferredWidht < (ent.getValue().length() * 6)) { maxPreferredWidht = ent.getValue().length() * 6; } data[i][0] = ent.getKey(); data[i][1] = ent.getValue(); i++; } Arrays.sort(data, new Comparator<String[]>() { public int compare(String[] o1, String[] o2) { return o1[0].compareTo(o2[0]); } public boolean equals(Object obj) { return false; } }); DataTable table = new DataTable(data, columns); table.getColumnModel().getColumn(0).setPreferredWidth(200); table.getColumnModel().getColumn(1).setPreferredWidth(maxPreferredWidht); table.setAutoResizeMode(JTable.AUTO_RESIZE_OFF); JScrollPane tablePane = new JScrollPane( table, JScrollPane.VERTICAL_SCROLLBAR_AS_NEEDED, JScrollPane.HORIZONTAL_SCROLLBAR_AS_NEEDED); this.getContentPane().add(tablePane, BorderLayout.CENTER); // Lower close button JPanel closePanel = new JPanel(); closePanel.setLayout(new FlowLayout(FlowLayout.CENTER, 3, 3)); JButton closeButon = new JButton("Close"); closePanel.add(closeButon); this.getContentPane().add(closePanel, BorderLayout.SOUTH); closeButon.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { DetailsDialog.this.setVisible(false); DetailsDialog.this.dispose(); } }); } } }
/*
 * Copyright 2002-2016 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.orm.jpa.persistenceunit;

import java.net.URL;
import java.util.LinkedList;
import java.util.List;
import java.util.Properties;

import javax.persistence.SharedCacheMode;
import javax.persistence.ValidationMode;
import javax.persistence.spi.ClassTransformer;
import javax.persistence.spi.PersistenceUnitTransactionType;
import javax.sql.DataSource;

import org.springframework.util.ClassUtils;

/**
 * Spring's base implementation of the JPA
 * {@link javax.persistence.spi.PersistenceUnitInfo} interface,
 * used to bootstrap an {@code EntityManagerFactory} in a container.
 *
 * <p>This implementation is largely a JavaBean, offering mutators
 * for all standard {@code PersistenceUnitInfo} properties.
 *
 * @author Rod Johnson
 * @author Juergen Hoeller
 * @author Costin Leau
 * @since 2.0
 */
public class MutablePersistenceUnitInfo implements SmartPersistenceUnitInfo {

	private String persistenceUnitName;

	private String persistenceProviderClassName;

	private PersistenceUnitTransactionType transactionType;

	private DataSource nonJtaDataSource;

	private DataSource jtaDataSource;

	private final List<String> mappingFileNames = new LinkedList<>();

	// Declared final for consistency with the other collection fields:
	// it is only ever populated via addJarFileUrl and never reassigned.
	private final List<URL> jarFileUrls = new LinkedList<>();

	private URL persistenceUnitRootUrl;

	private final List<String> managedClassNames = new LinkedList<>();

	private final List<String> managedPackages = new LinkedList<>();

	private boolean excludeUnlistedClasses = false;

	private SharedCacheMode sharedCacheMode = SharedCacheMode.UNSPECIFIED;

	private ValidationMode validationMode = ValidationMode.AUTO;

	private Properties properties = new Properties();

	private String persistenceXMLSchemaVersion = "2.0";

	private String persistenceProviderPackageName;


	public void setPersistenceUnitName(String persistenceUnitName) {
		this.persistenceUnitName = persistenceUnitName;
	}

	@Override
	public String getPersistenceUnitName() {
		return this.persistenceUnitName;
	}

	public void setPersistenceProviderClassName(String persistenceProviderClassName) {
		this.persistenceProviderClassName = persistenceProviderClassName;
	}

	@Override
	public String getPersistenceProviderClassName() {
		return this.persistenceProviderClassName;
	}

	public void setTransactionType(PersistenceUnitTransactionType transactionType) {
		this.transactionType = transactionType;
	}

	/**
	 * Return the explicitly configured transaction type, falling back to
	 * JTA when a JTA DataSource is present and RESOURCE_LOCAL otherwise.
	 */
	@Override
	public PersistenceUnitTransactionType getTransactionType() {
		if (this.transactionType != null) {
			return this.transactionType;
		}
		else {
			return (this.jtaDataSource != null ?
					PersistenceUnitTransactionType.JTA : PersistenceUnitTransactionType.RESOURCE_LOCAL);
		}
	}

	public void setJtaDataSource(DataSource jtaDataSource) {
		this.jtaDataSource = jtaDataSource;
	}

	@Override
	public DataSource getJtaDataSource() {
		return this.jtaDataSource;
	}

	public void setNonJtaDataSource(DataSource nonJtaDataSource) {
		this.nonJtaDataSource = nonJtaDataSource;
	}

	@Override
	public DataSource getNonJtaDataSource() {
		return this.nonJtaDataSource;
	}

	public void addMappingFileName(String mappingFileName) {
		this.mappingFileNames.add(mappingFileName);
	}

	@Override
	public List<String> getMappingFileNames() {
		return this.mappingFileNames;
	}

	public void addJarFileUrl(URL jarFileUrl) {
		this.jarFileUrls.add(jarFileUrl);
	}

	@Override
	public List<URL> getJarFileUrls() {
		return this.jarFileUrls;
	}

	public void setPersistenceUnitRootUrl(URL persistenceUnitRootUrl) {
		this.persistenceUnitRootUrl = persistenceUnitRootUrl;
	}

	@Override
	public URL getPersistenceUnitRootUrl() {
		return this.persistenceUnitRootUrl;
	}

	/**
	 * Add a managed class name to the persistence provider's metadata.
	 * @see javax.persistence.spi.PersistenceUnitInfo#getManagedClassNames()
	 * @see #addManagedPackage
	 */
	public void addManagedClassName(String managedClassName) {
		this.managedClassNames.add(managedClassName);
	}

	@Override
	public List<String> getManagedClassNames() {
		return this.managedClassNames;
	}

	/**
	 * Add a managed package to the persistence provider's metadata.
	 * <p>Note: This refers to annotated {@code package-info.java} files. It does
	 * <i>not</i> trigger entity scanning in the specified package; this is
	 * rather the job of {@link DefaultPersistenceUnitManager#setPackagesToScan}.
	 * @since 4.1
	 * @see SmartPersistenceUnitInfo#getManagedPackages()
	 * @see #addManagedClassName
	 */
	public void addManagedPackage(String packageName) {
		this.managedPackages.add(packageName);
	}

	@Override
	public List<String> getManagedPackages() {
		return this.managedPackages;
	}

	public void setExcludeUnlistedClasses(boolean excludeUnlistedClasses) {
		this.excludeUnlistedClasses = excludeUnlistedClasses;
	}

	@Override
	public boolean excludeUnlistedClasses() {
		return this.excludeUnlistedClasses;
	}

	public void setSharedCacheMode(SharedCacheMode sharedCacheMode) {
		this.sharedCacheMode = sharedCacheMode;
	}

	@Override
	public SharedCacheMode getSharedCacheMode() {
		return this.sharedCacheMode;
	}

	public void setValidationMode(ValidationMode validationMode) {
		this.validationMode = validationMode;
	}

	@Override
	public ValidationMode getValidationMode() {
		return this.validationMode;
	}

	/**
	 * Add a single JPA property, lazily creating the Properties holder
	 * in case {@link #setProperties} was called with {@code null}.
	 */
	public void addProperty(String name, String value) {
		if (this.properties == null) {
			this.properties = new Properties();
		}
		this.properties.setProperty(name, value);
	}

	public void setProperties(Properties properties) {
		this.properties = properties;
	}

	@Override
	public Properties getProperties() {
		return this.properties;
	}

	public void setPersistenceXMLSchemaVersion(String persistenceXMLSchemaVersion) {
		this.persistenceXMLSchemaVersion = persistenceXMLSchemaVersion;
	}

	@Override
	public String getPersistenceXMLSchemaVersion() {
		return this.persistenceXMLSchemaVersion;
	}

	@Override
	public void setPersistenceProviderPackageName(String persistenceProviderPackageName) {
		this.persistenceProviderPackageName = persistenceProviderPackageName;
	}

	public String getPersistenceProviderPackageName() {
		return this.persistenceProviderPackageName;
	}


	/**
	 * This implementation returns the default ClassLoader.
	 * @see org.springframework.util.ClassUtils#getDefaultClassLoader()
	 */
	@Override
	public ClassLoader getClassLoader() {
		return ClassUtils.getDefaultClassLoader();
	}

	/**
	 * This implementation throws an UnsupportedOperationException.
	 */
	@Override
	public void addTransformer(ClassTransformer classTransformer) {
		throw new UnsupportedOperationException("addTransformer not supported");
	}

	/**
	 * This implementation throws an UnsupportedOperationException.
	 */
	@Override
	public ClassLoader getNewTempClassLoader() {
		throw new UnsupportedOperationException("getNewTempClassLoader not supported");
	}


	@Override
	public String toString() {
		return "PersistenceUnitInfo: name '" + this.persistenceUnitName +
				"', root URL [" + this.persistenceUnitRootUrl + "]";
	}

}
/*
 * Copyright 2016 Yahoo Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.yahoo.athenz.common.server.rest;

import javax.servlet.http.HttpServletRequest;

import com.yahoo.athenz.auth.impl.PrincipalAuthority;
import org.mockito.ArgumentMatchers;
import org.mockito.Mockito;

import static org.mockito.Mockito.times;
import static org.testng.Assert.*;

import org.testng.annotations.Test;

import com.yahoo.athenz.auth.Authority;
import com.yahoo.athenz.auth.Authorizer;
import com.yahoo.athenz.auth.Principal;
import com.yahoo.athenz.auth.Authority.CredSource;

import java.security.cert.X509Certificate;

/**
 * Unit tests for the Http helper class, covering authentication via
 * certificate/request/header credential sources, authorization checks,
 * and cookie extraction. Authorities and requests are Mockito mocks.
 */
public class HttpTest {

    // A fresh AuthorityList must expose a non-null (empty) authority collection.
    @Test
    public void testAuthoritiesNotNull() {
        Http.AuthorityList authorities = new Http.AuthorityList();
        assertNotNull(authorities.getAuthorities());
    }

    // A null AuthorityList is a server-side configuration problem -> 500.
    @Test
    public void testAuthenticateInternalServerError() {
        HttpServletRequest httpServletRequest = Mockito.mock(HttpServletRequest.class);
        try {
            Http.authenticate(httpServletRequest, null);
        } catch (ResourceException expected) {
            assertEquals(expected.getCode(), 500);
        }
    }

    // Certificate-based authority but no client certificate on the request -> 401.
    @Test
    public void testAuthenticateCertificateFailure() {
        HttpServletRequest httpServletRequest = Mockito.mock(HttpServletRequest.class);
        Http.AuthorityList authorities = new Http.AuthorityList();
        Authority authority = Mockito.mock(Authority.class);
        Mockito.when(authority.getCredSource()).thenReturn(CredSource.CERTIFICATE);
        authorities.add(authority);
        try {
            Http.authenticate(httpServletRequest, authorities);
        } catch (ResourceException expected) {
            assertEquals(expected.getCode(), 401);
        }
    }

    // Successful certificate authentication: cert attribute present and the
    // authority's authenticate(...) returns a principal.
    @Test
    public void testAuthenticateCertificate() {
        HttpServletRequest httpServletRequest = Mockito.mock(HttpServletRequest.class);
        Http.AuthorityList authorities = new Http.AuthorityList();
        Authority authority = Mockito.mock(Authority.class);
        Mockito.when(authority.getCredSource()).thenReturn(CredSource.CERTIFICATE);
        X509Certificate[] certs = new X509Certificate[1];
        certs[0] = Mockito.mock(X509Certificate.class);
        Mockito.when(httpServletRequest.getAttribute(Http.JAVAX_CERT_ATTR)).thenReturn(certs);
        Principal principal = Mockito.mock(Principal.class);
        Mockito.when(authority.authenticate(ArgumentMatchers.any(X509Certificate[].class),
                ArgumentMatchers.any())).thenReturn(principal);
        authorities.add(authority);
        assertNotNull(Http.authenticate(httpServletRequest, authorities));
    }

    // Successful authentication via a REQUEST credential-source authority.
    @Test
    public void testAuthenticateRequest() {
        HttpServletRequest httpServletRequest = Mockito.mock(HttpServletRequest.class);
        Http.AuthorityList authorities = new Http.AuthorityList();
        Authority authority = Mockito.mock(Authority.class);
        Mockito.when(authority.getCredSource()).thenReturn(CredSource.REQUEST);
        Principal principal = Mockito.mock(Principal.class);
        Mockito.when(authority.authenticate(ArgumentMatchers.any(HttpServletRequest.class),
                ArgumentMatchers.any())).thenReturn(principal);
        authorities.add(authority);
        assertNotNull(Http.authenticate(httpServletRequest, authorities));
    }

    // Header authority with missing credentials -> 401, and the WWW-Authenticate
    // challenge is stored on the request attribute for the error response.
    @Test
    public void testAuthenticateHeaderFailure() {
        HttpServletRequest httpServletRequest = Mockito.mock(HttpServletRequest.class);
        Http.AuthorityList authorities = new Http.AuthorityList();
        Authority authority = Mockito.mock(Authority.class);
        Mockito.when(authority.getCredSource()).thenReturn(CredSource.HEADER);
        Mockito.when(authority.getHeader()).thenReturn("Cookie.hogehoge");
        Mockito.when(authority.getAuthenticateChallenge()).thenReturn("Basic realm=\"athenz\"");
        authorities.add(authority);
        try {
            Http.authenticate(httpServletRequest, authorities);
        } catch (ResourceException expected) {
            assertEquals(expected.getCode(), 401);
        }
        Mockito.verify(httpServletRequest, times(1))
                .setAttribute("com.yahoo.athenz.auth.credential.challenges", "Basic realm=\"athenz\"");
    }

    // Two real PrincipalAuthority instances both fail -> 401, and each failure
    // records an attribute (hence the times(2) verification).
    @Test
    public void testAuthenticateHeaderErrorMessage() {
        HttpServletRequest httpServletRequest = Mockito.mock(HttpServletRequest.class);
        Mockito.when(httpServletRequest.getHeader("Athenz-Principal-Auth")).thenReturn("Creds");
        Http.AuthorityList authorities = new Http.AuthorityList();
        PrincipalAuthority authority1 = new PrincipalAuthority();
        authorities.add(authority1);
        PrincipalAuthority authority2 = new PrincipalAuthority();
        authorities.add(authority2);
        try {
            Http.authenticate(httpServletRequest, authorities);
        } catch (ResourceException expected) {
            assertEquals(expected.getCode(), 401);
        }
        Mockito.verify(httpServletRequest, times(2))
                .setAttribute(ArgumentMatchers.anyString(), ArgumentMatchers.anyString());
    }

    // Multiple failing authorities -> their challenges are concatenated
    // (comma-separated) into a single request attribute.
    @Test
    public void testAuthenticateHeaderFailureMultipleAuth() {
        HttpServletRequest httpServletRequest = Mockito.mock(HttpServletRequest.class);
        Http.AuthorityList authorities = new Http.AuthorityList();
        Authority authority1 = Mockito.mock(Authority.class);
        Mockito.when(authority1.getCredSource()).thenReturn(CredSource.HEADER);
        Mockito.when(authority1.getHeader()).thenReturn("Cookie.hogehoge");
        Mockito.when(authority1.getAuthenticateChallenge()).thenReturn("Basic realm=\"athenz\"");
        authorities.add(authority1);
        Authority authority2 = Mockito.mock(Authority.class);
        Mockito.when(authority2.getCredSource()).thenReturn(CredSource.REQUEST);
        Mockito.when(authority2.getAuthenticateChallenge()).thenReturn("AthenzRequest realm=\"athenz\"");
        authorities.add(authority2);
        try {
            Http.authenticate(httpServletRequest, authorities);
        } catch (ResourceException expected) {
            assertEquals(expected.getCode(), 401);
        }
        Mockito.verify(httpServletRequest, times(1))
                .setAttribute("com.yahoo.athenz.auth.credential.challenges",
                        "Basic realm=\"athenz\", AthenzRequest realm=\"athenz\"");
    }

    // An authority whose getHeader() is null must not cause an NPE, just a 401.
    // NOTE(review): the mocked authority is never added to the authority list,
    // so this exercises the empty-list path — confirm that is intentional.
    @Test
    public void testAuthenticateHeaderNull() {
        HttpServletRequest httpServletRequest = Mockito.mock(HttpServletRequest.class);
        Http.AuthorityList authorities = new Http.AuthorityList();
        Authority authority = Mockito.mock(Authority.class);
        Mockito.when(authority.getCredSource()).thenReturn(CredSource.HEADER);
        Mockito.when(authority.getHeader()).thenReturn(null);
        // we should not get npe - instead standard 401
        try {
            Http.authenticate(httpServletRequest, authorities);
        } catch (ResourceException expected) {
            assertEquals(expected.getCode(), 401);
        }
    }

    // authenticatedUser with no usable credentials -> 401.
    @Test
    public void testAuthenticatedUserInvalidCredentials() {
        HttpServletRequest httpServletRequest = Mockito.mock(HttpServletRequest.class);
        Http.AuthorityList authorities = new Http.AuthorityList();
        try {
            Http.authenticatedUser(httpServletRequest, authorities);
        } catch (ResourceException expected) {
            assertEquals(expected.getCode(), 401);
        }
    }

    // Happy path: header credentials authenticate and the principal's full
    // name is returned.
    @Test
    public void testAuthenticatedUser() {
        HttpServletRequest httpServletRequest = Mockito.mock(HttpServletRequest.class);
        Principal principal = Mockito.mock(Principal.class);
        Mockito.when(principal.getFullName()).thenReturn("athenz.api");
        Authority authority = Mockito.mock(Authority.class);
        Mockito.when(authority.getCredSource()).thenReturn(Authority.CredSource.HEADER);
        Mockito.when(authority.getHeader()).thenReturn("Athenz-Principal-Auth");
        Mockito.when(httpServletRequest.getHeader("Athenz-Principal-Auth")).thenReturn("Creds");
        Mockito.when(authority.authenticate(ArgumentMatchers.any(), ArgumentMatchers.any(),
                ArgumentMatchers.any(), ArgumentMatchers.any())).thenReturn(principal);
        Http.AuthorityList authorities = new Http.AuthorityList();
        authorities.add(authority);
        assertEquals(Http.authenticatedUser(httpServletRequest, authorities), "athenz.api");
    }

    // authorizedUser with no usable credentials -> 401 before authorization runs.
    @Test
    public void testAuthorizedUserUserInvalidCredentials() {
        HttpServletRequest httpServletRequest = Mockito.mock(HttpServletRequest.class);
        Authorizer authorizer = Mockito.mock(Authorizer.class);
        Http.AuthorityList authorities = new Http.AuthorityList();
        try {
            Http.authorizedUser(httpServletRequest, authorities, authorizer, "action", null, null);
        } catch (ResourceException expected) {
            assertEquals(expected.getCode(), 401);
        }
    }

    // Happy path: authentication succeeds and the authorizer grants access.
    @Test
    public void testAuthorizedUser() {
        HttpServletRequest httpServletRequest = Mockito.mock(HttpServletRequest.class);
        Principal principal = Mockito.mock(Principal.class);
        Mockito.when(principal.getFullName()).thenReturn("athenz.api");
        Authorizer authorizer = Mockito.mock(Authorizer.class);
        Mockito.when(authorizer.access(ArgumentMatchers.any(), ArgumentMatchers.any(),
                ArgumentMatchers.any(Principal.class), ArgumentMatchers.any())).thenReturn(true);
        Authority authority = Mockito.mock(Authority.class);
        Mockito.when(authority.getCredSource()).thenReturn(Authority.CredSource.HEADER);
        Mockito.when(authority.getHeader()).thenReturn("Athenz-Principal-Auth");
        Mockito.when(httpServletRequest.getHeader("Athenz-Principal-Auth")).thenReturn("Creds");
        Mockito.when(authority.authenticate(ArgumentMatchers.any(), ArgumentMatchers.any(),
                ArgumentMatchers.any(), ArgumentMatchers.any())).thenReturn(principal);
        Http.AuthorityList authorities = new Http.AuthorityList();
        authorities.add(authority);
        assertEquals("athenz.api",
                Http.authorizedUser(httpServletRequest, authorities, authorizer, "action", "resource", null));
    }

    // A null resource is a bad request -> 400.
    @Test
    public void testAuthorizedBadRequest() {
        Authorizer authorizer = Mockito.mock(Authorizer.class);
        Principal principal = Mockito.mock(Principal.class);
        try {
            Http.authorize(authorizer, principal, "action", null, null);
        } catch (ResourceException expected) {
            assertEquals(expected.getCode(), 400);
        }
    }

    // A null authorizer is a server-side configuration problem -> 500.
    @Test
    public void testAuthorizedInternalServerError() {
        Principal principal = Mockito.mock(Principal.class);
        try {
            Http.authorize(null, principal, "action", "resource", null);
        } catch (ResourceException expected) {
            assertEquals(expected.getCode(), 500);
        }
    }

    // Authorizer denies (mock returns false by default) -> 403.
    @Test
    public void testAuthorizedForbidden() {
        Authorizer authorizer = Mockito.mock(Authorizer.class);
        Principal principal = Mockito.mock(Principal.class);
        try {
            Http.authorize(authorizer, principal, "action", "resource", null);
        } catch (ResourceException expected) {
            assertEquals(expected.getCode(), 403);
        }
    }

    // Cookie lookup: null cookie array, matching names, and a missing name.
    @Test
    public void testGetCookieValue() {
        HttpServletRequest httpServletRequest = Mockito.mock(HttpServletRequest.class);
        Mockito.when(httpServletRequest.getCookies()).thenReturn(null);
        assertNull(Http.getCookieValue(httpServletRequest, "cookie1"));
        assertNull(Http.getCookieValue(httpServletRequest, "cookie2"));
        javax.servlet.http.Cookie[] cookies = new javax.servlet.http.Cookie[2];
        cookies[0] = new javax.servlet.http.Cookie("cookie1", "value1");
        cookies[1] = new javax.servlet.http.Cookie("cookie2", "value2");
        Mockito.when(httpServletRequest.getCookies()).thenReturn(cookies);
        assertEquals(Http.getCookieValue(httpServletRequest, "cookie1"), "value1");
        assertEquals(Http.getCookieValue(httpServletRequest, "cookie2"), "value2");
        assertNull(Http.getCookieValue(httpServletRequest, "cookie3"));
    }

    // An authority with a null header name yields null credentials.
    @Test
    public void testAuthenticatingCredentialsHeaderNull() {
        Authority authority = Mockito.mock(Authority.class);
        Mockito.when(authority.getHeader()).thenReturn(null);
        assertNull(Http.authenticatingCredentials(null, authority));
    }
}
/**
 *
 * This file is part of the https://github.com/BITPlan/can4eve open source project
 *
 * Copyright 2017 BITPlan GmbH https://github.com/BITPlan
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 *
 * You may obtain a copy of the License at
 *
 *  http:www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.bitplan.obdii.elm327;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import com.bitplan.can4eve.VehicleGroup;
import com.bitplan.elm327.Packet;
import com.bitplan.elm327.ResponseHandler;

/**
 * simulates an ELM327
 *
 * <p>Commands received on the connection are interpreted here and answered
 * with canned ELM327/STN responses; "monitor" commands (AT MA / STM) start
 * background {@link Monitor} threads that stream simulated CAN frames.
 *
 * @author wf
 *
 */
public class ELM327SimulatorConnection extends ELM327 implements ResponseHandler {

  public int delay = 0; // 0,1 millisecs delay
  public int delaynano = 100;

  private String filter;            // CAN id filter set via AT CRA
  List<Monitor> monitors = new ArrayList<Monitor>();
  private String canprotcode;       // protocol code set via AT SP
  private String canprot;           // human readable protocol name
  private String ecu;               // ECU selected via AT SH / ST FCSH
  private Monitor monitor;

  /**
   * constructor
   *
   * @param vehicleGroup the vehicle group to simulate for
   */
  public ELM327SimulatorConnection(VehicleGroup vehicleGroup) {
    super(vehicleGroup);
    // the simulator itself is the response handler for incoming commands
    this.getCon().setHandleResponses(true);
    this.getCon().setResponseHandler(this);
  }

  /**
   * ignore attempts to switch response handling off - the simulator
   * always needs to see the incoming commands
   */
  public void setHandleResponses(boolean handleResponses) {
    // ignore trying to set e.g. to false
  }

  /**
   * handle the responses
   *
   * <p>Parses one incoming command (upper-cased, whitespace stripped) and
   * emits the simulated reply. AT/ST configuration commands are handled
   * first; anything else is treated as an OBD request.
   */
  @Override
  public void handleResponse(Packet response) {
    // echo
    // super.handleResponse(response);
    // ignore null response
    if (response == null) {
      log(" received null response");
      return;
    }
    String command = response.getData().toUpperCase().trim().replace(" ", "");
    log(" received command " + command);
    try {
      if (command.startsWith("AT")) {
        command = command.substring(2).trim();
        // any AT command implicitly ends a running monitor session
        stopMonitors();
        if (command.equals("I")) {
          outputWithPrompt("ELM327 v1.3a");
        } else if (command.equals("@1")) {
          outputWithPrompt("SCANTOOL.NET LLC");
        } else if (command.equals("D")) {
          log("Setting Defaults");
          outputWithPrompt("OK");
        } else if (command.equals("Z")) {
          log("Resetting OBD");
          filter = null;
          ecu = null;
          outputWithPrompt("OK");
        } else if (command.startsWith("L")) {
          String option = command.substring(1).trim();
          log("Set Linefeed handling to " + option);
          if (option.startsWith("1")) {
            setSendLineFeed(true);
          } else if (option.startsWith("0")) {
            setSendLineFeed(false);
          }
          outputWithPrompt("OK");
        } else if (command.startsWith("H")) {
          String option = command.substring(1).trim();
          log("Set header handling to " + option);
          if (option.startsWith("1")) {
            setHeader(true);
          } else if (option.startsWith("0")) {
            setHeader(false);
          }
          outputWithPrompt("OK");
        } else if (command.equals("RV")) {
          outputWithPrompt("14.5V");
        } else if (command.equals("D1") || command.equals("D0")) {
          String option = command.substring(1).trim();
          log("Set length handling to " + option);
          if (option.startsWith("1")) {
            setLength(true);
          } else if (option.startsWith("0")) {
            setLength(false);
          }
          outputWithPrompt("OK");
        } else if (command.equals("E1") || command.equals("E0")) {
          String option = command.substring(1).trim();
          log("Set echo handling to " + option);
          if (option.startsWith("1")) {
            setEcho(true);
          } else if (option.startsWith("0")) {
            setEcho(false);
          }
          outputWithPrompt("OK");
        } else if (command.startsWith("CAF")) {
          String option = command.substring(3).trim();
          log("Setting automatic formatting to " + option);
          // TODO implement non formatted mode e.g. with simulated DATA errors
          outputWithPrompt("OK");
        } else if (command.startsWith("FCSD")) {
          outputWithPrompt("OK");
        } else if (command.startsWith("FCSM1")) {
          outputWithPrompt("OK");
        } else if (command.startsWith("FCSH")) {
          // ECU selection - "FCSH" is 4 characters long
          ecu = command.substring(4);
          log("fcsh selected ecu=" + ecu);
          outputWithPrompt("OK");
        } else if (command.startsWith("SH")) {
          // ECU selection - "SH" is only 2 characters long
          // (fixed: substring(4) was copy-pasted from the FCSH branch and
          // truncated the ECU id, e.g. "SH762" yielded "2" instead of "762")
          ecu = command.substring(2);
          log("sh selected ecu=" + ecu);
          outputWithPrompt("OK");
        } else if (command.startsWith("CRA")) {
          filter = command.substring(3).trim();
          outputWithPrompt("OK");
        } else if (command.startsWith("MA")) {
          // start monitoring with the current filter and format settings
          monitor = new Monitor();
          monitor.init(this, filter, isHeader(), isLength());
          monitor.startUp();
          monitors.add(monitor);
        } else if (command.equals("DP")) {
          log("Reporting can protocol " + canprotcode + "=" + canprot);
          outputWithPrompt(canprot);
        } else if (command.startsWith("SP")) {
          /**
           * 0 Automatic protocol detection 1 SAE J1850 PWM (41.6 kbaud) 2 SAE
           * J1850 VPW (10.4 kbaud) 3 ISO 9141-2 (5 baud init, 10.4 kbaud) 4 ISO
           * 14230-4 KWP (5 baud init, 10.4 kbaud) 5 ISO 14230-4 KWP (fast init,
           * 10.4 kbaud) 6 ISO 15765-4 CAN (11 bit ID, 500 kbaud) 7 ISO 15765-4
           * CAN (29 bit ID, 500 kbaud) 8 ISO 15765-4 CAN (11 bit ID, 250 kbaud)
           * - used mainly on utility vehicles and Volvo 9 ISO 15765-4 CAN (29
           * bit ID, 250 kbaud) - used mainly on utility vehicles and Volvo
           */
          canprotcode = command.substring(2).trim();
          canprot = canprotcode;
          if ("1".equals(canprotcode)) {
            canprot = "SAE J1850 PWM";
          } else if ("2".equals(canprotcode)) {
            canprot = "SAE J1850 VPW";
          } else if ("3".equals(canprotcode)) {
            canprot = "ISO 9141-2";
          } else if ("4".equals(canprotcode)) {
            canprot = "ISO 14230-4 (KWP 5BAUD)";
          } else if ("5".equals(canprotcode)) {
            canprot = "ISO 14230-4 (KWP FAST)";
          } else if ("6".equals(canprotcode)) {
            canprot = "ISO 15765-4 (CAN 11/500)"; // ISO 15765-4 CAN (11 bit ID,
                                                  // 500 kbaud)";
          } else if ("7".equals(canprotcode)) {
            canprot = "ISO 15765-4 (CAN 29/500)";
          } else if ("8".equals(canprotcode)) {
            canprot = "ISO 15765-4 (CAN 11/250)";
          } else if ("9".equals(canprotcode)) {
            canprot = "ISO 15765-4 (CAN 29/250)";
          } else if ("A".equals(canprotcode)) {
            canprot = "SAE J1939 (CAN 29/250)";
          }
          log("selected CAN protocol " + canprotcode + "=" + canprot);
          outputWithPrompt("OK");
        }
      } else {
        if (command.equals("")) {
          // FIXME - this e.g. restarts MA command
          // this.stopMonitors();
          outputWithPrompt("OK");
        } else if (command.equals("STFAC")) {
          outputWithPrompt("OK");
        } else if (command.equals("STI")) {
          outputWithPrompt("STN1130 v4.0.1");
        } else if (command.equals("STDI")) {
          outputWithPrompt("OBDLink SX r4.2");
        } else if (command.startsWith("STFAP")) {
          outputWithPrompt("OK");
        } else if (command.equals("STM")) {
          // STN monitor mode - monitor without a filter
          monitor = new Monitor();
          monitor.init(this, isHeader(), isLength());
          monitor.startUp();
          monitors.add(monitor);
        } else if (command.equals("2101")) {
          // canned reply for the Mitsubishi 2101 PID request
          // outputWithPrompt("OK");
          this.getCon()
              .output("762 10 2E 61 01 D2 D2 01 90\n"
                  + "762 21 00 01 8F 4A 0C D0 4E\n"
                  + "75A 03 E8 03 E8 64 64 46 45\n"
                  + "762 22 02 4B 0C 01 5E 01 5D\n"
                  + "762 23 01 2C 00 FA 00 FA 10\n"
                  + "762 24 0F 0F 01 BF 01 BF 28\n"
                  + "762 25 FE 00 00 01 8F 78 7C\n"
                  + "762 26 64 00 01 00 00 00 00\n"
                  + "75A 03 E8 03 E8 64 64 46 45\n");
        } else if (command.equals("0100")) {
          // simulate a vehicle that does not support standard OBD mode 01
          outputWithPrompt("SEARCHING ...");
          this.getCon().pause(2000, 0);
          outputWithPrompt("UNABLE TO CONNECT");
        } else {
          log("unknown command '" + command + "'");
        }
      }
    } catch (Exception e) {
      log("handleResponse failed " + e.getMessage());
      if (isDebug())
        e.printStackTrace();
    }
    // TODO - check if we need a pause here
    // pause(delay, delaynano);
  }

  /**
   * output the given response followed by a prompt
   *
   * @param response
   * @throws IOException
   */
  protected void outputWithPrompt(String response) throws IOException {
    this.getCon().output(response + "\r\n>");
  }

  /**
   * stop the monitors
   */
  protected void stopMonitors() {
    for (Monitor monitor : monitors) {
      monitor.halt();
    }
    monitors.clear();
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to you under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.calcite.sql.dialect; import org.apache.calcite.avatica.util.TimeUnitRange; import org.apache.calcite.config.NullCollation; import org.apache.calcite.rel.type.RelDataTypeSystem; import org.apache.calcite.sql.SqlAbstractDateTimeLiteral; import org.apache.calcite.sql.SqlCall; import org.apache.calcite.sql.SqlDialect; import org.apache.calcite.sql.SqlFunction; import org.apache.calcite.sql.SqlFunctionCategory; import org.apache.calcite.sql.SqlIntervalLiteral; import org.apache.calcite.sql.SqlIntervalQualifier; import org.apache.calcite.sql.SqlKind; import org.apache.calcite.sql.SqlLiteral; import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.SqlNodeList; import org.apache.calcite.sql.SqlUtil; import org.apache.calcite.sql.SqlWriter; import org.apache.calcite.sql.fun.SqlStdOperatorTable; import org.apache.calcite.sql.parser.SqlParserPos; import org.apache.calcite.sql.type.ReturnTypes; /** * A <code>SqlDialect</code> implementation for the Microsoft SQL Server * database. 
 */
public class MssqlSqlDialect extends SqlDialect {
  public static final Context DEFAULT_CONTEXT = SqlDialect.EMPTY_CONTEXT
      .withDatabaseProduct(SqlDialect.DatabaseProduct.MSSQL)
      .withIdentifierQuoteString("[")
      .withCaseSensitive(false)
      .withNullCollation(NullCollation.LOW);

  public static final SqlDialect DEFAULT = new MssqlSqlDialect(DEFAULT_CONTEXT);

  /** MSSQL's SUBSTRING uses function syntax, {@code SUBSTRING(s, start, len)},
   * rather than the standard {@code SUBSTRING(s FROM start FOR len)}. */
  private static final SqlFunction MSSQL_SUBSTRING =
      new SqlFunction("SUBSTRING", SqlKind.OTHER_FUNCTION,
          ReturnTypes.ARG0_NULLABLE_VARYING, null, null,
          SqlFunctionCategory.STRING);

  /** Whether to generate "SELECT TOP(fetch)" rather than
   * "SELECT ... FETCH NEXT fetch ROWS ONLY". */
  private final boolean top;

  /** Creates a MssqlSqlDialect. */
  public MssqlSqlDialect(Context context) {
    super(context);
    // MSSQL 2008 (version 10) and earlier only supports TOP
    // MSSQL 2012 (version 11) and higher supports OFFSET and FETCH
    top = context.databaseMajorVersion() < 11;
  }

  /** {@inheritDoc}
   *
   * <p>MSSQL does not support NULLS FIRST, so we emulate using CASE
   * expressions. For example,
   *
   * <blockquote>{@code ORDER BY x NULLS FIRST}</blockquote>
   *
   * <p>becomes
   *
   * <blockquote>
   * {@code ORDER BY CASE WHEN x IS NULL THEN 0 ELSE 1 END, x}
   * </blockquote>
   */
  @Override public SqlNode emulateNullDirection(SqlNode node,
      boolean nullsFirst, boolean desc) {
    // Default ordering preserved
    if (nullCollation.isDefaultOrder(nullsFirst, desc)) {
      return null;
    }

    // Grouping node should preserve grouping, no emulation needed
    if (node.getKind() == SqlKind.GROUPING) {
      return node;
    }

    // Emulate nulls first/last with case ordering
    final SqlParserPos pos = SqlParserPos.ZERO;
    final SqlNodeList whenList =
        SqlNodeList.of(SqlStdOperatorTable.IS_NULL.createCall(pos, node));
    final SqlNode oneLiteral = SqlLiteral.createExactNumeric("1", pos);
    final SqlNode zeroLiteral = SqlLiteral.createExactNumeric("0", pos);

    if (nullsFirst) {
      // IS NULL THEN 0 ELSE 1 END
      return SqlStdOperatorTable.CASE.createCall(null, pos, null, whenList,
          SqlNodeList.of(zeroLiteral), oneLiteral);
    } else {
      // IS NULL THEN 1 ELSE 0 END
      return SqlStdOperatorTable.CASE.createCall(null, pos, null, whenList,
          SqlNodeList.of(oneLiteral), zeroLiteral);
    }
  }

  /** Suppresses OFFSET/FETCH when this dialect must use TOP instead;
   * see {@link #unparseTopN}. */
  @Override public void unparseOffsetFetch(SqlWriter writer, SqlNode offset,
      SqlNode fetch) {
    if (!top) {
      super.unparseOffsetFetch(writer, offset, fetch);
    }
  }

  /** Emits {@code TOP (fetch)} for pre-2012 MSSQL; the {@code offset}
   * argument cannot be expressed with TOP and is ignored. */
  @Override public void unparseTopN(SqlWriter writer, SqlNode offset,
      SqlNode fetch) {
    if (top) {
      // Per Microsoft:
      // "For backward compatibility, the parentheses are optional in SELECT
      // statements. We recommend that you always use parentheses for TOP in
      // SELECT statements. Doing so provides consistency with its required
      // use in INSERT, UPDATE, MERGE, and DELETE statements."
      //
      // Note that "fetch" is ignored.
      writer.keyword("TOP");
      writer.keyword("(");
      fetch.unparse(writer, -1, -1);
      writer.keyword(")");
    }
  }

  /** Writes date/time literals as plain quoted strings, e.g. {@code '2011-01-01'},
   * without the standard {@code DATE}/{@code TIMESTAMP} keyword prefix. */
  @Override public void unparseDateTimeLiteral(SqlWriter writer,
      SqlAbstractDateTimeLiteral literal, int leftPrec, int rightPrec) {
    writer.literal("'" + literal.toFormattedString() + "'");
  }

  @Override public void unparseCall(SqlWriter writer, SqlCall call,
      int leftPrec, int rightPrec) {
    if (call.getOperator() == SqlStdOperatorTable.SUBSTRING) {
      // MSSQL SUBSTRING always takes exactly three operands (value, start, length).
      if (call.operandCount() != 3) {
        throw new IllegalArgumentException("MSSQL SUBSTRING requires FROM and FOR arguments");
      }
      SqlUtil.unparseFunctionSyntax(MSSQL_SUBSTRING, writer, call, false);
    } else {
      switch (call.getKind()) {
      case FLOOR:
        // Only the two-operand FLOOR(datetime TO unit) form needs special
        // handling; plain numeric FLOOR falls through to the default.
        if (call.operandCount() != 2) {
          super.unparseCall(writer, call, leftPrec, rightPrec);
          return;
        }
        unparseFloor(writer, call);
        break;
      default:
        super.unparseCall(writer, call, leftPrec, rightPrec);
      }
    }
  }

  @Override public boolean supportsCharSet() {
    return false;
  }

  @Override public boolean supportsGroupByWithRollup() {
    return true;
  }

  @Override public boolean supportsGroupByWithCube() {
    return true;
  }

  /**
   * Unparses datetime floor for Microsoft SQL Server.
   * There is no TRUNC function, so simulate this using calls to CONVERT.
   *
   * @param writer Writer
   * @param call Call
   */
  private void unparseFloor(SqlWriter writer, SqlCall call) {
    SqlLiteral node = call.operand(1);
    TimeUnitRange unit = (TimeUnitRange) node.getValue();

    switch (unit) {
    case YEAR:
      // Truncate the ISO string after "yyyy" (4 chars) and pad back to a date.
      unparseFloorWithUnit(writer, call, 4, "-01-01");
      break;
    case MONTH:
      unparseFloorWithUnit(writer, call, 7, "-01");
      break;
    case WEEK:
      // No fixed-prefix truncation works for weeks; step back to the start of
      // the week with DATEPART(weekday, ...) arithmetic, then truncate to a day.
      // NOTE(review): the result depends on the server's DATEFIRST setting —
      // confirm the intended first day of week.
      writer.print("CONVERT(DATETIME, CONVERT(VARCHAR(10), "
          + "DATEADD(day, - (6 + DATEPART(weekday, ");
      call.operand(0).unparse(writer, 0, 0);
      writer.print(")) % 7, ");
      call.operand(0).unparse(writer, 0, 0);
      writer.print("), 126))");
      break;
    case DAY:
      unparseFloorWithUnit(writer, call, 10, "");
      break;
    case HOUR:
      unparseFloorWithUnit(writer, call, 13, ":00:00");
      break;
    case MINUTE:
      unparseFloorWithUnit(writer, call, 16, ":00");
      break;
    case SECOND:
      unparseFloorWithUnit(writer, call, 19, ":00");
      break;
    default:
      throw new IllegalArgumentException("MSSQL does not support FLOOR for time unit: "
          + unit);
    }
  }

  /** Renders datetime plus/minus interval as a DATEADD call.
   * MSSQL has no DATESUB, so subtraction negates the interval's sign. */
  @Override public void unparseSqlDatetimeArithmetic(SqlWriter writer,
      SqlCall call, SqlKind sqlKind, int leftPrec, int rightPrec) {
    final SqlWriter.Frame frame = writer.startFunCall("DATEADD");
    SqlNode operand = call.operand(1);
    if (operand instanceof SqlIntervalLiteral) {
      //There is no DATESUB method available, so change the sign.
      unparseSqlIntervalLiteralMssql(
          writer, (SqlIntervalLiteral) operand, sqlKind == SqlKind.MINUS ? -1 : 1);
    } else {
      operand.unparse(writer, leftPrec, rightPrec);
    }
    writer.sep(",", true);
    call.operand(0).unparse(writer, leftPrec, rightPrec);
    writer.endList(frame);
  }

  /** Writes the interval's time unit as a bare keyword (DATEADD datepart);
   * compound ranges (e.g. DAY TO HOUR) are rejected. */
  @Override public void unparseSqlIntervalQualifier(SqlWriter writer,
      SqlIntervalQualifier qualifier, RelDataTypeSystem typeSystem) {
    switch (qualifier.timeUnitRange) {
    case YEAR:
    case QUARTER:
    case MONTH:
    case WEEK:
    case DAY:
    case HOUR:
    case MINUTE:
    case SECOND:
    case MILLISECOND:
    case MICROSECOND:
      final String timeUnit = qualifier.timeUnitRange.startUnit.name();
      writer.keyword(timeUnit);
      break;
    default:
      throw new AssertionError("Unsupported type: " + qualifier.timeUnitRange);
    }

    if (null != qualifier.timeUnitRange.endUnit) {
      throw new AssertionError("End unit is not supported now: "
          + qualifier.timeUnitRange.endUnit);
    }
  }

  @Override public void unparseSqlIntervalLiteral(
      SqlWriter writer, SqlIntervalLiteral literal, int leftPrec, int rightPrec) {
    unparseSqlIntervalLiteralMssql(writer, literal, 1);
  }

  /** Writes an interval literal as "unit, [-]value", the operand form
   * DATEADD expects; {@code sign} of -1 flips the literal's own sign. */
  private void unparseSqlIntervalLiteralMssql(
      SqlWriter writer, SqlIntervalLiteral literal, int sign) {
    final SqlIntervalLiteral.IntervalValue interval =
        literal.getValueAs(SqlIntervalLiteral.IntervalValue.class);
    unparseSqlIntervalQualifier(writer, interval.getIntervalQualifier(),
        RelDataTypeSystem.DEFAULT);
    writer.sep(",", true);
    if (interval.getSign() * sign == -1) {
      writer.print("-");
    }
    writer.literal(literal.getValue().toString());
  }

  /** Emulates FLOOR-to-unit by converting to an ISO-8601 string (style 126),
   * keeping the first {@code charLen} characters, appending {@code offset} to
   * restore a parsable datetime, and converting back to DATETIME. */
  private void unparseFloorWithUnit(SqlWriter writer, SqlCall call, int charLen,
      String offset) {
    writer.print("CONVERT");
    SqlWriter.Frame frame = writer.startList("(", ")");
    writer.print("DATETIME, CONVERT(VARCHAR(" + charLen + "), ");
    call.operand(0).unparse(writer, 0, 0);
    writer.print(", 126)");
    if (offset.length() > 0) {
      writer.print("+'" + offset + "'");
    }
    writer.endList(frame);
  }
}
/*
 * Copyright 2013-2014 Richard M. Hightower
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.examples.model.test.movies.media;

import java.util.*;

import com.examples.model.test.movies.likeable.VendorCategory;
import com.examples.model.test.time.TimeZoneHolder;
import com.examples.model.test.time.TimeZoneType;

import org.boon.*;

import static org.boon.Boon.puts;

/**
 * Example domain model for a movie/video item: identity, publication date,
 * category, tags, and the people/players associated with it.
 */
public class Movie {

    protected final String url;
    protected final String id;
    protected VendorCategory category;
    // transient: excluded from serialization; shared class-level constant.
    protected static transient final Class<Movie> videoDomainType = Movie.class;
    // Original publish date as an epoch-style long (units not shown here —
    // presumably milliseconds, as in System.currentTimeMillis(); confirm).
    protected final long pubDate;
    protected final TimeZoneHolder timeZone;
    protected final String title;
    protected final String caption;
    protected final Set<String> players = new HashSet<> ( );
    protected final Set<String> people = new HashSet<> ( );
    protected Set<VendorCategory> tags = new HashSet<> ( );
    // Mutable ranking score; see increaseScore().
    protected int score;
    protected int lengthInSeconds;

    /** No-arg constructor producing an empty movie (EST time zone). */
    protected Movie() {
        tags = new HashSet<> ( );
        id = "";
        title = "";
        caption = "";
        pubDate = 0;
        url = "";
        this.timeZone = new TimeZoneHolder(TimeZoneType.EST);
    }

    /** Static factory for an empty movie. */
    public static Movie video () {
        Movie movie = new Movie();
        return movie;
    }

    /**
     * Full constructor.
     *
     * @param id                  unique movie id
     * @param url                 source URL
     * @param originalPublishDate original publish date (epoch-style long)
     * @param category            primary vendor category
     * @param title               headline/title text
     * @param caption             caption text
     * @param tags                additional categories, copied into this.tags
     * @param players             player identifiers, copied into this.players
     * @param people              people identifiers, copied into this.people
     * @param lengthInSeconds     duration in seconds
     * @param timeZone            time zone holder for the publish date
     */
    protected Movie(String id, String url, long originalPublishDate,
                    VendorCategory category, String title, String caption,
                    List<VendorCategory> tags, List<String> players,
                    List<String> people, int lengthInSeconds,
                    TimeZoneHolder timeZone) {
        this.id = id;
        this.url = url;
        this.pubDate = originalPublishDate;
        this.category = category;
        this.title = title;
        this.caption = caption;
        this.tags.addAll(tags);
        this.players.addAll(players);
        this.people.addAll(people);
        this.timeZone = timeZone;
        this.lengthInSeconds = lengthInSeconds;
    }

    /**
     * Movie id
     * @return the unique id
     */
    public String id() {
        return id;
    }

    /** Original publish date (epoch-style long). */
    public long originalPublishDate() {
        return pubDate;
    }

    /** Headline — returns the title field. */
    public String headline() {
        return title;
    }

    public String caption() {
        return caption;
    }

    public VendorCategory category() {
        return category;
    }

    /** True when at least one player is associated. */
    public boolean hasPlayers() {
        return this.players!=null && this.players.size ()>0;
    }

    public int score() {
        return score;
    }

    /** Adds the given amount to the current score (mutates this movie). */
    public void increaseScore(int increaseScore) {
        this.score += increaseScore;
    }

    public boolean hasPeople() {
        return people!=null && people.size()>0;
    }

    /** Unmodifiable view of the people set. */
    public Set<String> people() {
        return Collections.unmodifiableSet(people);
    }

    public boolean hasCategories() {
        return tags!=null && tags.size()>0;
    }

    /** Unmodifiable view of the tags set (same backing set as tags()). */
    public Set<VendorCategory> categories() {
        return Collections.unmodifiableSet(tags);
    }

    public Set<VendorCategory> tags() {
        return Collections.unmodifiableSet(tags);
    }

    // NOTE(review): unlike people()/tags(), this returns the internal mutable
    // set directly — callers can modify this movie's players. Confirm whether
    // that exposure is intentional.
    public Set<String> players() {
        return players;
    }

    /** Adds a category to this movie's tag set. */
    public void tag(VendorCategory category) {
        this.tags.add(category);
    }

    public String url() {
        return url;
    }

    /** The java.util.TimeZone resolved from the holder. */
    public TimeZone timeZone() {
        return timeZone.timeZone();
    }

    // NOTE(review): equals/hashCode deliberately or accidentally omit
    // lengthInSeconds; equals also checks score while hashCode includes it,
    // so equal objects hash equally, but two movies differing only in
    // lengthInSeconds compare equal. Confirm intent before relying on this
    // in hashed collections.
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        Movie movie = (Movie) o;

        if (pubDate != movie.pubDate) return false;
        if (score != movie.score) return false;
        if (caption != null ? !caption.equals(movie.caption) : movie.caption != null)
            return false;
        if (category != movie.category) return false;
        if (title != null ? !title.equals(movie.title) : movie.title != null)
            return false;
        if (id != null ? !id.equals(movie.id) : movie.id != null) return false;
        if (people != null ? !people.equals(movie.people) : movie.people != null)
            return false;
        if (players != null ? !players.equals(movie.players) : movie.players != null)
            return false;
        if (tags != null ? !tags.equals(movie.tags) : movie.tags != null)
            return false;
        if (timeZone != null ? !timeZone.equals(movie.timeZone) : movie.timeZone != null)
            return false;
        if (url != null ? !url.equals(movie.url) : movie.url != null) return false;

        return true;
    }

    @Override
    public int hashCode() {
        int result = id != null ? id.hashCode() : 0;
        result = 31 * result + (category != null ? category.hashCode() : 0);
        result = 31 * result + (int) (pubDate ^ (pubDate >>> 32));
        result = 31 * result + (timeZone != null ? timeZone.hashCode() : 0);
        result = 31 * result + (title != null ? title.hashCode() : 0);
        result = 31 * result + (caption != null ? caption.hashCode() : 0);
        result = 31 * result + (url != null ? url.hashCode() : 0);
        result = 31 * result + (players != null ? players.hashCode() : 0);
        result = 31 * result + (people != null ? people.hashCode() : 0);
        result = 31 * result + (tags != null ? tags.hashCode() : 0);
        result = 31 * result + score;
        return result;
    }

    @Override
    public String toString() {
        return "Movie{" +
                "id='" + id + '\'' +
                ", category=" + category +
                ", pubDate=" + pubDate +
                ", timeZone=" + timeZone +
                ", title='" + title + '\'' +
                ", caption='" + caption + '\'' +
                ", url='" + url + '\'' +
                ", players=" + players +
                ", people=" + people +
                ", tags=" + tags +
                ", scoreCategory=" + score +
                '}';
    }

    /** Flattens this movie into a positional list (Boon serialization helper).
     * Order matches the full constructor's parameter order, with the time
     * zone itself flattened via TimeZoneHolder.toList(). */
    public List<Object> toList() {
        return Lists.list((Object)id, url, pubDate, category, title,
                caption, tags, players,
                people, lengthInSeconds, timeZone.toList() );
    }

    public static void main (String... args) {
        puts(System.currentTimeMillis());
    }

    public int lengthInSeconds() {
        return lengthInSeconds;
    }
}
package com.fsck.k9.activity;

import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.text.format.DateUtils;

import com.fsck.k9.Account;
import com.fsck.k9.AccountStats;
import com.fsck.k9.R;
import com.fsck.k9.controller.MessagingControllerCommands;
import com.fsck.k9.controller.SimpleMessagingListener;
import com.fsck.k9.service.MailService;
import net.jcip.annotations.GuardedBy;

/**
 * Messaging listener that tracks the current sync/send/processing state and
 * produces a human-readable status line for activities.
 *
 * <p>Thread-safety: all mutable state is guarded by {@code lock}; listener
 * callbacks may arrive on background threads. Subclasses/callers override or
 * rely on {@link #informUserOfStatus()} (a no-op here) to refresh the UI.
 */
public class ActivityListener extends SimpleMessagingListener {
    private final Object lock = new Object();

    @GuardedBy("lock") private Account account = null;
    @GuardedBy("lock") private String loadingFolderName = null;
    @GuardedBy("lock") private String loadingHeaderFolderName = null;
    @GuardedBy("lock") private String loadingAccountDescription = null;
    @GuardedBy("lock") private String sendingAccountDescription = null;
    @GuardedBy("lock") private int folderCompleted = 0;
    @GuardedBy("lock") private int folderTotal = 0;
    @GuardedBy("lock") private String processingAccountDescription = null;
    @GuardedBy("lock") private String processingCommandName = null;

    // Fires once a minute (ACTION_TIME_TICK) so the "next poll in X minutes"
    // text stays fresh while no sync activity is happening.
    private BroadcastReceiver tickReceiver = new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            informUserOfStatus();
        }
    };

    /**
     * Returns the status line to display: an in-progress operation if one is
     * active, otherwise the next poll time or the reason syncing is disabled.
     */
    public String getOperation(Context context) {
        synchronized (lock) {
            if (loadingAccountDescription != null
                    || sendingAccountDescription != null
                    || loadingHeaderFolderName != null
                    || processingAccountDescription != null) {
                return getActionInProgressOperation(context);
            }
        }

        long nextPollTime = MailService.getNextPollTime();
        if (nextPollTime != -1) {
            // -1 means no poll scheduled; a past time means the poll is overdue.
            if (nextPollTime - System.currentTimeMillis() < 0) {
                return context.getString(R.string.status_next_poll_overdue);
            } else {
                CharSequence relativeTimeSpanString = DateUtils.getRelativeTimeSpanString(
                        nextPollTime, System.currentTimeMillis(), DateUtils.MINUTE_IN_MILLIS, 0);
                return context.getString(R.string.status_next_poll, relativeTimeSpanString);
            }
        } else if (MailService.isSyncDisabled()) {
            // Report the most specific reason syncing is off.
            if (MailService.hasNoConnectivity()) {
                return context.getString(R.string.status_no_network);
            } else if (MailService.isSyncNoBackground()) {
                return context.getString(R.string.status_no_background);
            } else if (MailService.isSyncBlocked()) {
                return context.getString(R.string.status_syncing_blocked);
            } else if (MailService.isPollAndPushDisabled()) {
                return context.getString(R.string.status_poll_and_push_disabled);
            } else {
                return context.getString(R.string.status_syncing_off);
            }
        } else {
            return "";
        }
    }

    /**
     * Formats the currently running operation. Must be called with
     * {@code lock} held (it reads several guarded fields).
     */
    @GuardedBy("lock")
    private String getActionInProgressOperation(Context context) {
        String progress = folderTotal > 0
                ? context.getString(R.string.folder_progress, folderCompleted, folderTotal)
                : "";

        if (loadingFolderName != null || loadingHeaderFolderName != null) {
            String displayName;
            if (loadingHeaderFolderName != null) {
                // Header fetch takes precedence over a body sync for display.
                displayName = loadingHeaderFolderName;
            } else {
                displayName = loadingFolderName;
            }

            // Substitute localized names for the special inbox/outbox folders.
            if (account != null) {
                if (displayName.equalsIgnoreCase(account.getInboxFolderName())) {
                    displayName = context.getString(R.string.special_mailbox_name_inbox);
                } else if (displayName.equalsIgnoreCase(account.getOutboxFolderName())) {
                    displayName = context.getString(R.string.special_mailbox_name_outbox);
                }
            }

            if (loadingHeaderFolderName != null) {
                return context.getString(R.string.status_loading_account_folder_headers,
                        loadingAccountDescription, displayName, progress);
            } else {
                return context.getString(R.string.status_loading_account_folder,
                        loadingAccountDescription, displayName, progress);
            }
        } else if (sendingAccountDescription != null) {
            return context.getString(R.string.status_sending_account,
                    sendingAccountDescription, progress);
        } else if (processingAccountDescription != null) {
            return context.getString(R.string.status_processing_account,
                    processingAccountDescription,
                    processingCommandName != null
                            ? commandTitleForName(context, processingCommandName)
                            : "",
                    progress);
        } else {
            return "";
        }
    }

    /** Maps a pending-command name to its localized title; "" if unknown. */
    private String commandTitleForName(Context context, String processingCommandName) {
        switch (processingCommandName) {
            case MessagingControllerCommands.COMMAND_APPEND:
                return context.getString(R.string.status_command_append);
            case MessagingControllerCommands.COMMAND_MARK_ALL_AS_READ:
                return context.getString(R.string.status_command_mark_all_as_read);
            case MessagingControllerCommands.COMMAND_SET_FLAG:
                return context.getString(R.string.status_command_set_flag);
            case MessagingControllerCommands.COMMAND_EXPUNGE:
                return context.getString(R.string.status_command_expunge);
            case MessagingControllerCommands.COMMAND_MOVE_OR_COPY:
                return context.getString(R.string.status_command_move_or_copy);
            case MessagingControllerCommands.COMMAND_EMPTY_TRASH:
                return context.getString(R.string.status_command_empty_trash);
        }

        return "";
    }

    /** Registers the minute-tick receiver; pair with {@link #onPause}. */
    public void onResume(Context context) {
        context.registerReceiver(tickReceiver, new IntentFilter(Intent.ACTION_TIME_TICK));
    }

    public void onPause(Context context) {
        context.unregisterReceiver(tickReceiver);
    }

    // Intentionally a no-op; subclasses override to refresh their UI.
    public void informUserOfStatus() {
    }

    @Override
    public void synchronizeMailboxFinished(Account account, String folder,
            int totalMessagesInMailbox, int numNewMessages) {
        synchronized (lock) {
            loadingAccountDescription = null;
            loadingFolderName = null;
            this.account = null;
        }
        informUserOfStatus();
    }

    @Override
    public void synchronizeMailboxStarted(Account account, String folder) {
        synchronized (lock) {
            loadingAccountDescription = account.getDescription();
            loadingFolderName = folder;
            this.account = account;
            folderCompleted = 0;
            folderTotal = 0;
        }
        informUserOfStatus();
    }

    @Override
    public void synchronizeMailboxHeadersStarted(Account account, String folder) {
        synchronized (lock) {
            loadingAccountDescription = account.getDescription();
            loadingHeaderFolderName = folder;
        }
        informUserOfStatus();
    }

    @Override
    public void synchronizeMailboxHeadersProgress(Account account, String folder,
            int completed, int total) {
        synchronized (lock) {
            folderCompleted = completed;
            folderTotal = total;
        }
        informUserOfStatus();
    }

    @Override
    public void synchronizeMailboxHeadersFinished(Account account, String folder,
            int total, int completed) {
        synchronized (lock) {
            loadingHeaderFolderName = null;
            folderCompleted = 0;
            folderTotal = 0;
        }
        informUserOfStatus();
    }

    @Override
    public void synchronizeMailboxProgress(Account account, String folder,
            int completed, int total) {
        synchronized (lock) {
            folderCompleted = completed;
            folderTotal = total;
        }
        informUserOfStatus();
    }

    @Override
    public void synchronizeMailboxFailed(Account account, String folder, String message) {
        synchronized (lock) {
            loadingAccountDescription = null;
            loadingHeaderFolderName = null;
            loadingFolderName = null;
            this.account = null;
        }
        informUserOfStatus();
    }

    @Override
    public void sendPendingMessagesStarted(Account account) {
        synchronized (lock) {
            sendingAccountDescription = account.getDescription();
        }
        informUserOfStatus();
    }

    @Override
    public void sendPendingMessagesCompleted(Account account) {
        synchronized (lock) {
            sendingAccountDescription = null;
        }
        informUserOfStatus();
    }

    @Override
    public void sendPendingMessagesFailed(Account account) {
        synchronized (lock) {
            sendingAccountDescription = null;
        }
        informUserOfStatus();
    }

    @Override
    public void pendingCommandsProcessing(Account account) {
        synchronized (lock) {
            processingAccountDescription = account.getDescription();
            folderCompleted = 0;
            folderTotal = 0;
        }
        informUserOfStatus();
    }

    @Override
    public void pendingCommandsFinished(Account account) {
        synchronized (lock) {
            processingAccountDescription = null;
        }
        informUserOfStatus();
    }

    @Override
    public void pendingCommandStarted(Account account, String commandName) {
        synchronized (lock) {
            processingCommandName = commandName;
        }
        informUserOfStatus();
    }

    @Override
    public void pendingCommandCompleted(Account account, String commandName) {
        synchronized (lock) {
            processingCommandName = null;
        }
        informUserOfStatus();
    }

    @Override
    public void searchStats(AccountStats stats) {
        informUserOfStatus();
    }

    @Override
    public void systemStatusChanged() {
        informUserOfStatus();
    }

    @Override
    public void folderStatusChanged(Account account, String folder, int unreadMessageCount) {
        informUserOfStatus();
    }

    public int getFolderCompleted() {
        synchronized (lock) {
            return folderCompleted;
        }
    }

    public int getFolderTotal() {
        synchronized (lock) {
            return folderTotal;
        }
    }
}
package com.user;

import java.io.Serializable;
import java.util.Calendar;
import java.util.List;
import java.util.ResourceBundle;

import javax.annotation.PostConstruct;
import javax.ejb.EJB;
import javax.faces.application.FacesMessage;
import javax.faces.bean.ManagedBean;
import javax.faces.bean.RequestScoped;
import javax.faces.context.ExternalContext;
import javax.faces.context.FacesContext;
import javax.servlet.http.HttpServletRequest;

import org.primefaces.model.LazyDataModel;

import com.AuthBean;
import com.LogBean;
import com.facade.MessageFacade;
import com.facade.ProjectFacade;
import com.facade.ProjectStatusFacade;
import com.facade.UserFacade;
import com.model.Message;
import com.model.Project;
import com.model.ProjectStatus;
import com.model.User;

/**
 * Request-scoped JSF backing bean for a user's profile page: loads the
 * profile identified by the {@code userId} request parameter and exposes
 * lazy data models for the user's projects, messages, and all users.
 */
@RequestScoped
@ManagedBean(name = "profileView", eager = true)
public class ProfileViewBean implements Serializable {

    /**
     * @author ttt
     */
    private static final long serialVersionUID = 1L;

    // Logged-in user, pulled from the session map in init().
    private User user;
    // Profile being viewed (may differ from the logged-in user).
    private User profile;
    private Integer userId;

    private LazyDataModel<Project> lazyModelProject;
    private Project selectedProject;
    private List<ProjectStatus> projectStatuses;

    private LazyDataModel<Message> lazyModelMessage;
    private Message selectedMessage;

    private LazyDataModel<User> lazyModelUser;
    private User selectedUser;

    // NOTE(review): static Calendar captured once at class load —
    // cal.getTime() in updateUser() returns the time the class was loaded,
    // not the current time. Confirm whether "new Date()" was intended.
    private static Calendar cal = Calendar.getInstance();

    // Per-request JSF plumbing, captured at bean construction.
    private FacesContext context = FacesContext.getCurrentInstance();
    private ExternalContext externalContext = context.getExternalContext();
    private ResourceBundle bundle = context.getApplication().getResourceBundle(
            context, "msgs");
    private HttpServletRequest req = (HttpServletRequest) externalContext.getRequest();

    @EJB
    private UserFacade userFacade;
    @EJB
    private ProjectFacade projectFacade;
    @EJB
    private ProjectStatusFacade projectStatusFacade;
    @EJB
    private MessageFacade messageFacade;

    /**
     * Loads the session user and the requested profile, then builds the lazy
     * data models. Logs (via LogBean) and continues on lookup failures; logs
     * the user out when no profile could be resolved.
     */
    @PostConstruct
    public void init() {
        try {
            user = (User) externalContext.getSessionMap().get("user");
        } catch (Exception e) {
            @SuppressWarnings("unused")
            LogBean log = new LogBean(
                    "INFO ProfileBean.init: can't get user from session: "
                            + e.getMessage());
        }
        try {
            userId = Integer.parseInt(req.getParameter("userId"));
            if (userId != null) {
                profile = userFacade.getById(userId);
            }
        } catch (Exception e) {
            @SuppressWarnings("unused")
            LogBean log = new LogBean(
                    "ERROR ProfileViewBean.init: can't get profile from request: "
                            + e.getMessage());
        }
        if (profile == null) {
            // No viewable profile — force logout rather than render a broken page.
            AuthBean authBean = new AuthBean();
            authBean.logout();
            return;
        }
        setLazyModelProject(new LazyDataModelProject(
                projectFacade.getByUser(userId)));
        setProjectStatuses(projectStatusFacade.getAll());
        setLazyModelMessage(new LazyDataModelMessage(
                messageFacade.getByCreator(userId)));
        setLazyModelUser(new LazyDataModelUser(userFacade.getAll()));
    }

    /**
     * Persists edits to the viewed profile and shows a confirmation message.
     * If the edited profile is the logged-in user's own, logs out so the
     * session is rebuilt with fresh data.
     */
    public void updateUser() {
        // NOTE(review): see cal above — this stamps class-load time, not "now".
        profile.setEditedOn(cal.getTime());
        userFacade.update(profile);
        context.addMessage(null, new FacesMessage(FacesMessage.SEVERITY_INFO,
                bundle.getString("changesSaved"), null));
        // NOTE(review): if getId() returns boxed Integer, == compares
        // references and may be false for equal ids outside the cache range —
        // verify getId()'s declared type (equals() would be safer).
        if (user.getId() == profile.getId()) {
            AuthBean authBean = new AuthBean();
            authBean.logout();
            return;
        }
    }

    /**
     * Resolves a project status id to its display name; returns "" (and logs
     * via LogBean) when the id is null or the lookup fails.
     */
    public String getProjectStatusName(Integer projectStatusId) {
        String result = "";
        if (projectStatusId != null) {
            try {
                result += projectStatusFacade.getById(projectStatusId)
                        .getName();
            } catch (Exception e) {
                @SuppressWarnings("unused")
                LogBean log = new LogBean(
                        "ERROR ProjectsViewBean.getProjectStatusName("
                                + projectStatusId
                                + "): can't get projectStatusName: "
                                + e.getMessage());
            }
        }
        return result;
    }

    public User getProfile() {
        return profile;
    }

    public void setProfile(User user) {
        this.profile = user;
    }

    public LazyDataModel<Project> getLazyModelProject() {
        return lazyModelProject;
    }

    public void setLazyModelProject(LazyDataModel<Project> lazyModelProject) {
        this.lazyModelProject = lazyModelProject;
    }

    public Project getSelectedProject() {
        return selectedProject;
    }

    public void setSelectedProject(Project selectedProject) {
        this.selectedProject = selectedProject;
    }

    public ProjectFacade getProjectFacade() {
        return projectFacade;
    }

    public void setProjectFacade(ProjectFacade projectFacade) {
        this.projectFacade = projectFacade;
    }

    public ProjectStatusFacade getProjectStatusFacade() {
        return projectStatusFacade;
    }

    public void setProjectStatusFacade(ProjectStatusFacade projectStatusFacade) {
        this.projectStatusFacade = projectStatusFacade;
    }

    public List<ProjectStatus> getProjectStatuses() {
        return projectStatuses;
    }

    public void setProjectStatuses(List<ProjectStatus> projectStatuses) {
        this.projectStatuses = projectStatuses;
    }

    public LazyDataModel<User> getLazyModelUser() {
        return lazyModelUser;
    }

    public void setLazyModelUser(LazyDataModel<User> lazyModelUser) {
        this.lazyModelUser = lazyModelUser;
    }

    public User getSelectedUser() {
        return selectedUser;
    }

    public void setSelectedUser(User selectedUser) {
        this.selectedUser = selectedUser;
    }

    public LazyDataModel<Message> getLazyModelMessage() {
        return lazyModelMessage;
    }

    public void setLazyModelMessage(LazyDataModel<Message> lazyModelMessage) {
        this.lazyModelMessage = lazyModelMessage;
    }

    public Message getSelectedMessage() {
        return selectedMessage;
    }

    public void setSelectedMessage(Message selectedMessage) {
        this.selectedMessage = selectedMessage;
    }

    public User getUser() {
        return user;
    }

    public void setUser(User user) {
        this.user = user;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.pig.test; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import java.io.IOException; import java.util.ArrayList; import java.util.List; import org.junit.Assert; import org.apache.pig.ResourceSchema; import org.apache.pig.SortColInfo; import org.apache.pig.SortInfo; import org.apache.pig.ResourceSchema.ResourceFieldSchema; import org.apache.pig.backend.executionengine.ExecException; import org.apache.pig.data.DataType; import org.apache.pig.impl.logicalLayer.FrontendException; import org.apache.pig.impl.logicalLayer.schema.Schema; import org.apache.pig.impl.logicalLayer.schema.SchemaMergeException; import org.apache.pig.impl.util.Utils; import org.apache.pig.test.utils.TypeCheckingTestUtil; import org.junit.Test; public class TestResourceSchema { /** * Test that ResourceSchema is correctly created given a * pig.Schema and vice versa */ @Test public void testResourceFlatSchemaCreation() throws ExecException, SchemaMergeException, FrontendException { String [] aliases ={"f1", "f2"}; byte[] types = {DataType.CHARARRAY, DataType.INTEGER}; Schema origSchema = TypeCheckingTestUtil.genFlatSchema( 
aliases,types); ResourceSchema rsSchema = new ResourceSchema(origSchema); assertEquals("num fields", aliases.length, rsSchema.getFields().length); ResourceSchema.ResourceFieldSchema[] fields = rsSchema.getFields(); for (int i=0; i<fields.length; i++) { assertEquals(fields[i].getName(), aliases[i]); assertEquals(fields[i].getType(), types[i]); } Schema genSchema = Schema.getPigSchema(rsSchema); assertTrue("generated schema equals original", Schema.equals(genSchema, origSchema, true, false)); } /** * Test that ResourceSchema is correctly created given a * pig.Schema and vice versa */ @Test public void testResourceFlatSchemaCreation2() throws ExecException, SchemaMergeException, FrontendException { String [] aliases ={"f1", "f2"}; byte[] types = {DataType.CHARARRAY, DataType.INTEGER}; Schema origSchema = new Schema( new Schema.FieldSchema("t1", new Schema( new Schema.FieldSchema("t0", TypeCheckingTestUtil.genFlatSchema( aliases,types), DataType.TUPLE)), DataType.BAG)); ResourceSchema rsSchema = new ResourceSchema(origSchema); Schema genSchema = Schema.getPigSchema(rsSchema); assertTrue("generated schema equals original", Schema.equals(genSchema, origSchema, true, false)); } /** * Test that ResourceSchema is correctly with SortInfo */ @Test public void testResourceFlatSchemaCreationWithSortInfo() throws ExecException, SchemaMergeException, FrontendException { String [] aliases ={"f1", "f2"}; byte[] types = {DataType.CHARARRAY, DataType.INTEGER}; Schema origSchema = new Schema( new Schema.FieldSchema("t1", new Schema( new Schema.FieldSchema("t0", TypeCheckingTestUtil.genFlatSchema( aliases,types), DataType.TUPLE)), DataType.BAG)); List<SortColInfo> colList = new ArrayList<SortColInfo>(); SortColInfo col1 = new SortColInfo("f1", 0, SortColInfo.Order.ASCENDING); SortColInfo col2 = new SortColInfo("f1", 1, SortColInfo.Order.DESCENDING); colList.add(col1); colList.add(col2); SortInfo sortInfo = new SortInfo(colList); ResourceSchema rsSchema = new ResourceSchema(origSchema, 
sortInfo); Schema genSchema = Schema.getPigSchema(rsSchema); assertTrue("generated schema equals original", Schema.equals(genSchema, origSchema, true, false)); assertTrue(rsSchema.getSortKeys()[0]==0); assertTrue(rsSchema.getSortKeys()[1]==1); assertTrue(rsSchema.getSortKeyOrders()[0]==ResourceSchema.Order.ASCENDING); assertTrue(rsSchema.getSortKeyOrders()[1]==ResourceSchema.Order.DESCENDING); } /** * Test that Pig Schema is correctly created given a * ResourceSchema and vice versa. Test also that * TwoLevelAccess flag is set for Pig Schema when needed. * @throws IOException */ @Test public void testToPigSchemaWithTwoLevelAccess() throws IOException { ResourceFieldSchema[] level0 = new ResourceFieldSchema[] { new ResourceFieldSchema() .setName("fld0").setType(DataType.CHARARRAY), new ResourceFieldSchema() .setName("fld1").setType(DataType.DOUBLE), new ResourceFieldSchema() .setName("fld2").setType(DataType.INTEGER) }; ResourceSchema rSchema0 = new ResourceSchema() .setFields(level0); ResourceFieldSchema[] level1 = new ResourceFieldSchema[] { new ResourceFieldSchema() .setName("t1").setType(DataType.TUPLE) .setSchema(rSchema0) }; ResourceSchema rSchema1 = new ResourceSchema() .setFields(level1); ResourceFieldSchema[] level2 = new ResourceFieldSchema[] { new ResourceFieldSchema() .setName("t2").setType(DataType.BAG) .setSchema(rSchema1) }; ResourceSchema origSchema = new ResourceSchema() .setFields(level2); Schema pSchema = Schema.getPigSchema(origSchema); assertTrue(!CheckTwoLevelAccess(pSchema)); assertTrue(ResourceSchema.equals(origSchema, new ResourceSchema(pSchema))); } private boolean CheckTwoLevelAccess(Schema s) { if (s == null) return false; for (Schema.FieldSchema fs : s.getFields()) { if (fs.type == DataType.BAG && fs.schema != null && fs.schema.isTwoLevelAccessRequired()) { return true; } if (CheckTwoLevelAccess(fs.schema)) return true; } return false; } /** * Test invalid Resource Schema: multiple fields for a bag * @throws IOException */ 
@Test(expected=FrontendException.class) public void testToPigSchemaWithInvalidSchema() throws IOException { ResourceFieldSchema[] level0 = new ResourceFieldSchema[] { new ResourceFieldSchema() .setName("fld0").setType(DataType.CHARARRAY), new ResourceFieldSchema() .setName("fld1").setType(DataType.DOUBLE), new ResourceFieldSchema() .setName("fld2").setType(DataType.INTEGER) }; ResourceSchema rSchema0 = new ResourceSchema() .setFields(level0); ResourceFieldSchema[] level2 = new ResourceFieldSchema[] { new ResourceFieldSchema() .setName("t2").setType(DataType.BAG).setSchema(rSchema0) }; } /** * Test invalid Resource Schema: bag without tuple field * @throws IOException */ @Test(expected=FrontendException.class) public void testToPigSchemaWithInvalidSchema2() throws IOException { ResourceFieldSchema[] level0 = new ResourceFieldSchema[] { new ResourceFieldSchema() .setName("fld0").setType(DataType.CHARARRAY) }; ResourceSchema rSchema0 = new ResourceSchema() .setFields(level0); ResourceFieldSchema[] level2 = new ResourceFieldSchema[] { new ResourceFieldSchema() .setName("t2").setType(DataType.BAG).setSchema(rSchema0) }; } /** * Test one-level Pig Schema: multiple fields for a bag */ @Test public void testResourceSchemaWithInvalidPigSchema() throws FrontendException { String [] aliases ={"f1", "f2"}; byte[] types = {DataType.CHARARRAY, DataType.INTEGER}; Schema level0 = TypeCheckingTestUtil.genFlatSchema( aliases,types); Schema.FieldSchema fld0 = new Schema.FieldSchema("f0", level0, DataType.BAG); Schema level1 = new Schema(fld0); try { Schema.getPigSchema(new ResourceSchema(level1)); Assert.fail(); } catch(FrontendException e) { assertTrue(e.getErrorCode()==2218); } } /** * Test one-level Pig Schema: bag without tuple field */ @Test public void testResourceSchemaWithInvalidPigSchema2() throws FrontendException { String [] aliases ={"f1"}; byte[] types = {DataType.INTEGER}; Schema level0 = TypeCheckingTestUtil.genFlatSchema( aliases,types); Schema.FieldSchema fld0 = new 
Schema.FieldSchema("f0", level0, DataType.BAG); Schema level1 = new Schema(fld0); try { Schema.getPigSchema(new ResourceSchema(level1)); Assert.fail(); } catch (FrontendException e) { assertTrue(e.getErrorCode()==2218); } } /** * Test that we can turn a schema into a string and then parse it * back into a schema. */ @Test public void testToStringAndParse() throws Exception { ResourceSchema rs = new ResourceSchema(); ResourceFieldSchema[] fields = new ResourceFieldSchema[13]; byte[] types = {DataType.INTEGER, DataType.LONG, DataType.FLOAT, DataType.DOUBLE, DataType.BYTEARRAY, DataType.CHARARRAY, DataType.MAP, DataType.TUPLE, DataType.TUPLE, DataType.BAG, DataType.BAG, DataType.BOOLEAN, DataType.DATETIME}; String[] names = {"i", "l", "f", "d", "b", "s", "m", "tschema", "tnull", "bschema", "bnull", "bb", "dt"}; for (int i = 0; i < fields.length; i++) { fields[i] = new ResourceFieldSchema(); fields[i].setName(names[i]); fields[i].setType(types[i]); } // Add in the schemas for the tuple and the bag ResourceSchema tschema = new ResourceSchema(); ResourceFieldSchema[] tfields = new ResourceFieldSchema[3]; for (int i = 0; i < 3; i++) { tfields[i] = new ResourceFieldSchema(); tfields[i].setName(names[i]); tfields[i].setType(types[i]); } tschema.setFields(tfields); fields[7].setSchema(tschema); ResourceSchema bschema = new ResourceSchema(); ResourceFieldSchema[] bfields = new ResourceFieldSchema[1]; bfields[0] = new ResourceFieldSchema(); bfields[0].setName("t"); bfields[0].setType(DataType.TUPLE); ResourceSchema tbschema = new ResourceSchema(); ResourceFieldSchema[] tbfields = new ResourceFieldSchema[3]; for (int i = 3; i < 6; i++) { tbfields[i-3] = new ResourceFieldSchema(); tbfields[i-3].setName(names[i]); tbfields[i-3].setType(types[i]); } tbschema.setFields(tbfields); bfields[0].setSchema(tbschema); bschema.setFields(bfields); fields[9].setSchema(bschema); rs.setFields(fields); String strSchema = rs.toString(); assertEquals("i:int,l:long,f:float,d:double,b:bytearray,s:" 
+ "chararray,m:[],tschema:(i:int,l:long,f:float)," + "tnull:(),bschema:{t:(d:double,b:bytearray,s:chararray)},bnull:{},bb:boolean,dt:datetime", strSchema); ResourceSchema after = new ResourceSchema(Utils.getSchemaFromString(strSchema)); ResourceFieldSchema[] afterFields = after.getFields(); assertEquals(13, afterFields.length); assertEquals("i", afterFields[0].getName()); assertEquals(DataType.INTEGER, afterFields[0].getType()); assertEquals("l", afterFields[1].getName()); assertEquals(DataType.LONG, afterFields[1].getType()); assertEquals("f", afterFields[2].getName()); assertEquals(DataType.FLOAT, afterFields[2].getType()); assertEquals("d", afterFields[3].getName()); assertEquals(DataType.DOUBLE, afterFields[3].getType()); assertEquals("b", afterFields[4].getName()); assertEquals(DataType.BYTEARRAY, afterFields[4].getType()); assertEquals("s", afterFields[5].getName()); assertEquals(DataType.CHARARRAY, afterFields[5].getType()); assertEquals("m", afterFields[6].getName()); assertEquals(DataType.MAP, afterFields[6].getType()); assertEquals("tschema", afterFields[7].getName()); assertEquals(DataType.TUPLE, afterFields[7].getType()); assertEquals("tnull", afterFields[8].getName()); assertEquals(DataType.TUPLE, afterFields[8].getType()); assertEquals("bschema", afterFields[9].getName()); assertEquals(DataType.BAG, afterFields[9].getType()); assertEquals("bnull", afterFields[10].getName()); assertEquals(DataType.BAG, afterFields[10].getType()); assertEquals("bb", afterFields[11].getName()); assertEquals(DataType.BOOLEAN, afterFields[11].getType()); assertEquals("dt", afterFields[12].getName()); assertEquals(DataType.DATETIME, afterFields[12].getType()); assertNotNull(afterFields[7].getSchema()); ResourceFieldSchema[] tAfterFields = afterFields[7].getSchema().getFields(); assertEquals(3, tAfterFields.length); assertEquals("i", tAfterFields[0].getName()); assertEquals(DataType.INTEGER, tAfterFields[0].getType()); assertEquals("l", tAfterFields[1].getName()); 
assertEquals(DataType.LONG, tAfterFields[1].getType()); assertEquals("f", tAfterFields[2].getName()); assertEquals(DataType.FLOAT, tAfterFields[2].getType()); assertNotNull(afterFields[9].getSchema()); ResourceFieldSchema[] bAfterFields = afterFields[9].getSchema().getFields(); assertEquals(1, bAfterFields.length); assertNotNull(bAfterFields[0].getSchema()); ResourceFieldSchema[] tbAfterFields = bAfterFields[0].getSchema().getFields(); assertEquals(3, tbAfterFields.length); assertEquals("d", tbAfterFields[0].getName()); assertEquals(DataType.DOUBLE, tbAfterFields[0].getType()); assertEquals("b", tbAfterFields[1].getName()); assertEquals(DataType.BYTEARRAY, tbAfterFields[1].getType()); assertEquals("s", tbAfterFields[2].getName()); assertEquals(DataType.CHARARRAY, tbAfterFields[2].getType()); } }
package io.schinzel.basicutils; import junitparams.JUnitParamsRunner; import junitparams.Parameters; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; import java.util.HashSet; import java.util.Set; import java.util.regex.Pattern; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; /** * @author schinzel */ @RunWith(JUnitParamsRunner.class) public class RandomUtilTest { @Test public void generateSeed_Generate1000Seeds_SeedShouldNeverBeRepeated() { int noOfSeeds = 1000; Set<Long> set = new HashSet<>(noOfSeeds); for (int i = 0; i < noOfSeeds; i++) { long seed = RandomUtil.generateSeed(); assertThat(set.contains(seed)).isFalse(); set.add(seed); } } @Test public void testGetInstance_sameSeed() { //Two instance with the same seed should generate //the same random data RandomUtil instance1 = RandomUtil.create(123); RandomUtil instance2 = RandomUtil.create(123); Assert.assertEquals(instance1.getInt(10, 100), instance2.getInt(10, 100)); Assert.assertEquals(instance1.getString(10), instance2.getString(10)); } @Test public void testGetInstance_differentSeeds() { //Two instance with the same seed should generate //the same random data RandomUtil instance1 = RandomUtil.create(123); RandomUtil instance2 = RandomUtil.create(456); Assert.assertNotEquals(instance1.getInt(10, 100), instance2.getInt(10, 100)); Assert.assertNotEquals(instance1.getString(10), instance2.getString(10)); } @Test public void getInt_TwoRandObjectWithSameSeedGenerate1000numbers_NumbersEqual() { int seed = RandomUtil.getRandomNumber(10000, 20000); RandomUtil rand1 = RandomUtil.create(seed); RandomUtil rand2 = RandomUtil.create(seed); for (int i = 0; i < 1000; i++) { assertThat(rand1.getInt(0, 100)).isEqualTo(rand2.getInt(0, 100)); } } @Test public void test_getIntArray() { RandomUtil mRand = RandomUtil.create(); int arraySum, arraySize; int[] result; //Basic test arraySum = 100; arraySize = 5; result 
= mRand.getIntArray(arraySize, arraySum); Assert.assertEquals(arraySum, sumArray(result)); Assert.assertEquals(arraySize, result.length); //Test one elem arraySum = 100; arraySize = 1; result = mRand.getIntArray(arraySize, arraySum); Assert.assertEquals(arraySum, sumArray(result)); Assert.assertEquals(arraySize, result.length); //Test when sum is only slightly larger than size arraySum = 6; arraySize = 5; result = mRand.getIntArray(arraySize, arraySum); Assert.assertEquals(arraySum, sumArray(result)); Assert.assertEquals(arraySize, result.length); } private static int sumArray(int[] arr) { int sum = 0; for (int element : arr) { sum += element; } return sum; } @Test @Parameters({"10, 10", "10, 0", "2, 10"}) public void getIntArray_SizeIsSameSizeAsSum_Exception(int arraySum, int arraySize) { assertThatExceptionOfType(RuntimeException.class).isThrownBy(() -> RandomUtil.create().getIntArray(arraySize, arraySum) ); } @Test public void testGetRandomNumber() { int min, max, result; min = 1; max = 10; for (int i = 0; i < 100; i++) { result = RandomUtil.getRandomNumber(min, max); Assert.assertTrue((result >= min) && (result <= max)); } min = 10000; max = 10010; for (int i = 0; i < 100; i++) { result = RandomUtil.getRandomNumber(min, max); Assert.assertTrue((result >= min) && (result <= max)); } min = 1; max = 2; for (int i = 0; i < 100; i++) { result = RandomUtil.getRandomNumber(min, max); Assert.assertTrue((result >= min) && (result <= max)); } min = Integer.MAX_VALUE - 1; max = Integer.MAX_VALUE; for (int i = 0; i < 100; i++) { result = RandomUtil.getRandomNumber(min, max); Assert.assertTrue((result >= min) && (result <= max)); } } /** * Checks that all numbers in the range are a possible output. 
*/ @Test public void testRandomNumberRange() { int min = 3; int max = 5; for (int i = 0; i < 1000; i++) { int number = RandomUtil.getRandomNumber(min, max); assertThat(number).isBetween(min, max); } } @Test(expected = RuntimeException.class) public void testGetRandomNumberException() { RandomUtil.getRandomNumber(100, 1); // Exception should be thrown before this line Assert.fail(); } @Test public void testGetRandomString() { String randomString; //Test that length is correct randomString = RandomUtil.getRandomString(5); assertThat(randomString).hasSize(5); //Test short randomString = RandomUtil.getRandomString(1); assertThat(randomString).hasSize(1); //Test long randomString = RandomUtil.getRandomString(400); assertThat(randomString).hasSize(400); //Test that only alphnum chars lower case randomString = RandomUtil.getRandomString(100); Pattern pattern = Pattern.compile("^[a-z0-9]*"); assertThat(randomString).matches(pattern); } /** * Two instances with the same seed should generated the same doubles */ @Test public void testGetDouble_SameSeed() { RandomUtil instance1 = RandomUtil.create(1234); RandomUtil instance2 = RandomUtil.create(1234); Assert.assertEquals( instance1.getDouble(0, 10d), instance2.getDouble(0, 10d), 0); } /** * Make sure that the random doubles are less than max and more than min. 
*/ @Test public void testGetDouble_MaxMin() { RandomUtil rand = RandomUtil.create(1234); for (int i = 0; i < 1000; i++) { double min = 0d; double max = 1000d; double number = rand.getDouble(min, max); assertThat(number).isGreaterThanOrEqualTo(min); assertThat(number).isLessThanOrEqualTo(max); } } @Test @Parameters({ "1, 160", "2, 160", "3, 160", "4, 0160" }) public void testGetPaddedInt(int padding, String expected) { String actual = RandomUtil.create(1234).getPaddedInt(100, 200, padding); assertThat(actual).isEqualTo(expected); } @Test @Parameters({"0", "1000"}) public void testGetPaddedInt_incorrectPaddingArgToSmall(int padding) { assertThatExceptionOfType(RuntimeException.class).isThrownBy(() -> RandomUtil.create(1234).getPaddedInt(100, 200, padding) ); } @Test public void testGetPaddedInt_incorrectPaddingArgToLarge() { assertThatExceptionOfType(RuntimeException.class).isThrownBy(() -> RandomUtil.create(1234).getPaddedInt(100, 200, 1000) ); } }
package org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts; import org.eclipse.draw2d.Graphics; import org.eclipse.draw2d.GridData; import org.eclipse.draw2d.GridLayout; import org.eclipse.draw2d.IFigure; import org.eclipse.draw2d.PositionConstants; import org.eclipse.draw2d.RoundedRectangle; import org.eclipse.draw2d.Shape; import org.eclipse.draw2d.StackLayout; import org.eclipse.draw2d.geometry.Dimension; import org.eclipse.gef.EditPart; import org.eclipse.gef.EditPolicy; import org.eclipse.gef.Request; import org.eclipse.gef.commands.Command; import org.eclipse.gef.editpolicies.LayoutEditPolicy; import org.eclipse.gef.editpolicies.NonResizableEditPolicy; import org.eclipse.gef.requests.CreateRequest; import org.eclipse.gmf.runtime.diagram.ui.editparts.IGraphicalEditPart; import org.eclipse.gmf.runtime.diagram.ui.editparts.ResizableCompartmentEditPart; import org.eclipse.gmf.runtime.diagram.ui.editparts.ShapeNodeEditPart; import org.eclipse.gmf.runtime.diagram.ui.editpolicies.DragDropEditPolicy; import org.eclipse.gmf.runtime.diagram.ui.editpolicies.EditPolicyRoles; import org.eclipse.gmf.runtime.diagram.ui.figures.BorderItemLocator; import org.eclipse.gmf.runtime.diagram.ui.figures.ResizableCompartmentFigure; import org.eclipse.gmf.runtime.draw2d.ui.figures.ConstrainedToolbarLayout; import org.eclipse.gmf.runtime.gef.ui.figures.DefaultSizeNodeFigure; import org.eclipse.gmf.runtime.gef.ui.figures.NodeFigure; import org.eclipse.gmf.runtime.notation.View; import org.eclipse.gmf.tooling.runtime.edit.policies.reparent.CreationEditPolicyWithCustomReparent; import org.eclipse.swt.graphics.Color; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.AbstractBaseFigureEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.AbstractMediatorFlowEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.AbstractOutputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.FixedBorderItemLocator; 
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.policies.MediatorFlow6ItemSemanticEditPolicy; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.part.EsbVisualIDRegistry; /** * @generated NOT */ public class MediatorFlow6EditPart extends AbstractMediatorFlowEditPart { //ProxyService FaultSequence. /** * @generated */ public static final int VISUAL_ID = 3530; /** * @generated */ protected IFigure contentPane; /** * @generated */ protected IFigure primaryShape; /** * @generated */ public MediatorFlow6EditPart(View view) { super(view); } /** * @generated NOT */ protected void createDefaultEditPolicies() { super.createDefaultEditPolicies(); installEditPolicy(EditPolicyRoles.SEMANTIC_ROLE, new MediatorFlow6ItemSemanticEditPolicy()); installEditPolicy(EditPolicy.LAYOUT_ROLE, createLayoutEditPolicy()); // XXX need an SCR to runtime to have another abstract superclass that would let children add reasonable editpolicies removeEditPolicy(org.eclipse.gmf.runtime.diagram.ui.editpolicies.EditPolicyRoles.CONNECTION_HANDLES_ROLE); } /** * @generated */ protected LayoutEditPolicy createLayoutEditPolicy() { org.eclipse.gmf.runtime.diagram.ui.editpolicies.LayoutEditPolicy lep = new org.eclipse.gmf.runtime.diagram.ui.editpolicies.LayoutEditPolicy() { protected EditPolicy createChildEditPolicy(EditPart child) { EditPolicy result = child.getEditPolicy(EditPolicy.PRIMARY_DRAG_ROLE); if (result == null) { result = new NonResizableEditPolicy(); } return result; } protected Command getMoveChildrenCommand(Request request) { return null; } protected Command getCreateCommand(CreateRequest request) { return null; } }; return lep; } /** * @generated */ protected IFigure createNodeShape() { return primaryShape = new MediatorFlowFigure(); } /** * @generated */ public MediatorFlowFigure getPrimaryShape() { return (MediatorFlowFigure) primaryShape; } /** * @generated */ protected NodeFigure createNodePlate() { DefaultSizeNodeFigure result = new DefaultSizeNodeFigure(40, 40); 
return result; } protected void refreshInputConnector(EditPart childEditPart) { if (childEditPart instanceof AbstractBaseFigureEditPart) { AbstractBaseFigureEditPart baseFigureEditPart = (AbstractBaseFigureEditPart) childEditPart; BorderItemLocator locator = new FixedBorderItemLocator(this.getFigure(), baseFigureEditPart.faultInputnputConnectorFigure, PositionConstants.WEST, 0.5); baseFigureEditPart.getBorderedFigure().getBorderItemContainer() .add(baseFigureEditPart.faultInputnputConnectorFigure, locator); } else { //Should handle properly. throw new ClassCastException(); } } protected void addChildVisual(EditPart childEditPart, int index) { if (childEditPart.getParent().getParent().getParent().getParent() instanceof AbstractBaseFigureEditPart) refreshInputConnector(((AbstractBaseFigureEditPart) childEditPart.getParent().getParent().getParent() .getParent())); super.addChildVisual(childEditPart, -1); } /** * Creates figure for this edit part. * * Body of this method does not depend on settings in generation model * so you may safely remove <i>generated</i> tag and modify it. * * @generated */ protected NodeFigure createNodeFigure() { NodeFigure figure = createNodePlate(); figure.setLayoutManager(new StackLayout()); IFigure shape = createNodeShape(); figure.add(shape); contentPane = setupContentPane(shape); return figure; } /** * Default implementation treats passed figure as content pane. * Respects layout one may have set for generated figure. 
* @param nodeShape instance of generated figure class * @generated */ protected IFigure setupContentPane(IFigure nodeShape) { if (nodeShape.getLayoutManager() == null) { ConstrainedToolbarLayout layout = new ConstrainedToolbarLayout(); layout.setSpacing(5); nodeShape.setLayoutManager(layout); } return nodeShape; // use nodeShape itself as contentPane } /** * @generated */ public IFigure getContentPane() { if (contentPane != null) { return contentPane; } return super.getContentPane(); } protected IFigure getContentPaneFor(IGraphicalEditPart editPart) { if (editPart instanceof ResizableCompartmentEditPart) { // Compartment should be added to the primary shape. return getPrimaryShape(); } else { return super.getContentPaneFor(editPart); } } /** * @generated */ protected void setForegroundColor(Color color) { if (primaryShape != null) { primaryShape.setForegroundColor(color); } } /** * @generated */ protected void setBackgroundColor(Color color) { if (primaryShape != null) { primaryShape.setBackgroundColor(color); } } /** * @generated */ protected void setLineWidth(int width) { if (primaryShape instanceof Shape) { ((Shape) primaryShape).setLineWidth(width); } } /** * @generated */ protected void setLineType(int style) { if (primaryShape instanceof Shape) { ((Shape) primaryShape).setLineStyle(style); } } /** * @generated NOT */ public class MediatorFlowFigure extends EsbMediatorFlowFigure { /** * @generated NOT */ public MediatorFlowFigure() { this.setBackgroundColor(THIS_BACK); this.setPreferredSize(new Dimension(getMapMode().DPtoLP(30000), getMapMode().DPtoLP(30000))); } } public boolean isSelectable() { // TODO This or using ResizableEditpolicy? return false; } /** * @generated */ static final Color THIS_BACK = new Color(null, 255, 255, 255); @Override public AbstractOutputConnectorEditPart getAssociatedOutputConnector() { // TODO Auto-generated method stub return null; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.builder.component.dsl; import javax.annotation.Generated; import org.apache.camel.Component; import org.apache.camel.builder.component.AbstractComponentBuilder; import org.apache.camel.builder.component.ComponentBuilder; import org.apache.camel.component.google.calendar.stream.GoogleCalendarStreamComponent; /** * Poll for changes in a Google Calendar. * * Generated by camel-package-maven-plugin - do not edit this file! */ @Generated("org.apache.camel.maven.packaging.ComponentDslMojo") public interface GoogleCalendarStreamComponentBuilderFactory { /** * Google Calendar Stream (camel-google-calendar) * Poll for changes in a Google Calendar. * * Category: api,cloud * Since: 2.23 * Maven coordinates: org.apache.camel:camel-google-calendar */ static GoogleCalendarStreamComponentBuilder googleCalendarStream() { return new GoogleCalendarStreamComponentBuilderImpl(); } /** * Builder for the Google Calendar Stream component. */ interface GoogleCalendarStreamComponentBuilder extends ComponentBuilder<GoogleCalendarStreamComponent> { /** * Google Calendar application name. Example would be * camel-google-calendar/1.0. * * The option is a: <code>java.lang.String</code> type. 
* * Group: consumer */ default GoogleCalendarStreamComponentBuilder applicationName( java.lang.String applicationName) { doSetProperty("applicationName", applicationName); return this; } /** * Allows for bridging the consumer to the Camel routing Error Handler, * which mean any exceptions occurred while the consumer is trying to * pickup incoming messages, or the likes, will now be processed as a * message and handled by the routing Error Handler. By default the * consumer will use the org.apache.camel.spi.ExceptionHandler to deal * with exceptions, that will be logged at WARN or ERROR level and * ignored. * * The option is a: <code>boolean</code> type. * * Default: false * Group: consumer */ default GoogleCalendarStreamComponentBuilder bridgeErrorHandler( boolean bridgeErrorHandler) { doSetProperty("bridgeErrorHandler", bridgeErrorHandler); return this; } /** * The calendarId to be used. * * The option is a: <code>java.lang.String</code> type. * * Default: primary * Group: consumer */ default GoogleCalendarStreamComponentBuilder calendarId( java.lang.String calendarId) { doSetProperty("calendarId", calendarId); return this; } /** * Client ID of the calendar application. * * The option is a: <code>java.lang.String</code> type. * * Group: consumer */ default GoogleCalendarStreamComponentBuilder clientId( java.lang.String clientId) { doSetProperty("clientId", clientId); return this; } /** * The configuration. * * The option is a: * <code>org.apache.camel.component.google.calendar.stream.GoogleCalendarStreamConfiguration</code> type. * * Group: consumer */ default GoogleCalendarStreamComponentBuilder configuration( org.apache.camel.component.google.calendar.stream.GoogleCalendarStreamConfiguration configuration) { doSetProperty("configuration", configuration); return this; } /** * Take into account the lastUpdate of the last event polled as start * date for the next poll. * * The option is a: <code>boolean</code> type. 
* * Default: false * Group: consumer */ default GoogleCalendarStreamComponentBuilder considerLastUpdate( boolean considerLastUpdate) { doSetProperty("considerLastUpdate", considerLastUpdate); return this; } /** * Consume events in the selected calendar from now on. * * The option is a: <code>boolean</code> type. * * Default: true * Group: consumer */ default GoogleCalendarStreamComponentBuilder consumeFromNow( boolean consumeFromNow) { doSetProperty("consumeFromNow", consumeFromNow); return this; } /** * Max results to be returned. * * The option is a: <code>int</code> type. * * Default: 10 * Group: consumer */ default GoogleCalendarStreamComponentBuilder maxResults(int maxResults) { doSetProperty("maxResults", maxResults); return this; } /** * The query to execute on calendar. * * The option is a: <code>java.lang.String</code> type. * * Group: consumer */ default GoogleCalendarStreamComponentBuilder query( java.lang.String query) { doSetProperty("query", query); return this; } /** * Specifies the level of permissions you want a calendar application to * have to a user account. See * https://developers.google.com/calendar/auth for more info. * * The option is a: <code>java.util.List<java.lang.String></code> type. * * Group: consumer */ default GoogleCalendarStreamComponentBuilder scopes( java.util.List<java.lang.String> scopes) { doSetProperty("scopes", scopes); return this; } /** * Whether the component should use basic property binding (Camel 2.x) * or the newer property binding with additional capabilities. * * The option is a: <code>boolean</code> type. * * Default: false * Group: advanced */ @Deprecated default GoogleCalendarStreamComponentBuilder basicPropertyBinding( boolean basicPropertyBinding) { doSetProperty("basicPropertyBinding", basicPropertyBinding); return this; } /** * The client Factory. * * The option is a: * <code>org.apache.camel.component.google.calendar.GoogleCalendarClientFactory</code> type. 
* * Group: advanced */ default GoogleCalendarStreamComponentBuilder clientFactory( org.apache.camel.component.google.calendar.GoogleCalendarClientFactory clientFactory) { doSetProperty("clientFactory", clientFactory); return this; } /** * OAuth 2 access token. This typically expires after an hour so * refreshToken is recommended for long term usage. * * The option is a: <code>java.lang.String</code> type. * * Group: security */ default GoogleCalendarStreamComponentBuilder accessToken( java.lang.String accessToken) { doSetProperty("accessToken", accessToken); return this; } /** * Client secret of the calendar application. * * The option is a: <code>java.lang.String</code> type. * * Group: security */ default GoogleCalendarStreamComponentBuilder clientSecret( java.lang.String clientSecret) { doSetProperty("clientSecret", clientSecret); return this; } /** * OAuth 2 refresh token. Using this, the Google Calendar component can * obtain a new accessToken whenever the current one expires - a * necessity if the application is long-lived. * * The option is a: <code>java.lang.String</code> type. 
* * Group: security */ default GoogleCalendarStreamComponentBuilder refreshToken( java.lang.String refreshToken) { doSetProperty("refreshToken", refreshToken); return this; } } class GoogleCalendarStreamComponentBuilderImpl extends AbstractComponentBuilder<GoogleCalendarStreamComponent> implements GoogleCalendarStreamComponentBuilder { @Override protected GoogleCalendarStreamComponent buildConcreteComponent() { return new GoogleCalendarStreamComponent(); } private org.apache.camel.component.google.calendar.stream.GoogleCalendarStreamConfiguration getOrCreateConfiguration( org.apache.camel.component.google.calendar.stream.GoogleCalendarStreamComponent component) { if (component.getConfiguration() == null) { component.setConfiguration(new org.apache.camel.component.google.calendar.stream.GoogleCalendarStreamConfiguration()); } return component.getConfiguration(); } @Override protected boolean setPropertyOnComponent( Component component, String name, Object value) { switch (name) { case "applicationName": getOrCreateConfiguration((GoogleCalendarStreamComponent) component).setApplicationName((java.lang.String) value); return true; case "bridgeErrorHandler": ((GoogleCalendarStreamComponent) component).setBridgeErrorHandler((boolean) value); return true; case "calendarId": getOrCreateConfiguration((GoogleCalendarStreamComponent) component).setCalendarId((java.lang.String) value); return true; case "clientId": getOrCreateConfiguration((GoogleCalendarStreamComponent) component).setClientId((java.lang.String) value); return true; case "configuration": ((GoogleCalendarStreamComponent) component).setConfiguration((org.apache.camel.component.google.calendar.stream.GoogleCalendarStreamConfiguration) value); return true; case "considerLastUpdate": getOrCreateConfiguration((GoogleCalendarStreamComponent) component).setConsiderLastUpdate((boolean) value); return true; case "consumeFromNow": getOrCreateConfiguration((GoogleCalendarStreamComponent) 
component).setConsumeFromNow((boolean) value); return true; case "maxResults": getOrCreateConfiguration((GoogleCalendarStreamComponent) component).setMaxResults((int) value); return true; case "query": getOrCreateConfiguration((GoogleCalendarStreamComponent) component).setQuery((java.lang.String) value); return true; case "scopes": getOrCreateConfiguration((GoogleCalendarStreamComponent) component).setScopes((java.util.List) value); return true; case "basicPropertyBinding": ((GoogleCalendarStreamComponent) component).setBasicPropertyBinding((boolean) value); return true; case "clientFactory": ((GoogleCalendarStreamComponent) component).setClientFactory((org.apache.camel.component.google.calendar.GoogleCalendarClientFactory) value); return true; case "accessToken": getOrCreateConfiguration((GoogleCalendarStreamComponent) component).setAccessToken((java.lang.String) value); return true; case "clientSecret": getOrCreateConfiguration((GoogleCalendarStreamComponent) component).setClientSecret((java.lang.String) value); return true; case "refreshToken": getOrCreateConfiguration((GoogleCalendarStreamComponent) component).setRefreshToken((java.lang.String) value); return true; default: return false; } } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 */
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vJAXB 2.1.10 in JDK 6
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2013.05.23 at 02:47:00 PM EDT
//

package oasis.names.tc.xacml._3_0.core.schema.wd_17;

import java.math.BigInteger;
import java.util.ArrayList;
import java.util.List;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElements;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;

/**
 * <p>
 * Java class for PolicyType complex type.
 * <p>
 * JAXB binding for the XACML 3.0 {@code <Policy>} element. NOTE: this class is
 * generated from the XACML 3.0 core schema (wd-17); do not edit by hand —
 * changes will be lost when the schema is recompiled.
 * <p>
 * The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;complexType name="PolicyType">
 *   &lt;complexContent>
 *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
 *       &lt;sequence>
 *         &lt;element ref="{urn:oasis:names:tc:xacml:3.0:core:schema:wd-17}Description" minOccurs="0"/>
 *         &lt;element ref="{urn:oasis:names:tc:xacml:3.0:core:schema:wd-17}PolicyIssuer" minOccurs="0"/>
 *         &lt;element ref="{urn:oasis:names:tc:xacml:3.0:core:schema:wd-17}PolicyDefaults" minOccurs="0"/>
 *         &lt;element ref="{urn:oasis:names:tc:xacml:3.0:core:schema:wd-17}Target"/>
 *         &lt;choice maxOccurs="unbounded">
 *           &lt;element ref="{urn:oasis:names:tc:xacml:3.0:core:schema:wd-17}CombinerParameters" minOccurs="0"/>
 *           &lt;element ref="{urn:oasis:names:tc:xacml:3.0:core:schema:wd-17}RuleCombinerParameters" minOccurs="0"/>
 *           &lt;element ref="{urn:oasis:names:tc:xacml:3.0:core:schema:wd-17}VariableDefinition"/>
 *           &lt;element ref="{urn:oasis:names:tc:xacml:3.0:core:schema:wd-17}Rule"/>
 *         &lt;/choice>
 *         &lt;element ref="{urn:oasis:names:tc:xacml:3.0:core:schema:wd-17}ObligationExpressions" minOccurs="0"/>
 *         &lt;element ref="{urn:oasis:names:tc:xacml:3.0:core:schema:wd-17}AdviceExpressions" minOccurs="0"/>
 *       &lt;/sequence>
 *       &lt;attribute name="PolicyId" use="required" type="{http://www.w3.org/2001/XMLSchema}anyURI" />
 *       &lt;attribute name="Version" use="required" type="{urn:oasis:names:tc:xacml:3.0:core:schema:wd-17}VersionType" />
 *       &lt;attribute name="RuleCombiningAlgId" use="required" type="{http://www.w3.org/2001/XMLSchema}anyURI" />
 *       &lt;attribute name="MaxDelegationDepth" type="{http://www.w3.org/2001/XMLSchema}integer" />
 *     &lt;/restriction>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "PolicyType", propOrder = {
    "description",
    "policyIssuer",
    "policyDefaults",
    "target",
    "combinerParametersOrRuleCombinerParametersOrVariableDefinition",
    "obligationExpressions",
    "adviceExpressions"
})
public class PolicyType {

    // Optional child elements of <Policy>.
    @XmlElement(name = "Description")
    protected String description;
    @XmlElement(name = "PolicyIssuer")
    protected PolicyIssuerType policyIssuer;
    @XmlElement(name = "PolicyDefaults")
    protected DefaultsType policyDefaults;
    // <Target> is required by the schema.
    @XmlElement(name = "Target", required = true)
    protected TargetType target;
    // Repeating <choice>: the list holds any mix of the four mapped element types,
    // in document order.
    @XmlElements({
        @XmlElement(name = "RuleCombinerParameters", type = RuleCombinerParametersType.class),
        @XmlElement(name = "CombinerParameters", type = CombinerParametersType.class),
        @XmlElement(name = "VariableDefinition", type = VariableDefinitionType.class),
        @XmlElement(name = "Rule", type = RuleType.class)
    })
    protected List<Object> combinerParametersOrRuleCombinerParametersOrVariableDefinition;
    @XmlElement(name = "ObligationExpressions")
    protected ObligationExpressionsType obligationExpressions;
    @XmlElement(name = "AdviceExpressions")
    protected AdviceExpressionsType adviceExpressions;
    // XML attributes of <Policy>; PolicyId, Version and RuleCombiningAlgId are required.
    @XmlAttribute(name = "PolicyId", required = true)
    @XmlSchemaType(name = "anyURI")
    protected String policyId;
    @XmlAttribute(name = "Version", required = true)
    protected String version;
    @XmlAttribute(name = "RuleCombiningAlgId", required = true)
    @XmlSchemaType(name = "anyURI")
    protected String ruleCombiningAlgId;
    @XmlAttribute(name = "MaxDelegationDepth")
    protected BigInteger maxDelegationDepth;

    /**
     * Gets the value of the description property.
     *
     * @return possible object is {@link String }
     */
    public String getDescription() {
        return description;
    }

    /**
     * Sets the value of the description property.
     *
     * @param value allowed object is {@link String }
     */
    public void setDescription(String value) {
        this.description = value;
    }

    /**
     * Gets the value of the policyIssuer property.
     *
     * @return possible object is {@link PolicyIssuerType }
     */
    public PolicyIssuerType getPolicyIssuer() {
        return policyIssuer;
    }

    /**
     * Sets the value of the policyIssuer property.
     *
     * @param value allowed object is {@link PolicyIssuerType }
     */
    public void setPolicyIssuer(PolicyIssuerType value) {
        this.policyIssuer = value;
    }

    /**
     * Gets the value of the policyDefaults property.
     *
     * @return possible object is {@link DefaultsType }
     */
    public DefaultsType getPolicyDefaults() {
        return policyDefaults;
    }

    /**
     * Sets the value of the policyDefaults property.
     *
     * @param value allowed object is {@link DefaultsType }
     */
    public void setPolicyDefaults(DefaultsType value) {
        this.policyDefaults = value;
    }

    /**
     * Gets the value of the target property.
     *
     * @return possible object is {@link TargetType }
     */
    public TargetType getTarget() {
        return target;
    }

    /**
     * Sets the value of the target property.
     *
     * @param value allowed object is {@link TargetType }
     */
    public void setTarget(TargetType value) {
        this.target = value;
    }

    /**
     * Gets the value of the combinerParametersOrRuleCombinerParametersOrVariableDefinition property.
     * <p>
     * This accessor method returns a reference to the live list, not a snapshot. Therefore any modification
     * you make to the returned list will be present inside the JAXB object. This is why there is not a
     * <CODE>set</CODE> method for the combinerParametersOrRuleCombinerParametersOrVariableDefinition
     * property.
     * <p>
     * For example, to add a new item, do as follows:
     *
     * <pre>
     * getCombinerParametersOrRuleCombinerParametersOrVariableDefinition().add(newItem);
     * </pre>
     * <p>
     * Objects of the following type(s) are allowed in the list {@link RuleCombinerParametersType }
     * {@link CombinerParametersType } {@link VariableDefinitionType }
     * {@link oasis.names.tc.xacml._3_0.core.schema.wd_17.RuleType }
     */
    public List<Object> getCombinerParametersOrRuleCombinerParametersOrVariableDefinition() {
        // Lazily created so an unmarshalled object without these elements stays small.
        if (combinerParametersOrRuleCombinerParametersOrVariableDefinition == null) {
            combinerParametersOrRuleCombinerParametersOrVariableDefinition = new ArrayList<Object>();
        }
        return this.combinerParametersOrRuleCombinerParametersOrVariableDefinition;
    }

    /**
     * Gets the value of the obligationExpressions property.
     *
     * @return possible object is {@link ObligationExpressionsType }
     */
    public ObligationExpressionsType getObligationExpressions() {
        return obligationExpressions;
    }

    /**
     * Sets the value of the obligationExpressions property.
     *
     * @param value allowed object is {@link ObligationExpressionsType }
     */
    public void setObligationExpressions(ObligationExpressionsType value) {
        this.obligationExpressions = value;
    }

    /**
     * Gets the value of the adviceExpressions property.
     *
     * @return possible object is {@link AdviceExpressionsType }
     */
    public AdviceExpressionsType getAdviceExpressions() {
        return adviceExpressions;
    }

    /**
     * Sets the value of the adviceExpressions property.
     *
     * @param value allowed object is {@link AdviceExpressionsType }
     */
    public void setAdviceExpressions(AdviceExpressionsType value) {
        this.adviceExpressions = value;
    }

    /**
     * Gets the value of the policyId property.
     *
     * @return possible object is {@link String }
     */
    public String getPolicyId() {
        return policyId;
    }

    /**
     * Sets the value of the policyId property.
     *
     * @param value allowed object is {@link String }
     */
    public void setPolicyId(String value) {
        this.policyId = value;
    }

    /**
     * Gets the value of the version property.
     *
     * @return possible object is {@link String }
     */
    public String getVersion() {
        return version;
    }

    /**
     * Sets the value of the version property.
     *
     * @param value allowed object is {@link String }
     */
    public void setVersion(String value) {
        this.version = value;
    }

    /**
     * Gets the value of the ruleCombiningAlgId property.
     *
     * @return possible object is {@link String }
     */
    public String getRuleCombiningAlgId() {
        return ruleCombiningAlgId;
    }

    /**
     * Sets the value of the ruleCombiningAlgId property.
     *
     * @param value allowed object is {@link String }
     */
    public void setRuleCombiningAlgId(String value) {
        this.ruleCombiningAlgId = value;
    }

    /**
     * Gets the value of the maxDelegationDepth property.
     *
     * @return possible object is {@link java.math.BigInteger }
     */
    public BigInteger getMaxDelegationDepth() {
        return maxDelegationDepth;
    }

    /**
     * Sets the value of the maxDelegationDepth property.
     *
     * @param value allowed object is {@link java.math.BigInteger }
     */
    public void setMaxDelegationDepth(BigInteger value) {
        this.maxDelegationDepth = value;
    }

}
/*
 * The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
 * (the "License"). You may not use this work except in compliance with the License, which is
 * available at www.apache.org/licenses/LICENSE-2.0
 *
 * This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied, as more fully set forth in the License.
 *
 * See the NOTICE file distributed with this work for information regarding copyright ownership.
 */

package alluxio.wire;

import alluxio.util.webui.UIFileInfo;
import alluxio.util.webui.UIFileBlockInfo;

import com.google.common.base.MoreObjects;

import java.io.Serializable;
import java.util.List;

import javax.annotation.concurrent.NotThreadSafe;

/**
 * Alluxio WebUI browse information.
 *
 * <p>Plain serializable data holder for the master web UI "browse" page. All setters return
 * {@code this} so they can be chained. The path-info array is defensively copied on both set and
 * get so callers cannot mutate this object's internal state through the array reference.
 */
@NotThreadSafe
public final class MasterWebUIBrowse implements Serializable {
  private static final long serialVersionUID = 5446587832759273932L;

  private boolean mDebug;
  private boolean mShowPermissions;
  private int mNTotalFile;
  private List<UIFileBlockInfo> mFileBlocks;
  private List<UIFileInfo> mFileInfos;
  private long mViewingOffset;
  private String mAccessControlException;
  private String mBlockSizeBytes;
  private String mCurrentPath;
  private String mFatalError;
  private String mFileData;
  private String mFileDoesNotExistException;
  private String mHighestTierAlias;
  private String mInvalidPathError;
  private String mInvalidPathException;
  private String mMasterNodeAddress;
  private UIFileInfo mCurrentDirectory;
  private UIFileInfo[] mPathInfos;

  /**
   * Creates a new instance of {@link MasterWebUIBrowse}.
   */
  public MasterWebUIBrowse() {
  }

  /**
   * Gets access control exception.
   *
   * @return the access control exception
   */
  public String getAccessControlException() {
    return mAccessControlException;
  }

  /**
   * Gets block size bytes.
   *
   * @return the block size bytes
   */
  public String getBlockSizeBytes() {
    return mBlockSizeBytes;
  }

  /**
   * Gets current directory.
   *
   * @return the current directory
   */
  public UIFileInfo getCurrentDirectory() {
    return mCurrentDirectory;
  }

  /**
   * Gets current path.
   *
   * @return the current path
   */
  public String getCurrentPath() {
    return mCurrentPath;
  }

  /**
   * Gets debug.
   *
   * @return the debug
   */
  public boolean getDebug() {
    return mDebug;
  }

  /**
   * Gets fatal error.
   *
   * @return the fatal error
   */
  public String getFatalError() {
    return mFatalError;
  }

  /**
   * Gets file blocks.
   *
   * @return the file blocks
   */
  public List<UIFileBlockInfo> getFileBlocks() {
    return mFileBlocks;
  }

  /**
   * Gets file data.
   *
   * @return the file data
   */
  public String getFileData() {
    return mFileData;
  }

  /**
   * Gets file does not exist exception.
   *
   * @return the file does not exist exception
   */
  public String getFileDoesNotExistException() {
    return mFileDoesNotExistException;
  }

  /**
   * Gets file infos.
   *
   * @return the file infos
   */
  public List<UIFileInfo> getFileInfos() {
    return mFileInfos;
  }

  /**
   * Gets highest tier alias.
   *
   * @return the highest tier alias
   */
  public String getHighestTierAlias() {
    return mHighestTierAlias;
  }

  /**
   * Gets invalid path error.
   *
   * @return the invalid path error
   */
  public String getInvalidPathError() {
    return mInvalidPathError;
  }

  /**
   * Gets invalid path exception.
   *
   * @return the invalid path exception
   */
  public String getInvalidPathException() {
    return mInvalidPathException;
  }

  /**
   * Gets master node address.
   *
   * @return the master node address
   */
  public String getMasterNodeAddress() {
    return mMasterNodeAddress;
  }

  /**
   * Gets n total file.
   *
   * @return the n total file
   */
  public int getNTotalFile() {
    return mNTotalFile;
  }

  /**
   * Get path infos ui file info [ ].
   *
   * @return a defensive copy of the path infos, or {@code null} if never set
   */
  public UIFileInfo[] getPathInfos() {
    // Return a copy: the setter already clones on the way in, so leaking the internal
    // array here would defeat that defensive copy.
    return mPathInfos == null ? null : mPathInfos.clone();
  }

  /**
   * Gets show permissions.
   *
   * @return the show permissions
   */
  public boolean getShowPermissions() {
    return mShowPermissions;
  }

  /**
   * Gets viewing offset.
   *
   * @return the viewing offset
   */
  public long getViewingOffset() {
    return mViewingOffset;
  }

  /**
   * Sets access control exception.
   *
   * @param accessControlException the access control exception
   * @return the updated {@link MasterWebUIBrowse} instance
   */
  public MasterWebUIBrowse setAccessControlException(String accessControlException) {
    mAccessControlException = accessControlException;
    return this;
  }

  /**
   * Sets block size bytes.
   *
   * @param blockSizeBytes the block size bytes
   * @return the updated {@link MasterWebUIBrowse} instance
   */
  public MasterWebUIBrowse setBlockSizeBytes(String blockSizeBytes) {
    mBlockSizeBytes = blockSizeBytes;
    return this;
  }

  /**
   * Sets current directory.
   *
   * @param currentDirectory the current directory
   * @return the updated {@link MasterWebUIBrowse} instance
   */
  public MasterWebUIBrowse setCurrentDirectory(UIFileInfo currentDirectory) {
    mCurrentDirectory = currentDirectory;
    return this;
  }

  /**
   * Sets current path.
   *
   * @param currentPath the current path
   * @return the updated {@link MasterWebUIBrowse} instance
   */
  public MasterWebUIBrowse setCurrentPath(String currentPath) {
    mCurrentPath = currentPath;
    return this;
  }

  /**
   * Sets debug.
   *
   * @param debug the debug
   * @return the updated {@link MasterWebUIBrowse} instance
   */
  public MasterWebUIBrowse setDebug(boolean debug) {
    mDebug = debug;
    return this;
  }

  /**
   * Sets fatal error.
   *
   * @param fatalError the fatal error
   * @return the updated {@link MasterWebUIBrowse} instance
   */
  public MasterWebUIBrowse setFatalError(String fatalError) {
    mFatalError = fatalError;
    return this;
  }

  /**
   * Sets file blocks.
   *
   * @param fileBlocks the file blocks
   * @return the updated {@link MasterWebUIBrowse} instance
   */
  public MasterWebUIBrowse setFileBlocks(List<UIFileBlockInfo> fileBlocks) {
    mFileBlocks = fileBlocks;
    return this;
  }

  /**
   * Sets file data.
   *
   * @param fileData the file data
   * @return the updated {@link MasterWebUIBrowse} instance
   */
  public MasterWebUIBrowse setFileData(String fileData) {
    mFileData = fileData;
    return this;
  }

  /**
   * Sets file does not exist exception.
   *
   * @param fileDoesNotExistException the file does not exist exception
   * @return the updated {@link MasterWebUIBrowse} instance
   */
  public MasterWebUIBrowse setFileDoesNotExistException(String fileDoesNotExistException) {
    mFileDoesNotExistException = fileDoesNotExistException;
    return this;
  }

  /**
   * Sets file infos.
   *
   * @param fileInfos the file infos
   * @return the updated {@link MasterWebUIBrowse} instance
   */
  public MasterWebUIBrowse setFileInfos(List<UIFileInfo> fileInfos) {
    mFileInfos = fileInfos;
    return this;
  }

  /**
   * Sets highest tier alias.
   *
   * @param highestTierAlias the highest tier alias
   * @return the updated {@link MasterWebUIBrowse} instance
   */
  public MasterWebUIBrowse setHighestTierAlias(String highestTierAlias) {
    mHighestTierAlias = highestTierAlias;
    return this;
  }

  /**
   * Sets invalid path error.
   *
   * @param invalidPathError the invalid path error
   * @return the updated {@link MasterWebUIBrowse} instance
   */
  public MasterWebUIBrowse setInvalidPathError(String invalidPathError) {
    mInvalidPathError = invalidPathError;
    return this;
  }

  /**
   * Sets invalid path exception.
   *
   * @param invalidPathException the invalid path exception
   * @return the updated {@link MasterWebUIBrowse} instance
   */
  public MasterWebUIBrowse setInvalidPathException(String invalidPathException) {
    mInvalidPathException = invalidPathException;
    return this;
  }

  /**
   * Sets master node address.
   *
   * @param masterNodeAddress the master node address
   * @return the updated {@link MasterWebUIBrowse} instance
   */
  public MasterWebUIBrowse setMasterNodeAddress(String masterNodeAddress) {
    mMasterNodeAddress = masterNodeAddress;
    return this;
  }

  /**
   * Sets n total file.
   *
   * @param nTotalFile the n total file
   * @return the updated {@link MasterWebUIBrowse} instance
   */
  public MasterWebUIBrowse setNTotalFile(int nTotalFile) {
    mNTotalFile = nTotalFile;
    return this;
  }

  /**
   * Sets path infos.
   *
   * @param pathInfos the path infos; {@code null} clears the current value
   * @return the updated {@link MasterWebUIBrowse} instance
   */
  public MasterWebUIBrowse setPathInfos(UIFileInfo[] pathInfos) {
    // Defensive copy; also tolerate null instead of throwing NPE from clone().
    mPathInfos = pathInfos == null ? null : pathInfos.clone();
    return this;
  }

  /**
   * Sets show permissions.
   *
   * @param showPermissions the show permissions
   * @return the updated {@link MasterWebUIBrowse} instance
   */
  public MasterWebUIBrowse setShowPermissions(boolean showPermissions) {
    mShowPermissions = showPermissions;
    return this;
  }

  /**
   * Sets viewing offset.
   *
   * @param viewingOffset the viewing offset
   * @return the updated {@link MasterWebUIBrowse} instance
   */
  public MasterWebUIBrowse setViewingOffset(long viewingOffset) {
    mViewingOffset = viewingOffset;
    return this;
  }

  @Override
  public String toString() {
    return MoreObjects.toStringHelper(this).add("accessControlException", mAccessControlException)
        .add("blockSizeBytes", mBlockSizeBytes).add("currentDirectory", mCurrentDirectory)
        .add("currentPath", mCurrentPath).add("debug", mDebug).add("fatalError", mFatalError)
        .add("fileBlocks", mFileBlocks).add("fileData", mFileData)
        .add("fileDoesNotExistException", mFileDoesNotExistException).add("fileInfos", mFileInfos)
        .add("highestTierAlias", mHighestTierAlias).add("invalidPathError", mInvalidPathError)
        .add("invalidPathException", mInvalidPathException)
        .add("masterNodeAddress", mMasterNodeAddress).add("nTotalFile", mNTotalFile)
        .add("pathInfos", mPathInfos).add("showPermissions", mShowPermissions)
        .add("viewingOffset", mViewingOffset).toString();
  }
}
/** * Copyright 2011-2021 Asakusa Framework Team. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.asakusafw.m3bp.mirror.jna; import static org.hamcrest.Matchers.*; import static org.junit.Assert.*; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Stream; import org.junit.Test; import com.asakusafw.dag.api.processor.GroupReader; import com.asakusafw.dag.api.processor.ObjectReader; import com.asakusafw.dag.api.processor.ObjectWriter; import com.asakusafw.dag.api.processor.TaskProcessor; import com.asakusafw.dag.api.processor.TaskSchedule; import com.asakusafw.dag.api.processor.VertexProcessor; import com.asakusafw.dag.api.processor.VertexProcessorContext; import com.asakusafw.dag.api.processor.basic.BasicProcessorContext; import com.asakusafw.dag.api.processor.basic.BasicTaskInfo; import com.asakusafw.dag.api.processor.basic.BasicTaskSchedule; import com.asakusafw.lang.utils.common.Lang; import com.asakusafw.lang.utils.common.Optionals; import com.asakusafw.m3bp.descriptor.Descriptors; import com.asakusafw.m3bp.mirror.PortMirror; import com.asakusafw.m3bp.mirror.VertexMirror; import com.asakusafw.m3bp.mirror.basic.BasicConfigurationMirror; import com.asakusafw.m3bp.mirror.basic.BasicFlowGraphMirror; /** * Test for {@link FlowGraphExecutor}. 
 */
public class FlowGraphExecutorTest {

    // Shared processor context / configuration used by every test graph.
    final BasicProcessorContext root = new BasicProcessorContext(getClass().getClassLoader());

    final BasicConfigurationMirror conf = new BasicConfigurationMirror();

    /**
     * simple case.
     * A single vertex with no edges: each INPUT element becomes one task, and
     * the task processor copies it into OUTPUT.
     * @throws Exception if failed
     */
    @Test
    public void simple() throws Exception {
        // Static lists are shared across tests, so clear before and after.
        Orphan.INPUT.clear();
        Orphan.OUTPUT.clear();
        BasicFlowGraphMirror graph = new BasicFlowGraphMirror();
        graph.addVertex("orphan", Descriptors.newVertex(Orphan.class));
        FlowGraphExecutor executor = new FlowGraphExecutor(root, graph, conf, null);
        Collections.addAll(Orphan.INPUT, "A", "B", "C");
        executor.run();
        assertThat(Orphan.OUTPUT, containsInAnyOrder("A", "B", "C"));
        Orphan.INPUT.clear();
        Orphan.OUTPUT.clear();
    }

    /**
     * one to one.
     * Generator vertex connected to a Consumer vertex by a one-to-one edge;
     * all generated values must arrive at the consumer.
     * @throws Exception if failed
     */
    @Test
    public void one_to_one() throws Exception {
        Generator.INPUT.clear();
        Consumer.OUTPUT.clear();
        BasicFlowGraphMirror graph = new BasicFlowGraphMirror();
        VertexMirror generator = graph.addVertex("generator", Descriptors.newVertex(Generator.class));
        VertexMirror consumer = graph.addVertex("consumer", Descriptors.newVertex(Consumer.class));
        PortMirror out = generator.addOutput("out", Descriptors.newOneToOneEdge(StringSerDe.class));
        PortMirror in = consumer.addInput("in", Descriptors.newOneToOneEdge(StringSerDe.class));
        graph.addEdge(out, in);
        FlowGraphExecutor executor = new FlowGraphExecutor(root, graph, conf, null);
        Collections.addAll(Generator.INPUT, "A", "B", "C");
        executor.run();
        assertThat(Consumer.OUTPUT, containsInAnyOrder("A", "B", "C"));
        Generator.INPUT.clear();
        Consumer.OUTPUT.clear();
    }

    /**
     * broadcast.
     * Same topology as one-to-one but over a broadcast edge; the consumer reads
     * the broadcast input during initialize() rather than per task.
     * @throws Exception if failed
     */
    @Test
    public void broadcast() throws Exception {
        Generator.INPUT.clear();
        BroadcastConsumer.OUTPUT.clear();
        BasicFlowGraphMirror graph = new BasicFlowGraphMirror();
        VertexMirror generator = graph.addVertex("generator", Descriptors.newVertex(Generator.class));
        VertexMirror consumer = graph.addVertex("consumer", Descriptors.newVertex(BroadcastConsumer.class));
        PortMirror out = generator.addOutput("out", Descriptors.newBroadcastEdge(StringSerDe.class));
        PortMirror in = consumer.addInput("in", Descriptors.newBroadcastEdge(StringSerDe.class));
        graph.addEdge(out, in);
        FlowGraphExecutor executor = new FlowGraphExecutor(root, graph, conf, null);
        Collections.addAll(Generator.INPUT, "A", "B", "C");
        executor.run();
        assertThat(BroadcastConsumer.OUTPUT, containsInAnyOrder("A", "B", "C"));
        Generator.INPUT.clear();
        BroadcastConsumer.OUTPUT.clear();
    }

    /**
     * word count.
     * Single-mapper word count over a scatter-gather (shuffle) edge.
     * @throws Exception if failed
     */
    @Test
    public void wordcount() throws Exception {
        Map<String, Integer> results = wordcount(new String[] {
                "Hello"
        });
        assertThat(results.keySet(), containsInAnyOrder("hello"));
        assertThat(results, hasEntry("hello", 1));
    }

    // Runs a map -> reduce word-count graph over the given input lines and
    // returns the word -> count map. Clears the shared static state afterwards.
    private Map<String, Integer> wordcount(String[] lines) throws IOException, InterruptedException {
        WcMap.TEXT.clear();
        WcReduce.RESULTS.clear();
        Collections.addAll(WcMap.TEXT, lines);
        BasicFlowGraphMirror graph = new BasicFlowGraphMirror();
        VertexMirror map = graph.addVertex("map", Descriptors.newVertex(WcMap.class));
        VertexMirror reduce = graph.addVertex("reduce", Descriptors.newVertex(WcReduce.class));
        PortMirror mapOut = map.addOutput("out", Descriptors.newScatterGatherEdge(
                StringSerDe.class, StringSerDe.class,
                StringSerDe.class.getName()));
        PortMirror reduceIn = reduce.addInput("in", Descriptors.newScatterGatherEdge(
                StringSerDe.class, StringSerDe.class,
                StringSerDe.class.getName()));
        graph.addEdge(mapOut, reduceIn);
        FlowGraphExecutor executor = new FlowGraphExecutor(root, graph, conf,
                new MockBufferComparatorProvider());
        executor.run();
        return Lang.let(new HashMap<>(), results -> {
            WcReduce.RESULTS.forEach((k, v) -> results.put(k, v.get()));
            WcMap.TEXT.clear();
            WcReduce.RESULTS.clear();
        });
    }

    /**
     * word count.
     * Two mapper vertices feeding one reducer through the same shuffle input.
     * @throws Exception if failed
     */
    @Test
    public void wordcount2() throws Exception {
        Map<String, Integer> results = wordcount(
                new String[] {
                        "Hello1"
                },
                new String[] {
                        "Hello2"
                });
        assertThat(results.keySet(), containsInAnyOrder("hello1", "hello2"));
        assertThat(results, hasEntry("hello1", 1));
        assertThat(results, hasEntry("hello2", 1));
    }

    // Two-mapper variant: map1 and map2 both connect to the reducer's "in" port.
    private Map<String, Integer> wordcount(String[] l1, String[] l2) throws IOException, InterruptedException {
        WcMap.TEXT.clear();
        WcMap2.TEXT2.clear();
        Collections.addAll(WcMap.TEXT, l1);
        Collections.addAll(WcMap2.TEXT2, l2);
        WcReduce.RESULTS.clear();
        BasicFlowGraphMirror graph = new BasicFlowGraphMirror();
        VertexMirror map1 = graph.addVertex("map1", Descriptors.newVertex(WcMap.class));
        VertexMirror map2 = graph.addVertex("map2", Descriptors.newVertex(WcMap2.class));
        VertexMirror reduce = graph.addVertex("reduce", Descriptors.newVertex(WcReduce.class));
        PortMirror map1Out = map1.addOutput("out", Descriptors.newScatterGatherEdge(
                StringSerDe.class, StringSerDe.class,
                StringSerDe.class.getName()));
        PortMirror map2Out = map2.addOutput("out", Descriptors.newScatterGatherEdge(
                StringSerDe.class, StringSerDe.class,
                StringSerDe.class.getName()));
        PortMirror reduceIn = reduce.addInput("in", Descriptors.newScatterGatherEdge(
                StringSerDe.class, StringSerDe.class,
                StringSerDe.class.getName()));
        graph.addEdge(map1Out, reduceIn);
        graph.addEdge(map2Out, reduceIn);
        FlowGraphExecutor executor = new FlowGraphExecutor(root, graph, conf,
                new MockBufferComparatorProvider());
        executor.run();
        return Lang.let(new HashMap<>(), results -> {
            WcReduce.RESULTS.forEach((k, v) -> results.put(k, v.get()));
            WcMap.TEXT.clear();
            WcMap2.TEXT2.clear();
            WcReduce.RESULTS.clear();
        });
    }

    /**
     * Standalone vertex.
     * One task per INPUT element; each task appends its value to OUTPUT.
     */
    public static class Orphan implements VertexProcessor {

        // Test fixture channels: INPUT is filled by the test, OUTPUT collected by tasks
        // (synchronized because tasks may run concurrently).
        static final List<String> INPUT = new ArrayList<>();

        static final List<String> OUTPUT = Collections.synchronizedList(new ArrayList<>());

        @Override
        public Optional<? extends TaskSchedule> initialize(VertexProcessorContext context) {
            return Optionals.of(new BasicTaskSchedule(Lang.project(INPUT, BasicTaskInfo::new)));
        }

        @Override
        public TaskProcessor createTaskProcessor() throws IOException, InterruptedException {
            return context -> {
                String string = context.getTaskInfo()
                        .map(BasicTaskInfo.class::cast)
                        .map(info -> (String) info.getValue())
                        .orElseThrow(AssertionError::new);
                OUTPUT.add(string);
            };
        }
    }

    /**
     * Generator vertex.
     * One task per INPUT element; each task writes its value to the "out" port.
     */
    public static class Generator implements VertexProcessor {

        static final List<Object> INPUT = new ArrayList<>();

        @Override
        public Optional<? extends TaskSchedule> initialize(VertexProcessorContext context) {
            return Optionals.of(new BasicTaskSchedule(Lang.project(INPUT, BasicTaskInfo::new)));
        }

        @Override
        public TaskProcessor createTaskProcessor() throws IOException, InterruptedException {
            return context -> {
                try (ObjectWriter writer = (ObjectWriter) context.getOutput("out")) {
                    writer.putObject(context.getTaskInfo()
                            .map(BasicTaskInfo.class::cast)
                            .map(BasicTaskInfo::getValue)
                            .orElseThrow(AssertionError::new));
                }
            };
        }
    }

    /**
     * Consumer vertex.
     * Drains the "in" port per task and collects every object into OUTPUT.
     */
    public static class Consumer implements VertexProcessor {

        static final List<Object> OUTPUT = Collections.synchronizedList(new ArrayList<>());

        @Override
        public TaskProcessor createTaskProcessor() throws IOException, InterruptedException {
            return context -> {
                try (ObjectReader reader = (ObjectReader) context.getInput("in")) {
                    while (reader.nextObject()) {
                        OUTPUT.add(reader.getObject());
                    }
                }
            };
        }
    }

    /**
     * Broadcast consumer vertex.
     * Consumes the broadcast input entirely during initialize() and schedules no
     * tasks, so createTaskProcessor() must never be invoked.
     */
    public static class BroadcastConsumer implements VertexProcessor {

        static final List<Object> OUTPUT = Collections.synchronizedList(new ArrayList<>());

        @Override
        public Optional<? extends TaskSchedule> initialize(
                VertexProcessorContext context) throws IOException, InterruptedException {
            try (ObjectReader reader = (ObjectReader) context.getInput("in")) {
                while (reader.nextObject()) {
                    OUTPUT.add(reader.getObject());
                }
            }
            // Empty schedule: everything was handled above.
            return Optional.of(new BasicTaskSchedule());
        }

        @Override
        public TaskProcessor createTaskProcessor() throws IOException, InterruptedException {
            throw new UnsupportedOperationException();
        }
    }

    /**
     * Word count mapper.
     * Splits each input line on whitespace, lower-cases the tokens, and emits the
     * non-empty ones on the "out" port.
     */
    public static class WcMap implements VertexProcessor {

        static final List<String> TEXT = new ArrayList<>();

        @Override
        public Optional<? extends TaskSchedule> initialize(VertexProcessorContext context) {
            return Optionals.of(new BasicTaskSchedule(Lang.project(TEXT, BasicTaskInfo::new)));
        }

        @Override
        public TaskProcessor createTaskProcessor() throws IOException, InterruptedException {
            return context -> {
                String line = context.getTaskInfo()
                        .map(BasicTaskInfo.class::cast)
                        .map(info -> (String) info.getValue())
                        .orElseThrow(AssertionError::new);
                try (ObjectWriter writer = (ObjectWriter) context.getOutput("out")) {
                    Stream.of(line.split("\\s+"))
                            .map(String::trim)
                            .map(String::toLowerCase)
                            .filter(s -> !s.isEmpty())
                            // Lang.safe wraps the checked exception from putObject.
                            .forEach(s -> Lang.safe(() -> writer.putObject(s)));
                }
            };
        }
    }

    /**
     * Word count mapper.
     * Same mapping logic as {@link WcMap} but scheduled from its own TEXT2 list,
     * so two mapper vertices can run in one graph with distinct inputs.
     */
    public static class WcMap2 extends WcMap {

        static final List<String> TEXT2 = new ArrayList<>();

        @Override
        public Optional<? extends TaskSchedule> initialize(VertexProcessorContext context) {
            return Optionals.of(new BasicTaskSchedule(Lang.project(TEXT2, BasicTaskInfo::new)));
        }
    }

    /**
     * Word count reducer.
     * Counts occurrences per group from the shuffled input into RESULTS.
     */
    public static class WcReduce implements VertexProcessor {

        static final Map<String, AtomicInteger> RESULTS = new HashMap<>();

        @Override
        public TaskProcessor createTaskProcessor() throws IOException, InterruptedException {
            return context -> {
                try (GroupReader reader = (GroupReader) context.getInput("in")) {
                    while (reader.nextGroup()) {
                        while (reader.nextObject()) {
                            String s = (String) reader.getObject();
                            RESULTS.computeIfAbsent(s, it -> new AtomicInteger()).incrementAndGet();
                        }
                    }
                }
            };
        }
    }
}
/**
 * Copyright (c) 2009 Jozef Izso. All Rights Reserved.
 */
package net.izsak.sandcastle.api;

import net.izsak.sandcastle.ApiWriterContext;
import net.izsak.sandcastle.IApiNamer;
import net.izsak.sandcastle.LibraryInfo;
import nu.xom.Attribute;
import nu.xom.Element;

import com.sun.javadoc.ClassDoc;
import com.sun.javadoc.Doc;
import com.sun.javadoc.MemberDoc;
import com.sun.javadoc.PackageDoc;
import com.sun.javadoc.ParameterizedType;
import com.sun.javadoc.Type;
import com.sun.javadoc.TypeVariable;

/**
 * Base class for writers that translate a Javadoc {@link Doc} item into a
 * Sandcastle-style {@code <api>} XML element (XOM tree).
 *
 * <p>Subclasses override the {@code write*} hook methods ({@link #writeMembers()},
 * {@link #writeOtherData()}, {@link #writeContainersCore(Element)}) to emit the
 * parts specific to packages, types, or members.
 *
 * @author Jozef Izso
 *
 */
public class CodeApiWriterBase {

    private IApiNamer apiNamer;
    private LibraryInfo library;
    private Doc doc;
    // Root <api> element being built; children are appended via addElement().
    private Element api;

    /**
     * Creates a writer for the given Javadoc item.
     *
     * @param context supplies the {@link IApiNamer} and {@link LibraryInfo}
     * @param doc the Javadoc item this writer describes
     */
    public CodeApiWriterBase(ApiWriterContext context, Doc doc) {
        this.apiNamer = context.getApiNamer();
        this.library = context.getLibrary();
        this.doc = doc;

        this.api = new Element("api");
    }

    /**
     * @return the root {@code <api>} element built by this writer
     */
    public Element getApiElement() {
        return this.api;
    }

    public IApiNamer getApiNamer() {
        return this.apiNamer;
    }

    public LibraryInfo getLibrary() {
        return this.library;
    }

    protected Doc getDoc() {
        return this.doc;
    }

    /**
     * Template method: writes the id attribute, then apidata, other data,
     * members and containers, in that fixed order.
     */
    public void write() {
        this.write(this.getDoc());
        this.writeApiData();
        this.writeOtherData();
        this.writeMembers();
        this.writeContainers();
    }

    /**
     * Writes the {@code id} attribute (the fully qualified API name) on the
     * {@code <api>} element.
     *
     * @param doc the item to identify; must not be null
     */
    protected void write(Doc doc) {
        // NOTE(review): IllegalArgumentException would be the conventional type for a
        // bad argument, but callers may rely on IllegalStateException — left unchanged.
        if (doc == null)
            throw new IllegalStateException("Argument doc cannot be null.");

        String qname = this.getApiNamer().getApiName(doc);
        this.api.addAttribute(new Attribute("id", qname));
    }

    // Hook: subclasses emit their member list here; no-op by default.
    protected void writeMembers() {
    }

    protected void writeApiData() {
        this.writeApiData(this.getSimpleName(), this.getGroup(), this.getSubgroup());
    }

    protected void writeApiData(String simpleName, String group) {
        this.writeApiData(simpleName, group, null);
    }

    /**
     * Writes the {@code <apidata>} element with name, group and subgroup attributes.
     *
     * Note: reflection.xsd support 4th parameter: subSubGroup. This is
     * used for operators overloading and it is no applicable in Java.
     *
     * @param simpleName Simple name of the current code element.
     * @param group Can be one of the following types: namespace, type or member.
     * @param subGroup Subgroup defining concrete type of the code element.
     *        Can be on of the following values: class, structure, interface,
     *        enumeration, delegate, constructor, method, property, field,
     *        event.
     * @throws IllegalArgumentException if group or subGroup is not a recognized value
     */
    protected void writeApiData(String simpleName, String group, String subGroup) {
        if (!Validator.isValidateGroupName(group))
            throw new IllegalArgumentException("Argument group cannot have value "+ group +".");
        // Javadoc reports "enum"; Sandcastle's schema expects "enumeration".
        if ("enum".equals(subGroup))
            subGroup = "enumeration";
        if (subGroup != null && !Validator.isValidSubGroupName(subGroup))
            throw new IllegalArgumentException("Argument subGroup cannot have value "+ subGroup +".");

        Element elmApiData = new Element("apidata");

        // simple name
        elmApiData.addAttribute(new Attribute("name", simpleName));
        elmApiData.addAttribute(new Attribute("group", group));
        if (subGroup != null)
            elmApiData.addAttribute(new Attribute("subgroup", subGroup));

        this.addElement(elmApiData);
    }

    // Hook: subclasses emit extra element-specific data here; no-op by default.
    protected void writeOtherData() {
    }

    /**
     * Writes an {@code <elements>} list with one {@code <element api="..."/>}
     * child per member; members without an API name get an empty api attribute.
     *
     * @param members the member docs to list
     */
    protected void writeMembers(Doc[] members) {
        Element elmElements = new Element("elements");

        for (Doc member : members) {
            Element elm = new Element("element");
            String qname = this.getApiNamer().getApiName(member);
            if (qname == null)
                qname = "";
            elm.addAttribute(new Attribute("api", qname));
            elmElements.appendChild(elm);
        }

        this.addElement(elmElements);
    }

    /**
     * Writes the {@code <containers>} element holding the {@code <library>}
     * entry, then lets subclasses append further containers via
     * {@link #writeContainersCore(Element)}.
     */
    protected void writeContainers() {
        Element elmContainers = new Element("containers");

        Element elmLib = new Element("library");
        String name = this.library.getName();
        // The library name serves as both assembly and module identifier.
        elmLib.addAttribute(new Attribute("assembly", name));
        elmLib.addAttribute(new Attribute("module", name));
        elmLib.addAttribute(new Attribute("kind", "DynamicallyLinkedLibrary"));
        elmContainers.appendChild(elmLib);

        this.writeContainersCore(elmContainers);

        this.addElement(elmContainers);
    }

    // Hook: subclasses add e.g. namespace/type containers; no-op by default.
    protected void writeContainersCore(Element elmContainers) {
    }

    protected void addElement(Element element) {
        this.getApiElement().appendChild(element);
    }

    protected String getSimpleName() {
        return this.doc.name();
    }

    /**
     * Maps the current doc item to a Sandcastle group: type, member or namespace.
     *
     * @throws IllegalStateException if the item is none of the supported kinds
     */
    protected String getGroup() {
        if (this.doc.isClass() || this.doc.isInterface())
            return "type";
        if (this.doc instanceof MemberDoc)
            return "member";
        if (this.doc instanceof PackageDoc)
            return "namespace";

        throw new IllegalStateException("Group type for "+ this.doc.name() +" is not supported.");
    }

    /**
     * Maps the current doc item to a Sandcastle subgroup (field, constructor,
     * method, class, enum, interface).
     *
     * @throws IllegalStateException if the item is none of the supported kinds
     */
    protected String getSubgroup() {
        if (this.doc.isField())
            return "field";
        if (this.doc.isConstructor())
            return "constructor";
        if (this.doc.isMethod())
            return "method";
        // Exceptions are plain classes as far as the subgroup is concerned.
        if (this.doc.isOrdinaryClass() || this.doc.isException())
            return "class";
        if (this.doc.isEnum())
            return "enum";
        if (this.doc.isInterface())
            return "interface";

        throw new IllegalStateException("SubGroup type for "+ this.doc.name() +" is not supported.");
    }

    /**
     * Appends a {@code <namespace api="..."/>} entry for the given package to
     * the containers element.
     */
    protected void writeNamespaceToContainers(Element elmContainers, PackageDoc packageDoc) {
        String qname = this.getApiNamer().getPackageName(packageDoc);

        Element elmNamespace = new Element("namespace");
        elmNamespace.addAttribute(new Attribute("api", qname));
        elmContainers.appendChild(elmNamespace);
    }

    /**
     * Writes type information for {@code type} into {@code elmContainer},
     * dispatching to the array or simple form based on the type's dimension.
     * Null types are silently ignored.
     */
    protected void writeTypeInfo(Type type, Element elmContainer) {
        if (type == null) {
            return;
        }

        // dimension() is "" for non-array types, e.g. "[][]" for String[][].
        String dim = type.dimension();
        if (dim.length() == 0) {
            this.writeSimpleTypeInfo(type, elmContainer);
        } else {
            this.writeArrayTypeInfo(type, elmContainer);
        }
    }

    /**
     * Writes a {@code <type api="..." ref="..."/>} element; generic types also
     * get a nested {@code <specialization>} with their type arguments.
     */
    protected void writeSimpleTypeInfo(Type type, Element elmContainer) {
        String qname = this.getApiNamer().getApiName(type);
        // Primitives are value types; everything else is a reference type.
        boolean isRef = !type.isPrimitive();
        boolean isParametrized = type.asParameterizedType() != null;

        Element elmType = new Element("type");
        elmType.addAttribute(new Attribute("api", qname));
        elmType.addAttribute(new Attribute("ref", Boolean.toString(isRef)));

        if (isParametrized)
            this.writeGenericTypeSpecializationInfo(type, elmType);

        elmContainer.appendChild(elmType);
    }

    /**
     * Writes an {@code <arrayOf rank="N">} element wrapping the component type.
     */
    protected void writeArrayTypeInfo(Type type, Element elmContainer) {
        ClassDoc arrayType = type.asClassDoc();
        String dim = type.dimension();
        // HACK: very weird.... rank is derived by counting "]" separators in the
        // dimension string (e.g. "[][]" splits into 2 pieces). Depends on the exact
        // format the doclet API returns — TODO confirm for deeply nested arrays.
        String[] split = dim.split("]");
        int num = split.length;

        Element elmArrayOf = new Element("arrayOf");
        elmArrayOf.addAttribute(new Attribute("rank", Integer.toString(num)));

        // asClassDoc() is null for primitive component types, hence the branch.
        if (type.isPrimitive())
            this.writeSimpleTypeInfo(type, elmArrayOf);
        else
            this.writeTypeInfo(arrayType, elmArrayOf);

        elmContainer.appendChild(elmArrayOf);
    }

    /**
     * Writes the {@code <specialization>} element listing the generic type
     * arguments; wildcards ({@code ?}) become {@code <template name="?"/>}.
     */
    protected void writeGenericTypeSpecializationInfo(Type type, Element elmContainer) {
        ParameterizedType param = type.asParameterizedType();
        Type[] args = param.typeArguments();

        Element elmSpec = new Element("specialization");

        for (Type paramType : args) {
            if (paramType.typeName().equals("?")) {
                // Unbounded wildcard: emitted as an anonymous template parameter.
                Element elmTemplate = new Element("template");
                elmTemplate.addAttribute(new Attribute("name", "?"));
                elmSpec.appendChild(elmTemplate);
                continue;
            }
            TypeVariable typeVar = paramType.asTypeVariable();
            if (typeVar != null)
                this.writeTypeVariable(typeVar, elmSpec);
            else
                this.writeTypeInfo(paramType, elmSpec);
        }

        // Only attach the element when at least one argument was written.
        if (elmSpec.getChildCount() > 0)
            elmContainer.appendChild(elmSpec);
    }

    /**
     * Writes template information about generic typed variable.
     * {@code <template name="E" api="T:net.izsak.GenericsClass{T,E}"/>}
     *
     * @param typeVariable the type variable to describe
     * @param elmContainer Element to which the {@code <template>} will be added.
     * @return Returns the {@code <template>} element.
     */
    protected Element writeTypeVariable(TypeVariable typeVariable, Element elmContainer) {
        String name = typeVariable.typeName();

        Element elmTemplate = new Element("template");
        elmTemplate.addAttribute(new Attribute("name", name));
        this.writeTypeVariableType(typeVariable, elmTemplate);
        elmContainer.appendChild(elmTemplate);
        return elmTemplate;
    }

    /**
     * Adds the {@code api} attribute pointing at the declaring element of the
     * given type variable.
     */
    protected void writeTypeVariableType(TypeVariable typeVariable, Element elmTemplate) {
        Doc owner = typeVariable.owner();
        String qname = this.getApiNamer().getApiName(owner);
        elmTemplate.addAttribute(new Attribute("api", qname));
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.sqoop.manager;

import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.ParseException;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.sqoop.mapreduce.netezza.NetezzaExternalTableExportJob;
import org.apache.sqoop.mapreduce.netezza.NetezzaExternalTableImportJob;

import com.cloudera.sqoop.SqoopOptions;
import com.cloudera.sqoop.cli.RelatedOptions;
import com.cloudera.sqoop.util.ExportException;
import com.cloudera.sqoop.util.ImportException;

/**
 * Manages direct mode transfers from Netezza databases using the external
 * table options.
 */
public class DirectNetezzaManager extends NetezzaManager {
  public static final Log LOG = LogFactory.getLog(DirectNetezzaManager.class
      .getName());

  /** Configuration key for the Netezza log directory. */
  public static final String NETEZZA_LOG_DIR_OPT = "netezza.log.dir";
  /** Extra-args long option selecting the Netezza log directory. */
  public static final String NETEZZA_LOG_DIR_LONG_ARG = "log-dir";

  /** Configuration key for the external-table error threshold. */
  public static final String NETEZZA_ERROR_THRESHOLD_OPT =
      "netezza.error.threshold";
  /** Extra-args long option selecting the error threshold. */
  public static final String NETEZZA_ERROR_THRESHOLD_LONG_ARG = "max-errors";

  // Looks the table up in the Netezza system catalog by owner and name.
  private static final String QUERY_CHECK_DICTIONARY_FOR_TABLE =
      "SELECT 1 FROM _V_OBJECTS WHERE OWNER= ? "
      + " AND OBJNAME = ? and OBJTYPE = 'TABLE'";

  /** Configuration key carrying the null-value marker for external tables. */
  public static final String NETEZZA_NULL_VALUE =
      "netezza.exttable.null.value";

  public DirectNetezzaManager(SqoopOptions opts) {
    super(opts);
    try {
      handleNetezzaExtraArgs(options);
    } catch (ParseException pe) {
      throw new RuntimeException(pe.getMessage(), pe);
    }
  }

  /**
   * Validates the user-supplied null-value markers.  Netezza external tables
   * use a single null marker, so the string and non-string values must be
   * identical and at most 4 characters long (after unescaping).  A valid
   * marker is propagated into the configuration under
   * {@link #NETEZZA_NULL_VALUE}.
   *
   * @param nullStrValue    null marker for string columns
   * @param nullNonStrValue null marker for non-string columns
   * @throws IOException if the two values differ or the marker is too long
   */
  private void checkNullValueStrings(String nullStrValue,
      String nullNonStrValue) throws IOException {

    if (!StringUtils.equals(nullStrValue, nullNonStrValue)) {
      // BUGFIX: the message previously named non-existent options
      // "--input-string"/"--input-non-string"; the real option names are
      // --input-null-string and --input-null-non-string.
      throw new IOException(
        "Detected different values of --input-null-string and "
          + "--input-null-non-string "
          + "parameters. Netezza direct manager does not support that. Please "
          + "either use the same values or omit the --direct parameter.");
    }

    // Null string values cannot be more than 4 chars in length in the case
    // of Netezza external tables.
    if (nullStrValue != null) {
      nullStrValue = StringEscapeUtils.unescapeJava(nullStrValue);
      if (nullStrValue.length() > 4) {
        // BUGFIX: the check accepts up to 4 characters, but the message said
        // "less than 4"; the message now matches the check.
        throw new IOException(
          "Null string (and null non string) values for Netezza direct mode"
           + " manager must be no more than 4 characters in length");
      }
      options.getConf().set(NETEZZA_NULL_VALUE, nullStrValue);
    }
  }

  /**
   * Check the table if it is valid for export.  Parse the table like what we
   * do in the Oracle manager: an optional "owner." prefix overrides the
   * connection user as table owner.
   *
   * @throws IOException     if the table cannot be found in the catalog or a
   *                         SQL error occurs
   * @throws ExportException declared for caller compatibility
   */
  private void checkTable() throws IOException, ExportException {
    String tableOwner = this.options.getUsername();
    String tableName = this.options.getTableName();
    String shortTableName = tableName;
    int qualifierIndex = tableName.indexOf('.');
    if (qualifierIndex != -1) {
      tableOwner = tableName.substring(0, qualifierIndex);
      shortTableName = tableName.substring(qualifierIndex + 1);
    }

    Connection conn = null;
    PreparedStatement ps = null;
    ResultSet rs = null;
    try {
      try {
        conn = getConnection();
        ps = conn.prepareStatement(QUERY_CHECK_DICTIONARY_FOR_TABLE,
            ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
        ps.setString(1, tableOwner);
        ps.setString(2, shortTableName);
        rs = ps.executeQuery();
        if (!rs.next()) {
          String message = tableName
            + " is not a valid Netezza table.  "
            + "Please make sure that you have connected to the Netezza DB "
            + "and the table name is right.   The current values are\n\t"
            + " connection string : " + options.getConnectString()
            + "\n\t table owner : " + tableOwner + "\n\t table name : "
            + shortTableName;
          LOG.error(message);
          throw new IOException(message);
        }
      } finally {
        // Release statement-level resources, then the manager connection.
        if (rs != null) {
          rs.close();
        }
        if (ps != null) {
          ps.close();
        }
        close();
      }
    } catch (SQLException sqle) {
      throw new IOException("SQL exception checking table "
          + sqle.getMessage(), sqle);
    }
  }

  /**
   * Export data stored in HDFS into a table in a database.
   */
  public void exportTable(com.cloudera.sqoop.manager.ExportJobContext context)
      throws IOException, ExportException {
    options = context.getOptions();
    context.setConnManager(this);

    checkTable(); // Throws exception as necessary

    NetezzaExternalTableExportJob exporter = null;

    char qc = (char) options.getInputEnclosedBy();
    char ec = (char) options.getInputEscapedBy();

    checkNullValueStrings(options.getInNullStringValue(),
        options.getInNullNonStringValue());

    // A zero char means the option was not set; otherwise only a single or
    // double quote is accepted as enclosing character.
    if (qc > 0 && !(qc == '"' || qc == '\'')) {
      throw new ExportException("Input enclosed-by character must be '\"' "
         + "or ''' for netezza direct mode exports");
    }
    if (ec > 0 && ec != '\\') {
      throw new ExportException("Input escaped-by character must be '\\' "
          + "for netezza direct mode exports");
    }

    exporter = new NetezzaExternalTableExportJob(context);
    exporter.runExport();
  }

  /**
   * Import the table into HDFS by using Netezza external tables to pull out
   * the data from the database and upload the files directly to HDFS.
   */
  @Override
  public void importTable(com.cloudera.sqoop.manager.ImportJobContext context)
      throws IOException, ImportException {
    context.setConnManager(this);
    String tableName = context.getTableName();
    String jarFile = context.getJarFile();
    SqoopOptions options = context.getOptions();

    // External-table import needs a concrete table; free-form queries are
    // not supported in direct mode.
    if (null == tableName) {
      LOG.error("Netezza external table import does not support query imports.");
      LOG.error("Do not use --direct and --query together for Netezza.");
      throw new IOException("Null tableName for Netezza external table import.");
    }

    checkNullValueStrings(options.getNullStringValue(),
        options.getNullNonStringValue());

    char qc = options.getOutputEnclosedBy();
    char ec = options.getOutputEscapedBy();

    if (qc > 0 && !(qc == '"' || qc == '\'')) {
      throw new ImportException("Output enclosed-by character must be '\"' "
          + "or ''' for netezza direct mode imports");
    }
    if (ec > 0 && ec != '\\') {
      throw new ImportException("Output escaped-by character must be '\\' "
          + "for netezza direct mode exports");
    }

    NetezzaExternalTableImportJob importer = null;

    importer = new NetezzaExternalTableImportJob(options, context);

    // Direct Netezza Manager will use the datasliceid so no split columns
    // will be used.
    LOG.info("Beginning netezza fast path import");

    if (options.getFileLayout() != SqoopOptions.FileLayout.TextFile) {
      LOG.warn("File import layout " + options.getFileLayout()
          + " is not supported by");
      LOG.warn("Netezza direct import; import will proceed as text files.");
    }

    importer.runImport(tableName, jarFile, null, options.getConf());
  }

  /**
   * Builds the direct-mode extra options (error threshold, log directory).
   * Just add the options from NetezzaManager and ignore the setting for
   * direct mode access.
   *
   * @return the Netezza direct-mode options group
   */
  protected RelatedOptions getNetezzaExtraOpts() {
    RelatedOptions netezzaOpts =
        new RelatedOptions("Netezza Connector Direct mode options");

    netezzaOpts.addOption(OptionBuilder
        .withArgName(NETEZZA_ERROR_THRESHOLD_OPT).hasArg()
        .withDescription("Error threshold for the job")
        .withLongOpt(NETEZZA_ERROR_THRESHOLD_LONG_ARG).create());
    netezzaOpts.addOption(OptionBuilder.withArgName(NETEZZA_LOG_DIR_OPT)
        .hasArg().withDescription("Netezza log directory")
        .withLongOpt(NETEZZA_LOG_DIR_LONG_ARG).create());
    return netezzaOpts;
  }

  /**
   * Parses the connector's extra command-line arguments and copies the
   * recognized values into the job configuration.
   *
   * @param opts options carrying the extra args and the configuration
   * @throws ParseException if the extra arguments cannot be parsed
   */
  private void handleNetezzaExtraArgs(SqoopOptions opts)
      throws ParseException {

    Configuration conf = opts.getConf();

    String[] extraArgs = opts.getExtraArgs();

    RelatedOptions netezzaOpts = getNetezzaExtraOpts();
    CommandLine cmdLine = new GnuParser().parse(netezzaOpts, extraArgs, true);
    if (cmdLine.hasOption(NETEZZA_ERROR_THRESHOLD_LONG_ARG)) {
      int threshold = Integer.parseInt(cmdLine
          .getOptionValue(NETEZZA_ERROR_THRESHOLD_LONG_ARG));
      conf.setInt(NETEZZA_ERROR_THRESHOLD_OPT, threshold);
    }
    if (cmdLine.hasOption(NETEZZA_LOG_DIR_LONG_ARG)) {
      String dir = cmdLine.getOptionValue(NETEZZA_LOG_DIR_LONG_ARG);
      conf.set(NETEZZA_LOG_DIR_OPT, dir);
    }

    // Always true for Netezza direct mode access
    conf.setBoolean(NETEZZA_DATASLICE_ALIGNED_ACCESS_OPT, true);
  }

  /** Direct mode streams through external tables; no staging table is used. */
  @Override
  public boolean supportsStagingForExport() {
    return false;
  }

  @Override
  public boolean isORMFacilitySelfManaged() {
    // HCatalog jobs still need the ORM facility; plain direct jobs do not.
    if (options.getHCatTableName() != null) {
      return false;
    }
    return true;
  }

  @Override
  public boolean isDirectModeHCatSupported() {
    return true;
  }
}
package org.apache.rat.mp;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import org.apache.commons.io.IOUtils;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.rat.Defaults;
import org.apache.rat.ReportConfiguration;
import org.apache.rat.config.AddLicenseHeaders;
import org.apache.rat.config.ReportFormat;
import org.apache.rat.report.claim.ClaimStatistic;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;

/**
 * Run Rat to perform a violation check.
 */
@Mojo(name = "check", defaultPhase = LifecyclePhase.VALIDATE)
public class RatCheckMojo extends AbstractRatMojo {
    /**
     * Where to store the report.
     */
    @Parameter(property = "rat.outputFile", defaultValue = "${project.build.directory}/rat.txt")
    private File reportFile;

    /**
     * Output style of the report. Use "plain" (the default) for a plain text
     * report or "xml" for the raw XML report. Alternatively you can give the
     * path of an XSL transformation that will be applied on the raw XML to
     * produce the report written to the output file.
     */
    @Parameter(property = "rat.outputStyle", defaultValue = "plain")
    private String reportStyle;

    /**
     * Maximum number of files with unapproved licenses.
     */
    @Parameter(property = "rat.numUnapprovedLicenses", defaultValue = "0")
    private int numUnapprovedLicenses;

    /**
     * Whether to add license headers; possible values are
     * {@code forced}, {@code true}, and {@code false} (default).
     */
    @Parameter(property = "rat.addLicenseHeaders", defaultValue = "false")
    private String addLicenseHeaders;

    /**
     * Copyright message to add to license headers. This option is
     * ignored, unless {@code addLicenseHeaders} is set to {@code true},
     * or {@code forced}.
     */
    @Parameter(property = "rat.copyrightMessage")
    private String copyrightMessage;

    /**
     * Will ignore rat errors and display a log message if any.
     * Its use is NOT RECOMMENDED, but quite convenient on occasion.
     *
     * @since 0.9
     */
    @Parameter(property = "rat.ignoreErrors", defaultValue = "false")
    private boolean ignoreErrors;

    /**
     * Whether to output the names of files that have unapproved licenses to the
     * console.
     *
     * @since 0.12
     */
    @Parameter(property = "rat.consoleOutput", defaultValue = "false")
    private boolean consoleOutput;

    /**
     * Runs the Rat report into {@link #reportFile} and returns the collected
     * claim statistics.
     *
     * @return statistics gathered while generating the report
     * @throws MojoExecutionException if the report file cannot be written
     * @throws MojoFailureException   propagated from report creation
     */
    private ClaimStatistic getRawReport()
            throws MojoExecutionException, MojoFailureException {
        FileWriter fw = null;
        try {
            fw = new FileWriter(reportFile);
            final ClaimStatistic statistic = createReport(fw, getStyleSheet());
            // Close explicitly so a close failure surfaces as an error;
            // nulling the reference makes the finally-close a no-op then.
            fw.close();
            fw = null;
            return statistic;
        } catch (IOException e) {
            throw new MojoExecutionException(e.getMessage(), e);
        } finally {
            IOUtils.closeQuietly(fw);
        }
    }

    /**
     * Returns the XSL stylesheet to be used for formatting the report.
     *
     * @return report stylesheet, or <code>null</code> for raw XML
     * @throws MojoExecutionException if the stylesheet can not be found
     * @see #reportStyle
     */
    private InputStream getStyleSheet() throws MojoExecutionException {
        if (reportStyle == null || ReportFormat.PLAIN.is(reportStyle)) {
            return Defaults.getPlainStyleSheet();
        } else if (ReportFormat.XML.is(reportStyle)) {
            // null signals "no transformation": emit the raw XML report.
            return null;
        } else {
            // Any other value is interpreted as a path to a custom XSL file.
            try {
                return new FileInputStream(reportStyle);
            } catch (FileNotFoundException e) {
                throw new MojoExecutionException(
                        "Unable to find report stylesheet: " + reportStyle, e);
            }
        }
    }

    /**
     * Invoked by Maven to execute the Mojo.
     *
     * @throws MojoFailureException An error in the plugin configuration was detected.
     * @throws MojoExecutionException Another error occurred while executing the plugin.
     */
    public void execute() throws MojoExecutionException, MojoFailureException {
        if (skip) {
            getLog().info("RAT will not execute since it is configured to be skipped via system property 'rat.skip'.");
            return;
        }
        final File parent = reportFile.getParentFile();
        // mkdirs() returns false if the directory already exists, so also
        // accept an existing directory before treating this as a failure.
        if (!parent.mkdirs() && !parent.isDirectory()) {
            throw new MojoExecutionException("Could not create report parent directory " + parent);
        }
        final ClaimStatistic report = getRawReport();
        check(report);
    }

    /**
     * Evaluates the report statistics against the configured threshold and
     * fails the build (or only warns, when {@link #ignoreErrors} is set) if
     * too many files carry unapproved licenses.
     *
     * @param statistics statistics from the generated report
     * @throws MojoFailureException if the unapproved count exceeds
     *         {@link #numUnapprovedLicenses} and errors are not ignored
     */
    protected void check(ClaimStatistic statistics) throws MojoFailureException {
        if (numUnapprovedLicenses > 0) {
            getLog().info("You requested to accept " + numUnapprovedLicenses + " files with unapproved licenses.");
        }

        int numApproved = statistics.getNumApproved();
        getLog().info("Rat check: Summary over all files. Unapproved: " + statistics.getNumUnApproved() + //
                ", unknown: " + statistics.getNumUnknown() + //
                ", generated: " + statistics.getNumGenerated() + //
                ", approved: " + numApproved + //
                (numApproved > 0 ? " licenses." : " license."));

        if (numUnapprovedLicenses < statistics.getNumUnApproved()) {
            if (consoleOutput) {
                try {
                    getLog().warn(createReport(Defaults.getUnapprovedLicensesStyleSheet()).trim());
                } catch (MojoExecutionException e) {
                    getLog().warn("Unable to print the files with unapproved licenses to the console.");
                }
            }

            final String seeReport = " See RAT report in: " + reportFile;
            if (!ignoreErrors) {
                throw new RatCheckException("Too many files with unapproved license: " + statistics.getNumUnApproved() + seeReport);
            } else {
                getLog().warn("Rat check: " + statistics.getNumUnApproved() + " files with unapproved licenses." + seeReport);
            }
        }
    }

    /**
     * Extends the base configuration with the license-header-adding settings
     * derived from {@link #addLicenseHeaders} and {@link #copyrightMessage}.
     *
     * @return the fully populated report configuration
     * @throws MojoFailureException   if {@code addLicenseHeaders} has an invalid value
     * @throws MojoExecutionException propagated from the base configuration
     */
    @Override
    protected ReportConfiguration getConfiguration()
            throws MojoFailureException, MojoExecutionException {
        final ReportConfiguration configuration = super.getConfiguration();
        if (AddLicenseHeaders.FORCED.name().equalsIgnoreCase(addLicenseHeaders)) {
            configuration.setAddingLicenses(true);
            configuration.setAddingLicensesForced(true);
            configuration.setCopyrightMessage(copyrightMessage);
        } else if (AddLicenseHeaders.TRUE.name().equalsIgnoreCase(addLicenseHeaders)) {
            configuration.setAddingLicenses(true);
            configuration.setCopyrightMessage(copyrightMessage);
        } else if (AddLicenseHeaders.FALSE.name().equalsIgnoreCase(addLicenseHeaders)) {
            // Nothing to do
        } else {
            throw new MojoFailureException("Invalid value for addLicenseHeaders: Expected "
                    + AddLicenseHeaders.getValuesForHelp() + ", got " + addLicenseHeaders);
        }
        return configuration;
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.common.rounding;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;

import java.io.IOException;

/**
 * A strategy for rounding long values.
 */
public abstract class Rounding implements Streamable {

    /** Unique serialization id of this rounding implementation. */
    public abstract byte id();

    /**
     * Given a value, compute a key that uniquely identifies the rounded value although it is not
     * necessarily equal to the rounding value itself.
     */
    public abstract long roundKey(long value);

    /**
     * Compute the rounded value given the key that identifies it.
     */
    public abstract long valueForKey(long key);

    /**
     * Rounds the given value, equivalent to calling <code>roundValue(roundKey(value))</code>.
     *
     * @param value The value to round.
     * @return The rounded value.
     */
    public final long round(long value) {
        return valueForKey(roundKey(value));
    }

    /**
     * Given the rounded value (which was potentially generated by {@link #round(long)}, returns
     * the next rounding value. For example, with interval based rounding, if the interval is 3,
     * {@code nextRoundValue(6) = 9}.
     *
     * @param value The current rounding value
     * @return The next rounding value
     */
    public abstract long nextRoundingValue(long value);

    /**
     * Rounding strategy which is based on an interval
     *
     * {@code rounded = value - (value % interval) }
     */
    public static class Interval extends Rounding {

        final static byte ID = 0;

        private long interval;

        public Interval() { // for serialization
        }

        /**
         * Creates a new interval rounding.
         *
         * @param interval The interval
         */
        public Interval(long interval) {
            this.interval = interval;
        }

        @Override
        public byte id() {
            return ID;
        }

        /**
         * Computes the interval bucket key for {@code value}.  Negative values
         * are shifted before the division so that flooring (rather than
         * truncation toward zero) is applied.
         */
        public static long roundKey(long value, long interval) {
            if (value < 0) {
                return (value - interval + 1) / interval;
            } else {
                return value / interval;
            }
        }

        /** Maps a bucket key back to the lower bound of its interval. */
        public static long roundValue(long key, long interval) {
            return key * interval;
        }

        @Override
        public long roundKey(long value) {
            return roundKey(value, interval);
        }

        @Override
        public long valueForKey(long key) {
            // CONSISTENCY: delegate to the static helper instead of
            // duplicating the `key * interval` expression.
            return roundValue(key, interval);
        }

        @Override
        public long nextRoundingValue(long value) {
            assert value == round(value);
            return value + interval;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            interval = in.readVLong();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            out.writeVLong(interval);
        }
    }

    /**
     * Scales the input by a constant factor before delegating to a wrapped
     * rounding; keys and rounded values come from the delegate unchanged.
     */
    public static class FactorRounding extends Rounding {

        final static byte ID = 7;

        private Rounding rounding;

        private float factor;

        FactorRounding() { // for serialization
        }

        FactorRounding(Rounding rounding, float factor) {
            this.rounding = rounding;
            this.factor = factor;
        }

        @Override
        public byte id() {
            return ID;
        }

        @Override
        public long roundKey(long utcMillis) {
            return rounding.roundKey((long) (factor * utcMillis));
        }

        @Override
        public long valueForKey(long key) {
            return rounding.valueForKey(key);
        }

        @Override
        public long nextRoundingValue(long value) {
            return rounding.nextRoundingValue(value);
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            // BUGFIX: the delegate field is declared as Rounding and any
            // Rounding can be passed to the constructor and serialized, yet
            // this method used to downcast to TimeZoneRounding, which would
            // throw ClassCastException when deserializing a FactorRounding
            // wrapping e.g. an Interval.  Read it as a plain Rounding.
            rounding = Rounding.Streams.read(in);
            factor = in.readFloat();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            Rounding.Streams.write(rounding, out);
            out.writeFloat(factor);
        }
    }

    /**
     * Applies a fixed offset before rounding with the wrapped strategy and a
     * second fixed offset to the rounded result.
     */
    public static class PrePostRounding extends Rounding {

        final static byte ID = 8;

        private Rounding rounding;

        private long preOffset;

        private long postOffset;

        PrePostRounding() { // for serialization
        }

        public PrePostRounding(Rounding intervalRounding, long preOffset, long postOffset) {
            this.rounding = intervalRounding;
            this.preOffset = preOffset;
            this.postOffset = postOffset;
        }

        @Override
        public byte id() {
            return ID;
        }

        @Override
        public long roundKey(long value) {
            return rounding.roundKey(value + preOffset);
        }

        @Override
        public long valueForKey(long key) {
            return postOffset + rounding.valueForKey(key);
        }

        @Override
        public long nextRoundingValue(long value) {
            // Remove the post offset before asking the delegate, then re-add it.
            return postOffset + rounding.nextRoundingValue(value - postOffset);
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            rounding = Rounding.Streams.read(in);
            preOffset = in.readVLong();
            postOffset = in.readVLong();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            Rounding.Streams.write(rounding, out);
            out.writeVLong(preOffset);
            out.writeVLong(postOffset);
        }
    }

    /**
     * Serialization helpers: a rounding is written as its one-byte id
     * followed by its own payload, and read back by dispatching on the id.
     */
    public static class Streams {

        public static void write(Rounding rounding, StreamOutput out) throws IOException {
            out.writeByte(rounding.id());
            rounding.writeTo(out);
        }

        public static Rounding read(StreamInput in) throws IOException {
            Rounding rounding = null;
            byte id = in.readByte();
            switch (id) {
                case Interval.ID: rounding = new Interval(); break;
                case TimeZoneRounding.TimeTimeZoneRoundingFloor.ID: rounding = new TimeZoneRounding.TimeTimeZoneRoundingFloor(); break;
                case TimeZoneRounding.UTCTimeZoneRoundingFloor.ID: rounding = new TimeZoneRounding.UTCTimeZoneRoundingFloor(); break;
                case TimeZoneRounding.DayTimeZoneRoundingFloor.ID: rounding = new TimeZoneRounding.DayTimeZoneRoundingFloor(); break;
                case TimeZoneRounding.UTCIntervalTimeZoneRounding.ID: rounding = new TimeZoneRounding.UTCIntervalTimeZoneRounding(); break;
                case TimeZoneRounding.TimeIntervalTimeZoneRounding.ID: rounding = new TimeZoneRounding.TimeIntervalTimeZoneRounding(); break;
                case TimeZoneRounding.DayIntervalTimeZoneRounding.ID: rounding = new TimeZoneRounding.DayIntervalTimeZoneRounding(); break;
                case FactorRounding.ID: rounding = new FactorRounding(); break;
                case PrePostRounding.ID: rounding = new PrePostRounding(); break;
                default: throw new ElasticsearchException("unknown rounding id [" + id + "]");
            }
            rounding.readFrom(in);
            return rounding;
        }
    }
}
/*******************************************************************************
 * Copyright 2015, 2016 Junichi Tatemura
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *******************************************************************************/
package com.nec.strudel.tkvs.store.hbase;

import java.io.IOException;
import java.util.Map;

import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

import com.google.common.primitives.Longs;
import com.nec.strudel.tkvs.Key;
import com.nec.strudel.tkvs.Record;
import com.nec.strudel.tkvs.SimpleRecord;
import com.nec.strudel.tkvs.impl.CollectionBuffer;
import com.nec.strudel.tkvs.impl.KeyValueReader;
import com.nec.strudel.tkvs.impl.TransactionBaseImpl;
import com.nec.strudel.tkvs.impl.TransactionProfiler;

/**
 * Optimistic transaction over HBase: a whole entity group is stored in a
 * single row (identified by {@code rowid}), and a version-number cell in the
 * row is used as the compare-and-set guard at commit time.
 *
 * @author tatemura, Zheng Li (initial version)
 */
public class HBaseTransaction extends TransactionBaseImpl {
    // Table holding the entity group's row; closed when the transaction ends.
    private final HTableInterface htable;
    // Version number of the row observed when the transaction started
    // (0 when the row did not yet exist).
    private final long vnum;
    // Entity group name, used for profiler bookkeeping.
    private final String gName;
    // Row key of the entity group; all reads and writes target this one row.
    private final byte[] rowid;
    private final TransactionProfiler prof;

    /**
     * Starts a transaction on the given entity-group row and snapshots the
     * row's current version number for the optimistic commit check.
     *
     * @param gName entity group name
     * @param gKey  entity group key
     * @param rowid HBase row key for the group
     * @param table table the group's row lives in
     * @param prof  profiler receiving commit statistics
     * @throws IOException if reading the version cell fails
     */
    public HBaseTransaction(String gName, Key gKey, byte[] rowid,
            HTableInterface table, TransactionProfiler prof) throws IOException {
        super(gName, gKey, new HBaseReader(table, rowid), prof);
        this.rowid = rowid;
        //Get version number
        Get get = new Get(rowid);
        get.addColumn(HBaseStore.VERSIONCF, HBaseStore.VERQUALIFIER);
        Result res = table.get(get);
        // A missing row/cell means the group has never been written: version 0.
        this.vnum = res.isEmpty() ? 0L : Longs.fromByteArray(res.value());
        this.htable = table;
        this.gName = gName;
        this.prof = prof;
    }

    /**
     * Attempts to commit the buffered writes with an optimistic check on the
     * row's version cell, then closes the table.
     *
     * <p>Read-only transactions re-read the version cell and succeed only if
     * it is unchanged. Write transactions use HBase checkAndPut/checkAndDelete
     * against the version observed at start; the put path also bumps the
     * version cell to {@code vnum + 1}.
     *
     * <p>NOTE(review): caught IOExceptions below are only printed and
     * execution continues; in the read-only branch {@code res} would then
     * still be null and {@code res.isEmpty()} would throw a
     * NullPointerException — presumably unreachable in practice, but worth
     * confirming. Also, when both puts and deletes exist they are applied as
     * two separate check-and-mutate calls, which is not atomic as a pair.
     *
     * @return true if the commit succeeded, false on a version conflict
     */
    @Override
    public boolean commit() {
        prof.commitStart(gName);
        //Will only change only Row rowid because
        //one entity group is packed in one row
        Put put = new Put(rowid);
        Delete del = new Delete(rowid);
        for (CollectionBuffer b : buffers()) {
            Map<Key, Record> writes = b.getWrites();
            //collection name is different from group name
            String name = b.getName();
            for (Map.Entry<Key, Record> e : writes.entrySet()) {
                Record r = e.getValue();
                //for put
                if (r != null) {
                    put.add(HBaseStore.ENTITYCF,
                            e.getKey().toByteKey(name), r.toBytes());
                } else { // for delete
                    del.deleteColumn(HBaseStore.ENTITYCF,
                            e.getKey().toByteKey(name));
                }
            }
        }
        //do batch commit
        // checkAndPut/checkAndDelete compare against null when the row did
        // not exist at transaction start (vnum == 0).
        byte[] oldVnumBytes = vnum == 0 ? null : Bytes.toBytes(this.vnum);
        if (put.isEmpty() && del.isEmpty()) {
            //read only transaction, fail commit if version is changed
            Get get = new Get(rowid);
            get.addColumn(HBaseStore.VERSIONCF, HBaseStore.VERQUALIFIER);
            Result res = null;
            try {
                res = htable.get(get);
            } catch (IOException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
            closeTable();
            if (!res.isEmpty()) {
                long newvnum = Longs.fromByteArray(res.value());
                if (newvnum == this.vnum) {
                    prof.commitSuccess(gName);
                    return true;
                } else {
                    prof.commitFail(gName);
                    return false;
                }
            } else {
                // Row still absent: success only if it was absent at start too.
                if (this.vnum == 0L) {
                    prof.commitSuccess(gName);
                    return true;
                } else {
                    prof.commitFail(gName);
                    return false;
                }
            }
        } else {
            if (!put.isEmpty() && !del.isEmpty()) {
                // Both puts and deletes: first checkAndPut (bumping the
                // version), then checkAndDelete guarded by the new version.
                try {
                    put.add(HBaseStore.VERSIONCF, HBaseStore.VERQUALIFIER,
                            Bytes.toBytes(vnum + 1));
                    if (!htable.checkAndPut(this.rowid, HBaseStore.VERSIONCF,
                            HBaseStore.VERQUALIFIER, oldVnumBytes, put)) {
                        prof.commitFail(gName);
                        closeTable();
                        return false;
                    }
                } catch (IOException e1) {
                    // TODO Auto-generated catch block
                    e1.printStackTrace();
                }
                try {
                    if (!htable.checkAndDelete(this.rowid, HBaseStore.VERSIONCF,
                            HBaseStore.VERQUALIFIER,
                            Bytes.toBytes(vnum + 1), del)) {
                        prof.commitFail(gName);
                        closeTable();
                        return false;
                    }
                } catch (IOException e1) {
                    // TODO Auto-generated catch block
                    e1.printStackTrace();
                }
            } else if (del.isEmpty()) {
                // Puts only: single checkAndPut that also bumps the version.
                try {
                    put.add(HBaseStore.VERSIONCF, HBaseStore.VERQUALIFIER,
                            Bytes.toBytes(vnum + 1));
                    if (!htable.checkAndPut(this.rowid, HBaseStore.VERSIONCF,
                            HBaseStore.VERQUALIFIER, oldVnumBytes, put)) {
                        prof.commitFail(gName);
                        closeTable();
                        return false;
                    }
                } catch (IOException e1) {
                    // TODO Auto-generated catch block
                    e1.printStackTrace();
                }
            } else { //put is empty and del is not empty
                try {
                    if (!htable.checkAndDelete(this.rowid, HBaseStore.VERSIONCF,
                            HBaseStore.VERQUALIFIER, oldVnumBytes, del)) {
                        prof.commitFail(gName);
                        closeTable();
                        return false;
                    }
                } catch (IOException e1) {
                    // TODO Auto-generated catch block
                    e1.printStackTrace();
                }
            }
        }
        prof.commitSuccess(gName);
        closeTable();
        return true;
    }

    /** Closes the underlying table, logging (not propagating) any failure. */
    private void closeTable() {
        try {
            this.htable.close();
        } catch (IOException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }

    /**
     * Uncommitted-read view of the entity group: fetches a single entity cell
     * from the group's row by collection name and key.
     */
    static class HBaseReader implements KeyValueReader {
        private final HTableInterface htable;
        private final byte[] rowid;

        HBaseReader(HTableInterface htable, byte[] rowid) {
            this.htable = htable;
            this.rowid = rowid;
        }

        /**
         * Reads one entity record, or null if absent (or if the read failed —
         * NOTE(review): the IOException is only printed, so a read error is
         * indistinguishable from a missing record).
         */
        @Override
        public Record get(String name, Key key) {
            Get get = new Get(rowid);
            //Do we need to check version here?
            //Or we'll just wait until commit
            get.addColumn(HBaseStore.ENTITYCF, key.toByteKey(name));
            byte[] value = null;
            try {
                value = this.htable.get(get).value();
            } catch (IOException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
            if (value != null) {
                return SimpleRecord.create(value);
            } else {
                return null;
            }
        }
    }
}
/**
 * Copyright 2016 Nikita Koksharov
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package redis.clients.redisson;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.TimeUnit;

import redis.clients.redisson.api.RFuture;
import redis.clients.redisson.api.RSet;
import redis.clients.redisson.api.RSetMultimap;
import redis.clients.redisson.client.codec.Codec;
import redis.clients.redisson.client.protocol.RedisCommand;
import redis.clients.redisson.client.protocol.RedisCommands;
import redis.clients.redisson.client.protocol.RedisStrictCommand;
import redis.clients.redisson.client.protocol.convertor.BooleanAmountReplayConvertor;
import redis.clients.redisson.client.protocol.convertor.BooleanReplayConvertor;
import redis.clients.redisson.command.CommandAsyncExecutor;

import io.netty.buffer.ByteBuf;

/**
 * Set-based multimap backed by Redis.
 *
 * Storage layout (established by the Lua scripts below): one Redis hash at
 * {@code getName()} maps the encoded key to a hash string, and each key's
 * values live in a separate Redis set named {@code getValuesName(keyHash)}
 * (i.e. <code>'{' .. KEYS[1] .. '}:' .. hash</code> in the scripts, which
 * keeps key and value structures in the same cluster hash slot).
 *
 * @author Nikita Koksharov
 *
 * @param <K> key
 * @param <V> value
 */
public class RedissonSetMultimap<K, V> extends RedissonMultimap<K, V> implements RSetMultimap<K, V> {

    // SCARD reply (set size) converted to boolean: any count > 0 means "present".
    private static final RedisStrictCommand<Boolean> SCARD_VALUE = new RedisStrictCommand<Boolean>("SCARD", new BooleanAmountReplayConvertor());
    // SISMEMBER reply (0/1) converted to boolean.
    private static final RedisCommand<Boolean> SISMEMBER_VALUE = new RedisCommand<Boolean>("SISMEMBER", new BooleanReplayConvertor());

    RedissonSetMultimap(UUID id, CommandAsyncExecutor connectionManager, String name) {
        super(id, connectionManager, name);
    }

    RedissonSetMultimap(UUID id, Codec codec, CommandAsyncExecutor connectionManager, String name) {
        super(id, codec, connectionManager, name);
    }

    /**
     * Total number of key-value pairs: sums SCARD over every per-key values set.
     * HGETALL returns a flat field/value list, so even Lua indices are the
     * stored key hashes — hence the {@code i % 2 == 0} filter.
     */
    @Override
    public RFuture<Integer> sizeAsync() {
        return commandExecutor.evalReadAsync(getName(), codec, RedisCommands.EVAL_INTEGER,
                "local keys = redis.call('hgetall', KEYS[1]); " +
                "local size = 0; " +
                "for i, v in ipairs(keys) do " +
                    "if i % 2 == 0 then " +
                        "local name = '{' .. KEYS[1] .. '}:' .. v; " +
                        "size = size + redis.call('scard', name); " +
                    "end;" +
                "end; " +
                "return size; ",
                Arrays.<Object>asList(getName()));
    }

    /**
     * A key is present iff its values set is non-empty (SCARD > 0).
     */
    @Override
    public RFuture<Boolean> containsKeyAsync(Object key) {
        ByteBuf keyState = encodeMapKey(key);
        String keyHash = hashAndRelease(keyState);
        String setName = getValuesName(keyHash);

        return commandExecutor.readAsync(getName(), codec, SCARD_VALUE, setName);
    }

    /**
     * Scans every per-key values set and returns true on the first SISMEMBER hit.
     * O(number of keys) round trip executed server-side as one script.
     */
    @Override
    public RFuture<Boolean> containsValueAsync(Object value) {
        ByteBuf valueState = encodeMapValue(value);

        return commandExecutor.evalReadAsync(getName(), codec, RedisCommands.EVAL_BOOLEAN,
                "local keys = redis.call('hgetall', KEYS[1]); " +
                "for i, v in ipairs(keys) do " +
                    "if i % 2 == 0 then " +
                        "local name = '{' .. KEYS[1] .. '}:' .. v; " +
                        "if redis.call('sismember', name, ARGV[1]) == 1 then " +
                            "return 1; " +
                        "end;" +
                    "end;" +
                "end; " +
                "return 0; ",
                Arrays.<Object>asList(getName()), valueState);
    }

    /** True iff {@code value} is a member of the values set of {@code key}. */
    @Override
    public RFuture<Boolean> containsEntryAsync(Object key, Object value) {
        ByteBuf keyState = encodeMapKey(key);
        String keyHash = hashAndRelease(keyState);
        ByteBuf valueState = encodeMapValue(value);

        String setName = getValuesName(keyHash);
        return commandExecutor.readAsync(getName(), codec, SISMEMBER_VALUE, setName, valueState);
    }

    /**
     * Registers the key in the index hash, then SADDs the value.
     * Returns true iff the value was newly added (set semantics).
     */
    @Override
    public RFuture<Boolean> putAsync(K key, V value) {
        ByteBuf keyState = encodeMapKey(key);
        String keyHash = hash(keyState);
        ByteBuf valueState = encodeMapValue(value);

        String setName = getValuesName(keyHash);
        return commandExecutor.evalWriteAsync(getName(), codec, RedisCommands.EVAL_BOOLEAN,
                "redis.call('hset', KEYS[1], ARGV[1], ARGV[2]); " +
                "return redis.call('sadd', KEYS[2], ARGV[3]); ",
            Arrays.<Object>asList(getName(), setName), keyState, keyHash, valueState);
    }

    /**
     * SREMs the value; if the values set becomes empty the key is also removed
     * from the index hash, keeping the layout free of dangling keys.
     */
    @Override
    public RFuture<Boolean> removeAsync(Object key, Object value) {
        ByteBuf keyState = encodeMapKey(key);
        String keyHash = hash(keyState);
        ByteBuf valueState = encodeMapValue(value);

        String setName = getValuesName(keyHash);
        return commandExecutor.evalWriteAsync(getName(), codec, RedisCommands.EVAL_BOOLEAN,
                "local res = redis.call('srem', KEYS[2], ARGV[2]); " +
                "if res == 1 and redis.call('scard', KEYS[2]) == 0 then " +
                    "redis.call('hdel', KEYS[1], ARGV[1]); " +
                "end; " +
                "return res; ",
            Arrays.<Object>asList(getName(), setName), keyState, valueState);
    }

    /**
     * Bulk put: ARGV = [encodedKey, keyHash, value1, value2, ...]; the script
     * SADDs all values starting at ARGV[3] via unpack.
     */
    @Override
    public RFuture<Boolean> putAllAsync(K key, Iterable<? extends V> values) {
        List<Object> params = new ArrayList<Object>();
        ByteBuf keyState = encodeMapKey(key);
        params.add(keyState);
        String keyHash = hash(keyState);
        params.add(keyHash);
        for (Object value : values) {
            ByteBuf valueState = encodeMapValue(value);
            params.add(valueState);
        }

        String setName = getValuesName(keyHash);
        return commandExecutor.evalWriteAsync(getName(), codec, RedisCommands.EVAL_BOOLEAN_AMOUNT,
                "redis.call('hset', KEYS[1], ARGV[1], ARGV[2]); " +
                "return redis.call('sadd', KEYS[2], unpack(ARGV, 3, #ARGV)); ",
            Arrays.<Object>asList(getName(), setName), params.toArray());
    }

    /**
     * Live view over the values of one key, backed by the underlying Redis set.
     * Lifecycle operations (expire/rename/...) are disabled on the view because
     * they would desynchronize it from the index hash; deleteAsync is rerouted
     * through fastRemoveAsync so the index entry is removed as well.
     */
    @Override
    public RSet<V> get(final K key) {
        final ByteBuf keyState = encodeMapKey(key);
        final String keyHash = hashAndRelease(keyState);
        final String setName = getValuesName(keyHash);

        return new RedissonSet<V>(codec, commandExecutor, setName, null) {

            @Override
            public RFuture<Boolean> deleteAsync() {
                ByteBuf keyState = encodeMapKey(key);
                return RedissonSetMultimap.this.fastRemoveAsync(Arrays.<Object>asList(keyState), Arrays.<Object>asList(setName), RedisCommands.EVAL_BOOLEAN_AMOUNT);
            }

            @Override
            public RFuture<Boolean> clearExpireAsync() {
                throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
            }

            @Override
            public RFuture<Boolean> expireAsync(long timeToLive, TimeUnit timeUnit) {
                throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
            }

            @Override
            public RFuture<Boolean> expireAtAsync(long timestamp) {
                throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
            }

            @Override
            public RFuture<Long> remainTimeToLiveAsync() {
                throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
            }

            @Override
            public RFuture<Void> renameAsync(String newName) {
                throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
            }

            @Override
            public RFuture<Boolean> renamenxAsync(String newName) {
                throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
            }

        };
    }

    /** Snapshot of the values for {@code key}, narrowed to Set per RSetMultimap. */
    @Override
    public Set<V> getAll(K key) {
        return (Set<V>) super.getAll(key);
    }

    @Override
    public RFuture<Collection<V>> getAllAsync(K key) {
        ByteBuf keyState = encodeMapKey(key);
        String keyHash = hashAndRelease(keyState);
        String setName = getValuesName(keyHash);

        return commandExecutor.readAsync(getName(), codec, RedisCommands.SMEMBERS, setName);
    }

    /** Removes the key and returns the values it held (blocking wrapper). */
    @Override
    public Set<V> removeAll(Object key) {
        return (Set<V>) get(removeAllAsync(key));
    }

    /**
     * Drops the key from the index hash and deletes its values set,
     * returning the former members.
     */
    @Override
    public RFuture<Collection<V>> removeAllAsync(Object key) {
        ByteBuf keyState = encodeMapKey(key);
        String keyHash = hash(keyState);

        String setName = getValuesName(keyHash);
        return commandExecutor.evalWriteAsync(getName(), codec, RedisCommands.EVAL_SET,
                "redis.call('hdel', KEYS[1], ARGV[1]); " +
                "local members = redis.call('smembers', KEYS[2]); " +
                "redis.call('del', KEYS[2]); " +
                "return members; ",
            Arrays.<Object>asList(getName(), setName), keyState);
    }

    @Override
    public Set<Entry<K, V>> entries() {
        return (Set<Entry<K, V>>) super.entries();
    }

    /** Replaces all values of {@code key}, returning the previous values (blocking wrapper). */
    @Override
    public Set<V> replaceValues(K key, Iterable<? extends V> values) {
        return (Set<V>) get(replaceValuesAsync(key, values));
    }

    @Override
    Iterator<V> valuesIterator() {
        return new RedissonSetMultimapIterator<K, V, V>(RedissonSetMultimap.this, commandExecutor, codec) {
            @Override
            V getValue(V entry) {
                return (V) entry;
            }
        };
    }

    @Override
    RedissonSetMultimapIterator<K, V, Entry<K, V>> entryIterator() {
        return new RedissonSetMultimapIterator<K, V, Map.Entry<K, V>>(RedissonSetMultimap.this, commandExecutor, codec);
    }

    /**
     * Atomic replace: captures the old members, deletes the set, then SADDs the
     * new values (ARGV[3..]) — all in one script so readers never observe a
     * partially replaced set.
     */
    @Override
    public RFuture<Collection<V>> replaceValuesAsync(K key, Iterable<? extends V> values) {
        List<Object> params = new ArrayList<Object>();
        ByteBuf keyState = encodeMapKey(key);
        params.add(keyState);
        String keyHash = hash(keyState);
        params.add(keyHash);
        for (Object value : values) {
            ByteBuf valueState = encodeMapValue(value);
            params.add(valueState);
        }

        String setName = getValuesName(keyHash);
        return commandExecutor.evalWriteAsync(getName(), codec, RedisCommands.EVAL_SET,
                "redis.call('hset', KEYS[1], ARGV[1], ARGV[2]); " +
                "local members = redis.call('smembers', KEYS[2]); " +
                "redis.call('del', KEYS[2]); " +
                "redis.call('sadd', KEYS[2], unpack(ARGV, 3, #ARGV)); " +
                "return members; ",
            Arrays.<Object>asList(getName(), setName), params.toArray());
    }

}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.braket.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;

/**
 * <p>
 * A filter to use to search for tasks.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/braket-2019-09-01/SearchQuantumTasksFilter" target="_top">AWS
 *      API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class SearchQuantumTasksFilter implements Serializable, Cloneable, StructuredPojo {

    /** The name of the device used for the task. */
    private String name;
    /** An operator to use in the filter. */
    private String operator;
    /** The values to use for the filter. */
    private java.util.List<String> values;

    /**
     * Sets the name of the device used for the task.
     *
     * @param name
     *        The name of the device used for the task.
     */
    public void setName(String name) {
        this.name = name;
    }

    /**
     * Returns the name of the device used for the task.
     *
     * @return The name of the device used for the task.
     */
    public String getName() {
        return this.name;
    }

    /**
     * Fluent variant of {@link #setName(String)}.
     *
     * @param name
     *        The name of the device used for the task.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public SearchQuantumTasksFilter withName(String name) {
        setName(name);
        return this;
    }

    /**
     * Sets the operator to use in the filter.
     *
     * @param operator
     *        An operator to use in the filter.
     * @see SearchQuantumTasksFilterOperator
     */
    public void setOperator(String operator) {
        this.operator = operator;
    }

    /**
     * Returns the operator to use in the filter.
     *
     * @return An operator to use in the filter.
     * @see SearchQuantumTasksFilterOperator
     */
    public String getOperator() {
        return this.operator;
    }

    /**
     * Fluent variant of {@link #setOperator(String)}.
     *
     * @param operator
     *        An operator to use in the filter.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see SearchQuantumTasksFilterOperator
     */
    public SearchQuantumTasksFilter withOperator(String operator) {
        setOperator(operator);
        return this;
    }

    /**
     * Fluent setter accepting the typed enum; stores its string form.
     *
     * @param operator
     *        An operator to use in the filter.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see SearchQuantumTasksFilterOperator
     */
    public SearchQuantumTasksFilter withOperator(SearchQuantumTasksFilterOperator operator) {
        this.operator = operator.toString();
        return this;
    }

    /**
     * Returns the values to use for the filter.
     *
     * @return The values to use for the filter.
     */
    public java.util.List<String> getValues() {
        return values;
    }

    /**
     * Sets the values to use for the filter. A defensive copy of the supplied
     * collection is stored; passing {@code null} clears the field.
     *
     * @param values
     *        The values to use for the filter.
     */
    public void setValues(java.util.Collection<String> values) {
        this.values = (values == null) ? null : new java.util.ArrayList<String>(values);
    }

    /**
     * Appends the given values to the existing list (if any). Use
     * {@link #setValues(java.util.Collection)} or {@link #withValues(java.util.Collection)} if you want to override
     * the existing values.
     *
     * @param values
     *        The values to use for the filter.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public SearchQuantumTasksFilter withValues(String... values) {
        if (this.values == null) {
            setValues(new java.util.ArrayList<String>(values.length));
        }
        for (String value : values) {
            this.values.add(value);
        }
        return this;
    }

    /**
     * Fluent variant of {@link #setValues(java.util.Collection)} — replaces any existing values.
     *
     * @param values
     *        The values to use for the filter.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public SearchQuantumTasksFilter withValues(java.util.Collection<String> values) {
        setValues(values);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder text = new StringBuilder("{");
        if (getName() != null) {
            text.append("Name: ").append(getName()).append(",");
        }
        if (getOperator() != null) {
            text.append("Operator: ").append(getOperator()).append(",");
        }
        if (getValues() != null) {
            text.append("Values: ").append(getValues());
        }
        return text.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // Also rejects null, since null is never an instance of anything.
        if (!(obj instanceof SearchQuantumTasksFilter)) {
            return false;
        }
        SearchQuantumTasksFilter that = (SearchQuantumTasksFilter) obj;
        return fieldEquals(getName(), that.getName())
                && fieldEquals(getOperator(), that.getOperator())
                && fieldEquals(getValues(), that.getValues());
    }

    /** Null-safe equality on a single field pair. */
    private static boolean fieldEquals(Object left, Object right) {
        return (left == null) ? (right == null) : left.equals(right);
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + (getName() == null ? 0 : getName().hashCode());
        result = prime * result + (getOperator() == null ? 0 : getOperator().hashCode());
        result = prime * result + (getValues() == null ? 0 : getValues().hashCode());
        return result;
    }

    @Override
    public SearchQuantumTasksFilter clone() {
        try {
            return (SearchQuantumTasksFilter) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.braket.model.transform.SearchQuantumTasksFilterMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
/***
 * ASM: a very small and fast Java bytecode manipulation framework
 * Copyright (c) 2000-2011 INRIA, France Telecom
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of the copyright holders nor the names of its
 *    contributors may be used to endorse or promote products derived from
 *    this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */
package com.ctrip.framework.cs.asm.tree;

import java.util.ListIterator;
import java.util.NoSuchElementException;

import com.ctrip.framework.cs.asm.MethodVisitor;

/**
 * A doubly linked list of {@link AbstractInsnNode} objects. <i>This
 * implementation is not thread safe</i>.
 *
 * Each node carries its own prev/next links and a cached index; a node
 * therefore belongs to at most one list at a time (index == -1 means
 * "not in any list"). The {@link #cache} array is lazily built by
 * {@link #get} / {@link #indexOf} and invalidated by every mutator.
 */
public class InsnList {

    /**
     * The number of instructions in this list.
     */
    private int size;

    /**
     * The first instruction in this list. May be <tt>null</tt>.
     */
    private AbstractInsnNode first;

    /**
     * The last instruction in this list. May be <tt>null</tt>.
     */
    private AbstractInsnNode last;

    /**
     * A cache of the instructions of this list. This cache is used to improve
     * the performance of the {@link #get} method.
     */
    AbstractInsnNode[] cache;

    /**
     * Returns the number of instructions in this list.
     *
     * @return the number of instructions in this list.
     */
    public int size() {
        return size;
    }

    /**
     * Returns the first instruction in this list.
     *
     * @return the first instruction in this list, or <tt>null</tt> if the list
     *         is empty.
     */
    public AbstractInsnNode getFirst() {
        return first;
    }

    /**
     * Returns the last instruction in this list.
     *
     * @return the last instruction in this list, or <tt>null</tt> if the list
     *         is empty.
     */
    public AbstractInsnNode getLast() {
        return last;
    }

    /**
     * Returns the instruction whose index is given. This method builds a cache
     * of the instructions in this list to avoid scanning the whole list each
     * time it is called. Once the cache is built, this method run in constant
     * time. This cache is invalidated by all the methods that modify the list.
     *
     * @param index
     *            the index of the instruction that must be returned.
     * @return the instruction whose index is given.
     * @throws IndexOutOfBoundsException
     *             if (index &lt; 0 || index &gt;= size()).
     */
    public AbstractInsnNode get(final int index) {
        if (index < 0 || index >= size) {
            throw new IndexOutOfBoundsException();
        }
        if (cache == null) {
            cache = toArray();
        }
        return cache[index];
    }

    /**
     * Returns <tt>true</tt> if the given instruction belongs to this list. This
     * method always scans the instructions of this list until it finds the
     * given instruction or reaches the end of the list.
     *
     * @param insn
     *            an instruction.
     * @return <tt>true</tt> if the given instruction belongs to this list.
     */
    public boolean contains(final AbstractInsnNode insn) {
        AbstractInsnNode i = first;
        while (i != null && i != insn) {
            i = i.next;
        }
        return i != null;
    }

    /**
     * Returns the index of the given instruction in this list. This method
     * builds a cache of the instruction indexes to avoid scanning the whole
     * list each time it is called. Once the cache is built, this method run in
     * constant time. The cache is invalidated by all the methods that modify
     * the list.
     *
     * @param insn
     *            an instruction <i>of this list</i>.
     * @return the index of the given instruction in this list. <i>The result of
     *         this method is undefined if the given instruction does not belong
     *         to this list</i>. Use {@link #contains contains} to test if an
     *         instruction belongs to an instruction list or not.
     */
    public int indexOf(final AbstractInsnNode insn) {
        if (cache == null) {
            // toArray() also refreshes every node's index field.
            cache = toArray();
        }
        return insn.index;
    }

    /**
     * Makes the given visitor visit all of the instructions in this list.
     *
     * @param mv
     *            the method visitor that must visit the instructions.
     */
    public void accept(final MethodVisitor mv) {
        AbstractInsnNode insn = first;
        while (insn != null) {
            insn.accept(mv);
            insn = insn.next;
        }
    }

    /**
     * Returns an iterator over the instructions in this list.
     *
     * @return an iterator over the instructions in this list.
     */
    public ListIterator<AbstractInsnNode> iterator() {
        return iterator(0);
    }

    /**
     * Returns an iterator over the instructions in this list.
     *
     * @param index
     *            index of instruction for the iterator to start at
     *
     * @return an iterator over the instructions in this list.
     */
    @SuppressWarnings("unchecked")
    public ListIterator<AbstractInsnNode> iterator(int index) {
        return new InsnListIterator(index);
    }

    /**
     * Returns an array containing all of the instructions in this list.
     * As a side effect, refreshes each node's index field.
     *
     * @return an array containing all of the instructions in this list.
     */
    public AbstractInsnNode[] toArray() {
        int i = 0;
        AbstractInsnNode elem = first;
        AbstractInsnNode[] insns = new AbstractInsnNode[size];
        while (elem != null) {
            insns[i] = elem;
            elem.index = i++;
            elem = elem.next;
        }
        return insns;
    }

    /**
     * Replaces an instruction of this list with another instruction.
     *
     * @param location
     *            an instruction <i>of this list</i>.
     * @param insn
     *            another instruction, <i>which must not belong to any
     *            {@link InsnList}</i>.
     */
    public void set(final AbstractInsnNode location, final AbstractInsnNode insn) {
        AbstractInsnNode next = location.next;
        insn.next = next;
        if (next != null) {
            next.prev = insn;
        } else {
            last = insn;
        }
        AbstractInsnNode prev = location.prev;
        insn.prev = prev;
        if (prev != null) {
            prev.next = insn;
        } else {
            first = insn;
        }
        if (cache != null) {
            // In-place replacement keeps the cache valid; just swap the slot.
            int index = location.index;
            cache[index] = insn;
            insn.index = index;
        } else {
            insn.index = 0; // insn now belongs to an InsnList
        }
        location.index = -1; // i no longer belongs to an InsnList
        location.prev = null;
        location.next = null;
    }

    /**
     * Adds the given instruction to the end of this list.
     *
     * @param insn
     *            an instruction, <i>which must not belong to any
     *            {@link InsnList}</i>.
     */
    public void add(final AbstractInsnNode insn) {
        ++size;
        if (last == null) {
            first = insn;
            last = insn;
        } else {
            last.next = insn;
            insn.prev = last;
        }
        last = insn;
        cache = null;
        insn.index = 0; // insn now belongs to an InsnList
    }

    /**
     * Adds the given instructions to the end of this list.
     *
     * @param insns
     *            an instruction list, which is cleared during the process. This
     *            list must be different from 'this'.
     */
    public void add(final InsnList insns) {
        if (insns.size == 0) {
            return;
        }
        size += insns.size;
        if (last == null) {
            first = insns.first;
            last = insns.last;
        } else {
            AbstractInsnNode elem = insns.first;
            last.next = elem;
            elem.prev = last;
            last = insns.last;
        }
        cache = null;
        insns.removeAll(false);
    }

    /**
     * Inserts the given instruction at the begining of this list.
     *
     * @param insn
     *            an instruction, <i>which must not belong to any
     *            {@link InsnList}</i>.
     */
    public void insert(final AbstractInsnNode insn) {
        ++size;
        if (first == null) {
            first = insn;
            last = insn;
        } else {
            first.prev = insn;
            insn.next = first;
        }
        first = insn;
        cache = null;
        insn.index = 0; // insn now belongs to an InsnList
    }

    /**
     * Inserts the given instructions at the begining of this list.
     *
     * @param insns
     *            an instruction list, which is cleared during the process. This
     *            list must be different from 'this'.
     */
    public void insert(final InsnList insns) {
        if (insns.size == 0) {
            return;
        }
        size += insns.size;
        if (first == null) {
            first = insns.first;
            last = insns.last;
        } else {
            AbstractInsnNode elem = insns.last;
            first.prev = elem;
            elem.next = first;
            first = insns.first;
        }
        cache = null;
        insns.removeAll(false);
    }

    /**
     * Inserts the given instruction after the specified instruction.
     *
     * @param location
     *            an instruction <i>of this list</i> after which insn must be
     *            inserted.
     * @param insn
     *            the instruction to be inserted, <i>which must not belong to
     *            any {@link InsnList}</i>.
     */
    public void insert(final AbstractInsnNode location, final AbstractInsnNode insn) {
        ++size;
        AbstractInsnNode next = location.next;
        if (next == null) {
            last = insn;
        } else {
            next.prev = insn;
        }
        location.next = insn;
        insn.next = next;
        insn.prev = location;
        cache = null;
        insn.index = 0; // insn now belongs to an InsnList
    }

    /**
     * Inserts the given instructions after the specified instruction.
     *
     * @param location
     *            an instruction <i>of this list</i> after which the
     *            instructions must be inserted.
     * @param insns
     *            the instruction list to be inserted, which is cleared during
     *            the process. This list must be different from 'this'.
     */
    public void insert(final AbstractInsnNode location, final InsnList insns) {
        if (insns.size == 0) {
            return;
        }
        size += insns.size;
        AbstractInsnNode ifirst = insns.first;
        AbstractInsnNode ilast = insns.last;
        AbstractInsnNode next = location.next;
        if (next == null) {
            last = ilast;
        } else {
            next.prev = ilast;
        }
        location.next = ifirst;
        ilast.next = next;
        ifirst.prev = location;
        cache = null;
        insns.removeAll(false);
    }

    /**
     * Inserts the given instruction before the specified instruction.
     *
     * @param location
     *            an instruction <i>of this list</i> before which insn must be
     *            inserted.
     * @param insn
     *            the instruction to be inserted, <i>which must not belong to
     *            any {@link InsnList}</i>.
     */
    public void insertBefore(final AbstractInsnNode location, final AbstractInsnNode insn) {
        ++size;
        AbstractInsnNode prev = location.prev;
        if (prev == null) {
            first = insn;
        } else {
            prev.next = insn;
        }
        location.prev = insn;
        insn.next = location;
        insn.prev = prev;
        cache = null;
        insn.index = 0; // insn now belongs to an InsnList
    }

    /**
     * Inserts the given instructions before the specified instruction.
     *
     * @param location
     *            an instruction <i>of this list</i> before which the
     *            instructions must be inserted.
     * @param insns
     *            the instruction list to be inserted, which is cleared during
     *            the process. This list must be different from 'this'.
     */
    public void insertBefore(final AbstractInsnNode location, final InsnList insns) {
        if (insns.size == 0) {
            return;
        }
        size += insns.size;
        AbstractInsnNode ifirst = insns.first;
        AbstractInsnNode ilast = insns.last;
        AbstractInsnNode prev = location.prev;
        if (prev == null) {
            first = ifirst;
        } else {
            prev.next = ifirst;
        }
        location.prev = ilast;
        ilast.next = location;
        ifirst.prev = prev;
        cache = null;
        insns.removeAll(false);
    }

    /**
     * Removes the given instruction from this list.
     *
     * @param insn
     *            the instruction <i>of this list</i> that must be removed.
     */
    public void remove(final AbstractInsnNode insn) {
        --size;
        AbstractInsnNode next = insn.next;
        AbstractInsnNode prev = insn.prev;
        if (next == null) {
            if (prev == null) {
                first = null;
                last = null;
            } else {
                prev.next = null;
                last = prev;
            }
        } else {
            if (prev == null) {
                first = next;
                next.prev = null;
            } else {
                prev.next = next;
                next.prev = prev;
            }
        }
        cache = null;
        insn.index = -1; // insn no longer belongs to an InsnList
        insn.prev = null;
        insn.next = null;
    }

    /**
     * Removes all of the instructions of this list.
     *
     * @param mark
     *            if the instructions must be marked as no longer belonging to
     *            any {@link InsnList}.
     */
    void removeAll(final boolean mark) {
        if (mark) {
            AbstractInsnNode insn = first;
            while (insn != null) {
                AbstractInsnNode next = insn.next;
                insn.index = -1; // insn no longer belongs to an InsnList
                insn.prev = null;
                insn.next = null;
                insn = next;
            }
        }
        size = 0;
        first = null;
        last = null;
        cache = null;
    }

    /**
     * Removes all of the instructions of this list.
     */
    public void clear() {
        removeAll(false);
    }

    /**
     * Reset all labels in the instruction list. This method should be called
     * before reusing same instructions list between several
     * <code>ClassWriter</code>s.
     */
    public void resetLabels() {
        AbstractInsnNode insn = first;
        while (insn != null) {
            if (insn instanceof LabelNode) {
                ((LabelNode) insn).resetLabel();
            }
            insn = insn.next;
        }
    }

    // this class is not generified because it will create bridges
    @SuppressWarnings("rawtypes")
    private final class InsnListIterator implements ListIterator {

        AbstractInsnNode next;

        AbstractInsnNode prev;

        // last node returned by next()/previous(); target of remove()/set().
        AbstractInsnNode remove;

        InsnListIterator(int index) {
            if (index == size()) {
                next = null;
                prev = getLast();
            } else {
                next = get(index);
                prev = next.prev;
            }
        }

        public boolean hasNext() {
            return next != null;
        }

        public Object next() {
            if (next == null) {
                throw new NoSuchElementException();
            }
            AbstractInsnNode result = next;
            prev = result;
            next = result.next;
            remove = result;
            return result;
        }

        public void remove() {
            if (remove != null) {
                if (remove == next) {
                    next = next.next;
                } else {
                    prev = prev.prev;
                }
                InsnList.this.remove(remove);
                remove = null;
            } else {
                throw new IllegalStateException();
            }
        }

        public boolean hasPrevious() {
            return prev != null;
        }

        public Object previous() {
            // FIX: the ListIterator contract requires NoSuchElementException when
            // there is no previous element; the original dereferenced null (NPE).
            if (prev == null) {
                throw new NoSuchElementException();
            }
            AbstractInsnNode result = prev;
            next = result;
            prev = result.prev;
            remove = result;
            return result;
        }

        public int nextIndex() {
            if (next == null) {
                return size();
            }
            if (cache == null) {
                cache = toArray();
            }
            return next.index;
        }

        public int previousIndex() {
            if (prev == null) {
                return -1;
            }
            if (cache == null) {
                cache = toArray();
            }
            return prev.index;
        }

        public void add(Object o) {
            if (next != null) {
                InsnList.this.insertBefore(next, (AbstractInsnNode) o);
            } else if (prev != null) {
                InsnList.this.insert(prev, (AbstractInsnNode) o);
            } else {
                InsnList.this.add((AbstractInsnNode) o);
            }
            prev = (AbstractInsnNode) o;
            remove = null;
        }

        public void set(Object o) {
            if (remove != null) {
                InsnList.this.set(remove, (AbstractInsnNode) o);
                if (remove == prev) {
                    prev = (AbstractInsnNode) o;
                } else {
                    next = (AbstractInsnNode) o;
                }
            } else {
                throw new IllegalStateException();
            }
        }
    }
}
package dk.nversion;

import java.lang.Integer;
import java.lang.Object;
import java.lang.System;
import java.util.Map;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicIntegerArray;
import java.util.concurrent.atomic.AtomicLongArray;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.function.Supplier;

/**
 * Wraps {@link CompletableFuture}-producing calls and spreads them over a fixed number of
 * endpoints, with retries, failure-rate based suspension and optional active health monitoring.
 * Build instances via {@link #builder()}.
 */
public class LoadBalancer {
    // Configured
    private int retries = 0;
    private int endpointCount = 1;
    private int failureRateMaxFailures = 0;
    private TimeUnit failureRateTimeUnit;
    private long failureRateTime;
    private LoadBalancerPolicy policy = LoadBalancerPolicy.ROUND_ROBIN;
    private long suspensionTime = 0;
    private TimeUnit suspensionTimeUnit;

    // Runtime state
    private AtomicLongArray[] failureTimes;     // per endpoint: timestamps of recent failures
    private AtomicLongArray suspensionTimes;    // per endpoint: time until which it is suspended
    private AtomicInteger indexGenerator = new AtomicInteger(0);
    private AtomicLongArray successCounters;
    private AtomicLongArray failureCounters;
    private Map<Object, AtomicInteger> retryCounters = new ConcurrentHashMap<>();
    ScheduledExecutorService scheduledExecutorService;
    private long monitorCheckInterval;
    private TimeUnit monitorCheckTimeUnit;
    private int monitorUnhealthyThreshold;
    private int monitorHealthyThreshold;
    private AtomicIntegerArray monitorUnhealthyCounters;
    private AtomicIntegerArray monitorHealthyCounters;
    private Function<Integer, CompletableFuture<Boolean>> monitorFunction;
    private AtomicLongArray latencyTimes;       // per endpoint: last observed latency in ms

    /**
     * Wraps a call that does not care which endpoint it is routed to.
     *
     * @param function supplier producing the asynchronous call
     * @return a future completed with the call result, or exceptionally after all retries fail
     */
    public <T> CompletableFuture<T> wrap(Supplier<CompletableFuture<T>> function) {
        CompletableFuture<T> result = new CompletableFuture<>();
        retryHelper((index, retryCount) -> function.get(), result);
        return result;
    }

    /**
     * Wraps a call that receives the selected endpoint index.
     *
     * @param function function from endpoint index to the asynchronous call
     * @return a future completed with the call result, or exceptionally after all retries fail
     */
    public <T> CompletableFuture<T> wrap(Function<Integer, CompletableFuture<T>> function) {
        CompletableFuture<T> result = new CompletableFuture<>();
        retryHelper((index, retryCount) -> function.apply(index), result);
        return result;
    }

    /**
     * Wraps a call that receives both the selected endpoint index and the remaining retry count.
     *
     * @param function function from (endpoint index, retries left) to the asynchronous call
     * @return a future completed with the call result, or exceptionally after all retries fail
     */
    public <T> CompletableFuture<T> wrap(BiFunction<Integer, Integer, CompletableFuture<T>> function) {
        CompletableFuture<T> result = new CompletableFuture<>();
        retryHelper(function, result);
        return result;
    }

    /**
     * Executes one attempt of the wrapped call and re-invokes itself on failure while
     * retries remain. Completes {@code result} on success, final failure, or when every
     * endpoint is suspended.
     */
    private <T> void retryHelper(BiFunction<Integer, Integer, CompletableFuture<T>> function,
                                 CompletableFuture<T> result) {
        final int retries = doRetry(function);
        final int index = getNextIndex(function);
        final long start = System.currentTimeMillis();
        if (index > -1) {
            try {
                function.apply(index, retries).whenComplete((obj, ex) -> {
                    if (ex == null) {
                        registerSuccess(function, index, start);
                        result.complete(obj);
                    } else {
                        if (retries > 0) {
                            registerFailure(function, index, false);
                            retryHelper(function, result);
                        } else {
                            registerFailure(function, index, true);
                            result.completeExceptionally(ex);
                        }
                    }
                });
            } catch (Exception e) {
                // function.apply itself threw before producing a future
                if (retries > 0) {
                    registerFailure(function, index, false);
                    retryHelper(function, result);
                } else {
                    registerFailure(function, index, true);
                    result.completeExceptionally(e);
                }
            }
        } else {
            result.completeExceptionally(new LoadBalancerException("All backends suspended"));
        }
    }

    /**
     * Returns the number of retries remaining for this caller, creating the counter on
     * first use. The first call returns the configured retry budget; each subsequent call
     * decrements it.
     */
    private int doRetry(Object caller) {
        // FIX: the original containsKey/put sequence was a check-then-act race on the
        // concurrent map; computeIfAbsent makes counter creation atomic. Seeding with
        // retries + 1 and always decrementing preserves the original return sequence.
        AtomicInteger counter =
                retryCounters.computeIfAbsent(caller, k -> new AtomicInteger(this.retries + 1));
        return counter.decrementAndGet();
    }

    /**
     * Picks the next endpoint according to the configured policy, skipping suspended
     * endpoints. Returns -1 when every endpoint is suspended.
     */
    private int getNextIndex(Object caller) {
        long now = System.currentTimeMillis();
        if (this.policy == LoadBalancerPolicy.ROUND_ROBIN) {
            // Try all endpoints if some are suspended
            int count = endpointCount;
            int[] tried = new int[endpointCount];
            while (count > 0) {
                // Increment index and convert negative values to positive if need be
                int index = indexGenerator.getAndIncrement() % endpointCount;
                if (index < 0) {
                    index += endpointCount;
                }
                // Don't retry same index
                if (tried[index] == 0) {
                    // Return index if endpoint is not suspended
                    if (suspensionTimes.get(index) < now) {
                        return index;
                    }
                    tried[index] = 1;
                    count--;
                }
            }
        } else if (this.policy == LoadBalancerPolicy.LATENCY_LAST) {
            int index = -1;
            // FIX: 'smallest' was declared inside the loop and reset every iteration,
            // which made the policy pick the LAST non-suspended endpoint instead of the
            // one with the lowest recorded latency.
            long smallest = Long.MAX_VALUE;
            for (int i = 0; i < latencyTimes.length(); i++) {
                long current = latencyTimes.get(i);
                if (current < smallest && suspensionTimes.get(i) < now) {
                    smallest = current;
                    index = i;
                }
            }
            return index;
        }
        return -1;
    }

    /** Records a successful call: bumps counters, clears the retry budget, updates latency. */
    private void registerSuccess(Object caller, int index, long start) {
        successCounters.incrementAndGet(index);
        retryCounters.remove(caller);
        if (policy == LoadBalancerPolicy.LATENCY_LAST) {
            latencyTimes.set(index, System.currentTimeMillis() - start);
        }
    }

    /**
     * Records a failed call and, when the failure-rate window is saturated, suspends the
     * endpoint for the configured suspension time.
     *
     * @param last whether this was the final attempt (no retries left)
     */
    private void registerFailure(Object caller, int index, boolean last) {
        failureCounters.incrementAndGet(index);
        if (policy == LoadBalancerPolicy.LATENCY_LAST) {
            // Make sure we don't pick a backend with a failed request next time
            latencyTimes.set(index, Long.MAX_VALUE - 1);
        }
        if (suspensionTime > 0) {
            // Try to save the failure time in the failureTimes array
            long now = System.currentTimeMillis();
            long oldestFailureTime = now - failureRateTimeUnit.toMillis(failureRateTime);
            int i;
            for (i = 0; i < failureTimes[index].length(); i++) {
                if (failureTimes[index].get(i) < oldestFailureTime) {
                    failureTimes[index].set(i, now);
                    break;
                }
            }
            // If all failureTimes slots hold recent failures we can suspend the endpoint.
            // FIX: when no free slot was found the loop exits with i == length, which the
            // original condition (i == length - 1) missed — exactly the case where the
            // window is saturated and suspension is required.
            if (failureTimes[index].length() == 0 || i >= failureTimes[index].length() - 1) {
                suspensionTimes.set(index, now + suspensionTimeUnit.toMillis(suspensionTime));
            }
        }
        // Remove the caller
        if (last) {
            retryCounters.remove(caller);
        }
    }

    /**
     * Runs one health-check round over all endpoints, suspending endpoints after
     * {@code monitorUnhealthyThreshold} consecutive failures and un-suspending them after
     * {@code monitorHealthyThreshold} consecutive successes.
     */
    private void checkMonitors() {
        for (int i = 0; i < endpointCount; i++) {
            final int index = i;
            try {
                monitorFunction.apply(index).whenComplete((result, ex) -> {
                    // FIX: the original evaluated 'result && ex == null'; when the check
                    // completed exceptionally 'result' is null and unboxing it NPEs inside
                    // whenComplete. Test the exception first and unbox null-safely.
                    if (ex == null && Boolean.TRUE.equals(result)) {
                        // Unset suspension time when we hit the healthy threshold
                        if (monitorHealthyCounters.incrementAndGet(index) >= monitorHealthyThreshold) {
                            suspensionTimes.set(index, 0);
                            monitorHealthyCounters.set(index, 0);
                        }
                        // Reset unhealthy counter
                        if (monitorUnhealthyCounters.get(index) > 0) {
                            monitorUnhealthyCounters.set(index, 0);
                        }
                    } else {
                        registerUnhealthy(index);
                    }
                });
            } catch (Exception ex) {
                // Got exception trying to create the future
                registerUnhealthy(index);
            }
        }
    }

    /** Counts one failed health check; suspends the endpoint at the unhealthy threshold. */
    private void registerUnhealthy(int index) {
        // Set suspension time when we hit the unhealthy threshold
        if (monitorUnhealthyCounters.incrementAndGet(index) >= monitorUnhealthyThreshold) {
            suspensionTimes.set(index, Long.MAX_VALUE);
            monitorUnhealthyCounters.set(index, 0);
        }
        // Reset healthy counter
        if (monitorHealthyCounters.get(index) > 0) {
            monitorHealthyCounters.set(index, 0);
        }
    }

    /** Returns a builder for configuring and creating a {@link LoadBalancer}. */
    public static LoadBalancerBuilder builder() {
        return new LoadBalancerBuilder();
    }

    /** Fluent builder; call {@link #build()} last, after all configuration. */
    public static final class LoadBalancerBuilder {
        private LoadBalancer loadBalancer = new LoadBalancer();

        /** Number of retries attempted after the first failure. */
        public LoadBalancerBuilder setRetryCount(int retries) {
            loadBalancer.retries = retries;
            return this;
        }

        /** Number of backend endpoints to balance over. */
        public LoadBalancerBuilder setEndpointCount(int count) {
            loadBalancer.endpointCount = count;
            return this;
        }

        /**
         * Suspends an endpoint for {@code suspensionTime} once {@code maxFailures}
         * failures occur within the {@code failuresTime} window.
         */
        public LoadBalancerBuilder setMaxFailureRate(int maxFailures, long failuresTime,
                TimeUnit failuresTimeUnit, long suspensionTime, TimeUnit suspensionTimeUnit) {
            loadBalancer.failureRateMaxFailures = maxFailures;
            loadBalancer.failureRateTime = failuresTime;
            loadBalancer.failureRateTimeUnit = failuresTimeUnit;
            loadBalancer.suspensionTime = suspensionTime;
            loadBalancer.suspensionTimeUnit = suspensionTimeUnit;
            return this;
        }

        /** Endpoint-selection policy; defaults to ROUND_ROBIN. */
        public LoadBalancerBuilder setPolicy(LoadBalancerPolicy policy) {
            loadBalancer.policy = policy;
            return this;
        }

        /**
         * Enables active health monitoring: {@code function} is called for each endpoint
         * every {@code checkInterval} and its boolean future decides healthy/unhealthy.
         */
        public LoadBalancerBuilder setMonitor(long checkInterval, TimeUnit checkTimeUnit,
                int unhealthyThreshold, int healthyThreshold,
                Function<Integer, CompletableFuture<Boolean>> function) {
            loadBalancer.monitorCheckInterval = checkInterval;
            loadBalancer.monitorCheckTimeUnit = checkTimeUnit;
            loadBalancer.monitorUnhealthyThreshold = unhealthyThreshold;
            loadBalancer.monitorHealthyThreshold = healthyThreshold;
            loadBalancer.monitorFunction = function;
            return this;
        }

        // NOTE(review): both setCache overloads are unimplemented stubs carried over from
        // the original — they configure nothing beyond the monitor interval fields.
        public <K, V> LoadBalancerBuilder setCache(CompletableFutureCache<K, V> cache) {
            //ConcurrentHashMap<T,T> map = ;
            return this;
        }

        public <T> LoadBalancerBuilder setCache(long checkInterval, TimeUnit checkTimeUnit,
                Class<T> returnType,
                BiFunction<Integer, CompletableFutureCache, CompletableFuture<T>> function) {
            loadBalancer.monitorCheckInterval = checkInterval;
            loadBalancer.monitorCheckTimeUnit = checkTimeUnit;
            //loadBalancer.monitorFunction = function;
            return this;
        }

        /** Allocates per-endpoint state and starts the monitor (if configured). */
        public LoadBalancer build() {
            loadBalancer.suspensionTimes = new AtomicLongArray(loadBalancer.endpointCount);
            loadBalancer.successCounters = new AtomicLongArray(loadBalancer.endpointCount);
            loadBalancer.failureCounters = new AtomicLongArray(loadBalancer.endpointCount);
            // FIX: latencyTimes was never allocated, so the LATENCY_LAST policy (and
            // registerSuccess/registerFailure under it) threw NullPointerException.
            loadBalancer.latencyTimes = new AtomicLongArray(loadBalancer.endpointCount);
            loadBalancer.failureTimes = new AtomicLongArray[loadBalancer.endpointCount];
            for (int i = 0; i < loadBalancer.failureTimes.length; i++) {
                loadBalancer.failureTimes[i] =
                        new AtomicLongArray(loadBalancer.failureRateMaxFailures);
            }
            // Setup monitor if it has been set
            if (loadBalancer.monitorFunction != null) {
                loadBalancer.monitorUnhealthyCounters =
                        new AtomicIntegerArray(loadBalancer.endpointCount);
                loadBalancer.monitorHealthyCounters =
                        new AtomicIntegerArray(loadBalancer.endpointCount);
                loadBalancer.scheduledExecutorService =
                        Executors.newSingleThreadScheduledExecutor();
                loadBalancer.scheduledExecutorService.scheduleWithFixedDelay(() -> {
                    loadBalancer.checkMonitors();
                }, 0, loadBalancer.monitorCheckInterval, loadBalancer.monitorCheckTimeUnit);
            }
            return loadBalancer;
        }
    }
}
/* * Copyright 2017-present Open Networking Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.p4runtime.model; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.testing.EqualsTester; import org.junit.Test; import org.onosproject.net.pi.model.PiActionId; import org.onosproject.net.pi.model.PiActionModel; import org.onosproject.net.pi.model.PiActionParamId; import org.onosproject.net.pi.model.PiActionParamModel; import org.onosproject.net.pi.model.PiActionProfileId; import org.onosproject.net.pi.model.PiActionProfileModel; import org.onosproject.net.pi.model.PiCounterId; import org.onosproject.net.pi.model.PiCounterModel; import org.onosproject.net.pi.model.PiCounterType; import org.onosproject.net.pi.model.PiMatchFieldId; import org.onosproject.net.pi.model.PiMatchFieldModel; import org.onosproject.net.pi.model.PiMatchType; import org.onosproject.net.pi.model.PiMeterId; import org.onosproject.net.pi.model.PiMeterModel; import org.onosproject.net.pi.model.PiMeterType; import org.onosproject.net.pi.model.PiTableId; import org.onosproject.net.pi.model.PiTableModel; import org.onosproject.net.pi.model.PiTableType; import static org.onlab.junit.ImmutableClassChecker.assertThatClassIsImmutable; /** * Unit tests for P4TableModel class. 
 */
public class P4TableModelTest {

    /* Action Profiles */
    private static final PiActionProfileId PI_ACTION_PROFILE_ID_1 = PiActionProfileId.of("Action1");
    private static final PiActionProfileId PI_ACTION_PROFILE_ID_2 = PiActionProfileId.of("Action2");
    private static final PiTableId ACTION_PI_TABLE_ID_1 = PiTableId.of("ActionTable1");
    private static final PiTableId ACTION_PI_TABLE_ID_2 = PiTableId.of("ActionTable2");
    private static final ImmutableSet<PiTableId> ACTION_TABLES_1 = new ImmutableSet.Builder<PiTableId>()
            .add(ACTION_PI_TABLE_ID_1)
            .build();
    private static final ImmutableSet<PiTableId> ACTION_TABLES_2 = new ImmutableSet.Builder<PiTableId>()
            .add(ACTION_PI_TABLE_ID_2)
            .build();
    private static final boolean ACTION_HAS_SELECTOR_1 = true;
    private static final boolean ACTION_HAS_SELECTOR_2 = false;
    private static final long ACTION_MAX_SIZE_1 = 100;
    private static final long ACTION_MAX_SIZE_2 = 200;
    private static final int ACTION_MAX_GROUP_SIZE_1 = 10;
    private static final int ACTION_MAX_GROUP_SIZE_2 = 20;
    private static final PiActionProfileModel P4_ACTION_PROFILE_MODEL_1 =
            new P4ActionProfileModel(PI_ACTION_PROFILE_ID_1, ACTION_TABLES_1,
                                     ACTION_HAS_SELECTOR_1, ACTION_MAX_SIZE_1,
                                     ACTION_MAX_GROUP_SIZE_1);
    private static final PiActionProfileModel P4_ACTION_PROFILE_MODEL_2 =
            new P4ActionProfileModel(PI_ACTION_PROFILE_ID_2, ACTION_TABLES_2,
                                     ACTION_HAS_SELECTOR_2, ACTION_MAX_SIZE_2,
                                     ACTION_MAX_GROUP_SIZE_2);

    /* Counters */
    private static final PiCounterId PI_COUNTER_ID_1 = PiCounterId.of("Counter1");
    private static final PiCounterId PI_COUNTER_ID_2 = PiCounterId.of("Counter2");
    private static final PiCounterType PI_COUNTER_TYPE_1 = PiCounterType.DIRECT;
    private static final PiCounterType PI_COUNTER_TYPE_2 = PiCounterType.INDIRECT;
    private static final PiCounterModel.Unit COUNTER_UNIT_BYTES = P4CounterModel.Unit.BYTES;
    private static final PiCounterModel.Unit COUNTER_UNIT_PACKETS = P4CounterModel.Unit.PACKETS;
    private static final PiTableId COUNTER_PI_TABLE_ID_1 = PiTableId.of("CounterTable1");
    private static final PiTableId COUNTER_PI_TABLE_ID_2 = PiTableId.of("CounterTable2");
    private static final long COUNTER_SIZE_1 = 1000;
    private static final long COUNTER_SIZE_2 = 2000;
    private static final PiCounterModel P4_COUNTER_MODEL_1 =
            new P4CounterModel(PI_COUNTER_ID_1, PI_COUNTER_TYPE_1,
                               COUNTER_UNIT_BYTES, COUNTER_PI_TABLE_ID_1, COUNTER_SIZE_1);
    private static final PiCounterModel P4_COUNTER_MODEL_2 =
            new P4CounterModel(PI_COUNTER_ID_2, PI_COUNTER_TYPE_2,
                               COUNTER_UNIT_PACKETS, COUNTER_PI_TABLE_ID_2, COUNTER_SIZE_2);
    private static final ImmutableMap<PiCounterId, PiCounterModel> COUNTERS_1 =
            new ImmutableMap.Builder<PiCounterId, PiCounterModel>()
                    .put(PI_COUNTER_ID_1, P4_COUNTER_MODEL_1)
                    .build();
    private static final ImmutableMap<PiCounterId, PiCounterModel> COUNTERS_2 =
            new ImmutableMap.Builder<PiCounterId, PiCounterModel>()
                    .put(PI_COUNTER_ID_2, P4_COUNTER_MODEL_2)
                    .build();

    /* Meters */
    private static final PiMeterId PI_METER_ID_1 = PiMeterId.of("Meter1");
    private static final PiMeterId PI_METER_ID_2 = PiMeterId.of("Meter2");
    private static final PiMeterType PI_METER_TYPE_1 = PiMeterType.DIRECT;
    private static final PiMeterType PI_METER_TYPE_2 = PiMeterType.INDIRECT;
    private static final PiMeterModel.Unit METER_UNIT_BYTES = P4MeterModel.Unit.BYTES;
    private static final PiMeterModel.Unit METER_UNIT_PACKETS = P4MeterModel.Unit.PACKETS;
    private static final PiTableId METER_PI_TABLE_ID_1 = PiTableId.of("MeterTable1");
    private static final PiTableId METER_PI_TABLE_ID_2 = PiTableId.of("MeterTable2");
    private static final long METER_SIZE_1 = 1000;
    private static final long METER_SIZE_2 = 2000;
    private static final PiMeterModel P4_METER_MODEL_1 =
            new P4MeterModel(PI_METER_ID_1, PI_METER_TYPE_1,
                             METER_UNIT_BYTES, METER_PI_TABLE_ID_1, METER_SIZE_1);
    private static final PiMeterModel P4_METER_MODEL_2 =
            new P4MeterModel(PI_METER_ID_2, PI_METER_TYPE_2,
                             METER_UNIT_PACKETS, METER_PI_TABLE_ID_2, METER_SIZE_2);
    private static final ImmutableMap<PiMeterId, PiMeterModel> METERS_1 =
            new ImmutableMap.Builder<PiMeterId, PiMeterModel>()
                    .put(PI_METER_ID_1, P4_METER_MODEL_1)
                    .build();
    private static final ImmutableMap<PiMeterId, PiMeterModel> METERS_2 =
            new ImmutableMap.Builder<PiMeterId, PiMeterModel>()
                    .put(PI_METER_ID_2, P4_METER_MODEL_2)
                    .build();

    /* Match Fields */
    private static final PiMatchFieldId PI_MATCH_FIELD_ID_1 = PiMatchFieldId.of("MatchField1");
    private static final PiMatchFieldId PI_MATCH_FIELD_ID_2 = PiMatchFieldId.of("MatchField2");
    private static final int MATCH_FIELD_BIT_WIDTH_1 = 8;
    private static final int MATCH_FIELD_BIT_WIDTH_2 = 16;
    private static final PiMatchType PI_MATCH_TYPE_1 = PiMatchType.EXACT;
    private static final PiMatchType PI_MATCH_TYPE_2 = PiMatchType.TERNARY;
    private static final PiMatchFieldModel P4_MATCH_FIELD_MODEL_1 =
            new P4MatchFieldModel(PI_MATCH_FIELD_ID_1, MATCH_FIELD_BIT_WIDTH_1, PI_MATCH_TYPE_1);
    private static final PiMatchFieldModel P4_MATCH_FIELD_MODEL_2 =
            new P4MatchFieldModel(PI_MATCH_FIELD_ID_2, MATCH_FIELD_BIT_WIDTH_2, PI_MATCH_TYPE_2);
    private static final ImmutableMap<PiMatchFieldId, PiMatchFieldModel> MATCH_FIELDS_1 =
            new ImmutableMap.Builder<PiMatchFieldId, PiMatchFieldModel>()
                    .put(PI_MATCH_FIELD_ID_1, P4_MATCH_FIELD_MODEL_1)
                    .build();
    private static final ImmutableMap<PiMatchFieldId, PiMatchFieldModel> MATCH_FIELDS_2 =
            new ImmutableMap.Builder<PiMatchFieldId, PiMatchFieldModel>()
                    .put(PI_MATCH_FIELD_ID_2, P4_MATCH_FIELD_MODEL_2)
                    .build();

    /* Actions */
    private static final PiActionId PI_ACTION_ID_1 = PiActionId.of("Action1");
    private static final PiActionId PI_ACTION_ID_2 = PiActionId.of("Action2");
    private static final PiActionParamId PI_ACTION_PARAM_ID_1 = PiActionParamId.of("ActionParameter1");
    private static final PiActionParamId PI_ACTION_PARAM_ID_2 = PiActionParamId.of("ActionParameter2");
    private static final int ACTION_PARAM_BIT_WIDTH_1 = 8;
    private static final int ACTION_PARAM_BIT_WIDTH_2 = 16;
    private static final PiActionParamModel P4_ACTION_PARAM_MODEL_1 =
            new P4ActionParamModel(PI_ACTION_PARAM_ID_1, ACTION_PARAM_BIT_WIDTH_1);
    private static final PiActionParamModel P4_ACTION_PARAM_MODEL_2 =
            new P4ActionParamModel(PI_ACTION_PARAM_ID_2, ACTION_PARAM_BIT_WIDTH_2);
    private static final ImmutableMap<PiActionParamId, PiActionParamModel> PI_ACTION_PARAMS_1 =
            new ImmutableMap.Builder<PiActionParamId, PiActionParamModel>()
                    .put(PI_ACTION_PARAM_ID_1, P4_ACTION_PARAM_MODEL_1)
                    .build();
    private static final ImmutableMap<PiActionParamId, PiActionParamModel> PI_ACTION_PARAMS_2 =
            new ImmutableMap.Builder<PiActionParamId, PiActionParamModel>()
                    .put(PI_ACTION_PARAM_ID_2, P4_ACTION_PARAM_MODEL_2)
                    .build();
    private static final PiActionModel P4_ACTION_MODEL_1 =
            new P4ActionModel(PI_ACTION_ID_1, PI_ACTION_PARAMS_1);
    private static final PiActionModel P4_ACTION_MODEL_2 =
            new P4ActionModel(PI_ACTION_ID_2, PI_ACTION_PARAMS_2);
    private static final ImmutableMap<PiActionId, PiActionModel> ACTIONS_1 =
            new ImmutableMap.Builder<PiActionId, PiActionModel>()
                    .put(PI_ACTION_ID_1, P4_ACTION_MODEL_1)
                    .build();
    private static final ImmutableMap<PiActionId, PiActionModel> ACTIONS_2 =
            new ImmutableMap.Builder<PiActionId, PiActionModel>()
                    .put(PI_ACTION_ID_2, P4_ACTION_MODEL_2)
                    .build();

    /* Default Action */
    private static final PiActionId PI_ACTION_ID_DEFAULT_1 = PiActionId.of("DefaultAction1");
    private static final PiActionId PI_ACTION_ID_DEFAULT_2 = PiActionId.of("DefaultAction2");
    private static final PiActionParamId PI_ACTION_PARAM_ID_DEFAULT_1 =
            PiActionParamId.of("DefaultActionParameter1");
    private static final PiActionParamId PI_ACTION_PARAM_ID_DEFAULT_2 =
            PiActionParamId.of("DefaultActionParameter2");
    private static final int ACTION_PARAM_BIT_WIDTH_DEFAULT_1 = 8;
    private static final int ACTION_PARAM_BIT_WIDTH_DEFAULT_2 = 16;
    private static final PiActionParamModel P4_ACTION_PARAM_MODEL_DEFAULT_1 =
            new P4ActionParamModel(PI_ACTION_PARAM_ID_DEFAULT_1, ACTION_PARAM_BIT_WIDTH_DEFAULT_1);
    private static final PiActionParamModel P4_ACTION_PARAM_MODEL_DEFAULT_2 =
            new P4ActionParamModel(PI_ACTION_PARAM_ID_DEFAULT_2, ACTION_PARAM_BIT_WIDTH_DEFAULT_2);
    private static final ImmutableMap<PiActionParamId, PiActionParamModel> PI_ACTION_PARAMS_DEFAULT_1 =
            new ImmutableMap.Builder<PiActionParamId, PiActionParamModel>()
                    .put(PI_ACTION_PARAM_ID_DEFAULT_1, P4_ACTION_PARAM_MODEL_DEFAULT_1)
                    .build();
    private static final ImmutableMap<PiActionParamId, PiActionParamModel> PI_ACTION_PARAMS_DEFAULT_2 =
            new ImmutableMap.Builder<PiActionParamId, PiActionParamModel>()
                    .put(PI_ACTION_PARAM_ID_DEFAULT_2, P4_ACTION_PARAM_MODEL_DEFAULT_2)
                    .build();
    private static final PiActionModel P4_ACTION_MODEL_DEFAULT_1 =
            new P4ActionModel(PI_ACTION_ID_DEFAULT_1, PI_ACTION_PARAMS_DEFAULT_1);
    private static final PiActionModel P4_ACTION_MODEL_DEFAULT_2 =
            new P4ActionModel(PI_ACTION_ID_DEFAULT_2, PI_ACTION_PARAMS_DEFAULT_2);

    /* Table Models */
    private static final PiTableId PI_TABLE_ID_1 = PiTableId.of("Table1");
    private static final PiTableId PI_TABLE_ID_2 = PiTableId.of("Table2");
    private static final PiTableType PI_TABLE_TYPE_1 = PiTableType.DIRECT;
    private static final PiTableType PI_TABLE_TYPE_2 = PiTableType.INDIRECT;
    private static final long MAX_SIZE_1 = 10000;
    private static final long MAX_SIZE_2 = 20000;
    private static final boolean SUPPORT_AGING_1 = true;
    private static final boolean SUPPORT_AGING_2 = false;
    private static final boolean IS_CONST_TABLE_1 = true;
    private static final boolean IS_CONST_TABLE_2 = false;
    // Model 1 and SAME_AS_... are built from identical components: they must be equal.
    private static final PiTableModel P4_TABLE_MODEL_1 =
            new P4TableModel(PI_TABLE_ID_1, PI_TABLE_TYPE_1, P4_ACTION_PROFILE_MODEL_1,
                             MAX_SIZE_1, COUNTERS_1, METERS_1, SUPPORT_AGING_1,
                             MATCH_FIELDS_1, ACTIONS_1, P4_ACTION_MODEL_DEFAULT_1,
                             IS_CONST_TABLE_1, false);
    private static final PiTableModel SAME_AS_P4_TABLE_MODEL_1 =
            new P4TableModel(PI_TABLE_ID_1, PI_TABLE_TYPE_1, P4_ACTION_PROFILE_MODEL_1,
                             MAX_SIZE_1, COUNTERS_1, METERS_1, SUPPORT_AGING_1,
                             MATCH_FIELDS_1, ACTIONS_1, P4_ACTION_MODEL_DEFAULT_1,
                             IS_CONST_TABLE_1, false);
    private static final PiTableModel P4_TABLE_MODEL_2 =
            new P4TableModel(PI_TABLE_ID_2, PI_TABLE_TYPE_2, P4_ACTION_PROFILE_MODEL_2,
                             MAX_SIZE_2, COUNTERS_2, METERS_2, SUPPORT_AGING_2,
                             MATCH_FIELDS_2, ACTIONS_2, P4_ACTION_MODEL_DEFAULT_2,
                             IS_CONST_TABLE_2, false);
    // Same as model 2 except for the last (boolean) constructor argument:
    // must NOT be equal to model 2.
    private static final PiTableModel P4_TABLE_MODEL_3 =
            new P4TableModel(PI_TABLE_ID_2, PI_TABLE_TYPE_2, P4_ACTION_PROFILE_MODEL_2,
                             MAX_SIZE_2, COUNTERS_2, METERS_2, SUPPORT_AGING_2,
                             MATCH_FIELDS_2, ACTIONS_2, P4_ACTION_MODEL_DEFAULT_2,
                             IS_CONST_TABLE_2, true);

    /**
     * Checks that the P4TableModel class is immutable.
     */
    @Test
    public void testImmutability() {
        assertThatClassIsImmutable(P4TableModel.class);
    }

    /**
     * Checks the operation of equals(), hashCode() and toString() methods.
     */
    @Test
    public void testEquals() {
        new EqualsTester()
                .addEqualityGroup(P4_TABLE_MODEL_1, SAME_AS_P4_TABLE_MODEL_1)
                .addEqualityGroup(P4_TABLE_MODEL_2)
                .addEqualityGroup(P4_TABLE_MODEL_3)
                .testEquals();
    }
}
/**
 * Copyright 2009 DigitalPebble Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package com.digitalpebble.classification;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.TreeMap;
import java.util.regex.Pattern;

import com.digitalpebble.classification.Parameters.WeightingMethod;
import com.digitalpebble.classification.util.scorers.AttributeScorer;

/**
 * A lexicon contains all the information about the tokens used during learning
 * and ensures that the same mapping is used during classification
 */
public class Lexicon {

    // token form -> single-element array holding the attribute id
    private TreeMap<String, int[]> tokenForm2index;
    // attribute id -> single-element array holding the document frequency
    private TreeMap<Integer, int[]> index2docfreq;
    private int nextAttributeID = 1;
    private Parameters.WeightingMethod method_used = Parameters.WeightingMethod.FREQUENCY;
    private int docNum = 0;
    private boolean normalizeVector = true;
    private double[] linearWeight;
    private List<String> labels;
    /** a learner can specify which classifier to use* */
    private String classifierType;
    /** list of fields used by a corpus * */
    private Map<String, Integer> fields = new HashMap<String, Integer>();
    /** Custom weighting schemes for fields **/
    private Map<String, WeightingMethod> customWeights = new HashMap<String, WeightingMethod>();
    private int lastFieldId = -1;
    private AttributeScorer filter;

    // creates a new lexicon
    public Lexicon() {
        tokenForm2index = new TreeMap<String, int[]>();
        // FIX: was a raw TreeMap
        index2docfreq = new TreeMap<Integer, int[]>();
        labels = new ArrayList<String>();
    }

    // loads a new lexicon
    public Lexicon(String file) throws IOException {
        this();
        this.loadFromFile(file);
    }

    /**
     * Adjust the indices of the attributes so that maxAttributeID ==
     * getAttributesNum. Returns a Map containing the mapping between the old
     * indices and the new ones.
     **/
    public Map<Integer, Integer> compact() {
        Map<Integer, Integer> equiv = new HashMap<Integer, Integer>();
        TreeMap<Integer, int[]> newIndex2docfreq = new TreeMap<Integer, int[]>();
        // iterate on the map token -> Id and change the latter
        Iterator<Entry<String, int[]>> iter = tokenForm2index.entrySet().iterator();
        nextAttributeID = 1;
        while (iter.hasNext()) {
            Entry<String, int[]> entry = iter.next();
            int oldIndex = entry.getValue()[0];
            int newIndex = nextAttributeID;
            entry.setValue(new int[] { newIndex });
            // store the equivalence in the map
            equiv.put(oldIndex, newIndex);
            // populate the doc freq
            int[] docFreq = index2docfreq.get(oldIndex);
            newIndex2docfreq.put(newIndex, docFreq);
            nextAttributeID++;
        }
        // swap the doc freq
        index2docfreq = newIndex2docfreq;
        return equiv;
    }

    /**
     * Returns the weighting scheme used for a specific field or the default one
     * if nothing has been specified for it
     **/
    public WeightingMethod getMethod(String fieldName) {
        WeightingMethod method = this.customWeights.get(fieldName);
        if (method != null)
            return method;
        return this.method_used;
    }

    /** Returns the default weighting scheme **/
    public WeightingMethod getMethod() {
        return this.method_used;
    }

    /** Sets the default weighting scheme **/
    public void setMethod(WeightingMethod method) {
        this.method_used = method;
    }

    /** Sets the weighting scheme for a specific field **/
    public void setMethod(WeightingMethod method, String fieldName) {
        WeightingMethod existingmethod = this.customWeights.get(fieldName);
        if (existingmethod == null) {
            this.customWeights.put(fieldName, method);
            return;
        }
        // already one specified : check that it is the same as the one we have
        if (!method.equals(existingmethod))
            throw new RuntimeException("Already set weight of field "
                    + fieldName + " to " + existingmethod.toString());
    }

    public int getDocNum() {
        return this.docNum;
    }

    public int getLabelNum() {
        return this.labels.size();
    }

    /**
     * Returns the id of a field, or -1 when it is unknown and {@code create}
     * is false. When {@code create} is true an unknown field is registered
     * with the next available id.
     */
    public Integer getFieldID(String fieldName, boolean create) {
        Integer id = fields.get(fieldName);
        if (id == null) {
            // field does not exist
            if (!create)
                return Integer.valueOf(-1); // FIX: was deprecated new Integer(-1)
            fields.put(fieldName, ++lastFieldId);
            return Integer.valueOf(lastFieldId);
        }
        return id;
    }

    /** Returns the field names, positioned by their field id. */
    public String[] getFields() {
        String[] ff = new String[fields.size()];
        for (Entry<String, Integer> entry : fields.entrySet()) {
            ff[entry.getValue().intValue()] = entry.getKey();
        }
        return ff;
    }

    public String[] getLabels() {
        String[] labs = new String[labels.size()];
        for (int l = 0; l < labels.size(); l++)
            labs[l] = labels.get(l);
        return labs;
    }

    public void incrementDocCount() {
        this.docNum++;
    }

    /**
     * returns the position of a given tokenform or -1 if the tokenform is
     * unknown or has been filtered out
     *
     * @param tokenForm
     * @return
     */
    public int getIndex(String tokenForm) {
        int[] index = tokenForm2index.get(tokenForm);
        if (index == null)
            return -1;
        return index[0];
    }

    /***************************************************************************
     * Returns the document frequency of a term in the collection or 0 if the
     * term is unknown or has been filtered
     **************************************************************************/
    public int getDocFreq(int term) {
        int[] docfreq = this.index2docfreq.get(Integer.valueOf(term));
        if (docfreq == null)
            return 0;
        return docfreq[0];
    }

    /**
     * Removes terms whose document frequency is below {@code mindn} or above
     * {@code maxdocs}.
     */
    public void pruneTermsDocFreq(int mindn, int maxdocs) {
        // iterate on the terms and remove them if they are below or above
        // the expected number of documents
        List<String> terms2remove = new ArrayList<String>();
        for (String term : this.tokenForm2index.keySet()) {
            int[] index = this.tokenForm2index.get(term);
            // get the docFreq
            int[] docfreq = this.index2docfreq.get(Integer.valueOf(index[0]));
            if ((docfreq[0] < mindn) || (docfreq[0] > maxdocs)) {
                // remove it!
                terms2remove.add(term);
            }
        }
        // remove after iteration to avoid ConcurrentModificationException
        for (String term : terms2remove) {
            int[] index = this.tokenForm2index.remove(term);
            this.index2docfreq.remove(Integer.valueOf(index[0]));
        }
    }

    /** Keep the top n attributes according to an AttributeFilter* */
    public void applyAttributeFilter(AttributeScorer filter, int rank) {
        if (filter == null)
            return;
        if (rank >= this.getAttributesNum())
            return;
        // get the threshold
        double threshold = filter.getValueForRank(rank);
        // iterate on the attributes and remove them if their score is below
        // the threshold
        List<String> terms2remove = new ArrayList<String>();
        for (String term : this.tokenForm2index.keySet()) {
            int[] index = this.tokenForm2index.get(term);
            // get the score
            // TODO what if we are getting -1
            if (filter.getScore(index[0]) < threshold)
                terms2remove.add(term);
        }
        for (String term : terms2remove) {
            int[] index = this.tokenForm2index.remove(term);
            this.index2docfreq.remove(Integer.valueOf(index[0]));
        }
    }

    // creates an entry for the token
    // called from Document
    public int createIndex(String tokenForm) {
        int[] index = tokenForm2index.get(tokenForm);
        if (index == null) {
            index = new int[] { nextAttributeID };
            tokenForm2index.put(tokenForm, index);
            nextAttributeID++;
        }
        // add information about number of documents for the term
        Integer integ = Integer.valueOf(index[0]);
        int[] docfreq = this.index2docfreq.get(integ);
        if (docfreq == null) {
            docfreq = new int[] { 0 };
            index2docfreq.put(integ, docfreq);
        }
        docfreq[0]++;
        return index[0];
    }

    /**
     * Loads the lexicon from its serialized form. The header lines are: doc
     * count, weighting method, normalization flag, classifier type, labels,
     * fields; then one tab-separated line per token (form, index, doc freq).
     */
    private void loadFromFile(String filename) throws IOException {
        File file = new File(filename);
        // FIX: try-with-resources — the reader used to leak when a parse
        // exception was thrown before reader.close() was reached
        try (BufferedReader reader = new BufferedReader(new FileReader(file))) {
            String line = null;
            this.docNum = Integer.parseInt(reader.readLine());
            this.method_used = Parameters.WeightingMethod.methodFromString(reader
                    .readLine());
            this.normalizeVector = Boolean.parseBoolean(reader.readLine());
            this.classifierType = reader.readLine();
            this.labels = Arrays.asList(reader.readLine().split(" "));
            String[] tmp = reader.readLine().split(" ");
            for (String f : tmp) {
                // see if there is a custom weight for it
                String[] fieldTokens = f.split(":");
                String field_name = fieldTokens[0];
                if (fieldTokens.length > 1) {
                    WeightingMethod method = Parameters.WeightingMethod
                            .methodFromString(fieldTokens[1]);
                    customWeights.put(field_name, method);
                }
                getFieldID(field_name, true);
            }
            int highestID = 0;
            Pattern tab = Pattern.compile("\t");
            while ((line = reader.readLine()) != null) {
                String[] content_pos = tab.split(line);
                int index = Integer.parseInt(content_pos[1]);
                if (index > highestID)
                    highestID = index;
                int docs = Integer.parseInt(content_pos[2]);
                int[] aindex = new int[] { index };
                int[] adocs = new int[] { docs };
                this.tokenForm2index.put(content_pos[0], aindex);
                this.index2docfreq.put(Integer.valueOf(index), adocs);
            }
            this.nextAttributeID = highestID + 1;
        }
    }

    /**
     * Serializes the lexicon to a file in the format read back by
     * {@link #loadFromFile(String)}.
     */
    public void saveToFile(String filename) throws IOException {
        File file = new File(filename);
        // FIX: try-with-resources — the writer used to leak on I/O errors
        try (BufferedWriter writer = new BufferedWriter(new FileWriter(file))) {
            // saves the number of documents in the corpus
            writer.write(this.docNum + "\n");
            // saves the method used
            writer.write(this.method_used.toString() + "\n");
            // saves the normalization
            writer.write(this.normalizeVector + "\n");
            // saves the classifier into
            writer.write(this.classifierType + "\n");
            // saves the list of labels
            for (String label : this.labels) {
                writer.write(label + " ");
            }
            writer.write("\n");
            // save the field names (possibly with non default scheme)
            for (String fname : this.getFields()) {
                writer.write(fname);
                WeightingMethod method = customWeights.get(fname);
                if (method != null)
                    writer.write(":" + method.name());
                writer.write(" ");
            }
            writer.write("\n");
            // dump all token_forms one by one
            for (String key : this.tokenForm2index.keySet()) {
                int indexTerm = this.tokenForm2index.get(key)[0];
                int docfreq = this.getDocFreq(indexTerm);
                // dumps the weight of the term
                // or skip the term if it has a weight of 0
                String weight = "";
                if (linearWeight != null) {
                    if (indexTerm >= linearWeight.length
                            || linearWeight[indexTerm] == 0)
                        continue;
                    weight = "\t" + linearWeight[indexTerm];
                }
                String score = "";
                if (filter != null) {
                    score = "\t" + filter.getScore(indexTerm);
                }
                writer.write(key + "\t" + indexTerm + "\t" + docfreq + weight
                        + score + "\n");
            }
        }
    }

    public boolean isNormalizeVector() {
        return normalizeVector;
    }

    /**
     * contribution of the attributes to the model used by linear models in
     * libSVM or svmlight
     */
    public void setLinearWeight(double[] linearWeight) {
        this.linearWeight = linearWeight;
    }

    public void setNormalizeVector(boolean normalizeVector) {
        this.normalizeVector = normalizeVector;
    }

    /**
     * Returns the position of a label (normalized to lower case with spaces
     * replaced by underscores), registering it when unseen.
     */
    public int getLabelIndex(String label) {
        label = label.toLowerCase();
        label = label.replace(' ', '_');
        int position = this.labels.indexOf(label);
        if (position != -1)
            return position;
        // label is already lower-cased at this point
        this.labels.add(label);
        return this.labels.size() - 1;
    }

    /** Return a map with Integers as keys and attribute labels as value* */
    public Map<Integer, String> getInvertedIndex() {
        TreeMap<Integer, String> inverted = new TreeMap<Integer, String>();
        for (String key : this.tokenForm2index.keySet()) {
            int[] index = tokenForm2index.get(key);
            inverted.put(Integer.valueOf(index[0]), key);
        }
        return inverted;
    }

    public String getLabel(int index) {
        return this.labels.get(index);
    }

    protected String getClassifierType() {
        return classifierType;
    }

    protected void setClassifierType(String classifierType) {
        this.classifierType = classifierType;
    }

    /** Returns the number of attributes present in the lexicon **/
    public int getAttributesNum() {
        return tokenForm2index.size();
    }

    /** Returns the largest ID used for an attribute **/
    public int maxAttributeID() {
        return nextAttributeID - 1;
    }

    public void setAttributeScorer(AttributeScorer f) {
        this.filter = f;
    }
}
package org.ripple.bouncycastle.pqc.crypto.rainbow.util;

/**
 * Conversion and comparison helpers used while encoding and decoding Rainbow
 * keys and signatures: widening byte-to-int/short conversions (masked into
 * the GF(2^8) value range), narrowing int/short-to-byte conversions, and
 * element-wise equality tests for short arrays of one, two and three
 * dimensions.
 */
public class RainbowUtil
{

    /**
     * Converts a one-dimensional byte array into a one-dimensional int array.
     *
     * @param in the array to be converted
     * @return an int array of the same length whose entries are the input
     *         bytes masked with GF2Field.MASK
     */
    public static int[] convertArraytoInt(byte[] in)
    {
        int length = in.length;
        int[] result = new int[length];
        for (int pos = 0; pos != length; pos++)
        {
            // mask so the byte is treated as an unsigned GF(2^8) element
            result[pos] = in[pos] & GF2Field.MASK;
        }
        return result;
    }

    /**
     * Converts a one-dimensional byte array into a one-dimensional short
     * array.
     *
     * @param in the array to be converted
     * @return a short array of the same length whose entries are the input
     *         bytes masked with GF2Field.MASK
     */
    public static short[] convertArray(byte[] in)
    {
        int length = in.length;
        short[] result = new short[length];
        for (int pos = 0; pos != length; pos++)
        {
            result[pos] = (short)(in[pos] & GF2Field.MASK);
        }
        return result;
    }

    /**
     * Converts a byte matrix into a short matrix.
     *
     * @param in the matrix to be converted
     * @return a short matrix of identical shape with masked entries
     */
    public static short[][] convertArray(byte[][] in)
    {
        int rows = in.length;
        int cols = in[0].length;
        short[][] result = new short[rows][cols];
        for (int r = 0; r != rows; r++)
        {
            for (int c = 0; c != cols; c++)
            {
                result[r][c] = (short)(in[r][c] & GF2Field.MASK);
            }
        }
        return result;
    }

    /**
     * Converts a three-dimensional byte array into a three-dimensional short
     * array.
     *
     * @param in the array to be converted
     * @return a short array of identical shape with masked entries
     */
    public static short[][][] convertArray(byte[][][] in)
    {
        int d1 = in.length;
        int d2 = in[0].length;
        int d3 = in[0][0].length;
        short[][][] result = new short[d1][d2][d3];
        for (int a = 0; a != d1; a++)
        {
            for (int b = 0; b != d2; b++)
            {
                for (int c = 0; c != d3; c++)
                {
                    result[a][b][c] = (short)(in[a][b][c] & GF2Field.MASK);
                }
            }
        }
        return result;
    }

    /**
     * Converts an int array into a byte array by narrowing each element.
     *
     * @param in the array to be converted
     * @return a byte array of the same length holding the truncated values
     */
    public static byte[] convertIntArray(int[] in)
    {
        int length = in.length;
        byte[] result = new byte[length];
        for (int pos = 0; pos != length; pos++)
        {
            result[pos] = (byte)in[pos];
        }
        return result;
    }

    /**
     * Converts a short array into a byte array by narrowing each element.
     *
     * @param in the array to be converted
     * @return a byte array of the same length holding the truncated values
     */
    public static byte[] convertArray(short[] in)
    {
        int length = in.length;
        byte[] result = new byte[length];
        for (int pos = 0; pos != length; pos++)
        {
            result[pos] = (byte)in[pos];
        }
        return result;
    }

    /**
     * Converts a short matrix into a byte matrix by narrowing each element.
     *
     * @param in the matrix to be converted
     * @return a byte matrix of identical shape holding the truncated values
     */
    public static byte[][] convertArray(short[][] in)
    {
        int rows = in.length;
        int cols = in[0].length;
        byte[][] result = new byte[rows][cols];
        for (int r = 0; r != rows; r++)
        {
            for (int c = 0; c != cols; c++)
            {
                result[r][c] = (byte)in[r][c];
            }
        }
        return result;
    }

    /**
     * Converts a three-dimensional short array into a three-dimensional byte
     * array by narrowing each element.
     *
     * @param in the array to be converted
     * @return a byte array of identical shape holding the truncated values
     */
    public static byte[][][] convertArray(short[][][] in)
    {
        int d1 = in.length;
        int d2 = in[0].length;
        int d3 = in[0][0].length;
        byte[][][] result = new byte[d1][d2][d3];
        for (int a = 0; a != d1; a++)
        {
            for (int b = 0; b != d2; b++)
            {
                for (int c = 0; c != d3; c++)
                {
                    result[a][b][c] = (byte)in[a][b][c];
                }
            }
        }
        return result;
    }

    /**
     * Compare two short arrays. No null checks are performed.
     *
     * @param left the first short array
     * @param right the second short array
     * @return the result of the comparison
     */
    public static boolean equals(short[] left, short[] right)
    {
        if (left.length != right.length)
        {
            return false;
        }
        // Accumulate the outcome rather than returning early, matching the
        // original loop: every element pair is always inspected.
        boolean same = true;
        for (int pos = 0; pos != left.length; pos++)
        {
            same &= left[pos] == right[pos];
        }
        return same;
    }

    /**
     * Compare two two-dimensional short arrays. No null checks are performed.
     *
     * @param left the first short array
     * @param right the second short array
     * @return the result of the comparison
     */
    public static boolean equals(short[][] left, short[][] right)
    {
        if (left.length != right.length)
        {
            return false;
        }
        boolean same = true;
        for (int row = 0; row != left.length; row++)
        {
            same &= equals(left[row], right[row]);
        }
        return same;
    }

    /**
     * Compare two three-dimensional short arrays. No null checks are
     * performed.
     *
     * @param left the first short array
     * @param right the second short array
     * @return the result of the comparison
     */
    public static boolean equals(short[][][] left, short[][][] right)
    {
        if (left.length != right.length)
        {
            return false;
        }
        boolean same = true;
        for (int layer = 0; layer != left.length; layer++)
        {
            same &= equals(left[layer], right[layer]);
        }
        return same;
    }
}
package org.apache.hadoop.hive.cassandra.input; import org.apache.cassandra.db.marshal.AbstractType; import org.apache.cassandra.db.marshal.BytesType; import org.apache.cassandra.db.marshal.TypeParser; import org.apache.cassandra.exceptions.ConfigurationException; import org.apache.cassandra.exceptions.SyntaxException; import org.apache.cassandra.hadoop2.ColumnFamilyInputFormat; import org.apache.cassandra.hadoop2.ColumnFamilyRecordReader; import org.apache.cassandra.hadoop2.ColumnFamilySplit; import org.apache.cassandra.hadoop2.ConfigHelper; import org.apache.cassandra.thrift.ColumnDef; import org.apache.cassandra.thrift.IndexExpression; import org.apache.cassandra.thrift.SlicePredicate; import org.apache.cassandra.thrift.SliceRange; import org.apache.cassandra.utils.ByteBufferUtil; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.cassandra.CassandraPushdownPredicate; import org.apache.hadoop.hive.cassandra.serde.AbstractCassandraSerDe; import org.apache.hadoop.hive.cassandra.serde.CassandraColumnSerDe; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.index.IndexPredicateAnalyzer; import org.apache.hadoop.hive.ql.index.IndexSearchCondition; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; import org.apache.hadoop.hive.ql.plan.TableScanDesc; import org.apache.hadoop.hive.serde2.ColumnProjectionUtils; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.MapWritable; import org.apache.hadoop.mapred.InputSplit; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.RecordReader; import org.apache.hadoop.mapred.Reporter; import org.apache.hadoop.mapreduce.*; import org.apache.hadoop.mapreduce.task.*; import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; import java.util.Set; @SuppressWarnings("deprecation") public 
class HiveCassandraStandardColumnInputFormat extends InputFormat<BytesWritable, MapWritable>
    implements org.apache.hadoop.mapred.InputFormat<BytesWritable, MapWritable> {

  static final Logger LOG = LoggerFactory.getLogger(HiveCassandraStandardColumnInputFormat.class);

  // True when the column mapping declares a transposed (wide-row) table;
  // recomputed from the split's column mapping in getRecordReader().
  private boolean isTransposed;
  // Delegate new-API input format; getSplits(JobContext) forwards to it.
  private final ColumnFamilyInputFormat cfif = new ColumnFamilyInputFormat();

  /**
   * Builds an old-API (mapred) record reader for a Cassandra split by
   * configuring and wrapping the new-API ColumnFamilyRecordReader.
   * Either a slice range (whole-row / transposed reads) or an explicit
   * column-name list (projected reads) is installed on the slice predicate.
   */
  @Override
  public RecordReader<BytesWritable, MapWritable> getRecordReader(InputSplit split,
      JobConf jobConf, final Reporter reporter) throws IOException {
    HiveCassandraStandardSplit cassandraSplit = (HiveCassandraStandardSplit) split;

    List<String> columns = CassandraColumnSerDe.parseColumnMapping(cassandraSplit.getColumnMapping());
    isTransposed = CassandraColumnSerDe.isTransposed(columns);

    List<Integer> readColIDs = ColumnProjectionUtils.getReadColumnIDs(jobConf);
    if (columns.size() < readColIDs.size()) {
      throw new IOException("Cannot read more columns than the given table contains.");
    }

    org.apache.cassandra.hadoop2.ColumnFamilySplit cfSplit = cassandraSplit.getSplit();
    Job job = new Job(jobConf);

    // Adapter so new-API progress callbacks reach the old-API Reporter.
    TaskAttemptContext tac = new TaskAttemptContextImpl(job.getConfiguration(), new TaskAttemptID()) {
      @Override
      public void progress() {
        reporter.progress();
      }
    };

    SlicePredicate predicate = new SlicePredicate();

    // Read everything as a slice range when the table is transposed, when all
    // columns are projected, or when no projection is given at all.
    if (isTransposed || readColIDs.size() == columns.size() || readColIDs.size() == 0) {
      SliceRange range = new SliceRange();
      AbstractType comparator = BytesType.instance;
      String comparatorType = jobConf.get(AbstractCassandraSerDe.CASSANDRA_SLICE_PREDICATE_RANGE_COMPARATOR);
      if (comparatorType != null && !comparatorType.equals("")) {
        try {
          comparator = TypeParser.parse(comparatorType);
        } catch (ConfigurationException ex) {
          throw new IOException("Comparator class not found.");
        } catch (SyntaxException e) {
          throw new IOException(e);
        }
      }

      // Missing start/finish properties default to the unbounded range "".
      String sliceStart = jobConf.get(AbstractCassandraSerDe.CASSANDRA_SLICE_PREDICATE_RANGE_START);
      String sliceEnd = jobConf.get(AbstractCassandraSerDe.CASSANDRA_SLICE_PREDICATE_RANGE_FINISH);
      String reversed = jobConf.get(AbstractCassandraSerDe.CASSANDRA_SLICE_PREDICATE_RANGE_REVERSED);

      range.setStart(comparator.fromString(sliceStart == null ? "" : sliceStart));
      range.setFinish(comparator.fromString(sliceEnd == null ? "" : sliceEnd));
      range.setReversed(reversed == null ? false : reversed.equals("true"));
      range.setCount(cassandraSplit.getSlicePredicateSize());
      predicate.setSlice_range(range);
    } else {
      // Projected read: request only the mapped columns, skipping the row key.
      int iKey = columns.indexOf(CassandraColumnSerDe.CASSANDRA_KEY_COLUMN);
      predicate.setColumn_names(getColumnNames(iKey, columns, readColIDs));
    }

    try {
      // Wide-row iteration only applies to transposed tables and can be
      // disabled via CASSANDRA_ENABLE_WIDEROW_ITERATOR (default true).
      boolean wideRows = false;
      if (isTransposed && tac.getConfiguration().getBoolean(CassandraColumnSerDe.CASSANDRA_ENABLE_WIDEROW_ITERATOR, true)) {
        wideRows = true;
      }

      // Copy the split's connection/read settings into the task configuration.
      ConfigHelper.setInputColumnFamily(tac.getConfiguration(),
          cassandraSplit.getKeyspace(), cassandraSplit.getColumnFamily(), wideRows);
      ConfigHelper.setInputSlicePredicate(tac.getConfiguration(), predicate);
      ConfigHelper.setRangeBatchSize(tac.getConfiguration(), cassandraSplit.getRangeBatchSize());
      ConfigHelper.setInputRpcPort(tac.getConfiguration(), cassandraSplit.getPort() + "");
      ConfigHelper.setInputInitialAddress(tac.getConfiguration(), cassandraSplit.getHost());
      ConfigHelper.setInputPartitioner(tac.getConfiguration(), cassandraSplit.getPartitioner());
      // Set Split Size
      ConfigHelper.setInputSplitSize(tac.getConfiguration(), cassandraSplit.getSplitSize());

      LOG.info("Validators : " + tac.getConfiguration().get(CassandraColumnSerDe.CASSANDRA_VALIDATOR_TYPE));

      List<IndexExpression> indexExpr = parseFilterPredicate(jobConf);
      if (indexExpr != null) {
        //We have pushed down a filter from the Hive query, we can use this against secondary indexes
        ConfigHelper.setInputRange(tac.getConfiguration(), indexExpr);
      }

      CassandraHiveRecordReader rr = new CassandraHiveRecordReader(new ColumnFamilyRecordReader(), isTransposed);
      rr.initialize(cfSplit, tac);
      return rr;
    } catch (Exception ie) {
      throw new IOException(ie);
    }
  }

  /**
   * Computes old-API splits: configures the job from the SerDe properties,
   * delegates to the new-API ColumnFamilyInputFormat, then wraps each
   * resulting split in a HiveCassandraStandardSplit carrying the connection
   * and read settings needed later by getRecordReader().
   */
  @Override
  public InputSplit[] getSplits(JobConf jobConf, int numSplits) throws IOException {
    String ks = jobConf.get(AbstractCassandraSerDe.CASSANDRA_KEYSPACE_NAME);
    String cf = jobConf.get(AbstractCassandraSerDe.CASSANDRA_CF_NAME);
    int slicePredicateSize = jobConf.getInt(AbstractCassandraSerDe.CASSANDRA_SLICE_PREDICATE_SIZE,
        AbstractCassandraSerDe.DEFAULT_SLICE_PREDICATE_SIZE);
    int sliceRangeSize = jobConf.getInt(
        AbstractCassandraSerDe.CASSANDRA_RANGE_BATCH_SIZE,
        AbstractCassandraSerDe.DEFAULT_RANGE_BATCH_SIZE);
    int splitSize = jobConf.getInt(
        AbstractCassandraSerDe.CASSANDRA_SPLIT_SIZE,
        AbstractCassandraSerDe.DEFAULT_SPLIT_SIZE);
    String cassandraColumnMapping = jobConf.get(AbstractCassandraSerDe.CASSANDRA_COL_MAPPING);
    // 9160 is the classic Cassandra Thrift RPC port.
    int rpcPort = jobConf.getInt(AbstractCassandraSerDe.CASSANDRA_PORT, 9160);
    String host = jobConf.get(AbstractCassandraSerDe.CASSANDRA_HOST);
    String partitioner = jobConf.get(AbstractCassandraSerDe.CASSANDRA_PARTITIONER);

    if (cassandraColumnMapping == null) {
      throw new IOException("cassandra.columns.mapping required for Cassandra Table.");
    }

    // Split computation always uses an unbounded, forward slice range.
    SliceRange range = new SliceRange();
    range.setStart(new byte[0]);
    range.setFinish(new byte[0]);
    range.setReversed(false);
    range.setCount(slicePredicateSize);
    SlicePredicate predicate = new SlicePredicate();
    predicate.setSlice_range(range);

    ConfigHelper.setInputRpcPort(jobConf, "" + rpcPort);
    ConfigHelper.setInputInitialAddress(jobConf, host);
    ConfigHelper.setInputPartitioner(jobConf, partitioner);
    ConfigHelper.setInputSlicePredicate(jobConf, predicate);
    ConfigHelper.setInputColumnFamily(jobConf, ks, cf);
    ConfigHelper.setRangeBatchSize(jobConf, sliceRangeSize);
    ConfigHelper.setInputSplitSize(jobConf, splitSize);

    Job job = new Job(jobConf);
    JobContext jobContext = new JobContextImpl(job.getConfiguration(), job.getJobID());

    Path[] tablePaths = FileInputFormat.getInputPaths(jobContext);
    List<org.apache.hadoop.mapreduce.InputSplit> splits = getSplits(jobContext);
    InputSplit[] results = new InputSplit[splits.size()];

    for (int i = 0; i < splits.size(); ++i) {
      HiveCassandraStandardSplit csplit = new HiveCassandraStandardSplit(
          (ColumnFamilySplit) splits.get(i), cassandraColumnMapping, tablePaths[0]);
      csplit.setKeyspace(ks);
      csplit.setColumnFamily(cf);
      csplit.setRangeBatchSize(sliceRangeSize);
      csplit.setSplitSize(splitSize);
      csplit.setHost(host);
      csplit.setPort(rpcPort);
      csplit.setSlicePredicateSize(slicePredicateSize);
      csplit.setPartitioner(partitioner);
      csplit.setColumnMapping(cassandraColumnMapping);
      results[i] = csplit;
    }
    return results;
  }

  /**
   * Return a list of columns names to read from cassandra. The column defined
   * as the key in the column mapping should be skipped.
   *
   * @param iKey the index of the key defined in the column mapping
   * @param columns column mapping
   * @param readColIDs column names to read from cassandra
   */
  private List<ByteBuffer> getColumnNames(int iKey, List<String> columns, List<Integer> readColIDs) {
    // NOTE(review): raw ArrayList — should be new ArrayList<ByteBuffer>().
    List<ByteBuffer> results = new ArrayList();
    int maxSize = columns.size();
    for (Integer i : readColIDs) {
      assert (i < maxSize);
      if (i != iKey) {
        results.add(ByteBufferUtil.bytes(columns.get(i.intValue())));
      }
    }
    return results;
  }

  /** New-API split computation; delegates to ColumnFamilyInputFormat. */
  @Override
  public List<org.apache.hadoop.mapreduce.InputSplit> getSplits(JobContext context) throws IOException {
    return cfif.getSplits(context);
  }

  /** New-API record reader; isTransposed reflects the last getRecordReader() call. */
  @Override
  public org.apache.hadoop.mapreduce.RecordReader<BytesWritable, MapWritable> createRecordReader(
      org.apache.hadoop.mapreduce.InputSplit arg0, TaskAttemptContext tac)
      throws IOException, InterruptedException {
    return new CassandraHiveRecordReader(new ColumnFamilyRecordReader(), isTransposed);
  }

  /**
   * Look for a filter predicate pushed down by the StorageHandler. If a
   * filter was pushed down, the filter expression and the list of indexed
   * columns should be set in the JobConf properties. If either is not set, we
   * can't deal with the filter here so return null. If both are present in
   * the JobConf, translate the filter expression into a list of C*
   * IndexExpressions which we'll later use in queries. The filter expression
   * should translate exactly to IndexExpressions, as our
   * HiveStoragePredicateHandler implementation has already done this once. As
   * an additional check, if this is no longer the case &amp; there is some
   * residual predicate after translation, throw an Exception.
   *
   * @param jobConf Job Configuration
   * @return C* IndexExpressions representing the pushed down filter or null
   *         pushdown is not possible
   * @throws IOException if there are problems deserializing from the JobConf
   */
  private List<IndexExpression> parseFilterPredicate(JobConf jobConf) throws IOException {
    String filterExprSerialized = jobConf.get(TableScanDesc.FILTER_EXPR_CONF_STR);
    if (filterExprSerialized == null) {
      return null;
    }
    ExprNodeDesc filterExpr = Utilities.deserializeExpression(filterExprSerialized, jobConf);

    String encodedIndexedColumns = jobConf.get(AbstractCassandraSerDe.CASSANDRA_INDEXED_COLUMNS);
    Set<ColumnDef> indexedColumns = CassandraPushdownPredicate.deserializeIndexedColumns(encodedIndexedColumns);
    if (indexedColumns.isEmpty()) {
      return null;
    }

    IndexPredicateAnalyzer analyzer = CassandraPushdownPredicate.newIndexPredicateAnalyzer(indexedColumns);
    List<IndexSearchCondition> searchConditions = new ArrayList<IndexSearchCondition>();
    ExprNodeDesc residualPredicate = analyzer.analyzePredicate(filterExpr, searchConditions);

    // There should be no residual predicate since we already negotiated
    // that earlier in CassandraStorageHandler.decomposePredicate.
    if (residualPredicate != null) {
      throw new RuntimeException("Unexpected residual predicate : " + residualPredicate.getExprString());
    }

    if (!searchConditions.isEmpty()) {
      return CassandraPushdownPredicate.translateSearchConditions(searchConditions, indexedColumns);
    } else {
      throw new RuntimeException("At least one search condition expected in filter predicate");
    }
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.drill.exec.planner.physical; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.drill.shaded.guava.com.google.common.collect.ImmutableMap; import org.apache.calcite.plan.RelOptRule; import org.apache.calcite.plan.RelOptRuleCall; import org.apache.calcite.plan.RelOptRuleOperand; import org.apache.calcite.rel.core.AggregateCall; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rel.type.RelDataTypeField; import org.apache.calcite.rel.type.RelDataTypeFieldImpl; import org.apache.calcite.rel.type.RelRecordType; import org.apache.calcite.rex.RexInputRef; import org.apache.calcite.rex.RexNode; import org.apache.calcite.sql.type.SqlTypeName; import org.apache.drill.common.expression.SchemaPath; import org.apache.drill.exec.physical.base.GroupScan; import org.apache.drill.exec.physical.base.ScanStats; import org.apache.drill.exec.planner.logical.DrillAggregateRel; import org.apache.drill.exec.planner.logical.DrillProjectRel; import org.apache.drill.exec.planner.logical.DrillScanRel; 
import org.apache.drill.exec.planner.logical.RelOptHelper; import org.apache.drill.exec.store.ColumnExplorer; import org.apache.drill.exec.store.direct.MetadataDirectGroupScan; import org.apache.drill.exec.store.pojo.DynamicPojoRecordReader; /** * <p> * This rule will convert <b>" select count(*) as mycount from table "</b> * or <b>" select count(not-nullable-expr) as mycount from table "</b> into * <pre> * Project(mycount) * \ * DirectGroupScan ( PojoRecordReader ( rowCount )) *</pre> * or <b>" select count(column) as mycount from table "</b> into * <pre> * Project(mycount) * \ * DirectGroupScan (PojoRecordReader (columnValueCount)) *</pre> * Rule can be applied if query contains multiple count expressions. * <b>" select count(column1), count(column2), count(*) from table "</b> * </p> * * <p> * Currently, only parquet group scan has the exact row count and column value count, * obtained from parquet row group info. This will save the cost to * scan the whole parquet files. * </p> */ public class ConvertCountToDirectScan extends Prule { public static final RelOptRule AGG_ON_PROJ_ON_SCAN = new ConvertCountToDirectScan( RelOptHelper.some(DrillAggregateRel.class, RelOptHelper.some(DrillProjectRel.class, RelOptHelper.any(DrillScanRel.class))), "Agg_on_proj_on_scan"); public static final RelOptRule AGG_ON_SCAN = new ConvertCountToDirectScan( RelOptHelper.some(DrillAggregateRel.class, RelOptHelper.any(DrillScanRel.class)), "Agg_on_scan"); private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ConvertCountToDirectScan.class); /** Creates a SplunkPushDownRule. */ protected ConvertCountToDirectScan(RelOptRuleOperand rule, String id) { super(rule, "ConvertCountToDirectScan:" + id); } @Override public void onMatch(RelOptRuleCall call) { final DrillAggregateRel agg = (DrillAggregateRel) call.rel(0); final DrillScanRel scan = (DrillScanRel) call.rel(call.rels.length - 1); final DrillProjectRel project = call.rels.length == 3 ? 
(DrillProjectRel) call.rel(1) : null; final GroupScan oldGrpScan = scan.getGroupScan(); final PlannerSettings settings = PrelUtil.getPlannerSettings(call.getPlanner()); // Only apply the rule when: // 1) scan knows the exact row count in getSize() call, // 2) No GroupBY key, // 3) No distinct agg call. if (!(oldGrpScan.getScanStats(settings).getGroupScanProperty().hasExactRowCount() && agg.getGroupCount() == 0 && !agg.containsDistinctCall())) { return; } Map<String, Long> result = collectCounts(settings, agg, scan, project); logger.trace("Calculated the following aggregate counts: ", result); // if could not determine the counts, rule won't be applied if (result.isEmpty()) { return; } final RelDataType scanRowType = constructDataType(agg, result.keySet()); final DynamicPojoRecordReader<Long> reader = new DynamicPojoRecordReader<>( buildSchema(scanRowType.getFieldNames()), Collections.singletonList((List<Long>) new ArrayList<>(result.values()))); final ScanStats scanStats = new ScanStats(ScanStats.GroupScanProperty.EXACT_ROW_COUNT, 1, 1, scanRowType.getFieldCount()); final GroupScan directScan = new MetadataDirectGroupScan(reader, oldGrpScan.getFiles(), scanStats); final DirectScanPrel newScan = new DirectScanPrel(scan.getCluster(), scan.getTraitSet().plus(Prel.DRILL_PHYSICAL).plus(DrillDistributionTrait.SINGLETON), directScan, scanRowType); final ProjectPrel newProject = new ProjectPrel(agg.getCluster(), agg.getTraitSet().plus(Prel.DRILL_PHYSICAL) .plus(DrillDistributionTrait.SINGLETON), newScan, prepareFieldExpressions(scanRowType), agg.getRowType()); call.transformTo(newProject); } /** * Collects counts for each aggregation call. * Will return empty result map if was not able to determine count for at least one aggregation call, * * For each aggregate call will determine if count can be calculated. Collects counts only for COUNT function. * For star, not null expressions and implicit columns sets count to total record number. 
* For other cases obtains counts from group scan operator. Also count can not be calculated for parition columns. * * @param agg aggregate relational expression * @param scan scan relational expression * @param project project relational expression * @return result map where key is count column name, value is count value */ private Map<String, Long> collectCounts(PlannerSettings settings, DrillAggregateRel agg, DrillScanRel scan, DrillProjectRel project) { final Set<String> implicitColumnsNames = ColumnExplorer.initImplicitFileColumns(settings.getOptions()).keySet(); final GroupScan oldGrpScan = scan.getGroupScan(); final long totalRecordCount = (long)oldGrpScan.getScanStats(settings).getRecordCount(); final LinkedHashMap<String, Long> result = new LinkedHashMap<>(); for (int i = 0; i < agg.getAggCallList().size(); i++) { AggregateCall aggCall = agg.getAggCallList().get(i); long cnt; // rule can be applied only for count function, return empty counts if (!"count".equalsIgnoreCase(aggCall.getAggregation().getName()) ) { return ImmutableMap.of(); } if (containsStarOrNotNullInput(aggCall, agg)) { cnt = totalRecordCount; } else if (aggCall.getArgList().size() == 1) { // count(columnName) ==> Agg ( Scan )) ==> columnValueCount int index = aggCall.getArgList().get(0); if (project != null) { // project in the middle of Agg and Scan : Only when input of AggCall is a RexInputRef in Project, we find the index of Scan's field. // For instance, // Agg - count($0) // \ // Proj - Exp={$1} // \ // Scan (col1, col2). // return count of "col2" in Scan's metadata, if found. if (!(project.getProjects().get(index) instanceof RexInputRef)) { return ImmutableMap.of(); // do not apply for all other cases. 
} index = ((RexInputRef) project.getProjects().get(index)).getIndex(); } String columnName = scan.getRowType().getFieldNames().get(index).toLowerCase(); // for implicit column count will the same as total record count if (implicitColumnsNames.contains(columnName)) { cnt = totalRecordCount; } else { SchemaPath simplePath = SchemaPath.getSimplePath(columnName); if (ColumnExplorer.isPartitionColumn(settings.getOptions(), simplePath)) { return ImmutableMap.of(); } cnt = oldGrpScan.getColumnValueCount(simplePath); if (cnt == GroupScan.NO_COLUMN_STATS) { // if column stats is not available don't apply this rule, return empty counts return ImmutableMap.of(); } } } else { return ImmutableMap.of(); } String name = "count" + i + "$" + (aggCall.getName() == null ? aggCall.toString() : aggCall.getName()); result.put(name, cnt); } return ImmutableMap.copyOf(result); } /** * Checks if aggregate call contains star or non-null expression: * <pre> * count(*) == > empty arg ==> rowCount * count(Not-null-input) ==> rowCount * </pre> * * @param aggregateCall aggregate call * @param aggregate aggregate relation expression * @return true of aggregate call contains star or non-null expression */ private boolean containsStarOrNotNullInput(AggregateCall aggregateCall, DrillAggregateRel aggregate) { return aggregateCall.getArgList().isEmpty() || (aggregateCall.getArgList().size() == 1 && !aggregate.getInput().getRowType().getFieldList().get(aggregateCall.getArgList().get(0)).getType().isNullable()); } /** * For each aggregate call creates field based on its name with bigint type. * Constructs record type for created fields. 
* * @param aggregateRel aggregate relation expression * @param fieldNames field names * @return record type */ private RelDataType constructDataType(DrillAggregateRel aggregateRel, Collection<String> fieldNames) { List<RelDataTypeField> fields = new ArrayList<>(); Iterator<String> filedNamesIterator = fieldNames.iterator(); int fieldIndex = 0; while (filedNamesIterator.hasNext()) { RelDataTypeField field = new RelDataTypeFieldImpl( filedNamesIterator.next(), fieldIndex++, aggregateRel.getCluster().getTypeFactory().createSqlType(SqlTypeName.BIGINT)); fields.add(field); } return new RelRecordType(fields); } /** * Builds schema based on given field names. * Type for each schema is set to long.class. * * @param fieldNames field names * @return schema */ private LinkedHashMap<String, Class<?>> buildSchema(List<String> fieldNames) { LinkedHashMap<String, Class<?>> schema = new LinkedHashMap<>(); for (String fieldName: fieldNames) { schema.put(fieldName, long.class); } return schema; } /** * For each field creates row expression. * * @param rowType row type * @return list of row expressions */ private List<RexNode> prepareFieldExpressions(RelDataType rowType) { List<RexNode> expressions = new ArrayList<>(); for (int i = 0; i < rowType.getFieldCount(); i++) { expressions.add(RexInputRef.of(i, rowType)); } return expressions; } }
/* * Copyright 2017 The Netty Project * * The Netty Project licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package io.netty.handler.ssl.ocsp; import io.netty.bootstrap.Bootstrap; import io.netty.bootstrap.ServerBootstrap; import io.netty.buffer.ByteBufAllocator; import io.netty.buffer.Unpooled; import io.netty.channel.Channel; import io.netty.channel.ChannelHandler; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelInboundHandlerAdapter; import io.netty.channel.ChannelInitializer; import io.netty.channel.ChannelPipeline; import io.netty.channel.DefaultEventLoopGroup; import io.netty.channel.EventLoopGroup; import io.netty.channel.local.LocalAddress; import io.netty.channel.local.LocalChannel; import io.netty.channel.local.LocalServerChannel; import io.netty.handler.ssl.OpenSsl; import io.netty.handler.ssl.ReferenceCountedOpenSslEngine; import io.netty.handler.ssl.SslContext; import io.netty.handler.ssl.SslContextBuilder; import io.netty.handler.ssl.SslHandler; import io.netty.handler.ssl.SslProvider; import io.netty.handler.ssl.util.InsecureTrustManagerFactory; import io.netty.handler.ssl.util.SelfSignedCertificate; import io.netty.util.CharsetUtil; import io.netty.util.ReferenceCountUtil; import java.net.SocketAddress; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicReference; import 
javax.net.ssl.SSLHandshakeException;

import org.junit.BeforeClass;
import org.junit.Test;

import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeTrue;

/**
 * Tests for OCSP stapling via the OpenSSL-backed {@link SslContext}
 * implementations (enableOcsp is rejected by the JDK provider).
 */
public class OcspTest {

    // Skip the whole suite when the linked OpenSSL build lacks OCSP support.
    @BeforeClass
    public static void checkOcspSupported() {
        assumeTrue(OpenSsl.isOcspSupported());
    }

    // The JDK provider does not support OCSP stapling: enabling it must fail.
    @Test(expected = IllegalArgumentException.class)
    public void testJdkClientEnableOcsp() throws Exception {
        SslContextBuilder.forClient()
                .sslProvider(SslProvider.JDK)
                .enableOcsp(true)
                .build();
    }

    @Test(expected = IllegalArgumentException.class)
    public void testJdkServerEnableOcsp() throws Exception {
        SelfSignedCertificate ssc = new SelfSignedCertificate();
        try {
            SslContextBuilder.forServer(ssc.certificate(), ssc.privateKey())
                    .sslProvider(SslProvider.JDK)
                    .enableOcsp(true)
                    .build();
        } finally {
            ssc.delete();
        }
    }

    @Test(expected = IllegalStateException.class)
    public void testClientOcspNotEnabledOpenSsl() throws Exception {
        testClientOcspNotEnabled(SslProvider.OPENSSL);
    }

    @Test(expected = IllegalStateException.class)
    public void testClientOcspNotEnabledOpenSslRefCnt() throws Exception {
        testClientOcspNotEnabled(SslProvider.OPENSSL_REFCNT);
    }

    // Reading an OCSP response from an engine whose context was built without
    // enableOcsp(true) must throw IllegalStateException.
    private static void testClientOcspNotEnabled(SslProvider sslProvider) throws Exception {
        SslContext context = SslContextBuilder.forClient()
                .sslProvider(sslProvider)
                .build();
        try {
            SslHandler sslHandler = context.newHandler(ByteBufAllocator.DEFAULT);
            ReferenceCountedOpenSslEngine engine = (ReferenceCountedOpenSslEngine) sslHandler.engine();
            try {
                engine.getOcspResponse();
            } finally {
                engine.release();
            }
        } finally {
            ReferenceCountUtil.release(context);
        }
    }

    @Test(expected = IllegalStateException.class)
    public void testServerOcspNotEnabledOpenSsl() throws Exception {
        testServerOcspNotEnabled(SslProvider.OPENSSL);
    }

    @Test(expected = IllegalStateException.class)
    public void testServerOcspNotEnabledOpenSslRefCnt() throws Exception {
        testServerOcspNotEnabled(SslProvider.OPENSSL_REFCNT);
    }

    // Setting an OCSP response on an engine whose context was built without
    // enableOcsp(true) must throw IllegalStateException.
    private static void testServerOcspNotEnabled(SslProvider sslProvider) throws Exception {
        SelfSignedCertificate ssc = new SelfSignedCertificate();
        try {
            SslContext context = SslContextBuilder.forServer(ssc.certificate(), ssc.privateKey())
                    .sslProvider(sslProvider)
                    .build();
            try {
                SslHandler sslHandler = context.newHandler(ByteBufAllocator.DEFAULT);
                ReferenceCountedOpenSslEngine engine = (ReferenceCountedOpenSslEngine) sslHandler.engine();
                try {
                    engine.setOcspResponse(new byte[] { 1, 2, 3 });
                } finally {
                    engine.release();
                }
            } finally {
                ReferenceCountUtil.release(context);
            }
        } finally {
            ssc.delete();
        }
    }

    @Test(timeout = 10000L)
    public void testClientAcceptingOcspStapleOpenSsl() throws Exception {
        testClientAcceptingOcspStaple(SslProvider.OPENSSL);
    }

    @Test(timeout = 10000L)
    public void testClientAcceptingOcspStapleOpenSslRefCnt() throws Exception {
        testClientAcceptingOcspStaple(SslProvider.OPENSSL_REFCNT);
    }

    /**
     * The Server provides an OCSP staple and the Client accepts it.
     */
    private static void testClientAcceptingOcspStaple(SslProvider sslProvider) throws Exception {
        final CountDownLatch latch = new CountDownLatch(1);
        // Server writes a greeting as soon as the channel becomes active.
        ChannelInboundHandlerAdapter serverHandler = new ChannelInboundHandlerAdapter() {
            @Override
            public void channelActive(ChannelHandlerContext ctx) throws Exception {
                ctx.writeAndFlush(Unpooled.wrappedBuffer("Hello, World!".getBytes()));
                ctx.fireChannelActive();
            }
        };
        // Client releases the message and signals the latch once data arrives.
        ChannelInboundHandlerAdapter clientHandler = new ChannelInboundHandlerAdapter() {
            @Override
            public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
                try {
                    ReferenceCountUtil.release(msg);
                } finally {
                    latch.countDown();
                }
            }
        };

        // newOcspResponse(), TestClientOcspContext and handshake(...) are
        // defined later in this class (beyond this chunk).
        byte[] response = newOcspResponse();
        TestClientOcspContext callback = new TestClientOcspContext(true);

        handshake(sslProvider, latch, serverHandler, response, clientHandler, callback);

        // The client must receive a defensive copy of the staple, equal in
        // content but not the same array instance.
        byte[] actual = callback.response();
        assertNotNull(actual);
        assertNotSame(response, actual);
        assertArrayEquals(response, actual);
    }

    @Test(timeout = 10000L)
    public void testClientRejectingOcspStapleOpenSsl() throws Exception {
        testClientRejectingOcspStaple(SslProvider.OPENSSL);
    }

    @Test(timeout = 10000L)
    public void testClientRejectingOcspStapleOpenSslRefCnt() throws Exception {
        testClientRejectingOcspStaple(SslProvider.OPENSSL_REFCNT);
    }

    /**
     * The Server provides an OCSP staple and the Client rejects it.
*/ private static void testClientRejectingOcspStaple(SslProvider sslProvider) throws Exception { final AtomicReference<Throwable> causeRef = new AtomicReference<Throwable>(); final CountDownLatch latch = new CountDownLatch(1); ChannelInboundHandlerAdapter clientHandler = new ChannelInboundHandlerAdapter() { @Override public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { try { causeRef.set(cause); } finally { latch.countDown(); } } }; byte[] response = newOcspResponse(); TestClientOcspContext callback = new TestClientOcspContext(false); handshake(sslProvider, latch, null, response, clientHandler, callback); byte[] actual = callback.response(); assertNotNull(actual); assertNotSame(response, actual); assertArrayEquals(response, actual); Throwable cause = causeRef.get(); assertTrue("" + cause, cause instanceof SSLHandshakeException); } @Test(timeout = 10000L) public void testServerHasNoStapleOpenSsl() throws Exception { testServerHasNoStaple(SslProvider.OPENSSL); } @Test(timeout = 10000L) public void testServerHasNoStapleOpenSslRefCnt() throws Exception { testServerHasNoStaple(SslProvider.OPENSSL_REFCNT); } /** * The server has OCSP stapling enabled but doesn't provide a staple. 
*/ private static void testServerHasNoStaple(SslProvider sslProvider) throws Exception { final CountDownLatch latch = new CountDownLatch(1); ChannelInboundHandlerAdapter serverHandler = new ChannelInboundHandlerAdapter() { @Override public void channelActive(ChannelHandlerContext ctx) throws Exception { ctx.writeAndFlush(Unpooled.wrappedBuffer("Hello, World!".getBytes())); ctx.fireChannelActive(); } }; ChannelInboundHandlerAdapter clientHandler = new ChannelInboundHandlerAdapter() { @Override public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { try { ReferenceCountUtil.release(msg); } finally { latch.countDown(); } } }; byte[] response = null; TestClientOcspContext callback = new TestClientOcspContext(true); handshake(sslProvider, latch, serverHandler, response, clientHandler, callback); byte[] actual = callback.response(); assertNull(response); assertNull(actual); } @Test(timeout = 10000L) public void testClientExceptionOpenSsl() throws Exception { testClientException(SslProvider.OPENSSL); } @Test(timeout = 10000L) public void testClientExceptionOpenSslRefCnt() throws Exception { testClientException(SslProvider.OPENSSL_REFCNT); } /** * Testing what happens if the {@link OcspClientCallback} throws an {@link Exception}. * * The exception should bubble up on the client side and the connection should get closed. 
*/ private static void testClientException(SslProvider sslProvider) throws Exception { final AtomicReference<Throwable> causeRef = new AtomicReference<Throwable>(); final CountDownLatch latch = new CountDownLatch(1); ChannelInboundHandlerAdapter clientHandler = new ChannelInboundHandlerAdapter() { @Override public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { try { causeRef.set(cause); } finally { latch.countDown(); } } }; final OcspTestException clientException = new OcspTestException("testClientException"); byte[] response = newOcspResponse(); OcspClientCallback callback = new OcspClientCallback() { @Override public boolean verify(byte[] response) throws Exception { throw clientException; } }; handshake(sslProvider, latch, null, response, clientHandler, callback); assertSame(clientException, causeRef.get()); } private static void handshake(SslProvider sslProvider, CountDownLatch latch, ChannelHandler serverHandler, byte[] response, ChannelHandler clientHandler, OcspClientCallback callback) throws Exception { SelfSignedCertificate ssc = new SelfSignedCertificate(); try { SslContext serverSslContext = SslContextBuilder.forServer(ssc.certificate(), ssc.privateKey()) .sslProvider(sslProvider) .enableOcsp(true) .build(); try { SslContext clientSslContext = SslContextBuilder.forClient() .sslProvider(sslProvider) .enableOcsp(true) .trustManager(InsecureTrustManagerFactory.INSTANCE) .build(); try { EventLoopGroup group = new DefaultEventLoopGroup(); try { LocalAddress address = new LocalAddress("handshake-" + Math.random()); Channel server = newServer(group, address, serverSslContext, response, serverHandler); Channel client = newClient(group, address, clientSslContext, callback, clientHandler); try { assertTrue("Something went wrong.", latch.await(10L, TimeUnit.SECONDS)); } finally { client.close().syncUninterruptibly(); server.close().syncUninterruptibly(); } } finally { group.shutdownGracefully(1L, 1L, TimeUnit.SECONDS); } } 
finally { ReferenceCountUtil.release(clientSslContext); } } finally { ReferenceCountUtil.release(serverSslContext); } } finally { ssc.delete(); } } private static Channel newServer(EventLoopGroup group, SocketAddress address, SslContext context, byte[] response, ChannelHandler handler) { ServerBootstrap bootstrap = new ServerBootstrap() .channel(LocalServerChannel.class) .group(group) .childHandler(newServerHandler(context, response, handler)); return bootstrap.bind(address) .syncUninterruptibly() .channel(); } private static Channel newClient(EventLoopGroup group, SocketAddress address, SslContext context, OcspClientCallback callback, ChannelHandler handler) { Bootstrap bootstrap = new Bootstrap() .channel(LocalChannel.class) .group(group) .handler(newClientHandler(context, callback, handler)); return bootstrap.connect(address) .syncUninterruptibly() .channel(); } private static ChannelHandler newServerHandler(final SslContext context, final byte[] response, final ChannelHandler handler) { return new ChannelInitializer<Channel>() { @Override protected void initChannel(Channel ch) throws Exception { ChannelPipeline pipeline = ch.pipeline(); SslHandler sslHandler = context.newHandler(ch.alloc()); if (response != null) { ReferenceCountedOpenSslEngine engine = (ReferenceCountedOpenSslEngine) sslHandler.engine(); engine.setOcspResponse(response); } pipeline.addLast(sslHandler); if (handler != null) { pipeline.addLast(handler); } } }; } private static ChannelHandler newClientHandler(final SslContext context, final OcspClientCallback callback, final ChannelHandler handler) { return new ChannelInitializer<Channel>() { @Override protected void initChannel(Channel ch) throws Exception { ChannelPipeline pipeline = ch.pipeline(); SslHandler sslHandler = context.newHandler(ch.alloc()); ReferenceCountedOpenSslEngine engine = (ReferenceCountedOpenSslEngine) sslHandler.engine(); pipeline.addLast(sslHandler); pipeline.addLast(new OcspClientCallbackHandler(engine, callback)); if 
(handler != null) { pipeline.addLast(handler); } } }; } private static byte[] newOcspResponse() { // Assume we got the OCSP staple from somewhere. Using a bogus byte[] // in the test because getting a true staple from the CA is quite involved. // It requires HttpCodec and Bouncycastle and the test may be very unreliable // because the OCSP responder servers are basically being DDoS'd by the // Internet. return "I am a bogus OCSP staple. OpenSSL does not care about the format of the byte[]!" .getBytes(CharsetUtil.US_ASCII); } private interface OcspClientCallback { boolean verify(byte[] staple) throws Exception; } private static final class TestClientOcspContext implements OcspClientCallback { private final CountDownLatch latch = new CountDownLatch(1); private final boolean valid; private volatile byte[] response; public TestClientOcspContext(boolean valid) { this.valid = valid; } public byte[] response() throws InterruptedException, TimeoutException { assertTrue(latch.await(10L, TimeUnit.SECONDS)); return response; } @Override public boolean verify(byte[] response) throws Exception { this.response = response; latch.countDown(); return valid; } } private static final class OcspClientCallbackHandler extends OcspClientHandler { private final OcspClientCallback callback; public OcspClientCallbackHandler(ReferenceCountedOpenSslEngine engine, OcspClientCallback callback) { super(engine); this.callback = callback; } @Override protected boolean verify(ChannelHandlerContext ctx, ReferenceCountedOpenSslEngine engine) throws Exception { byte[] response = engine.getOcspResponse(); return callback.verify(response); } } private static final class OcspTestException extends IllegalStateException { private static final long serialVersionUID = 4516426833250228159L; public OcspTestException(String message) { super(message); } } }
/**
 * Copyright 2010 The Apache Software Foundation
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.catalog;

import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.net.ConnectException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;

import junit.framework.Assert;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HServerAddress;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.NotAllMetaRegionsOnlineException;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HConnection;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.ipc.HRegionInterface;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.apache.hadoop.util.Progressable;
import org.apache.zookeeper.KeeperException;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.mockito.Matchers;
import org.mockito.Mockito;

/**
 * Test {@link CatalogTracker}
 *
 * Runs against a mini ZooKeeper cluster only; all region-server interaction is
 * mocked with Mockito, and root/meta locations are planted in (and cleaned out
 * of) ZK directly via RootLocationEditor.
 */
public class TestCatalogTracker {
  private static final Log LOG = LogFactory.getLog(TestCatalogTracker.class);
  private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
  // Fixed fake server identity used whenever a test needs "a" location in ZK.
  private static final ServerName SN =
    new ServerName("example.org", 1234, System.currentTimeMillis());
  private ZooKeeperWatcher watcher;
  private Abortable abortable;

  @BeforeClass public static void beforeClass() throws Exception {
    UTIL.startMiniZKCluster();
  }

  @AfterClass public static void afterClass() throws IOException {
    UTIL.getZkCluster().shutdown();
  }

  @Before public void before() throws IOException {
    // Abortable that only logs: tests must keep running even when a component
    // asks to abort.
    this.abortable = new Abortable() {
      @Override
      public void abort(String why, Throwable e) {
        LOG.info(why, e);
      }

      @Override
      public boolean isAborted()  {
        return false;
      }
    };
    this.watcher = new ZooKeeperWatcher(UTIL.getConfiguration(),
      this.getClass().getSimpleName(), this.abortable, true);
  }

  @After public void after() {
    this.watcher.close();
  }

  private CatalogTracker constructAndStartCatalogTracker()
  throws IOException, InterruptedException {
    return constructAndStartCatalogTracker(null);
  }

  // Builds a started CatalogTracker on the shared watcher; c may be a Mockito
  // mock (or null) since the tests never touch a real cluster.
  private CatalogTracker constructAndStartCatalogTracker(final HConnection c)
  throws IOException, InterruptedException {
    CatalogTracker ct = new CatalogTracker(this.watcher, null, c,
      this.abortable, 0);
    ct.start();
    return ct;
  }

  /**
   * Test that we get notification if .META. moves.
   * @throws IOException
   * @throws InterruptedException
   * @throws KeeperException
   */
  @Test public void testThatIfMETAMovesWeAreNotified()
  throws IOException, InterruptedException, KeeperException {
    HConnection connection = Mockito.mock(HConnection.class);
    constructAndStartCatalogTracker(connection);
    try {
      RootLocationEditor.setRootLocation(this.watcher,
        new ServerName("example.com", 1234, System.currentTimeMillis()));
    } finally {
      // Clean out root location or later tests will be confused... they presume
      // start fresh in zk.
      RootLocationEditor.deleteRootLocation(this.watcher);
    }
  }

  /**
   * Test interruptable while blocking wait on root and meta.
   * @throws IOException
   * @throws InterruptedException
   */
  @Test public void testInterruptWaitOnMetaAndRoot()
  throws IOException, InterruptedException {
    final CatalogTracker ct = constructAndStartCatalogTracker();
    ServerName hsa = ct.getRootLocation();
    Assert.assertNull(hsa);
    ServerName meta = ct.getMetaLocation();
    Assert.assertNull(meta);
    Thread t = new Thread() {
      @Override
      public void run() {
        try {
          // Blocks until meta location is set or the tracker is stopped.
          ct.waitForMeta();
        } catch (InterruptedException e) {
          throw new RuntimeException("Interrupted", e);
        }
      }
    };
    t.start();
    // Busy-wait until the thread has actually entered run(), then give it a
    // beat and confirm it is still blocked in waitForMeta().
    while (!t.isAlive()) Threads.sleep(1);
    Threads.sleep(1);
    assertTrue(t.isAlive());
    // Stopping the tracker must unblock the waiter.
    ct.stop();
    // Join the thread... should exit shortly.
    t.join();
  }

  // Verifies verifyMetaRegionLocation returns false (rather than throwing) when
  // the region-server connection refuses the Get.
  @Test
  public void testGetMetaServerConnectionFails()
  throws IOException, InterruptedException, KeeperException {
    HConnection connection = Mockito.mock(HConnection.class);
    ConnectException connectException =
      new ConnectException("Connection refused");
    final HRegionInterface implementation =
      Mockito.mock(HRegionInterface.class);
    Mockito.when(implementation.get((byte [])Mockito.any(), (Get)Mockito.any())).
      thenThrow(connectException);
    Mockito.when(connection.getHRegionConnection((HServerAddress)Matchers.anyObject(),
      Matchers.anyBoolean())).
      thenReturn(implementation);
    // Sanity-check the stubbing before exercising the tracker.
    Assert.assertNotNull(connection.getHRegionConnection(new HServerAddress(), false));
    final CatalogTracker ct = constructAndStartCatalogTracker(connection);
    try {
      RootLocationEditor.setRootLocation(this.watcher,
        new ServerName("example.com", 1234, System.currentTimeMillis()));
      Assert.assertFalse(ct.verifyMetaRegionLocation(100));
    } finally {
      // Clean out root location or later tests will be confused... they presume
      // start fresh in zk.
      RootLocationEditor.deleteRootLocation(this.watcher);
    }
  }

  /**
   * Test get of root region fails properly if nothing to connect to.
   * @throws IOException
   * @throws InterruptedException
   * @throws KeeperException
   */
  @Test
  public void testVerifyRootRegionLocationFails()
  throws IOException, InterruptedException, KeeperException {
    HConnection connection = Mockito.mock(HConnection.class);
    ConnectException connectException =
      new ConnectException("Connection refused");
    final HRegionInterface implementation =
      Mockito.mock(HRegionInterface.class);
    // Same shape as the meta test above, but failing getRegionInfo instead.
    Mockito.when(implementation.getRegionInfo((byte [])Mockito.any())).
      thenThrow(connectException);
    Mockito.when(connection.getHRegionConnection((HServerAddress)Matchers.anyObject(),
      Matchers.anyBoolean())).
      thenReturn(implementation);
    Assert.assertNotNull(connection.getHRegionConnection(new HServerAddress(), false));
    final CatalogTracker ct = constructAndStartCatalogTracker(connection);
    try {
      RootLocationEditor.setRootLocation(this.watcher,
        new ServerName("example.com", 1234, System.currentTimeMillis()));
      Assert.assertFalse(ct.verifyRootRegionLocation(100));
    } finally {
      // Clean out root location or later tests will be confused... they presume
      // start fresh in zk.
      RootLocationEditor.deleteRootLocation(this.watcher);
    }
  }

  // With no location ever set, a bounded wait must time out with
  // NotAllMetaRegionsOnlineException.
  @Test (expected = NotAllMetaRegionsOnlineException.class)
  public void testTimeoutWaitForRoot()
  throws IOException, InterruptedException {
    final CatalogTracker ct = constructAndStartCatalogTracker();
    ct.waitForRoot(100);
  }

  @Test (expected = NotAllMetaRegionsOnlineException.class)
  public void testTimeoutWaitForMeta()
  throws IOException, InterruptedException {
    final CatalogTracker ct = constructAndStartCatalogTracker();
    ct.waitForMeta(100);
  }

  /**
   * Test waiting on root w/ no timeout specified.
   * @throws IOException
   * @throws InterruptedException
   * @throws KeeperException
   */
  @Test public void testNoTimeoutWaitForRoot()
  throws IOException, InterruptedException, KeeperException {
    final CatalogTracker ct = constructAndStartCatalogTracker();
    ServerName hsa = ct.getRootLocation();
    Assert.assertNull(hsa);

    // Now test waiting on root location getting set.
    Thread t = new WaitOnMetaThread(ct);
    startWaitAliveThenWaitItLives(t, 1000);
    // Set a root location.
    hsa = setRootLocation();
    // Join the thread... should exit shortly.
    t.join();
    // Now root is available.
    Assert.assertTrue(ct.getRootLocation().equals(hsa));
  }

  private ServerName setRootLocation() throws KeeperException {
    RootLocationEditor.setRootLocation(this.watcher, SN);
    return SN;
  }

  /**
   * Test waiting on meta w/ no timeout specified.
   * @throws IOException
   * @throws InterruptedException
   * @throws KeeperException
   */
  @Test public void testNoTimeoutWaitForMeta()
  throws IOException, InterruptedException, KeeperException {
    // Mock an HConnection and a HRegionInterface implementation.  Have the
    // HConnection return the HRI.  Have the HRI return a few mocked up responses
    // to make our test work.
    HConnection connection = Mockito.mock(HConnection.class);
    HRegionInterface mockHRI = Mockito.mock(HRegionInterface.class);
    // Make the HRI return an answer no matter how Get is called.  Same for
    // getHRegionInfo.  Thats enough for this test.
    Mockito.when(connection.getHRegionConnection((String)Mockito.any(),
      Matchers.anyInt())).thenReturn(mockHRI);
    final CatalogTracker ct = constructAndStartCatalogTracker(connection);
    ServerName hsa = ct.getMetaLocation();
    Assert.assertNull(hsa);

    // Now test waiting on meta location getting set.
    Thread t = new WaitOnMetaThread(ct) {
      @Override
      void doWaiting() throws InterruptedException {
        this.ct.waitForMeta();
      }
    };
    startWaitAliveThenWaitItLives(t, 1000);

    // Now the ct is up... set into the mocks some answers that make it look
    // like things have been getting assigned.  Make it so we'll return a
    // location (no matter what the Get is).  Same for getHRegionInfo -- always
    // just return the meta region.
    // Fake a .META. row: server host:port plus startcode, keyed on the
    // catalog family.
    List<KeyValue> kvs = new ArrayList<KeyValue>();
    kvs.add(new KeyValue(HConstants.EMPTY_BYTE_ARRAY,
      HConstants.CATALOG_FAMILY, HConstants.SERVER_QUALIFIER,
      Bytes.toBytes(SN.getHostAndPort())));
    kvs.add(new KeyValue(HConstants.EMPTY_BYTE_ARRAY,
      HConstants.CATALOG_FAMILY, HConstants.STARTCODE_QUALIFIER,
      Bytes.toBytes(SN.getStartcode())));
    final Result result = new Result(kvs);
    Mockito.when(mockHRI.get((byte [])Mockito.any(), (Get)Mockito.any())).
      thenReturn(result);
    Mockito.when(mockHRI.getRegionInfo((byte [])Mockito.any())).
      thenReturn(HRegionInfo.FIRST_META_REGIONINFO);
    // This should trigger wake up of meta wait (Its the removal of the meta
    // region unassigned node that triggers catalogtrackers that a meta has
    // been assigned.
    String node = ct.getMetaNodeTracker().getNode();
    ZKUtil.createAndFailSilent(this.watcher, node);
    MetaEditor.updateMetaLocation(ct, HRegionInfo.FIRST_META_REGIONINFO, SN);
    ZKUtil.deleteNode(this.watcher, node);
    // Join the thread... should exit shortly.
    t.join();
    // Now meta is available.
    Assert.assertTrue(ct.getMetaLocation().equals(SN));
  }

  // Starts t, hot-spins until it is alive, sleeps ms, then asserts it is
  // still blocked (i.e. still waiting on its location).
  private void startWaitAliveThenWaitItLives(final Thread t, final int ms) {
    t.start();
    while(!t.isAlive()) {
      // Wait
    }
    // Wait one second.
    Threads.sleep(ms);
    Assert.assertTrue("Assert " + t.getName() + " still waiting", t.isAlive());
  }

  // Progressable that just counts invocations; handy for asserting progress
  // callbacks happened.
  class CountingProgressable implements Progressable {
    final AtomicInteger counter = new AtomicInteger(0);
    @Override
    public void progress() {
      this.counter.incrementAndGet();
    }
  }

  /**
   * Wait on META.
   * Default is wait on -ROOT-.
   */
  class WaitOnMetaThread extends Thread {
    final CatalogTracker ct;

    WaitOnMetaThread(final CatalogTracker ct) {
      super("WaitOnMeta");
      this.ct = ct;
    }

    @Override
    public void run() {
      try {
        doWaiting();
      } catch (InterruptedException e) {
        throw new RuntimeException("Failed wait", e);
      }
      LOG.info("Exiting " + getName());
    }

    // Subclasses override to wait on meta instead of root.
    void doWaiting() throws InterruptedException {
      this.ct.waitForRoot();
    }
  }
}
/**
 * generated by Xtext 2.9.2
 */
// NOTE(review): this is EMF-generated code (all members carry @generated);
// hand edits here would be lost on regeneration, so only comments are added.
package org.xtuml.bp.xtext.masl.masl.behavior.impl;

import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;

import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;

import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.MinimalEObjectImpl;

import org.xtuml.bp.xtext.masl.masl.behavior.BehaviorPackage;
import org.xtuml.bp.xtext.masl.masl.behavior.SimpleFeatureCall;
import org.xtuml.bp.xtext.masl.masl.behavior.SortOrderComponent;

/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Sort Order Component</b></em>'.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * </p>
 * <ul>
 *   <li>{@link org.xtuml.bp.xtext.masl.masl.behavior.impl.SortOrderComponentImpl#isReverse <em>Reverse</em>}</li>
 *   <li>{@link org.xtuml.bp.xtext.masl.masl.behavior.impl.SortOrderComponentImpl#getFeature <em>Feature</em>}</li>
 * </ul>
 *
 * @generated
 */
public class SortOrderComponentImpl extends MinimalEObjectImpl.Container implements SortOrderComponent
{
  /**
   * The default value of the '{@link #isReverse() <em>Reverse</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #isReverse()
   * @generated
   * @ordered
   */
  protected static final boolean REVERSE_EDEFAULT = false;

  /**
   * The cached value of the '{@link #isReverse() <em>Reverse</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #isReverse()
   * @generated
   * @ordered
   */
  protected boolean reverse = REVERSE_EDEFAULT;

  /**
   * The cached value of the '{@link #getFeature() <em>Feature</em>}' containment reference.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getFeature()
   * @generated
   * @ordered
   */
  protected SimpleFeatureCall feature;

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  protected SortOrderComponentImpl()
  {
    super();
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  protected EClass eStaticClass()
  {
    return BehaviorPackage.Literals.SORT_ORDER_COMPONENT;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public boolean isReverse()
  {
    return reverse;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public void setReverse(boolean newReverse)
  {
    boolean oldReverse = reverse;
    reverse = newReverse;
    // Standard EMF pattern: notify observers only if someone is listening.
    if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, BehaviorPackage.SORT_ORDER_COMPONENT__REVERSE, oldReverse, reverse));
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public SimpleFeatureCall getFeature()
  {
    return feature;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  // Swaps the containment reference and CHAINS the notification onto msgs
  // without dispatching; callers (setFeature/eInverseRemove) dispatch later.
  public NotificationChain basicSetFeature(SimpleFeatureCall newFeature, NotificationChain msgs)
  {
    SimpleFeatureCall oldFeature = feature;
    feature = newFeature;
    if (eNotificationRequired())
    {
      ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, BehaviorPackage.SORT_ORDER_COMPONENT__FEATURE, oldFeature, newFeature);
      if (msgs == null) msgs = notification; else msgs.add(notification);
    }
    return msgs;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public void setFeature(SimpleFeatureCall newFeature)
  {
    if (newFeature != feature)
    {
      // Re-parent the containment: detach the old child, attach the new one,
      // then fire the accumulated notifications in one batch.
      NotificationChain msgs = null;
      if (feature != null)
        msgs = ((InternalEObject)feature).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - BehaviorPackage.SORT_ORDER_COMPONENT__FEATURE, null, msgs);
      if (newFeature != null)
        msgs = ((InternalEObject)newFeature).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - BehaviorPackage.SORT_ORDER_COMPONENT__FEATURE, null, msgs);
      msgs = basicSetFeature(newFeature, msgs);
      if (msgs != null) msgs.dispatch();
    }
    else if (eNotificationRequired())
      // Touch notification: value unchanged but a SET event is still emitted.
      eNotify(new ENotificationImpl(this, Notification.SET, BehaviorPackage.SORT_ORDER_COMPONENT__FEATURE, newFeature, newFeature));
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs)
  {
    switch (featureID)
    {
      case BehaviorPackage.SORT_ORDER_COMPONENT__FEATURE:
        return basicSetFeature(null, msgs);
    }
    return super.eInverseRemove(otherEnd, featureID, msgs);
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public Object eGet(int featureID, boolean resolve, boolean coreType)
  {
    switch (featureID)
    {
      case BehaviorPackage.SORT_ORDER_COMPONENT__REVERSE:
        return isReverse();
      case BehaviorPackage.SORT_ORDER_COMPONENT__FEATURE:
        return getFeature();
    }
    return super.eGet(featureID, resolve, coreType);
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public void eSet(int featureID, Object newValue)
  {
    switch (featureID)
    {
      case BehaviorPackage.SORT_ORDER_COMPONENT__REVERSE:
        setReverse((Boolean)newValue);
        return;
      case BehaviorPackage.SORT_ORDER_COMPONENT__FEATURE:
        setFeature((SimpleFeatureCall)newValue);
        return;
    }
    super.eSet(featureID, newValue);
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public void eUnset(int featureID)
  {
    switch (featureID)
    {
      case BehaviorPackage.SORT_ORDER_COMPONENT__REVERSE:
        setReverse(REVERSE_EDEFAULT);
        return;
      case BehaviorPackage.SORT_ORDER_COMPONENT__FEATURE:
        setFeature((SimpleFeatureCall)null);
        return;
    }
    super.eUnset(featureID);
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public boolean eIsSet(int featureID)
  {
    switch (featureID)
    {
      case BehaviorPackage.SORT_ORDER_COMPONENT__REVERSE:
        return reverse != REVERSE_EDEFAULT;
      case BehaviorPackage.SORT_ORDER_COMPONENT__FEATURE:
        return feature != null;
    }
    return super.eIsSet(featureID);
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public String toString()
  {
    if (eIsProxy()) return super.toString();

    StringBuffer result = new StringBuffer(super.toString());
    result.append(" (reverse: ");
    result.append(reverse);
    result.append(')');
    return result.toString();
  }

} //SortOrderComponentImpl
/** * Copyright (C) 2009-2015 Dell, Inc. * See annotations for authorship information * * ==================================================================== * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ==================================================================== */ package org.dasein.cloud.utils.requester.fluent; import org.apache.http.client.methods.HttpUriRequest; import org.apache.http.impl.client.HttpClientBuilder; import org.dasein.cloud.utils.requester.*; import org.dasein.cloud.utils.requester.streamprocessors.*; import org.json.JSONObject; import org.w3c.dom.Document; import javax.annotation.Nonnull; /** * DaseinRequest class is a wrapper for Apache HTTP client. 
* * <pre> * <code> * String result = new DaseinRequest(httpClientBuilder, httpUriRequestBuilder).execute(); * Document resultAsDocument = new DaseinRequest(httpClientBuilder, httpUriRequestBuilder).withDocumentProcessor().execute(); * </code> * </pre> * * @author Vlad Munthiu * */ public class DaseinRequest implements CompositeRequester { private HttpClientBuilder httpClientBuilder; private HttpUriRequest httpUriRequestBuilder; private HttpProxyConfig httpProxyConfig; public DaseinRequest(@Nonnull HttpClientBuilder httpClientBuilder, @Nonnull HttpUriRequest httpUriRequestBuilder, @Nonnull String httpProxyHost, @Nonnull Integer httpProxyPort){ if(httpClientBuilder == null) throw new IllegalArgumentException("Parameter httpClientBuilder cannot be null."); if(httpUriRequestBuilder == null) throw new IllegalArgumentException("Parameter httpUriRequestBuilder cannot be null"); this.httpClientBuilder = httpClientBuilder; this.httpUriRequestBuilder = httpUriRequestBuilder; this.httpProxyConfig = new HttpProxyConfig(httpProxyHost, httpProxyPort); } /** * Constructs a new DaseinRequest instance, ready to execute http calls to a specified Uri. * * @param httpClientBuilder HTTP client builder * @param httpUriRequestBuilder HTTP URI request builder **/ public DaseinRequest(@Nonnull HttpClientBuilder httpClientBuilder, @Nonnull HttpUriRequest httpUriRequestBuilder){ if(httpClientBuilder == null) throw new IllegalArgumentException("Parameter httpClientBuilder cannot be null."); if(httpUriRequestBuilder == null) throw new IllegalArgumentException("Parameter httpUriRequestBuilder cannot be null"); this.httpClientBuilder = httpClientBuilder; this.httpUriRequestBuilder = httpUriRequestBuilder; } /** * Constructs a instance of a DaseinRequestExecutor with a XML stream processor that, once the HTTP request has been * finished, will perform a deserialization of the XML response into the specified type T. 
* * <pre> * DaseinDriverType result = new DaseinRequest(httpClientBuilder, httpUriRequestBuilder).withXmlProcessor(DaseinDriverType.class).execute(); * </pre> * * @param classType the type of the expected model * @return an instance of the classType type representing the response XML **/ @Override public <T> Requester<T> withXmlProcessor(@Nonnull Class<T> classType) { if(classType == null) throw new IllegalArgumentException("Parameter classType cannot be null"); if(httpProxyConfig == null) { return new DaseinRequestExecutor<T>(this.httpClientBuilder, this.httpUriRequestBuilder, new DaseinResponseHandler<T>(new XmlStreamToObjectProcessor(), classType)); } else { return new DaseinRequestExecutor<T>(this.httpClientBuilder, this.httpUriRequestBuilder, new DaseinResponseHandler<T>(new XmlStreamToObjectProcessor(), classType), httpProxyConfig.getHost(), httpProxyConfig.getPort()); } } /** * Constructs a instance of a DaseinRequestExecutor with a XML stream processor that, once the HTTP request has been * finished, will perform a deserialization of the XML response into the specified type T. A valid instance of a * ObjectMapper should be passed in, so that a mapping from a driver model type ( T ) to a Dasein Core * model( V ) to be performed after the response is received. * * <pre> * DaseinCoreType result = new DaseinRequest(cloudProvider, httpClientBuilder, httpUriRequestBuilder) * .withXmlProcessor(new ObjectMapper&lt;DaseinDriverType, DaseinCoreType&gt;() { * &#64;Override * public DaseinCoreType mapFrom(DaseinDriverType entity) { * //map entity to a new instance of DaseinCoreType * } * DaseinDriverType.class).execute(); * </pre> * * @param mapper an implementation of {@link org.dasein.cloud.utils.requester.ObjectMapper} interface * @param classType the type of the expected model * @return an instance of the V type which should be a Dasien Core type. 
**/ @Override public <T, V> Requester<V> withXmlProcessor(@Nonnull ObjectMapper<T, V> mapper, @Nonnull Class<T> classType) { if(mapper == null) throw new IllegalArgumentException("Parameter mapper cannot be null"); if(classType == null) throw new IllegalArgumentException("Parameter classType cannot be null"); if(httpProxyConfig == null) { return new DaseinRequestExecutor<V>(this.httpClientBuilder, this.httpUriRequestBuilder, new DaseinResponseHandlerWithMapper<T, V>(new XmlStreamToObjectProcessor(), mapper, classType)); } else { return new DaseinRequestExecutor<V>(this.httpClientBuilder, this.httpUriRequestBuilder, new DaseinResponseHandlerWithMapper<T, V>(new XmlStreamToObjectProcessor(), mapper, classType), httpProxyConfig.getHost(), httpProxyConfig.getPort()); } } /** * Constructs a instance of a DaseinRequestExecutor with a JSON stream processor that, once the HTTP request has been * finished will perform a deserialization of the JSON response into the specified type T. * * <pre> * DaseinDriverType result = new DaseinRequest(httpClientBuilder, httpUriRequestBuilder).withJsonProcessor(DaseinDriverType.class).execute(); * </pre> * * @param classType the type of the expected model * @return an instance of the classType type representing the response JSON **/ @Override public <T> Requester<T> withJsonProcessor(@Nonnull Class<T> classType) { if(classType == null) throw new IllegalArgumentException("Parameter classType cannot be null"); if(httpProxyConfig == null) { return new DaseinRequestExecutor<T>(this.httpClientBuilder, this.httpUriRequestBuilder, new DaseinResponseHandler<T>(new JsonStreamToObjectProcessor(), classType)); } else { return new DaseinRequestExecutor<T>(this.httpClientBuilder, this.httpUriRequestBuilder, new DaseinResponseHandler<T>(new JsonStreamToObjectProcessor(), classType), httpProxyConfig.getHost(), httpProxyConfig.getPort()); } } /** * Constructs a instance of a DaseinRequestExecutor with a JSON stream processor that, once the HTTP request 
has been * finished, will perform a deserialization of the JSON response into the specified type T. A valid instance of a * ObjectMapper should be passed in, so that a mapping from a driver model type ( T ) to a Dasein Core * model( V ) to be performed after the response is received. * * <pre> * DaseinCoreType result = new DaseinRequest(cloudProvider, httpClientBuilder, httpUriRequestBuilder) * .withJsonProcessor(new ObjectMapper&lt;DaseinDriverType, DaseinCoreType&gt;() { * &#64;Override * public DaseinCoreType mapFrom(DaseinDriverType entity) { * //map entity to a new instance of DaseinCoreType * } * DaseinDriverType.class).execute(); * </pre> * * @param mapper an implementation of {@link org.dasein.cloud.utils.requester.ObjectMapper} interface * @param classType the type of the expected model * @return an instance of the V type which should be a Dasien Core type. **/ @Override public <T, V> Requester<V> withJsonProcessor(@Nonnull ObjectMapper<T, V> mapper, @Nonnull Class<T> classType) { if(mapper == null) throw new IllegalArgumentException("Parameter mapper cannot be null"); if(classType == null) throw new IllegalArgumentException("Parameter classType cannot be null"); if(httpProxyConfig == null) { return new DaseinRequestExecutor<V>(this.httpClientBuilder, this.httpUriRequestBuilder, new DaseinResponseHandlerWithMapper<T, V>(new JsonStreamToObjectProcessor(), mapper, classType)); } else { return new DaseinRequestExecutor<V>(this.httpClientBuilder, this.httpUriRequestBuilder, new DaseinResponseHandlerWithMapper<T, V>(new JsonStreamToObjectProcessor(), mapper, classType), httpProxyConfig.getHost(), httpProxyConfig.getPort()); } } /** * Constructs a instance of a DaseinRequestExecutor with a stream processor that, once the HTTP request has been * finished, will try to parse the response stream into a valid XML Document object. 
* * <pre> * Document documentResult = new DaseinRequest(httpClientBuilder, httpUriRequestBuilder).withDocumentProcessor().execute(); * </pre> **/ @Override public <T> DaseinRequestExecutor<Document> withDocumentProcessor() { if(httpProxyConfig == null) { return new DaseinRequestExecutor<Document>(this.httpClientBuilder, this.httpUriRequestBuilder, new DaseinResponseHandler<Document>(new StreamToDocumentProcessor(), Document.class)); } else { return new DaseinRequestExecutor<Document>(this.httpClientBuilder, this.httpUriRequestBuilder, new DaseinResponseHandler<Document>(new StreamToDocumentProcessor(), Document.class), httpProxyConfig.getHost(), httpProxyConfig.getPort()); } } /** * Constructs a instance of a DaseinRequestExecutor with a stream processor that, once the HTTP request has been * finished, will try to parse the response stream into a valid JSONObject object. * * <pre> * JSONObject jsonResult = new DaseinRequest(httpClientBuilder, httpUriRequestBuilder).withJSONObjectProcessor().execute(); * </pre> **/ @Override public <T> DaseinRequestExecutor<JSONObject> withJSONObjectProcessor() { if(httpProxyConfig == null) { return new DaseinRequestExecutor<JSONObject>(this.httpClientBuilder, this.httpUriRequestBuilder, new DaseinResponseHandler<JSONObject>(new StreamToJSONObjectProcessor(), JSONObject.class)); } else { return new DaseinRequestExecutor<JSONObject>(this.httpClientBuilder, this.httpUriRequestBuilder, new DaseinResponseHandler<JSONObject>(new StreamToJSONObjectProcessor(), JSONObject.class), httpProxyConfig.getHost(), httpProxyConfig.getPort()); } } /** * Executes a HTTP request using a string processor for the response. * * <pre> * String result = new DaseinRequest(httpClientBuilder, httpUriRequestBuilder).execute(); * </pre> * * @return a string representing the response of the current HTTP call. 
**/ @Override public String execute() throws DaseinRequestException { if(httpProxyConfig == null) { return new DaseinRequestExecutor<String>(this.httpClientBuilder, this.httpUriRequestBuilder, new DaseinResponseHandler<String>(new StreamToStringProcessor(), String.class)).execute(); } else { return new DaseinRequestExecutor<String>(this.httpClientBuilder, this.httpUriRequestBuilder, new DaseinResponseHandler<String>(new StreamToStringProcessor(), String.class), httpProxyConfig.getHost(), httpProxyConfig.getPort()).execute(); } } }
package com.jomofisher.cmakeify; import com.google.common.base.Charsets; import com.google.common.base.Joiner; import com.google.common.io.Files; import com.jomofisher.cmakeify.model.CMakeifyYml; import org.junit.Test; import org.yaml.snakeyaml.Yaml; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.IOException; import java.io.PrintStream; import java.nio.charset.StandardCharsets; import static com.google.common.truth.Truth.assertThat; import static org.junit.Assert.fail; public class TestCmakeify { private static String main(String... args) throws IOException { ByteArrayOutputStream baos = new ByteArrayOutputStream(); PrintStream ps = new PrintStream(baos); new CMakeify(ps).go(args); return new String(baos.toByteArray(), StandardCharsets.UTF_8); } @Test public void testVersion() throws IOException { assertThat(main("--version")).contains(BuildInfo.PROJECT_VERSION); } @Test public void missingConfigurationFile() throws IOException { new File("test-files/empty-folder").mkdirs(); assertThat(main("-wf", "test-files/empty-folder")).contains("configuration file"); } @Test public void workingFolderFlag() throws IOException { assertThat(main("--working-folder", "non-existing-blah")).contains("non-existing-blah"); } @Test public void wfFlag() throws IOException { assertThat(main("-wf", "non-existing-blah")).contains("non-existing-blah"); } @Test public void unusedFlags() throws IOException { try { main("--what"); fail("expected excdeption"); } catch (RuntimeException e) { assertThat(e).hasMessage("Argument 0 '--what' was not recognized"); } } @Test public void probeSmokeTest() throws IOException { File yaml = new File("smoke-test/cmakeify.yml"); yaml.getParentFile().mkdirs(); main("-wf", yaml.getParent(), "--host", "Linux", "--group-id", "my-group-id", "--artifact-id", "my-artifact-id", "--target-version", "my-target-version"); File scriptFile = new File(".cmakeify/build.sh"); String script = Joiner.on("\n").join(Files.readLines(scriptFile, 
Charsets.UTF_8)); } @Test public void probeSmokeTestZeroVersion() throws IOException { File yaml = new File("smoke-test/cmakeify.yml"); yaml.getParentFile().mkdirs(); main("-wf", yaml.getParent(), "--host", "Linux", "--group-id", "my-group-id", "--artifact-id", "my-artifact-id", "--target-version", "0.0.0"); File scriptFile = new File(".cmakeify/build.sh"); String script = Joiner.on("\n").join(Files.readLines(scriptFile, Charsets.UTF_8)); } @Test public void complicatedSelfHost() throws IOException { File yaml = new File("test-files/complicatedSelfHost/cmakeify.yml"); yaml.getParentFile().mkdirs(); Files.write("includes: [extra-includes]\n" + "android:\n" + " flavors:\n" + " myflags:\n" + " - -DANDROID\n" + " lib: libbob.a\n" + " ndk:\n" + " platforms: [21, 22]\n", yaml, StandardCharsets.UTF_8); String result1 = main("-wf", yaml.getParent(), "--dump"); yaml.delete(); Files.write(result1, yaml, StandardCharsets.UTF_8); System.out.print(result1); String result2 = main("-wf", yaml.getParent(), "--dump"); assertThat(result2).isEqualTo(result1); assertThat(result2).contains("-DANDROID"); assertThat(result2).doesNotContain("default-flavor"); } @Test public void testScript() throws IOException { File yaml = new File("test-files/testScript/cmakeify.yml"); yaml.getParentFile().mkdirs(); Files.write("targets: [android]\n" + "includes: [extra-includes]\n" + "buildTarget: bob\n" + "android:\n" + " flavors:\n" + " myflags: -DANDROID -DBOOST_ROOT=bob\n" + " ndk:\n" + " platforms: [21, 22]\n", yaml, StandardCharsets.UTF_8); main("-wf", yaml.getParent(), "--host", "Linux", "--group-id", "com.github.jomof", "--artifact-id", "my-artifact-id", "--target-version", "my-target-version"); File scriptFile = new File(".cmakeify/build.sh"); String script = Joiner.on("\n").join(Files.readLines(scriptFile, Charsets.UTF_8)); assertThat(script).contains("cmake-3.7.2-Linux-x86_64.tar.gz"); assertThat(script).contains("groupId: com.github.jomof"); assertThat(script).contains("artifactId: 
my-artifact-id"); assertThat(script).contains("version: my-target-version"); assertThat(script).contains("BOOST_ROOT="); assertThat(script).contains("-DANDROID"); assertThat(script).doesNotContain("didn't"); String dump = main("-wf", yaml.getParent(), "--dump"); assertThat(dump).contains("runtimes:"); System.out.printf(dump); } @Test public void testScriptMacOS() throws IOException { File yaml = new File("test-files/testScriptMacOS/cmakeify.yml"); yaml.getParentFile().mkdirs(); Files.write("targets: [android, iOS]\n" + "includes: [extra-includes]\n" + "buildTarget: bob\n" + "iOS:\n" + " flavors:\n" + " myflags: -DOSX -DBOOST_ROOT=bob\n", yaml, StandardCharsets.UTF_8); main("-wf", yaml.getParent(), "--host", "MacOS", "--group-id", "my-group-id", "--artifact-id", "my-artifact-id", "--target-version", "my-target-version", "--target-os", "iOS"); File scriptFile = new File(".cmakeify/build.sh"); String script = Joiner.on("\n").join(Files.readLines(scriptFile, Charsets.UTF_8)); assertThat(script).contains("cmake-3.7.2-Darwin-x86_64.tar.gz"); assertThat(script).contains("groupId: my-group-id"); assertThat(script).contains("artifactId: my-artifact-id"); assertThat(script).contains("version: my-target-version"); assertThat(script).contains("BOOST_ROOT="); assertThat(script).contains("cdep-manifest-iOS.yml"); assertThat(script).contains("-DOSX"); assertThat(script).doesNotContain("--parent"); // mkdir --parents flag doesn't work on OSX assertThat(script).doesNotContain("didn't"); String dump = main("-wf", yaml.getParent(), "--dump"); assertThat(dump).contains("buildTarget:"); assertThat(dump).contains("runtimes:"); System.out.printf(dump); } @Test public void testSQLite() throws IOException { File yaml = new File("test-files/testScript/cmakeify.yml"); yaml.getParentFile().mkdirs(); Files.write("targets: [android]\n" + "buildTargets: [sqlite]\n" + "android:\n" + " ndk:\n" + " runtimes: [c++, gnustl, stlport]\n" + " platforms: [12, 21]\n" + "example: |\n" + " #include 
<sqlite3.h>\n" + " void test() {\n" + " sqlite3_initialize();\n" + " }", yaml, StandardCharsets.UTF_8); main("-wf", yaml.getParent(), "--host", "Linux", "--group-id", "my-group-id", "--artifact-id", "my-artifact-id", "--target-version", "my-target-version"); File scriptFile = new File(".cmakeify/build.sh"); String script = Joiner.on("\n").join(Files.readLines(scriptFile, Charsets.UTF_8)); assertThat(script).contains("cmake-3.7.2-Linux-x86_64.tar.gz"); } @Test public void weirdHost() throws IOException { File yaml = new File("test-files/simpleConfiguration/cmakeify.yml"); Files.write("", yaml, StandardCharsets.UTF_8); assertThat(main("-wf", yaml.getParent(), "--host", "Joebob")) .contains("Joebob"); } @Test public void dumpIsSelfHost() throws IOException { CMakeifyYml config = new CMakeifyYml(); System.out.printf(new Yaml().dump(config)); File yaml = new File("test-files/simpleConfiguration/cmakeify.yml"); yaml.getParentFile().mkdirs(); Files.write("", yaml, StandardCharsets.UTF_8); String result1 = main("-wf", yaml.getParent(), "--dump"); yaml.delete(); Files.write(result1, yaml, StandardCharsets.UTF_8); System.out.print(result1); String result2 = main("-wf", yaml.getParent(), "--dump"); assertThat(result2).isEqualTo(result1); } }
/* * Copyright 2013 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.jayway.restassured.config; import org.apache.http.client.HttpClient; import org.apache.http.client.params.ClientPNames; import org.apache.http.client.params.CookiePolicy; import org.apache.http.cookie.params.CookieSpecPNames; import org.apache.http.entity.mime.HttpMultipartMode; import org.apache.http.impl.client.DefaultHttpClient; import java.util.Collections; import java.util.HashMap; import java.util.Map; import static com.jayway.restassured.internal.assertion.AssertParameter.notNull; import static java.util.Arrays.asList; /** * Configure the Apache HTTP Client parameters. * <p>Note that you can't configure the redirect settings from this config. 
Please use {@link RedirectConfig} for this purpose.</p> * <p/> * The following parameters are applied per default: * <table border=1> * <tr> * <th>Parameter name</th><th>Parameter value</th><th>Description</th> * </tr> * <tr> * <td>{@link ClientPNames#COOKIE_POLICY}</td><td>{@link CookiePolicy#IGNORE_COOKIES}</td><td>Don't automatically set response cookies in subsequent requests.</td> * </tr> * <tr> * <td>{@link CookieSpecPNames#DATE_PATTERNS}</td><td>[EEE, dd-MMM-yyyy HH:mm:ss z, EEE, dd MMM yyyy HH:mm:ss z]</td><td>Defines valid date patterns to be used for parsing non-standard * <code>expires</code> attribute.</td> * <p/> * </tr> * </table> * <p> * You can also specify a http client factory that is used to create the http client instances that REST Assured uses ({@link #httpClientFactory(com.jayway.restassured.config.HttpClientConfig.HttpClientFactory)}). * By default the {@link DefaultHttpClient} is used. It's also possible to specify whether or not this instance should be reused in multiple requests. By default the http client instance is not reused. * </p> * * @see org.apache.http.client.params.ClientPNames * @see org.apache.http.client.params.CookiePolicy * @see org.apache.http.params.CoreProtocolPNames */ public class HttpClientConfig { private static final boolean SHOULD_REUSE_HTTP_CLIENT_INSTANCE_BY_DEFAULT = false; private static final HttpClient NO_HTTP_CLIENT = null; private final boolean shouldReuseHttpClientInstance; private final Map<String, ?> httpClientParams; private final HttpMultipartMode httpMultipartMode; private final HttpClientFactory httpClientFactory; private volatile HttpClient httpClient; /** * Creates a new HttpClientConfig instance with the <code>{@value org.apache.http.client.params.ClientPNames#COOKIE_POLICY}</code> parameter set to <code>{@value org.apache.http.client.params.CookiePolicy#IGNORE_COOKIES}</code>. 
*/ public HttpClientConfig() { this.httpClientFactory = defaultHttpClientFactory(); this.httpClientParams = new HashMap<String, Object>() { { put(ClientPNames.COOKIE_POLICY, CookiePolicy.IGNORE_COOKIES); put(CookieSpecPNames.DATE_PATTERNS, asList("EEE, dd-MMM-yyyy HH:mm:ss z", "EEE, dd MMM yyyy HH:mm:ss z")); } }; this.httpMultipartMode = HttpMultipartMode.STRICT; this.shouldReuseHttpClientInstance = SHOULD_REUSE_HTTP_CLIENT_INSTANCE_BY_DEFAULT; this.httpClient = null; } private HttpClientConfig(HttpClientFactory httpClientFactory, Map<String, ?> httpClientParams, HttpMultipartMode httpMultipartMode, boolean shouldReuseHttpClientInstance, HttpClient abstractHttpClient) { notNull(httpClientParams, "httpClientParams"); notNull(httpMultipartMode, "httpMultipartMode"); notNull(httpClientFactory, "Http Client factory"); this.shouldReuseHttpClientInstance = shouldReuseHttpClientInstance; this.httpClientFactory = httpClientFactory; this.httpClientParams = new HashMap<String, Object>(httpClientParams); this.httpMultipartMode = httpMultipartMode; this.httpClient = abstractHttpClient; } /** * Creates a new HttpClientConfig instance with the parameters defined by the <code>httpClientParams</code>. */ public HttpClientConfig(Map<String, ?> httpClientParams) { this(defaultHttpClientFactory(), httpClientParams, HttpMultipartMode.STRICT, SHOULD_REUSE_HTTP_CLIENT_INSTANCE_BY_DEFAULT, NO_HTTP_CLIENT); } /** * @return The configured parameters */ public Map<String, ?> params() { return Collections.unmodifiableMap(httpClientParams); } /** * @return The same HttpClientConfig instance. Only here for syntactic sugar. */ public HttpClientConfig and() { return this; } /** * Instruct REST Assured to reuse the configured http client instance for multiple requests. By default REST Assured * will create a new {@link org.apache.http.client.HttpClient} instance for each request. 
Note that for this to work * the configuration must be defined statically, for example: * <p/> * <pre> * RestAssured.config = newConfig().httpClient(httpClientConfig().reuseHttpClientInstance()); * </pre> * * @return An updated HttpClientConfig * @see #httpClientFactory(com.jayway.restassured.config.HttpClientConfig.HttpClientFactory) */ public HttpClientConfig reuseHttpClientInstance() { return new HttpClientConfig(httpClientFactory, httpClientParams, httpMultipartMode, true, httpClient); } /** * Instruct REST Assured <i>not</i> to reuse the configured http client instance for multiple requests. This is the default behavior. * * @return An updated HttpClientConfig * @see #reuseHttpClientInstance() */ public HttpClientConfig dontReuseHttpClientInstance() { return new HttpClientConfig(httpClientFactory, httpClientParams, httpMultipartMode, false, NO_HTTP_CLIENT); } /** * If this method returns <code>true</code> then REST Assured will reuse the same {@link org.apache.http.client.HttpClient} instance created * by the {@link #httpClientInstance()} method for all requests. If <code>false</code> is returned then REST Assured creates a new instance for each request. * <p> * By default <code>false</code> is returned. * </p> * Note that for this to work the configuration must be defined statically, for example: * <pre> * RestAssured.config = newConfig().httpClient(httpClientConfig().reuseHttpClientInstance()); * </pre> * * @return <code>true</code> if the same HTTP Client instance should be reused between several requests, <code>false</code> otherwise. */ public boolean isConfiguredToReuseTheSameHttpClientInstance() { return shouldReuseHttpClientInstance; } /** * Set a http client parameter. 
* * @param parameterName The name of the parameter * @param parameterValue The value of the parameter (may be null) * @param <T> The parameter type * @return An updated HttpClientConfig */ public <T> HttpClientConfig setParam(String parameterName, T parameterValue) { notNull(parameterName, "Parameter name"); final Map<String, Object> newParams = new HashMap<String, Object>(httpClientParams); newParams.put(parameterName, parameterValue); return new HttpClientConfig(newParams); } /** * Replaces the currently configured parameters with the ones supplied by <code>httpClientParams</code>. This method is the same as {@link #setParams(java.util.Map)}. * * @param httpClientParams The parameters to set. * @return An updated HttpClientConfig */ public HttpClientConfig withParams(Map<String, ?> httpClientParams) { return new HttpClientConfig(httpClientParams); } /** * Replaces the currently configured parameters with the ones supplied by <code>httpClientParams</code>. This method is the same as {@link #withParams(java.util.Map)}. * * @param httpClientParams The parameters to set. * @return An updated HttpClientConfig */ public HttpClientConfig setParams(Map<String, ?> httpClientParams) { return withParams(httpClientParams); } /** * Add the given parameters to an already configured number of parameters. * * @param httpClientParams The parameters. * @return An updated HttpClientConfig */ public HttpClientConfig addParams(Map<String, ?> httpClientParams) { notNull(httpClientParams, "httpClientParams"); final Map<String, Object> newParams = new HashMap<String, Object>(httpClientParams); newParams.putAll(httpClientParams); return new HttpClientConfig(newParams); } /** * Set the http client factory that Rest Assured should use when making request. For each request REST Assured will invoke the factory to get the a the HttpClient instance. * * @param httpClientFactory The http client factory to use. 
* @return An updated HttpClientConfig */ public HttpClientConfig httpClientFactory(HttpClientFactory httpClientFactory) { return new HttpClientConfig(httpClientFactory, httpClientParams, httpMultipartMode, shouldReuseHttpClientInstance, NO_HTTP_CLIENT); } /** * @return The configured http client that will create an {@link org.apache.http.client.HttpClient} instances that's used by REST Assured when making a request. */ public HttpClient httpClientInstance() { if (isConfiguredToReuseTheSameHttpClientInstance()) { if (httpClient == NO_HTTP_CLIENT) { httpClient = httpClientFactory.createHttpClient(); } return httpClient; } return httpClientFactory.createHttpClient(); } /** * Specify the HTTP Multipart mode when sending multi-part data. * * @param httpMultipartMode The multi-part mode to set. * @return An updated HttpClientConfig */ public HttpClientConfig httpMultipartMode(HttpMultipartMode httpMultipartMode) { return new HttpClientConfig(httpClientFactory, httpClientParams, httpMultipartMode, shouldReuseHttpClientInstance, httpClient); } /** * @return A static way to create a new HttpClientConfig instance without calling "new" explicitly. Mainly for syntactic sugar. */ public static HttpClientConfig httpClientConfig() { return new HttpClientConfig(); } /** * @return The http multi-part mode. */ public HttpMultipartMode httpMultipartMode() { return httpMultipartMode; } private static HttpClientFactory defaultHttpClientFactory() { return new HttpClientFactory() { @Override public HttpClient createHttpClient() { return new DefaultHttpClient(); } }; } /** * A factory for creating and configuring a custom http client instance that will be used by REST Assured. */ public static abstract class HttpClientFactory { /** * Create an instance of {@link HttpClient} that'll be used by REST Assured when making requests. By default * REST Assured creates a {@link DefaultHttpClient}. 
* <p> * <b>Important: Version 1.9.0 of REST Assured ONLY supports instances of {@link org.apache.http.impl.client.AbstractHttpClient}</b>. The API is * how ever prepared for future upgrades. * </p> * * @return An instance of {@link HttpClient}. */ public abstract HttpClient createHttpClient(); } }
/* * Copyright (C) 2012-2021 DuyHai DOAN * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package info.archinnov.achilles.configuration; import static info.archinnov.achilles.configuration.ConfigurationParameters.*; import static javax.validation.Validation.buildDefaultValidatorFactory; import java.util.*; import java.util.concurrent.*; import java.util.function.Supplier; import javax.validation.ValidationException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.datastax.driver.core.Cluster; import com.datastax.driver.core.ConsistencyLevel; import com.datastax.driver.core.Session; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; import info.archinnov.achilles.async.DefaultExecutorThreadFactory; import info.archinnov.achilles.exception.AchillesException; import info.archinnov.achilles.internals.cache.StatementsCache; import info.archinnov.achilles.internals.context.ConfigurationContext; import info.archinnov.achilles.internals.factory.DefaultBeanFactory; import info.archinnov.achilles.internals.options.CassandraOptions; import info.archinnov.achilles.internals.types.ConfigMap; import info.archinnov.achilles.json.DefaultJacksonMapperFactory; import info.archinnov.achilles.json.JacksonMapperFactory; import info.archinnov.achilles.type.SchemaNameProvider; import info.archinnov.achilles.type.codec.Codec; import info.archinnov.achilles.type.codec.CodecSignature; import 
info.archinnov.achilles.type.factory.BeanFactory; import info.archinnov.achilles.type.interceptor.Interceptor; import info.archinnov.achilles.type.strategy.InsertStrategy; import info.archinnov.achilles.type.strategy.NamingStrategy; /** * Extract bootstrap argument and create a configuration context */ public class ArgumentExtractor { static final int DEFAULT_LRU_CACHE_SIZE = 10000; static final boolean DEFAULT_ENABLE_PRE_MUTATE_BEAN_VALIDATION = false; static final boolean DEFAULT_ENABLE_POST_LOAD_BEAN_VALIDATION = false; static final int DEFAULT_THREAD_POOL_MIN_THREAD_COUNT = 10; static final int DEFAULT_THREAD_POOL_MAX_THREAD_COUNT = 10; static final long DEFAULT_THREAD_POOL_THREAD_TTL = 60L; static final int DEFAULT_THREAD_POOL_QUEUE_SIZE = 1000; static final ThreadFactory DEFAULT_THREAD_POOL_THREAD_FACTORY = new DefaultExecutorThreadFactory(); static final InsertStrategy DEFAULT_INSERT_STRATEGY = InsertStrategy.ALL_FIELDS; static final NamingStrategy DEFAULT_GLOBAL_NAMING_STRATEGY = NamingStrategy.LOWER_CASE; static final BeanFactory DEFAULT_BEAN_FACTORY = new DefaultBeanFactory(); static final Integer DEFAULT_DML_RESULTS_DISPLAY_SIZE = 10; private static final Logger LOGGER = LoggerFactory.getLogger(ArgumentExtractor.class); public static ConfigurationContext initConfigContext(Cluster cluster, ConfigMap configurationMap) { LOGGER.trace("Build ConfigurationContext from configuration map"); ConfigurationContext configContext = new ConfigurationContext(); configContext.setCurrentKeyspace(initKeyspaceName(configurationMap)); configContext.setForceSchemaGeneration(initForceSchemaCreation(configurationMap)); configContext.setManageEntities(initManagedEntities(configurationMap)); configContext.setJacksonMapperFactory(initObjectMapperFactory(configurationMap)); configContext.setDefaultReadConsistencyLevel(initDefaultReadConsistencyLevel(configurationMap)); configContext.setDefaultWriteConsistencyLevel(initDefaultWriteConsistencyLevel(configurationMap)); 
configContext.setDefaultSerialConsistencyLevel(initDefaultSerialConsistencyLevel(configurationMap)); configContext.setReadConsistencyLevelMap(initReadConsistencyMap(configurationMap)); configContext.setWriteConsistencyLevelMap(initWriteConsistencyMap(configurationMap)); configContext.setSerialConsistencyLevelMap(initSerialConsistencyMap(configurationMap)); configContext.setBeanValidator(initValidator(configurationMap)); configContext.setPostLoadBeanValidationEnabled(initPostLoadBeanValidation(configurationMap)); configContext.setInterceptors(initInterceptors(configurationMap)); configContext.setPreparedStatementLRUCacheSize(initPreparedStatementsCacheSize(configurationMap)); configContext.setGlobalInsertStrategy(initInsertStrategy(configurationMap)); configContext.setGlobalNamingStrategy(initGlobalNamingStrategy(configurationMap)); configContext.setSchemaNameProvider(initSchemaNameProvider(configurationMap)); configContext.setExecutorService(initExecutorService(configurationMap)); configContext.setProvidedExecutorService(initProvidedExecutorService(configurationMap)); configContext.setDefaultBeanFactory(initDefaultBeanFactory(configurationMap)); configContext.setSession(initSession(cluster, configurationMap)); configContext.setProvidedSession(initProvidedSession(configurationMap)); configContext.setStatementsCache(initStatementCache(configurationMap)); configContext.setRuntimeCodecs(initRuntimeCodecs(configurationMap)); configContext.setValidateSchema(initValidateSchema(configurationMap)); configContext.setDMLResultsDisplaySize(initDMLResultsDisplayLimit(configurationMap)); return configContext; } static boolean initValidateSchema(ConfigMap configurationMap) { LOGGER.trace("Extract 'schema validation enabled' from configuration map"); return configurationMap.getTypedOr(VALIDATE_SCHEMA, true); } static boolean initForceSchemaCreation(ConfigMap configurationMap) { LOGGER.trace("Extract 'force table creation' from configuration map"); return 
// Configuration-extraction helpers: each init*() pulls one typed setting out of
// the Achilles ConfigMap, falling back to a documented default when absent.
// NOTE(review): the first fragment below is the tail of a method whose header
// lies before this chunk of the file; it is left untouched.
configurationMap.getTypedOr(FORCE_SCHEMA_GENERATION, false);
    }

    /** Entity classes to manage, or an empty list when none are configured. */
    static public List<Class<?>> initManagedEntities(ConfigMap configMap) {
        LOGGER.trace("Extract managed entity classes from configuration map");
        return configMap.getTypedOr(MANAGED_ENTITIES, new ArrayList<>());
    }

    /**
     * Resolves the Jackson mapper factory: an explicitly configured factory wins,
     * then a factory wrapping an explicitly configured ObjectMapper, then the
     * built-in default factory.
     */
    static JacksonMapperFactory initObjectMapperFactory(ConfigMap configurationMap) {
        LOGGER.trace("Extract object mapper factory from configuration map");
        JacksonMapperFactory jacksonMapperFactory = configurationMap.getTyped(JACKSON_MAPPER_FACTORY);
        if (jacksonMapperFactory == null) {
            ObjectMapper mapper = configurationMap.getTyped(JACKSON_MAPPER);
            if (mapper != null) {
                jacksonMapperFactory = factoryFromMapper(mapper);
            } else {
                jacksonMapperFactory = new DefaultJacksonMapperFactory();
            }
        }
        return jacksonMapperFactory;
    }

    /** Wraps a single ObjectMapper in a factory that returns it for every type. */
    protected static JacksonMapperFactory factoryFromMapper(final ObjectMapper mapper) {
        return new JacksonMapperFactory() {
            @Override
            public <T> ObjectMapper getMapper(Class<T> type) {
                return mapper;
            }
        };
    }

    /** Default read consistency level, if configured. */
    static Optional<ConsistencyLevel> initDefaultReadConsistencyLevel(ConfigMap configMap) {
        LOGGER.trace("Extract default read Consistency level from configuration map");
        return Optional.ofNullable(configMap.getTyped(CONSISTENCY_LEVEL_READ_DEFAULT));
    }

    /** Default write consistency level, if configured. */
    static Optional<ConsistencyLevel> initDefaultWriteConsistencyLevel(ConfigMap configMap) {
        // Fixed copy/paste slip: this trace previously said "read".
        LOGGER.trace("Extract default write Consistency level from configuration map");
        return Optional.ofNullable(configMap.getTyped(CONSISTENCY_LEVEL_WRITE_DEFAULT));
    }

    /** Default serial consistency level, if configured. */
    static Optional<ConsistencyLevel> initDefaultSerialConsistencyLevel(ConfigMap configMap) {
        // Fixed copy/paste slip: this trace previously said "write".
        LOGGER.trace("Extract default serial Consistency level from configuration map");
        return Optional.ofNullable(configMap.getTyped(CONSISTENCY_LEVEL_SERIAL_DEFAULT));
    }

    /** Per-table read consistency overrides, or an empty immutable map. */
    public static Map<String, ConsistencyLevel> initReadConsistencyMap(ConfigMap configMap) {
        LOGGER.trace("Extract read Consistency level map from configuration map");
        return configMap.getTypedOr(CONSISTENCY_LEVEL_READ_MAP, ImmutableMap.<String, ConsistencyLevel>of());
    }

    /** Per-table write consistency overrides, or an empty immutable map. */
    public static Map<String, ConsistencyLevel> initWriteConsistencyMap(ConfigMap configMap) {
        LOGGER.trace("Extract write Consistency level map from configuration map");
        return configMap.getTypedOr(CONSISTENCY_LEVEL_WRITE_MAP, ImmutableMap.<String, ConsistencyLevel>of());
    }

    /** Per-table serial consistency overrides, or an empty immutable map. */
    public static Map<String, ConsistencyLevel> initSerialConsistencyMap(ConfigMap configMap) {
        LOGGER.trace("Extract serial Consistency level map from configuration map");
        return configMap.getTypedOr(CONSISTENCY_LEVEL_SERIAL_MAP, ImmutableMap.<String, ConsistencyLevel>of());
    }

    /** Keyspace name, if configured. */
    public static Optional<String> initKeyspaceName(ConfigMap configurationMap) {
        return Optional.ofNullable(configurationMap.<String>getTyped(KEYSPACE_NAME));
    }

    /**
     * Uses the provided native Session when present; otherwise connects the
     * cluster (to the configured keyspace when one was given, else without one).
     */
    public static Session initSession(Cluster cluster, ConfigMap configurationMap) {
        LOGGER.trace("Extract or init Session from configuration map");
        return Optional.<Session>ofNullable(configurationMap.getTyped(NATIVE_SESSION))
                .orElseGet(() -> initKeyspaceName(configurationMap)
                        .map(cluster::connect)
                        .orElseGet(cluster::connect)
                );
    }

    /** True when the caller supplied the Session (it was not built internally). */
    public static boolean initProvidedSession(ConfigMap configurationMap) {
        LOGGER.trace("Is Session object provided or built internally ?");
        return Optional.<Session>ofNullable(configurationMap.getTyped(NATIVE_SESSION)).isPresent();
    }

    /** Event interceptors, de-duplicated while preserving registration order. */
    @SuppressWarnings("unchecked")
    public static List<Interceptor<?>> initInterceptors(ConfigMap configurationMap) {
        LOGGER.trace("Extract or init Interceptors");
        List<Interceptor<?>> interceptors = (List<Interceptor<?>>) configurationMap.get(EVENT_INTERCEPTORS);
        if (interceptors == null) {
            interceptors = new ArrayList<>();
        }
        // LinkedHashSet drops duplicates but keeps first-seen ordering.
        return new ArrayList<>(new LinkedHashSet<>(interceptors));
    }

    /**
     * Bean Validation (JSR 303) validator when pre-mutate validation is enabled,
     * or null when disabled.
     *
     * @throws AchillesException when the default ValidatorFactory cannot be built
     */
    static javax.validation.Validator initValidator(ConfigMap configurationMap) {
        LOGGER.trace("Extract or init Bean validation");
        Boolean enablePreMutateBeanValidation = configurationMap.getTypedOr(BEAN_VALIDATION_ENABLE, DEFAULT_ENABLE_PRE_MUTATE_BEAN_VALIDATION);
        if (enablePreMutateBeanValidation) {
            try {
                javax.validation.Validator defaultValidator = buildDefaultValidatorFactory().getValidator();
                return configurationMap.getTypedOr(BEAN_VALIDATION_VALIDATOR, defaultValidator);
            } catch (ValidationException vex) {
                throw new AchillesException("Cannot bootstrap ValidatorFactory for Bean Validation (JSR 303)", vex);
            }
        }
        return null;
    }

    /** Whether to run Bean Validation on entities after load. */
    static boolean initPostLoadBeanValidation(ConfigMap configMap) {
        LOGGER.trace("Extract or init Post Load Bean validation");
        return configMap.getTypedOr(POST_LOAD_BEAN_VALIDATION_ENABLE, DEFAULT_ENABLE_POST_LOAD_BEAN_VALIDATION);
    }

    /** LRU cache size for prepared statements. */
    public static Integer initPreparedStatementsCacheSize(ConfigMap configMap) {
        LOGGER.trace("Extract or init prepared statements cache size");
        return configMap.getTypedOr(PREPARED_STATEMENTS_CACHE_SIZE, DEFAULT_LRU_CACHE_SIZE);
    }

    /** Global INSERT strategy. */
    public static InsertStrategy initInsertStrategy(ConfigMap configMap) {
        LOGGER.trace("Extract or init global Insert strategy");
        return configMap.getTypedOr(GLOBAL_INSERT_STRATEGY, DEFAULT_INSERT_STRATEGY);
    }

    /** Global naming strategy. */
    public static NamingStrategy initGlobalNamingStrategy(ConfigMap configMap) {
        LOGGER.trace("Extract or init global Naming strategy");
        return configMap.getTypedOr(GLOBAL_NAMING_STRATEGY, DEFAULT_GLOBAL_NAMING_STRATEGY);
    }

    /** Runtime schema-name provider, if configured. */
    public static Optional<SchemaNameProvider> initSchemaNameProvider(ConfigMap configMap) {
        LOGGER.trace("Extract or init schema name provider");
        return Optional.ofNullable(configMap.getTyped(SCHEMA_NAME_PROVIDER));
    }

    /** Async executor: the configured one, or a lazily built default thread pool. */
    public static ExecutorService initExecutorService(ConfigMap configMap) {
        LOGGER.trace("Extract or init executor service (thread pool)");
        return configMap.getTypedOr(EXECUTOR_SERVICE, initializeDefaultExecutor(configMap));
    }

    /** True when the caller supplied the executor (it was not built internally). */
    public static boolean initProvidedExecutorService(ConfigMap configMap) {
        LOGGER.trace("Is executor service provided or built internally ? ");
        return Optional.ofNullable(configMap.<ExecutorService>getTyped(EXECUTOR_SERVICE)).isPresent();
    }

    /**
     * Lazily builds the default ThreadPoolExecutor from the individual
     * min/max/keep-alive/queue-size/thread-factory settings.
     */
    private static Supplier<ExecutorService> initializeDefaultExecutor(final ConfigMap configMap) {
        return () -> {
            int minThreads = configMap.getTypedOr(DEFAULT_EXECUTOR_SERVICE_MIN_THREAD, DEFAULT_THREAD_POOL_MIN_THREAD_COUNT);
            int maxThreads = configMap.getTypedOr(DEFAULT_EXECUTOR_SERVICE_MAX_THREAD, DEFAULT_THREAD_POOL_MAX_THREAD_COUNT);
            long threadKeepAlive = configMap.getTypedOr(DEFAULT_EXECUTOR_SERVICE_THREAD_KEEPALIVE, DEFAULT_THREAD_POOL_THREAD_TTL);
            int queueSize = configMap.getTypedOr(DEFAULT_EXECUTOR_SERVICE_QUEUE_SIZE, DEFAULT_THREAD_POOL_QUEUE_SIZE);
            ThreadFactory threadFactory = configMap.getTypedOr(DEFAULT_EXECUTOR_SERVICE_THREAD_FACTORY, DEFAULT_THREAD_POOL_THREAD_FACTORY);
            return new ThreadPoolExecutor(minThreads, maxThreads, threadKeepAlive, TimeUnit.SECONDS,
                    new LinkedBlockingQueue<>(queueSize), threadFactory);
        };
    }

    /** Bean factory used to instantiate entities; falls back to the built-in default. */
    private static BeanFactory initDefaultBeanFactory(final ConfigMap configMap) {
        LOGGER.trace("Extract or init default bean factory");
        if (configMap.containsKey(ConfigurationParameters.DEFAULT_BEAN_FACTORY)) {
            return configMap.<BeanFactory>getTyped(ConfigurationParameters.DEFAULT_BEAN_FACTORY);
        } else {
            return DEFAULT_BEAN_FACTORY;
        }
    }

    /** Prepared-statements cache: the configured one, or a new cache of the configured size. */
    private static StatementsCache initStatementCache(final ConfigMap configMap) {
        LOGGER.trace("Extract or init default statement cache");
        if (configMap.containsKey(STATEMENTS_CACHE)) {
            return configMap.getTyped(STATEMENTS_CACHE);
        } else {
            final Integer cacheSize = initPreparedStatementsCacheSize(configMap);
            return new StatementsCache(cacheSize);
        }
    }

    /** Runtime codec registry, or a fresh empty mutable map when none configured. */
    private static Map<CodecSignature<?, ?>, Codec<?, ?>> initRuntimeCodecs(final ConfigMap configMap) {
        LOGGER.trace("Extract or init default runtime codecs");
        if (configMap.containsKey(RUNTIME_CODECS)) {
            return configMap.getTyped(RUNTIME_CODECS);
        } else {
            return new HashMap<>();
        }
    }

    /** DML results display limit, clamped to [0, MAX_RESULTS_DISPLAY_SIZE]. */
    private static Integer initDMLResultsDisplayLimit(final ConfigMap configMap) {
        if(configMap.containsKey(DML_RESULTS_DISPLAY_SIZE)) {
            final Integer resultsDisplaySize = configMap.getTyped(DML_RESULTS_DISPLAY_SIZE);
            return Integer.max(0,Integer.min(resultsDisplaySize, CassandraOptions.MAX_RESULTS_DISPLAY_SIZE));
        } else {
            return DEFAULT_DML_RESULTS_DISPLAY_SIZE;
        }
    }
}
/*
 * Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hazelcast.function;

import com.hazelcast.internal.util.ExceptionUtil;
import com.hazelcast.function.ComparatorsEx.NullComparator;

import java.io.Serializable;
import java.util.Comparator;
import java.util.function.Function;
import java.util.function.ToDoubleFunction;
import java.util.function.ToIntFunction;
import java.util.function.ToLongFunction;

import static com.hazelcast.internal.serialization.impl.SerializationUtil.checkSerializable;
import static com.hazelcast.internal.util.Preconditions.checkNotNull;

/**
 * {@code Serializable} variant of {@link Comparator java.util.Comparator}
 * which declares checked exception.
 * <p>
 * Note: every lambda returned by the factory methods below is serializable,
 * because the target type {@code ComparatorEx} extends {@link Serializable}.
 * Arguments are therefore checked for serializability up front via
 * {@code checkSerializable}.
 *
 * @param <T> the type of objects that may be compared by this comparator
 *
 * @since 4.0
 */
@FunctionalInterface
@SuppressWarnings("checkstyle:methodcount")
public interface ComparatorEx<T> extends Comparator<T>, Serializable {

    /**
     * Exception-declaring version of {@link Comparator#compare}.
     * @throws Exception in case of any exceptional case
     */
    int compareEx(T o1, T o2) throws Exception;

    @Override
    default int compare(T o1, T o2) {
        try {
            return compareEx(o1, o2);
        } catch (Exception e) {
            // Rethrow the checked exception without wrapping, so callers see
            // the original type even though compare() declares none.
            throw ExceptionUtil.sneakyThrow(e);
        }
    }

    /**
     * {@code Serializable} variant of {@link
     * Comparator#naturalOrder()
     * java.util.Comparator#naturalOrder()}.
     * @param <T> the {@link Comparable} type of element to be compared
     */
    @SuppressWarnings("unchecked")
    static <T extends Comparable<? super T>> ComparatorEx<T> naturalOrder() {
        return (ComparatorEx<T>) ComparatorsEx.NATURAL_ORDER;
    }

    /**
     * {@code Serializable} variant of {@link
     * Comparator#reverseOrder()
     * java.util.Comparator#reverseOrder()}.
     * @param <T> the {@link Comparable} type of element to be compared
     */
    @SuppressWarnings("unchecked")
    static <T extends Comparable<? super T>> ComparatorEx<T> reverseOrder() {
        return (ComparatorEx<T>) ComparatorsEx.REVERSE_ORDER;
    }

    /**
     * {@code Serializable} variant of {@link
     * Comparator#nullsFirst(Comparator)
     * java.util.Comparator#nullsFirst(Comparator)}.
     * @param <T> the type of the elements to be compared
     */
    static <T> ComparatorEx<T> nullsFirst(Comparator<? super T> comparator) {
        checkSerializable(comparator, "comparator");
        NullComparator<T> c = new NullComparator<>(true);
        // A null inner comparator means "nulls first, non-nulls considered equal".
        return comparator != null ? c.thenComparing(comparator) : c;
    }

    /**
     * {@code Serializable} variant of {@link
     * Comparator#nullsFirst(Comparator)
     * java.util.Comparator#nullsFirst(Comparator)}.
     * @param <T> the type of the elements to be compared
     */
    static <T> ComparatorEx<T> nullsFirst(ComparatorEx<? super T> comparator) {
        return nullsFirst((Comparator<? super T>) comparator);
    }

    /**
     * {@code Serializable} variant of {@link
     * Comparator#nullsLast(Comparator)
     * java.util.Comparator#nullsLast(Comparator)}.
     * @param <T> the type of the elements to be compared
     */
    static <T> ComparatorEx<T> nullsLast(Comparator<? super T> comparator) {
        checkSerializable(comparator, "comparator");
        NullComparator<T> c = new NullComparator<>(false);
        // A null inner comparator means "nulls last, non-nulls considered equal".
        return comparator != null ? c.thenComparing(comparator) : c;
    }

    /**
     * {@code Serializable} variant of {@link
     * Comparator#nullsLast(Comparator)
     * java.util.Comparator#nullsLast(Comparator)}.
     * @param <T> the type of the elements to be compared
     */
    static <T> ComparatorEx<T> nullsLast(ComparatorEx<? super T> comparator) {
        return nullsLast((Comparator<? super T>) comparator);
    }

    /**
     * {@code Serializable} variant of {@link
     * Comparator#comparing(Function, Comparator)
     * java.util.Comparator#comparing(Function, Comparator)}.
     * @param <T> the type of element to be compared
     * @param <U> the type of the sort key
     */
    static <T, U> ComparatorEx<T> comparing(
            Function<? super T, ? extends U> toKeyFn,
            Comparator<? super U> keyComparator
    ) {
        checkNotNull(toKeyFn, "toKeyFn");
        checkNotNull(keyComparator, "keyComparator");
        checkSerializable(toKeyFn, "toKeyFn");
        checkSerializable(keyComparator, "keyComparator");
        return (c1, c2) -> keyComparator.compare(toKeyFn.apply(c1), toKeyFn.apply(c2));
    }

    /**
     * {@code Serializable} variant of {@link
     * Comparator#comparing(Function, Comparator)
     * java.util.Comparator#comparing(Function, Comparator)}.
     * @param <T> the type of element to be compared
     * @param <U> the type of the sort key
     */
    static <T, U> ComparatorEx<T> comparing(
            FunctionEx<? super T, ? extends U> toKeyFn,
            ComparatorEx<? super U> keyComparator) {
        return comparing((Function<? super T, ? extends U>) toKeyFn, keyComparator);
    }

    /**
     * {@code Serializable} variant of {@link
     * Comparator#comparing(Function)
     * java.util.Comparator#comparing(Function)}.
     * @param <T> the type of element to be compared
     * @param <U> the type of the {@code Comparable} sort key
     */
    static <T, U extends Comparable<? super U>> ComparatorEx<T> comparing(
            Function<? super T, ? extends U> toKeyFn
    ) {
        checkNotNull(toKeyFn, "toKeyFn");
        checkSerializable(toKeyFn, "toKeyFn");
        return (left, right) -> toKeyFn.apply(left).compareTo(toKeyFn.apply(right));
    }

    /**
     * {@code Serializable} variant of {@link
     * Comparator#comparing(Function)
     * java.util.Comparator#comparing(Function)}.
     * @param <T> the type of element to be compared
     * @param <U> the type of the {@code Comparable} sort key
     */
    static <T, U extends Comparable<? super U>> ComparatorEx<T> comparing(
            FunctionEx<? super T, ? extends U> toKeyFn
    ) {
        return comparing((Function<? super T, ? extends U>) toKeyFn);
    }

    /**
     * {@code Serializable} variant of {@link
     * Comparator#comparingInt(ToIntFunction)
     * java.util.Comparator#comparingInt(ToIntFunction)}.
     * @param <T> the type of element to be compared
     */
    static <T> ComparatorEx<T> comparingInt(ToIntFunction<? super T> toKeyFn) {
        checkNotNull(toKeyFn, "toKeyFn");
        checkSerializable(toKeyFn, "toKeyFn");
        return (c1, c2) -> Integer.compare(toKeyFn.applyAsInt(c1), toKeyFn.applyAsInt(c2));
    }

    /**
     * {@code Serializable} variant of {@link
     * Comparator#comparingInt(ToIntFunction)
     * java.util.Comparator#comparingInt(ToIntFunction)}.
     * @param <T> the type of element to be compared
     */
    static <T> ComparatorEx<T> comparingInt(ToIntFunctionEx<? super T> toKeyFn) {
        return comparingInt((ToIntFunction<? super T>) toKeyFn);
    }

    /**
     * {@code Serializable} variant of {@link
     * Comparator#comparingLong(ToLongFunction)
     * java.util.Comparator#comparingLong(ToLongFunction)}.
     * @param <T> the type of element to be compared
     */
    static <T> ComparatorEx<T> comparingLong(ToLongFunction<? super T> toKeyFn) {
        checkNotNull(toKeyFn, "toKeyFn");
        checkSerializable(toKeyFn, "toKeyFn");
        return (c1, c2) -> Long.compare(toKeyFn.applyAsLong(c1), toKeyFn.applyAsLong(c2));
    }

    /**
     * {@code Serializable} variant of {@link
     * Comparator#comparingLong(ToLongFunction)
     * java.util.Comparator#comparingLong(ToLongFunction)}.
     * @param <T> the type of element to be compared
     */
    static <T> ComparatorEx<T> comparingLong(ToLongFunctionEx<? super T> toKeyFn) {
        return comparingLong((ToLongFunction<? super T>) toKeyFn);
    }

    /**
     * {@code Serializable} variant of {@link
     * Comparator#comparingDouble(ToDoubleFunction)
     * java.util.Comparator#comparingDouble(ToDoubleFunction)}.
     * @param <T> the type of element to be compared
     */
    static <T> ComparatorEx<T> comparingDouble(ToDoubleFunction<? super T> toKeyFn) {
        checkNotNull(toKeyFn, "toKeyFn");
        checkSerializable(toKeyFn, "toKeyFn");
        return (c1, c2) -> Double.compare(toKeyFn.applyAsDouble(c1), toKeyFn.applyAsDouble(c2));
    }

    /**
     * {@code Serializable} variant of {@link
     * Comparator#comparingDouble(ToDoubleFunction)
     * java.util.Comparator#comparingDouble(ToDoubleFunction)}.
     * @param <T> the type of element to be compared
     */
    static <T> ComparatorEx<T> comparingDouble(ToDoubleFunctionEx<? super T> toKeyFn) {
        return comparingDouble((ToDoubleFunction<? super T>) toKeyFn);
    }

    /**
     * {@code Serializable} variant of {@link
     * Comparator#thenComparing(Comparator)
     * java.util.Comparator#thenComparing(Comparator)}.
     */
    @Override
    default ComparatorEx<T> thenComparing(Comparator<? super T> other) {
        checkNotNull(other, "other");
        checkSerializable(other, "other");
        return (c1, c2) -> {
            // 'other' only breaks ties left by this comparator.
            int res = compare(c1, c2);
            return (res != 0) ? res : other.compare(c1, c2);
        };
    }

    /**
     * {@code Serializable} variant of {@link
     * Comparator#thenComparing(Comparator)
     * java.util.Comparator#thenComparing(Comparator)}.
     */
    default ComparatorEx<T> thenComparing(ComparatorEx<? super T> other) {
        return thenComparing((Comparator<? super T>) other);
    }

    /**
     * {@code Serializable} variant of {@link
     * Comparator#thenComparing(Function, Comparator)
     * java.util.Comparator#thenComparing(Function, Comparator)}.
     * @param <U> the type of the sort key
     */
    @Override
    default <U> ComparatorEx<T> thenComparing(
            Function<? super T, ? extends U> toKeyFn,
            Comparator<? super U> keyComparator
    ) {
        checkSerializable(toKeyFn, "toKeyFn");
        checkSerializable(keyComparator, "keyComparator");
        return thenComparing(comparing(toKeyFn, keyComparator));
    }

    /**
     * {@code Serializable} variant of {@link
     * Comparator#thenComparing(Function, Comparator)
     * java.util.Comparator#thenComparing(Function, Comparator)}.
     * @param <U> the type of the sort key
     */
    default <U> ComparatorEx<T> thenComparing(
            FunctionEx<? super T, ? extends U> toKeyFn,
            ComparatorEx<? super U> keyComparator) {
        return thenComparing((Function<? super T, ? extends U>) toKeyFn, keyComparator);
    }

    /**
     * {@code Serializable} variant of {@link
     * Comparator#thenComparing(Function)
     * java.util.Comparator#thenComparing(Function)}.
     * @param <U> the type of the {@link Comparable} sort key
     */
    @Override
    default <U extends Comparable<? super U>> ComparatorEx<T> thenComparing(
            Function<? super T, ? extends U> toKeyFn
    ) {
        checkSerializable(toKeyFn, "toKeyFn");
        return thenComparing(comparing(toKeyFn));
    }

    /**
     * {@code Serializable} variant of {@link
     * Comparator#thenComparing(Function)
     * java.util.Comparator#thenComparing(Function)}.
     * @param <U> the type of the {@link Comparable} sort key
     */
    default <U extends Comparable<? super U>> ComparatorEx<T> thenComparing(
            FunctionEx<? super T, ? extends U> toKeyFn) {
        return thenComparing((Function<? super T, ? extends U>) toKeyFn);
    }

    /**
     * {@code Serializable} variant of {@link
     * Comparator#thenComparingInt(ToIntFunction)
     * java.util.Comparator#thenComparingInt(ToIntFunction)}.
     */
    @Override
    default ComparatorEx<T> thenComparingInt(ToIntFunction<? super T> toKeyFn) {
        checkSerializable(toKeyFn, "toKeyFn");
        return thenComparing(comparingInt(toKeyFn));
    }

    /**
     * {@code Serializable} variant of {@link
     * Comparator#thenComparingInt(ToIntFunction)
     * java.util.Comparator#thenComparingInt(ToIntFunction)}.
     */
    default ComparatorEx<T> thenComparingInt(ToIntFunctionEx<? super T> toKeyFn) {
        return thenComparingInt((ToIntFunction<? super T>) toKeyFn);
    }

    /**
     * {@code Serializable} variant of {@link
     * Comparator#thenComparingLong(ToLongFunction)
     * java.util.Comparator#thenComparingLong(ToLongFunction)}.
     */
    @Override
    default ComparatorEx<T> thenComparingLong(ToLongFunction<? super T> toKeyFn) {
        checkSerializable(toKeyFn, "toKeyFn");
        return thenComparing(comparingLong(toKeyFn));
    }

    /**
     * {@code Serializable} variant of {@link
     * Comparator#thenComparingLong(ToLongFunction)
     * java.util.Comparator#thenComparingLong(ToLongFunction)}.
     */
    default ComparatorEx<T> thenComparingLong(ToLongFunctionEx<? super T> toKeyFn) {
        return thenComparingLong((ToLongFunction<? super T>) toKeyFn);
    }

    /**
     * {@code Serializable} variant of {@link
     * Comparator#thenComparingDouble(ToDoubleFunction)
     * java.util.Comparator#thenComparingDouble(ToDoubleFunction)}.
     */
    @Override
    default ComparatorEx<T> thenComparingDouble(ToDoubleFunction<? super T> toKeyFn) {
        checkSerializable(toKeyFn, "toKeyFn");
        return thenComparing(comparingDouble(toKeyFn));
    }

    /**
     * {@code Serializable} variant of {@link
     * Comparator#thenComparingDouble(ToDoubleFunction)
     * java.util.Comparator#thenComparingDouble(ToDoubleFunction)}.
     */
    default ComparatorEx<T> thenComparingDouble(ToDoubleFunctionEx<? super T> toKeyFn) {
        return thenComparingDouble((ToDoubleFunction<? super T>) toKeyFn);
    }

    /**
     * {@code Serializable} variant of {@link Comparator#reversed()
     * java.util.Comparator#reversed()}
     */
    @Override
    default ComparatorEx<T> reversed() {
        // Swapping the argument order inverts the ordering.
        return (o1, o2) -> compare(o2, o1);
    }
}
package org.jgroups.tests;

import org.jgroups.Global;
import org.jgroups.JChannel;
import org.jgroups.blocks.ReplicatedHashMap;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Test methods for ReplicatedHashMap
 * @author Bela Ban
 */
@Test(groups={Global.STACK_DEPENDENT,Global.EAP_EXCLUDED},singleThreaded=true)
public class ReplicatedHashMapTest extends ChannelTestBase {
    // Two replicated maps on two channels joined to the same cluster; a write
    // through either instance is expected to become visible in the other.
    private ReplicatedHashMap<String,String> map1;
    private ReplicatedHashMap<String,String> map2;
    // Backing store for map2, used to verify the wrapped map mirrors map1.
    private ConcurrentHashMap<String,String> wrap=new ConcurrentHashMap<>();

    /**
     * Creates two channels in the same group; blocking updates make each put
     * synchronous so tests can assert on the peer map immediately after.
     */
    @BeforeClass
    protected void setUp() throws Exception {
        JChannel c1=createChannel(true, 2);
        this.map1=new ReplicatedHashMap<>(c1);
        map1.setBlockingUpdates(true);
        c1.connect("ReplicatedHashMapTest");
        this.map1.start(5000);

        JChannel c2=createChannel(c1);
        this.map2=new ReplicatedHashMap<>(wrap, c2);
        map2.setBlockingUpdates(true);
        c2.connect("ReplicatedHashMapTest");
        this.map2.start(5000);
    }

    // Reset both replicas between test methods so state does not leak.
    @AfterMethod
    protected void clean() {
        map1.clear();
        map2.clear();
    }

    @AfterClass
    protected void tearDown() throws Exception {
        this.map1.stop();
        this.map2.stop();
    }

    /** equals/hashCode/toString agree across replicas, and with the wrapped map. */
    public void testEqualsEtc() {
        map1.put("key1", "value1");
        assertEquals(this.map1, this.map2);
        Assert.assertEquals(this.map1.hashCode(), this.map2.hashCode());
        Assert.assertEquals(this.map1.toString(), this.map2.toString());
        assertEquals(this.wrap, this.map1);
    }

    /** size() stays in sync regardless of which replica is written. */
    public void testSize() {
        Assert.assertEquals(0, this.map1.size());
        Assert.assertEquals(this.map2.size(), this.map1.size());

        this.map1.put("key1", "value1");
        Assert.assertEquals(1, this.map1.size());
        Assert.assertEquals(this.map2.size(), this.map1.size());

        this.map2.put("key2", "value2");
        Assert.assertEquals(2, this.map1.size());
        Assert.assertEquals(this.map2.size(), this.map1.size());
    }

    /** isEmpty() reflects a put made on the other replica. */
    public void testIsEmpty() {
        assertTrue(this.map1.isEmpty());
        assertTrue(this.map2.isEmpty());

        this.map1.put("key", "value");

        assertFalse(this.map1.isEmpty());
        assertFalse(this.map2.isEmpty());
    }

    /** containsKey() sees keys written through either replica. */
    public void testContainsKey() {
        assertFalse(this.map1.containsKey("key1"));
        assertFalse(this.map2.containsKey("key1"));
        this.map1.put("key1", "value");
        assertTrue(this.map1.containsKey("key1"));
        assertTrue(this.map2.containsKey("key1"));
        this.map2.put("key2", "value");
        assertTrue(this.map1.containsKey("key2"));
        assertTrue(this.map2.containsKey("key2"));
    }

    /** containsValue() sees values written through either replica. */
    public void testContainsValue() {
        assertFalse(this.map1.containsValue("value1"));
        assertFalse(this.map2.containsValue("value1"));
        this.map1.put("key1", "value1");
        assertTrue(this.map1.containsValue("value1"));
        assertTrue(this.map2.containsValue("value1"));
        this.map2.put("key2", "value2");
        assertTrue(this.map1.containsValue("value2"));
        assertTrue(this.map2.containsValue("value2"));
    }

    /** get() returns replicated entries on both sides. */
    public void testPutAndGet() {
        assert this.map1.get("key1") == null;
        assert this.map2.get("key1") == null;
        this.map1.put("key1", "value1");
        assertNotNull(this.map1.get("key1"));
        assertNotNull(this.map2.get("key1"));
        this.map2.put("key2", "value2");
        assertNotNull(this.map1.get("key2"));
        assertNotNull(this.map2.get("key2"));
    }

    /** putIfAbsent() keeps the first value and returns it on conflict. */
    public void testPutIfAbsent() {
        String retval=map1.putIfAbsent("name", "Bela");
        assert retval == null;
        retval=map1.putIfAbsent("name", "Michelle");
        assertNotNull(retval);
        Assert.assertEquals("Bela", retval);
        Assert.assertEquals("Bela", map1.get("name"));
        Assert.assertEquals("Bela", map2.get("name"));
    }

    /** remove() on one replica removes the entry from both. */
    public void testRemove() {
        assert this.map1.get("key1") == null;
        assert this.map2.get("key1") == null;
        this.map1.put("key1", "value1");
        this.map2.put("key2", "value2");
        assertNotNull(this.map1.get("key1"));
        assertNotNull(this.map2.get("key1"));
        assertNotNull(this.map1.get("key2"));
        assertNotNull(this.map2.get("key2"));

        this.map1.remove("key1");
        assert this.map1.get("key1") == null;
        assert this.map2.get("key1") == null;
        assertNotNull(this.map1.get("key2"));
        assertNotNull(this.map2.get("key2"));

        this.map2.remove("key2");
        assert this.map1.get("key2") == null;
        assert this.map2.get("key2") == null;
    }

    /** Conditional remove(key, value) only removes on a value match. */
    public void testRemove2() {
        map1.put("name", "Bela");
        map1.put("id", "322649");
        System.out.println("map1: " + map1);
        boolean removed=map1.remove("id", "322000");
        assertFalse(removed);
        assertTrue(map1.containsKey("id"));
        removed=map1.remove("id", "322649");
        System.out.println("map1: " + map1);
        assertTrue(removed);
        assertFalse(map1.containsKey("id"));
        Assert.assertEquals(1, map2.size());
    }

    /** replace(key, value) is a no-op for absent keys and replicates otherwise. */
    public void testReplace() {
        map1.put("name", "Bela");
        map1.put("id", "322649");
        System.out.println("map1: " + map1);
        String val=map1.replace("id2", "322000");
        Assert.assertEquals(2, map1.size());
        assert map1.get("id2") == null;
        System.out.println("map1: " + map1);
        assert val == null;
        val=map1.replace("id", "322000");
        System.out.println("map1: " + map1);
        assertNotNull(val);
        Assert.assertEquals("322649", val);
        Assert.assertEquals("322000", map1.get("id"));
        Assert.assertEquals("322000", map2.get("id"));
    }

    /** Conditional replace(key, old, new) only swaps on an old-value match. */
    public void testReplace2() {
        map1.put("name", "Bela");
        map1.put("id", "322649");
        System.out.println("map1: " + map1);
        boolean replaced=map1.replace("id", "322000", "1");
        assertFalse(replaced);
        Assert.assertEquals("322649", map1.get("id"));
        replaced=map1.replace("id", "322649", "1");
        assertTrue(replaced);
        Assert.assertEquals("1", map1.get("id"));
    }

    /** putAll() replicates bulk inserts from either side. */
    public void testPutAll() {
        Map<String,String> all1=new HashMap<>();
        all1.put("key1", "value1");
        all1.put("key2", "value2");
        Map<String,String> all2=new HashMap<>();
        all2.put("key3", "value3");
        all2.put("key4", "value4");

        this.map1.putAll(all1);
        Assert.assertEquals(2, this.map1.size());
        Assert.assertEquals(2, this.map2.size());
        this.map2.putAll(all2);
        Assert.assertEquals(4, this.map1.size());
        Assert.assertEquals(4, this.map2.size());

        assertTrue(this.map1.containsKey("key1"));
        assertTrue(this.map1.containsKey("key2"));
        assertTrue(this.map1.containsKey("key3"));
        assertTrue(this.map1.containsKey("key4"));
        assertTrue(this.map2.containsKey("key1"));
        assertTrue(this.map2.containsKey("key2"));
        assertTrue(this.map2.containsKey("key3"));
        assertTrue(this.map2.containsKey("key4"));
    }

    /** clear() on one replica empties both. */
    public void testClear() {
        assertTrue(this.map1.isEmpty());
        assertTrue(this.map2.isEmpty());
        this.map1.put("key", "value");
        assertFalse(this.map1.isEmpty());
        assertFalse(this.map2.isEmpty());

        this.map1.clear();
        assertTrue(this.map1.isEmpty());
        assertTrue(this.map2.isEmpty());
        this.map2.put("key", "value");
        assertFalse(this.map1.isEmpty());
        assertFalse(this.map2.isEmpty());

        this.map2.clear();
        assertTrue(this.map1.isEmpty());
        assertTrue(this.map2.isEmpty());
    }

    /** keySet() matches the union of entries put through both replicas. */
    public void testKeySet() {
        Map<String,String> all1=new HashMap<>();
        all1.put("key1", "value1");
        all1.put("key2", "value2");
        Map<String,String> all2=new HashMap<>();
        all2.put("key3", "value3");
        all2.put("key4", "value4");

        this.map1.putAll(all1);
        assertEquals(all1.keySet(), this.map1.keySet());
        assertEquals(all1.keySet(), this.map2.keySet());

        this.map2.putAll(all2);
        all1.putAll(all2);
        assertEquals(all1.keySet(), this.map1.keySet());
        assertEquals(all1.keySet(), this.map2.keySet());
    }

    /** values() contains every replicated value on both sides. */
    public void testValues() {
        Map<String,String> all1=new HashMap<>();
        all1.put("key1", "value1");
        all1.put("key2", "value2");
        Map<String,String> all2=new HashMap<>();
        all2.put("key3", "value3");
        all2.put("key4", "value4");

        this.map1.putAll(all1);
        assertTrue(this.map1.values().containsAll(all1.values()));
        assertTrue(this.map2.values().containsAll(all1.values()));

        this.map2.putAll(all2);
        all1.putAll(all2);
        assertTrue(this.map1.values().containsAll(all1.values()));
        assertTrue(this.map2.values().containsAll(all1.values()));
    }
}
/*
 * The MIT License (MIT)
 *
 * Copyright (c) 2013 Jeremy Othieno.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package clockwork.gui.controls.scene;

import clockwork.graphics.Appearance;
import clockwork.gui.controls.InputControls;
import clockwork.gui.views.GUIColorRGBView;
import clockwork.gui.views.GUIFrustumView;
import clockwork.gui.views.GUIObjectView;
import clockwork.gui.views.GUIOrientationView;
import clockwork.gui.views.GUIPositionView;
import clockwork.gui.views.GUIScaleView;
import clockwork.gui.views.info.GUIAppearanceInfoView;
import clockwork.gui.views.info.GUIBodyInfoView;
import clockwork.gui.views.info.GUILightInfoView;
import clockwork.gui.views.info.GUISceneViewerInfoView;
import clockwork.physics.body.RigidBody;
import clockwork.physics.lighting.Light;
import clockwork.physics.lighting.LightEmitter;
import clockwork.scene.SceneEntityProperty;
import clockwork.scene.SceneGraph;
import clockwork.scene.SceneObject;
import clockwork.scene.SceneViewer;

/**
 * Input controls for editing the properties of a selected scene-graph node.
 * The editable views are grouped by node kind in the {@code views} array and
 * assembled on demand by {@link #buildViews}; {@code setValue}/{@code getValue}
 * then push node state into the views and read it back out.
 */
public final class SceneInputControls extends InputControls<SceneGraph.Node> {
    /**
     * The serial version UID.
     */
    private static final long serialVersionUID = -6608888101435800761L;
    /**
     * Input components used to modify a specific property of a given entity.
     * First index selects a subgroup (see the *_VIEWS constants); second index
     * selects a view within that subgroup. The positional meaning of each slot
     * is relied upon by setValue/getValue (e.g. OBJECT_VIEWS[0] is position).
     */
    private final GUIObjectView<?> views[][];
    /**
     * Index of view subgroups.
     */
    private final int OBJECT_VIEWS = 0;
    private final int BODY_VIEWS = 1;
    private final int VIEWER_VIEWS = 2;
    private final int LIGHT_VIEWS = 3;
    private final int APPEARANCE_VIEWS = 4;

    /**
     * Instantiate a SceneInputControls attached to a given control panel.
     */
    public SceneInputControls(final SceneControlPanel parent) {
        views = new GUIObjectView[][] {
            // Views shared by most scene objects.
            new GUIObjectView[] {
                new GUIPositionView(parent),
                new GUIOrientationView(parent),
                new GUIScaleView(parent),
                new GUIColorRGBView(parent),
            },
            // Views that modify rigid bodies.
            new GUIObjectView[] {
                new GUIBodyInfoView(parent),
            },
            // Views that modify scene viewers.
            new GUIObjectView[] {
                new GUISceneViewerInfoView(parent),
                new GUIFrustumView(parent),
            },
            // Views that modify lighting.
            new GUIObjectView[] {
                new GUILightInfoView(parent),
                new GUIColorRGBView(parent),
            },
            // Views that modify an object's appearance
            new GUIObjectView[] {
                new GUIAppearanceInfoView(parent)
            }
        };
    }

    /**
     * Build the views based on a specific type of object.
     * Chooses the generic subgroup plus at most one type-specific subgroup;
     * the specific info view (index 0) is added first, then M generic views,
     * then the remaining specific views.
     */
    private void buildViews(final Class<?> cls) {
        removeInputComponents();
        final GUIObjectView<?> genericViews[] = views[0];
        GUIObjectView<?> specificViews[] = null;
        // The number of generic views to add.
        int M = 0;
        if (SceneObject.class.isAssignableFrom(cls)) {
            // If it is a scene object then by default, add the generic views.
            M = genericViews.length;
            if (RigidBody.class.isAssignableFrom(cls))
                specificViews = views[BODY_VIEWS];
            else if (SceneViewer.class.isAssignableFrom(cls)) {
                specificViews = views[VIEWER_VIEWS];
                --M; // We can't scale viewers so we ignore the scaling controls.
            }
            else if (Light.class.isAssignableFrom(cls))
                --M; // We can't scale lights so we ignore the scaling controls.
        }
        // If it is a scene property, then only add specific views.
        else if (SceneEntityProperty.class.isAssignableFrom(cls)) {
            if (LightEmitter.class.isAssignableFrom(cls))
                specificViews = views[LIGHT_VIEWS];
            else if (Appearance.class.isAssignableFrom(cls))
                specificViews = views[APPEARANCE_VIEWS];
        }
        // The number of specific views to add.
        final int N = specificViews != null ? specificViews.length : 0;
        // First, add the specific view that gives information on the object, then add
        // the generic views and the remaining specific views.
        if (N > 0)
            addInputComponent(specificViews[0]);
        for (int i = 0; i < M; ++i)
            addInputComponent(genericViews[i]);
        for (int i = 1; i < N; ++i)
            addInputComponent(specificViews[i]);
    }

    /**
     * Display the values of a given node.
     * Rebuilds the view set for the node's type, then writes the node's state
     * into the positionally matching views.
     * @param input the node to display.
     */
    @Override
    public void setValue(final SceneGraph.Node input) {
        if (input != null) {
            buildViews(input.getClass());
            if (input instanceof SceneObject) {
                final SceneObject object = (SceneObject)input;
                ((GUIPositionView)views[OBJECT_VIEWS][0]).write(object.getPosition());
                ((GUIOrientationView)views[OBJECT_VIEWS][1]).write(object.getOrientation());
                ((GUIScaleView)views[OBJECT_VIEWS][2]).write(object.getScale());
                if (input instanceof RigidBody) {
                    // TODO Complete me.
                    final RigidBody body = (RigidBody)input;
                    ((GUIBodyInfoView)views[BODY_VIEWS][0]).write(body);
                }
                else if (input instanceof SceneViewer) {
                    final SceneViewer viewer = (SceneViewer)input;
                    ((GUISceneViewerInfoView)views[VIEWER_VIEWS][0]).write(viewer);
                    ((GUIFrustumView)views[VIEWER_VIEWS][1]).write(viewer.getFrustum());
                }
            }
            else if (input instanceof SceneEntityProperty<?>) {
                if (input instanceof LightEmitter) {
                    final LightEmitter light = (LightEmitter)input;
                    ((GUILightInfoView)views[LIGHT_VIEWS][0]).write(light);
                    ((GUIColorRGBView)views[LIGHT_VIEWS][1]).write(light.getColor());
                }
                else if (input instanceof Appearance) {
                    final Appearance appearance = (Appearance)input;
                    ((GUIAppearanceInfoView)views[APPEARANCE_VIEWS][0]).write(appearance);
                }
            }
        }
        else
            setValue();
    }

    /**
     * @see InputControls#setValue.
     */
    @Override
    public void setValue() {
        // TODO Auto-generated method stub
    }

    /**
     * Return the value of the selected node.
     * Mirrors {@link #setValue(SceneGraph.Node)} in reverse: reads each view
     * back into the node.
     * NOTE(review): unlike setValue, there is no Appearance branch here — it is
     * unclear from this file whether GUIAppearanceInfoView is read-only or this
     * is an omission; confirm against the view's implementation.
     * @param output the location where the value will be stored.
     */
    @Override
    public void getValue(final SceneGraph.Node output) {
        if (output != null) {
            if (output instanceof SceneObject) {
                final SceneObject object = (SceneObject)output;
                object.setPosition(((GUIPositionView)views[OBJECT_VIEWS][0]).read());
                object.setOrientation(((GUIOrientationView)views[OBJECT_VIEWS][1]).read());
                object.setScale(((GUIScaleView)views[OBJECT_VIEWS][2]).read());
                if (output instanceof RigidBody) {
                    // TODO Complete me.
                    final RigidBody body = (RigidBody)output;
                    ((GUIBodyInfoView)views[BODY_VIEWS][0]).read(body);
                }
                else if (output instanceof SceneViewer) {
                    final SceneViewer viewer = (SceneViewer)output;
                    ((GUISceneViewerInfoView)views[VIEWER_VIEWS][0]).read(viewer);
                    viewer.setFrustum(((GUIFrustumView)views[VIEWER_VIEWS][1]).read());
                }
            }
            else if (output instanceof SceneEntityProperty<?>) {
                if (output instanceof LightEmitter) {
                    final LightEmitter light = (LightEmitter)output;
                    ((GUILightInfoView)views[LIGHT_VIEWS][0]).read(light);
                    light.setColor(((GUIColorRGBView)views[LIGHT_VIEWS][1]).read());
                }
            }
        }
    }
}
/* * HorizontalListView.java v1.5 * * * The MIT License * Copyright (c) 2011 Paul Soucy (paul@dev-smart.com) * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
 * */

package com.brandstore1.utils;

import java.util.LinkedList;
import java.util.Queue;
import android.content.Context;
import android.database.DataSetObserver;
import android.graphics.Rect;
import android.util.AttributeSet;
import android.view.GestureDetector;
import android.view.GestureDetector.OnGestureListener;
import android.view.MotionEvent;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ListAdapter;
import android.widget.Scroller;

/**
 * A horizontally scrolling AdapterView. Children are laid out left-to-right,
 * recycled through {@link #mRemovedViewQueue} as they scroll off either edge,
 * and scrolling/flinging is driven by a {@link Scroller} plus a
 * {@link GestureDetector}.
 *
 * Coordinate model: {@code mCurrentX} is the committed scroll offset,
 * {@code mNextX} the offset requested for the next layout pass;
 * {@code mDisplayOffset} tracks where the first attached child's left edge sits
 * relative to the view after recycling.
 */
public class HorizontalListView extends AdapterView<ListAdapter> {

    public boolean mAlwaysOverrideTouch = true;
    protected ListAdapter mAdapter;
    // Adapter position of the item just left of the first attached child (-1
    // when the first item is attached) and of the next item to attach on the right.
    private int mLeftViewIndex = -1;
    private int mRightViewIndex = 0;
    // Committed scroll offset and the offset requested for the next layout pass.
    protected int mCurrentX;
    protected int mNextX;
    // Maximum scroll offset; tightened once the last adapter item is laid out.
    private int mMaxX = Integer.MAX_VALUE;
    // Left edge of the first attached child, relative to this view.
    private int mDisplayOffset = 0;
    protected Scroller mScroller;
    private GestureDetector mGesture;
    // Scrapped children available for adapter convertView reuse.
    private Queue<View> mRemovedViewQueue = new LinkedList<View>();
    private OnItemSelectedListener mOnItemSelected;
    private OnItemClickListener mOnItemClicked;
    private OnItemLongClickListener mOnItemLongClicked;
    // Set by the data observer; makes the next onLayout rebuild all children.
    private boolean mDataChanged = false;

    public HorizontalListView(Context context, AttributeSet attrs) {
        super(context, attrs);
        initView();
    }

    // Resets all scroll/recycle bookkeeping to the initial (unscrolled) state.
    private synchronized void initView() {
        mLeftViewIndex = -1;
        mRightViewIndex = 0;
        mDisplayOffset = 0;
        mCurrentX = 0;
        mNextX = 0;
        mMaxX = Integer.MAX_VALUE;
        mScroller = new Scroller(getContext());
        mGesture = new GestureDetector(getContext(), mOnGesture);
    }

    @Override
    public void setOnItemSelectedListener(OnItemSelectedListener listener) {
        mOnItemSelected = listener;
    }

    @Override
    public void setOnItemClickListener(OnItemClickListener listener){
        mOnItemClicked = listener;
    }

    @Override
    public void setOnItemLongClickListener(OnItemLongClickListener listener) {
        mOnItemLongClicked = listener;
    }

    // Adapter callbacks: a change keeps the scroll position (handled in
    // onLayout), an invalidation resets the whole view.
    private DataSetObserver mDataObserver = new DataSetObserver() {

        @Override
        public void onChanged() {
            synchronized(HorizontalListView.this){
                mDataChanged = true;
            }
            invalidate();
            requestLayout();
        }

        @Override
        public void onInvalidated() {
            reset();
            invalidate();
            requestLayout();
        }

    };

    @Override
    public ListAdapter getAdapter() {
        return mAdapter;
    }

    @Override
    public View getSelectedView() {
        //TODO: implement
        return null;
    }

    @Override
    public void setAdapter(ListAdapter adapter) {
        if(mAdapter != null) {
            mAdapter.unregisterDataSetObserver(mDataObserver);
        }
        mAdapter = adapter;
        mAdapter.registerDataSetObserver(mDataObserver);
        reset();
    }

    private synchronized void reset(){
        initView();
        removeAllViewsInLayout();
        requestLayout();
    }

    @Override
    public void setSelection(int position) {
        //TODO: implement
    }

    // Attaches a child at viewPos (-1 = end, 0 = front) and measures it
    // constrained to this view's current size.
    private void addAndMeasureChild(final View child, int viewPos) {
        LayoutParams params = child.getLayoutParams();
        if(params == null) {
            params = new LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT);
        }

        addViewInLayout(child, viewPos, params, true);
        child.measure(MeasureSpec.makeMeasureSpec(getWidth(), MeasureSpec.AT_MOST),
                MeasureSpec.makeMeasureSpec(getHeight(), MeasureSpec.AT_MOST));
    }

    // Core pass: consume pending scroll, clamp to [0, mMaxX], recycle children
    // that scrolled out, fill newly exposed space, then position every child.
    @Override
    protected synchronized void onLayout(boolean changed, int left, int top, int right, int bottom) {
        super.onLayout(changed, left, top, right, bottom);

        if(mAdapter == null){
            return;
        }

        if(mDataChanged){
            // Rebuild all children but restore the previous scroll offset.
            int oldCurrentX = mCurrentX;
            initView();
            removeAllViewsInLayout();
            mNextX = oldCurrentX;
            mDataChanged = false;
        }

        if(mScroller.computeScrollOffset()){
            int scrollx = mScroller.getCurrX();
            mNextX = scrollx;
        }

        if(mNextX <= 0){
            mNextX = 0;
            mScroller.forceFinished(true);
        }
        if(mNextX >= mMaxX) {
            mNextX = mMaxX;
            mScroller.forceFinished(true);
        }

        int dx = mCurrentX - mNextX;

        removeNonVisibleItems(dx);
        fillList(dx);
        positionItems(dx);

        mCurrentX = mNextX;

        if(!mScroller.isFinished()){
            // Fling still in progress: schedule another layout pass.
            post(new Runnable(){
                @Override
                public void run() {
                    requestLayout();
                }
            });
        }
    }

    // Fills exposed space on the right of the last child and the left of the first.
    private void fillList(final int dx) {
        int edge = 0;
        View child = getChildAt(getChildCount()-1);
        if(child != null) {
            edge = child.getRight();
        }
        fillListRight(edge, dx);

        edge = 0;
        child = getChildAt(0);
        if(child != null) {
            edge = child.getLeft();
        }
        fillListLeft(edge, dx);
    }

    private void fillListRight(int rightEdge, final int dx) {
        while(rightEdge + dx < getWidth() && mRightViewIndex < mAdapter.getCount()) {

            View child = mAdapter.getView(mRightViewIndex, mRemovedViewQueue.poll(), this);
            addAndMeasureChild(child, -1);
            rightEdge += child.getMeasuredWidth();

            if(mRightViewIndex == mAdapter.getCount()-1) {
                // Last item attached: the true scroll limit is now known.
                mMaxX = mCurrentX + rightEdge - getWidth();
            }

            if (mMaxX < 0) {
                mMaxX = 0;
            }
            mRightViewIndex++;
        }

    }

    private void fillListLeft(int leftEdge, final int dx) {
        while(leftEdge + dx > 0 && mLeftViewIndex >= 0) {
            View child = mAdapter.getView(mLeftViewIndex, mRemovedViewQueue.poll(), this);
            addAndMeasureChild(child, 0);
            leftEdge -= child.getMeasuredWidth();
            mLeftViewIndex--;
            // First child moved left, so the display offset shifts with it.
            mDisplayOffset -= child.getMeasuredWidth();
        }
    }

    // Detaches children fully scrolled past either edge and scraps them for reuse.
    private void removeNonVisibleItems(final int dx) {
        View child = getChildAt(0);
        while(child != null && child.getRight() + dx <= 0) {
            mDisplayOffset += child.getMeasuredWidth();
            mRemovedViewQueue.offer(child);
            removeViewInLayout(child);
            mLeftViewIndex++;
            child = getChildAt(0);

        }

        child = getChildAt(getChildCount()-1);
        while(child != null && child.getLeft() + dx >= getWidth()) {
            mRemovedViewQueue.offer(child);
            removeViewInLayout(child);
            mRightViewIndex--;
            child = getChildAt(getChildCount()-1);
        }
    }

    // Lays children out left-to-right starting at mDisplayOffset (advanced by dx).
    private void positionItems(final int dx) {
        if(getChildCount() > 0){
            mDisplayOffset += dx;
            int left = mDisplayOffset;
            for(int i=0;i<getChildCount();i++){
                View child = getChildAt(i);
                int childWidth = child.getMeasuredWidth();
                child.layout(left, 0, left + childWidth, child.getMeasuredHeight());
                left += childWidth + child.getPaddingRight();
            }
        }
    }

    // Animates the scroll offset to x via the Scroller.
    public synchronized void scrollTo(int x) {
        mScroller.startScroll(mNextX, 0, x - mNextX, 0);
        requestLayout();
    }

    @Override
    public boolean dispatchTouchEvent(MotionEvent ev) {
        boolean handled = super.dispatchTouchEvent(ev);
        handled |= mGesture.onTouchEvent(ev);
        return handled;
    }

    protected boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX,
            float velocityY) {
        synchronized(HorizontalListView.this){
            // Velocity is negated: a leftward fling increases the scroll offset.
            mScroller.fling(mNextX, 0, (int)-velocityX, 0, 0, mMaxX, 0, 0);
        }
        requestLayout();

        return true;
    }

    protected boolean onDown(MotionEvent e) {
        // A new touch stops any fling in progress.
        mScroller.forceFinished(true);
        return true;
    }

    private OnGestureListener mOnGesture = new GestureDetector.SimpleOnGestureListener() {

        @Override
        public boolean onDown(MotionEvent e) {
            return HorizontalListView.this.onDown(e);
        }

        @Override
        public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX,
                float velocityY) {
            return HorizontalListView.this.onFling(e1, e2, velocityX, velocityY);
        }

        @Override
        public boolean onScroll(MotionEvent e1, MotionEvent e2,
                float distanceX, float distanceY) {

            synchronized(HorizontalListView.this){
                mNextX += (int)distanceX;
            }
            requestLayout();

            return true;
        }

        @Override
        public boolean onSingleTapConfirmed(MotionEvent e) {
            // mLeftViewIndex + 1 is the adapter position of the first attached child.
            for(int i=0;i<getChildCount();i++){
                View child = getChildAt(i);
                if (isEventWithinView(e, child)) {
                    if(mOnItemClicked != null){
                        mOnItemClicked.onItemClick(HorizontalListView.this, child, mLeftViewIndex + 1 + i, mAdapter.getItemId( mLeftViewIndex + 1 + i ));
                    }
                    if(mOnItemSelected != null){
                        mOnItemSelected.onItemSelected(HorizontalListView.this, child, mLeftViewIndex + 1 + i, mAdapter.getItemId( mLeftViewIndex + 1 + i ));
                    }
                    break;
                }

            }
            return true;
        }

        @Override
        public void onLongPress(MotionEvent e) {
            int childCount = getChildCount();
            for (int i = 0; i < childCount; i++) {
                View child = getChildAt(i);
                if (isEventWithinView(e, child)) {
                    if (mOnItemLongClicked != null) {
                        mOnItemLongClicked.onItemLongClick(HorizontalListView.this, child, mLeftViewIndex + 1 + i, mAdapter.getItemId(mLeftViewIndex + 1 + i));
                    }
                    break;
                }

            }
        }

        // Hit test in screen coordinates (getRawX/Y vs getLocationOnScreen).
        private boolean isEventWithinView(MotionEvent e, View child) {
            Rect viewRect = new Rect();
            int[] childPosition = new int[2];
            child.getLocationOnScreen(childPosition);
            int left = childPosition[0];
            int right = left + child.getWidth();
            int top = childPosition[1];
            int bottom = top + child.getHeight();
            viewRect.set(left, top, right, bottom);
            return viewRect.contains((int) e.getRawX(), (int) e.getRawY());
        }
    };

}
/*
 * Copyright 2013-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.facebook.buck.android;

import static java.nio.charset.StandardCharsets.UTF_8;
import static org.hamcrest.CoreMatchers.containsString;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;

import com.facebook.buck.android.relinker.Symbols;
import com.facebook.buck.cli.BuildTargetNodeToBuildRuleTransformer;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.testutil.integration.BuckBuildLog;
import com.facebook.buck.testutil.integration.ProjectWorkspace;
import com.facebook.buck.testutil.integration.TemporaryPaths;
import com.facebook.buck.testutil.integration.TestDataHelper;
import com.facebook.buck.testutil.integration.ZipInspector;
import com.facebook.buck.util.DefaultPropertyFinder;
import com.facebook.buck.util.environment.Platform;
import com.facebook.buck.zip.ZipConstants;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableMap;
import com.google.common.hash.Hashing;
import org.apache.commons.compress.archivers.zip.ZipUtil;
import org.hamcrest.Matchers;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;

import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.Date;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import java.util.zip.ZipInputStream;

/**
 * Integration tests for android_binary: dex splitting, native library packaging
 * (shared/static/asset/compressed), relinking, proguard, and deterministic
 * output. Builds {@link #SIMPLE_TARGET} once per class into
 * {@link #projectFolderWithPrebuiltTargets} and clones that workspace per test.
 */
public class AndroidBinaryIntegrationTest {

  @ClassRule
  public static TemporaryPaths projectFolderWithPrebuiltTargets = new TemporaryPaths();

  @Rule
  public TemporaryPaths tmpFolder = new TemporaryPaths();

  private ProjectWorkspace workspace;

  private static final String SIMPLE_TARGET = "//apps/multidex:app";
  private static final String RAW_DEX_TARGET = "//apps/multidex:app-art";

  /** One-time setup: verify SDK/NDK availability and prebuild the simple target. */
  @BeforeClass
  public static void setUpOnce() throws IOException {
    AssumeAndroidPlatform.assumeSdkIsAvailable();
    AssumeAndroidPlatform.assumeNdkIsAvailable();
    ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario(
        new AndroidBinaryIntegrationTest(),
        "android_project",
        projectFolderWithPrebuiltTargets);
    workspace.setUp();
    workspace.runBuckBuild(SIMPLE_TARGET).assertSuccess();
  }

  /** Per-test setup: clone the prebuilt workspace so tests can mutate it freely. */
  @Before
  public void setUp() throws IOException {
    workspace = new ProjectWorkspace(
        projectFolderWithPrebuiltTargets.getRoot(),
        tmpFolder.getRoot());
    workspace.setUp();
  }

  @Test
  public void testNonExopackageHasSecondary() throws IOException {
    ZipInspector zipInspector = new ZipInspector(
        workspace.getPath(
            "buck-out/gen/apps/multidex/app.apk"));
    zipInspector.assertFileExists("assets/secondary-program-dex-jars/metadata.txt");
    zipInspector.assertFileExists("assets/secondary-program-dex-jars/secondary-1.dex.jar");
    zipInspector.assertFileDoesNotExist("classes2.dex");

    zipInspector.assertFileExists("classes.dex");
    zipInspector.assertFileExists("lib/armeabi/libfakenative.so");
  }

  @Test
  public void testRawSplitDexHasSecondary() throws IOException {
    ProjectWorkspace.ProcessResult result = workspace.runBuckCommand(
        "build",
        RAW_DEX_TARGET);
    result.assertSuccess();

    ZipInspector zipInspector = new ZipInspector(
        workspace.getPath(
            "buck-out/gen/apps/multidex/app-art.apk"));
    zipInspector.assertFileDoesNotExist("assets/secondary-program-dex-jars/metadata.txt");

    zipInspector.assertFileDoesNotExist("assets/secondary-program-dex-jars/secondary-1.dex.jar");
    zipInspector.assertFileExists("classes2.dex");

    zipInspector.assertFileExists("classes.dex");
    zipInspector.assertFileExists("lib/armeabi/libfakenative.so");
  }

  @Test
  public void testDisguisedExecutableIsRenamed() throws IOException {
    ZipInspector zipInspector = new ZipInspector(
        workspace.getPath(
            "buck-out/gen/apps/multidex/app.apk"));
    zipInspector.assertFileExists("lib/armeabi/libmybinary.so");
  }

  @Test
  public void testEditingPrimaryDexClassForcesRebuildForSimplePackage() throws IOException {
    // Edit a class that lands in the primary dex and verify the apk rebuilds.
    workspace.replaceFileContents(
        "java/com/sample/app/MyApplication.java",
        "package com",
        "package\ncom");

    workspace.resetBuildLogFile();
    ProjectWorkspace.ProcessResult result = workspace.runBuckCommand("build", SIMPLE_TARGET);
    result.assertSuccess();

    BuckBuildLog buildLog = workspace.getBuildLog();
    buildLog.assertTargetBuiltLocally(SIMPLE_TARGET);
  }

  @Test
  public void testEditingSecondaryDexClassForcesRebuildForSimplePackage() throws IOException {
    // Edit a class that lands in a secondary dex and verify the apk rebuilds.
    workspace.replaceFileContents(
        "java/com/sample/lib/Sample.java",
        "package com",
        "package\ncom");

    workspace.resetBuildLogFile();
    ProjectWorkspace.ProcessResult result = workspace.runBuckCommand("build", SIMPLE_TARGET);
    result.assertSuccess();

    BuckBuildLog buildLog = workspace.getBuildLog();
    buildLog.assertTargetBuiltLocally(SIMPLE_TARGET);
  }

  @Test
  public void testPreprocessorForcesReDex() throws IOException {
    // Changing the dex preprocessor script must invalidate the dexed output.
    Path outputFile = workspace.buildAndReturnOutput("//java/com/preprocess:disassemble");
    String output = new String(Files.readAllBytes(outputFile), UTF_8);
    assertThat(output, containsString("content=2"));

    workspace.replaceFileContents(
        "java/com/preprocess/convert.py",
        "content=2",
        "content=3");

    outputFile = workspace.buildAndReturnOutput("//java/com/preprocess:disassemble");
    output = new String(Files.readAllBytes(outputFile), UTF_8);
    assertThat(output, containsString("content=3"));
  }

  @Test
  public void testCxxLibraryDep() throws IOException {
    workspace.runBuckCommand("build", "//apps/sample:app_cxx_lib_dep").assertSuccess();

    ZipInspector zipInspector = new ZipInspector(
        workspace.getPath(
            "buck-out/gen/apps/sample/app_cxx_lib_dep.apk"));
    zipInspector.assertFileExists("lib/armeabi/libnative_cxx_lib.so");
    zipInspector.assertFileExists("lib/armeabi/libgnustl_shared.so");
    zipInspector.assertFileExists("lib/armeabi-v7a/libnative_cxx_lib.so");
    zipInspector.assertFileExists("lib/armeabi-v7a/libgnustl_shared.so");
    zipInspector.assertFileExists("lib/x86/libnative_cxx_lib.so");
    zipInspector.assertFileExists("lib/x86/libgnustl_shared.so");
  }

  @Test
  public void testCxxLibraryDepClang() throws IOException {
    // With clang/libcxx the runtime is libc++_shared instead of libgnustl_shared.
    ProjectWorkspace.ProcessResult result = workspace.runBuckCommand(
        "build",
        "-c", "ndk.compiler=clang",
        "-c", "ndk.cxx_runtime=libcxx",
        "//apps/sample:app_cxx_lib_dep");
    result.assertSuccess();

    ZipInspector zipInspector = new ZipInspector(
        workspace.getPath(
            "buck-out/gen/apps/sample/app_cxx_lib_dep.apk"));
    zipInspector.assertFileExists("lib/armeabi/libnative_cxx_lib.so");
    zipInspector.assertFileExists("lib/armeabi/libc++_shared.so");
    zipInspector.assertFileExists("lib/armeabi-v7a/libnative_cxx_lib.so");
    zipInspector.assertFileExists("lib/armeabi-v7a/libc++_shared.so");
    zipInspector.assertFileExists("lib/x86/libnative_cxx_lib.so");
    zipInspector.assertFileExists("lib/x86/libc++_shared.so");
  }

  @Test
  public void testCxxLibraryDepWithNoFilters() throws IOException {
    workspace.runBuckCommand("build", "//apps/sample:app_cxx_lib_dep_no_filters").assertSuccess();

    ZipInspector zipInspector = new ZipInspector(
        workspace.getPath(
            "buck-out/gen/apps/sample/app_cxx_lib_dep_no_filters.apk"));
    zipInspector.assertFileExists("lib/armeabi/libnative_cxx_lib.so");
    zipInspector.assertFileExists("lib/armeabi-v7a/libnative_cxx_lib.so");
    zipInspector.assertFileExists("lib/x86/libnative_cxx_lib.so");
  }

  @Test
  public void testNoCxxDepsDoesNotIncludeNdkRuntime() throws IOException {
    workspace.runBuckCommand("build", "//apps/sample:app_no_cxx_deps").assertSuccess();

    ZipInspector zipInspector = new ZipInspector(
        workspace.getPath("buck-out/gen/apps/sample/app_no_cxx_deps.apk"));
    zipInspector.assertFileDoesNotExist("lib/armeabi/libgnustl_shared.so");
    zipInspector.assertFileDoesNotExist("lib/armeabi-v7a/libgnustl_shared.so");
    zipInspector.assertFileDoesNotExist("lib/x86/libgnustl_shared.so");
  }

  @Test
  public void testProguardDontObfuscateGeneratesMappingFile() throws IOException {
    workspace.runBuckCommand("build", "//apps/sample:app_proguard_dontobfuscate").assertSuccess();

    Path mapping = workspace.resolve(
        "buck-out/gen/apps/sample/__app_proguard_dontobfuscate#aapt_package__proguard__/" +
            ".proguard/mapping.txt");
    assertTrue(Files.exists(mapping));
  }

  @Test
  public void testStaticCxxLibraryDep() throws IOException {
    workspace.runBuckCommand("build", "//apps/sample:app_static_cxx_lib_dep").assertSuccess();

    ZipInspector zipInspector = new ZipInspector(
        workspace.getPath(
            "buck-out/gen/apps/sample/app_static_cxx_lib_dep.apk"));
    zipInspector.assertFileExists("lib/x86/libnative_cxx_foo1.so");
    zipInspector.assertFileExists("lib/x86/libnative_cxx_foo2.so");
    // Statically linked dep must not appear as its own shared library.
    zipInspector.assertFileDoesNotExist("lib/x86/libnative_cxx_bar.so");
  }

  /**
   * Extracts a single entry from the apk into {@code tmpDir} and returns its path.
   *
   * @param tmpDir directory to extract into.
   * @param zipPath apk (zip) to read.
   * @param name entry name within the zip.
   */
  private Path unzip(Path tmpDir, Path zipPath, String name) throws IOException {
    Path outPath = tmpDir.resolve(zipPath.getFileName());
    try (ZipFile zipFile = new ZipFile(zipPath.toFile())) {
      Files.copy(
          zipFile.getInputStream(zipFile.getEntry(name)),
          outPath,
          StandardCopyOption.REPLACE_EXISTING);
      return outPath;
    }
  }

  @Test
  public void testNativeRelinker() throws IOException, InterruptedException {
    // TODO(cjhopman): is this really the simplest way to get the objdump tool?
    AndroidDirectoryResolver androidResolver = new DefaultAndroidDirectoryResolver(
        workspace.asCell().getFilesystem(),
        Optional.<String>absent(),
        Optional.<String>absent(),
        new DefaultPropertyFinder(
            workspace.asCell().getFilesystem(),
            ImmutableMap.copyOf(System.getenv())));

    Optional<Path> ndkPath = androidResolver.findAndroidNdkDir();
    assertTrue(ndkPath.isPresent());

    ImmutableCollection<NdkCxxPlatform> platforms = NdkCxxPlatforms.getPlatforms(
        new ProjectFilesystem(ndkPath.get()),
        NdkCxxPlatformCompiler.builder()
            .setType(NdkCxxPlatforms.DEFAULT_COMPILER_TYPE)
            .setVersion(NdkCxxPlatforms.DEFAULT_GCC_VERSION)
            .setGccVersion(NdkCxxPlatforms.DEFAULT_GCC_VERSION)
            .build(),
        NdkCxxPlatforms.DEFAULT_CXX_RUNTIME,
        NdkCxxPlatforms.DEFAULT_TARGET_APP_PLATFORM,
        NdkCxxPlatforms.DEFAULT_CPU_ABIS,
        Platform.detect()).values();
    assertFalse(platforms.isEmpty());
    NdkCxxPlatform platform = platforms.iterator().next();

    SourcePathResolver pathResolver = new SourcePathResolver(
        new BuildRuleResolver(TargetGraph.EMPTY, new BuildTargetNodeToBuildRuleTransformer()));

    // With xdso-dce, unused symbols (_Z6unusedi) are stripped from every library.
    Path apkPath = workspace.buildAndReturnOutput("//apps/sample:app_xdso_dce");
    ZipInspector zipInspector = new ZipInspector(apkPath);
    zipInspector.assertFileExists("lib/x86/libnative_xdsodce_top.so");
    zipInspector.assertFileExists("lib/x86/libnative_xdsodce_mid.so");
    zipInspector.assertFileExists("lib/x86/libnative_xdsodce_bot.so");

    Path tmpDir = tmpFolder.newFolder("xdso");
    Path lib = unzip(
        tmpDir, apkPath, "lib/x86/libnative_xdsodce_top.so");
    Symbols sym = Symbols.getSymbols(platform.getObjdump(), pathResolver, lib);
    assertTrue(sym.global.contains("_Z10JNI_OnLoadii"));
    assertTrue(sym.undefined.contains("_Z10midFromTopi"));
    assertTrue(sym.undefined.contains("_Z10botFromTopi"));
    assertFalse(sym.all.contains("_Z6unusedi"));

    lib = unzip(tmpDir, apkPath, "lib/x86/libnative_xdsodce_mid.so");
    sym = Symbols.getSymbols(platform.getObjdump(), pathResolver, lib);
    assertTrue(sym.global.contains("_Z10midFromTopi"));
    assertTrue(sym.undefined.contains("_Z10botFromMidi"));
    assertFalse(sym.all.contains("_Z6unusedi"));

    lib = unzip(tmpDir, apkPath, "lib/x86/libnative_xdsodce_bot.so");
    sym = Symbols.getSymbols(platform.getObjdump(), pathResolver, lib);
    assertTrue(sym.global.contains("_Z10botFromTopi"));
    assertTrue(sym.global.contains("_Z10botFromMidi"));
    assertFalse(sym.all.contains("_Z6unusedi"));

    // Run some verification on the same apk with native_relinker disabled.
    // The unused symbol must then survive in every library.
    apkPath = workspace.buildAndReturnOutput("//apps/sample:app_no_xdso_dce");
    zipInspector = new ZipInspector(apkPath);
    zipInspector.assertFileExists("lib/x86/libnative_xdsodce_top.so");
    zipInspector.assertFileExists("lib/x86/libnative_xdsodce_mid.so");
    zipInspector.assertFileExists("lib/x86/libnative_xdsodce_bot.so");

    lib = unzip(
        tmpDir, apkPath, "lib/x86/libnative_xdsodce_top.so");
    sym = Symbols.getSymbols(platform.getObjdump(), pathResolver, lib);
    assertTrue(sym.all.contains("_Z6unusedi"));

    lib = unzip(tmpDir, apkPath, "lib/x86/libnative_xdsodce_mid.so");
    sym = Symbols.getSymbols(platform.getObjdump(), pathResolver, lib);
    assertTrue(sym.all.contains("_Z6unusedi"));

    lib = unzip(tmpDir, apkPath, "lib/x86/libnative_xdsodce_bot.so");
    sym = Symbols.getSymbols(platform.getObjdump(), pathResolver, lib);
    assertTrue(sym.all.contains("_Z6unusedi"));
  }

  @Test
  public void testHeaderOnlyCxxLibrary() throws IOException {
    workspace.runBuckCommand("build", "//apps/sample:app_header_only_cxx_lib_dep").assertSuccess();
    ZipInspector zipInspector = new ZipInspector(
        workspace.getPath(
            "buck-out/gen/apps/sample/app_header_only_cxx_lib_dep.apk"));
    zipInspector.assertFileDoesNotExist("lib/x86/libnative_cxx_headeronly.so");
  }

  @Test
  public void testX86OnlyCxxLibrary() throws IOException {
    workspace.runBuckCommand("build", "//apps/sample:app_with_x86_lib").assertSuccess();
    ZipInspector zipInspector = new ZipInspector(
        workspace.getPath(
            "buck-out/gen/apps/sample/app_with_x86_lib.apk"));
    zipInspector.assertFileDoesNotExist("lib/armeabi-v7a/libnative_cxx_x86-only.so");
    zipInspector.assertFileDoesNotExist("lib/armeabi-v7a/libgnustl_shared.so");
    zipInspector.assertFileDoesNotExist("lib/armeabi/libnative_cxx_x86-only.so");
    zipInspector.assertFileDoesNotExist("lib/armeabi/libgnustl_shared.so");
    zipInspector.assertFileExists("lib/x86/libnative_cxx_x86-only.so");
    zipInspector.assertFileExists("lib/x86/libgnustl_shared.so");
  }

  @Test
  public void testApksHaveDeterministicTimestamps() throws IOException {
    ProjectWorkspace.ProcessResult result = workspace.runBuckCommand("build", "//apps/sample:app");
    result.assertSuccess();

    // Iterate over each of the entries, expecting to see all zeros in the time fields.
    Path apk = workspace.getPath("buck-out/gen/apps/sample/app.apk");
    Date dosEpoch = new Date(ZipUtil.dosToJavaTime(ZipConstants.DOS_FAKE_TIME));
    try (ZipInputStream is = new ZipInputStream(Files.newInputStream(apk))) {
      for (ZipEntry entry = is.getNextEntry(); entry != null; entry = is.getNextEntry()) {
        assertThat(entry.getName(), new Date(entry.getTime()), Matchers.equalTo(dosEpoch));
      }
    }
  }

  @Test
  public void testCxxLibraryAsAsset() throws IOException {
    workspace.runBuckCommand("build", "//apps/sample:app_cxx_lib_asset").assertSuccess();
    ZipInspector zipInspector = new ZipInspector(
        workspace.getPath(
            "buck-out/gen/apps/sample/app_cxx_lib_asset.apk"));
    zipInspector.assertFileExists("assets/lib/x86/libnative_cxx_libasset.so");
    zipInspector.assertFileDoesNotExist("lib/x86/libnative_cxx_libasset.so");
    zipInspector.assertFileExists("lib/x86/libnative_cxx_foo1.so");
    zipInspector.assertFileExists("lib/x86/libnative_cxx_foo2.so");
    zipInspector.assertFileDoesNotExist("assets/lib/x86/libnative_cxx_foo1.so");
    zipInspector.assertFileDoesNotExist("assets/lib/x86/libnative_cxx_foo2.so");
  }

  @Test
  public void testCxxLibraryAsAssetWithoutPackaging() throws IOException {
    workspace.runBuckCommand(
        "build",
        "//apps/sample:app_cxx_lib_asset_no_package").assertSuccess();
    ZipInspector zipInspector = new ZipInspector(
        workspace.getPath(
            "buck-out/gen/apps/sample/app_cxx_lib_asset_no_package.apk"));
    zipInspector.assertFileDoesNotExist("assets/lib/x86/libnative_cxx_libasset.so");
    zipInspector.assertFileExists("lib/x86/libnative_cxx_libasset.so");
  }

  @Test
  public void testCompressAssetLibs() throws IOException {
    workspace.runBuckCommand("build", "//apps/sample:app_compress_lib_asset").assertSuccess();
    ZipInspector zipInspector = new ZipInspector(
        workspace.getPath(
            "buck-out/gen/apps/sample/app_compress_lib_asset.apk"));
    zipInspector.assertFileExists("assets/lib/libs.xzs");
    zipInspector.assertFileExists("assets/lib/metadata.txt");
    zipInspector.assertFileDoesNotExist("assets/lib/x86/libnative_cxx_libasset.so");
    zipInspector.assertFileDoesNotExist("lib/x86/libnative_cxx_libasset.so");
    zipInspector.assertFileExists("lib/x86/libnative_cxx_foo1.so");
    zipInspector.assertFileExists("lib/x86/libnative_cxx_foo2.so");
    zipInspector.assertFileDoesNotExist("assets/lib/x86/libnative_cxx_foo1.so");
    zipInspector.assertFileDoesNotExist("assets/lib/x86/libnative_cxx_foo2.so");
  }

  @Test
  public void testLibraryMetadataChecksum() throws IOException {
    workspace.runBuckCommand("build", "//apps/sample:app_cxx_lib_asset").assertSuccess();
    Path pathToZip = workspace.getPath("buck-out/gen/apps/sample/app_cxx_lib_asset.apk");
    // try-with-resources: the zip/readers/streams are now closed even when an
    // assertion below fails mid-loop (previously they leaked on failure).
    try (ZipFile file = new ZipFile(pathToZip.toFile())) {
      ZipEntry metadata = file.getEntry("assets/lib/metadata.txt");
      assertNotNull(metadata);

      try (BufferedReader contents =
               new BufferedReader(new InputStreamReader(file.getInputStream(metadata)))) {
        String line = contents.readLine();
        byte[] buffer = new byte[512];
        while (line != null) {
          // Each line is of the form <filename> <filesize> <SHA256 checksum>
          String[] tokens = line.split(" ");
          // assertEquals, not assertSame: assertSame autoboxes both ints and
          // compares object identity, which only passes for values in the
          // Integer cache — value equality is what we mean here.
          assertEquals(3, tokens.length);
          String filename = tokens[0];
          int filesize = Integer.parseInt(tokens[1]);
          String checksum = tokens[2];

          ZipEntry lib = file.getEntry("assets/lib/" + filename);
          assertNotNull(lib);
          try (InputStream is = file.getInputStream(lib);
               ByteArrayOutputStream out = new ByteArrayOutputStream()) {
            while (filesize > 0) {
              int read = is.read(buffer, 0, Math.min(buffer.length, filesize));
              assertTrue(read >= 0);
              out.write(buffer, 0, read);
              filesize -= read;
            }
            String actualChecksum = Hashing.sha256().hashBytes(out.toByteArray()).toString();
            assertEquals(checksum, actualChecksum);
          }
          line = contents.readLine();
        }
      }
    }
  }

  @Test
  public void testApkEmptyResDirectoriesBuildsCorrectly() throws IOException {
    workspace.runBuckBuild("//apps/sample:app_no_res").assertSuccess();
  }

  @Test
  public void testInstrumentationApkWithEmptyResDepBuildsCorrectly() throws IOException {
    workspace.runBuckBuild("//apps/sample:instrumentation_apk").assertSuccess();
  }
}
package eu.com.cwsfe.cms.dao;

import eu.com.cwsfe.cms.DaoTestsConfiguration;
import eu.com.cwsfe.cms.domains.LanguageStatus;
import eu.com.cwsfe.cms.model.Language;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.annotation.IfProfileValue;
import org.springframework.test.annotation.Rollback;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.AbstractTransactionalJUnit4SpringContextTests;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

import java.util.List;

import static org.junit.Assert.*;

/**
 * Transactional (rolled back) integration tests for {@link CmsLanguagesDAO}.
 * Only runs when the "test-groups" profile value includes
 * "integration-tests-local", i.e. against a locally available database.
 */
@RunWith(SpringJUnit4ClassRunner.class)
@Rollback
@ContextConfiguration(classes = {DaoTestsConfiguration.class, CmsLanguagesDAO.class})
@IfProfileValue(name = "test-groups", values = {"integration-tests-local"})
public class CmsLanguagesDAOTest extends AbstractTransactionalJUnit4SpringContextTests {

    @Autowired
    private CmsLanguagesDAO dao;

    /**
     * Builds an unsaved {@link Language} fixture with the given code and name.
     * Extracted to remove the copy-pasted construction in every test.
     */
    private static Language newLanguage(final String code, final String name) {
        final Language language = new Language();
        language.setCode(code);
        language.setName(name);
        return language;
    }

    @Test
    public void testCountForAjax() throws Exception {
        //when
        int result = dao.countForAjax();

        //then
        // The previous assertNotNull on this primitive int was removed: the
        // value is autoboxed and can never be null, so it always passed.
        assertTrue("query should return non negative value", result >= 0);
    }

    @Test
    public void testListAll() throws Exception {
        //when
        List<Language> list = dao.listAll();

        //then
        assertNotNull("Empty results should not return null", list);
    }

    @Test
    public void testListAjax() throws Exception {
        //given
        Language language = newLanguage("pl", "Polish");
        language.setId(1L);
        dao.add(language);

        //when
        List<Language> list = dao.listAjax(0, 1);

        //then
        assertNotNull("Empty results should not return null", list);
        assertEquals("Page limit was set to 1", 1, list.size());
    }

    @Test
    public void testListForDropList() throws Exception {
        //given
        String name = "Polish";
        Language language = newLanguage("pl", name);
        language.setId(1L);
        dao.add(language);

        //when
        List<Language> results = dao.listForDropList(name, 1);

        //then
        assertNotNull(results);
        assertEquals("Page limit was set to 1", 1, results.size());
    }

    @Test
    public void testGetById() throws Exception {
        //given
        String code = "pl";
        String name = "Polish";
        Language language = newLanguage(code, name);
        language.setId(dao.add(language));

        //when
        Language languageResult = dao.getById(language.getId());

        //then
        assertNotNull(languageResult);
        assertEquals((long) language.getId(), (long) languageResult.getId());
        assertEquals(code, languageResult.getCode());
        assertEquals(name, languageResult.getName());
    }

    @Test
    public void testGetByCode() throws Exception {
        //given
        String code = "pl";
        String name = "Polish";
        Language language = newLanguage(code, name);
        language.setId(1L);
        dao.add(language);

        //when
        Language languageResult = dao.getByCode(language.getCode());

        //then
        assertNotNull(languageResult);
        assertEquals(code, languageResult.getCode());
        assertEquals(name, languageResult.getName());
    }

    @Test
    public void testGetByCodeIgnoreCase() throws Exception {
        //given
        String code = "pl";
        String name = "Polish";
        Language language = newLanguage(code, name);
        language.setId(1L);
        dao.add(language);

        //when
        // Lookup with the upper-cased code must still find the row.
        Language languageResult = dao.getByCodeIgnoreCase(language.getCode().toUpperCase());

        //then
        assertNotNull(languageResult);
        assertEquals(code, languageResult.getCode());
        assertEquals(name, languageResult.getName());
    }

    @Test
    public void testAdd() throws Exception {
        //given
        String code = "pl";
        String name = "Polish";
        Language language = newLanguage(code, name);
        language.setId(1L);

        //when
        dao.add(language);

        //then
        Language languageResult = dao.getByCodeIgnoreCase(language.getCode().toUpperCase());
        assertNotNull(languageResult);
        assertEquals(code, languageResult.getCode());
        assertEquals(name, languageResult.getName());
    }

    @Test
    public void testUpdate() throws Exception {
        //given
        Language language = newLanguage("pl", "Polish");
        language.setId(dao.add(language));
        String newCode = "fr";
        String newName = "French";
        language.setCode(newCode);
        language.setName(newName);

        //when
        dao.update(language);

        //then
        Language languageResult = dao.getById(language.getId());
        assertNotNull(languageResult);
        assertEquals((long) language.getId(), (long) languageResult.getId());
        assertEquals(newCode, languageResult.getCode());
        assertEquals(newName, languageResult.getName());
    }

    @Test
    public void testDelete() throws Exception {
        //given
        Language language = newLanguage("pl", "Polish");
        language.setId(dao.add(language));

        //when
        dao.delete(language);

        //then
        // Delete is a soft delete: the row remains, flagged DELETED.
        Language languageResult = dao.getById(language.getId());
        assertNotNull(languageResult);
        assertEquals((long) language.getId(), (long) languageResult.getId());
        assertEquals("Unexpected status value for deleted object",
            LanguageStatus.DELETED, languageResult.getStatus());
    }

    @Test
    public void testUndelete() throws Exception {
        //given
        Language language = newLanguage("pl", "Polish");
        language.setId(dao.add(language));
        dao.delete(language);

        //when
        dao.undelete(language);

        //then
        Language languageResult = dao.getById(language.getId());
        assertNotNull(languageResult);
        assertEquals((long) language.getId(), (long) languageResult.getId());
        assertEquals("Unexpected status value for undeleted object",
            LanguageStatus.NEW, languageResult.getStatus());
    }
}
/*
 * Copyright 2000-2014 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.zmlx.hg4idea.branch;

import com.intellij.dvcs.DvcsUtil;
import com.intellij.dvcs.repo.Repository;
import com.intellij.dvcs.ui.NewBranchAction;
import com.intellij.dvcs.ui.PopupElementWithAdditionalInfo;
import com.intellij.icons.AllIcons;
import com.intellij.openapi.actionSystem.ActionGroup;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.DefaultActionGroup;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.Task;
import com.intellij.openapi.project.DumbAwareAction;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vcs.VcsBundle;
import com.intellij.openapi.vcs.changes.*;
import com.intellij.openapi.vcs.changes.ui.CommitChangeListDialog;
import com.intellij.util.ArrayUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.vcs.log.Hash;
import com.intellij.vcs.log.impl.HashImpl;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.zmlx.hg4idea.HgVcs;
import org.zmlx.hg4idea.action.HgCommandResultNotifier;
import org.zmlx.hg4idea.command.HgBookmarkCommand;
import org.zmlx.hg4idea.command.HgBranchCreateCommand;
import org.zmlx.hg4idea.execution.HgCommandException;
import org.zmlx.hg4idea.execution.HgCommandResult;
import org.zmlx.hg4idea.provider.commit.HgCloseBranchExecutor;
import org.zmlx.hg4idea.repo.HgRepository;
import org.zmlx.hg4idea.repo.HgRepositoryManager;
import org.zmlx.hg4idea.ui.HgBookmarkDialog;
import org.zmlx.hg4idea.util.HgErrorUtil;
import org.zmlx.hg4idea.util.HgUtil;

import java.util.*;

import static com.intellij.dvcs.ui.BranchActionGroupPopup.wrapWithMoreActionIfNeeded;
import static com.intellij.dvcs.ui.BranchActionUtil.FAVORITE_BRANCH_COMPARATOR;
import static com.intellij.dvcs.ui.BranchActionUtil.getNumOfTopShownBranches;
import static java.util.stream.Collectors.toList;
import static org.zmlx.hg4idea.util.HgUtil.getNewBranchNameFromUser;
import static org.zmlx.hg4idea.util.HgUtil.getSortedNamesWithoutHashes;

/**
 * Builds the action groups shown in the Mercurial branch popup for a single
 * repository: branch/bookmark creation, branch closing, unnamed heads, plus
 * one entry per bookmark and per opened branch.
 */
public class HgBranchPopupActions {

  @NotNull private final Project myProject;
  @NotNull private final HgRepository myRepository;

  HgBranchPopupActions(@NotNull Project project, @NotNull HgRepository repository) {
    myProject = project;
    myRepository = repository;
  }

  /** Convenience overload: no extra actions, no repo suffix, not a first-level group. */
  ActionGroup createActions() {
    return createActions(null, "", false);
  }

  /**
   * Assembles the popup content.
   *
   * @param toInsert        optional extra actions inserted after the fixed top actions
   * @param repoInfo        suffix appended to the "Bookmarks"/"Branches" separators
   *                        (e.g. repository name in a multi-repo popup)
   * @param firstLevelGroup whether this is the top-level popup; controls the
   *                        "show all" expansion keys passed to wrapWithMoreActionIfNeeded
   */
  ActionGroup createActions(@Nullable DefaultActionGroup toInsert, @NotNull String repoInfo, boolean firstLevelGroup) {
    DefaultActionGroup popupGroup = new DefaultActionGroup(null, false);
    // Fixed top section: create branch, create bookmark, close branch, unnamed heads.
    popupGroup.addAction(new HgNewBranchAction(myProject, Collections.singletonList(myRepository), myRepository));
    popupGroup.addAction(new HgNewBookmarkAction(Collections.singletonList(myRepository), myRepository));
    popupGroup.addAction(new HgBranchPopupActions.HgCloseBranchAction(Collections.singletonList(myRepository), myRepository));
    popupGroup.addAction(new HgShowUnnamedHeadsForCurrentBranchAction(myRepository));
    if (toInsert != null) {
      popupGroup.addAll(toInsert);
    }

    popupGroup.addSeparator("Bookmarks" + repoInfo);
    String currentBookmark = myRepository.getCurrentBookmark();
    // All bookmarks except the active one, favorites first.
    List<HgCommonBranchActions> bookmarkActions = getSortedNamesWithoutHashes(myRepository.getBookmarks()).stream()
      .filter(bm -> !bm.equals(currentBookmark))
      .map(bm -> new BookmarkActions(myProject, Collections.singletonList(myRepository), bm))
      .sorted(FAVORITE_BRANCH_COMPARATOR)
      .collect(toList());
    int topShownBookmarks = getNumOfTopShownBranches(bookmarkActions);
    if (currentBookmark != null) {
      // The active bookmark is pinned to the top and always visible.
      bookmarkActions.add(0, new CurrentActiveBookmark(myProject, Collections.singletonList(myRepository), currentBookmark));
      topShownBookmarks++;
    }
    wrapWithMoreActionIfNeeded(myProject, popupGroup, bookmarkActions, topShownBookmarks,
                               firstLevelGroup ? HgBranchPopup.SHOW_ALL_BOOKMARKS_KEY : null, firstLevelGroup);

    //only opened branches have to be shown
    popupGroup.addSeparator("Branches" + repoInfo);
    List<HgCommonBranchActions> branchActions = myRepository.getOpenedBranches().stream()
      .sorted()
      .filter(b -> !b.equals(myRepository.getCurrentBranch()))
      .map(b -> new BranchActions(myProject, Collections.singletonList(myRepository), b))
      .sorted(FAVORITE_BRANCH_COMPARATOR)
      .collect(toList());
    // The current branch is pinned to the top; "+ 1" keeps it within the shown slice.
    branchActions.add(0, new CurrentBranch(myProject, Collections.singletonList(myRepository), myRepository.getCurrentBranch()));
    wrapWithMoreActionIfNeeded(myProject, popupGroup, branchActions, getNumOfTopShownBranches(branchActions) + 1,
                               firstLevelGroup ? HgBranchPopup.SHOW_ALL_BRANCHES_KEY : null, firstLevelGroup);
    return popupGroup;
  }

  /** Action creating a new named branch in each of the given repositories. */
  public static class HgNewBranchAction extends NewBranchAction<HgRepository> {
    @NotNull final HgRepository myPreselectedRepo;

    public HgNewBranchAction(@NotNull Project project, @NotNull List<HgRepository> repositories, @NotNull HgRepository preselectedRepo) {
      super(project, repositories);
      myPreselectedRepo = preselectedRepo;
    }

    @Override
    public void actionPerformed(AnActionEvent e) {
      final String name = getNewBranchNameFromUser(myPreselectedRepo, "Create New Branch");
      if (name == null) {
        // user cancelled the dialog
        return;
      }
      // Run the actual hg command off the EDT as a background task.
      new Task.Backgroundable(myProject, "Creating " + StringUtil.pluralize("Branch", myRepositories.size()) + "...") {
        @Override
        public void run(@NotNull ProgressIndicator indicator) {
          createNewBranchInCurrentThread(name);
        }
      }.queue();
    }

    /**
     * Executes `hg branch` synchronously on the calling thread for every
     * repository; failures are reported per repository via notifications.
     */
    public void createNewBranchInCurrentThread(@NotNull final String name) {
      for (final HgRepository repository : myRepositories) {
        try {
          HgCommandResult result = new HgBranchCreateCommand(myProject, repository.getRoot(), name).executeInCurrentThread();
          repository.update();
          if (HgErrorUtil.hasErrorsInCommandExecution(result)) {
            new HgCommandResultNotifier(myProject)
              .notifyError(result, "Creation failed", "Branch creation [" + name + "] failed");
          }
        }
        catch (HgCommandException exception) {
          HgErrorUtil.handleException(myProject, "Can't create new branch: ", exception);
        }
      }
    }
  }

  /**
   * Closes the current branch of each repository by committing pending changes
   * through a dedicated close-branch commit executor.
   */
  public static class HgCloseBranchAction extends DumbAwareAction {
    @NotNull private final List<HgRepository> myRepositories;
    @NotNull final HgRepository myPreselectedRepo;

    HgCloseBranchAction(@NotNull List<HgRepository> repositories, @NotNull HgRepository preselectedRepo) {
      super("Close " + StringUtil.pluralize("branch", repositories.size()),
            "Close current " + StringUtil.pluralize("branch", repositories.size()), AllIcons.Actions.Delete);
      myRepositories = repositories;
      myPreselectedRepo = preselectedRepo;
    }

    @Override
    public void actionPerformed(AnActionEvent e) {
      final Project project = myPreselectedRepo.getProject();
      // Flush editors to disk so the change list reflects the latest state,
      // then show the commit dialog only after change lists are refreshed.
      ApplicationManager.getApplication().saveAll();
      ChangeListManager.getInstance(project)
        .invokeAfterUpdate(() -> commitAndCloseBranch(project), InvokeAfterUpdateMode.SYNCHRONOUS_CANCELLABLE,
                           VcsBundle.message("waiting.changelists.update.for.show.commit.dialog.message"), ModalityState.current());
    }

    /** Opens the commit dialog restricted to changes from the affected repositories. */
    private void commitAndCloseBranch(@NotNull final Project project) {
      final LocalChangeList activeChangeList = ChangeListManager.getInstance(project).getDefaultChangeList();
      HgVcs vcs = HgVcs.getInstance(project);
      assert vcs != null;
      final HgRepositoryManager repositoryManager = HgUtil.getRepositoryManager(project);
      // Only commit changes that belong to one of the repositories being closed.
      List<Change> changesForRepositories = ContainerUtil.filter(activeChangeList.getChanges(),
                                                                 change -> myRepositories.contains(repositoryManager.getRepositoryForFile(ChangesUtil.getFilePath(change))));
      HgCloseBranchExecutor closeBranchExecutor = vcs.getCloseBranchExecutor();
      closeBranchExecutor.setRepositories(myRepositories);
      CommitChangeListDialog.commitChanges(project, changesForRepositories, activeChangeList,
                                           Collections.singletonList(closeBranchExecutor), false, vcs, "Close Branch", null, false);
    }

    @Override
    public void update(AnActionEvent e) {
      // Enabled only when every repository's current branch is still open.
      e.getPresentation().setEnabledAndVisible(ContainerUtil.and(myRepositories,
                                                                 repository -> repository.getOpenedBranches().contains(repository.getCurrentBranch())));
    }
  }

  /** Action creating a new bookmark via a dialog. */
  public static class HgNewBookmarkAction extends DumbAwareAction {
    @NotNull protected final List<HgRepository> myRepositories;
    @NotNull final HgRepository myPreselectedRepo;

    HgNewBookmarkAction(@NotNull List<HgRepository> repositories, @NotNull HgRepository preselectedRepo) {
      super("New Bookmark", "Create new bookmark", AllIcons.Modules.AddContentEntry);
      myRepositories = repositories;
      myPreselectedRepo = preselectedRepo;
    }

    @Override
    public void update(AnActionEvent e) {
      // A fresh repository (no commits yet) cannot host a bookmark.
      if (DvcsUtil.anyRepositoryIsFresh(myRepositories)) {
        e.getPresentation().setEnabled(false);
        e.getPresentation().setDescription("Bookmark creation is not possible before the first commit.");
      }
    }

    @Override
    public void actionPerformed(AnActionEvent e) {
      final HgBookmarkDialog bookmarkDialog = new HgBookmarkDialog(myPreselectedRepo);
      if (bookmarkDialog.showAndGet()) {
        final String name = bookmarkDialog.getName();
        if (!StringUtil.isEmptyOrSpaces(name)) {
          HgBookmarkCommand.createBookmarkAsynchronously(myRepositories, name, bookmarkDialog.isActive());
        }
      }
    }
  }

  /**
   * Dynamic group listing the unnamed (anonymous) heads of the current branch,
   * i.e. branch heads that are neither the working-directory parent nor
   * pointed at by a bookmark.
   */
  public static class HgShowUnnamedHeadsForCurrentBranchAction extends ActionGroup {
    @NotNull final HgRepository myRepository;
    @NotNull final String myCurrentBranchName;
    // Computed once in the constructor; not refreshed afterwards.
    @NotNull Collection<Hash> myHeads = new HashSet<>();

    public HgShowUnnamedHeadsForCurrentBranchAction(@NotNull HgRepository repository) {
      super(null, true);
      myRepository = repository;
      myCurrentBranchName = repository.getCurrentBranch();
      getTemplatePresentation().setText(String.format("Unnamed heads for %s", myCurrentBranchName));
      myHeads = filterUnnamedHeads();
    }

    /**
     * Returns the current branch's heads minus bookmarked ones and the current
     * revision; empty when the repository/branch is fresh or in a complex state.
     * NOTE(review): mutates the collection returned by getBranches().get(...) in
     * place — presumably that map hands out a private copy; confirm before reuse.
     */
    @NotNull
    private Collection<Hash> filterUnnamedHeads() {
      Collection<Hash> branchWithHashes = myRepository.getBranches().get(myCurrentBranchName);
      String currentHead = myRepository.getCurrentRevision();
      if (branchWithHashes == null || currentHead == null || myRepository.getState() != Repository.State.NORMAL) {
        // repository is fresh or branch is fresh or complex state
        return Collections.emptySet();
      }
      else {
        Collection<Hash> bookmarkHashes = ContainerUtil.map(myRepository.getBookmarks(), info -> info.getHash());
        branchWithHashes.removeAll(bookmarkHashes);
        branchWithHashes.remove(HashImpl.build(currentHead));
      }
      return branchWithHashes;
    }

    @NotNull
    @Override
    public AnAction[] getChildren(@Nullable AnActionEvent e) {
      // One standard branch-actions entry per unnamed head, labelled by short hash.
      List<AnAction> branchHeadActions = new ArrayList<>();
      for (Hash hash : myHeads) {
        branchHeadActions.add(new HgCommonBranchActions(myRepository.getProject(), Collections.singletonList(myRepository), hash.toShortString()));
      }
      return ContainerUtil.toArray(branchHeadActions, new AnAction[branchHeadActions.size()]);
    }

    @Override
    public void update(final AnActionEvent e) {
      if (myRepository.isFresh() || myHeads.isEmpty()) {
        // Nothing to show: hide the whole group.
        e.getPresentation().setEnabledAndVisible(false);
      }
      else if (!Repository.State.NORMAL.equals(myRepository.getState())) {
        // Merge/rebase in progress: keep visible but disabled.
        e.getPresentation().setEnabled(false);
      }
    }
  }

  /** Actions available for a named branch. */
  static class BranchActions extends HgCommonBranchActions {
    BranchActions(@NotNull Project project, @NotNull List<HgRepository> repositories, @NotNull String branchName) {
      super(project, repositories, branchName, HgBranchType.BRANCH);
    }
  }

  /** The currently checked-out branch: shown with a "current" marker and no child actions. */
  public static class CurrentBranch extends BranchActions implements PopupElementWithAdditionalInfo {
    public CurrentBranch(@NotNull Project project, @NotNull List<HgRepository> repositories, @NotNull String branchName) {
      super(project, repositories, branchName);
    }

    @NotNull
    @Override
    public AnAction[] getChildren(@Nullable AnActionEvent e) {
      return AnAction.EMPTY_ARRAY;
    }

    @Nullable
    @Override
    public String getPrefixInfo() {
      return "current";
    }
  }

  /**
   * Actions available for bookmarks.
   */
  static class BookmarkActions extends HgCommonBranchActions {

    BookmarkActions(@NotNull Project project, @NotNull List<HgRepository> repositories, @NotNull String branchName) {
      super(project, repositories, branchName, HgBranchType.BOOKMARK);
    }

    @NotNull
    @Override
    public AnAction[] getChildren(@Nullable AnActionEvent e) {
      // Common branch actions plus bookmark-specific deletion.
      return ArrayUtil.append(super.getChildren(e), new DeleteBookmarkAction(myProject, myRepositories, myBranchName));
    }

    /** Deletes the bookmark from every repository, on a pooled thread. */
    private static class DeleteBookmarkAction extends HgBranchAbstractAction {

      DeleteBookmarkAction(@NotNull Project project, @NotNull List<HgRepository> repositories, @NotNull String branchName) {
        super(project, "Delete", repositories, branchName);
      }

      @Override
      public void actionPerformed(AnActionEvent e) {
        HgUtil.executeOnPooledThread(() -> {
          for (HgRepository repository : myRepositories) {
            HgBookmarkCommand.deleteBookmarkSynchronously(myProject, repository.getRoot(), myBranchName);
          }
        }, myProject);
      }
    }
  }

  /** The active bookmark: shown with an "active" marker; only deletion is offered. */
  public static class CurrentActiveBookmark extends BookmarkActions implements PopupElementWithAdditionalInfo {
    public CurrentActiveBookmark(@NotNull Project project, @NotNull List<HgRepository> repositories, @NotNull String branchName) {
      super(project, repositories, branchName);
    }

    @NotNull
    @Override
    public AnAction[] getChildren(@Nullable AnActionEvent e) {
      return new AnAction[]{new BookmarkActions.DeleteBookmarkAction(myProject, myRepositories, myBranchName)};
    }

    @Nullable
    @Override
    public String getPrefixInfo() {
      return "active";
    }
  }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.action.admin.indices.shards;

import com.carrotsearch.hppc.cursors.IntObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.action.support.DefaultShardOperationFailedException;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.collect.ImmutableOpenIntMap;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import static org.elasticsearch.action.admin.indices.shards.IndicesShardStoresResponse.StoreStatus.*;

/**
 * Response for {@link IndicesShardStoresAction}
 *
 * Consists of {@link StoreStatus}s for requested indices grouped by
 * indices and shard ids and a list of encountered node {@link Failure}s
 */
public class IndicesShardStoresResponse extends ActionResponse implements ToXContent {

    /**
     * Shard store information from a node
     */
    public static class StoreStatus implements Streamable, ToXContent, Comparable<StoreStatus> {
        private DiscoveryNode node;
        private long version;
        private Throwable storeException;
        private Allocation allocation;

        /**
         * The status of the shard store with respect to the cluster
         */
        public enum Allocation {
            /**
             * Allocated as primary
             */
            PRIMARY((byte) 0),

            /**
             * Allocated as a replica
             */
            REPLICA((byte) 1),

            /**
             * Not allocated
             */
            UNUSED((byte) 2);

            // Stable byte id used on the wire; keep values in sync with fromId/value.
            private final byte id;

            Allocation(byte id) {
                this.id = id;
            }

            private static Allocation fromId(byte id) {
                switch (id) {
                    case 0: return PRIMARY;
                    case 1: return REPLICA;
                    case 2: return UNUSED;
                    default: throw new IllegalArgumentException("unknown id for allocation [" + id + "]");
                }
            }

            public String value() {
                switch (id) {
                    case 0: return "primary";
                    case 1: return "replica";
                    case 2: return "unused";
                    default: throw new IllegalArgumentException("unknown id for allocation [" + id + "]");
                }
            }

            private static Allocation readFrom(StreamInput in) throws IOException {
                return fromId(in.readByte());
            }

            private void writeTo(StreamOutput out) throws IOException {
                out.writeByte(id);
            }
        }

        // No-arg constructor reserved for wire deserialization (readStoreStatus).
        private StoreStatus() {
        }

        public StoreStatus(DiscoveryNode node, long version, Allocation allocation, Throwable storeException) {
            this.node = node;
            this.version = version;
            this.allocation = allocation;
            this.storeException = storeException;
        }

        /**
         * Node the store belongs to
         */
        public DiscoveryNode getNode() {
            return node;
        }

        /**
         * Version of the store, used to select the store that will be
         * used as a primary.
         */
        public long getVersion() {
            return version;
        }

        /**
         * Exception while trying to open the
         * shard index or from when the shard failed
         */
        public Throwable getStoreException() {
            return storeException;
        }

        /**
         * The allocation status of the store.
         * {@link Allocation#PRIMARY} indicates a primary shard copy
         * {@link Allocation#REPLICA} indicates a replica shard copy
         * {@link Allocation#UNUSED} indicates an unused shard copy
         */
        public Allocation getAllocation() {
            return allocation;
        }

        static StoreStatus readStoreStatus(StreamInput in) throws IOException {
            StoreStatus storeStatus = new StoreStatus();
            storeStatus.readFrom(in);
            return storeStatus;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            // Read order must mirror writeTo exactly.
            node = DiscoveryNode.readNode(in);
            version = in.readLong();
            allocation = Allocation.readFrom(in);
            if (in.readBoolean()) {
                storeException = in.readThrowable();
            }
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            node.writeTo(out);
            out.writeLong(version);
            allocation.writeTo(out);
            // Boolean marker tells the reader whether an exception follows.
            if (storeException != null) {
                out.writeBoolean(true);
                out.writeThrowable(storeException);
            } else {
                out.writeBoolean(false);
            }
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            node.toXContent(builder, params);
            builder.field(Fields.VERSION, version);
            builder.field(Fields.ALLOCATED, allocation.value());
            if (storeException != null) {
                builder.startObject(Fields.STORE_EXCEPTION);
                ElasticsearchException.toXContent(builder, params, storeException);
                builder.endObject();
            }
            return builder;
        }

        @Override
        public int compareTo(StoreStatus other) {
            // Ordering: healthy stores first, then by descending version,
            // then by allocation id (primary < replica < unused).
            if (storeException != null && other.storeException == null) {
                return 1;
            } else if (other.storeException != null && storeException == null) {
                return -1;
            } else {
                int compare = Long.compare(other.version, version);
                if (compare == 0) {
                    return Integer.compare(allocation.id, other.allocation.id);
                }
                return compare;
            }
        }
    }

    /**
     * Single node failure while retrieving shard store information
     */
    public static class Failure extends DefaultShardOperationFailedException {
        private String nodeId;

        public Failure(String nodeId, String index, int shardId, Throwable reason) {
            super(index, shardId, reason);
            this.nodeId = nodeId;
        }

        // No-arg constructor reserved for wire deserialization (readFailure).
        private Failure() {
        }

        public String nodeId() {
            return nodeId;
        }

        public static Failure readFailure(StreamInput in) throws IOException {
            Failure failure = new Failure();
            failure.readFrom(in);
            return failure;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            // nodeId precedes the superclass payload on the wire.
            nodeId = in.readString();
            super.readFrom(in);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            out.writeString(nodeId);
            super.writeTo(out);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.field("node", nodeId());
            super.toXContent(builder, params);
            return builder;
        }
    }

    // index name -> shard id -> store statuses from each node
    private ImmutableOpenMap<String, ImmutableOpenIntMap<List<StoreStatus>>> storeStatuses;
    private List<Failure> failures;

    public IndicesShardStoresResponse(ImmutableOpenMap<String, ImmutableOpenIntMap<List<StoreStatus>>> storeStatuses, List<Failure> failures) {
        this.storeStatuses = storeStatuses;
        this.failures = failures;
    }

    IndicesShardStoresResponse() {
        this(ImmutableOpenMap.<String, ImmutableOpenIntMap<List<StoreStatus>>>of(), Collections.<Failure>emptyList());
    }

    /**
     * Returns {@link StoreStatus}s
     * grouped by their index names and shard ids.
     */
    public ImmutableOpenMap<String, ImmutableOpenIntMap<List<StoreStatus>>> getStoreStatuses() {
        return storeStatuses;
    }

    /**
     * Returns node {@link Failure}s encountered
     * while executing the request
     */
    public List<Failure> getFailures() {
        return failures;
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        super.readFrom(in);
        // Wire layout (must mirror writeTo): per index -> per shard -> store statuses, then failures.
        int numResponse = in.readVInt();
        ImmutableOpenMap.Builder<String, ImmutableOpenIntMap<List<StoreStatus>>> storeStatusesBuilder = ImmutableOpenMap.builder();
        for (int i = 0; i < numResponse; i++) {
            String index = in.readString();
            int indexEntries = in.readVInt();
            ImmutableOpenIntMap.Builder<List<StoreStatus>> shardEntries = ImmutableOpenIntMap.builder();
            for (int shardCount = 0; shardCount < indexEntries; shardCount++) {
                int shardID = in.readInt();
                int nodeEntries = in.readVInt();
                List<StoreStatus> storeStatuses = new ArrayList<>(nodeEntries);
                for (int nodeCount = 0; nodeCount < nodeEntries; nodeCount++) {
                    storeStatuses.add(readStoreStatus(in));
                }
                shardEntries.put(shardID, storeStatuses);
            }
            storeStatusesBuilder.put(index, shardEntries.build());
        }
        int numFailure = in.readVInt();
        List<Failure> failureBuilder = new ArrayList<>();
        for (int i = 0; i < numFailure; i++) {
            failureBuilder.add(Failure.readFailure(in));
        }
        storeStatuses = storeStatusesBuilder.build();
        failures = Collections.unmodifiableList(failureBuilder);
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        out.writeVInt(storeStatuses.size());
        for (ObjectObjectCursor<String, ImmutableOpenIntMap<List<StoreStatus>>> indexShards : storeStatuses) {
            out.writeString(indexShards.key);
            out.writeVInt(indexShards.value.size());
            for (IntObjectCursor<List<StoreStatus>> shardStatusesEntry : indexShards.value) {
                out.writeInt(shardStatusesEntry.key);
                out.writeVInt(shardStatusesEntry.value.size());
                for (StoreStatus storeStatus : shardStatusesEntry.value) {
                    storeStatus.writeTo(out);
                }
            }
        }
        out.writeVInt(failures.size());
        for (ShardOperationFailedException failure : failures) {
            failure.writeTo(out);
        }
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        // "failures" array only emitted when there is at least one failure.
        if (failures.size() > 0) {
            builder.startArray(Fields.FAILURES);
            for (ShardOperationFailedException failure : failures) {
                builder.startObject();
                failure.toXContent(builder, params);
                builder.endObject();
            }
            builder.endArray();
        }
        // indices -> <index> -> shards -> <shard id> -> stores[]
        builder.startObject(Fields.INDICES);
        for (ObjectObjectCursor<String, ImmutableOpenIntMap<List<StoreStatus>>> indexShards : storeStatuses) {
            builder.startObject(indexShards.key);
            builder.startObject(Fields.SHARDS);
            for (IntObjectCursor<List<StoreStatus>> shardStatusesEntry : indexShards.value) {
                builder.startObject(String.valueOf(shardStatusesEntry.key));
                builder.startArray(Fields.STORES);
                for (StoreStatus storeStatus : shardStatusesEntry.value) {
                    builder.startObject();
                    storeStatus.toXContent(builder, params);
                    builder.endObject();
                }
                builder.endArray();
                builder.endObject();
            }
            builder.endObject();
            builder.endObject();
        }
        builder.endObject();
        return builder;
    }

    /** XContent field name constants. */
    static final class Fields {
        static final XContentBuilderString INDICES = new XContentBuilderString("indices");
        static final XContentBuilderString SHARDS = new XContentBuilderString("shards");
        static final XContentBuilderString FAILURES = new XContentBuilderString("failures");
        static final XContentBuilderString STORES = new XContentBuilderString("stores");
        // StoreStatus fields
        static final XContentBuilderString VERSION = new XContentBuilderString("version");
        static final XContentBuilderString STORE_EXCEPTION = new XContentBuilderString("store_exception");
        static final XContentBuilderString ALLOCATED = new XContentBuilderString("allocation");
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.visor.cache;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteException;
import org.apache.ignite.IgniteInterruptedException;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.internal.IgniteEx;
import org.apache.ignite.internal.processors.cache.CacheGroupContext;
import org.apache.ignite.internal.processors.cache.GridCacheContext;
import org.apache.ignite.internal.processors.cache.distributed.dht.topology.GridDhtLocalPartition;
import org.apache.ignite.internal.processors.cache.distributed.dht.topology.GridDhtPartitionState;
import org.apache.ignite.internal.processors.cache.persistence.CacheDataRow;
import org.apache.ignite.internal.processors.cache.persistence.GridCacheOffheapManager;
import org.apache.ignite.internal.util.lang.GridIterator;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.T2;
import org.apache.ignite.internal.util.typedef.internal.CU;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteCallable;
import org.apache.ignite.resources.IgniteInstanceResource;
import org.apache.ignite.resources.LoggerResource;

/**
 * Class contains logic of finding data of already destroyed caches in running cache groups.
 *
 * Also, could cleanup this garbage in cache partitions and indexes.
 */
public class VisorFindAndDeleteGarbageInPersistenceClosure implements IgniteCallable<VisorFindAndDeleteGarbageInPersistenceJobResult> {
    /** */
    private static final long serialVersionUID = 0L;

    /** Ignite. */
    @IgniteInstanceResource
    private transient IgniteEx ignite;

    /** Injected logger. */
    @LoggerResource
    private IgniteLogger log;

    /** Cache group names. */
    private Set<String> grpNames;

    /** Remove garbage. */
    private final boolean deleteGarbage;

    /** Counter of processed partitions. */
    private final AtomicInteger processedPartitions = new AtomicInteger(0);

    /** Total partitions. */
    private volatile int totalPartitions;

    /** Last progress print timestamp. */
    private final AtomicLong lastProgressPrintTs = new AtomicLong(0);

    /** Calculation executor. */
    private volatile ExecutorService calcExecutor;

    /**
     * @param grpNames Cache group names.
     * @param deleteGarbage Clean up garbage from partitions.
     */
    public VisorFindAndDeleteGarbageInPersistenceClosure(Set<String> grpNames, boolean deleteGarbage) {
        this.grpNames = grpNames;
        this.deleteGarbage = deleteGarbage;
    }

    /** {@inheritDoc} */
    @Override public VisorFindAndDeleteGarbageInPersistenceJobResult call() throws Exception {
        // One worker per CPU; shut down in finally so threads never leak.
        calcExecutor = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());

        try {
            return call0();
        }
        finally {
            calcExecutor.shutdown();
        }
    }

    /**
     * Scans every selected partition in parallel, merges the per-partition
     * garbage counts into grpId -> partId -> count, optionally cleans up,
     * and wraps the result. On any failure the remaining futures are cancelled.
     */
    private VisorFindAndDeleteGarbageInPersistenceJobResult call0() {
        Set<Integer> grpIds = calcCacheGroupIds();

        List<T2<CacheGroupContext, GridDhtLocalPartition>> partArgs = calcListOfPartitions(grpIds);

        totalPartitions = partArgs.size();

        List<Future<Map<Integer, Map<Integer, Long>>>> procPartFutures = new ArrayList<>();

        for (final T2<CacheGroupContext, GridDhtLocalPartition> t2 : partArgs)
            procPartFutures.add(calcExecutor.submit(new Callable<Map<Integer, Map<Integer, Long>>>() {
                @Override public Map<Integer, Map<Integer, Long>> call() throws Exception {
                    return processPartition(t2.get1(), t2.get2());
                }
            }));

        Map<Integer, Map<Integer, Long>> grpIdToPartIdToGarbageCount = new HashMap<>();

        int curPart = 0;

        try {
            for (; curPart < procPartFutures.size(); curPart++) {
                Future<Map<Integer, Map<Integer, Long>>> fut = procPartFutures.get(curPart);

                Map<Integer, Map<Integer, Long>> partRes = fut.get();

                // Merge this partition's counts into the aggregate map.
                for (Map.Entry<Integer, Map<Integer, Long>> e : partRes.entrySet()) {
                    Map<Integer, Long> map = grpIdToPartIdToGarbageCount.computeIfAbsent(e.getKey(), (x) -> new HashMap<>());

                    for (Map.Entry<Integer, Long> entry : e.getValue().entrySet())
                        map.compute(entry.getKey(), (k, v) -> (v == null ? 0 : v) + entry.getValue());
                }
            }

            if (deleteGarbage)
                cleanup(grpIdToPartIdToGarbageCount);

            log.warning("VisorFindAndDeleteGarbageInPersistenceClosure finished: processed " + totalPartitions + " partitions.");
        }
        catch (InterruptedException | ExecutionException | IgniteCheckedException e) {
            // Cancel everything not yet consumed before rethrowing.
            for (int j = curPart; j < procPartFutures.size(); j++)
                procPartFutures.get(j).cancel(false);

            throw unwrapFutureException(e);
        }

        return new VisorFindAndDeleteGarbageInPersistenceJobResult(grpIdToPartIdToGarbageCount);
    }

    /**
     * By calling this method we would delete found garbage in partitions and would try to
     * cleanup indexes.
     *
     * @param grpIdToPartIdToGarbageCount GrpId -&gt; PartId -&gt; Garbage count.
     */
    private void cleanup(Map<Integer, Map<Integer, Long>> grpIdToPartIdToGarbageCount) throws IgniteCheckedException {
        for (Map.Entry<Integer, Map<Integer, Long>> e : grpIdToPartIdToGarbageCount.entrySet()) {
            int grpId = e.getKey();

            CacheGroupContext groupContext = ignite.context().cache().cacheGroup(grpId);

            assert groupContext != null;

            // Keys of the inner map are the destroyed cache ids whose data was found.
            for (Integer cacheId : e.getValue().keySet()) {
                groupContext.offheap().stopCache(cacheId, true);

                ((GridCacheOffheapManager)groupContext.offheap()).findAndCleanupLostIndexesForStoppedCache(cacheId);
            }
        }
    }

    /**
     * @param grpIds Group ids to generate list of partitions for.
     */
    private List<T2<CacheGroupContext, GridDhtLocalPartition>> calcListOfPartitions(Set<Integer> grpIds) {
        List<T2<CacheGroupContext, GridDhtLocalPartition>> partArgs = new ArrayList<>();

        for (Integer grpId : grpIds) {
            CacheGroupContext grpCtx = ignite.context().cache().cacheGroup(grpId);

            List<GridDhtLocalPartition> parts = grpCtx.topology().localPartitions();

            for (GridDhtLocalPartition part : parts)
                partArgs.add(new T2<>(grpCtx, part));
        }

        // To decrease contention on same group.
        Collections.shuffle(partArgs);

        return partArgs;
    }

    /**
     * @return Set of cache group ids to scan for garbage on.
     */
    private Set<Integer> calcCacheGroupIds() {
        Set<Integer> grpIds = new HashSet<>();

        Set<String> missingCacheGroups = new HashSet<>();

        if (!F.isEmpty(grpNames)) {
            // Explicit group list: resolve each name, collecting the missing ones.
            for (String grpName : grpNames) {
                CacheGroupContext groupContext = ignite.context().cache().cacheGroup(CU.cacheId(grpName));

                if (groupContext == null) {
                    missingCacheGroups.add(grpName);

                    continue;
                }

                // Only shared groups can hold leftovers of destroyed caches.
                if (groupContext.sharedGroup())
                    grpIds.add(groupContext.groupId());
                else
                    log.warning("Group[name=" + grpName + "] is not shared one, it couldn't contain garbage from destroyed caches.");
            }

            if (!missingCacheGroups.isEmpty()) {
                StringBuilder strBuilder = new StringBuilder("The following cache groups do not exist: ");

                for (String name : missingCacheGroups)
                    strBuilder.append(name).append(", ");

                // Drop the trailing ", " before reporting.
                strBuilder.delete(strBuilder.length() - 2, strBuilder.length());

                throw new IgniteException(strBuilder.toString());
            }
        }
        else {
            // No names given: scan all non-system, non-local cache groups.
            Collection<CacheGroupContext> groups = ignite.context().cache().cacheGroups();

            for (CacheGroupContext grp : groups) {
                if (!grp.systemCache() && !grp.isLocal())
                    grpIds.add(grp.groupId());
            }
        }

        return grpIds;
    }

    /**
     * Scans one partition for rows whose cache no longer exists and counts them
     * per (group id, cache id). The partition is reserved for the duration of
     * the scan and released in finally.
     *
     * @param grpCtx Group context.
     * @param part Local partition.
     */
    private Map<Integer, Map<Integer, Long>> processPartition(
        CacheGroupContext grpCtx,
        GridDhtLocalPartition part
    ) {
        if (!part.reserve())
            return Collections.emptyMap();

        Map<Integer, Map<Integer, Long>> stoppedCachesForGrpId = new HashMap<>();

        try {
            // Only partitions this node currently owns are meaningful to scan.
            if (part.state() != GridDhtPartitionState.OWNING)
                return Collections.emptyMap();

            GridIterator<CacheDataRow> it = grpCtx.offheap().partitionIterator(part.id());

            while (it.hasNextX()) {
                CacheDataRow row = it.nextX();

                // NOTE(review): cacheId == 0 appears to mean rows carry no per-cache
                // id (non-shared group layout), so the scan stops — confirm.
                if (row.cacheId() == 0)
                    break;

                int cacheId = row.cacheId();

                GridCacheContext cacheCtx = grpCtx.shared().cacheContext(row.cacheId());

                // No live cache context for this id -> the row is garbage; count it.
                if (cacheCtx == null)
                    stoppedCachesForGrpId
                        .computeIfAbsent(grpCtx.groupId(), (x) -> new HashMap<>())
                        .compute(cacheId, (x, y) -> y == null ? 1 : y + 1);
            }
        }
        catch (IgniteCheckedException e) {
            U.error(log, "Failed to process partition [grpId=" + grpCtx.groupId() +
                ", partId=" + part.id() + "]", e);

            return Collections.emptyMap();
        }
        finally {
            part.release();
        }

        processedPartitions.incrementAndGet();

        printProgressIfNeeded();

        return stoppedCachesForGrpId;
    }

    /**
     * Logs progress at most once a minute; CAS on the timestamp keeps
     * concurrent workers from printing duplicates.
     */
    private void printProgressIfNeeded() {
        long curTs = U.currentTimeMillis();
        long lastTs = lastProgressPrintTs.get();

        if (curTs - lastTs >= 60_000 && lastProgressPrintTs.compareAndSet(lastTs, curTs))
            log.warning("Current progress of VisorFindAndDeleteGarbageInPersistenceClosure: checked " + processedPartitions.get() + " partitions out of " + totalPartitions);
    }

    /**
     * @param e Future result exception.
     * @return Unwrapped exception.
     */
    private IgniteException unwrapFutureException(Exception e) {
        assert e instanceof InterruptedException || e instanceof ExecutionException : "Expecting either InterruptedException " +
            "or ExecutionException";

        if (e instanceof InterruptedException)
            return new IgniteInterruptedException((InterruptedException)e);
        else if (e.getCause() instanceof IgniteException)
            return (IgniteException)e.getCause();
        else
            return new IgniteException(e.getCause());
    }
}
/* * Copyright 2013 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.maps.android.clustering; import android.content.Context; import android.os.AsyncTask; import android.os.Build; import com.google.android.gms.maps.GoogleMap; import com.google.android.gms.maps.model.CameraPosition; import com.google.android.gms.maps.model.Marker; import com.google.maps.android.MarkerManager; import com.google.maps.android.clustering.algo.Algorithm; import com.google.maps.android.clustering.algo.NonHierarchicalDistanceBasedAlgorithm; import com.google.maps.android.clustering.algo.PreCachingAlgorithmDecorator; import com.google.maps.android.clustering.view.ClusterRenderer; import com.google.maps.android.clustering.view.DefaultClusterRenderer; import java.util.Collection; import java.util.Set; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; /** * Groups many items on a map based on zoom level. 
* <p/> * ClusterManager should be added to the map as an: <ul> <li>{@link com.google.android.gms.maps.GoogleMap.OnCameraIdleListener}</li> * <li>{@link com.google.android.gms.maps.GoogleMap.OnMarkerClickListener}</li> </ul> */ public class ClusterManager<T extends ClusterItem> implements GoogleMap.OnCameraIdleListener, GoogleMap.OnMarkerClickListener, GoogleMap.OnInfoWindowClickListener { private final MarkerManager mMarkerManager; private final MarkerManager.Collection mMarkers; private final MarkerManager.Collection mClusterMarkers; private Algorithm<T> mAlgorithm; private final ReadWriteLock mAlgorithmLock = new ReentrantReadWriteLock(); private ClusterRenderer<T> mRenderer; private GoogleMap mMap; private CameraPosition mPreviousCameraPosition; private ClusterTask mClusterTask; private final ReadWriteLock mClusterTaskLock = new ReentrantReadWriteLock(); private OnClusterItemClickListener<T> mOnClusterItemClickListener; private OnClusterInfoWindowClickListener<T> mOnClusterInfoWindowClickListener; private OnClusterItemInfoWindowClickListener<T> mOnClusterItemInfoWindowClickListener; private OnClusterClickListener<T> mOnClusterClickListener; public ClusterManager(Context context, GoogleMap map) { this(context, map, new MarkerManager(map)); } public ClusterManager(Context context, GoogleMap map, MarkerManager markerManager) { mMap = map; mMarkerManager = markerManager; mClusterMarkers = markerManager.newCollection(); mMarkers = markerManager.newCollection(); mRenderer = new DefaultClusterRenderer<T>(context, map, this); mAlgorithm = new PreCachingAlgorithmDecorator<T>(new NonHierarchicalDistanceBasedAlgorithm<T>()); mClusterTask = new ClusterTask(); mRenderer.onAdd(); } public MarkerManager.Collection getMarkerCollection() { return mMarkers; } public MarkerManager.Collection getClusterMarkerCollection() { return mClusterMarkers; } public MarkerManager getMarkerManager() { return mMarkerManager; } public void setRenderer(ClusterRenderer<T> view) { 
mRenderer.setOnClusterClickListener(null); mRenderer.setOnClusterItemClickListener(null); mClusterMarkers.clear(); mMarkers.clear(); mRenderer.onRemove(); mRenderer = view; mRenderer.onAdd(); mRenderer.setOnClusterClickListener(mOnClusterClickListener); mRenderer.setOnClusterInfoWindowClickListener(mOnClusterInfoWindowClickListener); mRenderer.setOnClusterItemClickListener(mOnClusterItemClickListener); mRenderer.setOnClusterItemInfoWindowClickListener(mOnClusterItemInfoWindowClickListener); cluster(); } public void setAlgorithm(Algorithm<T> algorithm) { mAlgorithmLock.writeLock().lock(); try { if (mAlgorithm != null) { algorithm.addItems(mAlgorithm.getItems()); } mAlgorithm = new PreCachingAlgorithmDecorator<T>(algorithm); } finally { mAlgorithmLock.writeLock().unlock(); } cluster(); } public void setAnimation(boolean animate) { mRenderer.setAnimation(animate); } public ClusterRenderer<T> getRenderer() { return mRenderer; } public Algorithm<T> getAlgorithm() { return mAlgorithm; } public void clearItems() { mAlgorithmLock.writeLock().lock(); try { mAlgorithm.clearItems(); } finally { mAlgorithmLock.writeLock().unlock(); } } public void addItems(Collection<T> items) { mAlgorithmLock.writeLock().lock(); try { mAlgorithm.addItems(items); } finally { mAlgorithmLock.writeLock().unlock(); } } public void addItem(T myItem) { mAlgorithmLock.writeLock().lock(); try { mAlgorithm.addItem(myItem); } finally { mAlgorithmLock.writeLock().unlock(); } } public void removeItem(T item) { mAlgorithmLock.writeLock().lock(); try { mAlgorithm.removeItem(item); } finally { mAlgorithmLock.writeLock().unlock(); } } /** * Force a re-cluster. You may want to call this after adding new item(s). */ public void cluster() { mClusterTaskLock.writeLock().lock(); try { // Attempt to cancel the in-flight request. 
mClusterTask.cancel(true); mClusterTask = new ClusterTask(); if (Build.VERSION.SDK_INT < Build.VERSION_CODES.HONEYCOMB) { mClusterTask.execute(mMap.getCameraPosition().zoom); } else { mClusterTask.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, mMap.getCameraPosition().zoom); } } finally { mClusterTaskLock.writeLock().unlock(); } } /** * Might re-cluster. */ @Override public void onCameraIdle() { if (mRenderer instanceof GoogleMap.OnCameraIdleListener) { ((GoogleMap.OnCameraIdleListener) mRenderer).onCameraIdle(); } // Don't re-compute clusters if the map has just been panned/tilted/rotated. CameraPosition position = mMap.getCameraPosition(); if (mPreviousCameraPosition != null && mPreviousCameraPosition.zoom == position.zoom) { return; } mPreviousCameraPosition = mMap.getCameraPosition(); cluster(); } @Override public boolean onMarkerClick(Marker marker) { return getMarkerManager().onMarkerClick(marker); } @Override public void onInfoWindowClick(Marker marker) { getMarkerManager().onInfoWindowClick(marker); } /** * Runs the clustering algorithm in a background thread, then re-paints when results come back. */ private class ClusterTask extends AsyncTask<Float, Void, Set<? extends Cluster<T>>> { @Override protected Set<? extends Cluster<T>> doInBackground(Float... zoom) { mAlgorithmLock.readLock().lock(); try { return mAlgorithm.getClusters(zoom[0]); } finally { mAlgorithmLock.readLock().unlock(); } } @Override protected void onPostExecute(Set<? extends Cluster<T>> clusters) { mRenderer.onClustersChanged(clusters); } } /** * Sets a callback that's invoked when a Cluster is tapped. Note: For this listener to function, * the ClusterManager must be added as a click listener to the map. */ public void setOnClusterClickListener(OnClusterClickListener<T> listener) { mOnClusterClickListener = listener; mRenderer.setOnClusterClickListener(listener); } /** * Sets a callback that's invoked when a Cluster is tapped. 
Note: For this listener to function, * the ClusterManager must be added as a info window click listener to the map. */ public void setOnClusterInfoWindowClickListener(OnClusterInfoWindowClickListener<T> listener) { mOnClusterInfoWindowClickListener = listener; mRenderer.setOnClusterInfoWindowClickListener(listener); } /** * Sets a callback that's invoked when an individual ClusterItem is tapped. Note: For this * listener to function, the ClusterManager must be added as a click listener to the map. */ public void setOnClusterItemClickListener(OnClusterItemClickListener<T> listener) { mOnClusterItemClickListener = listener; mRenderer.setOnClusterItemClickListener(listener); } /** * Sets a callback that's invoked when an individual ClusterItem's Info Window is tapped. Note: For this * listener to function, the ClusterManager must be added as a info window click listener to the map. */ public void setOnClusterItemInfoWindowClickListener(OnClusterItemInfoWindowClickListener<T> listener) { mOnClusterItemInfoWindowClickListener = listener; mRenderer.setOnClusterItemInfoWindowClickListener(listener); } /** * Called when a Cluster is clicked. */ public interface OnClusterClickListener<T extends ClusterItem> { /** * Called when cluster is clicked. * Return true if click has been handled * Return false and the click will dispatched to the next listener */ public boolean onClusterClick(Cluster<T> cluster); } /** * Called when a Cluster's Info Window is clicked. */ public interface OnClusterInfoWindowClickListener<T extends ClusterItem> { public void onClusterInfoWindowClick(Cluster<T> cluster); } /** * Called when an individual ClusterItem is clicked. */ public interface OnClusterItemClickListener<T extends ClusterItem> { public boolean onClusterItemClick(T item); } /** * Called when an individual ClusterItem's Info Window is clicked. */ public interface OnClusterItemInfoWindowClickListener<T extends ClusterItem> { public void onClusterItemInfoWindowClick(T item); } }
/* * The MIT License (MIT) * * Copyright (c) 2014-2017 the original author or authors. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
* */ package tk.mybatis.mapper.weekend; import tk.mybatis.mapper.util.Sqls; import tk.mybatis.mapper.util.Sqls.Criteria; import tk.mybatis.mapper.util.Sqls.Criterion; import tk.mybatis.mapper.weekend.reflection.Reflections; /** * @author XuYin */ public class WeekendSqls<T> implements tk.mybatis.mapper.entity.SqlsCriteria { private Criteria criteria; private WeekendSqls() { this.criteria = new Sqls.Criteria(); } public static <T> WeekendSqls<T> custom() { return new WeekendSqls<T>(); } public WeekendSqls<T> andIsNull(String property) { this.criteria.getCriterions().add(new Criterion(property, "is null", "and")); return this; } public WeekendSqls<T> andIsNull(Fn<T, Object> fn) { return this.andIsNull(Reflections.fnToFieldName(fn)); } public WeekendSqls<T> andIsNotNull(String property) { this.criteria.getCriterions().add(new Criterion(property, "is not null", "and")); return this; } public WeekendSqls<T> andIsNotNull(Fn<T, Object> fn) { return this.andIsNotNull(Reflections.fnToFieldName(fn)); } public WeekendSqls<T> andEqualTo(String property, Object value) { this.criteria.getCriterions().add(new Criterion(property, value, "=", "and")); return this; } public WeekendSqls<T> andEqualTo(Fn<T, Object> fn, Object value) { return this.andEqualTo(Reflections.fnToFieldName(fn), value); } public WeekendSqls<T> andNotEqualTo(String property, Object value) { this.criteria.getCriterions().add(new Criterion(property, value, "<>", "and")); return this; } public WeekendSqls<T> andNotEqualTo(Fn<T, Object> fn, Object value) { return this.andNotEqualTo(Reflections.fnToFieldName(fn), value); } public WeekendSqls<T> andGreaterThan(String property, Object value) { this.criteria.getCriterions().add(new Criterion(property, value, ">", "and")); return this; } public WeekendSqls<T> andGreaterThan(Fn<T, Object> fn, Object value) { return this.andGreaterThan(Reflections.fnToFieldName(fn), value); } public WeekendSqls<T> andGreaterThanOrEqualTo(String property, Object value) { 
this.criteria.getCriterions().add(new Criterion(property, value, ">=", "and")); return this; } public WeekendSqls<T> andGreaterThanOrEqualTo(Fn<T, Object> fn, Object value) { return this.andGreaterThanOrEqualTo(Reflections.fnToFieldName(fn), value); } public WeekendSqls<T> andLessThan(String property, Object value) { this.criteria.getCriterions().add(new Criterion(property, value, "<", "and")); return this; } public WeekendSqls<T> andLessThan(Fn<T, Object> fn, Object value) { return this.andLessThan(Reflections.fnToFieldName(fn), value); } public WeekendSqls<T> andLessThanOrEqualTo(String property, Object value) { this.criteria.getCriterions().add(new Criterion(property, value, "<=", "and")); return this; } public WeekendSqls<T> andLessThanOrEqualTo(Fn<T, Object> fn, Object value) { return this.andLessThanOrEqualTo(Reflections.fnToFieldName(fn), value); } public WeekendSqls<T> andIn(String property, Iterable values) { this.criteria.getCriterions().add(new Criterion(property, values, "in", "and")); return this; } public WeekendSqls<T> andIn(Fn<T, Object> fn, Iterable values) { return this.andIn(Reflections.fnToFieldName(fn), values); } public WeekendSqls<T> andNotIn(String property, Iterable values) { this.criteria.getCriterions().add(new Criterion(property, values, "not in", "and")); return this; } public WeekendSqls<T> andNotIn(Fn<T, Object> fn, Iterable values) { return this.andNotIn(Reflections.fnToFieldName(fn), values); } public WeekendSqls<T> andBetween(String property, Object value1, Object value2) { this.criteria.getCriterions().add(new Criterion(property, value1, value2, "between", "and")); return this; } public WeekendSqls<T> andBetween(Fn<T, Object> fn, Object value1, Object value2) { return this.andBetween(Reflections.fnToFieldName(fn), value1, value2); } public WeekendSqls<T> andNotBetween(String property, Object value1, Object value2) { this.criteria.getCriterions().add(new Criterion(property, value1, value2, "not between", "and")); return this; } 
public WeekendSqls<T> andNotBetween(Fn<T, Object> fn, Object value1, Object value2) { return this.andNotBetween(Reflections.fnToFieldName(fn), value1, value2); } public WeekendSqls<T> andLike(String property, String value) { this.criteria.getCriterions().add(new Criterion(property, value, "like", "and")); return this; } public WeekendSqls<T> andLike(Fn<T, Object> fn, String value) { return this.andLike(Reflections.fnToFieldName(fn), value); } public WeekendSqls<T> andNotLike(String property, String value) { this.criteria.getCriterions().add(new Criterion(property, value, "not like", "and")); return this; } public WeekendSqls<T> andNotLike(Fn<T, Object> fn, String value) { return this.andNotLike(Reflections.fnToFieldName(fn), value); } public WeekendSqls<T> orIsNull(String property) { this.criteria.getCriterions().add(new Criterion(property, "is null", "or")); return this; } public WeekendSqls<T> orIsNull(Fn<T, Object> fn) { return this.orIsNull(Reflections.fnToFieldName(fn)); } public WeekendSqls<T> orIsNotNull(String property) { this.criteria.getCriterions().add(new Criterion(property, "is not null", "or")); return this; } public WeekendSqls<T> orIsNotNull(Fn<T, Object> fn) { return this.orIsNotNull(Reflections.fnToFieldName(fn)); } public WeekendSqls<T> orEqualTo(String property, Object value) { this.criteria.getCriterions().add(new Criterion(property, value, "=", "or")); return this; } public WeekendSqls<T> orEqualTo(Fn<T, Object> fn, Object value) { return this.orEqualTo(Reflections.fnToFieldName(fn), value); } public WeekendSqls<T> orNotEqualTo(String property, Object value) { this.criteria.getCriterions().add(new Criterion(property, value, "<>", "or")); return this; } public WeekendSqls<T> orNotEqualTo(Fn<T, Object> fn, Object value) { return this.orNotEqualTo(Reflections.fnToFieldName(fn), value); } public WeekendSqls<T> orGreaterThan(String property, Object value) { this.criteria.getCriterions().add(new Criterion(property, value, ">", "or")); return this; } 
public WeekendSqls<T> orGreaterThan(Fn<T, Object> fn, Object value) { return this.orGreaterThan(Reflections.fnToFieldName(fn), value); } public WeekendSqls<T> orGreaterThanOrEqualTo(String property, Object value) { this.criteria.getCriterions().add(new Criterion(property, value, ">=", "or")); return this; } public WeekendSqls<T> orGreaterThanOrEqualTo(Fn<T, Object> fn, Object value) { return this.orGreaterThanOrEqualTo(Reflections.fnToFieldName(fn), value); } public WeekendSqls<T> orLessThan(String property, Object value) { this.criteria.getCriterions().add(new Criterion(property, value, "<", "or")); return this; } public WeekendSqls<T> orLessThan(Fn<T, Object> fn, Object value) { return this.orLessThan(Reflections.fnToFieldName(fn), value); } public WeekendSqls<T> orLessThanOrEqualTo(String property, Object value) { this.criteria.getCriterions().add(new Criterion(property, value, "<=", "or")); return this; } public WeekendSqls<T> orLessThanOrEqualTo(Fn<T, Object> fn, Object value) { return this.orLessThanOrEqualTo(Reflections.fnToFieldName(fn), value); } public WeekendSqls<T> orIn(String property, Iterable values) { this.criteria.getCriterions().add(new Criterion(property, values, "in", "or")); return this; } public WeekendSqls<T> orIn(Fn<T, Object> fn, Iterable values) { return this.orIn(Reflections.fnToFieldName(fn), values); } public WeekendSqls<T> orNotIn(String property, Iterable values) { this.criteria.getCriterions().add(new Criterion(property, values, "not in", "or")); return this; } public WeekendSqls<T> orNotIn(Fn<T, Object> fn, Iterable values) { return this.orNotIn(Reflections.fnToFieldName(fn), values); } public WeekendSqls<T> orBetween(String property, Object value1, Object value2) { this.criteria.getCriterions().add(new Criterion(property, value1, value2, "between", "or")); return this; } public WeekendSqls<T> orBetween(Fn<T, Object> fn, Object value1, Object value2) { return this.orBetween(Reflections.fnToFieldName(fn), value1, value2); } public 
WeekendSqls<T> orNotBetween(String property, Object value1, Object value2) { this.criteria.getCriterions().add(new Criterion(property, value1, value2, "not between", "or")); return this; } public WeekendSqls<T> orNotBetween(Fn<T, Object> fn, Object value1, Object value2) { return this.orNotBetween(Reflections.fnToFieldName(fn), value1, value2); } public WeekendSqls<T> orLike(String property, String value) { this.criteria.getCriterions().add(new Criterion(property, value, "like", "or")); return this; } public WeekendSqls<T> orLike(Fn<T, Object> fn, String value) { return this.orLike(Reflections.fnToFieldName(fn), value); } public WeekendSqls<T> orNotLike(String property, String value) { this.criteria.getCriterions().add(new Criterion(property, value, "not like", "or")); return this; } public WeekendSqls<T> orNotLike(Fn<T, Object> fn, String value) { return this.orNotLike(Reflections.fnToFieldName(fn), value); } @Override public Criteria getCriteria() { return criteria; } }
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0 and the Server Side Public License, v 1; you may not use this file except * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ package org.elasticsearch.indices.cluster; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.core.Nullable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.common.util.Maps; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.seqno.RetentionLeaseSyncer; import org.elasticsearch.index.shard.IndexEventListener; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShardState; import org.elasticsearch.index.shard.PrimaryReplicaSyncer.ResyncTask; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardLongFieldRange; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.cluster.IndicesClusterStateService.AllocatedIndex; import org.elasticsearch.indices.cluster.IndicesClusterStateService.AllocatedIndices; import org.elasticsearch.indices.cluster.IndicesClusterStateService.Shard; import org.elasticsearch.indices.recovery.PeerRecoveryTargetService; import org.elasticsearch.indices.recovery.RecoveryState; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.test.ESTestCase; import 
org.junit.Before; import java.io.IOException; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentMap; import java.util.function.BiConsumer; import java.util.function.Consumer; import static java.util.Collections.emptyMap; import static java.util.Collections.unmodifiableMap; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; /** * Abstract base class for tests against {@link IndicesClusterStateService} */ public abstract class AbstractIndicesClusterStateServiceTestCase extends ESTestCase { private boolean enableRandomFailures; @Before public void injectRandomFailures() { enableRandomFailures = randomBoolean(); } protected void disableRandomFailures() { enableRandomFailures = false; } protected void failRandomly() { if (enableRandomFailures && rarely()) { throw new RuntimeException("dummy test failure"); } } /** * Checks if cluster state matches internal state of IndicesClusterStateService instance * * @param state cluster state used for matching */ public void assertClusterStateMatchesNodeState(ClusterState state, IndicesClusterStateService indicesClusterStateService) { MockIndicesService indicesService = (MockIndicesService) indicesClusterStateService.indicesService; ConcurrentMap<ShardId, ShardRouting> failedShardsCache = indicesClusterStateService.failedShardsCache; RoutingNode localRoutingNode = state.getRoutingNodes().node(state.getNodes().getLocalNodeId()); if (localRoutingNode != null) { if (enableRandomFailures == false) { // initializing a shard should succeed when enableRandomFailures is disabled // active shards can be failed if state persistence was disabled in an earlier CS update if (failedShardsCache.values().stream().anyMatch(ShardRouting::initializing)) { fail("failed shard cache should not contain initializing shard routing: " + failedShardsCache.values()); } } // check that all shards in 
local routing nodes have been allocated for (ShardRouting shardRouting : localRoutingNode) { Index index = shardRouting.index(); IndexMetadata indexMetadata = state.metadata().getIndexSafe(index); MockIndexShard shard = indicesService.getShardOrNull(shardRouting.shardId()); ShardRouting failedShard = failedShardsCache.get(shardRouting.shardId()); if (state.blocks().disableStatePersistence()) { if (shard != null) { fail("Shard with id " + shardRouting + " should be removed from indicesService due to disabled state persistence"); } } else { if (failedShard != null && failedShard.isSameAllocation(shardRouting) == false) { fail("Shard cache has not been properly cleaned for " + failedShard); } if (shard == null && failedShard == null) { // shard must either be there or there must be a failure fail("Shard with id " + shardRouting + " expected but missing in indicesService and failedShardsCache"); } if (enableRandomFailures == false) { if (shard == null && shardRouting.initializing() && failedShard == shardRouting) { // initializing a shard should succeed when enableRandomFailures is disabled fail("Shard with id " + shardRouting + " expected but missing in indicesService " + failedShard); } } if (shard != null) { AllocatedIndex<? 
extends Shard> indexService = indicesService.indexService(index); assertTrue("Index " + index + " expected but missing in indicesService", indexService != null); // index metadata has been updated assertThat(indexService.getIndexSettings().getIndexMetadata(), equalTo(indexMetadata)); // shard has been created if (enableRandomFailures == false || failedShard == null) { assertTrue("Shard with id " + shardRouting + " expected but missing in indexService", shard != null); // shard has latest shard routing assertThat(shard.routingEntry(), equalTo(shardRouting)); } if (shard.routingEntry().primary() && shard.routingEntry().active()) { IndexShardRoutingTable shardRoutingTable = state.routingTable().shardRoutingTable(shard.shardId()); Set<String> inSyncIds = state.metadata().index(shard.shardId().getIndex()) .inSyncAllocationIds(shard.shardId().id()); assertThat(shard.routingEntry() + " isn't updated with in-sync aIDs", shard.inSyncAllocationIds, equalTo(inSyncIds)); assertThat(shard.routingEntry() + " isn't updated with routing table", shard.routingTable, equalTo(shardRoutingTable)); } } } } } // all other shards / indices have been cleaned up for (AllocatedIndex<? 
extends Shard> indexService : indicesService) { if (state.blocks().disableStatePersistence()) { fail("Index service " + indexService.index() + " should be removed from indicesService due to disabled state persistence"); } assertTrue(state.metadata().getIndexSafe(indexService.index()) != null); boolean shardsFound = false; for (Shard shard : indexService) { shardsFound = true; ShardRouting persistedShardRouting = shard.routingEntry(); ShardRouting shardRouting = localRoutingNode.getByShardId(persistedShardRouting.shardId()); if (shardRouting == null) { fail("Shard with id " + persistedShardRouting + " locally exists but missing in routing table"); } if (shardRouting.equals(persistedShardRouting) == false) { fail("Local shard " + persistedShardRouting + " has stale routing" + shardRouting); } } if (shardsFound == false) { // check if we have shards of that index in failedShardsCache // if yes, we might not have cleaned the index as failedShardsCache can be populated by another thread assertFalse(failedShardsCache.keySet().stream().noneMatch(shardId -> shardId.getIndex().equals(indexService.index()))); } } } /** * Mock for {@link IndicesService} */ protected class MockIndicesService implements AllocatedIndices<MockIndexShard, MockIndexService> { private volatile Map<String, MockIndexService> indices = emptyMap(); @Override public synchronized MockIndexService createIndex( IndexMetadata indexMetadata, List<IndexEventListener> buildInIndexListener, boolean writeDanglingIndices) throws IOException { MockIndexService indexService = new MockIndexService(new IndexSettings(indexMetadata, Settings.EMPTY)); indices = Maps.copyMapWithAddedEntry(indices, indexMetadata.getIndexUUID(), indexService); return indexService; } @Override public IndexMetadata verifyIndexIsDeleted(Index index, ClusterState state) { return null; } @Override public void deleteUnassignedIndex(String reason, IndexMetadata metadata, ClusterState clusterState) { } @Override public synchronized void 
removeIndex(Index index, IndexRemovalReason reason, String extraInfo) { if (hasIndex(index)) { Map<String, MockIndexService> newIndices = new HashMap<>(indices); newIndices.remove(index.getUUID()); indices = unmodifiableMap(newIndices); } } @Override @Nullable public MockIndexService indexService(Index index) { return indices.get(index.getUUID()); } @Override public MockIndexShard createShard( final ShardRouting shardRouting, final PeerRecoveryTargetService recoveryTargetService, final PeerRecoveryTargetService.RecoveryListener recoveryListener, final RepositoriesService repositoriesService, final Consumer<IndexShard.ShardFailure> onShardFailure, final Consumer<ShardId> globalCheckpointSyncer, final RetentionLeaseSyncer retentionLeaseSyncer, final DiscoveryNode targetNode, final DiscoveryNode sourceNode) throws IOException { failRandomly(); RecoveryState recoveryState = new RecoveryState(shardRouting, targetNode, sourceNode); MockIndexService indexService = indexService(recoveryState.getShardId().getIndex()); MockIndexShard indexShard = indexService.createShard(shardRouting); indexShard.recoveryState = recoveryState; return indexShard; } @Override public void processPendingDeletes(Index index, IndexSettings indexSettings, TimeValue timeValue) throws IOException, InterruptedException { } private boolean hasIndex(Index index) { return indices.containsKey(index.getUUID()); } @Override public Iterator<MockIndexService> iterator() { return indices.values().iterator(); } } /** * Mock for {@link IndexService} */ protected class MockIndexService implements AllocatedIndex<MockIndexShard> { private volatile Map<Integer, MockIndexShard> shards = emptyMap(); private final IndexSettings indexSettings; public MockIndexService(IndexSettings indexSettings) { this.indexSettings = indexSettings; } @Override public IndexSettings getIndexSettings() { return indexSettings; } @Override public void updateMapping(final IndexMetadata currentIndexMetadata, final IndexMetadata 
newIndexMetadata) throws IOException {
    // Mapping changes are not applied by the mock; only random failure is simulated.
    failRandomly();
}

@Override
public void updateMetadata(final IndexMetadata currentIndexMetadata, final IndexMetadata newIndexMetadata) {
    indexSettings.updateIndexMetadata(newIndexMetadata);
    // Propagate each shard's (possibly bumped) primary term from the new metadata.
    for (MockIndexShard shard : shards.values()) {
        shard.updateTerm(newIndexMetadata.primaryTerm(shard.shardId().id()));
    }
}

@Override
public MockIndexShard getShardOrNull(int shardId) {
    return shards.get(shardId);
}

public synchronized MockIndexShard createShard(ShardRouting routing) throws IOException {
    failRandomly();
    // Seed the new shard's primary term from the index metadata for its shard id.
    MockIndexShard shard = new MockIndexShard(routing, indexSettings.getIndexMetadata().primaryTerm(routing.shardId().id()));
    shards = Maps.copyMapWithAddedEntry(shards, routing.id(), shard);
    return shard;
}

@Override
public synchronized void removeShard(int shardId, String reason) {
    if (shards.containsKey(shardId) == false) {
        return;
    }
    // Copy-on-write removal, mirroring MockIndicesService#removeIndex.
    HashMap<Integer, MockIndexShard> newShards = new HashMap<>(shards);
    MockIndexShard indexShard = newShards.remove(shardId);
    assert indexShard != null;
    shards = unmodifiableMap(newShards);
}

@Override
public Iterator<MockIndexShard> iterator() {
    return shards.values().iterator();
}

@Override
public Index index() {
    return indexSettings.getIndex();
}
}

/**
 * Mock for {@link IndexShard}
 */
protected class MockIndexShard implements IndicesClusterStateService.Shard {
    private volatile ShardRouting shardRouting;
    private volatile RecoveryState recoveryState;
    private volatile Set<String> inSyncAllocationIds;
    private volatile IndexShardRoutingTable routingTable;
    private volatile long term;

    public MockIndexShard(ShardRouting shardRouting, long term) {
        this.shardRouting = shardRouting;
        this.term = term;
    }

    @Override
    public ShardId shardId() {
        return shardRouting.shardId();
    }

    @Override
    public RecoveryState recoveryState() {
        return recoveryState;
    }

    @Override
    public void updateShardState(ShardRouting shardRouting,
                                 long newPrimaryTerm,
                                 BiConsumer<IndexShard, ActionListener<ResyncTask>> primaryReplicaSyncer,
                                 long
applyingClusterStateVersion, Set<String> inSyncAllocationIds, IndexShardRoutingTable routingTable) throws IOException {
    failRandomly();
    // The update must target this very shard and the same allocation.
    assertThat(this.shardId(), equalTo(shardRouting.shardId()));
    assertTrue("current: " + this.shardRouting + ", got: " + shardRouting, this.shardRouting.isSameAllocation(shardRouting));
    if (this.shardRouting.active()) {
        assertTrue("an active shard must stay active, current: " + this.shardRouting + ", got: " + shardRouting,
            shardRouting.active());
    }
    if (this.shardRouting.primary()) {
        assertTrue("a primary shard can't be demoted", shardRouting.primary());
        if (this.shardRouting.initializing()) {
            assertEquals("primary term can not be updated on an initializing primary shard: " + shardRouting,
                term, newPrimaryTerm);
        }
    } else if (shardRouting.primary()) {
        // note: it's ok for a replica in post recovery to be started and promoted at once
        // this can happen when the primary failed after we sent the start shard message
        assertTrue("a replica can only be promoted when active. current: " + this.shardRouting + " new: " + shardRouting,
            shardRouting.active());
    }
    this.shardRouting = shardRouting;
    if (shardRouting.primary()) {
        // Only primaries track the term, in-sync allocation ids and routing table.
        term = newPrimaryTerm;
        this.inSyncAllocationIds = inSyncAllocationIds;
        this.routingTable = routingTable;
    }
}

@Override
public ShardRouting routingEntry() {
    return shardRouting;
}

@Override
public IndexShardState state() {
    // Shard lifecycle state is not modeled by this mock.
    return null;
}

public long term() {
    return term;
}

public void updateTerm(long newTerm) {
    // Terms are monotonic, and frozen on an active primary.
    assertThat("term can only be incremented: " + shardRouting, newTerm, greaterThanOrEqualTo(term));
    if (shardRouting.primary() && shardRouting.active()) {
        assertThat("term can not be changed on an active primary shard: " + shardRouting, newTerm, equalTo(term));
    }
    this.term = newTerm;
}

@Override
public ShardLongFieldRange getTimestampRange() {
    return ShardLongFieldRange.EMPTY;
}
}
}
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.openapi.wm.impl.welcomeScreen; import com.intellij.diagnostic.IdeMessagePanel; import com.intellij.diagnostic.MessagePool; import com.intellij.icons.AllIcons; import com.intellij.ide.AppLifecycleListener; import com.intellij.ide.DataManager; import com.intellij.ide.IdeBundle; import com.intellij.ide.RecentProjectListActionProvider; import com.intellij.ide.dnd.FileCopyPasteUtil; import com.intellij.ide.impl.ProjectUtil; import com.intellij.ide.plugins.PluginDropHandler; import com.intellij.idea.SplashManager; import com.intellij.jdkEx.JdkEx; import com.intellij.notification.NotificationType; import com.intellij.notification.impl.IdeNotificationArea; import com.intellij.openapi.Disposable; import com.intellij.openapi.MnemonicHelper; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.actionSystem.impl.MenuItemPresentationFactory; import com.intellij.openapi.application.ApplicationInfo; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.ApplicationNamesInfo; import com.intellij.openapi.application.JBProtocolCommand; import com.intellij.openapi.application.ex.ApplicationInfoEx; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.project.DumbAwareAction; import com.intellij.openapi.project.Project; import com.intellij.openapi.project.ProjectManager; import com.intellij.openapi.project.ProjectManagerListener; import com.intellij.openapi.ui.popup.ListItemDescriptorAdapter; import com.intellij.openapi.ui.popup.StackingPopupDispatcher; import com.intellij.openapi.util.*; import com.intellij.openapi.util.registry.Registry; import com.intellij.openapi.wm.IdeFocusManager; import com.intellij.openapi.wm.IdeFrame; import com.intellij.openapi.wm.StatusBar; import com.intellij.openapi.wm.WelcomeScreen; import 
com.intellij.openapi.wm.impl.IdeFrameDecorator; import com.intellij.openapi.wm.impl.IdeGlassPaneImpl; import com.intellij.openapi.wm.impl.ProjectFrameHelper; import com.intellij.openapi.wm.impl.customFrameDecorations.header.CustomFrameDialogContent; import com.intellij.ui.*; import com.intellij.ui.border.CustomLineBorder; import com.intellij.ui.components.JBList; import com.intellij.ui.components.JBSlidingPanel; import com.intellij.ui.components.JBTextField; import com.intellij.ui.components.labels.ActionLink; import com.intellij.ui.components.labels.LinkLabel; import com.intellij.ui.components.panels.NonOpaquePanel; import com.intellij.ui.mac.TouchbarDataKeys; import com.intellij.ui.popup.PopupFactoryImpl; import com.intellij.ui.popup.list.GroupedItemsListRenderer; import com.intellij.ui.scale.JBUIScale; import com.intellij.util.Function; import com.intellij.util.messages.MessageBusConnection; import com.intellij.util.ui.*; import com.intellij.util.ui.accessibility.AccessibleContextAccessor; import com.intellij.util.ui.accessibility.AccessibleContextDelegate; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.accessibility.AccessibleContext; import javax.accessibility.AccessibleRole; import javax.swing.*; import javax.swing.event.ListDataEvent; import javax.swing.event.ListDataListener; import javax.swing.event.ListSelectionEvent; import javax.swing.event.ListSelectionListener; import java.awt.*; import java.awt.datatransfer.Transferable; import java.awt.dnd.*; import java.awt.event.*; import java.io.File; import java.io.InputStream; import java.net.URL; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.function.Consumer; import static com.intellij.util.ui.update.UiNotifyConnector.doWhenFirstShown; /** * @author Konstantin Bulenkov */ public class FlatWelcomeFrame extends JFrame implements IdeFrame, Disposable, AccessibleContextAccessor, WelcomeFrameUpdater { public static 
final String BOTTOM_PANEL = "BOTTOM_PANEL"; private static final String ACTION_GROUP_KEY = "ACTION_GROUP_KEY"; public static final int DEFAULT_HEIGHT = 460; public static final int MAX_DEFAULT_WIDTH = 777; private BalloonLayout myBalloonLayout; private final FlatWelcomeScreen myScreen; private boolean myDisposed; public FlatWelcomeFrame() { SplashManager.hideBeforeShow(this); JRootPane rootPane = getRootPane(); myScreen = new FlatWelcomeScreen(); IdeGlassPaneImpl glassPane = new IdeGlassPaneImpl(rootPane) { @Override public void addNotify() { super.addNotify(); ApplicationManager.getApplication().invokeLater(() -> JBProtocolCommand.handleCurrentCommand()); } }; setGlassPane(glassPane); glassPane.setVisible(false); int defaultHeight = DEFAULT_HEIGHT; if (IdeFrameDecorator.isCustomDecorationActive()) { JComponent holder = CustomFrameDialogContent.getCustomContentHolder(this, myScreen.getWelcomePanel(), UIManager.getColor("WelcomeScreen.background")); setContentPane(holder); if(holder instanceof CustomFrameDialogContent) defaultHeight+= ((CustomFrameDialogContent)holder).getHeaderHeight(); } else { setContentPane(myScreen.getWelcomePanel()); } setTitle(getWelcomeFrameTitle()); AppUIUtil.updateWindowIcon(this); int width = RecentProjectListActionProvider.getInstance().getActions(false).size() == 0 ? 666 : MAX_DEFAULT_WIDTH; getRootPane().setPreferredSize(JBUI.size(width, defaultHeight)); setResizable(false); Dimension size = getPreferredSize(); Point location = WindowStateService.getInstance().getLocation(WelcomeFrame.DIMENSION_KEY); Rectangle screenBounds = ScreenUtil.getScreenRectangle(location != null ? 
location : new Point(0, 0)); setBounds( screenBounds.x + (screenBounds.width - size.width) / 2, screenBounds.y + (screenBounds.height - size.height) / 3, size.width, size.height ); setAutoRequestFocus(false); // at this point a window insets may be unavailable, // so we need resize window when it is shown doWhenFirstShown(this, this::pack); MessageBusConnection connection = ApplicationManager.getApplication().getMessageBus().connect(this); connection.subscribe(ProjectManager.TOPIC, new ProjectManagerListener() { @Override public void projectOpened(@NotNull Project project) { Disposer.dispose(FlatWelcomeFrame.this); } }); connection.subscribe(AppLifecycleListener.TOPIC, new AppLifecycleListener() { @Override public void appClosing() { saveLocation(getBounds()); } }); myBalloonLayout = new WelcomeBalloonLayoutImpl(rootPane, JBUI.insets(8), myScreen.myEventListener, myScreen.myEventLocation); WelcomeFrame.setupCloseAction(this); MnemonicHelper.init(this); Disposer.register(ApplicationManager.getApplication(), this); UIUtil.decorateWindowHeader(getRootPane()); UIUtil.setCustomTitleBar(this, getRootPane(), runnable -> Disposer.register(this, () -> runnable.run())); } @Override public void addNotify() { if (IdeFrameDecorator.isCustomDecorationActive()) { JdkEx.setHasCustomDecoration(this); } super.addNotify(); } @Override public void dispose() { if (myDisposed) { return; } myDisposed = true; super.dispose(); if (myBalloonLayout != null) { ((BalloonLayoutImpl)myBalloonLayout).dispose(); myBalloonLayout = null; } Disposer.dispose(myScreen); WelcomeFrame.resetInstance(); } private static void saveLocation(@NotNull Rectangle location) { Point middle = new Point(location.x + location.width / 2, location.y + location.height / 2); WindowStateService.getInstance().putLocation(WelcomeFrame.DIMENSION_KEY, middle); } @Nullable @Override public StatusBar getStatusBar() { return null; } public static Color getMainBackground() { return JBColor.namedColor("WelcomeScreen.background", 
new JBColor(0xf7f7f7, 0x45474a)); } public static Color getProjectsBackground() { return JBColor.namedColor("WelcomeScreen.Projects.background", new JBColor(Gray.xFF, Gray.x39)); } public static Color getLinkNormalColor() { return new JBColor(Gray._0, Gray.xBB); } public static Color getListSelectionColor(boolean hasFocus) { return hasFocus ? JBColor.namedColor("WelcomeScreen.Projects.selectionBackground", new JBColor(0x3875d6, 0x4b6eaf)) : JBColor.namedColor("WelcomeScreen.Projects.selectionInactiveBackground", new JBColor(Gray.xDD, Gray.x45)); } public static Color getActionLinkSelectionColor() { return new JBColor(0xdbe5f5, 0x485875); } public static JBColor getSeparatorColor() { return JBColor.namedColor("WelcomeScreen.separatorColor", new JBColor(Gray.xEC, new Color(72, 75, 78))); } @Override public AccessibleContext getCurrentAccessibleContext() { return accessibleContext; } protected String getWelcomeFrameTitle() { String title = "Welcome to " + ApplicationNamesInfo.getInstance().getFullProductName(); if (Boolean.getBoolean("ide.ui.version.in.title")) { title += ' ' + ApplicationInfo.getInstance().getFullVersion(); } String suffix = ProjectFrameHelper.getSuperUserSuffix(); if (suffix != null) { title += " (" + suffix+")"; } return title; } @NotNull public static JComponent getPreferredFocusedComponent(@NotNull Pair<JPanel, JBList<AnAction>> pair) { if (pair.second.getModel().getSize() == 1) { JBTextField textField = UIUtil.uiTraverser(pair.first).filter(JBTextField.class).first(); if (textField != null) { return textField; } } return pair.second; } private final class FlatWelcomeScreen extends JPanel implements WelcomeScreen, DataProvider { private final JBSlidingPanel mySlidingPanel = new JBSlidingPanel(); private final DefaultActionGroup myTouchbarActions = new DefaultActionGroup(); public Consumer<List<NotificationType>> myEventListener; public Computable<Point> myEventLocation; private LinkLabel<Object> myUpdatePluginsLink; private boolean inDnd; 
FlatWelcomeScreen() { super(new BorderLayout()); mySlidingPanel.add("root", this); setBackground(getMainBackground()); if (RecentProjectListActionProvider.getInstance().getActions(false, isUseProjectGroups()).size() > 0) { JComponent recentProjects = createRecentProjects(); add(recentProjects, BorderLayout.WEST); JList<?> projectsList = UIUtil.findComponentOfType(recentProjects, JList.class); if (projectsList != null) { projectsList.getModel().addListDataListener(new ListDataListener() { @Override public void intervalAdded(ListDataEvent e) { } @Override public void intervalRemoved(ListDataEvent e) { removeIfNeeded(); } private void removeIfNeeded() { if (RecentProjectListActionProvider.getInstance().getActions(false, isUseProjectGroups()).size() == 0) { FlatWelcomeScreen.this.remove(recentProjects); FlatWelcomeScreen.this.revalidate(); FlatWelcomeScreen.this.repaint(); } } @Override public void contentsChanged(ListDataEvent e) { removeIfNeeded(); } }); projectsList.addFocusListener(new FocusListener() { @Override public void focusGained(FocusEvent e) { projectsList.repaint(); } @Override public void focusLost(FocusEvent e) { projectsList.repaint(); } }); } } add(createBody(), BorderLayout.CENTER); setDropTarget(new DropTarget(this, new DropTargetAdapter() { @Override public void dragEnter(DropTargetDragEvent e) { setDnd(true); } @Override public void dragExit(DropTargetEvent e) { setDnd(false); } @Override public void drop(DropTargetDropEvent e) { setDnd(false); e.acceptDrop(DnDConstants.ACTION_COPY_OR_MOVE); Transferable transferable = e.getTransferable(); List<File> list = FileCopyPasteUtil.getFileList(transferable); if (list != null && list.size() > 0) { PluginDropHandler pluginHandler = new PluginDropHandler(); if (!pluginHandler.canHandle(transferable, null) || !pluginHandler.handleDrop(transferable, null, null)) { ProjectUtil.tryOpenFileList(null, list, "WelcomeFrame"); } e.dropComplete(true); return; } e.dropComplete(false); } private void setDnd(boolean 
dnd) { inDnd = dnd; repaint(); } })); TouchbarDataKeys.putActionDescriptor(myTouchbarActions).setShowText(true); } @Override public JComponent getWelcomePanel() { return mySlidingPanel; } @SuppressWarnings("UseJBColor") @Override public void paint(Graphics g) { super.paint(g); if (inDnd) { Rectangle bounds = getBounds(); Color background = JBColor.namedColor("DragAndDrop.areaBackground", new Color(225, 235, 245)); g.setColor(new Color(background.getRed(), background.getGreen(), background.getBlue(), 206)); g.fillRect(bounds.x, bounds.y, bounds.width, bounds.height); Color backgroundBorder = JBColor.namedColor("DragAndDrop.areaBorderColor", new Color(137, 178, 222)); g.setColor(backgroundBorder); g.drawRect(bounds.x, bounds.y, bounds.width, bounds.height); g.drawRect(bounds.x + 1 , bounds.y + 1, bounds.width - 2, bounds.height - 2); Color foreground = JBColor.namedColor("DragAndDrop.areaForeground", Gray._120); g.setColor(foreground); Font labelFont = StartupUiUtil.getLabelFont(); Font font = labelFont.deriveFont(labelFont.getSize() + 5.0f); String drop = "Drop files here to open"; g.setFont(font); int dropWidth = g.getFontMetrics().stringWidth(drop); int dropHeight = g.getFontMetrics().getHeight(); g.drawString(drop, bounds.x + (bounds.width - dropWidth) / 2, (int)(bounds.y + (bounds.height - dropHeight) * 0.45)); } } @NotNull private JComponent createBody() { NonOpaquePanel panel = new NonOpaquePanel(new BorderLayout()); panel.add(createLogo(), BorderLayout.NORTH); panel.add(createActionPanel(), BorderLayout.CENTER); panel.add(createUpdatesSettingsAndDocs(), BorderLayout.SOUTH); return panel; } private JComponent createUpdatesSettingsAndDocs() { JPanel panel = new NonOpaquePanel(new BorderLayout()); panel.add(createUpdatePluginsLink(), BorderLayout.WEST); panel.add(createSettingsAndDocs(), BorderLayout.EAST); return panel; } private JComponent createSettingsAndDocs() { JPanel panel = new NonOpaquePanel(new BorderLayout()); NonOpaquePanel toolbar = new 
NonOpaquePanel();
AnAction register = ActionManager.getInstance().getAction("Register");
boolean registeredVisible = false;
if (register != null) {
    // Only show the "Register" link when the action reports itself enabled for the welcome screen.
    AnActionEvent e = AnActionEvent.createFromAnAction(register, null, ActionPlaces.WELCOME_SCREEN,
        DataManager.getInstance().getDataContext(this));
    register.update(e);
    Presentation presentation = e.getPresentation();
    if (presentation.isEnabled()) {
        ActionLink registerLink = new ActionLink("Register", register);
        // Don't allow focus, as the containing panel is going to be focusable.
        registerLink.setFocusable(false);
        registerLink.setNormalColor(getLinkNormalColor());
        NonOpaquePanel button = new NonOpaquePanel(new BorderLayout());
        button.setBorder(JBUI.Borders.empty(4, 10));
        button.add(registerLink);
        installFocusable(button, register, KeyEvent.VK_UP, KeyEvent.VK_RIGHT, true);
        NonOpaquePanel wrap = new NonOpaquePanel();
        wrap.setBorder(JBUI.Borders.emptyLeft(10));
        wrap.add(button);
        panel.add(wrap, BorderLayout.WEST);
        registeredVisible = true;
    }
}
toolbar.setLayout(new BoxLayout(toolbar, BoxLayout.X_AXIS));
toolbar.add(createErrorsLink());
toolbar.add(createEventsLink());
toolbar.add(createActionLink("Configure", IdeActions.GROUP_WELCOME_SCREEN_CONFIGURE, AllIcons.General.GearPlain, !registeredVisible));
toolbar.add(createActionLink("Get Help", IdeActions.GROUP_WELCOME_SCREEN_DOC, null, false));
panel.add(toolbar, BorderLayout.EAST);
panel.setBorder(JBUI.Borders.empty(0, 0, 8, 11));
return panel;
}

// Fatal-error notification link backed by the IDE message pool.
private JComponent createErrorsLink() {
    IdeMessagePanel panel = new IdeMessagePanel(null, MessagePool.getInstance());
    panel.setBorder(JBUI.Borders.emptyRight(13));
    panel.setOpaque(false);
    Disposer.register(this, panel);
    return panel;
}

// "Events" link that pops up the welcome-screen balloon layout.
private JComponent createEventsLink() {
    final Ref<ActionLink> actionLinkRef = new Ref<>();
    final JComponent panel = createActionLink("Events", AllIcons.Ide.Notification.NoEvents, actionLinkRef, new AnAction() {
        @Override
        public void actionPerformed(@NotNull AnActionEvent e) {
((WelcomeBalloonLayoutImpl)myBalloonLayout).showPopup(); } }); panel.setVisible(false); myEventListener = types -> { NotificationType type = null; for (NotificationType t : types) { if (NotificationType.ERROR == t) { type = NotificationType.ERROR; break; } if (NotificationType.WARNING == t) { type = NotificationType.WARNING; } else if (type == null && NotificationType.INFORMATION == t) { type = NotificationType.INFORMATION; } } if (types.isEmpty()) { panel.setVisible(false); } else { actionLinkRef.get().setIcon(IdeNotificationArea.createIconWithNotificationCount(actionLinkRef.get(), type, types.size(), false)); panel.setVisible(true); } }; myEventLocation = () -> { Point location = SwingUtilities.convertPoint(panel, 0, 0, getRootPane().getLayeredPane()); return new Point(location.x, location.y + 5); }; return panel; } private JComponent createActionLink(final String text, final String groupId, Icon icon, boolean focusListOnLeft) { final Ref<ActionLink> ref = new Ref<>(null); AnAction action = new AnAction() { @Override public void actionPerformed(@NotNull AnActionEvent e) { ActionGroup configureGroup = (ActionGroup)ActionManager.getInstance().getAction(groupId); PopupFactoryImpl.ActionGroupPopup popup = new PopupFactoryImpl.ActionGroupPopup( null, configureGroup, e.getDataContext(), false, false, false, false, null, -1, null, ActionPlaces.WELCOME_SCREEN, new MenuItemPresentationFactory(true), false); popup.showUnderneathOfLabel(ref.get()); } }; JComponent panel = createActionLink(text, icon, ref, action); installFocusable(panel, action, KeyEvent.VK_UP, KeyEvent.VK_DOWN, focusListOnLeft); return panel; } private JComponent createActionLink(String text, Icon icon, Ref<? super ActionLink> ref, AnAction action) { ActionLink link = new ActionLink(text, icon, action); ref.set(link); // Don't allow focus, as the containing panel is going to focusable. 
link.setFocusable(false); link.setPaintUnderline(false); link.setNormalColor(getLinkNormalColor()); JActionLinkPanel panel = new JActionLinkPanel(link); panel.setBorder(JBUI.Borders.empty(4, 6)); panel.add(createArrow(link), BorderLayout.EAST); return panel; } @NotNull private JComponent createActionPanel() { DefaultActionGroup group = new DefaultActionGroup(); ActionGroup quickStart = (ActionGroup)ActionManager.getInstance().getAction(IdeActions.GROUP_WELCOME_SCREEN_QUICKSTART); collectAllActions(group, quickStart); GridBag gc = new GridBag(); JPanel panel = new JPanel(new GridBagLayout()); panel.setOpaque(false); myTouchbarActions.removeAll(); for (AnAction action : group.getChildren(null)) { AnActionEvent e = AnActionEvent.createFromAnAction(action, null, ActionPlaces.WELCOME_SCREEN, DataManager.getInstance().getDataContext(this)); action.update(e); Presentation presentation = e.getPresentation(); if (presentation.isVisible()) { String text = presentation.getText(); if (text != null && text.endsWith("...")) { text = text.substring(0, text.length() - 3); } Icon icon = presentation.getIcon(); if (icon == null || icon.getIconHeight() != JBUIScale.scale(16) || icon.getIconWidth() != JBUIScale.scale(16)) { icon = JBUI.scale(EmptyIcon.create(16)); } action = wrapGroups(action); ActionLink link = new ActionLink(text, icon, action, null, ActionPlaces.WELCOME_SCREEN); // Don't allow focus, as the containing panel is going to focusable. 
link.setFocusable(false); link.setPaintUnderline(false); link.setNormalColor(getLinkNormalColor()); JActionLinkPanel button = new JActionLinkPanel(link); button.setBorder(JBUI.Borders.empty(8, 20)); if (action instanceof WelcomePopupAction) { button.add(createArrow(link), BorderLayout.EAST); TouchbarDataKeys.putActionDescriptor(action).setContextComponent(link); } installFocusable(button, action, KeyEvent.VK_UP, KeyEvent.VK_DOWN, true); panel.add(Box.createHorizontalGlue(), gc.nextLine().next().fillCellHorizontally()); panel.add(button, gc.next().anchor(GridBagConstraints.LINE_START)); panel.add(Box.createHorizontalGlue(), gc.next().fillCellHorizontally()); myTouchbarActions.add(action); } } panel.add(Box.createGlue(), gc.nextLine().next().fillCell().anchor(GridBagConstraints.PAGE_END).coverLine(3).weighty(1.0)); return panel; } @Nullable @Override public Object getData(@NotNull String dataId) { if (TouchbarDataKeys.ACTIONS_KEY.is(dataId)) return myTouchbarActions; return null; } /** * Wraps an {@link ActionLink} component and delegates accessibility support to it. 
*/
protected class JActionLinkPanel extends JPanel {
    @NotNull private final ActionLink myActionLink;

    public JActionLinkPanel(@NotNull ActionLink actionLink) {
        super(new BorderLayout());
        myActionLink = actionLink;
        add(myActionLink);
        setOpaque(false);
    }

    @Override
    public AccessibleContext getAccessibleContext() {
        // Lazily create a delegate so assistive technology sees this panel as a push button.
        if (accessibleContext == null) {
            accessibleContext = new AccessibleJActionLinkPanel(myActionLink.getAccessibleContext());
        }
        return accessibleContext;
    }

    protected final class AccessibleJActionLinkPanel extends AccessibleContextDelegate {
        AccessibleJActionLinkPanel(AccessibleContext context) {
            super(context);
        }

        @Override
        public Container getDelegateParent() {
            return getParent();
        }

        @Override
        public AccessibleRole getAccessibleRole() {
            return AccessibleRole.PUSH_BUTTON;
        }
    }
}

// Wraps a popup ActionGroup into an action that slides the welcome panel to a
// dedicated sub-panel for the group; non-popup actions are returned unchanged.
private AnAction wrapGroups(AnAction action) {
    if (action instanceof ActionGroup && ((ActionGroup)action).isPopup()) {
        final Pair<JPanel, JBList<AnAction>> panel = createActionGroupPanel((ActionGroup)action, () -> goBack(), this);
        final Runnable onDone = () -> {
            setTitle("New Project");
            final JBList<AnAction> list = panel.second;
            ScrollingUtil.ensureSelectionExists(list);
            final ListSelectionListener[] listeners =
                ((DefaultListSelectionModel)list.getSelectionModel()).getListeners(ListSelectionListener.class);
            // avoid component caching. This helps in case of LaF change
            for (ListSelectionListener listener : listeners) {
                listener.valueChanged(new ListSelectionEvent(list, list.getSelectedIndex(), list.getSelectedIndex(), false));
            }
            JComponent toFocus = getPreferredFocusedComponent(panel);
            IdeFocusManager.getGlobalInstance().doWhenFocusSettlesDown(() -> IdeFocusManager.getGlobalInstance().requestFocus(toFocus, true));
        };
        final String name = action.getClass().getName();
        mySlidingPanel.add(name, panel.first);
        final Presentation p = action.getTemplatePresentation();
        return new DumbAwareAction(p.getText(), p.getDescription(), p.getIcon()) {
            @Override
            public void actionPerformed(@NotNull AnActionEvent e) {
                mySlidingPanel.getLayout().swipe(mySlidingPanel, name, JBCardLayout.SwipeDirection.FORWARD, onDone);
            }
        };
    }
    return action;
}

// Slides back to the root welcome panel and restores the default frame title.
private void goBack() {
    mySlidingPanel.swipe("root", JBCardLayout.SwipeDirection.BACKWARD).doWhenDone(() -> {
        mySlidingPanel.getRootPane().setDefaultButton(null);
        setTitle(getWelcomeFrameTitle());
    });
}

// Recursively flattens non-popup sub-groups of 'actionGroup' into 'group'.
private void collectAllActions(@NotNull DefaultActionGroup group, @NotNull ActionGroup actionGroup) {
    for (AnAction action : actionGroup.getChildren(null)) {
        if (action instanceof ActionGroup && !((ActionGroup)action).isPopup()) {
            collectAllActions(group, (ActionGroup)action);
        } else {
            group.add(action);
        }
    }
}

@NotNull
private JComponent createLogo() {
    ApplicationInfoEx appInfo = ApplicationInfoEx.getInstanceEx();
    NonOpaquePanel panel = new NonOpaquePanel(new BorderLayout());
    String welcomeScreenLogoUrl = appInfo.getWelcomeScreenLogoUrl();
    if (welcomeScreenLogoUrl != null) {
        JLabel logo = new JLabel(IconLoader.getIcon(welcomeScreenLogoUrl));
        logo.setBorder(JBUI.Borders.empty(30, 0, 10, 0));
        logo.setHorizontalAlignment(SwingConstants.CENTER);
        panel.add(logo, BorderLayout.NORTH);
    }
    String applicationName = Boolean.getBoolean("ide.ui.name.with.edition") ?
ApplicationNamesInfo.getInstance().getFullProductNameWithEdition() : ApplicationNamesInfo.getInstance().getFullProductName(); JLabel appName = new JLabel(applicationName); appName.setForeground(JBColor.foreground()); appName.setFont(getProductFont(36).deriveFont(Font.PLAIN)); appName.setHorizontalAlignment(SwingConstants.CENTER); String appVersion = "Version "; appVersion += appInfo.getFullVersion(); if (appInfo.isEAP() && !appInfo.getBuild().isSnapshot()) { appVersion += " (" + appInfo.getBuild().asStringWithoutProductCode() + ")"; } JLabel version = new JLabel(appVersion); version.setFont(getProductFont(16)); version.setHorizontalAlignment(SwingConstants.CENTER); version.setForeground(Gray._128); panel.add(appName); panel.add(version, BorderLayout.SOUTH); panel.setBorder(JBUI.Borders.emptyBottom(20)); return panel; } @NotNull private Font getProductFont(int size) { try { return loadFont().deriveFont((float)JBUIScale.scale(size)); } catch (Throwable t) { Logger.getInstance(AppUIUtil.class).warn(t); } return StartupUiUtil.getLabelFont().deriveFont(JBUIScale.scale((float)size)); } @NotNull private Font loadFont() { @SuppressWarnings("SpellCheckingInspection") String fontPath = "/fonts/Roboto-Light.ttf"; URL url = AppUIUtil.class.getResource(fontPath); if (url == null) { Logger.getInstance(AppUIUtil.class).warn("Resource missing: " + fontPath); } else { try (InputStream is = url.openStream()) { return Font.createFont(Font.TRUETYPE_FONT, is); } catch (Throwable t) { Logger.getInstance(AppUIUtil.class).warn("Cannot load font: " + url, t); } } return StartupUiUtil.getLabelFont(); } private JComponent createRecentProjects() { JPanel panel = new JPanel(new BorderLayout()); panel.add(new NewRecentProjectPanel(this), BorderLayout.CENTER); panel.setBackground(getProjectsBackground()); panel.setBorder(new CustomLineBorder(getSeparatorColor(), JBUI.insetsRight(1))); return panel; } private void installFocusable(final JComponent comp, final AnAction action, final int 
prevKeyCode, final int nextKeyCode, final boolean focusListOnLeft) { comp.setFocusable(true); comp.setFocusTraversalKeysEnabled(true); comp.addKeyListener(new KeyAdapter() { @Override public void keyPressed(KeyEvent e) { JList list = UIUtil.findComponentOfType(FlatWelcomeFrame.this.getComponent(), JList.class); if (e.getKeyCode() == KeyEvent.VK_ENTER || e.getKeyCode() == KeyEvent.VK_SPACE) { InputEvent event = e; if (e.getComponent() instanceof JComponent) { ActionLink link = UIUtil.findComponentOfType((JComponent)e.getComponent(), ActionLink.class); if (link != null) { event = new MouseEvent(link, MouseEvent.MOUSE_CLICKED, e.getWhen(), e.getModifiers(), 0, 0, 1, false, MouseEvent.BUTTON1); } } action.actionPerformed(AnActionEvent.createFromAnAction(action, event, ActionPlaces.WELCOME_SCREEN, DataManager.getInstance().getDataContext())); } else if (e.getKeyCode() == prevKeyCode) { focusPrev(comp); } else if (e.getKeyCode() == nextKeyCode) { focusNext(comp); } else if (e.getKeyCode() == KeyEvent.VK_LEFT) { if (focusListOnLeft) { if (list != null) { IdeFocusManager.getGlobalInstance().doWhenFocusSettlesDown(() -> IdeFocusManager.getGlobalInstance().requestFocus(list, true)); } } else { focusPrev(comp); } } else if (e.getKeyCode() == KeyEvent.VK_RIGHT) { focusNext(comp); } } }); comp.addFocusListener(new FocusListener() { @Override public void focusGained(FocusEvent e) { comp.setOpaque(true); comp.setBackground(getActionLinkSelectionColor()); } @Override public void focusLost(FocusEvent e) { comp.setOpaque(false); comp.setBackground(getMainBackground()); } }); } private void focusPrev(JComponent comp) { FocusTraversalPolicy policy = FlatWelcomeFrame.this.getFocusTraversalPolicy(); if (policy != null) { Component prev = policy.getComponentBefore(FlatWelcomeFrame.this, comp); if (prev != null) { IdeFocusManager.getGlobalInstance().doWhenFocusSettlesDown(() -> IdeFocusManager.getGlobalInstance().requestFocus(prev, true)); } } } private void focusNext(JComponent comp) { 
FocusTraversalPolicy policy = FlatWelcomeFrame.this.getFocusTraversalPolicy(); if (policy != null) { Component next = policy.getComponentAfter(FlatWelcomeFrame.this, comp); if (next != null) { IdeFocusManager.getGlobalInstance().doWhenFocusSettlesDown(() -> IdeFocusManager.getGlobalInstance().requestFocus(next, true)); } } } @Override public void setupFrame(JFrame frame) { } @Override public void dispose() { } private JComponent createUpdatePluginsLink() { myUpdatePluginsLink = new LinkLabel<>(IdeBundle.message("updates.plugins.welcome.screen.link.message"), null); myUpdatePluginsLink.setVisible(false); NonOpaquePanel wrap = new NonOpaquePanel(myUpdatePluginsLink); wrap.setBorder(JBUI.Borders.empty(0, 10, 8, 11)); return wrap; } public void showPluginUpdates(@NotNull Runnable callback) { myUpdatePluginsLink.setListener((__, ___) -> callback.run(), null); myUpdatePluginsLink.setVisible(true); } public void hidePluginUpdates() { myUpdatePluginsLink.setListener(null, null); myUpdatePluginsLink.setVisible(false); } } @Override public void showPluginUpdates(@NotNull Runnable callback) { myScreen.showPluginUpdates(callback); } @Override public void hidePluginUpdates() { myScreen.hidePluginUpdates(); } public static boolean isUseProjectGroups() { return Registry.is("welcome.screen.project.grouping.enabled"); } private static JLabel createArrow(final ActionLink link) { JLabel arrow = new JLabel(AllIcons.General.ArrowDown); arrow.setCursor(Cursor.getPredefinedCursor(Cursor.HAND_CURSOR)); arrow.setVerticalAlignment(SwingConstants.BOTTOM); new ClickListener() { @Override public boolean onClick(@NotNull MouseEvent e, int clickCount) { final MouseEvent newEvent = MouseEventAdapter.convert(e, link, e.getX(), e.getY()); link.doClick(newEvent); return true; } }.installOn(arrow); return arrow; } @Nullable @Override public BalloonLayout getBalloonLayout() { return myBalloonLayout; } @NotNull @Override public Rectangle suggestChildFrameBounds() { return getBounds(); } @Nullable 
@Override
public Project getProject() {
  // The welcome screen has no real project; expose the default project unless the
  // application is already disposed (possible during shutdown).
  if (ApplicationManager.getApplication().isDisposed()) {
    return null;
  }
  return ProjectManager.getInstance().getDefaultProject();
}

@Override
public void setFrameTitle(String title) {
  setTitle(title);
}

@Override
public JComponent getComponent() {
  return getRootPane();
}

/**
 * Builds the two-pane "action group" UI (e.g. the New Project wizard's left list +
 * right detail panel). The left {@code JBList} shows the flattened children of
 * {@code action}; selecting an {@link AbstractActionWithPanel} entry swaps its panel
 * into the center and rebuilds the bottom button bar.
 *
 * @param action            group whose (flattened) children populate the list
 * @param backAction        invoked on ESC / Cancel; may be null (no back navigation)
 * @param parentDisposable  owns the list model and any Disposable child actions
 * @return pair of (whole panel, the left-hand list)
 */
public static Pair<JPanel, JBList<AnAction>> createActionGroupPanel(final ActionGroup action, final Runnable backAction, @NotNull Disposable parentDisposable) {
  JPanel actionsListPanel = new JPanel(new BorderLayout());
  actionsListPanel.setBackground(getProjectsBackground());
  final List<AnAction> groups = flattenActionGroups(action);
  final DefaultListModel<AnAction> model = JBList.createDefaultListModel(groups);
  final JBList<AnAction> list = new JBList<>(model);
  // Child actions that are Disposable get tied to the caller's lifetime.
  for (AnAction group : groups) {
    if (group instanceof Disposable) {
      Disposer.register(parentDisposable, (Disposable)group);
    }
  }
  // Clear the model on dispose so the actions are not retained by the list.
  Disposer.register(parentDisposable, new Disposable() {
    @Override
    public void dispose() {
      model.clear();
    }
  });
  list.setBackground(getProjectsBackground());
  // Renderer: plain labels, with a caption/separator above the first entry of each named sub-group.
  list.setCellRenderer(new GroupedItemsListRenderer<AnAction>(new ListItemDescriptorAdapter<AnAction>() {
    @Nullable
    @Override
    public String getTextFor(AnAction value) {
      return getActionText(value);
    }
    @Nullable
    @Override
    public String getCaptionAboveOf(AnAction value) {
      return getParentGroupName(value);
    }
    @Override
    public boolean hasSeparatorAboveOf(AnAction value) {
      // Separator when the parent-group name changes relative to the previous row.
      int index = model.indexOf(value);
      final String parentGroupName = getParentGroupName(value);
      if (index < 1) return parentGroupName != null;
      AnAction upper = model.get(index - 1);
      if (getParentGroupName(upper) == null && parentGroupName != null) return true;
      return !Comparing.equal(getParentGroupName(upper), parentGroupName);
    }
  }) {
    @Override
    protected JComponent createItemComponent() {
      myTextLabel = new ErrorLabel();
      myTextLabel.setOpaque(true);
      myTextLabel.setBorder(JBUI.Borders.empty(3, 7));
      return myTextLabel;
    }
    @Override
    protected Color getBackground() {
      return getProjectsBackground();
    }
    @Override
    protected void customizeComponent(JList<? extends AnAction> list, AnAction value, boolean isSelected) {
      if (myTextLabel != null) {
        myTextLabel.setText(getActionText(value));
        myTextLabel.setIcon(value.getTemplatePresentation().getIcon());
      }
    }
  });
  JScrollPane pane = ScrollPaneFactory.createScrollPane(list, true);
  pane.setBackground(getProjectsBackground());
  actionsListPanel.add(pane, BorderLayout.CENTER);
  // Clamp the list width to [100, 200] scaled px; +14 presumably accounts for the scrollbar.
  int width = (int)Math.max(Math.min(Math.round(list.getPreferredSize().getWidth()), JBUIScale.scale(200)), JBUIScale.scale(100));
  pane.setPreferredSize(JBUI.size(width + 14, -1));
  boolean singleProjectGenerator = list.getModel().getSize() == 1;
  final Ref<Component> selected = Ref.create();
  final JPanel main = new JPanel(new BorderLayout());
  main.add(actionsListPanel, BorderLayout.WEST);
  JPanel bottomPanel = new JPanel(new FlowLayout(FlowLayout.RIGHT));
  bottomPanel.setBorder(BorderFactory.createMatteBorder(1, 0, 0, 0, new JBColor(Gray._217, Gray._81)));
  main.add(bottomPanel, BorderLayout.SOUTH);
  // Cache of detail panels so each action's panel is created at most once.
  final HashMap<Object, JPanel> panelsMap = new HashMap<>();
  ListSelectionListener selectionListener = e -> {
    if (e.getValueIsAdjusting()) {
      // Update when a change has been finalized.
      // For instance, selecting an element with mouse fires two consecutive ListSelectionEvent events.
      return;
    }
    if (!selected.isNull()) {
      main.remove(selected.get());
    }
    Object value = list.getSelectedValue();
    if (value instanceof AbstractActionWithPanel) {
      final JPanel panel = panelsMap.computeIfAbsent(value, o -> ((AbstractActionWithPanel)value).createPanel());
      ((AbstractActionWithPanel)value).onPanelSelected();
      panel.setBorder(JBUI.Borders.empty(7, 10));
      selected.set(panel);
      main.add(selected.get());
      updateBottomPanel(panel, (AbstractActionWithPanel)value, bottomPanel, backAction);
      main.revalidate();
      main.repaint();
    }
  };
  list.addListSelectionListener(selectionListener);
  if (backAction != null) {
    // ESC navigates back — but not while a popup owns focus.
    new DumbAwareAction() {
      @Override
      public void update(@NotNull AnActionEvent e) {
        e.getPresentation().setEnabled(!StackingPopupDispatcher.getInstance().isPopupFocused());
      }
      @Override
      public void actionPerformed(@NotNull AnActionEvent e) {
        backAction.run();
      }
    }.registerCustomShortcutSet(CommonShortcuts.ESCAPE, main, parentDisposable);
  }
  installQuickSearch(list);
  if (singleProjectGenerator) {
    // With a single generator there is nothing to choose from — collapse the list pane.
    actionsListPanel.setPreferredSize(new Dimension(0, 0));
  }
  return Pair.create(main, list);
}

/**
 * Rebuilds the bottom button bar for the newly selected action.
 * Button order follows platform convention: Cancel-first on macOS, Cancel-last elsewhere.
 */
private static void updateBottomPanel(@NotNull JPanel currentPanel, @NotNull AbstractActionWithPanel actionWithPanel, @NotNull JPanel bottomPanel, @Nullable Runnable backAction) {
  bottomPanel.removeAll();
  if (SystemInfo.isMac) {
    addCancelButton(bottomPanel, backAction);
    addActionButton(bottomPanel, actionWithPanel, currentPanel);
  }
  else {
    addActionButton(bottomPanel, actionWithPanel, currentPanel);
    addCancelButton(bottomPanel, backAction);
  }
}

/** Adds a Cancel button when back navigation is available; otherwise does nothing. */
private static void addCancelButton(@NotNull JPanel bottomPanel, @Nullable Runnable backAction) {
  JComponent cancelButton = createCancelButton(backAction);
  if (cancelButton != null) {
    bottomPanel.add(cancelButton);
  }
}

/** Adds the action's own button and makes it the panel's default (ENTER) button. */
private static void addActionButton(@NotNull JPanel bottomPanel, @NotNull AbstractActionWithPanel actionWithPanel, @NotNull JPanel currentPanel) {
  JButton actionButton = actionWithPanel.getActionButton();
  bottomPanel.add(actionButton);
  currentPanel.getRootPane().setDefaultButton(actionButton);
}

/** Returns a Cancel button wired to {@code cancelAction}, or null when there is no such action. */
@Nullable
private static JComponent createCancelButton(@Nullable Runnable cancelAction) {
  if (cancelAction == null) return null;
  JButton cancelButton = new JButton("Cancel");
  cancelButton.addActionListener(e -> cancelAction.run());
  return cancelButton;
}

/** Enables speed search over the list, matching on the action's presentation text. */
public static void installQuickSearch(JBList<? extends AnAction> list) {
  new ListSpeedSearch<>(list, (Function<AnAction, String>)o -> {
    // Restricted to AbstractActionWithPanel to avoid a dependency mess with ProjectSettingsStepBase.
    if (o instanceof AbstractActionWithPanel) {
      return o.getTemplatePresentation().getText();
    }
    return null;
  });
}

/**
 * Flattens one level of nesting: children of a child ActionGroup are lifted into the
 * result, each tagged (via a presentation client property) with its group's name so
 * the renderer can show captions/separators. Non-group children are kept as-is.
 */
private static List<AnAction> flattenActionGroups(@NotNull final ActionGroup action) {
  final ArrayList<AnAction> groups = new ArrayList<>();
  String groupName;
  for (AnAction anAction : action.getChildren(null)) {
    if (anAction instanceof ActionGroup) {
      groupName = getActionText(anAction);
      for (AnAction childAction : ((ActionGroup)anAction).getChildren(null)) {
        if (groupName != null) {
          setParentGroupName(groupName, childAction);
        }
        groups.add(childAction);
      }
    }
    else {
      groups.add(anAction);
    }
  }
  return groups;
}

/** Display text of an action, taken from its template presentation. */
private static String getActionText(@NotNull final AnAction value) {
  return value.getTemplatePresentation().getText();
}

/** Group name previously stashed on the action by {@code setParentGroupName}, or null. */
private static String getParentGroupName(@NotNull final AnAction value) {
  return (String)value.getTemplatePresentation().getClientProperty(ACTION_GROUP_KEY);
}

/** Stashes the owning group's name on the action's template presentation. */
private static void setParentGroupName(@NotNull final String groupName, @NotNull final AnAction childAction) {
  childAction.getTemplatePresentation().putClientProperty(ACTION_GROUP_KEY, groupName);
}
}
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver14;

import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import org.jboss.netty.buffer.ChannelBuffer;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;

/**
 * Immutable, exact-match (unmasked) IPv6 flow-label OXM entry for OpenFlow 1.4.
 * Wire format: 4-byte OXM TLV header (the fixed constant 0x80003804) followed by
 * the 4-byte flow-label value — 8 bytes total (LENGTH).
 * Generated by LoxiGen — do not hand-edit behavior; regeneration would discard it.
 */
class OFOxmIpv6FlabelVer14 implements OFOxmIpv6Flabel {
    private static final Logger logger = LoggerFactory.getLogger(OFOxmIpv6FlabelVer14.class);
    // version: 1.4
    final static byte WIRE_VERSION = 5;
    final static int LENGTH = 8;

    private final static IPv6FlowLabel DEFAULT_VALUE = IPv6FlowLabel.NONE;

    // OF message fields
    private final IPv6FlowLabel value;

    // Immutable default instance (value = IPv6FlowLabel.NONE)
    final static OFOxmIpv6FlabelVer14 DEFAULT = new OFOxmIpv6FlabelVer14(
        DEFAULT_VALUE
    );

    // package private constructor - used by readers, builders, and factory
    OFOxmIpv6FlabelVer14(IPv6FlowLabel value) {
        if(value == null) {
            throw new NullPointerException("OFOxmIpv6FlabelVer14: property value cannot be null");
        }
        this.value = value;
    }

    // Accessors for OF message fields
    @Override
    public long getTypeLen() {
        // Fixed OXM TLV header; per the OXM layout this encodes class 0x8000
        // (OpenFlow basic), the IPv6-flabel field, no mask bit, payload length 4.
        return 0x80003804L;
    }

    @Override
    public IPv6FlowLabel getValue() {
        return value;
    }

    @Override
    public MatchField<IPv6FlowLabel> getMatchField() {
        return MatchField.IPV6_FLABEL;
    }

    @Override
    public boolean isMasked() {
        return false;
    }

    public OFOxm<IPv6FlowLabel> getCanonical() {
        // exact match OXM is always canonical
        return this;
    }

    @Override
    public IPv6FlowLabel getMask()throws UnsupportedOperationException {
        // Unmasked OXM variant: there is no mask to return.
        throw new UnsupportedOperationException("Property mask not supported in version 1.4");
    }

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_14;
    }

    /** Returns a builder pre-populated with this instance's value. */
    public OFOxmIpv6Flabel.Builder createBuilder() {
        return new BuilderWithParent(this);
    }

    /** Builder seeded from an existing message; unset fields fall back to the parent's. */
    static class BuilderWithParent implements OFOxmIpv6Flabel.Builder {
        final OFOxmIpv6FlabelVer14 parentMessage;

        // OF message fields
        private boolean valueSet;
        private IPv6FlowLabel value;

        BuilderWithParent(OFOxmIpv6FlabelVer14 parentMessage) {
            this.parentMessage = parentMessage;
        }

        @Override
        public long getTypeLen() {
            return 0x80003804L;
        }

        @Override
        public IPv6FlowLabel getValue() {
            return value;
        }

        @Override
        public OFOxmIpv6Flabel.Builder setValue(IPv6FlowLabel value) {
            this.value = value;
            this.valueSet = true;
            return this;
        }

        @Override
        public MatchField<IPv6FlowLabel> getMatchField() {
            return MatchField.IPV6_FLABEL;
        }

        @Override
        public boolean isMasked() {
            return false;
        }

        @Override
        public OFOxm<IPv6FlowLabel> getCanonical()throws UnsupportedOperationException {
            throw new UnsupportedOperationException("Property canonical not supported in version 1.4");
        }

        @Override
        public IPv6FlowLabel getMask()throws UnsupportedOperationException {
            throw new UnsupportedOperationException("Property mask not supported in version 1.4");
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_14;
        }

        @Override
        public OFOxmIpv6Flabel build() {
            // Use the explicitly-set value, otherwise inherit from the parent message.
            IPv6FlowLabel value = this.valueSet ? this.value : parentMessage.value;
            if(value == null)
                throw new NullPointerException("Property value must not be null");
            return new OFOxmIpv6FlabelVer14(
                value
            );
        }
    }

    /** Builder starting from defaults (value = IPv6FlowLabel.NONE when unset). */
    static class Builder implements OFOxmIpv6Flabel.Builder {
        // OF message fields
        private boolean valueSet;
        private IPv6FlowLabel value;

        @Override
        public long getTypeLen() {
            return 0x80003804L;
        }

        @Override
        public IPv6FlowLabel getValue() {
            return value;
        }

        @Override
        public OFOxmIpv6Flabel.Builder setValue(IPv6FlowLabel value) {
            this.value = value;
            this.valueSet = true;
            return this;
        }

        @Override
        public MatchField<IPv6FlowLabel> getMatchField() {
            return MatchField.IPV6_FLABEL;
        }

        @Override
        public boolean isMasked() {
            return false;
        }

        @Override
        public OFOxm<IPv6FlowLabel> getCanonical()throws UnsupportedOperationException {
            throw new UnsupportedOperationException("Property canonical not supported in version 1.4");
        }

        @Override
        public IPv6FlowLabel getMask()throws UnsupportedOperationException {
            throw new UnsupportedOperationException("Property mask not supported in version 1.4");
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_14;
        }

        @Override
        public OFOxmIpv6Flabel build() {
            IPv6FlowLabel value = this.valueSet ? this.value : DEFAULT_VALUE;
            if(value == null)
                throw new NullPointerException("Property value must not be null");
            return new OFOxmIpv6FlabelVer14(
                value
            );
        }
    }

    final static Reader READER = new Reader();

    /** Deserializes one entry from the wire; validates the fixed TLV header first. */
    static class Reader implements OFMessageReader<OFOxmIpv6Flabel> {
        @Override
        public OFOxmIpv6Flabel readFrom(ChannelBuffer bb) throws OFParseError {
            // fixed value property typeLen == 0x80003804L
            int typeLen = bb.readInt();
            if(typeLen != (int) 0x80003804)
                throw new OFParseError("Wrong typeLen: Expected=0x80003804L(0x80003804L), got="+typeLen);
            IPv6FlowLabel value = IPv6FlowLabel.read4Bytes(bb);
            OFOxmIpv6FlabelVer14 oxmIpv6FlabelVer14 = new OFOxmIpv6FlabelVer14(
                value
            );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", oxmIpv6FlabelVer14);
            return oxmIpv6FlabelVer14;
        }
    }

    // Guava hashing support: feeds this message's wire-relevant fields into a sink.
    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFOxmIpv6FlabelVer14Funnel FUNNEL = new OFOxmIpv6FlabelVer14Funnel();

    static class OFOxmIpv6FlabelVer14Funnel implements Funnel<OFOxmIpv6FlabelVer14> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFOxmIpv6FlabelVer14 message, PrimitiveSink sink) {
            // fixed value property typeLen = 0x80003804L
            sink.putInt((int) 0x80003804);
            message.value.putTo(sink);
        }
    }

    public void writeTo(ChannelBuffer bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();

    /** Serializes the fixed header followed by the 4-byte value — mirrors Reader. */
    static class Writer implements OFMessageWriter<OFOxmIpv6FlabelVer14> {
        @Override
        public void write(ChannelBuffer bb, OFOxmIpv6FlabelVer14 message) {
            // fixed value property typeLen = 0x80003804L
            bb.writeInt((int) 0x80003804);
            message.value.write4Bytes(bb);
        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFOxmIpv6FlabelVer14(");
        b.append("value=").append(value);
        b.append(")");
        return b.toString();
    }

    // Equality is determined solely by the flow-label value (the header is constant).
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFOxmIpv6FlabelVer14 other = (OFOxmIpv6FlabelVer14) obj;

        if (value == null) {
            if (other.value != null)
                return false;
        } else if (!value.equals(other.value))
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((value == null) ? 0 : value.hashCode());
        return result;
    }
}
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/monitoring/v3/group_service.proto package com.google.monitoring.v3; /** * * * <pre> * The `ListGroupMembers` response. * </pre> * * Protobuf type {@code google.monitoring.v3.ListGroupMembersResponse} */ public final class ListGroupMembersResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.monitoring.v3.ListGroupMembersResponse) ListGroupMembersResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListGroupMembersResponse.newBuilder() to construct. private ListGroupMembersResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListGroupMembersResponse() { members_ = java.util.Collections.emptyList(); nextPageToken_ = ""; totalSize_ = 0; } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ListGroupMembersResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { members_ = new java.util.ArrayList<com.google.api.MonitoredResource>(); mutable_bitField0_ |= 0x00000001; } members_.add( input.readMessage(com.google.api.MonitoredResource.parser(), extensionRegistry)); break; } case 18: { java.lang.String s = input.readStringRequireUtf8(); nextPageToken_ = s; break; } case 24: { totalSize_ = input.readInt32(); break; } default: { if (!parseUnknownFieldProto3(input, unknownFields, 
extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { members_ = java.util.Collections.unmodifiableList(members_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.monitoring.v3.GroupServiceProto .internal_static_google_monitoring_v3_ListGroupMembersResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.monitoring.v3.GroupServiceProto .internal_static_google_monitoring_v3_ListGroupMembersResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.monitoring.v3.ListGroupMembersResponse.class, com.google.monitoring.v3.ListGroupMembersResponse.Builder.class); } private int bitField0_; public static final int MEMBERS_FIELD_NUMBER = 1; private java.util.List<com.google.api.MonitoredResource> members_; /** * * * <pre> * A set of monitored resources in the group. * </pre> * * <code>repeated .google.api.MonitoredResource members = 1;</code> */ public java.util.List<com.google.api.MonitoredResource> getMembersList() { return members_; } /** * * * <pre> * A set of monitored resources in the group. * </pre> * * <code>repeated .google.api.MonitoredResource members = 1;</code> */ public java.util.List<? extends com.google.api.MonitoredResourceOrBuilder> getMembersOrBuilderList() { return members_; } /** * * * <pre> * A set of monitored resources in the group. 
* </pre> * * <code>repeated .google.api.MonitoredResource members = 1;</code> */ public int getMembersCount() { return members_.size(); } /** * * * <pre> * A set of monitored resources in the group. * </pre> * * <code>repeated .google.api.MonitoredResource members = 1;</code> */ public com.google.api.MonitoredResource getMembers(int index) { return members_.get(index); } /** * * * <pre> * A set of monitored resources in the group. * </pre> * * <code>repeated .google.api.MonitoredResource members = 1;</code> */ public com.google.api.MonitoredResourceOrBuilder getMembersOrBuilder(int index) { return members_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; private volatile java.lang.Object nextPageToken_; /** * * * <pre> * If there are more results than have been returned, then this field is * set to a non-empty value. To see the additional results, use that value as * `pageToken` in the next call to this method. * </pre> * * <code>string next_page_token = 2;</code> */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * If there are more results than have been returned, then this field is * set to a non-empty value. To see the additional results, use that value as * `pageToken` in the next call to this method. 
* </pre> * * <code>string next_page_token = 2;</code> */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int TOTAL_SIZE_FIELD_NUMBER = 3; private int totalSize_; /** * * * <pre> * The total number of elements matching this request. * </pre> * * <code>int32 total_size = 3;</code> */ public int getTotalSize() { return totalSize_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < members_.size(); i++) { output.writeMessage(1, members_.get(i)); } if (!getNextPageTokenBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } if (totalSize_ != 0) { output.writeInt32(3, totalSize_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < members_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, members_.get(i)); } if (!getNextPageTokenBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } if (totalSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, totalSize_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj 
instanceof com.google.monitoring.v3.ListGroupMembersResponse)) { return super.equals(obj); } com.google.monitoring.v3.ListGroupMembersResponse other = (com.google.monitoring.v3.ListGroupMembersResponse) obj; boolean result = true; result = result && getMembersList().equals(other.getMembersList()); result = result && getNextPageToken().equals(other.getNextPageToken()); result = result && (getTotalSize() == other.getTotalSize()); result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getMembersCount() > 0) { hash = (37 * hash) + MEMBERS_FIELD_NUMBER; hash = (53 * hash) + getMembersList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (37 * hash) + TOTAL_SIZE_FIELD_NUMBER; hash = (53 * hash) + getTotalSize(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.monitoring.v3.ListGroupMembersResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.monitoring.v3.ListGroupMembersResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.monitoring.v3.ListGroupMembersResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.monitoring.v3.ListGroupMembersResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data, extensionRegistry); } public static com.google.monitoring.v3.ListGroupMembersResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.monitoring.v3.ListGroupMembersResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.monitoring.v3.ListGroupMembersResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.monitoring.v3.ListGroupMembersResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.monitoring.v3.ListGroupMembersResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.monitoring.v3.ListGroupMembersResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.monitoring.v3.ListGroupMembersResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.monitoring.v3.ListGroupMembersResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.monitoring.v3.ListGroupMembersResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The `ListGroupMembers` response. * </pre> * * Protobuf type {@code google.monitoring.v3.ListGroupMembersResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.monitoring.v3.ListGroupMembersResponse) com.google.monitoring.v3.ListGroupMembersResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.monitoring.v3.GroupServiceProto .internal_static_google_monitoring_v3_ListGroupMembersResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.monitoring.v3.GroupServiceProto .internal_static_google_monitoring_v3_ListGroupMembersResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.monitoring.v3.ListGroupMembersResponse.class, com.google.monitoring.v3.ListGroupMembersResponse.Builder.class); } // Construct using com.google.monitoring.v3.ListGroupMembersResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); 
maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getMembersFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (membersBuilder_ == null) { members_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { membersBuilder_.clear(); } nextPageToken_ = ""; totalSize_ = 0; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.monitoring.v3.GroupServiceProto .internal_static_google_monitoring_v3_ListGroupMembersResponse_descriptor; } @java.lang.Override public com.google.monitoring.v3.ListGroupMembersResponse getDefaultInstanceForType() { return com.google.monitoring.v3.ListGroupMembersResponse.getDefaultInstance(); } @java.lang.Override public com.google.monitoring.v3.ListGroupMembersResponse build() { com.google.monitoring.v3.ListGroupMembersResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.monitoring.v3.ListGroupMembersResponse buildPartial() { com.google.monitoring.v3.ListGroupMembersResponse result = new com.google.monitoring.v3.ListGroupMembersResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (membersBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001)) { members_ = java.util.Collections.unmodifiableList(members_); bitField0_ = (bitField0_ & ~0x00000001); } result.members_ = members_; } else { result.members_ = membersBuilder_.build(); } result.nextPageToken_ = nextPageToken_; result.totalSize_ = totalSize_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return (Builder) super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { 
return (Builder) super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return (Builder) super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return (Builder) super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.monitoring.v3.ListGroupMembersResponse) { return mergeFrom((com.google.monitoring.v3.ListGroupMembersResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.monitoring.v3.ListGroupMembersResponse other) { if (other == com.google.monitoring.v3.ListGroupMembersResponse.getDefaultInstance()) return this; if (membersBuilder_ == null) { if (!other.members_.isEmpty()) { if (members_.isEmpty()) { members_ = other.members_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureMembersIsMutable(); members_.addAll(other.members_); } onChanged(); } } else { if (!other.members_.isEmpty()) { if (membersBuilder_.isEmpty()) { membersBuilder_.dispose(); membersBuilder_ = null; members_ = other.members_; bitField0_ = (bitField0_ & ~0x00000001); membersBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getMembersFieldBuilder() : null; } else { membersBuilder_.addAllMessages(other.members_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; onChanged(); } if (other.getTotalSize() != 0) { setTotalSize(other.getTotalSize()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.monitoring.v3.ListGroupMembersResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.monitoring.v3.ListGroupMembersResponse) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List<com.google.api.MonitoredResource> members_ = java.util.Collections.emptyList(); private void ensureMembersIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { members_ = new java.util.ArrayList<com.google.api.MonitoredResource>(members_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.api.MonitoredResource, com.google.api.MonitoredResource.Builder, com.google.api.MonitoredResourceOrBuilder> membersBuilder_; /** * * * <pre> * A set of monitored resources in the group. * </pre> * * <code>repeated .google.api.MonitoredResource members = 1;</code> */ public java.util.List<com.google.api.MonitoredResource> getMembersList() { if (membersBuilder_ == null) { return java.util.Collections.unmodifiableList(members_); } else { return membersBuilder_.getMessageList(); } } /** * * * <pre> * A set of monitored resources in the group. 
* </pre> * * <code>repeated .google.api.MonitoredResource members = 1;</code> */ public int getMembersCount() { if (membersBuilder_ == null) { return members_.size(); } else { return membersBuilder_.getCount(); } } /** * * * <pre> * A set of monitored resources in the group. * </pre> * * <code>repeated .google.api.MonitoredResource members = 1;</code> */ public com.google.api.MonitoredResource getMembers(int index) { if (membersBuilder_ == null) { return members_.get(index); } else { return membersBuilder_.getMessage(index); } } /** * * * <pre> * A set of monitored resources in the group. * </pre> * * <code>repeated .google.api.MonitoredResource members = 1;</code> */ public Builder setMembers(int index, com.google.api.MonitoredResource value) { if (membersBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureMembersIsMutable(); members_.set(index, value); onChanged(); } else { membersBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * A set of monitored resources in the group. * </pre> * * <code>repeated .google.api.MonitoredResource members = 1;</code> */ public Builder setMembers(int index, com.google.api.MonitoredResource.Builder builderForValue) { if (membersBuilder_ == null) { ensureMembersIsMutable(); members_.set(index, builderForValue.build()); onChanged(); } else { membersBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * A set of monitored resources in the group. * </pre> * * <code>repeated .google.api.MonitoredResource members = 1;</code> */ public Builder addMembers(com.google.api.MonitoredResource value) { if (membersBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureMembersIsMutable(); members_.add(value); onChanged(); } else { membersBuilder_.addMessage(value); } return this; } /** * * * <pre> * A set of monitored resources in the group. 
* </pre> * * <code>repeated .google.api.MonitoredResource members = 1;</code> */ public Builder addMembers(int index, com.google.api.MonitoredResource value) { if (membersBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureMembersIsMutable(); members_.add(index, value); onChanged(); } else { membersBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * A set of monitored resources in the group. * </pre> * * <code>repeated .google.api.MonitoredResource members = 1;</code> */ public Builder addMembers(com.google.api.MonitoredResource.Builder builderForValue) { if (membersBuilder_ == null) { ensureMembersIsMutable(); members_.add(builderForValue.build()); onChanged(); } else { membersBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * A set of monitored resources in the group. * </pre> * * <code>repeated .google.api.MonitoredResource members = 1;</code> */ public Builder addMembers(int index, com.google.api.MonitoredResource.Builder builderForValue) { if (membersBuilder_ == null) { ensureMembersIsMutable(); members_.add(index, builderForValue.build()); onChanged(); } else { membersBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * A set of monitored resources in the group. * </pre> * * <code>repeated .google.api.MonitoredResource members = 1;</code> */ public Builder addAllMembers( java.lang.Iterable<? extends com.google.api.MonitoredResource> values) { if (membersBuilder_ == null) { ensureMembersIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, members_); onChanged(); } else { membersBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * A set of monitored resources in the group. 
* </pre> * * <code>repeated .google.api.MonitoredResource members = 1;</code> */ public Builder clearMembers() { if (membersBuilder_ == null) { members_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { membersBuilder_.clear(); } return this; } /** * * * <pre> * A set of monitored resources in the group. * </pre> * * <code>repeated .google.api.MonitoredResource members = 1;</code> */ public Builder removeMembers(int index) { if (membersBuilder_ == null) { ensureMembersIsMutable(); members_.remove(index); onChanged(); } else { membersBuilder_.remove(index); } return this; } /** * * * <pre> * A set of monitored resources in the group. * </pre> * * <code>repeated .google.api.MonitoredResource members = 1;</code> */ public com.google.api.MonitoredResource.Builder getMembersBuilder(int index) { return getMembersFieldBuilder().getBuilder(index); } /** * * * <pre> * A set of monitored resources in the group. * </pre> * * <code>repeated .google.api.MonitoredResource members = 1;</code> */ public com.google.api.MonitoredResourceOrBuilder getMembersOrBuilder(int index) { if (membersBuilder_ == null) { return members_.get(index); } else { return membersBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * A set of monitored resources in the group. * </pre> * * <code>repeated .google.api.MonitoredResource members = 1;</code> */ public java.util.List<? extends com.google.api.MonitoredResourceOrBuilder> getMembersOrBuilderList() { if (membersBuilder_ != null) { return membersBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(members_); } } /** * * * <pre> * A set of monitored resources in the group. 
* </pre> * * <code>repeated .google.api.MonitoredResource members = 1;</code> */ public com.google.api.MonitoredResource.Builder addMembersBuilder() { return getMembersFieldBuilder() .addBuilder(com.google.api.MonitoredResource.getDefaultInstance()); } /** * * * <pre> * A set of monitored resources in the group. * </pre> * * <code>repeated .google.api.MonitoredResource members = 1;</code> */ public com.google.api.MonitoredResource.Builder addMembersBuilder(int index) { return getMembersFieldBuilder() .addBuilder(index, com.google.api.MonitoredResource.getDefaultInstance()); } /** * * * <pre> * A set of monitored resources in the group. * </pre> * * <code>repeated .google.api.MonitoredResource members = 1;</code> */ public java.util.List<com.google.api.MonitoredResource.Builder> getMembersBuilderList() { return getMembersFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.api.MonitoredResource, com.google.api.MonitoredResource.Builder, com.google.api.MonitoredResourceOrBuilder> getMembersFieldBuilder() { if (membersBuilder_ == null) { membersBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.api.MonitoredResource, com.google.api.MonitoredResource.Builder, com.google.api.MonitoredResourceOrBuilder>( members_, ((bitField0_ & 0x00000001) == 0x00000001), getParentForChildren(), isClean()); members_ = null; } return membersBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * If there are more results than have been returned, then this field is * set to a non-empty value. To see the additional results, use that value as * `pageToken` in the next call to this method. 
* </pre> * * <code>string next_page_token = 2;</code> */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * If there are more results than have been returned, then this field is * set to a non-empty value. To see the additional results, use that value as * `pageToken` in the next call to this method. * </pre> * * <code>string next_page_token = 2;</code> */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * If there are more results than have been returned, then this field is * set to a non-empty value. To see the additional results, use that value as * `pageToken` in the next call to this method. * </pre> * * <code>string next_page_token = 2;</code> */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; onChanged(); return this; } /** * * * <pre> * If there are more results than have been returned, then this field is * set to a non-empty value. To see the additional results, use that value as * `pageToken` in the next call to this method. * </pre> * * <code>string next_page_token = 2;</code> */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); onChanged(); return this; } /** * * * <pre> * If there are more results than have been returned, then this field is * set to a non-empty value. 
To see the additional results, use that value as * `pageToken` in the next call to this method. * </pre> * * <code>string next_page_token = 2;</code> */ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; onChanged(); return this; } private int totalSize_; /** * * * <pre> * The total number of elements matching this request. * </pre> * * <code>int32 total_size = 3;</code> */ public int getTotalSize() { return totalSize_; } /** * * * <pre> * The total number of elements matching this request. * </pre> * * <code>int32 total_size = 3;</code> */ public Builder setTotalSize(int value) { totalSize_ = value; onChanged(); return this; } /** * * * <pre> * The total number of elements matching this request. * </pre> * * <code>int32 total_size = 3;</code> */ public Builder clearTotalSize() { totalSize_ = 0; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFieldsProto3(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.monitoring.v3.ListGroupMembersResponse) } // @@protoc_insertion_point(class_scope:google.monitoring.v3.ListGroupMembersResponse) private static final com.google.monitoring.v3.ListGroupMembersResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.monitoring.v3.ListGroupMembersResponse(); } public static com.google.monitoring.v3.ListGroupMembersResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListGroupMembersResponse> PARSER = new com.google.protobuf.AbstractParser<ListGroupMembersResponse>() { @java.lang.Override public ListGroupMembersResponse 
parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ListGroupMembersResponse(input, extensionRegistry); } }; public static com.google.protobuf.Parser<ListGroupMembersResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListGroupMembersResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.monitoring.v3.ListGroupMembersResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
package com.jetbrains.python.debugger.pydev;

import com.google.common.collect.Collections2;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.Pair;
import com.intellij.xdebugger.XSourcePosition;
import com.intellij.xdebugger.breakpoints.SuspendPolicy;
import com.intellij.xdebugger.frame.XValueChildrenList;
import com.jetbrains.python.console.pydev.PydevCompletionVariant;
import com.jetbrains.python.debugger.*;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.*;

/**
 * A {@link ProcessDebugger} for a multi-process Python target in "client mode": this side
 * dials out to the debug script at a fixed {@code host:port}, and a fresh
 * {@link RemoteDebugger} connection is created for every subprocess the target reports
 * (see {@link #createDebugger()}).
 *
 * <p>Per-thread operations are routed to the connection that owns the thread via
 * {@link #debugger(String)}; process-wide operations (breakpoints, suspend/resume-all)
 * are broadcast to every connection.</p>
 *
 * @author traff
 */
public class ClientModeMultiProcessDebugger implements ProcessDebugger {
  private static final Logger LOG = Logger.getInstance(ClientModeMultiProcessDebugger.class);

  private final IPyDebugProcess myDebugProcess;
  // Address of the listening debug script; every new connection dials the same host/port.
  @NotNull private final String myHost;
  private final int myPort;
  // Created eagerly in the constructor; used for process-wide calls (handshake, run,
  // loadSource) and as the routing fallback in debugger(threadId).
  @NotNull private final RemoteDebugger myMainDebugger;
  // Lock guarding myOtherDebuggers. Despite the name, the list also contains the main
  // debugger (added in the constructor), so iterating it covers all connections.
  private final Object myOtherDebuggersObject = new Object();
  private final List<RemoteDebugger> myOtherDebuggers = Lists.newArrayList();
  // Maps Python thread ids to the debugger connection that owns them (filled lazily by
  // collectAllThreads()).
  private ThreadRegistry myThreadRegistry = new ThreadRegistry();

  public ClientModeMultiProcessDebugger(@NotNull final IPyDebugProcess debugProcess,
                                        @NotNull String host, int port) {
    myDebugProcess = debugProcess;
    myHost = host;
    myPort = port;

    myMainDebugger = createDebugger();
    myOtherDebuggers.add(myMainDebugger);
  }

  /**
   * Creates a connection to {@code myHost:myPort}. When the connected process reports that
   * it spawned a child process, a new connection attempt is kicked off on a pooled thread.
   */
  @NotNull
  private RemoteDebugger createDebugger() {
    return new RemoteDebugger(myDebugProcess, myHost, myPort) {
      @Override
      protected void onProcessCreatedEvent() {
        ApplicationManager.getApplication().executeOnPooledThread(ClientModeMultiProcessDebugger.this::connectToSubprocess);
      }
    };
  }

  @Override
  public boolean isConnected() {
    // Connected as long as at least one connection (main or subprocess) is alive.
    return getOtherDebuggers().stream().anyMatch(RemoteDebugger::isConnected);
  }

  @Override
  public void waitForConnect() throws Exception {
    // NOTE(review): fixed 500 ms delay before the first connect attempt — presumably to
    // give the debug script time to start listening; confirm the rationale.
    Thread.sleep(500L);

    myMainDebugger.waitForConnect();
  }

  /**
   * Single attempt to attach to a newly created subprocess: connect, register the new
   * debugger for thread routing, re-init the debug process and start the event loop.
   * On failure, pauses briefly and logs at debug level.
   */
  private void connectToSubprocess() {
    try {
      RemoteDebugger debugger = createDebugger();
      debugger.waitForConnect();

      addDebugger(debugger);
      myDebugProcess.init();

      debugger.run();
      return; // success — the new subprocess connection is live
    }
    catch (RuntimeException e) {
      LOG.warn(e);
    }
    catch (Exception ignored) {
      // NOTE(review): checked exceptions are deliberately swallowed here; the failure is
      // only surfaced via the debug-level log below — verify this is intended.
    }
    try {
      //noinspection BusyWait
      Thread.sleep(50L);
    }
    catch (InterruptedException ignored) {
    }
    LOG.debug("Could not connect to subprocess");
  }

  @Override
  public void close() {
    // Closes every connection, including the main one (allDebuggers() contains it).
    for (ProcessDebugger d : allDebuggers()) {
      d.close();
    }
  }

  /** Snapshot of all debugger connections (main + subprocesses), safe to iterate. */
  private List<RemoteDebugger> allDebuggers() {
    List<RemoteDebugger> result;
    synchronized (myOtherDebuggersObject) {
      result = new ArrayList<>(myOtherDebuggers);
    }
    return result;
  }

  @Override
  public void disconnect() {
    for (ProcessDebugger d : allDebuggers()) {
      d.disconnect();
    }
  }

  /** The protocol handshake is performed only on the main connection. */
  @Override
  public String handshake() throws PyDebuggerException {
    return myMainDebugger.handshake();
  }

  // --- Per-thread operations: each is routed to the connection owning threadId. ---

  @Override
  public PyDebugValue evaluate(String threadId, String frameId, String expression, boolean execute)
    throws PyDebuggerException {
    return debugger(threadId).evaluate(threadId, frameId, expression, execute);
  }

  @Override
  public PyDebugValue evaluate(String threadId, String frameId, String expression, boolean execute, boolean trimResult)
    throws PyDebuggerException {
    return debugger(threadId).evaluate(threadId, frameId, expression, execute, trimResult);
  }

  @Override
  public void consoleExec(String threadId, String frameId, String expression, PyDebugCallback<String> callback) {
    debugger(threadId).consoleExec(threadId, frameId, expression, callback);
  }

  @Override
  public XValueChildrenList loadFrame(String threadId, String frameId) throws PyDebuggerException {
    return debugger(threadId).loadFrame(threadId, frameId);
  }

  @Override
  public XValueChildrenList loadVariable(String threadId, String frameId, PyDebugValue var) throws PyDebuggerException {
    return debugger(threadId).loadVariable(threadId, frameId, var);
  }

  public ArrayChunk loadArrayItems(String threadId,
                                   String frameId,
                                   PyDebugValue var,
                                   int rowOffset,
                                   int colOffset,
                                   int rows,
                                   int cols,
                                   String format) throws PyDebuggerException {
    return debugger(threadId).loadArrayItems(threadId, frameId, var, rowOffset, colOffset, rows, cols, format);
  }

  @Override
  public void loadReferrers(String threadId, String frameId, PyReferringObjectsValue var,
                            PyDebugCallback<XValueChildrenList> callback) {
    debugger(threadId).loadReferrers(threadId, frameId, var, callback);
  }

  /**
   * Resolves the debugger connection that owns {@code threadId}: first via the registry,
   * then by scanning the live threads of every attached connection, finally falling back
   * to the main debugger.
   */
  @NotNull
  private ProcessDebugger debugger(@NotNull String threadId) {
    ProcessDebugger debugger = myThreadRegistry.getDebugger(threadId);
    if (debugger != null) {
      return debugger;
    }
    else {
      // thread is not found in registry - let's search for it in attached debuggers
      for (ProcessDebugger d : getOtherDebuggers()) {
        for (PyThreadInfo thread : d.getThreads()) {
          if (threadId.equals(thread.getId())) {
            return d;
          }
        }
      }

      // if not found then return main debugger
      return myMainDebugger;
    }
  }

  @Override
  public PyDebugValue changeVariable(String threadId, String frameId, PyDebugValue var, String value)
    throws PyDebuggerException {
    return debugger(threadId).changeVariable(threadId, frameId, var, value);
  }

  @Override
  public void loadFullVariableValues(@NotNull String threadId,
                                     @NotNull String frameId,
                                     @NotNull List<PyFrameAccessor.PyAsyncValue<String>> vars)
    throws PyDebuggerException {
    debugger(threadId).loadFullVariableValues(threadId, frameId, vars);
  }

  /** Source lookup always goes through the main connection. */
  @Override
  public String loadSource(String path) {
    return myMainDebugger.loadSource(path);
  }

  /**
   * Thread-id to owning-connection map, plus the naming helper used to disambiguate
   * threads when several processes are being debugged.
   */
  private static class ThreadRegistry {
    private Map<String, RemoteDebugger> myThreadIdToDebugger = Maps.newHashMap();

    public void register(String id, RemoteDebugger debugger) {
      myThreadIdToDebugger.put(id, debugger);
    }

    public RemoteDebugger getDebugger(String threadId) {
      return myThreadIdToDebugger.get(threadId);
    }

    /**
     * Builds a display name of the form {@code name(id-prefix)}. The id is truncated at
     * its second underscore — presumably keeping only the process-identifying prefix of
     * the thread id; confirm against the pydevd thread-id format.
     */
    public static String threadName(@NotNull String name, @NotNull String id) {
      int indx = id.indexOf("_", id.indexOf("_") + 1);
      if (indx != -1) {
        id = id.substring(0, indx);
      }

      return name + "(" + id + ")";
    }
  }

  @Override
  public Collection<PyThreadInfo> getThreads() {
    // Drop connections that have gone away before reporting threads.
    cleanOtherDebuggers();

    List<PyThreadInfo> threads = collectAllThreads();

    if (!isOtherDebuggersEmpty()) {
      //here we add process id to thread name in case there are more then one process
      return Collections.unmodifiableCollection(Collections2.transform(threads, t -> {
        String threadName = ThreadRegistry.threadName(t.getName(), t.getId());
        PyThreadInfo newThread =
          new PyThreadInfo(t.getId(), threadName, t.getFrames(),
                           t.getStopReason(),
                           t.getMessage());
        newThread.updateState(t.getState(), t.getFrames());
        return newThread;
      }));
    }
    else {
      return Collections.unmodifiableCollection(threads);
    }
  }

  /** Gathers threads from every connection and refreshes the thread→debugger registry. */
  private List<PyThreadInfo> collectAllThreads() {
    List<PyThreadInfo> result = Lists.newArrayList();

    result.addAll(myMainDebugger.getThreads());

    //collect threads and add them to registry to faster access
    //we don't register mainDebugger as it is default if there is no mapping
    // NOTE(review): myOtherDebuggers is iterated here without holding
    // myOtherDebuggersObject — confirm this is safe with concurrent addDebugger() calls.
    for (RemoteDebugger d : myOtherDebuggers) {
      result.addAll(d.getThreads());
      for (PyThreadInfo t : d.getThreads()) {
        myThreadRegistry.register(t.getId(), d);
      }
    }


    return result;
  }

  private void cleanOtherDebuggers() {
    removeDisconnected(getOtherDebuggers());
  }

  /**
   * Replaces the connection list with only the still-connected debuggers; leaves the list
   * untouched (no lock taken) when everything is still connected.
   */
  private void removeDisconnected(ArrayList<RemoteDebugger> debuggers) {
    boolean allConnected = true;
    for (RemoteDebugger d : debuggers) {
      if (!d.isConnected()) {
        allConnected = false;
      }
    }
    if (!allConnected) {
      List<RemoteDebugger> newList = Lists.newArrayList();
      for (RemoteDebugger d : debuggers) {
        if (d.isConnected()) {
          newList.add(d);
        }
      }

      synchronized (myOtherDebuggersObject) {
        myOtherDebuggers.clear();
        myOtherDebuggers.addAll(newList);
      }
    }
  }

  /** Copy of the connection list taken under the lock (includes the main debugger). */
  private ArrayList<RemoteDebugger> getOtherDebuggers() {
    synchronized (myOtherDebuggersObject) {
      return Lists.newArrayList(myOtherDebuggers);
    }
  }

  private boolean isOtherDebuggersEmpty() {
    synchronized (myOtherDebuggersObject) {
      return myOtherDebuggers.isEmpty();
    }
  }

  // --- Process-wide operations: broadcast to every connection. ---

  @Override
  public void execute(@NotNull AbstractCommand command) {
    for (ProcessDebugger d : allDebuggers()) {
      d.execute(command);
    }
  }

  @Override
  public void suspendAllThreads() {
    for (ProcessDebugger d : allDebuggers()) {
      d.suspendAllThreads();
    }
  }

  @Override
  public void suspendThread(String threadId) {
    debugger(threadId).suspendThread(threadId);
  }

  /** Starts the main connection's event loop; subprocess loops start in connectToSubprocess(). */
  @Override
  public void run() throws PyDebuggerException {
    myMainDebugger.run();
  }

  @Override
  public void smartStepInto(String threadId, String functionName) {
    debugger(threadId).smartStepInto(threadId, functionName);
  }

  @Override
  public void resumeOrStep(String threadId, ResumeOrStepCommand.Mode mode) {
    debugger(threadId).resumeOrStep(threadId, mode);
  }

  @Override
  public void setNextStatement(@NotNull String threadId,
                               @NotNull XSourcePosition sourcePosition,
                               @Nullable String functionName,
                               @NotNull PyDebugCallback<Pair<Boolean, String>> callback) {
    debugger(threadId).setNextStatement(threadId, sourcePosition, functionName, callback);
  }

  // Breakpoints must exist in every process, so they are set/removed on all connections.

  @Override
  public void setTempBreakpoint(@NotNull String type, @NotNull String file, int line) {
    for (ProcessDebugger d : allDebuggers()) {
      d.setTempBreakpoint(type, file, line);
    }
  }

  @Override
  public void removeTempBreakpoint(@NotNull String file, int line) {
    for (ProcessDebugger d : allDebuggers()) {
      d.removeTempBreakpoint(file, line);
    }
  }

  @Override
  public void setBreakpoint(@NotNull String typeId,
                            @NotNull String file,
                            int line,
                            @Nullable String condition,
                            @Nullable String logExpression,
                            @Nullable String funcName,
                            @NotNull SuspendPolicy policy) {
    for (ProcessDebugger d : allDebuggers()) {
      d.setBreakpoint(typeId, file, line, condition, logExpression, funcName, policy);
    }
  }

  @Override
  public void removeBreakpoint(@NotNull String typeId, @NotNull String file, int line) {
    for (ProcessDebugger d : allDebuggers()) {
      d.removeBreakpoint(typeId, file, line);
    }
  }

  @Override
  public void setShowReturnValues(boolean isShowReturnValues) {
    for (ProcessDebugger d : allDebuggers()) {
      d.setShowReturnValues(isShowReturnValues);
    }
  }

  /** Registers a freshly connected subprocess debugger under the shared lock. */
  private void addDebugger(RemoteDebugger debugger) {
    synchronized (myOtherDebuggersObject) {
      myOtherDebuggers.add(debugger);
    }
  }

  /** Close events are observed only on the main connection. */
  public void addCloseListener(RemoteDebuggerCloseListener listener) {
    myMainDebugger.addCloseListener(listener);
  }

  @Override
  public List<PydevCompletionVariant> getCompletions(String threadId, String frameId, String prefix) {
    return debugger(threadId).getCompletions(threadId, frameId, prefix);
  }

  @Override
  public String getDescription(String threadId, String frameId, String cmd) {
    return debugger(threadId).getDescription(threadId, frameId, cmd);
  }

  @Override
  public void addExceptionBreakpoint(ExceptionBreakpointCommandFactory factory) {
    // A dedicated command instance is built per connection.
    for (RemoteDebugger d : allDebuggers()) {
      d.execute(factory.createAddCommand(d));
    }
  }

  @Override
  public void removeExceptionBreakpoint(ExceptionBreakpointCommandFactory factory) {
    for (RemoteDebugger d : allDebuggers()) {
      d.execute(factory.createRemoveCommand(d));
    }
  }

  @Override
  public void suspendOtherThreads(PyThreadInfo thread) {
    for (RemoteDebugger d : allDebuggers()) {
      // we should notify the debugger in each process about suspending all threads
      d.suspendOtherThreads(thread);
    }
  }
}
package org.yeastrc.xlink.dao;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

import org.slf4j.LoggerFactory;
import org.slf4j.Logger;

import org.yeastrc.xlink.db.DBConnectionFactory;
import org.yeastrc.xlink.dto.SearchProgramsPerSearchDTO;

/**
 * DAO for the search_programs_per_search table.
 *
 * Rewritten to use try-with-resources: the original hand-rolled finally blocks
 * swallowed close() failures in empty catch(Throwable) handlers; JDBC resources
 * are now closed automatically and close errors are no longer silently dropped.
 */
public class SearchProgramsPerSearchDAO {

	private static final Logger log = LoggerFactory.getLogger( SearchProgramsPerSearchDAO.class );

	//  Singleton-style access; each call returns a fresh stateless instance.
	private SearchProgramsPerSearchDAO() { }
	public static SearchProgramsPerSearchDAO getInstance() {
		return new SearchProgramsPerSearchDAO();
	}

	//  Promoted to a static final constant: the SQL never varies per instance.
	private static final String INSERT_SQL =
			"INSERT INTO search_programs_per_search ( search_id, name, display_name, version, description ) "
			+ "VALUES ( ?, ?, ?, ?, ? )";

	/**
	 * Look up a single row by primary key.
	 *
	 * @param id primary key value of search_programs_per_search.id
	 * @return the populated DTO, or null if no row has that id
	 * @throws Exception on any database error (logged here before rethrow)
	 */
	public SearchProgramsPerSearchDTO getSearchProgramDTOForId( int id ) throws Exception {

		//  SQL text kept byte-identical to the original (including the doubled space).
		String sql = "SELECT * " + " FROM search_programs_per_search WHERE id = ?";

		try ( Connection conn = DBConnectionFactory.getConnection( DBConnectionFactory.PROXL );
			  PreparedStatement pstmt = conn.prepareStatement( sql ) ) {

			pstmt.setInt( 1, id );

			try ( ResultSet rs = pstmt.executeQuery() ) {

				if ( rs.next() ) {
					return getFromResultSet( rs );
				}
				return null;   //  not found
			}

		} catch ( Exception e ) {

			log.error( "ERROR: database connection: '" + DBConnectionFactory.PROXL + "' sql: " + sql, e );
			throw e;
		}
	}

	/**
	 * Populate a DTO from the current row of the result set.
	 *
	 * @param rs result set positioned on a search_programs_per_search row
	 * @return a new DTO holding that row's column values
	 * @throws SQLException if a column read fails
	 */
	private SearchProgramsPerSearchDTO getFromResultSet( ResultSet rs ) throws SQLException {

		SearchProgramsPerSearchDTO item = new SearchProgramsPerSearchDTO();

		item.setId( rs.getInt( "id" ) );
		item.setSearchId( rs.getInt( "search_id" ) );
		item.setName( rs.getString( "name" ) );
		item.setDisplayName( rs.getString( "display_name" ) );
		item.setVersion( rs.getString( "version" ) );
		item.setDescription( rs.getString( "description" ) );

		return item;
	}

	/**
	 * Insert the item using a connection obtained (and closed) internally.
	 *
	 * @param item DTO to insert; its id is set from the generated key on success
	 * @throws Exception on connection or insert failure (logged here before rethrow)
	 */
	public void save( SearchProgramsPerSearchDTO item ) throws Exception {

		try ( Connection conn = DBConnectionFactory.getConnection( DBConnectionFactory.PROXL ) ) {

			save( item, conn );

		} catch ( Exception e ) {

			String msg = "ERROR inserting item. Error getting database connection: '" + DBConnectionFactory.PROXL + "'"
					+ "\n item: " + item;
			log.error( msg, e );
			throw e;
		}
	}

	/**
	 * Insert the item on a caller-supplied connection (caller retains ownership of
	 * the connection and must close it).
	 *
	 * @param item DTO to insert; its id is set from the generated key on success
	 * @param conn open database connection; not closed by this method
	 * @throws Exception if the insert fails or no generated key is returned
	 */
	public void save( SearchProgramsPerSearchDTO item, Connection conn ) throws Exception {

		final String sql = INSERT_SQL;

		try ( PreparedStatement pstmt = conn.prepareStatement( sql, Statement.RETURN_GENERATED_KEYS ) ) {

			int counter = 0;

			counter++;
			pstmt.setInt( counter, item.getSearchId() );
			counter++;
			pstmt.setString( counter, item.getName() );
			counter++;
			pstmt.setString( counter, item.getDisplayName() );
			counter++;
			pstmt.setString( counter, item.getVersion() );
			counter++;
			pstmt.setString( counter, item.getDescription() );

			pstmt.executeUpdate();

			try ( ResultSet rs = pstmt.getGeneratedKeys() ) {

				if ( rs.next() ) {
					item.setId( rs.getInt( 1 ) );
				} else {
					throw new Exception( "Failed to insert item" );
				}
			}

		} catch ( Exception e ) {

			String msg = "ERROR inserting item. database connection: '" + DBConnectionFactory.PROXL + "'"
					+ "\n item: " + item
					+ "\nsql: " + sql;
			log.error( msg, e );
			throw e;
		}
	}
}
/* * Copyright (c) Facebook, Inc. and its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.buck.cli; import static com.facebook.buck.jvm.java.JavaTest.COMPILED_TESTS_LIBRARY_FLAVOR; import static org.junit.Assert.assertEquals; import com.facebook.buck.android.AndroidBinaryBuilder; import com.facebook.buck.android.AndroidLibraryBuilder; import com.facebook.buck.core.config.FakeBuckConfig; import com.facebook.buck.core.model.BuildTarget; import com.facebook.buck.core.model.BuildTargetFactory; import com.facebook.buck.core.model.impl.BuildTargetPaths; import com.facebook.buck.core.model.targetgraph.ImmutableTargetGraphCreationResult; import com.facebook.buck.core.model.targetgraph.TargetGraph; import com.facebook.buck.core.model.targetgraph.TargetGraphFactory; import com.facebook.buck.core.model.targetgraph.TargetNode; import com.facebook.buck.core.model.targetgraph.TestTargetGraphCreationResultFactory; import com.facebook.buck.core.sourcepath.FakeSourcePath; import com.facebook.buck.io.filesystem.ProjectFilesystem; import com.facebook.buck.io.filesystem.impl.FakeProjectFilesystem; import com.facebook.buck.jvm.java.JavaBinaryRuleBuilder; import com.facebook.buck.jvm.java.JavaLibraryBuilder; import com.facebook.buck.jvm.java.JavaTestBuilder; import com.facebook.buck.jvm.java.KeystoreBuilder; import com.facebook.buck.testutil.TestConsole; import com.facebook.buck.util.json.ObjectMappers; import com.facebook.buck.versions.VersionedAliasBuilder; import 
com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Sets;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.SortedSet;
import org.junit.Before;
import org.junit.Test;

/**
 * Tests for the {@code buck audit classpath} command, covering plain-text output, JSON output,
 * and both variants with build versions enabled in the buck config.
 */
public class AuditClasspathCommandTest {

  // Fake console that captures everything the command writes to stdout/stderr.
  private TestConsole console;
  private AuditClasspathCommand auditClasspathCommand;
  private CommandRunnerParams params;
  private ProjectFilesystem projectFilesystem;

  @Before
  public void setUp() {
    console = new TestConsole();
    auditClasspathCommand = new AuditClasspathCommand();
    params =
        CommandRunnerParamsForTesting.builder()
            .setConsole(console)
            .setToolchainProvider(AndroidBinaryBuilder.createToolchainProviderForAndroidBinary())
            .build();
    projectFilesystem = new FakeProjectFilesystem();
  }

  @Test
  public void testClassPathOutput() throws Exception {
    // Test that no output is created.
    auditClasspathCommand.printClasspath(
        params,
        TestTargetGraphCreationResultFactory.create(
            TargetGraphFactory.newInstance(ImmutableSet.of())));
    assertEquals("", console.getTextWrittenToStdOut());
    assertEquals("", console.getTextWrittenToStdErr());

    // Add build rules such that all implementations of HasClasspathEntries are tested.
    BuildTarget javaLibraryTarget = BuildTargetFactory.newInstance("//:test-java-library");
    BuildTarget testJavaTarget = BuildTargetFactory.newInstance("//:project-tests");
    BuildTarget androidLibraryTarget = BuildTargetFactory.newInstance("//:test-android-library");
    BuildTarget keystoreTarget = BuildTargetFactory.newInstance("//:keystore");
    BuildTarget testAndroidTarget = BuildTargetFactory.newInstance("//:test-android-binary");

    TargetNode<?> javaLibraryNode =
        JavaLibraryBuilder.createBuilder(javaLibraryTarget)
            .addSrc(Paths.get("src/com/facebook/TestJavaLibrary.java"))
            .addTest(testJavaTarget)
            .build();
    TargetNode<?> androidLibraryNode =
        AndroidLibraryBuilder.createBuilder(androidLibraryTarget)
            .addSrc(Paths.get("src/com/facebook/TestAndroidLibrary.java"))
            .addDep(javaLibraryTarget)
            .build();
    TargetNode<?> keystoreNode =
        KeystoreBuilder.createBuilder(keystoreTarget)
            .setStore(FakeSourcePath.of("debug.keystore"))
            .setProperties(FakeSourcePath.of("keystore.properties"))
            .build();
    TargetNode<?> testAndroidNode =
        AndroidBinaryBuilder.createBuilder(testAndroidTarget)
            .setManifest(FakeSourcePath.of("AndroidManifest.xml"))
            .setKeystore(keystoreTarget)
            .setOriginalDeps(ImmutableSortedSet.of(androidLibraryTarget, javaLibraryTarget))
            .build();
    TargetNode<?> testJavaNode =
        JavaTestBuilder.createBuilder(testJavaTarget)
            .addDep(javaLibraryTarget)
            .addSrc(Paths.get("src/com/facebook/test/ProjectTests.java"))
            .build();

    TargetGraph targetGraph =
        TargetGraphFactory.newInstance(
            ImmutableSet.of(
                javaLibraryNode, androidLibraryNode, keystoreNode, testAndroidNode, testJavaNode));
    // No targets requested yet, so the command should still print nothing.
    auditClasspathCommand.printClasspath(
        params, TestTargetGraphCreationResultFactory.create(targetGraph));

    // Still empty.
    assertEquals("", console.getTextWrittenToStdOut());
    assertEquals("", console.getTextWrittenToStdErr());

    // Request the top build target. This will test the following:
    // - paths don't appear multiple times when dependencies are referenced multiple times.
    // - dependencies are walked
    // - independent targets in the same BUCK file are not included in the output
    auditClasspathCommand.printClasspath(
        params,
        ImmutableTargetGraphCreationResult.of(targetGraph, ImmutableSet.of(testAndroidTarget)));

    Path root = projectFilesystem.getRootPath();
    SortedSet<String> expectedPaths =
        Sets.newTreeSet(
            Arrays.asList(
                root.resolve(
                        BuildTargetPaths.getGenPath(
                                params.getCell().getFilesystem(),
                                androidLibraryTarget,
                                "lib__%s__output")
                            .resolve(androidLibraryTarget.getShortName() + ".jar"))
                    .toString(),
                root.resolve(
                        BuildTargetPaths.getGenPath(
                                params.getCell().getFilesystem(),
                                javaLibraryTarget,
                                "lib__%s__output")
                            .resolve(javaLibraryTarget.getShortName() + ".jar"))
                    .toString()));
    String expectedClasspath =
        String.join(System.lineSeparator(), expectedPaths) + System.lineSeparator();

    assertEquals(expectedClasspath, console.getTextWrittenToStdOut());
    assertEquals("", console.getTextWrittenToStdErr());

    // Add independent test target. This will test:
    // - the union of the classpath is output.
    // - all rules have implemented HasClasspathEntries.
    // Note that the output streams are reset.
    setUp();
    auditClasspathCommand.printClasspath(
        params,
        ImmutableTargetGraphCreationResult.of(
            TargetGraphFactory.newInstance(
                ImmutableSet.of(
                    javaLibraryNode,
                    androidLibraryNode,
                    keystoreNode,
                    testAndroidNode,
                    testJavaNode)),
            ImmutableSet.of(
                testAndroidTarget, javaLibraryTarget, androidLibraryTarget, testJavaTarget)));

    // The java_test rule contributes its compiled-tests flavored jar to the classpath.
    BuildTarget testJavaCompiledJar = testJavaTarget.withFlavors(COMPILED_TESTS_LIBRARY_FLAVOR);

    expectedPaths.add(
        root.resolve(
                BuildTargetPaths.getGenPath(
                        params.getCell().getFilesystem(), testJavaCompiledJar, "lib__%s__output")
                    .resolve(testJavaCompiledJar.getShortNameAndFlavorPostfix() + ".jar"))
            .toString());
    expectedClasspath =
        String.join(System.lineSeparator(), expectedPaths) + System.lineSeparator();
    assertEquals(expectedClasspath, console.getTextWrittenToStdOut());
    assertEquals("", console.getTextWrittenToStdErr());
  }

  // Template for the JSON tests; the %s placeholders are filled with JSON-encoded jar paths.
  private static final String EXPECTED_JSON =
      Joiner.on("")
          .join(
              "{",
              "\"//:test-android-library\":",
              "[",
              "%s,",
              "%s",
              "],",
              "\"//:test-java-library\":",
              "[",
              "%s",
              "]",
              "}");

  @Test
  public void testJsonClassPathOutput() throws Exception {
    // Build a DependencyGraph of build rules manually.
    BuildTarget javaTarget = BuildTargetFactory.newInstance("//:test-java-library");
    TargetNode<?> javaNode =
        JavaLibraryBuilder.createBuilder(javaTarget)
            .addSrc(Paths.get("src/com/facebook/TestJavaLibrary.java"))
            .build();
    BuildTarget androidTarget = BuildTargetFactory.newInstance("//:test-android-library");
    TargetNode<?> androidNode =
        AndroidLibraryBuilder.createBuilder(androidTarget)
            .addSrc(Paths.get("src/com/facebook/TestAndroidLibrary.java"))
            .addDep(javaTarget)
            .build();

    auditClasspathCommand.printJsonClasspath(
        params,
        ImmutableTargetGraphCreationResult.of(
            TargetGraphFactory.newInstance(ImmutableSet.of(androidNode, javaNode)),
            ImmutableSet.of(androidTarget, javaTarget)));

    Path root = projectFilesystem.getRootPath();
    ObjectMapper objectMapper = ObjectMappers.legacyCreate();
    // The android library lists both jars (its own and its java dep's); the java library its own.
    String expected =
        String.format(
            EXPECTED_JSON,
            objectMapper.valueToTree(
                root.resolve(
                    BuildTargetPaths.getGenPath(
                            params.getCell().getFilesystem(), javaTarget, "lib__%s__output")
                        .resolve(javaTarget.getShortName() + ".jar"))),
            objectMapper.valueToTree(
                root.resolve(
                    BuildTargetPaths.getGenPath(
                            params.getCell().getFilesystem(), androidTarget, "lib__%s__output")
                        .resolve(androidTarget.getShortName() + ".jar"))),
            objectMapper.valueToTree(
                root.resolve(
                    BuildTargetPaths.getGenPath(
                            params.getCell().getFilesystem(), javaTarget, "lib__%s__output")
                        .resolve(javaTarget.getShortName() + ".jar"))));
    assertEquals(expected, console.getTextWrittenToStdOut());
    assertEquals("", console.getTextWrittenToStdErr());
  }

  @Test
  public void testClassPathWithVersions() throws Exception {
    // Build the test target graph.
    TargetNode<?> javaLibrary =
        JavaLibraryBuilder.createBuilder(BuildTargetFactory.newInstance("//:test-java-library"))
            .addSrc(Paths.get("src/com/facebook/TestJavaLibrary.java"))
            .build();
    TargetNode<?> androidLibrary =
        JavaLibraryBuilder.createBuilder(BuildTargetFactory.newInstance("//:test-android-library"))
            .addSrc(Paths.get("src/com/facebook/TestAndroidLibrary.java"))
            .addDep(javaLibrary.getBuildTarget())
            .build();
    TargetNode<?> version =
        new VersionedAliasBuilder(BuildTargetFactory.newInstance("//:version"))
            .setVersions("1.0", "//:test-android-library")
            .build();
    TargetNode<?> binary =
        new JavaBinaryRuleBuilder(BuildTargetFactory.newInstance("//:rule"))
            .setDeps(ImmutableSortedSet.of(version.getBuildTarget()))
            .build();
    TargetGraph targetGraph =
        TargetGraphFactory.newInstance(javaLibrary, androidLibrary, version, binary);

    // Run the command.
    ImmutableSet<BuildTarget> targets =
        ImmutableSet.of(androidLibrary.getBuildTarget(), javaLibrary.getBuildTarget());
    auditClasspathCommand.printClasspath(
        params.withBuckConfig(
            FakeBuckConfig.builder()
                .setSections(ImmutableMap.of("build", ImmutableMap.of("versions", "true")))
                .build()),
        ImmutableTargetGraphCreationResult.of(targetGraph, targets));

    // Verify output.
    Path root = projectFilesystem.getRootPath();
    ImmutableSortedSet<String> expectedPaths =
        ImmutableSortedSet.of(
            root.resolve(
                    BuildTargetPaths.getGenPath(
                            params.getCell().getFilesystem(),
                            androidLibrary.getBuildTarget(),
                            "lib__%s__output")
                        .resolve(androidLibrary.getBuildTarget().getShortName() + ".jar"))
                .toString(),
            root.resolve(
                    BuildTargetPaths.getGenPath(
                            params.getCell().getFilesystem(),
                            javaLibrary.getBuildTarget(),
                            "lib__%s__output")
                        .resolve(javaLibrary.getBuildTarget().getShortName() + ".jar"))
                .toString());
    String expectedClasspath =
        String.join(System.lineSeparator(), expectedPaths) + System.lineSeparator();
    assertEquals(expectedClasspath, console.getTextWrittenToStdOut());
    assertEquals("", console.getTextWrittenToStdErr());
  }

  @Test
  public void testJsonClassPathWithVersions() throws Exception {
    // Build the test target graph.
    TargetNode<?> javaLibrary =
        JavaLibraryBuilder.createBuilder(BuildTargetFactory.newInstance("//:test-java-library"))
            .addSrc(Paths.get("src/com/facebook/TestJavaLibrary.java"))
            .build();
    TargetNode<?> androidLibrary =
        JavaLibraryBuilder.createBuilder(BuildTargetFactory.newInstance("//:test-android-library"))
            .addSrc(Paths.get("src/com/facebook/TestAndroidLibrary.java"))
            .addDep(javaLibrary.getBuildTarget())
            .build();
    TargetNode<?> version =
        new VersionedAliasBuilder(BuildTargetFactory.newInstance("//:version"))
            .setVersions("1.0", "//:test-android-library")
            .build();
    TargetNode<?> binary =
        new JavaBinaryRuleBuilder(BuildTargetFactory.newInstance("//:rule"))
            .setDeps(ImmutableSortedSet.of(version.getBuildTarget()))
            .build();
    TargetGraph targetGraph =
        TargetGraphFactory.newInstance(javaLibrary, androidLibrary, version, binary);

    // Run the command.
    ImmutableSet<BuildTarget> targets =
        ImmutableSet.of(androidLibrary.getBuildTarget(), javaLibrary.getBuildTarget());
    auditClasspathCommand.printJsonClasspath(
        params.withBuckConfig(
            FakeBuckConfig.builder()
                .setSections(ImmutableMap.of("build", ImmutableMap.of("versions", "true")))
                .build()),
        ImmutableTargetGraphCreationResult.of(targetGraph, targets));

    // Verify output.
    Path root = projectFilesystem.getRootPath();
    ObjectMapper objectMapper = ObjectMappers.legacyCreate();
    String expected =
        String.format(
            EXPECTED_JSON,
            objectMapper.valueToTree(
                root.resolve(
                    BuildTargetPaths.getGenPath(
                            params.getCell().getFilesystem(),
                            javaLibrary.getBuildTarget(),
                            "lib__%s__output")
                        .resolve(javaLibrary.getBuildTarget().getShortName() + ".jar"))),
            objectMapper.valueToTree(
                root.resolve(
                    BuildTargetPaths.getGenPath(
                            params.getCell().getFilesystem(),
                            androidLibrary.getBuildTarget(),
                            "lib__%s__output")
                        .resolve(androidLibrary.getBuildTarget().getShortName() + ".jar"))),
            objectMapper.valueToTree(
                root.resolve(
                    BuildTargetPaths.getGenPath(
                            params.getCell().getFilesystem(),
                            javaLibrary.getBuildTarget(),
                            "lib__%s__output")
                        .resolve(javaLibrary.getBuildTarget().getShortName() + ".jar"))));
    assertEquals(expected, console.getTextWrittenToStdOut());
    assertEquals("", console.getTextWrittenToStdErr());
  }
}
/*
 * Copyright 2013 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.zuul.scriptManager;

import com.netflix.zuul.filters.FilterType;
import net.jcip.annotations.ThreadSafe;

import java.util.Date;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicBoolean;

/**
 * Representation of a ZuulFilter for representing and storing in a database.
 */
@ThreadSafe
public class FilterInfo implements Comparable<FilterInfo> {

    private final String filter_id;
    private final String filter_name;
    private final String filter_code;
    private final FilterType filter_type;
    private final String filter_disablePropertyName;
    private final String filter_order;
    private final String application_name;
    private int revision;
    // May remain null: the short constructor never sets it.
    private Date creationDate;

    /* using AtomicBoolean so we can pass it into EndpointScriptMonitor */
    private final AtomicBoolean isActive = new AtomicBoolean();
    private final AtomicBoolean isCanary = new AtomicBoolean();

    /**
     * Constructor. The new filter starts out inactive and not a canary; revision and
     * creation date are left unset.
     *
     * @param filter_id unique key for the filter (see {@link #buildFilterID})
     * @param filter_code source code for the filter
     * @param filter_type type of the filter
     * @param filter_name class name of the filter
     * @param disablePropertyName name of the property used to disable the filter
     * @param filter_order ordering of the filter
     * @param application_name application this filter belongs to
     */
    public FilterInfo(String filter_id, String filter_code, FilterType filter_type, String filter_name,
                      String disablePropertyName, String filter_order, String application_name) {
        this.filter_id = filter_id;
        this.filter_code = filter_code;
        this.filter_type = filter_type;
        this.filter_name = filter_name;
        this.filter_disablePropertyName = disablePropertyName;
        this.filter_order = filter_order;
        this.application_name = application_name;
        isActive.set(false);
        isCanary.set(false);
    }

    /**
     * Constructor fully specifying the filter state, including revision, creation date and
     * active/canary flags.
     *
     * @param filter_id unique key for the filter
     * @param revision revision number of this filter
     * @param creationDate when this revision was created
     * @param isActive whether the filter is active
     * @param isCanary whether the filter is active on the canary cluster
     * @param filter_code source code for the filter
     * @param filter_type type of the filter
     * @param filter_name class name of the filter
     * @param disablePropertyName name of the property used to disable the filter
     * @param filter_order ordering of the filter
     * @param application_name application this filter belongs to
     */
    public FilterInfo(String filter_id, int revision, Date creationDate, boolean isActive, boolean isCanary,
                      String filter_code, FilterType filter_type, String filter_name,
                      String disablePropertyName, String filter_order, String application_name) {
        this.filter_id = filter_id;
        this.revision = revision;
        this.creationDate = creationDate;
        this.isActive.set(isActive);
        this.isCanary.set(isCanary);
        this.filter_code = filter_code;
        this.filter_name = filter_name;
        this.filter_type = filter_type;
        this.filter_order = filter_order;
        this.filter_disablePropertyName = disablePropertyName;
        this.application_name = application_name;
    }

    /**
     * @return the filter name; the class name of the filter
     */
    public String getFilterName() {
        return filter_name;
    }

    /**
     * The source code for the filter.
     *
     * @return the source code for the filter
     */
    public String getFilterCode() {
        return filter_code;
    }

    /**
     * @return the name of the property to disable the filter.
     */
    public String getFilterDisablePropertyName() {
        return filter_disablePropertyName;
    }

    /**
     * @return the filter_type
     */
    public FilterType getFilterType() {
        return filter_type;
    }

    @Override
    public String toString() {
        return "FilterInfo{" +
                "filter_id='" + filter_id + '\'' +
                ", filter_name='" + filter_name + '\'' +
                ", filter_type='" + filter_type + '\'' +
                ", revision=" + revision +
                ", creationDate=" + creationDate +
                ", isActive=" + isActive +
                ", isCanary=" + isCanary +
                ", application_name=" + application_name +
                '}';
    }

    /**
     * The application name context of the filter. This is for if Zuul is applied to different
     * applications in the same datastore.
     *
     * @return the application name
     */
    public String getApplication_name() {
        return application_name;
    }

    /**
     * @return the revision of this filter
     */
    public int getRevision() {
        return revision;
    }

    /**
     * @return creation date; may be {@code null} if this filter was built with the short constructor
     */
    public Date getCreationDate() {
        return creationDate;
    }

    /**
     * @return true if this filter is active
     */
    public boolean isActive() {
        return isActive.get();
    }

    /**
     * @return true if this filter should be active on a "canary" cluster. A "canary" cluster is a
     * separate cluster where filters may be tested before going to the full production cluster.
     */
    public boolean isCanary() {
        return isCanary.get();
    }

    /**
     * @return unique key for the filter
     */
    public String getFilterID() {
        return filter_id;
    }

    /**
     * @return the filter order
     */
    public String getFilterOrder() {
        return filter_order;
    }

    /**
     * Builds the unique filter_id key.
     *
     * @param application_name the application name
     * @param filter_type the filter type
     * @param filter_name the filter name
     * @return key is application_name:filter_name:filter_type
     */
    public static String buildFilterID(String application_name, FilterType filter_type, String filter_name) {
        return application_name + ":" + filter_name + ":" + filter_type.toString();
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        FilterInfo that = (FilterInfo) o;

        if (revision != that.revision) return false;
        if (!Objects.equals(creationDate, that.creationDate)) return false;
        if (!Objects.equals(filter_code, that.filter_code)) return false;
        if (!Objects.equals(filter_id, that.filter_id)) return false;
        if (!Objects.equals(filter_name, that.filter_name)) return false;
        if (!Objects.equals(filter_type, that.filter_type)) return false;
        // The AtomicBoolean fields are final and always initialized, so compare their values
        // directly. (The previous null checks on them were dead code.)
        if (isActive.get() != that.isActive.get()) return false;
        if (isCanary.get() != that.isCanary.get()) return false;

        return true;
    }

    @Override
    public int hashCode() {
        int result = filter_id != null ? filter_id.hashCode() : 0;
        result = 31 * result + (filter_name != null ? filter_name.hashCode() : 0);
        result = 31 * result + (filter_code != null ? filter_code.hashCode() : 0);
        result = 31 * result + (filter_type != null ? filter_type.hashCode() : 0);
        result = 31 * result + revision;
        result = 31 * result + (creationDate != null ? creationDate.hashCode() : 0);
        // Fold in the boolean VALUES, not AtomicBoolean.hashCode(): AtomicBoolean does not
        // override hashCode(), so the previous code used identity hashes and two equal
        // FilterInfo objects produced different hash codes, violating the hashCode contract.
        result = 31 * result + (isActive.get() ? 1 : 0);
        result = 31 * result + (isCanary.get() ? 1 : 0);
        return result;
    }

    /**
     * Orders filters by name descending; filters with the same name are ordered newest
     * creation date first. Null creation dates (possible via the short constructor) sort
     * after non-null ones instead of throwing an NPE as the previous implementation did.
     */
    @Override
    public int compareTo(FilterInfo filterInfo) {
        if (filterInfo.getFilterName().equals(this.getFilterName())) {
            Date thatDate = filterInfo.getCreationDate();
            Date thisDate = this.getCreationDate();
            if (thatDate == null && thisDate == null) return 0;
            if (thatDate == null) return -1;
            if (thisDate == null) return 1;
            return thatDate.compareTo(thisDate);
        }
        return filterInfo.getFilterName().compareTo(this.getFilterName());
    }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.prestosql.plugin.raptor.legacy.storage.organization;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;
import com.google.common.io.Files;
import io.prestosql.plugin.raptor.legacy.RaptorMetadata;
import io.prestosql.plugin.raptor.legacy.metadata.ColumnInfo;
import io.prestosql.plugin.raptor.legacy.metadata.ColumnStats;
import io.prestosql.plugin.raptor.legacy.metadata.MetadataDao;
import io.prestosql.plugin.raptor.legacy.metadata.ShardInfo;
import io.prestosql.plugin.raptor.legacy.metadata.ShardManager;
import io.prestosql.plugin.raptor.legacy.metadata.ShardMetadata;
import io.prestosql.plugin.raptor.legacy.metadata.Table;
import io.prestosql.plugin.raptor.legacy.metadata.TableColumn;
import io.prestosql.spi.connector.ConnectorMetadata;
import io.prestosql.spi.connector.SchemaTableName;
import io.prestosql.spi.type.Type;
import io.prestosql.type.InternalTypeManager;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;

import java.io.File;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.OptionalInt;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ThreadLocalRandom;

import static com.google.common.io.MoreFiles.deleteRecursively;
import static com.google.common.io.RecursiveDeleteOption.ALLOW_INSECURE;
import static io.prestosql.metadata.MetadataManager.createTestMetadataManager;
import static io.prestosql.metadata.MetadataUtil.TableMetadataBuilder.tableMetadataBuilder;
import static io.prestosql.plugin.raptor.legacy.metadata.SchemaDaoUtil.createTablesWithRetry;
import static io.prestosql.plugin.raptor.legacy.metadata.TestDatabaseShardManager.createShardManager;
import static io.prestosql.plugin.raptor.legacy.metadata.TestDatabaseShardManager.shardInfo;
import static io.prestosql.plugin.raptor.legacy.storage.organization.ShardOrganizerUtil.getOrganizationEligibleShards;
import static io.prestosql.spi.type.BigintType.BIGINT;
import static io.prestosql.spi.type.DateType.DATE;
import static io.prestosql.spi.type.TimestampType.TIMESTAMP;
import static io.prestosql.spi.type.VarcharType.VARCHAR;
import static io.prestosql.spi.type.VarcharType.createVarcharType;
import static io.prestosql.testing.TestingConnectorSession.SESSION;
import static org.testng.Assert.assertEquals;

/**
 * Tests {@code ShardOrganizerUtil.getOrganizationEligibleShards} against an in-memory H2
 * metadata database: shards missing temporal (or, when requested, sort-column) min/max stats
 * must be excluded from the eligible set.
 */
@Test(singleThreaded = true)
public class TestShardOrganizerUtil {
    private static final List<ColumnInfo> COLUMNS = ImmutableList.of(
            new ColumnInfo(1, TIMESTAMP),
            new ColumnInfo(2, BIGINT),
            new ColumnInfo(3, VARCHAR));

    private DBI dbi;
    // Kept open for the lifetime of the test so the in-memory H2 database is not discarded.
    private Handle dummyHandle;
    private File dataDir;
    private ShardManager shardManager;
    private MetadataDao metadataDao;
    private ConnectorMetadata metadata;

    @BeforeMethod
    public void setup() {
        // Unique URL per test method so each test gets a fresh in-memory database.
        dbi = new DBI("jdbc:h2:mem:test" + System.nanoTime() + ThreadLocalRandom.current().nextLong());
        dbi.registerMapper(new TableColumn.Mapper(new InternalTypeManager(createTestMetadataManager())));
        dummyHandle = dbi.open();
        createTablesWithRetry(dbi);
        dataDir = Files.createTempDir();

        metadata = new RaptorMetadata(dbi, createShardManager(dbi));

        metadataDao = dbi.onDemand(MetadataDao.class);
        shardManager = createShardManager(dbi);
    }

    @AfterMethod(alwaysRun = true)
    public void teardown()
            throws Exception
    {
        dummyHandle.close();
        deleteRecursively(dataDir.toPath(), ALLOW_INSECURE);
    }

    @Test
    public void testGetOrganizationEligibleShards()
    {
        int day1 = 1111;
        int day2 = 2222;

        // Create a temporal table sorted on (orderstatus, orderkey) with orderdate as the
        // temporal column.
        SchemaTableName tableName = new SchemaTableName("default", "test");
        metadata.createTable(SESSION, tableMetadataBuilder(tableName)
                        .column("orderkey", BIGINT)
                        .column("orderdate", DATE)
                        .column("orderstatus", createVarcharType(3))
                        .property("ordering", ImmutableList.of("orderstatus", "orderkey"))
                        .property("temporal_column", "orderdate")
                        .build(),
                false);
        Table tableInfo = metadataDao.getTableInformation(tableName.getSchemaName(), tableName.getTableName());
        List<TableColumn> tableColumns = metadataDao.listTableColumns(tableInfo.getTableId());
        Map<String, TableColumn> tableColumnMap = Maps.uniqueIndex(tableColumns, TableColumn::getColumnName);

        long orderDate = tableColumnMap.get("orderdate").getColumnId();
        long orderKey = tableColumnMap.get("orderkey").getColumnId();
        long orderStatus = tableColumnMap.get("orderstatus").getColumnId();

        // The last two shards have a null min or max for a column and so are only eligible
        // when the relevant stats are not required.
        List<ShardInfo> shards = ImmutableList.<ShardInfo>builder()
                .add(shardInfo(
                        UUID.randomUUID(),
                        "node1",
                        ImmutableList.of(
                                new ColumnStats(orderDate, day1, day1 + 10),
                                new ColumnStats(orderKey, 13L, 14L),
                                new ColumnStats(orderStatus, "aaa", "abc"))))
                .add(shardInfo(
                        UUID.randomUUID(),
                        "node1",
                        ImmutableList.of(
                                new ColumnStats(orderDate, day2, day2 + 100),
                                new ColumnStats(orderKey, 2L, 20L),
                                new ColumnStats(orderStatus, "aaa", "abc"))))
                .add(shardInfo(
                        UUID.randomUUID(),
                        "node1",
                        ImmutableList.of(
                                new ColumnStats(orderDate, day1, day2),
                                new ColumnStats(orderKey, 2L, 11L),
                                new ColumnStats(orderStatus, "aaa", "abc"))))
                .add(shardInfo(
                        UUID.randomUUID(),
                        "node1",
                        ImmutableList.of(
                                new ColumnStats(orderDate, day1, day2),
                                new ColumnStats(orderKey, 2L, null),
                                new ColumnStats(orderStatus, "aaa", "abc"))))
                .add(shardInfo(
                        UUID.randomUUID(),
                        "node1",
                        ImmutableList.of(
                                new ColumnStats(orderDate, day1, null),
                                new ColumnStats(orderKey, 2L, 11L),
                                new ColumnStats(orderStatus, "aaa", "abc"))))
                .build();

        long transactionId = shardManager.beginTransaction();
        shardManager.commitShards(transactionId, tableInfo.getTableId(), COLUMNS, shards, Optional.empty(), 0);
        Set<ShardMetadata> shardMetadatas = shardManager.getNodeShards("node1");

        Long temporalColumnId = metadataDao.getTemporalColumnId(tableInfo.getTableId());
        TableColumn temporalColumn = metadataDao.getTableColumn(tableInfo.getTableId(), temporalColumnId);

        // Without sort-column stats: all shards with temporal min/max are eligible.
        Set<ShardIndexInfo> actual = ImmutableSet.copyOf(getOrganizationEligibleShards(dbi, metadataDao, tableInfo, shardMetadatas, false));
        List<ShardIndexInfo> expected = getShardIndexInfo(tableInfo, shards, temporalColumn, Optional.empty());
        assertEquals(actual, expected);

        // With sort-column stats: shards with a null sort-column min/max are also excluded.
        List<TableColumn> sortColumns = metadataDao.listSortColumns(tableInfo.getTableId());
        Set<ShardIndexInfo> actualSortRange = ImmutableSet.copyOf(getOrganizationEligibleShards(dbi, metadataDao, tableInfo, shardMetadatas, true));
        List<ShardIndexInfo> expectedSortRange = getShardIndexInfo(tableInfo, shards, temporalColumn, Optional.of(sortColumns));
        assertEquals(actualSortRange, expectedSortRange);
    }

    /**
     * Computes the expected {@code ShardIndexInfo} list directly from the raw shard stats:
     * shards lacking temporal min/max are skipped; when sort columns are given, shards lacking
     * min/max for any sort column are skipped and a sort range is attached otherwise.
     */
    private static List<ShardIndexInfo> getShardIndexInfo(Table tableInfo, List<ShardInfo> shards, TableColumn temporalColumn, Optional<List<TableColumn>> sortColumns)
    {
        long tableId = tableInfo.getTableId();
        Type temporalType = temporalColumn.getDataType();

        ImmutableList.Builder<ShardIndexInfo> builder = ImmutableList.builder();
        for (ShardInfo shard : shards) {
            ColumnStats temporalColumnStats = shard.getColumnStats().stream()
                    .filter(columnStats -> columnStats.getColumnId() == temporalColumn.getColumnId())
                    .findFirst()
                    .get();

            if (temporalColumnStats.getMin() == null || temporalColumnStats.getMax() == null) {
                continue;
            }

            Optional<ShardRange> sortRange = Optional.empty();
            if (sortColumns.isPresent()) {
                Map<Long, ColumnStats> columnIdToStats = Maps.uniqueIndex(shard.getColumnStats(), ColumnStats::getColumnId);
                ImmutableList.Builder<Type> typesBuilder = ImmutableList.builder();
                ImmutableList.Builder<Object> minBuilder = ImmutableList.builder();
                ImmutableList.Builder<Object> maxBuilder = ImmutableList.builder();
                boolean isShardEligible = true;
                for (TableColumn sortColumn : sortColumns.get()) {
                    ColumnStats columnStats = columnIdToStats.get(sortColumn.getColumnId());
                    typesBuilder.add(sortColumn.getDataType());

                    if (columnStats.getMin() == null || columnStats.getMax() == null) {
                        isShardEligible = false;
                        break;
                    }

                    minBuilder.add(columnStats.getMin());
                    maxBuilder.add(columnStats.getMax());
                }

                if (!isShardEligible) {
                    continue;
                }

                List<Type> types = typesBuilder.build();
                List<Object> minValues = minBuilder.build();
                List<Object> maxValues = maxBuilder.build();
                sortRange = Optional.of(ShardRange.of(new Tuple(types, minValues), new Tuple(types, maxValues)));
            }
            builder.add(new ShardIndexInfo(
                    tableId,
                    OptionalInt.empty(),
                    shard.getShardUuid(),
                    shard.getRowCount(),
                    shard.getUncompressedSize(),
                    sortRange,
                    Optional.of(ShardRange.of(
                            new Tuple(temporalType, temporalColumnStats.getMin()),
                            new Tuple(temporalType, temporalColumnStats.getMax())))));
        }
        return builder.build();
    }
}
package com.thinkbiganalytics.feedmgr.rest.controller; /*- * #%L * thinkbig-metadata-rest-controller * %% * Copyright (C) 2017 ThinkBig Analytics * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import com.google.common.collect.Collections2; import com.thinkbiganalytics.Formatters; import com.thinkbiganalytics.discovery.schema.QueryResult; import com.thinkbiganalytics.discovery.schema.TableSchema; import com.thinkbiganalytics.feedmgr.nifi.controllerservice.DBCPConnectionPoolService; import com.thinkbiganalytics.feedmgr.rest.Model; import com.thinkbiganalytics.feedmgr.security.FeedServicesAccessControl; import com.thinkbiganalytics.feedmgr.service.datasource.DatasourceModelTransform; import com.thinkbiganalytics.feedmgr.service.security.SecurityService; import com.thinkbiganalytics.metadata.api.MetadataAccess; import com.thinkbiganalytics.metadata.api.datasource.DatasourceDefinitionProvider; import com.thinkbiganalytics.metadata.api.datasource.DatasourceProvider; import com.thinkbiganalytics.metadata.api.datasource.JdbcDatasourceDetails; import com.thinkbiganalytics.metadata.api.security.AccessControlled; import com.thinkbiganalytics.metadata.rest.model.data.Datasource; import com.thinkbiganalytics.metadata.rest.model.data.DatasourceCriteria; import com.thinkbiganalytics.metadata.rest.model.data.DatasourceDefinition; import com.thinkbiganalytics.metadata.rest.model.data.JdbcDatasource; import com.thinkbiganalytics.metadata.rest.model.data.UserDatasource; import 
com.thinkbiganalytics.nifi.rest.client.NiFiRestClient; import com.thinkbiganalytics.rest.model.RestResponseStatus; import com.thinkbiganalytics.security.AccessController; import com.thinkbiganalytics.security.rest.controller.SecurityModelTransform; import com.thinkbiganalytics.security.rest.model.ActionGroup; import com.thinkbiganalytics.security.rest.model.PermissionsChange; import com.thinkbiganalytics.security.rest.model.RoleMembershipChange; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Component; import java.security.AccessControlException; import java.security.Principal; import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; import javax.annotation.Nonnull; import javax.inject.Inject; import javax.ws.rs.Consumes; import javax.ws.rs.DELETE; import javax.ws.rs.DefaultValue; import javax.ws.rs.GET; import javax.ws.rs.NotFoundException; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import io.swagger.annotations.Api; import io.swagger.annotations.ApiOperation; import io.swagger.annotations.ApiResponse; import io.swagger.annotations.ApiResponses; import io.swagger.annotations.SwaggerDefinition; import io.swagger.annotations.Tag; @Api(tags = "Feed Manager - Data Sources", produces = "application/json") @Component @Path("/v1/metadata/datasource") @SwaggerDefinition(tags = @Tag(name = "Feed Manager - Data Sources", description = "manages data sources")) public class DatasourceController { private static final Logger log = LoggerFactory.getLogger(DatasourceController.class); /** * Ensures the user has the correct permissions. 
*/ @Inject private AccessController accessController; @Inject private DatasourceProvider datasetProvider; @Inject private DatasourceDefinitionProvider datasourceDefinitionProvider; /** * The {@code Datasource} transformer */ @Inject private DatasourceModelTransform datasourceTransform; @Inject private MetadataAccess metadata; /** * NiFi REST client */ @Inject private NiFiRestClient nifiRestClient; /** * Provides table lists and schemas from JDBC connections. */ @Inject private DBCPConnectionPoolService dbcpConnectionPoolTableInfo; @Inject private SecurityService securityService; @Inject private SecurityModelTransform securityTransform; /** * Gets a list of datasource that match the criteria provided. * * @param name the name of a data source * @param owner the owner of a data source * @param on the time of the data source * @param after to specify data source to created after the time given * @param before to specify data source to created after the time given * @param type the type of the data source * @return a list of data sources * @throws AccessControlException if the user does not have the {@code ACCESS_DATASOURCES} permission */ @GET @Produces(MediaType.APPLICATION_JSON) @ApiOperation("Gets the data sources matching the provided criteria.") @ApiResponses({ @ApiResponse(code = 200, message = "Returns the data sources.", response = Datasource.class, responseContainer = "List"), @ApiResponse(code = 403, message = "Access denied.", response = RestResponseStatus.class), @ApiResponse(code = 500, message = "Kylo is unavailable.", response = RestResponseStatus.class) }) public List<Datasource> getDatasources(@QueryParam(DatasourceCriteria.NAME) final String name, @QueryParam(DatasourceCriteria.OWNER) final String owner, @QueryParam(DatasourceCriteria.ON) final String on, @QueryParam(DatasourceCriteria.AFTER) final String after, @QueryParam(DatasourceCriteria.BEFORE) final String before, @QueryParam(DatasourceCriteria.TYPE) final String type) { return 
this.metadata.read(() -> { accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.ACCESS_DATASOURCES); com.thinkbiganalytics.metadata.api.datasource.DatasourceCriteria criteria = createDatasourceCriteria(name, owner, on, after, before, type); return datasetProvider.getDatasources(criteria).stream() .map(ds -> datasourceTransform.toDatasource(ds, DatasourceModelTransform.Level.CONNECTIONS)) .collect(Collectors.toList()); }); } /** * Updates the specified data source. * * @param datasource the data source * @return the data source * @throws AccessControlException if the user does not have the {@code EDIT_DATASOURCES} permission */ @POST @ApiOperation("Updates the specified data source.") @ApiResponses({ @ApiResponse(code = 200, message = "Returns the data source.", response = Datasource.class), @ApiResponse(code = 403, message = "Access denied.", response = RestResponseStatus.class), @ApiResponse(code = 500, message = "Kylo is unavailable.", response = RestResponseStatus.class) }) public Datasource postDatasource(@Nonnull final UserDatasource datasource) { return metadata.commit(() -> { accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.EDIT_DATASOURCES); datasourceTransform.toDomain(datasource); if (datasource instanceof JdbcDatasource) { ((JdbcDatasource) datasource).setPassword(null); } return datasource; }); } /** * Gets the datasource with the id provided. 
* * @param idStr the datasource id * @param sensitive {@code true} to include sensitive fields in the response, or {@code false} otherwise * @return the datasource object * @throws AccessControlException if the user does not have the {@code ACCESS_DATASOURCES} permission */ @GET @Path("{id}") @Produces(MediaType.APPLICATION_JSON) @ApiOperation("Gets the data source with the provided id.") @ApiResponses({ @ApiResponse(code = 200, message = "Returns the data source.", response = Datasource.class), @ApiResponse(code = 403, message = "Access denied.", response = RestResponseStatus.class), @ApiResponse(code = 404, message = "The data source does not exist.", response = RestResponseStatus.class), @ApiResponse(code = 500, message = "Kylo is unavailable.", response = RestResponseStatus.class) }) public Datasource getDatasource(@PathParam("id") String idStr, @QueryParam("sensitive") boolean sensitive) { return this.metadata.read(() -> { // Check permissions accessController.checkPermission(AccessController.SERVICES, sensitive ? FeedServicesAccessControl.ADMIN_DATASOURCES : FeedServicesAccessControl.ACCESS_DATASOURCES); com.thinkbiganalytics.metadata.api.datasource.Datasource.ID id = this.datasetProvider.resolve(idStr); com.thinkbiganalytics.metadata.api.datasource.Datasource ds = this.datasetProvider.getDatasource(id); if (ds != null) { final Datasource restModel = datasourceTransform.toDatasource(ds, sensitive ? DatasourceModelTransform.Level.ADMIN : DatasourceModelTransform.Level.FULL); if (ds instanceof AccessControlled) { securityTransform.applyAccessControl((AccessControlled) ds, restModel); } return restModel; } else { throw new NotFoundException("No datasource exists with the given ID: " + idStr); } }); } /** * Deletes the datasource with the specified id. 
 *
 * @param idStr the datasource id
 */
@DELETE
@Path("{id}")
@ApiOperation("Deletes the data source with the provided id.")
@ApiResponses({
                  @ApiResponse(code = 204, message = "The data source was deleted."),
                  @ApiResponse(code = 403, message = "Access denied.", response = RestResponseStatus.class),
                  @ApiResponse(code = 404, message = "The data source does not exist.", response = RestResponseStatus.class),
                  @ApiResponse(code = 500, message = "Kylo is unavailable.", response = RestResponseStatus.class)
              })
public void deleteDatasource(@PathParam("id") final String idStr) {
    // NOTE(review): unlike the read endpoints in this file, there is no visible
    // accessController.checkPermission(...) call here -- confirm that metadata.commit()
    // (or an upstream filter) enforces the edit/admin datasource permission.
    metadata.commit(() -> {
        final com.thinkbiganalytics.metadata.api.datasource.Datasource.ID id = datasetProvider.resolve(idStr);
        final com.thinkbiganalytics.metadata.api.datasource.Datasource datasource = datasetProvider.getDatasource(id);
        if (datasource == null) {
            throw new NotFoundException("No datasource exists with the given ID: " + idStr);
        }
        if (datasource instanceof com.thinkbiganalytics.metadata.api.datasource.UserDatasource) {
            final com.thinkbiganalytics.metadata.api.datasource.UserDatasource userDatasource = (com.thinkbiganalytics.metadata.api.datasource.UserDatasource) datasource;
            // For JDBC datasources, asynchronously disable and delete the backing NiFi
            // controller service before removing the metadata entry.
            userDatasource.getDetails().ifPresent(details -> {
                if (details instanceof JdbcDatasourceDetails) {
                    ((JdbcDatasourceDetails) details).getControllerServiceId()
                        .ifPresent(controllerServiceId -> nifiRestClient.controllerServices().disableAndDeleteAsync(controllerServiceId));
                }
            });
            datasetProvider.removeDatasource(id);
        }
        // NOTE(review): a datasource that exists but is NOT a UserDatasource falls through
        // silently -- the endpoint still returns 204 but nothing is removed. Confirm intentional.
    });
}

/**
 * get the datasource definitions
 *
 * @return the set of datasource definitions
 * @throws AccessControlException if the user does not have the {@code ACCESS_DATASOURCES} permission
 */
@GET
@Path("/datasource-definitions")
@Produces(MediaType.APPLICATION_JSON)
@ApiOperation("Gets the data source definitions.")
@ApiResponses({
                  @ApiResponse(code = 200, message = "Returns the data source definitions.", response = DatasourceDefinition.class, responseContainer = "Set"),
                  @ApiResponse(code = 403, message
= "Access denied.", response = RestResponseStatus.class), @ApiResponse(code = 500, message = "Kylo is unavailable.", response = RestResponseStatus.class) }) public Set<DatasourceDefinition> getDatasourceDefinitions() { return this.metadata.read(() -> { accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.ACCESS_DATASOURCES); Set<com.thinkbiganalytics.metadata.api.datasource.DatasourceDefinition> datasourceDefinitions = this.datasourceDefinitionProvider.getDatasourceDefinitions(); if (datasourceDefinitions != null) { return new HashSet<>(Collections2.transform(datasourceDefinitions, Model.DOMAIN_TO_DS_DEFINITION)); } return null; }); } /** * Executes a query on the specified datasource. * * @param idStr the datasource id * @param query the SQL query * @return the SQL result */ @GET @Path("{id}/query") @Produces(MediaType.APPLICATION_JSON) @ApiOperation("Executes a query and returns the result.") @ApiResponses({ @ApiResponse(code = 200, message = "Returns the result.", response = QueryResult.class), @ApiResponse(code = 403, message = "Access denied.", response = RestResponseStatus.class), @ApiResponse(code = 400, message = "A JDBC data source with that id does not exist.", response = RestResponseStatus.class), @ApiResponse(code = 500, message = "NiFi or the database are unavailable.", response = RestResponseStatus.class) }) public Response query(@PathParam("id") final String idStr, @QueryParam("query") final String query) { // Verify user has access to data source final Optional<com.thinkbiganalytics.metadata.api.datasource.Datasource.ID> id = metadata.read(() -> { accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.ACCESS_DATASOURCES); final com.thinkbiganalytics.metadata.api.datasource.Datasource datasource = datasetProvider.getDatasource(datasetProvider.resolve(idStr)); return Optional.ofNullable(datasource).map(com.thinkbiganalytics.metadata.api.datasource.Datasource::getId); }); // Execute query 
return metadata.read(() -> {
    // Re-read the datasource as the SERVICE principal (access was already verified above)
    // and only allow querying for JDBC datasources; anything else is reported as not found.
    final QueryResult result = id.map(datasetProvider::getDatasource)
        .map(ds -> datasourceTransform.toDatasource(ds, DatasourceModelTransform.Level.ADMIN))
        .filter(JdbcDatasource.class::isInstance)
        .map(JdbcDatasource.class::cast)
        // NOTE(review): 'query' is passed through verbatim and executed against the
        // datasource after only an ACCESS_DATASOURCES check -- confirm that arbitrary
        // SQL from any datasource reader is acceptable here.
        .map(datasource -> dbcpConnectionPoolTableInfo.executeQueryForDatasource(datasource, query))
        .orElseThrow(() -> new NotFoundException("No JDBC datasource exists with the given ID: " + idStr));
    return Response.ok(result).build();
}, MetadataAccess.SERVICE);
}

/**
 * Gets the schema names from the specified data source.
 *
 * @param idStr the data source id
 * @return the list of schema names
 */
@GET
@Path("{id}/schemas")
@Produces(MediaType.APPLICATION_JSON)
// NOTE(review): the Swagger value below says "table names" but this endpoint returns schema
// names; the string is runtime-visible API metadata, so it is left unchanged here.
@ApiOperation(value = "Gets the table names from the data source.", notes = "Connects to the database specified by the data source.")
@ApiResponses({
                  @ApiResponse(code = 200, message = "Returns the schema names.", response = String.class, responseContainer = "List"),
                  @ApiResponse(code = 403, message = "Access denied.", response = RestResponseStatus.class),
                  @ApiResponse(code = 404, message = "A JDBC data source with that id does not exist.", response = RestResponseStatus.class),
                  @ApiResponse(code = 500, message = "NiFi or the database are unavailable.", response = RestResponseStatus.class)
              })
public Response getSchemaNames(@PathParam("id") final String idStr) {
    // Verify user has access to data source
    final Optional<com.thinkbiganalytics.metadata.api.datasource.Datasource.ID> id = metadata.read(() -> {
        accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.ACCESS_DATASOURCES);
        final com.thinkbiganalytics.metadata.api.datasource.Datasource datasource = datasetProvider.getDatasource(datasetProvider.resolve(idStr));
        return Optional.ofNullable(datasource).map(com.thinkbiganalytics.metadata.api.datasource.Datasource::getId);
    });

    // Retrieve table names using system user
    return metadata.read(() -> {
        final List<String> tables =
id.map(datasetProvider::getDatasource) .map(ds -> datasourceTransform.toDatasource(ds, DatasourceModelTransform.Level.ADMIN)) .filter(JdbcDatasource.class::isInstance) .map(JdbcDatasource.class::cast) .map(datasource -> dbcpConnectionPoolTableInfo.getSchemaNamesForDatasource(datasource)) .orElseThrow(() -> new NotFoundException("No JDBC datasource exists with the given ID: " + idStr)); return Response.ok(tables).build(); }, MetadataAccess.SERVICE); } /** * Gets the table names from the specified data source. * * @param idStr the data source id * @param schema the schema name, or {@code null} for all schemas * @return the list of table names */ @GET @Path("{id}/tables") @Produces(MediaType.APPLICATION_JSON) @ApiOperation(value = "Gets the table names from the data source.", notes = "Connects to the database specified by the data source.") @ApiResponses({ @ApiResponse(code = 200, message = "Returns the table names.", response = String.class, responseContainer = "List"), @ApiResponse(code = 403, message = "Access denied.", response = RestResponseStatus.class), @ApiResponse(code = 404, message = "A JDBC data source with that id does not exist.", response = RestResponseStatus.class), @ApiResponse(code = 500, message = "NiFi or the database are unavailable.", response = RestResponseStatus.class) }) public Response getTableNames(@PathParam("id") final String idStr, @QueryParam("schema") final String schema, @QueryParam("tableName") final String tableName) { // Verify user has access to data source final Optional<com.thinkbiganalytics.metadata.api.datasource.Datasource.ID> id = metadata.read(() -> { accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.ACCESS_DATASOURCES); final com.thinkbiganalytics.metadata.api.datasource.Datasource datasource = datasetProvider.getDatasource(datasetProvider.resolve(idStr)); return Optional.ofNullable(datasource).map(com.thinkbiganalytics.metadata.api.datasource.Datasource::getId); }); // Retrieve table 
names using system user return metadata.read(() -> { final List<String> tables = id.map(datasetProvider::getDatasource) .map(ds -> datasourceTransform.toDatasource(ds, DatasourceModelTransform.Level.ADMIN)) .filter(JdbcDatasource.class::isInstance) .map(JdbcDatasource.class::cast) .map(datasource -> dbcpConnectionPoolTableInfo.getTableNamesForDatasource(datasource, schema, tableName)) .orElseThrow(() -> new NotFoundException("No JDBC datasource exists with the given ID: " + idStr)); return Response.ok(tables).build(); }, MetadataAccess.SERVICE); } /** * Gets the schema of the specified table using the specified data source. * * @param idStr the data source id * @param tableName the table name * @param schema the schema name, or {@code null} to search all schemas * @return the table and field details */ @GET @Path("{id}/tables/{tableName}") @Produces(MediaType.APPLICATION_JSON) @ApiOperation(value = "Gets the schema of the specified table.", notes = "Connects to the database specified by the data source.") @ApiResponses({ @ApiResponse(code = 200, message = "Returns the table schema.", response = TableSchema.class), @ApiResponse(code = 403, message = "Access denied.", response = RestResponseStatus.class), @ApiResponse(code = 404, message = "A JDBC data source with that id does not exist.", response = RestResponseStatus.class), @ApiResponse(code = 500, message = "NiFi or the database are unavailable.", response = RestResponseStatus.class) }) public Response describeTable(@PathParam("id") final String idStr, @PathParam("tableName") final String tableName, @QueryParam("schema") final String schema) { // Verify user has access to data source final Optional<com.thinkbiganalytics.metadata.api.datasource.Datasource.ID> id = metadata.read(() -> { accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.ACCESS_DATASOURCES); final com.thinkbiganalytics.metadata.api.datasource.Datasource datasource = 
datasetProvider.getDatasource(datasetProvider.resolve(idStr)); return Optional.ofNullable(datasource).map(com.thinkbiganalytics.metadata.api.datasource.Datasource::getId); }); // Retrieve table description using system user return metadata.read(() -> { final TableSchema tableSchema = id.map(datasetProvider::getDatasource) .map(ds -> datasourceTransform.toDatasource(ds, DatasourceModelTransform.Level.ADMIN)) .filter(JdbcDatasource.class::isInstance) .map(JdbcDatasource.class::cast) .map(datasource -> dbcpConnectionPoolTableInfo.describeTableForDatasource(datasource, schema, tableName)) .orElseThrow(() -> new NotFoundException("No JDBC datasource exists with the given ID: " + idStr)); return Response.ok(tableSchema).build(); }, MetadataAccess.SERVICE); } @GET @Path("{id}/actions/available") @Produces(MediaType.APPLICATION_JSON) @ApiOperation("Gets the list of available actions that may be permitted or revoked on a data source.") @ApiResponses({ @ApiResponse(code = 200, message = "Returns the actions.", response = ActionGroup.class), @ApiResponse(code = 404, message = "A data source with the given ID does not exist.", response = RestResponseStatus.class) }) public Response getAvailableActions(@PathParam("id") final String datasourceIdStr) { log.debug("Get available actions for data source: {}", datasourceIdStr); return this.securityService.getAvailableDatasourceActions(datasourceIdStr) .map(g -> Response.ok(g).build()) .orElseThrow(() -> new WebApplicationException("A data source with the given ID does not exist: " + datasourceIdStr, Response.Status.NOT_FOUND)); } @GET @Path("{id}/actions/allowed") @Produces(MediaType.APPLICATION_JSON) @ApiOperation("Gets the list of actions permitted for the given username and/or groups.") @ApiResponses({ @ApiResponse(code = 200, message = "Returns the actions.", response = ActionGroup.class), @ApiResponse(code = 404, message = "A data source with the given ID does not exist.", response = RestResponseStatus.class) }) public Response 
getAllowedActions(@PathParam("id") final String datasourceIdStr, @QueryParam("user") final Set<String> userNames, @QueryParam("group") final Set<String> groupNames) { log.debug("Get allowed actions for data source: {}", datasourceIdStr); Set<? extends Principal> users = Arrays.stream(this.securityTransform.asUserPrincipals(userNames)).collect(Collectors.toSet()); Set<? extends Principal> groups = Arrays.stream(this.securityTransform.asGroupPrincipals(groupNames)).collect(Collectors.toSet()); return this.securityService.getAllowedDatasourceActions(datasourceIdStr, Stream.concat(users.stream(), groups.stream()).collect(Collectors.toSet())) .map(g -> Response.ok(g).build()) .orElseThrow(() -> new WebApplicationException("A data source with the given ID does not exist: " + datasourceIdStr, Response.Status.NOT_FOUND)); } @POST @Path("{id}/actions/allowed") @Consumes(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON) @ApiOperation("Updates the permissions for a data source using the supplied permission change request.") @ApiResponses({ @ApiResponse(code = 200, message = "The permissions were changed successfully.", response = ActionGroup.class), @ApiResponse(code = 400, message = "The type is not valid.", response = RestResponseStatus.class), @ApiResponse(code = 404, message = "No data source exists with the specified ID.", response = RestResponseStatus.class) }) public Response postPermissionsChange(@PathParam("id") final String datasourceIdStr, final PermissionsChange changes) { return this.securityService.changeDatasourcePermissions(datasourceIdStr, changes) .map(g -> Response.ok(g).build()) .orElseThrow(() -> new WebApplicationException("A data source with the given ID does not exist: " + datasourceIdStr, Response.Status.NOT_FOUND)); } @GET @Path("{id}/actions/change") @Produces(MediaType.APPLICATION_JSON) @ApiOperation("Constructs and returns a permission change request for a set of users/groups containing the actions that the requester may permit or 
revoke.") @ApiResponses({ @ApiResponse(code = 200, message = "Returns the change request that may be modified by the client and re-posted.", response = PermissionsChange.class), @ApiResponse(code = 400, message = "The type is not valid.", response = RestResponseStatus.class), @ApiResponse(code = 404, message = "No data source exists with the specified ID.", response = RestResponseStatus.class) }) public Response getAllowedPermissionsChange(@PathParam("id") final String datasourceIdStr, @QueryParam("type") final String changeType, @QueryParam("user") final Set<String> userNames, @QueryParam("group") final Set<String> groupNames) { if (StringUtils.isBlank(changeType)) { throw new WebApplicationException("The query parameter \"type\" is required", Response.Status.BAD_REQUEST); } Set<? extends Principal> users = Arrays.stream(this.securityTransform.asUserPrincipals(userNames)).collect(Collectors.toSet()); Set<? extends Principal> groups = Arrays.stream(this.securityTransform.asGroupPrincipals(groupNames)).collect(Collectors.toSet()); return this.securityService.createDatasourcePermissionChange(datasourceIdStr, PermissionsChange.ChangeType.valueOf(changeType.toUpperCase()), Stream.concat(users.stream(), groups.stream()).collect(Collectors.toSet())) .map(p -> Response.ok(p).build()) .orElseThrow(() -> new WebApplicationException("A data source with the given ID does not exist: " + datasourceIdStr, Response.Status.NOT_FOUND)); } @GET @Path("{id}/roles") @Produces(MediaType.APPLICATION_JSON) @ApiOperation("Gets the list of assigned members the data source's roles") @ApiResponses({ @ApiResponse(code = 200, message = "Returns the role memberships.", response = ActionGroup.class), @ApiResponse(code = 404, message = "A data source with the given ID does not exist.", response = RestResponseStatus.class) }) public Response getRoleMemberships(@PathParam("id") final String datasourceIdStr, @QueryParam("verbose") @DefaultValue("false") final boolean verbose) { return 
this.securityService.getDatasourceRoleMemberships(datasourceIdStr) .map(m -> Response.ok(m).build()) .orElseThrow(() -> new WebApplicationException("A data source with the given ID does not exist: " + datasourceIdStr, Response.Status.NOT_FOUND)); } @POST @Path("{id}/roles") @Consumes(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON) @ApiOperation("Updates the members of one of a data source's roles.") @ApiResponses({ @ApiResponse(code = 200, message = "The permissions were changed successfully.", response = ActionGroup.class), @ApiResponse(code = 404, message = "No data source exists with the specified ID.", response = RestResponseStatus.class) }) public Response postPermissionsChange(@PathParam("id") final String datasourceIdStr, final RoleMembershipChange changes) { return this.securityService.changeDatasourceRoleMemberships(datasourceIdStr, changes) .map(m -> Response.ok(m).build()) .orElseThrow(() -> new WebApplicationException("Either a data source with the ID \"" + datasourceIdStr + "\" does not exist or it does not have a role the named \"" + changes.getRoleName() + "\"", Response.Status.NOT_FOUND)); } private com.thinkbiganalytics.metadata.api.datasource.DatasourceCriteria createDatasourceCriteria(String name, String owner, String on, String after, String before, String type) { com.thinkbiganalytics.metadata.api.datasource.DatasourceCriteria criteria = datasetProvider.datasetCriteria(); if (StringUtils.isNotEmpty(name)) { criteria.name(name); } // if (StringUtils.isNotEmpty(owner)) criteria.owner(owner); // TODO implement if (StringUtils.isNotEmpty(on)) { criteria.createdOn(Formatters.parseDateTime(on)); } if (StringUtils.isNotEmpty(after)) { criteria.createdAfter(Formatters.parseDateTime(after)); } if (StringUtils.isNotEmpty(before)) { criteria.createdBefore(Formatters.parseDateTime(before)); } if (StringUtils.isNotEmpty(type)) { if ("UserDatasource".equalsIgnoreCase(type)) { 
            // Only the user-created datasource type is currently filterable;
            // any other non-empty type string is silently ignored.
            criteria.type(com.thinkbiganalytics.metadata.api.datasource.UserDatasource.class);
        }
    }
    return criteria;
}
}
/* * The MIT License * * Copyright 2019 Dr. Matthias Laux. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package org.ml.tools; import java.io.Serializable; import java.util.*; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.jdom2.Element; /** * * @author Dr. 
Matthias Laux */ public class PropertyManager implements Serializable { static final long serialVersionUID = 0427567567L; public final static ResolutionPolicy DEFAULT_RESOLUTION_POLICY = ResolutionPolicy.ALL_NAMESPACES; public final static Namespace DEFAULT_NAMESPACE = new Namespace("default_namespace_do_not_use_elsewhere ##$$%%"); public final static String DEFAULT_MACRO_PATTERN = "##(.+?)##"; private static boolean DEFAULT_AVOID_OVERWRITES = false; private ResolutionPolicy resolutionPolicy = DEFAULT_RESOLUTION_POLICY; private final Map<Namespace, Map<String, String>> namespaceData = new TreeMap<>(); private boolean avoidOverwrites = DEFAULT_AVOID_OVERWRITES; private boolean resolveMacros = true; private static Matcher macroMatcher = Pattern.compile(DEFAULT_MACRO_PATTERN).matcher(""); /** * */ public enum XML { properties, property, name, namespace, propertySet, propertySets, parent, ignoreNamespace } /** * */ public enum ResolutionPolicy { WITHIN_NAMESPACE, ALL_NAMESPACES, NONE } /** * */ private class ResolutionResult { private String value = null; private boolean foundReplacement = false; /** * * @param value */ public void setValue(String value) { if (value == null) { throw new IllegalArgumentException("value may not be null"); } this.value = value; } /** * * @param foundReplacement */ public void setFoundReplacement(boolean foundReplacement) { this.foundReplacement = foundReplacement; } /** * * @return */ public String getValue() { return value; } /** * * @return */ public boolean foundReplacement() { return foundReplacement; } } /** * */ public PropertyManager() { } /** * Create an instance and try to add all the properties to the default * namespace; the actual namespace used may be overridden based on the rules * described for {@link #setProperties(Namespace, Element)} * * @param element */ public PropertyManager(Element element) { if (element == null) { throw new IllegalArgumentException("element may not be null"); } setProperties(element); } /** * Create 
an instance and try to add all the properties to the namespace * provided; the actual namespace used may be overridden based on the rules * described for {@link #setProperties(Namespace, Element)} * * @param namespace * @param element */ public PropertyManager(Namespace namespace, Element element) { if (namespace == null) { throw new IllegalArgumentException("namespace may not be null"); } if (element == null) { throw new IllegalArgumentException("element may not be null"); } setProperties(namespace, element); } /** * Create an instance and add all the properties to the default namespace * * @param properties */ public PropertyManager(Map<String, String> properties) { if (properties == null) { throw new IllegalArgumentException("properties must not be null"); } setProperties(properties); } /** * Create an instance and add all the properties to the namespace provided * * @param namespace * @param properties */ public PropertyManager(Namespace namespace, Map<String, String> properties) { if (namespace == null) { throw new IllegalArgumentException("namespace may not be null"); } if (properties == null) { throw new IllegalArgumentException("properties must not be null"); } setProperties(namespace, properties); } /** * Create an instance which inherits properties from the provided instances * (in the order in which they are provided which can be relevant, depending * on the selected overwrite policy) * * @param propertyManagers */ public PropertyManager(PropertyManager... 
propertyManagers) { if (propertyManagers == null) { throw new IllegalArgumentException("propertyManagers may not be null"); } for (PropertyManager propertyManager : propertyManagers) { for (Namespace namespace : propertyManager.getNamespaces()) { setProperties(namespace, propertyManager.getProperties(namespace)); } } } /** * * @param element * @return */ public static Map<String, PropertyManager> createPropertyManagers(Element element) { if (element == null) { throw new NullPointerException("element may not be null"); } Map<String, PropertyManager> propertyManagers; if (element.getChild(XML.propertySets.toString()) != null) { propertyManagers = new HashMap<>(); //.... Scan if there is a properties element without a name attribute - these are propagated to all the named sets PropertyManager generalManager = null; for (Element propertySetElement : element.getChild(XML.propertySets.toString()).getChildren(XML.propertySet.toString())) { if (propertySetElement.getAttribute(XML.name.toString()) == null) { generalManager = new PropertyManager(propertySetElement); break; } } //.... No generic properties to cascade to all new propertyManagers found if (generalManager == null) { generalManager = new PropertyManager(); } //.... Find all named propertySet elements and process them for (Element propertySetElement : element.getChild(XML.propertySets.toString()).getChildren(XML.propertySet.toString())) { if (propertySetElement.getAttribute(XML.name.toString()) != null) { PropertyManager namedManager; //.... A derived property set, i. e. this property manager inherits properties from a parent if (propertySetElement.getAttribute(XML.parent.toString()) != null) { String parentName = propertySetElement.getAttributeValue(XML.parent.toString()); if (!propertyManagers.containsKey(parentName)) { throw new UnsupportedOperationException("Unknown parent PropertySet referenced: " + parentName); } //.... Do we need to ignore certain namespaces when crrating the derived PropertyManager? 
if (propertySetElement.getAttribute(XML.ignoreNamespace.toString()) != null) { //.... Get the names of the namespaces to ignore when creating the new named manager String[] ns = propertySetElement.getAttributeValue(XML.ignoreNamespace.toString()).split(":"); Set<String> nset = new HashSet<>(); Collections.addAll(nset, ns); PropertyManager parentPropertyManager = propertyManagers.get(parentName); namedManager = new PropertyManager(generalManager); //.... Copy over the default namespace (it can not be ignored) namedManager.setProperties(DEFAULT_NAMESPACE, parentPropertyManager.getProperties(DEFAULT_NAMESPACE)); //.... Copy over the other namespaces unless their name is part of the ignore set for (Namespace namespace : parentPropertyManager.getNamespaces()) { if (!nset.contains(namespace.toString())) { namedManager.setProperties(namespace, parentPropertyManager.getProperties(namespace)); } } //.... No namespaces to ignore } else { namedManager = new PropertyManager(generalManager, propertyManagers.get(parentName)); } //.... 
No parent to inherit from } else { namedManager = new PropertyManager(generalManager); } namedManager.setProperties(propertySetElement); String name = propertySetElement.getAttributeValue(XML.name.toString()).trim(); propertyManagers.put(name, namedManager); } } } else { propertyManagers = new HashMap<>(); } return propertyManagers; } /** * * @param avoidOverwrites */ public static void setDefaultAvoidOverwrites(boolean avoidOverwrites) { DEFAULT_AVOID_OVERWRITES = avoidOverwrites; } /** * * @param resolutionPolicy */ public void setResolutionPolicy(ResolutionPolicy resolutionPolicy) { if (resolutionPolicy == null) { throw new IllegalArgumentException("resolutionPolicy may not be null"); } this.resolutionPolicy = resolutionPolicy; resolveMacros = resolutionPolicy != ResolutionPolicy.NONE; } /** * * @return */ public ResolutionPolicy getResolutionPolicy() { return resolutionPolicy; } /** * * @param patternString */ public void setMacroPattern(String patternString) { if (patternString == null) { throw new IllegalArgumentException("patternString may not be null"); } macroMatcher = Pattern.compile(patternString).matcher(""); } /** * * @param avoidOverwrites */ public void setAvoidOverwrites(boolean avoidOverwrites) { this.avoidOverwrites = avoidOverwrites; } /** * Return a collection of all namespaces which are actually used for * properties * * @return */ public Collection<Namespace> getNamespaces() { return namespaceData.keySet(); } /** * * @return */ public boolean doesAvoidOverwrites() { return avoidOverwrites; } /** * Check if the namespace provided is used for one or more properties * * @param namespace * @return */ public boolean containsNamespace(Namespace namespace) { if (namespace == null) { throw new IllegalArgumentException("namespace may not be null"); } return namespaceData.containsKey(namespace); } /** * Set a property in the default namespace * * @param key * @param value */ public void setProperty(String key, String value) { 
setProperty(DEFAULT_NAMESPACE, key, value); } /** * * @param key * @param value */ public void setProperty(Enum key, String value) { setProperty(DEFAULT_NAMESPACE, key.toString(), value); } /** * * @param namespace * @param key * @param value */ public void setProperty(Namespace namespace, Enum key, String value) { setProperty(namespace, key.toString(), value); } /** * Set a property in the namespace provided * * @param namespace * @param key * @param value */ public void setProperty(Namespace namespace, String key, String value) { if (namespace == null) { throw new IllegalArgumentException("namespace may not be null"); } if (key == null) { throw new IllegalArgumentException("key may not be null"); } if (value == null) { throw new IllegalArgumentException("value may not be null"); } if (avoidOverwrites && namespaceData.containsKey(namespace) && namespaceData.get(namespace).containsKey(key)) { return; } if (!namespaceData.containsKey(namespace)) { namespaceData.put(namespace, new TreeMap<>()); } if (!resolveMacros) { namespaceData.get(namespace).put(key, value); } else { namespaceData.get(namespace).put(key, resolveMacros(namespace, value).getValue()); backwardResolveMacros(); // Resolve backward references } } /** * Add all the properties to the default namespace; the actual namespace * used may be overridden based on the rules described for * {@link #setProperties(Namespace, Element)} * * @param element */ public final void setProperties(Element element) { setProperties(DEFAULT_NAMESPACE, element); } /** * Add all the properties to the namespace provided. The actual namespace * depends on some more conditions or options: * <ol> * <li> first choice is the namespace provided here * <li> this can be overridden if a different namespace is provided as an * attribute to the &lt;properties&gt; element. 
 * This cascades down to all the &lt;property&gt; elements below
 * <li> this again can be overridden if a different namespace is provided as
 * an attribute to a &lt;property&gt; element below the &lt;properties&gt;
 * element
 * </ol>
 *
 * @param namespace fallback namespace for properties that carry no explicit
 *                  namespace attribute (may be overridden as described above);
 *                  must not be null
 * @param element   parent XML element expected to contain a
 *                  &lt;properties&gt; child; must not be null
 */
public final void setProperties(Namespace namespace, Element element) {
    if (namespace == null) {
        throw new IllegalArgumentException("namespace may not be null");
    }
    if (element == null) {
        throw new IllegalArgumentException("element may not be null");
    }
    Element propertiesElement = element.getChild(XML.properties.toString());
    if (propertiesElement != null) {
        //.... Check if there is a namespace attribute at the properties level
        Namespace topNamespace = null;
        if (propertiesElement.getAttribute(XML.namespace.toString()) != null) {
            topNamespace = new Namespace(propertiesElement.getAttributeValue(XML.namespace.toString()));
        }
        //.... Now check all children
        for (Element propertyElement : propertiesElement.getChildren(XML.property.toString())) {
            //.... Check if a name has been specified
            if (propertyElement.getAttribute(XML.name.toString()) == null) {
                throw new IllegalArgumentException("Missing property attribute: " + XML.name.toString());
            }
            //.... First choice: if a namespace was given as argument, take that one
            Namespace actualNamespace = namespace;
            //.... Second approach: if there is an explicit namespace at the properties element level, take that one
            if (topNamespace != null) {
                actualNamespace = topNamespace;
            }
            //.... Third approach: do we have an explicit namespace for this particular property? This overrides everything else
            if (propertyElement.getAttribute(XML.namespace.toString()) != null) {
                actualNamespace = new Namespace(propertyElement.getAttributeValue(XML.namespace.toString()));
            }
            //.... Now finally set the property in the correct namespace
            setProperty(actualNamespace, propertyElement.getAttributeValue(XML.name.toString()), propertyElement.getTextTrim());
        }
    }
}

/**
 * Adds all the given properties to the default namespace.
 *
 * @param properties key/value pairs to add; must not be null
 */
public final void setProperties(Map<String, String> properties) {
    setProperties(DEFAULT_NAMESPACE, properties);
}

/**
 * Adds all the given properties to the namespace provided.
 *
 * @param namespace  target namespace; must not be null
 * @param properties key/value pairs to add; must not be null
 */
public final void setProperties(Namespace namespace, Map<String, String> properties) {
    if (namespace == null) {
        throw new IllegalArgumentException("namespace may not be null");
    }
    if (properties == null) {
        throw new IllegalArgumentException("properties may not be null");
    }
    for (String key : properties.keySet()) {
        setProperty(namespace, key, properties.get(key));
    }
}

/**
 * Set all the properties in the specified PropertyManager instance in the
 * current one (every namespace of the source is copied).
 *
 * NOTE(review): this method throws NullPointerException for a null argument
 * while the sibling setters throw IllegalArgumentException - consider
 * unifying the exception type.
 *
 * @param propertyManager source instance to copy from; must not be null
 */
public void setProperties(PropertyManager propertyManager) {
    if (propertyManager == null) {
        throw new NullPointerException("propertyManager may not be null");
    }
    for (Namespace namespace : propertyManager.getNamespaces()) {
        setProperties(namespace, propertyManager.getProperties(namespace));
    }
}

/**
 * Get a property in the default namespace.
 *
 * @param key property name; must not be null
 * @return the value, or null if the property does not exist
 */
public String getProperty(String key) {
    return getProperty(DEFAULT_NAMESPACE, key);
}

/**
 * Get a property in the default namespace, using the enum constant's name
 * as the key.
 *
 * @param key enum constant whose toString() is the property name
 * @return the value, or null if the property does not exist
 */
public String getProperty(Enum key) {
    return getProperty(DEFAULT_NAMESPACE, key.toString());
}

/**
 * Get a property in the given namespace, using the enum constant's name as
 * the key.
 *
 * @param namespace namespace to look in; must not be null
 * @param key       enum constant whose toString() is the property name
 * @return the value, or null if the property does not exist
 */
public String getProperty(Namespace namespace, Enum key) {
    return getProperty(namespace, key.toString());
}

/**
 * Get a property in the namespace provided.
 *
 * @param namespace namespace to look in; must not be null
 * @param key       property name; must not be null
 * @return the value, or null if the property does not exist
 */
public String getProperty(Namespace namespace, String key) {
    if (namespace == null) {
        throw new IllegalArgumentException("namespace may not be null");
    }
    if (key == null) {
        throw new IllegalArgumentException("key may not be null");
    }
    if (!containsProperty(namespace, key)) {
        return null;
    }
    return namespaceData.get(namespace).get(key);
}

/**
 * This can be quite handy to extract data from a property with a default
 * value in case it does not exist or is ill-formatted. One can argue
 * whether it is a good idea to return the default even if the property is
 * ill-formed, but for now let's try this.
 *
 * @param namespace    namespace to look in; must not be null
 * @param key          property name; must not be null
 * @param defaultValue value returned when the property is missing or not a
 *                     parseable integer
 * @return the parsed int value, or defaultValue
 */
public int getInt(Namespace namespace, String key, int defaultValue) {
    if (namespace == null) {
        throw new NullPointerException("namespace may not be null");
    }
    if (key == null) {
        throw new NullPointerException("key may not be null");
    }
    if (!containsProperty(namespace, key)) {
        return defaultValue;
    }
    try {
        return Integer.parseInt(getProperty(namespace, key));
    } catch (NumberFormatException ex) {
        // Deliberately lenient: an ill-formed number also yields the default.
        return defaultValue;
    }
}

/**
 * Int lookup in the default namespace.
 *
 * @param key          property name; must not be null
 * @param defaultValue fallback for missing/ill-formed values
 * @return the parsed int value, or defaultValue
 */
public int getInt(String key, int defaultValue) {
    return getInt(DEFAULT_NAMESPACE, key, defaultValue);
}

/**
 * Int lookup in the default namespace by enum-named key.
 *
 * @param key          enum constant whose toString() is the property name
 * @param defaultValue fallback for missing/ill-formed values
 * @return the parsed int value, or defaultValue
 */
public int getInt(Enum key, int defaultValue) {
    return getInt(DEFAULT_NAMESPACE, key.toString(), defaultValue);
}

/**
 * Int lookup in the given namespace by enum-named key.
 *
 * @param namespace    namespace to look in; must not be null
 * @param key          enum constant whose toString() is the property name
 * @param defaultValue fallback for missing/ill-formed values
 * @return the parsed int value, or defaultValue
 */
public int getInt(Namespace namespace, Enum key, int defaultValue) {
    return getInt(namespace, key.toString(), defaultValue);
}

/**
 * String lookup with a default for missing properties.
 *
 * @param namespace    namespace to look in; must not be null
 * @param key          property name; must not be null
 * @param defaultValue value returned when the property does not exist
 * @return the property value, or defaultValue
 */
public String getString(Namespace namespace, String key, String defaultValue) {
    if (namespace == null) {
        throw new NullPointerException("namespace may not be null");
    }
    if (key == null) {
        throw new NullPointerException("key may not be null");
    }
    if (!containsProperty(namespace, key)) {
        return defaultValue;
    }
    return getProperty(namespace, key);
}

/**
 * String lookup in the default namespace with a default value.
 *
 * @param key          property name; must not be null
 * @param defaultValue value returned when the property does not exist
 * @return the property value, or defaultValue
 */
public String getString(String key, String defaultValue) {
    return getString(DEFAULT_NAMESPACE, key, defaultValue);
}

/**
 * String lookup in the given namespace by enum-named key.
 *
 * @param namespace    namespace to look in; must not be null
 * @param key          enum constant whose toString() is the property name
 * @param defaultValue value returned when the property does not exist
 * @return the property value, or defaultValue
 */
public String getString(Namespace namespace, Enum key, String defaultValue) {
    return getString(namespace, key.toString(), defaultValue);
}

/**
 * String lookup in the default namespace by enum-named key.
 *
 * @param key          enum constant whose toString() is the property name
 * @param defaultValue value returned when the property does not exist
 * @return the property value, or defaultValue
 */
public String getString(Enum key, String defaultValue) {
    return getString(DEFAULT_NAMESPACE, key.toString(), defaultValue);
}

/**
 * Check if the property exists in the default namespace.
 *
 * @param key property name; must not be null
 * @return true if the property exists
 */
public boolean containsProperty(String key) {
    return containsProperty(DEFAULT_NAMESPACE, key);
}

/**
 * Existence check in the default namespace by enum-named key.
 *
 * @param key enum constant whose toString() is the property name
 * @return true if the property exists
 */
public boolean containsProperty(Enum key) {
    return containsProperty(DEFAULT_NAMESPACE, key.toString());
}

/**
 * Check for a property in the default namespace whose trimmed value is
 * non-empty.
 *
 * @param key property name; must not be null
 * @return true if the property exists and has non-blank content
 */
public boolean containsNonEmptyProperty(String key) {
    return containsNonEmptyProperty(DEFAULT_NAMESPACE, key);
}

/**
 * Non-empty check in the default namespace by enum-named key.
 *
 * @param key enum constant whose toString() is the property name
 * @return true if the property exists and has non-blank content
 */
public boolean containsNonEmptyProperty(Enum key) {
    return containsNonEmptyProperty(DEFAULT_NAMESPACE, key.toString());
}

/**
 * Existence check in the given namespace by enum-named key.
 *
 * @param namespace namespace to look in; must not be null
 * @param key       enum constant whose toString() is the property name
 * @return true if the property exists
 */
public boolean containsProperty(Namespace namespace, Enum key) {
    return containsProperty(namespace, key.toString());
}

/**
 * Check if the property exists in the namespace provided.
 *
 * @param namespace namespace to look in; must not be null
 * @param key       property name; must not be null
 * @return true if the namespace is known and contains the key
 */
public boolean containsProperty(Namespace namespace, String key) {
    if (namespace == null) {
        throw new IllegalArgumentException("namespace may not be null");
    }
    if (key == null) {
        throw new IllegalArgumentException("key may not be null");
    }
    if (!namespaceData.containsKey(namespace)) {
        return false;
    }
    return namespaceData.get(namespace).containsKey(key);
}

/**
 * Non-empty check in the given namespace by enum-named key.
 *
 * @param namespace namespace to look in; must not be null
 * @param key       enum constant whose toString() is the property name
 * @return true if the property exists and has non-blank content
 */
public boolean containsNonEmptyProperty(Namespace namespace, Enum key) {
    return containsNonEmptyProperty(namespace, key.toString());
}

/**
 * Check for a property in the given namespace whose trimmed value is
 * non-empty.
 *
 * @param namespace namespace to look in; must not be null
 * @param key       property name; must not be null
 * @return true if the property exists and its trimmed value has length > 0
 */
public boolean containsNonEmptyProperty(Namespace namespace, String key) {
    if (namespace == null) {
        throw new IllegalArgumentException("namespace may not be null");
    }
    if (key == null) {
        throw new IllegalArgumentException("key may not be null");
    }
    if (!namespaceData.containsKey(namespace)) {
        return false;
    }
    return namespaceData.get(namespace).containsKey(key)
            && namespaceData.get(namespace).get(key).trim().length() > 0;
}

/**
 * Return all properties in the default namespace.
 *
 * @return the default namespace's property map (see note on the overload)
 */
public Map<String, String> getProperties() {
    return getProperties(DEFAULT_NAMESPACE);
}

/**
 * Return all properties in the namespace given.
 *
 * NOTE(review): for a known namespace this returns the INTERNAL map, so
 * callers can mutate this manager's state; an unknown namespace yields a
 * fresh empty map. Consider returning a copy/unmodifiable view - confirm
 * no caller relies on the live view.
 *
 * @param namespace namespace to dump; must not be null
 * @return map of key to value for that namespace (possibly empty)
 */
public Map<String, String> getProperties(Namespace namespace) {
    if (namespace == null) {
        throw new IllegalArgumentException("namespace may not be null");
    }
    if (namespaceData.containsKey(namespace)) {
        return namespaceData.get(namespace);
    } else {
        return new HashMap<>();
    }
}

/**
 * Check whether the given element has a &lt;properties&gt; child at all.
 *
 * @param element element to inspect; must not be null
 * @return true if a properties child element is present
 */
public static boolean containsPropertiesElement(Element element) {
    if (element == null) {
        throw new IllegalArgumentException("element may not be null");
    }
    return element.getChild(XML.properties.toString()) != null;
}

/**
 * Validate that every given key exists in the default namespace, throwing
 * on the first missing one.
 *
 * @param enums property keys (by enum name) that must all be present
 */
public void validatePropertyNames(Enum... enums) {
    validatePropertyNames(DEFAULT_NAMESPACE, enums);
}

/**
 * Validate that every given key exists in the namespace provided, throwing
 * on the first missing one.
 *
 * @param namespace namespace to check; must not be null
 * @param enums     property keys (by enum name) that must all be present
 */
public void validatePropertyNames(Namespace namespace, Enum... enums) {
    if (enums == null) {
        throw new IllegalArgumentException("enums may not be null");
    }
    if (namespace == null) {
        throw new IllegalArgumentException("namespace may not be null");
    }
    for (Enum e : enums) {
        if (!containsProperty(namespace, e.toString())) {
            if (namespace.equals(DEFAULT_NAMESPACE)) {
                throw new IllegalArgumentException("Missing property key '" + e.toString() + "' in default namespace");
            } else {
                throw new IllegalArgumentException("Missing property key '" + e.toString() + "' in namespace " + namespace);
            }
        }
    }
}

/**
 * Validate that EVERY constant of the given enum type exists as a key in
 * the default namespace.
 *
 * @param <E>          enum type whose constants are the expected keys
 * @param propertyEnum any constant of that enum (used to reach the type)
 */
public <E extends Enum> void validateAllPropertyNames(E propertyEnum) {
    validateAllPropertyNames(DEFAULT_NAMESPACE, propertyEnum);
}

/**
 * Validate that EVERY constant of the given enum type exists as a key in
 * the namespace provided.
 *
 * @param <E>          enum type whose constants are the expected keys
 * @param namespace    namespace to check; must not be null
 * @param propertyEnum any constant of that enum (used to reach the type)
 */
public <E extends Enum> void validateAllPropertyNames(Namespace namespace, E propertyEnum) {
    if (namespace == null) {
        throw new IllegalArgumentException("namespace may not be null");
    }
    if (propertyEnum == null) {
        throw new IllegalArgumentException("propertyEnum may not be null");
    }
    for (Enum e : propertyEnum.getClass().getEnumConstants()) {
        if (!containsProperty(namespace, e.toString())) {
            if (namespace.equals(DEFAULT_NAMESPACE)) {
                throw new IllegalArgumentException("Missing property key '" + e.toString() + "' in default namespace");
            } else {
                throw new IllegalArgumentException("Missing property key '" + e.toString() + "' in namespace " + namespace);
            }
        }
    }
}

/**
 * Expand macro references inside a property value according to the current
 * resolution policy.
 *
 * NOTE(review): macroMatcher appears to be a shared instance field that is
 * reset here, so this method is not safe for concurrent use - confirm.
 * Also note that foundReplacement is set to true whenever a macro PATTERN
 * matches, even if the referenced key exists in no namespace and the macro
 * text is therefore left in place untouched.
 *
 * @param namespace namespace used for WITHIN_NAMESPACE resolution; must not be null
 * @param testValue raw value possibly containing macro references; must not be null
 * @return result carrying the expanded value and whether any macro matched
 */
private ResolutionResult resolveMacros(Namespace namespace, String testValue) {
    if (namespace == null) {
        throw new IllegalArgumentException("namespace may not be null");
    }
    if (testValue == null) {
        throw new IllegalArgumentException("testValue may not be null");
    }
    ResolutionResult resolutionResult = new ResolutionResult();
    StringBuffer sb = new StringBuffer(50);
    macroMatcher.reset(testValue);
    while (macroMatcher.find()) {
        resolutionResult.setFoundReplacement(true);
        String referencedKey = macroMatcher.group(1);
        switch (resolutionPolicy) {
            case ALL_NAMESPACES:
                // NOTE(review): 'found' is never read after the loop - dead local.
                boolean found = false;
                for (Namespace ns : namespaceData.keySet()) {
                    if (containsProperty(ns, referencedKey)) {
                        macroMatcher.appendReplacement(sb, getProperty(ns, referencedKey));
                        found = true;
                        break;
                    }
                }
                break; // No fall-through required as we have covered all namespaces
            case WITHIN_NAMESPACE:
                if (containsProperty(namespace, referencedKey)) {
                    macroMatcher.appendReplacement(sb, getProperty(namespace, referencedKey));
                }
                break;
        }
    }
    macroMatcher.appendTail(sb);
    resolutionResult.setValue(sb.toString());
    return resolutionResult;
}

/**
 * Debug dump: one "(namespace): key - value" line per property; the
 * default namespace prints as "()".
 *
 * @return multi-line listing of all namespaces and their properties
 */
@Override
public String toString() {
    StringBuilder sb = new StringBuilder(300);
    for (Namespace namespace : namespaceData.keySet()) {
        boolean isDefault = namespace.equals(DEFAULT_NAMESPACE);
        for (String key : getProperties(namespace).keySet()) {
            if (isDefault) {
                sb.append("(): ");
                sb.append(key);
                sb.append(" - ");
                sb.append(getProperty(namespace, key));
                sb.append("\n");
            } else {
                sb.append("(");
                sb.append(namespace);
                sb.append("): ");
                sb.append(key);
                sb.append(" - ");
                sb.append(getProperty(namespace, key));
                sb.append("\n");
            }
        }
    }
    return sb.toString();
}

/**
 * Re-resolve macros in every stored value, writing expansions back in
 * place. Currently we can not detect circular references, which is a
 * stupid thing to do in the first place.
 */
private void backwardResolveMacros() {
    for (Namespace namespace : namespaceData.keySet()) {
        for (String key : namespaceData.get(namespace).keySet()) {
            ResolutionResult resolutionResult = resolveMacros(namespace, namespaceData.get(namespace).get(key));
            if (resolutionResult.foundReplacement()) {
                namespaceData.get(namespace).put(key, resolutionResult.getValue());
            }
        }
    }
}

/**
 * Extract the &lt;properties&gt; child of the given element into a plain
 * map. Remark: this unfortunately does not account for namespace
 * information.
 *
 * @param element element that must contain a &lt;properties&gt; child
 * @return map of property name to trimmed text value
 */
public static Map<String, String> extractProperties(Element element) {
    if (element == null) {
        throw new IllegalArgumentException("element may not be null");
    }
    if (element.getChild(XML.properties.toString()) != null) {
        Map<String, String> p = new HashMap<>();
        for (Element propertyElement : element.getChild(XML.properties.toString()).getChildren(XML.property.toString())) {
            p.put(propertyElement.getAttributeValue(XML.name.toString()), propertyElement.getTextTrim());
        }
        return p;
    } else {
        throw new IllegalArgumentException("element does not contain child: " + XML.properties.toString());
    }
}
}
/* * Copyright 2014, Google Inc. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following disclaimer * in the documentation and/or other materials provided with the * distribution. * * * Neither the name of Google Inc. nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package io.grpc.transport; import com.google.common.base.Preconditions; import io.grpc.Status; import java.io.Closeable; import java.io.IOException; import java.io.InputStream; import java.util.zip.GZIPInputStream; import javax.annotation.concurrent.NotThreadSafe; /** * Deframer for GRPC frames. * * <p>This class is not thread-safe. All calls to public methods should be made in the transport * thread. 
 */
@NotThreadSafe
public class MessageDeframer implements Closeable {
  // GRPC frame header: 1 flag byte + 4-byte big-endian length.
  private static final int HEADER_LENGTH = 5;
  // Low bit of the flag byte marks a compressed frame.
  private static final int COMPRESSED_FLAG_MASK = 1;
  // All remaining flag bits are reserved and must be zero.
  private static final int RESERVED_MASK = 0xFE;

  public enum Compression {
    NONE, GZIP
  }

  /**
   * A listener of deframing events.
   */
  public interface Listener {

    /**
     * Called when the given number of bytes has been read from the input source of the deframer.
     *
     * @param numBytes the number of bytes read from the deframer's input source.
     */
    void bytesRead(int numBytes);

    /**
     * Called to deliver the next complete message.
     *
     * @param is stream containing the message.
     */
    void messageRead(InputStream is);

    /**
     * Called when end-of-stream has not yet been reached but there are no complete messages
     * remaining to be delivered.
     */
    void deliveryStalled();

    /**
     * Called when the stream is complete and all messages have been successfully delivered.
     */
    void endOfStream();
  }

  // Two-phase frame parser: read the 5-byte header, then the body it announces.
  private enum State {
    HEADER, BODY
  }

  private final Listener listener;
  private final Compression compression;
  private State state = State.HEADER;
  // Bytes nextFrame must accumulate before the current state can be processed.
  private int requiredLength = HEADER_LENGTH;
  private boolean compressedFlag;
  private boolean endOfStream;
  // Bytes of the frame currently being assembled; null between frames.
  private CompositeReadableBuffer nextFrame;
  // Incoming bytes not yet moved into nextFrame; null means the deframer is closed.
  private CompositeReadableBuffer unprocessed = new CompositeReadableBuffer();
  // Messages requested via request() but not yet delivered.
  private long pendingDeliveries;
  private boolean deliveryStalled = true;
  // Reentrancy guard for deliver() (direct executors may call back into request()).
  private boolean inDelivery = false;

  /**
   * Creates a deframer. Compression will not be supported.
   *
   * @param listener listener for deframer events.
   */
  public MessageDeframer(Listener listener) {
    this(listener, Compression.NONE);
  }

  /**
   * Create a deframer.
   *
   * @param listener listener for deframer events.
   * @param compression the compression used if a compressed frame is encountered, with {@code NONE}
   *        meaning unsupported
   */
  public MessageDeframer(Listener listener, Compression compression) {
    this.listener = Preconditions.checkNotNull(listener, "sink");
    this.compression = Preconditions.checkNotNull(compression, "compression");
  }

  /**
   * Requests up to the given number of messages from the call to be delivered to
   * {@link Listener#messageRead(InputStream)}. No additional messages will be delivered.
   *
   * <p>If {@link #close()} has been called, this method will have no effect.
   *
   * @param numMessages the requested number of messages to be delivered to the listener.
   */
  public void request(int numMessages) {
    Preconditions.checkArgument(numMessages > 0, "numMessages must be > 0");
    if (isClosed()) {
      return;
    }
    pendingDeliveries += numMessages;
    deliver();
  }

  /**
   * Adds the given data to this deframer and attempts delivery to the sink.
   *
   * @param data the raw data read from the remote endpoint. Must be non-null.
   * @param endOfStream if {@code true}, indicates that {@code data} is the end of the stream from
   *        the remote endpoint.
   * @throws IllegalStateException if {@link #close()} has been called previously or if
   *         {@link #deframe(ReadableBuffer, boolean)} has previously been called with
   *         {@code endOfStream=true}.
   */
  public void deframe(ReadableBuffer data, boolean endOfStream) {
    Preconditions.checkNotNull(data, "data");
    // If the precondition checks throw, we still own data and must release it.
    boolean needToCloseData = true;
    try {
      checkNotClosed();
      Preconditions.checkState(!this.endOfStream, "Past end of stream");

      needToCloseData = false;
      unprocessed.addBuffer(data);

      // Indicate that all of the data for this stream has been received.
      this.endOfStream = endOfStream;
      deliver();
    } finally {
      if (needToCloseData) {
        data.close();
      }
    }
  }

  /**
   * Indicates whether delivery is currently stalled, pending receipt of more data.
   */
  public boolean isStalled() {
    return deliveryStalled;
  }

  /**
   * Closes this deframer and frees any resources. After this method is called, additional
   * calls will have no effect.
   */
  @Override
  public void close() {
    try {
      if (unprocessed != null) {
        unprocessed.close();
      }
      if (nextFrame != null) {
        nextFrame.close();
      }
    } finally {
      // unprocessed == null doubles as the "closed" flag (see isClosed()).
      unprocessed = null;
      nextFrame = null;
    }
  }

  /**
   * Indicates whether or not this deframer has been closed.
   */
  public boolean isClosed() {
    return unprocessed == null;
  }

  /**
   * Throws if this deframer has already been closed.
   */
  private void checkNotClosed() {
    Preconditions.checkState(!isClosed(), "MessageDeframer is already closed");
  }

  /**
   * Reads and delivers as many messages to the sink as possible.
   */
  private void deliver() {
    // We can have reentrancy here when using a direct executor, triggered by calls to
    // request more messages. This is safe as we simply loop until pendingDelivers = 0
    if (inDelivery) {
      return;
    }
    inDelivery = true;
    try {
      // Process the uncompressed bytes.
      boolean stalled = false;
      while (pendingDeliveries > 0 && readRequiredBytes()) {
        switch (state) {
          case HEADER:
            processHeader();
            break;
          case BODY:
            // Read the body and deliver the message.
            processBody();

            // Since we've delivered a message, decrement the number of pending
            // deliveries remaining.
            pendingDeliveries--;
            break;
          default:
            throw new AssertionError("Invalid state: " + state);
        }
      }

      // We are stalled when there are no more bytes to process. This allows delivering errors as
      // soon as the buffered input has been consumed, independent of whether the application
      // has requested another message.
      stalled = !isDataAvailable();

      if (endOfStream) {
        if (!isDataAvailable()) {
          listener.endOfStream();
        } else if (stalled) {
          // NOTE(review): stalled was just assigned !isDataAvailable(), and this branch is
          // only reached when isDataAvailable() is true, so stalled is false here and this
          // "end-of-stream mid-frame" error appears unreachable - confirm intended condition.
          // We've received the entire stream and have data available but we don't have
          // enough to read the next frame ... this is bad.
          throw Status.INTERNAL.withDescription("Encountered end-of-stream mid-frame")
              .asRuntimeException();
        }
      }

      // Never indicate that we're stalled if we've received all the data for the stream.
      stalled &= !endOfStream;

      // If we're transitioning to the stalled state, notify the listener.
      boolean previouslyStalled = deliveryStalled;
      deliveryStalled = stalled;
      if (stalled && !previouslyStalled) {
        listener.deliveryStalled();
      }
    } finally {
      inDelivery = false;
    }
  }

  // True when any bytes remain, either unconsumed input or a partially assembled frame.
  private boolean isDataAvailable() {
    return unprocessed.readableBytes() > 0 || (nextFrame != null && nextFrame.readableBytes() > 0);
  }

  /**
   * Attempts to read the required bytes into nextFrame.
   *
   * @return {@code true} if all of the required bytes have been read.
   */
  private boolean readRequiredBytes() {
    int totalBytesRead = 0;
    try {
      if (nextFrame == null) {
        nextFrame = new CompositeReadableBuffer();
      }

      // Read until the buffer contains all the required bytes.
      int missingBytes;
      while ((missingBytes = requiredLength - nextFrame.readableBytes()) > 0) {
        if (unprocessed.readableBytes() == 0) {
          // No more data is available.
          return false;
        }
        int toRead = Math.min(missingBytes, unprocessed.readableBytes());
        totalBytesRead += toRead;
        nextFrame.addBuffer(unprocessed.readBytes(toRead));
      }
      return true;
    } finally {
      // Report consumption even on the partial-read path so flow control stays accurate.
      if (totalBytesRead > 0) {
        listener.bytesRead(totalBytesRead);
      }
    }
  }

  /**
   * Processes the GRPC compression header which is composed of the compression flag and the outer
   * frame length.
   */
  private void processHeader() {
    int type = nextFrame.readUnsignedByte();
    if ((type & RESERVED_MASK) != 0) {
      throw Status.INTERNAL.withDescription("Frame header malformed: reserved bits not zero")
          .asRuntimeException();
    }
    compressedFlag = (type & COMPRESSED_FLAG_MASK) != 0;

    // Update the required length to include the length of the frame.
    // NOTE(review): no upper bound is enforced on the declared frame length here, so a
    // peer-supplied length is trusted as-is - confirm a max-message-size check exists upstream.
    requiredLength = nextFrame.readInt();

    // Continue reading the frame body.
    state = State.BODY;
  }

  /**
   * Processes the body of the GRPC compression frame. A single compression frame may contain
   * several GRPC messages within it.
   */
  private void processBody() {
    InputStream stream = compressedFlag ? getCompressedBody() : getUncompressedBody();
    // Ownership of the frame bytes moves into the stream handed to the listener.
    nextFrame = null;
    listener.messageRead(stream);

    // Done with this frame, begin processing the next header.
    state = State.HEADER;
    requiredLength = HEADER_LENGTH;
  }

  private InputStream getUncompressedBody() {
    return ReadableBuffers.openStream(nextFrame, true);
  }

  private InputStream getCompressedBody() {
    if (compression == Compression.NONE) {
      throw Status.INTERNAL.withDescription(
          "Can't decode compressed frame as compression not configured.").asRuntimeException();
    }
    if (compression != Compression.GZIP) {
      throw new AssertionError("Unknown compression type");
    }

    try {
      return new GZIPInputStream(ReadableBuffers.openStream(nextFrame, true));
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }
}
package org.wso2.developerstudio.eclipse.gmf.esb.diagram.providers.assistants; import java.util.ArrayList; import java.util.LinkedList; import java.util.List; import org.eclipse.core.runtime.IAdaptable; import org.eclipse.gmf.runtime.diagram.ui.editparts.IGraphicalEditPart; import org.eclipse.gmf.runtime.emf.type.core.IElementType; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.APIResourceEndpointInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.APIResourceFaultInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.APIResourceInSequenceInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.APIResourceInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.AddressEndPointInputConnector2EditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.AddressEndPointInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.AddressingEndpointInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.AggregateMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.BAMMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.BeanMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.BuilderMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CacheMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CallMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CallTemplateMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CalloutMediatorInputConnectorEditPart; import 
org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ClassMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CloneMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CloudConnectorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CloudConnectorOperationInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CommandMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ConditionalRouterMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DBLookupMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DBReportMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DataMapperMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DefaultEndPointInputConnector2EditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DefaultEndPointInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DropMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EJBMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EnqueueMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EnrichMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EntitlementMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EventMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.FailoverEndPointInputConnector2EditPart; import 
org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.FailoverEndPointInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.FastXSLTMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.FaultMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.FilterMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ForEachMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.HTTPEndPointInputConnector2EditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.HTTPEndPointInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.HeaderMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.InboundEndpointOnErrorSequenceInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.InboundEndpointSequenceInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.IterateMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.JsonTransformMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.LoadBalanceEndPointInputConnector2EditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.LoadBalanceEndPointInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.LogMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.LoopBackMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.MergeNodeFirstInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.MergeNodeSecondInputConnectorEditPart; import 
org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.MessageInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.NamedEndpointInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.OAuthMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.PayloadFactoryMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.PropertyGroupMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.PropertyMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ProxyFaultInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ProxyInSequenceInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ProxyInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.PublishEventMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.PublishEventMediatorOutputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RMSequenceMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RecipientListEndPointInputConnector2EditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RecipientListEndPointInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RespondMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RouterMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RuleMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ScriptMediatorInputConnectorEditPart; import 
org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SendMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SequenceInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SequencesInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SmooksMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SpringMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.StoreMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SwitchMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.TemplateEndpointInputConnector2EditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.TemplateEndpointInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ThrottleMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.TransactionMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.URLRewriteMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ValidateMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.WSDLEndPointInputConnector2EditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.WSDLEndPointInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.XQueryMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.XSLTMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.providers.EsbElementTypes; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.providers.EsbModelingAssistantProvider; /** * @generated */ public class 
EsbModelingAssistantProviderOfPublishEventMediatorOutputConnectorEditPart extends EsbModelingAssistantProvider {

    /**
     * Edit-part classes that may act as the target of an {@code EsbLink_4001}
     * connection drawn from a publish-event mediator output connector. The
     * order mirrors the instanceof chain of the generated code this replaces.
     *
     * @generated NOT
     */
    private static final Class<?>[] LINK_TARGET_EDIT_PARTS = {
            ProxyInputConnectorEditPart.class,
            ProxyFaultInputConnectorEditPart.class,
            DropMediatorInputConnectorEditPart.class,
            PropertyMediatorInputConnectorEditPart.class,
            PropertyGroupMediatorInputConnectorEditPart.class,
            ThrottleMediatorInputConnectorEditPart.class,
            FilterMediatorInputConnectorEditPart.class,
            LogMediatorInputConnectorEditPart.class,
            EnrichMediatorInputConnectorEditPart.class,
            XSLTMediatorInputConnectorEditPart.class,
            SwitchMediatorInputConnectorEditPart.class,
            SequenceInputConnectorEditPart.class,
            EventMediatorInputConnectorEditPart.class,
            EntitlementMediatorInputConnectorEditPart.class,
            ClassMediatorInputConnectorEditPart.class,
            SpringMediatorInputConnectorEditPart.class,
            ScriptMediatorInputConnectorEditPart.class,
            FaultMediatorInputConnectorEditPart.class,
            XQueryMediatorInputConnectorEditPart.class,
            CommandMediatorInputConnectorEditPart.class,
            DBLookupMediatorInputConnectorEditPart.class,
            DBReportMediatorInputConnectorEditPart.class,
            SmooksMediatorInputConnectorEditPart.class,
            SendMediatorInputConnectorEditPart.class,
            HeaderMediatorInputConnectorEditPart.class,
            CloneMediatorInputConnectorEditPart.class,
            CacheMediatorInputConnectorEditPart.class,
            IterateMediatorInputConnectorEditPart.class,
            CalloutMediatorInputConnectorEditPart.class,
            TransactionMediatorInputConnectorEditPart.class,
            RMSequenceMediatorInputConnectorEditPart.class,
            RuleMediatorInputConnectorEditPart.class,
            OAuthMediatorInputConnectorEditPart.class,
            AggregateMediatorInputConnectorEditPart.class,
            StoreMediatorInputConnectorEditPart.class,
            BuilderMediatorInputConnectorEditPart.class,
            CallTemplateMediatorInputConnectorEditPart.class,
            PayloadFactoryMediatorInputConnectorEditPart.class,
            EnqueueMediatorInputConnectorEditPart.class,
            URLRewriteMediatorInputConnectorEditPart.class,
            ValidateMediatorInputConnectorEditPart.class,
            RouterMediatorInputConnectorEditPart.class,
            ConditionalRouterMediatorInputConnectorEditPart.class,
            BAMMediatorInputConnectorEditPart.class,
            BeanMediatorInputConnectorEditPart.class,
            EJBMediatorInputConnectorEditPart.class,
            DefaultEndPointInputConnectorEditPart.class,
            AddressEndPointInputConnectorEditPart.class,
            FailoverEndPointInputConnectorEditPart.class,
            RecipientListEndPointInputConnectorEditPart.class,
            WSDLEndPointInputConnectorEditPart.class,
            NamedEndpointInputConnectorEditPart.class,
            LoadBalanceEndPointInputConnectorEditPart.class,
            APIResourceEndpointInputConnectorEditPart.class,
            AddressingEndpointInputConnectorEditPart.class,
            HTTPEndPointInputConnectorEditPart.class,
            TemplateEndpointInputConnectorEditPart.class,
            CloudConnectorInputConnectorEditPart.class,
            CloudConnectorOperationInputConnectorEditPart.class,
            LoopBackMediatorInputConnectorEditPart.class,
            RespondMediatorInputConnectorEditPart.class,
            CallMediatorInputConnectorEditPart.class,
            DataMapperMediatorInputConnectorEditPart.class,
            FastXSLTMediatorInputConnectorEditPart.class,
            ForEachMediatorInputConnectorEditPart.class,
            PublishEventMediatorInputConnectorEditPart.class,
            JsonTransformMediatorInputConnectorEditPart.class,
            ProxyInSequenceInputConnectorEditPart.class,
            MessageInputConnectorEditPart.class,
            MergeNodeFirstInputConnectorEditPart.class,
            MergeNodeSecondInputConnectorEditPart.class,
            SequencesInputConnectorEditPart.class,
            DefaultEndPointInputConnector2EditPart.class,
            AddressEndPointInputConnector2EditPart.class,
            FailoverEndPointInputConnector2EditPart.class,
            RecipientListEndPointInputConnector2EditPart.class,
            WSDLEndPointInputConnector2EditPart.class,
            LoadBalanceeEndPointInputConnector2EditPart.class,
            HTTPEndPointInputConnector2EditPart.class,
            TemplateEndpointInputConnector2EditPart.class,
            APIResourceInputConnectorEditPart.class,
            APIResourceFaultInputConnectorEditPart.class,
            APIResourceInSequenceInputConnectorEditPart.class,
            InboundEndpointSequenceInputConnectorEditPart.class,
            InboundEndpointOnErrorSequenceInputConnectorEditPart.class };

    /**
     * Input-connector element types proposed as targets of an
     * {@code EsbLink_4001} relationship; same entries, in the same order, as
     * the generated add chain this replaces.
     *
     * @generated NOT
     */
    private static final IElementType[] LINK_TARGET_TYPES = {
            EsbElementTypes.ProxyInputConnector_3003,
            EsbElementTypes.ProxyFaultInputConnector_3489,
            EsbElementTypes.DropMediatorInputConnector_3008,
            EsbElementTypes.PropertyMediatorInputConnector_3033,
            EsbElementTypes.PropertyGroupMediatorInputConnector_3789,
            EsbElementTypes.ThrottleMediatorInputConnector_3121,
            EsbElementTypes.FilterMediatorInputConnector_3010,
            EsbElementTypes.LogMediatorInputConnector_3018,
            EsbElementTypes.EnrichMediatorInputConnector_3036,
            EsbElementTypes.XSLTMediatorInputConnector_3039,
            EsbElementTypes.SwitchMediatorInputConnector_3042,
            EsbElementTypes.SequenceInputConnector_3049,
            EsbElementTypes.EventMediatorInputConnector_3052,
            EsbElementTypes.EntitlementMediatorInputConnector_3055,
            EsbElementTypes.ClassMediatorInputConnector_3058,
            EsbElementTypes.SpringMediatorInputConnector_3061,
            EsbElementTypes.ScriptMediatorInputConnector_3064,
            EsbElementTypes.FaultMediatorInputConnector_3067,
            EsbElementTypes.XQueryMediatorInputConnector_3070,
            EsbElementTypes.CommandMediatorInputConnector_3073,
            EsbElementTypes.DBLookupMediatorInputConnector_3076,
            EsbElementTypes.DBReportMediatorInputConnector_3079,
            EsbElementTypes.SmooksMediatorInputConnector_3082,
            EsbElementTypes.SendMediatorInputConnector_3085,
            EsbElementTypes.HeaderMediatorInputConnector_3100,
            EsbElementTypes.CloneMediatorInputConnector_3103,
            EsbElementTypes.CacheMediatorInputConnector_3106,
            EsbElementTypes.IterateMediatorInputConnector_3109,
            EsbElementTypes.CalloutMediatorInputConnector_3115,
            EsbElementTypes.TransactionMediatorInputConnector_3118,
            EsbElementTypes.RMSequenceMediatorInputConnector_3124,
            EsbElementTypes.RuleMediatorInputConnector_3127,
            EsbElementTypes.OAuthMediatorInputConnector_3130,
            EsbElementTypes.AggregateMediatorInputConnector_3112,
            EsbElementTypes.StoreMediatorInputConnector_3589,
            EsbElementTypes.BuilderMediatorInputConnector_3592,
            EsbElementTypes.CallTemplateMediatorInputConnector_3595,
            EsbElementTypes.PayloadFactoryMediatorInputConnector_3598,
            EsbElementTypes.EnqueueMediatorInputConnector_3601,
            EsbElementTypes.URLRewriteMediatorInputConnector_3621,
            EsbElementTypes.ValidateMediatorInputConnector_3624,
            EsbElementTypes.RouterMediatorInputConnector_3629,
            EsbElementTypes.ConditionalRouterMediatorInputConnector_3636,
            EsbElementTypes.BAMMediatorInputConnector_3681,
            EsbElementTypes.BeanMediatorInputConnector_3684,
            EsbElementTypes.EJBMediatorInputConnector_3687,
            EsbElementTypes.DefaultEndPointInputConnector_3021,
            EsbElementTypes.AddressEndPointInputConnector_3030,
            EsbElementTypes.FailoverEndPointInputConnector_3088,
            EsbElementTypes.RecipientListEndPointInputConnector_3693,
            EsbElementTypes.WSDLEndPointInputConnector_3092,
            EsbElementTypes.NamedEndpointInputConnector_3661,
            EsbElementTypes.LoadBalanceEndPointInputConnector_3095,
            EsbElementTypes.APIResourceEndpointInputConnector_3675,
            EsbElementTypes.AddressingEndpointInputConnector_3690,
            EsbElementTypes.HTTPEndPointInputConnector_3710,
            EsbElementTypes.TemplateEndpointInputConnector_3717,
            EsbElementTypes.CloudConnectorInputConnector_3720,
            EsbElementTypes.CloudConnectorOperationInputConnector_3723,
            EsbElementTypes.LoopBackMediatorInputConnector_3737,
            EsbElementTypes.RespondMediatorInputConnector_3740,
            EsbElementTypes.CallMediatorInputConnector_3743,
            EsbElementTypes.DataMapperMediatorInputConnector_3762,
            EsbElementTypes.FastXSLTMediatorInputConnector_3765,
            EsbElementTypes.ForEachMediatorInputConnector_3781,
            EsbElementTypes.PublishEventMediatorInputConnector_3786,
            EsbElementTypes.JsonTransformMediatorInputConnector_3792,
            EsbElementTypes.ProxyInSequenceInputConnector_3731,
            EsbElementTypes.MessageInputConnector_3046,
            EsbElementTypes.MergeNodeFirstInputConnector_3014,
            EsbElementTypes.MergeNodeSecondInputConnector_3015,
            EsbElementTypes.SequencesInputConnector_3616,
            EsbElementTypes.DefaultEndPointInputConnector_3644,
            EsbElementTypes.AddressEndPointInputConnector_3647,
            EsbElementTypes.FailoverEndPointInputConnector_3650,
            EsbElementTypes.RecipientListEndPointInputConnector_3697,
            EsbElementTypes.WSDLEndPointInputConnector_3654,
            EsbElementTypes.LoadBalanceEndPointInputConnector_3657,
            EsbElementTypes.HTTPEndPointInputConnector_3713,
            EsbElementTypes.TemplateEndpointInputConnector_3726,
            EsbElementTypes.APIResourceInputConnector_3670,
            EsbElementTypes.APIResourceFaultInputConnector_3672,
            EsbElementTypes.APIResourceInSequenceInputConnector_3747,
            EsbElementTypes.InboundEndpointSequenceInputConnector_3768,
            EsbElementTypes.InboundEndpointOnErrorSequenceInputConnector_3770 };

    /**
     * Relationship types that can originate from the adapted source edit part.
     *
     * @generated NOT
     */
    @Override
    public List<IElementType> getRelTypesOnSource(IAdaptable source) {
        IGraphicalEditPart editPart = (IGraphicalEditPart) source.getAdapter(IGraphicalEditPart.class);
        return doGetRelTypesOnSource((PublishEventMediatorOutputConnectorEditPart) editPart);
    }

    /**
     * A publish-event mediator output connector can only start an ESB link.
     *
     * @generated NOT
     */
    public List<IElementType> doGetRelTypesOnSource(PublishEventMediatorOutputConnectorEditPart source) {
        List<IElementType> relationshipTypes = new ArrayList<IElementType>(1);
        relationshipTypes.add(EsbElementTypes.EsbLink_4001);
        return relationshipTypes;
    }

    /**
     * Relationship types valid between the adapted source and target edit parts.
     *
     * @generated NOT
     */
    @Override
    public List<IElementType> getRelTypesOnSourceAndTarget(IAdaptable source, IAdaptable target) {
        IGraphicalEditPart sourcePart = (IGraphicalEditPart) source.getAdapter(IGraphicalEditPart.class);
        IGraphicalEditPart targetPart = (IGraphicalEditPart) target.getAdapter(IGraphicalEditPart.class);
        return doGetRelTypesOnSourceAndTarget((PublishEventMediatorOutputConnectorEditPart) sourcePart, targetPart);
    }

    /**
     * Offers {@code EsbLink_4001} once per accepted target class the target
     * edit part is an instance of, preserving the multiplicity of the original
     * generated instanceof chain.
     *
     * @generated NOT
     */
    public List<IElementType> doGetRelTypesOnSourceAndTarget(PublishEventMediatorOutputConnectorEditPart source,
            IGraphicalEditPart targetEditPart) {
        List<IElementType> relationshipTypes = new LinkedList<IElementType>();
        for (Class<?> acceptedTarget : LINK_TARGET_EDIT_PARTS) {
            if (acceptedTarget.isInstance(targetEditPart)) {
                relationshipTypes.add(EsbElementTypes.EsbLink_4001);
            }
        }
        return relationshipTypes;
    }

    /**
     * Target element types reachable from the adapted source edit part via the
     * given relationship type.
     *
     * @generated NOT
     */
    @Override
    public List<IElementType> getTypesForTarget(IAdaptable source, IElementType relationshipType) {
        IGraphicalEditPart editPart = (IGraphicalEditPart) source.getAdapter(IGraphicalEditPart.class);
        return doGetTypesForTarget((PublishEventMediatorOutputConnectorEditPart) editPart, relationshipType);
    }

    /**
     * All input-connector types are valid targets, but only for the ESB link
     * relationship; any other relationship yields an empty list.
     *
     * @generated NOT
     */
    public List<IElementType> doGetTypesForTarget(PublishEventMediatorOutputConnectorEditPart source,
            IElementType relationshipType) {
        List<IElementType> types = new ArrayList<IElementType>();
        if (relationshipType == EsbElementTypes.EsbLink_4001) {
            for (IElementType targetType : LINK_TARGET_TYPES) {
                types.add(targetType);
            }
        }
        return types;
    }
}
package com.structurizr.view; import com.structurizr.AbstractWorkspaceTestBase; import com.structurizr.Workspace; import com.structurizr.model.*; import org.junit.Before; import org.junit.Test; import java.util.HashMap; import java.util.Map; import static org.junit.Assert.*; public class ContainerViewTests extends AbstractWorkspaceTestBase { private SoftwareSystem softwareSystem; private ContainerView view; @Before public void setUp() { softwareSystem = model.addSoftwareSystem(Location.Internal, "The System", "Description"); view = new ContainerView(softwareSystem, "containers", "Description"); } @Test public void test_construction() { assertEquals("The System - Containers", view.getName()); assertEquals("Description", view.getDescription()); assertEquals(0, view.getElements().size()); assertSame(softwareSystem, view.getSoftwareSystem()); assertEquals(softwareSystem.getId(), view.getSoftwareSystemId()); assertSame(model, view.getModel()); } @Test public void test_addAllSoftwareSystems_DoesNothing_WhenThereAreNoOtherSoftwareSystems() { assertEquals(0, view.getElements().size()); view.addAllSoftwareSystems(); assertEquals(0, view.getElements().size()); } @Test public void test_addAllSoftwareSystems_AddsAllSoftwareSystems_WhenThereAreSomeSoftwareSystemsInTheModel() { SoftwareSystem softwareSystemA = model.addSoftwareSystem(Location.External, "System A", "Description"); SoftwareSystem softwareSystemB = model.addSoftwareSystem(Location.External, "System B", "Description"); view.addAllSoftwareSystems(); assertEquals(2, view.getElements().size()); assertTrue(view.getElements().contains(new ElementView(softwareSystemA))); assertTrue(view.getElements().contains(new ElementView(softwareSystemB))); } @Test public void test_addAllPeople_DoesNothing_WhenThereAreNoPeople() { assertEquals(0, view.getElements().size()); view.addAllPeople(); assertEquals(0, view.getElements().size()); } @Test public void test_addAllPeople_AddsAllPeople_WhenThereAreSomePeopleInTheModel() { Person 
userA = model.addPerson(Location.External, "User A", "Description"); Person userB = model.addPerson(Location.External, "User B", "Description"); view.addAllPeople(); assertEquals(2, view.getElements().size()); assertTrue(view.getElements().contains(new ElementView(userA))); assertTrue(view.getElements().contains(new ElementView(userB))); } @Test public void test_addAllElements_DoesNothing_WhenThereAreNoSoftwareSystemsOrPeople() { assertEquals(0, view.getElements().size()); view.addAllElements(); assertEquals(0, view.getElements().size()); } @Test public void test_addAllElements_AddsAllSoftwareSystemsAndPeopleAndContainers_WhenThereAreSomeSoftwareSystemsAndPeopleAndContainersInTheModel() { SoftwareSystem softwareSystemA = model.addSoftwareSystem(Location.External, "System A", "Description"); SoftwareSystem softwareSystemB = model.addSoftwareSystem(Location.External, "System B", "Description"); Person userA = model.addPerson(Location.External, "User A", "Description"); Person userB = model.addPerson(Location.External, "User B", "Description"); Container webApplication = softwareSystem.addContainer("Web Application", "Does something", "Apache Tomcat"); Container database = softwareSystem.addContainer("Database", "Does something", "MySQL"); view.addAllElements(); assertEquals(6, view.getElements().size()); assertTrue(view.getElements().contains(new ElementView(softwareSystemA))); assertTrue(view.getElements().contains(new ElementView(softwareSystemB))); assertTrue(view.getElements().contains(new ElementView(userA))); assertTrue(view.getElements().contains(new ElementView(userB))); assertTrue(view.getElements().contains(new ElementView(webApplication))); assertTrue(view.getElements().contains(new ElementView(database))); } @Test public void test_addAllContainers_DoesNothing_WhenThereAreNoContainers() { assertEquals(0, view.getElements().size()); view.addAllContainers(); assertEquals(0, view.getElements().size()); } @Test public void 
test_addAllContainers_AddsAllContainers_WhenThereAreSomeContainers() { Container webApplication = softwareSystem.addContainer("Web Application", "Does something", "Apache Tomcat"); Container database = softwareSystem.addContainer("Database", "Does something", "MySQL"); view.addAllContainers(); assertEquals(2, view.getElements().size()); assertTrue(view.getElements().contains(new ElementView(webApplication))); assertTrue(view.getElements().contains(new ElementView(database))); } @Test public void test_addNearestNeightbours_DoesNothing_WhenANullElementIsSpecified() { view.addNearestNeighbours(null); assertEquals(0, view.getElements().size()); } @Test public void test_addNearestNeighbours_DoesNothing_WhenThereAreNoNeighbours() { view.addNearestNeighbours(softwareSystem); assertEquals(0, view.getElements().size()); } @Test public void test_addNearestNeighbours_AddsNearestNeighbours_WhenThereAreSomeNearestNeighbours() { SoftwareSystem softwareSystemA = model.addSoftwareSystem("System A", "Description"); SoftwareSystem softwareSystemB = model.addSoftwareSystem("System B", "Description"); Person userA = model.addPerson("User A", "Description"); Person userB = model.addPerson("User B", "Description"); // userA -> systemA -> system -> systemB -> userB userA.uses(softwareSystemA, ""); softwareSystemA.uses(softwareSystem, ""); softwareSystem.uses(softwareSystemB, ""); softwareSystemB.delivers(userB, ""); // userA -> systemA -> web application -> systemB -> userB // web application -> database Container webApplication = softwareSystem.addContainer("Web Application", "", ""); Container database = softwareSystem.addContainer("Database", "", ""); softwareSystemA.uses(webApplication, ""); webApplication.uses(softwareSystemB, ""); webApplication.uses(database, ""); // userA -> systemA -> controller -> service -> repository -> database Component controller = webApplication.addComponent("Controller", ""); Component service = webApplication.addComponent("Service", ""); Component 
repository = webApplication.addComponent("Repository", ""); softwareSystemA.uses(controller, ""); controller.uses(service, ""); service.uses(repository, ""); repository.uses(database, ""); // userA -> systemA -> controller -> service -> systemB -> userB service.uses(softwareSystemB, ""); view.addNearestNeighbours(webApplication); assertEquals(4, view.getElements().size()); assertTrue(view.getElements().contains(new ElementView(softwareSystemA))); assertTrue(view.getElements().contains(new ElementView(softwareSystemB))); assertTrue(view.getElements().contains(new ElementView(webApplication))); assertTrue(view.getElements().contains(new ElementView(database))); view = new ContainerView(softwareSystem, "containers", "Description"); view.addNearestNeighbours(softwareSystemA); assertEquals(3, view.getElements().size()); assertTrue(view.getElements().contains(new ElementView(userA))); assertTrue(view.getElements().contains(new ElementView(softwareSystemA))); assertTrue(view.getElements().contains(new ElementView(webApplication))); view = new ContainerView(softwareSystem, "containers", "Description"); view.addNearestNeighbours(webApplication); assertEquals(4, view.getElements().size()); assertTrue(view.getElements().contains(new ElementView(softwareSystemA))); assertTrue(view.getElements().contains(new ElementView(webApplication))); assertTrue(view.getElements().contains(new ElementView(database))); assertTrue(view.getElements().contains(new ElementView(softwareSystemB))); } @Test public void test_remove_RemovesContainer() { Container webApplication = softwareSystem.addContainer("Web Application", "", ""); Container database = softwareSystem.addContainer("Database", "", ""); view.addAllContainers(); assertEquals(2, view.getElements().size()); view.remove(webApplication); assertEquals(1, view.getElements().size()); assertTrue(view.getElements().contains(new ElementView(database))); } @Test public void test_remove_ElementsWithTag() { final String TAG = "myTag"; Container 
webApplication = softwareSystem.addContainer("Web Application", "", ""); Container database = softwareSystem.addContainer("Database", "", ""); database.addTags(TAG); view.addAllContainers(); assertEquals(2, view.getElements().size()); view.removeElementsWithTag(TAG); assertEquals(1, view.getElements().size()); assertEquals(webApplication, view.getElements().iterator().next().getElement()); } @Test public void test_remove_RelationshipWithTag() { final String TAG = "myTag"; Container webApplication = softwareSystem.addContainer("Web Application", "", ""); Container database = softwareSystem.addContainer("Database", "", ""); webApplication.uses(database, "").addTags(TAG); view.addAllContainers(); assertEquals(2, view.getElements().size()); assertEquals(1, view.getRelationships().size()); view.removeRelationshipsWithTag(TAG); assertEquals(2, view.getElements().size()); assertEquals(0, view.getRelationships().size()); } @Test public void test_addDependentSoftwareSystem() { assertEquals(0, view.getElements().size()); assertEquals(0, view.getRelationships().size()); view.addDependentSoftwareSystems(); SoftwareSystem softwareSystem2 = model.addSoftwareSystem(Location.External, "SoftwareSystem 2", ""); view.addDependentSoftwareSystems(); assertEquals(0, view.getElements().size()); assertEquals(0, view.getRelationships().size()); softwareSystem2.uses(softwareSystem, ""); view.addDependentSoftwareSystems(); assertEquals(1, view.getElements().size()); } @Test public void test_addDependentSoftwareSystem2() { Container container1a = softwareSystem.addContainer("Container 1A", "", ""); SoftwareSystem softwareSystem2 = model.addSoftwareSystem(Location.External, "SoftwareSystem 2", ""); Container container2a = softwareSystem2.addContainer("Container 2-A", "", ""); model.setImpliedRelationshipsStrategy(new CreateImpliedRelationshipsUnlessAnyRelationshipExistsStrategy()); container2a.uses(container1a, ""); view.addDependentSoftwareSystems(); view.addAllContainers(); assertEquals(2, 
view.getElements().size()); assertEquals(1, view.getRelationships().size()); } @Test public void test_addDefaultElements() { model.setImpliedRelationshipsStrategy(new CreateImpliedRelationshipsUnlessAnyRelationshipExistsStrategy()); CustomElement element = model.addCustomElement("Custom"); Person user1 = model.addPerson("User 1"); Person user2 = model.addPerson("User 2"); SoftwareSystem softwareSystem1 = model.addSoftwareSystem("Software System 1"); Container container1 = softwareSystem1.addContainer("Container 1", "", ""); SoftwareSystem softwareSystem2 = model.addSoftwareSystem("Software System 2"); Container container2 = softwareSystem2.addContainer("Container 2", "", ""); user1.uses(container1, "Uses"); user2.uses(container2, "Uses"); container1.uses(container2, "Uses"); view = new ContainerView(softwareSystem1, "containers", "Description"); view.addDefaultElements(); assertEquals(3, view.getElements().size()); assertFalse(view.getElements().contains(new ElementView(element))); assertTrue(view.getElements().contains(new ElementView(user1))); assertFalse(view.getElements().contains(new ElementView(user2))); assertFalse(view.getElements().contains(new ElementView(softwareSystem1))); assertTrue(view.getElements().contains(new ElementView(softwareSystem2))); assertTrue(view.getElements().contains(new ElementView(container1))); assertFalse(view.getElements().contains(new ElementView(container2))); element.uses(container1, "Uses"); view.addDefaultElements(); assertEquals(4, view.getElements().size()); assertTrue(view.getElements().contains(new ElementView(element))); assertTrue(view.getElements().contains(new ElementView(user1))); assertFalse(view.getElements().contains(new ElementView(user2))); assertFalse(view.getElements().contains(new ElementView(softwareSystem1))); assertTrue(view.getElements().contains(new ElementView(softwareSystem2))); assertTrue(view.getElements().contains(new ElementView(container1))); assertFalse(view.getElements().contains(new 
ElementView(container2))); } @Test public void test_addSoftwareSystem_ThrowsAnException_WhenTheSoftwareSystemIsTheScopeOfTheView() { SoftwareSystem softwareSystem = model.addSoftwareSystem("Software System"); view = new ContainerView(softwareSystem, "containers", "Description"); try { view.add(softwareSystem); fail(); } catch (ElementNotPermittedInViewException e) { assertEquals("The software system in scope cannot be added to a container view.", e.getMessage()); } } @Test public void test_addSoftwareSystem_ThrowsAnException_WhenAChildContainerIsAlreadyAdded() { try { SoftwareSystem softwareSystem1 = model.addSoftwareSystem("Software System 1"); Container container1 = softwareSystem1.addContainer("Container 1"); SoftwareSystem softwareSystem2 = model.addSoftwareSystem("Software System 2"); Container container2 = softwareSystem2.addContainer("Container 2"); ContainerView view = views.createContainerView(softwareSystem1, "key", "Description"); view.add(container1); view.add(container2); view.add(softwareSystem2); fail(); } catch (ElementNotPermittedInViewException e) { assertEquals("A child of Software System 2 is already in this view.", e.getMessage()); } } @Test public void test_addContainer_ThrowsAnException_WhenTheParentIsAlreadyAdded() { try { SoftwareSystem softwareSystem1 = model.addSoftwareSystem("Software System 1"); Container container1 = softwareSystem1.addContainer("Container 1"); SoftwareSystem softwareSystem2 = model.addSoftwareSystem("Software System 2"); Container container2 = softwareSystem2.addContainer("Container 2"); ContainerView view = views.createContainerView(softwareSystem1, "key", "Description"); view.add(container1); view.add(softwareSystem2); view.add(container2); fail(); } catch (ElementNotPermittedInViewException e) { assertEquals("A parent of Container 2 is already in this view.", e.getMessage()); } } }
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jetbrains.plugins.gradle.service.execution; import com.google.common.annotations.VisibleForTesting; import com.intellij.execution.configurations.GeneralCommandLine; import com.intellij.openapi.application.Application; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.PathManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.externalSystem.model.ExternalSystemException; import com.intellij.openapi.externalSystem.model.task.ExternalSystemTaskId; import com.intellij.openapi.externalSystem.model.task.ExternalSystemTaskNotificationListener; import com.intellij.openapi.externalSystem.service.execution.ExternalSystemRunConfiguration; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.util.io.StreamUtil; import com.intellij.openapi.util.text.StringUtil; import com.intellij.util.*; import com.intellij.util.containers.ContainerUtil; import org.gradle.initialization.BuildLayoutParameters; import org.gradle.internal.nativeintegration.services.NativeServices; import org.gradle.process.internal.JvmOptions; import org.gradle.tooling.*; import org.gradle.tooling.internal.consumer.DefaultGradleConnector; import org.gradle.tooling.model.build.BuildEnvironment; import org.gradle.util.GradleVersion; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import 
org.jetbrains.plugins.gradle.service.project.DistributionFactoryExt;
import org.jetbrains.plugins.gradle.service.project.ProjectResolverContext;
import org.jetbrains.plugins.gradle.settings.DistributionType;
import org.jetbrains.plugins.gradle.settings.GradleExecutionSettings;
import org.jetbrains.plugins.gradle.tooling.internal.init.Init;
import org.jetbrains.plugins.gradle.util.GradleConstants;
import org.jetbrains.plugins.gradle.util.GradleEnvironment;
import org.jetbrains.plugins.gradle.util.GradleUtil;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Central helper for talking to the Gradle Tooling API: creates {@link ProjectConnection}s,
 * configures {@link LongRunningOperation}s (JVM args, command-line args, environment, I/O
 * streams, progress listeners) from IDE-side {@link GradleExecutionSettings}, and generates
 * the init scripts the IDE injects into Gradle builds.
 *
 * @author Denis Zhdanov
 * @since 3/14/13 5:11 PM
 */
public class GradleExecutionHelper {

  private static final Logger LOG = Logger.getInstance(GradleExecutionHelper.class);

  /**
   * Creates a {@link ModelBuilder} for {@code modelType} on the given connection and,
   * when {@code settings} is provided, applies the standard operation setup via
   * {@link #prepare(LongRunningOperation, ExternalSystemTaskId, GradleExecutionSettings, ExternalSystemTaskNotificationListener, ProjectConnection)}.
   */
  @SuppressWarnings("MethodMayBeStatic")
  @NotNull
  public <T> ModelBuilder<T> getModelBuilder(@NotNull Class<T> modelType,
                                             @NotNull final ExternalSystemTaskId id,
                                             @Nullable GradleExecutionSettings settings,
                                             @NotNull ProjectConnection connection,
                                             @NotNull ExternalSystemTaskNotificationListener listener) {
    ModelBuilder<T> result = connection.model(modelType);
    if (settings != null) {
      prepare(result, id, settings, listener, connection);
    }
    return result;
  }

  /**
   * Creates a {@link BuildLauncher} on the given connection and, when {@code settings}
   * is provided, applies the standard operation setup.
   */
  @SuppressWarnings("MethodMayBeStatic")
  @NotNull
  public BuildLauncher getBuildLauncher(@NotNull final ExternalSystemTaskId id,
                                        @NotNull ProjectConnection connection,
                                        @Nullable GradleExecutionSettings settings,
                                        @NotNull ExternalSystemTaskNotificationListener listener) {
    BuildLauncher result = connection.newBuild();
    if (settings != null) {
      prepare(result, id, settings, listener, connection);
    }
    return result;
  }

  /**
   * Convenience overload: resolves the build environment using the connection, task id and
   * listener carried by the resolver context.
   */
  @Nullable
  public static BuildEnvironment getBuildEnvironment(ProjectResolverContext projectResolverContext) {
    return getBuildEnvironment(projectResolverContext.getConnection(),
                               projectResolverContext.getExternalSystemTaskId(),
                               projectResolverContext.getListener());
  }

  /**
   * Applies the standard operation setup, routing the operation's stdout/stderr to the
   * task notification listener via {@link OutputWrapper}s.
   */
  @SuppressWarnings("IOResourceOpenedButNotSafelyClosed")
  public static void prepare(@NotNull LongRunningOperation operation,
                             @NotNull final ExternalSystemTaskId id,
                             @NotNull GradleExecutionSettings settings,
                             @NotNull final ExternalSystemTaskNotificationListener listener,
                             @NotNull ProjectConnection connection) {
    prepare(operation, id, settings, listener, connection,
            new OutputWrapper(listener, id, true), new OutputWrapper(listener, id, false));
  }

  /**
   * Fully configures a long-running Tooling API operation from the IDE settings:
   * JVM args (merged with the daemon's own args), command-line args (with password values
   * obfuscated before logging and {@code --tests} rewritten into an init script),
   * environment variables, java home, progress listeners and standard I/O streams.
   */
  @SuppressWarnings("IOResourceOpenedButNotSafelyClosed")
  public static void prepare(@NotNull LongRunningOperation operation,
                             @NotNull final ExternalSystemTaskId id,
                             @NotNull GradleExecutionSettings settings,
                             @NotNull final ExternalSystemTaskNotificationListener listener,
                             @NotNull ProjectConnection connection,
                             @NotNull final OutputStream standardOutput,
                             @NotNull final OutputStream standardError) {
    Set<String> jvmArgs = settings.getVmOptions();

    BuildEnvironment buildEnvironment = getBuildEnvironment(connection, id, listener);

    String gradleVersion = buildEnvironment != null ? buildEnvironment.getGradle().getGradleVersion() : null;
    if (!jvmArgs.isEmpty()) {
      // merge gradle args e.g. defined in gradle.properties
      Collection<String> merged = buildEnvironment != null
                                  ? mergeJvmArgs(settings.getServiceDirectory(), buildEnvironment.getJava().getJvmArguments(), jvmArgs)
                                  : jvmArgs;

      // filter nulls and empty strings
      List<String> filteredArgs = ContainerUtil.mapNotNull(merged, s -> StringUtil.isEmpty(s) ? null : s);

      operation.setJvmArguments(ArrayUtil.toStringArray(filteredArgs));
    }

    if (settings.isOfflineWork()) {
      settings.withArgument(GradleConstants.OFFLINE_MODE_CMD_OPTION);
    }

    final Application application = ApplicationManager.getApplication();
    if (application != null && application.isUnitTestMode()) {
      // in tests: force verbose output (unless the test asked for --quiet) and script recompilation
      if (!settings.getArguments().contains("--quiet")) {
        settings.withArgument("--info");
      }
      settings.withArgument("--recompile-scripts");
    }

    if (!settings.getArguments().isEmpty()) {
      // never log raw password values passed on the command line
      String loggableArgs = StringUtil.join(obfuscatePasswordParameters(settings.getArguments()), " ");
      LOG.info("Passing command-line args to Gradle Tooling API: " + loggableArgs);

      // filter nulls and empty strings
      List<String> filteredArgs = ContainerUtil.mapNotNull(settings.getArguments(), s -> StringUtil.isEmpty(s) ? null : s);

      // TODO remove this replacement when --tests option will become available for tooling API
      replaceTestCommandOptionWithInitScript(filteredArgs);
      operation.withArguments(ArrayUtil.toStringArray(filteredArgs));
    }

    setupEnvironment(operation, settings, gradleVersion, id, listener);

    final String javaHome = settings.getJavaHome();
    if (javaHome != null && new File(javaHome).isDirectory()) {
      operation.setJavaHome(new File(javaHome));
    }

    String buildRootDir = buildEnvironment == null ? null : buildEnvironment.getBuildIdentifier().getRootDir().getPath();
    GradleProgressListener gradleProgressListener = new GradleProgressListener(listener, id, buildRootDir);
    // register under both listener interfaces so we receive classic and typed progress events
    operation.addProgressListener((ProgressListener)gradleProgressListener);
    operation.addProgressListener((org.gradle.tooling.events.ProgressListener)gradleProgressListener);
    operation.setStandardOutput(standardOutput);
    operation.setStandardError(standardError);

    InputStream inputStream = settings.getUserData(ExternalSystemRunConfiguration.RUN_INPUT_KEY);
    if (inputStream != null) {
      operation.setStandardInput(inputStream);
    }
  }

  /**
   * Pushes the environment configured in the IDE run configuration onto the operation.
   * Environment customization requires Gradle 3.5+; for older (or unknown) versions the
   * user is warned via the task output only when they actually customized something.
   */
  private static void setupEnvironment(@NotNull LongRunningOperation operation,
                                       @NotNull GradleExecutionSettings settings,
                                       @Nullable String gradleVersion,
                                       ExternalSystemTaskId taskId,
                                       ExternalSystemTaskNotificationListener listener) {
    boolean isEnvironmentCustomizationSupported =
      gradleVersion != null &&
      GradleVersion.version(gradleVersion).getBaseVersion().compareTo(GradleVersion.version("3.5")) >= 0;
    if (!isEnvironmentCustomizationSupported) {
      if (!settings.isPassParentEnvs() || !settings.getEnv().isEmpty()) {
        listener.onTaskOutput(taskId, String.format(
          "The version of Gradle you are using%s does not support the environment variables customization feature. " +
          "Support for this is available in Gradle 3.5 and all later versions.\n",
          gradleVersion == null ? "" : (" (" + gradleVersion + ")")), false);
      }
      return;
    }
    // reuse GeneralCommandLine to compute the effective environment (custom vars + optional parent env)
    GeneralCommandLine commandLine = new GeneralCommandLine();
    commandLine.withEnvironment(settings.getEnv());
    commandLine.withParentEnvironmentType(
      settings.isPassParentEnvs() ? GeneralCommandLine.ParentEnvironmentType.CONSOLE : GeneralCommandLine.ParentEnvironmentType.NONE);
    Map<String, String> effectiveEnvironment = commandLine.getEffectiveEnvironment();
    operation.setEnvironmentVariables(effectiveEnvironment);
  }

  /**
   * Opens a connection for {@code projectPath} (accepting either a project directory or a
   * Gradle script file, whose parent directory is then used), runs {@code f} against it and
   * always closes the connection afterwards. Any non-{@link ExternalSystemException} failure
   * is wrapped into an {@link ExternalSystemException} carrying the root-cause message.
   * <p>
   * NOTE(review): unless {@link GradleEnvironment#ADJUST_USER_DIR} is set, {@code user.dir}
   * is temporarily pointed at the project directory and restored in the finally block —
   * presumably to work around Gradle resolving paths against the process CWD; confirm before
   * changing.
   */
  public <T> T execute(@NotNull String projectPath, @Nullable GradleExecutionSettings settings, @NotNull Function<ProjectConnection, T> f) {
    final String projectDir;
    final File projectPathFile = new File(projectPath);
    if (projectPathFile.isFile() && projectPath.endsWith(GradleConstants.EXTENSION) && projectPathFile.getParent() != null) {
      projectDir = projectPathFile.getParent();
    }
    else {
      projectDir = projectPath;
    }
    String userDir = null;
    if (!GradleEnvironment.ADJUST_USER_DIR) {
      try {
        userDir = System.getProperty("user.dir");
        if (userDir != null) System.setProperty("user.dir", projectDir);
      }
      catch (Exception ignore) {
      }
    }
    ProjectConnection connection = getConnection(projectDir, settings);
    try {
      return f.fun(connection);
    }
    catch (ExternalSystemException e) {
      throw e;
    }
    catch (Throwable e) {
      LOG.debug("Gradle execution error", e);
      Throwable rootCause = ExceptionUtil.getRootCause(e);
      throw new ExternalSystemException(ExceptionUtil.getMessage(rootCause));
    }
    finally {
      try {
        connection.close();
        if (userDir != null) {
          // restore original user.dir property
          System.setProperty("user.dir", userDir);
        }
      }
      catch (Throwable e) {
        LOG.debug("Gradle connection close error", e);
      }
    }
  }

  /**
   * Makes sure a Gradle wrapper distribution is available for wrapper-based distribution
   * types. Runs the {@code wrapper} task with an injected init script that records the
   * generated wrapper properties file location into a temp file, then stores that location
   * in {@code settings}. Failures are logged but not propagated (best-effort). The remote
   * process idle TTL is temporarily dropped to 100 ms so the short-lived daemon goes away
   * quickly, and restored in the finally block.
   */
  public void ensureInstalledWrapper(@NotNull ExternalSystemTaskId id,
                                     @NotNull String projectPath,
                                     @NotNull GradleExecutionSettings settings,
                                     @NotNull ExternalSystemTaskNotificationListener listener) {

    if (!settings.getDistributionType().isWrapped()) return;

    if (settings.getDistributionType() == DistributionType.DEFAULT_WRAPPED &&
        GradleUtil.findDefaultWrapperPropertiesFile(projectPath) != null) {
      return;
    }

    final long ttlInMs = settings.getRemoteProcessIdleTtlInMs();
    ProjectConnection connection = getConnection(projectPath, settings);
    try {
      settings.setRemoteProcessIdleTtlInMs(100);
      try {
        final File wrapperPropertyFileLocation = FileUtil.createTempFile("wrap", "loc");
        wrapperPropertyFileLocation.deleteOnExit();
        // Groovy snippet run after each task: when the Wrapper task finishes, derive the
        // .properties path from its jar file and write it into our temp location file.
        final String[] lines = {
          "",
          "gradle.taskGraph.afterTask { Task task ->",
          " if (task instanceof Wrapper) {",
          " def wrapperPropertyFileLocation = task.jarFile.getCanonicalPath() - '.jar' + '.properties'",
          " new File('" + StringUtil.escapeBackSlashes(wrapperPropertyFileLocation.getCanonicalPath()) + "').write wrapperPropertyFileLocation",
          "}}",
          "",
        };
        final File tempFile = writeToFileGradleInitScript(StringUtil.join(lines, SystemProperties.getLineSeparator()));
        settings.withArguments(GradleConstants.INIT_SCRIPT_CMD_OPTION, tempFile.getAbsolutePath());
        BuildLauncher launcher = getBuildLauncher(id, connection, settings, listener);
        launcher.forTasks("wrapper");
        launcher.run();
        String wrapperPropertyFile = FileUtil.loadFile(wrapperPropertyFileLocation);
        settings.setWrapperPropertyFile(wrapperPropertyFile);
      }
      catch (IOException e) {
        LOG.warn("Can't update wrapper", e);
      }
    }
    catch (Throwable e) {
      LOG.warn("Can't update wrapper", e);
    }
    finally {
      settings.setRemoteProcessIdleTtlInMs(ttlInMs);
      try {
        connection.close();
      }
      catch (Throwable e) {
        // ignore
      }
    }
  }

  /**
   * Merges the daemon's own JVM args with those configured in the IDE, de-duplicating by
   * the part before the first '=' (IDE settings win because they are inserted last), then
   * normalizes the result through Gradle's {@link JvmOptions}.
   */
  private static List<String> mergeJvmArgs(String serviceDirectory, List<String> jvmArgs, Set<String> jvmArgsFromIdeSettings) {
    File gradleUserHomeDir = serviceDirectory != null ? new File(serviceDirectory) : new BuildLayoutParameters().getGradleUserHomeDir();
    LOG.debug("Gradle home: " + gradleUserHomeDir);
    NativeServices.initialize(gradleUserHomeDir);
    // key = option name (text before '='), value = remainder including the '=' itself
    Map<String, String> mergedArgs = new LinkedHashMap<>();
    for (String jvmArg : ContainerUtil.concat(jvmArgs, jvmArgsFromIdeSettings)) {
      int i = jvmArg.indexOf('=');
      if(i <= 0) {
        mergedArgs.put(jvmArg, "");
      }
      else {
        mergedArgs.put(jvmArg.substring(0, i), jvmArg.substring(i));
      }
    }
    List<String> mergedList = new ArrayList<>();
    for (Map.Entry<String, String> entry : mergedArgs.entrySet()) {
      mergedList.add(entry.getKey() + entry.getValue());
    }
    JvmOptions jvmOptions = new JvmOptions(null);
    jvmOptions.setAllJvmArgs(mergedList);
    return jvmOptions.getAllJvmArgs();
  }

  /**
   * Allows to retrieve gradle api connection to use for the given project.
   *
   * @param projectPath target project path
   * @param settings    execution settings to use
   * @return connection to use
   * @throws IllegalStateException if it's not possible to create the connection
   */
  @NotNull
  private static ProjectConnection getConnection(@NotNull String projectPath,
                                                 @Nullable GradleExecutionSettings settings)
    throws IllegalStateException {
    File projectDir = new File(projectPath);
    GradleConnector connector = GradleConnector.newConnector();
    int ttl = -1;

    if (settings != null) {
      File gradleHome = settings.getGradleHome() == null ? null : new File(settings.getGradleHome());
      //noinspection EnumSwitchStatementWhichMissesCases
      switch (settings.getDistributionType()) {
        case LOCAL:
          if (gradleHome != null) {
            connector.useInstallation(gradleHome);
          }
          break;
        case WRAPPED:
          if (settings.getWrapperPropertyFile() != null) {
            DistributionFactoryExt.setWrappedDistribution(connector, settings.getWrapperPropertyFile(), gradleHome);
          }
          break;
      }

      // Setup service directory if necessary.
      String serviceDirectory = settings.getServiceDirectory();
      if (serviceDirectory != null) {
        connector.useGradleUserHomeDir(new File(serviceDirectory));
      }

      // Setup logging if necessary.
      if (settings.isVerboseProcessing() && connector instanceof DefaultGradleConnector) {
        ((DefaultGradleConnector)connector).setVerboseLogging(true);
      }
      ttl = (int)settings.getRemoteProcessIdleTtlInMs();
    }

    // do not spawn gradle daemons during test execution
    final Application app = ApplicationManager.getApplication();
    ttl = (app != null && app.isUnitTestMode()) ? 10000 : ttl;

    if (ttl > 0 && connector instanceof DefaultGradleConnector) {
      ((DefaultGradleConnector)connector).daemonMaxIdleTime(ttl, TimeUnit.MILLISECONDS);
    }
    connector.forProjectDirectory(projectDir);
    ProjectConnection connection = connector.connect();
    if (connection == null) {
      throw new IllegalStateException(String.format(
        "Can't create connection to the target project via gradle tooling api. Project path: '%s'", projectPath
      ));
    }
    return connection;
  }

  /**
   * Generates the IDE's main Gradle init script from the bundled template, substituting the
   * tooling-extension jar paths; for buildSrc projects the buildSrc default init script is
   * appended. Returns {@code null} if the template is missing or generation fails.
   */
  @Nullable
  public static File generateInitScript(boolean isBuildSrcProject, @NotNull Set<Class> toolingExtensionClasses) {
    InputStream stream = Init.class.getResourceAsStream("/org/jetbrains/plugins/gradle/tooling/internal/init/init.gradle");
    try {
      if (stream == null) {
        LOG.warn("Can't get init script template");
        return null;
      }
      final String toolingExtensionsJarPaths = getToolingExtensionsJarPaths(toolingExtensionClasses);
      String script = FileUtil.loadTextAndClose(stream).replaceFirst(Pattern.quote("${EXTENSIONS_JARS_PATH}"), toolingExtensionsJarPaths);
      if (isBuildSrcProject) {
        String buildSrcDefaultInitScript = getBuildSrcDefaultInitScript();
        if (buildSrcDefaultInitScript == null) return null;
        script += buildSrcDefaultInitScript;
      }

      return writeToFileGradleInitScript(script);
    }
    catch (Exception e) {
      LOG.warn("Can't generate IJ gradle init script", e);
      return null;
    }
    finally {
      StreamUtil.closeStream(stream);
    }
  }

  /** Writes {@code content} to an init script file with the default "ijinit" prefix. */
  public static File writeToFileGradleInitScript(@NotNull String content) throws IOException {
    return writeToFileGradleInitScript(content, "ijinit");
  }

  /**
   * Writes {@code content} into a temp-directory {@code <filePrefix>.gradle} file.
   * If a file with identical content already exists it is reused; otherwise the next
   * non-existent sequential file name is used. The file is scheduled for deletion on exit.
   */
  public static File writeToFileGradleInitScript(@NotNull String content, @NotNull String filePrefix) throws IOException {
    File tempFile = new File(FileUtil.getTempDirectory(), filePrefix + '.' + GradleConstants.EXTENSION);
    if (tempFile.exists() && StringUtil.equals(content, FileUtil.loadFile(tempFile))) {
      return tempFile;
    }
    tempFile = FileUtil.findSequentNonexistentFile(tempFile.getParentFile(), filePrefix, GradleConstants.EXTENSION);
    FileUtil.writeToFile(tempFile, content);
    tempFile.deleteOnExit();
    return tempFile;
  }

  /** Loads the bundled buildSrc default init script, or returns {@code null} on failure. */
  @Nullable
  public static String getBuildSrcDefaultInitScript() {
    InputStream stream = Init.class.getResourceAsStream("/org/jetbrains/plugins/gradle/tooling/internal/init/buildSrcInit.gradle");
    try {
      if (stream == null) return null;
      return FileUtil.loadTextAndClose(stream);
    }
    catch (Exception e) {
      LOG.warn("Can't use IJ gradle init script", e);
      return null;
    }
    finally {
      StreamUtil.closeStream(stream);
    }
  }

  /**
   * Resolves the Gradle version of the build behind {@code connection}, or {@code null}
   * when the build environment cannot be obtained.
   */
  @Nullable
  public static GradleVersion getGradleVersion(@NotNull ProjectConnection connection,
                                               @NotNull ExternalSystemTaskId taskId,
                                               @NotNull ExternalSystemTaskNotificationListener listener) {
    final BuildEnvironment buildEnvironment = getBuildEnvironment(connection, taskId, listener);

    GradleVersion gradleVersion = null;
    if (buildEnvironment != null) {
      gradleVersion = GradleVersion.version(buildEnvironment.getGradle().getGradleVersion());
    }
    return gradleVersion;
  }

  /**
   * Fetches the {@link BuildEnvironment} model with progress listeners and output wrappers
   * attached so distribution-download progress reaches the IDE.
   */
  @Nullable
  public static BuildEnvironment getBuildEnvironment(@NotNull ProjectConnection connection,
                                                     @NotNull ExternalSystemTaskId taskId,
                                                     @NotNull ExternalSystemTaskNotificationListener listener) {
    ModelBuilder<BuildEnvironment> modelBuilder = connection.model(BuildEnvironment.class);

    // do not use connection.getModel methods since it doesn't allow to handle progress events
    // and we can miss gradle tooling client side events like distribution download.
    GradleProgressListener gradleProgressListener = new GradleProgressListener(listener, taskId);
    modelBuilder.addProgressListener((ProgressListener)gradleProgressListener);
    modelBuilder.addProgressListener((org.gradle.tooling.events.ProgressListener)gradleProgressListener);
    modelBuilder.setStandardOutput(new OutputWrapper(listener, taskId, true));
    modelBuilder.setStandardError(new OutputWrapper(listener, taskId, false));

    final BuildEnvironment buildEnvironment = modelBuilder.get();
    if (LOG.isDebugEnabled()) {
      try {
        LOG.debug("Gradle version: " + buildEnvironment.getGradle().getGradleVersion());
        LOG.debug("Gradle java home: " + buildEnvironment.getJava().getJavaHome());
        LOG.debug("Gradle jvm arguments: " + buildEnvironment.getJava().getJvmArguments());
      }
      catch (Throwable t) {
        LOG.debug(t);
      }
    }
    return buildEnvironment;
  }

  /**
   * Removes every {@code --tests <pattern>} pair from {@code args} and, when any patterns
   * were collected, appends an init script (generated from the bundled testFilterInit
   * template) that applies the equivalent test filtering — a workaround until the Tooling
   * API supports {@code --tests} directly.
   */
  private static void replaceTestCommandOptionWithInitScript(@NotNull List<String> args) {
    Set<String> testIncludePatterns = ContainerUtil.newLinkedHashSet();
    Iterator<String> it = args.iterator();
    while (it.hasNext()) {
      final String next = it.next();
      if ("--tests".equals(next)) {
        it.remove();
        if (it.hasNext()) {
          testIncludePatterns.add(it.next());
          it.remove();
        }
      }
    }
    if (!testIncludePatterns.isEmpty()) {
      // render the patterns as a Groovy list literal, e.g. ['a','b']
      StringBuilder buf = new StringBuilder();
      buf.append('[');
      for (Iterator<String> iterator = testIncludePatterns.iterator(); iterator.hasNext(); ) {
        String pattern = iterator.next();
        buf.append('\'').append(pattern).append('\'');
        if (iterator.hasNext()) {
          buf.append(',');
        }
      }
      buf.append(']');

      InputStream stream = Init.class.getResourceAsStream("/org/jetbrains/plugins/gradle/tooling/internal/init/testFilterInit.gradle");
      try {
        if (stream == null) {
          LOG.warn("Can't get test filter init script template");
          return;
        }
        String script = FileUtil.loadTextAndClose(stream).replaceFirst(Pattern.quote("${TEST_NAME_INCLUDES}"), Matcher.quoteReplacement(buf.toString()));
        final File tempFile = writeToFileGradleInitScript(script, "ijtestinit");
        ContainerUtil.addAll(args, GradleConstants.INIT_SCRIPT_CMD_OPTION, tempFile.getAbsolutePath());
      }
      catch (Exception e) {
        LOG.warn("Can't generate IJ gradle test filter init script", e);
      }
      finally {
        StreamUtil.closeStream(stream);
      }
    }
  }

  /**
   * Renders the canonical jar paths of the given tooling-extension classes as a Groovy list
   * literal of double-quoted strings, e.g. {@code ["/path/a.jar","/path/b.jar"]}.
   * Classes whose jar cannot be located are silently skipped.
   */
  @NotNull
  private static String getToolingExtensionsJarPaths(@NotNull Set<Class> toolingExtensionClasses) {
    final Set<String> jarPaths = ContainerUtil.map2SetNotNull(toolingExtensionClasses, aClass -> {
      String path = PathManager.getJarPathForClass(aClass);
      return path == null ? null : PathUtil.getCanonicalPath(path);
    });
    StringBuilder buf = new StringBuilder();
    buf.append('[');
    for (Iterator<String> it = jarPaths.iterator(); it.hasNext(); ) {
      String jarPath = it.next();
      buf.append('\"').append(jarPath).append('\"');
      if (it.hasNext()) {
        buf.append(',');
      }
    }
    buf.append(']');
    return buf.toString();
  }

  /* deprecated methods to be removed in future version */

  /**
   * @deprecated {@link #getModelBuilder(Class, ExternalSystemTaskId, GradleExecutionSettings, ProjectConnection, ExternalSystemTaskNotificationListener)}
   */
  @SuppressWarnings("MethodMayBeStatic")
  @NotNull
  public <T> ModelBuilder<T> getModelBuilder(@NotNull Class<T> modelType,
                                             @NotNull final ExternalSystemTaskId id,
                                             @Nullable GradleExecutionSettings settings,
                                             @NotNull ProjectConnection connection,
                                             @NotNull ExternalSystemTaskNotificationListener listener,
                                             @NotNull List<String> extraJvmArgs) {
    ModelBuilder<T> result = connection.model(modelType);
    prepare(result, id, settings, listener, extraJvmArgs, ContainerUtil.newArrayList(), connection);
    return result;
  }

  /**
   * @deprecated {@link #getBuildLauncher(ExternalSystemTaskId, ProjectConnection, GradleExecutionSettings, ExternalSystemTaskNotificationListener)}
   */
  @SuppressWarnings("MethodMayBeStatic")
  @NotNull
  public BuildLauncher getBuildLauncher(@NotNull final ExternalSystemTaskId id,
                                        @NotNull ProjectConnection connection,
                                        @Nullable GradleExecutionSettings settings,
                                        @NotNull ExternalSystemTaskNotificationListener listener,
                                        @NotNull final List<String> vmOptions,
                                        @NotNull final List<String> commandLineArgs) {
    BuildLauncher result = connection.newBuild();
    prepare(result, id, settings, listener, vmOptions, commandLineArgs, connection);
    return result;
  }

  /**
   * Replaces the value of every command-line parameter whose name ends in ".password="
   * (e.g. {@code -Pandroid.injected.signing.store.password=secret}) with asterisks, so
   * argument lists can be logged safely. Non-matching options pass through unchanged.
   */
  @VisibleForTesting
  @NotNull
  static List<String> obfuscatePasswordParameters(@NotNull List<String> commandLineArguments) {
    List<String> replaced = new ArrayList<>(commandLineArguments.size());
    final String PASSWORD_PARAMETER_IDENTIFIER = ".password=";
    for (String option : commandLineArguments) {
      // Find parameters ending in "password", like:
      //   -Pandroid.injected.signing.store.password=
      //   -Pandroid.injected.signing.key.password=
      int index = option.indexOf(PASSWORD_PARAMETER_IDENTIFIER);
      if (index == -1) {
        replaced.add(option);
      }
      else {
        replaced.add(option.substring(0, index + PASSWORD_PARAMETER_IDENTIFIER.length()) + "*********");
      }
    }
    return replaced;
  }

  /**
   * @deprecated to be removed in future version
   */
  @SuppressWarnings("IOResourceOpenedButNotSafelyClosed")
  public static void prepare(@NotNull LongRunningOperation operation,
                             @NotNull final ExternalSystemTaskId id,
                             @Nullable GradleExecutionSettings settings,
                             @NotNull final ExternalSystemTaskNotificationListener listener,
                             @NotNull List<String> extraJvmArgs,
                             @NotNull ProjectConnection connection) {
    if (settings == null) return;
    settings.withVmOptions(extraJvmArgs);
    prepare(operation, id, settings, listener, connection,
            new OutputWrapper(listener, id, true), new OutputWrapper(listener, id, false));
  }

  /**
   * @deprecated use {@link #prepare(LongRunningOperation, ExternalSystemTaskId, GradleExecutionSettings, ExternalSystemTaskNotificationListener, ProjectConnection)}
   */
  @SuppressWarnings("IOResourceOpenedButNotSafelyClosed")
  public static void prepare(@NotNull LongRunningOperation operation,
                             @NotNull final ExternalSystemTaskId id,
                             @Nullable GradleExecutionSettings settings,
                             @NotNull final ExternalSystemTaskNotificationListener listener,
                             @NotNull List<String> extraJvmArgs,
                             @NotNull List<String> commandLineArgs,
                             @NotNull ProjectConnection connection) {
    if (settings == null) return;
    settings.withArguments(commandLineArgs).withVmOptions(extraJvmArgs);
    prepare(operation, id, settings, listener, connection,
            new OutputWrapper(listener, id, true), new OutputWrapper(listener, id, false));
  }

  /**
   * @deprecated use {@link #prepare(LongRunningOperation, ExternalSystemTaskId, GradleExecutionSettings, ExternalSystemTaskNotificationListener, ProjectConnection, OutputStream, OutputStream)}
   */
  @SuppressWarnings("IOResourceOpenedButNotSafelyClosed")
  public static void prepare(@NotNull LongRunningOperation operation,
                             @NotNull final ExternalSystemTaskId id,
                             @NotNull GradleExecutionSettings settings,
                             @NotNull final ExternalSystemTaskNotificationListener listener,
                             @NotNull List<String> extraJvmArgs,
                             @NotNull List<String> commandLineArgs,
                             @NotNull ProjectConnection connection,
                             @NotNull final OutputStream standardOutput,
                             @NotNull final OutputStream standardError) {
    settings.withArguments(commandLineArgs).withVmOptions(extraJvmArgs);
    prepare(operation, id, settings, listener, connection, standardOutput, standardError);
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.builder; import java.nio.charset.Charset; import java.util.Map; import java.util.zip.Deflater; import org.w3c.dom.Node; import org.apache.camel.model.DataFormatDefinition; import org.apache.camel.model.ProcessorDefinition; import org.apache.camel.model.dataformat.ASN1DataFormat; import org.apache.camel.model.dataformat.AvroDataFormat; import org.apache.camel.model.dataformat.Base64DataFormat; import org.apache.camel.model.dataformat.BeanioDataFormat; import org.apache.camel.model.dataformat.BindyDataFormat; import org.apache.camel.model.dataformat.BindyType; import org.apache.camel.model.dataformat.BoonDataFormat; import org.apache.camel.model.dataformat.CastorDataFormat; import org.apache.camel.model.dataformat.CsvDataFormat; import org.apache.camel.model.dataformat.CustomDataFormat; import org.apache.camel.model.dataformat.FhirJsonDataFormat; import org.apache.camel.model.dataformat.FhirXmlDataFormat; import org.apache.camel.model.dataformat.GzipDataFormat; import org.apache.camel.model.dataformat.HL7DataFormat; import org.apache.camel.model.dataformat.HessianDataFormat; import org.apache.camel.model.dataformat.IcalDataFormat; import 
org.apache.camel.model.dataformat.JacksonXMLDataFormat; import org.apache.camel.model.dataformat.JaxbDataFormat; import org.apache.camel.model.dataformat.JibxDataFormat; import org.apache.camel.model.dataformat.JsonDataFormat; import org.apache.camel.model.dataformat.JsonLibrary; import org.apache.camel.model.dataformat.LZFDataFormat; import org.apache.camel.model.dataformat.MimeMultipartDataFormat; import org.apache.camel.model.dataformat.PGPDataFormat; import org.apache.camel.model.dataformat.ProtobufDataFormat; import org.apache.camel.model.dataformat.RssDataFormat; import org.apache.camel.model.dataformat.SerializationDataFormat; import org.apache.camel.model.dataformat.SoapJaxbDataFormat; import org.apache.camel.model.dataformat.StringDataFormat; import org.apache.camel.model.dataformat.SyslogDataFormat; import org.apache.camel.model.dataformat.TarFileDataFormat; import org.apache.camel.model.dataformat.ThriftDataFormat; import org.apache.camel.model.dataformat.TidyMarkupDataFormat; import org.apache.camel.model.dataformat.XMLBeansDataFormat; import org.apache.camel.model.dataformat.XMLSecurityDataFormat; import org.apache.camel.model.dataformat.XStreamDataFormat; import org.apache.camel.model.dataformat.XmlJsonDataFormat; import org.apache.camel.model.dataformat.YAMLDataFormat; import org.apache.camel.model.dataformat.YAMLLibrary; import org.apache.camel.model.dataformat.ZipDataFormat; import org.apache.camel.model.dataformat.ZipFileDataFormat; import org.apache.camel.util.CollectionStringBuffer; import org.apache.camel.util.jsse.KeyStoreParameters; /** * An expression for constructing the different possible {@link org.apache.camel.spi.DataFormat} * options. * * @version */ public class DataFormatClause<T extends ProcessorDefinition<?>> { private final T processorType; private final Operation operation; /** * {@link org.apache.camel.spi.DataFormat} operations. 
*/ public enum Operation { Marshal, Unmarshal } public DataFormatClause(T processorType, Operation operation) { this.processorType = processorType; this.operation = operation; } /** * Uses the Avro data format */ public T avro() { return dataFormat(new AvroDataFormat()); } public T avro(Object schema) { AvroDataFormat dataFormat = new AvroDataFormat(); dataFormat.setSchema(schema); return dataFormat(dataFormat); } public T avro(String instanceClassName) { return dataFormat(new AvroDataFormat(instanceClassName)); } /** * Uses the base64 data format */ public T base64() { Base64DataFormat dataFormat = new Base64DataFormat(); return dataFormat(dataFormat); } /** * Uses the base64 data format */ public T base64(int lineLength, String lineSeparator, boolean urlSafe) { Base64DataFormat dataFormat = new Base64DataFormat(); dataFormat.setLineLength(lineLength); dataFormat.setLineSeparator(lineSeparator); dataFormat.setUrlSafe(urlSafe); return dataFormat(dataFormat); } /** * Uses the beanio data format */ public T beanio(String mapping, String streamName) { BeanioDataFormat dataFormat = new BeanioDataFormat(); dataFormat.setMapping(mapping); dataFormat.setStreamName(streamName); return dataFormat(dataFormat); } /** * Uses the beanio data format */ public T beanio(String mapping, String streamName, String encoding) { BeanioDataFormat dataFormat = new BeanioDataFormat(); dataFormat.setMapping(mapping); dataFormat.setStreamName(streamName); dataFormat.setEncoding(encoding); return dataFormat(dataFormat); } /** * Uses the beanio data format */ public T beanio(String mapping, String streamName, String encoding, boolean ignoreUnidentifiedRecords, boolean ignoreUnexpectedRecords, boolean ignoreInvalidRecords) { BeanioDataFormat dataFormat = new BeanioDataFormat(); dataFormat.setMapping(mapping); dataFormat.setStreamName(streamName); dataFormat.setEncoding(encoding); dataFormat.setIgnoreUnidentifiedRecords(ignoreUnidentifiedRecords); 
dataFormat.setIgnoreUnexpectedRecords(ignoreUnexpectedRecords); dataFormat.setIgnoreInvalidRecords(ignoreInvalidRecords); return dataFormat(dataFormat); } /** * Uses the beanio data format */ public T beanio(String mapping, String streamName, String encoding, String beanReaderErrorHandlerType) { BeanioDataFormat dataFormat = new BeanioDataFormat(); dataFormat.setMapping(mapping); dataFormat.setStreamName(streamName); dataFormat.setEncoding(encoding); dataFormat.setBeanReaderErrorHandlerType(beanReaderErrorHandlerType); return dataFormat(dataFormat); } /** * Uses the Bindy data format * * @param type the type of bindy data format to use * @param classType the POJO class type */ public T bindy(BindyType type, Class<?> classType) { BindyDataFormat bindy = new BindyDataFormat(); bindy.setType(type); bindy.setClassType(classType); return dataFormat(bindy); } /** * Uses the Bindy data format * * @param type the type of bindy data format to use * @param classType the POJO class type * @param unwrapSingleInstance whether unmarshal should unwrap if there is a single instance in the result */ public T bindy(BindyType type, Class<?> classType, boolean unwrapSingleInstance) { BindyDataFormat bindy = new BindyDataFormat(); bindy.setType(type); bindy.setClassType(classType); bindy.setUnwrapSingleInstance(unwrapSingleInstance); return dataFormat(bindy); } /** * Uses the Boon data format * * @param classType the POJO class type */ public T boon(Class<?> classType) { BoonDataFormat boon = new BoonDataFormat(); boon.setUnmarshalType(classType); return dataFormat(boon); } /** * Uses the CSV data format */ public T csv() { return dataFormat(new CsvDataFormat()); } /** * Uses the CSV data format for a huge file. * Sequential access through an iterator. 
*/ public T csvLazyLoad() { return dataFormat(new CsvDataFormat(true)); } /** * Uses the custom data format */ public T custom(String ref) { return dataFormat(new CustomDataFormat(ref)); } /** * Uses the Castor data format */ public T castor() { return dataFormat(new CastorDataFormat()); } /** * Uses the Castor data format * * @param mappingFile name of mapping file to locate in classpath */ public T castor(String mappingFile) { CastorDataFormat castor = new CastorDataFormat(); castor.setMappingFile(mappingFile); return dataFormat(castor); } /** * Uses the Castor data format * * @param mappingFile name of mapping file to locate in classpath * @param validation whether validation is enabled or not */ public T castor(String mappingFile, boolean validation) { CastorDataFormat castor = new CastorDataFormat(); castor.setMappingFile(mappingFile); castor.setValidation(validation); return dataFormat(castor); } /** * Uses the GZIP deflater data format */ public T gzip() { GzipDataFormat gzdf = new GzipDataFormat(); return dataFormat(gzdf); } /** * Uses the Hessian data format */ public T hessian() { return dataFormat(new HessianDataFormat()); } /** * Uses the HL7 data format */ public T hl7() { return dataFormat(new HL7DataFormat()); } /** * Uses the HL7 data format */ public T hl7(boolean validate) { HL7DataFormat hl7 = new HL7DataFormat(); hl7.setValidate(validate); return dataFormat(hl7); } /** * Uses the HL7 data format */ public T hl7(Object parser) { HL7DataFormat hl7 = new HL7DataFormat(); hl7.setParser(parser); return dataFormat(hl7); } /** * Uses the iCal data format */ public T ical(boolean validating) { IcalDataFormat ical = new IcalDataFormat(); ical.setValidating(validating); return dataFormat(ical); } /** * Uses the LZF deflater data format */ public T lzf() { LZFDataFormat lzfdf = new LZFDataFormat(); return dataFormat(lzfdf); } /** * Uses the MIME Multipart data format */ public T mimeMultipart() { MimeMultipartDataFormat mm = new MimeMultipartDataFormat(); 
return dataFormat(mm); } /** * Uses the MIME Multipart data format * * @param multipartSubType Specifies the subtype of the MIME Multipart */ public T mimeMultipart(String multipartSubType) { MimeMultipartDataFormat mm = new MimeMultipartDataFormat(); mm.setMultipartSubType(multipartSubType); return dataFormat(mm); } /** * Uses the MIME Multipart data format * * @param multipartSubType the subtype of the MIME Multipart * @param multipartWithoutAttachment defines whether a message without attachment is also marshaled * into a MIME Multipart (with only one body part). * @param headersInline define the MIME Multipart headers as part of the message body * or as Camel headers * @param binaryContent have binary encoding for binary content (true) or use Base-64 * encoding for binary content (false) */ public T mimeMultipart(String multipartSubType, boolean multipartWithoutAttachment, boolean headersInline, boolean binaryContent) { MimeMultipartDataFormat mm = new MimeMultipartDataFormat(); mm.setMultipartSubType(multipartSubType); mm.setMultipartWithoutAttachment(multipartWithoutAttachment); mm.setHeadersInline(headersInline); mm.setBinaryContent(binaryContent); return dataFormat(mm); } /** * Uses the MIME Multipart data format * * @param multipartSubType the subtype of the MIME Multipart * @param multipartWithoutAttachment defines whether a message without attachment is also marshaled * into a MIME Multipart (with only one body part). 
* @param headersInline define the MIME Multipart headers as part of the message body * or as Camel headers * @param includeHeaders if headersInline is set to true all camel headers matching this * regex are also stored as MIME headers on the Multipart * @param binaryContent have binary encoding for binary content (true) or use Base-64 * encoding for binary content (false) */ public T mimeMultipart(String multipartSubType, boolean multipartWithoutAttachment, boolean headersInline, String includeHeaders, boolean binaryContent) { MimeMultipartDataFormat mm = new MimeMultipartDataFormat(); mm.setMultipartSubType(multipartSubType); mm.setMultipartWithoutAttachment(multipartWithoutAttachment); mm.setHeadersInline(headersInline); mm.setIncludeHeaders(includeHeaders); mm.setBinaryContent(binaryContent); return dataFormat(mm); } /** * Uses the MIME Multipart data format * * @param multipartWithoutAttachment defines whether a message without attachment is also marshaled * into a MIME Multipart (with only one body part). 
* @param headersInline define the MIME Multipart headers as part of the message body * or as Camel headers * @param binaryContent have binary encoding for binary content (true) or use Base-64 * encoding for binary content (false) */ public T mimeMultipart(boolean multipartWithoutAttachment, boolean headersInline, boolean binaryContent) { MimeMultipartDataFormat mm = new MimeMultipartDataFormat(); mm.setMultipartWithoutAttachment(multipartWithoutAttachment); mm.setHeadersInline(headersInline); mm.setBinaryContent(binaryContent); return dataFormat(mm); } /** * Uses the PGP data format */ public T pgp(String keyFileName, String keyUserid) { PGPDataFormat pgp = new PGPDataFormat(); pgp.setKeyFileName(keyFileName); pgp.setKeyUserid(keyUserid); return dataFormat(pgp); } /** * Uses the PGP data format */ public T pgp(String keyFileName, String keyUserid, String password) { PGPDataFormat pgp = new PGPDataFormat(); pgp.setKeyFileName(keyFileName); pgp.setKeyUserid(keyUserid); pgp.setPassword(password); return dataFormat(pgp); } /** * Uses the PGP data format */ public T pgp(String keyFileName, String keyUserid, String password, boolean armored, boolean integrity) { PGPDataFormat pgp = new PGPDataFormat(); pgp.setKeyFileName(keyFileName); pgp.setKeyUserid(keyUserid); pgp.setPassword(password); pgp.setArmored(armored); pgp.setIntegrity(integrity); return dataFormat(pgp); } /** * Uses the Jackson XML data format */ public T jacksonxml() { return dataFormat(new JacksonXMLDataFormat()); } /** * Uses the Jackson XML data format * * @param unmarshalType * unmarshal type for xml jackson type */ public T jacksonxml(Class<?> unmarshalType) { JacksonXMLDataFormat jacksonXMLDataFormat = new JacksonXMLDataFormat(); jacksonXMLDataFormat.setUnmarshalType(unmarshalType); return dataFormat(jacksonXMLDataFormat); } /** * Uses the Jackson XML data format * * @param unmarshalType * unmarshal type for xml jackson type * @param jsonView * the view type for xml jackson type */ public T 
jacksonxml(Class<?> unmarshalType, Class<?> jsonView) { JacksonXMLDataFormat jacksonXMLDataFormat = new JacksonXMLDataFormat(); jacksonXMLDataFormat.setUnmarshalType(unmarshalType); jacksonXMLDataFormat.setJsonView(jsonView); return dataFormat(jacksonXMLDataFormat); } /** * Uses the Jackson XML data format using the Jackson library turning pretty * printing on or off * * @param prettyPrint * turn pretty printing on or off */ public T jacksonxml(boolean prettyPrint) { JacksonXMLDataFormat jacksonXMLDataFormat = new JacksonXMLDataFormat(); jacksonXMLDataFormat.setPrettyPrint(prettyPrint); return dataFormat(jacksonXMLDataFormat); } /** * Uses the Jackson XML data format * * @param unmarshalType * unmarshal type for xml jackson type * @param prettyPrint * turn pretty printing on or off */ public T jacksonxml(Class<?> unmarshalType, boolean prettyPrint) { JacksonXMLDataFormat jacksonXMLDataFormat = new JacksonXMLDataFormat(); jacksonXMLDataFormat.setUnmarshalType(unmarshalType); jacksonXMLDataFormat.setPrettyPrint(prettyPrint); return dataFormat(jacksonXMLDataFormat); } /** * Uses the Jackson XML data format * * @param unmarshalType * unmarshal type for xml jackson type * @param jsonView * the view type for xml jackson type * @param prettyPrint * turn pretty printing on or off */ public T jacksonxml(Class<?> unmarshalType, Class<?> jsonView, boolean prettyPrint) { JacksonXMLDataFormat jacksonXMLDataFormat = new JacksonXMLDataFormat(); jacksonXMLDataFormat.setUnmarshalType(unmarshalType); jacksonXMLDataFormat.setJsonView(jsonView); jacksonXMLDataFormat.setPrettyPrint(prettyPrint); return dataFormat(jacksonXMLDataFormat); } /** * Uses the Jackson XML data format * * @param unmarshalType * unmarshal type for xml jackson type * @param jsonView * the view type for xml jackson type * @param include * include such as <tt>ALWAYS</tt>, <tt>NON_NULL</tt>, etc. 
*/ public T jacksonxml(Class<?> unmarshalType, Class<?> jsonView, String include) { JacksonXMLDataFormat jacksonXMLDataFormat = new JacksonXMLDataFormat(); jacksonXMLDataFormat.setUnmarshalType(unmarshalType); jacksonXMLDataFormat.setJsonView(jsonView); jacksonXMLDataFormat.setInclude(include); return dataFormat(jacksonXMLDataFormat); } /** * Uses the Jackson XML data format * * @param unmarshalType * unmarshal type for xml jackson type * @param jsonView * the view type for xml jackson type * @param include * include such as <tt>ALWAYS</tt>, <tt>NON_NULL</tt>, etc. * @param prettyPrint * turn pretty printing on or off */ public T jacksonxml(Class<?> unmarshalType, Class<?> jsonView, String include, boolean prettyPrint) { JacksonXMLDataFormat jacksonXMLDataFormat = new JacksonXMLDataFormat(); jacksonXMLDataFormat.setUnmarshalType(unmarshalType); jacksonXMLDataFormat.setJsonView(jsonView); jacksonXMLDataFormat.setInclude(include); jacksonXMLDataFormat.setPrettyPrint(prettyPrint); return dataFormat(jacksonXMLDataFormat); } /** * Uses the JAXB data format */ public T jaxb() { return dataFormat(new JaxbDataFormat()); } /** * Uses the JAXB data format with context path */ public T jaxb(String contextPath) { JaxbDataFormat dataFormat = new JaxbDataFormat(); dataFormat.setContextPath(contextPath); return dataFormat(dataFormat); } /** * Uses the JAXB data format turning pretty printing on or off */ public T jaxb(boolean prettyPrint) { return dataFormat(new JaxbDataFormat(prettyPrint)); } /** * Uses the JiBX data format. */ public T jibx() { return dataFormat(new JibxDataFormat()); } /** * Uses the JiBX data format with unmarshall class. 
*/ public T jibx(Class<?> unmarshallClass) { return dataFormat(new JibxDataFormat(unmarshallClass)); } /** * Uses the JSON data format using the XStream json library */ public T json() { return dataFormat(new JsonDataFormat()); } /** * Uses the JSON data format using the XStream json library turning pretty printing on or off * * @param prettyPrint turn pretty printing on or off */ public T json(boolean prettyPrint) { JsonDataFormat json = new JsonDataFormat(); json.setPrettyPrint(prettyPrint); return dataFormat(json); } /** * Uses the JSON data format * * @param library the json library to use */ public T json(JsonLibrary library) { return dataFormat(new JsonDataFormat(library)); } /** * Uses the JSON data format * * @param library the json library to use * @param prettyPrint turn pretty printing on or off */ public T json(JsonLibrary library, boolean prettyPrint) { JsonDataFormat json = new JsonDataFormat(library); json.setPrettyPrint(prettyPrint); return dataFormat(json); } /** * Uses the JSON data format * * @param type the json type to use * @param unmarshalType unmarshal type for json jackson type */ public T json(JsonLibrary type, Class<?> unmarshalType) { JsonDataFormat json = new JsonDataFormat(type); json.setUnmarshalType(unmarshalType); return dataFormat(json); } /** * Uses the JSON data format * * @param type the json type to use * @param unmarshalType unmarshal type for json jackson type * @param prettyPrint turn pretty printing on or off */ public T json(JsonLibrary type, Class<?> unmarshalType, boolean prettyPrint) { JsonDataFormat json = new JsonDataFormat(type); json.setUnmarshalType(unmarshalType); json.setPrettyPrint(prettyPrint); return dataFormat(json); } /** * Uses the Jackson JSON data format * * @param unmarshalType unmarshal type for json jackson type * @param jsonView the view type for json jackson type */ public T json(Class<?> unmarshalType, Class<?> jsonView) { JsonDataFormat json = new JsonDataFormat(JsonLibrary.Jackson); 
json.setUnmarshalType(unmarshalType); json.setJsonView(jsonView); return dataFormat(json); } /** * Uses the Jackson JSON data format * * @param unmarshalType unmarshal type for json jackson type * @param jsonView the view type for json jackson type * @param prettyPrint turn pretty printing on or off */ public T json(Class<?> unmarshalType, Class<?> jsonView, boolean prettyPrint) { JsonDataFormat json = new JsonDataFormat(JsonLibrary.Jackson); json.setUnmarshalType(unmarshalType); json.setJsonView(jsonView); json.setPrettyPrint(prettyPrint); return dataFormat(json); } /** * Uses the Jackson JSON data format * * @param unmarshalType unmarshal type for json jackson type * @param jsonView the view type for json jackson type * @param include include such as <tt>ALWAYS</tt>, <tt>NON_NULL</tt>, etc. */ public T json(Class<?> unmarshalType, Class<?> jsonView, String include) { JsonDataFormat json = new JsonDataFormat(JsonLibrary.Jackson); json.setUnmarshalType(unmarshalType); json.setJsonView(jsonView); json.setInclude(include); return dataFormat(json); } /** * Uses the Jackson JSON data format * * @param unmarshalType unmarshal type for json jackson type * @param jsonView the view type for json jackson type * @param include include such as <tt>ALWAYS</tt>, <tt>NON_NULL</tt>, etc. 
* @param prettyPrint turn pretty printing on or off */ public T json(Class<?> unmarshalType, Class<?> jsonView, String include, boolean prettyPrint) { JsonDataFormat json = new JsonDataFormat(JsonLibrary.Jackson); json.setUnmarshalType(unmarshalType); json.setJsonView(jsonView); json.setInclude(include); json.setPrettyPrint(prettyPrint); return dataFormat(json); } /** * Uses the protobuf data format */ public T protobuf() { return dataFormat(new ProtobufDataFormat()); } public T protobuf(Object defaultInstance) { ProtobufDataFormat dataFormat = new ProtobufDataFormat(); dataFormat.setDefaultInstance(defaultInstance); return dataFormat(dataFormat); } public T protobuf(Object defaultInstance, String contentTypeFormat) { ProtobufDataFormat dataFormat = new ProtobufDataFormat(); dataFormat.setDefaultInstance(defaultInstance); dataFormat.setContentTypeFormat(contentTypeFormat); return dataFormat(dataFormat); } public T protobuf(String instanceClassName) { return dataFormat(new ProtobufDataFormat(instanceClassName)); } public T protobuf(String instanceClassName, String contentTypeFormat) { return dataFormat(new ProtobufDataFormat(instanceClassName, contentTypeFormat)); } /** * Uses the RSS data format */ public T rss() { return dataFormat(new RssDataFormat()); } /** * Uses the Java Serialization data format */ public T serialization() { return dataFormat(new SerializationDataFormat()); } /** * Uses the Soap 1.1 JAXB data format */ public T soapjaxb() { return dataFormat(new SoapJaxbDataFormat()); } /** * Uses the Soap 1.1 JAXB data format */ public T soapjaxb(String contextPath) { return dataFormat(new SoapJaxbDataFormat(contextPath)); } /** * Uses the Soap 1.1 JAXB data format */ public T soapjaxb(String contextPath, String elementNameStrategyRef) { return dataFormat(new SoapJaxbDataFormat(contextPath, elementNameStrategyRef)); } /** * Uses the Soap 1.1 JAXB data format */ public T soapjaxb(String contextPath, Object elementNameStrategy) { return dataFormat(new 
SoapJaxbDataFormat(contextPath, elementNameStrategy)); } /** * Uses the Soap 1.2 JAXB data format */ public T soapjaxb12() { SoapJaxbDataFormat soap = new SoapJaxbDataFormat(); soap.setVersion("1.2"); return dataFormat(soap); } /** * Uses the Soap 1.2 JAXB data format */ public T soapjaxb12(String contextPath) { SoapJaxbDataFormat soap = new SoapJaxbDataFormat(contextPath); soap.setVersion("1.2"); return dataFormat(soap); } /** * Uses the Soap 1.2 JAXB data format */ public T soapjaxb12(String contextPath, String elementNameStrategyRef) { SoapJaxbDataFormat soap = new SoapJaxbDataFormat(contextPath, elementNameStrategyRef); soap.setVersion("1.2"); return dataFormat(soap); } /** * Uses the Soap JAXB data format */ public T soapjaxb12(String contextPath, Object elementNameStrategy) { SoapJaxbDataFormat soap = new SoapJaxbDataFormat(contextPath, elementNameStrategy); soap.setVersion("1.2"); return dataFormat(soap); } /** * Uses the String data format */ public T string() { return string(null); } /** * Uses the String data format supporting encoding using given charset */ public T string(String charset) { StringDataFormat sdf = new StringDataFormat(); sdf.setCharset(charset); return dataFormat(sdf); } /** * Uses the Syslog data format */ public T syslog() { return dataFormat(new SyslogDataFormat()); } /** * Uses the Thrift data format */ public T thrift() { return dataFormat(new ThriftDataFormat()); } public T thrift(Object defaultInstance) { ThriftDataFormat dataFormat = new ThriftDataFormat(); dataFormat.setDefaultInstance(defaultInstance); return dataFormat(dataFormat); } public T thrift(Object defaultInstance, String contentTypeFormat) { ThriftDataFormat dataFormat = new ThriftDataFormat(); dataFormat.setDefaultInstance(defaultInstance); dataFormat.setContentTypeFormat(contentTypeFormat); return dataFormat(dataFormat); } public T thrift(String instanceClassName) { return dataFormat(new ThriftDataFormat(instanceClassName)); } public T thrift(String 
instanceClassName, String contentTypeFormat) { return dataFormat(new ThriftDataFormat(instanceClassName, contentTypeFormat)); } /** * Return WellFormed HTML (an XML Document) either * {@link java.lang.String} or {@link org.w3c.dom.Node} */ public T tidyMarkup(Class<?> dataObjectType) { return dataFormat(new TidyMarkupDataFormat(dataObjectType)); } /** * Return TidyMarkup in the default format * as {@link org.w3c.dom.Node} */ public T tidyMarkup() { return dataFormat(new TidyMarkupDataFormat(Node.class)); } /** * Uses the XStream data format. * <p/> * Favor using {@link #xstream(String)} to pass in a permission */ public T xstream() { return dataFormat(new XStreamDataFormat()); } /** * Uses the xstream by setting the encoding or permission * * @param encodingOrPermission is either an encoding or permission syntax */ public T xstream(String encodingOrPermission) { // is it an encoding? if not we assume its a permission if (Charset.isSupported(encodingOrPermission)) { return xstream(encodingOrPermission, (String) null); } else { return xstream(null, encodingOrPermission); } } /** * Uses the xstream by setting the encoding */ public T xstream(String encoding, String permission) { XStreamDataFormat xdf = new XStreamDataFormat(); xdf.setPermissions(permission); xdf.setEncoding(encoding); return dataFormat(xdf); } /** * Uses the xstream by permitting the java type * * @param type the pojo xstream should use as allowed permission */ public T xstream(Class<?> type) { return xstream(null, type); } /** * Uses the xstream by permitting the java type * * @param encoding encoding to use * @param type the pojo class(es) xstream should use as allowed permission */ public T xstream(String encoding, Class<?>... 
type) { CollectionStringBuffer csb = new CollectionStringBuffer(","); for (Class<?> clazz : type) { csb.append("+"); csb.append(clazz.getName()); } return xstream(encoding, csb.toString()); } /** * Uses the YAML data format * * @param library the yaml library to use */ public T yaml(YAMLLibrary library) { return dataFormat(new YAMLDataFormat(library)); } /** * Uses the YAML data format * * @param library the yaml type to use * @param type the type for json snakeyaml type */ public T yaml(YAMLLibrary library, Class<?> type) { return dataFormat(new YAMLDataFormat(library, type)); } /** * Uses the XML Security data format */ public T secureXML() { XMLSecurityDataFormat xsdf = new XMLSecurityDataFormat(); return dataFormat(xsdf); } /** * Uses the XML Security data format */ public T secureXML(String secureTag, boolean secureTagContents) { XMLSecurityDataFormat xsdf = new XMLSecurityDataFormat(secureTag, secureTagContents); return dataFormat(xsdf); } /** * Uses the XML Security data format */ public T secureXML(String secureTag, Map<String, String> namespaces, boolean secureTagContents) { XMLSecurityDataFormat xsdf = new XMLSecurityDataFormat(secureTag, namespaces, secureTagContents); return dataFormat(xsdf); } /** * Uses the XML Security data format */ public T secureXML(String secureTag, boolean secureTagContents, String passPhrase) { XMLSecurityDataFormat xsdf = new XMLSecurityDataFormat(secureTag, secureTagContents, passPhrase); return dataFormat(xsdf); } /** * Uses the XML Security data format */ public T secureXML(String secureTag, Map<String, String> namespaces, boolean secureTagContents, String passPhrase) { XMLSecurityDataFormat xsdf = new XMLSecurityDataFormat(secureTag, namespaces, secureTagContents, passPhrase); return dataFormat(xsdf); } /** * Uses the XML Security data format */ public T secureXML(String secureTag, boolean secureTagContents, String passPhrase, String xmlCipherAlgorithm) { XMLSecurityDataFormat xsdf = new XMLSecurityDataFormat(secureTag, 
secureTagContents, passPhrase, xmlCipherAlgorithm); return dataFormat(xsdf); } /** * Uses the XML Security data format */ public T secureXML(String secureTag, Map<String, String> namespaces, boolean secureTagContents, String passPhrase, String xmlCipherAlgorithm) { XMLSecurityDataFormat xsdf = new XMLSecurityDataFormat(secureTag, namespaces, secureTagContents, passPhrase, xmlCipherAlgorithm); return dataFormat(xsdf); } /** * Uses the XML Security data format */ public T secureXML(String secureTag, boolean secureTagContents, byte[] passPhraseByte) { XMLSecurityDataFormat xsdf = new XMLSecurityDataFormat(); xsdf.setSecureTag(secureTag); xsdf.setSecureTagContents(secureTagContents); xsdf.setPassPhraseByte(passPhraseByte); return dataFormat(xsdf); } /** * Uses the XML Security data format */ public T secureXML(String secureTag, Map<String, String> namespaces, boolean secureTagContents, byte[] passPhraseByte) { XMLSecurityDataFormat xsdf = new XMLSecurityDataFormat(); xsdf.setSecureTag(secureTag); xsdf.setNamespaces(namespaces); xsdf.setSecureTagContents(secureTagContents); xsdf.setPassPhraseByte(passPhraseByte); return dataFormat(xsdf); } /** * Uses the XML Security data format */ public T secureXML(String secureTag, boolean secureTagContents, byte[] passPhraseByte, String xmlCipherAlgorithm) { XMLSecurityDataFormat xsdf = new XMLSecurityDataFormat(); xsdf.setSecureTag(secureTag); xsdf.setSecureTagContents(secureTagContents); xsdf.setPassPhraseByte(passPhraseByte); xsdf.setXmlCipherAlgorithm(xmlCipherAlgorithm); return dataFormat(xsdf); } /** * Uses the XML Security data format */ public T secureXML(String secureTag, Map<String, String> namespaces, boolean secureTagContents, byte[] passPhraseByte, String xmlCipherAlgorithm) { XMLSecurityDataFormat xsdf = new XMLSecurityDataFormat(); xsdf.setSecureTag(secureTag); xsdf.setNamespaces(namespaces); xsdf.setSecureTagContents(secureTagContents); xsdf.setPassPhraseByte(passPhraseByte); 
xsdf.setXmlCipherAlgorithm(xmlCipherAlgorithm); return dataFormat(xsdf); } /** * @deprecated Use {@link #secureXML(String, Map, boolean, String, String, String, String)} instead. * Uses the XML Security data format */ @Deprecated public T secureXML(String secureTag, boolean secureTagContents, String recipientKeyAlias, String xmlCipherAlgorithm, String keyCipherAlgorithm) { XMLSecurityDataFormat xsdf = new XMLSecurityDataFormat(secureTag, secureTagContents, recipientKeyAlias, xmlCipherAlgorithm, keyCipherAlgorithm); return dataFormat(xsdf); } /** * Uses the XML Security data format */ public T secureXML(String secureTag, boolean secureTagContents, String recipientKeyAlias, String xmlCipherAlgorithm, String keyCipherAlgorithm, String keyOrTrustStoreParametersId) { XMLSecurityDataFormat xsdf = new XMLSecurityDataFormat(secureTag, secureTagContents, recipientKeyAlias, xmlCipherAlgorithm, keyCipherAlgorithm, keyOrTrustStoreParametersId); return dataFormat(xsdf); } /** * Uses the XML Security data format */ public T secureXML(String secureTag, boolean secureTagContents, String recipientKeyAlias, String xmlCipherAlgorithm, String keyCipherAlgorithm, String keyOrTrustStoreParametersId, String keyPassword) { XMLSecurityDataFormat xsdf = new XMLSecurityDataFormat(secureTag, secureTagContents, recipientKeyAlias, xmlCipherAlgorithm, keyCipherAlgorithm, keyOrTrustStoreParametersId, keyPassword); return dataFormat(xsdf); } /** * Uses the XML Security data format */ public T secureXML(String secureTag, boolean secureTagContents, String recipientKeyAlias, String xmlCipherAlgorithm, String keyCipherAlgorithm, KeyStoreParameters keyOrTrustStoreParameters) { XMLSecurityDataFormat xsdf = new XMLSecurityDataFormat(secureTag, secureTagContents, recipientKeyAlias, xmlCipherAlgorithm, keyCipherAlgorithm, keyOrTrustStoreParameters); return dataFormat(xsdf); } /** * Uses the XML Security data format */ public T secureXML(String secureTag, boolean secureTagContents, String recipientKeyAlias, 
String xmlCipherAlgorithm, String keyCipherAlgorithm, KeyStoreParameters keyOrTrustStoreParameters, String keyPassword) { XMLSecurityDataFormat xsdf = new XMLSecurityDataFormat(secureTag, secureTagContents, recipientKeyAlias, xmlCipherAlgorithm, keyCipherAlgorithm, keyOrTrustStoreParameters, keyPassword); return dataFormat(xsdf); } /** * Uses the XML Security data format */ public T secureXML(String secureTag, Map<String, String> namespaces, boolean secureTagContents, String recipientKeyAlias, String xmlCipherAlgorithm, String keyCipherAlgorithm, String keyOrTrustStoreParametersId) { XMLSecurityDataFormat xsdf = new XMLSecurityDataFormat(secureTag, namespaces, secureTagContents, recipientKeyAlias, xmlCipherAlgorithm, keyCipherAlgorithm, keyOrTrustStoreParametersId); return dataFormat(xsdf); } /** * Uses the XML Security data format */ public T secureXML(String secureTag, Map<String, String> namespaces, boolean secureTagContents, String recipientKeyAlias, String xmlCipherAlgorithm, String keyCipherAlgorithm, String keyOrTrustStoreParametersId, String keyPassword) { XMLSecurityDataFormat xsdf = new XMLSecurityDataFormat(secureTag, namespaces, secureTagContents, recipientKeyAlias, xmlCipherAlgorithm, keyCipherAlgorithm, keyOrTrustStoreParametersId, keyPassword); return dataFormat(xsdf); } /** * Uses the XML Security data format */ public T secureXML(String secureTag, Map<String, String> namespaces, boolean secureTagContents, String recipientKeyAlias, String xmlCipherAlgorithm, String keyCipherAlgorithm, KeyStoreParameters keyOrTrustStoreParameters) { XMLSecurityDataFormat xsdf = new XMLSecurityDataFormat(secureTag, namespaces, secureTagContents, recipientKeyAlias, xmlCipherAlgorithm, keyCipherAlgorithm, keyOrTrustStoreParameters); return dataFormat(xsdf); } /** * Uses the XML Security data format */ public T secureXML(String secureTag, Map<String, String> namespaces, boolean secureTagContents, String recipientKeyAlias, String xmlCipherAlgorithm, String 
keyCipherAlgorithm, KeyStoreParameters keyOrTrustStoreParameters, String keyPassword) { XMLSecurityDataFormat xsdf = new XMLSecurityDataFormat(secureTag, namespaces, secureTagContents, recipientKeyAlias, xmlCipherAlgorithm, keyCipherAlgorithm, keyOrTrustStoreParameters, keyPassword); return dataFormat(xsdf); } /** * Uses the XML Security data format */ public T secureXML(String secureTag, Map<String, String> namespaces, boolean secureTagContents, String recipientKeyAlias, String xmlCipherAlgorithm, String keyCipherAlgorithm, KeyStoreParameters keyOrTrustStoreParameters, String keyPassword, String digestAlgorithm) { XMLSecurityDataFormat xsdf = new XMLSecurityDataFormat(secureTag, namespaces, secureTagContents, recipientKeyAlias, xmlCipherAlgorithm, keyCipherAlgorithm, keyOrTrustStoreParameters, keyPassword, digestAlgorithm); return dataFormat(xsdf); } /** * Uses the Tar file data format */ public T tarFile() { TarFileDataFormat tfdf = new TarFileDataFormat(); return dataFormat(tfdf); } /** * Uses the xmlBeans data format */ public T xmlBeans() { return dataFormat(new XMLBeansDataFormat()); } /** * Uses the xmljson dataformat, based on json-lib */ @Deprecated public T xmljson() { return dataFormat(new XmlJsonDataFormat()); } /** * Uses the xmljson dataformat, based on json-lib, initializing custom options with a Map */ @Deprecated public T xmljson(Map<String, String> options) { return dataFormat(new XmlJsonDataFormat(options)); } /** * Uses the ZIP deflater data format */ public T zip() { ZipDataFormat zdf = new ZipDataFormat(Deflater.DEFAULT_COMPRESSION); return dataFormat(zdf); } /** * Uses the ZIP deflater data format */ public T zip(int compressionLevel) { ZipDataFormat zdf = new ZipDataFormat(compressionLevel); return dataFormat(zdf); } /** * Uses the ZIP file data format */ public T zipFile() { ZipFileDataFormat zfdf = new ZipFileDataFormat(); return dataFormat(zfdf); } /** * Uses the ASN.1 file data format */ public T asn1() { ASN1DataFormat asn1Df = new 
ASN1DataFormat(); return dataFormat(asn1Df); } public T asn1(String clazzName) { return dataFormat(new ASN1DataFormat(clazzName)); } public T asn1(Boolean usingIterator) { return dataFormat(new ASN1DataFormat(usingIterator)); } /** * Uses the FHIR JSON data format */ public T fhirJson() { FhirJsonDataFormat jsonDataFormat = new FhirJsonDataFormat(); return dataFormat(jsonDataFormat); } public T fhirJson(String version) { FhirJsonDataFormat jsonDataFormat = new FhirJsonDataFormat(); jsonDataFormat.setFhirVersion(version); return dataFormat(jsonDataFormat); } public T fhirJson(boolean prettyPrint) { FhirJsonDataFormat jsonDataFormat = new FhirJsonDataFormat(); jsonDataFormat.setPrettyPrint(prettyPrint); return dataFormat(jsonDataFormat); } public T fhirJson(String version, boolean prettyPrint) { FhirJsonDataFormat jsonDataFormat = new FhirJsonDataFormat(); jsonDataFormat.setPrettyPrint(prettyPrint); jsonDataFormat.setFhirVersion(version); return dataFormat(jsonDataFormat); } /** * Uses the FHIR XML data format */ public T fhirXml() { FhirXmlDataFormat fhirXmlDataFormat = new FhirXmlDataFormat(); return dataFormat(fhirXmlDataFormat); } public T fhirXml(String version) { FhirXmlDataFormat fhirXmlDataFormat = new FhirXmlDataFormat(); fhirXmlDataFormat.setFhirVersion(version); return dataFormat(fhirXmlDataFormat); } public T fhirXml(boolean prettyPrint) { FhirXmlDataFormat fhirXmlDataFormat = new FhirXmlDataFormat(); fhirXmlDataFormat.setPrettyPrint(prettyPrint); return dataFormat(fhirXmlDataFormat); } public T fhirXml(String version, boolean prettyPrint) { FhirXmlDataFormat fhirXmlDataFormat = new FhirXmlDataFormat(); fhirXmlDataFormat.setFhirVersion(version); fhirXmlDataFormat.setPrettyPrint(prettyPrint); return dataFormat(fhirXmlDataFormat); } @SuppressWarnings("unchecked") private T dataFormat(DataFormatDefinition dataFormatType) { switch (operation) { case Unmarshal: return (T) processorType.unmarshal(dataFormatType); case Marshal: return (T) 
processorType.marshal(dataFormatType); default: throw new IllegalArgumentException("Unknown DataFormat operation: " + operation); } } }
/* * Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH * under one or more contributor license agreements. See the NOTICE file * distributed with this work for additional information regarding copyright * ownership. Camunda licenses this file to you under the Apache License, * Version 2.0; you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.camunda.bpm.engine.test.assertions.bpmn; import org.camunda.bpm.engine.test.Deployment; import org.camunda.bpm.engine.test.ProcessEngineRule; import org.camunda.bpm.engine.test.assertions.helpers.Failure; import org.camunda.bpm.engine.test.assertions.helpers.ProcessAssertTestCase; import org.junit.Rule; import org.junit.Test; import static org.camunda.bpm.engine.test.assertions.ProcessEngineTests.*; import static org.assertj.core.api.Assertions.*; public class ProcessEngineTestsFindIdTest extends ProcessAssertTestCase { @Rule public ProcessEngineRule processEngineRule = new ProcessEngineRule(); @Test @Deployment(resources = "bpmn/ProcessEngineTests-findTest.bpmn") public void testFindPlainTaskByName() { // Given // Process model deployed // When String id = findId("Plain task"); // Then assertThat(id).isEqualTo("PlainTask_TestID"); } @Test @Deployment(resources = "bpmn/ProcessEngineTests-findTest.bpmn") public void testFindEndEventByName() { // Given // Process model deployed // When String end = findId("End"); // Then assertThat(end).isEqualTo("End_TestID"); } @Test @Deployment(resources = "bpmn/ProcessEngineTests-findTest.bpmn") public void testFindAttachedEventByName() { // 
Given // Process model deployed // When String attachedBoundaryEvent = findId("2 days"); // Then assertThat(attachedBoundaryEvent).isEqualTo("n2Days_TestID"); } @Test @Deployment(resources = "bpmn/ProcessEngineTests-findTest.bpmn") public void testFindGatewayByName() { // Given // process model deployed // When String gateway = findId("Continue?"); // Then assertThat(gateway).isEqualTo("Continue_TestID"); } @Test @Deployment(resources = "bpmn/ProcessEngineTests-findTest.bpmn") public void testNameNotFound() { // Given // Process model deployed // When // findId("This should not be found"); // Then expect(new Failure() { @Override public void when() { findId("This should not be found"); } }, "doesn't exist"); } @Test @Deployment(resources = "bpmn/ProcessEngineTests-findTest.bpmn") public void testNameNull() { // Given // Process model deployed // When // findId(null); // Then expect(new Failure() { @Override public void when() { findId(null); } }, IllegalArgumentException.class); } @Test @Deployment(resources = "bpmn/ProcessEngineTests-findTest.bpmn") public void testFindAllElements() { // Given // Process model deployed // When String start = findId("Start"); String plainTask = findId("Plain task"); String userTask = findId("User task"); String receiveTask = findId("Receive task"); String attachedBoundaryEvent = findId("2 days"); String gateway = findId("Continue?"); String end = findId("End"); String messageEnd = findId("Message End"); //Then assertThat(start).isEqualTo("Start_TestID"); assertThat(plainTask).isEqualTo("PlainTask_TestID"); assertThat(userTask).isEqualTo("UserTask_TestID"); assertThat(receiveTask).isEqualTo("ReceiveTask_TestID"); assertThat(attachedBoundaryEvent).isEqualTo("n2Days_TestID"); assertThat(gateway).isEqualTo("Continue_TestID"); assertThat(end).isEqualTo("End_TestID"); assertThat(messageEnd).isEqualTo("MessageEnd_TestID"); } @Test @Deployment(resources = "bpmn/ProcessEngineTests-findInTwoPools.bpmn") public void 
testFindInTwoPoolsInPool1() { // Given // Process model with two pools deployed // When String callActivity = findId("Call activity one"); // Then assertThat(callActivity).isEqualTo("CallActivityOne_TestID"); } @Test @Deployment(resources = "bpmn/ProcessEngineTests-findInTwoPools.bpmn") public void testFindTwoPoolsInPool2() { // Given // Process model with two pools deployed // When String task = findId("Subprocess task"); // Then assertThat(task).isEqualTo("SubProcessTask_TestID"); } @Test @Deployment(resources = {"bpmn/ProcessEngineTests-findTest.bpmn", "bpmn/ProcessEngineTests-findInTwoPools.bpmn"}) public void testFindOneInEachOfTwoDiagrams() { // Given // Two process models deployed // When String start = findId("Start"); String plainTask = findId("Plain task"); String startSuperProcess = findId("Super started"); String taskTwo = findId("Task two"); String proc2Started = findId("Proc 2 started"); // Then assertThat(start).isEqualTo("Start_TestID"); assertThat(plainTask).isEqualTo("PlainTask_TestID"); assertThat(startSuperProcess).isEqualTo("SuperStarted_TestID"); assertThat(taskTwo).isEqualTo("TaskTwo_TestID"); assertThat(proc2Started).isEqualTo("Proc2Started_TestID"); } @Test @Deployment(resources = "bpmn/ProcessEngineTests-findDuplicateNames.bpmn") public void testProcessWithDuplicateNames() { // Given // Process model with duplicate task names deployed // When // findId("Task one"); findId("Event one"); findId("Gateway one"); // Then expect(new Failure() { @Override public void when() { findId("Task one"); } }, "not unique"); // And expect(new Failure() { @Override public void when() { findId("Event one"); } }, "not unique"); // And expect(new Failure() { @Override public void when() { findId("Gateway one"); } }, "not unique"); } @Test @Deployment(resources = "bpmn/ProcessEngineTests-findDuplicateNamesOnTaskAndGateway.bpmn") public void testProcesswithDuplicateNamesOnDifferentElementsTypes() { // Given // Process model with same name on task and gateway 
deployed // When // findId("Element one"); // Then expect(new Failure() { @Override public void when() { findId("Element one"); } }, "not unique"); } @Test @Deployment(resources = "bpmn/ProcessEngineTests-findDuplicateNamesOnTaskAndGateway.bpmn") public void testProcessWithDuplicateNamesDindTheUniqueOnly() { // Given // Process model with two pools and a mix of duplicate and unique names deployed // When String startOne = findId("Start one"); String endTwo = findId("End two"); // Then assertThat(startOne).isEqualTo("StartOne_TestID"); assertThat(endTwo).isEqualTo("EndTwo_TestID"); } }
/* Copyright (c) 2008 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.gdata.data.contacts;

import com.google.gdata.util.common.xml.XmlNamespace;
import com.google.gdata.data.AttributeGenerator;
import com.google.gdata.data.AttributeHelper;
import com.google.gdata.data.ExtensionDescription;
import com.google.gdata.data.ExtensionPoint;
import com.google.gdata.data.ExtensionProfile;
import com.google.gdata.data.extensions.When;
import com.google.gdata.util.ParseException;

/**
 * Contact's events.
 *
 * <p>Models the {@code <gcontact:event>} extension element: an event attached
 * to a contact, described by an optional free-form {@code label} or a
 * predefined {@code rel} type (see {@link Rel}), plus a nested
 * {@code <gd:when>} element carrying the event's time period.
 */
@ExtensionDescription.Default(
    nsAlias = ContactsNamespace.GCONTACT_ALIAS,
    nsUri = ContactsNamespace.GCONTACT,
    localName = Event.XML_NAME)
public class Event extends ExtensionPoint {

  /** XML element name */
  static final String XML_NAME = "event";

  /** XML "label" attribute name */
  private static final String LABEL = "label";

  /** XML "rel" attribute name */
  private static final String REL = "rel";

  /** Label */
  private String label = null;

  /** Event type */
  private String rel = null;

  /** Event type. */
  public static final class Rel {

    /** Anniversary event. */
    public static final String ANNIVERSARY = "anniversary";

    /** Other event. */
    public static final String OTHER = "other";

  }

  /**
   * Default mutable constructor.
   */
  public Event() {
    super();
  }

  /**
   * Immutable constructor.
   *
   * @param label label.
   * @param rel event type.
   */
  public Event(String label, String rel) {
    super();
    setLabel(label);
    setRel(rel);
    // Freeze the instance; later setter calls will throw (see
    // throwExceptionIfImmutable() in each setter).
    setImmutable(true);
  }

  @Override
  public void declareExtensions(ExtensionProfile extProfile) {
    // Guard against re-declaring extensions when the profile has already
    // seen this class (declareExtensions may be invoked more than once).
    if (extProfile.isDeclared(Event.class)) {
      return;
    }
    // Declare the nested <gd:when> child element for this extension point.
    extProfile.declare(Event.class, new ExtensionDescription(When.class,
        new XmlNamespace("gd", "http://schemas.google.com/g/2005"), "when",
        true, false, false));
    new When().declareExtensions(extProfile);
  }

  /**
   * Returns the label.
   *
   * @return label
   */
  public String getLabel() {
    return label;
  }

  /**
   * Sets the label.
   *
   * @param label label or <code>null</code> to reset
   */
  public void setLabel(String label) {
    throwExceptionIfImmutable();
    this.label = label;
  }

  /**
   * Returns whether it has the label.
   *
   * @return whether it has the label
   */
  public boolean hasLabel() {
    return getLabel() != null;
  }

  /**
   * Returns the event type.
   *
   * @return event type
   */
  public String getRel() {
    return rel;
  }

  /**
   * Sets the event type.
   *
   * @param rel event type or <code>null</code> to reset
   */
  public void setRel(String rel) {
    throwExceptionIfImmutable();
    this.rel = rel;
  }

  /**
   * Returns whether it has the event type.
   *
   * @return whether it has the event type
   */
  public boolean hasRel() {
    return getRel() != null;
  }

  /**
   * Returns the time period description.
   *
   * @return time period description
   */
  public When getWhen() {
    return getExtension(When.class);
  }

  /**
   * Sets the time period description.
   *
   * @param when time period description or <code>null</code> to reset
   */
  public void setWhen(When when) {
    if (when == null) {
      removeExtension(When.class);
    } else {
      setExtension(when);
    }
  }

  /**
   * Returns whether it has the time period description.
   *
   * @return whether it has the time period description
   */
  public boolean hasWhen() {
    return hasExtension(When.class);
  }

  @Override
  protected void validate() {
    // No local validation: both attributes are optional (consumed with
    // required == false in consumeAttributes).
  }

  /**
   * Returns the extension description, specifying whether it is required, and
   * whether it is repeatable.
   *
   * @param required whether it is required
   * @param repeatable whether it is repeatable
   * @return extension description
   */
  public static ExtensionDescription getDefaultDescription(boolean required,
      boolean repeatable) {
    ExtensionDescription desc =
        ExtensionDescription.getDefaultDescription(Event.class);
    desc.setRequired(required);
    desc.setRepeatable(repeatable);
    return desc;
  }

  @Override
  protected void putAttributes(AttributeGenerator generator) {
    generator.put(LABEL, label);
    generator.put(REL, rel);
  }

  @Override
  protected void consumeAttributes(AttributeHelper helper) throws ParseException {
    // Both attributes are optional (second argument false).
    label = helper.consume(LABEL, false);
    rel = helper.consume(REL, false);
  }

  @Override
  public String toString() {
    return "{Event label=" + label + " rel=" + rel + "}";
  }

}
/* * Copyright (c) 2015 Algolia * http://www.algolia.com/ * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package com.algolia.search.saas; import android.os.AsyncTask; import android.os.Build; import android.os.Handler; import android.os.Looper; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import org.json.JSONException; import org.json.JSONObject; import org.json.JSONTokener; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.io.UnsupportedEncodingException; import java.lang.ref.WeakReference; import java.net.HttpURLConnection; import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.zip.GZIPInputStream; /** * An abstract API client. */ public abstract class AbstractClient { // ---------------------------------------------------------------------- // Types // ---------------------------------------------------------------------- /*** * A version of a software library. * Used to construct the <code>User-Agent</code> header. 
*/ public static class LibraryVersion { public final @NonNull String name; public final @NonNull String version; public LibraryVersion(@NonNull String name, @NonNull String version) { this.name = name; this.version = version; } @Override public boolean equals(Object object) { if (!(object instanceof LibraryVersion)) return false; LibraryVersion other = (LibraryVersion)object; return this.name.equals(other.name) && this.version.equals(other.version); } @Override public int hashCode() { return name.hashCode() ^ version.hashCode(); } } private static class HostStatus { boolean isUp = true; long lastTryTimestamp; HostStatus(boolean isUp) { this.isUp = isUp; lastTryTimestamp = new Date().getTime(); } } // ---------------------------------------------------------------------- // Constants // ---------------------------------------------------------------------- /** This library's version. */ private final static String version = "3.10.1"; // ---------------------------------------------------------------------- // Fields // ---------------------------------------------------------------------- /** * The user agents as a raw string. This is what is passed in request headers. * WARNING: It is stored for efficiency purposes. It should not be modified directly. */ private String userAgentRaw; /** The user agents, as a structured list of library versions. */ private List<LibraryVersion> userAgents = new ArrayList<>(); /** Connect timeout (ms). */ private int connectTimeout = 2000; /** Default read (receive) timeout (ms). */ private int readTimeout = 30000; /** Read timeout for search requests (ms). */ private int searchTimeout = 5000; /** Delay to wait when a host is down before retrying it (ms). 
*/ private int hostDownDelay = 5000; private final String applicationID; private final String apiKey; private List<String> readHosts; private List<String> writeHosts; private HashMap<String, HostStatus> hostStatuses = new HashMap<>(); /** * HTTP headers that will be sent with every request. */ private HashMap<String, String> headers = new HashMap<String, String>(); /** Handler used to execute operations on the main thread. */ protected Handler mainHandler = new Handler(Looper.getMainLooper()); /** Thread pool used to run asynchronous requests. */ protected ExecutorService searchExecutorService = Executors.newFixedThreadPool(4); protected Map<String, WeakReference<Object>> indices = new HashMap<>(); // ---------------------------------------------------------------------- // Initialization // ---------------------------------------------------------------------- /** * Create a new client. * * @param applicationID [optional] The application ID. * @param apiKey [optional] A valid API key for the service. * @param readHosts List of hosts for read operations. * @param writeHosts List of hosts for write operations. */ protected AbstractClient(@Nullable String applicationID, @Nullable String apiKey, @Nullable String[] readHosts, @Nullable String[] writeHosts) { this.applicationID = applicationID; this.apiKey = apiKey; this.addUserAgent(new LibraryVersion("Algolia for Android", version)); this.addUserAgent(new LibraryVersion("Android", Build.VERSION.RELEASE)); if (readHosts != null) setReadHosts(readHosts); if (writeHosts != null) setWriteHosts(writeHosts); } // ---------------------------------------------------------------------- // Accessors // ---------------------------------------------------------------------- public String getApplicationID() { return applicationID; } /** * Set an HTTP header that will be sent with every request. * * @param name Header name. * @param value Value for the header. If null, the header will be removed. 
*/ public void setHeader(@NonNull String name, @Nullable String value) { if (value == null) { headers.remove(name); } else { headers.put(name, value); } } /** * Get an HTTP header. * * @param name Header name. */ public String getHeader(@NonNull String name) { return headers.get(name); } public String[] getReadHosts() { return readHosts.toArray(new String[readHosts.size()]); } public void setReadHosts(@NonNull String... hosts) { if (hosts.length == 0) { throw new IllegalArgumentException("Hosts array cannot be empty"); } readHosts = Arrays.asList(hosts); } public String[] getWriteHosts() { return writeHosts.toArray(new String[writeHosts.size()]); } public void setWriteHosts(@NonNull String... hosts) { if (hosts.length == 0) { throw new IllegalArgumentException("Hosts array cannot be empty"); } writeHosts = Arrays.asList(hosts); } /** * Set read and write hosts to the same value (convenience method). * * @param hosts New hosts. Must not be empty. */ public void setHosts(@NonNull String... hosts) { setReadHosts(hosts); setWriteHosts(hosts); } /** * Get the connection timeout. * * @return The connection timeout (ms). */ public int getConnectTimeout() { return connectTimeout; } /** * Set the connection timeout. * * @param connectTimeout The new connection timeout (ms). */ public void setConnectTimeout(int connectTimeout) { checkTimeout(connectTimeout); this.connectTimeout = connectTimeout; } /** * Get the default read timeout. * * @return The default read timeout (ms). */ public int getReadTimeout() { return readTimeout; } /** * Set the default read timeout. * * @param readTimeout The default read timeout (ms). */ public void setReadTimeout(int readTimeout) { checkTimeout(readTimeout); this.readTimeout = readTimeout; } /** * Get the read timeout for search requests. * * @return The read timeout for search requests (ms). */ public int getSearchTimeout() { return searchTimeout; } /** * Set the read timeout for search requests. 
* * @param searchTimeout The read timeout for search requests (ms). */ public void setSearchTimeout(int searchTimeout) { checkTimeout(searchTimeout); this.searchTimeout = searchTimeout; } /** * Get the timeout for retrying connection to a down host. * * @return The delay before connecting again to a down host (ms). */ public int getHostDownDelay() { return hostDownDelay; } /** * Set the timeout for retrying connection to a down host. * * @param hostDownDelay The delay before connecting again to a down host (ms). */ public void setHostDownDelay(int hostDownDelay) { checkTimeout(hostDownDelay); this.hostDownDelay = hostDownDelay; } /** * Add a software library to the list of user agents. * * @param userAgent The library to add. */ public void addUserAgent(@NonNull LibraryVersion userAgent) { userAgents.add(userAgent); updateUserAgents(); } /** * Remove a software library from the list of user agents. * * @param userAgent The library to remove. */ public void removeUserAgent(@NonNull LibraryVersion userAgent) { userAgents.remove(userAgent); updateUserAgents(); } /** * Retrieve the list of declared user agents. * * @return The declared user agents. */ public @NonNull LibraryVersion[] getUserAgents() { return userAgents.toArray(new LibraryVersion[userAgents.size()]); } /** * Test whether a user agent is declared. * * @param userAgent The user agent to look for. * @return true if it is declared on this client, false otherwise. 
*/ public boolean hasUserAgent(@NonNull LibraryVersion userAgent) { return userAgents.contains(userAgent); } private void updateUserAgents() { StringBuilder s = new StringBuilder(); for (LibraryVersion userAgent : userAgents) { if (s.length() != 0) { s.append("; "); } s.append(userAgent.name); s.append(" ("); s.append(userAgent.version); s.append(")"); } userAgentRaw = s.toString(); } private List<String> getReadHostsThatAreUp() { return hostsThatAreUp(readHosts); } private List<String> getWriteHostsThatAreUp() { return hostsThatAreUp(writeHosts); } // ---------------------------------------------------------------------- // Utilities // ---------------------------------------------------------------------- /** * HTTP method. */ private enum Method { GET, POST, PUT, DELETE } protected byte[] getRequestRaw(String url, boolean search) throws AlgoliaException { return _requestRaw(Method.GET, url, null, getReadHostsThatAreUp(), connectTimeout, search ? searchTimeout : readTimeout); } protected JSONObject getRequest(String url, boolean search) throws AlgoliaException { return _request(Method.GET, url, null, getReadHostsThatAreUp(), connectTimeout, search ? searchTimeout : readTimeout); } protected JSONObject deleteRequest(String url) throws AlgoliaException { return _request(Method.DELETE, url, null, getWriteHostsThatAreUp(), connectTimeout, readTimeout); } protected JSONObject postRequest(String url, String obj, boolean readOperation) throws AlgoliaException { return _request(Method.POST, url, obj, (readOperation ? getReadHostsThatAreUp() : getWriteHostsThatAreUp()), connectTimeout, (readOperation ? searchTimeout : readTimeout)); } protected byte[] postRequestRaw(String url, String obj, boolean readOperation) throws AlgoliaException { return _requestRaw(Method.POST, url, obj, (readOperation ? getReadHostsThatAreUp() : getWriteHostsThatAreUp()), connectTimeout, (readOperation ? 
searchTimeout : readTimeout)); } protected JSONObject putRequest(String url, String obj) throws AlgoliaException { return _request(Method.PUT, url, obj, getWriteHostsThatAreUp(), connectTimeout, readTimeout); } /** * Reads the InputStream as UTF-8 * * @param stream the InputStream to read * @return the stream's content as a String * @throws IOException if the stream can't be read, decoded as UTF-8 or closed */ private static String _toCharArray(InputStream stream) throws IOException { InputStreamReader is = new InputStreamReader(stream, "UTF-8"); StringBuilder builder = new StringBuilder(); char[] buf = new char[1000]; int l = 0; while (l >= 0) { builder.append(buf, 0, l); l = is.read(buf); } is.close(); return builder.toString(); } /** * Reads the InputStream into a byte array * @param stream the InputStream to read * @return the stream's content as a byte[] * @throws AlgoliaException if the stream can't be read or flushed */ private static byte[] _toByteArray(InputStream stream) throws AlgoliaException { ByteArrayOutputStream out = new ByteArrayOutputStream(); int read; byte[] buffer = new byte[1024]; try { while ((read = stream.read(buffer, 0, buffer.length)) != -1) { out.write(buffer, 0, read); } out.flush(); return out.toByteArray(); } catch (IOException e) { throw new AlgoliaException("Error while reading stream: " + e.getMessage()); } } protected static JSONObject _getJSONObject(String input) throws JSONException { return new JSONObject(new JSONTokener(input)); } protected static JSONObject _getJSONObject(byte[] array) throws JSONException, UnsupportedEncodingException { return new JSONObject(new String(array, "UTF-8")); } private static JSONObject _getAnswerJSONObject(InputStream istream) throws IOException, JSONException { return _getJSONObject(_toCharArray(istream)); } /** * Send the query according to parameters and returns its result as a JSONObject * * @param m HTTP Method to use * @param url endpoint URL * @param json optional JSON Object to send * 
@param hostsArray array of hosts to try successively * @param connectTimeout maximum wait time to open connection * @param readTimeout maximum time to read data on socket * @return a JSONObject containing the resulting data or error * @throws AlgoliaException if the request data is not valid json */ private JSONObject _request(Method m, String url, String json, List<String> hostsArray, int connectTimeout, int readTimeout) throws AlgoliaException { try { return _getJSONObject(_requestRaw(m, url, json, hostsArray, connectTimeout, readTimeout)); } catch (JSONException e) { throw new AlgoliaException("JSON decode error:" + e.getMessage()); } catch (UnsupportedEncodingException e) { throw new AlgoliaException("UTF-8 decode error:" + e.getMessage()); } } /** * Send the query according to parameters and returns its result as a JSONObject * * @param m HTTP Method to use * @param url endpoint URL * @param json optional JSON Object to send * @param hostsArray array of hosts to try successively * @param connectTimeout maximum wait time to open connection * @param readTimeout maximum time to read data on socket * @return a JSONObject containing the resulting data or error * @throws AlgoliaException in case of connection or data handling error */ private byte[] _requestRaw(Method m, String url, String json, List<String> hostsArray, int connectTimeout, int readTimeout) throws AlgoliaException { String requestMethod; List<Exception> errors = new ArrayList<>(hostsArray.size()); // for each host for (String host : hostsArray) { switch (m) { case DELETE: requestMethod = "DELETE"; break; case GET: requestMethod = "GET"; break; case POST: requestMethod = "POST"; break; case PUT: requestMethod = "PUT"; break; default: throw new IllegalArgumentException("Method " + m + " is not supported"); } InputStream stream = null; HttpURLConnection hostConnection = null; // set URL try { URL hostURL = new URL("https://" + host + url); hostConnection = (HttpURLConnection) hostURL.openConnection(); 
//set timeouts hostConnection.setRequestMethod(requestMethod); hostConnection.setConnectTimeout(connectTimeout); hostConnection.setReadTimeout(readTimeout); // set auth headers hostConnection.setRequestProperty("X-Algolia-Application-Id", this.applicationID); hostConnection.setRequestProperty("X-Algolia-API-Key", this.apiKey); for (Map.Entry<String, String> entry : this.headers.entrySet()) { hostConnection.setRequestProperty(entry.getKey(), entry.getValue()); } // set user agent hostConnection.setRequestProperty("User-Agent", userAgentRaw); // write JSON entity if (json != null) { if (!(requestMethod.equals("PUT") || requestMethod.equals("POST"))) { throw new IllegalArgumentException("Method " + m + " cannot enclose entity"); } hostConnection.setRequestProperty("Content-type", "application/json; charset=UTF-8"); hostConnection.setDoOutput(true); OutputStreamWriter writer = new OutputStreamWriter(hostConnection.getOutputStream(), "UTF-8"); writer.write(json); writer.close(); } // read response int code = hostConnection.getResponseCode(); final boolean codeIsError = code / 100 != 2; stream = codeIsError ? hostConnection.getErrorStream() : hostConnection.getInputStream(); // As per the official Java docs (not the Android docs): // - `getErrorStream()` may return null => we have to handle this case. // See <https://docs.oracle.com/javase/7/docs/api/java/net/HttpURLConnection.html#getErrorStream()>. // - `getInputStream()` should never return null... but let's err on the side of caution. // See <https://docs.oracle.com/javase/7/docs/api/java/net/URLConnection.html#getInputStream()>. 
if (stream == null) { throw new IOException(String.format("Null stream when reading connection (status %d)", code)); } hostStatuses.put(host, new HostStatus(true)); final byte[] rawResponse; String encoding = hostConnection.getContentEncoding(); if (encoding != null && encoding.equals("gzip")) { rawResponse = _toByteArray(new GZIPInputStream(stream)); } else { rawResponse = _toByteArray(stream); } // handle http errors if (codeIsError) { if (code / 100 == 4) { consumeQuietly(hostConnection); throw new AlgoliaException(_getJSONObject(rawResponse).getString("message"), code); } else { consumeQuietly(hostConnection); errors.add(new AlgoliaException(_toCharArray(stream), code)); continue; } } return rawResponse; } catch (JSONException e) { // fatal consumeQuietly(hostConnection); throw new AlgoliaException("Invalid JSON returned by server", e); } catch (UnsupportedEncodingException e) { // fatal consumeQuietly(hostConnection); throw new AlgoliaException("Invalid encoding returned by server", e); } catch (IOException e) { // host error, continue on the next host hostStatuses.put(host, new HostStatus(false)); consumeQuietly(hostConnection); errors.add(e); } finally { if (stream != null) { try { stream.close(); } catch (IOException e) { e.printStackTrace(); } } } } String errorMessage = "All hosts failed: " + Arrays.toString(errors.toArray()); // When several errors occurred, use the last one as the cause for the returned exception. Throwable lastError = errors.get(errors.size() - 1); throw new AlgoliaException(errorMessage, lastError); } /** * Ensures that the entity content is fully consumed and the content stream, if exists, * is closed. 
*/ private static void consumeQuietly(final HttpURLConnection connection) { try { int read = 0; while (read != -1) { read = connection.getInputStream().read(); } connection.getInputStream().close(); read = 0; while (read != -1) { read = connection.getErrorStream().read(); } connection.getErrorStream().close(); connection.disconnect(); } catch (IOException e) { // no inputStream to close } } private void checkTimeout(int connectTimeout) { if (connectTimeout <= 0) { throw new IllegalArgumentException(); } } /** * Get the hosts that are not considered down in a given list. * @param hosts a list of hosts whose {@link HostStatus} will be checked. * @return the hosts considered up, or all hosts if none is known to be reachable. */ private List<String> hostsThatAreUp(List<String> hosts) { List<String> upHosts = new ArrayList<>(); for (String host : hosts) { if (isUpOrCouldBeRetried(host)) { upHosts.add(host); } } return upHosts.isEmpty() ? hosts : upHosts; } boolean isUpOrCouldBeRetried(String host) { HostStatus status = hostStatuses.get(host); return status == null || status.isUp || new Date().getTime() - status.lastTryTimestamp >= hostDownDelay; } // ---------------------------------------------------------------------- // Utils // ---------------------------------------------------------------------- /** * Abstract {@link Request} implementation using an `AsyncTask`. * Derived classes just have to implement the {@link #run()} method. */ abstract protected class AsyncTaskRequest implements Request { /** The completion handler notified of the result. May be null if the caller omitted it. */ private CompletionHandler completionHandler; /** The executor used to execute the request. */ private ExecutorService executorService; private boolean finished = false; /** * The underlying asynchronous task. */ private AsyncTask<Void, Void, APIResult> task = new AsyncTask<Void, Void, APIResult>() { @Override protected APIResult doInBackground(Void... 
params) { try { return new APIResult(run()); } catch (AlgoliaException e) { return new APIResult(e); } } @Override protected void onPostExecute(APIResult result) { finished = true; if (completionHandler != null) { completionHandler.requestCompleted(result.content, result.error); } } @Override protected void onCancelled(APIResult apiResult) { finished = true; } }; /** * Construct a new request with the specified completion handler, executing on the client's default executor. * * @param completionHandler The completion handler to be notified of results. May be null if the caller omitted it. */ protected AsyncTaskRequest(@Nullable CompletionHandler completionHandler) { this(completionHandler, searchExecutorService); } /** * Construct a new request with the specified completion handler, executing on the specified executor. * * @param completionHandler The completion handler to be notified of results. May be null if the caller omitted it. * @param executorService Executor service on which to execute the request. */ protected AsyncTaskRequest(@Nullable CompletionHandler completionHandler, @NonNull ExecutorService executorService) { this.completionHandler = completionHandler; this.executorService = executorService; } /** * Run this request synchronously. To be implemented by derived classes. * <p> * <strong>Do not call this method directly.</strong> Will be run in a background thread when calling * {@link #start()}. * </p> * * @return The request's result. * @throws AlgoliaException If an error was encountered. */ @NonNull abstract protected JSONObject run() throws AlgoliaException; /** * Run this request asynchronously. * * @return This instance. */ public AsyncTaskRequest start() { // WARNING: Starting with Honeycomb (3.0), `AsyncTask` execution is serial, so we must force parallel // execution. See <http://developer.android.com/reference/android/os/AsyncTask.html>. 
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) { task.executeOnExecutor(executorService); } else { task.execute(); } return this; } /** * Cancel this request. * The listener will not be called after a request has been cancelled. * <p> * WARNING: Cancelling a request may or may not cancel the underlying network call, depending how late the * cancellation happens. In other words, a cancelled request may have already been executed by the server. In any * case, cancelling never carries "undo" semantics. * </p> */ @Override public void cancel() { // NOTE: We interrupt the task's thread to better cope with timeouts. task.cancel(true /* mayInterruptIfRunning */); } /** * Test if this request is still running. * * @return true if completed or cancelled, false if still running. */ @Override public boolean isFinished() { return finished; } /** * Test if this request has been cancelled. * * @return true if cancelled, false otherwise. */ @Override public boolean isCancelled() { return task.isCancelled(); } } }
/*
 * Copyright (c) 2015 Michael Krotscheck
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package net.krotscheck.stk.stream;

import net.krotscheck.stk.stream.Stream.Builder;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

/**
 * Unit tests for the stream class. Assert that it has some methods, but is
 * mostly a passthrough to the underlying schema builder.
 *
 * @author Michael Krotscheck
 */
public final class StreamTest {

    /**
     * Test raw schema.
     */
    private Map<String, Type> testRaw;

    /**
     * Test schema instance.
     */
    private Schema testSchema;

    /**
     * Test setup (mostly dummy data creation).
     *
     * @throws Exception Any unexpected exception.
     */
    @Before
    public void setUp() throws Exception {
        testRaw = new HashMap<>();
        testRaw.put("col_string", Type.STRING);
        testRaw.put("col_integer", Type.INT);
        testRaw.put("col_byte", Type.BYTE);
        testSchema = new Schema.Builder().add(testRaw).build();
    }

    /**
     * Test teardown.
     */
    @After
    public void tearDown() {
        testRaw = null;
        testSchema = null;
    }

    /**
     * Assert that the constructor is private.
     *
     * NOTE(review): {@code Class.newInstance()} invoked on a class whose
     * no-arg constructor is private normally throws
     * {@code IllegalAccessException}, not {@code InstantiationException} —
     * confirm against Stream's actual declaration (e.g. an abstract class or
     * a throwing constructor would behave differently).
     *
     * @throws Exception Any unexpected exception.
     */
    @Test(expected = InstantiationException.class)
    public void testPrivateConstructor() throws Exception {
        Stream.class.newInstance(); // exception here
    }

    /**
     * Assert that we can retrieve the name and the schema.
     *
     * @throws Exception Any unexpected exception.
     */
    @Test
    public void testGetBasicProperties() throws Exception {
        Stream s = new Builder("test")
                .addSchema(testSchema)
                .build();
        Assert.assertEquals("test", s.getStreamId());

        // Make sure the schema is cloned.
        Assert.assertNotSame(testSchema, s.getSchema());

        // Make sure all the field names match.
        Set<String> fieldSet = new HashSet<>();
        for (String field : s.getFields()) {
            fieldSet.add(field);
        }
        Assert.assertEquals(testSchema.keySet(), fieldSet);
    }

    /**
     * Assert that we can set the direct stream flag.
     *
     * @throws Exception An unexpected exception.
     */
    @Test
    public void testSetDirect() throws Exception {
        Stream s = new Builder("test")
                .isDirect(true)
                .build();
        Assert.assertTrue(s.isDirect());
    }

    /**
     * Assert that we can add a schema.
     *
     * @throws Exception An unexpected exception.
     */
    @Test
    public void testAddSchema() throws Exception {
        Stream s = new Builder("test")
                .addSchema(testSchema)
                .build();
        Schema schema = s.getSchema();
        Assert.assertEquals(testRaw.keySet(), schema.keySet());
    }

    /**
     * Assert that we can add a schema expressed as a map.
     *
     * @throws Exception An unexpected exception.
     */
    @Test
    public void testAddSchemaFields() throws Exception {
        Stream s = new Builder("test")
                .addSchemaFields(testRaw)
                .build();
        Schema schema = s.getSchema();
        Assert.assertEquals(testRaw.keySet(), schema.keySet());
    }

    /**
     * Assert that we can add individual fields.
     *
     * @throws Exception An unexpected exception.
     */
    @Test
    public void testAddSchemaField() throws Exception {
        Stream s = new Builder("test")
                .addSchemaField("test", Type.BOOLEAN)
                .build();
        Schema schema = s.getSchema();
        Assert.assertTrue(schema.containsKey("test"));
        Assert.assertEquals(1, schema.size());
    }

    /**
     * Assert that adding multiple schemae merges them into one.
     *
     * @throws Exception Any unexpected exception.
     */
    @Test
    public void testMergeSchemae() throws Exception {
        Stream s = new Builder("test")
                .addSchema(testSchema)
                .addSchemaFields(testRaw)
                .build();

        // Make sure the schema is cloned.
        Assert.assertNotSame(testSchema, s.getSchema());

        // Make sure content is identical (since testRaw is the raw version
        // of testSchema).
        Assert.assertEquals(testSchema, s.getSchema());
    }

    /**
     * Test that the builder subclass generates an expected stream.
     *
     * @throws Exception Any unexpected exception.
     */
    @Test
    public void testBuilder() throws Exception {
        Builder b = new Builder("test");
        Object s = b.addSchemaFields(testRaw).build();
        Assert.assertTrue("Output must be Stream", s instanceof Stream);
    }

    /**
     * Test that the builder subclass generates distinct stream instances with
     * multiple invocations.
     *
     * @throws Exception Any unexpected exception.
     */
    @Test
    public void testBuilderGeneratesDistinct() throws Exception {
        Builder b = new Builder("test").addSchema(testSchema);
        Stream s1 = b.build();
        Stream s2 = b.build();

        Assert.assertNotSame(s1, s2);
        Assert.assertNotSame(s1.getSchema(), s2.getSchema());
    }

    /**
     * Test that we can merge two different streams.
     *
     * @throws Exception Any unexpected exception.
     */
    @Test
    public void testMergeBasic() throws Exception {
        Builder b1 = new Builder("default")
                .addSchemaField("field_1", Type.STRING)
                .addSchemaField("field_2", Type.BOOLEAN);
        Builder b2 = new Builder("default")
                .addSchemaField("field_3", Type.BYTE)
                .addSchemaField("field_4", Type.STRING);

        Stream s1 = b1.build();
        Stream s2 = b2.build();
        Stream sm = s1.merge(s2);
        Schema s = sm.getSchema();

        // Assert that two different schemae are merged.
        Assert.assertEquals(4, s.size());
        Assert.assertTrue(s.containsKey("field_1"));
        Assert.assertTrue(s.containsKey("field_2"));
        Assert.assertTrue(s.containsKey("field_3"));
        Assert.assertTrue(s.containsKey("field_4"));

        // Assert that the invoker's streamId is the same.
        Assert.assertEquals("default", sm.getStreamId());

        // Assert that the invoker's 'direct' flag is the same.
        Assert.assertFalse(sm.isDirect());
    }

    /**
     * Test that name differences throw conflicts.
     *
     * NOTE(review): despite the summary above, this test does not expect an
     * exception — it asserts that merging streams with different names yields
     * a new instance equal to the argument. Confirm against Stream.merge()'s
     * documented conflict-resolution behavior and consider rewording.
     *
     * @throws Exception Any unexpected exception.
     */
    @Test
    public void testMergeNameConflict() throws Exception {
        Stream s1 = new Builder("default").build();
        Stream s2 = new Builder("not_default").build();

        Stream s3 = s1.merge(s2);
        Assert.assertNotSame(s3, s2);
        Assert.assertNotSame(s3, s1);

        // This check handles the equality, as we're not adding anything else
        // that could cause a hashCode conflict.
        Assert.assertEquals(s3, s2);
    }

    /**
     * Test that 'direct' differences throw conflicts.
     *
     * NOTE(review): as with testMergeNameConflict, no exception is expected
     * here — the merge result simply takes the argument's 'direct' flag
     * (false). Confirm intent and consider renaming/rewording.
     *
     * @throws Exception Any unexpected exception.
     */
    @Test
    public void testMergeDirectConflict() throws Exception {
        Stream s1 = new Builder("default").isDirect(true).build();
        Stream s2 = new Builder("default").isDirect(false).build();

        Stream s3 = s1.merge(s2);
        Assert.assertNotSame(s3, s2);
        Assert.assertNotSame(s3, s1);
        Assert.assertFalse(s3.isDirect());
    }

    /**
     * Test that streams' equality can be tested.
     *
     * @throws Exception Any unexpected exception.
     */
    @Test
    public void testEquals() throws Exception {
        Builder b = new Builder("default").addSchema(testSchema);
        Builder bm = new Builder("management").addSchema(testSchema);
        Builder be = new Builder("default");
        Builder bd = new Builder("default").isDirect(true);

        Stream s1 = b.build();
        Stream s2 = b.build();
        Stream s3 = be.build();
        Stream s4 = bm.build();
        Stream s5 = bd.build();

        Assert.assertEquals(s1, s1);

        Assert.assertEquals(s1, s2);
        Assert.assertEquals(s2, s1);

        Assert.assertNotEquals(s1, s3);
        Assert.assertNotEquals(s2, s3);
        Assert.assertNotEquals(s3, s1);
        Assert.assertNotEquals(s3, s2);

        Assert.assertNotEquals(s1, s4);
        Assert.assertNotEquals(s2, s4);
        Assert.assertNotEquals(s4, s1);
        Assert.assertNotEquals(s4, s2);

        Assert.assertNotEquals(s1, s5);
        Assert.assertNotEquals(s2, s5);
        Assert.assertNotEquals(s5, s1);
        Assert.assertNotEquals(s5, s2);

        Assert.assertNotEquals(new Object(), s1);
        Assert.assertNotEquals(s1, new Object());

        // Null test
        Assert.assertFalse(s1.equals(null));
    }

    /**
     * Test that different (cloned) versions of a schema are detected in a
     * hash.
     *
     * @throws Exception Any unexpected exception.
     */
    @Test
    public void testDiffHash() throws Exception {
        Stream s1 = new Stream.Builder("foo").addSchema(testSchema).build();
        Stream s2 = new Stream.Builder("foo1").addSchema(testSchema).build();
        Stream s3 = new Stream.Builder("foo").build();

        Assert.assertNotEquals(s1.hashCode(), s2.hashCode());
        Assert.assertNotEquals(s1.hashCode(), s3.hashCode());
    }

    /**
     * Test that similar (cloned) versions of a schema are detected in a hash.
     *
     * @throws Exception Any unexpected exception.
     */
    @Test
    public void testHash() throws Exception {
        Builder b = new Stream.Builder("foo").addSchema(testSchema);
        Stream s1 = b.build();
        Stream s2 = b.build();

        Assert.assertEquals(s1.hashCode(), s2.hashCode());

        // Equal hash codes must make the instances interchangeable as set members.
        HashSet<Stream> hs = new HashSet<>();
        hs.add(s1);
        Assert.assertTrue(hs.contains(s2));
    }

    /**
     * Test that the stream can be serialized.
     *
     * @throws Exception Any unexpected exception.
     */
    @Test
    public void testSerializable() throws Exception {
        Builder b = new Stream.Builder("foo").addSchema(testSchema);
        Stream s = b.build();

        // Serialize the object
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        ObjectOutputStream oos = new ObjectOutputStream(baos);
        oos.writeObject(s);
        byte[] result = baos.toByteArray();
        oos.close();

        // Deserialize the object
        ByteArrayInputStream bais = new ByteArrayInputStream(result);
        ObjectInputStream ois = new ObjectInputStream(bais);
        Object deserializedStream = ois.readObject();

        // Round-tripped instance must compare equal to the original.
        Assert.assertTrue(s.equals(deserializedStream));
    }
}
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for
 * license information.
 *
 * Code generated by Microsoft (R) AutoRest Code Generator.
 */
package com.microsoft.azure.management.network.v2019_04_01.implementation;

import com.microsoft.azure.arm.resources.collection.implementation.GroupableResourcesCoreImpl;
import com.microsoft.azure.management.network.v2019_04_01.ApplicationGateways;
import com.microsoft.azure.management.network.v2019_04_01.ApplicationGateway;
import rx.Observable;
import rx.Completable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import com.microsoft.azure.arm.resources.ResourceUtilsCore;
import com.microsoft.azure.arm.utils.RXMapper;
import rx.functions.Func1;
import com.microsoft.azure.PagedList;
import com.microsoft.azure.Page;
import java.util.List;
import com.microsoft.azure.management.network.v2019_04_01.ApplicationGatewayBackendHealth;
import com.microsoft.azure.management.network.v2019_04_01.ApplicationGatewayBackendHealthOnDemand;
import com.microsoft.azure.management.network.v2019_04_01.ApplicationGatewayAvailableWafRuleSetsResult;
import com.microsoft.azure.management.network.v2019_04_01.ApplicationGatewayOnDemandProbe;
import com.microsoft.azure.management.network.v2019_04_01.ApplicationGatewayAvailableSslOptions;
import com.microsoft.azure.management.network.v2019_04_01.ApplicationGatewaySslPredefinedPolicy;

/**
 * Implementation of the {@link ApplicationGateways} collection entry point.
 * Delegates every operation to the auto-generated {@link ApplicationGatewaysInner}
 * REST client and wraps inner models into {@link ApplicationGatewayImpl} fluent
 * models. Generated by AutoRest; do not hand-edit logic.
 */
class ApplicationGatewaysImpl extends GroupableResourcesCoreImpl<ApplicationGateway, ApplicationGatewayImpl, ApplicationGatewayInner, ApplicationGatewaysInner, NetworkManager> implements ApplicationGateways {
    protected ApplicationGatewaysImpl(NetworkManager manager) {
        super(manager.inner().applicationGateways(), manager);
    }

    /** Fetches the inner resource by resource group and name. */
    @Override
    protected Observable<ApplicationGatewayInner> getInnerAsync(String resourceGroupName, String name) {
        ApplicationGatewaysInner client = this.inner();
        return client.getByResourceGroupAsync(resourceGroupName, name);
    }

    /** Deletes the inner resource; the Completable finishes when the delete does. */
    @Override
    protected Completable deleteInnerAsync(String resourceGroupName, String name) {
        ApplicationGatewaysInner client = this.inner();
        return client.deleteAsync(resourceGroupName, name).toCompletable();
    }

    /**
     * Deletes all gateways identified by the given resource ids, emitting each
     * id as its delete completes. Errors are deferred until every delete has
     * been attempted (mergeDelayError). A null or empty collection is a no-op.
     */
    @Override
    public Observable<String> deleteByIdsAsync(Collection<String> ids) {
        if (ids == null || ids.isEmpty()) {
            return Observable.empty();
        }
        Collection<Observable<String>> observables = new ArrayList<>();
        for (String id : ids) {
            // Resource group and resource name are parsed out of the ARM id.
            final String resourceGroupName = ResourceUtilsCore.groupFromResourceId(id);
            final String name = ResourceUtilsCore.nameFromResourceId(id);
            // RXMapper.map ties the delete's completion to an emission of `id`.
            Observable<String> o = RXMapper.map(this.inner().deleteAsync(resourceGroupName, name), id);
            observables.add(o);
        }
        return Observable.mergeDelayError(observables);
    }

    /** Varargs convenience overload of {@link #deleteByIdsAsync(Collection)}. */
    @Override
    public Observable<String> deleteByIdsAsync(String...ids) {
        return this.deleteByIdsAsync(new ArrayList<String>(Arrays.asList(ids)));
    }

    /** Blocking variant: waits (via last()) for every delete to complete. */
    @Override
    public void deleteByIds(Collection<String> ids) {
        if (ids != null && !ids.isEmpty()) {
            this.deleteByIdsAsync(ids).toBlocking().last();
        }
    }

    /** Varargs convenience overload of {@link #deleteByIds(Collection)}. */
    @Override
    public void deleteByIds(String...ids) {
        this.deleteByIds(new ArrayList<String>(Arrays.asList(ids)));
    }

    /** Lists gateways in a resource group as a lazily-wrapped paged list. */
    @Override
    public PagedList<ApplicationGateway> listByResourceGroup(String resourceGroupName) {
        ApplicationGatewaysInner client = this.inner();
        return this.wrapList(client.listByResourceGroup(resourceGroupName));
    }

    /** Async list by resource group: flattens pages, then wraps each inner model. */
    @Override
    public Observable<ApplicationGateway> listByResourceGroupAsync(String resourceGroupName) {
        ApplicationGatewaysInner client = this.inner();
        return client.listByResourceGroupAsync(resourceGroupName)
        .flatMapIterable(new Func1<Page<ApplicationGatewayInner>, Iterable<ApplicationGatewayInner>>() {
            @Override
            public Iterable<ApplicationGatewayInner> call(Page<ApplicationGatewayInner> page) {
                return page.items();
            }
        })
        .map(new Func1<ApplicationGatewayInner, ApplicationGateway>() {
            @Override
            public ApplicationGateway call(ApplicationGatewayInner inner) {
                return wrapModel(inner);
            }
        });
    }

    /** Lists all gateways in the subscription as a lazily-wrapped paged list. */
    @Override
    public PagedList<ApplicationGateway> list() {
        ApplicationGatewaysInner client = this.inner();
        return this.wrapList(client.listAll());
    }

    /** Async subscription-wide list: flattens pages, then wraps each inner model. */
    @Override
    public Observable<ApplicationGateway> listAsync() {
        ApplicationGatewaysInner client = this.inner();
        return client.listAllAsync()
        .flatMapIterable(new Func1<Page<ApplicationGatewayInner>, Iterable<ApplicationGatewayInner>>() {
            @Override
            public Iterable<ApplicationGatewayInner> call(Page<ApplicationGatewayInner> page) {
                return page.items();
            }
        })
        .map(new Func1<ApplicationGatewayInner, ApplicationGateway>() {
            @Override
            public ApplicationGateway call(ApplicationGatewayInner inner) {
                return wrapModel(inner);
            }
        });
    }

    /** Begins a fluent definition for a new application gateway. */
    @Override
    public ApplicationGatewayImpl define(String name) {
        return wrapModel(name);
    }

    /** Starts the named application gateway. */
    @Override
    public Completable startAsync(String resourceGroupName, String applicationGatewayName) {
        ApplicationGatewaysInner client = this.inner();
        return client.startAsync(resourceGroupName, applicationGatewayName).toCompletable();
    }

    /** Stops the named application gateway. */
    @Override
    public Completable stopAsync(String resourceGroupName, String applicationGatewayName) {
        ApplicationGatewaysInner client = this.inner();
        return client.stopAsync(resourceGroupName, applicationGatewayName).toCompletable();
    }

    /** Gets backend health, wrapping the inner result into the fluent model. */
    @Override
    public Observable<ApplicationGatewayBackendHealth> backendHealthAsync(String resourceGroupName, String applicationGatewayName) {
        ApplicationGatewaysInner client = this.inner();
        return client.backendHealthAsync(resourceGroupName, applicationGatewayName)
        .map(new Func1<ApplicationGatewayBackendHealthInner, ApplicationGatewayBackendHealth>() {
            @Override
            public ApplicationGatewayBackendHealth call(ApplicationGatewayBackendHealthInner inner) {
                return new ApplicationGatewayBackendHealthImpl(inner, manager());
            }
        });
    }

    /** Gets on-demand backend health for the supplied probe request. */
    @Override
    public Observable<ApplicationGatewayBackendHealthOnDemand> backendHealthOnDemandAsync(String resourceGroupName, String applicationGatewayName, ApplicationGatewayOnDemandProbe probeRequest) {
        ApplicationGatewaysInner client = this.inner();
        return client.backendHealthOnDemandAsync(resourceGroupName, applicationGatewayName, probeRequest)
        .map(new Func1<ApplicationGatewayBackendHealthOnDemandInner, ApplicationGatewayBackendHealthOnDemand>() {
            @Override
            public ApplicationGatewayBackendHealthOnDemand call(ApplicationGatewayBackendHealthOnDemandInner inner) {
                return new ApplicationGatewayBackendHealthOnDemandImpl(inner, manager());
            }
        });
    }

    /** Emits each available server variable name individually. */
    @Override
    public Observable<String> listAvailableServerVariablesAsync() {
        ApplicationGatewaysInner client = this.inner();
        return client.listAvailableServerVariablesAsync()
        .flatMap(new Func1<List<String>, Observable<String>>() {
            @Override
            public Observable<String> call(List<String> innerList) {
                return Observable.from(innerList);
            }
        });
    }

    /** Emits each available request header name individually. */
    @Override
    public Observable<String> listAvailableRequestHeadersAsync() {
        ApplicationGatewaysInner client = this.inner();
        return client.listAvailableRequestHeadersAsync()
        .flatMap(new Func1<List<String>, Observable<String>>() {
            @Override
            public Observable<String> call(List<String> innerList) {
                return Observable.from(innerList);
            }
        });
    }

    /** Emits each available response header name individually. */
    @Override
    public Observable<String> listAvailableResponseHeadersAsync() {
        ApplicationGatewaysInner client = this.inner();
        return client.listAvailableResponseHeadersAsync()
        .flatMap(new Func1<List<String>, Observable<String>>() {
            @Override
            public Observable<String> call(List<String> innerList) {
                return Observable.from(innerList);
            }
        });
    }

    /** Lists available WAF rule sets, wrapped into the fluent model. */
    @Override
    public Observable<ApplicationGatewayAvailableWafRuleSetsResult> listAvailableWafRuleSetsAsync() {
        ApplicationGatewaysInner client = this.inner();
        return client.listAvailableWafRuleSetsAsync()
        .map(new Func1<ApplicationGatewayAvailableWafRuleSetsResultInner, ApplicationGatewayAvailableWafRuleSetsResult>() {
            @Override
            public ApplicationGatewayAvailableWafRuleSetsResult call(ApplicationGatewayAvailableWafRuleSetsResultInner inner) {
                return new ApplicationGatewayAvailableWafRuleSetsResultImpl(inner, manager());
            }
        });
    }

    /** Wraps an existing inner model into the fluent model. */
    @Override
    protected ApplicationGatewayImpl wrapModel(ApplicationGatewayInner inner) {
        return new ApplicationGatewayImpl(inner.name(), inner, manager());
    }

    /** Wraps a fresh, empty inner model for a new definition. */
    @Override
    protected ApplicationGatewayImpl wrapModel(String name) {
        return new ApplicationGatewayImpl(name, new ApplicationGatewayInner(), this.manager());
    }

    /** Lists available SSL options, wrapped into the fluent model. */
    @Override
    public Observable<ApplicationGatewayAvailableSslOptions> listAvailableSslOptionsAsync() {
        ApplicationGatewaysInner client = this.inner();
        return client.listAvailableSslOptionsAsync()
        .map(new Func1<ApplicationGatewayAvailableSslOptionsInner, ApplicationGatewayAvailableSslOptions>() {
            @Override
            public ApplicationGatewayAvailableSslOptions call(ApplicationGatewayAvailableSslOptionsInner inner) {
                return new ApplicationGatewayAvailableSslOptionsImpl(inner, manager());
            }
        });
    }

    /** Lists predefined SSL policies: flattens pages, wraps each inner policy. */
    @Override
    public Observable<ApplicationGatewaySslPredefinedPolicy> listAvailableSslPredefinedPoliciesAsync() {
        ApplicationGatewaysInner client = this.inner();
        return client.listAvailableSslPredefinedPoliciesAsync()
        .flatMapIterable(new Func1<Page<ApplicationGatewaySslPredefinedPolicyInner>, Iterable<ApplicationGatewaySslPredefinedPolicyInner>>() {
            @Override
            public Iterable<ApplicationGatewaySslPredefinedPolicyInner> call(Page<ApplicationGatewaySslPredefinedPolicyInner> page) {
                return page.items();
            }
        })
        .map(new Func1<ApplicationGatewaySslPredefinedPolicyInner, ApplicationGatewaySslPredefinedPolicy>() {
            @Override
            public ApplicationGatewaySslPredefinedPolicy call(ApplicationGatewaySslPredefinedPolicyInner inner) {
                return new ApplicationGatewaySslPredefinedPolicyImpl(inner, manager());
            }
        });
    }

    /** Fetches a single predefined SSL policy by name, wrapped into the fluent model. */
    @Override
    public Observable<ApplicationGatewaySslPredefinedPolicy> getSslPredefinedPolicyAsync(String predefinedPolicyName) {
        ApplicationGatewaysInner client = this.inner();
        return client.getSslPredefinedPolicyAsync(predefinedPolicyName)
        .map(new Func1<ApplicationGatewaySslPredefinedPolicyInner, ApplicationGatewaySslPredefinedPolicy>() {
            @Override
            public ApplicationGatewaySslPredefinedPolicy call(ApplicationGatewaySslPredefinedPolicyInner inner) {
                return new ApplicationGatewaySslPredefinedPolicyImpl(inner, manager());
            }
        });
    }

}
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/devtools/cloudtrace/v1/trace.proto package com.google.devtools.cloudtrace.v1; /** * * * <pre> * A trace describes how long it takes for an application to perform an * operation. It consists of a set of spans, each of which represent a single * timed event within the operation. * </pre> * * Protobuf type {@code google.devtools.cloudtrace.v1.Trace} */ public final class Trace extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.devtools.cloudtrace.v1.Trace) TraceOrBuilder { private static final long serialVersionUID = 0L; // Use Trace.newBuilder() to construct. private Trace(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private Trace() { projectId_ = ""; traceId_ = ""; spans_ = java.util.Collections.emptyList(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private Trace( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { java.lang.String s = input.readStringRequireUtf8(); projectId_ = s; break; } case 18: { java.lang.String s = input.readStringRequireUtf8(); traceId_ = s; break; } case 26: { if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { spans_ = new java.util.ArrayList<com.google.devtools.cloudtrace.v1.TraceSpan>(); mutable_bitField0_ |= 0x00000004; } spans_.add( input.readMessage( com.google.devtools.cloudtrace.v1.TraceSpan.parser(), extensionRegistry)); 
break; } default: { if (!parseUnknownFieldProto3(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { spans_ = java.util.Collections.unmodifiableList(spans_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.devtools.cloudtrace.v1.TraceProto .internal_static_google_devtools_cloudtrace_v1_Trace_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.devtools.cloudtrace.v1.TraceProto .internal_static_google_devtools_cloudtrace_v1_Trace_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.devtools.cloudtrace.v1.Trace.class, com.google.devtools.cloudtrace.v1.Trace.Builder.class); } private int bitField0_; public static final int PROJECT_ID_FIELD_NUMBER = 1; private volatile java.lang.Object projectId_; /** * * * <pre> * Project ID of the Cloud project where the trace data is stored. * </pre> * * <code>string project_id = 1;</code> */ public java.lang.String getProjectId() { java.lang.Object ref = projectId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); projectId_ = s; return s; } } /** * * * <pre> * Project ID of the Cloud project where the trace data is stored. 
* </pre> * * <code>string project_id = 1;</code> */ public com.google.protobuf.ByteString getProjectIdBytes() { java.lang.Object ref = projectId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); projectId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int TRACE_ID_FIELD_NUMBER = 2; private volatile java.lang.Object traceId_; /** * * * <pre> * Globally unique identifier for the trace. This identifier is a 128-bit * numeric value formatted as a 32-byte hex string. * </pre> * * <code>string trace_id = 2;</code> */ public java.lang.String getTraceId() { java.lang.Object ref = traceId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); traceId_ = s; return s; } } /** * * * <pre> * Globally unique identifier for the trace. This identifier is a 128-bit * numeric value formatted as a 32-byte hex string. * </pre> * * <code>string trace_id = 2;</code> */ public com.google.protobuf.ByteString getTraceIdBytes() { java.lang.Object ref = traceId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); traceId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int SPANS_FIELD_NUMBER = 3; private java.util.List<com.google.devtools.cloudtrace.v1.TraceSpan> spans_; /** * * * <pre> * Collection of spans in the trace. * </pre> * * <code>repeated .google.devtools.cloudtrace.v1.TraceSpan spans = 3;</code> */ public java.util.List<com.google.devtools.cloudtrace.v1.TraceSpan> getSpansList() { return spans_; } /** * * * <pre> * Collection of spans in the trace. * </pre> * * <code>repeated .google.devtools.cloudtrace.v1.TraceSpan spans = 3;</code> */ public java.util.List<? 
extends com.google.devtools.cloudtrace.v1.TraceSpanOrBuilder> getSpansOrBuilderList() { return spans_; } /** * * * <pre> * Collection of spans in the trace. * </pre> * * <code>repeated .google.devtools.cloudtrace.v1.TraceSpan spans = 3;</code> */ public int getSpansCount() { return spans_.size(); } /** * * * <pre> * Collection of spans in the trace. * </pre> * * <code>repeated .google.devtools.cloudtrace.v1.TraceSpan spans = 3;</code> */ public com.google.devtools.cloudtrace.v1.TraceSpan getSpans(int index) { return spans_.get(index); } /** * * * <pre> * Collection of spans in the trace. * </pre> * * <code>repeated .google.devtools.cloudtrace.v1.TraceSpan spans = 3;</code> */ public com.google.devtools.cloudtrace.v1.TraceSpanOrBuilder getSpansOrBuilder(int index) { return spans_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!getProjectIdBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, projectId_); } if (!getTraceIdBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, traceId_); } for (int i = 0; i < spans_.size(); i++) { output.writeMessage(3, spans_.get(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!getProjectIdBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, projectId_); } if (!getTraceIdBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, traceId_); } for (int i = 0; i < spans_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, 
spans_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.devtools.cloudtrace.v1.Trace)) { return super.equals(obj); } com.google.devtools.cloudtrace.v1.Trace other = (com.google.devtools.cloudtrace.v1.Trace) obj; boolean result = true; result = result && getProjectId().equals(other.getProjectId()); result = result && getTraceId().equals(other.getTraceId()); result = result && getSpansList().equals(other.getSpansList()); result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PROJECT_ID_FIELD_NUMBER; hash = (53 * hash) + getProjectId().hashCode(); hash = (37 * hash) + TRACE_ID_FIELD_NUMBER; hash = (53 * hash) + getTraceId().hashCode(); if (getSpansCount() > 0) { hash = (37 * hash) + SPANS_FIELD_NUMBER; hash = (53 * hash) + getSpansList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.devtools.cloudtrace.v1.Trace parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.devtools.cloudtrace.v1.Trace parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.devtools.cloudtrace.v1.Trace parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.devtools.cloudtrace.v1.Trace parseFrom( com.google.protobuf.ByteString data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.devtools.cloudtrace.v1.Trace parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.devtools.cloudtrace.v1.Trace parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.devtools.cloudtrace.v1.Trace parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.devtools.cloudtrace.v1.Trace parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.devtools.cloudtrace.v1.Trace parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.devtools.cloudtrace.v1.Trace parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.devtools.cloudtrace.v1.Trace parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.devtools.cloudtrace.v1.Trace parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.devtools.cloudtrace.v1.Trace prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * A trace describes how long it takes for an application to perform an * operation. It consists of a set of spans, each of which represent a single * timed event within the operation. * </pre> * * Protobuf type {@code google.devtools.cloudtrace.v1.Trace} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.devtools.cloudtrace.v1.Trace) com.google.devtools.cloudtrace.v1.TraceOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.devtools.cloudtrace.v1.TraceProto .internal_static_google_devtools_cloudtrace_v1_Trace_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.devtools.cloudtrace.v1.TraceProto .internal_static_google_devtools_cloudtrace_v1_Trace_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.devtools.cloudtrace.v1.Trace.class, com.google.devtools.cloudtrace.v1.Trace.Builder.class); } // Construct using com.google.devtools.cloudtrace.v1.Trace.newBuilder() private Builder() { maybeForceBuilderInitialization(); } 
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getSpansFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); projectId_ = ""; traceId_ = ""; if (spansBuilder_ == null) { spans_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000004); } else { spansBuilder_.clear(); } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.devtools.cloudtrace.v1.TraceProto .internal_static_google_devtools_cloudtrace_v1_Trace_descriptor; } @java.lang.Override public com.google.devtools.cloudtrace.v1.Trace getDefaultInstanceForType() { return com.google.devtools.cloudtrace.v1.Trace.getDefaultInstance(); } @java.lang.Override public com.google.devtools.cloudtrace.v1.Trace build() { com.google.devtools.cloudtrace.v1.Trace result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.devtools.cloudtrace.v1.Trace buildPartial() { com.google.devtools.cloudtrace.v1.Trace result = new com.google.devtools.cloudtrace.v1.Trace(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; result.projectId_ = projectId_; result.traceId_ = traceId_; if (spansBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004)) { spans_ = java.util.Collections.unmodifiableList(spans_); bitField0_ = (bitField0_ & ~0x00000004); } result.spans_ = spans_; } else { result.spans_ = spansBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return (Builder) super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return (Builder) 
super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return (Builder) super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return (Builder) super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.devtools.cloudtrace.v1.Trace) { return mergeFrom((com.google.devtools.cloudtrace.v1.Trace) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.devtools.cloudtrace.v1.Trace other) { if (other == com.google.devtools.cloudtrace.v1.Trace.getDefaultInstance()) return this; if (!other.getProjectId().isEmpty()) { projectId_ = other.projectId_; onChanged(); } if (!other.getTraceId().isEmpty()) { traceId_ = other.traceId_; onChanged(); } if (spansBuilder_ == null) { if (!other.spans_.isEmpty()) { if (spans_.isEmpty()) { spans_ = other.spans_; bitField0_ = (bitField0_ & ~0x00000004); } else { ensureSpansIsMutable(); spans_.addAll(other.spans_); } onChanged(); } } else { if (!other.spans_.isEmpty()) { if (spansBuilder_.isEmpty()) { spansBuilder_.dispose(); spansBuilder_ = null; spans_ = other.spans_; bitField0_ = (bitField0_ & ~0x00000004); spansBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getSpansFieldBuilder() : null; } else { spansBuilder_.addAllMessages(other.spans_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.devtools.cloudtrace.v1.Trace parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.devtools.cloudtrace.v1.Trace) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object projectId_ = ""; /** * * * <pre> * Project ID of the Cloud project where the trace data is stored. * </pre> * * <code>string project_id = 1;</code> */ public java.lang.String getProjectId() { java.lang.Object ref = projectId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); projectId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Project ID of the Cloud project where the trace data is stored. * </pre> * * <code>string project_id = 1;</code> */ public com.google.protobuf.ByteString getProjectIdBytes() { java.lang.Object ref = projectId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); projectId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Project ID of the Cloud project where the trace data is stored. 
* </pre> * * <code>string project_id = 1;</code> */ public Builder setProjectId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } projectId_ = value; onChanged(); return this; } /** * * * <pre> * Project ID of the Cloud project where the trace data is stored. * </pre> * * <code>string project_id = 1;</code> */ public Builder clearProjectId() { projectId_ = getDefaultInstance().getProjectId(); onChanged(); return this; } /** * * * <pre> * Project ID of the Cloud project where the trace data is stored. * </pre> * * <code>string project_id = 1;</code> */ public Builder setProjectIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); projectId_ = value; onChanged(); return this; } private java.lang.Object traceId_ = ""; /** * * * <pre> * Globally unique identifier for the trace. This identifier is a 128-bit * numeric value formatted as a 32-byte hex string. * </pre> * * <code>string trace_id = 2;</code> */ public java.lang.String getTraceId() { java.lang.Object ref = traceId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); traceId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Globally unique identifier for the trace. This identifier is a 128-bit * numeric value formatted as a 32-byte hex string. * </pre> * * <code>string trace_id = 2;</code> */ public com.google.protobuf.ByteString getTraceIdBytes() { java.lang.Object ref = traceId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); traceId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Globally unique identifier for the trace. This identifier is a 128-bit * numeric value formatted as a 32-byte hex string. 
* </pre> * * <code>string trace_id = 2;</code> */ public Builder setTraceId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } traceId_ = value; onChanged(); return this; } /** * * * <pre> * Globally unique identifier for the trace. This identifier is a 128-bit * numeric value formatted as a 32-byte hex string. * </pre> * * <code>string trace_id = 2;</code> */ public Builder clearTraceId() { traceId_ = getDefaultInstance().getTraceId(); onChanged(); return this; } /** * * * <pre> * Globally unique identifier for the trace. This identifier is a 128-bit * numeric value formatted as a 32-byte hex string. * </pre> * * <code>string trace_id = 2;</code> */ public Builder setTraceIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); traceId_ = value; onChanged(); return this; } private java.util.List<com.google.devtools.cloudtrace.v1.TraceSpan> spans_ = java.util.Collections.emptyList(); private void ensureSpansIsMutable() { if (!((bitField0_ & 0x00000004) == 0x00000004)) { spans_ = new java.util.ArrayList<com.google.devtools.cloudtrace.v1.TraceSpan>(spans_); bitField0_ |= 0x00000004; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.devtools.cloudtrace.v1.TraceSpan, com.google.devtools.cloudtrace.v1.TraceSpan.Builder, com.google.devtools.cloudtrace.v1.TraceSpanOrBuilder> spansBuilder_; /** * * * <pre> * Collection of spans in the trace. * </pre> * * <code>repeated .google.devtools.cloudtrace.v1.TraceSpan spans = 3;</code> */ public java.util.List<com.google.devtools.cloudtrace.v1.TraceSpan> getSpansList() { if (spansBuilder_ == null) { return java.util.Collections.unmodifiableList(spans_); } else { return spansBuilder_.getMessageList(); } } /** * * * <pre> * Collection of spans in the trace. 
* </pre> * * <code>repeated .google.devtools.cloudtrace.v1.TraceSpan spans = 3;</code> */ public int getSpansCount() { if (spansBuilder_ == null) { return spans_.size(); } else { return spansBuilder_.getCount(); } } /** * * * <pre> * Collection of spans in the trace. * </pre> * * <code>repeated .google.devtools.cloudtrace.v1.TraceSpan spans = 3;</code> */ public com.google.devtools.cloudtrace.v1.TraceSpan getSpans(int index) { if (spansBuilder_ == null) { return spans_.get(index); } else { return spansBuilder_.getMessage(index); } } /** * * * <pre> * Collection of spans in the trace. * </pre> * * <code>repeated .google.devtools.cloudtrace.v1.TraceSpan spans = 3;</code> */ public Builder setSpans(int index, com.google.devtools.cloudtrace.v1.TraceSpan value) { if (spansBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureSpansIsMutable(); spans_.set(index, value); onChanged(); } else { spansBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * Collection of spans in the trace. * </pre> * * <code>repeated .google.devtools.cloudtrace.v1.TraceSpan spans = 3;</code> */ public Builder setSpans( int index, com.google.devtools.cloudtrace.v1.TraceSpan.Builder builderForValue) { if (spansBuilder_ == null) { ensureSpansIsMutable(); spans_.set(index, builderForValue.build()); onChanged(); } else { spansBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Collection of spans in the trace. * </pre> * * <code>repeated .google.devtools.cloudtrace.v1.TraceSpan spans = 3;</code> */ public Builder addSpans(com.google.devtools.cloudtrace.v1.TraceSpan value) { if (spansBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureSpansIsMutable(); spans_.add(value); onChanged(); } else { spansBuilder_.addMessage(value); } return this; } /** * * * <pre> * Collection of spans in the trace. 
* </pre> * * <code>repeated .google.devtools.cloudtrace.v1.TraceSpan spans = 3;</code> */ public Builder addSpans(int index, com.google.devtools.cloudtrace.v1.TraceSpan value) { if (spansBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureSpansIsMutable(); spans_.add(index, value); onChanged(); } else { spansBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * Collection of spans in the trace. * </pre> * * <code>repeated .google.devtools.cloudtrace.v1.TraceSpan spans = 3;</code> */ public Builder addSpans(com.google.devtools.cloudtrace.v1.TraceSpan.Builder builderForValue) { if (spansBuilder_ == null) { ensureSpansIsMutable(); spans_.add(builderForValue.build()); onChanged(); } else { spansBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * Collection of spans in the trace. * </pre> * * <code>repeated .google.devtools.cloudtrace.v1.TraceSpan spans = 3;</code> */ public Builder addSpans( int index, com.google.devtools.cloudtrace.v1.TraceSpan.Builder builderForValue) { if (spansBuilder_ == null) { ensureSpansIsMutable(); spans_.add(index, builderForValue.build()); onChanged(); } else { spansBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Collection of spans in the trace. * </pre> * * <code>repeated .google.devtools.cloudtrace.v1.TraceSpan spans = 3;</code> */ public Builder addAllSpans( java.lang.Iterable<? extends com.google.devtools.cloudtrace.v1.TraceSpan> values) { if (spansBuilder_ == null) { ensureSpansIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, spans_); onChanged(); } else { spansBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * Collection of spans in the trace. 
* </pre> * * <code>repeated .google.devtools.cloudtrace.v1.TraceSpan spans = 3;</code> */ public Builder clearSpans() { if (spansBuilder_ == null) { spans_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); } else { spansBuilder_.clear(); } return this; } /** * * * <pre> * Collection of spans in the trace. * </pre> * * <code>repeated .google.devtools.cloudtrace.v1.TraceSpan spans = 3;</code> */ public Builder removeSpans(int index) { if (spansBuilder_ == null) { ensureSpansIsMutable(); spans_.remove(index); onChanged(); } else { spansBuilder_.remove(index); } return this; } /** * * * <pre> * Collection of spans in the trace. * </pre> * * <code>repeated .google.devtools.cloudtrace.v1.TraceSpan spans = 3;</code> */ public com.google.devtools.cloudtrace.v1.TraceSpan.Builder getSpansBuilder(int index) { return getSpansFieldBuilder().getBuilder(index); } /** * * * <pre> * Collection of spans in the trace. * </pre> * * <code>repeated .google.devtools.cloudtrace.v1.TraceSpan spans = 3;</code> */ public com.google.devtools.cloudtrace.v1.TraceSpanOrBuilder getSpansOrBuilder(int index) { if (spansBuilder_ == null) { return spans_.get(index); } else { return spansBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * Collection of spans in the trace. * </pre> * * <code>repeated .google.devtools.cloudtrace.v1.TraceSpan spans = 3;</code> */ public java.util.List<? extends com.google.devtools.cloudtrace.v1.TraceSpanOrBuilder> getSpansOrBuilderList() { if (spansBuilder_ != null) { return spansBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(spans_); } } /** * * * <pre> * Collection of spans in the trace. 
* </pre> * * <code>repeated .google.devtools.cloudtrace.v1.TraceSpan spans = 3;</code> */ public com.google.devtools.cloudtrace.v1.TraceSpan.Builder addSpansBuilder() { return getSpansFieldBuilder() .addBuilder(com.google.devtools.cloudtrace.v1.TraceSpan.getDefaultInstance()); } /** * * * <pre> * Collection of spans in the trace. * </pre> * * <code>repeated .google.devtools.cloudtrace.v1.TraceSpan spans = 3;</code> */ public com.google.devtools.cloudtrace.v1.TraceSpan.Builder addSpansBuilder(int index) { return getSpansFieldBuilder() .addBuilder(index, com.google.devtools.cloudtrace.v1.TraceSpan.getDefaultInstance()); } /** * * * <pre> * Collection of spans in the trace. * </pre> * * <code>repeated .google.devtools.cloudtrace.v1.TraceSpan spans = 3;</code> */ public java.util.List<com.google.devtools.cloudtrace.v1.TraceSpan.Builder> getSpansBuilderList() { return getSpansFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.devtools.cloudtrace.v1.TraceSpan, com.google.devtools.cloudtrace.v1.TraceSpan.Builder, com.google.devtools.cloudtrace.v1.TraceSpanOrBuilder> getSpansFieldBuilder() { if (spansBuilder_ == null) { spansBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.devtools.cloudtrace.v1.TraceSpan, com.google.devtools.cloudtrace.v1.TraceSpan.Builder, com.google.devtools.cloudtrace.v1.TraceSpanOrBuilder>( spans_, ((bitField0_ & 0x00000004) == 0x00000004), getParentForChildren(), isClean()); spans_ = null; } return spansBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFieldsProto3(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.devtools.cloudtrace.v1.Trace) } // 
@@protoc_insertion_point(class_scope:google.devtools.cloudtrace.v1.Trace)

  // Singleton default instance; proto3 code compares fields against this
  // instance's values to decide what to serialize.
  private static final com.google.devtools.cloudtrace.v1.Trace DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.devtools.cloudtrace.v1.Trace();
  }

  public static com.google.devtools.cloudtrace.v1.Trace getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Shared parser backing every parseFrom/parseDelimitedFrom overload; it
  // simply delegates to the stream-reading message constructor.
  private static final com.google.protobuf.Parser<Trace> PARSER =
      new com.google.protobuf.AbstractParser<Trace>() {
        @java.lang.Override
        public Trace parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new Trace(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<Trace> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<Trace> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.devtools.cloudtrace.v1.Trace getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/* * PhoneGap is available under *either* the terms of the modified BSD license *or* the * MIT License (2008). See http://opensource.org/licenses/alphabetical for full text. * * Copyright (c) 2005-2011, Nitobi Software Inc. * Copyright (c) 2010-2011, IBM Corporation */ package com.phonegap.plugins.childBrowser; import java.io.IOException; import java.io.InputStream; import org.apache.cordova.CallbackContext; import org.apache.cordova.CordovaPlugin; import org.apache.cordova.PluginResult; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import android.app.Dialog; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.net.Uri; import android.text.InputType; import android.util.Log; import android.util.TypedValue; import android.view.Gravity; import android.view.KeyEvent; import android.view.View; import android.view.Window; import android.view.WindowManager; import android.view.WindowManager.LayoutParams; import android.view.inputmethod.EditorInfo; import android.view.inputmethod.InputMethodManager; import android.webkit.WebChromeClient; import android.webkit.WebSettings; import android.webkit.WebView; import android.webkit.WebViewClient; import android.widget.EditText; import android.widget.ImageButton; import android.widget.LinearLayout; import android.widget.RelativeLayout; public class ChildBrowser extends CordovaPlugin{ protected static final String LOG_TAG = "ChildBrowser"; private static int CLOSE_EVENT = 0; private static int LOCATION_CHANGED_EVENT = 1; private CallbackContext browserCallbackContext = null; private Dialog dialog; private WebView webview; private EditText edittext; private boolean showLocationBar = true; /** * Executes the request and returns PluginResult. * * @param action The action to execute. * @param args JSONArry of arguments for the plugin. 
* @param callbackId The callback id used when calling back into JavaScript. * @return A PluginResult object with a status and message. */ @Override public boolean execute(String action, JSONArray args, CallbackContext callbackContext) { PluginResult.Status status = PluginResult.Status.OK; String result = ""; try { if (action.equals("showWebPage")) { this.browserCallbackContext = callbackContext; // If the ChildBrowser is already open then throw an error if (dialog != null && dialog.isShowing()) { browserCallbackContext.error("ChildBrowser is already open"); return true; } result = this.showWebPage(args.getString(0), args.optJSONObject(1)); if (result.length() > 0) { status = PluginResult.Status.ERROR; browserCallbackContext.error(result); return true; } else { PluginResult pluginResult = new PluginResult(status, result); pluginResult.setKeepCallback(true); browserCallbackContext.sendPluginResult(pluginResult); return true; } } else if (action.equals("close")) { closeDialog(); JSONObject obj = new JSONObject(); obj.put("type", CLOSE_EVENT); PluginResult pluginResult = new PluginResult(status, obj); pluginResult.setKeepCallback(false); browserCallbackContext.sendPluginResult(pluginResult); return true; } else if (action.equals("openExternal")) { result = this.openExternal(args.getString(0), args.optBoolean(1)); if (result.length() > 0) { status = PluginResult.Status.ERROR; } } else { status = PluginResult.Status.INVALID_ACTION; } browserCallbackContext.sendPluginResult(new PluginResult(status, result)); return true; } catch (JSONException e) { browserCallbackContext.sendPluginResult(new PluginResult(PluginResult.Status.JSON_EXCEPTION)); return false; } } /** * Display a new browser with the specified URL. * * @param url The url to load. * @param usePhoneGap Load url in PhoneGap webview * @return "" if ok, or error message. 
*/ public String openExternal(String url, boolean usePhoneGap) { try { Intent intent = null; if (usePhoneGap) { intent = new Intent().setClass(this.cordova.getActivity(), org.apache.cordova.DroidGap.class); intent.setData(Uri.parse(url)); // This line will be removed in future. intent.putExtra("url", url); // Timeout parameter: 60 sec max - May be less if http device timeout is less. intent.putExtra("loadUrlTimeoutValue", 60000); // These parameters can be configured if you want to show the loading dialog intent.putExtra("loadingDialog", "Wait,Loading web page..."); // show loading dialog intent.putExtra("hideLoadingDialogOnPageLoad", true); // hide it once page has completely loaded } else { intent = new Intent(Intent.ACTION_VIEW); intent.setData(Uri.parse(url)); } this.cordova.getActivity().startActivity(intent); return ""; } catch (android.content.ActivityNotFoundException e) { Log.d(LOG_TAG, "ChildBrowser: Error loading url "+url+":"+ e.toString()); return e.toString(); } } /** * Closes the dialog */ private void closeDialog() { if (dialog != null) { dialog.dismiss(); } } /** * Checks to see if it is possible to go back one page in history, then does so. */ private void goBack() { if (this.webview.canGoBack()) { this.webview.goBack(); } } /** * Checks to see if it is possible to go forward one page in history, then does so. */ private void goForward() { if (this.webview.canGoForward()) { this.webview.goForward(); } } /** * Navigate to the new page * * @param url to load */ private void navigate(String url) { InputMethodManager imm = (InputMethodManager)this.cordova.getActivity().getSystemService(Context.INPUT_METHOD_SERVICE); imm.hideSoftInputFromWindow(edittext.getWindowToken(), 0); if (!url.startsWith("http") && !url.startsWith("file:")) { this.webview.loadUrl("http://" + url); } else { this.webview.loadUrl(url); } this.webview.requestFocus(); } /** * Should we show the location bar? 
* * @return boolean */ private boolean getShowLocationBar() { return this.showLocationBar; } /** * Display a new browser with the specified URL. * * @param url The url to load. * @param jsonObject */ public String showWebPage(final String url, JSONObject options) { // Determine if we should hide the location bar. if (options != null) { showLocationBar = options.optBoolean("showLocationBar", true); } // Create dialog in new thread Runnable runnable = new Runnable() { /** * Convert our DIP units to Pixels * * @return int */ private int dpToPixels(int dipValue) { int value = (int) TypedValue.applyDimension( TypedValue.COMPLEX_UNIT_DIP, (float) dipValue, cordova.getActivity().getResources().getDisplayMetrics() ); return value; } public void run() { // Let's create the main dialog dialog = new Dialog(cordova.getActivity(), android.R.style.Theme_NoTitleBar); dialog.getWindow().getAttributes().windowAnimations = android.R.style.Animation_Dialog; dialog.requestWindowFeature(Window.FEATURE_NO_TITLE); dialog.setCancelable(true); dialog.setOnDismissListener(new DialogInterface.OnDismissListener() { public void onDismiss(DialogInterface dialog) { try { JSONObject obj = new JSONObject(); obj.put("type", CLOSE_EVENT); sendUpdate(obj, false); } catch (JSONException e) { Log.d(LOG_TAG, "Should never happen"); } } }); // Main container layout LinearLayout main = new LinearLayout(cordova.getActivity()); main.setOrientation(LinearLayout.VERTICAL); // Toolbar layout RelativeLayout toolbar = new RelativeLayout(cordova.getActivity()); toolbar.setLayoutParams(new RelativeLayout.LayoutParams(LayoutParams.FILL_PARENT, this.dpToPixels(44))); toolbar.setPadding(this.dpToPixels(2), this.dpToPixels(2), this.dpToPixels(2), this.dpToPixels(2)); toolbar.setHorizontalGravity(Gravity.LEFT); toolbar.setVerticalGravity(Gravity.TOP); // Action Button Container layout RelativeLayout actionButtonContainer = new RelativeLayout(cordova.getActivity()); actionButtonContainer.setLayoutParams(new 
RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT)); actionButtonContainer.setHorizontalGravity(Gravity.LEFT); actionButtonContainer.setVerticalGravity(Gravity.CENTER_VERTICAL); actionButtonContainer.setId(1); // Back button ImageButton back = new ImageButton(cordova.getActivity()); RelativeLayout.LayoutParams backLayoutParams = new RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.FILL_PARENT); backLayoutParams.addRule(RelativeLayout.ALIGN_LEFT); back.setLayoutParams(backLayoutParams); back.setContentDescription("Back Button"); back.setId(2); try { back.setImageBitmap(loadDrawable("www/childbrowser/icon_arrow_left.png")); } catch (IOException e) { Log.e(LOG_TAG, e.getMessage(), e); } back.setOnClickListener(new View.OnClickListener() { public void onClick(View v) { goBack(); } }); // Forward button ImageButton forward = new ImageButton(cordova.getActivity()); RelativeLayout.LayoutParams forwardLayoutParams = new RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.FILL_PARENT); forwardLayoutParams.addRule(RelativeLayout.RIGHT_OF, 2); forward.setLayoutParams(forwardLayoutParams); forward.setContentDescription("Forward Button"); forward.setId(3); try { forward.setImageBitmap(loadDrawable("www/childbrowser/icon_arrow_right.png")); } catch (IOException e) { Log.e(LOG_TAG, e.getMessage(), e); } forward.setOnClickListener(new View.OnClickListener() { public void onClick(View v) { goForward(); } }); // Edit Text Box edittext = new EditText(cordova.getActivity()); RelativeLayout.LayoutParams textLayoutParams = new RelativeLayout.LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT); textLayoutParams.addRule(RelativeLayout.RIGHT_OF, 1); textLayoutParams.addRule(RelativeLayout.LEFT_OF, 5); edittext.setLayoutParams(textLayoutParams); edittext.setId(4); edittext.setSingleLine(true); edittext.setText(url); edittext.setInputType(InputType.TYPE_TEXT_VARIATION_URI); 
edittext.setImeOptions(EditorInfo.IME_ACTION_GO); edittext.setInputType(InputType.TYPE_NULL); // Will not except input... Makes the text NON-EDITABLE edittext.setOnKeyListener(new View.OnKeyListener() { public boolean onKey(View v, int keyCode, KeyEvent event) { // If the event is a key-down event on the "enter" button if ((event.getAction() == KeyEvent.ACTION_DOWN) && (keyCode == KeyEvent.KEYCODE_ENTER)) { navigate(edittext.getText().toString()); return true; } return false; } }); // Close button ImageButton close = new ImageButton(cordova.getActivity()); RelativeLayout.LayoutParams closeLayoutParams = new RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.FILL_PARENT); closeLayoutParams.addRule(RelativeLayout.ALIGN_PARENT_RIGHT); close.setLayoutParams(closeLayoutParams); forward.setContentDescription("Close Button"); close.setId(5); try { close.setImageBitmap(loadDrawable("www/childbrowser/icon_close.png")); } catch (IOException e) { Log.e(LOG_TAG, e.getMessage(), e); } close.setOnClickListener(new View.OnClickListener() { public void onClick(View v) { closeDialog(); } }); // WebView webview = new WebView(cordova.getActivity()); webview.setLayoutParams(new LinearLayout.LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT)); webview.setWebChromeClient(new WebChromeClient()); WebViewClient client = new ChildBrowserClient(edittext); webview.setWebViewClient(client); WebSettings settings = webview.getSettings(); settings.setJavaScriptEnabled(true); settings.setJavaScriptCanOpenWindowsAutomatically(true); settings.setBuiltInZoomControls(true); settings.setPluginsEnabled(true); settings.setDomStorageEnabled(true); webview.loadUrl(url); webview.setId(6); webview.getSettings().setLoadWithOverviewMode(true); webview.getSettings().setUseWideViewPort(true); webview.requestFocus(); webview.requestFocusFromTouch(); // Add the back and forward buttons to our action button container layout actionButtonContainer.addView(back); 
actionButtonContainer.addView(forward); // Add the views to our toolbar toolbar.addView(actionButtonContainer); toolbar.addView(edittext); toolbar.addView(close); // Don't add the toolbar if its been disabled if (getShowLocationBar()) { // Add our toolbar to our main view/layout main.addView(toolbar); } // Add our webview to our main view/layout main.addView(webview); WindowManager.LayoutParams lp = new WindowManager.LayoutParams(); lp.copyFrom(dialog.getWindow().getAttributes()); lp.width = WindowManager.LayoutParams.FILL_PARENT; lp.height = WindowManager.LayoutParams.FILL_PARENT; dialog.setContentView(main); dialog.show(); dialog.getWindow().setAttributes(lp); } private Bitmap loadDrawable(String filename) throws java.io.IOException { InputStream input = cordova.getActivity().getAssets().open(filename); return BitmapFactory.decodeStream(input); } }; this.cordova.getActivity().runOnUiThread(runnable); return ""; } /** * Create a new plugin result and send it back to JavaScript * * @param obj a JSONObject contain event payload information */ private void sendUpdate(JSONObject obj, boolean keepCallback) { if (this.browserCallbackContext != null) { PluginResult result = new PluginResult(PluginResult.Status.OK, obj); result.setKeepCallback(keepCallback); this.browserCallbackContext.sendPluginResult(result); } } /** * The webview client receives notifications about appView */ public class ChildBrowserClient extends WebViewClient { EditText edittext; /** * Constructor. * * @param mContext * @param edittext */ public ChildBrowserClient(EditText mEditText) { this.edittext = mEditText; } /** * Notify the host application that a page has started loading. * * @param view The webview initiating the callback. * @param url The url of the page. 
*/ @Override public void onPageStarted(WebView view, String url, Bitmap favicon) { super.onPageStarted(view, url, favicon); String newloc; if (url.startsWith("http:") || url.startsWith("https:") || url.startsWith("file:")) { newloc = url; } else { newloc = "http://" + url; } if (!newloc.equals(edittext.getText().toString())) { edittext.setText(newloc); } try { JSONObject obj = new JSONObject(); obj.put("type", LOCATION_CHANGED_EVENT); obj.put("location", url); sendUpdate(obj, true); } catch (JSONException e) { Log.d("ChildBrowser", "This should never happen"); } } } }
/* * Copyright 1999-2011 Alibaba Group Holding Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.alibaba.druid.sql.visitor; import java.io.IOException; import java.io.InputStream; import java.io.Reader; import java.sql.Blob; import java.sql.Clob; import java.sql.NClob; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.List; import com.alibaba.druid.sql.ast.SQLCommentHint; import com.alibaba.druid.sql.ast.SQLDataType; import com.alibaba.druid.sql.ast.SQLExpr; import com.alibaba.druid.sql.ast.SQLObject; import com.alibaba.druid.sql.ast.SQLOrderBy; import com.alibaba.druid.sql.ast.SQLOver; import com.alibaba.druid.sql.ast.SQLSetQuantifier; import com.alibaba.druid.sql.ast.SQLStatement; import com.alibaba.druid.sql.ast.expr.SQLAggregateExpr; import com.alibaba.druid.sql.ast.expr.SQLAllColumnExpr; import com.alibaba.druid.sql.ast.expr.SQLAllExpr; import com.alibaba.druid.sql.ast.expr.SQLAnyExpr; import com.alibaba.druid.sql.ast.expr.SQLBetweenExpr; import com.alibaba.druid.sql.ast.expr.SQLBinaryOpExpr; import com.alibaba.druid.sql.ast.expr.SQLBinaryOperator; import com.alibaba.druid.sql.ast.expr.SQLBooleanExpr; import com.alibaba.druid.sql.ast.expr.SQLCaseExpr; import com.alibaba.druid.sql.ast.expr.SQLCastExpr; import com.alibaba.druid.sql.ast.expr.SQLCharExpr; import com.alibaba.druid.sql.ast.expr.SQLCurrentOfCursorExpr; import com.alibaba.druid.sql.ast.expr.SQLDefaultExpr; import 
com.alibaba.druid.sql.ast.expr.SQLExistsExpr; import com.alibaba.druid.sql.ast.expr.SQLHexExpr; import com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr; import com.alibaba.druid.sql.ast.expr.SQLInListExpr; import com.alibaba.druid.sql.ast.expr.SQLInSubQueryExpr; import com.alibaba.druid.sql.ast.expr.SQLIntegerExpr; import com.alibaba.druid.sql.ast.expr.SQLListExpr; import com.alibaba.druid.sql.ast.expr.SQLMethodInvokeExpr; import com.alibaba.druid.sql.ast.expr.SQLNCharExpr; import com.alibaba.druid.sql.ast.expr.SQLNotExpr; import com.alibaba.druid.sql.ast.expr.SQLNullExpr; import com.alibaba.druid.sql.ast.expr.SQLNumberExpr; import com.alibaba.druid.sql.ast.expr.SQLPropertyExpr; import com.alibaba.druid.sql.ast.expr.SQLQueryExpr; import com.alibaba.druid.sql.ast.expr.SQLSomeExpr; import com.alibaba.druid.sql.ast.expr.SQLUnaryExpr; import com.alibaba.druid.sql.ast.expr.SQLVariantRefExpr; import com.alibaba.druid.sql.ast.statement.NotNullConstraint; import com.alibaba.druid.sql.ast.statement.SQLAlterTableAddColumn; import com.alibaba.druid.sql.ast.statement.SQLAlterTableAddConstraint; import com.alibaba.druid.sql.ast.statement.SQLAlterTableAddIndex; import com.alibaba.druid.sql.ast.statement.SQLAlterTableAlterColumn; import com.alibaba.druid.sql.ast.statement.SQLAlterTableDisableConstraint; import com.alibaba.druid.sql.ast.statement.SQLAlterTableDisableKeys; import com.alibaba.druid.sql.ast.statement.SQLAlterTableDropColumnItem; import com.alibaba.druid.sql.ast.statement.SQLAlterTableDropConstraint; import com.alibaba.druid.sql.ast.statement.SQLAlterTableDropForeignKey; import com.alibaba.druid.sql.ast.statement.SQLAlterTableDropIndex; import com.alibaba.druid.sql.ast.statement.SQLAlterTableDropPrimaryKey; import com.alibaba.druid.sql.ast.statement.SQLAlterTableEnableConstraint; import com.alibaba.druid.sql.ast.statement.SQLAlterTableEnableKeys; import com.alibaba.druid.sql.ast.statement.SQLAlterTableItem; import 
com.alibaba.druid.sql.ast.statement.SQLAlterTableRenameColumn; import com.alibaba.druid.sql.ast.statement.SQLAlterTableStatement; import com.alibaba.druid.sql.ast.statement.SQLAssignItem; import com.alibaba.druid.sql.ast.statement.SQLCallStatement; import com.alibaba.druid.sql.ast.statement.SQLCharacterDataType; import com.alibaba.druid.sql.ast.statement.SQLCheck; import com.alibaba.druid.sql.ast.statement.SQLColumnCheck; import com.alibaba.druid.sql.ast.statement.SQLColumnConstraint; import com.alibaba.druid.sql.ast.statement.SQLColumnDefinition; import com.alibaba.druid.sql.ast.statement.SQLColumnPrimaryKey; import com.alibaba.druid.sql.ast.statement.SQLColumnReference; import com.alibaba.druid.sql.ast.statement.SQLColumnUniqueKey; import com.alibaba.druid.sql.ast.statement.SQLCommentStatement; import com.alibaba.druid.sql.ast.statement.SQLCreateDatabaseStatement; import com.alibaba.druid.sql.ast.statement.SQLCreateIndexStatement; import com.alibaba.druid.sql.ast.statement.SQLCreateTableStatement; import com.alibaba.druid.sql.ast.statement.SQLCreateTriggerStatement; import com.alibaba.druid.sql.ast.statement.SQLCreateTriggerStatement.TriggerEvent; import com.alibaba.druid.sql.ast.statement.SQLCreateTriggerStatement.TriggerType; import com.alibaba.druid.sql.ast.statement.SQLCreateViewStatement; import com.alibaba.druid.sql.ast.statement.SQLDeleteStatement; import com.alibaba.druid.sql.ast.statement.SQLDropDatabaseStatement; import com.alibaba.druid.sql.ast.statement.SQLDropFunctionStatement; import com.alibaba.druid.sql.ast.statement.SQLDropIndexStatement; import com.alibaba.druid.sql.ast.statement.SQLDropProcedureStatement; import com.alibaba.druid.sql.ast.statement.SQLDropSequenceStatement; import com.alibaba.druid.sql.ast.statement.SQLDropTableSpaceStatement; import com.alibaba.druid.sql.ast.statement.SQLDropTableStatement; import com.alibaba.druid.sql.ast.statement.SQLDropTriggerStatement; import com.alibaba.druid.sql.ast.statement.SQLDropUserStatement; import 
com.alibaba.druid.sql.ast.statement.SQLDropViewStatement; import com.alibaba.druid.sql.ast.statement.SQLExplainStatement; import com.alibaba.druid.sql.ast.statement.SQLExprHint; import com.alibaba.druid.sql.ast.statement.SQLExprTableSource; import com.alibaba.druid.sql.ast.statement.SQLForeignKeyConstraint; import com.alibaba.druid.sql.ast.statement.SQLForeignKeyImpl; import com.alibaba.druid.sql.ast.statement.SQLGrantStatement; import com.alibaba.druid.sql.ast.statement.SQLInsertStatement; import com.alibaba.druid.sql.ast.statement.SQLInsertStatement.ValuesClause; import com.alibaba.druid.sql.ast.statement.SQLJoinTableSource; import com.alibaba.druid.sql.ast.statement.SQLJoinTableSource.JoinType; import com.alibaba.druid.sql.ast.statement.SQLPrimaryKey; import com.alibaba.druid.sql.ast.statement.SQLPrimaryKeyImpl; import com.alibaba.druid.sql.ast.statement.SQLReleaseSavePointStatement; import com.alibaba.druid.sql.ast.statement.SQLRollbackStatement; import com.alibaba.druid.sql.ast.statement.SQLSavePointStatement; import com.alibaba.druid.sql.ast.statement.SQLSelect; import com.alibaba.druid.sql.ast.statement.SQLSelectGroupByClause; import com.alibaba.druid.sql.ast.statement.SQLSelectItem; import com.alibaba.druid.sql.ast.statement.SQLSelectOrderByItem; import com.alibaba.druid.sql.ast.statement.SQLSelectQueryBlock; import com.alibaba.druid.sql.ast.statement.SQLSelectStatement; import com.alibaba.druid.sql.ast.statement.SQLSetStatement; import com.alibaba.druid.sql.ast.statement.SQLSubqueryTableSource; import com.alibaba.druid.sql.ast.statement.SQLTableElement; import com.alibaba.druid.sql.ast.statement.SQLTruncateStatement; import com.alibaba.druid.sql.ast.statement.SQLUnionQuery; import com.alibaba.druid.sql.ast.statement.SQLUnionQueryTableSource; import com.alibaba.druid.sql.ast.statement.SQLUnique; import com.alibaba.druid.sql.ast.statement.SQLUniqueConstraint; import com.alibaba.druid.sql.ast.statement.SQLUpdateSetItem; import 
com.alibaba.druid.sql.ast.statement.SQLUpdateStatement;
import com.alibaba.druid.sql.ast.statement.SQLUseStatement;
import com.alibaba.druid.sql.ast.statement.SQLWithSubqueryClause;

/**
 * Serializes a SQL AST back into SQL text.
 * <p>
 * All output flows through the print/println helpers into {@link #appender};
 * dialect-specific visitors subclass this and override individual visit methods.
 */
public class SQLASTOutputVisitor extends SQLASTVisitorAdapter implements PrintableVisitor {

    /** Destination of all generated SQL text. */
    protected final Appendable appender;

    /** One level of indentation, repeated indentCount times by printIndent(). */
    private String indent = "\t";

    /** Current nesting depth used by printIndent(). */
    private int indentCount = 0;

    /** When false, println() emits a single space instead of a newline. */
    private boolean prettyFormat = true;

    /** Number of select-list items printed per line before wrapping. */
    protected int selectListNumberOfLine = 5;

    /** Bound parameter values substituted for SQLVariantRefExpr by index; may be null. */
    private List<Object> parameters;

    public SQLASTOutputVisitor(Appendable appender){
        this.appender = appender;
    }

    public int getParametersSize() {
        if (parameters == null) {
            return 0;
        }

        return this.parameters.size();
    }

    // Lazily creates the parameter list so callers can append to it.
    public List<Object> getParameters() {
        if (parameters == null) {
            parameters = new ArrayList<Object>();
        }

        return parameters;
    }

    public void setParameters(List<Object> parameters) {
        this.parameters = parameters;
    }

    public int getIndentCount() {
        return indentCount;
    }

    public Appendable getAppender() {
        return appender;
    }

    public boolean isPrettyFormat() {
        return prettyFormat;
    }

    public void setPrettyFormat(boolean prettyFormat) {
        this.prettyFormat = prettyFormat;
    }

    public void decrementIndent() {
        this.indentCount -= 1;
    }

    public void incrementIndent() {
        this.indentCount += 1;
    }

    // Appendable.append may throw IOException; wrap it so visit methods stay unchecked.
    public void print(char value) {
        try {
            this.appender.append(value);
        } catch (IOException e) {
            throw new RuntimeException("println error", e);
        }
    }

    public void print(int value) {
        print(Integer.toString(value));
    }

    // Prints a date as a quoted SQL literal; Timestamps keep their time-of-day.
    // NOTE(review): creating a SimpleDateFormat per call is deliberate —
    // SimpleDateFormat is not thread-safe, so it must not be cached statically.
    public void print(Date date) {
        SimpleDateFormat dateFormat;
        if (date instanceof java.sql.Timestamp) {
            dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
        } else {
            dateFormat = new SimpleDateFormat("yyyy-MM-dd");
        }
        print("'" + dateFormat.format(date) + "'");
    }

    public void print(long value) {
        print(Long.toString(value));
    }

    public void print(String text) {
        try {
            this.appender.append(text);
        } catch (IOException e) {
            throw new RuntimeException("println error", e);
        }
    }

    // Prints " alias" when a non-empty alias is present.
    protected void printAlias(String alias) {
        if ((alias != null) &&
(alias.length() > 0)) { print(" "); print(alias); } } protected void printAndAccept(List<? extends SQLObject> nodes, String seperator) { for (int i = 0, size = nodes.size(); i < size; ++i) { if (i != 0) { print(seperator); } nodes.get(i).accept(this); } } protected void printSelectList(List<SQLSelectItem> selectList) { incrementIndent(); for (int i = 0, size = selectList.size(); i < size; ++i) { if (i != 0) { if (i % selectListNumberOfLine == 0) { println(); } print(", "); } selectList.get(i).accept(this); } decrementIndent(); } protected void printlnAndAccept(List<? extends SQLObject> nodes, String seperator) { for (int i = 0, size = nodes.size(); i < size; ++i) { if (i != 0) { println(seperator); } ((SQLObject) nodes.get(i)).accept(this); } } public void printIndent() { for (int i = 0; i < this.indentCount; ++i) { print(this.indent); } } public void println() { if (!isPrettyFormat()) { print(' '); return; } print("\n"); printIndent(); } public void println(String text) { print(text); println(); } // //////////////////// public boolean visit(SQLBetweenExpr x) { x.getTestExpr().accept(this); if (x.isNot()) { print(" NOT BETWEEN "); } else { print(" BETWEEN "); } x.getBeginExpr().accept(this); print(" AND "); x.getEndExpr().accept(this); return false; } public boolean visit(SQLBinaryOpExpr x) { SQLObject parent = x.getParent(); boolean isRoot = parent instanceof SQLSelectQueryBlock; boolean relational = x.getOperator() == SQLBinaryOperator.BooleanAnd || x.getOperator() == SQLBinaryOperator.BooleanOr; if (isRoot && relational) { incrementIndent(); } List<SQLExpr> groupList = new ArrayList<SQLExpr>(); SQLExpr left = x.getLeft(); for (;;) { if (left instanceof SQLBinaryOpExpr && ((SQLBinaryOpExpr) left).getOperator() == x.getOperator()) { SQLBinaryOpExpr binaryLeft = (SQLBinaryOpExpr) left; groupList.add(binaryLeft.getRight()); left = binaryLeft.getLeft(); } else { groupList.add(left); break; } } for (int i = groupList.size() - 1; i >= 0; --i) { SQLExpr item = 
groupList.get(i); visitBinaryLeft(item, x.getOperator()); if (relational) { println(); } else { print(" "); } print(x.getOperator().name); print(" "); } visitorBinaryRight(x); if (isRoot && relational) { decrementIndent(); } return false; } private void visitorBinaryRight(SQLBinaryOpExpr x) { if (x.getRight() instanceof SQLBinaryOpExpr) { SQLBinaryOpExpr right = (SQLBinaryOpExpr) x.getRight(); boolean rightRational = right.getOperator() == SQLBinaryOperator.BooleanAnd || right.getOperator() == SQLBinaryOperator.BooleanOr; if (right.getOperator().priority >= x.getOperator().priority) { if (rightRational) { incrementIndent(); } print('('); right.accept(this); print(')'); if (rightRational) { decrementIndent(); } } else { right.accept(this); } } else { x.getRight().accept(this); } } private void visitBinaryLeft(SQLExpr left, SQLBinaryOperator op) { if (left instanceof SQLBinaryOpExpr) { SQLBinaryOpExpr binaryLeft = (SQLBinaryOpExpr) left; boolean leftRational = binaryLeft.getOperator() == SQLBinaryOperator.BooleanAnd || binaryLeft.getOperator() == SQLBinaryOperator.BooleanOr; if (binaryLeft.getOperator().priority > op.priority) { if (leftRational) { incrementIndent(); } print('('); left.accept(this); print(')'); if (leftRational) { decrementIndent(); } } else { left.accept(this); } } else { left.accept(this); } } public boolean visit(SQLCaseExpr x) { print("CASE "); if (x.getValueExpr() != null) { x.getValueExpr().accept(this); print(" "); } printAndAccept(x.getItems(), " "); if (x.getElseExpr() != null) { print(" ELSE "); x.getElseExpr().accept(this); } print(" END"); return false; } public boolean visit(SQLCaseExpr.Item x) { print("WHEN "); x.getConditionExpr().accept(this); print(" THEN "); x.getValueExpr().accept(this); return false; } public boolean visit(SQLCastExpr x) { print("CAST("); x.getExpr().accept(this); print(" AS "); x.getDataType().accept(this); print(")"); return false; } public boolean visit(SQLCharExpr x) { if ((x.getText() == null) || 
(x.getText().length() == 0)) { print("NULL"); } else { print("'"); print(x.getText().replaceAll("'", "''")); print("'"); } return false; } public boolean visit(SQLDataType x) { print(x.getName()); if (x.getArguments().size() > 0) { print("("); printAndAccept(x.getArguments(), ", "); print(")"); } return false; } public boolean visit(SQLCharacterDataType x) { visit((SQLDataType) x); return false; } public boolean visit(SQLExistsExpr x) { if (x.isNot()) { print("NOT EXISTS ("); } else { print("EXISTS ("); } incrementIndent(); x.getSubQuery().accept(this); decrementIndent(); print(")"); return false; } public boolean visit(SQLIdentifierExpr x) { print(x.getName()); return false; } public boolean visit(SQLInListExpr x) { x.getExpr().accept(this); if (x.isNot()) { print(" NOT IN ("); } else { print(" IN ("); } printAndAccept(x.getTargetList(), ", "); print(')'); return false; } public boolean visit(SQLIntegerExpr x) { return SQLASTOutputVisitorUtils.visit(this, x); } public boolean visit(SQLMethodInvokeExpr x) { if (x.getOwner() != null) { x.getOwner().accept(this); print("."); } print(x.getMethodName()); print("("); printAndAccept(x.getParameters(), ", "); print(")"); return false; } public boolean visit(SQLAggregateExpr x) { print(x.getMethodName()); print("("); if (x.getOption() != null) { print(x.getOption().toString()); print(' '); } printAndAccept(x.getArguments(), ", "); visitAggreateRest(x); print(")"); if (x.getWithinGroup() != null) { print(" WITHIN GROUP ("); x.getWithinGroup().accept(this); print(")"); } if (x.getOver() != null) { print(" "); x.getOver().accept(this); } return false; } protected void visitAggreateRest(SQLAggregateExpr aggregateExpr) { } public boolean visit(SQLAllColumnExpr x) { print("*"); return true; } public boolean visit(SQLNCharExpr x) { if ((x.getText() == null) || (x.getText().length() == 0)) { print("NULL"); } else { print("N'"); print(x.getText().replace("'", "''")); print("'"); } return false; } public boolean visit(SQLNotExpr x) 
{ print("NOT "); x.getExpr().accept(this); return false; } public boolean visit(SQLNullExpr x) { print("NULL"); return false; } public boolean visit(SQLNumberExpr x) { return SQLASTOutputVisitorUtils.visit(this, x); } public boolean visit(SQLPropertyExpr x) { x.getOwner().accept(this); print("."); print(x.getName()); return false; } public boolean visit(SQLQueryExpr x) { SQLObject parent = x.getParent(); if (parent instanceof SQLSelect) { parent = parent.getParent(); } if (parent instanceof SQLStatement) { incrementIndent(); println(); x.getSubQuery().accept(this); decrementIndent(); } else if (parent instanceof ValuesClause) { println(); print("("); x.getSubQuery().accept(this); print(")"); println(); } else { print("("); incrementIndent(); println(); x.getSubQuery().accept(this); println(); decrementIndent(); print(")"); } return false; } public boolean visit(SQLSelectGroupByClause x) { if (x.getItems().size() > 0) { print("GROUP BY "); printAndAccept(x.getItems(), ", "); } if (x.getHaving() != null) { println(); print("HAVING "); x.getHaving().accept(this); } return false; } public boolean visit(SQLSelect x) { x.getQuery().setParent(x); if (x.getWithSubQuery() != null) { x.getWithSubQuery().accept(this); println(); } x.getQuery().accept(this); if (x.getOrderBy() != null) { println(); x.getOrderBy().accept(this); } if (x.getHintsSize() > 0) { printAndAccept(x.getHints(), ""); } return false; } public boolean visit(SQLSelectQueryBlock x) { print("SELECT "); if (SQLSetQuantifier.ALL == x.getDistionOption()) { print("ALL "); } else if (SQLSetQuantifier.DISTINCT == x.getDistionOption()) { print("DISTINCT "); } else if (SQLSetQuantifier.UNIQUE == x.getDistionOption()) { print("UNIQUE "); } printSelectList(x.getSelectList()); if (x.getFrom() != null) { println(); print("FROM "); x.getFrom().accept(this); } if (x.getWhere() != null) { println(); print("WHERE "); x.getWhere().setParent(x); x.getWhere().accept(this); } if (x.getGroupBy() != null) { println(); 
x.getGroupBy().accept(this);
    }

    return false;
}

public boolean visit(SQLSelectItem x) {
    if(x.isConnectByRoot()) {
        print("CONNECT_BY_ROOT ");
    }
    x.getExpr().accept(this);

    if ((x.getAlias() != null) && (x.getAlias().length() > 0)) {
        print(" AS ");
        print(x.getAlias());
    }

    return false;
}

public boolean visit(SQLOrderBy x) {
    if (x.getItems().size() > 0) {
        print("ORDER BY ");
        printAndAccept(x.getItems(), ", ");
    }

    return false;
}

public boolean visit(SQLSelectOrderByItem x) {
    x.getExpr().accept(this);
    if (x.getType() != null) {
        print(" ");
        // ASC/DESC keyword taken from the enum constant name.
        print(x.getType().name().toUpperCase());
    }

    if (x.getCollate() != null) {
        print(" COLLATE ");
        print(x.getCollate());
    }

    return false;
}

public boolean visit(SQLExprTableSource x) {
    x.getExpr().accept(this);

    if (x.getAlias() != null) {
        print(' ');
        print(x.getAlias());
    }

    return false;
}

public boolean visit(SQLSelectStatement stmt) {
    SQLSelect select = stmt.getSelect();
    select.accept(this);
    return false;
}

// Prints either the placeholder's own name (e.g. "?") or, when bound
// parameters were supplied via setParameters(), the actual value.
public boolean visit(SQLVariantRefExpr x) {
    int index = x.getIndex();
    if (parameters == null || index >= parameters.size()) {
        print(x.getName());
        return false;
    }

    Object param = parameters.get(index);
    printParameter(param);
    return false;
}

// Renders a bound parameter value as a SQL literal.
public void printParameter(Object param) {
    if (param == null) {
        print("NULL");
        return;
    }

    if (param instanceof Number //
        || param instanceof Boolean) {
        print(param.toString());
        return;
    }

    if (param instanceof String) {
        // Delegate to the char-literal visitor so embedded quotes get escaped.
        SQLCharExpr charExpr = new SQLCharExpr((String) param);
        visit(charExpr);
        return;
    }

    if (param instanceof Date) {
        print((Date) param);
        return;
    }

    // NOTE(review): the stream/LOB markers below open a quote but never close
    // it ("'<InputStream>" etc.). Looks unbalanced; confirm whether downstream
    // consumers match on these exact markers before changing them.
    if (param instanceof InputStream) {
        print("'<InputStream>");
        return;
    }

    if (param instanceof Reader) {
        print("'<Reader>");
        return;
    }

    if (param instanceof Blob) {
        print("'<Blob>");
        return;
    }

    if (param instanceof NClob) {
        print("'<NClob>");
        return;
    }

    if (param instanceof Clob) {
        print("'<Clob>");
        return;
    }

    print("'" + param.getClass().getName() + "'");
}

public boolean visit(SQLDropTableStatement x) {
    if (x.isTemporary()) {
        print("DROP TEMPORARY TABLE ");
} else { print("DROP TABLE "); } if (x.isIfExists()) { print("IF EXISTS "); } printAndAccept(x.getTableSources(), ", "); if (x.isCascade()) { printCascade(); } if (x.isRestrict()) { print(" RESTRICT"); } if (x.isPurge()) { print(" PURGE"); } return false; } protected void printCascade() { print(" CASCADE"); } public boolean visit(SQLDropViewStatement x) { print("DROP VIEW "); if (x.isIfExists()) { print("IF EXISTS "); } printAndAccept(x.getTableSources(), ", "); if (x.isCascade()) { printCascade(); } return false; } public boolean visit(SQLTableElement x) { if (x instanceof SQLColumnDefinition) { return visit((SQLColumnDefinition) x); } throw new RuntimeException("TODO"); } public boolean visit(SQLColumnDefinition x) { x.getName().accept(this); if (x.getDataType() != null) { print(' '); x.getDataType().accept(this); } if (x.getDefaultExpr() != null) { visitColumnDefault(x); } for (SQLColumnConstraint item : x.getConstraints()) { boolean newLine = item instanceof SQLForeignKeyConstraint // || item instanceof SQLPrimaryKey // || item instanceof SQLColumnCheck // || item instanceof SQLColumnCheck // || item.getName() != null; if (newLine) { incrementIndent(); println(); } else { print(' '); } item.accept(this); if (newLine) { decrementIndent(); } } if (x.getEnable() != null) { if (x.getEnable().booleanValue()) { print(" ENABLE"); } } if (x.getComment() != null) { print(" COMMENT "); x.getComment().accept(this); } return false; } protected void visitColumnDefault(SQLColumnDefinition x) { print(" DEFAULT "); x.getDefaultExpr().accept(this); } public boolean visit(SQLDeleteStatement x) { print("DELETE FROM "); x.getTableName().accept(this); if (x.getWhere() != null) { print(" WHERE "); x.getWhere().setParent(x); x.getWhere().accept(this); } return false; } public boolean visit(SQLCurrentOfCursorExpr x) { print("CURRENT OF "); x.getCursorName().accept(this); return false; } public boolean visit(SQLInsertStatement x) { print("INSERT INTO "); 
x.getTableSource().accept(this); if (x.getColumns().size() > 0) { incrementIndent(); println(); print("("); for (int i = 0, size = x.getColumns().size(); i < size; ++i) { if (i != 0) { if (i % 5 == 0) { println(); } print(", "); } x.getColumns().get(i).accept(this); } print(")"); decrementIndent(); } if (x.getValues() != null) { println(); print("VALUES"); println(); x.getValues().accept(this); } else { if (x.getQuery() != null) { println(); x.getQuery().setParent(x); x.getQuery().accept(this); } } return false; } public boolean visit(SQLUpdateSetItem x) { x.getColumn().accept(this); print(" = "); x.getValue().accept(this); return false; } public boolean visit(SQLUpdateStatement x) { print("UPDATE "); x.getTableSource().accept(this); println(); print("SET "); for (int i = 0, size = x.getItems().size(); i < size; ++i) { if (i != 0) { print(", "); } x.getItems().get(i).accept(this); } if (x.getWhere() != null) { println(); print("WHERE "); x.getWhere().setParent(x); x.getWhere().accept(this); } return false; } public boolean visit(SQLCreateTableStatement x) { print("CREATE TABLE "); if (SQLCreateTableStatement.Type.GLOBAL_TEMPORARY.equals(x.getType())) { print("GLOBAL TEMPORARY "); } else if (SQLCreateTableStatement.Type.LOCAL_TEMPORARY.equals(x.getType())) { print("LOCAL TEMPORARY "); } x.getName().accept(this); int size = x.getTableElementList().size(); if (size > 0) { print(" ("); incrementIndent(); println(); for (int i = 0; i < size; ++i) { if (i != 0) { print(","); println(); } x.getTableElementList().get(i).accept(this); } decrementIndent(); println(); print(")"); } return false; } public boolean visit(SQLUniqueConstraint x) { if (x.getName() != null) { print("CONSTRAINT "); x.getName().accept(this); print(' '); } print("UNIQUE ("); for (int i = 0, size = x.getColumns().size(); i < size; ++i) { if (i != 0) { print(", "); } x.getColumns().get(i).accept(this); } print(")"); return false; } public boolean visit(NotNullConstraint x) { if (x.getName() != null) { 
print("CONSTRAINT "); x.getName().accept(this); print(' '); } print("NOT NULL"); return false; } @Override public boolean visit(SQLUnionQuery x) { x.getLeft().accept(this); println(); print(x.getOperator().name); println(); boolean needParen = false; if (x.getOrderBy() != null) { needParen = true; } if (needParen) { print('('); x.getRight().accept(this); print(')'); } else { x.getRight().accept(this); } if (x.getOrderBy() != null) { println(); x.getOrderBy().accept(this); } return false; } @Override public boolean visit(SQLUnaryExpr x) { print(x.getOperator().name); SQLExpr expr = x.getExpr(); switch (x.getOperator()) { case BINARY: case Prior: case ConnectByRoot: print(' '); expr.accept(this); return false; default: break; } if (expr instanceof SQLBinaryOpExpr) { print('('); expr.accept(this); print(')'); } else if (expr instanceof SQLUnaryExpr) { print('('); expr.accept(this); print(')'); } else { expr.accept(this); } return false; } @Override public boolean visit(SQLHexExpr x) { print("0x"); print(x.getHex()); String charset = (String) x.getAttribute("USING"); if (charset != null) { print(" USING "); print(charset); } return false; } @Override public boolean visit(SQLSetStatement x) { print("SET "); printAndAccept(x.getItems(), ", "); List<SQLCommentHint> hints = x.getHints(); if (hints != null && !hints.isEmpty()) { print(" "); for (SQLCommentHint hint : hints) { hint.accept(this); } } return false; } @Override public boolean visit(SQLAssignItem x) { x.getTarget().accept(this); print(" = "); x.getValue().accept(this); return false; } @Override public boolean visit(SQLCallStatement x) { if (x.isBrace()) { print("{"); } if (x.getOutParameter() != null) { x.getOutParameter().accept(this); print(" = "); } print("CALL "); x.getProcedureName().accept(this); print('('); printAndAccept(x.getParameters(), ", "); print(')'); if (x.isBrace()) { print("}"); } return false; } @Override public boolean visit(SQLJoinTableSource x) { x.getLeft().accept(this); incrementIndent(); 
if (x.getJoinType() == JoinType.COMMA) { print(","); } else { println(); print(JoinType.toString(x.getJoinType())); } print(" "); x.getRight().accept(this); if (x.getCondition() != null) { incrementIndent(); print(" ON "); x.getCondition().accept(this); decrementIndent(); } if (x.getUsing().size() > 0) { print(" USING ("); printAndAccept(x.getUsing(), ", "); print(")"); } if (x.getAlias() != null) { print(" AS "); print(x.getAlias()); } decrementIndent(); return false; } @Override public boolean visit(ValuesClause x) { print("("); incrementIndent(); for (int i = 0, size = x.getValues().size(); i < size; ++i) { if (i != 0) { if (i % 5 == 0) { println(); } print(", "); } SQLExpr expr = x.getValues().get(i); expr.setParent(x); expr.accept(this); } decrementIndent(); print(")"); return false; } @Override public boolean visit(SQLSomeExpr x) { print("SOME ("); incrementIndent(); x.getSubQuery().accept(this); decrementIndent(); print(")"); return false; } @Override public boolean visit(SQLAnyExpr x) { print("ANY ("); incrementIndent(); x.getSubQuery().accept(this); decrementIndent(); print(")"); return false; } @Override public boolean visit(SQLAllExpr x) { print("ALL ("); incrementIndent(); x.getSubQuery().accept(this); decrementIndent(); print(")"); return false; } @Override public boolean visit(SQLInSubQueryExpr x) { x.getExpr().accept(this); if (x.isNot()) { print(" NOT IN ("); } else { print(" IN ("); } incrementIndent(); x.getSubQuery().accept(this); decrementIndent(); print(")"); return false; } @Override public boolean visit(SQLListExpr x) { print("("); printAndAccept(x.getItems(), ", "); print(")"); return false; } @Override public boolean visit(SQLSubqueryTableSource x) { print("("); incrementIndent(); x.getSelect().accept(this); println(); decrementIndent(); print(")"); if (x.getAlias() != null) { print(' '); print(x.getAlias()); } return false; } @Override public boolean visit(SQLTruncateStatement x) { print("TRUNCATE TABLE "); 
printAndAccept(x.getTableSources(), ", "); return false; } @Override public boolean visit(SQLDefaultExpr x) { print("DEFAULT"); return false; } @Override public void endVisit(SQLCommentStatement x) { } @Override public boolean visit(SQLCommentStatement x) { print("COMMENT ON "); if (x.getType() != null) { print(x.getType().name()); print(" "); } x.getOn().accept(this); print(" IS "); x.getComment().accept(this); return false; } @Override public boolean visit(SQLUseStatement x) { print("USE "); x.getDatabase().accept(this); return false; } @Override public boolean visit(SQLAlterTableAddColumn x) { print("ADD ("); printAndAccept(x.getColumns(), ", "); print(")"); return false; } @Override public boolean visit(SQLAlterTableDropColumnItem x) { print("DROP COLUMN "); this.printAndAccept(x.getColumns(), ", "); return false; } @Override public void endVisit(SQLAlterTableAddColumn x) { } @Override public boolean visit(SQLDropIndexStatement x) { print("DROP INDEX "); x.getIndexName().accept(this); SQLExprTableSource table = x.getTableName(); if (table != null) { print(" ON "); table.accept(this); } return false; } @Override public boolean visit(SQLSavePointStatement x) { print("SAVEPOINT "); x.getName().accept(this); return false; } @Override public boolean visit(SQLReleaseSavePointStatement x) { print("RELEASE SAVEPOINT "); x.getName().accept(this); return false; } @Override public boolean visit(SQLRollbackStatement x) { print("ROLLBACK"); if (x.getTo() != null) { print(" TO "); x.getTo().accept(this); } return false; } public boolean visit(SQLCommentHint x) { print("/*"); print(x.getText()); print("*/"); return false; } @Override public boolean visit(SQLCreateDatabaseStatement x) { print("CREATE DATABASE "); x.getName().accept(this); return false; } @Override public boolean visit(SQLCreateViewStatement x) { print("CREATE "); if(x.isOrReplace()) { print("OR REPLACE "); } print("VIEW "); x.getName().accept(this); if (x.getColumns().size() > 0) { print(" ("); 
printAndAccept(x.getColumns(), ", "); print(")"); } print(" AS "); x.getSubQuery().accept(this); return false; } @Override public boolean visit(SQLAlterTableDropIndex x) { print("DROP INDEX "); x.getIndexName().accept(this); return false; } @Override public boolean visit(SQLOver x) { print("OVER ("); if (x.getPartitionBy().size() > 0) { print("PARTITION BY "); printAndAccept(x.getPartitionBy(), ", "); print(' '); } if (x.getOrderBy() != null) { x.getOrderBy().accept(this); } print(")"); return false; } @Override public boolean visit(SQLColumnPrimaryKey x) { if (x.getName() != null) { print("CONSTRAINT "); x.getName().accept(this); print(' '); } print("PRIMARY KEY"); return false; } @Override public boolean visit(SQLColumnUniqueKey x) { if (x.getName() != null) { print("CONSTRAINT "); x.getName().accept(this); print(' '); } print("UNIQUE"); return false; } @Override public boolean visit(SQLColumnCheck x) { if (x.getName() != null) { print("CONSTRAINT "); x.getName().accept(this); print(' '); } print("CHECK ("); x.getExpr().accept(this); print(')'); if (x.getEnable() != null) { if (x.getEnable().booleanValue()) { print(" ENABLE"); } else { print(" DISABLE"); } } return false; } @Override public boolean visit(SQLWithSubqueryClause x) { print("WITH"); if (x.getRecursive() == Boolean.TRUE) { print(" RECURSIVE"); } incrementIndent(); println(); printlnAndAccept(x.getEntries(), ", "); decrementIndent(); return false; } @Override public boolean visit(SQLWithSubqueryClause.Entry x) { x.getName().accept(this); if (x.getColumns().size() > 0) { print(" ("); printAndAccept(x.getColumns(), ", "); print(")"); } println(); print("AS"); println(); print("("); incrementIndent(); println(); x.getSubQuery().accept(this); decrementIndent(); println(); print(")"); return false; } @Override public boolean visit(SQLAlterTableAlterColumn x) { print("ALTER COLUMN "); x.getColumn().accept(this); return false; } @Override public boolean visit(SQLCheck x) { if (x.getName() != null) { 
print("CONSTRAINT "); x.getName().accept(this); print(' '); } print("CHECK ("); incrementIndent(); x.getExpr().accept(this); decrementIndent(); print(')'); return false; } @Override public boolean visit(SQLAlterTableDropForeignKey x) { print("DROP FOREIGN KEY "); x.getIndexName().accept(this); return false; } @Override public boolean visit(SQLAlterTableDropPrimaryKey x) { print("DROP PRIMARY KEY"); return false; } @Override public boolean visit(SQLAlterTableEnableKeys x) { print("ENABLE KEYS"); return false; } @Override public boolean visit(SQLAlterTableDisableKeys x) { print("DISABLE KEYS"); return false; } public boolean visit(SQLAlterTableDisableConstraint x) { print("DISABLE CONSTRAINT "); x.getConstraintName().accept(this); return false; } public boolean visit(SQLAlterTableEnableConstraint x) { print("ENABLE CONSTRAINT "); x.getConstraintName().accept(this); return false; } @Override public boolean visit(SQLAlterTableDropConstraint x) { print("DROP CONSTRAINT "); x.getConstraintName().accept(this); return false; } @Override public boolean visit(SQLAlterTableStatement x) { print("ALTER TABLE "); x.getName().accept(this); incrementIndent(); for (int i = 0; i < x.getItems().size(); ++i) { SQLAlterTableItem item = x.getItems().get(i); if (i != 0) { print(','); } println(); item.accept(this); } decrementIndent(); return false; } @Override public boolean visit(SQLExprHint x) { x.getExpr().accept(this); return false; } @Override public boolean visit(SQLCreateIndexStatement x) { print("CREATE "); if (x.getType() != null) { print(x.getType()); print(" "); } print("INDEX "); x.getName().accept(this); print(" ON "); x.getTable().accept(this); print(" ("); printAndAccept(x.getItems(), ", "); print(")"); return false; } @Override public boolean visit(SQLUnique x) { if (x.getName() != null) { print("CONSTRAINT "); x.getName().accept(this); print(" "); } print("UNIQUE ("); printAndAccept(x.getColumns(), ", "); print(")"); return false; } @Override public boolean 
visit(SQLPrimaryKeyImpl x) { if (x.getName() != null) { print("CONSTRAINT "); x.getName().accept(this); print(" "); } print("PRIMARY KEY ("); printAndAccept(x.getColumns(), ", "); print(")"); return false; } @Override public boolean visit(SQLAlterTableRenameColumn x) { print("RENAME COLUMN "); x.getColumn().accept(this); print(" TO "); x.getTo().accept(this); return false; } @Override public boolean visit(SQLColumnReference x) { if (x.getName() != null) { print("CONSTRAINT "); x.getName().accept(this); print(" "); } print("REFERENCES "); x.getTable().accept(this); print(" ("); printAndAccept(x.getColumns(), ", "); print(")"); return false; } @Override public boolean visit(SQLForeignKeyImpl x) { if (x.getName() != null) { print("CONSTRAINT "); x.getName().accept(this); print(' '); } print("FOREIGN KEY ("); printAndAccept(x.getReferencingColumns(), ", "); print(")"); print(" REFERENCES "); x.getReferencedTableName().accept(this); print(" ("); printAndAccept(x.getReferencedColumns(), ", "); print(")"); return false; } @Override public boolean visit(SQLDropSequenceStatement x) { print("DROP SEQUENCE "); x.getName().accept(this); return false; } @Override public void endVisit(SQLDropSequenceStatement x) { } @Override public boolean visit(SQLDropTriggerStatement x) { print("DROP TRIGGER "); x.getName().accept(this); return false; } @Override public void endVisit(SQLDropUserStatement x) { } @Override public boolean visit(SQLDropUserStatement x) { print("DROP USER "); printAndAccept(x.getUsers(), ", "); return false; } @Override public boolean visit(SQLExplainStatement x) { print("EXPLAIN"); println(); x.getStatement().accept(this); return false; } @Override public boolean visit(SQLGrantStatement x) { print("GRANT "); printAndAccept(x.getPrivileges(), ", "); if (x.getOn() != null) { print(" ON "); if (x.getObjectType() != null) { print(x.getObjectType().name()); print(' '); } x.getOn().accept(this); } if (x.getTo() != null) { print(" TO "); x.getTo().accept(this); } 
    // Continuation of visit(SQLGrantStatement): the WITH keyword is printed at
    // most once ("with" flag), followed by whichever resource limits are set.
    boolean with = false;
    if (x.getMaxQueriesPerHour() != null) {
        if (!with) {
            print(" WITH");
            with = true;
        }
        print(" MAX_QUERIES_PER_HOUR ");
        x.getMaxQueriesPerHour().accept(this);
    }
    if (x.getMaxUpdatesPerHour() != null) {
        if (!with) {
            print(" WITH");
            with = true;
        }
        print(" MAX_UPDATES_PER_HOUR ");
        x.getMaxUpdatesPerHour().accept(this);
    }
    if (x.getMaxConnectionsPerHour() != null) {
        if (!with) {
            print(" WITH");
            with = true;
        }
        print(" MAX_CONNECTIONS_PER_HOUR ");
        x.getMaxConnectionsPerHour().accept(this);
    }
    if (x.getMaxUserConnections() != null) {
        if (!with) {
            print(" WITH");
            with = true;
        }
        print(" MAX_USER_CONNECTIONS ");
        x.getMaxUserConnections().accept(this);
    }
    if (x.isAdminOption()) {
        if (!with) {
            print(" WITH");
            with = true;
        }
        print(" ADMIN OPTION");
    }
    if (x.getIdentifiedBy() != null) {
        print(" IDENTIFIED BY ");
        x.getIdentifiedBy().accept(this);
    }
    return false;
}

/** Outputs "DROP DATABASE [IF EXISTS ]<name>". */
@Override
public boolean visit(SQLDropDatabaseStatement x) {
    print("DROP DATABASE ");
    if (x.isIfExists()) {
        print("IF EXISTS ");
    }
    x.getDatabase().accept(this);
    return false;
}

/** Outputs "DROP FUNCTION [IF EXISTS ]<name>". */
@Override
public boolean visit(SQLDropFunctionStatement x) {
    print("DROP FUNCTION ");
    if (x.isIfExists()) {
        print("IF EXISTS ");
    }
    x.getName().accept(this);
    return false;
}

/** Outputs "DROP TABLESPACE [IF EXISTS ]<name>". */
@Override
public boolean visit(SQLDropTableSpaceStatement x) {
    print("DROP TABLESPACE ");
    if (x.isIfExists()) {
        print("IF EXISTS ");
    }
    x.getName().accept(this);
    return false;
}

/** Outputs "DROP PROCEDURE [IF EXISTS ]<name>". */
@Override
public boolean visit(SQLDropProcedureStatement x) {
    print("DROP PROCEDURE ");
    if (x.isIfExists()) {
        print("IF EXISTS ");
    }
    x.getName().accept(this);
    return false;
}

/**
 * Outputs an ALTER TABLE item:
 * "ADD [<type> ][UNIQUE ]INDEX [<name> ](<items>)[ USING <method>]".
 */
@Override
public boolean visit(SQLAlterTableAddIndex x) {
    print("ADD ");
    if (x.getType() != null) {
        print(x.getType());
        print(" ");
    }
    if (x.isUnique()) {
        print("UNIQUE ");
    }
    print("INDEX ");
    if (x.getName() != null) {
        x.getName().accept(this);
        print(' ');
    }
    print("(");
    printAndAccept(x.getItems(), ", ");
    print(")");
    if (x.getUsing() != null) {
        print(" USING ");
        print(x.getUsing());
    }
    return false;
}

// (signature continues on the next original source line)
@Override
public
boolean visit(SQLAlterTableAddConstraint x) {
    // "ADD <constraint>" for ALTER TABLE, honoring SQL Server's WITH NOCHECK prefix.
    if (x.isWithNoCheck()) {
        print("WITH NOCHECK ");
    }
    print("ADD ");
    x.getConstraint().accept(this);
    return false;
}

/**
 * Outputs a CREATE TRIGGER statement: optional OR REPLACE, trigger name,
 * timing/type, triggering events, target table, optional FOR EACH ROW, and
 * finally the trigger body.
 */
public boolean visit(SQLCreateTriggerStatement x) {
    print("CREATE ");
    if (x.isOrReplace()) {
        // FIX: was the misspelled keyword "OR REPLEACE ", which produced
        // invalid SQL text for every OR REPLACE trigger.
        print("OR REPLACE ");
    }
    print("TRIGGER ");
    x.getName().accept(this);
    incrementIndent();
    println();
    if (TriggerType.INSTEAD_OF.equals(x.getTriggerType())) {
        // the enum constant uses an underscore, but the SQL keyword has a space
        print("INSTEAD OF");
    } else {
        print(x.getTriggerType().name());
    }
    for (TriggerEvent event : x.getTriggerEvents()) {
        print(' ');
        print(event.name());
    }
    println();
    print("ON ");
    x.getOn().accept(this);
    if (x.isForEachRow()) {
        println();
        print("FOR EACH ROW");
    }
    decrementIndent();
    println();
    x.getBody().accept(this);
    return false;
}

/** Prints a boolean literal as lowercase "true"/"false". */
public boolean visit(SQLBooleanExpr x) {
    print(x.getValue() ? "true" : "false");
    return false;
}

public void endVisit(SQLBooleanExpr x) {
}

/** Outputs a parenthesized UNION sub-query used as a table source, plus its alias if any. */
@Override
public boolean visit(SQLUnionQueryTableSource x) {
    print("(");
    incrementIndent();
    x.getUnion().accept(this);
    println();
    decrementIndent();
    print(")");
    if (x.getAlias() != null) {
        print(' ');
        print(x.getAlias());
    }
    return false;
}
}
/**
 *
 */
package com.kevinomyonga.pesapaldroid.post;

import com.kevinomyonga.pesapaldroid.ApiUrlConstants;
import com.kevinomyonga.pesapaldroid.IRequest;
import com.kevinomyonga.pesapaldroid.Pesapal;
import com.kevinomyonga.pesapaldroid.exception.PostRequestException;

import java.net.MalformedURLException;
import java.net.URL;
import java.util.Set;
import java.util.UUID;

import oauth.signpost.OAuth;
import oauth.signpost.OAuthConsumer;
import oauth.signpost.exception.OAuthCommunicationException;
import oauth.signpost.exception.OAuthExpectationFailedException;
import oauth.signpost.exception.OAuthMessageSignerException;
import oauth.signpost.http.HttpParameters;

/**
 * Builds the OAuth-signed Pesapal URL carrying the XML order form and the
 * callback return URL.
 * <p>
 * Create instances through the inner {@code Builder} class. This class does
 * NOT currently check for errors or validate parameters — callers must pass
 * correct values.
 * <p>
 * Before using an instance, make sure {@code Pesapal.initialize()} has been
 * called to initialize the library (this class reads the shared
 * {@code OAuthConsumer} from {@code Pesapal}).
 *
 * @author Davide Parise mailto:bubini.mara5@gmail.com
 *         Sep 10, 2014
 */
public class PostRequest implements IRequest {

    protected final String callback; // callback return URL, sent as the oauth_callback parameter
    protected final String form;     // XML order form, sent as the pesapal_request_data parameter
    protected final String baseUrl;  // Pesapal API base URL the signed GET request targets
/** * This parameters came from the builder * @param form - the from to send * @param callback - the callback */ private PostRequest(String baseUrl,String form, String callback) { this.form = form; this.callback = callback; this.baseUrl = baseUrl; } /* (non-Javadoc) * @see com.davide.parise.pesapalandroidlib.lib.IRequest#getString() * */ @Override public String getURL() throws PostRequestException { OAuthConsumer consumer = Pesapal.getConsumer(); String url = OAuth.addQueryParameters(baseUrl,"pesapal_request_data",form,"oauth_callback",callback); try { url = consumer.sign(url); } catch (OAuthMessageSignerException e) { throw new PostRequestException("Can't sign message",e); } catch (OAuthExpectationFailedException e) { throw new PostRequestException("Failed to sign message",e); } catch (OAuthCommunicationException e) { throw new PostRequestException("Communication failed while sign message",e); } String query; try { query = url.split("\\?")[1]; } catch (IndexOutOfBoundsException e) { throw new PostRequestException("Invalid query parameters"); } // the OAuth library add the other callback param. // remove it by calling keyset() HttpParameters params = OAuth.decodeForm(query); url = baseUrl; //params.remove(OAuth.OAUTH_CALLBACK); // remove duplicate of callback parameter Set<String> keyset = params.keySet(); for(String key : keyset){ Set<String> values = params.get(key); for(String value : values){ url = OAuth.addQueryParameters(url,key,value); } } return url; } /* (non-Javadoc) * @see com.davide.parise.pesapalandroidlib.lib.IRequest#get() */ @Override public URL get() throws PostRequestException { try { return new URL(getURL()); } catch (MalformedURLException e) { throw new PostRequestException("Malformed url. 
base is "+baseUrl, e); } } /** * @return the callback */ public String getCallback() { return callback; } /** * @return the form */ public String getForm() { return form; } /** * @return the baseUrl */ public String getBaseUrl() { return baseUrl; } /** * * @author Davide Parise mailto:bubini.mara5@gmail.com * Sep 10, 2014 * * Inner class for build the post request. * The builder not check sanity of parameters passed * * Make sure you pass all parameters correct * Make sure you set all parameters * * Automatically set the type MERCHANT and generate an unique reference. * The callback is set by default to pesapal.com API for query status. */ public static class Builder{ // API URL constants protected static final String url_demo = ApiUrlConstants.POST.url_demo; protected static final String url_demo_mobile = ApiUrlConstants.POST.url_demo_mobile; protected static final String url_live = ApiUrlConstants.POST.url_live; protected static final String url_live_mobile = ApiUrlConstants.POST.url_live_mobile; // default callback URL protected static final String callback_live = ApiUrlConstants.CALLBACK.callback_live; protected static final String callback_demo = ApiUrlConstants.CALLBACK.callback_demo; private final String POST_XML = "<?xml version=\"1.0\" encoding=\"utf-8\"?>" + "<PesapalDirectOrderInfo xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" " + "xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\" " + "Amount=\"%s\" Description=\"%s\" " + "Type=\"%s\" Reference=\"%s\" " + "FirstName=\"%s\" LastName=\"%s\" " + "Email=\"%s\" PhoneNumber=\"%s\" " + "xmlns=\"http://www.pesapal.com\" />"; // variables for make the xml post private String amount; private String description; private String type; private String reference; private String fName; private String lName; private String mail; private String phone; // private String callback; private boolean isMobile; public static class TYPE{ public static String MERCHANT = "MERCHANT"; public static String ORDER = "ORDER"; } /** 
* Constructor that initialize some variables with default values */ public Builder(){ type = TYPE.MERCHANT; callback = Pesapal.getDefaultCallback(); reference = UUID.randomUUID().toString(); isMobile = true; } /** * Create new PostRequest object. * @return - new PostRequest object */ public PostRequest build(){ String form = String.format(POST_XML, amount,description,type,reference,fName,lName,mail,phone); return new PostRequest(getBaseUrl(),form,callback); } /** * Set the amount to pay * @param amount - the amount to pay * @return - this builder */ public Builder amount(String amount){ this.amount = amount; return this; } /** * Use inner-class TYPE to set it * @param type - the one of MERCHANT or ORDER. The default value is MERCHANT * @return - this */ public Builder type(String type){ this.type = type; return this; } /** * * @param description - the description * @return - this */ public Builder description(String description){ this.description = description; return this; } /** * Set the reference. This must be unique for all transaction. * It is set automatically it is not necessary to set it manually. * The default value is generated by UUID. * @param reference - the reference * @return - this */ public Builder reference(String reference){ this.reference = reference; return this; } /** * Set the first and last name * @param fName - first name * @param lName - last name * @return - this */ public Builder name(String fName,String lName){ this.fName = fName; this.lName = lName; return this; } /** * Set the one of mail or phone * @param mail - the mail * @return - this */ public Builder mail(String mail){ this.mail = mail; return this; } /** * Set the phone number. Must be set one of phone or mail * @param phone - the phone number * @return - this */ public Builder phone(String phone){ this.phone = phone; return this; } /** * Build a request from mobile. Change the Pesapal.com API URL for request * * @param isMobile - the isMobile. 
The default value is true * @return - this */ public Builder isMobile(boolean isMobile){ this.isMobile = isMobile; return this; } /** * Set the callback return URL * * @param calback - this is set by default, change only if is necessary * @return - this */ public Builder callback(String calback){ this.callback = calback; return this; } /** * Util method for get the Pesapal.com API URL * based on demo and mobile variable. * @return - this */ private String getBaseUrl(){ if(Pesapal.isDEMO()){ return isMobile ? url_demo_mobile : url_demo; }else{ return isMobile ? url_live_mobile : url_live; } } } }
/* * Copyright (c) 2015 Spotify AB * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.spotify.missinglink; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.spotify.missinglink.Conflict.ConflictCategory; import com.spotify.missinglink.datamodel.AccessedField; import com.spotify.missinglink.datamodel.Artifact; import com.spotify.missinglink.datamodel.CalledMethod; import com.spotify.missinglink.datamodel.ClassTypeDescriptor; import com.spotify.missinglink.datamodel.DeclaredClass; import com.spotify.missinglink.datamodel.DeclaredMethod; import com.spotify.missinglink.datamodel.Dependency; import com.spotify.missinglink.datamodel.FieldDependencyBuilder; import com.spotify.missinglink.datamodel.MethodDependencyBuilder; import java.util.Arrays; import static com.spotify.missinglink.Simple.INT; import static com.spotify.missinglink.Simple.STRING; import static com.spotify.missinglink.Simple.VOID; import static com.spotify.missinglink.Simple.array; import static com.spotify.missinglink.Simple.methodMap; import static com.spotify.missinglink.Simple.newAccess; import static com.spotify.missinglink.Simple.newArtifact; import static com.spotify.missinglink.Simple.newCall; import static com.spotify.missinglink.Simple.newClass; import static com.spotify.missinglink.Simple.newMethod; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.Assert.assertEquals; public class FeatureTest { private final 
ConflictChecker conflictChecker = new ConflictChecker();
// (the field above completes the "private final" declaration opened on the
// previous original source line; the checker is shared by all test cases)

/** A call to a method on a class present in no artifact yields CLASS_NOT_FOUND. */
@org.junit.Test
public void testSimpleConflict() throws Exception {
    final DeclaredMethod methodOnlyInD1 = newMethod(false, INT, "foo").build();
    final DeclaredClass fooClass = newClass("com/d/Foo").methods(methodMap(methodOnlyInD1)).build();
    final Artifact d2 = newArtifact("empty");
    final CalledMethod methodCall = newCall(fooClass, methodOnlyInD1, false, true);
    final DeclaredMethod mainMethod = newMethod(true, VOID, "main")
            .methodCalls(ImmutableSet.of(methodCall)).build();
    final DeclaredClass rootClass = newClass("com/Root")
            .methods(methodMap(mainMethod))
            .build();
    final Artifact root = newArtifact("root", rootClass);
    final ImmutableList<Artifact> classpath = ImmutableList.of(root, d2);
    final Conflict expectedConflict = new ConflictBuilder()
            .dependency(dependency(rootClass.className(), mainMethod, methodCall))
            .reason("Class not found: com.d.Foo")
            .category(ConflictCategory.CLASS_NOT_FOUND)
            .usedBy(root.name())
            .existsIn(ConflictChecker.UNKNOWN_ARTIFACT_NAME)
            .build();
    assertThat(conflictChecker
            .check(root, classpath, classpath))
            .isEqualTo(ImmutableList.of(expectedConflict));
}

/** The class exists on the classpath but without the called method: METHOD_SIGNATURE_NOT_FOUND. */
@org.junit.Test
public void testSimpleConflict2() throws Exception {
    final DeclaredMethod methodOnlyInD1 = newMethod(false, INT, "foo").build();
    final DeclaredClass fooClass = newClass("com/d/Foo")
            .methods(methodMap(methodOnlyInD1))
            .build();
    final DeclaredClass d2Class = newClass("com/d/Foo").build();
    final Artifact d2 = newArtifact("D2", d2Class);
    final CalledMethod methodCall = newCall(fooClass, methodOnlyInD1, false, true);
    final DeclaredMethod mainMethod = newMethod(true, VOID, "main", array(STRING))
            .methodCalls(ImmutableSet.of(methodCall))
            .build();
    final DeclaredClass rootClass = newClass("com/Root")
            .methods(methodMap(mainMethod))
            .build();
    final Artifact root = newArtifact("root", rootClass);
    final ImmutableList<Artifact> classpath = ImmutableList.of(root, d2);
    final Conflict expectedConflict = new ConflictBuilder()
            .dependency(dependency(rootClass.className(), mainMethod, methodCall))
            .reason("Method not found: com.d.Foo.foo()")
            .category(ConflictCategory.METHOD_SIGNATURE_NOT_FOUND)
            .usedBy(root.name())
            .existsIn(d2.name())
            .build();
    assertThat(conflictChecker
            .check(root, classpath, classpath))
            .isEqualTo(ImmutableList.of(expectedConflict));
}

/** Accessing a field the target class does not declare yields FIELD_NOT_FOUND. */
@org.junit.Test
public void testMissingField() throws Exception {
    final DeclaredClass d2Class = newClass("com/d/Foo").build();
    final Artifact d2 = newArtifact("D2", d2Class);
    final DeclaredMethod mainMethod = newMethod(true, VOID, "main", array(STRING))
            .methodCalls(ImmutableSet.of())
            .fieldAccesses(ImmutableSet.of(
                    newAccess("I", "foo", "com/d/Foo", 12)
            ))
            .build();
    final DeclaredClass rootClass = newClass("com/Root")
            .methods(methodMap(mainMethod))
            .build();
    final Artifact root = newArtifact("root", rootClass);
    final ImmutableList<Artifact> classpath = ImmutableList.of(root, d2);
    final AccessedField accessed = newAccess(INT, "foo", "com/d/Foo", 12);
    final Conflict expectedConflict = new ConflictBuilder()
            .dependency(dependency(rootClass.className(), mainMethod, accessed))
            .reason("Field not found: foo")
            .category(ConflictCategory.FIELD_NOT_FOUND)
            .usedBy(root.name())
            .existsIn(d2.name())
            .build();
    assertThat(conflictChecker
            .check(root, classpath, classpath))
            .isEqualTo(ImmutableList.of(expectedConflict));
}

/** A virtual call resolved through the superclass hierarchy is not a conflict. */
@org.junit.Test
public void testNoConflictWithInheritedMethodCall() throws Exception {
    final DeclaredMethod methodOnlyInSuper = newMethod(false, INT, "foo").build();
    final DeclaredClass superClass = newClass("com/super").methods(methodMap(methodOnlyInSuper)).build();
    final DeclaredClass subClass = newClass("com/Sub")
            .parents(ImmutableSet.of(superClass.className()))
            .build();
    final CalledMethod methodCall = newCall(subClass, methodOnlyInSuper, false, true);
    final DeclaredMethod mainMethod = newMethod(true, VOID, "main", array(STRING))
            .methodCalls(ImmutableSet.of(methodCall))
            .fieldAccesses(ImmutableSet.of())
            .build();
    final DeclaredClass mainClass = newClass("com/Main").methods(methodMap(mainMethod)).build();
    final Artifact artifact = newArtifact("art", superClass, subClass, mainClass);
    assertThat(conflictChecker.check(artifact,
            ImmutableList.of(artifact),
            ImmutableList.of(artifact)
    )).isEmpty();
}

/** The same inherited call, but non-virtual (virtual flag false), IS a conflict. */
@org.junit.Test
public void testConflictWithInheritedMethodCallIfNonVirtual() throws Exception {
    final DeclaredMethod methodOnlyInSuper = newMethod(false, INT, "foo").build();
    final DeclaredClass superClass = newClass("com/super").methods(methodMap(methodOnlyInSuper)).build();
    final DeclaredClass subClass = newClass("com/Sub")
            .parents(ImmutableSet.of(superClass.className()))
            .build();
    final CalledMethod methodCall = newCall(subClass, methodOnlyInSuper, false, false);
    final DeclaredMethod mainMethod = newMethod(true, VOID, "main", array(STRING))
            .methodCalls(ImmutableSet.of(methodCall))
            .fieldAccesses(ImmutableSet.of())
            .build();
    final DeclaredClass mainClass = newClass("com/Main").methods(methodMap(mainMethod)).build();
    final Artifact artifact = newArtifact("art", superClass, subClass, mainClass);
    final Conflict expectedConflict = new ConflictBuilder()
            .dependency(dependency(mainClass.className(), mainMethod, methodCall))
            .reason("Method not found: com.Sub.foo()")
            .category(ConflictCategory.METHOD_SIGNATURE_NOT_FOUND)
            .usedBy(artifact.name())
            .existsIn(artifact.name())
            .build();
    assertEquals(Arrays.asList(expectedConflict),
            conflictChecker.check(artifact,
                    ImmutableList.of(artifact),
                    ImmutableList.of(artifact)));
}

/** A subclass narrowing the return type (covariant return) is not a conflict. */
@org.junit.Test
public void testNoConflictWithCovariantReturnType() throws Exception {
    final DeclaredMethod superMethod = newMethod(false, "Ljava/lang/CharSequence;", "foo").build();
    final DeclaredClass superClass = newClass("com/Super").methods(methodMap(superMethod)).build();
    final DeclaredMethod subMethod = newMethod(false, "Ljava/lang/String;", "foo").build();
    final DeclaredClass subClass = newClass("com/Sub").methods(methodMap(subMethod))
            .parents(ImmutableSet.of(superClass.className()))
            .build();
    final CalledMethod methodCall = newCall(subClass, superMethod, false, true);
    final DeclaredMethod mainMethod = newMethod(true, VOID, "main", array(STRING))
            .methodCalls(ImmutableSet.of(methodCall))
            .build();
    final DeclaredClass mainClass = newClass("com/Main").methods(methodMap(mainMethod)).build();
    final Artifact artifact = newArtifact("art", superClass, subClass, mainClass);
    assertThat(conflictChecker
            .check(artifact,
                    ImmutableList.of(artifact),
                    ImmutableList.of(artifact)
            )).isEmpty();
}

/** A static call to a declared static method is not a conflict. */
@org.junit.Test
public void testNoConflictWithStaticCall() throws Exception {
    final DeclaredMethod methodOnlyInSuper = newMethod(true, INT, "foo").build();
    final DeclaredClass superClass = newClass("com/super").methods(methodMap(methodOnlyInSuper)).build();
    final CalledMethod methodCall = newCall(superClass, methodOnlyInSuper, true, false);
    final DeclaredMethod mainMethod = newMethod(true, VOID, "main", array(STRING))
            .methodCalls(ImmutableSet.of(methodCall))
            .fieldAccesses(ImmutableSet.of())
            .build();
    final DeclaredClass mainClass = newClass("com/Main").methods(methodMap(mainMethod)).build();
    final Artifact artifact = newArtifact("art", superClass, mainClass);
    assertThat(conflictChecker.check(artifact,
            ImmutableList.of(artifact),
            ImmutableList.of(artifact)
    )).isEmpty();
}

/** A static call targeting an instance (non-static) method IS a conflict. */
@org.junit.Test
public void testConflictWithStaticToVirtualCall() throws Exception {
    final DeclaredMethod methodOnlyInSuper = newMethod(false, INT, "foo").build();
    final DeclaredClass superClass = newClass("com/super").methods(methodMap(methodOnlyInSuper)).build();
    final CalledMethod methodCall = newCall(superClass, methodOnlyInSuper, true, false);
    final DeclaredMethod mainMethod = newMethod(true, VOID, "main", array(STRING))
            .methodCalls(ImmutableSet.of(methodCall))
            .fieldAccesses(ImmutableSet.of())
            .build();
    final DeclaredClass mainClass = newClass("com/Main").methods(methodMap(mainMethod)).build();
    final Artifact artifact = newArtifact("art", superClass, mainClass);
    final Conflict expectedConflict = new ConflictBuilder()
            .dependency(dependency(mainClass.className(), mainMethod, methodCall))
            .reason("Method not found: com.super.foo()")
            .category(ConflictCategory.METHOD_SIGNATURE_NOT_FOUND)
            .usedBy(artifact.name())
            .existsIn(artifact.name())
            .build();
    assertEquals(Arrays.asList(expectedConflict),
            conflictChecker.check(artifact,
                    ImmutableList.of(artifact),
                    ImmutableList.of(artifact)
            ));
}

/** A virtual call targeting a static method IS a conflict. */
@org.junit.Test
public void testConflictWithVirtualToStaticCall() throws Exception {
    final DeclaredMethod methodOnlyInSuper = newMethod(true, INT, "foo").build();
    final DeclaredClass superClass = newClass("com/super").methods(methodMap(methodOnlyInSuper)).build();
    final CalledMethod methodCall = newCall(superClass, methodOnlyInSuper, false, true);
    final DeclaredMethod mainMethod = newMethod(true, VOID, "main", array(STRING))
            .methodCalls(ImmutableSet.of(methodCall))
            .fieldAccesses(ImmutableSet.of())
            .build();
    final DeclaredClass mainClass = newClass("com/Main").methods(methodMap(mainMethod)).build();
    final Artifact artifact = newArtifact("art", superClass, mainClass);
    final Conflict expectedConflict = new ConflictBuilder()
            .dependency(dependency(mainClass.className(), mainMethod, methodCall))
            .reason("Method not found: com.super.foo()")
            .category(ConflictCategory.METHOD_SIGNATURE_NOT_FOUND)
            .usedBy(artifact.name())
            .existsIn(artifact.name())
            .build();
    assertEquals(Arrays.asList(expectedConflict),
            conflictChecker.check(artifact,
                    ImmutableList.of(artifact),
                    ImmutableList.of(artifact)
            ));
}

/** Helper: builds a method-call dependency edge for the expected Conflict. */
private static Dependency dependency(ClassTypeDescriptor className,
                                     DeclaredMethod declaredMethod,
                                     CalledMethod methodCall) {
    return new MethodDependencyBuilder()
            .fromClass(className)
            .fromMethod(declaredMethod.descriptor())
            .fromLineNumber(methodCall.lineNumber())
            .targetClass(methodCall.owner())
            .targetMethod(methodCall.descriptor())
            .build();
}

/** Helper: builds a field-access dependency edge for the expected Conflict. */
private static Dependency dependency(ClassTypeDescriptor className,
                                     DeclaredMethod declaredMethod,
                                     AccessedField field) {
    return new FieldDependencyBuilder()
            .fromClass(className)
            .fromMethod(declaredMethod.descriptor())
            .fromLineNumber(field.lineNumber())
            .targetClass(field.owner())
            .fieldName(field.name())
            .fieldType(field.descriptor())
            .build();
}
}
/*
 * Copyright 2008 The MITRE Corporation (http://www.mitre.org/). All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.mitre.mrald.taglib;

import java.io.File;
import java.io.IOException;

import javax.servlet.http.HttpServletResponse;
import javax.servlet.jsp.JspException;
import javax.servlet.jsp.tagext.TagSupport;

import org.mitre.mrald.util.Config;
import org.mitre.mrald.util.FormUtils;
import org.mitre.mrald.util.MraldOutFile;
import org.mitre.mrald.util.User;

/**
 * JSP tag that resolves a custom-form request (by user id + form id) to a JSP
 * file, regenerating the JSP from its XML definition when the XML is newer,
 * and redirects the response to the resulting form.
 *
 * @author jchoyt
 * @created November 8, 2002
 */
public class DecisionMakerTag extends TagSupport
{
    // Location of the custom forms in the web server
    private String dir_struct = Config.getProperty( "customForms" );
    // private String form_id;
    private String formType;
    private String formAccess = "Personal";

    /**
     * Constructor for the DecisionMakerTag object
     */
    public DecisionMakerTag()
    {
        super();
    }

    /**
     * Vestigial entry point; constructs a tag instance and discards it.
     *
     * @param args The command line arguments (unused)
     */
    public static void main( String[] args )
    {
        new DecisionMakerTag();
    }

    /**
     * Resolves the requested form, regenerates it from XML if stale, and
     * redirects the response to the form's JSP.
     *
     * @return SKIP_BODY (the tag has no body to evaluate)
     * @throws JspException if no user is in session, no form id was supplied,
     *                      neither the XML nor the JSP exists, or the redirect fails
     */
    public int doStartTag()
        throws JspException
    {
        /*
         * TODO: use ValidateUserTag to establish that the User is logged in and valid
         */
        User user = ( User ) pageContext.getSession().getAttribute( Config.getProperty( "cookietag" ) );
        if ( user == null )
        {
            throw new JspException( "Couldn't find the User object in the session" );
        }
        String userid = user.getEmail();

        // FIX: the original compared the parameter with != "" (reference
        // comparison); use isEmpty() for value comparison.
        String accessParam = pageContext.getRequest().getParameter( "formAccess" );
        if ( accessParam != null && !accessParam.isEmpty() )
        {
            setFormAccess( accessParam );
        }
        // Public forms are stored under the shared "public" pseudo-user.
        if ( formAccess.equals( "Public" ) || formAccess.equals( "PublicEdit" ) )
        {
            userid = "public";
        }

        // Locate the formid from the request URL.
        String form_id = pageContext.getRequest().getParameter( "formid" );
        if ( form_id == null )
        {
            // FIX: the original dereferenced a missing parameter and threw an
            // unchecked NullPointerException instead of a meaningful error.
            throw new JspException( "No form_id was provided." );
        }

        /*
         * Resolve the file the user actually asked for BEFORE stripping the
         * "-simple" suffix, so a simple-form request redirects to the simple
         * JSP while staleness checks still run against the base form.
         */
        File requested_file = new File( dir_struct + userid + "_" + form_id + ".jsp" );
        try
        {
            MraldOutFile.appendToFile( requested_file.getCanonicalPath() );
        }
        catch ( Exception ignored )
        {
            // best-effort debug logging only; a failure here must not break the redirect
        }

        int loc = form_id.indexOf( "-simple" );
        if ( loc != -1 )
        {
            form_id = form_id.substring( 0, loc );
        }
        if ( form_id.equals( "" ) )
        {
            throw new JspException( "No form_id was provided." );
        }

        // Default the form type to Select when none was requested.
        if ( pageContext.getRequest().getParameter( "formType" ) != null )
        {
            setFormType( pageContext.getRequest().getParameter( "formType" ) );
        }
        else
        {
            setFormType( "Select" );
        }

        // Identify the candidate files for this form.
        File xml_file = new File( dir_struct + userid + "_" + form_id + ".xml" );
        File jsp_file = new File( dir_struct + userid + "_" + form_id + ".jsp" );
        File simple_jsp_file = new File( dir_struct + FormUtils.getSimpleJspName( form_id, userid ) );
        File redirect_file = requested_file;

        if ( xml_file.exists() && jsp_file.exists() )
        {
            // Both exist: rebuild the JSP only when the XML is newer.
            if ( xml_file.lastModified() > jsp_file.lastModified() )
            {
                FormUtils.makeForm( xml_file );
                if ( simple_jsp_file.exists() )
                {
                    redirect_file = requested_file;
                }
            }
        }
        else if ( xml_file.exists() )
        {
            // Only the XML exists: generate the JSP from it.
            FormUtils.makeForm( xml_file );
            if ( simple_jsp_file.exists() )
            {
                redirect_file = simple_jsp_file;
            }
        }
        else if ( !jsp_file.exists() )
        {
            // Neither file exists.
            throw new JspException( "Couldn't find either the XML or HTML file for the requested form : FormId "
                + dir_struct + userid + "_" + form_id + ".jsp" );
        }

        try
        {
            HttpServletResponse res = ( HttpServletResponse ) pageContext.getResponse();
            res.sendRedirect( FormUtils.getRedirect( redirect_file.getAbsolutePath() ) );
        }
        catch ( IOException e )
        {
            throw new JspException( e );
        }

        pageContext.getRequest().removeAttribute( "formAccess" );
        pageContext.getRequest().removeAttribute( "formType" );
        // SKIP_BODY == 0; the original returned the magic number directly.
        return SKIP_BODY;
    }

    /**
     * Sets the formType attribute of the DecisionMakerTag object
     *
     * @param formType The new formType value
     */
    protected void setFormType( String formType )
    {
        this.formType = formType;
    }

    /**
     * Gets the formType attribute of the DecisionMakerTag object
     *
     * @return The formType value
     */
    protected String getFormType()
    {
        return formType;
    }

    /**
     * Sets the formAccess attribute of the DecisionMakerTag object
     *
     * @param formAccess The new formAccess value
     */
    public void setFormAccess( String formAccess )
    {
        this.formAccess = formAccess;
    }

    /**
     * Gets the formAccess attribute of the DecisionMakerTag object
     *
     * @return The formAccess value
     */
    public String getFormAccess()
    {
        return formAccess;
    }

    /**
     * Gets the directory holding the custom forms
     *
     * @return The customForms directory path
     */
    protected String getDirStruct()
    {
        return dir_struct;
    }
}
package com.ericturnerdev.Altcoin;

import java.util.ArrayList;
import java.util.List;

/**
 * Market data type matching the Cryptsy JSON return, with extra fields for the
 * cryptocoincharts API. Ordered by secondary currency code.
 */
public class Market implements Comparable<Market> {

    private int marketid = 0;
    private String label = "";
    private double lasttradeprice = 0.00;
    private double volume = 0.00;
    private String lasttradetime = "";
    private String primaryname = "";
    private String primarycode = "";
    private String secondaryname = "";
    private String secondarycode = "";
    private List<TradeItem> recenttrades = new ArrayList<TradeItem>();
    private List<BuySellItem> sellorders = new ArrayList<BuySellItem>();
    private List<BuySellItem> buyorders = new ArrayList<BuySellItem>();
    private boolean visible = false;

    // Fields populated from the cryptocoincharts API
    private double price_before_24h = 0.0;
    private double volume_btc = 0.0;
    private double price = 0.0;
    private String id = "";

    public Market() {
    }

    /**
     * Constructor for the Pairs class: only the data needed on the Settings
     * page. The label (and id) is derived as "secondary/primary".
     */
    public Market(int marketid, String secondarycode, String primarycode, String primaryname) {
        this.marketid = marketid;
        this.primaryname = primaryname;
        this.primarycode = primarycode;
        this.secondarycode = secondarycode;
        this.visible = false;
        this.label = secondarycode + "/" + primarycode;
        this.id = this.label;
    }

    /** Full constructor covering every Cryptsy field. */
    public Market(int marketid, String label, double lasttradeprice, double volume,
                  String lasttradetime, String primaryname, String primarycode,
                  String secondaryname, String secondarycode, List<TradeItem> recenttrades,
                  List<BuySellItem> sellorders, List<BuySellItem> buyorders, boolean visible) {
        this.marketid = marketid;
        this.label = label;
        this.lasttradeprice = lasttradeprice;
        this.volume = volume;
        this.lasttradetime = lasttradetime;
        this.primaryname = primaryname;
        this.primarycode = primarycode;
        this.secondaryname = secondaryname;
        this.secondarycode = secondarycode;
        this.recenttrades = recenttrades;
        this.sellorders = sellorders;
        this.buyorders = buyorders;
        this.visible = visible;
    }

    public String getId() {
        return id;
    }

    public void setPrice_before_24h(double price_before_24h) {
        this.price_before_24h = price_before_24h;
    }

    public void setVolume_btc(double volume_btc) {
        this.volume_btc = volume_btc;
    }

    public void setPrice(double price) {
        this.price = price;
    }

    public double getPrice_before_24h() {
        return price_before_24h;
    }

    public double getVolume_btc() {
        return volume_btc;
    }

    public double getPrice() {
        return price;
    }

    public void setVisible(boolean visible) {
        this.visible = visible;
    }

    public boolean isVisible() {
        return visible;
    }

    public int getMarketid() {
        return marketid;
    }

    public void setMarketid(int marketid) {
        this.marketid = marketid;
    }

    public String getLabel() {
        return label;
    }

    public void setLabel(String label) {
        this.label = label;
    }

    public double getLasttradeprice() {
        return lasttradeprice;
    }

    public void setLasttradeprice(double lasttradeprice) {
        this.lasttradeprice = lasttradeprice;
    }

    public double getVolume() {
        return volume;
    }

    public void setVolume(double volume) {
        this.volume = volume;
    }

    public String getLasttradetime() {
        return lasttradetime;
    }

    public void setLasttradetime(String lasttradetime) {
        this.lasttradetime = lasttradetime;
    }

    public String getPrimaryname() {
        return primaryname;
    }

    public void setPrimaryname(String primaryname) {
        this.primaryname = primaryname;
    }

    public String getPrimarycode() {
        return primarycode;
    }

    public void setPrimarycode(String primarycode) {
        this.primarycode = primarycode;
    }

    public String getSecondaryname() {
        return secondaryname;
    }

    public void setSecondaryname(String secondaryname) {
        this.secondaryname = secondaryname;
    }

    public String getSecondarycode() {
        return secondarycode;
    }

    public void setSecondarycode(String secondarycode) {
        this.secondarycode = secondarycode;
    }

    public List<TradeItem> getRecenttrades() {
        return recenttrades;
    }

    public void setRecenttrades(List<TradeItem> recenttrades) {
        this.recenttrades = recenttrades;
    }

    public List<BuySellItem> getSellorders() {
        return sellorders;
    }

    public void setSellorders(List<BuySellItem> sellorders) {
        this.sellorders = sellorders;
    }

    public List<BuySellItem> getBuyorders() {
        return buyorders;
    }

    public void setBuyorders(List<BuySellItem> buyorders) {
        this.buyorders = buyorders;
    }

    @Override
    public String toString() {
        String val = "";
        try {
            // Output format preserved exactly; the catch shields against a
            // setter having injected a null list.
            val = " marketid: " + getMarketid()
                    + " label: " + getLabel()
                    + " volume: " + getVolume()
                    + " time: " + getLasttradetime()
                    + " primaryname: " + getPrimaryname()
                    + " primarycode: " + getPrimarycode()
                    + " secondaryname: " + getSecondaryname()
                    + " secondarycode: " + getSecondarycode()
                    + " lasttradeprice: " + getLasttradeprice()
                    + " visible: " + visible
                    + "recenttrades:\n" + getRecenttrades().toString()
                    + "\nsellorders:" + getSellorders().toString()
                    + "\nbuyorders:" + getBuyorders().toString();
        } catch (RuntimeException e) {
            e.printStackTrace();
        }
        return val;
    }

    /**
     * Recent-trade entry (used for recenttrades only).
     * NOTE(review): a non-static inner class; left as-is because making it
     * static would break existing {@code outer.new TradeItem(...)} call sites.
     */
    public class TradeItem {

        public TradeItem(double price, double quantity, double total) {
            this.price = price;
            this.quantity = quantity;
            this.total = total;
        }

        private int id = 0;
        private String time = "";
        private double price = 0.0;
        private double quantity = 0.0;
        private double total = 0.0;

        public int getId() {
            return id;
        }

        public void setId(int id) {
            this.id = id;
        }

        public String getTime() {
            return time;
        }

        public void setTime(String time) {
            this.time = time;
        }

        public double getPrice() {
            return price;
        }

        public void setPrice(double price) {
            this.price = price;
        }

        public double getQuantity() {
            return quantity;
        }

        public void setQuantity(double quantity) {
            this.quantity = quantity;
        }

        public double getTotal() {
            return total;
        }

        public void setTotal(double total) {
            this.total = total;
        }

        public String toString() {
            return " id:" + getId() + " time: " + getTime() + " price:" + getPrice()
                    + " quantity: " + getQuantity() + " total; " + getTotal() + "\n";
        }
    }

    /**
     * Orders markets by secondary currency code.
     * FIX(idiom): replaced the verbose if/else chain with Integer.signum, which
     * yields the same -1/0/1 values; also added the missing @Override.
     */
    @Override
    public int compareTo(Market m) {
        return Integer.signum(this.getSecondarycode().compareTo(m.getSecondarycode()));
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.nifi.web.dao.impl;

import org.apache.nifi.connectable.Port;
import org.apache.nifi.connectable.Position;
import org.apache.nifi.controller.FlowController;
import org.apache.nifi.controller.ScheduledState;
import org.apache.nifi.controller.exception.ValidationException;
import org.apache.nifi.groups.ProcessGroup;
import org.apache.nifi.remote.RootGroupPort;
import org.apache.nifi.web.NiFiCoreException;
import org.apache.nifi.web.ResourceNotFoundException;
import org.apache.nifi.web.api.dto.PortDTO;
import org.apache.nifi.web.dao.PortDAO;

import java.util.ArrayList;
import java.util.List;
import java.util.Set;

/**
 * DAO for output ports. All lookups start at the root process group, so a
 * port anywhere in the flow hierarchy can be found by id.
 */
public class StandardOutputPortDAO extends ComponentDAO implements PortDAO {

    private FlowController flowController;

    /**
     * Finds the output port with the given id anywhere in the flow.
     *
     * @throws ResourceNotFoundException when no such port exists
     */
    private Port locatePort(final String portId) {
        final ProcessGroup rootGroup = flowController.getGroup(flowController.getRootGroupId());
        final Port port = rootGroup.findOutputPort(portId);
        if (port == null) {
            throw new ResourceNotFoundException(String.format("Unable to find port with id '%s'.", portId));
        }
        return port;
    }

    @Override
    public boolean hasPort(String portId) {
        final ProcessGroup rootGroup = flowController.getGroup(flowController.getRootGroupId());
        return rootGroup.findOutputPort(portId) != null;
    }

    /**
     * Creates a new output port in the given group. Ports added directly to
     * the root group become remotely accessible (site-to-site) ports; ports
     * in nested groups are local.
     */
    @Override
    public Port createPort(String groupId, PortDTO portDTO) {
        // the DTO may only name the group the port is actually being added to
        if (isNotNull(portDTO.getParentGroupId()) && !flowController.areGroupsSame(groupId, portDTO.getParentGroupId())) {
            throw new IllegalArgumentException("Cannot specify a different Parent Group ID than the Group to which the OutputPort is being added.");
        }

        // a name is mandatory
        if (portDTO.getName() == null) {
            throw new IllegalArgumentException("Port name must be specified.");
        }

        final ProcessGroup group = locateProcessGroup(flowController, groupId);

        // root group (no parent) => remote port, otherwise local port
        final Port port = (group.getParent() == null)
                ? flowController.createRemoteOutputPort(portDTO.getId(), portDTO.getName())
                : flowController.createLocalOutputPort(portDTO.getId(), portDTO.getName());

        // fail fast: verify before the port is attached to the flow
        verifyUpdate(port, portDTO);

        // apply initial configuration
        if (portDTO.getPosition() != null) {
            port.setPosition(new Position(portDTO.getPosition().getX(), portDTO.getPosition().getY()));
        }
        port.setComments(portDTO.getComments());

        group.addOutputPort(port);
        return port;
    }

    @Override
    public Port getPort(String portId) {
        return locatePort(portId);
    }

    @Override
    public Set<Port> getPorts(String groupId) {
        final ProcessGroup group = locateProcessGroup(flowController, groupId);
        return group.getOutputPorts();
    }

    @Override
    public void verifyUpdate(PortDTO portDTO) {
        verifyUpdate(locatePort(portDTO.getId()), portDTO);
    }

    /**
     * Verifies that the update described by the DTO is legal for the port's
     * current state, without applying anything.
     */
    private void verifyUpdate(final Port outputPort, final PortDTO portDTO) {
        if (isNotNull(portDTO.getState())) {
            final ScheduledState proposedState = ScheduledState.valueOf(portDTO.getState());

            // only verify a transition when the state is actually changing
            if (!proposedState.equals(outputPort.getScheduledState())) {
                switch (proposedState) {
                    case RUNNING:
                        outputPort.verifyCanStart();
                        break;
                    case STOPPED:
                        // STOPPED is reached by stopping a running port or
                        // enabling a disabled one
                        switch (outputPort.getScheduledState()) {
                            case RUNNING:
                                outputPort.verifyCanStop();
                                break;
                            case DISABLED:
                                outputPort.verifyCanEnable();
                                break;
                        }
                        break;
                    case DISABLED:
                        outputPort.verifyCanDisable();
                        break;
                }
            }
        }

        // when any configuration field is being modified, validate the request
        // and make sure the port currently allows updates
        if (isAnyNotNull(portDTO.getUserAccessControl(),
                portDTO.getGroupAccessControl(),
                portDTO.getConcurrentlySchedulableTaskCount(),
                portDTO.getName(),
                portDTO.getComments())) {

            final List<String> requestValidation = validateProposedConfiguration(portDTO);
            if (!requestValidation.isEmpty()) {
                throw new ValidationException(requestValidation);
            }
            outputPort.verifyCanUpdate();
        }
    }

    /** Returns a (possibly empty) list of human-readable validation errors. */
    private List<String> validateProposedConfiguration(PortDTO portDTO) {
        final List<String> validationErrors = new ArrayList<>();
        if (isNotNull(portDTO.getName()) && portDTO.getName().trim().isEmpty()) {
            validationErrors.add("Port name cannot be blank.");
        }
        if (isNotNull(portDTO.getConcurrentlySchedulableTaskCount()) && portDTO.getConcurrentlySchedulableTaskCount() <= 0) {
            validationErrors.add("Concurrent tasks must be a positive integer.");
        }
        return validationErrors;
    }

    /**
     * Applies the update described by the DTO: an optional state transition
     * first, then access controls (root-group ports only), then the remaining
     * configuration fields.
     */
    @Override
    public Port updatePort(PortDTO portDTO) {
        final Port outputPort = locatePort(portDTO.getId());

        // ensure we can do this update
        verifyUpdate(outputPort, portDTO);

        // handle a requested state transition
        if (portDTO.getState() != null) {
            final ScheduledState proposedState = ScheduledState.valueOf(portDTO.getState());

            // only act when the state is actually changing
            if (!proposedState.equals(outputPort.getScheduledState())) {
                try {
                    switch (proposedState) {
                        case RUNNING:
                            outputPort.getProcessGroup().startOutputPort(outputPort);
                            break;
                        case STOPPED:
                            switch (outputPort.getScheduledState()) {
                                case RUNNING:
                                    outputPort.getProcessGroup().stopOutputPort(outputPort);
                                    break;
                                case DISABLED:
                                    outputPort.getProcessGroup().enableOutputPort(outputPort);
                                    break;
                            }
                            break;
                        case DISABLED:
                            outputPort.getProcessGroup().disableOutputPort(outputPort);
                            break;
                    }
                } catch (IllegalStateException ise) {
                    throw new NiFiCoreException(ise.getMessage(), ise);
                }
            }
        }

        // access controls only exist on root-group (site-to-site) ports
        if (outputPort instanceof RootGroupPort) {
            final RootGroupPort rootPort = (RootGroupPort) outputPort;
            if (isNotNull(portDTO.getGroupAccessControl())) {
                rootPort.setGroupAccessControl(portDTO.getGroupAccessControl());
            }
            if (isNotNull(portDTO.getUserAccessControl())) {
                rootPort.setUserAccessControl(portDTO.getUserAccessControl());
            }
        }

        // apply the remaining configuration
        final String name = portDTO.getName();
        final String comments = portDTO.getComments();
        final Integer concurrentTasks = portDTO.getConcurrentlySchedulableTaskCount();
        if (isNotNull(portDTO.getPosition())) {
            outputPort.setPosition(new Position(portDTO.getPosition().getX(), portDTO.getPosition().getY()));
        }
        if (isNotNull(name)) {
            outputPort.setName(name);
        }
        if (isNotNull(comments)) {
            outputPort.setComments(comments);
        }
        if (isNotNull(concurrentTasks)) {
            outputPort.setMaxConcurrentTasks(concurrentTasks);
        }

        outputPort.getProcessGroup().onComponentModified();
        return outputPort;
    }

    @Override
    public void verifyDelete(final String portId) {
        locatePort(portId).verifyCanDelete();
    }

    @Override
    public void deletePort(String portId) {
        final Port outputPort = locatePort(portId);
        outputPort.getProcessGroup().removeOutputPort(outputPort);
    }

    /* setters */
    public void setFlowController(FlowController flowController) {
        this.flowController = flowController;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.empire.jsf2.websample.web;

import java.util.Iterator;
import java.util.Map;

import javax.faces.FacesException;
import javax.faces.application.FacesMessage;
import javax.faces.context.ExceptionHandler;
import javax.faces.context.ExceptionHandlerWrapper;
import javax.faces.context.FacesContext;
import javax.faces.event.ExceptionQueuedEvent;
import javax.faces.event.ExceptionQueuedEventContext;

import org.apache.empire.exceptions.EmpireException;
import org.apache.empire.exceptions.InternalException;
import org.apache.empire.jsf2.app.FacesUtils;
import org.apache.empire.jsf2.app.TextResolver;
import org.apache.empire.jsf2.app.WebApplication;
import org.apache.empire.jsf2.websample.web.pages.SamplePages;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * JSF exception handler that logs every queued exception, unwraps it to its
 * root cause, and reports it as a faces message on the current session.
 */
public class AppExceptionHandler extends ExceptionHandlerWrapper {

    private static final Logger log = LoggerFactory.getLogger(AppExceptionHandler.class);

    private final ExceptionHandler wrapped;

    public AppExceptionHandler(ExceptionHandler delegate) {
        this.wrapped = delegate;
    }

    @Override
    public javax.faces.context.ExceptionHandler getWrapped() {
        return this.wrapped;
    }

    /**
     * Drains the queue of unhandled exception events. For each event the root
     * cause is located (the walk stops at the first {@link EmpireException} in
     * the cause chain), logged, wrapped as an {@link InternalException} when it
     * is not already an EmpireException, and reported via
     * {@link #setErrorMessage(FacesContext, Throwable)}. A failure while
     * handling one event never prevents handling of the rest, and every event
     * is removed from the queue. Finally the wrapped handler gets its turn.
     */
    @Override
    public void handle() throws FacesException {
        Iterator<ExceptionQueuedEvent> events = getUnhandledExceptionQueuedEvents().iterator();
        while (events.hasNext()) {
            ExceptionQueuedEvent event = events.next();
            try {
                ExceptionQueuedEventContext source = (ExceptionQueuedEventContext) event.getSource();
                FacesContext context = source.getContext();
                Throwable t = source.getException();
                // without the exception itself there is nothing to analyse
                if (t == null) {
                    log.error("Cannot handle exception. Exception not supplied with context!");
                    setErrorMessage(context, null);
                    continue;
                }
                // pick a starting point for the root-cause search
                Throwable rootCause = t.getCause();
                if (rootCause == null) {
                    rootCause = getRootCause(t);
                }
                if (rootCause == null) {
                    rootCause = t;
                }
                // walk down the cause chain; an EmpireException ends the search
                while (!(rootCause instanceof EmpireException)) {
                    Throwable next = rootCause.getCause();
                    if (next == null) {
                        break;
                    }
                    rootCause = next;
                }
                // log with full stack trace
                log.error("Handling exception of type " + rootCause.getClass().getSimpleName(), rootCause);
                if (!(rootCause instanceof EmpireException)) {
                    // wrap foreign exceptions so the resolver can format them
                    rootCause = new InternalException(rootCause);
                }
                setErrorMessage(context, rootCause);
            } catch (Throwable t2) {
                log.error("Failed to handle exception: " + t2.getMessage(), t2);
            } finally {
                // always consume the event, handled or not
                events.remove();
            }
        }
        // let the next handler deal with whatever remains
        getWrapped().handle();
    }

    /**
     * Queues a global error message describing the root cause. When no sample
     * session exists yet, the user is redirected to the login page instead.
     */
    private void setErrorMessage(FacesContext fContext, Throwable rootCause) {
        FacesMessage message = new FacesMessage();
        message.setSeverity(FacesMessage.SEVERITY_ERROR);
        Map<String, Object> sessionMap = fContext.getExternalContext().getSessionMap();
        if (sessionMap.containsKey(SampleSession.MANAGED_BEAN_NAME)) {
            TextResolver resolver = WebApplication.getInstance().getTextResolver(fContext);
            if (rootCause instanceof Exception) {
                message.setSummary(resolver.getExceptionMessage((Exception) rootCause));
            } else {
                message.setSummary(resolver.resolveKey("global_general_error"));
            }
            fContext.addMessage(null, message);
        } else {
            // no session: the message cannot be shown, start over at login
            log.error("Unable to handle exception, redirecting to StartPage.", rootCause);
            FacesUtils.redirectDirectly(fContext, SamplePages.LoginPage);
        }
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.yarn.webapp;

import static org.apache.hadoop.util.Preconditions.checkNotNull;
import static org.apache.hadoop.util.Preconditions.checkState;
import static org.apache.hadoop.yarn.util.StringHelper.djoin;
import static org.apache.hadoop.yarn.util.StringHelper.join;
import static org.apache.hadoop.yarn.util.StringHelper.pjoin;

import java.lang.reflect.Method;
import java.util.EnumSet;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.regex.Pattern;

import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.classification.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.thirdparty.com.google.common.base.CharMatcher;
import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableList;
import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;

/**
 * Manages path info to controller#action routing.
 */
@InterfaceAudience.LimitedPrivate({"YARN", "MapReduce"})
class Router {
  static final Logger LOG = LoggerFactory.getLogger(Router.class);
  static final ImmutableList<String> EMPTY_LIST = ImmutableList.of();
  static final CharMatcher SLASH = CharMatcher.is('/');
  // a path that could name a controller: "/identifier" optionally followed
  // by "/anything"
  static final Pattern controllerRe =
      Pattern.compile("^/[A-Za-z_]\\w*(?:/.*)?");

  /**
   * A route destination: the controller class, the action method to invoke,
   * the registered path prefix, its path parameter names and the set of HTTP
   * methods the route accepts.
   */
  static class Dest {
    final String prefix;
    final ImmutableList<String> pathParams;
    final Method action;
    final Class<? extends Controller> controllerClass;
    Class<? extends View> defaultViewClass; // filled in later by addDefaultView
    final EnumSet<WebApp.HTTP> methods;

    Dest(String path, Method method, Class<? extends Controller> cls,
         List<String> pathParams, WebApp.HTTP httpMethod) {
      prefix = checkNotNull(path);
      action = checkNotNull(method);
      controllerClass = checkNotNull(cls);
      this.pathParams = pathParams != null
          ? ImmutableList.copyOf(pathParams) : EMPTY_LIST;
      methods = EnumSet.of(httpMethod);
    }
  }

  Class<?> hostClass; // starting point to look for default classes

  // TreeMap so lowerEntry() can be used for prefix lookups in lookupRoute().
  final TreeMap<String, Dest> routes = Maps.newTreeMap(); // path->dest

  /** Adds a route and also locates a default view for its controller. */
  synchronized Dest add(WebApp.HTTP httpMethod, String path,
                        Class<? extends Controller> cls,
                        String action, List<String> names) {
    return addWithOptionalDefaultView(
        httpMethod, path, cls, action, names, true);
  }

  /** Adds a route without attempting to locate a default view class. */
  synchronized Dest addWithoutDefaultView(WebApp.HTTP httpMethod,
      String path, Class<? extends Controller> cls, String action,
      List<String> names) {
    return addWithOptionalDefaultView(httpMethod, path, cls, action,
        names, false);
  }

  /**
   * Add a route to the router.
   * e.g., add(GET, "/foo/show", FooController.class, "show", [name...]);
   * The name list is from /foo/show/:name/...
   */
  synchronized Dest addWithOptionalDefaultView(WebApp.HTTP httpMethod,
      String path, Class<? extends Controller> cls, String action,
      List<String> names, boolean defaultViewNeeded) {
    LOG.debug("adding {}({})->{}#{}", new Object[]{path, names, cls, action});
    Dest dest = addController(httpMethod, path, cls, action, names);
    if (defaultViewNeeded) {
      addDefaultView(dest);
    }
    return dest;
  }

  /**
   * Registers (or extends) the destination for a path. An existing route for
   * the same path just gains the additional HTTP method.
   */
  private Dest addController(WebApp.HTTP httpMethod, String path,
                             Class<? extends Controller> cls,
                             String action, List<String> names) {
    try {
      // Look for the method in all public methods declared in the class
      // or inherited by the class.
      // Note: this does not distinguish methods with the same signature
      // but different return types.
      // TODO: We may want to deal with methods that take parameters in the future
      Method method = cls.getMethod(action);
      Dest dest = routes.get(path);
      if (dest == null) {
        // new path: create and register the destination
        method.setAccessible(true); // avoid any runtime checks
        dest = new Dest(path, method, cls, names, httpMethod);
        routes.put(path, dest);
        return dest;
      }
      // existing path: allow the additional HTTP method
      dest.methods.add(httpMethod);
      return dest;
    } catch (NoSuchMethodException nsme) {
      throw new WebAppException(action + "() not found in " + cls);
    } catch (SecurityException se) {
      throw new WebAppException("Security exception thrown for " + action +
          "() in " + cls);
    }
  }

  /**
   * Derives the default view class (FooView for FooController, or XView for
   * controller X) from the controller's own package.
   */
  private void addDefaultView(Dest dest) {
    String controllerName = dest.controllerClass.getSimpleName();
    if (controllerName.endsWith("Controller")) {
      // strip the trailing "Controller" (10 characters)
      controllerName = controllerName.substring(0,
          controllerName.length() - 10);
    }
    dest.defaultViewClass = find(View.class,
                                 dest.controllerClass.getPackage().getName(),
                                 join(controllerName + "View"));
  }

  void setHostClass(Class<?> cls) {
    hostClass = cls;
  }

  /**
   * Resolve a path to a destination.
   */
  synchronized Dest resolve(String httpMethod, String path) {
    WebApp.HTTP method = WebApp.HTTP.valueOf(httpMethod); // can throw
    Dest dest = lookupRoute(method, path);
    if (dest == null) {
      // no registered route: fall back to /controller/action convention
      return resolveDefault(method, path);
    }
    return dest;
  }

  /**
   * Longest-prefix search over the sorted route map: try the full path first,
   * then walk to shorter candidate keys via lowerEntry() until a match or
   * exhaustion.
   */
  private Dest lookupRoute(WebApp.HTTP method, String path) {
    String key = path;
    do {
      Dest dest = routes.get(key);
      if (dest != null && methodAllowed(method, dest)) {
        if ((Object)key == path) { // shut up warnings
          LOG.debug("exact match for {}: {}", key, dest.action);
          return dest;
        } else if (isGoodMatch(dest, path)) {
          LOG.debug("prefix match2 for {}: {}", key, dest.action);
          return dest;
        }
        return resolveAction(method, dest, path);
      }
      Map.Entry<String, Dest> lower = routes.lowerEntry(key);
      if (lower == null) {
        return null;
      }
      dest = lower.getValue();
      if (prefixMatches(dest, path)) {
        if (methodAllowed(method, dest)) {
          if (isGoodMatch(dest, path)) {
            LOG.debug("prefix match for {}: {}", lower.getKey(), dest.action);
            return dest;
          }
          return resolveAction(method, dest, path);
        }
        // check other candidates
        int slashPos = key.lastIndexOf('/');
        // key is always a prefix of path here (it starts as path and is only
        // ever replaced by path.substring(...)), so indexing path with a
        // position found in key is safe
        key = slashPos > 0 ? path.substring(0, slashPos) : "/";
      } else {
        key = "/";
      }
    } while (true);
  }

  static boolean methodAllowed(WebApp.HTTP method, Dest dest) {
    // Accept all methods by default, unless explicitly configured otherwise.
    return dest.methods.contains(method) ||
        (dest.methods.size() == 1 && dest.methods.contains(WebApp.HTTP.GET));
  }

  static boolean prefixMatches(Dest dest, String path) {
    LOG.debug("checking prefix {}{} for path: {}",
        new Object[]{dest.prefix, dest.pathParams, path});
    if (!path.startsWith(dest.prefix)) {
      return false;
    }
    int prefixLen = dest.prefix.length();
    // the character right after a non-root prefix must be a path separator
    if (prefixLen > 1 && path.length() > prefixLen &&
        path.charAt(prefixLen) != '/') {
      return false;
    }
    // prefix is / or prefix is path or prefix/...
    return true;
  }

  static boolean isGoodMatch(Dest dest, String path) {
    if (SLASH.countIn(dest.prefix) > 1) {
      return true;
    }
    // We want to match (/foo, :a) for /foo/bar/blah and (/, :a) for /123
    // but NOT / for /foo or (/, :a) for /foo or /foo/ because default route
    // (FooController#index) for /foo and /foo/ takes precedence.
    if (dest.prefix.length() == 1) {
      return dest.pathParams.size() > 0 && !maybeController(path);
    }
    return dest.pathParams.size() > 0 || // /foo should match /foo/
        (path.endsWith("/") && SLASH.countIn(path) == 2);
  }

  static boolean maybeController(String path) {
    return controllerRe.matcher(path).matches();
  }

  // Assume /controller/action style path
  private Dest resolveDefault(WebApp.HTTP method, String path) {
    List<String> parts = WebApp.parseRoute(path);
    String controller = parts.get(WebApp.R_CONTROLLER);
    String action = parts.get(WebApp.R_ACTION);
    // NameController is encouraged default
    Class<? extends Controller> cls = find(Controller.class,
        join(controller, "Controller"));
    if (cls == null) {
      cls = find(Controller.class, controller);
    }
    if (cls == null) {
      throw new WebAppException(join(path, ": controller for ", controller,
          " not found"));
    }
    return add(method, defaultPrefix(controller, action), cls, action, null);
  }

  // Canonical registration prefix for a conventionally-resolved route.
  private String defaultPrefix(String controller, String action) {
    if (controller.equals("default") && action.equals("index")) {
      return "/";
    }
    if (action.equals("index")) {
      return join('/', controller);
    }
    return pjoin("", controller, action);
  }

  private <T> Class<? extends T> find(Class<T> cls, String cname) {
    String pkg = hostClass.getPackage().getName();
    return find(cls, pkg, cname);
  }

  // Try pkg.Name, then pkg.webapp.Name, then HostClass$Name, in that order.
  private <T> Class<? extends T> find(Class<T> cls, String pkg,
                                      String cname) {
    String name = StringUtils.capitalize(cname);
    Class<? extends T> found = load(cls, djoin(pkg, name));
    if (found == null) {
      found = load(cls, djoin(pkg, "webapp", name));
    }
    if (found == null) {
      found = load(cls, join(hostClass.getName(), '$', name));
    }
    return found;
  }

  /** Loads className and returns it only if it is assignable to cls. */
  @SuppressWarnings("unchecked")
  private <T> Class<? extends T> load(Class<T> cls, String className) {
    LOG.debug("trying: {}", className);
    try {
      Class<?> found = Class.forName(className);
      if (cls.isAssignableFrom(found)) {
        LOG.debug("found {}", className);
        return (Class<? extends T>) found;
      }
      LOG.warn("found a {} but it's not a {}", className, cls.getName());
    } catch (ClassNotFoundException e) {
      // OK in this case.
    }
    return null;
  }

  // Dest may contain a candidate controller
  private Dest resolveAction(WebApp.HTTP method, Dest dest, String path) {
    if (dest.prefix.length() == 1) {
      return null;
    }
    checkState(!isGoodMatch(dest, path), dest.prefix);
    checkState(SLASH.countIn(path) > 1, path);
    List<String> parts = WebApp.parseRoute(path);
    String controller = parts.get(WebApp.R_CONTROLLER);
    String action = parts.get(WebApp.R_ACTION);
    return add(method, pjoin("", controller, action), dest.controllerClass,
        action, null);
  }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.operator;

import com.facebook.presto.RowPagesBuilder;
import com.facebook.presto.operator.HashBuilderOperator.HashBuilderOperatorFactory;
import com.facebook.presto.spi.Page;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.sql.planner.plan.PlanNodeId;
import com.facebook.presto.testing.TestingTaskContext;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.primitives.Ints;
import io.airlift.units.DataSize;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.RunnerException;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;
import org.openjdk.jmh.runner.options.VerboseMode;

import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.Random;
import java.util.concurrent.ExecutorService;

import static com.facebook.presto.RowPagesBuilder.rowPagesBuilder;
import static com.facebook.presto.SessionTestUtils.TEST_SESSION;
import static com.facebook.presto.spi.type.BigintType.BIGINT;
import static com.facebook.presto.spi.type.VarcharType.VARCHAR;
import static com.facebook.presto.util.Threads.checkNotSameThreadExecutor;
import static io.airlift.concurrent.Threads.daemonThreadsNamed;
import static io.airlift.units.DataSize.Unit.GIGABYTE;
import static java.lang.String.format;
import static java.util.concurrent.Executors.newCachedThreadPool;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static org.openjdk.jmh.annotations.Mode.AverageTime;
import static org.openjdk.jmh.annotations.Scope.Thread;

/**
 * JMH micro-benchmarks for the hash-join build and probe sides:
 * {@link #benchmarkBuildHash} drives a {@code HashBuilderOperator} over generated
 * build pages, and {@link #benchmarkJoinHash} drives an inner lookup-join over
 * generated probe pages against the pre-built lookup source.
 *
 * <p>Benchmark state is held in {@link BuildContext} / {@link JoinContext};
 * JMH parameters vary the hashed columns, whether a precomputed hash channel is
 * used, and (for the join) the probe match rate.</p>
 */
@SuppressWarnings("MethodMayBeStatic")
@State(Thread)
@OutputTimeUnit(MILLISECONDS)
@BenchmarkMode(AverageTime)
@Fork(3)
@Warmup(iterations = 5)
@Measurement(iterations = 20)
public class BenchmarkHashBuildAndJoinOperators
{
    // Operator / plan-node ids are arbitrary test values; they only need to be distinct.
    private static final int HASH_BUILD_OPERATOR_ID = 1;
    private static final int HASH_JOIN_OPERATOR_ID = 2;
    private static final PlanNodeId TEST_PLAN_NODE_ID = new PlanNodeId("test");

    /**
     * Per-trial state for the build side: generates {@code BUILD_ROWS_NUMBER} rows of
     * (VARCHAR, BIGINT, BIGINT) sequence data and eagerly builds the lookup source once
     * in {@link #setup()} so the join benchmark measures only the probe side.
     */
    @State(Thread)
    public static class BuildContext
    {
        protected static final int ROWS_PER_PAGE = 1024;
        protected static final int BUILD_ROWS_NUMBER = 700_000;

        // Which columns participate in the hash: column 0 (varchar), column 1 (bigint), or all three.
        @Param({"varchar", "bigint", "all"})
        protected String hashColumns;

        // Whether pages carry a precomputed hash channel (see RowPagesBuilder).
        @Param({"false", "true"})
        protected boolean buildHashEnabled;

        protected ExecutorService executor;
        protected List<Page> buildPages;
        protected Optional<Integer> hashChannel;
        protected List<Type> types;
        protected List<Integer> hashChannels;
        protected LookupSourceFactory lookupSourceFactory;

        /**
         * Maps the {@code hashColumns} parameter to channel indexes, creates the executor,
         * generates the build pages, and builds the lookup source by running the build
         * benchmark once outside of measurement.
         */
        @Setup
        public void setup()
        {
            switch (hashColumns) {
                case "varchar":
                    hashChannels = Ints.asList(0);
                    break;
                case "bigint":
                    hashChannels = Ints.asList(1);
                    break;
                case "all":
                    hashChannels = Ints.asList(0, 1, 2);
                    break;
                default:
                    throw new UnsupportedOperationException(format("Unknown hashColumns value [%s]", hashColumns));
            }
            executor = newCachedThreadPool(daemonThreadsNamed("test-%s"));

            initializeBuildPages();
            lookupSourceFactory = new BenchmarkHashBuildAndJoinOperators().benchmarkBuildHash(this);
        }

        public TaskContext createTaskContext()
        {
            return TestingTaskContext.createTaskContext(
                    checkNotSameThreadExecutor(executor, "executor is null"),
                    TEST_SESSION,
                    new DataSize(2, GIGABYTE));
        }

        public Optional<Integer> getHashChannel()
        {
            return hashChannel;
        }

        public List<Integer> getHashChannels()
        {
            return hashChannels;
        }

        public List<Type> getTypes()
        {
            return types;
        }

        public LookupSourceFactory getLookupSourceFactory()
        {
            return lookupSourceFactory;
        }

        public List<Page> getBuildPages()
        {
            return buildPages;
        }

        /**
         * Generates BUILD_ROWS_NUMBER sequence rows in pages of ROWS_PER_PAGE.
         * Row i holds (i+20, i+30, i+40), so build-side values are strictly increasing
         * and unique per column.
         */
        protected void initializeBuildPages()
        {
            RowPagesBuilder buildPagesBuilder = rowPagesBuilder(buildHashEnabled, hashChannels, ImmutableList.of(VARCHAR, BIGINT, BIGINT));

            int rows = 0;
            while (rows < BUILD_ROWS_NUMBER) {
                int newRows = Math.min(BUILD_ROWS_NUMBER - rows, ROWS_PER_PAGE);
                buildPagesBuilder.addSequencePage(newRows, rows + 20, rows + 30, rows + 40);
                buildPagesBuilder.pageBreak();
                rows += newRows;
            }
            types = buildPagesBuilder.getTypes();
            buildPages = buildPagesBuilder.build();
            hashChannel = buildPagesBuilder.getHashChannel();
        }
    }

    /**
     * Extends {@link BuildContext} with probe-side pages. {@code matchRate} controls
     * how probe rows relate to build rows: &lt; 1 makes some rows non-matching
     * (values negated), &gt; 1 duplicates matching rows.
     */
    @State(Thread)
    public static class JoinContext
            extends BuildContext
    {
        protected static final int PROBE_ROWS_NUMBER = 700_000;

        @Param({"0.1", "1", "2"})
        protected double matchRate;

        protected List<Page> probePages;

        @Setup
        public void setup()
        {
            super.setup();
            initializeProbePages();
        }

        public List<Page> getProbePages()
        {
            return probePages;
        }

        /**
         * Generates probe pages with a fixed seed (42) so every trial sees the same data.
         * Candidate row values mirror the build side (20/30/40 + row number); negated values
         * cannot match the positive build-side values.
         */
        protected void initializeProbePages()
        {
            RowPagesBuilder probePagesBuilder = rowPagesBuilder(buildHashEnabled, hashChannels, ImmutableList.of(VARCHAR, BIGINT, BIGINT));

            Random random = new Random(42);
            int remainingRows = PROBE_ROWS_NUMBER;
            int rowsInPage = 0;
            while (remainingRows > 0) {
                double roll = random.nextDouble();

                int columnA = 20 + remainingRows;
                int columnB = 30 + remainingRows;
                int columnC = 40 + remainingRows;

                int rowsCount = 1;
                if (matchRate < 1) {
                    // each row has matchRate chance to join
                    if (roll > matchRate) {
                        // generate not matched row: negated values do not exist on the build side
                        columnA *= -1;
                        columnB *= -1;
                        columnC *= -1;
                    }
                }
                else if (matchRate > 1) {
                    // each row will be repeated between one and 2*matchRate times
                    roll = roll * 2 * matchRate + 1;
                    // example for matchRate == 2:
                    // roll is within [1, 5) range
                    // rowsCount is within [1, 4] range, where each value has same probability
                    // so expected rowsCount is 2.5
                    rowsCount = (int) Math.floor(roll);
                }

                for (int i = 0; i < rowsCount; i++) {
                    if (rowsInPage >= ROWS_PER_PAGE) {
                        probePagesBuilder.pageBreak();
                        rowsInPage = 0;
                    }
                    probePagesBuilder.row(format("%d", columnA), columnB, columnC);
                    --remainingRows;
                    rowsInPage++;
                }
            }
            probePages = probePagesBuilder.build();
        }
    }

    /**
     * Feeds all build pages into a {@code HashBuilderOperator} and returns the
     * resulting lookup source factory. Fails fast if the operator does not report
     * finished after {@code finish()}.
     */
    @Benchmark
    public LookupSourceFactory benchmarkBuildHash(BuildContext buildContext)
    {
        DriverContext driverContext = buildContext.createTaskContext().addPipelineContext(true, true).addDriverContext();

        HashBuilderOperatorFactory hashBuilderOperatorFactory = new HashBuilderOperatorFactory(
                HASH_BUILD_OPERATOR_ID,
                TEST_PLAN_NODE_ID,
                buildContext.getTypes(),
                ImmutableMap.of(),
                buildContext.getHashChannels(),
                buildContext.getHashChannel(),
                false,
                Optional.empty(),
                10_000,
                1);

        Operator operator = hashBuilderOperatorFactory.createOperator(driverContext);
        for (Page page : buildContext.getBuildPages()) {
            operator.addInput(page);
        }
        operator.finish();

        if (!operator.isFinished()) {
            throw new AssertionError("Expected hash build operator to be finished");
        }

        return hashBuilderOperatorFactory.getLookupSourceFactory();
    }

    /**
     * Probes the pre-built lookup source with all probe pages through an inner
     * lookup-join operator and collects the output pages. The loop counter is a
     * safety cap so a stuck operator cannot hang the benchmark indefinitely.
     */
    @Benchmark
    public List<Page> benchmarkJoinHash(JoinContext joinContext)
    {
        LookupSourceFactory lookupSourceFactory = joinContext.getLookupSourceFactory();
        OperatorFactory joinOperatorFactory = LookupJoinOperators.innerJoin(
                HASH_JOIN_OPERATOR_ID,
                TEST_PLAN_NODE_ID,
                lookupSourceFactory,
                joinContext.getTypes(),
                joinContext.getHashChannels(),
                joinContext.getHashChannel(),
                false);

        DriverContext driverContext = joinContext.createTaskContext().addPipelineContext(true, true).addDriverContext();
        Operator joinOperator = joinOperatorFactory.createOperator(driverContext);

        Iterator<Page> input = joinContext.getProbePages().iterator();
        ImmutableList.Builder<Page> outputPages = ImmutableList.builder();

        boolean finishing = false;
        for (int loops = 0; !joinOperator.isFinished() && loops < 1_000_000; loops++) {
            if (joinOperator.needsInput()) {
                if (input.hasNext()) {
                    Page inputPage = input.next();
                    joinOperator.addInput(inputPage);
                }
                else if (!finishing) {
                    // input exhausted: tell the operator no more pages are coming (only once)
                    joinOperator.finish();
                    finishing = true;
                }
            }

            Page outputPage = joinOperator.getOutput();
            if (outputPage != null) {
                outputPages.add(outputPage);
            }
        }

        return outputPages.build();
    }

    /** Standalone entry point: runs all benchmarks in this class via the JMH runner. */
    public static void main(String[] args)
            throws RunnerException
    {
        Options options = new OptionsBuilder()
                .verbosity(VerboseMode.NORMAL)
                .include(".*" + BenchmarkHashBuildAndJoinOperators.class.getSimpleName() + ".*")
                .build();
        new Runner(options).run();
    }
}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.gamelift.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * <p>
 * Represents the input for a request operation.
 * </p>
 * <p>
 * Plain mutable data holder with fluent {@code withXxx} setters; {@link #equals(Object)} and {@link #hashCode()} are
 * null-safe and consistent with each other.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/gamelift-2015-10-01/DescribeGameSessions" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeGameSessionsRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** A unique identifier for the fleet to retrieve game sessions for; either the fleet ID or ARN value. */
    private String fleetId;

    /** A unique identifier for the game session to retrieve. */
    private String gameSessionId;

    /** A unique identifier for the alias associated with the fleet; either the alias ID or ARN value. */
    private String aliasId;

    /**
     * A fleet location to get game session details for: the fleet's home Region or a remote location, in Amazon Web
     * Services Region code format such as <code>us-west-2</code>.
     */
    private String location;

    /**
     * Game session status to filter on: <code>ACTIVE</code>, <code>TERMINATED</code>, <code>ACTIVATING</code>, or
     * <code>TERMINATING</code>. The last two are transitory and used for only very brief periods of time.
     */
    private String statusFilter;

    /** The maximum number of results to return; used with <code>NextToken</code> to page through results. */
    private Integer limit;

    /**
     * A token indicating the start of the next sequential page of results, as returned by a previous call. Leave unset
     * to start at the beginning of the result set.
     */
    private String nextToken;

    /**
     * @param fleetId
     *        A unique identifier for the fleet to retrieve game sessions for. You can use either the fleet ID or ARN
     *        value.
     */
    public void setFleetId(String fleetId) {
        this.fleetId = fleetId;
    }

    /**
     * @return A unique identifier for the fleet to retrieve game sessions for. You can use either the fleet ID or ARN
     *         value.
     */
    public String getFleetId() {
        return this.fleetId;
    }

    /**
     * @param fleetId
     *        A unique identifier for the fleet to retrieve game sessions for. You can use either the fleet ID or ARN
     *        value.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeGameSessionsRequest withFleetId(String fleetId) {
        setFleetId(fleetId);
        return this;
    }

    /**
     * @param gameSessionId
     *        A unique identifier for the game session to retrieve.
     */
    public void setGameSessionId(String gameSessionId) {
        this.gameSessionId = gameSessionId;
    }

    /**
     * @return A unique identifier for the game session to retrieve.
     */
    public String getGameSessionId() {
        return this.gameSessionId;
    }

    /**
     * @param gameSessionId
     *        A unique identifier for the game session to retrieve.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeGameSessionsRequest withGameSessionId(String gameSessionId) {
        setGameSessionId(gameSessionId);
        return this;
    }

    /**
     * @param aliasId
     *        A unique identifier for the alias associated with the fleet to retrieve game sessions for. You can use
     *        either the alias ID or ARN value.
     */
    public void setAliasId(String aliasId) {
        this.aliasId = aliasId;
    }

    /**
     * @return A unique identifier for the alias associated with the fleet to retrieve game sessions for. You can use
     *         either the alias ID or ARN value.
     */
    public String getAliasId() {
        return this.aliasId;
    }

    /**
     * @param aliasId
     *        A unique identifier for the alias associated with the fleet to retrieve game sessions for. You can use
     *        either the alias ID or ARN value.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeGameSessionsRequest withAliasId(String aliasId) {
        setAliasId(aliasId);
        return this;
    }

    /**
     * @param location
     *        A fleet location to get game session details for. You can specify a fleet's home Region or a remote
     *        location. Use the Amazon Web Services Region code format, such as <code>us-west-2</code>.
     */
    public void setLocation(String location) {
        this.location = location;
    }

    /**
     * @return A fleet location to get game session details for. You can specify a fleet's home Region or a remote
     *         location. Use the Amazon Web Services Region code format, such as <code>us-west-2</code>.
     */
    public String getLocation() {
        return this.location;
    }

    /**
     * @param location
     *        A fleet location to get game session details for. You can specify a fleet's home Region or a remote
     *        location. Use the Amazon Web Services Region code format, such as <code>us-west-2</code>.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeGameSessionsRequest withLocation(String location) {
        setLocation(location);
        return this;
    }

    /**
     * @param statusFilter
     *        Game session status to filter results on. You can filter on the following states: <code>ACTIVE</code>,
     *        <code>TERMINATED</code>, <code>ACTIVATING</code>, and <code>TERMINATING</code>. The last two are
     *        transitory and used for only very brief periods of time.
     */
    public void setStatusFilter(String statusFilter) {
        this.statusFilter = statusFilter;
    }

    /**
     * @return Game session status to filter results on. You can filter on the following states: <code>ACTIVE</code>,
     *         <code>TERMINATED</code>, <code>ACTIVATING</code>, and <code>TERMINATING</code>. The last two are
     *         transitory and used for only very brief periods of time.
     */
    public String getStatusFilter() {
        return this.statusFilter;
    }

    /**
     * @param statusFilter
     *        Game session status to filter results on. You can filter on the following states: <code>ACTIVE</code>,
     *        <code>TERMINATED</code>, <code>ACTIVATING</code>, and <code>TERMINATING</code>. The last two are
     *        transitory and used for only very brief periods of time.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeGameSessionsRequest withStatusFilter(String statusFilter) {
        setStatusFilter(statusFilter);
        return this;
    }

    /**
     * @param limit
     *        The maximum number of results to return. Use this parameter with <code>NextToken</code> to get results as
     *        a set of sequential pages.
     */
    public void setLimit(Integer limit) {
        this.limit = limit;
    }

    /**
     * @return The maximum number of results to return. Use this parameter with <code>NextToken</code> to get results
     *         as a set of sequential pages.
     */
    public Integer getLimit() {
        return this.limit;
    }

    /**
     * @param limit
     *        The maximum number of results to return. Use this parameter with <code>NextToken</code> to get results as
     *        a set of sequential pages.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeGameSessionsRequest withLimit(Integer limit) {
        setLimit(limit);
        return this;
    }

    /**
     * @param nextToken
     *        A token that indicates the start of the next sequential page of results. Use the token that is returned
     *        with a previous call to this operation. To start at the beginning of the result set, do not specify a
     *        value.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * @return A token that indicates the start of the next sequential page of results. Use the token that is returned
     *         with a previous call to this operation. To start at the beginning of the result set, do not specify a
     *         value.
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * @param nextToken
     *        A token that indicates the start of the next sequential page of results. Use the token that is returned
     *        with a previous call to this operation. To start at the beginning of the result set, do not specify a
     *        value.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeGameSessionsRequest withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Only non-null fields are included; the output format is relied on by existing callers, so keep it stable.
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getFleetId() != null)
            sb.append("FleetId: ").append(getFleetId()).append(",");
        if (getGameSessionId() != null)
            sb.append("GameSessionId: ").append(getGameSessionId()).append(",");
        if (getAliasId() != null)
            sb.append("AliasId: ").append(getAliasId()).append(",");
        if (getLocation() != null)
            sb.append("Location: ").append(getLocation()).append(",");
        if (getStatusFilter() != null)
            sb.append("StatusFilter: ").append(getStatusFilter()).append(",");
        if (getLimit() != null)
            sb.append("Limit: ").append(getLimit()).append(",");
        if (getNextToken() != null)
            sb.append("NextToken: ").append(getNextToken());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof is null-safe, so no separate null check is needed.
        if (!(obj instanceof DescribeGameSessionsRequest))
            return false;
        DescribeGameSessionsRequest other = (DescribeGameSessionsRequest) obj;
        // Objects.equals reproduces the generated null-guarded field comparisons exactly.
        return java.util.Objects.equals(other.getFleetId(), this.getFleetId())
                && java.util.Objects.equals(other.getGameSessionId(), this.getGameSessionId())
                && java.util.Objects.equals(other.getAliasId(), this.getAliasId())
                && java.util.Objects.equals(other.getLocation(), this.getLocation())
                && java.util.Objects.equals(other.getStatusFilter(), this.getStatusFilter())
                && java.util.Objects.equals(other.getLimit(), this.getLimit())
                && java.util.Objects.equals(other.getNextToken(), this.getNextToken());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation (null -> 0) as the previous
        // hand-rolled loop, so hash values are unchanged.
        return java.util.Objects.hash(getFleetId(), getGameSessionId(), getAliasId(), getLocation(), getStatusFilter(), getLimit(), getNextToken());
    }

    @Override
    public DescribeGameSessionsRequest clone() {
        // Shallow copy is sufficient: all fields are immutable String/Integer values.
        return (DescribeGameSessionsRequest) super.clone();
    }

}
package simpledb.systemtest;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.UUID;

import org.junit.Assert;

import simpledb.*;

/**
 * Shared helpers for SimpleDB system tests: random heap-file generation, tuple
 * comparison against expected results, JVM memory-footprint measurement, and
 * simple numeric-sequence analysis (constant/linear/quadratic checks).
 */
public class SystemTestUtil {
    public static final TupleDesc SINGLE_INT_DESCRIPTOR = new TupleDesc(
            new Type[]{Type.INT_TYPE});

    // Upper bound (exclusive) for randomly generated column values.
    private static final int MAX_RAND_VALUE = 1 << 16;

    /**
     * Creates and opens a random heap file with values in [0, MAX_RAND_VALUE).
     *
     * @param columnSpecification Mapping between column index and fixed value; entries
     *        override the random value for that column.
     * @param tuples if non-null, cleared and filled with the generated rows.
     */
    public static HeapFile createRandomHeapFile(
            int columns, int rows, Map<Integer, Integer> columnSpecification,
            ArrayList<ArrayList<Integer>> tuples)
            throws IOException, DbException, TransactionAbortedException {
        return createRandomHeapFile(columns, rows, MAX_RAND_VALUE, columnSpecification, tuples);
    }

    /**
     * Creates and opens a random heap file with values in [0, maxValue).
     *
     * @param columnSpecification Mapping between column index and fixed value.
     */
    public static HeapFile createRandomHeapFile(
            int columns, int rows, int maxValue, Map<Integer, Integer> columnSpecification,
            ArrayList<ArrayList<Integer>> tuples)
            throws IOException, DbException, TransactionAbortedException {
        File temp = createRandomHeapFileUnopened(columns, rows, maxValue,
                columnSpecification, tuples);
        return Utility.openHeapFile(columns, temp);
    }

    /**
     * Creates and opens a random heap file whose columns are named with the given prefix.
     */
    public static HeapFile createRandomHeapFile(
            int columns, int rows, Map<Integer, Integer> columnSpecification,
            ArrayList<ArrayList<Integer>> tuples, String colPrefix)
            throws IOException, DbException, TransactionAbortedException {
        return createRandomHeapFile(columns, rows, MAX_RAND_VALUE, columnSpecification, tuples, colPrefix);
    }

    /**
     * Creates and opens a random heap file with a bounded value range and named columns.
     */
    public static HeapFile createRandomHeapFile(
            int columns, int rows, int maxValue, Map<Integer, Integer> columnSpecification,
            ArrayList<ArrayList<Integer>> tuples, String colPrefix)
            throws IOException, DbException, TransactionAbortedException {
        File temp = createRandomHeapFileUnopened(columns, rows, maxValue,
                columnSpecification, tuples);
        return Utility.openHeapFile(columns, colPrefix, temp);
    }

    /**
     * Generates random tuples, encodes them into a temp heap-file on disk, and returns
     * the (unopened) file. The file is registered for deletion on JVM exit.
     *
     * @param tuples if non-null, cleared and filled with the generated rows so callers
     *        can compare scan results against them; if null, a private list is used.
     */
    public static File createRandomHeapFileUnopened(int columns, int rows,
            int maxValue, Map<Integer, Integer> columnSpecification,
            ArrayList<ArrayList<Integer>> tuples) throws IOException {
        if (tuples != null) {
            tuples.clear();
        } else {
            // Caller does not want the rows back; collect them locally for encoding only.
            tuples = new ArrayList<ArrayList<Integer>>(rows);
        }

        Random r = new Random();

        // Fill the tuples list with generated values
        for (int i = 0; i < rows; ++i) {
            ArrayList<Integer> tuple = new ArrayList<Integer>(columns);
            for (int j = 0; j < columns; ++j) {
                // Use the fixed value from the specification when present, else random.
                Integer columnValue = null;
                if (columnSpecification != null) columnValue = columnSpecification.get(j);
                if (columnValue == null) {
                    columnValue = r.nextInt(maxValue);
                }
                tuple.add(columnValue);
            }
            tuples.add(tuple);
        }

        // Convert the tuples list to a heap file on disk
        File temp = File.createTempFile("table", ".dat");
        temp.deleteOnExit();
        HeapFileEncoder.convert(tuples, temp, BufferPool.PAGE_SIZE, columns);
        return temp;
    }

    /** Converts a tuple of IntFields into a list of their int values, in field order. */
    public static ArrayList<Integer> tupleToList(Tuple tuple) {
        ArrayList<Integer> list = new ArrayList<Integer>();
        for (int i = 0; i < tuple.getTupleDesc().numFields(); ++i) {
            int value = ((IntField) tuple.getField(i)).getValue();
            list.add(value);
        }
        return list;
    }

    /**
     * Scans f inside a fresh transaction and asserts its contents equal {@code tuples}
     * (as a multiset). Completes the transaction afterwards.
     */
    public static void matchTuples(DbFile f, List<ArrayList<Integer>> tuples)
            throws DbException, TransactionAbortedException, IOException {
        TransactionId tid = new TransactionId();
        matchTuples(f, tid, tuples);
        Database.getBufferPool().transactionComplete(tid);
    }

    /** Scans f under the given transaction and asserts its contents equal {@code tuples}. */
    public static void matchTuples(DbFile f, TransactionId tid, List<ArrayList<Integer>> tuples)
            throws DbException, TransactionAbortedException, IOException {
        SeqScan scan = new SeqScan(tid, f.getId(), "");
        matchTuples(scan, tuples);
    }

    /**
     * Drains the iterator and asserts that the scanned tuples are exactly the expected
     * {@code tuples} (order-insensitive, multiplicity-sensitive). Fails on any extra or
     * missing tuple; at most 10 missing tuples are listed in the failure message.
     */
    public static void matchTuples(DbIterator iterator, List<ArrayList<Integer>> tuples)
            throws DbException, TransactionAbortedException, IOException {
        // Work on a copy: each scanned tuple removes one matching expected entry.
        ArrayList<ArrayList<Integer>> copy = new ArrayList<ArrayList<Integer>>(tuples);

        if (Debug.isEnabled()) {
            Debug.log("Expected tuples:");
            for (ArrayList<Integer> t : copy) {
                Debug.log("\t" + Utility.listToString(t));
            }
        }

        iterator.open();
        while (iterator.hasNext()) {
            Tuple t = iterator.next();
            ArrayList<Integer> list = tupleToList(t);
            boolean isExpected = copy.remove(list);
            Debug.log("scanned tuple: %s (%s)", t, isExpected ? "expected" : "not expected");
            if (!isExpected) {
                Assert.fail("expected tuples does not contain: " + t);
            }
        }
        iterator.close();

        if (!copy.isEmpty()) {
            // StringBuilder instead of String += in a loop (the old code was O(n^2)).
            final int MAX_TUPLES_OUTPUT = 10;
            StringBuilder msg = new StringBuilder("expected to find the following tuples:\n");
            int count = 0;
            for (ArrayList<Integer> t : copy) {
                if (count == MAX_TUPLES_OUTPUT) {
                    msg.append("[").append(copy.size() - MAX_TUPLES_OUTPUT).append(" more tuples]");
                    break;
                }
                msg.append("\t").append(Utility.listToString(t)).append("\n");
                count += 1;
            }
            Assert.fail(msg.toString());
        }
    }

    /**
     * Returns number of bytes of RAM used by JVM after calling System.gc many times.
     *
     * @return amount of RAM (in bytes) used by JVM
     */
    public static long getMemoryFootprint() {
        // Call System.gc in a loop until it stops freeing memory. This is
        // still no guarantee that all the memory is freed, since System.gc is
        // just a "hint".
        Runtime runtime = Runtime.getRuntime();

        long memAfter = runtime.totalMemory() - runtime.freeMemory();
        long memBefore = memAfter + 1;
        while (memBefore != memAfter) {
            memBefore = memAfter;
            System.gc();
            memAfter = runtime.totalMemory() - runtime.freeMemory();
        }

        return memAfter;
    }

    /**
     * Generates a unique string each time it is called.
     *
     * @return a new unique UUID as a string, using java.util.UUID
     */
    public static String getUUID() {
        return UUID.randomUUID().toString();
    }

    /**
     * Returns the sequence of consecutive differences, i.e. ret[i] = seq[i+1] - seq[i].
     * NOTE(review): throws NegativeArraySizeException for an empty input — callers
     * must pass at least one element.
     */
    private static double[] getDiff(double[] sequence) {
        double[] ret = new double[sequence.length - 1];
        for (int i = 0; i < sequence.length - 1; ++i) {
            ret[i] = sequence[i + 1] - sequence[i];
        }
        return ret;
    }

    /**
     * Checks if the sequence represents a quadratic sequence (approximately), by testing
     * whether the first differences are (approximately) linear.
     *
     * @param sequence the sampled values
     * @return ret[0] = true if sequence is quadratic (or sub-quadratic or linear),
     *         ret[1] = the coefficient of n^2
     */
    public static Object[] checkQuadratic(double[] sequence) {
        Object[] ret = checkLinear(getDiff(sequence));
        // Second difference of a*n^2 is 2a, so halve to recover the coefficient.
        ret[1] = (Double) ret[1] / 2.0;
        return ret;
    }

    /**
     * Checks if the sequence represents an arithmetic sequence (approximately), by testing
     * whether the first differences are (approximately) constant.
     *
     * @param sequence the sampled values
     * @return ret[0] = true if sequence is linear, ret[1] = the common difference
     */
    public static Object[] checkLinear(double[] sequence) {
        return checkConstant(getDiff(sequence));
    }

    /**
     * Checks if the sequence is approximately a fixed sequence (c,c,c,...), using the
     * population standard deviation with a fixed tolerance of 1.0.
     *
     * @param sequence the sampled values
     * @return ret[0] = true if sequence is constant, ret[1] = the constant (mean)
     */
    public static Object[] checkConstant(double[] sequence) {
        Object[] ret = new Object[2];

        // compute average
        double sum = 0.0;
        for (double v : sequence) {
            sum += v;
        }
        double av = sum / (sequence.length + .0);

        // compute standard deviation
        double sqsum = 0.0;
        for (double v : sequence) {
            sqsum += (v - av) * (v - av);
        }
        double std = Math.sqrt(sqsum / (sequence.length + .0));

        ret[0] = std < 1.0 ? Boolean.TRUE : Boolean.FALSE;
        ret[1] = av;
        return ret;
    }
}
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.camunda.bpm.identity.impl.ldap; import static org.camunda.bpm.engine.authorization.Permissions.READ; import static org.camunda.bpm.engine.authorization.Resources.GROUP; import static org.camunda.bpm.engine.authorization.Resources.USER; import java.io.StringWriter; import java.util.ArrayList; import java.util.Hashtable; import java.util.List; import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; import javax.naming.AuthenticationException; import javax.naming.Context; import javax.naming.NamingEnumeration; import javax.naming.NamingException; import javax.naming.directory.Attribute; import javax.naming.directory.Attributes; import javax.naming.directory.SearchResult; import javax.naming.ldap.Control; import javax.naming.ldap.InitialLdapContext; import javax.naming.ldap.LdapContext; import javax.naming.ldap.SortControl; import org.camunda.bpm.engine.BadUserRequestException; import org.camunda.bpm.engine.authorization.Permission; import org.camunda.bpm.engine.authorization.Resource; import org.camunda.bpm.engine.identity.Group; import org.camunda.bpm.engine.identity.GroupQuery; import org.camunda.bpm.engine.identity.NativeUserQuery; import org.camunda.bpm.engine.identity.Tenant; import org.camunda.bpm.engine.identity.TenantQuery; import org.camunda.bpm.engine.identity.User; import org.camunda.bpm.engine.identity.UserQuery; import org.camunda.bpm.engine.impl.AbstractQuery; import 
org.camunda.bpm.engine.impl.QueryOrderingProperty; import org.camunda.bpm.engine.impl.UserQueryImpl; import org.camunda.bpm.engine.impl.UserQueryProperty; import org.camunda.bpm.engine.impl.identity.IdentityProviderException; import org.camunda.bpm.engine.impl.identity.ReadOnlyIdentityProvider; import org.camunda.bpm.engine.impl.interceptor.CommandContext; import org.camunda.bpm.engine.impl.persistence.entity.GroupEntity; import org.camunda.bpm.engine.impl.persistence.entity.UserEntity; /** * <p>LDAP {@link ReadOnlyIdentityProvider}.</p> * * @author Daniel Meyer * */ public class LdapIdentityProviderSession implements ReadOnlyIdentityProvider { private final static Logger LOG = Logger.getLogger(LdapIdentityProviderSession.class.getName()); protected LdapConfiguration ldapConfiguration; protected LdapContext initialContext; public LdapIdentityProviderSession(LdapConfiguration ldapConfiguration) { this.ldapConfiguration = ldapConfiguration; } // Session Lifecycle ////////////////////////////////// public void flush() { // nothing to do } public void close() { if (initialContext != null) { try { initialContext.close(); } catch (Exception e) { // ignore LOG.log(Level.FINE, "exception while closing LDAP DIR CTX", e); } } } protected InitialLdapContext openContext(String userDn, String password) { Hashtable<String, String> env = new Hashtable<String, String>(); env.put(Context.INITIAL_CONTEXT_FACTORY, ldapConfiguration.getInitialContextFactory()); env.put(Context.SECURITY_AUTHENTICATION, ldapConfiguration.getSecurityAuthentication()); env.put(Context.PROVIDER_URL, ldapConfiguration.getServerUrl()); env.put(Context.SECURITY_PRINCIPAL, userDn); env.put(Context.SECURITY_CREDENTIALS, password); // for anonymous login if(ldapConfiguration.isAllowAnonymousLogin() && password.isEmpty()) { env.put(Context.SECURITY_AUTHENTICATION, "none"); } if(ldapConfiguration.isUseSsl()) { env.put(Context.SECURITY_PROTOCOL, "ssl"); } // add additional properties Map<String, String> 
contextProperties = ldapConfiguration.getContextProperties(); if(contextProperties != null) { env.putAll(contextProperties); } try { return new InitialLdapContext(env, null); } catch(AuthenticationException e) { throw new LdapAuthenticationException("Could not authenticate with LDAP server", e); } catch(NamingException e) { throw new IdentityProviderException("Could not connect to LDAP server", e); } } protected void ensureContextInitialized() { if(initialContext == null) { initialContext = openContext(ldapConfiguration.getManagerDn(), ldapConfiguration.getManagerPassword()); } } // Users ///////////////////////////////////////////////// public User findUserById(String userId) { return createUserQuery(org.camunda.bpm.engine.impl.context.Context.getCommandContext()) .userId(userId) .singleResult(); } public UserQuery createUserQuery() { return new LdapUserQueryImpl(org.camunda.bpm.engine.impl.context.Context.getProcessEngineConfiguration().getCommandExecutorTxRequired()); } public UserQueryImpl createUserQuery(CommandContext commandContext) { return new LdapUserQueryImpl(); } @Override public NativeUserQuery createNativeUserQuery() { throw new BadUserRequestException("Native user queries are not supported for LDAP identity service provider."); } public long findUserCountByQueryCriteria(LdapUserQueryImpl query) { ensureContextInitialized(); return findUserByQueryCriteria(query).size(); } public List<User> findUserByQueryCriteria(LdapUserQueryImpl query) { ensureContextInitialized(); if(query.getGroupId() != null) { // if restriction on groupId is provided, we need to search in group tree first, look for the group and then further restrict on the members return findUsersByGroupId(query); } else { String userBaseDn = composeDn(ldapConfiguration.getUserSearchBase(), ldapConfiguration.getBaseDn()); return findUsersWithoutGroupId(query, userBaseDn, false); } } protected List<User> findUsersByGroupId(LdapUserQueryImpl query) { String baseDn = 
getDnForGroup(query.getGroupId()); // compose group search filter String groupSearchFilter = "(& " + ldapConfiguration.getGroupSearchFilter() + ")"; NamingEnumeration<SearchResult> enumeration = null; try { enumeration = initialContext.search(baseDn, groupSearchFilter, ldapConfiguration.getSearchControls()); List<String> groupMemberList = new ArrayList<String>(); // first find group while (enumeration.hasMoreElements()) { SearchResult result = enumeration.nextElement(); Attribute memberAttribute = result.getAttributes().get(ldapConfiguration.getGroupMemberAttribute()); if (null != memberAttribute) { NamingEnumeration<?> allMembers = memberAttribute.getAll(); // iterate group members while (allMembers.hasMoreElements()) { groupMemberList.add((String) allMembers.nextElement()); } } } List<User> userList = new ArrayList<User>(); String userBaseDn = composeDn(ldapConfiguration.getUserSearchBase(), ldapConfiguration.getBaseDn()); int memberCount = 0; for (String memberId : groupMemberList) { if (userList.size() < query.getMaxResults() && memberCount >= query.getFirstResult()) { if (ldapConfiguration.isUsePosixGroups()) { query.userId(memberId); } List<User> users = ldapConfiguration.isUsePosixGroups() ? 
findUsersWithoutGroupId(query, userBaseDn, true) : findUsersWithoutGroupId(query, memberId, true); if (users.size() > 0) { userList.add(users.get(0)); } } memberCount++; } return userList; } catch (NamingException e) { throw new IdentityProviderException("Could not query for users", e); } finally { try { if (enumeration != null) { enumeration.close(); } } catch (Exception e) { // ignore silently } } } public List<User> findUsersWithoutGroupId(LdapUserQueryImpl query, String userBaseDn, boolean ignorePagination) { if(ldapConfiguration.isSortControlSupported()) { applyRequestControls(query); } NamingEnumeration<SearchResult> enumeration = null; try { String filter = getUserSearchFilter(query); enumeration = initialContext.search(userBaseDn, filter, ldapConfiguration.getSearchControls()); // perform client-side paging int resultCount = 0; List<User> userList = new ArrayList<User>(); while (enumeration.hasMoreElements() && (userList.size() < query.getMaxResults() || ignorePagination)) { SearchResult result = enumeration.nextElement(); UserEntity user = transformUser(result); if(isAuthenticatedUser(user) || isAuthorized(READ, USER, user.getId())) { if(resultCount >= query.getFirstResult() || ignorePagination) { userList.add(user); } resultCount ++; } } return userList; } catch (NamingException e) { throw new IdentityProviderException("Could not query for users", e); } finally { try { if (enumeration != null) { enumeration.close(); } } catch (Exception e) { // ignore silently } } } public boolean checkPassword(String userId, String password) { // prevent a null password if(password == null) { return false; } // engine can't work without users if(userId == null || userId.isEmpty()) { return false; } /* * We only allow login with no password if anonymous login is set. * RFC allows such a behavior but discourages the usage so we provide it for * user which have an ldap with anonymous login. 
*/ if(!ldapConfiguration.isAllowAnonymousLogin() && password.equals("")) { return false; } // first search for user using manager DN LdapUserEntity user = (LdapUserEntity) findUserById(userId); close(); if(user == null) { return false; } else { try { // bind authenticate for user + supplied password openContext(user.getDn(), password); return true; } catch(LdapAuthenticationException e) { return false; } } } protected String getUserSearchFilter(LdapUserQueryImpl query) { StringWriter search = new StringWriter(); search.write("(&"); // restrict to users search.write(ldapConfiguration.getUserSearchFilter()); // add additional filters from query if(query.getId() != null) { addFilter(ldapConfiguration.getUserIdAttribute(), escapeLDAPSearchFilter(query.getId()), search); } if(query.getIds() != null && query.getIds().length > 0) { // wrap ids in OR statement search.write("(|"); for (String userId : query.getIds()) { addFilter(ldapConfiguration.getUserIdAttribute(), escapeLDAPSearchFilter(userId), search); } search.write(")"); } if(query.getEmail() != null) { addFilter(ldapConfiguration.getUserEmailAttribute(), query.getEmail(), search); } if(query.getEmailLike() != null) { addFilter(ldapConfiguration.getUserEmailAttribute(), query.getEmailLike(), search); } if(query.getFirstName() != null) { addFilter(ldapConfiguration.getUserFirstnameAttribute(), query.getFirstName(), search); } if(query.getFirstNameLike() != null) { addFilter(ldapConfiguration.getUserFirstnameAttribute(), query.getFirstNameLike(), search); } if(query.getLastName() != null) { addFilter(ldapConfiguration.getUserLastnameAttribute(), query.getLastName(), search); } if(query.getLastNameLike() != null) { addFilter(ldapConfiguration.getUserLastnameAttribute(), query.getLastNameLike(), search); } search.write(")"); return search.toString(); } // Groups /////////////////////////////////////////////// public Group findGroupById(String groupId) { return 
createGroupQuery(org.camunda.bpm.engine.impl.context.Context.getCommandContext()) .groupId(groupId) .singleResult(); } public GroupQuery createGroupQuery() { return new LdapGroupQuery(org.camunda.bpm.engine.impl.context.Context.getProcessEngineConfiguration().getCommandExecutorTxRequired()); } public GroupQuery createGroupQuery(CommandContext commandContext) { return new LdapGroupQuery(); } public long findGroupCountByQueryCriteria(LdapGroupQuery ldapGroupQuery) { ensureContextInitialized(); return findGroupByQueryCriteria(ldapGroupQuery).size(); } public List<Group> findGroupByQueryCriteria(LdapGroupQuery query) { ensureContextInitialized(); String groupBaseDn = composeDn(ldapConfiguration.getGroupSearchBase(), ldapConfiguration.getBaseDn()); if(ldapConfiguration.isSortControlSupported()) { applyRequestControls(query); } NamingEnumeration<SearchResult> enumeration = null; try { String filter = getGroupSearchFilter(query); enumeration = initialContext.search(groupBaseDn, filter, ldapConfiguration.getSearchControls()); // perform client-side paging int resultCount = 0; List<Group> groupList = new ArrayList<Group>(); while (enumeration.hasMoreElements() && groupList.size() < query.getMaxResults()) { SearchResult result = enumeration.nextElement(); GroupEntity group = transformGroup(result); if(isAuthorized(READ, GROUP, group.getId())) { if(resultCount >= query.getFirstResult()) { groupList.add(group); } resultCount ++; } } return groupList; } catch (NamingException e) { throw new IdentityProviderException("Could not query for users", e); } finally { try { if (enumeration != null) { enumeration.close(); } } catch (Exception e) { // ignore silently } } } protected String getGroupSearchFilter(LdapGroupQuery query) { StringWriter search = new StringWriter(); search.write("(&"); // restrict to groups search.write(ldapConfiguration.getGroupSearchFilter()); // add additional filters from query if(query.getId() != null) { addFilter(ldapConfiguration.getGroupIdAttribute(), 
query.getId(), search); } if(query.getIds() != null && query.getIds().length > 0) { search.write("(|"); for (String id : query.getIds()) { addFilter(ldapConfiguration.getGroupIdAttribute(), id, search); } search.write(")"); } if(query.getName() != null) { addFilter(ldapConfiguration.getGroupNameAttribute(), query.getName(), search); } if(query.getNameLike() != null) { addFilter(ldapConfiguration.getGroupNameAttribute(), query.getNameLike(), search); } if(query.getUserId() != null) { String userDn = null; if(ldapConfiguration.isUsePosixGroups()) { userDn = query.getUserId(); } else { userDn = getDnForUser(query.getUserId()); } addFilter(ldapConfiguration.getGroupMemberAttribute(), escapeLDAPSearchFilter(userDn), search); } search.write(")"); return search.toString(); } // Utils //////////////////////////////////////////// protected String getDnForUser(String userId) { LdapUserEntity user = (LdapUserEntity) createUserQuery(org.camunda.bpm.engine.impl.context.Context.getCommandContext()) .userId(userId) .singleResult(); if(user == null) { return ""; } else { return user.getDn(); } } protected String getDnForGroup(String groupId) { LdapGroupEntity group = (LdapGroupEntity) createGroupQuery(org.camunda.bpm.engine.impl.context.Context.getCommandContext()) .groupId(groupId) .singleResult(); if(group == null) { return ""; } else { return group.getDn(); } } protected String getStringAttributeValue(String attrName, Attributes attributes) throws NamingException { Attribute attribute = attributes.get(attrName); if(attribute != null){ return (String) attribute.get(); } else { return null; } } protected void addFilter(String attributeName, String attributeValue, StringWriter writer) { writer.write("("); writer.write(attributeName); writer.write("="); writer.write(attributeValue); writer.write(")"); } protected LdapUserEntity transformUser(SearchResult result) throws NamingException { final Attributes attributes = result.getAttributes(); LdapUserEntity user = new 
LdapUserEntity(); user.setDn(result.getNameInNamespace()); user.setId(getStringAttributeValue(ldapConfiguration.getUserIdAttribute(), attributes)); user.setFirstName(getStringAttributeValue(ldapConfiguration.getUserFirstnameAttribute(), attributes)); user.setLastName(getStringAttributeValue(ldapConfiguration.getUserLastnameAttribute(), attributes)); user.setEmail(getStringAttributeValue(ldapConfiguration.getUserEmailAttribute(), attributes)); return user; } protected GroupEntity transformGroup(SearchResult result) throws NamingException { final Attributes attributes = result.getAttributes(); LdapGroupEntity group = new LdapGroupEntity(); group.setDn(result.getNameInNamespace()); group.setId(getStringAttributeValue(ldapConfiguration.getGroupIdAttribute(), attributes)); group.setName(getStringAttributeValue(ldapConfiguration.getGroupNameAttribute(), attributes)); group.setType(getStringAttributeValue(ldapConfiguration.getGroupTypeAttribute(), attributes)); return group; } protected void applyRequestControls(AbstractQuery<?, ?> query) { try { List<Control> controls = new ArrayList<Control>(); List<QueryOrderingProperty> orderBy = query.getOrderingProperties(); if(orderBy != null) { for (QueryOrderingProperty orderingProperty : orderBy) { String propertyName = orderingProperty.getQueryProperty().getName(); if(UserQueryProperty.USER_ID.getName().equals(propertyName)) { controls.add(new SortControl(ldapConfiguration.getUserIdAttribute(), Control.CRITICAL)); } else if(UserQueryProperty.EMAIL.getName().equals(propertyName)) { controls.add(new SortControl(ldapConfiguration.getUserEmailAttribute(), Control.CRITICAL)); } else if(UserQueryProperty.FIRST_NAME.getName().equals(propertyName)) { controls.add(new SortControl(ldapConfiguration.getUserFirstnameAttribute(), Control.CRITICAL)); } else if(UserQueryProperty.LAST_NAME.getName().equals(propertyName)) { controls.add(new SortControl(ldapConfiguration.getUserLastnameAttribute(), Control.CRITICAL)); } } } 
initialContext.setRequestControls(controls.toArray(new Control[0])); } catch (Exception e) { throw new IdentityProviderException("Exception while setting paging settings", e); } } protected String composeDn(String... parts) { StringWriter resultDn = new StringWriter(); for (int i = 0; i < parts.length; i++) { String part = parts[i]; if(part == null || part.length()==0) { continue; } if(part.endsWith(",")) { part = part.substring(part.length()-2, part.length()-1); } if(part.startsWith(",")) { part = part.substring(1); } String currentDn = resultDn.toString(); if(!currentDn.endsWith(",") && currentDn.length()>0) { resultDn.write(","); } resultDn.write(part); } return resultDn.toString(); } /** * @return true if the passed-in user is currently authenticated */ protected boolean isAuthenticatedUser(UserEntity user) { if(user.getId() == null) { return false; } return user.getId().equals(org.camunda.bpm.engine.impl.context.Context.getCommandContext().getAuthenticatedUserId()); } protected boolean isAuthorized(Permission permission, Resource resource, String resourceId) { return !ldapConfiguration.isAuthorizationCheckEnabled() || org.camunda.bpm.engine.impl.context.Context.getCommandContext() .getAuthorizationManager() .isAuthorized(permission, resource, resourceId); } // Based on https://www.owasp.org/index.php/Preventing_LDAP_Injection_in_Java protected final String escapeLDAPSearchFilter(String filter) { StringBuilder sb = new StringBuilder(); for (int i = 0; i < filter.length(); i++) { char curChar = filter.charAt(i); switch (curChar) { case '\\': sb.append("\\5c"); break; case '*': sb.append("\\2a"); break; case '(': sb.append("\\28"); break; case ')': sb.append("\\29"); break; case '\u0000': sb.append("\\00"); break; default: sb.append(curChar); } } return sb.toString(); } @Override public TenantQuery createTenantQuery() { return new LdapTenantQuery(org.camunda.bpm.engine.impl.context.Context.getProcessEngineConfiguration().getCommandExecutorTxRequired()); } 
@Override public TenantQuery createTenantQuery(CommandContext commandContext) { return new LdapTenantQuery(); } @Override public Tenant findTenantById(String id) { // since multi-tenancy is not supported for the LDAP plugin, always return null return null; } }
package org.springframework.security.oauth2.client;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.IOException;
import java.lang.reflect.Field;
import java.net.URI;
import java.util.Collections;
import java.util.Date;
import java.util.concurrent.atomic.AtomicBoolean;

import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.HttpStatus;
import org.springframework.http.client.ClientHttpRequest;
import org.springframework.http.client.ClientHttpRequestFactory;
import org.springframework.http.client.ClientHttpResponse;
import org.springframework.security.access.AccessDeniedException;
import org.springframework.security.oauth2.client.http.AccessTokenRequiredException;
import org.springframework.security.oauth2.client.resource.BaseOAuth2ProtectedResourceDetails;
import org.springframework.security.oauth2.client.resource.OAuth2ProtectedResourceDetails;
import org.springframework.security.oauth2.client.resource.UserRedirectRequiredException;
import org.springframework.security.oauth2.client.token.AccessTokenProvider;
import org.springframework.security.oauth2.client.token.AccessTokenProviderChain;
import org.springframework.security.oauth2.client.token.AccessTokenRequest;
import org.springframework.security.oauth2.common.DefaultOAuth2AccessToken;
import org.springframework.security.oauth2.common.OAuth2AccessToken;
import org.springframework.security.oauth2.common.OAuth2RefreshToken;
import org.springframework.util.ReflectionUtils;
import org.springframework.web.client.RequestCallback;
import org.springframework.web.client.ResponseExtractor;
import org.springframework.web.util.UriTemplate;

/**
 * Unit tests for {@code OAuth2RestTemplate}: header-based token propagation,
 * query-parameter token appending/encoding, retry behavior on access-denied,
 * token expiry / clock-skew handling, and context reset on invalid tokens.
 *
 * @author Ryan Heaton
 * @author Dave Syer
 */
public class OAuth2RestTemplateTests {

  private BaseOAuth2ProtectedResourceDetails resource;

  private OAuth2RestTemplate restTemplate;

  private AccessTokenProvider accessTokenProvider = Mockito.mock(AccessTokenProvider.class);

  private ClientHttpRequest request;

  private HttpHeaders headers;

  /**
   * Builds a template over a resource with a custom token name and stubs a
   * mock request that always returns HTTP 200.
   */
  @Before
  public void open() throws Exception {
    resource = new BaseOAuth2ProtectedResourceDetails();
    // Facebook and older specs:
    resource.setTokenName("bearer_token");
    restTemplate = new OAuth2RestTemplate(resource);
    restTemplate.setAccessTokenProvider(accessTokenProvider);
    request = Mockito.mock(ClientHttpRequest.class);
    headers = new HttpHeaders();
    Mockito.when(request.getHeaders()).thenReturn(headers);
    ClientHttpResponse response = Mockito.mock(ClientHttpResponse.class);
    HttpStatus statusCode = HttpStatus.OK;
    Mockito.when(response.getStatusCode()).thenReturn(statusCode);
    Mockito.when(request.execute()).thenReturn(response);
  }

  // a non-"Bearer" token type should be propagated verbatim into the Authorization header
  @Test
  public void testNonBearerToken() throws Exception {
    DefaultOAuth2AccessToken token = new DefaultOAuth2AccessToken("12345");
    token.setTokenType("MINE");
    restTemplate.getOAuth2ClientContext().setAccessToken(token);
    ClientHttpRequest http = restTemplate.createRequest(URI.create("https://nowhere.com/api/crap"), HttpMethod.GET);
    String auth = http.getHeaders().getFirst("Authorization");
    assertTrue(auth.startsWith("MINE "));
  }

  // a custom OAuth2RequestAuthenticator replaces the default header-based authentication
  @Test
  public void testCustomAuthenticator() throws Exception {
    DefaultOAuth2AccessToken token = new DefaultOAuth2AccessToken("12345");
    token.setTokenType("MINE");
    restTemplate.setAuthenticator(new OAuth2RequestAuthenticator() {
      @Override
      public void authenticate(OAuth2ProtectedResourceDetails resource, OAuth2ClientContext clientContext,
          ClientHttpRequest req) {
        req.getHeaders().set("X-Authorization",
            clientContext.getAccessToken().getTokenType() + " " + "Nah-nah-na-nah-nah");
      }
    });
    restTemplate.getOAuth2ClientContext().setAccessToken(token);
    ClientHttpRequest http = restTemplate.createRequest(URI.create("https://nowhere.com/api/crap"), HttpMethod.GET);
    String auth = http.getHeaders().getFirst("X-Authorization");
    assertEquals("MINE Nah-nah-na-nah-nah", auth);
  }

  /**
   * tests appendQueryParameter
   */
  @Test
  public void testAppendQueryParameter() throws Exception {
    OAuth2AccessToken token = new DefaultOAuth2AccessToken("12345");
    URI appended = restTemplate.appendQueryParameter(URI.create("https://graph.facebook.com/search?type=checkin"),
        token);
    assertEquals("https://graph.facebook.com/search?type=checkin&bearer_token=12345", appended.toString());
  }

  /**
   * tests appendQueryParameter
   */
  @Test
  public void testAppendQueryParameterWithNoExistingParameters() throws Exception {
    OAuth2AccessToken token = new DefaultOAuth2AccessToken("12345");
    URI appended = restTemplate.appendQueryParameter(URI.create("https://graph.facebook.com/search"), token);
    assertEquals("https://graph.facebook.com/search?bearer_token=12345", appended.toString());
  }

  /**
   * tests encoding of access token value
   */
  @Test
  public void testDoubleEncodingOfParameterValue() throws Exception {
    // the '/' in the token must be percent-encoded exactly once
    OAuth2AccessToken token = new DefaultOAuth2AccessToken("1/qIxxx");
    URI appended = restTemplate.appendQueryParameter(URI.create("https://graph.facebook.com/search"), token);
    assertEquals("https://graph.facebook.com/search?bearer_token=1%2FqIxxx", appended.toString());
  }

  /**
   * tests no double encoding of existing query parameter
   */
  @Test
  public void testNonEncodingOfUriTemplate() throws Exception {
    OAuth2AccessToken token = new DefaultOAuth2AccessToken("12345");
    UriTemplate uriTemplate = new UriTemplate("https://graph.facebook.com/fql?q={q}");
    URI expanded = uriTemplate.expand("[q: fql]");
    URI appended = restTemplate.appendQueryParameter(expanded, token);
    assertEquals("https://graph.facebook.com/fql?q=%5Bq:%20fql%5D&bearer_token=12345", appended.toString());
  }

  /**
   * tests URI with fragment value
   */
  @Test
  public void testFragmentUri() throws Exception {
    // the token parameter must be inserted before the fragment, not after it
    OAuth2AccessToken token = new DefaultOAuth2AccessToken("1234");
    URI appended = restTemplate.appendQueryParameter(URI.create("https://graph.facebook.com/search#foo"), token);
    assertEquals("https://graph.facebook.com/search?bearer_token=1234#foo", appended.toString());
  }

  /**
   * tests encoding of access token value passed in protected requests ref: SECOAUTH-90
   */
  @Test
  public void testDoubleEncodingOfAccessTokenValue() throws Exception {
    // try with fictitious token value with many characters to encode
    OAuth2AccessToken token = new DefaultOAuth2AccessToken("1 qI+x:y=z");
    // System.err.println(UriUtils.encodeQueryParam(token.getValue(), "UTF-8"));
    URI appended = restTemplate.appendQueryParameter(URI.create("https://graph.facebook.com/search"), token);
    assertEquals("https://graph.facebook.com/search?bearer_token=1+qI%2Bx%3Ay%3Dz", appended.toString());
  }

  // with no token in the context, an AccessTokenRequiredException must not be retried
  @Test(expected = AccessTokenRequiredException.class)
  public void testNoRetryAccessDeniedExceptionForNoExistingToken() throws Exception {
    restTemplate.setAccessTokenProvider(new StubAccessTokenProvider());
    restTemplate.setRequestFactory(new ClientHttpRequestFactory() {
      public ClientHttpRequest createRequest(URI uri, HttpMethod httpMethod) throws IOException {
        throw new AccessTokenRequiredException(resource);
      }
    });
    restTemplate.doExecute(new URI("https://foo"), HttpMethod.GET, new NullRequestCallback(),
        new SimpleResponseExtractor());
  }

  // with an existing token, the first AccessTokenRequiredException triggers exactly one retry
  @Test
  public void testRetryAccessDeniedException() throws Exception {
    final AtomicBoolean failed = new AtomicBoolean(false);
    restTemplate.getOAuth2ClientContext().setAccessToken(new DefaultOAuth2AccessToken("TEST"));
    restTemplate.setAccessTokenProvider(new StubAccessTokenProvider());
    restTemplate.setRequestFactory(new ClientHttpRequestFactory() {
      public ClientHttpRequest createRequest(URI uri, HttpMethod httpMethod) throws IOException {
        if (!failed.get()) {
          // fail exactly once, then succeed
          failed.set(true);
          throw new AccessTokenRequiredException(resource);
        }
        return request;
      }
    });
    Boolean result = restTemplate.doExecute(new URI("https://foo"), HttpMethod.GET, new NullRequestCallback(),
        new SimpleResponseExtractor());
    assertTrue(result);
  }

  // an already-expired token must be replaced by a freshly obtained one
  @Test
  public void testNewTokenAcquiredIfExpired() throws Exception {
    DefaultOAuth2AccessToken token = new DefaultOAuth2AccessToken("TEST");
    token.setExpiration(new Date(System.currentTimeMillis() - 1000));
    restTemplate.getOAuth2ClientContext().setAccessToken(token);
    restTemplate.setAccessTokenProvider(new StubAccessTokenProvider());
    OAuth2AccessToken newToken = restTemplate.getAccessToken();
    assertNotNull(newToken);
    assertTrue(!token.equals(newToken));
  }

  // gh-1478
  // a token expiring inside the default 30s clock-skew window counts as expired
  @Test
  public void testNewTokenAcquiredWithDefaultClockSkew() {
    DefaultOAuth2AccessToken token = new DefaultOAuth2AccessToken("TEST");
    token.setExpiration(new Date(System.currentTimeMillis() + 29000)); // Default clock skew is 30 secs
    restTemplate.getOAuth2ClientContext().setAccessToken(token);
    restTemplate.setAccessTokenProvider(new StubAccessTokenProvider());
    OAuth2AccessToken newToken = restTemplate.getAccessToken();
    assertNotNull(newToken);
    assertTrue(!token.equals(newToken));
  }

  // gh-1478
  // expiry (5s away) inside a configured 6s skew -> token refreshed
  @Test
  public void testNewTokenAcquiredIfLessThanConfiguredClockSkew() {
    DefaultOAuth2AccessToken token = new DefaultOAuth2AccessToken("TEST");
    token.setExpiration(new Date(System.currentTimeMillis() + 5000));
    restTemplate.setClockSkew(6);
    restTemplate.getOAuth2ClientContext().setAccessToken(token);
    restTemplate.setAccessTokenProvider(new StubAccessTokenProvider());
    OAuth2AccessToken newToken = restTemplate.getAccessToken();
    assertNotNull(newToken);
    assertTrue(!token.equals(newToken));
  }

  // gh-1478
  // expiry (5s away) outside a configured 4s skew -> existing token kept
  @Test
  public void testNewTokenNotAcquiredIfGreaterThanConfiguredClockSkew() {
    DefaultOAuth2AccessToken token = new DefaultOAuth2AccessToken("TEST");
    token.setExpiration(new Date(System.currentTimeMillis() + 5000));
    restTemplate.setClockSkew(4);
    restTemplate.getOAuth2ClientContext().setAccessToken(token);
    restTemplate.setAccessTokenProvider(new StubAccessTokenProvider());
    OAuth2AccessToken newToken = restTemplate.getAccessToken();
    assertNotNull(newToken);
    assertTrue(token.equals(newToken));
  }

  // gh-1478
  @Test(expected = IllegalArgumentException.class)
  public void testNegativeClockSkew() {
    restTemplate.setClockSkew(-1);
  }

  // gh-1909
  // clock skew set after the provider is installed must be pushed into the chain
  // (verified via reflection on the private "clockSkew" field)
  @Test
  public void testClockSkewPropagationIntoAccessTokenProviderChain() {
    AccessTokenProvider accessTokenProvider = new AccessTokenProviderChain(Collections.<AccessTokenProvider>emptyList());
    restTemplate.setAccessTokenProvider(accessTokenProvider);
    restTemplate.setClockSkew(5);

    Field field = ReflectionUtils.findField(accessTokenProvider.getClass(), "clockSkew");
    field.setAccessible(true);

    assertEquals(5, ReflectionUtils.getField(field, accessTokenProvider));
  }

  // gh-1909
  // clock skew set before the provider is installed must also be applied to it
  @Test
  public void testApplyClockSkewOnProvidedAccessTokenProviderChain() {
    AccessTokenProvider accessTokenProvider = new AccessTokenProviderChain(Collections.<AccessTokenProvider>emptyList());
    restTemplate.setClockSkew(5);
    restTemplate.setAccessTokenProvider(accessTokenProvider);

    Field field = ReflectionUtils.findField(accessTokenProvider.getClass(), "clockSkew");
    field.setAccessible(true);

    assertEquals(5, ReflectionUtils.getField(field, accessTokenProvider));
  }

  // gh-1909
  // setting skew with a null or non-chain provider must not throw
  @Test
  public void testClockSkewPropagationSkippedForNonAccessTokenProviderChainInstances() {
    restTemplate.setClockSkew(5);
    restTemplate.setAccessTokenProvider(null);
    restTemplate.setClockSkew(5);
    restTemplate.setAccessTokenProvider(new StubAccessTokenProvider());
    restTemplate.setClockSkew(5);
  }

  // an expired token that cannot be refreshed (redirect required) must be cleared from the context
  @Test
  public void testTokenIsResetIfInvalid() throws Exception {
    DefaultOAuth2AccessToken token = new DefaultOAuth2AccessToken("TEST");
    token.setExpiration(new Date(System.currentTimeMillis() - 1000));
    restTemplate.getOAuth2ClientContext().setAccessToken(token);
    restTemplate.setAccessTokenProvider(new StubAccessTokenProvider() {
      @Override
      public OAuth2AccessToken obtainAccessToken(OAuth2ProtectedResourceDetails details, AccessTokenRequest parameters)
          throws UserRedirectRequiredException, AccessDeniedException {
        throw new UserRedirectRequiredException("https://www.foo.com/", Collections.<String, String> emptyMap());
      }
    });
    try {
      OAuth2AccessToken newToken = restTemplate.getAccessToken();
      assertNotNull(newToken);
      fail("Expected UserRedirectRequiredException");
    }
    catch (UserRedirectRequiredException e) {
      // planned
    }
    // context token should be reset as it clearly is invalid at this point
    assertNull(restTemplate.getOAuth2ClientContext().getAccessToken());
  }

  /** Extractor that ignores the response body and just signals success. */
  private final class SimpleResponseExtractor implements ResponseExtractor<Boolean> {
    public Boolean extractData(ClientHttpResponse response) throws IOException {
      return true;
    }
  }

  /** Callback that performs no request customization. */
  private static class NullRequestCallback implements RequestCallback {
    public void doWithRequest(ClientHttpRequest request) throws IOException {
    }
  }

  /** Provider that always hands out a fixed "FOO" token and supports no refresh. */
  private static class StubAccessTokenProvider implements AccessTokenProvider {
    public OAuth2AccessToken obtainAccessToken(OAuth2ProtectedResourceDetails details, AccessTokenRequest parameters)
        throws UserRedirectRequiredException, AccessDeniedException {
      return new DefaultOAuth2AccessToken("FOO");
    }

    public boolean supportsRefresh(OAuth2ProtectedResourceDetails resource) {
      return false;
    }

    public OAuth2AccessToken refreshAccessToken(OAuth2ProtectedResourceDetails resource,
        OAuth2RefreshToken refreshToken, AccessTokenRequest request) throws UserRedirectRequiredException {
      return null;
    }

    public boolean supportsResource(OAuth2ProtectedResourceDetails resource) {
      return true;
    }
  }

}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.ChecksumException; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.UnresolvedLinkException; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.hdfs.protocol.ExtendedBlock; import org.apache.hadoop.hdfs.protocol.LocatedBlock; import org.apache.hadoop.hdfs.protocol.LocatedBlocks; import org.apache.hadoop.hdfs.server.datanode.DataNode; import org.apache.hadoop.hdfs.server.datanode.DataNodeTestUtils; import org.apache.hadoop.hdfs.server.namenode.NamenodeFsck; import org.apache.hadoop.hdfs.tools.DFSck; import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.util.ToolRunner; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.io.PrintStream; import java.io.RandomAccessFile; import java.util.Random; import 
java.util.concurrent.TimeoutException; /** * Class is used to test client reporting corrupted block replica to name node. * The reporting policy is if block replica is more than one, if all replicas * are corrupted, client does not report (since the client can handicapped). If * some of the replicas are corrupted, client reports the corrupted block * replicas. In case of only one block replica, client always reports corrupted * replica. */ public class TestClientReportBadBlock { private static final Log LOG = LogFactory.getLog(TestClientReportBadBlock.class); static final long BLOCK_SIZE = 64 * 1024; private static int buffersize; private static MiniDFSCluster cluster; private static DistributedFileSystem dfs; private static int numDataNodes = 3; private static final Configuration conf = new HdfsConfiguration(); Random rand = new Random(); @Before public void startUpCluster() throws IOException { if (System.getProperty("test.build.data") == null) { // to allow test to be // run outside of Ant System.setProperty("test.build.data", "build/test/data"); } // disable block scanner conf.setInt(DFSConfigKeys.DFS_DATANODE_SCAN_PERIOD_HOURS_KEY, -1); cluster = new MiniDFSCluster.Builder(conf).numDataNodes(numDataNodes).build(); cluster.waitActive(); dfs = (DistributedFileSystem) cluster.getFileSystem(); buffersize = conf.getInt(CommonConfigurationKeys.IO_FILE_BUFFER_SIZE_KEY, 4096); } @After public void shutDownCluster() throws IOException { dfs.close(); cluster.shutdown(); } /* * This test creates a file with one block replica. Corrupt the block. Make * DFSClient read the corrupted file. Corrupted block is expected to be * reported to name node. 
*/ @Test public void testOneBlockReplica() throws Exception { final short repl = 1; final int corruptBlockNumber = 1; for (int i = 0; i < 2; i++) { // create a file String fileName = "/tmp/testClientReportBadBlock/OneBlockReplica" + i; Path filePath = new Path(fileName); createAFileWithCorruptedBlockReplicas(filePath, repl, corruptBlockNumber); if (i == 0) { dfsClientReadFile(filePath); } else { dfsClientReadFileFromPosition(filePath); } // the only block replica is corrupted. The LocatedBlock should be marked // as corrupted. But the corrupted replica is expected to be returned // when calling Namenode#getBlockLocations() since all(one) replicas are // corrupted. int expectedReplicaCount = 1; verifyCorruptedBlockCount(filePath, expectedReplicaCount); verifyFirstBlockCorrupted(filePath, true); verifyFsckBlockCorrupted(); testFsckListCorruptFilesBlocks(filePath, -1); } } /** * This test creates a file with three block replicas. Corrupt all of the * replicas. Make dfs client read the file. No block corruption should be * reported. */ @Test public void testCorruptAllOfThreeReplicas() throws Exception { final short repl = 3; final int corruptBlockNumber = 3; for (int i = 0; i < 2; i++) { // create a file String fileName = "/tmp/testClientReportBadBlock/testCorruptAllReplicas" + i; Path filePath = new Path(fileName); createAFileWithCorruptedBlockReplicas(filePath, repl, corruptBlockNumber); // ask dfs client to read the file if (i == 0) { dfsClientReadFile(filePath); } else { dfsClientReadFileFromPosition(filePath); } // As all replicas are corrupted. We expect DFSClient does NOT report // corrupted replicas to the name node. int expectedReplicasReturned = repl; verifyCorruptedBlockCount(filePath, expectedReplicasReturned); // LocatedBlock should not have the block marked as corrupted. verifyFirstBlockCorrupted(filePath, false); verifyFsckHealth(""); testFsckListCorruptFilesBlocks(filePath, 0); } } /** * This test creates a file with three block replicas. 
Corrupt two of the * replicas. Make dfs client read the file. The corrupted blocks with their * owner data nodes should be reported to the name node. */ @Test public void testCorruptTwoOutOfThreeReplicas() throws Exception { final short repl = 3; final int corruptBlocReplicas = 2; for (int i = 0; i < 2; i++) { String fileName = "/tmp/testClientReportBadBlock/CorruptTwoOutOfThreeReplicas" + i; Path filePath = new Path(fileName); createAFileWithCorruptedBlockReplicas(filePath, repl, corruptBlocReplicas); int replicaCount = 0; /* * The order of data nodes in LocatedBlock returned by name node is sorted * by NetworkToplology#pseudoSortByDistance. In current MiniDFSCluster, * when LocatedBlock is returned, the sorting is based on a random order. * That is to say, the DFS client and simulated data nodes in mini DFS * cluster are considered not on the same host nor the same rack. * Therefore, even we corrupted the first two block replicas based in * order. When DFSClient read some block replicas, it is not guaranteed * which block replicas (good/bad) will be returned first. So we try to * re-read the file until we know the expected replicas numbers is * returned. */ while (replicaCount != repl - corruptBlocReplicas) { if (i == 0) { dfsClientReadFile(filePath); } else { dfsClientReadFileFromPosition(filePath); } LocatedBlocks blocks = dfs.dfs.getNamenode(). getBlockLocations(filePath.toString(), 0, Long.MAX_VALUE); replicaCount = blocks.get(0).getLocations().length; } verifyFirstBlockCorrupted(filePath, false); int expectedReplicaCount = repl - corruptBlocReplicas; verifyCorruptedBlockCount(filePath, expectedReplicaCount); verifyFsckHealth("Target Replicas is 3 but found 1 replica"); testFsckListCorruptFilesBlocks(filePath, 0); } } /** * Create a file with one block and corrupt some/all of the block replicas. 
*/ private void createAFileWithCorruptedBlockReplicas(Path filePath, short repl, int corruptBlockCount) throws IOException, AccessControlException, FileNotFoundException, UnresolvedLinkException, InterruptedException, TimeoutException { DFSTestUtil.createFile(dfs, filePath, BLOCK_SIZE, repl, 0); DFSTestUtil.waitReplication(dfs, filePath, repl); // Locate the file blocks by asking name node final LocatedBlocks locatedblocks = dfs.dfs.getNamenode() .getBlockLocations(filePath.toString(), 0L, BLOCK_SIZE); Assert.assertEquals(repl, locatedblocks.get(0).getLocations().length); // The file only has one block LocatedBlock lblock = locatedblocks.get(0); DatanodeInfo[] datanodeinfos = lblock.getLocations(); ExtendedBlock block = lblock.getBlock(); // corrupt some /all of the block replicas for (int i = 0; i < corruptBlockCount; i++) { DatanodeInfo dninfo = datanodeinfos[i]; final DataNode dn = cluster.getDataNode(dninfo.getIpcPort()); corruptBlock(block, dn); LOG.debug("Corrupted block " + block.getBlockName() + " on data node " + dninfo); } } /** * Verify the first block of the file is corrupted (for all its replica). */ private void verifyFirstBlockCorrupted(Path filePath, boolean isCorrupted) throws AccessControlException, FileNotFoundException, UnresolvedLinkException, IOException { final LocatedBlocks locatedBlocks = dfs.dfs.getNamenode() .getBlockLocations(filePath.toUri().getPath(), 0, Long.MAX_VALUE); final LocatedBlock firstLocatedBlock = locatedBlocks.get(0); Assert.assertEquals(isCorrupted, firstLocatedBlock.isCorrupt()); } /** * Verify the number of corrupted block replicas by fetching the block * location from name node. 
*/ private void verifyCorruptedBlockCount(Path filePath, int expectedReplicas) throws AccessControlException, FileNotFoundException, UnresolvedLinkException, IOException { final LocatedBlocks lBlocks = dfs.dfs.getNamenode() .getBlockLocations(filePath.toUri().getPath(), 0, Long.MAX_VALUE); // we expect only the first block of the file is used for this test LocatedBlock firstLocatedBlock = lBlocks.get(0); Assert.assertEquals(expectedReplicas, firstLocatedBlock.getLocations().length); } /** * Ask dfs client to read the file */ private void dfsClientReadFile(Path corruptedFile) throws IOException, UnresolvedLinkException { DFSInputStream in = dfs.dfs.open(corruptedFile.toUri().getPath()); byte[] buf = new byte[buffersize]; int nRead = 0; // total number of bytes read try { do { nRead = in.read(buf, 0, buf.length); } while (nRead > 0); } catch (ChecksumException ce) { // caught ChecksumException if all replicas are bad, ignore and continue. LOG.debug("DfsClientReadFile caught ChecksumException."); } catch (BlockMissingException bme) { // caught BlockMissingException, ignore. LOG.debug("DfsClientReadFile caught BlockMissingException."); } } /** * DFS client read bytes starting from the specified position. */ private void dfsClientReadFileFromPosition(Path corruptedFile) throws UnresolvedLinkException, IOException { DFSInputStream in = dfs.dfs.open(corruptedFile.toUri().getPath()); byte[] buf = new byte[buffersize]; int startPosition = 2; int nRead = 0; // total number of bytes read try { do { nRead = in.read(startPosition, buf, 0, buf.length); startPosition += buf.length; } while (nRead > 0); } catch (BlockMissingException bme) { LOG.debug("DfsClientReadFile caught BlockMissingException."); } } /** * Corrupt a block on a data node. Replace the block file content with * content * of 1, 2, ...BLOCK_SIZE. 
* * @param block * the ExtendedBlock to be corrupted * @param dn * the data node where the block needs to be corrupted * @throws FileNotFoundException * @throws IOException */ private static void corruptBlock(final ExtendedBlock block, final DataNode dn) throws FileNotFoundException, IOException { final File f = DataNodeTestUtils .getBlockFile(dn, block.getBlockPoolId(), block.getLocalBlock()); final RandomAccessFile raFile = new RandomAccessFile(f, "rw"); final byte[] bytes = new byte[(int) BLOCK_SIZE]; for (int i = 0; i < BLOCK_SIZE; i++) { bytes[i] = (byte) (i); } raFile.write(bytes); raFile.close(); } private static void verifyFsckHealth(String expected) throws Exception { // Fsck health has error code 0. // Make sure filesystem is in healthy state String outStr = runFsck(conf, 0, true, "/"); LOG.info(outStr); Assert.assertTrue(outStr.contains(NamenodeFsck.HEALTHY_STATUS)); if (!expected.equals("")) { Assert.assertTrue(outStr.contains(expected)); } } private static void verifyFsckBlockCorrupted() throws Exception { String outStr = runFsck(conf, 1, true, "/"); LOG.info(outStr); Assert.assertTrue(outStr.contains(NamenodeFsck.CORRUPT_STATUS)); } private static void testFsckListCorruptFilesBlocks(Path filePath, int errorCode) throws Exception { String outStr = runFsck(conf, errorCode, true, filePath.toString(), "-list-corruptfileblocks"); LOG.info("fsck -list-corruptfileblocks out: " + outStr); if (errorCode != 0) { Assert.assertTrue(outStr.contains("CORRUPT files")); } } static String runFsck(Configuration conf, int expectedErrCode, boolean checkErrorCode, String... path) throws Exception { ByteArrayOutputStream bStream = new ByteArrayOutputStream(); PrintStream out = new PrintStream(bStream, true); int errCode = ToolRunner.run(new DFSck(conf, out), path); if (checkErrorCode) { Assert.assertEquals(expectedErrCode, errCode); } return bStream.toString(); } }
/* * Copyright 2014 toxbee.se * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package se.toxbee.sleepfighter.challenge; import se.toxbee.sleepfighter.model.challenge.ChallengeConfig; import se.toxbee.sleepfighter.model.challenge.ChallengeConfigSet; import se.toxbee.sleepfighter.model.challenge.ChallengeType; /** * ChallengeParamsReadWriter provides utilities for reading and writing parameters<br/> * for a {ChallengeConfigSet, ChallengeType} == ChallengeConfig.<br/> * This is the preferred way of dealing with challenge parameters. * * @author Centril<twingoow@gmail.com> / Mazdak Farrokhzad. * @version 1.0 * @since Oct 4, 2013 */ public class ChallengeParamsReadWriter { private ChallengeConfigSet challengeSet; private ChallengeType challengeType; /** * Constructs a ChallengeParamsReadWriter without calling<br/> * {@link #setChallengeSet(ChallengeConfigSet)} and {@link #setChallengeType(ChallengeType)}<br/> * These must be called manually after. */ public ChallengeParamsReadWriter() { } /** * Constructs a ChallengeParamsReadWriter with a given ChallengeConfigSet and ChallengeType. * * @param set the ChallengeConfigSet to manage by this instance. * @param type the ChallengeType to manage by this instance. */ public ChallengeParamsReadWriter( ChallengeConfigSet set, ChallengeType type ) { this.setChallengeSet( set ); this.setChallengeType( type ); } /** * Returns the ChallengeConfigSet that is currently being managed by this instance. 
* * @return the ChallengeConfigSet that is currently being managed by this instance. */ public ChallengeConfigSet getChallengeSet() { return this.challengeSet; } /** * Sets the ChallengeConfigSet that is to be managed by this instance. * * @param challengeSet the ChallengeConfigSet that is to be managed by this instance. */ public void setChallengeSet( ChallengeConfigSet challengeSet ) { this.challengeSet = challengeSet; } /** * Returns the ChallengeType that is currently being managed by this instance. * * @return the ChallengeType that is currently being managed by this instance. */ public ChallengeType getChallengeType() { return this.challengeType; } /** * Sets the ChallengeType that is to be managed by this instance. * * @param challengeType the ChallengeType that is to be managed by this instance. */ public void setChallengeType( ChallengeType challengeType ) { this.challengeType = challengeType; } /* -------------------------------- * Facades for accessors. * -------------------------------- */ /** * Returns the parameter value for key as a String. * * @param key the key. * @return the value as a String. */ public String getRaw( String key ) { return this.getConfig().getParam( key ); } /** * Returns the parameter value for key as a String. * * @param key the key. * @param defaultValue default value to use instead of null. * @return the value as a String. */ public String getString( String key, String defaultValue ) { String val = this.getRaw( key ); return val == null ? defaultValue : val; } /** * Returns the parameter value for key as an integer. * * @param key the key. * @param defaultValue default value to use instead of null. * @return the value as an integer. */ public int getInt( String key, int defaultValue ) { String val = this.getRaw( key ); return val == null ? defaultValue : Integer.parseInt( val ); } /** * Returns the parameter value for key as a float. * * @param key the key. * @param defaultValue default value to use instead of null. 
* @return the value as a float. */ public float getFloat( String key, float defaultValue ) { String val = this.getRaw( key ); return val == null ? defaultValue : Float.parseFloat( val ); } /** * Returns the parameter value for key as a double. * * @param key the key. * @param defaultValue default value to use instead of null. * @return the value as a double. */ public double getDouble( String key, double defaultValue ) { String val = this.getRaw( key ); return val == null ? defaultValue : Double.parseDouble( val ); } /** * Returns the parameter value for key as a boolean. * * @param key the key. * @param defaultValue default value to use instead of null. * @return the value as a boolean. */ public boolean getBoolean( String key, boolean defaultValue ) { String val = this.getRaw( key ); return val == null ? defaultValue : Boolean.parseBoolean( val ); } /** * Returns the parameter value for key as a long. * * @param key the key. * @param defaultValue default value to use instead of null. * @return the value as a long. */ public long getLong( String key, long defaultValue ) { String val = this.getRaw( key ); return val == null ? defaultValue : Long.parseLong( val ); } /* -------------------------------- * Facades for mutators. * -------------------------------- */ /** * Sets the parameter value for key from a string value. * * @param key the key. * @param value the value as a string. */ public void setString( String key, String value ) { this.challengeSet.setConfigParam( this.challengeType, key, value ); } /** * Sets the parameter value for key from an integer value. * * @param key the key. * @param value the value as an integer. */ public void setInt( String key, int value ) { this.setString( key, Integer.toString( value ) ); } /** * Sets the parameter value for key from a float value. * * @param key the key. * @param value the value as a float. 
*/ public void setFloat( String key, float value ) { this.setString( key, Float.toString( value ) ); } /** * Sets the parameter value for key from a double value. * * @param key the key. * @param value the value as a double. */ public void setDouble( String key, double value ) { this.setString( key, Double.toString( value ) ); } /** * Sets the parameter value for key from a double value. * * @param key the key. * @param value the value as a boolean. */ public void setBoolean( String key, boolean value ) { this.setString( key, Boolean.toString( value ) ); } /** * Sets the parameter value for key from a long value. * * @param key the key. * @param value the value as a long. */ public void setLong( String key, long value ) { this.setString( key, Long.toString( value ) ); } /** * Returns the resultant ChallengeConfig that is in reality managed. * * @return the ChallengeConfig. */ public ChallengeConfig getConfig() { return this.challengeSet.getConfig( this.challengeType ); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.wicket.util.io;

import java.io.IOException;
import java.io.Reader;
import java.text.ParseException;

/**
 * This is not a reader like e.g. FileReader. It rather reads the whole data until the end from a
 * source reader into memory and besides that it maintains the current position (like a reader) it
 * provides String like methods which conveniently let you navigate (usually forward) in the stream.
 * <p>
 * Because the source data are expected to be text, the line and column numbers are maintained as
 * well for location precise error messages. But it does NOT automatically update the line and
 * column numbers. You must call {@link #countLinesTo(int)}
 *
 * @author Juergen Donnerstag
 */
public final class FullyBufferedReader
{
	/** All the chars from the resource */
	private final String input;

	/** Position in parse. */
	private int inputPosition;

	/** Current line number (1-based; only advanced by countLinesTo) */
	private int lineNumber = 1;

	/** current column number (1-based; only advanced by countLinesTo). */
	private int columnNumber = 1;

	/** Last place we counted lines from. */
	private int lastLineCountIndex;

	/** A variable to remember a certain position in the markup */
	private int positionMarker;

	/**
	 * Read all the data from the resource into memory.
	 *
	 * @param reader
	 *            The source reader to load the data from
	 * @throws IOException
	 */
	public FullyBufferedReader(final Reader reader) throws IOException
	{
		// Streams is a same-package helper; it drains the reader into a String.
		this(Streams.readString(reader));
	}

	/**
	 * Construct.
	 *
	 * @param input
	 *            The source string
	 */
	public FullyBufferedReader(String input)
	{
		this.input = input;
	}

	/**
	 * Get the characters from the position marker to toPos.
	 * <p>
	 * If toPos &lt; 0, than get all data from the position marker until the end. If toPos less than
	 * the current position marker than return an empty string ""
	 *
	 * @param toPos
	 *            Index of first character not included
	 * @return Raw markup (a string) in between these two positions.
	 */
	public final CharSequence getSubstring(int toPos)
	{
		if (toPos < 0)
		{
			// Negative toPos means "until end of input".
			toPos = input.length();
		}
		else if (toPos < positionMarker)
		{
			return "";
		}
		return input.subSequence(positionMarker, toPos);
	}

	/**
	 * Get the characters from in between both positions including the char at fromPos, excluding
	 * the char at toPos
	 *
	 * @param fromPos
	 *            first index
	 * @param toPos
	 *            second index
	 * @return the string (raw markup) in between both positions
	 */
	public final CharSequence getSubstring(final int fromPos, final int toPos)
	{
		return input.subSequence(fromPos, toPos);
	}

	/**
	 * Gets the current input position
	 *
	 * @return input position
	 */
	public final int getPosition()
	{
		return inputPosition;
	}

	/**
	 * Remember the current position in markup
	 *
	 * @param pos
	 *            the index to remember; later read back via {@link #getSubstring(int)}
	 */
	public final void setPositionMarker(final int pos)
	{
		positionMarker = pos;
	}

	/**
	 * @return The markup to be parsed
	 */
	@Override
	public String toString()
	{
		return input;
	}

	/**
	 * Counts lines starting where we last left off up to the index provided.
	 *
	 * @param end
	 *            End index
	 */
	public final void countLinesTo(final int end)
	{
		// Incremental: resumes at lastLineCountIndex, so repeated calls with
		// increasing indices never rescan already-counted text. NOTE(review):
		// a call with end < lastLineCountIndex does not rewind — it is a no-op
		// for the loop but still moves lastLineCountIndex backwards.
		for (int i = lastLineCountIndex; i < end; i++)
		{
			final char ch = input.charAt(i);
			if (ch == '\n')
			{
				columnNumber = 1;
				lineNumber++;
			}
			else if (ch != '\r')
			{
				// '\r' is ignored so CRLF counts as a single line break.
				columnNumber++;
			}
		}

		lastLineCountIndex = end;
	}

	/**
	 * Find a char starting at the current input position
	 *
	 * @param ch
	 *            The char to search for
	 * @return -1 if not found
	 */
	public final int find(final char ch)
	{
		return input.indexOf(ch, inputPosition);
	}

	/**
	 * Find a char starting at the position provided
	 *
	 * @param ch
	 *            The char to search for
	 * @param startPos
	 *            The index to start at
	 * @return -1 if not found
	 */
	public final int find(final char ch, final int startPos)
	{
		return input.indexOf(ch, startPos);
	}

	/**
	 * Find the string starting at the current input position
	 *
	 * @param str
	 *            The string to search for
	 * @return -1 if not found
	 */
	public final int find(final String str)
	{
		return input.indexOf(str, inputPosition);
	}

	/**
	 * Find the string starting at the position provided
	 *
	 * @param str
	 *            The string to search for
	 * @param startPos
	 *            The index to start at
	 * @return -1 if not found
	 */
	public final int find(final String str, final int startPos)
	{
		return input.indexOf(str, startPos);
	}

	/**
	 * Find a char starting at the position provided. The char must not be inside a quoted string
	 * (single or double)
	 *
	 * @param ch
	 *            The char to search for
	 * @param startPos
	 *            The index to start at
	 * @return -1 if not found
	 * @throws ParseException
	 */
	public int findOutOfQuotes(final char ch, int startPos) throws ParseException
	{
		// (char)0 = "not currently inside any quote".
		return findOutOfQuotes(ch, startPos, (char)0);
	}

	/**
	 * Find a char starting at the position provided. The char must not be inside a quoted string
	 * (single or double)
	 *
	 * @param ch
	 *            The char to search for
	 * @param startPos
	 *            The index to start at
	 * @param quotationChar
	 *            The current quotation char. Must be ' or ", otherwise will be ignored.
	 * @return -1 if not found
	 * @throws ParseException
	 */
	public int findOutOfQuotes(final char ch, int startPos, char quotationChar)
		throws ParseException
	{
		// Strategy: find the next candidate occurrence of ch, then scan the
		// intervening text tracking quote state. If the candidate turns out to
		// be inside quotes, recurse to try the next occurrence.
		int closeBracketIndex = find(ch, startPos + 1);

		if (closeBracketIndex != -1)
		{
			CharSequence tagCode = getSubstring(startPos, closeBracketIndex + 1);

			for (int i = 0; i < tagCode.length(); i++)
			{
				char currentChar = tagCode.charAt(i);
				// Previous char, used to detect backslash-escaped quotes
				// (index 0 compares a char with itself — no escape possible there).
				char previousTag = tagCode.charAt(i > 0 ? i - 1 : 0);

				if (quotationChar == 0 && (currentChar == '\'' || currentChar == '\"'))
				{// I'm entering inside a quoted string. Set quotationChar
					quotationChar = currentChar;
					countLinesTo(startPos + i);
				}
				else if (currentChar == quotationChar && previousTag != '\\')
				{ // I'm out of quotes, reset quotationChar
					quotationChar = 0;
				}

				// I've found character but I'm inside quotes
				if (currentChar == ch && quotationChar != 0)
				{
					// Keep searching past this occurrence, carrying the open
					// quote state into the recursive call.
					return findOutOfQuotes(ch, closeBracketIndex + 1, quotationChar);
				}
			}
		}
		else if (quotationChar != 0)
		{
			// quotes not balanced!
			throw new ParseException("Opening/closing quote not found for quote at " + "(line " +
				getLineNumber() + ", column " + getColumnNumber() + ")", startPos);
		}

		return closeBracketIndex;
	}

	/**
	 * Position the reader at the index provided. Could be anywhere within the data
	 *
	 * @param pos
	 *            The new current position
	 */
	public final void setPosition(final int pos)
	{
		inputPosition = pos;
	}

	/**
	 * Get the column number. Note: The column number depends on you calling countLinesTo(pos). It
	 * is not necessarily the column number matching the current position in the stream.
	 *
	 * @return column number
	 */
	public final int getColumnNumber()
	{
		return columnNumber;
	}

	/**
	 * Get the line number. Note: The line number depends on you calling countLinesTo(pos). It is
	 * not necessarily the line number matching the current position in the stream.
	 *
	 * @return line number
	 */
	public final int getLineNumber()
	{
		return lineNumber;
	}

	/**
	 * Get the number of character read from the source resource. The whole content, not just until
	 * the current position.
	 *
	 * @return Size of the data
	 */
	public final int size()
	{
		return input.length();
	}

	/**
	 * Get the character at the position provided
	 *
	 * @param pos
	 *            The position
	 * @return char at position
	 */
	public final char charAt(final int pos)
	{
		return input.charAt(pos);
	}
}
/* * Copyright (C) 2015 The Gifplayer Authors. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package jp.tomorrowkey.android.gifplayer; import android.graphics.Bitmap; import android.graphics.Canvas; import android.graphics.ColorFilter; import android.graphics.Paint; import android.graphics.PixelFormat; import android.graphics.Rect; import android.graphics.drawable.Animatable; import android.graphics.drawable.Drawable; import android.os.Handler; import android.os.HandlerThread; import android.os.Looper; import android.os.Message; import android.os.SystemClock; import android.util.Log; /** * A base GIF Drawable with support for animations. * * Inspired by http://code.google.com/p/android-gifview/ */ public class BaseGifDrawable extends Drawable implements Runnable, Animatable, android.os.Handler.Callback { private static final String TAG = "GifDrawable"; // Max decoder pixel stack size private static final int MAX_STACK_SIZE = 4096; private static final int MAX_BITS = 4097; // Frame disposal methods private static final int DISPOSAL_METHOD_UNKNOWN = 0; private static final int DISPOSAL_METHOD_LEAVE = 1; private static final int DISPOSAL_METHOD_BACKGROUND = 2; private static final int DISPOSAL_METHOD_RESTORE = 3; // Message types private static final int READ_FRAME_REQ = 10; private static final int READ_FRAME_RESP = 11; private static final int RESET_DECODER = 12; // Specifies the minimum amount of time before a subsequent frame will be rendered. 
private static final int MIN_FRAME_SCHEDULE_DELAY_MS = 5; private static final byte[] NETSCAPE2_0 = "NETSCAPE2.0".getBytes(); private static Paint sPaint; private static Paint sScalePaint; protected final BaseGifImage mGifImage; private final byte[] mData; private int mPosition; protected int mIntrinsicWidth; protected int mIntrinsicHeight; private int mWidth; private int mHeight; protected Bitmap mBitmap; protected int[] mColors; private boolean mScale; private float mScaleFactor; // The following are marked volatile because they are read/written in the background decoder // thread and read from the UI thread. No further synchronization is needed because their // values will only ever change from at most once, and it is safe to lazily detect the change // in the UI thread. private volatile boolean mError; private volatile boolean mDone; private volatile boolean mAnimateOnLoad = true; private int mBackgroundColor; private boolean mLocalColorTableUsed; private int mLocalColorTableSize; private int[] mLocalColorTable; private int[] mActiveColorTable; private boolean mInterlace; // Each frame specifies a sub-region of the image that should be updated. The values are // clamped to the GIF dimensions if they exceed the intrinsic dimensions. private int mFrameX, mFrameY, mFrameWidth, mFrameHeight; // This specifies the width of the actual data within a GIF frame. It will be equal to // mFrameWidth unless the frame sub-region was clamped to prevent exceeding the intrinsic // dimensions. 
private int mFrameStep; private byte[] mBlock = new byte[256]; private int mDisposalMethod = DISPOSAL_METHOD_BACKGROUND; private boolean mTransparency; private int mTransparentColorIndex; // LZW decoder working arrays private short[] mPrefix = new short[MAX_STACK_SIZE]; private byte[] mSuffix = new byte[MAX_STACK_SIZE]; private byte[] mPixelStack = new byte[MAX_STACK_SIZE + 1]; private byte[] mPixels; private boolean mBackupSaved; private int[] mBackup; private int mFrameCount; private long mLastFrameTime; private boolean mRunning; protected int mFrameDelay; private int mNextFrameDelay; protected boolean mScheduled; private boolean mAnimationEnabled = true; private final Handler mHandler = new Handler(Looper.getMainLooper(), this); private static DecoderThread sDecoderThread; private static Handler sDecoderHandler; private boolean mRecycled; protected boolean mFirstFrameReady; private boolean mEndOfFile; private int mLoopCount = 0; // 0 to repeat endlessly. private int mLoopIndex = 0; private final Bitmap.Config mBitmapConfig; private boolean mFirstFrame = true; public BaseGifDrawable(BaseGifImage gifImage, Bitmap.Config bitmapConfig) { this.mBitmapConfig = bitmapConfig; // Create the background decoder thread, if necessary. 
if (sDecoderThread == null) { sDecoderThread = new DecoderThread(); sDecoderThread.start(); sDecoderHandler = new Handler(sDecoderThread.getLooper(), sDecoderThread); } if (sPaint == null) { sPaint = new Paint(Paint.FILTER_BITMAP_FLAG); sScalePaint = new Paint(Paint.FILTER_BITMAP_FLAG); sScalePaint.setFilterBitmap(true); } mGifImage = gifImage; mData = gifImage.getData(); mPosition = mGifImage.mHeaderSize; mFrameWidth = mFrameStep = mIntrinsicWidth = gifImage.getWidth(); mFrameHeight = mIntrinsicHeight = gifImage.getHeight(); mBackgroundColor = mGifImage.mBackgroundColor; mError = mGifImage.mError; if (!mError) { try { mBitmap = Bitmap.createBitmap(mIntrinsicWidth, mIntrinsicHeight, mBitmapConfig); if (mBitmap == null) { throw new OutOfMemoryError("Cannot allocate bitmap"); } int pixelCount = mIntrinsicWidth * mIntrinsicHeight; mColors = new int[pixelCount]; mPixels = new byte[pixelCount]; mWidth = mIntrinsicHeight; mHeight = mIntrinsicHeight; // Read the first frame sDecoderHandler.sendMessage(sDecoderHandler.obtainMessage(READ_FRAME_REQ, this)); } catch (OutOfMemoryError e) { mError = true; } } } /** * Sets the loop count for multi-frame animation. */ public void setLoopCount(int loopCount) { mLoopCount = loopCount; } /** * Returns the loop count for multi-frame animation. */ public int getLoopCount() { return mLoopCount; } /** * Sets whether to start animation on load or not. */ public void setAnimateOnLoad(boolean animateOnLoad) { mAnimateOnLoad = animateOnLoad; } /** * Returns {@code true} if the GIF is valid and {@code false} otherwise. */ public boolean isValid() { return !mError && mFirstFrameReady; } public void onRecycle() { if (mBitmap != null) { mBitmap.recycle(); } mBitmap = null; mRecycled = true; } /** * Enables or disables the GIF from animating. GIF animations are enabled by default. 
*/ public void setAnimationEnabled(boolean animationEnabled) { if (mAnimationEnabled == animationEnabled) { return; } mAnimationEnabled = animationEnabled; if (mAnimationEnabled) { start(); } else { stop(); } } @Override protected void onBoundsChange(Rect bounds) { super.onBoundsChange(bounds); mWidth = bounds.width(); mHeight = bounds.height(); mScale = mWidth != mIntrinsicWidth && mHeight != mIntrinsicHeight; if (mScale) { mScaleFactor = Math.max((float) mWidth / mIntrinsicWidth, (float) mHeight / mIntrinsicHeight); } if (!mError && !mRecycled) { // Request that the decoder reset itself sDecoderHandler.sendMessage(sDecoderHandler.obtainMessage(RESET_DECODER, this)); } } @Override public boolean setVisible(boolean visible, boolean restart) { boolean changed = super.setVisible(visible, restart); if (visible) { if (changed || restart) { start(); } } else { stop(); } return changed; } @Override public void draw(Canvas canvas) { if (mError || mWidth == 0 || mHeight == 0 || mRecycled || !mFirstFrameReady) { return; } if (mScale) { canvas.save(); canvas.scale(mScaleFactor, mScaleFactor, 0, 0); canvas.drawBitmap(mBitmap, 0, 0, sScalePaint); canvas.restore(); } else { canvas.drawBitmap(mBitmap, 0, 0, sPaint); } if (mRunning) { if (!mScheduled) { // Schedule the next frame at mFrameDelay milliseconds from the previous frame or // the minimum sceduling delay from now, whichever is later. 
mLastFrameTime = Math.max( mLastFrameTime + mFrameDelay, SystemClock.uptimeMillis() + MIN_FRAME_SCHEDULE_DELAY_MS); scheduleSelf(this, mLastFrameTime); } } else if (!mDone) { start(); } else { unscheduleSelf(this); } } @Override public int getIntrinsicWidth() { return mIntrinsicWidth; } @Override public int getIntrinsicHeight() { return mIntrinsicHeight; } @Override public int getOpacity() { return PixelFormat.UNKNOWN; } @Override public void setAlpha(int alpha) { } @Override public void setColorFilter(ColorFilter cf) { } @Override public boolean isRunning() { return mRunning; } @Override public void start() { if (!isRunning()) { mRunning = true; if (!mAnimateOnLoad) { mDone = true; } mLastFrameTime = SystemClock.uptimeMillis(); run(); } } @Override public void stop() { if (isRunning()) { unscheduleSelf(this); } } @Override public void scheduleSelf(Runnable what, long when) { if (mAnimationEnabled) { super.scheduleSelf(what, when); mScheduled = true; } } @Override public void unscheduleSelf(Runnable what) { super.unscheduleSelf(what); mRunning = false; } /** * Moves to the next frame. */ @Override public void run() { if (mRecycled) { return; } // Send request to decoder to read the next frame if (!mDone) { sDecoderHandler.sendMessage(sDecoderHandler.obtainMessage(READ_FRAME_REQ, this)); } } /** * Restarts decoding the image from the beginning. Called from the background thread. */ private void reset() { // Return to the position of the first image frame in the stream. mPosition = mGifImage.mHeaderSize; mBackupSaved = false; mFrameCount = 0; mDisposalMethod = DISPOSAL_METHOD_UNKNOWN; } /** * Restarts animation if a limited number of loops of animation have been previously done. */ public void restartAnimation() { if (mDone && mLoopCount > 0) { reset(); mDone = false; mLoopIndex = 0; run(); } } /** * Reads color table as 256 RGB integer values. Called from the background thread. 
* * @param ncolors int number of colors to read */ private void readColorTable(int[] colorTable, int ncolors) { for (int i = 0; i < ncolors; i++) { int r = mData[mPosition++] & 0xff; int g = mData[mPosition++] & 0xff; int b = mData[mPosition++] & 0xff; colorTable[i] = 0xff000000 | (r << 16) | (g << 8) | b; } } /** * Reads GIF content blocks. Called from the background thread. * * @return true if the next frame has been parsed successfully, false if EOF * has been reached */ private void readNextFrame() { // Don't clear the image if it is a terminator. if ((mData[mPosition] & 0xff) == 0x3b) { mEndOfFile = true; return; } disposeOfLastFrame(); mDisposalMethod = DISPOSAL_METHOD_UNKNOWN; mTransparency = false; mEndOfFile = false; mNextFrameDelay = 100; mLocalColorTable = null; while (true) { int code = mData[mPosition++] & 0xff; switch (code) { case 0: // Empty block, ignore break; case 0x21: // Extension. Extensions precede the corresponding image. code = mData[mPosition++] & 0xff; switch (code) { case 0xf9: // graphics control extension readGraphicControlExt(); break; case 0xff: // application extension readBlock(); boolean netscape = true; for (int i = 0; i < NETSCAPE2_0.length; i++) { if (mBlock[i] != NETSCAPE2_0[i]) { netscape = false; break; } } if (netscape) { readNetscapeExtension(); } else { skip(); // don't care } break; case 0xfe:// comment extension skip(); break; case 0x01:// plain text extension skip(); break; default: // uninteresting extension skip(); } break; case 0x2C: // Image separator readBitmap(); return; case 0x3b: // Terminator mEndOfFile = true; return; default: // We don't know what this is. Just skip it. break; } } } /** * Disposes of the previous frame. Called from the background thread. 
     */
    private void disposeOfLastFrame() {
        // The very first frame has no predecessor to dispose of.
        if (mFirstFrame) {
            mFirstFrame = false;
            return;
        }
        switch (mDisposalMethod) {
            case DISPOSAL_METHOD_UNKNOWN:
            case DISPOSAL_METHOD_LEAVE: {
                // Leave the previous frame's pixels in place.
                mBackupSaved = false;
                break;
            }
            case DISPOSAL_METHOD_RESTORE: {
                // Restore the pixels saved before the previous frame was drawn.
                if (mBackupSaved) {
                    System.arraycopy(mBackup, 0, mColors, 0, mBackup.length);
                }
                break;
            }
            case DISPOSAL_METHOD_BACKGROUND: {
                mBackupSaved = false;
                // Fill last image rect area with background color
                int color = 0;
                if (!mTransparency) { // use given background color
                    color = mBackgroundColor;
                }
                for (int i = 0; i < mFrameHeight; i++) {
                    int n1 = (mFrameY + i) * mIntrinsicWidth + mFrameX;
                    int n2 = n1 + mFrameWidth;
                    for (int k = n1; k < n2; k++) {
                        mColors[k] = color;
                    }
                }
                break;
            }
        }
    }

    /**
     * Reads Graphics Control Extension values. Called from the background thread.
     */
    private void readGraphicControlExt() {
        mPosition++; // Block size, fixed
        int packed = mData[mPosition++] & 0xff; // Packed fields
        mDisposalMethod = (packed & 0x1c) >> 2; // Disposal method
        mTransparency = (packed & 1) != 0;
        mNextFrameDelay = readShort() * 10; // Delay in milliseconds
        // It seems that there are broken tools out there that set a 0ms or 10ms
        // timeout when they really want a "default" one.
        // Following WebKit's lead (http://trac.webkit.org/changeset/73295)
        // we use 10 frames per second as the default frame rate.
        if (mNextFrameDelay <= 10) {
            mNextFrameDelay = 100;
        }
        mTransparentColorIndex = mData[mPosition++] & 0xff;
        mPosition++; // Block terminator - ignore
    }

    /**
     * Reads Netscape extension to obtain iteration count. Called from the background thread.
     */
    private void readNetscapeExtension() {
        // NOTE(review): this drains the extension's sub-blocks; the loop count itself
        // appears to be consumed elsewhere (mLoopCount is read but not set here) — verify.
        int count;
        do {
            count = readBlock();
        } while ((count > 0) && !mError);
    }

    /**
     * Reads next frame image. Called from the background thread.
     */
    private void readBitmap() {
        mFrameX = readShort(); // (sub)image position & size
        mFrameY = readShort();
        int width = readShort();
        int height = readShort();

        // Clamp the frame dimensions to the intrinsic dimensions.
        mFrameWidth = Math.min(width, mIntrinsicWidth - mFrameX);
        mFrameHeight = Math.min(height, mIntrinsicHeight - mFrameY);
        // The frame step is set to the specified frame width before clamping.
        mFrameStep = width;

        // Increase the size of the decoding buffer if necessary.
        int framePixelCount = width * height;
        if (framePixelCount > mPixels.length) {
            mPixels = new byte[framePixelCount];
        }

        int packed = mData[mPosition++] & 0xff;
        // 3 - sort flag
        // 4-5 - reserved
        // lctSize = 2 << (packed & 7); // 6-8 - local color table size
        mInterlace = (packed & 0x40) != 0; // 2 - interlace flag
        mLocalColorTableUsed = (packed & 0x80) != 0; // 1 - local color table flag
        mLocalColorTableSize = (int) Math.pow(2, (packed & 0x07) + 1);
        if (mLocalColorTableUsed) {
            if (mLocalColorTable == null) {
                mLocalColorTable = new int[256];
            }
            readColorTable(mLocalColorTable, mLocalColorTableSize);
            mActiveColorTable = mLocalColorTable;
        } else {
            mActiveColorTable = mGifImage.mGlobalColorTable;
            if (mGifImage.mBackgroundIndex == mTransparentColorIndex) {
                mBackgroundColor = 0;
            }
        }
        int savedColor = 0;
        if (mTransparency) {
            // NOTE(review): mActiveColorTable is dereferenced here before the null
            // check below — a transparent frame with a missing color table would NPE
            // rather than set mError. Verify against upstream.
            savedColor = mActiveColorTable[mTransparentColorIndex];
            // Temporarily set the transparent pixel to transparent black so it is
            // skipped by populateImageData(); restored after the frame is composed.
            mActiveColorTable[mTransparentColorIndex] = 0;
        }
        if (mActiveColorTable == null) {
            mError = true;
        }
        if (mError) {
            return;
        }
        decodeBitmapData();
        skip();
        if (mError) {
            return;
        }
        if (mDisposalMethod == DISPOSAL_METHOD_RESTORE) {
            backupFrame();
        }
        populateImageData();
        if (mTransparency) {
            mActiveColorTable[mTransparentColorIndex] = savedColor;
        }
        mFrameCount++;
    }

    /**
     * Stores the relevant portion of the current frame so that it can be restored
     * before the next frame is rendered. Called from the background thread.
     */
    private void backupFrame() {
        if (mBackupSaved) {
            return;
        }

        if (mBackup == null) {
            mBackup = null; // NOTE(review): redundant dead store — mBackup is already null here.
            try {
                mBackup = new int[mColors.length];
            } catch (OutOfMemoryError e) {
                // Best effort: without a backup buffer, DISPOSAL_METHOD_RESTORE degrades.
                Log.e(TAG, "GifDrawable.backupFrame threw an OOME", e);
            }
        }

        if (mBackup != null) {
            System.arraycopy(mColors, 0, mBackup, 0, mColors.length);
            mBackupSaved = true;
        }
    }

    /**
     * Decodes LZW image data into pixel array. Called from the background thread.
     */
    private void decodeBitmapData() {
        int npix = mFrameWidth * mFrameHeight;

        // Initialize GIF data stream decoder.
        int dataSize = mData[mPosition++] & 0xff;
        int clear = 1 << dataSize;          // LZW "clear" code
        int endOfInformation = clear + 1;   // LZW end-of-information code
        int available = clear + 2;          // next free dictionary slot
        int oldCode = -1;
        int codeSize = dataSize + 1;
        int codeMask = (1 << codeSize) - 1;
        // Seed the dictionary with the single-byte root codes.
        for (int code = 0; code < clear; code++) {
            mPrefix[code] = 0; // XXX ArrayIndexOutOfBoundsException
            mSuffix[code] = (byte) code;
        }

        // Decode GIF pixel stream.
        int datum = 0;  // bit accumulator
        int bits = 0;   // number of valid bits in datum
        int first = 0;
        int top = 0;    // pixel stack depth
        int pi = 0;     // output pixel index
        while (pi < npix) {
            int blockSize = mData[mPosition++] & 0xff;
            if (blockSize == 0) {
                break;
            }
            int blockEnd = mPosition + blockSize;
            while (mPosition < blockEnd) {
                datum += (mData[mPosition++] & 0xff) << bits;
                bits += 8;
                while (bits >= codeSize) {
                    // Get the next code.
                    int code = datum & codeMask;
                    datum >>= codeSize;
                    bits -= codeSize;

                    // Interpret the code
                    if (code == clear) {
                        // Reset decoder.
                        codeSize = dataSize + 1;
                        codeMask = (1 << codeSize) - 1;
                        available = clear + 2;
                        oldCode = -1;
                        continue;
                    }

                    // Check for explicit end-of-stream
                    if (code == endOfInformation) {
                        mPosition = blockEnd;
                        return;
                    }

                    if (oldCode == -1) {
                        // First code after a clear: emit its root pixel directly.
                        mPixels[pi++] = mSuffix[code];
                        oldCode = code;
                        first = code;
                        continue;
                    }

                    int inCode = code;
                    if (code >= available) {
                        // KwKwK case: code not yet in dictionary.
                        mPixelStack[top++] = (byte) first;
                        code = oldCode;
                        if (top == MAX_BITS) {
                            mError = true;
                            return;
                        }
                    }

                    // Walk the prefix chain, pushing suffix bytes onto the stack.
                    while (code >= clear) {
                        if (code >= MAX_BITS || code == mPrefix[code]) {
                            // Corrupt stream: self-referential or out-of-range prefix.
                            mError = true;
                            return;
                        }
                        mPixelStack[top++] = mSuffix[code];
                        code = mPrefix[code];

                        if (top == MAX_BITS) {
                            mError = true;
                            return;
                        }
                    }

                    first = mSuffix[code];
                    mPixelStack[top++] = (byte) first;

                    // Add new code to the dictionary
                    if (available < MAX_STACK_SIZE) {
                        mPrefix[available] = (short) oldCode;
                        mSuffix[available] = (byte) first;
                        available++;
                        // Grow the code size when the dictionary crosses a power of two.
                        if (((available & codeMask) == 0) && (available < MAX_STACK_SIZE)) {
                            codeSize++;
                            codeMask += available;
                        }
                    }
                    oldCode = inCode;

                    // Drain the pixel stack.
                    do {
                        mPixels[pi++] = mPixelStack[--top];
                    } while (top > 0);
                }
            }
        }

        while (pi < npix) {
            mPixels[pi++] = 0; // clear missing pixels
        }
    }

    /**
     * Populates the color array with pixels for the next frame.
     */
    private void populateImageData() {
        // Copy each source line to the appropriate place in the destination
        int pass = 1;
        int inc = 8;
        int iline = 0;
        for (int i = 0; i < mFrameHeight; i++) {
            int line = i;
            if (mInterlace) {
                // GIF interlace passes: rows 0,8,16... then 4,12..., then 2,6..., then odd rows.
                if (iline >= mFrameHeight) {
                    pass++;
                    switch (pass) {
                        case 2:
                            iline = 4;
                            break;
                        case 3:
                            iline = 2;
                            inc = 4;
                            break;
                        case 4:
                            iline = 1;
                            inc = 2;
                            break;
                        default:
                            break;
                    }
                }
                line = iline;
                iline += inc;
            }
            line += mFrameY;
            if (line < mIntrinsicHeight) {
                int k = line * mIntrinsicWidth;
                int dx = k + mFrameX; // start of line in dest
                int dlim = dx + mFrameWidth; // end of dest line
                // It is unnecessary to test if dlim is beyond the edge of the destination line,
                // since mFrameWidth is clamped to a maximum of mIntrinsicWidth - mFrameX.
                int sx = i * mFrameStep; // start of line in source
                while (dx < dlim) {
                    // map color and insert in destination
                    int index = mPixels[sx++] & 0xff;
                    int c = mActiveColorTable[index];
                    // Zero means transparent here (see readBitmap's temporary remap).
                    if (c != 0) {
                        mColors[dx] = c;
                    }
                    dx++;
                }
            }
        }
    }

    /**
     * Reads next variable length block from input. Called from the background thread.
     *
     * @return number of bytes stored in "buffer"
     */
    private int readBlock() {
        int blockSize = mData[mPosition++] & 0xff;
        if (blockSize > 0) {
            System.arraycopy(mData, mPosition, mBlock, 0, blockSize);
            mPosition += blockSize;
        }
        return blockSize;
    }

    /**
     * Reads next 16-bit value, LSB first. Called from the background thread.
     */
    private int readShort() {
        // read 16-bit value, LSB first
        int byte1 = mData[mPosition++] & 0xff;
        int byte2 = mData[mPosition++] & 0xff;
        return byte1 | (byte2 << 8);
    }

    /**
     * Skips variable length blocks up to and including next zero length block.
     * Called from the background thread.
     */
    private void skip() {
        int blockSize;
        do {
            blockSize = mData[mPosition++] & 0xff;
            mPosition += blockSize;
        } while (blockSize > 0);
    }

    @Override
    public boolean handleMessage(Message msg) {
        // Runs on the main thread: receives a decoded frame from the decoder thread.
        if (msg.what == BaseGifDrawable.READ_FRAME_RESP) {
            mFrameDelay = msg.arg1;
            if (mBitmap != null) {
                mBitmap.setPixels(mColors, 0, mIntrinsicWidth, 0, 0,
                        mIntrinsicWidth, mIntrinsicHeight);
                postProcessFrame(mBitmap);
                mFirstFrameReady = true;
                mScheduled = false;
                invalidateSelf();
            }
            return true;
        }
        return false;
    }

    /**
     * Gives a subclass a chance to apply changes to the mutable bitmap
     * before showing the frame.
     */
    protected void postProcessFrame(Bitmap bitmap) {
    }

    /**
     * Background thread that handles reading and decoding frames of GIF images.
*/ private static class DecoderThread extends HandlerThread implements android.os.Handler.Callback { private static final String DECODER_THREAD_NAME = "GifDecoder"; public DecoderThread() { super(DECODER_THREAD_NAME); } @Override public boolean handleMessage(Message msg) { BaseGifDrawable gif = (BaseGifDrawable) msg.obj; if (gif == null || gif.mBitmap == null || gif.mRecycled) { return true; } switch (msg.what) { case READ_FRAME_REQ: // Processed on background thread do { try { gif.readNextFrame(); } catch (ArrayIndexOutOfBoundsException e) { gif.mEndOfFile = true; } // Check for EOF if (gif.mEndOfFile) { if (gif.mFrameCount == 0) { // could not read first frame gif.mError = true; } else if (gif.mFrameCount > 1) { if (gif.mLoopCount == 0 || ++gif.mLoopIndex < gif.mLoopCount) { // Repeat the animation gif.reset(); } else { gif.mDone = true; } } else { // Only one frame. Mark as done. gif.mDone = true; } } } while (gif.mEndOfFile && !gif.mError && !gif.mDone); gif.mHandler.sendMessage(gif.mHandler.obtainMessage(READ_FRAME_RESP, gif.mNextFrameDelay, 0)); return true; case RESET_DECODER: gif.reset(); return true; } return false; } } }
// Copyright 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.ui.base;

import android.Manifest;
import android.annotation.TargetApi;
import android.app.Activity;
import android.content.ClipData;
import android.content.ContentResolver;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Build;
import android.provider.MediaStore;
import android.text.TextUtils;
import android.util.Log;

import org.chromium.base.ContentUriUtils;
import org.chromium.base.VisibleForTesting;
import org.chromium.base.annotations.CalledByNative;
import org.chromium.base.annotations.JNINamespace;
import org.chromium.ui.R;
import org.chromium.ui.UiUtils;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

/**
 * A dialog that is triggered from a file input field that allows a user to select a file based on
 * a set of accepted file types. The path of the selected file is passed to the native dialog.
 */
@JNINamespace("ui")
public class SelectFileDialog
        implements WindowAndroid.IntentCallback, WindowAndroid.PermissionCallback {
    private static final String TAG = "SelectFileDialog";
    private static final String IMAGE_TYPE = "image/";
    private static final String VIDEO_TYPE = "video/";
    private static final String AUDIO_TYPE = "audio/";
    private static final String ALL_IMAGE_TYPES = IMAGE_TYPE + "*";
    private static final String ALL_VIDEO_TYPES = VIDEO_TYPE + "*";
    private static final String ALL_AUDIO_TYPES = AUDIO_TYPE + "*";
    private static final String ANY_TYPES = "*/*";

    /**
     * If set, overrides the WindowAndroid passed in {@link selectFile()}.
     */
    private static WindowAndroid sOverrideWindowAndroid = null;

    private final long mNativeSelectFileDialog;
    private List<String> mFileTypes;     // accepted MIME types from the web page
    private boolean mCapture;            // true when html-media-capture was requested
    private boolean mAllowMultiple;      // allow multi-file selection (JB MR2+)
    private Uri mCameraOutputUri;        // destination for a captured photo
    private WindowAndroid mWindowAndroid;

    private boolean mSupportsImageCapture;
    private boolean mSupportsVideoCapture;
    private boolean mSupportsAudioCapture;

    private SelectFileDialog(long nativeSelectFileDialog) {
        mNativeSelectFileDialog = nativeSelectFileDialog;
    }

    /**
     * Overrides the WindowAndroid passed in {@link selectFile()}.
     */
    @VisibleForTesting
    public static void setWindowAndroidForTests(WindowAndroid window) {
        sOverrideWindowAndroid = window;
    }

    /**
     * Creates and starts an intent based on the passed fileTypes and capture value.
     * @param fileTypes MIME types requested (i.e. "image/*")
     * @param capture The capture value as described in http://www.w3.org/TR/html-media-capture/
     * @param multiple Whether it should be possible to select multiple files.
     * @param window The WindowAndroid that can show intents
     */
    @TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
    @CalledByNative
    private void selectFile(
            String[] fileTypes, boolean capture, boolean multiple, WindowAndroid window) {
        mFileTypes = new ArrayList<String>(Arrays.asList(fileTypes));
        mCapture = capture;
        mAllowMultiple = multiple;
        mWindowAndroid = (sOverrideWindowAndroid == null) ? window : sOverrideWindowAndroid;

        mSupportsImageCapture =
                mWindowAndroid.canResolveActivity(new Intent(MediaStore.ACTION_IMAGE_CAPTURE));
        mSupportsVideoCapture =
                mWindowAndroid.canResolveActivity(new Intent(MediaStore.ACTION_VIDEO_CAPTURE));
        mSupportsAudioCapture =
                mWindowAndroid.canResolveActivity(
                        new Intent(MediaStore.Audio.Media.RECORD_SOUND_ACTION));

        // Collect any runtime permissions still needed for the capture intents we may launch.
        List<String> missingPermissions = new ArrayList<>();
        if (((mSupportsImageCapture && shouldShowImageTypes())
                || (mSupportsVideoCapture && shouldShowVideoTypes()))
                && !window.hasPermission(Manifest.permission.CAMERA)) {
            missingPermissions.add(Manifest.permission.CAMERA);
        }
        if (mSupportsAudioCapture && shouldShowAudioTypes()
                && !window.hasPermission(Manifest.permission.RECORD_AUDIO)) {
            missingPermissions.add(Manifest.permission.RECORD_AUDIO);
        }

        if (missingPermissions.isEmpty()) {
            launchSelectFileIntent();
        } else {
            // Continues in onRequestPermissionsResult().
            window.requestPermissions(
                    missingPermissions.toArray(new String[missingPermissions.size()]), this);
        }
    }

    /**
     * Builds and shows the file-selection UI: either a direct capture intent (when the
     * |capture| attribute applies) or an ACTION_CHOOSER wrapping ACTION_GET_CONTENT plus
     * any applicable capture intents.
     */
    private void launchSelectFileIntent() {
        boolean hasCameraPermission = mWindowAndroid.hasPermission(Manifest.permission.CAMERA);
        boolean hasAudioPermission =
                mWindowAndroid.hasPermission(Manifest.permission.RECORD_AUDIO);

        Intent camera = null;
        if (mSupportsImageCapture && hasCameraPermission) {
            camera = getCameraIntent(mWindowAndroid.getApplicationContext());

            // The camera intent can be null if we are unable to generate the output URI. If this
            // occurs while we are in camera capture mode, early exit as there is nothing we can
            // do at this point.
            if (camera == null && captureCamera()) {
                onFileNotSelected();
                return;
            }
        }

        Intent camcorder = null;
        if (mSupportsVideoCapture && hasCameraPermission) {
            camcorder = new Intent(MediaStore.ACTION_VIDEO_CAPTURE);
        }

        Intent soundRecorder = null;
        if (mSupportsAudioCapture && hasAudioPermission) {
            soundRecorder = new Intent(MediaStore.Audio.Media.RECORD_SOUND_ACTION);
        }

        // Quick check - if the |capture| parameter is set and |fileTypes| has the appropriate MIME
        // type, we should just launch the appropriate intent. Otherwise build up a chooser based
        // on the accept type and then display that to the user.
        if (captureCamera() && camera != null) {
            if (mWindowAndroid.showIntent(camera, this, R.string.low_memory_error)) return;
        } else if (captureCamcorder() && camcorder != null) {
            if (mWindowAndroid.showIntent(camcorder, this, R.string.low_memory_error)) return;
        } else if (captureMicrophone() && soundRecorder != null) {
            if (mWindowAndroid.showIntent(soundRecorder, this, R.string.low_memory_error)) return;
        }

        Intent getContentIntent = new Intent(Intent.ACTION_GET_CONTENT);
        getContentIntent.addCategory(Intent.CATEGORY_OPENABLE);
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2 && mAllowMultiple) {
            getContentIntent.putExtra(Intent.EXTRA_ALLOW_MULTIPLE, true);
        }

        ArrayList<Intent> extraIntents = new ArrayList<Intent>();
        if (!noSpecificType()) {
            // Create a chooser based on the accept type that was specified in the webpage. Note
            // that if the web page specified multiple accept types, we will have built a generic
            // chooser above.
            if (shouldShowImageTypes()) {
                if (camera != null) extraIntents.add(camera);
                getContentIntent.setType(ALL_IMAGE_TYPES);
            } else if (shouldShowVideoTypes()) {
                if (camcorder != null) extraIntents.add(camcorder);
                getContentIntent.setType(ALL_VIDEO_TYPES);
            } else if (shouldShowAudioTypes()) {
                if (soundRecorder != null) extraIntents.add(soundRecorder);
                getContentIntent.setType(ALL_AUDIO_TYPES);
            }
        }

        if (extraIntents.isEmpty()) {
            // We couldn't resolve an accept type, so fallback to a generic chooser.
            getContentIntent.setType(ANY_TYPES);
            if (camera != null) extraIntents.add(camera);
            if (camcorder != null) extraIntents.add(camcorder);
            if (soundRecorder != null) extraIntents.add(soundRecorder);
        }

        Intent chooser = new Intent(Intent.ACTION_CHOOSER);
        if (!extraIntents.isEmpty()) {
            chooser.putExtra(Intent.EXTRA_INITIAL_INTENTS,
                    extraIntents.toArray(new Intent[] { }));
        }
        chooser.putExtra(Intent.EXTRA_INTENT, getContentIntent);

        if (!mWindowAndroid.showIntent(chooser, this, R.string.low_memory_error)) {
            onFileNotSelected();
        }
    }

    /**
     * Builds an ACTION_IMAGE_CAPTURE intent whose output URI points at a temp file we own.
     *
     * @return the camera intent, or null when the output URI could not be created.
     */
    private Intent getCameraIntent(Context context) {
        Intent camera = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
        // Grant the camera app read/write access to our content URI.
        camera.setFlags(Intent.FLAG_GRANT_READ_URI_PERMISSION
                | Intent.FLAG_GRANT_WRITE_URI_PERMISSION);
        try {
            mCameraOutputUri = UiUtils.getUriForImageCaptureFile(
                    context, getFileForImageCapture(context));
        } catch (IOException e) {
            Log.e(TAG, "Cannot retrieve content uri from file", e);
        }
        if (mCameraOutputUri == null) return null;

        camera.putExtra(MediaStore.EXTRA_OUTPUT, mCameraOutputUri);
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
            camera.setClipData(ClipData.newUri(
                    context.getContentResolver(), UiUtils.IMAGE_FILE_PATH, mCameraOutputUri));
        }
        return camera;
    }

    /**
     * Get a file for the image capture operation. For devices with JB MR2 or
     * latter android versions, the file is put under IMAGE_FILE_PATH directory.
     * For ICS devices, the file is put under CAPTURE_IMAGE_DIRECTORY.
     *
     * @param context The application context.
     * @return file path for the captured image to be stored.
     */
    private File getFileForImageCapture(Context context) throws IOException {
        // Timestamp-based name avoids collisions between successive captures.
        File photoFile = File.createTempFile(String.valueOf(System.currentTimeMillis()),
                ".jpg", UiUtils.getDirectoryForImageCapture(context));
        return photoFile;
    }

    /**
     * Callback method to handle the intent results and pass on the path to the native
     * SelectFileDialog.
     * @param window The window that has access to the application activity.
     * @param resultCode The result code whether the intent returned successfully.
     * @param contentResolver The content resolver used to extract the path of the selected file.
     * @param results The results of the requested intent.
     */
    @TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
    @Override
    public void onIntentCompleted(WindowAndroid window, int resultCode,
            ContentResolver contentResolver, Intent results) {
        if (resultCode != Activity.RESULT_OK) {
            onFileNotSelected();
            return;
        }

        if (results == null
                || (results.getData() == null
                        && (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR2
                                || results.getClipData() == null))) {
            // If we have a successful return but no data, then assume this is the camera returning
            // the photo that we requested.
            // If the uri is a file, we need to convert it to the absolute path or otherwise
            // android cannot handle it correctly on some earlier versions.
            // http://crbug.com/423338.
            // NOTE(review): mCameraOutputUri may be null here if no camera intent was built
            // (e.g. a third-party app returned OK with no data) — verify against upstream.
            String path = ContentResolver.SCHEME_FILE.equals(mCameraOutputUri.getScheme())
                    ? mCameraOutputUri.getPath() : mCameraOutputUri.toString();
            nativeOnFileSelected(mNativeSelectFileDialog, path,
                    mCameraOutputUri.getLastPathSegment());
            // Broadcast to the media scanner that there's a new photo on the device so it will
            // show up right away in the gallery (rather than waiting until the next time the media
            // scanner runs).
            window.sendBroadcast(new Intent(
                    Intent.ACTION_MEDIA_SCANNER_SCAN_FILE, mCameraOutputUri));
            return;
        }

        // Path for when EXTRA_ALLOW_MULTIPLE Intent extra has been defined. Each of the selected
        // files will be shared as an entry on the Intent's ClipData. This functionality is only
        // available in Android JellyBean MR2 and higher.
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2
                && results.getData() == null && results.getClipData() != null) {
            ClipData clipData = results.getClipData();

            int itemCount = clipData.getItemCount();
            if (itemCount == 0) {
                onFileNotSelected();
                return;
            }

            Uri[] filePathArray = new Uri[itemCount];
            for (int i = 0; i < itemCount; ++i) {
                filePathArray[i] = clipData.getItemAt(i).getUri();
            }
            GetDisplayNameTask task = new GetDisplayNameTask(contentResolver, true);
            task.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, filePathArray);
            return;
        }

        if (ContentResolver.SCHEME_FILE.equals(results.getData().getScheme())) {
            nativeOnFileSelected(
                    mNativeSelectFileDialog, results.getData().getSchemeSpecificPart(), "");
            return;
        }

        // Intent.getScheme() returns the scheme of the intent's data URI.
        if (ContentResolver.SCHEME_CONTENT.equals(results.getScheme())) {
            // Resolve the display name asynchronously; the content provider may block.
            GetDisplayNameTask task = new GetDisplayNameTask(contentResolver, false);
            task.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, results.getData());
            return;
        }

        onFileNotSelected();
        window.showError(R.string.opening_file_error);
    }

    @Override
    public void onRequestPermissionsResult(String[] permissions, int[] grantResults) {
        // A denied permission is only fatal when a specific capture was requested;
        // otherwise the generic chooser can still be shown.
        for (int i = 0; i < grantResults.length; i++) {
            if (grantResults[i] == PackageManager.PERMISSION_DENIED && mCapture) {
                onFileNotSelected();
                return;
            }
        }
        launchSelectFileIntent();
    }

    private void onFileNotSelected() {
        nativeOnFileNotSelected(mNativeSelectFileDialog);
    }

    private boolean noSpecificType() {
        // We use a single Intent to decide the type of the file chooser we display to the user,
        // which means we can only give it a single type. If there are multiple accept types
        // specified, we will fallback to a generic chooser (unless a capture parameter has been
        // specified, in which case we'll try to satisfy that first).
        return mFileTypes.size() != 1 || mFileTypes.contains(ANY_TYPES);
    }

    private boolean shouldShowTypes(String allTypes, String specificType) {
        if (noSpecificType() || mFileTypes.contains(allTypes)) return true;
        return acceptSpecificType(specificType);
    }

    private boolean shouldShowImageTypes() {
        return shouldShowTypes(ALL_IMAGE_TYPES, IMAGE_TYPE);
    }

    private boolean shouldShowVideoTypes() {
        return shouldShowTypes(ALL_VIDEO_TYPES, VIDEO_TYPE);
    }

    private boolean shouldShowAudioTypes() {
        return shouldShowTypes(ALL_AUDIO_TYPES, AUDIO_TYPE);
    }

    // NOTE(review): acceptsSpecificType (exact match) and acceptSpecificType (prefix match)
    // differ only by one letter; consider renaming for clarity.
    private boolean acceptsSpecificType(String type) {
        return mFileTypes.size() == 1 && TextUtils.equals(mFileTypes.get(0), type);
    }

    private boolean captureCamera() {
        return mCapture && acceptsSpecificType(ALL_IMAGE_TYPES);
    }

    private boolean captureCamcorder() {
        return mCapture && acceptsSpecificType(ALL_VIDEO_TYPES);
    }

    private boolean captureMicrophone() {
        return mCapture && acceptsSpecificType(ALL_AUDIO_TYPES);
    }

    // Prefix match: true when any accepted type starts with |accept| (e.g. "image/").
    private boolean acceptSpecificType(String accept) {
        for (String type : mFileTypes) {
            if (type.startsWith(accept)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Resolves human-readable display names for the selected content URIs off the UI thread,
     * then forwards the result to the native dialog.
     */
    private class GetDisplayNameTask extends AsyncTask<Uri, Void, String[]> {
        String[] mFilePaths;
        final ContentResolver mContentResolver;
        final boolean mIsMultiple;

        public GetDisplayNameTask(ContentResolver contentResolver, boolean isMultiple) {
            mContentResolver = contentResolver;
            mIsMultiple = isMultiple;
        }

        @Override
        protected String[] doInBackground(Uri...uris) {
            mFilePaths = new String[uris.length];
            String[] displayNames = new String[uris.length];
            try {
                for (int i = 0; i < uris.length; i++) {
                    mFilePaths[i] = uris[i].toString();
                    displayNames[i] = ContentUriUtils.getDisplayName(
                            uris[i], mContentResolver, MediaStore.MediaColumns.DISPLAY_NAME);
                }
            }  catch (SecurityException e) {
                // Some third party apps will present themselves as being able
                // to handle the ACTION_GET_CONTENT intent but then declare themselves
                // as exported=false (or more often omit the exported keyword in
                // the manifest which defaults to false after JB).
                // In those cases trying to access the contents raises a security exception
                // which we should not crash on. See crbug.com/382367 for details.
                Log.w(TAG, "Unable to extract results from the content provider");
                return null;
            }

            return displayNames;
        }

        @Override
        protected void onPostExecute(String[] result) {
            if (result == null) {
                onFileNotSelected();
                return;
            }
            if (mIsMultiple) {
                nativeOnMultipleFilesSelected(mNativeSelectFileDialog, mFilePaths, result);
            } else {
                nativeOnFileSelected(mNativeSelectFileDialog, mFilePaths[0], result[0]);
            }
        }
    }

    @CalledByNative
    private static SelectFileDialog create(long nativeSelectFileDialog) {
        return new SelectFileDialog(nativeSelectFileDialog);
    }

    private native void nativeOnFileSelected(long nativeSelectFileDialogImpl,
            String filePath, String displayName);
    private native void nativeOnMultipleFilesSelected(long nativeSelectFileDialogImpl,
            String[] filePathArray, String[] displayNameArray);
    private native void nativeOnFileNotSelected(long nativeSelectFileDialogImpl);
}
/*
 * Copyright 2014 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package azkaban.project;

import java.io.File;
import java.io.IOException;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import azkaban.project.ProjectLogEvent.EventType;
import azkaban.flow.Flow;
import azkaban.user.Permission;
import azkaban.user.User;
import azkaban.utils.Props;
import azkaban.utils.Triple;

/**
 * Test double for {@link ProjectLoader}. Nearly every method is an inert stub
 * (returns null/false/0); the one real behavior is
 * {@link #fetchProjectProperties(int, int)}, which loads .job/.properties files
 * from the directory supplied at construction time.
 */
public class MockProjectLoader implements ProjectLoader {
  // Directory scanned by fetchProjectProperties(); exposed for test setup.
  public File dir;

  public MockProjectLoader(File dir) {
    this.dir = dir;
  }

  @Override
  public List<Project> fetchAllActiveProjects() throws ProjectManagerException {
    // TODO Auto-generated method stub
    return null;
  }

  @Override
  public Project fetchProjectById(int id) throws ProjectManagerException {
    // TODO Auto-generated method stub
    return null;
  }

  @Override
  public Project createNewProject(String name, String description, User creator)
      throws ProjectManagerException {
    // TODO Auto-generated method stub
    return null;
  }

  @Override
  public void removeProject(Project project, String user)
      throws ProjectManagerException {
    // TODO Auto-generated method stub
  }

  @Override
  public void updatePermission(Project project, String name, Permission perm,
      boolean isGroup) throws ProjectManagerException {
    // TODO Auto-generated method stub
  }

  @Override
  public void updateDescription(Project project, String description, String user)
      throws ProjectManagerException {
    // TODO Auto-generated method stub
  }

  @Override
  public boolean postEvent(Project project, EventType type, String user,
      String message) {
    // TODO Auto-generated method stub
    return false;
  }

  @Override
  public List<ProjectLogEvent> getProjectEvents(Project project, int num,
      int skip) throws ProjectManagerException {
    // TODO Auto-generated method stub
    return null;
  }

  @Override
  public void uploadProjectFile(Project project, int version, String filetype,
      String filename, File localFile, String user)
      throws ProjectManagerException {
    // TODO Auto-generated method stub
  }

  @Override
  public ProjectFileHandler getUploadedFile(Project project, int version)
      throws ProjectManagerException {
    // TODO Auto-generated method stub
    return null;
  }

  @Override
  public ProjectFileHandler getUploadedFile(int projectId, int version)
      throws ProjectManagerException {
    // TODO Auto-generated method stub
    return null;
  }

  @Override
  public void changeProjectVersion(Project project, int version, String user)
      throws ProjectManagerException {
    // TODO Auto-generated method stub
  }

  @Override
  public void uploadFlows(Project project, int version, Collection<Flow> flows)
      throws ProjectManagerException {
    // TODO Auto-generated method stub
  }

  @Override
  public void uploadFlow(Project project, int version, Flow flow)
      throws ProjectManagerException {
    // TODO Auto-generated method stub
  }

  @Override
  public Flow fetchFlow(Project project, String flowId)
      throws ProjectManagerException {
    // TODO Auto-generated method stub
    return null;
  }

  @Override
  public List<Flow> fetchAllProjectFlows(Project project)
      throws ProjectManagerException {
    // TODO Auto-generated method stub
    return null;
  }

  @Override
  public int getLatestProjectVersion(Project project)
      throws ProjectManagerException {
    // TODO Auto-generated method stub
    return 0;
  }

  @Override
  public void uploadProjectProperty(Project project, Props props)
      throws ProjectManagerException {
    // TODO Auto-generated method stub
  }

  @Override
  public void uploadProjectProperties(Project project, List<Props> properties)
      throws ProjectManagerException {
    // TODO Auto-generated method stub
  }

  @Override
  public Props fetchProjectProperty(Project project, String propsName)
      throws ProjectManagerException {
    // TODO Auto-generated method stub
    return null;
  }

  /**
   * Loads every .job and .properties file in {@link #dir} into a map keyed by
   * file name. projectId/version are ignored by this mock.
   * NOTE(review): dir.listFiles() returns null when dir does not exist or is
   * not a directory — callers must ensure dir is valid, or this throws NPE.
   */
  @Override
  public Map<String, Props> fetchProjectProperties(int projectId, int version)
      throws ProjectManagerException {
    Map<String, Props> propertyMap = new HashMap<String, Props>();
    for (File file : dir.listFiles()) {
      String name = file.getName();
      if (name.endsWith(".job") || name.endsWith(".properties")) {
        try {
          Props props = new Props(null, file);
          propertyMap.put(name, props);
        } catch (IOException e) {
          // Wrap the IO failure in the project-manager exception the interface declares.
          throw new ProjectManagerException(e.getMessage());
        }
      }
    }
    return propertyMap;
  }

  @Override
  public void cleanOlderProjectVersion(int projectId, int version)
      throws ProjectManagerException {
    // TODO Auto-generated method stub
  }

  @Override
  public void removePermission(Project project, String name, boolean isGroup)
      throws ProjectManagerException {
    // TODO Auto-generated method stub
  }

  @Override
  public void updateProjectProperty(Project project, Props props)
      throws ProjectManagerException {
    // TODO Auto-generated method stub
  }

  @Override
  public Props fetchProjectProperty(int projectId, int projectVer,
      String propsName) throws ProjectManagerException {
    // TODO Auto-generated method stub
    return null;
  }

  @Override
  public List<Triple<String, Boolean, Permission>> getProjectPermissions(
      int projectId) throws ProjectManagerException {
    // TODO Auto-generated method stub
    return null;
  }

  @Override
  public void updateProjectSettings(Project project)
      throws ProjectManagerException {
    // TODO Auto-generated method stub
  }

  @Override
  public void updateFlow(Project project, int version, Flow flow)
      throws ProjectManagerException {
    // TODO Auto-generated method stub
  }
}
package com.planet_ink.coffee_mud.CharClasses; import com.planet_ink.coffee_mud.core.interfaces.*; import com.planet_ink.coffee_mud.core.*; import com.planet_ink.coffee_mud.core.collections.*; import com.planet_ink.coffee_mud.Abilities.interfaces.*; import com.planet_ink.coffee_mud.Areas.interfaces.*; import com.planet_ink.coffee_mud.Behaviors.interfaces.*; import com.planet_ink.coffee_mud.CharClasses.interfaces.*; import com.planet_ink.coffee_mud.CharClasses.interfaces.CharClass.SubClassRule; import com.planet_ink.coffee_mud.Commands.interfaces.*; import com.planet_ink.coffee_mud.Common.interfaces.*; import com.planet_ink.coffee_mud.Exits.interfaces.*; import com.planet_ink.coffee_mud.Items.interfaces.*; import com.planet_ink.coffee_mud.Libraries.interfaces.*; import com.planet_ink.coffee_mud.Locales.interfaces.*; import com.planet_ink.coffee_mud.MOBS.interfaces.*; import com.planet_ink.coffee_mud.Races.interfaces.*; import java.util.*; /* Copyright 2004-2022 Bo Zimmerman Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/
/**
 * The Apprentice character class: a single-level (level cap 1) starter class
 * based on Commoner.  Players who move on to another class are rewarded with
 * bonus experience (see {@link #tick}) and extra common/crafting skill slots
 * (see {@link #startCharacter}).
 */
public class Apprentice extends StdCharClass
{
	@Override
	public String ID()
	{
		return "Apprentice";
	}

	// Localized display name, resolved once at class-load time.
	private final static String localizedStaticName = CMLib.lang().L("Apprentice");

	@Override
	public String name()
	{
		return localizedStaticName;
	}

	@Override
	public String baseClass()
	{
		return "Commoner";
	}

	@Override
	public int getBonusPracLevel()
	{
		return 5;
	}

	@Override
	public int getBonusAttackLevel()
	{
		return -1;
	}

	@Override
	public int getAttackAttribute()
	{
		return CharStats.STAT_WISDOM;
	}

	@Override
	public int getLevelsPerBonusDamage()
	{
		return 10;
	}

	@Override
	public int getTrainsFirstLevel()
	{
		return 6;
	}

	@Override
	public String getHitPointsFormula()
	{
		return "((@x6<@x7)/9)+(1*(1?4))";
	}

	@Override
	public String getManaFormula()
	{
		return "((@x4<@x5)/10)+(1*(1?2))";
	}

	// Apprentices never advance past level 1; they must sub-class out.
	@Override
	public int getLevelCap()
	{
		return 1;
	}

	@Override
	public SubClassRule getSubClassRule()
	{
		return SubClassRule.ANY;
	}

	@Override
	public int allowedArmorLevel()
	{
		return CharClass.ARMOR_CLOTH;
	}

	@Override
	public int allowedWeaponLevel()
	{
		return CharClass.WEAPONS_DAGGERONLY;
	}

	private final Set<Integer> disallowedWeapons = buildDisallowedWeaponClasses();

	@Override
	protected Set<Integer> disallowedWeaponClasses(final MOB mob)
	{
		return disallowedWeapons;
	}

	// Player mobs currently known to hold this class; membership changes drive
	// the graduation bonus in tick().  NOTE(review): plain HashSet — assumes
	// tick callbacks are effectively single-threaded; confirm before reuse.
	protected Set<Tickable> currentApprentices = new HashSet<Tickable>();

	@Override
	public void initializeClass()
	{
		super.initializeClass();
		// Level-1 ability grants for this class.
		CMLib.ableMapper().addCharAbilityMapping(ID(),1,"Skill_Write",true);
		CMLib.ableMapper().addCharAbilityMapping(ID(),1,"Specialization_Natural",false);
		CMLib.ableMapper().addCharAbilityMapping(ID(),1,"Skill_Recall",25,true);
		CMLib.ableMapper().addCharAbilityMapping(ID(),1,"Skill_Swim",false);
		CMLib.ableMapper().addCharAbilityMapping(ID(),1,"Skill_Climb",true);
		CMLib.ableMapper().addCharAbilityMapping(ID(),1,"ClanCrafting",false);
	}

	@Override
	public int availabilityCode()
	{
		return Area.THEME_FANTASY | Area.THEME_HEROIC | Area.THEME_TECHNOLOGY;
	}

	/**
	 * A mob qualifies only at level 1 (or as a brand-new character still on
	 * the placeholder StdCharClass).
	 */
	@Override
	public boolean qualifiesForThisClass(final MOB mob, final boolean quiet)
	{
		if(!super.qualifiesForThisClass(mob, quiet))
			return false;
		if(mob==null)
			return true;
		final CharClass curClass = mob.baseCharStats().getCurrentClass();
		final String currentClassID=curClass.ID();
		if(currentClassID.equalsIgnoreCase("StdCharClass")) // this is the starting character rule
			return true;
		if(mob.basePhyStats().level()>1)
		{
			if(!quiet)
				mob.tell(L("You are beyond apprentice skill at this point."));
			return false;
		}
		return true;
	}

	/**
	 * Tracks player mobs in/out of this class.  When a tracked player is seen
	 * with a different class, they have "graduated": notify them and grant a
	 * one-time 1000 xp bonus.
	 */
	@Override
	public boolean tick(final Tickable ticking, final int tickID)
	{
		if((tickID==Tickable.TICKID_MOB)
		&&(ticking instanceof MOB)
		&&(!((MOB)ticking).isMonster()))
		{
			if(((MOB)ticking).baseCharStats().getCurrentClass().ID().equals(ID()))
			{
				if(!currentApprentices.contains(ticking))
					currentApprentices.add(ticking);
			}
			else
			if(currentApprentices.contains(ticking))
			{
				currentApprentices.remove(ticking);
				((MOB)ticking).tell(L("\n\r\n\r^ZYou are no longer an apprentice!!!!^N\n\r\n\r"));
				CMLib.leveler().postExperience((MOB)ticking,null,null,1000,false);
			}
		}
		return super.tick(ticking,tickID);
	}

	// Any race may become an Apprentice.
	private final String[] raceRequiredList = new String[] { "All" };

	@Override
	public String[] getRequiredRaceList()
	{
		return raceRequiredList;
	}

	// Minimum 5 Wisdom and 5 Intelligence to take this class.
	@SuppressWarnings("unchecked")
	private final Pair<String, Integer>[] minimumStatRequirements = new Pair[]
	{
		new Pair<String, Integer>("Wisdom", Integer.valueOf(5)),
		new Pair<String, Integer>("Intelligence", Integer.valueOf(5))
	};

	@Override
	public Pair<String, Integer>[] getMinimumStatRequirements()
	{
		return minimumStatRequirements;
	}

	/**
	 * On character creation (not verification), grants +1 to each of the
	 * common, crafting, and non-crafting bonus skill limits.
	 */
	@Override
	public void startCharacter(final MOB mob, final boolean isBorrowedClass, final boolean verifyOnly)
	{
		super.startCharacter(mob, isBorrowedClass, verifyOnly);
		if(!verifyOnly)
		{
			if(mob.playerStats()!=null)
			{
				mob.playerStats().setBonusCommonSkillLimits(mob.playerStats().getBonusCommonSkillLimits()+1);
				mob.playerStats().setBonusCraftingSkillLimits(mob.playerStats().getBonusCraftingSkillLimits()+1);
				mob.playerStats().setBonusNonCraftingSkillLimits(mob.playerStats().getBonusNonCraftingSkillLimits()+1);
			}
		}
	}

	/**
	 * Starting equipment: a single dagger (lazily resolved and cached in the
	 * shared outfitChoices list).
	 */
	@Override
	public List<Item> outfit(final MOB myChar)
	{
		if(outfitChoices==null)
		{
			final Weapon w=CMClass.getWeapon("Dagger");
			if(w == null)
				return new Vector<Item>();
			outfitChoices=new Vector<Item>();
			outfitChoices.add(w);
		}
		return outfitChoices;
	}

	/**
	 * Caps experience gain so an apprentice always stops 1 xp short of the
	 * next level — they cannot level up as an Apprentice.
	 */
	@Override
	public int adjustExperienceGain(final MOB host, final MOB mob, final MOB victim, int amount)
	{
		if((amount > 0)&&(!expless()))
		{
			if(mob.charStats().getCurrentClass() == this)
			{
				if(mob.getExperience() + amount > mob.getExpNextLevel())
				{
					amount = mob.getExpNextLevel() - mob.getExperience() - 1;
					if(amount < 0)
						amount = 0;
				}
			}
		}
		return amount;
	}

	@Override
	public String getOtherBonusDesc()
	{
		return L("Gains lots of xp for training to a new class, and gets bonus common skills.");
	}
}
/*
 * Copyright 2001-2004 The Apache Software Foundation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.axis.transport.mail;

import org.apache.axis.components.logger.LogFactory;
import org.apache.axis.i18n.Messages;
import org.apache.axis.server.AxisServer;
import org.apache.axis.utils.Options;
import org.apache.commons.logging.Log;
import org.apache.commons.net.pop3.POP3Client;
import org.apache.commons.net.pop3.POP3MessageInfo;

import javax.mail.Session;
import javax.mail.internet.MimeMessage;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.Reader;
import java.net.MalformedURLException;
import java.util.Properties;

/**
 * This is a simple implementation of an SMTP/POP3 server for processing
 * SOAP requests via Apache's xml-axis. This is not intended for production
 * use. Its intended uses are for demos, debugging, and performance
 * profiling.
 *
 * @author Davanum Srinivas &lt;dims@yahoo.com&gt;
 * @author Rob Jellinghaus (robj@unrealities.com)
 */
public class MailServer implements Runnable {
    protected static Log log = LogFactory.getLog(MailServer.class.getName());

    // POP3 connection parameters for the mailbox being polled.
    private String host;
    private int port;
    private String userid;
    private String password;

    public MailServer(String host, int port, String userid, String password) {
        this.host = host;
        this.port = port;
        this.userid = userid;
        this.password = password;
    }

    // Are we doing threads?
    // NOTE(review): static field written through an instance setter — all
    // MailServer instances in the JVM share this flag.
    private static boolean doThreads = true;

    public void setDoThreads(boolean value) {
        doThreads = value;
    }

    public boolean getDoThreads() {
        return doThreads;
    }

    public String getHost() {
        return host;
    }

    // Axis server (shared between instances)
    private static AxisServer myAxisServer = null;

    // Lazily creates the single shared AxisServer instance.
    protected static synchronized AxisServer getAxisServer() {
        if (myAxisServer == null) {
            myAxisServer = new AxisServer();
        }
        return myAxisServer;
    }

    // are we stopped?
    // latch to true if stop() is called
    private boolean stopped = false;

    /**
     * Accept requests from a given TCP port and send them through the
     * Axis engine for processing.
     *
     * Polling loop: every iteration connects to the POP3 mailbox, retrieves
     * and deletes each pending message, hands it to a MailWorker (threaded or
     * inline depending on doThreads), then logs out and sleeps ~3 seconds.
     */
    public void run() {
        log.info(Messages.getMessage("start00", "MailServer",
                host + ":" + port));

        // Accept and process requests from the socket
        while (!stopped) {
            try {
                pop3.connect(host, port);
                pop3.login(userid, password);
                POP3MessageInfo[] messages = pop3.listMessages();
                if (messages != null && messages.length > 0) {
                    for (int i = 0; i < messages.length; i++) {
                        // Read the raw message text into a buffer.
                        Reader reader = pop3.retrieveMessage(messages[i].number);
                        if (reader == null) {
                            continue;
                        }
                        StringBuffer buffer = new StringBuffer();
                        BufferedReader bufferedReader = new BufferedReader(reader);
                        int ch;
                        while ((ch = bufferedReader.read()) != -1) {
                            buffer.append((char) ch);
                        }
                        bufferedReader.close();

                        // Re-parse the text as a MIME message, then remove it
                        // from the mailbox so it is not processed twice.
                        ByteArrayInputStream bais =
                                new ByteArrayInputStream(buffer.toString().getBytes());
                        Properties prop = new Properties();
                        Session session = Session.getDefaultInstance(prop, null);
                        MimeMessage mimeMsg = new MimeMessage(session, bais);
                        pop3.deleteMessage(messages[i].number);
                        // NOTE(review): the constructor above either throws or
                        // returns non-null, so this check is always true.
                        if (mimeMsg != null) {
                            MailWorker worker = new MailWorker(this, mimeMsg);
                            if (doThreads) {
                                Thread thread = new Thread(worker);
                                thread.setDaemon(true);
                                thread.start();
                            } else {
                                worker.run();
                            }
                        }
                    }
                }
            } catch (java.io.InterruptedIOException iie) {
                // NOTE(review): deliberately ignored — presumably a socket
                // read timeout; the loop simply retries. Confirm intent.
            } catch (Exception e) {
                log.debug(Messages.getMessage("exception00"), e);
                break;
            } finally {
                try {
                    // Disconnect and pause before the next poll.
                    pop3.logout();
                    pop3.disconnect();
                    Thread.sleep(3000);
                } catch (Exception e) {
                    log.error(Messages.getMessage("exception00"), e);
                }
            }
        }
        log.info(Messages.getMessage("quit00", "MailServer"));
    }

    /**
     * POP3 connection
     */
    private POP3Client pop3;

    /**
     * Obtain the serverSocket that that MailServer is listening on.
     */
    public POP3Client getPOP3() {
        return pop3;
    }

    /**
     * Set the serverSocket this server should listen on.
     * (note : changing this will not affect a running server, but if you
     * stop() and then start() the server, the new socket will be used).
     */
    public void setPOP3(POP3Client pop3) {
        this.pop3 = pop3;
    }

    /**
     * Start this server.
     *
     * Spawns a worker thread to listen for HTTP requests.
     *
     * @param daemon a boolean indicating if the thread should be a daemon.
     */
    public void start(boolean daemon) throws Exception {
        if (doThreads) {
            Thread thread = new Thread(this);
            thread.setDaemon(daemon);
            thread.start();
        } else {
            run();
        }
    }

    /**
     * Start this server as a NON-daemon.
     */
    public void start() throws Exception {
        start(false);
    }

    /**
     * Stop this server.
     *
     * This will interrupt any pending accept().
     */
    public void stop() throws Exception {
        /*
         * Close the server socket cleanly, but avoid fresh accepts while
         * the socket is closing.
         */
        stopped = true;
        log.info(Messages.getMessage("quit00", "MailServer"));

        // Kill the JVM, which will interrupt pending accepts even on linux.
        System.exit(0);
    }

    /**
     * Server process.
     *
     * Command-line entry point: parses host/port/credentials from Options
     * and starts a polling MailServer (default POP3 port 110).
     */
    public static void main(String args[]) {
        Options opts = null;
        try {
            opts = new Options(args);
        } catch (MalformedURLException e) {
            log.error(Messages.getMessage("malformedURLException00"), e);
            return;
        }
        try {
            doThreads = (opts.isFlagSet('t') > 0);
            String host = opts.getHost();
            int port = ((opts.isFlagSet('p') > 0) ? opts.getPort() : 110);
            POP3Client pop3 = new POP3Client();
            MailServer sas = new MailServer(host, port, opts.getUser(), opts.getPassword());
            sas.setPOP3(pop3);
            sas.start();
        } catch (Exception e) {
            log.error(Messages.getMessage("exception00"), e);
            return;
        }
    }
}
package mil.nga.giat.geowave.vector.wms.accumulo; import java.awt.Color; import java.awt.Graphics2D; import java.awt.RenderingHints; import java.awt.geom.Point2D; import java.awt.image.BufferedImage; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.nio.ByteBuffer; import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; import javax.media.jai.remote.SerializableState; import javax.media.jai.remote.Serializer; import javax.media.jai.remote.SerializerFactory; import mil.nga.giat.geowave.index.Persistable; import mil.nga.giat.geowave.vector.wms.DelayedBackbufferGraphic; import org.apache.log4j.Logger; import org.geoserver.wms.map.ImageUtils; import org.geotools.renderer.label.LabelCacheImpl.LabelRenderingMode; import org.geotools.renderer.lite.StyledShapePainter; import org.geotools.renderer.lite.StyledShapePainter.TextureAnchorKey; import org.geotools.renderer.style.SLDStyleFactory; import com.sun.media.jai.rmi.SerializableStateImpl; /** * This class persists and encapsulates all of the main rendering information * for a layer (any render configuration that is not self-contained within a * single style). It contains the master image to which all labels will be * rendered on. 
 *
 */
public class ServerRenderOptions implements
		Persistable
{
	private final static Logger LOGGER = Logger.getLogger(ServerRenderOptions.class);
	// Guards one-time registration of the custom JAI serializers (see
	// registerSerializers()); also used as the registration lock.
	private static final AtomicBoolean serializerRegistered = new AtomicBoolean(
			false);
	protected RenderingHints renderingHints;
	protected Color bgColor;
	protected boolean useAlpha;
	protected LabelRenderingMode labelRenderingMode;
	/**
	 * The meta buffer for the current layer
	 */
	protected int metaBuffer;
	protected double scaleDenominator;
	protected double angle;
	protected boolean clone;
	protected boolean continuousMapWrapping;
	protected boolean advancedProjectionHandlingEnabled;
	protected boolean vectorRenderingEnabled;
	protected boolean lineOptimizationEnabled;
	// Transient rendering state built by init(); not part of toBinary().
	protected Graphics2D masterGraphics;
	protected DelayedBackbufferGraphic labelGraphics;
	protected BufferedImage masterImage;
	/** Factory that will resolve symbolizers into rendered styles */
	protected SLDStyleFactory styleFactory;

	// No-arg constructor for deserialization via fromBinary().
	protected ServerRenderOptions() {}

	public ServerRenderOptions(
			final RenderingHints renderingHints,
			final Color bgColor,
			final LabelRenderingMode labelRenderingMode,
			final int metaBuffer,
			final double scaleDenominator,
			final double angle,
			final boolean useAlpha,
			final boolean continuousMapWrapping,
			final boolean advancedProjectionHandlingEnabled,
			final boolean clone,
			final boolean vectorRenderingEnabled,
			final boolean lineOptimizationEnabled ) {
		this.renderingHints = renderingHints;
		this.bgColor = bgColor;
		this.labelRenderingMode = labelRenderingMode;
		this.metaBuffer = metaBuffer;
		this.scaleDenominator = scaleDenominator;
		this.angle = angle;
		this.useAlpha = useAlpha;
		this.continuousMapWrapping = continuousMapWrapping;
		this.advancedProjectionHandlingEnabled = advancedProjectionHandlingEnabled;
		this.clone = clone;
		this.vectorRenderingEnabled = vectorRenderingEnabled;
		this.lineOptimizationEnabled = lineOptimizationEnabled;
	}

	// Builds the master image/graphics, style factory, and the delayed label
	// back-buffer for the given paint area.
	protected void init(
			final ServerPaintArea paintArea ) {
		masterImage = prepareImage(
				paintArea.getWidth(),
				paintArea.getHeight(),
				useAlpha);
		masterGraphics = ImageUtils.prepareTransparency(
				useAlpha,
				bgColor,
				masterImage,
				null);
		if (renderingHints != null) {
			masterGraphics.setRenderingHints(renderingHints);
		}
		styleFactory = new SLDStyleFactory();
		styleFactory.setRenderingHints(renderingHints);
		styleFactory.setVectorRenderingEnabled(vectorRenderingEnabled);
		styleFactory.setLineOptimizationEnabled(lineOptimizationEnabled);
		masterGraphics.setClip(paintArea.getArea());
		labelGraphics = new DelayedBackbufferGraphic(
				masterGraphics,
				paintArea.getArea());
	}

	/**
	 * Sets up a {@link BufferedImage#TYPE_4BYTE_ABGR} if the paletteInverter is
	 * not provided, or a indexed image otherwise. Subclasses may override this
	 * method should they need a special kind of image
	 *
	 * @param width
	 * @param height
	 * @param paletteInverter
	 * @return
	 */
	protected BufferedImage prepareImage(
			final int width,
			final int height,
			final boolean transparent ) {
		return ImageUtils.createImage(
				width,
				height,
				null,
				transparent);
	}

	protected RenderedMaster getRenderedMaster(
			final List<ServerFeatureStyle> styles ) {
		return new RenderedMaster(
				styles,
				labelGraphics.getImage());
	}

	/**
	 * Serializes this object.  Layout: [int hintsLen][hintsLen bytes of
	 * Java-serialized SerializableState(RenderingHints)][int bgColor RGB]
	 * [3 boolean bytes: useAlpha, continuousMapWrapping,
	 * advancedProjectionHandling][int labelRenderingMode ordinal]
	 * [int metaBuffer][double scaleDenominator][double angle]
	 * [3 boolean bytes: clone, vectorRendering, lineOptimization].
	 * The fixed tail is 38 bytes; the +39 allocation leaves one spare byte.
	 */
	@Override
	public byte[] toBinary() {
		registerSerializers();
		final SerializableState serializableRenderingHints = SerializerFactory.getState(renderingHints);
		byte[] renderHintsBinary = new byte[0];
		try {
			// NOTE(review): streams are not closed, but these are in-memory
			// byte-array streams so no OS resources leak.
			final ByteArrayOutputStream baos = new ByteArrayOutputStream();
			final ObjectOutputStream oos = new ObjectOutputStream(
					baos);
			oos.writeObject(serializableRenderingHints);
			renderHintsBinary = baos.toByteArray();
		}
		catch (final IOException e) {
			LOGGER.warn(
					"Unable to serialize rendering hints",
					e);
		}
		final ByteBuffer buf = ByteBuffer.allocate(renderHintsBinary.length + 39);
		buf.putInt(renderHintsBinary.length);
		buf.put(renderHintsBinary);
		buf.putInt(bgColor.getRGB());
		buf.put((byte) (useAlpha ? 1 : 0));
		buf.put((byte) (continuousMapWrapping ? 1 : 0));
		buf.put((byte) (advancedProjectionHandlingEnabled ? 1 : 0));
		buf.putInt(labelRenderingMode.ordinal());
		buf.putInt(metaBuffer);
		buf.putDouble(scaleDenominator);
		buf.putDouble(angle);
		buf.put((byte) (clone ? 1 : 0));
		buf.put((byte) (vectorRenderingEnabled ? 1 : 0));
		buf.put((byte) (lineOptimizationEnabled ? 1 : 0));
		return buf.array();
	}

	// Registers the serializers for the non-serializable GeoTools rendering
	// hint types exactly once per JVM.
	private void registerSerializers() {
		synchronized (serializerRegistered) {
			if (!serializerRegistered.get()) {
				SerializerFactory.registerSerializer(new TextureAnchorKeySerializer());
				SerializerFactory.registerSerializer(new Point2dSerializer());
				serializerRegistered.set(true);
			}
		}
	}

	/**
	 * Deserializes the fields in the exact order written by toBinary().  A
	 * failure to restore the rendering hints is logged and leaves
	 * renderingHints null.
	 */
	@Override
	public void fromBinary(
			final byte[] bytes ) {
		final ByteBuffer buf = ByteBuffer.wrap(bytes);
		final int renderHintsBinaryLength = buf.getInt();
		final byte[] renderHintsBinary = new byte[renderHintsBinaryLength];
		buf.get(renderHintsBinary);
		renderingHints = null;
		try {
			registerSerializers();
			final ByteArrayInputStream bais = new ByteArrayInputStream(
					renderHintsBinary);
			final ObjectInputStream ois = new ObjectInputStream(
					bais);
			final Object o = ois.readObject();
			if ((o instanceof SerializableState) && (((SerializableState) o).getObject() instanceof RenderingHints)) {
				renderingHints = (RenderingHints) ((SerializableState) o).getObject();
			}
		}
		catch (final Exception e) {
			LOGGER.warn(
					"Unable to deserialize rendering hints",
					e);
		}
		bgColor = new Color(
				buf.getInt());
		useAlpha = buf.get() > 0;
		continuousMapWrapping = buf.get() > 0;
		advancedProjectionHandlingEnabled = buf.get() > 0;
		labelRenderingMode = LabelRenderingMode.values()[buf.getInt()];
		metaBuffer = buf.getInt();
		scaleDenominator = buf.getDouble();
		angle = buf.getDouble();
		clone = buf.get() > 0;
		vectorRenderingEnabled = buf.get() > 0;
		lineOptimizationEnabled = buf.get() > 0;
	}

	// Serializer for the GeoTools texture-anchor rendering hint key, which is
	// not natively serializable.
	private static class TextureAnchorKeySerializer implements
			Serializer
	{
		@Override
		public SerializableState getState(
				final Object obj,
				final RenderingHints renderingHints ) {
			return new TextureAnchorKeySerializableState(
					TextureAnchorKey.class,
					obj,
					renderingHints);
		}

		@Override
		public Class getSupportedClass() {
			return TextureAnchorKey.class;
		}

		@Override
		public boolean permitsSubclasses() {
			return false;
		}
	}

	// The key is a singleton, so nothing is written; on read the shared
	// StyledShapePainter.TEXTURE_ANCHOR_HINT_KEY instance is substituted.
	private static class TextureAnchorKeySerializableState extends
			SerializableStateImpl
	{
		/**
		 *
		 */
		private static final long serialVersionUID = 1L;

		protected TextureAnchorKeySerializableState(
				final Class arg0,
				final Object arg1,
				final RenderingHints arg2 ) {
			super(
					arg0,
					arg1,
					arg2);
		}

		private void writeObject(
				final ObjectOutputStream out )
				throws IOException {}

		private void readObject(
				final ObjectInputStream in )
				throws IOException,
				ClassNotFoundException {
			theObject = StyledShapePainter.TEXTURE_ANCHOR_HINT_KEY;
		}
	}

	// Serializer for Point2D.Double rendering hint values.
	private static class Point2dSerializer implements
			Serializer
	{
		@Override
		public SerializableState getState(
				final Object obj,
				final RenderingHints renderingHints ) {
			return new Point2DSerializableState(
					Point2D.Double.class,
					obj,
					renderingHints);
		}

		@Override
		public Class getSupportedClass() {
			return Point2D.Double.class;
		}

		@Override
		public boolean permitsSubclasses() {
			return false;
		}
	}

	// Writes/reads the point as its two double coordinates.
	private static class Point2DSerializableState extends
			SerializableStateImpl
	{
		/**
		 *
		 */
		private static final long serialVersionUID = 1L;

		protected Point2DSerializableState(
				final Class arg0,
				final Object arg1,
				final RenderingHints arg2 ) {
			super(
					arg0,
					arg1,
					arg2);
		}

		private void writeObject(
				final ObjectOutputStream out )
				throws IOException {
			out.writeDouble(((Point2D) theObject).getX());
			out.writeDouble(((Point2D) theObject).getY());
		}

		private void readObject(
				final ObjectInputStream in )
				throws IOException,
				ClassNotFoundException {
			theObject = new Point2D.Double(
					in.readDouble(),
					in.readDouble());
		}
	}
}
/*===========================================================================
 * Licensed Materials - Property of IBM
 * "Restricted Materials of IBM"
 *
 * IBM SDK, Java(tm) Technology Edition, v8
 * (C) Copyright IBM Corp. 2013, 2014. All Rights Reserved
 *
 * US Government Users Restricted Rights - Use, duplication or disclosure
 * restricted by GSA ADP Schedule Contract with IBM Corp.
 *===========================================================================
 */
/*
 * Copyright (c) 2013, 2014, Oracle and/or its affiliates. All rights reserved.
 * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 */
package java.util.stream;

import java.util.DoubleSummaryStatistics;
import java.util.Objects;
import java.util.OptionalDouble;
import java.util.PrimitiveIterator;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.function.BiConsumer;
import java.util.function.BinaryOperator;
import java.util.function.DoubleBinaryOperator;
import java.util.function.DoubleConsumer;
import java.util.function.DoubleFunction;
import java.util.function.DoublePredicate;
import java.util.function.DoubleToIntFunction;
import java.util.function.DoubleToLongFunction;
import java.util.function.DoubleUnaryOperator;
import java.util.function.IntFunction;
import java.util.function.ObjDoubleConsumer;
import java.util.function.Supplier;

/**
 * Abstract base class for an intermediate pipeline stage or pipeline source
 * stage implementing whose elements are of type {@code double}.
 *
 * @param <E_IN> type of elements in the upstream source
 *
 * @since 1.8
 */
abstract class DoublePipeline<E_IN>
        extends AbstractPipeline<E_IN, Double, DoubleStream>
        implements DoubleStream {

    /**
     * Constructor for the head of a stream pipeline.
     *
     * @param source {@code Supplier<Spliterator>} describing the stream source
     * @param sourceFlags the source flags for the stream source, described in
     *        {@link StreamOpFlag}
     */
    DoublePipeline(Supplier<? extends Spliterator<Double>> source,
                   int sourceFlags, boolean parallel) {
        super(source, sourceFlags, parallel);
    }

    /**
     * Constructor for the head of a stream pipeline.
     *
     * @param source {@code Spliterator} describing the stream source
     * @param sourceFlags the source flags for the stream source, described in
     *        {@link StreamOpFlag}
     */
    DoublePipeline(Spliterator<Double> source,
                   int sourceFlags, boolean parallel) {
        super(source, sourceFlags, parallel);
    }

    /**
     * Constructor for appending an intermediate operation onto an existing
     * pipeline.
     *
     * @param upstream the upstream element source.
     * @param opFlags the operation flags
     */
    DoublePipeline(AbstractPipeline<?, E_IN, ?> upstream, int opFlags) {
        super(upstream, opFlags);
    }

    /**
     * Adapt a {@code Sink<Double> to a {@code DoubleConsumer}, ideally simply
     * by casting.
     */
    private static DoubleConsumer adapt(Sink<Double> sink) {
        if (sink instanceof DoubleConsumer) {
            return (DoubleConsumer) sink;
        }
        else {
            // Tripwire logging flags the boxing fallback path in debug builds.
            if (Tripwire.ENABLED)
                Tripwire.trip(AbstractPipeline.class,
                              "using DoubleStream.adapt(Sink<Double> s)");
            return sink::accept;
        }
    }

    /**
     * Adapt a {@code Spliterator<Double>} to a {@code Spliterator.OfDouble}.
     *
     * @implNote
     * The implementation attempts to cast to a Spliterator.OfDouble, and throws
     * an exception if this cast is not possible.
     */
    private static Spliterator.OfDouble adapt(Spliterator<Double> s) {
        if (s instanceof Spliterator.OfDouble) {
            return (Spliterator.OfDouble) s;
        }
        else {
            if (Tripwire.ENABLED)
                Tripwire.trip(AbstractPipeline.class,
                              "using DoubleStream.adapt(Spliterator<Double> s)");
            throw new UnsupportedOperationException("DoubleStream.adapt(Spliterator<Double> s)");
        }
    }


    // Shape-specific methods

    @Override
    final StreamShape getOutputShape() {
        return StreamShape.DOUBLE_VALUE;
    }

    @Override
    final <P_IN> Node<Double> evaluateToNode(PipelineHelper<Double> helper,
                                             Spliterator<P_IN> spliterator,
                                             boolean flattenTree,
                                             IntFunction<Double[]> generator) {
        return Nodes.collectDouble(helper, spliterator, flattenTree);
    }

    @Override
    final <P_IN> Spliterator<Double> wrap(PipelineHelper<Double> ph,
                                          Supplier<Spliterator<P_IN>> supplier,
                                          boolean isParallel) {
        return new StreamSpliterators.DoubleWrappingSpliterator<>(ph, supplier, isParallel);
    }

    @Override
    @SuppressWarnings("unchecked")
    final Spliterator.OfDouble lazySpliterator(Supplier<? extends Spliterator<Double>> supplier) {
        return new StreamSpliterators.DelegatingSpliterator.OfDouble((Supplier<Spliterator.OfDouble>) supplier);
    }

    @Override
    final void forEachWithCancel(Spliterator<Double> spliterator, Sink<Double> sink) {
        Spliterator.OfDouble spl = adapt(spliterator);
        DoubleConsumer adaptedSink = adapt(sink);
        // Empty-bodied do/while: tryAdvance both feeds the sink and reports
        // whether another element exists; the loop stops on cancellation.
        do { } while (!sink.cancellationRequested() && spl.tryAdvance(adaptedSink));
    }

    @Override
    final Node.Builder<Double> makeNodeBuilder(long exactSizeIfKnown, IntFunction<Double[]> generator) {
        // generator is intentionally unused: a primitive double builder needs
        // no boxed-array factory.
        return Nodes.doubleBuilder(exactSizeIfKnown);
    }


    // DoubleStream

    @Override
    public final PrimitiveIterator.OfDouble iterator() {
        return Spliterators.iterator(spliterator());
    }

    @Override
    public final Spliterator.OfDouble spliterator() {
        return adapt(super.spliterator());
    }

    // Stateless intermediate ops from DoubleStream

    @Override
    public final Stream<Double> boxed() {
        return mapToObj(Double::valueOf);
    }

    @Override
    public final DoubleStream map(DoubleUnaryOperator mapper) {
        Objects.requireNonNull(mapper);
        return new StatelessOp<Double>(this, StreamShape.DOUBLE_VALUE,
                                       StreamOpFlag.NOT_SORTED | StreamOpFlag.NOT_DISTINCT) {
            @Override
            Sink<Double> opWrapSink(int flags, Sink<Double> sink) {
                return new Sink.ChainedDouble<Double>(sink) {
                    @Override
                    public void accept(double t) {
                        downstream.accept(mapper.applyAsDouble(t));
                    }
                };
            }
        };
    }

    @Override
    public final <U> Stream<U> mapToObj(DoubleFunction<? extends U> mapper) {
        Objects.requireNonNull(mapper);
        return new ReferencePipeline.StatelessOp<Double, U>(this, StreamShape.DOUBLE_VALUE,
                                                            StreamOpFlag.NOT_SORTED | StreamOpFlag.NOT_DISTINCT) {
            @Override
            Sink<Double> opWrapSink(int flags, Sink<U> sink) {
                return new Sink.ChainedDouble<U>(sink) {
                    @Override
                    public void accept(double t) {
                        downstream.accept(mapper.apply(t));
                    }
                };
            }
        };
    }

    @Override
    public final IntStream mapToInt(DoubleToIntFunction mapper) {
        Objects.requireNonNull(mapper);
        return new IntPipeline.StatelessOp<Double>(this, StreamShape.DOUBLE_VALUE,
                                                   StreamOpFlag.NOT_SORTED | StreamOpFlag.NOT_DISTINCT) {
            @Override
            Sink<Double> opWrapSink(int flags, Sink<Integer> sink) {
                return new Sink.ChainedDouble<Integer>(sink) {
                    @Override
                    public void accept(double t) {
                        downstream.accept(mapper.applyAsInt(t));
                    }
                };
            }
        };
    }

    @Override
    public final LongStream mapToLong(DoubleToLongFunction mapper) {
        Objects.requireNonNull(mapper);
        return new LongPipeline.StatelessOp<Double>(this, StreamShape.DOUBLE_VALUE,
                                                    StreamOpFlag.NOT_SORTED | StreamOpFlag.NOT_DISTINCT) {
            @Override
            Sink<Double> opWrapSink(int flags, Sink<Long> sink) {
                return new Sink.ChainedDouble<Long>(sink) {
                    @Override
                    public void accept(double t) {
                        downstream.accept(mapper.applyAsLong(t));
                    }
                };
            }
        };
    }

    @Override
    public final DoubleStream flatMap(DoubleFunction<? extends DoubleStream> mapper) {
        return new StatelessOp<Double>(this, StreamShape.DOUBLE_VALUE,
                                       StreamOpFlag.NOT_SORTED | StreamOpFlag.NOT_DISTINCT | StreamOpFlag.NOT_SIZED) {
            @Override
            Sink<Double> opWrapSink(int flags, Sink<Double> sink) {
                return new Sink.ChainedDouble<Double>(sink) {
                    @Override
                    public void begin(long size) {
                        // Size is unknown after flat-mapping.
                        downstream.begin(-1);
                    }

                    @Override
                    public void accept(double t) {
                        // try-with-resources closes each per-element stream.
                        try (DoubleStream result = mapper.apply(t)) {
                            // We can do better that this too; optimize for depth=0 case and just grab spliterator and forEach it
                            if (result != null)
                                result.sequential().forEach(i -> downstream.accept(i));
                        }
                    }
                };
            }
        };
    }

    @Override
    public DoubleStream unordered() {
        if (!isOrdered())
            return this;
        return new StatelessOp<Double>(this, StreamShape.DOUBLE_VALUE, StreamOpFlag.NOT_ORDERED) {
            @Override
            Sink<Double> opWrapSink(int flags, Sink<Double> sink) {
                return sink;
            }
        };
    }

    @Override
    public final DoubleStream filter(DoublePredicate predicate) {
        Objects.requireNonNull(predicate);
        return new StatelessOp<Double>(this, StreamShape.DOUBLE_VALUE,
                                       StreamOpFlag.NOT_SIZED) {
            @Override
            Sink<Double> opWrapSink(int flags, Sink<Double> sink) {
                return new Sink.ChainedDouble<Double>(sink) {
                    @Override
                    public void begin(long size) {
                        // Filtering invalidates the upstream size estimate.
                        downstream.begin(-1);
                    }

                    @Override
                    public void accept(double t) {
                        if (predicate.test(t))
                            downstream.accept(t);
                    }
                };
            }
        };
    }

    @Override
    public final DoubleStream peek(DoubleConsumer action) {
        Objects.requireNonNull(action);
        return new StatelessOp<Double>(this, StreamShape.DOUBLE_VALUE,
                                       0) {
            @Override
            Sink<Double> opWrapSink(int flags, Sink<Double> sink) {
                return new Sink.ChainedDouble<Double>(sink) {
                    @Override
                    public void accept(double t) {
                        action.accept(t);
                        downstream.accept(t);
                    }
                };
            }
        };
    }

    // Stateful intermediate ops from DoubleStream

    @Override
    public final DoubleStream limit(long maxSize) {
        if (maxSize < 0)
            throw new IllegalArgumentException(Long.toString(maxSize));
        return SliceOps.makeDouble(this, (long) 0, maxSize);
    }

    @Override
    public final DoubleStream skip(long n) {
        if (n < 0)
            throw new IllegalArgumentException(Long.toString(n));
        if (n == 0)
            return this;
        else {
            // -1 limit means "unbounded" to the slice op.
            long limit = -1;
            return SliceOps.makeDouble(this, n, limit);
        }
    }

    @Override
    public final DoubleStream sorted() {
        return SortedOps.makeDouble(this);
    }

    @Override
    public final DoubleStream distinct() {
        // While functional and quick to implement, this approach is not very efficient.
        // An efficient version requires a double-specific map/set implementation.
        return boxed().distinct().mapToDouble(i -> (double) i);
    }

    // Terminal ops from DoubleStream

    @Override
    public void forEach(DoubleConsumer consumer) {
        evaluate(ForEachOps.makeDouble(consumer, false));
    }

    @Override
    public void forEachOrdered(DoubleConsumer consumer) {
        evaluate(ForEachOps.makeDouble(consumer, true));
    }

    @Override
    public final double sum() {
        /*
         * In the arrays allocated for the collect operation, index 0
         * holds the high-order bits of the running sum, index 1 holds
         * the low-order bits of the sum computed via compensated
         * summation, and index 2 holds the simple sum used to compute
         * the proper result if the stream contains infinite values of
         * the same sign.
         */
        double[] summation = collect(() -> new double[3],
                               (ll, d) -> {
                                   Collectors.sumWithCompensation(ll, d);
                                   ll[2] += d;
                               },
                               (ll, rr) -> {
                                   Collectors.sumWithCompensation(ll, rr[0]);
                                   Collectors.sumWithCompensation(ll, rr[1]);
                                   ll[2] += rr[2];
                               });

        return Collectors.computeFinalSum(summation);
    }

    @Override
    public final OptionalDouble min() {
        return reduce(Math::min);
    }

    @Override
    public final OptionalDouble max() {
        return reduce(Math::max);
    }

    /**
     * {@inheritDoc}
     *
     * @implNote The {@code double} format can represent all
     * consecutive integers in the range -2<sup>53</sup> to
     * 2<sup>53</sup>. If the pipeline has more than 2<sup>53</sup>
     * values, the divisor in the average computation will saturate at
     * 2<sup>53</sup>, leading to additional numerical errors.
     */
    @Override
    public final OptionalDouble average() {
        /*
         * In the arrays allocated for the collect operation, index 0
         * holds the high-order bits of the running sum, index 1 holds
         * the low-order bits of the sum computed via compensated
         * summation, index 2 holds the number of values seen, index 3
         * holds the simple sum.
         */
        double[] avg = collect(() -> new double[4],
                               (ll, d) -> {
                                   ll[2]++;
                                   Collectors.sumWithCompensation(ll, d);
                                   ll[3] += d;
                               },
                               (ll, rr) -> {
                                   Collectors.sumWithCompensation(ll, rr[0]);
                                   Collectors.sumWithCompensation(ll, rr[1]);
                                   ll[2] += rr[2];
                                   ll[3] += rr[3];
                               });
        return avg[2] > 0
               ? OptionalDouble.of(Collectors.computeFinalSum(avg) / avg[2])
               : OptionalDouble.empty();
    }

    @Override
    public final long count() {
        return mapToLong(e -> 1L).sum();
    }

    @Override
    public final DoubleSummaryStatistics summaryStatistics() {
        return collect(DoubleSummaryStatistics::new, DoubleSummaryStatistics::accept,
                       DoubleSummaryStatistics::combine);
    }

    @Override
    public final double reduce(double identity, DoubleBinaryOperator op) {
        return evaluate(ReduceOps.makeDouble(identity, op));
    }

    @Override
    public final OptionalDouble reduce(DoubleBinaryOperator op) {
        return evaluate(ReduceOps.makeDouble(op));
    }

    @Override
    public final <R> R collect(Supplier<R> supplier,
                               ObjDoubleConsumer<R> accumulator,
                               BiConsumer<R, R> combiner) {
        // Adapt the void combiner into a left-returning BinaryOperator.
        BinaryOperator<R> operator = (left, right) -> {
            combiner.accept(left, right);
            return left;
        };
        return evaluate(ReduceOps.makeDouble(supplier, accumulator, operator));
    }

    @Override
    public final boolean anyMatch(DoublePredicate predicate) {
        return evaluate(MatchOps.makeDouble(predicate, MatchOps.MatchKind.ANY));
    }

    @Override
    public final boolean allMatch(DoublePredicate predicate) {
        return evaluate(MatchOps.makeDouble(predicate, MatchOps.MatchKind.ALL));
    }

    @Override
    public final boolean noneMatch(DoublePredicate predicate) {
        return evaluate(MatchOps.makeDouble(predicate, MatchOps.MatchKind.NONE));
    }

    @Override
    public final OptionalDouble findFirst() {
        return evaluate(FindOps.makeDouble(true));
    }

    @Override
    public final OptionalDouble findAny() {
        return evaluate(FindOps.makeDouble(false));
    }

    @Override
    public final double[] toArray() {
        return Nodes.flattenDouble((Node.OfDouble) evaluateToArrayNode(Double[]::new))
                        .asPrimitiveArray();
    }

    //

    /**
     * Source stage of a DoubleStream
     *
     * @param <E_IN> type of elements in the upstream source
     */
    static class Head<E_IN> extends DoublePipeline<E_IN> {
        /**
         * Constructor for the source stage of a DoubleStream.
         *
         * @param source {@code Supplier<Spliterator>} describing the stream
         *               source
         * @param sourceFlags the source flags for the stream source, described
         *                    in {@link StreamOpFlag}
         * @param parallel {@code true} if the pipeline is parallel
         */
        Head(Supplier<? extends Spliterator<Double>> source,
             int sourceFlags, boolean parallel) {
            super(source, sourceFlags, parallel);
        }

        /**
         * Constructor for the source stage of a DoubleStream.
         *
         * @param source {@code Spliterator} describing the stream source
         * @param sourceFlags the source flags for the stream source, described
         *                    in {@link StreamOpFlag}
         * @param parallel {@code true} if the pipeline is parallel
         */
        Head(Spliterator<Double> source,
             int sourceFlags, boolean parallel) {
            super(source, sourceFlags, parallel);
        }

        @Override
        final boolean opIsStateful() {
            // A head stage is not an operation; these must never be called.
            throw new UnsupportedOperationException();
        }

        @Override
        final Sink<E_IN> opWrapSink(int flags, Sink<Double> sink) {
            throw new UnsupportedOperationException();
        }

        // Optimized sequential terminal operations for the head of the pipeline

        @Override
        public void forEach(DoubleConsumer consumer) {
            if (!isParallel()) {
                adapt(sourceStageSpliterator()).forEachRemaining(consumer);
            }
            else {
                super.forEach(consumer);
            }
        }

        @Override
        public void forEachOrdered(DoubleConsumer consumer) {
            if (!isParallel()) {
                adapt(sourceStageSpliterator()).forEachRemaining(consumer);
            }
            else {
                super.forEachOrdered(consumer);
            }
        }
    }

    /**
     * Base class for a stateless intermediate stage of a DoubleStream.
* * @param <E_IN> type of elements in the upstream source * @since 1.8 */ abstract static class StatelessOp<E_IN> extends DoublePipeline<E_IN> { /** * Construct a new DoubleStream by appending a stateless intermediate * operation to an existing stream. * * @param upstream the upstream pipeline stage * @param inputShape the stream shape for the upstream pipeline stage * @param opFlags operation flags for the new stage */ StatelessOp(AbstractPipeline<?, E_IN, ?> upstream, StreamShape inputShape, int opFlags) { super(upstream, opFlags); assert upstream.getOutputShape() == inputShape; } @Override final boolean opIsStateful() { return false; } } /** * Base class for a stateful intermediate stage of a DoubleStream. * * @param <E_IN> type of elements in the upstream source * @since 1.8 */ abstract static class StatefulOp<E_IN> extends DoublePipeline<E_IN> { /** * Construct a new DoubleStream by appending a stateful intermediate * operation to an existing stream. * * @param upstream the upstream pipeline stage * @param inputShape the stream shape for the upstream pipeline stage * @param opFlags operation flags for the new stage */ StatefulOp(AbstractPipeline<?, E_IN, ?> upstream, StreamShape inputShape, int opFlags) { super(upstream, opFlags); assert upstream.getOutputShape() == inputShape; } @Override final boolean opIsStateful() { return true; } @Override abstract <P_IN> Node<Double> opEvaluateParallel(PipelineHelper<Double> helper, Spliterator<P_IN> spliterator, IntFunction<Double[]> generator); } }
/*
 * Copyright (c) 2015 - present Nebula Bay.
 * All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.tascape.reactor;

import com.tascape.reactor.data.AbstractCaseData;
import com.tascape.reactor.data.CaseDataInfo;
import com.tascape.reactor.db.TaskCase;
import com.tascape.reactor.suite.AbstractSuite;
import com.tascape.reactor.task.AbstractCase;
import com.tascape.reactor.task.Priority;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.lang3.StringUtils;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.tascape.reactor.data.CaseData;
import com.tascape.reactor.data.CaseDataProvider;
import java.util.HashMap;
import java.util.Map;
import java.util.stream.Collectors;

/**
 * Builds the executable case list for a task suite: discovers {@code @Test} methods on the
 * suite's case classes, expands data-driven cases, then applies name/priority filters and
 * optional shuffling.
 *
 * @author linsong wang
 */
public class TaskSuite {
    private static final Logger LOG = LoggerFactory.getLogger(TaskSuite.class);

    // Falls back to the suite class name when the suite does not define a name.
    private String name;

    private final String projectName;

    private final int numberOfEnvs;

    private List<TaskCase> cases = new ArrayList<>();

    /**
     * Loads the suite class and assembles its case list.
     * <p>
     * Processing order: discover case methods, expand annotations (priority and data
     * providers), filter by class/method regex, filter by the explicit case list (if any),
     * filter by priority, then shuffle if configured.
     *
     * @param suiteClass      fully qualified name of the suite class to load
     * @param caseClassRegex  regex matched (via find) against case class names
     * @param caseMethodRegex regex matched (via find) against case method names
     * @param caseList        explicit "class.method" entries to keep; ignored when null or empty
     *
     * @throws Exception if the suite class cannot be loaded or instantiated
     */
    public TaskSuite(String suiteClass, Pattern caseClassRegex, Pattern caseMethodRegex, List<String> caseList)
        throws Exception {
        LOG.debug("Find cases in target suite {}", suiteClass);
        // Class.newInstance() is deprecated (it bypasses exception wrapping); use the
        // no-arg declared constructor explicitly.
        AbstractSuite suite
            = AbstractSuite.class.cast(Class.forName(suiteClass).getDeclaredConstructor().newInstance());
        this.name = suite.getName();
        this.projectName = suite.getProjectName();
        this.numberOfEnvs = suite.getNumberOfEnvs();
        if (this.name == null || this.name.isEmpty()) {
            this.name = suiteClass;
        }
        suite.setUpCaseClasses();

        // One TaskCase per @Test method on each case class of the suite
        for (Class<? extends AbstractCase> clazz : suite.getCaseClasses()) {
            for (Method method : this.getCaseMethods(clazz)) {
                TaskCase tc = new TaskCase();
                tc.setSuiteClass(suiteClass);
                tc.setCaseClass(clazz.getName());
                tc.setCaseMethod(method.getName());
                this.cases.add(tc);
            }
        }

        this.cases = this.processAnnotations();
        this.cases = this.filter(caseClassRegex, caseMethodRegex);
        if ((caseList != null) && (!caseList.isEmpty())) {
            this.cases = this.filter(caseList);
        }
        int priority = SystemConfiguration.getInstance()
            .getIntProperty(SystemConfiguration.SYSPROP_CASE_PRIORITY, suite.getPriority());
        this.cases = this.filter(priority);
        if (SystemConfiguration.getInstance().isShuffleCases()) {
            LOG.debug("do case shuffle");
            Collections.shuffle(cases);
        }
    }

    /**
     * @return the filtered (and possibly shuffled) case list
     */
    public List<TaskCase> getCases() {
        return cases;
    }

    /**
     * @return the suite name, or the suite class name if the suite did not provide one
     */
    public String getName() {
        return name;
    }

    /**
     * @return the project name, preferring the system-configured override when present
     */
    public String getProjectName() {
        String pn = SystemConfiguration.getInstance().getSuiteProjectName();
        if (StringUtils.isNotBlank(pn)) {
            LOG.debug("Use suite project name defined in {}", SystemConfiguration.SYSPROP_SUITE_PROJECT_NAME);
            return pn;
        }
        return projectName;
    }

    public int getNumberOfEnvs() {
        return numberOfEnvs;
    }

    /**
     * Keeps only the cases whose class name and method name both match the given patterns.
     *
     * @param caseClassRegex  regex applied (find) to the case class name
     * @param caseMethodRegex regex applied (find) to the case method name
     *
     * @return the matching cases
     */
    private List<TaskCase> filter(Pattern caseClassRegex, Pattern caseMethodRegex) {
        LOG.debug("Use case class name filter {}", caseClassRegex);
        LOG.debug("Use case method name filter {}", caseMethodRegex);
        return this.cases.stream()
            .filter(tc -> {
                Matcher mc = caseClassRegex.matcher(tc.getCaseClass());
                Matcher mm = caseMethodRegex.matcher(tc.getCaseMethod());
                return mc.find() && mm.find();
            })
            .collect(Collectors.toList());
    }

    /**
     * Keeps only the cases listed explicitly as "class.method".
     *
     * @param caseList fully qualified "class.method" entries to keep
     *
     * @return the matching cases
     */
    private List<TaskCase> filter(List<String> caseList) {
        LOG.debug("Filter based on provided case list");
        return this.cases.stream()
            .filter(tc -> caseList.contains(tc.getCaseClass() + "." + tc.getCaseMethod()))
            .collect(Collectors.toList());
    }

    /*
     * runs all cases, which priority is less than or equal to the specified.
     */
    private List<TaskCase> filter(int priority) {
        LOG.debug("filter cases by priority {}", priority);
        return this.cases.stream()
            .filter(tc -> (tc.getPriority() <= priority))
            .collect(Collectors.toList());
    }

    /**
     * Applies the {@link Priority} and {@link CaseDataProvider} method annotations:
     * priority levels are copied onto each case, and data-driven cases are expanded into
     * one case per data entry. Cases whose annotations cannot be processed are skipped
     * with a warning rather than failing the whole suite.
     *
     * @return the expanded case list
     */
    private List<TaskCase> processAnnotations() {
        LOG.debug("Checking method annotation CaseDataProvider for each case");
        List<TaskCase> tcs = new ArrayList<>();
        // Cache loaded classes; multiple cases usually share the same case class.
        Map<String, Class<?>> loadedClasses = new HashMap<>();
        this.cases.stream().forEach((tc) -> {
            try {
                String className = tc.getCaseClass();
                Class<?> caseClass = loadedClasses.get(className);
                if (caseClass == null) {
                    caseClass = Class.forName(tc.getCaseClass());
                    loadedClasses.put(className, caseClass);
                }
                Method caseMethod = caseClass.getDeclaredMethod(tc.getCaseMethod());
                Priority p = caseMethod.getAnnotation(Priority.class);
                if (p != null) {
                    tc.setPriority(p.level());
                }
                CaseDataProvider tdp = caseMethod.getAnnotation(CaseDataProvider.class);
                if (tdp == null) {
                    LOG.debug("Adding case {}", tc.format());
                    tcs.add(tc);
                } else {
                    LOG.trace("Calling class {}, method {}, with parameter {}",
                        tdp.klass(), tdp.method(), tdp.parameter());
                    CaseData[] data = AbstractCaseData.getCaseData(tdp.klass(), tdp.method(), tdp.parameter());
                    LOG.debug("{} is a data-driven case, data size is {}", tc.format(), data.length);
                    // Width of the zero-padded index, e.g. 2 digits for up to 99 entries
                    int length = (data.length + "").length();
                    for (int i = 0; i < data.length; i++) {
                        TaskCase t = new TaskCase(tc);
                        CaseDataInfo tdi = new CaseDataInfo(tdp.klass(), tdp.method(), tdp.parameter(), i);
                        t.setCaseDataInfo(tdi.format(length));
                        String value = data[i].getValue();
                        if (StringUtils.isEmpty(value)) {
                            // Synthesize a stable label when the data entry has no value
                            value = String.format("%s-%0" + length + "d", data[i].getClassName(), i);
                        }
                        t.setCaseData(value);
                        // Data entries may lower (never raise) the case priority
                        t.setPriority(Math.min(t.getPriority(), data[i].getPriority()));
                        LOG.debug("Adding case {}", t.format());
                        tcs.add(t);
                    }
                }
            } catch (Exception ex) {
                // Best-effort: skip this case but keep processing the rest.
                // (Placeholders fixed: first is the case, second the annotation type.)
                LOG.warn("Cannot process case {}, skipping. Check {}",
                    tc.format(), CaseDataProvider.class.getName());
                LOG.warn("", ex);
            }
        });
        return tcs;
    }

    /**
     * Finds all JUnit {@code @Test} methods declared directly on the given case class.
     *
     * @param <T>       case class type
     * @param caseClass class to scan
     *
     * @return the declared methods carrying the {@code @Test} annotation
     */
    private <T extends AbstractCase> List<Method> getCaseMethods(Class<T> caseClass) {
        List<Method> methods = new ArrayList<>();
        for (Method m : caseClass.getDeclaredMethods()) {
            if (m.getAnnotation(Test.class) != null) {
                methods.add(m);
            }
        }
        return methods;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.internal.cache.entries;

// DO NOT modify this class. It was generated from LeafRegionEntry.cpp

import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
import java.util.concurrent.atomic.AtomicLongFieldUpdater;

import org.apache.geode.internal.cache.DiskId;
import org.apache.geode.internal.cache.DiskStoreImpl;
import org.apache.geode.internal.cache.InternalRegion;
import org.apache.geode.internal.cache.PlaceHolderDiskRegion;
import org.apache.geode.internal.cache.RegionEntry;
import org.apache.geode.internal.cache.RegionEntryContext;
import org.apache.geode.internal.cache.eviction.EvictionController;
import org.apache.geode.internal.cache.eviction.EvictionNode;
import org.apache.geode.internal.cache.persistence.DiskRecoveryStore;
import org.apache.geode.internal.util.concurrent.CustomEntryConcurrentHashMap.HashEntry;

/*
 * macros whose definition changes this class:
 *
 * disk: DISK lru: LRU stats: STATS versioned: VERSIONED offheap: OFFHEAP
 *
 * One of the following key macros must be defined:
 *
 * key object: KEY_OBJECT key int: KEY_INT key long: KEY_LONG key uuid: KEY_UUID key string1:
 * KEY_STRING1 key string2: KEY_STRING2
 */
/**
 * Do not modify this class. It was generated. Instead modify LeafRegionEntry.cpp and then run
 * ./dev-tools/generateRegionEntryClasses.sh (it must be run from the top level directory).
 */
// NOTE(review): generated region-entry variant (disk + LRU, heap storage, inline long key);
// review comments below are additive only — no code was changed.
public class VMThinDiskLRURegionEntryHeapLongKey extends VMThinDiskLRURegionEntryHeap {
  // --------------------------------------- common fields ----------------------------------------

  // Atomic field updater avoids allocating an AtomicLong per entry.
  private static final AtomicLongFieldUpdater<VMThinDiskLRURegionEntryHeapLongKey> LAST_MODIFIED_UPDATER =
      AtomicLongFieldUpdater.newUpdater(VMThinDiskLRURegionEntryHeapLongKey.class, "lastModified");

  protected int hash;

  private HashEntry<Object, Object> nextEntry;

  // Written only through LAST_MODIFIED_UPDATER, hence "unused" to the compiler.
  @SuppressWarnings("unused")
  private volatile long lastModified;

  private volatile Object value;

  // ---------------------------------------- disk fields -----------------------------------------

  /**
   * @since GemFire 5.1
   */
  protected DiskId id;

  // --------------------------------------- key fields -------------------------------------------
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp

  // Key is stored inline as a primitive long (no boxed key object per entry).
  private final long key;

  public VMThinDiskLRURegionEntryHeapLongKey(final RegionEntryContext context, final long key,
      final Object value) {
    // A RecoveredEntry value is handled by initialize() below, not stored directly.
    super(context, (value instanceof RecoveredEntry ? null : value));
    // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
    initialize(context, value);
    this.key = key;
  }

  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp

  @Override
  protected Object getValueField() {
    return this.value;
  }

  @Override
  protected void setValueField(final Object value) {
    this.value = value;
  }

  @Override
  protected long getLastModifiedField() {
    return LAST_MODIFIED_UPDATER.get(this);
  }

  @Override
  protected boolean compareAndSetLastModifiedField(final long expectedValue, final long newValue) {
    return LAST_MODIFIED_UPDATER.compareAndSet(this, expectedValue, newValue);
  }

  @Override
  public int getEntryHash() {
    return this.hash;
  }

  @Override
  protected void setEntryHash(final int hash) {
    this.hash = hash;
  }

  @Override
  public HashEntry<Object, Object> getNextEntry() {
    return this.nextEntry;
  }

  @Override
  public void setNextEntry(final HashEntry<Object, Object> nextEntry) {
    this.nextEntry = nextEntry;
  }

  // ----------------------------------------- disk code ------------------------------------------
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp

  protected void initialize(final RegionEntryContext context, final Object value) {
    boolean isBackup;
    if (context instanceof InternalRegion) {
      isBackup = ((InternalRegion) context).getDiskRegion().isBackup();
    } else if (context instanceof PlaceHolderDiskRegion) {
      isBackup = true;
    } else {
      throw new IllegalArgumentException("expected a InternalRegion or PlaceHolderDiskRegion");
    }
    // Delay the initialization of DiskID if overflow only
    if (isBackup) {
      diskInitialize(context, value);
    }
  }

  @Override
  public synchronized int updateAsyncEntrySize(final EvictionController evictionController) {
    // Sized with a null value: async path recomputes size without reading the value.
    int oldSize = getEntrySize();
    int newSize = evictionController.entrySize(getKeyForSizing(), null);
    setEntrySize(newSize);
    int delta = newSize - oldSize;
    return delta;
  }

  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp

  @Override
  public DiskId getDiskId() {
    return this.id;
  }

  @Override
  public void setDiskId(final RegionEntry oldEntry) {
    this.id = ((DiskEntry) oldEntry).getDiskId();
  }

  private void diskInitialize(final RegionEntryContext context, final Object value) {
    DiskRecoveryStore diskRecoveryStore = (DiskRecoveryStore) context;
    DiskStoreImpl diskStore = diskRecoveryStore.getDiskStore();
    long maxOplogSize = diskStore.getMaxOplogSize();
    // get appropriate instance of DiskId implementation based on maxOplogSize
    this.id = DiskId.createDiskId(maxOplogSize, true, diskStore.needsLinkedList());
    Helper.initialize(this, diskRecoveryStore, value);
  }

  // --------------------------------------- eviction code ----------------------------------------
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp

  @Override
  public void setDelayedDiskId(final DiskRecoveryStore diskRecoveryStore) {
    DiskStoreImpl diskStore = diskRecoveryStore.getDiskStore();
    long maxOplogSize = diskStore.getMaxOplogSize();
    // second argument false here (vs. true in diskInitialize) — overflow-only entry
    this.id = DiskId.createDiskId(maxOplogSize, false, diskStore.needsLinkedList());
  }

  @Override
  public synchronized int updateEntrySize(final EvictionController evictionController) {
    // OFFHEAP: getValue ok w/o incing refcount because we are synced and only getting the size
    return updateEntrySize(evictionController, getValue());
  }

  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp

  @Override
  public synchronized int updateEntrySize(final EvictionController evictionController,
      final Object value) {
    int oldSize = getEntrySize();
    int newSize = evictionController.entrySize(getKeyForSizing(), value);
    setEntrySize(newSize);
    int delta = newSize - oldSize;
    return delta;
  }

  @Override
  public boolean isRecentlyUsed() {
    return areAnyBitsSet(RECENTLY_USED);
  }

  @Override
  public void setRecentlyUsed(RegionEntryContext context) {
    if (!isRecentlyUsed()) {
      setBits(RECENTLY_USED);
      context.incRecentlyUsed();
    }
  }

  @Override
  public void unsetRecentlyUsed() {
    // NOTE(review): clearBits takes the inverted mask; semantics defined in the superclass.
    clearBits(~RECENTLY_USED);
  }

  @Override
  public boolean isEvicted() {
    return areAnyBitsSet(EVICTED);
  }

  @Override
  public void setEvicted() {
    setBits(EVICTED);
  }

  @Override
  public void unsetEvicted() {
    clearBits(~EVICTED);
  }

  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp

  // Intrusive doubly-linked list node for the LRU eviction list.
  private EvictionNode nextEvictionNode;

  private EvictionNode previousEvictionNode;

  private int size;

  @Override
  public void setNext(final EvictionNode nextEvictionNode) {
    this.nextEvictionNode = nextEvictionNode;
  }

  @Override
  public EvictionNode next() {
    return this.nextEvictionNode;
  }

  @Override
  public void setPrevious(final EvictionNode previousEvictionNode) {
    this.previousEvictionNode = previousEvictionNode;
  }

  @Override
  public EvictionNode previous() {
    return this.previousEvictionNode;
  }

  @Override
  public int getEntrySize() {
    return this.size;
  }

  protected void setEntrySize(final int size) {
    this.size = size;
  }

  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp

  @Override
  public Object getKeyForSizing() {
    // inline keys always report null for sizing since the size comes from the entry size
    return null;
  }

  // ----------------------------------------- key code -------------------------------------------
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp

  @Override
  public Object getKey() {
    // Boxes the inline primitive key on each call.
    return this.key;
  }

  @Override
  public boolean isKeyEqual(final Object key) {
    if (key instanceof Long) {
      return ((Long) key).longValue() == this.key;
    }
    return false;
  }

  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
}