gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Copyright (c) 2008 Cameron Zemek
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to
* deal in the Software without restriction, including without limitation the
* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
* sell copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
*/
package net.zeminvaders.lang;
import java.io.IOException;
import java.io.Reader;
/**
* The <a href="http://en.wikipedia.org/wiki/Lexical_analysis#Scanner">lexer</a>
* is used to read characters and identify tokens and pass them to the parser
*
* @author <a href="mailto:grom@zeminvaders.net">Cameron Zemek</a>
*/
/**
 * The <a href="http://en.wikipedia.org/wiki/Lexical_analysis#Scanner">lexer</a>
 * is used to read characters and identify tokens and pass them to the parser.
 *
 * @author <a href="mailto:grom@zeminvaders.net">Cameron Zemek</a>
 */
public class Lexer {
    /** Sentinel value returned by the reader at end of input. */
    static final private int END_OF_FILE = -1;

    // Current source position, used for token positions and error reporting.
    private int lineNo = 1;
    private int columnNo = 1;

    // Two characters of lookahead are required to distinguish tokens such as
    // '/' vs "//" vs "/*", '=' vs "==", and '0' vs "0x..." prefixes.
    private PeekReader in;

    public Lexer(Reader in) throws IOException {
        this.in = new PeekReader(in, 2);
    }

    /** Peek at the i-th character ahead (1-based) without consuming input. */
    private int lookAhead(int i) {
        return in.peek(i);
    }

    /**
     * Consume and return the next character while tracking line/column numbers.
     *
     * @throws LexerException if the underlying reader fails
     */
    private int read() {
        try {
            int c = in.read(); // FIX: removed stray extra semicolon
            if (c == '\n') {
                lineNo++;
                columnNo = 0;
            }
            columnNo++;
            return c;
        } catch (IOException e) {
            throw new LexerException(e.getMessage(), lineNo, columnNo);
        }
    }

    private void close() {
        try {
            in.close();
        } catch (IOException e) {
            // Best-effort close at end of the token stream; nothing to recover.
        }
    }

    /** Consume the current character and peek at the one that follows. */
    private int next() {
        read();
        return lookAhead(1);
    }

    /**
     * Consume the next character, requiring it to equal {@code c}.
     *
     * @throws LexerException if a different character (or EOF) is found
     */
    private char match(char c) {
        int input = read();
        if (input != c) {
            String inputChar = (input != END_OF_FILE) ? "" + (char) input : "END_OF_FILE";
            throw new LexerException("Expected '" + c + "' but got '" + inputChar + "'", lineNo, columnNo);
        }
        return c;
    }

    /** Consume the next characters, requiring them to spell {@code str}. */
    private String match(String str) {
        for (int i = 0; i < str.length(); i++) {
            match(str.charAt(i));
        }
        return str;
    }

    /** Record the current position, consume {@code c}, and build a token from it. */
    private Token createToken(TokenType type, char c) {
        SourcePosition pos = new SourcePosition(lineNo, columnNo);
        match(c);
        return new Token(pos, type, "" + c);
    }

    /** Record the current position, consume {@code str}, and build a token from it. */
    private Token createToken(TokenType type, String str) {
        SourcePosition pos = new SourcePosition(lineNo, columnNo);
        match(str);
        return new Token(pos, type, str);
    }

    /**
     * Scan and return the next token.
     *
     * @return the next token, or {@code null} once end of input is reached
     * @throws LexerException on an unexpected character or malformed token
     */
    public Token getNextToken() {
        int character = lookAhead(1);
        // Skip whitespace
        while (character == ' ' || character == '\t' ||
               character == '\r' || character == '\n') {
            character = next();
        }
        switch (character) {
            case END_OF_FILE: {
                // End of character stream.
                // Return null to indicate end of token stream.
                close();
                return null;
            }
            case ';': {
                // Consistency: use the char overload like the other single-char tokens.
                return createToken(TokenType.END_STATEMENT, ';');
            }
            case '+': {
                return createToken(TokenType.PLUS, '+');
            }
            case '-': {
                return createToken(TokenType.MINUS, '-');
            }
            case '*': {
                return createToken(TokenType.MULTIPLY, '*');
            }
            case '/': {
                // '/' starts a line comment, a block comment, or division.
                int char2 = lookAhead(2);
                if (char2 == '/') {
                    return matchLineComment();
                } else if (char2 == '*') {
                    return matchBlockComment();
                } else {
                    return createToken(TokenType.DIVIDE, '/');
                }
            }
            case '%': {
                return createToken(TokenType.MOD, '%');
            }
            case '^': {
                return createToken(TokenType.POWER, '^');
            }
            case ',': {
                return createToken(TokenType.COMMA, ',');
            }
            case '~': {
                return createToken(TokenType.CONCAT, '~');
            }
            case ':': {
                return createToken(TokenType.COLON, ':');
            }
            case '(': {
                return createToken(TokenType.LPAREN, '(');
            }
            case ')': {
                return createToken(TokenType.RPAREN, ')');
            }
            case '{': {
                return createToken(TokenType.LBRACE, '{');
            }
            case '}': {
                return createToken(TokenType.RBRACE, '}');
            }
            case '[': {
                return createToken(TokenType.LBRACKET, '[');
            }
            case ']': {
                return createToken(TokenType.RBRACKET, ']');
            }
            case '=': {
                if (lookAhead(2) == '=') {
                    return createToken(TokenType.EQUAL, "==");
                } else {
                    return createToken(TokenType.ASSIGN, '=');
                }
            }
            case '|': {
                // A lone '|' is rejected by match() inside createToken.
                return createToken(TokenType.OR, "||");
            }
            case '&': {
                // A lone '&' is rejected by match() inside createToken.
                return createToken(TokenType.AND, "&&");
            }
            case '!': {
                if (lookAhead(2) == '=') {
                    return createToken(TokenType.NOT_EQUAL, "!=");
                } else {
                    return createToken(TokenType.NOT, '!');
                }
            }
            case '<': {
                if (lookAhead(2) == '=') {
                    return createToken(TokenType.LESS_EQUAL, "<=");
                } else {
                    return createToken(TokenType.LESS_THEN, '<');
                }
            }
            case '>': {
                if (lookAhead(2) == '=') {
                    return createToken(TokenType.GREATER_EQUAL, ">=");
                } else {
                    return createToken(TokenType.GREATER_THEN, '>');
                }
            }
            case '\'':
            case '"': {
                return matchStringLiteral((char) character);
            }
            default: {
                if (character == '.' || (character >= '0' && character <= '9')) {
                    return matchNumber();
                } else if ((character >= 'A' && character <= 'Z') ||
                           (character >= 'a' && character <= 'z') ||
                           character == '_') {
                    return matchIdentifier();
                } else {
                    throw new LexerException("Unexpected '" + ((char) character) + "' character", lineNo, columnNo);
                }
            }
        }
    }

    /** Match a "//" comment running to end of line (or end of file). */
    private Token matchLineComment() {
        SourcePosition pos = new SourcePosition(lineNo, columnNo);
        match("//");
        StringBuilder sb = new StringBuilder();
        int character = lookAhead(1);
        while (character != '\r' && character != '\n' && character != END_OF_FILE) {
            sb.append((char) character);
            character = next();
        }
        return new Token(pos, TokenType.COMMENT, sb.toString());
    }

    /**
     * Match a block comment. Nested block comments are not supported.
     *
     * @throws LexerException if end of file is reached before the closing marker
     */
    private Token matchBlockComment() {
        SourcePosition pos = new SourcePosition(lineNo, columnNo);
        match("/*");
        StringBuilder sb = new StringBuilder();
        int character = lookAhead(1);
        while (true) {
            if (character == END_OF_FILE) {
                throw new LexerException("Expecting */ but found end of file", lineNo, columnNo);
            }
            if (lookAhead(1) == '*' && lookAhead(2) == '/') {
                break;
            }
            sb.append((char) character);
            character = next();
        }
        match("*/");
        return new Token(pos, TokenType.COMMENT, sb.toString());
    }

    /**
     * Append a run of one or more decimal digits to {@code sb}.
     *
     * @return the number of digits consumed (always >= 1)
     * @throws LexerException if no digit is found
     */
    private int matchDigits(StringBuilder sb) {
        int character = lookAhead(1);
        int count = 0;
        while (character >= '0' && character <= '9') {
            sb.append((char) character);
            character = next();
            count++;
        }
        if (count == 0) {
            throw new LexerException("Unexpected '" + ((char) character) + "' character", lineNo, columnNo);
        }
        return count;
    }

    /** Match a decimal number: IntegerPart [ '.' FractionPart ] [ Exponent ]. */
    private void matchDecimalNumber(StringBuilder sb) {
        int character = lookAhead(1);
        // IntegerPart
        if (character >= '0' && character <= '9') {
            matchDigits(sb);
            character = lookAhead(1);
        }
        // FractionPart
        if (character == '.') {
            sb.append('.');
            character = next();
            matchDigits(sb);
            character = lookAhead(1);
        }
        // Exponent (the marker is normalized to lowercase 'e')
        if (character == 'e' || character == 'E') {
            sb.append('e');
            character = next();
            if (character == '-' || character == '+') {
                // FIX: cast to char. The previous code appended the int
                // codepoint, producing "45"/"43" instead of '-'/'+' in the
                // token text (e.g. "1e-2" lexed as "1e452").
                sb.append((char) character);
                character = next();
            }
            matchDigits(sb);
        }
    }

    /**
     * Append a run of one or more octal digits to {@code sb}.
     *
     * @throws LexerException if no octal digit is found
     */
    private int matchOctalDigits(StringBuilder sb) {
        int character = lookAhead(1);
        int count = 0;
        while (character >= '0' && character <= '7') {
            sb.append((char) character);
            character = next();
            count++;
        }
        if (count == 0) {
            throw new LexerException("Unexpected '" + ((char) character) + "' character", lineNo, columnNo);
        }
        return count;
    }

    /**
     * Append a run of one or more hexadecimal digits to {@code sb}.
     *
     * @throws LexerException if no hex digit is found
     */
    private int matchHexDigits(StringBuilder sb) {
        int character = lookAhead(1);
        int count = 0;
        while ((character >= '0' && character <= '9') ||
               (character >= 'a' && character <= 'f') ||
               (character >= 'A' && character <= 'F')) {
            sb.append((char) character);
            character = next();
            count++;
        }
        if (count == 0) {
            throw new LexerException("Unexpected '" + ((char) character) + "' character", lineNo, columnNo);
        }
        return count;
    }

    /**
     * Append a run of one or more binary digits to {@code sb}.
     *
     * @throws LexerException if no binary digit is found
     */
    private int matchBinaryDigits(StringBuilder sb) {
        int character = lookAhead(1);
        int count = 0;
        while (character == '0' || character == '1') {
            sb.append((char) character);
            character = next();
            count++;
        }
        if (count == 0) {
            throw new LexerException("Unexpected '" + ((char) character) + "' character", lineNo, columnNo);
        }
        return count;
    }

    /**
     * Match a number literal: decimal (with optional fraction/exponent),
     * or a 0o/0x/0b prefixed octal/hex/binary literal.
     */
    private Token matchNumber() {
        SourcePosition pos = new SourcePosition(lineNo, columnNo);
        StringBuilder sb = new StringBuilder();
        int digit = lookAhead(1);
        char secondDigit = (char) lookAhead(2);
        if (digit == '0' && (secondDigit == 'o' || secondDigit == 'O')) {
            sb.append(match('0'));
            sb.append(match(secondDigit));
            matchOctalDigits(sb);
        } else if (digit == '0' && (secondDigit == 'x' || secondDigit == 'X')) {
            sb.append(match('0'));
            sb.append(match(secondDigit));
            matchHexDigits(sb);
        } else if (digit == '0' && (secondDigit == 'b' || secondDigit == 'B')) {
            sb.append(match('0'));
            sb.append(match(secondDigit));
            matchBinaryDigits(sb);
        } else {
            matchDecimalNumber(sb);
        }
        /*
         * Check that another number does not immediately follow as this means
         * we have an invalid number. For example, the input 12.34.5 after the
         * above code finishes leaves us with 12.34 matched. Without this
         * check .5 will then be matched separately as another valid number.
         */
        int character = lookAhead(1);
        if (character == '.' || (character >= '0' && character <= '9')) {
            throw new LexerException("Unexpected '" + ((char) character) + "' character", lineNo, columnNo);
        }
        return new Token(pos, TokenType.NUMBER, sb.toString());
    }

    /**
     * An identifier is either a keyword, function, or variable.
     *
     * @return Token
     */
    private Token matchIdentifier() {
        SourcePosition pos = new SourcePosition(lineNo, columnNo);
        StringBuilder sb = new StringBuilder();
        int character = lookAhead(1);
        while ((character >= 'a' && character <= 'z') ||
               (character >= 'A' && character <= 'Z') ||
               (character >= '0' && character <= '9') ||
               character == '_') {
            sb.append((char) character);
            character = next();
        }
        String word = sb.toString();
        // Reserved words take priority; everything else is a variable name.
        if (word.equals("true")) {
            return new Token(pos, TokenType.TRUE, word);
        } else if (word.equals("false")) {
            return new Token(pos, TokenType.FALSE, word);
        } else if (word.equals("if")) {
            return new Token(pos, TokenType.IF, word);
        } else if (word.equals("else")) {
            return new Token(pos, TokenType.ELSE, word);
        } else if (word.equals("while")) {
            return new Token(pos, TokenType.WHILE, word);
        } else if (word.equals("foreach")) {
            return new Token(pos, TokenType.FOR_EACH, word);
        } else if (word.equals("as")) {
            return new Token(pos, TokenType.AS, word);
        } else if (word.equals("function")) {
            return new Token(pos, TokenType.FUNCTION, word);
        } else if (word.equals("return")) {
            return new Token(pos, TokenType.RETURN, word);
        } else {
            return new Token(pos, TokenType.VARIABLE, word);
        }
    }

    /**
     * Match a quoted string literal. No escape sequences are processed;
     * the literal runs until the matching quote character.
     *
     * @throws LexerException if the closing quote is missing at end of file
     */
    private Token matchStringLiteral(char quote) {
        SourcePosition pos = new SourcePosition(lineNo, columnNo);
        match(quote);
        StringBuilder sb = new StringBuilder();
        int character = lookAhead(1);
        while (character != quote && character != END_OF_FILE) {
            sb.append((char) character);
            character = next();
        }
        match(quote);
        return new Token(pos, TokenType.STRING_LITERAL, sb.toString());
    }
}
| |
/*
* Copyright (C) 2009 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.sdklib.internal.repository.packages;
import com.android.annotations.NonNull;
import com.android.annotations.Nullable;
import com.android.sdklib.internal.repository.archives.ArchFilter;
import com.android.sdklib.internal.repository.archives.BitSize;
import com.android.sdklib.internal.repository.archives.HostOs;
import com.android.sdklib.internal.repository.archives.LegacyArch;
import com.android.sdklib.internal.repository.archives.LegacyOs;
import com.android.sdklib.repository.FullRevision;
import com.android.sdklib.repository.NoPreviewRevision;
import com.android.sdklib.repository.MajorRevision;
import com.android.sdklib.repository.PkgProps;
import com.android.sdklib.repository.SdkRepoConstants;
import org.w3c.dom.Node;
import java.util.Properties;
/**
* Misc utilities to help extracting elements and attributes out of a repository XML document.
*
* @deprecated
* com.android.sdklib.internal.repository has moved into Studio as
* com.android.tools.idea.sdk.remote.internal.
*/
@Deprecated
@Deprecated
public class PackageParserUtils {

    /** Static utility class; not instantiable. */
    private PackageParserUtils() {
    }

    /**
     * Parse the {@link ArchFilter} of an &lt;archive&gt; element.
     * <p/>
     * Starting with repo schema 10, add-on schema 7 and sys-img schema 3, this is done using
     * specific optional elements contained within the &lt;archive&gt; element.
     * <p/>
     * If none of the new elements are defined, for backward compatibility we try to find
     * the previous style XML attributes "os" and "arch" in the &lt;archive&gt; element.
     *
     * @param archiveNode The &lt;archive&gt; node to parse.
     * @return A new {@link ArchFilter}
     */
    @NonNull
    public static ArchFilter parseArchFilter(@NonNull Node archiveNode) {
        String hos = PackageParserUtils.getOptionalXmlString(archiveNode, SdkRepoConstants.NODE_HOST_OS);
        String hb  = PackageParserUtils.getOptionalXmlString(archiveNode, SdkRepoConstants.NODE_HOST_BITS);
        String jb  = PackageParserUtils.getOptionalXmlString(archiveNode, SdkRepoConstants.NODE_JVM_BITS);
        String mjv = PackageParserUtils.getOptionalXmlString(archiveNode, SdkRepoConstants.NODE_MIN_JVM_VERSION);

        if (hos != null || hb != null || jb != null || mjv != null) {
            NoPreviewRevision rev = null;
            // FIX: only parse when present. parseRevision(null) is not guaranteed
            // to throw NumberFormatException (the only type caught here), so a
            // null min-jvm-version could previously escape as an NPE.
            if (mjv != null) {
                try {
                    rev = NoPreviewRevision.parseRevision(mjv);
                } catch (NumberFormatException ignored) {
                    // Malformed revision string; treat as "no minimum JVM version".
                }
            }

            return new ArchFilter(
                    HostOs.fromXmlName(hos),
                    BitSize.fromXmlName(hb),
                    BitSize.fromXmlName(jb),
                    rev);
        }

        // Legacy fallback: look for the old-style "os" and "arch" attributes.
        Properties props = new Properties();
        LegacyOs o = (LegacyOs) PackageParserUtils.getEnumAttribute(
                archiveNode, SdkRepoConstants.LEGACY_ATTR_OS, LegacyOs.values(), null);
        if (o != null) {
            props.setProperty(ArchFilter.LEGACY_PROP_OS, o.toString());
        }

        LegacyArch a = (LegacyArch) PackageParserUtils.getEnumAttribute(
                archiveNode, SdkRepoConstants.LEGACY_ATTR_ARCH, LegacyArch.values(), null);
        if (a != null) {
            props.setProperty(ArchFilter.LEGACY_PROP_ARCH, a.toString());
        }

        return new ArchFilter(props);
    }

    /**
     * Parses a full revision element such as &lt;revision&gt; or &lt;min-tools-rev&gt;.
     * This supports both the single-integer format as well as the full revision
     * format with major/minor/micro/preview sub-elements.
     *
     * @param revisionNode The node to parse.
     * @return A new {@link FullRevision}. If parsing failed, major is set to
     *         {@link FullRevision#MISSING_MAJOR_REV}.
     */
    public static FullRevision parseFullRevisionElement(Node revisionNode) {
        // This needs to support two modes:
        // - For repository XSD >= 7, <revision> contains sub-elements such as <major> or <minor>.
        // - Otherwise for repository XSD < 7, <revision> contains an integer.
        // The <major> element is mandatory, so it's easy to distinguish between both cases.
        int major = FullRevision.MISSING_MAJOR_REV,
            minor = FullRevision.IMPLICIT_MINOR_REV,
            micro = FullRevision.IMPLICIT_MICRO_REV,
            preview = FullRevision.NOT_A_PREVIEW;

        if (revisionNode != null) {
            if (PackageParserUtils.findChildElement(revisionNode,
                                                    SdkRepoConstants.NODE_MAJOR_REV) != null) {
                // <revision> has a <major> sub-element, so it's a repository XSD >= 7.
                major = PackageParserUtils.getXmlInt(revisionNode,
                        SdkRepoConstants.NODE_MAJOR_REV, FullRevision.MISSING_MAJOR_REV);
                minor = PackageParserUtils.getXmlInt(revisionNode,
                        SdkRepoConstants.NODE_MINOR_REV, FullRevision.IMPLICIT_MINOR_REV);
                micro = PackageParserUtils.getXmlInt(revisionNode,
                        SdkRepoConstants.NODE_MICRO_REV, FullRevision.IMPLICIT_MICRO_REV);
                preview = PackageParserUtils.getXmlInt(revisionNode,
                        SdkRepoConstants.NODE_PREVIEW, FullRevision.NOT_A_PREVIEW);
            } else {
                try {
                    String majorStr = revisionNode.getTextContent().trim();
                    major = Integer.parseInt(majorStr);
                } catch (Exception ignored) {
                    // Not an integer (or no text); major stays at MISSING_MAJOR_REV
                    // per the documented contract.
                }
            }
        }

        return new FullRevision(major, minor, micro, preview);
    }

    /**
     * Parses a no-preview revision element such as &lt;revision&gt;.
     * This supports both the single-integer format as well as the full revision
     * format with major/minor/micro sub-elements.
     *
     * @param revisionNode The node to parse.
     * @return A new {@link NoPreviewRevision}. If parsing failed, major is set to
     *         {@link FullRevision#MISSING_MAJOR_REV}.
     */
    public static NoPreviewRevision parseNoPreviewRevisionElement(Node revisionNode) {
        // This needs to support two modes:
        // - For addon XSD >= 6, <revision> contains sub-elements such as <major> or <minor>.
        // - Otherwise for addon XSD < 6, <revision> contains an integer.
        // The <major> element is mandatory, so it's easy to distinguish between both cases.
        int major = FullRevision.MISSING_MAJOR_REV,
            minor = FullRevision.IMPLICIT_MINOR_REV,
            micro = FullRevision.IMPLICIT_MICRO_REV;

        if (revisionNode != null) {
            if (PackageParserUtils.findChildElement(revisionNode,
                                                    SdkRepoConstants.NODE_MAJOR_REV) != null) {
                // <revision> has a <major> sub-element, so it's an addon XSD >= 6.
                major = PackageParserUtils.getXmlInt(revisionNode,
                        SdkRepoConstants.NODE_MAJOR_REV, FullRevision.MISSING_MAJOR_REV);
                minor = PackageParserUtils.getXmlInt(revisionNode,
                        SdkRepoConstants.NODE_MINOR_REV, FullRevision.IMPLICIT_MINOR_REV);
                micro = PackageParserUtils.getXmlInt(revisionNode,
                        SdkRepoConstants.NODE_MICRO_REV, FullRevision.IMPLICIT_MICRO_REV);
            } else {
                try {
                    String majorStr = revisionNode.getTextContent().trim();
                    major = Integer.parseInt(majorStr);
                } catch (Exception ignored) {
                    // Not an integer (or no text); major stays at MISSING_MAJOR_REV
                    // per the documented contract.
                }
            }
        }

        return new NoPreviewRevision(major, minor, micro);
    }

    /**
     * Returns the first child element with the given XML local name and the same NS URI.
     * If xmlLocalName is null, returns the very first child element.
     */
    public static Node findChildElement(Node node, String xmlLocalName) {
        if (node != null) {
            String nsUri = node.getNamespaceURI();
            for (Node child = node.getFirstChild(); child != null; child = child.getNextSibling()) {
                if (child.getNodeType() == Node.ELEMENT_NODE) {
                    String nsUriChild = child.getNamespaceURI();
                    // Match when both namespaces are absent, or when they are equal.
                    if ((nsUri == null && nsUriChild == null) ||
                            (nsUri != null && nsUri.equals(nsUriChild))) {
                        if (xmlLocalName == null || xmlLocalName.equals(child.getLocalName())) {
                            return child;
                        }
                    }
                }
            }
        }
        return null;
    }

    /**
     * Retrieves the value of that XML element as a string.
     * Returns an empty string whether the element is missing or empty,
     * so you can't tell the difference.
     * <p/>
     * Note: use {@link #getOptionalXmlString(Node, String)} if you need to know when the
     * element is missing versus empty.
     *
     * @param node The XML <em>parent</em> node to parse.
     * @param xmlLocalName The XML local name to find in the parent node.
     * @return The text content of the element. Returns an empty string whether the element
     *         is missing or empty, so you can't tell the difference.
     */
    public static String getXmlString(Node node, String xmlLocalName) {
        return getXmlString(node, xmlLocalName, ""); //$NON-NLS-1$
    }

    /**
     * Retrieves the value of that XML element as a string.
     * Returns the defaultValue if the element is missing or empty.
     * <p/>
     * Note: use {@link #getOptionalXmlString(Node, String)} if you need to know when the
     * element is missing versus empty.
     *
     * @param node The XML <em>parent</em> node to parse.
     * @param xmlLocalName The XML local name to find in the parent node.
     * @param defaultValue A default value to return if the element is missing.
     * @return The text content of the element
     *         or the defaultValue if the element is missing or empty.
     */
    public static String getXmlString(Node node, String xmlLocalName, String defaultValue) {
        Node child = findChildElement(node, xmlLocalName);
        String content = child == null ? null : child.getTextContent();
        return content == null || content.isEmpty() ? defaultValue : content;
    }

    /**
     * Retrieves the value of that XML element as a string.
     * Returns null when the element is missing, so you can tell between a missing element
     * and an empty one.
     * <p/>
     * Note: use {@link #getXmlString(Node, String)} if you don't need to know when the
     * element is missing versus empty.
     *
     * @param node The XML <em>parent</em> node to parse.
     * @param xmlLocalName The XML local name to find in the parent node.
     * @return The text content of the element. Returns null when the element is missing.
     *         Returns an empty string whether the element is present but empty.
     */
    public static String getOptionalXmlString(Node node, String xmlLocalName) {
        Node child = findChildElement(node, xmlLocalName);
        return child == null ? null : child.getTextContent();
    }

    /**
     * Retrieves the value of that XML element as an integer.
     * Returns the default value when the element is missing or is not an integer.
     */
    public static int getXmlInt(Node node, String xmlLocalName, int defaultValue) {
        String s = getXmlString(node, xmlLocalName);
        try {
            return Integer.parseInt(s);
        } catch (NumberFormatException e) {
            return defaultValue;
        }
    }

    /**
     * Retrieves the value of that XML element as a long.
     * Returns the default value when the element is missing or is not an integer.
     */
    public static long getXmlLong(Node node, String xmlLocalName, long defaultValue) {
        String s = getXmlString(node, xmlLocalName);
        try {
            return Long.parseLong(s);
        } catch (NumberFormatException e) {
            return defaultValue;
        }
    }

    /**
     * Retrieve an attribute which value must match one of the given enums using a
     * case-insensitive name match.
     *
     * Returns defaultValue if the attribute does not exist or its value does not match
     * the given enum values.
     */
    public static Object getEnumAttribute(
            Node archiveNode,
            String attrName,
            Object[] values,
            Object defaultValue) {
        Node attr = archiveNode.getAttributes().getNamedItem(attrName);
        if (attr != null) {
            String found = attr.getNodeValue();
            for (Object value : values) {
                if (value.toString().equalsIgnoreCase(found)) {
                    return value;
                }
            }
        }
        return defaultValue;
    }

    /**
     * Utility method that returns a property from a {@link Properties} object.
     * Returns the default value if props is null or if the property is not defined.
     *
     * @param props The {@link Properties} to search into.
     *   If null, the default value is returned.
     * @param propKey The name of the property. Must not be null.
     * @param defaultValue The default value to return if {@code props} is null or if the
     *   key is not found. Can be null.
     * @return The string value of the given key in the properties, or null if the key
     *   isn't found or if {@code props} is null.
     */
    @Nullable
    public static String getProperty(
            @Nullable Properties props,
            @NonNull String propKey,
            @Nullable String defaultValue) {
        if (props == null) {
            return defaultValue;
        }
        return props.getProperty(propKey, defaultValue);
    }

    /**
     * Utility method that returns an integer property from a {@link Properties} object.
     * Returns the default value if props is null or if the property is not defined or
     * cannot be parsed to an integer.
     *
     * @param props The {@link Properties} to search into.
     *   If null, the default value is returned.
     * @param propKey The name of the property. Must not be null.
     * @param defaultValue The default value to return if {@code props} is null or if the
     *   key is not found. Can be null.
     * @return The integer value of the given key in the properties, or the {@code defaultValue}.
     */
    public static int getPropertyInt(
            @Nullable Properties props,
            @NonNull String propKey,
            int defaultValue) {
        String s = props != null ? props.getProperty(propKey, null) : null;
        if (s != null) {
            try {
                return Integer.parseInt(s);
            } catch (Exception ignored) {
                // Fall through to the default when the value isn't an integer.
            }
        }
        return defaultValue;
    }

    /**
     * Utility method to parse the {@link PkgProps#PKG_REVISION} property as a full
     * revision (major.minor.micro.preview).
     *
     * @param props The properties to parse.
     * @param propKey The name of the property. Must not be null.
     * @return A {@link FullRevision} or null if there is no such property or it couldn't be parsed.
     */
    @Nullable
    public static FullRevision getPropertyFull(
            @Nullable Properties props,
            @NonNull String propKey) {
        String revStr = getProperty(props, propKey, null);

        FullRevision rev = null;
        if (revStr != null) {
            try {
                rev = FullRevision.parseRevision(revStr);
            } catch (NumberFormatException ignored) {
                // Malformed revision string; return null per the contract.
            }
        }

        return rev;
    }

    /**
     * Utility method to parse the {@link PkgProps#PKG_REVISION} property as a major
     * revision (major integer, no minor/micro/preview parts.)
     *
     * @param props The properties to parse.
     * @param propKey The name of the property. Must not be null.
     * @return A {@link MajorRevision} or null if there is no such property or it couldn't be parsed.
     */
    @Nullable
    public static MajorRevision getPropertyMajor(
            @Nullable Properties props,
            @NonNull String propKey) {
        String revStr = getProperty(props, propKey, null);

        MajorRevision rev = null;
        if (revStr != null) {
            try {
                rev = MajorRevision.parseRevision(revStr);
            } catch (NumberFormatException ignored) {
                // Malformed revision string; return null per the contract.
            }
        }

        return rev;
    }

    /**
     * Utility method to parse the {@link PkgProps#PKG_REVISION} property as a no-preview
     * revision (major.minor.micro integers but no preview part.)
     *
     * @param props The properties to parse.
     * @param propKey The name of the property. Must not be null.
     * @return A {@link NoPreviewRevision} or
     *         null if there is no such property or it couldn't be parsed.
     */
    @Nullable
    public static NoPreviewRevision getPropertyNoPreview(
            @Nullable Properties props,
            @NonNull String propKey) {
        String revStr = getProperty(props, propKey, null);

        NoPreviewRevision rev = null;
        if (revStr != null) {
            try {
                rev = NoPreviewRevision.parseRevision(revStr);
            } catch (NumberFormatException ignored) {
                // Malformed revision string; return null per the contract.
            }
        }

        return rev;
    }
}
| |
/*
* This file is part of MinePass, licensed under the MIT License (MIT).
*
* Copyright (c) MinePass.net <http://www.minepass.net>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package net.minepass.gs.mc.wrapper;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Properties;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
import net.minepass.api.gameserver.MPAsciiArt;
import net.minepass.api.gameserver.MPConfig;
import net.minepass.api.gameserver.MPConfigException;
import net.minepass.api.gameserver.MPStartupException;
import net.minepass.api.gameserver.embed.solidtx.TxLog;
import net.minepass.api.gameserver.embed.solidtx.TxStack;
import net.minepass.gs.mc.MinePassMC;
/**
* MinePass wrapper for Vanilla Minecraft.
* <p/>
* This wrapper facilities the background syncing of MinePass authorization
* data by creating secondary threads before launching the Minecraft server.
* <p/>
* The vanilla server JAR must be renamed/symlinked as minecraft_server.jar
* and located in the same directory as the wrapper.
* <p/>
* In order to facilitate command functions a bridge is created between the
* system's Standard-In (Console) and the Minecraft server's input.
* <p/>
* Standard console input is passed through as normal, however MinePass can
* also send commands to the server as needed.
* <p/>
* In addition, the log4j system is reconfigured to copy server output to
* the MinePass plugin so that it can respond to system and game events.
* <p/>
* Any command line parameters passed to the wrapper are forwarded to the
* vanilla server JAR at startup.
*/
public class MP_MinecraftWrapper {
// Wrapper configuration file, expected in the working directory.
static final String configFileName = "minepass.config";
// The vanilla server binary must use exactly this name; the wrapper's
// MANIFEST classpath references it, so renaming breaks reflective launch.
static final String serverJarFileName = "minecraft_server.jar"; // must match MANIFEST include
// Main
// ------------------------------------------------------------------------------------------------------------- //
/**
 * Entry point: builds the forwarded server argument list, boots MinePass,
 * starts the background managers, then launches the vanilla server.
 *
 * @param args command line parameters; all are forwarded to the server JAR
 */
public static void main(String[] args) {
    ArrayList<String> serverArgs = new ArrayList<>();
    serverArgs.add("nogui"); // GUI mode seems to create logging intercept issues
    for (String arg : args) {
        // "nogui" is already present; forward everything else untouched.
        if (!"nogui".equals(arg)) {
            serverArgs.add(arg);
        }
    }
    MP_MinecraftWrapper wrapper = new MP_MinecraftWrapper();
    if (!wrapper.initMinePass()) {
        return; // Configuration problem already reported; do not start the server.
    }
    wrapper.launchManagers();
    // Delay to make the wrapper initial log output more visible, and give
    // the managers time to initialize.
    try {
        Thread.sleep(1500);
    } catch (InterruptedException e) {
        return;
    }
    // Launch vanilla Minecraft.
    wrapper.launchServer(serverArgs.toArray(new String[serverArgs.size()]));
}
// Wrapper Instance
// ------------------------------------------------------------------------------------------------------------- //
// Logger supplied by the MinePass core once it is initialized.
private TxLog logger;
// Debug flag read from config in initMinePass(); null until then.
private Boolean debug;
// Wrapper version string, read from the static properties at construction.
private String wrapperVersion;
// Handle to the (renamed/symlinked) vanilla server JAR.
private File serverJarFile;
// MinePass core instance; null until initMinePass() succeeds.
private MinePassMC minepass;
// Background managers started by launchManagers().
private ConsoleManager consoleManager;
private ServerManager serverManager;
// Shared state object handed out to collaborators via getState().
private final CommonState state = new CommonState();
// Constructor: resolves the wrapper version and verifies the server JAR exists.
// NOTE(review): `properties` is a static member declared outside this view
// (presumably loaded from a bundled properties resource) — confirm before relying on it.
public MP_MinecraftWrapper() {
this.wrapperVersion = properties.getProperty("version");
// The vanilla server JAR must be properly named to conform to the wrapper's MANIFEST.
serverJarFile = new File(serverJarFileName);
if (!serverJarFile.exists()) {
// Fail fast with console guidance: nothing works without the server binary.
System.out.println("ERROR: Could not find " + serverJarFileName);
System.out.println("Please download or rename the vanilla server binary.");
throw new RuntimeException("Could not find " + serverJarFileName);
}
TxLog.log(TxLog.Level.INFO, String.format("MinePass Wrapper (%s) for Minecraft", wrapperVersion));
}
/**
* Ensure MinePass has a valid configuration and perform an initial sync if needed.
* <p/>
* An initial sync is only required following software updates, otherwise MinePass
* data is transmitted asynchronously and the server is unaffected by any possible
* network conditions.
*/
/**
 * Ensure MinePass has a valid configuration and perform an initial sync if needed.
 * <p/>
 * An initial sync is only required following software updates, otherwise MinePass
 * data is transmitted asynchronously and the server is unaffected by any possible
 * network conditions.
 *
 * @return true when MinePass initialized successfully; false on configuration
 *         or startup failure (details already printed/logged)
 */
private boolean initMinePass() {
// Whitelist file must exist before the core starts; createNewFile() is a
// no-op when it is already present.
try {
MinePassMC.whitelistFile.createNewFile();
} catch (IOException e) {
throw new RuntimeException("Could not create whitelist.", e);
}
try {
// Only the exact string "true" enables debug mode.
debug = config.getProperty("debug_enabled", "false").equals("true");
if (debug) {
TxStack.debug = true;
}
// Assemble the MinePass configuration from the static config properties.
MPConfig mtc = new MPConfig();
mtc.variant = "MCWrapper ".concat(wrapperVersion);
mtc.api_host = config.getProperty("setup_api_host");
mtc.server_uuid = config.getProperty("setup_server_id");
mtc.server_secret = config.getProperty("setup_server_secret");
// Whitelist enforcement defaults to on; only an explicit "false" disables it.
mtc.enforce_whitelist = !config.getProperty("enforce_whitelist", "true").equalsIgnoreCase("false");
/**
 * The MinePass network stack is built upon SolidTX, an MIT licensed project
 * developed in collaboration with BinaryBabel OSS.
 *
 * The source code for the MinePass game server stack is available at:
 * https://github.com/minepass/gameserver-core
 *
 * The source code and documentation for SolidTX is available at:
 * https://github.com/org-binbab/solid-tx
 *
 */
this.minepass = new MinePassMC(mtc);
this.logger = minepass.log;
minepass.setContext(this);
logger.info("MinePass Core Version: " + minepass.getVersion(), null);
logger.info("MinePass API Endpoint: " + mtc.api_host, null);
logger.info("MinePass World Server UUID: " + minepass.getServerUUID(), null);
} catch (MPConfigException e) {
// Bad or missing configuration: show the setup banner and bail out.
e.printStackTrace();
for (String x : MPAsciiArt.getNotice("Configuration Update Required")) {
TxLog.log(TxLog.Level.INFO, x);
}
TxLog.log(TxLog.Level.WARN,"Run the server configuration wizard at http://minepass.net");
TxLog.log(TxLog.Level.WARN,"Then paste the configuration into minepass.config");
return false;
} catch (MPStartupException e) {
e.printStackTrace();
return false;
}
return true;
}
/**
 * Launch the primary wrapper managers in secondary threads.
 *
 * <p>Does nothing when MinePass failed to initialise. The console manager is
 * started first, then the server manager; both run as daemon threads so they
 * never keep the JVM alive on their own.
 *
 * @see ConsoleManager
 * @see ServerManager
 */
private void launchManagers() {
    if (minepass == null) {
        return;
    }
    this.consoleManager = new ConsoleManager(this);
    startDaemonThread(consoleManager, "MPConsole");
    this.serverManager = new ServerManager(this);
    startDaemonThread(serverManager, "MinePass");
}

/** Run the given task on a freshly started, named daemon thread. */
private void startDaemonThread(Runnable task, String threadName) {
    Thread worker = new Thread(task, threadName);
    worker.setDaemon(true);
    worker.start();
}
/**
 * Launch the vanilla Minecraft server with the provided args.
 *
 * <p>Reads the {@code Main-Class} entry from the server jar's manifest and
 * invokes its static {@code main(String[])} reflectively, so the vanilla
 * server runs inside this JVM alongside the wrapper.
 *
 * @param args command-line arguments forwarded to the server's main method
 * @throws RuntimeException if the jar/manifest cannot be read, the manifest
 *         has no {@code Main-Class}, or the reflective invocation fails
 */
private void launchServer(String[] args) {
    if (minepass == null) {
        return;
    }
    String serverMainClass = null;
    try (JarFile serverJarReader = new JarFile(serverJarFile)) {
        Manifest manifest = serverJarReader.getManifest();
        // getManifest() returns null for a jar with no manifest; guard before use.
        if (manifest != null) {
            serverMainClass = manifest.getMainAttributes().getValue("Main-Class");
        }
    } catch (IOException e) {
        throw new RuntimeException("Could not read manifest from " + serverJarFileName, e);
    }
    if (serverMainClass == null) {
        // Previously this fell through to Class.forName(null) and died with an
        // unhelpful NPE; fail with a message naming the offending jar instead.
        throw new RuntimeException("No Main-Class manifest entry in " + serverJarFileName);
    }
    try {
        Class<?> serverClass = Class.forName(serverMainClass);
        Method serverClassMain = serverClass.getDeclaredMethod("main", String[].class);
        // Wrap args so the String[] is passed as a single varargs argument.
        Object[] serverArgs = {args};
        serverClassMain.invoke(null, serverArgs);
    } catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException | ClassNotFoundException e) {
        throw new RuntimeException("Failed to invoke server startup", e);
    }
}
/** @return the MinePass logger assigned during {@code initMinePass}. */
public TxLog getLogger() {
    return logger;
}
/** @return the MinePass core instance created during {@code initMinePass}. */
public MinePassMC getMinepass() {
    return minepass;
}
/** @return the console manager started by {@code launchManagers}. */
public ConsoleManager getConsoleManager() {
    return consoleManager;
}
/** @return the server manager started by {@code launchManagers}. */
public ServerManager getServerManager() {
    return serverManager;
}
/** @return the shared wrapper state object. */
public CommonState getState() {
    return state;
}
/** @return this wrapper's version string. */
public String getWrapperVersion() {
    return wrapperVersion;
}
/** @return whether debug logging was enabled via {@code debug_enabled} in the config. */
public boolean getDebug() {
    return debug;
}
// Configuration
// ------------------------------------------------------------------------------------------------------------- //
/**
 * Wrapper configuration, loaded once at class-initialisation time from the
 * on-disk config file, falling back to the default bundled in the jar.
 */
public static final Properties config;
static {
    File configFile = new File(configFileName);
    InputStream configFileInput;
    try {
        configFileInput = new FileInputStream(configFile);
    } catch (FileNotFoundException e) {
        // Use default config file from jar resource.
        configFileInput = MP_MinecraftWrapper.class.getResourceAsStream("/config.properties");
    }
    if (configFileInput == null) {
        // getResourceAsStream returns null when the resource is absent; without
        // this guard Properties.load(null) would die with a bare NPE.
        throw new RuntimeException("Missing config file and default resource /config.properties");
    }
    config = new Properties();
    try {
        config.load(configFileInput);
    } catch (IOException e) {
        throw new RuntimeException("Failed to read properties file", e);
    } finally {
        closeQuietly(configFileInput);
    }
    // Save default config (if needed) so the admin has a file to edit.
    if (!configFile.exists()) {
        try (OutputStream configFileOutput = new FileOutputStream(configFile)) {
            config.store(configFileOutput, "MinePass Configuration");
        } catch (IOException e) {
            // Best-effort: a missing default file is not fatal to startup.
            e.printStackTrace();
        }
    }
}
/** Build-time wrapper metadata (e.g. version), bundled as a jar resource. */
public static final Properties properties;
static {
    InputStream propertiesInput = MP_MinecraftWrapper.class.getResourceAsStream("/wrapper.properties");
    if (propertiesInput == null) {
        // Guard against a missing resource; Properties.load(null) would NPE
        // with no hint about which resource was absent.
        throw new RuntimeException("Missing resource /wrapper.properties");
    }
    properties = new Properties();
    try {
        properties.load(propertiesInput);
    } catch (IOException e) {
        throw new RuntimeException("Failed to read properties file", e);
    } finally {
        closeQuietly(propertiesInput);
    }
}
// IO Helpers
// ------------------------------------------------------------------------------------------------------------- //
/**
 * Close an InputStream, swallowing any {@link IOException}, so callers do not
 * need their own try/catch. Null input is accepted and ignored.
 *
 * @param input the input to close; may be null
 */
static public void closeQuietly(InputStream input) {
    if (input == null) {
        return;
    }
    try {
        input.close();
    } catch (IOException ignored) {
        // Best-effort close: failures are deliberately ignored.
    }
}
/**
 * Close an OutputStream, swallowing any {@link IOException}, so callers do not
 * need their own try/catch. Null output is accepted and ignored.
 *
 * @param output the output to close; may be null
 */
static public void closeQuietly(OutputStream output) {
    if (output == null) {
        return;
    }
    try {
        output.close();
    } catch (IOException ignored) {
        // Best-effort close: failures are deliberately ignored.
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.schema.SchemaField;
import org.junit.BeforeClass;
import org.junit.Test;
import java.util.*;
@Slow
public class TestRandomFaceting extends SolrTestCaseJ4 {
    public static final String FOO_STRING_FIELD = "foo_s1";
    public static final String SMALL_STRING_FIELD = "small_s1";
    public static final String SMALL_INT_FIELD = "small_i";

    @BeforeClass
    public static void beforeTests() throws Exception {
        System.setProperty("enable.update.log", "false"); // schema12 doesn't support _version_
        initCore("solrconfig.xml","schema12.xml");
    }

    // Target number of documents for the current randomised index round.
    int indexSize;
    // Randomised field definitions used to generate documents.
    List<FldType> types;
    // In-memory model of indexed docs keyed by id; null until docs are indexed.
    Map<Comparable, Doc> model = null;
    // When true, responses from the different facet.method values are cross-checked.
    boolean validateResponses = true;

    // Reset the index and pick a fresh random schema of field types and sizes.
    void init() {
        Random rand = random();
        clearIndex();
        model = null;
        // Either a tiny index (1..10 docs) or a mid-sized one (10..109 docs).
        indexSize = rand.nextBoolean() ? (rand.nextInt(10) + 1) : (rand.nextInt(100) + 10);
        types = new ArrayList<>();
        types.add(new FldType("id",ONE_ONE, new SVal('A','Z',4,4)));
        types.add(new FldType("score_f",ONE_ONE, new FVal(1,100)));
        types.add(new FldType("small_f",ONE_ONE, new FVal(-4,5)));
        types.add(new FldType("small_d",ONE_ONE, new FVal(-4,5)));
        types.add(new FldType("foo_i",ZERO_ONE, new IRange(-2,indexSize)));
        types.add(new FldType("rare_s1",new IValsPercent(95,0,5,1), new SVal('a','b',1,5)));
        types.add(new FldType("str_s1",ZERO_ONE, new SVal('a','z',1,2)));
        types.add(new FldType("long_s1",ZERO_ONE, new SVal('a','b',1,5)));
        types.add(new FldType("small_s1",ZERO_ONE, new SVal('a',(char)('c'+indexSize/3),1,1)));
        types.add(new FldType("small2_s1",ZERO_ONE, new SVal('a',(char)('c'+indexSize/3),1,1)));
        types.add(new FldType("small2_ss",ZERO_TWO, new SVal('a',(char)('c'+indexSize/3),1,1)));
        types.add(new FldType("small3_ss",new IRange(0,25), new SVal('A','z',1,1)));
        types.add(new FldType("small_i",ZERO_ONE, new IRange(-2,5+indexSize/3)));
        types.add(new FldType("small2_i",ZERO_ONE, new IRange(-1,5+indexSize/3)));
        types.add(new FldType("small2_is",ZERO_TWO, new IRange(-2,5+indexSize/3)));
        types.add(new FldType("small3_is",new IRange(0,25), new IRange(-50,50)));
        types.add(new FldType("missing_i",new IRange(0,0), new IRange(0,100)));
        types.add(new FldType("missing_is",new IRange(0,0), new IRange(0,100)));
        types.add(new FldType("missing_s1",new IRange(0,0), new SVal('a','b',1,1)));
        types.add(new FldType("missing_ss",new IRange(0,0), new SVal('a','b',1,1)));
        // TODO: doubles, multi-floats, ints with precisionStep>0, booleans
    }

    // Index ndocs random documents, updating the in-memory model to match.
    void addMoreDocs(int ndocs) throws Exception {
        model = indexDocs(types, model, ndocs);
    }

    // Delete a random percentage of the modelled docs (by id query), keeping
    // the model in sync, then either optimize or (soft) commit at random.
    void deleteSomeDocs() {
        Random rand = random();
        int percent = rand.nextInt(100);
        if (model == null) return;
        ArrayList<String> ids = new ArrayList<>(model.size());
        for (Comparable id : model.keySet()) {
            if (rand.nextInt(100) < percent) {
                ids.add(id.toString());
            }
        }
        if (ids.size() == 0) return;
        StringBuilder sb = new StringBuilder("id:(");
        for (String id : ids) {
            sb.append(id).append(' ');
            // NOTE(review): remove(id) passes a String against a Comparable-keyed
            // map; this only removes entries when the model keys are Strings —
            // confirm against indexDocs' key type.
            model.remove(id);
        }
        sb.append(')');
        assertU(delQ(sb.toString()));
        if (rand.nextInt(10)==0) {
            assertU(optimize());
        } else {
            // 90% of commits are soft commits.
            assertU(commit("softCommit",""+(rand.nextInt(10)!=0)));
        }
    }

    @Test
    public void testRandomFaceting() throws Exception {
        try {
            Random rand = random();
            int iter = atLeast(100);
            init();
            addMoreDocs(0);
            for (int i=0; i<iter; i++) {
                doFacetTests();
                // Occasionally start over with a fresh random schema/index.
                if (rand.nextInt(100) < 5) {
                    init();
                }
                addMoreDocs(rand.nextInt(indexSize) + 1);
                if (rand.nextInt(100) < 50) {
                    deleteSomeDocs();
                }
            }
        } finally {
            FieldCache.DEFAULT.purgeAllCaches(); // avoid FC insanity
        }
    }

    void doFacetTests() throws Exception {
        for (FldType ftype : types) {
            doFacetTests(ftype);
        }
    }

    // facet.method values compared against each other per field cardinality.
    List<String> multiValuedMethods = Arrays.asList(new String[]{"enum","fc"});
    List<String> singleValuedMethods = Arrays.asList(new String[]{"enum","fc","fcs"});

    // Run one randomised facet request per applicable facet.method on the given
    // field and assert that all methods return identical responses.
    void doFacetTests(FldType ftype) throws Exception {
        SolrQueryRequest req = req();
        try {
            Random rand = random();
            boolean validate = validateResponses;
            ModifiableSolrParams params = params("facet","true", "wt","json", "indent","true", "omitHeader","true");
            params.add("q","*:*", "rows","0"); // TODO: select subsets
            // NOTE(review): "rows" is also set on the line above, so the request
            // carries the parameter twice — harmless but likely unintended.
            params.add("rows","0");
            SchemaField sf = req.getSchema().getField(ftype.fname);
            boolean multiValued = sf.getType().multiValuedFieldCache();
            int offset = 0;
            if (rand.nextInt(100) < 20) {
                if (rand.nextBoolean()) {
                    offset = rand.nextInt(100) < 10 ? rand.nextInt(indexSize*2) : rand.nextInt(indexSize/3+1);
                }
                params.add("facet.offset", Integer.toString(offset));
            }
            int limit = 100;
            if (rand.nextInt(100) < 20) {
                if (rand.nextBoolean()) {
                    limit = rand.nextInt(100) < 10 ? rand.nextInt(indexSize/2+1) : rand.nextInt(indexSize*2);
                }
                params.add("facet.limit", Integer.toString(limit));
            }
            if (rand.nextBoolean()) {
                params.add("facet.sort", rand.nextBoolean() ? "index" : "count");
            }
            // Random facet.prefix: usually a prefix of a real value, occasionally
            // arbitrary unicode or a random single character.
            if ((ftype.vals instanceof SVal) && rand.nextInt(100) < 20) {
                // validate = false;
                String prefix = ftype.createValue().toString();
                if (rand.nextInt(100) < 5) prefix = TestUtil.randomUnicodeString(rand);
                else if (rand.nextInt(100) < 10) prefix = Character.toString((char)rand.nextInt(256));
                else if (prefix.length() > 0) prefix = prefix.substring(0, rand.nextInt(prefix.length()));
                params.add("facet.prefix", prefix);
            }
            if (rand.nextInt(100) < 10) {
                params.add("facet.mincount", Integer.toString(rand.nextInt(5)));
            }
            if (rand.nextInt(100) < 20) {
                params.add("facet.missing", "true");
            }
            // TODO: randomly add other facet params
            String key = ftype.fname;
            String facet_field = ftype.fname;
            // Half the time, exercise the {!key=...} local-param renaming.
            if (random().nextBoolean()) {
                key = "alternate_key";
                facet_field = "{!key="+key+"}"+ftype.fname;
            }
            params.set("facet.field", facet_field);
            List<String> methods = multiValued ? multiValuedMethods : singleValuedMethods;
            List<String> responses = new ArrayList<>(methods.size());
            for (String method : methods) {
                // params.add("facet.field", "{!key="+method+"}" + ftype.fname);
                // TODO: allow method to be passed on local params?
                params.set("facet.method", method);
                // if (random().nextBoolean()) params.set("facet.mincount", "1"); // uncomment to test that validation fails
                String strResponse = h.query(req(params));
                // Object realResponse = ObjectBuilder.fromJSON(strResponse);
                // System.out.println(strResponse);
                responses.add(strResponse);
            }
            /**
            String strResponse = h.query(req(params));
            Object realResponse = ObjectBuilder.fromJSON(strResponse);
            **/
            if (validate) {
                // All methods must agree with the first method's response exactly.
                for (int i=1; i<methods.size(); i++) {
                    String err = JSONTestUtil.match("/", responses.get(i), responses.get(0), 0.0);
                    if (err != null) {
                        log.error("ERROR: mismatch facet response: " + err +
                            "\n expected =" + responses.get(0) +
                            "\n response = " + responses.get(i) +
                            "\n request = " + params
                        );
                        fail(err);
                    }
                }
            }
        } finally {
            req.close();
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.schema.types;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.query.QueryConstants;
import org.apache.phoenix.schema.ColumnValueEncoder;
import org.apache.phoenix.schema.PColumn;
import org.apache.phoenix.schema.PColumnFamily;
import org.apache.phoenix.schema.PTable;
import org.apache.phoenix.schema.SortOrder;
import org.apache.phoenix.util.ByteUtil;
import org.apache.phoenix.util.TrustedByteArrayOutputStream;
/**
 * Encodes a sequence of element byte values into Phoenix's serialized array
 * format. Elements are appended one at a time via {@link #appendValue} /
 * {@link #appendAbsentValue}; {@link #encode()} finalises the offset array and
 * header and returns the serialized bytes.
 */
public class PArrayDataTypeEncoder implements ColumnValueEncoder {
    static private final int BYTE_ARRAY_DEFAULT_SIZE = 128;

    private PDataType baseType;
    private SortOrder sortOrder;
    // Byte offset of each appended element within byteStream
    // (a negative entry marks an absent value in the immutable format).
    private List<Integer> offsetPos;
    private TrustedByteArrayOutputStream byteStream;
    private DataOutputStream oStream;
    // Count of consecutive nulls not yet flushed via PArrayDataType.serializeNulls.
    private int nulls;
    private byte serializationVersion;
    private boolean rowKeyOrderOptimizable;

    /** Convenience constructor with a default-sized buffer and sortable serialization. */
    public PArrayDataTypeEncoder(PDataType baseType, SortOrder sortOrder) {
        this(new TrustedByteArrayOutputStream(BYTE_ARRAY_DEFAULT_SIZE), new LinkedList<Integer>(), baseType, sortOrder, true);
    }

    public PArrayDataTypeEncoder(TrustedByteArrayOutputStream byteStream, DataOutputStream oStream,
            int numElements, PDataType baseType, SortOrder sortOrder, boolean rowKeyOrderOptimizable, byte serializationVersion) {
        this(byteStream, oStream, new ArrayList<Integer>(numElements), baseType, sortOrder, rowKeyOrderOptimizable, serializationVersion);
    }

    public PArrayDataTypeEncoder(TrustedByteArrayOutputStream byteStream, DataOutputStream oStream,
            int numElements, PDataType baseType, SortOrder sortOrder, boolean rowKeyOrderOptimizable) {
        this(byteStream, oStream, new ArrayList<Integer>(numElements), baseType, sortOrder, rowKeyOrderOptimizable, PArrayDataType.SORTABLE_SERIALIZATION_VERSION);
    }

    public PArrayDataTypeEncoder(TrustedByteArrayOutputStream byteStream,
            List<Integer> offsetPos, PDataType baseType, SortOrder sortOrder, boolean rowKeyOrderOptimizable) {
        this(byteStream, new DataOutputStream(byteStream), offsetPos, baseType, sortOrder, rowKeyOrderOptimizable, PArrayDataType.SORTABLE_SERIALIZATION_VERSION);
    }

    public PArrayDataTypeEncoder(TrustedByteArrayOutputStream byteStream, DataOutputStream oStream,
            List<Integer> offsetPos, PDataType baseType, SortOrder sortOrder, boolean rowKeyOrderOptimizable, byte serializationVersion) {
        this.baseType = baseType;
        this.sortOrder = sortOrder;
        this.offsetPos = offsetPos;
        this.byteStream = byteStream;
        this.oStream = oStream;
        this.nulls = 0;
        this.serializationVersion = serializationVersion;
        this.rowKeyOrderOptimizable = rowKeyOrderOptimizable;
    }

    /** Release both streams; safe to call more than once. */
    private void close() {
        try {
            // Close the wrapping stream first so any buffered bytes are flushed
            // before the underlying byte stream is closed (previously the order
            // was reversed).
            if (oStream != null) oStream.close();
            if (byteStream != null) byteStream.close();
            byteStream = null;
            oStream = null;
        } catch (IOException ioe) {
            // In-memory streams do not throw on close; nothing useful to do here.
        }
    }

    /**
     * Record an absent (not merely null) element. Only supported by the
     * immutable serialization format for variable-width base types, where a
     * negative offset marks the absence.
     *
     * @throws UnsupportedOperationException for any other format/type
     */
    @Override
    public void appendAbsentValue() {
        if (serializationVersion == PArrayDataType.IMMUTABLE_SERIALIZATION_VERSION && !baseType.isFixedWidth()) {
            offsetPos.add(-byteStream.size());
            nulls++;
        }
        else {
            throw new UnsupportedOperationException("Cannot represent an absent element");
        }
    }

    /** Append a whole byte[] as the next element. */
    public void appendValue(byte[] bytes) {
        appendValue(bytes, 0, bytes.length);
    }

    /**
     * Append the next element from {@code bytes[offset..offset+len)}. A zero
     * length is treated as a null element.
     *
     * <p>Note: for DESC sort order the input array is inverted in place.
     */
    @Override
    public void appendValue(byte[] bytes, int offset, int len) {
        try {
            // track the offset position here from the size of the byteStream
            if (!baseType.isFixedWidth()) {
                // Any variable length array would follow the below order:
                // Every element is separated by a separator byte '0'.
                // Null elements are counted and once a first non null element appears we
                // write the count of the nulls prefixed with a separator byte.
                // Trailing nulls are not taken into account.
                // The last non null element is followed by two separator bytes.
                // For eg
                // a, b, null, null, c, null would be
                // 65 0 66 0 0 2 67 0 0 0
                // a null null null b c null d would be
                // 65 0 0 3 66 0 67 0 0 1 68 0 0 0
                if (len == 0) {
                    offsetPos.add(byteStream.size());
                    nulls++;
                } else {
                    // Flush the pending null run before writing this element.
                    nulls = PArrayDataType.serializeNulls(oStream, nulls);
                    offsetPos.add(byteStream.size());
                    if (sortOrder == SortOrder.DESC) {
                        SortOrder.invert(bytes, offset, bytes, offset, len);
                        offset = 0;
                    }
                    oStream.write(bytes, offset, len);
                    if (serializationVersion == PArrayDataType.SORTABLE_SERIALIZATION_VERSION) {
                        oStream.write(PArrayDataType.getSeparatorByte(rowKeyOrderOptimizable, sortOrder));
                    }
                }
            } else {
                // No nulls for fixed length
                if (sortOrder == SortOrder.DESC) {
                    SortOrder.invert(bytes, offset, bytes, offset, len);
                    offset = 0;
                }
                oStream.write(bytes, offset, len);
            }
        } catch (IOException e) {
            // Streams are in-memory; write cannot actually fail here.
        }
    }

    /**
     * Finalise and return the serialized array bytes, closing the encoder.
     *
     * @return the serialized array, or null if an I/O error occurred
     */
    @Override
    public byte[] encode() {
        try {
            if (!baseType.isFixedWidth()) {
                int noOfElements = offsetPos.size();
                int[] offsetPosArray = new int[noOfElements];
                int index = 0, maxOffset = 0;
                for (Integer i : offsetPos) {
                    maxOffset = offsetPosArray[index] = i;
                    ++index;
                }
                if (serializationVersion == PArrayDataType.SORTABLE_SERIALIZATION_VERSION) {
                    // Double separator byte to show end of the non null array
                    PArrayDataType.writeEndSeperatorForVarLengthArray(oStream, sortOrder, rowKeyOrderOptimizable);
                }
                noOfElements = PArrayDataType.serializeOffsetArrayIntoStream(oStream, byteStream, noOfElements,
                        maxOffset, offsetPosArray, serializationVersion);
                PArrayDataType.serializeHeaderInfoIntoStream(oStream, noOfElements, serializationVersion);
            }
            ImmutableBytesWritable ptr = new ImmutableBytesWritable();
            ptr.set(byteStream.getBuffer(), 0, byteStream.size());
            return ByteUtil.copyKeyBytesIfNecessary(ptr);
        } catch (IOException e) {
            // In-memory streams: not expected; fall through to return null.
        } finally {
            close();
        }
        return null;
    }

    /**
     * @param colValueMap map from column to value
     * @return estimated encoded size
     */
    public static int getEstimatedByteSize(PTable table, int rowLength,
            Map<PColumn, byte[]> colValueMap) {
        // iterate over column families
        int rowSize = 0;
        for (PColumnFamily family : table.getColumnFamilies()) {
            Collection<PColumn> columns = family.getColumns();
            // we add a non null value to the start so that we can represent absent values in the array with negative offsets
            int numColumns = columns.size() + 1;
            int cellSize = 1;
            int nulls = 0;
            int maxOffset = 0;
            // iterate over columns
            for (PColumn column : columns) {
                if (colValueMap.containsKey(column)) {
                    byte[] colValue = colValueMap.get(column);
                    // the column value is null
                    if (colValue == null || colValue.length == 0) {
                        ++nulls;
                        maxOffset = cellSize;
                    } else {
                        // count the bytes written to serialize nulls:
                        // 1 separator byte plus ceil(nulls/255) count bytes.
                        if (nulls > 0) {
                            // FIX: was Math.ceil(nulls / 255) — integer division
                            // truncated to 0 for nulls < 255, undercounting the size.
                            cellSize += (1 + (int) Math.ceil(nulls / 255.0));
                            nulls = 0;
                        }
                        maxOffset = cellSize;
                        cellSize += colValue.length;
                    }
                }
                // the column value is absent
                else {
                    ++nulls;
                    maxOffset = cellSize;
                }
            }
            // count the bytes used for the offset array
            cellSize +=
                    PArrayDataType.useShortForOffsetArray(maxOffset,
                        PArrayDataType.IMMUTABLE_SERIALIZATION_VERSION)
                            ? numColumns * Bytes.SIZEOF_SHORT
                            : numColumns * Bytes.SIZEOF_INT;
            cellSize += 4;
            // count the bytes used for header information
            cellSize += 5;
            // add the size of the single cell containing all column values
            rowSize +=
                    KeyValue.getKeyValueDataStructureSize(rowLength,
                        family.getName().getBytes().length,
                        QueryConstants.SINGLE_KEYVALUE_COLUMN_QUALIFIER_BYTES.length, cellSize);
        }
        return rowSize;
    }
}
| |
package com.netflix.governator.lifecycle;
import org.objectweb.asm.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.lang.annotation.Annotation;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import static org.objectweb.asm.Type.*;
/**
 * ASM ClassVisitor that scans a class's bytecode for a given set of annotation
 * types and collects the annotated class, methods, constructors and fields as
 * reflective objects (loaded lazily, without initialising the class).
 */
public final class AnnotationFinder extends ClassVisitor {
    private static Logger log = LoggerFactory.getLogger(AnnotationFinder.class);
    // ASM Type descriptors of the annotations being searched for.
    private Set<Type> annotationTypes;
    private Set<Class<?>> annotatedClasses = Collections.emptySet();
    private Set<Method> annotatedMethods = new HashSet<>();
    private Set<Constructor> annotatedConstructors = new HashSet<>();
    private Set<Field> annotatedFields = new HashSet<>();
    // Internal (slash-separated) name of the class being visited.
    private String className;
    // Lazily resolved Class object for className.
    private Class<?> clazz;
    private ClassLoader classLoader;

    // Resolve (once) the Class object for the class currently being visited.
    private Class<?> selfClass() {
        if(clazz == null)
            clazz = classFromInternalName(className);
        return clazz;
    }

    // Load a class by internal name without initialising it (initialize=false).
    private Class<?> classFromInternalName(String name) {
        try {
            return Class.forName(name.replace('/', '.'), false, classLoader);
        }
        catch (ClassNotFoundException e) {
            throw new IllegalStateException(e);
        }
    }

    @Override
    public void visit(int version, int access, String name, String signature, String superName, String[] interfaces) {
        // Remember the class name so members can be resolved reflectively later.
        className = name;
        super.visit(version, access, name, signature, superName, interfaces);
    }

    /**
     * @param classLoader loader used to resolve visited classes reflectively
     * @param annotations the annotation types to search for
     */
    public AnnotationFinder(ClassLoader classLoader, Collection<Class<? extends Annotation>> annotations) {
        super(Opcodes.ASM5);
        annotationTypes = new HashSet<>();
        for (Class<?> annotation : annotations)
            annotationTypes.add(getType(annotation));
        this.classLoader = classLoader;
    }

    @Override
    public AnnotationVisitor visitAnnotation(String desc, boolean visible) {
        // Class-level annotation: if it matches, record this class itself.
        Type type = getType(desc);
        for (Type annotationType : annotationTypes) {
            if (annotationType.equals(type)) {
                annotatedClasses = Collections.<Class<?>>singleton(selfClass());
                break;
            }
        }
        return super.visitAnnotation(desc, visible);
    }

    @Override
    public FieldVisitor visitField(int access, String name, String desc, String signature, Object value) {
        return new AnnotationSeekingFieldVisitor(name, super.visitField(access, name, desc, signature, value));
    }

    @Override
    public MethodVisitor visitMethod(int access, String name, String desc, String signature, String[] exceptions) {
        return new AnnotationSeekingMethodVisitor(super.visitMethod(access, name, desc, signature, exceptions), name, desc);
    }

    // Visits one field; records it if it carries a matching annotation.
    private class AnnotationSeekingFieldVisitor extends FieldVisitor {
        String name;

        public AnnotationSeekingFieldVisitor(String name, FieldVisitor fv) {
            super(Opcodes.ASM5, fv);
            this.name = name;
        }

        @Override
        public AnnotationVisitor visitAnnotation(String desc, boolean visible) {
            Type type = getType(desc);
            for (Type annotationType : annotationTypes) {
                if (annotationType.equals(type)) {
                    try {
                        annotatedFields.add(selfClass().getDeclaredField(name));
                        break;
                    }
                    catch (NoSuchFieldException e) {
                        // Bytecode and reflection disagree: treat as fatal.
                        throw new IllegalStateException(e);
                    }
                }
            }
            return super.visitAnnotation(desc, visible);
        }
    }

    // Visits one method/constructor; records it if it carries a matching annotation.
    private class AnnotationSeekingMethodVisitor extends MethodVisitor {
        String name;
        String methodDesc;

        public AnnotationSeekingMethodVisitor(MethodVisitor mv, String name, String desc) {
            super(Opcodes.ASM5, mv);
            this.name = name;
            this.methodDesc = desc;
        }

        @Override
        public AnnotationVisitor visitAnnotation(String desc, boolean visible) {
            Type type = getType(desc);
            for (Type annotationType : annotationTypes) {
                if (annotationType.equals(type)) {
                    // Convert the ASM argument Types to Class objects so the
                    // exact method/constructor overload can be looked up.
                    Type[] args = methodDesc == null ? new Type[0]
                            : getArgumentTypes(methodDesc);
                    Class[] argClasses = new Class[args.length];
                    for (int i = 0; i < args.length; i++) {
                        switch (args[i].getSort()) {
                        case OBJECT:
                        case ARRAY:
                            argClasses[i] = classFromInternalName(args[i]
                                    .getInternalName());
                            break;
                        case BOOLEAN:
                            argClasses[i] = boolean.class;
                            break;
                        case BYTE:
                            argClasses[i] = byte.class;
                            break;
                        case CHAR:
                            argClasses[i] = char.class;
                            break;
                        case DOUBLE:
                            argClasses[i] = double.class;
                            break;
                        case FLOAT:
                            argClasses[i] = float.class;
                            break;
                        case INT:
                            argClasses[i] = int.class;
                            break;
                        case LONG:
                            argClasses[i] = long.class;
                            break;
                        case SHORT:
                            argClasses[i] = short.class;
                            break;
                        }
                    }
                    try {
                        // "<init>" is the JVM name for a constructor.
                        if ("<init>".equals(name))
                            annotatedConstructors.add(selfClass()
                                    .getDeclaredConstructor(argClasses));
                        else
                            annotatedMethods.add(selfClass().getDeclaredMethod(
                                    name, argClasses));
                    }
                    catch (NoClassDefFoundError e) {
                        // A parameter type is missing from the classpath: skip, don't fail the scan.
                        log.info("Unable to scan constructor of '{}' NoClassDefFoundError looking for '{}'", selfClass().getName(), e.getMessage());
                    }
                    catch (NoSuchMethodException e) {
                        throw new IllegalStateException(e);
                    }
                    break;
                }
            }
            return super.visitAnnotation(desc, visible);
        }
    }

    /**
     * @return a 0 or 1 element Set, depending on whether the class being
     * visited has a matching class annotation
     */
    public Set<Class<?>> getAnnotatedClasses() {
        return annotatedClasses;
    }

    /** @return the methods found to carry a matching annotation. */
    public Set<Method> getAnnotatedMethods() {
        return annotatedMethods;
    }

    /** @return the constructors found to carry a matching annotation. */
    public Set<Constructor> getAnnotatedConstructors() {
        return annotatedConstructors;
    }

    /** @return the fields found to carry a matching annotation. */
    public Set<Field> getAnnotatedFields() {
        return annotatedFields;
    }
}
| |
package org.ooc.parsers;
import java.io.IOException;
import org.ooc.errors.SourceContext;
import org.ooc.nodes.operators.Assignment;
import org.ooc.nodes.operators.EqualityTest;
import org.ooc.nodes.operators.Minus;
import org.ooc.nodes.operators.Plus;
import org.ooc.nodes.operators.Star;
import org.ooc.nodes.others.RawCode;
import org.ubi.SourceReader;
/**
 * Parses C-like operator tokens from the source. Most operators pass through
 * as raw code; a handful (*, +, -, ==, =) produce dedicated AST nodes.
 */
class OperatorsParser implements Parser {

    // Candidate operators in match order. Compound operators precede their
    // prefixes (e.g. "*=" before "*") so the longest token wins, exactly as the
    // original if/else chain did. The second column is the raw text to emit;
    // null marks an operator that maps to a dedicated node type instead.
    private static final String[][] OPERATORS = {
        {"*=", "*= "}, {"/=", "/= "}, {"+=", "+= "}, {"-=", "-= "},
        {"*", null}, {"/", "/ "}, {"++", "++"}, {"+", null},
        {"--", "--"}, {"-", null}, {"%=", "%= "}, {"%", "% "},
        {"<=", "<= "}, {">=", ">= "}, {"<<=", "<<= "}, {"<<", "<< "},
        {">>=", ">>= "}, {">>", ">> "}, {"<", "< "}, {">", "> "},
        {"!=", "!= "}, {"!", "!"}, {"==", null}, {"=", null},
        {"||", "|| "}, {"|=", "|= "}, {"|", "| "}, {"&&", "&& "},
        {"&=", "&= "}, {"&", "& "}, {"?", "? "}, {":", ": "},
        {"~", "~"}, {"^=", "^="}, {"^", "^"},
    };

    /**
     * Try each candidate operator in order; the first that matches is consumed
     * and translated into the appropriate node.
     *
     * @return true when an operator was recognised and consumed
     */
    public boolean parse(final SourceContext context) throws IOException {
        SourceReader reader = context.reader;
        for (String[] entry : OPERATORS) {
            String token = entry[0];
            // matches(..., true) consumes the token on success.
            if (!reader.matches(token, true)) {
                continue;
            }
            String rawText = entry[1];
            if (rawText != null) {
                context.add(new RawCode(reader.getLocation(), rawText));
            } else if (token.equals("*")) {
                context.add(new Star(reader.getLocation()));
            } else if (token.equals("+")) {
                context.add(new Plus(reader.getLocation()));
            } else if (token.equals("-")) {
                context.add(new Minus(reader.getLocation()));
            } else if (token.equals("==")) {
                context.add(new EqualityTest(reader.getLocation()));
            } else {
                // token "="
                context.add(new Assignment(reader.getLocation()));
            }
            return true;
        }
        return false;
    }
}
| |
/*
* Copyright 2017 Crown Copyright
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package stroom.importexport.migration;
import stroom.importexport.api.ExtensionProvider;
import stroom.importexport.shared.ExternalFile;
import javax.xml.bind.annotation.XmlTransient;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
public final class BeanPropertyUtil {
private static final Map<Key, List<Property>> propertyCache = new ConcurrentHashMap<>();
private BeanPropertyUtil() {
// Utility class.
}
/**
* Given a class return all the property names.
*/
public static List<Property> getPropertyList(final Class<?> clazz, final boolean omitAuditFields) {
return propertyCache.computeIfAbsent(new Key(clazz, omitAuditFields), BeanPropertyUtil::create);
}
private static List<Property> create(final Key key) {
final Map<String, List<Method>> methodMap = new HashMap<>();
for (final Method method : key.clazz.getMethods()) {
if (isGetter(method) || isSetter(method)) {
final String propertyName = getPropertyName(method);
final XmlTransient xmlTransient = method.getAnnotation(XmlTransient.class);
// Ignore transient fields.
if (xmlTransient == null) {
if (!(key.omitAuditFields && DocumentEntity.AUDIT_FIELDS.contains(propertyName))) {
methodMap.computeIfAbsent(propertyName, name -> new ArrayList<>()).add(method);
}
}
}
}
final List<Property> list = new ArrayList<>();
methodMap.forEach((propertyName, methods) -> {
if (methods.size() >= 2) {
// Find the get method.
Method getMethod = null;
for (final Method method : methods) {
if (isGetter(method)) {
getMethod = method;
break;
}
}
if (getMethod != null) {
// Find the set method.
Method setMethod = null;
for (final Method method : methods) {
if (isSetter(method) && method.getParameterTypes()[0].equals(getMethod.getReturnType())) {
setMethod = method;
break;
}
}
if (setMethod != null) {
Property exportProperty;
final ExternalFile externalFile = getMethod.getAnnotation(ExternalFile.class);
if (externalFile != null) {
final Class<?> extensionProvider = externalFile.extensionProvider();
if (ExtensionProvider.class.isAssignableFrom(extensionProvider)) {
ExtensionProvider instance;
try {
instance = (ExtensionProvider) extensionProvider.getConstructor().newInstance();
} catch (final NoSuchMethodException | InvocationTargetException | InstantiationException | IllegalAccessException e) {
throw new RuntimeException(e.getMessage(), e);
}
exportProperty = new Property(propertyName, true, instance, getMethod, setMethod);
} else {
exportProperty = new Property(propertyName,
true,
new ExtensionProvider(externalFile.value()),
getMethod,
setMethod);
}
} else {
exportProperty = new Property(propertyName, false, null, getMethod, setMethod);
}
list.add(exportProperty);
}
}
}
});
list.sort(Comparator.comparing(Property::getName));
final Iterator<Property> itr = list.iterator();
while (itr.hasNext()) {
final Property next = itr.next();
final String name = next.getName();
// Handle private properties, which could be named pXxxx or pxxxxx
if (name.startsWith("p") || name.startsWith("P")) {
final String other1 = name.substring(1);
final String other2 = other1.substring(0, 1).toLowerCase() + other1.substring(1);
boolean found = false;
for (final Property prop : list) {
if (prop.getName().equals(other1) || prop.getName().equals(other2)) {
found = true;
break;
}
}
if (found) {
itr.remove();
}
}
if (name.equals("folder")) {
itr.remove();
}
}
return list;
}
/**
 * Returns whether {@code method} follows the JavaBean accessor naming
 * convention: a no-argument method named {@code getXxx} or {@code isXxx}.
 */
private static boolean isGetter(final Method method) {
    // Accessors never take arguments; reject early.
    if (method.getParameterTypes().length != 0) {
        return false;
    }
    final String methodName = method.getName();
    final boolean getStyle = methodName.startsWith("get") && methodName.length() > 3;
    final boolean isStyle = methodName.startsWith("is") && methodName.length() > 2;
    return getStyle || isStyle;
}
/**
 * Returns whether {@code method} follows the JavaBean mutator naming
 * convention: a single-argument method named {@code setXxx}.
 */
private static boolean isSetter(final Method method) {
    // Mutators take exactly one argument; reject early.
    if (method.getParameterTypes().length != 1) {
        return false;
    }
    final String methodName = method.getName();
    return methodName.startsWith("set") && methodName.length() > 3;
}
/**
 * Derives the JavaBean property name from an accessor/mutator method name:
 * {@code getFoo}, {@code setFoo} and {@code isFoo} all yield {@code "foo"}.
 *
 * @param method the method whose name is inspected.
 * @return the decapitalized property name, or {@code null} if the method
 *         name does not start with get/set/is, or carries no property
 *         suffix at all (a method named exactly {@code "get"} previously
 *         caused a StringIndexOutOfBoundsException here).
 */
private static String getPropertyName(final Method method) {
    final String name = method.getName();
    if (name.startsWith("get") || name.startsWith("set")) {
        return decapitalize(name.substring(3));
    }
    if (name.startsWith("is")) {
        return decapitalize(name.substring(2));
    }
    return null;
}

/**
 * Lower-cases the first character of {@code propertyName}. Shared by both
 * prefix branches above (previously duplicated inline).
 *
 * @return the decapitalized name, or {@code null} for an empty input.
 */
private static String decapitalize(final String propertyName) {
    if (propertyName.isEmpty()) {
        // No property suffix after the get/set/is prefix: treat as "no property"
        // instead of throwing on substring(0, 1).
        return null;
    }
    return propertyName.substring(0, 1).toLowerCase() + propertyName.substring(1);
}
/**
 * Cache key pairing a target class with the omit-audit-fields flag, so that
 * the property list can be cached separately per (class, flag) combination.
 */
private static class Key {
    private final Class<?> clazz;
    private final boolean omitAuditFields;

    Key(final Class<?> clazz, final boolean omitAuditFields) {
        this.clazz = clazz;
        this.omitAuditFields = omitAuditFields;
    }

    @Override
    public boolean equals(final Object other) {
        if (other == this) {
            return true;
        }
        // Exact-class identity check (not instanceof), matching hashCode's contract.
        if (other == null || other.getClass() != getClass()) {
            return false;
        }
        final Key that = (Key) other;
        return this.omitAuditFields == that.omitAuditFields
                && Objects.equals(this.clazz, that.clazz);
    }

    @Override
    public int hashCode() {
        return Objects.hash(clazz, omitAuditFields);
    }
}
}
| |
<%#
Copyright 2013-2017 the original author or authors from the JHipster project.
This file is part of the JHipster project, see https://jhipster.github.io/
for more information.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-%>
package <%= packageName %>.config;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.servlet.InstrumentedFilter;
import com.codahale.metrics.servlets.MetricsServlet;
<%_ if (clusteredHttpSession == 'hazelcast' || hibernateCache == 'hazelcast') { _%>
import com.hazelcast.config.Config;
import com.hazelcast.core.*;
import com.hazelcast.durableexecutor.DurableExecutorService;
import com.hazelcast.logging.LoggingService;
import com.hazelcast.mapreduce.JobTracker;
import com.hazelcast.quorum.QuorumService;
import com.hazelcast.ringbuffer.Ringbuffer;
import com.hazelcast.transaction.*;
<%_ } _%>
<%_ if (clusteredHttpSession == 'hazelcast') { _%>
import com.hazelcast.web.spring.SpringAwareWebFilter;
<%_ } _%>
import io.github.jhipster.config.JHipsterConstants;
import io.github.jhipster.config.JHipsterProperties;
<%_ if (!skipClient) { _%>
import io.github.jhipster.web.filter.CachingHttpHeadersFilter;
<%_ } _%>
import io.undertow.Undertow;
import io.undertow.Undertow.Builder;
import io.undertow.UndertowOptions;
import org.apache.commons.io.FilenameUtils;
<%_ if (devDatabaseType == 'h2Disk' || devDatabaseType == 'h2Memory') { _%>
import org.h2.server.web.WebServlet;
<%_ } _%>
import org.junit.Before;
import org.junit.Test;
import org.springframework.boot.context.embedded.undertow.UndertowEmbeddedServletContainerFactory;
import org.springframework.http.HttpHeaders;
import org.springframework.mock.env.MockEnvironment;
import org.springframework.mock.web.MockServletContext;
import org.springframework.test.util.ReflectionTestUtils;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.xnio.OptionMap;
import javax.servlet.*;
import java.util.*;
<%_ if (clusteredHttpSession == 'hazelcast' || hibernateCache == 'hazelcast') { _%>
import java.util.concurrent.ConcurrentMap;
<%_ } _%>
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.*;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.options;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.header;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
/**
* Unit tests for the WebConfigurer class.
*
* @see WebConfigurer
*/
public class WebConfigurerTest {
// Object under test; rebuilt by setup() before every test method.
private WebConfigurer webConfigurer;
// Spied servlet context used to verify filter/servlet registrations.
private MockServletContext servletContext;
// Mock Spring environment used to switch active profiles per test.
private MockEnvironment env;
// JHipster properties mutated by individual tests (CORS, HTTP version, ...).
private JHipsterProperties props;
// Registry handed to the configurer; expected back as context attributes.
private MetricRegistry metricRegistry;
// Wires a spied MockServletContext (so addFilter/addServlet calls can be
// verified) and a fresh WebConfigurer before every test.
@Before
public void setup() {
servletContext = spy(new MockServletContext());
// Make the spy hand back usable registrations instead of Mockito's default null.
doReturn(new MockFilterRegistration())
.when(servletContext).addFilter(anyString(), any(Filter.class));
doReturn(new MockServletRegistration())
.when(servletContext).addServlet(anyString(), any(Servlet.class));
<%_ if (clusteredHttpSession == 'hazelcast') { _%>
doNothing()
.when(servletContext).addListener(any(EventListener.class));
<%_ } _%>
env = new MockEnvironment();
props = new JHipsterProperties();
webConfigurer = new WebConfigurer(env, props<% if (clusteredHttpSession == 'hazelcast' || hibernateCache == 'hazelcast') { %>, new MockHazelcastInstance()<% } %>);
metricRegistry = new MetricRegistry();
webConfigurer.setMetricRegistry(metricRegistry);
}
// With the 'prod' profile active, onStartup must publish the metric registry
// as context attributes and register the production filters/servlets
// (and, per the verify below, never the H2 console).
@Test
public void testStartUpProdServletContext() throws ServletException {
env.setActiveProfiles(JHipsterConstants.SPRING_PROFILE_PRODUCTION);
webConfigurer.onStartup(servletContext);
assertThat(servletContext.getAttribute(InstrumentedFilter.REGISTRY_ATTRIBUTE)).isEqualTo(metricRegistry);
assertThat(servletContext.getAttribute(MetricsServlet.METRICS_REGISTRY)).isEqualTo(metricRegistry);
<%_ if (clusteredHttpSession == 'hazelcast') { _%>
verify(servletContext).addFilter(eq("hazelcastWebFilter"), any(SpringAwareWebFilter.class));
<%_ } _%>
verify(servletContext).addFilter(eq("webappMetricsFilter"), any(InstrumentedFilter.class));
verify(servletContext).addServlet(eq("metricsServlet"), any(MetricsServlet.class));
<%_ if (!skipClient) { _%>
verify(servletContext).addFilter(eq("cachingHttpHeadersFilter"), any(CachingHttpHeadersFilter.class));
<%_ } _%>
<%_ if (devDatabaseType == 'h2Disk' || devDatabaseType == 'h2Memory') { _%>
verify(servletContext, never()).addServlet(eq("H2Console"), any(WebServlet.class));
<%_ } _%>
}
// With the 'dev' profile active, onStartup still publishes the metric
// registry and metrics filter/servlet, but (per the verifies below) skips
// the caching-headers filter and registers the H2 console instead.
@Test
public void testStartUpDevServletContext() throws ServletException {
env.setActiveProfiles(JHipsterConstants.SPRING_PROFILE_DEVELOPMENT);
webConfigurer.onStartup(servletContext);
assertThat(servletContext.getAttribute(InstrumentedFilter.REGISTRY_ATTRIBUTE)).isEqualTo(metricRegistry);
assertThat(servletContext.getAttribute(MetricsServlet.METRICS_REGISTRY)).isEqualTo(metricRegistry);
<%_ if (clusteredHttpSession == 'hazelcast') { _%>
verify(servletContext).addFilter(eq("hazelcastWebFilter"), any(SpringAwareWebFilter.class));
<%_ } _%>
verify(servletContext).addFilter(eq("webappMetricsFilter"), any(InstrumentedFilter.class));
verify(servletContext).addServlet(eq("metricsServlet"), any(MetricsServlet.class));
<%_ if (!skipClient) { _%>
verify(servletContext, never()).addFilter(eq("cachingHttpHeadersFilter"), any(CachingHttpHeadersFilter.class));
<%_ } _%>
<%_ if (devDatabaseType == 'h2Disk' || devDatabaseType == 'h2Memory') { _%>
verify(servletContext).addServlet(eq("H2Console"), any(WebServlet.class));
<%_ } _%>
}
// Customizing the Undertow factory must add the extra MIME mappings,
// point the document root at the built client assets (when present), and
// leave HTTP/2 disabled by default.
@Test
public void testCustomizeServletContainer() {
env.setActiveProfiles(JHipsterConstants.SPRING_PROFILE_PRODUCTION);
UndertowEmbeddedServletContainerFactory container = new UndertowEmbeddedServletContainerFactory();
webConfigurer.customize(container);
assertThat(container.getMimeMappings().get("abs")).isEqualTo("audio/x-mpeg");
assertThat(container.getMimeMappings().get("html")).isEqualTo("text/html;charset=utf-8");
assertThat(container.getMimeMappings().get("json")).isEqualTo("text/html;charset=utf-8");
<%_ if (!skipClient) { _%>
if (container.getDocumentRoot() != null) {
assertThat(container.getDocumentRoot().getPath()).isEqualTo(FilenameUtils.separatorsToSystem("<%= BUILD_DIR %>www"));
}
<%_ } _%>
// Replay the registered customizers onto a fresh builder and inspect its
// private server options through reflection.
Builder builder = Undertow.builder();
container.getBuilderCustomizers().forEach(c -> c.customize(builder));
OptionMap.Builder serverOptions = (OptionMap.Builder) ReflectionTestUtils.getField(builder, "serverOptions");
assertThat(serverOptions.getMap().get(UndertowOptions.ENABLE_HTTP2)).isNull();
}
<%_ if (!skipClient && clientFramework == 'angular1') { _%>
// Without the prod profile the document root must fall back to the source
// webapp directory; MIME mappings apply regardless, and HTTP/2 stays off.
@Test
public void testCustomizeServletContainerNotProd() {
UndertowEmbeddedServletContainerFactory container = new UndertowEmbeddedServletContainerFactory();
webConfigurer.customize(container);
assertThat(container.getMimeMappings().get("abs")).isEqualTo("audio/x-mpeg");
assertThat(container.getMimeMappings().get("html")).isEqualTo("text/html;charset=utf-8");
assertThat(container.getMimeMappings().get("json")).isEqualTo("text/html;charset=utf-8");
assertThat(container.getDocumentRoot().getPath()).isEqualTo(FilenameUtils.separatorsToSystem("src/main/webapp"));
Builder builder = Undertow.builder();
container.getBuilderCustomizers().forEach(c -> c.customize(builder));
OptionMap.Builder serverOptions = (OptionMap.Builder) ReflectionTestUtils.getField(builder, "serverOptions");
assertThat(serverOptions.getMap().get(UndertowOptions.ENABLE_HTTP2)).isNull();
}
<%_ } _%>
/**
 * When the HTTP version property is set to 2.0, the customized Undertow
 * builder must carry {@code ENABLE_HTTP2 = true} in its server options.
 */
@Test
public void testUndertowHttp2Enabled() {
    props.getHttp().setVersion(JHipsterProperties.Http.Version.V_2_0);
    final UndertowEmbeddedServletContainerFactory factory = new UndertowEmbeddedServletContainerFactory();
    webConfigurer.customize(factory);
    // Replay the registered customizers onto a fresh builder, then read its
    // private server options through reflection.
    final Builder undertowBuilder = Undertow.builder();
    factory.getBuilderCustomizers().forEach(customizer -> customizer.customize(undertowBuilder));
    final OptionMap.Builder serverOptions =
            (OptionMap.Builder) ReflectionTestUtils.getField(undertowBuilder, "serverOptions");
    assertThat(serverOptions.getMap().get(UndertowOptions.ENABLE_HTTP2)).isTrue();
}
/**
 * With CORS fully configured, both a preflight OPTIONS and a plain GET on
 * an /api path must be decorated with the expected CORS headers.
 */
@Test
public void testCorsFilterOnApiPath() throws Exception {
    props.getCors().setAllowCredentials(true);
    props.getCors().setMaxAge(1800L);
    props.getCors().setAllowedOrigins(Collections.singletonList("*"));
    props.getCors().setAllowedHeaders(Collections.singletonList("*"));
    props.getCors().setAllowedMethods(Arrays.asList("GET", "POST", "PUT", "DELETE"));
    final MockMvc mvc = MockMvcBuilders
            .standaloneSetup(new WebConfigurerTestController())
            .addFilters(webConfigurer.corsFilter())
            .build();
    // Preflight request: every negotiated CORS header must be echoed back.
    mvc.perform(options("/api/test-cors")
                    .header(HttpHeaders.ORIGIN, "other.domain.com")
                    .header(HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, "POST"))
            .andExpect(status().isOk())
            .andExpect(header().string(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN, "other.domain.com"))
            .andExpect(header().string(HttpHeaders.VARY, "Origin"))
            .andExpect(header().string(HttpHeaders.ACCESS_CONTROL_ALLOW_METHODS, "GET,POST,PUT,DELETE"))
            .andExpect(header().string(HttpHeaders.ACCESS_CONTROL_ALLOW_CREDENTIALS, "true"))
            .andExpect(header().string(HttpHeaders.ACCESS_CONTROL_MAX_AGE, "1800"));
    // Simple cross-origin GET: only the allow-origin header is asserted.
    mvc.perform(get("/api/test-cors")
                    .header(HttpHeaders.ORIGIN, "other.domain.com"))
            .andExpect(status().isOk())
            .andExpect(header().string(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN, "other.domain.com"));
}
/**
 * The CORS filter is mapped to API endpoints only: even a fully configured
 * CORS setup must not decorate responses on a non-/api path.
 */
@Test
public void testCorsFilterOnOtherPath() throws Exception {
    props.getCors().setAllowCredentials(true);
    props.getCors().setMaxAge(1800L);
    props.getCors().setAllowedOrigins(Collections.singletonList("*"));
    props.getCors().setAllowedHeaders(Collections.singletonList("*"));
    props.getCors().setAllowedMethods(Arrays.asList("GET", "POST", "PUT", "DELETE"));
    final MockMvc mvc = MockMvcBuilders
            .standaloneSetup(new WebConfigurerTestController())
            .addFilters(webConfigurer.corsFilter())
            .build();
    // A cross-origin GET outside /api must come back without CORS headers.
    mvc.perform(get("/test/test-cors")
                    .header(HttpHeaders.ORIGIN, "other.domain.com"))
            .andExpect(status().isOk())
            .andExpect(header().doesNotExist(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN));
}
/**
 * With no allowed origins configured ({@code null}), the CORS filter must
 * stay inactive and add no CORS headers even on an /api request.
 */
@Test
public void testCorsFilterDeactivated() throws Exception {
    props.getCors().setAllowedOrigins(null);
    final MockMvc mvc = MockMvcBuilders
            .standaloneSetup(new WebConfigurerTestController())
            .addFilters(webConfigurer.corsFilter())
            .build();
    mvc.perform(get("/api/test-cors")
                    .header(HttpHeaders.ORIGIN, "other.domain.com"))
            .andExpect(status().isOk())
            .andExpect(header().doesNotExist(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN));
}
/**
 * An empty allowed-origins list (as opposed to {@code null}) must likewise
 * leave the CORS filter inactive on /api requests.
 */
@Test
public void testCorsFilterDeactivated2() throws Exception {
    props.getCors().setAllowedOrigins(new ArrayList<>());
    final MockMvc mvc = MockMvcBuilders
            .standaloneSetup(new WebConfigurerTestController())
            .addFilters(webConfigurer.corsFilter())
            .build();
    mvc.perform(get("/api/test-cors")
                    .header(HttpHeaders.ORIGIN, "other.domain.com"))
            .andExpect(status().isOk())
            .andExpect(header().doesNotExist(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN));
}
// No-op FilterRegistration stub handed back by the spied ServletContext so
// that WebConfigurer.onStartup can chain calls on the registration; all
// mappings/init parameters are ignored and queries return null/false.
static class MockFilterRegistration implements FilterRegistration, FilterRegistration.Dynamic {
@Override
public void addMappingForServletNames(EnumSet<DispatcherType> dispatcherTypes, boolean isMatchAfter, String... servletNames) {
}
@Override
public Collection<String> getServletNameMappings() {
return null;
}
@Override
public void addMappingForUrlPatterns(EnumSet<DispatcherType> dispatcherTypes, boolean isMatchAfter, String... urlPatterns) {
}
@Override
public Collection<String> getUrlPatternMappings() {
return null;
}
@Override
public void setAsyncSupported(boolean isAsyncSupported) {
}
@Override
public String getName() {
return null;
}
@Override
public String getClassName() {
return null;
}
@Override
public boolean setInitParameter(String name, String value) {
return false;
}
@Override
public String getInitParameter(String name) {
return null;
}
@Override
public Set<String> setInitParameters(Map<String, String> initParameters) {
return null;
}
@Override
public Map<String, String> getInitParameters() {
return null;
}
}
// No-op ServletRegistration stub handed back by the spied ServletContext;
// mirrors MockFilterRegistration: setters are ignored, queries return
// null/false.
static class MockServletRegistration implements ServletRegistration, ServletRegistration.Dynamic {
@Override
public void setLoadOnStartup(int loadOnStartup) {
}
@Override
public Set<String> setServletSecurity(ServletSecurityElement constraint) {
return null;
}
@Override
public void setMultipartConfig(MultipartConfigElement multipartConfig) {
}
@Override
public void setRunAsRole(String roleName) {
}
@Override
public void setAsyncSupported(boolean isAsyncSupported) {
}
@Override
public Set<String> addMapping(String... urlPatterns) {
return null;
}
@Override
public Collection<String> getMappings() {
return null;
}
@Override
public String getRunAsRole() {
return null;
}
@Override
public String getName() {
return null;
}
@Override
public String getClassName() {
return null;
}
@Override
public boolean setInitParameter(String name, String value) {
return false;
}
@Override
public String getInitParameter(String name) {
return null;
}
@Override
public Set<String> setInitParameters(Map<String, String> initParameters) {
return null;
}
@Override
public Map<String, String> getInitParameters() {
return null;
}
}
<%_ if (clusteredHttpSession == 'hazelcast' || hibernateCache == 'hazelcast') { _%>
// Stub HazelcastInstance passed to the WebConfigurer constructor when the
// generator enables Hazelcast. Only getName() returns a real value; every
// other accessor returns null and shutdown() is a no-op.
public static class MockHazelcastInstance implements HazelcastInstance {
@Override
public String getName() {
return "HazelcastInstance";
}
@Override
public <E> IQueue<E> getQueue(String s) {
return null;
}
@Override
public <E> ITopic<E> getTopic(String s) {
return null;
}
@Override
public <E> ISet<E> getSet(String s) {
return null;
}
@Override
public <E> IList<E> getList(String s) {
return null;
}
@Override
public <K, V> IMap<K, V> getMap(String s) {
return null;
}
@Override
public <K, V> ReplicatedMap<K, V> getReplicatedMap(String s) {
return null;
}
@Override
public JobTracker getJobTracker(String s) {
return null;
}
@Override
public <K, V> MultiMap<K, V> getMultiMap(String s) {
return null;
}
@Override
public ILock getLock(String s) {
return null;
}
@Override
public <E> Ringbuffer<E> getRingbuffer(String s) {
return null;
}
@Override
public <E> ITopic<E> getReliableTopic(String s) {
return null;
}
@Override
public Cluster getCluster() {
return null;
}
@Override
public Endpoint getLocalEndpoint() {
return null;
}
@Override
public IExecutorService getExecutorService(String s) {
return null;
}
@Override
public DurableExecutorService getDurableExecutorService(String s) {
return null;
}
@Override
public <T> T executeTransaction(TransactionalTask<T> transactionalTask) throws TransactionException {
return null;
}
@Override
public <T> T executeTransaction(TransactionOptions transactionOptions, TransactionalTask<T> transactionalTask) throws TransactionException {
return null;
}
@Override
public TransactionContext newTransactionContext() {
return null;
}
@Override
public TransactionContext newTransactionContext(TransactionOptions transactionOptions) {
return null;
}
@Override
public IdGenerator getIdGenerator(String s) {
return null;
}
@Override
public IAtomicLong getAtomicLong(String s) {
return null;
}
@Override
public <E> IAtomicReference<E> getAtomicReference(String s) {
return null;
}
@Override
public ICountDownLatch getCountDownLatch(String s) {
return null;
}
@Override
public ISemaphore getSemaphore(String s) {
return null;
}
@Override
public Collection<DistributedObject> getDistributedObjects() {
return null;
}
@Override
public String addDistributedObjectListener(DistributedObjectListener distributedObjectListener) {
return null;
}
@Override
public boolean removeDistributedObjectListener(String s) {
return false;
}
@Override
public Config getConfig() {
return null;
}
@Override
public PartitionService getPartitionService() {
return null;
}
@Override
public QuorumService getQuorumService() {
return null;
}
@Override
public ClientService getClientService() {
return null;
}
@Override
public LoggingService getLoggingService() {
return null;
}
@Override
public LifecycleService getLifecycleService() {
return null;
}
@Override
public <T extends DistributedObject> T getDistributedObject(String s, String s1) {
return null;
}
@Override
public ConcurrentMap<String, Object> getUserContext() {
return null;
}
@Override
public HazelcastXAResource getXAResource() {
return null;
}
@Override
public ICacheManager getCacheManager() {
return null;
}
@Override
public void shutdown() {
}
}
<%_ } _%>
}
| |
/**
* <copyright>
* </copyright>
*
* $Id$
*/
package net.opengis.gml.impl;
import net.opengis.gml.AngleChoiceType;
import net.opengis.gml.DMSAngleType;
import net.opengis.gml.GmlPackage;
import net.opengis.gml.MeasureType;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.EObjectImpl;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Angle Choice Type</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* <ul>
* <li>{@link net.opengis.gml.impl.AngleChoiceTypeImpl#getAngle <em>Angle</em>}</li>
* <li>{@link net.opengis.gml.impl.AngleChoiceTypeImpl#getDmsAngle <em>Dms Angle</em>}</li>
* </ul>
* </p>
*
* @generated
*/
public class AngleChoiceTypeImpl extends EObjectImpl implements AngleChoiceType {
/**
 * The cached value of the '{@link #getAngle() <em>Angle</em>}' containment reference.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getAngle()
 * @generated
 * @ordered
 */
protected MeasureType angle;
/**
 * The cached value of the '{@link #getDmsAngle() <em>Dms Angle</em>}' containment reference.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getDmsAngle()
 * @generated
 * @ordered
 */
protected DMSAngleType dmsAngle;
/**
 * <!-- begin-user-doc -->
 * Protected: instances are created through the generated GML factory.
 * <!-- end-user-doc -->
 * @generated
 */
protected AngleChoiceTypeImpl() {
super();
}
/**
 * <!-- begin-user-doc -->
 * Returns the static EMF metadata (EClass) describing this model object.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
protected EClass eStaticClass() {
return GmlPackage.eINSTANCE.getAngleChoiceType();
}
/**
 * <!-- begin-user-doc -->
 * Returns the cached angle containment reference; may be null.
 * <!-- end-user-doc -->
 * @generated
 */
public MeasureType getAngle() {
return angle;
}
/**
 * <!-- begin-user-doc -->
 * Caches {@code newAngle} without performing inverse handling and, if any
 * adapters listen, appends a SET notification to {@code msgs} rather than
 * dispatching it immediately.
 * <!-- end-user-doc -->
 * @generated
 */
public NotificationChain basicSetAngle(MeasureType newAngle, NotificationChain msgs) {
MeasureType oldAngle = angle;
angle = newAngle;
if (eNotificationRequired()) {
ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, GmlPackage.ANGLE_CHOICE_TYPE__ANGLE, oldAngle, newAngle);
if (msgs == null) msgs = notification; else msgs.add(notification);
}
return msgs;
}
/**
 * <!-- begin-user-doc -->
 * Public setter: detaches the previous contained angle, attaches the new
 * one, then dispatches the accumulated notification chain in one batch.
 * <!-- end-user-doc -->
 * @generated
 */
public void setAngle(MeasureType newAngle) {
if (newAngle != angle) {
NotificationChain msgs = null;
if (angle != null)
msgs = ((InternalEObject)angle).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - GmlPackage.ANGLE_CHOICE_TYPE__ANGLE, null, msgs);
if (newAngle != null)
msgs = ((InternalEObject)newAngle).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - GmlPackage.ANGLE_CHOICE_TYPE__ANGLE, null, msgs);
msgs = basicSetAngle(newAngle, msgs);
if (msgs != null) msgs.dispatch();
}
else if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, GmlPackage.ANGLE_CHOICE_TYPE__ANGLE, newAngle, newAngle));
}
/**
 * <!-- begin-user-doc -->
 * Returns the cached DMS-angle containment reference; may be null.
 * <!-- end-user-doc -->
 * @generated
 */
public DMSAngleType getDmsAngle() {
return dmsAngle;
}
/**
 * <!-- begin-user-doc -->
 * Caches {@code newDmsAngle} without inverse handling; mirrors
 * {@link #basicSetAngle(MeasureType, NotificationChain)}.
 * <!-- end-user-doc -->
 * @generated
 */
public NotificationChain basicSetDmsAngle(DMSAngleType newDmsAngle, NotificationChain msgs) {
DMSAngleType oldDmsAngle = dmsAngle;
dmsAngle = newDmsAngle;
if (eNotificationRequired()) {
ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, GmlPackage.ANGLE_CHOICE_TYPE__DMS_ANGLE, oldDmsAngle, newDmsAngle);
if (msgs == null) msgs = notification; else msgs.add(notification);
}
return msgs;
}
/**
 * <!-- begin-user-doc -->
 * Public setter for the DMS angle; mirrors {@link #setAngle(MeasureType)}.
 * <!-- end-user-doc -->
 * @generated
 */
public void setDmsAngle(DMSAngleType newDmsAngle) {
if (newDmsAngle != dmsAngle) {
NotificationChain msgs = null;
if (dmsAngle != null)
msgs = ((InternalEObject)dmsAngle).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - GmlPackage.ANGLE_CHOICE_TYPE__DMS_ANGLE, null, msgs);
if (newDmsAngle != null)
msgs = ((InternalEObject)newDmsAngle).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - GmlPackage.ANGLE_CHOICE_TYPE__DMS_ANGLE, null, msgs);
msgs = basicSetDmsAngle(newDmsAngle, msgs);
if (msgs != null) msgs.dispatch();
}
else if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, GmlPackage.ANGLE_CHOICE_TYPE__DMS_ANGLE, newDmsAngle, newDmsAngle));
}
/**
 * <!-- begin-user-doc -->
 * Clears the matching containment reference when a contained object is
 * removed through the inverse direction.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
switch (featureID) {
case GmlPackage.ANGLE_CHOICE_TYPE__ANGLE:
return basicSetAngle(null, msgs);
case GmlPackage.ANGLE_CHOICE_TYPE__DMS_ANGLE:
return basicSetDmsAngle(null, msgs);
}
return super.eInverseRemove(otherEnd, featureID, msgs);
}
/**
 * <!-- begin-user-doc -->
 * Reflective getter used by the EMF framework.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public Object eGet(int featureID, boolean resolve, boolean coreType) {
switch (featureID) {
case GmlPackage.ANGLE_CHOICE_TYPE__ANGLE:
return getAngle();
case GmlPackage.ANGLE_CHOICE_TYPE__DMS_ANGLE:
return getDmsAngle();
}
return super.eGet(featureID, resolve, coreType);
}
/**
 * <!-- begin-user-doc -->
 * Reflective setter used by the EMF framework.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public void eSet(int featureID, Object newValue) {
switch (featureID) {
case GmlPackage.ANGLE_CHOICE_TYPE__ANGLE:
setAngle((MeasureType)newValue);
return;
case GmlPackage.ANGLE_CHOICE_TYPE__DMS_ANGLE:
setDmsAngle((DMSAngleType)newValue);
return;
}
super.eSet(featureID, newValue);
}
/**
 * <!-- begin-user-doc -->
 * Reflective unset: resets the selected feature to null.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public void eUnset(int featureID) {
switch (featureID) {
case GmlPackage.ANGLE_CHOICE_TYPE__ANGLE:
setAngle((MeasureType)null);
return;
case GmlPackage.ANGLE_CHOICE_TYPE__DMS_ANGLE:
setDmsAngle((DMSAngleType)null);
return;
}
super.eUnset(featureID);
}
/**
 * <!-- begin-user-doc -->
 * Reflective "is set" check: a feature counts as set when non-null.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public boolean eIsSet(int featureID) {
switch (featureID) {
case GmlPackage.ANGLE_CHOICE_TYPE__ANGLE:
return angle != null;
case GmlPackage.ANGLE_CHOICE_TYPE__DMS_ANGLE:
return dmsAngle != null;
}
return super.eIsSet(featureID);
}
} //AngleChoiceTypeImpl
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.backend.hadoop.executionengine.tez;
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.RawComparator;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.filecache.ClientDistributedCacheManager;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.api.records.LocalResourceType;
import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.pig.PigConfiguration;
import org.apache.pig.PigException;
import org.apache.pig.StoreFuncInterface;
import org.apache.pig.backend.executionengine.ExecException;
import org.apache.pig.backend.hadoop.HDataType;
import org.apache.pig.backend.hadoop.datastorage.ConfigurationUtil;
import org.apache.pig.backend.hadoop.executionengine.JobCreationException;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.InputSizeReducerEstimator;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.JobControlCompiler;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.JobControlCompiler.PigSecondaryKeyGroupComparator;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MRConfiguration;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PhyPlanSetter;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigBigDecimalRawComparator;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigBigIntegerRawComparator;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigBooleanRawComparator;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigBytesRawComparator;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigCombiner;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigDateTimeRawComparator;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigDoubleRawComparator;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigFloatRawComparator;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigInputFormat;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigIntRawComparator;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigLongRawComparator;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigOutputFormat;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigSecondaryKeyComparator;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigTextRawComparator;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigTupleSortComparator;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigWritableComparators;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.partitioners.SecondaryKeyPartitioner;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.plans.EndOfAllInputSetter;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.PhysicalOperator;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.plans.PhysicalPlan;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POLoad;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POLocalRearrange;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POPackage;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POSplit;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POStore;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.util.PlanHelper;
import org.apache.pig.backend.hadoop.executionengine.shims.HadoopShims;
import org.apache.pig.backend.hadoop.executionengine.tez.plan.TezEdgeDescriptor;
import org.apache.pig.backend.hadoop.executionengine.tez.plan.TezOpPlanVisitor;
import org.apache.pig.backend.hadoop.executionengine.tez.plan.TezOperPlan;
import org.apache.pig.backend.hadoop.executionengine.tez.plan.TezOperator;
import org.apache.pig.backend.hadoop.executionengine.tez.plan.TezPOPackageAnnotator.LoRearrangeDiscoverer;
import org.apache.pig.backend.hadoop.executionengine.tez.plan.operator.POLocalRearrangeTez;
import org.apache.pig.backend.hadoop.executionengine.tez.plan.operator.POShuffleTezLoad;
import org.apache.pig.backend.hadoop.executionengine.tez.plan.operator.POStoreTez;
import org.apache.pig.backend.hadoop.executionengine.tez.runtime.PartitionerDefinedVertexManager;
import org.apache.pig.backend.hadoop.executionengine.tez.runtime.PigGraceShuffleVertexManager;
import org.apache.pig.backend.hadoop.executionengine.tez.runtime.PigInputFormatTez;
import org.apache.pig.backend.hadoop.executionengine.tez.runtime.PigOutputFormatTez;
import org.apache.pig.backend.hadoop.executionengine.tez.runtime.PigProcessor;
import org.apache.pig.backend.hadoop.executionengine.tez.util.MRToTezHelper;
import org.apache.pig.backend.hadoop.executionengine.tez.util.SecurityHelper;
import org.apache.pig.backend.hadoop.executionengine.tez.util.TezUDFContextSeparator;
import org.apache.pig.data.DataType;
import org.apache.pig.impl.PigContext;
import org.apache.pig.impl.PigImplConstants;
import org.apache.pig.impl.builtin.DefaultIndexableLoader;
import org.apache.pig.impl.io.FileLocalizer;
import org.apache.pig.impl.io.NullablePartitionWritable;
import org.apache.pig.impl.io.NullableTuple;
import org.apache.pig.impl.plan.DependencyOrderWalker;
import org.apache.pig.impl.plan.OperatorKey;
import org.apache.pig.impl.plan.VisitorException;
import org.apache.pig.impl.util.ObjectSerializer;
import org.apache.pig.impl.util.UDFContext;
import org.apache.pig.impl.util.UDFContextSeparator.UDFType;
import org.apache.pig.tools.pigstats.tez.TezScriptState;
import org.apache.pig.tools.pigstats.tez.TezScriptState.TezDAGScriptInfo;
import org.apache.tez.common.TezUtils;
import org.apache.tez.dag.api.DAG;
import org.apache.tez.dag.api.DataSinkDescriptor;
import org.apache.tez.dag.api.DataSourceDescriptor;
import org.apache.tez.dag.api.Edge;
import org.apache.tez.dag.api.EdgeManagerPluginDescriptor;
import org.apache.tez.dag.api.EdgeProperty;
import org.apache.tez.dag.api.EdgeProperty.DataMovementType;
import org.apache.tez.dag.api.GroupInputEdge;
import org.apache.tez.dag.api.InputDescriptor;
import org.apache.tez.dag.api.InputInitializerDescriptor;
import org.apache.tez.dag.api.OutputCommitterDescriptor;
import org.apache.tez.dag.api.OutputDescriptor;
import org.apache.tez.dag.api.ProcessorDescriptor;
import org.apache.tez.dag.api.TezConfiguration;
import org.apache.tez.dag.api.UserPayload;
import org.apache.tez.dag.api.Vertex;
import org.apache.tez.dag.api.VertexGroup;
import org.apache.tez.dag.api.VertexLocationHint;
import org.apache.tez.dag.api.VertexManagerPluginDescriptor;
import org.apache.tez.dag.library.vertexmanager.ShuffleVertexManager;
import org.apache.tez.mapreduce.combine.MRCombiner;
import org.apache.tez.mapreduce.committer.MROutputCommitter;
import org.apache.tez.mapreduce.common.MRInputSplitDistributor;
import org.apache.tez.mapreduce.hadoop.InputSplitInfo;
import org.apache.tez.mapreduce.hadoop.InputSplitInfoMem;
import org.apache.tez.mapreduce.hadoop.MRHelpers;
import org.apache.tez.mapreduce.hadoop.MRJobConfig;
import org.apache.tez.mapreduce.input.MRInput;
import org.apache.tez.mapreduce.output.MROutput;
import org.apache.tez.mapreduce.partition.MRPartitioner;
import org.apache.tez.mapreduce.protos.MRRuntimeProtos;
import org.apache.tez.mapreduce.protos.MRRuntimeProtos.MRInputUserPayloadProto.Builder;
import org.apache.tez.mapreduce.protos.MRRuntimeProtos.MRSplitsProto;
import org.apache.tez.runtime.library.api.TezRuntimeConfiguration;
import org.apache.tez.runtime.library.input.ConcatenatedMergedKeyValueInput;
import org.apache.tez.runtime.library.input.OrderedGroupedKVInput;
import org.apache.tez.runtime.library.input.OrderedGroupedMergedKVInput;
import org.apache.tez.runtime.library.input.UnorderedKVInput;
/**
* A visitor to construct DAG out of Tez plan.
*/
public class TezDagBuilder extends TezOpPlanVisitor {
    // Logger for DAG-construction progress and diagnostics.
    private static final Log log = LogFactory.getLog(TezDagBuilder.class);
    // The Tez DAG under construction; vertices and edges are added as the plan is walked.
    private DAG dag;
    // Local resources (jars/files) shipped with every vertex in the DAG.
    private Map<String, LocalResource> localResources;
    private PigContext pc;
    // Snapshot of pc.getProperties() converted to a Hadoop Configuration.
    private Configuration globalConf;
    private FileSystem fs;
    // Desired input bytes per intermediate task; clamped to [128MB, bytes-per-reducer] in the constructor.
    private long intermediateTaskInputSize;
    // Names of vertices whose input splits were spilled to disk; used by
    // avoidContainerReuseIfInputSplitInDisk to keep containers from being shared.
    private Set<String> inputSplitInDiskVertices;
    private TezUDFContextSeparator udfContextSeparator;
    // Lazily-initialized / cached serialized forms reused across vertices and edges.
    private String serializedTezPlan;
    private String serializedPigContext;
    private String serializedUDFImportList;
    /**
     * Creates a builder that walks {@code plan} in dependency order and populates
     * {@code dag} with a vertex per Tez operator and edges between them.
     * Also primes delegation tokens, computes the intermediate-task input size,
     * and pre-serializes the PigContext and UDF import list for reuse.
     *
     * @param pc             the Pig context whose properties configure the DAG
     * @param plan           the Tez operator plan to translate
     * @param dag            the (initially empty) DAG to populate
     * @param localResources local resources to attach to every vertex
     */
    public TezDagBuilder(PigContext pc, TezOperPlan plan, DAG dag,
            Map<String, LocalResource> localResources) {
        super(plan, new DependencyOrderWalker<TezOperator, TezOperPlan>(plan));
        this.pc = pc;
        this.globalConf = ConfigurationUtil.toConfiguration(pc.getProperties(), true);
        this.localResources = localResources;
        this.dag = dag;
        this.inputSplitInDiskVertices = new HashSet<String>();
        try {
            // Add credentials from binary token file and get tokens for namenodes
            // specified in mapreduce.job.hdfs-servers
            SecurityHelper.populateTokenCache(globalConf, dag.getCredentials());
        } catch (IOException e) {
            throw new RuntimeException("Error while fetching delegation tokens", e);
        }
        try {
            fs = FileSystem.get(globalConf);
            // Default intermediate task input to the block size of the temp dir's filesystem.
            intermediateTaskInputSize = HadoopShims.getDefaultBlockSize(fs, FileLocalizer.getTemporaryResourcePath(pc));
        } catch (Exception e) {
            log.warn("Unable to get the block size for temporary directory, defaulting to 128MB", e);
            intermediateTaskInputSize = 134217728L;
        }
        // At least 128MB. Else we will end up with too many tasks
        intermediateTaskInputSize = Math.max(intermediateTaskInputSize, 134217728L);
        // ...but never more than the configured bytes-per-reducer cap.
        intermediateTaskInputSize = Math.min(intermediateTaskInputSize,
                globalConf.getLong(
                        InputSizeReducerEstimator.BYTES_PER_REDUCER_PARAM,
                        InputSizeReducerEstimator.DEFAULT_BYTES_PER_REDUCER));
        try {
            serializedPigContext = ObjectSerializer.serialize(pc);
            serializedUDFImportList = ObjectSerializer.serialize(PigContext.getPackageImportList());
            // Split the UDFContext per vertex/edge so each vertex only carries
            // the UDF state it actually needs.
            udfContextSeparator = new TezUDFContextSeparator(plan,
                    new DependencyOrderWalker<TezOperator, TezOperPlan>(plan));
            udfContextSeparator.visit();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
public String getSerializedTezPlan() throws IOException {
if (serializedTezPlan == null) {
// Initialize lazy as auto parallelism might not be in play
serializedTezPlan = ObjectSerializer.serialize(getPlan());
}
return serializedTezPlan;
}
// Hack to turn off relocalization till TEZ-2192 is fixed.
public void avoidContainerReuseIfInputSplitInDisk() throws IOException {
if (!inputSplitInDiskVertices.isEmpty()) {
// Create empty job.split file and add as resource to all other
// vertices that are not reading splits from disk so that their
// containers are not reused by vertices that read splits from disk
Path jobSplitFile = new Path(FileLocalizer.getTemporaryPath(pc),
MRJobConfig.JOB_SPLIT);
FSDataOutputStream out = fs.create(jobSplitFile);
out.close();
log.info("Creating empty job.split in " + jobSplitFile);
FileStatus splitFileStatus = fs.getFileStatus(jobSplitFile);
LocalResource localResource = LocalResource.newInstance(
ConverterUtils.getYarnUrlFromPath(jobSplitFile),
LocalResourceType.FILE,
LocalResourceVisibility.APPLICATION,
splitFileStatus.getLen(),
splitFileStatus.getModificationTime());
for (Vertex vertex : dag.getVertices()) {
if (!inputSplitInDiskVertices.contains(vertex.getName())) {
if (vertex.getTaskLocalFiles().containsKey(
MRJobConfig.JOB_SPLIT)) {
throw new RuntimeException(
"LocalResources already contains a"
+ " resource named "
+ MRJobConfig.JOB_SPLIT);
}
vertex.getTaskLocalFiles().put(MRJobConfig.JOB_SPLIT,
localResource);
}
}
}
}
    /**
     * Translates one Tez operator into the DAG: creates its vertex (or, for a
     * union, defers to a VertexGroup built after its members) and wires up the
     * edges from all predecessor vertices.
     *
     * @param tezOp the Tez operator being visited in dependency order
     * @throws VisitorException if the vertex or one of its edges cannot be built
     */
    @Override
    public void visitTezOp(TezOperator tezOp) throws VisitorException {
        TezOperPlan tezPlan = getPlan();
        List<TezOperator> predecessors = tezPlan.getPredecessors(tezOp);
        // Construct vertex for the current Tez operator
        // NOTE: 'to' stays null for a vertex group; only the non-group branches
        // below dereference it.
        Vertex to = null;
        try {
            if (!tezOp.isVertexGroup()) {
                to = newVertex(tezOp);
                dag.addVertex(to);
            } else {
                // For union, we construct VertexGroup after iterating the
                // predecessors.
            }
        } catch (Exception e) {
            throw new VisitorException("Cannot create vertex for "
                    + tezOp.name(), e);
        }
        // Connect the new vertex with predecessor vertices
        if (predecessors != null) {
            Vertex[] groupMembers = new Vertex[predecessors.size()];
            for (int i = 0; i < predecessors.size(); i++) {
                // Since this is a dependency order walker, predecessor vertices
                // must have already been created.
                TezOperator pred = predecessors.get(i);
                try {
                    if (pred.isVertexGroup()) {
                        VertexGroup from = pred.getVertexGroupInfo().getVertexGroup();
                        // The plan of vertex group is empty. Since we create the Edge based on
                        // some of the operators in the plan refer to one of the vertex group members.
                        // Both the vertex group and its members reference same EdgeDescriptor object to the
                        // the successor
                        GroupInputEdge edge = newGroupInputEdge(
                                getPlan().getOperator(pred.getVertexGroupMembers().get(0)), tezOp, from, to);
                        dag.addEdge(edge);
                    } else {
                        Vertex from = dag.getVertex(pred.getOperatorKey().toString());
                        if (tezOp.isVertexGroup()) {
                            // Collect member vertices; the group itself is created below.
                            groupMembers[i] = from;
                        } else {
                            EdgeProperty prop = newEdge(pred, tezOp, false);
                            Edge edge = Edge.create(from, to, prop);
                            dag.addEdge(edge);
                        }
                    }
                } catch (IOException e) {
                    throw new VisitorException("Cannot create edge from "
                            + pred.name() + " to " + tezOp.name(), e);
                }
            }
            if (tezOp.isVertexGroup()) {
                String groupName = tezOp.getOperatorKey().toString();
                VertexGroup vertexGroup = dag.createVertexGroup(groupName, groupMembers);
                tezOp.getVertexGroupInfo().setVertexGroup(vertexGroup);
                POStore store = tezOp.getVertexGroupInfo().getStore();
                if (store != null) {
                    // All union members write to the same store via the group-level data sink.
                    vertexGroup.addDataSink(store.getOperatorKey().toString(),
                            DataSinkDescriptor.create(tezOp.getVertexGroupInfo().getStoreOutputDescriptor(),
                                    OutputCommitterDescriptor.create(MROutputCommitter.class.getName()), dag.getCredentials()));
                }
            }
        }
    }
private GroupInputEdge newGroupInputEdge(TezOperator fromOp,
TezOperator toOp, VertexGroup from, Vertex to) throws IOException {
EdgeProperty edgeProperty = newEdge(fromOp, toOp, true);
String groupInputClass = ConcatenatedMergedKeyValueInput.class.getName();
// In case of SCATTER_GATHER and UnorderedKVInput it will still be
// ConcatenatedMergedKeyValueInput
if(edgeProperty.getDataMovementType().equals(DataMovementType.SCATTER_GATHER)
&& edgeProperty.getEdgeDestination().getClassName().equals(OrderedGroupedKVInput.class.getName())) {
groupInputClass = OrderedGroupedMergedKVInput.class.getName();
}
return GroupInputEdge.create(from, to, edgeProperty,
InputDescriptor.create(groupInputClass).setUserPayload(edgeProperty.getEdgeDestination().getUserPayload()));
}
    /**
     * Return EdgeProperty that connects two vertices.
     *
     * Builds the input/output descriptors for the edge, configures the
     * intermediate key/value/comparator classes (including combiner and
     * secondary-sort settings) and serializes that configuration into the
     * descriptors' user payloads.
     *
     * @param from the upstream (producing) Tez operator
     * @param to the downstream (consuming) Tez operator
     * @param isMergedInput whether the edge feeds a merged (vertex group/union) input
     * @return EdgeProperty
     * @throws IOException if edge configuration cannot be serialized
     */
    private EdgeProperty newEdge(TezOperator from, TezOperator to, boolean isMergedInput)
            throws IOException {
        TezEdgeDescriptor edge = to.inEdges.get(from.getOperatorKey());
        PhysicalPlan combinePlan = edge.combinePlan;
        InputDescriptor in = InputDescriptor.create(edge.inputClassName);
        OutputDescriptor out = OutputDescriptor.create(edge.outputClassName);
        Configuration conf = ConfigurationUtil.toConfiguration(pc.getProperties(), false);
        if (!combinePlan.isEmpty()) {
            udfContextSeparator.serializeUDFContextForEdge(conf, from, to, UDFType.USERFUNC);
            addCombiner(combinePlan, to, conf, isMergedInput);
        }
        // Find the local rearrange in the source vertex that feeds this edge
        // and use its key type to set the intermediate key/value classes.
        List<POLocalRearrangeTez> lrs = PlanHelper.getPhysicalOperators(from.plan,
                POLocalRearrangeTez.class);
        for (POLocalRearrangeTez lr : lrs) {
            if (lr.getOutputKey().equals(to.getOperatorKey().toString())) {
                byte keyType = lr.getKeyType();
                setIntermediateOutputKeyValue(keyType, conf, to, lr.isConnectedToPackage(), isMergedInput);
                // In case of secondary key sort, main key type is the actual key type
                conf.set("pig.reduce.key.type", Byte.toString(lr.getMainKeyType()));
                break;
            }
        }
        conf.setIfUnset(TezRuntimeConfiguration.TEZ_RUNTIME_PARTITIONER_CLASS,
                MRPartitioner.class.getName());
        // Explicit per-edge overrides take precedence over what was derived above.
        if (edge.getIntermediateOutputKeyClass() != null) {
            conf.set(TezRuntimeConfiguration.TEZ_RUNTIME_KEY_CLASS,
                    edge.getIntermediateOutputKeyClass());
        }
        if (edge.getIntermediateOutputValueClass() != null) {
            conf.set(TezRuntimeConfiguration.TEZ_RUNTIME_VALUE_CLASS,
                    edge.getIntermediateOutputValueClass());
        }
        if (edge.getIntermediateOutputKeyComparatorClass() != null) {
            conf.set(TezRuntimeConfiguration.TEZ_RUNTIME_KEY_COMPARATOR_CLASS,
                    edge.getIntermediateOutputKeyComparatorClass());
        }
        conf.setBoolean(MRConfiguration.MAPPER_NEW_API, true);
        conf.setBoolean(MRConfiguration.REDUCER_NEW_API, true);
        conf.set("pig.pigContext", serializedPigContext);
        conf.set("udf.import.list", serializedUDFImportList);
        if(to.isGlobalSort() || to.isLimitAfterSort()){
            conf.set("pig.sortOrder",
                    ObjectSerializer.serialize(to.getSortOrder()));
        }
        if (edge.isUseSecondaryKey()) {
            conf.set("pig.secondarySortOrder",
                    ObjectSerializer.serialize(edge.getSecondarySortOrder()));
            conf.set(org.apache.hadoop.mapreduce.MRJobConfig.PARTITIONER_CLASS_ATTR,
                    SecondaryKeyPartitioner.class.getName());
            // These needs to be on the vertex as well for POShuffleTezLoad to pick it up.
            // Tez framework also expects this to be per vertex and not edge. IFile.java picks
            // up keyClass and valueClass from vertex config. TODO - check with Tez folks
            // In MR - job.setSortComparatorClass() or MRJobConfig.KEY_COMPARATOR
            conf.set(TezRuntimeConfiguration.TEZ_RUNTIME_KEY_COMPARATOR_CLASS,
                    PigSecondaryKeyComparator.class.getName());
            // In MR - job.setOutputKeyClass() or MRJobConfig.OUTPUT_KEY_CLASS
            conf.set(TezRuntimeConfiguration.TEZ_RUNTIME_KEY_CLASS, NullableTuple.class.getName());
            setGroupingComparator(conf, PigSecondaryKeyGroupComparator.class.getName());
        }
        // A custom partitioner overrides the secondary-key partitioner set above.
        if (edge.partitionerClass != null) {
            conf.set(org.apache.hadoop.mapreduce.MRJobConfig.PARTITIONER_CLASS_ATTR,
                    edge.partitionerClass.getName());
        }
        MRToTezHelper.processMRSettings(conf, globalConf);
        in.setUserPayload(TezUtils.createUserPayloadFromConf(conf));
        out.setUserPayload(TezUtils.createUserPayloadFromConf(conf));
        if (edge.dataMovementType!=DataMovementType.BROADCAST && to.getEstimatedParallelism()!=-1 && to.getVertexParallelism()==-1 && (to.isGlobalSort()||to.isSkewedJoin())) {
            // Use custom edge
            return EdgeProperty.create((EdgeManagerPluginDescriptor)null,
                    edge.dataSourceType, edge.schedulingType, out, in);
        }
        if (to.isUseGraceParallelism()) {
            // Put datamovement to null to prevent vertex "to" from starting. It will be started by PigGraceShuffleVertexManager
            return EdgeProperty.create((EdgeManagerPluginDescriptor)null, edge.dataSourceType,
                    edge.schedulingType, out, in);
        }
        return EdgeProperty.create(edge.dataMovementType, edge.dataSourceType,
                edge.schedulingType, out, in);
    }
    /**
     * Configures an MR-style combiner on an edge: derives key/value classes
     * from the combine plan's rearrange, discovers the rearranges feeding the
     * combiner package, and serializes the plan and package into {@code conf}.
     *
     * @param combinePlan   the physical combine plan (root POPackage, leaf POLocalRearrange)
     * @param pkgTezOp      the downstream Tez operator holding the package
     * @param conf          edge configuration to populate
     * @param isMergedInput whether the edge feeds a merged (vertex group) input
     * @throws IOException if plan/package serialization fails
     */
    private void addCombiner(PhysicalPlan combinePlan, TezOperator pkgTezOp,
            Configuration conf, boolean isMergedInput) throws IOException {
        POPackage combPack = (POPackage) combinePlan.getRoots().get(0);
        POLocalRearrange combRearrange = (POLocalRearrange) combinePlan
                .getLeaves().get(0);
        setIntermediateOutputKeyValue(combRearrange.getKeyType(), conf, pkgTezOp, true, isMergedInput);
        // Annotate the package with the rearranges that feed it before it is
        // detached from the plan below.
        LoRearrangeDiscoverer lrDiscoverer = new LoRearrangeDiscoverer(
                combinePlan, null, pkgTezOp, combPack);
        lrDiscoverer.visit();
        // The package is serialized separately ("pig.combine.package"), so
        // remove it from the serialized combine plan.
        combinePlan.remove(combPack);
        conf.set(TezRuntimeConfiguration.TEZ_RUNTIME_COMBINER_CLASS,
                MRCombiner.class.getName());
        conf.set(MRJobConfig.COMBINE_CLASS_ATTR,
                PigCombiner.Combine.class.getName());
        conf.set("pig.combinePlan", ObjectSerializer.serialize(combinePlan));
        conf.set("pig.combine.package", ObjectSerializer.serialize(combPack));
        conf.set("pig.map.keytype", ObjectSerializer
                .serialize(new byte[] { combRearrange.getKeyType() }));
    }
    /**
     * Builds the Tez Vertex for one Tez operator: assembles the payload
     * configuration (plans, stores, loads, parallelism hints), converts the
     * root POPackage into a POShuffleTezLoad, chooses a VertexManagerPlugin,
     * sizes the task resources, and attaches data sources (loads) and data
     * sinks (stores).
     *
     * @param tezOp the Tez operator to materialize as a vertex
     * @return the fully configured vertex (not yet added to the DAG)
     * @throws IOException if serialization or filesystem access fails
     * @throws ClassNotFoundException if a configured class cannot be loaded
     * @throws InterruptedException if split handling is interrupted
     */
    private Vertex newVertex(TezOperator tezOp) throws IOException,
            ClassNotFoundException, InterruptedException {
        ProcessorDescriptor procDesc = ProcessorDescriptor.create(
                tezOp.getProcessorName());
        // Pass physical plans to vertex as user payload.
        JobConf payloadConf = new JobConf(ConfigurationUtil.toConfiguration(pc.getProperties(), false));
        // We do this so that dag.getCredentials(), job.getCredentials(),
        // job.getConfiguration().getCredentials() all reference the same Credentials object
        // Unfortunately there is no setCredentials() on Job
        payloadConf.setCredentials(dag.getCredentials());
        // We won't actually use this job, but we need it to talk with the Load Store funcs
        @SuppressWarnings("deprecation")
        Job job = new Job(payloadConf);
        payloadConf = (JobConf) job.getConfiguration();
        payloadConf.setBoolean(MRConfiguration.MAPPER_NEW_API, true);
        payloadConf.setBoolean(MRConfiguration.REDUCER_NEW_API, true);
        payloadConf.setClass(MRConfiguration.INPUTFORMAT_CLASS,
                PigInputFormatTez.class, InputFormat.class);
        setOutputFormat(job);
        payloadConf.set("udf.import.list", serializedUDFImportList);
        payloadConf.set("exectype", "TEZ");
        MRToTezHelper.processMRSettings(payloadConf, globalConf);
        // Process stores
        LinkedList<POStore> stores = processStores(tezOp, payloadConf, job);
        // Separate payload snapshots for input (load) and output (store) sides.
        Configuration inputPayLoad = null;
        Configuration outputPayLoad = null;
        if (!stores.isEmpty()) {
            outputPayLoad = new Configuration(payloadConf);
            outputPayLoad.set(JobControlCompiler.PIG_MAP_STORES,
                    ObjectSerializer.serialize(new ArrayList<POStore>()));
        }
        if (!(tezOp.getLoaderInfo().getLoads().isEmpty())) {
            payloadConf.set(PigInputFormat.PIG_INPUTS, ObjectSerializer.serialize(tezOp.getLoaderInfo().getInp()));
            payloadConf.set(PigInputFormat.PIG_INPUT_SIGNATURES, ObjectSerializer.serialize(tezOp.getLoaderInfo().getInpSignatureLists()));
            payloadConf.set(PigInputFormat.PIG_INPUT_LIMITS, ObjectSerializer.serialize(tezOp.getLoaderInfo().getInpLimits()));
            // Snapshot taken BEFORE pig.pigContext is set below, so the input
            // payload normally omits it (except for DefaultIndexableLoader).
            inputPayLoad = new Configuration(payloadConf);
            if (tezOp.getLoaderInfo().getLoads().get(0).getLoadFunc() instanceof DefaultIndexableLoader) {
                inputPayLoad.set("pig.pigContext", serializedPigContext);
            }
        }
        payloadConf.set("pig.pigContext", serializedPigContext);
        if (tezOp.getSampleOperator() != null) {
            payloadConf.set(PigProcessor.SAMPLE_VERTEX, tezOp.getSampleOperator().getOperatorKey().toString());
        }
        if (tezOp.getSortOperator() != null) {
            // Required by Sample Aggregation job for estimating quantiles
            payloadConf.set(PigProcessor.SORT_VERTEX, tezOp.getSortOperator().getOperatorKey().toString());
            // PIG-4162: Order by/Skew Join in intermediate stage.
            // Increasing order by parallelism may not be required as it is
            // usually followed by limit other than store. But would benefit
            // cases like skewed join followed by group by.
            if (tezOp.getSortOperator().getEstimatedParallelism() != -1
                    && tezOp.getSortOperator().isIntermediateReducer()) {
                payloadConf.setLong(
                        InputSizeReducerEstimator.BYTES_PER_REDUCER_PARAM,
                        intermediateTaskInputSize);
            }
        }
        // Set parent plan for all operators in the Tez plan.
        new PhyPlanSetter(tezOp.plan).visit();
        // Set the endOfAllInput flag on the physical plan if certain operators that
        // use this property (such as STREAM) are present in the plan.
        EndOfAllInputSetter.EndOfAllInputChecker checker =
                new EndOfAllInputSetter.EndOfAllInputChecker(tezOp.plan);
        checker.visit();
        if (checker.isEndOfAllInputPresent()) {
            payloadConf.set(JobControlCompiler.END_OF_INP_IN_MAP, "true");
        }
        // Configure the classes for incoming shuffles to this TezOp
        // TODO: Refactor out resetting input keys, PIG-3957
        List<PhysicalOperator> roots = tezOp.plan.getRoots();
        if (roots.size() == 1 && roots.get(0) instanceof POPackage) {
            // Replace the POPackage root with a POShuffleTezLoad that reads
            // the shuffled inputs directly from Tez.
            POPackage pack = (POPackage) roots.get(0);
            List<PhysicalOperator> succsList = tezOp.plan.getSuccessors(pack);
            if (succsList != null) {
                succsList = new ArrayList<PhysicalOperator>(succsList);
            }
            byte keyType = pack.getPkgr().getKeyType();
            tezOp.plan.remove(pack);
            payloadConf.set("pig.reduce.package", ObjectSerializer.serialize(pack));
            POShuffleTezLoad newPack = new POShuffleTezLoad(pack);
            if (tezOp.isSkewedJoin()) {
                newPack.setSkewedJoins(true);
            }
            tezOp.plan.add(newPack);
            boolean isMergedInput = false;
            // Set input keys for POShuffleTezLoad. This is used to identify
            // the inputs that are attached to the POShuffleTezLoad in the
            // backend.
            Map<Integer, String> localRearrangeMap = new TreeMap<Integer, String>();
            for (TezOperator pred : mPlan.getPredecessors(tezOp)) {
                if (tezOp.getSampleOperator() != null && tezOp.getSampleOperator() == pred) {
                    // skip sample vertex input
                } else {
                    String inputKey = pred.getOperatorKey().toString();
                    boolean isVertexGroup = false;
                    if (pred.isVertexGroup()) {
                        isVertexGroup = true;
                        // Use a member's plan; the group's own plan is empty.
                        pred = mPlan.getOperator(pred.getVertexGroupMembers().get(0));
                    }
                    LinkedList<POLocalRearrangeTez> lrs =
                            PlanHelper.getPhysicalOperators(pred.plan, POLocalRearrangeTez.class);
                    for (POLocalRearrangeTez lr : lrs) {
                        if (lr.isConnectedToPackage()
                                && lr.getOutputKey().equals(tezOp.getOperatorKey().toString())) {
                            localRearrangeMap.put((int) lr.getIndex(), inputKey);
                            if (isVertexGroup) {
                                isMergedInput = true;
                            }
                        }
                    }
                }
            }
            // TreeMap iteration keeps input keys ordered by rearrange index.
            for (Map.Entry<Integer, String> entry : localRearrangeMap.entrySet()) {
                newPack.addInputKey(entry.getValue());
            }
            if (succsList != null) {
                for (PhysicalOperator succs : succsList) {
                    tezOp.plan.connect(newPack, succs);
                }
            }
            //POShuffleTezLoad accesses the comparator setting
            selectKeyComparator(keyType, payloadConf, tezOp, isMergedInput);
        }
        // set parent plan in all operators. currently the parent plan is really
        // used only when POStream, POSplit are present in the plan
        new PhyPlanSetter(tezOp.plan).visit();
        // Serialize the execution plan
        payloadConf.set(PigProcessor.PLAN,
                ObjectSerializer.serialize(tezOp.plan));
        udfContextSeparator.serializeUDFContext(payloadConf, tezOp);
        if (!pc.inIllustrator) {
            for (POStore store : stores) {
                // unset inputs for POStore, otherwise, map/reduce plan will be unnecessarily deserialized
                store.setInputs(null);
                store.setParentPlan(null);
            }
            // We put them in the reduce because PigOutputCommitter checks the
            // ID of the task to see if it's a map, and if not, calls the reduce
            // committers.
            payloadConf.set(JobControlCompiler.PIG_MAP_STORES,
                    ObjectSerializer.serialize(new ArrayList<POStore>()));
            payloadConf.set(JobControlCompiler.PIG_REDUCE_STORES,
                    ObjectSerializer.serialize(stores));
        }
        if (tezOp.isNeedEstimateParallelism()) {
            payloadConf.setBoolean(PigProcessor.ESTIMATE_PARALLELISM, true);
            log.info("Estimate quantile for sample aggregation vertex " + tezOp.getOperatorKey().toString());
        }
        // set various parallelism into the job conf for later analysis, PIG-2779
        payloadConf.setInt(PigImplConstants.REDUCER_DEFAULT_PARALLELISM, pc.defaultParallel);
        payloadConf.setInt(PigImplConstants.REDUCER_REQUESTED_PARALLELISM, tezOp.getRequestedParallelism());
        payloadConf.setInt(PigImplConstants.REDUCER_ESTIMATED_PARALLELISM, tezOp.getEstimatedParallelism());
        TezScriptState ss = TezScriptState.get();
        ss.addVertexSettingsToConf(dag.getName(), tezOp, payloadConf);
        // Take our assembled configuration and create a vertex
        UserPayload userPayload = TezUtils.createUserPayloadFromConf(payloadConf);
        TezDAGScriptInfo dagScriptInfo = TezScriptState.get().getDAGScriptInfo(dag.getName());
        String alias = dagScriptInfo.getAlias(tezOp);
        String aliasLocation = dagScriptInfo.getAliasLocation(tezOp);
        String features = dagScriptInfo.getPigFeatures(tezOp);
        String vertexInfo = aliasLocation + " (" + features + ")" ;
        procDesc.setUserPayload(userPayload).setHistoryText(TezUtils.convertToHistoryText(vertexInfo, payloadConf));
        String vmPluginName = null;
        Configuration vmPluginConf = null;
        // Set the right VertexManagerPlugin
        if (tezOp.getEstimatedParallelism() != -1) {
            if (tezOp.isGlobalSort()||tezOp.isSkewedJoin()) {
                if (tezOp.getVertexParallelism()==-1 && (
                        tezOp.isGlobalSort() &&getPlan().getPredecessors(tezOp).size()==1||
                        tezOp.isSkewedJoin() &&getPlan().getPredecessors(tezOp).size()==2)) {
                    // Set VertexManagerPlugin to PartitionerDefinedVertexManager, which is able
                    // to decrease/increase parallelism of sorting vertex dynamically
                    // based on the numQuantiles calculated by sample aggregation vertex
                    vmPluginName = PartitionerDefinedVertexManager.class.getName();
                    log.info("Set VertexManagerPlugin to PartitionerDefinedParallelismVertexManager for vertex " + tezOp.getOperatorKey().toString());
                }
            } else {
                boolean containScatterGather = false;
                boolean containCustomPartitioner = false;
                for (TezEdgeDescriptor edge : tezOp.inEdges.values()) {
                    if (edge.dataMovementType == DataMovementType.SCATTER_GATHER) {
                        containScatterGather = true;
                    }
                    if (edge.partitionerClass!=null) {
                        containCustomPartitioner = true;
                    }
                }
                if (containScatterGather && !containCustomPartitioner) {
                    vmPluginConf = (vmPluginConf == null) ? ConfigurationUtil.toConfiguration(pc.getProperties(), false) : vmPluginConf;
                    // Use auto-parallelism feature of ShuffleVertexManager to dynamically
                    // reduce the parallelism of the vertex
                    if (payloadConf.getBoolean(PigConfiguration.PIG_TEZ_GRACE_PARALLELISM, true)
                            && !TezOperPlan.getGrandParentsForGraceParallelism(getPlan(), tezOp).isEmpty()) {
                        vmPluginName = PigGraceShuffleVertexManager.class.getName();
                        tezOp.setUseGraceParallelism(true);
                        vmPluginConf.set("pig.tez.plan", getSerializedTezPlan());
                        vmPluginConf.set("pig.pigContext", serializedPigContext);
                    } else {
                        vmPluginName = ShuffleVertexManager.class.getName();
                    }
                    vmPluginConf.setBoolean(ShuffleVertexManager.TEZ_SHUFFLE_VERTEX_MANAGER_ENABLE_AUTO_PARALLEL, true);
                    if (stores.size() <= 0) {
                        // Intermediate reduce. Set the bytes per reducer to be block size.
                        vmPluginConf.setLong(ShuffleVertexManager.TEZ_SHUFFLE_VERTEX_MANAGER_DESIRED_TASK_INPUT_SIZE,
                                intermediateTaskInputSize);
                    } else if (vmPluginConf.getLong(InputSizeReducerEstimator.BYTES_PER_REDUCER_PARAM,
                            InputSizeReducerEstimator.DEFAULT_BYTES_PER_REDUCER) !=
                            InputSizeReducerEstimator.DEFAULT_BYTES_PER_REDUCER) {
                        vmPluginConf.setLong(ShuffleVertexManager.TEZ_SHUFFLE_VERTEX_MANAGER_DESIRED_TASK_INPUT_SIZE,
                                vmPluginConf.getLong(InputSizeReducerEstimator.BYTES_PER_REDUCER_PARAM,
                                        InputSizeReducerEstimator.DEFAULT_BYTES_PER_REDUCER));
                    }
                    log.info("Set auto parallelism for vertex " + tezOp.getOperatorKey().toString());
                }
            }
        }
        if (tezOp.isLimit() && (vmPluginName == null || vmPluginName.equals(PigGraceShuffleVertexManager.class.getName())||
                vmPluginName.equals(ShuffleVertexManager.class.getName()))) {
            if (tezOp.inEdges.values().iterator().next().inputClassName.equals(UnorderedKVInput.class.getName())) {
                // Setting SRC_FRACTION to 0.00001 so that even if there are 100K source tasks,
                // limit job starts when 1 source task finishes.
                // If limit is part of a group by or join because their parallelism is 1,
                // we should leave the configuration with the defaults.
                vmPluginConf = (vmPluginConf == null) ? ConfigurationUtil.toConfiguration(pc.getProperties(), false) : vmPluginConf;
                vmPluginConf.set(ShuffleVertexManager.TEZ_SHUFFLE_VERTEX_MANAGER_MIN_SRC_FRACTION, "0.00001");
                vmPluginConf.set(ShuffleVertexManager.TEZ_SHUFFLE_VERTEX_MANAGER_MAX_SRC_FRACTION, "0.00001");
                log.info("Set " + ShuffleVertexManager.TEZ_SHUFFLE_VERTEX_MANAGER_MIN_SRC_FRACTION + " to 0.00001 for limit vertex " + tezOp.getOperatorKey().toString());
            }
        }
        int parallel = tezOp.getVertexParallelism();
        if (tezOp.isUseGraceParallelism()) {
            // Parallelism will be decided later by PigGraceShuffleVertexManager.
            parallel = -1;
        }
        Resource resource;
        if (globalConf.get(TezConfiguration.TEZ_TASK_RESOURCE_MEMORY_MB)!=null &&
                globalConf.get(TezConfiguration.TEZ_TASK_RESOURCE_CPU_VCORES)!=null) {
            resource = Resource.newInstance(globalConf.getInt(TezConfiguration.TEZ_TASK_RESOURCE_MEMORY_MB,
                    TezConfiguration.TEZ_TASK_RESOURCE_MEMORY_MB_DEFAULT),
                    globalConf.getInt(TezConfiguration.TEZ_TASK_RESOURCE_CPU_VCORES,
                            TezConfiguration.TEZ_TASK_RESOURCE_CPU_VCORES_DEFAULT));
        } else {
            // If tez setting is not defined, try MR setting
            resource = tezOp.isUseMRMapSettings() ? MRHelpers.getResourceForMRMapper(globalConf) : MRHelpers.getResourceForMRReducer(globalConf);
        }
        Vertex vertex = Vertex.create(tezOp.getOperatorKey().toString(), procDesc, parallel, resource);
        Map<String, String> taskEnv = new HashMap<String, String>();
        MRHelpers.updateEnvBasedOnMRTaskEnv(globalConf, taskEnv, tezOp.isUseMRMapSettings());
        vertex.setTaskEnvironment(taskEnv);
        // All these classes are @InterfaceAudience.Private in Hadoop. Switch to Tez methods in TEZ-1012
        // set the timestamps, public/private visibility of the archives and files
        ClientDistributedCacheManager
                .determineTimestampsAndCacheVisibilities(globalConf);
        // get DelegationToken for each cached file
        ClientDistributedCacheManager.getDelegationTokens(globalConf,
                job.getCredentials());
        MRApps.setupDistributedCache(globalConf, localResources);
        vertex.addTaskLocalFiles(localResources);
        String javaOpts;
        if (globalConf.get(TezConfiguration.TEZ_TASK_LAUNCH_CMD_OPTS)!=null) {
            javaOpts = globalConf.get(TezConfiguration.TEZ_TASK_LAUNCH_CMD_OPTS);
        } else {
            // If tez setting is not defined, try MR setting
            javaOpts = tezOp.isUseMRMapSettings() ? MRHelpers.getJavaOptsForMRMapper(globalConf)
                    : MRHelpers.getJavaOptsForMRReducer(globalConf);
        }
        vertex.setTaskLaunchCmdOpts(javaOpts);
        log.info("For vertex - " + tezOp.getOperatorKey().toString()
                + ": parallelism=" + tezOp.getVertexParallelism()
                + ", memory=" + vertex.getTaskResource().getMemory()
                + ", java opts=" + vertex.getTaskLaunchCmdOpts()
                );
        log.info("Processing aliases: " + alias);
        log.info("Detailed locations: " + aliasLocation);
        log.info("Pig features in the vertex: " + features);
        // Right now there can only be one of each of these. Will need to be
        // more generic when there can be more.
        for (POLoad ld : tezOp.getLoaderInfo().getLoads()) {
            // TODO: These should get the globalConf, or a merged version that
            // keeps settings like pig.maxCombinedSplitSize
            Builder userPayLoadBuilder = MRRuntimeProtos.MRInputUserPayloadProto.newBuilder();
            InputSplitInfo inputSplitInfo = tezOp.getLoaderInfo().getInputSplitInfo();
            Map<String, LocalResource> additionalLocalResources = null;
            int spillThreshold = payloadConf
                    .getInt(PigConfiguration.PIG_TEZ_INPUT_SPLITS_MEM_THRESHOLD,
                            PigConfiguration.PIG_TEZ_INPUT_SPLITS_MEM_THRESHOLD_DEFAULT);
            // Currently inputSplitInfo is always InputSplitInfoMem at this point
            if (inputSplitInfo instanceof InputSplitInfoMem) {
                MRSplitsProto splitsProto = inputSplitInfo.getSplitsProto();
                int splitsSerializedSize = splitsProto.getSerializedSize();
                if(splitsSerializedSize > spillThreshold) {
                    inputPayLoad.setBoolean(
                            org.apache.tez.mapreduce.hadoop.MRJobConfig.MR_TEZ_SPLITS_VIA_EVENTS,
                            false);
                    // Write splits to disk
                    Path inputSplitsDir = FileLocalizer.getTemporaryPath(pc);
                    log.info("Writing input splits to " + inputSplitsDir
                            + " for vertex " + vertex.getName()
                            + " as the serialized size in memory is "
                            + splitsSerializedSize + ". Configured "
                            + PigConfiguration.PIG_TEZ_INPUT_SPLITS_MEM_THRESHOLD
                            + " is " + spillThreshold);
                    inputSplitInfo = MRToTezHelper.writeInputSplitInfoToDisk(
                            (InputSplitInfoMem)inputSplitInfo, inputSplitsDir, payloadConf, fs);
                    additionalLocalResources = new HashMap<String, LocalResource>();
                    MRToTezHelper.updateLocalResourcesForInputSplits(
                            fs, inputSplitInfo,
                            additionalLocalResources);
                    // Remember this vertex so container reuse can be disabled later
                    // (see avoidContainerReuseIfInputSplitInDisk).
                    inputSplitInDiskVertices.add(vertex.getName());
                } else {
                    // Send splits via RPC to AM
                    userPayLoadBuilder.setSplits(splitsProto);
                }
                //Free up memory
                tezOp.getLoaderInfo().setInputSplitInfo(null);
            }
            udfContextSeparator.serializeUDFContext(inputPayLoad, tezOp, UDFType.LOADFUNC);
            userPayLoadBuilder.setConfigurationBytes(TezUtils.createByteStringFromConf(inputPayLoad));
            vertex.setLocationHint(VertexLocationHint.create(inputSplitInfo.getTaskLocationHints()));
            vertex.addDataSource(ld.getOperatorKey().toString(),
                    DataSourceDescriptor.create(InputDescriptor.create(MRInput.class.getName())
                            .setUserPayload(UserPayload.create(userPayLoadBuilder.build().toByteString().asReadOnlyByteBuffer())),
                            InputInitializerDescriptor.create(MRInputSplitDistributor.class.getName()),
                            inputSplitInfo.getNumTasks(),
                            dag.getCredentials(),
                            null,
                            additionalLocalResources));
        }
        // Union within a split can have multiple stores writing to same output
        Set<String> uniqueStoreOutputs = new HashSet<String>();
        for (POStore store : stores) {
            ArrayList<POStore> singleStore = new ArrayList<POStore>();
            singleStore.add(store);
            Configuration outPayLoad = new Configuration(outputPayLoad);
            udfContextSeparator.serializeUDFContext(outPayLoad, tezOp, store);
            outPayLoad.set(JobControlCompiler.PIG_REDUCE_STORES,
                    ObjectSerializer.serialize(singleStore));
            OutputDescriptor storeOutDescriptor = OutputDescriptor.create(
                    MROutput.class.getName()).setUserPayload(TezUtils
                    .createUserPayloadFromConf(outPayLoad));
            if (tezOp.getVertexGroupStores() != null) {
                // This store belongs to a vertex group (union); hand its output
                // descriptor to the group instead of adding a sink here.
                OperatorKey vertexGroupKey = tezOp.getVertexGroupStores().get(store.getOperatorKey());
                if (vertexGroupKey != null) {
                    getPlan().getOperator(vertexGroupKey).getVertexGroupInfo()
                            .setStoreOutputDescriptor(storeOutDescriptor);
                    continue;
                }
            }
            String outputKey = ((POStoreTez) store).getOutputKey();
            if (!uniqueStoreOutputs.contains(outputKey)) {
                // NOTE(review): outputKey is already a String; toString() is redundant but harmless.
                vertex.addDataSink(outputKey.toString(),
                        DataSinkDescriptor.create(storeOutDescriptor,
                                OutputCommitterDescriptor.create(MROutputCommitter.class.getName()),
                                dag.getCredentials()));
                uniqueStoreOutputs.add(outputKey);
            }
        }
        // LoadFunc and StoreFunc add delegation tokens to Job Credentials in
        // setLocation and setStoreLocation respectively. For eg: HBaseStorage
        // InputFormat add delegation token in getSplits and OutputFormat in
        // checkOutputSpecs. For eg: FileInputFormat and FileOutputFormat
        if (stores.size() > 0) {
            new PigOutputFormat().checkOutputSpecs(job);
        }
        // else if(tezOp.isLimitAfterSort())
        // TODO: PIG-4049 If standalone Limit we need a new VertexManager or new input
        // instead of ShuffledMergedInput. For limit part of the sort (order by parallel 1) itself
        // need to enhance PartitionerDefinedVertexManager
        if (vmPluginName != null) {
            VertexManagerPluginDescriptor vmPluginDescriptor = VertexManagerPluginDescriptor.create(vmPluginName);
            if (vmPluginConf != null) {
                vmPluginDescriptor.setUserPayload(TezUtils.createUserPayloadFromConf(vmPluginConf));
            }
            vertex.setVertexManagerPlugin(vmPluginDescriptor);
        }
        // Reset udfcontext jobconf. It is not supposed to be set in the front end
        UDFContext.getUDFContext().addJobConf(null);
        return vertex;
    }
/**
 * Collects all {@link POStore} operators from the Tez operator's physical
 * plan and prepares them for output: calls
 * {@link StoreFuncInterface#setStoreLocation} on every store (which may also
 * add delegation tokens to the Job credentials), and configures the
 * streaming log and task output directories in the payload configuration.
 * When there are multiple stores, output is redirected to a temporary
 * location and each store is marked as a multi-store with a sequential index.
 *
 * @param tezOp       Tez operator whose plan is scanned for stores
 * @param payloadConf configuration that receives the streaming dir settings
 * @param job         job updated by each store func's setStoreLocation
 * @return all stores found in the plan (possibly empty)
 * @throws VisitorException if scanning the plan fails
 * @throws IOException if resolving store locations fails
 */
private LinkedList<POStore> processStores(TezOperator tezOp,
        Configuration payloadConf, Job job) throws VisitorException,
        IOException {
    LinkedList<POStore> stores = PlanHelper.getPhysicalOperators(
            tezOp.plan, POStore.class);
    if (stores.size() > 0) {
        ArrayList<POStore> storeLocations = new ArrayList<POStore>();
        for (POStore st : stores) {
            storeLocations.add(st);
            StoreFuncInterface sFunc = st.getStoreFunc();
            // May add delegation tokens to the job credentials
            // (e.g. HBaseStorage) in addition to setting the location.
            sFunc.setStoreLocation(st.getSFile().getFileName(), job);
        }
        Path tmpLocation = null;
        if (stores.size() == 1) {
            POStore st = stores.get(0);
            // set out filespecs
            String outputPathString = st.getSFile().getFileName();
            // Scheme-less or hdfs:// outputs can host the streaming log dir
            // directly; any other scheme gets a temporary path instead.
            if (!outputPathString.contains("://")
                    || outputPathString.startsWith("hdfs://")) {
                payloadConf.set("pig.streaming.log.dir", new Path(
                        outputPathString, JobControlCompiler.LOG_DIR)
                        .toString());
            } else {
                String tmpLocationStr = FileLocalizer.getTemporaryPath(pc)
                        .toString();
                tmpLocation = new Path(tmpLocationStr);
                payloadConf.set("pig.streaming.log.dir", new Path(tmpLocation,
                        JobControlCompiler.LOG_DIR).toString());
            }
            payloadConf.set("pig.streaming.task.output.dir", outputPathString);
            if(tezOp.plan.getLeaves().get(0) instanceof POSplit) {
                // Set this so that we get correct counters
                st.setMultiStore(true);
            }
        } else { // multi store case
            log.info("Setting up multi store job");
            String tmpLocationStr = FileLocalizer.getTemporaryPath(pc)
                    .toString();
            tmpLocation = new Path(tmpLocationStr);
            boolean disableCounter = payloadConf.getBoolean(
                    "pig.disable.counter", false);
            if (disableCounter) {
                log.info("Disable Pig custom output counters");
            }
            // Mark every store as part of a multi-store job and assign a
            // stable index so the outputs can be distinguished.
            int idx = 0;
            for (POStore sto : storeLocations) {
                sto.setDisableCounter(disableCounter);
                sto.setMultiStore(true);
                sto.setIndex(idx++);
            }
            payloadConf.set("pig.streaming.log.dir", new Path(tmpLocation,
                    JobControlCompiler.LOG_DIR).toString());
            payloadConf.set("pig.streaming.task.output.dir",
                    tmpLocation.toString());
        }
    }
    return stores;
}
/**
 * Configures the intermediate (shuffle edge) key/value classes, the
 * partitioner, and the key comparator for a Tez edge.
 *
 * @param keyType              one of the {@link DataType} type bytes
 * @param conf                 edge configuration to update
 * @param tezOp                consuming Tez operator (may be null)
 * @param isConnectedToPackage whether the edge feeds a package operator
 * @param isMergedInput        whether the input is a merged (union) input
 * @throws JobCreationException if the key type cannot be used as a key
 * @throws ExecException        if the key type has no writable-comparable mapping
 */
@SuppressWarnings("rawtypes")
private void setIntermediateOutputKeyValue(byte keyType, Configuration conf, TezOperator tezOp,
        boolean isConnectedToPackage, boolean isMergedInput) throws JobCreationException, ExecException {
    // Decide the intermediate key class first, then apply all settings.
    final String keyClassName;
    final boolean feedsPackage = tezOp != null && isConnectedToPackage;
    if (feedsPackage && tezOp.isUseSecondaryKey()) {
        // Secondary sort carries the full tuple as the key.
        keyClassName = NullableTuple.class.getName();
    } else if (feedsPackage && tezOp.isSkewedJoin()) {
        // Skewed join wraps the key with a partition index.
        keyClassName = NullablePartitionWritable.class.getName();
    } else {
        keyClassName = HDataType.getWritableComparableTypes(keyType)
                .getClass().getName();
    }
    conf.set(TezRuntimeConfiguration.TEZ_RUNTIME_KEY_CLASS, keyClassName);
    conf.set(TezRuntimeConfiguration.TEZ_RUNTIME_VALUE_CLASS,
            NullableTuple.class.getName());
    conf.set(TezRuntimeConfiguration.TEZ_RUNTIME_PARTITIONER_CLASS,
            MRPartitioner.class.getName());
    selectKeyComparator(keyType, conf, tezOp, isMergedInput);
}
/**
 * Returns the bytes-only comparator class for the given key type.
 * <p>
 * These comparators compare serialized bytes directly and are used for
 * faster sorting everywhere except order by. That ordering is good enough
 * to feed the reducer (POShuffleTezLoad), which applies the full comparator
 * (grouping comparator) for correct sorting and grouping.
 * TODO: PIG-4652. Until Tez exposes the bytes of the keys being compared,
 * this can only be used for group by and distinct, which are single inputs
 * in POShuffleTezLoad, and not for joins, which have multiple inputs.
 *
 * @param keyType one of the {@link DataType} type bytes
 * @return the raw-bytes comparator class
 * @throws JobCreationException for unsupported or unknown key types
 */
private static Class<? extends WritableComparator> getRawBytesComparator(
        byte keyType) throws JobCreationException {
    switch (keyType) {
        case DataType.BOOLEAN:
            return PigWritableComparators.PigBooleanRawBytesComparator.class;
        case DataType.INTEGER:
            return PigWritableComparators.PigIntRawBytesComparator.class;
        case DataType.BIGINTEGER:
            return PigWritableComparators.PigBigIntegerRawBytesComparator.class;
        case DataType.BIGDECIMAL:
            return PigWritableComparators.PigBigDecimalRawBytesComparator.class;
        case DataType.LONG:
            return PigWritableComparators.PigLongRawBytesComparator.class;
        case DataType.FLOAT:
            return PigWritableComparators.PigFloatRawBytesComparator.class;
        case DataType.DOUBLE:
            return PigWritableComparators.PigDoubleRawBytesComparator.class;
        case DataType.DATETIME:
            return PigWritableComparators.PigDateTimeRawBytesComparator.class;
        case DataType.CHARARRAY:
            return PigWritableComparators.PigTextRawBytesComparator.class;
        case DataType.BYTEARRAY:
            return PigWritableComparators.PigBytesRawBytesComparator.class;
        case DataType.TUPLE:
            return PigWritableComparators.PigTupleSortBytesComparator.class;
        case DataType.MAP:
            throw new JobCreationException(
                    "Using Map as key not supported.", 1068, PigException.INPUT);
        case DataType.BAG:
            throw new JobCreationException(
                    "Using Bag as key not supported.", 1068, PigException.INPUT);
        default:
            throw new JobCreationException(
                    "Unhandled key type " + DataType.findTypeName(keyType),
                    2036, PigException.BUG);
    }
}
/**
 * Returns the full raw comparator class for the given key type.
 * <p>
 * These are the full comparators used in order-by jobs and as the
 * GroupingComparator in POShuffleTezLoad for other operations. MapReduce
 * uses PigGrouping&lt;DataType&gt;WritableComparator for non-orderby jobs;
 * in Tez we use the raw comparators on the reduce side as well since they
 * are now fixed to handle nulls for different indexes. The grouping
 * comparators are also less efficient (e.g. tuples are iterated for null
 * checking instead of using TupleRawComparator.hasComparedTupleNull())
 * and the raw path skips multi-query index checking.
 *
 * @param keyType one of the {@link DataType} type bytes
 * @return the raw comparator class
 * @throws JobCreationException for unsupported or unknown key types
 */
private static Class<? extends WritableComparator> getRawComparator(byte keyType)
        throws JobCreationException {
    switch (keyType) {
        case DataType.BOOLEAN:
            return PigBooleanRawComparator.class;
        case DataType.INTEGER:
            return PigIntRawComparator.class;
        case DataType.BIGINTEGER:
            return PigBigIntegerRawComparator.class;
        case DataType.BIGDECIMAL:
            return PigBigDecimalRawComparator.class;
        case DataType.LONG:
            return PigLongRawComparator.class;
        case DataType.FLOAT:
            return PigFloatRawComparator.class;
        case DataType.DOUBLE:
            return PigDoubleRawComparator.class;
        case DataType.DATETIME:
            return PigDateTimeRawComparator.class;
        case DataType.CHARARRAY:
            return PigTextRawComparator.class;
        case DataType.BYTEARRAY:
            return PigBytesRawComparator.class;
        case DataType.TUPLE:
            return PigTupleSortComparator.class;
        case DataType.MAP:
            throw new JobCreationException(
                    "Using Map as key not supported.", 1068, PigException.INPUT);
        case DataType.BAG:
            throw new JobCreationException(
                    "Using Bag as key not supported.", 1068, PigException.INPUT);
        default:
            throw new JobCreationException(
                    "Unhandled key type " + DataType.findTypeName(keyType),
                    2036, PigException.BUG);
    }
}
/**
 * Returns the extended raw-bytes comparator class for skewed joins, which
 * unwraps the NullablePartitionWritable before comparing.
 *
 * @param keyType one of the {@link DataType} type bytes
 * @return the raw-bytes partition comparator class
 * @throws JobCreationException for unsupported or unknown key types
 */
private static Class<? extends WritableComparator> getRawBytesComparatorForSkewedJoin(byte keyType)
        throws JobCreationException {
    switch (keyType) {
        case DataType.BOOLEAN:
            return PigWritableComparators.PigBooleanRawBytesPartitionComparator.class;
        case DataType.INTEGER:
            return PigWritableComparators.PigIntRawBytesPartitionComparator.class;
        case DataType.BIGINTEGER:
            return PigWritableComparators.PigBigIntegerRawBytesPartitionComparator.class;
        case DataType.BIGDECIMAL:
            return PigWritableComparators.PigBigDecimalRawBytesPartitionComparator.class;
        case DataType.LONG:
            return PigWritableComparators.PigLongRawBytesPartitionComparator.class;
        case DataType.FLOAT:
            return PigWritableComparators.PigFloatRawBytesPartitionComparator.class;
        case DataType.DOUBLE:
            return PigWritableComparators.PigDoubleRawBytesPartitionComparator.class;
        case DataType.DATETIME:
            return PigWritableComparators.PigDateTimeRawBytesPartitionComparator.class;
        case DataType.CHARARRAY:
            return PigWritableComparators.PigTextRawBytesPartitionComparator.class;
        case DataType.BYTEARRAY:
            return PigWritableComparators.PigBytesRawBytesPartitionComparator.class;
        case DataType.TUPLE:
            return PigWritableComparators.PigTupleSortBytesPartitionComparator.class;
        case DataType.MAP:
            throw new JobCreationException(
                    "Using Map as key not supported.", 1068, PigException.INPUT);
        case DataType.BAG:
            throw new JobCreationException(
                    "Using Bag as key not supported.", 1068, PigException.INPUT);
        default:
            throw new JobCreationException(
                    "Unhandled key type " + DataType.findTypeName(keyType),
                    2036, PigException.BUG);
    }
}
/**
 * Returns the extended full raw comparator class for skewed joins, which
 * unwraps the NullablePartitionWritable before comparing.
 *
 * @param keyType one of the {@link DataType} type bytes
 * @return the raw partition comparator class
 * @throws JobCreationException for unsupported or unknown key types
 */
private static Class<? extends WritableComparator> getRawComparatorForSkewedJoin(byte keyType)
        throws JobCreationException {
    switch (keyType) {
        case DataType.BOOLEAN:
            return PigWritableComparators.PigBooleanRawPartitionComparator.class;
        case DataType.INTEGER:
            return PigWritableComparators.PigIntRawPartitionComparator.class;
        case DataType.BIGINTEGER:
            return PigWritableComparators.PigBigIntegerRawPartitionComparator.class;
        case DataType.BIGDECIMAL:
            return PigWritableComparators.PigBigDecimalRawPartitionComparator.class;
        case DataType.LONG:
            return PigWritableComparators.PigLongRawPartitionComparator.class;
        case DataType.FLOAT:
            return PigWritableComparators.PigFloatRawPartitionComparator.class;
        case DataType.DOUBLE:
            return PigWritableComparators.PigDoubleRawPartitionComparator.class;
        case DataType.DATETIME:
            return PigWritableComparators.PigDateTimeRawPartitionComparator.class;
        case DataType.CHARARRAY:
            return PigWritableComparators.PigTextRawPartitionComparator.class;
        case DataType.BYTEARRAY:
            return PigWritableComparators.PigBytesRawPartitionComparator.class;
        case DataType.TUPLE:
            return PigWritableComparators.PigTupleSortPartitionComparator.class;
        case DataType.MAP:
            throw new JobCreationException(
                    "Using Map as key not supported.", 1068, PigException.INPUT);
        case DataType.BAG:
            throw new JobCreationException(
                    "Using Bag as key not supported.", 1068, PigException.INPUT);
        default:
            throw new JobCreationException(
                    "Unhandled key type " + DataType.findTypeName(keyType),
                    2036, PigException.BUG);
    }
}
/**
 * Selects and sets the Tez runtime key comparator class on the edge
 * configuration, based on the key type and what the consuming vertex does
 * (secondary sort, group by/distinct, skewed join, or other). For secondary
 * sort a grouping comparator is configured as well.
 *
 * @param keyType       one of the {@link DataType} type bytes
 * @param conf          edge configuration to update
 * @param tezOp         consuming Tez operator; the method is a no-op if null
 * @param isMergedInput true if the input is a merged input
 *                      (OrderedGroupedMergedKVInput), e.g. coming from a union
 * @throws JobCreationException if the key type cannot be used as a key
 */
void selectKeyComparator(byte keyType, Configuration conf, TezOperator tezOp, boolean isMergedInput)
        throws JobCreationException {
    // TODO: Handle sorting like in JobControlCompiler
    // TODO: Group comparators as in JobControlCompiler
    if (tezOp == null) {
        return;
    }
    if (tezOp.isUseSecondaryKey()) {
        // Secondary sort: full key comparator plus a grouping comparator
        // that groups on the main key only.
        conf.set(TezRuntimeConfiguration.TEZ_RUNTIME_KEY_COMPARATOR_CLASS,
                PigSecondaryKeyComparator.class.getName());
        setGroupingComparator(conf, PigSecondaryKeyGroupComparator.class.getName());
    } else {
        // If it is not a merged input (OrderedGroupedMergedKVInput) from union then
        // use bytes only comparator. This is temporary till PIG-4652 is done
        if (!isMergedInput && (tezOp.isGroupBy() || tezOp.isDistinct())) {
            conf.setClass(
                    TezRuntimeConfiguration.TEZ_RUNTIME_KEY_COMPARATOR_CLASS,
                    getRawBytesComparator(keyType), RawComparator.class);
        } else if (tezOp.isSkewedJoin()) {
            conf.setClass(TezRuntimeConfiguration.TEZ_RUNTIME_KEY_COMPARATOR_CLASS,
                    getRawComparatorForSkewedJoin(keyType), RawComparator.class);
        } else {
            conf.setClass(
                    TezRuntimeConfiguration.TEZ_RUNTIME_KEY_COMPARATOR_CLASS,
                    getRawComparator(keyType), RawComparator.class);
        }
        // Comparators now
        //   groupby/distinct             : Comparator - RawBytesComparator
        //   groupby/distinct after union : Comparator - RawComparator
        //   orderby                      : Comparator - RawComparator
        //   skewed join                  : Comparator - RawPartitionComparator
        //   Rest (other joins)           : Comparator - RawComparator
        //TODO: In PIG-4652: After Tez support for exposing key bytes
        //   groupby/distinct   : Comparator - RawBytesComparator. No grouping comparator required.
        //   orderby            : Comparator - RawComparator. No grouping comparator required.
        //   skewed join        : Comparator - RawBytesPartitionComparator, GroupingComparator - RawPartitionComparator
        //   Rest (other joins) : Comparator - RawBytesComparator, GroupingComparator - RawComparator
        /*
        if (tezOp.isSkewedJoin()) {
            conf.setClass(TezRuntimeConfiguration.TEZ_RUNTIME_KEY_COMPARATOR_CLASS,
                    getRawBytesComparatorForSkewedJoin(keyType), RawComparator.class);
            setGroupingComparator(conf, getRawComparatorForSkewedJoin(keyType).getName());
        } else if (tezOp.isGroupBy() || tezOp.isDistinct()) {
            conf.setClass(
                    TezRuntimeConfiguration.TEZ_RUNTIME_KEY_COMPARATOR_CLASS,
                    getRawBytesComparator(keyType), RawComparator.class);
        } else if (hasOrderby(tezOp)) {
            conf.setClass(
                    TezRuntimeConfiguration.TEZ_RUNTIME_KEY_COMPARATOR_CLASS,
                    getRawComparator(keyType), RawComparator.class);
        } else {
            conf.setClass(
                    TezRuntimeConfiguration.TEZ_RUNTIME_KEY_COMPARATOR_CLASS,
                    getRawBytesComparator(keyType), RawComparator.class);
            setGroupingComparator(conf, getRawComparator(keyType).getName());
        }
        */
    }
}
/**
 * Returns true if this vertex takes part in an order by: either it
 * performs the global sort (or the limit after it) itself, or it is the
 * sampler vertex whose single successor is the global sort.
 *
 * @param tezOp the Tez operator to inspect
 * @return whether the operator belongs to an order-by pipeline
 */
private boolean hasOrderby(TezOperator tezOp) {
    if (tezOp.isGlobalSort() || tezOp.isLimitAfterSort()) {
        return true;
    }
    // Check if it is an orderby sampler job: exactly one successor that
    // is the global sort.
    List<TezOperator> successors = getPlan().getSuccessors(tezOp);
    return successors != null && successors.size() == 1
            && successors.get(0).isGlobalSort();
}
/**
 * Sets the grouping comparator on the edge configuration.
 * MR equivalent: job.setGroupingComparatorClass() / MRJobConfig.GROUP_COMPARATOR_CLASS.
 * TODO: Check why tez-mapreduce ReduceProcessor uses two different Tez
 * settings for the same MRJobConfig.GROUP_COMPARATOR_CLASS and use only one.
 *
 * @param conf            edge configuration to update
 * @param comparatorClass fully qualified comparator class name
 */
private void setGroupingComparator(Configuration conf, String comparatorClass) {
    // Both settings must carry the same class (see TODO above).
    final String[] settings = {
            TezRuntimeConfiguration.TEZ_RUNTIME_GROUP_COMPARATOR_CLASS,
            TezRuntimeConfiguration.TEZ_RUNTIME_KEY_SECONDARY_COMPARATOR_CLASS
    };
    for (String setting : settings) {
        conf.set(setting, comparatorClass);
    }
}
/**
 * Sets the job's output format. The OutputFormat we report to Hadoop is
 * always PigOutputFormat(Tez); if PigConfiguration.PIG_OUTPUT_LAZY is set
 * and the Hadoop version provides LazyOutputFormat, the output format is
 * wrapped with it (resolved reflectively since it may not exist).
 *
 * @param job the job whose output format is configured
 */
private void setOutputFormat(org.apache.hadoop.mapreduce.Job job) {
    final boolean lazyRequested = "true".equalsIgnoreCase(
            job.getConfiguration().get(PigConfiguration.PIG_OUTPUT_LAZY));
    if (lazyRequested) {
        try {
            // Resolve and invoke LazyOutputFormat.setOutputFormatClass(job, clazz)
            // reflectively; fall through to the default on any failure.
            PigContext
                    .resolveClassName("org.apache.hadoop.mapreduce.lib.output.LazyOutputFormat")
                    .getMethod("setOutputFormatClass",
                            org.apache.hadoop.mapreduce.Job.class, Class.class)
                    .invoke(null, job, PigOutputFormatTez.class);
            return;
        } catch (Exception e) {
            log.warn(PigConfiguration.PIG_OUTPUT_LAZY
                    + " is set but LazyOutputFormat couldn't be loaded. Default PigOutputFormat will be used");
        }
    }
    job.setOutputFormatClass(PigOutputFormatTez.class);
}
}
| |
/*
* Copyright (c) 2017
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.acra.attachment;
import android.content.ContentProvider;
import android.content.ContentResolver;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.MatrixCursor;
import android.net.Uri;
import android.os.Build;
import android.os.ParcelFileDescriptor;
import android.provider.OpenableColumns;
import android.text.TextUtils;
import android.webkit.MimeTypeMap;

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;

import org.acra.ACRA;
import org.acra.annotation.AcraCore;
import org.acra.file.Directory;

import java.io.File;
import java.io.FileNotFoundException;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.regex.Pattern;
/**
* Provides access to attachments for senders
* For uri schema, see {@link AcraCore#attachmentUris()}
*
* @author F43nd1r
* @since 13.03.2017
*/
public class AcraContentProvider extends ContentProvider {
    private static final String[] COLUMNS = {
            OpenableColumns.DISPLAY_NAME, OpenableColumns.SIZE};
    private static final String MIME_TYPE_OCTET_STREAM = "application/octet-stream";
    private String authority;

    @Override
    public boolean onCreate() {
        //noinspection ConstantConditions
        authority = getAuthority(getContext());
        if (ACRA.DEV_LOGGING) ACRA.log.d(ACRA.LOG_TAG, "Registered content provider for authority " + authority);
        return true;
    }

    /**
     * Provides file metadata
     *
     * @param uri           the file uri
     * @param projection    any combination of {@link OpenableColumns#DISPLAY_NAME} and {@link OpenableColumns#SIZE}
     * @param selection     ignored
     * @param selectionArgs ignored
     * @param sortOrder     ignored
     * @return file metadata in a cursor with a single row, or null if the uri cannot be resolved
     */
    @Nullable
    @Override
    public Cursor query(@NonNull Uri uri, @Nullable String[] projection, @Nullable String selection, @Nullable String[] selectionArgs, @Nullable String sortOrder) {
        if (ACRA.DEV_LOGGING) ACRA.log.d(ACRA.LOG_TAG, "Query: " + uri);
        final File file = getFileForUri(uri);
        if (file == null) {
            return null;
        }
        if (projection == null) {
            projection = COLUMNS;
        }
        // LinkedHashMap preserves the column order requested by the caller;
        // columns other than DISPLAY_NAME and SIZE are silently skipped.
        final Map<String, Object> columnValueMap = new LinkedHashMap<>();
        for (String column : projection) {
            if (column.equals(OpenableColumns.DISPLAY_NAME)) {
                columnValueMap.put(OpenableColumns.DISPLAY_NAME, file.getName());
            } else if (column.equals(OpenableColumns.SIZE)) {
                columnValueMap.put(OpenableColumns.SIZE, file.length());
            }
        }
        final MatrixCursor cursor = new MatrixCursor(columnValueMap.keySet().toArray(new String[columnValueMap.size()]), 1);
        cursor.addRow(columnValueMap.values());
        return cursor;
    }

    /**
     * @param uri the file uri
     * @return file represented by uri, or null if it can't be resolved
     */
    @Nullable
    private File getFileForUri(@NonNull Uri uri) {
        if (!ContentResolver.SCHEME_CONTENT.equals(uri.getScheme()) || !authority.equals(uri.getAuthority())) {
            return null;
        }
        final List<String> segments = new ArrayList<>(uri.getPathSegments());
        if (segments.size() < 2) return null;
        // Locale.ROOT: Directory enum constants are ASCII; a locale-sensitive
        // toUpperCase() (e.g. Turkish 'i' -> 'İ') would break valueOf().
        final String dir = segments.remove(0).toUpperCase(Locale.ROOT);
        try {
            final Directory directory = Directory.valueOf(dir);
            //noinspection ConstantConditions
            return directory.getFile(getContext(), TextUtils.join(File.separator, segments));
        } catch (IllegalArgumentException e) {
            // Unknown directory segment - the uri does not point to our content
            return null;
        }
    }

    /**
     * Provides file mimeType
     *
     * @param uri the file uri
     * @return mimeType, default is {@link #MIME_TYPE_OCTET_STREAM}
     * @see #guessMimeType(Uri)
     */
    @NonNull
    @Override
    public String getType(@NonNull Uri uri) {
        return guessMimeType(uri);
    }

    /**
     * @param uri    ignored
     * @param values ignored
     * @throws UnsupportedOperationException always
     */
    @Nullable
    @Override
    public Uri insert(@NonNull Uri uri, @Nullable ContentValues values) {
        throw new UnsupportedOperationException("No insert supported");
    }

    /**
     * @param uri           ignored
     * @param selection     ignored
     * @param selectionArgs ignored
     * @throws UnsupportedOperationException always
     */
    @Override
    public int delete(@NonNull Uri uri, @Nullable String selection, @Nullable String[] selectionArgs) {
        throw new UnsupportedOperationException("No delete supported");
    }

    /**
     * @param uri           ignored
     * @param values        ignored
     * @param selection     ignored
     * @param selectionArgs ignored
     * @throws UnsupportedOperationException always
     */
    @Override
    public int update(@NonNull Uri uri, @Nullable ContentValues values, @Nullable String selection, @Nullable String[] selectionArgs) {
        throw new UnsupportedOperationException("No update supported");
    }

    /**
     * Open a file for read
     *
     * @param uri  the file uri
     * @param mode ignored
     * @return a {@link ParcelFileDescriptor} for the File
     * @throws FileNotFoundException if the file cannot be resolved
     */
    @NonNull
    @Override
    public ParcelFileDescriptor openFile(@NonNull Uri uri, @NonNull String mode) throws FileNotFoundException {
        final File file = getFileForUri(uri);
        if (file == null || !file.exists()) throw new FileNotFoundException("File represented by uri " + uri + " could not be found");
        if (ACRA.DEV_LOGGING) {
            // getCallingPackage() is only available from API 19 (KITKAT)
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
                ACRA.log.d(ACRA.LOG_TAG, getCallingPackage() + " opened " + file.getPath());
            } else {
                ACRA.log.d(ACRA.LOG_TAG, file.getPath() + " was opened by an application");
            }
        }
        return ParcelFileDescriptor.open(file, ParcelFileDescriptor.MODE_READ_ONLY);
    }

    /**
     * @param context a context
     * @return authority of this provider
     */
    @NonNull
    private static String getAuthority(@NonNull Context context) {
        return context.getPackageName() + ".acra";
    }

    /**
     * Get an uri for this content provider for the given file
     *
     * @param context a context
     * @param file    the file
     * @return the uri
     */
    @NonNull
    public static Uri getUriForFile(@NonNull Context context, @NonNull File file) {
        return getUriForFile(context, Directory.ROOT, file.getPath());
    }

    /**
     * Get an uri for this content provider for the given file
     *
     * @param context      a context
     * @param directory    the directory, to with the path is relative
     * @param relativePath the file path
     * @return the uri
     */
    @SuppressWarnings("WeakerAccess")
    @NonNull
    public static Uri getUriForFile(@NonNull Context context, @NonNull Directory directory, @NonNull String relativePath) {
        final Uri.Builder builder = new Uri.Builder()
                .scheme(ContentResolver.SCHEME_CONTENT)
                .authority(getAuthority(context))
                // Locale.ROOT keeps the path segment locale-independent so it
                // round-trips through getFileForUri's toUpperCase(Locale.ROOT)
                .appendPath(directory.name().toLowerCase(Locale.ROOT));
        for (String segment : relativePath.split(Pattern.quote(File.separator))) {
            if (segment.length() > 0) {
                builder.appendPath(segment);
            }
        }
        return builder.build();
    }

    /**
     * Tries to guess the mime type from uri extension
     *
     * @param uri the uri
     * @return the mime type of the uri, with fallback {@link #MIME_TYPE_OCTET_STREAM}
     */
    @NonNull
    public static String guessMimeType(@NonNull Uri uri) {
        String type = null;
        final String fileExtension = MimeTypeMap.getFileExtensionFromUrl(uri
                .toString());
        if (fileExtension != null) {
            // Locale.ROOT: MIME extensions are ASCII keys; locale-sensitive
            // lowercasing (e.g. Turkish dotless i) would miss valid entries.
            type = MimeTypeMap.getSingleton().getMimeTypeFromExtension(
                    fileExtension.toLowerCase(Locale.ROOT));
        }
        if (type == null) {
            type = MIME_TYPE_OCTET_STREAM;
        }
        return type;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.util;
import java.util.ConcurrentModificationException;
import java.util.Iterator;
import java.util.ArrayList;
import java.util.List;
import java.util.NoSuchElementException;
/**
* A low memory linked hash set implementation, which uses an array for storing
* the elements and linked lists for collision resolution. In addition it stores
* elements in a linked list to ensure ordered traversal. This class does not
* support null element.
*
* This class is not thread safe.
*
*/
public class LightWeightLinkedSet<T> extends LightWeightHashSet<T> {
    /**
     * Elements of {@link LightWeightLinkedSet}. Extends the parent's bucket
     * entry with before/after links so all elements also form a doubly
     * linked list in insertion order.
     */
    static class DoubleLinkedElement<T> extends LinkedElement<T> {
        // references to elements within all-element linked list
        private DoubleLinkedElement<T> before;
        private DoubleLinkedElement<T> after;

        public DoubleLinkedElement(T elem, int hashCode) {
            super(elem, hashCode);
            this.before = null;
            this.after = null;
        }

        public String toString() {
            return super.toString();
        }
    }

    // Head (oldest) and tail (newest) of the insertion-ordered list threaded
    // through all elements.
    private DoubleLinkedElement<T> head;
    private DoubleLinkedElement<T> tail;

    /**
     * @param initCapacity
     *          Recommended size of the internal array.
     * @param maxLoadFactor
     *          used to determine when to expand the internal array
     * @param minLoadFactor
     *          used to determine when to shrink the internal array
     */
    public LightWeightLinkedSet(int initCapacity, float maxLoadFactor,
            float minLoadFactor) {
        super(initCapacity, maxLoadFactor, minLoadFactor);
        head = null;
        tail = null;
    }

    // Uses the capacity and load-factor defaults declared by the parent class.
    public LightWeightLinkedSet() {
        this(MINIMUM_CAPACITY, rMaxLoadFactor, rMinLoadFactor);
    }

    /**
     * Add given element to the hash table
     *
     * @return true if the element was not present in the table, false otherwise
     */
    protected boolean addElem(final T element) {
        // validate element
        if (element == null) {
            throw new IllegalArgumentException("Null element is not supported.");
        }
        // find hashCode & index
        final int hashCode = element.hashCode();
        final int index = getIndex(hashCode);
        // return false if already present
        if (containsElem(index, element, hashCode)) {
            return false;
        }

        modification++;
        size++;

        // update bucket linked list: prepend to the bucket's collision chain
        DoubleLinkedElement<T> le = new DoubleLinkedElement<T>(element, hashCode);
        le.next = entries[index];
        entries[index] = le;

        // insert to the end of the all-element linked list
        le.after = null;
        le.before = tail;
        if (tail != null) {
            tail.after = le;
        }
        tail = le;
        if (head == null) {
            head = le;
        }
        return true;
    }

    /**
     * Remove the element corresponding to the key, given key.hashCode() == index.
     *
     * @return Return the entry with the element if exists. Otherwise return null.
     */
    protected DoubleLinkedElement<T> removeElem(final T key) {
        // Parent removes the entry from its hash bucket; we only need to
        // unlink it from the insertion-order list afterwards.
        DoubleLinkedElement<T> found = (DoubleLinkedElement<T>) (super
                .removeElem(key));
        if (found == null) {
            return null;
        }

        // update linked list: splice the element out
        if (found.after != null) {
            found.after.before = found.before;
        }
        if (found.before != null) {
            found.before.after = found.after;
        }
        if (head == found) {
            head = head.after;
        }
        if (tail == found) {
            tail = tail.before;
        }
        return found;
    }

    /**
     * Remove and return first element on the linked list of all elements.
     *
     * @return first element, or null if the set is empty
     */
    public T pollFirst() {
        if (head == null) {
            return null;
        }
        T first = head.element;
        this.remove(first);
        return first;
    }

    /**
     * Return first element on the linked list of all elements.
     *
     * @return first element, or null if the set is empty
     */
    public T first() {
        if (head == null) {
            return null;
        }
        return head.element;
    }

    /**
     * Remove and return first n elements on the linked list of all elements.
     *
     * @return up to the first n elements, in insertion order
     */
    public List<T> pollN(int n) {
        if (n >= size) {
            // if we need to remove all elements then do fast polling
            return pollAll();
        }
        List<T> retList = new ArrayList<T>(n);
        while (n-- > 0 && head != null) {
            T curr = head.element;
            this.removeElem(curr);
            retList.add(curr);
        }
        // The internal array may have become oversized after the removals.
        shrinkIfNecessary();
        return retList;
    }

    /**
     * Remove all elements from the set and return them in order. Traverse the
     * link list, don't worry about hashtable - faster version of the parent
     * method.
     */
    public List<T> pollAll() {
        List<T> retList = new ArrayList<T>(size);
        while (head != null) {
            retList.add(head.element);
            head = head.after;
        }
        // clear() resets the hashtable, size, and the head/tail pointers.
        this.clear();
        return retList;
    }

    @Override
    @SuppressWarnings("unchecked")
    public <U> U[] toArray(U[] a) {
        if (a == null) {
            throw new NullPointerException("Input array can not be null");
        }
        // Grow the destination array if it cannot hold all elements.
        if (a.length < size) {
            a = (U[]) java.lang.reflect.Array.newInstance(a.getClass()
                    .getComponentType(), size);
        }
        // Copy in insertion order by walking the all-element linked list.
        int currentIndex = 0;
        DoubleLinkedElement<T> current = head;
        while (current != null) {
            T curr = current.element;
            a[currentIndex++] = (U) curr;
            current = current.after;
        }
        return a;
    }

    // Iterates in insertion order (unlike the parent's bucket-order iterator).
    public Iterator<T> iterator() {
        return new LinkedSetIterator();
    }

    /**
     * Fail-fast iterator over the insertion-ordered linked list. Detects
     * concurrent modification via the set's modification counter.
     */
    private class LinkedSetIterator implements Iterator<T> {
        /** The starting modification for fail-fast. */
        private final int startModification = modification;
        /** The next element to return. */
        private DoubleLinkedElement<T> next = head;

        @Override
        public boolean hasNext() {
            return next != null;
        }

        @Override
        public T next() {
            if (modification != startModification) {
                throw new ConcurrentModificationException("modification="
                        + modification + " != startModification = " + startModification);
            }
            if (next == null) {
                throw new NoSuchElementException();
            }
            final T e = next.element;
            // find the next element
            next = next.after;
            return e;
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException("Remove is not supported.");
        }
    }

    /**
     * Clear the set. Resize it to the original capacity.
     */
    public void clear() {
        super.clear();
        this.head = null;
        this.tail = null;
    }
}
| |
/*
* Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.config;
import com.hazelcast.config.CacheSimpleConfig.ExpiryPolicyFactoryConfig;
import com.hazelcast.config.CacheSimpleConfig.ExpiryPolicyFactoryConfig.DurationConfig;
import com.hazelcast.config.CacheSimpleConfig.ExpiryPolicyFactoryConfig.TimedExpiryPolicyFactoryConfig;
import com.hazelcast.core.HazelcastException;
import com.hazelcast.internal.config.ConfigUtils;
import com.hazelcast.util.CollectionUtil;
import org.apache.commons.lang3.ArrayUtils;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import static java.text.MessageFormat.format;
class ConfigCompatibilityChecker {
    /**
     * Checks if two {@link Config} instances are compatible. This mostly means that the config values will have the same
     * impact on the behaviour of the system but are not necessarily the same (e.g. a null value is sometimes the same
     * as an empty collection or a disabled config).
     * NOTE: This method checks MOST but NOT ALL configuration. As such it is best used in test scenarios to cover
     * as many config checks as possible automatically.
     *
     * @param c1 the {@link Config} to check
     * @param c2 the {@link Config} to check
     * @return {@code true} if the configs are compatible
     * @throws HazelcastException if configs are incompatible
     * @throws IllegalArgumentException if one of the configs is {@code null}
     */
    static boolean isCompatible(final Config c1, final Config c2) {
        if (c1 == c2) {
            return true;
        }
        if (c1 == null || c2 == null) {
            throw new IllegalArgumentException("One of the two configs is null");
        }
        // NOTE(review): a group-name mismatch returns false while every other mismatch throws;
        // this asymmetry looks deliberate (callers can branch on the name check), so it is
        // documented here rather than changed — confirm before unifying.
        if (!nullSafeEqual(c1.getGroupConfig().getName(), c2.getGroupConfig().getName())) {
            return false;
        }
        if (!nullSafeEqual(c1.getGroupConfig().getPassword(), c2.getGroupConfig().getPassword())) {
            throw new HazelcastException("Incompatible group password");
        }
        checkWanConfigs(c1.getWanReplicationConfigs(), c2.getWanReplicationConfigs());
        // singleton configs: one instance per Config, compared directly
        checkCompatibleConfigs("partition group", c1.getPartitionGroupConfig(), c2.getPartitionGroupConfig(), new PartitionGroupConfigChecker());
        checkCompatibleConfigs("serialization", c1.getSerializationConfig(), c2.getSerializationConfig(), new SerializationConfigChecker());
        checkCompatibleConfigs("services", c1.getServicesConfig(), c2.getServicesConfig(), new ServicesConfigChecker());
        checkCompatibleConfigs("management center", c1.getManagementCenterConfig(), c2.getManagementCenterConfig(), new ManagementCenterConfigChecker());
        checkCompatibleConfigs("hot restart", c1.getHotRestartPersistenceConfig(), c2.getHotRestartPersistenceConfig(), new HotRestartConfigChecker());
        checkCompatibleConfigs("network", c1.getNetworkConfig(), c2.getNetworkConfig(), new NetworkConfigChecker());
        // named configs: compared per name (resolved through each Config's pattern matcher),
        // plus a comparison of the "default" config of each type
        checkCompatibleConfigs("map", c1, c2, c1.getMapConfigs(), c2.getMapConfigs(), new MapConfigChecker());
        checkCompatibleConfigs("ringbuffer", c1, c2, c1.getRingbufferConfigs(), c2.getRingbufferConfigs(), new RingbufferConfigChecker());
        checkCompatibleConfigs("queue", c1, c2, c1.getQueueConfigs(), c2.getQueueConfigs(), new QueueConfigChecker());
        checkCompatibleConfigs("semaphore", c1, c2, getSemaphoreConfigsByName(c1), getSemaphoreConfigsByName(c2), new SemaphoreConfigChecker());
        checkCompatibleConfigs("lock", c1, c2, c1.getLockConfigs(), c2.getLockConfigs(), new LockConfigChecker());
        checkCompatibleConfigs("topic", c1, c2, c1.getTopicConfigs(), c2.getTopicConfigs(), new TopicConfigChecker());
        checkCompatibleConfigs("reliable topic", c1, c2, c1.getReliableTopicConfigs(), c2.getReliableTopicConfigs(), new ReliableTopicConfigChecker());
        checkCompatibleConfigs("cache", c1, c2, c1.getCacheConfigs(), c2.getCacheConfigs(), new CacheSimpleConfigChecker());
        checkCompatibleConfigs("executor", c1, c2, c1.getExecutorConfigs(), c2.getExecutorConfigs(), new ExecutorConfigChecker());
        checkCompatibleConfigs("durable executor", c1, c2, c1.getDurableExecutorConfigs(), c2.getDurableExecutorConfigs(), new DurableExecutorConfigChecker());
        checkCompatibleConfigs("scheduled executor", c1, c2, c1.getScheduledExecutorConfigs(), c2.getScheduledExecutorConfigs(), new ScheduledExecutorConfigChecker());
        checkCompatibleConfigs("map event journal", c1, c2, c1.getMapEventJournalConfigs(), c2.getMapEventJournalConfigs(), new MapEventJournalConfigChecker());
        checkCompatibleConfigs("cache event journal", c1, c2, c1.getCacheEventJournalConfigs(), c2.getCacheEventJournalConfigs(), new CacheEventJournalConfigChecker());
        checkCompatibleConfigs("multimap", c1, c2, c1.getMultiMapConfigs(), c2.getMultiMapConfigs(), new MultimapConfigChecker());
        checkCompatibleConfigs("list", c1, c2, c1.getListConfigs(), c2.getListConfigs(), new ListConfigChecker());
        checkCompatibleConfigs("set", c1, c2, c1.getSetConfigs(), c2.getSetConfigs(), new SetConfigChecker());
        checkCompatibleConfigs("job tracker", c1, c2, c1.getJobTrackerConfigs(), c2.getJobTrackerConfigs(), new JobTrackerConfigChecker());
        return true;
    }
public static void checkWanConfigs(Map<String, WanReplicationConfig> c1, Map<String, WanReplicationConfig> c2) {
if ((c1 != c2 && (c1 == null || c2 == null)) || c1.size() != c2.size()) {
throw new HazelcastException(format("Incompatible wan replication config :\n{0}\n vs \n{1}", c1, c2));
}
final WanReplicationConfigChecker checker = new WanReplicationConfigChecker();
for (Entry<String, WanReplicationConfig> entry : c1.entrySet()) {
checkCompatibleConfigs("wan replication", entry.getValue(), c2.get(entry.getKey()), checker);
}
}
private static Map<String, SemaphoreConfig> getSemaphoreConfigsByName(Config c) {
final Collection<SemaphoreConfig> semaphoreConfigs = c.getSemaphoreConfigs();
final HashMap<String, SemaphoreConfig> configsByName = new HashMap<String, SemaphoreConfig>(semaphoreConfigs.size());
for (SemaphoreConfig config : semaphoreConfigs) {
configsByName.put(config.getName(), config);
}
return configsByName;
}
private static <T> void checkCompatibleConfigs(String type, T c1, T c2, ConfigChecker<T> checker) {
if (!checker.check(c1, c2)) {
throw new HazelcastException(format("Incompatible " + type + " config :\n{0}\n vs \n{1}", c1, c2));
}
}
private static <T> void checkCompatibleConfigs(
String type, Config c1, Config c2,
Map<String, T> configs1, Map<String, T> configs2, ConfigChecker<T> checker) {
final Set<String> configNames = new HashSet<String>(configs1.keySet());
configNames.addAll(configs2.keySet());
for (final String name : configNames) {
final T config1 = ConfigUtils.lookupByPattern(c1.getConfigPatternMatcher(), configs1, name);
final T config2 = ConfigUtils.lookupByPattern(c2.getConfigPatternMatcher(), configs2, name);
if (config1 != null && config2 != null && !checker.check(config1, config2)) {
throw new HazelcastException(format("Incompatible " + type + " config :\n{0}\n vs \n{1}",
config1, config2));
}
}
final T config1 = checker.getDefault(c1);
final T config2 = checker.getDefault(c2);
if (!checker.check(config1, config2)) {
throw new HazelcastException(format("Incompatible default " + type + " config :\n{0}\n vs \n{1}",
config1, config2));
}
}
    /**
     * Base class for checkers that decide whether two configs of type {@code T} are compatible,
     * i.e. would have the same effect on cluster behaviour.
     */
    private abstract static class ConfigChecker<T> {
        // Returns true when the two configs are compatible; implementations must accept nulls.
        abstract boolean check(T t1, T t2);
        // Returns the "default"-named config of this type from the given Config,
        // or null when this checker has no meaningful default.
        T getDefault(Config c) {
            return null;
        }
    }
private static boolean nullSafeEqual(Object a, Object b) {
return (a == b) || (a != null && a.equals(b));
}
private static <T> boolean isCollectionCompatible(Collection<T> c1, Collection<T> c2, ConfigChecker<T> checker) {
if (c1 == c2) {
return true;
}
if (c1 == null || c2 == null || c1.size() != c2.size()) {
return false;
}
final Iterator<T> i1 = c1.iterator();
final Iterator<T> i2 = c2.iterator();
while (i1.hasNext() && i2.hasNext()) {
final T config1 = i1.next();
final T config2 = i2.next();
if (!checker.check(config1, config2)) {
return false;
}
}
return !(i1.hasNext() || i2.hasNext());
}
private static boolean isCompatible(HotRestartConfig c1, HotRestartConfig c2) {
final boolean c1Disabled = c1 == null || !c1.isEnabled();
final boolean c2Disabled = c2 == null || !c2.isEnabled();
return c1 == c2 || (c1Disabled && c2Disabled) ||
(c1 != null && c2 != null && nullSafeEqual(c1.isFsync(), c2.isFsync()));
}
// CONFIG CHECKERS
private static class RingbufferConfigChecker extends ConfigChecker<RingbufferConfig> {
@Override
boolean check(RingbufferConfig c1, RingbufferConfig c2) {
return c1 == c2 || !(c1 == null || c2 == null)
&& nullSafeEqual(c1.getName(), c2.getName())
&& nullSafeEqual(c1.getBackupCount(), c2.getBackupCount())
&& nullSafeEqual(c1.getAsyncBackupCount(), c2.getAsyncBackupCount())
&& nullSafeEqual(c1.getCapacity(), c2.getCapacity())
&& nullSafeEqual(c1.getTimeToLiveSeconds(), c2.getTimeToLiveSeconds())
&& nullSafeEqual(c1.getInMemoryFormat(), c2.getInMemoryFormat())
&& isCompatible(c1.getRingbufferStoreConfig(), c2.getRingbufferStoreConfig());
}
private static boolean isCompatible(RingbufferStoreConfig c1, RingbufferStoreConfig c2) {
final boolean c1Disabled = c1 == null || !c1.isEnabled();
final boolean c2Disabled = c2 == null || !c2.isEnabled();
return c1 == c2 || (c1Disabled && c2Disabled) ||
(c1 != null && c2 != null
&& nullSafeEqual(c1.getClassName(), c2.getClassName())
&& nullSafeEqual(c1.getFactoryClassName(), c2.getFactoryClassName())
&& nullSafeEqual(c1.getProperties(), c2.getProperties()));
}
@Override
RingbufferConfig getDefault(Config c) {
return c.getRingbufferConfig("default");
}
}
public static class EventJournalConfigChecker extends ConfigChecker<EventJournalConfig> {
@Override
boolean check(EventJournalConfig c1, EventJournalConfig c2) {
final boolean c1Disabled = c1 == null || !c1.isEnabled();
final boolean c2Disabled = c2 == null || !c2.isEnabled();
return c1 == c2 || (c1Disabled && c2Disabled) ||
(c1 != null && c2 != null
&& nullSafeEqual(c1.getMapName(), c2.getMapName())
&& nullSafeEqual(c1.getCacheName(), c2.getCacheName())
&& nullSafeEqual(c1.getCapacity(), c2.getCapacity())
&& nullSafeEqual(c1.getTimeToLiveSeconds(), c2.getTimeToLiveSeconds()));
}
}
    /** Event journal checker whose default is the "default"-named map event journal config. */
    public static class MapEventJournalConfigChecker extends EventJournalConfigChecker {
        @Override
        EventJournalConfig getDefault(Config c) {
            return c.getMapEventJournalConfig("default");
        }
    }
    /** Event journal checker whose default is the "default"-named cache event journal config. */
    public static class CacheEventJournalConfigChecker extends EventJournalConfigChecker {
        @Override
        EventJournalConfig getDefault(Config c) {
            return c.getCacheEventJournalConfig("default");
        }
    }
private static class QueueConfigChecker extends ConfigChecker<QueueConfig> {
@Override
boolean check(QueueConfig c1, QueueConfig c2) {
return c1 == c2 || !(c1 == null || c2 == null)
&& nullSafeEqual(c1.getName(), c2.getName())
&& nullSafeEqual(c1.getItemListenerConfigs(), c2.getItemListenerConfigs())
&& nullSafeEqual(c1.getBackupCount(), c2.getBackupCount())
&& nullSafeEqual(c1.getAsyncBackupCount(), c2.getAsyncBackupCount())
&& nullSafeEqual(c1.getMaxSize(), c2.getMaxSize())
&& nullSafeEqual(c1.getEmptyQueueTtl(), c2.getEmptyQueueTtl())
&& isCompatible(c1.getQueueStoreConfig(), c2.getQueueStoreConfig())
&& nullSafeEqual(c1.isStatisticsEnabled(), c2.isStatisticsEnabled())
&& nullSafeEqual(c1.getQuorumName(), c2.getQuorumName());
}
private static boolean isCompatible(QueueStoreConfig c1, QueueStoreConfig c2) {
final boolean c1Disabled = c1 == null || !c1.isEnabled();
final boolean c2Disabled = c2 == null || !c2.isEnabled();
return c1 == c2 || (c1Disabled && c2Disabled) ||
(c1 != null && c2 != null
&& nullSafeEqual(c1.getClassName(), c2.getClassName())
&& nullSafeEqual(c1.getFactoryClassName(), c2.getFactoryClassName())
&& nullSafeEqual(c1.getProperties(), c2.getProperties()));
}
@Override
QueueConfig getDefault(Config c) {
return c.getQueueConfig("default");
}
}
private static class SemaphoreConfigChecker extends ConfigChecker<SemaphoreConfig> {
@Override
boolean check(SemaphoreConfig c1, SemaphoreConfig c2) {
return c1 == c2 || !(c1 == null || c2 == null)
&& nullSafeEqual(c1.getName(), c2.getName())
&& nullSafeEqual(c1.getBackupCount(), c2.getBackupCount())
&& nullSafeEqual(c1.getAsyncBackupCount(), c2.getAsyncBackupCount())
&& nullSafeEqual(c1.getInitialPermits(), c2.getInitialPermits());
}
@Override
SemaphoreConfig getDefault(Config c) {
return c.getSemaphoreConfig("default");
}
}
private static class LockConfigChecker extends ConfigChecker<LockConfig> {
@Override
boolean check(LockConfig c1, LockConfig c2) {
return c1 == c2 || !(c1 == null || c2 == null)
&& nullSafeEqual(c1.getName(), c2.getName())
&& nullSafeEqual(c1.getQuorumName(), c2.getQuorumName());
}
@Override
LockConfig getDefault(Config c) {
return c.getLockConfig("default");
}
}
    /** Compares {@link SetConfig} instances via the shared collection-config comparison. */
    private static class SetConfigChecker extends ConfigChecker<SetConfig> {
        @Override
        boolean check(SetConfig c1, SetConfig c2) {
            // delegates to isCompatible(CollectionConfig, CollectionConfig)
            return isCompatible(c1, c2);
        }

        @Override
        SetConfig getDefault(Config c) {
            return c.getSetConfig("default");
        }
    }
    /** Compares {@link ListConfig} instances via the shared collection-config comparison. */
    private static class ListConfigChecker extends ConfigChecker<ListConfig> {
        @Override
        boolean check(ListConfig c1, ListConfig c2) {
            // delegates to isCompatible(CollectionConfig, CollectionConfig)
            return isCompatible(c1, c2);
        }

        @Override
        ListConfig getDefault(Config c) {
            return c.getListConfig("default");
        }
    }
private static boolean isCompatible(CollectionConfig c1, CollectionConfig c2) {
return c1 == c2 || !(c1 == null || c2 == null)
&& nullSafeEqual(c1.getName(), c2.getName())
&& nullSafeEqual(c1.getItemListenerConfigs(), c2.getItemListenerConfigs())
&& nullSafeEqual(c1.getBackupCount(), c2.getBackupCount())
&& nullSafeEqual(c1.getAsyncBackupCount(), c2.getAsyncBackupCount())
&& nullSafeEqual(c1.getMaxSize(), c2.getMaxSize())
&& nullSafeEqual(c1.isStatisticsEnabled(), c2.isStatisticsEnabled());
}
private static class TopicConfigChecker extends ConfigChecker<TopicConfig> {
@Override
boolean check(TopicConfig c1, TopicConfig c2) {
return c1 == c2 || !(c1 == null || c2 == null)
&& nullSafeEqual(c1.getName(), c2.getName())
&& nullSafeEqual(c1.isGlobalOrderingEnabled(), c2.isGlobalOrderingEnabled())
&& nullSafeEqual(c1.isStatisticsEnabled(), c2.isStatisticsEnabled())
&& nullSafeEqual(c1.isMultiThreadingEnabled(), c2.isMultiThreadingEnabled())
&& nullSafeEqual(c1.getMessageListenerConfigs(), c2.getMessageListenerConfigs());
}
@Override
TopicConfig getDefault(Config c) {
return c.getTopicConfig("default");
}
}
private static class ReliableTopicConfigChecker extends ConfigChecker<ReliableTopicConfig> {
@Override
boolean check(ReliableTopicConfig c1, ReliableTopicConfig c2) {
return c1 == c2 || !(c1 == null || c2 == null)
&& nullSafeEqual(c1.getReadBatchSize(), c2.getReadBatchSize())
&& nullSafeEqual(c1.getName(), c2.getName())
&& nullSafeEqual(c1.isStatisticsEnabled(), c2.isStatisticsEnabled())
&& nullSafeEqual(c1.getMessageListenerConfigs(), c2.getMessageListenerConfigs())
&& nullSafeEqual(c1.getTopicOverloadPolicy(), c2.getTopicOverloadPolicy());
}
@Override
ReliableTopicConfig getDefault(Config c) {
return c.getReliableTopicConfig("default");
}
}
private static class ExecutorConfigChecker extends ConfigChecker<ExecutorConfig> {
@Override
boolean check(ExecutorConfig c1, ExecutorConfig c2) {
if (c1 == c2) {
return true;
}
if (c1 == null || c2 == null) {
return false;
}
final int cap1 = c1.getQueueCapacity();
final int cap2 = c2.getQueueCapacity();
return nullSafeEqual(c1.getName(), c2.getName())
&& nullSafeEqual(c1.getPoolSize(), c2.getPoolSize())
&& (nullSafeEqual(cap1, cap2) || (Math.min(cap1, cap2) == 0 && Math.max(cap1, cap2) == Integer.MAX_VALUE))
&& nullSafeEqual(c1.isStatisticsEnabled(), c2.isStatisticsEnabled());
}
@Override
ExecutorConfig getDefault(Config c) {
return c.getExecutorConfig("default");
}
}
private static class DurableExecutorConfigChecker extends ConfigChecker<DurableExecutorConfig> {
@Override
boolean check(DurableExecutorConfig c1, DurableExecutorConfig c2) {
return c1 == c2 || !(c1 == null || c2 == null)
&& nullSafeEqual(c1.getName(), c2.getName())
&& nullSafeEqual(c1.getPoolSize(), c2.getPoolSize())
&& nullSafeEqual(c1.getDurability(), c2.getDurability())
&& nullSafeEqual(c1.getCapacity(), c2.getCapacity());
}
@Override
DurableExecutorConfig getDefault(Config c) {
return c.getDurableExecutorConfig("default");
}
}
private static class ScheduledExecutorConfigChecker extends ConfigChecker<ScheduledExecutorConfig> {
@Override
boolean check(ScheduledExecutorConfig c1, ScheduledExecutorConfig c2) {
return c1 == c2 || !(c1 == null || c2 == null)
&& nullSafeEqual(c1.getName(), c2.getName())
&& nullSafeEqual(c1.getDurability(), c2.getDurability())
&& nullSafeEqual(c1.getPoolSize(), c2.getPoolSize());
}
@Override
ScheduledExecutorConfig getDefault(Config c) {
return c.getScheduledExecutorConfig("default");
}
}
private static class MultimapConfigChecker extends ConfigChecker<MultiMapConfig> {
@Override
boolean check(MultiMapConfig c1, MultiMapConfig c2) {
return c1 == c2 || !(c1 == null || c2 == null)
&& nullSafeEqual(c1.getName(), c2.getName())
&& nullSafeEqual(c1.getValueCollectionType(), c2.getValueCollectionType())
&& nullSafeEqual(c1.getEntryListenerConfigs(), c2.getEntryListenerConfigs())
&& nullSafeEqual(c1.isBinary(), c2.isBinary())
&& nullSafeEqual(c1.getBackupCount(), c2.getBackupCount())
&& nullSafeEqual(c1.getAsyncBackupCount(), c2.getAsyncBackupCount())
&& nullSafeEqual(c1.isStatisticsEnabled(), c2.isStatisticsEnabled());
}
@Override
MultiMapConfig getDefault(Config c) {
return c.getMultiMapConfig("default");
}
}
    /**
     * Compares {@link JobTrackerConfig} instances. A max thread size of 0 and one equal to the
     * available processor count are treated as equivalent — presumably because 0 resolves to
     * the processor-count default; TODO confirm against JobTrackerConfig defaults.
     */
    private static class JobTrackerConfigChecker extends ConfigChecker<JobTrackerConfig> {
        @Override
        boolean check(JobTrackerConfig c1, JobTrackerConfig c2) {
            if (c1 == c2) {
                return true;
            }
            if (c1 == null || c2 == null) {
                return false;
            }
            final int max1 = c1.getMaxThreadSize();
            final int max2 = c2.getMaxThreadSize();
            return nullSafeEqual(c1.getName(), c2.getName())
                    // {0, availableProcessors} pairs are considered equal (see class comment)
                    && (nullSafeEqual(max1, max2) || (Math.min(max1, max2) == 0 && Math.max(max1, max2) == Runtime.getRuntime().availableProcessors()))
                    && nullSafeEqual(c1.getRetryCount(), c2.getRetryCount())
                    && nullSafeEqual(c1.getChunkSize(), c2.getChunkSize())
                    && nullSafeEqual(c1.getQueueSize(), c2.getQueueSize())
                    && nullSafeEqual(c1.isCommunicateStats(), c2.isCommunicateStats())
                    && nullSafeEqual(c1.getTopologyChangedStrategy(), c2.getTopologyChangedStrategy());
        }

        @Override
        JobTrackerConfig getDefault(Config c) {
            return c.getJobTrackerConfig("default");
        }
    }
    /**
     * Compares {@link CacheSimpleConfig} instances, including expiry policy factory,
     * entry listeners, eviction, WAN replication ref and hot restart sub-configs.
     */
    private static class CacheSimpleConfigChecker extends ConfigChecker<CacheSimpleConfig> {
        @Override
        boolean check(CacheSimpleConfig c1, CacheSimpleConfig c2) {
            // && binds tighter than ||: same reference, or both non-null with all attributes equal
            return c1 == c2 || !(c1 == null || c2 == null)
                    && nullSafeEqual(c1.getName(), c2.getName())
                    && nullSafeEqual(c1.getKeyType(), c2.getKeyType())
                    && nullSafeEqual(c1.getValueType(), c2.getValueType())
                    && nullSafeEqual(c1.isStatisticsEnabled(), c2.isStatisticsEnabled())
                    && nullSafeEqual(c1.isManagementEnabled(), c2.isManagementEnabled())
                    && nullSafeEqual(c1.isReadThrough(), c2.isReadThrough())
                    && nullSafeEqual(c1.isWriteThrough(), c2.isWriteThrough())
                    && nullSafeEqual(c1.getCacheLoaderFactory(), c2.getCacheLoaderFactory())
                    && nullSafeEqual(c1.getCacheWriterFactory(), c2.getCacheWriterFactory())
                    && nullSafeEqual(c1.getCacheLoader(), c2.getCacheLoader())
                    && nullSafeEqual(c1.getCacheWriter(), c2.getCacheWriter())
                    && isCompatible(c1.getExpiryPolicyFactoryConfig(), c2.getExpiryPolicyFactoryConfig())
                    && isCollectionCompatible(c1.getCacheEntryListeners(), c2.getCacheEntryListeners(), new CacheSimpleEntryListenerConfigChecker())
                    && nullSafeEqual(c1.getAsyncBackupCount(), c2.getAsyncBackupCount())
                    && nullSafeEqual(c1.getBackupCount(), c2.getBackupCount())
                    && nullSafeEqual(c1.getInMemoryFormat(), c2.getInMemoryFormat())
                    && isCompatible(c1.getEvictionConfig(), c2.getEvictionConfig())
                    && isCompatible(c1.getWanReplicationRef(), c2.getWanReplicationRef())
                    && nullSafeEqual(c1.getQuorumName(), c2.getQuorumName())
                    && nullSafeEqual(c1.getPartitionLostListenerConfigs(), c2.getPartitionLostListenerConfigs())
                    && nullSafeEqual(c1.getMergePolicy(), c2.getMergePolicy())
                    // qualified to reach the outer class helper, not a local overload
                    && ConfigCompatibilityChecker.isCompatible(c1.getHotRestartConfig(), c2.getHotRestartConfig());
        }
        // Compares expiry policy factory configs by class name and timed factory config.
        private static boolean isCompatible(ExpiryPolicyFactoryConfig c1, ExpiryPolicyFactoryConfig c2) {
            return c1 == c2 || !(c1 == null || c2 == null)
                    && nullSafeEqual(c1.getClassName(), c2.getClassName())
                    && isCompatible(c1.getTimedExpiryPolicyFactoryConfig(), c2.getTimedExpiryPolicyFactoryConfig());
        }
        // Compares timed expiry policy factory configs by policy type and duration.
        private static boolean isCompatible(TimedExpiryPolicyFactoryConfig c1, TimedExpiryPolicyFactoryConfig c2) {
            return c1 == c2 || !(c1 == null || c2 == null)
                    && nullSafeEqual(c1.getExpiryPolicyType(), c2.getExpiryPolicyType())
                    && isCompatible(c1.getDurationConfig(), c2.getDurationConfig());
        }
        // Compares duration configs by amount and time unit.
        private static boolean isCompatible(DurationConfig c1, DurationConfig c2) {
            return c1 == c2 || !(c1 == null || c2 == null)
                    && nullSafeEqual(c1.getDurationAmount(), c2.getDurationAmount())
                    && nullSafeEqual(c1.getTimeUnit(), c2.getTimeUnit());
        }
        // Compares eviction configs (note: no policy-type/strategy-type fields here,
        // unlike MapConfigChecker's eviction comparison).
        private static boolean isCompatible(EvictionConfig c1, EvictionConfig c2) {
            return c1 == c2 || !(c1 == null || c2 == null)
                    && nullSafeEqual(c1.getSize(), c2.getSize())
                    && nullSafeEqual(c1.getMaximumSizePolicy(), c2.getMaximumSizePolicy())
                    && nullSafeEqual(c1.getEvictionPolicy(), c2.getEvictionPolicy())
                    && nullSafeEqual(c1.getComparatorClassName(), c2.getComparatorClassName());
        }
        // Compares WAN replication references.
        private static boolean isCompatible(WanReplicationRef c1, WanReplicationRef c2) {
            return c1 == c2 || !(c1 == null || c2 == null)
                    && nullSafeEqual(c1.getName(), c2.getName())
                    && nullSafeEqual(c1.getMergePolicy(), c2.getMergePolicy())
                    && nullSafeEqual(c1.getFilters(), c2.getFilters())
                    && nullSafeEqual(c1.isRepublishingEnabled(), c2.isRepublishingEnabled());
        }
        @Override
        CacheSimpleConfig getDefault(Config c) {
            return c.getCacheConfig("default");
        }
    }
    /**
     * Compares {@link MapConfig} instances. Max sizes of 0 and {@code Integer.MAX_VALUE} are
     * treated as equivalent (both appear to mean "unbounded" — TODO confirm against MaxSizeConfig).
     */
    private static class MapConfigChecker extends ConfigChecker<MapConfig> {
        @Override
        boolean check(MapConfig c1, MapConfig c2) {
            if (c1 == c2) {
                return true;
            }
            if (c1 == null || c2 == null) {
                return false;
            }
            final int maxSize1 = c1.getMaxSizeConfig().getSize();
            final int maxSize2 = c2.getMaxSizeConfig().getSize();
            return nullSafeEqual(c1.getName(), c2.getName())
                    && nullSafeEqual(c1.getInMemoryFormat(), c2.getInMemoryFormat())
                    && nullSafeEqual(c1.isStatisticsEnabled(), c2.isStatisticsEnabled())
                    && nullSafeEqual(c1.isOptimizeQueries(), c2.isOptimizeQueries())
                    && nullSafeEqual(c1.getCacheDeserializedValues(), c2.getCacheDeserializedValues())
                    && nullSafeEqual(c1.getBackupCount(), c2.getBackupCount())
                    && nullSafeEqual(c1.getAsyncBackupCount(), c2.getAsyncBackupCount())
                    && nullSafeEqual(c1.getTimeToLiveSeconds(), c2.getTimeToLiveSeconds())
                    && nullSafeEqual(c1.getMaxIdleSeconds(), c2.getMaxIdleSeconds())
                    && nullSafeEqual(c1.getEvictionPolicy(), c2.getEvictionPolicy())
                    // {0, Integer.MAX_VALUE} pairs are considered equal (see class comment)
                    && (nullSafeEqual(maxSize1, maxSize2) || (Math.min(maxSize1, maxSize2) == 0 && Math.max(maxSize1, maxSize2) == Integer.MAX_VALUE))
                    && nullSafeEqual(c1.getEvictionPercentage(), c2.getEvictionPercentage())
                    && nullSafeEqual(c1.getMinEvictionCheckMillis(), c2.getMinEvictionCheckMillis())
                    && nullSafeEqual(c1.getMergePolicy(), c2.getMergePolicy())
                    && nullSafeEqual(c1.isReadBackupData(), c2.isReadBackupData())
                    // qualified to reach the outer class helper, not a local overload
                    && ConfigCompatibilityChecker.isCompatible(c1.getHotRestartConfig(), c2.getHotRestartConfig())
                    && isCompatible(c1.getMapStoreConfig(), c2.getMapStoreConfig())
                    && isCompatible(c1.getNearCacheConfig(), c2.getNearCacheConfig())
                    && isCompatible(c1.getWanReplicationRef(), c2.getWanReplicationRef())
                    && isCollectionCompatible(c1.getMapIndexConfigs(), c2.getMapIndexConfigs(), new MapIndexConfigChecker())
                    && isCollectionCompatible(c1.getMapAttributeConfigs(), c2.getMapAttributeConfigs(), new MapAttributeConfigChecker())
                    && isCollectionCompatible(c1.getEntryListenerConfigs(), c2.getEntryListenerConfigs(), new EntryListenerConfigChecker())
                    && nullSafeEqual(c1.getPartitionLostListenerConfigs(), c2.getPartitionLostListenerConfigs())
                    && nullSafeEqual(c1.getPartitioningStrategyConfig(), c2.getPartitioningStrategyConfig());
        }
        // Compares WAN replication references.
        private static boolean isCompatible(WanReplicationRef c1, WanReplicationRef c2) {
            return c1 == c2 || !(c1 == null || c2 == null)
                    && nullSafeEqual(c1.getName(), c2.getName())
                    && nullSafeEqual(c1.getMergePolicy(), c2.getMergePolicy())
                    && nullSafeEqual(c1.getFilters(), c2.getFilters())
                    && nullSafeEqual(c1.isRepublishingEnabled(), c2.isRepublishingEnabled());
        }
        // Compares near cache configs, including their eviction config.
        private static boolean isCompatible(NearCacheConfig c1, NearCacheConfig c2) {
            return c1 == c2 || !(c1 == null || c2 == null)
                    && nullSafeEqual(c1.getName(), c2.getName())
                    && nullSafeEqual(c1.getTimeToLiveSeconds(), c2.getTimeToLiveSeconds())
                    && nullSafeEqual(c1.getMaxSize(), c2.getMaxSize())
                    && nullSafeEqual(c1.getEvictionPolicy(), c2.getEvictionPolicy())
                    && isCompatible(c1.getEvictionConfig(), c2.getEvictionConfig());
        }
        // Compares eviction configs (includes policy/strategy type, unlike the cache checker).
        private static boolean isCompatible(EvictionConfig c1, EvictionConfig c2) {
            return c1 == c2 || !(c1 == null || c2 == null)
                    && nullSafeEqual(c1.getSize(), c2.getSize())
                    && nullSafeEqual(c1.getMaximumSizePolicy(), c2.getMaximumSizePolicy())
                    && nullSafeEqual(c1.getEvictionPolicy(), c2.getEvictionPolicy())
                    && nullSafeEqual(c1.getEvictionPolicyType(), c2.getEvictionPolicyType())
                    && nullSafeEqual(c1.getEvictionStrategyType(), c2.getEvictionStrategyType())
                    && nullSafeEqual(c1.getComparatorClassName(), c2.getComparatorClassName());
        }
        // Compares map store configs; two disabled (or null) configs are equivalent.
        private static boolean isCompatible(MapStoreConfig c1, MapStoreConfig c2) {
            final boolean c1Disabled = c1 == null || !c1.isEnabled();
            final boolean c2Disabled = c2 == null || !c2.isEnabled();
            return c1 == c2 || (c1Disabled && c2Disabled) ||
                    (c1 != null && c2 != null
                    && nullSafeEqual(c1.getClassName(), c2.getClassName())
                    && nullSafeEqual(c1.getFactoryClassName(), c2.getFactoryClassName())
                    && nullSafeEqual(c1.getProperties(), c2.getProperties()));
        }
        @Override
        MapConfig getDefault(Config c) {
            return c.getMapConfig("default");
        }
    }
private static class MapIndexConfigChecker extends ConfigChecker<MapIndexConfig> {
@Override
boolean check(MapIndexConfig c1, MapIndexConfig c2) {
return c1 == c2 || !(c1 == null || c2 == null)
&& nullSafeEqual(c1.getAttribute(), c2.getAttribute())
&& nullSafeEqual(c1.isOrdered(), c2.isOrdered());
}
}
private static class CacheSimpleEntryListenerConfigChecker extends ConfigChecker<CacheSimpleEntryListenerConfig> {
@Override
boolean check(CacheSimpleEntryListenerConfig c1, CacheSimpleEntryListenerConfig c2) {
return c1 == c2 || !(c1 == null || c2 == null)
&& nullSafeEqual(c1.getCacheEntryListenerFactory(), c2.getCacheEntryListenerFactory())
&& nullSafeEqual(c1.getCacheEntryEventFilterFactory(), c2.getCacheEntryEventFilterFactory())
&& nullSafeEqual(c1.isOldValueRequired(), c2.isOldValueRequired())
&& nullSafeEqual(c1.isSynchronous(), c2.isSynchronous());
}
}
private static class EntryListenerConfigChecker extends ConfigChecker<EntryListenerConfig> {
@Override
boolean check(EntryListenerConfig c1, EntryListenerConfig c2) {
return c1 == c2 || !(c1 == null || c2 == null)
&& nullSafeEqual(c1.isLocal(), c2.isLocal())
&& nullSafeEqual(c1.isIncludeValue(), c2.isIncludeValue())
&& nullSafeEqual(c1.getClassName(), c2.getClassName());
}
}
private static class MapAttributeConfigChecker extends ConfigChecker<MapAttributeConfig> {
@Override
boolean check(MapAttributeConfig c1, MapAttributeConfig c2) {
return c1 == c2 || !(c1 == null || c2 == null)
&& nullSafeEqual(c1.getName(), c2.getName())
&& nullSafeEqual(c1.getExtractor(), c2.getExtractor());
}
}
private static class DiscoveryStrategyConfigChecker extends ConfigChecker<DiscoveryStrategyConfig> {
@Override
boolean check(DiscoveryStrategyConfig c1, DiscoveryStrategyConfig c2) {
return c1 == c2 || !(c1 == null || c2 == null)
&& nullSafeEqual(c1.getClassName(), c2.getClassName())
&& nullSafeEqual(c1.getProperties(), c2.getProperties());
}
}
private static class MemberGroupConfigChecker extends ConfigChecker<MemberGroupConfig> {
@Override
boolean check(MemberGroupConfig c1, MemberGroupConfig c2) {
return c1 == c2 || !(c1 == null || c2 == null)
&& nullSafeEqual(new ArrayList<String>(c1.getInterfaces()),
new ArrayList<String>(c2.getInterfaces()));
}
}
private static class SerializerConfigChecker extends ConfigChecker<SerializerConfig> {
@Override
boolean check(SerializerConfig c1, SerializerConfig c2) {
return c1 == c2 || !(c1 == null || c2 == null)
&& nullSafeEqual(c1.getClassName(), c2.getClassName())
&& nullSafeEqual(c1.getTypeClass(), c2.getTypeClass())
&& nullSafeEqual(c1.getTypeClassName(), c2.getTypeClassName());
}
}
    /**
     * Compares {@link NetworkConfig} instances, including join, interfaces, encryption,
     * socket interceptor and SSL sub-configs.
     * NOTE(review): in several helpers below the closing parenthesis of the null-guard group
     * ends early, leaving the trailing nullSafeEqual checks outside the guard. This is still
     * NPE-safe — {@code &&} short-circuits, so the trailing checks only run when the guarded
     * group (which includes the non-null checks) evaluated to true — but the parenthesisation
     * is misleading and worth tidying in a dedicated change.
     */
    private static class NetworkConfigChecker extends ConfigChecker<NetworkConfig> {
        @Override
        boolean check(NetworkConfig c1, NetworkConfig c2) {
            return c1 == c2 || !(c1 == null || c2 == null)
                    && nullSafeEqual(c1.getPort(), c2.getPort())
                    && nullSafeEqual(c1.getPortCount(), c2.getPortCount())
                    && nullSafeEqual(c1.isPortAutoIncrement(), c2.isPortAutoIncrement())
                    && nullSafeEqual(c1.isReuseAddress(), c2.isReuseAddress())
                    && nullSafeEqual(c1.getPublicAddress(), c2.getPublicAddress())
                    && isCompatible(c1.getOutboundPortDefinitions(), c2.getOutboundPortDefinitions())
                    && nullSafeEqual(c1.getOutboundPorts(), c2.getOutboundPorts())
                    && isCompatible(c1.getInterfaces(), c2.getInterfaces())
                    && isCompatible(c1.getJoin(), c2.getJoin())
                    && isCompatible(c1.getSymmetricEncryptionConfig(), c2.getSymmetricEncryptionConfig())
                    && isCompatible(c1.getSocketInterceptorConfig(), c2.getSocketInterceptorConfig())
                    && isCompatible(c1.getSSLConfig(), c2.getSSLConfig());
        }
        // Outbound port definitions match when both are the default (empty, "0" or "*") or equal.
        private static boolean isCompatible(Collection<String> portDefinitions1, Collection<String> portDefinitions2) {
            final String[] defaultValues = {"0", "*"};
            final boolean defaultDefinition1 = CollectionUtil.isEmpty(portDefinitions1) ||
                    (portDefinitions1.size() == 1 && ArrayUtils.contains(defaultValues, portDefinitions1.iterator().next()));
            final boolean defaultDefinition2 = CollectionUtil.isEmpty(portDefinitions2) ||
                    (portDefinitions2.size() == 1 && ArrayUtils.contains(defaultValues, portDefinitions2.iterator().next()));
            return (defaultDefinition1 && defaultDefinition2) || nullSafeEqual(portDefinitions1, portDefinitions2);
        }
        // Compares join configs: multicast, TCP/IP, AWS and discovery sections.
        private static boolean isCompatible(JoinConfig c1, JoinConfig c2) {
            return c1 == c2 || !(c1 == null || c2 == null)
                    && isCompatible(c1.getMulticastConfig(), c2.getMulticastConfig())
                    && isCompatible(c1.getTcpIpConfig(), c2.getTcpIpConfig())
                    && new AwsConfigChecker().check(c1.getAwsConfig(), c2.getAwsConfig())
                    && new DiscoveryConfigChecker().check(c1.getDiscoveryConfig(), c2.getDiscoveryConfig());
        }
        // Disabled (or null) TCP/IP configs are equivalent; otherwise compare attributes.
        // See class NOTE about the required-member check sitting outside the guard group.
        private static boolean isCompatible(TcpIpConfig c1, TcpIpConfig c2) {
            final boolean c1Disabled = c1 == null || !c1.isEnabled();
            final boolean c2Disabled = c2 == null || !c2.isEnabled();
            return c1 == c2 || (c1Disabled && c2Disabled) ||
                    (c1 != null && c2 != null
                    && nullSafeEqual(c1.getConnectionTimeoutSeconds(), c2.getConnectionTimeoutSeconds())
                    && nullSafeEqual(c1.getMembers(), c2.getMembers()))
                    && nullSafeEqual(c1.getRequiredMember(), c2.getRequiredMember());
        }
        // Disabled (or null) multicast configs are equivalent; otherwise compare attributes.
        // See class NOTE about the trailing checks sitting outside the guard group.
        private static boolean isCompatible(MulticastConfig c1, MulticastConfig c2) {
            final boolean c1Disabled = c1 == null || !c1.isEnabled();
            final boolean c2Disabled = c2 == null || !c2.isEnabled();
            return c1 == c2 || (c1Disabled && c2Disabled) ||
                    (c1 != null && c2 != null
                    && nullSafeEqual(c1.getMulticastGroup(), c2.getMulticastGroup())
                    && nullSafeEqual(c1.getMulticastPort(), c2.getMulticastPort()))
                    && nullSafeEqual(c1.getMulticastTimeoutSeconds(), c2.getMulticastTimeoutSeconds())
                    && nullSafeEqual(c1.getMulticastTimeToLive(), c2.getMulticastTimeToLive())
                    && nullSafeEqual(c1.getTrustedInterfaces(), c2.getTrustedInterfaces());
        }
        // Disabled (or null) interfaces configs are equivalent; otherwise compare interface lists.
        private static boolean isCompatible(InterfacesConfig c1, InterfacesConfig c2) {
            final boolean c1Disabled = c1 == null || !c1.isEnabled();
            final boolean c2Disabled = c2 == null || !c2.isEnabled();
            return c1 == c2 || (c1Disabled && c2Disabled) ||
                    (c1 != null && c2 != null
                    && nullSafeEqual(new ArrayList<String>(c1.getInterfaces()), new ArrayList<String>(c2.getInterfaces())));
        }
        // Disabled (or null) encryption configs are equivalent; otherwise compare attributes.
        // See class NOTE about the trailing checks sitting outside the guard group.
        private static boolean isCompatible(SymmetricEncryptionConfig c1, SymmetricEncryptionConfig c2) {
            final boolean c1Disabled = c1 == null || !c1.isEnabled();
            final boolean c2Disabled = c2 == null || !c2.isEnabled();
            return c1 == c2 || (c1Disabled && c2Disabled) ||
                    (c1 != null && c2 != null
                    && nullSafeEqual(c1.getSalt(), c2.getSalt())
                    && nullSafeEqual(c1.getPassword(), c2.getPassword()))
                    && nullSafeEqual(c1.getIterationCount(), c2.getIterationCount())
                    && nullSafeEqual(c1.getAlgorithm(), c2.getAlgorithm())
                    && nullSafeEqual(c1.getKey(), c2.getKey());
        }
        // Disabled (or null) socket interceptor configs are equivalent; otherwise compare attributes.
        // See class NOTE about the trailing checks sitting outside the guard group.
        private static boolean isCompatible(SocketInterceptorConfig c1, SocketInterceptorConfig c2) {
            final boolean c1Disabled = c1 == null || !c1.isEnabled();
            final boolean c2Disabled = c2 == null || !c2.isEnabled();
            return c1 == c2 || (c1Disabled && c2Disabled) ||
                    (c1 != null && c2 != null
                    && nullSafeEqual(c1.getClassName(), c2.getClassName())
                    && nullSafeEqual(c1.getImplementation(), c2.getImplementation()))
                    && nullSafeEqual(c1.getProperties(), c2.getProperties());
        }
        // Disabled (or null) SSL configs are equivalent; otherwise compare attributes.
        // See class NOTE about the trailing checks sitting outside the guard group.
        private static boolean isCompatible(SSLConfig c1, SSLConfig c2) {
            final boolean c1Disabled = c1 == null || !c1.isEnabled();
            final boolean c2Disabled = c2 == null || !c2.isEnabled();
            return c1 == c2 || (c1Disabled && c2Disabled) ||
                    (c1 != null && c2 != null
                    && nullSafeEqual(c1.getFactoryClassName(), c2.getFactoryClassName())
                    && nullSafeEqual(c1.getFactoryImplementation(), c2.getFactoryImplementation()))
                    && nullSafeEqual(c1.getProperties(), c2.getProperties());
        }
    }
private static class DiscoveryConfigChecker extends ConfigChecker<DiscoveryConfig> {
@Override
boolean check(DiscoveryConfig c1, DiscoveryConfig c2) {
final boolean c1Disabled = c1 == null || !c1.isEnabled();
final boolean c2Disabled = c2 == null || !c2.isEnabled();
return c1 == c2 || (c1Disabled && c2Disabled) ||
(c1 != null && c2 != null
&& nullSafeEqual(c1.getNodeFilterClass(), c2.getNodeFilterClass())
&& nullSafeEqual(c1.getDiscoveryServiceProvider(), c2.getDiscoveryServiceProvider())
&& isCollectionCompatible(c1.getDiscoveryStrategyConfigs(), c2.getDiscoveryStrategyConfigs(), new DiscoveryStrategyConfigChecker()));
}
}
private static class AwsConfigChecker extends ConfigChecker<AwsConfig> {
@Override
boolean check(AwsConfig c1, AwsConfig c2) {
final boolean c1Disabled = c1 == null || !c1.isEnabled();
final boolean c2Disabled = c2 == null || !c2.isEnabled();
return c1 == c2 || (c1Disabled && c2Disabled) ||
(c1 != null && c2 != null
&& nullSafeEqual(c1.getAccessKey(), c2.getAccessKey())
&& nullSafeEqual(c1.getSecretKey(), c2.getSecretKey())
&& nullSafeEqual(c1.getRegion(), c2.getRegion())
&& nullSafeEqual(c1.getSecurityGroupName(), c2.getSecurityGroupName())
&& nullSafeEqual(c1.getTagKey(), c2.getTagKey())
&& nullSafeEqual(c1.getTagValue(), c2.getTagValue())
&& nullSafeEqual(c1.getHostHeader(), c2.getHostHeader())
&& nullSafeEqual(c1.getIamRole(), c2.getIamRole())
&& nullSafeEqual(c1.getConnectionTimeoutSeconds(), c2.getConnectionTimeoutSeconds()));
}
}
private static class WanReplicationConfigChecker extends ConfigChecker<WanReplicationConfig> {
@Override
boolean check(WanReplicationConfig c1, WanReplicationConfig c2) {
return c1 == c2 || !(c1 == null || c2 == null)
&& nullSafeEqual(c1.getName(), c2.getName())
&& isCompatible(c1.getWanConsumerConfig(), c2.getWanConsumerConfig())
&& isCollectionCompatible(c1.getWanPublisherConfigs(), c2.getWanPublisherConfigs(), new WanPublisherConfigChecker());
}
private boolean isCompatible(WanConsumerConfig c1, WanConsumerConfig c2) {
return c1 == c2 || !(c1 == null || c2 == null)
&& nullSafeEqual(c1.getClassName(), c2.getClassName())
&& nullSafeEqual(c1.getImplementation(), c2.getImplementation())
&& nullSafeEqual(c1.getProperties(), c2.getProperties());
}
}
private static class WanPublisherConfigChecker extends ConfigChecker<WanPublisherConfig> {
@Override
boolean check(WanPublisherConfig c1, WanPublisherConfig c2) {
return c1 == c2 || !(c1 == null || c2 == null)
&& nullSafeEqual(c1.getGroupName(), c2.getGroupName())
&& nullSafeEqual(c1.getQueueCapacity(), c2.getQueueCapacity())
&& nullSafeEqual(c1.getQueueFullBehavior(), c2.getQueueFullBehavior())
&& new AwsConfigChecker().check(c1.getAwsConfig(), c2.getAwsConfig())
&& new DiscoveryConfigChecker().check(c1.getDiscoveryConfig(), c2.getDiscoveryConfig())
&& nullSafeEqual(c1.getClassName(), c2.getClassName())
&& nullSafeEqual(c1.getImplementation(), c2.getImplementation())
&& nullSafeEqual(c1.getProperties(), c2.getProperties());
}
}
private static class PartitionGroupConfigChecker extends ConfigChecker<PartitionGroupConfig> {
@Override
boolean check(PartitionGroupConfig c1, PartitionGroupConfig c2) {
final boolean c1Disabled = c1 == null || !c1.isEnabled();
final boolean c2Disabled = c2 == null || !c2.isEnabled();
return c1 == c2 || (c1Disabled && c2Disabled) ||
(c1 != null && c2 != null
&& nullSafeEqual(c1.getGroupType(), c2.getGroupType())
&& isCollectionCompatible(c1.getMemberGroupConfigs(), c2.getMemberGroupConfigs(), new MemberGroupConfigChecker()));
}
}
private static class SerializationConfigChecker extends ConfigChecker<SerializationConfig> {
@Override
boolean check(SerializationConfig c1, SerializationConfig c2) {
return c1 == c2 || !(c1 == null || c2 == null)
&& nullSafeEqual(c1.getPortableVersion(), c2.getPortableVersion())
&& nullSafeEqual(c1.getDataSerializableFactoryClasses(), c2.getDataSerializableFactoryClasses())
&& nullSafeEqual(c1.getPortableFactoryClasses(), c2.getPortableFactoryClasses())
&& isCompatible(c1.getGlobalSerializerConfig(), c2.getGlobalSerializerConfig())
&& isCollectionCompatible(c1.getSerializerConfigs(), c2.getSerializerConfigs(), new SerializerConfigChecker())
&& nullSafeEqual(c1.isCheckClassDefErrors(), c2.isCheckClassDefErrors())
&& nullSafeEqual(c1.isUseNativeByteOrder(), c2.isUseNativeByteOrder())
&& nullSafeEqual(c1.getByteOrder(), c2.getByteOrder())
&& nullSafeEqual(c1.isEnableCompression(), c2.isEnableCompression())
&& nullSafeEqual(c1.isEnableSharedObject(), c2.isEnableSharedObject())
&& nullSafeEqual(c1.isAllowUnsafe(), c2.isAllowUnsafe());
}
private static boolean isCompatible(GlobalSerializerConfig c1, GlobalSerializerConfig c2) {
return c1 == c2 || !(c1 == null || c2 == null)
&& nullSafeEqual(c1.getClassName(), c2.getClassName())
&& nullSafeEqual(c1.isOverrideJavaSerialization(), c2.isOverrideJavaSerialization());
}
}
private static class ServicesConfigChecker extends ConfigChecker<ServicesConfig> {
@Override
boolean check(ServicesConfig c1, ServicesConfig c2) {
return c1 == c2 || !(c1 == null || c2 == null)
&& nullSafeEqual(c1.isEnableDefaults(), c2.isEnableDefaults())
&& isCompatible(c1.getServiceConfigs(), c2.getServiceConfigs());
}
private static boolean isCompatible(Collection<ServiceConfig> c1, Collection<ServiceConfig> c2) {
if (c1 == c2) {
return true;
}
if (c1 == null || c2 == null || c1.size() != c2.size()) {
return false;
}
final HashMap<String, ServiceConfig> config1 = new HashMap<String, ServiceConfig>();
final HashMap<String, ServiceConfig> config2 = new HashMap<String, ServiceConfig>();
for (ServiceConfig serviceConfig : c1) {
config1.put(serviceConfig.getName(), serviceConfig);
}
for (ServiceConfig serviceConfig : c2) {
config2.put(serviceConfig.getName(), serviceConfig);
}
if (!config1.keySet().equals(config2.keySet())) {
return false;
}
for (ServiceConfig serviceConfig : c1) {
if (!isCompatible(serviceConfig, config2.get(serviceConfig.getName()))) {
return false;
}
}
return true;
}
private static boolean isCompatible(ServiceConfig c1, ServiceConfig c2) {
final boolean c1Disabled = c1 == null || !c1.isEnabled();
final boolean c2Disabled = c2 == null || !c2.isEnabled();
return c1 == c2 || (c1Disabled && c2Disabled) ||
(c1 != null && c2 != null
&& nullSafeEqual(c1.getName(), c2.getName())
&& nullSafeEqual(c1.getClassName(), c2.getClassName())
&& nullSafeEqual(c1.getImplementation(), c2.getImplementation())
&& nullSafeEqual(c1.getProperties(), c2.getProperties())
&& nullSafeEqual(c1.getConfigObject(), c2.getConfigObject()));
}
}
private static class ManagementCenterConfigChecker extends ConfigChecker<ManagementCenterConfig> {
@Override
boolean check(ManagementCenterConfig c1, ManagementCenterConfig c2) {
final boolean c1Disabled = c1 == null || !c1.isEnabled();
final boolean c2Disabled = c2 == null || !c2.isEnabled();
return c1 == c2 || (c1Disabled && c2Disabled) ||
(c1 != null && c2 != null
&& nullSafeEqual(c1.getUrl(), c2.getUrl())
&& nullSafeEqual(c1.getUpdateInterval(), c2.getUpdateInterval()));
}
}
private static class HotRestartConfigChecker extends ConfigChecker<HotRestartPersistenceConfig> {
@Override
boolean check(HotRestartPersistenceConfig c1, HotRestartPersistenceConfig c2) {
final boolean c1Disabled = c1 == null || !c1.isEnabled();
final boolean c2Disabled = c2 == null || !c2.isEnabled();
return c1 == c2 || (c1Disabled && c2Disabled) ||
(c1 != null && c2 != null
&& nullSafeEqual(c1.getBaseDir(), c2.getBaseDir())
&& nullSafeEqual(c1.getBackupDir(), c2.getBackupDir())
&& nullSafeEqual(c1.getParallelism(), c2.getParallelism())
&& nullSafeEqual(c1.getValidationTimeoutSeconds(), c2.getValidationTimeoutSeconds())
&& nullSafeEqual(c1.getDataLoadTimeoutSeconds(), c2.getDataLoadTimeoutSeconds())
&& nullSafeEqual(c1.getClusterDataRecoveryPolicy(), c2.getClusterDataRecoveryPolicy()));
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.end2end;
import static org.apache.phoenix.query.QueryConstants.BASE_TABLE_BASE_COLUMN_COUNT;
import static org.apache.phoenix.query.QueryConstants.DIVERGED_VIEW_BASE_COLUMN_COUNT;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.List;
import java.util.Properties;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.compile.QueryPlan;
import org.apache.phoenix.exception.SQLExceptionCode;
import org.apache.phoenix.jdbc.PhoenixConnection;
import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
import org.apache.phoenix.jdbc.PhoenixStatement;
import org.apache.phoenix.schema.ColumnNotFoundException;
import org.apache.phoenix.schema.PColumn;
import org.apache.phoenix.schema.PTable;
import org.apache.phoenix.schema.PTableKey;
import org.apache.phoenix.schema.PTableType;
import org.apache.phoenix.util.IndexUtil;
import org.apache.phoenix.util.PhoenixRuntime;
import org.apache.phoenix.util.SchemaUtil;
import org.junit.Test;
import com.google.common.base.Objects;
public class AlterMultiTenantTableWithViewsIT extends SplitSystemCatalogIT {
private Connection getTenantConnection(String tenantId) throws Exception {
Properties tenantProps = new Properties();
tenantProps.setProperty(PhoenixRuntime.TENANT_ID_ATTRIB, tenantId);
return DriverManager.getConnection(getUrl(), tenantProps);
}
private static long getTableSequenceNumber(PhoenixConnection conn, String tableName) throws SQLException {
PTable table = conn.getTable(new PTableKey(conn.getTenantId(), SchemaUtil.normalizeIdentifier(tableName)));
return table.getSequenceNumber();
}
private static short getMaxKeySequenceNumber(PhoenixConnection conn, String tableName) throws SQLException {
PTable table = conn.getTable(new PTableKey(conn.getTenantId(), SchemaUtil.normalizeIdentifier(tableName)));
return SchemaUtil.getMaxKeySeq(table);
}
private static void verifyNewColumns(ResultSet rs, String ... values) throws SQLException {
assertTrue(rs.next());
int i = 1;
for (String value : values) {
assertEquals(value, rs.getString(i++));
}
assertFalse(rs.next());
assertEquals(values.length, i - 1);
}
    // Verifies that ADD COLUMN and DROP COLUMN on a multi-tenant base table
    // propagate to every view in the hierarchy: tenant views, a view-on-view,
    // and a global view.
    @Test
    public void testAddDropColumnToBaseTablePropagatesToEntireViewHierarchy() throws Exception {
        String baseTable = SchemaUtil.getTableName(SCHEMA1, generateUniqueName());
        String view1 = SchemaUtil.getTableName(SCHEMA2, generateUniqueName());
        String view2 = SchemaUtil.getTableName(SCHEMA3, generateUniqueName());
        String view3 = SchemaUtil.getTableName(SCHEMA4, generateUniqueName());
        String view4 = SchemaUtil.getTableName(SCHEMA2, generateUniqueName());
        String tenant1 = TENANT1;
        String tenant2 = TENANT2;
        /* View hierarchy under test:
         *                 baseTable
         *               /     |      \
         *  view1(tenant1) view3(tenant2) view4(global)
         *       /
         *  view2(tenant1)
         */
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String baseTableDDL = "CREATE TABLE " + baseTable + " (TENANT_ID VARCHAR NOT NULL, PK1 VARCHAR NOT NULL, V1 VARCHAR, V2 VARCHAR CONSTRAINT NAME_PK PRIMARY KEY(TENANT_ID, PK1)) MULTI_TENANT = true ";
            conn.createStatement().execute(baseTableDDL);
            // Build the tenant1 branch: view1 on the base table, view2 on view1.
            try (Connection tenant1Conn = getTenantConnection(tenant1)) {
                String view1DDL = "CREATE VIEW " + view1 + " AS SELECT * FROM " + baseTable;
                tenant1Conn.createStatement().execute(view1DDL);
                String view2DDL = "CREATE VIEW " + view2 + " AS SELECT * FROM " + view1;
                tenant1Conn.createStatement().execute(view2DDL);
            }
            // Build the tenant2 branch: view3 directly on the base table.
            try (Connection tenant2Conn = getTenantConnection(tenant2)) {
                String view3DDL = "CREATE VIEW " + view3 + " AS SELECT * FROM " + baseTable;
                tenant2Conn.createStatement().execute(view3DDL);
            }
            // view4 is a global (non-tenant) view on the base table.
            String view4DDL = "CREATE VIEW " + view4 + " AS SELECT * FROM " + baseTable;
            conn.createStatement().execute(view4DDL);
            // Add a column to the base table; it must become selectable everywhere.
            String alterBaseTable = "ALTER TABLE " + baseTable + " ADD V3 VARCHAR";
            conn.createStatement().execute(alterBaseTable);
            // verify that the column is visible to view4
            conn.createStatement().execute("SELECT V3 FROM " + view4);
            // verify that the column is visible to view1 and view2
            try (Connection tenant1Conn = getTenantConnection(tenant1)) {
                tenant1Conn.createStatement().execute("SELECT V3 from " + view1);
                tenant1Conn.createStatement().execute("SELECT V3 from " + view2);
            }
            // verify that the column is visible to view3
            try (Connection tenant2Conn = getTenantConnection(tenant2)) {
                tenant2Conn.createStatement().execute("SELECT V3 from " + view3);
            }
            // Drop a column from the base table; it must disappear everywhere.
            alterBaseTable = "ALTER TABLE " + baseTable + " DROP COLUMN V1";
            conn.createStatement().execute(alterBaseTable);
            // verify that the column is not visible to view4
            try {
                conn.createStatement().execute("SELECT V1 FROM " + view4);
                fail();
            } catch (ColumnNotFoundException e) {
                // expected: V1 was dropped from the base table
            }
            // verify that the column is not visible to view1 and view2
            try (Connection tenant1Conn = getTenantConnection(tenant1)) {
                try {
                    tenant1Conn.createStatement().execute("SELECT V1 from " + view1);
                    fail();
                } catch (ColumnNotFoundException e) {
                    // expected
                }
                try {
                    tenant1Conn.createStatement().execute("SELECT V1 from " + view2);
                    fail();
                } catch (ColumnNotFoundException e) {
                    // expected
                }
            }
            // verify that the column is not visible to view3
            try (Connection tenant2Conn = getTenantConnection(tenant2)) {
                try {
                    tenant2Conn.createStatement().execute("SELECT V1 from " + view3);
                    fail();
                } catch (ColumnNotFoundException e) {
                    // expected
                }
            }
        }
    }
    // Verifies that adding a new PK column to a multi-tenant base table makes
    // that column part of the primary key of every view in the hierarchy.
    @Test
    public void testChangingPKOfBaseTableChangesPKForAllViews() throws Exception {
        String baseTable = SchemaUtil.getTableName(SCHEMA1, generateUniqueName());
        String view1 = SchemaUtil.getTableName(SCHEMA2, generateUniqueName());
        String view2 = SchemaUtil.getTableName(SCHEMA3, generateUniqueName());
        String view3 = SchemaUtil.getTableName(SCHEMA4, generateUniqueName());
        String view4 = SchemaUtil.getTableName(SCHEMA2, generateUniqueName());
        String tenant1 = TENANT1;
        String tenant2 = TENANT2;
        /* View hierarchy under test:
         *                 baseTable
         *               /     |      \
         *  view1(tenant1) view3(tenant2) view4(global)
         *       /
         *  view2(tenant1)
         */
        // Tenant connections stay open across the assertions below, so they are
        // closed manually in the finally block rather than via try-with-resources.
        // NOTE(review): this could likely be rewritten with a multi-resource
        // try-with-resources if opening the tenant connections before the base
        // table DDL is safe — confirm before restructuring.
        Connection tenant1Conn = null, tenant2Conn = null;
        try (Connection globalConn = DriverManager.getConnection(getUrl())) {
            String baseTableDDL = "CREATE TABLE "
                    + baseTable
                    + " (TENANT_ID VARCHAR NOT NULL, PK1 VARCHAR NOT NULL, V1 VARCHAR, V2 VARCHAR CONSTRAINT NAME_PK PRIMARY KEY(TENANT_ID, PK1)) MULTI_TENANT = true ";
            globalConn.createStatement().execute(baseTableDDL);
            tenant1Conn = getTenantConnection(tenant1);
            String view1DDL = "CREATE VIEW " + view1 + " AS SELECT * FROM " + baseTable;
            tenant1Conn.createStatement().execute(view1DDL);
            String view2DDL = "CREATE VIEW " + view2 + " AS SELECT * FROM " + view1;
            tenant1Conn.createStatement().execute(view2DDL);
            tenant2Conn = getTenantConnection(tenant2);
            String view3DDL = "CREATE VIEW " + view3 + " AS SELECT * FROM " + baseTable;
            tenant2Conn.createStatement().execute(view3DDL);
            String view4DDL = "CREATE VIEW " + view4 + " AS SELECT * FROM " + baseTable;
            globalConn.createStatement().execute(view4DDL);
            // Extend the primary key of the base table with a new column.
            String alterBaseTable = "ALTER TABLE " + baseTable + " ADD NEW_PK varchar primary key ";
            globalConn.createStatement().execute(alterBaseTable);
            // verify that the new column new_pk is now part of the primary key for the entire hierarchy
            globalConn.createStatement().execute("SELECT * FROM " + baseTable);
            assertTrue(checkColumnPartOfPk(globalConn.unwrap(PhoenixConnection.class), "NEW_PK", baseTable));
            tenant1Conn.createStatement().execute("SELECT * FROM " + view1);
            assertTrue(checkColumnPartOfPk(tenant1Conn.unwrap(PhoenixConnection.class), "NEW_PK", view1));
            tenant1Conn.createStatement().execute("SELECT * FROM " + view2);
            assertTrue(checkColumnPartOfPk(tenant1Conn.unwrap(PhoenixConnection.class), "NEW_PK", view2));
            tenant2Conn.createStatement().execute("SELECT * FROM " + view3);
            assertTrue(checkColumnPartOfPk(tenant2Conn.unwrap(PhoenixConnection.class), "NEW_PK", view3));
            globalConn.createStatement().execute("SELECT * FROM " + view4);
            assertTrue(checkColumnPartOfPk(globalConn.unwrap(PhoenixConnection.class), "NEW_PK", view4));
        } finally {
            // Best-effort cleanup; close failures must not mask a test failure.
            if (tenant1Conn != null) {
                try {
                    tenant1Conn.close();
                } catch (Throwable ignore) {}
            }
            if (tenant2Conn != null) {
                try {
                    tenant2Conn.close();
                } catch (Throwable ignore) {}
            }
        }
    }
private boolean checkColumnPartOfPk(PhoenixConnection conn, String columnName, String tableName) throws SQLException {
String normalizedTableName = SchemaUtil.normalizeIdentifier(tableName);
PTable table = conn.getTable(new PTableKey(conn.getTenantId(), normalizedTableName));
List<PColumn> pkCols = table.getPKColumns();
String normalizedColumnName = SchemaUtil.normalizeIdentifier(columnName);
for (PColumn pkCol : pkCols) {
if (pkCol.getName().getString().equals(normalizedColumnName)) {
return true;
}
}
return false;
}
private int getIndexOfPkColumn(PhoenixConnection conn, String columnName, String tableName) throws SQLException {
String normalizedTableName = SchemaUtil.normalizeIdentifier(tableName);
PTable table = conn.getTable(new PTableKey(conn.getTenantId(), normalizedTableName));
List<PColumn> pkCols = table.getPKColumns();
String normalizedColumnName = SchemaUtil.normalizeIdentifier(columnName);
int i = 0;
for (PColumn pkCol : pkCols) {
if (pkCol.getName().getString().equals(normalizedColumnName)) {
return i;
}
i++;
}
return -1;
}
    // Verifies that adding PK columns to a multi-tenant base table propagates
    // to tenant views and to the indexes defined on those views, and that the
    // new PK columns can be used in index-backed queries.
    @Test
    public void testAddPKColumnToBaseTableWhoseViewsHaveIndices() throws Exception {
        String baseTable = SchemaUtil.getTableName(SCHEMA1, generateUniqueName());
        String view1 = SchemaUtil.getTableName(SCHEMA2, generateUniqueName());
        String view2 = SchemaUtil.getTableName(SCHEMA3, generateUniqueName());
        String view3 = SchemaUtil.getTableName(SCHEMA4, generateUniqueName());
        String view2Schema = SCHEMA3;
        String view3Schema = SCHEMA4;
        String tenant1 = TENANT1;
        String tenant2 = TENANT2;
        String view2Index = generateUniqueName() + "_IDX";
        String view3Index = generateUniqueName() + "_IDX";
        /* View hierarchy under test:
         *      baseTable(multi-tenant)
         *        /              \
         *  view1(tenant1)   view3(tenant2, index)
         *      /
         *  view2(tenant1, index)
         */
        try (Connection globalConn = DriverManager.getConnection(getUrl())) {
            // make sure that the tables are empty, but reachable
            globalConn
                    .createStatement()
                    .execute(
                            "CREATE TABLE "
                                    + baseTable
                                    + " (TENANT_ID VARCHAR NOT NULL, K1 varchar not null, V1 VARCHAR, V2 VARCHAR CONSTRAINT NAME_PK PRIMARY KEY(TENANT_ID, K1)) MULTI_TENANT = true ");
        }
        String fullView2IndexName = SchemaUtil.getTableName(view2Schema, view2Index);
        try (Connection viewConn = getTenantConnection(tenant1)) {
            // create tenant specific view for tenant1 - view1
            viewConn.createStatement().execute("CREATE VIEW " + view1 + " AS SELECT * FROM " + baseTable);
            PhoenixConnection phxConn = viewConn.unwrap(PhoenixConnection.class);
            assertEquals(0, getTableSequenceNumber(phxConn, view1));
            assertEquals(2, getMaxKeySequenceNumber(phxConn, view1));
            // create a view - view2 on view - view1
            viewConn.createStatement().execute("CREATE VIEW " + view2 + " AS SELECT * FROM " + view1);
            assertEquals(0, getTableSequenceNumber(phxConn, view2));
            assertEquals(2, getMaxKeySequenceNumber(phxConn, view2));
            // create an index on view2
            viewConn.createStatement().execute("CREATE INDEX " + view2Index + " ON " + view2 + " (v1) include (v2)");
            assertEquals(0, getTableSequenceNumber(phxConn, fullView2IndexName));
            assertEquals(4, getMaxKeySequenceNumber(phxConn, fullView2IndexName));
        }
        String fullView3IndexName = SchemaUtil.getTableName(view3Schema, view3Index);
        try (Connection viewConn = getTenantConnection(tenant2)) {
            // create tenant specific view for tenant2 - view3
            viewConn.createStatement().execute("CREATE VIEW " + view3 + " AS SELECT * FROM " + baseTable);
            PhoenixConnection phxConn = viewConn.unwrap(PhoenixConnection.class);
            assertEquals(0, getTableSequenceNumber(phxConn, view3));
            assertEquals(2, getMaxKeySequenceNumber(phxConn, view3));
            // create an index on view3
            viewConn.createStatement().execute("CREATE INDEX " + view3Index + " ON " + view3 + " (v1) include (v2)");
            assertEquals(0, getTableSequenceNumber(phxConn, fullView3IndexName));
            assertEquals(4, getMaxKeySequenceNumber(phxConn, fullView3IndexName));
        }
        // alter the base table by adding 1 non-pk and 2 pk columns
        try (Connection globalConn = DriverManager.getConnection(getUrl())) {
            globalConn.createStatement().execute("ALTER TABLE " + baseTable + " ADD v3 VARCHAR, k2 VARCHAR PRIMARY KEY, k3 VARCHAR PRIMARY KEY");
            assertEquals(4, getMaxKeySequenceNumber(globalConn.unwrap(PhoenixConnection.class), baseTable));
            // Upsert records in the base table
            String upsert = "UPSERT INTO " + baseTable + " (TENANT_ID, K1, K2, K3, V1, V2, V3) VALUES (?, ?, ?, ?, ?, ?, ?)";
            PreparedStatement stmt = globalConn.prepareStatement(upsert);
            stmt.setString(1, tenant1);
            stmt.setString(2, "K1");
            stmt.setString(3, "K2");
            stmt.setString(4, "K3");
            stmt.setString(5, "V1");
            stmt.setString(6, "V2");
            stmt.setString(7, "V3");
            stmt.executeUpdate();
            stmt.setString(1, tenant2);
            stmt.setString(2, "K11");
            stmt.setString(3, "K22");
            stmt.setString(4, "K33");
            stmt.setString(5, "V11");
            stmt.setString(6, "V22");
            stmt.setString(7, "V33");
            stmt.executeUpdate();
            globalConn.commit();
        }
        // Verify now that the sequence number of data table, indexes and views have *not* changed.
        // Also verify that the newly added pk columns show up as pk columns of data table, indexes and views.
        try (Connection viewConn = getTenantConnection(tenant1)) {
            ResultSet rs = viewConn.createStatement().executeQuery("SELECT K2, K3, V3 FROM " + view1);
            PhoenixConnection phxConn = viewConn.unwrap(PhoenixConnection.class);
            assertEquals(2, getIndexOfPkColumn(phxConn, "k2", view1));
            assertEquals(3, getIndexOfPkColumn(phxConn, "k3", view1));
            assertEquals(0, getTableSequenceNumber(phxConn, view1));
            assertEquals(4, getMaxKeySequenceNumber(phxConn, view1));
            verifyNewColumns(rs, "K2", "K3", "V3");
            rs = viewConn.createStatement().executeQuery("SELECT K2, K3, V3 FROM " + view2);
            assertEquals(2, getIndexOfPkColumn(phxConn, "k2", view2));
            assertEquals(3, getIndexOfPkColumn(phxConn, "k3", view2));
            assertEquals(0, getTableSequenceNumber(phxConn, view2));
            assertEquals(4, getMaxKeySequenceNumber(phxConn, view2));
            verifyNewColumns(rs, "K2", "K3", "V3");
            // On the view index the new pk columns appear after the index's own key columns.
            assertEquals(4, getIndexOfPkColumn(phxConn, IndexUtil.getIndexColumnName(null, "k2"), fullView2IndexName));
            assertEquals(5, getIndexOfPkColumn(phxConn, IndexUtil.getIndexColumnName(null, "k3"), fullView2IndexName));
            assertEquals(0, getTableSequenceNumber(phxConn, fullView2IndexName));
            assertEquals(6, getMaxKeySequenceNumber(phxConn, fullView2IndexName));
        }
        try (Connection viewConn = getTenantConnection(tenant2)) {
            ResultSet rs = viewConn.createStatement().executeQuery("SELECT K2, K3, V3 FROM " + view3);
            PhoenixConnection phxConn = viewConn.unwrap(PhoenixConnection.class);
            assertEquals(2, getIndexOfPkColumn(phxConn, "k2", view3));
            assertEquals(3, getIndexOfPkColumn(phxConn, "k3", view3));
            assertEquals(0, getTableSequenceNumber(phxConn, view3));
            // NOTE(review): unlike the tenant1 block, there is no
            // getMaxKeySequenceNumber assertion for view3 here — confirm whether
            // that omission is intentional.
            verifyNewColumns(rs, "K22", "K33", "V33");
            assertEquals(4, getIndexOfPkColumn(phxConn, IndexUtil.getIndexColumnName(null, "k2"), fullView3IndexName));
            assertEquals(5, getIndexOfPkColumn(phxConn, IndexUtil.getIndexColumnName(null, "k3"), fullView3IndexName));
            assertEquals(0, getTableSequenceNumber(phxConn, fullView3IndexName));
            assertEquals(6, getMaxKeySequenceNumber(phxConn, fullView3IndexName));
        }
        // Verify that the index is actually being used when using newly added pk col
        try (Connection viewConn = getTenantConnection(tenant1)) {
            String upsert = "UPSERT INTO " + view2 + " (K1, K2, K3, V1, V2, V3) VALUES ('key1', 'key2', 'key3', 'value1', 'value2', 'value3')";
            viewConn.createStatement().executeUpdate(upsert);
            viewConn.commit();
            Statement stmt = viewConn.createStatement();
            String sql = "SELECT V2 FROM " + view2 + " WHERE V1 = 'value1' AND K3 = 'key3'";
            QueryPlan plan = stmt.unwrap(PhoenixStatement.class).optimizeQuery(sql);
            // The optimizer should pick the view index for this filter.
            assertEquals(fullView2IndexName, plan.getTableRef().getTable().getName().getString());
            ResultSet rs = viewConn.createStatement().executeQuery(sql);
            verifyNewColumns(rs, "value2");
        }
    }
@Test
public void testAddingPkAndKeyValueColumnsToBaseTableWithDivergedView() throws Exception {
String baseTable = SchemaUtil.getTableName(SCHEMA1, generateUniqueName());
String view1 = SchemaUtil.getTableName(SCHEMA2, generateUniqueName());
String divergedView = SchemaUtil.getTableName(SCHEMA4, generateUniqueName());
String divergedViewSchemaName = SchemaUtil.getSchemaNameFromFullName(divergedView);
String divergedViewIndex = generateUniqueName() + "_IDX";
String tenant1 = TENANT1;
String tenant2 = TENANT2;
/* baseTable
/ |
view1(tenant1) divergedView(tenant2)
*/
try (Connection conn = DriverManager.getConnection(getUrl());
Connection tenant1Conn = getTenantConnection(tenant1);
Connection tenant2Conn = getTenantConnection(tenant2)) {
String baseTableDDL = "CREATE TABLE " + baseTable + " (TENANT_ID VARCHAR NOT NULL, PK1 VARCHAR NOT NULL, V1 VARCHAR, V2 VARCHAR, V3 VARCHAR CONSTRAINT NAME_PK PRIMARY KEY(TENANT_ID, PK1)) MULTI_TENANT = true ";
conn.createStatement().execute(baseTableDDL);
String view1DDL = "CREATE VIEW " + view1 + " ( VIEW_COL1 DECIMAL(10,2), VIEW_COL2 CHAR(256)) AS SELECT * FROM " + baseTable;
tenant1Conn.createStatement().execute(view1DDL);
String divergedViewDDL = "CREATE VIEW " + divergedView + " ( VIEW_COL1 DECIMAL(10,2), VIEW_COL2 CHAR(256)) AS SELECT * FROM " + baseTable;
tenant2Conn.createStatement().execute(divergedViewDDL);
// Drop column V2 from the view to have it diverge from the base table
tenant2Conn.createStatement().execute("ALTER VIEW " + divergedView + " DROP COLUMN V2");
// create an index on the diverged view
String indexDDL = "CREATE INDEX " + divergedViewIndex + " ON " + divergedView + " (V1) include (V3)";
tenant2Conn.createStatement().execute(indexDDL);
String alterBaseTable = "ALTER TABLE " + baseTable + " ADD KV VARCHAR, PK2 VARCHAR PRIMARY KEY";
conn.createStatement().execute(alterBaseTable);
// verify that the both columns were added to view1
tenant1Conn.createStatement().execute("SELECT KV from " + view1);
tenant1Conn.createStatement().execute("SELECT PK2 from " + view1);
// verify that only the primary key column PK2 was added to diverged view
tenant2Conn.createStatement().execute("SELECT PK2 from " + divergedView);
try {
tenant2Conn.createStatement().execute("SELECT KV FROM " + divergedView);
} catch (SQLException e) {
assertEquals(SQLExceptionCode.COLUMN_NOT_FOUND.getErrorCode(), e.getErrorCode());
}
// Upsert records in diverged view. Verify that the PK column was added to the index on it.
String upsert = "UPSERT INTO " + divergedView + " (PK1, PK2, V1, V3) VALUES ('PK1', 'PK2', 'V1', 'V3')";
try (Connection viewConn = getTenantConnection(tenant2)) {
viewConn.createStatement().executeUpdate(upsert);
viewConn.commit();
Statement stmt = viewConn.createStatement();
String sql = "SELECT V3 FROM " + divergedView + " WHERE V1 = 'V1' AND PK2 = 'PK2'";
QueryPlan plan = stmt.unwrap(PhoenixStatement.class).optimizeQuery(sql);
assertEquals(SchemaUtil.getTableName(divergedViewSchemaName, divergedViewIndex),
plan.getTableRef().getTable().getName().getString());
ResultSet rs = viewConn.createStatement().executeQuery(sql);
verifyNewColumns(rs, "V3");
}
// For non-diverged view, base table columns will be added at the same position as base table
assertTableDefinition(tenant1Conn, view1, PTableType.VIEW, baseTable, 0, 7, 5, "PK1", "V1", "V2", "V3", "KV", "PK2", "VIEW_COL1", "VIEW_COL2");
// For a diverged view, only base table's pk column will be added and that too at the end.
assertTableDefinition(tenant2Conn, divergedView, PTableType.VIEW, baseTable, 1, 6, DIVERGED_VIEW_BASE_COLUMN_COUNT, "PK1", "V1", "V3", "PK2", "VIEW_COL1", "VIEW_COL2");
// Adding existing column VIEW_COL2 to the base table isn't allowed.
try {
alterBaseTable = "ALTER TABLE " + baseTable + " ADD VIEW_COL2 CHAR(256)";
conn.createStatement().execute(alterBaseTable);
fail();
}
catch (SQLException e) {
assertEquals("Unexpected exception", SQLExceptionCode.CANNOT_MUTATE_TABLE.getErrorCode(), e.getErrorCode());
}
}
}
    // Verifies that ADD COLUMN on a multi-tenant base table updates the base
    // table definition and makes the new columns selectable through the view.
    // NOTE(review): the method name says "Salted" but the DDL below declares no
    // SALT_BUCKETS — confirm whether salting was dropped intentionally.
    @Test
    public void testAddColumnsToSaltedBaseTableWithViews() throws Exception {
        String baseTable = SchemaUtil.getTableName(SCHEMA1, generateUniqueName());
        String view1 = SchemaUtil.getTableName(SCHEMA2, generateUniqueName());
        String tenant = TENANT1;
        try (Connection conn = DriverManager.getConnection(getUrl());
                Connection tenant1Conn = getTenantConnection(tenant)) {
            String baseTableDDL = "CREATE TABLE " + baseTable + " (TENANT_ID VARCHAR NOT NULL, PK1 VARCHAR NOT NULL, V1 VARCHAR, V2 VARCHAR, V3 VARCHAR CONSTRAINT NAME_PK PRIMARY KEY(TENANT_ID, PK1)) MULTI_TENANT = true ";
            conn.createStatement().execute(baseTableDDL);
            String view1DDL = "CREATE VIEW " + view1 + " ( VIEW_COL1 DECIMAL(10,2), VIEW_COL2 CHAR(256)) AS SELECT * FROM " + baseTable;
            tenant1Conn.createStatement().execute(view1DDL);
            // Baseline definitions before the ALTER.
            assertTableDefinition(conn, baseTable, PTableType.TABLE, null, 1, 5, BASE_TABLE_BASE_COLUMN_COUNT, "TENANT_ID", "PK1", "V1", "V2", "V3");
            assertTableDefinition(tenant1Conn, view1, PTableType.VIEW, baseTable, 0, 7, 5, "PK1", "V1", "V2", "V3", "VIEW_COL1", "VIEW_COL2");
            String alterBaseTable = "ALTER TABLE " + baseTable + " ADD KV VARCHAR, PK2 VARCHAR PRIMARY KEY";
            conn.createStatement().execute(alterBaseTable);
            // Base table definition gains KV and PK2; the stored view definition is unchanged.
            assertTableDefinition(conn, baseTable, PTableType.TABLE, null, 2, 7, BASE_TABLE_BASE_COLUMN_COUNT, "TENANT_ID", "PK1", "V1", "V2", "V3", "KV", "PK2");
            assertTableDefinition(tenant1Conn, view1, PTableType.VIEW, baseTable, 0, 7, 5, "PK1", "V1", "V2", "V3", "VIEW_COL1", "VIEW_COL2");
            // verify that the both columns were added to view1
            tenant1Conn.createStatement().execute("SELECT KV from " + view1);
            tenant1Conn.createStatement().execute("SELECT PK2 from " + view1);
        }
    }
    // Verifies that DROP COLUMN on a multi-tenant base table removes the
    // column from the base table and makes it unselectable through the view.
    // NOTE(review): the method name says "Salted" but the DDL below declares no
    // SALT_BUCKETS — confirm whether salting was dropped intentionally.
    @Test
    public void testDropColumnsFromSaltedBaseTableWithViews() throws Exception {
        String baseTable = SchemaUtil.getTableName(SCHEMA1, generateUniqueName());
        String view1 = SchemaUtil.getTableName(SCHEMA2, generateUniqueName());
        String tenant = TENANT1;
        try (Connection conn = DriverManager.getConnection(getUrl());
                Connection tenant1Conn = getTenantConnection(tenant)) {
            String baseTableDDL = "CREATE TABLE " + baseTable + " (TENANT_ID VARCHAR NOT NULL, PK1 VARCHAR NOT NULL, V1 VARCHAR, V2 VARCHAR, V3 VARCHAR CONSTRAINT NAME_PK PRIMARY KEY(TENANT_ID, PK1)) MULTI_TENANT = true ";
            conn.createStatement().execute(baseTableDDL);
            String view1DDL = "CREATE VIEW " + view1 + " ( VIEW_COL1 DECIMAL(10,2), VIEW_COL2 CHAR(256)) AS SELECT * FROM " + baseTable;
            tenant1Conn.createStatement().execute(view1DDL);
            // Baseline definitions before the ALTER.
            assertTableDefinition(conn, baseTable, PTableType.TABLE, null, 1, 5, BASE_TABLE_BASE_COLUMN_COUNT, "TENANT_ID", "PK1", "V1", "V2", "V3");
            assertTableDefinition(tenant1Conn, view1, PTableType.VIEW, baseTable, 0, 7, 5, "PK1", "V1", "V2", "V3", "VIEW_COL1", "VIEW_COL2");
            String alterBaseTable = "ALTER TABLE " + baseTable + " DROP COLUMN V2";
            conn.createStatement().execute(alterBaseTable);
            assertTableDefinition(conn, baseTable, PTableType.TABLE, null, 2, 4, BASE_TABLE_BASE_COLUMN_COUNT, "TENANT_ID", "PK1", "V1", "V3");
            // column adds and drops are no longer propagated to child views, when the parent view is resolved the dropped column is excluded
            assertTableDefinition(tenant1Conn, view1, PTableType.VIEW, baseTable, 0, 7, 5, "PK1", "V1", "V2", "V3", "VIEW_COL1", "VIEW_COL2");
            // verify that the dropped columns aren't visible
            try {
                conn.createStatement().execute("SELECT V2 from " + baseTable);
                fail();
            } catch (SQLException e) {
                // expected: V2 was dropped from the base table
                assertEquals(SQLExceptionCode.COLUMN_NOT_FOUND.getErrorCode(), e.getErrorCode());
            }
            try {
                tenant1Conn.createStatement().execute("SELECT V2 from " + view1);
                fail();
            } catch (SQLException e) {
                // expected: the drop is reflected when resolving through the view
                assertEquals(SQLExceptionCode.COLUMN_NOT_FOUND.getErrorCode(), e.getErrorCode());
            }
        }
    }
/**
 * Verifies that ALTER VIEW only modifies the underlying HBase table descriptor when it must:
 * adding a plain view column, or a column in an already-existing column family, leaves the
 * descriptor untouched, while adding a column in a brand-new column family creates that
 * family on the physical table.
 */
@Test
public void testAlteringViewConditionallyModifiesHTableMetadata() throws Exception {
    String baseTable = SchemaUtil.getTableName(SCHEMA1, generateUniqueName());
    String view1 = SchemaUtil.getTableName(SCHEMA2, generateUniqueName());
    String tenant = TENANT1;
    try (Connection conn = DriverManager.getConnection(getUrl())) {
        String baseTableDDL = "CREATE TABLE " + baseTable + " (TENANT_ID VARCHAR NOT NULL, PK1 VARCHAR NOT NULL, V1 VARCHAR, V2 VARCHAR, V3 VARCHAR CONSTRAINT NAME_PK PRIMARY KEY(TENANT_ID, PK1)) MULTI_TENANT = true ";
        conn.createStatement().execute(baseTableDDL);
        // Snapshot the HBase table descriptor before any view DDL runs.
        TableDescriptor tableDesc1 = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin().getDescriptor(TableName.valueOf(baseTable));
        try (Connection tenant1Conn = getTenantConnection(tenant)) {
            String view1DDL = "CREATE VIEW " + view1 + " ( VIEW_COL1 DECIMAL(10,2), VIEW_COL2 CHAR(256)) AS SELECT * FROM " + baseTable;
            tenant1Conn.createStatement().execute(view1DDL);
            // This should not modify the base table
            String alterView = "ALTER VIEW " + view1 + " ADD NEWCOL1 VARCHAR";
            tenant1Conn.createStatement().execute(alterView);
            TableDescriptor tableDesc2 = tenant1Conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin().getDescriptor(TableName.valueOf(baseTable));
            assertEquals(tableDesc1, tableDesc2);
            // Add a new column family that doesn't already exist in the base table
            alterView = "ALTER VIEW " + view1 + " ADD CF.NEWCOL2 VARCHAR";
            tenant1Conn.createStatement().execute(alterView);
            // Verify that the column family now shows up in the base table descriptor
            tableDesc2 = tenant1Conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin().getDescriptor(TableName.valueOf(baseTable));
            assertFalse(tableDesc2.equals(tableDesc1));
            assertNotNull(tableDesc2.getColumnFamily(Bytes.toBytes("CF")));
            // Add a column with an existing column family. This shouldn't modify the base table.
            alterView = "ALTER VIEW " + view1 + " ADD CF.NEWCOL3 VARCHAR";
            tenant1Conn.createStatement().execute(alterView);
            TableDescriptor tableDesc3 = tenant1Conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin().getDescriptor(TableName.valueOf(baseTable));
            assertTrue(tableDesc3.equals(tableDesc2));
            assertNotNull(tableDesc3.getColumnFamily(Bytes.toBytes("CF")));
        }
    }
}
/**
 * Verifies that adding a column to a multi-tenant base table through a global connection
 * invalidates the tenant connection's metadata cache, so the new column is immediately
 * visible through both the tenant view and the base table.
 */
@Test
public void testCacheInvalidatedAfterAddingColumnToBaseTableWithViews() throws Exception {
    String baseTable = SchemaUtil.getTableName(SCHEMA1, generateUniqueName());
    String viewName = SchemaUtil.getTableName(SCHEMA2, generateUniqueName());
    String tenantId = TENANT1;
    try (Connection globalConn = DriverManager.getConnection(getUrl())) {
        String tableDDL = "CREATE TABLE " + baseTable + " (TENANT_ID VARCHAR NOT NULL, PK1 VARCHAR NOT NULL, V1 VARCHAR CONSTRAINT NAME_PK PRIMARY KEY(TENANT_ID, PK1)) MULTI_TENANT = true" ;
        globalConn.createStatement().execute(tableDDL);
        // create a tenant specific view; use the shared helper instead of hand-building
        // Properties, for consistency with the other tests in this class
        try (Connection tenantConn = getTenantConnection(tenantId)) {
            String viewDDL = "CREATE VIEW " + viewName + " AS SELECT * FROM " + baseTable;
            tenantConn.createStatement().execute(viewDDL);
            // Add a column to the base table using global connection
            globalConn.createStatement().execute("ALTER TABLE " + baseTable + " ADD NEW_COL VARCHAR");
            // Check now whether the tenant connection can see the column that was added
            tenantConn.createStatement().execute("SELECT NEW_COL FROM " + viewName);
            tenantConn.createStatement().execute("SELECT NEW_COL FROM " + baseTable);
        }
    }
}
/**
 * Verifies that dropping a column from a multi-tenant base table through a global
 * connection invalidates the tenant connection's metadata cache, so selecting the
 * dropped column fails through both the tenant view and the base table.
 */
@Test
public void testCacheInvalidatedAfterDroppingColumnFromBaseTableWithViews() throws Exception {
    String baseTable = SchemaUtil.getTableName(SCHEMA1, generateUniqueName());
    String viewName = SchemaUtil.getTableName(SCHEMA2, generateUniqueName());
    String tenantId = TENANT1;
    try (Connection globalConn = DriverManager.getConnection(getUrl())) {
        String tableDDL =
                "CREATE TABLE "
                        + baseTable
                        + " (TENANT_ID VARCHAR NOT NULL, PK1 VARCHAR NOT NULL, V1 VARCHAR CONSTRAINT NAME_PK PRIMARY KEY(TENANT_ID, PK1)) MULTI_TENANT = true" ;
        globalConn.createStatement().execute(tableDDL);
        Properties tenantProps = new Properties();
        tenantProps.setProperty(PhoenixRuntime.TENANT_ID_ATTRIB, tenantId);
        // create a tenant specific view
        try (Connection tenantConn = DriverManager.getConnection(getUrl(), tenantProps)) {
            String viewDDL = "CREATE VIEW " + viewName + " AS SELECT * FROM " + baseTable;
            tenantConn.createStatement().execute(viewDDL);
            // Drop a column from the base table using the global connection
            // (the original comment said "Add" -- the statement below is a DROP)
            globalConn.createStatement()
                    .execute("ALTER TABLE " + baseTable + " DROP COLUMN V1");
            // Check now whether the tenant connection can see the column that was dropped
            try {
                tenantConn.createStatement().execute("SELECT V1 FROM " + viewName);
                fail();
            } catch (ColumnNotFoundException e) {
                // expected: V1 was dropped from the base table
            }
            try {
                tenantConn.createStatement().execute("SELECT V1 FROM " + baseTable);
                fail();
            } catch (ColumnNotFoundException e) {
                // expected: V1 was dropped from the base table
            }
        }
    }
}
/**
 * Asserts the SYSTEM.CATALOG metadata for {@code fullTableName}: the header row
 * (sequence number, column counts) and the column rows (names and ordinal positions).
 * When {@code parentTableName} is non-null, also verifies that each base-table column
 * appears in the view at the same ordinal position with matching column metadata
 * (only the fully-qualified TABLE_NAME is expected to differ).
 *
 * @param conn            connection used to query SYSTEM.CATALOG and database metadata
 * @param fullTableName   schema-qualified name of the table or view under test
 * @param tableType       expected table type (TABLE or VIEW)
 * @param parentTableName base table name when asserting a view, otherwise null
 * @param sequenceNumber  expected TABLE_SEQ_NUM
 * @param columnCount     expected COLUMN_COUNT
 * @param baseColumnCount expected BASE_COLUMN_COUNT
 * @param columnName      expected column names in ordinal order (null entries skipped)
 * @throws Exception on any JDBC failure
 */
public static void assertTableDefinition(Connection conn, String fullTableName, PTableType tableType, String parentTableName, int sequenceNumber, int columnCount, int baseColumnCount, String... columnName) throws Exception {
    String schemaName = SchemaUtil.getSchemaNameFromFullName(fullTableName);
    String tableName = SchemaUtil.getTableNameFromFullName(fullTableName);
    // Fix: the original leaked the PreparedStatement and both metadata ResultSets
    // (only the header-row ResultSet was closed). Use try-with-resources / finally.
    try (PreparedStatement p = conn.prepareStatement("SELECT * FROM \"SYSTEM\".\"CATALOG\" WHERE TABLE_SCHEM=? AND TABLE_NAME=? AND TABLE_TYPE=?")) {
        p.setString(1, schemaName);
        p.setString(2, tableName);
        p.setString(3, tableType.getSerializedValue());
        try (ResultSet rs = p.executeQuery()) {
            assertTrue(rs.next());
            assertEquals(AlterTableWithViewsIT.getSystemCatalogEntriesForTable(conn, fullTableName, "Mismatch in BaseColumnCount"), baseColumnCount, rs.getInt("BASE_COLUMN_COUNT"));
            assertEquals(AlterTableWithViewsIT.getSystemCatalogEntriesForTable(conn, fullTableName, "Mismatch in columnCount"), columnCount, rs.getInt("COLUMN_COUNT"));
            assertEquals(AlterTableWithViewsIT.getSystemCatalogEntriesForTable(conn, fullTableName, "Mismatch in sequenceNumber"), sequenceNumber, rs.getInt("TABLE_SEQ_NUM"));
        }
    }
    ResultSet parentTableColumnsRs = null;
    try {
        if (parentTableName != null) {
            parentTableColumnsRs = conn.getMetaData().getColumns(null, null, parentTableName, null);
            parentTableColumnsRs.next();
        }
        try (ResultSet viewColumnsRs = conn.getMetaData().getColumns(null, schemaName, tableName, null)) {
            for (int i = 0; i < columnName.length; i++) {
                if (columnName[i] != null) {
                    assertTrue(viewColumnsRs.next());
                    assertEquals(AlterTableWithViewsIT.getSystemCatalogEntriesForTable(conn, fullTableName, "Mismatch in columnName: i=" + i), columnName[i], viewColumnsRs.getString(PhoenixDatabaseMetaData.COLUMN_NAME));
                    int viewColOrdinalPos = viewColumnsRs.getInt(PhoenixDatabaseMetaData.ORDINAL_POSITION);
                    assertEquals(AlterTableWithViewsIT.getSystemCatalogEntriesForTable(conn, fullTableName, "Mismatch in ordinalPosition: i=" + i), i+1, viewColOrdinalPos);
                    // validate that all the columns in the base table are present in the view
                    if (parentTableColumnsRs != null && !parentTableColumnsRs.isAfterLast()) {
                        ResultSetMetaData parentTableColumnsMetadata = parentTableColumnsRs.getMetaData();
                        assertEquals(parentTableColumnsMetadata.getColumnCount(), viewColumnsRs.getMetaData().getColumnCount());
                        int parentTableColOrdinalRs = parentTableColumnsRs.getInt(PhoenixDatabaseMetaData.ORDINAL_POSITION);
                        assertEquals(AlterTableWithViewsIT.getSystemCatalogEntriesForTable(conn, fullTableName, "Mismatch in ordinalPosition of view and base table for i=" + i), parentTableColOrdinalRs, viewColOrdinalPos);
                        for (int columnIndex = 1; columnIndex < parentTableColumnsMetadata.getColumnCount(); columnIndex++) {
                            String viewColumnValue = viewColumnsRs.getString(columnIndex);
                            String parentTableColumnValue = parentTableColumnsRs.getString(columnIndex);
                            if (!Objects.equal(viewColumnValue, parentTableColumnValue)) {
                                // Only the TABLE_NAME column is allowed to differ between view and base table rows.
                                if (parentTableColumnsMetadata.getColumnName(columnIndex).equals(PhoenixDatabaseMetaData.TABLE_NAME)) {
                                    assertEquals(parentTableName, parentTableColumnValue);
                                    assertEquals(fullTableName, viewColumnValue);
                                }
                            }
                        }
                        parentTableColumnsRs.next();
                    }
                }
            }
            assertFalse(AlterTableWithViewsIT.getSystemCatalogEntriesForTable(conn, fullTableName, ""), viewColumnsRs.next());
        }
    } finally {
        if (parentTableColumnsRs != null) {
            parentTableColumnsRs.close();
        }
    }
}
}
| |
/*
* Copyright 2012 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.testsuite.transport.udt;
import static org.junit.Assert.*;
import io.netty.bootstrap.Bootstrap;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.Channel;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundMessageHandlerAdapter;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.group.ChannelGroup;
import io.netty.channel.group.DefaultChannelGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.udt.UdtChannel;
import io.netty.channel.udt.nio.NioUdtProvider;
import io.netty.handler.codec.DelimiterBasedFrameDecoder;
import io.netty.handler.codec.Delimiters;
import io.netty.handler.codec.string.StringDecoder;
import io.netty.handler.codec.string.StringEncoder;
import io.netty.util.CharsetUtil;
import java.util.concurrent.atomic.AtomicInteger;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Verify UDT connect/disconnect life cycle.
 * <p>
 * A {@link Server} and a {@link Client} each run on a dedicated thread; the test drives
 * them through start / connect / disconnect / shutdown and polls their volatile state
 * flags (via the {@code waitFor*} helpers) rather than blocking on the channels directly.
 * The helpers time out silently after {@link #WAIT_COUNT} * {@link #WAIT_SLEEP} ms; the
 * test asserts the flags afterwards.
 */
public class UDTClientServerConnectionTest {
    /** Connects to the server, then idles until {@link #shutdown()} is called. */
    static class Client implements Runnable {
        static final Logger log = LoggerFactory.getLogger(Client.class);
        final String host;
        final int port;
        volatile Channel channel;
        volatile boolean isRunning;
        volatile boolean isShutdown;
        Client(final String host, final int port) {
            this.host = host;
            this.port = port;
        }
        @Override
        public void run() {
            final Bootstrap boot = new Bootstrap();
            final ThreadFactory clientFactory = new ThreadFactory("client");
            final NioEventLoopGroup connectGroup = new NioEventLoopGroup(1,
                    clientFactory, NioUdtProvider.BYTE_PROVIDER);
            try {
                boot.group(connectGroup)
                        .channelFactory(NioUdtProvider.BYTE_CONNECTOR)
                        .handler(new ChannelInitializer<UdtChannel>() {
                            @Override
                            protected void initChannel(final UdtChannel ch)
                                    throws Exception {
                                // line-delimited UTF-8 string pipeline
                                final ChannelPipeline pipeline = ch.pipeline();
                                pipeline.addLast("framer",
                                        new DelimiterBasedFrameDecoder(8192,
                                                Delimiters.lineDelimiter()));
                                pipeline.addLast("decoder", new StringDecoder(
                                        CharsetUtil.UTF_8));
                                pipeline.addLast("encoder", new StringEncoder(
                                        CharsetUtil.UTF_8));
                                pipeline.addLast("handler", new ClientHandler());
                            }
                        });
                channel = boot.connect(host, port).sync().channel();
                isRunning = true;
                log.info("Client ready.");
                // block here until shutdown() flips isRunning to false
                waitForRunning(false);
                log.info("Client closing...");
                channel.close().sync();
                isShutdown = true;
                log.info("Client is done.");
            } catch (final Throwable e) {
                log.error("Client failed.", e);
            } finally {
                connectGroup.shutdownGracefully();
            }
        }
        /** Signals {@link #run()} to close the connection and finish. */
        void shutdown() {
            isRunning = false;
        }
        /** Polls until the pipeline handler reports the requested active state, or times out. */
        void waitForActive(final boolean isActive) throws Exception {
            for (int k = 0; k < WAIT_COUNT; k++) {
                Thread.sleep(WAIT_SLEEP);
                final ClientHandler handler = channel.pipeline().get(
                        ClientHandler.class);
                if (handler != null && isActive == handler.isActive) {
                    return;
                }
            }
        }
        /** Polls until the running flag matches, or times out silently (caller asserts). */
        void waitForRunning(final boolean isRunning) throws Exception {
            for (int k = 0; k < WAIT_COUNT; k++) {
                // the parameter intentionally shadows the field; compare with this.isRunning
                if (isRunning == this.isRunning) {
                    return;
                }
                Thread.sleep(WAIT_SLEEP);
            }
        }
        /** Polls until shutdown has completed; package-private for consistency with Server. */
        void waitForShutdown() throws Exception {
            for (int k = 0; k < WAIT_COUNT; k++) {
                if (isShutdown) {
                    return;
                }
                Thread.sleep(WAIT_SLEEP);
            }
        }
    }
    /** Tracks channel-active state on the client side and logs traffic. */
    static class ClientHandler extends
            ChannelInboundMessageHandlerAdapter<String> {
        static final Logger log = LoggerFactory.getLogger(ClientHandler.class);
        volatile boolean isActive;
        ClientHandler() {
        }
        @Override
        public void channelActive(final ChannelHandlerContext ctx)
                throws Exception {
            isActive = true;
            log.info("Client active {}", ctx.channel());
            super.channelActive(ctx);
        }
        @Override
        public void channelInactive(final ChannelHandlerContext ctx)
                throws Exception {
            isActive = false;
            log.info("Client inactive {}", ctx.channel());
            super.channelInactive(ctx);
        }
        @Override
        public void exceptionCaught(final ChannelHandlerContext ctx,
                final Throwable cause) throws Exception {
            log.warn("Client unexpected exception from downstream.", cause);
            ctx.close();
        }
        @Override
        public void messageReceived(final ChannelHandlerContext ctx,
                final String msg) throws Exception {
            log.info("Client received: " + msg);
        }
    }
    /** Accepts UDT connections, tracking children in a {@link ChannelGroup}. */
    static class Server implements Runnable {
        static final Logger log = LoggerFactory.getLogger(Server.class);
        final ChannelGroup group = new DefaultChannelGroup("server group");
        final String host;
        final int port;
        volatile Channel channel;
        volatile boolean isRunning;
        volatile boolean isShutdown;
        Server(final String host, final int port) {
            this.host = host;
            this.port = port;
        }
        @Override
        public void run() {
            final ServerBootstrap boot = new ServerBootstrap();
            final ThreadFactory acceptFactory = new ThreadFactory("accept");
            final ThreadFactory serverFactory = new ThreadFactory("server");
            final NioEventLoopGroup acceptGroup = new NioEventLoopGroup(1,
                    acceptFactory, NioUdtProvider.BYTE_PROVIDER);
            final NioEventLoopGroup connectGroup = new NioEventLoopGroup(1,
                    serverFactory, NioUdtProvider.BYTE_PROVIDER);
            try {
                boot.group(acceptGroup, connectGroup)
                        .channelFactory(NioUdtProvider.BYTE_ACCEPTOR)
                        .childHandler(new ChannelInitializer<UdtChannel>() {
                            @Override
                            protected void initChannel(final UdtChannel ch)
                                    throws Exception {
                                // same line-delimited UTF-8 string pipeline as the client
                                final ChannelPipeline pipeline = ch.pipeline();
                                pipeline.addLast("framer",
                                        new DelimiterBasedFrameDecoder(8192,
                                                Delimiters.lineDelimiter()));
                                pipeline.addLast("decoder", new StringDecoder(
                                        CharsetUtil.UTF_8));
                                pipeline.addLast("encoder", new StringEncoder(
                                        CharsetUtil.UTF_8));
                                pipeline.addLast("handler", new ServerHandler(
                                        group));
                            }
                        });
                channel = boot.bind(port).sync().channel();
                isRunning = true;
                log.info("Server ready.");
                // block here until shutdown() flips isRunning to false
                waitForRunning(false);
                log.info("Server closing acceptor...");
                channel.close().sync();
                log.info("Server closing connectors...");
                group.close().sync();
                isShutdown = true;
                log.info("Server is done.");
            } catch (final Throwable e) {
                log.error("Server failure.", e);
            } finally {
                acceptGroup.shutdownGracefully();
                connectGroup.shutdownGracefully();
            }
        }
        /** Signals {@link #run()} to close the acceptor and all children. */
        void shutdown() {
            isRunning = false;
        }
        /**
         * Polls until a child handler reports active (isActive == true), or until the
         * channel group is empty (isActive == false); times out silently otherwise.
         */
        void waitForActive(final boolean isActive) throws Exception {
            for (int k = 0; k < WAIT_COUNT; k++) {
                Thread.sleep(WAIT_SLEEP);
                if (isActive) {
                    for (final Channel channel : group) {
                        final ServerHandler handler = channel.pipeline().get(
                                ServerHandler.class);
                        if (handler != null && handler.isActive) {
                            return;
                        }
                    }
                } else {
                    if (group.size() == 0) {
                        return;
                    }
                }
            }
        }
        /** Polls until the running flag matches, or times out silently (caller asserts). */
        void waitForRunning(final boolean isRunning) throws Exception {
            for (int k = 0; k < WAIT_COUNT; k++) {
                // the parameter intentionally shadows the field; compare with this.isRunning
                if (isRunning == this.isRunning) {
                    return;
                }
                Thread.sleep(WAIT_SLEEP);
            }
        }
        /** Polls until shutdown has completed, or times out silently (caller asserts). */
        void waitForShutdown() throws Exception {
            for (int k = 0; k < WAIT_COUNT; k++) {
                if (isShutdown) {
                    return;
                }
                Thread.sleep(WAIT_SLEEP);
            }
        }
    }
    /** Adds/removes children to the shared group and tracks active state. */
    static class ServerHandler extends
            ChannelInboundMessageHandlerAdapter<String> {
        static final Logger log = LoggerFactory.getLogger(ServerHandler.class);
        final ChannelGroup group;
        volatile boolean isActive;
        ServerHandler(final ChannelGroup group) {
            this.group = group;
        }
        @Override
        public void channelActive(final ChannelHandlerContext ctx)
                throws Exception {
            group.add(ctx.channel());
            isActive = true;
            log.info("Server active : {}", ctx.channel());
            super.channelActive(ctx);
        }
        @Override
        public void channelInactive(final ChannelHandlerContext ctx)
                throws Exception {
            group.remove(ctx.channel());
            isActive = false;
            log.info("Server inactive: {}", ctx.channel());
            super.channelInactive(ctx);
        }
        @Override
        public void exceptionCaught(final ChannelHandlerContext ctx,
                final Throwable cause) {
            log.warn("Server close on exception.", cause);
            ctx.close();
        }
        @Override
        public void messageReceived(final ChannelHandlerContext ctx,
                final String message) throws Exception {
            log.info("Server received: " + message);
        }
    }
    /** Names threads "{name}-{counter}" so client/server threads are distinguishable in logs. */
    static class ThreadFactory implements java.util.concurrent.ThreadFactory {
        static final AtomicInteger counter = new AtomicInteger();
        final String name;
        ThreadFactory(final String name) {
            this.name = name;
        }
        @Override
        public Thread newThread(final Runnable runnable) {
            return new Thread(runnable, name + '-' + counter.getAndIncrement());
        }
    }
    static final Logger log = LoggerFactory
            .getLogger(UDTClientServerConnectionTest.class);
    /**
     * Maximum wait time is 5 seconds.
     * <p>
     * wait-time = {@link #WAIT_COUNT} * {@value #WAIT_SLEEP}
     */
    static final int WAIT_COUNT = 50;
    static final int WAIT_SLEEP = 100;
    /**
     * Verify UDT client/server connect and disconnect.
     */
    @Test
    public void connection() throws Exception {
        final String host = "localhost";
        final int port = 1234;
        log.info("Starting server.");
        final Server server = new Server(host, port);
        // fixed typo: was "serverTread"
        final Thread serverThread = new Thread(server, "server-*");
        serverThread.start();
        server.waitForRunning(true);
        assertTrue(server.isRunning);
        log.info("Starting client.");
        final Client client = new Client(host, port);
        final Thread clientThread = new Thread(client, "client-*");
        clientThread.start();
        client.waitForRunning(true);
        assertTrue(client.isRunning);
        log.info("Wait till connection is active.");
        client.waitForActive(true);
        server.waitForActive(true);
        log.info("Verify connection is active.");
        assertEquals("group must have one", 1, server.group.size());
        log.info("Stopping client.");
        client.shutdown();
        client.waitForShutdown();
        assertTrue(client.isShutdown);
        log.info("Wait till connection is inactive.");
        client.waitForActive(false);
        server.waitForActive(false);
        log.info("Verify connection is inactive.");
        assertEquals("group must be empty", 0, server.group.size());
        log.info("Stopping server.");
        server.shutdown();
        server.waitForShutdown();
        assertTrue(server.isShutdown);
        log.info("Finished server.");
    }
}
| |
/*
* Copyright 2017 LINE Corporation
*
* LINE Corporation licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.linecorp.armeria.client.retry;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.time.Duration;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.BiFunction;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.DisableOnDebug;
import org.junit.rules.TestRule;
import org.junit.rules.Timeout;
import com.google.common.base.Stopwatch;
import com.linecorp.armeria.client.ClientFactory;
import com.linecorp.armeria.client.ClientFactoryBuilder;
import com.linecorp.armeria.client.HttpClient;
import com.linecorp.armeria.client.HttpClientBuilder;
import com.linecorp.armeria.client.ResponseTimeoutException;
import com.linecorp.armeria.common.AggregatedHttpMessage;
import com.linecorp.armeria.common.HttpHeaderNames;
import com.linecorp.armeria.common.HttpHeaders;
import com.linecorp.armeria.common.HttpMethod;
import com.linecorp.armeria.common.HttpRequest;
import com.linecorp.armeria.common.HttpResponse;
import com.linecorp.armeria.common.HttpStatus;
import com.linecorp.armeria.common.MediaType;
import com.linecorp.armeria.common.util.EventLoopGroups;
import com.linecorp.armeria.server.AbstractHttpService;
import com.linecorp.armeria.server.ServerBuilder;
import com.linecorp.armeria.server.ServiceRequestContext;
import com.linecorp.armeria.testing.server.ServerRule;
public class RetryingHttpClientTest {
// Number of seconds the /retry-after-1-second endpoint sends in its Retry-After header.
private final int oneSecForRetryAfter = 1;
// Shared across all tests; created in init() and released in destroy().
private static ClientFactory clientFactory;
/** Creates the shared {@link ClientFactory} once for the whole test class. */
@BeforeClass
public static void init() {
    // use different eventLoop from server's so that clients don't hang when the eventLoop in server hangs
    clientFactory = new ClientFactoryBuilder()
            .workerGroup(EventLoopGroups.newEventLoopGroup(2), true).build();
}
/** Releases the shared {@link ClientFactory} after all tests have run. */
@AfterClass
public static void destroy() {
    clientFactory.close();
}
// Fail any single test after 10 seconds, unless a debugger is attached.
@Rule
public TestRule globalTimeout = new DisableOnDebug(new Timeout(10, TimeUnit.SECONDS));
/**
 * Embedded test server. Each endpoint simulates one failure mode the retrying client
 * must handle; the stateful endpoints count requests with an AtomicInteger so the
 * first call(s) fail and a later call succeeds.
 */
@Rule
public final ServerRule server = new ServerRule() {
    @Override
    protected void configure(ServerBuilder sb) throws Exception {
        // 200 OK, but the first two bodies read "Need to retry" (content-based retry).
        sb.service("/retry-content", new AbstractHttpService() {
            final AtomicInteger reqCount = new AtomicInteger();
            @Override
            protected HttpResponse doGet(ServiceRequestContext ctx, HttpRequest req)
                    throws Exception {
                if (reqCount.getAndIncrement() < 2) {
                    return HttpResponse.of(
                            HttpStatus.OK, MediaType.PLAIN_TEXT_UTF_8, "Need to retry");
                } else {
                    return HttpResponse.of(
                            HttpStatus.OK, MediaType.PLAIN_TEXT_UTF_8, "Succeeded after retry");
                }
            }
        });
        // One 500, then success.
        sb.service("/500-then-success", new AbstractHttpService() {
            final AtomicInteger reqCount = new AtomicInteger();
            @Override
            protected HttpResponse doGet(ServiceRequestContext ctx, HttpRequest req)
                    throws Exception {
                if (reqCount.getAndIncrement() < 1) {
                    return HttpResponse.of(HttpStatus.INTERNAL_SERVER_ERROR);
                } else {
                    return HttpResponse.of(
                            HttpStatus.OK, MediaType.PLAIN_TEXT_UTF_8, "Succeeded after retry");
                }
            }
        });
        // One 503, then success.
        sb.service("/503-then-success", new AbstractHttpService() {
            final AtomicInteger reqCount = new AtomicInteger();
            @Override
            protected HttpResponse doGet(ServiceRequestContext ctx, HttpRequest req)
                    throws Exception {
                if (reqCount.getAndIncrement() < 1) {
                    return HttpResponse.of(HttpStatus.SERVICE_UNAVAILABLE);
                } else {
                    return HttpResponse.of(
                            HttpStatus.OK, MediaType.PLAIN_TEXT_UTF_8, "Succeeded after retry");
                }
            }
        });
        // One 503 with "Retry-After: <seconds>", then success.
        sb.service("/retry-after-1-second", new AbstractHttpService() {
            final AtomicInteger reqCount = new AtomicInteger();
            @Override
            protected HttpResponse doGet(ServiceRequestContext ctx, HttpRequest req)
                    throws Exception {
                if (reqCount.getAndIncrement() < 1) {
                    return HttpResponse.of(
                            HttpHeaders.of(HttpStatus.SERVICE_UNAVAILABLE)
                                    .setInt(HttpHeaderNames.RETRY_AFTER, oneSecForRetryAfter));
                } else {
                    return HttpResponse.of(
                            HttpStatus.OK, MediaType.PLAIN_TEXT_UTF_8, "Succeeded after retry");
                }
            }
        });
        // One 503 with an HTTP-date Retry-After about 3 seconds in the future, then success.
        sb.service("/retry-after-with-http-date", new AbstractHttpService() {
            final AtomicInteger reqCount = new AtomicInteger();
            @Override
            protected HttpResponse doGet(ServiceRequestContext ctx, HttpRequest req)
                    throws Exception {
                if (reqCount.getAndIncrement() < 1) {
                    return HttpResponse.of(
                            HttpHeaders.of(HttpStatus.SERVICE_UNAVAILABLE)
                                    .setTimeMillis(HttpHeaderNames.RETRY_AFTER,
                                            Duration.ofSeconds(3).toMillis() +
                                            System.currentTimeMillis()));
                } else {
                    return HttpResponse.of(
                            HttpStatus.OK, MediaType.PLAIN_TEXT_UTF_8, "Succeeded after retry");
                }
            }
        });
        // Always 503 with a Retry-After one year away -- can never be honoured in time.
        sb.service("/retry-after-one-year", new AbstractHttpService() {
            @Override
            protected HttpResponse doGet(ServiceRequestContext ctx, HttpRequest req)
                    throws Exception {
                return HttpResponse.of(
                        HttpHeaders.of(HttpStatus.SERVICE_UNAVAILABLE)
                                .setTimeMillis(HttpHeaderNames.RETRY_AFTER,
                                        Duration.ofDays(365).toMillis() +
                                        System.currentTimeMillis()));
            }
        });
        // Always 503; used to exercise response timeouts.
        sb.service("/service-unavailable", new AbstractHttpService() {
            @Override
            protected HttpResponse doGet(ServiceRequestContext ctx, HttpRequest req)
                    throws Exception {
                return HttpResponse.of(HttpStatus.SERVICE_UNAVAILABLE);
            }
        });
        // GET and POST each fail once, then sleep 1 second before succeeding.
        sb.service("/get-post", new AbstractHttpService() {
            final AtomicInteger reqGetCount = new AtomicInteger();
            final AtomicInteger reqPostCount = new AtomicInteger();
            @Override
            protected HttpResponse doGet(ServiceRequestContext ctx, HttpRequest req)
                    throws Exception {
                if (reqGetCount.getAndIncrement() < 1) {
                    return HttpResponse.of(HttpStatus.SERVICE_UNAVAILABLE);
                } else {
                    TimeUnit.MILLISECONDS.sleep(1000);
                    return HttpResponse.of(
                            HttpStatus.OK, MediaType.PLAIN_TEXT_UTF_8, "Succeeded after retry");
                }
            }
            @Override
            protected HttpResponse doPost(ServiceRequestContext ctx, HttpRequest req)
                    throws Exception {
                if (reqPostCount.getAndIncrement() < 1) {
                    return HttpResponse.of(HttpStatus.SERVICE_UNAVAILABLE);
                } else {
                    TimeUnit.MILLISECONDS.sleep(1000);
                    return HttpResponse.of(
                            HttpStatus.OK, MediaType.PLAIN_TEXT_UTF_8, "Succeeded after retry");
                }
            }
        });
        // First call sleeps 1 second before failing (trips per-attempt timeouts), then succeeds.
        sb.service("/1sleep-then-success", new AbstractHttpService() {
            final AtomicInteger reqCount = new AtomicInteger();
            @Override
            protected HttpResponse doGet(ServiceRequestContext ctx, HttpRequest req)
                    throws Exception {
                if (reqCount.getAndIncrement() < 1) {
                    TimeUnit.MILLISECONDS.sleep(1000);
                    return HttpResponse.of(HttpStatus.SERVICE_UNAVAILABLE);
                } else {
                    return HttpResponse.of(
                            HttpStatus.OK, MediaType.PLAIN_TEXT_UTF_8, "Succeeded after retry");
                }
            }
        });
        // Echoes the aggregated POST body back after one 503.
        sb.service("/post-ping-pong", new AbstractHttpService() {
            final AtomicInteger reqPostCount = new AtomicInteger();
            @Override
            protected HttpResponse doPost(ServiceRequestContext ctx, HttpRequest req)
                    throws Exception {
                return HttpResponse.from(req.aggregate().handle((message, thrown) -> {
                    if (reqPostCount.getAndIncrement() < 1) {
                        return HttpResponse.of(HttpStatus.SERVICE_UNAVAILABLE);
                    } else {
                        return HttpResponse.of(HttpStatus.OK, MediaType.PLAIN_TEXT_UTF_8,
                                message.content().toStringUtf8());
                    }
                }));
            }
        });
    }
};
/** Retries while the body reads "Need to retry"; succeeds once the payload changes. */
@Test
public void retryWhenContentMatched() {
    final RetryStrategy<HttpRequest, HttpResponse> contentStrategy =
            new RetryOnContent("Need to retry");
    final RetryingHttpClientBuilder retryBuilder =
            new RetryingHttpClientBuilder(contentStrategy)
                    .maxTotalAttempts(100)
                    .responseTimeoutForEachAttempt(Duration.ZERO)
                    .contentPreviewLength(1024);
    final HttpClient client = new HttpClientBuilder(server.uri("/"))
            .factory(clientFactory)
            .defaultResponseTimeoutMillis(0)
            .decorator(retryBuilder.newDecorator())
            .build();
    final AggregatedHttpMessage reply = client.get("/retry-content").aggregate().join();
    assertThat(reply.content().toStringUtf8()).isEqualTo("Succeeded after retry");
}
/** Builds a retrying client with the test suite's default 10-second response timeout. */
private HttpClient retryingHttpClientOf(RetryStrategy<HttpRequest, HttpResponse> strategy) {
    final long defaultResponseTimeoutMillis = 10000; // default response timeout in tests
    return retryingHttpClientOf(defaultResponseTimeoutMillis, strategy);
}
/**
 * Builds a client that honours Retry-After headers, makes up to 100 attempts, and
 * enforces the given whole-response timeout.
 */
private HttpClient retryingHttpClientOf(long responseTimeoutMillis,
                                        RetryStrategy<HttpRequest, HttpResponse> strategy) {
    final RetryingHttpClientBuilder retryBuilder = new RetryingHttpClientBuilder(strategy)
            .useRetryAfter(true)
            .maxTotalAttempts(100);
    return new HttpClientBuilder(server.uri("/"))
            .factory(clientFactory)
            .defaultResponseTimeoutMillis(responseTimeoutMillis)
            .decorator(retryBuilder.newDecorator())
            .build();
}
/**
 * A {@link RetryStrategy} that requests another attempt whenever the aggregated
 * response body equals (case-insensitively) the configured trigger content.
 */
private static class RetryOnContent implements RetryStrategy<HttpRequest, HttpResponse> {
    private final String retryContent;
    private final Backoff backoffOnContent = Backoff.fixed(100);
    RetryOnContent(String retryContent) {
        this.retryContent = retryContent;
    }
    @Override
    public CompletableFuture<Backoff> shouldRetry(HttpRequest request, HttpResponse response) {
        // Aggregate the full body first; a null message (aggregation failure) is not retried.
        return response.aggregate().handle((message, unused) -> {
            final boolean matched = message != null &&
                    message.content().toStringUtf8().equalsIgnoreCase(retryContent);
            return matched ? backoffOnContent : null;
        });
    }
}
/** First response is a 503; the server-error strategy retries until the 200 arrives. */
@Test
public void retryWhenStatusMatched() {
    final HttpClient client = retryingHttpClientOf(RetryStrategy.onServerErrorStatus());
    final AggregatedHttpMessage response = client.get("/503-then-success").aggregate().join();
    assertThat(response.content().toStringUtf8()).isEqualTo("Succeeded after retry");
}
/** The service replies 503 with "Retry-After: 1"; the client must wait roughly that long. */
@Test
public void respectRetryAfter() {
    final HttpClient client = retryingHttpClientOf(RetryStrategy.onServerErrorStatus());
    final Stopwatch stopwatch = Stopwatch.createStarted();
    final AggregatedHttpMessage response = client.get("/retry-after-1-second").aggregate().join();
    assertThat(response.content().toStringUtf8()).isEqualTo("Succeeded after retry");
    // Allow 10% slack on the measured delay.
    final long minDelayMillis = (long) (TimeUnit.SECONDS.toMillis(oneSecForRetryAfter) * 0.9);
    assertThat(stopwatch.elapsed(TimeUnit.MILLISECONDS)).isGreaterThanOrEqualTo(minDelayMillis);
}
/** Retry-After given as an HTTP date (~3s ahead); the client must delay before retrying. */
@Test
public void respectRetryAfterWithHttpDate() {
    final HttpClient client = retryingHttpClientOf(RetryStrategy.onServerErrorStatus());
    final Stopwatch stopwatch = Stopwatch.createStarted();
    final AggregatedHttpMessage response =
            client.get("/retry-after-with-http-date").aggregate().join();
    assertThat(response.content().toStringUtf8()).isEqualTo("Succeeded after retry");
    // Since ZonedDateTime doesn't express exact time,
    // just check out whether it is retried after delayed some time.
    assertThat(stopwatch.elapsed(TimeUnit.MILLISECONDS)).isGreaterThanOrEqualTo(1000);
}
/** A Retry-After one year ahead can never be honoured within the response timeout. */
@Test
public void retryAfterOneYear() {
    final long responseTimeoutMillis = 1000;
    final HttpClient client = retryingHttpClientOf(
            responseTimeoutMillis, RetryStrategy.onServerErrorStatus());
    final Stopwatch stopwatch = Stopwatch.createStarted();
    assertThatThrownBy(() -> client.get("/retry-after-one-year").aggregate().join())
            .hasCauseInstanceOf(ResponseTimeoutException.class);
    // Retry after is limited by response time out which is 1 second in this case.
    final long maxElapsedMillis = (long) (responseTimeoutMillis * 1.1);
    assertThat(stopwatch.elapsed(TimeUnit.MILLISECONDS)).isLessThanOrEqualTo(maxElapsedMillis);
}
/** The endpoint never recovers, so retrying must end in a response timeout. */
@Test
public void timeoutWhenServerSendServiceUnavailable() {
    final long responseTimeoutMillis = 1000;
    final HttpClient client = retryingHttpClientOf(
            responseTimeoutMillis, RetryStrategy.onServerErrorStatus(Backoff.fixed(100)));
    final Stopwatch stopwatch = Stopwatch.createStarted();
    assertThatThrownBy(() -> client.get("/service-unavailable").aggregate().join())
            .hasCauseInstanceOf(ResponseTimeoutException.class);
    // ... and only after (roughly) the whole-response timeout has elapsed.
    assertThat(stopwatch.elapsed(TimeUnit.MILLISECONDS)).isGreaterThanOrEqualTo(
            (long) (responseTimeoutMillis * 0.9));
}
/** Each request on the same client gets a fresh timeout budget. */
@Test
public void consecutiveRequests() {
    final long responseTimeoutMillis = 500;
    final HttpClient client = retryingHttpClientOf(
            responseTimeoutMillis, RetryStrategy.onServerErrorStatus());
    final long maxElapsedMillis = (long) (responseTimeoutMillis * 1.1);
    final Stopwatch stopwatch = Stopwatch.createStarted();
    assertThatThrownBy(() -> client.get("/service-unavailable").aggregate().join())
            .hasCauseInstanceOf(ResponseTimeoutException.class);
    assertThat(stopwatch.elapsed(TimeUnit.MILLISECONDS)).isLessThanOrEqualTo(maxElapsedMillis);
    // second request
    stopwatch.reset();
    stopwatch.start();
    assertThatThrownBy(() -> client.get("/service-unavailable").aggregate().join())
            .hasCauseInstanceOf(ResponseTimeoutException.class);
    assertThat(stopwatch.elapsed(TimeUnit.MILLISECONDS)).isLessThanOrEqualTo(maxElapsedMillis);
}
/**
 * Both the client-level timeout (0 = disabled) and the per-attempt timeout
 * (Duration.ZERO = disabled) are off, so the content-based retries can run to success.
 */
@Test
public void disableResponseTimeout() {
    final RetryStrategy<HttpRequest, HttpResponse> strategy = new RetryOnContent("Need to retry");
    final RetryingHttpClientBuilder retryBuilder = new RetryingHttpClientBuilder(strategy)
            .maxTotalAttempts(100)
            .responseTimeoutForEachAttempt(Duration.ZERO)
            .contentPreviewLength(1024);
    final HttpClient client = new HttpClientBuilder(server.uri("/"))
            .factory(clientFactory)
            .defaultResponseTimeoutMillis(0)
            .decorator(retryBuilder.newDecorator())
            .build();
    final AggregatedHttpMessage response = client.get("/retry-content").aggregate().join();
    assertThat(response.content().toStringUtf8()).isEqualTo("Succeeded after retry");
    // response timeout did not happen.
}
/**
 * Per-request timeouts: GETs get a 50ms budget (too short for the 1s handler sleep, so
 * they time out), while POSTs get 10s and succeed after one retry.
 */
@Test
public void differentResponseTimeout() {
    final RetryStrategy<HttpRequest, HttpResponse> strategy =
            RetryStrategy.onServerErrorStatus(Backoff.fixed(10));
    final HttpClient client =
            new HttpClientBuilder(server.uri("/"))
                    .factory(clientFactory)
                    .decorator(RetryingHttpClient.newDecorator(strategy))
                    .decorator((delegate, ctx, req) -> {
                        ctx.setResponseTimeoutMillis(
                                req.method() == HttpMethod.GET ? 50 : 10000);
                        return delegate.execute(ctx, req);
                    }).build();
    assertThatThrownBy(() -> client.get("/get-post").aggregate().join())
            .hasCauseInstanceOf(ResponseTimeoutException.class);
    final AggregatedHttpMessage response = client.post("/get-post", "foo").aggregate().join();
    assertThat(response.content().toStringUtf8()).isEqualTo("Succeeded after retry");
}
@Test
public void retryOnResponseTimeout() {
    // Retry only when the response failed with a ResponseTimeoutException.
    RetryStrategy<HttpRequest, HttpResponse> strategy = new RetryStrategy<HttpRequest, HttpResponse>() {
        private final Backoff fixedBackoff = Backoff.fixed(100);

        @Override
        public CompletableFuture<Backoff> shouldRetry(HttpRequest request,
                                                      HttpResponse response) {
            return response.aggregate().handle(
                    (unused, cause) -> cause instanceof ResponseTimeoutException ? fixedBackoff : null);
        }
    };
    final HttpClient client =
            new HttpClientBuilder(server.uri("/"))
                    .factory(clientFactory)
                    .defaultResponseTimeoutMillis(0)
                    .decorator(RetryingHttpClient.newDecorator(strategy, 100, 500))
                    .build();
    final AggregatedHttpMessage response = client.get("/1sleep-then-success").aggregate().join();
    assertThat(response.content().toStringUtf8()).isEqualTo("Succeeded after retry");
}
@Test
public void differentBackoffBasedOnStatus() {
    final HttpClient client = retryingHttpClientOf(RetryStrategy.onStatus(statusBasedBackoff()));
    final Stopwatch stopwatch = Stopwatch.createStarted();
    // A 503 response retries with the short (10 ms) backoff.
    AggregatedHttpMessage response = client.get("/503-then-success").aggregate().join();
    assertThat(response.content().toStringUtf8()).isEqualTo("Succeeded after retry");
    assertThat(stopwatch.elapsed(TimeUnit.MILLISECONDS))
            .isBetween((long) (10 * 0.9), (long) (10000 * 0.9));
    // A 500 response retries with the long (1000 ms) backoff.
    stopwatch.reset();
    stopwatch.start();
    response = client.get("/500-then-success").aggregate().join();
    assertThat(response.content().toStringUtf8()).isEqualTo("Succeeded after retry");
    assertThat(stopwatch.elapsed(TimeUnit.MILLISECONDS)).isGreaterThanOrEqualTo((long) (1000 * 0.9));
}
/** Returns a backoff function that picks a short backoff for 503 and a long one for 500. */
private BiFunction<HttpStatus, Throwable, Backoff> statusBasedBackoff() {
    // Each returned function carries its own pair of attempt-limited backoffs,
    // exactly as the previous anonymous-class fields did.
    final Backoff backoffOn503 = Backoff.fixed(10).withMaxAttempts(2);
    final Backoff backoffOn500 = Backoff.fixed(1000).withMaxAttempts(2);
    return (status, unused) -> {
        if (status == HttpStatus.SERVICE_UNAVAILABLE) {
            return backoffOn503;
        }
        if (status == HttpStatus.INTERNAL_SERVER_ERROR) {
            return backoffOn500;
        }
        // No retry for any other status.
        return null;
    };
}
@Test
public void retryWithRequestBody() {
    final RetryStrategy<HttpRequest, HttpResponse> strategy =
            RetryStrategy.onServerErrorStatus(Backoff.fixed(10));
    final HttpClient client = retryingHttpClientOf(strategy);
    // The request body must be re-sent intact on every retry attempt.
    final AggregatedHttpMessage response = client.post("/post-ping-pong", "bar").aggregate().join();
    assertThat(response.content().toStringUtf8()).isEqualTo("bar");
}
}
| |
package org.renjin.sexp;
import com.google.common.collect.UnmodifiableIterator;
import org.apache.commons.math.complex.Complex;
import java.util.Iterator;
/**
 * Base class for R logical vectors. Elements are stored as raw ints:
 * 0 (FALSE), 1 (TRUE), or the NA sentinel {@code IntVector.NA}.
 */
public abstract class LogicalVector extends AbstractAtomicVector implements Iterable<Logical> {

  public static final String TYPE_NAME = "logical";
  public static final Type VECTOR_TYPE = new LogicalType();
  public static final LogicalVector EMPTY = new LogicalArrayVector(new int[0]);

  // NOTE(review): these four fields read as constants but are not declared
  // final; declaring them final would be safer, but could break any caller
  // that (incorrectly) assigns to them, so the declarations are left as-is.
  public static int NA = IntVector.NA;
  public static LogicalVector TRUE = new LogicalArrayVector(1);
  public static LogicalVector FALSE = new LogicalArrayVector(0);
  public static LogicalVector NA_VECTOR = new LogicalArrayVector(NA);

  /** Returns the shared singleton vector for the given boolean value. */
  public static SEXP valueOf(boolean value) {
    return value ? TRUE : FALSE;
  }

  public LogicalVector(AttributeMap attributes) {
    super(attributes);
  }

  protected LogicalVector() {
  }

  @Override
  public String getTypeName() {
    return TYPE_NAME;
  }

  @Override
  public abstract int length();

  /** Returns the element's raw int representation: 0, 1, or {@code IntVector.NA}. */
  @Override
  public int getElementAsInt(int index) {
    return getElementAsRawLogical(index);
  }

  @Override
  public double getElementAsDouble(int index) {
    int value = getElementAsRawLogical(index);
    // Map the integer NA sentinel to the double NA sentinel.
    return value == IntVector.NA ? DoubleVector.NA : (double) value;
  }

  @Override
  public SEXP getElementAsSEXP(int index) {
    return new LogicalArrayVector(getElementAsRawLogical(index));
  }

  @Override
  public int indexOf(AtomicVector vector, int vectorIndex, int startIndex) {
    // NOTE(review): 'vectorIndex' is unused and the probe value is read from
    // 'startIndex'; this preserves the original behavior — confirm the
    // intended parameter roles against AtomicVector's contract.
    int value = vector.getElementAsRawLogical(startIndex);
    for(int i=0;i<length();++i) {
      if(value == getElementAsRawLogical(i)) {
        return i;
      }
    }
    return -1;
  }

  @Override
  public int compare(int index1, int index2) {
    // Fixed: the previous subtraction-based comparison overflows when one
    // operand is the NA sentinel (an extreme int value), producing a result
    // with the wrong sign. Integer.compare is overflow-safe.
    return Integer.compare(getElementAsRawLogical(index1), getElementAsRawLogical(index2));
  }

  /**
   * Returns the element as a Boolean.
   *
   * @throws IllegalStateException if the element is NA; callers must check
   *         {@code isElementNA(index)} first.
   */
  @Override
  public Boolean getElementAsObject(int index) {
    int value = getElementAsInt(index);
    if(IntVector.isNA(value)) {
      throw new IllegalStateException(String.format("The element at index %d is NA," +
          " and cannot be represented as a Boolean. Make sure you are calling isElementNA() first.", index));
    } else {
      return value != 0;
    }
  }

  @Override
  public Logical getElementAsLogical(int index) {
    return Logical.valueOf(getElementAsRawLogical(index));
  }

  @Override
  public abstract int getElementAsRawLogical(int index);

  @Override
  public Complex getElementAsComplex(int index) {
    if(IntVector.isNA(getElementAsRawLogical(index))) {
      return ComplexVector.NA;
    }
    // TRUE -> 1+0i, FALSE -> 0+0i.
    return new Complex(getElementAsDouble(index), 0);
  }

  @Override
  public Type getVectorType() {
    return VECTOR_TYPE;
  }

  @Override
  public String getElementAsString(int index) {
    int value = getElementAsRawLogical(index);
    if(value == IntVector.NA) {
      return StringVector.NA;
    } else if(value == 0) {
      return "FALSE";
    } else {
      return "TRUE";
    }
  }

  @Override
  public boolean isNumeric() {
    return true;
  }

  /** Returns the first element as a Logical; assumes the vector is non-empty. */
  @Override
  public Logical asLogical() {
    return getElementAsLogical(0);
  }

  @Override
  public double asReal() {
    if(length() == 0) {
      return DoubleVector.NA;
    } else {
      return getElementAsDouble(0);
    }
  }

  @Override
  public void accept(SexpVisitor visitor) {
    visitor.visit(this);
  }

  /** Returns a read-only iterator over the elements as {@code Logical} values. */
  @Override
  public Iterator<Logical> iterator() {
    return new UnmodifiableIterator<Logical>() {
      private int i=0;

      @Override
      public boolean hasNext() {
        return i<length();
      }

      @Override
      public Logical next() {
        return getElementAsLogical(i++);
      }
    };
  }

  /**
   * Two logical vectors are equal when they have the same length and the same
   * raw logical value at every index. Attributes are not compared.
   */
  @Override
  public final boolean equals(Object o) {
    if (this == o) return true;
    // instanceof is false for null, so no separate null check is needed.
    if (!(o instanceof LogicalVector)) return false;
    LogicalVector that = (LogicalVector) o;
    if(this.length() != that.length()) {
      return false;
    }
    for(int i=0;i!=length();++i) {
      if(this.getElementAsRawLogical(i) != that.getElementAsRawLogical(i)) {
        return false;
      }
    }
    return true;
  }

  @Override
  public final int hashCode() {
    int hash = 37;
    for(int i=0;i!=length();++i) {
      hash += getElementAsRawLogical(i);
    }
    return hash;
  }

  /** Renders at most the first five elements, e.g. {@code c(TRUE, FALSE, NA)}. */
  @Override
  public String toString() {
    if (length() == 1) {
      return toString(getElementAsRawLogical(0));
    } else {
      StringBuilder sb = new StringBuilder();
      sb.append("c(");
      for (int i = 0; i != Math.min(5, length()); ++i) {
        if(i > 0) {
          sb.append(", ");
        }
        sb.append(toString(getElementAsRawLogical(i)));
      }
      if (length() > 5) {
        sb.append(",... ").append(length()).append(" elements total");
      }
      sb.append(")");
      return sb.toString();
    }
  }

  @Override
  public LogicalArrayVector.Builder newCopyBuilder() {
    return new LogicalArrayVector.Builder(this);
  }

  @Override
  public LogicalArrayVector.Builder newBuilderWithInitialSize(int initialSize) {
    return new LogicalArrayVector.Builder(initialSize, initialSize);
  }

  @Override
  public LogicalArrayVector.Builder newBuilderWithInitialCapacity(int initialCapacity) {
    return new LogicalArrayVector.Builder(0, initialCapacity);
  }

  @Override
  protected abstract SEXP cloneWithNewAttributes(AttributeMap attributes);

  /** Formats a raw logical value: 1 -> "TRUE", 0 -> "FALSE", anything else -> "NA". */
  private String toString(int x) {
    if (x == 1) {
      return "TRUE";
    } else if (x == 0) {
      return "FALSE";
    } else {
      return "NA";
    }
  }

  @Override
  public boolean isElementNA(int index) {
    return IntVector.isNA(getElementAsRawLogical(index));
  }

  /** Vector.Type implementation for logical vectors. */
  private static class LogicalType extends Vector.Type {

    public LogicalType() {
      super(Order.LOGICAL);
    }

    @Override
    public Vector.Builder newBuilder() {
      return new LogicalArrayVector.Builder(0, 0);
    }

    @Override
    public Builder newBuilderWithInitialSize(int initialSize) {
      return new LogicalArrayVector.Builder(initialSize);
    }

    @Override
    public Builder newBuilderWithInitialCapacity(int initialCapacity) {
      return new LogicalArrayVector.Builder(0, initialCapacity);
    }

    @Override
    public Vector getElementAsVector(Vector vector, int index) {
      return new LogicalArrayVector(vector.getElementAsRawLogical(index));
    }

    @Override
    public int compareElements(Vector vector1, int index1, Vector vector2, int index2) {
      // Overflow-safe comparison; subtraction breaks when an NA sentinel is involved.
      return Integer.compare(vector1.getElementAsRawLogical(index1),
                             vector2.getElementAsRawLogical(index2));
    }

    @Override
    public boolean elementsEqual(Vector vector1, int index1, Vector vector2,
                                 int index2) {
      // NA never compares equal to anything, including another NA.
      if(vector1.isElementNA(index1) || vector2.isElementNA(index2)) {
        return false;
      }
      return vector1.getElementAsRawLogical(index1) == vector2.getElementAsRawLogical(index2);
    }
  }
}
| |
/*
* Copyright (C) 2010-2014 Hamburg Sud and the contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.aludratest.service.gui;
import org.aludratest.service.AttachResult;
import org.aludratest.service.ElementName;
import org.aludratest.service.ElementType;
import org.aludratest.service.Interaction;
import org.aludratest.service.TechnicalArgument;
import org.aludratest.service.TechnicalLocator;
import org.aludratest.service.Verification;
import org.aludratest.service.locator.element.GUIElementLocator;
import org.aludratest.service.locator.option.OptionLocator;
import org.aludratest.service.locator.window.TitleLocator;
import org.aludratest.service.locator.window.WindowLocator;
/** The interface {@link Interaction} provides several methods to interact with the active screen of the application under test.
 * This means that the application under test can be controlled with the help of these methods. Every class which implements this
 * interface must assure that a call of one of these methods results in an interaction with the application under test.<br>
 * For verifications of the application under test see {@link Verification}.
 * @author Marcel Malitz
 * @author Volker Bergmann */
public interface GUIInteraction extends Interaction {

    /** Selects a radio button. If this radio button belongs to a group of radio buttons, the other radio buttons of this group
     * will be unselected.
     * @param elementType the type of the related radio button to log
     * @param elementName the name of the related radio button to log
     * @param locator to locate one specific radio button in the SUT
     * @param taskCompletionTimeout the maximum number of milliseconds to wait for the completion of the task */
    void selectRadiobutton(@ElementType String elementType, @ElementName String elementName,
            @TechnicalLocator GUIElementLocator locator,
            @TechnicalArgument int taskCompletionTimeout);

    /** Changes the selection state of a checkbox. If this method will be called on a checkbox which is still selected, the
     * checkbox will be unselected.
     * @param elementType the type of the related checkbox to log
     * @param elementName the name of the related checkbox to log
     * @param locator to locate one specific checkbox in the application under test
     * @param taskCompletionTimeout the maximum number of milliseconds to wait for the completion of the task */
    void changeCheckbox(@ElementType String elementType, @ElementName String elementName,
            @TechnicalLocator GUIElementLocator locator,
            @TechnicalArgument int taskCompletionTimeout);

    /** Selects a checkbox. If the checkbox is already selected, this method will do nothing.
     * @param elementType the type of the related checkbox to log
     * @param elementName the name of the related checkbox to log
     * @param locator to locate one specific checkbox in the application under test
     * @param taskCompletionTimeout the maximum number of milliseconds to wait for the completion of the task */
    void selectCheckbox(@ElementType String elementType, @ElementName String elementName,
            @TechnicalLocator GUIElementLocator locator,
            @TechnicalArgument int taskCompletionTimeout);

    /** Deselects a checkbox. If the checkbox is not selected, this method will do nothing.
     * @param elementType the type of the related checkbox to log
     * @param elementName the name of the related checkbox to log
     * @param locator to locate one specific checkbox in the application under test
     * @param taskCompletionTimeout the maximum number of milliseconds to wait for the completion of the task */
    void deselectCheckbox(@ElementType String elementType, @ElementName String elementName,
            @TechnicalLocator GUIElementLocator locator,
            @TechnicalArgument int taskCompletionTimeout);

    /** Selects an entry in a dropdownbox with the help of a <code>OptionLocator</code>. First it locates the element with the help
     * of the <code>locator</code>, then it tries to select an entry defined by <code>optionLocator</code>.
     * @param elementType the type of the related dropdownbox to log
     * @param elementName the name of the related dropdownbox to log
     * @param locator to locate one specific dropdownbox in the application under test
     * @param optionLocator defines which entry of the located dropdownbox shall be selected
     * @param taskCompletionTimeout the maximum number of milliseconds to wait for the completion of the task */
    void selectDropDownEntry(@ElementType String elementType, @ElementName String elementName,
            @TechnicalLocator GUIElementLocator locator,
            OptionLocator optionLocator,
            @TechnicalArgument int taskCompletionTimeout);

    /** Types in some text without conversion/manipulation of the passed through text. The content of the locator will be deleted,
     * if the expected text is not set.
     * @param elementType the type of the related GUI element to log
     * @param elementName the name of the related GUI element to log
     * @param locator to locate one specific inputfield in the application under test. An inputfield is any GUI element which
     *            accepts user inputs.
     * @param text which shall be typed in without conversion/manipulation
     * @param taskCompletionTimeout the maximum number of milliseconds to wait for the completion of the task */
    void type(@ElementType String elementType, @ElementName String elementName, @TechnicalLocator GUIElementLocator locator,
            String text,
            @TechnicalArgument int taskCompletionTimeout);

    /** Assigns a file resource of the test project file system to the file chooser specified by the locator.
     * @param elementType the type of the related GUI element to log
     * @param elementName the name of the related GUI element to log
     * @param locator to locate the related file selection field in the application under test
     * @param filePath the absolute path of the file to be assigned to the file chooser
     * @param taskCompletionTimeout the maximum number of milliseconds to wait for the completion of system activities */
    // Fixed for consistency: this was the only method in the interface missing the
    // @ElementType/@ElementName/@TechnicalLocator/@TechnicalArgument annotations
    // that all sibling methods carry for logging/reporting.
    void assignFileResource(@ElementType String elementType, @ElementName String elementName,
            @TechnicalLocator GUIElementLocator locator, String filePath,
            @TechnicalArgument int taskCompletionTimeout);

    /** Clicks with a single click on any kind of element which reacts on click events. A common example are buttons and links.
     * @param elementType the type of the related GUI element to log
     * @param elementName the name of the related GUI element to log
     * @param locator to locate buttons, links or any other elements which react on mouse clicks.
     * @param taskCompletionTimeout the maximum number of milliseconds to wait for the completion of the task */
    void click(@ElementType String elementType, @ElementName String elementName, @TechnicalLocator GUIElementLocator locator,
            @TechnicalArgument int taskCompletionTimeout);

    /** Does a double click on the element which is identified by the locator.
     * @param elementType the type of the related GUI element to log
     * @param elementName the name of the related GUI element to log
     * @param locator which identifies the element which shall be double clicked
     * @param taskCompletionTimeout the maximum number of milliseconds to wait for the completion of the task */
    void doubleClick(@ElementType String elementType, @ElementName String elementName,
            @TechnicalLocator GUIElementLocator locator,
            @TechnicalArgument int taskCompletionTimeout);

    /** Moves the mouse cursor over the element which is identified by the locator, or emulates such a "hover", depending on the
     * GUI implementation.
     * @param elementType the type of the related GUI element to log
     * @param elementName the name of the related GUI element to log
     * @param locator which identifies the element which shall be "hovered" with the mouse
     * @param taskCompletionTimeout the maximum number of milliseconds to wait for the completion of the task */
    void hover(@ElementType String elementType, @ElementName String elementName, @TechnicalLocator GUIElementLocator locator,
            @TechnicalArgument int taskCompletionTimeout);

    /** Reads the value of an inputfield and returns it as a String without conversion/manipulation.
     * @param elementType the type of the related GUI element to log
     * @param elementName the name of the related GUI element to log
     * @param locator to locate the inputfield in the application under test where the inputfield must be an element for user
     *            inputs. Two examples are single line inputfields and text areas in web applications. This action works also with
     *            disabled inputfields.
     * @return the value of the inputfield. If the inputfield could not be found, <code>null</code> will be returned */
    String getInputFieldValue(@ElementType String elementType, @ElementName String elementName,
            @TechnicalLocator GUIElementLocator locator);

    /** Reads the selected label of an input field and returns it as a String without conversion/manipulation.
     * @param elementType the type of the related GUI element to log
     * @param elementName the name of the related GUI element to log
     * @param locator to locate the input field in the application under test where the inputfield must be an element for user
     *            inputs. Two examples are dropdown boxes and lists in web applications. This action works also with disabled
     *            input fields.
     * @return the value of the input field. If the input field could not be found, <code>null</code> will be returned */
    String getInputFieldSelectedLabel(@ElementType String elementType, @ElementName String elementName,
            @TechnicalLocator GUIElementLocator locator);

    /** Selects a window using a window locator. Once a window has been selected, all commands go to that window.
     * @param locator to locate one specific window of the application under test */
    void selectWindow(@TechnicalLocator WindowLocator locator);

    /** Gets the text of an element.
     * @param elementType the type of the related GUI element to log
     * @param elementName the name of the related GUI element to log
     * @param locator points to one element
     * @return the unmodified text of an element */
    String getText(@ElementType String elementType, @ElementName String elementName, @TechnicalLocator GUIElementLocator locator);

    /** Gets the text of an element and is adjustable to the check of the visibility of the element
     * @param elementType the type of the related GUI element to log
     * @param elementName the name of the related GUI element to log
     * @param locator points to one element, visible: to check visibility of the element
     * @param checkVisible tells to check if the element is visible
     * @return the unmodified text of an element */
    String getText(@ElementType String elementType, @ElementName String elementName, @TechnicalLocator GUIElementLocator locator,
            boolean checkVisible);

    /** Gives focus on an element.
     * @param elementType the type of the related GUI element to log
     * @param elementName the name of the related GUI element to log
     * @param locator of the element which shall get the focus */
    void focus(@ElementType String elementType, @ElementName String elementName, @TechnicalLocator GUIElementLocator locator);

    /** Presses a key on the keyboard. Available key codes depend on service implementation.
     * @param keycode is the key which shall be pressed. */
    void keyPress(int keycode);

    /** Presses keys on the keyboard. Available key codes depend on service implementation.
     * @param keysToPress are the keys which shall be pressed. */
    void keysPress(CharSequence... keysToPress);

    /** Closes all windows identified by their name. That means, that if there are several windows with same name all will be
     * closed. This method is not waiting for a window to open.
     * @param elementType the type of the target windows to log
     * @param elementName the name of the target windows to log
     * @param locator - name of the window */
    void closeWindows(@ElementType String elementType, @ElementName String elementName, @TechnicalLocator TitleLocator locator);

    /** Closes all open windows which do not have the specified title.
     * @param elementType the type of the target windows to log
     * @param elementName the name of the target windows to log
     * @param locator is a window locator or just a String which will be automatically converted to one of the default locators
     *            depending on the underlying driver and the used default localization mechanism */
    void closeOtherWindows(@ElementType String elementType, @ElementName String elementName,
            @TechnicalLocator TitleLocator locator);

    /** Waits until the given window is closed, or the given timeout is reached. This is especially useful if a window is closed
     * asynchronously some time after some event (e.g. a button click).
     *
     * @param elementType the type of the target windows to log
     * @param elementName the name of the target windows to log
     * @param locator is a window locator or just a String which will be automatically converted to one of the default locators
     *            depending on the underlying driver and the used default localization mechanism
     * @param taskCompletionTimeout Timeout to wait until the window is closed. If the timeout is triggered, a TimeoutException is
     *            raised. A value of -1 indicates to use the default configured timeout. */
    void waitForWindowToBeClosed(@ElementType String elementType, @ElementName String elementName,
            @TechnicalLocator TitleLocator locator, int taskCompletionTimeout);

    /** Performs a screenshot of the currently active window.
     *
     * @return A BASE64-encoded image with the contents of the currently active window. The image format is up to the UI service
     *         implementation, although PNG is recommended. */
    @AttachResult("Active Window Screenshot")
    String captureActiveWindow();

    /** Issues a "wrong page flow" error on this service. This should only be called from <code>checkCorrectPage()</code> methods
     * of <code>Page</code> subclasses.
     *
     * @param message Message to log together with the error. */
    void wrongPageFlow(String message);

    /** Signals a functional error of the underlying SUT. This should only used in cases where simple assertion calls of UI
     * components are not sufficient, e.g. where complex checks are required to determine the error state of the SUT.
     *
     * @param message Message to log together with the error. */
    void functionalError(String message);
}
| |
/*************************GO-LICENSE-START*********************************
* Copyright 2014 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*************************GO-LICENSE-END***********************************/
package com.thoughtworks.go.agent;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import com.googlecode.junit.ext.JunitExtRunner;
import com.googlecode.junit.ext.RunIf;
import com.thoughtworks.go.config.ArtifactPlans;
import com.thoughtworks.go.config.CaseInsensitiveString;
import com.thoughtworks.go.config.CruiseConfig;
import com.thoughtworks.go.config.ExecTask;
import com.thoughtworks.go.config.JobConfig;
import com.thoughtworks.go.config.JobConfigs;
import com.thoughtworks.go.config.PipelineConfig;
import com.thoughtworks.go.config.PipelineConfigs;
import com.thoughtworks.go.config.Resources;
import com.thoughtworks.go.config.StageConfig;
import com.thoughtworks.go.config.Tasks;
import com.thoughtworks.go.config.materials.MaterialConfigs;
import com.thoughtworks.go.domain.builder.Builder;
import com.thoughtworks.go.domain.DefaultSchedulingContext;
import com.thoughtworks.go.domain.JobIdentifier;
import com.thoughtworks.go.domain.JobPlan;
import com.thoughtworks.go.domain.JobResult;
import com.thoughtworks.go.domain.Property;
import com.thoughtworks.go.domain.buildcause.BuildCause;
import com.thoughtworks.go.helper.BuilderMother;
import com.thoughtworks.go.helper.JobInstanceMother;
import com.thoughtworks.go.junitext.EnhancedOSChecker;
import com.thoughtworks.go.remote.AgentIdentifier;
import com.thoughtworks.go.remote.AgentInstruction;
import com.thoughtworks.go.remote.work.*;
import com.thoughtworks.go.server.service.AgentRuntimeInfo;
import com.thoughtworks.go.server.service.UpstreamPipelineResolver;
import com.thoughtworks.go.util.GoConstants;
import com.thoughtworks.go.util.SystemEnvironment;
import com.thoughtworks.go.util.command.CruiseControlException;
import com.thoughtworks.go.work.FakeWork;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import static com.thoughtworks.go.junitext.EnhancedOSChecker.DO_NOT_RUN_ON;
import static com.thoughtworks.go.junitext.EnhancedOSChecker.WINDOWS;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.StringContains.containsString;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verifyNoMoreInteractions;
@RunWith(JunitExtRunner.class)
public class JobRunnerTest {
private JobRunner runner;
private FakeWork work;
private List<String> consoleOut;
private List<Enum> statesAndResult;
private List<Property> properties;
private static final String SERVER_URL = "somewhere-does-not-matter";
private static final String JOB_PLAN_NAME = "run-ant";
private static final String TASK_WITH_CANCEL = "<job name=\"" + JOB_PLAN_NAME + "\">\n"
+ " <tasks>\n"
+ " <exec command=\"echo\" args=\"should run me before cancellation\" />\n"
+ " <exec command=\"sleep\" args=\"10\">\n"
+ " <oncancel>\n"
+ " <exec command=\"echo\" args=\"cancel in progress\" />\n"
+ " </oncancel>\n"
+ " </exec>\n"
+ " <exec command=\"echo\" args=\"should not run after cancellation\" />\n"
+ " </tasks>\n"
+ "</job>";
private static final String TASK_WITH_LONG_RUNNING_CANCEL = "<job name=\"" + JOB_PLAN_NAME + "\">\n"
+ " <tasks>\n"
+ " <exec command=\"echo\" args=\"should run me before cancellation\" />\n"
+ " <exec command=\"sleep\" args=\"10\">\n"
+ " <oncancel>\n"
+ " <exec command=\"sleep\" args=\"15\" />\n"
+ " </oncancel>\n"
+ " </exec>\n"
+ " <exec command=\"echo\" args=\"should not run after cancellation\" />\n"
+ " </tasks>\n"
+ "</job>";
private BuildWork buildWork;
private AgentIdentifier agentIdentifier;
private UpstreamPipelineResolver resolver;
@Before
public void setUp() throws Exception {
runner = new JobRunner();
work = new FakeWork();
consoleOut = new ArrayList<String>();
statesAndResult = new ArrayList<Enum>();
properties = new ArrayList<Property>();
agentIdentifier = new AgentIdentifier("localhost", "127.0.0.1", "uuid");
new SystemEnvironment().setProperty("serviceUrl", SERVER_URL);
resolver = mock(UpstreamPipelineResolver.class);
}
@After
public void tearDown() {
verifyNoMoreInteractions(resolver);
}
public static String withJob(String jobXml) {
return "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n"
+ "<cruise xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\""
+ " xsi:noNamespaceSchemaLocation=\"cruise-config.xsd\" schemaVersion=\""
+ GoConstants.CONFIG_SCHEMA_VERSION + "\">\n"
+ " <server artifactsdir=\"logs\"></server>"
+ " <pipelines>\n"
+ " <pipeline name=\"pipeline1\">\n"
+ " <materials>\n"
+ " <svn url=\"foobar\" checkexternals=\"true\" />\n"
+ " </materials>\n"
+ " <stage name=\"mingle\">\n"
+ " <jobs>\n"
+ jobXml
+ " </jobs>\n"
+ " </stage>\n"
+ " </pipeline>\n"
+ " </pipelines>\n"
+ " <agents>\n"
+ " <agent hostname=\"agent1\" ipaddress=\"1.2.3.4\" uuid=\"ywZRuHFIKvw93TssFeWl8g==\" />\n"
+ " </agents>"
+ "</cruise>";
}
private BuildWork getWork(JobConfig jobConfig) {
CruiseConfig config = new CruiseConfig();
config.server().setArtifactsDir("logs");
String stageName = "mingle";
String pipelineName = "pipeline1";
config.addPipeline(PipelineConfigs.DEFAULT_GROUP, new PipelineConfig(new CaseInsensitiveString(pipelineName), new MaterialConfigs(), new StageConfig(
new CaseInsensitiveString(stageName), new JobConfigs(jobConfig))));
String pipelineLabel = "100";
JobPlan jobPlan = JobInstanceMother.createJobPlan(jobConfig, new JobIdentifier(pipelineName, -2, pipelineLabel, stageName, "100", JOB_PLAN_NAME, 0L), new DefaultSchedulingContext());
jobPlan.setFetchMaterials(true);
jobPlan.setCleanWorkingDir(false);
List<Builder> builder = BuilderMother.createBuildersAssumingAllExecTasks(config, pipelineName, stageName, JOB_PLAN_NAME);
BuildAssignment buildAssignment = BuildAssignment.create(jobPlan, BuildCause.createWithEmptyModifications(), builder, new File(CruiseConfig.WORKING_BASE_DIR + pipelineName));
return new BuildWork(buildAssignment);
}
@Test
public void shouldDoNothingWhenJobIsNotCancelled() throws CruiseControlException {
runner.setWork(work);
runner.handleInstruction(new AgentInstruction(false), AgentRuntimeInfo.fromAgent(agentIdentifier, "cookie", null));
assertThat(work.getCallCount(), is(0));
}
@Test
public void shouldCancelOncePerJob() throws CruiseControlException {
runner.setWork(work);
runner.handleInstruction(new AgentInstruction(true), AgentRuntimeInfo.fromAgent(agentIdentifier, "cookie", null));
assertThat(work.getCallCount(), is(1));
runner.handleInstruction(new AgentInstruction(true), AgentRuntimeInfo.fromAgent(agentIdentifier, "cookie", null));
assertThat(work.getCallCount(), is(1));
}
@Test
public void shoudReturnTrueOnGetJobIsCancelledWhenJobIsCancelled() {
assertThat(runner.isJobCancelled(), is(false));
runner.handleInstruction(new AgentInstruction(true), AgentRuntimeInfo.fromAgent(agentIdentifier, "cookie", null));
assertThat(runner.isJobCancelled(), is(true));
}
@Test
@RunIf(value = EnhancedOSChecker.class, arguments = {DO_NOT_RUN_ON, WINDOWS})
@Ignore("it will random pass if we sleep a short time, but sleep 15 seconds is too long in unit test")
public void shouldCancelCurrentJob() throws Exception {
final JobRunner jobRunner = new JobRunner();
ExecTask secondExec = new ExecTask("sleep", "10", (String) null);
secondExec.setCancelTask(new ExecTask("echo", "cancel in progress", (String) null));
buildWork = getWork(new JobConfig(new CaseInsensitiveString(JOB_PLAN_NAME), new Resources(), new ArtifactPlans(), new Tasks(
new ExecTask("echo", "should run me before cancellation", (String) null),
secondExec,
new ExecTask("echo", "should not run after cancellation", (String) null))));
Thread worker = new Thread(new Runnable() {
public void run() {
jobRunner.run(buildWork, agentIdentifier,
new BuildRepositoryRemoteStub(), stubPublisher(properties, consoleOut),
AgentRuntimeInfo.fromAgent(agentIdentifier, "cookie", null), null, null, null);
}
});
Thread cancel = new Thread(new Runnable() {
public void run() {
jobRunner.handleInstruction(new AgentInstruction(true), AgentRuntimeInfo.fromAgent(agentIdentifier, "cookie", null));
}
});
worker.start();
// sleep 3 secs so the worker thread gets a chance to run first two tasks
try {
Thread.sleep(5000);
} catch (Exception ignore) {
}
cancel.start();
cancel.join();
worker.join();
System.out.println(consoleOut);
assertThat(consoleOut.toString(), containsString("should run me before cancellation"));
assertThat(consoleOut.toString(),
containsString("Start to execute cancel task: <exec command=\"echo\" args=\"cancel in progress\" />"));
assertThat(consoleOut.toString(), containsString("cancel in progress"));
assertThat(consoleOut.toString(), containsString("Task is cancelled"));
assertThat(consoleOut.toString(), not(containsString("should not run after cancellation")));
assertThat(statesAndResult.toString(), statesAndResult.contains(JobResult.Cancelled), is(true));
}
// Verifies that job completion waits for a long-running cancel task: the
// "Task is cancelled" message must appear in the console output before
// "Job completed", i.e. the 15-second cancel task is allowed to finish.
@Test
@RunIf(value = EnhancedOSChecker.class, arguments = {DO_NOT_RUN_ON, WINDOWS})
@Ignore("it will random pass if we sleep a short time, but sleep 15 seconds is too long in unit test")
public void shouldWaitForCancelTaskToFinish() throws Exception {
final JobRunner jobRunner = new JobRunner();
ExecTask secondExec = new ExecTask("sleep", "10", (String) null);
// Cancel task deliberately outlives the cancelled task to exercise the wait.
secondExec.setCancelTask(new ExecTask("sleep", "15", (String) null));
buildWork = getWork(new JobConfig(new CaseInsensitiveString(JOB_PLAN_NAME), new Resources(), new ArtifactPlans(), new Tasks(
new ExecTask("echo", "should run me before cancellation", (String) null),
secondExec,
new ExecTask("echo", "should not run after cancellation", (String) null))));
// Worker thread executes the job; cancel thread sends the cancel instruction.
Thread worker = new Thread(new Runnable() {
public void run() {
jobRunner.run(buildWork, agentIdentifier,
new BuildRepositoryRemoteStub(), stubPublisher(properties, consoleOut),
AgentRuntimeInfo.fromAgent(agentIdentifier, "cookie", null), null, null, null);
}
});
Thread cancel = new Thread(new Runnable() {
public void run() {
jobRunner.handleInstruction(new AgentInstruction(true), AgentRuntimeInfo.fromAgent(agentIdentifier, "cookie", null));
}
});
worker.start();
// sleep 5 secs so the worker thread gets past the first echo and is mid-way
// through the "sleep 10" task before we cancel (timing-based, hence @Ignore).
try {
Thread.sleep(5000);
} catch (Exception ignore) {
// Interruption of this test-pacing sleep is deliberately ignored.
}
cancel.start();
cancel.join();
worker.join();
System.out.println(consoleOut);
String output = consoleOut.toString();
assertThat(output.indexOf("Task is cancelled") < output.indexOf("Job completed"), is(true));
assertThat(statesAndResult.toString(), statesAndResult.contains(JobResult.Cancelled), is(true));
}
/**
 * Builds the artifacts-manipulator stub used by the worker threads above;
 * the stub records published properties and console lines into the given
 * lists so the tests can assert on them after the job finishes.
 */
private GoArtifactsManipulatorStub stubPublisher(final List<Property> properties,
final List<String> consoleOuts) {
return new GoArtifactsManipulatorStub(properties, consoleOuts);
}
}
| |
/**
* Copyright 2015 David Karnok and Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
* the License for the specific language governing permissions and limitations under the License.
*/
package hu.akarnokd.rxjava2flow;
import java.lang.reflect.*;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.Flow.*;
import java.util.function.*;
import org.junit.*;
import hu.akarnokd.rxjava2flow.exceptions.TestException;
import hu.akarnokd.rxjava2flow.schedulers.Schedulers;
import hu.akarnokd.rxjava2flow.subjects.*;
import hu.akarnokd.rxjava2flow.subscribers.TestSubscriber;
/**
* Verifies the operators handle null values properly by emitting/throwing NullPointerExceptions
*/
public class ObservableNullTests {
Observable<Integer> just1 = Observable.just(1);
//***********************************************************
// Static methods
//***********************************************************
@Test(expected = NullPointerException.class)
public void ambVarargsNull() {
Observable.amb((Publisher<Object>[])null);
}
@Test(expected = NullPointerException.class)
public void ambVarargsOneIsNull() {
Observable.amb(Observable.never(), null).toBlocking().lastOption();
}
@Test(expected = NullPointerException.class)
public void ambIterableNull() {
Observable.amb((Iterable<Publisher<Object>>)null);
}
@Test(expected = NullPointerException.class)
public void ambIterableIteratorNull() {
Observable.amb(() -> null).toBlocking().lastOption();
}
@Test(expected = NullPointerException.class)
public void ambIterableOneIsNull() {
Observable.amb(Arrays.asList(Observable.never(), null)).toBlocking().lastOption();
}
@Test(expected = NullPointerException.class)
public void combineLatestVarargsNull() {
Observable.combineLatest(v -> 1, true, 128, (Publisher<Object>[])null);
}
@Test(expected = NullPointerException.class)
public void combineLatestVarargsOneIsNull() {
Observable.combineLatest(v -> 1, true, 128, Observable.never(), null).toBlocking().lastOption();
}
@Test(expected = NullPointerException.class)
public void combineLatestIterableNull() {
Observable.combineLatest((Iterable<Publisher<Object>>)null, v -> 1, true, 128);
}
@Test(expected = NullPointerException.class)
public void combineLatestIterableIteratorNull() {
Observable.combineLatest(() -> null, v -> 1, true, 128).toBlocking().lastOption();
}
@Test(expected = NullPointerException.class)
public void combineLatestIterableOneIsNull() {
Observable.combineLatest(Arrays.asList(Observable.never(), null), v -> 1, true, 128).toBlocking().lastOption();
}
@Test(expected = NullPointerException.class)
public void combineLatestVarargsFunctionNull() {
Observable.combineLatest(null, true, 128, Observable.never());
}
@Test(expected = NullPointerException.class)
public void combineLatestVarargsFunctionReturnsNull() {
Observable.combineLatest(v -> null, true, 128, just1).toBlocking().lastOption();
}
@Test(expected = NullPointerException.class)
public void combineLatestIterableFunctionNull() {
Observable.combineLatest(Arrays.asList(just1), null, true, 128);
}
@Test(expected = NullPointerException.class)
public void combineLatestIterableFunctionReturnsNull() {
Observable.combineLatest(Arrays.asList(just1), v -> null, true, 128).toBlocking().lastOption();
}
@Test(expected = NullPointerException.class)
public void concatIterableNull() {
Observable.concat((Iterable<Publisher<Object>>)null);
}
@Test(expected = NullPointerException.class)
public void concatIterableIteratorNull() {
Observable.concat(() -> null).toBlocking().lastOption();
}
@Test(expected = NullPointerException.class)
public void concatIterableOneIsNull() {
Observable.concat(Arrays.asList(just1, null)).toBlocking().lastOption();
}
@Test(expected = NullPointerException.class)
public void concatPublisherNull() {
Observable.concat((Publisher<Publisher<Object>>)null);
}
@Test(expected = NullPointerException.class)
public void concatArrayNull() {
Observable.concatArray((Publisher<Object>[])null);
}
@Test(expected = NullPointerException.class)
public void concatArrayOneIsNull() {
Observable.concatArray(just1, null).toBlocking().lastOption();
}
@Test(expected = NullPointerException.class)
public void createNull() {
Observable.create(null);
}
@Test(expected = NullPointerException.class)
public void deferFunctionNull() {
Observable.defer(null);
}
@Test(expected = NullPointerException.class)
public void deferFunctionReturnsNull() {
Observable.defer(() -> null).toBlocking().lastOption();
}
@Test(expected = NullPointerException.class)
public void errorFunctionNull() {
Observable.error((Supplier<Throwable>)null);
}
@Test(expected = NullPointerException.class)
public void errorFunctionReturnsNull() {
Observable.error(() -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void errorThrowableNull() {
Observable.error((Throwable)null);
}
@Test(expected = NullPointerException.class)
public void fromArrayNull() {
Observable.fromArray((Object[])null);
}
@Test(expected = NullPointerException.class)
public void fromArrayOneIsNull() {
Observable.fromArray(1, null).toBlocking().lastOption();
}
@Test(expected = NullPointerException.class)
public void fromCallableNull() {
Observable.fromCallable(null);
}
@Test(expected = NullPointerException.class)
public void fromCallableReturnsNull() {
Observable.fromCallable(() -> null).toBlocking().lastOption();
}
@Test(expected = NullPointerException.class)
public void fromFutureNull() {
Observable.fromFuture(null);
}
// A Future that completes with null must surface as an onError(NPE), not as a
// null onNext. Uses a TestSubscriber (rather than expected=NPE) because the
// NPE is delivered through the subscriber, not thrown from subscribe().
@Test
public void fromFutureReturnsNull() {
CompletableFuture<Object> f = new CompletableFuture<>();
TestSubscriber<Object> ts = new TestSubscriber<>();
Observable.fromFuture(f).subscribe(ts);
f.complete(null);
ts.assertNoValues();
ts.assertNotComplete();
ts.assertError(NullPointerException.class);
}
@Test(expected = NullPointerException.class)
public void fromFutureTimedFutureNull() {
Observable.fromFuture(null, 1, TimeUnit.SECONDS);
}
@Test(expected = NullPointerException.class)
public void fromFutureTimedUnitNull() {
Observable.fromFuture(new CompletableFuture<>(), 1, null);
}
@Test(expected = NullPointerException.class)
public void fromFutureTimedSchedulerNull() {
Observable.fromFuture(new CompletableFuture<>(), 1, TimeUnit.SECONDS, null);
}
@Test(expected = NullPointerException.class)
public void fromFutureTimedReturnsNull() {
CompletableFuture<Object> f = CompletableFuture.completedFuture(null);
Observable.fromFuture(f, 1, TimeUnit.SECONDS).toBlocking().lastOption();
}
@Test(expected = NullPointerException.class)
public void fromFutureSchedulerNull() {
Observable.fromFuture(new CompletableFuture<>(), null);
}
@Test(expected = NullPointerException.class)
public void fromIterableNull() {
Observable.fromIterable(null);
}
@Test(expected = NullPointerException.class)
public void fromIterableIteratorNull() {
Observable.fromIterable(() -> null).toBlocking().lastOption();
}
@Test(expected = NullPointerException.class)
public void fromIterableValueNull() {
Observable.fromIterable(Arrays.asList(1, null)).toBlocking().lastOption();
}
@Test(expected = NullPointerException.class)
public void fromPublisherNull() {
Observable.fromPublisher(null);
}
@Test(expected = NullPointerException.class)
public void fromStreamNull() {
Observable.fromStream(null);
}
@Test(expected = NullPointerException.class)
public void fromStreamOneIsNull() {
Observable.fromStream(Arrays.asList(1, null).stream()).toBlocking().lastOption();
}
@Test(expected = NullPointerException.class)
public void generateConsumerNull() {
Observable.generate(null);
}
@Test(expected = NullPointerException.class)
public void generateConsumerEmitsNull() {
Observable.generate(s -> s.onNext(null)).toBlocking().lastOption();
}
@Test(expected = NullPointerException.class)
public void generateStateConsumerInitialStateNull() {
Observable.generate(null, (BiConsumer<Integer, Subscriber<Integer>>)(s, o) -> o.onNext(1));
}
@Test(expected = NullPointerException.class)
public void generateStateFunctionInitialStateNull() {
Observable.generate(null, (s, o) -> { o.onNext(1); return s; });
}
@Test(expected = NullPointerException.class)
public void generateStateConsumerNull() {
Observable.generate(() -> 1, (BiConsumer<Integer, Subscriber<Object>>)null);
}
@Test
public void generateConsumerStateNullAllowed() {
Observable.generate(() -> null, (BiConsumer<Integer, Subscriber<Integer>>)(s, o) -> o.onComplete()).toBlocking().lastOption();
}
@Test
public void generateFunctionStateNullAllowed() {
Observable.generate(() -> null, (s, o) -> { o.onComplete(); return s; }).toBlocking().lastOption();
}
@Test(expected = NullPointerException.class)
public void generateConsumerDisposeNull() {
Observable.generate(() -> 1, (BiConsumer<Integer, Subscriber<Integer>>)(s, o) -> o.onNext(1), null);
}
@Test(expected = NullPointerException.class)
public void generateFunctionDisposeNull() {
Observable.generate(() -> 1, (s, o) -> { o.onNext(1); return s; }, null);
}
@Test(expected = NullPointerException.class)
public void intervalUnitNull() {
Observable.interval(1, null);
}
/**
 * {@code interval(long, TimeUnit, Scheduler)} must reject a null Scheduler
 * with a NullPointerException.
 * <p>
 * Fix: this method was missing its {@code @Test} annotation (every sibling
 * test in this class has one), so JUnit silently never executed it and the
 * null-scheduler check went unverified.
 */
@Test(expected = NullPointerException.class)
public void intervalSchedulerNull() {
    Observable.interval(1, TimeUnit.SECONDS, null);
}
@Test(expected = NullPointerException.class)
public void intervalPeriodUnitNull() {
Observable.interval(1, 1, null);
}
@Test(expected = NullPointerException.class)
public void intervalPeriodSchedulerNull() {
Observable.interval(1, 1, TimeUnit.SECONDS, null);
}
@Test(expected = NullPointerException.class)
public void intervalRangeUnitNull() {
Observable.intervalRange(1,1, 1, 1, null);
}
@Test(expected = NullPointerException.class)
public void intervalRangeSchedulerNull() {
Observable.intervalRange(1, 1, 1, 1, TimeUnit.SECONDS, null);
}
// Exercises every just(...) overload (1 through 9 arguments) via reflection,
// placing null in each argument position in turn, and asserts each call fails
// with a NullPointerException. Reflection wraps the NPE in an
// InvocationTargetException, hence the getCause() check instead of
// @Test(expected = ...).
@Test
public void justNull() throws Exception {
@SuppressWarnings("rawtypes")
Class<Observable> clazz = Observable.class;
for (int argCount = 1; argCount < 10; argCount++) {
for (int argNull = 1; argNull <= argCount; argNull++) {
// Build an Object[argCount] of 1s with a single null at index argNull-1.
Class<?>[] params = new Class[argCount];
Arrays.fill(params, Object.class);
Object[] values = new Object[argCount];
Arrays.fill(values, 1);
values[argNull - 1] = null;
Method m = clazz.getMethod("just", params);
try {
m.invoke(null, values);
Assert.fail("No exception for argCount " + argCount + " / argNull " + argNull);
} catch (InvocationTargetException ex) {
// Any cause other than NPE (or no exception at all) is a failure.
if (!(ex.getCause() instanceof NullPointerException)) {
Assert.fail("Unexpected exception for argCount " + argCount + " / argNull " + argNull + ": " + ex);
}
}
}
}
}
@Test(expected = NullPointerException.class)
public void mergeIterableNull() {
Observable.merge(128, 128, (Iterable<Publisher<Object>>)null);
}
@Test(expected = NullPointerException.class)
public void mergeIterableIteratorNull() {
Observable.merge(128, 128, () -> null).toBlocking().lastOption();
}
@Test(expected = NullPointerException.class)
public void mergeIterableOneIsNull() {
Observable.merge(128, 128, Arrays.asList(just1, null)).toBlocking().lastOption();
}
@Test(expected = NullPointerException.class)
public void mergeArrayNull() {
Observable.merge(128, 128, (Publisher<Object>[])null);
}
@Test(expected = NullPointerException.class)
public void mergeArrayOneIsNull() {
Observable.merge(128, 128, just1, null).toBlocking().lastOption();
}
@Test(expected = NullPointerException.class)
public void mergeDelayErrorIterableNull() {
Observable.mergeDelayError(128, 128, (Iterable<Publisher<Object>>)null);
}
@Test(expected = NullPointerException.class)
public void mergeDelayErrorIterableIteratorNull() {
Observable.mergeDelayError(128, 128, () -> null).toBlocking().lastOption();
}
@Test(expected = NullPointerException.class)
public void mergeDelayErrorIterableOneIsNull() {
Observable.mergeDelayError(128, 128, Arrays.asList(just1, null)).toBlocking().lastOption();
}
@Test(expected = NullPointerException.class)
public void mergeDelayErrorArrayNull() {
Observable.mergeDelayError(128, 128, (Publisher<Object>[])null);
}
@Test(expected = NullPointerException.class)
public void mergeDelayErrorArrayOneIsNull() {
Observable.mergeDelayError(128, 128, just1, null).toBlocking().lastOption();
}
@Test(expected = NullPointerException.class)
public void sequenceEqualFirstNull() {
Observable.sequenceEqual(null, just1);
}
@Test(expected = NullPointerException.class)
public void sequenceEqualSecondNull() {
Observable.sequenceEqual(just1, null);
}
@Test(expected = NullPointerException.class)
public void sequenceEqualComparatorNull() {
Observable.sequenceEqual(just1, just1, null);
}
@Test(expected = NullPointerException.class)
public void switchOnNextNull() {
Observable.switchOnNext(null);
}
@Test(expected = NullPointerException.class)
public void timerUnitNull() {
Observable.timer(1, null);
}
@Test(expected = NullPointerException.class)
public void timerSchedulerNull() {
Observable.timer(1, TimeUnit.SECONDS, null);
}
@Test(expected = NullPointerException.class)
public void usingResourceSupplierNull() {
Observable.using(null, d -> just1, d -> { });
}
@Test(expected = NullPointerException.class)
public void usingObservableSupplierNull() {
Observable.using(() -> 1, null, d -> { });
}
@Test(expected = NullPointerException.class)
public void usingObservableSupplierReturnsNull() {
Observable.using(() -> 1, d -> null, d -> { }).toBlocking().lastOption();
}
@Test(expected = NullPointerException.class)
public void usingDisposeNull() {
Observable.using(() -> 1, d -> just1, null);
}
@Test(expected = NullPointerException.class)
public void zipIterableNull() {
Observable.zip((Iterable<Publisher<Object>>)null, v -> 1);
}
@Test(expected = NullPointerException.class)
public void zipIterableIteratorNull() {
Observable.zip(() -> null, v -> 1).toBlocking().lastOption();
}
@Test(expected = NullPointerException.class)
public void zipIterableFunctionNull() {
Observable.zip(Arrays.asList(just1, just1), null);
}
@Test(expected = NullPointerException.class)
public void zipIterableFunctionReturnsNull() {
Observable.zip(Arrays.asList(just1, just1), a -> null).toBlocking().lastOption();
}
@Test(expected = NullPointerException.class)
public void zipPublisherNull() {
Observable.zip((Publisher<Publisher<Object>>)null, a -> 1);
}
@Test(expected = NullPointerException.class)
public void zipPublisherFunctionNull() {
Observable.zip((Observable.just(just1)), null);
}
@Test(expected = NullPointerException.class)
public void zipPublisherFunctionReturnsNull() {
Observable.zip((Observable.just(just1)), a -> null).toBlocking().lastOption();
}
@Test(expected = NullPointerException.class)
public void zipIterable2Null() {
Observable.zipIterable(a -> 1, true, 128, (Iterable<Publisher<Object>>)null);
}
@Test(expected = NullPointerException.class)
public void zipIterable2IteratorNull() {
Observable.zipIterable(a -> 1, true, 128, () -> null).toBlocking().lastOption();
}
@Test(expected = NullPointerException.class)
public void zipIterable2FunctionNull() {
Observable.zipIterable(null, true, 128, Arrays.asList(just1, just1));
}
@Test(expected = NullPointerException.class)
public void zipIterable2FunctionReturnsNull() {
Observable.zipIterable(a -> null, true, 128, Arrays.asList(just1, just1)).toBlocking().lastOption();
}
//*************************************************************
// Instance methods
//*************************************************************
@Test(expected = NullPointerException.class)
public void allPredicateNull() {
just1.all(null);
}
@Test(expected = NullPointerException.class)
public void ambWithNull() {
just1.ambWith(null);
}
@Test(expected = NullPointerException.class)
public void anyPredicateNull() {
just1.any(null);
}
@Test(expected = NullPointerException.class)
public void bufferSupplierNull() {
just1.buffer(1, 1, (Supplier<List<Integer>>)null);
}
@Test(expected = NullPointerException.class)
public void bufferSupplierReturnsNull() {
just1.buffer(1, 1, () -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void bufferTimedUnitNull() {
just1.buffer(1L, 1L, null);
}
@Test(expected = NullPointerException.class)
public void bufferTimedSchedulerNull() {
just1.buffer(1L, 1L, TimeUnit.SECONDS, null);
}
@Test(expected = NullPointerException.class)
public void bufferTimedSupplierNull() {
just1.buffer(1L, 1L, TimeUnit.SECONDS, Schedulers.single(), null);
}
@Test(expected = NullPointerException.class)
public void bufferTimedSupplierReturnsNull() {
just1.buffer(1L, 1L, TimeUnit.SECONDS, Schedulers.single(), () -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void bufferOpenCloseOpenNull() {
just1.buffer(null, o -> just1);
}
@Test(expected = NullPointerException.class)
public void bufferOpenCloseCloseNull() {
just1.buffer(just1, (Function<Integer, Publisher<Object>>)null);
}
@Test(expected = NullPointerException.class)
public void bufferOpenCloseCloseReturnsNull() {
just1.buffer(just1, v -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void bufferBoundaryNull() {
just1.buffer((Publisher<Object>)null);
}
@Test(expected = NullPointerException.class)
public void bufferBoundarySupplierNull() {
just1.buffer(just1, (Supplier<List<Integer>>)null);
}
@Test(expected = NullPointerException.class)
public void bufferBoundarySupplierReturnsNull() {
just1.buffer(just1, () -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void bufferBoundarySupplier2Null() {
just1.buffer((Supplier<Publisher<Integer>>)null);
}
@Test(expected = NullPointerException.class)
public void bufferBoundarySupplier2ReturnsNull() {
just1.buffer(() -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void bufferBoundarySupplier2SupplierNull() {
just1.buffer(() -> just1, null);
}
@Test(expected = NullPointerException.class)
public void bufferBoundarySupplier2SupplierReturnsNull() {
just1.buffer(() -> just1, () -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void castNull() {
just1.cast(null);
}
@Test(expected = NullPointerException.class)
public void collectInitialSupplierNull() {
just1.collect((Supplier<Integer>)null, (a, b) -> { });
}
@Test(expected = NullPointerException.class)
public void collectInitialSupplierReturnsNull() {
just1.collect(() -> null, (a, b) -> { }).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void collectInitialCollectorNull() {
just1.collect(() -> 1, null);
}
@Test(expected = NullPointerException.class)
public void collectIntoInitialNull() {
just1.collectInto(null, (a, b) -> { });
}
@Test(expected = NullPointerException.class)
public void collectIntoCollectorNull() {
just1.collectInto(1, null);
}
@Test(expected = NullPointerException.class)
public void composeNull() {
just1.compose(null);
}
@Test(expected = NullPointerException.class)
public void concatMapNull() {
just1.concatMap(null);
}
@Test(expected = NullPointerException.class)
public void concatMapReturnsNull() {
just1.concatMap(v -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void concatMapIterableNull() {
just1.concatMapIterable(null);
}
@Test(expected = NullPointerException.class)
public void concatMapIterableReturnNull() {
just1.concatMapIterable(v -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void concatMapIterableIteratorNull() {
just1.concatMapIterable(v -> () -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void concatWithNull() {
just1.concatWith(null);
}
@Test(expected = NullPointerException.class)
public void containsNull() {
just1.contains(null);
}
@Test(expected = NullPointerException.class)
public void debounceFunctionNull() {
just1.debounce(null);
}
@Test(expected = NullPointerException.class)
public void debounceFunctionReturnsNull() {
just1.debounce(v -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void debounceTimedUnitNull() {
just1.debounce(1, null);
}
@Test(expected = NullPointerException.class)
public void debounceTimedSchedulerNull() {
just1.debounce(1, TimeUnit.SECONDS, null);
}
@Test(expected = NullPointerException.class)
public void defaultIfEmptyNull() {
just1.defaultIfEmpty(null);
}
@Test(expected = NullPointerException.class)
public void delayWithFunctionNull() {
just1.delay(null);
}
@Test(expected = NullPointerException.class)
public void delayWithFunctionReturnsNull() {
just1.delay(v -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void delayTimedUnitNull() {
just1.delay(1, null);
}
@Test(expected = NullPointerException.class)
public void delayTimedSchedulerNull() {
just1.delay(1, TimeUnit.SECONDS, null);
}
@Test(expected = NullPointerException.class)
public void delaySubscriptionTimedUnitNull() {
just1.delaySubscription(1, null);
}
@Test(expected = NullPointerException.class)
public void delaySubscriptionTimedSchedulerNull() {
just1.delaySubscription(1, TimeUnit.SECONDS, null);
}
@Test(expected = NullPointerException.class)
public void delaySubscriptionFunctionNull() {
just1.delaySubscription(null);
}
@Test(expected = NullPointerException.class)
public void delayBothInitialSupplierNull() {
just1.delay(null, v -> just1);
}
@Test(expected = NullPointerException.class)
public void delayBothInitialSupplierReturnsNull() {
just1.delay(() -> null, v -> just1).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void delayBothItemSupplierNull() {
just1.delay(() -> just1, null);
}
@Test(expected = NullPointerException.class)
public void delayBothItemSupplierReturnsNull() {
just1.delay(() -> just1, v -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void distinctFunctionNull() {
just1.distinct(null);
}
@Test(expected = NullPointerException.class)
public void distinctSupplierNull() {
just1.distinct(v -> v, null);
}
@Test(expected = NullPointerException.class)
public void distinctSupplierReturnsNull() {
just1.distinct(v -> v, () -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void distinctFunctionReturnsNull() {
just1.distinct(v -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void distinctUntilChangedFunctionNull() {
just1.distinctUntilChanged(null);
}
@Test(expected = NullPointerException.class)
public void distinctUntilChangedFunctionReturnsNull() {
just1.distinctUntilChanged(v -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void doOnCancelNull() {
just1.doOnCancel(null);
}
@Test(expected = NullPointerException.class)
public void doOnCompleteNull() {
just1.doOnComplete(null);
}
@Test(expected = NullPointerException.class)
public void doOnEachSupplierNull() {
just1.doOnEach((Consumer<Try<Optional<Integer>>>)null);
}
@Test(expected = NullPointerException.class)
public void doOnEachSubscriberNull() {
just1.doOnEach((Subscriber<Integer>)null);
}
@Test(expected = NullPointerException.class)
public void doOnErrorNull() {
just1.doOnError(null);
}
@Test(expected = NullPointerException.class)
public void doOnLifecycleOnSubscribeNull() {
just1.doOnLifecycle(null, v -> { }, () -> { });
}
@Test(expected = NullPointerException.class)
public void doOnLifecycleOnRequestNull() {
just1.doOnLifecycle(s -> { }, null, () -> { });
}
@Test(expected = NullPointerException.class)
public void doOnLifecycleOnCancelNull() {
just1.doOnLifecycle(s -> { }, v -> { }, null);
}
@Test(expected = NullPointerException.class)
public void doOnNextNull() {
just1.doOnNext(null);
}
@Test(expected = NullPointerException.class)
public void doOnRequestNull() {
just1.doOnRequest(null);
}
@Test(expected = NullPointerException.class)
public void doOnSubscribeNull() {
just1.doOnSubscribe(null);
}
@Test(expected = NullPointerException.class)
public void doOnTerminatedNull() {
just1.doOnTerminate(null);
}
@Test(expected = NullPointerException.class)
public void elementAtNull() {
just1.elementAt(1, null);
}
@Test(expected = NullPointerException.class)
public void endWithIterableNull() {
just1.endWith((Iterable<Integer>)null);
}
@Test(expected = NullPointerException.class)
public void endWithIterableIteratorNull() {
just1.endWith(() -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void endWithIterableOneIsNull() {
just1.endWith(Arrays.asList(1, null)).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void endWithPublisherNull() {
just1.endWith((Publisher<Integer>)null);
}
@Test(expected = NullPointerException.class)
public void endWithNull() {
just1.endWith((Integer)null);
}
@Test(expected = NullPointerException.class)
public void endWithArrayNull() {
just1.endWithArray((Integer[])null);
}
@Test(expected = NullPointerException.class)
public void endWithArrayOneIsNull() {
just1.endWithArray(1, null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void filterNull() {
just1.filter(null);
}
@Test(expected = NullPointerException.class)
public void finallyDoNull() {
just1.finallyDo(null);
}
@Test(expected = NullPointerException.class)
public void firstNull() {
just1.first(null);
}
@Test(expected = NullPointerException.class)
public void flatMapNull() {
just1.flatMap(null);
}
@Test(expected = NullPointerException.class)
public void flatMapFunctionReturnsNull() {
just1.flatMap(v -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void flatMapNotificationOnNextNull() {
just1.flatMap(null, e -> just1, () -> just1);
}
@Test(expected = NullPointerException.class)
public void flatMapNotificationOnNextReturnsNull() {
just1.flatMap(v -> null, e -> just1, () -> just1).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void flatMapNotificationOnErrorNull() {
just1.flatMap(v -> just1, null, () -> just1);
}
@Test(expected = NullPointerException.class)
public void flatMapNotificationOnErrorReturnsNull() {
Observable.error(new TestException()).flatMap(v -> just1, e -> null, () -> just1).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void flatMapNotificationOnCompleteNull() {
just1.flatMap(v -> just1, e -> just1, null);
}
@Test(expected = NullPointerException.class)
public void flatMapNotificationOnCompleteReturnsNull() {
just1.flatMap(v -> just1, e -> just1, () -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void flatMapCombinerMapperNull() {
just1.flatMap(null, (a, b) -> 1);
}
@Test(expected = NullPointerException.class)
public void flatMapCombinerMapperReturnsNull() {
just1.flatMap(v -> null, (a, b) -> 1).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void flatMapCombinerCombinerNull() {
just1.flatMap(v -> just1, null);
}
@Test(expected = NullPointerException.class)
public void flatMapCombinerCombinerReturnsNull() {
just1.flatMap(v -> just1, (a, b) -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void flatMapIterableMapperNull() {
just1.flatMapIterable(null);
}
@Test(expected = NullPointerException.class)
public void flatMapIterableMapperReturnsNull() {
just1.flatMapIterable(v -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void flatMapIterableMapperIteratorNull() {
just1.flatMapIterable(v -> () -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void flatMapIterableMapperIterableOneNull() {
just1.flatMapIterable(v -> Arrays.asList(1, null)).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void flatMapIterableCombinerNull() {
just1.flatMapIterable(v -> Arrays.asList(1), null);
}
@Test(expected = NullPointerException.class)
public void flatMapIterableCombinerReturnsNull() {
just1.flatMapIterable(v -> Arrays.asList(1), (a, b) -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void forEachNull() {
just1.forEach(null);
}
@Test(expected = NullPointerException.class)
public void forEachWhileNull() {
just1.forEachWhile(null);
}
@Test(expected = NullPointerException.class)
public void forEachWhileOnErrorNull() {
just1.forEachWhile(v -> true, null);
}
@Test(expected = NullPointerException.class)
public void forEachWhileOnCompleteNull() {
just1.forEachWhile(v -> true, e-> { }, null);
}
@Test(expected = NullPointerException.class)
public void groupByNull() {
just1.groupBy(null);
}
/**
 * A key selector that returns null must cause {@code groupBy} to signal a
 * NullPointerException.
 * <p>
 * Fix: this method was missing its {@code @Test} annotation (every sibling
 * test in this class has one), so JUnit silently never executed it and the
 * null-key check went unverified.
 */
@Test(expected = NullPointerException.class)
public void groupByKeyNull() {
    just1.groupBy(v -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void groupByValueNull() {
just1.groupBy(v -> v, null);
}
@Test(expected = NullPointerException.class)
public void groupByValueReturnsNull() {
just1.groupBy(v -> v, v -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void lastNull() {
just1.last(null);
}
@Test(expected = NullPointerException.class)
public void liftNull() {
just1.lift(null);
}
@Test(expected = NullPointerException.class)
public void liftReturnsNull() {
just1.lift(s -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void mapNull() {
just1.map(null);
}
@Test(expected = NullPointerException.class)
public void mapReturnsNull() {
just1.map(v -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void mergeWithNull() {
just1.mergeWith(null);
}
@Test(expected = NullPointerException.class)
public void observeOnNull() {
just1.observeOn(null);
}
@Test(expected = NullPointerException.class)
public void ofTypeNull() {
just1.ofType(null);
}
@Test(expected = NullPointerException.class)
public void onBackpressureBufferOverflowNull() {
just1.onBackpressureBuffer(10, null);
}
@Test(expected = NullPointerException.class)
public void onBackpressureDropActionNull() {
just1.onBackpressureDrop(null);
}
@Test(expected = NullPointerException.class)
public void onErrorResumeNextFunctionNull() {
just1.onErrorResumeNext((Function<Throwable, Publisher<Integer>>)null);
}
@Test(expected = NullPointerException.class)
public void onErrorResumeNextFunctionReturnsNull() {
Observable.error(new TestException()).onErrorResumeNext(e -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void onErrorResumeNextPublisherNull() {
just1.onErrorResumeNext((Publisher<Integer>)null);
}
@Test(expected = NullPointerException.class)
public void onErrorReturnFunctionNull() {
just1.onErrorReturn(null);
}
@Test(expected = NullPointerException.class)
public void onErrorReturnValueNull() {
just1.onErrorReturnValue(null);
}
@Test(expected = NullPointerException.class)
public void onErrorReturnFunctionReturnsNull() {
Observable.error(new TestException()).onErrorReturn(e -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void onExceptionResumeNext() {
just1.onExceptionResumeNext(null);
}
@Test(expected = NullPointerException.class)
public void publishFunctionNull() {
just1.publish(null);
}
@Test(expected = NullPointerException.class)
public void publishFunctionReturnsNull() {
just1.publish(v -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void reduceFunctionNull() {
just1.reduce(null);
}
@Test(expected = NullPointerException.class)
public void reduceFunctionReturnsNull() {
Observable.just(1, 1).reduce((a, b) -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void reduceSeedNull() {
just1.reduce(null, (a, b) -> 1);
}
@Test(expected = NullPointerException.class)
public void reduceSeedFunctionNull() {
just1.reduce(1, null);
}
@Test(expected = NullPointerException.class)
public void reduceSeedFunctionReturnsNull() {
just1.reduce(1, (a, b) -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void reduceWithSeedNull() {
just1.reduceWith(null, (a, b) -> 1);
}
@Test(expected = NullPointerException.class)
public void reduceWithSeedReturnsNull() {
just1.reduceWith(() -> null, (a, b) -> 1).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void repeatUntilNull() {
just1.repeatUntil(null);
}
@Test(expected = NullPointerException.class)
public void repeatWhenNull() {
just1.repeatWhen(null);
}
@Test(expected = NullPointerException.class)
public void repeatWhenFunctionReturnsNull() {
just1.repeatWhen(v -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void replaySelectorNull() {
just1.replay((Function<Observable<Integer>, Observable<Integer>>)null);
}
@Test(expected = NullPointerException.class)
public void replaySelectorReturnsNull() {
just1.replay(o -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void replayBoundedSelectorNull() {
just1.replay((Function<Observable<Integer>, Observable<Integer>>)null, 1, 1, TimeUnit.SECONDS);
}
@Test(expected = NullPointerException.class)
public void replayBoundedSelectorReturnsNull() {
just1.replay(v -> null, 1, 1, TimeUnit.SECONDS).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void replaySchedulerNull() {
just1.replay((Scheduler)null);
}
@Test(expected = NullPointerException.class)
public void replayBoundedUnitNull() {
just1.replay(v -> v, 1, 1, null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void replayBoundedSchedulerNull() {
just1.replay(v -> v, 1, 1, TimeUnit.SECONDS, null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void replayTimeBoundedSelectorNull() {
just1.replay(null, 1, TimeUnit.SECONDS, Schedulers.single());
}
@Test(expected = NullPointerException.class)
public void replayTimeBoundedSelectorReturnsNull() {
just1.replay(v -> null, 1, TimeUnit.SECONDS, Schedulers.single()).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void replaySelectorTimeBoundedUnitNull() {
just1.replay(v -> v, 1, null, Schedulers.single());
}
@Test(expected = NullPointerException.class)
public void replaySelectorTimeBoundedSchedulerNull() {
just1.replay(v -> v, 1, TimeUnit.SECONDS, null);
}
@Test(expected = NullPointerException.class)
public void replayTimeSizeBoundedUnitNull() {
just1.replay(1, 1, null, Schedulers.single());
}
@Test(expected = NullPointerException.class)
public void replayTimeSizeBoundedSchedulerNull() {
just1.replay(1, 1, TimeUnit.SECONDS, null);
}
@Test(expected = NullPointerException.class)
public void replayBufferSchedulerNull() {
just1.replay(1, (Scheduler)null);
}
@Test(expected = NullPointerException.class)
public void replayTimeBoundedUnitNull() {
just1.replay(1, null, Schedulers.single());
}
@Test(expected = NullPointerException.class)
public void replayTimeBoundedSchedulerNull() {
just1.replay(1, TimeUnit.SECONDS, null);
}
@Test(expected = NullPointerException.class)
public void retryFunctionNull() {
just1.retry((BiPredicate<Integer, Throwable>)null);
}
@Test(expected = NullPointerException.class)
public void retryCountFunctionNull() {
just1.retry(1, null);
}
@Test(expected = NullPointerException.class)
public void retryPredicateNull() {
just1.retry((Predicate<Throwable>)null);
}
@Test(expected = NullPointerException.class)
public void retryWhenFunctionNull() {
just1.retryWhen(null);
}
@Test(expected = NullPointerException.class)
public void retryWhenFunctionReturnsNull() {
Observable.error(new TestException()).retryWhen(f -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void retryUntil() {
just1.retryUntil(null);
}
@Test(expected = NullPointerException.class)
public void safeSubscribeNull() {
just1.safeSubscribe(null);
}
@Test(expected = NullPointerException.class)
public void sampleUnitNull() {
just1.sample(1, null);
}
@Test(expected = NullPointerException.class)
public void sampleSchedulerNull() {
just1.sample(1, TimeUnit.SECONDS, null);
}
@Test(expected = NullPointerException.class)
public void samplePublisherNull() {
just1.sample(null);
}
@Test(expected = NullPointerException.class)
public void scanFunctionNull() {
just1.scan(null);
}
@Test(expected = NullPointerException.class)
public void scanFunctionReturnsNull() {
Observable.just(1, 1).scan((a, b) -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void scanSeedNull() {
just1.scan(null, (a, b) -> 1);
}
@Test(expected = NullPointerException.class)
public void scanSeedFunctionNull() {
just1.scan(1, null);
}
@Test(expected = NullPointerException.class)
public void scanSeedFunctionReturnsNull() {
just1.scan(1, (a, b) -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void scanSeedSupplierNull() {
just1.scanWith(null, (a, b) -> 1);
}
@Test(expected = NullPointerException.class)
public void scanSeedSupplierReturnsNull() {
just1.scanWith(() -> null, (a, b) -> 1).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void scanSeedSupplierFunctionNull() {
just1.scanWith(() -> 1, null);
}
@Test(expected = NullPointerException.class)
public void scanSeedSupplierFunctionReturnsNull() {
just1.scanWith(() -> 1, (a, b) -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void singleNull() {
just1.single(null);
}
@Test(expected = NullPointerException.class)
public void skipTimedUnitNull() {
just1.skip(1, null, Schedulers.single());
}
@Test(expected = NullPointerException.class)
public void skipTimedSchedulerNull() {
just1.skip(1, TimeUnit.SECONDS, null);
}
@Test(expected = NullPointerException.class)
public void skipLastTimedUnitNull() {
just1.skipLast(1, null, Schedulers.single());
}
@Test(expected = NullPointerException.class)
public void skipLastTimedSchedulerNull() {
just1.skipLast(1, TimeUnit.SECONDS, null);
}
@Test(expected = NullPointerException.class)
public void skipUntilNull() {
just1.skipUntil(null);
}
@Test(expected = NullPointerException.class)
public void skipWhileNull() {
just1.skipWhile(null);
}
@Test(expected = NullPointerException.class)
public void startWithIterableNull() {
just1.startWith((Iterable<Integer>)null);
}
@Test(expected = NullPointerException.class)
public void startWithIterableIteratorNull() {
just1.startWith(() -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void startWithIterableOneNull() {
just1.startWith(Arrays.asList(1, null)).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void startWithSingleNull() {
just1.startWith((Integer)null);
}
@Test(expected = NullPointerException.class)
public void startWithPublisherNull() {
just1.startWith((Publisher<Integer>)null);
}
@Test(expected = NullPointerException.class)
public void startWithArrayNull() {
just1.startWithArray((Integer[])null);
}
@Test(expected = NullPointerException.class)
public void startWithArrayOneNull() {
just1.startWithArray(1, null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void subscribeOnNextNull() {
just1.subscribe((Consumer<Integer>)null);
}
@Test(expected = NullPointerException.class)
public void subscribeOnErrorNull() {
just1.subscribe(e -> { }, null);
}
@Test(expected = NullPointerException.class)
public void subscribeOnCompleteNull() {
just1.subscribe(e -> { }, e -> { }, null);
}
@Test(expected = NullPointerException.class)
public void subscribeOnSubscribeNull() {
just1.subscribe(e -> { }, e -> { }, () -> { }, null);
}
@Test(expected = NullPointerException.class)
public void subscribeNull() {
just1.subscribe((Subscriber<Integer>)null);
}
@Test(expected = NullPointerException.class)
public void subscribeOnNull() {
just1.subscribeOn(null);
}
@Test(expected = NullPointerException.class)
public void switchIfEmptyNull() {
just1.switchIfEmpty(null);
}
@Test(expected = NullPointerException.class)
public void switchMapNull() {
just1.switchMap(null);
}
@Test(expected = NullPointerException.class)
public void switchMapFunctionReturnsNull() {
just1.switchMap(v -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void takeTimedUnitNull() {
just1.take(1, null, Schedulers.single());
}
@Test(expected = NullPointerException.class)
public void takeTimedSchedulerNull() {
just1.take(1, TimeUnit.SECONDS, null);
}
@Test(expected = NullPointerException.class)
public void takeFirstNull() {
just1.takeFirst(null);
}
@Test(expected = NullPointerException.class)
public void takeLastTimedUnitNull() {
just1.takeLast(1, null, Schedulers.single());
}
@Test(expected = NullPointerException.class)
public void takeLastSizeTimedUnitNull() {
just1.takeLast(1, 1, null, Schedulers.single());
}
@Test(expected = NullPointerException.class)
public void takeLastTimedSchedulerNull() {
just1.takeLast(1, TimeUnit.SECONDS, null);
}
@Test(expected = NullPointerException.class)
public void takeLastSizeTimedSchedulerNull() {
just1.takeLast(1, 1, TimeUnit.SECONDS, null);
}
@Test(expected = NullPointerException.class)
public void takeLastBufferTimedUnitNull() {
just1.takeLastBuffer(1, null, Schedulers.single());
}
@Test(expected = NullPointerException.class)
public void takeLastBufferTimedSchedulerNull() {
just1.takeLastBuffer(1, TimeUnit.SECONDS, null);
}
@Test(expected = NullPointerException.class)
public void takeLastBufferSizeTimedUnitNull() {
just1.takeLastBuffer(1, 1, null, Schedulers.single());
}
@Test(expected = NullPointerException.class)
public void takeLastBufferSizeTimedSchedulerNull() {
just1.takeLastBuffer(1, 1, TimeUnit.SECONDS, null);
}
@Test(expected = NullPointerException.class)
public void takeUntilPredicateNull() {
just1.takeUntil((Predicate<Integer>)null);
}
@Test(expected = NullPointerException.class)
public void takeUntilPublisherNull() {
just1.takeUntil((Publisher<Integer>)null);
}
@Test(expected = NullPointerException.class)
public void takeWhileNull() {
just1.takeWhile(null);
}
@Test(expected = NullPointerException.class)
public void throttleFirstUnitNull() {
just1.throttleFirst(1, null, Schedulers.single());
}
@Test(expected = NullPointerException.class)
public void throttleFirstSchedulerNull() {
just1.throttleFirst(1, TimeUnit.SECONDS, null);
}
@Test(expected = NullPointerException.class)
public void throttleLastUnitNull() {
just1.throttleLast(1, null, Schedulers.single());
}
@Test(expected = NullPointerException.class)
public void throttleLastSchedulerNull() {
just1.throttleLast(1, TimeUnit.SECONDS, null);
}
@Test(expected = NullPointerException.class)
public void throttleWithTimeoutUnitNull() {
just1.throttleWithTimeout(1, null, Schedulers.single());
}
@Test(expected = NullPointerException.class)
public void throttleWithTimeoutSchedulerNull() {
just1.throttleWithTimeout(1, TimeUnit.SECONDS, null);
}
@Test(expected = NullPointerException.class)
public void timeIntervalUnitNull() {
just1.timeInterval(null, Schedulers.single());
}
@Test(expected = NullPointerException.class)
public void timeIntervalSchedulerNull() {
just1.timeInterval(TimeUnit.SECONDS, null);
}
@Test(expected = NullPointerException.class)
public void timeoutSelectorNull() {
just1.timeout(null);
}
@Test(expected = NullPointerException.class)
public void timeoutSelectorReturnsNull() {
just1.timeout(v -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void timeoutSelectorOtherNull() {
just1.timeout(v -> just1, null);
}
@Test(expected = NullPointerException.class)
public void timeoutUnitNull() {
just1.timeout(1, null, just1, Schedulers.single());
}
@Test(expected = NullPointerException.class)
public void timeoutOtherNull() {
    // Renamed from "timeouOtherNull" (typo). A null fallback Publisher must be rejected.
    just1.timeout(1, TimeUnit.SECONDS, null, Schedulers.single());
}
@Test(expected = NullPointerException.class)
public void timeoutSchedulerNull() {
    // Renamed from "timeouSchedulerNull" (typo). A null Scheduler must be rejected.
    just1.timeout(1, TimeUnit.SECONDS, just1, null);
}
@Test(expected = NullPointerException.class)
public void timeoutFirstNull() {
just1.timeout((Supplier<Publisher<Integer>>)null, v -> just1);
}
@Test(expected = NullPointerException.class)
public void timeoutFirstReturnsNull() {
just1.timeout(() -> null, v -> just1).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void timeoutFirstItemNull() {
just1.timeout(() -> just1, null);
}
@Test(expected = NullPointerException.class)
public void timeoutFirstItemReturnsNull() {
just1.timeout(() -> just1, v -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void timestampUnitNull() {
just1.timestamp(null, Schedulers.single());
}
@Test(expected = NullPointerException.class)
public void timestampSchedulerNull() {
just1.timestamp(TimeUnit.SECONDS, null);
}
@Test(expected = NullPointerException.class)
public void toNull() {
just1.to(null);
}
@Test(expected = NullPointerException.class)
public void toListNull() {
just1.toList(null);
}
@Test(expected = NullPointerException.class)
public void toListSupplierReturnsNull() {
just1.toList(() -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void toSortedListNull() {
just1.toSortedList(null);
}
@Test
public void toMapKeyNullAllowed() {
just1.toMap(null);
}
@Test(expected = NullPointerException.class)
public void toMapValueNull() {
just1.toMap(v -> v, null);
}
@Test
public void toMapValueSelectorReturnsNull() {
just1.toMap(v -> v, v -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void toMapMapSupplierNull() {
just1.toMap(v -> v, v -> v, null);
}
@Test(expected = NullPointerException.class)
public void toMapMapSupplierReturnsNull() {
just1.toMap(v -> v, v -> v, () -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void toMultimapKeyNull() {
just1.toMultimap(null);
}
@Test(expected = NullPointerException.class)
public void toMultimapValueNull() {
just1.toMultimap(v -> v, null);
}
@Test
public void toMultiMapValueSelectorReturnsNullAllowed() {
    // Fix: this test called toMap(...), duplicating toMapValueSelectorReturnsNull
    // above and leaving toMultimap's null-value behavior untested. The method name
    // and its position among the toMultimap tests show toMultimap was intended.
    just1.toMultimap(v -> v, v -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void toMultimapMapMapSupplierNull() {
just1.toMultimap(v -> v, v -> v, null);
}
@Test(expected = NullPointerException.class)
public void toMultimapMapSupplierReturnsNull() {
just1.toMultimap(v -> v, v -> v, () -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void toMultimapMapMapCollectionSupplierNull() {
just1.toMultimap(v -> v, v -> v, () -> new HashMap<>(), null);
}
@Test(expected = NullPointerException.class)
public void toMultimapMapCollectionSupplierReturnsNull() {
just1.toMultimap(v -> v, v -> v, () -> new HashMap<>(), v -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void unsafeSubscribeNull() {
just1.unsafeSubscribe(null);
}
@Test(expected = NullPointerException.class)
public void unsubscribeOnNull() {
just1.unsubscribeOn(null);
}
@Test(expected = NullPointerException.class)
public void windowTimedUnitNull() {
just1.window(1, null, Schedulers.single());
}
@Test(expected = NullPointerException.class)
public void windowSizeTimedUnitNull() {
just1.window(1, null, Schedulers.single(), 1);
}
@Test(expected = NullPointerException.class)
public void windowTimedSchedulerNull() {
just1.window(1, TimeUnit.SECONDS, null);
}
@Test(expected = NullPointerException.class)
public void windowSizeTimedSchedulerNull() {
just1.window(1, TimeUnit.SECONDS, null, 1);
}
@Test(expected = NullPointerException.class)
public void windowBoundaryNull() {
just1.window((Publisher<Integer>)null);
}
@Test(expected = NullPointerException.class)
public void windowOpenCloseOpenNull() {
just1.window(null, v -> just1);
}
@Test(expected = NullPointerException.class)
public void windowOpenCloseCloseNull() {
just1.window(just1, null);
}
@Test(expected = NullPointerException.class)
public void windowOpenCloseCloseReturnsNull() {
Observable.never().window(just1, v -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void windowBoundarySupplierNull() {
just1.window((Supplier<Publisher<Integer>>)null);
}
@Test(expected = NullPointerException.class)
public void windowBoundarySupplierReturnsNull() {
just1.window(() -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void withLatestFromOtherNull() {
just1.withLatestFrom(null, (a, b) -> 1);
}
@Test(expected = NullPointerException.class)
public void withLatestFromCombinerNull() {
just1.withLatestFrom(just1, null);
}
@Test(expected = NullPointerException.class)
public void withLatestFromCombinerReturnsNull() {
just1.withLatestFrom(just1, (a, b) -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void zipWithIterableNull() {
just1.zipWith((Iterable<Integer>)null, (a, b) -> 1);
}
@Test(expected = NullPointerException.class)
public void zipWithIterableCombinerNull() {
just1.zipWith(Arrays.asList(1), null);
}
@Test(expected = NullPointerException.class)
public void zipWithIterableCombinerReturnsNull() {
just1.zipWith(Arrays.asList(1), (a, b) -> null).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void zipWithIterableIteratorNull() {
just1.zipWith(() -> null, (a, b) -> 1).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void zipWithIterableOneIsNull() {
Observable.just(1, 2).zipWith(Arrays.asList(1, null), (a, b) -> 1).toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void zipWithPublisherNull() {
just1.zipWith((Publisher<Integer>)null, (a, b) -> 1);
}
@Test(expected = NullPointerException.class)
public void zipWithCombinerNull() {
just1.zipWith(just1, null);
}
@Test(expected = NullPointerException.class)
public void zipWithCombinerReturnsNull() {
just1.zipWith(just1, (a, b) -> null).toBlocking().run();
}
//*********************************************
// Subject null tests
//*********************************************
@Test(expected = NullPointerException.class)
public void asyncSubjectOnNextNull() {
Subject<Integer, Integer> subject = AsyncSubject.create();
subject.onNext(null);
subject.toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void asyncSubjectOnErrorNull() {
Subject<Integer, Integer> subject = AsyncSubject.create();
subject.onError(null);
subject.toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void behaviorSubjectOnNextNull() {
Subject<Integer, Integer> subject = BehaviorSubject.create();
subject.onNext(null);
subject.toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void behaviorSubjectOnErrorNull() {
Subject<Integer, Integer> subject = BehaviorSubject.create();
subject.onError(null);
subject.toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void publishSubjectOnNextNull() {
Subject<Integer, Integer> subject = PublishSubject.create();
subject.onNext(null);
subject.toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void publishSubjectOnErrorNull() {
Subject<Integer, Integer> subject = PublishSubject.create();
subject.onError(null);
subject.toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void replaySubjectOnNextNull() {
    // Renamed from "replaycSubjectOnNextNull" (typo) to match the
    // replaySubjectOnErrorNull sibling. onNext(null) must throw NPE.
    Subject<Integer, Integer> subject = ReplaySubject.create();
    subject.onNext(null);
    subject.toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void replaySubjectOnErrorNull() {
Subject<Integer, Integer> subject = ReplaySubject.create();
subject.onError(null);
subject.toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void serializedSubjectOnNextNull() {
    // Renamed from "serializedcSubjectOnNextNull" (typo) to match the
    // serializedSubjectOnErrorNull sibling. onNext(null) must throw NPE.
    Subject<Integer, Integer> subject = PublishSubject.<Integer>create().toSerialized();
    subject.onNext(null);
    subject.toBlocking().run();
}
@Test(expected = NullPointerException.class)
public void serializedSubjectOnErrorNull() {
Subject<Integer, Integer> subject = PublishSubject.<Integer>create().toSerialized();
subject.onError(null);
subject.toBlocking().run();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.segment.file.tooling;
import static java.text.DateFormat.getDateTimeInstance;
import static org.apache.jackrabbit.oak.api.Type.BINARIES;
import static org.apache.jackrabbit.oak.api.Type.BINARY;
import static org.apache.jackrabbit.oak.commons.IOUtils.humanReadableByteCount;
import static org.apache.jackrabbit.oak.commons.PathUtils.concat;
import static org.apache.jackrabbit.oak.commons.PathUtils.denotesRoot;
import static org.apache.jackrabbit.oak.commons.PathUtils.getName;
import static org.apache.jackrabbit.oak.commons.PathUtils.getParentPath;
import static org.apache.jackrabbit.oak.segment.file.FileStoreBuilder.fileStoreBuilder;
import static org.apache.jackrabbit.oak.spi.state.NodeStateUtils.getNode;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Collectors;
import com.google.common.collect.Sets;
import org.apache.jackrabbit.oak.api.Blob;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.segment.SegmentBlob;
import org.apache.jackrabbit.oak.segment.SegmentNodeStore;
import org.apache.jackrabbit.oak.segment.SegmentNodeStoreBuilders;
import org.apache.jackrabbit.oak.segment.SegmentNotFoundException;
import org.apache.jackrabbit.oak.segment.file.FileStore;
import org.apache.jackrabbit.oak.segment.file.FileStoreBuilder;
import org.apache.jackrabbit.oak.segment.file.InvalidFileStoreVersionException;
import org.apache.jackrabbit.oak.segment.file.JournalEntry;
import org.apache.jackrabbit.oak.segment.file.JournalReader;
import org.apache.jackrabbit.oak.segment.file.ReadOnlyFileStore;
import org.apache.jackrabbit.oak.segment.spi.monitor.IOMonitorAdapter;
import org.apache.jackrabbit.oak.segment.file.tar.LocalJournalFile;
import org.apache.jackrabbit.oak.spi.state.ChildNodeEntry;
import org.apache.jackrabbit.oak.spi.state.NodeState;
/**
* Utility for checking the files of a
* {@link FileStore} for inconsistency and
* reporting that latest consistent revision.
*/
public class ConsistencyChecker implements Closeable {
private static final String CHECKPOINT_INDENT = " ";
private static final String NO_INDENT = "";
/**
 * I/O monitor that accumulates, across all segment reads, the number of read
 * operations, the total bytes read and the total time spent reading. The
 * totals are reported at the end of the consistency check when the checker
 * was created with {@code ioStatistics == true}.
 */
private static class StatisticsIOMonitor extends IOMonitorAdapter {
    // Number of segment read operations observed.
    private final AtomicLong ioOperations = new AtomicLong(0);
    // Total number of bytes read from segments.
    private final AtomicLong readBytes = new AtomicLong(0);
    // Total elapsed read time in nanoseconds (see "Total time: {0} ns" report below).
    private final AtomicLong readTime = new AtomicLong(0);
    @Override
    public void afterSegmentRead(File file, long msb, long lsb, int length, long elapsed) {
        ioOperations.incrementAndGet();
        readBytes.addAndGet(length);
        readTime.addAndGet(elapsed);
    }
}
// Gathers I/O statistics; only attached to the store when ioStatistics is requested.
private final StatisticsIOMonitor statisticsIOMonitor = new StatisticsIOMonitor();
// Read-only view of the file store; its revision is switched per journal entry.
private final ReadOnlyFileStore store;
// Seconds between progress messages during the full traversal phase.
private final long debugInterval;
// Destination for normal progress/result output.
private final PrintWriter outWriter;
// Destination for error output.
private final PrintWriter errWriter;
private int nodeCount;
private int propertyCount;
// Number of (path, head-or-checkpoint) pairs still awaiting a consistent revision;
// decremented in checkPathsAtRoot, the journal scan stops when it reaches zero.
private int checkCount;
/**
 * Run a full traversal consistency check.
 *
 * @param directory directory containing the tar files
 * @param journalFileName name of the journal file containing the revision history
 * @param debugInterval number of seconds between printing progress information to
 *                      the console during the full traversal phase.
 * @param checkBinaries if {@code true} full content of binary properties will be scanned
 * @param checkHead if {@code true} will check the head
 * @param checkpoints collection of checkpoints to be checked; the single entry
 *                    {@code "all"} expands to every checkpoint in the store
 * @param filterPaths collection of repository paths to be checked
 * @param ioStatistics if {@code true} prints I/O statistics gathered while consistency
 *                     check was performed
 * @param outWriter text output stream writer
 * @param errWriter text error stream writer
 * @throws IOException if the store or the journal cannot be read
 * @throws InvalidFileStoreVersionException if the store has an unsupported version
 */
public static void checkConsistency(
    File directory,
    String journalFileName,
    long debugInterval,
    boolean checkBinaries,
    boolean checkHead,
    Set<String> checkpoints,
    Set<String> filterPaths,
    boolean ioStatistics,
    PrintWriter outWriter,
    PrintWriter errWriter
) throws IOException, InvalidFileStoreVersionException {
    try (
        JournalReader journal = new JournalReader(new LocalJournalFile(directory, journalFileName));
        ConsistencyChecker checker = new ConsistencyChecker(directory, debugInterval, ioStatistics, outWriter, errWriter)
    ) {
        Set<String> checkpointsSet = Sets.newLinkedHashSet();
        List<PathToCheck> headPaths = new ArrayList<>();
        Map<String, List<PathToCheck>> checkpointPaths = new HashMap<>();
        int revisionCount = 0;
        if (!checkpoints.isEmpty()) {
            checkpointsSet.addAll(checkpoints);
            // "all" is a wildcard: replace it with every checkpoint present in the store.
            if (checkpointsSet.remove("all")) {
                checkpointsSet = Sets
                    .newLinkedHashSet(SegmentNodeStoreBuilders.builder(checker.store).build().checkpoints());
            }
        }
        // Build the work list: one PathToCheck per (path, head) and per (path, checkpoint).
        for (String path : filterPaths) {
            if (checkHead) {
                headPaths.add(new PathToCheck(path, null));
                checker.checkCount++;
            }
            for (String checkpoint : checkpointsSet) {
                // computeIfAbsent replaces the original get/null-check/put sequence.
                checkpointPaths.computeIfAbsent(checkpoint, k -> new ArrayList<>())
                    .add(new PathToCheck(path, checkpoint));
                checker.checkCount++;
            }
        }
        int initialCount = checker.checkCount;
        JournalEntry lastValidJournalEntry = null;
        // Walk the journal entries until every path has been found consistent in
        // some revision (checkCount == 0) or the journal is exhausted.
        while (journal.hasNext() && checker.checkCount > 0) {
            JournalEntry journalEntry = journal.next();
            String revision = journalEntry.getRevision();
            try {
                revisionCount++;
                checker.store.setRevision(revision);
                boolean overallValid = true;
                SegmentNodeStore sns = SegmentNodeStoreBuilders.builder(checker.store).build();
                checker.print("\nChecking revision {0}", revision);
                if (checkHead) {
                    // Only re-check head paths that have not yet been found consistent.
                    boolean mustCheck = headPaths.stream().anyMatch(p -> p.journalEntry == null);
                    if (mustCheck) {
                        checker.print("\nChecking head\n");
                        NodeState root = sns.getRoot();
                        overallValid = overallValid && checker.checkPathsAtRoot(headPaths, root, journalEntry, checkBinaries);
                    }
                }
                if (!checkpointsSet.isEmpty()) {
                    // For each checkpoint, record whether any of its paths still needs checking.
                    Map<String, Boolean> checkpointsToCheck = checkpointPaths.entrySet().stream().collect(Collectors.toMap(
                        Map.Entry::getKey, e -> e.getValue().stream().anyMatch(p -> p.journalEntry == null)));
                    boolean mustCheck = checkpointsToCheck.values().stream().anyMatch(v -> v);
                    if (mustCheck) {
                        checker.print("\nChecking checkpoints");
                        for (String checkpoint : checkpointsSet) {
                            if (checkpointsToCheck.get(checkpoint)) {
                                checker.print("\nChecking checkpoint {0}", checkpoint);
                                List<PathToCheck> pathList = checkpointPaths.get(checkpoint);
                                NodeState root = sns.retrieve(checkpoint);
                                if (root == null) {
                                    checker.printError("Checkpoint {0} not found in this revision!", checkpoint);
                                    overallValid = false;
                                } else {
                                    // NOTE(review): the && short-circuits, so once overallValid is
                                    // false the remaining checkpoints of this revision are skipped
                                    // and retried in an older revision. Preserved as-is — confirm
                                    // this is the intended behavior.
                                    overallValid = overallValid && checker.checkPathsAtRoot(pathList, root,
                                        journalEntry, checkBinaries);
                                }
                            }
                        }
                    }
                }
                // Remember the most recent revision in which everything checked was valid.
                if (overallValid) {
                    lastValidJournalEntry = journalEntry;
                }
            } catch (IllegalArgumentException | SegmentNotFoundException e) {
                // A malformed or unreadable record id invalidates only this revision.
                checker.printError("Skipping invalid record id {0}: {1}", revision, e);
            }
        }
        checker.print("\nSearched through {0} revisions and {1} checkpoints", revisionCount, checkpointsSet.size());
        if (initialCount == checker.checkCount) {
            // Nothing was ever marked consistent.
            checker.print("No good revision found");
        } else {
            if (checkHead) {
                checker.print("\nHead");
                checker.printResults(headPaths, NO_INDENT);
            }
            if (!checkpointsSet.isEmpty()) {
                checker.print("\nCheckpoints");
                for (String checkpoint : checkpointsSet) {
                    List<PathToCheck> pathList = checkpointPaths.get(checkpoint);
                    checker.print("- {0}", checkpoint);
                    checker.printResults(pathList, CHECKPOINT_INDENT);
                }
            }
            checker.print("\nOverall");
            checker.printOverallResults(lastValidJournalEntry);
        }
        if (ioStatistics) {
            checker.print(
                "[I/O] Segment read: Number of operations: {0}",
                checker.statisticsIOMonitor.ioOperations
            );
            checker.print(
                "[I/O] Segment read: Total size: {0} ({1} bytes)",
                humanReadableByteCount(checker.statisticsIOMonitor.readBytes.get()),
                checker.statisticsIOMonitor.readBytes
            );
            checker.print(
                "[I/O] Segment read: Total time: {0} ns",
                checker.statisticsIOMonitor.readTime
            );
        }
    }
}
/**
 * Prints, one line per path, the latest good revision and its timestamp,
 * prefixed with the given indent. Paths that were never found consistent
 * are reported as "none" / "unknown date" via the toString helpers.
 */
private void printResults(List<PathToCheck> pathList, String indent) {
    for (PathToCheck ptc : pathList) {
        JournalEntry entry = ptc.journalEntry;
        String goodRevision = null;
        long goodTimestamp = -1L;
        if (entry != null) {
            goodRevision = entry.getRevision();
            goodTimestamp = entry.getTimestamp();
        }
        print("{0}Latest good revision for path {1} is {2} from {3}",
                indent, ptc.path, toString(goodRevision), toString(goodTimestamp));
    }
}
/**
 * Prints the latest overall good revision, i.e. the most recent journal entry
 * in which every checked path and checkpoint was consistent; a {@code null}
 * entry is reported as "none" / "unknown date".
 */
private void printOverallResults(JournalEntry journalEntry) {
    String revision;
    long timestamp;
    if (journalEntry == null) {
        revision = null;
        timestamp = -1L;
    } else {
        revision = journalEntry.getRevision();
        timestamp = journalEntry.getTimestamp();
    }
    print("Latest good revision for paths and checkpoints checked is {0} from {1}", toString(revision), toString(timestamp));
}
/**
 * Renders a revision for display, substituting "none" for a missing one.
 */
private static String toString(String revision) {
    return revision == null ? "none" : revision;
}
/**
 * Renders a timestamp for display using the default date-time format;
 * the sentinel -1 is rendered as "unknown date".
 */
private static String toString(long timestamp) {
    return timestamp == -1L
            ? "unknown date"
            : getDateTimeInstance().format(new Date(timestamp));
}
/**
 * Create a new consistency checker instance
 *
 * @param directory directory containing the tar files
 * @param debugInterval number of seconds between printing progress information to
 *                      the console during the full traversal phase.
 * @param ioStatistics if {@code true} prints I/O statistics gathered while consistency
 *                     check was performed
 * @param outWriter text output stream writer
 * @param errWriter text error stream writer
 * @throws IOException if the file store cannot be opened
 * @throws InvalidFileStoreVersionException if the store has an unsupported version
 */
public ConsistencyChecker(File directory, long debugInterval, boolean ioStatistics, PrintWriter outWriter,
    PrintWriter errWriter) throws IOException, InvalidFileStoreVersionException {
    FileStoreBuilder builder = fileStoreBuilder(directory);
    // Only attach the statistics monitor when I/O statistics were requested,
    // to avoid the bookkeeping overhead otherwise.
    if (ioStatistics) {
        builder.withIOMonitor(statisticsIOMonitor);
    }
    // Read-only view: the checker never modifies the store.
    this.store = builder.buildReadOnly();
    this.debugInterval = debugInterval;
    this.outWriter = outWriter;
    this.errWriter = errWriter;
}
/**
 * Checks for consistency a list of paths, relative to the same root.
 *
 * @param paths paths to check
 * @param root root relative to which the paths are retrieved
 * @param journalEntry entry containing the current revision checked
 * @param checkBinaries if {@code true} full content of binary properties will be scanned
 * @return {@code true}, if the whole list of paths is consistent
 */
private boolean checkPathsAtRoot(List<PathToCheck> paths, NodeState root, JournalEntry journalEntry,
        boolean checkBinaries) {
    boolean allConsistent = true;
    for (PathToCheck ptc : paths) {
        if (ptc.journalEntry != null) {
            // Already found consistent in a previously checked revision.
            continue;
        }
        String corruptPath = checkPathAtRoot(ptc, root, checkBinaries);
        if (corruptPath != null) {
            ptc.corruptPaths.add(corruptPath);
            allConsistent = false;
        } else {
            print("Path {0} is consistent", ptc.path);
            ptc.journalEntry = journalEntry;
            checkCount--;
        }
    }
    return allConsistent;
}
/**
 * Checks the consistency of the supplied {@code ptc} relative to the given {@code root}.
 *
 * @param ptc path to check, provided there are no corrupt paths.
 * @param root root relative to which the path is retrieved
 * @param checkBinaries if {@code true} full content of binary properties will be scanned
 * @return {@code null}, if the content tree rooted at path (possibly under a checkpoint)
 *         is consistent in this revision or the path of the first inconsistency otherwise.
 */
private String checkPathAtRoot(PathToCheck ptc, NodeState root, boolean checkBinaries) {
    // First re-verify paths that were found corrupt in previously checked
    // revisions; bail out on the first one that is still corrupt.
    for (String corruptPath : ptc.corruptPaths) {
        try {
            NodeWrapper wrapper = NodeWrapper.deriveTraversableNodeOnPath(root, corruptPath);
            String stillCorrupt = checkNode(wrapper.node, wrapper.path, checkBinaries);
            if (stillCorrupt != null) {
                return stillCorrupt;
            }
        } catch (IllegalArgumentException e) {
            debug("Path {0} not found", corruptPath);
        }
    }
    // Full traversal of the requested path; counters feed the summary line.
    nodeCount = 0;
    propertyCount = 0;
    print("Checking {0}", ptc.path);
    try {
        NodeWrapper wrapper = NodeWrapper.deriveTraversableNodeOnPath(root, ptc.path);
        String corrupt = checkNodeAndDescendants(wrapper.node, wrapper.path, checkBinaries);
        print("Checked {0} nodes and {1} properties", nodeCount, propertyCount);
        return corrupt;
    } catch (IllegalArgumentException e) {
        printError("Path {0} not found", ptc.path);
        return ptc.path;
    }
}
/**
 * Checks the consistency of a node and its properties at the given path.
 *
 * @param node node to be checked
 * @param path path of the node
 * @param checkBinaries if {@code true} full content of binary properties will be scanned
 * @return {@code null}, if the node is consistent,
 *         or the path of the first inconsistency otherwise.
 */
private String checkNode(NodeState node, String path, boolean checkBinaries) {
    try {
        debug("Traversing {0}", path);
        nodeCount++;
        for (PropertyState property : node.getProperties()) {
            Type<?> type = property.getType();
            boolean checked;
            if (type == BINARY) {
                checked = traverse(property.getValue(BINARY), checkBinaries);
            } else if (type == BINARIES) {
                checked = false;
                for (Blob blob : property.getValue(BINARIES)) {
                    // Non-short-circuiting on purpose: every blob is traversed.
                    checked |= traverse(blob, checkBinaries);
                }
            } else {
                // Reading the value is the consistency check for scalars.
                property.getValue(type);
                propertyCount++;
                checked = true;
            }
            if (checked) {
                debug("Checked {0}/{1}", path, property);
            }
        }
        return null;
    } catch (RuntimeException | IOException e) {
        printError("Error while traversing {0}: {1}", path, e);
        return path;
    }
}
/**
 * Recursively checks the consistency of a node and its descendants at the given path.
 * @param node node to be checked
 * @param path path of the node
 * @param checkBinaries if {@code true} full content of binary properties will be scanned
 * @return {@code null}, if the node is consistent,
 *         or the path of the first inconsistency otherwise.
 */
private String checkNodeAndDescendants(NodeState node, String path, boolean checkBinaries) {
    String result = checkNode(node, path, checkBinaries);
    if (result != null) {
        return result;
    }
    try {
        for (ChildNodeEntry cne : node.getChildNodeEntries()) {
            String childName = cne.getName();
            NodeState child = cne.getNodeState();
            result = checkNodeAndDescendants(child, concat(path, childName), checkBinaries);
            if (result != null) {
                return result;
            }
        }
        return null;
    } catch (RuntimeException e) {
        // Report the full exception rather than only getMessage() (which may
        // be null); this is also consistent with checkNode's error reporting.
        printError("Error while traversing {0}: {1}", path, e);
        return path;
    }
}
/**
 * Pairs a {@code NodeState} with the path it was resolved from.
 */
static class NodeWrapper {
    final NodeState node;
    final String path;
    NodeWrapper(NodeState node, String path) {
        this.node = node;
        this.path = path;
    }
    /**
     * Resolves {@code path} against {@code root} via the parent node, so the
     * resolved node can be traversed.
     *
     * @throws IllegalArgumentException if the path does not exist
     */
    static NodeWrapper deriveTraversableNodeOnPath(NodeState root, String path) {
        String parentPath = getParentPath(path);
        NodeState parent = getNode(root, parentPath);
        if (denotesRoot(path)) {
            return new NodeWrapper(parent, parentPath);
        }
        String name = getName(path);
        if (!parent.hasChildNode(name)) {
            throw new IllegalArgumentException("Invalid path: " + path);
        }
        return new NodeWrapper(parent.getChildNode(name), path);
    }
}
/**
 * A path to be checked for consistency, optionally under a checkpoint,
 * together with the bookkeeping state accumulated while checking it.
 */
static class PathToCheck {
    final String path;
    final String checkpoint;
    // Journal entry of the most recent revision in which this path was
    // found consistent; null until such a revision is found.
    JournalEntry journalEntry;
    // Paths found corrupt in previously checked revisions; insertion order
    // is preserved so they are re-checked oldest-failure-first.
    Set<String> corruptPaths = new LinkedHashSet<>();
    PathToCheck(String path, String checkpoint) {
        this.path = path;
        this.checkpoint = checkpoint;
    }
    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((checkpoint == null) ? 0 : checkpoint.hashCode());
        result = prime * result + ((path == null) ? 0 : path.hashCode());
        return result;
    }
    @Override
    public boolean equals(Object object) {
        if (this == object) {
            return true;
        } else if (object instanceof PathToCheck) {
            PathToCheck that = (PathToCheck) object;
            // Null-safe comparison: hashCode() tolerates null fields, so
            // equals() must as well or it would throw NPE and break the
            // equals/hashCode contract.
            return (path == null ? that.path == null : path.equals(that.path))
                    && (checkpoint == null
                            ? that.checkpoint == null
                            : checkpoint.equals(that.checkpoint));
        } else {
            return false;
        }
    }
}
/**
 * Fully streams a binary property to verify that all of its content can be
 * read. External (non-segment-store) blobs are skipped.
 *
 * @param blob binary to scan
 * @param checkBinaries if {@code false}, binaries are not scanned at all
 * @return {@code true} if the blob's content was actually read
 * @throws IOException if reading the blob's stream fails
 */
private boolean traverse(Blob blob, boolean checkBinaries) throws IOException {
    if (!checkBinaries || isExternal(blob)) {
        return false;
    }
    // try-with-resources guarantees the stream is closed on all paths.
    try (InputStream s = blob.getNewStream()) {
        byte[] buffer = new byte[8192];
        while (s.read(buffer, 0, buffer.length) >= 0) {
            // Content is discarded; reading it is the consistency check.
        }
    }
    propertyCount++;
    return true;
}
/**
 * Tells whether the blob's content lives outside the segment store
 * (only {@code SegmentBlob} instances can be external).
 */
private static boolean isExternal(Blob b) {
    return b instanceof SegmentBlob && ((SegmentBlob) b).isExternal();
}
@Override
public void close() {
    // Releases the read-only file store opened in the constructor.
    store.close();
}
/** Writes a literal line to the output stream (no placeholder expansion). */
private void print(String format) {
    outWriter.println(format);
}
/** Writes a {@code MessageFormat}-style line with one argument. */
private void print(String format, Object arg) {
    String message = MessageFormat.format(format, arg);
    outWriter.println(message);
}
/** Writes a {@code MessageFormat}-style line with two arguments. */
private void print(String format, Object arg1, Object arg2) {
    String message = MessageFormat.format(format, arg1, arg2);
    outWriter.println(message);
}
/** Writes a {@code MessageFormat}-style line with four arguments. */
private void print(String format, Object arg1, Object arg2, Object arg3, Object arg4) {
    String message = MessageFormat.format(format, arg1, arg2, arg3, arg4);
    outWriter.println(message);
}
/** Writes a {@code MessageFormat}-style line with one argument to the error stream. */
private void printError(String format, Object arg) {
    String message = MessageFormat.format(format, arg);
    errWriter.println(message);
}
/** Writes a {@code MessageFormat}-style line with two arguments to the error stream. */
private void printError(String format, Object arg1, Object arg2) {
    String message = MessageFormat.format(format, arg1, arg2);
    errWriter.println(message);
}
// Timestamp (ms) of the last time debug() returned true; debug() uses it to
// rate-limit output to one batch per debugInterval seconds.
private long ts;
// Prints the formatted message only when debug output is currently enabled;
// formatting cost is skipped entirely otherwise.
private void debug(String format, Object arg) {
    if (debug()) {
        print(format, arg);
    }
}
// Two-argument variant of the rate-limited debug output above.
private void debug(String format, Object arg1, Object arg2) {
    if (debug()) {
        print(format, arg1, arg2);
    }
}
/**
 * Tells whether debug output should be printed right now: never when the
 * interval is {@code Long.MAX_VALUE}, always when it is {@code 0}, and
 * otherwise at most once per {@code debugInterval} seconds.
 */
private boolean debug() {
    // Avoid calling System.currentTimeMillis(), which is slow on some systems.
    if (debugInterval == Long.MAX_VALUE) {
        return false;
    }
    if (debugInterval == 0) {
        return true;
    }
    long now = System.currentTimeMillis();
    if ((now - this.ts) / 1000 <= debugInterval) {
        return false;
    }
    this.ts = now;
    return true;
}
}
| |
/*
Derby - Class org.apache.derbyTesting.functionTests.tests.jdbcapi.DataSourceReferenceTest
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derbyTesting.functionTests.tests.jdbcapi;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.Properties;
import javax.naming.Reference;
import javax.naming.Referenceable;
import javax.naming.spi.ObjectFactory;
import junit.framework.Test;
import org.apache.derbyTesting.junit.BaseJDBCTestCase;
import org.apache.derbyTesting.junit.BaseTestSuite;
import org.apache.derbyTesting.junit.J2EEDataSource;
import org.apache.derbyTesting.junit.JDBC;
import org.apache.derbyTesting.junit.JDBCDataSource;
import org.apache.derbyTesting.junit.TestConfiguration;
/**
* Test obtaining a <code>javax.naming.Reference</code> from a Derby data
* source and recreating a Derby data source from it.
* <p>
* Tests that the recreated value has the same value for all the properties
* the data source supports. The list of properties is obtained
* dynamically from the getter methods that return int, String, boolean,
* short and long. Should Derby data sources support any other bean
* property types then this test should be modified to pick them up and
* handle them. The test will fail when such a property is added.
* <p>
* Default values of the properties are also tested. Default and set
* values of the properties must be specified by creating a data source
* descriptor.
* <p>
* At no point does this test attempt to connect using these data sources.
*/
public class DataSourceReferenceTest
extends BaseJDBCTestCase {
/** Lookup constant for the descriptor array. */
private static final int BASE_DS = 0;
/** Lookup constant for the descriptor array. */
private static final int POOL_DS = 1;
/** Lookup constant for the descriptor array. */
private static final int XA_DS = 2;
/** Descriptor for the basic embedded data source. */
private static final DataSourceDescriptor BASE_EMBEDDED_DS =
        new DataSourceDescriptor("Basic embedded data source");
static {
    // Two-argument addProperty: no default value; three-argument form also
    // registers the expected default for an unconfigured data source.
    BASE_EMBEDDED_DS.addProperty("attributesAsPassword", "true", "false");
    BASE_EMBEDDED_DS.addProperty("connectionAttributes",
            "XX_connectionAttributes_2135");
    BASE_EMBEDDED_DS.addProperty("createDatabase", "create");
    BASE_EMBEDDED_DS.addProperty("dataSourceName",
            "XX_dataSourceName_1420");
    BASE_EMBEDDED_DS.addProperty("databaseName", "XX_databaseName_1206");
    BASE_EMBEDDED_DS.addProperty("description", "XX_description_1188");
    BASE_EMBEDDED_DS.addProperty("loginTimeout", "1280", "0");
    BASE_EMBEDDED_DS.addProperty("password", "XX_password_883");
    BASE_EMBEDDED_DS.addProperty("shutdownDatabase", "shutdown");
    BASE_EMBEDDED_DS.addProperty("user", "XX_user_447");
}
/** Descriptor for the basic client data source. */
private static final DataSourceDescriptor BASE_CLIENT_DS =
        new DataSourceDescriptor("Basic client data source");
static {
    // Properties with default values
    BASE_CLIENT_DS.addProperty("loginTimeout", "1280", "0");
    BASE_CLIENT_DS.addProperty("portNumber", "1070", "1527");
    BASE_CLIENT_DS.addProperty("retrieveMessageText", "false", "true");
    BASE_CLIENT_DS.addProperty("securityMechanism", "1851", "4");
    BASE_CLIENT_DS.addProperty("serverName", "tmpHostName", "localhost");
    BASE_CLIENT_DS.addProperty("ssl", "basic", "off");
    BASE_CLIENT_DS.addProperty("user", "XX_user_447", "APP");
    // Properties without default values.
    BASE_CLIENT_DS.addProperty("connectionAttributes",
            "XX_connectionAttributes_2135");
    BASE_CLIENT_DS.addProperty("createDatabase", "create");
    BASE_CLIENT_DS.addProperty("databaseName", "XX_databaseName_1206");
    BASE_CLIENT_DS.addProperty("dataSourceName", "XX_dataSourceName_1420");
    BASE_CLIENT_DS.addProperty("description", "XX_description_1188");
    BASE_CLIENT_DS.addProperty("password", "XX_password_883");
    BASE_CLIENT_DS.addProperty("shutdownDatabase", "shutdown");
    BASE_CLIENT_DS.addProperty("traceFile", "XX_traceFile_911");
    BASE_CLIENT_DS.addProperty("traceFileAppend", "true", "false");
    BASE_CLIENT_DS.addProperty("traceLevel", "1031", "-1");
    BASE_CLIENT_DS.addProperty("traceDirectory", "XX_traceDirectory_1476");
}
/** Descriptor for the client connection pool data source. */
private static final DataSourceDescriptor POOL_CLIENT_DS =
        new DataSourceDescriptor("Connection pool client data source",
                BASE_CLIENT_DS);
static {
    // Inherits all BASE_CLIENT_DS properties via the copy constructor.
    POOL_CLIENT_DS.addProperty("maxStatements", "10", "0");
}
/**
 * Creates a new fixture.
 *
 * @param name fixture name
 */
public DataSourceReferenceTest(String name) {
    super(name);
}
/**
 * Creates a suite with tests for both embedded and client data sources.
 *
 * @return A suite with the appropriate tests.
 */
public static Test suite() {
    if (JDBC.vmSupportsJSR169() || !JDBC.vmSupportsJNDI()) {
        // Referenceable is not supported with JSR169 or without JNDI
        return new BaseTestSuite(
                "DatasourceReferenceTest cannot run with JSR169");
    }
    return TestConfiguration.defaultSuite(DataSourceReferenceTest.class);
}
/**
 * Tests a data source, with focus on serialization/deserialization.
 * <p>
 * For each data source, the following actions are performed:
 * <ol> <li>Create an empty data source from the class name.
 * <li>Discover and validate the bean property list.
 * <li>Create a reference and recreate the data source.
 * <li>Compare the original and the empty recreated data source.
 * <li>Serialize the data source and recreate.
 * <li>Compare the original and the deserialized data source.
 * <li>Set a value for every property of the data source.
 * <li>Create a reference and recreate the data source.
 * <li>Compare the populated original and the recreated data source.
 * <li>Serialize the populated data source and recreate.
 * <li>Compare the populated original and the deserialized data source.
 * </ol>
 *
 * @throws Exception on a wide variety of error conditions...
 */
public void testDataSourceReference()
        throws Exception {
    final DataSourceDescriptor[] descs;
    if (usingDerbyNetClient()) {
        // Client data source descriptors (base, pool, XA).
        descs = new DataSourceDescriptor[] {
                BASE_CLIENT_DS,
                POOL_CLIENT_DS,
                BASE_CLIENT_DS
            };
    } else {
        // Embedded data source descriptors (base, pool, XA).
        descs = new DataSourceDescriptor[] {
                BASE_EMBEDDED_DS,
                BASE_EMBEDDED_DS,
                BASE_EMBEDDED_DS
            };
    }
    // Basic data source.
    String className = JDBCDataSource.getDataSource().getClass().getName();
    println("Testing base data source: " + className);
    assertDataSourceReference(descs[BASE_DS], className);
    // Connection pool data source.
    className =
            J2EEDataSource.getConnectionPoolDataSource().getClass().getName();
    println("Testing connection pool data source: " + className);
    assertDataSourceReference(descs[POOL_DS], className);
    // XA data source.
    className = J2EEDataSource.getXADataSource().getClass().getName();
    println("Testing XA data source: " + className);
    assertDataSourceReference(descs[XA_DS], className);
}
/**
 * Performs the test sequence on the data source.
 *
 * @param dsDesc data source descriptor
 * @param className class name of the data source
 * @throws Exception on a wide variety of error conditions...
 *
 * @see #testDataSourceReference
 */
private void assertDataSourceReference(
        DataSourceDescriptor dsDesc,
        String className)
        throws Exception {
    // Instantiate a fresh data source and discover its bean properties.
    Object dataSource = Class.forName(className).newInstance();
    String[] beanProperties = getPropertyBeanList(dataSource);
    // The discovered property set must match the descriptor exactly.
    assertDataSourceProperties(dsDesc, beanProperties);
    // Recreate via Reference and serialization, both empty and populated.
    assertDataSourceReferenceEmpty(dsDesc, className);
    assertDataSourceReferencePopulated(dsDesc, className);
}
/**
 * Asserts that the properties that are in the data source descriptor are
 * found in the list of data source properties, and that the data source
 * does not contain properties that are not in the descriptor.
 * <p>
 * No property values are verified in this assert method.
 *
 * @param dsDesc data source descriptor
 * @param properties list of actual data source properties
 */
private void assertDataSourceProperties(
        DataSourceDescriptor dsDesc,
        String[] properties) {
    println("Testing data source bean properties.");
    // Every discovered property must be known to the descriptor.
    for (String property : properties) {
        assertTrue("Property '" + property + "' not in descriptor '" +
                dsDesc.getName() + "'",
                dsDesc.hasProperty(property));
    }
    // Every descriptor property must appear exactly once in the list.
    Iterator descPropIter = dsDesc.getPropertyIterator();
    while (descPropIter.hasNext()) {
        String descProp = (String)descPropIter.next();
        int occurrences = 0;
        for (String property : properties) {
            if (descProp.equals(property)) {
                occurrences++;
            }
        }
        if (occurrences > 1) {
            fail("Duplicate entry '" + descProp + "' in data " +
                    "source property list");
        }
        assertTrue("Property '" + descProp + "' not found in data source " +
                "property list", occurrences == 1);
    }
    // Check if the expected number of properties are found.
    // Do this last to hopefully get a more descriptive failure
    // message which includes the property name above.
    assertEquals(dsDesc.getPropertyCount(), properties.length);
}
/**
 * Make sure it is possible to create a new data source using
 * <code>Referencable</code>, that the new instance has the correct
 * default values set for the bean properties and finally that the
 * data source can be serialized/deserialized.
 *
 * @param dsDesc data source descriptor
 * @param className data source class name
 * @throws Exception on a wide variety of error conditions...
 */
private void assertDataSourceReferenceEmpty(DataSourceDescriptor dsDesc,
                                            String className)
        throws Exception {
    println("Testing recreated empty data source.");
    // Recreate an untouched data source through its JNDI Reference.
    Object original = Class.forName(className).newInstance();
    Reference reference = ((Referenceable)original).getReference();
    ObjectFactory factory = (ObjectFactory)Class.forName(
            reference.getFactoryClassName()).newInstance();
    Object recreated =
            factory.getObjectInstance(reference, null, null, null);
    // The recreated data source must be a distinct, equivalent instance.
    assertNotNull("Recreated datasource is <null>", recreated);
    assertNotSame(recreated, original);
    compareDataSources(dsDesc, original, recreated, true);
    // Round-trip the untouched data source through Java serialization.
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    ObjectOutputStream oos = new ObjectOutputStream(baos);
    oos.writeObject(original);
    oos.flush();
    oos.close();
    ObjectInputStream ois = new ObjectInputStream(
            new ByteArrayInputStream(baos.toByteArray()));
    recreated = ois.readObject();
    compareDataSources(dsDesc, original, recreated, true);
}
/**
 * Make sure it is possible to recreate and serialize/deserialize a
 * populated data source.
 * <p>
 * Populated means the various bean properties have non-default
 * values set.
 *
 * @param dsDesc data source descriptor
 * @param className data source class name
 * @throws Exception on a wide variety of error conditions...
 */
private void assertDataSourceReferencePopulated(
        DataSourceDescriptor dsDesc,
        String className)
        throws Exception {
    println("Testing recreated populated data source.");
    Object ds = Class.forName(className).newInstance();
    // Give every bean property a (non-default) value via its setter.
    Iterator propIter = dsDesc.getPropertyIterator();
    while (propIter.hasNext()) {
        String property = (String)propIter.next();
        String value = dsDesc.getPropertyValue(property);
        Method getMethod = getGet(property, ds);
        Method setMethod = getSet(getMethod, ds);
        // The getter's return type tells us how to convert the value.
        Class paramType = getMethod.getReturnType();
        if (paramType.equals(Integer.TYPE)) {
            setMethod.invoke(ds, new Object[] {Integer.valueOf(value)});
        } else if (paramType.equals(String.class)) {
            setMethod.invoke(ds, new Object[] {value});
        } else if (paramType.equals(Boolean.TYPE)) {
            setMethod.invoke(ds, new Object[] {Boolean.valueOf(value)});
        } else if (paramType.equals(Short.TYPE)) {
            setMethod.invoke(ds, new Object[] {Short.valueOf(value)});
        } else if (paramType.equals(Long.TYPE)) {
            setMethod.invoke(ds, new Object[] {Long.valueOf(value)});
        } else {
            fail("'" + property + "' not settable - update test!!");
        }
    }
    // Recreate the populated data source through its JNDI Reference.
    Reference reference = ((Referenceable)ds).getReference();
    ObjectFactory factory = (ObjectFactory)Class.forName(
            reference.getFactoryClassName()).newInstance();
    Object recreated =
            factory.getObjectInstance(reference, null, null, null);
    // Recreated should not be same instance as original.
    assertNotSame(recreated, ds);
    compareDataSources(dsDesc, ds, recreated, false);
    // Round-trip the populated data source through Java serialization.
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    ObjectOutputStream oos = new ObjectOutputStream(baos);
    oos.writeObject(ds);
    oos.flush();
    oos.close();
    ObjectInputStream ois = new ObjectInputStream(
            new ByteArrayInputStream(baos.toByteArray()));
    recreated = ois.readObject();
    compareDataSources(dsDesc, ds, recreated, false);
}
/**
 * Compares two data sources expected to be equal.
 * <p>
 * The data source descriptor is expected to contain both default values
 * and set values for the relevant bean properties of the data source(s).
 *
 * @param dsDesc data source descriptor
 * @param ds original data source
 * @param rds recreated data source
 * @param useDefaultsForComparison <code>true</code> if the default values
 *      should be verified, <code>false</code> if the set values should be
 *      used for verification
 * @throws Exception on a wide variety of error conditions...
 * @throws AssertionFailedError if the data sources are not equal
 */
private void compareDataSources(DataSourceDescriptor dsDesc,
                                Object ds, Object rds,
                                boolean useDefaultsForComparison)
        throws Exception {
    Iterator propIter = dsDesc.getPropertyIterator();
    while (propIter.hasNext()) {
        String property = (String)propIter.next();
        Method getMethod = getGet(property, ds);
        // Obtain value from original data source, then the recreated one.
        // Use the no-arg varargs invoke instead of an explicit null
        // argument array, which is ambiguous and triggers a warning.
        Object dsValue = getMethod.invoke(ds);
        Object rdsValue = getMethod.invoke(rds);
        if (dsValue == null) {
            assertNull(rdsValue);
        } else {
            assertEquals(dsValue, rdsValue);
        }
        // Make sure the value is correct.
        if (useDefaultsForComparison) {
            if (dsValue != null) {
                assertEquals("Wrong default value for '" + property + "'",
                        dsDesc.getPropertyDefault(property),
                        dsValue.toString());
            } else {
                assertNull(dsDesc.getPropertyDefault(property));
            }
        } else if (dsValue != null) {
            assertEquals("'" + property + "' has incorrect value",
                    dsDesc.getPropertyValue(property),
                    dsValue.toString());
        } else {
            // We got null from the data source, and we should have set all
            // values to something else than null.
            fail("Test does not handle this situation...");
        }
    }
}
/**
 * Obtains a list of bean properties through reflection.
 *
 * @param ds the data source to investigate
 * @return A list of bean property names.
 */
private static String[] getPropertyBeanList(Object ds) {
    Method[] allMethods = ds.getClass().getMethods();
    ArrayList<String> properties = new ArrayList<String>();
    for (Method method : allMethods) {
        String methodName = method.getName();
        // Need at least getXX and no parameters to qualify as a getter.
        if (methodName.length() < 5 || !methodName.startsWith("get") ||
                method.getParameterTypes().length != 0) {
            continue;
        }
        // Parameterized Class<?> instead of the raw type.
        Class<?> rt = method.getReturnType();
        if (rt.equals(Integer.TYPE) || rt.equals(String.class) ||
                rt.equals(Boolean.TYPE) || rt.equals(Short.TYPE) ||
                rt.equals(Long.TYPE)) {
            // Valid Java Bean property.
            // Convert name:
            //    getPassword -> password
            //    getRetrieveMessageText -> retrieveMessageText
            String beanName = methodName.substring(3,4).toLowerCase()
                    + methodName.substring(4);
            properties.add(beanName);
        } else {
            assertFalse("Method '" + methodName + "' with primitive " +
                    "return type not supported - update test!!",
                    rt.isPrimitive());
        }
    }
    return properties.toArray(new String[properties.size()]);
}
/**
 * Obtains the specified get method.
 *
 * @param property property/method name
 * @param ds data source object
 * @return A method object.
 *
 * @throws NoSuchMethodException if the method does not exist
 */
private static Method getGet(String property, Object ds)
        throws NoSuchMethodException {
    String methodName =
            "get" + property.substring(0,1).toUpperCase()
            + property.substring(1);
    // No-arg varargs lookup instead of passing an ambiguous null
    // parameter-type array.
    return ds.getClass().getMethod(methodName);
}
/**
 * Obtains the specified set method.
 *
 * @param getMethod the corresponding get method
 * @param ds data source object
 * @return A method object.
 *
 * @throws NoSuchMethodException if the method does not exist
 */
private static Method getSet(Method getMethod, Object ds)
        throws NoSuchMethodException {
    // setXxx takes a single parameter of the getter's return type.
    String setterName = "s" + getMethod.getName().substring(1);
    Class[] paramTypes = new Class[] {getMethod.getReturnType()};
    return ds.getClass().getMethod(setterName, paramTypes);
}
/**
 * A class describing the bean properties of a data source.
 * <p>
 * A data source is a class implementing
 * <code>javax.sql.CommonDataSource</code>.
 * <p>
 * The data source description consists of the following:
 * <ul> <li>A list of property names.
 * <li>A list of default values for the properties that have a default.
 * <li>A list of set values for properties.
 * </ul>
 * In addition it has a name for convenience.
 */
private static class DataSourceDescriptor {
    /** Name of the description. */
    private final String dsName;
    /**
     * Set values for the data source being described.
     * <p>
     * Note that the keys of this property object describe which bean
     * properties exist for the data source.
     */
    private final Properties propertyValues;
    /**
     * Default values for bean properties having a default.
     * <p>
     * Note that not all properties have a default, and the data source
     * may therefore have more properties than there entries in this
     * list of properties.
     */
    private final Properties propertyDefaults;
    /**
     * Creates a new data source description.
     *
     * @param dsName convenience name for the description/source
     */
    DataSourceDescriptor(String dsName) {
        this.dsName = dsName;
        this.propertyValues = new Properties();
        this.propertyDefaults = new Properties();
    }
    /**
     * Creates a new data source description, based off an existing
     * description.
     * <p>
     * All properties and values defined in the existing descriptor will
     * also be defined in the new descriptor.
     *
     * @param dsName convenience name for the description/source
     * @param copyFrom existing descriptor to copy properties/values from
     */
    DataSourceDescriptor(String dsName, DataSourceDescriptor copyFrom) {
        this.dsName = dsName;
        this.propertyValues = new Properties();
        this.propertyValues.putAll(copyFrom.propertyValues);
        // Copy the defaults into a fresh table. The previous implementation
        // both installed copyFrom.propertyDefaults as the backing defaults
        // table and copied its entries, which duplicated the data and let
        // later additions to copyFrom leak into this descriptor.
        this.propertyDefaults = new Properties();
        this.propertyDefaults.putAll(copyFrom.propertyDefaults);
    }
    /**
     * Returns the convenience name of this descriptor.
     *
     * @return A convenience name.
     */
    String getName() {
        return this.dsName;
    }
    /**
     * Adds a property to the description, with a value and no associated
     * default value.
     *
     * @param name property name
     * @param value property value
     * @throws NullPointerException if <code>name</code> or
     *      <code>value</code> is <code>null</code>
     */
    void addProperty(String name, String value) {
        this.propertyValues.setProperty(name, value);
    }
    /**
     * Adds a property to the description, with a value and an associated
     * default value.
     *
     * @param name property name
     * @param value property value
     * @param defaultValue default property value
     * @throws NullPointerException if <code>name</code>, <code>value</code>
     *      or <code>defaultValue</code> is <code>null</code>
     */
    void addProperty(String name, String value, String defaultValue) {
        this.propertyValues.setProperty(name, value);
        this.propertyDefaults.setProperty(name, defaultValue);
    }
    /**
     * Returns the value of the specified property.
     *
     * @param name property name
     * @return The value set for this property.
     *
     * @throws NullPointerException if <code>name</code> is
     *      <code>null</code>
     * @throws AssertionFailedError if the property name is not defined by
     *      this descriptor
     */
    String getPropertyValue(String name) {
        if (!this.propertyValues.containsKey(name)) {
            fail("Property '" + name + "' not in data source descriptor '" +
                    dsName + "'");
        }
        return this.propertyValues.getProperty(name);
    }
    /**
     * Returns the default value for the specified property.
     *
     * @param name property name
     * @return The default value if specified, <code>null<code> if a default
     *      value is not specified.
     *
     * @throws NullPointerException if <code>name</code> is
     *      <code>null</code>
     * @throws AssertionFailedError if the property name is not defined by
     *      this descriptor
     */
    String getPropertyDefault(String name) {
        if (!this.propertyValues.containsKey(name)) {
            fail("Property '" + name + "' not in data source descriptor '" +
                    dsName + "'");
        }
        return this.propertyDefaults.getProperty(name, null);
    }
    /**
     * Returns an iterator over all bean property names.
     *
     * @return An iterator.
     */
    Iterator getPropertyIterator() {
        return this.propertyValues.keySet().iterator();
    }
    /**
     * Tells if the specified property is defined by this descriptor.
     *
     * @param name property name
     * @return <code>true</code> if defined, <code>false</code> if not.
     */
    boolean hasProperty(String name) {
        return this.propertyValues.containsKey(name);
    }
    /**
     * Returns the number of bean properties defined by this descriptor.
     *
     * @return The number of bean properties.
     */
    int getPropertyCount() {
        return this.propertyValues.size();
    }
} // End class DataSourceDescriptor
}
| |
//
// Copyright (c) eProtectioneers 2016/17. All rights reserved.
// Licensed under the MIT License. See LICENSE file in the project root for full license information.
//
package org.eprotectioneers.panacea.contactmanagement.view;
import java.awt.*;
import java.awt.event.*;
import java.io.File;
import java.util.ArrayList;
import javax.swing.*;
import org.eprotectioneers.panacea.contactmanagement.components.ImagePanel;
import org.eprotectioneers.panacea.contactmanagement.components.RoundRectangleButton;
import org.eprotectioneers.panacea.contactmanagement.models.ChooseFile;
import org.eprotectioneers.panacea.contactmanagement.models.Contact;
import org.eprotectioneers.panacea.contactmanagement.models.DatabaseC;
import org.eprotectioneers.panacea.contactmanagement.models.EmailValidator;
import org.eprotectioneers.panacea.userinterface.PPCA_PanaceaWindow;
import net.miginfocom.swing.MigLayout;
/**
* A Page to add a Contact
* @author eProtectioneers
*/
public class Page_AddContact extends JFrame {
// Root content panel of the frame.
private JPanel contentPane;
// Panel showing the contact picture (starts with the default picture).
private ImagePanel pnl_image;
// Entry fields for the individual contact attributes.
private PageItem_new pi_shownname;
private PageItem_new pi_firstname;
private PageItem_new pi_lastname;
private PageItem_new pi_emailaddress;
private PageItem_new pi_phonenumber;
private PageItem_new pi_address;
// Self-reference, handed to inner listeners.
private Page_AddContact pac=this;
/**
 * A list, which contains every page item (entry field) of this form.
 */
private ArrayList<PageItem_new> entryfields=new ArrayList<PageItem_new>();
/**
 * The button, which saves a new contact.
 */
private JButton btnSave;
// The button, which discards the input and closes the dialog.
private JButton btnCancel;
/**
 * Create the frame, sized to two thirds of {@code component} and centered
 * over it.
 *
 * @param component component used to derive this window's size and position
 */
public Page_AddContact(Component component) {
    super("New Contact");
    Point refLocation = component.getLocation();
    Dimension refSize = component.getSize();
    int width = refSize.width - refSize.width / 3;
    int height = refSize.height - refSize.height / 3;
    this.setSize(width, height);
    int x = refLocation.x + component.getWidth() / 2 - this.getWidth() / 2;
    int y = refLocation.y + component.getHeight() / 2 - this.getHeight() / 2;
    this.setLocation(x, y);
    inizialize();
}
/**
* Constructor, assigns
* @param component
* @param shownname
* @param emailaddress
*/
public Page_AddContact(Component component,String shownname,String emailaddress) {
this(component);
Object options[]={"yes","no"};
pi_shownname.setText(shownname);
pi_emailaddress.setText(emailaddress);
}
/**
* Initializes
*/
public void inizialize(){
contentPane = new JPanel();
setContentPane(contentPane);
setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
contentPane.setLayout(new MigLayout("", "[5%][100px:25%:300px][40][30%,grow,fill][40.00][30%,grow,fill][5%]", "[15.00][25px:11%:75px][25px:11%:75px][25px:11%:75px][25px:11%:75px][25px:11%:75px][5.50%][11%][11%][15]"));
pnl_image = new ImagePanel(Contact.getDefaultpicpath());
pnl_image.setBackground(Color.BLACK);
contentPane.add(pnl_image, "cell 1 1 1 5,grow");
ChangeImageListener cil=new ChangeImageListener();
pnl_image.getBtnChangePicture().addActionListener(cil);
pnl_image.getMntmRemovePicture().addActionListener(cil);
pi_shownname = new PageItem_new("Shown Name",contentPane);
entryfields.add(pi_shownname);
pi_emailaddress = new PageItem_new("Email", contentPane);
entryfields.add(pi_emailaddress);
pi_firstname = new PageItem_new("Firstname",contentPane);
entryfields.add(pi_firstname);
pi_lastname = new PageItem_new("Lastname", contentPane);
entryfields.add(pi_lastname);
pi_phonenumber = new PageItem_new("Phonenumber", contentPane);
entryfields.add(pi_phonenumber);
pi_address = new PageItem_new("Address", contentPane);
entryfields.add(pi_address);
for(PageItem_new pi:entryfields){
pi.getTextField().setColumns(10);
pi.getTextField().addActionListener(new PiActionListener());
}
contentPane.add(pi_shownname, "cell 3 2,grow");
contentPane.add(pi_emailaddress, "cell 3 3 3 1,grow");
contentPane.add(pi_firstname, "cell 3 4,grow");
contentPane.add(pi_lastname, "cell 3 5,grow");
contentPane.add(pi_phonenumber, "cell 5 4,grow");
contentPane.add(pi_address, "cell 5 5,grow");
btnSave = new RoundRectangleButton("Save",15);
btnSave.setMaximumSize(new Dimension(75, 25));
btnSave.setAlignmentX(Component.RIGHT_ALIGNMENT);
btnSave.setBackground(Color.WHITE);
btnSave.addActionListener(new BtnSaveActionListener());
btnCancel = new RoundRectangleButton("Cancel", 15);
btnCancel.setMaximumSize(new Dimension(70, 23));
btnCancel.setAlignmentX(Component.RIGHT_ALIGNMENT);
btnCancel.setBackground(Color.WHITE);
btnCancel.addActionListener(new BtnCancelActionListener());
contentPane.add(btnCancel, "cell 3 8");
contentPane.add(btnSave, "cell 5 8,alignx right");
}
/**
* ActionListener to change the Contact's Image
* @author eProtectioneers
*/
private class ChangeImageListener implements ActionListener{
@Override
public void actionPerformed(ActionEvent e) {
if(e.getSource().equals(pnl_image.getBtnChangePicture()))pnl_image.setPicturePath(readFilePath());
contentPane.setVisible(false);
contentPane.setVisible(true);
}
private String readFilePath(){
File file=ChooseFile.getPictoRead();
if(file!=null&&(new ImageIcon(file.getAbsolutePath()).getImage()) instanceof Image)return file.getAbsolutePath();
else return pnl_image.getPicturePath();
}
}
/**
* The listener of every PageItem
* @author eProtectioneers
*/
private class PiActionListener implements ActionListener {
public void actionPerformed(ActionEvent arg0) {
int i=getSelectedPageItem();
if(i!=entryfields.size()-1){
entryfields.get(i+1).requestFocus();
}
else {
btnSave.requestFocus();
btnSave.doClick(100);
}
}
/**
* @return the number of the selected PageItem
*/
private int getSelectedPageItem(){
for(int i=0; i<entryfields.size();i++){
if(entryfields.get(i).getTextField().isFocusOwner())return i;
}
return (Integer)null;
}
}
/**
* Saves the Contact
*/
private void save(){
Contact c=new Contact(DatabaseC.getNewIndex(), pi_shownname.getText(), pi_firstname.getText(),
pi_lastname.getText(), pi_emailaddress.getText(), pi_phonenumber.getText(), pi_address.getText(),
pnl_image.getPicturePath(), false);
DatabaseC.addContact(c);
JOptionPane.showMessageDialog(PPCA_PanaceaWindow.getFrame(), "Contact added", "", JOptionPane.INFORMATION_MESSAGE, null);
}
/**
* @return true, if there's no Contact with this EmailAddress - if there's on it asks, if you really want to add this
*/
private boolean checkSave(){
boolean b=true;
if(DatabaseC.checkContact(pi_emailaddress.getText())!=null){
Object[] options={"yes","no"};
switch(JOptionPane.showOptionDialog(pac, "There's already a Contact with this Email-Address. Do you want to continue?", "Contact already exists",JOptionPane.YES_NO_OPTION, JOptionPane.QUESTION_MESSAGE, null,options,options[0])){
case JOptionPane.YES_OPTION:
break;
default:
b=false;
break;
}
}
return b;
}
/**
* ActionListener to save the New Contact
* @author eProtectioneers
*/
private class BtnSaveActionListener implements ActionListener {
@Override
public void actionPerformed(ActionEvent e) {
if(!new EmailValidator().validate(pi_emailaddress.getText())){
JOptionPane.showMessageDialog(pac, "Please enter a valid Email-Address","", JOptionPane.ERROR_MESSAGE);
pi_emailaddress.requestFocus();
}
else{
Object options[]={"yes","no"};
switch(JOptionPane.showOptionDialog(pac, "Do you really want to save this Contact?", "Save new Contact", JOptionPane.YES_NO_OPTION, JOptionPane.QUESTION_MESSAGE, null, options, options[0])){
case JOptionPane.YES_OPTION:
if(checkSave()){
save();
dispose();
}
break;
default:
break;
}
}
}
}
/**
* ActionListener to cancel
* @author eProtectioneers
*/
private class BtnCancelActionListener implements ActionListener {
@Override
public void actionPerformed(ActionEvent e) {
Object options[]={"yes","no","cancel"};
if(lookForChanges()){
boolean b=checkSave();
switch(JOptionPane.showOptionDialog(pac, "Do you want to save this Contact?", "Save new Contact", JOptionPane.YES_NO_CANCEL_OPTION, JOptionPane.QUESTION_MESSAGE, null, options, options[0])){
case JOptionPane.YES_OPTION:
if(!new EmailValidator().validate(pi_emailaddress.getText())){
JOptionPane.showMessageDialog(pac, "Please enter a valid Email-Address","", JOptionPane.ERROR_MESSAGE);
pi_emailaddress.requestFocus();
break;
}
if(b)save();
case JOptionPane.NO_OPTION:
if(b)dispose();
default:
break;
}
}else dispose();
}
/**
* @return false if there are no changes
*/
private boolean lookForChanges(){
if(!pnl_image.getPicturePath().equals(Contact.getDefaultpicpath()))return true;
for(PageItem_new pi:entryfields){
if(!pi.getText().equals(""))return true;
}
return false;
}
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.unit;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.test.ESTestCase;
import org.hamcrest.MatcherAssert;
import java.io.IOException;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
/**
 * Unit tests for {@code ByteSizeValue}: unit conversion, equality, string
 * rendering, parsing (with and without whitespace, upper/lower case, long and
 * short unit suffixes), comparison across units, hash-code consistency and
 * stream serialization.
 */
public class ByteSizeValueTests extends ESTestCase {
    // 4 PB = 4 * 1024^5 bytes (binary units throughout).
    public void testActualPeta() {
        MatcherAssert.assertThat(new ByteSizeValue(4, ByteSizeUnit.PB).getBytes(), equalTo(4503599627370496L));
    }

    // 4 TB = 4 * 1024^4 bytes.
    public void testActualTera() {
        MatcherAssert.assertThat(new ByteSizeValue(4, ByteSizeUnit.TB).getBytes(), equalTo(4398046511104L));
    }

    // 4 GB = 4 * 1024^3 bytes.
    public void testActual() {
        MatcherAssert.assertThat(new ByteSizeValue(4, ByteSizeUnit.GB).getBytes(), equalTo(4294967296L));
    }

    // Round-trip: constructing with unit U and reading back in U is the identity.
    public void testSimple() {
        assertThat(ByteSizeUnit.BYTES.toBytes(10), is(new ByteSizeValue(10, ByteSizeUnit.BYTES).getBytes()));
        assertThat(ByteSizeUnit.KB.toKB(10), is(new ByteSizeValue(10, ByteSizeUnit.KB).getKb()));
        assertThat(ByteSizeUnit.MB.toMB(10), is(new ByteSizeValue(10, ByteSizeUnit.MB).getMb()));
        assertThat(ByteSizeUnit.GB.toGB(10), is(new ByteSizeValue(10, ByteSizeUnit.GB).getGb()));
        assertThat(ByteSizeUnit.TB.toTB(10), is(new ByteSizeValue(10, ByteSizeUnit.TB).getTb()));
        assertThat(ByteSizeUnit.PB.toPB(10), is(new ByteSizeValue(10, ByteSizeUnit.PB).getPb()));
    }

    // Values representing the same number of bytes compare equal regardless of the unit they were parsed in.
    public void testEquality() {
        String[] equalValues = new String[]{"1GB", "1024MB", "1048576KB", "1073741824B"};
        ByteSizeValue value1 = ByteSizeValue.parseBytesSizeValue(randomFrom(equalValues), "equalTest");
        ByteSizeValue value2 = ByteSizeValue.parseBytesSizeValue(randomFrom(equalValues), "equalTest");
        assertThat(value1, equalTo(value2));
    }

    // toString() renders in the largest unit that keeps the value >= 1; note the
    // last case stays in pb because PB is the largest unit (1536pb, no overflow to a bigger unit).
    public void testToString() {
        assertThat("10b", is(new ByteSizeValue(10, ByteSizeUnit.BYTES).toString()));
        assertThat("1.5kb", is(new ByteSizeValue((long) (1024 * 1.5), ByteSizeUnit.BYTES).toString()));
        assertThat("1.5mb", is(new ByteSizeValue((long) (1024 * 1.5), ByteSizeUnit.KB).toString()));
        assertThat("1.5gb", is(new ByteSizeValue((long) (1024 * 1.5), ByteSizeUnit.MB).toString()));
        assertThat("1.5tb", is(new ByteSizeValue((long) (1024 * 1.5), ByteSizeUnit.GB).toString()));
        assertThat("1.5pb", is(new ByteSizeValue((long) (1024 * 1.5), ByteSizeUnit.TB).toString()));
        assertThat("1536pb", is(new ByteSizeValue((long) (1024 * 1.5), ByteSizeUnit.PB).toString()));
    }

    // Parsing accepts long ("PB") and short ("P") suffixes, either case, with or without a space.
    public void testParsing() {
        assertThat(ByteSizeValue.parseBytesSizeValue("42PB", "testParsing").toString(), is("42pb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("42 PB", "testParsing").toString(), is("42pb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("42pb", "testParsing").toString(), is("42pb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("42 pb", "testParsing").toString(), is("42pb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("42P", "testParsing").toString(), is("42pb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("42 P", "testParsing").toString(), is("42pb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("42p", "testParsing").toString(), is("42pb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("42 p", "testParsing").toString(), is("42pb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("54TB", "testParsing").toString(), is("54tb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("54 TB", "testParsing").toString(), is("54tb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("54tb", "testParsing").toString(), is("54tb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("54 tb", "testParsing").toString(), is("54tb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("54T", "testParsing").toString(), is("54tb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("54 T", "testParsing").toString(), is("54tb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("54t", "testParsing").toString(), is("54tb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("54 t", "testParsing").toString(), is("54tb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("12GB", "testParsing").toString(), is("12gb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("12 GB", "testParsing").toString(), is("12gb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("12gb", "testParsing").toString(), is("12gb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("12 gb", "testParsing").toString(), is("12gb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("12G", "testParsing").toString(), is("12gb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("12 G", "testParsing").toString(), is("12gb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("12g", "testParsing").toString(), is("12gb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("12 g", "testParsing").toString(), is("12gb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("12M", "testParsing").toString(), is("12mb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("12 M", "testParsing").toString(), is("12mb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("12m", "testParsing").toString(), is("12mb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("12 m", "testParsing").toString(), is("12mb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("23KB", "testParsing").toString(), is("23kb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("23 KB", "testParsing").toString(), is("23kb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("23kb", "testParsing").toString(), is("23kb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("23 kb", "testParsing").toString(), is("23kb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("23K", "testParsing").toString(), is("23kb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("23 K", "testParsing").toString(), is("23kb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("23k", "testParsing").toString(), is("23kb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("23 k", "testParsing").toString(), is("23kb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("1B", "testParsing").toString(), is("1b"));
        assertThat(ByteSizeValue.parseBytesSizeValue("1 B", "testParsing").toString(), is("1b"));
        assertThat(ByteSizeValue.parseBytesSizeValue("1b", "testParsing").toString(), is("1b"));
        assertThat(ByteSizeValue.parseBytesSizeValue("1 b", "testParsing").toString(), is("1b"));
    }

    // A bare number without a unit suffix is rejected.
    public void testFailOnMissingUnits() {
        Exception e = expectThrows(ElasticsearchParseException.class, () -> ByteSizeValue.parseBytesSizeValue("23", "test"));
        assertThat(e.getMessage(), containsString("failed to parse setting [test]"));
    }

    // An unrecognized unit suffix is rejected.
    public void testFailOnUnknownUnits() {
        Exception e = expectThrows(ElasticsearchParseException.class, () -> ByteSizeValue.parseBytesSizeValue("23jw", "test"));
        assertThat(e.getMessage(), containsString("failed to parse setting [test]"));
    }

    // The empty string is rejected (the inner assertThat is never reached).
    public void testFailOnEmptyParsing() {
        Exception e = expectThrows(ElasticsearchParseException.class,
                () -> assertThat(ByteSizeValue.parseBytesSizeValue("", "emptyParsing").toString(), is("23kb")));
        assertThat(e.getMessage(), containsString("failed to parse setting [emptyParsing]"));
    }

    // A unit suffix with no number is rejected.
    public void testFailOnEmptyNumberParsing() {
        Exception e = expectThrows(ElasticsearchParseException.class,
                () -> assertThat(ByteSizeValue.parseBytesSizeValue("g", "emptyNumberParsing").toString(), is("23b")));
        assertThat(e.getMessage(), containsString("failed to parse [g]"));
    }

    // Fractional / dotted values are not supported.
    public void testNoDotsAllowed() {
        Exception e = expectThrows(ElasticsearchParseException.class, () -> ByteSizeValue.parseBytesSizeValue("42b.", null, "test"));
        assertThat(e.getMessage(), containsString("failed to parse setting [test]"));
    }

    // Same value + same unit compares as equal (compareTo == 0).
    public void testCompareEquality() {
        long firstRandom = randomNonNegativeLong();
        ByteSizeUnit randomUnit = randomFrom(ByteSizeUnit.values());
        ByteSizeValue firstByteValue = new ByteSizeValue(firstRandom, randomUnit);
        ByteSizeValue secondByteValue = new ByteSizeValue(firstRandom, randomUnit);
        assertEquals(0, firstByteValue.compareTo(secondByteValue));
    }

    // With the same unit, ordering follows the numeric value (checked both directions).
    public void testCompareValue() {
        long firstRandom = randomNonNegativeLong();
        long secondRandom = randomValueOtherThan(firstRandom, ESTestCase::randomNonNegativeLong);
        ByteSizeUnit unit = randomFrom(ByteSizeUnit.values());
        ByteSizeValue firstByteValue = new ByteSizeValue(firstRandom, unit);
        ByteSizeValue secondByteValue = new ByteSizeValue(secondRandom, unit);
        assertEquals(firstRandom > secondRandom, firstByteValue.compareTo(secondByteValue) > 0);
        assertEquals(secondRandom > firstRandom, secondByteValue.compareTo(firstByteValue) > 0);
    }

    // With the same numeric value, a smaller unit compares below PB.
    public void testCompareUnits() {
        long number = randomNonNegativeLong();
        ByteSizeUnit randomUnit = randomValueOtherThan(ByteSizeUnit.PB, ()->randomFrom(ByteSizeUnit.values()));
        ByteSizeValue firstByteValue = new ByteSizeValue(number, randomUnit);
        ByteSizeValue secondByteValue = new ByteSizeValue(number, ByteSizeUnit.PB);
        assertTrue(firstByteValue.compareTo(secondByteValue) < 0);
        assertTrue(secondByteValue.compareTo(firstByteValue) > 0);
    }

    // Long.MAX_VALUE PB vs Long.MAX_VALUE bytes must still order correctly
    // (exercises the overflow-prone path of the comparison).
    public void testEdgeCompare() {
        ByteSizeValue maxLongValuePB = new ByteSizeValue(Long.MAX_VALUE, ByteSizeUnit.PB);
        ByteSizeValue maxLongValueB = new ByteSizeValue(Long.MAX_VALUE, ByteSizeUnit.BYTES);
        assertTrue(maxLongValuePB.compareTo(maxLongValueB) > 0);
    }

    // Equal byte counts expressed in different units must hash identically.
    public void testConversionHashCode() {
        ByteSizeValue firstValue = new ByteSizeValue(randomIntBetween(0, Integer.MAX_VALUE), ByteSizeUnit.GB);
        ByteSizeValue secondValue = new ByteSizeValue(firstValue.getBytes(), ByteSizeUnit.BYTES);
        assertEquals(firstValue.hashCode(), secondValue.hashCode());
    }

    // Write-then-read round trip through the stream layer preserves the byte count.
    public void testSerialization() throws IOException {
        ByteSizeValue byteSizeValue = new ByteSizeValue(randomNonNegativeLong(), randomFrom(ByteSizeUnit.values()));
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            byteSizeValue.writeTo(out);
            try (StreamInput in = out.bytes().streamInput()) {
                ByteSizeValue deserializedByteSizeValue = new ByteSizeValue(in);
                assertEquals(byteSizeValue.getBytes(), deserializedByteSizeValue.getBytes());
            }
        }
    }
}
| |
/*
* Copyright (C) 2016 Intel Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package OptimizationTests.FormBottomLoops.DevTest_01;
/**
 * Development test for the FormBottomLoops compiler optimization. Each test
 * method is a deliberately shaped loop whose statement order and exit-test
 * position are the point of the test — the loops must NOT be rewritten or
 * "cleaned up". Every method returns a deterministic sum of its loop-carried
 * variables, printed by {@link #run()} so the output can be diffed against a
 * reference run.
 *
 * Naming scheme (inferred from the loop shapes — TODO confirm against the
 * test-suite docs): "B" variants break before the body statements, "H"
 * variants test after them; "Direct"/"Reverse" distinguish the order in which
 * the dependent variables are updated; "Complex" variants add extra reads of
 * the loop-carried values around the exit test; "Phis" variants chain values
 * across iterations (a gets the previous i, b the previous a).
 */
public class Main {
    // Common iteration bound for every loop below.
    private final static int LIMIT = 1200;

    public static void main(String[] args) {
        new Main().run();
    }

    /** Runs every test method and prints its result, one per line. */
    private void run() {
        System.out.println("testZZZ2NodesSumB: " + testZZZ2NodesSumB());
        System.out.println("testZZZ2NodesSumH: " + testZZZ2NodesSumH());
        System.out.println("testZZZ2NodesSetB: " + testZZZ2NodesSetB());
        System.out.println("testZZZ2NodesSetH: " + testZZZ2NodesSetH());
        System.out.println("testZZZ2NodesComplexSumB: " + testZZZ2NodesComplexSumB());
        System.out.println("testZZZ2NodesComplexSumH: " + testZZZ2NodesComplexSumH());
        System.out.println("testZZZ2NodesComplexSetB: " + testZZZ2NodesComplexSetB());
        System.out.println("testZZZ2NodesComplexSetH: " + testZZZ2NodesComplexSetH());
        System.out.println("testZZZ3NodesSumBBDirect: " + testZZZ3NodesSumBBDirect());
        System.out.println("testZZZ3NodesSumBBReverse: " + testZZZ3NodesSumBBReverse());
        System.out.println("testZZZ3NodesSumHHDirect: " + testZZZ3NodesSumHHDirect());
        System.out.println("testZZZ3NodesSumHHReverse: " + testZZZ3NodesSumHHReverse());
        System.out.println("testZZZ3NodesSumHBDirect: " + testZZZ3NodesSumHBDirect());
        System.out.println("testZZZ3NodesSumHBReverse: " + testZZZ3NodesSumHBReverse());
        System.out.println("testZZZ3NodesSetBBDirect: " + testZZZ3NodesSetBBDirect());
        System.out.println("testZZZ3NodesSetBBReverse: " + testZZZ3NodesSetBBReverse());
        System.out.println("testZZZ3NodesSetHHDirect: " + testZZZ3NodesSetHHDirect());
        System.out.println("testZZZ3NodesSetHHReverse: " + testZZZ3NodesSetHHReverse());
        System.out.println("testZZZ3NodesSetHBDirect: " + testZZZ3NodesSetHBDirect());
        System.out.println("testZZZ3NodesSetHBReverse: " + testZZZ3NodesSetHBReverse());
        System.out.println("testZZZ3NodesComplexSumBBDirect: " + testZZZ3NodesComplexSumBBDirect());
        System.out.println("testZZZ3NodesComplexSumBBReverse: " + testZZZ3NodesComplexSumBBReverse());
        System.out.println("testZZZ3NodesComplexSumHHDirect: " + testZZZ3NodesComplexSumHHDirect());
        System.out.println("testZZZ3NodesComplexSumHHReverse: " + testZZZ3NodesComplexSumHHReverse());
        System.out.println("testZZZ3NodesComplexSumHBDirect: " + testZZZ3NodesComplexSumHBDirect());
        System.out.println("testZZZ3NodesComplexSumHBReverse: " + testZZZ3NodesComplexSumHBReverse());
        System.out.println("testZZZ3NodesComplexSetBBDirect: " + testZZZ3NodesComplexSetBBDirect());
        System.out.println("testZZZ3NodesComplexSetBBReverse: " + testZZZ3NodesComplexSetBBReverse());
        System.out.println("testZZZ3NodesComplexSetHHDirect: " + testZZZ3NodesComplexSetHHDirect());
        System.out.println("testZZZ3NodesComplexSetHHReverse: " + testZZZ3NodesComplexSetHHReverse());
        System.out.println("testZZZ3NodesComplexSetHBDirect: " + testZZZ3NodesComplexSetHBDirect());
        System.out.println("testZZZ3NodesComplexSetHBReverse: " + testZZZ3NodesComplexSetHBReverse());
        System.out.println("testZZZIntricateReversePhis2H: " + testZZZIntricateReversePhis2H());
        System.out.println("testZZZIntricateReversePhis2B: " + testZZZIntricateReversePhis2B());
        System.out.println("testZZZIntricateReversePhis3H: " + testZZZIntricateReversePhis3H());
        System.out.println("testZZZIntricateReversePhis3B: " + testZZZIntricateReversePhis3B());
        System.out.println("testZZZIntricateReversePhis2ComplexH: " + testZZZIntricateReversePhis2ComplexH());
        System.out.println("testZZZIntricateReversePhis2ComplexB: " + testZZZIntricateReversePhis2ComplexB());
        System.out.println("testZZZIntricateReversePhis3ComplexH: " + testZZZIntricateReversePhis3ComplexH());
        System.out.println("testZZZIntricateReversePhis3ComplexB: " + testZZZIntricateReversePhis3ComplexB());
        System.out.println("testZZZIntricateDirectPhis3H: " + testZZZIntricateDirectPhis3H());
        System.out.println("testZZZIntricateDirectPhis3B: " + testZZZIntricateDirectPhis3B());
        System.out.println("testZZZIntricateDirectPhis3ComplexH: " + testZZZIntricateDirectPhis3ComplexH());
        System.out.println("testZZZIntricateDirectPhis3ComplexB: " + testZZZIntricateDirectPhis3ComplexB());
        System.out.println("testZZZSuper: " + testZZZSuper());
    }

    // a lags i by one iteration; exit test after the assignment.
    private int testZZZIntricateReversePhis2H() {
        int i = 0;
        int a = 0;
        while (true) {
            a = i++;
            if (i >= LIMIT) break;
        }
        return a + i;
    }

    // Same phi chain, but the exit test precedes the assignment.
    private int testZZZIntricateReversePhis2B() {
        int i = 0;
        int a = 0;
        while (true) {
            if (i >= LIMIT) break;
            a = i++;
        }
        return a + i;
    }

    // Two-deep lag: b gets the previous a, a gets the previous i.
    private int testZZZIntricateReversePhis3H() {
        int i = 0;
        int a = 0;
        int b = 0;
        while (true) {
            b = a;
            a = i++;
            if (i >= LIMIT) break;
        }
        return a + b + i;
    }

    private int testZZZIntricateReversePhis3B() {
        int i = 0;
        int a = 0;
        int b = 0;
        while (true) {
            if (i >= LIMIT) break;
            b = a;
            a = i++;
        }
        return a + b + i;
    }

    // Adds an accumulator read on both sides of the exit test.
    private int testZZZIntricateReversePhis2ComplexH() {
        int i = 0;
        int a = 0;
        int res = 0;
        while (true) {
            res += a;
            a = i++;
            if (i >= LIMIT) break;
            res += a;
        }
        return a + i + res;
    }

    private int testZZZIntricateReversePhis2ComplexB() {
        int i = 0;
        int a = 0;
        int res = 0;
        while (true) {
            res += a;
            if (i >= LIMIT) break;
            a = i++;
            res += a;
        }
        return a + i + res;
    }

    private int testZZZIntricateReversePhis3ComplexH() {
        int i = 0;
        int a = 0;
        int b = 0;
        int res = 0;
        while (true) {
            res += a;
            b = a;
            a = i++;
            if (i >= LIMIT) break;
            res += a;
        }
        return a + b + i + res;
    }

    private int testZZZIntricateReversePhis3ComplexB() {
        int i = 0;
        int a = 0;
        int b = 0;
        int res = 0;
        while (true) {
            res += a;
            if (i >= LIMIT) break;
            b = a;
            a = i++;
            res += a;
        }
        return a + b + i + res;
    }

    // Nested counted loops; post-increment means a lags b by one.
    private int testZZZIntricateDirectPhis3H() {
        int a = 0;
        int b = 0;
        int res = 0;
        for (int i = 0; i < LIMIT; ++i) {
            for (int j = 0; j < LIMIT; ++j) {
                a = (b++);
            }
        }
        return a + b + res;
    }

    // Pre-increment variant: a tracks b exactly.
    private int testZZZIntricateDirectPhis3B() {
        int a = 0;
        int b = 0;
        int res = 0;
        for (int i = 0; i < LIMIT; ++i) {
            for (int j = 0; j < LIMIT; ++j) {
                a = (++b);
            }
        }
        return a + b + res;
    }

    private int testZZZIntricateDirectPhis3ComplexH() {
        int a = 0;
        int b = 0;
        int res = 0;
        for (int i = 0; i < LIMIT; ++i) {
            for (int j = 0; j < LIMIT; ++j) {
                res += a + b;
                a = (b++);
                res += a + b;
            }
        }
        return a + b + res;
    }

    private int testZZZIntricateDirectPhis3ComplexB() {
        int a = 0;
        int b = 0;
        int res = 0;
        for (int i = 0; i < LIMIT; ++i) {
            for (int j = 0; j < LIMIT; ++j) {
                res += a + b;
                a = (++b);
                res += a + b;
            }
        }
        return a + b + res;
    }

    // Exit test between the increment and the accumulation ("B" shape).
    private int testZZZ2NodesSumB() {
        int a = 0;
        int i = 0;
        while (true) {
            i = i + 1;
            if (i >= LIMIT) break;
            a += i + 6;
        }
        return a + i;
    }

    // Exit test at the loop bottom ("H" shape).
    private int testZZZ2NodesSumH() {
        int a = 0;
        int i = 0;
        while (true) {
            i = i + 1;
            a += i + 6;
            if (i >= LIMIT) break;
        }
        return a + i;
    }

    private int testZZZ2NodesSetB() {
        int a = 0;
        int i = 0;
        while (true) {
            i = i + 1;
            if (i >= LIMIT) break;
            a = i;
        }
        return a + i;
    }

    private int testZZZ2NodesSetH() {
        int a = 0;
        int i = 0;
        while (true) {
            i = i + 1;
            a = i;
            if (i >= LIMIT) break;
        }
        return a + i;
    }

    private int testZZZ2NodesComplexSumB() {
        int a = 0;
        int i = 0;
        int res = 0;
        while (true) {
            res += a;
            a += i + 6;
            i = i + 1;
            if (i >= LIMIT) break;
            a += i + 6;
            res += a;
        }
        return a + i + res;
    }

    private int testZZZ2NodesComplexSumH() {
        int a = 0;
        int i = 0;
        int res = 0;
        while (true) {
            res += a;
            a += i + 6;
            i = i + 1;
            a += i + 6;
            if (i >= LIMIT) break;
            res += a;
        }
        return a + i + res;
    }

    private int testZZZ2NodesComplexSetB() {
        int a = 0;
        int i = 0;
        int res = 0;
        while (true) {
            res += a;
            i = i + 1;
            if (i >= LIMIT) break;
            a = i;
            res += a;
        }
        return a + i + res;
    }

    private int testZZZ2NodesComplexSetH() {
        int a = 0;
        int i = 0;
        int res = 0;
        while (true) {
            res += a;
            i = i + 1;
            a = i;
            if (i >= LIMIT) break;
            res += a;
        }
        return a + i + res;
    }

    // Three loop-carried values; "Direct" updates a before b, "Reverse" after.
    private int testZZZ3NodesSumBBDirect() {
        int a = 0;
        int b = 0;
        int i = 0;
        while (true) {
            i = i + 1;
            if (i >= LIMIT) break;
            a += i + 6;
            b += a + a;
        }
        return a + b + i;
    }

    private int testZZZ3NodesSumBBReverse() {
        int a = 0;
        int b = 0;
        int i = 0;
        while (true) {
            i = i + 1;
            if (i >= LIMIT) break;
            b += a + a;
            a += i + 6;
        }
        return a + b + i;
    }

    private int testZZZ3NodesSumHHDirect() {
        int a = 0;
        int b = 0;
        int i = 0;
        while (true) {
            i = i + 1;
            a += i + 6;
            b += a + a;
            if (i >= LIMIT) break;
        }
        return a + b + i;
    }

    private int testZZZ3NodesSumHHReverse() {
        int a = 0;
        int b = 0;
        int i = 0;
        while (true) {
            i = i + 1;
            b += a + a;
            a += i + 6;
            if (i >= LIMIT) break;
        }
        return a + b + i;
    }

    private int testZZZ3NodesSumHBDirect() {
        int a = 0;
        int b = 0;
        int i = 0;
        while (true) {
            i = i + 1;
            a += i + 6;
            if (i >= LIMIT) break;
            b += a + a;
        }
        return a + b + i;
    }

    private int testZZZ3NodesSumHBReverse() {
        int a = 0;
        int b = 0;
        int i = 0;
        while (true) {
            i = i + 1;
            b += a + a;
            if (i >= LIMIT) break;
            a += i + 6;
        }
        return a + b + i;
    }

    private int testZZZ3NodesSetBBDirect() {
        int a = 0;
        int b = 0;
        int i = 0;
        while (true) {
            i = i + 1;
            if (i >= LIMIT) break;
            a += i + 6;
            b = a;
        }
        return a + b + i;
    }

    private int testZZZ3NodesSetBBReverse() {
        int a = 0;
        int b = 0;
        int i = 0;
        while (true) {
            i = i + 1;
            if (i >= LIMIT) break;
            b = a;
            a += i + 6;
        }
        return a + b + i;
    }

    private int testZZZ3NodesSetHHDirect() {
        int a = 0;
        int b = 0;
        int i = 0;
        while (true) {
            i = i + 1;
            a += i + 6;
            b = a;
            if (i >= LIMIT) break;
        }
        return a + b + i;
    }

    private int testZZZ3NodesSetHHReverse() {
        int a = 0;
        int b = 0;
        int i = 0;
        while (true) {
            i = i + 1;
            b = a;
            a += i + 6;
            if (i >= LIMIT) break;
        }
        return a + b + i;
    }

    private int testZZZ3NodesSetHBDirect() {
        int a = 0;
        int b = 0;
        int i = 0;
        while (true) {
            i = i + 1;
            a += i + 6;
            if (i >= LIMIT) break;
            b = a;
        }
        return a + b + i;
    }

    private int testZZZ3NodesSetHBReverse() {
        int a = 0;
        int b = 0;
        int i = 0;
        while (true) {
            i = i + 1;
            b = a;
            if (i >= LIMIT) break;
            a += i + 6;
        }
        return a + b + i;
    }

    private int testZZZ3NodesComplexSumBBDirect() {
        int a = 0;
        int b = 0;
        int i = 0;
        while (true) {
            i = i + 1;
            if (i >= LIMIT) break;
            a += i + 6;
            b += a + a;
            a += i + 6;
        }
        return a + b + i;
    }

    private int testZZZ3NodesComplexSumBBReverse() {
        int a = 0;
        int b = 0;
        int i = 0;
        while (true) {
            i = i + 1;
            if (i >= LIMIT) break;
            b += a + a;
            a += i + 6;
            b += a + a;
        }
        return a + b + i;
    }

    private int testZZZ3NodesComplexSumHHDirect() {
        int a = 0;
        int b = 0;
        int i = 0;
        while (true) {
            i = i + 1;
            a += i + 6;
            b += a + a;
            if (i >= LIMIT) break;
            a += i + 6;
        }
        return a + b + i;
    }

    private int testZZZ3NodesComplexSumHHReverse() {
        int a = 0;
        int b = 0;
        int i = 0;
        while (true) {
            i = i + 1;
            b += a + a;
            a += i + 6;
            if (i >= LIMIT) break;
            b += a + a;
        }
        return a + b + i;
    }

    private int testZZZ3NodesComplexSumHBDirect() {
        int a = 0;
        int b = 0;
        int i = 0;
        while (true) {
            i = i + 1;
            a += i + 6;
            if (i >= LIMIT) break;
            b += a + a;
            a += i + 6;
        }
        return a + b + i;
    }

    private int testZZZ3NodesComplexSumHBReverse() {
        int a = 0;
        int b = 0;
        int i = 0;
        while (true) {
            i = i + 1;
            b += a + a;
            if (i >= LIMIT) break;
            a += i + 6;
            b += a + a;
        }
        return a + b + i;
    }

    private int testZZZ3NodesComplexSetBBDirect() {
        int a = 0;
        int b = 0;
        int i = 0;
        while (true) {
            i = i + 1;
            if (i >= LIMIT) break;
            a += i + 6;
            b = a;
            a += i + 6;
        }
        return a + b + i;
    }

    private int testZZZ3NodesComplexSetBBReverse() {
        int a = 0;
        int b = 0;
        int i = 0;
        while (true) {
            i = i + 1;
            if (i >= LIMIT) break;
            b = a;
            a += i + 6;
            b = a;
        }
        return a + b + i;
    }

    private int testZZZ3NodesComplexSetHHDirect() {
        int a = 0;
        int b = 0;
        int i = 0;
        while (true) {
            i = i + 1;
            a += i + 6;
            b = a;
            if (i >= LIMIT) break;
            a += i + 6;
        }
        return a + b + i;
    }

    private int testZZZ3NodesComplexSetHHReverse() {
        int a = 0;
        int b = 0;
        int i = 0;
        while (true) {
            i = i + 1;
            b = a;
            a += i + 6;
            if (i >= LIMIT) break;
            b = a;
        }
        return a + b + i;
    }

    private int testZZZ3NodesComplexSetHBDirect() {
        int a = 0;
        int b = 0;
        int i = 0;
        while (true) {
            i = i + 1;
            a += i + 6;
            if (i >= LIMIT) break;
            b = a;
            a += i + 6;
        }
        return a + b + i;
    }

    private int testZZZ3NodesComplexSetHBReverse() {
        int a = 0;
        int b = 0;
        int i = 0;
        while (true) {
            i = i + 1;
            b = a;
            if (i >= LIMIT) break;
            a += i + 6;
            b = a;
        }
        return a + b + i;
    }

    // Two sequential loops sharing all loop-carried variables; the second
    // continues from the first's final i and runs to 2*LIMIT.
    private int testZZZSuper() {
        int a = 0;
        int b = 0;
        int i = 0;
        while (true) {
            i = i + 1;
            b = a;
            if (i >= LIMIT) break;
            a += i + 6;
            b = a;
        }
        while (true) {
            i = i + 1;
            b = a;
            if (i >= 2 * LIMIT) break;
            a += i + 6;
            b = a;
        }
        return a + b + i;
    }
}
| |
package com.jeremyfeinstein.slidingmenu.lib;
import java.util.ArrayList;
import java.util.List;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.os.Build;
import android.support.v4.view.KeyEventCompat;
import android.support.v4.view.MotionEventCompat;
import android.support.v4.view.VelocityTrackerCompat;
import android.support.v4.view.ViewCompat;
import android.support.v4.view.ViewConfigurationCompat;
import android.util.AttributeSet;
import java.lang.Math;
//import android.util.FloatMath;
import android.util.Log;
import android.view.FocusFinder;
import android.view.KeyEvent;
import android.view.MotionEvent;
import android.view.SoundEffectConstants;
import android.view.VelocityTracker;
import android.view.View;
import android.view.ViewConfiguration;
import android.view.ViewGroup;
import android.view.animation.Interpolator;
import android.widget.Scroller;
import com.jeremyfeinstein.slidingmenu.lib.SlidingMenu.OnClosedListener;
import com.jeremyfeinstein.slidingmenu.lib.SlidingMenu.OnOpenedListener;
//import com.jeremyfeinstein.slidingmenu.lib.SlidingMenu.OnCloseListener;
//import com.jeremyfeinstein.slidingmenu.lib.SlidingMenu.OnOpenListener;
public class CustomViewAbove extends ViewGroup {
private static final String TAG = "CustomViewAbove";
private static final boolean DEBUG = false;
private static final boolean USE_CACHE = false;
private static final int MAX_SETTLE_DURATION = 600; // ms
private static final int MIN_DISTANCE_FOR_FLING = 25; // dips
// Ease-out quintic: f(t) = (t-1)^5 + 1. Starts fast and decelerates smoothly
// to rest at t = 1; used to settle the sliding-menu scroll animation.
private static final Interpolator sInterpolator = new Interpolator() {
    public float getInterpolation(float t) {
        t -= 1.0f;
        return t * t * t * t * t + 1.0f;
    }
};
private View mContent;
private int mCurItem;
private Scroller mScroller;
private boolean mScrollingCacheEnabled;
private boolean mScrolling;
private boolean mIsBeingDragged;
private boolean mIsUnableToDrag;
private int mTouchSlop;
private float mInitialMotionX;
/**
* Position of the last motion event.
*/
private float mLastMotionX;
private float mLastMotionY;
/**
* ID of the active pointer. This is used to retain consistency during
* drags/flings if multiple pointers are used.
*/
protected int mActivePointerId = INVALID_POINTER;
/**
* Sentinel value for no current active pointer.
* Used by {@link #mActivePointerId}.
*/
private static final int INVALID_POINTER = -1;
/**
* Determines speed during touch scrolling
*/
protected VelocityTracker mVelocityTracker;
private int mMinimumVelocity;
protected int mMaximumVelocity;
private int mFlingDistance;
private CustomViewBehind mViewBehind;
// private int mMode;
private boolean mEnabled = true;
private OnPageChangeListener mOnPageChangeListener;
private OnPageChangeListener mInternalPageChangeListener;
// private OnCloseListener mCloseListener;
// private OnOpenListener mOpenListener;
private OnClosedListener mClosedListener;
private OnOpenedListener mOpenedListener;
private List<View> mIgnoredViews = new ArrayList<View>();
// private int mScrollState = SCROLL_STATE_IDLE;
/**
* Callback interface for responding to changing state of the selected page.
*/
public interface OnPageChangeListener {
/**
* This method will be invoked when the current page is scrolled, either as part
* of a programmatically initiated smooth scroll or a user initiated touch scroll.
*
* @param position Position index of the first page currently being displayed.
* Page position+1 will be visible if positionOffset is nonzero.
* @param positionOffset Value from [0, 1) indicating the offset from the page at position.
* @param positionOffsetPixels Value in pixels indicating the offset from position.
*/
public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels);
/**
* This method will be invoked when a new page becomes selected. Animation is not
* necessarily complete.
*
* @param position Position index of the new selected page.
*/
public void onPageSelected(int position);
}
/**
* Simple implementation of the {@link OnPageChangeListener} interface with stub
* implementations of each method. Extend this if you do not intend to override
* every method of {@link OnPageChangeListener}.
*/
public static class SimpleOnPageChangeListener implements OnPageChangeListener {
public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) {
// This space for rent
}
public void onPageSelected(int position) {
// This space for rent
}
public void onPageScrollStateChanged(int state) {
// This space for rent
}
}
public CustomViewAbove(Context context) {
this(context, null);
}
public CustomViewAbove(Context context, AttributeSet attrs) {
super(context, attrs);
initCustomViewAbove();
}
void initCustomViewAbove() {
setWillNotDraw(false);
setDescendantFocusability(FOCUS_AFTER_DESCENDANTS);
setFocusable(true);
final Context context = getContext();
mScroller = new Scroller(context, sInterpolator);
final ViewConfiguration configuration = ViewConfiguration.get(context);
mTouchSlop = ViewConfigurationCompat.getScaledPagingTouchSlop(configuration);
mMinimumVelocity = configuration.getScaledMinimumFlingVelocity();
mMaximumVelocity = configuration.getScaledMaximumFlingVelocity();
setInternalPageChangeListener(new SimpleOnPageChangeListener() {
public void onPageSelected(int position) {
if (mViewBehind != null) {
switch (position) {
case 0:
case 2:
mViewBehind.setChildrenEnabled(true);
break;
case 1:
mViewBehind.setChildrenEnabled(false);
break;
}
}
}
});
final float density = context.getResources().getDisplayMetrics().density;
mFlingDistance = (int) (MIN_DISTANCE_FOR_FLING * density);
}
/**
* Set the currently selected page. If the CustomViewPager has already been through its first
* layout there will be a smooth animated transition between the current item and the
* specified item.
*
* @param item Item index to select
*/
public void setCurrentItem(int item) {
setCurrentItemInternal(item, true, false);
}
/**
* Set the currently selected page.
*
* @param item Item index to select
* @param smoothScroll True to smoothly scroll to the new item, false to transition immediately
*/
public void setCurrentItem(int item, boolean smoothScroll) {
setCurrentItemInternal(item, smoothScroll, false);
}
public int getCurrentItem() {
return mCurItem;
}
void setCurrentItemInternal(int item, boolean smoothScroll, boolean always) {
setCurrentItemInternal(item, smoothScroll, always, 0);
}
void setCurrentItemInternal(int item, boolean smoothScroll, boolean always, int velocity) {
if (!always && mCurItem == item) {
setScrollingCacheEnabled(false);
return;
}
item = mViewBehind.getMenuPage(item);
final boolean dispatchSelected = mCurItem != item;
mCurItem = item;
final int destX = getDestScrollX(mCurItem);
if (dispatchSelected && mOnPageChangeListener != null) {
mOnPageChangeListener.onPageSelected(item);
}
if (dispatchSelected && mInternalPageChangeListener != null) {
mInternalPageChangeListener.onPageSelected(item);
}
if (smoothScroll) {
smoothScrollTo(destX, 0, velocity);
} else {
completeScroll();
scrollTo(destX, 0);
}
}
/**
* Set a listener that will be invoked whenever the page changes or is incrementally
* scrolled. See {@link OnPageChangeListener}.
*
* @param listener Listener to set
*/
public void setOnPageChangeListener(OnPageChangeListener listener) {
mOnPageChangeListener = listener;
}
/*
public void setOnOpenListener(OnOpenListener l) {
mOpenListener = l;
}
public void setOnCloseListener(OnCloseListener l) {
mCloseListener = l;
}
*/
public void setOnOpenedListener(OnOpenedListener l) {
mOpenedListener = l;
}
public void setOnClosedListener(OnClosedListener l) {
mClosedListener = l;
}
/**
* Set a separate OnPageChangeListener for internal use by the support library.
*
* @param listener Listener to set
* @return The old listener that was set, if any.
*/
OnPageChangeListener setInternalPageChangeListener(OnPageChangeListener listener) {
OnPageChangeListener oldListener = mInternalPageChangeListener;
mInternalPageChangeListener = listener;
return oldListener;
}
public void addIgnoredView(View v) {
if (!mIgnoredViews.contains(v)) {
mIgnoredViews.add(v);
}
}
public void removeIgnoredView(View v) {
mIgnoredViews.remove(v);
}
public void clearIgnoredViews() {
mIgnoredViews.clear();
}
// We want the duration of the page snap animation to be influenced by the distance that
// the screen has to travel, however, we don't want this duration to be affected in a
// purely linear fashion. Instead, we use this method to moderate the effect that the distance
// of travel has on the overall snap duration.
float distanceInfluenceForSnapDuration(float f) {
    // Center the input about zero, then compress it onto a shallow arc of the
    // sine curve so long snaps do not grow the duration linearly.
    final float centered = f - 0.5f;
    final float compressed = (float) (centered * (0.3f * Math.PI / 2.0f));
    return (float) Math.sin(compressed);
}
/**
 * Returns the scroll-x destination for the given logical page.
 * Pages 0 and 2 are the behind menus; page 1 is the content view itself.
 */
public int getDestScrollX(int page) {
    if (page == 0 || page == 2) {
        return mViewBehind.getMenuLeft(mContent, page);
    }
    if (page == 1) {
        return mContent.getLeft();
    }
    return 0;
}
private int getLeftBound() {
return mViewBehind.getAbsLeftBound(mContent);
}
private int getRightBound() {
return mViewBehind.getAbsRightBound(mContent);
}
public int getContentLeft() {
return mContent.getLeft() + mContent.getPaddingLeft();
}
public boolean isMenuOpen() {
return mCurItem == 0 || mCurItem == 2;
}
/** Returns true when the event falls inside any view registered via addIgnoredView(). */
private boolean isInIgnoredView(MotionEvent ev) {
    final int x = (int) ev.getX();
    final int y = (int) ev.getY();
    final Rect hitRect = new Rect();
    for (int i = 0; i < mIgnoredViews.size(); i++) {
        mIgnoredViews.get(i).getHitRect(hitRect);
        if (hitRect.contains(x, y)) {
            return true;
        }
    }
    return false;
}
/** Width of the behind (menu) view, or 0 when it has not been attached yet. */
public int getBehindWidth() {
    return (mViewBehind == null) ? 0 : mViewBehind.getBehindWidth();
}
/** Width of the logical child: 0 = behind menu, 1 = content, anything else 0. */
public int getChildWidth(int i) {
    if (i == 0) {
        return getBehindWidth();
    }
    if (i == 1) {
        return mContent.getWidth();
    }
    return 0;
}
public boolean isSlidingEnabled() {
return mEnabled;
}
public void setSlidingEnabled(boolean b) {
mEnabled = b;
}
/**
* Like {@link View#scrollBy}, but scroll smoothly instead of immediately.
*
* @param x the number of pixels to scroll by on the X axis
* @param y the number of pixels to scroll by on the Y axis
*/
void smoothScrollTo(int x, int y) {
smoothScrollTo(x, y, 0);
}
/**
* Like {@link View#scrollBy}, but scroll smoothly instead of immediately.
*
* @param x the number of pixels to scroll by on the X axis
* @param y the number of pixels to scroll by on the Y axis
* @param velocity the velocity associated with a fling, if applicable. (0 otherwise)
*/
/**
 * Like {@link View#scrollBy}, but scroll smoothly instead of immediately.
 *
 * @param x the number of pixels to scroll by on the X axis
 * @param y the number of pixels to scroll by on the Y axis
 * @param velocity the velocity associated with a fling, if applicable. (0 otherwise)
 */
void smoothScrollTo(int x, int y, int velocity) {
    if (getChildCount() == 0) {
        // Nothing to do.
        setScrollingCacheEnabled(false);
        return;
    }
    int sx = getScrollX();
    int sy = getScrollY();
    int dx = x - sx;
    int dy = y - sy;
    if (dx == 0 && dy == 0) {
        // Already at the destination: finish bookkeeping and notify the
        // opened/closed listener immediately instead of animating.
        completeScroll();
        if (isMenuOpen()) {
            if (mOpenedListener != null)
                mOpenedListener.onOpened();
        } else {
            if (mClosedListener != null)
                mClosedListener.onClosed();
        }
        return;
    }
    setScrollingCacheEnabled(true);
    mScrolling = true;
    final int width = getBehindWidth();
    final int halfWidth = width / 2;
    // Moderate the effect of travel distance on duration via a sine curve.
    final float distanceRatio = Math.min(1f, 1.0f * Math.abs(dx) / width);
    final float distance = halfWidth + halfWidth *
            distanceInfluenceForSnapDuration(distanceRatio);
    int duration = 0;
    velocity = Math.abs(velocity);
    if (velocity > 0) {
        duration = 4 * Math.round(1000 * Math.abs(distance / velocity));
    } else {
        // No fling velocity: use the maximum settle duration.
        // (A page-delta based duration used to be computed here but was
        // immediately overwritten; the dead store has been removed.)
        duration = MAX_SETTLE_DURATION;
    }
    duration = Math.min(duration, MAX_SETTLE_DURATION);
    mScroller.startScroll(sx, sy, dx, dy, duration);
    invalidate();
}
public void setContent(View v) {
if (mContent != null)
this.removeView(mContent);
mContent = v;
addView(mContent);
}
public View getContent() {
return mContent;
}
public void setCustomViewBehind(CustomViewBehind cvb) {
mViewBehind = cvb;
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
int width = getDefaultSize(0, widthMeasureSpec);
int height = getDefaultSize(0, heightMeasureSpec);
setMeasuredDimension(width, height);
final int contentWidth = getChildMeasureSpec(widthMeasureSpec, 0, width);
final int contentHeight = getChildMeasureSpec(heightMeasureSpec, 0, height);
mContent.measure(contentWidth, contentHeight);
}
@Override
protected void onSizeChanged(int w, int h, int oldw, int oldh) {
super.onSizeChanged(w, h, oldw, oldh);
// Make sure scroll position is set correctly.
if (w != oldw) {
// [ChrisJ] - This fixes the onConfiguration change for orientation issue..
// maybe worth having a look why the recomputeScroll pos is screwing
// up?
completeScroll();
scrollTo(getDestScrollX(mCurItem), getScrollY());
}
}
@Override
protected void onLayout(boolean changed, int l, int t, int r, int b) {
final int width = r - l;
final int height = b - t;
mContent.layout(0, 0, width, height);
}
public void setAboveOffset(int i) {
// RelativeLayout.LayoutParams params = ((RelativeLayout.LayoutParams)mContent.getLayoutParams());
// params.setMargins(i, params.topMargin, params.rightMargin, params.bottomMargin);
mContent.setPadding(i, mContent.getPaddingTop(),
mContent.getPaddingRight(), mContent.getPaddingBottom());
}
@Override
public void computeScroll() {
    // Called by the framework during drawing while a Scroller-driven
    // animation is in flight; advances the scroll one frame at a time.
    if (!mScroller.isFinished()) {
        if (mScroller.computeScrollOffset()) {
            int oldX = getScrollX();
            int oldY = getScrollY();
            int x = mScroller.getCurrX();
            int y = mScroller.getCurrY();
            if (oldX != x || oldY != y) {
                // Apply the frame and notify page-scroll listeners.
                scrollTo(x, y);
                pageScrolled(x);
            }
            // Keep on drawing until the animation has finished.
            invalidate();
            return;
        }
    }
    // Done with scroll, clean up state.
    completeScroll();
}
/**
 * Translates an absolute scroll-x position into (page, offset) and forwards
 * it to {@link #onPageScrolled(int, float, int)}.
 *
 * @param xpos current horizontal scroll position in pixels
 */
private void pageScrolled(int xpos) {
    final int widthWithMargin = getWidth();
    // Guard against divide-by-zero: getWidth() is 0 before the first
    // layout/measure pass.
    if (widthWithMargin <= 0) {
        return;
    }
    final int position = xpos / widthWithMargin;
    final int offsetPixels = xpos % widthWithMargin;
    final float offset = (float) offsetPixels / widthWithMargin;
    onPageScrolled(position, offset, offsetPixels);
}
/**
* This method will be invoked when the current page is scrolled, either as part
* of a programmatically initiated smooth scroll or a user initiated touch scroll.
* If you override this method you must call through to the superclass implementation
* (e.g. super.onPageScrolled(position, offset, offsetPixels)) before onPageScrolled
* returns.
*
* @param position Position index of the first page currently being displayed.
* Page position+1 will be visible if positionOffset is nonzero.
* @param offset Value from [0, 1) indicating the offset from the page at position.
* @param offsetPixels Value in pixels indicating the offset from position.
*/
protected void onPageScrolled(int position, float offset, int offsetPixels) {
if (mOnPageChangeListener != null) {
mOnPageChangeListener.onPageScrolled(position, offset, offsetPixels);
}
if (mInternalPageChangeListener != null) {
mInternalPageChangeListener.onPageScrolled(position, offset, offsetPixels);
}
}
// Finishes any in-flight programmatic scroll: aborts the Scroller, jumps to
// its final position, disables the drawing cache, and fires the
// opened/closed callback. Safe to call when no scroll is in progress.
private void completeScroll() {
    boolean needPopulate = mScrolling;
    if (needPopulate) {
        // Done with scroll, no longer want to cache view drawing.
        setScrollingCacheEnabled(false);
        mScroller.abortAnimation();
        int oldX = getScrollX();
        int oldY = getScrollY();
        // abortAnimation() snaps the Scroller to its final values; make the
        // view's actual scroll position agree with them.
        int x = mScroller.getCurrX();
        int y = mScroller.getCurrY();
        if (oldX != x || oldY != y) {
            scrollTo(x, y);
        }
        // Report the terminal state to whichever listener applies.
        if (isMenuOpen()) {
            if (mOpenedListener != null)
                mOpenedListener.onOpened();
        } else {
            if (mClosedListener != null)
                mClosedListener.onClosed();
        }
    }
    mScrolling = false;
}
protected int mTouchMode = SlidingMenu.TOUCHMODE_MARGIN;
public void setTouchMode(int i) {
mTouchMode = i;
}
public int getTouchMode() {
return mTouchMode;
}
/** Decides whether a touch at this event's position may start interacting with the menu. */
private boolean thisTouchAllowed(MotionEvent ev) {
    final int x = (int) (ev.getX() + mScrollX);
    if (isMenuOpen()) {
        // When a menu is showing, the behind view decides which touches count.
        return mViewBehind.menuOpenTouchAllowed(mContent, mCurItem, x);
    }
    if (mTouchMode == SlidingMenu.TOUCHMODE_FULLSCREEN) {
        return !isInIgnoredView(ev);
    }
    if (mTouchMode == SlidingMenu.TOUCHMODE_MARGIN) {
        return mViewBehind.marginTouchAllowed(mContent, x);
    }
    // TOUCHMODE_NONE or an unknown mode: reject the touch.
    return false;
}
/** Asks the behind view whether a horizontal slide of dx is permitted right now. */
private boolean thisSlideAllowed(float dx) {
    final boolean allowed = isMenuOpen()
            ? mViewBehind.menuOpenSlideAllowed(dx)
            : mViewBehind.menuClosedSlideAllowed(dx);
    if (DEBUG)
        Log.v(TAG, "this slide allowed " + allowed + " dx: " + dx);
    return allowed;
}
/** Resolves a pointer id to its index; invalidates the active pointer when it is gone. */
private int getPointerIndex(MotionEvent ev, int id) {
    final int index = MotionEventCompat.findPointerIndex(ev, id);
    if (index == -1) {
        // The pointer vanished; reset so later handlers can bail out cleanly.
        mActivePointerId = INVALID_POINTER;
    }
    return index;
}
private boolean mQuickReturn = false;
@Override
public boolean onInterceptTouchEvent(MotionEvent ev) {
    // Decides whether this ViewGroup steals the gesture from its children.
    // Returns true (intercept) once a horizontal drag is recognized, or while
    // a quick-return tap on the open menu is pending.
    if (!mEnabled)
        return false;
    final int action = ev.getAction() & MotionEventCompat.ACTION_MASK;
    if (DEBUG)
        if (action == MotionEvent.ACTION_DOWN)
            Log.v(TAG, "Received ACTION_DOWN");
    // End of gesture, or a gesture we already decided we cannot drag: reset.
    if (action == MotionEvent.ACTION_CANCEL || action == MotionEvent.ACTION_UP
            || (action != MotionEvent.ACTION_DOWN && mIsUnableToDrag)) {
        endDrag();
        return false;
    }
    switch (action) {
    case MotionEvent.ACTION_MOVE:
        // May promote the gesture to a drag once it passes the touch slop.
        determineDrag(ev);
        break;
    case MotionEvent.ACTION_DOWN:
        int index = MotionEventCompat.getActionIndex(ev);
        mActivePointerId = MotionEventCompat.getPointerId(ev, index);
        if (mActivePointerId == INVALID_POINTER)
            break;
        // Remember where the gesture started for slop/fling calculations.
        mLastMotionX = mInitialMotionX = MotionEventCompat.getX(ev, index);
        mLastMotionY = MotionEventCompat.getY(ev, index);
        if (thisTouchAllowed(ev)) {
            mIsBeingDragged = false;
            mIsUnableToDrag = false;
            // A tap on the content area while the menu is open may close it
            // on ACTION_UP ("quick return").
            if (isMenuOpen() && mViewBehind.menuTouchInQuickReturn(mContent, mCurItem, ev.getX() + mScrollX)) {
                mQuickReturn = true;
            }
        } else {
            mIsUnableToDrag = true;
        }
        break;
    case MotionEventCompat.ACTION_POINTER_UP:
        onSecondaryPointerUp(ev);
        break;
    }
    if (!mIsBeingDragged) {
        // Keep feeding the velocity tracker even before a drag starts so the
        // eventual fling velocity is accurate.
        if (mVelocityTracker == null) {
            mVelocityTracker = VelocityTracker.obtain();
        }
        mVelocityTracker.addMovement(ev);
    }
    return mIsBeingDragged || mQuickReturn;
}
@Override
public boolean onTouchEvent(MotionEvent ev) {
    // Handles the gesture once intercepted (or when no child consumed it):
    // tracks the drag, clamps the scroll to the menu bounds, and on release
    // snaps/flings to the target page.
    if (!mEnabled)
        return false;
    if (!mIsBeingDragged && !thisTouchAllowed(ev))
        return false;
    //	if (!mIsBeingDragged && !mQuickReturn)
    //	return false;
    final int action = ev.getAction();
    if (mVelocityTracker == null) {
        mVelocityTracker = VelocityTracker.obtain();
    }
    mVelocityTracker.addMovement(ev);
    switch (action & MotionEventCompat.ACTION_MASK) {
    case MotionEvent.ACTION_DOWN:
        /*
         * If being flinged and user touches, stop the fling. isFinished
         * will be false if being flinged.
         */
        completeScroll();
        // Remember where the motion event started
        int index = MotionEventCompat.getActionIndex(ev);
        mActivePointerId = MotionEventCompat.getPointerId(ev, index);
        mLastMotionX = mInitialMotionX = ev.getX();
        break;
    case MotionEvent.ACTION_MOVE:
        if (!mIsBeingDragged) {
            // Not dragging yet: re-check the slop; bail if this gesture was
            // ruled out (e.g. it is a vertical scroll).
            determineDrag(ev);
            if (mIsUnableToDrag)
                return false;
        }
        if (mIsBeingDragged) {
            // Scroll to follow the motion event
            final int activePointerIndex = getPointerIndex(ev, mActivePointerId);
            if (mActivePointerId == INVALID_POINTER)
                break;
            final float x = MotionEventCompat.getX(ev, activePointerIndex);
            final float deltaX = mLastMotionX - x;
            mLastMotionX = x;
            float oldScrollX = getScrollX();
            float scrollX = oldScrollX + deltaX;
            // Clamp the scroll to the legal range given the attached menus.
            final float leftBound = getLeftBound();
            final float rightBound = getRightBound();
            if (scrollX < leftBound) {
                scrollX = leftBound;
            } else if (scrollX > rightBound) {
                scrollX = rightBound;
            }
            // Don't lose the rounded component
            mLastMotionX += scrollX - (int) scrollX;
            scrollTo((int) scrollX, getScrollY());
            pageScrolled((int) scrollX);
        }
        break;
    case MotionEvent.ACTION_UP:
        if (mIsBeingDragged) {
            // Decide the destination page from fling velocity + travel, then
            // animate there.
            final VelocityTracker velocityTracker = mVelocityTracker;
            velocityTracker.computeCurrentVelocity(1000, mMaximumVelocity);
            int initialVelocity = (int) VelocityTrackerCompat.getXVelocity(
                    velocityTracker, mActivePointerId);
            final int scrollX = getScrollX();
            final float pageOffset = (float) (scrollX - getDestScrollX(mCurItem)) / getBehindWidth();
            final int activePointerIndex = getPointerIndex(ev, mActivePointerId);
            if (mActivePointerId != INVALID_POINTER) {
                final float x = MotionEventCompat.getX(ev, activePointerIndex);
                final int totalDelta = (int) (x - mInitialMotionX);
                int nextPage = determineTargetPage(pageOffset, initialVelocity, totalDelta);
                setCurrentItemInternal(nextPage, true, true, initialVelocity);
            } else {
                // Active pointer lost mid-gesture: settle back on the current page.
                setCurrentItemInternal(mCurItem, true, true, initialVelocity);
            }
            mActivePointerId = INVALID_POINTER;
            endDrag();
        } else if (mQuickReturn && mViewBehind.menuTouchInQuickReturn(mContent, mCurItem, ev.getX() + mScrollX)) {
            // close the menu
            setCurrentItem(1);
            endDrag();
        }
        break;
    case MotionEvent.ACTION_CANCEL:
        if (mIsBeingDragged) {
            // Gesture cancelled: snap back to the current page.
            setCurrentItemInternal(mCurItem, true, true);
            mActivePointerId = INVALID_POINTER;
            endDrag();
        }
        break;
    case MotionEventCompat.ACTION_POINTER_DOWN: {
        // A second finger went down: it becomes the active pointer.
        final int indexx = MotionEventCompat.getActionIndex(ev);
        mLastMotionX = MotionEventCompat.getX(ev, indexx);
        mActivePointerId = MotionEventCompat.getPointerId(ev, indexx);
        break;
    }
    case MotionEventCompat.ACTION_POINTER_UP:
        onSecondaryPointerUp(ev);
        int pointerIndex = getPointerIndex(ev, mActivePointerId);
        if (mActivePointerId == INVALID_POINTER)
            break;
        // Re-anchor to the surviving pointer to avoid a jump.
        mLastMotionX = MotionEventCompat.getX(ev, pointerIndex);
        break;
    }
    return true;
}
// Examines a MOVE event and decides whether the gesture becomes a horizontal
// drag (startDrag) or is ruled out entirely (mIsUnableToDrag), based on the
// touch slop and whether the slide direction is allowed.
private void determineDrag(MotionEvent ev) {
    final int activePointerId = mActivePointerId;
    final int pointerIndex = getPointerIndex(ev, activePointerId);
    if (activePointerId == INVALID_POINTER || pointerIndex == INVALID_POINTER)
        return;
    final float x = MotionEventCompat.getX(ev, pointerIndex);
    final float dx = x - mLastMotionX;
    final float xDiff = Math.abs(dx);
    final float y = MotionEventCompat.getY(ev, pointerIndex);
    final float dy = y - mLastMotionY;
    final float yDiff = Math.abs(dy);
    // Half the slop when the menu is open, so closing it feels more responsive.
    // The gesture must also be predominantly horizontal and allowed by the behind view.
    if (xDiff > (isMenuOpen()?mTouchSlop/2:mTouchSlop) && xDiff > yDiff && thisSlideAllowed(dx)) {
        startDrag();
        mLastMotionX = x;
        mLastMotionY = y;
        setScrollingCacheEnabled(true);
        // TODO add back in touch slop check
    } else if (xDiff > mTouchSlop) {
        // Moved past the slop but not as a legal horizontal drag: give up on
        // this gesture so children can have it.
        mIsUnableToDrag = true;
    }
}
@Override
public void scrollTo(int x, int y) {
super.scrollTo(x, y);
mScrollX = x;
mViewBehind.scrollBehindTo(mContent, x, y);
((SlidingMenu)getParent()).manageLayers(getPercentOpen());
}
/**
 * Picks the page to settle on after a release: a genuine fling (far enough
 * and fast enough) moves one page in the fling direction, otherwise the
 * nearest page wins.
 */
private int determineTargetPage(float pageOffset, int velocity, int deltaX) {
    final boolean isFling = Math.abs(deltaX) > mFlingDistance
            && Math.abs(velocity) > mMinimumVelocity;
    int targetPage = mCurItem;
    if (isFling) {
        if (velocity > 0 && deltaX > 0) {
            targetPage--;
        } else if (velocity < 0 && deltaX < 0) {
            targetPage++;
        }
    } else {
        targetPage = Math.round(mCurItem + pageOffset);
    }
    return targetPage;
}
/**
 * Fraction [0, 1] describing how far the menu is open, derived from the
 * current scroll position relative to the content's left edge.
 */
protected float getPercentOpen() {
    final int behindWidth = getBehindWidth();
    // Guard against NaN/Infinity: the behind view may not be measured yet
    // (getBehindWidth() returns 0), and the result feeds manageLayers().
    if (behindWidth == 0) {
        return 0f;
    }
    return Math.abs(mScrollX - mContent.getLeft()) / behindWidth;
}
@Override
protected void dispatchDraw(Canvas canvas) {
super.dispatchDraw(canvas);
// Draw the margin drawable if needed.
mViewBehind.drawShadow(mContent, canvas);
mViewBehind.drawFade(mContent, canvas, getPercentOpen());
mViewBehind.drawSelector(mContent, canvas, getPercentOpen());
}
// variables for drawing
private float mScrollX = 0.0f;
private void onSecondaryPointerUp(MotionEvent ev) {
if (DEBUG) Log.v(TAG, "onSecondaryPointerUp called");
final int pointerIndex = MotionEventCompat.getActionIndex(ev);
final int pointerId = MotionEventCompat.getPointerId(ev, pointerIndex);
if (pointerId == mActivePointerId) {
// This was our active pointer going up. Choose a new
// active pointer and adjust accordingly.
final int newPointerIndex = pointerIndex == 0 ? 1 : 0;
mLastMotionX = MotionEventCompat.getX(ev, newPointerIndex);
mActivePointerId = MotionEventCompat.getPointerId(ev, newPointerIndex);
if (mVelocityTracker != null) {
mVelocityTracker.clear();
}
}
}
private void startDrag() {
mIsBeingDragged = true;
mQuickReturn = false;
}
private void endDrag() {
mQuickReturn = false;
mIsBeingDragged = false;
mIsUnableToDrag = false;
mActivePointerId = INVALID_POINTER;
if (mVelocityTracker != null) {
mVelocityTracker.recycle();
mVelocityTracker = null;
}
}
/** Toggles the children's drawing cache while a scroll is in progress (no-op unless USE_CACHE). */
private void setScrollingCacheEnabled(boolean enabled) {
    if (mScrollingCacheEnabled == enabled) {
        return;
    }
    mScrollingCacheEnabled = enabled;
    if (!USE_CACHE) {
        return;
    }
    // Propagate the cache flag to every visible child.
    for (int i = 0, n = getChildCount(); i < n; i++) {
        final View child = getChildAt(i);
        if (child.getVisibility() != GONE) {
            child.setDrawingCacheEnabled(enabled);
        }
    }
}
/**
* Tests scrollability within child views of v given a delta of dx.
*
* @param v View to test for horizontal scrollability
* @param checkV Whether the view v passed should itself be checked for scrollability (true),
* or just its children (false).
* @param dx Delta scrolled in pixels
* @param x X coordinate of the active touch point
* @param y Y coordinate of the active touch point
* @return true if child views of v can be scrolled by delta of dx.
*/
/**
 * Tests scrollability within child views of v given a delta of dx.
 *
 * @param v View to test for horizontal scrollability
 * @param checkV Whether the view v passed should itself be checked for scrollability (true),
 *               or just its children (false).
 * @param dx Delta scrolled in pixels
 * @param x X coordinate of the active touch point
 * @param y Y coordinate of the active touch point
 * @return true if child views of v can be scrolled by delta of dx.
 */
protected boolean canScroll(View v, boolean checkV, int dx, int x, int y) {
    if (v instanceof ViewGroup) {
        final ViewGroup group = (ViewGroup) v;
        final int scrollX = v.getScrollX();
        final int scrollY = v.getScrollY();
        final int childX = x + scrollX;
        final int childY = y + scrollY;
        // Walk children from topmost (last drawn) to bottommost so the view
        // on top gets first claim on the scroll distance.
        for (int i = group.getChildCount() - 1; i >= 0; i--) {
            final View child = group.getChildAt(i);
            final boolean hit = childX >= child.getLeft() && childX < child.getRight()
                    && childY >= child.getTop() && childY < child.getBottom();
            if (hit && canScroll(child, true, dx,
                    childX - child.getLeft(), childY - child.getTop())) {
                return true;
            }
        }
    }
    return checkV && ViewCompat.canScrollHorizontally(v, -dx);
}
@Override
public boolean dispatchKeyEvent(KeyEvent event) {
// Let the focused view and/or our descendants get the key first
return super.dispatchKeyEvent(event) || executeKeyEvent(event);
}
/**
* You can call this function yourself to have the scroll view perform
* scrolling from a key event, just as if the event had been dispatched to
* it by the view hierarchy.
*
* @param event The key event to execute.
* @return Return true if the event was handled, else false.
*/
/**
 * You can call this function yourself to have the scroll view perform
 * scrolling from a key event, just as if the event had been dispatched to
 * it by the view hierarchy.
 *
 * @param event The key event to execute.
 * @return Return true if the event was handled, else false.
 */
public boolean executeKeyEvent(KeyEvent event) {
    if (event.getAction() != KeyEvent.ACTION_DOWN) {
        return false;
    }
    switch (event.getKeyCode()) {
    case KeyEvent.KEYCODE_DPAD_LEFT:
        return arrowScroll(FOCUS_LEFT);
    case KeyEvent.KEYCODE_DPAD_RIGHT:
        return arrowScroll(FOCUS_RIGHT);
    case KeyEvent.KEYCODE_TAB:
        // The focus finder had a bug handling FOCUS_FORWARD and FOCUS_BACKWARD
        // before Android 3.0, so the tab key is ignored on those devices.
        if (Build.VERSION.SDK_INT >= 11) {
            if (KeyEventCompat.hasNoModifiers(event)) {
                return arrowScroll(FOCUS_FORWARD);
            }
            if (KeyEventCompat.hasModifiers(event, KeyEvent.META_SHIFT_ON)) {
                return arrowScroll(FOCUS_BACKWARD);
            }
        }
        return false;
    default:
        return false;
    }
}
public boolean arrowScroll(int direction) {
View currentFocused = findFocus();
if (currentFocused == this) currentFocused = null;
boolean handled = false;
View nextFocused = FocusFinder.getInstance().findNextFocus(this, currentFocused,
direction);
if (nextFocused != null && nextFocused != currentFocused) {
if (direction == View.FOCUS_LEFT) {
handled = nextFocused.requestFocus();
} else if (direction == View.FOCUS_RIGHT) {
// If there is nothing to the right, or this is causing us to
// jump to the left, then what we really want to do is page right.
if (currentFocused != null && nextFocused.getLeft() <= currentFocused.getLeft()) {
handled = pageRight();
} else {
handled = nextFocused.requestFocus();
}
}
} else if (direction == FOCUS_LEFT || direction == FOCUS_BACKWARD) {
// Trying to move left and nothing there; try to page.
handled = pageLeft();
} else if (direction == FOCUS_RIGHT || direction == FOCUS_FORWARD) {
// Trying to move right and nothing there; try to page.
handled = pageRight();
}
if (handled) {
playSoundEffect(SoundEffectConstants.getContantForFocusDirection(direction));
}
return handled;
}
/** Pages one item to the left; returns false when already at the leftmost page. */
boolean pageLeft() {
    if (mCurItem <= 0) {
        return false;
    }
    setCurrentItem(mCurItem - 1, true);
    return true;
}
/** Pages one item to the right; returns false when already at/past the content page. */
boolean pageRight() {
    if (mCurItem >= 1) {
        return false;
    }
    setCurrentItem(mCurItem + 1, true);
    return true;
}
}
| |
/*
* Copyright (c) 1999, 2011, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.sun.jndi.ldap;
import javax.naming.*;
import javax.naming.ldap.Control;
import java.util.Hashtable;
import java.util.Vector;
/**
* This exception is raised when a referral to an alternative context
* is encountered.
* <p>
* An <tt>LdapReferralException</tt> object contains one or more referrals.
* Each referral is an alternative location for the same target entry.
* For example, a referral may be an LDAP URL.
* The referrals are attempted in sequence until one is successful or
* all have failed. In the case of the latter then the exception generated
* by the final referral is recorded and presented later.
* <p>
* A referral may be skipped or may be retried. For example, in the case
* of an authentication error, a referral may be retried with different
* environment properties.
* <p>
* An <tt>LdapReferralException</tt> object may also contain a reference
* to a chain of unprocessed <tt>LdapReferralException</tt> objects.
* Once the current set of referrals have been exhausted and unprocessed
* <tt>LdapReferralException</tt> objects remain, then the
* <tt>LdapReferralException</tt> object referenced by the current
* object is thrown and the cycle continues.
* <p>
* If new <tt>LdapReferralException</tt> objects are generated while
* following an existing referral then these new objects are appended
* to the end of the chain of unprocessed <tt>LdapReferralException</tt>
* objects.
* <p>
 * If an exception was recorded while processing a chain of
 * <tt>LdapReferralException</tt> objects then it is thrown once
 * processing has completed.
*
* @author Vincent Ryan
*/
final public class LdapReferralException extends
javax.naming.ldap.LdapReferralException {
private static final long serialVersionUID = 627059076356906399L;
// ----------- fields initialized in constructor ---------------
private int handleReferrals;
private Hashtable<?,?> envprops;
private String nextName;
private Control[] reqCtls;
// ----------- fields that have defaults -----------------------
private Vector<?> referrals = null; // alternatives,set by setReferralInfo()
private int referralIndex = 0; // index into referrals
private int referralCount = 0; // count of referrals
private boolean foundEntry = false; // will stop when entry is found
private boolean skipThisReferral = false;
private int hopCount = 1;
private NamingException errorEx = null;
private String newRdn = null;
private boolean debug = false;
LdapReferralException nextReferralEx = null; // referral ex. chain
/**
* Constructs a new instance of LdapReferralException.
* @param resolvedName The part of the name that has been successfully
* resolved.
* @param resolvedObj The object to which resolution was successful.
* @param remainingName The remaining unresolved portion of the name.
* @param explanation Additional detail about this exception.
*/
LdapReferralException(Name resolvedName,
Object resolvedObj,
Name remainingName,
String explanation,
Hashtable<?,?> envprops,
String nextName,
int handleReferrals,
Control[] reqCtls) {
super(explanation);
if (debug)
System.out.println("LdapReferralException constructor");
setResolvedName(resolvedName);
setResolvedObj(resolvedObj);
setRemainingName(remainingName);
this.envprops = envprops;
this.nextName = nextName;
this.handleReferrals = handleReferrals;
// If following referral, request controls are passed to referral ctx
this.reqCtls =
(handleReferrals == LdapClient.LDAP_REF_FOLLOW ? reqCtls : null);
}
/**
* Gets a context at which to continue processing.
* The current environment properties are re-used.
*/
public Context getReferralContext() throws NamingException {
return getReferralContext(envprops, null);
}
/**
* Gets a context at which to continue processing.
* The supplied environment properties are used.
*/
public Context getReferralContext(Hashtable<?,?> newProps) throws
NamingException {
return getReferralContext(newProps, null);
}
/**
* Gets a context at which to continue processing.
* The supplied environment properties and connection controls are used.
*/
public Context getReferralContext(Hashtable<?,?> newProps, Control[] connCtls)
throws NamingException {
if (debug)
System.out.println("LdapReferralException.getReferralContext");
LdapReferralContext refCtx = new LdapReferralContext(
this, newProps, connCtls, reqCtls,
nextName, skipThisReferral, handleReferrals);
refCtx.setHopCount(hopCount + 1);
if (skipThisReferral) {
skipThisReferral = false; // reset
}
return (Context)refCtx;
}
/**
* Gets referral information.
*/
public Object getReferralInfo() {
    if (debug) {
        System.out.println("LdapReferralException.getReferralInfo");
        System.out.println("  referralIndex=" + referralIndex);
    }
    // The referral currently being processed, or null once exhausted.
    return hasMoreReferrals() ? referrals.elementAt(referralIndex) : null;
}
/**
* Marks the current referral as one to be retried.
*/
public void retryReferral() {
    if (debug)
        System.out.println("LdapReferralException.retryReferral");
    // Step back one slot so getNextReferral() hands out the same referral again.
    if (referralIndex > 0) {
        referralIndex--;
    }
}
/**
* Marks the current referral as one to be ignored.
* Returns false when there are no referrals remaining to be processed.
*/
public boolean skipReferral() {
    if (debug)
        System.out.println("LdapReferralException.skipReferral");
    // Flag consumed by getReferralContext(): the next context created will
    // skip this referral.
    skipThisReferral = true;
    // advance to next referral
    try {
        getNextReferral();
    } catch (ReferralException e) {
        // mask the referral exception
        // (deliberate: a chained referral exception here only means the current
        // set is exhausted; the caller learns that from the return value)
    }
    return (hasMoreReferrals() || hasMoreReferralExceptions());
}
/**
* Sets referral information.
*/
void setReferralInfo(Vector<?> referrals, boolean continuationRef) {
// %%% continuationRef is currently ignored
if (debug)
System.out.println("LdapReferralException.setReferralInfo");
this.referrals = referrals;
if (referrals != null) {
referralCount = referrals.size();
}
if (debug) {
for (int i = 0; i < referralCount; i++) {
System.out.println(" [" + i + "] " + referrals.elementAt(i));
}
}
}
/**
* Gets the next referral. When the current set of referrals have
* been exhausted then the next referral exception is thrown, if available.
*/
String getNextReferral() throws ReferralException {
if (debug)
System.out.println("LdapReferralException.getNextReferral");
if (hasMoreReferrals()) {
return (String)referrals.elementAt(referralIndex++);
} else if (hasMoreReferralExceptions()) {
throw nextReferralEx;
} else {
return null;
}
}
    /**
     * Appends the supplied (chain of) referral exception onto the end of
     * the current (chain of) referral exception. Spent referral exceptions
     * are trimmed off.
     *
     * @param back chain to append; may be null, spent, or this exception itself
     * @return the head of the combined chain, or null when both chains are spent
     */
    LdapReferralException
        appendUnprocessedReferrals(LdapReferralException back) {
        if (debug) {
            System.out.println(
                "LdapReferralException.appendUnprocessedReferrals");
            dump();
            if (back != null) {
                back.dump();
            }
        }
        // Start from this exception; drop it if its referrals are already spent.
        LdapReferralException front = this;
        if (! front.hasMoreReferrals()) {
            front = nextReferralEx; // trim
            // Carry the recorded error forward so it is not lost with the
            // trimmed head.
            if ((errorEx != null) && (front != null)) {
                front.setNamingException(errorEx); //advance the saved exception
            }
        }
        // don't append onto itself
        if (this == back) {
            return front;
        }
        // Likewise drop the head of 'back' if it is spent.
        if ((back != null) && (! back.hasMoreReferrals())) {
            back = back.nextReferralEx; // trim
        }
        if (back == null) {
            return front;
        }
        // Locate the end of the current chain
        LdapReferralException ptr = front;
        while (ptr.nextReferralEx != null) {
            ptr = ptr.nextReferralEx;
        }
        ptr.nextReferralEx = back; // append
        return front;
    }
/**
* Tests if there are any referrals remaining to be processed.
* If name resolution has already completed then any remaining
* referrals (in the current referral exception) will be ignored.
*/
boolean hasMoreReferrals() {
if (debug)
System.out.println("LdapReferralException.hasMoreReferrals");
return (! foundEntry) && (referralIndex < referralCount);
}
/**
* Tests if there are any referral exceptions remaining to be processed.
*/
boolean hasMoreReferralExceptions() {
if (debug)
System.out.println(
"LdapReferralException.hasMoreReferralExceptions");
return (nextReferralEx != null);
}
/**
* Sets the counter which records the number of hops that result
* from following a sequence of referrals.
*/
void setHopCount(int hopCount) {
if (debug)
System.out.println("LdapReferralException.setHopCount");
this.hopCount = hopCount;
}
/**
* Sets the flag to indicate that the target name has been resolved.
*/
void setNameResolved(boolean resolved) {
if (debug)
System.out.println("LdapReferralException.setNameResolved");
foundEntry = resolved;
}
/**
* Sets the exception generated while processing a referral.
* Only the first exception is recorded.
*/
void setNamingException(NamingException e) {
if (debug)
System.out.println("LdapReferralException.setNamingException");
if (errorEx == null) {
e.setRootCause(this); //record the referral exception that caused it
errorEx = e;
}
}
/**
* Gets the new RDN name.
*/
String getNewRdn() {
if (debug)
System.out.println("LdapReferralException.getNewRdn");
return newRdn;
}
/**
* Sets the new RDN name so that the rename operation can be completed
* (when a referral is being followed).
*/
void setNewRdn(String newRdn) {
if (debug)
System.out.println("LdapReferralException.setNewRdn");
this.newRdn = newRdn;
}
/**
* Gets the exception generated while processing a referral.
*/
NamingException getNamingException() {
if (debug)
System.out.println("LdapReferralException.getNamingException");
return errorEx;
}
/**
* Display the state of each element in a chain of LdapReferralException
* objects.
*/
void dump() {
System.out.println();
System.out.println("LdapReferralException.dump");
LdapReferralException ptr = this;
while (ptr != null) {
ptr.dumpState();
ptr = ptr.nextReferralEx;
}
}
/**
* Display the state of this LdapReferralException object.
*/
private void dumpState() {
System.out.println("LdapReferralException.dumpState");
System.out.println(" hashCode=" + hashCode());
System.out.println(" foundEntry=" + foundEntry);
System.out.println(" skipThisReferral=" + skipThisReferral);
System.out.println(" referralIndex=" + referralIndex);
if (referrals != null) {
System.out.println(" referrals:");
for (int i = 0; i < referralCount; i++) {
System.out.println(" [" + i + "] " + referrals.elementAt(i));
}
} else {
System.out.println(" referrals=null");
}
System.out.println(" errorEx=" + errorEx);
if (nextReferralEx == null) {
System.out.println(" nextRefEx=null");
} else {
System.out.println(" nextRefEx=" + nextReferralEx.hashCode());
}
System.out.println();
}
}
| |
/*
* Copyright (C) 2015 Archie L. Cobbs. All rights reserved.
*/
package org.jsimpledb.util;
import com.google.common.base.Converter;
import java.util.Comparator;
/**
 * Byte manipulation utilities.
 */
public final class ByteUtil {

    /**
     * An empty byte array. This is the minimum value according to {@link #COMPARATOR}.
     */
    public static final byte[] EMPTY = new byte[0];

    /**
     * {@link Comparator} that compares two byte arrays lexicographically using unsigned values.
     */
    public static final Comparator<byte[]> COMPARATOR = new Comparator<byte[]>() {
        @Override
        public int compare(byte[] array1, byte[] array2) {
            return ByteUtil.compare(array1, array2);
        }
    };

    /**
     * A {@link Converter} that converts between {@code byte[]} arrays and hexadecimal {@link String}s.
     */
    public static final Converter<byte[], String> STRING_CONVERTER = new Converter<byte[], String>() {

        @Override
        public String doForward(byte[] array) {
            return array == null ? null : ByteUtil.toString(array);
        }

        @Override
        public byte[] doBackward(String string) {
            return string == null ? null : ByteUtil.parse(string);
        }
    };

    // Utility class; not instantiable
    private ByteUtil() {
    }

    /**
     * Compare two byte arrays lexicographically using unsigned values.
     *
     * @param b1 first byte array
     * @param b2 second byte array
     * @return -1 if {@code b1 < b2}, 1 if {@code b1 > b2}, or zero if {@code b1 = b2}
     * @throws NullPointerException if {@code b1} or {@code b2} is null
     */
    public static int compare(byte[] b1, byte[] b2) {
        if (b1 == b2)                               // identical reference: trivially equal
            return 0;
        final int limit = Math.min(b1.length, b2.length);
        for (int i = 0; i < limit; i++) {
            final int diff = (b1[i] & 0xff) - (b2[i] & 0xff);
            if (diff != 0)
                return diff < 0 ? -1 : 1;
        }
        // Shared prefix is equal; the shorter array sorts first
        return Integer.signum(b1.length - b2.length);
    }

    /**
     * Determine the smaller of two byte arrays when compared lexicographically using unsigned values.
     *
     * @param b1 first byte array
     * @param b2 second byte array
     * @return {@code b1} if {@code b1 <= b2}, otherwise {@code b2}
     * @throws NullPointerException if {@code b1} or {@code b2} is null
     */
    public static byte[] min(byte[] b1, byte[] b2) {
        if (ByteUtil.compare(b1, b2) > 0)
            return b2;
        return b1;
    }

    /**
     * Determine the larger of two byte arrays when compared lexicographically using unsigned values.
     *
     * @param b1 first byte array
     * @param b2 second byte array
     * @return {@code b1} if {@code b1 >= b2}, otherwise {@code b2}
     * @throws NullPointerException if {@code b1} or {@code b2} is null
     */
    public static byte[] max(byte[] b1, byte[] b2) {
        if (ByteUtil.compare(b1, b2) < 0)
            return b2;
        return b1;
    }

    /**
     * Determine if the first of two {@code byte[]} arrays is a prefix of the second.
     *
     * @param prefix prefix to check
     * @param value value to check for having {@code prefix} as a prefix
     * @return true if {@code prefix} is a prefix of {@code value}
     * @throws NullPointerException if {@code prefix} or {@code value} is null
     */
    public static boolean isPrefixOf(byte[] prefix, byte[] value) {
        final int length = prefix.length;
        if (length > value.length)
            return false;
        int i = 0;
        while (i < length) {
            if (prefix[i] != value[i])
                return false;
            i++;
        }
        return true;
    }

    /**
     * Get the next key greater than the given key in unsigned lexicographic ordering.
     * This creates a new key simply by appending a {@code 0x00} byte to the data
     * contained in the given key.
     *
     * @param key previous key
     * @return next key after {@code key}
     * @throws NullPointerException if {@code key} is null
     */
    public static byte[] getNextKey(byte[] key) {
        // The new array's extra final element is already zero-initialized
        final byte[] result = new byte[key.length + 1];
        System.arraycopy(key, 0, result, 0, key.length);
        return result;
    }

    /**
     * Determine whether {@code key2} is the next key after {@code key1}.
     *
     * @param key1 first key
     * @param key2 second key
     * @return true if {@code key2} immediately follows {@code key1}
     * @throws NullPointerException if either parameter is null
     */
    public static boolean isConsecutive(byte[] key1, byte[] key2) {
        // key2 must be exactly key1 followed by a single 0x00 byte
        if (key2.length != key1.length + 1 || key2[key1.length] != 0)
            return false;
        for (int i = key1.length - 1; i >= 0; i--) {
            if (key2[i] != key1[i])
                return false;
        }
        return true;
    }

    /**
     * Get the first key that would be greater than the given key in unsigned lexicographic
     * ordering <i>and</i> that does not have the given key as a prefix.
     *
     * @param prefix lower bound prefix key
     * @return next key not having {@code prefix} as a prefix
     * @throws IllegalArgumentException if {@code prefix} has zero length
     * @throws IllegalArgumentException if {@code prefix} contains only {@code 0xff} bytes
     * @throws NullPointerException if {@code prefix} is null
     */
    public static byte[] getKeyAfterPrefix(byte[] prefix) {
        if (prefix.length == 0)
            throw new IllegalArgumentException("empty prefix");
        // Strip trailing 0xff bytes so the last remaining byte can be incremented
        int len = prefix.length;
        while (len > 0 && prefix[len - 1] == (byte)0xff)
            len--;
        if (len <= 0)
            throw new IllegalArgumentException("prefix contains only 0xff bytes");
        final byte[] result = new byte[len];
        System.arraycopy(prefix, 0, result, 0, len);
        result[len - 1]++;
        return result;
    }

    /**
     * Convert a byte array into a string of hex digits, or {@code "null"} if {@code buf} is null.
     *
     * @param buf bytes
     * @return string encoding of {@code buf}
     * @see #parse parse()
     */
    public static String toString(byte[] buf) {
        if (buf == null)
            return "null";
        final StringBuilder result = new StringBuilder(buf.length * 2);
        for (byte b : buf) {
            result.append(Character.forDigit((b >> 4) & 0x0f, 16));
            result.append(Character.forDigit(b & 0x0f, 16));
        }
        return result.toString();
    }

    /**
     * Decode a hexadecimal {@link String} into a {@code byte[]} array. The string must have an even
     * number of digits and contain no other characters (e.g., whitespace).
     *
     * @param text string previously encoded by {@link #toString(byte[])}
     * @return {@code byte[]} decoding of {@code text}
     * @throws IllegalArgumentException if any non-hexadecimal characters are found or the number of characters is odd
     * @throws NullPointerException if {@code text} is null
     * @see #toString(byte[]) toString()
     */
    public static byte[] parse(String text) {
        final int numDigits = text.length();
        if ((numDigits & 1) != 0)
            throw new IllegalArgumentException("byte array has an odd number of digits");
        final byte[] array = new byte[numDigits / 2];
        for (int i = 0; i < array.length; i++) {
            final int hi = ByteUtil.parseHexDigit(text, i * 2);
            final int lo = ByteUtil.parseHexDigit(text, i * 2 + 1);
            array[i] = (byte)((hi << 4) | lo);
        }
        return array;
    }

    // Decode the hex digit at the given position, else throw IllegalArgumentException
    private static int parseHexDigit(String text, int index) {
        final int digit = Character.digit(text.charAt(index), 16);
        if (digit == -1)
            throw new IllegalArgumentException("invalid hex digit `" + text.charAt(index) + "'");
        return digit;
    }

    /**
     * Read an {@code int} as four big-endian bytes.
     *
     * @param reader input
     * @return decoded integer
     * @throws IndexOutOfBoundsException if less than four bytes remain in {@code reader}
     * @throws NullPointerException if {@code reader} is null
     * @see #writeInt writeInt()
     */
    public static int readInt(ByteReader reader) {
        int value = 0;
        for (int i = 0; i < 4; i++)
            value = (value << 8) | reader.readByte();
        return value;
    }

    /**
     * Write an {@code int} as four big-endian bytes.
     *
     * @param writer byte destination
     * @param value value to write
     * @see #readInt readInt()
     * @throws NullPointerException if {@code writer} is null
     */
    public static void writeInt(ByteWriter writer, int value) {
        for (int shift = 24; shift >= 0; shift -= 8)
            writer.writeByte(value >> shift);
    }

    /**
     * Read a {@code long} as eight big-endian bytes.
     *
     * @param reader input
     * @return decoded long
     * @throws IndexOutOfBoundsException if less than eight bytes remain in {@code reader}
     * @see #writeLong writeLong()
     */
    public static long readLong(ByteReader reader) {
        long value = 0;
        for (int i = 0; i < 8; i++)
            value = (value << 8) | reader.readByte();
        return value;
    }

    /**
     * Write a {@code long} as eight big-endian bytes.
     *
     * @param writer byte destination
     * @param value value to write
     * @see #readLong readLong()
     * @throws NullPointerException if {@code writer} is null
     */
    public static void writeLong(ByteWriter writer, long value) {
        for (int shift = 56; shift >= 0; shift -= 8)
            writer.writeByte((int)(value >> shift));
    }
}
| |
package net.metadata.dataspace.atom.adapter;
import net.metadata.dataspace.app.TestConstants;
import net.metadata.dataspace.atom.util.ClientHelper;
import net.metadata.dataspace.atom.util.XPathHelper;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.methods.DeleteMethod;
import org.apache.commons.httpclient.methods.GetMethod;
import org.apache.commons.httpclient.methods.PostMethod;
import org.apache.commons.httpclient.methods.PutMethod;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import javax.activation.MimeType;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import java.io.InputStream;
import static junit.framework.Assert.*;
/**
 * Integration tests for activity records exposed through the Atom adapter:
 * CRUD lifecycle, authorization, publishing, and feed content.
 *
 * Author: alabri
 * Date: 05/11/2010
 * Time: 3:08:31 PM
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = TestConstants.TEST_CONTEXT)
public class ActivityIT {

    /**
     * Exercises the full create/read/update/delete lifecycle of an
     * activity entry, including version retrieval.
     */
    @Test  // was missing: without @Test this method never ran under JUnit 4
    public void testActivityCRUD() throws Exception {
        //create a client
        HttpClient client = new HttpClient();
        //authenticate
        int status = ClientHelper.login(client, TestConstants.USERNAME, TestConstants.PASSWORD);
        assertEquals("Could not authenticate", 200, status);
        //Post Entry
        String fileName = "/files/post/new-activity.xml";
        PostMethod postMethod = ClientHelper.postEntry(client, fileName, TestConstants.PATH_FOR_ACTIVITIES);
        assertEquals("Could not post entry", 201, postMethod.getStatusCode());
        String newEntryLocation = postMethod.getResponseHeader("Location").getValue();
        //Get entry
        GetMethod getMethod = ClientHelper.getEntry(client, newEntryLocation, TestConstants.ATOM_ENTRY_MIMETYPE);
        assertEquals("Could not get entry after post", 200, getMethod.getStatusCode());
        //get first version
        getMethod = ClientHelper.getEntry(client, newEntryLocation + "/1", TestConstants.ATOM_ENTRY_MIMETYPE);
        assertEquals("Could not get first version of entry after post", 200, getMethod.getStatusCode());
        //Edit Entry
        fileName = "/files/put/update-activity.xml";
        PutMethod putMethod = ClientHelper.putEntry(client, fileName, newEntryLocation, TestConstants.ATOM_ENTRY_MIMETYPE);
        assertEquals("Could not edit entry", 200, putMethod.getStatusCode());
        //get second version
        getMethod = ClientHelper.getEntry(client, newEntryLocation + "/2", TestConstants.ATOM_ENTRY_MIMETYPE);
        assertEquals("Could not get second version of entry after edit", 200, getMethod.getStatusCode());
        //Get version history
        getMethod = ClientHelper.getEntry(client, newEntryLocation + "/version-history", TestConstants.ATOM_ENTRY_MIMETYPE);
        assertEquals("Could not get version history", 200, getMethod.getStatusCode());
        //Delete Entry
        DeleteMethod deleteMethod = ClientHelper.deleteEntry(client, newEntryLocation);
        assertEquals("Could not delete entry", 200, deleteMethod.getStatusCode());
        //check that entry is deleted (but may be reinstated later)
        getMethod = ClientHelper.getEntry(client, newEntryLocation, TestConstants.ATOM_ENTRY_MIMETYPE);
        assertEquals("Entry should not be found", 404, getMethod.getStatusCode());
    }

    /**
     * Verifies that unauthenticated requests are rejected for draft entries
     * and all write operations.
     */
    @Test
    public void testActivityUnauthorized() throws Exception {
        //create a client
        HttpClient client = new HttpClient();
        //post without authentication
        String fileName = "/files/post/new-activity.xml";
        PostMethod postMethod = ClientHelper.postEntry(client, fileName, TestConstants.PATH_FOR_ACTIVITIES);
        assertEquals("Posting without authenticating, Wrong status code", 401, postMethod.getStatusCode());
        //login
        int status = ClientHelper.login(client, TestConstants.USERNAME, TestConstants.PASSWORD);
        assertEquals("Could not authenticate", 200, status);
        //post with authentication
        postMethod = ClientHelper.postEntry(client, fileName, TestConstants.PATH_FOR_ACTIVITIES);
        assertEquals("Could not post entry", 201, postMethod.getStatusCode());
        String newEntryLocation = postMethod.getResponseHeader("Location").getValue();
        //logout
        status = ClientHelper.logout(client);
        assertEquals("Could not logout", 200, status);
        //get without authenticating
        GetMethod getMethod = ClientHelper.getEntry(client, newEntryLocation, TestConstants.ATOM_ENTRY_MIMETYPE);
        assertEquals("Get without authenticating, Wrong status code", 404, getMethod.getStatusCode());
        //get first version without authenticating
        getMethod = ClientHelper.getEntry(client, newEntryLocation + "/1", TestConstants.ATOM_ENTRY_MIMETYPE);
        assertEquals("Get first version without authenticating, Wrong status code", 401, getMethod.getStatusCode());
        //get working copy without authenticating
        getMethod = ClientHelper.getEntry(client, newEntryLocation + "/working-copy", TestConstants.ATOM_ENTRY_MIMETYPE);
        assertEquals("Get working copy without authenticating, Wrong status code", 401, getMethod.getStatusCode());
        //get version history without authenticating
        getMethod = ClientHelper.getEntry(client, newEntryLocation + "/version-history", TestConstants.ATOM_ENTRY_MIMETYPE);
        assertEquals("Get version history without authenticating, Wrong status code", 401, getMethod.getStatusCode());
        //Edit without authenticating
        fileName = "/files/put/update-activity.xml";
        PutMethod putMethod = ClientHelper.putEntry(client, fileName, newEntryLocation, TestConstants.ATOM_ENTRY_MIMETYPE);
        assertEquals("Editing without authenticating, Wrong status code", 401, putMethod.getStatusCode());
        //Delete Entry
        DeleteMethod deleteMethod = ClientHelper.deleteEntry(client, newEntryLocation);
        assertEquals("Deleting without authenticating, Wrong status code", 401, deleteMethod.getStatusCode());
    }

    /**
     * Verifies that a published entry becomes publicly readable.
     */
    @Test
    public void testActivityPublishing() throws Exception {
        //create a client
        HttpClient client = new HttpClient();
        //authenticate
        int status = ClientHelper.login(client, TestConstants.USERNAME, TestConstants.PASSWORD);
        assertEquals("Could not authenticate", 200, status);
        //Post Entry
        String fileName = "/files/post/new-activity.xml";
        PostMethod postMethod = ClientHelper.postEntry(client, fileName, TestConstants.PATH_FOR_ACTIVITIES);
        assertEquals("Could not post entry", 201, postMethod.getStatusCode());
        String newEntryLocation = postMethod.getResponseHeader("Location").getValue();
        //publish entry
        fileName = "/files/put/published-activity.xml";
        PutMethod putMethod = ClientHelper.putEntry(client, fileName, newEntryLocation, TestConstants.ATOM_ENTRY_MIMETYPE);
        assertEquals("Could not publish entry", 200, putMethod.getStatusCode());
        //logout
        status = ClientHelper.logout(client);
        assertEquals("Could not logout", 200, status);
        //get without authenticating
        GetMethod getMethod = ClientHelper.getEntry(client, newEntryLocation, TestConstants.ATOM_ENTRY_MIMETYPE);
        assertEquals("Get without authenticating should now return OK", 200, getMethod.getStatusCode());
    }

    /**
     * Verifies that the activities feed is publicly readable once an entry
     * has been published.
     */
    @Test
    public void testActivityFeed() throws Exception {
        //create a client
        HttpClient client = new HttpClient();
        //authenticate
        int status = ClientHelper.login(client, TestConstants.USERNAME, TestConstants.PASSWORD);
        assertEquals("Could not authenticate", 200, status);
        //Post Entry
        String fileName = "/files/post/new-activity.xml";
        PostMethod postMethod = ClientHelper.postEntry(client, fileName, TestConstants.PATH_FOR_ACTIVITIES);
        assertEquals("Could not post entry", 201, postMethod.getStatusCode());
        String newEntryLocation = postMethod.getResponseHeader("Location").getValue();
        //publish entry
        fileName = "/files/put/published-activity.xml";
        PutMethod putMethod = ClientHelper.putEntry(client, fileName, newEntryLocation, TestConstants.ATOM_ENTRY_MIMETYPE);
        assertEquals("Could not publish entry", 200, putMethod.getStatusCode());
        //logout
        status = ClientHelper.logout(client);
        assertEquals("Could not logout", 200, status);
        String feedUrl = TestConstants.URL_PREFIX + TestConstants.PATH_FOR_ACTIVITIES;
        //get without authenticating
        GetMethod getMethod = ClientHelper.getEntry(client, feedUrl, TestConstants.ATOM_FEED_MIMETYPE);
        assertEquals("Could not get feed", 200, getMethod.getStatusCode());
    }

    /**
     * Checks that the content of a posted entry round-trips correctly and
     * that the published entry exposes the expected representation links.
     */
    @Test
    public void testActivityRecordContent() throws Exception {
        //create a client
        HttpClient client = new HttpClient();
        //authenticate
        int status = ClientHelper.login(client, TestConstants.USERNAME, TestConstants.PASSWORD);
        assertEquals("Could not authenticate", 200, status);
        //Post Entry
        String fileName = "/files/post/new-activity.xml";
        PostMethod postMethod = ClientHelper.postEntry(client, fileName, TestConstants.PATH_FOR_ACTIVITIES);
        assertEquals("Could not post entry", 201, postMethod.getStatusCode());
        String newEntryLocation = postMethod.getResponseHeader("Location").getValue();
        XPath xpath = XPathHelper.getXPath();
        InputStream responseBodyAsStream = postMethod.getResponseBodyAsStream();
        Document docFromStream = XPathHelper.getDocFromStream(responseBodyAsStream);
        Document docFromFile = XPathHelper.getDocFromFile(fileName);
        String id = xpath.evaluate(TestConstants.RECORD_ID_PATH, docFromStream);
        assertNotNull("Entry missing id", id);
        String originalId = xpath.evaluate(TestConstants.RECORD_ID_PATH, docFromFile);
        assertNotNull("Original Entry missing id", originalId);
        assertEquals("Entry's id is incorrect", originalId, id);
        String relDescribes = xpath.evaluate(TestConstants.RECORD_REL_DESCRIBES_PATH, docFromStream);
        assertNotNull("Entry missing \"describes\" relation", relDescribes);
        assertTrue("Entry's \"describes\" relation does not contain path to entry: "+relDescribes, relDescribes.contains(TestConstants.PATH_FOR_ACTIVITIES));
        String title = xpath.evaluate(TestConstants.RECORD_TITLE_PATH, docFromStream);
        assertNotNull("Entry missing title", title);
        String originalTitle = xpath.evaluate(TestConstants.RECORD_TITLE_PATH, docFromFile);
        assertNotNull("Original Entry missing title", originalTitle);
        assertEquals("Entry's title is incorrect", originalTitle, title);
        String content = xpath.evaluate(TestConstants.RECORD_CONTENT_PATH, docFromStream);
        assertNotNull("Entry missing content", content);
        String originalContent = xpath.evaluate(TestConstants.RECORD_CONTENT_PATH, docFromFile);
        assertNotNull("Original Entry missing content", originalContent);
        assertEquals("Entry's content is incorrect", originalContent, content);
        String updated = xpath.evaluate(TestConstants.RECORD_UPDATED_PATH, docFromStream);
        assertNotNull("Entry missing updated", updated);
        String authorName = xpath.evaluate(TestConstants.RECORD_AUTHOR_NAME_PATH, docFromStream);
        assertNotNull("Entry missing author name", authorName);
        String originalAuthorName = xpath.evaluate(TestConstants.RECORD_AUTHOR_NAME_PATH, docFromFile);
        assertNotNull("Original Entry missing author name", originalAuthorName);
        assertEquals("Entry's author name is incorrect", originalAuthorName, authorName);
        String draft = xpath.evaluate(TestConstants.RECORD_DRAFT_PATH, docFromStream);
        assertNotNull("Entry missing draft element", draft);
        assertEquals("Entry's should be draft", "yes", draft);
        //publish entry
        fileName = "/files/put/published-activity.xml";
        PutMethod putMethod = ClientHelper.putEntry(client, fileName, newEntryLocation, TestConstants.ATOM_ENTRY_MIMETYPE);
        assertEquals("Could not publish entry", 200, putMethod.getStatusCode());
        docFromStream = XPathHelper.getDocFromStream(putMethod.getResponseBodyAsStream());
        draft = xpath.evaluate(TestConstants.RECORD_DRAFT_PATH, docFromStream);
        assertNotNull("Entry missing draft element", draft);
        assertEquals("Entry's should be published", "no", draft);
        Element selfLink = (Element) xpath.evaluate(TestConstants.RECORD_LINK_PATH + "[@rel='self']", docFromStream, XPathConstants.NODE);
        assertNotNull("Entry missing self link", selfLink);
        String entryLocation = selfLink.getAttribute("href");
        Element xhtmlLinkElement = (Element) xpath.evaluate(TestConstants.RECORD_LINK_PATH + "[@type='" + TestConstants.MIME_TYPE_XHTML + "']", docFromStream, XPathConstants.NODE);
        assertNotNull("Entry missing xhtml link", xhtmlLinkElement);
        String xhtmlLink = xhtmlLinkElement.getAttribute("href");
        String expectedXhtmlLink = entryLocation + "?repr=" + TestConstants.MIME_TYPE_XHTML;
        assertEquals(expectedXhtmlLink, xhtmlLink);
        Element rdfLinkElement = (Element) xpath.evaluate(TestConstants.RECORD_LINK_PATH + "[@type='" + TestConstants.MIME_TYPE_RDF + "']", docFromStream, XPathConstants.NODE);
        assertNotNull("Entry missing rdf link", rdfLinkElement);
        String rdfLink = rdfLinkElement.getAttribute("href");
        String expectedRdfLink = entryLocation + "?repr=" + TestConstants.MIME_TYPE_RDF;
        assertEquals(expectedRdfLink, rdfLink);
        // NOTE(review): the "rifcs" checks below re-use MIME_TYPE_XHTML and so
        // duplicate the xhtml-link assertions above; presumably a RIF-CS mime
        // type constant was intended — confirm against TestConstants.
        Element rifcsLinkElement = (Element) xpath.evaluate(TestConstants.RECORD_LINK_PATH + "[@type='" + TestConstants.MIME_TYPE_XHTML + "']", docFromStream, XPathConstants.NODE);
        assertNotNull("Entry missing rifcs link", rifcsLinkElement);
        String rifcsLink = rifcsLinkElement.getAttribute("href");
        String expectedRifcsLink = entryLocation + "?repr=" + TestConstants.MIME_TYPE_XHTML;
        assertEquals(expectedRifcsLink, rifcsLink);
    }

    /**
     * Checks the structure and content of the public activities feed:
     * feed-level metadata, links, and the first entry's elements.
     */
    @Test
    public void testActivityFeedContent() throws Exception {
        //create a client
        HttpClient client = new HttpClient();
        //authenticate
        int status = ClientHelper.login(client, TestConstants.USERNAME, TestConstants.PASSWORD);
        assertEquals("Could not authenticate", 200, status);
        //Post Entry
        String fileName = "/files/post/new-activity.xml";
        PostMethod postMethod = ClientHelper.postEntry(client, fileName, TestConstants.PATH_FOR_ACTIVITIES);
        assertEquals("Could not post entry", 201, postMethod.getStatusCode());
        String newEntryLocation = postMethod.getResponseHeader("Location").getValue();
        //publish entry
        fileName = "/files/put/published-activity.xml";
        PutMethod putMethod = ClientHelper.putEntry(client, fileName, newEntryLocation, TestConstants.ATOM_ENTRY_MIMETYPE);
        assertEquals("Could not publish entry", 200, putMethod.getStatusCode());
        String feedUrl = TestConstants.URL_PREFIX + TestConstants.PATH_FOR_ACTIVITIES;
        //get without authenticating
        GetMethod getMethod = ClientHelper.getEntry(client, feedUrl, TestConstants.ATOM_FEED_MIMETYPE);
        assertEquals("Could not get feed", 200, getMethod.getStatusCode());
        XPath xpath = XPathHelper.getXPath();
        InputStream responseBodyAsStream = getMethod.getResponseBodyAsStream();
        Document docFromStream = XPathHelper.getDocFromStream(responseBodyAsStream);
        String id = xpath.evaluate(TestConstants.FEED_ID_PATH, docFromStream);
        assertNotNull("Feed missing id", id);
        assertTrue("Feed's id does not contain path to entry", id.contains(TestConstants.PATH_FOR_ACTIVITIES));
        String title = xpath.evaluate(TestConstants.FEED_TITLE_PATH, docFromStream);
        assertNotNull("Feed missing title", title);
        assertEquals("Feed's title is incorrect", TestConstants.TITLE_FOR_ACTIVITIES, title);
        String updated = xpath.evaluate(TestConstants.FEED_UPDATED_PATH, docFromStream);
        assertNotNull("Feed missing updated", updated);
        String authorName = xpath.evaluate(TestConstants.FEED_AUTHOR_NAME_PATH, docFromStream);
        assertNotNull("Feed missing author name", authorName);
        Element selfLink = (Element) xpath.evaluate(TestConstants.FEED_LINK_PATH + "[@rel='self']", docFromStream, XPathConstants.NODE);
        assertNotNull("Feed missing self link", selfLink);
        String feedSelfLink = selfLink.getAttribute("href");
        String feedSelfType = selfLink.getAttribute("type");
        assertTrue("Incorrect SELF link file extension: "+feedSelfLink, feedSelfLink.contains(".atom"));
        assertTrue("Incorrect SELF mime-type: "+feedSelfType, (new MimeType(TestConstants.ATOM_FEED_MIMETYPE)).match(feedSelfType));
        Element alternateLink = (Element) xpath.evaluate(TestConstants.FEED_LINK_PATH + "[@rel='alternate']", docFromStream, XPathConstants.NODE);
        assertNotNull("Feed missing alternate link", alternateLink);
        //Number of entries in the feed
        NodeList nodes = (NodeList) xpath.evaluate(TestConstants.FEED_PATH + "/atom:entry", docFromStream, XPathConstants.NODESET);
        int numberOfEntries = nodes.getLength();
        assertTrue("There should be at least one entry in this feed", numberOfEntries > 0);
        Node entry = nodes.item(0);
        String entryId = xpath.evaluate(TestConstants.FEED_PATH + TestConstants.RECORD_ID_PATH, entry);
        assertNotNull("Feed entry missing id", entryId);
        assertTrue("Feed entry's id does not contain path to entry", entryId.contains(TestConstants.PATH_FOR_ACTIVITIES));
        String entryTitle = xpath.evaluate(TestConstants.FEED_PATH + TestConstants.RECORD_TITLE_PATH, entry);
        assertNotNull("Feed entry missing title", entryTitle);
        assertFalse("Feed entry title is empty", entryTitle.isEmpty());
        String entryContent = xpath.evaluate(TestConstants.FEED_PATH + TestConstants.RECORD_CONTENT_PATH, entry);
        assertNotNull("Feed entry missing content", entryContent);
        assertFalse("Feed entry content is empty", entryContent.isEmpty());
        String entryUpdated = xpath.evaluate(TestConstants.FEED_PATH + TestConstants.RECORD_UPDATED_PATH, entry);
        assertNotNull("Feed entry missing updated", entryUpdated);
        // was checking entryContent by copy-paste error; check the value itself
        assertFalse("Feed entry updated is empty", entryUpdated.isEmpty());
        String draft = xpath.evaluate(TestConstants.FEED_PATH + TestConstants.RECORD_DRAFT_PATH, entry);
        assertNotNull("Feed Entry missing draft element", draft);
        // was checking entryContent by copy-paste error; check the value itself
        assertFalse("Feed entry draft is empty", draft.isEmpty());
    }
}
| |
/*
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.talent.v4beta1;
import com.google.api.core.ApiFunction;
import com.google.api.core.ApiFuture;
import com.google.api.core.ApiFutures;
import com.google.api.core.BetaApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.paging.AbstractFixedSizeCollection;
import com.google.api.gax.paging.AbstractPage;
import com.google.api.gax.paging.AbstractPagedListResponse;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.talent.v4beta1.stub.ProfileServiceStub;
import com.google.cloud.talent.v4beta1.stub.ProfileServiceStubSettings;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.protobuf.Empty;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND SERVICE
/**
* Service Description: A service that handles profile management, including profile CRUD,
* enumeration and search.
*
* <p>This class provides the ability to make remote calls to the backing service through method
* calls that map to API methods. Sample code to get started:
*
* <pre>
* <code>
* try (ProfileServiceClient profileServiceClient = ProfileServiceClient.create()) {
* TenantName parent = TenantName.of("[PROJECT]", "[TENANT]");
* Profile profile = Profile.newBuilder().build();
* Profile response = profileServiceClient.createProfile(parent, profile);
* }
* </code>
* </pre>
*
* <p>Note: close() needs to be called on the profileServiceClient object to clean up resources such
* as threads. In the example above, try-with-resources is used, which automatically calls close().
*
* <p>The surface of this class includes several types of Java methods for each of the API's
* methods:
*
* <ol>
* <li>A "flattened" method. With this type of method, the fields of the request type have been
* converted into function parameters. It may be the case that not all fields are available as
* parameters, and not every API method will have a flattened method entry point.
* <li>A "request object" method. This type of method only takes one parameter, a request object,
* which must be constructed before the call. Not every API method will have a request object
* method.
* <li>A "callable" method. This type of method takes no parameters and returns an immutable API
* callable object, which can be used to initiate calls to the service.
* </ol>
*
* <p>See the individual methods for example code.
*
* <p>Many parameters require resource names to be formatted in a particular way. To assist with
* these names, this class includes a format method for each type of name, and additionally a parse
* method to extract the individual identifiers contained within names that are returned.
*
* <p>This class can be customized by passing in a custom instance of ProfileServiceSettings to
* create(). For example:
*
* <p>To customize credentials:
*
* <pre>
* <code>
* ProfileServiceSettings profileServiceSettings =
* ProfileServiceSettings.newBuilder()
* .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
* .build();
* ProfileServiceClient profileServiceClient =
* ProfileServiceClient.create(profileServiceSettings);
* </code>
* </pre>
*
* To customize the endpoint:
*
* <pre>
* <code>
* ProfileServiceSettings profileServiceSettings =
* ProfileServiceSettings.newBuilder().setEndpoint(myEndpoint).build();
* ProfileServiceClient profileServiceClient =
* ProfileServiceClient.create(profileServiceSettings);
* </code>
* </pre>
*/
@Generated("by gapic-generator")
@BetaApi
public class ProfileServiceClient implements BackgroundResource {
  // Settings this client was created from; null when constructed directly from a stub.
  private final ProfileServiceSettings settings;
  // Transport stub that performs the actual RPC calls; all public methods delegate to it.
  private final ProfileServiceStub stub;
  /** Constructs an instance of ProfileServiceClient with default settings. */
  public static final ProfileServiceClient create() throws IOException {
    return create(ProfileServiceSettings.newBuilder().build());
  }
  /**
   * Constructs an instance of ProfileServiceClient, using the given settings. The channels are
   * created based on the settings passed in, or defaults for any settings that are not set.
   */
  public static final ProfileServiceClient create(ProfileServiceSettings settings)
      throws IOException {
    return new ProfileServiceClient(settings);
  }
  /**
   * Constructs an instance of ProfileServiceClient, using the given stub for making calls. This is
   * for advanced usage - prefer to use {@link ProfileServiceSettings}.
   */
  @BetaApi("A restructuring of stub classes is planned, so this may break in the future")
  public static final ProfileServiceClient create(ProfileServiceStub stub) {
    return new ProfileServiceClient(stub);
  }
  /**
   * Constructs an instance of ProfileServiceClient, using the given settings. This is protected so
   * that it is easy to make a subclass, but otherwise, the static factory methods should be
   * preferred.
   */
  protected ProfileServiceClient(ProfileServiceSettings settings) throws IOException {
    this.settings = settings;
    this.stub = ((ProfileServiceStubSettings) settings.getStubSettings()).createStub();
  }
  @BetaApi("A restructuring of stub classes is planned, so this may break in the future")
  protected ProfileServiceClient(ProfileServiceStub stub) {
    // Stub-based construction bypasses settings entirely, so settings is left null.
    this.settings = null;
    this.stub = stub;
  }
  /** Returns the settings used to create this client; null when it was created from a stub. */
  public final ProfileServiceSettings getSettings() {
    return settings;
  }
  /** Returns the underlying transport stub; for advanced usage only. */
  @BetaApi("A restructuring of stub classes is planned, so this may break in the future")
  public ProfileServiceStub getStub() {
    return stub;
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD
  /**
   * Lists profiles by filter. The order is unspecified.
   *
   * <p>Sample code:
   *
   * <pre><code>
   * try (ProfileServiceClient profileServiceClient = ProfileServiceClient.create()) {
   *   TenantName parent = TenantName.of("[PROJECT]", "[TENANT]");
   *   for (Profile element : profileServiceClient.listProfiles(parent).iterateAll()) {
   *     // doThingsWith(element);
   *   }
   * }
   * </code></pre>
   *
   * @param parent Required.
   *     <p>The resource name of the tenant under which the job is created.
   *     <p>The format is "projects/{project_id}/tenants/{tenant_id}", for example,
   *     "projects/api-test-project/tenants/foo".
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final ListProfilesPagedResponse listProfiles(TenantName parent) {
    ListProfilesRequest request =
        ListProfilesRequest.newBuilder()
            .setParent(parent == null ? null : parent.toString())
            .build();
    return listProfiles(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD
  /**
   * Lists profiles by filter. The order is unspecified.
   *
   * <p>Sample code:
   *
   * <pre><code>
   * try (ProfileServiceClient profileServiceClient = ProfileServiceClient.create()) {
   *   TenantName parent = TenantName.of("[PROJECT]", "[TENANT]");
   *   for (Profile element : profileServiceClient.listProfiles(parent.toString()).iterateAll()) {
   *     // doThingsWith(element);
   *   }
   * }
   * </code></pre>
   *
   * @param parent Required.
   *     <p>The resource name of the tenant under which the job is created.
   *     <p>The format is "projects/{project_id}/tenants/{tenant_id}", for example,
   *     "projects/api-test-project/tenants/foo".
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final ListProfilesPagedResponse listProfiles(String parent) {
    ListProfilesRequest request = ListProfilesRequest.newBuilder().setParent(parent).build();
    return listProfiles(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD
  /**
   * Lists profiles by filter. The order is unspecified.
   *
   * <p>Sample code:
   *
   * <pre><code>
   * try (ProfileServiceClient profileServiceClient = ProfileServiceClient.create()) {
   *   TenantName parent = TenantName.of("[PROJECT]", "[TENANT]");
   *   ListProfilesRequest request = ListProfilesRequest.newBuilder()
   *     .setParent(parent.toString())
   *     .build();
   *   for (Profile element : profileServiceClient.listProfiles(request).iterateAll()) {
   *     // doThingsWith(element);
   *   }
   * }
   * </code></pre>
   *
   * @param request The request object containing all of the parameters for the API call.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final ListProfilesPagedResponse listProfiles(ListProfilesRequest request) {
    return listProfilesPagedCallable().call(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD
  /**
   * Lists profiles by filter. The order is unspecified.
   *
   * <p>Sample code:
   *
   * <pre><code>
   * try (ProfileServiceClient profileServiceClient = ProfileServiceClient.create()) {
   *   TenantName parent = TenantName.of("[PROJECT]", "[TENANT]");
   *   ListProfilesRequest request = ListProfilesRequest.newBuilder()
   *     .setParent(parent.toString())
   *     .build();
   *   ApiFuture&lt;ListProfilesPagedResponse&gt; future = profileServiceClient.listProfilesPagedCallable().futureCall(request);
   *   // Do something
   *   for (Profile element : future.get().iterateAll()) {
   *     // doThingsWith(element);
   *   }
   * }
   * </code></pre>
   */
  public final UnaryCallable<ListProfilesRequest, ListProfilesPagedResponse>
      listProfilesPagedCallable() {
    return stub.listProfilesPagedCallable();
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD
  /**
   * Lists profiles by filter. The order is unspecified.
   *
   * <p>Sample code:
   *
   * <pre><code>
   * try (ProfileServiceClient profileServiceClient = ProfileServiceClient.create()) {
   *   TenantName parent = TenantName.of("[PROJECT]", "[TENANT]");
   *   ListProfilesRequest request = ListProfilesRequest.newBuilder()
   *     .setParent(parent.toString())
   *     .build();
   *   while (true) {
   *     ListProfilesResponse response = profileServiceClient.listProfilesCallable().call(request);
   *     for (Profile element : response.getProfilesList()) {
   *       // doThingsWith(element);
   *     }
   *     String nextPageToken = response.getNextPageToken();
   *     if (!Strings.isNullOrEmpty(nextPageToken)) {
   *       request = request.toBuilder().setPageToken(nextPageToken).build();
   *     } else {
   *       break;
   *     }
   *   }
   * }
   * </code></pre>
   */
  public final UnaryCallable<ListProfilesRequest, ListProfilesResponse> listProfilesCallable() {
    return stub.listProfilesCallable();
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD
  /**
   * Creates and returns a new profile.
   *
   * <p>Sample code:
   *
   * <pre><code>
   * try (ProfileServiceClient profileServiceClient = ProfileServiceClient.create()) {
   *   TenantName parent = TenantName.of("[PROJECT]", "[TENANT]");
   *   Profile profile = Profile.newBuilder().build();
   *   Profile response = profileServiceClient.createProfile(parent, profile);
   * }
   * </code></pre>
   *
   * @param parent Required.
   *     <p>The name of the tenant this profile belongs to.
   *     <p>The format is "projects/{project_id}/tenants/{tenant_id}", for example,
   *     "projects/api-test-project/tenants/foo".
   * @param profile Required.
   *     <p>The profile to be created.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final Profile createProfile(TenantName parent, Profile profile) {
    CreateProfileRequest request =
        CreateProfileRequest.newBuilder()
            .setParent(parent == null ? null : parent.toString())
            .setProfile(profile)
            .build();
    return createProfile(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD
  /**
   * Creates and returns a new profile.
   *
   * <p>Sample code:
   *
   * <pre><code>
   * try (ProfileServiceClient profileServiceClient = ProfileServiceClient.create()) {
   *   TenantName parent = TenantName.of("[PROJECT]", "[TENANT]");
   *   Profile profile = Profile.newBuilder().build();
   *   Profile response = profileServiceClient.createProfile(parent.toString(), profile);
   * }
   * </code></pre>
   *
   * @param parent Required.
   *     <p>The name of the tenant this profile belongs to.
   *     <p>The format is "projects/{project_id}/tenants/{tenant_id}", for example,
   *     "projects/api-test-project/tenants/foo".
   * @param profile Required.
   *     <p>The profile to be created.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final Profile createProfile(String parent, Profile profile) {
    CreateProfileRequest request =
        CreateProfileRequest.newBuilder().setParent(parent).setProfile(profile).build();
    return createProfile(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD
  /**
   * Creates and returns a new profile.
   *
   * <p>Sample code:
   *
   * <pre><code>
   * try (ProfileServiceClient profileServiceClient = ProfileServiceClient.create()) {
   *   TenantName parent = TenantName.of("[PROJECT]", "[TENANT]");
   *   Profile profile = Profile.newBuilder().build();
   *   CreateProfileRequest request = CreateProfileRequest.newBuilder()
   *     .setParent(parent.toString())
   *     .setProfile(profile)
   *     .build();
   *   Profile response = profileServiceClient.createProfile(request);
   * }
   * </code></pre>
   *
   * @param request The request object containing all of the parameters for the API call.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final Profile createProfile(CreateProfileRequest request) {
    return createProfileCallable().call(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD
  /**
   * Creates and returns a new profile.
   *
   * <p>Sample code:
   *
   * <pre><code>
   * try (ProfileServiceClient profileServiceClient = ProfileServiceClient.create()) {
   *   TenantName parent = TenantName.of("[PROJECT]", "[TENANT]");
   *   Profile profile = Profile.newBuilder().build();
   *   CreateProfileRequest request = CreateProfileRequest.newBuilder()
   *     .setParent(parent.toString())
   *     .setProfile(profile)
   *     .build();
   *   ApiFuture&lt;Profile&gt; future = profileServiceClient.createProfileCallable().futureCall(request);
   *   // Do something
   *   Profile response = future.get();
   * }
   * </code></pre>
   */
  public final UnaryCallable<CreateProfileRequest, Profile> createProfileCallable() {
    return stub.createProfileCallable();
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD
  /**
   * Gets the specified profile.
   *
   * <p>Sample code:
   *
   * <pre><code>
   * try (ProfileServiceClient profileServiceClient = ProfileServiceClient.create()) {
   *   ProfileName name = ProfileName.of("[PROJECT]", "[TENANT]", "[PROFILE]");
   *   Profile response = profileServiceClient.getProfile(name);
   * }
   * </code></pre>
   *
   * @param name Required.
   *     <p>Resource name of the profile to get.
   *     <p>The format is "projects/{project_id}/tenants/{tenant_id}/profiles/{profile_id}", for
   *     example, "projects/api-test-project/tenants/foo/profiles/bar".
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final Profile getProfile(ProfileName name) {
    GetProfileRequest request =
        GetProfileRequest.newBuilder().setName(name == null ? null : name.toString()).build();
    return getProfile(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD
  /**
   * Gets the specified profile.
   *
   * <p>Sample code:
   *
   * <pre><code>
   * try (ProfileServiceClient profileServiceClient = ProfileServiceClient.create()) {
   *   ProfileName name = ProfileName.of("[PROJECT]", "[TENANT]", "[PROFILE]");
   *   Profile response = profileServiceClient.getProfile(name.toString());
   * }
   * </code></pre>
   *
   * @param name Required.
   *     <p>Resource name of the profile to get.
   *     <p>The format is "projects/{project_id}/tenants/{tenant_id}/profiles/{profile_id}", for
   *     example, "projects/api-test-project/tenants/foo/profiles/bar".
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final Profile getProfile(String name) {
    GetProfileRequest request = GetProfileRequest.newBuilder().setName(name).build();
    return getProfile(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD
  /**
   * Gets the specified profile.
   *
   * <p>Sample code:
   *
   * <pre><code>
   * try (ProfileServiceClient profileServiceClient = ProfileServiceClient.create()) {
   *   ProfileName name = ProfileName.of("[PROJECT]", "[TENANT]", "[PROFILE]");
   *   GetProfileRequest request = GetProfileRequest.newBuilder()
   *     .setName(name.toString())
   *     .build();
   *   Profile response = profileServiceClient.getProfile(request);
   * }
   * </code></pre>
   *
   * @param request The request object containing all of the parameters for the API call.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final Profile getProfile(GetProfileRequest request) {
    return getProfileCallable().call(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD
  /**
   * Gets the specified profile.
   *
   * <p>Sample code:
   *
   * <pre><code>
   * try (ProfileServiceClient profileServiceClient = ProfileServiceClient.create()) {
   *   ProfileName name = ProfileName.of("[PROJECT]", "[TENANT]", "[PROFILE]");
   *   GetProfileRequest request = GetProfileRequest.newBuilder()
   *     .setName(name.toString())
   *     .build();
   *   ApiFuture&lt;Profile&gt; future = profileServiceClient.getProfileCallable().futureCall(request);
   *   // Do something
   *   Profile response = future.get();
   * }
   * </code></pre>
   */
  public final UnaryCallable<GetProfileRequest, Profile> getProfileCallable() {
    return stub.getProfileCallable();
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD
  /**
   * Updates the specified profile and returns the updated result.
   *
   * <p>Sample code:
   *
   * <pre><code>
   * try (ProfileServiceClient profileServiceClient = ProfileServiceClient.create()) {
   *   Profile profile = Profile.newBuilder().build();
   *   Profile response = profileServiceClient.updateProfile(profile);
   * }
   * </code></pre>
   *
   * @param profile Required.
   *     <p>Profile to be updated.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final Profile updateProfile(Profile profile) {
    UpdateProfileRequest request = UpdateProfileRequest.newBuilder().setProfile(profile).build();
    return updateProfile(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD
  /**
   * Updates the specified profile and returns the updated result.
   *
   * <p>Sample code:
   *
   * <pre><code>
   * try (ProfileServiceClient profileServiceClient = ProfileServiceClient.create()) {
   *   Profile profile = Profile.newBuilder().build();
   *   UpdateProfileRequest request = UpdateProfileRequest.newBuilder()
   *     .setProfile(profile)
   *     .build();
   *   Profile response = profileServiceClient.updateProfile(request);
   * }
   * </code></pre>
   *
   * @param request The request object containing all of the parameters for the API call.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final Profile updateProfile(UpdateProfileRequest request) {
    return updateProfileCallable().call(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD
  /**
   * Updates the specified profile and returns the updated result.
   *
   * <p>Sample code:
   *
   * <pre><code>
   * try (ProfileServiceClient profileServiceClient = ProfileServiceClient.create()) {
   *   Profile profile = Profile.newBuilder().build();
   *   UpdateProfileRequest request = UpdateProfileRequest.newBuilder()
   *     .setProfile(profile)
   *     .build();
   *   ApiFuture&lt;Profile&gt; future = profileServiceClient.updateProfileCallable().futureCall(request);
   *   // Do something
   *   Profile response = future.get();
   * }
   * </code></pre>
   */
  public final UnaryCallable<UpdateProfileRequest, Profile> updateProfileCallable() {
    return stub.updateProfileCallable();
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD
  /**
   * Deletes the specified profile.
   *
   * <p>Sample code:
   *
   * <pre><code>
   * try (ProfileServiceClient profileServiceClient = ProfileServiceClient.create()) {
   *   ProfileName name = ProfileName.of("[PROJECT]", "[TENANT]", "[PROFILE]");
   *   profileServiceClient.deleteProfile(name);
   * }
   * </code></pre>
   *
   * @param name Required.
   *     <p>Resource name of the profile to be deleted.
   *     <p>The format is "projects/{project_id}/tenants/{tenant_id}/profiles/{profile_id}", for
   *     example, "projects/api-test-project/tenants/foo/profiles/bar".
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final void deleteProfile(ProfileName name) {
    DeleteProfileRequest request =
        DeleteProfileRequest.newBuilder().setName(name == null ? null : name.toString()).build();
    deleteProfile(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD
  /**
   * Deletes the specified profile.
   *
   * <p>Sample code:
   *
   * <pre><code>
   * try (ProfileServiceClient profileServiceClient = ProfileServiceClient.create()) {
   *   ProfileName name = ProfileName.of("[PROJECT]", "[TENANT]", "[PROFILE]");
   *   profileServiceClient.deleteProfile(name.toString());
   * }
   * </code></pre>
   *
   * @param name Required.
   *     <p>Resource name of the profile to be deleted.
   *     <p>The format is "projects/{project_id}/tenants/{tenant_id}/profiles/{profile_id}", for
   *     example, "projects/api-test-project/tenants/foo/profiles/bar".
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final void deleteProfile(String name) {
    DeleteProfileRequest request = DeleteProfileRequest.newBuilder().setName(name).build();
    deleteProfile(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD
  /**
   * Deletes the specified profile.
   *
   * <p>Sample code:
   *
   * <pre><code>
   * try (ProfileServiceClient profileServiceClient = ProfileServiceClient.create()) {
   *   ProfileName name = ProfileName.of("[PROJECT]", "[TENANT]", "[PROFILE]");
   *   DeleteProfileRequest request = DeleteProfileRequest.newBuilder()
   *     .setName(name.toString())
   *     .build();
   *   profileServiceClient.deleteProfile(request);
   * }
   * </code></pre>
   *
   * @param request The request object containing all of the parameters for the API call.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final void deleteProfile(DeleteProfileRequest request) {
    deleteProfileCallable().call(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD
  /**
   * Deletes the specified profile.
   *
   * <p>Sample code:
   *
   * <pre><code>
   * try (ProfileServiceClient profileServiceClient = ProfileServiceClient.create()) {
   *   ProfileName name = ProfileName.of("[PROJECT]", "[TENANT]", "[PROFILE]");
   *   DeleteProfileRequest request = DeleteProfileRequest.newBuilder()
   *     .setName(name.toString())
   *     .build();
   *   ApiFuture&lt;Void&gt; future = profileServiceClient.deleteProfileCallable().futureCall(request);
   *   // Do something
   *   future.get();
   * }
   * </code></pre>
   */
  public final UnaryCallable<DeleteProfileRequest, Empty> deleteProfileCallable() {
    return stub.deleteProfileCallable();
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD
  /**
   * Searches for profiles within a tenant.
   *
   * <p>For example, search by raw queries "software engineer in Mountain View" or search by
   * structured filters (location filter, education filter, etc.).
   *
   * <p>See [SearchProfilesRequest][google.cloud.talent.v4beta1.SearchProfilesRequest] for more
   * information.
   *
   * <p>Sample code:
   *
   * <pre><code>
   * try (ProfileServiceClient profileServiceClient = ProfileServiceClient.create()) {
   *   TenantName parent = TenantName.of("[PROJECT]", "[TENANT]");
   *   RequestMetadata requestMetadata = RequestMetadata.newBuilder().build();
   *   SearchProfilesRequest request = SearchProfilesRequest.newBuilder()
   *     .setParent(parent.toString())
   *     .setRequestMetadata(requestMetadata)
   *     .build();
   *   for (HistogramQueryResult element : profileServiceClient.searchProfiles(request).iterateAll()) {
   *     // doThingsWith(element);
   *   }
   * }
   * </code></pre>
   *
   * @param request The request object containing all of the parameters for the API call.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final SearchProfilesPagedResponse searchProfiles(SearchProfilesRequest request) {
    return searchProfilesPagedCallable().call(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD
  /**
   * Searches for profiles within a tenant.
   *
   * <p>For example, search by raw queries "software engineer in Mountain View" or search by
   * structured filters (location filter, education filter, etc.).
   *
   * <p>See [SearchProfilesRequest][google.cloud.talent.v4beta1.SearchProfilesRequest] for more
   * information.
   *
   * <p>Sample code:
   *
   * <pre><code>
   * try (ProfileServiceClient profileServiceClient = ProfileServiceClient.create()) {
   *   TenantName parent = TenantName.of("[PROJECT]", "[TENANT]");
   *   RequestMetadata requestMetadata = RequestMetadata.newBuilder().build();
   *   SearchProfilesRequest request = SearchProfilesRequest.newBuilder()
   *     .setParent(parent.toString())
   *     .setRequestMetadata(requestMetadata)
   *     .build();
   *   ApiFuture&lt;SearchProfilesPagedResponse&gt; future = profileServiceClient.searchProfilesPagedCallable().futureCall(request);
   *   // Do something
   *   for (HistogramQueryResult element : future.get().iterateAll()) {
   *     // doThingsWith(element);
   *   }
   * }
   * </code></pre>
   */
  public final UnaryCallable<SearchProfilesRequest, SearchProfilesPagedResponse>
      searchProfilesPagedCallable() {
    return stub.searchProfilesPagedCallable();
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD
  /**
   * Searches for profiles within a tenant.
   *
   * <p>For example, search by raw queries "software engineer in Mountain View" or search by
   * structured filters (location filter, education filter, etc.).
   *
   * <p>See [SearchProfilesRequest][google.cloud.talent.v4beta1.SearchProfilesRequest] for more
   * information.
   *
   * <p>Sample code:
   *
   * <pre><code>
   * try (ProfileServiceClient profileServiceClient = ProfileServiceClient.create()) {
   *   TenantName parent = TenantName.of("[PROJECT]", "[TENANT]");
   *   RequestMetadata requestMetadata = RequestMetadata.newBuilder().build();
   *   SearchProfilesRequest request = SearchProfilesRequest.newBuilder()
   *     .setParent(parent.toString())
   *     .setRequestMetadata(requestMetadata)
   *     .build();
   *   while (true) {
   *     SearchProfilesResponse response = profileServiceClient.searchProfilesCallable().call(request);
   *     for (HistogramQueryResult element : response.getHistogramQueryResultsList()) {
   *       // doThingsWith(element);
   *     }
   *     String nextPageToken = response.getNextPageToken();
   *     if (!Strings.isNullOrEmpty(nextPageToken)) {
   *       request = request.toBuilder().setPageToken(nextPageToken).build();
   *     } else {
   *       break;
   *     }
   *   }
   * }
   * </code></pre>
   */
  public final UnaryCallable<SearchProfilesRequest, SearchProfilesResponse>
      searchProfilesCallable() {
    return stub.searchProfilesCallable();
  }
  // BackgroundResource lifecycle methods: all delegate directly to the underlying stub.
  @Override
  public final void close() {
    stub.close();
  }
  @Override
  public void shutdown() {
    stub.shutdown();
  }
  @Override
  public boolean isShutdown() {
    return stub.isShutdown();
  }
  @Override
  public boolean isTerminated() {
    return stub.isTerminated();
  }
  @Override
  public void shutdownNow() {
    stub.shutdownNow();
  }
  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return stub.awaitTermination(duration, unit);
  }
  /** Paged response wrapper returned by {@link #listProfiles}. */
  public static class ListProfilesPagedResponse
      extends AbstractPagedListResponse<
          ListProfilesRequest,
          ListProfilesResponse,
          Profile,
          ListProfilesPage,
          ListProfilesFixedSizeCollection> {
    /** Asynchronously wraps the first page future into a paged response future. */
    public static ApiFuture<ListProfilesPagedResponse> createAsync(
        PageContext<ListProfilesRequest, ListProfilesResponse, Profile> context,
        ApiFuture<ListProfilesResponse> futureResponse) {
      ApiFuture<ListProfilesPage> futurePage =
          ListProfilesPage.createEmptyPage().createPageAsync(context, futureResponse);
      return ApiFutures.transform(
          futurePage,
          new ApiFunction<ListProfilesPage, ListProfilesPagedResponse>() {
            @Override
            public ListProfilesPagedResponse apply(ListProfilesPage input) {
              return new ListProfilesPagedResponse(input);
            }
          },
          MoreExecutors.directExecutor());
    }
    private ListProfilesPagedResponse(ListProfilesPage page) {
      super(page, ListProfilesFixedSizeCollection.createEmptyCollection());
    }
  }
  /** A single page of {@link #listProfiles} results. */
  public static class ListProfilesPage
      extends AbstractPage<ListProfilesRequest, ListProfilesResponse, Profile, ListProfilesPage> {
    private ListProfilesPage(
        PageContext<ListProfilesRequest, ListProfilesResponse, Profile> context,
        ListProfilesResponse response) {
      super(context, response);
    }
    private static ListProfilesPage createEmptyPage() {
      return new ListProfilesPage(null, null);
    }
    @Override
    protected ListProfilesPage createPage(
        PageContext<ListProfilesRequest, ListProfilesResponse, Profile> context,
        ListProfilesResponse response) {
      return new ListProfilesPage(context, response);
    }
    @Override
    public ApiFuture<ListProfilesPage> createPageAsync(
        PageContext<ListProfilesRequest, ListProfilesResponse, Profile> context,
        ApiFuture<ListProfilesResponse> futureResponse) {
      return super.createPageAsync(context, futureResponse);
    }
  }
  /** A fixed-size collection of {@link #listProfiles} pages. */
  public static class ListProfilesFixedSizeCollection
      extends AbstractFixedSizeCollection<
          ListProfilesRequest,
          ListProfilesResponse,
          Profile,
          ListProfilesPage,
          ListProfilesFixedSizeCollection> {
    private ListProfilesFixedSizeCollection(List<ListProfilesPage> pages, int collectionSize) {
      super(pages, collectionSize);
    }
    private static ListProfilesFixedSizeCollection createEmptyCollection() {
      return new ListProfilesFixedSizeCollection(null, 0);
    }
    @Override
    protected ListProfilesFixedSizeCollection createCollection(
        List<ListProfilesPage> pages, int collectionSize) {
      return new ListProfilesFixedSizeCollection(pages, collectionSize);
    }
  }
  /** Paged response wrapper returned by {@link #searchProfiles}. */
  public static class SearchProfilesPagedResponse
      extends AbstractPagedListResponse<
          SearchProfilesRequest,
          SearchProfilesResponse,
          HistogramQueryResult,
          SearchProfilesPage,
          SearchProfilesFixedSizeCollection> {
    /** Asynchronously wraps the first page future into a paged response future. */
    public static ApiFuture<SearchProfilesPagedResponse> createAsync(
        PageContext<SearchProfilesRequest, SearchProfilesResponse, HistogramQueryResult> context,
        ApiFuture<SearchProfilesResponse> futureResponse) {
      ApiFuture<SearchProfilesPage> futurePage =
          SearchProfilesPage.createEmptyPage().createPageAsync(context, futureResponse);
      return ApiFutures.transform(
          futurePage,
          new ApiFunction<SearchProfilesPage, SearchProfilesPagedResponse>() {
            @Override
            public SearchProfilesPagedResponse apply(SearchProfilesPage input) {
              return new SearchProfilesPagedResponse(input);
            }
          },
          MoreExecutors.directExecutor());
    }
    private SearchProfilesPagedResponse(SearchProfilesPage page) {
      super(page, SearchProfilesFixedSizeCollection.createEmptyCollection());
    }
  }
  /** A single page of {@link #searchProfiles} results. */
  public static class SearchProfilesPage
      extends AbstractPage<
          SearchProfilesRequest, SearchProfilesResponse, HistogramQueryResult, SearchProfilesPage> {
    private SearchProfilesPage(
        PageContext<SearchProfilesRequest, SearchProfilesResponse, HistogramQueryResult> context,
        SearchProfilesResponse response) {
      super(context, response);
    }
    private static SearchProfilesPage createEmptyPage() {
      return new SearchProfilesPage(null, null);
    }
    @Override
    protected SearchProfilesPage createPage(
        PageContext<SearchProfilesRequest, SearchProfilesResponse, HistogramQueryResult> context,
        SearchProfilesResponse response) {
      return new SearchProfilesPage(context, response);
    }
    @Override
    public ApiFuture<SearchProfilesPage> createPageAsync(
        PageContext<SearchProfilesRequest, SearchProfilesResponse, HistogramQueryResult> context,
        ApiFuture<SearchProfilesResponse> futureResponse) {
      return super.createPageAsync(context, futureResponse);
    }
  }
  /** A fixed-size collection of {@link #searchProfiles} pages. */
  public static class SearchProfilesFixedSizeCollection
      extends AbstractFixedSizeCollection<
          SearchProfilesRequest,
          SearchProfilesResponse,
          HistogramQueryResult,
          SearchProfilesPage,
          SearchProfilesFixedSizeCollection> {
    private SearchProfilesFixedSizeCollection(List<SearchProfilesPage> pages, int collectionSize) {
      super(pages, collectionSize);
    }
    private static SearchProfilesFixedSizeCollection createEmptyCollection() {
      return new SearchProfilesFixedSizeCollection(null, 0);
    }
    @Override
    protected SearchProfilesFixedSizeCollection createCollection(
        List<SearchProfilesPage> pages, int collectionSize) {
      return new SearchProfilesFixedSizeCollection(pages, collectionSize);
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.xmlcache;
import static org.apache.geode.distributed.ConfigurationProperties.MCAST_PORT;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Locale;
import java.util.Properties;
import org.junit.After;
import org.junit.Rule;
import org.junit.Test;
import org.junit.contrib.java.lang.system.RestoreSystemProperties;
import org.junit.experimental.categories.Category;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import org.apache.geode.cache.client.ClientCache;
import org.apache.geode.cache.client.ClientCacheFactory;
import org.apache.geode.distributed.internal.InternalDistributedSystem;
import org.apache.geode.test.junit.categories.IntegrationTest;
/**
* Test cases for {@link CacheXmlParser}.
*
* @since GemFire 8.1
*/
@Category(IntegrationTest.class)
public class CacheXmlParserJUnitTest {

  @Rule
  public final RestoreSystemProperties restoreSystemProperties = new RestoreSystemProperties();

  /** Namespace that resolves, by convention, to {@link MockXmlParser} on the classpath. */
  private static final String NAMESPACE_URI =
      "urn:java:org.apache.geode.internal.cache.xmlcache.MockXmlParser";

  @After
  public void tearDown() throws Exception {
    // Disconnect any DistributedSystem a test created so later tests start from a clean slate.
    InternalDistributedSystem.removeSystem(InternalDistributedSystem.getConnectedInstance());
  }

  /**
   * Test {@link CacheXmlParser#getDelegate(String)}.
   *
   * Asserts that a delegate is found and that the stack and logWriter are setup correctly.
   *
   * Asserts that delegate is cached between calls and that the same instance is returned.
   *
   * Asserts that null is returned when no {@link XmlParser} is registered for namespace.
   *
   * @since GemFire 8.1
   */
  @Test
  public void testGetDelegate() {
    final TestCacheXmlParser cacheXmlParser = new TestCacheXmlParser();
    assertTrue("delegates should be empty.", cacheXmlParser.getDelegates().isEmpty());
    final MockXmlParser delegate = (MockXmlParser) cacheXmlParser.getDelegate(NAMESPACE_URI);
    assertNotNull("Delegate should be found in classpath.", delegate);
    // The delegate must share the parser's working state.
    assertSame("Should have same stack as cacheXmlParser.", cacheXmlParser.stack, delegate.stack);
    assertSame("Should have same stack as cacheXmlParser.", cacheXmlParser.documentLocator,
        delegate.documentLocator);
    assertEquals("Should be exactly 1 delegate.", 1, cacheXmlParser.getDelegates().size());
    assertNotNull("There should be an entry in delegates cache.",
        cacheXmlParser.getDelegates().get(NAMESPACE_URI));
    assertSame("Cached delegate should match the one from get.", delegate,
        cacheXmlParser.getDelegates().get(NAMESPACE_URI));
    // A second lookup must hit the cache, not create a new delegate.
    final MockXmlParser delegate2 = (MockXmlParser) cacheXmlParser.getDelegate(NAMESPACE_URI);
    assertSame("Delegate should be the same between gets.", delegate, delegate2);
    assertEquals("Should still be exactly 1 delegate.", 1, cacheXmlParser.getDelegates().size());
    assertNull(cacheXmlParser.getDelegate("--nothing-should-use-this-namespace--"));
  }

  /**
   * Test that {@link CacheXmlParser} can parse the test cache.xml file.
   *
   * @since Geode 1.2
   */
  @Test
  public void testCacheXmlParserWithSimplePool() {
    assertNotNull("Did not find simple config.xml file", getClass()
        .getResourceAsStream("CacheXmlParserJUnitTest.testSimpleClientCacheXml.cache.xml"));
    Properties nonDefault = new Properties();
    nonDefault.setProperty(MCAST_PORT, "0"); // loner
    // Creating the cache from the xml exercises the parser end-to-end.
    ClientCache cache = new ClientCacheFactory(nonDefault).set("cache-xml-file",
        "xmlcache/CacheXmlParserJUnitTest.testSimpleClientCacheXml.cache.xml").create();
    cache.close();
  }

  /**
   * Test that {@link CacheXmlParser} can parse the test cache.xml file, using the Apache Xerces XML
   * parser.
   *
   * @since Geode 1.3
   */
  @Test
  public void testCacheXmlParserWithSimplePoolXerces() {
    // RestoreSystemProperties rule reverts this after the test.
    System.setProperty("javax.xml.parsers.SAXParserFactory",
        "org.apache.xerces.jaxp.SAXParserFactoryImpl");
    testCacheXmlParserWithSimplePool();
  }

  /**
   * Test that {@link CacheXmlParser} falls back to DTD parsing when locale language is not English.
   *
   * @since Geode 1.0
   */
  @Test
  public void testDTDFallbackWithNonEnglishLocal() {
    // First parse under the current (English) locale as a baseline.
    CacheXmlParser.parse(this.getClass().getResourceAsStream(
        "CacheXmlParserJUnitTest.testDTDFallbackWithNonEnglishLocal.cache.xml"));
    final Locale previousLocale = Locale.getDefault();
    try {
      Locale.setDefault(Locale.JAPAN);
      CacheXmlParser.parse(this.getClass().getResourceAsStream(
          "CacheXmlParserJUnitTest.testDTDFallbackWithNonEnglishLocal.cache.xml"));
    } finally {
      // Always restore the JVM default locale, even if the parse fails.
      Locale.setDefault(previousLocale);
    }
  }

  /**
   * Test that {@link CacheXmlParser} falls back to DTD parsing when locale language is not English,
   * using the Apache Xerces XML parser.
   *
   * @since Geode 1.3
   */
  @Test
  public void testDTDFallbackWithNonEnglishLocalXerces() {
    // RestoreSystemProperties rule reverts this after the test.
    System.setProperty("javax.xml.parsers.SAXParserFactory",
        "org.apache.xerces.jaxp.SAXParserFactoryImpl");
    testDTDFallbackWithNonEnglishLocal();
  }

  /**
   * Get access to {@link CacheXmlParser} protected methods and fields.
   *
   * @since GemFire 8.1
   */
  private static class TestCacheXmlParser extends CacheXmlParser {

    static Field delegatesField;
    static Method getDelegateMethod;

    static {
      try {
        delegatesField = CacheXmlParser.class.getDeclaredField("delegates");
        delegatesField.setAccessible(true);
        getDelegateMethod = CacheXmlParser.class.getDeclaredMethod("getDelegate", String.class);
        getDelegateMethod.setAccessible(true);
      } catch (NoSuchFieldException | SecurityException | NoSuchMethodException e) {
        throw new IllegalStateException(e);
      }
    }

    /**
     * @return {@link CacheXmlParser} private delegates field.
     * @since GemFire 8.1
     */
    @SuppressWarnings("unchecked")
    public HashMap<String, XmlParser> getDelegates() {
      try {
        return (HashMap<String, XmlParser>) delegatesField.get(this);
      } catch (IllegalArgumentException | IllegalAccessException e) {
        throw new IllegalStateException(e);
      }
    }

    /**
     * Access to {@link CacheXmlParser} getDelegate(String) method.
     *
     * @since GemFire 8.1
     */
    public XmlParser getDelegate(final String namespaceUri) {
      try {
        return (XmlParser) getDelegateMethod.invoke(this, namespaceUri);
      } catch (InvocationTargetException e) {
        // Unwrap so the actual failure thrown by getDelegate surfaces in test reports,
        // not the reflection wrapper.
        throw new IllegalStateException(e.getCause());
      } catch (IllegalAccessException | IllegalArgumentException e) {
        throw new IllegalStateException(e);
      }
    }
  }

  /** Minimal {@link XmlParser} used only to verify delegate discovery and caching. */
  public static class MockXmlParser extends AbstractXmlParser {

    @Override
    public String getNamespaceUri() {
      return "urn:java:org.apache.geode.internal.cache.xmlcache.MockXmlParser";
    }

    @Override
    public void startElement(String uri, String localName, String qName, Attributes atts)
        throws SAXException {
      throw new UnsupportedOperationException();
    }

    @Override
    public void endElement(String uri, String localName, String qName) throws SAXException {
      throw new UnsupportedOperationException();
    }
  }
}
| |
package extracells.part;
import appeng.api.config.Actionable;
import appeng.api.networking.security.MachineSource;
import appeng.api.parts.IPartHost;
import appeng.api.parts.IPartRenderHelper;
import appeng.api.storage.IMEMonitor;
import appeng.api.storage.data.IAEFluidStack;
import appeng.api.util.AEColor;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
import extracells.render.TextureManager;
import extracells.util.FluidUtil;
import net.minecraft.client.renderer.RenderBlocks;
import net.minecraft.client.renderer.Tessellator;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.ItemStack;
import net.minecraft.util.IIcon;
import net.minecraft.util.Vec3;
import net.minecraftforge.common.util.ForgeDirection;
import net.minecraftforge.fluids.FluidStack;
import net.minecraftforge.fluids.IFluidContainerItem;
import org.apache.commons.lang3.tuple.MutablePair;
/**
 * Conversion monitor part: extends {@link PartStorageMonitor} so that right-clicking the
 * monitor with a fluid container exchanges the container's contents with the ME fluid
 * storage network (filled container -> empty into network; empty container -> fill from
 * the currently displayed fluid).
 *
 * NOTE(review): behavioral notes below are inferred from the visible calls into
 * FluidUtil / IMEMonitor; confirm against those helpers before relying on them.
 */
public class PartConversionMonitor extends PartStorageMonitor {

    /**
     * Handles a right-click on the part.
     *
     * @param player the clicking player (may be null)
     * @param pos    click position on the part's face
     * @return true when the click was consumed, false to let vanilla handling proceed
     */
    @Override
    public boolean onActivate(EntityPlayer player, Vec3 pos) {
        // Give the base storage monitor first crack at the click (e.g. lock toggling).
        boolean b = super.onActivate(player, pos);
        if (b)
            return b;
        // No player / no world: consume the click and do nothing.
        if (player == null || player.worldObj == null)
            return true;
        // Server side only; the client just consumes the click.
        if (player.worldObj.isRemote)
            return true;
        ItemStack s = player.getCurrentEquippedItem();
        IMEMonitor<IAEFluidStack> mon = getFluidStorage();
        // Only act when the monitor is locked, the player holds an item,
        // and the fluid network is reachable.
        if (this.locked && s != null && mon != null) {
            // Work on a single-item copy of the held stack.
            ItemStack s2 = s.copy();
            s2.stackSize = 1;
            if (FluidUtil.isFilled(s2)) {
                // --- Filled container: empty it into the network. ---
                FluidStack f = FluidUtil.getFluidFromContainer(s2);
                if (f == null)
                    return true;
                IAEFluidStack fl = FluidUtil.createAEFluidStack(f);
                // Simulate first; proceed only if the network would accept the full amount.
                IAEFluidStack not = mon.injectItems(fl.copy(),
                        Actionable.SIMULATE, new MachineSource(this));
                if (mon.canAccept(fl)
                        && (not == null || not.getStackSize() == 0L)) {
                    // Commit the injection.
                    mon.injectItems(fl, Actionable.MODULATE, new MachineSource(
                            this));
                    // Drain the container; empty1.right presumably holds the emptied item.
                    MutablePair<Integer, ItemStack> empty1 = FluidUtil
                            .drainStack(s2, f);
                    ItemStack empty = empty1.right;
                    if (empty != null) {
                        // Drop the emptied container in front of the part's face.
                        dropItems(getHost().getTile().getWorldObj(), getHost()
                                .getTile().xCoord + getSide().offsetX,
                                getHost().getTile().yCoord + getSide().offsetY,
                                getHost().getTile().zCoord + getSide().offsetZ,
                                empty);
                    }
                    // Remove one container from the player's hand.
                    ItemStack s3 = s.copy();
                    s3.stackSize = s3.stackSize - 1;
                    if (s3.stackSize == 0) {
                        player.inventory.setInventorySlotContents(
                                player.inventory.currentItem, null);
                    } else {
                        player.inventory.setInventorySlotContents(
                                player.inventory.currentItem, s3);
                    }
                }
                return true;
            } else if (FluidUtil.isEmpty(s2)) {
                // --- Empty container: fill it with the monitor's displayed fluid. ---
                if (this.fluid == null)
                    return true;
                IAEFluidStack extract;
                if (s2.getItem() instanceof IFluidContainerItem) {
                    // Request up to the container's own capacity.
                    extract = mon.extractItems(FluidUtil.createAEFluidStack(
                            this.fluid, ((IFluidContainerItem) s2.getItem())
                                    .getCapacity(s2)), Actionable.SIMULATE,
                            new MachineSource(this));
                } else
                    // Other containers: FluidUtil presumably supplies a default amount
                    // (e.g. one bucket) — TODO confirm.
                    extract = mon.extractItems(
                            FluidUtil.createAEFluidStack(this.fluid),
                            Actionable.SIMULATE, new MachineSource(this));
                if (extract != null) {
                    // Commit the extraction for the simulated amount.
                    mon.extractItems(FluidUtil
                            .createAEFluidStack(new FluidStack(this.fluid,
                                    (int) extract.getStackSize())),
                            Actionable.MODULATE, new MachineSource(this));
                    MutablePair<Integer, ItemStack> empty1 = FluidUtil
                            .fillStack(s2, extract.getFluidStack());
                    // left == 0 presumably means nothing was filled: return the
                    // extracted fluid to the network and bail out.
                    if (empty1.left == 0) {
                        mon.injectItems(FluidUtil
                                .createAEFluidStack(new FluidStack(this.fluid,
                                        (int) extract.getStackSize())),
                                Actionable.MODULATE, new MachineSource(this));
                        return true;
                    }
                    ItemStack empty = empty1.right;
                    if (empty != null) {
                        // Drop the filled container in front of the part's face.
                        dropItems(getHost().getTile().getWorldObj(), getHost()
                                .getTile().xCoord + getSide().offsetX,
                                getHost().getTile().yCoord + getSide().offsetY,
                                getHost().getTile().zCoord + getSide().offsetZ,
                                empty);
                    }
                    // Remove one container from the player's hand.
                    ItemStack s3 = s.copy();
                    s3.stackSize = s3.stackSize - 1;
                    if (s3.stackSize == 0) {
                        player.inventory.setInventorySlotContents(
                                player.inventory.currentItem, null);
                    } else {
                        player.inventory.setInventorySlotContents(
                                player.inventory.currentItem, s3);
                    }
                }
                return true;
            }
        }
        // Not locked / empty hand / no network: let other handlers run.
        return false;
    }

    /** Renders the part as an item in the inventory: housing, border, then layered face. */
    @Override
    @SideOnly(Side.CLIENT)
    public void renderInventory(IPartRenderHelper rh, RenderBlocks renderer) {
        Tessellator ts = Tessellator.instance;
        IIcon side = TextureManager.TERMINAL_SIDE.getTexture();
        rh.setTexture(side);
        rh.setBounds(4, 4, 13, 12, 12, 14);
        rh.renderInventoryBox(renderer);
        rh.setTexture(side, side, side, TextureManager.BUS_BORDER.getTexture(),
                side, side);
        rh.setBounds(2, 2, 14, 14, 14, 16);
        rh.renderInventoryBox(renderer);
        // Fixed brightness for the item render.
        ts.setBrightness(13 << 20 | 13 << 4);
        rh.setInvColor(0xFFFFFF);
        rh.renderInventoryFace(TextureManager.BUS_BORDER.getTexture(),
                ForgeDirection.SOUTH, renderer);
        rh.setBounds(3, 3, 15, 13, 13, 16);
        // Three-layer monitor face tinted with the Transparent color's variants.
        rh.setInvColor(AEColor.Transparent.blackVariant);
        rh.renderInventoryFace(
                TextureManager.CONVERSION_MONITOR.getTextures()[0],
                ForgeDirection.SOUTH, renderer);
        rh.setInvColor(AEColor.Transparent.mediumVariant);
        rh.renderInventoryFace(
                TextureManager.CONVERSION_MONITOR.getTextures()[1],
                ForgeDirection.SOUTH, renderer);
        rh.setInvColor(AEColor.Transparent.whiteVariant);
        rh.renderInventoryFace(
                TextureManager.CONVERSION_MONITOR.getTextures()[2],
                ForgeDirection.SOUTH, renderer);
        rh.setBounds(5, 5, 12, 11, 11, 13);
        renderInventoryBusLights(rh, renderer);
    }

    /** Renders the part placed in the world, tinted with the host's cable color. */
    @Override
    @SideOnly(Side.CLIENT)
    public void renderStatic(int x, int y, int z, IPartRenderHelper rh,
            RenderBlocks renderer) {
        Tessellator ts = Tessellator.instance;
        IIcon side = TextureManager.TERMINAL_SIDE.getTexture();
        rh.setTexture(side);
        rh.setBounds(4, 4, 13, 12, 12, 14);
        rh.renderBlock(x, y, z, renderer);
        rh.setTexture(side, side, side, TextureManager.BUS_BORDER.getTexture(),
                side, side);
        rh.setBounds(2, 2, 14, 14, 14, 16);
        rh.renderBlock(x, y, z, renderer);
        // Full brightness only when the part has network power.
        if (isActive())
            Tessellator.instance.setBrightness(13 << 20 | 13 << 4);
        ts.setColorOpaque_I(0xFFFFFF);
        rh.renderFace(x, y, z, TextureManager.BUS_BORDER.getTexture(),
                ForgeDirection.SOUTH, renderer);
        IPartHost host = getHost();
        rh.setBounds(3, 3, 15, 13, 13, 16);
        // NOTE(review): texture/variant pairing differs from renderInventory
        // (here [0]->medium, [1]->white, [2]->black vs [0]->black, [1]->medium,
        // [2]->white there) — possibly intentional, verify against the texture set.
        ts.setColorOpaque_I(host.getColor().mediumVariant);
        rh.renderFace(x, y, z,
                TextureManager.CONVERSION_MONITOR.getTextures()[0],
                ForgeDirection.SOUTH, renderer);
        ts.setColorOpaque_I(host.getColor().whiteVariant);
        rh.renderFace(x, y, z,
                TextureManager.CONVERSION_MONITOR.getTextures()[1],
                ForgeDirection.SOUTH, renderer);
        ts.setColorOpaque_I(host.getColor().blackVariant);
        rh.renderFace(x, y, z,
                TextureManager.CONVERSION_MONITOR.getTextures()[2],
                ForgeDirection.SOUTH, renderer);
        rh.setBounds(5, 5, 12, 11, 11, 13);
        renderStaticBusLights(x, y, z, rh, renderer);
    }
}
| |
/**
* Licensed to Jasig under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Jasig licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.jasig.schedassist.impl;
import java.sql.SQLException;
import junit.framework.Assert;
import net.fortuna.ical4j.model.PropertyList;
import net.fortuna.ical4j.model.component.VEvent;
import net.fortuna.ical4j.model.property.Attendee;
import org.easymock.EasyMock;
import org.jasig.schedassist.ICalendarDataDao;
import org.jasig.schedassist.NullAffiliationSourceImpl;
import org.jasig.schedassist.SchedulingException;
import org.jasig.schedassist.impl.owner.AvailableScheduleDao;
import org.jasig.schedassist.impl.owner.DefaultScheduleOwnerImpl;
import org.jasig.schedassist.impl.visitor.DefaultScheduleVisitorImpl;
import org.jasig.schedassist.model.AppointmentRole;
import org.jasig.schedassist.model.AvailableBlock;
import org.jasig.schedassist.model.AvailableBlockBuilder;
import org.jasig.schedassist.model.CommonDateOperations;
import org.jasig.schedassist.model.DefaultEventUtilsImpl;
import org.jasig.schedassist.model.mock.MockCalendarAccount;
import org.junit.Test;
import org.springframework.jdbc.CannotGetJdbcConnectionException;
/**
* Test bench for {@link SchedulingAssistantServiceImpl}.
*
* @author Nicholas Blair, nblair@doit.wisc.edu
* @version $Id: AvailableServiceImplTest.java 1914 2010-04-14 21:17:42Z npblair $
*/
public class SchedulingAssistantServiceImplTest {
/**
 * Expect the RuntimeException raised by the calendar data layer to bubble up
 * out of scheduleAppointment unchanged.
 * @throws Exception
 */
@Test
public void testScheduleAppointmentCalendarDaoUnavailable() throws Exception {
    // owner side of the appointment
    MockCalendarAccount ownerCalendarAccount = new MockCalendarAccount();
    ownerCalendarAccount.setUsername("user1");
    DefaultScheduleOwnerImpl scheduleOwner = new DefaultScheduleOwnerImpl(ownerCalendarAccount, 1);
    // visitor side of the appointment
    MockCalendarAccount visitorCalendarAccount = new MockCalendarAccount();
    visitorCalendarAccount.setUsername("user2");
    DefaultScheduleVisitorImpl scheduleVisitor = new DefaultScheduleVisitorImpl(visitorCalendarAccount);
    // the block the visitor is attempting to book
    AvailableBlock block = AvailableBlockBuilder.createBlock("20091111-1330", "20091111-1400", 1);
    // calendar dao blows up during the conflict check
    ICalendarDataDao calendarDao = EasyMock.createMock(ICalendarDataDao.class);
    calendarDao.checkForConflicts(scheduleOwner, block);
    EasyMock.expectLastCall().andThrow(new RuntimeException());
    AvailableScheduleDao scheduleDao = EasyMock.createMock(AvailableScheduleDao.class);
    EasyMock.expect(scheduleDao.retrieveTargetBlock(scheduleOwner,
            CommonDateOperations.parseDateTimePhrase("20091111-1330"))).andReturn(block);
    EasyMock.replay(calendarDao, scheduleDao);
    SchedulingAssistantServiceImpl service = new SchedulingAssistantServiceImpl();
    service.setAvailableScheduleDao(scheduleDao);
    service.setCalendarDataDao(calendarDao);
    try {
        service.scheduleAppointment(scheduleVisitor, scheduleOwner, block, "description");
        Assert.fail("expected RuntimeException not thrown");
    } catch (RuntimeException e) {
        // expected
    }
    EasyMock.verify(calendarDao, scheduleDao);
}
/**
 * Expect the DataAccessException raised by the schedule dao to bubble up
 * out of scheduleAppointment.
 * @throws Exception
 */
@Test
public void testScheduleAppointmentAvailableScheduleDaoUnavailable() throws Exception {
    // owner side of the appointment
    MockCalendarAccount ownerCalendarAccount = new MockCalendarAccount();
    ownerCalendarAccount.setUsername("user1");
    DefaultScheduleOwnerImpl scheduleOwner = new DefaultScheduleOwnerImpl(ownerCalendarAccount, 1);
    // visitor side of the appointment
    MockCalendarAccount visitorCalendarAccount = new MockCalendarAccount();
    visitorCalendarAccount.setUsername("user2");
    DefaultScheduleVisitorImpl scheduleVisitor = new DefaultScheduleVisitorImpl(visitorCalendarAccount);
    // the block the visitor is attempting to book
    AvailableBlock block = AvailableBlockBuilder.createBlock("20091111-1330", "20091111-1400", 1);
    // the schedule dao reports the database as unreachable
    AvailableScheduleDao scheduleDao = EasyMock.createMock(AvailableScheduleDao.class);
    EasyMock.expect(scheduleDao.retrieveTargetBlock(scheduleOwner, block.getStartTime()))
            .andThrow(new CannotGetJdbcConnectionException("database unavailable", new SQLException()));
    EasyMock.replay(scheduleDao);
    SchedulingAssistantServiceImpl service = new SchedulingAssistantServiceImpl();
    service.setAvailableScheduleDao(scheduleDao);
    try {
        service.scheduleAppointment(scheduleVisitor, scheduleOwner, block, "description");
        Assert.fail("expected CannotGetJdbcConnectionException not thrown");
    } catch (CannotGetJdbcConnectionException e) {
        // expected
    }
    EasyMock.verify(scheduleDao);
}
/**
 * Expect {@link SchedulingAssistantServiceImpl#scheduleAppointment} to return null
 * when the visitor and the owner are backed by the same person.
 * @throws Exception
 */
@Test
public void testScheduleAppointmentOwnerVisitorSamePerson() throws Exception {
    // a single account plays both the owner and the visitor roles
    MockCalendarAccount account = new MockCalendarAccount();
    account.setUsername("user1");
    DefaultScheduleOwnerImpl scheduleOwner = new DefaultScheduleOwnerImpl(account, 1);
    DefaultScheduleVisitorImpl scheduleVisitor = new DefaultScheduleVisitorImpl(account);
    // the block the visitor is attempting to book
    AvailableBlock block = AvailableBlockBuilder.createBlock("20091111-1330", "20091111-1400", 1);
    // no daos are wired in: the same-person short-circuit must return null before touching them
    SchedulingAssistantServiceImpl service = new SchedulingAssistantServiceImpl();
    Assert.assertNull(service.scheduleAppointment(scheduleVisitor, scheduleOwner, block, "description"));
}
/**
 * Expect a SchedulingException when the requested block is not part of the
 * owner's available schedule.
 * @throws Exception
 */
@Test
public void testScheduleAppointmentNotInSchedule() throws Exception {
    // owner side of the appointment
    MockCalendarAccount ownerCalendarAccount = new MockCalendarAccount();
    ownerCalendarAccount.setUsername("user1");
    DefaultScheduleOwnerImpl scheduleOwner = new DefaultScheduleOwnerImpl(ownerCalendarAccount, 1);
    // visitor side of the appointment
    MockCalendarAccount visitorCalendarAccount = new MockCalendarAccount();
    visitorCalendarAccount.setUsername("user2");
    DefaultScheduleVisitorImpl scheduleVisitor = new DefaultScheduleVisitorImpl(visitorCalendarAccount);
    // the block the visitor is attempting to book
    AvailableBlock block = AvailableBlockBuilder.createBlock("20091111-1330", "20091111-1400", 1);
    // the dao finds no matching block in the owner's schedule
    AvailableScheduleDao scheduleDao = EasyMock.createMock(AvailableScheduleDao.class);
    EasyMock.expect(scheduleDao.retrieveTargetBlock(scheduleOwner, block.getStartTime())).andReturn(null);
    EasyMock.replay(scheduleDao);
    SchedulingAssistantServiceImpl service = new SchedulingAssistantServiceImpl();
    service.setAvailableScheduleDao(scheduleDao);
    try {
        service.scheduleAppointment(scheduleVisitor, scheduleOwner, block, "description");
        Assert.fail("expected SchedulingException not thrown");
    } catch (SchedulingException e) {
        // expected
    }
    EasyMock.verify(scheduleDao);
}
/**
 * Expect a SchedulingException when a third visitor attempts to join a block
 * whose visitor limit (2) is already filled.
 * @throws Exception
 */
@Test
public void testScheduleAppointmentVisitorLimitExceeded() throws Exception {
    // construct a schedule owner
    MockCalendarAccount ownerAccount = new MockCalendarAccount();
    ownerAccount.setUsername("user1");
    ownerAccount.setEmailAddress("owner@domain.com");
    ownerAccount.setDisplayName("OWNER OWNER");
    DefaultScheduleOwnerImpl owner = new DefaultScheduleOwnerImpl(ownerAccount, 1);
    // construct a schedule visitor
    MockCalendarAccount visitorAccount = new MockCalendarAccount();
    visitorAccount.setUsername("v1");
    visitorAccount.setEmailAddress("v1@doit.wisc.edu");
    visitorAccount.setDisplayName("VISITOR ONE");
    DefaultScheduleVisitorImpl visitor = new DefaultScheduleVisitorImpl(visitorAccount);
    // construct 2nd visitor
    MockCalendarAccount visitor2Account = new MockCalendarAccount();
    visitor2Account.setUsername("v2");
    visitor2Account.setEmailAddress("v2@doit.wisc.edu");
    visitor2Account.setDisplayName("VISITOR TWO");
    DefaultScheduleVisitorImpl visitor2 = new DefaultScheduleVisitorImpl(visitor2Account);
    // target block with a visitor limit of 2
    AvailableBlock targetBlock = AvailableBlockBuilder.createBlock("20091111-1330", "20091111-1400", 2);
    DefaultEventUtilsImpl eventUtils = new DefaultEventUtilsImpl(new NullAffiliationSourceImpl());
    // construct an existing event already carrying both visitors (limit reached)
    VEvent expectedEvent = eventUtils.constructAvailableAppointment(targetBlock, owner, visitor, "description");
    expectedEvent.getProperties().add(eventUtils.constructSchedulingAssistantAttendee(visitor2.getCalendarAccount(), AppointmentRole.VISITOR));
    // create mock CalendarDao and AvailableScheduleDao
    ICalendarDataDao mockCalendarDao = EasyMock.createMock(ICalendarDataDao.class);
    EasyMock.expect(mockCalendarDao.getExistingAppointment(owner, targetBlock)).andReturn(expectedEvent);
    AvailableScheduleDao mockScheduleDao = EasyMock.createMock(AvailableScheduleDao.class);
    EasyMock.expect(mockScheduleDao.retrieveTargetBlock(owner, CommonDateOperations.parseDateTimePhrase("20091111-1330"))).andReturn(targetBlock);
    EasyMock.replay(mockCalendarDao, mockScheduleDao);
    SchedulingAssistantServiceImpl serviceImpl = new SchedulingAssistantServiceImpl();
    serviceImpl.setAvailableScheduleDao(mockScheduleDao);
    serviceImpl.setCalendarDataDao(mockCalendarDao);
    // reuse the eventUtils that built the existing event instead of constructing a duplicate
    serviceImpl.setEventUtils(eventUtils);
    // construct 3rd visitor — one over the limit
    MockCalendarAccount visitor3Account = new MockCalendarAccount();
    visitor3Account.setUsername("v3");
    visitor3Account.setEmailAddress("v3@doit.wisc.edu");
    visitor3Account.setDisplayName("VISITOR THREE");
    DefaultScheduleVisitorImpl visitor3 = new DefaultScheduleVisitorImpl(visitor3Account);
    try {
        serviceImpl.scheduleAppointment(visitor3, owner, targetBlock, "description");
        Assert.fail("expected SchedulingException not thrown");
    } catch (SchedulingException e) {
        // success
    }
    EasyMock.verify(mockCalendarDao, mockScheduleDao);
}
/**
 * Control case: a valid request returns the appointment created by the
 * calendar data dao.
 * @throws Exception
 */
@Test
public void testScheduleAppointmentControl() throws Exception {
    // owner side of the appointment
    MockCalendarAccount ownerCalendarAccount = new MockCalendarAccount();
    ownerCalendarAccount.setUsername("user1");
    ownerCalendarAccount.setEmailAddress("owner@domain.com");
    ownerCalendarAccount.setDisplayName("OWNER OWNER");
    DefaultScheduleOwnerImpl scheduleOwner = new DefaultScheduleOwnerImpl(ownerCalendarAccount, 1);
    // visitor side of the appointment
    MockCalendarAccount visitorCalendarAccount = new MockCalendarAccount();
    visitorCalendarAccount.setUsername("v1");
    visitorCalendarAccount.setEmailAddress("v1@doit.wisc.edu");
    visitorCalendarAccount.setDisplayName("VISITOR ONE");
    DefaultScheduleVisitorImpl scheduleVisitor = new DefaultScheduleVisitorImpl(visitorCalendarAccount);
    // the block the visitor is booking
    AvailableBlock block = AvailableBlockBuilder.createBlock("20091111-1330", "20091111-1400", 1);
    // the event the calendar layer will hand back
    VEvent expectedEvent = new VEvent();
    // calendar dao: conflict check passes, then the appointment is created
    ICalendarDataDao calendarDao = EasyMock.createMock(ICalendarDataDao.class);
    calendarDao.checkForConflicts(scheduleOwner, block);
    EasyMock.expectLastCall();
    EasyMock.expect(calendarDao.createAppointment(scheduleVisitor, scheduleOwner, block, "description")).andReturn(expectedEvent);
    AvailableScheduleDao scheduleDao = EasyMock.createMock(AvailableScheduleDao.class);
    EasyMock.expect(scheduleDao.retrieveTargetBlock(scheduleOwner,
            CommonDateOperations.parseDateTimePhrase("20091111-1330"))).andReturn(block);
    EasyMock.replay(calendarDao, scheduleDao);
    SchedulingAssistantServiceImpl service = new SchedulingAssistantServiceImpl();
    service.setAvailableScheduleDao(scheduleDao);
    service.setCalendarDataDao(calendarDao);
    VEvent result = service.scheduleAppointment(scheduleVisitor, scheduleOwner, block, "description");
    Assert.assertEquals(expectedEvent, result);
    EasyMock.verify(calendarDao, scheduleDao);
}
/**
 * A visitor-limit-2 block with no existing appointment results in a brand new
 * appointment being created.
 * @throws Exception
 */
@Test
public void testScheduleAppointmentMultipleVisitorsCreate() throws Exception {
    // owner side of the appointment
    MockCalendarAccount ownerCalendarAccount = new MockCalendarAccount();
    ownerCalendarAccount.setUsername("user1");
    ownerCalendarAccount.setEmailAddress("owner@domain.com");
    ownerCalendarAccount.setDisplayName("OWNER OWNER");
    DefaultScheduleOwnerImpl scheduleOwner = new DefaultScheduleOwnerImpl(ownerCalendarAccount, 1);
    // visitor side of the appointment
    MockCalendarAccount visitorCalendarAccount = new MockCalendarAccount();
    visitorCalendarAccount.setUsername("v1");
    visitorCalendarAccount.setEmailAddress("v1@doit.wisc.edu");
    visitorCalendarAccount.setDisplayName("VISITOR ONE");
    DefaultScheduleVisitorImpl scheduleVisitor = new DefaultScheduleVisitorImpl(visitorCalendarAccount);
    // the multi-visitor block (limit 2) being booked
    AvailableBlock block = AvailableBlockBuilder.createBlock("20091111-1330", "20091111-1400", 2);
    // the event the calendar layer will hand back
    VEvent expectedEvent = new VEvent();
    // calendar dao: no existing appointment, conflict check passes, appointment created
    ICalendarDataDao calendarDao = EasyMock.createMock(ICalendarDataDao.class);
    EasyMock.expect(calendarDao.getExistingAppointment(scheduleOwner, block)).andReturn(null);
    calendarDao.checkForConflicts(scheduleOwner, block);
    EasyMock.expectLastCall();
    EasyMock.expect(calendarDao.createAppointment(scheduleVisitor, scheduleOwner, block, "description")).andReturn(expectedEvent);
    AvailableScheduleDao scheduleDao = EasyMock.createMock(AvailableScheduleDao.class);
    EasyMock.expect(scheduleDao.retrieveTargetBlock(scheduleOwner,
            CommonDateOperations.parseDateTimePhrase("20091111-1330"))).andReturn(block);
    EasyMock.replay(calendarDao, scheduleDao);
    SchedulingAssistantServiceImpl service = new SchedulingAssistantServiceImpl();
    service.setAvailableScheduleDao(scheduleDao);
    service.setCalendarDataDao(calendarDao);
    VEvent result = service.scheduleAppointment(scheduleVisitor, scheduleOwner, block, "description");
    Assert.assertEquals(expectedEvent, result);
    EasyMock.verify(calendarDao, scheduleDao);
}
/**
 * A visitor-limit-2 block with an existing, non-full appointment results in the
 * new visitor joining it rather than a new appointment being created.
 * @throws Exception
 */
@Test
public void testScheduleAppointmentMultipleVisitorsJoin() throws Exception {
    // construct a schedule owner
    MockCalendarAccount ownerAccount = new MockCalendarAccount();
    ownerAccount.setUsername("user1");
    ownerAccount.setEmailAddress("owner@domain.com");
    ownerAccount.setDisplayName("OWNER OWNER");
    DefaultScheduleOwnerImpl owner = new DefaultScheduleOwnerImpl(ownerAccount, 1);
    // construct the visitor who already accepted
    MockCalendarAccount visitorAccount = new MockCalendarAccount();
    visitorAccount.setUsername("v1");
    visitorAccount.setEmailAddress("v1@doit.wisc.edu");
    visitorAccount.setDisplayName("VISITOR ONE");
    DefaultScheduleVisitorImpl alreadyAcceptedVisitor = new DefaultScheduleVisitorImpl(visitorAccount);
    // target block with a visitor limit of 2
    AvailableBlock targetBlock = AvailableBlockBuilder.createBlock("20091111-1330", "20091111-1400", 2);
    DefaultEventUtilsImpl eventUtils = new DefaultEventUtilsImpl(new NullAffiliationSourceImpl());
    // construct the existing VEvent holding only the first visitor
    VEvent existingEvent = eventUtils.constructAvailableAppointment(targetBlock, owner, alreadyAcceptedVisitor, null);
    // construct 2nd visitor, who will join
    MockCalendarAccount visitor2Account = new MockCalendarAccount();
    visitor2Account.setUsername("v2");
    visitor2Account.setEmailAddress("v2@doit.wisc.edu");
    visitor2Account.setDisplayName("VISITOR TWO");
    DefaultScheduleVisitorImpl newVisitor = new DefaultScheduleVisitorImpl(visitor2Account);
    // expected result: the existing event plus the new visitor as attendee
    VEvent expectedEvent = new VEvent(new PropertyList(existingEvent.getProperties()));
    Attendee newAttendee = eventUtils.constructSchedulingAssistantAttendee(newVisitor.getCalendarAccount(), AppointmentRole.VISITOR);
    expectedEvent.getProperties().add(newAttendee);
    // create mock CalendarDao and AvailableScheduleDao
    ICalendarDataDao mockCalendarDao = EasyMock.createMock(ICalendarDataDao.class);
    EasyMock.expect(mockCalendarDao.getExistingAppointment(owner, targetBlock)).andReturn(existingEvent);
    EasyMock.expect(mockCalendarDao.joinAppointment(newVisitor, owner, existingEvent)).andReturn(expectedEvent);
    AvailableScheduleDao mockScheduleDao = EasyMock.createMock(AvailableScheduleDao.class);
    EasyMock.expect(mockScheduleDao.retrieveTargetBlock(owner, CommonDateOperations.parseDateTimePhrase("20091111-1330"))).andReturn(targetBlock);
    EasyMock.replay(mockCalendarDao, mockScheduleDao);
    SchedulingAssistantServiceImpl serviceImpl = new SchedulingAssistantServiceImpl();
    serviceImpl.setAvailableScheduleDao(mockScheduleDao);
    serviceImpl.setCalendarDataDao(mockCalendarDao);
    // reuse the eventUtils that built the events instead of constructing a duplicate
    serviceImpl.setEventUtils(eventUtils);
    VEvent event = serviceImpl.scheduleAppointment(newVisitor, owner, targetBlock, null);
    Assert.assertEquals(expectedEvent, event);
    EasyMock.verify(mockCalendarDao, mockScheduleDao);
}
/**
 * Expect {@link SchedulingAssistantServiceImpl#cancelAppointment} to return
 * immediately — without touching the daos — when the visitor and the owner are
 * backed by the same person.
 * @throws Exception
 */
@Test
public void testCancelAppointmentOwnerVisitorSamePerson() throws Exception {
    // a single account plays both the owner and the visitor roles
    MockCalendarAccount account = new MockCalendarAccount();
    account.setUsername("user1");
    DefaultScheduleOwnerImpl scheduleOwner = new DefaultScheduleOwnerImpl(account, 1);
    DefaultScheduleVisitorImpl scheduleVisitor = new DefaultScheduleVisitorImpl(account);
    SchedulingAssistantServiceImpl service = new SchedulingAssistantServiceImpl();
    // no daos are wired in; if owner.isSamePerson(visitor) were ignored this call would fail
    service.cancelAppointment(scheduleVisitor, scheduleOwner, null, null, "cancel reason");
}
/**
 * Cancelling on a multi-visitor block removes only the leaving visitor's
 * attendee from the existing appointment (leave, not full cancel).
 * @throws Exception
 */
@Test
public void testCancelAppointmentMultipleVisitorsLeave() throws Exception {
    // construct a schedule owner
    MockCalendarAccount ownerAccount = new MockCalendarAccount();
    ownerAccount.setUsername("user1");
    ownerAccount.setEmailAddress("owner@domain.com");
    ownerAccount.setDisplayName("OWNER OWNER");
    DefaultScheduleOwnerImpl owner = new DefaultScheduleOwnerImpl(ownerAccount, 1);
    // construct a schedule visitor
    MockCalendarAccount visitorAccount = new MockCalendarAccount();
    visitorAccount.setUsername("v1");
    visitorAccount.setEmailAddress("v1@doit.wisc.edu");
    visitorAccount.setDisplayName("VISITOR ONE");
    DefaultScheduleVisitorImpl visitor = new DefaultScheduleVisitorImpl(visitorAccount);
    // target block with a visitor limit of 2
    AvailableBlock targetBlock = AvailableBlockBuilder.createBlock("20091111-1330", "20091111-1400", 2);
    DefaultEventUtilsImpl eventUtils = new DefaultEventUtilsImpl(new NullAffiliationSourceImpl());
    // construct the existing VEvent with an extra attendee for the visitor
    VEvent existingEvent = eventUtils.constructAvailableAppointment(targetBlock, owner, visitor, "event description");
    Attendee newAttendee = eventUtils.constructSchedulingAssistantAttendee(visitor.getCalendarAccount(), AppointmentRole.VISITOR);
    existingEvent.getProperties().add(newAttendee);
    // expected result: same event minus the leaving visitor's attendee
    VEvent expectedEvent = new VEvent(new PropertyList(existingEvent.getProperties()));
    expectedEvent.getProperties().remove(newAttendee);
    // calendar dao: look up the existing appointment, then process the leave
    ICalendarDataDao mockCalendarDao = EasyMock.createMock(ICalendarDataDao.class);
    EasyMock.expect(mockCalendarDao.getExistingAppointment(owner, targetBlock)).andReturn(existingEvent);
    EasyMock.expect(mockCalendarDao.leaveAppointment(visitor, owner, existingEvent)).andReturn(expectedEvent);
    EasyMock.replay(mockCalendarDao);
    SchedulingAssistantServiceImpl serviceImpl = new SchedulingAssistantServiceImpl();
    serviceImpl.setCalendarDataDao(mockCalendarDao);
    // reuse the eventUtils that built the events instead of constructing a duplicate
    serviceImpl.setEventUtils(eventUtils);
    serviceImpl.cancelAppointment(visitor, owner, existingEvent, targetBlock, "cancel reason");
    EasyMock.verify(mockCalendarDao);
}
/**
 * Control (happy path) flow for cancelAppointment: the service locates the
 * existing appointment for the target block, then delegates the cancel to the
 * calendar data dao exactly once.
 *
 * @throws Exception
 */
@Test
public void testCancelAppointmentControl() throws Exception {
    // Schedule owner backed by a mock calendar account.
    MockCalendarAccount ownerCalendarAccount = new MockCalendarAccount();
    ownerCalendarAccount.setUsername("user1");
    ownerCalendarAccount.setEmailAddress("owner@domain.com");
    ownerCalendarAccount.setDisplayName("OWNER OWNER");
    DefaultScheduleOwnerImpl scheduleOwner = new DefaultScheduleOwnerImpl(ownerCalendarAccount, 1);
    // Schedule visitor backed by a mock calendar account.
    MockCalendarAccount visitorCalendarAccount = new MockCalendarAccount();
    visitorCalendarAccount.setUsername("v1");
    visitorCalendarAccount.setEmailAddress("v1@doit.wisc.edu");
    visitorCalendarAccount.setDisplayName("VISITOR ONE");
    DefaultScheduleVisitorImpl scheduleVisitor = new DefaultScheduleVisitorImpl(visitorCalendarAccount);
    // The available block (visitor limit 1) the appointment lives in.
    AvailableBlock block = AvailableBlockBuilder.createBlock("20091111-1330", "20091111-1400", 1);
    DefaultEventUtilsImpl utils = new DefaultEventUtilsImpl(new NullAffiliationSourceImpl());
    // The successfully constructed VEvent the dao is expected to find and cancel.
    VEvent appointment = utils.constructAvailableAppointment(block, scheduleOwner, scheduleVisitor, "description");
    // Mock calendar data dao: lookup returns the appointment, cancel is a void expectation.
    ICalendarDataDao calendarDataDao = EasyMock.createMock(ICalendarDataDao.class);
    EasyMock.expect(calendarDataDao.getExistingAppointment(scheduleOwner, block)).andReturn(appointment);
    calendarDataDao.cancelAppointment(scheduleVisitor, scheduleOwner, appointment);
    EasyMock.expectLastCall();
    EasyMock.replay(calendarDataDao);
    // Wire the service under test and exercise the cancel path.
    SchedulingAssistantServiceImpl service = new SchedulingAssistantServiceImpl();
    service.setCalendarDataDao(calendarDataDao);
    service.setEventUtils(new DefaultEventUtilsImpl(new NullAffiliationSourceImpl()));
    service.cancelAppointment(scheduleVisitor, scheduleOwner, appointment, block, "cancel reason");
    EasyMock.verify(calendarDataDao);
}
}
| |
package com.vumobile.celeb.Adapters;
import android.content.Context;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.ImageView;
import android.widget.TextView;
import com.android.volley.Request;
import com.android.volley.RequestQueue;
import com.android.volley.Response;
import com.android.volley.VolleyError;
import com.android.volley.toolbox.StringRequest;
import com.android.volley.toolbox.Volley;
import com.sdsmdg.tastytoast.TastyToast;
import com.squareup.picasso.Picasso;
import com.vumobile.Config.Api;
import com.vumobile.celeb.R;
import com.vumobile.celeb.Utils.CelebrityClass;
import com.vumobile.fan.login.Session;
import org.json.JSONArray;
import org.json.JSONObject;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Created by toukirul on 12/4/2017.
*/
/**
 * List adapter that renders one row per celebrity (name, avatar, follower count,
 * online indicator) and lets the user follow/unfollow a celebrity via a Volley
 * POST to the backend.
 */
public class CelebrityListAdapter extends ArrayAdapter<CelebrityClass> {

    Context mContext;
    // One shared queue for the whole adapter. The original code created a brand-new
    // RequestQueue (with its own cache and dispatcher threads) for every follow click.
    private RequestQueue requestQueue;

    public CelebrityListAdapter(Context context, int textViewResourceId) {
        super(context, textViewResourceId);
    }

    public CelebrityListAdapter(Context context, int resource, List<CelebrityClass> items) {
        super(context, resource, items);
        this.mContext = context;
    }

    @Override
    public View getView(final int position, View convertView, ViewGroup parent) {
        View v = convertView;
        if (v == null) {
            // Inflate against the parent (attachToRoot=false) so the row's layout
            // params from celeb_list_row are honored; inflating with null drops them.
            v = LayoutInflater.from(getContext()).inflate(R.layout.celeb_list_row, parent, false);
        }
        final CelebrityClass p = getItem(position);
        if (p != null) {
            TextView tt1 = (TextView) v.findViewById(R.id.txtCelebName);
            ImageView tt2 = (ImageView) v.findViewById(R.id.imgCeleb);
            final ImageView flw = (ImageView) v.findViewById(R.id.imageViewFollower);
            ImageView imageViewOnlineStatus = (ImageView) v.findViewById(R.id.imageViewOnlineStatus);
            final TextView textViewFollowerCount = (TextView) v.findViewById(R.id.textViewFollowerCount);
            textViewFollowerCount.setText(p.getFollowerCount());
            if (tt1 != null) {
                tt1.setText(p.getCeleb_name());
            }
            if (tt2 != null) {
                Picasso.with(mContext).load(p.getCeleb_image()).into(tt2);
            }
            // Live indicator. "1".equals(...) is null-safe; the original
            // isOnline.equals("1") || isOnline.matches("1") was redundant and
            // would NPE on a null status.
            final boolean isOnline = "1".equals(p.getIsOnline());
            if (isOnline) {
                imageViewOnlineStatus.setImageDrawable(mContext.getResources().getDrawable(R.drawable.followicononlinet));
            } else {
                imageViewOnlineStatus.setImageDrawable(mContext.getResources().getDrawable(R.drawable.followicononline));
            }
            // Upcoming/live label. Status 1 = scheduled (hide follow icon, show
            // schedule — unless already live), status 2 = live now.
            if (p.getNextLiveStatus() == 1) {
                flw.setVisibility(View.INVISIBLE);
                textViewFollowerCount.setText("Up Coming: " + p.getNextLive());
                if (isOnline) {
                    textViewFollowerCount.setText("Live Now...");
                }
            } else if (p.getNextLiveStatus() == 2) {
                flw.setVisibility(View.VISIBLE);
                textViewFollowerCount.setText("Live Now...");
            }
            // Follow button reflects current follow state ("1" = following).
            Log.d("follll", "getView: " + "1".equals(p.getIsfollow()));
            if ("1".equals(p.getIsfollow())) {
                flw.setImageDrawable(mContext.getResources().getDrawable(R.drawable.ic_action_unfollow));
            } else {
                flw.setImageDrawable(mContext.getResources().getDrawable(R.drawable.ic_action_follow_plus));
            }
            flw.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View view) {
                    CelebrityClass item = getItem(position);
                    String ph = Session.retreivePhone(mContext, Session.USER_PHONE);
                    // NOTE(review): toggling on the drawable's ConstantState is fragile
                    // (breaks if resources alias); preserved because the model's follow
                    // flag is not updated on click — confirm before changing.
                    if (flw.getDrawable().getConstantState().equals(
                            mContext.getResources().getDrawable(R.drawable.ic_action_follow_plus).getConstantState())) {
                        makeFollower(ph, item.getCeleb_code(), textViewFollowerCount);
                        flw.setImageDrawable(mContext.getResources().getDrawable(R.drawable.ic_action_unfollow));
                    } else {
                        makeUnFollower(ph, item.getCeleb_code(), textViewFollowerCount);
                        flw.setImageDrawable(mContext.getResources().getDrawable(R.drawable.ic_action_follow_plus));
                    }
                }
            });
        }
        return v;
    }

    /** POST a follow request for {@code celebPhone} on behalf of {@code fanPhone}. */
    private void makeFollower(String fanPhone, String celebPhone, TextView textViewFollowerCount) {
        Log.d("phone of both", "makeFollower: fan:" + fanPhone + " Cel:" + celebPhone);
        postFollowChange(Api.URL_POST_FOLLOW, fanPhone, celebPhone, textViewFollowerCount);
    }

    /** POST an unfollow request for {@code celebPhone} on behalf of {@code fanPhone}. */
    private void makeUnFollower(String fanPhone, String celebPhone, TextView textViewFollowerCount) {
        Log.d("phone of both", "make un Follower: fan:" + fanPhone + " Cel:" + celebPhone);
        postFollowChange(Api.URL_POST_UNFOLLOW, fanPhone, celebPhone, textViewFollowerCount);
    }

    /**
     * Shared implementation of the follow/unfollow POST (the two originals were
     * identical except for the URL). On success the server returns
     * {"result": "[{\"Follower\": <count>, \"result\": <message>}]"}; the follower
     * count is written into {@code textViewFollowerCount} and the message toasted.
     *
     * @param url        follow or unfollow endpoint
     * @param fanPhone   phone number identifying the fan
     * @param celebPhone code identifying the celebrity
     * @param textViewFollowerCount row view to update with the new follower count
     */
    private void postFollowChange(String url, final String fanPhone, final String celebPhone,
                                  final TextView textViewFollowerCount) {
        StringRequest stringRequest = new StringRequest(Request.Method.POST, url,
                new Response.Listener<String>() {
                    @Override
                    public void onResponse(String response) {
                        Log.d("FromServer follow", response);
                        try {
                            JSONObject jsonObject = new JSONObject(response);
                            String res = jsonObject.getString("result");
                            JSONArray jo = new JSONArray(res);
                            String s = jo.getString(0);
                            JSONObject jst = new JSONObject(s);
                            textViewFollowerCount.setText(jst.getString("Follower"));
                            TastyToast.makeText(mContext, jst.getString("result"), TastyToast.LENGTH_SHORT, TastyToast.SUCCESS);
                        } catch (Exception e) {
                            // Malformed server payload; leave the row as-is.
                            e.printStackTrace();
                        }
                    }
                },
                new Response.ErrorListener() {
                    @Override
                    public void onErrorResponse(VolleyError error) {
                        Log.d("FromServer follow", "" + error.getMessage());
                        // TastyToast.makeText(mContext, "Error!", TastyToast.LENGTH_LONG, TastyToast.ERROR);
                    }
                }) {
            @Override
            protected Map<String, String> getParams() {
                Map<String, String> params = new HashMap<String, String>();
                params.put("fan", fanPhone);
                params.put("celebrity", celebPhone);
                return params;
            }
        };
        getRequestQueue().add(stringRequest);
    }

    /** Lazily create the shared Volley queue (application context avoids leaking an Activity). */
    private RequestQueue getRequestQueue() {
        if (requestQueue == null) {
            requestQueue = Volley.newRequestQueue(mContext.getApplicationContext());
        }
        return requestQueue;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import org.apache.gobblin.metastore.DatasetStateStore;
import org.apache.gobblin.runtime.job.JobProgress;
import org.apache.gobblin.runtime.job.TaskProgress;
import org.apache.hadoop.io.Text;
import com.codahale.metrics.Counter;
import com.codahale.metrics.Gauge;
import com.codahale.metrics.Meter;
import com.google.common.base.Enums;
import com.google.common.base.Optional;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import com.google.gson.stream.JsonWriter;
import com.linkedin.data.template.StringMap;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.SourceState;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.metrics.GobblinMetrics;
import org.apache.gobblin.rest.JobExecutionInfo;
import org.apache.gobblin.rest.JobStateEnum;
import org.apache.gobblin.rest.LauncherTypeEnum;
import org.apache.gobblin.rest.Metric;
import org.apache.gobblin.rest.MetricArray;
import org.apache.gobblin.rest.MetricTypeEnum;
import org.apache.gobblin.rest.TaskExecutionInfoArray;
import org.apache.gobblin.runtime.api.MonitoredObject;
import org.apache.gobblin.runtime.util.JobMetrics;
import org.apache.gobblin.runtime.util.MetricGroup;
import org.apache.gobblin.source.extractor.JobCommitPolicy;
import org.apache.gobblin.source.workunit.WorkUnit;
import org.apache.gobblin.util.ImmutableProperties;
/**
* A class for tracking job state information.
*
* @author Yinan Li
*/
public class JobState extends SourceState implements JobProgress {

  /**
   * An enumeration of possible job states, which are identical to
   * {@link org.apache.gobblin.configuration.WorkUnitState.WorkingState}
   * in terms of naming.
   *
   * <p> Status state diagram:
   * <ul>
   * <li> null => PENDING
   * <li> PENDING => RUNNING
   * <li> PENDING => CANCELLED
   * <li> RUNNING => CANCELLED
   * <li> RUNNING => SUCCESSFUL
   * <li> RUNNING => FAILED
   * <li> SUCCESSFUL => COMMITTED
   * <li> SUCCESSFUL => CANCELLED (cancelled before committing)
   * </ul>
   */
  public enum RunningState implements MonitoredObject {
    /** Pending creation of {@link WorkUnit}s. */
    PENDING,
    /** Starting the execution of {@link WorkUnit}s. */
    RUNNING,
    /** All {@link WorkUnit}s have finished successfully or the job commit policy is
     * {@link JobCommitPolicy#COMMIT_ON_PARTIAL_SUCCESS} */
    SUCCESSFUL,
    /** Job state has been committed */
    COMMITTED,
    /** At least one {@link WorkUnit}s has failed for a job with job commit policy
     * {@link JobCommitPolicy#COMMIT_ON_FULL_SUCCESS}. */
    FAILED,
    /** The execution of the job was cancelled. */
    CANCELLED;

    /** @return whether this state is {@link #CANCELLED} */
    public boolean isCancelled() {
      return this.equals(CANCELLED);
    }

    /** @return whether this is a terminal state (committed, failed, or cancelled) */
    public boolean isDone() {
      return this.equals(COMMITTED) || this.equals(FAILED) || this.equals(CANCELLED);
    }

    /** @return whether the job finished successfully, i.e. reached {@link #COMMITTED} */
    public boolean isSuccess() {
      return this.equals(COMMITTED);
    }

    /** @return whether the job ended in {@link #FAILED} */
    public boolean isFailure() {
      return this.equals(FAILED);
    }

    /** @return whether the job is currently {@link #RUNNING} or already done */
    public boolean isRunningOrDone() {
      return isDone() || this.equals(RUNNING);
    }
  }

  private String jobName;
  private String jobId;
  private long startTime = 0;
  private long endTime = 0;
  private long duration = 0;
  private RunningState state = RunningState.PENDING;
  private int taskCount = 0;
  // Insertion-ordered map from task ID to task state; order matters for serialization.
  private final Map<String, TaskState> taskStates = Maps.newLinkedHashMap();
  // Skipped task states shouldn't be exposed to publisher, but they need to be in JobState and DatasetState so that they can be written to StateStore.
  private final Map<String, TaskState> skippedTaskStates = Maps.newLinkedHashMap();
  // NOTE(review): not referenced by any method visible in this class — presumably
  // set/used elsewhere (or dead); verify before removing.
  private DatasetStateStore datasetStateStore;

  // Necessary for serialization/deserialization
  public JobState() {
  }

  public JobState(String jobName, String jobId) {
    this.jobName = jobName;
    this.jobId = jobId;
    this.setId(jobId);
  }

  public JobState(State properties, String jobName, String jobId) {
    super(properties);
    this.jobName = jobName;
    this.jobId = jobId;
    this.setId(jobId);
  }

  public JobState(State properties, Map<String, JobState.DatasetState> previousDatasetStates, String jobName,
      String jobId) {
    // Previous dataset states are flattened into previous work unit states for the SourceState.
    super(properties, previousDatasetStates, workUnitStatesFromDatasetStates(previousDatasetStates.values()));
    this.jobName = jobName;
    this.jobId = jobId;
    this.setId(jobId);
  }

  /** @return the job name stored under {@link ConfigurationKeys#JOB_NAME_KEY}, or null if absent */
  public static String getJobNameFromState(State state) {
    return state.getProp(ConfigurationKeys.JOB_NAME_KEY);
  }

  /** @return the job name stored under {@link ConfigurationKeys#JOB_NAME_KEY}, or null if absent */
  public static String getJobNameFromProps(Properties props) {
    return props.getProperty(ConfigurationKeys.JOB_NAME_KEY);
  }

  /** @return the job group stored under {@link ConfigurationKeys#JOB_GROUP_KEY}, or null if absent */
  public static String getJobGroupFromState(State state) {
    return state.getProp(ConfigurationKeys.JOB_GROUP_KEY);
  }

  /** @return the job group stored under {@link ConfigurationKeys#JOB_GROUP_KEY}, or null if absent */
  public static String getJobGroupFromProps(Properties props) {
    return props.getProperty(ConfigurationKeys.JOB_GROUP_KEY);
  }

  /** @return the job description stored under {@link ConfigurationKeys#JOB_DESCRIPTION_KEY}, or null if absent */
  public static String getJobDescriptionFromProps(State state) {
    return state.getProp(ConfigurationKeys.JOB_DESCRIPTION_KEY);
  }

  /** @return the job description stored under {@link ConfigurationKeys#JOB_DESCRIPTION_KEY}, or null if absent */
  public static String getJobDescriptionFromProps(Properties props) {
    return props.getProperty(ConfigurationKeys.JOB_DESCRIPTION_KEY);
  }

  /**
   * Get job name.
   *
   * @return job name
   */
  public String getJobName() {
    return this.jobName;
  }

  /**
   * Set job name.
   *
   * @param jobName job name
   */
  public void setJobName(String jobName) {
    this.jobName = jobName;
  }

  /**
   * Get job ID.
   *
   * @return job ID
   */
  public String getJobId() {
    return this.jobId;
  }

  /**
   * Set job ID.
   *
   * @param jobId job ID
   */
  public void setJobId(String jobId) {
    this.jobId = jobId;
  }

  /**
   * Get job start time.
   *
   * @return job start time
   */
  public long getStartTime() {
    return this.startTime;
  }

  /**
   * Set job start time.
   *
   * @param startTime job start time
   */
  public void setStartTime(long startTime) {
    this.startTime = startTime;
  }

  /**
   * Get job end time.
   *
   * @return job end time
   */
  public long getEndTime() {
    return this.endTime;
  }

  /**
   * Get the currently elapsed time for this job.
   *
   * @return end-to-end time if the job has ended, time elapsed so far if it has
   *         started, or 0 if it has not started yet
   */
  public long getElapsedTime() {
    if (this.endTime > 0) {
      return this.endTime - this.startTime;
    }
    if (this.startTime > 0) {
      return System.currentTimeMillis() - this.startTime;
    }
    return 0;
  }

  /**
   * Set job end time.
   *
   * @param endTime job end time
   */
  public void setEndTime(long endTime) {
    this.endTime = endTime;
  }

  /**
   * Get job duration in milliseconds.
   *
   * @return job duration in milliseconds
   */
  public long getDuration() {
    return this.duration;
  }

  /**
   * Set job duration in milliseconds.
   *
   * @param duration job duration in milliseconds
   */
  public void setDuration(long duration) {
    this.duration = duration;
  }

  /**
   * Get job running state of type {@link RunningState}.
   *
   * @return job running state of type {@link RunningState}
   */
  public synchronized RunningState getState() {
    return this.state;
  }

  /**
   * Set job running state of type {@link RunningState}.
   *
   * @param state job running state of type {@link RunningState}
   */
  public synchronized void setState(RunningState state) {
    this.state = state;
  }

  /**
   * Get the number of tasks this job consists of.
   *
   * @return number of tasks this job consists of
   */
  public int getTaskCount() {
    return this.taskCount;
  }

  /**
   * Set the number of tasks this job consists of.
   *
   * @param taskCount number of tasks this job consists of
   */
  public void setTaskCount(int taskCount) {
    this.taskCount = taskCount;
  }

  /**
   * Increment the number of tasks by 1.
   */
  public void incrementTaskCount() {
    this.taskCount++;
  }

  /**
   * Add a single {@link TaskState}.
   *
   * @param taskState {@link TaskState} to add
   */
  public void addTaskState(TaskState taskState) {
    this.taskStates.put(taskState.getTaskId(), taskState);
  }

  /**
   * Add a single skipped {@link TaskState}; skipped states are persisted but not
   * exposed through {@link #getTaskStates()}.
   *
   * @param taskState skipped {@link TaskState} to add
   */
  public void addSkippedTaskState(TaskState taskState) {
    this.skippedTaskStates.put(taskState.getTaskId(), taskState);
  }

  /**
   * Remove a {@link TaskState} and decrement the task count accordingly.
   *
   * @param taskState {@link TaskState} to remove
   */
  public void removeTaskState(TaskState taskState) {
    this.taskStates.remove(taskState.getTaskId());
    this.taskCount--;
  }

  /**
   * Filter the task states corresponding to the skipped work units and add it to the skippedTaskStates
   */
  public void filterSkippedTaskStates() {
    // Collect first, then move, to avoid mutating taskStates while iterating it.
    List<TaskState> skippedTaskStates = new ArrayList<>();
    for (TaskState taskState : this.taskStates.values()) {
      if (taskState.getWorkingState() == WorkUnitState.WorkingState.SKIPPED) {
        skippedTaskStates.add(taskState);
      }
    }
    for (TaskState taskState : skippedTaskStates) {
      removeTaskState(taskState);
      addSkippedTaskState(taskState);
    }
  }

  /**
   * Add a collection of {@link TaskState}s.
   *
   * @param taskStates collection of {@link TaskState}s to add
   */
  public void addTaskStates(Collection<TaskState> taskStates) {
    for (TaskState taskState : taskStates) {
      this.taskStates.put(taskState.getTaskId(), taskState);
    }
  }

  /**
   * Add a collection of skipped {@link TaskState}s.
   *
   * @param taskStates collection of skipped {@link TaskState}s to add
   */
  public void addSkippedTaskStates(Collection<TaskState> taskStates) {
    for (TaskState taskState : taskStates) {
      addSkippedTaskState(taskState);
    }
  }

  /**
   * Get the number of completed tasks.
   *
   * @return number of completed tasks
   */
  public int getCompletedTasks() {
    int completedTasks = 0;
    for (TaskState taskState : this.taskStates.values()) {
      if (taskState.isCompleted()) {
        completedTasks++;
      }
    }
    return completedTasks;
  }

  /**
   * Get {@link TaskState}s of {@link Task}s of this job.
   *
   * @return a list of {@link TaskState}s
   */
  public List<TaskState> getTaskStates() {
    return ImmutableList.<TaskState>builder().addAll(this.taskStates.values()).build();
  }

  /** {@inheritDoc} — task progress is simply the (non-skipped) task states. */
  @Override
  public List<TaskState> getTaskProgress() {
    return getTaskStates();
  }

  /**
   * Create a {@link Map} from dataset URNs (as being specified by {@link ConfigurationKeys#DATASET_URN_KEY} to
   * {@link DatasetState} objects that represent the dataset states and store {@link TaskState}s corresponding
   * to the datasets.
   *
   * <p>
   * {@link TaskState}s that do not have {@link ConfigurationKeys#DATASET_URN_KEY} set will be added to
   * the dataset state belonging to {@link ConfigurationKeys#DEFAULT_DATASET_URN}.
   * </p>
   *
   * @return a {@link Map} from dataset URNs to {@link DatasetState}s representing the dataset states
   */
  public Map<String, DatasetState> createDatasetStatesByUrns() {
    Map<String, DatasetState> datasetStatesByUrns = Maps.newHashMap();
    for (TaskState taskState : this.taskStates.values()) {
      String datasetUrn = createDatasetUrn(datasetStatesByUrns, taskState);
      datasetStatesByUrns.get(datasetUrn).incrementTaskCount();
      datasetStatesByUrns.get(datasetUrn).addTaskState(taskState);
    }
    // Skipped task states are carried along but do NOT count toward the task count.
    for (TaskState taskState : this.skippedTaskStates.values()) {
      String datasetUrn = createDatasetUrn(datasetStatesByUrns, taskState);
      datasetStatesByUrns.get(datasetUrn).addSkippedTaskState(taskState);
    }
    return ImmutableMap.copyOf(datasetStatesByUrns);
  }

  /**
   * Resolve the dataset URN for a task state, lazily creating (and registering) the
   * corresponding {@link DatasetState} on first sight of that URN.
   */
  private String createDatasetUrn(Map<String, DatasetState> datasetStatesByUrns, TaskState taskState) {
    String datasetUrn = taskState.getProp(ConfigurationKeys.DATASET_URN_KEY, ConfigurationKeys.DEFAULT_DATASET_URN);
    if (!datasetStatesByUrns.containsKey(datasetUrn)) {
      DatasetState datasetState = newDatasetState(false);
      datasetState.setDatasetUrn(datasetUrn);
      datasetStatesByUrns.put(datasetUrn, datasetState);
    }
    return datasetUrn;
  }

  /**
   * Get task states of {@link Task}s of this job as {@link WorkUnitState}s.
   *
   * @return a list of {@link WorkUnitState}s
   */
  public List<WorkUnitState> getTaskStatesAsWorkUnitStates() {
    ImmutableList.Builder<WorkUnitState> builder = ImmutableList.builder();
    for (TaskState taskState : this.taskStates.values()) {
      WorkUnitState workUnitState = new WorkUnitState(taskState.getWorkunit(), taskState.getJobState());
      workUnitState.setId(taskState.getId());
      workUnitState.addAll(taskState);
      builder.add(workUnitState);
    }
    return builder.build();
  }

  /**
   * Get the {@link LauncherTypeEnum} for this {@link JobState}.
   */
  public LauncherTypeEnum getLauncherType() {
    // Falls back to LOCAL both when the property is unset and when it names an unknown enum value.
    return Enums.getIfPresent(LauncherTypeEnum.class,
        this.getProp(ConfigurationKeys.JOB_LAUNCHER_TYPE_KEY, JobLauncherFactory.JobLauncherType.LOCAL.name()))
        .or(LauncherTypeEnum.LOCAL);
  }

  /**
   * Sets the {@link LauncherTypeEnum} for this {@link JobState}.
   */
  public void setJobLauncherType(LauncherTypeEnum jobLauncherType) {
    this.setProp(ConfigurationKeys.JOB_LAUNCHER_TYPE_KEY, jobLauncherType.name());
  }

  /**
   * Get the tracking URL for this {@link JobState}.
   */
  public Optional<String> getTrackingURL() {
    return Optional.fromNullable(this.getProp(ConfigurationKeys.JOB_TRACKING_URL_KEY));
  }

  /**
   * Deserialize this {@link JobState} from a {@link DataInput}. The read order must
   * mirror {@link #write(DataOutput, boolean, boolean)} exactly.
   */
  @Override
  public void readFields(DataInput in)
      throws IOException {
    Text text = new Text();
    text.readFields(in);
    // intern() — job names/IDs repeat across many states, so share the strings.
    this.jobName = text.toString().intern();
    text.readFields(in);
    this.jobId = text.toString().intern();
    this.setId(this.jobId);
    this.startTime = in.readLong();
    this.endTime = in.readLong();
    this.duration = in.readLong();
    text.readFields(in);
    this.state = RunningState.valueOf(text.toString());
    this.taskCount = in.readInt();
    int numTaskStates = in.readInt();
    getTaskStateWithCommonAndSpecWuProps(numTaskStates, in);
    super.readFields(in);
  }

  /**
   * Read {@code numTaskStates} {@link TaskState}s, then factor their work unit
   * properties into one shared immutable "common" set plus a per-task "specific"
   * set, so identical property values are stored only once in memory.
   */
  private void getTaskStateWithCommonAndSpecWuProps(int numTaskStates, DataInput in)
      throws IOException {
    Properties commonWuProps = new Properties();
    for (int i = 0; i < numTaskStates; i++) {
      TaskState taskState = new TaskState();
      taskState.readFields(in);
      if (i == 0) {
        // Seed the common set with the first task's properties...
        commonWuProps.putAll(taskState.getWorkunit().getProperties());
      } else {
        // ...then intersect it with each subsequent task's properties.
        Properties newCommonWuProps = new Properties();
        newCommonWuProps
            .putAll(Maps.difference(commonWuProps, taskState.getWorkunit().getProperties()).entriesInCommon());
        commonWuProps = newCommonWuProps;
      }
      this.taskStates.put(taskState.getTaskId().intern(), taskState);
    }
    ImmutableProperties immutableCommonProperties = new ImmutableProperties(commonWuProps);
    for (TaskState taskState : this.taskStates.values()) {
      // Specific props = the task's props that are NOT in the common set.
      Properties newSpecProps = new Properties();
      newSpecProps.putAll(
          Maps.difference(immutableCommonProperties, taskState.getWorkunit().getProperties()).entriesOnlyOnRight());
      taskState.setWuProperties(immutableCommonProperties, newSpecProps);
    }
  }

  /** Serialize with task states and previous work unit states included. */
  @Override
  public void write(DataOutput out)
      throws IOException {
    write(out, true, true);
  }

  /**
   * Serialize this {@link JobState}. The write order must mirror
   * {@link #readFields(DataInput)} exactly.
   *
   * @param writeTasks whether to serialize the task states (regular then skipped);
   *        when false a task-state count of 0 is written instead
   * @param writePreviousWorkUnitStates passed through to the superclass
   */
  public void write(DataOutput out, boolean writeTasks, boolean writePreviousWorkUnitStates)
      throws IOException {
    Text text = new Text();
    text.set(this.jobName);
    text.write(out);
    text.set(this.jobId);
    text.write(out);
    out.writeLong(this.startTime);
    out.writeLong(this.endTime);
    out.writeLong(this.duration);
    text.set(this.state.name());
    text.write(out);
    out.writeInt(this.taskCount);
    if (writeTasks) {
      out.writeInt(this.taskStates.size() + this.skippedTaskStates.size());
      for (TaskState taskState : this.taskStates.values()) {
        taskState.write(out);
      }
      for (TaskState taskState : this.skippedTaskStates.values()) {
        taskState.write(out);
      }
    } else {
      out.writeInt(0);
    }
    super.write(out, writePreviousWorkUnitStates);
  }

  /**
   * Convert this {@link JobState} to a json document.
   *
   * @param jsonWriter a {@link com.google.gson.stream.JsonWriter}
   *                   used to write the json document
   * @param keepConfig whether to keep all configuration properties
   * @throws IOException
   */
  public void toJson(JsonWriter jsonWriter, boolean keepConfig)
      throws IOException {
    jsonWriter.beginObject();
    writeStateSummary(jsonWriter);
    jsonWriter.name("task states");
    jsonWriter.beginArray();
    for (TaskState taskState : this.taskStates.values()) {
      taskState.toJson(jsonWriter, keepConfig);
    }
    for (TaskState taskState : this.skippedTaskStates.values()) {
      taskState.toJson(jsonWriter, keepConfig);
    }
    jsonWriter.endArray();
    if (keepConfig) {
      jsonWriter.name("properties");
      propsToJson(jsonWriter);
    }
    jsonWriter.endObject();
  }

  /**
   * Write a summary to the json document
   *
   * @param jsonWriter a {@link com.google.gson.stream.JsonWriter}
   *                   used to write the json document
   */
  protected void writeStateSummary(JsonWriter jsonWriter) throws IOException {
    jsonWriter.name("job name").value(this.getJobName()).name("job id").value(this.getJobId()).name("job state")
        .value(this.getState().name()).name("start time").value(this.getStartTime()).name("end time")
        .value(this.getEndTime()).name("duration").value(this.getDuration()).name("tasks").value(this.getTaskCount())
        .name("completed tasks").value(this.getCompletedTasks());
  }

  /** Write all configuration properties of this state as one json object. */
  protected void propsToJson(JsonWriter jsonWriter)
      throws IOException {
    jsonWriter.beginObject();
    for (String key : this.getPropertyNames()) {
      jsonWriter.name(key).value(this.getProp(key));
    }
    jsonWriter.endObject();
  }

  /**
   * Equality is based on the superclass state plus job name and job ID only.
   * Assumes jobName and jobId are non-null (as set by the non-default constructors).
   */
  @Override
  public boolean equals(Object object) {
    if (!(object instanceof JobState)) {
      return false;
    }
    JobState other = (JobState) object;
    return super.equals(other) && this.jobName.equals(other.jobName) && this.jobId.equals(other.jobId);
  }

  @Override
  public int hashCode() {
    final int prime = 31;
    int result = super.hashCode();
    result = prime * result + this.jobName.hashCode();
    result = prime * result + this.jobId.hashCode();
    return result;
  }

  /** Pretty-printed json form of this state (configuration properties omitted). */
  @Override
  public String toString() {
    StringWriter stringWriter = new StringWriter();
    try (JsonWriter jsonWriter = new JsonWriter(stringWriter)) {
      jsonWriter.setIndent("\t");
      this.toJson(jsonWriter, false);
    } catch (IOException ioe) {
      // Ignored
    }
    return stringWriter.toString();
  }

  /**
   * Convert this {@link JobState} instance to a {@link JobExecutionInfo} instance.
   *
   * @return a {@link JobExecutionInfo} instance
   */
  public JobExecutionInfo toJobExecutionInfo() {
    JobExecutionInfo jobExecutionInfo = new JobExecutionInfo();
    jobExecutionInfo.setJobName(this.jobName);
    jobExecutionInfo.setJobId(this.jobId);
    if (this.startTime > 0) {
      jobExecutionInfo.setStartTime(this.startTime);
    }
    if (this.endTime > 0) {
      jobExecutionInfo.setEndTime(this.endTime);
    }
    jobExecutionInfo.setDuration(this.duration);
    jobExecutionInfo.setState(JobStateEnum.valueOf(this.state.name()));
    jobExecutionInfo.setLaunchedTasks(this.taskCount);
    jobExecutionInfo.setCompletedTasks(this.getCompletedTasks());
    jobExecutionInfo.setLauncherType(getLauncherType());
    if (getTrackingURL().isPresent()) {
      jobExecutionInfo.setTrackingUrl(getTrackingURL().get());
    }
    // Add task execution information
    TaskExecutionInfoArray taskExecutionInfos = new TaskExecutionInfoArray();
    for (TaskState taskState : this.getTaskStates()) {
      taskExecutionInfos.add(taskState.toTaskExecutionInfo());
    }
    jobExecutionInfo.setTaskExecutions(taskExecutionInfos);
    // Add job metrics (counters, meters, and gauges from the job's metric context)
    JobMetrics jobMetrics = JobMetrics.get(this);
    MetricArray metricArray = new MetricArray();
    for (Map.Entry<String, ? extends com.codahale.metrics.Metric> entry : jobMetrics.getMetricContext().getCounters()
        .entrySet()) {
      Metric counter = new Metric();
      counter.setGroup(MetricGroup.JOB.name());
      counter.setName(entry.getKey());
      counter.setType(MetricTypeEnum.valueOf(GobblinMetrics.MetricType.COUNTER.name()));
      counter.setValue(Long.toString(((Counter) entry.getValue()).getCount()));
      metricArray.add(counter);
    }
    for (Map.Entry<String, ? extends com.codahale.metrics.Metric> entry : jobMetrics.getMetricContext().getMeters()
        .entrySet()) {
      Metric meter = new Metric();
      meter.setGroup(MetricGroup.JOB.name());
      meter.setName(entry.getKey());
      meter.setType(MetricTypeEnum.valueOf(GobblinMetrics.MetricType.METER.name()));
      meter.setValue(Double.toString(((Meter) entry.getValue()).getMeanRate()));
      metricArray.add(meter);
    }
    for (Map.Entry<String, ? extends com.codahale.metrics.Metric> entry : jobMetrics.getMetricContext().getGauges()
        .entrySet()) {
      Metric gauge = new Metric();
      gauge.setGroup(MetricGroup.JOB.name());
      gauge.setName(entry.getKey());
      gauge.setType(MetricTypeEnum.valueOf(GobblinMetrics.MetricType.GAUGE.name()));
      gauge.setValue(((Gauge<?>) entry.getValue()).getValue().toString());
      metricArray.add(gauge);
    }
    jobExecutionInfo.setMetrics(metricArray);
    // Add job properties (empty values are dropped)
    Map<String, String> jobProperties = Maps.newHashMap();
    for (String name : this.getPropertyNames()) {
      String value = this.getProp(name);
      if (!Strings.isNullOrEmpty(value)) {
        jobProperties.put(name, value);
      }
    }
    jobExecutionInfo.setJobProperties(new StringMap(jobProperties));
    return jobExecutionInfo;
  }

  /**
   * Create a new {@link JobState.DatasetState} based on this {@link JobState} instance.
   *
   * @param fullCopy whether to do a full copy of this {@link JobState} instance
   *        (running state, task count, and both regular and skipped task states);
   *        a non-full copy carries only name/ID/timing
   * @return a new {@link JobState.DatasetState} object
   */
  public DatasetState newDatasetState(boolean fullCopy) {
    DatasetState datasetState = new DatasetState(this.jobName, this.jobId);
    datasetState.setStartTime(this.startTime);
    datasetState.setEndTime(this.endTime);
    datasetState.setDuration(this.duration);
    if (fullCopy) {
      datasetState.setState(this.state);
      datasetState.setTaskCount(this.taskCount);
      datasetState.addTaskStates(this.taskStates.values());
      datasetState.addSkippedTaskStates(this.skippedTaskStates.values());
    }
    return datasetState;
  }

  /**
   * Flatten a collection of {@link JobState.DatasetState}s into the list of
   * {@link WorkUnitState}s contained in all of them (regular task states only).
   */
  public static List<WorkUnitState> workUnitStatesFromDatasetStates(Iterable<JobState.DatasetState> datasetStates) {
    ImmutableList.Builder<WorkUnitState> taskStateBuilder = ImmutableList.builder();
    for (JobState datasetState : datasetStates) {
      taskStateBuilder.addAll(datasetState.getTaskStatesAsWorkUnitStates());
    }
    return taskStateBuilder.build();
  }

  /**
   * A subclass of {@link JobState} that is used to represent dataset states.
   *
   * <p>
   * A {@code DatasetState} does <em>not</em> contain any properties. Operations such as {@link #getProp(String)}
   * and {@link #setProp(String, Object)} are not supported.
   * </p>
   */
  public static class DatasetState extends JobState {

    // For serialization/deserialization
    public DatasetState() {
      super();
    }

    public DatasetState(String jobName, String jobId) {
      super(jobName, jobId);
    }

    /** Record the dataset URN this state belongs to (bypasses the disabled setProp). */
    public void setDatasetUrn(String datasetUrn) {
      super.setProp(ConfigurationKeys.DATASET_URN_KEY, datasetUrn);
    }

    /** @return the dataset URN, or {@link ConfigurationKeys#DEFAULT_DATASET_URN} if unset */
    public String getDatasetUrn() {
      return super.getProp(ConfigurationKeys.DATASET_URN_KEY, ConfigurationKeys.DEFAULT_DATASET_URN);
    }

    /** Increment the persisted consecutive-failure counter (treats absence as 0). */
    public void incrementJobFailures() {
      super.setProp(ConfigurationKeys.JOB_FAILURES_KEY,
          Integer.parseInt(super.getProp(ConfigurationKeys.JOB_FAILURES_KEY, "0")) + 1);
    }

    /** Reset the persisted consecutive-failure counter to 0. */
    public void setNoJobFailure() {
      super.setProp(ConfigurationKeys.JOB_FAILURES_KEY, 0);
    }

    /**
     * @return the persisted consecutive-failure counter.
     *         NOTE(review): unlike incrementJobFailures this reads with no default —
     *         presumably the key is always set before this is called; verify.
     */
    public int getJobFailures() {
      return Integer.parseInt(super.getProp(ConfigurationKeys.JOB_FAILURES_KEY));
    }

    /** Dataset states expose only the URN and failure count as "properties" in json. */
    @Override
    protected void propsToJson(JsonWriter jsonWriter)
        throws IOException {
      jsonWriter.beginObject();
      jsonWriter.name(ConfigurationKeys.DATASET_URN_KEY).value(getDatasetUrn());
      jsonWriter.name(ConfigurationKeys.JOB_FAILURES_KEY).value(getJobFailures());
      jsonWriter.endObject();
    }

    @Override
    public String getProp(String key) {
      throw new UnsupportedOperationException();
    }

    @Override
    public String getProp(String key, String def) {
      throw new UnsupportedOperationException();
    }

    @Override
    public void setProp(String key, Object value) {
      throw new UnsupportedOperationException();
    }

    @Override
    public void addAll(Properties properties) {
      throw new UnsupportedOperationException();
    }

    @Override
    public void addAllIfNotExist(Properties properties) {
      throw new UnsupportedOperationException();
    }

    @Override
    public void overrideWith(Properties properties) {
      throw new UnsupportedOperationException();
    }

    @Override
    protected void writeStateSummary(JsonWriter jsonWriter)
        throws IOException {
      super.writeStateSummary(jsonWriter);
      jsonWriter.name("datasetUrn").value(getDatasetUrn());
    }
  }
}
| |
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* ContactServiceSoapBindingStub.java
*
* This file was auto-generated from WSDL
* by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
*/
package com.google.api.ads.admanager.axis.v202108;
public class ContactServiceSoapBindingStub extends org.apache.axis.client.Stub implements com.google.api.ads.admanager.axis.v202108.ContactServiceInterface {
    // Parallel, index-aligned caches of type-mapping data: QName, Java class,
    // serializer factory, deserializer factory.  Filled by the (Service)
    // constructor and registered lazily on the first call (see createCall()).
    private java.util.Vector cachedSerClasses = new java.util.Vector();
    private java.util.Vector cachedSerQNames = new java.util.Vector();
    private java.util.Vector cachedSerFactories = new java.util.Vector();
    private java.util.Vector cachedDeserFactories = new java.util.Vector();

    // Descriptions of the three SOAP operations this stub exposes, indexed
    // 0 = createContacts, 1 = getContactsByStatement, 2 = updateContacts.
    static org.apache.axis.description.OperationDesc [] _operations;

    static {
        _operations = new org.apache.axis.description.OperationDesc[3];
        _initOperationDesc1();
    }
    // Builds the OperationDesc metadata (name, parameters, return type, faults)
    // for the three SOAP operations.  All operations are wrapped/literal and may
    // fault with ApiException.
    private static void _initOperationDesc1(){
        org.apache.axis.description.OperationDesc oper;
        org.apache.axis.description.ParameterDesc param;

        // Operation 0: createContacts(Contact[] contacts) -> Contact[]
        oper = new org.apache.axis.description.OperationDesc();
        oper.setName("createContacts");
        param = new org.apache.axis.description.ParameterDesc(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "contacts"), org.apache.axis.description.ParameterDesc.IN, new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "Contact"), com.google.api.ads.admanager.axis.v202108.Contact[].class, false, false);
        param.setOmittable(true);
        oper.addParameter(param);
        oper.setReturnType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "Contact"));
        oper.setReturnClass(com.google.api.ads.admanager.axis.v202108.Contact[].class);
        oper.setReturnQName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "rval"));
        oper.setStyle(org.apache.axis.constants.Style.WRAPPED);
        oper.setUse(org.apache.axis.constants.Use.LITERAL);
        oper.addFault(new org.apache.axis.description.FaultDesc(
                      new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "ApiExceptionFault"),
                      "com.google.api.ads.admanager.axis.v202108.ApiException",
                      new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "ApiException"),
                      true
                     ));
        _operations[0] = oper;

        // Operation 1: getContactsByStatement(Statement statement) -> ContactPage
        oper = new org.apache.axis.description.OperationDesc();
        oper.setName("getContactsByStatement");
        param = new org.apache.axis.description.ParameterDesc(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "statement"), org.apache.axis.description.ParameterDesc.IN, new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "Statement"), com.google.api.ads.admanager.axis.v202108.Statement.class, false, false);
        param.setOmittable(true);
        oper.addParameter(param);
        oper.setReturnType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "ContactPage"));
        oper.setReturnClass(com.google.api.ads.admanager.axis.v202108.ContactPage.class);
        oper.setReturnQName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "rval"));
        oper.setStyle(org.apache.axis.constants.Style.WRAPPED);
        oper.setUse(org.apache.axis.constants.Use.LITERAL);
        oper.addFault(new org.apache.axis.description.FaultDesc(
                      new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "ApiExceptionFault"),
                      "com.google.api.ads.admanager.axis.v202108.ApiException",
                      new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "ApiException"),
                      true
                     ));
        _operations[1] = oper;

        // Operation 2: updateContacts(Contact[] contacts) -> Contact[]
        oper = new org.apache.axis.description.OperationDesc();
        oper.setName("updateContacts");
        param = new org.apache.axis.description.ParameterDesc(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "contacts"), org.apache.axis.description.ParameterDesc.IN, new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "Contact"), com.google.api.ads.admanager.axis.v202108.Contact[].class, false, false);
        param.setOmittable(true);
        oper.addParameter(param);
        oper.setReturnType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "Contact"));
        oper.setReturnClass(com.google.api.ads.admanager.axis.v202108.Contact[].class);
        oper.setReturnQName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "rval"));
        oper.setStyle(org.apache.axis.constants.Style.WRAPPED);
        oper.setUse(org.apache.axis.constants.Use.LITERAL);
        oper.addFault(new org.apache.axis.description.FaultDesc(
                      new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "ApiExceptionFault"),
                      "com.google.api.ads.admanager.axis.v202108.ApiException",
                      new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "ApiException"),
                      true
                     ));
        _operations[2] = oper;
    }
    /** Creates a stub with no endpoint, backed by a freshly created default service. */
    public ContactServiceSoapBindingStub() throws org.apache.axis.AxisFault {
         this(null);
    }
    /** Creates a stub bound to the given endpoint URL and JAX-RPC service. */
    public ContactServiceSoapBindingStub(java.net.URL endpointURL, javax.xml.rpc.Service service) throws org.apache.axis.AxisFault {
         this(service);
         super.cachedEndpoint = endpointURL;
    }
public ContactServiceSoapBindingStub(javax.xml.rpc.Service service) throws org.apache.axis.AxisFault {
if (service == null) {
super.service = new org.apache.axis.client.Service();
} else {
super.service = service;
}
((org.apache.axis.client.Service)super.service).setTypeMappingVersion("1.2");
java.lang.Class cls;
javax.xml.namespace.QName qName;
javax.xml.namespace.QName qName2;
java.lang.Class beansf = org.apache.axis.encoding.ser.BeanSerializerFactory.class;
java.lang.Class beandf = org.apache.axis.encoding.ser.BeanDeserializerFactory.class;
java.lang.Class enumsf = org.apache.axis.encoding.ser.EnumSerializerFactory.class;
java.lang.Class enumdf = org.apache.axis.encoding.ser.EnumDeserializerFactory.class;
java.lang.Class arraysf = org.apache.axis.encoding.ser.ArraySerializerFactory.class;
java.lang.Class arraydf = org.apache.axis.encoding.ser.ArrayDeserializerFactory.class;
java.lang.Class simplesf = org.apache.axis.encoding.ser.SimpleSerializerFactory.class;
java.lang.Class simpledf = org.apache.axis.encoding.ser.SimpleDeserializerFactory.class;
java.lang.Class simplelistsf = org.apache.axis.encoding.ser.SimpleListSerializerFactory.class;
java.lang.Class simplelistdf = org.apache.axis.encoding.ser.SimpleListDeserializerFactory.class;
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "ApiError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.ApiError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "ApiException");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.ApiException.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "ApiVersionError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.ApiVersionError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "ApiVersionError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.ApiVersionErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "ApplicationException");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.ApplicationException.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "AuthenticationError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.AuthenticationError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "AuthenticationError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.AuthenticationErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "BaseContact");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.BaseContact.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "BooleanValue");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.BooleanValue.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "CollectionSizeError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.CollectionSizeError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "CollectionSizeError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.CollectionSizeErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "CommonError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.CommonError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "CommonError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.CommonErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "Contact");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.Contact.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "Contact.Status");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.ContactStatus.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "ContactError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.ContactError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "ContactError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.ContactErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "ContactPage");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.ContactPage.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "Date");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.Date.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "DateTime");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.DateTime.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "DateTimeValue");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.DateTimeValue.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "DateValue");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.DateValue.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "FeatureError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.FeatureError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "FeatureError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.FeatureErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "FieldPathElement");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.FieldPathElement.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "InternalApiError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.InternalApiError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "InternalApiError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.InternalApiErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "InvalidEmailError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.InvalidEmailError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "InvalidEmailError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.InvalidEmailErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "NotNullError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.NotNullError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "NotNullError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.NotNullErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "NumberValue");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.NumberValue.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "ObjectValue");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.ObjectValue.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "ParseError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.ParseError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "ParseError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.ParseErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "PermissionError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.PermissionError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "PermissionError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.PermissionErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "PublisherQueryLanguageContextError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.PublisherQueryLanguageContextError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "PublisherQueryLanguageContextError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.PublisherQueryLanguageContextErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "PublisherQueryLanguageSyntaxError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.PublisherQueryLanguageSyntaxError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "PublisherQueryLanguageSyntaxError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.PublisherQueryLanguageSyntaxErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "QuotaError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.QuotaError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "QuotaError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.QuotaErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "RequiredCollectionError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.RequiredCollectionError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "RequiredCollectionError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.RequiredCollectionErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "RequiredError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.RequiredError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "RequiredError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.RequiredErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "ServerError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.ServerError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "ServerError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.ServerErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "SetValue");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.SetValue.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "SoapRequestHeader");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.SoapRequestHeader.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "SoapResponseHeader");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.SoapResponseHeader.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "Statement");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.Statement.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "StatementError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.StatementError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "StatementError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.StatementErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "String_ValueMapEntry");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.String_ValueMapEntry.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "StringFormatError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.StringFormatError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "StringFormatError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.StringFormatErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "StringLengthError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.StringLengthError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "StringLengthError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.StringLengthErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "TextValue");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.TextValue.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "UniqueError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.UniqueError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "Value");
cachedSerQNames.add(qName);
cls = com.google.api.ads.admanager.axis.v202108.Value.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
}
    /**
     * Creates an Axis {@link org.apache.axis.client.Call}, copying the stub's
     * cached session/credential/endpoint/timeout/port settings and properties
     * onto it.  On the very first call (guarded by {@code firstCall()}) it also
     * registers all cached type mappings with the call.
     *
     * @return a configured {@link org.apache.axis.client.Call}
     * @throws java.rmi.RemoteException (as {@link org.apache.axis.AxisFault})
     *         if the call object cannot be created or configured
     */
    protected org.apache.axis.client.Call createCall() throws java.rmi.RemoteException {
        try {
            org.apache.axis.client.Call _call = super._createCall();
            if (super.maintainSessionSet) {
                _call.setMaintainSession(super.maintainSession);
            }
            if (super.cachedUsername != null) {
                _call.setUsername(super.cachedUsername);
            }
            if (super.cachedPassword != null) {
                _call.setPassword(super.cachedPassword);
            }
            if (super.cachedEndpoint != null) {
                _call.setTargetEndpointAddress(super.cachedEndpoint);
            }
            if (super.cachedTimeout != null) {
                _call.setTimeout(super.cachedTimeout);
            }
            if (super.cachedPortName != null) {
                _call.setPortName(super.cachedPortName);
            }
            java.util.Enumeration keys = super.cachedProperties.keys();
            while (keys.hasMoreElements()) {
                java.lang.String key = (java.lang.String) keys.nextElement();
                _call.setProperty(key, super.cachedProperties.get(key));
            }
            // All the type mapping information is registered
            // when the first call is made.
            // The type mapping information is actually registered in
            // the TypeMappingRegistry of the service, which
            // is the reason why registration is only needed for the first call.
            synchronized (this) {
                if (firstCall()) {
                    // must set encoding style before registering serializers
                    _call.setEncodingStyle(null);
                    for (int i = 0; i < cachedSerFactories.size(); ++i) {
                        java.lang.Class cls = (java.lang.Class) cachedSerClasses.get(i);
                        javax.xml.namespace.QName qName =
                                (javax.xml.namespace.QName) cachedSerQNames.get(i);
                        java.lang.Object x = cachedSerFactories.get(i);
                        // Factories may be cached either as factory Classes or
                        // as factory instances; register accordingly.
                        if (x instanceof Class) {
                            java.lang.Class sf = (java.lang.Class)
                                 cachedSerFactories.get(i);
                            java.lang.Class df = (java.lang.Class)
                                 cachedDeserFactories.get(i);
                            _call.registerTypeMapping(cls, qName, sf, df, false);
                        }
                        else if (x instanceof javax.xml.rpc.encoding.SerializerFactory) {
                            org.apache.axis.encoding.SerializerFactory sf = (org.apache.axis.encoding.SerializerFactory)
                                 cachedSerFactories.get(i);
                            org.apache.axis.encoding.DeserializerFactory df = (org.apache.axis.encoding.DeserializerFactory)
                                 cachedDeserFactories.get(i);
                            _call.registerTypeMapping(cls, qName, sf, df, false);
                        }
                    }
                }
            }
            return _call;
        }
        catch (java.lang.Throwable _t) {
            throw new org.apache.axis.AxisFault("Failure trying to get the Call object", _t);
        }
    }
    /**
     * Invokes the {@code createContacts} SOAP operation (operation index 0).
     *
     * @param contacts the contacts to send as the "contacts" parameter
     * @return the response payload, converted to {@code Contact[]} if the raw
     *         cast fails
     * @throws java.rmi.RemoteException if no endpoint is configured, the
     *         transport fails, or the fault detail carries a RemoteException
     * @throws com.google.api.ads.admanager.axis.v202108.ApiException if the
     *         fault detail carries a service-level ApiException
     */
    public com.google.api.ads.admanager.axis.v202108.Contact[] createContacts(com.google.api.ads.admanager.axis.v202108.Contact[] contacts) throws java.rmi.RemoteException, com.google.api.ads.admanager.axis.v202108.ApiException {
        if (super.cachedEndpoint == null) {
            throw new org.apache.axis.NoEndPointException();
        }
        org.apache.axis.client.Call _call = createCall();
        _call.setOperation(_operations[0]);
        _call.setUseSOAPAction(true);
        _call.setSOAPActionURI("");
        _call.setEncodingStyle(null);
        _call.setProperty(org.apache.axis.client.Call.SEND_TYPE_ATTR, Boolean.FALSE);
        _call.setProperty(org.apache.axis.AxisEngine.PROP_DOMULTIREFS, Boolean.FALSE);
        _call.setSOAPVersion(org.apache.axis.soap.SOAPConstants.SOAP11_CONSTANTS);
        _call.setOperationName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "createContacts"));

        setRequestHeaders(_call);
        setAttachments(_call);
 try {        java.lang.Object _resp = _call.invoke(new java.lang.Object[] {contacts});

        if (_resp instanceof java.rmi.RemoteException) {
            throw (java.rmi.RemoteException)_resp;
        }
        else {
            extractAttachments(_call);
            try {
                // Direct cast first; fall back to Axis type conversion.
                return (com.google.api.ads.admanager.axis.v202108.Contact[]) _resp;
            } catch (java.lang.Exception _exception) {
                return (com.google.api.ads.admanager.axis.v202108.Contact[]) org.apache.axis.utils.JavaUtils.convert(_resp, com.google.api.ads.admanager.axis.v202108.Contact[].class);
            }
        }
  } catch (org.apache.axis.AxisFault axisFaultException) {
    // Unwrap known fault detail types; rethrow the raw fault otherwise.
    if (axisFaultException.detail != null) {
        if (axisFaultException.detail instanceof java.rmi.RemoteException) {
              throw (java.rmi.RemoteException) axisFaultException.detail;
         }
        if (axisFaultException.detail instanceof com.google.api.ads.admanager.axis.v202108.ApiException) {
              throw (com.google.api.ads.admanager.axis.v202108.ApiException) axisFaultException.detail;
         }
   }
  throw axisFaultException;
}
    }
    /**
     * Invokes the {@code getContactsByStatement} SOAP operation (operation index 1).
     *
     * @param statement the filtering statement sent as the "statement" parameter
     * @return the response payload, converted to {@code ContactPage} if the raw
     *         cast fails
     * @throws java.rmi.RemoteException if no endpoint is configured, the
     *         transport fails, or the fault detail carries a RemoteException
     * @throws com.google.api.ads.admanager.axis.v202108.ApiException if the
     *         fault detail carries a service-level ApiException
     */
    public com.google.api.ads.admanager.axis.v202108.ContactPage getContactsByStatement(com.google.api.ads.admanager.axis.v202108.Statement statement) throws java.rmi.RemoteException, com.google.api.ads.admanager.axis.v202108.ApiException {
        if (super.cachedEndpoint == null) {
            throw new org.apache.axis.NoEndPointException();
        }
        org.apache.axis.client.Call _call = createCall();
        _call.setOperation(_operations[1]);
        _call.setUseSOAPAction(true);
        _call.setSOAPActionURI("");
        _call.setEncodingStyle(null);
        _call.setProperty(org.apache.axis.client.Call.SEND_TYPE_ATTR, Boolean.FALSE);
        _call.setProperty(org.apache.axis.AxisEngine.PROP_DOMULTIREFS, Boolean.FALSE);
        _call.setSOAPVersion(org.apache.axis.soap.SOAPConstants.SOAP11_CONSTANTS);
        _call.setOperationName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "getContactsByStatement"));

        setRequestHeaders(_call);
        setAttachments(_call);
 try {        java.lang.Object _resp = _call.invoke(new java.lang.Object[] {statement});

        if (_resp instanceof java.rmi.RemoteException) {
            throw (java.rmi.RemoteException)_resp;
        }
        else {
            extractAttachments(_call);
            try {
                // Direct cast first; fall back to Axis type conversion.
                return (com.google.api.ads.admanager.axis.v202108.ContactPage) _resp;
            } catch (java.lang.Exception _exception) {
                return (com.google.api.ads.admanager.axis.v202108.ContactPage) org.apache.axis.utils.JavaUtils.convert(_resp, com.google.api.ads.admanager.axis.v202108.ContactPage.class);
            }
        }
  } catch (org.apache.axis.AxisFault axisFaultException) {
    // Unwrap known fault detail types; rethrow the raw fault otherwise.
    if (axisFaultException.detail != null) {
        if (axisFaultException.detail instanceof java.rmi.RemoteException) {
              throw (java.rmi.RemoteException) axisFaultException.detail;
         }
        if (axisFaultException.detail instanceof com.google.api.ads.admanager.axis.v202108.ApiException) {
              throw (com.google.api.ads.admanager.axis.v202108.ApiException) axisFaultException.detail;
         }
   }
  throw axisFaultException;
}
    }
/**
 * Generated Apache Axis stub method for the {@code updateContacts} SOAP
 * operation: submits the given contacts to the configured endpoint and returns
 * the updated contacts as reported by the service.
 * NOTE: auto-generated code — regenerate from the WSDL rather than editing by hand.
 *
 * @param contacts the contacts to update
 * @return the contacts after the update, as returned by the service
 * @throws java.rmi.RemoteException on transport-level failure
 * @throws com.google.api.ads.admanager.axis.v202108.ApiException when the service reports an API error
 */
public com.google.api.ads.admanager.axis.v202108.Contact[] updateContacts(com.google.api.ads.admanager.axis.v202108.Contact[] contacts) throws java.rmi.RemoteException, com.google.api.ads.admanager.axis.v202108.ApiException {
// Fail fast when the stub has no endpoint configured.
if (super.cachedEndpoint == null) {
throw new org.apache.axis.NoEndPointException();
}
// Configure a SOAP 1.1 call for entry 2 of the generated operation table.
org.apache.axis.client.Call _call = createCall();
_call.setOperation(_operations[2]);
_call.setUseSOAPAction(true);
_call.setSOAPActionURI("");
_call.setEncodingStyle(null);
_call.setProperty(org.apache.axis.client.Call.SEND_TYPE_ATTR, Boolean.FALSE);
_call.setProperty(org.apache.axis.AxisEngine.PROP_DOMULTIREFS, Boolean.FALSE);
_call.setSOAPVersion(org.apache.axis.soap.SOAPConstants.SOAP11_CONSTANTS);
_call.setOperationName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "updateContacts"));
setRequestHeaders(_call);
setAttachments(_call);
try { java.lang.Object _resp = _call.invoke(new java.lang.Object[] {contacts});
if (_resp instanceof java.rmi.RemoteException) {
throw (java.rmi.RemoteException)_resp;
}
else {
extractAttachments(_call);
try {
return (com.google.api.ads.admanager.axis.v202108.Contact[]) _resp;
} catch (java.lang.Exception _exception) {
// Direct cast failed: let Axis coerce the response into the expected type.
return (com.google.api.ads.admanager.axis.v202108.Contact[]) org.apache.axis.utils.JavaUtils.convert(_resp, com.google.api.ads.admanager.axis.v202108.Contact[].class);
}
}
} catch (org.apache.axis.AxisFault axisFaultException) {
// Unwrap the fault's cause into one of the declared exception types when possible.
if (axisFaultException.detail != null) {
if (axisFaultException.detail instanceof java.rmi.RemoteException) {
throw (java.rmi.RemoteException) axisFaultException.detail;
}
if (axisFaultException.detail instanceof com.google.api.ads.admanager.axis.v202108.ApiException) {
throw (com.google.api.ads.admanager.axis.v202108.ApiException) axisFaultException.detail;
}
}
throw axisFaultException;
}
}
}
| |
/*
Copyright 2014 Immutables Authors and Contributors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.immutables.generator;
import com.google.common.base.Ascii;
import com.google.common.base.CaseFormat;
import com.google.common.base.CharMatcher;
import com.google.common.base.Function;
import com.google.common.base.Preconditions;
import com.google.common.base.Splitter;
import java.util.List;
import static com.google.common.base.Preconditions.*;
/**
* Converter-like function to apply or extract naming, derived from input.
*/
public abstract class Naming implements Function<String, String> {
// Instantiable only via the nested implementations below.
private Naming() {}
// Sentinel returned by detect() when the identifier does not match this naming.
private static final String NOT_DETECTED = "";
// Placeholder character that stands for the input name in templates.
private static final String NAME_PLACEHOLDER = "*";
private static final Splitter TEMPLATE_SPLITTER = Splitter.on(NAME_PLACEHOLDER);
// Accepts ASCII java-identifier characters and the '*' placeholder only.
private static final CharMatcher TEMPLATE_CHAR_MATCHER =
CharMatcher.is('_')
.or(CharMatcher.is(NAME_PLACEHOLDER.charAt(0)))
.or(CharMatcher.inRange('a', 'z'))
.or(CharMatcher.inRange('A', 'Z'))
.or(CharMatcher.inRange('0', '9'))
.precomputed();
/**
 * Applies naming to input identifier, converting it to desired naming.
 * @param input the input identifier
 * @return applied naming
 */
@Override
public abstract String apply(String input);
/**
 * Tries to extract source identifier name out of already applied naming.
 * @param identifier to detect naming from
 * @return empty string if nothing detected
 */
public abstract String detect(String identifier);
/**
 * Checks if it's identity naming.
 * @see #identity()
 * @return true, if is identity naming
 */
public abstract boolean isIdentity();
/**
 * Checks if this is a constant naming.
 * Constant (verbatim) namings do not use the supplied input name as a base.
 * Consider, for example, the constant factory method naming "from",
 * contrary to the factory method naming "newMyType", which uses "MyType" as an input and applies the "new" prefix.
 * @return true, if is constant
 */
public abstract boolean isConstant();
/**
 * Returns a non-constant naming, which may be this instance. Sometimes the context requires that a
 * naming be non-constant, otherwise names would clash in a shared identifier scope. If this naming
 * is constant, it is turned into a corresponding prefix (or suffix) naming.
 * @param preference preference for prefix or suffix naming
 * @return non-constant naming template or {@code this} if already non-constant
 */
public abstract Naming requireNonConstant(Preference preference);
/** Preferred placement of the input name when a constant naming is converted to a non-constant one. */
public enum Preference {
PREFIX, SUFFIX
}
/** Capitalization adjustment applied to an input name when embedding it into a template. */
public enum Usage {
INDIFFERENT,
CAPITALIZED,
// funny name
LOWERIZED;
public String apply(String input) {
if (!input.isEmpty()) {
if (this == CAPITALIZED && !Ascii.isUpperCase(input.charAt(0))) {
return CaseFormat.LOWER_CAMEL.to(CaseFormat.UPPER_CAMEL, input);
}
if (this == LOWERIZED && !Ascii.isLowerCase(input.charAt(0))) {
return CaseFormat.UPPER_CAMEL.to(CaseFormat.LOWER_CAMEL, input);
}
}
return input;
}
}
/**
 * Naming that repeats the input name unchanged.
 * @return identity naming
 */
public static Naming identity() {
return IDENTITY_NAMING;
}
/**
 * @param template template string
 * @return naming that could be applied or detected following template
 */
public static Naming from(String template) {
if (template.isEmpty() || template.equals(NAME_PLACEHOLDER)) {
return IDENTITY_NAMING;
}
checkArgument(TEMPLATE_CHAR_MATCHER.matchesAllOf(template),
"Naming template [%s] contains unsupported characters, only java identifier chars and '*' placeholder are allowed (ASCII only)",
template);
List<String> parts = TEMPLATE_SPLITTER.splitToList(template);
checkArgument(parts.size() <= 2,
"Naming template [%s] contains more than one '*' placeholder, which is unsupported",
template);
// A single part means no '*' placeholder: the template itself is the constant name.
return parts.size() == 1
? new ConstantNaming(template)
: new PrefixSuffixNaming(parts.get(0), parts.get(1));
}
/**
 * Bulk variant of {@link #from(String)}.
 * @param templates template strings
 * @return namings corresponding positionally to the given templates
 */
public static Naming[] fromAll(String... templates) {
Naming[] namings = new Naming[templates.length];
for (int i = 0; i < templates.length; i++) {
namings[i] = from(templates[i]);
}
return namings;
}
private static final Naming IDENTITY_NAMING = new Naming() {
@Override
public String apply(String input) {
return input;
}
@Override
public String detect(String identifier) {
return identifier;
}
@Override
public boolean isIdentity() {
return true;
}
@Override
public boolean isConstant() {
return false;
}
@Override
public Naming requireNonConstant(Preference preference) {
return this;
}
@Override
public String toString() {
return NAME_PLACEHOLDER;
}
};
/** Naming that ignores the input and always yields a fixed name (template without '*'). */
private static class ConstantNaming extends Naming {
final String name;
ConstantNaming(String name) {
this.name = name;
}
@Override
public String apply(String input) {
return name;
}
@Override
public String detect(String identifier) {
return identifier.equals(name) ? name : NOT_DETECTED;
}
@Override
public boolean isIdentity() {
return false;
}
@Override
public boolean isConstant() {
return true;
}
@Override
public Naming requireNonConstant(Preference preference) {
// Reuse the constant name as a prefix or suffix so generated names stay recognizable.
switch (preference) {
case SUFFIX:
return new PrefixSuffixNaming("", Usage.CAPITALIZED.apply(name));
case PREFIX:
default:
return new PrefixSuffixNaming(name, "");
}
}
@Override
public String toString() {
return name;
}
}
/** Naming of the form {@code prefix*suffix}, where '*' is replaced with the (re-capitalized) input. */
private static class PrefixSuffixNaming extends Naming {
final String prefix;
final String suffix;
final int lengthsOfPrefixAndSuffix;
PrefixSuffixNaming(String prefix, String suffix) {
this.prefix = prefix;
this.suffix = suffix;
this.lengthsOfPrefixAndSuffix = suffix.length() + prefix.length();
// Both parts empty would be the identity template, handled in from(String).
Preconditions.checkArgument(lengthsOfPrefixAndSuffix > 0);
}
@Override
public String apply(String input) {
// Capitalize the input only when it follows a non-empty prefix (camel-case joining).
Usage resultFormat = prefix.isEmpty()
? Usage.INDIFFERENT
: Usage.CAPITALIZED;
return prefix + resultFormat.apply(input) + suffix;
}
@Override
public String detect(String identifier) {
// Identifier must be strictly longer than prefix + suffix to contain a non-empty name.
if (identifier.length() <= lengthsOfPrefixAndSuffix) {
return NOT_DETECTED;
}
boolean prefixMatches = prefix.isEmpty() ||
(identifier.startsWith(prefix) && Ascii.isUpperCase(identifier.charAt(prefix.length())));
boolean suffixMatches = suffix.isEmpty() || identifier.endsWith(suffix);
if (prefixMatches && suffixMatches) {
String detected = identifier.substring(prefix.length(), identifier.length() - suffix.length());
// Undo the capitalization performed by apply() when a prefix was present.
return prefix.isEmpty()
? detected
: CaseFormat.UPPER_CAMEL.to(CaseFormat.LOWER_CAMEL, detected);
}
return NOT_DETECTED;
}
@Override
public boolean isIdentity() {
return false;
}
@Override
public boolean isConstant() {
return false;
}
@Override
public Naming requireNonConstant(Preference preference) {
return this;
}
@Override
public String toString() {
return prefix + NAME_PLACEHOLDER + suffix;
}
}
}
| |
package in.rusty.tomatobrew.models;
import java.util.HashMap;
/**
 * Data model of a movie as returned by a Rotten Tomatoes-style JSON API.
 * <p>
 * Field names deliberately mirror the API's snake_case JSON keys so that
 * reflection-based mappers can bind them without explicit annotations;
 * do not rename the fields without updating the (de)serialization setup.
 */
public class Movie {
    private String id;
    private String title;
    private String year;
    private String mpaa_rating;
    private String runtime;
    private String critics_consensus;
    private String synopsis;
    private String[] genre;
    private String studio;
    private Cast[] abridged_cast;
    private Cast[] abridged_directors;
    private Rating ratings;
    private HashMap<String, String> links;
    private HashMap<String, String> posters;
    private HashMap<String, String> release_dates;
    private HashMap<String, String> alternate_ids;

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getTitle() {
        return title;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    public String getYear() {
        return year;
    }

    public void setYear(String year) {
        this.year = year;
    }

    public String getMpaa_rating() {
        return mpaa_rating;
    }

    public void setMpaa_rating(String mpaa_rating) {
        this.mpaa_rating = mpaa_rating;
    }

    public String getRuntime() {
        return runtime;
    }

    public void setRuntime(String runtime) {
        this.runtime = runtime;
    }

    public String getCritics_consensus() {
        return critics_consensus;
    }

    public void setCritics_consensus(String critics_consensus) {
        this.critics_consensus = critics_consensus;
    }

    public String getSynopsis() {
        return synopsis;
    }

    public void setSynopsis(String synopsis) {
        this.synopsis = synopsis;
    }

    public HashMap<String, String> getLinks() {
        return links;
    }

    public void setLinks(HashMap<String, String> links) {
        this.links = links;
    }

    /**
     * Alias for {@link #getRatings()}, kept for backward compatibility with
     * existing callers.
     * @deprecated use {@link #getRatings()} instead
     */
    @Deprecated
    public Rating getRating() {
        return getRatings();
    }

    /**
     * Alias for {@link #setRatings(Rating)}, kept for backward compatibility
     * with existing callers.
     * @deprecated use {@link #setRatings(Rating)} instead
     */
    @Deprecated
    public void setRating(Rating rating) {
        setRatings(rating);
    }

    public HashMap<String, String> getPosters() {
        return posters;
    }

    public void setPosters(HashMap<String, String> posters) {
        this.posters = posters;
    }

    public HashMap<String, String> getRelease_dates() {
        return release_dates;
    }

    public void setRelease_dates(HashMap<String, String> release_dates) {
        this.release_dates = release_dates;
    }

    public HashMap<String, String> getAlternate_ids() {
        return alternate_ids;
    }

    public void setAlternate_ids(HashMap<String, String> alternate_ids) {
        this.alternate_ids = alternate_ids;
    }

    public String[] getGenre() {
        return genre;
    }

    public void setGenre(String[] genre) {
        this.genre = genre;
    }

    public String getStudio() {
        return studio;
    }

    public void setStudio(String studio) {
        this.studio = studio;
    }

    public Cast[] getAbridged_cast() {
        return abridged_cast;
    }

    public void setAbridged_cast(Cast[] abridged_cast) {
        this.abridged_cast = abridged_cast;
    }

    public Cast[] getAbridged_directors() {
        return abridged_directors;
    }

    public void setAbridged_directors(Cast[] abridged_directors) {
        this.abridged_directors = abridged_directors;
    }

    public Rating getRatings() {
        return ratings;
    }

    public void setRatings(Rating ratings) {
        this.ratings = ratings;
    }

    @Override
    public String toString() {
        return "Movie : \"" + title + "\"";
    }

    /**
     * Critic and audience rating details for a movie.
     * <p>
     * Declared {@code static} so reflection-based JSON mappers can instantiate
     * it without an enclosing {@link Movie} instance — per the Gson user guide,
     * pure (non-static) inner classes cannot be deserialized automatically.
     */
    static class Rating {
        private String critics_rating;
        private String critics_score;
        private String audience_rating;
        private String audience_score;

        public String getCritics_rating() {
            return critics_rating;
        }

        public void setCritics_rating(String critics_rating) {
            this.critics_rating = critics_rating;
        }

        public String getCritics_score() {
            return critics_score;
        }

        public void setCritics_score(String critics_score) {
            this.critics_score = critics_score;
        }

        public String getAudience_rating() {
            return audience_rating;
        }

        public void setAudience_rating(String audience_rating) {
            this.audience_rating = audience_rating;
        }

        public String getAudience_score() {
            return audience_score;
        }

        public void setAudience_score(String audience_score) {
            this.audience_score = audience_score;
        }
    }
}
| |
/*
* Copyright 2014 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License, version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package io.netty.handler.codec.http2;
import io.netty.util.internal.PlatformDependent;
import io.netty.util.internal.SuppressJava6Requirement;
import io.netty.util.internal.ThrowableUtil;
import io.netty.util.internal.UnstableApi;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import static io.netty.handler.codec.http2.Http2CodecUtil.CONNECTION_STREAM_ID;
import static io.netty.util.internal.ObjectUtil.checkNotNull;
/**
* Exception thrown when an HTTP/2 error was encountered.
*/
@UnstableApi
public class Http2Exception extends Exception {
private static final long serialVersionUID = -6941186345430164209L;
private final Http2Error error;
private final ShutdownHint shutdownHint;
public Http2Exception(Http2Error error) {
this(error, ShutdownHint.HARD_SHUTDOWN);
}
public Http2Exception(Http2Error error, ShutdownHint shutdownHint) {
this.error = checkNotNull(error, "error");
this.shutdownHint = checkNotNull(shutdownHint, "shutdownHint");
}
public Http2Exception(Http2Error error, String message) {
this(error, message, ShutdownHint.HARD_SHUTDOWN);
}
public Http2Exception(Http2Error error, String message, ShutdownHint shutdownHint) {
super(message);
this.error = checkNotNull(error, "error");
this.shutdownHint = checkNotNull(shutdownHint, "shutdownHint");
}
public Http2Exception(Http2Error error, String message, Throwable cause) {
this(error, message, cause, ShutdownHint.HARD_SHUTDOWN);
}
public Http2Exception(Http2Error error, String message, Throwable cause, ShutdownHint shutdownHint) {
super(message, cause);
this.error = checkNotNull(error, "error");
this.shutdownHint = checkNotNull(shutdownHint, "shutdownHint");
}
// Creates a shared, stack-trace-free exception instance for reuse as a static constant;
// ThrowableUtil installs a synthetic stack frame pointing at the given class/method.
static Http2Exception newStatic(Http2Error error, String message, ShutdownHint shutdownHint,
Class<?> clazz, String method) {
final Http2Exception exception;
if (PlatformDependent.javaVersion() >= 7) {
// On Java 7+ use the 4-arg Throwable constructor via the private ctor below.
exception = new StacklessHttp2Exception(error, message, shutdownHint, true);
} else {
exception = new StacklessHttp2Exception(error, message, shutdownHint);
}
return ThrowableUtil.unknownStackTrace(exception, clazz, method);
}
@SuppressJava6Requirement(reason = "uses Java 7+ Exception.<init>(String, Throwable, boolean, boolean)" +
" but is guarded by version checks")
private Http2Exception(Http2Error error, String message, ShutdownHint shutdownHint, boolean shared) {
// Suppression is disabled because the instance is shared; the stack trace stays
// writable so newStatic() can set a synthetic one.
super(message, null, false, true);
assert shared;
this.error = checkNotNull(error, "error");
this.shutdownHint = checkNotNull(shutdownHint, "shutdownHint");
}
public Http2Error error() {
return error;
}
/**
 * Provide a hint as to what type of shutdown should be executed. Note this hint may be ignored.
 */
public ShutdownHint shutdownHint() {
return shutdownHint;
}
/**
 * Use if an error has occurred which can not be isolated to a single stream, but instead applies
 * to the entire connection.
 * @param error The type of error as defined by the HTTP/2 specification.
 * @param fmt String with the content and format for the additional debug data.
 * @param args Objects which fit into the format defined by {@code fmt}.
 * @return An exception which can be translated into an HTTP/2 error.
 */
public static Http2Exception connectionError(Http2Error error, String fmt, Object... args) {
return new Http2Exception(error, String.format(fmt, args));
}
/**
 * Use if an error has occurred which can not be isolated to a single stream, but instead applies
 * to the entire connection.
 * @param error The type of error as defined by the HTTP/2 specification.
 * @param cause The object which caused the error.
 * @param fmt String with the content and format for the additional debug data.
 * @param args Objects which fit into the format defined by {@code fmt}.
 * @return An exception which can be translated into an HTTP/2 error.
 */
public static Http2Exception connectionError(Http2Error error, Throwable cause,
String fmt, Object... args) {
return new Http2Exception(error, String.format(fmt, args), cause);
}
/**
 * Use if an error has occurred which can not be isolated to a single stream, but instead applies
 * to the entire connection.
 * @param error The type of error as defined by the HTTP/2 specification.
 * @param fmt String with the content and format for the additional debug data.
 * @param args Objects which fit into the format defined by {@code fmt}.
 * @return An exception which can be translated into an HTTP/2 error.
 */
public static Http2Exception closedStreamError(Http2Error error, String fmt, Object... args) {
return new ClosedStreamCreationException(error, String.format(fmt, args));
}
/**
 * Use if an error which can be isolated to a single stream has occurred. If the {@code id} is not
 * {@link Http2CodecUtil#CONNECTION_STREAM_ID} then a {@link Http2Exception.StreamException} will be returned.
 * Otherwise the error is considered a connection error and a {@link Http2Exception} is returned.
 * @param id The stream id for which the error is isolated to.
 * @param error The type of error as defined by the HTTP/2 specification.
 * @param fmt String with the content and format for the additional debug data.
 * @param args Objects which fit into the format defined by {@code fmt}.
 * @return If the {@code id} is not
 * {@link Http2CodecUtil#CONNECTION_STREAM_ID} then a {@link Http2Exception.StreamException} will be returned.
 * Otherwise the error is considered a connection error and a {@link Http2Exception} is returned.
 */
public static Http2Exception streamError(int id, Http2Error error, String fmt, Object... args) {
return CONNECTION_STREAM_ID == id ?
Http2Exception.connectionError(error, fmt, args) :
new StreamException(id, error, String.format(fmt, args));
}
/**
 * Use if an error which can be isolated to a single stream has occurred. If the {@code id} is not
 * {@link Http2CodecUtil#CONNECTION_STREAM_ID} then a {@link Http2Exception.StreamException} will be returned.
 * Otherwise the error is considered a connection error and a {@link Http2Exception} is returned.
 * @param id The stream id for which the error is isolated to.
 * @param error The type of error as defined by the HTTP/2 specification.
 * @param cause The object which caused the error.
 * @param fmt String with the content and format for the additional debug data.
 * @param args Objects which fit into the format defined by {@code fmt}.
 * @return If the {@code id} is not
 * {@link Http2CodecUtil#CONNECTION_STREAM_ID} then a {@link Http2Exception.StreamException} will be returned.
 * Otherwise the error is considered a connection error and a {@link Http2Exception} is returned.
 */
public static Http2Exception streamError(int id, Http2Error error, Throwable cause,
String fmt, Object... args) {
return CONNECTION_STREAM_ID == id ?
Http2Exception.connectionError(error, cause, fmt, args) :
new StreamException(id, error, String.format(fmt, args), cause);
}
/**
 * A specific stream error resulting from failing to decode headers that exceeds the max header size list.
 * If the {@code id} is not {@link Http2CodecUtil#CONNECTION_STREAM_ID} then a
 * {@link Http2Exception.StreamException} will be returned. Otherwise the error is considered a
 * connection error and a {@link Http2Exception} is returned.
 * @param id The stream id for which the error is isolated to.
 * @param error The type of error as defined by the HTTP/2 specification.
 * @param onDecode Whether this error was caught while decoding headers
 * @param fmt String with the content and format for the additional debug data.
 * @param args Objects which fit into the format defined by {@code fmt}.
 * @return If the {@code id} is not
 * {@link Http2CodecUtil#CONNECTION_STREAM_ID} then a {@link HeaderListSizeException}
 * will be returned. Otherwise the error is considered a connection error and a {@link Http2Exception} is
 * returned.
 */
public static Http2Exception headerListSizeError(int id, Http2Error error, boolean onDecode,
String fmt, Object... args) {
// NOTE: onDecode is only carried on the stream-level exception; connection-level
// errors use the plain Http2Exception form.
return CONNECTION_STREAM_ID == id ?
Http2Exception.connectionError(error, fmt, args) :
new HeaderListSizeException(id, error, String.format(fmt, args), onDecode);
}
/**
 * Check if an exception is isolated to a single stream or the entire connection.
 * @param e The exception to check.
 * @return {@code true} if {@code e} is an instance of {@link Http2Exception.StreamException}.
 * {@code false} otherwise.
 */
public static boolean isStreamError(Http2Exception e) {
return e instanceof StreamException;
}
/**
 * Get the stream id associated with an exception.
 * @param e The exception to get the stream id for.
 * @return {@link Http2CodecUtil#CONNECTION_STREAM_ID} if {@code e} is a connection error.
 * Otherwise the stream id associated with the stream error.
 */
public static int streamId(Http2Exception e) {
return isStreamError(e) ? ((StreamException) e).streamId() : CONNECTION_STREAM_ID;
}
/**
 * Provides a hint as to if shutdown is justified, what type of shutdown should be executed.
 */
public enum ShutdownHint {
/**
 * Do not shutdown the underlying channel.
 */
NO_SHUTDOWN,
/**
 * Attempt to execute a "graceful" shutdown. The definition of "graceful" is left to the implementation.
 * An example of "graceful" would be wait for some amount of time until all active streams are closed.
 */
GRACEFUL_SHUTDOWN,
/**
 * Close the channel immediately after a {@code GOAWAY} is sent.
 */
HARD_SHUTDOWN
}
/**
 * Used when a stream creation attempt fails but may be because the stream was previously closed.
 */
public static final class ClosedStreamCreationException extends Http2Exception {
private static final long serialVersionUID = -6746542974372246206L;
public ClosedStreamCreationException(Http2Error error) {
super(error);
}
public ClosedStreamCreationException(Http2Error error, String message) {
super(error, message);
}
public ClosedStreamCreationException(Http2Error error, String message, Throwable cause) {
super(error, message, cause);
}
}
/**
 * Represents an exception that can be isolated to a single stream (as opposed to the entire connection).
 */
public static class StreamException extends Http2Exception {
private static final long serialVersionUID = 602472544416984384L;
private final int streamId;
StreamException(int streamId, Http2Error error, String message) {
super(error, message, ShutdownHint.NO_SHUTDOWN);
this.streamId = streamId;
}
StreamException(int streamId, Http2Error error, String message, Throwable cause) {
super(error, message, cause, ShutdownHint.NO_SHUTDOWN);
this.streamId = streamId;
}
public int streamId() {
return streamId;
}
}
public static final class HeaderListSizeException extends StreamException {
private static final long serialVersionUID = -8807603212183882637L;
// Whether the error was caught while decoding (vs. encoding) headers.
private final boolean decode;
HeaderListSizeException(int streamId, Http2Error error, String message, boolean decode) {
super(streamId, error, message);
this.decode = decode;
}
public boolean duringDecode() {
return decode;
}
}
/**
 * Provides the ability to handle multiple stream exceptions with one throw statement.
 */
public static final class CompositeStreamException extends Http2Exception implements Iterable<StreamException> {
private static final long serialVersionUID = 7091134858213711015L;
private final List<StreamException> exceptions;
public CompositeStreamException(Http2Error error, int initialCapacity) {
super(error, ShutdownHint.NO_SHUTDOWN);
exceptions = new ArrayList<StreamException>(initialCapacity);
}
public void add(StreamException e) {
exceptions.add(e);
}
@Override
public Iterator<StreamException> iterator() {
return exceptions.iterator();
}
}
private static final class StacklessHttp2Exception extends Http2Exception {
private static final long serialVersionUID = 1077888485687219443L;
StacklessHttp2Exception(Http2Error error, String message, ShutdownHint shutdownHint) {
super(error, message, shutdownHint);
}
StacklessHttp2Exception(Http2Error error, String message, ShutdownHint shutdownHint, boolean shared) {
super(error, message, shutdownHint, shared);
}
// Override fillInStackTrace() so we don't populate the backtrace via a native call
// and so avoid leaking the Classloader.
@Override
public Throwable fillInStackTrace() {
return this;
}
}
}
| |
/*
* JBoss, Home of Professional Open Source
* Copyright 2011, Red Hat, Inc. and/or its affiliates, and individual contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hibernate.validator.test.internal.metadata.descriptor;
import java.util.List;
import java.util.Set;
import javax.validation.ConstraintDeclarationException;
import javax.validation.constraints.NotNull;
import javax.validation.metadata.ConstraintDescriptor;
import javax.validation.metadata.MethodDescriptor;
import javax.validation.metadata.ParameterDescriptor;
import javax.validation.metadata.Scope;
import org.joda.time.DateMidnight;
import org.testng.annotations.Test;
import org.hibernate.validator.test.internal.metadata.Customer;
import org.hibernate.validator.test.internal.metadata.CustomerRepository;
import org.hibernate.validator.test.internal.metadata.CustomerRepositoryExt;
import org.hibernate.validator.test.internal.metadata.CustomerRepositoryExt.CustomerExtension;
import org.hibernate.validator.test.internal.metadata.IllegalCustomerRepositoryExt;
import org.hibernate.validator.testutil.TestForIssue;
import static org.fest.assertions.Assertions.assertThat;
import static org.hibernate.validator.internal.util.Contracts.assertNotNull;
import static org.hibernate.validator.testutil.ValidatorUtil.getMethodDescriptor;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
/**
* @author Gunnar Morling
*/
public class MethodDescriptorTest {
@Test
public void testGetMethod() throws Exception {
    // The descriptor must report the simple name of the represented method.
    MethodDescriptor descriptor = getMethodDescriptor(CustomerRepositoryExt.class, "foo");
    assertEquals(descriptor.getName(), "foo");
}
@Test
public void testIsCascaded() {
    // Return value of "foo" is cascaded, that of "baz" is not.
    assertTrue(getMethodDescriptor(CustomerRepositoryExt.class, "foo").getReturnValueDescriptor().isCascaded());
    assertFalse(getMethodDescriptor(CustomerRepositoryExt.class, "baz").getReturnValueDescriptor().isCascaded());
}
@Test
public void testHasConstraints() {
    MethodDescriptor unconstrained = getMethodDescriptor(CustomerRepository.class, "bar");
    assertFalse(unconstrained.hasConstraints(), "Method has no constraints.");
    MethodDescriptor crossParameter = getMethodDescriptor(
            CustomerRepository.class, "methodWithCrossParameterConstraint", DateMidnight.class, DateMidnight.class);
    assertFalse(crossParameter.hasConstraints(),
            "Cross-parameter constraints shouldn't be reported on MethodDescriptor.");
}
@Test
public void testGetElementClass() {
    // Return type as declared in the base type...
    assertEquals(getMethodDescriptor(CustomerRepository.class, "bar").getElementClass(), Customer.class);
    // ...and as narrowed in the derived type (covariant return type).
    assertEquals(getMethodDescriptor(CustomerRepositoryExt.class, "bar").getElementClass(), CustomerExtension.class);
}
@Test
public void testGetConstraintDescriptors() {
    MethodDescriptor plain = getMethodDescriptor(CustomerRepository.class, "bar");
    assertTrue(plain.getConstraintDescriptors().isEmpty());
    MethodDescriptor crossParameter = getMethodDescriptor(
            CustomerRepository.class, "methodWithCrossParameterConstraint", DateMidnight.class, DateMidnight.class);
    assertTrue(crossParameter.getConstraintDescriptors().isEmpty());
}
@Test
public void testFindConstraintsMatchingGroups() {
    MethodDescriptor descriptor = getMethodDescriptor(
            CustomerRepositoryExt.class, "methodWithCrossParameterConstraint", DateMidnight.class, DateMidnight.class);
    assertTrue(descriptor.findConstraints().getConstraintDescriptors().isEmpty());
}
@Test
@TestForIssue(jiraKey = "HV-443")
public void testFindParameterConstraintLookingAt() {
    ParameterDescriptor secondParameter = getMethodDescriptor(
            CustomerRepositoryExt.class, "createCustomer", CharSequence.class, String.class)
            .getParameterDescriptors().get(1);
    // No constraint visible when restricted to the element itself...
    assertEquals(
            secondParameter.findConstraints().lookingAt(Scope.LOCAL_ELEMENT).getConstraintDescriptors().size(), 0);
    // ...but exactly one @NotNull shows up across the hierarchy.
    Set<ConstraintDescriptor<?>> fromHierarchy =
            secondParameter.findConstraints().lookingAt(Scope.HIERARCHY).getConstraintDescriptors();
    assertEquals(fromHierarchy.size(), 1);
    assertEquals(fromHierarchy.iterator().next().getAnnotation().annotationType(), NotNull.class);
}
@Test(expectedExceptions = ConstraintDeclarationException.class, expectedExceptionsMessageRegExp = "HV000151.*")
@TestForIssue(jiraKey = "HV-683")
public void testGetMethodDescriptorForIllegalyConfiguredMethodCausesConstraintDeclarationException() {
    // Retrieving the descriptor triggers metadata creation, which must fail here.
    getMethodDescriptor(
            IllegalCustomerRepositoryExt.class,
            "zap",
            int.class
    );
}
@Test
public void testGetParameterConstraints() {
    List<ParameterDescriptor> parameters = getMethodDescriptor(
            CustomerRepositoryExt.class, "createCustomer", CharSequence.class, String.class)
            .getParameterDescriptors();
    assertNotNull(parameters);
    assertEquals(parameters.size(), 2);
    ParameterDescriptor first = parameters.get(0);
    assertEquals(first.getElementClass(), CharSequence.class);
    assertFalse(first.hasConstraints());
    ParameterDescriptor second = parameters.get(1);
    assertEquals(second.getElementClass(), String.class);
    assertTrue(second.hasConstraints());
}
@Test
public void testGetParameterConstraintsForParameterlessMethod() {
    // A method without parameters yields an empty (but non-null) descriptor list.
    List<ParameterDescriptor> parameters =
            getMethodDescriptor(CustomerRepositoryExt.class, "baz").getParameterDescriptors();
    assertNotNull(parameters);
    assertEquals(parameters.size(), 0);
}
@Test
public void testGetReturnValueDescriptorForVoidMethod() {
    // Even a void method exposes a non-null return value descriptor.
    assertThat(
            getMethodDescriptor(CustomerRepositoryExt.class, "saveCustomer", Customer.class)
                    .getReturnValueDescriptor()
    ).isNotNull();
}
@Test
public void testIsReturnValueConstrainedForConstrainedMethod() {
    boolean constrained =
            getMethodDescriptor(CustomerRepositoryExt.class, "baz").hasConstrainedReturnValue();
    assertThat(constrained).isTrue();
}
@Test
public void testIsReturnValueConstrainedForCascadedMethod() {
MethodDescriptor methodDescriptor = getMethodDescriptor(
CustomerRepositoryExt.class,
"foo"
);
assertThat( methodDescriptor.hasConstrainedReturnValue() ).isTrue();
}
@Test
public void testIsReturnValueConstrainedForParameterConstrainedMethod() {
MethodDescriptor methodDescriptor = getMethodDescriptor(
CustomerRepositoryExt.class,
"createCustomer",
CharSequence.class,
String.class
);
assertThat( methodDescriptor.hasConstrainedReturnValue() ).isFalse();
}
@Test
public void testIsReturnValueConstrainedForVoidMethod() {
MethodDescriptor methodDescriptor = getMethodDescriptor(
CustomerRepositoryExt.class,
"saveCustomer",
Customer.class
);
assertThat( methodDescriptor.hasConstrainedReturnValue() ).isFalse();
}
@Test
public void testAreParametersConstrainedForParameterConstrainedMethod() {
MethodDescriptor methodDescriptor = getMethodDescriptor(
CustomerRepositoryExt.class,
"createCustomer",
CharSequence.class,
String.class
);
assertThat( methodDescriptor.hasConstrainedParameters() ).isTrue();
}
@Test
public void testAreParametersConstrainedForParameterCascadedMethod() {
MethodDescriptor methodDescriptor = getMethodDescriptor(
CustomerRepositoryExt.class,
"saveCustomer",
Customer.class
);
assertThat( methodDescriptor.hasConstrainedParameters() ).isTrue();
}
@Test
public void testAreParametersConstrainedForCrossParameterConstrainedMethod() {
MethodDescriptor methodDescriptor = getMethodDescriptor(
CustomerRepositoryExt.class,
"methodWithCrossParameterConstraint", DateMidnight.class, DateMidnight.class
);
assertThat( methodDescriptor.hasConstrainedParameters() ).isTrue();
}
@Test
public void testAreParametersConstrainedForNonParameterConstrainedMethod() {
MethodDescriptor methodDescriptor = getMethodDescriptor(
CustomerRepositoryExt.class,
"zip",
int.class
);
assertThat( methodDescriptor.hasConstrainedParameters() ).isFalse();
}
@Test
public void testAreParametersConstrainedForParameterlessMethod() {
MethodDescriptor methodDescriptor = getMethodDescriptor(
CustomerRepositoryExt.class,
"zip"
);
assertThat( methodDescriptor.hasConstrainedParameters() ).isFalse();
}
}
| |
/*
* FireTVDiscoveryProviderTest
* Connect SDK
*
* Copyright (c) 2015 LG Electronics.
* Created by Oleksii Frolov on 08 Jul 2015
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.connectsdk.discovery.provider;
import com.amazon.whisperplay.fling.media.controller.DiscoveryController;
import com.amazon.whisperplay.fling.media.controller.RemoteMediaPlayer;
import com.connectsdk.discovery.DiscoveryProvider;
import com.connectsdk.discovery.DiscoveryProviderListener;
import com.connectsdk.service.FireTVService;
import com.connectsdk.service.command.ServiceCommandError;
import com.connectsdk.service.config.ServiceDescription;
import junit.framework.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Mockito;
import org.robolectric.Robolectric;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.annotation.Config;
/**
 * Unit tests for {@code FireTVDiscoveryProvider}.
 *
 * <p>The provider under test is constructed with a mocked {@link DiscoveryController},
 * so these tests exercise start/stop bookkeeping, listener management and the
 * {@code provider.fireTVListener} callbacks without touching the real Fling SDK.</p>
 *
 * <p>Fix over previous revision: {@code testDiscoveryNullPlayerDiscovered} failed with a
 * copy-pasted message referring to {@code playerLost}; misplaced given/when/then labels
 * were also corrected.</p>
 */
@RunWith(RobolectricTestRunner.class)
@Config(manifest=Config.NONE)
public class FireTVDiscoveryProviderTest {

    private FireTVDiscoveryProvider provider;

    private DiscoveryController controller;

    @Before
    public void setUp() {
        // Inject a mocked controller so no real discovery is ever started.
        controller = Mockito.mock(DiscoveryController.class);
        provider = new FireTVDiscoveryProvider(controller);
    }

    @Test
    public void testStartWhenNotRunning() {
        provider.start();
        Mockito.verify(controller, Mockito.times(1)).start(provider.fireTVListener);
    }

    @Test
    public void testStartWhenRunning() {
        // A second start() while already running must not start the controller again.
        provider.start();
        provider.start();
        Mockito.verify(controller, Mockito.times(1)).start(provider.fireTVListener);
    }

    @Test
    public void testStopWhenNotRunning() {
        // stop() on a provider that was never started must not touch the controller.
        provider.stop();
        Mockito.verify(controller, Mockito.times(0)).stop();
    }

    @Test
    public void testStopWhenRunning() {
        provider.start();
        provider.stop();
        Mockito.verify(controller, Mockito.times(1)).stop();
    }

    @Test
    public void testStopWithFoundServices() {
        // Stopping a provider that was never started must still clear previously
        // found services and notify listeners of their removal.
        DiscoveryProviderListener listener = Mockito.mock(DiscoveryProviderListener.class);
        provider.addListener(listener);
        final ServiceDescription service1 = Mockito.mock(ServiceDescription.class);
        provider.foundServices.put("service1", service1);
        final ServiceDescription service2 = Mockito.mock(ServiceDescription.class);
        provider.foundServices.put("service2", service2);
        provider.stop();
        Mockito.verify(controller, Mockito.times(0)).stop();
        Mockito.verify(listener).onServiceRemoved(provider, service1);
        Mockito.verify(listener).onServiceRemoved(provider, service2);
        Assert.assertTrue(provider.foundServices.isEmpty());
    }

    @Test
    public void testAddListener() {
        // given
        DiscoveryProviderListener listener = Mockito.mock(DiscoveryProviderListener.class);
        Assert.assertEquals(0, provider.serviceListeners.size());
        // when
        provider.addListener(listener);
        // then
        Assert.assertEquals(1, provider.serviceListeners.size());
    }

    @Test
    public void testRemoveListener() {
        // given
        DiscoveryProviderListener listener = Mockito.mock(DiscoveryProviderListener.class);
        provider.addListener(listener);
        // when
        provider.removeListener(listener);
        // then
        Assert.assertEquals(0, provider.serviceListeners.size());
    }

    @Test
    public void testDiscoveryPlayerDiscovered() {
        // given
        RemoteMediaPlayer remoteMediaPlayer = mockRemoteMediaPlayer();
        DiscoveryProviderListener listener = Mockito.mock(DiscoveryProviderListener.class);
        provider.addListener(listener);
        ArgumentCaptor<FireTVDiscoveryProvider> argDiscoveryProvider = ArgumentCaptor
                .forClass(FireTVDiscoveryProvider.class);
        ArgumentCaptor<ServiceDescription> argServiceDescription = ArgumentCaptor
                .forClass(ServiceDescription.class);
        // when
        provider.fireTVListener.playerDiscovered(remoteMediaPlayer);
        // then
        Mockito.verify(listener).onServiceAdded(argDiscoveryProvider.capture(),
                argServiceDescription.capture());
        // check all required fields in service description
        ServiceDescription serviceDescription = argServiceDescription.getValue();
        Assert.assertSame(provider, argDiscoveryProvider.getValue());
        Assert.assertSame(remoteMediaPlayer, serviceDescription.getDevice());
        Assert.assertFalse(provider.foundServices.isEmpty());
        Assert.assertEquals("FireTVDevice", serviceDescription.getFriendlyName());
        Assert.assertEquals("UID", serviceDescription.getIpAddress());
        Assert.assertEquals("UID", serviceDescription.getUUID());
        Assert.assertEquals(FireTVService.ID, serviceDescription.getServiceID());
        Assert.assertEquals(1, provider.foundServices.size());
    }

    @Test
    public void testDiscoveryPlayerDiscoveredTwice() {
        // given
        RemoteMediaPlayer remoteMediaPlayer = mockRemoteMediaPlayer();
        DiscoveryProviderListener listener = Mockito.mock(DiscoveryProviderListener.class);
        provider.addListener(listener);
        ArgumentCaptor<FireTVDiscoveryProvider> argDiscoveryProvider = ArgumentCaptor
                .forClass(FireTVDiscoveryProvider.class);
        ArgumentCaptor<ServiceDescription> argServiceDescription = ArgumentCaptor
                .forClass(ServiceDescription.class);
        Assert.assertEquals(0, provider.foundServices.size());
        // when: the same player is discovered twice, with an updated name the second time
        provider.fireTVListener.playerDiscovered(remoteMediaPlayer);
        Assert.assertEquals(1, provider.foundServices.size());
        Mockito.when(remoteMediaPlayer.getName()).thenReturn("UpdatedField");
        provider.fireTVListener.playerDiscovered(remoteMediaPlayer);
        // then: onServiceAdded fires only once, but the stored description is updated
        Mockito.verify(listener).onServiceAdded(argDiscoveryProvider.capture(),
                argServiceDescription.capture());
        // check all required fields in service description
        ServiceDescription serviceDescription = argServiceDescription.getValue();
        Assert.assertSame(provider, argDiscoveryProvider.getValue());
        Assert.assertSame(remoteMediaPlayer, serviceDescription.getDevice());
        Assert.assertFalse(provider.foundServices.isEmpty());
        Assert.assertEquals("UpdatedField", serviceDescription.getFriendlyName());
        Assert.assertEquals("UID", serviceDescription.getIpAddress());
        Assert.assertEquals("UID", serviceDescription.getUUID());
        Assert.assertEquals(FireTVService.ID, serviceDescription.getServiceID());
        Assert.assertEquals(1, provider.foundServices.size());
    }

    @Test
    public void testDiscoveryNullPlayerDiscovered() {
        // given
        RemoteMediaPlayer remoteMediaPlayer = null;
        DiscoveryProviderListener listener = Mockito.mock(DiscoveryProviderListener.class);
        provider.addListener(listener);
        // when
        try {
            provider.fireTVListener.playerDiscovered(remoteMediaPlayer);
        } catch (Exception e) {
            // FIX: message previously referred to playerLost (copy-paste error)
            Assert.fail("playerDiscovered should not throw exceptions");
        }
        // then
        Mockito.verify(listener, Mockito.times(0)).onServiceAdded(
                Mockito.any(DiscoveryProvider.class),
                Mockito.any(ServiceDescription.class));
        Assert.assertTrue(provider.foundServices.isEmpty());
    }

    @Test
    public void testDiscoveryPlayerLostWithEmptyProvider() {
        // given
        RemoteMediaPlayer remoteMediaPlayer = mockRemoteMediaPlayer();
        DiscoveryProviderListener listener = Mockito.mock(DiscoveryProviderListener.class);
        provider.addListener(listener);
        // when
        provider.fireTVListener.playerLost(remoteMediaPlayer);
        // then
        Mockito.verify(listener, Mockito.times(0)).onServiceRemoved(Mockito.eq(provider),
                Mockito.any(ServiceDescription.class));
        Assert.assertTrue(provider.foundServices.isEmpty());
    }

    @Test
    public void testDiscoveryPlayerLost() {
        // given
        RemoteMediaPlayer remoteMediaPlayer = mockRemoteMediaPlayer();
        DiscoveryProviderListener listener = Mockito.mock(DiscoveryProviderListener.class);
        provider.addListener(listener);
        ArgumentCaptor<FireTVDiscoveryProvider> argDiscoveryProvider = ArgumentCaptor
                .forClass(FireTVDiscoveryProvider.class);
        ArgumentCaptor<ServiceDescription> argServiceDescription = ArgumentCaptor
                .forClass(ServiceDescription.class);
        // when
        provider.fireTVListener.playerDiscovered(remoteMediaPlayer);
        provider.fireTVListener.playerLost(remoteMediaPlayer);
        // then
        Mockito.verify(listener).onServiceRemoved(argDiscoveryProvider.capture(),
                argServiceDescription.capture());
        ServiceDescription serviceDescription = argServiceDescription.getValue();
        Assert.assertSame(provider, argDiscoveryProvider.getValue());
        Assert.assertSame(remoteMediaPlayer, serviceDescription.getDevice());
        Assert.assertTrue(provider.foundServices.isEmpty());
    }

    @Test
    public void testDiscoveryNullPlayerLost() {
        // given
        RemoteMediaPlayer remoteMediaPlayer = null;
        DiscoveryProviderListener listener = Mockito.mock(DiscoveryProviderListener.class);
        provider.addListener(listener);
        // when
        try {
            provider.fireTVListener.playerLost(remoteMediaPlayer);
        } catch (Exception e) {
            Assert.fail("playerLost should not throw exceptions");
        }
        // then
        Mockito.verify(listener, Mockito.times(0)).onServiceRemoved(
                Mockito.any(DiscoveryProvider.class),
                Mockito.any(ServiceDescription.class));
        Assert.assertTrue(provider.foundServices.isEmpty());
    }

    @Test
    public void testDiscoveryFailure() {
        // given
        DiscoveryProviderListener listener = Mockito.mock(DiscoveryProviderListener.class);
        provider.addListener(listener);
        // when
        provider.fireTVListener.discoveryFailure();
        // then
        Mockito.verify(listener).onServiceDiscoveryFailed(Mockito.eq(provider),
                Mockito.any(ServiceCommandError.class));
    }

    @Test
    public void testReset() {
        // given
        provider.fireTVListener.playerDiscovered(mockRemoteMediaPlayer());
        provider.start();
        // when
        provider.reset();
        // then
        Assert.assertTrue(provider.foundServices.isEmpty());
        Mockito.verify(controller).stop();
    }

    @Test
    public void testRestart() {
        // given
        provider.start();
        // when
        provider.restart();
        // then: restart = stop once + a second start
        Mockito.verify(controller, Mockito.times(2)).start(
                Mockito.any(DiscoveryController.IDiscoveryListener.class));
        Mockito.verify(controller, Mockito.times(1)).stop();
    }

    @Test
    public void testRescan() {
        // given
        provider.start();
        // when
        provider.rescan();
        // then: rescan behaves like restart towards the controller
        Mockito.verify(controller, Mockito.times(2)).start(
                Mockito.any(DiscoveryController.IDiscoveryListener.class));
        Mockito.verify(controller, Mockito.times(1)).stop();
    }

    @Test
    public void testInitialState() {
        // Construct via the production (Context-based) constructor and check invariants.
        FireTVDiscoveryProvider provider = new FireTVDiscoveryProvider(Robolectric.application);
        Assert.assertNotNull(provider.fireTVListener);
        Assert.assertNotNull(provider.foundServices);
        Assert.assertNotNull(provider.serviceListeners);
        Assert.assertTrue(provider.foundServices.isEmpty());
        Assert.assertTrue(provider.serviceListeners.isEmpty());
    }

    @Test
    public void testIsEmptyWithoutFoundServices() {
        Assert.assertTrue(provider.isEmpty());
    }

    @Test
    public void testIsEmptyWithFoundServices() {
        provider.fireTVListener.playerDiscovered(mockRemoteMediaPlayer());
        Assert.assertFalse(provider.isEmpty());
    }

    /** Creates a mocked player with a stable unique id ("UID") and name ("FireTVDevice"). */
    private RemoteMediaPlayer mockRemoteMediaPlayer() {
        RemoteMediaPlayer player = Mockito.mock(RemoteMediaPlayer.class);
        Mockito.when(player.getUniqueIdentifier()).thenReturn("UID");
        Mockito.when(player.getName()).thenReturn("FireTVDevice");
        return player;
    }
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.uiDesigner.binding;
import com.intellij.ide.highlighter.XmlFileType;
import com.intellij.lang.properties.psi.PropertiesFile;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleUtilCore;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.*;
import com.intellij.psi.impl.source.resolve.reference.impl.providers.JavaClassReferenceProvider;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.PsiReferenceProcessor;
import com.intellij.psi.util.*;
import com.intellij.psi.xml.XmlAttribute;
import com.intellij.psi.xml.XmlAttributeValue;
import com.intellij.psi.xml.XmlFile;
import com.intellij.psi.xml.XmlTag;
import com.intellij.uiDesigner.GuiFormFileType;
import com.intellij.uiDesigner.UIFormXmlConstants;
import com.intellij.uiDesigner.compiler.Utils;
import com.intellij.util.ProcessingContext;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* @author yole
*/
public class FormReferenceProvider extends PsiReferenceProvider {
  // NOTE(review): LOG appears unused within this class — confirm before removing.
  private static final Logger LOG = Logger.getInstance(FormReferenceProvider.class);

  // Per-file cache payload: all references found in a GUI form file, plus a map from
  // bound field name to the component's PsiType and the text range of its class attribute.
  private static class CachedFormData {
    PsiReference[] myReferences;
    Map<String, Pair<PsiType, TextRange>> myFieldNameToTypeMap;

    CachedFormData(final PsiReference[] refs, final Map<String, Pair<PsiType, TextRange>> map) {
      myReferences = refs;
      myFieldNameToTypeMap = map;
    }
  }

  private static final Key<CachedValue<CachedFormData>> CACHED_DATA = Key.create("Cached form reference");

  // Returns the cached references for *.form files (which are plain-text PSI);
  // any other element yields no references.
  @Override
  public PsiReference @NotNull [] getReferencesByElement(@NotNull final PsiElement element, @NotNull final ProcessingContext context) {
    if (element instanceof PsiPlainTextFile) {
      PsiPlainTextFile plainTextFile = (PsiPlainTextFile) element;
      if (plainTextFile.getFileType().equals(GuiFormFileType.INSTANCE)) {
        return getCachedData(plainTextFile).myReferences;
      }
    }
    return PsiReference.EMPTY_ARRAY;
  }

  // Returns the form file containing a reference to the given bound field, or null.
  @Nullable
  public static PsiFile getFormFile(PsiField field) {
    PsiReference ref = getFormReference(field);
    if (ref != null) {
      return ref.getElement().getContainingFile();
    }
    return null;
  }

  // Finds, among the forms bound to the field's class, the reference pointing at the field.
  @Nullable
  public static PsiReference getFormReference(PsiField field) {
    final PsiClass containingClass = field.getContainingClass();
    if (containingClass != null && containingClass.getQualifiedName() != null) {
      final List<PsiFile> forms = FormClassIndex.findFormsBoundToClass(containingClass.getProject(), containingClass);
      for (PsiFile formFile : forms) {
        final PsiReference[] refs = formFile.getReferences();
        for (final PsiReference ref : refs) {
          if (ref.isReferenceTo(field)) {
            return ref;
          }
        }
      }
    }
    return null;
  }

  // Looks up the component type recorded for a bound field name; null when unknown.
  public static @Nullable PsiType getGUIComponentType(final PsiPlainTextFile file, String fieldName) {
    final Map<String, Pair<PsiType, TextRange>> fieldNameToTypeMap = getCachedData(file).myFieldNameToTypeMap;
    final Pair<PsiType, TextRange> typeRangePair = fieldNameToTypeMap.get(fieldName);
    return Pair.getFirst(typeRangePair);
  }

  // Rewrites the class attribute of the component bound to fieldName in the form's document.
  public static void setGUIComponentType(PsiPlainTextFile file, String fieldName, String typeText) {
    final Map<String, Pair<PsiType, TextRange>> fieldNameToTypeMap = getCachedData(file).myFieldNameToTypeMap;
    final Pair<PsiType, TextRange> typeRangePair = fieldNameToTypeMap.get(fieldName);
    if (typeRangePair != null) {
      final TextRange range = typeRangePair.getSecond();
      if (range != null) {
        PsiDocumentManager.getInstance(file.getProject()).getDocument(file).replaceString(range.getStartOffset(), range.getEndOffset(), typeText);
      }
    }
  }

  // Parses the plain-text form file as XML (via a throwaway in-memory file) and feeds
  // every reference it contains to the processor. Bails out unless the root tag is a
  // <form> in the expected namespace.
  private static void processReferences(final PsiPlainTextFile file, final PsiReferenceProcessor processor) {
    final Project project = file.getProject();
    final XmlTag rootTag = ReadAction.compute(() -> {
      final XmlFile xmlFile = (XmlFile)PsiFileFactory.getInstance(project)
        .createFileFromText("a.xml", XmlFileType.INSTANCE, file.getViewProvider().getContents());
      return xmlFile.getRootTag();
    });
    if (rootTag == null || !Utils.FORM_NAMESPACE.equals(rootTag.getNamespace())) {
      return;
    }
    @NonNls final String name = rootTag.getName();
    if (!"form".equals(name)){
      return;
    }
    PsiReference classReference = null;
    final XmlAttribute classToBind = rootTag.getAttribute("bind-to-class", null);
    if (classToBind != null) {
      // reference to class
      final XmlAttributeValue valueElement = classToBind.getValueElement();
      if (valueElement == null) {
        return;
      }
      // '$' separates nested classes in the attribute; references use '.' notation
      final String className = valueElement.getValue().replace('$','.');
      final PsiReference[] referencesByString = new JavaClassReferenceProvider().getReferencesByString(className, file, valueElement.getTextRange().getStartOffset() + 1);
      if(referencesByString.length < 1){
        // There are no references there
        return;
      }
      for (PsiReference aReferencesByString : referencesByString) {
        processor.execute(aReferencesByString);
      }
      // last reference resolves to the innermost (actual) bound class
      classReference = referencesByString[referencesByString.length - 1];
    }
    final PsiReference finalClassReference = classReference;
    ApplicationManager.getApplication().runReadAction(() -> processReferences(rootTag, finalClassReference, file, processor));
  }

  // Range of the attribute value without the surrounding quote characters.
  private static TextRange getValueRange(final XmlAttribute classToBind) {
    final XmlAttributeValue valueElement = classToBind.getValueElement();
    final TextRange textRange = valueElement.getTextRange();
    return new TextRange(textRange.getStartOffset() + 1, textRange.getEndOffset() - 1); // skip " "
  }

  // Recursive worker: emits field-binding, resource-bundle, nested-form, button-group,
  // component-class and property references for this tag, then recurses into sub-tags.
  private static void processReferences(final XmlTag tag,
                                        final PsiReference classReference,
                                        final PsiPlainTextFile file,
                                        final PsiReferenceProcessor processor) {
    final XmlAttribute clsAttribute = tag.getAttribute(UIFormXmlConstants.ATTRIBUTE_CLASS, null);
    final String classNameStr = clsAttribute != null? clsAttribute.getValue().replace('$','.') : null;
    // field
    {
      final XmlAttribute bindingAttribute = tag.getAttribute(UIFormXmlConstants.ATTRIBUTE_BINDING, null);
      if (bindingAttribute != null && classReference != null) {
        final XmlAttribute customCreateAttribute = tag.getAttribute(UIFormXmlConstants.ATTRIBUTE_CUSTOM_CREATE, null);
        boolean customCreate = (customCreateAttribute != null && Boolean.parseBoolean(customCreateAttribute.getValue()));
        final TextRange nameRange = clsAttribute != null ? getValueRange(clsAttribute) : null;
        processor.execute(new FieldFormReference(file, classReference, getValueRange(bindingAttribute), classNameStr, nameRange, customCreate));
      }
      // title resource bundle + key go together; same for the generic bundle/key pair below
      final XmlAttribute titleBundleAttribute = tag.getAttribute(UIFormXmlConstants.ATTRIBUTE_TITLE_RESOURCE_BUNDLE, null);
      final XmlAttribute titleKeyAttribute = tag.getAttribute(UIFormXmlConstants.ATTRIBUTE_TITLE_KEY, null);
      if (titleBundleAttribute != null && titleKeyAttribute != null) {
        processResourceBundleFileReferences(file, processor, titleBundleAttribute);
        processor.execute(new ResourceBundleKeyReference(file, titleBundleAttribute.getValue(), getValueRange(titleKeyAttribute)));
      }
      final XmlAttribute bundleAttribute = tag.getAttribute(UIFormXmlConstants.ATTRIBUTE_RESOURCE_BUNDLE, null);
      final XmlAttribute keyAttribute = tag.getAttribute(UIFormXmlConstants.ATTRIBUTE_KEY, null);
      if (bundleAttribute != null && keyAttribute != null) {
        processResourceBundleFileReferences(file, processor, bundleAttribute);
        processor.execute(new ResourceBundleKeyReference(file, bundleAttribute.getValue(), getValueRange(keyAttribute)));
      }
      processNestedFormReference(tag, processor, file);
      processButtonGroupReference(tag, processor, file, classReference);
    }
    // component class
    {
      if (clsAttribute != null) {
        final JavaClassReferenceProvider provider = new JavaClassReferenceProvider();
        final PsiReference[] referencesByString = provider.getReferencesByString(classNameStr, file, clsAttribute.getValueElement().getTextRange().getStartOffset() + 1);
        if(referencesByString.length < 1){
          // There are no references there
          return;
        }
        for (PsiReference aReferencesByString : referencesByString) {
          processor.execute(aReferencesByString);
        }
      }
    }
    // property references
    XmlTag parentTag = tag.getParentTag();
    if (parentTag != null && parentTag.getName().equals(UIFormXmlConstants.ELEMENT_PROPERTIES)) {
      XmlTag componentTag = parentTag.getParentTag();
      if (componentTag != null) {
        String className = componentTag.getAttributeValue(UIFormXmlConstants.ATTRIBUTE_CLASS, Utils.FORM_NAMESPACE);
        if (className != null) {
          processPropertyReference(tag, processor, file, className.replace('$', '.'));
        }
      }
    }
    final XmlTag[] subtags = tag.getSubTags();
    for (XmlTag subtag : subtags) {
      processReferences(subtag, classReference, file, processor);
    }
  }

  // Emits package references for each path segment plus a reference to the bundle file itself.
  private static void processResourceBundleFileReferences(final PsiPlainTextFile file,
                                                          final PsiReferenceProcessor processor,
                                                          final XmlAttribute titleBundleAttribute) {
    processPackageReferences(file, processor, titleBundleAttribute);
    processor.execute(new ResourceBundleFileReference(file, getValueRange(titleBundleAttribute)));
  }

  // For a '/'-separated resource path, emits one package reference per path prefix
  // (everything up to each '/').
  private static void processPackageReferences(final PsiPlainTextFile file,
                                               final PsiReferenceProcessor processor,
                                               final XmlAttribute attribute) {
    final TextRange valueRange = getValueRange(attribute);
    final String value = attribute.getValue();
    int pos=-1;
    while(true) {
      pos = value.indexOf('/', pos+1);
      if (pos < 0) {
        break;
      }
      processor.execute(new FormPackageReference(file, new TextRange(valueRange.getStartOffset(), valueRange.getStartOffset() + pos)));
    }
  }

  // Emits references for a nested form pointed at by the form-file attribute.
  private static void processNestedFormReference(final XmlTag tag, final PsiReferenceProcessor processor, final PsiPlainTextFile file) {
    final XmlAttribute formFileAttribute = tag.getAttribute(UIFormXmlConstants.ATTRIBUTE_FORM_FILE, null);
    if (formFileAttribute != null) {
      processPackageReferences(file, processor, formFileAttribute);
      processor.execute(new ResourceFileReference(file, getValueRange(formFileAttribute)));
    }
  }

  // A button group that is bound (bound="true") references a field of the bound class by name.
  private static void processButtonGroupReference(final XmlTag tag, final PsiReferenceProcessor processor, final PsiPlainTextFile file,
                                                  final PsiReference classReference) {
    final XmlAttribute boundAttribute = tag.getAttribute(UIFormXmlConstants.ATTRIBUTE_BOUND, null);
    final XmlAttribute nameAttribute = tag.getAttribute(UIFormXmlConstants.ATTRIBUTE_NAME, null);
    if (boundAttribute != null && Boolean.parseBoolean(boundAttribute.getValue()) && nameAttribute != null) {
      processor.execute(new FieldFormReference(file, classReference, getValueRange(nameAttribute), null, null, false));
    }
  }

  // For a property tag with a value, resolves the property's getter on the component class:
  // enum-typed properties get an enum-constant reference, Icon-typed ones a resource-file
  // reference (plus package references); everything else yields no reference.
  private static void processPropertyReference(final XmlTag tag, final PsiReferenceProcessor processor, final PsiPlainTextFile file,
                                               final String className) {
    final XmlAttribute valueAttribute = tag.getAttribute(UIFormXmlConstants.ATTRIBUTE_VALUE, null);
    if (valueAttribute != null) {
      PsiReference reference = ReadAction.compute(() -> {
        final JavaPsiFacade psiFacade = JavaPsiFacade.getInstance(file.getProject());
        final Module module = ModuleUtilCore.findModuleForPsiElement(file);
        if (module == null) return null;
        final GlobalSearchScope scope = module.getModuleWithDependenciesAndLibrariesScope(false);
        PsiClass psiClass = psiFacade.findClass(className, scope);
        if (psiClass != null) {
          PsiMethod getter = PropertyUtilBase.findPropertyGetter(psiClass, tag.getName(), false, true);
          if (getter != null) {
            final PsiType returnType = getter.getReturnType();
            if (returnType instanceof PsiClassType) {
              PsiClassType propClassType = (PsiClassType)returnType;
              PsiClass propClass = propClassType.resolve();
              if (propClass != null) {
                if (propClass.isEnum()) {
                  return new FormEnumConstantReference(file, getValueRange(valueAttribute), propClassType);
                }
                PsiClass iconClass = psiFacade.findClass("javax.swing.Icon", scope);
                if (iconClass != null && InheritanceUtil.isInheritorOrSelf(propClass, iconClass, true)) {
                  return new ResourceFileReference(file, getValueRange(valueAttribute));
                }
              }
            }
          }
        }
        return null;
      });
      if (reference != null) {
        if (reference instanceof ResourceFileReference) {
          processPackageReferences(file, processor, valueAttribute);
        }
        processor.execute(reference);
      }
    }
  }

  // Computes the '/'-separated bundle name of a properties file, qualified by its package.
  @Nullable
  public static String getBundleName(final PropertiesFile propertiesFile) {
    final PsiDirectory directory = propertiesFile.getParent();
    if (directory == null) {
      return null;
    }
    final String packageName;
    final PsiPackage aPackage = JavaDirectoryService.getInstance().getPackage(directory);
    if (aPackage == null) {
      packageName = "";
    }
    else {
      packageName = aPackage.getQualifiedName();
    }
    String bundleName = propertiesFile.getResourceBundle().getBaseName();
    if (packageName.length() > 0) {
      bundleName = packageName + '.' + bundleName;
    }
    bundleName = bundleName.replace('.', '/');
    return bundleName;
  }

  // Builds (once) and returns the cached reference/field-type data for a form file.
  // The cached value is invalidated when the file itself changes (dependency: element).
  private static CachedFormData getCachedData(final PsiPlainTextFile element) {
    CachedValue<CachedFormData> data = element.getUserData(CACHED_DATA);
    if(data == null) {
      data = CachedValuesManager.getManager(element.getProject()).createCachedValue(new CachedValueProvider<CachedFormData>() {
        final Map<String, Pair<PsiType, TextRange>> map = new HashMap<>();
        @Override
        public Result<CachedFormData> compute() {
          final PsiReferenceProcessor.CollectElements processor = new PsiReferenceProcessor.CollectElements() {
            @Override
            public boolean execute(PsiReference ref) {
              // Record the component class type for every field-binding reference
              // so getGUIComponentType/setGUIComponentType can use it later.
              if (ref instanceof FieldFormReference) {
                final FieldFormReference fieldRef = ((FieldFormReference)ref);
                final String componentClassName = fieldRef.getComponentClassName();
                if (componentClassName != null) {
                  final PsiClassType type = JavaPsiFacade.getInstance(element.getProject()).getElementFactory()
                    .createTypeByFQClassName(componentClassName, element.getResolveScope());
                  map.put(fieldRef.getRangeText(), new Pair<>(type, fieldRef.getComponentClassNameTextRange()));
                }
              }
              return super.execute(ref);
            }
          };
          processReferences(element, processor);
          final PsiReference[] refs = processor.toArray(PsiReference.EMPTY_ARRAY);
          return new Result<>(new CachedFormData(refs, map), element);
        }
      }, false);
      element.putUserData(CACHED_DATA, data);
    }
    return data.getValue();
  }

  // NOTE(review): the methods below look like leftovers from the retired
  // (Project)Component lifecycle; confirm nothing calls them before removing.
  public void projectOpened() {
  }

  public void projectClosed() {
  }

  @NotNull @NonNls
  public String getComponentName() {
    return "FormReferenceProvider";
  }

  public void initComponent() {
  }

  public void disposeComponent() {
  }
}
| |
/**
* Sapelli data collection platform: http://sapelli.org
*
* Copyright 2012-2014 University College London - ExCiteS group
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.ac.ucl.excites.sapelli.storage.model.columns;
import java.io.IOException;
import java.math.BigInteger;
import uk.ac.ucl.excites.sapelli.shared.io.BitInputStream;
import uk.ac.ucl.excites.sapelli.shared.io.BitOutputStream;
import uk.ac.ucl.excites.sapelli.shared.util.BigIntegerUtils;
import uk.ac.ucl.excites.sapelli.shared.util.IntegerRangeMapping;
import uk.ac.ucl.excites.sapelli.storage.model.Column;
import uk.ac.ucl.excites.sapelli.storage.model.ComparableColumn;
import uk.ac.ucl.excites.sapelli.storage.model.Record;
import uk.ac.ucl.excites.sapelli.storage.visitors.ColumnVisitor;
/**
* A column for integers with configurable size up to 64 bits
*
* @author mstevens
*/
public class IntegerColumn extends ComparableColumn<Long>
{
// STATICS -----------------------------------------------------------
private static final long serialVersionUID = 2L;

private static final boolean DEFAULT_SIGNEDNESS = true; // allow signed values by default
private static final int DEFAULT_SIZE_BITS = Integer.SIZE; // use 32 bit integers by default

// DYNAMICS ----------------------------------------------------------
private final int size; // size in number of bits
private final boolean signed; // whether negative values are representable
private final IntegerRangeMapping rangeMapping; // null when the column was sized directly in bits
/**
 * Creates an IntegerColumn with the default number of bits ({@value #DEFAULT_SIZE_BITS}) and the default signedness ({@value #DEFAULT_SIGNEDNESS})
 *
 * @param name column name
 * @param optional whether the column accepts absent values
 */
public IntegerColumn(String name, boolean optional)
{
	this(name, optional, DEFAULT_SIGNEDNESS, DEFAULT_SIZE_BITS);
}

/**
 * Creates an IntegerColumn of the specified size (in bits) and the default signedness ({@value #DEFAULT_SIGNEDNESS})
 *
 * @param name column name
 * @param optional whether the column accepts absent values
 * @param sizeBits size in number of bits (minimum 1, maximum 64)
 */
public IntegerColumn(String name, boolean optional, int sizeBits)
{
	this(name, optional, DEFAULT_SIGNEDNESS, sizeBits);
}

/**
 * Creates an IntegerColumn with the default number of bits ({@value #DEFAULT_SIZE_BITS}) and the specified signedness
 *
 * @param name column name
 * @param optional whether the column accepts absent values
 * @param signed whether negative values are representable
 */
public IntegerColumn(String name, boolean optional, boolean signed)
{
	this(name, optional, signed, DEFAULT_SIZE_BITS);
}

/**
 * Creates an IntegerColumn of the specified size (in bits) and the specified signedness
 *
 * @param name column name
 * @param optional whether the column accepts absent values
 * @param signed whether negative values are representable
 * @param sizeBits size in number of bits (minimum 1, maximum 64 or 63, depending on signed)
 */
public IntegerColumn(String name, boolean optional, boolean signed, int sizeBits)
{
	// delegate with allowEmpty = false: a 0-bit column must be requested explicitly
	this(name, optional, signed, sizeBits, false);
}
/**
 * Creates an IntegerColumn of the specified size (in bits) and the specified signedness.
 * {@code sizeBits} can be 0 but to avoid this being done by accident {@code allowEmpty} must be {@code true} in that case.
 *
 * @param name column name
 * @param optional whether the column accepts absent values
 * @param signed whether negative values are representable
 * @param sizeBits size in number of bits (minimum 0 or 1, depending on allowEmpty; maximum 64 or 63, depending on signed)
 * @param allowEmpty whether or not to allow sizeBits to be 0
 */
public IntegerColumn(String name, boolean optional, boolean signed, int sizeBits, boolean allowEmpty)
{
	super(name, optional);
	// Valid range is [allowEmpty ? 0 : 1, signed ? 64 : 63] bits: an unsigned column
	// gives up one bit because values are still carried in a (signed) Java long.
	if(sizeBits < (allowEmpty ? 0 : 1) || sizeBits > (Long.SIZE - (signed ? 0 : 1)))
		throw new IllegalArgumentException( "Invalid size (" + sizeBits + " bits), allowed range for " +
			(signed ? "" : "un") + "signed and " + (allowEmpty ? "" : "not ") + "allowed empty is [" +
			(allowEmpty ? 0 : 1) + ", " + (Long.SIZE - (signed ? 0 : 1)) + "] bits.");
	this.size = sizeBits;
	this.signed = signed;
	this.rangeMapping = null; // sized directly in bits; no logical-range mapping
}
/**
 * Creates an IntegerColumn that is just big enough to be able to store any integer
 * from the range [minLogicalValue; maxLogicalValue] (inclusive).
 *
 * @param name column name
 * @param optional whether null values are allowed
 * @param minLogicalValue lower bound of the logical range
 * @param maxLogicalValue must be strictly larger than minLogicalValue
 */
public IntegerColumn(String name, boolean optional, long minLogicalValue, long maxLogicalValue)
{
    // Delegate with allowEmpty = false (min must be strictly below max):
    this(name, optional, minLogicalValue, maxLogicalValue, false);
}
/**
 * Creates an IntegerColumn that is just big enough to be able to store any integer
 * from the range [minLogicalValue; maxLogicalValue] (inclusive).
 *
 * @param name column name
 * @param optional whether null values are allowed
 * @param minLogicalValue lower bound of the logical range
 * @param maxLogicalValue upper bound of the logical range
 * @param allowEmpty when {@code false} minLogicalValue must be < maxLogicalValue, when {@code true} minLogicalValue must be <= maxLogicalValue
 */
public IntegerColumn(String name, boolean optional, long minLogicalValue, long maxLogicalValue, boolean allowEmpty)
{
    // Build a range mapping that compacts [min, max] into the fewest possible bits:
    this(name, optional, new IntegerRangeMapping(minLogicalValue, maxLogicalValue, allowEmpty));
}
/**
 * Creates an IntegerColumn that is just big enough to be able to store values accepted by the provided IntegerRangeMapping
 *
 * @param name column name
 * @param optional whether null values are allowed
 * @param rangeMapping mapping whose bounds must fit in a (signed) long
 * @throws IllegalArgumentException when the mapping's bounds exceed the long range
 */
public IntegerColumn(String name, boolean optional, IntegerRangeMapping rangeMapping)
{
    super(name, optional);
    this.rangeMapping = rangeMapping;
    // Reject mappings whose bounds cannot be represented as long values:
    if( rangeMapping.lowBound().compareTo(BigInteger.valueOf(Long.MIN_VALUE)) < 0 ||
        rangeMapping.highBound(false).compareTo(BigInteger.valueOf(Long.MAX_VALUE)) > 0)
        throw new IllegalArgumentException("The given rangeMapping accepts BigInteger values of magnitudes beyond the bounds of Long integers");
    this.size = rangeMapping.size(); // bit width required by the mapping
    this.signed = rangeMapping.lowBound().compareTo(BigInteger.ZERO) < 0; // signed iff the range includes negatives
}
/** Returns a new column with the same name, optionality and value restrictions. */
@Override
public IntegerColumn copy()
{
    // Duplicate either as a plain (size, signed) column or via a copied range mapping:
    return (rangeMapping == null)
            ? new IntegerColumn(name, optional, signed, size)
            : new IntegerColumn(name, optional, new IntegerRangeMapping(rangeMapping));
}
/**
 * Integer version of {@link IntegerColumn#storeValue(Record, Long)}
 *
 * @param record the record to store the value into
 * @param value the value to store (widened to Long)
 * @throws IllegalArgumentException when the value fails validation
 * @throws NullPointerException presumably when value is null and the column is not optional — behaviour defined by the superclass; verify there
 */
public void storeValue(Record record, Integer value) throws IllegalArgumentException, NullPointerException
{
    // Widen the Integer to Long via convert(), then delegate to the Long overload:
    storeValue(record, (Long) convert(value));
}
/**
 * Converts Numbers (Integers, Shorts, etc.) to Longs
 *
 * @param value possibly null
 * @return the value as a Long (or null if the input was null)
 * @throws ClassCastException when the value is not a {@link Number}
 *
 * @see uk.ac.ucl.excites.sapelli.storage.model.Column#convert(java.lang.Object)
 */
@Override
public Object convert(Object value)
{
    if(value == null)
        return null;
    if(value instanceof Long)
        return value; // already the right type, pass through unchanged
    // Any other Number (Integer, Short, Byte, ...) is widened to Long:
    return Long.valueOf(((Number) value).longValue());
}
/**
 * Retrieves the column value as a primitive long.
 *
 * @param record record to read from
 * @param nullReplacement value to return when the column holds no value
 * @return the stored value, or nullReplacement when absent
 */
public long getPrimitiveLong(Record record, long nullReplacement)
{
    Long stored = retrieveValue(record);
    return (stored != null) ? stored.longValue() : nullReplacement;
}
/**
 * Retrieves the column value as a primitive int (truncating the stored long).
 *
 * @param record record to read from
 * @param nullReplacement value to return when the column holds no value
 * @return the stored value narrowed to int, or nullReplacement when absent
 */
public int getPrimitiveInt(Record record, int nullReplacement)
{
    Long stored = retrieveValue(record);
    return (stored != null) ? stored.intValue() : nullReplacement;
}
/**
 * @param value the String to parse (can be expected to be neither null nor "")
 * @return the parsed value
 * @throws NumberFormatException when the String is not a valid decimal integer
 */
@Override
public Long parse(String value) throws NumberFormatException
{
    // parseLong + autoboxing is equivalent to Long.valueOf(String):
    return Long.parseLong(value);
}
/**
 * Checks that the given (non-null) value is storable in this column.
 *
 * @param value the value to check
 * @throws IllegalArgumentException when the value falls outside the allowed range
 */
@Override
protected void validate(Long value) throws IllegalArgumentException
{
    // Range-mapped columns are checked against the logical range first:
    if(rangeMapping != null && !rangeMapping.inStrictRange(value))
        throw new IllegalArgumentException("The value (" + value + ") is not in the allowed range: " + rangeMapping.getStrictRangeString() + ".");
    // Then (for all columns) against the representable [min, max] interval:
    if(value < getMinValue() || value > getMaxValue())
    {
        // More specific message for negative values in unsigned columns:
        if(!signed && value < 0l)
            throw new IllegalArgumentException("Cannot store negative value (" + value + ") as an unsigned integer.");
        //else:
        throw new IllegalArgumentException("The value (" + value + ") does not fit a" + (signed ? " " : "n un") + "signed integer of " + size + " bits (allowed range: " + IntegerRangeMapping.GetRangeString(getMinValue(), getMaxValue()) + ").");
    }
}
/** @return the smallest value storable in this column */
public long getMinValue()
{
    // Lower bound comes from the range mapping when present, else from the raw bit width:
    BigInteger min = (rangeMapping != null) ? rangeMapping.lowBound() : BigIntegerUtils.GetMinValue(size, signed);
    return min.longValue();
}
/** @return the largest value storable in this column */
public long getMaxValue()
{
    // Upper bound comes from the range mapping when present, else from the raw bit width:
    BigInteger max = (rangeMapping != null) ? rangeMapping.highBound() : BigIntegerUtils.GetMaxValue(size, signed);
    return max.longValue();
}
/** Serialises the value to the bit stream, using the range mapping when one is set. */
@Override
protected void write(Long value, BitOutputStream bitStream) throws IOException
{
    if(rangeMapping == null)
        bitStream.write(value, size, signed); // raw fixed-width encoding
    else
        rangeMapping.write(value, bitStream); // compact range-shifted encoding
}
/** Deserialises a value from the bit stream, mirroring {@code write}. */
@Override
protected Long read(BitInputStream bitStream) throws IOException
{
    if(rangeMapping == null)
        return bitStream.readInteger(size, signed); // raw fixed-width decoding
    else
        return rangeMapping.read(bitStream).longValue(); // compact range-shifted decoding
}
/** @return the minimum serialised size in bits (fixed-width: always {@code size}) */
@Override
protected int _getMinimumSize()
{
    return size;
}
/** @return the maximum serialised size in bits (fixed-width: always {@code size}) */
@Override
protected int _getMaximumSize()
{
    return size;
}
/**
 * @return whether this column stores signed (possibly negative) values
 */
public boolean isSigned()
{
    return signed;
}
/** @return the decimal string representation of the (non-null) value */
@Override
public String toString(Long value)
{
    return Long.toString(value.longValue());
}
/** Compares the value restrictions (bit width, signedness, range mapping) of two columns. */
@Override
protected boolean equalRestrictions(Column<Long> otherColumn)
{
    if(!(otherColumn instanceof IntegerColumn))
        return false;
    IntegerColumn that = (IntegerColumn) otherColumn;
    if(this.size != that.size || this.signed != that.signed)
        return false;
    // Range mappings must either both be absent or be equal:
    return (rangeMapping == null) ? (that.rangeMapping == null) : rangeMapping.equals(that.rangeMapping);
}
/** @return a "copy" of the value — Long is immutable, so re-boxing the primitive suffices */
@Override
protected Long copy(Long value)
{
    return Long.valueOf(value);
}
/**
 * Even though the type is actually Long we have called this column an "IntegerColumn" because the size can vary (so values are not necessarily 64bit longs)
 *
 * @return the simple name "Integer"
 * @see uk.ac.ucl.excites.sapelli.storage.model.Column#getTypeString()
 */
@Override
public String getTypeString()
{
    return Integer.class.getSimpleName();
}
/** Visitor-pattern dispatch: lets the visitor handle this column by its concrete type. */
@Override
public void accept(ColumnVisitor visitor)
{
    visitor.visit(this);
}
/** Natural numeric ordering of two non-null values. */
@Override
protected int compareNonNullValues(Long lhs, Long rhs)
{
    // Long.compare produces the same -1/0/1 result as Long.compareTo:
    return Long.compare(lhs.longValue(), rhs.longValue());
}
/** Combines the superclass hash with this column's restrictions. */
@Override
public int hashCode()
{
    int hash = super.hashCode();
    hash = 31 * hash + size;
    // NOTE(review): maps signed->0, unsigned->1 — the inverse of the usual boolean-hash
    // convention. Harmless, but must stay as-is to keep existing hashes stable.
    hash = 31 * hash + (signed ? 0 : 1);
    hash = 31 * hash + (rangeMapping == null ? 0 : rangeMapping.hashCode());
    return hash;
}
/** @return the runtime value type of this column ({@link Long}, despite the class name) */
@Override
public Class<Long> getType()
{
    return Long.class;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.azure.blob;
import java.util.Map;
import com.microsoft.azure.storage.blob.CloudBlob;
import org.apache.camel.component.azure.common.AbstractConfiguration;
import org.apache.camel.spi.UriParam;
import org.apache.camel.spi.UriParams;
/**
 * Configuration bean for the Azure Blob Service endpoint.
 * Fields are declared first; their accessor pairs follow in the same order.
 */
@UriParams
public class BlobServiceConfiguration extends AbstractConfiguration {

    private String containerName;
    private String blobName;
    @UriParam
    private CloudBlob azureBlobClient;
    @UriParam(defaultValue = "blockblob")
    private BlobType blobType = BlobType.blockblob;
    @UriParam(label = "producer", defaultValue = "listBlobs")
    private BlobServiceOperations operation = BlobServiceOperations.listBlobs;
    @UriParam(label = "producer")
    private int streamWriteSize;
    @UriParam
    private int streamReadSize;
    @UriParam(label = "producer")
    private Map<String, String> blobMetadata;
    @UriParam(defaultValue = "true")
    private boolean closeStreamAfterRead = true;
    @UriParam(label = "producer", defaultValue = "true")
    private boolean closeStreamAfterWrite = true;
    @UriParam
    private String fileDir;
    @UriParam(defaultValue = "0")
    private Long blobOffset = 0L;
    @UriParam
    private Long dataLength;
    @UriParam(label = "producer")
    private String blobPrefix;
    @UriParam
    private boolean publicForRead;
    @UriParam(label = "producer", defaultValue = "true")
    private boolean useFlatListing = true;

    // --- accessors (in field-declaration order) -----------------------------

    public String getContainerName() {
        return containerName;
    }

    /**
     * Set the blob service container name
     */
    public void setContainerName(String containerName) {
        this.containerName = containerName;
    }

    public String getBlobName() {
        return blobName;
    }

    /**
     * Blob name, required for most operations
     */
    public void setBlobName(String blobName) {
        this.blobName = blobName;
    }

    public CloudBlob getAzureBlobClient() {
        return azureBlobClient;
    }

    /**
     * The blob service client
     */
    public void setAzureBlobClient(CloudBlob azureBlobClient) {
        this.azureBlobClient = azureBlobClient;
    }

    public BlobType getBlobType() {
        return blobType;
    }

    /**
     * Set a blob type, 'blockblob' is default
     */
    public void setBlobType(BlobType blobType) {
        this.blobType = blobType;
    }

    public BlobServiceOperations getOperation() {
        return operation;
    }

    /**
     * Blob service operation hint to the producer
     */
    public void setOperation(BlobServiceOperations operation) {
        this.operation = operation;
    }

    public int getStreamWriteSize() {
        return streamWriteSize;
    }

    /**
     * Set the size of the buffer for writing block and page blocks
     */
    public void setStreamWriteSize(int streamWriteSize) {
        this.streamWriteSize = streamWriteSize;
    }

    public int getStreamReadSize() {
        return streamReadSize;
    }

    /**
     * Set the minimum read size in bytes when reading the blob content
     */
    public void setStreamReadSize(int streamReadSize) {
        this.streamReadSize = streamReadSize;
    }

    public Map<String, String> getBlobMetadata() {
        return blobMetadata;
    }

    /**
     * Set the blob meta-data
     */
    public void setBlobMetadata(Map<String, String> blobMetadata) {
        this.blobMetadata = blobMetadata;
    }

    public boolean isCloseStreamAfterRead() {
        return closeStreamAfterRead;
    }

    /**
     * Close the stream after read or keep it open, default is true
     */
    public void setCloseStreamAfterRead(boolean closeStreamAfterRead) {
        this.closeStreamAfterRead = closeStreamAfterRead;
    }

    public boolean isCloseStreamAfterWrite() {
        return closeStreamAfterWrite;
    }

    /**
     * Close the stream after write or keep it open, default is true
     */
    public void setCloseStreamAfterWrite(boolean closeStreamAfterWrite) {
        this.closeStreamAfterWrite = closeStreamAfterWrite;
    }

    public String getFileDir() {
        return fileDir;
    }

    /**
     * Set the file directory where the downloaded blobs will be saved to
     */
    public void setFileDir(String fileDir) {
        this.fileDir = fileDir;
    }

    public Long getBlobOffset() {
        return blobOffset;
    }

    /**
     * Set the blob offset for the upload or download operations, default is 0
     */
    public void setBlobOffset(Long dataOffset) {
        this.blobOffset = dataOffset;
    }

    public Long getDataLength() {
        return dataLength;
    }

    /**
     * Set the data length for the download or page blob upload operations
     */
    public void setDataLength(Long dataLength) {
        this.dataLength = dataLength;
    }

    public String getBlobPrefix() {
        return blobPrefix;
    }

    /**
     * Set a prefix which can be used for listing the blobs
     */
    public void setBlobPrefix(String blobPrefix) {
        this.blobPrefix = blobPrefix;
    }

    public boolean isPublicForRead() {
        return publicForRead;
    }

    /**
     * Storage resources can be public for reading their content, if this property is enabled
     * then the credentials do not have to be set
     */
    public void setPublicForRead(boolean publicForRead) {
        this.publicForRead = publicForRead;
    }

    public boolean isUseFlatListing() {
        return useFlatListing;
    }

    /**
     * Specify if the flat or hierarchical blob listing should be used
     */
    public void setUseFlatListing(boolean useFlatListing) {
        this.useFlatListing = useFlatListing;
    }
}
| |
package com.imslpdroid.data;
import java.io.File;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.os.Environment;
import android.util.Log;
/**
 * Static helpers for managing downloaded score PDFs and their SQLite bookkeeping.
 * All DB access is serialised through {@code dbLock}; each call opens and closes its
 * own database handle via {@link DataStorageHelper}.
 */
public class DataStorage {

    // Serialises all database access across threads.
    private static Lock dbLock = new ReentrantLock();
    // Separator used to pack several score attributes into the single "info" column.
    private static final String DIVISOR = "---";
    // Directory (relative to external storage) where downloaded PDFs are kept.
    private static final String EXTERNAL_DOWNLOAD_PATH = "imslpdroid_data";

    /** Returns the file a downloaded score is (or would be) saved to. */
    public static File getDownloadedScoreFile(Score s) {
        return new File(getExternalDownloadPath(), s.getScoreId() + ".pdf");
    }

    /** Returns the download directory on external storage (may not exist yet). */
    public static File getExternalDownloadPath() {
        return new File(Environment.getExternalStorageDirectory(), EXTERNAL_DOWNLOAD_PATH);
    }

    /** Lists file names currently in the download directory; empty when unreadable. */
    public static List<String> getListOfFilesInDownloadDirectory() {
        // File.list() returns null when the directory does not exist or cannot be read.
        String[] files = getExternalDownloadPath().list();
        return files != null ? new LinkedList<String>(Arrays.asList(files)) : new LinkedList<String>();
    }

    /** Closes cursor and database quietly, tolerating nulls and close failures. */
    private static void closeQuietly(Cursor cursor, SQLiteDatabase db) {
        if (cursor != null) {
            try {
                cursor.close();
            } catch (Exception ignored) {
                // best effort
            }
        }
        if (db != null) {
            try {
                db.close();
            } catch (Exception ignored) {
                // best effort
            }
        }
    }

    /** write to DB a new file */
    public static void addDownloadedFileInDB(Context context, Score score) {
        // Lock BEFORE try: if lock() itself failed, the unlock() in finally would
        // otherwise throw IllegalMonitorStateException on a lock we never acquired.
        dbLock.lock();
        SQLiteDatabase db = null;
        try {
            db = new DataStorageHelper(context).getWritableDatabase();
            ContentValues m = new ContentValues();
            m.put("filename", score.getScoreId() + ".pdf");
            m.put("info", score.getVisualizationString() + DIVISOR + score.getTitle()
                    + DIVISOR + score.getPagesAndCo() + DIVISOR + score.getPublisherInfo());
            db.insert("fileDownloaded", null, m);
        } catch (Exception e) {
            Log.e("datastorage", "exception while addDownloadedFileInDB: " + e.toString());
        } finally {
            closeQuietly(null, db); // close even when insert throws
            dbLock.unlock();
        }
    }

    /** return filenames */
    public static List<String> getDownloadedFilesName(Context context) {
        List<String> files = new LinkedList<String>();
        dbLock.lock();
        SQLiteDatabase db = null;
        Cursor cursor = null;
        try {
            db = new DataStorageHelper(context).getReadableDatabase();
            cursor = db.query("fileDownloaded", new String[] { "info" }, null, null, null, null, null, null);
            cursor.moveToFirst();
            while (!cursor.isAfterLast()) {
                String info = cursor.getString(0);
                // The display name is the first DIVISOR-separated segment.
                files.add(info.contains(DIVISOR) ? info.split(DIVISOR)[0] : info);
                cursor.moveToNext();
            }
        } catch (Exception e) {
            Log.e("datastorage", "exception while getDownloadedFilesName: " + e.toString());
        } finally {
            closeQuietly(cursor, db); // close even on exception (was leaked before)
            dbLock.unlock();
        }
        return files;
    }

    /** return fileinfo by ID */
    public static String[] getDownloadedFileinfo(Context context, String ID) {
        String[] infoFile = new String[3];
        dbLock.lock();
        SQLiteDatabase db = null;
        Cursor cursor = null;
        try {
            db = new DataStorageHelper(context).getReadableDatabase();
            // Parameterized selection: the previous "filename='" + ID + ".pdf'" concatenation
            // was injectable and broke for IDs containing quotes.
            cursor = db.query("fileDownloaded", new String[] { "info" }, "filename=?",
                    new String[] { ID + ".pdf" }, null, null, null, null);
            cursor.moveToFirst();
            while (!cursor.isAfterLast()) {
                // Packed layout: visualization --- title --- pages --- publisher
                String[] parts = cursor.getString(0).split(DIVISOR);
                if (parts.length >= 4) {
                    infoFile[0] = parts[1];
                    infoFile[1] = parts[2];
                    infoFile[2] = parts[3];
                    break;
                }
                cursor.moveToNext();
            }
        } catch (Exception e) {
            // Log message previously mislabeled as "getDownloadedFilesName".
            Log.e("datastorage", "exception while getDownloadedFileinfo: " + e.toString());
        } finally {
            closeQuietly(cursor, db);
            dbLock.unlock();
        }
        return infoFile;
    }

    /** synchronize db with the download directory */
    public static void syncronizeDownloadedFileTable(Context context) {
        dbLock.lock();
        SQLiteDatabase db = null;
        Cursor cursor = null;
        try {
            db = new DataStorageHelper(context).getWritableDatabase();
            List<String> filesInDB = new LinkedList<String>();
            cursor = db.query("fileDownloaded", new String[] { "filename" }, null, null, null, null, null, null);
            cursor.moveToFirst();
            while (!cursor.isAfterLast()) {
                filesInDB.add(cursor.getString(0));
                cursor.moveToNext();
            }
            List<String> filesInDir = getListOfFilesInDownloadDirectory();
            // Drop DB rows whose file no longer exists on disk.
            for (String dbfile : filesInDB) {
                if (!filesInDir.contains(dbfile)) {
                    db.delete("fileDownloaded", "filename=?", new String[] { dbfile });
                }
            }
            // Delete orphan files on disk that have no DB row.
            for (String dirfile : filesInDir) {
                if (!filesInDB.contains(dirfile)) {
                    new File(getExternalDownloadPath(), dirfile).delete();
                }
            }
        } catch (Exception e) {
            Log.e("datastorage", "exception while syncronizeDownloadedFileTable: " + e.toString());
        } finally {
            closeQuietly(cursor, db);
            dbLock.unlock();
        }
    }

    /** Deletes the downloaded file of a score and re-synchronises the table. */
    public static void deleteDownloadedFile(Context context, Score score) {
        DataStorage.getDownloadedScoreFile(score).delete();
        syncronizeDownloadedFileTable(context);
    }

    /** Deletes a downloaded file by name and re-synchronises the table. */
    public static void deleteDownloadedFile(Context context, String score) {
        new File(getExternalDownloadPath(), score).delete();
        syncronizeDownloadedFileTable(context);
    }

    /** Deletes every downloaded file and clears the bookkeeping table. */
    public static void deleteAllDownloadedFiles(Context context) {
        // Delete all files that are in the directory first.
        for (String dirfile : getListOfFilesInDownloadDirectory()) {
            deleteDownloadedFile(context, dirfile);
        }
        dbLock.lock();
        SQLiteDatabase db = null;
        try {
            db = new DataStorageHelper(context).getWritableDatabase();
            db.delete("fileDownloaded", null, null);
        } catch (Exception e) {
            // Log message previously mislabeled as "syncronizeDownloadedFileTable".
            Log.e("datastorage", "exception while deleteAllDownloadedFiles: " + e.toString());
        } finally {
            closeQuietly(null, db);
            dbLock.unlock();
        }
    }

    /** Reads the cached generic-list entries stored for the given page URL. */
    public static List<String> readGenericListFromDB(Context context, String baseUrl) {
        List<String> list = new LinkedList<String>();
        dbLock.lock();
        SQLiteDatabase db = null;
        Cursor cursor = null;
        try {
            // Read-only access (was getWritableDatabase); bound argument instead of
            // String.format("pageurl='%s'", ...) which was injectable via the URL.
            db = new DataStorageHelper(context).getReadableDatabase();
            cursor = db.query("genericlist", new String[] { "entry" }, "pageurl=?",
                    new String[] { baseUrl }, null, null, null, null);
            cursor.moveToFirst();
            while (!cursor.isAfterLast()) {
                list.add(cursor.getString(0));
                cursor.moveToNext();
            }
        } catch (Exception e) {
            Log.e("datastorage", "exception while readGenericListFromDB: " + e.toString());
        } finally {
            closeQuietly(cursor, db);
            dbLock.unlock();
        }
        return list;
    }

    /** Replaces the cached generic-list entries for the given page URL. */
    public static void writeGenericListToDB(Context context, String baseUrl, List<String> entries) {
        dbLock.lock();
        SQLiteDatabase db = null;
        try {
            db = new DataStorageHelper(context).getWritableDatabase();
            // Bound argument instead of String.format quoting (injection-safe).
            db.delete("genericlist", "pageurl=?", new String[] { baseUrl });
            db.beginTransaction();
            try {
                for (String r : entries) {
                    ContentValues m = new ContentValues();
                    m.put("pageurl", baseUrl);
                    m.put("entry", r);
                    db.insert("genericlist", null, m);
                }
                db.setTransactionSuccessful();
            } finally {
                // Guarantee the transaction is ended even when an insert throws.
                db.endTransaction();
            }
        } catch (Exception e) {
            Log.e("datastorage", "exception while writeGenericListToDB: " + e.toString());
        } finally {
            closeQuietly(null, db);
            dbLock.unlock();
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.net;
import org.apache.flink.annotation.VisibleForTesting;
import org.apache.flink.configuration.ConfigOption;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.IllegalConfigurationException;
import org.apache.flink.configuration.SecurityOptions;
import org.apache.flink.runtime.io.network.netty.SSLHandlerFactory;
import org.apache.flink.shaded.netty4.io.netty.handler.ssl.ClientAuth;
import org.apache.flink.shaded.netty4.io.netty.handler.ssl.JdkSslContext;
import org.apache.flink.shaded.netty4.io.netty.handler.ssl.OpenSsl;
import org.apache.flink.shaded.netty4.io.netty.handler.ssl.OpenSslX509KeyManagerFactory;
import org.apache.flink.shaded.netty4.io.netty.handler.ssl.SslContext;
import org.apache.flink.shaded.netty4.io.netty.handler.ssl.SslContextBuilder;
import org.apache.flink.shaded.netty4.io.netty.handler.ssl.SslProvider;
import javax.annotation.Nullable;
import javax.net.ServerSocketFactory;
import javax.net.SocketFactory;
import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLServerSocket;
import javax.net.ssl.SSLServerSocketFactory;
import javax.net.ssl.TrustManagerFactory;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetAddress;
import java.net.ServerSocket;
import java.nio.file.Files;
import java.security.KeyStore;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.security.UnrecoverableKeyException;
import java.security.cert.CertificateException;
import java.util.Arrays;
import java.util.List;
import static org.apache.flink.shaded.netty4.io.netty.handler.ssl.SslProvider.JDK;
import static org.apache.flink.shaded.netty4.io.netty.handler.ssl.SslProvider.OPENSSL;
import static org.apache.flink.shaded.netty4.io.netty.handler.ssl.SslProvider.OPENSSL_REFCNT;
import static org.apache.flink.util.Preconditions.checkNotNull;
/**
* Common utilities to manage SSL transport settings.
*/
public class SSLUtils {
/**
 * Checks whether SSL for internal communication (rpc, data transport, blob server) is enabled.
 */
public static boolean isInternalSSLEnabled(Configuration sslConfig) {
    // The deprecated global flag acts as the default when the internal-specific one is unset.
    @SuppressWarnings("deprecation")
    final boolean legacyDefault = sslConfig.getBoolean(SecurityOptions.SSL_ENABLED);
    return sslConfig.getBoolean(SecurityOptions.SSL_INTERNAL_ENABLED, legacyDefault);
}
/**
 * Checks whether SSL for the external REST endpoint is enabled.
 */
public static boolean isRestSSLEnabled(Configuration sslConfig) {
    // The deprecated global flag acts as the default when the REST-specific one is unset.
    @SuppressWarnings("deprecation")
    final boolean legacyDefault = sslConfig.getBoolean(SecurityOptions.SSL_ENABLED);
    return sslConfig.getBoolean(SecurityOptions.SSL_REST_ENABLED, legacyDefault);
}
/**
 * Checks whether mutual SSL authentication for the external REST endpoint is enabled.
 */
public static boolean isRestSSLAuthenticationEnabled(Configuration sslConfig) {
    checkNotNull(sslConfig, "sslConfig");
    // Mutual auth only applies when REST SSL is enabled in the first place.
    if (!isRestSSLEnabled(sslConfig)) {
        return false;
    }
    return sslConfig.getBoolean(SecurityOptions.SSL_REST_AUTHENTICATION_ENABLED);
}
/**
 * Creates a factory for SSL Server Sockets from the given configuration.
 * SSL Server Sockets are always part of internal communication.
 */
public static ServerSocketFactory createSSLServerSocketFactory(Configuration config) throws Exception {
    final SSLContext sslContext = createInternalSSLContext(config, false);
    if (sslContext == null) {
        throw new IllegalConfigurationException("SSL is not enabled");
    }
    // Wrap the plain factory so every server socket it creates gets the configured
    // protocol versions and cipher suites applied.
    return new ConfiguringSSLServerSocketFactory(
            sslContext.getServerSocketFactory(),
            getEnabledProtocols(config),
            getEnabledCipherSuites(config));
}
/**
 * Creates a factory for SSL Client Sockets from the given configuration.
 * SSL Client Sockets are always part of internal communication.
 */
public static SocketFactory createSSLClientSocketFactory(Configuration config) throws Exception {
    final SSLContext sslContext = createInternalSSLContext(config, true);
    if (sslContext == null) {
        throw new IllegalConfigurationException("SSL is not enabled");
    }
    return sslContext.getSocketFactory();
}
/**
 * Creates a SSLEngineFactory to be used by internal communication server endpoints.
 */
public static SSLHandlerFactory createInternalServerSSLEngineFactory(final Configuration config) throws Exception {
    final SslContext sslContext = createInternalNettySSLContext(config, false);
    if (sslContext == null) {
        throw new IllegalConfigurationException("SSL is not enabled for internal communication.");
    }
    final int handshakeTimeout = config.getInteger(SecurityOptions.SSL_INTERNAL_HANDSHAKE_TIMEOUT);
    final int closeNotifyFlushTimeout = config.getInteger(SecurityOptions.SSL_INTERNAL_CLOSE_NOTIFY_FLUSH_TIMEOUT);
    return new SSLHandlerFactory(sslContext, handshakeTimeout, closeNotifyFlushTimeout);
}
/**
 * Creates a SSLEngineFactory to be used by internal communication client endpoints.
 */
public static SSLHandlerFactory createInternalClientSSLEngineFactory(final Configuration config) throws Exception {
    final SslContext sslContext = createInternalNettySSLContext(config, true);
    if (sslContext == null) {
        throw new IllegalConfigurationException("SSL is not enabled for internal communication.");
    }
    final int handshakeTimeout = config.getInteger(SecurityOptions.SSL_INTERNAL_HANDSHAKE_TIMEOUT);
    final int closeNotifyFlushTimeout = config.getInteger(SecurityOptions.SSL_INTERNAL_CLOSE_NOTIFY_FLUSH_TIMEOUT);
    return new SSLHandlerFactory(sslContext, handshakeTimeout, closeNotifyFlushTimeout);
}
/**
 * Creates a {@link SSLHandlerFactory} to be used by the REST Servers.
 *
 * @param config The application configuration.
 */
public static SSLHandlerFactory createRestServerSSLEngineFactory(final Configuration config) throws Exception {
    final ClientAuth auth = isRestSSLAuthenticationEnabled(config) ? ClientAuth.REQUIRE : ClientAuth.NONE;
    final SslContext sslContext = createRestNettySSLContext(config, false, auth);
    if (sslContext == null) {
        throw new IllegalConfigurationException("SSL is not enabled for REST endpoints.");
    }
    // REST endpoints do not use handshake / close-notify timeouts (-1 = disabled).
    return new SSLHandlerFactory(sslContext, -1, -1);
}
/**
 * Creates a {@link SSLHandlerFactory} to be used by the REST Clients.
 *
 * @param config The application configuration.
 */
public static SSLHandlerFactory createRestClientSSLEngineFactory(final Configuration config) throws Exception {
    final ClientAuth auth = isRestSSLAuthenticationEnabled(config) ? ClientAuth.REQUIRE : ClientAuth.NONE;
    final SslContext sslContext = createRestNettySSLContext(config, true, auth);
    if (sslContext == null) {
        throw new IllegalConfigurationException("SSL is not enabled for REST endpoints.");
    }
    // REST endpoints do not use handshake / close-notify timeouts (-1 = disabled).
    return new SSLHandlerFactory(sslContext, -1, -1);
}
/**
 * Returns the configured comma-separated SSL protocol versions.
 * Whitespace around the entries is tolerated (e.g. "TLSv1.2, TLSv1.3"): the
 * previous plain {@code split(",")} kept the spaces, producing protocol names
 * the SSL engine rejects.
 */
private static String[] getEnabledProtocols(final Configuration config) {
    checkNotNull(config, "config must not be null");
    return config.getString(SecurityOptions.SSL_PROTOCOL).trim().split("\\s*,\\s*");
}
/**
 * Returns the configured comma-separated SSL cipher suites.
 * Whitespace around the entries is tolerated: the previous plain
 * {@code split(",")} kept the spaces, producing suite names the SSL engine rejects.
 */
private static String[] getEnabledCipherSuites(final Configuration config) {
    checkNotNull(config, "config must not be null");
    return config.getString(SecurityOptions.SSL_ALGORITHMS).trim().split("\\s*,\\s*");
}
/** Resolves the configured SSL provider ("JDK" or "OPENSSL", case-insensitive). */
@VisibleForTesting
static SslProvider getSSLProvider(final Configuration config) {
    checkNotNull(config, "config must not be null");
    final String providerString = config.getString(SecurityOptions.SSL_PROVIDER);
    if ("JDK".equalsIgnoreCase(providerString)) {
        return JDK;
    }
    if ("OPENSSL".equalsIgnoreCase(providerString)) {
        // OpenSSL is only usable when the native library could be loaded.
        if (!OpenSsl.isAvailable()) {
            throw new IllegalConfigurationException("openSSL not available", OpenSsl.unavailabilityCause());
        }
        return OPENSSL;
    }
    throw new IllegalConfigurationException("Unknown SSL provider: %s", providerString);
}
/**
 * Loads the configured truststore and builds a {@link TrustManagerFactory} from it.
 *
 * @param internal true for internal communication, false for the REST endpoint
 */
private static TrustManagerFactory getTrustManagerFactory(Configuration config, boolean internal)
        throws KeyStoreException, IOException, NoSuchAlgorithmException, CertificateException {
    // Scope-specific options win; the generic ssl.truststore(.password) act as fallback.
    final String storePath = getAndCheckOption(
            config,
            internal ? SecurityOptions.SSL_INTERNAL_TRUSTSTORE : SecurityOptions.SSL_REST_TRUSTSTORE,
            SecurityOptions.SSL_TRUSTSTORE);
    final String storePassword = getAndCheckOption(
            config,
            internal ? SecurityOptions.SSL_INTERNAL_TRUSTSTORE_PASSWORD : SecurityOptions.SSL_REST_TRUSTSTORE_PASSWORD,
            SecurityOptions.SSL_TRUSTSTORE_PASSWORD);

    final KeyStore trustStore = KeyStore.getInstance(KeyStore.getDefaultType());
    try (InputStream in = Files.newInputStream(new File(storePath).toPath())) {
        trustStore.load(in, storePassword.toCharArray());
    }

    final TrustManagerFactory tmf =
            TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
    tmf.init(trustStore);
    return tmf;
}
/**
 * Loads the configured keystore and builds a {@link KeyManagerFactory} from it.
 *
 * @param internal true for internal communication, false for the REST endpoint
 * @param provider determines whether Netty's OpenSSL-specific factory is needed
 */
private static KeyManagerFactory getKeyManagerFactory(
        Configuration config,
        boolean internal,
        SslProvider provider)
        throws KeyStoreException, IOException, NoSuchAlgorithmException, CertificateException,
        UnrecoverableKeyException {
    // Scope-specific options win; the generic ssl.keystore/key options act as fallback.
    final String storePath = getAndCheckOption(
            config,
            internal ? SecurityOptions.SSL_INTERNAL_KEYSTORE : SecurityOptions.SSL_REST_KEYSTORE,
            SecurityOptions.SSL_KEYSTORE);
    final String storePassword = getAndCheckOption(
            config,
            internal ? SecurityOptions.SSL_INTERNAL_KEYSTORE_PASSWORD : SecurityOptions.SSL_REST_KEYSTORE_PASSWORD,
            SecurityOptions.SSL_KEYSTORE_PASSWORD);
    final String keyPassword = getAndCheckOption(
            config,
            internal ? SecurityOptions.SSL_INTERNAL_KEY_PASSWORD : SecurityOptions.SSL_REST_KEY_PASSWORD,
            SecurityOptions.SSL_KEY_PASSWORD);

    final KeyStore keyStore = KeyStore.getInstance(KeyStore.getDefaultType());
    try (InputStream in = Files.newInputStream(new File(storePath).toPath())) {
        keyStore.load(in, storePassword.toCharArray());
    }

    // The OpenSSL-based providers require Netty's specialised key manager factory.
    final KeyManagerFactory kmf =
            (provider == OPENSSL || provider == OPENSSL_REFCNT)
                    ? new OpenSslX509KeyManagerFactory()
                    : KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
    kmf.init(keyStore, keyPassword.toCharArray());
    return kmf;
}
/**
 * Creates the SSL Context for internal SSL, if internal SSL is configured.
 * For internal SSL, the client and server side configuration are identical, because
 * of mutual authentication.
 */
@Nullable
private static SSLContext createInternalSSLContext(Configuration config, boolean clientMode) throws Exception {
    // Force the JDK provider: only JdkSslContext can expose a javax.net.ssl.SSLContext.
    final JdkSslContext nettyContext =
            (JdkSslContext) createInternalNettySSLContext(config, clientMode, JDK);
    return nettyContext == null ? null : nettyContext.context();
}
/** Resolves the configured SSL provider, then delegates to the provider-explicit overload. */
@Nullable
private static SslContext createInternalNettySSLContext(Configuration config, boolean clientMode)
        throws Exception {
    return createInternalNettySSLContext(config, clientMode, getSSLProvider(config));
}
/**
 * Creates the SSL Context for internal SSL, if internal SSL is configured.
 * For internal SSL, the client and server side configuration are identical, because
 * of mutual authentication.
 *
 * @return the built context, or {@code null} when internal SSL is disabled
 */
@Nullable
private static SslContext createInternalNettySSLContext(
        Configuration config, boolean clientMode, SslProvider provider)
        throws Exception {
    checkNotNull(config, "config");

    if (!isInternalSSLEnabled(config)) {
        return null; // callers interpret null as "SSL off"
    }

    String[] sslProtocols = getEnabledProtocols(config);
    List<String> ciphers = Arrays.asList(getEnabledCipherSuites(config));
    int sessionCacheSize = config.getInteger(SecurityOptions.SSL_INTERNAL_SESSION_CACHE_SIZE);
    int sessionTimeoutMs = config.getInteger(SecurityOptions.SSL_INTERNAL_SESSION_TIMEOUT);

    KeyManagerFactory kmf = getKeyManagerFactory(config, true, provider);
    TrustManagerFactory tmf = getTrustManagerFactory(config, true);
    // Internal endpoints always use mutual authentication:
    ClientAuth clientAuth = ClientAuth.REQUIRE;

    final SslContextBuilder sslContextBuilder;
    if (clientMode) {
        // Clients also present a key (mutual auth), hence the keyManager call:
        sslContextBuilder = SslContextBuilder.forClient().keyManager(kmf);
    } else {
        sslContextBuilder = SslContextBuilder.forServer(kmf);
    }

    return sslContextBuilder
        .sslProvider(provider)
        .protocols(sslProtocols)
        .ciphers(ciphers)
        .trustManager(tmf)
        .clientAuth(clientAuth)
        .sessionCacheSize(sessionCacheSize)
        .sessionTimeout(sessionTimeoutMs / 1000) // Netty expects seconds, config is in ms
        .build();
}
/**
 * Creates an SSL context for clients against the external REST endpoint.
 */
@Nullable
@VisibleForTesting
public static SSLContext createRestSSLContext(Configuration config, boolean clientMode) throws Exception {
    final ClientAuth auth = isRestSSLAuthenticationEnabled(config) ? ClientAuth.REQUIRE : ClientAuth.NONE;
    // Force the JDK provider: only JdkSslContext can expose a javax.net.ssl.SSLContext.
    final JdkSslContext nettyContext =
            (JdkSslContext) createRestNettySSLContext(config, clientMode, auth, JDK);
    return nettyContext == null ? null : nettyContext.context();
}
/**
 * Creates the REST SSL context using the SSL provider chosen via configuration.
 * Returns {@code null} when REST SSL is not enabled.
 */
@Nullable
private static SslContext createRestNettySSLContext(
Configuration config, boolean clientMode, ClientAuth clientAuth)
throws Exception {
return createRestNettySSLContext(config, clientMode, clientAuth, getSSLProvider(config));
}
/**
 * Creates the Netty SSL context for the external REST endpoint, or {@code null} when
 * REST SSL is disabled. When mutual authentication is requested, client and server
 * sides are configured identically.
 */
@Nullable
public static SslContext createRestNettySSLContext(
        Configuration config, boolean clientMode, ClientAuth clientAuth, SslProvider provider)
        throws Exception {
    checkNotNull(config, "config");

    if (!isRestSSLEnabled(config)) {
        return null;
    }

    final String[] protocols = getEnabledProtocols(config);
    final boolean mutualAuth = clientAuth != ClientAuth.NONE;

    final SslContextBuilder contextBuilder;
    if (clientMode) {
        contextBuilder = SslContextBuilder.forClient();

        // The client only needs its own key pair when the server authenticates it.
        if (mutualAuth) {
            contextBuilder.keyManager(getKeyManagerFactory(config, false, provider));
        }
    } else {
        // The server always presents a certificate.
        contextBuilder = SslContextBuilder.forServer(getKeyManagerFactory(config, false, provider));
    }

    // A trust store is needed on the client side, and on the server iff it verifies clients.
    if (clientMode || mutualAuth) {
        contextBuilder.trustManager(getTrustManagerFactory(config, false));
    }

    return contextBuilder
        .sslProvider(provider)
        .protocols(protocols)
        .clientAuth(clientAuth)
        .build();
}
// ------------------------------------------------------------------------
// Utilities
// ------------------------------------------------------------------------
/**
 * Reads {@code primaryOption}, falling back to {@code fallbackOption} when unset.
 *
 * @throws IllegalConfigurationException if neither option is configured.
 */
private static String getAndCheckOption(Configuration config, ConfigOption<String> primaryOption, ConfigOption<String> fallbackOption) {
    final String value = config.getString(primaryOption, config.getString(fallbackOption));

    // Guard clause instead of if/else: fail fast when nothing is configured.
    if (value == null) {
        throw new IllegalConfigurationException("The config option " + primaryOption.key() +
            " or " + fallbackOption.key() + " is missing.");
    }

    return value;
}
// ------------------------------------------------------------------------
// Wrappers for socket factories that additionally configure the sockets
// ------------------------------------------------------------------------
/**
 * {@link ServerSocketFactory} decorator that restricts every created server socket to
 * the configured protocols and cipher suites and requires client certificates.
 */
private static class ConfiguringSSLServerSocketFactory extends ServerSocketFactory {

    private final SSLServerSocketFactory delegate;
    private final String[] enabledProtocols;
    private final String[] enabledCipherSuites;

    ConfiguringSSLServerSocketFactory(
            SSLServerSocketFactory sslServerSocketFactory,
            String[] protocols,
            String[] cipherSuites) {
        this.delegate = sslServerSocketFactory;
        this.enabledProtocols = protocols;
        this.enabledCipherSuites = cipherSuites;
    }

    @Override
    public ServerSocket createServerSocket(int port) throws IOException {
        return configure((SSLServerSocket) delegate.createServerSocket(port));
    }

    @Override
    public ServerSocket createServerSocket(int port, int backlog) throws IOException {
        return configure((SSLServerSocket) delegate.createServerSocket(port, backlog));
    }

    @Override
    public ServerSocket createServerSocket(int port, int backlog, InetAddress ifAddress) throws IOException {
        return configure((SSLServerSocket) delegate.createServerSocket(port, backlog, ifAddress));
    }

    /** Applies protocol/cipher restrictions and enables mandatory client authentication. */
    private SSLServerSocket configure(SSLServerSocket socket) {
        socket.setEnabledProtocols(enabledProtocols);
        socket.setEnabledCipherSuites(enabledCipherSuites);
        socket.setNeedClientAuth(true);
        return socket;
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sling.api.request;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Collection;
import java.util.Locale;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletResponse;
import org.apache.sling.api.SlingHttpServletRequest;
import org.apache.sling.commons.testing.sling.MockResource;
import org.apache.sling.commons.testing.sling.MockSlingHttpServletRequest;
import junit.framework.TestCase;
/**
 * Tests for {@link RequestUtil}: If-Modified-Since handling and Accept-header parsing.
 */
public class RequestUtilTest extends TestCase {
// handleIfModifiedSince(modificationTime, ifModifiedSince):
// true when the resource is unchanged since the client's date (request presumably
// answered with 304 — confirm against RequestUtil javadoc); false when the resource
// is newer or no modification time is available (-1).
public void testHandleIfModifiedSince(){
assertTrue(RequestUtil.handleIfModifiedSince(getMockRequest(1309268989938L,1309269042730L),getMockResponse()));
assertFalse(RequestUtil.handleIfModifiedSince(getMockRequest(1309269042730L,1309268989938L),getMockResponse()));
assertFalse(RequestUtil.handleIfModifiedSince(getMockRequest(-1,1309268989938L),getMockResponse()));
}
/**
 * Builds a mock request whose resource reports {@code modificationTime} and whose
 * {@code getDateHeader} (i.e. the If-Modified-Since header) returns {@code ifModifiedSince}.
 */
protected SlingHttpServletRequest getMockRequest(final long modificationTime, final long ifModifiedSince) {
final String resourcePath = "foo";
final MockSlingHttpServletRequest r = new MockSlingHttpServletRequest(resourcePath, null, null, null, null) {
@Override
public long getDateHeader(String name) {
// Same value for any header name; the test only reads If-Modified-Since.
return ifModifiedSince;
}
};
final String path = "/foo/node";
final MockResource mr = new MockResource(null, path, null) {};
mr.getResourceMetadata().setModificationTime(modificationTime);
r.setResource(mr);
return r;
}
// Quality values default to 1.0 when absent; a malformed q (q=string) also falls
// back to 1.0; unknown tokens are simply not present in the returned map.
public void testParserAcceptHeader(){
assertEquals(RequestUtil.parserAcceptHeader("compress;q=0.5, gzip;q=1.0").get("compress"), 0.5);
assertEquals(RequestUtil.parserAcceptHeader("compress,gzip").get("compress"),1.0);
assertEquals(RequestUtil.parserAcceptHeader("compress").get("compress"),1.0);
assertEquals(RequestUtil.parserAcceptHeader("compress;q=string,gzip;q=1.0").get("compress"), 1.0);
assertNull(RequestUtil.parserAcceptHeader("compress;q=0.5, gzip;q=1.0").get("compres"));
}
/**
 * Returns a response whose methods are all no-op stubs; handleIfModifiedSince only
 * needs an object to pass, the status it sets is not asserted here.
 */
protected HttpServletResponse getMockResponse() {
return new HttpServletResponse() {
// --- all methods below are deliberate no-op / null / zero stubs ---
@Override
public void setLocale(Locale loc) {}
@Override
public void setContentType(String type) {}
@Override
public void setContentLength(int len) {}
@Override
public void setCharacterEncoding(String charset) {}
@Override
public void setBufferSize(int size) {}
@Override
public void resetBuffer() {}
@Override
public void reset() {}
@Override
public boolean isCommitted() {
return false;
}
@Override
public PrintWriter getWriter() throws IOException {
return null;
}
@Override
public ServletOutputStream getOutputStream() throws IOException {
return null;
}
@Override
public Locale getLocale() {
return null;
}
@Override
public String getContentType() {
return null;
}
@Override
public String getCharacterEncoding() {
return null;
}
@Override
public int getBufferSize() {
return 0;
}
@Override
public void flushBuffer() throws IOException {}
@Override
public void setStatus(int sc, String sm) {}
@Override
public void setStatus(int sc) {}
@Override
public void setIntHeader(String name, int value) {}
@Override
public void setHeader(String name, String value) {}
@Override
public void setDateHeader(String name, long date) {}
@Override
public void sendRedirect(String location) throws IOException {}
@Override
public void sendError(int sc, String msg) throws IOException {}
@Override
public void sendError(int sc) throws IOException {}
@Override
public String encodeUrl(String url) {
return null;
}
@Override
public String encodeURL(String url) {
return null;
}
@Override
public String encodeRedirectUrl(String url) {
return null;
}
@Override
public String encodeRedirectURL(String url) {
return null;
}
@Override
public boolean containsHeader(String name) {
return false;
}
@Override
public void addIntHeader(String name, int value) {}
@Override
public void addHeader(String name, String value) {}
@Override
public void addDateHeader(String name, long date) {}
@Override
public void addCookie(Cookie cookie) {}
@Override
public void setContentLengthLong(long len) {}
@Override
public int getStatus() {
return 0;
}
@Override
public String getHeader(String name) {
return null;
}
@Override
public Collection<String> getHeaders(String name) {
return null;
}
@Override
public Collection<String> getHeaderNames() {
return null;
}
};
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.codegen;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.UUID;
import org.apache.ignite.internal.GridCodegenConverter;
import org.apache.ignite.internal.GridDirectCollection;
import org.apache.ignite.internal.GridDirectMap;
import org.apache.ignite.internal.GridDirectTransient;
import org.apache.ignite.internal.IgniteCodeGeneratingFail;
import org.apache.ignite.internal.processors.cache.version.GridCacheVersion;
import org.apache.ignite.internal.processors.cache.version.GridCacheVersionEx;
import org.apache.ignite.internal.util.typedef.internal.SB;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteUuid;
import org.apache.ignite.plugin.extensions.communication.Message;
import org.apache.ignite.plugin.extensions.communication.MessageCollectionItemType;
import org.jetbrains.annotations.Nullable;
import static java.lang.reflect.Modifier.isStatic;
import static java.lang.reflect.Modifier.isTransient;
/**
* Direct marshallable code generator.
*/
public class MessageCodeGenerator {
/** Alphabetical field ordering keeps generated case numbering deterministic. */
private static final Comparator<Field> FIELD_CMP = new Comparator<Field>() {
@Override public int compare(Field f1, Field f2) {
return f1.getName().compareTo(f2.getName());
}
};
/** Default source directory (ignite-core module). */
private static final String DFLT_SRC_DIR = U.getIgniteHome() + "/modules/core/src/main/java";
/** Source directory of the indexing module. */
private static final String INDEXING_SRC_DIR = U.getIgniteHome() + "/modules/indexing/src/main/java";
/** Base type every processed class must implement. */
private static final Class<?> BASE_CLS = Message.class;
/** Empty output line. */
private static final String EMPTY = "";
/** Indent unit used in generated source lines. */
private static final String TAB = " ";
/** Name of the byte-buffer parameter in the generated methods. */
private static final String BUF_VAR = "buf";
/** Direct-marshalling item type keyed by Java class; anything else is a nested message. */
private static final Map<Class<?>, MessageCollectionItemType> TYPES = U.newHashMap(30);
static {
TYPES.put(byte.class, MessageCollectionItemType.BYTE);
TYPES.put(Byte.class, MessageCollectionItemType.BYTE);
TYPES.put(short.class, MessageCollectionItemType.SHORT);
TYPES.put(Short.class, MessageCollectionItemType.SHORT);
TYPES.put(int.class, MessageCollectionItemType.INT);
TYPES.put(Integer.class, MessageCollectionItemType.INT);
TYPES.put(long.class, MessageCollectionItemType.LONG);
TYPES.put(Long.class, MessageCollectionItemType.LONG);
TYPES.put(float.class, MessageCollectionItemType.FLOAT);
TYPES.put(Float.class, MessageCollectionItemType.FLOAT);
TYPES.put(double.class, MessageCollectionItemType.DOUBLE);
TYPES.put(Double.class, MessageCollectionItemType.DOUBLE);
TYPES.put(char.class, MessageCollectionItemType.CHAR);
TYPES.put(Character.class, MessageCollectionItemType.CHAR);
TYPES.put(boolean.class, MessageCollectionItemType.BOOLEAN);
TYPES.put(Boolean.class, MessageCollectionItemType.BOOLEAN);
TYPES.put(byte[].class, MessageCollectionItemType.BYTE_ARR);
TYPES.put(short[].class, MessageCollectionItemType.SHORT_ARR);
TYPES.put(int[].class, MessageCollectionItemType.INT_ARR);
TYPES.put(long[].class, MessageCollectionItemType.LONG_ARR);
TYPES.put(float[].class, MessageCollectionItemType.FLOAT_ARR);
TYPES.put(double[].class, MessageCollectionItemType.DOUBLE_ARR);
TYPES.put(char[].class, MessageCollectionItemType.CHAR_ARR);
TYPES.put(boolean[].class, MessageCollectionItemType.BOOLEAN_ARR);
TYPES.put(String.class, MessageCollectionItemType.STRING);
TYPES.put(BitSet.class, MessageCollectionItemType.BIT_SET);
TYPES.put(UUID.class, MessageCollectionItemType.UUID);
TYPES.put(IgniteUuid.class, MessageCollectionItemType.IGNITE_UUID);
}
/**
 * Resolves the marshalling item type for a class from the primitive/simple-type table;
 * anything not in the table must itself be a {@link Message}.
 *
 * @param cls Class.
 * @return Type enum value.
 */
private static MessageCollectionItemType typeEnum(Class<?> cls) {
    MessageCollectionItemType itemType = TYPES.get(cls);

    if (itemType != null)
        return itemType;

    assert Message.class.isAssignableFrom(cls) : cls;

    return MessageCollectionItemType.MSG;
}
/** Generated lines of the writeTo body. */
private final Collection<String> write = new ArrayList<>();
/** Generated lines of the readFrom body. */
private final Collection<String> read = new ArrayList<>();
/** Cached marshallable-field counts, keyed by superclass. */
private final Map<Class<?>, Integer> fieldCnt = new HashMap<>();
/** Root directory the sources are read from and written back to. */
private final String srcDir;
/** Field count of the class being processed, including inherited fields. */
private int totalFieldCnt;
/** Marshallable fields of the class being processed, sorted by name. */
private List<Field> fields;
/** Current indent level for emitted lines. */
private int indent;
/**
 * Entry point. The first argument overrides the source directory (defaults to the
 * ignite-core tree). The commented lines below are deliberate toggles: uncomment the
 * one(s) for the message classes you need to regenerate, then run.
 *
 * @param args Arguments.
 * @throws Exception In case of error.
 */
public static void main(String[] args) throws Exception {
String srcDir = DFLT_SRC_DIR;
if (args != null && args.length > 0)
srcDir = args[0];
MessageCodeGenerator gen = new MessageCodeGenerator(srcDir);
// gen.generateAll(true);
// gen.generateAndWrite(GridChangeGlobalStateMessageResponse.class);
// gen.generateAndWrite(GridNearAtomicUpdateRequest.class);
// gen.generateAndWrite(GridMessageCollection.class);
// gen.generateAndWrite(DataStreamerEntry.class);
// gen.generateAndWrite(GridDistributedLockRequest.class);
// gen.generateAndWrite(GridDistributedLockResponse.class);
// gen.generateAndWrite(GridNearLockRequest.class);
// gen.generateAndWrite(GridNearLockResponse.class);
// gen.generateAndWrite(GridDhtLockRequest.class);
// gen.generateAndWrite(GridDhtLockResponse.class);
//
// gen.generateAndWrite(GridDistributedTxPrepareRequest.class);
// gen.generateAndWrite(GridDistributedTxPrepareResponse.class);
// gen.generateAndWrite(GridNearTxPrepareRequest.class);
// gen.generateAndWrite(GridNearTxPrepareResponse.class);
// gen.generateAndWrite(GridDhtTxPrepareRequest.class);
// gen.generateAndWrite(GridDhtTxPrepareResponse.class);
//
// gen.generateAndWrite(GridDistributedTxFinishRequest.class);
// gen.generateAndWrite(GridDistributedTxFinishResponse.class);
// gen.generateAndWrite(GridNearTxFinishRequest.class);
// gen.generateAndWrite(GridNearTxFinishResponse.class);
// gen.generateAndWrite(GridDhtTxFinishRequest.class);
// gen.generateAndWrite(GridDhtTxFinishResponse.class);
//
// gen.generateAndWrite(GridCacheTxRecoveryRequest.class);
// gen.generateAndWrite(GridCacheTxRecoveryResponse.class);
// gen.generateAndWrite(GridQueryCancelRequest.class);
// gen.generateAndWrite(GridQueryFailResponse.class);
// gen.generateAndWrite(GridQueryNextPageRequest.class);
// gen.generateAndWrite(GridQueryNextPageResponse.class);
// gen.generateAndWrite(GridQueryRequest.class);
// gen.generateAndWrite(GridCacheSqlQuery.class);
// gen.generateAndWrite(GridH2Null.class);
// gen.generateAndWrite(GridH2Boolean.class);
// gen.generateAndWrite(GridH2Byte.class);
// gen.generateAndWrite(GridH2Short.class);
// gen.generateAndWrite(GridH2Integer.class);
// gen.generateAndWrite(GridH2Long.class);
// gen.generateAndWrite(GridH2Decimal.class);
// gen.generateAndWrite(GridH2Double.class);
// gen.generateAndWrite(GridH2Float.class);
// gen.generateAndWrite(GridH2Time.class);
// gen.generateAndWrite(GridH2Date.class);
// gen.generateAndWrite(GridH2Timestamp.class);
// gen.generateAndWrite(GridH2Bytes.class);
// gen.generateAndWrite(GridH2String.class);
// gen.generateAndWrite(GridH2Array.class);
// gen.generateAndWrite(GridH2JavaObject.class);
// gen.generateAndWrite(GridH2Uuid.class);
// gen.generateAndWrite(GridH2Geometry.class);
// gen.generateAndWrite(GridH2CacheObject.class);
// gen.generateAndWrite(GridH2IndexRangeRequest.class);
// gen.generateAndWrite(GridH2IndexRangeResponse.class);
// gen.generateAndWrite(GridH2RowRange.class);
// gen.generateAndWrite(GridH2RowRangeBounds.class);
// gen.generateAndWrite(GridH2QueryRequest.class);
// gen.generateAndWrite(GridH2RowMessage.class);
// gen.generateAndWrite(GridCacheVersion.class);
// gen.generateAndWrite(GridCacheVersionEx.class);
}
/**
 * Creates a generator rooted at the given source tree.
 *
 * @param srcDir Source directory.
 */
public MessageCodeGenerator(String srcDir) {
this.srcDir = srcDir;
}
/**
 * Generates code for every discovered message class, printing progress and skipping
 * any class that refuses generation.
 *
 * @param write Whether to write to file.
 * @throws Exception In case of error.
 */
public void generateAll(boolean write) throws Exception {
    for (Class<? extends Message> cls : classes()) {
        try {
            String abstractSuffix = Modifier.isAbstract(cls.getModifiers()) ? " (abstract)" : "";

            System.out.println("Processing class: " + cls.getName() + abstractSuffix);

            if (write)
                generateAndWrite(cls);
            else
                generate(cls);
        }
        catch (IllegalStateException e) {
            // Generation refused (e.g. missing annotation); report and continue.
            System.out.println("Will skip class generation [cls=" + cls + ", err=" + e.getMessage() + ']');
        }
    }
}
/**
 * Generates code for provided class and writes it to source file.
 * Note: this method must be called only from {@code generateAll(boolean)}
 * and only with updating {@code CLASSES_ORDER_FILE} and other auto generated files.
 * <p>
 * The source file is scanned line by line; the bodies of the generated
 * {@code writeTo}, {@code readFrom} and {@code fieldsCount} methods are replaced
 * in place, everything else is preserved.
 *
 * @param cls Class.
 * @throws Exception In case of error.
 */
@SuppressWarnings("ConstantConditions")
public void generateAndWrite(Class<? extends Message> cls) throws Exception {
    assert cls != null;

    generate(cls);

    File file = new File(srcDir, cls.getName().replace('.', File.separatorChar) + ".java");

    if (!file.exists() || !file.isFile()) {
        System.out.println("Source file not found: " + file.getPath());
        return;
    }

    Collection<String> src = new ArrayList<>();

    // try-with-resources replaces the manual close() in a finally block.
    // FileReader/FileWriter keep the platform default charset the generator always used.
    try (BufferedReader rdr = new BufferedReader(new FileReader(file))) {
        String line;

        // While 'skip' is set, input lines are dropped until the method's closing brace.
        boolean skip = false;

        boolean writeFound = false;
        boolean readFound = false;
        boolean fieldCntFound = false;

        while ((line = rdr.readLine()) != null) {
            if (!skip) {
                src.add(line);

                if (line.contains("public boolean writeTo(ByteBuffer buf, MessageWriter writer)")) {
                    src.addAll(write);
                    skip = true;
                    writeFound = true;
                }
                else if (line.contains("public boolean readFrom(ByteBuffer buf, MessageReader reader)")) {
                    src.addAll(read);
                    skip = true;
                    readFound = true;
                }
                else if (line.contains("public byte fieldsCount()")) {
                    src.add(TAB + TAB + "return " + totalFieldCnt + ";");
                    skip = true;
                    fieldCntFound = true;
                }
            }
            else if (line.startsWith(TAB + "}")) {
                src.add(line);
                skip = false;
            }
        }

        if (!writeFound)
            System.out.println(" writeTo method doesn't exist.");

        if (!readFound)
            System.out.println(" readFrom method doesn't exist.");

        if (!fieldCntFound)
            // Fixed message: the generated method is named fieldsCount(), not fieldCount.
            System.out.println(" fieldsCount method doesn't exist.");
    }

    try (BufferedWriter wr = new BufferedWriter(new FileWriter(file))) {
        for (String line : src)
            wr.write(line + '\n');
    }
}
/**
 * Generates the write/read code for the provided class into the {@code write} and
 * {@code read} buffers and recomputes {@code totalFieldCnt}.
 *
 * @param cls Class.
 * @throws Exception In case of error.
 */
private void generate(Class<? extends Message> cls) throws Exception {
    assert cls != null;

    if (cls.isInterface())
        return;

    if (cls.isAnnotationPresent(IgniteCodeGeneratingFail.class))
        throw new IllegalStateException("@IgniteCodeGeneratingFail is provided for class: " + cls.getName());

    write.clear();
    read.clear();

    fields = new ArrayList<>();

    // Only non-static, non-transient, non-@GridDirectTransient fields are marshalled.
    for (Field field : cls.getDeclaredFields()) {
        int mod = field.getModifiers();

        if (!isStatic(mod) && !isTransient(mod) && !field.isAnnotationPresent(GridDirectTransient.class))
            fields.add(field);
    }

    // Stable alphabetical order keeps generated state numbers deterministic.
    Collections.sort(fields, FIELD_CMP);

    int state = startState(cls);

    totalFieldCnt = state + fields.size();

    indent = 2;

    boolean hasSuper = cls.getSuperclass() != Object.class;

    start(write, hasSuper ? "writeTo" : null, true);
    start(read, hasSuper ? "readFrom" : null, false);

    indent++;

    for (Field field : fields)
        processField(field, state++);

    indent--;

    finish(write, null);
    finish(read, cls.getSimpleName());
}
/**
 * Computes the first state index of a class: the number of marshallable fields
 * declared across all of its strict superclasses (cached per superclass).
 *
 * @param cls Message class.
 * @return Start state.
 */
private int startState(Class<?> cls) {
    assert cls != null;

    Class<?> superCls = cls.getSuperclass();

    Integer cached = fieldCnt.get(superCls);

    if (cached != null)
        return cached;

    int cnt = 0;

    // Walk the superclass chain (excluding Object) and count marshallable fields.
    for (Class<?> c = cls.getSuperclass(); c != Object.class; c = c.getSuperclass()) {
        for (Field field : c.getDeclaredFields()) {
            int mod = field.getModifiers();

            if (!isStatic(mod) && !isTransient(mod) && !field.isAnnotationPresent(GridDirectTransient.class))
                cnt++;
        }
    }

    fieldCnt.put(superCls, cnt);

    return cnt;
}
/**
 * Emits the prologue shared by both generated methods: buffer binding, the optional
 * super-call, header writing (write side only) and the opening of the state switch.
 *
 * @param code Code lines.
 * @param superMtd Super class method name.
 * @param write Whether write code is generated.
 */
private void start(Collection<String> code, @Nullable String superMtd, boolean write) {
    assert code != null;

    String varName = write ? "writer" : "reader";

    code.add(builder().a(varName).a(".setBuffer(").a(BUF_VAR).a(");").toString());
    code.add(EMPTY);

    if (!write) {
        code.add(builder().a("if (!reader.beforeMessageRead())").toString());

        indent++;
        code.add(builder().a("return false;").toString());
        code.add(EMPTY);
        indent--;
    }

    if (superMtd != null) {
        returnFalseIfFailed(code, "super." + superMtd, BUF_VAR, varName);

        code.add(EMPTY);
    }

    if (write) {
        code.add(builder().a("if (!writer.isHeaderWritten()) {").toString());

        indent++;

        returnFalseIfFailed(code, "writer.writeHeader", "directType()", "fieldsCount()");

        code.add(EMPTY);
        code.add(builder().a("writer.onHeaderWritten();").toString());

        indent--;

        code.add(builder().a("}").toString());
        code.add(EMPTY);
    }

    if (!fields.isEmpty())
        code.add(builder().a("switch (").a(varName).a(".state()").a(") {").toString());
}
/**
 * Emits the epilogue: closes the state switch (when there are fields) and the
 * final return statement.
 *
 * @param code Code lines.
 * @param readClsName Class name passed to {@code afterMessageRead}, or {@code null} on the write side.
 */
private void finish(Collection<String> code, String readClsName) {
    assert code != null;

    if (!fields.isEmpty()) {
        code.add(builder().a("}").toString());
        code.add(EMPTY);
    }

    String ret = readClsName == null
        ? "return true;"
        : "return reader.afterMessageRead(" + readClsName + ".class);";

    code.add(builder().a(ret).toString());
}
/**
 * Validates the collection/map annotations of a field, then emits its write and
 * read case blocks.
 *
 * @param field Field.
 * @param opt Case option.
 */
private void processField(Field field, int opt) {
    assert field != null;
    assert opt >= 0;

    GridDirectCollection colAnn = field.getAnnotation(GridDirectCollection.class);
    GridDirectMap mapAnn = field.getAnnotation(GridDirectMap.class);

    Class<?> fldType = field.getType();

    if (Collection.class.isAssignableFrom(fldType) && colAnn == null)
        throw new IllegalStateException("@GridDirectCollection annotation is not provided for field: " +
            field.getName());

    if (Map.class.isAssignableFrom(fldType) && mapAnn == null)
        throw new IllegalStateException("@GridDirectMap annotation is not provided for field: " + field.getName());

    writeField(field, opt, colAnn, mapAnn);
    readField(field, opt, colAnn, mapAnn);
}
/**
 * Emits the {@code case} block that writes a single field.
 *
 * @param field Field.
 * @param opt Case option.
 * @param colAnn Collection annotation.
 * @param mapAnn Map annotation.
 */
private void writeField(Field field, int opt, @Nullable GridDirectCollection colAnn,
    @Nullable GridDirectMap mapAnn) {
    assert field != null;
    assert opt >= 0;

    write.add(builder().a("case ").a(opt).a(":").toString());

    indent++;

    // @GridCodegenConverter may substitute both the accessor expression and the written type.
    GridCodegenConverter converter = field.getAnnotation(GridCodegenConverter.class);

    String getExp = field.getName();
    Class<?> writeType = field.getType();

    if (converter != null) {
        if (!converter.get().isEmpty())
            getExp = converter.get();

        if (!converter.type().equals(GridCodegenConverter.Default.class))
            writeType = converter.type();
    }

    returnFalseIfWriteFailed(writeType, field.getName(), colAnn != null ? colAnn.value() : null,
        mapAnn != null ? mapAnn.keyType() : null, mapAnn != null ? mapAnn.valueType() : null, false, getExp);

    write.add(EMPTY);
    write.add(builder().a("writer.incrementState();").toString());
    write.add(EMPTY);

    indent--;
}
/**
 * Emits the {@code case} block that reads a single field.
 *
 * @param field Field.
 * @param opt Case option.
 * @param colAnn Collection annotation.
 * @param mapAnn Map annotation.
 */
private void readField(Field field, int opt, @Nullable GridDirectCollection colAnn,
    @Nullable GridDirectMap mapAnn) {
    assert field != null;
    assert opt >= 0;

    read.add(builder().a("case ").a(opt).a(":").toString());

    indent++;

    GridCodegenConverter fldPreproc = field.getAnnotation(GridCodegenConverter.class);

    // Fix: the read side must check the converter's set() expression, not get()
    // (copy-paste from writeField). Previously a converter with only set() defined
    // was silently ignored, and one with only get() produced an empty setter path.
    String setExp = (fldPreproc != null && !fldPreproc.set().isEmpty())? fldPreproc.set(): "";
    Class<?> readType = (fldPreproc != null && !fldPreproc.type().equals(GridCodegenConverter.Default.class))?
        fldPreproc.type(): field.getType();

    returnFalseIfReadFailed(readType, field.getName(), colAnn != null ? colAnn.value() : null,
        mapAnn != null ? mapAnn.keyType() : null, mapAnn != null ? mapAnn.valueType() : null, setExp);

    read.add(EMPTY);
    read.add(builder().a("reader.incrementState();").toString());
    read.add(EMPTY);

    indent--;
}
/**
 * Emits a {@code writer.writeXxx(...)} call for one field, wrapped in the standard
 * return-false-if-failed guard. The writer method is selected from the field type.
 *
 * @param type Field type.
 * @param name Field name.
 * @param colItemType Collection item type.
 * @param mapKeyType Map key type.
 * @param mapValType Map key value.
 * @param raw Raw write flag; when {@code true} the field name passed to the writer is {@code null}.
 * @param getExpr Expression yielding the value to write (field name or converter expression).
 */
private void returnFalseIfWriteFailed(Class<?> type, String name, @Nullable Class<?> colItemType,
@Nullable Class<?> mapKeyType, @Nullable Class<?> mapValType, boolean raw, String getExpr) {
assert type != null;
assert name != null;
String field = raw ? "null" : '"' + name + '"';
// Primitives.
if (type == byte.class)
returnFalseIfFailed(write, "writer.writeByte", field, getExpr);
else if (type == short.class)
returnFalseIfFailed(write, "writer.writeShort", field, getExpr);
else if (type == int.class)
returnFalseIfFailed(write, "writer.writeInt", field, getExpr);
else if (type == long.class)
returnFalseIfFailed(write, "writer.writeLong", field, getExpr);
else if (type == float.class)
returnFalseIfFailed(write, "writer.writeFloat", field, getExpr);
else if (type == double.class)
returnFalseIfFailed(write, "writer.writeDouble", field, getExpr);
else if (type == char.class)
returnFalseIfFailed(write, "writer.writeChar", field, getExpr);
else if (type == boolean.class)
returnFalseIfFailed(write, "writer.writeBoolean", field, getExpr);
// Primitive arrays.
else if (type == byte[].class)
returnFalseIfFailed(write, "writer.writeByteArray", field, getExpr);
else if (type == short[].class)
returnFalseIfFailed(write, "writer.writeShortArray", field, getExpr);
else if (type == int[].class)
returnFalseIfFailed(write, "writer.writeIntArray", field, getExpr);
else if (type == long[].class)
returnFalseIfFailed(write, "writer.writeLongArray", field, getExpr);
else if (type == float[].class)
returnFalseIfFailed(write, "writer.writeFloatArray", field, getExpr);
else if (type == double[].class)
returnFalseIfFailed(write, "writer.writeDoubleArray", field, getExpr);
else if (type == char[].class)
returnFalseIfFailed(write, "writer.writeCharArray", field, getExpr);
else if (type == boolean[].class)
returnFalseIfFailed(write, "writer.writeBooleanArray", field, getExpr);
// Simple object types.
else if (type == String.class)
returnFalseIfFailed(write, "writer.writeString", field, getExpr);
else if (type == BitSet.class)
returnFalseIfFailed(write, "writer.writeBitSet", field, getExpr);
else if (type == UUID.class)
returnFalseIfFailed(write, "writer.writeUuid", field, getExpr);
else if (type == IgniteUuid.class)
returnFalseIfFailed(write, "writer.writeIgniteUuid", field, getExpr);
// Enums are written as their ordinal byte, -1 encoding null.
else if (type.isEnum()) {
String arg = getExpr + " != null ? (byte)" + getExpr + ".ordinal() : -1";
returnFalseIfFailed(write, "writer.writeByte", field, arg);
}
// Nested messages, object arrays, collections (Sets excluded) and maps.
else if (BASE_CLS.isAssignableFrom(type))
returnFalseIfFailed(write, "writer.writeMessage", field, getExpr);
else if (type.isArray()) {
returnFalseIfFailed(write, "writer.writeObjectArray", field, getExpr,
"MessageCollectionItemType." + typeEnum(type.getComponentType()));
}
else if (Collection.class.isAssignableFrom(type) && !Set.class.isAssignableFrom(type)) {
assert colItemType != null;
returnFalseIfFailed(write, "writer.writeCollection", field, getExpr,
"MessageCollectionItemType." + typeEnum(colItemType));
}
else if (Map.class.isAssignableFrom(type)) {
assert mapKeyType != null;
assert mapValType != null;
returnFalseIfFailed(write, "writer.writeMap", field, getExpr,
"MessageCollectionItemType." + typeEnum(mapKeyType),
"MessageCollectionItemType." + typeEnum(mapValType));
}
else
throw new IllegalStateException("Unsupported type: " + type);
}
/**
 * Emits a {@code reader.readXxx(...)} call for one field, wrapped in the standard
 * return-false-if-failed guard. The reader method is selected from the field type.
 *
 * @param type Field type.
 * @param name Field name.
 * @param colItemType Collection item type.
 * @param mapKeyType Map key type.
 * @param mapValType Map value type.
 * @param setExpr Converter expression containing a {@code $val$} placeholder, or empty string for a plain assignment.
 */
private void returnFalseIfReadFailed(Class<?> type, @Nullable String name, @Nullable Class<?> colItemType,
@Nullable Class<?> mapKeyType, @Nullable Class<?> mapValType, String setExpr) {
assert type != null;
String field = '"' + name + '"';
// Primitives.
if (type == byte.class)
returnFalseIfReadFailed(name, "reader.readByte", setExpr, field);
else if (type == short.class)
returnFalseIfReadFailed(name, "reader.readShort", setExpr, field);
else if (type == int.class)
returnFalseIfReadFailed(name, "reader.readInt", setExpr, field);
else if (type == long.class)
returnFalseIfReadFailed(name, "reader.readLong", setExpr, field);
else if (type == float.class)
returnFalseIfReadFailed(name, "reader.readFloat", setExpr, field);
else if (type == double.class)
returnFalseIfReadFailed(name, "reader.readDouble", setExpr, field);
else if (type == char.class)
returnFalseIfReadFailed(name, "reader.readChar", setExpr, field);
else if (type == boolean.class)
returnFalseIfReadFailed(name, "reader.readBoolean", setExpr, field);
// Primitive arrays.
else if (type == byte[].class)
returnFalseIfReadFailed(name, "reader.readByteArray", setExpr, field);
else if (type == short[].class)
returnFalseIfReadFailed(name, "reader.readShortArray", setExpr, field);
else if (type == int[].class)
returnFalseIfReadFailed(name, "reader.readIntArray", setExpr, field);
else if (type == long[].class)
returnFalseIfReadFailed(name, "reader.readLongArray", setExpr, field);
else if (type == float[].class)
returnFalseIfReadFailed(name, "reader.readFloatArray", setExpr, field);
else if (type == double[].class)
returnFalseIfReadFailed(name, "reader.readDoubleArray", setExpr, field);
else if (type == char[].class)
returnFalseIfReadFailed(name, "reader.readCharArray", setExpr, field);
else if (type == boolean[].class)
returnFalseIfReadFailed(name, "reader.readBooleanArray", setExpr, field);
// Simple object types.
else if (type == String.class)
returnFalseIfReadFailed(name, "reader.readString", setExpr, field);
else if (type == BitSet.class)
returnFalseIfReadFailed(name, "reader.readBitSet", setExpr, field);
else if (type == UUID.class)
returnFalseIfReadFailed(name, "reader.readUuid", setExpr, field);
else if (type == IgniteUuid.class)
returnFalseIfReadFailed(name, "reader.readIgniteUuid", setExpr, field);
// Enums: read the ordinal byte into a local, then convert via fromOrdinal(..).
else if (type.isEnum()) {
String loc = name + "Ord";
read.add(builder().a("byte ").a(loc).a(";").toString());
read.add(EMPTY);
returnFalseIfReadFailed(loc, "reader.readByte", setExpr, field);
read.add(EMPTY);
read.add(builder().a(name).a(" = ").a(type.getSimpleName()).a(".fromOrdinal(").a(loc).a(");").toString());
}
// Nested messages, object arrays, collections (Sets excluded) and maps.
else if (BASE_CLS.isAssignableFrom(type))
returnFalseIfReadFailed(name, "reader.readMessage", setExpr, field);
else if (type.isArray()) {
Class<?> compType = type.getComponentType();
returnFalseIfReadFailed(name, "reader.readObjectArray", setExpr, field,
"MessageCollectionItemType." + typeEnum(compType),
compType.getSimpleName() + ".class");
}
else if (Collection.class.isAssignableFrom(type) && !Set.class.isAssignableFrom(type)) {
assert colItemType != null;
returnFalseIfReadFailed(name, "reader.readCollection", setExpr, field,
"MessageCollectionItemType." + typeEnum(colItemType));
}
else if (Map.class.isAssignableFrom(type)) {
assert mapKeyType != null;
assert mapValType != null;
// Last reader argument tells the runtime to restore a LinkedHashMap vs HashMap.
boolean linked = type.equals(LinkedHashMap.class);
returnFalseIfReadFailed(name, "reader.readMap", setExpr, field,
"MessageCollectionItemType." + typeEnum(mapKeyType),
"MessageCollectionItemType." + typeEnum(mapValType),
linked ? "true" : "false");
}
else
throw new IllegalStateException("Unsupported type: " + type);
}
/**
 * Emits the read call for a single variable followed by the standard
 * {@code reader.isLastRead()} guard.
 *
 * @param var Variable name.
 * @param mtd Method name.
 * @param setConverter Converter expression with a {@code $val$} placeholder, or empty string.
 * @param args Method arguments.
 */
private void returnFalseIfReadFailed(String var, String mtd, String setConverter, @Nullable String... args) {
    assert mtd != null;

    // StringBuilder instead of repeated String concatenation in a loop; the delimiter
    // check also removes the trailing-", "-then-substring dance.
    StringBuilder argsBuf = new StringBuilder();

    if (args != null) {
        for (String arg : args) {
            if (argsBuf.length() > 0)
                argsBuf.append(", ");

            argsBuf.append(arg);
        }
    }

    String argsStr = argsBuf.toString();

    if (setConverter.isEmpty())
        read.add(builder().a(var).a(" = ").a(mtd).a("(").a(argsStr).a(");").toString());
    else {
        // Splice the raw read call into the converter at its $val$ placeholder.
        read.add(builder().a(var).a(" = ").a(setConverter
            .replace("$val$", new SB().a(mtd).a("(").a(argsStr).a(")").toString())).a(";").toString());
    }

    read.add(EMPTY);

    read.add(builder().a("if (!reader.isLastRead())").toString());

    indent++;
    read.add(builder().a("return false;").toString());
    indent--;
}
/**
* @param code Code lines.
* @param accessor Field or method name.
* @param args Method arguments.
*/
private void returnFalseIfFailed(Collection<String> code, String accessor, @Nullable String... args) {
assert code != null;
assert accessor != null;
String argsStr = "";
if (args != null && args.length > 0) {
for (String arg : args)
argsStr += arg + ", ";
argsStr = argsStr.substring(0, argsStr.length() - 2);
}
code.add(builder().a("if (!").a(accessor).a("(").a(argsStr).a("))").toString());
indent++;
code.add(builder().a("return false;").toString());
indent--;
}
/**
* Creates new builder with correct indent.
*
* @return Builder.
*/
private SB builder() {
assert indent > 0;
SB sb = new SB();
for (int i = 0; i < indent; i++)
sb.a(TAB);
return sb;
}
    /**
     * Gets all direct marshallable classes.
     * Classes listed in {@code classesOrder} come first, in that order; all
     * remaining classes follow, sorted by fully-qualified name.
     * This ordering is required to keep {@code directType} values stable.
     *
     * @return Classes.
     * @throws Exception In case of error.
     */
    private Collection<Class<? extends Message>> classes() throws Exception {
        // TreeSet ordered by fully-qualified class name gives a deterministic result.
        Collection<Class<? extends Message>> col = new TreeSet<>(
            new Comparator<Class<? extends Message>>() {
                @Override public int compare(Class<? extends Message> c1,
                    Class<? extends Message> c2) {
                    return c1.getName().compareTo(c2.getName());
                }
            });

        // NOTE(review): assumes the application class loader is a URLClassLoader,
        // which holds pre-Java-9 only — confirm the target JDK for this tool.
        URLClassLoader ldr = (URLClassLoader)getClass().getClassLoader();

        // Scan every classpath root (directory) for compiled classes.
        for (URL url : ldr.getURLs()) {
            File file = new File(url.toURI());

            // +1 skips the path separator following the classpath root.
            int prefixLen = file.getPath().length() + 1;

            processFile(file, ldr, prefixLen, col);
        }

        return col;
    }
/**
* Recursively process provided file or directory.
*
* @param file File.
* @param ldr Class loader.
* @param prefixLen Path prefix length.
* @param col Classes.
* @throws Exception In case of error.
*/
@SuppressWarnings("unchecked")
private void processFile(File file, ClassLoader ldr, int prefixLen,
Collection<Class<? extends Message>> col) throws Exception {
assert file != null;
assert ldr != null;
assert prefixLen > 0;
assert col != null;
if (!file.exists())
throw new FileNotFoundException("File doesn't exist: " + file);
if (file.isDirectory()) {
for (File f : file.listFiles())
processFile(f, ldr, prefixLen, col);
}
else {
assert file.isFile();
String path = file.getPath();
if (path.endsWith(".class")) {
String clsName = path.substring(prefixLen, path.length() - 6).replace(File.separatorChar, '.');
Class<?> cls = Class.forName(clsName, false, ldr);
if (cls.getDeclaringClass() == null && cls.getEnclosingClass() == null &&
!BASE_CLS.equals(cls) && BASE_CLS.isAssignableFrom(cls))
col.add((Class<? extends Message>)cls);
}
}
}
}
| |
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.net.test.util;
import android.util.Base64;
import androidx.annotation.GuardedBy;
import org.chromium.base.Log;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketException;
import java.security.KeyStore;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.net.ssl.KeyManager;
import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.SSLContext;
/**
* Simple http test server for testing.
*
* This server runs in a thread in the current process, so it is convenient
 * for loopback testing without the need to set up TCP forwarding to the
* host computer.
*/
public class WebServer {
    private static final String TAG = "WebServer";

    // Live server instances, tracked so the constructor can enforce that only
    // deliberately "additional" servers coexist (one set per scheme).
    private static Set<WebServer> sInstances = new HashSet<>();
    private static Set<WebServer> sSecureInstances = new HashSet<>();

    private final ServerThread mServerThread;
    // Base URI (scheme://host:port) clients should use; set by setServerHost().
    private String mServerUri;
    private final boolean mSsl;
    private final int mPort;

    // Canonical success status line for writeResponse().
    public static final String STATUS_OK = "200 OK";
/**
* Writes an HTTP response to |output|.
* |status| should be one of the STATUS_* values above.
*/
public static void writeResponse(OutputStream output, String status, byte[] body)
throws IOException {
if (body == null) {
body = new byte[0];
}
output.write(("HTTP/1.1 " + status + "\r\nContent-Length: " + String.valueOf(body.length)
+ "\r\n\r\n")
.getBytes());
output.write(body);
output.flush();
}
/** Represents an HTTP header. */
public static class HTTPHeader {
public final String key;
public final String value;
/** Constructs an HTTP header. */
public HTTPHeader(String key, String value) {
this.key = key;
this.value = value;
}
/**
* Parse an HTTP header from a string line. Returns null if the line is not a valid HTTP
* header.
*/
public static HTTPHeader parseLine(String line) {
String[] parts = line.split(":", 2);
if (parts.length == 2) {
return new HTTPHeader(parts[0].trim(), parts[1].trim());
}
return null;
}
@Override
public String toString() {
return key + ": " + value;
}
}
    /** Thrown when an HTTP request could not be parsed. */
    public static class InvalidRequest extends Exception {
        /** Constructor. Uses a fixed message; callers distinguish failures by type. */
        public InvalidRequest() {
            super("Invalid HTTP request");
        }
    }
/** A parsed HTTP request. */
public static class HTTPRequest {
private String mMethod;
private String mURI;
private String mHTTPVersion;
private HTTPHeader[] mHeaders;
private byte[] mBody;
@Override
public String toString() {
StringBuilder builder = new StringBuilder();
builder.append(requestLine());
builder.append("\r\n");
for (HTTPHeader header : mHeaders) {
builder.append(header.toString());
builder.append("\r\n");
}
if (mBody != null) {
builder.append("\r\n");
try {
builder.append(new String(mBody, "UTF-8"));
} catch (UnsupportedEncodingException e) {
builder.append("<binary body, length=").append(mBody.length).append(">\r\n");
}
}
return builder.toString();
}
/** Returns the request line as a String. */
public String requestLine() {
return mMethod + " " + mURI + " " + mHTTPVersion;
}
/** Returns the request method. */
public String getMethod() {
return mMethod;
}
/** Returns the request URI. */
public String getURI() {
return mURI;
}
/** Returns the request HTTP version. */
public String getHTTPVersion() {
return mHTTPVersion;
}
/** Returns the request headers. */
public HTTPHeader[] getHeaders() {
return mHeaders;
}
/** Returns the request body. */
public byte[] getBody() {
return mBody;
}
/**
* Returns the header value for the given header name. If a header is present multiple
* times, this only returns the first occurence. Returns "" if the header is not found.
*/
public String headerValue(String headerName) {
for (String value : headerValues(headerName)) {
return value;
}
return "";
}
/** Returns all header values for the given header name. */
public List<String> headerValues(String headerName) {
List<String> matchingHeaders = new ArrayList<String>();
for (HTTPHeader header : mHeaders) {
if (header.key.equalsIgnoreCase(headerName)) {
matchingHeaders.add(header.value);
}
}
return matchingHeaders;
}
private static boolean hasChunkedTransferEncoding(HTTPRequest req) {
List<String> transferEncodings = req.headerValues("Transfer-Encoding");
for (String encoding : transferEncodings) {
if (encoding.equals("chunked")) {
return true;
}
}
return false;
}
/** Parses an HTTP request from an input stream. */
public static HTTPRequest parse(InputStream stream) throws InvalidRequest, IOException {
boolean firstLine = true;
HTTPRequest req = new HTTPRequest();
ArrayList<HTTPHeader> mHeaders = new ArrayList<HTTPHeader>();
ByteArrayOutputStream line = new ByteArrayOutputStream();
for (int b = stream.read(); b != -1; b = stream.read()) {
if (b == '\r') {
int next = stream.read();
if (next == '\n') {
String lineString;
try {
lineString = new String(line.toByteArray(), "UTF-8");
} catch (UnsupportedEncodingException e) {
throw new InvalidRequest();
}
line.reset();
if (firstLine) {
String[] parts = lineString.split(" ", 3);
if (parts.length != 3) {
throw new InvalidRequest();
}
req.mMethod = parts[0];
req.mURI = parts[1];
req.mHTTPVersion = parts[2];
firstLine = false;
} else {
if (lineString.length() == 0) {
break;
}
HTTPHeader header = HTTPHeader.parseLine(lineString);
if (header != null) {
mHeaders.add(header);
}
}
} else if (next == -1) {
throw new InvalidRequest();
} else {
line.write(b);
line.write(next);
}
} else {
line.write(b);
}
}
if (firstLine) {
if (line.size() == 0) return null;
throw new InvalidRequest();
}
req.mHeaders = mHeaders.toArray(new HTTPHeader[0]);
int contentLength = -1;
if (req.mMethod.equals("GET") || req.mMethod.equals("HEAD")) {
contentLength = 0;
}
try {
contentLength = Integer.parseInt(req.headerValue("Content-Length"));
} catch (NumberFormatException e) {
}
if (contentLength >= 0) {
byte[] content = new byte[contentLength];
for (int offset = 0; offset < contentLength;) {
int bytesRead = stream.read(content, offset, contentLength);
if (bytesRead == -1) { // short read, keep truncated content.
content = Arrays.copyOf(content, offset);
break;
}
offset += bytesRead;
}
req.mBody = content;
} else if (hasChunkedTransferEncoding(req)) {
ByteArrayOutputStream mBody = new ByteArrayOutputStream();
byte[] buffer = new byte[1000];
int bytesRead;
while ((bytesRead = stream.read(buffer, 0, buffer.length)) != -1) {
mBody.write(buffer, 0, bytesRead);
}
req.mBody = mBody.toByteArray();
}
return req;
}
}
    /** An interface for handling HTTP requests. */
    public interface RequestHandler {
        /** handleRequest is called when an HTTP request is received. handleRequest should write a
         * response to stream. The handler runs on the server thread. */
        void handleRequest(HTTPRequest request, OutputStream stream);
    }
    // Optional handler invoked for each parsed request; null means requests
    // are accepted and then dropped without a response.
    private RequestHandler mRequestHandler;

    /** Sets the request handler. */
    public void setRequestHandler(RequestHandler handler) {
        mRequestHandler = handler;
    }
    /** Handle an HTTP request. Calls |mRequestHandler| if set. */
    private void handleRequest(HTTPRequest request, OutputStream stream) {
        // Handlers are only ever invoked from the server thread; enforce that.
        assert Thread.currentThread()
                == mServerThread : "handleRequest called from non-server thread";
        if (mRequestHandler != null) {
            mRequestHandler.handleRequest(request, stream);
        }
    }
    /**
     * Rebuilds |mServerUri| for the given hostname, keeping the scheme and the
     * port the server socket actually bound to (relevant when port 0 was requested).
     */
    public void setServerHost(String hostname) {
        try {
            mServerUri = new java.net
                    .URI(mSsl ? "https" : "http", null, hostname,
                            mServerThread.mSocket.getLocalPort(), null, null, null)
                    .toString();
        } catch (java.net.URISyntaxException e) {
            // Should be unreachable for valid hostnames; log loudly if it happens.
            Log.wtf(TAG, e.getMessage());
        }
    }
/**
* Create and start a local HTTP server instance. Additional must only be true
* if an instance was already created. You are responsible for calling
* shutdown() on each instance you create.
*
* @param port Port number the server must use, or 0 to automatically choose a free port.
* @param ssl True if the server should be using secure sockets.
* @param additional True if creating an additional server instance.
* @throws Exception
*/
public WebServer(int port, boolean ssl, boolean additional) throws Exception {
mPort = port;
mSsl = ssl;
if (mSsl) {
if ((additional && WebServer.sSecureInstances.isEmpty())
|| (!additional && !WebServer.sSecureInstances.isEmpty())) {
throw new IllegalStateException("There are " + WebServer.sSecureInstances.size()
+ " SSL WebServer instances. Expected " + (additional ? ">=1" : "0")
+ " because additional is " + additional);
}
} else {
if ((additional && WebServer.sInstances.isEmpty())
|| (!additional && !WebServer.sInstances.isEmpty())) {
throw new IllegalStateException("There are " + WebServer.sSecureInstances.size()
+ " WebServer instances. Expected " + (additional ? ">=1" : "0")
+ " because additional is " + additional);
}
}
mServerThread = new ServerThread(mPort, mSsl);
setServerHost("localhost");
mServerThread.start();
if (mSsl) {
WebServer.sSecureInstances.add(this);
} else {
WebServer.sInstances.add(this);
}
}
    /**
     * Create and start a local HTTP server instance. Convenience overload that
     * creates a primary (non-additional) server.
     *
     * @param port Port number the server must use, or 0 to automatically choose a free port.
     * @param ssl True if the server should be using secure sockets.
     * @throws Exception
     */
    public WebServer(int port, boolean ssl) throws Exception {
        this(port, ssl, false);
    }
    /**
     * Terminate the http server. Unregisters this instance, cancels any in-flight
     * request, and blocks until the server thread has exited.
     */
    public void shutdown() {
        if (mSsl) {
            WebServer.sSecureInstances.remove(this);
        } else {
            WebServer.sInstances.remove(this);
        }
        try {
            mServerThread.cancelAllRequests();
            // Block until the server thread is done shutting down.
            mServerThread.join();
        } catch (MalformedURLException e) {
            // Caught before the general IOException below (it is a subclass) to
            // surface it as a programming error rather than a runtime failure.
            throw new IllegalStateException(e);
        } catch (InterruptedException | IOException e) {
            throw new RuntimeException(e);
        }
    }
public String getBaseUrl() {
return mServerUri + "/";
}
/**
* Gets the URL on the server under which a particular request path will be accessible.
*
* This only gets the URL, you still need to set the response if you intend to access it.
*
* @param requestPath The path to respond to.
* @return The full URL including the requestPath.
*/
public String getResponseUrl(String requestPath) {
return mServerUri + requestPath;
}
    /** Accept loop: serves one request per connection on a dedicated thread. */
    private class ServerThread extends Thread {
        private final boolean mIsSsl;
        private ServerSocket mSocket;
        private SSLContext mSslContext;

        // Guards cancellation state and the socket of the request being served.
        private final Object mLock = new Object();
        @GuardedBy("mLock")
        private boolean mIsCancelled;
        @GuardedBy("mLock")
        private Socket mCurrentRequestSocket;

        /**
         * Defines the keystore contents for the server, BKS version. Holds just a
         * single self-generated key. The subject name is "Test Server".
         */
        private static final String SERVER_KEYS_BKS =
                "AAAAAQAAABQDkebzoP1XwqyWKRCJEpn/t8dqIQAABDkEAAVteWtleQAAARpYl20nAAAAAQAFWC41"
                + "MDkAAAJNMIICSTCCAbKgAwIBAgIESEfU1jANBgkqhkiG9w0BAQUFADBpMQswCQYDVQQGEwJVUzET"
                + "MBEGA1UECBMKQ2FsaWZvcm5pYTEMMAoGA1UEBxMDTVRWMQ8wDQYDVQQKEwZHb29nbGUxEDAOBgNV"
                + "BAsTB0FuZHJvaWQxFDASBgNVBAMTC1Rlc3QgU2VydmVyMB4XDTA4MDYwNTExNTgxNFoXDTA4MDkw"
                + "MzExNTgxNFowaTELMAkGA1UEBhMCVVMxEzARBgNVBAgTCkNhbGlmb3JuaWExDDAKBgNVBAcTA01U"
                + "VjEPMA0GA1UEChMGR29vZ2xlMRAwDgYDVQQLEwdBbmRyb2lkMRQwEgYDVQQDEwtUZXN0IFNlcnZl"
                + "cjCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA0LIdKaIr9/vsTq8BZlA3R+NFWRaH4lGsTAQy"
                + "DPMF9ZqEDOaL6DJuu0colSBBBQ85hQTPa9m9nyJoN3pEi1hgamqOvQIWcXBk+SOpUGRZZFXwniJV"
                + "zDKU5nE9MYgn2B9AoiH3CSuMz6HRqgVaqtppIe1jhukMc/kHVJvlKRNy9XMCAwEAATANBgkqhkiG"
                + "9w0BAQUFAAOBgQC7yBmJ9O/eWDGtSH9BH0R3dh2NdST3W9hNZ8hIa8U8klhNHbUCSSktZmZkvbPU"
                + "hse5LI3dh6RyNDuqDrbYwcqzKbFJaq/jX9kCoeb3vgbQElMRX8D2ID1vRjxwlALFISrtaN4VpWzV"
                + "yeoHPW4xldeZmoVtjn8zXNzQhLuBqX2MmAAAAqwAAAAUvkUScfw9yCSmALruURNmtBai7kQAAAZx"
                + "4Jmijxs/l8EBaleaUru6EOPioWkUAEVWCxjM/TxbGHOi2VMsQWqRr/DZ3wsDmtQgw3QTrUK666sR"
                + "MBnbqdnyCyvM1J2V1xxLXPUeRBmR2CXorYGF9Dye7NkgVdfA+9g9L/0Au6Ugn+2Cj5leoIgkgApN"
                + "vuEcZegFlNOUPVEs3SlBgUF1BY6OBM0UBHTPwGGxFBBcetcuMRbUnu65vyDG0pslT59qpaR0TMVs"
                + "P+tcheEzhyjbfM32/vwhnL9dBEgM8qMt0sqF6itNOQU/F4WGkK2Cm2v4CYEyKYw325fEhzTXosck"
                + "MhbqmcyLab8EPceWF3dweoUT76+jEZx8lV2dapR+CmczQI43tV9btsd1xiBbBHAKvymm9Ep9bPzM"
                + "J0MQi+OtURL9Lxke/70/MRueqbPeUlOaGvANTmXQD2OnW7PISwJ9lpeLfTG0LcqkoqkbtLKQLYHI"
                + "rQfV5j0j+wmvmpMxzjN3uvNajLa4zQ8l0Eok9SFaRr2RL0gN8Q2JegfOL4pUiHPsh64WWya2NB7f"
                + "V+1s65eA5ospXYsShRjo046QhGTmymwXXzdzuxu8IlnTEont6P4+J+GsWk6cldGbl20hctuUKzyx"
                + "OptjEPOKejV60iDCYGmHbCWAzQ8h5MILV82IclzNViZmzAapeeCnexhpXhWTs+xDEYSKEiG/camt"
                + "bhmZc3BcyVJrW23PktSfpBQ6D8ZxoMfF0L7V2GQMaUg+3r7ucrx82kpqotjv0xHghNIm95aBr1Qw"
                + "1gaEjsC/0wGmmBDg1dTDH+F1p9TInzr3EFuYD0YiQ7YlAHq3cPuyGoLXJ5dXYuSBfhDXJSeddUkl"
                + "k1ufZyOOcskeInQge7jzaRfmKg3U94r+spMEvb0AzDQVOKvjjo1ivxMSgFRZaDb/4qw=";

        // Test-only keystore password; safe to embed in a test utility.
        private static final String PASSWORD = "android";
/**
* Loads a keystore from a base64-encoded String. Returns the KeyManager[]
* for the result.
*/
private KeyManager[] getKeyManagers() throws Exception {
byte[] bytes = Base64.decode(SERVER_KEYS_BKS, Base64.DEFAULT);
InputStream inputStream = new ByteArrayInputStream(bytes);
KeyStore keyStore = KeyStore.getInstance(KeyStore.getDefaultType());
keyStore.load(inputStream, PASSWORD.toCharArray());
inputStream.close();
String algorithm = KeyManagerFactory.getDefaultAlgorithm();
KeyManagerFactory keyManagerFactory = KeyManagerFactory.getInstance(algorithm);
keyManagerFactory.init(keyStore, PASSWORD.toCharArray());
return keyManagerFactory.getKeyManagers();
}
        /** Records the socket currently being served so cancelAllRequests() can close it. */
        private void setCurrentRequestSocket(Socket socket) {
            synchronized (mLock) {
                mCurrentRequestSocket = socket;
            }
        }
        /** Returns whether shutdown has been requested (thread-safe read). */
        private boolean getIsCancelled() {
            synchronized (mLock) {
                return mIsCancelled;
            }
        }
        // Called from non-server thread. Marks the thread cancelled, aborts the
        // in-flight request (if any), then closes the listening socket so a
        // blocked accept() unblocks with an exception.
        public void cancelAllRequests() throws IOException {
            synchronized (mLock) {
                mIsCancelled = true;
                if (mCurrentRequestSocket != null) {
                    try {
                        mCurrentRequestSocket.close();
                    } catch (IOException ignored) {
                        // Catching this to ensure the server socket is closed as well.
                    }
                }
            }
            // Any current and subsequent accept call will throw instead of block.
            mSocket.close();
        }
        /**
         * Binds the server socket (plain or TLS), retrying briefly if the port
         * is still held by a previous test run.
         *
         * @param port Port to bind, or 0 for an ephemeral port.
         * @param ssl True to serve over TLS using the embedded test keystore.
         */
        public ServerThread(int port, boolean ssl) throws Exception {
            super("ServerThread");
            mIsSsl = ssl;
            // If tests are run back-to-back, it may take time for the port to become available.
            // Retry a few times with a sleep to wait for the port.
            int retry = 3;
            while (true) {
                try {
                    if (mIsSsl) {
                        mSslContext = SSLContext.getInstance("TLS");
                        mSslContext.init(getKeyManagers(), null, null);
                        mSocket = mSslContext.getServerSocketFactory().createServerSocket(port);
                    } else {
                        mSocket = new ServerSocket(port);
                    }
                    return;
                } catch (IOException e) {
                    Log.w(TAG, e.getMessage());
                    if (--retry == 0) {
                        throw e;
                    }
                    // sleep in case server socket is still being closed
                    Thread.sleep(1000);
                }
            }
        }
        @Override
        public void run() {
            try {
                // Accept-and-serve loop: one request per connection, handled
                // inline on this thread, socket always closed afterwards.
                while (!getIsCancelled()) {
                    Socket socket = mSocket.accept();
                    try {
                        setCurrentRequestSocket(socket);
                        HTTPRequest request = HTTPRequest.parse(socket.getInputStream());
                        if (request != null) {
                            handleRequest(request, socket.getOutputStream());
                        }
                    } catch (InvalidRequest | IOException e) {
                        Log.e(TAG, e.getMessage());
                    } finally {
                        socket.close();
                    }
                }
            } catch (SocketException e) {
                // Expected when cancelAllRequests() closes the listening socket;
                // this is the normal shutdown path, so exit quietly.
            } catch (IOException e) {
                Log.w(TAG, e.getMessage());
            }
        }
}
}
| |
/**
The MIT License (MIT)
Copyright (c) 2012-2014 Valentin Konovalov
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.*/
package ru.valle.net.tcpip;
import android.util.BuildConfig;
import android.util.Log;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.util.concurrent.ArrayBlockingQueue;
/**
* TCP level
*
* @author vkonova
*/
public final class TCPLink {
    // --- Connection state ---------------------------------------------------
    private boolean started; // guarded by this
    private int mss; // maximum segment size; guarded by this
    private int state = STATE_CLOSED; // TCP state-machine state; guarded by this
    private int lastReceivedRecepientBufferSize = 256; // peer's advertised window; guarded by intraThreadLock
    private int lastReceivedAck; // last ACK number received from the peer; guarded by intraThreadLock
    private int mineSeq; // our sequence number (bytes sent so far)
    private int heSentData; // bytes received from the peer; guarded by intraThreadLock
    private final int localPort;
    private final IPLink ipLink;
    // Downstream: data from the remote side, read by the application layer.
    private final PipedInputStream downstream;
    // Upstream: data written by the application layer, to be sent remotely.
    private final NamedPipedOutputStream upstream;
    private final PipedOutputStream downstreamFeed;
    private final PipedInputStream upstreamFeed;
    // Coordinates the upload/download worker threads.
    private final Object intraThreadLock = new Object();
    // Connection states (a subset of the TCP state machine).
    private static final int STATE_CLOSED = 0;
    private static final int STATE_LISTEN = 1;
    private static final int STATE_SYN_SENT = 2;
    private static final int STATE_SYN_RECEIVED = 3;
    private static final int STATE_ESTABLISHED = 4;
    private static final int STATE_NO_MORE_DATA_TO_DOWNLOAD = 5;
    private static final int STATE_NO_MORE_DATA_TO_UPLOAD = 6;
    private static final int STATE_E_DESTROYED = 7;
    private final boolean isClient;
    private int remotePort;
    private Thread appLayerThread;
    private final int remoteAddress;
    // Inbound packet queue feeding the worker threads (bounded, FIFO-fair).
    private final ArrayBlockingQueue<IP> incomingPackets = new ArrayBlockingQueue<IP>(16, true);
    private Thread downloadThread;
    private Thread uploadThread;
    // Demultiplexing key identifying this link within the owning IPLink.
    private final Long key;
    // final byte[] readBuf = new byte[65000];
    // volatile int readBufFreeSpace;
    /**
     * Creates a TCP link endpoint.
     *
     * @param localPort local TCP port; ports above 1024 mark this side as the client.
     * @param remotePort remote TCP port.
     * @param remoteAddress remote IPv4 address (packed int).
     * @param key demultiplexing key within the owning IPLink.
     * @param ipLink IP layer used to send/receive segments.
     * @throws IOException if the internal pipes cannot be created.
     */
    public TCPLink(int localPort, int remotePort, int remoteAddress, Long key, IPLink ipLink) throws IOException {
        this.key = key;
        this.localPort = localPort;
        this.remotePort = remotePort;
        // NOTE(review): heuristic — ephemeral ports (> 1024) imply client role; confirm.
        isClient = localPort > 1024;
        this.ipLink = ipLink;
        this.remoteAddress = remoteAddress;
        downstream = new PipedInputStream(downstreamFeed = new PipedOutputStream(), 5100);//2 sent data received from server
        upstream = new NamedPipedOutputStream(upstreamFeed = new PipedInputStream(5100));//2 read data sent to server
        // readBufFreeSpace = 65000;
    }
    /** Returns the stream the app layer writes outgoing data to; lazily starts the link. */
    public synchronized OutputStream getUpstream() {
        start();
        return upstream;
    }
    /** Returns the stream the app layer reads incoming data from; lazily starts the link. */
    public synchronized InputStream getDownstream() {
        start();
        return downstream;
    }
private int getReadBufSize() {
int available;
try {
available = downstream.available();
// if(available==64) {
// Log.d(TAG, "");
// downstream.available();
// }
} catch (IOException ex) {
available = -1;
}
return available >= 0 ? 65000 - available : 65000;
}
    /** Returns the current TCP state (one of the STATE_* constants). */
    synchronized int getState() {
        return state;
    }
private void start() {
if (!started) {
started = true;
uploadThread = new Thread(new Runnable() {
@Override
public void run() {
try {
if (isClient) {
int seqNumber = 0;
int ackNumber = 0;
byte[] tcp = TCP.build(localPort, remotePort, seqNumber, ackNumber,
TCP.SYN, getReadBufSize(), null,
ipLink.localAddress, remoteAddress);
// Log.d(TAG, (!isClient ? "" : " ")
// + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "U: send SYN " + seqNumber + "/" + ackNumber);
synchronized (TCPLink.this) {
if (state != STATE_CLOSED) {
throw new RuntimeException();//"not closed on " + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "U start");
}
synchronized (intraThreadLock) {
mineSeq = 1;//sent so far
}
ipLink.send(tcp, remoteAddress);
state = STATE_SYN_SENT;
TCPLink.this.notifyAll();
}
}
byte[] sendingBuf;
long start;// = System.currentTimeMillis();
synchronized (TCPLink.this) {
if (!isClient && !(state == STATE_CLOSED || state == STATE_LISTEN || state == STATE_SYN_RECEIVED)) {
throw new RuntimeException();//"not closed or listen or synrec on " + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "U start, now " + state);
}
while (state != STATE_ESTABLISHED) {
// Log.d(TAG, (!isClient ? "" : " ")
// + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "U: wait STATE_ESTABLISHED");
try {
TCPLink.this.wait();
} catch (InterruptedException ex) {
return;
}
if (state == STATE_E_DESTROYED || state == STATE_NO_MORE_DATA_TO_DOWNLOAD) {
// Log.d(TAG, (!isClient ? "" : " ")
// + "U: " + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "U " + " instead of STATE_ESTABLISHED have got " + state + ", exit U thread");
return;
}
}
sendingBuf = new byte[mss > 0 ? mss : 1200];
}
// if (System.currentTimeMillis() - start > 1000) {
// Log.d(TAG, (!isClient ? "" : " ")
// + "SPEED ALERT: connection await wait in " + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "U " + "took " + (System.currentTimeMillis() - start));
// }
// long startms;// = System.currentTimeMillis();
boolean closed = false;
while (true) {
int dataReaded = 0;
boolean pushRequested = false;
int mineSeqOld;
final int NAGLES_TIME = 200;
long startr = -1;
while (true) {
int maxLen;
synchronized (intraThreadLock) {
maxLen = Math.min(sendingBuf.length, lastReceivedRecepientBufferSize);
mineSeqOld = mineSeq;//sent so far
}
// Log.d(TAG, "readts maxlen " + maxLen + " dataReaded " + dataReaded + " to read " + (maxLen - dataReaded) + " avail " + upstreamFeed.available() + " " + upstreamFeed);
if (maxLen > 0 && dataReaded < maxLen) {
int dataLenToReadNow = dataReaded > 0 ? Math.min(upstreamFeed.available(), maxLen - dataReaded) : maxLen - dataReaded;
int readedRightNow = upstreamFeed.read(sendingBuf, dataReaded, dataLenToReadNow);
if (startr < 0 && readedRightNow >= 0) {
startr = System.nanoTime();
}
if (readedRightNow < 0) {
// Log.d(TAG, "readts CLOSED" + TCPLink.this.hashCode());
closed = true;
break;
}
dataReaded += readedRightNow;
// Log.d(TAG, "readts rsf dataReaded " + dataReaded + " to read " + (maxLen - dataReaded));
}
pushRequested |= upstream.popPushStatus();
int timeSinceStartReading = startr < 0 ? 0 : (int) ((System.nanoTime() - startr) / 1000000);
if (closed || (pushRequested && dataReaded > 0) || (maxLen > 0 && dataReaded == maxLen) || (timeSinceStartReading >= NAGLES_TIME && dataReaded > 0)) {
// Log.d(TAG, "readts rsf dataReaded END " + dataReaded
// + " because closed? " + closed
// + ", push & readed some " + (pushRequested && dataReaded > 0)
// + ", maximum amount " + (maxLen > 0 && dataReaded == maxLen)
// + ", nagle timeout " + (timeSinceStartReading >= NAGLES_TIME && dataReaded > 0));
break;
}
synchronized (intraThreadLock) {
int timeToWait = Math.max(10, (NAGLES_TIME - timeSinceStartReading) / 16);
intraThreadLock.wait(timeToWait);
}
}
// if (startr >= 0 && (System.nanoTime() - startr) / 1000000 > 500) {
// Log.d(TAG, (!isClient ? "" : " ")
// + "SPEED ALERT: indata read in " + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "U " + "took " + (System.nanoTime() - startr) / 1000000);
// }
if (dataReaded > 0) {
// receivedBytes += dataReaded;
// Log.d(TAG, "!readts " + dataReaded);
byte[] sendData = new byte[dataReaded];
System.arraycopy(sendingBuf, 0, sendData, 0, dataReaded);
//we have got some data to send (like HTTP)
while (true) {
byte flags = TCP.ACK;
if (pushRequested) {
pushRequested = false;
// Log.d(TAG, (!isClient ? "" : " ")
// + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "U: PSH REQ");
flags |= TCP.PSH;
}
final int heSentDataLoc;
synchronized (intraThreadLock) {
heSentDataLoc = heSentData;
// mineSeq = mineSeqOld + dataReaded;
}
byte[] tcp = TCP.build(localPort, remotePort, mineSeqOld, heSentDataLoc,
flags, getReadBufSize(), sendData,
ipLink.localAddress, remoteAddress);
// Log.d(TAG, (!isClient ? "" : " ")
// + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "U: have " + sendData.length + " bytes to send, SEND ACK " + mineSeqOld + "/" + heSentDataLoc + " len " + sendData.length);
ipLink.send(tcp, remoteAddress);
// startms = System.currentTimeMillis();
synchronized (intraThreadLock) {
// if (Math.abs(mineSeqOld + dataReaded - lastReceivedAck) > dataReaded) {
// Log.d(TAG, (!isClient ? "" : " ")
// + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "U: !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!");
// }
if (mineSeqOld + dataReaded != lastReceivedAck) {
int time = 7000;
// Log.d(TAG, (!isClient ? "" : " ")
// + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "U: await mineseq " + (mineSeqOld + dataReaded) + " == lastReceivedAck " + lastReceivedAck
// + " sent len " + dataReaded
// + " received/sent/confirmedSent " + receivedBytes + "/" + sentBytes + "/" + confirmedSentBytes);
while (true) {
start = System.nanoTime() / 1000000;
intraThreadLock.wait(time);
time -= System.nanoTime() / 1000000 - start;
if (mineSeqOld + dataReaded == lastReceivedAck || time < 100) {
break;
}
}
// Log.d(TAG, (!isClient ? "" : " ")
// + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "U: wait for sent so far(mineseq) " + (mineSeqOld + dataReaded) + ", confirmed sent so far(lastReceivedAck) " + lastReceivedAck + " state " + state);
}
// if (System.currentTimeMillis() - startms > 1000) {
// Log.d(TAG, (!isClient ? "" : " ")
// + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "U: SPEED ALERT: packet confirmation for mineseq " + (mineSeqOld + dataReaded) + " in " + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "U " + "took " + (System.currentTimeMillis() - startms));
// }
// lastReceivedAckLocal = lastReceivedAck;
if (mineSeqOld + dataReaded == lastReceivedAck) {
mineSeq = mineSeqOld + dataReaded;
// lastReceivedRecepientBufferSize /= 2;
lastReceivedRecepientBufferSize -= dataReaded;
break;
} else {
// if (state == STATE_NO_MORE_DATA_TO_DOWNLOAD) {
// Log.d(TAG, (!isClient ? "" : " ")
// + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "U: seems there is a lost packet, he already sent all data( FINWAIT) " + lastReceivedAck + ", now " + mineSeq + ", ack is " + mineSeqOld + " ack is uptodate? " + (heSentDataLoc == heSentData));
// }
// Log.d(TAG, (!isClient ? "" : " ")
// + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "U: seems there is a lost packet, need to retry data sending from pos " + lastReceivedAck + ", now " + mineSeq + ", ack is " + mineSeqOld + " ack is uptodate? " + (heSentDataLoc == heSentData));
}
}
}
// Log.d(TAG, (!isClient ? "" : " ")
// + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "U: ack ok ");
} else if (dataReaded == 0) {
if (closed) {//there will be no data from this side
byte flags = TCP.FIN | TCP.ACK;
final int heSentDataLoc;
synchronized (intraThreadLock) {
heSentDataLoc = heSentData;
mineSeqOld = mineSeq;
mineSeq++;
}
byte[] tcp = TCP.build(localPort, remotePort, mineSeqOld, heSentDataLoc,
flags, getReadBufSize(), null,
ipLink.localAddress, remoteAddress);
// Log.d(TAG, (!isClient ? "" : " ")
// + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "U: have FIN to send, SEND FIN " + mineSeqOld + "/" + heSentDataLoc);
ipLink.send(tcp, remoteAddress);
synchronized (intraThreadLock) {
if (mineSeq != lastReceivedAck) {
int time = 3000;
// Log.d(TAG, (!isClient ? "" : " ")
// + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "U: await FIN mineseq " + mineSeq + " == lastReceivedAck " + lastReceivedAck);
while (true) {
start = System.nanoTime() / 1000000;
intraThreadLock.wait(time);
time -= System.nanoTime() / 1000000 - start;
if (mineSeq == lastReceivedAck || time < 100) {
break;
}
}
// if (time < 2500) {
// Log.d(TAG, (!isClient ? "" : " ")
// + "SPEED ALERT: waiting for packet in " + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "U " + "took " + (3000 - time));
// }
// Log.d(TAG, (!isClient ? "" : " ")
// + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "U: end wait FIN for sent so far(mineseq) " + mineSeq + ", confirmed sent so far(lastReceivedAck) " + lastReceivedAck);
}
// if (mineSeq != lastReceivedAck) {
// Log.d(TAG, (!isClient ? "" : " ")
// + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "U: FIN seems there is a lost packet, close anyway, state " + state);
// }
}
synchronized (TCPLink.this) {
// Log.d(TAG, (isClient ? "C" : "S") + TCPLink.this.hashCode() + "U" + " START waiting for read side end, to keep ESTABLISHED state, curr state is " + state);
if (state == STATE_ESTABLISHED) {
state = STATE_NO_MORE_DATA_TO_UPLOAD;
}
// start = System.currentTimeMillis();
while (!(state == STATE_NO_MORE_DATA_TO_DOWNLOAD || state == STATE_E_DESTROYED)) {
// Log.d(TAG, (isClient ? "C" : "S") + TCPLink.this.hashCode() + "U" + " waits for read side end, to keep ESTABLISHED state, curr state is " + state);
TCPLink.this.wait(20000);
}
// if (System.currentTimeMillis() - start > 1000) {
// Log.d(TAG, (!isClient ? "" : " ")
// + "SPEED ALERT: fin in " + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "U " + "took " + (System.currentTimeMillis() - start));
// }
// Log.d(TAG, (isClient ? "C" : "S") + TCPLink.this.hashCode() + "U" + " correctly closed");
}
return;
} else {
// start = System.currentTimeMillis();
synchronized (intraThreadLock) {
// if (lastReceivedRecepientBufferSize < 2000) {
// Log.d(TAG, (!isClient ? "" : " ")
// + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "U: wait, receiver is not ready, recepient buff size is " + lastReceivedRecepientBufferSize + " state " + state);
//
// } else {
// Log.d(TAG, (!isClient ? "" : " ")
// + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "U: wait, no data to send, recepient buff size is " + lastReceivedRecepientBufferSize + " state " + state + " upstreamFeed avail " + upstreamFeed.available());
//
// }
intraThreadLock.wait(150);
}
synchronized (TCPLink.this) {
if (state == STATE_E_DESTROYED) {
return;
}
}
// if (System.currentTimeMillis() - start > 1000) {
// Log.d(TAG, (!isClient ? "" : " ")
// + "SPEED ALERT: uncongestion wait in " + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "U " + "took " + (System.currentTimeMillis() - start));
// }
}
}
}
} catch (Exception ex) {
if (BuildConfig.DEBUG) {
Log.d("TCP", "ex " + ex + " in " + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "U");
}
} finally {
synchronized (TCPLink.this) {
// Log.d(TAG, "thread death " + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "U" + " state " + state + " received/sent/confirmedSent " + receivedBytes + "/" + sentBytes + "/" + confirmedSentBytes);
state = STATE_E_DESTROYED;
ipLink.close(TCPLink.this);
}
}
}
});
uploadThread.start();
downloadThread = new Thread(new Runnable() {
// private long finWaitStart;
@Override
public void run() {
try {
if (!isClient) {
synchronized (TCPLink.this) {
if (state != STATE_CLOSED) {
throw new RuntimeException();//"not closed on " + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "D start");
}
state = STATE_LISTEN;
synchronized (intraThreadLock) {
mineSeq = 0;
}
// Log.d(TAG, (!isClient ? "" : " ")
// + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "D: in STATE_LISTEN");
}
} else {
// Log.d(TAG, (!isClient ? "" : " ")
// + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "D: in STATE_CLOSED");
synchronized (TCPLink.this) {
while (state != STATE_SYN_SENT) {
TCPLink.this.wait(10000);
}
}
}
while (true) {
final int stateLoc = getState();
if (stateLoc == STATE_E_DESTROYED) {
break;
} else {
// long start = System.currentTimeMillis();
// Log.d(TAG, (!isClient ? "" : " ")
// + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "D wait for a new packet " + state);
IP ip = incomingPackets.take();
// if (System.currentTimeMillis() - start > 1000) {
// Log.d(TAG, (!isClient ? "" : " ")
// + "SPEED ALERT: packet receive in " + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "D " + "took " + (System.currentTimeMillis() - start));
// }
if (ip != null) {
if ((ip.tcp.flags & TCP.RST) == TCP.RST) {
synchronized (TCPLink.this) {
state = STATE_E_DESTROYED;
TCPLink.this.notifyAll();
synchronized (intraThreadLock) {
intraThreadLock.notifyAll();
}
return;
}
}
switch (stateLoc) {
case STATE_CLOSED:
// Log.e(TAG, "ERROR: D thread in closed state");
break;
case STATE_LISTEN:
if ((ip.tcp.flags & TCP.SYN) == TCP.SYN) {
processSYN(ip);
} else {
// Log.d(TAG, (!isClient ? "" : " ")
// + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "D: received NOT SYN in STATE_LISTEN");
}
break;
case STATE_SYN_SENT:
if ((ip.tcp.flags & TCP.SYN) == TCP.SYN
&& (ip.tcp.flags & TCP.ACK) == TCP.ACK) {
synchronized (TCPLink.this) {
if (state != STATE_SYN_SENT) {
// Log.e(TAG, !isClient ? "" : " "
// + "ERROR received SYN ACK in state " + state);//closed or destroyed?
return;
}
final int mineSeqLoc, heSentDataLoc;
synchronized (intraThreadLock) {
heSentDataLoc = heSentData = ip.tcp.seqNumber + 1;
mineSeqLoc = mineSeq;
}
byte[] tcp = TCP.build(localPort, remotePort, mineSeqLoc, heSentDataLoc,
TCP.ACK, getReadBufSize(), null,
ipLink.localAddress, remoteAddress);
// Log.d(TAG, (!isClient ? "" : " ")
// + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "D: received SYN ACK " + ip.tcp.seqNumber + "/" + ip.tcp.ackNumber + " send ACK " + mineSeqLoc + "/" + heSentDataLoc);
ipLink.send(tcp, remoteAddress);
state = STATE_ESTABLISHED;
// Log.d(TAG, "* " + (!isClient ? "" : " ")
// + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "D: in STATE_ESTABLISHED SS");
TCPLink.this.notifyAll();
}
} else {
// Log.w(TAG, !isClient ? "" : " "
// + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "D: ignore packet in STATE_SYN_SENT with flags " + TCP.showFlags(ip.tcp.flags));//closed or destroyed?
}
break;
case STATE_SYN_RECEIVED:
if ((ip.tcp.flags & TCP.ACK) == TCP.ACK) {
synchronized (TCPLink.this) {
if (state != STATE_SYN_RECEIVED) {
// Log.d(TAG, !isClient ? "" : " "
// + "ERROR received ACK in state " + state);//closed or destroyed?
return;
}
synchronized (intraThreadLock) {
lastReceivedAck = ip.tcp.ackNumber;
lastReceivedRecepientBufferSize = ip.tcp.windowSize;
intraThreadLock.notifyAll();
heSentData = ip.tcp.seqNumber;
}
state = STATE_ESTABLISHED;
TCPLink.this.notifyAll();
// Log.d(TAG, "* " + (!isClient ? "" : " ")
// + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "D: in STATE_ESTABLISHED SR");
// if (ip.tcp.payload != null && ip.tcp.payload.length > 0) {
// Log.e(TAG, (!isClient ? "" : " ")
// + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "D: in STATE_SYN_RECEIVED received GOOD packet, but payload will be ignored " + ip.tcp.payload.length + " " + ip.tcp.flags + " " + ip.tcp.seqNumber + "/" + ip.tcp.ackNumber);
//
// }
}
} else {
// Log.e(TAG, (!isClient ? "" : " ")
// + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "D: in STATE_SYN_RECEIVED received strange packet " + ip.tcp.seqNumber + "/" + ip.tcp.ackNumber + " flags " + TCP.showFlags(ip.tcp.flags));
}
break;
case STATE_NO_MORE_DATA_TO_DOWNLOAD:
// if ((System.nanoTime() - finWaitStart) / 1000000 > 120000L) {
// Log.w(TAG, (!isClient ? "" : " ")
// + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "D: received FIN long time ago");
//
// }
case STATE_NO_MORE_DATA_TO_UPLOAD:
case STATE_ESTABLISHED:
if ((ip.tcp.flags & TCP.SYN) == TCP.SYN) {
// Log.e(TAG, "SYN in established state! " + ip.tcp.seqNumber + "/" + ip.tcp.ackNumber + " flags " + TCP.showFlags(ip.tcp.flags));
synchronized (TCPLink.this) {
state = STATE_LISTEN;
synchronized (intraThreadLock) {
heSentData = lastReceivedAck = 0;
mineSeq = 0;
}
TCPLink.this.notifyAll();
}
processSYN(ip);
break;
}
final boolean flowOk;
final boolean fin;
final int mineSeqLoc,
heSentDataLoc;
int payloadLen = ip.tcp.payload.length;
synchronized (intraThreadLock) {
lastReceivedRecepientBufferSize = ip.tcp.windowSize;
fin = (ip.tcp.flags & (TCP.FIN)) == TCP.FIN;
flowOk = heSentData == ip.tcp.seqNumber;
if (flowOk) {
heSentData += payloadLen;
if (fin) {
heSentData++;
}
}
// if (payloadLen > 0) {
// Log.d(TAG, (!isClient ? "" : " ")
// + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "D: received (NONZERO " + payloadLen + ") " + ip.tcp.seqNumber + "/" + ip.tcp.ackNumber + " flags " + TCP.showFlags(ip.tcp.flags)
// + " set lastReceivedAck to " + ip.tcp.ackNumber + (" it's " + (((ip.tcp.flags & TCP.ACK) == TCP.ACK))) + ", was " + lastReceivedAck
// + " heSentData now " + heSentData + ", " + (flowOk ? "FLOW OK " : "FLOW ERROR ")
// + " in " + TCPLink.this);
// } else {
// Log.d(TAG, (!isClient ? "" : " ")
// + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "D: received (ZERO bytes)" + ip.tcp.seqNumber + "/" + ip.tcp.ackNumber + " flags " + TCP.showFlags(ip.tcp.flags)
// + " heSentData now " + heSentData + ", set lastReceivedAck to " + ip.tcp.ackNumber + (" it's " + (((ip.tcp.flags & TCP.ACK) == TCP.ACK))) + ", was " + lastReceivedAck + " in " + TCPLink.this);
// }
if ((ip.tcp.flags & TCP.ACK) == TCP.ACK) {
lastReceivedAck = ip.tcp.ackNumber;
}
heSentDataLoc = heSentData;
mineSeqLoc = mineSeq;
intraThreadLock.notifyAll();
}
if (payloadLen > 0) {
if (flowOk) {
downstreamFeed.write(ip.tcp.payload);
if ((ip.tcp.flags & TCP.PSH) == TCP.PSH) {
downstreamFeed.flush();
}
// Log.d(TAG, (!isClient ? "" : " ")
// + (isClient ? "C" : "S")+TCPLink.this.hashCode() +"D: data feed was fed) " + TCPLink.this);
} else {
// Log.d(TAG, (!isClient ? "" : " ")
// + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "D: !!!!!DISCARD PACKET " + ip.tcp.seqNumber + "/" + ip.tcp.ackNumber + " should be " + heSentData + "/x, so request packet from position " + heSentData + " instead " + ip.tcp.seqNumber + " " + TCPLink.this);
}
byte[] tcp = TCP.build(localPort, remotePort, mineSeqLoc, heSentDataLoc,
TCP.ACK, getReadBufSize(), null,
ipLink.localAddress, remoteAddress);
// Log.d(TAG, (!isClient ? "" : " ")
// + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "D: SEND E ACK " + mineSeqLoc + "/" + heSentDataLoc);
// start = System.currentTimeMillis();
ipLink.send(tcp, remoteAddress);
// if (System.currentTimeMillis() - start > 500) {
// Log.d(TAG, (!isClient ? "" : " ")
// + "SPEED ALERT: packet response sending in " + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "D " + "took " + (System.currentTimeMillis() - start) + " ms");
// }
// Log.d(TAG, (!isClient ? "" : " ")
// + (isClient ? "C" : "S")+TCPLink.this.hashCode() +"D: the e packet was sent " + TCPLink.this);
} else {
// if (!flowOk) {
// Log.d(TAG, (!isClient ? "" : " ")
// + "!!!!!STRANGE E PACKET " + ip.tcp.seqNumber + "/" + ip.tcp.ackNumber + " should be " + heSentData + "/x, it means I'm waiting for confirmation of sending, FIN? " + fin);
// }
}
if (flowOk && fin) {
byte[] tcp = TCP.build(localPort, remotePort, mineSeqLoc, heSentDataLoc,
TCP.ACK, getReadBufSize(), null,
ipLink.localAddress, remoteAddress);
// Log.d(TAG, (!isClient ? "" : " ")
// + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "D: received FIN (" + ip.tcp.payload.length + " bytes) " + ip.tcp.seqNumber + "/" + ip.tcp.ackNumber + " flags " + TCP.showFlags(ip.tcp.flags)
// + " heSentData " + heSentData + ", " + (flowOk ? "FLOW OK " : "FLOW ERROR ")
// + " !SEND E ACK " + mineSeqLoc + "/" + heSentDataLoc);
ipLink.send(tcp, remoteAddress);
downstreamFeed.flush();
downstreamFeed.close();
synchronized (TCPLink.this) {
if (state == STATE_NO_MORE_DATA_TO_UPLOAD) {
// Log.d(TAG, (isClient ? "C" : "S") + TCPLink.this.hashCode() + "D" + " correctly closed");
return;
}
TCPLink.this.notifyAll();
state = STATE_NO_MORE_DATA_TO_DOWNLOAD;
// finWaitStart = System.nanoTime();
}
}
break;
}
}
}
}
} catch (Exception ex) {
if (BuildConfig.DEBUG) {
Log.d("TCP", "ex " + ex + " in " + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "D");
}
} finally {
synchronized (TCPLink.this) {
// Log.d(TAG, "thread death " + (isClient ? "C" : "S") + TCPLink.this.hashCode() + "D state " + state + " received/sent " + receivedBytes + "/" + sentBytes);
state = STATE_E_DESTROYED;
ipLink.close(TCPLink.this);
}
}
}
            private void processSYN(IP ip) throws IOException {
                // Handles an incoming SYN while in LISTEN: records the peer's port,
                // sequence number, ACK and window, moves to SYN_RECEIVED, and replies
                // with SYN+ACK. The reply is sent outside the state lock.
                final int mineSeqLoc, heSentDataLoc;
                synchronized (TCPLink.this) {
                    if (state != STATE_LISTEN) {
                        // Raced with another state change (closed/destroyed); drop the SYN.
                        return;
                    }
                    synchronized (intraThreadLock) {
                        remotePort = ip.tcp.sourcePort;
                        heSentData = ip.tcp.seqNumber;
                        if ((ip.tcp.flags & TCP.ACK) == TCP.ACK) {
                            lastReceivedAck = ip.tcp.ackNumber;
                        }
                        lastReceivedRecepientBufferSize = ip.tcp.windowSize;
                        // Wake the upload thread: peer window/ack state is now known.
                        intraThreadLock.notifyAll();
                        mineSeqLoc = mineSeq;
                        // ACK one past the peer's SYN; our own SYN consumes one seq number.
                        heSentDataLoc = heSentData + 1;
                        mineSeq++;
                    }
                    state = STATE_SYN_RECEIVED;
                    TCPLink.this.notifyAll();
                    mss = ip.tcp.mss;
                }
                byte[] tcp = TCP.build(localPort, remotePort, mineSeqLoc, heSentDataLoc,
                        (byte) (TCP.SYN | TCP.ACK), getReadBufSize(), null,
                        ipLink.localAddress, remoteAddress);
                ipLink.send(tcp, remoteAddress);
            }
});
downloadThread.start();
}
}
public void setAppLayerThread(Thread thread) {
appLayerThread = thread;
}
    /**
     * Hands an incoming IP packet to the download thread via the packet queue.
     * Blocks if the queue is at capacity.
     *
     * @param ip the parsed packet addressed to this link
     * @throws InterruptedException if interrupted while waiting for queue space
     */
    void processPacket(IP ip) throws InterruptedException {
        incomingPackets.put(ip);
    }
void close() {
if (appLayerThread != null && appLayerThread.isAlive()) {
appLayerThread.interrupt();
}
if (downloadThread != null && downloadThread.isAlive()) {
downloadThread.interrupt();
}
if (uploadThread != null && uploadThread.isAlive()) {
uploadThread.interrupt();
}
}
    /**
     * Returns the key identifying this link (presumably the demultiplexing key
     * used by the IP layer's connection table — confirm against the caller).
     */
    Long getKey() {
        return key;
    }
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.vinet.servlets;
import com.google.appengine.api.datastore.DatastoreService;
import com.google.appengine.api.datastore.DatastoreServiceFactory;
import com.google.appengine.api.datastore.DatastoreFailureException;
import com.google.appengine.api.datastore.Key;
import com.google.appengine.api.datastore.Entity;
import com.google.appengine.api.users.UserService;
import com.google.appengine.api.users.UserServiceFactory;
import com.google.gson.*;
import com.google.vinet.data.*;
import java.time.*;
import java.util.*;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.stream.Stream;
/**
* A web servlet for posting user requests.
*/
@WebServlet("/request")
public class RequestServlet extends HttpServlet {
/** The Datastore entity name for the ticket(s) associated with a request. */
public static final String TICKET_TABLE_NAME = "Ticket";
/** The {@code DatastoreService} implementation that this {@code RequestServlet depends on}. */
private final DatastoreService datastore;
/** The {@code UserService} implementation that this {@code RequestServlet depends on}. */
private final UserService userService;
/** The {@code RegistrationServlet} implementation that this {@code RequestServlet depends on}. */
private final RegistrationServlet registrationServlet;
/**
* Construct a RequestServlet with all of its dependencies set to their default implementations.
*/
public RequestServlet() {
this.datastore = DatastoreServiceFactory.getDatastoreService();
this.userService = UserServiceFactory.getUserService();
this.registrationServlet = new RegistrationServlet();
}
/**
* Construct a RequestServlet which depends on the provided dependencies.
* @param datastore The DatastoreService implementation to depend on.
* @param userService The UserService implementation to depend on.
* @param registrationServlet The RegistrationServlet implementation to depend on.
*/
public RequestServlet(DatastoreService datastore, UserService userService, RegistrationServlet registrationServlet) {
this.datastore = datastore;
this.userService = userService;
this.registrationServlet = registrationServlet;
}
/**
* Post an Isolate's request to the servlet. Both the request and its tickets will be put into the DataStore.
* @param request The request to be read.
* @param response The response to be written to.
* @throws IOException If an IOException occurs while reading from the request or writing to the reponse.
*/
@Override
public void doPost(HttpServletRequest request, HttpServletResponse response)
throws IOException {
if (response == null) {
throw new IllegalArgumentException("response must not be null");
}
if (request == null) {
throw new IllegalArgumentException("request must not be null");
}
if (!this.userService.isUserLoggedIn()) {
response.sendError(
HttpServletResponse.SC_UNAUTHORIZED,
"user must be logged in to post a request"
);
return;
}
final boolean registered = registrationServlet.isUserRegistered();
if (!registered) {
response.sendError(
HttpServletResponse.SC_UNAUTHORIZED,
"user must be registered to post a request"
);
return;
}
final boolean isIsolate = registrationServlet.isUserIsolate();
if (!isIsolate) {
response.sendError(
HttpServletResponse.SC_UNAUTHORIZED,
"user must be registered as an isolate to post a request"
);
return;
}
String date = request.getParameter("date");
String duration = request.getParameter("duration");
String startTime = request.getParameter("startTime");
String endTime = request.getParameter("endTime");
String timezone = request.getParameter("timezoneId");
/* NOTE: Subjects and Details will be matched in the order they are received.
* For example:
* subjects[0] will be linked to details [0],
* subjects[1] will be linked to details [1]
* and so on as above.
*/
final String[] subjectsRAW = request.getParameterValues("subject");
final String[] detailsRAW = request.getParameterValues("details");
if (date == null
|| duration == null
|| startTime == null
|| endTime == null
|| timezone == null
|| subjectsRAW == null
|| detailsRAW == null) {
response.sendError(
HttpServletResponse.SC_BAD_REQUEST,
"one or more of the parameters were null"
);
return;
}
date = date.trim();
duration = duration.trim();
startTime = startTime.trim();
endTime = endTime.trim();
timezone = timezone.trim();
final List<String> subjects = Collections.unmodifiableList(Arrays.asList(trimMembers(subjectsRAW)));
final List<String> details = Collections.unmodifiableList(Arrays.asList(trimMembers(detailsRAW)));
if (date.isEmpty()
|| duration.isEmpty()
|| startTime.isEmpty()
|| endTime.isEmpty()
|| timezone.isEmpty()
|| subjects.size() == 0
|| details.size() == 0) {
response.sendError(
HttpServletResponse.SC_BAD_REQUEST,
"one or more of the parameters were empty"
);
return;
}
if (subjects.parallelStream().anyMatch(e -> e == null || e.equals(""))) {
response.sendError(
HttpServletResponse.SC_BAD_REQUEST,
"all members of subjects array must not be null or empty"
);
return;
}
if (details.parallelStream().anyMatch(e -> e == null || e.equals(""))) {
response.sendError(
HttpServletResponse.SC_BAD_REQUEST,
"all members of details array must not be null or empty"
);
return;
}
/* subjects and details must be of equal length to ensure that the subjects and details
* have been received correctly.
*/
if (subjects.size() != details.size()) {
response.sendError(
HttpServletResponse.SC_BAD_REQUEST,
"subjects and details must be of equal length"
);
return;
}
/* This catch block is used to ensure UserService has not become unavailable
* since login status was confirmed. This should never happen, but could. */
final String userId;
try {
userId = userService.getCurrentUser().getUserId();
} catch (Exception ex) {
response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
return;
}
/*
* All of the below are java.time representations of the associated parameters.
*/
final ZoneId timezoneId;
final LocalDate localDate;
final LocalTime localStartTime;
final LocalTime localEndTime;
final Duration requestDuration;
/* If any of the below fail, then the request cannot be accepted, as we cannot determine
* when the request is due to take place. */
try {
timezoneId = ZoneId.of(timezone);
localDate = LocalDate.parse(date);
localStartTime = LocalTime.parse(startTime);
localEndTime = LocalTime.parse(endTime);
requestDuration = Duration.ofMinutes(Long.parseLong(duration));
} catch (DateTimeException | ArithmeticException | NumberFormatException exception) {
response.sendError(HttpServletResponse.SC_BAD_REQUEST, "error parsing date/time");
return;
}
/* Combine the date, time, and timezones. */
final ZonedDateTime zonedStartDateTime = ZonedDateTime.of(localDate, localStartTime, timezoneId);
final ZonedDateTime zonedEndDateTime = ZonedDateTime.of(localDate, localEndTime, timezoneId);
/* Convert the start/end date-times to an Instant to be compatible with the TimeSlot interface. */
final Instant start = zonedStartDateTime.toInstant();
final Instant end = zonedEndDateTime.toInstant();
final Gson gson = new Gson();
final Entity ticketEntity = new Entity(TICKET_TABLE_NAME);
ticketEntity.setProperty("isolateId", userId);
ticketEntity.setProperty("duration", requestDuration.toString());
ticketEntity.setProperty("subjects", gson.toJson(subjects));
ticketEntity.setProperty("details", gson.toJson(details));
IsolateTimeSlot.datastore = this.datastore;
/* Put the ticket into the datastore, then create an IsolateTimeSlot which points to this ticket,
* and put that IsolateTimeSlot into the datastore. */
try {
final Key ticketKey = this.datastore.put(ticketEntity);
final Isolate isolate = new Isolate(userId);
final IsolateTimeSlot timeSlot = new IsolateTimeSlot(start, end, isolate, localDate, ticketKey);
timeSlot.toDatastore();
} catch (Exception exception) {
response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
}
response.sendRedirect("/isolate/home.html");
}
/**
* Apply the trim() method to all non-null members of a String array,
* in-place.
* @param array The array to process.
* @return A reference to {@code array}, for chaining.
*/
private static String[] trimMembers(String[] array) {
for (int i = 0; i < array.length; i++) {
if (array[i] != null) {
array[i] = array[i].trim();
}
}
return array;
}
}
| |
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.collection.primitive.hopscotch;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import org.junit.Ignore;
import org.junit.Test;
import org.neo4j.collection.primitive.Primitive;
import org.neo4j.collection.primitive.PrimitiveLongIntMap;
import org.neo4j.collection.primitive.PrimitiveLongSet;
import org.neo4j.test.randomized.RandomizedTester.TargetFactory;
import org.neo4j.test.randomized.TestResource;
import static java.lang.System.currentTimeMillis;
import static java.lang.Thread.sleep;
@Ignore( "Not a test. A benchmark" )
public class PrimitiveCollectionBenchmark
{
/* A tuesday afternoon run on MPs machine:
TROVE
testing: trove4j long->int map, random
add: 1696, contains: 566, mem: 1154154496 0.37877269331638164
add: 1881, contains: 614, mem: 1569521664 0.37877269331638164
add: 1761, contains: 596, mem: 2332098560 0.37877269331638164
testing: trove4j long->int map, seq
add: 531, contains: 69, mem: 2517041152 0.37965116815437405
add: 355, contains: 69, mem: 2519793664 0.37965116815437405
add: 334, contains: 74, mem: 2517041152 0.37965116815437405
testing: trove4j set, random
add: 1231, contains: 714, mem: 2518220800 0.37877269331638164
add: 1306, contains: 665, mem: 2517041152 0.37877269331638164
add: 1343, contains: 678, mem: 2517499904 0.37877269331638164
testing: trove4j set, seq
add: 286, contains: 93, mem: 2517041152 0.37965116815437405
add: 329, contains: 93, mem: 2517172224 0.37965116815437405
add: 310, contains: 92, mem: 2517172224 0.37965116815437405
COLT
testing: colt long->int map, random
add: 1765, contains: 693, mem: 1186922496 org.neo4j.kernel.impl.util.hopscotch.PrimitiveCollectionBenchmark$ColtMap@4990d0d2
add: 1609, contains: 657, mem: 1086259200 org.neo4j.kernel.impl.util.hopscotch.PrimitiveCollectionBenchmark$ColtMap@599e80b1
add: 1698, contains: 651, mem: 1634729984 org.neo4j.kernel.impl.util.hopscotch.PrimitiveCollectionBenchmark$ColtMap@ae03a4d
testing: colt long->int map, seq
add: 620, contains: 111, mem: 1955528704 org.neo4j.kernel.impl.util.hopscotch.PrimitiveCollectionBenchmark$ColtMap@62d790a8
add: 624, contains: 109, mem: 2323382272 org.neo4j.kernel.impl.util.hopscotch.PrimitiveCollectionBenchmark$ColtMap@39be9f72
add: 478, contains: 109, mem: 2325938176 org.neo4j.kernel.impl.util.hopscotch.PrimitiveCollectionBenchmark$ColtMap@1769261f
NEO4J
testing: neo4j hop-scotch long->int map, random
add: 1669, contains: 812, mem: 813039616 int[]-table[capacity:16777216, size:9976861, usage:0.5946672558784485]
add: 1509, contains: 864, mem: 895942656 int[]-table[capacity:16777216, size:9976861, usage:0.5946672558784485]
add: 1470, contains: 791, mem: 1097334784 int[]-table[capacity:16777216, size:9976861, usage:0.5946672558784485]
testing: neo4j hop-scotch long->int map, seq
add: 649, contains: 121, mem: 1097334784 int[]-table[capacity:16777216, size:10000000, usage:0.5960464477539062]
add: 649, contains: 119, mem: 1097334784 int[]-table[capacity:16777216, size:10000000, usage:0.5960464477539062]
add: 645, contains: 120, mem: 1097334784 int[]-table[capacity:16777216, size:10000000, usage:0.5960464477539062]
testing: neo4j hop-scotch set, random
add: 1380, contains: 761, mem: 1097334784 int[]-table[capacity:16777216, size:9976861, usage:0.5946672558784485]
add: 1333, contains: 761, mem: 1097334784 int[]-table[capacity:16777216, size:9976861, usage:0.5946672558784485]
add: 1317, contains: 766, mem: 1097334784 int[]-table[capacity:16777216, size:9976861, usage:0.5946672558784485]
testing: neo4j hop-scotch set, seq
add: 594, contains: 152, mem: 1097334784 int[]-table[capacity:16777216, size:10000000, usage:0.5960464477539062]
add: 594, contains: 153, mem: 1097334784 int[]-table[capacity:16777216, size:10000000, usage:0.5960464477539062]
add: 593, contains: 151, mem: 1097334784 int[]-table[capacity:16777216, size:10000000, usage:0.5960464477539062]
*/
private static final int RUNS = 3;
@Test
public void performanceTestPrimitiveLongSet() throws Exception
{
TargetFactory<MapInterface> factory = new TargetFactory<MapInterface>()
{
@Override
public MapInterface newInstance()
{
return new HopScotchSet();
}
};
performanceTest( "neo4j hop-scotch set, random", factory, RANDOM_DATA );
performanceTest( "neo4j hop-scotch set, seq", factory, SEQUENTIAL_DATA );
}
@Test
public void performanceTestPrimitiveLongMap() throws Exception
{
TargetFactory<MapInterface> factory = new TargetFactory<MapInterface>()
{
@Override
public MapInterface newInstance()
{
return new HopScotchMap();
}
};
performanceTest( "neo4j hop-scotch long->int map, random", factory, RANDOM_DATA );
performanceTest( "neo4j hop-scotch long->int map, seq", factory, SEQUENTIAL_DATA );
}
// @Test
// public void performanceTestColtLongMap() throws Exception
// {
// Factory<MapInterface> factory = new Factory<MapInterface>()
// {
// @Override
// public MapInterface newInstance()
// {
// return new ColtMap();
// }
// };
// performanceTest( "colt long->int map, random", factory, RANDOM_DATA );
// performanceTest( "colt long->int map, seq", factory, SEQUENTIAL_DATA );
// }
// @Test
// public void performanceTestTroveLongSet() throws Exception
// {
// TargetFactory<MapInterface> factory = new TargetFactory<MapInterface>()
// {
// @Override
// public MapInterface newInstance()
// {
// return new TroveSet();
// }
// };
// performanceTest( "trove4j set, random", factory, RANDOM_DATA );
// performanceTest( "trove4j set, seq", factory, SEQUENTIAL_DATA );
// }
//
// @Test
// public void performanceTestTroveLongMap() throws Exception
// {
// TargetFactory<MapInterface> factory = new TargetFactory<MapInterface>()
// {
// @Override
// public MapInterface newInstance()
// {
// return new TroveMap();
// }
// };
// performanceTest( "trove4j long->int map, random", factory, RANDOM_DATA );
// performanceTest( "trove4j long->int map, seq", factory, SEQUENTIAL_DATA );
// }
@Test
public void performanceTestJavaLongSet() throws Exception
{
TargetFactory<MapInterface> factory = new TargetFactory<MapInterface>()
{
@Override
public MapInterface newInstance()
{
return new JucSet();
}
};
performanceTest( "juc set, random", factory, RANDOM_DATA );
performanceTest( "juc set, seq", factory, SEQUENTIAL_DATA );
}
@Test
public void performanceTestJavaLongMap() throws Exception
{
TargetFactory<MapInterface> factory = new TargetFactory<MapInterface>()
{
@Override
public MapInterface newInstance()
{
return new JucMap();
}
};
performanceTest( "juc Long->Integer map, random", factory, RANDOM_DATA );
performanceTest( "juc Long->Integer map, seq", factory, SEQUENTIAL_DATA );
}
private void performanceTest( String name, TargetFactory<MapInterface> factory,
long[] data ) throws Exception
{
System.out.println( "testing: " + name );
for ( int r = 0; r < RUNS; r++ )
{
// GIVEN
try ( final MapInterface target = factory.newInstance() )
{
// WHEN
long time = currentTimeMillis();
long dataSize = data.length;
for ( int i = 0; i < dataSize; i++ )
{
target.put( data[i], (int)data[i] );
}
long addTime = currentTimeMillis() - time;
time = currentTimeMillis();
for ( int i = 0; i < dataSize; i++ )
{
target.get( data[i] );
}
long containsTime = currentTimeMillis() - time;
printResults( addTime, containsTime, target );
}
}
}
private interface MapInterface extends TestResource
{
void put( long key, int value );
void get( long key );
}
// private static class ColtMap implements MapInterface
// {
// private final OpenLongIntHashMap map = new OpenLongIntHashMap();
//
// @Override
// public void put( long key, int value )
// {
// map.put( key, value );
// }
//
// @Override
// public void get( long key )
// {
// map.get( key );
// }
// }
private static class JucSet implements MapInterface
{
private final Set<Long> set = new HashSet<>();
@Override
public void put( long key, int value )
{
set.add( key );
}
@Override
public void get( long key )
{
set.contains( key );
}
@Override
public String toString()
{
return "" + set.size();
}
@Override
public void close()
{
}
}
private static class JucMap implements MapInterface
{
private final Map<Long, Integer> map = new HashMap<>();
@Override
public void put( long key, int value )
{
map.put( key, value );
}
@Override
public void get( long key )
{
map.get( key );
}
@Override
public String toString()
{
return "" + map.size();
}
@Override
public void close()
{
}
}
// private static class TroveSet implements MapInterface
// {
// private final TLongHashSet set = new TLongHashSet();
//
// @Override
// public void put( long key, int value )
// {
// set.add( key );
// }
//
// @Override
// public void get( long key )
// {
// set.contains( key );
// }
//
// @Override
// public String toString()
// {
// return "" + ((double)set.size() / (double)set.capacity());
// }
// }
//
// private static class TroveMap implements MapInterface
// {
// private final TLongIntHashMap map = new TLongIntHashMap();
//
// @Override
// public void put( long key, int value )
// {
// map.put( key, value );
// }
//
// @Override
// public void get( long key )
// {
// map.get( key );
// }
//
// @Override
// public String toString()
// {
// return "" + ((double)map.size() / (double)map.capacity());
// }
// }
// Off-heap hop-scotch-hashing set probe. Note this flavour allocates OFF-heap
// (Primitive.offHeapLongSet()), so close() must run to release native memory.
private static class HopScotchSet implements MapInterface
{
    private final PrimitiveLongSet set = Primitive.offHeapLongSet();

    @Override
    public void put( long key, int value )
    {
        // Value is ignored — membership only.
        set.add( key );
    }

    @Override
    public void get( long key )
    {
        // Result intentionally discarded; only lookup cost is measured.
        set.contains( key );
    }

    @Override
    public String toString()
    {
        // Delegates to the set's own diagnostic string.
        return set.toString();
    }

    @Override
    public void close()
    {
        // Releases the off-heap allocation.
        set.close();
    }
}
// Primitive (non-boxing) hop-scotch-hashing long→int map probe.
// Unlike HopScotchSet this uses the heap variant (Primitive.longIntMap()).
private static class HopScotchMap implements MapInterface
{
    private final PrimitiveLongIntMap map = Primitive.longIntMap();

    @Override
    public void put( long key, int value )
    {
        map.put( key, value );
    }

    @Override
    public void get( long key )
    {
        // Result intentionally discarded; only lookup cost is measured.
        map.get( key );
    }

    @Override
    public String toString()
    {
        // Delegates to the map's own diagnostic string.
        return map.toString();
    }

    @Override
    public void close()
    {
        map.close();
    }
}
/**
 * Prints one benchmark line: add time, contains time, current heap size and the
 * probe's own toString(). Forces several GC passes first so the reported
 * totalMemory() is taken on a (hopefully) settled heap.
 */
private void printResults( long addTime, long containsTime, Object set ) throws Exception
{
    // Nudge the JVM towards a stable heap before sampling memory.
    for ( int attempt = 0; attempt < 5; attempt++ )
    {
        System.gc();
        sleep( 1000 );
    }
    System.out.println( String.format( " add: %d, contains: %d, mem: %d %s",
            addTime, containsTime, Runtime.getRuntime().totalMemory(), set ) );
}
// Shared benchmark inputs: one array of pseudo-random non-negative keys (fixed
// seed, so every run sees identical data) and one of strictly sequential keys.
private static final int DATA_SIZE = 10_000_000;
private static final long[] RANDOM_DATA, SEQUENTIAL_DATA;
static
{
    RANDOM_DATA = new long[DATA_SIZE];
    SEQUENTIAL_DATA = new long[DATA_SIZE];
    Random random = new Random( 145878 /*picked at random, of course*/ );
    for ( int i = 0; i < DATA_SIZE; i++ )
    {
        // Widen to long BEFORE taking the absolute value: Math.abs(Integer.MIN_VALUE)
        // is still Integer.MIN_VALUE (negative), whereas abs of the widened long is
        // positive. All other draws produce exactly the same values as before.
        RANDOM_DATA[i] = Math.abs( (long) random.nextInt() );
        SEQUENTIAL_DATA[i] = i;
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.tests.integration.largemessage;
import javax.transaction.xa.XAResource;
import javax.transaction.xa.Xid;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Arrays;
import java.util.Collection;
import java.util.Random;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.activemq.artemis.api.core.ActiveMQBuffer;
import org.apache.activemq.artemis.api.core.ActiveMQBuffers;
import org.apache.activemq.artemis.api.core.ActiveMQException;
import org.apache.activemq.artemis.api.core.Message;
import org.apache.activemq.artemis.api.core.QueueConfiguration;
import org.apache.activemq.artemis.api.core.SimpleString;
import org.apache.activemq.artemis.api.core.client.ClientConsumer;
import org.apache.activemq.artemis.api.core.client.ClientMessage;
import org.apache.activemq.artemis.api.core.client.ClientProducer;
import org.apache.activemq.artemis.api.core.client.ClientSession;
import org.apache.activemq.artemis.api.core.client.ClientSessionFactory;
import org.apache.activemq.artemis.api.core.client.MessageHandler;
import org.apache.activemq.artemis.api.core.client.ServerLocator;
import org.apache.activemq.artemis.core.config.Configuration;
import org.apache.activemq.artemis.core.config.StoreConfiguration;
import org.apache.activemq.artemis.core.server.ActiveMQServer;
import org.apache.activemq.artemis.core.server.Queue;
import org.apache.activemq.artemis.tests.util.ActiveMQTestBase;
import org.apache.activemq.artemis.utils.DataConstants;
import org.apache.activemq.artemis.utils.DeflaterReader;
import org.jboss.logging.Logger;
import org.junit.Assert;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@RunWith(Parameterized.class)
public abstract class LargeMessageTestBase extends ActiveMQTestBase {
   private static final Logger log = Logger.getLogger(LargeMessageTestBase.class);

   // Address/queue name shared by every test in the hierarchy.
   protected final SimpleString ADDRESS = new SimpleString("SimpleAddress");

   // Parameterized store backend (FILE or DATABASE), injected by JUnit.
   protected StoreConfiguration.StoreType storeType;

   public LargeMessageTestBase(StoreConfiguration.StoreType storeType) {
      this.storeType = storeType;
   }
@Override
public void tearDown() throws Exception {
super.tearDown();
if (storeType == StoreConfiguration.StoreType.DATABASE) {
destroyTables(Arrays.asList("BINDINGS", "LARGE_MESSAGE", "MESSAGE", "NODE_MANAGER_STORE"));
}
}
@Parameterized.Parameters(name = "storeType={0}")
public static Collection<Object[]> data() {
Object[][] params = new Object[][]{{StoreConfiguration.StoreType.FILE}, {StoreConfiguration.StoreType.DATABASE}};
return Arrays.asList(params);
}
   /**
    * Convenience overload of the full {@code testChunks} below using no producer
    * confirmation window (-1) and a 10 KiB minimum large-message size.
    */
   protected void testChunks(final boolean isXA,
                             final boolean restartOnXA,
                             final boolean rollbackFirstSend,
                             final boolean useStreamOnConsume,
                             final boolean realFiles,
                             final boolean preAck,
                             final boolean sendingBlocking,
                             final boolean testBrowser,
                             final boolean useMessageConsumer,
                             final int numberOfMessages,
                             final long numberOfBytes,
                             final int waitOnConsumer,
                             final long delayDelivery) throws Exception {
      testChunks(isXA, restartOnXA, rollbackFirstSend, useStreamOnConsume, realFiles, preAck, sendingBlocking, testBrowser, useMessageConsumer, numberOfMessages, numberOfBytes, waitOnConsumer, delayDelivery, -1, 10 * 1024);
   }
   /**
    * End-to-end large-message scenario driver. Sends {@code numberOfMessages}
    * messages of {@code numberOfBytes} each, optionally under XA, optionally
    * rolling back a first send, optionally restarting the server while a
    * transaction is prepared, then consumes and validates every byte of every
    * message (via streaming or the body buffer), optionally through a browser
    * first. Finally asserts the queue is empty and no large-message files leak.
    *
    * @param isXA               drive the session under XA (prepare/commit/rollback by Xid)
    * @param restartOnXA        stop/start the server while the XA transaction is prepared
    * @param rollbackFirstSend  send everything once, roll it back, then send again
    * @param useStreamOnConsume validate bodies via saveToOutputStream instead of the body buffer
    * @param realFiles          use a persistent server (enables restart paths)
    * @param preAck             create sessions with pre-acknowledge on
    * @param sendingBlocking    make sends block until stored/acknowledged
    * @param testBrowser        do a first browsing (non-destructive) consume pass
    * @param useMessageConsumer consume via MessageHandler instead of receive()
    * @param waitOnConsumer     timeout (ms) for consumer-side waits
    * @param delayDelivery      if > 0, schedule delivery this many ms in the future
    * @param producerWindow     confirmation window size, or <= 0 to leave unset
    * @param minSize            minimum size (bytes) for a message to be treated as large
    */
   protected void testChunks(final boolean isXA,
                             final boolean restartOnXA,
                             final boolean rollbackFirstSend,
                             final boolean useStreamOnConsume,
                             final boolean realFiles,
                             final boolean preAck,
                             final boolean sendingBlocking,
                             final boolean testBrowser,
                             final boolean useMessageConsumer,
                             final int numberOfMessages,
                             final long numberOfBytes,
                             final int waitOnConsumer,
                             final long delayDelivery,
                             final int producerWindow,
                             final int minSize) throws Exception {
      clearDataRecreateServerDirs();

      // Pick the configuration matching the parameterized store backend.
      Configuration configuration;
      if (storeType == StoreConfiguration.StoreType.DATABASE) {
         configuration = createDefaultJDBCConfig(true);
      } else {
         configuration = createDefaultConfig(false);
      }

      ActiveMQServer server = createServer(realFiles, configuration);
      server.start();

      ServerLocator locator = createInVMNonHALocator();
      try {
         if (sendingBlocking) {
            locator.setBlockOnNonDurableSend(true).setBlockOnDurableSend(true).setBlockOnAcknowledge(true);
         }

         if (producerWindow > 0) {
            locator.setConfirmationWindowSize(producerWindow);
         }

         locator.setMinLargeMessageSize(minSize);

         ClientSessionFactory sf = locator.createSessionFactory();

         ClientSession session;

         Xid xid = null;
         session = sf.createSession(null, null, isXA, false, false, preAck, 0);

         if (isXA) {
            xid = newXID();
            session.start(xid, XAResource.TMNOFLAGS);
         }

         session.createQueue(new QueueConfiguration(ADDRESS));

         ClientProducer producer = session.createProducer(ADDRESS);

         if (rollbackFirstSend) {
            // First send pass that must leave no trace: under XA we prepare,
            // optionally restart the server, recover the prepared Xid and roll
            // it back; otherwise a plain rollback.
            sendMessages(numberOfMessages, numberOfBytes, delayDelivery, session, producer);

            if (isXA) {
               session.end(xid, XAResource.TMSUCCESS);
               session.prepare(xid);

               session.close();

               if (realFiles && restartOnXA) {
                  server.stop();
                  server.start();
                  sf = locator.createSessionFactory();
               }

               session = sf.createSession(null, null, isXA, false, false, preAck, 0);

               Xid[] xids = session.recover(XAResource.TMSTARTRSCAN);
               Assert.assertEquals(1, xids.length);
               Assert.assertEquals(xid, xids[0]);

               session.rollback(xid);
               producer = session.createProducer(ADDRESS);
               xid = newXID();
               session.start(xid, XAResource.TMNOFLAGS);
            } else {
               session.rollback();
            }

            // Rollback must have removed any large-message files already.
            validateNoFilesOnLargeDir();
         }

         // The "real" send pass; under XA it is prepared, the server may be
         // restarted, the Xid recovered and then committed.
         sendMessages(numberOfMessages, numberOfBytes, delayDelivery, session, producer);

         if (isXA) {
            session.end(xid, XAResource.TMSUCCESS);
            session.prepare(xid);

            session.close();

            if (realFiles && restartOnXA) {
               server.stop();
               server.start();
               //we need to recreate sf's
               sf = locator.createSessionFactory();
            }

            session = sf.createSession(null, null, isXA, false, false, preAck, 0);

            Xid[] xids = session.recover(XAResource.TMSTARTRSCAN);
            Assert.assertEquals(1, xids.length);
            Assert.assertEquals(xid, xids[0]);

            producer = session.createProducer(ADDRESS);

            session.commit(xid, false);
            xid = newXID();
            session.start(xid, XAResource.TMNOFLAGS);
         } else {
            session.commit();
         }

         session.close();

         // Optional full restart to prove the messages survived persistence.
         if (realFiles) {
            server.stop();

            server = createServer(realFiles, configuration);

            server.start();

            sf = locator.createSessionFactory();
         }

         session = sf.createSession(null, null, isXA, false, false, preAck, 0);

         if (isXA) {
            xid = newXID();
            session.start(xid, XAResource.TMNOFLAGS);
         }

         ClientConsumer consumer = null;

         // iteration 0 = browser pass (non-destructive, rolled back afterwards),
         // iteration 1 = real consume pass (committed). Without testBrowser we
         // only run iteration 1.
         for (int iteration = testBrowser ? 0 : 1; iteration < 2; iteration++) {
            session.stop();

            // first time with a browser
            consumer = session.createConsumer(ADDRESS, null, iteration == 0);

            if (useMessageConsumer) {
               // Async consumption: validate each message inside a MessageHandler
               // and count completions/errors via latch + atomic counter.
               final CountDownLatch latchDone = new CountDownLatch(numberOfMessages);
               final AtomicInteger errors = new AtomicInteger(0);

               MessageHandler handler = new MessageHandler() {
                  int msgCounter;

                  @Override
                  public void onMessage(final ClientMessage message) {
                     try {
                        if (delayDelivery > 0) {
                           long originalTime = (Long) message.getObjectProperty(new SimpleString("original-time"));
                           Assert.assertTrue(System.currentTimeMillis() - originalTime + "<" + delayDelivery, System.currentTimeMillis() - originalTime >= delayDelivery);
                        }

                        if (!preAck) {
                           message.acknowledge();
                        }

                        Assert.assertNotNull(message);

                        if (delayDelivery <= 0) {
                           // right now there is no guarantee of ordered delivered on multiple scheduledMessages with
                           // the same
                           // scheduled delivery time
                           Assert.assertEquals(msgCounter, ((Integer) message.getObjectProperty(new SimpleString("counter-message"))).intValue());
                        }

                        if (useStreamOnConsume) {
                           // Stream the body out and verify every byte matches the
                           // deterministic sample pattern.
                           final AtomicLong bytesRead = new AtomicLong(0);
                           message.saveToOutputStream(new OutputStream() {

                              @Override
                              public void write(final byte[] b) throws IOException {
                                 if (b[0] == ActiveMQTestBase.getSamplebyte(bytesRead.get())) {
                                    bytesRead.addAndGet(b.length);
                                    LargeMessageTestBase.log.debug("Read position " + bytesRead.get() + " on consumer");
                                 } else {
                                    LargeMessageTestBase.log.warn("Received invalid packet at position " + bytesRead.get());
                                 }
                              }

                              @Override
                              public void write(final int b) throws IOException {
                                 if (b == ActiveMQTestBase.getSamplebyte(bytesRead.get())) {
                                    bytesRead.incrementAndGet();
                                 } else {
                                    LargeMessageTestBase.log.warn("byte not as expected!");
                                 }
                              }
                           });

                           Assert.assertEquals(numberOfBytes, bytesRead.get());
                        } else {
                           // Read the body buffer directly, byte by byte, and make
                           // sure reading past the end throws.
                           ActiveMQBuffer buffer = message.getBodyBuffer();
                           buffer.resetReaderIndex();
                           for (long b = 0; b < numberOfBytes; b++) {
                              if (b % (1024L * 1024L) == 0) {
                                 LargeMessageTestBase.log.debug("Read " + b + " bytes");
                              }

                              Assert.assertEquals(ActiveMQTestBase.getSamplebyte(b), buffer.readByte());
                           }

                           try {
                              buffer.readByte();
                              Assert.fail("Supposed to throw an exception");
                           } catch (Exception e) {
                           }
                        }
                     } catch (Throwable e) {
                        e.printStackTrace();
                        LargeMessageTestBase.log.warn("Got an error", e);
                        errors.incrementAndGet();
                     } finally {
                        latchDone.countDown();
                        msgCounter++;
                     }
                  }
               };

               session.start();

               consumer.setMessageHandler(handler);

               Assert.assertTrue(latchDone.await(waitOnConsumer, TimeUnit.MILLISECONDS));
               Assert.assertEquals(0, errors.get());
            } else {
               // Sync consumption: receive() each message and validate inline.
               session.start();

               for (int i = 0; i < numberOfMessages; i++) {
                  ClientMessage message = consumer.receive(waitOnConsumer + delayDelivery);

                  Assert.assertNotNull(message);

                  if (delayDelivery > 0) {
                     long originalTime = (Long) message.getObjectProperty(new SimpleString("original-time"));
                     Assert.assertTrue(System.currentTimeMillis() - originalTime + "<" + delayDelivery, System.currentTimeMillis() - originalTime >= delayDelivery);
                  }

                  if (!preAck) {
                     message.acknowledge();
                  }

                  Assert.assertNotNull(message);

                  if (delayDelivery <= 0) {
                     // right now there is no guarantee of ordered delivered on multiple scheduledMessages with the same
                     // scheduled delivery time
                     Assert.assertEquals(i, ((Integer) message.getObjectProperty(new SimpleString("counter-message"))).intValue());
                  }

                  if (useStreamOnConsume) {
                     final AtomicLong bytesRead = new AtomicLong(0);
                     message.saveToOutputStream(new OutputStream() {

                        @Override
                        public void write(final byte[] b) throws IOException {
                           if (b.length > 0) {
                              if (b[0] == ActiveMQTestBase.getSamplebyte(bytesRead.get())) {
                                 bytesRead.addAndGet(b.length);
                              } else {
                                 LargeMessageTestBase.log.warn("Received invalid packet at position " + bytesRead.get());
                              }
                           }
                        }

                        @Override
                        public void write(final int b) throws IOException {
                           if (bytesRead.get() % (1024L * 1024L) == 0) {
                              LargeMessageTestBase.log.debug("Read " + bytesRead.get() + " bytes");
                           }

                           // NOTE(review): this branch checks against a literal 'a'
                           // while its siblings use getSamplebyte() — confirm the
                           // sample pattern really is all 'a' bytes.
                           if (b == (byte) 'a') {
                              bytesRead.incrementAndGet();
                           } else {
                              LargeMessageTestBase.log.warn("byte not as expected!");
                           }
                        }
                     });

                     Assert.assertEquals(numberOfBytes, bytesRead.get());
                  } else {
                     ActiveMQBuffer buffer = message.getBodyBuffer();
                     buffer.resetReaderIndex();

                     for (long b = 0; b < numberOfBytes; b++) {
                        if (b % (1024L * 1024L) == 0L) {
                           LargeMessageTestBase.log.debug("Read " + b + " bytes");
                        }
                        Assert.assertEquals(ActiveMQTestBase.getSamplebyte(b), buffer.readByte());
                     }
                  }
               }
            }

            consumer.close();

            if (iteration == 0) {
               // Browser pass: undo everything so the destructive pass sees all messages.
               if (isXA) {
                  session.end(xid, XAResource.TMSUCCESS);
                  session.rollback(xid);
                  xid = newXID();
                  session.start(xid, XAResource.TMNOFLAGS);
               } else {
                  session.rollback();
               }
            } else {
               // Destructive pass: make the consumption permanent.
               if (isXA) {
                  session.end(xid, XAResource.TMSUCCESS);
                  session.commit(xid, true);
                  xid = newXID();
                  session.start(xid, XAResource.TMNOFLAGS);
               } else {
                  session.commit();
               }
            }
         }

         session.close();

         // Queue must be fully drained and no large-message files left behind.
         Assert.assertEquals(0, ((Queue) server.getPostOffice().getBinding(ADDRESS).getBindable()).getDeliveringCount());
         Assert.assertEquals(0, ((Queue) server.getPostOffice().getBinding(ADDRESS).getBindable()).getMessageCount());

         validateNoFilesOnLargeDir();

      } catch (Throwable e) {
         e.printStackTrace();
         throw e;
      } finally {
         locator.close();
         try {
            server.stop();
         } catch (Throwable ignored) {
            ignored.printStackTrace();
         }
      }
   }
/**
* @param numberOfMessages
* @param numberOfBytes
* @param delayDelivery
* @param session
* @param producer
* @throws Exception
* @throws IOException
* @throws ActiveMQException
*/
private void sendMessages(final int numberOfMessages,
final long numberOfBytes,
final long delayDelivery,
final ClientSession session,
final ClientProducer producer) throws Exception {
LargeMessageTestBase.log.debug("NumberOfBytes = " + numberOfBytes);
for (int i = 0; i < numberOfMessages; i++) {
ClientMessage message = session.createMessage(true);
// If the test is using more than 1M, we will only use the Streaming, as it require too much memory from the
// test
if (numberOfBytes > 1024 * 1024 || i % 2 == 0) {
LargeMessageTestBase.log.debug("Sending message (stream)" + i);
message.setBodyInputStream(ActiveMQTestBase.createFakeLargeStream(numberOfBytes));
} else {
LargeMessageTestBase.log.debug("Sending message (array)" + i);
byte[] bytes = new byte[(int) numberOfBytes];
for (int j = 0; j < bytes.length; j++) {
bytes[j] = ActiveMQTestBase.getSamplebyte(j);
}
message.getBodyBuffer().writeBytes(bytes);
}
message.putIntProperty(new SimpleString("counter-message"), i);
if (delayDelivery > 0) {
long time = System.currentTimeMillis();
message.putLongProperty(new SimpleString("original-time"), time);
message.putLongProperty(Message.HDR_SCHEDULED_DELIVERY_TIME, time + delayDelivery);
producer.send(message);
} else {
producer.send(message);
}
}
}
protected ActiveMQBuffer createLargeBuffer(final int numberOfIntegers) {
ActiveMQBuffer body = ActiveMQBuffers.fixedBuffer(DataConstants.SIZE_INT * numberOfIntegers);
for (int i = 0; i < numberOfIntegers; i++) {
body.writeInt(i);
}
return body;
}
   /**
    * Convenience overload: creates a durable streaming large message of the
    * given size. Delegates to the (long, boolean) variant with persistent=true.
    */
   protected ClientMessage createLargeClientMessageStreaming(final ClientSession session,
                                                             final int numberOfBytes) throws Exception {
      return createLargeClientMessageStreaming(session, numberOfBytes, true);
   }
   /**
    * Creates a message whose body is the given byte array, written eagerly into
    * the body buffer (no streaming).
    *
    * @param durable whether the message is created durable
    */
   protected ClientMessage createLargeClientMessage(final ClientSession session,
                                                    final byte[] buffer,
                                                    final boolean durable) throws Exception {

      ClientMessage msgs = session.createMessage(durable);
      msgs.getBodyBuffer().writeBytes(buffer);
      return msgs;
   }
   /**
    * Creates a message whose body is supplied lazily by a fake large stream of
    * {@code numberOfBytes} sample bytes — the body is produced at send time,
    * so the test never holds it fully in memory.
    */
   protected ClientMessage createLargeClientMessageStreaming(final ClientSession session,
                                                             final long numberOfBytes,
                                                             final boolean persistent) throws Exception {

      ClientMessage clientMessage = session.createMessage(persistent);

      clientMessage.setBodyInputStream(ActiveMQTestBase.createFakeLargeStream(numberOfBytes));

      return clientMessage;
   }
   /**
    * Receives exactly one message from {@code queueToRead} (5s timeout),
    * acknowledges it and commits.
    *
    * @param session       started/stopped by this method's own start() call
    * @param queueToRead   queue to consume from
    * @param numberOfBytes NOTE(review): currently unused — the message body is
    *                      never validated here; confirm whether validation was intended
    * @throws ActiveMQException
    * @throws IOException
    */
   protected void readMessage(final ClientSession session,
                              final SimpleString queueToRead,
                              final int numberOfBytes) throws ActiveMQException, IOException {
      session.start();

      ClientConsumer consumer = session.createConsumer(queueToRead);

      ClientMessage clientMessage = consumer.receive(5000);

      Assert.assertNotNull(clientMessage);

      clientMessage.acknowledge();

      session.commit();

      consumer.close();
   }
protected OutputStream createFakeOutputStream() throws Exception {
return new OutputStream() {
private boolean closed = false;
private int count;
@Override
public void close() throws IOException {
super.close();
closed = true;
}
@Override
public void write(final int b) throws IOException {
if (count++ % 1024 * 1024 == 0) {
LargeMessageTestBase.log.debug("OutputStream received " + count + " bytes");
}
if (closed) {
throw new IOException("Stream was closed");
}
}
};
}
//depending on the value of regular argument, it can produce a text stream
//whose size is above minLargeMessageSize but whose compressed size is either
//below minLargeMessageSize (regular = true) or above it (regular = false)
public static void adjustLargeCompression(boolean regular,
TestLargeMessageInputStream stream,
int step) throws IOException {
int absoluteStep = Math.abs(step);
while (true) {
DeflaterReader compressor = new DeflaterReader(stream, new AtomicLong());
try {
byte[] buffer = new byte[1048 * 50];
int totalCompressed = 0;
int n = compressor.read(buffer);
while (n != -1) {
totalCompressed += n;
n = compressor.read(buffer);
}
// check compressed size
if (regular && (totalCompressed < stream.getMinLarge())) {
// ok it can be sent as regular
stream.resetAdjust(0);
break;
} else if ((!regular) && (totalCompressed > stream.getMinLarge())) {
// now it cannot be sent as regular
stream.resetAdjust(0);
break;
} else {
stream.resetAdjust(regular ? -absoluteStep : absoluteStep);
}
} finally {
compressor.close();
}
}
}
public static class TestLargeMessageInputStream extends InputStream {
private final int minLarge;
private int size;
private int pos;
private boolean random;
public TestLargeMessageInputStream(int minLarge) {
this(minLarge, false);
}
public TestLargeMessageInputStream(int minLarge, boolean random) {
pos = 0;
this.minLarge = minLarge;
this.size = minLarge + 1024;
this.random = random;
}
public int getChar(int index) {
if (random) {
Random r = new Random();
return 'A' + r.nextInt(26);
} else {
return 'A' + index % 26;
}
}
public void setSize(int size) {
this.size = size;
}
public TestLargeMessageInputStream(TestLargeMessageInputStream other) {
this.minLarge = other.minLarge;
this.size = other.size;
this.pos = other.pos;
}
public int getSize() {
return size;
}
public int getMinLarge() {
return this.minLarge;
}
@Override
public int read() throws IOException {
if (pos == size)
return -1;
pos++;
return getChar(pos - 1);
}
public void resetAdjust(int step) {
size += step;
if (size <= minLarge) {
throw new IllegalStateException("Couldn't adjust anymore, size smaller than minLarge " + minLarge);
}
pos = 0;
}
@Override
public TestLargeMessageInputStream clone() {
return new TestLargeMessageInputStream(this);
}
public char[] toArray() throws IOException {
char[] result = new char[size];
for (int i = 0; i < result.length; i++) {
result[i] = (char) read();
}
return result;
}
}
}
| |
package ro.nextreports.server.web.analysis.feature.sort;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.wicket.AttributeModifier;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.ajax.markup.html.AjaxLink;
import org.apache.wicket.ajax.markup.html.form.AjaxSubmitLink;
import org.apache.wicket.extensions.markup.html.repeater.data.grid.ICellPopulator;
import org.apache.wicket.extensions.markup.html.repeater.data.table.AbstractColumn;
import org.apache.wicket.extensions.markup.html.repeater.data.table.DataTable;
import org.apache.wicket.extensions.markup.html.repeater.data.table.IColumn;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.html.form.ChoiceRenderer;
import org.apache.wicket.markup.html.form.DropDownChoice;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.Model;
import org.apache.wicket.model.PropertyModel;
import org.apache.wicket.model.StringResourceModel;
import org.apache.wicket.markup.html.form.Form;
import org.apache.wicket.markup.html.image.ContextImage;
import org.apache.wicket.markup.html.panel.Panel;
import org.apache.wicket.markup.repeater.Item;
import org.apache.wicket.spring.injection.annot.SpringBean;
import ro.nextreports.server.domain.Analysis;
import ro.nextreports.server.service.StorageService;
import ro.nextreports.server.util.AnalysisUtil;
import ro.nextreports.server.web.analysis.util.DatabaseUtil;
import ro.nextreports.server.web.common.behavior.SimpleTooltipBehavior;
import ro.nextreports.server.web.common.form.FormContentPanel;
import ro.nextreports.server.web.common.form.FormPanel;
import ro.nextreports.server.web.common.menu.MenuItem;
import ro.nextreports.server.web.common.menu.MenuPanel;
import ro.nextreports.server.web.common.table.BaseTable;
import ro.nextreports.server.web.common.table.LinkPropertyColumn;
public class SortPanel extends FormContentPanel<Analysis> {
    // Parallel lists: column names being sorted on and, per column, ascending flag.
    private ArrayList<String> sortProperty;
    private ArrayList<Boolean> ascending;

    // The sort entry currently being created or edited in the form controls.
    private SortObject sortObject;

    // Form widgets for picking column and direction.
    private DropDownChoice<String> columnChoice;
    private DropDownChoice<Boolean> orderChoice;

    // Label on the add/edit button (text switches between "add" and "edit").
    private Label label;

    // Grid showing the configured sort entries.
    private DataTable<SortObject, String> table;
    private SortObjectDataProvider provider;

    // Flags consumed by the owner to know whether cached results must be refreshed.
    private boolean firstSortRemoved = false;
    private boolean changeFirstSortOrder = false;

    // Index of the entry being edited, or -1 when adding a new one.
    private int editIndex = -1;

    private IModel<String> addTextModel;

    @SpringBean
    private StorageService storageService;
    /**
     * Builds the sort-configuration panel: an info tooltip, column/direction
     * drop-downs, an add/edit submit link and the table of existing sort entries.
     * Works on copies of the analysis' sort lists; callers read them back via
     * the getters once the form is submitted.
     */
    public SortPanel(IModel<Analysis> model) {
        super(FormPanel.CONTENT_ID);

        // Local copies — the model object is not mutated until the owner commits.
        sortProperty = new ArrayList<String>(model.getObject().getSortProperty());
        ascending = new ArrayList<Boolean>(model.getObject().getAscending());

        // Seed the edit form with the first column, ascending.
        sortObject = new SortObject();
        sortObject.setColumn(model.getObject().getSimpleColumns().get(0));
        sortObject.setOrder(Boolean.TRUE);

        ContextImage urlImage = new ContextImage("infoImage","images/information.png");
        urlImage.add(new SimpleTooltipBehavior(AnalysisUtil.getAnalysisInfo(model.getObject(), 5, storageService.getSettings())));
        add(urlImage);

        add(new Label("column", new StringResourceModel("SortPanel.column", null, null)));
        // Columns are shown by alias, stored by full name.
        columnChoice = new DropDownChoice<String>("columnChoice",
                new PropertyModel<String>(this, "sortObject.column"), model.getObject().getSimpleColumns(),
                new ChoiceRenderer<String>() {
                    @Override
                    public Object getDisplayValue(String fullColumnName) {
                        return DatabaseUtil.getColumnAlias(fullColumnName);
                    }
                });
        columnChoice.setOutputMarkupPlaceholderTag(true);
        columnChoice.setRequired(true);
        add(columnChoice);

        add(new Label("order", new StringResourceModel("SortPanel.order", null, null)));
        // TRUE = ascending, FALSE = descending.
        orderChoice = new DropDownChoice<Boolean>("orderChoice",
                new PropertyModel<Boolean>(this, "sortObject.order"), Arrays.asList(Boolean.TRUE, Boolean.FALSE));
        orderChoice.setOutputMarkupPlaceholderTag(true);
        orderChoice.setRequired(true);
        add(orderChoice);

        // One link serves both "add new entry" and "apply edit" depending on editIndex.
        AjaxSubmitLink addLink = new AjaxSubmitLink("addLink") {
            @Override
            protected void onSubmit(AjaxRequestTarget target, Form<?> form) {
                if (editIndex != -1) {
                    // Editing: reject a column already used by a different entry.
                    int index = sortProperty.indexOf(sortObject.getColumn());
                    if ( (index != -1) && (index != editIndex) ) {
                        error(getString("SortPanel.duplicateColumn"));
                        target.add(getFeedbackPanel());
                        return;
                    }
                    // Changing the first sort entry invalidates cached ordering.
                    if (editIndex == 0) {
                        if (sortProperty.get(editIndex).equals(sortObject.getColumn())) {
                            changeFirstSortOrder = true;
                        } else {
                            firstSortRemoved = true;
                        }
                    }
                    sortProperty.set(editIndex, sortObject.getColumn());
                    ascending.set(editIndex, sortObject.getOrder());
                    resetEdit(target);
                } else {
                    // Adding: each column may appear at most once.
                    if (sortProperty.contains(sortObject.getColumn())) {
                        error(getString("SortPanel.duplicateColumn"));
                        target.add(getFeedbackPanel());
                        return;
                    }
                    sortProperty.add(sortObject.getColumn());
                    ascending.add(sortObject.getOrder());
                }
                target.add(table);
                target.add(getFeedbackPanel());
            }
        };
        addTextModel = Model.of("");
        label = new Label("addMessage", addTextModel);
        label.setOutputMarkupPlaceholderTag(true);
        addLink.add(label);
        add(addLink);

        addTable();
    }
protected void onConfigure() {
super.onConfigure();
addTextModel.setObject(getString("add"));
}
private void resetEdit(AjaxRequestTarget target) {
addTextModel.setObject(getString("add"));
editIndex = -1;
target.add(label);
}
private void addTable() {
List<IColumn<SortObject, String>> columns = new ArrayList<IColumn<SortObject, String>>();
columns.add(new AbstractColumn<SortObject, String>(new Model<String>("")) {
@Override
public String getCssClass() {
return "index";
}
public void populateItem(Item<ICellPopulator<SortObject>> item, String componentId, final IModel<SortObject> rowModel) {
int col=item.getIndex();
Item<?> i = (Item<?>) item.getParent().getParent();
int row = i.getIndex()+1;
item.add(new Label(componentId, new Model<String>(String.valueOf(row))));
}
});
columns.add(new AbstractColumn<SortObject, String>(new StringResourceModel("SortPanel.column", null, null)) {
public void populateItem(Item<ICellPopulator<SortObject>> item, String componentId, final IModel<SortObject> rowModel) {
final SortObject sortObject = rowModel.getObject();
item.add(new Label(componentId, new Model<String>(sortObject.getColumn())));
}
});
columns.add(new AnalysisBooleanImagePropertyColumn<SortObject>(new StringResourceModel("SortPanel.order", null, null), "order"));
columns.add(new LinkPropertyColumn<SortObject>(new StringResourceModel("up", null, null), new StringResourceModel("up", null, null)) {
private static final long serialVersionUID = 1L;
@Override
public void onClick(Item item, String componentId, IModel model, AjaxRequestTarget target) {
SortObject sortObject = (SortObject) model.getObject();
int upIndex = sortProperty.indexOf(sortObject.getColumn());
if (upIndex > 0) {
sortProperty.remove(upIndex);
sortProperty.add(upIndex-1, sortObject.getColumn());
ascending.remove(upIndex);
ascending.add(upIndex-1, sortObject.getOrder());
if (upIndex == 1) {
changeFirstSortOrder = true;
}
resetEdit(target);
target.add(table);
}
}
});
columns.add(new LinkPropertyColumn<SortObject>(new StringResourceModel("down", null, null), new StringResourceModel("down", null, null)) {
private static final long serialVersionUID = 1L;
@Override
public void onClick(Item item, String componentId, IModel model, AjaxRequestTarget target) {
SortObject sortObject = (SortObject) model.getObject();
int upIndex = sortProperty.indexOf(sortObject.getColumn());
if (upIndex < sortProperty.size()-1) {
sortProperty.remove(upIndex);
sortProperty.add(upIndex+1, sortObject.getColumn());
ascending.remove(upIndex);
ascending.add(upIndex+1, sortObject.getOrder());
resetEdit(target);
target.add(table);
}
}
});
columns.add(new LinkPropertyColumn<SortObject>(new StringResourceModel("edit", null, null), new StringResourceModel("edit", null, null)) {
private static final long serialVersionUID = 1L;
@Override
public void onClick(Item item, String componentId, IModel model, AjaxRequestTarget target) {
SortObject sortObject = (SortObject) model.getObject();
editIndex = sortProperty.indexOf(sortObject.getColumn());
SortPanel.this.sortObject = sortObject;
addTextModel.setObject(getString("edit"));
target.add(columnChoice);
target.add(orderChoice);
target.add(label);
}
});
columns.add(new LinkPropertyColumn<SortObject>(new StringResourceModel("delete", null, null), new StringResourceModel("delete", null, null), new StringResourceModel("SortPanel.askDelete", null, null)) {
private static final long serialVersionUID = 1L;
@Override
public void onClick(Item item, String componentId, IModel model, AjaxRequestTarget target) {
SortObject sortObject = (SortObject) model.getObject();
int index = sortProperty.indexOf(sortObject.getColumn());
if (index == 0) {
firstSortRemoved = true;
}
sortProperty.remove(index);
ascending.remove(index);
resetEdit(target);
target.add(table);
}
});
provider = new SortObjectDataProvider(new Model<ArrayList<String>>(sortProperty), new Model<ArrayList<Boolean>>(ascending));
table = new BaseTable<SortObject>("table", columns, provider, 10);
table.setOutputMarkupId(true);
add(table);
}
    // Columns to sort by, in priority order (read back by the owner on submit).
    public ArrayList<String> getSortProperty() {
        return sortProperty;
    }

    // Per-column direction flags, parallel to getSortProperty().
    public ArrayList<Boolean> getAscending() {
        return ascending;
    }

    // True if the highest-priority sort entry was deleted or replaced.
    public boolean isFirstSortRemoved() {
        return firstSortRemoved;
    }

    // True if only the direction of the highest-priority entry changed.
    public boolean isChangeFirstSortOrder() {
        return changeFirstSortOrder;
    }

    // True while an existing entry is loaded into the form for editing.
    public boolean isEdit() {
        return editIndex != -1;
    }
}
| |
/*
* Copyright 2015 The gRPC Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.grpc.netty;
import static com.google.common.base.Charsets.UTF_8;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.timeout;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import io.grpc.Attributes;
import io.grpc.CallCredentials;
import io.grpc.ChannelCredentials;
import io.grpc.ChannelLogger;
import io.grpc.ChoiceChannelCredentials;
import io.grpc.ChoiceServerCredentials;
import io.grpc.CompositeChannelCredentials;
import io.grpc.Grpc;
import io.grpc.InsecureChannelCredentials;
import io.grpc.InsecureServerCredentials;
import io.grpc.InternalChannelz;
import io.grpc.InternalChannelz.Security;
import io.grpc.Metadata;
import io.grpc.SecurityLevel;
import io.grpc.ServerCredentials;
import io.grpc.ServerStreamTracer;
import io.grpc.Status;
import io.grpc.StatusException;
import io.grpc.StatusRuntimeException;
import io.grpc.TlsChannelCredentials;
import io.grpc.TlsServerCredentials;
import io.grpc.internal.ClientTransportFactory;
import io.grpc.internal.GrpcAttributes;
import io.grpc.internal.InternalServer;
import io.grpc.internal.ManagedClientTransport;
import io.grpc.internal.ServerListener;
import io.grpc.internal.ServerStream;
import io.grpc.internal.ServerTransport;
import io.grpc.internal.ServerTransportListener;
import io.grpc.internal.TestUtils.NoopChannelLogger;
import io.grpc.internal.testing.TestUtils;
import io.grpc.netty.ProtocolNegotiators.ClientTlsHandler;
import io.grpc.netty.ProtocolNegotiators.ClientTlsProtocolNegotiator;
import io.grpc.netty.ProtocolNegotiators.HostPort;
import io.grpc.netty.ProtocolNegotiators.ServerTlsHandler;
import io.grpc.netty.ProtocolNegotiators.WaitUntilActiveHandler;
import io.netty.bootstrap.Bootstrap;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufUtil;
import io.netty.channel.Channel;
import io.netty.channel.ChannelDuplexHandler;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelHandler;
import io.netty.channel.ChannelHandlerAdapter;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandler;
import io.netty.channel.ChannelInboundHandlerAdapter;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelOutboundHandlerAdapter;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.ChannelPromise;
import io.netty.channel.DefaultEventLoop;
import io.netty.channel.DefaultEventLoopGroup;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.embedded.EmbeddedChannel;
import io.netty.channel.local.LocalAddress;
import io.netty.channel.local.LocalChannel;
import io.netty.channel.local.LocalServerChannel;
import io.netty.handler.codec.http.HttpServerCodec;
import io.netty.handler.codec.http.HttpServerUpgradeHandler;
import io.netty.handler.codec.http.HttpServerUpgradeHandler.UpgradeCodec;
import io.netty.handler.codec.http.HttpServerUpgradeHandler.UpgradeCodecFactory;
import io.netty.handler.codec.http2.DefaultHttp2Connection;
import io.netty.handler.codec.http2.DefaultHttp2ConnectionDecoder;
import io.netty.handler.codec.http2.DefaultHttp2ConnectionEncoder;
import io.netty.handler.codec.http2.DefaultHttp2FrameReader;
import io.netty.handler.codec.http2.DefaultHttp2FrameWriter;
import io.netty.handler.codec.http2.Http2ConnectionDecoder;
import io.netty.handler.codec.http2.Http2ConnectionEncoder;
import io.netty.handler.codec.http2.Http2ServerUpgradeCodec;
import io.netty.handler.codec.http2.Http2Settings;
import io.netty.handler.proxy.ProxyConnectException;
import io.netty.handler.ssl.ApplicationProtocolConfig;
import io.netty.handler.ssl.SslContext;
import io.netty.handler.ssl.SslContextBuilder;
import io.netty.handler.ssl.SslHandler;
import io.netty.handler.ssl.SslHandshakeCompletionEvent;
import io.netty.handler.ssl.SupportedCipherSuiteFilter;
import io.netty.handler.ssl.util.SelfSignedCertificate;
import java.io.File;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.security.KeyStore;
import java.security.cert.Certificate;
import java.security.cert.X509Certificate;
import java.util.ArrayDeque;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import java.util.logging.Filter;
import java.util.logging.Level;
import java.util.logging.LogRecord;
import java.util.logging.Logger;
import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLException;
import javax.net.ssl.SSLHandshakeException;
import javax.net.ssl.SSLSession;
import javax.net.ssl.TrustManagerFactory;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.DisableOnDebug;
import org.junit.rules.ExpectedException;
import org.junit.rules.TestRule;
import org.junit.rules.Timeout;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.mockito.ArgumentCaptor;
import org.mockito.ArgumentMatchers;
import org.mockito.Mockito;
@RunWith(JUnit4.class)
public class ProtocolNegotiatorsTest {
// Placeholder callback for APIs that require a Runnable when no action is needed.
private static final Runnable NOOP_RUNNABLE = new Runnable() {
  @Override public void run() {}
};

// Test certificate files, loaded once per class by loadCerts().
private static File server1Cert;
private static File server1Key;
private static File caCert;

/**
 * Loads the server certificate/key pair and the CA certificate from test
 * resources before any test in this class runs.
 */
@BeforeClass
public static void loadCerts() throws Exception {
  server1Cert = TestUtils.loadCert("server1.pem");
  server1Key = TestUtils.loadCert("server1.key");
  caCert = TestUtils.loadCert("ca.pem");
}

private static final int TIMEOUT_SECONDS = 60;
// Per-test timeout; disabled while a debugger is attached so breakpoints don't trip it.
@Rule public final TestRule globalTimeout = new DisableOnDebug(Timeout.seconds(TIMEOUT_SECONDS));
@SuppressWarnings("deprecation") // https://github.com/grpc/grpc-java/issues/7467
@Rule public final ExpectedException thrown = ExpectedException.none();
private final EventLoopGroup group = new DefaultEventLoop();
private Channel chan;    // client-side channel; closed in tearDown() if a test set it
private Channel server;  // server-side channel; closed in tearDown() if a test set it
private final GrpcHttp2ConnectionHandler grpcHandler =
    FakeGrpcHttp2ConnectionHandler.newHandler();
// Embedded channel/pipeline used by the handler-level (non-networked) tests below.
private EmbeddedChannel channel = new EmbeddedChannel();
private ChannelPipeline pipeline = channel.pipeline();
private SslContext sslContext;           // server SslContext, rebuilt in setUp()
private SSLEngine engine;                // client-mode engine, rebuilt in setUp()
private ChannelHandlerContext channelHandlerCtx;
// NOTE(review): never reassigned in this view; could likely be final — confirm rest of file.
private static ChannelLogger noopLogger = new NoopChannelLogger();
/**
 * Builds a fresh server-side {@link SslContext} (server1 cert/key, preferred test
 * ciphers) and a client-mode {@link SSLEngine} before each test.
 */
@Before
public void setUp() throws Exception {
  File certChainFile = TestUtils.loadCert("server1.pem");
  File privateKeyFile = TestUtils.loadCert("server1.key");
  sslContext =
      GrpcSslContexts.forServer(certChainFile, privateKeyFile)
          .ciphers(TestUtils.preferredTestCiphers(), SupportedCipherSuiteFilter.INSTANCE)
          .build();
  engine = SSLContext.getDefault().createSSLEngine();
  engine.setUseClientMode(true);
}
/**
 * Closes any channels the test opened and releases the event loop group.
 *
 * <p>Shuts down with a zero quiet period so teardown does not pay Netty's default
 * 2-second graceful-shutdown quiet period on every single test.
 */
@After
public void tearDown() {
  if (server != null) {
    server.close();
  }
  if (chan != null) {
    chan.close();
  }
  group.shutdownGracefully(0, 0, TimeUnit.SECONDS);
}
// An unrecognized ChannelCredentials type yields an error result with no negotiator.
@Test
public void fromClient_unknown() {
  ProtocolNegotiators.FromChannelCredentialsResult result =
      ProtocolNegotiators.from(new ChannelCredentials() {
        @Override
        public ChannelCredentials withoutBearerTokens() {
          throw new UnsupportedOperationException();
        }
      });
  assertThat(result.error).isNotNull();
  assertThat(result.callCredentials).isNull();
  assertThat(result.negotiator).isNull();
}

// TLS channel credentials map to the TLS client negotiator factory.
@Test
public void fromClient_tls() {
  ProtocolNegotiators.FromChannelCredentialsResult result =
      ProtocolNegotiators.from(TlsChannelCredentials.create());
  assertThat(result.error).isNull();
  assertThat(result.callCredentials).isNull();
  assertThat(result.negotiator)
      .isInstanceOf(ProtocolNegotiators.TlsProtocolNegotiatorClientFactory.class);
}

// TLS credentials demanding an unsupported ("FAKE") feature are rejected with an error.
@Test
public void fromClient_unsupportedTls() {
  ProtocolNegotiators.FromChannelCredentialsResult result =
      ProtocolNegotiators.from(TlsChannelCredentials.newBuilder().requireFakeFeature().build());
  assertThat(result.error).contains("FAKE");
  assertThat(result.callCredentials).isNull();
  assertThat(result.negotiator).isNull();
}

// Insecure channel credentials map to the plaintext client negotiator factory.
@Test
public void fromClient_insecure() {
  ProtocolNegotiators.FromChannelCredentialsResult result =
      ProtocolNegotiators.from(InsecureChannelCredentials.create());
  assertThat(result.error).isNull();
  assertThat(result.callCredentials).isNull();
  assertThat(result.negotiator)
      .isInstanceOf(ProtocolNegotiators.PlaintextProtocolNegotiatorClientFactory.class);
}

// Composite credentials pass through the CallCredentials and negotiate per the
// wrapped ChannelCredentials (TLS and insecure variants both checked).
@Test
public void fromClient_composite() {
  CallCredentials callCredentials = mock(CallCredentials.class);
  ProtocolNegotiators.FromChannelCredentialsResult result =
      ProtocolNegotiators.from(CompositeChannelCredentials.create(
          TlsChannelCredentials.create(), callCredentials));
  assertThat(result.error).isNull();
  assertThat(result.callCredentials).isSameInstanceAs(callCredentials);
  assertThat(result.negotiator)
      .isInstanceOf(ProtocolNegotiators.TlsProtocolNegotiatorClientFactory.class);

  result = ProtocolNegotiators.from(CompositeChannelCredentials.create(
      InsecureChannelCredentials.create(), callCredentials));
  assertThat(result.error).isNull();
  assertThat(result.callCredentials).isSameInstanceAs(callCredentials);
  assertThat(result.negotiator)
      .isInstanceOf(ProtocolNegotiators.PlaintextProtocolNegotiatorClientFactory.class);
}

// NettyChannelCredentials carry an explicit factory, which is returned as-is.
@Test
public void fromClient_netty() {
  ProtocolNegotiator.ClientFactory factory = mock(ProtocolNegotiator.ClientFactory.class);
  ProtocolNegotiators.FromChannelCredentialsResult result =
      ProtocolNegotiators.from(NettyChannelCredentials.create(factory));
  assertThat(result.error).isNull();
  assertThat(result.callCredentials).isNull();
  assertThat(result.negotiator).isSameInstanceAs(factory);
}

// ChoiceChannelCredentials picks the first supported option, skipping unknown types.
@Test
public void fromClient_choice() {
  ProtocolNegotiators.FromChannelCredentialsResult result =
      ProtocolNegotiators.from(ChoiceChannelCredentials.create(
          new ChannelCredentials() {
            @Override
            public ChannelCredentials withoutBearerTokens() {
              throw new UnsupportedOperationException();
            }
          },
          TlsChannelCredentials.create(),
          InsecureChannelCredentials.create()));
  assertThat(result.error).isNull();
  assertThat(result.callCredentials).isNull();
  assertThat(result.negotiator)
      .isInstanceOf(ProtocolNegotiators.TlsProtocolNegotiatorClientFactory.class);

  result = ProtocolNegotiators.from(ChoiceChannelCredentials.create(
      InsecureChannelCredentials.create(),
      new ChannelCredentials() {
        @Override
        public ChannelCredentials withoutBearerTokens() {
          throw new UnsupportedOperationException();
        }
      },
      TlsChannelCredentials.create()));
  assertThat(result.error).isNull();
  assertThat(result.callCredentials).isNull();
  assertThat(result.negotiator)
      .isInstanceOf(ProtocolNegotiators.PlaintextProtocolNegotiatorClientFactory.class);
}

// A choice containing only unknown credential types yields an error result.
@Test
public void fromClient_choice_unknown() {
  ProtocolNegotiators.FromChannelCredentialsResult result =
      ProtocolNegotiators.from(ChoiceChannelCredentials.create(
          new ChannelCredentials() {
            @Override
            public ChannelCredentials withoutBearerTokens() {
              throw new UnsupportedOperationException();
            }
          }));
  assertThat(result.error).isNotNull();
  assertThat(result.callCredentials).isNull();
  assertThat(result.negotiator).isNull();
}
/**
 * Runs a real client/server handshake with the given credentials, expecting success,
 * and returns the server-side TLS channelz stats.
 */
private InternalChannelz.Tls expectSuccessfulHandshake(
    ChannelCredentials channelCreds, ServerCredentials serverCreds) throws Exception {
  return (InternalChannelz.Tls) expectHandshake(channelCreds, serverCreds, true);
}

/**
 * Runs a real client/server handshake with the given credentials, expecting failure,
 * and returns the Status delivered to the client's transportShutdown().
 */
private Status expectFailedHandshake(
    ChannelCredentials channelCreds, ServerCredentials serverCreds) throws Exception {
  return (Status) expectHandshake(channelCreds, serverCreds, false);
}

/**
 * Starts a Netty server and client transport built from the given credentials, waits
 * for the client transport to become ready (expectSuccess) or shut down
 * (!expectSuccess), then tears both sides down.
 *
 * @return the server transport's {@code security.tls} stats on success, or the
 *     client's shutdown {@link Status} on failure
 */
private Object expectHandshake(
    ChannelCredentials channelCreds, ServerCredentials serverCreds, boolean expectSuccess)
    throws Exception {
  MockServerListener serverListener = new MockServerListener();
  ClientTransportFactory clientFactory = NettyChannelBuilder
      // Although specified here, address is ignored because we never call build.
      .forAddress("localhost", 0, channelCreds)
      .buildTransportFactory();
  InternalServer server = NettyServerBuilder
      .forPort(0, serverCreds)
      .buildTransportServers(Collections.<ServerStreamTracer.Factory>emptyList());
  server.start(serverListener);
  ManagedClientTransport.Listener clientTransportListener =
      mock(ManagedClientTransport.Listener.class);
  ManagedClientTransport client = clientFactory.newClientTransport(
      server.getListenSocketAddress(),
      new ClientTransportFactory.ClientTransportOptions()
          .setAuthority(TestUtils.TEST_SERVER_HOST),
      mock(ChannelLogger.class));
  callMeMaybe(client.start(clientTransportListener));
  Object result;
  if (expectSuccess) {
    // transportReady() implies the handshake completed, so the server transport and
    // its channelz security stats must be populated by then.
    verify(clientTransportListener, timeout(TIMEOUT_SECONDS * 1000)).transportReady();
    InternalChannelz.SocketStats stats = serverListener.transports.poll().getStats().get();
    assertThat(stats.security).isNotNull();
    assertThat(stats.security.tls).isNotNull();
    result = stats.security.tls;
  } else {
    ArgumentCaptor<Status> captor = ArgumentCaptor.forClass(Status.class);
    verify(clientTransportListener, timeout(TIMEOUT_SECONDS * 1000))
        .transportShutdown(captor.capture());
    result = captor.getValue();
  }
  // Tear down both sides and wait for full termination so tests don't leak transports.
  client.shutdownNow(Status.UNAVAILABLE.withDescription("trash it"));
  server.shutdown();
  assertTrue(
      serverListener.waitForShutdown(TIMEOUT_SECONDS * 1000, TimeUnit.MILLISECONDS));
  verify(clientTransportListener, timeout(TIMEOUT_SECONDS * 1000)).transportTerminated();
  clientFactory.close();
  return result;
}
// Server without client auth, client without a cert: handshake succeeds, no peer cert.
@Test
public void from_tls_clientAuthNone_noClientCert() throws Exception {
  // Use convenience API to better match most user's usage
  ServerCredentials serverCreds = TlsServerCredentials.create(server1Cert, server1Key);
  ChannelCredentials channelCreds = TlsChannelCredentials.newBuilder()
      .trustManager(caCert)
      .build();
  InternalChannelz.Tls tls = expectSuccessfulHandshake(channelCreds, serverCreds);
  assertThat(tls.remoteCert).isNull();
}

// Server without client auth ignores a client cert: server sees no peer cert.
@Test
public void from_tls_clientAuthNone_clientCert() throws Exception {
  ServerCredentials serverCreds = TlsServerCredentials.newBuilder()
      .keyManager(server1Cert, server1Key)
      .trustManager(caCert)
      .build();
  ChannelCredentials channelCreds = TlsChannelCredentials.newBuilder()
      .keyManager(server1Cert, server1Key)
      .trustManager(caCert)
      .build();
  InternalChannelz.Tls tls = expectSuccessfulHandshake(channelCreds, serverCreds);
  assertThat(tls.remoteCert).isNull();
}

// REQUIRE client auth with no client cert: handshake fails with UNAVAILABLE.
@Test
public void from_tls_clientAuthRequire_noClientCert() throws Exception {
  ServerCredentials serverCreds = TlsServerCredentials.newBuilder()
      .keyManager(server1Cert, server1Key)
      .trustManager(caCert)
      .clientAuth(TlsServerCredentials.ClientAuth.REQUIRE)
      .build();
  ChannelCredentials channelCreds = TlsChannelCredentials.newBuilder()
      .trustManager(caCert)
      .build();
  Status status = expectFailedHandshake(channelCreds, serverCreds);
  assertEquals(Status.Code.UNAVAILABLE, status.getCode());
  StatusException sre = status.asException();
  // because of netty/netty#11604 we need to check for both TLSv1.2 and v1.3 behaviors
  if (sre.getCause() instanceof SSLHandshakeException) {
    assertThat(sre).hasCauseThat().isInstanceOf(SSLHandshakeException.class);
    assertThat(sre).hasCauseThat().hasMessageThat().contains("SSLV3_ALERT_HANDSHAKE_FAILURE");
  } else {
    // Client cert verification is after handshake in TLSv1.3
    assertThat(sre).hasCauseThat().hasCauseThat().isInstanceOf(SSLException.class);
    assertThat(sre).hasCauseThat().hasMessageThat().contains("CERTIFICATE_REQUIRED");
  }
}

// REQUIRE client auth with a valid client cert: server sees the peer certificate.
@Test
public void from_tls_clientAuthRequire_clientCert() throws Exception {
  ServerCredentials serverCreds = TlsServerCredentials.newBuilder()
      .keyManager(server1Cert, server1Key)
      .trustManager(caCert)
      .clientAuth(TlsServerCredentials.ClientAuth.REQUIRE)
      .build();
  ChannelCredentials channelCreds = TlsChannelCredentials.newBuilder()
      .keyManager(server1Cert, server1Key)
      .trustManager(caCert)
      .build();
  InternalChannelz.Tls tls = expectSuccessfulHandshake(channelCreds, serverCreds);
  assertThat(((X509Certificate) tls.remoteCert).getSubjectX500Principal().getName())
      .contains("CN=*.test.google.com");
}

// OPTIONAL client auth with no client cert: handshake succeeds without a peer cert.
@Test
public void from_tls_clientAuthOptional_noClientCert() throws Exception {
  ServerCredentials serverCreds = TlsServerCredentials.newBuilder()
      .keyManager(server1Cert, server1Key)
      .trustManager(caCert)
      .clientAuth(TlsServerCredentials.ClientAuth.OPTIONAL)
      .build();
  ChannelCredentials channelCreds = TlsChannelCredentials.newBuilder()
      .trustManager(caCert)
      .build();
  InternalChannelz.Tls tls = expectSuccessfulHandshake(channelCreds, serverCreds);
  assertThat(tls.remoteCert).isNull();
}

// OPTIONAL client auth with a client cert: server sees the peer certificate.
@Test
public void from_tls_clientAuthOptional_clientCert() throws Exception {
  ServerCredentials serverCreds = TlsServerCredentials.newBuilder()
      .keyManager(server1Cert, server1Key)
      .trustManager(caCert)
      .clientAuth(TlsServerCredentials.ClientAuth.OPTIONAL)
      .build();
  ChannelCredentials channelCreds = TlsChannelCredentials.newBuilder()
      .keyManager(server1Cert, server1Key)
      .trustManager(caCert)
      .build();
  InternalChannelz.Tls tls = expectSuccessfulHandshake(channelCreds, serverCreds);
  assertThat(((X509Certificate) tls.remoteCert).getSubjectX500Principal().getName())
      .contains("CN=*.test.google.com");
}

// Credentials built from Key/TrustManager instances (in-memory stores seeded from a
// self-signed cert) negotiate mutual TLS successfully.
@Test
public void from_tls_managers() throws Exception {
  SelfSignedCertificate cert = new SelfSignedCertificate(TestUtils.TEST_SERVER_HOST);
  // Key store holding the cert's private key, for both sides' key managers.
  KeyStore keyStore = KeyStore.getInstance(KeyStore.getDefaultType());
  keyStore.load(null);
  keyStore.setKeyEntry("mykey", cert.key(), new char[0], new Certificate[] {cert.cert()});
  KeyManagerFactory keyManagerFactory =
      KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
  keyManagerFactory.init(keyStore, new char[0]);
  // Trust store holding the same cert, so each side trusts the other.
  KeyStore certStore = KeyStore.getInstance(KeyStore.getDefaultType());
  certStore.load(null);
  certStore.setCertificateEntry("mycert", cert.cert());
  TrustManagerFactory trustManagerFactory =
      TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
  trustManagerFactory.init(certStore);
  ServerCredentials serverCreds = TlsServerCredentials.newBuilder()
      .keyManager(keyManagerFactory.getKeyManagers())
      .trustManager(trustManagerFactory.getTrustManagers())
      .clientAuth(TlsServerCredentials.ClientAuth.REQUIRE)
      .build();
  ChannelCredentials channelCreds = TlsChannelCredentials.newBuilder()
      .keyManager(keyManagerFactory.getKeyManagers())
      .trustManager(trustManagerFactory.getTrustManagers())
      .build();
  InternalChannelz.Tls tls = expectSuccessfulHandshake(channelCreds, serverCreds);
  assertThat(((X509Certificate) tls.remoteCert).getSubjectX500Principal().getName())
      .isEqualTo("CN=" + TestUtils.TEST_SERVER_HOST);
  cert.delete();
}
// An unrecognized ServerCredentials type yields an error result with no negotiator.
@Test
public void fromServer_unknown() {
  ProtocolNegotiators.FromServerCredentialsResult result =
      ProtocolNegotiators.from(new ServerCredentials() {});
  assertThat(result.error).isNotNull();
  assertThat(result.negotiator).isNull();
}

// TLS server credentials map to the TLS server negotiator factory.
@Test
public void fromServer_tls() throws Exception {
  ProtocolNegotiators.FromServerCredentialsResult result =
      ProtocolNegotiators.from(TlsServerCredentials.create(server1Cert, server1Key));
  assertThat(result.error).isNull();
  assertThat(result.negotiator)
      .isInstanceOf(ProtocolNegotiators.TlsProtocolNegotiatorServerFactory.class);
}

// TLS server credentials demanding an unsupported ("FAKE") feature are rejected.
@Test
public void fromServer_unsupportedTls() throws Exception {
  ProtocolNegotiators.FromServerCredentialsResult result = ProtocolNegotiators.from(
      TlsServerCredentials.newBuilder()
          .keyManager(server1Cert, server1Key)
          .requireFakeFeature()
          .build());
  assertThat(result.error).contains("FAKE");
  assertThat(result.negotiator).isNull();
}

// Insecure server credentials map to the plaintext server negotiator factory.
@Test
public void fromServer_insecure() {
  ProtocolNegotiators.FromServerCredentialsResult result =
      ProtocolNegotiators.from(InsecureServerCredentials.create());
  assertThat(result.error).isNull();
  assertThat(result.negotiator)
      .isInstanceOf(ProtocolNegotiators.PlaintextProtocolNegotiatorServerFactory.class);
}

// NettyServerCredentials carry an explicit factory, which is returned as-is.
@Test
public void fromServer_netty() {
  ProtocolNegotiator.ServerFactory factory = mock(ProtocolNegotiator.ServerFactory.class);
  ProtocolNegotiators.FromServerCredentialsResult result =
      ProtocolNegotiators.from(NettyServerCredentials.create(factory));
  assertThat(result.error).isNull();
  assertThat(result.negotiator).isSameInstanceAs(factory);
}

// ChoiceServerCredentials picks the first supported option, skipping unknown types.
@Test
public void fromServer_choice() throws Exception {
  ProtocolNegotiators.FromServerCredentialsResult result =
      ProtocolNegotiators.from(ChoiceServerCredentials.create(
          new ServerCredentials() {},
          TlsServerCredentials.create(server1Cert, server1Key),
          InsecureServerCredentials.create()));
  assertThat(result.error).isNull();
  assertThat(result.negotiator)
      .isInstanceOf(ProtocolNegotiators.TlsProtocolNegotiatorServerFactory.class);

  result = ProtocolNegotiators.from(ChoiceServerCredentials.create(
      InsecureServerCredentials.create(),
      new ServerCredentials() {},
      TlsServerCredentials.create(server1Cert, server1Key)));
  assertThat(result.error).isNull();
  assertThat(result.negotiator)
      .isInstanceOf(ProtocolNegotiators.PlaintextProtocolNegotiatorServerFactory.class);
}

// A choice containing only unknown credential types yields an error result.
@Test
public void fromServer_choice_unknown() {
  ProtocolNegotiators.FromServerCredentialsResult result =
      ProtocolNegotiators.from(ChoiceServerCredentials.create(
          new ServerCredentials() {}));
  assertThat(result.error).isNotNull();
  assertThat(result.negotiator).isNull();
}
// When WaitUntilActiveHandler is added to an already-active channel (after the
// negotiation event), its delegate is installed immediately with an active channel,
// and the handler removes itself from the pipeline.
@Test
public void waitUntilActiveHandler_handlerAdded() throws Exception {
  final CountDownLatch latch = new CountDownLatch(1);
  final WaitUntilActiveHandler handler =
      new WaitUntilActiveHandler(new ChannelHandlerAdapter() {
        @Override
        public void handlerAdded(ChannelHandlerContext ctx) throws Exception {
          // The delegate must only ever see an active channel.
          assertTrue(ctx.channel().isActive());
          latch.countDown();
          super.handlerAdded(ctx);
        }
      }, noopLogger);

  // Installs the handler only once the channel is already active.
  ChannelHandler lateAddingHandler = new ChannelInboundHandlerAdapter() {
    @Override
    public void channelActive(ChannelHandlerContext ctx) throws Exception {
      ctx.pipeline().addLast(handler);
      ctx.pipeline().fireUserEventTriggered(ProtocolNegotiationEvent.DEFAULT);
      // do not propagate channelActive().
    }
  };

  LocalAddress addr = new LocalAddress("local");
  ChannelFuture cf = new Bootstrap()
      .channel(LocalChannel.class)
      .handler(lateAddingHandler)
      .group(group)
      .register();
  chan = cf.channel();
  ChannelFuture sf = new ServerBootstrap()
      .channel(LocalServerChannel.class)
      .childHandler(new ChannelHandlerAdapter() {})
      .group(group)
      .bind(addr);
  server = sf.channel();
  sf.sync();

  // Not yet connected, so the delegate must not have been added.
  assertEquals(1, latch.getCount());

  chan.connect(addr).sync();
  assertTrue(latch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS));
  // The handler removes itself once it has done its job.
  assertNull(chan.pipeline().context(WaitUntilActiveHandler.class));
}

// When the handler is added before the channel is active, it defers installing its
// delegate until channelActive/negotiation, then removes itself.
@Test
public void waitUntilActiveHandler_channelActive() throws Exception {
  final CountDownLatch latch = new CountDownLatch(1);
  WaitUntilActiveHandler handler =
      new WaitUntilActiveHandler(new ChannelHandlerAdapter() {
        @Override
        public void handlerAdded(ChannelHandlerContext ctx) throws Exception {
          // The delegate must only ever see an active channel.
          assertTrue(ctx.channel().isActive());
          latch.countDown();
          super.handlerAdded(ctx);
        }
      }, noopLogger);

  LocalAddress addr = new LocalAddress("local");
  ChannelFuture cf = new Bootstrap()
      .channel(LocalChannel.class)
      .handler(handler)
      .group(group)
      .register();
  chan = cf.channel();
  ChannelFuture sf = new ServerBootstrap()
      .channel(LocalServerChannel.class)
      .childHandler(new ChannelHandlerAdapter() {})
      .group(group)
      .bind(addr);
  server = sf.channel();
  sf.sync();

  // Not yet connected, so the delegate must not have been added.
  assertEquals(1, latch.getCount());

  chan.connect(addr).sync();
  chan.pipeline().fireUserEventTriggered(ProtocolNegotiationEvent.DEFAULT);
  assertTrue(latch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS));
  // The handler removes itself once it has done its job.
  assertNull(chan.pipeline().context(WaitUntilActiveHandler.class));
}
// serverTls(null) must reject the null SslContext with an NPE mentioning "ssl".
@Test
public void tlsHandler_failsOnNullEngine() throws Exception {
  thrown.expect(NullPointerException.class);
  thrown.expectMessage("ssl");
  Object unused = ProtocolNegotiators.serverTls(null);
}

// Adding ServerTlsHandler installs an SslHandler at the head of the pipeline.
@Test
public void tlsHandler_handlerAddedAddsSslHandler() throws Exception {
  ChannelHandler handler = new ServerTlsHandler(grpcHandler, sslContext, null);
  pipeline.addLast(handler);
  assertTrue(pipeline.first() instanceof SslHandler);
}

// Unrelated user events are ignored: the gRPC handler is not installed yet.
@Test
public void tlsHandler_userEventTriggeredNonSslEvent() throws Exception {
  ChannelHandler handler = new ServerTlsHandler(grpcHandler, sslContext, null);
  pipeline.addLast(handler);
  channelHandlerCtx = pipeline.context(handler);
  Object nonSslEvent = new Object();
  pipeline.fireUserEventTriggered(nonSslEvent);
  // A non ssl event should not cause the grpcHandler to be in the pipeline yet.
  ChannelHandlerContext grpcHandlerCtx = pipeline.context(grpcHandler);
  assertNull(grpcHandlerCtx);
}

// A successful handshake that negotiated an unsupported ALPN protocol surfaces an
// error and does not install the gRPC handler.
@Test
public void tlsHandler_userEventTriggeredSslEvent_unsupportedProtocol() throws Exception {
  // Fakes the negotiated protocol reported by the SslHandler.
  SslHandler badSslHandler = new SslHandler(engine, false) {
    @Override
    public String applicationProtocol() {
      return "badprotocol";
    }
  };

  ChannelHandler handler = new ServerTlsHandler(grpcHandler, sslContext, null);
  pipeline.addLast(handler);

  final AtomicReference<Throwable> error = new AtomicReference<>();
  ChannelHandler errorCapture = new ChannelInboundHandlerAdapter() {
    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
      error.set(cause);
    }
  };

  pipeline.addLast(errorCapture);
  pipeline.replace(SslHandler.class, null, badSslHandler);
  channelHandlerCtx = pipeline.context(handler);
  Object sslEvent = SslHandshakeCompletionEvent.SUCCESS;
  pipeline.fireUserEventTriggered(sslEvent);

  // No h2 protocol was specified, so there should be an error, (normally handled by WBAEH)
  assertThat(error.get()).hasMessageThat().contains("Unable to find compatible protocol");
  ChannelHandlerContext grpcHandlerCtx = pipeline.context(grpcHandler);
  assertNull(grpcHandlerCtx);
}

// A failed handshake propagates its cause and does not install the gRPC handler.
@Test
public void tlsHandler_userEventTriggeredSslEvent_handshakeFailure() throws Exception {
  ChannelHandler handler = new ServerTlsHandler(grpcHandler, sslContext, null);
  pipeline.addLast(handler);
  channelHandlerCtx = pipeline.context(handler);
  Object sslEvent = new SslHandshakeCompletionEvent(new RuntimeException("bad"));

  final AtomicReference<Throwable> error = new AtomicReference<>();
  ChannelHandler errorCapture = new ChannelInboundHandlerAdapter() {
    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
      error.set(cause);
    }
  };

  pipeline.addLast(errorCapture);
  pipeline.fireUserEventTriggered(sslEvent);

  // No h2 protocol was specified, so there should be an error, (normally handled by WBAEH)
  assertThat(error.get()).hasMessageThat().contains("bad");
  ChannelHandlerContext grpcHandlerCtx = pipeline.context(grpcHandler);
  assertNull(grpcHandlerCtx);
}

// A successful handshake negotiating "h2" installs the gRPC handler.
@Test
public void tlsHandler_userEventTriggeredSslEvent_supportedProtocolH2() throws Exception {
  // Fakes the negotiated protocol reported by the SslHandler.
  SslHandler goodSslHandler = new SslHandler(engine, false) {
    @Override
    public String applicationProtocol() {
      return "h2";
    }
  };

  ChannelHandler handler = new ServerTlsHandler(grpcHandler, sslContext, null);
  pipeline.addLast(handler);

  pipeline.replace(SslHandler.class, null, goodSslHandler);
  channelHandlerCtx = pipeline.context(handler);
  Object sslEvent = SslHandshakeCompletionEvent.SUCCESS;
  pipeline.fireUserEventTriggered(sslEvent);

  assertTrue(channel.isOpen());
  ChannelHandlerContext grpcHandlerCtx = pipeline.context(grpcHandler);
  assertNotNull(grpcHandlerCtx);
}
// With a custom ALPN list configured on the server SslContext, a protocol from that
// list ("managed_mtls") is accepted and the gRPC handler is installed.
@Test
public void serverTlsHandler_userEventTriggeredSslEvent_supportedProtocolCustom()
    throws Exception {
  // Fakes the negotiated protocol reported by the SslHandler.
  SslHandler goodSslHandler = new SslHandler(engine, false) {
    @Override
    public String applicationProtocol() {
      return "managed_mtls";
    }
  };
  File serverCert = TestUtils.loadCert("server1.pem");
  File key = TestUtils.loadCert("server1.key");
  List<String> alpnList = Arrays.asList("managed_mtls", "h2");
  ApplicationProtocolConfig apn = new ApplicationProtocolConfig(
      ApplicationProtocolConfig.Protocol.ALPN,
      ApplicationProtocolConfig.SelectorFailureBehavior.NO_ADVERTISE,
      ApplicationProtocolConfig.SelectedListenerFailureBehavior.ACCEPT,
      alpnList);

  sslContext = GrpcSslContexts.forServer(serverCert, key)
      .ciphers(TestUtils.preferredTestCiphers(), SupportedCipherSuiteFilter.INSTANCE)
      .applicationProtocolConfig(apn).build();

  ChannelHandler handler = new ServerTlsHandler(grpcHandler, sslContext, null);
  pipeline.addLast(handler);

  pipeline.replace(SslHandler.class, null, goodSslHandler);
  channelHandlerCtx = pipeline.context(handler);
  Object sslEvent = SslHandshakeCompletionEvent.SUCCESS;
  pipeline.fireUserEventTriggered(sslEvent);

  assertTrue(channel.isOpen());
  ChannelHandlerContext grpcHandlerCtx = pipeline.context(grpcHandler);
  assertNotNull(grpcHandlerCtx);
}

// Even with a custom ALPN list, a protocol outside the list is rejected with an error
// and the gRPC handler is not installed.
@Test
public void serverTlsHandler_userEventTriggeredSslEvent_unsupportedProtocolCustom()
    throws Exception {
  // Fakes the negotiated protocol reported by the SslHandler.
  SslHandler badSslHandler = new SslHandler(engine, false) {
    @Override
    public String applicationProtocol() {
      return "badprotocol";
    }
  };
  File serverCert = TestUtils.loadCert("server1.pem");
  File key = TestUtils.loadCert("server1.key");
  List<String> alpnList = Arrays.asList("managed_mtls", "h2");
  ApplicationProtocolConfig apn = new ApplicationProtocolConfig(
      ApplicationProtocolConfig.Protocol.ALPN,
      ApplicationProtocolConfig.SelectorFailureBehavior.NO_ADVERTISE,
      ApplicationProtocolConfig.SelectedListenerFailureBehavior.ACCEPT,
      alpnList);

  sslContext = GrpcSslContexts.forServer(serverCert, key)
      .ciphers(TestUtils.preferredTestCiphers(), SupportedCipherSuiteFilter.INSTANCE)
      .applicationProtocolConfig(apn).build();

  ChannelHandler handler = new ServerTlsHandler(grpcHandler, sslContext, null);
  pipeline.addLast(handler);

  final AtomicReference<Throwable> error = new AtomicReference<>();
  ChannelHandler errorCapture = new ChannelInboundHandlerAdapter() {
    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
      error.set(cause);
    }
  };

  pipeline.addLast(errorCapture);
  pipeline.replace(SslHandler.class, null, badSslHandler);
  channelHandlerCtx = pipeline.context(handler);
  Object sslEvent = SslHandshakeCompletionEvent.SUCCESS;
  pipeline.fireUserEventTriggered(sslEvent);

  // No h2 protocol was specified, so there should be an error, (normally handled by WBAEH)
  assertThat(error.get()).hasMessageThat().contains("Unable to find compatible protocol");
  ChannelHandlerContext grpcHandlerCtx = pipeline.context(grpcHandler);
  assertNull(grpcHandlerCtx);
}
// Client side: a successful handshake negotiating "h2" installs the gRPC handler.
@Test
public void clientTlsHandler_userEventTriggeredSslEvent_supportedProtocolH2() throws Exception {
  // Fakes the negotiated protocol reported by the SslHandler.
  SslHandler goodSslHandler = new SslHandler(engine, false) {
    @Override
    public String applicationProtocol() {
      return "h2";
    }
  };
  DefaultEventLoopGroup elg = new DefaultEventLoopGroup(1);

  ClientTlsHandler handler = new ClientTlsHandler(grpcHandler, sslContext,
      "authority", elg, noopLogger);
  pipeline.addLast(handler);
  pipeline.replace(SslHandler.class, null, goodSslHandler);
  pipeline.fireUserEventTriggered(ProtocolNegotiationEvent.DEFAULT);
  channelHandlerCtx = pipeline.context(handler);
  Object sslEvent = SslHandshakeCompletionEvent.SUCCESS;
  pipeline.fireUserEventTriggered(sslEvent);

  ChannelHandlerContext grpcHandlerCtx = pipeline.context(grpcHandler);
  assertNotNull(grpcHandlerCtx);
}

// Client side: a custom ALPN protocol from the configured list is accepted.
@Test
public void clientTlsHandler_userEventTriggeredSslEvent_supportedProtocolCustom()
    throws Exception {
  // Fakes the negotiated protocol reported by the SslHandler.
  SslHandler goodSslHandler = new SslHandler(engine, false) {
    @Override
    public String applicationProtocol() {
      return "managed_mtls";
    }
  };
  DefaultEventLoopGroup elg = new DefaultEventLoopGroup(1);

  File clientCert = TestUtils.loadCert("client.pem");
  File key = TestUtils.loadCert("client.key");
  List<String> alpnList = Arrays.asList("managed_mtls", "h2");
  ApplicationProtocolConfig apn = new ApplicationProtocolConfig(
      ApplicationProtocolConfig.Protocol.ALPN,
      ApplicationProtocolConfig.SelectorFailureBehavior.NO_ADVERTISE,
      ApplicationProtocolConfig.SelectedListenerFailureBehavior.ACCEPT,
      alpnList);

  sslContext = GrpcSslContexts.forClient()
      .keyManager(clientCert, key)
      .ciphers(TestUtils.preferredTestCiphers(), SupportedCipherSuiteFilter.INSTANCE)
      .applicationProtocolConfig(apn).build();

  ClientTlsHandler handler = new ClientTlsHandler(grpcHandler, sslContext,
      "authority", elg, noopLogger);
  pipeline.addLast(handler);
  pipeline.replace(SslHandler.class, null, goodSslHandler);
  pipeline.fireUserEventTriggered(ProtocolNegotiationEvent.DEFAULT);
  channelHandlerCtx = pipeline.context(handler);
  Object sslEvent = SslHandshakeCompletionEvent.SUCCESS;
  pipeline.fireUserEventTriggered(sslEvent);

  ChannelHandlerContext grpcHandlerCtx = pipeline.context(grpcHandler);
  assertNotNull(grpcHandlerCtx);
}

// Client side: an unsupported negotiated protocol surfaces an error and the gRPC
// handler is not installed.
@Test
public void clientTlsHandler_userEventTriggeredSslEvent_unsupportedProtocol() throws Exception {
  // Fakes the negotiated protocol reported by the SslHandler.
  SslHandler goodSslHandler = new SslHandler(engine, false) {
    @Override
    public String applicationProtocol() {
      return "badproto";
    }
  };
  DefaultEventLoopGroup elg = new DefaultEventLoopGroup(1);

  ClientTlsHandler handler = new ClientTlsHandler(grpcHandler, sslContext,
      "authority", elg, noopLogger);
  pipeline.addLast(handler);

  final AtomicReference<Throwable> error = new AtomicReference<>();
  ChannelHandler errorCapture = new ChannelInboundHandlerAdapter() {
    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
      error.set(cause);
    }
  };

  pipeline.addLast(errorCapture);
  pipeline.replace(SslHandler.class, null, goodSslHandler);
  pipeline.fireUserEventTriggered(ProtocolNegotiationEvent.DEFAULT);
  channelHandlerCtx = pipeline.context(handler);
  Object sslEvent = SslHandshakeCompletionEvent.SUCCESS;
  pipeline.fireUserEventTriggered(sslEvent);

  // Bad protocol was specified, so there should be an error, (normally handled by WBAEH)
  assertThat(error.get()).hasMessageThat().contains("Unable to find compatible protocol");
  ChannelHandlerContext grpcHandlerCtx = pipeline.context(grpcHandler);
  assertNull(grpcHandlerCtx);
}

// Closing the channel mid-negotiation fails pending writes with UNAVAILABLE.
@Test
public void clientTlsHandler_closeDuringNegotiation() throws Exception {
  ClientTlsHandler handler = new ClientTlsHandler(grpcHandler, sslContext,
      "authority", null, noopLogger);
  pipeline.addLast(new WriteBufferingAndExceptionHandler(handler));
  ChannelFuture pendingWrite = channel.writeAndFlush(NettyClientHandler.NOOP_MESSAGE);

  // SslHandler fires userEventTriggered() before channelInactive()
  pipeline.fireChannelInactive();

  assertThat(pendingWrite.cause()).isInstanceOf(StatusRuntimeException.class);
  assertThat(Status.fromThrowable(pendingWrite.cause()).getCode())
      .isEqualTo(Status.Code.UNAVAILABLE);
}
// Exercises logSslEngineDetails() without polluting stderr: a filter suppresses the
// output while still letting the log method run.
@Test
public void engineLog() {
  ChannelHandler handler = new ServerTlsHandler(grpcHandler, sslContext, null);
  pipeline.addLast(handler);
  channelHandlerCtx = pipeline.context(handler);

  Logger logger = Logger.getLogger(ProtocolNegotiators.class.getName());
  Filter oldFilter = logger.getFilter();
  try {
    logger.setFilter(new Filter() {
      @Override
      public boolean isLoggable(LogRecord record) {
        // We still want the log method to be exercised, just not printed to stderr.
        return false;
      }
    });
    ProtocolNegotiators.logSslEngineDetails(
        Level.INFO, channelHandlerCtx, "message", new Exception("bad"));
  } finally {
    // Always restore the previous filter so other tests' logging is unaffected.
    logger.setFilter(oldFilter);
  }
}
  @Test
  public void tls_failsOnNullSslContext() {
    // tls(null) must reject eagerly rather than failing later at connect time.
    thrown.expect(NullPointerException.class);
    Object unused = ProtocolNegotiators.tls(null);
  }
  @Test
  public void tls_hostAndPort() {
    // A well-formed "host:port" authority is split into its two components.
    HostPort hostPort = ProtocolNegotiators.parseAuthority("authority:1234");
    assertEquals("authority", hostPort.host);
    assertEquals(1234, hostPort.port);
  }
  @Test
  public void tls_host() {
    // An authority without a port keeps the (bracketed IPv6) host and reports -1.
    HostPort hostPort = ProtocolNegotiators.parseAuthority("[::1]");
    assertEquals("[::1]", hostPort.host);
    assertEquals(-1, hostPort.port);
  }
  @Test
  public void tls_invalidHost() throws SSLException {
    // An underscore makes the host syntactically invalid, so no split is attempted.
    HostPort hostPort = ProtocolNegotiators.parseAuthority("bad_host:1234");

    // Even though it looks like a port, we treat it as part of the authority, since the host is
    // invalid.
    assertEquals("bad_host:1234", hostPort.host);
    assertEquals(-1, hostPort.port);
  }
  @Test
  public void httpProxy_nullAddressNpe() throws Exception {
    // The proxy address is mandatory.
    thrown.expect(NullPointerException.class);
    Object unused =
        ProtocolNegotiators.httpProxy(null, "user", "pass", ProtocolNegotiators.plaintext());
  }
  @Test
  public void httpProxy_nullNegotiatorNpe() throws Exception {
    // The wrapped negotiator is mandatory.
    thrown.expect(NullPointerException.class);
    Object unused = ProtocolNegotiators.httpProxy(
        InetSocketAddress.createUnresolved("localhost", 80), "user", "pass", null);
  }
  @Test
  public void httpProxy_nullUserPassNoException() throws Exception {
    // Credentials are optional: a proxy negotiator without user/pass is valid.
    assertNotNull(ProtocolNegotiators.httpProxy(
        InetSocketAddress.createUnresolved("localhost", 80), null, null,
        ProtocolNegotiators.plaintext()));
  }
@Test
public void httpProxy_completes() throws Exception {
DefaultEventLoopGroup elg = new DefaultEventLoopGroup(1);
// ProxyHandler is incompatible with EmbeddedChannel because when channelRegistered() is called
// the channel is already active.
LocalAddress proxy = new LocalAddress("httpProxy_completes");
SocketAddress host = InetSocketAddress.createUnresolved("specialHost", 314);
ChannelInboundHandler mockHandler = mock(ChannelInboundHandler.class);
Channel serverChannel = new ServerBootstrap().group(elg).channel(LocalServerChannel.class)
.childHandler(mockHandler)
.bind(proxy).sync().channel();
ProtocolNegotiator nego =
ProtocolNegotiators.httpProxy(proxy, null, null, ProtocolNegotiators.plaintext());
// normally NettyClientTransport will add WBAEH which kick start the ProtocolNegotiation,
// mocking the behavior using KickStartHandler.
ChannelHandler handler =
new KickStartHandler(nego.newHandler(FakeGrpcHttp2ConnectionHandler.noopHandler()));
Channel channel = new Bootstrap().group(elg).channel(LocalChannel.class).handler(handler)
.register().sync().channel();
pipeline = channel.pipeline();
// Wait for initialization to complete
channel.eventLoop().submit(NOOP_RUNNABLE).sync();
channel.connect(host).sync();
serverChannel.close();
ArgumentCaptor<ChannelHandlerContext> contextCaptor =
ArgumentCaptor.forClass(ChannelHandlerContext.class);
Mockito.verify(mockHandler).channelActive(contextCaptor.capture());
ChannelHandlerContext serverContext = contextCaptor.getValue();
final String golden = "isThisThingOn?";
ChannelFuture negotiationFuture = channel.writeAndFlush(bb(golden, channel));
// Wait for sending initial request to complete
channel.eventLoop().submit(NOOP_RUNNABLE).sync();
ArgumentCaptor<Object> objectCaptor = ArgumentCaptor.forClass(Object.class);
Mockito.verify(mockHandler)
.channelRead(ArgumentMatchers.<ChannelHandlerContext>any(), objectCaptor.capture());
ByteBuf b = (ByteBuf) objectCaptor.getValue();
String request = b.toString(UTF_8);
b.release();
assertTrue("No trailing newline: " + request, request.endsWith("\r\n\r\n"));
assertTrue("No CONNECT: " + request, request.startsWith("CONNECT specialHost:314 "));
assertTrue("No host header: " + request, request.contains("host: specialHost:314"));
assertFalse(negotiationFuture.isDone());
serverContext.writeAndFlush(bb("HTTP/1.1 200 OK\r\n\r\n", serverContext.channel())).sync();
negotiationFuture.sync();
channel.eventLoop().submit(NOOP_RUNNABLE).sync();
objectCaptor = ArgumentCaptor.forClass(Object.class);
Mockito.verify(mockHandler, times(2))
.channelRead(ArgumentMatchers.<ChannelHandlerContext>any(), objectCaptor.capture());
b = (ByteBuf) objectCaptor.getAllValues().get(1);
// If we were using the real grpcHandler, this would have been the HTTP/2 preface
String preface = b.toString(UTF_8);
b.release();
assertEquals(golden, preface);
channel.close();
}
@Test
public void httpProxy_500() throws Exception {
DefaultEventLoopGroup elg = new DefaultEventLoopGroup(1);
// ProxyHandler is incompatible with EmbeddedChannel because when channelRegistered() is called
// the channel is already active.
LocalAddress proxy = new LocalAddress("httpProxy_500");
SocketAddress host = InetSocketAddress.createUnresolved("specialHost", 314);
ChannelInboundHandler mockHandler = mock(ChannelInboundHandler.class);
Channel serverChannel = new ServerBootstrap().group(elg).channel(LocalServerChannel.class)
.childHandler(mockHandler)
.bind(proxy).sync().channel();
ProtocolNegotiator nego =
ProtocolNegotiators.httpProxy(proxy, null, null, ProtocolNegotiators.plaintext());
// normally NettyClientTransport will add WBAEH which kick start the ProtocolNegotiation,
// mocking the behavior using KickStartHandler.
ChannelHandler handler =
new KickStartHandler(nego.newHandler(FakeGrpcHttp2ConnectionHandler.noopHandler()));
Channel channel = new Bootstrap().group(elg).channel(LocalChannel.class).handler(handler)
.register().sync().channel();
pipeline = channel.pipeline();
// Wait for initialization to complete
channel.eventLoop().submit(NOOP_RUNNABLE).sync();
channel.connect(host).sync();
serverChannel.close();
ArgumentCaptor<ChannelHandlerContext> contextCaptor =
ArgumentCaptor.forClass(ChannelHandlerContext.class);
Mockito.verify(mockHandler).channelActive(contextCaptor.capture());
ChannelHandlerContext serverContext = contextCaptor.getValue();
final String golden = "isThisThingOn?";
ChannelFuture negotiationFuture = channel.writeAndFlush(bb(golden, channel));
// Wait for sending initial request to complete
channel.eventLoop().submit(NOOP_RUNNABLE).sync();
ArgumentCaptor<Object> objectCaptor = ArgumentCaptor.forClass(Object.class);
Mockito.verify(mockHandler)
.channelRead(any(ChannelHandlerContext.class), objectCaptor.capture());
ByteBuf request = (ByteBuf) objectCaptor.getValue();
request.release();
assertFalse(negotiationFuture.isDone());
String response = "HTTP/1.1 500 OMG\r\nContent-Length: 4\r\n\r\noops";
serverContext.writeAndFlush(bb(response, serverContext.channel())).sync();
thrown.expect(ProxyConnectException.class);
try {
negotiationFuture.sync();
} finally {
channel.close();
}
}
  @Test
  public void waitUntilActiveHandler_firesNegotiation() throws Exception {
    // Verifies that once the channel becomes active, WaitUntilActiveHandler
    // forwards a ProtocolNegotiationEvent enriched with address/security attrs.
    EventLoopGroup elg = new DefaultEventLoopGroup(1);
    SocketAddress addr = new LocalAddress("addr");
    final AtomicReference<Object> event = new AtomicReference<>();
    ChannelHandler next = new ChannelInboundHandlerAdapter() {
      @Override
      public void userEventTriggered(ChannelHandlerContext ctx, Object evt) {
        // Capture the first event and close so the test can join on closeFuture.
        event.set(evt);
        ctx.close();
      }
    };
    Channel s = new ServerBootstrap()
        .childHandler(new ChannelInboundHandlerAdapter())
        .group(elg)
        .channel(LocalServerChannel.class)
        .bind(addr)
        .sync()
        .channel();
    // NOTE(review): the server uses its own elg while the client uses the shared
    // 'group' field — presumably intentional, but worth confirming.
    Channel c = new Bootstrap()
        .handler(new WaitUntilActiveHandler(next, noopLogger))
        .channel(LocalChannel.class).group(group)
        .connect(addr)
        .sync()
        .channel();
    c.pipeline().fireUserEventTriggered(ProtocolNegotiationEvent.DEFAULT);
    SocketAddress localAddr = c.localAddress();
    ProtocolNegotiationEvent expectedEvent = ProtocolNegotiationEvent.DEFAULT
        .withAttributes(
            Attributes.newBuilder()
                .set(Grpc.TRANSPORT_ATTR_LOCAL_ADDR, localAddr)
                .set(Grpc.TRANSPORT_ATTR_REMOTE_ADDR, addr)
                .set(GrpcAttributes.ATTR_SECURITY_LEVEL, SecurityLevel.NONE)
                .build());

    c.closeFuture().sync();
    assertThat(event.get()).isInstanceOf(ProtocolNegotiationEvent.class);
    ProtocolNegotiationEvent actual = (ProtocolNegotiationEvent) event.get();
    assertThat(actual).isEqualTo(expectedEvent);

    s.close();
    elg.shutdownGracefully();
  }
  @Test
  public void clientTlsHandler_firesNegotiation() throws Exception {
    // Full TLS handshake between client and server over local channels, using a
    // self-signed cert whose CN matches the client's expected authority.
    SelfSignedCertificate cert = new SelfSignedCertificate("authority");
    SslContext clientSslContext =
        GrpcSslContexts.configure(SslContextBuilder.forClient().trustManager(cert.cert())).build();
    SslContext serverSslContext =
        GrpcSslContexts.configure(SslContextBuilder.forServer(cert.key(), cert.cert())).build();
    FakeGrpcHttp2ConnectionHandler gh = FakeGrpcHttp2ConnectionHandler.newHandler();
    ClientTlsProtocolNegotiator pn = new ClientTlsProtocolNegotiator(clientSslContext, null);
    WriteBufferingAndExceptionHandler clientWbaeh =
        new WriteBufferingAndExceptionHandler(pn.newHandler(gh));

    SocketAddress addr = new LocalAddress("addr");
    ChannelHandler sh =
        ProtocolNegotiators.serverTls(serverSslContext)
            .newHandler(FakeGrpcHttp2ConnectionHandler.noopHandler());
    WriteBufferingAndExceptionHandler serverWbaeh = new WriteBufferingAndExceptionHandler(sh);
    Channel s = new ServerBootstrap()
        .childHandler(serverWbaeh)
        .group(group)
        .channel(LocalServerChannel.class)
        .bind(addr)
        .sync()
        .channel();
    Channel c = new Bootstrap()
        .handler(clientWbaeh)
        .channel(LocalChannel.class)
        .group(group)
        .register()
        .sync()
        .channel();
    // Write before connecting; WBAEH buffers it until negotiation completes.
    ChannelFuture write = c.writeAndFlush(NettyClientHandler.NOOP_MESSAGE);
    c.connect(addr).sync();
    write.sync();

    boolean completed = gh.negotiated.await(TIMEOUT_SECONDS, TimeUnit.SECONDS);
    if (!completed) {
      assertTrue("failed to negotiated", write.await(TIMEOUT_SECONDS, TimeUnit.SECONDS));
      // sync should fail if we are in this block.
      write.sync();
      throw new AssertionError("neither wrote nor negotiated");
    }

    c.close();
    s.close();
    pn.close();

    // Negotiation must report TLS security info and PRIVACY_AND_INTEGRITY.
    assertThat(gh.securityInfo).isNotNull();
    assertThat(gh.securityInfo.tls).isNotNull();
    assertThat(gh.attrs.get(GrpcAttributes.ATTR_SECURITY_LEVEL))
        .isEqualTo(SecurityLevel.PRIVACY_AND_INTEGRITY);
    assertThat(gh.attrs.get(Grpc.TRANSPORT_ATTR_SSL_SESSION)).isInstanceOf(SSLSession.class);
    // This is not part of the ClientTls negotiation, but shows that the negotiation event happens
    // in the right order.
    assertThat(gh.attrs.get(Grpc.TRANSPORT_ATTR_REMOTE_ADDR)).isEqualTo(addr);
  }
  @Test
  public void plaintextUpgradeNegotiator() throws Exception {
    // Exercises the HTTP/1.1 → HTTP/2 Upgrade path against a server built from
    // Netty's HttpServerUpgradeHandler; security should stay NONE.
    LocalAddress addr = new LocalAddress("plaintextUpgradeNegotiator");
    UpgradeCodecFactory ucf = new UpgradeCodecFactory() {
      @Override
      public UpgradeCodec newUpgradeCodec(CharSequence protocol) {
        return new Http2ServerUpgradeCodec(FakeGrpcHttp2ConnectionHandler.newHandler());
      }
    };
    final HttpServerCodec serverCodec = new HttpServerCodec();
    final HttpServerUpgradeHandler serverUpgradeHandler =
        new HttpServerUpgradeHandler(serverCodec, ucf);
    Channel serverChannel = new ServerBootstrap()
        .group(group)
        .channel(LocalServerChannel.class)
        .childHandler(new ChannelInitializer<Channel>() {
          @Override
          protected void initChannel(Channel ch) throws Exception {
            ch.pipeline().addLast(serverCodec, serverUpgradeHandler);
          }
        })
        .bind(addr)
        .sync()
        .channel();

    FakeGrpcHttp2ConnectionHandler gh = FakeGrpcHttp2ConnectionHandler.newHandler();
    ProtocolNegotiator nego = ProtocolNegotiators.plaintextUpgrade();
    ChannelHandler ch = nego.newHandler(gh);
    WriteBufferingAndExceptionHandler wbaeh = new WriteBufferingAndExceptionHandler(ch);
    Channel channel = new Bootstrap()
        .group(group)
        .channel(LocalChannel.class)
        .handler(wbaeh)
        .register()
        .sync()
        .channel();
    // Write before connecting; WBAEH buffers it until the upgrade completes.
    ChannelFuture write = channel.writeAndFlush(NettyClientHandler.NOOP_MESSAGE);
    channel.connect(serverChannel.localAddress());

    boolean completed = gh.negotiated.await(TIMEOUT_SECONDS, TimeUnit.SECONDS);
    if (!completed) {
      assertTrue("failed to negotiated", write.await(TIMEOUT_SECONDS, TimeUnit.SECONDS));
      // sync should fail if we are in this block.
      write.sync();
      throw new AssertionError("neither wrote nor negotiated");
    }

    channel.close().sync();
    serverChannel.close();

    // Plaintext upgrade: no TLS info, security level NONE.
    assertThat(gh.securityInfo).isNull();
    assertThat(gh.attrs.get(GrpcAttributes.ATTR_SECURITY_LEVEL)).isEqualTo(SecurityLevel.NONE);
    assertThat(gh.attrs.get(Grpc.TRANSPORT_ATTR_REMOTE_ADDR)).isEqualTo(addr);
  }
private static void callMeMaybe(Runnable runnable) {
if (runnable != null) {
runnable.run();
}
}
  /**
   * Minimal GrpcHttp2ConnectionHandler for negotiation tests: records the
   * attributes/security info passed to handleProtocolNegotiationCompleted and
   * counts down {@code negotiated}. The "noop" variant removes itself from the
   * pipeline as soon as it is added, so it never processes HTTP/2 frames.
   */
  private static class FakeGrpcHttp2ConnectionHandler extends GrpcHttp2ConnectionHandler {
    /** Handler that uninstalls itself in handlerAdded(); used where only the type matters. */
    static FakeGrpcHttp2ConnectionHandler noopHandler() {
      return newHandler(true);
    }

    static FakeGrpcHttp2ConnectionHandler newHandler() {
      return newHandler(false);
    }

    private static FakeGrpcHttp2ConnectionHandler newHandler(boolean noop) {
      // Client-side connection with default encoder/decoder/settings.
      DefaultHttp2Connection conn = new DefaultHttp2Connection(/*server=*/ false);
      DefaultHttp2ConnectionEncoder encoder =
          new DefaultHttp2ConnectionEncoder(conn, new DefaultHttp2FrameWriter());
      DefaultHttp2ConnectionDecoder decoder =
          new DefaultHttp2ConnectionDecoder(conn, encoder, new DefaultHttp2FrameReader());
      Http2Settings settings = new Http2Settings();
      return new FakeGrpcHttp2ConnectionHandler(
          /*channelUnused=*/ null, decoder, encoder, settings, noop, noopLogger);
    }

    private final boolean noop;
    // Captured from handleProtocolNegotiationCompleted for test assertions.
    private Attributes attrs;
    private Security securityInfo;
    // Released once negotiation completes.
    private final CountDownLatch negotiated = new CountDownLatch(1);

    private ChannelHandlerContext ctx;

    FakeGrpcHttp2ConnectionHandler(ChannelPromise channelUnused,
        Http2ConnectionDecoder decoder,
        Http2ConnectionEncoder encoder,
        Http2Settings initialSettings,
        boolean noop,
        ChannelLogger negotiationLogger) {
      super(channelUnused, decoder, encoder, initialSettings, negotiationLogger);
      this.noop = noop;
    }

    @Override
    public void handleProtocolNegotiationCompleted(Attributes attrs, Security securityInfo) {
      checkNotNull(ctx, "handleProtocolNegotiationCompleted cannot be called before handlerAdded");
      super.handleProtocolNegotiationCompleted(attrs, securityInfo);
      this.attrs = attrs;
      this.securityInfo = securityInfo;
      // Add a temp handler that verifies first message is a NOOP_MESSAGE
      ctx.pipeline().addBefore(ctx.name(), null, new ChannelOutboundHandlerAdapter() {
        @Override
        public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise)
            throws Exception {
          checkState(
              msg == NettyClientHandler.NOOP_MESSAGE, "First message should be NOOP_MESSAGE");
          // Swallow the sentinel write and uninstall the checker.
          promise.trySuccess();
          ctx.pipeline().remove(this);
        }
      });
      NettyClientHandler.writeBufferingAndRemove(ctx.channel());
      negotiated.countDown();
    }

    @Override
    public void handlerAdded(ChannelHandlerContext ctx) throws Exception {
      if (noop) {
        // Uninstall immediately; the real HTTP/2 machinery is never initialized.
        ctx.pipeline().remove(ctx.name());
      } else {
        super.handlerAdded(ctx);
      }
      this.ctx = ctx;
    }

    @Override
    public String getAuthority() {
      return "authority";
    }
  }
  /** Encodes {@code s} as UTF-8 into a buffer obtained from {@code c}'s allocator. */
  private static ByteBuf bb(String s, Channel c) {
    return ByteBufUtil.writeUtf8(c.alloc(), s);
  }
  /**
   * Test stand-in for WriteBufferingAndExceptionHandler: when added to a
   * pipeline it replaces itself with {@code next} and fires the default
   * ProtocolNegotiationEvent to kick off negotiation.
   */
  private static final class KickStartHandler extends ChannelDuplexHandler {
    private final ChannelHandler next;

    public KickStartHandler(ChannelHandler next) {
      this.next = checkNotNull(next, "next");
    }

    @Override
    public void handlerAdded(ChannelHandlerContext ctx) throws Exception {
      ctx.pipeline().replace(ctx.name(), null, next);
      ctx.pipeline().fireUserEventTriggered(ProtocolNegotiationEvent.DEFAULT);
    }
  }
  /** ServerListener that records created transports and signals shutdown via a latch. */
  private static class MockServerListener implements ServerListener {
    private final CountDownLatch latch = new CountDownLatch(1);
    // Transports handed to transportCreated(), in creation order.
    public Queue<ServerTransport> transports = new ArrayDeque<>();

    @Override
    public ServerTransportListener transportCreated(ServerTransport transport) {
      transports.add(transport);
      return new MockServerTransportListener();
    }

    @Override
    public void serverShutdown() {
      latch.countDown();
    }

    /** Returns {@code true} if serverShutdown() was observed within the timeout. */
    public boolean waitForShutdown(long timeout, TimeUnit unit) throws InterruptedException {
      return latch.await(timeout, unit);
    }
  }
  /** No-op transport listener: accepts streams silently and echoes back attributes. */
  private static class MockServerTransportListener implements ServerTransportListener {
    @Override
    public void streamCreated(ServerStream stream, String method, Metadata headers) {}

    @Override
    public Attributes transportReady(Attributes attributes) {
      // Pass attributes through unchanged.
      return attributes;
    }

    @Override
    public void transportTerminated() {}
  }
}
| |
/*
* Copyright (C) 2007 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.collect.CollectPreconditions.checkRemove;
import com.google.common.annotations.Beta;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.base.Function;
import com.google.common.base.Optional;
import com.google.common.base.Predicate;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.Queue;
import java.util.RandomAccess;
import java.util.Set;
import javax.annotation.Nullable;
/**
* This class contains static utility methods that operate on or return objects
* of type {@code Iterable}. Except as noted, each method has a corresponding
* {@link Iterator}-based method in the {@link Iterators} class.
*
* <p><i>Performance notes:</i> Unless otherwise noted, all of the iterables
* produced in this class are <i>lazy</i>, which means that their iterators
* only advance the backing iteration when absolutely necessary.
*
* <p>See the Guava User Guide article on <a href=
* "https://github.com/google/guava/wiki/CollectionUtilitiesExplained#iterables">
* {@code Iterables}</a>.
*
* @author Kevin Bourrillion
* @author Jared Levy
* @since 2.0
*/
@GwtCompatible(emulated = true)
public final class Iterables {
private Iterables() {}
/** Returns an unmodifiable view of {@code iterable}. */
public static <T> Iterable<T> unmodifiableIterable(final Iterable<? extends T> iterable) {
checkNotNull(iterable);
if (iterable instanceof UnmodifiableIterable || iterable instanceof ImmutableCollection) {
@SuppressWarnings("unchecked") // Since it's unmodifiable, the covariant cast is safe
Iterable<T> result = (Iterable<T>) iterable;
return result;
}
return new UnmodifiableIterable<T>(iterable);
}
/**
* Simply returns its argument.
*
* @deprecated no need to use this
* @since 10.0
*/
@Deprecated
public static <E> Iterable<E> unmodifiableIterable(ImmutableCollection<E> iterable) {
return checkNotNull(iterable);
}
  /** View class backing {@link #unmodifiableIterable}; hands out read-only iterators. */
  private static final class UnmodifiableIterable<T> extends FluentIterable<T> {
    private final Iterable<? extends T> iterable;

    private UnmodifiableIterable(Iterable<? extends T> iterable) {
      this.iterable = iterable;
    }

    @Override
    public Iterator<T> iterator() {
      // Each call delegates to the source, wrapped so that remove() is unsupported.
      return Iterators.unmodifiableIterator(iterable.iterator());
    }

    @Override
    public String toString() {
      return iterable.toString();
    }
    // no equals and hashCode; it would break the contract!
  }
/**
* Returns the number of elements in {@code iterable}.
*/
public static int size(Iterable<?> iterable) {
return (iterable instanceof Collection)
? ((Collection<?>) iterable).size()
: Iterators.size(iterable.iterator());
}
/**
* Returns {@code true} if {@code iterable} contains any object for which {@code equals(element)}
* is true.
*/
public static boolean contains(Iterable<?> iterable, @Nullable Object element) {
if (iterable instanceof Collection) {
Collection<?> collection = (Collection<?>) iterable;
return Collections2.safeContains(collection, element);
}
return Iterators.contains(iterable.iterator(), element);
}
/**
* Removes, from an iterable, every element that belongs to the provided
* collection.
*
* <p>This method calls {@link Collection#removeAll} if {@code iterable} is a
* collection, and {@link Iterators#removeAll} otherwise.
*
* @param removeFrom the iterable to (potentially) remove elements from
* @param elementsToRemove the elements to remove
* @return {@code true} if any element was removed from {@code iterable}
*/
@CanIgnoreReturnValue
public static boolean removeAll(Iterable<?> removeFrom, Collection<?> elementsToRemove) {
return (removeFrom instanceof Collection)
? ((Collection<?>) removeFrom).removeAll(checkNotNull(elementsToRemove))
: Iterators.removeAll(removeFrom.iterator(), elementsToRemove);
}
/**
* Removes, from an iterable, every element that does not belong to the
* provided collection.
*
* <p>This method calls {@link Collection#retainAll} if {@code iterable} is a
* collection, and {@link Iterators#retainAll} otherwise.
*
* @param removeFrom the iterable to (potentially) remove elements from
* @param elementsToRetain the elements to retain
* @return {@code true} if any element was removed from {@code iterable}
*/
@CanIgnoreReturnValue
public static boolean retainAll(Iterable<?> removeFrom, Collection<?> elementsToRetain) {
return (removeFrom instanceof Collection)
? ((Collection<?>) removeFrom).retainAll(checkNotNull(elementsToRetain))
: Iterators.retainAll(removeFrom.iterator(), elementsToRetain);
}
/**
* Removes, from an iterable, every element that satisfies the provided
* predicate.
*
* <p>Removals may or may not happen immediately as each element is tested
* against the predicate. The behavior of this method is not specified if
* {@code predicate} is dependent on {@code removeFrom}.
*
* @param removeFrom the iterable to (potentially) remove elements from
* @param predicate a predicate that determines whether an element should
* be removed
* @return {@code true} if any elements were removed from the iterable
*
* @throws UnsupportedOperationException if the iterable does not support
* {@code remove()}.
* @since 2.0
*/
@CanIgnoreReturnValue
public static <T> boolean removeIf(Iterable<T> removeFrom, Predicate<? super T> predicate) {
if (removeFrom instanceof RandomAccess && removeFrom instanceof List) {
return removeIfFromRandomAccessList((List<T>) removeFrom, checkNotNull(predicate));
}
return Iterators.removeIf(removeFrom.iterator(), predicate);
}
private static <T> boolean removeIfFromRandomAccessList(
List<T> list, Predicate<? super T> predicate) {
// Note: Not all random access lists support set(). Additionally, it's possible
// for a list to reject setting an element, such as when the list does not permit
// duplicate elements. For both of those cases, we need to fall back to a slower
// implementation.
int from = 0;
int to = 0;
for (; from < list.size(); from++) {
T element = list.get(from);
if (!predicate.apply(element)) {
if (from > to) {
try {
list.set(to, element);
} catch (UnsupportedOperationException e) {
slowRemoveIfForRemainingElements(list, predicate, to, from);
return true;
} catch (IllegalArgumentException e) {
slowRemoveIfForRemainingElements(list, predicate, to, from);
return true;
}
}
to++;
}
}
// Clear the tail of any remaining items
list.subList(to, list.size()).clear();
return from != to;
}
private static <T> void slowRemoveIfForRemainingElements(
List<T> list, Predicate<? super T> predicate, int to, int from) {
// Here we know that:
// * (to < from) and that both are valid indices.
// * Everything with (index < to) should be kept.
// * Everything with (to <= index < from) should be removed.
// * The element with (index == from) should be kept.
// * Everything with (index > from) has not been checked yet.
// Check from the end of the list backwards (minimize expected cost of
// moving elements when remove() is called). Stop before 'from' because
// we already know that should be kept.
for (int n = list.size() - 1; n > from; n--) {
if (predicate.apply(list.get(n))) {
list.remove(n);
}
}
// And now remove everything in the range [to, from) (going backwards).
for (int n = from - 1; n >= to; n--) {
list.remove(n);
}
}
/**
* Removes and returns the first matching element, or returns {@code null} if there is none.
*/
@Nullable
static <T> T removeFirstMatching(Iterable<T> removeFrom, Predicate<? super T> predicate) {
checkNotNull(predicate);
Iterator<T> iterator = removeFrom.iterator();
while (iterator.hasNext()) {
T next = iterator.next();
if (predicate.apply(next)) {
iterator.remove();
return next;
}
}
return null;
}
/**
* Determines whether two iterables contain equal elements in the same order.
* More specifically, this method returns {@code true} if {@code iterable1}
* and {@code iterable2} contain the same number of elements and every element
* of {@code iterable1} is equal to the corresponding element of
* {@code iterable2}.
*/
public static boolean elementsEqual(Iterable<?> iterable1, Iterable<?> iterable2) {
if (iterable1 instanceof Collection && iterable2 instanceof Collection) {
Collection<?> collection1 = (Collection<?>) iterable1;
Collection<?> collection2 = (Collection<?>) iterable2;
if (collection1.size() != collection2.size()) {
return false;
}
}
return Iterators.elementsEqual(iterable1.iterator(), iterable2.iterator());
}
  /**
   * Returns a string representation of {@code iterable}, with the format {@code
   * [e1, e2, ..., en]} (that is, identical to {@link java.util.Arrays
   * Arrays}{@code .toString(Iterables.toArray(iterable))}). Note that for
   * <i>most</i> implementations of {@link Collection}, {@code
   * collection.toString()} also gives the same result, but that behavior is not
   * generally guaranteed.
   */
  public static String toString(Iterable<?> iterable) {
    // Single-pass delegation; the iterable is consumed once.
    return Iterators.toString(iterable.iterator());
  }
  /**
   * Returns the single element contained in {@code iterable}.
   *
   * @throws NoSuchElementException if the iterable is empty
   * @throws IllegalArgumentException if the iterable contains multiple
   *     elements
   */
  public static <T> T getOnlyElement(Iterable<T> iterable) {
    // Delegates; both exceptions above originate from Iterators.getOnlyElement.
    return Iterators.getOnlyElement(iterable.iterator());
  }
  /**
   * Returns the single element contained in {@code iterable}, or {@code
   * defaultValue} if the iterable is empty.
   *
   * @throws IllegalArgumentException if the iterable contains multiple
   *     elements
   */
  @Nullable
  public static <T> T getOnlyElement(Iterable<? extends T> iterable, @Nullable T defaultValue) {
    return Iterators.getOnlyElement(iterable.iterator(), defaultValue);
  }
  /**
   * Copies an iterable's elements into an array.
   *
   * @param iterable the iterable to copy
   * @param type the type of the elements
   * @return a newly-allocated array into which all the elements of the iterable
   *     have been copied
   */
  @GwtIncompatible // Array.newInstance(Class, int)
  public static <T> T[] toArray(Iterable<? extends T> iterable, Class<T> type) {
    // The zero-length typed array tells Collection.toArray which runtime
    // component type to allocate.
    return toArray(iterable, ObjectArrays.newArray(type, 0));
  }
static <T> T[] toArray(Iterable<? extends T> iterable, T[] array) {
Collection<? extends T> collection = castOrCopyToCollection(iterable);
return collection.toArray(array);
}
  /**
   * Copies an iterable's elements into an array.
   *
   * @param iterable the iterable to copy
   * @return a newly-allocated array into which all the elements of the iterable
   *     have been copied
   */
  static Object[] toArray(Iterable<?> iterable) {
    // Collections are cast through; other iterables are snapshotted first.
    return castOrCopyToCollection(iterable).toArray();
  }
/**
* Converts an iterable into a collection. If the iterable is already a
* collection, it is returned. Otherwise, an {@link java.util.ArrayList} is
* created with the contents of the iterable in the same iteration order.
*/
private static <E> Collection<E> castOrCopyToCollection(Iterable<E> iterable) {
return (iterable instanceof Collection)
? (Collection<E>) iterable
: Lists.newArrayList(iterable.iterator());
}
/**
* Adds all elements in {@code iterable} to {@code collection}.
*
* @return {@code true} if {@code collection} was modified as a result of this
* operation.
*/
@CanIgnoreReturnValue
public static <T> boolean addAll(Collection<T> addTo, Iterable<? extends T> elementsToAdd) {
if (elementsToAdd instanceof Collection) {
Collection<? extends T> c = Collections2.cast(elementsToAdd);
return addTo.addAll(c);
}
return Iterators.addAll(addTo, checkNotNull(elementsToAdd).iterator());
}
/**
* Returns the number of elements in the specified iterable that equal the
* specified object. This implementation avoids a full iteration when the
* iterable is a {@link Multiset} or {@link Set}.
*
* @see Collections#frequency
*/
public static int frequency(Iterable<?> iterable, @Nullable Object element) {
if ((iterable instanceof Multiset)) {
return ((Multiset<?>) iterable).count(element);
} else if ((iterable instanceof Set)) {
return ((Set<?>) iterable).contains(element) ? 1 : 0;
}
return Iterators.frequency(iterable.iterator(), element);
}
  /**
   * Returns an iterable whose iterators cycle indefinitely over the elements of
   * {@code iterable}.
   *
   * <p>That iterator supports {@code remove()} if {@code iterable.iterator()}
   * does. After {@code remove()} is called, subsequent cycles omit the removed
   * element, which is no longer in {@code iterable}. The iterator's
   * {@code hasNext()} method returns {@code true} until {@code iterable} is
   * empty.
   *
   * <p><b>Warning:</b> Typical uses of the resulting iterator may produce an
   * infinite loop. You should use an explicit {@code break} or be certain that
   * you will eventually remove all the elements.
   *
   * <p>To cycle over the iterable {@code n} times, use the following:
   * {@code Iterables.concat(Collections.nCopies(n, iterable))}
   */
  public static <T> Iterable<T> cycle(final Iterable<T> iterable) {
    checkNotNull(iterable);
    return new FluentIterable<T>() {
      @Override
      public Iterator<T> iterator() {
        // A fresh cycling iterator per call; removals write through to the source.
        return Iterators.cycle(iterable);
      }

      @Override
      public String toString() {
        return iterable.toString() + " (cycled)";
      }
    };
  }
  /**
   * Returns an iterable whose iterators cycle indefinitely over the provided
   * elements.
   *
   * <p>After {@code remove} is invoked on a generated iterator, the removed
   * element will no longer appear in either that iterator or any other iterator
   * created from the same source iterable. That is, this method behaves exactly
   * as {@code Iterables.cycle(Lists.newArrayList(elements))}. The iterator's
   * {@code hasNext} method returns {@code true} until all of the original
   * elements have been removed.
   *
   * <p><b>Warning:</b> Typical uses of the resulting iterator may produce an
   * infinite loop. You should use an explicit {@code break} or be certain that
   * you will eventually remove all the elements.
   *
   * <p>To cycle over the elements {@code n} times, use the following:
   * {@code Iterables.concat(Collections.nCopies(n, Arrays.asList(elements)))}
   */
  public static <T> Iterable<T> cycle(T... elements) {
    // The copy into an ArrayList is what lets removals be shared among iterators.
    return cycle(Lists.newArrayList(elements));
  }
  /**
   * Combines two iterables into a single iterable. The returned iterable has an
   * iterator that traverses the elements in {@code a}, followed by the elements
   * in {@code b}. The source iterators are not polled until necessary.
   *
   * <p>The returned iterable's iterator supports {@code remove()} when the
   * corresponding input iterator supports it.
   */
  public static <T> Iterable<T> concat(Iterable<? extends T> a, Iterable<? extends T> b) {
    // Lazily chains the inputs; nothing is copied.
    return FluentIterable.concat(a, b);
  }
  /**
   * Combines three iterables into a single iterable. The returned iterable has
   * an iterator that traverses the elements in {@code a}, followed by the
   * elements in {@code b}, followed by the elements in {@code c}. The source
   * iterators are not polled until necessary.
   *
   * <p>The returned iterable's iterator supports {@code remove()} when the
   * corresponding input iterator supports it.
   */
  public static <T> Iterable<T> concat(
      Iterable<? extends T> a, Iterable<? extends T> b, Iterable<? extends T> c) {
    // Lazily chains the inputs; nothing is copied.
    return FluentIterable.concat(a, b, c);
  }
/**
* Combines four iterables into a single iterable. The returned iterable has
* an iterator that traverses the elements in {@code a}, followed by the
* elements in {@code b}, followed by the elements in {@code c}, followed by
* the elements in {@code d}. The source iterators are not polled until
* necessary.
*
* <p>The returned iterable's iterator supports {@code remove()} when the
* corresponding input iterator supports it.
*/
public static <T> Iterable<T> concat(
    Iterable<? extends T> a,
    Iterable<? extends T> b,
    Iterable<? extends T> c,
    Iterable<? extends T> d) {
  // Lazy four-way concatenation, delegated to FluentIterable.
  Iterable<T> combined = FluentIterable.concat(a, b, c, d);
  return combined;
}
/**
* Combines multiple iterables into a single iterable. The returned iterable
* has an iterator that traverses the elements of each iterable in
* {@code inputs}. The input iterators are not polled until necessary.
*
* <p>The returned iterable's iterator supports {@code remove()} when the
* corresponding input iterator supports it.
*
* @throws NullPointerException if any of the provided iterables is null
*/
public static <T> Iterable<T> concat(Iterable<? extends T>... inputs) {
  // Copying into an ImmutableList rejects null inputs eagerly; the actual
  // concatenation is then handled by the Iterable-of-Iterables overload.
  ImmutableList<Iterable<? extends T>> snapshot = ImmutableList.copyOf(inputs);
  return concat(snapshot);
}
/**
* Combines multiple iterables into a single iterable. The returned iterable
* has an iterator that traverses the elements of each iterable in
* {@code inputs}. The input iterators are not polled until necessary.
*
* <p>The returned iterable's iterator supports {@code remove()} when the
* corresponding input iterator supports it. The methods of the returned
* iterable may throw {@code NullPointerException} if any of the input
* iterators is null.
*/
public static <T> Iterable<T> concat(Iterable<? extends Iterable<? extends T>> inputs) {
  // FluentIterable flattens the inputs lazily; nothing is polled up front.
  Iterable<T> flattened = FluentIterable.concat(inputs);
  return flattened;
}
/**
* Divides an iterable into unmodifiable sublists of the given size (the final
* iterable may be smaller). For example, partitioning an iterable containing
* {@code [a, b, c, d, e]} with a partition size of 3 yields {@code
* [[a, b, c], [d, e]]} -- an outer iterable containing two inner lists of
* three and two elements, all in the original order.
*
* <p>Iterators returned by the returned iterable do not support the {@link
* Iterator#remove()} method. The returned lists implement {@link
* RandomAccess}, whether or not the input list does.
*
* <p><b>Note:</b> if {@code iterable} is a {@link List}, use {@link
* Lists#partition(List, int)} instead.
*
* @param iterable the iterable to return a partitioned view of
* @param size the desired size of each partition (the last may be smaller)
* @return an iterable of unmodifiable lists containing the elements of {@code
* iterable} divided into partitions
* @throws IllegalArgumentException if {@code size} is nonpositive
*/
public static <T> Iterable<List<T>> partition(final Iterable<T> iterable, final int size) {
  checkNotNull(iterable);
  checkArgument(size > 0);
  // Lazy view: each iterator() call re-partitions the iterable's contents at
  // that moment, delegating the chunking to Iterators.partition.
  return new FluentIterable<List<T>>() {
    @Override
    public Iterator<List<T>> iterator() {
      final Iterator<T> elements = iterable.iterator();
      return Iterators.partition(elements, size);
    }
  };
}
/**
* Divides an iterable into unmodifiable sublists of the given size, padding
* the final iterable with null values if necessary. For example, partitioning
* an iterable containing {@code [a, b, c, d, e]} with a partition size of 3
* yields {@code [[a, b, c], [d, e, null]]} -- an outer iterable containing
* two inner lists of three elements each, all in the original order.
*
* <p>Iterators returned by the returned iterable do not support the {@link
* Iterator#remove()} method.
*
* @param iterable the iterable to return a partitioned view of
* @param size the desired size of each partition
* @return an iterable of unmodifiable lists containing the elements of {@code
* iterable} divided into partitions (the final iterable may have
* trailing null elements)
* @throws IllegalArgumentException if {@code size} is nonpositive
*/
public static <T> Iterable<List<T>> paddedPartition(final Iterable<T> iterable, final int size) {
  checkNotNull(iterable);
  checkArgument(size > 0);
  // Lazy view: like partition(), but the final chunk is padded with nulls up
  // to exactly 'size' elements by Iterators.paddedPartition.
  return new FluentIterable<List<T>>() {
    @Override
    public Iterator<List<T>> iterator() {
      final Iterator<T> elements = iterable.iterator();
      return Iterators.paddedPartition(elements, size);
    }
  };
}
/**
* Returns a view of {@code unfiltered} containing all elements that satisfy
* the input predicate {@code retainIfTrue}. The returned iterable's iterator
* does not support {@code remove()}.
*/
public static <T> Iterable<T> filter(
    final Iterable<T> unfiltered, final Predicate<? super T> retainIfTrue) {
  checkNotNull(unfiltered);
  checkNotNull(retainIfTrue);
  // Lazy filtered view; the predicate is evaluated only during iteration.
  return new FluentIterable<T>() {
    @Override
    public Iterator<T> iterator() {
      final Iterator<T> source = unfiltered.iterator();
      return Iterators.filter(source, retainIfTrue);
    }
  };
}
/**
* Returns a view of {@code unfiltered} containing all elements that are of
* the type {@code desiredType}. The returned iterable's iterator does not
* support {@code remove()}.
*/
@GwtIncompatible // Class.isInstance
public static <T> Iterable<T> filter(final Iterable<?> unfiltered, final Class<T> desiredType) {
  checkNotNull(unfiltered);
  checkNotNull(desiredType);
  // Lazy type-filtered view; instance checks happen only during iteration.
  return new FluentIterable<T>() {
    @Override
    public Iterator<T> iterator() {
      final Iterator<?> source = unfiltered.iterator();
      return Iterators.filter(source, desiredType);
    }
  };
}
/**
* Returns {@code true} if any element in {@code iterable} satisfies the predicate.
*/
public static <T> boolean any(Iterable<T> iterable, Predicate<? super T> predicate) {
  // True as soon as one element matches; false for an empty iterable.
  final Iterator<T> elements = iterable.iterator();
  return Iterators.any(elements, predicate);
}
/**
* Returns {@code true} if every element in {@code iterable} satisfies the
* predicate. If {@code iterable} is empty, {@code true} is returned.
*/
public static <T> boolean all(Iterable<T> iterable, Predicate<? super T> predicate) {
  // Vacuously true for an empty iterable; stops at the first non-match.
  final Iterator<T> elements = iterable.iterator();
  return Iterators.all(elements, predicate);
}
/**
* Returns the first element in {@code iterable} that satisfies the given
* predicate; use this method only when such an element is known to exist. If
* it is possible that <i>no</i> element will match, use {@link #tryFind} or
* {@link #find(Iterable, Predicate, Object)} instead.
*
* @throws NoSuchElementException if no element in {@code iterable} matches
* the given predicate
*/
public static <T> T find(Iterable<T> iterable, Predicate<? super T> predicate) {
  // Delegates to the iterator form, which throws NoSuchElementException when
  // no element satisfies the predicate.
  final Iterator<T> elements = iterable.iterator();
  return Iterators.find(elements, predicate);
}
/**
* Returns the first element in {@code iterable} that satisfies the given
* predicate, or {@code defaultValue} if none found. Note that this can
* usually be handled more naturally using {@code
* tryFind(iterable, predicate).or(defaultValue)}.
*
* @since 7.0
*/
@Nullable
public static <T> T find(
    Iterable<? extends T> iterable, Predicate<? super T> predicate, @Nullable T defaultValue) {
  // Non-throwing variant: yields defaultValue when nothing matches.
  final Iterator<? extends T> elements = iterable.iterator();
  return Iterators.find(elements, predicate, defaultValue);
}
/**
* Returns an {@link Optional} containing the first element in {@code
* iterable} that satisfies the given predicate, if such an element exists.
*
* <p><b>Warning:</b> avoid using a {@code predicate} that matches {@code
* null}. If {@code null} is matched in {@code iterable}, a
* NullPointerException will be thrown.
*
* @since 11.0
*/
public static <T> Optional<T> tryFind(Iterable<T> iterable, Predicate<? super T> predicate) {
  // Optional-returning variant; absent when no element matches.
  final Iterator<T> elements = iterable.iterator();
  return Iterators.tryFind(elements, predicate);
}
/**
* Returns the index in {@code iterable} of the first element that satisfies
* the provided {@code predicate}, or {@code -1} if the Iterable has no such
* elements.
*
* <p>More formally, returns the lowest index {@code i} such that
* {@code predicate.apply(Iterables.get(iterable, i))} returns {@code true},
* or {@code -1} if there is no such index.
*
* @since 2.0
*/
public static <T> int indexOf(Iterable<T> iterable, Predicate<? super T> predicate) {
  // Index of the first match in iteration order, or -1 when none matches.
  final Iterator<T> elements = iterable.iterator();
  return Iterators.indexOf(elements, predicate);
}
/**
* Returns a view containing the result of applying {@code function} to each
* element of {@code fromIterable}.
*
* <p>The returned iterable's iterator supports {@code remove()} if {@code
* fromIterable}'s iterator does. After a successful {@code remove()} call,
* {@code fromIterable} no longer contains the corresponding element.
*
* <p>If the input {@code Iterable} is known to be a {@code List} or other
* {@code Collection}, consider {@link Lists#transform} and {@link
* Collections2#transform}.
*/
public static <F, T> Iterable<T> transform(
    final Iterable<F> fromIterable, final Function<? super F, ? extends T> function) {
  checkNotNull(fromIterable);
  checkNotNull(function);
  // Lazy mapped view; the function is applied element-by-element during
  // iteration, and remove() passes through to the source iterator.
  return new FluentIterable<T>() {
    @Override
    public Iterator<T> iterator() {
      final Iterator<F> source = fromIterable.iterator();
      return Iterators.transform(source, function);
    }
  };
}
/**
* Returns the element at the specified position in an iterable.
*
* @param position position of the element to return
* @return the element at the specified position in {@code iterable}
* @throws IndexOutOfBoundsException if {@code position} is negative or
* greater than or equal to the size of {@code iterable}
*/
public static <T> T get(Iterable<T> iterable, int position) {
  checkNotNull(iterable);
  // Lists support random access directly; any other iterable is walked.
  if (iterable instanceof List) {
    return ((List<T>) iterable).get(position);
  }
  return Iterators.get(iterable.iterator(), position);
}
/**
* Returns the element at the specified position in an iterable or a default
* value otherwise.
*
* @param position position of the element to return
* @param defaultValue the default value to return if {@code position} is
* greater than or equal to the size of the iterable
* @return the element at the specified position in {@code iterable} or
* {@code defaultValue} if {@code iterable} contains fewer than
* {@code position + 1} elements.
* @throws IndexOutOfBoundsException if {@code position} is negative
* @since 4.0
*/
@Nullable
public static <T> T get(Iterable<? extends T> iterable, int position, @Nullable T defaultValue) {
  checkNotNull(iterable);
  Iterators.checkNonnegative(position);
  // Non-list iterables are advanced element-by-element to the position.
  if (!(iterable instanceof List)) {
    Iterator<? extends T> iterator = iterable.iterator();
    Iterators.advance(iterator, position);
    return Iterators.getNext(iterator, defaultValue);
  }
  // List fast path: bounds-check then index directly.
  List<? extends T> list = Lists.cast(iterable);
  return (position < list.size()) ? list.get(position) : defaultValue;
}
/**
* Returns the first element in {@code iterable} or {@code defaultValue} if
* the iterable is empty. The {@link Iterators} analog to this method is
* {@link Iterators#getNext}.
*
* <p>If no default value is desired (and the caller instead wants a
* {@link NoSuchElementException} to be thrown), it is recommended that
* {@code iterable.iterator().next()} is used instead.
*
* @param defaultValue the default value to return if the iterable is empty
* @return the first element of {@code iterable} or the default value
* @since 7.0
*/
@Nullable
public static <T> T getFirst(Iterable<? extends T> iterable, @Nullable T defaultValue) {
  // First element in iteration order, or defaultValue when empty.
  final Iterator<? extends T> elements = iterable.iterator();
  return Iterators.getNext(elements, defaultValue);
}
/**
* Returns the last element of {@code iterable}. If {@code iterable} is a {@link List} with
* {@link RandomAccess} support, then this operation is guaranteed to be {@code O(1)}.
*
* @return the last element of {@code iterable}
* @throws NoSuchElementException if the iterable is empty
*/
public static <T> T getLast(Iterable<T> iterable) {
  // TODO(kevinb): Support a concurrently modified collection?
  // Non-list iterables must be exhausted to reach the last element.
  if (!(iterable instanceof List)) {
    return Iterators.getLast(iterable.iterator());
  }
  List<T> list = (List<T>) iterable;
  if (list.isEmpty()) {
    throw new NoSuchElementException();
  }
  return getLastInNonemptyList(list);
}
/**
* Returns the last element of {@code iterable} or {@code defaultValue} if
* the iterable is empty. If {@code iterable} is a {@link List} with
* {@link RandomAccess} support, then this operation is guaranteed to be {@code O(1)}.
*
* @param defaultValue the value to return if {@code iterable} is empty
* @return the last element of {@code iterable} or the default value
* @since 3.0
*/
@Nullable
public static <T> T getLast(Iterable<? extends T> iterable, @Nullable T defaultValue) {
  if (iterable instanceof Collection) {
    Collection<? extends T> c = Collections2.cast(iterable);
    // Collections can report emptiness without iterating.
    if (c.isEmpty()) {
      return defaultValue;
    }
    // Non-empty list: O(1) access to the last element.
    if (iterable instanceof List) {
      return getLastInNonemptyList(Lists.cast(iterable));
    }
  }
  // General case: walk the whole iterable.
  return Iterators.getLast(iterable.iterator(), defaultValue);
}
private static <T> T getLastInNonemptyList(List<T> list) {
  // Caller guarantees the list is non-empty.
  int lastIndex = list.size() - 1;
  return list.get(lastIndex);
}
/**
* Returns a view of {@code iterable} that skips its first
* {@code numberToSkip} elements. If {@code iterable} contains fewer than
* {@code numberToSkip} elements, the returned iterable skips all of its
* elements.
*
* <p>Modifications to the underlying {@link Iterable} before a call to
* {@code iterator()} are reflected in the returned iterator. That is, the
* iterator skips the first {@code numberToSkip} elements that exist when the
* {@code Iterator} is created, not when {@code skip()} is called.
*
* <p>The returned iterable's iterator supports {@code remove()} if the
* iterator of the underlying iterable supports it. Note that it is
* <i>not</i> possible to delete the last skipped element by immediately
* calling {@code remove()} on that iterator, as the {@code Iterator}
* contract states that a call to {@code remove()} before a call to
* {@code next()} will throw an {@link IllegalStateException}.
*
* @since 3.0
*/
public static <T> Iterable<T> skip(final Iterable<T> iterable, final int numberToSkip) {
  checkNotNull(iterable);
  checkArgument(numberToSkip >= 0, "number to skip cannot be negative");
  // Fast path for lists: each iterator() call computes a subList view past
  // the skipped prefix, clamped so oversized skips yield an empty view.
  if (iterable instanceof List) {
    final List<T> list = (List<T>) iterable;
    return new FluentIterable<T>() {
      @Override
      public Iterator<T> iterator() {
        // TODO(kevinb): Support a concurrently modified collection?
        int toSkip = Math.min(list.size(), numberToSkip);
        return list.subList(toSkip, list.size()).iterator();
      }
    };
  }
  // General case: advance a fresh source iterator past the prefix, then wrap
  // it so remove() before the first next() fails as the Iterator contract
  // requires (instead of deleting the last skipped element).
  return new FluentIterable<T>() {
    @Override
    public Iterator<T> iterator() {
      final Iterator<T> iterator = iterable.iterator();
      Iterators.advance(iterator, numberToSkip);
      /*
       * We can't just return the iterator because an immediate call to its
       * remove() method would remove one of the skipped elements instead of
       * throwing an IllegalStateException.
       */
      return new Iterator<T>() {
        // True until next() has succeeded at least once; guards remove().
        boolean atStart = true;
        @Override
        public boolean hasNext() {
          return iterator.hasNext();
        }
        @Override
        public T next() {
          T result = iterator.next();
          atStart = false; // not called if next() fails
          return result;
        }
        @Override
        public void remove() {
          checkRemove(!atStart);
          iterator.remove();
        }
      };
    }
  };
}
/**
* Returns a view of {@code iterable} containing its first {@code limitSize}
* elements. If {@code iterable} contains fewer than {@code limitSize}
* elements, the returned view contains all of its elements. The returned
* iterable's iterator supports {@code remove()} if {@code iterable}'s
* iterator does.
*
* @param iterable the iterable to limit
* @param limitSize the maximum number of elements in the returned iterable
* @throws IllegalArgumentException if {@code limitSize} is negative
* @since 3.0
*/
public static <T> Iterable<T> limit(final Iterable<T> iterable, final int limitSize) {
  checkNotNull(iterable);
  checkArgument(limitSize >= 0, "limit is negative");
  // Lazy view: the truncation is applied by each iterator, not up front.
  return new FluentIterable<T>() {
    @Override
    public Iterator<T> iterator() {
      final Iterator<T> source = iterable.iterator();
      return Iterators.limit(source, limitSize);
    }
  };
}
/**
* Returns a view of the supplied iterable that wraps each generated
* {@link Iterator} through {@link Iterators#consumingIterator(Iterator)}.
*
* <p>Note: If {@code iterable} is a {@link Queue}, the returned iterable will
* get entries from {@link Queue#remove()} since {@link Queue}'s iteration
* order is undefined. Calling {@link Iterator#hasNext()} on a generated
* iterator from the returned iterable may cause an item to be immediately
* dequeued for return on a subsequent call to {@link Iterator#next()}.
*
* @param iterable the iterable to wrap
* @return a view of the supplied iterable that wraps each generated iterator
* through {@link Iterators#consumingIterator(Iterator)}; for queues,
* an iterable that generates iterators that return and consume the
* queue's elements in queue order
*
* @see Iterators#consumingIterator(Iterator)
* @since 2.0
*/
public static <T> Iterable<T> consumingIterable(final Iterable<T> iterable) {
  // Queues get a dedicated iterator that drains via Queue.remove(), since a
  // queue's plain iteration order is undefined.
  if (iterable instanceof Queue) {
    return new FluentIterable<T>() {
      @Override
      public Iterator<T> iterator() {
        return new ConsumingQueueIterator<T>((Queue<T>) iterable);
      }

      @Override
      public String toString() {
        return "Iterables.consumingIterable(...)";
      }
    };
  }
  checkNotNull(iterable);
  // General case: wrap every generated iterator so each next() also removes
  // the returned element from the source.
  return new FluentIterable<T>() {
    @Override
    public Iterator<T> iterator() {
      return Iterators.consumingIterator(iterable.iterator());
    }

    @Override
    public String toString() {
      return "Iterables.consumingIterable(...)";
    }
  };
}
// Methods only in Iterables, not in Iterators
/**
* Determines if the given iterable contains no elements.
*
* <p>There is no precise {@link Iterator} equivalent to this method, since
* one can only ask an iterator whether it has any elements <i>remaining</i>
* (which one does using {@link Iterator#hasNext}).
*
* @return {@code true} if the iterable contains no elements
*/
public static boolean isEmpty(Iterable<?> iterable) {
  // Collections know their size; arbitrary iterables must be probed for a
  // first element.
  if (!(iterable instanceof Collection)) {
    return !iterable.iterator().hasNext();
  }
  return ((Collection<?>) iterable).isEmpty();
}
/**
* Returns an iterable over the merged contents of all given
* {@code iterables}. Equivalent entries will not be de-duplicated.
*
* <p>Callers must ensure that the source {@code iterables} are in
* non-descending order as this method does not sort its input.
*
* <p>For any equivalent elements across all {@code iterables}, it is
* undefined which element is returned first.
*
* @since 11.0
*/
@Beta
public static <T> Iterable<T> mergeSorted(
    final Iterable<? extends Iterable<? extends T>> iterables,
    final Comparator<? super T> comparator) {
  checkNotNull(iterables, "iterables");
  checkNotNull(comparator, "comparator");
  // Each iterator() call lazily re-merges the (already sorted) sources; the
  // whole view is wrapped so callers cannot mutate through it.
  final FluentIterable<T> merged =
      new FluentIterable<T>() {
        @Override
        public Iterator<T> iterator() {
          return Iterators.mergeSorted(
              Iterables.transform(iterables, Iterables.<T>toIterator()), comparator);
        }
      };
  return new UnmodifiableIterable<T>(merged);
}
// TODO(user): Is this the best place for this? Move to fluent functions?
// Useful as a public method?
static <T> Function<Iterable<? extends T>, Iterator<? extends T>> toIterator() {
  // Adapter from an Iterable to a fresh Iterator over it; used by mergeSorted.
  return new Function<Iterable<? extends T>, Iterator<? extends T>>() {
    @Override
    public Iterator<? extends T> apply(Iterable<? extends T> input) {
      return input.iterator();
    }
  };
}
}
| |
/**
* Copyright 2010 - 2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jetbrains.exodus.tree.btree;
import jetbrains.exodus.ByteIterable;
import jetbrains.exodus.tree.INode;
import jetbrains.exodus.tree.MutableTreeRoot;
import jetbrains.exodus.tree.TreeTraverser;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Iterator;
import java.util.NoSuchElementException;
/**
 * Stateful cursor over a B-tree. Tracks the path from the root to the current
 * page in {@code stack}, the page/position the cursor points at, and (when on
 * a bottom page) the current leaf in {@code node}.
 */
class BTreeTraverser implements TreeTraverser {
    // Path of (page, position) pairs from the root down to, but excluding,
    // the current page; grown on demand in setAt().
    @NotNull
    protected TreePos[] stack = new TreePos[8];
    // Number of valid entries in 'stack'; 0 means the cursor is at the root.
    protected int top = 0;
    // Page the cursor currently points into.
    protected BasePage currentNode;
    // Leaf the cursor rests on, or ILeafNode.EMPTY while on an internal page.
    protected ILeafNode node = ILeafNode.EMPTY;
    // Index of the current entry inside currentNode.
    protected int currentPos;

    BTreeTraverser(@NotNull BasePage currentNode) {
        this.currentNode = currentNode;
    }

    // for tests only
    private BTreeTraverser(@NotNull BTreeTraverser source) {
        stack = source.stack; // tricky: the stack array is shared, not copied
        currentNode = source.currentNode;
        currentPos = source.currentPos;
        // NOTE(review): 'top' and 'node' are not copied here — presumably fine
        // for the test-only getTraverserNoDup() use; confirm before reusing.
    }

    /**
     * Positions the cursor at the first (left) or last (right) entry of the
     * current page; on a bottom page the position is pushed just outside the
     * valid range so the first move lands on a real entry.
     */
    @Override
    public void init(boolean left) {
        final int size = currentNode.size;
        currentPos = left ? 0 : size - 1;
        if (!canMoveDown()) {
            currentPos = left ? -1 : size;
        }
    }

    @Override
    public boolean isNotEmpty() {
        return currentNode.size > 0;
    }

    // Key of the current leaf; only meaningful after the cursor reached a leaf.
    @Override
    @NotNull
    public ByteIterable getKey() {
        return node.getKey();
    }

    // Value of the current leaf; fails fast if the leaf has no value.
    @Override
    @NotNull
    public ByteIterable getValue() {
        final ByteIterable result = node.getValue();
        if (result == null) {
            throw new NullPointerException();
        }
        return result;
    }

    @Override
    public boolean hasValue() {
        return node.hasValue();
    }

    /**
     * Descends into the child at the current position, entering at its first
     * entry. Returns the leaf reached (or ILeafNode.EMPTY for internal pages).
     */
    @Override
    @NotNull
    public INode moveDown() {
        return node = pushChild(new TreePos(currentNode, currentPos), getChildForMoveDown(), 0);
    }

    /**
     * Descends into the child at the current position, entering at its last
     * entry.
     */
    @Override
    @NotNull
    public INode moveDownToLast() {
        final BasePage child = getChildForMoveDown();
        return node = pushChild(new TreePos(currentNode, currentPos), child, child.size - 1);
    }

    protected BasePage getChildForMoveDown() {
        return currentNode.getChild(currentPos);
    }

    /**
     * Pushes the current (page, position) onto the stack and makes {@code child}
     * the current page at {@code pos}. Returns the leaf there if the child is a
     * bottom page, otherwise ILeafNode.EMPTY.
     */
    protected ILeafNode pushChild(@NotNull final TreePos topPos, @NotNull final BasePage child, int pos) {
        setAt(top, topPos);
        currentNode = child;
        currentPos = pos;
        ++top;
        if (child.isBottom()) {
            return handleLeaf(child.getKey(pos));
        } else {
            return ILeafNode.EMPTY;
        }
    }

    // Hook for subclasses (e.g. dup-handling traversers) to post-process leaves.
    protected ILeafNode handleLeaf(BaseLeafNode leaf) {
        return leaf;
    }

    // Stores a stack entry, doubling the stack array when it is full.
    protected void setAt(final int pos, @NotNull final TreePos treePos) {
        final int length = stack.length;
        if (pos >= length) { // ensure capacity
            final int newCapacity = length << 1;
            TreePos[] newStack = new TreePos[newCapacity];
            System.arraycopy(stack, 0, newStack, 0, length);
            stack = newStack;
        }
        stack[pos] = treePos;
    }

    // Pops one level: restores the parent page/position and clears the leaf.
    @Override
    public void moveUp() {
        --top;
        final TreePos topPos = stack[top];
        currentNode = topPos.node;
        currentPos = topPos.pos;
        node = ILeafNode.EMPTY;
        stack[top] = null; // help gc
    }

    // Binary-searches the current page for 'key', starting after currentPos.
    SearchRes getNextSibling(ByteIterable key) {
        return currentNode.binarySearch(key, currentPos);
    }

    @Override
    public int compareCurrent(@NotNull final ByteIterable key) {
        return currentNode.getKey(currentPos).compareKeyTo(key);
    }

    public void moveTo(int index) {
        currentPos = index;
    }

    public boolean canMoveTo(int index) {
        return index < currentNode.size;
    }

    @Override
    public boolean canMoveRight() {
        return currentPos + 1 < currentNode.size;
    }

    // Advances one entry to the right within the current page; refreshes the
    // current leaf when the page is a bottom page.
    @Override
    @NotNull
    public INode moveRight() {
        ++currentPos;
        if (currentNode.isBottom()) {
            return node = handleLeafR(currentNode.getKey(currentPos));
        } else {
            return node = ILeafNode.EMPTY;
        }
    }

    // Hooks for subclasses to post-process leaves reached by lateral moves.
    protected ILeafNode handleLeafR(BaseLeafNode leaf) {
        return leaf;
    }

    protected ILeafNode handleLeafL(BaseLeafNode leaf) {
        return leaf;
    }

    @Override
    public boolean canMoveLeft() {
        return currentPos > 0;
    }

    // Mirror of moveRight(): steps one entry to the left.
    @Override
    @NotNull
    public INode moveLeft() {
        --currentPos;
        if (currentNode.isBottom()) {
            return node = handleLeafL(currentNode.getKey(currentPos));
        } else {
            return node = ILeafNode.EMPTY;
        }
    }

    @Override
    public long getCurrentAddress() {
        return currentNode.getChildAddress(currentPos);
    }

    @Override
    public boolean canMoveUp() {
        return top != 0;
    }

    @Override
    public boolean canMoveDown() {
        return !currentNode.isBottom();
    }

    // Re-targets the traverser at a (possibly new) root, discarding the path.
    @Override
    public void reset(@NotNull MutableTreeRoot root) {
        top = 0;
        node = ILeafNode.EMPTY;
        currentNode = (BasePage) root;
        currentPos = 0;
    }

    // Exact-match positioning on (key, value).
    @Override
    public boolean moveTo(ByteIterable key, @Nullable ByteIterable value) {
        return doMoveTo(key, value, false);
    }

    // Range positioning: lands on the smallest entry >= (key, value).
    @Override
    public boolean moveToRange(ByteIterable key, @Nullable ByteIterable value) {
        return doMoveTo(key, value, true);
    }

    /**
     * Shared search: starts from the root of the path (or the current page if
     * the path is empty) and lets the page locate the target. Returns false
     * when nothing was found; otherwise updates 'node' (swapping key/value for
     * duplicate leaves) and returns true.
     */
    private boolean doMoveTo(@NotNull ByteIterable key, @Nullable ByteIterable value, boolean rangeSearch) {
        BasePage bottomNode = top == 0 ? currentNode : stack[0].node; // the most bottom node, ignoring lower bound
        final ILeafNode result = bottomNode.find(this, 0, key, value, rangeSearch);
        if (result == null) {
            return false;
        }
        node = result.isDupLeaf() ? new LeafNodeKV(result.getValue(), result.getKey()) : result;
        return true;
    }

    @NotNull
    @Override
    public BTreeBase getTree() {
        return (top == 0 ? currentNode : stack[0].node).getTree();
    }

    // Overridden by duplicate-aware traversers.
    protected boolean isDup() {
        return false;
    }

    // Iterates the pages on the current path, root first, ending with the
    // current page itself.
    PageIterator iterator() { // for testing purposes
        return new PageIterator() {
            int index = 0;
            int currentIteratorPos = 0;
            BasePage currentIteratorNode = null;

            @Override
            public int getPos() {
                return currentIteratorPos;
            }

            @Override
            public boolean hasNext() {
                return index <= top; // equality means we should return current
            }

            @Override
            public BasePage next() {
                final BasePage next;
                if (index < top) {
                    final TreePos treePos = stack[index];
                    next = treePos.node;
                    currentIteratorPos = treePos.pos;
                } else {
                    if (index > top) {
                        throw new NoSuchElementException("No more pages in stack");
                    } else {
                        next = currentNode;
                        currentIteratorPos = currentPos;
                    }
                }
                currentIteratorNode = next;
                index++;
                return next;
            }

            @Override
            public void remove() {
                throw new UnsupportedOperationException();
            }
        };
    }

    // for tests only
    static boolean isInDupMode(@NotNull final AddressIterator addressIterator) {
        // hasNext() updates 'inDupTree'
        return addressIterator.hasNext() && ((BTreeTraverserDup) addressIterator.getTraverser()).inDupTree;
    }

    // for tests only
    static BTreeTraverser getTraverserNoDup(@NotNull final AddressIterator addressIterator) {
        return new BTreeTraverser((BTreeTraverser) addressIterator.getTraverser());
    }

    // Path iterator that also exposes the position within each returned page.
    interface PageIterator extends Iterator<BasePage> {
        int getPos();
    }
}
| |
package algorithms;
import algorithms.interfaces.IGenerator;
import java.io.FileOutputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.io.FileNotFoundException;
import java.io.IOException;
public class WellEquidistributedLongperiodLinear extends RandomStreamBase {
private static final long serialVersionUID = 70510L;
private static final double NORM = (1.0 / 0x100000001L);
private static final int W = 32;
private static final int R = 16;
private static final int P = 0;
private static final int M1 = 13;
private static final int M2 = 9;
private static final int M3 = 5;
private static final int MASK = 0xF; // = 15
//state variables
private int state_i;
private int[] state;
//stream and substream variables :
private int[] stream;
private int[] substream;
private static int[] curr_stream;
//the state transition matrices
private static BitMatrix Apw;
private static BitMatrix Apz;
// if the generator was initialised
private static boolean initialised = false;
private static final int [] pw = new int[]
{0x280009a9, 0x31e221d0, 0xa00c0296, 0x763d492b,
0x63875b75, 0xef2acc3a, 0x1400839f, 0x5e0c8526,
0x514e11b, 0x56b398e4, 0x9436c8b9, 0xa6d8130b,
0xc0a48a78, 0x26ad57d0, 0xa3a0c62a, 0x3ff16c9b};
private static final int [] pz = new int[]
{0xcd68f2fe, 0x183e969a, 0x760449ae, 0xaa0ce54e,
0xfb5363af, 0x79deea9b, 0xef66c516, 0x103543cb,
0x244d1a97, 0x7570bc91, 0x31203fc7, 0x455ea2ca,
0xd77d327d, 0xd8c6a83c, 0xc51b05e7, 0x300c1501};
private static void initialisation() {
curr_stream = new int[] {0xA341BF9A, 0xAFE4901B, 0x6B10DE18, 0x05FE1420,
0xE48B1A9C, 0x590AE15E, 0xC5EB82A7, 0x37EAB2F9,
0x90E1C6EA, 0x3AE63902, 0x735DC91C, 0x902E3A8C,
0x6CB28A5D, 0x8474E7D1, 0x843E01A3, 0x5A7370EF};
initialised = true;
private void advanceSeed(int[] seed, int [] p) {
int b;
int [] x = new int[R];
for (int i = 0; i < R; i++) {
state[i] = seed[i];
}
state_i = 0;
for (int j = 0; j < R; ++j) {
b = p[j];
for (int k = 0; k < W; ++k) {
if ((b & 1) == 1) {
for (int i = 0; i < R; i++) {
x[i] ^= state[(state_i + i) & MASK];
}
}
b >>= 1;
nextValue();
}
}
for (int i = 0; i < R; i++) {
seed[i] = x[i];
}
}
private static void verifySeed(int[] seed) {
if (seed.length < R)
throw new IllegalArgumentException("Seed must contain " + R +
"values");
for (int i = 0; i < R; i++)
if (seed[i] != 0)
return;
throw new IllegalArgumentException
("At least one of the element of the seed must not be 0.");
}
private WellEquidistributedLongperiodLinear(int i) {
state = new int[R];
for(int j = 0; j < R; j++)
state[j] = 0;
state[i / W] = 1 << (i % W);
state_i = 0;
}
public WellEquidistributedLongperiodLinear() {
if (!initialised)
initialisation();
state = new int[R];
stream = new int[R];
substream = new int[R];
for(int i = 0; i < R; i++)
stream[i] = curr_stream[i];
advanceSeed(curr_stream, pz);
resetStartStream();
}
public WellEquidistributedLongperiodLinear (String name) {
this();
this.name = name;
}
public static void setPackageSeed (int seed[]) {
verifySeed(seed);
if (!initialised)
initialisation();
for(int i = 0; i < R; i++)
curr_stream[i] = seed[i];
}
public void setSeed (int seed[]) {
verifySeed(seed);
for(int i = 0; i < R; i++)
stream[i] = seed[i];
resetStartStream();
}
public int[] getState() {
int[] result = new int[R];
for(int i = 0; i < R; i++)
result[i] = state[(state_i + i) & MASK];
return result;
}
public WellEquidistributedLongperiodLinear clone() {
WELL512 retour = null;
retour = (WELL512)super.clone();
retour.state = new int[R];
retour.substream = new int[R];
retour.stream = new int[R];
for (int i = 0; i<R; i++) {
retour.state[i] = state[i];
retour.substream[i] = substream[i];
retour.stream[i] = stream[i];
}
return retour;
}
public void resetStartStream() {
for(int i = 0; i < R; i++)
substream[i] = stream[i];
resetStartSubstream();
}
public void resetStartSubstream() {
state_i = 0;
for(int i = 0; i < R; i++)
state[i] = substream[i];
}
public void resetNextSubstream() {
advanceSeed(substream, pw);
resetStartSubstream();
}
public String toString() {
StringBuffer sb = new StringBuffer();
if(name == null)
sb.append("The state of this WELL512 is : {");
else
sb.append("The state of " + name + " is : {");
for(int i = 0; i < R - 1; i++)
sb.append(state[(state_i + i) & MASK] + ", ");
sb.append(state[(state_i + R - 1) & MASK] + "}");
return sb.toString();
}
protected double nextValue() {
int z0, z1, z2;
z0 = state[(state_i + 15) & MASK];
z1 = (state[state_i] ^ (state[state_i] << 16)) ^
(state[(state_i+M1) & MASK] ^ (state[(state_i+M1) & MASK] << 15));
z2 = (state[(state_i+M2) & MASK] ^
(state[(state_i+M2) & MASK] >>> 11));
state[state_i] = z1 ^ z2;
state[(state_i + 15) & MASK] = (z0 ^ (z0 << 2)) ^ (z1 ^ (z1 << 18)) ^
(z2 << 28) ^ (state[state_i] ^
((state[state_i] << 5) & 0xDA442D24));
state_i = (state_i + 15) & MASK;
long result = state[state_i];
return (double)(result > 0 ? result : (result + 0x100000000L)) * NORM;
}
}
| |
/*
* Copyright 2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.social.facebook.api;
import static org.junit.Assert.*;
import static org.springframework.http.HttpMethod.*;
import static org.springframework.test.web.client.match.MockRestRequestMatchers.*;
import static org.springframework.test.web.client.response.MockRestResponseCreators.*;
import java.util.List;
import java.util.Map;
import org.junit.Test;
import org.springframework.core.io.ByteArrayResource;
import org.springframework.core.io.Resource;
import org.springframework.http.MediaType;
import org.springframework.social.facebook.api.Page.PriceRange;
/**
* @author Craig Walls
*/
public class PageTemplateTest extends AbstractFacebookApiTest {
@Test
public void getPage_place_with_hours() {
	mockServer.expect(requestTo(fbUrl("220817147947513")))
			.andExpect(method(GET))
			.andExpect(header("Authorization", "OAuth someAccessToken"))
			.andRespond(withSuccess(jsonResource("place-with-hours-page"), MediaType.APPLICATION_JSON));

	Page page = facebook.pageOperations().getPage("220817147947513");
	assertEquals("220817147947513", page.getId());
	assertEquals("Denton Square Donuts", page.getName());
	assertEquals("https://www.facebook.com/DentonSquareDonuts", page.getLink());
	assertEquals(3078, page.getFanCount());
	assertEquals("Restaurant/cafe", page.getCategory());
	assertEquals("www.dsdonuts.com", page.getWebsite());
	assertEquals("Denton", page.getLocation().getCity());
	assertEquals("TX", page.getLocation().getState());
	assertEquals("United States", page.getLocation().getCountry());
	assertEquals(33.21556, page.getLocation().getLatitude(), 0.0001);
	assertEquals(-97.13414, page.getLocation().getLongitude(), 0.0001);
	assertEquals("940-220-9447", page.getPhone());
	assertEquals(959, page.getCheckins());
	assertFalse(page.canPost());
	assertTrue(page.isPublished());
	assertFalse(page.isCommunityPage());
	assertFalse(page.hasAddedApp());
	assertEquals(68, page.getTalkingAboutCount());
	assertNotNull(page.getHours());
	// The fixture uses identical opening hours for every day of the week.
	for (String day : new String[] {"mon", "tue", "wed", "thu", "fri", "sat", "sun"}) {
		assertEquals("07:30", page.getHours().get(day + "_1_open"));
		assertEquals("13:00", page.getHours().get(day + "_1_close"));
	}
}
@Test
public void getPage_application() {
    // Stub the Graph API lookup for the Greenhouse application page.
    String pageId = "140372495981006";
    mockServer.expect(requestTo(fbUrl(pageId)))
            .andExpect(method(GET))
            .andExpect(header("Authorization", "OAuth someAccessToken"))
            .andRespond(withSuccess(jsonResource("application-page"), MediaType.APPLICATION_JSON));
    Page appPage = facebook.pageOperations().getPage(pageId);
    assertEquals(pageId, appPage.getId());
    assertEquals("Greenhouse", appPage.getName());
    assertEquals("https://www.facebook.com/apps/application.php?id=140372495981006", appPage.getLink());
    assertEquals("The social destination for Spring application developers.", appPage.getDescription());
    assertTrue(appPage.canPost());
    assertEquals(0, appPage.getTalkingAboutCount());
}
@SuppressWarnings("unchecked")
@Test
public void getPage_withExtraData() {
    // Stub the Graph API lookup for a movie page that carries non-standard fields.
    String pageId = "11803178355";
    mockServer.expect(requestTo(fbUrl(pageId)))
            .andExpect(method(GET))
            .andExpect(header("Authorization", "OAuth someAccessToken"))
            .andRespond(withSuccess(jsonResource("page-with-extra-data"), MediaType.APPLICATION_JSON));
    Page moviePage = facebook.pageOperations().getPage(pageId);
    // Core page properties.
    assertEquals(pageId, moviePage.getId());
    assertEquals("A Scanner Darkly", moviePage.getName());
    assertEquals("https://www.facebook.com/pages/A-Scanner-Darkly/11803178355", moviePage.getLink());
    assertNull(moviePage.getDescription());
    assertEquals(0, moviePage.getWereHereCount());
    // Movie-specific properties mapped onto dedicated accessors.
    assertEquals("Keanu Reeves, Robert Downey Jr., Woody Harrelson, Winona Ryder, Rory Cochrane", moviePage.getStarring());
    assertEquals("Richard Linklater based on Philip K. Dick's novel", moviePage.getScreenplayBy());
    assertEquals("2007", moviePage.getReleaseDate());
    assertEquals("Steven Soderbergh and George Clooney (Executive Producers)", moviePage.getProducedBy());
    assertTrue(moviePage.getPlotOutline().startsWith("In the future \"seven years from now\", America has lost the war on drugs. A highly addictive and debilitating illegal drug called Substance D, distilled from small blue flowers"));
    assertEquals("Science Fiction", moviePage.getGenre());
    assertEquals("Richard Linklater", moviePage.getDirectedBy());
    assertEquals("Winner of Best Animation award OFCS Awards 2007", moviePage.getAwards());
    assertEquals(PriceRange.$$, moviePage.getPriceRange());
    // Fields that are not modeled on Page come back through the extra-data map,
    // including arbitrarily nested maps.
    Map<String, Object> extraData = moviePage.getExtraData();
    assertEquals("This is extra data", extraData.get("extra_data"));
    Map<String, Object> embedded = (Map<String, Object>) extraData.get("embedded");
    assertEquals("y", embedded.get("x"));
    assertEquals(2, embedded.get("a"));
    Map<String, Object> deeper = (Map<String, Object>) embedded.get("deeper");
    assertEquals("bar", deeper.get("foo"));
}
@Test
public void isPageAdmin() {
    expectFetchAccounts();
    // Only pages returned by me/accounts count as administered by the user.
    assertTrue(facebook.pageOperations().isPageAdmin("987654321"));
    assertTrue(facebook.pageOperations().isPageAdmin("1212121212"));
    assertFalse(facebook.pageOperations().isPageAdmin("2468013579"));
}
@Test
public void getAccounts() {
    expectFetchAccounts();
    // me/accounts returns every page the authenticated user administers,
    // each with its own page access token.
    List<Account> accounts = facebook.pageOperations().getAccounts();
    assertEquals(2, accounts.size());
    Account first = accounts.get(0);
    assertEquals("987654321", first.getId());
    assertEquals("Test Page", first.getName());
    assertEquals("Page", first.getCategory());
    assertEquals("pageAccessToken", first.getAccessToken());
    Account second = accounts.get(1);
    assertEquals("1212121212", second.getId());
    assertEquals("Test Page 2", second.getName());
    assertEquals("Page", second.getCategory());
    assertEquals("page2AccessToken", second.getAccessToken());
}
@Test
public void getAccount() {
    expectFetchAccounts();
    // Look up a single managed page by its id.
    Account testPage = facebook.pageOperations().getAccount("987654321");
    assertEquals("987654321", testPage.getId());
    assertEquals("Test Page", testPage.getName());
    assertEquals("Page", testPage.getCategory());
    assertEquals("pageAccessToken", testPage.getAccessToken());
}
@Test
public void getAccount_missingAccount() throws Exception {
    expectFetchAccounts();
    // An id that is not among the user's accounts yields null, not an exception.
    assertNull(facebook.pageOperations().getAccount("BOGUS"));
}
@Test
public void getAccessToken() {
    expectFetchAccounts();
    // The page-specific token from me/accounts is returned, not the user token.
    String token = facebook.pageOperations().getAccessToken("987654321");
    assertEquals("pageAccessToken", token);
}
@Test(expected=PageAdministrationException.class)
public void getAccessToken_missingAccount() {
    expectFetchAccounts();
    // Requesting a token for a page the user does not administer must fail.
    facebook.pageOperations().getAccessToken("BOGUS");
}
@Test
public void post_message() throws Exception {
    expectFetchAccounts();
    // The post goes to the page's feed and is authenticated with the page
    // access token fetched from me/accounts, not the user token.
    String requestBody = "message=Hello+Facebook+World&access_token=pageAccessToken";
    mockServer.expect(requestTo(fbUrl("987654321/feed")))
            .andExpect(method(POST))
            .andExpect(header("Authorization", "OAuth someAccessToken"))
            .andExpect(content().string(requestBody))
            .andRespond(withSuccess("{\"id\":\"123456_78901234\"}", MediaType.APPLICATION_JSON));
    // BUG FIX: removed a stray "new PostData(...).message(...)" statement whose
    // result was discarded — dead leftover from a refactor to PagePostData.
    assertEquals("123456_78901234", facebook.pageOperations().post(new PagePostData("987654321").message("Hello Facebook World")));
    mockServer.verify();
}
@Test(expected = PageAdministrationException.class)
public void postMessage_notAdmin() throws Exception {
    expectFetchAccounts();
    // Posting as a page the user does not administer must be rejected before
    // any feed request is made (no mockServer expectation is registered).
    facebook.pageOperations().post(new PagePostData("2468013579").message("Hello Facebook World"));
}
@Test
public void postLink() throws Exception {
    expectFetchAccounts();
    String requestBody = "message=Hello+Facebook+World&link=someLink&name=some+name&caption=some+caption&description=some+description&access_token=pageAccessToken";
    mockServer.expect(requestTo(fbUrl("987654321/feed")))
            .andExpect(method(POST))
            .andExpect(header("Authorization", "OAuth someAccessToken"))
            .andExpect(content().string(requestBody))
            .andRespond(withSuccess("{\"id\":\"123456_78901234\"}", MediaType.APPLICATION_JSON));
    // Link post without a picture: the picture parameter is omitted from the body.
    PagePostData linkPost = new PagePostData("987654321")
            .message("Hello Facebook World")
            .link("someLink", null, "some name", "some caption", "some description");
    assertEquals("123456_78901234", facebook.pageOperations().post(linkPost));
    mockServer.verify();
}
@Test
public void postLink_withPicture() throws Exception {
    expectFetchAccounts();
    String requestBody = "message=Hello+Facebook+World&link=someLink&name=some+name&caption=some+caption&description=some+description&picture=somePic&access_token=pageAccessToken";
    mockServer.expect(requestTo(fbUrl("987654321/feed")))
            .andExpect(method(POST))
            .andExpect(header("Authorization", "OAuth someAccessToken"))
            .andExpect(content().string(requestBody))
            .andRespond(withSuccess("{\"id\":\"123456_78901234\"}", MediaType.APPLICATION_JSON));
    // Same as postLink, but the picture URL must be carried in the body.
    PagePostData linkPost = new PagePostData("987654321")
            .message("Hello Facebook World")
            .link("someLink", "somePic", "some name", "some caption", "some description");
    assertEquals("123456_78901234", facebook.pageOperations().post(linkPost));
    mockServer.verify();
}
@Test
public void postMessage_withTargetAudience() throws Exception {
    expectFetchAccounts();
    // The targeting restriction is serialized as a JSON-ish object in the
    // "targeting" form parameter (URL-encoded below).
    String requestBody = "message=Hello+Facebook+World&targeting=%7B%27value%27%3A+%27CUSTOM%27%2C%27countries%27%3A%27PE%27%7D&access_token=pageAccessToken";
    mockServer.expect(requestTo(fbUrl("987654321/feed")))
            .andExpect(method(POST))
            .andExpect(header("Authorization", "OAuth someAccessToken"))
            .andExpect(content().string(requestBody))
            .andRespond(withSuccess("{\"id\":\"123456_78901234\"}", MediaType.APPLICATION_JSON));
    Targeting peOnly = new Targeting().countries("PE");
    PagePostData targetedPost = new PagePostData("987654321").message("Hello Facebook World").targeting(peOnly);
    assertEquals("123456_78901234", facebook.pageOperations().post(targetedPost));
    mockServer.verify();
}
@Test(expected = PageAdministrationException.class)
public void postLink_notAdmin() throws Exception {
    expectFetchAccounts();
    // Link posts are subject to the same admin check as plain messages.
    facebook.pageOperations().post(new PagePostData("2468013579").message("Hello Facebook World").link("someLink", null, "some name", "some caption", "some description"));
}
@Test
public void postPhoto_noCaption() {
    expectFetchAccounts();
    // Photo uploads are POSTed to the target album's /photos edge.
    mockServer.expect(requestTo(fbUrl("192837465/photos")))
            .andExpect(method(POST))
            .andExpect(header("Authorization", "OAuth someAccessToken"))
            .andRespond(withSuccess("{\"id\":\"12345\"}", MediaType.APPLICATION_JSON));
    // TODO: Match body content to ensure fields and photo are included
    Resource photoData = getUploadResource("photo.jpg", "PHOTO DATA");
    assertEquals("12345", facebook.pageOperations().postPhoto("987654321", "192837465", photoData));
}
@Test
public void postPhoto_withCaption() {
    expectFetchAccounts();
    // Same upload path as postPhoto_noCaption, with an extra caption field.
    mockServer.expect(requestTo(fbUrl("192837465/photos")))
            .andExpect(method(POST))
            .andExpect(header("Authorization", "OAuth someAccessToken"))
            .andRespond(withSuccess("{\"id\":\"12345\"}", MediaType.APPLICATION_JSON));
    // TODO: Match body content to ensure fields and photo are included
    Resource photoData = getUploadResource("photo.jpg", "PHOTO DATA");
    assertEquals("12345", facebook.pageOperations().postPhoto("987654321", "192837465", photoData, "Some caption"));
}
@Test
public void search() {
    // BUG FIX: the expected URL previously contained the mojibake "place¢er="
    // where "place&center=" had been corrupted (the "&cent" prefix collapsed
    // into the cent sign). The Graph API place search takes q, type, center
    // and distance parameters, so the stub would never have matched.
    mockServer.expect(requestTo(fbUrl("search?q=coffee&type=place&center=33.050278%2C-96.745833&distance=5280")))
            .andExpect(method(GET))
            .andExpect(header("Authorization", "OAuth someAccessToken"))
            .andRespond(withSuccess(jsonResource("places-list"), MediaType.APPLICATION_JSON));
    List<Page> places = facebook.pageOperations().searchPlaces("coffee", 33.050278, -96.745833, 5280);
    assertEquals(2, places.size());
    // First result has a full street address.
    Page trueBrew = places.get(0);
    assertEquals("117723491586638", trueBrew.getId());
    assertEquals("True Brew Coffee & Espresso Service", trueBrew.getName());
    assertEquals("Local business", trueBrew.getCategory());
    assertEquals("542 Haggard St", trueBrew.getLocation().getStreet());
    assertEquals("Plano", trueBrew.getLocation().getCity());
    assertEquals("TX", trueBrew.getLocation().getState());
    assertEquals("United States", trueBrew.getLocation().getCountry());
    assertEquals("75074-5529", trueBrew.getLocation().getZip());
    assertEquals(33.026239, trueBrew.getLocation().getLatitude(), 0.00001);
    assertEquals(-96.707089, trueBrew.getLocation().getLongitude(), 0.00001);
    // Second result omits street and zip — those accessors must return null.
    Page starbucks = places.get(1);
    assertEquals("169020919798274", starbucks.getId());
    assertEquals("Starbucks Coffee", starbucks.getName());
    assertEquals("Local business", starbucks.getCategory());
    assertNull(starbucks.getLocation().getStreet());
    assertEquals("Plano", starbucks.getLocation().getCity());
    assertEquals("TX", starbucks.getLocation().getState());
    assertEquals("United States", starbucks.getLocation().getCountry());
    assertNull(starbucks.getLocation().getZip());
    assertEquals(33.027734, starbucks.getLocation().getLatitude(), 0.00001);
    assertEquals(-96.795133, starbucks.getLocation().getLongitude(), 0.00001);
}
// private helpers
/**
 * Stubs the me/accounts request that page operations issue to discover which
 * pages the user administers and their page access tokens.
 */
private void expectFetchAccounts() {
    mockServer.expect(requestTo(fbUrl("me/accounts")))
            .andExpect(method(GET))
            .andExpect(header("Authorization", "OAuth someAccessToken"))
            .andRespond(withSuccess(jsonResource("accounts"), MediaType.APPLICATION_JSON));
}
/**
 * Builds an in-memory upload {@code Resource} that reports the given filename,
 * since a plain {@code ByteArrayResource} has no filename for multipart parts.
 *
 * <p>Fixes over the previous version: removed a stray empty declaration
 * ({@code };}) inside the anonymous class, removed the pointless
 * {@code throws IllegalStateException} clause (unchecked, never thrown here),
 * added {@code @Override}, encoded the content with an explicit charset
 * instead of the platform default, and dropped the misleading {@code video}
 * local in favor of returning directly.
 *
 * @param filename the filename to report for the multipart part
 * @param content  the bytes to upload, supplied as a string
 * @return a byte-array-backed resource carrying the content and filename
 */
private Resource getUploadResource(final String filename, String content) {
    return new ByteArrayResource(content.getBytes(java.nio.charset.StandardCharsets.UTF_8)) {
        @Override
        public String getFilename() {
            return filename;
        }
    };
}
}
| |
/*
* Copyright 2001-2010 Stephen Colebourne
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sop4j.base.joda.time;
import com.sop4j.base.joda.convert.FromString;
import com.sop4j.base.joda.convert.ToString;
import com.sop4j.base.joda.time.base.BaseSingleFieldPeriod;
import com.sop4j.base.joda.time.field.FieldUtils;
import com.sop4j.base.joda.time.format.ISOPeriodFormat;
import com.sop4j.base.joda.time.format.PeriodFormatter;
/**
* An immutable time period representing a number of months.
* <p>
* <code>Months</code> is an immutable period that can only store months.
* It does not store years, days or hours for example. As such it is a
* type-safe way of representing a number of months in an application.
* <p>
* The number of months is set in the constructor, and may be queried using
* <code>getMonths()</code>. Basic mathematical operations are provided -
* <code>plus()</code>, <code>minus()</code>, <code>multipliedBy()</code> and
* <code>dividedBy()</code>.
* <p>
* <code>Months</code> is thread-safe and immutable.
*
* @author Stephen Colebourne
* @since 1.4
*/
public final class Months extends BaseSingleFieldPeriod {

    /** Constant representing zero months. */
    public static final Months ZERO = new Months(0);
    /** Constant representing one month. */
    public static final Months ONE = new Months(1);
    /** Constant representing two months. */
    public static final Months TWO = new Months(2);
    /** Constant representing three months. */
    public static final Months THREE = new Months(3);
    /** Constant representing four months. */
    public static final Months FOUR = new Months(4);
    /** Constant representing five months. */
    public static final Months FIVE = new Months(5);
    /** Constant representing six months. */
    public static final Months SIX = new Months(6);
    /** Constant representing seven months. */
    public static final Months SEVEN = new Months(7);
    /** Constant representing eight months. */
    public static final Months EIGHT = new Months(8);
    /** Constant representing nine months. */
    public static final Months NINE = new Months(9);
    /** Constant representing ten months. */
    public static final Months TEN = new Months(10);
    /** Constant representing eleven months. */
    public static final Months ELEVEN = new Months(11);
    /** Constant representing twelve months. */
    public static final Months TWELVE = new Months(12);
    /** Constant representing the maximum number of months that can be stored in this object. */
    public static final Months MAX_VALUE = new Months(Integer.MAX_VALUE);
    /** Constant representing the minimum number of months that can be stored in this object. */
    public static final Months MIN_VALUE = new Months(Integer.MIN_VALUE);

    /** The parser to use for this class (accepts only the months component). */
    private static final PeriodFormatter PARSER = ISOPeriodFormat.standard().withParseType(PeriodType.months());
    /** Serialization version. */
    private static final long serialVersionUID = 87525275727380867L;

    //-----------------------------------------------------------------------
    /**
     * Obtains an instance of <code>Months</code> that may be cached.
     * <code>Months</code> is immutable, so instances can be cached and shared.
     * This factory method provides access to shared instances for the common
     * values 0-12 and the two extremes; other values allocate a new object.
     *
     * @param months  the number of months to obtain an instance for
     * @return the instance of Months
     */
    public static Months months(int months) {
        switch (months) {
            case 0:
                return ZERO;
            case 1:
                return ONE;
            case 2:
                return TWO;
            case 3:
                return THREE;
            case 4:
                return FOUR;
            case 5:
                return FIVE;
            case 6:
                return SIX;
            case 7:
                return SEVEN;
            case 8:
                return EIGHT;
            case 9:
                return NINE;
            case 10:
                return TEN;
            case 11:
                return ELEVEN;
            case 12:
                return TWELVE;
            case Integer.MAX_VALUE:
                return MAX_VALUE;
            case Integer.MIN_VALUE:
                return MIN_VALUE;
            default:
                return new Months(months);
        }
    }

    //-----------------------------------------------------------------------
    /**
     * Creates a <code>Months</code> representing the number of whole months
     * between the two specified datetimes. This method correctly handles
     * any daylight savings time changes that may occur during the interval.
     *
     * @param start  the start instant, must not be null
     * @param end  the end instant, must not be null
     * @return the period in months
     * @throws IllegalArgumentException if the instants are null or invalid
     */
    public static Months monthsBetween(ReadableInstant start, ReadableInstant end) {
        int amount = BaseSingleFieldPeriod.between(start, end, DurationFieldType.months());
        return Months.months(amount);
    }

    /**
     * Creates a <code>Months</code> representing the number of whole months
     * between the two specified partial datetimes.
     * <p>
     * The two partials must contain the same fields, for example you can specify
     * two <code>LocalDate</code> objects.
     *
     * @param start  the start partial date, must not be null
     * @param end  the end partial date, must not be null
     * @return the period in months
     * @throws IllegalArgumentException if the partials are null or invalid
     */
    public static Months monthsBetween(ReadablePartial start, ReadablePartial end) {
        // Fast path: two LocalDates can be diffed directly via the chronology's
        // month duration field on their local millis.
        if (start instanceof LocalDate && end instanceof LocalDate) {
            Chronology chrono = DateTimeUtils.getChronology(start.getChronology());
            int months = chrono.months().getDifference(
                    ((LocalDate) end).getLocalMillis(), ((LocalDate) start).getLocalMillis());
            return Months.months(months);
        }
        int amount = BaseSingleFieldPeriod.between(start, end, ZERO);
        return Months.months(amount);
    }

    /**
     * Creates a <code>Months</code> representing the number of whole months
     * in the specified interval. This method correctly handles any daylight
     * savings time changes that may occur during the interval.
     *
     * @param interval  the interval to extract months from, null returns zero
     * @return the period in months
     * @throws IllegalArgumentException if the partials are null or invalid
     */
    public static Months monthsIn(ReadableInterval interval) {
        if (interval == null) {
            return Months.ZERO;
        }
        int amount = BaseSingleFieldPeriod.between(interval.getStart(), interval.getEnd(), DurationFieldType.months());
        return Months.months(amount);
    }

    /**
     * Creates a new <code>Months</code> by parsing a string in the ISO8601 format 'PnM'.
     * <p>
     * The parse will accept the full ISO syntax of PnYnMnWnDTnHnMnS however only the
     * months component may be non-zero. If any other component is non-zero, an exception
     * will be thrown.
     *
     * @param periodStr  the period string, null returns zero
     * @return the period in months
     * @throws IllegalArgumentException if the string format is invalid
     */
    @FromString
    public static Months parseMonths(String periodStr) {
        if (periodStr == null) {
            return Months.ZERO;
        }
        Period p = PARSER.parsePeriod(periodStr);
        return Months.months(p.getMonths());
    }

    //-----------------------------------------------------------------------
    /**
     * Creates a new instance representing a number of months.
     * You should consider using the factory method {@link #months(int)}
     * instead of the constructor.
     *
     * @param months  the number of months to represent
     */
    private Months(int months) {
        super(months);
    }

    /**
     * Resolves singletons on deserialization so that cached constants
     * (ZERO, ONE, ...) stay canonical.
     *
     * @return the singleton instance
     */
    private Object readResolve() {
        return Months.months(getValue());
    }

    //-----------------------------------------------------------------------
    /**
     * Gets the duration field type, which is <code>months</code>.
     *
     * @return the period type
     */
    public DurationFieldType getFieldType() {
        return DurationFieldType.months();
    }

    /**
     * Gets the period type, which is <code>months</code>.
     *
     * @return the period type
     */
    public PeriodType getPeriodType() {
        return PeriodType.months();
    }

    //-----------------------------------------------------------------------
    /**
     * Gets the number of months that this period represents.
     *
     * @return the number of months in the period
     */
    public int getMonths() {
        return getValue();
    }

    //-----------------------------------------------------------------------
    /**
     * Returns a new instance with the specified number of months added.
     * <p>
     * This instance is immutable and unaffected by this method call.
     *
     * @param months  the amount of months to add, may be negative
     * @return the new period plus the specified number of months
     * @throws ArithmeticException if the result overflows an int
     */
    public Months plus(int months) {
        if (months == 0) {
            return this;
        }
        return Months.months(FieldUtils.safeAdd(getValue(), months));
    }

    /**
     * Returns a new instance with the specified number of months added.
     * <p>
     * This instance is immutable and unaffected by this method call.
     *
     * @param months  the amount of months to add, may be negative, null means zero
     * @return the new period plus the specified number of months
     * @throws ArithmeticException if the result overflows an int
     */
    public Months plus(Months months) {
        if (months == null) {
            return this;
        }
        return plus(months.getValue());
    }

    //-----------------------------------------------------------------------
    /**
     * Returns a new instance with the specified number of months taken away.
     * <p>
     * This instance is immutable and unaffected by this method call.
     *
     * @param months  the amount of months to take away, may be negative
     * @return the new period minus the specified number of months
     * @throws ArithmeticException if the result overflows an int
     */
    public Months minus(int months) {
        return plus(FieldUtils.safeNegate(months));
    }

    /**
     * Returns a new instance with the specified number of months taken away.
     * <p>
     * This instance is immutable and unaffected by this method call.
     *
     * @param months  the amount of months to take away, may be negative, null means zero
     * @return the new period minus the specified number of months
     * @throws ArithmeticException if the result overflows an int
     */
    public Months minus(Months months) {
        if (months == null) {
            return this;
        }
        return minus(months.getValue());
    }

    //-----------------------------------------------------------------------
    /**
     * Returns a new instance with the months multiplied by the specified scalar.
     * <p>
     * This instance is immutable and unaffected by this method call.
     *
     * @param scalar  the amount to multiply by, may be negative
     * @return the new period multiplied by the specified scalar
     * @throws ArithmeticException if the result overflows an int
     */
    public Months multipliedBy(int scalar) {
        return Months.months(FieldUtils.safeMultiply(getValue(), scalar));
    }

    /**
     * Returns a new instance with the months divided by the specified divisor.
     * The calculation uses integer division, thus 3 divided by 2 is 1.
     * <p>
     * This instance is immutable and unaffected by this method call.
     *
     * @param divisor  the amount to divide by, may be negative
     * @return the new period divided by the specified divisor
     * @throws ArithmeticException if the divisor is zero
     */
    public Months dividedBy(int divisor) {
        if (divisor == 1) {
            return this;
        }
        return Months.months(getValue() / divisor);
    }

    //-----------------------------------------------------------------------
    /**
     * Returns a new instance with the months value negated.
     *
     * @return the new period with a negated value
     * @throws ArithmeticException if the result overflows an int
     */
    public Months negated() {
        return Months.months(FieldUtils.safeNegate(getValue()));
    }

    //-----------------------------------------------------------------------
    /**
     * Is this months instance greater than the specified number of months.
     *
     * @param other  the other period, null means zero
     * @return true if this months instance is greater than the specified one
     */
    public boolean isGreaterThan(Months other) {
        if (other == null) {
            return getValue() > 0;
        }
        return getValue() > other.getValue();
    }

    /**
     * Is this months instance less than the specified number of months.
     *
     * @param other  the other period, null means zero
     * @return true if this months instance is less than the specified one
     */
    public boolean isLessThan(Months other) {
        if (other == null) {
            return getValue() < 0;
        }
        return getValue() < other.getValue();
    }

    //-----------------------------------------------------------------------
    /**
     * Gets this instance as a String in the ISO8601 duration format.
     * <p>
     * For example, "P4M" represents 4 months.
     *
     * @return the value as an ISO8601 string
     */
    @ToString
    public String toString() {
        return "P" + String.valueOf(getValue()) + "M";
    }

}
| |
/*
* Copyright 2016 MovingBlocks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terasology.logic.characters;
import com.google.common.collect.Maps;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.terasology.engine.Time;
import org.terasology.entitySystem.entity.EntityRef;
import org.terasology.entitySystem.entity.lifecycleEvents.BeforeDeactivateComponent;
import org.terasology.entitySystem.entity.lifecycleEvents.OnActivatedComponent;
import org.terasology.entitySystem.event.ReceiveEvent;
import org.terasology.entitySystem.systems.BaseComponentSystem;
import org.terasology.entitySystem.systems.RegisterMode;
import org.terasology.entitySystem.systems.RegisterSystem;
import org.terasology.entitySystem.systems.UpdateSubscriberSystem;
import org.terasology.logic.characters.events.SetMovementModeEvent;
import org.terasology.logic.location.LocationComponent;
import org.terasology.logic.players.LocalPlayer;
import org.terasology.math.geom.Vector3f;
import org.terasology.network.NetworkSystem;
import org.terasology.physics.engine.CharacterCollider;
import org.terasology.physics.engine.PhysicsEngine;
import org.terasology.registry.In;
import org.terasology.registry.Share;
import org.terasology.utilities.collection.CircularBuffer;
import org.terasology.world.WorldProvider;
import java.util.Map;
@RegisterSystem(RegisterMode.AUTHORITY)
@Share(PredictionSystem.class)
public class ServerCharacterPredictionSystem extends BaseComponentSystem implements UpdateSubscriberSystem, PredictionSystem {
public static final int RENDER_DELAY = 100;
public static final int MAX_INPUT_OVERFLOW = 100;
public static final int MAX_INPUT_UNDERFLOW = 100;
private static final Logger logger = LoggerFactory.getLogger(ServerCharacterPredictionSystem.class);
private static final int BUFFER_SIZE = 128;
private static final int TIME_BETWEEN_STATE_REPLICATE = 50;
@In
private Time time;
@In
private PhysicsEngine physics;
@In
private WorldProvider worldProvider;
@In
private LocalPlayer localPlayer;
@In
private NetworkSystem networkSystem;
private CharacterMover characterMover;
private Map<EntityRef, CircularBuffer<CharacterStateEvent>> characterStates = Maps.newHashMap();
private Map<EntityRef, CharacterMoveInputEvent> lastInputEvent = Maps.newHashMap();
private long nextSendState;
private CharacterMovementSystemUtility characterMovementSystemUtility;
@Override
public void initialise() {
characterMover = new KinematicCharacterMover(worldProvider, physics);
nextSendState = time.getGameTimeInMs() + TIME_BETWEEN_STATE_REPLICATE;
characterMovementSystemUtility = new CharacterMovementSystemUtility(physics);
}
@ReceiveEvent(components = {CharacterMovementComponent.class, LocationComponent.class})
public void onCreate(final OnActivatedComponent event, final EntityRef entity) {
physics.getCharacterCollider(entity);
CircularBuffer<CharacterStateEvent> stateBuffer = CircularBuffer.create(BUFFER_SIZE);
stateBuffer.add(createInitialState(entity));
characterStates.put(entity, stateBuffer);
}
@ReceiveEvent(components = {CharacterMovementComponent.class, LocationComponent.class})
public void onDestroy(final BeforeDeactivateComponent event, final EntityRef entity) {
physics.removeCharacterCollider(entity);
characterStates.remove(entity);
lastInputEvent.remove(entity);
}
@ReceiveEvent
public void onSetMovementModeEvent(SetMovementModeEvent event, EntityRef character, CharacterMovementComponent movementComponent) {
CircularBuffer<CharacterStateEvent> stateBuffer = characterStates.get(character);
CharacterStateEvent lastState = stateBuffer.getLast();
CharacterStateEvent newState = new CharacterStateEvent(lastState);
newState.setSequenceNumber(lastState.getSequenceNumber());
if (event.getMode() != lastState.getMode()) {
newState.setMode(event.getMode());
} else {
newState.setMode(MovementMode.WALKING);
}
stateBuffer.add(newState);
characterMovementSystemUtility.setToState(character, newState);
}
@ReceiveEvent(components = {CharacterMovementComponent.class, LocationComponent.class})
public void onPlayerInput(CharacterMoveInputEvent input, EntityRef entity) {
CharacterCollider characterCollider = physics.getCharacterCollider(entity);
if (characterCollider.isPending()) {
logger.debug("Skipping input, collision not yet established");
return;
}
CircularBuffer<CharacterStateEvent> stateBuffer = characterStates.get(entity);
CharacterStateEvent lastState = stateBuffer.getLast();
if (input.getDelta() + lastState.getTime() < time.getGameTimeInMs() + MAX_INPUT_OVERFLOW) {
CharacterStateEvent newState = stepState(input, lastState, entity);
stateBuffer.add(newState);
characterMovementSystemUtility.setToState(entity, newState);
lastInputEvent.put(entity, input);
} else {
logger.warn("Received too much input from {}, dropping input.", entity);
}
}
@ReceiveEvent(components = {CharacterMovementComponent.class, LocationComponent.class})
public void onTeleport(CharacterTeleportEvent event, EntityRef entity) {
CircularBuffer<CharacterStateEvent> stateBuffer = characterStates.get(entity);
CharacterStateEvent lastState = stateBuffer.getLast();
CharacterStateEvent newState = new CharacterStateEvent(lastState);
newState.setPosition(new Vector3f(event.getTargetPosition()));
newState.setTime(time.getGameTimeInMs());
stateBuffer.add(newState);
characterMovementSystemUtility.setToState(entity, newState);
}
@ReceiveEvent(components = {CharacterMovementComponent.class, LocationComponent.class})
public void onImpulse(CharacterImpulseEvent event, EntityRef entity) {
Vector3f impulse = event.getDirection();
CircularBuffer<CharacterStateEvent> stateBuffer = characterStates.get(entity);
CharacterStateEvent lastState = stateBuffer.getLast();
CharacterStateEvent newState = new CharacterStateEvent(lastState);
newState.setVelocity(impulse.add(newState.getVelocity()));
newState.setTime(time.getGameTimeInMs());
newState.setGrounded(false);
stateBuffer.add(newState);
characterMovementSystemUtility.setToState(entity, newState);
}
private CharacterStateEvent createInitialState(EntityRef entity) {
LocationComponent location = entity.getComponent(LocationComponent.class);
return new CharacterStateEvent(time.getGameTimeInMs(), 0, location.getWorldPosition(), location.getWorldRotation(), new Vector3f(), 0, 0, MovementMode.WALKING, false);
}
private CharacterStateEvent stepState(CharacterMoveInputEvent input, CharacterStateEvent lastState, EntityRef entity) {
return characterMover.step(lastState, input, entity);
}
@Override
public void update(float delta) {
if (nextSendState < time.getGameTimeInMs()) {
long lastSendTime = nextSendState - TIME_BETWEEN_STATE_REPLICATE;
for (Map.Entry<EntityRef, CircularBuffer<CharacterStateEvent>> entry : characterStates.entrySet()) {
if (entry.getValue().size() > 0) {
CharacterStateEvent state = entry.getValue().getLast();
if (state.getTime() >= lastSendTime) {
entry.getKey().send(state);
} else if (time.getGameTimeInMs() - state.getTime() > MAX_INPUT_UNDERFLOW) {
// Haven't received input in a while, repeat last input
CharacterMoveInputEvent lastInput = lastInputEvent.get(entry.getKey());
if (lastInput != null) {
CharacterMoveInputEvent newInput = new CharacterMoveInputEvent(lastInput, (int) (time.getGameTimeInMs() - state.getTime()));
onPlayerInput(newInput, entry.getKey());
}
entry.getKey().send(state);
}
}
}
nextSendState += TIME_BETWEEN_STATE_REPLICATE;
}
long renderTime = time.getGameTimeInMs() - RENDER_DELAY;
for (Map.Entry<EntityRef, CircularBuffer<CharacterStateEvent>> entry : characterStates.entrySet()) {
if (entry.getKey().equals(localPlayer.getCharacterEntity())) {
continue;
}
setToTime(renderTime, entry.getKey(), entry.getValue());
}
}
private void setToTime(long renderTime, EntityRef entity, CircularBuffer<CharacterStateEvent> buffer) {
CharacterStateEvent previous = null;
CharacterStateEvent next = null;
for (CharacterStateEvent state : buffer) {
if (state.getTime() <= renderTime) {
previous = state;
} else {
next = state;
break;
}
}
if (previous != null) {
if (next != null) {
characterMovementSystemUtility.setToInterpolateState(entity, previous, next, renderTime);
} else {
characterMovementSystemUtility.setToExtrapolateState(entity, previous, renderTime);
}
}
}
/**
 * Rewinds the world to what the given client was seeing at {@code timeMs}:
 * characters owned by that client are set to their newest state, while all
 * other characters are moved back to the client's delayed render time.
 *
 * @param client the client whose view is being reconstructed
 * @param timeMs the game time (ms) to compensate to
 */
@Override
public void lagCompensate(EntityRef client, long timeMs) {
    for (Map.Entry<EntityRef, CircularBuffer<CharacterStateEvent>> entry : characterStates.entrySet()) {
        EntityRef character = entry.getKey();
        CircularBuffer<CharacterStateEvent> states = entry.getValue();
        if (networkSystem.getOwnerEntity(character).equals(client)) {
            // The client's own character always uses its latest known state.
            characterMovementSystemUtility.setToState(character, states.getLast());
        } else {
            // Everyone else is placed where the client saw them.
            setToTime(timeMs - RENDER_DELAY, character, states);
        }
    }
}
/**
 * Undoes lag compensation, returning every tracked character to the normal
 * delayed render time relative to the current game time.
 */
@Override
public void restoreToPresent() {
    long displayTime = time.getGameTimeInMs() - RENDER_DELAY;
    for (Map.Entry<EntityRef, CircularBuffer<CharacterStateEvent>> tracked : characterStates.entrySet()) {
        setToTime(displayTime, tracked.getKey(), tracked.getValue());
    }
}
}
| |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.route53.model;
import java.io.Serializable;
import com.amazonaws.AmazonWebServiceRequest;
/**
* <p>
* A complex type that contains information about the traffic policy that you
* want to create.
* </p>
*/
public class CreateTrafficPolicyRequest extends AmazonWebServiceRequest
        implements Serializable, Cloneable {

    /** The name of the traffic policy. */
    private String name;

    /** The definition of this traffic policy in JSON format. */
    private String document;

    /** Any comments that you want to include about the traffic policy. */
    private String comment;

    /**
     * Sets the name of the traffic policy.
     *
     * @param name The name of the traffic policy.
     */
    public void setName(String name) {
        this.name = name;
    }

    /**
     * Returns the name of the traffic policy.
     *
     * @return The name of the traffic policy.
     */
    public String getName() {
        return name;
    }

    /**
     * Fluent variant of {@link #setName(String)}.
     *
     * @param name The name of the traffic policy.
     * @return This request, so that method calls can be chained together.
     */
    public CreateTrafficPolicyRequest withName(String name) {
        this.name = name;
        return this;
    }

    /**
     * Sets the definition of this traffic policy in JSON format.
     *
     * @param document The definition of this traffic policy in JSON format.
     */
    public void setDocument(String document) {
        this.document = document;
    }

    /**
     * Returns the definition of this traffic policy in JSON format.
     *
     * @return The definition of this traffic policy in JSON format.
     */
    public String getDocument() {
        return document;
    }

    /**
     * Fluent variant of {@link #setDocument(String)}.
     *
     * @param document The definition of this traffic policy in JSON format.
     * @return This request, so that method calls can be chained together.
     */
    public CreateTrafficPolicyRequest withDocument(String document) {
        this.document = document;
        return this;
    }

    /**
     * Sets any comments that you want to include about the traffic policy.
     *
     * @param comment Any comments that you want to include about the traffic
     *        policy.
     */
    public void setComment(String comment) {
        this.comment = comment;
    }

    /**
     * Returns any comments that you want to include about the traffic policy.
     *
     * @return Any comments that you want to include about the traffic policy.
     */
    public String getComment() {
        return comment;
    }

    /**
     * Fluent variant of {@link #setComment(String)}.
     *
     * @param comment Any comments that you want to include about the traffic
     *        policy.
     * @return This request, so that method calls can be chained together.
     */
    public CreateTrafficPolicyRequest withComment(String comment) {
        this.comment = comment;
        return this;
    }

    /**
     * Renders the non-null fields of this request; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (name != null) {
            sb.append("Name: ").append(name).append(",");
        }
        if (document != null) {
            sb.append("Document: ").append(document).append(",");
        }
        if (comment != null) {
            sb.append("Comment: ").append(comment);
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so no explicit null check is needed.
        if (!(obj instanceof CreateTrafficPolicyRequest)) {
            return false;
        }
        CreateTrafficPolicyRequest other = (CreateTrafficPolicyRequest) obj;
        return bothEqual(name, other.getName())
                && bothEqual(document, other.getDocument())
                && bothEqual(comment, other.getComment());
    }

    // Null-safe equality: true when both values are null or equal.
    private static boolean bothEqual(Object a, Object b) {
        return (a == null) ? (b == null) : a.equals(b);
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + nullSafeHash(name);
        hashCode = prime * hashCode + nullSafeHash(document);
        hashCode = prime * hashCode + nullSafeHash(comment);
        return hashCode;
    }

    // Hash of a possibly-null value; null contributes 0, matching the
    // original generated formula.
    private static int nullSafeHash(Object o) {
        return o == null ? 0 : o.hashCode();
    }

    @Override
    public CreateTrafficPolicyRequest clone() {
        return (CreateTrafficPolicyRequest) super.clone();
    }
}
| |
// Udacity Project 6 Sunshine Watchface
// Author : feorin94
// Sunshine watchface based on 'WatchFace sample' implementation
/*
* Copyright (C) 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.android.sunshine.app;
import android.content.Intent;
import android.content.SharedPreferences;
import android.net.Uri;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.support.v4.app.ActivityCompat;
import android.support.v4.app.ActivityOptionsCompat;
import android.support.v4.util.Pair;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import com.example.android.sunshine.app.data.WeatherContract;
import com.example.android.sunshine.app.gcm.RegistrationIntentService;
import com.example.android.sunshine.app.sync.SunshineSyncAdapter;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.GoogleApiAvailability;
public class MainActivity extends AppCompatActivity implements ForecastFragment.Callback{

    private final String LOG_TAG = MainActivity.class.getSimpleName();
    // Tag used to find/replace the detail fragment in two-pane mode.
    private static final String DETAILFRAGMENT_TAG = "DFTAG";
    // Request code for the Play Services error-resolution dialog.
    private final static int PLAY_SERVICES_RESOLUTION_REQUEST = 9000;
    // SharedPreferences key: whether the GCM registration token reached our server.
    public static final String SENT_TOKEN_TO_SERVER = "sentTokenToServer";

    // True when the large-screen layout (res/layout-sw600dp) with a detail pane is in use.
    private boolean mTwoPane;
    // Location preference captured at the last onCreate/onResume, used to
    // detect a change made while this activity was not in the foreground.
    private String mLocation;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mLocation = Utility.getPreferredLocation(this);
        // Optional deep-link URI (e.g. from a notification) selecting a specific day.
        Uri contentUri = getIntent() != null ? getIntent().getData() : null;
        setContentView(R.layout.activity_main);
        Toolbar toolbar = (Toolbar)findViewById(R.id.toolbar);
        setSupportActionBar(toolbar);
        getSupportActionBar().setDisplayShowTitleEnabled(false);
        if (findViewById(R.id.weather_detail_container) != null) {
            // The detail container view will be present only in the large-screen layouts
            // (res/layout-sw600dp). If this view is present, then the activity should be
            // in two-pane mode.
            mTwoPane = true;
            // In two-pane mode, show the detail view in this activity by
            // adding or replacing the detail fragment using a
            // fragment transaction. Only on first creation: on a restore the
            // fragment manager re-creates the fragment itself.
            if (savedInstanceState == null) {
                DetailFragment fragment = new DetailFragment();
                if (contentUri != null) {
                    Bundle args = new Bundle();
                    args.putParcelable(DetailFragment.DETAIL_URI, contentUri);
                    fragment.setArguments(args);
                }
                getSupportFragmentManager().beginTransaction()
                        .replace(R.id.weather_detail_container, fragment, DETAILFRAGMENT_TAG)
                        .commit();
            }
        } else {
            mTwoPane = false;
            // Remove the shadow under the action bar so the today item blends into it.
            getSupportActionBar().setElevation(0f);
        }
        // The forecast fragment is declared statically in activity_main, so it is
        // always present after setContentView().
        ForecastFragment forecastFragment = ((ForecastFragment)getSupportFragmentManager()
                .findFragmentById(R.id.fragment_forecast));
        forecastFragment.setUseTodayLayout(!mTwoPane);
        if (contentUri != null) {
            forecastFragment.setInitialSelectedDate(
                    WeatherContract.WeatherEntry.getDateFromUri(contentUri));
        }
        SunshineSyncAdapter.initializeSyncAdapter(this);
        // If Google Play Services is up to date, we'll want to register GCM. If it is not, we'll
        // skip the registration and this device will not receive any downstream messages from
        // our fake server. Because weather alerts are not a core feature of the app, this should
        // not affect the behavior of the app, from a user perspective.
        if (checkPlayServices()) {
            // Because this is the initial creation of the app, we'll want to be certain we have
            // a token. If we do not, then we will start the IntentService that will register this
            // application with GCM.
            SharedPreferences sharedPreferences =
                    PreferenceManager.getDefaultSharedPreferences(this);
            boolean sentToken = sharedPreferences.getBoolean(SENT_TOKEN_TO_SERVER, false);
            if (!sentToken) {
                Intent intent = new Intent(this, RegistrationIntentService.class);
                startService(intent);
            }
        }
        SunshineSyncAdapter.syncImmediately(this);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.main, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();
        //noinspection SimplifiableIfStatement
        if (id == R.id.action_settings) {
            startActivity(new Intent(this, SettingsActivity.class));
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    @Override
    protected void onResume() {
        super.onResume();
        String location = Utility.getPreferredLocation(this);
        // update the location in our second pane using the fragment manager
        // if the preference changed while we were paused (e.g. in Settings).
        if (location != null && !location.equals(mLocation)) {
            ForecastFragment ff = (ForecastFragment)getSupportFragmentManager().findFragmentById(R.id.fragment_forecast);
            if ( null != ff ) {
                ff.onLocationChanged();
            }
            DetailFragment df = (DetailFragment)getSupportFragmentManager().findFragmentByTag(DETAILFRAGMENT_TAG);
            if ( null != df ) {
                df.onLocationChanged(location);
            }
            mLocation = location;
        }
    }

    /**
     * ForecastFragment.Callback: a forecast list item was selected.
     * Shows the detail either in the second pane (two-pane mode) or in a new
     * DetailActivity with a shared-element transition on the weather icon.
     */
    @Override
    public void onItemSelected(Uri contentUri, ForecastAdapter.ForecastAdapterViewHolder vh) {
        if (mTwoPane) {
            // In two-pane mode, show the detail view in this activity by
            // adding or replacing the detail fragment using a
            // fragment transaction.
            Bundle args = new Bundle();
            args.putParcelable(DetailFragment.DETAIL_URI, contentUri);
            DetailFragment fragment = new DetailFragment();
            fragment.setArguments(args);
            getSupportFragmentManager().beginTransaction()
                    .replace(R.id.weather_detail_container, fragment, DETAILFRAGMENT_TAG)
                    .commit();
        } else {
            Intent intent = new Intent(this, DetailActivity.class)
                    .setData(contentUri);
            ActivityOptionsCompat activityOptions =
                    ActivityOptionsCompat.makeSceneTransitionAnimation(this,
                            new Pair<View, String>(vh.mIconView, getString(R.string.detail_icon_transition_name)));
            ActivityCompat.startActivity(this, intent, activityOptions.toBundle());
        }
    }

    /**
     * Check the device to make sure it has the Google Play Services APK. If
     * it doesn't, display a dialog that allows users to download the APK from
     * the Google Play Store or enable it in the device's system settings.
     *
     * @return true if Play Services is available; false otherwise (the
     *         activity is finished when the error is not user-resolvable).
     */
    private boolean checkPlayServices() {
        GoogleApiAvailability apiAvailability = GoogleApiAvailability.getInstance();
        int resultCode = apiAvailability.isGooglePlayServicesAvailable(this);
        if (resultCode != ConnectionResult.SUCCESS) {
            if (apiAvailability.isUserResolvableError(resultCode)) {
                apiAvailability.getErrorDialog(this, resultCode,
                        PLAY_SERVICES_RESOLUTION_REQUEST).show();
            } else {
                Log.i(LOG_TAG, "This device is not supported.");
                finish();
            }
            return false;
        }
        return true;
    }

    @Override
    protected void onStop() {
        // Release the sync adapter's Google API client while not visible.
        SunshineSyncAdapter.disconnectGoogleApiClient();
        super.onStop();
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.codehaus.groovy.vmplugin.v7;
import groovy.lang.Closure;
import groovy.lang.GString;
import groovy.lang.GroovyObject;
import groovy.util.ProxyGenerator;
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.MethodType;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import org.codehaus.groovy.GroovyBugError;
import org.codehaus.groovy.reflection.stdclasses.CachedSAMClass;
import org.codehaus.groovy.runtime.ConvertedClosure;
import org.codehaus.groovy.runtime.typehandling.DefaultTypeTransformation;
import org.codehaus.groovy.transform.trait.Traits;
/**
 * This class contains several transformers used during method invocation.
 * @author <a href="mailto:blackdrag@gmx.org">Jochen "blackdrag" Theodorou</a>
 */
public class TypeTransformers {
    private static final MethodHandles.Lookup LOOKUP = MethodHandles.lookup();
    // Conversion handles, composed once here and reused for every call-site
    // argument transformation.
    private static final MethodHandle
        TO_STRING, TO_BYTE, TO_INT, TO_LONG, TO_SHORT,
        TO_FLOAT, TO_DOUBLE, TO_BIG_INT, TO_BIG_DEC, AS_ARRAY,
        TO_REFLECTIVE_PROXY, TO_GENERATED_PROXY, TO_SAMTRAIT_PROXY;
    static {
        try {
            // Plain number/string conversions delegate to the corresponding
            // virtual methods on Object/Number.
            TO_STRING = LOOKUP.findVirtual(Object.class, "toString", MethodType.methodType(String.class));
            TO_BYTE = LOOKUP.findVirtual(Number.class, "byteValue", MethodType.methodType(Byte.TYPE));
            TO_SHORT = LOOKUP.findVirtual(Number.class, "shortValue", MethodType.methodType(Short.TYPE));
            TO_INT = LOOKUP.findVirtual(Number.class, "intValue", MethodType.methodType(Integer.TYPE));
            TO_LONG = LOOKUP.findVirtual(Number.class, "longValue", MethodType.methodType(Long.TYPE));
            TO_FLOAT = LOOKUP.findVirtual(Number.class, "floatValue", MethodType.methodType(Float.TYPE));
            TO_DOUBLE = LOOKUP.findVirtual(Number.class, "doubleValue", MethodType.methodType(Double.TYPE));
            // BigDecimal conversion is done by using the double value
            // of the given number.
            MethodHandle tmp = LOOKUP.findConstructor(BigDecimal.class, MethodType.methodType(Void.TYPE, Double.TYPE));
            TO_BIG_DEC = MethodHandles.filterReturnValue(TO_DOUBLE, tmp);
            // BigInteger conversion is done by using the string representation
            // of the given number.
            tmp = LOOKUP.findConstructor(BigInteger.class, MethodType.methodType(Void.TYPE, String.class));
            TO_BIG_INT = MethodHandles.filterReturnValue(TO_STRING, tmp);
            // generic array to array conversion
            AS_ARRAY = LOOKUP.findStatic(DefaultTypeTransformation.class, "asArray", MethodType.methodType(Object.class, Object.class, Class.class));
            // reflective proxy generation; since we need a ConvertedClosure but have only a normal Closure, we need to create that wrapper object as well
            MethodHandle newProxyInstance = LOOKUP.findStatic(Proxy.class, "newProxyInstance",
                    MethodType.methodType(Object.class, ClassLoader.class, Class[].class, InvocationHandler.class));
            MethodHandle newConvertedClosure = LOOKUP.findConstructor(ConvertedClosure.class, MethodType.methodType(Void.TYPE, Closure.class, String.class));
            // prepare target newProxyInstance for fold to drop additional arguments needed by newConvertedClosure
            MethodType newOrder = newProxyInstance.type().dropParameterTypes(2, 3);
            newOrder = newOrder.insertParameterTypes(0, InvocationHandler.class, Closure.class, String.class);
            tmp = MethodHandles.permuteArguments(newProxyInstance, newOrder, 3, 4, 0);
            // execute fold:
            TO_REFLECTIVE_PROXY = MethodHandles.foldArguments(tmp, newConvertedClosure.asType(newConvertedClosure.type().changeReturnType(InvocationHandler.class)));
            {
                // generated proxy using a map to store the closure
                MethodHandle map = LOOKUP.findStatic(Collections.class, "singletonMap",
                        MethodType.methodType(Map.class, Object.class, Object.class));
                newProxyInstance = LOOKUP.findVirtual(ProxyGenerator.class, "instantiateAggregateFromBaseClass",
                        MethodType.methodType(GroovyObject.class, Map.class, Class.class));
                newOrder = newProxyInstance.type().dropParameterTypes(1, 2);
                newOrder = newOrder.insertParameterTypes(0, Map.class, Object.class, Object.class);
                tmp = MethodHandles.permuteArguments(newProxyInstance, newOrder, 3, 0, 4);
                tmp = MethodHandles.foldArguments(tmp, map);
                TO_GENERATED_PROXY = tmp;
            }
            {
                // Trait SAM coercion generated proxy using a map to store the closure
                MethodHandle map = LOOKUP.findStatic(Collections.class, "singletonMap",
                        MethodType.methodType(Map.class, Object.class, Object.class));
                newProxyInstance = LOOKUP.findVirtual(ProxyGenerator.class, "instantiateAggregate",
                        MethodType.methodType(GroovyObject.class,Map.class, List.class));
                newOrder = newProxyInstance.type().dropParameterTypes(1, 2);
                newOrder = newOrder.insertParameterTypes(0, Map.class, Object.class, Object.class);
                tmp = MethodHandles.permuteArguments(newProxyInstance, newOrder, 3, 0, 4);
                tmp = MethodHandles.foldArguments(tmp, map);
                TO_SAMTRAIT_PROXY = tmp;
            }
        } catch (Exception e) {
            // Any failure composing the handles is a bug in Groovy itself.
            throw new GroovyBugError(e);
        }
    }

    /**
     * Adds a type transformer applied at runtime.
     * This method handles transformations to String from GString,
     * array transformations and number based transformations
     *
     * @param handle    the target handle the transformer is applied to
     * @param pos       argument position the transformer filters
     * @param arg       the actual runtime argument at that position
     * @param parameter the declared parameter type to convert to
     * @return the handle with the transformer installed as a filter
     */
    protected static MethodHandle addTransformer(MethodHandle handle, int pos, Object arg, Class parameter) {
        MethodHandle transformer=null;
        if (arg instanceof GString) {
            transformer = TO_STRING;
        } else if (arg instanceof Closure) {
            transformer = createSAMTransform(arg, parameter);
        } else if (Number.class.isAssignableFrom(parameter)) {
            transformer = selectNumberTransformer(parameter, arg);
        } else if (parameter.isArray()) {
            transformer = MethodHandles.insertArguments(AS_ARRAY, 1, parameter);
        }
        if (transformer==null) throw new GroovyBugError("Unknown transformation for argument "+arg+" at position "+pos+" with "+arg.getClass()+" for parameter of type "+parameter);
        return applyUnsharpFilter(handle, pos, transformer);
    }

    /**
     * creates a method handle able to transform the given Closure into a SAM type
     * if the given parameter is a SAM type
     */
    private static MethodHandle createSAMTransform(Object arg, Class parameter) {
        Method method = CachedSAMClass.getSAMMethod(parameter);
        // Not a SAM type: caller (addTransformer) reports this as a bug.
        if (method == null) return null;
        // TODO: have to think about how to optimize this!
        if (parameter.isInterface()) {
            if (Traits.isTrait(parameter)) {
                // the following code will basically do this:
                // Map<String,Closure> impl = Collections.singletonMap(method.getName(),arg);
                // return ProxyGenerator.INSTANCE.instantiateAggregate(impl,Collections.singletonList(clazz));
                // TO_SAMTRAIT_PROXY is a handle (Object,Object,ProxyGenerator,Class)GroovyObject
                // where the second object is the input closure, everything else
                // needs to be provided and is in remaining order: method name,
                // ProxyGenerator.INSTANCE and singletonList(parameter)
                MethodHandle ret = TO_SAMTRAIT_PROXY;
                ret = MethodHandles.insertArguments(ret, 2, ProxyGenerator.INSTANCE, Collections.singletonList(parameter));
                ret = MethodHandles.insertArguments(ret, 0, method.getName());
                return ret;
            }
            // the following code will basically do this:
            // return Proxy.newProxyInstance(
            //        arg.getClass().getClassLoader(),
            //        new Class[]{parameter},
            //        new ConvertedClosure((Closure) arg));
            // TO_REFLECTIVE_PROXY will do that for us, though
            // input is the closure, the method name, the class loader and the
            // class[]. All of that but the closure must be provided here
            MethodHandle ret = TO_REFLECTIVE_PROXY;
            ret = MethodHandles.insertArguments(ret, 1,
                    method.getName(),
                    arg.getClass().getClassLoader(),
                    new Class[]{parameter});
            return ret;
        } else {
            // the following code will basically do this:
            // Map<String, Object> m = Collections.singletonMap(method.getName(), arg);
            // return ProxyGenerator.INSTANCE.
            //          instantiateAggregateFromBaseClass(m, parameter);
            // TO_GENERATED_PROXY is a handle (Object,Object,ProxyGenerator,Class)GroovyObject
            // where the second object is the input closure, everything else
            // needs to be provided and is in remaining order: method name,
            // ProxyGenerator.INSTANCE and parameter
            MethodHandle ret = TO_GENERATED_PROXY;
            ret = MethodHandles.insertArguments(ret, 2, ProxyGenerator.INSTANCE, parameter);
            ret = MethodHandles.insertArguments(ret, 0, method.getName());
            return ret;
        }
    }

    /**
     * Apply a transformer as filter.
     * The filter may not match exactly in the types. In this case needed
     * additional type transformations are done by {@link MethodHandle#asType(MethodType)}
     */
    public static MethodHandle applyUnsharpFilter(MethodHandle handle, int pos, MethodHandle transformer) {
        MethodType type = transformer.type();
        Class given = handle.type().parameterType(pos);
        if (type.returnType() != given || type.parameterType(0) != given) {
            transformer = transformer.asType(MethodType.methodType(given, type.parameterType(0)));
        }
        return MethodHandles.filterArguments(handle, pos, transformer);
    }

    /**
     * returns a transformer later applied as filter to transform one
     * number into another
     *
     * @param param the declared parameter type (may be primitive; wrapped first)
     * @param arg   the runtime argument (unused here; kept for call symmetry)
     * @return the matching conversion handle, or null when no conversion exists
     */
    private static MethodHandle selectNumberTransformer(Class param, Object arg) {
        param = TypeHelper.getWrapperClass(param);
        if (param == Byte.class) {
            return TO_BYTE;
        } else if (param == Character.class || param == Integer.class) {
            return TO_INT;
        } else if (param == Long.class) {
            return TO_LONG;
        } else if (param == Float.class) {
            return TO_FLOAT;
        } else if (param == Double.class) {
            return TO_DOUBLE;
        } else if (param == BigInteger.class) {
            return TO_BIG_INT;
        } else if (param == BigDecimal.class) {
            return TO_BIG_DEC;
        } else if (param == Short.class) {
            return TO_SHORT;
        } else {
            return null;
        }
    }
}
| |
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.emitter.graphite;
import com.codahale.metrics.graphite.Graphite;
import com.codahale.metrics.graphite.GraphiteSender;
import com.codahale.metrics.graphite.PickledGraphite;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import io.druid.java.util.common.ISE;
import io.druid.java.util.common.logger.Logger;
import io.druid.java.util.emitter.core.Emitter;
import io.druid.java.util.emitter.core.Event;
import io.druid.java.util.emitter.service.AlertEvent;
import io.druid.java.util.emitter.service.ServiceMetricEvent;
import io.druid.server.log.EmittingRequestLogger;
import java.io.IOException;
import java.net.SocketException;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.regex.Pattern;
/**
 * Emitter that forwards Druid metric events to a graphite server, buffering
 * them in a bounded queue and flushing from a scheduled background thread.
 * Alert and request-log events are delegated to the configured sub-emitters.
 */
public class GraphiteEmitter implements Emitter
{
  private static final Logger log = new Logger(GraphiteEmitter.class);

  // Compiled once instead of on every sanitize() call: matches runs of
  // whitespace or dots, neither of which may appear inside a graphite
  // metric path segment.
  private static final Pattern DOT_OR_WHITESPACE = Pattern.compile("[\\s]+|[.]+");

  private static final long FLUSH_TIMEOUT = 60000; // default flush wait 1 min

  private final DruidToGraphiteEventConverter graphiteEventConverter;
  private final GraphiteEmitterConfig graphiteEmitterConfig;
  private final List<Emitter> alertEmitters;
  private final List<Emitter> requestLogEmitters;
  private final AtomicBoolean started = new AtomicBoolean(false);
  private final LinkedBlockingQueue<GraphiteEvent> eventsQueue;
  private final ScheduledExecutorService exec = Executors.newScheduledThreadPool(2, new ThreadFactoryBuilder()
      .setDaemon(true)
      .setNameFormat("GraphiteEmitter-%s")
      .build()); // Thread pool of two in order to schedule flush runnable
  // Number of events dropped because the queue was full; logged every 1000th drop.
  private final AtomicLong countLostEvents = new AtomicLong(0);

  public GraphiteEmitter(
      GraphiteEmitterConfig graphiteEmitterConfig,
      List<Emitter> alertEmitters,
      List<Emitter> requestLogEmitters
  )
  {
    this.alertEmitters = alertEmitters;
    this.requestLogEmitters = requestLogEmitters;
    this.graphiteEmitterConfig = graphiteEmitterConfig;
    this.graphiteEventConverter = graphiteEmitterConfig.getDruidToGraphiteEventConverter();
    // Bounded, typed queue (was a raw LinkedBlockingQueue) so memory stays
    // capped if the consumer falls behind.
    this.eventsQueue = new LinkedBlockingQueue<>(graphiteEmitterConfig.getMaxQueueSize());
  }

  /**
   * Starts the periodic consumer that drains the queue to graphite.
   * Idempotent: subsequent calls are no-ops once started.
   */
  @Override
  public void start()
  {
    log.info("Starting Graphite Emitter.");
    synchronized (started) {
      if (!started.get()) {
        exec.scheduleAtFixedRate(
            new ConsumerRunnable(),
            graphiteEmitterConfig.getFlushPeriod(),
            graphiteEmitterConfig.getFlushPeriod(),
            TimeUnit.MILLISECONDS
        );
        started.set(true);
      }
    }
  }

  /**
   * Routes an event: metric events are converted and queued for graphite,
   * request-log events go to the request-log emitters, alert events to the
   * alert emitters (or are logged and dropped if none are configured).
   *
   * @throws ISE if called before {@link #start()}
   */
  @Override
  public void emit(Event event)
  {
    if (!started.get()) {
      throw new ISE("WTF emit was called while service is not started yet");
    }
    if (event instanceof ServiceMetricEvent) {
      final GraphiteEvent graphiteEvent = graphiteEventConverter.druidEventToGraphite((ServiceMetricEvent) event);
      if (graphiteEvent == null) {
        // Converter filtered this metric out; nothing to send.
        return;
      }
      try {
        // Bounded wait so emit() cannot block callers indefinitely when the
        // queue is full; on timeout the event is counted as lost instead.
        final boolean isSuccessful = eventsQueue.offer(
            graphiteEvent,
            graphiteEmitterConfig.getEmitWaitTime(),
            TimeUnit.MILLISECONDS
        );
        if (!isSuccessful) {
          // Log only every 1000th drop to avoid flooding the log.
          if (countLostEvents.getAndIncrement() % 1000 == 0) {
            log.error(
                "Lost total of [%s] events because of emitter queue is full. Please increase the capacity or/and the consumer frequency",
                countLostEvents.get()
            );
          }
        }
      }
      catch (InterruptedException e) {
        log.error(e, "got interrupted with message [%s]", e.getMessage());
        Thread.currentThread().interrupt();
      }
    } else if (event instanceof EmittingRequestLogger.RequestLogEvent) {
      for (Emitter emitter : requestLogEmitters) {
        emitter.emit(event);
      }
    } else if (!alertEmitters.isEmpty() && event instanceof AlertEvent) {
      for (Emitter emitter : alertEmitters) {
        emitter.emit(event);
      }
    } else if (event instanceof AlertEvent) {
      // No alert emitters configured: record the alert in the log instead.
      AlertEvent alertEvent = (AlertEvent) event;
      log.error(
          "The following alert is dropped, description is [%s], severity is [%s]",
          alertEvent.getDescription(), alertEvent.getSeverity()
      );
    } else {
      log.error("unknown event type [%s]", event.getClass());
    }
  }

  /**
   * Drains the event queue and writes each event to the graphite server,
   * reconnecting as needed. Scheduled periodically and run once per flush().
   */
  private class ConsumerRunnable implements Runnable
  {
    private final GraphiteSender graphite;

    public ConsumerRunnable()
    {
      if (graphiteEmitterConfig.getProtocol().equals(GraphiteEmitterConfig.PLAINTEXT_PROTOCOL)) {
        graphite = new Graphite(
            graphiteEmitterConfig.getHostname(),
            graphiteEmitterConfig.getPort()
        );
      } else {
        // Pickle protocol batches multiple metrics per network write.
        graphite = new PickledGraphite(
            graphiteEmitterConfig.getHostname(),
            graphiteEmitterConfig.getPort(),
            graphiteEmitterConfig.getBatchSize()
        );
      }
      log.info("Using %s protocol.", graphiteEmitterConfig.getProtocol());
    }

    @Override
    public void run()
    {
      try {
        if (!graphite.isConnected()) {
          log.info("trying to connect to graphite server");
          graphite.connect();
        }
        while (eventsQueue.size() > 0 && !exec.isShutdown()) {
          try {
            final GraphiteEvent graphiteEvent = eventsQueue.poll(
                graphiteEmitterConfig.getWaitForEventTime(),
                TimeUnit.MILLISECONDS
            );
            if (graphiteEvent != null) {
              log.debug(
                  "sent [%s] with value [%s] and time [%s]",
                  graphiteEvent.getEventPath(),
                  graphiteEvent.getValue(),
                  graphiteEvent.getTimestamp()
              );
              graphite.send(
                  graphiteEvent.getEventPath(),
                  graphiteEvent.getValue(),
                  graphiteEvent.getTimestamp()
              );
            }
          }
          catch (InterruptedException | IOException e) {
            log.error(e, e.getMessage());
            if (e instanceof InterruptedException) {
              Thread.currentThread().interrupt();
              break;
            } else if (e instanceof SocketException) {
              // This is antagonistic to general Closeable contract in Java,
              // it is needed to allow re-connection in case of the socket is closed due long period of inactivity
              graphite.close();
              log.warn("Trying to re-connect to graphite server");
              graphite.connect();
            }
          }
        }
      }
      catch (Exception e) {
        log.error(e, e.getMessage());
        if (e instanceof InterruptedException) {
          Thread.currentThread().interrupt();
        }
      }
    }
  }

  /**
   * Synchronously drains the queue, waiting up to {@link #FLUSH_TIMEOUT} ms.
   * No-op if the emitter was never started.
   */
  @Override
  public void flush()
  {
    if (started.get()) {
      Future<?> future = exec.schedule(new ConsumerRunnable(), 0, TimeUnit.MILLISECONDS);
      try {
        future.get(FLUSH_TIMEOUT, TimeUnit.MILLISECONDS);
      }
      catch (InterruptedException | ExecutionException | TimeoutException e) {
        if (e instanceof InterruptedException) {
          throw new RuntimeException("interrupted flushing elements from queue", e);
        }
        log.error(e, e.getMessage());
      }
    }
  }

  @Override
  public void close()
  {
    // Flush pending events before shutting the consumer pool down.
    flush();
    started.set(false);
    exec.shutdown();
  }

  /**
   * Replaces whitespace and dot runs with underscores so the namespace forms
   * a single graphite path segment.
   */
  protected static String sanitize(String namespace)
  {
    return sanitize(namespace, false);
  }

  /**
   * Replaces whitespace and dot runs with underscores; optionally also
   * converts '/' to '.' so slash-separated names become graphite sub-paths.
   *
   * @param namespace         raw metric name fragment
   * @param replaceSlashToDot when true, '/' becomes a path separator '.'
   * @return the sanitized namespace
   */
  protected static String sanitize(String namespace, Boolean replaceSlashToDot)
  {
    String sanitizedNamespace = DOT_OR_WHITESPACE.matcher(namespace).replaceAll("_");
    if (replaceSlashToDot) {
      sanitizedNamespace = sanitizedNamespace.replace("/", ".");
    }
    return sanitizedNamespace;
  }
}
| |
package net.jonp.armi.base;
import java.lang.reflect.AccessibleObject;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.rmi.NotBoundException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import org.apache.log4j.Logger;
/**
* Superclass for objects which can be represented by the command/response
* language (i.e. commands and responses).
*/
public abstract class AbstractLanguageObject
{
private static final Logger LOG = Logger.getLogger(AbstractLanguageObject.class);
/** The label on this communication, or <code>null</code>. */
protected String _label;
    /**
     * Construct a new AbstractLanguageObject.
     *
     * @param label The label, or <code>null</code>. The label <code>*</code> is
     *            reserved for the response when a command has a syntax error
     *            and cannot be parsed, and should not be used otherwise.
     */
    protected AbstractLanguageObject(final String label)
    {
        _label = label;
    }
/**
* Get the label.
*
* @return The label.
*/
public String getLabel()
{
return _label;
}
/**
* Set the label.
*
* @param label The new label.
*/
public void setLabel(final String label)
{
_label = label;
}
/**
* Convert this object into a legal statement in the command/response
* language.
*
* @param registry The ClassRegistry to use when building the statement.
* @return The statement.
* @throws NotBoundException If, during compilation, an object's class is
* not a command language primitive, and is not in the
* {@link #registry} is encountered.
*/
public abstract String toStatement(ClassRegistry registry)
throws NotBoundException;
// FUTURE: Have makeArgument() call getters/setters
/**
* Convert a generic object into a command/response language description.
*
* @param arg The object. All non-static, non-transient, non-final, public
* fields will be serialized, recursing into other objects as
* necessary. No getters/setters will be called.
* @param registry The Class Registry to use when building the argument.
* @return A string serialization of an object, which is a legal statement
* in the command/response language.
* @throws NotBoundException If the object's class is not a command language
* primitive, and is not in the {@link #registry}.
*/
protected String makeArgument(final Object arg, final ClassRegistry registry)
throws NotBoundException
{
return makeArgument(new ArrayList<Object>(), arg, registry);
}
/**
* Convert a generic object into a command/response language description.
*
* @param indexedValues Values that have already been serialized, so we can
* compute back references. Must include one entry for each call
* to this function, in order (depth-first).
* @param arg The object. All non-static, non-transient, non-final, public
* fields will be serialized, recursing into other objects as
* necessary. No getters/setters will be called.
* @param registry The Class Registry to use when building the argument.
* @return A string serialization of an object, which is a legal statement
* in the command/response language.
* @throws NotBoundException If the object's class is not a command language
* primitive, and is not in the {@link #registry}.
*/
protected String makeArgument(final List<Object> indexedValues, final Object arg, final ClassRegistry registry)
throws NotBoundException
{
final StringBuilder buf = new StringBuilder();
if (null == arg) {
// Special case: It is more space efficient (and clear) to write
// 'null' instead of a back-reference, so look for that first
// Also, we cannot guarantee that non-serializable classes will
// restore correctly
// For example, Throwable contains an ArrayList wrapped by
// Collections.unmodifiableList(); this construct is not
// serializable, and would be difficult to coerce without very
// special-purpose code
buf.append("null");
indexedValues.add(null);
}
else if (backIndex(indexedValues, arg, buf)) {
indexedValues.add(arg);
}
else {
indexedValues.add(arg);
if (arg instanceof Byte) {
buf.append(arg.toString()).append("Y");
}
else if (arg instanceof Float) {
buf.append(arg.toString()).append("F");
}
else if (arg instanceof Long) {
buf.append(arg.toString()).append("L");
}
else if (arg instanceof Short) {
buf.append(arg.toString()).append("T");
}
else if (arg instanceof Number) {
buf.append(arg.toString());
}
else if (arg instanceof CharSequence) {
String s = arg.toString();
s = s.replaceAll("\\\\", "\\\\\\\\"); // Replace \ with \\
s = s.replaceAll("\"", "\\\\\\\"");
buf.append("\"").append(s).append("\"");
}
else if (arg instanceof Boolean) {
buf.append(arg.toString());
}
else if (arg.getClass().isArray()) {
// FUTURE: Once primitive array deserialization is implemented,
// remove this check
if (arg.getClass().getComponentType().isPrimitive()) {
throw new IllegalArgumentException("Arrays of primitive types are not supported");
}
buf.append("array(");
buf.append(arg.getClass().getComponentType().getName());
buf.append(") [");
final Object[] elements = (Object[])arg;
boolean first = true;
for (final Object element : elements) {
if (first) {
first = false;
}
else {
buf.append(", ");
}
buf.append(makeArgument(indexedValues, element, registry));
}
buf.append("]");
}
else if (arg instanceof Collection) {
buf.append("collection(");
buf.append(arg.getClass().getName());
buf.append(") [");
boolean first = true;
for (final Object element : (Collection<?>)arg) {
if (first) {
first = false;
}
else {
buf.append(", ");
}
buf.append(makeArgument(indexedValues, element, registry));
}
buf.append("]");
}
else if (arg instanceof Map) {
buf.append("map(");
buf.append(arg.getClass().getName());
buf.append(") [");
boolean first = true;
for (final Map.Entry<?, ?> entry : ((Map<?, ?>)arg).entrySet()) {
if (first) {
first = false;
}
else {
buf.append(", ");
}
buf.append(makeArgument(indexedValues, entry.getKey(), registry));
buf.append(" = ");
buf.append(makeArgument(indexedValues, entry.getValue(), registry));
}
buf.append("]");
}
else {
buf.append(registry.reverseLookup(arg.getClass())).append(" (");
boolean first = true;
Class<?> clazz = arg.getClass();
final List<Field> fields = new ArrayList<Field>();
while (clazz != null) {
final Field[] fieldArray = clazz.getDeclaredFields();
AccessibleObject.setAccessible(fieldArray, true);
Collections.addAll(fields, fieldArray);
clazz = clazz.getSuperclass();
}
Collections.sort(fields, new Comparator<Field>() {
@Override
public int compare(final Field lhs, final Field rhs)
{
final int c = lhs.getDeclaringClass().getName().compareTo(rhs.getDeclaringClass().getName());
if (c != 0) {
return c;
}
return lhs.getName().compareTo(rhs.getName());
}
});
for (final Field field : fields) {
if (((field.getModifiers() & Modifier.TRANSIENT) != Modifier.TRANSIENT) &&
((field.getModifiers() & Modifier.STATIC) != Modifier.STATIC)) {
if (first) {
first = false;
}
else {
buf.append(", ");
}
try {
buf.append(field.getDeclaringClass().getName()).append(".").append(field.getName());
buf.append(" = ").append(makeArgument(indexedValues, field.get(arg), registry));
}
catch (final IllegalAccessException iae) {
// Should not happen, since we disabled access
// checking
throw new IllegalStateException("Field " + field.getName() + " of " + arg.getClass().getName() +
" is not accessible: " + iae.getMessage(), iae);
}
}
}
buf.append(")");
}
}
final String s = buf.toString();
LOG.debug("Converted '" + arg + "' to '" + s + "'");
return s;
}
private boolean backIndex(final List<Object> indexedValues, final Object arg, final StringBuilder buf)
{
// XXX: Cannot use indexOf() because we need to search for reference
// equality, not object equality
boolean found = false;
for (int idx = 0; idx < indexedValues.size(); idx++) {
if (arg == indexedValues.get(idx)) {
buf.append("ref " + idx);
found = true;
break;
}
}
return found;
}
}
| |
/*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
******************************************************************************/
package org.apache.sling.scripting.sightly.impl.engine.compiled;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.lang.StringUtils;
import org.apache.sling.api.resource.Resource;
import org.apache.sling.scripting.sightly.impl.engine.SightlyEngineConfiguration;
import org.apache.sling.scripting.sightly.java.compiler.ClassInfo;
/**
* Identifies a Java source file based on a {@link Resource}. Depending on the used constructor this class might provide the abstraction
* for either a Java source file generated for a HTL script or for a HTL {@link Resource}-based Java Use-API Object.
*/
public class SourceIdentifier implements ClassInfo {

    /**
     * Words that cannot be used as Java identifiers. Besides the keywords
     * proper this also contains the reserved literals {@code true},
     * {@code false} and {@code null}, which are equally illegal as
     * identifiers. The previous hand-written set was missing {@code throw}
     * and the three literals, so such names escaped mangling and produced
     * uncompilable generated sources.
     */
    private static final Set<String> javaKeywords = new HashSet<String>(Arrays.asList(
            "abstract", "assert", "boolean", "break", "byte", "case", "catch", "char", "class", "const",
            "continue", "default", "do", "double", "else", "enum", "extends", "final", "finally", "float",
            "for", "goto", "if", "implements", "import", "instanceof", "int", "interface", "long", "native",
            "new", "package", "private", "protected", "public", "return", "short", "static", "strictfp",
            "super", "switch", "synchronized", "this", "throw", "throws", "transient", "try", "void",
            "volatile", "while", "true", "false", "null"));

    // Matches one mangled character escape (__XXXX__, 4 lowercase hex digits)
    // with its prefix and suffix. NOTE: the constant keeps its historical typo
    // ("PATTER") because it is part of the public API.
    public static final Pattern MANGLED_CHAR_PATTER = Pattern.compile("(.*)(__[0-9a-f]{4}__)(.*)");

    private SightlyEngineConfiguration engineConfiguration;
    private String scriptName;
    private String simpleClassName;
    private String packageName;
    private String fullyQualifiedClassName;

    public SourceIdentifier(SightlyEngineConfiguration engineConfiguration, String scriptName) {
        this.engineConfiguration = engineConfiguration;
        this.scriptName = scriptName;
    }

    /**
     * {@inheritDoc}
     * <p>
     * Lazily derived from the last path segment of the script name, minus a
     * trailing {@code .java} extension if present.
     */
    @Override
    public String getSimpleClassName() {
        if (simpleClassName == null) {
            int lastSlashIndex = scriptName.lastIndexOf("/");
            String processingScriptName = scriptName;
            if (scriptName.endsWith(".java")) {
                processingScriptName = scriptName.substring(0, scriptName.length() - 5);
            }
            if (lastSlashIndex != -1) {
                simpleClassName = makeJavaPackage(processingScriptName.substring(lastSlashIndex));
            } else {
                simpleClassName = makeJavaPackage(processingScriptName);
            }
        }
        return simpleClassName;
    }

    /**
     * {@inheritDoc}
     * <p>
     * Lazily derived from the parent path of the script name; for non-Java
     * scripts the engine's bundle symbolic name is prepended.
     */
    @Override
    public String getPackageName() {
        if (packageName == null) {
            int lastSlashIndex = scriptName.lastIndexOf("/");
            String processingScriptName = scriptName;
            boolean javaFile = scriptName.endsWith(".java");
            if (javaFile) {
                processingScriptName = scriptName.substring(0, scriptName.length() - 5);
            }
            if (lastSlashIndex != -1) {
                packageName = makeJavaPackage(processingScriptName.substring(0, lastSlashIndex));
            } else {
                packageName = makeJavaPackage(processingScriptName);
            }
            if (!javaFile) {
                packageName = engineConfiguration.getBundleSymbolicName() + "." + packageName;
            }
        }
        return packageName;
    }

    /**
     * {@inheritDoc}
     * <p>
     * Lazily computed as {@code getPackageName() + "." + getSimpleClassName()}.
     */
    @Override
    public String getFullyQualifiedClassName() {
        if (fullyQualifiedClassName == null) {
            fullyQualifiedClassName = getPackageName() + "." + getSimpleClassName();
        }
        return fullyQualifiedClassName;
    }

    /**
     * Converts the given identifier to a legal Java identifier: illegal
     * characters are mangled (see {@link #mangleChar(char)}), dots become
     * underscores, and keywords get a trailing underscore.
     *
     * @param identifier the identifier to convert
     * @return legal Java identifier corresponding to the given identifier
     */
    public static String makeJavaIdentifier(String identifier) {
        if (identifier == null || identifier.isEmpty()) {
            // An empty segment cannot form an identifier; previously this
            // crashed on charAt(0). Fall back to a bare underscore.
            return "_";
        }
        StringBuilder modifiedIdentifier = new StringBuilder(identifier.length());
        if (!Character.isJavaIdentifierStart(identifier.charAt(0))) {
            modifiedIdentifier.append('_');
        }
        for (int i = 0; i < identifier.length(); i++) {
            char ch = identifier.charAt(i);
            if (Character.isJavaIdentifierPart(ch) && ch != '_') {
                modifiedIdentifier.append(ch);
            } else if (ch == '.') {
                modifiedIdentifier.append('_');
            } else {
                modifiedIdentifier.append(mangleChar(ch));
            }
        }
        if (isJavaKeyword(modifiedIdentifier.toString())) {
            modifiedIdentifier.append('_');
        }
        return modifiedIdentifier.toString();
    }

    /**
     * Converts the given scriptName to a Java package or fully-qualified class name
     *
     * @param scriptName the scriptName to convert
     * @return Java package corresponding to the given scriptName
     */
    public static String makeJavaPackage(String scriptName) {
        String[] classNameComponents = StringUtils.split(scriptName, '/');
        StringBuilder legalClassNames = new StringBuilder();
        for (int i = 0; i < classNameComponents.length; i++) {
            legalClassNames.append(makeJavaIdentifier(classNameComponents[i]));
            if (i < classNameComponents.length - 1) {
                legalClassNames.append('.');
            }
        }
        return legalClassNames.toString();
    }

    /**
     * Reverses {@link #makeJavaPackage(String)}: converts a fully qualified
     * class name back into the script path it was generated from.
     *
     * @param slashSubpackage subpackage to strip from the class name, or null/empty
     * @param fullyQualifiedClassName the class name to convert
     * @return the reconstructed script path, always starting with '/'
     */
    public static String getScriptName(String slashSubpackage, String fullyQualifiedClassName) {
        String className = fullyQualifiedClassName;
        StringBuilder pathElements = new StringBuilder("/");
        if (StringUtils.isNotEmpty(slashSubpackage) && className.contains(slashSubpackage)) {
            className = className.replaceAll(slashSubpackage + "\\.", "");
        }
        String[] classElements = StringUtils.split(className, '.');
        for (int i = 0; i < classElements.length; i++) {
            String classElem = classElements[i];
            Matcher matcher = MANGLED_CHAR_PATTER.matcher(classElem);
            if (matcher.matches()) {
                // Un-mangle every escape in the element. Re-match after each
                // replacement: the previous code called find() on a matcher
                // already consumed by matches(), which never advanced, so only
                // the last escape per element was restored.
                do {
                    String group = matcher.group(2);
                    char unmangled = unmangle(group);
                    // Literal replace(): the escape has no regex metacharacters,
                    // but the unmangled character might (e.g. '$' or '\').
                    classElem = classElem.replace(group, Character.toString(unmangled));
                    matcher = MANGLED_CHAR_PATTER.matcher(classElem);
                } while (matcher.matches());
            } else {
                int underscoreIndex = classElem.indexOf('_');
                if (underscoreIndex > -1) {
                    if (underscoreIndex == classElem.length() - 1) {
                        // a trailing '_' marks an escaped Java keyword
                        classElem = classElem.substring(0, classElem.length() - 1);
                    } else {
                        // '_' inside a segment was the mangling of '.'
                        classElem = classElem.replaceAll("_", ".");
                    }
                }
            }
            pathElements.append(classElem);
            if (i < classElements.length - 1) {
                pathElements.append("/");
            }
        }
        return pathElements.toString();
    }

    /**
     * Mangle the specified character to create a legal Java class name.
     *
     * @param ch the character to mangle
     * @return the mangled string ({@code __} + 4 lowercase hex digits + {@code __})
     */
    public static String mangleChar(char ch) {
        return String.format("__%04x__", (int) ch);
    }

    /**
     * Provided a mangled string (obtained by calling {@link #mangleChar(char)}) it will return the character that was mangled.
     *
     * @param mangled the mangled string
     * @return the original character
     */
    public static char unmangle(String mangled) {
        String toProcess = mangled.replaceAll("__", "");
        return (char) Integer.parseInt(toProcess, 16);
    }

    /**
     * Test whether the argument is a Java keyword (or reserved literal).
     *
     * @param key the String to test
     * @return {@code true} if the String is a Java keyword, {@code false} otherwise
     */
    public static boolean isJavaKeyword(String key) {
        return javaKeywords.contains(key);
    }
}
| |
/*
* =============================================================================
*
* Copyright (c) 2011-2014, The THYMELEAF team (http://www.thymeleaf.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* =============================================================================
*/
package org.thymeleaf.extras.java8time.expression;
import java.time.ZoneId;
import java.time.temporal.Temporal;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import org.thymeleaf.extras.java8time.util.TemporalArrayUtils;
import org.thymeleaf.extras.java8time.util.TemporalCreationUtils;
import org.thymeleaf.extras.java8time.util.TemporalFormattingUtils;
import org.thymeleaf.extras.java8time.util.TemporalListUtils;
import org.thymeleaf.extras.java8time.util.TemporalSetUtils;
import org.thymeleaf.util.Validate;
/**
* <p>
 * Utility class to perform Java 8 date ({@link Temporal}) operations.
* </p>
* <p>
* An object of this class is usually available in variable evaluation expressions with the name
* <tt>#temporals</tt>.
* </p>
*
* @author José Miguel Samper
*
* @since 2.1.0
*/
public final class Temporals {

    // Delegates that do the actual work; this class is a thin expression facade.
    private final TemporalCreationUtils temporalCreationUtils;
    private final TemporalFormattingUtils temporalFormattingUtils;
    private final TemporalArrayUtils temporalArrayUtils;
    private final TemporalListUtils temporalListUtils;
    private final TemporalSetUtils temporalSetUtils;

    /**
     * Creates a helper bound to the given locale and the system default zone.
     *
     * @param locale the locale to use; must not be null
     */
    public Temporals(final Locale locale) {
        this(locale, ZoneId.systemDefault());
    }

    /**
     * Creates a helper bound to the given locale and default zone.
     *
     * @param locale the locale to use; must not be null
     * @param defaultZoneId the default zone to use; must not be null
     */
    public Temporals(final Locale locale, final ZoneId defaultZoneId) {
        super();
        Validate.notNull(locale, "Locale cannot be null");
        // Fail fast on a null zone instead of with an obscure NPE on first use
        Validate.notNull(defaultZoneId, "ZoneId cannot be null");
        this.temporalCreationUtils = new TemporalCreationUtils();
        this.temporalFormattingUtils = new TemporalFormattingUtils(locale, defaultZoneId);
        this.temporalArrayUtils = new TemporalArrayUtils(locale, defaultZoneId);
        this.temporalListUtils = new TemporalListUtils(locale, defaultZoneId);
        this.temporalSetUtils = new TemporalSetUtils(locale, defaultZoneId);
    }

    /**
     * Creates a date from year, month and day.
     *
     * @return an instance of java.time.LocalDate
     * @since 2.1.0
     */
    public Temporal create(final Object year, final Object month, final Object day) {
        return temporalCreationUtils.create(year, month, day);
    }

    /**
     * Creates a date-time from year, month, day, hour and minute.
     *
     * @return an instance of java.time.LocalDateTime
     * @since 2.1.0
     */
    public Temporal create(final Object year, final Object month, final Object day,
            final Object hour, final Object minute) {
        return temporalCreationUtils.create(year, month, day, hour, minute);
    }

    /**
     * Creates a date-time from year, month, day, hour, minute and second.
     *
     * @return an instance of java.time.LocalDateTime
     * @since 2.1.0
     */
    public Temporal create(final Object year, final Object month, final Object day,
            final Object hour, final Object minute, final Object second) {
        return temporalCreationUtils.create(year, month, day, hour, minute, second);
    }

    /**
     * Creates a date-time from year, month, day, hour, minute, second and nanosecond.
     *
     * @return an instance of java.time.LocalDateTime
     * @since 2.1.0
     */
    public Temporal create(final Object year, final Object month, final Object day,
            final Object hour, final Object minute, final Object second, final Object nanosecond) {
        return temporalCreationUtils.create(year, month, day, hour, minute, second, nanosecond);
    }

    /**
     * Creates a date from an ISO string.
     *
     * @return an instance of java.time.LocalDate
     * @since 2.1.0
     */
    public Temporal createDate(final String isoDate) {
        return temporalCreationUtils.createDate(isoDate);
    }

    /**
     * Creates a date-time from an ISO string.
     *
     * @return an instance of java.time.LocalDateTime
     * @since 2.1.0
     */
    public Temporal createDateTime(final String isoDate) {
        return temporalCreationUtils.createDateTime(isoDate);
    }

    /**
     * Creates a date from a string using the given pattern.
     *
     * @return an instance of java.time.LocalDate
     * @since 2.1.0
     */
    public Temporal createDate(final String isoDate, final String pattern) {
        return temporalCreationUtils.createDate(isoDate, pattern);
    }

    /**
     * Creates a date-time from a string using the given pattern.
     *
     * @return an instance of java.time.LocalDateTime
     * @since 2.1.0
     */
    public Temporal createDateTime(final String isoDate, final String pattern) {
        return temporalCreationUtils.createDateTime(isoDate, pattern);
    }

    /**
     * Creates the current date-time.
     *
     * @return an instance of java.time.LocalDateTime
     * @since 2.1.0
     */
    public Temporal createNow() {
        return temporalCreationUtils.createNow();
    }

    /**
     * Creates the current date-time in the given time zone.
     *
     * @return an instance of java.time.ZonedDateTime
     * @since 2.1.0
     */
    public Temporal createNowForTimeZone(final Object zoneId) {
        return temporalCreationUtils.createNowForTimeZone(zoneId);
    }

    /**
     * Creates the current date.
     *
     * @return an instance of java.time.LocalDate
     * @since 2.1.0
     */
    public Temporal createToday() {
        return temporalCreationUtils.createToday();
    }

    /**
     * Creates the current date in the given time zone.
     *
     * @return an instance of java.time.ZonedDateTime with 00:00:00.000 for the time part
     * @since 2.1.0
     */
    public Temporal createTodayForTimeZone(final Object zoneId) {
        return temporalCreationUtils.createTodayForTimeZone(zoneId);
    }

    /**
     * Formats the given temporal with the default format.
     *
     * @since 2.1.0
     */
    public String format(final Temporal target) {
        return temporalFormattingUtils.format(target);
    }

    /**
     * Array counterpart of {@link #format(Temporal)}.
     *
     * @since 2.1.0
     */
    public String[] arrayFormat(final Object[] target) {
        return temporalArrayUtils.arrayFormat(target);
    }

    /**
     * List counterpart of {@link #format(Temporal)}.
     *
     * @since 2.1.0
     */
    public List<String> listFormat(final List<? extends Temporal> target) {
        return temporalListUtils.listFormat(target);
    }

    /**
     * Set counterpart of {@link #format(Temporal)}.
     *
     * @since 2.1.0
     */
    public Set<String> setFormat(final Set<? extends Temporal> target) {
        return temporalSetUtils.setFormat(target);
    }

    /**
     * Formats the given temporal for an explicit locale.
     *
     * @since 2.1.1
     */
    public String format(final Temporal target, final Locale locale) {
        return temporalFormattingUtils.format(target, locale);
    }

    /**
     * Array counterpart of {@link #format(Temporal, Locale)}.
     *
     * @since 2.1.1
     */
    public String[] arrayFormat(final Object[] target, final Locale locale) {
        return temporalArrayUtils.arrayFormat(target, locale);
    }

    /**
     * List counterpart of {@link #format(Temporal, Locale)}.
     *
     * @since 2.1.1
     */
    public List<String> listFormat(final List<? extends Temporal> target, final Locale locale) {
        return temporalListUtils.listFormat(target, locale);
    }

    /**
     * Set counterpart of {@link #format(Temporal, Locale)}.
     *
     * @since 2.1.1
     */
    public Set<String> setFormat(final Set<? extends Temporal> target, final Locale locale) {
        return temporalSetUtils.setFormat(target, locale);
    }

    /**
     * Formats the given temporal with an explicit pattern.
     *
     * @since 2.1.0
     */
    public String format(final Temporal target, final String pattern) {
        return temporalFormattingUtils.format(target, pattern);
    }

    /**
     * Array counterpart of {@link #format(Temporal, String)}.
     *
     * @since 2.1.0
     */
    public String[] arrayFormat(final Object[] target, final String pattern) {
        return temporalArrayUtils.arrayFormat(target, pattern);
    }

    /**
     * List counterpart of {@link #format(Temporal, String)}.
     *
     * @since 2.1.0
     */
    public List<String> listFormat(final List<? extends Temporal> target, final String pattern) {
        return temporalListUtils.listFormat(target, pattern);
    }

    /**
     * Set counterpart of {@link #format(Temporal, String)}.
     *
     * @since 2.1.0
     */
    public Set<String> setFormat(final Set<? extends Temporal> target, final String pattern) {
        return temporalSetUtils.setFormat(target, pattern);
    }

    /**
     * Formats the given temporal with an explicit pattern and locale.
     *
     * @since 2.1.1
     */
    public String format(final Temporal target, final String pattern, final Locale locale) {
        return temporalFormattingUtils.format(target, pattern, locale);
    }

    /**
     * Array counterpart of {@link #format(Temporal, String, Locale)}.
     *
     * @since 2.1.1
     */
    public String[] arrayFormat(final Object[] target, final String pattern, final Locale locale) {
        return temporalArrayUtils.arrayFormat(target, pattern, locale);
    }

    /**
     * List counterpart of {@link #format(Temporal, String, Locale)}.
     *
     * @since 2.1.1
     */
    public List<String> listFormat(final List<? extends Temporal> target, final String pattern, final Locale locale) {
        return temporalListUtils.listFormat(target, pattern, locale);
    }

    /**
     * Set counterpart of {@link #format(Temporal, String, Locale)}.
     *
     * @since 2.1.1
     */
    public Set<String> setFormat(final Set<? extends Temporal> target, final String pattern, final Locale locale) {
        return temporalSetUtils.setFormat(target, pattern, locale);
    }

    /**
     * Day of the given temporal.
     *
     * @since 2.1.0
     */
    public Integer day(final Temporal target) {
        return temporalFormattingUtils.day(target);
    }

    /**
     * Array counterpart of {@link #day(Temporal)}.
     *
     * @since 2.1.0
     */
    public Integer[] arrayDay(final Object[] target) {
        return temporalArrayUtils.arrayDay(target);
    }

    /**
     * List counterpart of {@link #day(Temporal)}.
     *
     * @since 2.1.0
     */
    public List<Integer> listDay(final List<? extends Temporal> target) {
        return temporalListUtils.listDay(target);
    }

    /**
     * Set counterpart of {@link #day(Temporal)}.
     *
     * @since 2.1.0
     */
    public Set<Integer> setDay(final Set<? extends Temporal> target) {
        return temporalSetUtils.setDay(target);
    }

    /**
     * Month of the given temporal.
     *
     * @since 2.1.0
     */
    public Integer month(final Temporal target) {
        return temporalFormattingUtils.month(target);
    }

    /**
     * Array counterpart of {@link #month(Temporal)}.
     *
     * @since 2.1.0
     */
    public Integer[] arrayMonth(final Object[] target) {
        return temporalArrayUtils.arrayMonth(target);
    }

    /**
     * List counterpart of {@link #month(Temporal)}.
     *
     * @since 2.1.0
     */
    public List<Integer> listMonth(final List<? extends Temporal> target) {
        return temporalListUtils.listMonth(target);
    }

    /**
     * Set counterpart of {@link #month(Temporal)}.
     *
     * @since 2.1.0
     */
    public Set<Integer> setMonth(final Set<? extends Temporal> target) {
        return temporalSetUtils.setMonth(target);
    }

    /**
     * Month name of the given temporal.
     *
     * @since 2.1.0
     */
    public String monthName(final Temporal target) {
        return temporalFormattingUtils.monthName(target);
    }

    /**
     * Array counterpart of {@link #monthName(Temporal)}.
     *
     * @since 2.1.0
     */
    public String[] arrayMonthName(final Object[] target) {
        return temporalArrayUtils.arrayMonthName(target);
    }

    /**
     * List counterpart of {@link #monthName(Temporal)}.
     *
     * @since 2.1.0
     */
    public List<String> listMonthName(final List<? extends Temporal> target) {
        return temporalListUtils.listMonthName(target);
    }

    /**
     * Set counterpart of {@link #monthName(Temporal)}.
     *
     * @since 2.1.0
     */
    public Set<String> setMonthName(final Set<? extends Temporal> target) {
        return temporalSetUtils.setMonthName(target);
    }

    /**
     * Short month name of the given temporal.
     *
     * @since 2.1.0
     */
    public String monthNameShort(final Temporal target) {
        return temporalFormattingUtils.monthNameShort(target);
    }

    /**
     * Array counterpart of {@link #monthNameShort(Temporal)}.
     *
     * @since 2.1.0
     */
    public String[] arrayMonthNameShort(final Object[] target) {
        return temporalArrayUtils.arrayMonthNameShort(target);
    }

    /**
     * List counterpart of {@link #monthNameShort(Temporal)}.
     *
     * @since 2.1.0
     */
    public List<String> listMonthNameShort(final List<? extends Temporal> target) {
        return temporalListUtils.listMonthNameShort(target);
    }

    /**
     * Set counterpart of {@link #monthNameShort(Temporal)}.
     *
     * @since 2.1.0
     */
    public Set<String> setMonthNameShort(final Set<? extends Temporal> target) {
        return temporalSetUtils.setMonthNameShort(target);
    }

    /**
     * Year of the given temporal.
     *
     * @since 2.1.0
     */
    public Integer year(final Temporal target) {
        return temporalFormattingUtils.year(target);
    }

    /**
     * Array counterpart of {@link #year(Temporal)}.
     *
     * @since 2.1.0
     */
    public Integer[] arrayYear(final Object[] target) {
        return temporalArrayUtils.arrayYear(target);
    }

    /**
     * List counterpart of {@link #year(Temporal)}.
     *
     * @since 2.1.0
     */
    public List<Integer> listYear(final List<? extends Temporal> target) {
        return temporalListUtils.listYear(target);
    }

    /**
     * Set counterpart of {@link #year(Temporal)}.
     *
     * @since 2.1.0
     */
    public Set<Integer> setYear(final Set<? extends Temporal> target) {
        return temporalSetUtils.setYear(target);
    }

    /**
     * Day of week of the given temporal.
     *
     * @since 2.1.0
     */
    public Integer dayOfWeek(final Temporal target) {
        return temporalFormattingUtils.dayOfWeek(target);
    }

    /**
     * Array counterpart of {@link #dayOfWeek(Temporal)}.
     *
     * @since 2.1.0
     */
    public Integer[] arrayDayOfWeek(final Object[] target) {
        return temporalArrayUtils.arrayDayOfWeek(target);
    }

    /**
     * List counterpart of {@link #dayOfWeek(Temporal)}.
     *
     * @since 2.1.0
     */
    public List<Integer> listDayOfWeek(final List<? extends Temporal> target) {
        return temporalListUtils.listDayOfWeek(target);
    }

    /**
     * Set counterpart of {@link #dayOfWeek(Temporal)}.
     *
     * @since 2.1.0
     */
    public Set<Integer> setDayOfWeek(final Set<? extends Temporal> target) {
        return temporalSetUtils.setDayOfWeek(target);
    }

    /**
     * Day-of-week name of the given temporal.
     *
     * @since 2.1.0
     */
    public String dayOfWeekName(final Temporal target) {
        return temporalFormattingUtils.dayOfWeekName(target);
    }

    /**
     * Array counterpart of {@link #dayOfWeekName(Temporal)}.
     *
     * @since 2.1.0
     */
    public String[] arrayDayOfWeekName(final Object[] target) {
        return temporalArrayUtils.arrayDayOfWeekName(target);
    }

    /**
     * List counterpart of {@link #dayOfWeekName(Temporal)}.
     *
     * @since 2.1.0
     */
    public List<String> listDayOfWeekName(final List<? extends Temporal> target) {
        return temporalListUtils.listDayOfWeekName(target);
    }

    /**
     * Set counterpart of {@link #dayOfWeekName(Temporal)}.
     *
     * @since 2.1.0
     */
    public Set<String> setDayOfWeekName(final Set<? extends Temporal> target) {
        return temporalSetUtils.setDayOfWeekName(target);
    }

    /**
     * Short day-of-week name of the given temporal.
     *
     * @since 2.1.0
     */
    public String dayOfWeekNameShort(final Temporal target) {
        return temporalFormattingUtils.dayOfWeekNameShort(target);
    }

    /**
     * Array counterpart of {@link #dayOfWeekNameShort(Temporal)}.
     *
     * @since 2.1.0
     */
    public String[] arrayDayOfWeekNameShort(final Object[] target) {
        return temporalArrayUtils.arrayDayOfWeekNameShort(target);
    }

    /**
     * List counterpart of {@link #dayOfWeekNameShort(Temporal)}.
     *
     * @since 2.1.0
     */
    public List<String> listDayOfWeekNameShort(final List<? extends Temporal> target) {
        return temporalListUtils.listDayOfWeekNameShort(target);
    }

    /**
     * Set counterpart of {@link #dayOfWeekNameShort(Temporal)}.
     *
     * @since 2.1.0
     */
    public Set<String> setDayOfWeekNameShort(final Set<? extends Temporal> target) {
        return temporalSetUtils.setDayOfWeekNameShort(target);
    }

    /**
     * Hour of the given temporal.
     *
     * @since 2.1.0
     */
    public Integer hour(final Temporal target) {
        return temporalFormattingUtils.hour(target);
    }

    /**
     * Array counterpart of {@link #hour(Temporal)}.
     *
     * @since 2.1.0
     */
    public Integer[] arrayHour(final Object[] target) {
        return temporalArrayUtils.arrayHour(target);
    }

    /**
     * List counterpart of {@link #hour(Temporal)}.
     *
     * @since 2.1.0
     */
    public List<Integer> listHour(final List<? extends Temporal> target) {
        return temporalListUtils.listHour(target);
    }

    /**
     * Set counterpart of {@link #hour(Temporal)}.
     *
     * @since 2.1.0
     */
    public Set<Integer> setHour(final Set<? extends Temporal> target) {
        return temporalSetUtils.setHour(target);
    }

    /**
     * Minute of the given temporal.
     *
     * @since 2.1.0
     */
    public Integer minute(final Temporal target) {
        return temporalFormattingUtils.minute(target);
    }

    /**
     * Array counterpart of {@link #minute(Temporal)}.
     *
     * @since 2.1.0
     */
    public Integer[] arrayMinute(final Object[] target) {
        return temporalArrayUtils.arrayMinute(target);
    }

    /**
     * List counterpart of {@link #minute(Temporal)}.
     *
     * @since 2.1.0
     */
    public List<Integer> listMinute(final List<? extends Temporal> target) {
        return temporalListUtils.listMinute(target);
    }

    /**
     * Set counterpart of {@link #minute(Temporal)}.
     *
     * @since 2.1.0
     */
    public Set<Integer> setMinute(final Set<? extends Temporal> target) {
        return temporalSetUtils.setMinute(target);
    }

    /**
     * Second of the given temporal.
     *
     * @since 2.1.0
     */
    public Integer second(final Temporal target) {
        return temporalFormattingUtils.second(target);
    }

    /**
     * Array counterpart of {@link #second(Temporal)}.
     *
     * @since 2.1.0
     */
    public Integer[] arraySecond(final Object[] target) {
        return temporalArrayUtils.arraySecond(target);
    }

    /**
     * List counterpart of {@link #second(Temporal)}.
     *
     * @since 2.1.0
     */
    public List<Integer> listSecond(final List<? extends Temporal> target) {
        return temporalListUtils.listSecond(target);
    }

    /**
     * Set counterpart of {@link #second(Temporal)}.
     *
     * @since 2.1.0
     */
    public Set<Integer> setSecond(final Set<? extends Temporal> target) {
        return temporalSetUtils.setSecond(target);
    }

    /**
     * Nanosecond of the given temporal.
     *
     * @since 2.1.0
     */
    public Integer nanosecond(final Temporal target) {
        return temporalFormattingUtils.nanosecond(target);
    }

    /**
     * Array counterpart of {@link #nanosecond(Temporal)}.
     *
     * @since 2.1.0
     */
    public Integer[] arrayNanosecond(final Object[] target) {
        return temporalArrayUtils.arrayNanosecond(target);
    }

    /**
     * List counterpart of {@link #nanosecond(Temporal)}.
     *
     * @since 2.1.0
     */
    public List<Integer> listNanosecond(final List<? extends Temporal> target) {
        return temporalListUtils.listNanosecond(target);
    }

    /**
     * Set counterpart of {@link #nanosecond(Temporal)}.
     *
     * @since 2.1.0
     */
    public Set<Integer> setNanosecond(final Set<? extends Temporal> target) {
        return temporalSetUtils.setNanosecond(target);
    }

    /**
     * Formats the given temporal in ISO format.
     *
     * @since 2.1.0
     */
    public String formatISO(final Temporal target) {
        return temporalFormattingUtils.formatISO(target);
    }

    /**
     * Array counterpart of {@link #formatISO(Temporal)}.
     *
     * @since 2.1.0
     */
    public String[] arrayFormatISO(final Object[] target) {
        return temporalArrayUtils.arrayFormatISO(target);
    }

    /**
     * List counterpart of {@link #formatISO(Temporal)}.
     *
     * @since 2.1.0
     */
    public List<String> listFormatISO(final List<? extends Temporal> target) {
        return temporalListUtils.listFormatISO(target);
    }

    /**
     * Set counterpart of {@link #formatISO(Temporal)}.
     *
     * @since 2.1.0
     */
    public Set<String> setFormatISO(final Set<? extends Temporal> target) {
        return temporalSetUtils.setFormatISO(target);
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest.phrase;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.script.Template;
import org.elasticsearch.search.suggest.SuggestBuilder.SuggestionBuilder;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
/**
* Defines the actual suggest command for phrase suggestions ( <tt>phrase</tt>).
*/
public final class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSuggestionBuilder> {
// Maximum fraction of terms considered misspellings, or an absolute term
// count when >= 1 (see maxErrors(Float)); null = server-side default.
private Float maxErrors;
// Separator between terms in the bigram field; null = whitespace.
private String separator;
// Likelihood that a term is misspelled even though it is in the dictionary.
private Float realWordErrorLikelihood;
// Factor applied to the input phrase's score to form the candidate threshold.
private Float confidence;
// Candidate generators, bucketed by generator type (see addCandidateGenerator).
private final Map<String, List<CandidateGenerator>> generators = new HashMap<>();
// Size of the n-grams in the language model; 1 = unigrams (see gramSize(int)).
private Integer gramSize;
// Optional smoothing model for the n-gram model.
private SmoothingModel model;
// Presumably forces unigram-only analysis — confirm against its setter (not visible here).
private Boolean forceUnigrams;
// Presumably caps the number of tokens considered — confirm against its setter (not visible here).
private Integer tokenLimit;
// Markers wrapped around changed tokens in the output (assumed; setters not shown here).
private String preTag;
private String postTag;
// Template query used to collate/verify suggestions (assumed from name).
private Template collateQuery;
// Parameters passed to the collate query template (assumed from name).
private Map<String, Object> collateParams;
// Whether collation prunes suggestions or only flags them (assumed from name).
private Boolean collatePrune;
/**
 * Creates a new phrase suggestion builder, registering it under the given
 * name with the suggester type "phrase".
 *
 * @param name the name of this suggestion
 */
public PhraseSuggestionBuilder(String name) {
super(name, "phrase");
}
/**
 * Sets the gram size for the n-gram model used for this suggester. The
 * default value is <tt>1</tt> corresponding to <tt>unigrams</tt>. Use
 * <tt>2</tt> for <tt>bigrams</tt> and <tt>3</tt> for <tt>trigrams</tt>.
 *
 * @param gramSize the gram size; must be at least 1
 * @return this builder, for chaining
 * @throws IllegalArgumentException if <tt>gramSize</tt> is less than 1
 */
public PhraseSuggestionBuilder gramSize(int gramSize) {
if (gramSize < 1) {
throw new IllegalArgumentException("gramSize must be >= 1");
}
this.gramSize = gramSize;
return this;
}
/**
 * Sets the maximum percentage of the terms that can at most be considered
 * misspellings in order to form a correction. This method accepts a float
 * value in the range [0..1) as a fraction of the actual query terms, or a
 * number <tt>>=1</tt> as an absolute number of query terms.
 *
 * The default is set to <tt>1.0</tt>, which means that only corrections
 * with at most one misspelled term are returned.
 *
 * @param maxErrors the maximum error fraction (or absolute term count)
 * @return this builder, for chaining
 */
public PhraseSuggestionBuilder maxErrors(Float maxErrors) {
this.maxErrors = maxErrors;
return this;
}
/**
* Sets the separator that is used to separate terms in the bigram field. If
* not set the whitespace character is used as a separator.
*/
public PhraseSuggestionBuilder separator(String separator) {
this.separator = separator;
return this;
}
/**
* Sets the likelihood of a term being a misspelled even if the term exists
* in the dictionary. The default it <tt>0.95</tt> corresponding to 5% or
* the real words are misspelled.
*/
public PhraseSuggestionBuilder realWordErrorLikelihood(Float realWordErrorLikelihood) {
this.realWordErrorLikelihood = realWordErrorLikelihood;
return this;
}
/**
* Sets the confidence level for this suggester. The confidence level
* defines a factor applied to the input phrases score which is used as a
* threshold for other suggest candidates. Only candidates that score higher
* than the threshold will be included in the result. For instance a
* confidence level of <tt>1.0</tt> will only return suggestions that score
* higher than the input phrase. If set to <tt>0.0</tt> the top N candidates
* are returned. The default is <tt>1.0</tt>
*/
public PhraseSuggestionBuilder confidence(Float confidence) {
this.confidence = confidence;
return this;
}
/**
* Adds a {@link CandidateGenerator} to this suggester. The
* {@link CandidateGenerator} is used to draw candidates for each individual
* phrase term before the candidates are scored.
*/
public PhraseSuggestionBuilder addCandidateGenerator(CandidateGenerator generator) {
List<CandidateGenerator> list = this.generators.get(generator.getType());
if (list == null) {
list = new ArrayList<>();
this.generators.put(generator.getType(), list);
}
list.add(generator);
return this;
}
/**
* Clear the candidate generators.
*/
public PhraseSuggestionBuilder clearCandidateGenerators() {
this.generators.clear();
return this;
}
/**
* If set to <code>true</code> the phrase suggester will fail if the analyzer only
* produces ngrams. the default it <code>true</code>.
*/
public PhraseSuggestionBuilder forceUnigrams(boolean forceUnigrams) {
this.forceUnigrams = forceUnigrams;
return this;
}
/**
* Sets an explicit smoothing model used for this suggester. The default is
* {@link PhraseSuggester#StupidBackoff}.
*/
public PhraseSuggestionBuilder smoothingModel(SmoothingModel model) {
this.model = model;
return this;
}
public PhraseSuggestionBuilder tokenLimit(int tokenLimit) {
this.tokenLimit = tokenLimit;
return this;
}
/**
* Setup highlighting for suggestions. If this is called a highlight field
* is returned with suggestions wrapping changed tokens with preTag and postTag.
*/
public PhraseSuggestionBuilder highlight(String preTag, String postTag) {
if ((preTag == null) != (postTag == null)) {
throw new IllegalArgumentException("Pre and post tag must both be null or both not be null.");
}
this.preTag = preTag;
this.postTag = postTag;
return this;
}
/**
* Sets a query used for filtering out suggested phrases (collation).
*/
public PhraseSuggestionBuilder collateQuery(String collateQuery) {
this.collateQuery = new Template(collateQuery);
return this;
}
/**
* Sets a query used for filtering out suggested phrases (collation).
*/
public PhraseSuggestionBuilder collateQuery(Template collateQueryTemplate) {
this.collateQuery = collateQueryTemplate;
return this;
}
/**
* Sets additional params for collate script
*/
public PhraseSuggestionBuilder collateParams(Map<String, Object> collateParams) {
this.collateParams = collateParams;
return this;
}
/**
* Sets whether to prune suggestions after collation
*/
public PhraseSuggestionBuilder collatePrune(boolean collatePrune) {
this.collatePrune = collatePrune;
return this;
}
@Override
public XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
if (realWordErrorLikelihood != null) {
builder.field("real_word_error_likelihood", realWordErrorLikelihood);
}
if (confidence != null) {
builder.field("confidence", confidence);
}
if (separator != null) {
builder.field("separator", separator);
}
if (maxErrors != null) {
builder.field("max_errors", maxErrors);
}
if (gramSize != null) {
builder.field("gram_size", gramSize);
}
if (forceUnigrams != null) {
builder.field("force_unigrams", forceUnigrams);
}
if (tokenLimit != null) {
builder.field("token_limit", tokenLimit);
}
if (!generators.isEmpty()) {
Set<Entry<String, List<CandidateGenerator>>> entrySet = generators.entrySet();
for (Entry<String, List<CandidateGenerator>> entry : entrySet) {
builder.startArray(entry.getKey());
for (CandidateGenerator generator : entry.getValue()) {
generator.toXContent(builder, params);
}
builder.endArray();
}
}
if (model != null) {
builder.startObject("smoothing");
model.toXContent(builder, params);
builder.endObject();
}
if (preTag != null) {
builder.startObject("highlight");
builder.field("pre_tag", preTag);
builder.field("post_tag", postTag);
builder.endObject();
}
if (collateQuery != null) {
builder.startObject("collate");
builder.field("query", collateQuery);
if (collateParams != null) {
builder.field("params", collateParams);
}
if (collatePrune != null) {
builder.field("prune", collatePrune.booleanValue());
}
builder.endObject();
}
return builder;
}
/**
* Creates a new {@link DirectCandidateGenerator}
*
* @param field
* the field this candidate generator operates on.
*/
public static DirectCandidateGenerator candidateGenerator(String field) {
return new DirectCandidateGenerator(field);
}
/**
* A "stupid-backoff" smoothing model simialr to <a
* href="http://en.wikipedia.org/wiki/Katz's_back-off_model"> Katz's
* Backoff</a>. This model is used as the default if no model is configured.
* <p>
* See <a
* href="http://en.wikipedia.org/wiki/N-gram#Smoothing_techniques">N-Gram
* Smoothing</a> for details.
* </p>
*/
public static final class StupidBackoff extends SmoothingModel {
private final double discount;
/**
* Creates a Stupid-Backoff smoothing model.
*
* @param discount
* the discount given to lower order ngrams if the higher order ngram doesn't exits
*/
public StupidBackoff(double discount) {
super("stupid_backoff");
this.discount = discount;
}
@Override
protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
builder.field("discount", discount);
return builder;
}
}
/**
* An <a href="http://en.wikipedia.org/wiki/Additive_smoothing">additive
* smoothing</a> model.
* <p>
* See <a
* href="http://en.wikipedia.org/wiki/N-gram#Smoothing_techniques">N-Gram
* Smoothing</a> for details.
* </p>
*/
public static final class Laplace extends SmoothingModel {
private final double alpha;
/**
* Creates a Laplace smoothing model.
*
* @param discount
* the discount given to lower order ngrams if the higher order ngram doesn't exits
*/
public Laplace(double alpha) {
super("laplace");
this.alpha = alpha;
}
@Override
protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
builder.field("alpha", alpha);
return builder;
}
}
public static abstract class SmoothingModel implements ToXContent {
private final String type;
protected SmoothingModel(String type) {
this.type = type;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(type);
innerToXContent(builder,params);
builder.endObject();
return builder;
}
protected abstract XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException;
}
/**
* Linear interpolation smoothing model.
* <p>
* See <a
* href="http://en.wikipedia.org/wiki/N-gram#Smoothing_techniques">N-Gram
* Smoothing</a> for details.
* </p>
*/
public static final class LinearInterpolation extends SmoothingModel {
private final double trigramLambda;
private final double bigramLambda;
private final double unigramLambda;
/**
* Creates a linear interpolation smoothing model.
*
* Note: the lambdas must sum up to one.
*
* @param trigramLambda
* the trigram lambda
* @param bigramLambda
* the bigram lambda
* @param unigramLambda
* the unigram lambda
*/
public LinearInterpolation(double trigramLambda, double bigramLambda, double unigramLambda) {
super("linear");
this.trigramLambda = trigramLambda;
this.bigramLambda = bigramLambda;
this.unigramLambda = unigramLambda;
}
@Override
protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
builder.field("trigram_lambda", trigramLambda);
builder.field("bigram_lambda", bigramLambda);
builder.field("unigram_lambda", unigramLambda);
return builder;
}
}
/**
* {@link CandidateGenerator} base class.
*/
public static abstract class CandidateGenerator implements ToXContent {
private final String type;
public CandidateGenerator(String type) {
this.type = type;
}
public String getType() {
return type;
}
}
/**
*
*
*/
public static final class DirectCandidateGenerator extends CandidateGenerator {
private final String field;
private String preFilter;
private String postFilter;
private String suggestMode;
private Float accuracy;
private Integer size;
private String sort;
private String stringDistance;
private Integer maxEdits;
private Integer maxInspections;
private Float maxTermFreq;
private Integer prefixLength;
private Integer minWordLength;
private Float minDocFreq;
/**
* Sets from what field to fetch the candidate suggestions from. This is
* an required option and needs to be set via this setter or
* {@link org.elasticsearch.search.suggest.SuggestBuilder.TermSuggestionBuilder#setField(String)}
* method
*/
public DirectCandidateGenerator(String field) {
super("direct_generator");
this.field = field;
}
/**
* The global suggest mode controls what suggested terms are included or
* controls for what suggest text tokens, terms should be suggested for.
* Three possible values can be specified:
* <ol>
* <li><code>missing</code> - Only suggest terms in the suggest text
* that aren't in the index. This is the default.
* <li><code>popular</code> - Only suggest terms that occur in more docs
* then the original suggest text term.
* <li><code>always</code> - Suggest any matching suggest terms based on
* tokens in the suggest text.
* </ol>
*/
public DirectCandidateGenerator suggestMode(String suggestMode) {
this.suggestMode = suggestMode;
return this;
}
/**
* Sets how similar the suggested terms at least need to be compared to
* the original suggest text tokens. A value between 0 and 1 can be
* specified. This value will be compared to the string distance result
* of each candidate spelling correction.
* <p/>
* Default is <tt>0.5</tt>
*/
public DirectCandidateGenerator accuracy(float accuracy) {
this.accuracy = accuracy;
return this;
}
/**
* Sets the maximum suggestions to be returned per suggest text term.
*/
public DirectCandidateGenerator size(int size) {
if (size <= 0) {
throw new IllegalArgumentException("Size must be positive");
}
this.size = size;
return this;
}
/**
* Sets how to sort the suggest terms per suggest text token. Two
* possible values:
* <ol>
* <li><code>score</code> - Sort should first be based on score, then
* document frequency and then the term itself.
* <li><code>frequency</code> - Sort should first be based on document
* frequency, then scotr and then the term itself.
* </ol>
* <p/>
* What the score is depends on the suggester being used.
*/
public DirectCandidateGenerator sort(String sort) {
this.sort = sort;
return this;
}
/**
* Sets what string distance implementation to use for comparing how
* similar suggested terms are. Four possible values can be specified:
* <ol>
* <li><code>internal</code> - This is the default and is based on
* <code>damerau_levenshtein</code>, but highly optimized for comparing
* string distance for terms inside the index.
* <li><code>damerau_levenshtein</code> - String distance algorithm
* based on Damerau-Levenshtein algorithm.
* <li><code>levenstein</code> - String distance algorithm based on
* Levenstein edit distance algorithm.
* <li><code>jarowinkler</code> - String distance algorithm based on
* Jaro-Winkler algorithm.
* <li><code>ngram</code> - String distance algorithm based on character
* n-grams.
* </ol>
*/
public DirectCandidateGenerator stringDistance(String stringDistance) {
this.stringDistance = stringDistance;
return this;
}
/**
* Sets the maximum edit distance candidate suggestions can have in
* order to be considered as a suggestion. Can only be a value between 1
* and 2. Any other value result in an bad request error being thrown.
* Defaults to <tt>2</tt>.
*/
public DirectCandidateGenerator maxEdits(Integer maxEdits) {
this.maxEdits = maxEdits;
return this;
}
/**
* A factor that is used to multiply with the size in order to inspect
* more candidate suggestions. Can improve accuracy at the cost of
* performance. Defaults to <tt>5</tt>.
*/
public DirectCandidateGenerator maxInspections(Integer maxInspections) {
this.maxInspections = maxInspections;
return this;
}
/**
* Sets a maximum threshold in number of documents a suggest text token
* can exist in order to be corrected. Can be a relative percentage
* number (e.g 0.4) or an absolute number to represent document
* frequencies. If an value higher than 1 is specified then fractional
* can not be specified. Defaults to <tt>0.01</tt>.
* <p/>
* This can be used to exclude high frequency terms from being
* suggested. High frequency terms are usually spelled correctly on top
* of this this also improves the suggest performance.
*/
public DirectCandidateGenerator maxTermFreq(float maxTermFreq) {
this.maxTermFreq = maxTermFreq;
return this;
}
/**
* Sets the number of minimal prefix characters that must match in order
* be a candidate suggestion. Defaults to 1. Increasing this number
* improves suggest performance. Usually misspellings don't occur in the
* beginning of terms.
*/
public DirectCandidateGenerator prefixLength(int prefixLength) {
this.prefixLength = prefixLength;
return this;
}
/**
* The minimum length a suggest text term must have in order to be
* corrected. Defaults to <tt>4</tt>.
*/
public DirectCandidateGenerator minWordLength(int minWordLength) {
this.minWordLength = minWordLength;
return this;
}
/**
* Sets a minimal threshold in number of documents a suggested term
* should appear in. This can be specified as an absolute number or as a
* relative percentage of number of documents. This can improve quality
* by only suggesting high frequency terms. Defaults to 0f and is not
* enabled. If a value higher than 1 is specified then the number cannot
* be fractional.
*/
public DirectCandidateGenerator minDocFreq(float minDocFreq) {
this.minDocFreq = minDocFreq;
return this;
}
/**
* Sets a filter (analyzer) that is applied to each of the tokens passed to this candidate generator.
* This filter is applied to the original token before candidates are generated.
*/
public DirectCandidateGenerator preFilter(String preFilter) {
this.preFilter = preFilter;
return this;
}
/**
* Sets a filter (analyzer) that is applied to each of the generated tokens
* before they are passed to the actual phrase scorer.
*/
public DirectCandidateGenerator postFilter(String postFilter) {
this.postFilter = postFilter;
return this;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
if (field != null) {
builder.field("field", field);
}
if (suggestMode != null) {
builder.field("suggest_mode", suggestMode);
}
if (accuracy != null) {
builder.field("accuracy", accuracy);
}
if (size != null) {
builder.field("size", size);
}
if (sort != null) {
builder.field("sort", sort);
}
if (stringDistance != null) {
builder.field("string_distance", stringDistance);
}
if (maxEdits != null) {
builder.field("max_edits", maxEdits);
}
if (maxInspections != null) {
builder.field("max_inspections", maxInspections);
}
if (maxTermFreq != null) {
builder.field("max_term_freq", maxTermFreq);
}
if (prefixLength != null) {
builder.field("prefix_length", prefixLength);
}
if (minWordLength != null) {
builder.field("min_word_length", minWordLength);
}
if (minDocFreq != null) {
builder.field("min_doc_freq", minDocFreq);
}
if (preFilter != null) {
builder.field("pre_filter", preFilter);
}
if (postFilter != null) {
builder.field("post_filter", postFilter);
}
builder.endObject();
return builder;
}
}
}
| |
/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.rds.model;
import java.io.Serializable;
import com.amazonaws.AmazonWebServiceRequest;
/**
* Container for the parameters to the {@link com.amazonaws.services.rds.AmazonRDS#createDBSubnetGroup(CreateDBSubnetGroupRequest) CreateDBSubnetGroup operation}.
* <p>
* Creates a new DB subnet group. DB subnet groups must contain at least
* one subnet in at least two AZs in the region.
* </p>
*
* @see com.amazonaws.services.rds.AmazonRDS#createDBSubnetGroup(CreateDBSubnetGroupRequest)
*/
public class CreateDBSubnetGroupRequest extends AmazonWebServiceRequest implements Serializable, Cloneable {
    // NOTE(review): this class is AWS-SDK generated code; the unusual
    // dBSubnetGroupName casing and get/set/with triplets follow the
    // generator's conventions and should not be hand-edited away.
    /**
     * The name for the DB subnet group. This value is stored as a lowercase
     * string. <p>Constraints: Must contain no more than 255 alphanumeric
     * characters or hyphens. Must not be "Default". <p>Example:
     * <code>mySubnetgroup</code>
     */
    private String dBSubnetGroupName;
    /**
     * The description for the DB subnet group.
     */
    private String dBSubnetGroupDescription;
    /**
     * The EC2 Subnet IDs for the DB subnet group.
     */
    private com.amazonaws.internal.ListWithAutoConstructFlag<String> subnetIds;
    /**
     * A list of tags.
     */
    private com.amazonaws.internal.ListWithAutoConstructFlag<Tag> tags;
    /**
     * The name for the DB subnet group. This value is stored as a lowercase
     * string. <p>Constraints: Must contain no more than 255 alphanumeric
     * characters or hyphens. Must not be "Default". <p>Example:
     * <code>mySubnetgroup</code>
     *
     * @return The name for the DB subnet group. This value is stored as a lowercase
     *         string. <p>Constraints: Must contain no more than 255 alphanumeric
     *         characters or hyphens. Must not be "Default". <p>Example:
     *         <code>mySubnetgroup</code>
     */
    public String getDBSubnetGroupName() {
        return dBSubnetGroupName;
    }
    /**
     * The name for the DB subnet group. This value is stored as a lowercase
     * string. <p>Constraints: Must contain no more than 255 alphanumeric
     * characters or hyphens. Must not be "Default". <p>Example:
     * <code>mySubnetgroup</code>
     *
     * @param dBSubnetGroupName The name for the DB subnet group. This value is stored as a lowercase
     *         string. <p>Constraints: Must contain no more than 255 alphanumeric
     *         characters or hyphens. Must not be "Default". <p>Example:
     *         <code>mySubnetgroup</code>
     */
    public void setDBSubnetGroupName(String dBSubnetGroupName) {
        this.dBSubnetGroupName = dBSubnetGroupName;
    }
    /**
     * The name for the DB subnet group. This value is stored as a lowercase
     * string. <p>Constraints: Must contain no more than 255 alphanumeric
     * characters or hyphens. Must not be "Default". <p>Example:
     * <code>mySubnetgroup</code>
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param dBSubnetGroupName The name for the DB subnet group. This value is stored as a lowercase
     *         string. <p>Constraints: Must contain no more than 255 alphanumeric
     *         characters or hyphens. Must not be "Default". <p>Example:
     *         <code>mySubnetgroup</code>
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public CreateDBSubnetGroupRequest withDBSubnetGroupName(String dBSubnetGroupName) {
        this.dBSubnetGroupName = dBSubnetGroupName;
        return this;
    }
    /**
     * The description for the DB subnet group.
     *
     * @return The description for the DB subnet group.
     */
    public String getDBSubnetGroupDescription() {
        return dBSubnetGroupDescription;
    }
    /**
     * The description for the DB subnet group.
     *
     * @param dBSubnetGroupDescription The description for the DB subnet group.
     */
    public void setDBSubnetGroupDescription(String dBSubnetGroupDescription) {
        this.dBSubnetGroupDescription = dBSubnetGroupDescription;
    }
    /**
     * The description for the DB subnet group.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param dBSubnetGroupDescription The description for the DB subnet group.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public CreateDBSubnetGroupRequest withDBSubnetGroupDescription(String dBSubnetGroupDescription) {
        this.dBSubnetGroupDescription = dBSubnetGroupDescription;
        return this;
    }
    /**
     * The EC2 Subnet IDs for the DB subnet group.
     *
     * @return The EC2 Subnet IDs for the DB subnet group.
     */
    public java.util.List<String> getSubnetIds() {
        // Lazily auto-construct an empty list so callers never see null; the
        // auto-construct flag lets the marshaller tell "unset" from "empty".
        if (subnetIds == null) {
              subnetIds = new com.amazonaws.internal.ListWithAutoConstructFlag<String>();
              subnetIds.setAutoConstruct(true);
        }
        return subnetIds;
    }
    /**
     * The EC2 Subnet IDs for the DB subnet group.
     *
     * @param subnetIds The EC2 Subnet IDs for the DB subnet group.
     */
    public void setSubnetIds(java.util.Collection<String> subnetIds) {
        if (subnetIds == null) {
            this.subnetIds = null;
            return;
        }
        // Defensive copy so later mutation of the caller's collection does not
        // affect this request.
        com.amazonaws.internal.ListWithAutoConstructFlag<String> subnetIdsCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<String>(subnetIds.size());
        subnetIdsCopy.addAll(subnetIds);
        this.subnetIds = subnetIdsCopy;
    }
    /**
     * The EC2 Subnet IDs for the DB subnet group.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if
     * any). Use {@link #setSubnetIds(java.util.Collection)} or {@link
     * #withSubnetIds(java.util.Collection)} if you want to override the
     * existing values.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param subnetIds The EC2 Subnet IDs for the DB subnet group.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public CreateDBSubnetGroupRequest withSubnetIds(String... subnetIds) {
        // NOTE(review): getSubnetIds() auto-constructs an empty list and never
        // returns null, so this null check cannot trigger; kept as-is for
        // generated-code fidelity.
        if (getSubnetIds() == null) setSubnetIds(new java.util.ArrayList<String>(subnetIds.length));
        for (String value : subnetIds) {
            getSubnetIds().add(value);
        }
        return this;
    }
    /**
     * The EC2 Subnet IDs for the DB subnet group.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param subnetIds The EC2 Subnet IDs for the DB subnet group.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public CreateDBSubnetGroupRequest withSubnetIds(java.util.Collection<String> subnetIds) {
        if (subnetIds == null) {
            this.subnetIds = null;
        } else {
            com.amazonaws.internal.ListWithAutoConstructFlag<String> subnetIdsCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<String>(subnetIds.size());
            subnetIdsCopy.addAll(subnetIds);
            this.subnetIds = subnetIdsCopy;
        }
        return this;
    }
    /**
     * A list of tags.
     *
     * @return A list of tags.
     */
    public java.util.List<Tag> getTags() {
        // Same lazy auto-construct idiom as getSubnetIds().
        if (tags == null) {
              tags = new com.amazonaws.internal.ListWithAutoConstructFlag<Tag>();
              tags.setAutoConstruct(true);
        }
        return tags;
    }
    /**
     * A list of tags.
     *
     * @param tags A list of tags.
     */
    public void setTags(java.util.Collection<Tag> tags) {
        if (tags == null) {
            this.tags = null;
            return;
        }
        com.amazonaws.internal.ListWithAutoConstructFlag<Tag> tagsCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<Tag>(tags.size());
        tagsCopy.addAll(tags);
        this.tags = tagsCopy;
    }
    /**
     * A list of tags.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if
     * any). Use {@link #setTags(java.util.Collection)} or {@link
     * #withTags(java.util.Collection)} if you want to override the existing
     * values.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param tags A list of tags.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public CreateDBSubnetGroupRequest withTags(Tag... tags) {
        // NOTE(review): as with withSubnetIds(String...), this null check is
        // dead code because getTags() auto-constructs; kept for fidelity.
        if (getTags() == null) setTags(new java.util.ArrayList<Tag>(tags.length));
        for (Tag value : tags) {
            getTags().add(value);
        }
        return this;
    }
    /**
     * A list of tags.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param tags A list of tags.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public CreateDBSubnetGroupRequest withTags(java.util.Collection<Tag> tags) {
        if (tags == null) {
            this.tags = null;
        } else {
            com.amazonaws.internal.ListWithAutoConstructFlag<Tag> tagsCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<Tag>(tags.size());
            tagsCopy.addAll(tags);
            this.tags = tagsCopy;
        }
        return this;
    }
    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getDBSubnetGroupName() != null) sb.append("DBSubnetGroupName: " + getDBSubnetGroupName() + ",");
        if (getDBSubnetGroupDescription() != null) sb.append("DBSubnetGroupDescription: " + getDBSubnetGroupDescription() + ",");
        if (getSubnetIds() != null) sb.append("SubnetIds: " + getSubnetIds() + ",");
        if (getTags() != null) sb.append("Tags: " + getTags() );
        sb.append("}");
        return sb.toString();
    }
    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getDBSubnetGroupName() == null) ? 0 : getDBSubnetGroupName().hashCode());
        hashCode = prime * hashCode + ((getDBSubnetGroupDescription() == null) ? 0 : getDBSubnetGroupDescription().hashCode());
        hashCode = prime * hashCode + ((getSubnetIds() == null) ? 0 : getSubnetIds().hashCode());
        hashCode = prime * hashCode + ((getTags() == null) ? 0 : getTags().hashCode());
        return hashCode;
    }
    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        if (obj == null) return false;
        if (obj instanceof CreateDBSubnetGroupRequest == false) return false;
        CreateDBSubnetGroupRequest other = (CreateDBSubnetGroupRequest)obj;
        // XOR-style checks: false when exactly one side is null.
        if (other.getDBSubnetGroupName() == null ^ this.getDBSubnetGroupName() == null) return false;
        if (other.getDBSubnetGroupName() != null && other.getDBSubnetGroupName().equals(this.getDBSubnetGroupName()) == false) return false;
        if (other.getDBSubnetGroupDescription() == null ^ this.getDBSubnetGroupDescription() == null) return false;
        if (other.getDBSubnetGroupDescription() != null && other.getDBSubnetGroupDescription().equals(this.getDBSubnetGroupDescription()) == false) return false;
        if (other.getSubnetIds() == null ^ this.getSubnetIds() == null) return false;
        if (other.getSubnetIds() != null && other.getSubnetIds().equals(this.getSubnetIds()) == false) return false;
        if (other.getTags() == null ^ this.getTags() == null) return false;
        if (other.getTags() != null && other.getTags().equals(this.getTags()) == false) return false;
        return true;
    }
    @Override
    public CreateDBSubnetGroupRequest clone() {
        return (CreateDBSubnetGroupRequest) super.clone();
    }
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.test.cmmn.handler;
import static org.camunda.bpm.engine.impl.cmmn.handler.ItemHandler.PROPERTY_ACTIVITY_DESCRIPTION;
import static org.camunda.bpm.engine.impl.cmmn.handler.ItemHandler.PROPERTY_ACTIVITY_TYPE;
import static org.camunda.bpm.engine.impl.cmmn.handler.ItemHandler.PROPERTY_IS_BLOCKING;
import static org.camunda.bpm.engine.impl.cmmn.handler.ItemHandler.PROPERTY_MANUAL_ACTIVATION_RULE;
import static org.camunda.bpm.engine.impl.cmmn.handler.ItemHandler.PROPERTY_REQUIRED_RULE;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.util.List;
import org.camunda.bpm.engine.impl.cmmn.CaseControlRule;
import org.camunda.bpm.engine.impl.cmmn.behavior.CaseTaskActivityBehavior;
import org.camunda.bpm.engine.impl.cmmn.behavior.CmmnActivityBehavior;
import org.camunda.bpm.engine.impl.cmmn.handler.CasePlanModelHandler;
import org.camunda.bpm.engine.impl.cmmn.handler.CaseTaskItemHandler;
import org.camunda.bpm.engine.impl.cmmn.handler.SentryHandler;
import org.camunda.bpm.engine.impl.cmmn.model.CmmnActivity;
import org.camunda.bpm.engine.impl.cmmn.model.CmmnCaseDefinition;
import org.camunda.bpm.engine.impl.cmmn.model.CmmnSentryDeclaration;
import org.camunda.bpm.engine.impl.core.model.CallableElement;
import org.camunda.bpm.engine.impl.core.model.CallableElement.CallableElementBinding;
import org.camunda.bpm.engine.impl.core.model.CallableElementParameter;
import org.camunda.bpm.engine.impl.core.variable.mapping.value.ConstantValueProvider;
import org.camunda.bpm.engine.impl.core.variable.mapping.value.ParameterValueProvider;
import org.camunda.bpm.engine.impl.el.ElValueProvider;
import org.camunda.bpm.model.cmmn.Cmmn;
import org.camunda.bpm.model.cmmn.impl.instance.Body;
import org.camunda.bpm.model.cmmn.impl.instance.ConditionExpression;
import org.camunda.bpm.model.cmmn.impl.instance.DefaultControl;
import org.camunda.bpm.model.cmmn.impl.instance.ItemControl;
import org.camunda.bpm.model.cmmn.instance.CaseTask;
import org.camunda.bpm.model.cmmn.instance.ExtensionElements;
import org.camunda.bpm.model.cmmn.instance.IfPart;
import org.camunda.bpm.model.cmmn.instance.ManualActivationRule;
import org.camunda.bpm.model.cmmn.instance.PlanItem;
import org.camunda.bpm.model.cmmn.instance.PlanItemControl;
import org.camunda.bpm.model.cmmn.instance.RequiredRule;
import org.camunda.bpm.model.cmmn.instance.Sentry;
import org.camunda.bpm.model.cmmn.instance.camunda.CamundaIn;
import org.camunda.bpm.model.cmmn.instance.camunda.CamundaOut;
import org.junit.Before;
import org.junit.Test;
/**
* @author Roman Smirnov
*
*/
public class CaseTaskPlanItemHandlerTest extends CmmnElementHandlerTest {

  // CMMN definition element under test and the plan item that references it
  // (both created fresh per test in setUp()).
  protected CaseTask caseTask;
  protected PlanItem planItem;
  // Handler under test: transforms a <planItem> backed by a <caseTask> into a CmmnActivity.
  protected CaseTaskItemHandler handler = new CaseTaskItemHandler();

  /** Creates a caseTask inside the case plan model and a plan item pointing at it. */
  @Before
  public void setUp() {
    caseTask = createElement(casePlanModel, "aCaseTask", CaseTask.class);
    planItem = createElement(casePlanModel, "PI_aCaseTask", PlanItem.class);
    planItem.setDefinition(caseTask);
  }

  @Test
  public void testCaseTaskActivityName() {
    // given:
    // the caseTask has a name "A CaseTask"
    String name = "A CaseTask";
    caseTask.setName(name);
    // when
    CmmnActivity activity = handler.handleElement(planItem, context);
    // then
    assertEquals(name, activity.getName());
  }

  @Test
  public void testPlanItemActivityName() {
    // given:
    // the caseTask has a name "A CaseTask"
    String name = "A CaseTask";
    caseTask.setName(name);
    // the planItem has an own name "My LocalName"
    String planItemName = "My LocalName";
    planItem.setName(planItemName);
    // when
    CmmnActivity activity = handler.handleElement(planItem, context);
    // then: the plan item's local name wins over the definition's name
    assertNotEquals(name, activity.getName());
    assertEquals(planItemName, activity.getName());
  }

  @Test
  public void testCaseTaskDescription() {
    // given
    String description = "This is a caseTask";
    caseTask.setDescription(description);
    // when
    CmmnActivity activity = handler.handleElement(planItem, context);
    // then
    assertEquals(description, (String) activity.getProperty(PROPERTY_ACTIVITY_DESCRIPTION));
  }

  @Test
  public void testPlanItemDescription() {
    // given
    String description = "This is a planItem";
    planItem.setDescription(description);
    // when
    CmmnActivity activity = handler.handleElement(planItem, context);
    // then
    assertEquals(description, (String) activity.getProperty(PROPERTY_ACTIVITY_DESCRIPTION));
  }

  @Test
  public void testCaseTaskActivityType() {
    // given
    // when
    CmmnActivity activity = handler.handleElement(planItem, context);
    // then
    String activityType = (String) activity.getProperty(PROPERTY_ACTIVITY_TYPE);
    assertEquals("caseTask", activityType);
  }

  @Test
  public void testActivityBehavior() {
    // given: a planItem
    // when
    CmmnActivity activity = handler.handleElement(planItem, context);
    // then
    CmmnActivityBehavior behavior = activity.getActivityBehavior();
    assertTrue(behavior instanceof CaseTaskActivityBehavior);
  }

  @Test
  public void testIsBlockingEqualsTrueProperty() {
    // given: a caseTask with isBlocking = true (defaultValue)
    // when
    CmmnActivity activity = handler.handleElement(planItem, context);
    // then
    Boolean isBlocking = (Boolean) activity.getProperty(PROPERTY_IS_BLOCKING);
    assertTrue(isBlocking);
  }

  @Test
  public void testIsBlockingEqualsFalseProperty() {
    // given:
    // a caseTask with isBlocking = false
    caseTask.setIsBlocking(false);
    // when
    CmmnActivity activity = handler.handleElement(planItem, context);
    // then
    Boolean isBlocking = (Boolean) activity.getProperty(PROPERTY_IS_BLOCKING);
    assertFalse(isBlocking);
  }

  @Test
  public void testWithoutParent() {
    // given: a planItem
    // when
    CmmnActivity activity = handler.handleElement(planItem, context);
    // then
    assertNull(activity.getParent());
  }

  @Test
  public void testWithParent() {
    // given:
    // a new activity as parent
    CmmnCaseDefinition parent = new CmmnCaseDefinition("aParentActivity");
    context.setParent(parent);
    // when
    CmmnActivity activity = handler.handleElement(planItem, context);
    // then
    assertEquals(parent, activity.getParent());
    assertTrue(parent.getActivities().contains(activity));
  }

  @Test
  public void testCallableElement() {
    // given: a plan item
    // when
    CmmnActivity activity = handler.handleElement(planItem, context);
    // then
    // there exists a callableElement
    CaseTaskActivityBehavior behavior = (CaseTaskActivityBehavior) activity.getActivityBehavior();
    assertNotNull(behavior.getCallableElement());
  }

  @Test
  public void testCaseRefConstant() {
    // given: a literal (non-expression) case reference
    String caseRef = "aCaseToCall";
    caseTask.setCase(caseRef);
    // when
    CmmnActivity activity = handler.handleElement(planItem, context);
    // then: literal refs are wrapped in a ConstantValueProvider
    CaseTaskActivityBehavior behavior = (CaseTaskActivityBehavior) activity.getActivityBehavior();
    CallableElement callableElement = behavior.getCallableElement();
    ParameterValueProvider caseRefValueProvider = callableElement.getDefinitionKeyValueProvider();
    assertNotNull(caseRefValueProvider);
    assertTrue(caseRefValueProvider instanceof ConstantValueProvider);
    ConstantValueProvider valueProvider = (ConstantValueProvider) caseRefValueProvider;
    assertEquals(caseRef, valueProvider.getValue(null));
  }

  @Test
  public void testCaseRefExpression() {
    // given: an EL expression as case reference
    String caseRef = "${aCaseToCall}";
    caseTask.setCase(caseRef);
    // when
    CmmnActivity activity = handler.handleElement(planItem, context);
    // then: expression refs are wrapped in an ElValueProvider
    CaseTaskActivityBehavior behavior = (CaseTaskActivityBehavior) activity.getActivityBehavior();
    CallableElement callableElement = behavior.getCallableElement();
    ParameterValueProvider caseRefValueProvider = callableElement.getDefinitionKeyValueProvider();
    assertNotNull(caseRefValueProvider);
    assertTrue(caseRefValueProvider instanceof ElValueProvider);
    ElValueProvider valueProvider = (ElValueProvider) caseRefValueProvider;
    assertEquals(caseRef, valueProvider.getExpression().getExpressionText());
  }

  @Test
  public void testBinding() {
    // given:
    CallableElementBinding caseBinding = CallableElementBinding.LATEST;
    caseTask.setCamundaCaseBinding(caseBinding.getValue());
    // when
    CmmnActivity activity = handler.handleElement(planItem, context);
    // then
    CaseTaskActivityBehavior behavior = (CaseTaskActivityBehavior) activity.getActivityBehavior();
    CallableElement callableElement = behavior.getCallableElement();
    CallableElementBinding binding = callableElement.getBinding();
    assertNotNull(binding);
    assertEquals(caseBinding, binding);
  }

  @Test
  public void testVersionConstant() {
    // given:
    String caseVersion = "2";
    caseTask.setCamundaCaseVersion(caseVersion);
    // when
    CmmnActivity activity = handler.handleElement(planItem, context);
    // then
    CaseTaskActivityBehavior behavior = (CaseTaskActivityBehavior) activity.getActivityBehavior();
    CallableElement callableElement = behavior.getCallableElement();
    ParameterValueProvider caseVersionValueProvider = callableElement.getVersionValueProvider();
    assertNotNull(caseVersionValueProvider);
    assertTrue(caseVersionValueProvider instanceof ConstantValueProvider);
    assertEquals(caseVersion, caseVersionValueProvider.getValue(null));
  }

  @Test
  public void testVersionExpression() {
    // given:
    String caseVersion = "${aVersion}";
    caseTask.setCamundaCaseVersion(caseVersion);
    // when
    CmmnActivity activity = handler.handleElement(planItem, context);
    // then
    CaseTaskActivityBehavior behavior = (CaseTaskActivityBehavior) activity.getActivityBehavior();
    CallableElement callableElement = behavior.getCallableElement();
    ParameterValueProvider caseVersionValueProvider = callableElement.getVersionValueProvider();
    assertNotNull(caseVersionValueProvider);
    assertTrue(caseVersionValueProvider instanceof ElValueProvider);
    ElValueProvider valueProvider = (ElValueProvider) caseVersionValueProvider;
    assertEquals(caseVersion, valueProvider.getExpression().getExpressionText());
  }

  @Test
  public void testBusinessKeyConstant() {
    // given: a camunda:in element carrying a literal business key
    String businessKey = "myBusinessKey";
    ExtensionElements extensionElements = addExtensionElements(caseTask);
    CamundaIn businessKeyElement = createElement(extensionElements, null, CamundaIn.class);
    businessKeyElement.setCamundaBusinessKey(businessKey);
    // when
    CmmnActivity activity = handler.handleElement(planItem, context);
    // then
    CaseTaskActivityBehavior behavior = (CaseTaskActivityBehavior) activity.getActivityBehavior();
    CallableElement callableElement = behavior.getCallableElement();
    ParameterValueProvider businessKeyValueProvider = callableElement.getBusinessKeyValueProvider();
    assertNotNull(businessKeyValueProvider);
    assertTrue(businessKeyValueProvider instanceof ConstantValueProvider);
    assertEquals(businessKey, businessKeyValueProvider.getValue(null));
  }

  @Test
  public void testBusinessKeyExpression() {
    // given: a camunda:in element carrying a business key expression
    String businessKey = "${myBusinessKey}";
    ExtensionElements extensionElements = addExtensionElements(caseTask);
    CamundaIn businessKeyElement = createElement(extensionElements, null, CamundaIn.class);
    businessKeyElement.setCamundaBusinessKey(businessKey);
    // when
    CmmnActivity activity = handler.handleElement(planItem, context);
    // then
    CaseTaskActivityBehavior behavior = (CaseTaskActivityBehavior) activity.getActivityBehavior();
    CallableElement callableElement = behavior.getCallableElement();
    ParameterValueProvider businessKeyValueProvider = callableElement.getBusinessKeyValueProvider();
    assertNotNull(businessKeyValueProvider);
    assertTrue(businessKeyValueProvider instanceof ElValueProvider);
    ElValueProvider valueProvider = (ElValueProvider) businessKeyValueProvider;
    assertEquals(businessKey, valueProvider.getExpression().getExpressionText());
  }

  @Test
  public void testInputs() {
    // given: three camunda:in mappings (all-variables, source, sourceExpression)
    ExtensionElements extensionElements = addExtensionElements(caseTask);
    CamundaIn variablesElement = createElement(extensionElements, null, CamundaIn.class);
    variablesElement.setCamundaVariables("all");
    CamundaIn sourceElement = createElement(extensionElements, null, CamundaIn.class);
    sourceElement.setCamundaSource("a");
    CamundaIn sourceExpressionElement = createElement(extensionElements, null, CamundaIn.class);
    sourceExpressionElement.setCamundaSourceExpression("${b}");
    // when
    CmmnActivity activity = handler.handleElement(planItem, context);
    // then
    CaseTaskActivityBehavior behavior = (CaseTaskActivityBehavior) activity.getActivityBehavior();
    CallableElement callableElement = behavior.getCallableElement();
    List<CallableElementParameter> inputs = callableElement.getInputs();
    assertNotNull(inputs);
    assertFalse(inputs.isEmpty());
    assertEquals(3, inputs.size());
  }

  @Test
  public void testInputVariables() {
    // given:
    ExtensionElements extensionElements = addExtensionElements(caseTask);
    CamundaIn variablesElement = createElement(extensionElements, null, CamundaIn.class);
    variablesElement.setCamundaVariables("all");
    // when
    CmmnActivity activity = handler.handleElement(planItem, context);
    // then
    CaseTaskActivityBehavior behavior = (CaseTaskActivityBehavior) activity.getActivityBehavior();
    CallableElement callableElement = behavior.getCallableElement();
    CallableElementParameter parameter = callableElement.getInputs().get(0);
    assertNotNull(parameter);
    assertTrue(parameter.isAllVariables());
  }

  @Test
  public void testInputSource() {
    // given:
    String source = "a";
    ExtensionElements extensionElements = addExtensionElements(caseTask);
    CamundaIn sourceElement = createElement(extensionElements, null, CamundaIn.class);
    sourceElement.setCamundaSource(source);
    // when
    CmmnActivity activity = handler.handleElement(planItem, context);
    // then
    CaseTaskActivityBehavior behavior = (CaseTaskActivityBehavior) activity.getActivityBehavior();
    CallableElement callableElement = behavior.getCallableElement();
    CallableElementParameter parameter = callableElement.getInputs().get(0);
    assertNotNull(parameter);
    assertFalse(parameter.isAllVariables());
    ParameterValueProvider sourceValueProvider = parameter.getSourceValueProvider();
    assertNotNull(sourceValueProvider);
    assertTrue(sourceValueProvider instanceof ConstantValueProvider);
    assertEquals(source, sourceValueProvider.getValue(null));
  }

  @Test
  public void testInputSourceExpression() {
    // given:
    String source = "${a}";
    ExtensionElements extensionElements = addExtensionElements(caseTask);
    CamundaIn sourceElement = createElement(extensionElements, null, CamundaIn.class);
    sourceElement.setCamundaSourceExpression(source);
    // when
    CmmnActivity activity = handler.handleElement(planItem, context);
    // then
    CaseTaskActivityBehavior behavior = (CaseTaskActivityBehavior) activity.getActivityBehavior();
    CallableElement callableElement = behavior.getCallableElement();
    CallableElementParameter parameter = callableElement.getInputs().get(0);
    assertNotNull(parameter);
    assertFalse(parameter.isAllVariables());
    ParameterValueProvider sourceExpressionValueProvider = parameter.getSourceValueProvider();
    assertNotNull(sourceExpressionValueProvider);
    assertTrue(sourceExpressionValueProvider instanceof ElValueProvider);
    ElValueProvider valueProvider = (ElValueProvider) sourceExpressionValueProvider;
    assertEquals(source, valueProvider.getExpression().getExpressionText());
  }

  @Test
  public void testInputTarget() {
    // given:
    String target = "b";
    ExtensionElements extensionElements = addExtensionElements(caseTask);
    CamundaIn sourceElement = createElement(extensionElements, null, CamundaIn.class);
    sourceElement.setCamundaTarget(target);
    // when
    CmmnActivity activity = handler.handleElement(planItem, context);
    // then
    CaseTaskActivityBehavior behavior = (CaseTaskActivityBehavior) activity.getActivityBehavior();
    CallableElement callableElement = behavior.getCallableElement();
    CallableElementParameter parameter = callableElement.getInputs().get(0);
    assertNotNull(parameter);
    assertFalse(parameter.isAllVariables());
    assertEquals(target, parameter.getTarget());
  }

  @Test
  public void testOutputs() {
    // given: three camunda:out mappings (all-variables, source, sourceExpression)
    ExtensionElements extensionElements = addExtensionElements(caseTask);
    CamundaOut variablesElement = createElement(extensionElements, null, CamundaOut.class);
    variablesElement.setCamundaVariables("all");
    CamundaOut sourceElement = createElement(extensionElements, null, CamundaOut.class);
    sourceElement.setCamundaSource("a");
    CamundaOut sourceExpressionElement = createElement(extensionElements, null, CamundaOut.class);
    sourceExpressionElement.setCamundaSourceExpression("${b}");
    // when
    CmmnActivity activity = handler.handleElement(planItem, context);
    // then
    CaseTaskActivityBehavior behavior = (CaseTaskActivityBehavior) activity.getActivityBehavior();
    CallableElement callableElement = behavior.getCallableElement();
    List<CallableElementParameter> outputs = callableElement.getOutputs();
    assertNotNull(outputs);
    assertFalse(outputs.isEmpty());
    assertEquals(3, outputs.size());
  }

  @Test
  public void testOutputVariables() {
    // given:
    ExtensionElements extensionElements = addExtensionElements(caseTask);
    CamundaOut variablesElement = createElement(extensionElements, null, CamundaOut.class);
    variablesElement.setCamundaVariables("all");
    // when
    CmmnActivity activity = handler.handleElement(planItem, context);
    // then
    CaseTaskActivityBehavior behavior = (CaseTaskActivityBehavior) activity.getActivityBehavior();
    CallableElement callableElement = behavior.getCallableElement();
    CallableElementParameter parameter = callableElement.getOutputs().get(0);
    assertNotNull(parameter);
    assertTrue(parameter.isAllVariables());
  }

  @Test
  public void testOutputSource() {
    // given:
    String source = "a";
    ExtensionElements extensionElements = addExtensionElements(caseTask);
    CamundaOut sourceElement = createElement(extensionElements, null, CamundaOut.class);
    sourceElement.setCamundaSource(source);
    // when
    CmmnActivity activity = handler.handleElement(planItem, context);
    // then
    CaseTaskActivityBehavior behavior = (CaseTaskActivityBehavior) activity.getActivityBehavior();
    CallableElement callableElement = behavior.getCallableElement();
    CallableElementParameter parameter = callableElement.getOutputs().get(0);
    assertNotNull(parameter);
    assertFalse(parameter.isAllVariables());
    ParameterValueProvider sourceValueProvider = parameter.getSourceValueProvider();
    assertNotNull(sourceValueProvider);
    assertTrue(sourceValueProvider instanceof ConstantValueProvider);
    assertEquals(source, sourceValueProvider.getValue(null));
  }

  @Test
  public void testOutputSourceExpression() {
    // given:
    String source = "${a}";
    ExtensionElements extensionElements = addExtensionElements(caseTask);
    CamundaOut sourceElement = createElement(extensionElements, null, CamundaOut.class);
    sourceElement.setCamundaSourceExpression(source);
    // when
    CmmnActivity activity = handler.handleElement(planItem, context);
    // then
    CaseTaskActivityBehavior behavior = (CaseTaskActivityBehavior) activity.getActivityBehavior();
    CallableElement callableElement = behavior.getCallableElement();
    CallableElementParameter parameter = callableElement.getOutputs().get(0);
    assertNotNull(parameter);
    assertFalse(parameter.isAllVariables());
    ParameterValueProvider sourceExpressionValueProvider = parameter.getSourceValueProvider();
    assertNotNull(sourceExpressionValueProvider);
    assertTrue(sourceExpressionValueProvider instanceof ElValueProvider);
    ElValueProvider valueProvider = (ElValueProvider) sourceExpressionValueProvider;
    assertEquals(source, valueProvider.getExpression().getExpressionText());
  }

  @Test
  public void testOutputTarget() {
    // given:
    String target = "b";
    ExtensionElements extensionElements = addExtensionElements(caseTask);
    CamundaOut sourceElement = createElement(extensionElements, null, CamundaOut.class);
    sourceElement.setCamundaTarget(target);
    // when
    CmmnActivity activity = handler.handleElement(planItem, context);
    // then
    CaseTaskActivityBehavior behavior = (CaseTaskActivityBehavior) activity.getActivityBehavior();
    CallableElement callableElement = behavior.getCallableElement();
    CallableElementParameter parameter = callableElement.getOutputs().get(0);
    assertNotNull(parameter);
    assertFalse(parameter.isAllVariables());
    assertEquals(target, parameter.getTarget());
  }

  @Test
  public void testExitCriteria() {
    // given
    // create sentry containing ifPart
    Sentry sentry = createElement(casePlanModel, "Sentry_1", Sentry.class);
    IfPart ifPart = createElement(sentry, "abc", IfPart.class);
    ConditionExpression conditionExpression = createElement(ifPart, "def", ConditionExpression.class);
    Body body = createElement(conditionExpression, null, Body.class);
    body.setTextContent("${test}");
    // set exitCriteria
    planItem.getExitCriterias().add(sentry);
    // transform casePlanModel as parent
    CmmnActivity parent = new CasePlanModelHandler().handleElement(casePlanModel, context);
    context.setParent(parent);
    // transform Sentry
    CmmnSentryDeclaration sentryDeclaration = new SentryHandler().handleElement(sentry, context);
    // when
    CmmnActivity newActivity = handler.handleElement(planItem, context);
    // then
    assertTrue(newActivity.getEntryCriteria().isEmpty());
    assertFalse(newActivity.getExitCriteria().isEmpty());
    assertEquals(1, newActivity.getExitCriteria().size());
    assertEquals(sentryDeclaration, newActivity.getExitCriteria().get(0));
  }

  @Test
  public void testMultipleExitCriteria() {
    // given
    // create first sentry containing ifPart
    Sentry sentry1 = createElement(casePlanModel, "Sentry_1", Sentry.class);
    IfPart ifPart1 = createElement(sentry1, "abc", IfPart.class);
    ConditionExpression conditionExpression1 = createElement(ifPart1, "def", ConditionExpression.class);
    Body body1 = createElement(conditionExpression1, null, Body.class);
    body1.setTextContent("${test}");
    // set first exitCriteria
    planItem.getExitCriterias().add(sentry1);
    // create second sentry containing ifPart
    Sentry sentry2 = createElement(casePlanModel, "Sentry_2", Sentry.class);
    IfPart ifPart2 = createElement(sentry2, "ghi", IfPart.class);
    ConditionExpression conditionExpression2 = createElement(ifPart2, "jkl", ConditionExpression.class);
    Body body2 = createElement(conditionExpression2, null, Body.class);
    body2.setTextContent("${test}");
    // set second exitCriteria
    planItem.getExitCriterias().add(sentry2);
    // transform casePlanModel as parent
    CmmnActivity parent = new CasePlanModelHandler().handleElement(casePlanModel, context);
    context.setParent(parent);
    // transform Sentry
    CmmnSentryDeclaration firstSentryDeclaration = new SentryHandler().handleElement(sentry1, context);
    CmmnSentryDeclaration secondSentryDeclaration = new SentryHandler().handleElement(sentry2, context);
    // when
    CmmnActivity newActivity = handler.handleElement(planItem, context);
    // then
    assertTrue(newActivity.getEntryCriteria().isEmpty());
    assertFalse(newActivity.getExitCriteria().isEmpty());
    assertEquals(2, newActivity.getExitCriteria().size());
    assertTrue(newActivity.getExitCriteria().contains(firstSentryDeclaration));
    assertTrue(newActivity.getExitCriteria().contains(secondSentryDeclaration));
  }

  @Test
  public void testEntryCriteria() {
    // given
    // create sentry containing ifPart
    Sentry sentry = createElement(casePlanModel, "Sentry_1", Sentry.class);
    IfPart ifPart = createElement(sentry, "abc", IfPart.class);
    ConditionExpression conditionExpression = createElement(ifPart, "def", ConditionExpression.class);
    Body body = createElement(conditionExpression, null, Body.class);
    body.setTextContent("${test}");
    // set entryCriteria
    planItem.getEntryCriterias().add(sentry);
    // transform casePlanModel as parent
    CmmnActivity parent = new CasePlanModelHandler().handleElement(casePlanModel, context);
    context.setParent(parent);
    // transform Sentry
    CmmnSentryDeclaration sentryDeclaration = new SentryHandler().handleElement(sentry, context);
    // when
    CmmnActivity newActivity = handler.handleElement(planItem, context);
    // then
    assertTrue(newActivity.getExitCriteria().isEmpty());
    assertFalse(newActivity.getEntryCriteria().isEmpty());
    assertEquals(1, newActivity.getEntryCriteria().size());
    assertEquals(sentryDeclaration, newActivity.getEntryCriteria().get(0));
  }

  @Test
  public void testMultipleEntryCriteria() {
    // given
    // create first sentry containing ifPart
    Sentry sentry1 = createElement(casePlanModel, "Sentry_1", Sentry.class);
    IfPart ifPart1 = createElement(sentry1, "abc", IfPart.class);
    ConditionExpression conditionExpression1 = createElement(ifPart1, "def", ConditionExpression.class);
    Body body1 = createElement(conditionExpression1, null, Body.class);
    body1.setTextContent("${test}");
    // set first entryCriteria
    planItem.getEntryCriterias().add(sentry1);
    // create second sentry containing ifPart
    Sentry sentry2 = createElement(casePlanModel, "Sentry_2", Sentry.class);
    IfPart ifPart2 = createElement(sentry2, "ghi", IfPart.class);
    ConditionExpression conditionExpression2 = createElement(ifPart2, "jkl", ConditionExpression.class);
    Body body2 = createElement(conditionExpression2, null, Body.class);
    body2.setTextContent("${test}");
    // set second entryCriteria
    planItem.getEntryCriterias().add(sentry2);
    // transform casePlanModel as parent
    CmmnActivity parent = new CasePlanModelHandler().handleElement(casePlanModel, context);
    context.setParent(parent);
    // transform Sentry
    CmmnSentryDeclaration firstSentryDeclaration = new SentryHandler().handleElement(sentry1, context);
    CmmnSentryDeclaration secondSentryDeclaration = new SentryHandler().handleElement(sentry2, context);
    // when
    CmmnActivity newActivity = handler.handleElement(planItem, context);
    // then
    assertTrue(newActivity.getExitCriteria().isEmpty());
    assertFalse(newActivity.getEntryCriteria().isEmpty());
    assertEquals(2, newActivity.getEntryCriteria().size());
    assertTrue(newActivity.getEntryCriteria().contains(firstSentryDeclaration));
    assertTrue(newActivity.getEntryCriteria().contains(secondSentryDeclaration));
  }

  @Test
  public void testEntryCriteriaAndExitCriteria() {
    // given
    // create sentry containing ifPart
    Sentry sentry = createElement(casePlanModel, "Sentry_1", Sentry.class);
    IfPart ifPart = createElement(sentry, "abc", IfPart.class);
    ConditionExpression conditionExpression = createElement(ifPart, "def", ConditionExpression.class);
    Body body = createElement(conditionExpression, null, Body.class);
    body.setTextContent("${test}");
    // set entry-/exitCriteria: the same sentry may serve as both
    planItem.getEntryCriterias().add(sentry);
    planItem.getExitCriterias().add(sentry);
    // transform casePlanModel as parent
    CmmnActivity parent = new CasePlanModelHandler().handleElement(casePlanModel, context);
    context.setParent(parent);
    // transform Sentry
    CmmnSentryDeclaration sentryDeclaration = new SentryHandler().handleElement(sentry, context);
    // when
    CmmnActivity newActivity = handler.handleElement(planItem, context);
    // then
    assertFalse(newActivity.getExitCriteria().isEmpty());
    assertEquals(1, newActivity.getExitCriteria().size());
    assertEquals(sentryDeclaration, newActivity.getExitCriteria().get(0));
    assertFalse(newActivity.getEntryCriteria().isEmpty());
    assertEquals(1, newActivity.getEntryCriteria().size());
    assertEquals(sentryDeclaration, newActivity.getEntryCriteria().get(0));
  }

  @Test
  public void testManualActivationRule() {
    // given: a manual activation rule on the plan item's own itemControl
    ItemControl itemControl = createElement(planItem, "ItemControl_1", ItemControl.class);
    ManualActivationRule manualActivationRule = createElement(itemControl, "ManualActivationRule_1", ManualActivationRule.class);
    ConditionExpression expression = createElement(manualActivationRule, "Expression_1", ConditionExpression.class);
    Body body = createElement(expression, Body.class);
    body.setTextContent("${true}");
    Cmmn.validateModel(modelInstance);
    // when
    CmmnActivity newActivity = handler.handleElement(planItem, context);
    // then
    Object rule = newActivity.getProperty(PROPERTY_MANUAL_ACTIVATION_RULE);
    assertNotNull(rule);
    assertTrue(rule instanceof CaseControlRule);
  }

  @Test
  public void testManualActivationRuleByDefaultPlanItemControl() {
    // given: a manual activation rule on the definition's defaultControl (fallback path)
    PlanItemControl defaultControl = createElement(caseTask, "ItemControl_1", DefaultControl.class);
    ManualActivationRule manualActivationRule = createElement(defaultControl, "ManualActivationRule_1", ManualActivationRule.class);
    ConditionExpression expression = createElement(manualActivationRule, "Expression_1", ConditionExpression.class);
    Body body = createElement(expression, Body.class);
    body.setTextContent("${true}");
    Cmmn.validateModel(modelInstance);
    // when
    CmmnActivity newActivity = handler.handleElement(planItem, context);
    // then
    Object rule = newActivity.getProperty(PROPERTY_MANUAL_ACTIVATION_RULE);
    assertNotNull(rule);
    assertTrue(rule instanceof CaseControlRule);
  }

  @Test
  public void testRequiredRule() {
    // given: a required rule on the plan item's own itemControl
    ItemControl itemControl = createElement(planItem, "ItemControl_1", ItemControl.class);
    RequiredRule requiredRule = createElement(itemControl, "RequiredRule_1", RequiredRule.class);
    ConditionExpression expression = createElement(requiredRule, "Expression_1", ConditionExpression.class);
    Body body = createElement(expression, Body.class);
    body.setTextContent("${true}");
    Cmmn.validateModel(modelInstance);
    // when
    CmmnActivity newActivity = handler.handleElement(planItem, context);
    // then
    Object rule = newActivity.getProperty(PROPERTY_REQUIRED_RULE);
    assertNotNull(rule);
    assertTrue(rule instanceof CaseControlRule);
  }

  @Test
  public void testRequiredRuleByDefaultPlanItemControl() {
    // given: a required rule on the definition's defaultControl (fallback path)
    PlanItemControl defaultControl = createElement(caseTask, "ItemControl_1", DefaultControl.class);
    RequiredRule requiredRule = createElement(defaultControl, "RequiredRule_1", RequiredRule.class);
    ConditionExpression expression = createElement(requiredRule, "Expression_1", ConditionExpression.class);
    Body body = createElement(expression, Body.class);
    body.setTextContent("${true}");
    Cmmn.validateModel(modelInstance);
    // when
    CmmnActivity newActivity = handler.handleElement(planItem, context);
    // then
    Object rule = newActivity.getProperty(PROPERTY_REQUIRED_RULE);
    assertNotNull(rule);
    assertTrue(rule instanceof CaseControlRule);
  }
}
| |
// Copyright (c) Corporation for National Research Initiatives
package org.python.util;
import org.python.core.Py;
import org.python.core.PyBuiltinFunctionSet;
import org.python.core.PyException;
import org.python.core.PyObject;
import org.python.core.PyString;
import org.python.core.PySystemState;
import org.python.core.__builtin__;
/**
* This class provides the read, execute, print loop needed by a Python console; it is not actually
* a console itself. The primary capability is the {@link #interact()} method, which repeatedly
* calls {@link #raw_input(PyObject)}, and hence {@link __builtin__#raw_input(PyObject)}, in order
* to get lines, and {@link #push(String)} them into the interpreter. The built-in
* <code>raw_input()</code> method prompts on <code>sys.stdout</code> and reads from
* <code>sys.stdin</code>, the standard console. These may be redirected using
* {@link #setOut(java.io.OutputStream)} and {@link #setIn(java.io.InputStream)}, as may also
* <code>sys.stderr</code>.
*/
// Based on CPython-1.5.2's code module
public class InteractiveConsole extends InteractiveInterpreter {
public static final String CONSOLE_FILENAME = "<stdin>";
public String filename;
/**
* Construct an interactive console, which will "run" when {@link #interact()} is called. The
* name of the console (e.g. in error messages) will be {@value #CONSOLE_FILENAME}.
*/
public InteractiveConsole() {
this(null, CONSOLE_FILENAME);
}
/**
* Construct an interactive console, which will "run" when {@link #interact()} is called. The
* name of the console (e.g. in error messages) will be {@value #CONSOLE_FILENAME}.
*
* @param locals dictionary to use, or if <code>null</code>, a new empty one will be created
*/
public InteractiveConsole(PyObject locals) {
this(locals, CONSOLE_FILENAME);
}
/**
* Construct an interactive console, which will "run" when {@link #interact()} is called.
*
* @param locals dictionary to use, or if <code>null</code>, a new empty one will be created
* @param filename name with which to label this console input (e.g. in error messages).
*/
public InteractiveConsole(PyObject locals, String filename) {
this(locals, filename, false);
}
/**
* Full-feature constructor for an interactive console, which will "run" when
* {@link #interact()} is called. This version allows the caller to replace the built-in
* raw_input() methods with {@link #raw_input(PyObject)} and
* {@link #raw_input(PyObject, PyObject)}, which may be overridden in a sub-class.
*
* @param locals dictionary to use, or if <code>null</code>, a new empty one will be created
* @param filename name with which to label this console input
* @param replaceRawInput if true, hook this class's <code>raw_input</code> into the built-ins.
*/
public InteractiveConsole(PyObject locals, String filename, boolean replaceRawInput) {
super(locals);
this.filename = filename;
if (replaceRawInput) {
PyObject newRawInput = new PyBuiltinFunctionSet("raw_input", 0, 0, 1) {
@Override
public PyObject __call__() {
return __call__(Py.EmptyString);
}
@Override
public PyObject __call__(PyObject prompt) {
return Py.newString(raw_input(prompt));
}
};
Py.getSystemState().getBuiltins().__setitem__("raw_input", newRawInput);
}
}
/**
* Operate a Python console, as in {@link #interact(String, PyObject)}, on the standard input.
* The standard input may have been redirected by {@link #setIn(java.io.InputStream)} or its
* variants. The banner (printed before first input) is obtained by calling
* {@link #getDefaultBanner()}.
*/
public void interact() {
interact(getDefaultBanner(), null);
}
/**
* Returns the banner to print before the first interaction: "Jython <version> on <platform>".
*
* @return the banner.
*/
public static String getDefaultBanner() {
return String
.format("Jython %s on %s", PySystemState.version, Py.getSystemState().platform);
}
/**
* Operate a Python console by repeatedly calling {@link #raw_input(PyObject, PyObject)} and
* interpreting the lines read. An end of file causes the method to return.
*
* @param banner to print before accepting input, or if <code>null</code>, no banner.
* @param file from which to read commands, or if <code>null</code>, read the console.
*/
public void interact(String banner, PyObject file) {
PyObject old_ps1 = systemState.ps1;
PyObject old_ps2 = systemState.ps2;
systemState.ps1 = new PyString(">>> ");
systemState.ps2 = new PyString("... ");
try {
_interact(banner, file);
} finally {
systemState.ps1 = old_ps1;
systemState.ps2 = old_ps2;
}
}
public void _interact(String banner, PyObject file) {
if (banner != null) {
write(banner);
write("\n");
}
// Dummy exec in order to speed up response on first command
exec("2");
// System.err.println("interp2");
boolean more = false;
while (true) {
PyObject prompt = more ? systemState.ps2 : systemState.ps1;
String line;
try {
if (file == null) {
line = raw_input(prompt);
} else {
line = raw_input(prompt, file);
}
} catch (PyException exc) {
if (!exc.match(Py.EOFError)) {
throw exc;
}
if (banner != null) {
write("\n");
}
break;
} catch (Throwable t) {
// catch jline.console.UserInterruptException, rethrow as a KeyboardInterrupt
throw Py.JavaError(t);
// One would expect that it would be possible to then catch the KeyboardInterrupt at the
// bottom of this loop, however, for some reason the control-C restores the input text,
// so simply doing
// resetbuffer(); more = false;
// is not sufficient
}
more = push(line);
}
}
/**
* Push a line to the interpreter.
*
* The line should not have a trailing newline; it may have internal newlines. The line is
* appended to a buffer and the interpreter's runsource() method is called with the concatenated
* contents of the buffer as source. If this indicates that the command was executed or invalid,
* the buffer is reset; otherwise, the command is incomplete, and the buffer is left as it was
* after the line was appended. The return value is 1 if more input is required, 0 if the line
* was dealt with in some way (this is the same as runsource()).
*/
public boolean push(String line) {
if (buffer.length() > 0) {
buffer.append("\n");
}
buffer.append(line);
boolean more = runsource(buffer.toString(), filename);
if (!more) {
resetbuffer();
}
return more;
}
/**
 * Write a prompt and read a line from standard input. The returned line does not include the
 * trailing newline. When the user enters the EOF key sequence, EOFError is raised. The base
 * implementation uses the built-in function raw_input(); a subclass may replace this with a
 * different implementation.
 */
public String raw_input(PyObject prompt) {
// Thin delegate: all prompting/reading is done by the interpreter built-in.
return __builtin__.raw_input(prompt);
}
/**
 * Write a prompt and read a line from a file.
 */
public String raw_input(PyObject prompt, PyObject file) {
// Same contract as raw_input(prompt), but reads from the given file object.
return __builtin__.raw_input(prompt, file);
}
}
| |
/*
* TwoWaySerialComm.java
*
*
* @author Kersten Tams Copyright 2009-2018
* @author Lothar Roth Copyright 2012-2018
*
*/
package my.CVNavi;
/**
*
* @author ktams
*/
import purejavacomm.*;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.logging.Level;
import java.util.logging.Logger;
import static my.CVNavi.CVNavi.debugLevel;
/**
 * Two-way serial communication wrapper around a purejavacomm SerialPort.
 * Tracks the parameters of the currently open connection so a reconnect with
 * identical settings is a no-op, and exposes simple read/write helpers.
 */
public class TwoWaySerialComm {
    // Open serial port, or null until connect() succeeds.
    private SerialPort tw_serialPort = null;
    private boolean tw_connected = false;
    // Owning application object; supplies the connection settings.
    private CVNavi CVNavi = null;
    // Parameters of the currently open connection, used by connect() to detect
    // whether an existing connection can be reused.
    private String tw_dev = null;
    private int tw_baud = -1;
    private boolean tw_rtscts = false;

    public TwoWaySerialComm()
    {
        super();
    }

    /** Returns true while a serial connection is open. */
    public Boolean isconnected()
    {
        return this.tw_connected;
    }

    /** Alias for {@link #disconnect()}. */
    void close() // throws Exception
    {
        disconnect();
    }

    /** Closes the port if open and resets all cached connection parameters. */
    void disconnect() // throws Exception
    {
        if( debugLevel >= 1 ) {
            System.out.println("DISCONNECT called" );
        }
        if( tw_serialPort != null && tw_connected == true ) {
            if( debugLevel >= 1 ) {
                System.out.println("DISCONNECT closing port" );
            }
            tw_serialPort.close();
            if( debugLevel >= 1 ) {
                System.out.println("DISCONNECT closed port" );
            }
        }
        tw_connected = false;
        tw_dev = null;
        tw_baud = -1;
        tw_rtscts = false;
    }

    /**
     * Opens the serial port described by the CVNavi settings (device, baud
     * rate, RTS/CTS). If already connected with identical parameters this is
     * a no-op; with different parameters the old connection is closed first.
     *
     * @param cvnavi settings provider; remembered on first call, may be null
     *               on later calls once a CVNavi reference is cached
     * @throws Exception propagated from setSerialPortParams and friends
     */
    void connect( CVNavi cvnavi) throws Exception
    {
        if( debugLevel > 1 ) {
            String sunModel = System.getProperty("sun.arch.data.model");
            String osName = System.getProperty("os.name");
            String osArch = System.getProperty("os.arch");
            if( debugLevel >= 1 ) {
                System.out.println("inside CONNECT myModel["+sunModel+"] osName["+osName+"] osArch["+osArch+"]");
            }
        }
        if( debugLevel > 2 ) {
            System.out.println("NEW CONNECT cvnavi["+cvnavi+"] CVNavi["+CVNavi+"]");
        }
        // Cache the application reference on first use.
        if( CVNavi == null && cvnavi != null )
            CVNavi = cvnavi;
        if( CVNavi != null ) {
            if( debugLevel >= 1 ) {
                System.out.println("NEW CONNECT dev["+CVNavi.gsSchnittstelle+"] baud["+CVNavi.gsBaudRate+"] rtscts["+CVNavi.gsRtsCts+"]");
            }
        } else {
            if( debugLevel >= 1 ) {
                System.out.println("NEW CONNECT without valid CVNavi -> ABORT CONNECT" );
            }
            return;
        }
        if( tw_connected ) {
            // Reuse the existing connection when nothing changed.
            if( tw_dev.equalsIgnoreCase(CVNavi.gsSchnittstelle) && tw_baud == CVNavi.gsBaudRate && tw_rtscts == CVNavi.gsRtsCts) {
                if( debugLevel >= 1 ) {
                    System.out.println("CONNECT but already OPENED with identical parameters" );
                }
                return;
            }
            else {
                if( debugLevel >= 1 ) {
                    System.out.println("CONNECT but already OPENED with other parameters OLD dev["+tw_dev+"] baud["+tw_baud+"] rtscts["+tw_rtscts+"] "
                            +"NEW dev["+CVNavi.gsSchnittstelle+"] baud["+CVNavi.gsBaudRate+"] rtscts["+CVNavi.gsRtsCts+"]");
                }
                disconnect();
            }
        } else {
            System.out.println("CONNECT and not OPENED" );
        }
        CommPortIdentifier portIdentifier = null;
        try {
            if( debugLevel > 3 ) {
                System.out.println("CommPortIdentifier.getPortIdentifier("+CVNavi.gsSchnittstelle+")");
            }
            portIdentifier = CommPortIdentifier.getPortIdentifier(CVNavi.gsSchnittstelle);
        }
        catch (NoSuchPortException e) {
            System.out.println("NoSuchPortException EXCEPTION in CONNECT for "+CVNavi.gsSchnittstelle);
            return;
        }
        catch (Exception e) {
            System.out.println("EXCEPTION in CONNECT "+ e);
            return;
        }
        catch (UnsatisfiedLinkError e) {
            System.out.println("connect: UnsatisfiedLinkError: "+ e);
            // NOTE(review): message still names "rxtxSerial.dll" although the
            // imports use purejavacomm — possibly stale; confirm before changing.
            CVNavi.mbGeneric( CVNavi, "INFO: Library may be missing or does not fit.", "rxtxSerial.dll", e.getMessage() );
            return;
        }
        catch (NoClassDefFoundError e) {
            System.out.println("NoClassDefFoundError in CONNECT "+ e);
            return;
        }
        catch (Error e) {
            System.out.println("Error in CONNECT "+ e);
            return;
        }
        if( debugLevel > 0 ) {
            System.out.println("inside CONNECT");
        }
        if( debugLevel >= 2 ) {
            System.out.println("inside CONNECT name["+portIdentifier.getName()+"] portType["+portIdentifier.getPortType()+"] "
                    +"currentOwner["+portIdentifier.getCurrentOwner()+"] str["+portIdentifier.toString()+"] class["+portIdentifier.getClass().toString()+"]");
        } else if( debugLevel >= 0 ) {
            System.out.println("inside CONNECT name["+portIdentifier.getName()+"] portType["+portIdentifier.getPortType()+"] "
                    +"currentOwner["+portIdentifier.getCurrentOwner()+"]");
        }
        if ( portIdentifier.isCurrentlyOwned() )
        {
            CVNavi.mbDeviceOwned( null );
            System.out.println("mbDeviceOwned by["+portIdentifier.getCurrentOwner()+"] name["+portIdentifier.getName()+"] type["+portIdentifier.getPortType()+"]");
        }
        else
        {
            if( debugLevel > 3 ) {
                System.out.println("call portIdentifier.open PRE");
            }
            CommPort commPort = portIdentifier.open(this.getClass().getName(),2000);
            if( debugLevel > 3 ) {
                System.out.println("call portIdentifier.open POST");
            }
            if( commPort instanceof SerialPort )
            {
                if( debugLevel > 3 ) {
                    System.out.println("commPort instanceof SerialPort A");
                }
                tw_serialPort = (SerialPort) commPort;
                if( debugLevel > 3 ) {
                    System.out.println("commPort instanceof SerialPort B");
                }
                // All supported command stations currently use 8N2.
                // NOTE(review): an unknown station type falls through without
                // setting port parameters — confirm this is intentional.
                switch(CVNavi.getZentrale())
                {
                    case c.cuOpenDCC: // OpenDCC
                        tw_serialPort.setSerialPortParams(CVNavi.gsBaudRate,SerialPort.DATABITS_8,SerialPort.STOPBITS_2,SerialPort.PARITY_NONE);
                        break;
                    case c.cuIntellibox1: // Intellibox
                        tw_serialPort.setSerialPortParams(CVNavi.gsBaudRate,SerialPort.DATABITS_8,SerialPort.STOPBITS_2,SerialPort.PARITY_NONE);
                        break;
                    case c.cuMasterControl: // MasterControl
                        tw_serialPort.setSerialPortParams(CVNavi.gsBaudRate,SerialPort.DATABITS_8,SerialPort.STOPBITS_2,SerialPort.PARITY_NONE);
                        break;
                }
                System.out.println("FlowControlMode is ["+tw_serialPort.getFlowControlMode()+"]" );
                if (CVNavi.gsRtsCts) {
                    // Enable hardware flow control in both directions.
                    tw_serialPort.setFlowControlMode(SerialPort.FLOWCONTROL_RTSCTS_IN | SerialPort.FLOWCONTROL_RTSCTS_OUT);
                }
                // Reads return after at most 500 ms even with no data.
                tw_serialPort.enableReceiveTimeout(500);
                tw_connected = true;
                tw_dev = CVNavi.gsSchnittstelle;
                tw_baud = CVNavi.gsBaudRate;
                tw_rtscts = CVNavi.gsRtsCts;
                if( CVNavi.gsUse_RTSDTR == true ) {
                    // Optionally raise RTS/DTR lines (some hardware is powered
                    // or enabled through them).
                    showRTSDTR( "connect: ", tw_serialPort, " pre optional set" );
                    if( ! tw_serialPort.isRTS() ) {
                        System.out.println("connect: set RTS");
                        tw_serialPort.setRTS(true);
                    }
                    if( ! tw_serialPort.isDTR() ) {
                        System.out.println("connect: set DTR");
                        tw_serialPort.setDTR(true);
                    }
                    showRTSDTR( "connect: ", tw_serialPort, " post optional set" );
                }
            }
            else
            {
                System.out.println("Error: Only serial or USB ports are handled.");
                if( debugLevel > 3 ) {
                    System.out.println("commPort NOT instanceof SerialPort commPort"+commPort.toString());
                }
            }
        }
    }

    /** Debug helper without prefix/suffix text. */
    void showRTSDTR( SerialPort sp ) {
        showRTSDTR( "", sp, "" );
    }

    /** Prints the current RTS/DTR line state when debugging is verbose enough. */
    void showRTSDTR( String s1, SerialPort sp, String s2 ) {
        if( CVNavi.gsUse_RTSDTR == true ) {
            if( debugLevel > 4 ) {
                System.out.println(s1+"RTS="+sp.isRTS()+" DTR="+sp.isDTR()+s2);
            }
        }
    }

    /**
     * Reads bytes from the port into the given buffer.
     *
     * @return the number of bytes read, -1 on end-of-stream, or 0 when the
     *         read failed (port closed or I/O error)
     */
    int read(byte[] bArray) {
        try {
            InputStream in = tw_serialPort.getInputStream();
            return in.read(bArray);
        } catch (IOException | NullPointerException ex) {
            // TODO: is it OK that this notification stays disabled? CVNavi.mbDeviceReadProblem();
            Logger.getLogger(TwoWaySerialComm.class.getName()).log(Level.SEVERE, null, ex);
            // Preserve the historical behaviour of returning 0 on failure.
            return 0;
        }
    }

    /** Writes each int's low byte to the port as one bulk write. */
    void write(int[] data) {
        try {
            int n = data.length;
            byte[] bArray = new byte[n];
            OutputStream out = tw_serialPort.getOutputStream();
            for (int i = 0; i < n; i++)
            {
                bArray[i] = (byte) data[i];
            }
            out.write(bArray);
        } catch (IOException ex) {
            CVNavi.mbDeviceWriteProblem( null );
        }
    }

    /** Writes a single byte to the port. */
    void write(byte b) {
        try {
            OutputStream out = tw_serialPort.getOutputStream();
            out.write(b);
        } catch (IOException ex) {
            CVNavi.mbDeviceWriteProblem( null );
        }
    }

    /** Writes the whole buffer to the port. */
    void write(byte[] bArr) {
        try {
            OutputStream out = tw_serialPort.getOutputStream();
            // Single bulk write instead of the former one-byte-per-call loop.
            out.write(bArr);
        } catch (IOException ex) {
            CVNavi.mbDeviceWriteProblem( null );
        }
    }

    /**
     * Writes the string to the port, truncating each char to its low byte
     * (the protocol data sent here is plain ASCII).
     */
    void write(String s) {
        try {
            OutputStream out = tw_serialPort.getOutputStream();
            int n = s.length();
            byte[] bArray = new byte[n];
            for (int i = 0; i < n; i++)
            {
                bArray[i] = (byte) s.charAt(i);
            }
            // Single bulk write instead of the former one-byte-per-call loop.
            out.write(bArray);
        } catch (IOException ex) {
            CVNavi.mbDeviceWriteProblem( null );
        }
    }
}
| |
/*
* DynamicJava - Copyright (C) 1999-2001
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files
* (the "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to permit
* persons to whom the Software is furnished to do so, subject to the
* following conditions:
* The above copyright notice and this permission notice shall be included
* in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL DYADE BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*
* Except as contained in this notice, the name of Dyade shall not be
* used in advertising or otherwise to promote the sale, use or other
* dealings in this Software without prior written authorization from
* Dyade.
*
*/
package koala.dynamicjava.tree;
import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeSupport;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import koala.dynamicjava.tree.visitor.Visitor;
/**
 * Base class for the nodes of the syntax tree. Every node records its source
 * location (filename plus begin/end line and column), carries a free-form
 * property map, and exposes bound-property change support for all of them.
 *
 * @author Stephane Hillion
 * @version 1.1 - 1999/11/12
 */
public abstract class Node {
    /** The filename property name */
    public final static String FILENAME = "filename";

    /** The beginLine property name */
    public final static String BEGIN_LINE = "beginLine";

    /** The endLine property name */
    public final static String END_LINE = "endLine";

    /** The beginColumn property name */
    public final static String BEGIN_COLUMN = "beginColumn";

    /** The endColumn property name */
    public final static String END_COLUMN = "endColumn";

    /** The filename */
    private String filename;

    /** The begin line in the source code */
    private int beginLine;

    /** The begin column in the begin line */
    private int beginColumn;

    /** The end line in the source code */
    private int endLine;

    /** The end column in the end line */
    private int endColumn;

    /** The support for the property change mechanism */
    private PropertyChangeSupport propertyChangeSupport;

    /** The properties */
    private Map properties;

    /**
     * Initializes the node
     * @param fn the filename
     * @param bl the begin line
     * @param bc the begin column
     * @param el the end line
     * @param ec the end column
     */
    protected Node(String fn, int bl, int bc, int el, int ec) {
        filename = fn;
        beginLine = bl;
        beginColumn = bc;
        endLine = el;
        endColumn = ec;
        propertyChangeSupport = new PropertyChangeSupport(this);
        properties = new HashMap(11);
    }

    /**
     * Returns the filename. Can be null.
     */
    public String getFilename() {
        return filename;
    }

    /**
     * Sets the filename and fires a FILENAME property change.
     */
    public void setFilename(String s) {
        String old = filename;
        filename = s;
        firePropertyChange(FILENAME, old, s);
    }

    /**
     * Returns the begin line of this node in the source code
     */
    public int getBeginLine() {
        return beginLine;
    }

    /**
     * Sets the begin line and fires a BEGIN_LINE property change.
     */
    public void setBeginLine(int i) {
        int old = beginLine;
        beginLine = i;
        firePropertyChange(BEGIN_LINE, old, i);
    }

    /**
     * Returns the begin column of this node in the begin line
     */
    public int getBeginColumn() {
        return beginColumn;
    }

    /**
     * Sets the begin column and fires a BEGIN_COLUMN property change.
     */
    public void setBeginColumn(int i) {
        int old = beginColumn;
        beginColumn = i;
        firePropertyChange(BEGIN_COLUMN, old, i);
    }

    /**
     * Returns the end line of this node in the source code
     */
    public int getEndLine() {
        return endLine;
    }

    /**
     * Sets the end line and fires an END_LINE property change.
     */
    public void setEndLine(int i) {
        int old = endLine;
        endLine = i;
        firePropertyChange(END_LINE, old, i);
    }

    /**
     * Returns the end column of this node in the end line
     */
    public int getEndColumn() {
        return endColumn;
    }

    /**
     * Sets the end column and fires an END_COLUMN property change.
     */
    public void setEndColumn(int i) {
        int old = endColumn;
        endColumn = i;
        firePropertyChange(END_COLUMN, old, i);
    }

    // Properties support //////////////////////////////////////////////////

    /**
     * Sets the value of a property and fires a change event carrying the
     * previous value (null if the property was never set).
     * @param name the property name
     * @param value the new value to set
     */
    public void setProperty(String name, Object value) {
        Object old = properties.put(name, value);
        firePropertyChange(name, old, value);
    }

    /**
     * Returns the value of a property
     * @param name the property name
     * @return null if the property was not previously set
     */
    public Object getProperty(String name) {
        return properties.get(name);
    }

    /**
     * Returns the defined properties for this node.
     * @return a set of string
     */
    public Set getProperties() {
        return properties.keySet();
    }

    /**
     * Returns true if a property is defined for this node
     * @param name the name of the property
     */
    public boolean hasProperty(String name) {
        return properties.containsKey(name);
    }

    /**
     * Adds a PropertyChangeListener to the listener list.
     * The listener is registered for all properties.
     * @param listener The PropertyChangeListener to be added
     */
    public void addPropertyChangeListener(PropertyChangeListener listener) {
        propertyChangeSupport.addPropertyChangeListener(listener);
    }

    /**
     * Removes a PropertyChangeListener from the listener list.
     * This removes a PropertyChangeListener that was registered
     * for all properties.
     * @param listener The PropertyChangeListener to be removed
     */
    public void removePropertyChangeListener(PropertyChangeListener listener) {
        propertyChangeSupport.removePropertyChangeListener(listener);
    }

    /**
     * Adds a PropertyChangeListener for a specific property. The listener
     * will be invoked only when a call on firePropertyChange names that
     * specific property.
     * @param propertyName The name of the property to listen on.
     * @param listener The PropertyChangeListener to be added
     */
    public void addPropertyChangeListener(String propertyName, PropertyChangeListener listener) {
        propertyChangeSupport.addPropertyChangeListener(propertyName, listener);
    }

    /**
     * Removes a PropertyChangeListener for a specific property.
     * @param propertyName The name of the property that was listened on.
     * @param listener The PropertyChangeListener to be removed
     */
    public void removePropertyChangeListener(String propertyName, PropertyChangeListener listener) {
        propertyChangeSupport.removePropertyChangeListener(propertyName, listener);
    }

    /**
     * Report a bound property update to any registered listeners.
     * No event is fired if old and new are equal and non-null.
     * @param propertyName The programmatic name of the property that was changed.
     * @param oldValue The old value of the property.
     * @param newValue The new value of the property.
     */
    protected void firePropertyChange(String propertyName, boolean oldValue, boolean newValue) {
        propertyChangeSupport.firePropertyChange(propertyName, oldValue, newValue);
    }

    /**
     * Report a bound property update to any registered listeners.
     * No event is fired if old and new are equal and non-null.
     * @param propertyName The programmatic name of the property that was changed.
     * @param oldValue The old value of the property.
     * @param newValue The new value of the property.
     */
    protected void firePropertyChange(String propertyName, int oldValue, int newValue) {
        propertyChangeSupport.firePropertyChange(propertyName, oldValue, newValue);
    }

    /**
     * Report a bound property update to any registered listeners.
     * No event is fired if old and new are equal and non-null.
     * @param propertyName The programmatic name of the property that was changed.
     * @param oldValue The old value of the property.
     * @param newValue The new value of the property.
     */
    protected void firePropertyChange(String propertyName, Object oldValue, Object newValue) {
        propertyChangeSupport.firePropertyChange(propertyName, oldValue, newValue);
    }

    // Visitors support ///////////////////////////////////////////////////////////

    /**
     * Allows a visitor to traverse the tree
     * @param visitor the visitor to accept
     */
    public abstract Object acceptVisitor(Visitor visitor);
}
| |
// Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
// Licensed under the Apache License, Version 2.0 (see LICENSE).
package org.pantsbuild.tools.jar;
import java.io.BufferedOutputStream;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.jar.Attributes.Name;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.jar.JarOutputStream;
import java.util.jar.Manifest;
import java.util.regex.Pattern;
import java.util.zip.CRC32;
import java.util.zip.ZipException;
import javax.annotation.Nullable;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Charsets;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.base.MoreObjects;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.base.Splitter;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.collect.LinkedListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Multimap;
import com.google.common.collect.Sets;
import com.google.common.io.ByteProcessor;
import com.google.common.io.ByteSource;
import com.google.common.io.Closer;
import com.google.common.io.Files;
/**
* A utility than can create or update jar archives with special handling of duplicate entries.
*/
public class JarBuilder implements Closeable {
/**
* Indicates a problem encountered when building up a jar's contents for writing out.
* Base (checked) exception type for all JarBuilder failures.
*/
public static class JarBuilderException extends IOException {
public JarBuilderException(String message) {
super(message);
}
public JarBuilderException(String message, Throwable cause) {
super(message, cause);
}
}
/**
* Indicates a problem writing out a jar.
*/
public static class JarCreationException extends JarBuilderException {
public JarCreationException(String message) {
super(message);
}
}
/**
* Indicates a problem indexing a pre-existing jar that will be added or updated to the target
* jar.
*/
public static class IndexingException extends JarBuilderException {
public IndexingException(File jarPath, Throwable t) {
// The offending jar's path is embedded in the message; the cause is preserved.
super("Problem indexing jar at " + jarPath + ": " + t.getMessage(), t);
}
}
/**
* Indicates a duplicate jar entry is being rejected.
* NOTE(review): unchecked (RuntimeException), presumably so it can propagate
* through functional interfaces used during entry processing — confirm.
*/
public static class DuplicateEntryException extends RuntimeException {
private final ReadableEntry entry;
DuplicateEntryException(ReadableEntry entry) {
super("Detected a duplicate entry for " + entry.getJarPath());
this.entry = entry;
}
/**
* Returns the duplicate path.
*/
public String getPath() {
return entry.getJarPath();
}
/**
* Returns the contents of the duplicate entry.
*/
public ByteSource getSource() {
return entry.contents;
}
}
/**
* Identifies an action to take when duplicate jar entries are encountered.
*/
public enum DuplicateAction {
/**
* This action skips the duplicate entry keeping the original entry.
*/
SKIP,
/**
* This action replaces the original entry with the duplicate entry.
*/
REPLACE,
/**
* This action appends the content of the duplicate entry to the original entry.
* Treats the resources as binary files.
*/
CONCAT,
/**
* Same as CONCAT, but treats these entries as newline delimited text files. Appends a newline
* to the end of the file if needed in order to separate file entries.
*/
CONCAT_TEXT,
/**
* This action throws a {@link DuplicateEntryException}.
*/
THROW
}
/**
* Encapsulates a policy for treatment of duplicate jar entries: a selector
* predicate over the jar path plus the action to apply when it matches.
*/
public static class DuplicatePolicy implements Predicate<CharSequence> {
/**
* Creates a policy that applies to entries based on a path match.
*
* @param regex A regular expression to match entry paths against.
* @param action The action to apply to duplicate entries with path matching {@code regex}.
* @return The path matching policy.
*/
public static DuplicatePolicy pathMatches(String regex, DuplicateAction action) {
// containsPattern uses find() semantics: the regex may match anywhere in
// the path unless explicitly anchored.
return new DuplicatePolicy(Predicates.containsPattern(regex), action);
}
private final Predicate<CharSequence> selector;
private final DuplicateAction action;
/**
* Creates a policy that will be applied to duplicate entries matching the given
* {@code selector}.
*
* @param selector A predicate that selects entries this policy has jurisdiction over.
* @param action The action to apply to entries selected by this policy.
*/
public DuplicatePolicy(Predicate<CharSequence> selector, DuplicateAction action) {
this.selector = Preconditions.checkNotNull(selector);
this.action = Preconditions.checkNotNull(action);
}
/**
* Returns the action that should be applied when a duplicate entry falls under this policy's
* jurisdiction.
*/
public DuplicateAction getAction() {
return action;
}
@Override
public boolean apply(CharSequence jarPath) {
return selector.apply(jarPath);
}
@Override
public String toString() {
return MoreObjects.toStringHelper(this)
.add("action", action)
.add("selector", selector)
.toString();
}
}
/**
* Handles duplicate jar entries by selecting an appropriate action based on the entry path.
*/
public static class DuplicateHandler {
/**
* Creates a handler that always applies the given {@code action}.
*
* @param action The action to perform on all duplicate entries encountered.
*/
public static DuplicateHandler always(DuplicateAction action) {
Preconditions.checkNotNull(action);
// A single always-true policy makes the default action unreachable but harmless.
return new DuplicateHandler(action,
ImmutableList.of(new DuplicatePolicy(Predicates.<CharSequence>alwaysTrue(), action)));
}
/**
* Creates a handler that merges well-known mergeable resources and otherwise skips duplicates.
* <p>
* Merged resources include META-INF/services/ files.
* </p>
*/
public static DuplicateHandler skipDuplicatesConcatWellKnownMetadata() {
DuplicatePolicy concatServices =
DuplicatePolicy.pathMatches("^META-INF/services/", DuplicateAction.CONCAT_TEXT);
ImmutableList<DuplicatePolicy> policies = ImmutableList.of(concatServices);
return new DuplicateHandler(DuplicateAction.SKIP, policies);
}
private final DuplicateAction defaultAction;
private final Iterable<DuplicatePolicy> policies;
/**
* A convenience constructor equivalent to calling:
* {@code DuplicateHandler(defaultAction, Arrays.asList(policies))}
*/
public DuplicateHandler(DuplicateAction defaultAction, DuplicatePolicy... policies) {
this(defaultAction, ImmutableList.copyOf(policies));
}
/**
* Creates a handler that applies the 1st matching policy when a duplicate entry is encountered,
* falling back to the given {@code defaultAction} if no policy applies.
*
* @param defaultAction The default action to apply when no policy matches.
* @param policies The policies to apply in preference order.
*/
public DuplicateHandler(DuplicateAction defaultAction, Iterable<DuplicatePolicy> policies) {
this.defaultAction = Preconditions.checkNotNull(defaultAction);
this.policies = ImmutableList.copyOf(policies);
}
// First matching policy wins; order of {@code policies} is significant.
@VisibleForTesting
DuplicateAction actionFor(String jarPath) {
for (DuplicatePolicy policy : policies) {
if (policy.apply(jarPath)) {
return policy.getAction();
}
}
return defaultAction;
}
}
/**
* Identifies a source for jar entries (a jar file, a directory, a single file,
* or in-memory contents).
*/
public interface Source {
/**
* Returns a name for this source.
*/
String name();
/**
* Identifies a member of this source.
* @param name the entry name within this source
*/
String identify(String name);
}
// Base for sources backed by a file-system path; name() is the backing path.
private abstract static class FileSource implements Source {
protected final File source;
protected FileSource(File source) {
this.source = source;
}
public String name() {
return source.getPath();
}
}
// Marker subtype distinguishing jar-backed sources from plain file/directory sources.
private abstract static class JarSource extends FileSource {
protected JarSource(File source) {
super(source);
}
}
/**
 * Joins the given path components with '/' (the JAR_PATH_JOINER character),
 * then collapses any runs of consecutive '/' characters so the result never
 * contains an empty component.
 *
 * @param path List of jar path components.
 * @return The joined, sanitized path string.
 */
@VisibleForTesting
static String joinJarPath(Iterable<String> path) {
    String joined = JAR_PATH_JOINER.join(path);
    return joined.replaceAll("/{2,}", "/");
}
// Source wrapping a jar file; members are identified as "path-to-jar!entry".
private static Source jarSource(File jar) {
return new JarSource(jar) {
@Override public String identify(String name) {
return String.format("%s!%s", source.getPath(), name);
}
@Override public String toString() {
return String.format("JarSource{jar=%s}", source.getPath());
}
};
}
// Source wrapping a single file. The base is the filesystem root ("/"); only
// the wrapped file's own path is a valid entry name — anything else throws.
private static Source fileSource(final File file) {
return new FileSource(new File("/")) {
@Override public String identify(String name) {
if (!file.getPath().equals(name)) {
throw new IllegalArgumentException(
"Cannot identify any entry name save for " + file.getPath());
}
return file.getPath();
}
@Override public String toString() {
return String.format("FileSource{file=%s}", file.getPath());
}
};
}
// Source wrapping a directory; members are identified by their path under it.
private static Source directorySource(File directory) {
return new FileSource(directory) {
@Override public String identify(String name) {
return new File(source, name).getPath();
}
@Override public String toString() {
return String.format("FileSource{directory=%s}", source.getPath());
}
};
}
// Source for entries synthesized in memory (no backing file).
private static Source memorySource() {
return new Source() {
@Override public String name() {
return "<memory>";
}
@Override public String identify(String name) {
return "<memory>!" + name;
}
@Override public String toString() {
return String.format("MemorySource{@%s}", Integer.toHexString(hashCode()));
}
};
}
/**
 * Input stream decorator that guarantees a non-empty stream ends with a
 * newline: if the wrapped stream's last byte is not '\n', a single '\n' is
 * served before end-of-stream. An empty stream stays empty.
 */
private static class NewlineAppendingInputStream extends InputStream {
    private InputStream underlyingStream;
    // Last byte handed out, or -1 while nothing has been read yet.
    private int lastByteRead = -1;
    private boolean atEOS = false;

    public NewlineAppendingInputStream(InputStream stream) {
        this.underlyingStream = stream;
    }

    @Override public int read() throws IOException {
        if (atEOS) {
            return -1;
        }
        int next = this.underlyingStream.read();
        if (next != -1) {
            lastByteRead = next;
            return next;
        }
        // Underlying stream exhausted: emit one trailing '\n' unless the
        // stream was empty or already newline-terminated.
        atEOS = true;
        boolean needsNewline = lastByteRead != -1 && lastByteRead != '\n';
        return needsNewline ? '\n' : -1;
    }
}
// NamedByteSource variant whose stream is newline-terminated (used for CONCAT_TEXT).
private static final class NamedTextByteSource extends NamedByteSource {
private NamedTextByteSource(NamedByteSource source) {
super(source.source, source.name, source.inputSupplier);
}
@Override
public InputStream openStream() throws IOException {
return new NewlineAppendingInputStream(inputSupplier.openStream());
}
}
// A ByteSource that remembers which Source it came from and its name within it.
private static class NamedByteSource extends ByteSource {
static NamedByteSource create(Source source, String name, ByteSource inputSupplier) {
return new NamedByteSource(source, name, inputSupplier);
}
protected final Source source;
protected final String name;
protected final ByteSource inputSupplier;
private NamedByteSource(Source source, String name, ByteSource inputSupplier) {
this.source = source;
this.name = name;
this.inputSupplier = inputSupplier;
}
@Override
public InputStream openStream() throws IOException {
// Pure delegation; subclasses may wrap the stream.
return inputSupplier.openStream();
}
}
/**
* Represents an entry to be added to a jar.
*/
public interface Entry {
/**
* Returns the source that contains the entry.
*/
Source getSource();
/**
* Returns the name of the entry within its source.
*/
String getName();
/**
* Returns the path this entry will be added into the jar at.
*/
String getJarPath();
}
// Text-flavored entry: its GET_CONTENTS wraps contents so streams are
// newline-terminated. Note this static field intentionally hides
// ReadableEntry.GET_CONTENTS; callers pick one by static reference.
private static class ReadableTextEntry extends ReadableEntry {
static final Function<ReadableEntry, NamedByteSource> GET_CONTENTS =
new Function<ReadableEntry, NamedByteSource>() {
@Override public NamedByteSource apply(ReadableEntry item) {
return new NamedTextByteSource(item.contents);
}
};
ReadableTextEntry(NamedByteSource contents, String path) {
super(contents, path);
}
}
// Concrete Entry backed by a NamedByteSource plus the target jar path.
private static class ReadableEntry implements Entry {
// Extracts an entry's raw contents (no newline handling).
static final Function<ReadableEntry, NamedByteSource> GET_CONTENTS =
new Function<ReadableEntry, NamedByteSource>() {
@Override public NamedByteSource apply(ReadableEntry item) {
return item.contents;
}
};
private final NamedByteSource contents;
private final String path;
ReadableEntry(NamedByteSource contents, String path) {
this.contents = contents;
this.path = path;
}
@Override
public Source getSource() {
return contents.source;
}
@Override
public String getName() {
return contents.name;
}
@Override
public String getJarPath() {
return path;
}
}
// Entry originating from an existing jar; keeps the original JarEntry so its
// metadata can be carried over. The jar path is the JarEntry's own name.
private static class ReadableJarEntry extends ReadableEntry {
private final JarEntry jarEntry;
public ReadableJarEntry(NamedByteSource contents, JarEntry jarEntry) {
super(contents, jarEntry.getName());
this.jarEntry = jarEntry;
}
public JarEntry getJarEntry() { return jarEntry; }
}
/**
* An interface for those interested in the progress of writing the target jar.
*/
public interface Listener {
/**
* A listener that ignores all events.
*/
Listener NOOP = new Listener() {
@Override public void onSkip(Optional<? extends Entry> original,
Iterable<? extends Entry> skipped) {
// noop
}
@Override public void onReplace(Iterable<? extends Entry> originals, Entry replacement) {
// noop
}
@Override public void onConcat(String name, Iterable<? extends Entry> entries) {
// noop
}
@Override public void onWrite(Entry entry) {
// noop
}
};
/**
* Called to notify the listener that entries are being skipped.
*
* If original is present this indicates it is being retained in preference to the skipped
* entries.
*
* @param original The original entry being retained.
* @param skipped The new entries being skipped.
*/
void onSkip(Optional<? extends Entry> original, Iterable<? extends Entry> skipped);
/**
* Called to notify the listener that original entries are being replaced by a subsequently
* added entry.
*
* @param originals The original entry candidates that will be replaced.
* @param replacement The entry that overwrites the originals.
*/
void onReplace(Iterable<? extends Entry> originals, Entry replacement);
/**
* Called to notify the listener an original entry is being concatenated with one or more
* subsequently added entries.
*
* @param name The name of the entry in question.
* @param entries The entries that will be concatenated with the original entry.
*/
void onConcat(String name, Iterable<? extends Entry> entries);
/**
* Called to notify the listener of a newly written non-duplicate entry.
*
* @param entry The entry to be added to the target jar.
*/
void onWrite(Entry entry);
}
// ByteSource view of a Manifest; re-serializes the manifest on every
// openStream() call, so later mutations of {@code mf} are reflected.
private static ByteSource manifestSupplier(final Manifest mf) {
return new ByteSource() {
@Override public InputStream openStream() throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
mf.write(out);
return new ByteArrayInputStream(out.toByteArray());
}
};
}
// Fills in Manifest-Version and Created-By main attributes when absent.
// Mutates and returns the given manifest (not a copy).
static Manifest ensureDefaultManifestEntries(Manifest manifest) {
if (!manifest.getMainAttributes().containsKey(Name.MANIFEST_VERSION)) {
manifest.getMainAttributes().put(Name.MANIFEST_VERSION, "1.0");
}
Name createdBy = new Name("Created-By");
if (!manifest.getMainAttributes().containsKey(createdBy)) {
manifest.getMainAttributes().put(createdBy, JarBuilder.class.getName());
}
return manifest;
}
// Returns a fresh manifest carrying only the default entries.
private static Manifest createDefaultManifest() {
return ensureDefaultManifestEntries(new Manifest());
}
private static final ByteSource DEFAULT_MANIFEST = manifestSupplier(createDefaultManifest());
// Lazily supplies an input of type T; may perform I/O on each call.
private interface InputSupplier<T> {
T getInput() throws IOException;
}
// Supplies a JarFile for a path; opened files are registered with an internal
// Closer so close() releases everything obtained through getInput().
private static class JarSupplier implements InputSupplier<JarFile>, Closeable {
private final Closer closer;
private final InputSupplier<JarFile> supplier;
JarSupplier(final File file) {
closer = Closer.create();
supplier = new InputSupplier<JarFile>() {
@Override public JarFile getInput() throws IOException {
try {
// Do not verify signed.
return JarFileUtil.openJarFile(closer, file, false);
} catch (ZipException zex) {
// JarFile is not very verbose and doesn't tell the user which file it was
// so we will create a new Exception instead
ZipException e = new ZipException("error in opening zip file " + file);
e.initCause(zex);
throw e;
}
}
};
}
@Override
public JarFile getInput() throws IOException {
return supplier.getInput();
}
@Override
public void close() throws IOException {
closer.close();
}
}
// Jar entry paths always use '/' regardless of platform.
private static final Splitter JAR_PATH_SPLITTER = Splitter.on('/');
private static final Joiner JAR_PATH_JOINER = Joiner.on('/');
/**
 * Implementations should add jar entries to the given {@code Multimap} index when executed.
 */
private interface EntryIndexer {
  void execute(Multimap<String, ReadableEntry> entries) throws JarBuilderException;
}
// The jar file written (and, if it already exists, seeded from) by write().
private final File target;
// Observer of skip/replace/concat/write decisions; never null (Listener.NOOP by default).
private final Listener listener;
// Tracks every resource opened during indexing/writing; closed by close() and write().
private final Closer closer = Closer.create();
// Deferred entry-producing actions, applied in registration order by getAdditions().
private final List<EntryIndexer> additions = Lists.newLinkedList();
// Pending manifest contents; null means fall back to the target jar's manifest or the default.
@Nullable private ByteSource manifest;
/**
 * Creates a JarBuilder that will write scheduled jar additions to {@code target} upon
 * {@link #write}.
 * <p>
 * If the {@code target} exists an attempt will be made to over-write it and if it does not
 * exist then a new jar will be created at its path.
 *
 * @param target The target jar file to write.
 */
public JarBuilder(File target) {
  this(target, Listener.NOOP);
}
/**
 * Creates a JarBuilder that will write scheduled jar additions to {@code target} upon
 * {@link #write}.
 * <p>
 * If the {@code target} does not exist a new jar will be created at its path.
 *
 * @param target The target jar file to write.
 * @param listener Observer notified of each entry decision (skip, replace, concat, write).
 */
public JarBuilder(File target, Listener listener) {
  this.target = Preconditions.checkNotNull(target);
  this.listener = Preconditions.checkNotNull(listener);
}
/** Releases all resources opened while indexing source jars and directories. */
@Override
public void close() throws IOException {
  closer.close();
}
/**
 * Schedules addition of the given {@code contents} to the entry at {@code jarPath}. In addition,
 * individual parent directory entries will be created when this builder is
 * {@link #write written} in the spirit of {@code mkdir -p}.
 *
 * @param contents The contents of the entry to add.
 * @param jarPath The path of the entry to add.
 * @return This builder for chaining.
 */
public JarBuilder add(final ByteSource contents, final String jarPath) {
  Preconditions.checkNotNull(contents);
  Preconditions.checkNotNull(jarPath);
  additions.add(new EntryIndexer() {
    @Override public void execute(Multimap<String, ReadableEntry> entries) {
      add(entries, NamedByteSource.create(memorySource(), jarPath, contents), jarPath);
    }
  });
  return this;
}
/** Returns {@code true} when {@code value} is {@code null} or contains only whitespace. */
private static boolean isEmpty(@Nullable String value) {
  if (value == null) {
    return true;
  }
  return value.trim().isEmpty();
}
/**
 * Schedules recursive addition of all files contained within {@code directory} to the resulting
 * jar. The path of each file relative to {@code directory} will be used for the corresponding
 * jar entry path. If a {@code jarPath} is present then all subtree entries will be prefixed
 * with it.
 *
 * @param directory An existing directory to add to the jar.
 * @param jarPath An optional base path to graft the {@code directory} onto.
 * @return This builder for chaining.
 */
public JarBuilder addDirectory(final File directory, final Optional<String> jarPath) {
  Preconditions.checkArgument(directory.isDirectory(),
      "Expected a directory, given a file: %s", directory);
  Preconditions.checkArgument(!jarPath.isPresent() || !isEmpty(jarPath.get()));
  additions.add(new EntryIndexer() {
    @Override public void execute(Multimap<String, ReadableEntry> entries)
        throws JarBuilderException {
      Source directorySource = directorySource(directory);
      Iterable<String> jarBasePath = jarPath.isPresent()
          ? JAR_PATH_SPLITTER.split(jarPath.get()) : ImmutableList.<String>of();
      // Walk the subtree lazily; only regular files become entries.
      Iterable<File> files =
          Files.fileTreeTraverser()
              .preOrderTraversal(directory)
              .filter(Files.isFile());
      for (File child : files) {
        Iterable<String> relpathComponents = relpathComponents(child, directory);
        Iterable<String> path = Iterables.concat(jarBasePath, relpathComponents);
        String entryPath = joinJarPath(relpathComponents);
        // The manifest is managed separately (useCustomManifest / write); never index it here.
        // NOTE(review): the manifest check uses the un-prefixed relative path while the entry
        // is indexed under the jarPath-prefixed path -- confirm this asymmetry is intended.
        if (!JarFile.MANIFEST_NAME.equals(entryPath)) {
          NamedByteSource contents =
              NamedByteSource.create(
                  directorySource,
                  entryPath,
                  Files.asByteSource(child));
          add(entries, contents, joinJarPath(path));
        }
      }
    }
  });
  return this;
}
/**
 * Schedules addition of the given {@code file}'s contents to the entry at {@code jarPath}. In
 * addition, individual parent directory entries will be created when this builder is
 * {@link #write written} in the spirit of {@code mkdir -p}.
 *
 * @param file An existing file to add to the jar.
 * @param jarPath The path of the entry to add.
 * @return This builder for chaining.
 */
public JarBuilder addFile(final File file, final String jarPath) {
  Preconditions.checkArgument(!file.isDirectory(),
      "Expected a file, given a directory: %s", file);
  Preconditions.checkArgument(!isEmpty(jarPath));
  additions.add(new EntryIndexer() {
    @Override
    public void execute(Multimap<String, ReadableEntry> entries)
        throws JarBuilderException {
      // The manifest must flow through useCustomManifest so write() can place it first.
      if (JarFile.MANIFEST_NAME.equals(jarPath)) {
        throw new JarBuilderException(
            "A custom manifest entry should be added via the useCustomManifest methods");
      }
      // NOTE(review): the source name is file.getName() here while add(ByteSource, String)
      // names entries by jarPath -- confirm the inconsistency is intentional.
      NamedByteSource contents =
          NamedByteSource.create(
              fileSource(file),
              file.getName(),
              Files.asByteSource(file));
      add(entries, contents, jarPath);
    }
  });
  return this;
}
/**
 * Schedules addition of the given jar's non-directory, non-manifest entries to the target jar
 * under their original entry paths. Even if the jar does not contain individual parent
 * directory entries, they will be added for each entry added.
 *
 * @param file The path of the jar to add.
 * @return This builder for chaining.
 */
public JarBuilder addJar(final File file) {
  Preconditions.checkNotNull(file);
  additions.add(new EntryIndexer() {
    @Override
    public void execute(final Multimap<String, ReadableEntry> entries)
        throws IndexingException {
      // Register with the builder-level closer so the jar stays open until write()/close().
      final InputSupplier<JarFile> jarSupplier = closer.register(new JarSupplier(file));
      final Source jarSource = jarSource(file);
      try {
        enumerateJarEntries(file, new JarEntryVisitor() {
          @Override public void visit(JarEntry entry) throws IOException {
            // Directory entries are re-synthesized at write time; the manifest is managed
            // separately via useCustomManifest / write().
            if (!entry.isDirectory() && !JarFile.MANIFEST_NAME.equals(entry.getName())) {
              NamedByteSource contents =
                  NamedByteSource.create(
                      jarSource,
                      entry.getName(),
                      entrySupplier(jarSupplier, entry));
              add(entries, contents, entry);
            }
          }
        });
      } catch (IOException e) {
        throw new IndexingException(file, e);
      }
    }
  });
  return this;
}
/** Indexes in-memory or file-backed {@code contents} under its target {@code jarPath}. */
private static void add(
    Multimap<String, ReadableEntry> entries,
    NamedByteSource contents,
    String jarPath) {
  ReadableEntry entry = new ReadableEntry(contents, jarPath);
  entries.put(jarPath, entry);
}
/** Indexes jar-backed {@code contents} under the source {@code jarEntry}'s name. */
private static void add(
    Multimap<String, ReadableEntry> entries,
    NamedByteSource contents,
    JarEntry jarEntry) {
  ReadableJarEntry entry = new ReadableJarEntry(contents, jarEntry);
  entries.put(jarEntry.getName(), entry);
}
/**
 * Registers the given Manifest to be used in the jar written out by {@link #write}.
 *
 * @param customManifest The manifest to use for the built jar.
 * @return This builder for chaining.
 */
public JarBuilder useCustomManifest(final Manifest customManifest) {
  Preconditions.checkNotNull(customManifest);
  manifest = manifestSupplier(customManifest);
  return this;
}
/**
 * Registers the contents of the given manifest file to be used in the jar written out by
 * {@link #write}.
 *
 * @param customManifest The manifest file to use for the built jar.
 * @return This builder for chaining.
 */
public JarBuilder useCustomManifest(File customManifest) {
  Preconditions.checkNotNull(customManifest);
  NamedByteSource contents =
      NamedByteSource.create(
          fileSource(customManifest),
          customManifest.getPath(),
          Files.asByteSource(customManifest));
  return useCustomManifest(contents);
}
/**
 * Registers the given manifest text (UTF-8 encoded) to be used in the jar written out by
 * {@link #write}.
 *
 * @param customManifest The manifest text to use for the built jar.
 * @return This builder for chaining.
 */
public JarBuilder useCustomManifest(CharSequence customManifest) {
  Preconditions.checkNotNull(customManifest);
  return useCustomManifest(
      NamedByteSource.create(
          memorySource(),
          JarFile.MANIFEST_NAME,
          ByteSource.wrap(customManifest.toString().getBytes(Charsets.UTF_8))));
}
/**
 * Registers the given manifest contents to be used in the jar written out by {@link #write}.
 * The contents are parsed lazily; a malformed manifest surfaces as a
 * {@link JarCreationException} at write time.
 *
 * @param customManifest The manifest contents to use for the built jar.
 * @return This builder for chaining.
 */
public JarBuilder useCustomManifest(final NamedByteSource customManifest) {
  Preconditions.checkNotNull(customManifest);
  return useCustomManifest(new InputSupplier<Manifest>() {
    @Override public Manifest getInput() throws IOException {
      // Register the stream with a Closer so it is released whether parsing succeeds or
      // fails; the original code leaked the stream opened here.
      Closer manifestCloser = Closer.create();
      try {
        InputStream in = manifestCloser.register(customManifest.openStream());
        Manifest mf = new Manifest();
        mf.read(in);
        return mf;
      } catch (IOException e) {
        // NOTE(review): the underlying cause is dropped here; attach it if
        // JarCreationException supports a cause constructor.
        throw new JarCreationException(
            "Invalid manifest from " + customManifest.source.identify(customManifest.name));
      } finally {
        manifestCloser.close();
      }
    }
  });
}
// Adapts a lazily-parsed Manifest supplier into the ByteSource form stored in `manifest`.
private JarBuilder useCustomManifest(final InputSupplier<Manifest> manifestSource) {
  manifest = new ByteSource() {
    @Override public InputStream openStream() throws IOException {
      // Re-parse and re-serialize on each open so the freshest supplier state is used.
      return manifestSupplier(manifestSource.getInput()).openStream();
    }
  };
  return this;
}
/**
 * Creates a jar at the configured target path applying the scheduled additions and skipping any
 * duplicate entries found. Entries will not be compressed.
 *
 * @return The jar file that was written.
 * @throws IOException if there was a problem writing the jar file.
 */
public File write() throws IOException {
  return write(false, DuplicateHandler.always(DuplicateAction.SKIP));
}
/**
 * Creates a jar at the configured target path applying the scheduled additions and skipping any
 * duplicate entries found.
 *
 * @param compress Pass {@code true} to compress all jar entries; otherwise, they will just be
 *     stored.
 * @return The jar file that was written.
 * @throws IOException if there was a problem writing the jar file.
 */
public File write(boolean compress) throws IOException {
  return write(compress, DuplicateHandler.always(DuplicateAction.SKIP));
}
/**
 * Creates a jar at the configured target path applying the scheduled additions per the given
 * {@code duplicateHandler}.
 *
 * @param compress Pass {@code true} to compress all jar entries; otherwise, they will just be
 *     stored.
 * @param duplicateHandler A handler for dealing with duplicate entries.
 * @param skipPatterns An optional list of patterns that match entry paths that should be
 *     excluded.
 * @return The jar file that was written.
 * @throws IOException if there was a problem writing the jar file.
 * @throws DuplicateEntryException if the policy in effect for an entry is
 *     {@link DuplicateAction#THROW} and that entry is a duplicate.
 */
public File write(
    boolean compress,
    DuplicateHandler duplicateHandler,
    Pattern... skipPatterns)
    throws IOException {
  return write(compress, duplicateHandler, ImmutableList.copyOf(skipPatterns));
}
// Turns a skip Pattern into a predicate matching entry paths containing that pattern.
private static final Function<Pattern, Predicate<CharSequence>> AS_PATH_SELECTOR =
    new Function<Pattern, Predicate<CharSequence>>() {
      @Override public Predicate<CharSequence> apply(Pattern item) {
        return Predicates.contains(item);
      }
    };
/**
 * Creates a jar at the configured target path applying the scheduled additions per the given
 * {@code duplicateHandler}.
 *
 * @param compress Pass {@code true} to compress all jar entries; otherwise, they will just be
 *     stored.
 * @param duplicateHandler A handler for dealing with duplicate entries.
 * @param skipPatterns An optional sequence of patterns that match entry paths that should be
 *     excluded.
 * @return The jar file that was written.
 * @throws IOException if there was a problem writing the jar file.
 * @throws DuplicateEntryException if the policy in effect for an entry is
 *     {@link DuplicateAction#THROW} and that entry is a duplicate.
 */
public File write(
    final boolean compress,
    DuplicateHandler duplicateHandler,
    Iterable<Pattern> skipPatterns)
    throws DuplicateEntryException, IOException {
  Preconditions.checkNotNull(duplicateHandler);
  Predicate<CharSequence> skipPath =
      Predicates.or(Iterables.transform(ImmutableList.copyOf(skipPatterns), AS_PATH_SELECTOR));
  // Resolve all scheduled additions (and existing target contents) into final entries first.
  final Iterable<ReadableEntry> entries = getEntries(skipPath, duplicateHandler);
  // Write to a sibling temp file and swap it in only after a fully successful write.
  File tmp = File.createTempFile(target.getName(), ".tmp", target.getParentFile());
  try {
    try {
      JarWriter writer = jarWriter(tmp, compress);
      // The manifest is always the first entry in the output jar.
      writer.write(JarFile.MANIFEST_NAME, manifest == null ? DEFAULT_MANIFEST : manifest);
      // Defer jar-backed entries so they can be bulk-copied without recompression.
      List<ReadableJarEntry> jarEntries = Lists.newArrayList();
      for (ReadableEntry entry : entries) {
        if (entry instanceof ReadableJarEntry) {
          jarEntries.add((ReadableJarEntry) entry);
        } else {
          writer.write(entry.getJarPath(), entry.contents);
        }
      }
      copyJarFiles(writer, jarEntries);
      // Close all open files, the moveFile below might need to copy instead of just rename.
      closer.close();
      // Rename the file (or copy if it can't be renamed)
      target.delete();
      Files.move(tmp, target);
    } catch (IOException e) {
      throw closer.rethrow(e);
    } finally {
      closer.close();
    }
  } finally {
    tmp.delete();
  }
  return target;
}
/**
 * As an optimization, use {@link JarEntryCopier} to copy one jar file to
 * another without decompressing and recompressing.
 *
 * @param writer target to copy JAR file entries to.
 * @param entries entries that came from a jar file
 */
private void copyJarFiles(JarWriter writer, Iterable<ReadableJarEntry> entries)
    throws IOException {
  // Walk the entries to bucketize by input jar file names
  Multimap<JarSource, ReadableJarEntry> jarEntries = HashMultimap.create();
  for (ReadableJarEntry entry : entries) {
    Preconditions.checkState(entry.getSource() instanceof JarSource);
    jarEntries.put((JarSource) entry.getSource(), entry);
  }
  // Copy the data from each jar input file to the output
  for (JarSource source : jarEntries.keySet()) {
    // Each source jar gets its own Closer so it is released as soon as its entries are copied.
    Closer jarFileCloser = Closer.create();
    try {
      final InputSupplier<JarFile> jarSupplier = jarFileCloser.register(
          new JarSupplier(new File(source.name())));
      JarFile jarFile = jarSupplier.getInput();
      for (ReadableJarEntry readableJarEntry : jarEntries.get(source)) {
        JarEntry jarEntry = readableJarEntry.getJarEntry();
        String resource = jarEntry.getName();
        writer.copy(resource, jarFile, jarEntry);
      }
    } catch (IOException ex) {
      throw jarFileCloser.rethrow(ex);
    } finally {
      jarFileCloser.close();
    }
  }
}
/**
 * Resolves the indexed additions into at most one entry per jar path, applying the skip
 * predicate and the duplicate policy via {@link #processEntries}.
 */
private Iterable<ReadableEntry> getEntries(
    final Predicate<CharSequence> skipPath,
    final DuplicateHandler duplicateHandler)
    throws JarBuilderException {
  Function<Map.Entry<String, Collection<ReadableEntry>>, Iterable<ReadableEntry>> mergeEntries =
      new Function<Map.Entry<String, Collection<ReadableEntry>>, Iterable<ReadableEntry>>() {
        @Override
        public Iterable<ReadableEntry> apply(Map.Entry<String, Collection<ReadableEntry>> item) {
          String jarPath = item.getKey();
          Collection<ReadableEntry> entries = item.getValue();
          // processEntries yields Optional; asSet() flattens absent into zero entries.
          return processEntries(skipPath, duplicateHandler, jarPath, entries).asSet();
        }
      };
  return FluentIterable.from(getAdditions().asMap().entrySet()).transformAndConcat(mergeEntries);
}
/**
 * Collapses the candidate entries for a single jar path into at most one entry, notifying the
 * listener of the decision taken. Returns absent when the path is skipped outright.
 */
private Optional<ReadableEntry> processEntries(
    Predicate<CharSequence> skipPath,
    DuplicateHandler duplicateHandler,
    String jarPath,
    Collection<ReadableEntry> itemEntries) {

  // An excluded path contributes nothing to the output jar.
  if (skipPath.apply(jarPath)) {
    listener.onSkip(Optional.<Entry>absent(), itemEntries);
    return Optional.absent();
  }

  // A path with a single candidate needs no duplicate resolution.
  if (itemEntries.size() < 2) {
    ReadableEntry sole = Iterables.getOnlyElement(itemEntries);
    listener.onWrite(sole);
    return Optional.of(sole);
  }

  DuplicateAction action = duplicateHandler.actionFor(jarPath);
  switch (action) {
    case SKIP: {
      // Keep the first candidate; everything after it is discarded.
      ReadableEntry first = Iterables.get(itemEntries, 0);
      listener.onSkip(Optional.of(first), Iterables.skip(itemEntries, 1));
      return Optional.of(first);
    }
    case REPLACE: {
      // Keep the last candidate; everything before it is discarded.
      ReadableEntry last = Iterables.getLast(itemEntries);
      listener.onReplace(Iterables.limit(itemEntries, itemEntries.size() - 1), last);
      return Optional.of(last);
    }
    case CONCAT: {
      // Stitch every candidate's bytes together in arrival order.
      ByteSource joined =
          ByteSource.concat(Iterables.transform(itemEntries, ReadableEntry.GET_CONTENTS));
      ReadableEntry merged =
          new ReadableEntry(
              NamedByteSource.create(memorySource(), jarPath, joined),
              jarPath);
      listener.onConcat(jarPath, itemEntries);
      return Optional.of(merged);
    }
    case CONCAT_TEXT: {
      // Same as CONCAT but using the text-aware contents accessor.
      ByteSource joinedText =
          ByteSource.concat(Iterables.transform(itemEntries, ReadableTextEntry.GET_CONTENTS));
      ReadableEntry mergedText =
          new ReadableEntry(
              NamedByteSource.create(memorySource(), jarPath, joinedText),
              jarPath);
      listener.onConcat(jarPath, itemEntries);
      return Optional.of(mergedText);
    }
    case THROW:
      throw new DuplicateEntryException(Iterables.get(itemEntries, 1));
    default:
      throw new IllegalArgumentException("Unrecognized DuplicateAction " + action);
  }
}
/**
 * Builds the full jar-path index: entries already present in a non-empty {@code target} jar
 * are indexed first, then each scheduled addition is applied in registration order.
 */
private Multimap<String, ReadableEntry> getAdditions() throws JarBuilderException {
  final Multimap<String, ReadableEntry> entries = LinkedListMultimap.create();
  if (target.exists() && target.length() > 0) {
    final InputSupplier<JarFile> jarSupplier = closer.register(new JarSupplier(target));
    try {
      enumerateJarEntries(target, new JarEntryVisitor() {
        @Override public void visit(JarEntry jarEntry) throws IOException {
          String entryPath = jarEntry.getName();
          ByteSource contents = entrySupplier(jarSupplier, jarEntry);
          if (JarFile.MANIFEST_NAME.equals(entryPath)) {
            // The pre-existing manifest is only used when no custom one was registered.
            if (manifest == null) {
              manifest = contents;
            }
          } else if (!jarEntry.isDirectory()) {
            // Directory entries are re-synthesized at write time, so only files are kept.
            entries.put(
                entryPath,
                new ReadableJarEntry(
                    NamedByteSource.create(jarSource(target), entryPath, contents),
                    jarEntry));
          }
        }
      });
    } catch (IOException e) {
      throw new IndexingException(target, e);
    }
  }
  for (EntryIndexer addition : additions) {
    addition.execute(entries);
  }
  return entries;
}
/** Callback invoked once per entry when enumerating a jar file. */
private interface JarEntryVisitor {
  void visit(JarEntry item) throws IOException;
}
/**
 * Opens {@code jarFile}, invokes {@code visitor} for every entry, and closes the jar before
 * returning (even on failure).
 */
private void enumerateJarEntries(File jarFile, JarEntryVisitor visitor)
    throws IOException {
  Closer jarFileCloser = Closer.create();
  JarFile jar = JarFileUtil.openJarFile(jarFileCloser, jarFile);
  try {
    for (Enumeration<JarEntry> entries = jar.entries(); entries.hasMoreElements();) {
      visitor.visit(entries.nextElement());
    }
  } catch (IOException e) {
    throw jarFileCloser.rethrow(e);
  } finally {
    jarFileCloser.close();
  }
}
/**
 * Writes entries to a jar output stream, creating missing parent directory entries on demand
 * in the spirit of {@code mkdir -p}.
 */
private static final class JarWriter {
  /** Builds {@link JarEntry} headers for either compressed (DEFLATED) or STORED output. */
  static class EntryFactory {
    private final boolean compress;
    EntryFactory(boolean compress) {
      this.compress = compress;
    }
    JarEntry createEntry(String path, ByteSource contents)
        throws IOException {
      JarEntry entry = new JarEntry(path);
      entry.setMethod(compress ? JarEntry.DEFLATED : JarEntry.STORED);
      if (!compress) {
        // STORED entries require size and CRC to be set before putNextEntry.
        prepareEntry(entry, contents);
      }
      return entry;
    }
    // Streams the contents once to compute the byte count and CRC32 checksum.
    private void prepareEntry(JarEntry entry, ByteSource contents)
        throws IOException {
      final CRC32 crc32 = new CRC32();
      long size = contents.read(new ByteProcessor<Long>() {
        private long size = 0;
        @Override
        public boolean processBytes(byte[] buf, int off, int len) throws IOException {
          size += len;
          crc32.update(buf, off, len);
          return true;
        }
        @Override
        public Long getResult() {
          return size;
        }
      });
      entry.setSize(size);
      // STORED entries are written uncompressed, so compressed size equals size.
      entry.setCompressedSize(size);
      entry.setCrc(crc32.getValue());
    }
  }
  // NOTE(review): shadows the identically-defined outer JAR_PATH_JOINER constant -- one of
  // the two could likely be removed.
  private static final Joiner JAR_PATH_JOINER = Joiner.on('/');
  // Directory paths (as component lists) already emitted, to avoid duplicate dir entries.
  private final Set<List<String>> directories = Sets.newHashSet();
  private final JarOutputStream out;
  private final EntryFactory entryFactory;
  private JarWriter(JarOutputStream out, boolean compress) {
    this.out = out;
    this.entryFactory = new EntryFactory(compress);
  }
  public void write(String path, ByteSource contents) throws IOException {
    ensureParentDir(path);
    out.putNextEntry(entryFactory.createEntry(path, contents));
    contents.copyTo(out);
  }
  // Copies an entry verbatim from a source jar, avoiding decompress/recompress.
  public void copy(String path, JarFile jarIn, JarEntry srcJarEntry) throws IOException {
    ensureParentDir(path);
    JarEntryCopier.copyEntry(out, path, jarIn, srcJarEntry);
  }
  // Emits a directory entry for every not-yet-seen ancestor of path (mkdir -p).
  private void ensureParentDir(String path) throws IOException {
    File file = new File(path);
    File parent = file.getParentFile();
    if (parent != null) {
      List<String> components = components(parent);
      List<String> ancestry = Lists.newArrayListWithCapacity(components.size());
      for (String component : components) {
        ancestry.add(component);
        // contains() compares by equals, so the mutable ancestry list matches the
        // immutable copies stored in `directories`.
        if (!directories.contains(ancestry)) {
          directories.add(ImmutableList.copyOf(ancestry));
          out.putNextEntry(new JarEntry(joinJarPath(ancestry) + "/"));
        }
      }
    }
  }
}
/**
 * Opens a buffered {@link JarWriter} over {@code path}; all underlying streams are registered
 * with the builder-level closer.
 */
private JarWriter jarWriter(File path, boolean compress) throws IOException {
  // The JAR-writing process seems to be I/O bound. To make writes to disk less frequent,
  // BufferedOutputStream is used. This way, compressed data is stored in a buffer before being
  // flushed to disk.
  // For benchmarking, "./pants binary --no-use-nailgun" command was executed on a large project.
  // The machine was 2013 MPB with SSD. The resulting project JAR is about 500 MB.
  // Without BufferedOutputStream, the jar-tool step took on average about 113 seconds.
  // With BufferedOutputStream and 1MB buffer, the jar-tool step took on average about 80 seconds.
  // The performance gain on this particular project on this particular machine is 30%.
  FileOutputStream fout = closer.register(new FileOutputStream(path));
  BufferedOutputStream bout = closer.register(new BufferedOutputStream(fout, 1024 * 1024));
  final JarOutputStream jar = closer.register(new JarOutputStream(bout));
  // Closer closes in LIFO order, so this runs first and finalizes the last open entry
  // before the jar stream itself is closed.
  closer.register(new Closeable() {
    @Override public void close() throws IOException {
      jar.closeEntry();
    }
  });
  return new JarWriter(jar, compress);
}
/**
 * Exposes a single jar entry's contents as a {@link ByteSource}, resolving the jar lazily on
 * each open.
 */
private static ByteSource entrySupplier(final InputSupplier<JarFile> jar, final JarEntry entry) {
  return new ByteSource() {
    @Override public InputStream openStream() throws IOException {
      JarFile jarFile = jar.getInput();
      return jarFile.getInputStream(entry);
    }
  };
}
/**
 * Computes the path components of {@code fullPath} relative to {@code relativeTo}, using
 * {@code ".."} segments when {@code fullPath} is not beneath {@code relativeTo}.
 */
@VisibleForTesting
static Iterable<String> relpathComponents(File fullPath, File relativeTo) {
  List<String> base = components(relativeTo);
  List<String> path = components(fullPath);
  // Strip the common leading components shared by both paths.
  Iterator<String> baseIter = base.iterator();
  Iterator<String> pathIter = path.iterator();
  while (baseIter.hasNext() && pathIter.hasNext()) {
    if (!baseIter.next().equals(pathIter.next())) {
      break;
    }
    baseIter.remove();
    pathIter.remove();
  }
  // Any base components left un-matched must be climbed out of with "..".
  if (!base.isEmpty()) {
    path.addAll(0, Collections.nCopies(base.size(), ".."));
  }
  return path;
}
/** Splits {@code file} into its name components, ordered from root-most to leaf. */
private static List<String> components(File file) {
  LinkedList<String> parts = Lists.newLinkedList();
  File current = file;
  do {
    parts.addFirst(current.getName());
    current = current.getParentFile();
  } while (current != null);
  return parts;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.formats.nontagged;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.io.Serializable;
import org.apache.asterix.dataflow.data.nontagged.serde.ABinarySerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.ABooleanSerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.ACircleSerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.ADateSerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.ADateTimeSerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.ADayTimeDurationSerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.ADoubleSerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.AFloatSerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.AGeometrySerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.AInt16SerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.AInt8SerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.AIntervalSerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.ALineSerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.AMissingSerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.ANullSerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.AObjectSerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.AOrderedListSerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.APoint3DSerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.APointSerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.APolygonSerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.ARecordSerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.ARectangleSerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.AStringSerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.ATimeSerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.AUUIDSerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.AUnorderedListSerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.AYearMonthDurationSerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.SerializerDeserializerUtil;
import org.apache.asterix.om.base.AMissing;
import org.apache.asterix.om.base.ANull;
import org.apache.asterix.om.base.IAObject;
import org.apache.asterix.om.types.AOrderedListType;
import org.apache.asterix.om.types.ARecordType;
import org.apache.asterix.om.types.ATypeTag;
import org.apache.asterix.om.types.AUnorderedListType;
import org.apache.asterix.om.types.IAType;
import org.apache.hyracks.algebricks.common.exceptions.NotImplementedException;
import org.apache.hyracks.algebricks.data.ISerializerDeserializerProvider;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.data.marshalling.ShortSerializerDeserializer;
import org.apache.hyracks.util.string.UTF8StringReader;
import org.apache.hyracks.util.string.UTF8StringWriter;
/**
 * Maps AsterixDB type tags to their serializer/deserializer implementations, either tagged
 * (first byte identifies the type) or non-tagged (raw value bytes only).
 */
public class SerializerDeserializerProvider implements ISerializerDeserializerProvider, Serializable {
    private static final long serialVersionUID = 1L;
    public static final SerializerDeserializerProvider INSTANCE = new SerializerDeserializerProvider();

    private SerializerDeserializerProvider() {
    }

    // Can't be shared among threads <Stateful>: each call builds fresh UTF8 reader/writer
    // instances, which hold per-use state.
    @SuppressWarnings("rawtypes")
    public ISerializerDeserializer getAStringSerializerDeserializer() {
        return addTag(new AStringSerializerDeserializer(new UTF8StringWriter(), new UTF8StringReader()));
    }

    /**
     * Returns a tag-aware serde for the given {@code IAType}, or {@code null} when no type
     * info is supplied.
     */
    @SuppressWarnings("rawtypes")
    @Override
    public ISerializerDeserializer getSerializerDeserializer(Object typeInfo) {
        IAType type = (IAType) typeInfo;
        if (type == null) {
            return null;
        }
        switch (type.getTypeTag()) {
            case ANY:
            case UNION:
                // we could do smth better for nullable fields
                return AObjectSerializerDeserializer.INSTANCE;
            default:
                // Concrete types get their raw serde wrapped so the tag byte is handled.
                return addTag(getNonTaggedSerializerDeserializer(type));
        }
    }

    /**
     * Returns the raw (untagged) serde for a concrete type.
     *
     * @throws NotImplementedException for type tags with no serde implementation.
     */
    @SuppressWarnings("rawtypes")
    public ISerializerDeserializer getNonTaggedSerializerDeserializer(IAType type) {
        switch (type.getTypeTag()) {
            case CIRCLE:
                return ACircleSerializerDeserializer.INSTANCE;
            case DATE:
                return ADateSerializerDeserializer.INSTANCE;
            case DATETIME:
                return ADateTimeSerializerDeserializer.INSTANCE;
            case DOUBLE:
                return ADoubleSerializerDeserializer.INSTANCE;
            case FLOAT:
                return AFloatSerializerDeserializer.INSTANCE;
            case BOOLEAN:
                return ABooleanSerializerDeserializer.INSTANCE;
            case TINYINT:
                return AInt8SerializerDeserializer.INSTANCE;
            case SMALLINT:
                return AInt16SerializerDeserializer.INSTANCE;
            case INTEGER:
                return AInt32SerializerDeserializer.INSTANCE;
            case BIGINT:
                return AInt64SerializerDeserializer.INSTANCE;
            case LINE:
                return ALineSerializerDeserializer.INSTANCE;
            case MISSING:
                return AMissingSerializerDeserializer.INSTANCE;
            case NULL:
                return ANullSerializerDeserializer.INSTANCE;
            case STRING:
                return AStringSerializerDeserializer.INSTANCE;
            case BINARY:
                return ABinarySerializerDeserializer.INSTANCE;
            case TIME:
                return ATimeSerializerDeserializer.INSTANCE;
            case DURATION:
                return ADurationSerializerDeserializer.INSTANCE;
            case YEARMONTHDURATION:
                return AYearMonthDurationSerializerDeserializer.INSTANCE;
            case DAYTIMEDURATION:
                return ADayTimeDurationSerializerDeserializer.INSTANCE;
            case INTERVAL:
                return AIntervalSerializerDeserializer.INSTANCE;
            case ARRAY:
                // Collection and record types carry element/field type info, so their serdes
                // are constructed per type rather than shared singletons.
                return new AOrderedListSerializerDeserializer((AOrderedListType) type);
            case POINT:
                return APointSerializerDeserializer.INSTANCE;
            case POINT3D:
                return APoint3DSerializerDeserializer.INSTANCE;
            case RECTANGLE:
                return ARectangleSerializerDeserializer.INSTANCE;
            case POLYGON:
                return APolygonSerializerDeserializer.INSTANCE;
            case OBJECT:
                return new ARecordSerializerDeserializer((ARecordType) type);
            case MULTISET:
                return new AUnorderedListSerializerDeserializer((AUnorderedListType) type);
            case UUID:
                return AUUIDSerializerDeserializer.INSTANCE;
            case SHORTWITHOUTTYPEINFO:
                return ShortSerializerDeserializer.INSTANCE;
            case GEOMETRY:
                return AGeometrySerializerDeserializer.INSTANCE;
            default:
                throw new NotImplementedException(
                        "No serializer/deserializer implemented for type " + type.getTypeTag() + " .");
        }
    }

    /**
     * Wraps a raw serde so serialized values are prefixed with their type tag; on read,
     * MISSING and NULL tags short-circuit to their singleton values.
     */
    @SuppressWarnings("rawtypes")
    public static ISerializerDeserializer addTag(final ISerializerDeserializer nonTaggedSerde) {
        return new ISerializerDeserializer<IAObject>() {
            private static final long serialVersionUID = 1L;

            @Override
            public IAObject deserialize(DataInput in) throws HyracksDataException {
                try {
                    ATypeTag tag = SerializerDeserializerUtil.deserializeTag(in);
                    //deserialize the tag (move input cursor forward) and check if it's not NULL tag
                    if (tag == ATypeTag.MISSING) {
                        return AMissing.MISSING;
                    }
                    if (tag == ATypeTag.NULL) {
                        return ANull.NULL;
                    }
                } catch (IOException e) {
                    throw HyracksDataException.create(e);
                }
                // Tag consumed above; delegate the value bytes to the raw serde.
                return (IAObject) nonTaggedSerde.deserialize(in);
            }

            @SuppressWarnings("unchecked")
            @Override
            public void serialize(IAObject instance, DataOutput out) throws HyracksDataException {
                SerializerDeserializerUtil.serializeTag(instance, out);
                nonTaggedSerde.serialize(instance, out);
            }
        };
    }
}
| |
package org.apache.lucene.codecs.simpletext;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.text.ParseException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.apache.lucene.codecs.SegmentInfoFormat;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.SegmentInfo;
import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.StringHelper;
import org.apache.lucene.util.Version;
/**
* plain text segments file format.
* <p>
* <b>FOR RECREATIONAL USE ONLY</b>
* @lucene.experimental
*/
/**
 * plain text segments file format.
 * <p>
 * <b>FOR RECREATIONAL USE ONLY</b>
 * @lucene.experimental
 */
public class SimpleTextSegmentInfoFormat extends SegmentInfoFormat {
  // Line markers for each field of the .si file. Each line of the file is
  // "<marker><value>"; readString() strips the marker by offset.
  final static BytesRef SI_VERSION = new BytesRef(" version ");
  final static BytesRef SI_DOCCOUNT = new BytesRef(" number of documents ");
  final static BytesRef SI_USECOMPOUND = new BytesRef(" uses compound file ");
  final static BytesRef SI_NUM_DIAG = new BytesRef(" diagnostics ");
  final static BytesRef SI_DIAG_KEY = new BytesRef(" key ");
  final static BytesRef SI_DIAG_VALUE = new BytesRef(" value ");
  final static BytesRef SI_NUM_ATT = new BytesRef(" attributes ");
  final static BytesRef SI_ATT_KEY = new BytesRef(" key ");
  final static BytesRef SI_ATT_VALUE = new BytesRef(" value ");
  final static BytesRef SI_NUM_FILES = new BytesRef(" files ");
  final static BytesRef SI_FILE = new BytesRef(" file ");
  final static BytesRef SI_ID = new BytesRef(" id ");

  /** Extension of the plain-text segment info file. */
  public static final String SI_EXTENSION = "si";

  /**
   * Reads the segment info from the "<segmentName>.si" file in
   * {@code directory}, verifying its id matches {@code segmentID}.
   * <p>
   * Marker checks use {@code assert} only: with assertions disabled a
   * malformed file produces garbage values rather than a clean error
   * (acceptable here because this codec is for testing/demo use).
   *
   * @throws CorruptIndexException if the version is unparseable or the
   *         stored id does not match {@code segmentID}
   * @throws IOException if reading fails
   */
  @Override
  public SegmentInfo read(Directory directory, String segmentName, byte[] segmentID, IOContext context) throws IOException {
    BytesRefBuilder scratch = new BytesRefBuilder();
    String segFileName = IndexFileNames.segmentFileName(segmentName, "", SimpleTextSegmentInfoFormat.SI_EXTENSION);
    try (ChecksumIndexInput input = directory.openChecksumInput(segFileName, context)) {
      SimpleTextUtil.readLine(input, scratch);
      assert StringHelper.startsWith(scratch.get(), SI_VERSION);
      final Version version;
      try {
        version = Version.parse(readString(SI_VERSION.length, scratch));
      } catch (ParseException pe) {
        throw new CorruptIndexException("unable to parse version string: " + pe.getMessage(), input, pe);
      }
      SimpleTextUtil.readLine(input, scratch);
      assert StringHelper.startsWith(scratch.get(), SI_DOCCOUNT);
      final int docCount = Integer.parseInt(readString(SI_DOCCOUNT.length, scratch));
      SimpleTextUtil.readLine(input, scratch);
      assert StringHelper.startsWith(scratch.get(), SI_USECOMPOUND);
      final boolean isCompoundFile = Boolean.parseBoolean(readString(SI_USECOMPOUND.length, scratch));
      SimpleTextUtil.readLine(input, scratch);
      assert StringHelper.startsWith(scratch.get(), SI_NUM_DIAG);
      int numDiag = Integer.parseInt(readString(SI_NUM_DIAG.length, scratch));
      // NOTE(review): diagnostics is handed to SegmentInfo as a mutable map
      // while attributes below is wrapped unmodifiable -- confirm whether the
      // asymmetry is intentional before changing it.
      Map<String,String> diagnostics = new HashMap<>();
      for (int i = 0; i < numDiag; i++) {
        SimpleTextUtil.readLine(input, scratch);
        assert StringHelper.startsWith(scratch.get(), SI_DIAG_KEY);
        String key = readString(SI_DIAG_KEY.length, scratch);
        SimpleTextUtil.readLine(input, scratch);
        assert StringHelper.startsWith(scratch.get(), SI_DIAG_VALUE);
        String value = readString(SI_DIAG_VALUE.length, scratch);
        diagnostics.put(key, value);
      }
      SimpleTextUtil.readLine(input, scratch);
      assert StringHelper.startsWith(scratch.get(), SI_NUM_ATT);
      int numAtt = Integer.parseInt(readString(SI_NUM_ATT.length, scratch));
      Map<String,String> attributes = new HashMap<>(numAtt);
      for (int i = 0; i < numAtt; i++) {
        SimpleTextUtil.readLine(input, scratch);
        assert StringHelper.startsWith(scratch.get(), SI_ATT_KEY);
        String key = readString(SI_ATT_KEY.length, scratch);
        SimpleTextUtil.readLine(input, scratch);
        assert StringHelper.startsWith(scratch.get(), SI_ATT_VALUE);
        String value = readString(SI_ATT_VALUE.length, scratch);
        attributes.put(key, value);
      }
      SimpleTextUtil.readLine(input, scratch);
      assert StringHelper.startsWith(scratch.get(), SI_NUM_FILES);
      int numFiles = Integer.parseInt(readString(SI_NUM_FILES.length, scratch));
      Set<String> files = new HashSet<>();
      for (int i = 0; i < numFiles; i++) {
        SimpleTextUtil.readLine(input, scratch);
        assert StringHelper.startsWith(scratch.get(), SI_FILE);
        String fileName = readString(SI_FILE.length, scratch);
        files.add(fileName);
      }
      SimpleTextUtil.readLine(input, scratch);
      assert StringHelper.startsWith(scratch.get(), SI_ID);
      // The id is raw bytes (not text), so it is sliced out of scratch
      // directly instead of going through readString().
      final byte[] id = Arrays.copyOfRange(scratch.bytes(), SI_ID.length, scratch.length());
      if (!Arrays.equals(segmentID, id)) {
        throw new CorruptIndexException("file mismatch, expected: " + StringHelper.idToString(segmentID)
            + ", got: " + StringHelper.idToString(id), input);
      }
      SimpleTextUtil.checkFooter(input);
      SegmentInfo info = new SegmentInfo(directory, version, segmentName, docCount,
          isCompoundFile, null, diagnostics, id, Collections.unmodifiableMap(attributes));
      info.setFiles(files);
      return info;
    }
  }

  /**
   * Decodes the remainder of {@code scratch} (everything past the marker of
   * length {@code offset}) as a UTF-8 string.
   */
  private String readString(int offset, BytesRefBuilder scratch) {
    return new String(scratch.bytes(), offset, scratch.length() - offset, StandardCharsets.UTF_8);
  }

  /**
   * Writes {@code si} to a "<name>.si" file in {@code dir} and registers that
   * file on the SegmentInfo itself.
   *
   * @throws IOException if writing fails
   */
  @Override
  public void write(Directory dir, SegmentInfo si, IOContext ioContext) throws IOException {
    String segFileName = IndexFileNames.segmentFileName(si.name, "", SimpleTextSegmentInfoFormat.SI_EXTENSION);
    si.addFile(segFileName);
    try (IndexOutput output = dir.createOutput(segFileName, ioContext)) {
      BytesRefBuilder scratch = new BytesRefBuilder();
      SimpleTextUtil.write(output, SI_VERSION);
      SimpleTextUtil.write(output, si.getVersion().toString(), scratch);
      SimpleTextUtil.writeNewline(output);
      SimpleTextUtil.write(output, SI_DOCCOUNT);
      SimpleTextUtil.write(output, Integer.toString(si.getDocCount()), scratch);
      SimpleTextUtil.writeNewline(output);
      SimpleTextUtil.write(output, SI_USECOMPOUND);
      SimpleTextUtil.write(output, Boolean.toString(si.getUseCompoundFile()), scratch);
      SimpleTextUtil.writeNewline(output);
      Map<String,String> diagnostics = si.getDiagnostics();
      int numDiagnostics = diagnostics == null ? 0 : diagnostics.size();
      SimpleTextUtil.write(output, SI_NUM_DIAG);
      SimpleTextUtil.write(output, Integer.toString(numDiagnostics), scratch);
      SimpleTextUtil.writeNewline(output);
      if (numDiagnostics > 0) {
        for (Map.Entry<String,String> diagEntry : diagnostics.entrySet()) {
          SimpleTextUtil.write(output, SI_DIAG_KEY);
          SimpleTextUtil.write(output, diagEntry.getKey(), scratch);
          SimpleTextUtil.writeNewline(output);
          SimpleTextUtil.write(output, SI_DIAG_VALUE);
          SimpleTextUtil.write(output, diagEntry.getValue(), scratch);
          SimpleTextUtil.writeNewline(output);
        }
      }
      Map<String,String> attributes = si.getAttributes();
      // Fix: guard against a null attributes map, mirroring the diagnostics
      // and files sections; previously attributes.size() could NPE here.
      int numAttributes = attributes == null ? 0 : attributes.size();
      SimpleTextUtil.write(output, SI_NUM_ATT);
      SimpleTextUtil.write(output, Integer.toString(numAttributes), scratch);
      SimpleTextUtil.writeNewline(output);
      if (numAttributes > 0) {
        for (Map.Entry<String,String> attEntry : attributes.entrySet()) {
          SimpleTextUtil.write(output, SI_ATT_KEY);
          SimpleTextUtil.write(output, attEntry.getKey(), scratch);
          SimpleTextUtil.writeNewline(output);
          SimpleTextUtil.write(output, SI_ATT_VALUE);
          SimpleTextUtil.write(output, attEntry.getValue(), scratch);
          SimpleTextUtil.writeNewline(output);
        }
      }
      Set<String> files = si.files();
      int numFiles = files == null ? 0 : files.size();
      SimpleTextUtil.write(output, SI_NUM_FILES);
      SimpleTextUtil.write(output, Integer.toString(numFiles), scratch);
      SimpleTextUtil.writeNewline(output);
      if (numFiles > 0) {
        for (String fileName : files) {
          SimpleTextUtil.write(output, SI_FILE);
          SimpleTextUtil.write(output, fileName, scratch);
          SimpleTextUtil.writeNewline(output);
        }
      }
      SimpleTextUtil.write(output, SI_ID);
      SimpleTextUtil.write(output, new BytesRef(si.getId()));
      SimpleTextUtil.writeNewline(output);
      SimpleTextUtil.writeChecksum(output, scratch);
    }
  }
}
| |
/*
* Copyright 2011 Future Systems
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.araqne.httpd.impl;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.net.InetSocketAddress;
import java.net.URLDecoder;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.security.Principal;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.UUID;
import javax.net.ssl.SSLPeerUnverifiedException;
import javax.net.ssl.SSLSession;
import javax.servlet.AsyncContext;
import javax.servlet.DispatcherType;
import javax.servlet.RequestDispatcher;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.ServletInputStream;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import javax.servlet.http.Part;
import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.handler.codec.http.HttpHeaders;
import org.jboss.netty.handler.codec.http.HttpMethod;
import org.jboss.netty.handler.codec.http.HttpRequest;
import org.jboss.netty.handler.ssl.SslHandler;
import org.araqne.httpd.HttpContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@SuppressWarnings("rawtypes")
/**
 * Adapts a Netty {@link HttpRequest} to the servlet {@link HttpServletRequest}
 * API. Cookies, locales and parameters are parsed eagerly in the constructor;
 * the request body is buffered in {@link #cb}.
 */
public class Request implements HttpServletRequest {
    private ChannelHandlerContext ctx;
    private boolean secure;
    private boolean asyncStarted;
    private AsyncContext asyncContext;
    /**
     * can be null if not found
     */
    private HttpContext httpContext;
    /**
     * can be null if not found
     */
    private String servletPath;
    private String pathInfo;
    private String queryString;
    private HttpRequest req;
    private Response resp;
    private HttpSession session;
    private ServletInputStream is;
    private Map<String, Object> attributes = new HashMap<String, Object>();
    // parameter name -> values; a name with no "=value" maps to a null list
    private Map<String, List<String>> parameters = new HashMap<String, List<String>>();
    private Cookie[] cookies;
    private List<Locale> locales = new ArrayList<Locale>();
    private Logger logger = LoggerFactory.getLogger(this.getClass().getName());
    private ByteBuffer cb = null;

    public Request(ChannelHandlerContext ctx, HttpRequest req) {
        this.ctx = ctx;
        this.req = req;
        this.queryString = "";
        ChannelBuffer c = req.getContent();
        if (c.hasArray())
            cb = ByteBuffer.wrap(c.array(), 0, c.readableBytes());
        else {
            // NOTE(review): toByteBuffer() on a direct buffer has no backing
            // array, which would make the cb.array() calls below throw --
            // confirm the pipeline always yields heap buffers here.
            cb = c.toByteBuffer();
        }
        this.is = new RequestInputStream(new ByteArrayInputStream(cb.array(), 0, cb.remaining()));
        parseCookies(req);
        parseLocales(req);
        parseParameters();
        setSslAttributes(ctx);
        setChannel(ctx);
    }

    public void setHttpContext(HttpContext httpContext) {
        this.httpContext = httpContext;
        setSession();
    }

    public void setResponse(Response resp) {
        this.resp = resp;
    }

    /** Binds an existing session matching the requested session id, if any. */
    private void setSession() {
        // TODO: domain check
        String key = getRequestedSessionId();
        if (key != null)
            session = httpContext.getHttpSessions().get(key);
    }

    /**
     * Parses form-encoded POST bodies (except application/octet-stream) and
     * the query string into {@link #parameters}.
     */
    private void parseParameters() {
        String contentType = req.headers().get("Content-Type");
        if (req.getMethod().equals(HttpMethod.POST)) {
            if (!(contentType != null && contentType.equals("application/octet-stream"))) {
                ChannelBuffer c = req.getContent();
                // use c.readerIndex() intentionally
                String body = new String(cb.array(), c.readerIndex(), cb.remaining(), Charset.forName("utf-8"));
                setParams(body);
            }
        }
        if (req.getUri().contains("?")) {
            int p = req.getUri().indexOf("?");
            this.queryString = req.getUri().substring(p + 1);
            setParams(this.queryString);
        }
    }

    /**
     * Exposes the standard javax.servlet.request.* SSL attributes when the
     * pipeline carries an SslHandler named "ssl".
     */
    private void setSslAttributes(ChannelHandlerContext ctx) {
        SslHandler sslHandler = (SslHandler) ctx.getPipeline().get("ssl");
        this.secure = sslHandler != null;
        if (secure) {
            SSLSession session = sslHandler.getEngine().getSession();
            String cipherSuite = session.getCipherSuite();
            try {
                setAttribute("javax.servlet.request.X509Certificate", session.getPeerCertificateChain());
            } catch (SSLPeerUnverifiedException ignored) {
                // client certificate is optional; absence is not an error
            }
            setAttribute("javax.servlet.request.cipher_suite", cipherSuite);
            setAttribute("javax.servlet.request.key_size", deduceKeyLength(cipherSuite));
        }
    }

    private void setChannel(ChannelHandlerContext ctx) {
        // set netty channel (only for internal use)
        setAttribute("netty.channel", ctx.getChannel());
    }

    /**
     * Populates {@link #locales} from Accept-Language headers, falling back
     * to the JVM default locale (servlet spec section 3.9).
     */
    private void parseLocales(HttpRequest req) {
        List<String> langs = req.headers().getAll(HttpHeaders.Names.ACCEPT_LANGUAGE);
        if (langs != null)
            for (String lang : langs)
                locales.add(new Locale(lang));
        // Fix: original checked langs for null above but then dereferenced it
        // unconditionally here; guard both paths consistently.
        if (langs == null || langs.isEmpty())
            locales.add(Locale.getDefault());
    }

    /**
     * Parses Cookie headers into {@link #cookies}. A header without '=' is
     * treated as a name-only cookie with a null value.
     */
    private void parseCookies(HttpRequest req) {
        List<String> cs = req.headers().getAll(HttpHeaders.Names.COOKIE);
        ArrayList<Cookie> parsed = new ArrayList<Cookie>();
        for (int i = 0; i < cs.size(); i++) {
            String s = cs.get(i);
            if (s == null || s.trim().isEmpty())
                continue;
            String name = null;
            String value = null;
            if (s.contains("=")) {
                String[] split = s.split("=", 2);
                name = split[0].trim();
                value = split[1].trim();
            } else {
                name = s.trim();
            }
            logger.debug("araqne httpd: cookie [{} -> name={}, value={}]", new Object[] { s, name, value });
            parsed.add(new Cookie(name, value));
        }
        // (removed a dead pre-sized assignment of this.cookies that was
        // immediately overwritten below)
        this.cookies = new Cookie[parsed.size()];
        parsed.toArray(this.cookies);
    }

    /**
     * Maps a cipher suite name to its symmetric key length in bits; 0 when
     * unknown (per the servlet SSL attribute convention).
     */
    private int deduceKeyLength(String cipherSuite) {
        if (cipherSuite.equals("IDEA_CBC"))
            return 128;
        if (cipherSuite.equals("RC2_CBC_40"))
            return 40;
        if (cipherSuite.equals("RC4_40"))
            return 40;
        if (cipherSuite.equals("RC4_128"))
            return 128;
        if (cipherSuite.equals("DES40_CBC"))
            return 40;
        if (cipherSuite.equals("DES_CBC"))
            return 56;
        if (cipherSuite.equals("3DES_EDE_CBC"))
            return 168;
        return 0;
    }

    /**
     * Splits a form/query string ("a=1&b=2&flag") into {@link #parameters}.
     * Values are URL-decoded; an empty value is stored as null; a bare name
     * maps to a null value list.
     */
    private void setParams(String params) {
        if (params == null || params.isEmpty())
            return;
        for (String param : params.split("&")) {
            logger.trace("param: {}", param);
            int pos = param.indexOf("=");
            if (pos > 0) {
                String name = param.substring(0, pos);
                String value = null;
                try {
                    String encodedValue = param.substring(pos + 1);
                    value = URLDecoder.decode(encodedValue, "utf-8");
                } catch (UnsupportedEncodingException ignored) {
                    // utf-8 is guaranteed by the JVM; value stays null
                }
                // Fix: value can be null if decoding failed; the original
                // called value.isEmpty() unguarded.
                if (value != null && value.isEmpty())
                    value = null;
                List<String> values = new ArrayList<String>();
                if (!parameters.containsKey(name))
                    parameters.put(name, values);
                else
                    values = parameters.get(name);
                values.add(value);
                logger.trace("araqne webconsole: param name [{}], value [{}]", name, value);
            } else {
                parameters.put(param, null);
                logger.trace("araqne webconsole: param name: {}", param);
            }
        }
    }

    /** ServletInputStream wrapper over the buffered request body. */
    private class RequestInputStream extends ServletInputStream {
        private InputStream is;

        public RequestInputStream(InputStream is) {
            this.is = is;
        }

        @Override
        public int read() throws IOException {
            return is.read();
        }
    }

    @Override
    public ServletContext getServletContext() {
        return null;
    }

    @Override
    public DispatcherType getDispatcherType() {
        // TODO: implement other dispatcher routines
        return DispatcherType.REQUEST;
    }

    @Override
    public boolean isSecure() {
        return secure;
    }

    // TODO: parse should be deferred for character encoding support
    @Override
    public void setCharacterEncoding(String env) throws UnsupportedEncodingException {
    }

    @Override
    public Object getAttribute(String name) {
        return attributes.get(name);
    }

    @SuppressWarnings("unchecked")
    @Override
    public Enumeration getAttributeNames() {
        return Collections.enumeration(attributes.keySet());
    }

    @Override
    public void setAttribute(String name, Object o) {
        attributes.put(name, o);
    }

    @Override
    public void removeAttribute(String name) {
        attributes.remove(name);
    }

    /** Extracts the charset token from the Content-Type header, or null. */
    @Override
    public String getCharacterEncoding() {
        String contentType = getContentType();
        if (contentType == null || !contentType.contains("charset"))
            return null;
        for (String t : contentType.split(";")) {
            if (t.trim().startsWith("charset"))
                return t.split("=")[1].trim();
        }
        return null;
    }

    @Override
    public int getContentLength() {
        return (int) HttpHeaders.getContentLength(req);
    }

    @Override
    public String getContentType() {
        return req.headers().get(HttpHeaders.Names.CONTENT_TYPE);
    }

    @Override
    public ServletInputStream getInputStream() throws IOException {
        return is;
    }

    @Override
    public String getParameter(String name) {
        String[] values = getParameterValues(name);
        if (values == null || values.length == 0)
            return null;
        return values[0];
    }

    @Override
    public Map<String, String[]> getParameterMap() {
        Map<String, String[]> m = new HashMap<String, String[]>();
        for (Map.Entry<String, List<String>> e : parameters.entrySet()) {
            List<String> values = e.getValue();
            // Fix: the original cast List.toArray() (an Object[]) to String[],
            // which always throws ClassCastException, and NPE'd for value-less
            // parameters whose list is null.
            m.put(e.getKey(), values == null ? new String[0] : values.toArray(new String[0]));
        }
        return m;
    }

    @SuppressWarnings("unchecked")
    @Override
    public Enumeration getParameterNames() {
        return Collections.enumeration(parameters.keySet());
    }

    @Override
    public String[] getParameterValues(String name) {
        List<String> values = parameters.get(name);
        if (values == null)
            return null;
        return values.toArray(new String[0]);
    }

    @Override
    public String getProtocol() {
        return req.getProtocolVersion().getText();
    }

    @Override
    public BufferedReader getReader() throws IOException {
        return new BufferedReader(new InputStreamReader(is));
    }

    @Deprecated
    @Override
    public String getRealPath(String path) {
        return null;
    }

    @Override
    public String getLocalName() {
        return null;
    }

    @Override
    public String getLocalAddr() {
        return ((InetSocketAddress) ctx.getChannel().getLocalAddress()).getAddress().getHostAddress();
    }

    @Override
    public int getLocalPort() {
        return ((InetSocketAddress) ctx.getChannel().getLocalAddress()).getPort();
    }

    @Override
    public String getRemoteAddr() {
        return ((InetSocketAddress) ctx.getChannel().getRemoteAddress()).getAddress().getHostAddress();
    }

    @Override
    public String getRemoteHost() {
        return ((InetSocketAddress) ctx.getChannel().getRemoteAddress()).getHostName();
    }

    @Override
    public int getRemotePort() {
        return ((InetSocketAddress) ctx.getChannel().getRemoteAddress()).getPort();
    }

    @Override
    public String getScheme() {
        return ctx.getPipeline().get("ssl") == null ? "http" : "https";
    }

    /** Host header without any ":port" suffix; null when absent. */
    @Override
    public String getServerName() {
        String host = req.headers().get("Host");
        if (host == null)
            return null;
        if (host.contains(":"))
            return host.substring(0, host.indexOf(":"));
        else
            return host;
    }

    /** Port from the Host header, defaulting to 80. */
    @Override
    public int getServerPort() {
        String host = req.headers().get("Host");
        if (host == null || !host.contains(":"))
            return 80;
        return Integer.parseInt(host.substring(host.indexOf(":") + 1));
    }

    /** Scheme token of the Authorization header (e.g. "Basic"), or null. */
    @Override
    public String getAuthType() {
        String auth = req.headers().get(HttpHeaders.Names.AUTHORIZATION);
        if (auth == null)
            return null;
        // Fix: a header without a space made substring(0, -1) throw; return
        // the whole value in that case.
        int sp = auth.indexOf(' ');
        return sp < 0 ? auth : auth.substring(0, sp);
    }

    @Override
    public Cookie[] getCookies() {
        return cookies;
    }

    @SuppressWarnings("unchecked")
    @Override
    public Enumeration getHeaderNames() {
        return Collections.enumeration(req.headers().names());
    }

    @Override
    public String getHeader(String name) {
        return req.headers().get(name);
    }

    @Override
    public Enumeration<String> getHeaders(String name) {
        return Collections.enumeration(req.headers().getAll(name));
    }

    @Override
    public long getDateHeader(String name) {
        String value = req.headers().get(name);
        // Fix: servlet spec requires -1 when the header is absent; the
        // original threw IllegalArgumentException (via parsing null).
        if (value == null)
            return -1;
        try {
            // NOTE(review): this expects the header to hold raw epoch millis;
            // RFC 1123 date strings are not parsed -- confirm callers.
            return Long.parseLong(value);
        } catch (NumberFormatException e) {
            throw new IllegalArgumentException(e);
        }
    }

    @Override
    public int getIntHeader(String name) {
        return HttpHeaders.getIntHeader(req, name);
    }

    @Override
    public String getMethod() {
        return req.getMethod().getName();
    }

    @Override
    public String getQueryString() {
        return queryString;
    }

    @Override
    public String getRemoteUser() {
        return null;
    }

    /**
     * requested path except query string, protocol scheme and domain
     */
    @Override
    public String getRequestURI() {
        String path = req.getUri();
        // cut protocol scheme and domain
        int p = path.indexOf("://");
        if (p > 0) {
            int p2 = path.indexOf('/', p + 3);
            if (p2 > 0)
                path = path.substring(p2);
            else
                path = "/";
        }
        // cut query string off
        p = path.indexOf('?');
        if (p > 0)
            path = path.substring(0, p);
        return path;
    }

    /** Value of the JSESSIONID cookie, or null when not sent. */
    @Override
    public String getRequestedSessionId() {
        // TODO: support session id from url
        String key = null;
        for (Cookie c : getCookies())
            if (c.getName().equals("JSESSIONID"))
                key = c.getValue();
        return key;
    }

    @Override
    public StringBuffer getRequestURL() {
        String uri = req.getUri();
        int p = uri.indexOf('?');
        if (p < 0)
            return new StringBuffer(uri);
        else
            return new StringBuffer(uri.substring(0, p));
    }

    @Override
    public String getContextPath() {
        if (httpContext != null)
            return httpContext.getServletContext().getContextPath();
        return null;
    }

    @Override
    public String getServletPath() {
        return servletPath;
    }

    public void setServletPath(String servletPath) {
        this.servletPath = servletPath;
    }

    @Override
    public String getPathInfo() {
        return pathInfo;
    }

    public void setPathInfo(String pathInfo) {
        this.pathInfo = pathInfo;
    }

    @Override
    public String getPathTranslated() {
        // it must return null if local file system path cannot be determined
        return null;
    }

    @Override
    public Enumeration<Locale> getLocales() {
        return Collections.enumeration(locales);
    }

    @Override
    public Locale getLocale() {
        // parseLocales() guarantees at least one entry
        return locales.get(0);
    }

    @Override
    public HttpSession getSession() {
        return getSession(true);
    }

    /**
     * Returns the bound session; when {@code create} is true and none exists,
     * creates one keyed by the requested id (or a fresh UUID), resolving
     * concurrent creation via putIfAbsent.
     */
    @Override
    public HttpSession getSession(boolean create) {
        if (!create)
            return session;
        if (session == null) {
            String key = getRequestedSessionId();
            if (key == null)
                key = UUID.randomUUID().toString();
            session = new HttpSessionImpl(key);
            HttpSession old = httpContext.getHttpSessions().putIfAbsent(key, session);
            if (old != null)
                session = old;
        }
        return session;
    }

    @Override
    public boolean isRequestedSessionIdFromCookie() {
        // TODO: support session key from url
        return getSession(false) != null;
    }

    @Override
    public boolean isRequestedSessionIdFromURL() {
        // TODO: will be implemented later
        return false;
    }

    @Deprecated
    @Override
    public boolean isRequestedSessionIdFromUrl() {
        return isRequestedSessionIdFromURL();
    }

    @Override
    public boolean isRequestedSessionIdValid() {
        String key = getRequestedSessionId();
        if (key == null)
            return false;
        return httpContext.getHttpSessions().containsKey(key);
    }

    @Override
    public RequestDispatcher getRequestDispatcher(String path) {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public AsyncContext getAsyncContext() {
        if (!asyncStarted)
            throw new IllegalStateException("not in async mode");
        return asyncContext;
    }

    @Override
    public AsyncContext startAsync() throws IllegalStateException {
        asyncStarted = true;
        asyncContext = new AsyncContextImpl(this, resp);
        return asyncContext;
    }

    @Override
    public AsyncContext startAsync(ServletRequest servletRequest, ServletResponse servletResponse) throws IllegalStateException {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public boolean isAsyncStarted() {
        return asyncStarted;
    }

    @Override
    public boolean isAsyncSupported() {
        return false;
    }

    @Override
    public boolean isUserInRole(String role) {
        // TODO Auto-generated method stub
        return false;
    }

    @Override
    public Principal getUserPrincipal() {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public boolean authenticate(HttpServletResponse response) throws IOException, ServletException {
        // TODO Auto-generated method stub
        return false;
    }

    @Override
    public void login(String username, String password) throws ServletException {
        // TODO Auto-generated method stub
    }

    @Override
    public void logout() throws ServletException {
        // TODO Auto-generated method stub
    }

    @Override
    public Collection<Part> getParts() throws IOException, ServletException {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public Part getPart(String name) throws IOException, ServletException {
        // TODO Auto-generated method stub
        return null;
    }
}
| |
/**
* This software is released as part of the Pumpernickel project.
*
* All com.pump resources in the Pumpernickel project are distributed under the
* MIT License:
* https://raw.githubusercontent.com/mickleness/pumpernickel/master/License.txt
*
* More information about the Pumpernickel project is available here:
* https://mickleness.github.io/pumpernickel/
*/
package com.pump.showcase.demo;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.Graphics2D;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.RenderingHints;
import java.awt.Shape;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.HierarchyEvent;
import java.awt.event.HierarchyListener;
import java.awt.font.TextLayout;
import java.awt.geom.AffineTransform;
import java.awt.image.BufferedImage;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.text.DecimalFormat;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import javax.swing.JComboBox;
import javax.swing.JLabel;
import javax.swing.JSpinner;
import javax.swing.SpinnerNumberModel;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import com.pump.animation.BufferedAnimationPanel;
import com.pump.image.transition.AbstractTransition;
import com.pump.image.transition.Transition;
import com.pump.inspector.Inspector;
import com.pump.plaf.QPanelUI;
import com.pump.swing.AnimationController;
import com.pump.util.PartitionIterator;
/**
* An abstract UI to demo a set of transitions.
*/
/**
 * An abstract UI to demo a set of transitions.
 */
public abstract class TransitionDemo extends ShowcaseExampleDemo {
    private static final long serialVersionUID = 1L;
    // The two frames every transition animates between ("A" -> "B").
    BufferedImage img1;
    BufferedImage img2;
    // Family name -> transitions in that family; TreeMap keeps families sorted
    // for the family combo box.
    Map<String, List<Transition>> transitionsByFamily = new TreeMap<>();
    JComboBox<String> transitionFamilyComboBox = new JComboBox<>();
    JComboBox<Transition> transitionComboBox = new JComboBox<>();
    JComboBox<Object> renderingHintsComboBox = new JComboBox<Object>();
    AnimationController controller = new AnimationController();
    // Duration of one leg (A->B) in seconds; the controller runs twice this
    // (see the duration ChangeListener in the constructor).
    JSpinner duration = new JSpinner(new SpinnerNumberModel(2, .1, 100, .1));
    JLabel interpolationLabel = new JLabel("Interpolation Hint:");
    TransitionPanel panel;

    /**
     * Creates a demo using default 200px "A"/"B" sample images for the given
     * transition families.
     */
    public TransitionDemo(Transition[]... transitions) {
        this(AbstractTransition.createImage(200, "A", true, true),
                AbstractTransition.createImage(200, "B", false, true),
                transitions);
    }

    /**
     * Creates a demo animating between the two given images.
     * Each element of {@code transitions} is one family; families are named by
     * {@link #getFamilyName(Transition[])}.
     *
     * @throws IllegalArgumentException if two families resolve to the same name
     */
    public TransitionDemo(BufferedImage bi1, BufferedImage bi2,
            Transition[][] transitions) {
        img1 = bi1;
        img2 = bi2;
        panel = new TransitionPanel(null);
        // Selecting a family repopulates the per-transition combo box; the
        // combo box is disabled when the family has a single member.
        transitionFamilyComboBox.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                transitionComboBox.removeAllItems();
                String familyName = (String) transitionFamilyComboBox
                        .getSelectedItem();
                if (familyName != null) {
                    List<Transition> t = transitionsByFamily.get(familyName);
                    transitionComboBox.setEnabled(t.size() > 1);
                    for (int a = 0; a < t.size(); a++) {
                        transitionComboBox.addItem(t.get(a));
                    }
                }
            }
        });
        transitionComboBox.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                Transition t = (Transition) transitionComboBox
                        .getSelectedItem();
                panel.setTransition(t);
            }
        });
        renderingHintsComboBox.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                panel.refresh();
            }
        });
        for (Transition[] t : transitions) {
            String familyName = getFamilyName(t);
            // NOTE(review): getFamilyName can return null when the
            // transitions share no common word, and TreeMap rejects null
            // keys -- confirm all supplied families share a word.
            if (transitionsByFamily.put(familyName, Arrays.asList(t)) != null)
                throw new IllegalArgumentException(
                        "Multiple transitions had the same family name: \""
                                + familyName + "\"");
        }
        for (String familyName : transitionsByFamily.keySet()) {
            transitionFamilyComboBox.addItem(familyName);
        }
        // Lay out the animation panel above the playback controller; the
        // GridBagConstraints object is deliberately mutated between add calls.
        examplePanel.setLayout(new GridBagLayout());
        GridBagConstraints c = new GridBagConstraints();
        c.gridx = 0;
        c.gridy = 0;
        c.weightx = 1;
        c.weighty = 0;
        c.fill = GridBagConstraints.NONE;
        c.anchor = GridBagConstraints.SOUTHWEST;
        c.weighty = 1;
        c.gridy++;
        c.fill = GridBagConstraints.NONE;
        examplePanel.add(panel, c);
        c.weightx = 0;
        c.gridy++;
        c.anchor = GridBagConstraints.NORTHWEST;
        examplePanel.add(controller, c);
        // Make the controller exactly as wide as the animation panel.
        Dimension d = controller.getPreferredSize();
        d.width = panel.getPreferredSize().width;
        controller.setPreferredSize(d);
        Inspector layout = new Inspector(configurationPanel);
        layout.addRow(new JLabel("Transition Type:"), transitionFamilyComboBox,
                false);
        layout.addRow(new JLabel("Transition:"), transitionComboBox, false);
        layout.addRow(new JLabel("Duration (s):"), duration, false);
        layout.addRow(new JLabel("Rendering Hints"), renderingHintsComboBox,
                false);
        // Repaint the animation whenever the controller's clock advances.
        controller.addPropertyChangeListener(AnimationController.TIME_PROPERTY,
                new PropertyChangeListener() {
                    public void propertyChange(PropertyChangeEvent e) {
                        panel.refresh();
                    }
                });
        controller.setLooping(true);
        // Auto-play while visible, pause when hidden, to avoid burning CPU.
        addHierarchyListener(new HierarchyListener() {
            @Override
            public void hierarchyChanged(HierarchyEvent e) {
                if (isShowing()) {
                    if (!controller.isPlaying()) {
                        controller.play();
                    }
                } else {
                    if (controller.isPlaying()) {
                        controller.pause();
                    }
                }
            }
        });
        ChangeListener durationListener = new ChangeListener() {
            public void stateChanged(ChangeEvent e) {
                float d = ((Number) duration.getValue()).floatValue();
                controller.setDuration(2 * d); // once for A->B, once for
                // B->A
            }
        };
        duration.addChangeListener(durationListener);
        // Apply the spinner's initial value immediately.
        durationListener.stateChanged(null);
        renderingHintsComboBox.addItem(RenderingHints.VALUE_RENDER_SPEED);
        renderingHintsComboBox.addItem(RenderingHints.VALUE_RENDER_QUALITY);
        // no rounded corners
        examplePanel.setUI(new QPanelUI());
    }

    /**
     * Given several Transitions, this identifies the run of words they have in
     * common. Sometimes there will only be one word (like "Bars"), but
     * sometimes there may be multiple words (like "Funky Wipe")
     * <p>
     * Candidate runs come from partitions of the first transition's name; the
     * longest run contained in every transition's toString() wins.
     * NOTE(review): returns null when no common run exists -- TODO confirm
     * callers tolerate that (the constructor uses it as a map key).
     */
    private String getFamilyName(Transition[] t) {
        String name = t[0].toString();
        String[] words = name.split("\\s");
        PartitionIterator<String> iter = new PartitionIterator<>(
                Arrays.asList(words), 3, 0);
        String bestCandidate = null;
        scanNames: while (iter.hasNext()) {
            List<List<String>> n = iter.next();
            // the middle segment of the 3-way partition is the candidate run
            List<String> m = n.get(1);
            StringBuilder sb = new StringBuilder();
            for (int a = 0; a < m.size(); a++) {
                if (a != 0) {
                    sb.append(' ');
                }
                sb.append(m.get(a));
            }
            String candidate = sb.toString();
            // reject candidates any sibling transition's name lacks
            for (Transition z : t) {
                name = z.toString();
                if (!name.contains(candidate))
                    continue scanNames;
            }
            if (bestCandidate == null
                    || candidate.length() > bestCandidate.length())
                bestCandidate = candidate;
        }
        return bestCandidate;
    }

    /**
     * Returns the hints selected in the rendering-hints combo box: index 0 is
     * the speed profile, anything else the quality profile.
     */
    public RenderingHints getRenderingHints() {
        if (renderingHintsComboBox.getSelectedIndex() == 0) {
            return createSpeedHints();
        } else {
            return createQualityHints();
        }
    }

    /** Builds a hint set favoring visual quality over speed. */
    public static RenderingHints createQualityHints() {
        RenderingHints hints = new RenderingHints(
                RenderingHints.KEY_ANTIALIASING,
                RenderingHints.VALUE_ANTIALIAS_ON);
        hints.put(RenderingHints.KEY_ALPHA_INTERPOLATION,
                RenderingHints.VALUE_ALPHA_INTERPOLATION_QUALITY);
        hints.put(RenderingHints.KEY_COLOR_RENDERING,
                RenderingHints.VALUE_COLOR_RENDER_QUALITY);
        // WARNING: set this to bicubic interpolation brings Windows Vista
        // to
        // its knees.
        hints.put(RenderingHints.KEY_RENDERING,
                RenderingHints.VALUE_RENDER_QUALITY);
        hints.put(RenderingHints.KEY_INTERPOLATION,
                RenderingHints.VALUE_INTERPOLATION_BICUBIC);
        hints.put(RenderingHints.KEY_TEXT_ANTIALIASING,
                RenderingHints.VALUE_TEXT_ANTIALIAS_ON);
        hints.put(RenderingHints.KEY_ANTIALIASING,
                RenderingHints.VALUE_ANTIALIAS_ON);
        return hints;
    }

    /** Builds a hint set favoring rendering speed over quality. */
    public static RenderingHints createSpeedHints() {
        RenderingHints hints = new RenderingHints(
                RenderingHints.KEY_ANTIALIASING,
                RenderingHints.VALUE_ANTIALIAS_OFF);
        hints.put(RenderingHints.KEY_ALPHA_INTERPOLATION,
                RenderingHints.VALUE_ALPHA_INTERPOLATION_SPEED);
        hints.put(RenderingHints.KEY_COLOR_RENDERING,
                RenderingHints.VALUE_COLOR_RENDER_SPEED);
        // NOTE(review): KEY_RENDERING is set to VALUE_RENDER_QUALITY here
        // while every other hint is the SPEED variant -- looks like a
        // copy-paste from createQualityHints(); confirm whether
        // VALUE_RENDER_SPEED was intended before changing it.
        hints.put(RenderingHints.KEY_RENDERING,
                RenderingHints.VALUE_RENDER_QUALITY);
        hints.put(RenderingHints.KEY_INTERPOLATION,
                RenderingHints.VALUE_INTERPOLATION_NEAREST_NEIGHBOR);
        hints.put(RenderingHints.KEY_TEXT_ANTIALIASING,
                RenderingHints.VALUE_TEXT_ANTIALIAS_OFF);
        hints.put(RenderingHints.KEY_ANTIALIASING,
                RenderingHints.VALUE_ANTIALIAS_OFF);
        return hints;
    }

    /**
     * Panel that renders the current transition frame plus a percentage
     * overlay, sized to match the demo images.
     */
    class TransitionPanel extends BufferedAnimationPanel {
        private static final long serialVersionUID = 1L;
        Transition transition;
        Font font = new Font("Mono", 0, 12);
        // formats the progress overlay, e.g. "42.5%"
        DecimalFormat format = new DecimalFormat("#.##");

        public TransitionPanel(Transition transition) {
            Dimension d = new Dimension(img1.getWidth(), img1.getHeight());
            setPreferredSize(d);
            setSize(d);
            setTransition(transition);
        }

        public void setTransition(Transition transition) {
            this.transition = transition;
            refresh();
        }

        @Override
        protected void paintAnimation(Graphics2D g, int width, int height) {
            g = (Graphics2D) g.create();
            g.setColor(Color.black);
            g.fillRect(0, 0, width, height);
            // Map controller time onto [0, 2): the first half plays A->B,
            // the second half plays B->A with swapped frames.
            float t = controller.getTime() / controller.getDuration() * 2;
            BufferedImage frameA, frameB;
            if (t >= 2) { // for the very last frame
                t = 0;
                frameA = img1;
                frameB = img2;
            } else if (t >= 1) {
                t = t % 1;
                frameA = img2;
                frameB = img1;
            } else {
                frameA = img1;
                frameB = img2;
            }
            g.setRenderingHints(getRenderingHints());
            if (transition != null) {
                transition.paint((Graphics2D) g, frameA, frameB, t);
                // Draw the progress percentage as white text with a black
                // outline so it reads on any background.
                Graphics2D g2 = (Graphics2D) g;
                TextLayout tl = new TextLayout(format.format((t * 100)) + "%",
                        font, g2.getFontRenderContext());
                Shape outline = tl.getOutline(
                        AffineTransform.getTranslateInstance(5, 18));
                g2.setColor(Color.black);
                g2.setStroke(new BasicStroke(2));
                g2.draw(outline);
                g2.setColor(Color.white);
                g2.fill(outline);
            }
        }
    }
}
| |
/*
* Copyright 2014 Ruediger Moeller.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.nustaq.serialization.coders;
import org.nustaq.offheap.bytez.BasicBytez;
import org.nustaq.offheap.bytez.Bytez;
import org.nustaq.offheap.bytez.onheap.HeapBytez;
import org.nustaq.serialization.*;
import org.nustaq.serialization.simpleapi.FSTBufferTooSmallException;
import org.nustaq.serialization.util.FSTInputStream;
import org.nustaq.serialization.util.FSTUtil;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
/**
* Created by ruedi on 09.11.2014.
*
* no value compression is applied. uses unsafe to read values directly from memory instead an inputstream
*
*/
/**
 * Raw-memory decoder: reads primitive values directly from a {@link BasicBytez}
 * (heap or off-heap) without any value compression. Optionally pulls data from
 * an {@link InputStream} into the bytez buffer on demand.
 */
public class FSTBytezDecoder implements FSTDecoder {

    /** Minimum number of bytes fetched per stream read, to avoid tiny reads. */
    private static final int MIN_CHUNK_SIZE = 4096;

    BasicBytez input;           // buffered encoded data
    HeapBytez ascStringCache;   // scratch buffer reused by readStringAsc
    FSTConfiguration conf;
    public FSTClazzNameRegistry clnames;
    long pos;                   // current read offset into 'input'
    InputStream inputStream;    // optional stream source (null = pure buffer mode)
    long readUntil = 0;         // exclusive end of valid data when streaming

    public FSTBytezDecoder(FSTConfiguration conf, BasicBytez input) {
        this(conf);
        if ( FSTConfiguration.isAndroid )
            throw new RuntimeException("not supported on android");
        this.input = input;
    }

    public FSTBytezDecoder(FSTConfiguration conf) {
        this.conf = conf;
        // reuse a pooled class-name registry when the configuration has one cached
        clnames = (FSTClazzNameRegistry) conf.getCachedObject(FSTClazzNameRegistry.class);
        if (clnames == null) {
            clnames = new FSTClazzNameRegistry(conf.getClassRegistry(), conf);
        } else {
            clnames.clear();
        }
    }

    byte tmp[]; // scratch array used to transfer stream data into 'input'

    /**
     * Ensures at least {@code bytes} more bytes are readable at {@link #pos},
     * pulling data from the stream if one is set.
     *
     * @return -1 on end of data, otherwise a value &gt;= 0
     */
    public int ensureReadAhead(int bytes) {
        if ( inputStream != null ) {
            if ( pos+bytes > readUntil ) {
                return readNextInputChunk(bytes);
            }
        } else if ( pos+bytes > input.length() ) {
            return -1;
        }
        return 0;
    }

    /**
     * Reads the next chunk from {@link #inputStream} into {@link #input},
     * growing the buffer as needed.
     *
     * @return number of bytes read, -1 on end of stream, 0 if nothing was read
     */
    protected int readNextInputChunk(int bytes) {
        try {
            // FIX: the previous Math.max(Integer.MAX_VALUE, bytes) always
            // evaluated to Integer.MAX_VALUE, so plain streams attempted to
            // allocate a 2GB temp buffer. Read at least 'bytes' in reasonably
            // sized chunks; for a ByteArrayInputStream slurp everything that
            // is already available in one go (the original optimization).
            int toRead;
            if ( inputStream instanceof ByteArrayInputStream ) {
                toRead = Math.max(bytes, ((ByteArrayInputStream) inputStream).available());
            } else {
                toRead = Math.max(bytes, MIN_CHUNK_SIZE);
            }
            if ( tmp == null || tmp.length < toRead ) {
                tmp = new byte[toRead];
            }
            int read = inputStream.read(tmp, 0, toRead);
            if ( read > 0 ) {
                if ( input.length() < pos+read ) {
                    // grow the backing buffer, doubling the required capacity
                    BasicBytez bytez = input.newInstance(2*(pos + read));
                    input.copyTo(bytez, 0, 0, pos);
                    // FIX: was '(HeapBytez) bytez' — the forced cast breaks
                    // off-heap BasicBytez implementations; newInstance already
                    // returns the matching buffer type.
                    input = bytez;
                }
                input.set(pos, tmp, 0, read);
                readUntil = pos+read;
                return read;
            } else if ( read == -1 )
                return -1;
            // fixme: should loop in case read == 0
        } catch (IOException e) {
            throw FSTUtil.rethrow(e);
        }
        return 0;
    }

    char chBufS[]; // scratch char buffer reused by readStringUTF

    /** Returns a char scratch buffer of at least {@code siz} elements. */
    char[] getCharBuf(int siz) {
        char chars[] = chBufS;
        if (chars == null || chars.length < siz) {
            chars = new char[Math.max(siz, 15)];
            chBufS = chars;
        }
        return chars;
    }

    /** Reads a length-prefixed string stored as raw 2-byte chars. */
    public String readStringUTF() throws IOException {
        int len = readFInt();
        char[] charBuf = getCharBuf(len * 2);
        ensureReadAhead(len * 2); // chars are stored uncompressed, 2 bytes each
        input.getCharArr(pos, charBuf, 0, len);
        pos += len * 2;
        return new String(charBuf, 0, len);
    }

    public byte readObjectHeaderTag() throws IOException {
        return readFByte();
    }

    /**
     * len < 127 !!!!!
     *
     * @return
     * @throws java.io.IOException
     */
    @Override
    public String readStringAsc() throws IOException {
        int len = readFInt();
        if (ascStringCache == null || ascStringCache.length() < len)
            ascStringCache = new HeapBytez(new byte[len]);
        ensureReadAhead(len);
        input.copyTo(ascStringCache, 0, pos, len);
        pos += len;
        // deprecated hibyte String constructor used deliberately: the bytes
        // are known to be 7-bit ASCII, this avoids a charset decode
        return new String(ascStringCache.getBase(), 0, 0, len);
    }

    public BasicBytez getInput() {
        return input;
    }

    public void setInput(BasicBytez input) {
        this.input = input;
    }

    /**
     * assumes class header+len already read
     *
     * @param componentType
     * @param len
     * @return
     */
    @Override
    public Object readFPrimitiveArray(Object array, Class componentType, int len) {
        // FIXME: if else chaining could be avoided
        if (componentType == byte.class) {
            ensureReadAhead(len);
            byte arr[] = (byte[]) array;
            input.getArr(pos, arr, 0, len);
            pos += len;
            return arr;
        } else if (componentType == char.class) {
            ensureReadAhead(len*2);
            char[] arr = (char[]) array;
            input.getCharArr(pos, arr, 0, len);
            pos += len * 2;
            return arr;
        } else if (componentType == short.class) {
            ensureReadAhead(len*2);
            short[] arr = (short[]) array;
            input.getShortArr(pos, arr, 0, len);
            pos += len * 2;
            return arr;
        } else if (componentType == int.class) {
            ensureReadAhead(len*4);
            int[] arr = (int[]) array;
            input.getIntArr(pos, arr, 0, len);
            pos += len * 4;
            return arr;
        } else if (componentType == float.class) {
            ensureReadAhead(len*4);
            float[] arr = (float[]) array;
            input.getFloatArr(pos, arr, 0, len);
            pos += len * 4;
            return arr;
        } else if (componentType == double.class) {
            ensureReadAhead(len*8);
            double[] arr = (double[]) array;
            input.getDoubleArr(pos, arr, 0, len);
            pos += len * 8;
            return arr;
        } else if (componentType == long.class) {
            ensureReadAhead(len*8);
            long[] arr = (long[]) array;
            input.getLongArr(pos, arr, 0, len);
            pos += len * 8;
            return arr;
        } else if (componentType == boolean.class) {
            ensureReadAhead(len);
            boolean[] arr = (boolean[]) array;
            input.getBooleanArr(pos, arr, 0, len);
            pos += len;
            return arr;
        } else {
            throw new RuntimeException("unexpected primitive type " + componentType.getName());
        }
    }

    @Override
    public void readFIntArr(int len, int[] arr) throws IOException {
        ensureReadAhead(len * 4); // FIX: was missing, could read past buffered data in stream mode
        input.getIntArr(pos, arr, 0, len);
        pos += len * 4;
    }

    @Override
    public int readFInt() throws IOException {
        return readPlainInt();
    }

    @Override
    public double readFDouble() throws IOException {
        return Double.longBitsToDouble(readPlainLong());
    }

    /**
     * Reads a 4 byte float.
     */
    @Override
    public float readFFloat() throws IOException {
        return Float.intBitsToFloat(readPlainInt());
    }

    @Override
    public final byte readFByte() throws IOException {
        ensureReadAhead(1);
        return input.get(pos++);
    }

    /** Like readFByte but returns -1 on end of data instead of throwing. */
    @Override
    public int readIntByte() throws IOException {
        final int res = ensureReadAhead(1);
        if ( res == -1 )
            return -1;
        return input.get(pos++) & 0xff;
    }

    @Override
    public long readFLong() throws IOException {
        return readPlainLong();
    }

    @Override
    public char readFChar() throws IOException {
        return readPlainChar();
    }

    @Override
    public short readFShort() throws IOException {
        return readPlainShort();
    }

    private char readPlainChar() throws IOException {
        ensureReadAhead(2);
        char res = input.getChar(pos);
        pos += 2;
        return res;
    }

    private short readPlainShort() throws IOException {
        ensureReadAhead(2);
        short res = input.getShort(pos);
        pos += 2;
        return res;
    }

    @Override
    public int readPlainInt() throws IOException {
        ensureReadAhead(4);
        int res = input.getInt(pos);
        pos += 4;
        return res;
    }

    private long readPlainLong() throws IOException {
        ensureReadAhead(8);
        long res = input.getLong(pos);
        pos += 8;
        return res;
    }

    @Override
    public byte[] getBuffer() {
        // fast path: a heap buffer with no offset can be handed out directly
        if ( input instanceof HeapBytez && ((HeapBytez) input).getOffsetIndex() == 0 ) {
            return ((HeapBytez) input).asByteArray();
        }
        byte res[] = new byte[(int) pos];
        input.getArr(0, res, 0, (int) pos);
        return res;
    }

    @Override
    public int getInputPos() {
        return (int) pos;
    }

    @Override
    public void moveTo(int position) {
        pos = position;
    }

    @Override
    public void reset() {
        pos = 0;
        clnames.clear();
        inputStream = null;
    }

    @Override
    public void setInputStream(InputStream in) {
        if ( in == FSTObjectInput.emptyStream ) {
            inputStream = null;
            readUntil = 0;
            return;
        }
        this.inputStream = in;
        clnames.clear();
        pos = 0;
        if ( input == null )
            input = new HeapBytez(new byte[4096]);
        readUntil = 0;
    }

    @Override
    public void resetToCopyOf(byte[] bytes, int off, int len) {
        inputStream = null;
        if ( input == null ) {
            byte[] base = new byte[len];
            input = new HeapBytez(base, 0, len);
        }
        if ( input.length() < len )
        {
            input = (HeapBytez) input.newInstance(len);
        }
        input.set(0, bytes, off, len);
        pos = 0;
        clnames.clear();
    }

    @Override
    public void resetWith(byte[] bytes, int len) {
        inputStream = null;
        if ( input == null ) {
            input = new HeapBytez(bytes, 0, len);
            return;
        }
        // suboptimal method for non heap backing
        if ( input.getClass() == HeapBytez.class ) {
            // heap backing: just repoint at the caller's array (zero copy)
            ((HeapBytez) input).setBase(bytes, 0, len);
        } else {
            BasicBytez newBytez = input.newInstance(len);
            newBytez.set(0, bytes, 0, len);
            input = newBytez; // FIX: the freshly filled copy was previously discarded
        }
        pos = 0;
        clnames.clear();
    }

    @Override
    public FSTClazzInfo readClass() throws IOException, ClassNotFoundException {
        return clnames.decodeClass(this);
    }

    @Override
    public Class classForName(String name) throws ClassNotFoundException {
        return clnames.classForName(name);
    }

    @Override
    public void registerClass(Class possible) {
        clnames.registerClass(possible);
    }

    @Override
    public void close() {
        // return the registry to the configuration's object pool for reuse
        conf.returnObject(clnames);
    }

    @Override
    public void skip(int n) {
        pos += n;
    }

    @Override
    public void readPlainBytes(byte[] b, int off, int len) {
        ensureReadAhead(len);
        // FIX: must copy OUT of the buffer into b; the previous 'input.set'
        // wrote b INTO the buffer (cf. the equivalent
        // System.arraycopy(input.buf, input.pos, b, off, len) in other decoders).
        input.getArr(pos, b, off, len);
        pos += len;
    }

    @Override
    public boolean isMapBased() {
        return false;
    }

    @Override
    public Object getDirectObject() // in case class already resolves to read object (e.g. mix input)
    {
        return null;
    }

    @Override
    public int getObjectHeaderLen() {
        return -1;
    }

    @Override
    public void consumeEndMarker() {
    }

    @Override
    public Class readArrayHeader() throws Exception {
        return readClass().getClazz();
    }

    @Override
    public void readExternalEnd() {
        // do nothing for direct encoding
    }

    @Override
    public boolean isEndMarker(String s) {
        return false;
    }

    @Override
    public int readVersionTag() throws IOException {
        return readFByte();
    }

    @Override
    public void pushBack(int bytes) {
        pos -= bytes;
    }
}
| |
package com.akdeniz.googleplaycrawler.cli;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.apache.http.HttpHost;
import org.apache.http.client.HttpClient;
import org.apache.http.conn.params.ConnRoutePNames;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.mina.core.buffer.IoBuffer;
import org.apache.mina.core.future.ConnectFuture;
import org.apache.mina.core.future.WriteFuture;
import org.apache.mina.core.session.IoSession;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import com.akdeniz.googleplaycrawler.GooglePlayAPI;
import com.akdeniz.googleplaycrawler.GooglePlayAPI.RECOMMENDATION_TYPE;
import com.akdeniz.googleplaycrawler.GooglePlayAPI.REVIEW_SORT;
import com.akdeniz.googleplaycrawler.GooglePlayException;
import com.akdeniz.googleplaycrawler.GooglePlay.AppDetails;
import com.akdeniz.googleplaycrawler.GooglePlay.BrowseLink;
import com.akdeniz.googleplaycrawler.GooglePlay.BrowseResponse;
import com.akdeniz.googleplaycrawler.GooglePlay.BulkDetailsEntry;
import com.akdeniz.googleplaycrawler.GooglePlay.BulkDetailsResponse;
import com.akdeniz.googleplaycrawler.GooglePlay.DetailsResponse;
import com.akdeniz.googleplaycrawler.GooglePlay.DocV2;
import com.akdeniz.googleplaycrawler.GooglePlay.GetReviewsResponse;
import com.akdeniz.googleplaycrawler.GooglePlay.ListResponse;
import com.akdeniz.googleplaycrawler.GooglePlay.Offer;
import com.akdeniz.googleplaycrawler.GooglePlay.ReviewResponse;
import com.akdeniz.googleplaycrawler.GooglePlay.SearchResponse;
import com.akdeniz.googleplaycrawler.gsf.GoogleServicesFramework.BindAccountResponse;
import com.akdeniz.googleplaycrawler.gsf.GoogleServicesFramework.LoginResponse;
import com.akdeniz.googleplaycrawler.gsf.packets.BindAccountRequestPacket;
import com.akdeniz.googleplaycrawler.gsf.packets.HeartBeatPacket;
import com.akdeniz.googleplaycrawler.gsf.packets.LoginRequestPacket;
import com.akdeniz.googleplaycrawler.gsf.MTalkConnector;
import com.akdeniz.googleplaycrawler.gsf.MessageFilter;
import com.akdeniz.googleplaycrawler.gsf.NotificationListener;
import com.akdeniz.googleplaycrawler.Utils;
import net.sourceforge.argparse4j.ArgumentParsers;
import net.sourceforge.argparse4j.impl.choice.CollectionArgumentChoice;
import net.sourceforge.argparse4j.inf.Argument;
import net.sourceforge.argparse4j.inf.ArgumentParser;
import net.sourceforge.argparse4j.inf.ArgumentParserException;
import net.sourceforge.argparse4j.inf.ArgumentType;
import net.sourceforge.argparse4j.inf.FeatureControl;
import net.sourceforge.argparse4j.inf.Namespace;
import net.sourceforge.argparse4j.inf.Subparser;
import net.sourceforge.argparse4j.inf.Subparsers;
/**
* @author akdeniz
*/
public class googleplay {
/** Column separator for all tabular console output (name keeps the historic misspelling). */
private static final String DELIMETER = ";";
/** Command line parser, fully configured in the constructor. */
private ArgumentParser parser;
/** API facade; created by the login/checkin helpers before use. */
private GooglePlayAPI service;
/** Parsed command line arguments. */
private Namespace namespace;

/** Sub-commands selectable on the command line; stored under the "command" attribute. */
public static enum COMMAND {
    LIST, DOWNLOAD, CHECKIN, CATEGORIES, SEARCH, PERMISSIONS, REVIEWS, REGISTER, USEGCM, RECOMMENDATIONS, LIST_DEVICES
}

// NOTE(review): StringJoiner here is presumably a project-local helper class —
// java.util.StringJoiner is not imported in this file. Verify before changing.
/** Header row for application listings (list/search commands). */
private static final String LIST_HEADER = new StringJoiner(DELIMETER).add("Title").add("Package").add("Creator")
        .add("Price").add("Installation Size").add("Number Of Downloads").toString();
/** Header row for the categories command. */
private static final String CATEGORIES_HEADER = new StringJoiner(DELIMETER).add("ID").add("Name").toString();
/** Header row for sub-category listings. */
private static final String SUBCATEGORIES_HEADER = new StringJoiner(DELIMETER).add("ID").add("Title").toString();
/** Timeout in milliseconds used for connect/write/response waits. */
private static final int TIMEOUT = 10000;
/**
 * Builds the complete argparse4j command line: common options (credentials,
 * configuration file, proxy, localization) plus one sub-parser per
 * {@link COMMAND}. Each sub-parser stores its COMMAND constant under the
 * "command" attribute, which {@code operate} later dispatches on.
 */
public googleplay() {
    parser = ArgumentParsers.newArgumentParser("googleplay").description("Play with Google Play API :)");
    /* =================Common Arguments============== */
    // FeatureControl.SUPPRESS keeps unset options out of the namespace so the
    // conf-file fallback in login()/checkin() can detect missing values.
    parser.addArgument("-f", "--conf")
            .nargs("?")
            .help("Configuration file to used for login! If any of androidid, email and password is supplied, it will be ignored!")
            .setDefault(FeatureControl.SUPPRESS);
    parser.addArgument("-i", "--androidid").nargs("?")
            .help("ANDROID-ID to be used! You can use \"Checkin\" mechanism, if you don't have one!")
            .setDefault(FeatureControl.SUPPRESS);
    parser.addArgument("-e", "--email").nargs("?").help("Email address to be used for login.")
            .setDefault(FeatureControl.SUPPRESS);
    parser.addArgument("-p", "--password").nargs("?").help("Password to be used for login.")
            .setDefault(FeatureControl.SUPPRESS);
    parser.addArgument("-d", "--device").nargs("?").help("Device properties to be used for checkin");
    parser.addArgument("-t", "--securitytoken").nargs("?").help("Security token that was generated at checkin. It is only required for \"usegcm\" option")
            .setDefault(FeatureControl.SUPPRESS);
    parser.addArgument("-z", "--localization").nargs("?").help("Localization string that will customise fetched informations such as reviews, " +
            "descriptions,... Can be : en-EN, en-US, tr-TR, fr-FR ... (default : en-EN)").setDefault(FeatureControl.SUPPRESS);
    parser.addArgument("-a", "--host").nargs("?").help("Proxy host").setDefault(FeatureControl.SUPPRESS);
    parser.addArgument("-l", "--port").type(Integer.class).nargs("?").help("Proxy port")
            .setDefault(FeatureControl.SUPPRESS);
    Subparsers subparsers = parser.addSubparsers().description("Command to be executed.");
    /* =================Download Arguments============== */
    Subparser downloadParser = subparsers.addParser("download", true).description("download file(s)!")
            .setDefault("command", COMMAND.DOWNLOAD);
    downloadParser.addArgument("sdk").required(true).help("set application sdk");
    downloadParser.addArgument("packagename").nargs("+").help("applications to download");
    /* =================Check-In Arguments============== */
    Subparser checkinParser = subparsers.addParser("checkin", true).description("checkin section!")
            .setDefault("command", COMMAND.CHECKIN);
    /* =================List-Devices Arguments============== */
    Subparser listDevParser = subparsers.addParser("list-devices", true).description("list available devices")
            .setDefault("command", COMMAND.LIST_DEVICES);
    /* =================List Arguments============== */
    Subparser listParser = subparsers.addParser("list", true)
            .description("Lists sub-categories and applications within them!").setDefault("command", COMMAND.LIST);
    listParser.addArgument("category").required(true).help("defines category");
    listParser.addArgument("-s", "--subcategory").required(false).help("defines sub-category");
    listParser.addArgument("-o", "--offset").type(Integer.class).required(false)
            .help("offset to define where list begins");
    listParser.addArgument("-n", "--number").type(Integer.class).required(false)
            .help("how many app will be listed");
    /* =================Categories Arguments============== */
    Subparser categoriesParser = subparsers.addParser("categories", true)
            .description("list categories for browse section").setDefault("command", COMMAND.CATEGORIES);
    /* =================Search Arguments============== */
    Subparser searchParser = subparsers.addParser("search", true).description("search for query!")
            .setDefault("command", COMMAND.SEARCH);
    searchParser.addArgument("query").help("query to be searched");
    searchParser.addArgument("-o", "--offset").type(Integer.class).required(false)
            .help("offset to define where list begins");
    searchParser.addArgument("-n", "--number").type(Integer.class).required(false)
            .help("how many app will be listed");
    /* =================Permissions Arguments============== */
    Subparser permissionsParser = subparsers.addParser("permissions", true)
            .description("list permissions of given application").setDefault("command", COMMAND.PERMISSIONS);
    permissionsParser.addArgument("package").nargs("+").help("applications whose permissions to be listed");
    /* =================Reviews Arguments============== */
    Subparser reviewsParser = subparsers.addParser("reviews", true)
            .description("lists reviews of given application").setDefault("command", COMMAND.REVIEWS);
    reviewsParser.addArgument("package").help("application whose reviews to be listed");
    reviewsParser.addArgument("-s", "--sort").choices(new ReviewSortChoice()).type(new ReviewSort())
            .required(false).help("sorting type").setDefault(REVIEW_SORT.HELPFUL);
    reviewsParser.addArgument("-o", "--offset").type(Integer.class).required(false)
            .help("offset to define where list begins");
    reviewsParser.addArgument("-n", "--number").type(Integer.class).required(false)
            .help("how many reviews will be listed");
    /* =================Recommendation Arguments============== */
    Subparser recommendationParser = subparsers.addParser("recommendations", true)
            .description("lists recommended apps of given application").setDefault("command", COMMAND.RECOMMENDATIONS);
    recommendationParser.addArgument("package").help("application whose recommendations to be listed");
    recommendationParser.addArgument("-t", "--type").choices(new ReleationChoice()).type(new RecommendationType())
            .required(false).help("releations type").setDefault(RECOMMENDATION_TYPE.ALSO_INSTALLED);
    recommendationParser.addArgument("-o", "--offset").type(Integer.class).required(false)
            .help("offset to define where list begins");
    recommendationParser.addArgument("-n", "--number").type(Integer.class).required(false)
            .help("how many recommendations will be listed");
    /* =================Register Arguments============== */
    Subparser registerParser = subparsers.addParser("register", true).description("registers device so that can be seen from web!")
            .setDefault("command", COMMAND.REGISTER);
    /* =================UseGCM Arguments============== */
    subparsers.addParser("usegcm", true).description("listens GCM(GoogleCloudMessaging) for download notification and downloads them!")
            .setDefault("command", COMMAND.USEGCM);
}
/** CLI entry point: builds the parser and runs the selected command. */
public static void main(String[] args) throws Exception {
    final googleplay cli = new googleplay();
    cli.operate(args);
}
/**
 * Parses the command line, then dispatches to the selected sub-command.
 * Exits the VM with -1 on a parse error or a command failure.
 */
public void operate(String[] argv) {
    try {
        namespace = parser.parseArgs(argv);
    } catch (ArgumentParserException e) {
        System.err.println(e.getMessage());
        parser.printHelp();
        System.exit(-1);
    }
    try {
        runCommand((COMMAND) namespace.get("command"));
    } catch (Exception e) {
        System.err.println(e.getMessage());
        System.exit(-1);
    }
}

/** Invokes the handler matching the parsed sub-command. */
private void runCommand(COMMAND command) throws Exception {
    switch (command) {
    case CHECKIN:
        checkinCommand();
        break;
    case DOWNLOAD:
        downloadCommand();
        break;
    case LIST_DEVICES:
        listDevicesCommand();
        break;
    case LIST:
        listCommand();
        break;
    case CATEGORIES:
        categoriesCommand();
        break;
    case SEARCH:
        searchCommand();
        break;
    case PERMISSIONS:
        permissionsCommand();
        break;
    case REVIEWS:
        reviewsCommand();
        break;
    case REGISTER:
        registerCommand();
        break;
    case USEGCM:
        useGCMCommand();
        break;
    case RECOMMENDATIONS:
        recommendationsCommand();
        break;
    }
}
/**
 * Connects to the GTALK/MTalk server, authenticates the device, binds the
 * account, then loops forever sending heartbeats while the
 * {@link NotificationListener} handles incoming download notifications.
 * Never returns normally.
 */
private void useGCMCommand() throws Exception {
    String ac2dmAuth = loginAC2DM();
    MTalkConnector connector = new MTalkConnector(new NotificationListener(service));
    ConnectFuture connectFuture = connector.connect();
    connectFuture.await(TIMEOUT);
    if (!connectFuture.isConnected()) {
        throw new IOException("Couldn't connect to GTALK server!");
    }
    final IoSession session = connectFuture.getSession();
    send(session, IoBuffer.wrap(new byte[]{0x07})); // connection sanity check
    System.out.println("Connected to server.");
    // device id / security token are hex strings; the protocol wants them decimal
    String deviceIDStr = String.valueOf(new BigInteger(service.getAndroidID(), 16).longValue());
    String securityTokenStr = String.valueOf(new BigInteger(service.getSecurityToken(), 16).longValue());
    LoginRequestPacket loginRequestPacket = new LoginRequestPacket(deviceIDStr, securityTokenStr, service.getAndroidID());
    // filter matches the response carrying our packet id
    LoginResponseFilter loginResponseFilter = new LoginResponseFilter(loginRequestPacket.getPacketID());
    connector.addFilter(loginResponseFilter);
    send(session, loginRequestPacket);
    LoginResponse loginResponse = loginResponseFilter.nextMessage(TIMEOUT);
    connector.removeFilter(loginResponseFilter);
    if (loginResponse == null) {
        throw new IllegalStateException("Login response could not be received!");
    } else if (loginResponse.hasError()) {
        throw new IllegalStateException(loginResponse.getError().getExtension(0).getMessage());
    }
    // FIX: corrected typo in the status message ("Autheticated.")
    System.out.println("Authenticated.");
    BindAccountRequestPacket bindAccountRequestPacket = new BindAccountRequestPacket(service.getEmail(), ac2dmAuth);
    BindAccountResponseFilter barf = new BindAccountResponseFilter(bindAccountRequestPacket.getPacketID());
    connector.addFilter(barf);
    send(session, bindAccountRequestPacket);
    BindAccountResponse bindAccountResponse = barf.nextMessage(TIMEOUT);
    connector.removeFilter(barf);
    /*if(bindAccountResponse==null){
        throw new IllegalStateException("Account bind response could not be received!");
    } else if(bindAccountResponse.hasError()){
        throw new IllegalStateException(bindAccountResponse.getError().getExtension(0).getMessage());
    }*/
    System.out.println("Listening for notifications from server..");
    // send heart beat packets to keep connection up.
    while (true) {
        send(session, new HeartBeatPacket());
        Thread.sleep(30000);
    }
}
/**
 * Writes {@code object} to the session and waits up to {@link #TIMEOUT} ms for
 * the write to complete.
 *
 * @throws IOException if the write did not complete (wrapping the session's
 *                     failure cause when one is available)
 */
private static void send(IoSession session, Object object) throws InterruptedException, IOException {
    WriteFuture writeFuture = session.write(object);
    writeFuture.await(TIMEOUT);
    if (!writeFuture.isWritten()) {
        // FIX: corrected typo in the error messages ("occured")
        Throwable exception = writeFuture.getException();
        if (exception != null) {
            throw new IOException("Error occurred while writing!", exception);
        }
        throw new IOException("Error occurred while writing!");
    }
}
/**
 * Prints the package name of every app recommended for the given package,
 * or a notice when there are none.
 */
private void recommendationsCommand() throws Exception {
    login();
    final String pkg = namespace.getString("package");
    final RECOMMENDATION_TYPE relation = (RECOMMENDATION_TYPE) namespace.get("type");
    final ListResponse recommendations =
            service.recommendations(pkg, relation, namespace.getInt("offset"), namespace.getInt("number"));
    final DocV2 doc = recommendations.getDoc(0);
    if (doc.getChildCount() == 0) {
        System.out.println("No recommendation found!");
        return;
    }
    for (final DocV2 child : doc.getChildList()) {
        System.out.println(child.getDetails().getAppDetails().getPackageName());
    }
}
/**
 * Fetches and prints the reviews of the given package; a notice is printed
 * first when no review exists (the raw response is printed either way).
 */
private void reviewsCommand() throws Exception {
    login();
    final ReviewResponse reviews = service.reviews(
            namespace.getString("package"),
            (REVIEW_SORT) namespace.get("sort"),
            namespace.getInt("offset"),
            namespace.getInt("number"));
    final GetReviewsResponse response = reviews.getGetResponse();
    if (response.getReviewCount() == 0) {
        System.out.println("No review found!");
    }
    System.out.println(response);
}
/**
 * Registers the (configured or default) device with the logged-in account so
 * it becomes visible on the Play web store.
 */
private void registerCommand() throws Exception {
    final String device = namespace.getString("device");
    final Properties props;
    if (device == null) {
        // no -d/--device given: register with default properties
        props = new Properties();
        props.setProperty("default", "true");
    } else {
        props = Utils.parseDeviceProperties(device);
        props.setProperty("default", "false");
    }
    login();
    service.uploadDeviceConfig(props);
    System.out.println("A device is registered to your account! You can see it at \"https://play.google.com/store/account\" after a few downloads!");
}
/**
 * For each requested package, prints its docid followed by one tab-indented
 * permission per line.
 */
private void permissionsCommand() throws Exception {
    login();
    final List<String> packages = namespace.getList("package");
    final BulkDetailsResponse details = service.bulkDetails(packages);
    for (final BulkDetailsEntry entry : details.getEntryList()) {
        final DocV2 doc = entry.getDoc();
        final AppDetails appDetails = doc.getDetails().getAppDetails();
        System.out.println(doc.getDocid());
        for (final String permission : appDetails.getPermissionList()) {
            System.out.println("\t" + permission);
        }
    }
}
/**
 * Runs a Play store search and prints one delimiter-separated row per result
 * under {@link #LIST_HEADER}.
 */
private void searchCommand() throws Exception {
    login();
    final SearchResponse searchResponse = service.search(
            namespace.getString("query"), namespace.getInt("offset"), namespace.getInt("number"));
    System.out.println(LIST_HEADER);
    for (final DocV2 child : searchResponse.getDoc(0).getChildList()) {
        final AppDetails details = child.getDetails().getAppDetails();
        final String row = new StringJoiner(DELIMETER)
                .add(child.getTitle())
                .add(details.getPackageName())
                .add(child.getCreator())
                .add(child.getOffer(0).getFormattedAmount())
                .add(String.valueOf(details.getInstallationSize()))
                .add(details.getNumDownloads())
                .toString();
        System.out.println(row);
    }
}
/** Prints id and name of every browse category, one row per category. */
private void categoriesCommand() throws Exception {
    login();
    final BrowseResponse browseResponse = service.browse();
    System.out.println(CATEGORIES_HEADER);
    for (final BrowseLink browseLink : browseResponse.getCategoryList()) {
        // the category id is the trailing component of the data URL, after "&cat="
        final String[] parts = browseLink.getDataUrl().split("&cat=");
        final String categoryId = parts[parts.length - 1];
        System.out.println(new StringJoiner(DELIMETER).add(categoryId).add(browseLink.getName()).toString());
    }
}
/**
 * Performs a Google checkin and prints the generated ANDROID-ID and security
 * token, which the other commands (and the "usegcm" option) require.
 */
private void checkinCommand() throws Exception {
    checkin();
    // FIX: corrected the broken success message ("succesfully checkined")
    System.out.println("Your account successfully checked in!");
    System.out.println("AndroidID : " + service.getAndroidID());
    System.out.println("SecurityToken : " + service.getSecurityToken());
}
/**
 * Authenticates {@link #service} using command-line credentials, falling back
 * to the properties file given with -f/--conf.
 *
 * @throws GooglePlayException when neither source supplies androidid, email and password
 */
private void login() throws Exception {
    String androidid = namespace.getString("androidid");
    String email = namespace.getString("email");
    String password = namespace.getString("password");
    String localization = namespace.getString("localization");
    if (androidid != null && email != null && password != null) {
        createLoginableService(androidid, email, password, localization);
        service.login();
        return;
    }
    if (namespace.getAttrs().containsKey("conf")) {
        Properties properties = new Properties();
        // FIX: close the config stream — it was leaked before
        FileInputStream in = new FileInputStream(namespace.getString("conf"));
        try {
            properties.load(in);
        } finally {
            in.close();
        }
        androidid = properties.getProperty("androidid");
        email = properties.getProperty("email");
        password = properties.getProperty("password");
        localization = properties.getProperty("localization");
        if (androidid != null && email != null && password != null) {
            createLoginableService(androidid, email, password, localization);
            service.login();
            return;
        }
    }
    throw new GooglePlayException("Lack of information for login!");
}
/**
 * Logs in and performs an AC2DM login; unlike {@link #login()} this also
 * requires the checkin security token (-t/--securitytoken or "securitytoken"
 * in the conf file).
 *
 * @return the AC2DM auth token
 * @throws GooglePlayException when required credentials are missing
 */
private String loginAC2DM() throws Exception {
    String androidid = namespace.getString("androidid");
    String email = namespace.getString("email");
    String password = namespace.getString("password");
    String securityToken = namespace.getString("securitytoken");
    String localization = namespace.getString("localization");
    if (androidid != null && email != null && password != null && securityToken != null) {
        createLoginableService(androidid, email, password, localization);
        service.login();
        service.setSecurityToken(securityToken);
        return service.loginAC2DM();
    }
    if (namespace.getAttrs().containsKey("conf")) {
        Properties properties = new Properties();
        // FIX: close the config stream — it was leaked before
        FileInputStream in = new FileInputStream(namespace.getString("conf"));
        try {
            properties.load(in);
        } finally {
            in.close();
        }
        androidid = properties.getProperty("androidid");
        email = properties.getProperty("email");
        password = properties.getProperty("password");
        securityToken = properties.getProperty("securitytoken");
        localization = properties.getProperty("localization");
        if (androidid != null && email != null && password != null && securityToken != null) {
            createLoginableService(androidid, email, password, localization);
            service.login();
            service.setSecurityToken(securityToken);
            return service.loginAC2DM();
        }
    }
    throw new GooglePlayException("Lack of information for login!");
}
/**
 * Creates the API facade with full credentials (android id included) and
 * routes it through a proxy when one is configured.
 */
private void createLoginableService(String androidid, String email, String password, String localization) throws Exception {
    service = new GooglePlayAPI(email, password, androidid);
    service.setLocalization(localization);
    final HttpClient proxied = getProxiedHttpClient();
    if (proxied != null) {
        service.setClient(proxied);
    }
}
/**
 * Creates the API facade without an android id (checkin will generate one)
 * and routes it through a proxy when one is configured.
 */
private void createCheckinableService(String email, String password, String localization) throws Exception {
    service = new GooglePlayAPI(email, password);
    service.setLocalization(localization);
    final HttpClient proxied = getProxiedHttpClient();
    if (proxied != null) {
        service.setClient(proxied);
    }
}
/**
 * Prints the alias and display name of every device template bundled in the
 * classpath resource /devices/list.xml.
 */
private void listDevicesCommand() throws Exception {
    System.out.println("List of available devices:");
    DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
    DocumentBuilder db = dbf.newDocumentBuilder();
    Document dom;
    // FIX: close the classpath resource stream — it was leaked before
    InputStream in = getClass().getResourceAsStream("/devices/list.xml");
    try {
        dom = db.parse(in);
    } finally {
        if (in != null) {
            in.close();
        }
    }
    Element root = dom.getDocumentElement();
    NodeList devices = root.getElementsByTagName("device");
    for (int i = 0; i < devices.getLength(); i++) {
        Element el = (Element) devices.item(i);
        String alias = el.getAttribute("alias");
        String name = el.getTextContent();
        System.out.println(alias + " - " + name);
    }
}
/**
 * Without a sub-category, lists the sub-categories of the given category;
 * with one, lists the apps it contains in {@link #LIST_HEADER} format.
 */
private void listCommand() throws Exception {
    login();
    final String category = namespace.getString("category");
    final String subcategory = namespace.getString("subcategory");
    final ListResponse listResponse = service.list(
            category, subcategory, namespace.getInt("offset"), namespace.getInt("number"));
    if (subcategory == null) {
        // no sub-category given: show what sub-categories are available
        System.out.println(SUBCATEGORIES_HEADER);
        for (final DocV2 child : listResponse.getDocList()) {
            System.out.println(new StringJoiner(DELIMETER).add(child.getDocid()).add(child.getTitle()).toString());
        }
        return;
    }
    System.out.println(LIST_HEADER);
    for (final DocV2 child : listResponse.getDoc(0).getChildList()) {
        final AppDetails details = child.getDetails().getAppDetails();
        final String row = new StringJoiner(DELIMETER)
                .add(child.getTitle())
                .add(details.getPackageName())
                .add(child.getCreator())
                .add(child.getOffer(0).getFormattedAmount())
                .add(String.valueOf(details.getInstallationSize()))
                .add(details.getNumDownloads())
                .toString();
        System.out.println(row);
    }
}
private void downloadCommand() throws Exception {
login();
List<String> packageNames = namespace.getList("packagename");
String sdk = namespace.getString("sdk");
for (String packageName : packageNames) {
download(packageName, sdk);
}
}
private void checkin() throws Exception {
String email = namespace.getString("email");
String password = namespace.getString("password");
String localization = namespace.getString("localization");
String device = namespace.getString("device");
if (email != null && password != null) {
createCheckinableService(email, password, localization);
Properties props = new Properties();
if (device != null) {
props = Utils.parseDeviceProperties(device);
props.setProperty("default", "false");
} else {
props.setProperty("default", "true");
}
service.checkin(props);
return;
}
if (namespace.getAttrs().containsKey("conf")) {
Properties properties = new Properties();
properties.load(new FileInputStream(namespace.getString("conf")));
email = properties.getProperty("email");
password = properties.getProperty("password");
localization = properties.getProperty("localization");
device = properties.getProperty("device");
if (email != null && password != null) {
createCheckinableService(email, password, localization);
Properties props = new Properties();
if (device != null) {
props = Utils.parseDeviceProperties(device);
props.setProperty("default", "false");
} else {
props.setProperty("default", "true");
}
service.checkin(props);
return;
}
}
throw new GooglePlayException("Lack of information for login!");
}
private HttpClient getProxiedHttpClient() throws Exception {
String host = namespace.getString("host");
Integer port = namespace.getInt("port");
if (host != null && port != null) {
return getProxiedHttpClient(host, port);
}
if (namespace.getAttrs().containsKey("conf")) {
Properties properties = new Properties();
properties.load(new FileInputStream(namespace.getString("conf")));
host = properties.getProperty("host");
String portString = properties.getProperty("port");
if (host != null && portString != null) {
port = Integer.valueOf(portString);
return getProxiedHttpClient(host, port);
}
}
return null;
}
private static HttpClient getProxiedHttpClient(String host, Integer port) throws Exception {
HttpClient client = new DefaultHttpClient(GooglePlayAPI.getConnectionManager());
client.getConnectionManager().getSchemeRegistry().register(Utils.getMockedScheme());
HttpHost proxy = new HttpHost(host, port);
client.getParams().setParameter(ConnRoutePNames.DEFAULT_PROXY, proxy);
return client;
}
    /**
     * Prints basic details (package name, version code, version string) for the
     * given package. The actual APK download is not implemented yet — see the
     * commented-out block below.
     *
     * @param sdk currently unused in the live code path — presumably intended
     *            for the download call; verify against caller
     */
    private void download(String packageName, String sdk) throws IOException {
        DetailsResponse details = service.details(packageName);
        AppDetails appDetails = details.getDocV2().getDetails().getAppDetails();
        // NOTE(review): 'offer' is only referenced by the commented-out download code.
        Offer offer = details.getDocV2().getOffer(0);
        int versionCode = appDetails.getVersionCode();
        String versionStr = appDetails.getVersionString();
        System.out.print(packageName + "," + versionCode + "," + versionStr);
        // TODO: download
        /*
        long installationSize = appDetails.getInstallationSize();
        int offerType = offer.getOfferType();
        boolean checkoutRequired = offer.getCheckoutFlowRequired();
        // paid application...ignore
        if (checkoutRequired) {
            System.out.println("Checkout required! Ignoring.." + appDetails.getPackageName());
            return;
        }
        System.out.println("Downloading..." + appDetails.getPackageName() + " : " + installationSize + " bytes");
        InputStream downloadStream = service.download(appDetails.getPackageName(), versionCode, offerType);
        FileOutputStream outputStream = new FileOutputStream(appDetails.getPackageName() + ".apk");
        byte buffer[] = new byte[1024];
        for (int k = 0; (k = downloadStream.read(buffer)) != -1;) {
            outputStream.write(buffer, 0, k);
        }
        downloadStream.close();
        outputStream.close();
        System.out.println("Downloaded! " + appDetails.getPackageName() + ".apk");
        //*/
    }
}
/**
 * Argument type that maps a command-line value onto a {@code REVIEW_SORT}
 * constant, passing the raw string through unchanged when it matches none.
 */
class ReviewSort implements ArgumentType<Object> {
    @Override
    public Object convert(ArgumentParser parser, Argument arg, String value) throws ArgumentParserException {
        try {
            return REVIEW_SORT.valueOf(value);
        } catch (IllegalArgumentException ignored) {
            // Not a known constant; hand the raw value to the choice validator.
            return value;
        }
    }
}
/** Choice restriction over the supported review sort orders. */
class ReviewSortChoice extends CollectionArgumentChoice<REVIEW_SORT> {
    public ReviewSortChoice() {
        super(REVIEW_SORT.NEWEST, REVIEW_SORT.HIGHRATING, REVIEW_SORT.HELPFUL);
    }
    @Override
    public boolean contains(Object val) {
        try {
            return super.contains(val);
        } catch (IllegalArgumentException ignored) {
            // A value of a non-enum type is "not contained" rather than an error.
            return false;
        }
    }
}
/**
 * Argument type that maps a command-line value onto a {@code RECOMMENDATION_TYPE}
 * constant, passing the raw string through unchanged when it matches none.
 */
class RecommendationType implements ArgumentType<Object> {
    @Override
    public Object convert(ArgumentParser parser, Argument arg, String value) throws ArgumentParserException {
        try {
            return RECOMMENDATION_TYPE.valueOf(value);
        } catch (IllegalArgumentException ignored) {
            // Not a known constant; hand the raw value to the choice validator.
            return value;
        }
    }
}
/**
 * Choice restriction over the supported recommendation relations.
 * (The misspelled class name "Releation" is kept for compatibility.)
 */
class ReleationChoice extends CollectionArgumentChoice<RECOMMENDATION_TYPE> {
    public ReleationChoice() {
        super(RECOMMENDATION_TYPE.ALSO_VIEWED, RECOMMENDATION_TYPE.ALSO_INSTALLED);
    }
    @Override
    public boolean contains(Object val) {
        try {
            return super.contains(val);
        } catch (IllegalArgumentException ignored) {
            // A value of a non-enum type is "not contained" rather than an error.
            return false;
        }
    }
}
/**
 * Minimal fluent string joiner: concatenates added elements separated by a
 * fixed delimiter. Returns the empty string when nothing has been added.
 * <p>
 * Fixes over the previous version: fields are private and final, the field
 * name spelling is corrected, and a null FIRST element no longer throws
 * (it is rendered as "null", consistent with null elements in other positions).
 */
class StringJoiner {
    private final String delimiter;
    private final List<String> elements = new ArrayList<String>();

    public StringJoiner(String delimiter) {
        this.delimiter = delimiter;
    }

    /** Appends one element and returns {@code this} for chaining. */
    public StringJoiner add(String elem) {
        elements.add(elem);
        return this;
    }

    @Override
    public String toString() {
        StringBuilder builder = new StringBuilder();
        for (int i = 0; i < elements.size(); i++) {
            if (i > 0) {
                builder.append(delimiter);
            }
            builder.append(elements.get(i));
        }
        return builder.toString();
    }
}
/** Accepts only {@code LoginResponse} messages whose packet id matches the one we sent. */
class LoginResponseFilter extends MessageFilter<LoginResponse> {
    private final String id;

    public LoginResponseFilter(String id) {
        super(LoginResponse.class);
        this.id = id;
    }

    @Override
    protected boolean accept(LoginResponse message) {
        return id.equals(message.getPacketid());
    }
}
/** Accepts only {@code BindAccountResponse} messages whose packet id matches the one we sent. */
class BindAccountResponseFilter extends MessageFilter<BindAccountResponse> {
    private final String id;

    public BindAccountResponseFilter(String id) {
        super(BindAccountResponse.class);
        this.id = id;
    }

    @Override
    protected boolean accept(BindAccountResponse message) {
        return id.equals(message.getPacketid());
    }
}
| |
/*
* Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.client.cp.internal.datastructures.atomiclong;
import com.hazelcast.client.impl.clientside.ClientMessageDecoder;
import com.hazelcast.client.impl.protocol.ClientMessage;
import com.hazelcast.client.impl.protocol.codec.AtomicLongAddAndGetCodec;
import com.hazelcast.client.impl.protocol.codec.AtomicLongAlterCodec;
import com.hazelcast.client.impl.protocol.codec.AtomicLongApplyCodec;
import com.hazelcast.client.impl.protocol.codec.AtomicLongCompareAndSetCodec;
import com.hazelcast.client.impl.protocol.codec.AtomicLongGetAndAddCodec;
import com.hazelcast.client.impl.protocol.codec.AtomicLongGetAndSetCodec;
import com.hazelcast.client.impl.protocol.codec.AtomicLongGetCodec;
import com.hazelcast.client.impl.protocol.codec.CPGroupDestroyCPObjectCodec;
import com.hazelcast.client.impl.spi.ClientContext;
import com.hazelcast.client.impl.spi.ClientProxy;
import com.hazelcast.client.impl.spi.impl.ClientInvocation;
import com.hazelcast.client.impl.spi.impl.ClientInvocationFuture;
import com.hazelcast.client.impl.ClientDelegatingFuture;
import com.hazelcast.cp.IAtomicLong;
import com.hazelcast.core.IFunction;
import com.hazelcast.cp.CPGroupId;
import com.hazelcast.cp.internal.RaftGroupId;
import com.hazelcast.cp.internal.datastructures.atomiclong.AtomicLongService;
import com.hazelcast.internal.serialization.Data;
import com.hazelcast.spi.impl.InternalCompletableFuture;
import static com.hazelcast.cp.internal.datastructures.atomiclong.operation.AlterOp.AlterResultType.NEW_VALUE;
import static com.hazelcast.cp.internal.datastructures.atomiclong.operation.AlterOp.AlterResultType.OLD_VALUE;
/**
 * Client-side Raft-based proxy implementation of {@link IAtomicLong}.
 * <p>
 * Every synchronous operation delegates to its asynchronous counterpart and
 * blocks on the result; every asynchronous operation encodes a request,
 * invokes it on the owning CP group and decodes the response.
 */
@SuppressWarnings("checkstyle:methodcount")
public class AtomicLongProxy extends ClientProxy implements IAtomicLong {
    private static final ClientMessageDecoder ADD_AND_GET_DECODER = new ClientMessageDecoder() {
        @Override
        public Long decodeClientMessage(ClientMessage clientMessage) {
            return AtomicLongAddAndGetCodec.decodeResponse(clientMessage).response;
        }
    };
    private static final ClientMessageDecoder COMPARE_AND_SET_DECODER = new ClientMessageDecoder() {
        @Override
        public Boolean decodeClientMessage(ClientMessage clientMessage) {
            return AtomicLongCompareAndSetCodec.decodeResponse(clientMessage).response;
        }
    };
    private static final ClientMessageDecoder GET_AND_ADD_DECODER = new ClientMessageDecoder() {
        @Override
        public Long decodeClientMessage(ClientMessage clientMessage) {
            return AtomicLongGetAndAddCodec.decodeResponse(clientMessage).response;
        }
    };
    private static final ClientMessageDecoder GET_AND_SET_DECODER = new ClientMessageDecoder() {
        @Override
        public Long decodeClientMessage(ClientMessage clientMessage) {
            return AtomicLongGetAndSetCodec.decodeResponse(clientMessage).response;
        }
    };
    private static final ClientMessageDecoder GET_DECODER = new ClientMessageDecoder() {
        @Override
        public Long decodeClientMessage(ClientMessage clientMessage) {
            return AtomicLongGetCodec.decodeResponse(clientMessage).response;
        }
    };
    private static final ClientMessageDecoder APPLY_DECODER = new ClientMessageDecoder() {
        @Override
        public Object decodeClientMessage(ClientMessage clientMessage) {
            return AtomicLongApplyCodec.decodeResponse(clientMessage).response;
        }
    };
    private static final ClientMessageDecoder ALTER_DECODER = new ClientMessageDecoder() {
        @Override
        public Long decodeClientMessage(ClientMessage clientMessage) {
            return AtomicLongAlterCodec.decodeResponse(clientMessage).response;
        }
    };

    // CP group that owns this atomic long, and the data structure's name within it.
    private final RaftGroupId groupId;
    private final String objectName;

    public AtomicLongProxy(ClientContext context, RaftGroupId groupId, String proxyName, String objectName) {
        super(AtomicLongService.SERVICE_NAME, proxyName, context);
        this.groupId = groupId;
        this.objectName = objectName;
    }

    /** Invokes the given request on the cluster and returns the raw invocation future. */
    private ClientInvocationFuture invokeRequest(ClientMessage request) {
        return new ClientInvocation(getClient(), request, name).invoke();
    }

    /** Invokes the given request and decodes its response with {@code decoder}. */
    private <T> InternalCompletableFuture<T> invokeAsync(ClientMessage request, ClientMessageDecoder decoder) {
        return new ClientDelegatingFuture<>(invokeRequest(request), getSerializationService(), decoder);
    }

    @Override
    public long addAndGet(long delta) {
        return addAndGetAsync(delta).joinInternal();
    }

    @Override
    public boolean compareAndSet(long expect, long update) {
        return compareAndSetAsync(expect, update).joinInternal();
    }

    @Override
    public long decrementAndGet() {
        return decrementAndGetAsync().joinInternal();
    }

    @Override
    public long get() {
        return getAsync().joinInternal();
    }

    @Override
    public long getAndAdd(long delta) {
        return getAndAddAsync(delta).joinInternal();
    }

    @Override
    public long getAndSet(long newValue) {
        return getAndSetAsync(newValue).joinInternal();
    }

    @Override
    public long incrementAndGet() {
        return incrementAndGetAsync().joinInternal();
    }

    @Override
    public long getAndIncrement() {
        return getAndIncrementAsync().joinInternal();
    }

    @Override
    public void set(long newValue) {
        setAsync(newValue).joinInternal();
    }

    @Override
    public void alter(IFunction<Long, Long> function) {
        alterAsync(function).joinInternal();
    }

    @Override
    public long alterAndGet(IFunction<Long, Long> function) {
        return alterAndGetAsync(function).joinInternal();
    }

    @Override
    public long getAndAlter(IFunction<Long, Long> function) {
        return getAndAlterAsync(function).joinInternal();
    }

    @Override
    public <R> R apply(IFunction<Long, R> function) {
        return applyAsync(function).joinInternal();
    }

    @Override
    public InternalCompletableFuture<Long> addAndGetAsync(long delta) {
        return invokeAsync(AtomicLongAddAndGetCodec.encodeRequest(groupId, objectName, delta), ADD_AND_GET_DECODER);
    }

    @Override
    public InternalCompletableFuture<Boolean> compareAndSetAsync(long expect, long update) {
        return invokeAsync(AtomicLongCompareAndSetCodec.encodeRequest(groupId, objectName, expect, update),
                COMPARE_AND_SET_DECODER);
    }

    @Override
    public InternalCompletableFuture<Long> decrementAndGetAsync() {
        return addAndGetAsync(-1);
    }

    @Override
    public InternalCompletableFuture<Long> getAsync() {
        return invokeAsync(AtomicLongGetCodec.encodeRequest(groupId, objectName), GET_DECODER);
    }

    @Override
    public InternalCompletableFuture<Long> getAndAddAsync(long delta) {
        return invokeAsync(AtomicLongGetAndAddCodec.encodeRequest(groupId, objectName, delta), GET_AND_ADD_DECODER);
    }

    @Override
    public InternalCompletableFuture<Long> getAndSetAsync(long newValue) {
        return invokeAsync(AtomicLongGetAndSetCodec.encodeRequest(groupId, objectName, newValue), GET_AND_SET_DECODER);
    }

    @Override
    public InternalCompletableFuture<Long> incrementAndGetAsync() {
        return addAndGetAsync(1);
    }

    @Override
    public InternalCompletableFuture<Long> getAndIncrementAsync() {
        return getAndAddAsync(1);
    }

    @Override
    public InternalCompletableFuture<Void> setAsync(long newValue) {
        // set() is implemented as getAndSet() with the returned value discarded.
        return (InternalCompletableFuture) getAndSetAsync(newValue);
    }

    @Override
    public InternalCompletableFuture<Void> alterAsync(IFunction<Long, Long> function) {
        // alter() is alterAndGet() with the returned value discarded.
        return (InternalCompletableFuture) alterAndGetAsync(function);
    }

    @Override
    public InternalCompletableFuture<Long> alterAndGetAsync(IFunction<Long, Long> function) {
        Data f = getSerializationService().toData(function);
        return invokeAsync(AtomicLongAlterCodec.encodeRequest(groupId, objectName, f, NEW_VALUE.value()),
                ALTER_DECODER);
    }

    @Override
    public InternalCompletableFuture<Long> getAndAlterAsync(IFunction<Long, Long> function) {
        Data f = getSerializationService().toData(function);
        return invokeAsync(AtomicLongAlterCodec.encodeRequest(groupId, objectName, f, OLD_VALUE.value()),
                ALTER_DECODER);
    }

    @Override
    public <R> InternalCompletableFuture<R> applyAsync(IFunction<Long, R> function) {
        Data f = getSerializationService().toData(function);
        return invokeAsync(AtomicLongApplyCodec.encodeRequest(groupId, objectName, f), APPLY_DECODER);
    }

    @Override
    public String getPartitionKey() {
        // CP data structures are not partition-based.
        throw new UnsupportedOperationException();
    }

    @Override
    public void onDestroy() {
        ClientMessage request = CPGroupDestroyCPObjectCodec.encodeRequest(groupId, getServiceName(), objectName);
        invokeRequest(request).joinInternal();
    }

    public CPGroupId getGroupId() {
        return groupId;
    }
}
| |
/*
* Copyright (C) 2011 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package lib.stormauthlib.com.google.gson.internal.bind;
import java.io.IOException;
import java.lang.reflect.Field;
import java.lang.reflect.Type;
import java.util.LinkedHashMap;
import java.util.Map;
import lib.stormauthlib.com.google.gson.FieldNamingStrategy;
import lib.stormauthlib.com.google.gson.Gson;
import lib.stormauthlib.com.google.gson.JsonSyntaxException;
import lib.stormauthlib.com.google.gson.TypeAdapter;
import lib.stormauthlib.com.google.gson.TypeAdapterFactory;
import lib.stormauthlib.com.google.gson.annotations.JsonAdapter;
import lib.stormauthlib.com.google.gson.annotations.SerializedName;
import lib.stormauthlib.com.google.gson.internal.$Gson$Types;
import lib.stormauthlib.com.google.gson.internal.ConstructorConstructor;
import lib.stormauthlib.com.google.gson.internal.Excluder;
import lib.stormauthlib.com.google.gson.internal.ObjectConstructor;
import lib.stormauthlib.com.google.gson.internal.Primitives;
import lib.stormauthlib.com.google.gson.reflect.TypeToken;
import lib.stormauthlib.com.google.gson.stream.JsonReader;
import lib.stormauthlib.com.google.gson.stream.JsonToken;
import lib.stormauthlib.com.google.gson.stream.JsonWriter;
import static lib.stormauthlib.com.google.gson.internal.bind.JsonAdapterAnnotationTypeAdapterFactory.getTypeAdapter;
/**
 * Type adapter that reflects over the fields and methods of a class.
 */
public final class ReflectiveTypeAdapterFactory implements TypeAdapterFactory {
  // Creates instances of deserialized types.
  private final ConstructorConstructor constructorConstructor;
  // Maps a Java field to its JSON name when no @SerializedName overrides it.
  private final FieldNamingStrategy fieldNamingPolicy;
  // Decides which classes/fields are skipped per direction.
  private final Excluder excluder;
  public ReflectiveTypeAdapterFactory(ConstructorConstructor constructorConstructor,
      FieldNamingStrategy fieldNamingPolicy, Excluder excluder) {
    this.constructorConstructor = constructorConstructor;
    this.fieldNamingPolicy = fieldNamingPolicy;
    this.excluder = excluder;
  }
  /** Despite the name: returns true when the field is INCLUDED in the given direction. */
  public boolean excludeField(Field f, boolean serialize) {
    return excludeField(f, serialize, excluder);
  }
  // True when neither the field's declared class nor the field itself is excluded.
  static boolean excludeField(Field f, boolean serialize, Excluder excluder) {
    return !excluder.excludeClass(f.getType(), serialize) && !excluder.excludeField(f, serialize);
  }
  /** JSON name for the field: @SerializedName value when present, else the naming policy result. */
  private String getFieldName(Field f) {
    return getFieldName(fieldNamingPolicy, f);
  }
  static String getFieldName(FieldNamingStrategy fieldNamingPolicy, Field f) {
    SerializedName serializedName = f.getAnnotation(SerializedName.class);
    return serializedName == null ? fieldNamingPolicy.translateName(f) : serializedName.value();
  }
  /** Returns a reflective adapter for object types, or null so primitives fall through to other factories. */
  public <T> TypeAdapter<T> create(Gson gson, final TypeToken<T> type) {
    Class<? super T> raw = type.getRawType();
    if (!Object.class.isAssignableFrom(raw)) {
      return null; // it's a primitive!
    }
    ObjectConstructor<T> constructor = constructorConstructor.get(type);
    return new Adapter<T>(constructor, getBoundFields(gson, type, raw));
  }
  /** Binds one reflective field to its JSON property, wiring up read/write via the field's adapter. */
  private ReflectiveTypeAdapterFactory.BoundField createBoundField(
      final Gson context, final Field field, final String name,
      final TypeToken<?> fieldType, boolean serialize, boolean deserialize) {
    final boolean isPrimitive = Primitives.isPrimitive(fieldType.getRawType());
    // special casing primitives here saves ~5% on Android...
    return new ReflectiveTypeAdapterFactory.BoundField(name, serialize, deserialize) {
      final TypeAdapter<?> typeAdapter = getFieldAdapter(context, field, fieldType);
      @SuppressWarnings({"unchecked", "rawtypes"}) // the type adapter and field type always agree
      @Override void write(JsonWriter writer, Object value)
          throws IOException, IllegalAccessException {
        Object fieldValue = field.get(value);
        // Wrapper lets the runtime type of the value (not just the declared type) choose the adapter.
        TypeAdapter t =
            new TypeAdapterRuntimeTypeWrapper(context, this.typeAdapter, fieldType.getType());
        t.write(writer, fieldValue);
      }
      @Override void read(JsonReader reader, Object value)
          throws IOException, IllegalAccessException {
        Object fieldValue = typeAdapter.read(reader);
        if (fieldValue != null || !isPrimitive) {
          // Never assign null into a primitive field; keep its default value instead.
          field.set(value, fieldValue);
        }
      }
      public boolean writeField(Object value) throws IOException, IllegalAccessException {
        if (!serialized) return false;
        Object fieldValue = field.get(value);
        return fieldValue != value; // avoid recursion for example for Throwable.cause
      }
    };
  }
  /** Field-level @JsonAdapter wins over whatever adapter Gson would otherwise pick for the type. */
  private TypeAdapter<?> getFieldAdapter(Gson gson, Field field, TypeToken<?> fieldType) {
    JsonAdapter annotation = field.getAnnotation(JsonAdapter.class);
    if (annotation != null) {
      TypeAdapter<?> adapter = getTypeAdapter(constructorConstructor, gson, fieldType, annotation);
      if (adapter != null) return adapter;
    }
    return gson.getAdapter(fieldType);
  }
  /**
   * Collects the bound fields of {@code raw} and all its superclasses, keyed by
   * JSON name. Insertion order (subclass first, then up the hierarchy) is kept
   * via LinkedHashMap. Duplicate JSON names are an error.
   */
  private Map<String, BoundField> getBoundFields(Gson context, TypeToken<?> type, Class<?> raw) {
    Map<String, BoundField> result = new LinkedHashMap<String, BoundField>();
    if (raw.isInterface()) {
      return result;
    }
    Type declaredType = type.getType();
    // Walk up the class hierarchy; 'type' and 'raw' are re-resolved each step so
    // generic type variables stay bound to the concrete type being adapted.
    while (raw != Object.class) {
      Field[] fields = raw.getDeclaredFields();
      for (Field field : fields) {
        boolean serialize = excludeField(field, true);
        boolean deserialize = excludeField(field, false);
        if (!serialize && !deserialize) {
          continue;
        }
        field.setAccessible(true);
        Type fieldType = $Gson$Types.resolve(type.getType(), raw, field.getGenericType());
        BoundField boundField = createBoundField(context, field, getFieldName(field),
            TypeToken.get(fieldType), serialize, deserialize);
        BoundField previous = result.put(boundField.name, boundField);
        if (previous != null) {
          throw new IllegalArgumentException(declaredType
              + " declares multiple JSON fields named " + previous.name);
        }
      }
      type = TypeToken.get($Gson$Types.resolve(type.getType(), raw, raw.getGenericSuperclass()));
      raw = type.getRawType();
    }
    return result;
  }
  /** A single JSON property bound to a reflective Java field. */
  static abstract class BoundField {
    final String name;         // JSON property name
    final boolean serialized;  // participates in writing
    final boolean deserialized; // participates in reading
    protected BoundField(String name, boolean serialized, boolean deserialized) {
      this.name = name;
      this.serialized = serialized;
      this.deserialized = deserialized;
    }
    // Whether this field should be emitted for the given enclosing object.
    abstract boolean writeField(Object value) throws IOException, IllegalAccessException;
    // Serializes this field of 'value' to the writer.
    abstract void write(JsonWriter writer, Object value) throws IOException, IllegalAccessException;
    // Deserializes the next JSON value into this field of 'value'.
    abstract void read(JsonReader reader, Object value) throws IOException, IllegalAccessException;
  }
  /** The adapter produced by this factory: constructs an instance and populates its bound fields. */
  public static final class Adapter<T> extends TypeAdapter<T> {
    private final ObjectConstructor<T> constructor;
    private final Map<String, BoundField> boundFields;
    private Adapter(ObjectConstructor<T> constructor, Map<String, BoundField> boundFields) {
      this.constructor = constructor;
      this.boundFields = boundFields;
    }
    @Override public T read(JsonReader in) throws IOException {
      if (in.peek() == JsonToken.NULL) {
        in.nextNull();
        return null;
      }
      T instance = constructor.construct();
      try {
        in.beginObject();
        while (in.hasNext()) {
          String name = in.nextName();
          BoundField field = boundFields.get(name);
          if (field == null || !field.deserialized) {
            // Unknown or write-only property: consume and ignore it.
            in.skipValue();
          } else {
            field.read(in, instance);
          }
        }
      } catch (IllegalStateException e) {
        // Reader was used in a way that does not match the document structure.
        throw new JsonSyntaxException(e);
      } catch (IllegalAccessException e) {
        throw new AssertionError(e);
      }
      in.endObject();
      return instance;
    }
    @Override public void write(JsonWriter out, T value) throws IOException {
      if (value == null) {
        out.nullValue(); // emit the JSON null literal
        return;
      }
      out.beginObject();
      try {
        for (BoundField boundField : boundFields.values()) {
          if (boundField.writeField(value)) {
            out.name(boundField.name);
            boundField.write(out, value);
          }
        }
      } catch (IllegalAccessException e) {
        throw new AssertionError();
      }
      out.endObject();
    }
  }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.plugin.hive.metastore;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.prestosql.spi.connector.SchemaTableName;
import javax.annotation.concurrent.Immutable;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.function.Consumer;
import static com.google.common.base.MoreObjects.toStringHelper;
import static java.util.Objects.requireNonNull;
/**
 * Immutable description of a single table partition in the Hive metastore
 * model: the owning database/table, the partition values, storage descriptor,
 * columns, and free-form parameters.
 */
@Immutable
public class Partition
{
    private final String databaseName;
    private final String tableName;
    private final List<String> values;
    private final Storage storage;
    private final List<Column> columns;
    private final Map<String, String> parameters;

    @JsonCreator
    public Partition(
            @JsonProperty("databaseName") String databaseName,
            @JsonProperty("tableName") String tableName,
            @JsonProperty("values") List<String> values,
            @JsonProperty("storage") Storage storage,
            @JsonProperty("columns") List<Column> columns,
            @JsonProperty("parameters") Map<String, String> parameters)
    {
        this.databaseName = requireNonNull(databaseName, "databaseName is null");
        this.tableName = requireNonNull(tableName, "tableName is null");
        // Defensive immutable copies keep the instance safe from caller mutation.
        this.values = ImmutableList.copyOf(requireNonNull(values, "values is null"));
        this.storage = requireNonNull(storage, "storage is null");
        this.columns = ImmutableList.copyOf(requireNonNull(columns, "columns is null"));
        this.parameters = ImmutableMap.copyOf(requireNonNull(parameters, "parameters is null"));
    }

    @JsonProperty
    public String getDatabaseName()
    {
        return databaseName;
    }

    @JsonProperty
    public String getTableName()
    {
        return tableName;
    }

    /** Convenience view of the owning table's qualified name; not serialized. */
    @JsonIgnore
    public SchemaTableName getSchemaTableName()
    {
        return new SchemaTableName(databaseName, tableName);
    }

    @JsonProperty
    public List<String> getValues()
    {
        return values;
    }

    @JsonProperty
    public Storage getStorage()
    {
        return storage;
    }

    @JsonProperty
    public List<Column> getColumns()
    {
        return columns;
    }

    @JsonProperty
    public Map<String, String> getParameters()
    {
        return parameters;
    }

    // Intentionally terse: only identity fields, not storage/columns/parameters.
    @Override
    public String toString()
    {
        return toStringHelper(this)
                .add("databaseName", databaseName)
                .add("tableName", tableName)
                .add("values", values)
                .toString();
    }

    @Override
    public boolean equals(Object o)
    {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        Partition that = (Partition) o;
        return Objects.equals(databaseName, that.databaseName)
                && Objects.equals(tableName, that.tableName)
                && Objects.equals(values, that.values)
                && Objects.equals(storage, that.storage)
                && Objects.equals(columns, that.columns)
                && Objects.equals(parameters, that.parameters);
    }

    @Override
    public int hashCode()
    {
        return Objects.hash(databaseName, tableName, values, storage, columns, parameters);
    }

    public static Builder builder()
    {
        return new Builder();
    }

    /** Starts a builder pre-populated from an existing partition. */
    public static Builder builder(Partition partition)
    {
        return new Builder(partition);
    }

    /** Mutable companion for assembling a {@link Partition}; storage is built via a nested builder. */
    public static class Builder
    {
        private final Storage.Builder storageBuilder;
        private String databaseName;
        private String tableName;
        private List<String> values;
        private List<Column> columns;
        private Map<String, String> parameters = ImmutableMap.of();

        private Builder()
        {
            this.storageBuilder = Storage.builder();
        }

        private Builder(Partition partition)
        {
            this.storageBuilder = Storage.builder(partition.getStorage());
            this.databaseName = partition.getDatabaseName();
            this.tableName = partition.getTableName();
            this.values = partition.getValues();
            this.columns = partition.getColumns();
            this.parameters = partition.getParameters();
        }

        public Builder setDatabaseName(String databaseName)
        {
            this.databaseName = databaseName;
            return this;
        }

        public Builder setTableName(String tableName)
        {
            this.tableName = tableName;
            return this;
        }

        public Builder setValues(List<String> values)
        {
            this.values = values;
            return this;
        }

        public Storage.Builder getStorageBuilder()
        {
            return storageBuilder;
        }

        /** Applies the given mutation to the nested storage builder. */
        public Builder withStorage(Consumer<Storage.Builder> consumer)
        {
            consumer.accept(storageBuilder);
            return this;
        }

        public Builder setColumns(List<Column> columns)
        {
            this.columns = columns;
            return this;
        }

        public Builder setParameters(Map<String, String> parameters)
        {
            this.parameters = parameters;
            return this;
        }

        public Partition build()
        {
            return new Partition(databaseName, tableName, values, storageBuilder.build(), columns, parameters);
        }
    }
}
| |
/*
* Copyright (c) 2014, Cloudera, Inc. All Rights Reserved.
*
* Cloudera, Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"). You may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for
* the specific language governing permissions and limitations under the
* License.
*/
package com.cloudera.dataflow.spark;
import static com.google.common.base.Preconditions.checkArgument;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.google.cloud.dataflow.sdk.Pipeline;
import com.google.cloud.dataflow.sdk.coders.Coder;
import com.google.cloud.dataflow.sdk.coders.CoderRegistry;
import com.google.cloud.dataflow.sdk.runners.AggregatorRetrievalException;
import com.google.cloud.dataflow.sdk.runners.AggregatorValues;
import com.google.cloud.dataflow.sdk.transforms.Aggregator;
import com.google.cloud.dataflow.sdk.transforms.AppliedPTransform;
import com.google.cloud.dataflow.sdk.transforms.PTransform;
import com.google.cloud.dataflow.sdk.util.WindowedValue;
import com.google.cloud.dataflow.sdk.values.PCollection;
import com.google.cloud.dataflow.sdk.values.PCollectionView;
import com.google.cloud.dataflow.sdk.values.PInput;
import com.google.cloud.dataflow.sdk.values.POutput;
import com.google.cloud.dataflow.sdk.values.PValue;
import com.google.common.base.Function;
import com.google.common.collect.Iterables;
import org.apache.spark.api.java.JavaRDDLike;
import org.apache.spark.api.java.JavaSparkContext;
/**
 * The evaluation context defines how pipeline instructions are evaluated on Spark.
 */
public class EvaluationContext implements EvaluationResult {
  private final JavaSparkContext jsc;
  private final Pipeline pipeline;
  private final SparkRuntimeContext runtime;
  private final CoderRegistry registry;
  // Holder (realized RDD or deferred local values) for each PCollection evaluated so far.
  private final Map<PValue, RDDHolder<?>> pcollections = new LinkedHashMap<>();
  // RDD holders with no downstream consumer registered — NOTE(review): usage not visible in this chunk.
  private final Set<RDDHolder<?>> leafRdds = new LinkedHashSet<>();
  // PValues read more than once — presumably used to decide caching; verify against callers.
  private final Set<PValue> multireads = new LinkedHashSet<>();
  // Driver-side materialized values of PValues.
  private final Map<PValue, Object> pobjects = new LinkedHashMap<>();
  // Windowed values backing each view; populated via setPView.
  private final Map<PValue, Iterable<WindowedValue<?>>> pview = new LinkedHashMap<>();
  // The transform currently being translated; see setCurrentTransform/getInput/getOutput.
  private AppliedPTransform<?, ?, ?> currentTransform;
  /**
   * Creates an evaluation context bound to the given Spark context and pipeline.
   */
  public EvaluationContext(JavaSparkContext jsc, Pipeline pipeline) {
    this.jsc = jsc;
    this.pipeline = pipeline;
    this.registry = pipeline.getCoderRegistry();
    this.runtime = new SparkRuntimeContext(jsc, pipeline);
  }
  /**
   * Holds an RDD or values for deferred conversion to an RDD if needed. PCollections are
   * sometimes created from a collection of objects (using RDD parallelize) and then
   * only used to create View objects; in which case they do not need to be
   * converted to bytes since they are not transferred across the network until they are
   * broadcast.
   */
  private class RDDHolder<T> {
    // Exactly one of 'values' or 'rdd' is set by the constructors; the
    // other representation is materialized lazily on demand.
    private Iterable<T> values;
    private Coder<T> coder;
    private JavaRDDLike<T, ?> rdd;
    /** Wraps local values plus the coder needed to ship them to the cluster. */
    public RDDHolder(Iterable<T> values, Coder<T> coder) {
      this.values = values;
      this.coder = coder;
    }
    /** Wraps an already-realized RDD. */
    public RDDHolder(JavaRDDLike<T, ?> rdd) {
      this.rdd = rdd;
    }
    /** Returns the RDD, parallelizing the local values through the coder on first use. */
    public JavaRDDLike<T, ?> getRDD() {
      if (rdd == null) {
        // Encode on the driver, distribute the byte arrays, decode on the workers.
        rdd = jsc.parallelize(CoderHelpers.toByteArrays(values, coder))
            .map(CoderHelpers.fromByteFunction(coder));
      }
      return rdd;
    }
    /**
     * Returns the values, collecting the RDD back to the driver on first use.
     * Decoding is deferred: elements are decoded as the iterable is traversed.
     */
    public Iterable<T> getValues(PCollection<T> pcollection) {
      if (values == null) {
        coder = pcollection.getCoder();
        JavaRDDLike<byte[], ?> bytesRDD = rdd.map(CoderHelpers.toByteFunction(coder));
        List<byte[]> clientBytes = bytesRDD.collect();
        values = Iterables.transform(clientBytes, new Function<byte[], T>() {
          @Override
          public T apply(byte[] bytes) {
            return CoderHelpers.fromByteArray(bytes, coder);
          }
        });
      }
      return values;
    }
  }
JavaSparkContext getSparkContext() {
return jsc;
}
Pipeline getPipeline() {
return pipeline;
}
SparkRuntimeContext getRuntimeContext() {
return runtime;
}
void setCurrentTransform(AppliedPTransform<?, ?, ?> transform) {
this.currentTransform = transform;
}
<I extends PInput> I getInput(PTransform<I, ?> transform) {
checkArgument(currentTransform != null && currentTransform.getTransform() == transform,
"can only be called with current transform");
@SuppressWarnings("unchecked")
I input = (I) currentTransform.getInput();
return input;
}
<O extends POutput> O getOutput(PTransform<?, O> transform) {
checkArgument(currentTransform != null && currentTransform.getTransform() == transform,
"can only be called with current transform");
@SuppressWarnings("unchecked")
O output = (O) currentTransform.getOutput();
return output;
}
<T> void setOutputRDD(PTransform<?, ?> transform, JavaRDDLike<T, ?> rdd) {
setRDD((PValue) getOutput(transform), rdd);
}
<T> void setOutputRDDFromValues(PTransform<?, ?> transform, Iterable<T> values,
Coder<T> coder) {
pcollections.put((PValue) getOutput(transform), new RDDHolder<>(values, coder));
}
void setPView(PValue view, Iterable<WindowedValue<?>> value) {
pview.put(view, value);
}
JavaRDDLike<?, ?> getRDD(PValue pvalue) {
RDDHolder<?> rddHolder = pcollections.get(pvalue);
JavaRDDLike<?, ?> rdd = rddHolder.getRDD();
leafRdds.remove(rddHolder);
if (multireads.contains(pvalue)) {
// Ensure the RDD is marked as cached
rdd.rdd().cache();
} else {
multireads.add(pvalue);
}
return rdd;
}
<T> void setRDD(PValue pvalue, JavaRDDLike<T, ?> rdd) {
try {
rdd.rdd().setName(pvalue.getName());
} catch (IllegalStateException e) {
// name not set, ignore
}
RDDHolder<T> rddHolder = new RDDHolder<>(rdd);
pcollections.put(pvalue, rddHolder);
leafRdds.add(rddHolder);
}
JavaRDDLike<?, ?> getInputRDD(PTransform<? extends PInput, ?> transform) {
return getRDD((PValue) getInput(transform));
}
<T> Iterable<WindowedValue<?>> getPCollectionView(PCollectionView<T> view) {
return pview.get(view);
}
/**
* Computes the outputs for all RDDs that are leaves in the DAG and do not have any
* actions (like saving to a file) registered on them (i.e. they are performed for side
* effects).
*/
void computeOutputs() {
for (RDDHolder<?> rddHolder : leafRdds) {
JavaRDDLike<?, ?> rdd = rddHolder.getRDD();
rdd.rdd().cache(); // cache so that any subsequent get() is cheap
rdd.count(); // force the RDD to be computed
}
}
@Override
public <T> T get(PValue value) {
if (pobjects.containsKey(value)) {
@SuppressWarnings("unchecked")
T result = (T) pobjects.get(value);
return result;
}
if (pcollections.containsKey(value)) {
JavaRDDLike<?, ?> rdd = pcollections.get(value).getRDD();
@SuppressWarnings("unchecked")
T res = (T) Iterables.getOnlyElement(rdd.collect());
pobjects.put(value, res);
return res;
}
throw new IllegalStateException("Cannot resolve un-known PObject: " + value);
}
@Override
public <T> T getAggregatorValue(String named, Class<T> resultType) {
return runtime.getAggregatorValue(named, resultType);
}
@Override
public <T> AggregatorValues<T> getAggregatorValues(Aggregator<?, T> aggregator)
throws AggregatorRetrievalException {
return runtime.getAggregatorValues(aggregator);
}
@Override
public <T> Iterable<T> get(PCollection<T> pcollection) {
@SuppressWarnings("unchecked")
RDDHolder<T> rddHolder = (RDDHolder<T>) pcollections.get(pcollection);
return rddHolder.getValues(pcollection);
}
@Override
public void close() {
SparkContextFactory.stopSparkContext(jsc);
}
/** The runner is blocking. */
@Override
public State getState() {
return State.DONE;
}
}
| |
// Copyright (C) 2017 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.submit;
import static com.google.common.base.Preconditions.checkState;
import com.google.auto.value.AutoValue;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.flogger.FluentLogger;
import com.google.gerrit.common.UsedAt;
import com.google.gerrit.entities.BranchNameKey;
import com.google.gerrit.entities.Project;
import com.google.gerrit.entities.SubmitTypeRecord;
import com.google.gerrit.exceptions.StorageException;
import com.google.gerrit.extensions.client.SubmitType;
import com.google.gerrit.extensions.registration.DynamicItem;
import com.google.gerrit.server.CurrentUser;
import com.google.gerrit.server.project.NoSuchProjectException;
import com.google.gerrit.server.query.change.ChangeData;
import com.google.gerrit.server.query.change.ChangeIsVisibleToPredicate;
import com.google.gerrit.server.query.change.InternalChangeQuery;
import com.google.gerrit.server.submit.MergeOpRepoManager.OpenRepo;
import com.google.inject.AbstractModule;
import com.google.inject.Inject;
import com.google.inject.Provider;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.revwalk.RevSort;
/**
* Default implementation of MergeSuperSet that does the computation of the merge super set
* sequentially on the local Gerrit instance.
*/
public class LocalMergeSuperSetComputation implements MergeSuperSetComputation {
  private static final FluentLogger logger = FluentLogger.forEnclosingClass();

  /** Guice module that registers this implementation as the {@code MergeSuperSetComputation}. */
  public static class LocalMergeSuperSetComputationModule extends AbstractModule {
    @Override
    protected void configure() {
      DynamicItem.bind(binder(), MergeSuperSetComputation.class)
          .to(LocalMergeSuperSetComputation.class);
    }
  }

  /** Cache key for index queries: a destination branch plus a set of commit hashes. */
  @AutoValue
  abstract static class QueryKey {
    private static QueryKey create(BranchNameKey branch, Iterable<String> hashes) {
      return new AutoValue_LocalMergeSuperSetComputation_QueryKey(
          branch, ImmutableSet.copyOf(hashes));
    }

    abstract BranchNameKey branch();

    abstract ImmutableSet<String> hashes();
  }

  private final Provider<InternalChangeQuery> queryProvider;
  // Memoizes byCommitsOnBranchNotMerged index queries for the lifetime of this instance.
  private final Map<QueryKey, ImmutableList<ChangeData>> queryCache;
  // Memoizes branch head lookups; Optional.empty() records a branch that has no head ref.
  private final Map<BranchNameKey, Optional<RevCommit>> heads;
  private final ChangeIsVisibleToPredicate.Factory changeIsVisibleToPredicateFactory;

  @Inject
  LocalMergeSuperSetComputation(
      Provider<InternalChangeQuery> queryProvider,
      ChangeIsVisibleToPredicate.Factory changeIsVisibleToPredicateFactory) {
    this.queryProvider = queryProvider;
    this.queryCache = new HashMap<>();
    this.heads = new HashMap<>();
    this.changeIsVisibleToPredicateFactory = changeIsVisibleToPredicateFactory;
  }

  /**
   * Expands {@code changeSet} with the open ancestor changes reachable from its members,
   * branch by branch. Changes with submit type CHERRY_PICK are carried over unchanged
   * instead of being expanded via the rev walk.
   */
  @Override
  public ChangeSet completeWithoutTopic(
      MergeOpRepoManager orm, ChangeSet changeSet, CurrentUser user) throws IOException {
    Collection<ChangeData> visibleChanges = new ArrayList<>();
    Collection<ChangeData> nonVisibleChanges = new ArrayList<>();
    // For each target branch we run a separate rev walk to find open changes
    // reachable from changes already in the merge super set.
    ImmutableSet<BranchNameKey> branches =
        byBranch(Iterables.concat(changeSet.changes(), changeSet.nonVisibleChanges())).keySet();
    ImmutableListMultimap<BranchNameKey, ChangeData> visibleChangesPerBranch =
        byBranch(changeSet.changes());
    ImmutableListMultimap<BranchNameKey, ChangeData> nonVisibleChangesPerBranch =
        byBranch(changeSet.nonVisibleChanges());
    for (BranchNameKey branchNameKey : branches) {
      OpenRepo or = getRepo(orm, branchNameKey.project());
      List<RevCommit> visibleCommits = new ArrayList<>();
      List<RevCommit> nonVisibleCommits = new ArrayList<>();
      for (ChangeData cd : visibleChangesPerBranch.get(branchNameKey)) {
        if (submitType(cd) == SubmitType.CHERRY_PICK) {
          // Cherry-pick changes are not expanded; keep them as-is.
          visibleChanges.add(cd);
        } else {
          visibleCommits.add(or.rw.parseCommit(cd.currentPatchSet().commitId()));
        }
      }
      for (ChangeData cd : nonVisibleChangesPerBranch.get(branchNameKey)) {
        if (submitType(cd) == SubmitType.CHERRY_PICK) {
          nonVisibleChanges.add(cd);
        } else {
          nonVisibleCommits.add(or.rw.parseCommit(cd.currentPatchSet().commitId()));
        }
      }
      // Hashes reached from visible commits are excluded from the non-visible walk so each
      // commit is attributed to at most one of the two sets.
      Set<String> visibleHashes =
          walkChangesByHashes(visibleCommits, Collections.emptySet(), or, branchNameKey);
      Set<String> nonVisibleHashes =
          walkChangesByHashes(nonVisibleCommits, visibleHashes, or, branchNameKey);
      ChangeSet partialSet =
          byCommitsOnBranchNotMerged(or, branchNameKey, visibleHashes, nonVisibleHashes, user);
      Iterables.addAll(visibleChanges, partialSet.changes());
      Iterables.addAll(nonVisibleChanges, partialSet.nonVisibleChanges());
    }
    return new ChangeSet(visibleChanges, nonVisibleChanges);
  }

  /** Groups changes by their destination branch. */
  private static ImmutableListMultimap<BranchNameKey, ChangeData> byBranch(
      Iterable<ChangeData> changes) {
    ImmutableListMultimap.Builder<BranchNameKey, ChangeData> builder =
        ImmutableListMultimap.builder();
    for (ChangeData cd : changes) {
      builder.put(cd.change().getDest(), cd);
    }
    return builder.build();
  }

  /**
   * Opens {@code project} through the shared repo manager. The repo's rev walk must already
   * be configured for topological sorting (asserted here).
   *
   * @throws IOException also wraps NoSuchProjectException
   */
  private OpenRepo getRepo(MergeOpRepoManager orm, Project.NameKey project) throws IOException {
    try {
      OpenRepo or = orm.getRepo(project);
      checkState(or.rw.hasRevSort(RevSort.TOPO));
      return or;
    } catch (NoSuchProjectException e) {
      throw new IOException(e);
    }
  }

  /** Returns the submit type of {@code cd}; logs and throws if it cannot be computed. */
  private SubmitType submitType(ChangeData cd) {
    SubmitTypeRecord str = cd.submitTypeRecord();
    if (!str.isOk()) {
      logErrorAndThrow("Failed to get submit type for " + cd.getId() + ": " + str.errorMessage);
    }
    return str.type;
  }

  /**
   * Queries the open changes on {@code branch} matching the two hash sets and partitions them
   * into visible and non-visible: a change from the visible set is demoted to non-visible if
   * it is private and the visibility predicate rejects it for {@code user}.
   */
  @UsedAt(UsedAt.Project.GOOGLE)
  public ChangeSet byCommitsOnBranchNotMerged(
      OpenRepo or,
      BranchNameKey branch,
      Set<String> visibleHashes,
      Set<String> nonVisibleHashes,
      CurrentUser user)
      throws IOException {
    List<ChangeData> potentiallyVisibleChanges =
        byCommitsOnBranchNotMerged(or, branch, visibleHashes);
    List<ChangeData> invisibleChanges =
        new ArrayList<>(byCommitsOnBranchNotMerged(or, branch, nonVisibleHashes));
    List<ChangeData> visibleChanges = new ArrayList<>(potentiallyVisibleChanges.size());
    ChangeIsVisibleToPredicate changeIsVisibleToPredicate =
        changeIsVisibleToPredicateFactory.forUser(user);
    for (ChangeData cd : potentiallyVisibleChanges) {
      // short circuit permission checks for non-private changes, as we already checked all
      // permissions (except for private changes).
      if (!cd.change().isPrivate() || changeIsVisibleToPredicate.match(cd)) {
        visibleChanges.add(cd);
      } else {
        invisibleChanges.add(cd);
      }
    }
    return new ChangeSet(visibleChanges, invisibleChanges);
  }

  /** Index query for open changes on {@code branch} with commits in {@code hashes}; memoized. */
  private ImmutableList<ChangeData> byCommitsOnBranchNotMerged(
      OpenRepo or, BranchNameKey branch, Set<String> hashes) throws IOException {
    if (hashes.isEmpty()) {
      return ImmutableList.of();
    }
    QueryKey k = QueryKey.create(branch, hashes);
    if (queryCache.containsKey(k)) {
      return queryCache.get(k);
    }
    ImmutableList<ChangeData> result =
        ImmutableList.copyOf(
            queryProvider.get().byCommitsOnBranchNotMerged(or.repo, branch, hashes));
    queryCache.put(k, result);
    return result;
  }

  /**
   * Walks the ancestors of {@code sourceCommits} down to (but not past) the current head of
   * branch {@code b}, returning the names of all commits encountered. Hashes contained in
   * {@code ignoreHashes} are neither used as walk starting points nor added to the result.
   */
  @UsedAt(UsedAt.Project.GOOGLE)
  public Set<String> walkChangesByHashes(
      Collection<RevCommit> sourceCommits, Set<String> ignoreHashes, OpenRepo or, BranchNameKey b)
      throws IOException {
    Set<String> destHashes = new HashSet<>();
    or.rw.reset();
    markHeadUninteresting(or, b);
    for (RevCommit c : sourceCommits) {
      String name = c.name();
      if (ignoreHashes.contains(name)) {
        continue;
      }
      destHashes.add(name);
      or.rw.markStart(c);
    }
    for (RevCommit c : or.rw) {
      String name = c.name();
      if (ignoreHashes.contains(name)) {
        continue;
      }
      destHashes.add(name);
    }
    return destHashes;
  }

  /**
   * Marks the current head of branch {@code b} (if it exists) as uninteresting so the walk
   * stops at commits already reachable from the branch. Lookups are memoized in {@code heads}.
   */
  private void markHeadUninteresting(OpenRepo or, BranchNameKey b) throws IOException {
    Optional<RevCommit> head = heads.get(b);
    if (head == null) {
      Ref ref = or.repo.getRefDatabase().exactRef(b.branch());
      head = ref != null ? Optional.of(or.rw.parseCommit(ref.getObjectId())) : Optional.empty();
      heads.put(b, head);
    }
    if (head.isPresent()) {
      or.rw.markUninteresting(head.get());
    }
  }

  /** Logs {@code msg} at SEVERE and rethrows it as a {@link StorageException}. */
  private void logErrorAndThrow(String msg) {
    logger.atSevere().log("%s", msg);
    throw new StorageException(msg);
  }
}
| |
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.refactoring.inline;
import com.intellij.codeInsight.ChangeContextUtil;
import com.intellij.codeInsight.ExpressionUtil;
import com.intellij.history.LocalHistory;
import com.intellij.history.LocalHistoryAction;
import com.intellij.java.refactoring.JavaRefactoringBundle;
import com.intellij.lang.Language;
import com.intellij.lang.findUsages.DescriptiveNameUtil;
import com.intellij.lang.java.JavaLanguage;
import com.intellij.lang.refactoring.InlineHandler;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.RangeMarker;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.NlsContexts;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.codeStyle.VariableKind;
import com.intellij.psi.controlFlow.*;
import com.intellij.psi.impl.source.codeStyle.CodeEditUtil;
import com.intellij.psi.impl.source.javadoc.PsiDocMethodOrFieldRef;
import com.intellij.psi.impl.source.resolve.reference.impl.JavaLangClassMemberReference;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.searches.MethodReferencesSearch;
import com.intellij.psi.search.searches.OverridingMethodsSearch;
import com.intellij.psi.util.InheritanceUtil;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.refactoring.BaseRefactoringProcessor;
import com.intellij.refactoring.OverrideMethodsProcessor;
import com.intellij.refactoring.RefactoringBundle;
import com.intellij.refactoring.introduceParameter.Util;
import com.intellij.refactoring.listeners.RefactoringEventData;
import com.intellij.refactoring.rename.NonCodeUsageInfoFactory;
import com.intellij.refactoring.rename.RenameJavaMemberProcessor;
import com.intellij.refactoring.util.*;
import com.intellij.usageView.UsageInfo;
import com.intellij.usageView.UsageViewDescriptor;
import com.intellij.util.ArrayUtil;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.JavaPsiConstructorUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.MultiMap;
import com.siyeh.ig.psiutils.CommentTracker;
import com.siyeh.ig.psiutils.SideEffectChecker;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
import java.util.function.Function;
import java.util.function.Predicate;
import static com.intellij.util.ObjectUtils.tryCast;
public class InlineMethodProcessor extends BaseRefactoringProcessor {
private static final Logger LOG = Logger.getInstance(InlineMethodProcessor.class);
private PsiMethod myMethod;
private PsiReference myReference;
private final Editor myEditor;
private final boolean myInlineThisOnly;
private final boolean mySearchInComments;
private final boolean mySearchForTextOccurrences;
private final boolean myDeleteTheDeclaration;
private final Function<PsiReference, InlineTransformer> myTransformerChooser;
private final PsiManager myManager;
private final PsiElementFactory myFactory;
private final CodeStyleManager myCodeStyleManager;
private final JavaCodeStyleManager myJavaCodeStyle;
private PsiCodeBlock[] myAddedBraces;
private final String myDescriptiveName;
private Map<PsiField, PsiClassInitializer> myAddedClassInitializers;
private PsiMethod myMethodCopy;
@SuppressWarnings("LeakableMapKey") //short living refactoring
private Map<Language,InlineHandler.Inliner> myInliners;
/**
 * Convenience constructor: no search in comments or for text occurrences, and the method
 * declaration is deleted after all usages are inlined.
 */
public InlineMethodProcessor(@NotNull Project project,
                             @NotNull PsiMethod method,
                             @Nullable PsiReference reference,
                             Editor editor,
                             boolean isInlineThisOnly) {
  this(project, method, reference, editor, isInlineThisOnly, false, false, true);
}
/** Convenience constructor that always deletes the method declaration after inlining. */
public InlineMethodProcessor(@NotNull Project project,
                             @NotNull PsiMethod method,
                             @Nullable PsiReference reference,
                             Editor editor,
                             boolean isInlineThisOnly,
                             boolean searchInComments,
                             boolean searchForTextOccurrences) {
  this(project, method, reference, editor, isInlineThisOnly, searchInComments, searchForTextOccurrences, true);
}
/**
 * @param project                  project the refactoring runs in
 * @param method                   method to inline (may be replaced by a specialized copy,
 *                                 see {@code InlineMethodSpecialization.specialize})
 * @param reference                initiating reference, or null when inlining every usage
 * @param editor                   editor whose caret is restored afterwards; may be null
 * @param isInlineThisOnly         inline only {@code reference} rather than all usages
 * @param searchInComments         also report occurrences in strings and comments
 * @param searchForTextOccurrences also report plain-text occurrences outside code
 * @param isDeleteTheDeclaration   delete the method declaration once all usages are inlined
 */
public InlineMethodProcessor(@NotNull Project project,
                             @NotNull PsiMethod method,
                             @Nullable PsiReference reference,
                             Editor editor,
                             boolean isInlineThisOnly,
                             boolean searchInComments,
                             boolean searchForTextOccurrences,
                             boolean isDeleteTheDeclaration) {
  super(project);
  // The method may be substituted by a specialization tailored to the initiating call site.
  myMethod = InlineMethodSpecialization.specialize(method, reference);
  myTransformerChooser = InlineTransformer.getSuitableTransformer(myMethod);
  myReference = reference;
  myEditor = editor;
  myInlineThisOnly = isInlineThisOnly;
  mySearchInComments = searchInComments;
  mySearchForTextOccurrences = searchForTextOccurrences;
  myDeleteTheDeclaration = isDeleteTheDeclaration;
  myManager = PsiManager.getInstance(myProject);
  myFactory = JavaPsiFacade.getElementFactory(myManager.getProject());
  myCodeStyleManager = CodeStyleManager.getInstance(myProject);
  myJavaCodeStyle = JavaCodeStyleManager.getInstance(myProject);
  // Capture the descriptive name now, before the declaration is potentially deleted.
  myDescriptiveName = DescriptiveNameUtil.getDescriptiveName(myMethod);
}
/** Command name shown for undo/local history, built from the method's descriptive name. */
@Override
@NotNull
protected String getCommandName() {
  final String commandName = RefactoringBundle.message("inline.method.command", myDescriptiveName);
  return commandName;
}
/** Usage-view descriptor for the method being inlined. */
@Override
@NotNull
protected UsageViewDescriptor createUsageViewDescriptor(UsageInfo @NotNull [] usages) {
  return new InlineViewDescriptor(myMethod);
}
/**
 * Collects everything the refactoring will touch: the initiating reference (if any), all code
 * references to the method, overriding methods whose @Override attribute must be removed, and
 * optionally occurrences in comments/strings and plain text.
 */
@Override
protected UsageInfo @NotNull [] findUsages() {
  // Inline-this-only: the single initiating reference is the only usage.
  if (myInlineThisOnly) return new UsageInfo[]{new UsageInfo(myReference)};
  Set<UsageInfo> usages = new HashSet<>();
  if (myReference != null) {
    usages.add(new UsageInfo(myReference.getElement()));
  }
  for (PsiReference reference : MethodReferencesSearch.search(myMethod, myRefactoringScope, true)) {
    usages.add(new UsageInfo(reference.getElement()));
  }
  if (myDeleteTheDeclaration) {
    // Once the declaration is gone, overriders may no longer override anything:
    // record the ones whose @Override attribute should be dropped.
    OverridingMethodsSearch.search(myMethod, myRefactoringScope, true)
      .forEach(method -> {
        if (shouldDeleteOverrideAttribute(method)) {
          usages.add(new OverrideAttributeUsageInfo(method));
        }
        return true;
      });
  }
  if (mySearchInComments || mySearchForTextOccurrences) {
    final NonCodeUsageInfoFactory infoFactory = new NonCodeUsageInfoFactory(myMethod, myMethod.getName()) {
      @Override
      public UsageInfo createUsageInfo(@NotNull PsiElement usage, int startOffset, int endOffset) {
        // Occurrences inside the method being inlined disappear with it; skip them.
        if (PsiTreeUtil.isAncestor(myMethod, usage, false)) return null;
        return super.createUsageInfo(usage, startOffset, endOffset);
      }
    };
    if (mySearchInComments) {
      String stringToSearch = ElementDescriptionUtil.getElementDescription(myMethod, NonCodeSearchDescriptionLocation.STRINGS_AND_COMMENTS);
      TextOccurrencesUtil.addUsagesInStringsAndComments(myMethod, myRefactoringScope, stringToSearch, usages, infoFactory);
    }
    if (mySearchForTextOccurrences && myRefactoringScope instanceof GlobalSearchScope) {
      String stringToSearch = ElementDescriptionUtil.getElementDescription(myMethod, NonCodeSearchDescriptionLocation.NON_JAVA);
      TextOccurrencesUtil.addTextOccurrences(myMethod, stringToSearch, (GlobalSearchScope)myRefactoringScope, usages, infoFactory);
    }
  }
  return usages.toArray(UsageInfo.EMPTY_ARRAY);
}
/**
 * Returns true if, after the inlined method is deleted, {@code method} would no longer
 * legitimately carry an @Override attribute (considering all of its super signatures).
 */
private boolean shouldDeleteOverrideAttribute(PsiMethod method) {
  for (HierarchicalMethodSignature signature : method.getHierarchicalMethodSignature().getSuperSignatures()) {
    PsiMethod superMethod = signature.getMethod();
    if (superMethod == myMethod) {
      // The method being deleted itself never blocks removal of the attribute.
      continue;
    }
    boolean interfaceSuperInJava = JavaLanguage.INSTANCE == method.getLanguage() &&
                                   Objects.requireNonNull(superMethod.getContainingClass()).isInterface();
    // Any remaining super method keeps the attribute valid, except an interface super
    // method in pre-Java-6 code (where @Override on interface implementations is illegal).
    if (!interfaceSuperInJava || PsiUtil.isLanguageLevel6OrHigher(method)) {
      return false;
    }
  }
  return true;
}
/** Forces a preview whenever non-code (comment/text) occurrences would be patched. */
@Override
protected boolean isPreviewUsages(UsageInfo @NotNull [] usages) {
  boolean hasNonCodeUsage = Arrays.stream(usages).anyMatch(info -> info instanceof NonCodeUsageInfo);
  return hasNonCodeUsage || super.isPreviewUsages(usages);
}
/** Re-binds the processor to the (single) method element after PSI was reloaded. */
@Override
protected void refreshElements(PsiElement @NotNull [] elements) {
  LOG.assertTrue(elements.length == 1 && elements[0] instanceof PsiMethod);
  myMethod = (PsiMethod)elements[0];
}
/**
 * Builds the conflict map shown before the refactoring runs: broken super-method contracts,
 * usages in javadoc/reflection/method references, usages that force the single-return
 * fallback transformation, and members that become inaccessible at call sites. Also
 * initializes the per-language inliners.
 *
 * @return true to proceed (no conflicts, or the user accepted them)
 */
@Override
protected boolean preprocessUsages(@NotNull Ref<UsageInfo[]> refUsages) {
  if (!myInlineThisOnly && checkReadOnly()) {
    if (!CommonRefactoringUtil.checkReadOnlyStatus(myProject, myMethod)) return false;
  }
  final UsageInfo[] usagesIn = refUsages.get();
  final MultiMap<PsiElement, String> conflicts = new MultiMap<>();
  if (!myInlineThisOnly) {
    // Inlining removes the declaration, so warn about super methods it implements/overrides.
    final PsiMethod[] superMethods = myMethod.findSuperMethods();
    for (PsiMethod method : superMethods) {
      String className = Objects.requireNonNull(method.getContainingClass()).getQualifiedName();
      final String message = method.hasModifierProperty(PsiModifier.ABSTRACT) ?
                             JavaRefactoringBundle.message("inlined.method.implements.method.from.0", className) :
                             JavaRefactoringBundle.message("inlined.method.overrides.method.from.0", className);
      conflicts.putValue(method, message);
    }
    for (UsageInfo info : usagesIn) {
      final PsiElement element = info.getElement();
      // Javadoc references to the method cannot be inlined.
      if (element instanceof PsiDocMethodOrFieldRef && !PsiTreeUtil.isAncestor(myMethod, element, false)) {
        conflicts.putValue(element, JavaRefactoringBundle.message("inline.method.used.in.javadoc"));
      }
      // String literals resolved as reflective member references would silently break.
      if (element instanceof PsiLiteralExpression &&
          ContainerUtil.or(element.getReferences(), JavaLangClassMemberReference.class::isInstance)) {
        conflicts.putValue(element, JavaRefactoringBundle.message("inline.method.used.in.reflection"));
      }
      if (element instanceof PsiMethodReferenceExpression) {
        final PsiExpression qualifierExpression = ((PsiMethodReferenceExpression)element).getQualifierExpression();
        if (qualifierExpression != null) {
          final List<PsiElement> sideEffects = new ArrayList<>();
          SideEffectChecker.checkSideEffects(qualifierExpression, sideEffects);
          if (!sideEffects.isEmpty()) {
            // Rewriting Foo::bar may change when the qualifier's side effects execute.
            conflicts.putValue(element, JavaRefactoringBundle.message("inline.method.qualifier.usage.side.effect"));
          }
        }
      }
      if (element instanceof PsiReferenceExpression && myTransformerChooser.apply((PsiReference)element).isFallBackTransformer()) {
        conflicts.putValue(element, JavaRefactoringBundle.message("inlined.method.will.be.transformed.to.single.return.form"));
      }
      final String errorMessage = checkUnableToInsertCodeBlock(myMethod.getBody(), element);
      if (errorMessage != null) {
        conflicts.putValue(element, errorMessage);
      }
    }
  }
  else if (myReference != null && myTransformerChooser.apply(myReference).isFallBackTransformer()) {
    conflicts.putValue(myReference.getElement(), JavaRefactoringBundle.message("inlined.method.will.be.transformed.to.single.return.form"));
  }
  myInliners = GenericInlineHandler.initInliners(myMethod, usagesIn, new InlineHandler.Settings() {
    @Override
    public boolean isOnlyOneReferenceToInline() {
      return myInlineThisOnly;
    }
  }, conflicts, JavaLanguage.INSTANCE);
  addInaccessibleMemberConflicts(myMethod, usagesIn, new ReferencedElementsCollector(), conflicts);
  addInaccessibleSuperCallsConflicts(usagesIn, conflicts);
  return showConflicts(conflicts, usagesIn);
}
/** Whether a writable-status check is warranted for the method being inlined. */
private boolean checkReadOnly() {
  if (myMethod instanceof PsiCompiledElement) {
    return true;
  }
  return myMethod.isWritable();
}
/**
 * Flags usages where a {@code super.foo()} call inside the inlined body would not survive the
 * move: call sites whose class is not an inheritor of the super class (or that sit in a static
 * context), and qualified call sites where {@code super} has no valid receiver.
 */
private void addInaccessibleSuperCallsConflicts(final UsageInfo[] usagesIn, final MultiMap<PsiElement, String> conflicts) {
  myMethod.accept(new JavaRecursiveElementWalkingVisitor(){
    // Don't descend into nested/anonymous classes: their super expressions are unrelated.
    @Override
    public void visitClass(PsiClass aClass) {}

    @Override
    public void visitAnonymousClass(PsiAnonymousClass aClass) {}

    @Override
    public void visitSuperExpression(PsiSuperExpression expression) {
      super.visitSuperExpression(expression);
      final PsiType type = expression.getType();
      final PsiClass superClass = PsiUtil.resolveClassInType(type);
      if (superClass != null) {
        final Set<PsiClass> targetContainingClasses = new HashSet<>();
        PsiElement qualifiedCall = null;
        for (UsageInfo info : usagesIn) {
          final PsiElement element = info.getElement();
          if (element != null) {
            final PsiClass targetContainingClass = PsiTreeUtil.getParentOfType(element, PsiClass.class);
            if (targetContainingClass != null &&
                (!InheritanceUtil.isInheritorOrSelf(targetContainingClass, superClass, true) ||
                 PsiUtil.getEnclosingStaticElement(element, targetContainingClass) != null)) {
              // The super call can't be reproduced here: wrong hierarchy or static context.
              targetContainingClasses.add(targetContainingClass);
            }
            else if (element instanceof PsiReferenceExpression && !ExpressionUtil.isEffectivelyUnqualified((PsiReferenceExpression)element)) {
              // Qualified call site (obj.foo()): a super call can't be evaluated on 'obj'.
              qualifiedCall = ((PsiReferenceExpression)element).getQualifierExpression();
            }
          }
        }
        final PsiMethodCallExpression methodCallExpression = PsiTreeUtil.getParentOfType(expression, PsiMethodCallExpression.class);
        LOG.assertTrue(methodCallExpression != null);
        if (!targetContainingClasses.isEmpty()) {
          String descriptions = StringUtil.join(targetContainingClasses, psiClass -> RefactoringUIUtil.getDescription(psiClass, false), ",");
          conflicts.putValue(expression, JavaRefactoringBundle.message("inline.method.calls.not.accessible.in", methodCallExpression.getText(), descriptions));
        }
        if (qualifiedCall != null) {
          conflicts.putValue(expression, JavaRefactoringBundle.message("inline.method.calls.not.accessible.on.qualifier",
                                                                       methodCallExpression.getText(), qualifiedCall.getText()));
        }
      }
    }
  });
}
/**
 * Records a conflict for every member referenced from {@code element} that would become
 * inaccessible at one of the call-site containers.
 */
public static void addInaccessibleMemberConflicts(final PsiElement element,
                                                  final UsageInfo[] usages,
                                                  final ReferencedElementsCollector collector,
                                                  final MultiMap<PsiElement, String> conflicts) {
  element.accept(collector);
  final Map<PsiMember, Set<PsiMember>> inaccessibleByContainer =
    getInaccessible(collector.myReferencedMembers, usages, element);
  for (Map.Entry<PsiMember, Set<PsiMember>> entry : inaccessibleByContainer.entrySet()) {
    final PsiMember container = entry.getKey();
    for (PsiMember referencedMember : entry.getValue()) {
      final String referencedDescription = RefactoringUIUtil.getDescription(referencedMember, true);
      final String containerDescription = RefactoringUIUtil.getDescription(container, true);
      String message = RefactoringBundle.message("0.that.is.used.in.inlined.method.is.not.accessible.from.call.site.s.in.1",
                                                 referencedDescription, containerDescription);
      conflicts.putValue(container, StringUtil.capitalize(message));
    }
  }
}
/**
 * Given a set of referencedElements, returns a map from containers (in a sense of ConflictsUtil.getContainer)
 * to subsets of referencedElements that are not accessible from that container
 *
 * @param referencedElements members referenced from the body of {@code elementToInline}
 * @param usages             the call sites of the element being inlined
 * @param elementToInline    the method (or class) whose body will be copied to the call sites
 */
static Map<PsiMember, Set<PsiMember>> getInaccessible(HashSet<? extends PsiMember> referencedElements,
                                                      UsageInfo[] usages,
                                                      PsiElement elementToInline) {
  final Map<PsiMember, Set<PsiMember>> result = new HashMap<>();
  final PsiResolveHelper resolveHelper = JavaPsiFacade.getInstance(elementToInline.getProject()).getResolveHelper();
  for (UsageInfo usage : usages) {
    final PsiElement usageElement = usage.getElement();
    if (usageElement == null) continue;
    final PsiElement container = ConflictsUtil.getContainer(usageElement);
    if (!(container instanceof PsiMember)) continue; // usage in import statement
    PsiMember memberContainer = (PsiMember)container;
    Set<PsiMember> inaccessibleReferenced = result.get(memberContainer);
    if (inaccessibleReferenced == null) {
      // First usage seen in this container: compute its inaccessible subset once.
      inaccessibleReferenced = new HashSet<>();
      result.put(memberContainer, inaccessibleReferenced);
      for (PsiMember member : referencedElements) {
        // Members inside the inlined element itself move together with it.
        if (PsiTreeUtil.isAncestor(elementToInline, member, false)) continue;
        if (elementToInline instanceof PsiClass &&
            InheritanceUtil.isInheritorOrSelf((PsiClass)elementToInline, member.getContainingClass(), true)) continue;
        // Pass the usage's file resolve scope to the accessibility check when available.
        PsiElement resolveScope = usageElement instanceof PsiReferenceExpression
                                  ? ((PsiReferenceExpression)usageElement).advancedResolve(false).getCurrentFileResolveScope()
                                  : null;
        if (!resolveHelper.isAccessible(member, member.getModifierList(), usageElement, null, resolveScope)) {
          inaccessibleReferenced.add(member);
        }
      }
    }
  }
  return result;
}
/**
 * Runs the inlining inside a local-history action and restores the caret afterwards via a
 * range marker (plain offsets would be invalidated by the document edits).
 */
@Override
protected void performRefactoring(UsageInfo @NotNull [] usages) {
  RangeMarker position = null;
  if (myEditor != null) {
    final int offset = myEditor.getCaretModel().getOffset();
    // NOTE(review): offset + 1 can point past the end of the document when the caret is at
    // the very end — confirm createRangeMarker tolerates that.
    position = myEditor.getDocument().createRangeMarker(offset, offset + 1);
  }
  LocalHistoryAction a = LocalHistory.getInstance().startAction(getCommandName());
  try {
    doRefactoring(usages);
  }
  finally {
    a.finish();
  }
  if (position != null) {
    if (position.isValid()) {
      myEditor.getCaretModel().moveToOffset(position.getStartOffset());
    }
    position.dispose();
  }
}
/** Stable id used to correlate before/after events for refactoring listeners. */
@Nullable
@Override
protected String getRefactoringId() {
  return "refactoring.inline.method";
}
/** Snapshot of the method to be inlined, reported to refactoring event listeners. */
@Nullable
@Override
protected RefactoringEventData getBeforeData() {
  final RefactoringEventData beforeData = new RefactoringEventData();
  beforeData.addElement(myMethod);
  return beforeData;
}
/**
 * Performs the actual inlining for the collected usages, then optionally deletes the method
 * declaration and removes any braces that were added only to make inlining possible.
 */
private void doRefactoring(UsageInfo[] usages) {
  try {
    if (myInlineThisOnly) {
      if (JavaLanguage.INSTANCE != myReference.getElement().getLanguage()) {
        // Non-Java reference: delegate to the language-specific inliner.
        GenericInlineHandler.inlineReference(new UsageInfo(myReference.getElement()), myMethod, myInliners);
      }
      else if (myMethod.isConstructor() && InlineUtil.isChainingConstructor(myMethod)) {
        if (myReference instanceof PsiMethodReferenceExpression) {
          inlineMethodReference((PsiMethodReferenceExpression)myReference);
        }
        else {
          PsiCall constructorCall = RefactoringUtil.getEnclosingConstructorCall((PsiJavaCodeReferenceElement)myReference);
          if (constructorCall != null) {
            inlineConstructorCall(constructorCall);
          }
        }
      }
      else {
        // The call may need surrounding braces first; the returned element replaces the
        // original reference.
        myReference = addBracesWhenNeeded(new PsiReferenceExpression[]{(PsiReferenceExpression)myReference})[0];
        if (myReference instanceof PsiMethodReferenceExpression) {
          inlineMethodReference((PsiMethodReferenceExpression)myReference);
        }
        else {
          inlineMethodCall((PsiReferenceExpression)myReference);
        }
      }
    }
    else {
      // Sort depth-first, right-to-left (presumably so edits don't invalidate pending usages).
      CommonRefactoringUtil.sortDepthFirstRightLeftOrder(usages);
      if (myMethod.isConstructor()) {
        for (UsageInfo usage : usages) {
          PsiElement element = usage.getElement();
          if (element instanceof PsiMethodReferenceExpression) {
            inlineMethodReference((PsiMethodReferenceExpression)element);
          }
          else if (element instanceof PsiJavaCodeReferenceElement) {
            PsiCall constructorCall = RefactoringUtil.getEnclosingConstructorCall((PsiJavaCodeReferenceElement)element);
            if (constructorCall != null) {
              inlineConstructorCall(constructorCall);
            }
          }
          else if (element instanceof PsiEnumConstant) {
            inlineConstructorCall((PsiEnumConstant) element);
          }
          else if (!(element instanceof PsiDocMethodOrFieldRef)){
            GenericInlineHandler.inlineReference(usage, myMethod, myInliners);
          }
        }
      }
      else {
        List<PsiReferenceExpression> refExprList = new ArrayList<>();
        final List<PsiElement> imports2Delete = new ArrayList<>();
        for (final UsageInfo usage : usages) {
          final PsiElement element = usage.getElement();
          if (element == null) continue;
          if (usage instanceof OverrideAttributeUsageInfo) {
            // Overriders lose their @Override once the super method is deleted.
            for (OverrideMethodsProcessor processor : OverrideMethodsProcessor.EP_NAME.getExtensionList()) {
              if (processor.removeOverrideAttribute(element)) {
                break;
              }
            }
            continue;
          }
          if (element instanceof PsiReferenceExpression) {
            refExprList.add((PsiReferenceExpression)element);
          }
          else if (element instanceof PsiImportStaticReferenceElement) {
            final JavaResolveResult[] resolveResults = ((PsiImportStaticReferenceElement)element).multiResolve(false);
            if (resolveResults.length < 2) {
              //no overloads available: ensure broken import are deleted and
              //unused overloaded imports are deleted by optimize imports helper
              imports2Delete.add(PsiTreeUtil.getParentOfType(element, PsiImportStaticStatement.class));
            }
          }
          else if (JavaLanguage.INSTANCE != element.getLanguage()) {
            GenericInlineHandler.inlineReference(usage, myMethod, myInliners);
          }
        }
        PsiReferenceExpression[] refs = refExprList.toArray(new PsiReferenceExpression[0]);
        refs = addBracesWhenNeeded(refs);
        for (PsiReferenceExpression ref : refs) {
          if (ref instanceof PsiMethodReferenceExpression) {
            inlineMethodReference((PsiMethodReferenceExpression)ref);
          }
          else {
            inlineMethodCall(ref);
          }
        }
        for (PsiElement psiElement : imports2Delete) {
          if (psiElement != null && psiElement.isValid()) {
            psiElement.delete();
          }
        }
      }
      if (myMethod.isValid() && myMethod.isWritable() && myDeleteTheDeclaration) {
        // Delete the declaration while preserving surrounding comments (CommentTracker);
        // the body and javadoc themselves are marked unchanged so they are not re-emitted.
        CommentTracker tracker = new CommentTracker();
        tracker.markUnchanged(myMethod.getBody());
        tracker.markUnchanged(myMethod.getDocComment());
        tracker.deleteAndRestoreComments(myMethod);
      }
    }
    removeAddedBracesWhenPossible();
  }
  catch (IncorrectOperationException e) {
    LOG.error(e);
  }
}
private void inlineMethodReference(PsiMethodReferenceExpression reference) {
final PsiLambdaExpression lambdaExpression = LambdaRefactoringUtil.convertMethodReferenceToLambda(reference, false, false);
if (lambdaExpression == null) return;
final PsiExpression callExpression = LambdaUtil.extractSingleExpressionFromBody(lambdaExpression.getBody());
if (callExpression instanceof PsiMethodCallExpression) {
inlineMethodCall(((PsiMethodCallExpression)callExpression).getMethodExpression());
}
else if (callExpression instanceof PsiCall) {
inlineConstructorCall((PsiCall)callExpression);
}
else {
LOG.error("Unexpected expr: " + callExpression.getText());
}
LambdaRefactoringUtil.simplifyToExpressionLambda(lambdaExpression);
if (myInlineThisOnly) {
LambdaRefactoringUtil.removeSideEffectsFromLambdaBody(myEditor, lambdaExpression);
}
}
  /**
   * Inlines a constructor invocation whose body is a single delegation call:
   * the delegate's argument list (with the constructor's parameters replaced by
   * the creation-site arguments) is substituted into {@code constructorCall}.
   *
   * @param constructorCall the call being rewritten (new-expression, this()/super() call, or enum constant)
   */
  public static void inlineConstructorCall(PsiCall constructorCall) {
    PsiMethod oldConstructor = constructorCall.resolveMethod();
    LOG.assertTrue(oldConstructor != null);
    // Use the navigation element so compiled/decompiled mirrors map back to source.
    oldConstructor = (PsiMethod)oldConstructor.getNavigationElement();
    PsiExpression[] instanceCreationArguments = constructorCall.getArgumentList().getExpressions();
    if (oldConstructor.isVarArgs()) { //wrap with explicit array
      final PsiParameter[] parameters = oldConstructor.getParameterList().getParameters();
      final PsiType varargType = parameters[parameters.length - 1].getType();
      if (varargType instanceof PsiEllipsisType) {
        // Substitute call-site type arguments into the vararg component type.
        final PsiType arrayType =
          constructorCall.resolveMethodGenerics().getSubstitutor().substitute(((PsiEllipsisType)varargType).getComponentType());
        final PsiExpression[] exprs = new PsiExpression[parameters.length];
        System.arraycopy(instanceCreationArguments, 0, exprs, 0, parameters.length - 1);
        // Gather all trailing arguments into one explicit array-creation expression
        // so parameter index i maps 1:1 onto argument index i below.
        StringBuilder varargs = new StringBuilder();
        for (int i = parameters.length - 1; i < instanceCreationArguments.length; i++) {
          if (varargs.length() > 0) varargs.append(", ");
          varargs.append(instanceCreationArguments[i].getText());
        }
        exprs[parameters.length - 1] = JavaPsiFacade.getElementFactory(constructorCall.getProject())
          .createExpressionFromText("new " + arrayType.getCanonicalText() + "[]{" + varargs + "}", constructorCall);
        instanceCreationArguments = exprs;
      }
    }
    // Precondition (asserted): the constructor body is exactly one expression
    // statement whose expression is a method call — presumably the this()/super()
    // delegation this refactoring targets.
    PsiStatement[] statements = oldConstructor.getBody().getStatements();
    LOG.assertTrue(statements.length == 1 && statements[0] instanceof PsiExpressionStatement);
    PsiExpression expression = ((PsiExpressionStatement)statements[0]).getExpression();
    LOG.assertTrue(expression instanceof PsiMethodCallExpression);
    // Encode qualifiers before copying, decode after splicing into the new context.
    ChangeContextUtil.encodeContextInfo(expression, true);
    PsiMethodCallExpression methodCall = (PsiMethodCallExpression)expression.copy();
    final PsiExpression[] args = methodCall.getArgumentList().getExpressions();
    for (PsiExpression arg : args) {
      replaceParameterReferences(arg, oldConstructor, instanceCreationArguments);
    }
    try {
      final PsiExpressionList exprList = (PsiExpressionList) constructorCall.getArgumentList().replace(methodCall.getArgumentList());
      ChangeContextUtil.decodeContextInfo(exprList, PsiTreeUtil.getParentOfType(constructorCall, PsiClass.class), null);
    }
    catch (IncorrectOperationException e) {
      LOG.error(e);
    }
    ChangeContextUtil.clearContextInfo(expression);
  }
  /**
   * Inside {@code element}, replaces every reference that resolves to a parameter
   * of {@code oldConstructor} with the corresponding creation-site argument.
   * References are collected first and replaced afterwards so the tree walk is
   * not invalidated by the PSI mutations it would otherwise trigger.
   */
  private static void replaceParameterReferences(final PsiElement element,
                                                 final PsiMethod oldConstructor,
                                                 final PsiExpression[] instanceCreationArguments) {
    // LinkedHashMap keeps replacement order identical to discovery order.
    Map<PsiReferenceExpression, PsiExpression> replacement = new LinkedHashMap<>();
    element.accept(new JavaRecursiveElementWalkingVisitor() {
      @Override
      public void visitReferenceExpression(PsiReferenceExpression expression) {
        super.visitReferenceExpression(expression);
        PsiElement resolved = expression.resolve();
        // Only parameters whose declaration scope is this constructor qualify.
        if (resolved instanceof PsiParameter &&
            element.getManager().areElementsEquivalent(((PsiParameter)resolved).getDeclarationScope(), oldConstructor)) {
          int parameterIndex = oldConstructor.getParameterList().getParameterIndex((PsiParameter)resolved);
          if (parameterIndex >= 0) {
            replacement.put(expression, instanceCreationArguments[parameterIndex]);
          }
        }
      }
    });
    for (Map.Entry<PsiReferenceExpression, PsiExpression> entry : replacement.entrySet()) {
      try {
        entry.getKey().replace(entry.getValue());
      }
      catch (IncorrectOperationException e) {
        LOG.error(e);
      }
    }
  }
  /**
   * Inlines one regular method call referenced by {@code ref}: copies the method,
   * prepares its body (parameter/'this' locals, synchronization, return
   * normalization), splices the body statements before the call's anchor
   * statement, then replaces the call itself and tries to collapse the
   * generated locals.
   *
   * @param ref reference whose parent is the {@code PsiMethodCallExpression} to inline
   */
  public void inlineMethodCall(PsiReferenceExpression ref) throws IncorrectOperationException {
    // Work on a copy so the original declaration is untouched until (optional) deletion.
    myMethodCopy = (PsiMethod)myMethod.copy();
    PsiMethodCallExpression methodCall = (PsiMethodCallExpression)ref.getParent();
    InlineMethodHelper helper = new InlineMethodHelper(myProject, myMethod, myMethodCopy, methodCall);
    BlockData blockData = prepareBlock(ref, helper);
    ChangeContextUtil.encodeContextInfo(blockData.block, false);
    InlineUtil.solveVariableNameConflicts(blockData.block, ref, myMethodCopy.getBody());
    helper.initializeParameters(blockData.parmVars);
    addThisInitializer(methodCall, blockData.thisVar);
    PsiElement anchor = RefactoringUtil.getParentStatement(methodCall, true);
    if (anchor == null) {
      // No enclosing statement to splice into: the call may sit in an enum
      // constant's argument list, where only a single returned expression can be
      // substituted in place of the call.
      PsiEnumConstant enumConstant = PsiTreeUtil.getParentOfType(methodCall, PsiEnumConstant.class);
      if (enumConstant != null) {
        PsiExpression returnExpr = getSimpleReturnedExpression(myMethod);
        if (returnExpr != null) {
          ChangeContextUtil.encodeContextInfo(returnExpr, true);
          PsiElement copy = returnExpr.copy();
          ChangeContextUtil.clearContextInfo(returnExpr);
          if (copy instanceof PsiReferenceExpression && ((PsiReferenceExpression)copy).getQualifierExpression() == null) {
            // The whole returned expression is a bare parameter reference.
            copy = inlineParameterReference((PsiReferenceExpression)copy, blockData);
          } else {
            // Substitute every parameter reference nested inside the copied expression.
            copy.accept(new JavaRecursiveElementVisitor() {
              @Override
              public void visitReferenceExpression(PsiReferenceExpression expression) {
                super.visitReferenceExpression(expression);
                inlineParameterReference(expression, blockData);
              }
            });
          }
          PsiElement replace = methodCall.replace(copy);
          if (blockData.thisVar != null) {
            ChangeContextUtil.decodeContextInfo(replace, myMethod.getContainingClass(), blockData.thisVar.getInitializer());
          }
        }
      }
      return;
    }
    PsiElement anchorParent = anchor.getParent();
    PsiLocalVariable thisVar = null;
    PsiLocalVariable[] parmVars = new PsiLocalVariable[blockData.parmVars.length];
    PsiLocalVariable resultVar = null;
    PsiStatement[] statements = blockData.block.getStatements();
    PsiElement firstBodyElement = blockData.block.getFirstBodyElement();
    if (firstBodyElement instanceof PsiWhiteSpace) firstBodyElement = PsiTreeUtil.skipWhitespacesForward(firstBodyElement);
    PsiElement firstAdded = null;
    if (firstBodyElement != null && firstBodyElement != blockData.block.getRBrace()) {
      // Copy everything between the braces of the prepared body in front of the anchor.
      int last = statements.length - 1;
      final PsiElement rBraceOrReturnStatement =
        last >= 0 ? PsiTreeUtil.skipWhitespacesAndCommentsForward(statements[last]) : blockData.block.getLastBodyElement();
      LOG.assertTrue(rBraceOrReturnStatement != null);
      final PsiElement beforeRBraceStatement = rBraceOrReturnStatement.getPrevSibling();
      LOG.assertTrue(beforeRBraceStatement != null);
      firstAdded = anchorParent.addRangeBefore(firstBodyElement, beforeRBraceStatement, anchor);
      // Re-locate the generated 'this'/parameter/result locals among the freshly
      // inserted copies by matching their names.
      for (PsiElement e = firstAdded; e != anchor; e = e.getNextSibling()) {
        if (e instanceof PsiDeclarationStatement) {
          PsiElement[] elements = ((PsiDeclarationStatement)e).getDeclaredElements();
          PsiLocalVariable var = tryCast(ArrayUtil.getFirstElement(elements), PsiLocalVariable.class);
          if (var != null) {
            String name = var.getName();
            if (blockData.resultVar != null && name.equals(blockData.resultVar.getName())) {
              resultVar = var;
            }
            else if (blockData.thisVar != null && name.equals(blockData.thisVar.getName())) {
              thisVar = var;
            } else {
              for (int i = 0; i < blockData.parmVars.length; i++) {
                if (name.equals(blockData.parmVars[i].getName())) {
                  parmVars[i] = var;
                  break;
                }
              }
            }
          }
        }
      }
    }
    PsiClass thisClass = myMethod.getContainingClass();
    PsiExpression thisAccessExpr;
    if (thisVar != null) {
      if (!InlineUtil.canInlineParameterOrThisVariable(thisVar)) {
        // The 'this' local must stay: decode references against its name.
        thisAccessExpr = myFactory.createExpressionFromText(thisVar.getName(), null);
      }
      else {
        // The 'this' local can be removed: decode references against its initializer.
        thisAccessExpr = thisVar.getInitializer();
      }
    }
    else {
      thisAccessExpr = null;
    }
    ChangeContextUtil.decodeContextInfo(anchorParent, thisClass, thisAccessExpr);
    PsiReferenceExpression resultUsage = replaceCall(myFactory, methodCall, firstAdded, blockData.resultVar);
    // Try to collapse the helper locals where they are safely inlinable.
    if (thisVar != null) {
      InlineUtil.tryInlineGeneratedLocal(thisVar, false);
    }
    helper.inlineParameters(parmVars);
    if (resultVar != null && resultUsage != null) {
      InlineUtil.tryInlineResultVariable(resultVar, resultUsage);
    }
    ChangeContextUtil.clearContextInfo(anchorParent);
  }
  /**
   * Replaces the inlined call expression itself.
   * With a result variable, the call becomes a reference to that variable (which
   * is returned so it can be inlined further). Without one, the whole anchor
   * statement is deleted: the transformer is trusted to have already moved its
   * effects into the inserted statements, whose first element receives the
   * preserved comments.
   *
   * @param firstAdded first statement inserted before the anchor, or null if none were
   * @return reference to the result variable, or null when the call was removed
   */
  @Nullable
  static PsiReferenceExpression replaceCall(@NotNull PsiElementFactory factory,
                                            @NotNull PsiMethodCallExpression methodCall,
                                            @Nullable PsiElement firstAdded,
                                            @Nullable PsiLocalVariable resultVar) {
    if (resultVar != null) {
      PsiExpression expr = factory.createExpressionFromText(resultVar.getName(), null);
      return (PsiReferenceExpression)new CommentTracker().replaceAndRestoreComments(methodCall, expr);
    }
    // If return var is not specified, we trust that InlineTransformer fully processed the original anchor statement,
    // and we can delete it.
    CommentTracker tracker = new CommentTracker();
    PsiElement anchor = RefactoringUtil.getParentStatement(methodCall, true);
    assert anchor != null;
    if (firstAdded != null) {
      tracker.delete(anchor);
      tracker.insertCommentsBefore(firstAdded);
    } else {
      tracker.deleteAndRestoreComments(anchor);
    }
    return null;
  }
@NotNull
private PsiExpression inlineParameterReference(@NotNull PsiReferenceExpression expression, BlockData blockData) {
if (expression.getQualifierExpression() != null) return expression;
PsiElement resolve = expression.resolve();
if (!(resolve instanceof PsiParameter)) return expression;
int paramIdx = ArrayUtil.find(myMethod.getParameterList().getParameters(), resolve);
if (paramIdx < 0) return expression;
PsiExpression initializer = blockData.parmVars[paramIdx].getInitializer();
if (initializer == null) return expression;
return InlineUtil.inlineInitializer((PsiVariable)resolve, initializer, expression);
}
  /**
   * Applies the call-site substitutor to every type-parameter usage in {@code scope}.
   * Thin delegate to {@link InlineUtil#substituteTypeParams}.
   */
  private void substituteMethodTypeParams(PsiElement scope, final PsiSubstitutor substitutor) {
    InlineUtil.substituteTypeParams(scope, substitutor, myFactory);
  }
private boolean syncNeeded(final PsiReferenceExpression ref) {
if (!myMethod.hasModifierProperty(PsiModifier.SYNCHRONIZED)) return false;
final PsiMethod containingMethod = Util.getContainingMethod(ref);
if (containingMethod == null) return true;
if (!containingMethod.hasModifierProperty(PsiModifier.SYNCHRONIZED)) return true;
final PsiClass sourceContainingClass = myMethod.getContainingClass();
final PsiClass targetContainingClass = containingMethod.getContainingClass();
return !sourceContainingClass.equals(targetContainingClass);
}
  /**
   * Prepares the copied method's body for splicing into the call site:
   * substitutes call-site type arguments, declares the parameter and 'this'
   * locals, wraps the original statements in a synchronized block when needed,
   * and lets the chosen transformer normalize returns into a result variable.
   * The order of these steps is significant.
   */
  private BlockData prepareBlock(PsiReferenceExpression ref, InlineMethodHelper helper)
    throws IncorrectOperationException {
    final PsiCodeBlock block = Objects.requireNonNull(myMethodCopy.getBody());
    PsiSubstitutor callSubstitutor = helper.getSubstitutor();
    if (callSubstitutor != PsiSubstitutor.EMPTY) {
      substituteMethodTypeParams(block, callSubstitutor);
    }
    // Snapshot the original statements before locals are prepended; the
    // synchronized wrapper must move only these.
    final PsiStatement[] originalStatements = block.getStatements();
    PsiType returnType = callSubstitutor.substitute(myMethod.getReturnType());
    InlineTransformer transformer = myTransformerChooser.apply(ref);
    PsiLocalVariable[] parmVars = helper.declareParameters();
    PsiLocalVariable thisVar = declareThis(callSubstitutor, block);
    addSynchronization(ref, block, originalStatements, thisVar);
    PsiLocalVariable resultVar = transformer.transformBody(myMethodCopy, ref, returnType);
    return new BlockData(block, thisVar, parmVars, resultVar);
  }
  /**
   * For an instance method, declares a local of the (substituted) containing-class
   * type at the top of {@code block} to stand in for 'this'. Its initializer is a
   * "null" placeholder that {@link #addThisInitializer} later replaces with the
   * actual qualifier expression.
   *
   * @return the declared variable, or null for static methods / no containing class
   */
  @Nullable
  private PsiLocalVariable declareThis(PsiSubstitutor callSubstitutor, PsiCodeBlock block) {
    PsiClass containingClass = myMethod.getContainingClass();
    if (myMethod.hasModifierProperty(PsiModifier.STATIC) || containingClass == null) return null;
    PsiType thisType = GenericsUtil.getVariableTypeByExpressionType(myFactory.createType(containingClass, callSubstitutor));
    // Pick a style-conforming name and make it unique within the method.
    String[] names = myJavaCodeStyle.suggestVariableName(VariableKind.LOCAL_VARIABLE, null, null, thisType).names;
    String thisVarName = names[0];
    thisVarName = myJavaCodeStyle.suggestUniqueVariableName(thisVarName, myMethod.getFirstChild(), true);
    PsiExpression initializer = myFactory.createExpressionFromText("null", null);
    PsiDeclarationStatement declaration = myFactory.createVariableDeclarationStatement(thisVarName, thisType, initializer);
    // addAfter(..., null) inserts as the first element of the block.
    declaration = (PsiDeclarationStatement)block.addAfter(declaration, null);
    return (PsiLocalVariable)declaration.getDeclaredElements()[0];
  }
  /**
   * When the inlined method is synchronized and the call site does not already
   * hold the same monitor (see {@link #syncNeeded}), moves the original body
   * statements into a synchronized block that locks either the generated 'this'
   * local or, for static methods, the declaring class literal.
   */
  private void addSynchronization(PsiReferenceExpression ref,
                                  PsiCodeBlock block,
                                  PsiStatement[] originalStatements,
                                  PsiLocalVariable thisVar) {
    PsiClass containingClass = myMethod.getContainingClass();
    String lockName = null;
    if (thisVar != null) {
      lockName = thisVar.getName();
    }
    else if (myMethod.hasModifierProperty(PsiModifier.STATIC) && containingClass != null) {
      lockName = containingClass.getQualifiedName() + ".class";
    }
    if (lockName != null && syncNeeded(ref)) {
      PsiSynchronizedStatement synchronizedStatement =
        (PsiSynchronizedStatement)myFactory.createStatementFromText("synchronized(" + lockName + "){}", block);
      synchronizedStatement = (PsiSynchronizedStatement)CodeStyleManager.getInstance(myProject).reformat(synchronizedStatement);
      synchronizedStatement = (PsiSynchronizedStatement)block.add(synchronizedStatement);
      final PsiCodeBlock synchronizedBody = Objects.requireNonNull(synchronizedStatement.getBody());
      // Copy each original statement inside the lock, then remove the original;
      // statements added later (locals etc.) stay outside the synchronized block.
      for (final PsiStatement originalStatement : originalStatements) {
        synchronizedBody.add(originalStatement);
        originalStatement.delete();
      }
    }
  }
  /**
   * Replaces the "null" placeholder initializer of the generated 'this' local
   * with the actual receiver of {@code methodCall}: the explicit qualifier when
   * present, or a synthesized {@code this} / {@code Outer.this} expression
   * derived from where the call textually sits relative to the method's class.
   */
  private void addThisInitializer(PsiMethodCallExpression methodCall, PsiLocalVariable thisVar) throws IncorrectOperationException {
    if (thisVar != null) {
      PsiExpression qualifier = methodCall.getMethodExpression().getQualifierExpression();
      if (qualifier == null) {
        // Find the class (or file) lexically enclosing the call.
        PsiElement parent = methodCall.getContext();
        while (true) {
          if (parent instanceof PsiClass) break;
          if (parent instanceof PsiFile) break;
          assert parent != null : methodCall;
          parent = parent.getContext();
        }
        if (parent instanceof PsiClass) {
          PsiClass parentClass = (PsiClass)parent;
          final PsiClass containingClass = myMethod.getContainingClass();
          if (containingClass != null && parentClass.isInheritor(containingClass, true)) {
            String name = parentClass.getName();
            // We cannot have qualified this reference to an anonymous class, so we leave it unqualified
            // this might produce incorrect code in extremely rare cases
            // when we inline a superclass method in an anonymous class,
            // and the method body contains a nested class that refers to the outer one
            qualifier = myFactory.createExpressionFromText(name == null ? "this" : name + ".this", null);
          }
          else if (containingClass != null && parentClass.equals(containingClass)) {
            // Call site is inside the method's own class: plain 'this'.
            qualifier = myFactory.createExpressionFromText("this", null);
          }
          else {
            if (PsiTreeUtil.isAncestor(containingClass, parent, false)) {
              // Call site is in an inner class of the method's class: qualify with the outer name.
              String name = containingClass.getName();
              if (name != null) {
                qualifier = myFactory.createExpressionFromText(name + ".this", null);
              }
              else { //?
                qualifier = myFactory.createExpressionFromText("this", null);
              }
            } else { // we are inside the inheritor
              // Walk outward until an enclosing class that inherits the method's class is found.
              do {
                parentClass = PsiTreeUtil.getParentOfType(parentClass, PsiClass.class, true);
                if (InheritanceUtil.isInheritorOrSelf(parentClass, containingClass, true)) {
                  final String childClassName = parentClass.getName();
                  qualifier = myFactory.createExpressionFromText(childClassName != null ? childClassName + ".this" : "this", null);
                  break;
                }
              }
              while (parentClass != null);
            }
          }
        }
        else {
          qualifier = myFactory.createExpressionFromText("this", null);
        }
      }
      else if (qualifier instanceof PsiSuperExpression) {
        // 'super.m()' dispatches on the same instance: 'this' is the correct receiver.
        qualifier = myFactory.createExpressionFromText("this", null);
      }
      thisVar.getInitializer().replace(qualifier);
    }
  }
private static final Key<String> MARK_KEY = Key.create("");
  /**
   * Makes sure every reference about to be inlined sits inside a code block so
   * the method-body statements have somewhere to go:
   * <ul>
   *   <li>bare expression/return statements are wrapped in {@code {...}};</li>
   *   <li>expression lambdas are converted to block lambdas;</li>
   *   <li>field initializers are moved into a (possibly static) class initializer;</li>
   *   <li>enum constants get dedicated handling ({@link #inlineEnumConstantParameter}).</li>
   * </ul>
   * Because wrapping copies PSI subtrees, references are tagged with
   * {@code MARK_KEY} first and re-discovered in the copies afterwards. Added
   * blocks/initializers are remembered so {@link #removeAddedBracesWhenPossible}
   * can collapse them again.
   *
   * @return the (re-located) references to inline
   */
  private PsiReferenceExpression[] addBracesWhenNeeded(PsiReferenceExpression[] refs) throws IncorrectOperationException {
    ArrayList<PsiReferenceExpression> refsVector = new ArrayList<>();
    ArrayList<PsiCodeBlock> addedBracesVector = new ArrayList<>();
    myAddedClassInitializers = new HashMap<>();
    // Tag every ordinary reference so it can be found again inside copied subtrees.
    for (PsiReferenceExpression ref : refs) {
      if (ref instanceof PsiMethodReferenceExpression) continue;
      ref.putCopyableUserData(MARK_KEY, "");
    }
    RefLoop:
    for (PsiReferenceExpression ref : refs) {
      if (!ref.isValid()) continue;
      if (ref instanceof PsiMethodReferenceExpression) {
        // Method references need no surrounding block.
        refsVector.add(ref);
        continue;
      }
      PsiElement parentStatement = RefactoringUtil.getParentStatement(ref, true);
      if (parentStatement != null) {
        // Look for a bare expression/return statement between the ref and its anchor.
        PsiElement parent = ref.getParent();
        while (!parent.equals(parentStatement)) {
          if (parent instanceof PsiExpressionStatement || parent instanceof PsiReturnStatement) {
            String text = "{\n}";
            PsiBlockStatement blockStatement = (PsiBlockStatement)myFactory.createStatementFromText(text, null);
            blockStatement = (PsiBlockStatement)myCodeStyleManager.reformat(blockStatement);
            blockStatement.getCodeBlock().add(parent);
            blockStatement = (PsiBlockStatement)parent.replace(blockStatement);
            PsiElement newStatement = blockStatement.getCodeBlock().getStatements()[0];
            // The wrapped copy contains the marked refs; pick them up from there.
            addMarkedElements(refsVector, newStatement);
            addedBracesVector.add(blockStatement.getCodeBlock());
            continue RefLoop;
          }
          parent = parent.getParent();
        }
        final PsiElement lambdaExpr = parentStatement.getParent();
        if (lambdaExpr instanceof PsiLambdaExpression) {
          // Expression lambda: rebuild as a block lambda ('{ expr; }' or '{ return expr; }').
          final PsiLambdaExpression newLambdaExpr = (PsiLambdaExpression)myFactory.createExpressionFromText(
            ((PsiLambdaExpression)lambdaExpr).getParameterList().getText() + " -> " + "{\n}", lambdaExpr);
          final PsiStatement statementFromText;
          if (PsiType.VOID.equals(LambdaUtil.getFunctionalInterfaceReturnType((PsiLambdaExpression)lambdaExpr))) {
            statementFromText = myFactory.createStatementFromText("a;", lambdaExpr);
            ((PsiExpressionStatement)statementFromText).getExpression().replace(parentStatement);
          } else {
            statementFromText = myFactory.createStatementFromText("return a;", lambdaExpr);
            ((PsiReturnStatement)statementFromText).getReturnValue().replace(parentStatement);
          }
          newLambdaExpr.getBody().add(statementFromText);
          final PsiCodeBlock body = (PsiCodeBlock)((PsiLambdaExpression)lambdaExpr.replace(newLambdaExpr)).getBody();
          PsiElement newStatement = body.getStatements()[0];
          addMarkedElements(refsVector, newStatement);
          addedBracesVector.add(body);
          continue;
        }
      }
      else {
        // No enclosing statement: the ref sits in a field initializer or enum constant.
        final PsiField field = PsiTreeUtil.getParentOfType(ref, PsiField.class);
        if (field != null) {
          if (field instanceof PsiEnumConstant) {
            inlineEnumConstantParameter(refsVector, ref);
            continue;
          }
          if (myAddedClassInitializers.containsKey(field)) {
            // Initializer block for this field was already created in this pass.
            continue;
          }
          // Move 'T f = init;' into 'T f; { f = init; }' so statements can be inserted.
          field.normalizeDeclaration();
          final PsiExpression initializer = field.getInitializer();
          LOG.assertTrue(initializer != null);
          PsiClassInitializer classInitializer = myFactory.createClassInitializer();
          final PsiClass containingClass = field.getContainingClass();
          classInitializer = (PsiClassInitializer)containingClass.addAfter(classInitializer, field);
          containingClass.addAfter(CodeEditUtil.createLineFeed(field.getManager()), field);
          final PsiCodeBlock body = classInitializer.getBody();
          PsiExpressionStatement statement = (PsiExpressionStatement)myFactory.createStatementFromText(field.getName() + " = 0;", body);
          statement = (PsiExpressionStatement)body.add(statement);
          final PsiAssignmentExpression assignment = (PsiAssignmentExpression)statement.getExpression();
          assignment.getLExpression().replace(RenameJavaMemberProcessor.createMemberReference(field, assignment));
          assignment.getRExpression().replace(initializer);
          addMarkedElements(refsVector, statement);
          if (field.hasModifierProperty(PsiModifier.STATIC)) {
            PsiUtil.setModifierProperty(classInitializer, PsiModifier.STATIC, true);
          }
          myAddedClassInitializers.put(field, classInitializer);
          continue;
        }
      }
      // Already inside a block: keep the reference as-is.
      refsVector.add(ref);
    }
    // Clear any leftover marks (refs whose subtree was never copied).
    for (PsiReferenceExpression ref : refs) {
      ref.putCopyableUserData(MARK_KEY, null);
    }
    myAddedBraces = addedBracesVector.toArray(PsiCodeBlock.EMPTY_ARRAY);
    return refsVector.toArray(new PsiReferenceExpression[0]);
  }
  /**
   * Handles a reference inside an enum constant's argument list, where no
   * statements can be inserted. A method that is a single 'return expr;' can be
   * inlined directly; otherwise the enclosing call is wrapped into
   * {@code new Object() { T evaluate() { return <call>; } }.evaluate()} so the
   * body can later be inlined inside {@code evaluate()}.
   * NOTE(review): {@code call} and {@code getReturnTypeElement()} are assumed
   * non-null on this path — confirm against the callers' applicability checks.
   */
  private void inlineEnumConstantParameter(final List<? super PsiReferenceExpression> refsVector,
                                           final PsiReferenceExpression ref) throws IncorrectOperationException {
    PsiExpression expr = getSimpleReturnedExpression(myMethod);
    if (expr != null) {
      refsVector.add(ref);
    }
    else {
      PsiCall call = PsiTreeUtil.getParentOfType(ref, PsiCall.class);
      @NonNls String text = "new Object() { " + myMethod.getReturnTypeElement().getText() + " evaluate() { return " + call.getText() + ";}}.evaluate";
      PsiExpression callExpr = JavaPsiFacade.getInstance(myProject).getParserFacade().createExpressionFromText(text, call);
      // Replacing only the reference keeps the original argument list attached,
      // turning the text above into '...evaluate(<original args>)'.
      PsiReferenceExpression classExpr = (PsiReferenceExpression)ref.replace(callExpr);
      PsiNewExpression newObject = (PsiNewExpression)Objects.requireNonNull(classExpr.getQualifierExpression());
      PsiMethod evaluateMethod = Objects.requireNonNull(newObject.getAnonymousClass()).getMethods()[0];
      PsiExpression retVal = ((PsiReturnStatement)Objects.requireNonNull(evaluateMethod.getBody())
        .getStatements()[0]).getReturnValue();
      if (retVal instanceof PsiMethodCallExpression) {
        refsVector.add(((PsiMethodCallExpression) retVal).getMethodExpression());
      }
      // Drop the stale arguments now attached to the synthetic evaluate() call.
      if (classExpr.getParent() instanceof PsiMethodCallExpression) {
        PsiExpressionList args = ((PsiMethodCallExpression)classExpr.getParent()).getArgumentList();
        PsiExpression[] argExpressions = args.getExpressions();
        if (argExpressions.length > 0) {
          args.deleteChildRange(argExpressions[0], argExpressions[argExpressions.length - 1]);
        }
      }
    }
  }
@Nullable
private static PsiExpression getSimpleReturnedExpression(final PsiMethod method) {
PsiCodeBlock body = method.getBody();
if (body == null) return null;
PsiStatement[] psiStatements = body.getStatements();
if (psiStatements.length != 1) return null;
PsiStatement statement = psiStatements[0];
if (!(statement instanceof PsiReturnStatement)) return null;
return ((PsiReturnStatement) statement).getReturnValue();
}
  /**
   * Collects into {@code array} every element under {@code scope} that carries
   * the {@code MARK_KEY} tag (set by {@link #addBracesWhenNeeded}), clearing the
   * tag as it goes. Copyable user data survives PSI copies, which is how
   * references are re-found inside wrapped subtrees.
   */
  private static void addMarkedElements(final List<? super PsiReferenceExpression> array, PsiElement scope) {
    scope.accept(new PsiRecursiveElementWalkingVisitor() {
      @Override public void visitElement(@NotNull PsiElement element) {
        if (element.getCopyableUserData(MARK_KEY) != null) {
          array.add((PsiReferenceExpression)element);
          element.putCopyableUserData(MARK_KEY, null);
        }
        super.visitElement(element);
      }
    });
  }
  /**
   * Undoes the scaffolding created by {@link #addBracesWhenNeeded} where it is no
   * longer necessary: single-statement blocks are unwrapped (block lambdas are
   * restored to expression lambdas), and class initializers that ended up as a
   * single 'field = expr;' assignment are folded back into the field initializer.
   */
  private void removeAddedBracesWhenPossible() throws IncorrectOperationException {
    if (myAddedBraces == null) return;
    for (PsiCodeBlock codeBlock : myAddedBraces) {
      PsiStatement[] statements = codeBlock.getStatements();
      if (statements.length == 1) {
        final PsiElement codeBlockParent = codeBlock.getParent();
        PsiStatement statement = statements[0];
        if (codeBlockParent instanceof PsiLambdaExpression) {
          // Turn '{ return x; }' / '{ x; }' back into an expression lambda body.
          if (statement instanceof PsiReturnStatement) {
            final PsiExpression returnValue = ((PsiReturnStatement)statement).getReturnValue();
            if (returnValue != null) {
              codeBlock.replace(returnValue);
            }
          } else if (statement instanceof PsiExpressionStatement){
            codeBlock.replace(((PsiExpressionStatement)statement).getExpression());
          }
        }
        else if (codeBlockParent instanceof PsiBlockStatement) {
          // Keep the braces for a dangling-else hazard: an 'if' without (or with an
          // appendable) else inside an outer 'if' that has an else branch.
          if (!(ifStatementWithAppendableElseBranch(statement) &&
                codeBlockParent.getParent() instanceof PsiIfStatement &&
                ((PsiIfStatement)codeBlockParent.getParent()).getElseBranch() != null)) {
            codeBlockParent.replace(statement);
          }
        }
        else {
          codeBlock.replace(statement);
        }
      }
    }
    myAddedClassInitializers.forEach((psiField, classInitializer) -> {
      PsiExpression newInitializer = getSimpleFieldInitializer(psiField, classInitializer);
      PsiExpression fieldInitializer = Objects.requireNonNull(psiField.getInitializer());
      if (newInitializer != null) {
        // Initializer block collapsed to one assignment: restore 'T f = expr;'.
        fieldInitializer.replace(newInitializer);
        classInitializer.delete();
      }
      else {
        // Keep the initializer block; drop the placeholder field initializer.
        fieldInitializer.delete();
      }
    });
  }
private static boolean ifStatementWithAppendableElseBranch(PsiStatement statement) {
if (statement instanceof PsiIfStatement) {
PsiStatement elseBranch = ((PsiIfStatement)statement).getElseBranch();
return elseBranch == null || elseBranch instanceof PsiIfStatement;
}
return false;
}
@Nullable
private PsiExpression getSimpleFieldInitializer(PsiField field, PsiClassInitializer initializer) {
final PsiStatement[] statements = initializer.getBody().getStatements();
if (statements.length != 1) return null;
if (!(statements[0] instanceof PsiExpressionStatement)) return null;
final PsiExpression expression = ((PsiExpressionStatement)statements[0]).getExpression();
if (!(expression instanceof PsiAssignmentExpression)) return null;
final PsiExpression lExpression = ((PsiAssignmentExpression)expression).getLExpression();
if (!(lExpression instanceof PsiReferenceExpression)) return null;
final PsiElement resolved = ((PsiReferenceExpression)lExpression).resolve();
if (!myManager.areElementsEquivalent(field, resolved)) return null;
return ((PsiAssignmentExpression)expression).getRExpression();
}
public static @NlsContexts.DialogMessage String checkUnableToInsertCodeBlock(PsiCodeBlock methodBody, final PsiElement element) {
if (checkUnableToInsertCodeBlock(methodBody, element,
expr -> JavaPsiConstructorUtil.isConstructorCall(expr) && expr.getMethodExpression() != element)) {
return JavaRefactoringBundle.message("inline.method.multiline.method.in.ctor.call");
}
return checkUnableToInsertCodeBlock(methodBody, element,
expr -> {
PsiConditionalLoopStatement loopStatement = PsiTreeUtil.getParentOfType(expr, PsiConditionalLoopStatement.class);
return loopStatement != null && PsiTreeUtil.isAncestor(loopStatement.getCondition(), expr, false);
})
? JavaRefactoringBundle.message("inline.method.multiline.method.in.loop.condition")
: null;
}
private static boolean checkUnableToInsertCodeBlock(final PsiCodeBlock methodBody,
final PsiElement element,
final Predicate<? super PsiMethodCallExpression> errorCondition) {
PsiStatement[] statements = methodBody.getStatements();
if (statements.length > 1 || statements.length == 1 &&
!(statements[0] instanceof PsiExpressionStatement) &&
!(statements[0] instanceof PsiReturnStatement)) {
PsiMethodCallExpression expr = PsiTreeUtil.getParentOfType(element, PsiMethodCallExpression.class, true, PsiStatement.class);
while (expr != null) {
if (errorCondition.test(expr)) {
return true;
}
expr = PsiTreeUtil.getParentOfType(expr, PsiMethodCallExpression.class, true, PsiStatement.class);
}
}
return false;
}
public static boolean checkBadReturns(PsiMethod method) {
PsiReturnStatement[] returns = PsiUtil.findReturnStatements(method);
PsiCodeBlock body = method.getBody();
return checkBadReturns(returns, body);
}
  /**
   * Detects "bad" returns: ones after which control flow does not simply fall
   * through to the end of the body. The body's control flow is built, each
   * return's own jump is neutralized, and then the flow after every return is
   * followed through jumps; reaching any other instruction before the end of
   * the flow marks the return as bad.
   *
   * @return true when at least one return cannot be inlined as plain fall-through
   */
  public static boolean checkBadReturns(PsiReturnStatement[] returns, PsiCodeBlock body) {
    if (returns.length == 0) return false;
    ControlFlow controlFlow;
    try {
      controlFlow = ControlFlowFactory.getInstance(body.getProject()).getControlFlow(body, new LocalsControlFlowPolicy(body), false);
    }
    catch (AnalysisCanceledException e) {
      // Analysis interrupted: err on the permissive side.
      return false;
    }
    if (LOG.isDebugEnabled()) {
      LOG.debug("Control flow:");
      LOG.debug(controlFlow.toString());
    }
    List<Instruction> instructions = new ArrayList<>(controlFlow.getInstructions());
    // temporary replace all return's with empty statements in the flow
    for (PsiReturnStatement aReturn : returns) {
      int offset = controlFlow.getStartOffset(aReturn);
      int endOffset = controlFlow.getEndOffset(aReturn);
      // The return statement's range contains a GoTo (the jump out of the method).
      while (offset <= endOffset && !(instructions.get(offset) instanceof GoToInstruction)) {
        offset++;
      }
      LOG.assertTrue(instructions.get(offset) instanceof GoToInstruction);
      instructions.set(offset, EmptyInstruction.INSTANCE);
    }
    for (PsiReturnStatement aReturn : returns) {
      int offset = controlFlow.getEndOffset(aReturn);
      // Follow jumps after the return; any non-jump instruction reached before the
      // end of the flow means the return is not a simple fall-through.
      while (offset != instructions.size()) {
        Instruction instruction = instructions.get(offset);
        if (instruction instanceof GoToInstruction) {
          offset = ((GoToInstruction)instruction).offset;
        }
        else if (instruction instanceof ThrowToInstruction) {
          offset = ((ThrowToInstruction)instruction).offset;
        }
        else if (instruction instanceof ConditionalThrowToInstruction) {
          // In case of "conditional throw to", control flow will not be altered
          // If exception handler is in method, we will inline it to invocation site
          // If exception handler is at invocation site, execution will continue to get there
          offset++;
        }
        else {
          return true;
        }
      }
    }
    return false;
  }
  /**
   * Immutable holder for the prepared inline body: the code block itself plus
   * the generated 'this', parameter, and result locals (any of which may be
   * null / contain nulls when not applicable).
   */
  private static class BlockData {
    final PsiCodeBlock block;
    final PsiLocalVariable thisVar;
    final PsiLocalVariable[] parmVars;
    final PsiLocalVariable resultVar;
    BlockData(PsiCodeBlock block, PsiLocalVariable thisVar, PsiLocalVariable[] parmVars, PsiLocalVariable resultVar) {
      this.block = block;
      this.thisVar = thisVar;
      this.parmVars = parmVars;
      this.resultVar = resultVar;
    }
  }
@Override
@NotNull
protected Collection<? extends PsiElement> getElementsToWrite(@NotNull final UsageViewDescriptor descriptor) {
if (myInlineThisOnly) {
return Collections.singletonList(myReference.getElement());
}
else {
if (!checkReadOnly()) return Collections.emptyList();
return myReference == null ? Collections.singletonList(myMethod) : Arrays.asList(myReference.getElement(), myMethod);
}
}
  /**
   * Marker usage info for elements whose override attribute (e.g. an @Override
   * annotation) must be removed via {@code OverrideMethodsProcessor} during the
   * refactoring, rather than being inlined as a call site.
   */
  private static class OverrideAttributeUsageInfo extends UsageInfo {
    private OverrideAttributeUsageInfo(@NotNull PsiElement element) {
      super(element);
    }
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.metron.rest.controller;
import static org.apache.metron.rest.MetronRestConstants.TEST_PROFILE;
import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.csrf;
import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.httpBasic;
import static org.springframework.security.test.web.servlet.setup.SecurityMockMvcConfigurers.springSecurity;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.patch;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import com.google.common.collect.ImmutableMap;
import java.util.NavigableMap;
import org.adrianwalker.multilinestring.Multiline;
import org.apache.curator.framework.CuratorFramework;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.metron.common.utils.JSONUtils;
import org.apache.metron.hbase.mock.MockHBaseTableProvider;
import org.apache.metron.hbase.mock.MockHTable;
import org.apache.metron.indexing.dao.HBaseDao;
import org.apache.metron.indexing.dao.SearchIntegrationTest;
import org.apache.metron.indexing.dao.search.AlertComment;
import org.apache.metron.indexing.dao.update.CommentAddRemoveRequest;
import org.apache.metron.rest.service.UpdateService;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.http.MediaType;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.MvcResult;
import org.springframework.test.web.servlet.ResultActions;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.web.context.WebApplicationContext;
@RunWith(SpringRunner.class)
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT)
@ActiveProfiles(TEST_PROFILE)
public class UpdateControllerIntegrationTest extends DaoControllerTest {
  // Every request and response in these tests uses the same content type; parse the
  // literal once instead of rebuilding it at each call site.
  private static final MediaType JSON_UTF8 = MediaType.parseMediaType("application/json;charset=UTF-8");
  // Endpoint and credential values never change per-instance, so they are constants.
  private static final String UPDATE_URL = "/api/v1/update";
  private static final String SEARCH_URL = "/api/v1/search";
  private static final String USER = "user";
  private static final String PASSWORD = "password";
  private static final String META_ALERT_INDEX = "metaalert_index";
  @Autowired
  private UpdateService updateService;
  @Autowired
  public CuratorFramework client;
  @Autowired
  private WebApplicationContext wac;
  private MockMvc mockMvc;
  // NOTE: the javadoc bodies below are not documentation. @Multiline turns each
  // comment's text into the value of the annotated field at compile time, so the
  // comment contents must be left exactly as written.
  /**
   {
     "guid" : "bro_2",
     "sensorType" : "bro"
   }
   */
  @Multiline
  public static String findMessage0;
  /**
   {
     "guid" : "bro_2",
     "sensorType" : "bro",
     "patch" : [
       {
         "op": "add"
         , "path": "/project"
         , "value": "metron"
       }
     ]
   }
   */
  @Multiline
  public static String patch;
  /**
   {
     "guid" : "bro_2",
     "sensorType" : "bro",
     "replacement" : {
       "source:type": "bro",
       "guid" : "bro_2",
       "ip_src_addr":"192.168.1.2",
       "ip_src_port": 8009,
       "timestamp":200,
       "rejected":false
     }
   }
   */
  @Multiline
  public static String replace;
  /**
   {
     "guid" : "bro_2",
     "sensorType" : "bro",
     "comment": "test_comment",
     "username" : "test_username",
     "timestamp":0
   }
   */
  @Multiline
  public static String addComment;
  /**
   {
     "guid" : "bro_2",
     "sensorType" : "bro",
     "comment": "test_comment",
     "username" : "test_username",
     "timestamp":0
   }
   */
  @Multiline
  public static String removeComment;

  /**
   * Builds a security-aware MockMvc and loads the bro/snort/meta-alert fixture
   * documents into the test indices before each test.
   */
  @Before
  public void setup() throws Exception {
    this.mockMvc = MockMvcBuilders.webAppContextSetup(this.wac).apply(springSecurity()).build();
    ImmutableMap<String, String> testData = ImmutableMap.of(
        "bro_index_2017.01.01.01", SearchIntegrationTest.broData,
        "snort_index_2017.01.01.01", SearchIntegrationTest.snortData,
        META_ALERT_INDEX, MetaAlertControllerIntegrationTest.metaAlertData
    );
    loadTestData(testData);
  }

  /**
   * End-to-end patch/replace round trip for document "bro_2":
   * verifies the document's initial state, applies a JSON patch adding a "project"
   * field, then replaces the whole document, checking both the search API response
   * and the backing HBase table after each step.
   * NOTE(review): TABLE and CF are referenced but not declared here — presumably
   * inherited from DaoControllerTest; confirm.
   */
  @Test
  public void test() throws Exception {
    String guid = "bro_2";
    // Baseline: the document exists and has no "project" field yet.
    ResultActions result = this.mockMvc.perform(post(SEARCH_URL + "/findOne").with(httpBasic(USER, PASSWORD)).with(csrf()).contentType(JSON_UTF8).content(findMessage0));
    try {
      result.andExpect(status().isOk())
              .andExpect(content().contentType(JSON_UTF8))
              .andExpect(jsonPath("$.source:type").value("bro"))
              .andExpect(jsonPath("$.guid").value(guid))
              .andExpect(jsonPath("$.project").doesNotExist())
              .andExpect(jsonPath("$.timestamp").value(2))
      ;
    }
    catch(Throwable t) {
      // Dump the raw response body so assertion failures are diagnosable.
      System.err.println(result.andReturn().getResponse().getContentAsString());
      throw t;
    }
    MockHTable table = (MockHTable) MockHBaseTableProvider.getFromCache(TABLE);
    // No updates yet, so nothing should have been written to HBase.
    Assert.assertEquals(0, table.size());
    // Apply the JSON patch that adds "project": "metron".
    this.mockMvc.perform(patch(UPDATE_URL + "/patch").with(httpBasic(USER, PASSWORD))
        .with(csrf())
        .contentType(JSON_UTF8)
        .content(patch)
    )
        .andExpect(status().isOk());
    this.mockMvc.perform(post(SEARCH_URL + "/findOne").with(httpBasic(USER, PASSWORD)).with(csrf()).contentType(JSON_UTF8).content(findMessage0))
        .andExpect(status().isOk())
        .andExpect(content().contentType(JSON_UTF8))
        .andExpect(jsonPath("$.source:type").value("bro"))
        .andExpect(jsonPath("$.guid").value(guid))
        .andExpect(jsonPath("$.project").value("metron"))
        .andExpect(jsonPath("$.timestamp").value(2))
    ;
    Assert.assertEquals(1, table.size());
    {
      // Ensure HBase is up to date: one column written for the patch.
      Get g = new Get(new HBaseDao.Key(guid, "bro").toBytes());
      Result r = table.get(g);
      NavigableMap<byte[], byte[]> columns = r.getFamilyMap(CF.getBytes());
      Assert.assertEquals(1, columns.size());
    }
    // Replace the whole document; the patched "project" field must disappear.
    this.mockMvc.perform(post(UPDATE_URL + "/replace").with(httpBasic(USER, PASSWORD))
        .with(csrf())
        .contentType(JSON_UTF8)
        .content(replace)
    )
        .andExpect(status().isOk());
    this.mockMvc.perform(post(SEARCH_URL + "/findOne").with(httpBasic(USER, PASSWORD)).with(csrf()).contentType(JSON_UTF8).content(findMessage0))
        .andExpect(status().isOk())
        .andExpect(content().contentType(JSON_UTF8))
        .andExpect(jsonPath("$.source:type").value("bro"))
        .andExpect(jsonPath("$.guid").value(guid))
        .andExpect(jsonPath("$.project").doesNotExist())
        .andExpect(jsonPath("$.timestamp").value(200))
    ;
    Assert.assertEquals(1, table.size());
    {
      // Ensure HBase is up to date: a second column written for the replace.
      Get g = new Get(new HBaseDao.Key(guid, "bro").toBytes());
      Result r = table.get(g);
      NavigableMap<byte[], byte[]> columns = r.getFamilyMap(CF.getBytes());
      Assert.assertEquals(2, columns.size());
    }
  }

  /** POST /add/comment returns 200 for a well-formed comment request. */
  @Test
  public void shouldAddComment() throws Exception {
    CommentAddRemoveRequest commentAddRemoveRequest = new CommentAddRemoveRequest();
    commentAddRemoveRequest.setGuid("bro_1");
    commentAddRemoveRequest.setSensorType("bro");
    commentAddRemoveRequest.setComment("test_comment");
    commentAddRemoveRequest.setUsername("test_username");
    commentAddRemoveRequest.setTimestamp(0L);
    updateService.addComment(commentAddRemoveRequest);
    ResultActions result = this.mockMvc.perform(
        post(UPDATE_URL + "/add/comment")
            .with(httpBasic(USER, PASSWORD)).with(csrf())
            .contentType(JSON_UTF8)
            .content(addComment));
    result.andExpect(status().isOk());
  }

  /** POST /remove/comment returns 200 for a well-formed comment request. */
  @Test
  public void shouldRemoveComment() throws Exception {
    CommentAddRemoveRequest commentAddRemoveRequest = new CommentAddRemoveRequest();
    commentAddRemoveRequest.setGuid("bro_1");
    commentAddRemoveRequest.setSensorType("bro");
    commentAddRemoveRequest.setComment("test_comment");
    commentAddRemoveRequest.setUsername("test_username");
    commentAddRemoveRequest.setTimestamp(0L);
    updateService.removeComment(commentAddRemoveRequest);
    ResultActions result = this.mockMvc.perform(
        post(UPDATE_URL + "/remove/comment")
            .with(httpBasic(USER, PASSWORD)).with(csrf())
            .contentType(JSON_UTF8)
            .content(removeComment));
    result.andExpect(status().isOk());
  }
}
| |
/**
*
*/
package graphene.dao.es.impl;
import graphene.business.commons.exception.DataAccessException;
import graphene.dao.DocumentBuilder;
import graphene.dao.es.BasicESDAO;
import graphene.dao.es.ESRestAPIConnection;
import graphene.dao.es.JestModule;
import graphene.model.idl.G_DataAccess;
import graphene.model.idl.G_DateRange;
import graphene.model.idl.G_DirectionFilter;
import graphene.model.idl.G_Entity;
import graphene.model.idl.G_EntityQuery;
import graphene.model.idl.G_LevelOfDetail;
import graphene.model.idl.G_Link;
import graphene.model.idl.G_LinkEntityTypeFilter;
import graphene.model.idl.G_LinkTag;
import graphene.model.idl.G_PropertyDescriptors;
import graphene.model.idl.G_SearchResult;
import graphene.model.idl.G_SearchResults;
import graphene.model.idl.G_SymbolConstants;
import graphene.model.idl.G_TransactionResults;
import graphene.util.validator.ValidationUtils;
import io.searchbox.client.JestResult;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.apache.avro.AvroRemoteException;
import org.apache.commons.collections.CollectionUtils;
import org.apache.tapestry5.ioc.annotations.Inject;
import org.apache.tapestry5.ioc.annotations.PostInjection;
import org.apache.tapestry5.ioc.annotations.Symbol;
import org.slf4j.Logger;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
/**
* This will become like the influent kiva dataAccessDataView class, which is a
* base implementation (in this case, for an ES backend)
*
*
*
* For G_EntityQuery, property match descriptors:
*
* TargetSchema = Index
*
* Key = type
*
* Variable = field
*
* @author djue
*
*/
public class CombinedDAOESImpl extends BasicESDAO implements G_DataAccess {
	// Search across all document types in the index.
	private static final String TYPE = "_all";
	@Inject
	@Symbol(JestModule.ES_DEFAULT_TIMEOUT)
	protected String defaultESTimeout;
	@Inject
	@Symbol(JestModule.ES_SEARCH_INDEX)
	private String indexName;
	@Inject
	@Symbol(JestModule.ES_SERVER)
	private String host;
	@Inject
	@Symbol(G_SymbolConstants.DEFAULT_MAX_SEARCH_RESULTS)
	private Long maxSearchResults;
	// Converts raw ES hits into G_SearchResult instances.
	@Inject
	private DocumentBuilder db;

	@Inject
	public CombinedDAOESImpl(final ESRestAPIConnection c, final Logger logger) {
		this.logger = logger;
		auth = null;
		this.c = c;
	}

	@Override
	public Map<String, List<G_Entity>> getAccounts(final List<String> entities) throws AvroRemoteException {
		// TODO Auto-generated method stub — not implemented for the ES backend yet.
		return null;
	}

	@Override
	public G_TransactionResults getAllTransactions(final G_EntityQuery q) throws AvroRemoteException {
		// TODO Auto-generated method stub — not implemented for the ES backend yet.
		return null;
	}

	@Override
	public G_PropertyDescriptors getDescriptors() throws AvroRemoteException {
		// TODO Auto-generated method stub — not implemented for the ES backend yet.
		return null;
	}

	@Override
	public List<G_Entity> getEntities(final List<String> entities, final G_LevelOfDetail levelOfDetail)
			throws AvroRemoteException {
		// TODO Auto-generated method stub — not implemented for the ES backend yet.
		return null;
	}

	@Override
	public Map<String, List<G_Link>> getFlowAggregation(final List<String> entities, final List<String> focusEntities,
			final G_DirectionFilter direction, final G_LinkEntityTypeFilter entityType, final G_LinkTag tag,
			final G_DateRange date) throws AvroRemoteException {
		// TODO Auto-generated method stub — not implemented for the ES backend yet.
		return null;
	}

	@Override
	public double getReadiness() {
		// This DAO is always considered fully ready.
		return 1.0;
	}

	@Override
	public Map<String, List<G_Link>> getTimeSeriesAggregation(final List<String> entities,
			final List<String> focusEntities, final G_LinkTag tag, final G_DateRange date) throws AvroRemoteException {
		// TODO Auto-generated method stub — not implemented for the ES backend yet.
		return null;
	}

	@Override
	@PostInjection
	public void initialize() {
		logger.debug("Doing specific initialization!");
		setIndex(indexName);
		setType(TYPE);
		super.initialize();
	}

	@Override
	public boolean isReady() {
		return true;
	}

	/**
	 * Executes the given query against Elasticsearch and converts the hits into
	 * {@link G_SearchResults} via the injected {@link DocumentBuilder}.
	 * <p>
	 * Never throws: on any search or parse failure the error is logged and an
	 * empty (but well-formed) result set is returned.
	 */
	@Override
	public G_SearchResults search(final G_EntityQuery pq) {
		// TODO: Use a helper class
		final List<G_SearchResult> resultsList = new ArrayList<G_SearchResult>();
		final G_SearchResults results = G_SearchResults.newBuilder().setTotal(0)
				.setResults(resultsList).build();
		JestResult jestResult = null;
		try {
			final io.searchbox.core.Search.Builder action = buildSearchAction(pq);
			jestResult = c.getClient().execute(action.build());
		} catch (final DataAccessException e) {
			logger.error("Could not build search action for query " + pq, e);
		} catch (final Exception e) {
			logger.error("Search execution failed for query " + pq, e);
		}
		if ((jestResult == null) || (jestResult.getJsonString() == null)) {
			// The search failed or produced no JSON. The previous implementation
			// fell through with an empty JestResult, which blew up when parsing
			// the null JSON string below; return an empty result set instead.
			return results;
		}
		long totalNumberOfPossibleResults = 0L;
		try {
			final JsonNode rootNode = mapper.readValue(jestResult.getJsonString(), JsonNode.class);
			// "hits.total" is the total number of matches, which may exceed the
			// number of hits actually returned in this page.
			if ((rootNode != null) && (rootNode.get("hits") != null) && (rootNode.get("hits").get("total") != null)) {
				totalNumberOfPossibleResults = rootNode.get("hits").get("total").asLong();
				logger.debug("Found " + totalNumberOfPossibleResults + " hits in hitparent!");
				final ArrayNode actualListOfHits = getListOfHitsFromResult(jestResult);
				for (int i = 0; i < actualListOfHits.size(); i++) {
					final JsonNode currentHit = actualListOfHits.get(i);
					if (ValidationUtils.isValid(currentHit)) {
						final G_SearchResult result = db.buildSearchResultFromDocument(i, currentHit, pq);
						if (result == null) {
							logger.error("could not build search result from hit " + currentHit.toString());
						}
						// addIgnoreNull skips hits the builder could not convert.
						CollectionUtils.addIgnoreNull(resultsList, result);
					} else {
						logger.error("Invalid search result at index " + i + " for query " + pq.toString());
					}
				}
			}
		} catch (final IOException e) {
			logger.error("Could not parse search response for query " + pq, e);
		}
		results.setResults(resultsList);
		results.setTotal(totalNumberOfPossibleResults);
		return results;
	}

	@Override
	public Void setReady(final boolean b) {
		// TODO Auto-generated method stub — readiness is fixed; see isReady().
		return null;
	}
}
| |
package org.javacs;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.assertThat;
import java.net.URI;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import org.javacs.lsp.*;
import org.junit.Test;
/**
 * Go-to-definition integration tests. Each test asks the language server for the
 * definition of the symbol at a 1-based (row, column) position in a fixture file and
 * asserts the expected "FileName.java:line[,column]" target. The expected coordinates
 * are tied to the exact contents of the fixture files, so positions here must change
 * in lockstep with those files.
 */
public class GotoTest {
    // Primary fixture; several tests shadow this field with a local `var file`.
    private static final String file = "/org/javacs/example/Goto.java";
    private static final String defaultConstructorFile = "/org/javacs/example/GotoDefaultConstructor.java";
    @Test
    public void localVariable() {
        var suggestions = doGoto(file, 10, 9);
        assertThat(suggestions, hasItem("Goto.java:5"));
    }
    @Test
    public void defaultConstructor() {
        // No explicit constructor exists, so the target is the class declaration.
        var suggestions = doGoto(defaultConstructorFile, 5, 46);
        assertThat(suggestions, hasItem("GotoDefaultConstructor.java:3"));
    }
    @Test
    public void constructor() {
        var suggestions = doGoto(file, 11, 21);
        assertThat(suggestions, hasItem("Goto.java:3"));
    }
    @Test
    public void className() {
        var suggestions = doGoto(file, 16, 9);
        assertThat(suggestions, hasItem("Goto.java:3"));
    }
    @Test
    public void staticField() {
        var suggestions = doGoto(file, 13, 22);
        assertThat(suggestions, hasItem("Goto.java:36"));
    }
    @Test
    public void field() {
        var suggestions = doGoto(file, 14, 22);
        assertThat(suggestions, hasItem("Goto.java:37"));
    }
    @Test
    public void staticMethod() {
        var suggestions = doGoto(file, 16, 14);
        assertThat(suggestions, hasItem("Goto.java:38"));
    }
    @Test
    public void method() {
        var suggestions = doGoto(file, 17, 14);
        assertThat(suggestions, hasItem("Goto.java:41"));
    }
    @Test
    public void staticMethodReference() {
        var suggestions = doGoto(file, 19, 27);
        assertThat(suggestions, hasItem("Goto.java:38"));
    }
    @Test
    public void methodReference() {
        var suggestions = doGoto(file, 20, 27);
        assertThat(suggestions, hasItem("Goto.java:41"));
    }
    @Test
    public void otherStaticMethod() {
        var suggestions = doGoto(file, 29, 25);
        assertThat(suggestions, hasItem(startsWith("GotoOther.java:")));
    }
    @Test
    public void otherMethod() {
        var suggestions = doGoto(file, 30, 18);
        assertThat(suggestions, hasItem(startsWith("GotoOther.java:")));
    }
    // NOTE(review): identical position and expectation as otherStaticMethod above;
    // presumably meant to exercise an already-compiled file on a second lookup —
    // confirm whether a distinct position was intended.
    @Test
    public void otherCompiledFile() {
        var suggestions = doGoto(file, 29, 25);
        assertThat(suggestions, hasItem(startsWith("GotoOther.java:")));
    }
    @Test
    public void constructorInOtherFile() {
        var suggestions = doGoto(file, 24, 17);
        assertThat(suggestions, hasItem("GotoOther.java:12"));
    }
    @Test
    public void typeParam() {
        var suggestions = doGoto(file, 46, 12);
        assertThat(suggestions, hasItem("Goto.java:3"));
    }
    @Test
    public void gotoEnum() {
        var file = "/org/javacs/example/GotoEnum.java";
        assertThat(doGoto(file, 5, 30), hasItem("GotoEnum.java:8"));
        assertThat(doGoto(file, 5, 35), hasItem("GotoEnum.java:9"));
    }
    @Test
    public void gotoOverload() {
        var file = "/org/javacs/example/GotoOverload.java";
        assertThat(doGoto(file, 7, 12), hasItem("GotoOverload.java:4"));
        assertThat(doGoto(file, 8, 12), hasItem("GotoOverload.java:12"));
        assertThat(doGoto(file, 9, 12), hasItem("GotoOverload.java:16"));
    }
    @Test
    public void gotoOverloadInOtherFile() {
        var file = "/org/javacs/example/GotoOverloadInOtherFile.java";
        assertThat(doGoto(file, 5, 25), hasItem("GotoOverload.java:4"));
        assertThat(doGoto(file, 6, 25), hasItem("GotoOverload.java:12"));
        assertThat(doGoto(file, 7, 25), hasItem("GotoOverload.java:16"));
    }
    @Test
    public void gotoImplementation() {
        var file = "/org/javacs/example/GotoImplementation.java";
        assertThat(doGoto(file, 5, 18), hasItems("GotoImplementation.java:9"));
        // assertThat(doGoto(file, 5, 18), hasItems("GotoImplementation.java:9", "GotoImplementation.java:14"));
    }
    // The two tests below assert `empty()` — the disabled assertions show the
    // behavior that is presumably intended once JDK-class navigation works.
    @Test
    public void gotoImplementsRunnable() {
        var file = "/org/javacs/example/GotoOtherPackageMethod.java";
        assertThat(doGoto(file, 7, 12), empty());
        // assertThat(doGoto(file, 7, 12), hasItem("ImplementsRunnable.java:5"));
    }
    @Test
    public void gotoImplementsConsumer() {
        var file = "/org/javacs/example/GotoOtherPackageMethod.java";
        assertThat(doGoto(file, 11, 12), empty());
        // assertThat(doGoto(file, 11, 12), hasItem("ImplementsConsumer.java:7"));
    }
    @Test
    public void gotoError() {
        // Unresolvable symbol: the server should return no locations, not fail.
        var file = "/org/javacs/example/GotoError.java";
        assertThat(doGoto(file, 5, 22), empty());
    }
    @Test
    public void gotoSingleChar() {
        var file = "/org/javacs/example/GotoSingleChar.java";
        assertThat(doGoto(file, 6, 28, true), hasItem("GotoSingleChar.java:5,16"));
    }
    @Test
    public void gotoInterface() {
        var file = "/org/javacs/example/GotoInterface.java";
        assertThat(doGoto(file, 3, 40, false), hasItem("GotoInterfaceInterface.java:3"));
    }
    @Test
    public void gotoMismatchedMethod() {
        var file = "/org/javacs/example/GotoMismatchedMethod.java";
        assertThat(doGoto(file, 5, 12, false), hasItems("GotoMismatchedMethod.java:7", "GotoMismatchedMethod.java:8"));
    }
    @Test
    public void packagePrivate() {
        // There is a separate bug where javac doesn't find package-private classes in files with different names.
        // This is tested in WarningsTest#referencePackagePrivateClassInFileWithDifferentName
        var warmup = doGoto("/org/javacs/example/ContainsGotoPackagePrivate.java", 4, 29);
        assertThat(warmup, not(empty()));
        var suggestions = doGoto(file, 50, 42);
        assertThat(suggestions, hasItem("ContainsGotoPackagePrivate.java:4"));
    }
    @Test
    public void gsonSourceJar() {
        // Definition target lives inside a sources jar, exercising the jar: URI path.
        var file = "/org/javacs/example/GotoGuava.java";
        assertThat(doGoto(file, 7, 15, false), hasItem("Gson.java:105"));
    }
    // One server instance is shared by all tests in this class.
    private static final JavaLanguageServer server = LanguageServerFixture.getJavaLanguageServer();
    // Convenience overload: format results as "FileName.java:line" (no column).
    private List<String> doGoto(String file, int row, int column) {
        return doGoto(file, row, column, false);
    }
    /**
     * Issues a textDocument/definition request at the given 1-based (row, column)
     * and formats each returned location as "FileName.java:line" or, when
     * includeColumn is set, "FileName.java:line,column" (both 1-based).
     */
    private List<String> doGoto(String file, int row, int column, boolean includeColumn) {
        TextDocumentIdentifier document = new TextDocumentIdentifier();
        document.uri = FindResource.uri(file);
        Position position = new Position();
        // LSP positions are 0-based; the test inputs are 1-based.
        position.line = row - 1;
        position.character = column - 1;
        TextDocumentPositionParams p = new TextDocumentPositionParams();
        p.textDocument = document;
        p.position = position;
        var locations = server.gotoDefinition(p).orElse(List.of());
        var strings = new ArrayList<String>();
        for (var l : locations) {
            var fileName = path(l.uri).getFileName();
            var start = l.range.start;
            if (includeColumn) {
                strings.add(String.format("%s:%d,%d", fileName, start.line + 1, start.character + 1));
            } else {
                strings.add(String.format("%s:%d", fileName, start.line + 1));
            }
        }
        return strings;
    }
    // Maps a location URI to a filesystem path so getFileName() works for both
    // plain files and entries inside source jars.
    private Path path(URI uri) {
        switch (uri.getScheme()) {
            case "file":
                return Paths.get(uri);
            case "jar":
                // Strip the embedded "file://" prefix from e.g. jar:file:///x.jar!/entry.
                // NOTE(review): assumes the scheme-specific part always starts with
                // exactly "file://" — confirm for Windows-style jar URIs.
                return Paths.get(uri.getSchemeSpecificPart().substring("file://".length()));
            default:
                throw new RuntimeException("Don't know what to do with " + uri.getScheme());
        }
    }
}
| |
/*
* Copyright 2008-2010 biaoping.yin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.frameworkset.web.servlet.view;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.activation.FileTypeMap;
import javax.activation.MimetypesFileTypeMap;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.frameworkset.http.MediaType;
import org.frameworkset.util.Assert;
import org.frameworkset.util.ClassUtils;
import org.frameworkset.util.CollectionUtils;
import org.frameworkset.util.io.ClassPathResource;
import org.frameworkset.util.io.Resource;
import org.frameworkset.web.servlet.context.RequestAttributes;
import org.frameworkset.web.servlet.context.RequestContextHolder;
import org.frameworkset.web.servlet.context.ServletRequestAttributes;
import org.frameworkset.web.servlet.support.WebApplicationObjectSupport;
import org.frameworkset.web.util.UrlPathHelper;
import org.frameworkset.web.util.WebUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.frameworkset.util.StringUtil;
/**
* Implementation of {@link ViewResolver} that resolves a view based on the request file name or {@code Accept} header.
*
* <p>The {@code ContentNegotiatingViewResolver} does not resolve views itself, but delegates to other {@link
* ViewResolver}s. By default, these other view resolvers are picked up automatically from the application context,
* though they can also be set explicitly by using the {@link #setViewResolvers(List) viewResolvers} property.
* <strong>Note</strong> that in order for this view resolver to work properly, the {@link #setOrder(int) order}
* property needs to be set to a higher precedence than the others (the default is {@link Ordered#HIGHEST_PRECEDENCE}.)
*
* <p>This view resolver uses the requested {@linkplain MediaType media type} to select a suitable {@link View} for a
* request. This media type is determined by using the following criteria:
* <ol>
* <li>If the requested path has a file extension and if the {@link #setFavorPathExtension(boolean)} property is
* {@code true}, the {@link #setMediaTypes(Map) mediaTypes} property is inspected for a matching media type.</li>
* <li>If the request contains a parameter defining the extension and if the {@link #setFavorParameter(boolean)}
* property is <code>true</code>, the {@link #setMediaTypes(Map) mediaTypes} property is inspected for a matching
* media type. The default name of the parameter is <code>format</code> and it can be configured using the
* {@link #setParameterName(String) parameterName} property.</li>
* <li>If there is no match in the {@link #setMediaTypes(Map) mediaTypes} property and if the Java Activation
* Framework (JAF) is both {@linkplain #setUseJaf(boolean) enabled} and present on the class path,
* {@link FileTypeMap#getContentType(String)} is used instead.</li>
* <li>If the previous steps did not result in a media type, and
* {@link #setIgnoreAcceptHeader(boolean) ignoreAcceptHeader} is {@code false}, the request {@code Accept} header is
* used.</li>
* </ol>
*
* Once the requested media type has been determined, this resolver queries each delegate view resolver for a
* {@link View} and determines if the requested media type is {@linkplain MediaType#includes(MediaType) compatible}
* with the view's {@linkplain View#getContentType() content type}). The most compatible view is returned.
*
* <p>Additionally, this view resolver exposes the {@link #setDefaultViews(List) defaultViews} property, allowing you to
 * override the views provided by the view resolvers. Note that these default views are offered as candidates, and
 * still need to have the content type requested (via file extension, parameter, or {@code Accept} header, described above).
* You can also set the {@linkplain #setDefaultContentType(MediaType) default content type} directly, which will be
* returned when the other mechanisms ({@code Accept} header, file extension or parameter) do not result in a match.
*
* <p>For example, if the request path is {@code /view.html}, this view resolver will look for a view that has the
* {@code text/html} content type (based on the {@code html} file extension). A request for {@code /view} with a {@code
* text/html} request {@code Accept} header has the same result.
*
* @author Arjen Poutsma
* @author Juergen Hoeller
* @since 3.0
* @see ViewResolver
* @see InternalResourceViewResolver
* @see BeanNameViewResolver
*/
public class ContentNegotiatingViewResolver extends WebApplicationObjectSupport implements ViewResolver {
private final static Logger logger = LoggerFactory.getLogger(ContentNegotiatingViewResolver.class);
private static final String ACCEPT_HEADER = "Accept";
private static final boolean jafPresent =
ClassUtils.isPresent("javax.activation.FileTypeMap", ContentNegotiatingViewResolver.class.getClassLoader());
private static final UrlPathHelper urlPathHelper = new UrlPathHelper();
private boolean favorPathExtension = true;
private boolean favorParameter = false;
private String parameterName = "format";
private boolean useNotAcceptableStatusCode = false;
private boolean ignoreAcceptHeader = false;
private boolean useJaf = true;
private ConcurrentMap<String, MediaType> mediaTypes = new ConcurrentHashMap<String, MediaType>();
private List<View> defaultViews;
private MediaType defaultContentType;
private List<ViewResolver> viewResolvers;
	/**
	 * Indicates whether the extension of the request path should be used to determine the requested media type,
	 * in favor of looking at the {@code Accept} header. The default value is {@code true}.
	 * <p>For instance, when this flag is <code>true</code> (the default), a request for {@code /hotels.pdf}
	 * will result in an {@code AbstractPdfView} being resolved, while the {@code Accept} header can be the
	 * browser-defined {@code text/html,application/xhtml+xml}.
	 * @param favorPathExtension whether to favor the path extension over the {@code Accept} header
	 */
	public void setFavorPathExtension(boolean favorPathExtension) {
		this.favorPathExtension = favorPathExtension;
	}
	/**
	 * Indicates whether a request parameter should be used to determine the requested media type,
	 * in favor of looking at the {@code Accept} header. The default value is {@code false}.
	 * <p>For instance, when this flag is <code>true</code>, a request for {@code /hotels?format=pdf} will result
	 * in an {@code AbstractPdfView} being resolved, while the {@code Accept} header can be the browser-defined
	 * {@code text/html,application/xhtml+xml}.
	 * @param favorParameter whether to favor the request parameter over the {@code Accept} header
	 */
	public void setFavorParameter(boolean favorParameter) {
		this.favorParameter = favorParameter;
	}
	/**
	 * Sets the parameter name that can be used to determine the requested media type if the {@link
	 * #setFavorParameter(boolean)} property is {@code true}. The default parameter name is {@code format}.
	 * @param parameterName the name of the request parameter carrying the format
	 */
	public void setParameterName(String parameterName) {
		this.parameterName = parameterName;
	}
	/**
	 * Indicates whether the HTTP {@code Accept} header should be ignored. Default is {@code false}.
	 * If set to {@code true}, this view resolver will only refer to the file extension and/or parameter,
	 * as indicated by the {@link #setFavorPathExtension(boolean) favorPathExtension} and
	 * {@link #setFavorParameter(boolean) favorParameter} properties.
	 * @param ignoreAcceptHeader whether to ignore the {@code Accept} header entirely
	 */
	public void setIgnoreAcceptHeader(boolean ignoreAcceptHeader) {
		this.ignoreAcceptHeader = ignoreAcceptHeader;
	}
	/**
	 * Indicates whether a {@link HttpServletResponse#SC_NOT_ACCEPTABLE 406 Not Acceptable} status code should be
	 * returned if no suitable view can be found.
	 *
	 * <p>Default is {@code false}, meaning that this view resolver returns {@code null} for
	 * {@link #resolveViewName(String, Locale)} when an acceptable view cannot be found. This will allow for view
	 * resolvers chaining. When this property is set to {@code true},
	 * {@link #resolveViewName(String, Locale)} will respond with a view that sets the response status to
	 * {@code 406 Not Acceptable} instead.
	 * @param useNotAcceptableStatusCode whether to respond with 406 instead of returning {@code null}
	 */
	public void setUseNotAcceptableStatusCode(boolean useNotAcceptableStatusCode) {
		this.useNotAcceptableStatusCode = useNotAcceptableStatusCode;
	}
/**
* Sets the mapping from file extensions to media types.
* <p>When this mapping is not set or when an extension is not present, this view resolver
* will fall back to using a {@link FileTypeMap} when the Java Action Framework is available.
*/
public void setMediaTypes(Map<String, String> mediaTypes) {
Assert.notNull(mediaTypes, "'mediaTypes' must not be null");
for (Map.Entry<String, String> entry : mediaTypes.entrySet()) {
String extension = entry.getKey().toLowerCase(Locale.ENGLISH);
MediaType mediaType = MediaType.parseMediaType(entry.getValue());
this.mediaTypes.put(extension, mediaType);
}
}
	/**
	 * Sets the default views to use when a more specific view can not be obtained
	 * from the {@link ViewResolver} chain.
	 * @param defaultViews the fallback views to offer as candidates
	 */
	public void setDefaultViews(List<View> defaultViews) {
		this.defaultViews = defaultViews;
	}
	/**
	 * Sets the default content type.
	 * <p>This content type will be used when file extension, parameter, nor {@code Accept}
	 * header define a content-type, either through being disabled or empty.
	 * @param defaultContentType the media type to fall back to when no other mechanism matches
	 */
	public void setDefaultContentType(MediaType defaultContentType) {
		this.defaultContentType = defaultContentType;
	}
	/**
	 * Indicates whether to use the Java Activation Framework to map from file extensions to media types.
	 * <p>Default is {@code true}, i.e. the Java Activation Framework is used (if available).
	 * @param useJaf whether to consult the Java Activation Framework for unknown extensions
	 */
	public void setUseJaf(boolean useJaf) {
		this.useJaf = useJaf;
	}
	/**
	 * Sets the view resolvers to be wrapped by this view resolver.
	 * <p>If this property is not set, view resolvers will be detected automatically.
	 * NOTE(review): the automatic detection in {@code initServletContext} is currently
	 * commented out, so in practice this property must be configured explicitly.
	 * @param viewResolvers the delegate resolvers to query, in order
	 */
	public void setViewResolvers(List<ViewResolver> viewResolvers) {
		this.viewResolvers = viewResolvers;
	}
@Override
protected void initServletContext(ServletContext servletContext) {
if (this.viewResolvers == null) {
// Map<String, ViewResolver> matchingBeans =
// BeanFactoryUtils.beansOfTypeIncludingAncestors(getApplicationContext(), ViewResolver.class);
// this.viewResolvers = new ArrayList<ViewResolver>(matchingBeans.size());
// for (ViewResolver viewResolver : matchingBeans.values()) {
// if (this != viewResolver) {
// this.viewResolvers.add(viewResolver);
// }
// }
}
if (this.viewResolvers.isEmpty()) {
logger.warn("Did not find any ViewResolvers to delegate to; please configure them using the " +
"'viewResolvers' property on the ContentNegotiatingViewResolver");
}
// OrderComparator.sort(this.viewResolvers);
}
/**
 * Determines the list of {@link MediaType} for the given {@link HttpServletRequest}.
 * <p>The default implementation invokes {@link #getMediaTypeFromFilename(String)} if the
 * {@linkplain #setFavorPathExtension(boolean) favorPathExtension} property is {@code true}.
 * If the property is {@code false}, or when a media type cannot be determined from the
 * request path, this method checks the media type request parameter, then the {@code Accept}
 * header of the request, and finally the configured default content type.
 * <p>This method can be overridden to provide a different algorithm.
 * @param request the current servlet request
 * @return the list of media types requested, if any; never {@code null}
 */
protected List<MediaType> getMediaTypes(HttpServletRequest request) {
    if (this.favorPathExtension) {
        // 1. Derive the media type from the file extension of the request path.
        String requestUri = urlPathHelper.getRequestUri(request);
        String filename = WebUtils.extractFullFilenameFromUrlPath(requestUri);
        MediaType mediaType = getMediaTypeFromFilename(filename);
        if (mediaType != null) {
            if (logger.isDebugEnabled()) {
                logger.debug("Requested media type is '" + mediaType + "' (based on filename '" + filename + "')");
            }
            return Collections.singletonList(mediaType);
        }
    }
    if (this.favorParameter) {
        // 2. Try the format request parameter (read it once instead of twice).
        String parameterValue = request.getParameter(this.parameterName);
        if (parameterValue != null) {
            MediaType mediaType = getMediaTypeFromParameter(parameterValue);
            if (mediaType != null) {
                if (logger.isDebugEnabled()) {
                    logger.debug("Requested media type is '" + mediaType + "' (based on parameter '" +
                            this.parameterName + "'='" + parameterValue + "')");
                }
                return Collections.singletonList(mediaType);
            }
        }
    }
    if (!this.ignoreAcceptHeader) {
        // 3. Fall back to standard HTTP content negotiation via the Accept header.
        String acceptHeader = request.getHeader(ACCEPT_HEADER);
        if (StringUtil.hasText(acceptHeader)) {
            List<MediaType> mediaTypes = MediaType.parseMediaTypes(acceptHeader);
            MediaType.sortByQualityValue(mediaTypes);
            if (logger.isDebugEnabled()) {
                logger.debug("Requested media types are " + mediaTypes + " (based on Accept header)");
            }
            return mediaTypes;
        }
    }
    if (this.defaultContentType != null) {
        // 4. Use the configured default content type, if any.
        if (logger.isDebugEnabled()) {
            logger.debug("Requested media types is " + this.defaultContentType +
                    " (based on defaultContentType property)");
        }
        return Collections.singletonList(this.defaultContentType);
    }
    return Collections.emptyList();
}
/**
 * Resolves the {@link MediaType} for the given file name.
 * <p>The lowercased file extension is looked up in the configured
 * {@linkplain #setMediaTypes(Map) media type map} first. If no mapping is
 * found and the Java Activation Framework is enabled and present, JAF is
 * consulted as a fallback and any discovered mapping is cached.
 * <p>This method can be overridden to provide a different algorithm.
 * @param filename the current request file name (i.e. {@code hotels.html})
 * @return the media type, or {@code null} if none could be determined
 */
protected MediaType getMediaTypeFromFilename(String filename) {
    String extension = StringUtil.getFilenameExtension(filename);
    if (!StringUtil.hasText(extension)) {
        return null;
    }
    String key = extension.toLowerCase(Locale.ENGLISH);
    MediaType mediaType = this.mediaTypes.get(key);
    if (mediaType != null || !this.useJaf || !jafPresent) {
        return mediaType;
    }
    MediaType jafType = ActivationMediaTypeFactory.getMediaType(filename);
    if (jafType != null) {
        // Cache the JAF-discovered mapping for subsequent lookups.
        this.mediaTypes.putIfAbsent(key, jafType);
    }
    return jafType;
}
/**
 * Resolves the {@link MediaType} for the given format parameter value by a
 * case-insensitive lookup against the configured
 * {@linkplain #setMediaTypes(Map) media type map}.
 * <p>This method can be overridden to provide a different algorithm.
 * @param parameterValue the parameter value (i.e. {@code pdf})
 * @return the matching media type, or {@code null} if none is mapped
 */
protected MediaType getMediaTypeFromParameter(String parameterValue) {
    String key = parameterValue.toLowerCase(Locale.ENGLISH);
    return this.mediaTypes.get(key);
}
/**
 * Resolves the view by content negotiation: determines the requested media
 * types for the current request, gathers candidate views from the delegate
 * resolvers, and returns the best match.
 * @return the best matching view; the 406 sentinel view when
 * {@code useNotAcceptableStatusCode} is enabled and nothing matched;
 * otherwise {@code null}
 */
public View resolveViewName(String viewName, Locale locale) throws Exception {
    RequestAttributes attrs = RequestContextHolder.getRequestAttributes();
    Assert.isInstanceOf(ServletRequestAttributes.class, attrs);
    HttpServletRequest request = ((ServletRequestAttributes) attrs).getRequest();
    List<MediaType> requestedMediaTypes = getMediaTypes(request);
    List<View> candidateViews = getCandidateViews(viewName, locale, requestedMediaTypes);
    View bestView = getBestView(candidateViews, requestedMediaTypes);
    if (bestView != null) {
        return bestView;
    }
    if (this.useNotAcceptableStatusCode) {
        if (logger.isDebugEnabled()) {
            logger.debug("No acceptable view found; returning 406 (Not Acceptable) status code");
        }
        return NOT_ACCEPTABLE_VIEW;
    }
    if (logger.isDebugEnabled()) {
        logger.debug("No acceptable view found; returning null");
    }
    return null;
}
/**
 * Collects candidate views for the given view name: each delegate resolver
 * is asked for the plain name plus one "name.ext" variant per file extension
 * mapped to each requested media type. The configured default views, if any,
 * are appended at the end.
 */
private List<View> getCandidateViews(String viewName, Locale locale, List<MediaType> requestedMediaTypes)
        throws Exception {
    List<View> candidates = new ArrayList<View>();
    for (ViewResolver resolver : this.viewResolvers) {
        View resolved = resolver.resolveViewName(viewName, locale);
        if (resolved != null) {
            candidates.add(resolved);
        }
        for (MediaType requestedMediaType : requestedMediaTypes) {
            for (String extension : getExtensionsForMediaType(requestedMediaType)) {
                View extensionView = resolver.resolveViewName(viewName + "." + extension, locale);
                if (extensionView != null) {
                    candidates.add(extensionView);
                }
            }
        }
    }
    if (!CollectionUtils.isEmpty(this.defaultViews)) {
        candidates.addAll(this.defaultViews);
    }
    return candidates;
}
/**
 * Returns every configured file extension whose mapped media type is
 * included by (compatible with) the requested media type.
 */
private List<String> getExtensionsForMediaType(MediaType requestedMediaType) {
    List<String> extensions = new ArrayList<String>();
    for (Entry<String, MediaType> mapping : mediaTypes.entrySet()) {
        if (requestedMediaType.includes(mapping.getValue())) {
            extensions.add(mapping.getKey());
        }
    }
    return extensions;
}
/**
 * Picks the first candidate view whose declared content type is included by
 * one of the requested media types, honoring the quality-sorted order of the
 * requested types.
 * @return the best matching view, or {@code null} if none matches
 */
private View getBestView(List<View> candidateViews, List<MediaType> requestedMediaTypes) {
    for (MediaType requestedMediaType : requestedMediaTypes) {
        for (View candidate : candidateViews) {
            String contentType = candidate.getContentType();
            if (!StringUtil.hasText(contentType)) {
                continue;
            }
            MediaType candidateContentType = MediaType.parseMediaType(contentType);
            if (requestedMediaType.includes(candidateContentType)) {
                if (logger.isDebugEnabled()) {
                    logger.debug(
                            "Returning [" + candidate + "] based on requested media type '" + requestedMediaType +
                            "'");
                }
                return candidate;
            }
        }
    }
    return null;
}
/**
 * Inner class isolating the Java Activation Framework dependency so that
 * the outer class does not hard-code it; only loaded when JAF is present.
 */
private static class ActivationMediaTypeFactory {
    /** File-name-to-content-type map, initialized once at class load. */
    private static final FileTypeMap fileTypeMap = loadFileTypeMapFromContextSupportModule();

    /**
     * Loads the extended mime.types mapping from the classpath when
     * available, otherwise falls back to the JAF default map.
     */
    private static FileTypeMap loadFileTypeMapFromContextSupportModule() {
        // see if we can find the extended mime.types from the context-support module
        Resource mappingLocation = new ClassPathResource("org/frameworkset/web/servlet/mime.types");
        if (mappingLocation.exists()) {
            logger.info("Loading Java Activation Framework FileTypeMap from " + mappingLocation);
            InputStream inputStream = null;
            try {
                inputStream = mappingLocation.getInputStream();
                return new MimetypesFileTypeMap(inputStream);
            }
            catch (IOException ex) {
                // fall through and use the default map below
            }
            finally {
                if (inputStream != null) {
                    try {
                        inputStream.close();
                    }
                    catch (IOException ex) {
                        // ignore close failures
                    }
                }
            }
        }
        logger.info("Loading default Java Activation Framework FileTypeMap");
        return FileTypeMap.getDefaultFileTypeMap();
    }

    /**
     * Returns the media type JAF reports for the file name, or {@code null}
     * when no usable content type is reported.
     */
    public static MediaType getMediaType(String fileName) {
        String mediaType = fileTypeMap.getContentType(fileName);
        return StringUtil.hasText(mediaType) ? MediaType.parseMediaType(mediaType) : null;
    }
}
// Sentinel view returned when 'useNotAcceptableStatusCode' is enabled and no
// candidate view matches the requested media types. Rendering it only sets a
// 406 (Not Acceptable) status on the response and writes no body.
private static final View NOT_ACCEPTABLE_VIEW = new View() {
    // No content type: this view never produces a body.
    public String getContentType() {
        return null;
    }
    public void render(Map model, HttpServletRequest request, HttpServletResponse response)
        throws Exception {
        response.setStatus(HttpServletResponse.SC_NOT_ACCEPTABLE);
    }
};
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.datanode;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.security.PrivilegedExceptionAction;
import java.util.*;
import java.util.concurrent.CopyOnWriteArrayList;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.security.UserGroupInformation;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.slf4j.Logger;
/**
 * Manages the BPOfferService objects for the data node.
 * Creation, removal, starting, stopping, shutdown on BPOfferService
 * objects must be done via APIs in this class.
 */
@InterfaceAudience.Private
class BlockPoolManager {
  private static final Logger LOG = DataNode.LOG;

  // Lookup by nameservice id; guarded by 'this'.
  private final Map<String, BPOfferService> bpByNameserviceId =
      Maps.newHashMap();
  // Lookup by block pool id; guarded by 'this'.
  private final Map<String, BPOfferService> bpByBlockPoolId =
      Maps.newHashMap();
  // Copy-on-write so callers can iterate safely while services are
  // added or removed concurrently (see getAllNamenodeThreads()).
  private final List<BPOfferService> offerServices =
      new CopyOnWriteArrayList<>();

  private final DataNode dn;

  //This lock is used only to ensure exclusion of refreshNamenodes
  private final Object refreshNamenodesLock = new Object();

  BlockPoolManager(DataNode dn) {
    this.dn = dn;
  }

  /**
   * Indexes an already-registered BPOfferService by its block pool id so it
   * can be looked up via {@link #get(String)}.
   *
   * @throws IllegalArgumentException if the BPOS is unknown or has no
   *     block pool id yet
   */
  synchronized void addBlockPool(BPOfferService bpos) {
    Preconditions.checkArgument(offerServices.contains(bpos),
        "Unknown BPOS: %s", bpos);
    if (bpos.getBlockPoolId() == null) {
      throw new IllegalArgumentException("Null blockpool id");
    }
    bpByBlockPoolId.put(bpos.getBlockPoolId(), bpos);
  }

  /**
   * Returns a list of BPOfferService objects. The underlying list
   * implementation is a CopyOnWriteArrayList so it can be safely
   * iterated while BPOfferServices are being added or removed.
   *
   * Caution: The BPOfferService returned could be shutdown any time.
   */
  synchronized List<BPOfferService> getAllNamenodeThreads() {
    return Collections.unmodifiableList(offerServices);
  }

  /** Returns the BPOfferService registered for the block pool id, if any. */
  synchronized BPOfferService get(String bpid) {
    return bpByBlockPoolId.get(bpid);
  }

  /** Removes the given BPOfferService from all internal indexes. */
  synchronized void remove(BPOfferService t) {
    offerServices.remove(t);
    if (t.hasBlockPoolId()) {
      // It's possible that the block pool never successfully registered
      // with any NN, so it was never added to this map
      bpByBlockPoolId.remove(t.getBlockPoolId());
    }
    boolean removed = false;
    for (Iterator<BPOfferService> it = bpByNameserviceId.values().iterator();
         it.hasNext() && !removed;) {
      BPOfferService bpos = it.next();
      if (bpos == t) {
        it.remove();
        LOG.info("Removed " + bpos);
        removed = true;
      }
    }
    if (!removed) {
      LOG.warn("Couldn't remove BPOS " + t + " from bpByNameserviceId map");
    }
  }

  /** Stops the given services and waits for their threads to exit. */
  void shutDownAll(List<BPOfferService> bposList) throws InterruptedException {
    for (BPOfferService bpos : bposList) {
      bpos.stop(); //interrupts the threads
    }
    //now join
    for (BPOfferService bpos : bposList) {
      bpos.join();
    }
  }

  /**
   * Starts all registered BPOfferServices, running as the login user.
   *
   * @throws IOException if interrupted while starting; the interrupt status
   *     of the current thread is restored before throwing
   */
  synchronized void startAll() throws IOException {
    try {
      UserGroupInformation.getLoginUser().doAs(
          new PrivilegedExceptionAction<Object>() {
            @Override
            public Object run() throws Exception {
              for (BPOfferService bpos : offerServices) {
                bpos.start();
              }
              return null;
            }
          });
    } catch (InterruptedException ex) {
      // Restore the interrupt flag and preserve the full cause chain.
      // (Previously only ex.getCause() -- possibly null -- was kept and the
      // interrupt status was silently swallowed.)
      Thread.currentThread().interrupt();
      throw new IOException(
          "Interrupted while starting block pool offer services", ex);
    }
  }

  /** Waits for all BPOfferService threads to exit. */
  void joinAll() {
    for (BPOfferService bpos: this.getAllNamenodeThreads()) {
      bpos.join();
    }
  }

  /**
   * Reconciles the running BPOfferServices with the nameservices listed in
   * the given configuration: new nameservices are started, removed ones are
   * stopped, and changed ones have their NN list refreshed.
   */
  void refreshNamenodes(Configuration conf)
      throws IOException {
    LOG.info("Refresh request received for nameservices: " + conf.get
        (DFSConfigKeys.DFS_NAMESERVICES));
    Map<String, Map<String, InetSocketAddress>> newAddressMap = DFSUtil
        .getNNServiceRpcAddressesForCluster(conf);
    Map<String, Map<String, InetSocketAddress>> newLifelineAddressMap = DFSUtil
        .getNNLifelineRpcAddressesForCluster(conf);
    synchronized (refreshNamenodesLock) {
      doRefreshNamenodes(newAddressMap, newLifelineAddressMap);
    }
  }

  private void doRefreshNamenodes(
      Map<String, Map<String, InetSocketAddress>> addrMap,
      Map<String, Map<String, InetSocketAddress>> lifelineAddrMap)
      throws IOException {
    assert Thread.holdsLock(refreshNamenodesLock);
    Set<String> toRefresh = Sets.newLinkedHashSet();
    Set<String> toAdd = Sets.newLinkedHashSet();
    Set<String> toRemove;
    synchronized (this) {
      // Step 1. For each of the new nameservices, figure out whether
      // it's an update of the set of NNs for an existing NS,
      // or an entirely new nameservice.
      for (String nameserviceId : addrMap.keySet()) {
        if (bpByNameserviceId.containsKey(nameserviceId)) {
          toRefresh.add(nameserviceId);
        } else {
          toAdd.add(nameserviceId);
        }
      }
      // Step 2. Any nameservices we currently have but are no longer present
      // need to be removed.
      toRemove = Sets.newHashSet(Sets.difference(
          bpByNameserviceId.keySet(), addrMap.keySet()));
      assert toRefresh.size() + toAdd.size() ==
          addrMap.size() :
          "toAdd: " + Joiner.on(",").useForNull("<default>").join(toAdd) +
          " toRemove: " + Joiner.on(",").useForNull("<default>").join(toRemove) +
          " toRefresh: " + Joiner.on(",").useForNull("<default>").join(toRefresh);
      // Step 3. Start new nameservices
      if (!toAdd.isEmpty()) {
        LOG.info("Starting BPOfferServices for nameservices: " +
            Joiner.on(",").useForNull("<default>").join(toAdd));
        for (String nsToAdd : toAdd) {
          Map<String, InetSocketAddress> nnIdToAddr = addrMap.get(nsToAdd);
          Map<String, InetSocketAddress> nnIdToLifelineAddr =
              lifelineAddrMap.get(nsToAdd);
          ArrayList<InetSocketAddress> addrs =
              Lists.newArrayListWithCapacity(nnIdToAddr.size());
          ArrayList<InetSocketAddress> lifelineAddrs =
              Lists.newArrayListWithCapacity(nnIdToAddr.size());
          for (String nnId : nnIdToAddr.keySet()) {
            addrs.add(nnIdToAddr.get(nnId));
            // A nameservice may have no lifeline addresses configured.
            lifelineAddrs.add(nnIdToLifelineAddr != null ?
                nnIdToLifelineAddr.get(nnId) : null);
          }
          BPOfferService bpos = createBPOS(addrs, lifelineAddrs);
          bpByNameserviceId.put(nsToAdd, bpos);
          offerServices.add(bpos);
        }
      }
      startAll();
    }
    // Step 4. Shut down old nameservices. This happens outside
    // of the synchronized(this) lock since they need to call
    // back to .remove() from another thread
    if (!toRemove.isEmpty()) {
      LOG.info("Stopping BPOfferServices for nameservices: " +
          Joiner.on(",").useForNull("<default>").join(toRemove));
      for (String nsToRemove : toRemove) {
        BPOfferService bpos = bpByNameserviceId.get(nsToRemove);
        bpos.stop();
        bpos.join();
        // they will call remove on their own
      }
    }
    // Step 5. Update nameservices whose NN list has changed
    if (!toRefresh.isEmpty()) {
      LOG.info("Refreshing list of NNs for nameservices: " +
          Joiner.on(",").useForNull("<default>").join(toRefresh));
      for (String nsToRefresh : toRefresh) {
        BPOfferService bpos = bpByNameserviceId.get(nsToRefresh);
        Map<String, InetSocketAddress> nnIdToAddr = addrMap.get(nsToRefresh);
        Map<String, InetSocketAddress> nnIdToLifelineAddr =
            lifelineAddrMap.get(nsToRefresh);
        ArrayList<InetSocketAddress> addrs =
            Lists.newArrayListWithCapacity(nnIdToAddr.size());
        ArrayList<InetSocketAddress> lifelineAddrs =
            Lists.newArrayListWithCapacity(nnIdToAddr.size());
        for (String nnId : nnIdToAddr.keySet()) {
          addrs.add(nnIdToAddr.get(nnId));
          lifelineAddrs.add(nnIdToLifelineAddr != null ?
              nnIdToLifelineAddr.get(nnId) : null);
        }
        bpos.refreshNNList(addrs, lifelineAddrs);
      }
    }
  }

  /**
   * Extracted out for test purposes.
   */
  protected BPOfferService createBPOS(List<InetSocketAddress> nnAddrs,
      List<InetSocketAddress> lifelineNnAddrs) {
    return new BPOfferService(nnAddrs, lifelineNnAddrs, dn);
  }
}
| |
/* $Id: MyApp.java 5102 2015-05-28 07:14:24Z riza $ */
/*
* Copyright (C) 2013 Teluu Inc. (http://www.teluu.com)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package org.pjsip.pjsua2.app;
import java.io.File;
import java.util.ArrayList;
import org.pjsip.pjsua2.*;
/** Observer contract that decouples the UI from the SIP engine. */
interface MyAppObserver
{
    /** Account registration state changed. */
    void notifyRegState(pjsip_status_code code, String reason,
                        int expiration);
    /** A new incoming call has arrived. */
    void notifyIncomingCall(MyCall call);
    /** Call state (e.g. connecting, confirmed, disconnected) changed. */
    void notifyCallState(MyCall call);
    /** Call media state (audio/video streams) changed. */
    void notifyCallMediaState(MyCall call);
    /** A buddy's presence state changed. */
    void notifyBuddyState(MyBuddy buddy);
}
/** Log writer that forwards pjsua2 log entries to standard output. */
class MyLogWriter extends LogWriter
{
    @Override
    public void write(LogEntry entry)
    {
        String msg = entry.getMsg();
        System.out.println(msg);
    }
}
/**
 * Call subclass that forwards call/media state changes to the application
 * observer, wires active audio media to the sound devices, and captures the
 * incoming video window when video becomes active.
 */
class MyCall extends Call
{
    // Incoming video render window; null until video media becomes active.
    public VideoWindow vidWin;
    // Local camera preview for the active video call.
    public VideoPreview vidPrev;

    MyCall(MyAccount acc, int call_id)
    {
        super(acc, call_id);
        vidWin = null;
    }

    /** Notifies the observer, then deletes this call once it has disconnected. */
    @Override
    public void onCallState(OnCallStateParam prm)
    {
        MyApp.observer.notifyCallState(this);
        try {
            CallInfo ci = getInfo();
            if (ci.getState() ==
                pjsip_inv_state.PJSIP_INV_STATE_DISCONNECTED)
            {
                // Release the native call object as soon as the call ends.
                this.delete();
            }
        } catch (Exception e) {
            // Call info may be unavailable if the call is already gone.
            return;
        }
    }

    /**
     * For each active media stream: connects audio to the capture/playback
     * devices, or records the video window/preview handles. Finally notifies
     * the observer.
     */
    @Override
    public void onCallMediaState(OnCallMediaStateParam prm)
    {
        CallInfo ci;
        try {
            ci = getInfo();
        } catch (Exception e) {
            return;
        }
        CallMediaInfoVector cmiv = ci.getMedia();
        for (int i = 0; i < cmiv.size(); i++) {
            CallMediaInfo cmi = cmiv.get(i);
            if (cmi.getType() == pjmedia_type.PJMEDIA_TYPE_AUDIO &&
                (cmi.getStatus() ==
                 pjsua_call_media_status.PJSUA_CALL_MEDIA_ACTIVE ||
                 cmi.getStatus() ==
                 pjsua_call_media_status.PJSUA_CALL_MEDIA_REMOTE_HOLD))
            {
                // unfortunately, on Java too, the returned Media cannot be
                // downcasted to AudioMedia
                Media m = getMedia(i);
                AudioMedia am = AudioMedia.typecastFromMedia(m);
                // connect ports: microphone -> remote, remote -> speaker
                try {
                    MyApp.ep.audDevManager().getCaptureDevMedia().
                        startTransmit(am);
                    am.startTransmit(MyApp.ep.audDevManager().
                        getPlaybackDevMedia());
                } catch (Exception e) {
                    // Skip this stream if the device wiring fails.
                    continue;
                }
            } else if (cmi.getType() == pjmedia_type.PJMEDIA_TYPE_VIDEO &&
                       cmi.getStatus() ==
                       pjsua_call_media_status.PJSUA_CALL_MEDIA_ACTIVE &&
                       cmi.getVideoIncomingWindowId() != pjsua2.INVALID_ID)
            {
                // Keep handles so the UI can show/hide the video surfaces.
                vidWin = new VideoWindow(cmi.getVideoIncomingWindowId());
                vidPrev = new VideoPreview(cmi.getVideoCapDev());
            }
        }
        MyApp.observer.notifyCallMediaState(this);
    }
}
/**
 * Account subclass that keeps its configuration and buddy list, and relays
 * registration, incoming-call and pager events to the application observer.
 */
class MyAccount extends Account
{
    public ArrayList<MyBuddy> buddyList = new ArrayList<MyBuddy>();
    public AccountConfig cfg;

    MyAccount(AccountConfig config)
    {
        super();
        cfg = config;
    }

    /**
     * Creates a buddy from the given config and, when requested, subscribes
     * to its presence.
     * @return the new buddy, or null if creation failed
     */
    public MyBuddy addBuddy(BuddyConfig bud_cfg)
    {
        MyBuddy bud = new MyBuddy(bud_cfg);
        try {
            bud.create(this, bud_cfg);
        } catch (Exception e) {
            // Creation failed: free the native object and report failure.
            bud.delete();
            return null;
        }
        buddyList.add(bud);
        if (bud_cfg.getSubscribe()) {
            try {
                bud.subscribePresence(true);
            } catch (Exception e) {
                // Presence subscription is best-effort; keep the buddy.
            }
        }
        return bud;
    }

    /** Removes the buddy from the list and frees its native object. */
    public void delBuddy(MyBuddy buddy)
    {
        buddyList.remove(buddy);
        buddy.delete();
    }

    /** Removes the buddy at the given index and frees its native object. */
    public void delBuddy(int index)
    {
        MyBuddy bud = buddyList.get(index);
        buddyList.remove(index);
        bud.delete();
    }

    @Override
    public void onRegState(OnRegStateParam prm)
    {
        MyApp.observer.notifyRegState(prm.getCode(), prm.getReason(),
                                      prm.getExpiration());
    }

    @Override
    public void onIncomingCall(OnIncomingCallParam prm)
    {
        System.out.println("======== Incoming call ======== ");
        MyCall call = new MyCall(this, prm.getCallId());
        MyApp.observer.notifyIncomingCall(call);
    }

    @Override
    public void onInstantMessage(OnInstantMessageParam prm)
    {
        System.out.println("======== Incoming pager ======== ");
        System.out.println("From : " + prm.getFromUri());
        System.out.println("To : " + prm.getToUri());
        System.out.println("Contact : " + prm.getContactUri());
        System.out.println("Mimetype : " + prm.getContentType());
        System.out.println("Body : " + prm.getMsgBody());
    }
}
/** Buddy subclass that keeps its config and reports presence changes to the UI. */
class MyBuddy extends Buddy
{
    public BuddyConfig cfg;

    MyBuddy(BuddyConfig config)
    {
        super();
        cfg = config;
    }

    /**
     * Returns a human-readable presence status: the buddy's own status text
     * or "Online"/"Offline"/"Unknown" while the presence subscription is
     * active, "" when it is not active, and "?" if the info is unavailable.
     */
    String getStatusText()
    {
        BuddyInfo bi;
        try {
            bi = getInfo();
        } catch (Exception e) {
            return "?";
        }
        if (bi.getSubState() != pjsip_evsub_state.PJSIP_EVSUB_STATE_ACTIVE) {
            return "";
        }
        if (bi.getPresStatus().getStatus() ==
            pjsua_buddy_status.PJSUA_BUDDY_STATUS_ONLINE)
        {
            String text = bi.getPresStatus().getStatusText();
            return (text == null || text.length() == 0) ? "Online" : text;
        }
        if (bi.getPresStatus().getStatus() ==
            pjsua_buddy_status.PJSUA_BUDDY_STATUS_OFFLINE)
        {
            return "Offline";
        }
        return "Unknown";
    }

    @Override
    public void onBuddyState()
    {
        MyApp.observer.notifyBuddyState(this);
    }
}
/** Persistable bundle of one account config plus its buddy configs. */
class MyAccountConfig
{
    public AccountConfig accCfg = new AccountConfig();
    public ArrayList<BuddyConfig> buddyCfgs = new ArrayList<BuddyConfig>();

    /** Reads the account and its buddy list from a document node (best effort). */
    public void readObject(ContainerNode node)
    {
        try {
            ContainerNode acc_node = node.readContainer("Account");
            accCfg.readObject(acc_node);
            ContainerNode buddies_node = acc_node.readArray("buddies");
            buddyCfgs.clear();
            while (buddies_node.hasUnread()) {
                BuddyConfig bud_cfg = new BuddyConfig();
                bud_cfg.readObject(buddies_node);
                buddyCfgs.add(bud_cfg);
            }
        } catch (Exception e) {
            // Best effort: keep whatever was read before the failure.
        }
    }

    /** Writes the account and its buddy list to a document node (best effort). */
    public void writeObject(ContainerNode node)
    {
        try {
            ContainerNode acc_node = node.writeNewContainer("Account");
            accCfg.writeObject(acc_node);
            ContainerNode buddies_node = acc_node.writeNewArray("buddies");
            for (BuddyConfig bud_cfg : buddyCfgs) {
                bud_cfg.writeObject(buddies_node);
            }
        } catch (Exception e) {
            // Best effort: ignore persistence errors.
        }
    }
}
/**
 * Application engine: owns the pjsua2 Endpoint, the account list, and the
 * persistent JSON configuration, and reports SIP events to a
 * {@link MyAppObserver}. Native library loading, endpoint initialization
 * and shutdown ordering are all handled here.
 */
class MyApp {
    static {
        // Video codec libraries are optional; the app still works audio-only
        // when they are missing.
        try{
            System.loadLibrary("openh264");
            System.loadLibrary("yuv");
        } catch (UnsatisfiedLinkError e) {
            System.out.println("UnsatisfiedLinkError: " + e.getMessage());
            System.out.println("This could be safely ignored if you " +
                               "don't need video.");
        }
        // The core pjsua2 native library is mandatory; failure here is fatal.
        System.loadLibrary("pjsua2");
        System.out.println("Library loaded");
    }

    // Shared native endpoint; created once and deleted in deinit().
    public static Endpoint ep = new Endpoint();
    // UI callback sink; set in init().
    public static MyAppObserver observer;
    public ArrayList<MyAccount> accList = new ArrayList<MyAccount>();

    // Account configs as loaded from / saved to the JSON config file.
    private ArrayList<MyAccountConfig> accCfgs =
        new ArrayList<MyAccountConfig>();
    private EpConfig epConfig = new EpConfig();
    private TransportConfig sipTpConfig = new TransportConfig();
    // Directory that holds the JSON config file.
    private String appDir;

    /* Maintain reference to log writer to avoid premature cleanup by GC */
    private MyLogWriter logWriter;

    private final String configName = "pjsua2.json";
    // Default SIP listening port used when no config file exists yet.
    private final int SIP_PORT  = 6000;
    private final int LOG_LEVEL = 4;

    /** Initializes the engine with the library's own worker thread. */
    public void init(MyAppObserver obs, String app_dir)
    {
        init(obs, app_dir, false);
    }

    /**
     * Initializes the engine: creates the endpoint, loads (or defaults) the
     * configuration, sets up logging/UA settings/transports, creates the
     * configured accounts and buddies, and starts the library.
     *
     * @param obs               UI observer receiving engine events
     * @param app_dir           directory containing the JSON config file
     * @param own_worker_thread if true, the app polls the library from its
     *                          own thread instead of pjsua2 worker threads
     */
    public void init(MyAppObserver obs, String app_dir,
                     boolean own_worker_thread)
    {
        observer = obs;
        appDir = app_dir;

        /* Create endpoint */
        try {
            ep.libCreate();
        } catch (Exception e) {
            // Without a native endpoint nothing else can proceed.
            return;
        }

        /* Load config */
        String configPath = appDir + "/" + configName;
        File f = new File(configPath);
        if (f.exists()) {
            loadConfig(configPath);
        } else {
            /* Set 'default' values */
            sipTpConfig.setPort(SIP_PORT);
        }

        /* Override log level setting */
        epConfig.getLogConfig().setLevel(LOG_LEVEL);
        epConfig.getLogConfig().setConsoleLevel(LOG_LEVEL);

        /* Set log config. */
        LogConfig log_cfg = epConfig.getLogConfig();
        logWriter = new MyLogWriter();
        log_cfg.setWriter(logWriter);
        // Strip CR/NL decorations: println() below adds the newline itself.
        log_cfg.setDecor(log_cfg.getDecor() &
                         ~(pj_log_decoration.PJ_LOG_HAS_CR.swigValue() |
                           pj_log_decoration.PJ_LOG_HAS_NEWLINE.swigValue()));

        /* Set ua config. */
        UaConfig ua_cfg = epConfig.getUaConfig();
        ua_cfg.setUserAgent("Pjsua2 Android " + ep.libVersion().getFull());
        StringVector stun_servers = new StringVector();
        stun_servers.add("stun.pjsip.org");
        ua_cfg.setStunServer(stun_servers);
        if (own_worker_thread) {
            // No internal worker threads; the app must poll the library.
            ua_cfg.setThreadCnt(0);
            ua_cfg.setMainThreadOnly(true);
        }

        /* Init endpoint */
        try {
            ep.libInit(epConfig);
        } catch (Exception e) {
            return;
        }

        /* Create transports. */
        // Transport failures are non-fatal: the other transport may still work.
        try {
            ep.transportCreate(pjsip_transport_type_e.PJSIP_TRANSPORT_UDP,
                               sipTpConfig);
        } catch (Exception e) {
            System.out.println(e);
        }
        try {
            ep.transportCreate(pjsip_transport_type_e.PJSIP_TRANSPORT_TCP,
                               sipTpConfig);
        } catch (Exception e) {
            System.out.println(e);
        }

        /* Create accounts. */
        for (int i = 0; i < accCfgs.size(); i++) {
            MyAccountConfig my_cfg = accCfgs.get(i);

            /* Customize account config */
            my_cfg.accCfg.getNatConfig().setIceEnabled(true);
            my_cfg.accCfg.getVideoConfig().setAutoTransmitOutgoing(true);
            my_cfg.accCfg.getVideoConfig().setAutoShowIncoming(true);

            MyAccount acc = addAcc(my_cfg.accCfg);
            if (acc == null)
                continue;

            /* Add Buddies */
            for (int j = 0; j < my_cfg.buddyCfgs.size(); j++) {
                BuddyConfig bud_cfg = my_cfg.buddyCfgs.get(j);
                acc.addBuddy(bud_cfg);
            }
        }

        /* Start. */
        try {
            ep.libStart();
        } catch (Exception e) {
            return;
        }
    }

    /**
     * Creates and registers a new account from the given config.
     * @return the new account, or null if native creation failed
     */
    public MyAccount addAcc(AccountConfig cfg)
    {
        MyAccount acc = new MyAccount(cfg);
        try {
            acc.create(cfg);
        } catch (Exception e) {
            acc = null;
            return null;
        }

        accList.add(acc);
        return acc;
    }

    /** Removes the account from the managed list (does not delete it). */
    public void delAcc(MyAccount acc)
    {
        accList.remove(acc);
    }

    /**
     * Loads endpoint, transport and account configuration from the JSON
     * file; on failure, whatever was read so far is kept.
     */
    private void loadConfig(String filename)
    {
        JsonDocument json = new JsonDocument();

        try {
            /* Load file */
            json.loadFile(filename);
            ContainerNode root = json.getRootContainer();

            /* Read endpoint config */
            epConfig.readObject(root);

            /* Read transport config */
            ContainerNode tp_node = root.readContainer("SipTransport");
            sipTpConfig.readObject(tp_node);

            /* Read account configs */
            accCfgs.clear();
            ContainerNode accs_node = root.readArray("accounts");
            while (accs_node.hasUnread()) {
                MyAccountConfig acc_cfg = new MyAccountConfig();
                acc_cfg.readObject(accs_node);
                accCfgs.add(acc_cfg);
            }
        } catch (Exception e) {
            System.out.println(e);
        }

        /* Force delete json now, as I found that Java somehow destroys it
         * after lib has been destroyed and from non-registered thread.
         */
        json.delete();
    }

    /** Rebuilds accCfgs from the live account/buddy objects before saving. */
    private void buildAccConfigs()
    {
        /* Sync accCfgs from accList */
        accCfgs.clear();
        for (int i = 0; i < accList.size(); i++) {
            MyAccount acc = accList.get(i);
            MyAccountConfig my_acc_cfg = new MyAccountConfig();

            my_acc_cfg.accCfg = acc.cfg;

            my_acc_cfg.buddyCfgs.clear();
            for (int j = 0; j < acc.buddyList.size(); j++) {
                MyBuddy bud = acc.buddyList.get(j);
                my_acc_cfg.buddyCfgs.add(bud.cfg);
            }

            accCfgs.add(my_acc_cfg);
        }
    }

    /**
     * Saves the endpoint, transport and account configuration to the JSON
     * file (best effort; errors are silently ignored).
     */
    private void saveConfig(String filename)
    {
        JsonDocument json = new JsonDocument();

        try {
            /* Write endpoint config */
            json.writeObject(epConfig);

            /* Write transport config */
            ContainerNode tp_node = json.writeNewContainer("SipTransport");
            sipTpConfig.writeObject(tp_node);

            /* Write account configs */
            buildAccConfigs();
            ContainerNode accs_node = json.writeNewArray("accounts");
            for (int i = 0; i < accCfgs.size(); i++) {
                accCfgs.get(i).writeObject(accs_node);
            }

            /* Save file */
            json.saveFile(filename);
        } catch (Exception e) {}

        /* Force delete json now, as I found that Java somehow destroys it
         * after lib has been destroyed and from non-registered thread.
         */
        json.delete();
    }

    /**
     * Persists the configuration and shuts down the library. PJ objects must
     * be destroyed before the library itself, hence the explicit GC nudge
     * and the manual endpoint deletion at the end.
     */
    public void deinit()
    {
        String configPath = appDir + "/" + configName;
        saveConfig(configPath);

        /* Try force GC to avoid late destroy of PJ objects as they should be
         * deleted before lib is destroyed.
         */
        Runtime.getRuntime().gc();

        /* Shutdown pjsua. Note that Endpoint destructor will also invoke
         * libDestroy(), so this will be a test of double libDestroy().
         */
        try {
            ep.libDestroy();
        } catch (Exception e) {}

        /* Force delete Endpoint here, to avoid deletion from a non-
         * registered thread (by GC?).
         */
        ep.delete();
        ep = null;
    }
}
| |
/*
* Copyright 2009 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableSet;
import com.google.javascript.jscomp.CodingConvention.SubclassRelationship;
import com.google.javascript.jscomp.NodeTraversal.Callback;
import com.google.javascript.rhino.IR;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;
import java.util.ArrayList;
import java.util.List;
/**
* Callback that gathers subexpressions that may have side effects
* and appends copies of those subexpressions to the replacements
* list. In the case of branching subexpressions, it simplifies the
* subexpression before adding it to the replacement list.
*
*/
class GatherSideEffectSubexpressionsCallback implements Callback {
/**
 * Contract used by GatherSideEffectSubexpressionsCallback to tell client
 * code which side-effecting expressions must be preserved.
 */
interface SideEffectAccumulator {
    /**
     * Whether class-defining calls ("mixin"/"inherits") should be treated
     * as if they had side effects.
     */
    boolean classDefiningCallsHaveSideEffects();

    /**
     * Records a subtree whose root has side effects.
     *
     * @param original root of the tree to keep
     */
    void keepSubTree(Node original);

    /**
     * Records an AND/OR expression after simplifying it down to its
     * side-effecting parts.
     *
     * @param original root of the and/or expression
     */
    void keepSimplifiedShortCircuitExpression(Node original);

    /**
     * Records a HOOK (ternary) expression after simplifying it, keeping
     * only the branches that have side effects.
     *
     * @param hook root of the hook expression
     * @param thenHasSideEffects whether the then branch has side effects
     * @param elseHasSideEffects whether the else branch has side effects
     */
    void keepSimplifiedHookExpression(Node hook,
        boolean thenHasSideEffects,
        boolean elseHasSideEffects);
}
/**
* Populates the provided replacement list by appending copies of
* subtrees that have side effects.
*
* It is OK if this class tears up the original tree, because
* we're going to throw the tree out anyway.
*/
static final class GetReplacementSideEffectSubexpressions
implements SideEffectAccumulator {
private final AbstractCompiler compiler;
private final List<Node> replacements;
/**
* Creates the accumulator.
*
* @param compiler - the AbstractCompiler
* @param replacements - list to accumulate into
*/
GetReplacementSideEffectSubexpressions(AbstractCompiler compiler,
List<Node> replacements) {
this.compiler = compiler;
this.replacements = replacements;
}
@Override
public boolean classDefiningCallsHaveSideEffects() {
return true;
}
@Override
public void keepSubTree(Node original) {
if (original.getParent() != null) {
original.detach();
}
replacements.add(original);
}
@Override
public void keepSimplifiedShortCircuitExpression(Node original) {
Preconditions.checkArgument(
(original.isAnd()) || (original.isOr()),
"Expected: AND or OR, Got: %s",
original.getToken());
Node left = original.getFirstChild();
Node right = left.getNext();
Node simplifiedRight = simplifyShortCircuitBranch(right);
original.detachChildren();
original.addChildToBack(left);
original.addChildToBack(simplifiedRight);
keepSubTree(original);
}
@Override
public void keepSimplifiedHookExpression(Node hook,
boolean thenHasSideEffects,
boolean elseHasSideEffects) {
Preconditions.checkArgument(hook.isHook(), "Expected: HOOK, Got: %s", hook.getToken());
Node condition = hook.getFirstChild();
Node thenBranch = condition.getNext();
Node elseBranch = thenBranch.getNext();
if (thenHasSideEffects && elseHasSideEffects) {
hook.detachChildren();
hook.addChildToBack(condition);
hook.addChildToBack(simplifyShortCircuitBranch(thenBranch));
hook.addChildToBack(simplifyShortCircuitBranch(elseBranch));
keepSubTree(hook);
} else if (thenHasSideEffects || elseHasSideEffects) {
Token type = thenHasSideEffects ? Token.AND : Token.OR;
Node body = thenHasSideEffects ? thenBranch : elseBranch;
Node simplified = new Node(
type, condition.detach(),
simplifyShortCircuitBranch(body))
.useSourceInfoIfMissingFrom(hook);
keepSubTree(simplified);
} else {
throw new IllegalArgumentException(
"keepSimplifiedHookExpression must keep at least 1 branch");
}
}
/**
 * Reduces a short-circuit branch to a comma-joined chain of its
 * side-effecting subexpressions. Throws if the branch contains none.
 */
private Node simplifyShortCircuitBranch(Node node) {
  List<Node> sideEffects = new ArrayList<>();
  NodeTraversal.traverseEs6(
      compiler, node,
      new GatherSideEffectSubexpressionsCallback(
          compiler,
          new GetReplacementSideEffectSubexpressions(compiler, sideEffects)));
  if (sideEffects.isEmpty()) {
    throw new IllegalArgumentException(
        "expected at least one side effect subexpression in short "
        + "circuit branch.");
  }
  // Fold the collected parts left-to-right into a COMMA chain.
  Node result = sideEffects.get(0);
  for (int i = 1; i < sideEffects.size(); i++) {
    result = IR.comma(result, sideEffects.get(i)).srcref(node);
  }
  return result;
}
}
// Node kinds that must never appear in the expressions handed to this
// callback; encountering one indicates a caller bug.
private static final ImmutableSet<Token> FORBIDDEN_TYPES =
    ImmutableSet.of(Token.BLOCK, Token.SCRIPT, Token.VAR, Token.EXPR_RESULT, Token.RETURN);
private final AbstractCompiler compiler;
private final SideEffectAccumulator accumulator;

/**
 * @param compiler - AbstractCompiler object
 * @param accumulator - object that will accumulate roots of
 *     subtrees that have side effects.
 */
GatherSideEffectSubexpressionsCallback(AbstractCompiler compiler,
    SideEffectAccumulator accumulator) {
  this.compiler = compiler;
  this.accumulator = accumulator;
}
/**
 * Determines if a call defines a class inheritance or mixing
 * relation, according to the current coding convention.
 */
private boolean isClassDefiningCall(Node callNode) {
  // A non-null result means the coding convention recognised this call
  // as establishing a subclass relationship.
  return compiler.getCodingConvention().getClassesDefinedByCall(callNode) != null;
}
/**
 * Computes the list of subtrees whose root nodes have side effects.
 *
 * <p>If the current subtree's root has side effects this method should
 * call accumulator.keepSubTree and return 'false' to add the
 * subtree to the result list and avoid traversing the nodes children.
 *
 * <p>Branching nodes whose then or else branch contain side effects
 * must be simplified by doing a recursive traversal; this method
 * should call the appropriate accumulator 'keepSimplified' method
 * and return 'false' to stop the regular traversal.
 */
@Override
public boolean shouldTraverse(
    NodeTraversal traversal, Node node, Node parent) {
  if (FORBIDDEN_TYPES.contains(node.getToken()) || NodeUtil.isControlStructure(node)) {
    throw new IllegalArgumentException(node.getToken() + " nodes are not supported.");
  }
  // Do not recurse into nested functions.
  if (node.isFunction()) {
    return false;
  }
  // simplify and maybe keep hook expression.
  if (node.isHook()) {
    return processHook(node);
  }
  // simplify and maybe keep AND/OR expression.
  if ((node.isAnd()) || (node.isOr())) {
    return processShortCircuitExpression(node);
  }
  if (!NodeUtil.nodeTypeMayHaveSideEffects(node, compiler)) {
    // No side effects at this node; keep looking in its children.
    return true;
  } else {
    // Node type suggests that the expression has side effects.
    if (node.isCall()) {
      return processFunctionCall(node);
    } else if (node.isNew()) {
      return processConstructorCall(node);
    } else {
      // Any other side-effecting node is kept wholesale.
      accumulator.keepSubTree(node);
      return false;
    }
  }
}
/**
 * Processes an AND or OR expression.
 *
 * @return true to continue traversal, false otherwise
 */
boolean processShortCircuitExpression(Node node) {
  Preconditions.checkArgument(
      node.isAnd() || node.isOr(), "Expected: AND or OR, Got: %s", node.getToken());
  // Keep (a simplified form of) the whole expression when the lazily
  // evaluated right-hand side may have side effects.
  Node rhs = node.getFirstChild().getNext();
  if (!NodeUtil.mayHaveSideEffects(rhs, compiler)) {
    return true;
  }
  accumulator.keepSimplifiedShortCircuitExpression(node);
  return false;
}
/**
 * Processes a HOOK expression.
 *
 * @return true to continue traversal, false otherwise
 */
boolean processHook(Node node) {
  Preconditions.checkArgument(node.isHook(), "Expected: HOOK, Got: %s", node.getToken());
  Node cond = node.getFirstChild();
  Node thenBranch = cond.getNext();
  Node elseBranch = thenBranch.getNext();
  boolean thenSideEffects = NodeUtil.mayHaveSideEffects(thenBranch, compiler);
  boolean elseSideEffects = NodeUtil.mayHaveSideEffects(elseBranch, compiler);
  // Side-effect-free hooks are skipped; traversal continues into children.
  if (!thenSideEffects && !elseSideEffects) {
    return true;
  }
  accumulator.keepSimplifiedHookExpression(node, thenSideEffects, elseSideEffects);
  return false;
}
/**
 * Processes a CALL expression.
 *
 * @return true to continue traversal, false otherwise
 */
boolean processFunctionCall(Node node) {
  Preconditions.checkArgument(node.isCall(), "Expected: CALL, Got: %s", node.getToken());
  // Calls to functions that are known to be "pure" have no side effects.
  Node callee = node.getFirstChild();
  boolean namedCallee = callee.isName() || callee.isGetProp();
  if (namedCallee
      && !accumulator.classDefiningCallsHaveSideEffects()
      && isClassDefiningCall(node)) {
    return true;
  }
  if (!NodeUtil.functionCallHasSideEffects(node)) {
    return true;
  }
  accumulator.keepSubTree(node);
  return false;
}
/**
 * Processes a NEW expression.
 *
 * @return true to continue traversal, false otherwise
 */
boolean processConstructorCall(Node node) {
  Preconditions.checkArgument(node.isNew(), "Expected: NEW, Got: %s", node.getToken());
  // Calls to constructors that are known to be "pure" have no side effects.
  if (NodeUtil.constructorCallHasSideEffects(node)) {
    accumulator.keepSubTree(node);
    return false;
  }
  return true;
}
/** No post-order work is needed; all logic lives in shouldTraverse. */
@Override
public void visit(NodeTraversal traversal, Node node, Node parent) {}
}
| |
/*
* Copyright 2014 Frank Asseg
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.objecthunter.larch.service.backend.elasticsearch;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import javax.annotation.PostConstruct;
import net.objecthunter.larch.exceptions.NotFoundException;
import net.objecthunter.larch.model.Entities;
import net.objecthunter.larch.model.Entity;
import net.objecthunter.larch.model.SearchResult;
import net.objecthunter.larch.service.backend.BackendPublishService;
import net.objecthunter.larch.service.backend.elasticsearch.ElasticSearchEntityService.EntitiesSearchField;
import org.apache.commons.lang3.StringUtils;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.client.Client;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.FilterBuilders;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.sort.SortOrder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import com.fasterxml.jackson.databind.ObjectMapper;
/**
 * Elasticsearch-backed implementation of {@link BackendPublishService}:
 * stores immutable, versioned snapshots of entities in a dedicated
 * "publish" index and offers retrieval, scanning and searching over them.
 */
public class ElasticSearchPublishService extends AbstractElasticSearchService implements BackendPublishService {

    private static final Logger log = LoggerFactory.getLogger(ElasticSearchPublishService.class);

    /** Name of the Elasticsearch index holding published entity snapshots. */
    public static final String INDEX_PUBLISHED = "publish";

    /** Document type under which published entities are indexed. */
    public static final String TYPE_PUBLISHED = "publishedentity";

    // Upper bound for the page size of index scans.
    private int maxRecords = 50;

    @Autowired
    private Client client;

    @Autowired
    private ObjectMapper mapper;

    /** Ensures the publish index exists before the service is used. */
    @PostConstruct
    public void init() throws IOException {
        this.checkAndOrCreateIndex(INDEX_PUBLISHED);
    }

    /**
     * Publishes an entity by indexing a JSON snapshot of it.
     *
     * @param e the entity to publish; its publishId field is set in place
     * @return the publish id, built as "&lt;entityId&gt;:&lt;version&gt;"
     * @throws IOException if the snapshot cannot be indexed
     */
    @Override
    public String publish(Entity e) throws IOException {
        String publishId = e.getId() + ":" + e.getVersion();
        e.setPublishId(publishId);
        try {
            this.client
                    .prepareIndex(INDEX_PUBLISHED, TYPE_PUBLISHED, publishId)
                    .setSource(this.mapper.writeValueAsBytes(e))
                    .execute().actionGet();
        } catch (ElasticsearchException ex) {
            // Preserve the original exception as the cause instead of dropping it.
            throw new IOException(ex.getMostSpecificCause().getMessage(), ex);
        }
        this.refreshIndex(INDEX_PUBLISHED);
        return publishId;
    }

    /**
     * Fetches a single published entity snapshot.
     *
     * @param publishId the id returned by {@link #publish(Entity)}
     * @return the deserialized entity
     * @throws IOException if the lookup or deserialization fails
     * @throws NotFoundException if no document exists for the publish id
     */
    @Override
    public Entity retrievePublishedEntity(String publishId) throws IOException {
        final GetResponse resp;
        try {
            resp = this.client.prepareGet(INDEX_PUBLISHED, TYPE_PUBLISHED, publishId).execute().actionGet();
        } catch (ElasticsearchException ex) {
            throw new IOException(ex.getMostSpecificCause().getMessage(), ex);
        }
        if (!resp.isExists()) {
            throw new NotFoundException("The entity with the publishId " + publishId
                    + " can not be found in the publish index");
        }
        return this.mapper.readValue(resp.getSourceAsBytes(), Entity.class);
    }

    /**
     * Retrieves all published versions of one entity, sorted by publish id.
     *
     * @param entityId the id of the original entity
     * @return the published versions, wrapped in an {@link Entities} container
     * @throws IOException if the search fails
     * @throws NotFoundException if no published version exists
     */
    @Override
    public Entities retrievePublishedEntities(String entityId) throws IOException {
        final SearchResponse search;
        try {
            search = this.client
                    .prepareSearch(INDEX_PUBLISHED)
                    .setTypes(TYPE_PUBLISHED)
                    .setQuery(
                            QueryBuilders.filteredQuery(QueryBuilders.matchAllQuery(),
                                    FilterBuilders.termFilter("id", entityId)))
                    .addSort("publishId", SortOrder.ASC)
                    .execute()
                    .actionGet();
        } catch (ElasticsearchException ex) {
            throw new IOException(ex.getMostSpecificCause().getMessage(), ex);
        }
        if (search.getHits().getTotalHits() == 0) {
            throw new NotFoundException("There are no published versions of the entity " + entityId);
        }
        final List<Entity> result = new ArrayList<>();
        for (SearchHit hit : search.getHits().getHits()) {
            result.add(this.mapper.readValue(hit.getSourceAsString(), Entity.class));
        }
        Entities entities = new Entities();
        entities.setEntities(result);
        return entities;
    }

    /**
     * Pages through the publish index.
     *
     * @param offset zero-based start offset
     * @param numRecords requested page size, capped at {@code maxRecords}
     * @return a {@link SearchResult} with paging metadata and sparse entities
     * @throws IOException if the search fails
     */
    @Override
    public SearchResult scanIndex(int offset, int numRecords) throws IOException {
        final long time = System.currentTimeMillis();
        // Cap the requested page size at the service-wide maximum.
        numRecords = numRecords > maxRecords ? maxRecords : numRecords;
        final SearchResponse resp;
        try {
            resp = this.client
                    .prepareSearch(INDEX_PUBLISHED).setQuery(QueryBuilders.matchAllQuery())
                    .setSearchType(SearchType.DFS_QUERY_THEN_FETCH).setFrom(offset).setSize(numRecords)
                    .addFields("id", "publishId", "version", "label", "type", "tags").execute().actionGet();
        } catch (ElasticsearchException ex) {
            throw new IOException(ex.getMostSpecificCause().getMessage(), ex);
        }
        final SearchResult result = new SearchResult();
        // Fixed: offset was previously set twice.
        result.setOffset(offset);
        result.setNumRecords(numRecords);
        result.setHits(resp.getHits().getHits().length);
        result.setTotalHits(resp.getHits().getTotalHits());
        result.setNextOffset(offset + numRecords);
        result.setPrevOffset(Math.max(offset - numRecords, 0));
        result.setMaxRecords(maxRecords);
        final List<Entity> entities = new ArrayList<>(numRecords);
        for (final SearchHit hit : resp.getHits()) {
            entities.add(entityFromHit(hit));
        }
        result.setData(entities);
        result.setDuration(System.currentTimeMillis() - time);
        return result;
    }

    /**
     * Searches published entities. Per field, the given values are OR-ed
     * (wildcard match); the fields themselves are AND-ed.
     *
     * @param searchFields map of search field to candidate values
     * @return a {@link SearchResult} with the matching entities
     * @throws IOException if the refresh or search fails
     */
    @Override
    public SearchResult searchEntities(Map<EntitiesSearchField, String[]> searchFields) throws IOException {
        BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery();
        for (Entry<EntitiesSearchField, String[]> searchField : searchFields.entrySet()) {
            if (searchField.getValue() != null && searchField.getValue().length > 0) {
                BoolQueryBuilder childQueryBuilder = QueryBuilders.boolQuery();
                for (int i = 0; i < searchField.getValue().length; i++) {
                    if (StringUtils.isNotBlank(searchField.getValue()[i])) {
                        // Wildcard queries are not analyzed; lower-case to match the index.
                        childQueryBuilder.should(QueryBuilders.wildcardQuery(searchField.getKey().getFieldName(),
                                searchField.getValue()[i].toLowerCase()));
                    }
                }
                queryBuilder.must(childQueryBuilder);
            }
        }
        int numRecords = 20;
        final long time = System.currentTimeMillis();
        final ActionFuture<RefreshResponse> refresh =
                this.client.admin().indices().refresh(new RefreshRequest(INDEX_PUBLISHED));
        final SearchResponse resp;
        try {
            refresh.actionGet();
            resp = this.client
                    .prepareSearch(INDEX_PUBLISHED).addFields("id", "publishId", "version", "label", "type",
                            "tags")
                    .setQuery(queryBuilder).setSearchType(SearchType.DFS_QUERY_THEN_FETCH).execute()
                    .actionGet();
        } catch (ElasticsearchException ex) {
            throw new IOException(ex.getMostSpecificCause().getMessage(), ex);
        }
        // Serialize the query once; it is used for logging and the result term.
        final String queryString = new String(queryBuilder.buildAsBytes().toBytes());
        log.debug("ES returned {} results for '{}'", resp.getHits().getHits().length, queryString);
        final SearchResult result = new SearchResult();
        final List<Entity> entities = new ArrayList<>();
        for (final SearchHit hit : resp.getHits()) {
            entities.add(entityFromHit(hit));
        }
        result.setData(entities);
        // Fixed: totalHits was previously set twice.
        result.setTotalHits(resp.getHits().getTotalHits());
        result.setMaxRecords(maxRecords);
        result.setHits(resp.getHits().getHits().length);
        result.setNumRecords(numRecords);
        result.setOffset(0);
        result.setTerm(queryString);
        result.setPrevOffset(0);
        result.setNextOffset(0);
        result.setDuration(System.currentTimeMillis() - time);
        return result;
    }

    /** Scans the publish index from the given offset with the default page size. */
    @Override
    public SearchResult scanIndex(int offset) throws IOException {
        return scanIndex(offset, maxRecords);
    }

    /**
     * Maps a search hit fetched with fields (id, publishId, version, label,
     * type, tags) to a sparsely populated {@link Entity}. Missing fields
     * default to 0 / empty string / empty list. Shared by scanIndex and
     * searchEntities, which previously duplicated this mapping.
     */
    private Entity entityFromHit(final SearchHit hit) {
        int version = hit.field("version") != null ? hit.field("version").getValue() : 0;
        String label = hit.field("label") != null ? hit.field("label").getValue() : "";
        String type = hit.field("type") != null ? hit.field("type").getValue() : "";
        final Entity e = new Entity();
        e.setId(hit.field("id").getValue());
        e.setPublishId(hit.field("publishId").getValue());
        e.setVersion(version);
        e.setLabel(label);
        e.setType(type);
        final List<String> tags = new ArrayList<>();
        if (hit.field("tags") != null) {
            for (Object o : hit.field("tags").values()) {
                tags.add((String) o);
            }
        }
        e.setTags(tags);
        return e;
    }
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.engine.test.api.history;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.flowable.engine.history.HistoricTaskInstance;
import org.flowable.engine.history.HistoricTaskInstanceQuery;
import org.flowable.engine.impl.history.HistoryLevel;
import org.flowable.engine.impl.test.HistoryTestHelper;
import org.flowable.engine.impl.test.PluggableFlowableTestCase;
import org.flowable.engine.runtime.ProcessInstance;
import org.flowable.engine.task.IdentityLinkInfo;
import org.flowable.engine.task.Task;
import org.flowable.engine.test.Deployment;
/**
* @author Tijs Rademakers
*/
public class HistoricTaskAndVariablesQueryTest extends PluggableFlowableTestCase {
private List<String> taskIds;
/**
 * Creates the test identities (kermit in both groups, fozzie only in
 * management, gonzo in none) and the test tasks.
 */
public void setUp() throws Exception {
    for (String userId : new String[] { "kermit", "gonzo", "fozzie" }) {
        identityService.saveUser(identityService.newUser(userId));
    }
    for (String groupId : new String[] { "management", "accountancy" }) {
        identityService.saveGroup(identityService.newGroup(groupId));
    }
    identityService.createMembership("kermit", "management");
    identityService.createMembership("kermit", "accountancy");
    identityService.createMembership("fozzie", "management");
    taskIds = generateTestTasks();
}
/** Removes the identities and tasks created by {@code setUp}. */
public void tearDown() throws Exception {
    for (String groupId : new String[] { "accountancy", "management" }) {
        identityService.deleteGroup(groupId);
    }
    for (String userId : new String[] { "fozzie", "gonzo", "kermit" }) {
        identityService.deleteUser(userId);
    }
    taskService.deleteTasks(taskIds, true);
}
/**
 * Exercises historic task queries with includeTaskLocalVariables /
 * includeProcessVariables, variable value matching ("like" variants) and
 * involved-user filtering, both before and after completing tasks.
 */
@Deployment
public void testQuery() {
    if (HistoryTestHelper.isHistoryLevelAtLeast(HistoryLevel.ACTIVITY, processEngineConfiguration)) {
        // gonzo's task carries two task-local variables set by the fixture.
        HistoricTaskInstance task = historyService.createHistoricTaskInstanceQuery().includeTaskLocalVariables().taskAssignee("gonzo").singleResult();
        Map<String, Object> variableMap = task.getTaskLocalVariables();
        assertEquals(2, variableMap.size());
        assertEquals(0, task.getProcessVariables().size());
        assertNotNull(variableMap.get("testVar"));
        assertEquals("someVariable", variableMap.get("testVar"));
        assertNotNull(variableMap.get("testVar2"));
        assertEquals(123, variableMap.get("testVar2"));
        List<HistoricTaskInstance> tasks = historyService.createHistoricTaskInstanceQuery().list();
        assertEquals(3, tasks.size());
        // includeProcessVariables on a standalone task yields no variables.
        task = historyService.createHistoricTaskInstanceQuery().includeProcessVariables().taskAssignee("gonzo").singleResult();
        assertEquals(0, task.getProcessVariables().size());
        assertEquals(0, task.getTaskLocalVariables().size());
        // Start a process with one process variable; kermit gets its task.
        Map<String, Object> startMap = new HashMap<String, Object>();
        startMap.put("processVar", true);
        runtimeService.startProcessInstanceByKey("oneTaskProcess", startMap);
        waitForHistoryJobExecutorToProcessAllJobs(5000, 100);
        task = historyService.createHistoricTaskInstanceQuery().includeProcessVariables().taskAssignee("kermit").singleResult();
        assertEquals(1, task.getProcessVariables().size());
        assertEquals(0, task.getTaskLocalVariables().size());
        assertTrue((Boolean) task.getProcessVariables().get("processVar"));
        // Add a second process variable and one task-local variable.
        taskService.setVariable(task.getId(), "anotherProcessVar", 123);
        taskService.setVariableLocal(task.getId(), "localVar", "test");
        waitForHistoryJobExecutorToProcessAllJobs(5000, 100);
        // Each include-flag only surfaces its own variable scope.
        task = historyService.createHistoricTaskInstanceQuery().includeTaskLocalVariables().taskAssignee("kermit").singleResult();
        assertEquals(0, task.getProcessVariables().size());
        assertEquals(1, task.getTaskLocalVariables().size());
        assertEquals("test", task.getTaskLocalVariables().get("localVar"));
        task = historyService.createHistoricTaskInstanceQuery().includeProcessVariables().taskAssignee("kermit").singleResult();
        assertEquals(2, task.getProcessVariables().size());
        assertEquals(0, task.getTaskLocalVariables().size());
        assertEquals(true, task.getProcessVariables().get("processVar"));
        assertEquals(123, task.getProcessVariables().get("anotherProcessVar"));
        // Variable value "like" matching, case-sensitive and -insensitive.
        task = historyService.createHistoricTaskInstanceQuery().taskVariableValueLike("testVar", "someVaria%").singleResult();
        assertNotNull(task);
        assertEquals("gonzoTask", task.getName());
        task = historyService.createHistoricTaskInstanceQuery().taskVariableValueLikeIgnoreCase("testVar", "somevaria%").singleResult();
        assertNotNull(task);
        assertEquals("gonzoTask", task.getName());
        task = historyService.createHistoricTaskInstanceQuery().taskVariableValueLikeIgnoreCase("testVar", "somevaria2%").singleResult();
        assertNull(task);
        // Involved-user queries return all three tasks.
        tasks = historyService.createHistoricTaskInstanceQuery().includeTaskLocalVariables().taskInvolvedUser("kermit").orderByTaskCreateTime().asc().list();
        assertEquals(3, tasks.size());
        assertEquals(1, tasks.get(0).getTaskLocalVariables().size());
        assertEquals("test", tasks.get(0).getTaskLocalVariables().get("test"));
        assertEquals(0, tasks.get(0).getProcessVariables().size());
        tasks = historyService.createHistoricTaskInstanceQuery().includeProcessVariables().taskInvolvedUser("kermit").orderByTaskCreateTime().asc().list();
        assertEquals(3, tasks.size());
        assertEquals(0, tasks.get(0).getProcessVariables().size());
        assertEquals(0, tasks.get(0).getTaskLocalVariables().size());
        // Combining include-flags with a task variable filter.
        task = historyService.createHistoricTaskInstanceQuery().includeTaskLocalVariables().taskAssignee("kermit").taskVariableValueEquals("localVar", "test").singleResult();
        assertEquals(0, task.getProcessVariables().size());
        assertEquals(1, task.getTaskLocalVariables().size());
        assertEquals("test", task.getTaskLocalVariables().get("localVar"));
        task = historyService.createHistoricTaskInstanceQuery().includeProcessVariables().taskAssignee("kermit").taskVariableValueEquals("localVar", "test").singleResult();
        assertEquals(2, task.getProcessVariables().size());
        assertEquals(0, task.getTaskLocalVariables().size());
        assertEquals(true, task.getProcessVariables().get("processVar"));
        assertEquals(123, task.getProcessVariables().get("anotherProcessVar"));
        // Both include-flags together surface both variable scopes.
        task = historyService.createHistoricTaskInstanceQuery().includeTaskLocalVariables().includeProcessVariables().taskAssignee("kermit").singleResult();
        assertEquals(2, task.getProcessVariables().size());
        assertEquals(1, task.getTaskLocalVariables().size());
        assertEquals("test", task.getTaskLocalVariables().get("localVar"));
        assertEquals(true, task.getProcessVariables().get("processVar"));
        assertEquals(123, task.getProcessVariables().get("anotherProcessVar"));
        // Complete gonzo's task and query the finished history entry.
        task = historyService.createHistoricTaskInstanceQuery().taskAssignee("gonzo").singleResult();
        taskService.complete(task.getId());
        waitForHistoryJobExecutorToProcessAllJobs(5000, 100);
        task = (HistoricTaskInstance) historyService.createHistoricTaskInstanceQuery().includeTaskLocalVariables().finished().singleResult();
        variableMap = task.getTaskLocalVariables();
        assertEquals(2, variableMap.size());
        assertEquals(0, task.getProcessVariables().size());
        assertNotNull(variableMap.get("testVar"));
        assertEquals("someVariable", variableMap.get("testVar"));
        assertNotNull(variableMap.get("testVar2"));
        assertEquals(123, variableMap.get("testVar2"));
    }
}
/**
 * Mirrors {@code testQuery} but wraps each filter in or()/endOr() blocks,
 * typically pairing a real criterion with a never-matching variable filter
 * ("localVar" == "nonExisting") to prove OR semantics.
 */
@Deployment
public void testOrQuery() {
    if (HistoryTestHelper.isHistoryLevelAtLeast(HistoryLevel.ACTIVITY, processEngineConfiguration)) {
        HistoricTaskInstance task = historyService.createHistoricTaskInstanceQuery()
                .includeTaskLocalVariables()
                .or()
                .taskAssignee("gonzo")
                .endOr()
                .singleResult();
        Map<String, Object> variableMap = task.getTaskLocalVariables();
        assertEquals(2, variableMap.size());
        assertEquals(0, task.getProcessVariables().size());
        assertNotNull(variableMap.get("testVar"));
        assertEquals("someVariable", variableMap.get("testVar"));
        assertNotNull(variableMap.get("testVar2"));
        assertEquals(123, variableMap.get("testVar2"));
        List<HistoricTaskInstance> tasks = historyService.createHistoricTaskInstanceQuery().list();
        assertEquals(3, tasks.size());
        task = historyService.createHistoricTaskInstanceQuery().includeProcessVariables().or().taskAssignee("gonzo").taskVariableValueEquals("localVar", "nonExisting").endOr().singleResult();
        assertEquals(0, task.getProcessVariables().size());
        assertEquals(0, task.getTaskLocalVariables().size());
        // Start a process with one process variable; kermit gets its task.
        Map<String, Object> startMap = new HashMap<String, Object>();
        startMap.put("processVar", true);
        runtimeService.startProcessInstanceByKey("oneTaskProcess", startMap);
        waitForHistoryJobExecutorToProcessAllJobs(5000, 100);
        task = historyService.createHistoricTaskInstanceQuery().includeProcessVariables().or().taskAssignee("kermit").taskVariableValueEquals("localVar", "nonExisting").endOr().singleResult();
        assertEquals(1, task.getProcessVariables().size());
        assertEquals(0, task.getTaskLocalVariables().size());
        assertTrue((Boolean) task.getProcessVariables().get("processVar"));
        // Two consecutive OR blocks are AND-ed together.
        task = historyService.createHistoricTaskInstanceQuery()
                .includeProcessVariables()
                .or()
                .taskAssignee("kermit")
                .taskVariableValueEquals("localVar", "nonExisting")
                .endOr()
                .or()
                .processDefinitionKey("oneTaskProcess")
                .taskVariableValueEquals("localVar", "nonExisting")
                .endOr()
                .singleResult();
        assertNotNull(task);
        assertEquals(1, task.getProcessVariables().size());
        assertEquals(0, task.getTaskLocalVariables().size());
        assertTrue((Boolean) task.getProcessVariables().get("processVar"));
        // Add a second process variable and one task-local variable.
        taskService.setVariable(task.getId(), "anotherProcessVar", 123);
        taskService.setVariableLocal(task.getId(), "localVar", "test");
        waitForHistoryJobExecutorToProcessAllJobs(5000, 100);
        task = historyService.createHistoricTaskInstanceQuery().includeTaskLocalVariables().or().taskAssignee("kermit").taskVariableValueEquals("localVar", "nonExisting").endOr().singleResult();
        assertEquals(0, task.getProcessVariables().size());
        assertEquals(1, task.getTaskLocalVariables().size());
        assertEquals("test", task.getTaskLocalVariables().get("localVar"));
        task = historyService.createHistoricTaskInstanceQuery().includeProcessVariables().or().taskAssignee("kermit").taskVariableValueEquals("localVar", "nonExisting").endOr().singleResult();
        assertEquals(2, task.getProcessVariables().size());
        assertEquals(0, task.getTaskLocalVariables().size());
        assertEquals(true, task.getProcessVariables().get("processVar"));
        assertEquals(123, task.getProcessVariables().get("anotherProcessVar"));
        // OR of a non-matching assignee with a matching variable "like" filter.
        task = historyService.createHistoricTaskInstanceQuery()
                .includeTaskLocalVariables()
                .or()
                .taskAssignee("nonexisting")
                .taskVariableValueLike("testVar", "someVar%")
                .endOr()
                .singleResult();
        assertEquals(2, task.getTaskLocalVariables().size());
        assertEquals(0, task.getProcessVariables().size());
        assertEquals("someVariable", task.getTaskLocalVariables().get("testVar"));
        assertEquals(123, task.getTaskLocalVariables().get("testVar2"));
        task = historyService.createHistoricTaskInstanceQuery()
                .includeTaskLocalVariables()
                .or()
                .taskAssignee("nonexisting")
                .taskVariableValueLikeIgnoreCase("testVar", "somevar%")
                .endOr()
                .singleResult();
        assertEquals(2, task.getTaskLocalVariables().size());
        assertEquals(0, task.getProcessVariables().size());
        assertEquals("someVariable", task.getTaskLocalVariables().get("testVar"));
        assertEquals(123, task.getTaskLocalVariables().get("testVar2"));
        task = historyService.createHistoricTaskInstanceQuery()
                .includeTaskLocalVariables()
                .or()
                .taskAssignee("nonexisting")
                .taskVariableValueLike("testVar", "someVar2%")
                .endOr()
                .singleResult();
        assertNull(task);
        // Involved-user OR queries return all three tasks.
        tasks = historyService.createHistoricTaskInstanceQuery().includeTaskLocalVariables()
                .or()
                .taskInvolvedUser("kermit")
                .taskVariableValueEquals("localVar", "nonExisting")
                .endOr()
                .orderByTaskCreateTime().asc().list();
        assertEquals(3, tasks.size());
        assertEquals(1, tasks.get(0).getTaskLocalVariables().size());
        assertEquals("test", tasks.get(0).getTaskLocalVariables().get("test"));
        assertEquals(0, tasks.get(0).getProcessVariables().size());
        tasks = historyService.createHistoricTaskInstanceQuery().includeProcessVariables()
                .or()
                .taskInvolvedUser("kermit")
                .taskVariableValueEquals("localVar", "nonExisting")
                .endOr()
                .orderByTaskCreateTime().asc().list();
        assertEquals(3, tasks.size());
        assertEquals(0, tasks.get(0).getProcessVariables().size());
        assertEquals(0, tasks.get(0).getTaskLocalVariables().size());
        // AND of a plain filter with an OR block.
        task = historyService.createHistoricTaskInstanceQuery().includeTaskLocalVariables().taskAssignee("kermit").or().taskVariableValueEquals("localVar", "test")
                .taskVariableValueEquals("localVar", "nonExisting").endOr().singleResult();
        assertEquals(0, task.getProcessVariables().size());
        assertEquals(1, task.getTaskLocalVariables().size());
        assertEquals("test", task.getTaskLocalVariables().get("localVar"));
        task = historyService.createHistoricTaskInstanceQuery().includeProcessVariables().taskAssignee("kermit").or().taskVariableValueEquals("localVar", "test")
                .taskVariableValueEquals("localVar", "nonExisting").endOr().singleResult();
        assertEquals(2, task.getProcessVariables().size());
        assertEquals(0, task.getTaskLocalVariables().size());
        assertEquals(true, task.getProcessVariables().get("processVar"));
        assertEquals(123, task.getProcessVariables().get("anotherProcessVar"));
        // Both include-flags together surface both variable scopes.
        task = historyService.createHistoricTaskInstanceQuery().includeTaskLocalVariables().includeProcessVariables().or().taskAssignee("kermit").taskVariableValueEquals("localVar", "nonExisting")
                .endOr().singleResult();
        assertEquals(2, task.getProcessVariables().size());
        assertEquals(1, task.getTaskLocalVariables().size());
        assertEquals("test", task.getTaskLocalVariables().get("localVar"));
        assertEquals(true, task.getProcessVariables().get("processVar"));
        assertEquals(123, task.getProcessVariables().get("anotherProcessVar"));
        // Complete gonzo's task and query the finished history entry.
        task = historyService.createHistoricTaskInstanceQuery().taskAssignee("gonzo").singleResult();
        taskService.complete(task.getId());
        waitForHistoryJobExecutorToProcessAllJobs(5000, 100);
        task = historyService.createHistoricTaskInstanceQuery().includeTaskLocalVariables().or().finished().taskVariableValueEquals("localVar", "nonExisting").endOr().singleResult();
        variableMap = task.getTaskLocalVariables();
        assertEquals(2, variableMap.size());
        assertEquals(0, task.getProcessVariables().size());
        assertNotNull(variableMap.get("testVar"));
        assertEquals("someVariable", variableMap.get("testVar"));
        assertNotNull(variableMap.get("testVar2"));
        assertEquals(123, variableMap.get("testVar2"));
    }
}
/**
 * Verifies OR blocks with many processVariableValueEquals clauses:
 * a block of only non-matching values yields no result, while adding one
 * matching value (123) selects exactly that process instance's task.
 */
@Deployment
public void testOrQueryMultipleVariableValues() {
    if (HistoryTestHelper.isHistoryLevelAtLeast(HistoryLevel.ACTIVITY, processEngineConfiguration)) {
        // Two process instances, distinguished by anotherProcessVar (123 vs 999).
        Map<String, Object> startMap = new HashMap<String, Object>();
        startMap.put("processVar", true);
        startMap.put("anotherProcessVar", 123);
        runtimeService.startProcessInstanceByKey("oneTaskProcess", startMap);
        startMap.put("anotherProcessVar", 999);
        runtimeService.startProcessInstanceByKey("oneTaskProcess", startMap);
        waitForHistoryJobExecutorToProcessAllJobs(5000, 100);
        // Values 0..19 match neither instance.
        HistoricTaskInstanceQuery query0 = historyService.createHistoricTaskInstanceQuery().includeProcessVariables().or();
        for (int i = 0; i < 20; i++) {
            query0 = query0.processVariableValueEquals("anotherProcessVar", i);
        }
        query0 = query0.endOr();
        assertNull(query0.singleResult());
        // Adding the matching value 123 selects the first instance's task.
        HistoricTaskInstanceQuery query1 = historyService.createHistoricTaskInstanceQuery().includeProcessVariables().or().processVariableValueEquals("anotherProcessVar", 123);
        for (int i = 0; i < 20; i++) {
            query1 = query1.processVariableValueEquals("anotherProcessVar", i);
        }
        query1 = query1.endOr();
        HistoricTaskInstance task = query1.singleResult();
        assertEquals(2, task.getProcessVariables().size());
        assertEquals(true, task.getProcessVariables().get("processVar"));
        assertEquals(123, task.getProcessVariables().get("anotherProcessVar"));
    }
}
/**
 * Verifies candidate-user and candidate-group filtering on historic task
 * queries, including taskCandidateGroupIn, combinations of user and group
 * criteria, and that results are stable after a task completes.
 */
@Deployment
public void testCandidate() {
    if (HistoryTestHelper.isHistoryLevelAtLeast(HistoryLevel.ACTIVITY, processEngineConfiguration)) {
        ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess");
        waitForHistoryJobExecutorToProcessAllJobs(5000, 100);
        List<HistoricTaskInstance> tasks = historyService.createHistoricTaskInstanceQuery().taskCandidateUser("kermit").list();
        assertEquals(3, tasks.size());
        tasks = historyService.createHistoricTaskInstanceQuery().taskCandidateUser("gonzo").list();
        assertEquals(0, tasks.size());
        tasks = historyService.createHistoricTaskInstanceQuery().taskCandidateUser("fozzie").list();
        assertEquals(1, tasks.size());
        tasks = historyService.createHistoricTaskInstanceQuery().taskCandidateGroup("management").list();
        assertEquals(1, tasks.size());
        List<String> groups = new ArrayList<String>();
        groups.add("management");
        groups.add("accountancy");
        tasks = historyService.createHistoricTaskInstanceQuery().taskCandidateGroupIn(groups).list();
        assertEquals(1, tasks.size());
        // Candidate user and candidate group criteria combined.
        tasks = historyService.createHistoricTaskInstanceQuery().taskCandidateUser("kermit").taskCandidateGroupIn(groups).list();
        assertEquals(3, tasks.size());
        tasks = historyService.createHistoricTaskInstanceQuery().taskCandidateUser("gonzo").taskCandidateGroupIn(groups).list();
        assertEquals(1, tasks.size());
        // Complete the process task; historic results stay unchanged.
        Task task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
        taskService.complete(task.getId());
        assertEquals(0, taskService.createTaskQuery().processInstanceId(processInstance.getId()).count());
        tasks = historyService.createHistoricTaskInstanceQuery().taskCandidateUser("kermit").list();
        assertEquals(3, tasks.size());
        tasks = historyService.createHistoricTaskInstanceQuery().taskCandidateUser("gonzo").list();
        assertEquals(0, tasks.size());
        tasks = historyService.createHistoricTaskInstanceQuery().taskCandidateUser("fozzie").list();
        assertEquals(1, tasks.size());
        tasks = historyService.createHistoricTaskInstanceQuery().taskCandidateGroup("management").list();
        assertEquals(1, tasks.size());
        tasks = historyService.createHistoricTaskInstanceQuery().taskCandidateUser("kermit").taskCandidateGroup("management").list();
        assertEquals(3, tasks.size());
        tasks = historyService.createHistoricTaskInstanceQuery().taskCandidateUser("gonzo").taskCandidateGroup("management").list();
        assertEquals(1, tasks.size());
        tasks = historyService.createHistoricTaskInstanceQuery().taskCandidateUser("gonzo").taskCandidateGroup("invalid").list();
        assertEquals(0, tasks.size());
        tasks = historyService.createHistoricTaskInstanceQuery().taskCandidateGroupIn(groups).list();
        assertEquals(1, tasks.size());
    }
}
/**
 * Exercises paging over the historic task query while including process and
 * task-local variables, checking the page sizes and the local variables
 * carried by the returned tasks at several page offsets.
 */
public void testQueryWithPagingAndVariables() {
    // Variable data is only recorded at history level AUDIT or above.
    if (!HistoryTestHelper.isHistoryLevelAtLeast(HistoryLevel.AUDIT, processEngineConfiguration)) {
        return;
    }
    // Highest priority first: the first page holds exactly one task.
    List<HistoricTaskInstance> page = historyService.createHistoricTaskInstanceQuery()
            .includeProcessVariables().includeTaskLocalVariables()
            .orderByTaskPriority().desc().listPage(0, 1);
    assertEquals(1, page.size());
    Map<String, Object> localVars = page.get(0).getTaskLocalVariables();
    assertEquals(2, localVars.size());
    assertEquals("someVariable", localVars.get("testVar"));
    assertEquals(123, localVars.get("testVar2"));
    // Ascending order, offset 1, page size 2: two tasks; the last one carries the variables.
    page = historyService.createHistoricTaskInstanceQuery()
            .includeProcessVariables().includeTaskLocalVariables()
            .orderByTaskPriority().asc().listPage(1, 2);
    assertEquals(2, page.size());
    localVars = page.get(1).getTaskLocalVariables();
    assertEquals(2, localVars.size());
    assertEquals("someVariable", localVars.get("testVar"));
    assertEquals(123, localVars.get("testVar2"));
    // Offset 2 with a generous page size: only one task remains.
    page = historyService.createHistoricTaskInstanceQuery()
            .includeProcessVariables().includeTaskLocalVariables()
            .orderByTaskPriority().asc().listPage(2, 4);
    assertEquals(1, page.size());
    localVars = page.get(0).getTaskLocalVariables();
    assertEquals(2, localVars.size());
    assertEquals("someVariable", localVars.get("testVar"));
    assertEquals(123, localVars.get("testVar2"));
    // Offset beyond the result set yields an empty page.
    page = historyService.createHistoricTaskInstanceQuery()
            .includeProcessVariables().includeTaskLocalVariables()
            .orderByTaskPriority().asc().listPage(4, 2);
    assertEquals(0, page.size());
}
/**
 * Exercises paging over the historic task query while including process
 * variables, task-local variables AND identity links, checking page sizes and
 * that the inspected task on each page still carries both its variables and
 * its 'gonzo' assignee link.
 */
public void testQueryWithPagingVariablesAndIdentityLinks() {
    // Variable/identity-link data is only recorded at history level AUDIT or above.
    if (HistoryTestHelper.isHistoryLevelAtLeast(HistoryLevel.AUDIT, processEngineConfiguration)) {
        // Highest priority first: the first page holds exactly one task.
        List<HistoricTaskInstance> tasks = historyService.createHistoricTaskInstanceQuery()
                .includeProcessVariables().includeTaskLocalVariables().includeIdentityLinks()
                .orderByTaskPriority().desc().listPage(0, 1);
        assertEquals(1, tasks.size());
        assertHasTestLocalVariables(tasks.get(0));
        assertHasGonzoAssigneeLink(tasks.get(0));
        // Ascending order, offset 1, page size 2: inspect the last task on the page.
        tasks = historyService.createHistoricTaskInstanceQuery()
                .includeProcessVariables().includeTaskLocalVariables().includeIdentityLinks()
                .orderByTaskPriority().asc().listPage(1, 2);
        assertEquals(2, tasks.size());
        assertHasTestLocalVariables(tasks.get(1));
        assertHasGonzoAssigneeLink(tasks.get(1));
        // Offset 2 with a generous page size: only one task remains.
        tasks = historyService.createHistoricTaskInstanceQuery()
                .includeProcessVariables().includeTaskLocalVariables().includeIdentityLinks()
                .orderByTaskPriority().asc().listPage(2, 4);
        assertEquals(1, tasks.size());
        assertHasTestLocalVariables(tasks.get(0));
        assertHasGonzoAssigneeLink(tasks.get(0));
        // Offset beyond the result set yields an empty page.
        tasks = historyService.createHistoricTaskInstanceQuery()
                .includeProcessVariables().includeTaskLocalVariables().includeIdentityLinks()
                .orderByTaskPriority().asc().listPage(4, 2);
        assertEquals(0, tasks.size());
    }
}

/** Asserts the task carries exactly the two local variables set by generateTestTasks(). */
private void assertHasTestLocalVariables(HistoricTaskInstance task) {
    Map<String, Object> variableMap = task.getTaskLocalVariables();
    assertEquals(2, variableMap.size());
    assertEquals("someVariable", variableMap.get("testVar"));
    assertEquals(123, variableMap.get("testVar2"));
}

/** Asserts the task has a single identity link: an 'assignee' link to user 'gonzo'. */
private void assertHasGonzoAssigneeLink(HistoricTaskInstance task) {
    assertEquals(1, task.getIdentityLinks().size());
    IdentityLinkInfo identityLink = task.getIdentityLinks().get(0);
    assertNull(identityLink.getProcessInstanceId());
    assertEquals("assignee", identityLink.getType());
    assertNull(identityLink.getGroupId());
    assertEquals("gonzo", identityLink.getUserId());
    assertEquals(task.getId(), identityLink.getTaskId());
}
/**
 * Checks the {@code withoutTaskDueDate()} filter against a task whose
 * due-date starts unset, is then set, and is finally cleared again.
 */
@Deployment(resources = { "org/flowable/engine/test/api/task/TaskQueryTest.testProcessDefinition.bpmn20.xml" })
public void testWithoutDueDateQuery() throws Exception {
    // The historic query is only meaningful at history level AUDIT or above.
    if (!HistoryTestHelper.isHistoryLevelAtLeast(HistoryLevel.AUDIT, processEngineConfiguration)) {
        return;
    }
    ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess");
    waitForHistoryJobExecutorToProcessAllJobs(5000, 100);
    // A freshly created task has no due-date, so it matches the filter.
    HistoricTaskInstance noDueDateTask = historyService.createHistoricTaskInstanceQuery()
            .processInstanceId(processInstance.getId()).withoutTaskDueDate().singleResult();
    assertNotNull(noDueDateTask);
    assertNull(noDueDateTask.getDueDate());
    // Give the task a due-date; it must now drop out of the filtered results.
    Task runtimeTask = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
    Date parsedDueDate = new SimpleDateFormat("dd/MM/yyyy hh:mm:ss").parse("01/02/2003 01:12:13");
    runtimeTask.setDueDate(parsedDueDate);
    taskService.saveTask(runtimeTask);
    waitForHistoryJobExecutorToProcessAllJobs(5000, 100);
    assertEquals(0, historyService.createHistoricTaskInstanceQuery()
            .processInstanceId(processInstance.getId()).withoutTaskDueDate().count());
    // Clearing the due-date again makes the task match once more.
    runtimeTask = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
    runtimeTask.setDueDate(null);
    taskService.saveTask(runtimeTask);
    waitForHistoryJobExecutorToProcessAllJobs(5000, 100);
    assertEquals(1, historyService.createHistoricTaskInstanceQuery()
            .processInstanceId(processInstance.getId()).withoutTaskDueDate().count());
}
// Unit test for https://activiti.atlassian.net/browse/ACT-4152
public void testQueryWithIncludeTaskVariableAndTaskCategory() {
    // Verifies that the task category survives the query regardless of whether
    // variables are included. Each query must return at least one task assigned
    // to 'gonzo' (see generateTestTasks()) — without the non-empty checks the
    // for-loops would pass vacuously on an empty result set.
    List<HistoricTaskInstance> tasks = historyService.createHistoricTaskInstanceQuery().taskAssignee("gonzo").list();
    assertFalse("Expected at least one task assigned to gonzo", tasks.isEmpty());
    for (HistoricTaskInstance task : tasks) {
        assertNotNull(task.getCategory());
        assertEquals("testCategory", task.getCategory());
    }
    tasks = historyService.createHistoricTaskInstanceQuery().taskAssignee("gonzo").includeTaskLocalVariables().list();
    assertFalse("Expected at least one task assigned to gonzo", tasks.isEmpty());
    for (HistoricTaskInstance task : tasks) {
        assertNotNull(task.getCategory());
        assertEquals("testCategory", task.getCategory());
    }
    tasks = historyService.createHistoricTaskInstanceQuery().taskAssignee("gonzo").includeProcessVariables().list();
    assertFalse("Expected at least one task assigned to gonzo", tasks.isEmpty());
    for (HistoricTaskInstance task : tasks) {
        assertNotNull(task.getCategory());
        assertEquals("testCategory", task.getCategory());
    }
}
/**
 * Generates some test tasks: two tasks where kermit is a candidate and one
 * task assigned to gonzo.
 *
 * @return the ids of the created tasks, kermit's candidate tasks first
 */
private List<String> generateTestTasks() throws Exception {
    List<String> taskIds = new ArrayList<String>();
    SimpleDateFormat dateFormat = new SimpleDateFormat("dd/MM/yyyy hh:mm:ss.SSS");
    // Two candidate tasks for kermit, all created at the same fixed instant.
    processEngineConfiguration.getClock().setCurrentTime(dateFormat.parse("01/01/2001 01:01:01.000"));
    for (int i = 0; i < 2; i++) {
        Task kermitTask = taskService.newTask();
        kermitTask.setName("testTask");
        kermitTask.setDescription("testTask description");
        kermitTask.setPriority(3);
        taskService.saveTask(kermitTask);
        taskIds.add(kermitTask.getId());
        taskService.setVariableLocal(kermitTask.getId(), "test", "test");
        taskService.addCandidateUser(kermitTask.getId(), "kermit");
    }
    // One categorised task for gonzo, created at a later fixed instant.
    processEngineConfiguration.getClock().setCurrentTime(dateFormat.parse("02/02/2002 02:02:02.000"));
    Task gonzoTask = taskService.newTask();
    gonzoTask.setName("gonzoTask");
    gonzoTask.setDescription("gonzo description");
    gonzoTask.setPriority(4);
    gonzoTask.setCategory("testCategory");
    taskService.saveTask(gonzoTask);
    taskService.setAssignee(gonzoTask.getId(), "gonzo");
    taskService.setVariableLocal(gonzoTask.getId(), "testVar", "someVariable");
    taskService.setVariableLocal(gonzoTask.getId(), "testVar2", 123);
    taskIds.add(gonzoTask.getId());
    return taskIds;
}
}
| |
/**
* The MIT License
* Copyright (c) 2015 Estonian Information System Authority (RIA), Population Register Centre (VRK)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package ee.ria.xroad.signer.protocol.handler;
import ee.ria.xroad.common.CodedException;
import ee.ria.xroad.common.SystemProperties;
import ee.ria.xroad.common.util.CryptoUtils;
import ee.ria.xroad.signer.protocol.AbstractRequestHandler;
import ee.ria.xroad.signer.protocol.dto.KeyUsageInfo;
import ee.ria.xroad.signer.protocol.message.GenerateCertRequest;
import ee.ria.xroad.signer.protocol.message.GenerateCertRequestResponse;
import ee.ria.xroad.signer.tokenmanager.TokenManager;
import ee.ria.xroad.signer.tokenmanager.token.SoftwareTokenType;
import ee.ria.xroad.signer.util.CalculateSignature;
import ee.ria.xroad.signer.util.CalculatedSignature;
import ee.ria.xroad.signer.util.TokenAndKey;
import akka.actor.ActorRef;
import akka.actor.Props;
import akka.actor.UntypedActor;
import lombok.extern.slf4j.Slf4j;
import org.bouncycastle.asn1.x500.X500Name;
import org.bouncycastle.asn1.x509.AlgorithmIdentifier;
import org.bouncycastle.openssl.PEMWriter;
import org.bouncycastle.operator.ContentSigner;
import org.bouncycastle.operator.DefaultSignatureAlgorithmIdentifierFinder;
import org.bouncycastle.pkcs.PKCS10CertificationRequest;
import org.bouncycastle.pkcs.jcajce.JcaPKCS10CertificationRequestBuilder;
import java.io.ByteArrayOutputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.security.NoSuchAlgorithmException;
import java.security.PublicKey;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import static ee.ria.xroad.common.ErrorCodes.X_INTERNAL_ERROR;
import static ee.ria.xroad.common.ErrorCodes.X_WRONG_CERT_USAGE;
import static ee.ria.xroad.common.ErrorCodes.translateException;
import static ee.ria.xroad.common.util.CryptoUtils.calculateDigest;
import static ee.ria.xroad.common.util.CryptoUtils.decodeBase64;
import static ee.ria.xroad.common.util.CryptoUtils.readX509PublicKey;
import static ee.ria.xroad.signer.util.ExceptionHelper.keyNotAvailable;
/**
 * Handles certificate request generations.
 *
 * Builds a PKCS#10 certificate signing request (CSR) for the requested key.
 * The CSR signature is calculated by the token actor that holds the key;
 * the generated request is registered with the {@link TokenManager} and
 * returned in the requested format (DER or PEM).
 */
@Slf4j
public class GenerateCertRequestRequestHandler extends AbstractRequestHandler<GenerateCertRequest> {

    @Override
    protected Object handle(GenerateCertRequest message) throws Exception {
        TokenAndKey tokenAndKey = TokenManager.findTokenAndKey(message.getKeyId());

        if (!TokenManager.isKeyAvailable(tokenAndKey.getKeyId())) {
            throw keyNotAvailable(tokenAndKey.getKeyId());
        }

        // Authentication certificate requests are only allowed under the software token.
        if (message.getKeyUsage() == KeyUsageInfo.AUTHENTICATION
                && !SoftwareTokenType.ID.equals(tokenAndKey.getTokenId())) {
            throw CodedException.tr(X_WRONG_CERT_USAGE,
                    "auth_cert_under_softtoken",
                    "Authentication certificate requests can only be created under software tokens");
        }

        if (tokenAndKey.getKey().getPublicKey() == null) {
            throw new CodedException(X_INTERNAL_ERROR, "Key '%s' has no public key", message.getKeyId());
        }

        PublicKey publicKey = readPublicKey(tokenAndKey.getKey().getPublicKey());

        JcaPKCS10CertificationRequestBuilder certRequestBuilder = new JcaPKCS10CertificationRequestBuilder(
                new X500Name(message.getSubjectName()), publicKey);

        // The signing operation is delegated to the token via TokenContentSigner.
        ContentSigner signer = new TokenContentSigner(tokenAndKey);
        PKCS10CertificationRequest generatedRequest = certRequestBuilder.build(signer);

        String certReqId = TokenManager.addCertRequest(tokenAndKey.getKeyId(), message.getMemberId(),
                message.getSubjectName(), message.getKeyUsage());

        return new GenerateCertRequestResponse(certReqId, convert(generatedRequest, message.getFormat()),
                message.getFormat());
    }

    /** Decodes a base64-encoded X.509 public key. */
    private static PublicKey readPublicKey(String publicKeyBase64) throws Exception {
        return readX509PublicKey(decodeBase64(publicKeyBase64));
    }

    /** Encodes the CSR in the requested format; DER is the default. */
    private static byte[] convert(PKCS10CertificationRequest request, GenerateCertRequest.RequestFormat format)
            throws Exception {
        switch (format) {
            case PEM:
                return toPem(request);
            default:
                return request.getEncoded(); // DER
        }
    }

    /** Writes the CSR in PEM format and returns the resulting bytes. */
    private static byte[] toPem(PKCS10CertificationRequest req) throws Exception {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        try (PEMWriter pw = new PEMWriter(new OutputStreamWriter(out))) {
            pw.writeObject(req);
        }
        return out.toByteArray();
    }

    /**
     * ContentSigner implementation that forwards the digest of the data to be
     * signed to the token actor and blocks until the calculated signature is
     * received (or a timeout/interruption occurs).
     *
     * Deliberately a non-static inner class: it needs the enclosing handler's
     * actor context to spawn the receiver actor and to address the token.
     */
    private class TokenContentSigner implements ContentSigner {

        private static final int SIGNATURE_TIMEOUT_SECONDS = 10;

        // Collects the data to be signed as BouncyCastle streams it in.
        private final ByteArrayOutputStream out = new ByteArrayOutputStream();

        private final TokenAndKey tokenAndKey;

        private final String digestAlgoId;
        private final String signAlgoId;

        // Released by setSignature() when the token replies.
        private final CountDownLatch latch = new CountDownLatch(1);

        // volatile: written by the receiver actor, read by the waiting thread.
        private volatile CalculatedSignature signature;

        TokenContentSigner(TokenAndKey tokenAndKey) throws NoSuchAlgorithmException {
            this.tokenAndKey = tokenAndKey;
            digestAlgoId = SystemProperties.getSignerCsrSignatureDigestAlgorithm();
            signAlgoId = CryptoUtils.getSignatureAlgorithmId(digestAlgoId, tokenAndKey.getSignMechanism());
        }

        @Override
        public AlgorithmIdentifier getAlgorithmIdentifier() {
            return new DefaultSignatureAlgorithmIdentifierFinder().find(signAlgoId);
        }

        @Override
        public OutputStream getOutputStream() {
            return out;
        }

        @Override
        public byte[] getSignature() {
            log.debug("Calculating signature for certificate request...");

            byte[] digest;
            try {
                digest = calculateDigest(digestAlgoId, out.toByteArray());
            } catch (Exception e) {
                throw new CodedException(X_INTERNAL_ERROR, e);
            }

            // Temporary actor that relays the token's reply back to this signer.
            ActorRef signatureReceiver = getContext().actorOf(Props.create(SignatureReceiverActor.class, this));
            try {
                tellToken(new CalculateSignature(getSelf(), tokenAndKey.getKeyId(), signAlgoId, digest),
                        tokenAndKey.getTokenId(), signatureReceiver);
                waitForSignature();

                if (signature.getException() != null) {
                    throw translateException(signature.getException());
                }

                return signature.getSignature();
            } finally {
                getContext().stop(signatureReceiver);
            }
        }

        /**
         * Blocks until the signature arrives. Throws instead of returning on
         * timeout or interruption so that callers never observe a null
         * signature (previously an interrupt fell through and getSignature()
         * would throw a NullPointerException on 'signature.getException()').
         */
        private void waitForSignature() {
            try {
                if (!latch.await(SIGNATURE_TIMEOUT_SECONDS, TimeUnit.SECONDS)) {
                    throw new CodedException(X_INTERNAL_ERROR, "Signature calculation timed out");
                }
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                throw new CodedException(X_INTERNAL_ERROR, "Signature calculation was interrupted");
            }
        }

        /** Called by SignatureReceiverActor; stores the result and releases the waiter. */
        void setSignature(CalculatedSignature sig) {
            this.signature = sig;
            latch.countDown();
        }
    }

    /** Relays the CalculatedSignature reply from the token to the waiting signer. */
    static class SignatureReceiverActor extends UntypedActor {

        private final TokenContentSigner signer;

        SignatureReceiverActor(TokenContentSigner signer) {
            this.signer = signer;
        }

        @Override
        public void onReceive(Object message) throws Exception {
            if (message instanceof CalculatedSignature) {
                signer.setSignature((CalculatedSignature) message);
            } else {
                unhandled(message);
            }
        }
    }
}
| |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package se.kth.karamel.backend.converter;
import com.google.gson.Gson;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonPrimitive;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import se.kth.karamel.backend.running.model.ClusterRuntime;
import se.kth.karamel.backend.running.model.GroupRuntime;
import se.kth.karamel.backend.running.model.MachineRuntime;
import se.kth.karamel.common.clusterdef.json.JsonCluster;
import se.kth.karamel.common.clusterdef.json.JsonCookbook;
import se.kth.karamel.common.clusterdef.json.JsonGroup;
import se.kth.karamel.common.clusterdef.json.JsonRecipe;
import se.kth.karamel.common.util.Settings;
import se.kth.karamel.common.exception.KaramelException;
/**
 * Generates the per-machine chef-json documents that Karamel ships to each
 * machine before running chef-solo. A json is produced for every
 * machine-recipe combination (installation) and every machine-purge
 * combination (purge).
 *
 * @author kamal
 */
public class ChefJsonGenerator {

  /**
   * Generates all purge chef-jsons per machine&purge pair through the following steps:
   * 1. root-json: makes an empty json object as root
   * 2. group-jsons: per group in the cluster clones a new json from the root json
   * 2.1 all cookbooks' attributes: adds all attributes related to all cookbooks in that group
   * 2.2 purge-json: clones the group-json and adds machine-ips and run-list for that recipe
   * 2.3 returns all generated jsons for all machine-cookbook-purge combination
   *
   * @param definition cluster definition
   * @param clusterEntity runtime state of the cluster
   * @return map of machineId-recipeName->json
   */
  public static Map<String, JsonObject> generateClusterChefJsonsForPurge(JsonCluster definition,
      ClusterRuntime clusterEntity) throws KaramelException {
    Map<String, JsonObject> chefJsons = new HashMap<>();
    JsonObject root = new JsonObject();
    for (GroupRuntime groupEntity : clusterEntity.getGroups()) {
      JsonObject clone = cloneJsonObject(root);
      JsonGroup jsonGroup = UserClusterDataExtractor.findGroup(definition, groupEntity.getName());
      // First merge every cookbook's attributes into the group json ...
      for (JsonCookbook cb : jsonGroup.getCookbooks()) {
        addCookbookAttributes(cb, clone);
      }
      // ... then derive the per-machine purge jsons from the fully populated json.
      for (JsonCookbook cb : jsonGroup.getCookbooks()) {
        chefJsons.putAll(generatePurgeChefJsons(clone, cb, groupEntity));
      }
    }
    return chefJsons;
  }

  /**
   * Generates all chef-jsons per machine&recipe pair through the following steps:
   * 1. root-json: makes an empty json object as root
   * 2. all-ips: adds all recipe private and public ips into the root json
   * 3. group-jsons: per group in the cluster clones a new json from the root json
   * 3.1 all cookbooks' attributes: adds all attributes related to all cookbooks in that group
   * 3.2 recipe-json: clones the group-json per recipe and adds machine-ips and run-list for that recipe
   * 3.3 returns all generated jsons for all machine-cookbook-recipe combination
   *
   * @param definition cluster definition
   * @param clusterEntity runtime state of the cluster
   * @return map of machineId-recipeName->json
   */
  public static Map<String, JsonObject> generateClusterChefJsonsForInstallation(JsonCluster definition,
      ClusterRuntime clusterEntity) throws KaramelException {
    Map<String, JsonObject> chefJsons = new HashMap<>();
    JsonObject root = new JsonObject();
    aggregateIpAddresses(root, definition, clusterEntity);
    // Global attributes apply to every group.
    for (JsonCookbook cb : definition.getCookbooks()) {
      addCookbookAttributes(cb, root);
    }
    for (GroupRuntime groupEntity : clusterEntity.getGroups()) {
      JsonObject clone = cloneJsonObject(root);
      JsonGroup jsonGroup = UserClusterDataExtractor.findGroup(definition, groupEntity.getName());
      // First merge every cookbook's attributes into the group json ...
      for (JsonCookbook cb : jsonGroup.getCookbooks()) {
        addCookbookAttributes(cb, clone);
      }
      // ... then derive the per-machine recipe jsons from the fully populated json.
      for (JsonCookbook cb : jsonGroup.getCookbooks()) {
        chefJsons.putAll(generateRecipesChefJsons(clone, cb, groupEntity));
      }
    }
    return chefJsons;
  }

  /**
   * For a specific cookbook, generates chef-json of purge for all the combinations of machine-purge.
   *
   * @param json group-level json (already carries the cookbook attributes)
   * @param cb cookbook to purge
   * @param groupEntity group whose machines receive the json
   * @return map of machineId+purgeRecipeName->json
   * @throws KaramelException on invalid cluster state
   */
  public static Map<String, JsonObject> generatePurgeChefJsons(JsonObject json, JsonCookbook cb,
      GroupRuntime groupEntity) throws KaramelException {
    Map<String, JsonObject> groupJsons = new HashMap<>();
    for (MachineRuntime me : groupEntity.getMachines()) {
      String purgeRecipeName = cb.getName() + Settings.COOKBOOK_DELIMITER + Settings.PURGE_RECIPE;
      JsonObject clone = addMachineNRecipeToJson(json, me, purgeRecipeName);
      groupJsons.put(me.getId() + purgeRecipeName, clone);
    }
    return groupJsons;
  }

  /**
   * For a specific cookbook, generates all chef-jsons for all the combinations of machine-recipe,
   * including the cookbook's install recipe.
   *
   * @param json group-level json (already carries the cookbook attributes)
   * @param cb cookbook whose recipes are generated
   * @param groupEntity group whose machines receive the jsons
   * @return map of machineId+recipeName->json
   * @throws KaramelException on invalid cluster state
   */
  public static Map<String, JsonObject> generateRecipesChefJsons(JsonObject json, JsonCookbook cb,
      GroupRuntime groupEntity) throws KaramelException {
    Map<String, JsonObject> groupJsons = new HashMap<>();
    for (MachineRuntime me : groupEntity.getMachines()) {
      for (JsonRecipe recipe : cb.getRecipes()) {
        JsonObject clone = addMachineNRecipeToJson(json, me, recipe.getCanonicalName());
        groupJsons.put(me.getId() + recipe.getCanonicalName(), clone);
      }
      // The install recipe is implicit and not part of cb.getRecipes().
      String installRecipeName = cb.getName() + Settings.COOKBOOK_DELIMITER + Settings.INSTALL_RECIPE;
      JsonObject clone = addMachineNRecipeToJson(json, me, installRecipeName);
      groupJsons.put(me.getId() + installRecipeName, clone);
    }
    return groupJsons;
  }

  /**
   * Clones the json and stamps it with the machine's ip addresses and the
   * run-list for the given recipe.
   */
  public static JsonObject addMachineNRecipeToJson(JsonObject json, MachineRuntime me, String recipeName) {
    JsonObject clone = cloneJsonObject(json);
    addMachineIps(clone, me);
    addRunListForRecipe(clone, recipeName);
    return clone;
  }

  /**
   * Takes a machine-specific json with a recipe-name that has to be run on that machine, it then generates
   * the run_list section into the json with the recipe-name.
   *
   * @param chefJson machine-specific json to extend
   * @param recipeName canonical recipe name to run
   */
  public static void addRunListForRecipe(JsonObject chefJson, String recipeName) {
    JsonArray jarr = new JsonArray();
    jarr.add(new JsonPrimitive(recipeName));
    chefJson.add(Settings.REMOTE_CHEFJSON_RUNLIST_TAG, jarr);
  }

  /**
   * It takes a machine-specific json and adds private_ips, public_ips and a
   * public-ip->hostname mapping into it.
   *
   * @param json machine-specific json to extend
   * @param machineEntity machine whose addresses are added
   */
  public static void addMachineIps(JsonObject json, MachineRuntime machineEntity) {
    JsonArray ips = new JsonArray();
    ips.add(new JsonPrimitive(machineEntity.getPrivateIp()));
    json.add("private_ips", ips);
    ips = new JsonArray();
    ips.add(new JsonPrimitive(machineEntity.getPublicIp()));
    json.add("public_ips", ips);
    JsonObject hosts = new JsonObject();
    hosts.add(machineEntity.getPublicIp(), new JsonPrimitive(machineEntity.getName()));
    json.add("hosts", hosts);
  }

  /**
   * It adds those attributes related to one cookbook into the json object.
   * For example [ndb/ports=[123, 134, 145], ndb/DataMemory=111]
   *
   * @param jc cookbook whose attributes are added
   * @param root json object receiving the attributes
   */
  public static void addCookbookAttributes(JsonCookbook jc, JsonObject root) {
    for (Map.Entry<String, Object> entry : jc.getAttrs().entrySet()) {
      String[] keyComps = entry.getKey().split(Settings.ATTR_DELIMITER);
      JsonObject parent = getOrCreateParent(root, keyComps);
      String leaf = keyComps[keyComps.length - 1];
      Object value = entry.getValue();
      if (value instanceof Collection) {
        // Collection-valued attributes become json arrays of their string forms.
        JsonArray jarr = new JsonArray();
        for (Object valElem : (Collection<?>) value) {
          jarr.add(new JsonPrimitive(valElem.toString()));
        }
        parent.add(leaf, jarr);
      } else {
        parent.addProperty(leaf, value.toString());
      }
    }
  }

  /**
   * Adds private_ips and public_ips of all machines per each recipe as an attribute.
   * For example
   * hadoop::dn/private_ips: [192.168.0.1, 192.168.0.2]
   * hadoop::dn/public_ips: [80.70.33.22, 80.70.33.23]
   * install recipes are ignored here
   *
   * @param json root json receiving the aggregated addresses
   * @param definition cluster definition
   * @param clusterEntity runtime state of the cluster
   */
  public static void aggregateIpAddresses(JsonObject json, JsonCluster definition, ClusterRuntime clusterEntity) {
    Map<String, List<String>> privateIps = new HashMap<>();
    Map<String, List<String>> publicIps = new HashMap<>();
    Map<String, Map<String, String>> hosts = new HashMap<>();
    Map<String, Map<String, String>> privateIpsDomainIds = new HashMap<>();
    for (GroupRuntime ge : clusterEntity.getGroups()) {
      JsonGroup jg = UserClusterDataExtractor.findGroup(definition, ge.getName());
      for (MachineRuntime me : ge.getMachines()) {
        for (JsonCookbook jc : jg.getCookbooks()) {
          for (JsonRecipe recipe : jc.getRecipes()) {
            // Install recipes carry no useful address information.
            if (!recipe.getCanonicalName().endsWith(Settings.COOKBOOK_DELIMITER + Settings.INSTALL_RECIPE)) {
              String privateAttr = recipe.getCanonicalName() + Settings.ATTR_DELIMITER +
                  Settings.REMOTE_CHEFJSON_PRIVATEIPS_TAG;
              String publicAttr = recipe.getCanonicalName() + Settings.ATTR_DELIMITER +
                  Settings.REMOTE_CHEFJSON_PUBLICIPS_TAG;
              String hostsAttr = recipe.getCanonicalName() + Settings.ATTR_DELIMITER +
                  Settings.REMOTE_CHEFJSON_HOSTS_TAG;
              String privateAttrDomain = recipe.getCanonicalName() + Settings.ATTR_DELIMITER +
                  Settings.REMOTE_CHEFJSON_PRIVATEIPS_DOMAIN_IDS_TAG;
              // All four keys are derived from the same canonical name, so one
              // containsKey check covers all four maps.
              if (!privateIps.containsKey(privateAttr)) {
                privateIps.put(privateAttr, new ArrayList<>());
                publicIps.put(publicAttr, new ArrayList<>());
                hosts.put(hostsAttr, new HashMap<String, String>());
                privateIpsDomainIds.put(privateAttrDomain, new HashMap<>());
              }
              privateIps.get(privateAttr).add(me.getPrivateIp());
              publicIps.get(publicAttr).add(me.getPublicIp());
              hosts.get(hostsAttr).put(me.getPublicIp(), me.getName());
              hosts.get(hostsAttr).put(me.getPrivateIp(), me.getName());
              privateIpsDomainIds.get(privateAttrDomain).put(me.getPrivateIp(),
                  String.valueOf(me.getLocationDomainId()));
            }
          }
        }
      }
    }
    attr2Json(json, privateIps);
    attr2Json(json, publicIps);
    attrMap2Json(json, privateIpsDomainIds);
    attrMap2Json(json, hosts);
  }

  /**
   * It converts attributes into the json format and adds them into the root json object.
   * For example hadoop::dn/hosts: {192.168.0.1: node-name} is converted into
   * {"hadoop":{"dn":{"hosts": {"192.168.0.1": "node-name"},}}}
   *
   * @param root json object receiving the converted attributes
   * @param attrs attribute-path -> (key -> value) map
   */
  public static void attrMap2Json(JsonObject root, Map<String, Map<String, String>> attrs) {
    for (Map.Entry<String, Map<String, String>> entry : attrs.entrySet()) {
      String[] keyComps = entry.getKey().split(Settings.COOKBOOK_DELIMITER + "|" + Settings.ATTR_DELIMITER);
      JsonObject parent = getOrCreateParent(root, keyComps);
      JsonObject jobj = new JsonObject();
      for (Map.Entry<String, String> e2 : entry.getValue().entrySet()) {
        jobj.add(e2.getKey(), new JsonPrimitive(e2.getValue()));
      }
      parent.add(keyComps[keyComps.length - 1], jobj);
    }
  }

  /**
   * It converts attributes into the json format and adds them into the root json object.
   * For example hadoop::dn/private_ips: [192.168.0.1, 192.168.0.2] is converted into
   * {"hadoop":{"dn":{"private_ips":["192.168.0.1", "192.168.0.2"]}}}
   *
   * @param root json object receiving the converted attributes
   * @param attrs attribute-path -> value-list map
   */
  public static void attr2Json(JsonObject root, Map<String, List<String>> attrs) {
    for (Map.Entry<String, List<String>> entry : attrs.entrySet()) {
      String[] keyComps = entry.getKey().split(Settings.COOKBOOK_DELIMITER + "|" + Settings.ATTR_DELIMITER);
      JsonObject parent = getOrCreateParent(root, keyComps);
      JsonArray jarr = new JsonArray();
      for (String valElem : entry.getValue()) {
        jarr.add(new JsonPrimitive(valElem));
      }
      parent.add(keyComps[keyComps.length - 1], jarr);
    }
  }

  /**
   * Walks down {@code root} along all but the last component of {@code keyComps},
   * creating intermediate json objects as needed, and returns the object that
   * should receive the final (leaf) component. Shared by addCookbookAttributes,
   * attrMap2Json and attr2Json, which previously each had a copy of this loop.
   */
  private static JsonObject getOrCreateParent(JsonObject root, String[] keyComps) {
    JsonObject current = root;
    for (int i = 0; i < keyComps.length - 1; i++) {
      JsonElement child = current.get(keyComps[i]);
      if (child == null) {
        JsonObject created = new JsonObject();
        current.add(keyComps[i], created);
        current = created;
      } else {
        current = child.getAsJsonObject();
      }
    }
    return current;
  }

  /** Deep-copies a JsonObject by serialising it to text and parsing it back. */
  public static JsonObject cloneJsonObject(JsonObject jo) {
    Gson gson = new Gson();
    JsonElement jelem = gson.fromJson(jo.toString(), JsonElement.class);
    return jelem.getAsJsonObject();
  }
}
| |
/* Copyright (c) 2008, University of Oslo, Norway
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of the University of Oslo nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
* IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package vtk.web.decorating.components;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Required;
import vtk.repository.Path;
import vtk.repository.search.QueryException;
import vtk.web.RequestContext;
import vtk.web.decorating.DecoratorRequest;
import vtk.web.decorating.DecoratorResponse;
import vtk.web.tags.RepositoryTagElementsDataProvider;
import vtk.web.tags.TagElement;
import vtk.web.tags.TagsHelper;
/**
* Decorator component for tag cloud.
*
*/
public class TagCloudComponent extends ViewRenderingDecoratorComponent implements InitializingBean {
// Component description rendered in the decorator-component documentation.
private static final String DESCRIPTION = "Renders a tag cloud as an alphabetically sorted list. Classes are put on "
+ "the elements for representing the magnitude of the individual tags in the cloud. "
+ "List elements will be assigned classes 'tag-magnitude-N', where "
+ "N represents the magnitude as a bounded positive integer number.";
// 'limit' parameter: cap on how many tags are included in the cloud.
private static final String PARAMETER_TAG_LIMIT = "limit";
private static final int PARAMETER_TAG_LIMIT_DEFAULT_VALUE = 20;
private static final String PARAMETER_TAG_LIMIT_DESC = "Set limit on how many tags to include. Setting this to a low value will "
+ "show only the most popular tags. Default is: " + PARAMETER_TAG_LIMIT_DEFAULT_VALUE;
// 'tag-occurence-min' parameter: minimum occurrence count for a tag to be included.
private static final String PARAMETER_TAG_OCCURENCE_MIN = "tag-occurence-min";
private static final int PARAMETER_TAG_OCCURENCE_MIN_DEFAULT_VALUE = 1;
private static final String PARAMETER_TAG_OCCURENCE_MIN_DESC = "Limit tag cloud to include only tags with an occurence count higher than "
+ "or equal to this minimal value. This can be used to weed out tags "
+ "with for instance only one or two occurences within the scope. "
+ "The default value is 1. Increase this as needed, if your tag cloud "
+ "contains many undesirable small tags with only few occurences.";
// Scope parameter (name shared with TagsHelper): URI subtree the cloud is built from.
private static final String PARAMETER_SCOPE = TagsHelper.SCOPE_PARAMETER;
private static final String PARAMETER_SCOPE_DESC = "Set the URI scope for the tag cloud. Relative URIs are allowed. "
+ "Only tags existing in the folder tree given by the URI will be "
+ "taken into consideration when generating the tag cloud. "
+ "The default value is the current directory and below.";
// 'magnitude-max' parameter: upper bound of the tag-magnitude-N scale.
private static final String PARAMETER_MAGNITUDE_MAX = "magnitude-max";
private static final int PARAMETER_MAGNITUDE_MAX_DEFAULT_VALUE = 5;
private static final String PARAMETER_MAGNITUDE_MAX_DESC = "Sets the maximum magnitude for a tags in the cloud (an integer number bigger than 1). "
+ "The tags with the highest occurence will be assigned the maximum magnitude value. "
+ "You can use this to adjust the granularity of the magnitude-scale. "
+ "Note that this number must be bigger than or equal to the minimum value (see next parameter). "
+ "Default value is: " + PARAMETER_MAGNITUDE_MAX_DEFAULT_VALUE;
// 'magnitude-min' parameter: lower bound of the tag-magnitude-N scale.
private static final String PARAMETER_MAGNITUDE_MIN = "magnitude-min";
private static final int PARAMETER_MAGNITUDE_MIN_DEFAULT_VALUE = 1;
private static final String PARAMETER_MAGNITUDE_MIN_DESC = "Sets the minimum magnitude for a tag in the tag cloud (an integer number bigger than 1). "
+ "The tags with the lowest occurence within the result set will be assigned the minimum magnitude value. "
+ "The result set can be restricted using the parameters '"
+ PARAMETER_SCOPE
+ "' and '"
+ PARAMETER_TAG_LIMIT + "'. " + "Default value is: " + PARAMETER_MAGNITUDE_MIN_DEFAULT_VALUE;
// 'service-url' parameter: retained only for backwards compatibility, ignored at runtime.
private static final String PARAMETER_SERVICE_URL = "service-url";
private static final String PARAMETER_SERVICE_URL_DESC = "Deprecated: NO LONGER USED. Kept to avoid breaking existing component references.";
// Provides the tag elements (injected via setter; see afterPropertiesSet).
private RepositoryTagElementsDataProvider tagElementsProvider;
@Override
protected String getDescriptionInternal() {
return DESCRIPTION;
}
@Override
protected Map<String, String> getParameterDescriptionsInternal() {
Map<String, String> map = new LinkedHashMap<>();
map.put(PARAMETER_SCOPE, PARAMETER_SCOPE_DESC);
map.put(PARAMETER_TAG_LIMIT, PARAMETER_TAG_LIMIT_DESC);
map.put(PARAMETER_TAG_OCCURENCE_MIN, PARAMETER_TAG_OCCURENCE_MIN_DESC);
map.put(PARAMETER_MAGNITUDE_MAX, PARAMETER_MAGNITUDE_MAX_DESC);
map.put(PARAMETER_MAGNITUDE_MIN, PARAMETER_MAGNITUDE_MIN_DESC);
map.put(PARAMETER_SERVICE_URL, PARAMETER_SERVICE_URL_DESC);
return map;
}
@Override
protected void processModel(Map<String, Object> model, DecoratorRequest request, DecoratorResponse response)
throws Exception {
super.processModel(model, request, response);
RequestContext requestContext = RequestContext.getRequestContext(request.getServletRequest());
Path scopeUri = requestContext.getCurrentCollection();
String token = requestContext.isViewUnauthenticated() ? null : requestContext.getSecurityToken(); // VTK-2460
if (request.getStringParameter(PARAMETER_SCOPE) != null) {
scopeUri = buildScopePath(request);
}
int magnitudeMin = PARAMETER_MAGNITUDE_MIN_DEFAULT_VALUE;
int magnitudeMax = PARAMETER_MAGNITUDE_MAX_DEFAULT_VALUE;
int limit = PARAMETER_TAG_LIMIT_DEFAULT_VALUE;
int tagOccurenceMin = PARAMETER_TAG_OCCURENCE_MIN_DEFAULT_VALUE;
try {
if (request.getStringParameter(PARAMETER_MAGNITUDE_MIN) != null) {
magnitudeMin = Integer.parseInt(request.getStringParameter(PARAMETER_MAGNITUDE_MIN));
}
if (request.getStringParameter(PARAMETER_MAGNITUDE_MAX) != null) {
magnitudeMax = Integer.parseInt(request.getStringParameter(PARAMETER_MAGNITUDE_MAX));
}
if (request.getStringParameter(PARAMETER_TAG_LIMIT) != null) {
limit = Integer.parseInt(request.getStringParameter(PARAMETER_TAG_LIMIT));
}
if (request.getStringParameter(PARAMETER_TAG_OCCURENCE_MIN) != null) {
tagOccurenceMin = Integer.parseInt(request.getStringParameter(PARAMETER_TAG_OCCURENCE_MIN));
}
if (tagOccurenceMin < 1) {
throw new DecoratorComponentException("Parameter '" + PARAMETER_TAG_OCCURENCE_MIN
+ "' must be a number larger than or equal to 1.");
}
if (limit <= 0) {
throw new DecoratorComponentException("Parameter '" + PARAMETER_TAG_LIMIT
+ "' cannot be zero or negative");
}
if (magnitudeMin < 1 || magnitudeMax < magnitudeMin) {
throw new DecoratorComponentException("Value of parameter '" + PARAMETER_MAGNITUDE_MAX
+ "' must be greater or equal to value of parameter '" + PARAMETER_MAGNITUDE_MIN
+ "' and both parameters must be greater than zero.");
}
} catch (NumberFormatException nfe) {
throw new DecoratorComponentException("An invalid numeric parameter value was supplied: "
+ nfe.getMessage());
}
// Legacy exception handling, should be refactored.
try {
List<TagElement> tagElements =
tagElementsProvider.getTagElements(request.getServletRequest(),
scopeUri, token, magnitudeMin,
magnitudeMax, limit, tagOccurenceMin);
// Populate model
model.put("tagElements", tagElements);
} catch (QueryException qe) {
throw new DecoratorComponentException("There was a problem with the data report query: " + qe.getMessage());
} catch (IllegalArgumentException e) {
throw new DecoratorComponentException("Illegal value for parameter '" + PARAMETER_SCOPE
+ "', must be a valid URI.");
}
}
Path buildScopePath(DecoratorRequest request) {
String href = request.getStringParameter(PARAMETER_SCOPE);
if (href.startsWith("/")) {
return Path.fromString(href);
}
Path requestURI = RequestContext
.getRequestContext(request.getServletRequest()).getResourceURI();
return requestURI.expand(href);
}
@Required
public void setTagElementsProvider(
RepositoryTagElementsDataProvider tagElementsProvider) {
this.tagElementsProvider = tagElementsProvider;
}
}