gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
package net.binaryaura.customize.client.gui.huditem;
import net.binaryaura.customize.client.ClientProxy;
import net.binaryaura.customize.client.gui.GuiScreenAdjustHud;
import net.binaryaura.customize.client.gui.LayeredSprite;
import net.binaryaura.customize.client.gui.Sprite;
import net.binaryaura.customize.client.gui.SpriteSet;
import net.binaryaura.customize.client.gui.huditem.defaults.HudItemHealth;
import net.minecraft.util.MathHelper;
/**
* This HUDItem acts as a gauge. It is made up of several
* icons each with one or more stages. When increasing the
* value, the gauge goes through the different stages
* of each icon.
*
* 0 0 0 0 0
* 1 0 0 0 0 <
* 2 0 0 0 0 <3
* 2 1 0 0 0 <3 <
* 2 2 0 0 0 <3 <3
* etc
*
* Each icon is rendered independently as layers. Rendering
* background layers first then, successive layers on top.
* Each icon is separated by <code>space</code>.
*
* When the number of icons exceeds <code>maxPerRow</code>
* Additional rows are rendered perpendicularly to the gauge
* with <code>stackSpace</code> difference between each row.
* <code>stackSpace</code> decreases as more and more stacks
* are added.
*
* Specific instances should extend this class. Default values
* should be overridden in the extended class.
*
* @author BinaryAura
* @see HudItem
*/
public abstract class HudItemIconGauge extends HudItem {
/**
 * Default maximum value used when displaying Icon
 * Gauges in {@link GuiScreenAdjustHud}.
 *
 * @see GuiScreenAdjustHud
 */
protected static final int DFLT_AMT = 20;

/**
 * Default maximum amount of icons to be rendered
 * in a row for all Icon Gauges.
 */
protected static final int DFLT_MAX_PER_ROW = 10;

/**
 * Default maximum space between rows for all Icon
 * Gauges. Row spacing shrinks from this value as rows
 * are added (see stackSpace).
 */
protected static final int DFLT_MAX_STK_SPC = 11;

/**
 * Default minimum space between rows for all Icon
 * Gauges. Row spacing never shrinks below this value.
 */
protected static final int DFLT_MIN_STK_SPC = 3;

/**
 * Default space between icons in the rows of all
 * Icon Gauges.
 */
protected static final int DFLT_SPC = 8;
// TODO: Fix Button when Rotating with IconGauge Stacks
/**
* Constructs an instance of an Icon Gauge with the
* specified <code>name</code>. IconGauge characteristics
* are set here along with the default values. The default
* values can be overridden by the specific gauge's class.
*
* @see HudItem
*
* @param name The name of the HUDItem
*/
public HudItemIconGauge(String name) {
    super(name);
}

/**
 * Initializes Icon Gauge characteristics and default layout
 * values. Specific gauges override these defaults in their
 * own classes (see the class javadoc).
 */
@Override
protected void init() {
    // Icon gauges support mirroring and rotation by default.
    canFlip = true;
    canRotate = true;
    maxPerRow = DFLT_MAX_PER_ROW;
    maxStackSpace = DFLT_MAX_STK_SPC;
    minStackSpace = DFLT_MIN_STK_SPC;
    space = DFLT_SPC;
}
/**
* Amount is reset each time {@link #getAmount()} changes. If the game
* {@link #isInPreview()} then, <code>demoAmount</code> is used instead.
* Height and Width is reset as well.
*
* How the gauge is rendered depends on the orientation of the gauge.
* The texture and specific attributes are obtained from the subclass.
* From the information retrieved the subclass is rendered icon by icon.
*
* @param x The relative x-value of the upper left corner.
* @param y The relative y-value of the upper left corner.
*/
@Override
public void renderHUDItem(int x, int y) {
    mc.mcProfiler.startSection(name);
    SpriteSet iconLayers;
    // Bind the gauge's texture sheet once before drawing any icons.
    bind(layers.getLocation());
    // if (name.equalsIgnoreCase(ClientProxy.HEALTH)) log.info(anchor + " : " + x + " : " + y + " : " + width + " : " + height + " : " + orientation);
    // log.info("Res: " + res.getScaledWidth() + " : " + res.getScaledHeight());
    // log.info(name + ": X: " + this.x + " : " + anchor.getX() + " : " + width + " -> " + x);
    // log.info(name + ": Y: " + this.y + " : " + anchor.getY() + " : " + width + " -> " + y);
    // LEFT/UP gauges grow away from the given corner, so shift the
    // starting position to the far end; RIGHT/DOWN need no shift.
    switch(orientation) {
        case RIGHT:
            break;
        case DOWN:
            break;
        case LEFT:
            x += width - layers.getWidth();
            break;
        case UP:
            y += height - layers.getHeight();
            break;
    }
    // Iterate icons from the highest index down to 0 so that
    // lower-indexed icons are drawn later, i.e. on top.
    // Icon count is amount divided by the states per icon
    // (layers.getAmount() - 1 — presumably excluding the background
    // layer; TODO confirm against LayeredSprite).
    for(int i = MathHelper.ceiling_float_int(amount / (layers.getAmount() - 1) - 1); i >= 0; --i) {
        iconLayers = getIconSpriteSet(i);
        // Row (stack) this icon belongs to.
        int stack = MathHelper.ceiling_float_int((float)(i+1) / maxPerRow) - 1;
        int iconX = 0, iconY = 0;
        // Position = start + parallel offset along the row + perpendicular
        // row offset; flip reverses the stacking direction of rows.
        // Per-icon deltas allow subclass animations (shake, waves, ...).
        switch(orientation) {
            case RIGHT:
                iconX = x + space*(i % maxPerRow) + getIconDeltaPara(i) + getDeltaX();
                iconY = y + (flip ? stack : stacks - stack - 1)*stackSpace + getIconDeltaPerp(i) + getDeltaY();
                break;
            case DOWN:
                iconX = x + (flip ? stack : stacks - stack - 1)*stackSpace + getIconDeltaPerp(i) + getDeltaX();
                iconY = y + space*(i % maxPerRow) + getIconDeltaPara(i) + getDeltaY();
                break;
            case LEFT:
                iconX = x - space*(i % maxPerRow) + getIconDeltaPara(i) + getDeltaX();
                iconY = y + (flip ? stack : stacks - stack - 1)*stackSpace + getIconDeltaPerp(i) + getDeltaY();
                break;
            case UP:
                iconX = x + (flip ? stack : stacks - stack - 1)*stackSpace + getIconDeltaPerp(i) + getDeltaX();
                iconY = y - space*(i % maxPerRow) + getIconDeltaPara(i) + getDeltaY();
        }
        // Draw every layer of this icon, background first (layer 0),
        // successive layers on top. Null layers are simply skipped.
        for(int j = 0; j < iconLayers.getAmount(); j++) {
            Sprite sprite = iconLayers.getSprite(j);
            if (sprite == null) continue;
            guiRenderer.drawTexturedModalRect(iconX, iconY, sprite.getX(), sprite.getY(), layers.getWidth(), layers.getHeight());
        }
    }
    mc.mcProfiler.endSection();
}
/**
* Setter for {@link #maxPerRow}.
*
* @param max New maximum amount of icons for each row.
*/
public void setMaxPerRow(int max) {
    maxPerRow = max;
}

/**
 * Refreshes {@link #amount} from {@link #getAmount()} before the
 * regular pre-render work runs.
 */
@Override
public void preRender() {
    setAmount(getAmount());
    super.preRender();
}
/**
* Sets Height and Width of the gauge based on the orientation.
* Also, sets Icon Gauge specific settings.
*/
@Override
protected void setHeightAndWidth() {
    // Rows needed = icons / icons-per-row, rounded up, where the icon
    // count is amount divided by the states per icon
    // (layers.getAmount() - 1 — presumably excluding the background
    // layer; TODO confirm).
    stacks = MathHelper.ceiling_float_int(amount / (layers.getAmount() - 1) / maxPerRow);
    // Rows pack closer together as more rows are added, but never
    // closer than minStackSpace.
    stackSpace = Math.max(maxStackSpace - (stacks - 1), minStackSpace);
    switch(orientation) {
        case RIGHT:
        case LEFT:
            // Horizontal gauge: width runs along the row, height spans the rows.
            height = (stacks - 1)*stackSpace + layers.getHeight();
            width = space*(maxPerRow - 1) + layers.getWidth();
            break;
        case DOWN:
        case UP:
            // Vertical gauge: the two dimensions are swapped.
            height = space*(maxPerRow - 1) + layers.getHeight();
            width = (stacks - 1)*stackSpace + layers.getWidth();
            break;
    }
}
/**
 * Sets the maximum value for the gauge and, when the value actually
 * changes, recalculates the gauge's height and width (the row count
 * and row spacing depend on the amount).
 *
 * @param amount Maximum value for the gauge
 */
protected void setAmount(float amount) {
    if(this.amount == amount) return;
    this.amount = amount;
    // The javadoc (and renderHUDItem's contract) promise that height and
    // width are reset when the amount changes; previously this method
    // only stored the value, leaving the cached dimensions stale.
    setHeightAndWidth();
}
/**
 * Produces a per-tick offset for a bidirectional shake: each call
 * yields -1, 0, or +1 uniformly at random. Intended to be returned
 * from {@link #getIconDeltaPara(int)} or {@link #getIconDeltaPerp(int)}.
 *
 * @return a random offset in {-1, 0, 1}.
 */
protected int bidirectionalShake() {
    int roll = rand.nextInt(3);
    return roll - 1;
}
/**
* Creates an animation of a moving wave through the bar. It uses a
* single sine wave with only the positive section when used in
* {{@link #getIconDeltaPara(int)} or {@link #getIconDeltaPerp(int)}.
*
* @param icon Index of the <code>icon</code>
*
* @return delta movement per tick to produce a single moving wave.
*/
protected int movingHalfSinWave(int icon) {
    // The wave's leading position advances one icon per tick, wrapping
    // 5 icons past the end of the gauge so the wave can run off it.
    int leadIcon = updateCounter % MathHelper.ceiling_float_int(getAmount() + 5);
    // Only the 4 icons just past the leading position are offset.
    if(leadIcon < icon && icon < leadIcon + 5) {
        // NOTE(review): cos() peaks at the leading edge, and the argument
        // scaling (*5 / PI) looks unusual for a half sine; the class TODO
        // marks these animation methods as broken — confirm the intended
        // curve before relying on this.
        return MathHelper.ceiling_double_int(5*Math.cos((icon - leadIcon)*5 / Math.PI));
    }
    return 0;
}
/**
* Creates a shaking animation among the icons when used in
* {@link #getIconDeltaPara(int)} or {@link #getIconDeltaPerp(int)}.
*
* @return delta movement per tick to produce a shaking animation.
*/
protected int shake() {
    // NOTE(review): rand.nextInt(2) is 0 or 1, so this returns only 0 or
    // 2 — the icon jitters in a single direction. If a centered shake was
    // intended, compare with bidirectionalShake(); the class TODO marks
    // these animation methods as needing fixes.
    return 2*rand.nextInt(2);
}
/**
* Creates a continuous Sine wave among the icons when used in
* {@link #getIconDeltaPara(int)} or {@link #getIconDeltaPerp(int)}.
*
* @param icon Index of the <code>icon</code>
*
* @return delta movement per tick to produce a continuous sine wave.
*/
protected int sinWave(int icon) {
    int leadIcon = updateCounter % MathHelper.ceiling_float_int(getAmount());
    // NOTE(review): the icon parameter is unused, so every icon receives
    // the same offset on a given tick — the whole row bobs together
    // rather than forming a travelling wave. Flagged by the TODO below;
    // confirm intent before changing.
    return MathHelper.ceiling_double_int(5*Math.sin(leadIcon*5 / Math.PI));
}
// TODO: Fix IconGauge Animation Methods
/**
* Creates a wave through the icons when used in {@link #getIconDeltaPara(int)}
* or {@link #getIconDeltaPerp(int)}.
*
* @param icon Index of the <code>icon</code>
*
* @return delta movement per tick to produce a wave.
*/
protected int wave(int icon) {
    // The wave position advances one icon per tick (wrapping 5 icons
    // past the end); only the single icon at that position is lifted
    // by 2 on this tick.
    if(updateCounter % MathHelper.ceiling_float_int(getAmount() + 5) == icon) {
        animationFinished = false;
        // Mark the animation finished once the wave reaches the last icon.
        if(MathHelper.ceiling_float_int(getAmount()) - 1 == icon)
            animationFinished = true;
        return 2;
    }
    return 0;
}
/**
* Calculates parallel movement of each icon of the gauge. <3 <- <3 -> <3
*
* @param icon The index of the icon. <3 0 <3 1 <3 2 <3 3 <3 4 <3 5 etc
*
* @return parallel movement direction and distance.
*/
protected int getIconDeltaPara(int icon) {
    // No parallel (along-the-row) movement by default; subclasses
    // override to animate, e.g. with shake() or wave(icon).
    return 0;
}

/**
 * Calculates perpendicular (across-the-row) movement of each icon
 * of the gauge.
 *
 * @param icon The index of the icon.
 *
 * @return perpendicular movement direction and distance.
 */
protected int getIconDeltaPerp(int icon) {
    // No perpendicular movement by default; subclasses override to animate.
    return 0;
}

@Override
protected int getDeltaX() {
    // No whole-gauge X offset by default.
    return 0;
}

@Override
protected int getDeltaY() {
    // No whole-gauge Y offset by default.
    return 0;
}
/**
* Gets the maximum value used during game-play.
*
* @return the maximum value for the gauge (20 = 2 states per icon x 10 icons).
*/
protected float getAmount() {
    // Default is the preview value; subclasses override this to return
    // the live in-game value (per the javadoc above).
    return DFLT_AMT;
}
/**
* Gets the textures to be used when displaying the specific
* <code>icon</code> of the gauge during game-play.
*
* @param icon The index of the icon.
*
* @return the textures to be used in <code>icon</code>.
*/
protected abstract SpriteSet getIconSpriteSet(int icon);

/**
 * Flag for when an animation finishes.
 */
@Deprecated
protected boolean animationFinished = true;

/**
 * Maximum amount of icons to be rendered
 * in a row.
 */
protected int maxPerRow;

/**
 * Maximum space between rows. Actual row spacing starts here
 * and shrinks toward {@link #minStackSpace} as rows are added.
 */
protected int maxStackSpace;

/**
 * Minimum space between rows.
 */
protected int minStackSpace;

/**
 * Space between icons in the rows.
 */
protected int space;

/**
 * Textures for the icon gauge. Multiple layers could be made
 * to represent different situations for textures. For example,
 * for {@link HudItemHealth}:
 *
 * background
 * default
 * defaultHL
 * absorb
 * poison
 * poisonHL
 * wither
 * witherHL
 * defaultHC
 * defaultHCHL
 * absorbHC
 * poisonHC
 * poisonHCHL
 * witherHC
 * witherHCHL
 */
protected LayeredSprite layers;

/**
 * Amount of rows required to render the gauge.
 * Recomputed by {@link #setHeightAndWidth()}.
 */
private int stacks = 0;

/**
 * Space in between the rows.
 * Recomputed by {@link #setHeightAndWidth()}.
 */
private int stackSpace = 0;

/**
 * Maximum value for the rendering. This value is used to determine
 * the amount of icons to be rendered.
 */
private float amount = 0;
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.server;
import com.facebook.airlift.json.JsonCodec;
import com.facebook.presto.Session.ResourceEstimateBuilder;
import com.facebook.presto.spi.function.SqlFunctionId;
import com.facebook.presto.spi.function.SqlInvokedFunction;
import com.facebook.presto.spi.security.Identity;
import com.facebook.presto.spi.security.SelectedRole;
import com.facebook.presto.spi.session.ResourceEstimates;
import com.facebook.presto.sql.parser.ParsingException;
import com.facebook.presto.sql.parser.ParsingOptions;
import com.facebook.presto.sql.parser.SqlParser;
import com.facebook.presto.sql.parser.SqlParserOptions;
import com.facebook.presto.transaction.TransactionId;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import io.airlift.units.DataSize;
import io.airlift.units.Duration;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.Set;
import static com.facebook.airlift.json.JsonCodec.jsonCodec;
import static com.facebook.presto.client.PrestoHeaders.PRESTO_CATALOG;
import static com.facebook.presto.client.PrestoHeaders.PRESTO_CLIENT_INFO;
import static com.facebook.presto.client.PrestoHeaders.PRESTO_CLIENT_TAGS;
import static com.facebook.presto.client.PrestoHeaders.PRESTO_EXTRA_CREDENTIAL;
import static com.facebook.presto.client.PrestoHeaders.PRESTO_LANGUAGE;
import static com.facebook.presto.client.PrestoHeaders.PRESTO_PREPARED_STATEMENT;
import static com.facebook.presto.client.PrestoHeaders.PRESTO_RESOURCE_ESTIMATE;
import static com.facebook.presto.client.PrestoHeaders.PRESTO_ROLE;
import static com.facebook.presto.client.PrestoHeaders.PRESTO_SCHEMA;
import static com.facebook.presto.client.PrestoHeaders.PRESTO_SESSION;
import static com.facebook.presto.client.PrestoHeaders.PRESTO_SESSION_FUNCTION;
import static com.facebook.presto.client.PrestoHeaders.PRESTO_SOURCE;
import static com.facebook.presto.client.PrestoHeaders.PRESTO_TIME_ZONE;
import static com.facebook.presto.client.PrestoHeaders.PRESTO_TRACE_TOKEN;
import static com.facebook.presto.client.PrestoHeaders.PRESTO_TRANSACTION_ID;
import static com.facebook.presto.client.PrestoHeaders.PRESTO_USER;
import static com.facebook.presto.sql.parser.ParsingOptions.DecimalLiteralTreatment.AS_DOUBLE;
import static com.google.common.base.Strings.emptyToNull;
import static com.google.common.base.Strings.isNullOrEmpty;
import static com.google.common.base.Strings.nullToEmpty;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.ImmutableMap.toImmutableMap;
import static com.google.common.net.HttpHeaders.USER_AGENT;
import static com.google.common.net.HttpHeaders.X_FORWARDED_FOR;
import static java.lang.String.format;
public final class HttpRequestSessionContext
implements SessionContext
{
// Splits dotted session-property names into [catalog, property] parts.
private static final Splitter DOT_SPLITTER = Splitter.on('.');
// Codecs for the JSON payloads carried in the session-function header.
private static final JsonCodec<SqlFunctionId> SQL_FUNCTION_ID_JSON_CODEC = jsonCodec(SqlFunctionId.class);
private static final JsonCodec<SqlInvokedFunction> SQL_INVOKED_FUNCTION_JSON_CODEC = jsonCodec(SqlInvokedFunction.class);

// Immutable snapshot of the session state parsed from the request
// headers in the constructor; exposed via the SessionContext getters.
private final String catalog;
private final String schema;
private final Identity identity;
private final String source;
private final Optional<String> traceToken;
private final String userAgent;
private final String remoteUserAddress;
private final String timeZoneId;
private final String language;
private final Set<String> clientTags;
private final ResourceEstimates resourceEstimates;
private final Map<String, String> systemProperties;
private final Map<String, Map<String, String>> catalogSessionProperties;
private final Map<String, String> preparedStatements;
private final Optional<TransactionId> transactionId;
private final boolean clientTransactionSupport;
private final String clientInfo;
private final Map<SqlFunctionId, SqlInvokedFunction> sessionFunctions;
/**
 * Builds an immutable session context from the Presto protocol headers
 * of the given request.
 *
 * @param servletRequest the incoming HTTP request carrying X-Presto-* headers
 * @param sqlParserOptions parser options used to validate prepared statements
 * @throws WebApplicationException with 400 BAD REQUEST when a required
 *         header is missing or any header is malformed
 */
public HttpRequestSessionContext(HttpServletRequest servletRequest, SqlParserOptions sqlParserOptions)
        throws WebApplicationException
{
    catalog = trimEmptyToNull(servletRequest.getHeader(PRESTO_CATALOG));
    schema = trimEmptyToNull(servletRequest.getHeader(PRESTO_SCHEMA));
    // A schema only makes sense relative to a catalog.
    assertRequest((catalog != null) || (schema == null), "Schema is set but catalog is not");
    String user = trimEmptyToNull(servletRequest.getHeader(PRESTO_USER));
    assertRequest(user != null, "User must be set");
    identity = new Identity(
            user,
            Optional.ofNullable(servletRequest.getUserPrincipal()),
            parseRoleHeaders(servletRequest),
            parseExtraCredentials(servletRequest),
            ImmutableMap.of());
    source = servletRequest.getHeader(PRESTO_SOURCE);
    traceToken = Optional.ofNullable(trimEmptyToNull(servletRequest.getHeader(PRESTO_TRACE_TOKEN)));
    userAgent = servletRequest.getHeader(USER_AGENT);
    // Prefer the proxy-supplied client address when present.
    remoteUserAddress = !isNullOrEmpty(servletRequest.getHeader(X_FORWARDED_FOR)) ? servletRequest.getHeader(X_FORWARDED_FOR) : servletRequest.getRemoteAddr();
    timeZoneId = servletRequest.getHeader(PRESTO_TIME_ZONE);
    language = servletRequest.getHeader(PRESTO_LANGUAGE);
    clientInfo = servletRequest.getHeader(PRESTO_CLIENT_INFO);
    clientTags = parseClientTags(servletRequest);
    resourceEstimates = parseResourceEstimate(servletRequest);
    // parse session properties: bare names are system properties,
    // "catalog.name" entries are catalog session properties
    ImmutableMap.Builder<String, String> systemProperties = ImmutableMap.builder();
    Map<String, Map<String, String>> catalogSessionProperties = new HashMap<>();
    for (Entry<String, String> entry : parseSessionHeaders(servletRequest).entrySet()) {
        String fullPropertyName = entry.getKey();
        String propertyValue = entry.getValue();
        List<String> nameParts = DOT_SPLITTER.splitToList(fullPropertyName);
        if (nameParts.size() == 1) {
            String propertyName = nameParts.get(0);
            assertRequest(!propertyName.isEmpty(), "Invalid %s header", PRESTO_SESSION);
            // catalog session properties can not be validated until the transaction has started, so we delay system property validation also
            systemProperties.put(propertyName, propertyValue);
        }
        else if (nameParts.size() == 2) {
            String catalogName = nameParts.get(0);
            String propertyName = nameParts.get(1);
            assertRequest(!catalogName.isEmpty(), "Invalid %s header", PRESTO_SESSION);
            assertRequest(!propertyName.isEmpty(), "Invalid %s header", PRESTO_SESSION);
            // catalog session properties can not be validated until the transaction has started
            catalogSessionProperties.computeIfAbsent(catalogName, id -> new HashMap<>()).put(propertyName, propertyValue);
        }
        else {
            throw badRequest(format("Invalid %s header", PRESTO_SESSION));
        }
    }
    this.systemProperties = systemProperties.build();
    // Deep-freeze the per-catalog maps.
    this.catalogSessionProperties = catalogSessionProperties.entrySet().stream()
            .collect(toImmutableMap(Entry::getKey, entry -> ImmutableMap.copyOf(entry.getValue())));
    preparedStatements = parsePreparedStatementsHeaders(servletRequest, sqlParserOptions);
    String transactionIdHeader = servletRequest.getHeader(PRESTO_TRANSACTION_ID);
    // Presence of the header (even "none") signals the client manages transactions.
    clientTransactionSupport = transactionIdHeader != null;
    transactionId = parseTransactionId(transactionIdHeader);
    this.sessionFunctions = parseSessionFunctionHeader(servletRequest);
}
/**
 * Flattens a multi-valued HTTP header into a single list: every header
 * value is split on commas, the pieces are trimmed, and empty pieces
 * are dropped.
 *
 * @param headers all values of one header, as returned by
 *        {@code HttpServletRequest.getHeaders}
 * @return the flattened, trimmed, non-empty entries in order
 */
public static List<String> splitSessionHeader(Enumeration<String> headers)
{
    Splitter commaSplitter = Splitter.on(',').trimResults().omitEmptyStrings();
    return Collections.list(headers).stream()
            .flatMap(headerValue -> commaSplitter.splitToList(headerValue).stream())
            .collect(toImmutableList());
}
private static Map<String, String> parseSessionHeaders(HttpServletRequest servletRequest)
{
    return parseProperty(servletRequest, PRESTO_SESSION);
}

/**
 * Parses the role header into catalog-name -> role mappings. Each
 * entry has the form {@code catalog=url-encoded(SelectedRole)}.
 */
private static Map<String, SelectedRole> parseRoleHeaders(HttpServletRequest servletRequest)
{
    ImmutableMap.Builder<String, SelectedRole> roles = ImmutableMap.builder();
    for (String header : splitSessionHeader(servletRequest.getHeaders(PRESTO_ROLE))) {
        // limit(2) keeps any '=' characters inside the encoded role value
        List<String> nameValue = Splitter.on('=').limit(2).trimResults().splitToList(header);
        assertRequest(nameValue.size() == 2, "Invalid %s header", PRESTO_ROLE);
        roles.put(nameValue.get(0), SelectedRole.valueOf(urlDecode(nameValue.get(1))));
    }
    return roles.build();
}

private static Map<String, String> parseExtraCredentials(HttpServletRequest servletRequest)
{
    return parseCredentialProperty(servletRequest, PRESTO_EXTRA_CREDENTIAL);
}
/**
 * Parses a header of comma-separated {@code name=url-encoded-value}
 * entries into a name -> decoded-value map.
 *
 * <p>Splits on the first '=' only (limit(2)), matching
 * {@code parseCredentialProperty}; without the limit, any legitimate
 * value containing an '=' (common in url-encoded or base64 payloads)
 * produced a third part and was rejected as an invalid header.
 *
 * @throws WebApplicationException (400) when an entry has no '='
 */
private static Map<String, String> parseProperty(HttpServletRequest servletRequest, String headerName)
{
    Map<String, String> properties = new HashMap<>();
    for (String header : splitSessionHeader(servletRequest.getHeaders(headerName))) {
        List<String> nameValue = Splitter.on('=').limit(2).trimResults().splitToList(header);
        assertRequest(nameValue.size() == 2, "Invalid %s header", headerName);
        properties.put(nameValue.get(0), urlDecode(nameValue.get(1)));
    }
    return properties;
}
/**
 * Parses a header of comma-separated {@code name=url-encoded-value}
 * credential entries into a name -> decoded-value map. The split is on
 * the first '=' only, so values may themselves contain '='.
 */
private static Map<String, String> parseCredentialProperty(HttpServletRequest servletRequest, String headerName)
{
    Map<String, String> credentials = new HashMap<>();
    Splitter entrySplitter = Splitter.on('=').limit(2).trimResults();
    for (String header : splitSessionHeader(servletRequest.getHeaders(headerName))) {
        List<String> parts = entrySplitter.splitToList(header);
        assertRequest(parts.size() == 2, "Invalid %s header", headerName);
        credentials.put(parts.get(0), urlDecode(parts.get(1)));
    }
    return credentials;
}
/**
 * Fails the request with 400 BAD REQUEST (formatted message) when the
 * expectation does not hold; otherwise does nothing.
 */
private static void assertRequest(boolean expression, String format, Object... args)
{
    if (expression) {
        return;
    }
    throw badRequest(format(format, args));
}
/**
 * Parses the prepared-statement header entries
 * ({@code name=url-encoded-sql}) into a name -> SQL map, validating
 * that each SQL string actually parses.
 */
private static Map<String, String> parsePreparedStatementsHeaders(HttpServletRequest servletRequest, SqlParserOptions sqlParserOptions)
{
    ImmutableMap.Builder<String, String> preparedStatements = ImmutableMap.builder();
    for (String header : splitSessionHeader(servletRequest.getHeaders(PRESTO_PREPARED_STATEMENT))) {
        // Split on the first '=' only; both halves are url-encoded.
        List<String> nameValue = Splitter.on('=').limit(2).trimResults().splitToList(header);
        assertRequest(nameValue.size() == 2, "Invalid %s header", PRESTO_PREPARED_STATEMENT);
        String statementName;
        String sqlString;
        try {
            statementName = urlDecode(nameValue.get(0));
            sqlString = urlDecode(nameValue.get(1));
        }
        catch (IllegalArgumentException e) {
            throw badRequest(format("Invalid %s header: %s", PRESTO_PREPARED_STATEMENT, e.getMessage()));
        }
        // Validate statement
        SqlParser sqlParser = new SqlParser(sqlParserOptions);
        try {
            // Decimal-literal treatment is irrelevant here: we only need
            // the statement to parse, not to be analyzed.
            sqlParser.createStatement(sqlString, new ParsingOptions(AS_DOUBLE /* anything */));
        }
        catch (ParsingException e) {
            throw badRequest(format("Invalid %s header: %s", PRESTO_PREPARED_STATEMENT, e.getMessage()));
        }
        preparedStatements.put(statementName, sqlString);
    }
    return preparedStatements.build();
}
/**
 * Parses the transaction-id header value. Blank, missing, or the
 * literal "none" (case-insensitive) mean no transaction; anything else
 * must be a valid {@link TransactionId} or the request is rejected
 * with 400 BAD REQUEST.
 */
private static Optional<TransactionId> parseTransactionId(String transactionId)
{
    String normalized = trimEmptyToNull(transactionId);
    if (normalized == null || normalized.equalsIgnoreCase("none")) {
        return Optional.empty();
    }
    try {
        return Optional.of(TransactionId.valueOf(normalized));
    }
    catch (Exception e) {
        throw badRequest(e.getMessage());
    }
}
/**
 * Parses the session-function header entries
 * ({@code url-encoded-json-signature=url-encoded-json-definition})
 * into a {@link SqlFunctionId} -> {@link SqlInvokedFunction} map.
 */
private static Map<SqlFunctionId, SqlInvokedFunction> parseSessionFunctionHeader(HttpServletRequest req)
{
    ImmutableMap.Builder<SqlFunctionId, SqlInvokedFunction> sessionFunctions = ImmutableMap.builder();
    for (String header : splitSessionHeader(req.getHeaders(PRESTO_SESSION_FUNCTION))) {
        // Split on the first '=' only; the JSON payloads are url-encoded.
        List<String> nameValue = Splitter.on('=').limit(2).trimResults().splitToList(header);
        assertRequest(nameValue.size() == 2, "Invalid %s header", PRESTO_SESSION_FUNCTION);
        String serializedFunctionSignature;
        String serializedFunctionDefinition;
        try {
            serializedFunctionSignature = urlDecode(nameValue.get(0));
            serializedFunctionDefinition = urlDecode(nameValue.get(1));
        }
        catch (IllegalArgumentException e) {
            throw badRequest(format("Invalid %s header: %s", PRESTO_SESSION_FUNCTION, e.getMessage()));
        }
        sessionFunctions.put(SQL_FUNCTION_ID_JSON_CODEC.fromJson(serializedFunctionSignature), SQL_INVOKED_FUNCTION_JSON_CODEC.fromJson(serializedFunctionDefinition));
    }
    return sessionFunctions.build();
}
/**
 * Builds a 400 BAD REQUEST exception with a plain-text body.
 *
 * <p>Returns the exception instead of throwing it: the previous body
 * threw inline, which made the declared return type unreachable dead
 * code and turned every caller's {@code throw badRequest(...)} into a
 * statement whose own throw never executed. Observable behavior is
 * unchanged (the exception still propagates from the call site), but
 * the signature is now honest and the callers' throws are real.
 *
 * @param message plain-text body describing the problem
 * @return the exception for the caller to throw
 */
private static WebApplicationException badRequest(String message)
{
    return new WebApplicationException(Response
            .status(Status.BAD_REQUEST)
            .type(MediaType.TEXT_PLAIN)
            .entity(message)
            .build());
}
/**
 * Trims the value and normalizes missing or blank values to
 * {@code null}: {@code null}, "", and whitespace-only all become
 * {@code null}; everything else is returned trimmed.
 */
private static String trimEmptyToNull(String value)
{
    String trimmed = nullToEmpty(value).trim();
    return emptyToNull(trimmed);
}
/**
 * URL-decodes the value as UTF-8.
 *
 * @throws IllegalArgumentException if the value contains an invalid
 *         percent-escape (propagated from {@link URLDecoder#decode})
 */
private static String urlDecode(String value)
{
    try {
        return URLDecoder.decode(value, "UTF-8");
    }
    catch (UnsupportedEncodingException e) {
        // UTF-8 support is mandatory for every JVM, so this is unreachable.
        throw new AssertionError(e);
    }
}
// --- SessionContext accessors: plain views of the state parsed in the
// constructor; no computation or copying happens here. ---
@Override
public Identity getIdentity()
{
    return identity;
}

@Override
public String getCatalog()
{
    return catalog;
}

@Override
public String getSchema()
{
    return schema;
}

@Override
public String getSource()
{
    return source;
}

@Override
public String getRemoteUserAddress()
{
    return remoteUserAddress;
}

@Override
public String getUserAgent()
{
    return userAgent;
}

@Override
public String getClientInfo()
{
    return clientInfo;
}

@Override
public Set<String> getClientTags()
{
    return clientTags;
}

@Override
public ResourceEstimates getResourceEstimates()
{
    return resourceEstimates;
}

@Override
public String getTimeZoneId()
{
    return timeZoneId;
}

@Override
public String getLanguage()
{
    return language;
}

@Override
public Map<String, String> getSystemProperties()
{
    return systemProperties;
}

@Override
public Map<String, Map<String, String>> getCatalogSessionProperties()
{
    return catalogSessionProperties;
}

@Override
public Map<String, String> getPreparedStatements()
{
    return preparedStatements;
}

@Override
public Optional<TransactionId> getTransactionId()
{
    return transactionId;
}

@Override
public boolean supportClientTransaction()
{
    return clientTransactionSupport;
}

@Override
public Map<SqlFunctionId, SqlInvokedFunction> getSessionFunctions()
{
    return sessionFunctions;
}

@Override
public Optional<String> getTraceToken()
{
    return traceToken;
}
/**
 * Parses the client-tags header into an immutable set: comma-separated,
 * trimmed, with empty entries dropped. A missing header yields the
 * empty set.
 */
private Set<String> parseClientTags(HttpServletRequest servletRequest)
{
    String rawTags = nullToEmpty(servletRequest.getHeader(PRESTO_CLIENT_TAGS));
    Iterable<String> tags = Splitter.on(',').trimResults().omitEmptyStrings().split(rawTags);
    return ImmutableSet.copyOf(tags);
}
/**
 * Parses the resource-estimate header entries ({@code name=value})
 * into a {@link ResourceEstimates}. Time estimates use
 * {@link Duration} syntax and memory estimates use {@link DataSize}
 * syntax; unknown names or unparseable values are rejected with
 * 400 BAD REQUEST.
 */
public static ResourceEstimates parseResourceEstimate(HttpServletRequest servletRequest)
{
    ResourceEstimateBuilder builder = new ResourceEstimateBuilder();
    for (String header : splitSessionHeader(servletRequest.getHeaders(PRESTO_RESOURCE_ESTIMATE))) {
        // Split on the first '=' only so values may contain '='.
        List<String> nameValue = Splitter.on('=').limit(2).trimResults().splitToList(header);
        assertRequest(nameValue.size() == 2, "Invalid %s header", PRESTO_RESOURCE_ESTIMATE);
        String name = nameValue.get(0);
        String value = nameValue.get(1);
        try {
            // Estimate names are matched case-insensitively via toUpperCase.
            switch (name.toUpperCase()) {
                case ResourceEstimates.EXECUTION_TIME:
                    builder.setExecutionTime(Duration.valueOf(value));
                    break;
                case ResourceEstimates.CPU_TIME:
                    builder.setCpuTime(Duration.valueOf(value));
                    break;
                case ResourceEstimates.PEAK_MEMORY:
                    builder.setPeakMemory(DataSize.valueOf(value));
                    break;
                case ResourceEstimates.PEAK_TASK_MEMORY:
                    builder.setPeakTaskMemory(DataSize.valueOf(value));
                    break;
                default:
                    throw badRequest(format("Unsupported resource name %s", name));
            }
        }
        catch (IllegalArgumentException e) {
            // Duration.valueOf / DataSize.valueOf reject malformed values.
            throw badRequest(format("Unsupported format for resource estimate '%s': %s", value, e));
        }
    }
    return builder.build();
}
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.globalaccelerator.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/globalaccelerator-2018-08-08/UpdateCustomRoutingAccelerator"
* target="_top">AWS API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class UpdateCustomRoutingAcceleratorRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
// Mutable request state; populated directly via setters or through the
// fluent with* methods below (generated-SDK request-object convention).
/**
 * <p>
 * The Amazon Resource Name (ARN) of the accelerator to update.
 * </p>
 */
private String acceleratorArn;
/**
 * <p>
 * The name of the accelerator. The name can have a maximum of 32 characters, must contain only alphanumeric
 * characters or hyphens (-), and must not begin or end with a hyphen.
 * </p>
 */
private String name;
/**
 * <p>
 * The value for the address type must be IPv4.
 * </p>
 */
private String ipAddressType;
/**
 * <p>
 * Indicates whether an accelerator is enabled. The value is true or false. The default value is true.
 * </p>
 * <p>
 * If the value is set to true, the accelerator cannot be deleted. If set to false, the accelerator can be deleted.
 * </p>
 */
private Boolean enabled;
/**
 * Sets the Amazon Resource Name (ARN) of the accelerator to update.
 *
 * @param acceleratorArn
 *        The Amazon Resource Name (ARN) of the accelerator to update.
 */
public void setAcceleratorArn(String acceleratorArn) {
    this.acceleratorArn = acceleratorArn;
}

/**
 * Returns the Amazon Resource Name (ARN) of the accelerator to update.
 *
 * @return The Amazon Resource Name (ARN) of the accelerator to update.
 */
public String getAcceleratorArn() {
    return this.acceleratorArn;
}

/**
 * Fluent variant of {@link #setAcceleratorArn(String)}.
 *
 * @param acceleratorArn
 *        The Amazon Resource Name (ARN) of the accelerator to update.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public UpdateCustomRoutingAcceleratorRequest withAcceleratorArn(String acceleratorArn) {
    this.acceleratorArn = acceleratorArn;
    return this;
}
/**
 * Sets the name of the accelerator. The name can have a maximum of 32
 * characters, must contain only alphanumeric characters or hyphens (-),
 * and must not begin or end with a hyphen.
 *
 * @param name
 *        The name of the accelerator.
 */
public void setName(String name) {
    this.name = name;
}

/**
 * Returns the name of the accelerator.
 *
 * @return The name of the accelerator. The name can have a maximum of 32 characters, must contain only alphanumeric
 *         characters or hyphens (-), and must not begin or end with a hyphen.
 */
public String getName() {
    return this.name;
}

/**
 * Fluent variant of {@link #setName(String)}.
 *
 * @param name
 *        The name of the accelerator. The name can have a maximum of 32 characters, must contain only alphanumeric
 *        characters or hyphens (-), and must not begin or end with a hyphen.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public UpdateCustomRoutingAcceleratorRequest withName(String name) {
    this.name = name;
    return this;
}
/**
 * Sets the IP address type. The value for the address type must be IPv4.
 *
 * @param ipAddressType
 *        The value for the address type must be IPv4.
 * @see IpAddressType
 */
public void setIpAddressType(String ipAddressType) {
    this.ipAddressType = ipAddressType;
}

/**
 * Returns the IP address type.
 *
 * @return The value for the address type must be IPv4.
 * @see IpAddressType
 */
public String getIpAddressType() {
    return this.ipAddressType;
}

/**
 * Fluent variant of {@link #setIpAddressType(String)}.
 *
 * @param ipAddressType
 *        The value for the address type must be IPv4.
 * @return Returns a reference to this object so that method calls can be chained together.
 * @see IpAddressType
 */
public UpdateCustomRoutingAcceleratorRequest withIpAddressType(String ipAddressType) {
    this.ipAddressType = ipAddressType;
    return this;
}

/**
 * Fluent, enum-typed variant of {@link #setIpAddressType(String)};
 * stores the enum's string form.
 *
 * @param ipAddressType
 *        The value for the address type must be IPv4.
 * @return Returns a reference to this object so that method calls can be chained together.
 * @see IpAddressType
 */
public UpdateCustomRoutingAcceleratorRequest withIpAddressType(IpAddressType ipAddressType) {
    this.ipAddressType = ipAddressType.toString();
    return this;
}
/**
 * Sets whether the accelerator is enabled. The value is true or false;
 * the default value is true. While enabled, the accelerator cannot be
 * deleted; once set to false, it can be deleted.
 *
 * @param enabled
 *        Indicates whether an accelerator is enabled.
 */
public void setEnabled(Boolean enabled) {
    this.enabled = enabled;
}

/**
 * Returns whether the accelerator is enabled (default true). While
 * enabled, the accelerator cannot be deleted; once set to false, it
 * can be deleted.
 *
 * @return Indicates whether an accelerator is enabled.
 */
public Boolean getEnabled() {
    return this.enabled;
}

/**
 * Fluent variant of {@link #setEnabled(Boolean)}.
 *
 * @param enabled
 *        Indicates whether an accelerator is enabled. The value is true or false. The default value is true.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public UpdateCustomRoutingAcceleratorRequest withEnabled(Boolean enabled) {
    this.enabled = enabled;
    return this;
}

/**
 * Alias for {@link #getEnabled()} following the Boolean-getter
 * convention.
 *
 * @return Indicates whether an accelerator is enabled.
 */
public Boolean isEnabled() {
    return this.enabled;
}
    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        // Only non-null members are rendered; each member except the last one
        // emitted is followed by a comma.
        if (getAcceleratorArn() != null)
            sb.append("AcceleratorArn: ").append(getAcceleratorArn()).append(",");
        if (getName() != null)
            sb.append("Name: ").append(getName()).append(",");
        if (getIpAddressType() != null)
            sb.append("IpAddressType: ").append(getIpAddressType()).append(",");
        if (getEnabled() != null)
            sb.append("Enabled: ").append(getEnabled());
        sb.append("}");
        return sb.toString();
    }
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof UpdateCustomRoutingAcceleratorRequest == false)
return false;
UpdateCustomRoutingAcceleratorRequest other = (UpdateCustomRoutingAcceleratorRequest) obj;
if (other.getAcceleratorArn() == null ^ this.getAcceleratorArn() == null)
return false;
if (other.getAcceleratorArn() != null && other.getAcceleratorArn().equals(this.getAcceleratorArn()) == false)
return false;
if (other.getName() == null ^ this.getName() == null)
return false;
if (other.getName() != null && other.getName().equals(this.getName()) == false)
return false;
if (other.getIpAddressType() == null ^ this.getIpAddressType() == null)
return false;
if (other.getIpAddressType() != null && other.getIpAddressType().equals(this.getIpAddressType()) == false)
return false;
if (other.getEnabled() == null ^ this.getEnabled() == null)
return false;
if (other.getEnabled() != null && other.getEnabled().equals(this.getEnabled()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getAcceleratorArn() == null) ? 0 : getAcceleratorArn().hashCode());
hashCode = prime * hashCode + ((getName() == null) ? 0 : getName().hashCode());
hashCode = prime * hashCode + ((getIpAddressType() == null) ? 0 : getIpAddressType().hashCode());
hashCode = prime * hashCode + ((getEnabled() == null) ? 0 : getEnabled().hashCode());
return hashCode;
}
    /**
     * Creates a shallow copy of this request via {@code super.clone()}, narrowing
     * the declared return type with a cast.
     */
    @Override
    public UpdateCustomRoutingAcceleratorRequest clone() {
        return (UpdateCustomRoutingAcceleratorRequest) super.clone();
    }
}
| |
/*
* Copyright 2012-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.devtools.restart;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.Collection;
import java.util.Collections;
import java.util.concurrent.ThreadFactory;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.ObjectFactory;
import org.springframework.boot.devtools.restart.classloader.ClassLoaderFile;
import org.springframework.boot.devtools.restart.classloader.ClassLoaderFile.Kind;
import org.springframework.boot.devtools.restart.classloader.ClassLoaderFiles;
import org.springframework.boot.test.OutputCapture;
import org.springframework.context.ApplicationListener;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.event.ContextClosedEvent;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import org.springframework.util.FileCopyUtils;
import org.springframework.util.StringUtils;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.BDDMockito.given;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verifyZeroInteractions;
/**
* Tests for {@link Restarter}.
*
* @author Phillip Webb
* @author Andy Wilkinson
*/
public class RestarterTests {

	@Rule
	public ExpectedException thrown = ExpectedException.none();

	@Rule
	public OutputCapture out = new OutputCapture();

	@Before
	public void setup() {
		Restarter.setInstance(new TestableRestarter());
	}

	@After
	public void cleanup() {
		Restarter.clearInstance();
	}

	@Test
	public void cantGetInstanceBeforeInitialize() throws Exception {
		Restarter.clearInstance();
		this.thrown.expect(IllegalStateException.class);
		this.thrown.expectMessage("Restarter has not been initialized");
		Restarter.getInstance();
	}

	@Test
	public void testRestart() throws Exception {
		Restarter.clearInstance();
		Thread thread = new Thread() {
			@Override
			public void run() {
				SampleApplication.main();
			}
		};
		thread.start();
		// Give the sample application time to restart itself at least once.
		Thread.sleep(2600);
		String output = this.out.toString();
		assertThat(StringUtils.countOccurrencesOf(output, "Tick 0")).isGreaterThan(1);
		assertThat(StringUtils.countOccurrencesOf(output, "Tick 1")).isGreaterThan(1);
		assertThat(CloseCountingApplicationListener.closed).isGreaterThan(0);
	}

	@Test
	@SuppressWarnings("rawtypes")
	public void getOrAddAttributeWithNewAttribute() throws Exception {
		ObjectFactory objectFactory = mock(ObjectFactory.class);
		given(objectFactory.getObject()).willReturn("abc");
		Object attribute = Restarter.getInstance().getOrAddAttribute("x", objectFactory);
		assertThat(attribute).isEqualTo("abc");
	}

	// Fix: this method was missing @Test, so the null-check expectation was
	// silently skipped by the JUnit runner.
	@Test
	public void addUrlsMustNotBeNull() throws Exception {
		this.thrown.expect(IllegalArgumentException.class);
		this.thrown.expectMessage("Urls must not be null");
		Restarter.getInstance().addUrls(null);
	}

	@Test
	public void addUrls() throws Exception {
		URL url = new URL("file:/proj/module-a.jar!/");
		Collection<URL> urls = Collections.singleton(url);
		Restarter restarter = Restarter.getInstance();
		restarter.addUrls(urls);
		restarter.restart();
		ClassLoader classLoader = ((TestableRestarter) restarter)
				.getRelaunchClassLoader();
		assertThat(((URLClassLoader) classLoader).getURLs()[0]).isEqualTo(url);
	}

	@Test
	public void addClassLoaderFilesMustNotBeNull() throws Exception {
		this.thrown.expect(IllegalArgumentException.class);
		this.thrown.expectMessage("ClassLoaderFiles must not be null");
		Restarter.getInstance().addClassLoaderFiles(null);
	}

	@Test
	public void addClassLoaderFiles() throws Exception {
		ClassLoaderFiles classLoaderFiles = new ClassLoaderFiles();
		classLoaderFiles.addFile("f", new ClassLoaderFile(Kind.ADDED, "abc".getBytes()));
		Restarter restarter = Restarter.getInstance();
		restarter.addClassLoaderFiles(classLoaderFiles);
		restarter.restart();
		ClassLoader classLoader = ((TestableRestarter) restarter)
				.getRelaunchClassLoader();
		assertThat(FileCopyUtils.copyToByteArray(classLoader.getResourceAsStream("f")))
				.isEqualTo("abc".getBytes());
	}

	@Test
	@SuppressWarnings("rawtypes")
	public void getOrAddAttributeWithExistingAttribute() throws Exception {
		Restarter.getInstance().getOrAddAttribute("x", new ObjectFactory<String>() {
			@Override
			public String getObject() throws BeansException {
				return "abc";
			}
		});
		ObjectFactory objectFactory = mock(ObjectFactory.class);
		Object attribute = Restarter.getInstance().getOrAddAttribute("x", objectFactory);
		assertThat(attribute).isEqualTo("abc");
		// The second factory must never be invoked for an existing attribute.
		verifyZeroInteractions(objectFactory);
	}

	@Test
	public void getThreadFactory() throws Exception {
		final ClassLoader parentLoader = Thread.currentThread().getContextClassLoader();
		final ClassLoader contextClassLoader = new URLClassLoader(new URL[0]);
		Thread thread = new Thread() {
			@Override
			public void run() {
				Runnable runnable = mock(Runnable.class);
				Thread regular = new Thread();
				ThreadFactory factory = Restarter.getInstance().getThreadFactory();
				Thread viaFactory = factory.newThread(runnable);
				// Regular threads will inherit the current thread's context class loader
				assertThat(regular.getContextClassLoader()).isEqualTo(contextClassLoader);
				// Factory threads should inherit from the initial thread
				assertThat(viaFactory.getContextClassLoader()).isEqualTo(parentLoader);
			}
		};
		thread.setContextClassLoader(contextClassLoader);
		thread.start();
		thread.join();
	}

	@Test
	public void getInitialUrls() throws Exception {
		Restarter.clearInstance();
		RestartInitializer initializer = mock(RestartInitializer.class);
		URL[] urls = new URL[] { new URL("file:/proj/module-a.jar!/") };
		given(initializer.getInitialUrls(any(Thread.class))).willReturn(urls);
		Restarter.initialize(new String[0], false, initializer, false);
		assertThat(Restarter.getInstance().getInitialUrls()).isEqualTo(urls);
	}

	/**
	 * Scheduled application that restarts itself until {@code quit} is set,
	 * emitting "Tick" output that the tests count.
	 */
	@Component
	@EnableScheduling
	public static class SampleApplication {

		private int count = 0;

		private static volatile boolean quit = false;

		@Scheduled(fixedDelay = 200)
		public void tickBean() {
			System.out.println("Tick " + this.count++ + " " + Thread.currentThread());
		}

		@Scheduled(initialDelay = 500, fixedDelay = 500)
		public void restart() {
			System.out.println("Restart " + Thread.currentThread());
			if (!SampleApplication.quit) {
				Restarter.getInstance().restart();
			}
		}

		public static void main(String... args) {
			Restarter.initialize(args, false, new MockRestartInitializer(), true);
			AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(
					SampleApplication.class);
			context.addApplicationListener(new CloseCountingApplicationListener());
			Restarter.getInstance().prepare(context);
			System.out.println("Sleep " + Thread.currentThread());
			sleep();
			quit = true;
		}

		private static void sleep() {
			try {
				Thread.sleep(1200);
			}
			catch (InterruptedException ex) {
				// Ignore
			}
		}

	}

	/** Counts context-closed events so tests can assert a restart happened. */
	private static class CloseCountingApplicationListener
			implements ApplicationListener<ContextClosedEvent> {

		static int closed = 0;

		@Override
		public void onApplicationEvent(ContextClosedEvent event) {
			closed++;
		}

	}

	/** Restarter that records the relaunch class loader instead of relaunching. */
	private static class TestableRestarter extends Restarter {

		private ClassLoader relaunchClassLoader;

		TestableRestarter() {
			this(Thread.currentThread(), new String[] {}, false,
					new MockRestartInitializer());
		}

		protected TestableRestarter(Thread thread, String[] args,
				boolean forceReferenceCleanup, RestartInitializer initializer) {
			super(thread, args, forceReferenceCleanup, initializer);
		}

		@Override
		public void restart(FailureHandler failureHandler) {
			try {
				stop();
				start(failureHandler);
			}
			catch (Exception ex) {
				throw new IllegalStateException(ex);
			}
		}

		@Override
		protected Throwable relaunch(ClassLoader classLoader) throws Exception {
			this.relaunchClassLoader = classLoader;
			return null;
		}

		@Override
		protected void stop() throws Exception {
		}

		public ClassLoader getRelaunchClassLoader() {
			return this.relaunchClassLoader;
		}

	}

}
| |
package com.example.unique_type_name;
import com.apollographql.apollo.api.FragmentResponseFieldMapper;
import com.apollographql.apollo.api.Operation;
import com.apollographql.apollo.api.OperationName;
import com.apollographql.apollo.api.Query;
import com.apollographql.apollo.api.ResponseField;
import com.apollographql.apollo.api.ResponseFieldMapper;
import com.apollographql.apollo.api.ResponseFieldMarshaller;
import com.apollographql.apollo.api.ResponseReader;
import com.apollographql.apollo.api.ResponseWriter;
import com.apollographql.apollo.api.internal.Optional;
import com.example.unique_type_name.fragment.HeroDetails;
import com.example.unique_type_name.type.Episode;
import java.lang.Double;
import java.lang.NullPointerException;
import java.lang.Object;
import java.lang.Override;
import java.lang.String;
import java.util.Arrays;
import java.util.List;
import javax.annotation.Generated;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
@Generated("Apollo GraphQL")
public final class HeroDetailQuery implements Query<HeroDetailQuery.Data, Optional<HeroDetailQuery.Data>, Operation.Variables> {
  /** Raw GraphQL text of this operation (fragment definitions appended below). */
  public static final String OPERATION_DEFINITION = "query HeroDetailQuery {\n"
      + "  heroDetailQuery {\n"
      + "    __typename\n"
      + "    name\n"
      + "    friends {\n"
      + "      __typename\n"
      + "      name\n"
      + "    }\n"
      + "    ... on Human {\n"
      + "      __typename\n"
      + "      height\n"
      + "      friends {\n"
      + "        __typename\n"
      + "        appearsIn\n"
      + "        friends {\n"
      + "          __typename\n"
      + "          ...HeroDetails\n"
      + "        }\n"
      + "      }\n"
      + "    }\n"
      + "  }\n"
      + "}";

  /** Full document sent to the server: operation plus referenced fragment definitions. */
  public static final String QUERY_DOCUMENT = OPERATION_DEFINITION + "\n"
    + HeroDetails.FRAGMENT_DEFINITION;

  /** Stable operation name used for logging and persisted queries. */
  private static final OperationName OPERATION_NAME = new OperationName() {
    @Override
    public String name() {
      return "HeroDetailQuery";
    }
  };

  // This operation declares no variables, so this is always EMPTY_VARIABLES.
  private final Operation.Variables variables;

  public HeroDetailQuery() {
    this.variables = Operation.EMPTY_VARIABLES;
  }
  /** SHA-256 hash of the query document, used as a persisted-query identifier. */
  @Override
  public String operationId() {
    return "1bffe51c14b1ef09ab54127a8e231d654eac3570c6106b924e0ac9d351061acb";
  }

  @Override
  public String queryDocument() {
    return QUERY_DOCUMENT;
  }

  // The root field is nullable, so parsed data is wrapped in an Optional.
  @Override
  public Optional<HeroDetailQuery.Data> wrapData(HeroDetailQuery.Data data) {
    return Optional.fromNullable(data);
  }

  @Override
  public Operation.Variables variables() {
    return variables;
  }

  @Override
  public ResponseFieldMapper<HeroDetailQuery.Data> responseFieldMapper() {
    return new Data.Mapper();
  }

  public static Builder builder() {
    return new Builder();
  }

  @Override
  public OperationName name() {
    return OPERATION_NAME;
  }

  /** Builder kept for API consistency; this operation has no variables to set. */
  public static final class Builder {
    Builder() {
    }

    public HeroDetailQuery build() {
      return new HeroDetailQuery();
    }
  }
  /**
   * Top-level response data for {@link HeroDetailQuery}; wraps the single
   * nullable {@code heroDetailQuery} root field.
   */
  public static class Data implements Operation.Data {
    static final ResponseField[] $responseFields = {
      ResponseField.forObject("heroDetailQuery", "heroDetailQuery", null, true)
    };

    // Absent when the server returned null for the field.
    final Optional<HeroDetailQuery1> heroDetailQuery;

    // Lazily computed, memoized by toString()/hashCode() below.
    private volatile String $toString;

    private volatile int $hashCode;

    private volatile boolean $hashCodeMemoized;

    public Data(@Nullable HeroDetailQuery1 heroDetailQuery) {
      this.heroDetailQuery = Optional.fromNullable(heroDetailQuery);
    }

    public Optional<HeroDetailQuery1> heroDetailQuery() {
      return this.heroDetailQuery;
    }

    // Serializes this object back into the GraphQL response shape.
    public ResponseFieldMarshaller marshaller() {
      return new ResponseFieldMarshaller() {
        @Override
        public void marshal(ResponseWriter writer) {
          writer.writeObject($responseFields[0], heroDetailQuery.isPresent() ? heroDetailQuery.get().marshaller() : null);
        }
      };
    }

    @Override
    public String toString() {
      if ($toString == null) {
        $toString = "Data{"
          + "heroDetailQuery=" + heroDetailQuery
          + "}";
      }
      return $toString;
    }

    @Override
    public boolean equals(Object o) {
      if (o == this) {
        return true;
      }
      if (o instanceof Data) {
        Data that = (Data) o;
        return this.heroDetailQuery.equals(that.heroDetailQuery);
      }
      return false;
    }

    @Override
    public int hashCode() {
      if (!$hashCodeMemoized) {
        int h = 1;
        h *= 1000003;
        h ^= heroDetailQuery.hashCode();
        $hashCode = h;
        $hashCodeMemoized = true;
      }
      return $hashCode;
    }

    /** Reads a {@link Data} instance from a raw GraphQL response. */
    public static final class Mapper implements ResponseFieldMapper<Data> {
      final HeroDetailQuery1.Mapper heroDetailQuery1FieldMapper = new HeroDetailQuery1.Mapper();

      @Override
      public Data map(ResponseReader reader) {
        final HeroDetailQuery1 heroDetailQuery = reader.readObject($responseFields[0], new ResponseReader.ObjectReader<HeroDetailQuery1>() {
          @Override
          public HeroDetailQuery1 read(ResponseReader reader) {
            return heroDetailQuery1FieldMapper.map(reader);
          }
        });
        return new Data(heroDetailQuery);
      }
    }
  }
  /**
   * Shape of the root {@code heroDetailQuery} field: the common character
   * fields plus an optional {@link AsHuman} projection populated when the
   * concrete server-side type is {@code Human}.
   */
  public static class HeroDetailQuery1 {
    static final ResponseField[] $responseFields = {
      ResponseField.forString("__typename", "__typename", null, false),
      ResponseField.forString("name", "name", null, false),
      ResponseField.forObjectList("friends", "friends", null, true),
      ResponseField.forInlineFragment("__typename", "__typename", Arrays.asList("Human"))
    };

    final @Nonnull String __typename;

    final @Nonnull String name;

    final Optional<List<Friend>> friends;

    // Present only when __typename is "Human".
    final Optional<AsHuman> asHuman;

    private volatile String $toString;

    private volatile int $hashCode;

    private volatile boolean $hashCodeMemoized;

    public HeroDetailQuery1(@Nonnull String __typename, @Nonnull String name,
        @Nullable List<Friend> friends, @Nullable AsHuman asHuman) {
      if (__typename == null) {
        throw new NullPointerException("__typename can't be null");
      }
      this.__typename = __typename;
      if (name == null) {
        throw new NullPointerException("name can't be null");
      }
      this.name = name;
      this.friends = Optional.fromNullable(friends);
      this.asHuman = Optional.fromNullable(asHuman);
    }

    public @Nonnull String __typename() {
      return this.__typename;
    }

    /**
     * The name of the character
     */
    public @Nonnull String name() {
      return this.name;
    }

    /**
     * The friends of the character, or an empty list if they have none
     */
    public Optional<List<Friend>> friends() {
      return this.friends;
    }

    public Optional<AsHuman> asHuman() {
      return this.asHuman;
    }

    public ResponseFieldMarshaller marshaller() {
      return new ResponseFieldMarshaller() {
        @Override
        public void marshal(ResponseWriter writer) {
          writer.writeString($responseFields[0], __typename);
          writer.writeString($responseFields[1], name);
          writer.writeList($responseFields[2], friends.isPresent() ? new ResponseWriter.ListWriter() {
            @Override
            public void write(ResponseWriter.ListItemWriter listItemWriter) {
              for (Friend $item : friends.get()) {
                listItemWriter.writeObject($item.marshaller());
              }
            }
          } : null);
          // The inline fragment marshals its fields into the same writer.
          final AsHuman $asHuman = asHuman.isPresent() ? asHuman.get() : null;
          if ($asHuman != null) {
            $asHuman.marshaller().marshal(writer);
          }
        }
      };
    }

    @Override
    public String toString() {
      if ($toString == null) {
        $toString = "HeroDetailQuery1{"
          + "__typename=" + __typename + ", "
          + "name=" + name + ", "
          + "friends=" + friends + ", "
          + "asHuman=" + asHuman
          + "}";
      }
      return $toString;
    }

    @Override
    public boolean equals(Object o) {
      if (o == this) {
        return true;
      }
      if (o instanceof HeroDetailQuery1) {
        HeroDetailQuery1 that = (HeroDetailQuery1) o;
        return this.__typename.equals(that.__typename)
         && this.name.equals(that.name)
         && this.friends.equals(that.friends)
         && this.asHuman.equals(that.asHuman);
      }
      return false;
    }

    @Override
    public int hashCode() {
      if (!$hashCodeMemoized) {
        int h = 1;
        h *= 1000003;
        h ^= __typename.hashCode();
        h *= 1000003;
        h ^= name.hashCode();
        h *= 1000003;
        h ^= friends.hashCode();
        h *= 1000003;
        h ^= asHuman.hashCode();
        $hashCode = h;
        $hashCodeMemoized = true;
      }
      return $hashCode;
    }

    public static final class Mapper implements ResponseFieldMapper<HeroDetailQuery1> {
      final Friend.Mapper friendFieldMapper = new Friend.Mapper();

      final AsHuman.Mapper asHumanFieldMapper = new AsHuman.Mapper();

      @Override
      public HeroDetailQuery1 map(ResponseReader reader) {
        final String __typename = reader.readString($responseFields[0]);
        final String name = reader.readString($responseFields[1]);
        final List<Friend> friends = reader.readList($responseFields[2], new ResponseReader.ListReader<Friend>() {
          @Override
          public Friend read(ResponseReader.ListItemReader reader) {
            return reader.readObject(new ResponseReader.ObjectReader<Friend>() {
              @Override
              public Friend read(ResponseReader reader) {
                return friendFieldMapper.map(reader);
              }
            });
          }
        });
        // Only invoked when __typename matches one of the fragment's possible types.
        final AsHuman asHuman = reader.readConditional((ResponseField.ConditionalTypeField) $responseFields[3], new ResponseReader.ConditionalTypeReader<AsHuman>() {
          @Override
          public AsHuman read(String conditionalType, ResponseReader reader) {
            return asHumanFieldMapper.map(reader);
          }
        });
        return new HeroDetailQuery1(__typename, name, friends, asHuman);
      }
    }
  }
public static class Friend {
static final ResponseField[] $responseFields = {
ResponseField.forString("__typename", "__typename", null, false),
ResponseField.forString("name", "name", null, false)
};
final @Nonnull String __typename;
final @Nonnull String name;
private volatile String $toString;
private volatile int $hashCode;
private volatile boolean $hashCodeMemoized;
public Friend(@Nonnull String __typename, @Nonnull String name) {
if (__typename == null) {
throw new NullPointerException("__typename can't be null");
}
this.__typename = __typename;
if (name == null) {
throw new NullPointerException("name can't be null");
}
this.name = name;
}
public @Nonnull String __typename() {
return this.__typename;
}
/**
* The name of the character
*/
public @Nonnull String name() {
return this.name;
}
public ResponseFieldMarshaller marshaller() {
return new ResponseFieldMarshaller() {
@Override
public void marshal(ResponseWriter writer) {
writer.writeString($responseFields[0], __typename);
writer.writeString($responseFields[1], name);
}
};
}
@Override
public String toString() {
if ($toString == null) {
$toString = "Friend{"
+ "__typename=" + __typename + ", "
+ "name=" + name
+ "}";
}
return $toString;
}
@Override
public boolean equals(Object o) {
if (o == this) {
return true;
}
if (o instanceof Friend) {
Friend that = (Friend) o;
return this.__typename.equals(that.__typename)
&& this.name.equals(that.name);
}
return false;
}
@Override
public int hashCode() {
if (!$hashCodeMemoized) {
int h = 1;
h *= 1000003;
h ^= __typename.hashCode();
h *= 1000003;
h ^= name.hashCode();
$hashCode = h;
$hashCodeMemoized = true;
}
return $hashCode;
}
public static final class Mapper implements ResponseFieldMapper<Friend> {
@Override
public Friend map(ResponseReader reader) {
final String __typename = reader.readString($responseFields[0]);
final String name = reader.readString($responseFields[1]);
return new Friend(__typename, name);
}
}
}
  /**
   * Fields selected by the {@code ... on Human} inline fragment: the common
   * character fields plus the Human-only {@code height}.
   */
  public static class AsHuman {
    static final ResponseField[] $responseFields = {
      ResponseField.forString("__typename", "__typename", null, false),
      ResponseField.forString("name", "name", null, false),
      ResponseField.forObjectList("friends", "friends", null, true),
      ResponseField.forDouble("height", "height", null, true)
    };

    final @Nonnull String __typename;

    final @Nonnull String name;

    final Optional<List<Friend1>> friends;

    final Optional<Double> height;

    private volatile String $toString;

    private volatile int $hashCode;

    private volatile boolean $hashCodeMemoized;

    public AsHuman(@Nonnull String __typename, @Nonnull String name,
        @Nullable List<Friend1> friends, @Nullable Double height) {
      if (__typename == null) {
        throw new NullPointerException("__typename can't be null");
      }
      this.__typename = __typename;
      if (name == null) {
        throw new NullPointerException("name can't be null");
      }
      this.name = name;
      this.friends = Optional.fromNullable(friends);
      this.height = Optional.fromNullable(height);
    }

    public @Nonnull String __typename() {
      return this.__typename;
    }

    /**
     * What this human calls themselves
     */
    public @Nonnull String name() {
      return this.name;
    }

    /**
     * This human's friends, or an empty list if they have none
     */
    public Optional<List<Friend1>> friends() {
      return this.friends;
    }

    /**
     * Height in the preferred unit, default is meters
     */
    public Optional<Double> height() {
      return this.height;
    }

    public ResponseFieldMarshaller marshaller() {
      return new ResponseFieldMarshaller() {
        @Override
        public void marshal(ResponseWriter writer) {
          writer.writeString($responseFields[0], __typename);
          writer.writeString($responseFields[1], name);
          writer.writeList($responseFields[2], friends.isPresent() ? new ResponseWriter.ListWriter() {
            @Override
            public void write(ResponseWriter.ListItemWriter listItemWriter) {
              for (Friend1 $item : friends.get()) {
                listItemWriter.writeObject($item.marshaller());
              }
            }
          } : null);
          writer.writeDouble($responseFields[3], height.isPresent() ? height.get() : null);
        }
      };
    }

    @Override
    public String toString() {
      if ($toString == null) {
        $toString = "AsHuman{"
          + "__typename=" + __typename + ", "
          + "name=" + name + ", "
          + "friends=" + friends + ", "
          + "height=" + height
          + "}";
      }
      return $toString;
    }

    @Override
    public boolean equals(Object o) {
      if (o == this) {
        return true;
      }
      if (o instanceof AsHuman) {
        AsHuman that = (AsHuman) o;
        return this.__typename.equals(that.__typename)
         && this.name.equals(that.name)
         && this.friends.equals(that.friends)
         && this.height.equals(that.height);
      }
      return false;
    }

    @Override
    public int hashCode() {
      if (!$hashCodeMemoized) {
        int h = 1;
        h *= 1000003;
        h ^= __typename.hashCode();
        h *= 1000003;
        h ^= name.hashCode();
        h *= 1000003;
        h ^= friends.hashCode();
        h *= 1000003;
        h ^= height.hashCode();
        $hashCode = h;
        $hashCodeMemoized = true;
      }
      return $hashCode;
    }

    public static final class Mapper implements ResponseFieldMapper<AsHuman> {
      final Friend1.Mapper friend1FieldMapper = new Friend1.Mapper();

      @Override
      public AsHuman map(ResponseReader reader) {
        final String __typename = reader.readString($responseFields[0]);
        final String name = reader.readString($responseFields[1]);
        final List<Friend1> friends = reader.readList($responseFields[2], new ResponseReader.ListReader<Friend1>() {
          @Override
          public Friend1 read(ResponseReader.ListItemReader reader) {
            return reader.readObject(new ResponseReader.ObjectReader<Friend1>() {
              @Override
              public Friend1 read(ResponseReader reader) {
                return friend1FieldMapper.map(reader);
              }
            });
          }
        });
        final Double height = reader.readDouble($responseFields[3]);
        return new AsHuman(__typename, name, friends, height);
      }
    }
  }
  /**
   * A friend of the Human projection, including the episodes they appear in
   * and their own friends (whose fields come from the HeroDetails fragment).
   */
  public static class Friend1 {
    static final ResponseField[] $responseFields = {
      ResponseField.forString("__typename", "__typename", null, false),
      ResponseField.forString("name", "name", null, false),
      ResponseField.forScalarList("appearsIn", "appearsIn", null, false),
      ResponseField.forObjectList("friends", "friends", null, true)
    };

    final @Nonnull String __typename;

    final @Nonnull String name;

    final @Nonnull List<Episode> appearsIn;

    final Optional<List<Friend2>> friends;

    private volatile String $toString;

    private volatile int $hashCode;

    private volatile boolean $hashCodeMemoized;

    public Friend1(@Nonnull String __typename, @Nonnull String name,
        @Nonnull List<Episode> appearsIn, @Nullable List<Friend2> friends) {
      if (__typename == null) {
        throw new NullPointerException("__typename can't be null");
      }
      this.__typename = __typename;
      if (name == null) {
        throw new NullPointerException("name can't be null");
      }
      this.name = name;
      if (appearsIn == null) {
        throw new NullPointerException("appearsIn can't be null");
      }
      this.appearsIn = appearsIn;
      this.friends = Optional.fromNullable(friends);
    }

    public @Nonnull String __typename() {
      return this.__typename;
    }

    /**
     * The name of the character
     */
    public @Nonnull String name() {
      return this.name;
    }

    /**
     * The movies this character appears in
     */
    public @Nonnull List<Episode> appearsIn() {
      return this.appearsIn;
    }

    /**
     * The friends of the character, or an empty list if they have none
     */
    public Optional<List<Friend2>> friends() {
      return this.friends;
    }

    public ResponseFieldMarshaller marshaller() {
      return new ResponseFieldMarshaller() {
        @Override
        public void marshal(ResponseWriter writer) {
          writer.writeString($responseFields[0], __typename);
          writer.writeString($responseFields[1], name);
          // Enum values are serialized using their declared constant names.
          writer.writeList($responseFields[2], new ResponseWriter.ListWriter() {
            @Override
            public void write(ResponseWriter.ListItemWriter listItemWriter) {
              for (Episode $item : appearsIn) {
                listItemWriter.writeString($item.name());
              }
            }
          });
          writer.writeList($responseFields[3], friends.isPresent() ? new ResponseWriter.ListWriter() {
            @Override
            public void write(ResponseWriter.ListItemWriter listItemWriter) {
              for (Friend2 $item : friends.get()) {
                listItemWriter.writeObject($item.marshaller());
              }
            }
          } : null);
        }
      };
    }

    @Override
    public String toString() {
      if ($toString == null) {
        $toString = "Friend1{"
          + "__typename=" + __typename + ", "
          + "name=" + name + ", "
          + "appearsIn=" + appearsIn + ", "
          + "friends=" + friends
          + "}";
      }
      return $toString;
    }

    @Override
    public boolean equals(Object o) {
      if (o == this) {
        return true;
      }
      if (o instanceof Friend1) {
        Friend1 that = (Friend1) o;
        return this.__typename.equals(that.__typename)
         && this.name.equals(that.name)
         && this.appearsIn.equals(that.appearsIn)
         && this.friends.equals(that.friends);
      }
      return false;
    }

    @Override
    public int hashCode() {
      if (!$hashCodeMemoized) {
        int h = 1;
        h *= 1000003;
        h ^= __typename.hashCode();
        h *= 1000003;
        h ^= name.hashCode();
        h *= 1000003;
        h ^= appearsIn.hashCode();
        h *= 1000003;
        h ^= friends.hashCode();
        $hashCode = h;
        $hashCodeMemoized = true;
      }
      return $hashCode;
    }

    public static final class Mapper implements ResponseFieldMapper<Friend1> {
      final Friend2.Mapper friend2FieldMapper = new Friend2.Mapper();

      @Override
      public Friend1 map(ResponseReader reader) {
        final String __typename = reader.readString($responseFields[0]);
        final String name = reader.readString($responseFields[1]);
        final List<Episode> appearsIn = reader.readList($responseFields[2], new ResponseReader.ListReader<Episode>() {
          @Override
          public Episode read(ResponseReader.ListItemReader reader) {
            return Episode.valueOf(reader.readString());
          }
        });
        final List<Friend2> friends = reader.readList($responseFields[3], new ResponseReader.ListReader<Friend2>() {
          @Override
          public Friend2 read(ResponseReader.ListItemReader reader) {
            return reader.readObject(new ResponseReader.ObjectReader<Friend2>() {
              @Override
              public Friend2 read(ResponseReader reader) {
                return friend2FieldMapper.map(reader);
              }
            });
          }
        });
        return new Friend1(__typename, name, appearsIn, friends);
      }
    }
  }
public static class Friend2 {
    static final ResponseField[] $responseFields = {
      ResponseField.forString("__typename", "__typename", null, false),
      ResponseField.forFragment("__typename", "__typename", Arrays.asList("Human",
        "Droid"))
    };

    final @Nonnull String __typename;

    // Fields selected by the HeroDetails named fragment, grouped in a container.
    private final @Nonnull Fragments fragments;

    private volatile String $toString;

    private volatile int $hashCode;

    private volatile boolean $hashCodeMemoized;

    public Friend2(@Nonnull String __typename, @Nonnull Fragments fragments) {
      if (__typename == null) {
        throw new NullPointerException("__typename can't be null");
      }
      this.__typename = __typename;
      if (fragments == null) {
        throw new NullPointerException("fragments can't be null");
      }
      this.fragments = fragments;
    }

    public @Nonnull String __typename() {
      return this.__typename;
    }

    public @Nonnull Fragments fragments() {
      return this.fragments;
    }

    public ResponseFieldMarshaller marshaller() {
      return new ResponseFieldMarshaller() {
        @Override
        public void marshal(ResponseWriter writer) {
          writer.writeString($responseFields[0], __typename);
          // Fragment fields are flattened into the same writer.
          fragments.marshaller().marshal(writer);
        }
      };
    }

    @Override
    public String toString() {
      if ($toString == null) {
        $toString = "Friend2{"
          + "__typename=" + __typename + ", "
          + "fragments=" + fragments
          + "}";
      }
      return $toString;
    }

    @Override
    public boolean equals(Object o) {
      if (o == this) {
        return true;
      }
      if (o instanceof Friend2) {
        Friend2 that = (Friend2) o;
        return this.__typename.equals(that.__typename)
         && this.fragments.equals(that.fragments);
      }
      return false;
    }

    @Override
    public int hashCode() {
      if (!$hashCodeMemoized) {
        int h = 1;
        h *= 1000003;
        h ^= __typename.hashCode();
        h *= 1000003;
        h ^= fragments.hashCode();
        $hashCode = h;
        $hashCodeMemoized = true;
      }
      return $hashCode;
    }
public static class Fragments {
final @Nonnull HeroDetails heroDetails;
private volatile String $toString;
private volatile int $hashCode;
private volatile boolean $hashCodeMemoized;
public Fragments(@Nonnull HeroDetails heroDetails) {
if (heroDetails == null) {
throw new NullPointerException("heroDetails can't be null");
}
this.heroDetails = heroDetails;
}
public @Nonnull HeroDetails heroDetails() {
return this.heroDetails;
}
public ResponseFieldMarshaller marshaller() {
return new ResponseFieldMarshaller() {
@Override
public void marshal(ResponseWriter writer) {
final HeroDetails $heroDetails = heroDetails;
if ($heroDetails != null) {
$heroDetails.marshaller().marshal(writer);
}
}
};
}
@Override
public String toString() {
if ($toString == null) {
$toString = "Fragments{"
+ "heroDetails=" + heroDetails
+ "}";
}
return $toString;
}
@Override
public boolean equals(Object o) {
if (o == this) {
return true;
}
if (o instanceof Fragments) {
Fragments that = (Fragments) o;
return this.heroDetails.equals(that.heroDetails);
}
return false;
}
@Override
public int hashCode() {
if (!$hashCodeMemoized) {
int h = 1;
h *= 1000003;
h ^= heroDetails.hashCode();
$hashCode = h;
$hashCodeMemoized = true;
}
return $hashCode;
}
public static final class Mapper implements FragmentResponseFieldMapper<Fragments> {
final HeroDetails.Mapper heroDetailsFieldMapper = new HeroDetails.Mapper();
@Override
public @Nonnull Fragments map(ResponseReader reader, @Nonnull String conditionalType) {
HeroDetails heroDetails = null;
if (HeroDetails.POSSIBLE_TYPES.contains(conditionalType)) {
heroDetails = heroDetailsFieldMapper.map(reader);
}
return new Fragments(heroDetails);
}
}
}
public static final class Mapper implements ResponseFieldMapper<Friend2> {
final Fragments.Mapper fragmentsFieldMapper = new Fragments.Mapper();
@Override
public Friend2 map(ResponseReader reader) {
final String __typename = reader.readString($responseFields[0]);
final Fragments fragments = reader.readConditional((ResponseField.ConditionalTypeField) $responseFields[1], new ResponseReader.ConditionalTypeReader<Fragments>() {
@Override
public Fragments read(String conditionalType, ResponseReader reader) {
return fragmentsFieldMapper.map(reader, conditionalType);
}
});
return new Friend2(__typename, fragments);
}
}
}
}
| |
/*
* Copyright 2004 Sun Microsystems, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.rometools.rome.feed.synd.impl;
import java.util.ArrayList;
import java.util.List;
import org.jdom2.Element;
import com.rometools.rome.feed.WireFeed;
import com.rometools.rome.feed.module.impl.ModuleUtils;
import com.rometools.rome.feed.rss.Channel;
import com.rometools.rome.feed.rss.Image;
import com.rometools.rome.feed.rss.Item;
import com.rometools.rome.feed.rss.Source;
import com.rometools.rome.feed.synd.Converter;
import com.rometools.rome.feed.synd.SyndEntry;
import com.rometools.rome.feed.synd.SyndEntryImpl;
import com.rometools.rome.feed.synd.SyndFeed;
import com.rometools.rome.feed.synd.SyndFeedImpl;
import com.rometools.rome.feed.synd.SyndImage;
import com.rometools.rome.feed.synd.SyndImageImpl;
import com.rometools.rome.feed.synd.SyndLink;
/**
*/
/**
 * Converts between the RSS 0.9 wire-feed model ({@link Channel}/{@link Item})
 * and the format-independent synd model ({@link SyndFeed}/{@link SyndEntry}).
 * Converters for later RSS versions subclass this one, passing their own type
 * string to the protected constructor and overriding the createXxx hooks.
 */
public class ConverterForRSS090 implements Converter {

    private final String type;

    public ConverterForRSS090() {
        this("rss_0.9");
    }

    /**
     * @param type the wire-feed type string this converter handles (e.g. "rss_0.9")
     */
    protected ConverterForRSS090(final String type) {
        this.type = type;
    }

    @Override
    public String getType() {
        return type;
    }

    /**
     * Copies a parsed wire feed into the given synd feed: modules, foreign
     * markup, stylesheet/encoding, channel metadata, image and entries.
     */
    @Override
    public void copyInto(final WireFeed feed, final SyndFeed syndFeed) {

        syndFeed.setModules(ModuleUtils.cloneModules(feed.getModules()));

        final List<Element> foreignMarkup = feed.getForeignMarkup();
        if (!foreignMarkup.isEmpty()) {
            syndFeed.setForeignMarkup(foreignMarkup);
        }

        syndFeed.setStyleSheet(feed.getStyleSheet());
        syndFeed.setEncoding(feed.getEncoding());

        final Channel channel = (Channel) feed;
        syndFeed.setTitle(channel.getTitle());
        syndFeed.setLink(channel.getLink());
        syndFeed.setDescription(channel.getDescription());

        final Image image = channel.getImage();
        if (image != null) {
            syndFeed.setImage(createSyndImage(image));
        }

        final List<Item> items = channel.getItems();
        if (items != null) {
            syndFeed.setEntries(createSyndEntries(items, syndFeed.isPreservingWireFeed()));
        }
    }

    /** Maps an RSS image to a synd image field-by-field. */
    protected SyndImage createSyndImage(final Image rssImage) {
        final SyndImage syndImage = new SyndImageImpl();
        syndImage.setTitle(rssImage.getTitle());
        syndImage.setUrl(rssImage.getUrl());
        syndImage.setLink(rssImage.getLink());
        syndImage.setWidth(rssImage.getWidth());
        syndImage.setHeight(rssImage.getHeight());
        return syndImage;
    }

    /**
     * Converts each RSS item into a synd entry.
     *
     * @param preserveWireItems when true, the original wire item is attached
     *        to each created entry
     */
    protected List<SyndEntry> createSyndEntries(final List<Item> rssItems, final boolean preserveWireItems) {
        final List<SyndEntry> syndEntries = new ArrayList<SyndEntry>();
        for (final Item item : rssItems) {
            syndEntries.add(createSyndEntry(item, preserveWireItems));
        }
        return syndEntries;
    }

    /** Converts a single RSS item into a synd entry. */
    protected SyndEntry createSyndEntry(final Item item, final boolean preserveWireItem) {

        final SyndEntryImpl syndEntry = new SyndEntryImpl();

        if (preserveWireItem) {
            syndEntry.setWireEntry(item);
        }

        syndEntry.setModules(ModuleUtils.cloneModules(item.getModules()));

        final List<Element> foreignMarkup = item.getForeignMarkup();
        if (!foreignMarkup.isEmpty()) {
            syndEntry.setForeignMarkup(foreignMarkup);
        }

        syndEntry.setUri(item.getUri());
        // FIX: the link was previously set twice with the same value; one call
        // suffices.
        syndEntry.setLink(item.getLink());
        syndEntry.setTitle(item.getTitle());
        syndEntry.setSource(createSource(item.getSource()));

        return syndEntry;
    }

    /** Builds a minimal synd feed from an RSS source element (or null). */
    protected SyndFeed createSource(final Source source) {
        SyndFeed feed = null;
        if (source != null) {
            feed = new SyndFeedImpl();
            feed.setLink(source.getUrl());
            feed.setUri(source.getUrl());
            feed.setTitle(source.getValue());
        }
        return feed;
    }

    @Override
    public WireFeed createRealFeed(final SyndFeed syndFeed) {
        return this.createRealFeed(getType(), syndFeed);
    }

    /**
     * Builds a wire-feed Channel of the given type from a synd feed: modules,
     * stylesheet/encoding, title/link/description, image, items and foreign
     * markup. When the synd feed has no single link, the first of its links
     * list is used.
     */
    protected WireFeed createRealFeed(final String type, final SyndFeed syndFeed) {
        final Channel channel = new Channel(type);
        channel.setModules(ModuleUtils.cloneModules(syndFeed.getModules()));
        channel.setStyleSheet(syndFeed.getStyleSheet());
        channel.setEncoding(syndFeed.getEncoding());

        channel.setTitle(syndFeed.getTitle());
        final String link = syndFeed.getLink();
        final List<SyndLink> links = syndFeed.getLinks();
        if (link != null) {
            channel.setLink(link);
        } else if (!links.isEmpty()) {
            channel.setLink(links.get(0).getHref());
        }

        channel.setDescription(syndFeed.getDescription());

        final SyndImage sImage = syndFeed.getImage();
        if (sImage != null) {
            channel.setImage(createRSSImage(sImage));
        }

        final List<SyndEntry> sEntries = syndFeed.getEntries();
        if (sEntries != null) {
            channel.setItems(createRSSItems(sEntries));
        }

        final List<Element> foreignMarkup = syndFeed.getForeignMarkup();
        if (!foreignMarkup.isEmpty()) {
            channel.setForeignMarkup(foreignMarkup);
        }
        return channel;
    }

    /** Maps a synd image back to an RSS image field-by-field. */
    protected Image createRSSImage(final SyndImage sImage) {
        final Image image = new Image();
        image.setTitle(sImage.getTitle());
        image.setUrl(sImage.getUrl());
        image.setLink(sImage.getLink());
        image.setHeight(sImage.getHeight());
        image.setWidth(sImage.getWidth());
        return image;
    }

    /** Converts each synd entry into an RSS item. */
    protected List<Item> createRSSItems(final List<SyndEntry> sEntries) {
        final List<Item> list = new ArrayList<Item>();
        for (final SyndEntry syndEntry : sEntries) {
            list.add(createRSSItem(syndEntry));
        }
        return list;
    }

    /** Converts a single synd entry into an RSS item. */
    protected Item createRSSItem(final SyndEntry sEntry) {
        final Item item = new Item();
        item.setModules(ModuleUtils.cloneModules(sEntry.getModules()));
        item.setTitle(sEntry.getTitle());
        item.setLink(sEntry.getLink());

        final List<Element> foreignMarkup = sEntry.getForeignMarkup();
        if (!foreignMarkup.isEmpty()) {
            item.setForeignMarkup(foreignMarkup);
        }

        item.setSource(createSource(sEntry.getSource()));

        final String uri = sEntry.getUri();
        if (uri != null) {
            item.setUri(uri);
        }

        return item;
    }

    /** Builds an RSS source element from a synd feed (or null). */
    protected Source createSource(final SyndFeed feed) {
        Source source = null;
        if (feed != null) {
            source = new Source();
            source.setUrl(feed.getUri());
            source.setValue(feed.getTitle());
        }
        return source;
    }
}
| |
package org.drools.compiler.rule.builder.dialect;
import org.drools.compiler.builder.impl.KnowledgeBuilderImpl;
import org.drools.compiler.commons.jci.readers.ResourceReader;
import org.drools.compiler.compiler.BoundIdentifiers;
import org.drools.compiler.compiler.DescrBuildError;
import org.drools.compiler.lang.descr.BaseDescr;
import org.drools.compiler.lang.descr.FunctionDescr;
import org.drools.compiler.lang.descr.ImportDescr;
import org.drools.compiler.lang.descr.PackageDescr;
import org.drools.compiler.rule.builder.RuleBuildContext;
import org.drools.compiler.rule.builder.dialect.java.JavaAnalysisResult;
import org.drools.compiler.rule.builder.dialect.java.parser.JavaBlockDescr;
import org.drools.compiler.rule.builder.dialect.java.parser.JavaCatchBlockDescr;
import org.drools.compiler.rule.builder.dialect.java.parser.JavaContainerBlockDescr;
import org.drools.compiler.rule.builder.dialect.java.parser.JavaElseBlockDescr;
import org.drools.compiler.rule.builder.dialect.java.parser.JavaFinalBlockDescr;
import org.drools.compiler.rule.builder.dialect.java.parser.JavaForBlockDescr;
import org.drools.compiler.rule.builder.dialect.java.parser.JavaIfBlockDescr;
import org.drools.compiler.rule.builder.dialect.java.parser.JavaInterfacePointsDescr;
import org.drools.compiler.rule.builder.dialect.java.parser.JavaLocalDeclarationDescr;
import org.drools.compiler.rule.builder.dialect.java.parser.JavaLocalDeclarationDescr.IdentifierDescr;
import org.drools.compiler.rule.builder.dialect.java.parser.JavaModifyBlockDescr;
import org.drools.compiler.rule.builder.dialect.java.parser.JavaThrowBlockDescr;
import org.drools.compiler.rule.builder.dialect.java.parser.JavaTryBlockDescr;
import org.drools.compiler.rule.builder.dialect.java.parser.JavaWhileBlockDescr;
import org.drools.compiler.rule.builder.dialect.mvel.MVELAnalysisResult;
import org.drools.compiler.rule.builder.dialect.mvel.MVELConsequenceBuilder;
import org.drools.compiler.rule.builder.dialect.mvel.MVELDialect;
import org.drools.core.factmodel.ClassDefinition;
import org.drools.core.rule.ConsequenceMetaData;
import org.drools.core.rule.Declaration;
import org.drools.core.rule.TypeDeclaration;
import org.drools.core.spi.ClassWireable;
import org.drools.core.spi.KnowledgeHelper;
import org.drools.core.util.BitMaskUtil;
import org.drools.core.util.ClassUtils;
import org.kie.api.definition.type.FactField;
import org.mvel2.CompileException;
import org.mvel2.Macro;
import org.mvel2.MacroProcessor;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static org.drools.core.util.ClassUtils.*;
import static org.drools.core.util.StringUtils.*;
public final class DialectUtil {
    // Character class of "illegal" name characters to replace: ranges
    // space..'/', ':'..'@', '['..'`', '{'..0xFF — i.e. everything except
    // ASCII digits, letters and control characters (note '_' and '$' ARE
    // matched by this class).
    private static final Pattern NON_ALPHA_REGEX = Pattern.compile("[ -/:-@\\[-`\\{-\\xff]");
    // Recognizes all three common line-break conventions (CRLF, CR, LF).
    private static final Pattern LINE_BREAK_FINDER = Pattern.compile( "\\r\\n|\\r|\\n" );
/**
     * Takes a given name and makes sure that it is legal and doesn't already exist. If the file exists, it appends an increasing counter until the name is unique.
* <p/>
*
* @param packageName
* @param name
* @param ext
* @return
*/
public static String getUniqueLegalName(final String packageName,
final String name,
final int seed,
final String ext,
final String prefix,
final ResourceReader src) {
// replaces all non alphanumeric or $ chars with _
final String newName = prefix + "_" + normalizeRuleName( name );
if (ext.equals("java")) {
return newName + Math.abs(seed);
}
final String fileName = packageName.replace('.', '/') + "/" + newName;
if (src == null || !src.isAvailable(fileName + "." + ext)) return newName;
// make sure the class name does not exist, if it does increase the counter
int counter = -1;
while (true) {
counter++;
final String actualName = fileName + "_" + counter + "." + ext;
//MVEL:test null to Fix failing test on MVELConsequenceBuilderTest.testImperativeCodeError()
if (!src.isAvailable(actualName)) break;
}
// we have duplicate file names so append counter
return newName + "_" + counter;
}
public static String fixBlockDescr(final RuleBuildContext context,
final JavaAnalysisResult analysis,
Map<String, Declaration> decls) {
// This is a list of all the non container blocks, which initially are in tree form.
List<JavaBlockDescr> blocks = buildBlockDescrs(new ArrayList<JavaBlockDescr>(), analysis.getBlockDescrs());
return fixBlockDescr(context, analysis, decls, blocks);
}
    /**
     * Rewrites all pseudo-Java blocks (modify/update/retract, entry/exit/channel
     * points, insert) in the analyzed consequence into plain Java calls on the
     * {@code drools} helper, copying the untouched code between blocks verbatim.
     *
     * @param context  rule build context (dialect lookup, error reporting)
     * @param analysis Java analysis result holding the original expression text
     * @param decls    LHS declarations keyed by identifier
     * @param blocks   flattened, non-container blocks to rewrite (sorted here)
     * @return the rewritten consequence source
     */
    public static String fixBlockDescr(final RuleBuildContext context,
                                       final JavaAnalysisResult analysis,
                                       Map<String, Declaration> decls,
                                       List<JavaBlockDescr> blocks) {
        MVELDialect mvel = (MVELDialect) context.getDialect("mvel");

        String originalCode = analysis.getAnalyzedExpr();
        BoundIdentifiers bindings = analysis.getBoundIdentifiers();

        // sorting exit points for correct order iteration
        Collections.sort(blocks,
                         new Comparator<JavaBlockDescr>() {
                             public int compare(JavaBlockDescr o1,
                                                JavaBlockDescr o2) {
                                 return o1.getStart() - o2.getStart();
                             }
                         });

        StringBuilder consequence = new StringBuilder();
        // Index into originalCode of the first character not yet copied out.
        int lastAdded = 0;

        for (JavaBlockDescr block : blocks) {
            if (block.getEnd() == 0 || block.getEnd() > originalCode.length() ) {
                // do nothing, it was incorrectly parsed, but this error should be picked up else where
                continue;
            }

            // adding chunk: the untouched code between the previous block and this one
            // (getStart() is 1-based, hence the -1).
            consequence.append(originalCode.substring(lastAdded,
                                                      block.getStart() - 1));

            lastAdded = block.getEnd();

            switch (block.getType()) {
                case MODIFY:
                case UPDATE:
                case RETRACT:
                    rewriteDescr(context,
                                 originalCode,
                                 mvel,
                                 consequence,
                                 block,
                                 bindings,
                                 decls);
                    break;
                case ENTRY:
                case EXIT:
                case CHANNEL:
                    rewriteInterfacePoint(context,
                                          originalCode,
                                          consequence,
                                          (JavaInterfacePointsDescr) block);
                    break;
                case INSERT:
                    parseInsertDescr(context, block, consequence);
                    // NOTE(review): no break — INSERT falls through to the default
                    // branch below, which appends the original block text as well.
                    // Confirm against parseInsertDescr whether this double-append
                    // is intentional before adding a break.
                default:
                    consequence.append(originalCode.substring(block.getStart() - 1, lastAdded));
            }
        }
        // Copy the trailing code after the last block.
        consequence.append(originalCode.substring(lastAdded));

        return consequence.toString();
    }
private static List<JavaBlockDescr> buildBlockDescrs(List<JavaBlockDescr> descrs,
JavaContainerBlockDescr parentBlock) {
for (JavaBlockDescr block : parentBlock.getJavaBlockDescrs()) {
if (block instanceof JavaContainerBlockDescr) {
buildBlockDescrs(descrs, (JavaContainerBlockDescr) block);
} else {
descrs.add(block);
}
}
return descrs;
}
/**
     * This code is not currently used; it is commented out at the call site. This is because we couldn't
     * get this to work and will have to wait until MVEL supports generics (mdp).
*
* @param context
* @param descrs
* @param parentBlock
* @param originalCode
* @param bindings
* @param parentVars
* @param offset
*/
    public static void setContainerBlockInputs(RuleBuildContext context,
                                               List<JavaBlockDescr> descrs,
                                               JavaContainerBlockDescr parentBlock,
                                               String originalCode,
                                               BoundIdentifiers bindings,
                                               Map<String, Class<?>> parentVars,
                                               int offset) {
        StringBuilder consequence = new StringBuilder();
        // Index into originalCode (already offset-adjusted) of the first
        // character not yet copied/blanked.
        int lastAdded = 0;

        // strip blocks, so we can analyse this block with MVEL
        // NOTE(review): all start/end positions below appear to be 1-based
        // indices into the full rule text, corrected by 'offset' into this
        // substring — confirm against the Java parser's descr contract.
        for (JavaBlockDescr block : parentBlock.getJavaBlockDescrs()) {
            if (block.getEnd() == 0) {
                // do nothing, it was incorrectly parsed, but this error should be picked up else where
                continue;
            }

            if (block.getType() == JavaBlockDescr.BlockType.TRY) {
                // adding previous chunk up to the start of this block
                consequence.append(originalCode.substring(lastAdded,
                                                          block.getStart() - 1 - offset));
                JavaTryBlockDescr tryDescr = (JavaTryBlockDescr) block;
                // Resume after the finally block if present, otherwise after
                // the last catch clause.
                if (tryDescr.getFinal() != null) {
                    lastAdded = tryDescr.getFinal().getEnd() - offset;
                } else {
                    lastAdded = tryDescr.getCatches().get(tryDescr.getCatches().size() - 1).getEnd() - offset;
                }

                stripTryDescr(originalCode,
                              consequence,
                              (JavaTryBlockDescr) block,
                              offset);
            } else if (block.getType() == JavaBlockDescr.BlockType.THROW) {
                // adding previous chunk up to the start of this block
                consequence.append(originalCode.substring(lastAdded,
                                                          block.getStart() - 1 - offset));

                JavaThrowBlockDescr throwBlock = (JavaThrowBlockDescr) block;
                // Blank the 'throw' keyword region, keep the thrown expression
                // as a bare statement so MVEL can still analyse it.
                addWhiteSpaces(originalCode, consequence, throwBlock.getStart() - offset, throwBlock.getTextStart() - offset);
                consequence.append(originalCode.substring(throwBlock.getTextStart() - offset - 1, throwBlock.getEnd() - 1 - offset)).append(";");
                lastAdded = throwBlock.getEnd() - offset;
            } else if (block.getType() == JavaBlockDescr.BlockType.IF) {
                // adding previous chunk up to the start of this block
                consequence.append(originalCode.substring(lastAdded,
                                                          block.getStart() - 1 - offset));
                JavaIfBlockDescr ifDescr = (JavaIfBlockDescr) block;
                lastAdded = ifDescr.getEnd() - offset;
                stripBlockDescr(originalCode,
                                consequence,
                                ifDescr,
                                offset);
            } else if (block.getType() == JavaBlockDescr.BlockType.ELSE) {
                // adding previous chunk up to the start of this block
                consequence.append(originalCode.substring(lastAdded,
                                                          block.getStart() - 1 - offset));
                JavaElseBlockDescr elseDescr = (JavaElseBlockDescr) block;
                lastAdded = elseDescr.getEnd() - offset;
                stripBlockDescr(originalCode,
                                consequence,
                                elseDescr,
                                offset);
            } else if (block.getType() == JavaBlockDescr.BlockType.WHILE) {
                // adding previous chunk up to the start of this block
                consequence.append(originalCode.substring(lastAdded,
                                                          block.getStart() - 1 - offset));
                JavaWhileBlockDescr whileDescr = (JavaWhileBlockDescr) block;
                lastAdded = whileDescr.getEnd() - offset;
                stripBlockDescr(originalCode,
                                consequence,
                                whileDescr,
                                offset);
            } else if (block.getType() == JavaBlockDescr.BlockType.FOR) {
                // adding previous chunk up to the start of this block
                consequence.append(originalCode.substring(lastAdded,
                                                          block.getStart() - 1 - offset));
                JavaForBlockDescr forDescr = (JavaForBlockDescr) block;
                lastAdded = forDescr.getEnd() - offset;
                stripBlockDescr(originalCode,
                                consequence,
                                forDescr,
                                offset);
            }
        }
        consequence.append(originalCode.substring(lastAdded));

        // We need to do this as MVEL doesn't recognise "modify"
        MacroProcessor macroProcessor = new MacroProcessor();
        Map<String, Macro> macros = new HashMap<String, Macro>(MVELConsequenceBuilder.macros);
        macros.put("modify",
                   new Macro() {
                       public String doMacro() {
                           return "with ";
                       }
                   });
        macroProcessor.setMacros(macros);
        String mvelCode = macroProcessor.parse(consequence.toString());


        // Ask MVEL which variables the stripped code reads/writes, then merge
        // with the variables inherited from the enclosing scope.
        Map<String, Class<?>> inputs = (Map<String, Class<?>>) getInputs(context, mvelCode, bindings, parentVars);
        inputs.putAll(parentVars);
        parentBlock.setInputs(inputs);

        // now go depth, set inputs for each nested container
        // set inputs for current container blocks to be rewritten
        for (JavaBlockDescr block : parentBlock.getJavaBlockDescrs()) {
            if (block.getType() == JavaBlockDescr.BlockType.TRY) {
                JavaTryBlockDescr tryBlock = (JavaTryBlockDescr) block;
                setContainerBlockInputs(context,
                                        descrs,
                                        tryBlock,
                                        originalCode.substring(tryBlock.getTextStart() - offset, tryBlock.getEnd() - 1 - offset),
                                        bindings,
                                        inputs,
                                        tryBlock.getTextStart());
                for (JavaCatchBlockDescr catchBlock : tryBlock.getCatches()) {
                    // Prepend "<clause>=null;" so the catch variable is known
                    // to the analysis of the catch body.
                    setContainerBlockInputs(context,
                                            descrs,
                                            catchBlock,
                                            catchBlock.getClause() + "=null;" + originalCode.substring(catchBlock.getTextStart() - offset, catchBlock.getEnd() - 1 - offset),
                                            bindings,
                                            inputs,
                                            tryBlock.getTextStart());
                }

                if (tryBlock.getFinal() != null) {
                    JavaFinalBlockDescr finalBlock = tryBlock.getFinal();
                    setContainerBlockInputs(context,
                                            descrs,
                                            finalBlock,
                                            originalCode.substring(finalBlock.getTextStart() - offset, finalBlock.getEnd() - 1 - offset),
                                            bindings,
                                            inputs,
                                            tryBlock.getTextStart());
                }
            } else if (block.getType() == JavaBlockDescr.BlockType.IF) {
                JavaIfBlockDescr ifBlock = (JavaIfBlockDescr) block;
                // Braceless bodies shift the usable text region by one char.
                int adjustBlock = (originalCode.charAt(ifBlock.getTextStart() - offset - 1) == '{') ? 0 : 1;
                setContainerBlockInputs(context,
                                        descrs,
                                        ifBlock,
                                        originalCode.substring(ifBlock.getTextStart() - offset + adjustBlock, ifBlock.getEnd() - 1 - offset - adjustBlock),
                                        bindings,
                                        inputs,
                                        ifBlock.getTextStart());
            } else if (block.getType() == JavaBlockDescr.BlockType.ELSE) {
                JavaElseBlockDescr elseBlock = (JavaElseBlockDescr) block;
                int adjustBlock = (originalCode.charAt(elseBlock.getTextStart() - offset - 1) == '{') ? 0 : 1;
                setContainerBlockInputs(context,
                                        descrs,
                                        elseBlock,
                                        originalCode.substring(elseBlock.getTextStart() - offset + adjustBlock, elseBlock.getEnd() - 1 - offset - adjustBlock),
                                        bindings,
                                        inputs,
                                        elseBlock.getTextStart());
            } else if (block.getType() == JavaBlockDescr.BlockType.WHILE) {
                JavaWhileBlockDescr whileBlock = (JavaWhileBlockDescr) block;
                int adjustBlock = (originalCode.charAt(whileBlock.getTextStart() - offset - 1) == '{') ? 0 : 1;
                setContainerBlockInputs(context,
                                        descrs,
                                        whileBlock,
                                        originalCode.substring(whileBlock.getTextStart() - offset + adjustBlock, whileBlock.getEnd() - 1 - offset - adjustBlock),
                                        bindings,
                                        inputs,
                                        whileBlock.getTextStart());
            } else if (block.getType() == JavaBlockDescr.BlockType.FOR) {
                JavaForBlockDescr forBlock = (JavaForBlockDescr) block;
                int adjustBlock = (originalCode.charAt(forBlock.getTextStart() - offset - 1) == '{') ? 0 : 1;
                // Include the for-init section so loop variables are in scope
                // for the body's analysis; offset is compensated accordingly.
                setContainerBlockInputs(context,
                                        descrs,
                                        forBlock,
                                        originalCode.substring(forBlock.getStartParen() - offset, forBlock.getInitEnd() - offset) +
                                        originalCode.substring(forBlock.getTextStart() - offset + adjustBlock, forBlock.getEnd() - 1 - offset - adjustBlock),
                                        bindings,
                                        inputs,
                                        forBlock.getTextStart() - (forBlock.getInitEnd() - forBlock.getStartParen()));
            } else {
                block.setInputs(inputs); // each block to be rewritten now knows it's own variables
                descrs.add(block);
            }
        }
    }
private static Map<String, Class<?>> getInputs(final RuleBuildContext context,
String code,
BoundIdentifiers bindings,
Map<String, Class<?>> parentVars) {
MVELDialect mvel = (MVELDialect) context.getDialect("mvel");
MVELAnalysisResult mvelAnalysis = null;
try {
mvelAnalysis = (MVELAnalysisResult) mvel.analyzeBlock(context,
context.getRuleDescr(),
null,
code,
bindings,
parentVars,
"drools",
KnowledgeHelper.class);
} catch (Exception e) {
// swallow this as the error will be reported else where
}
return (mvelAnalysis != null) ? mvelAnalysis.getMvelVariables() : new HashMap<String, Class<?>>();
}
private static void addWhiteSpaces(String original, StringBuilder consequence, int start, int end) {
for (int i = start; i < end; i++) {
switch (original.charAt(i)) {
case '\n':
case '\r':
case '\t':
case ' ':
consequence.append(original.charAt(i));
break;
default:
consequence.append(" ");
}
}
}
private static void stripTryDescr(String originalCode,
StringBuilder consequence,
JavaTryBlockDescr block,
int offset) {
addWhiteSpaces(originalCode, consequence, consequence.length(), block.getTextStart() - offset);
addWhiteSpaces(originalCode, consequence, consequence.length(), block.getEnd() - offset);
for (JavaCatchBlockDescr catchBlock : block.getCatches()) {
addWhiteSpaces(originalCode, consequence, consequence.length(),
catchBlock.getTextStart() - offset);
addWhiteSpaces(originalCode, consequence, consequence.length(),
catchBlock.getEnd() - offset);
}
if (block.getFinal() != null) {
addWhiteSpaces(originalCode, consequence, consequence.length(), block.getFinal().getTextStart() - offset);
addWhiteSpaces(originalCode, consequence, consequence.length(), block.getFinal().getEnd() - offset);
}
}
private static void stripBlockDescr(String originalCode,
StringBuilder consequence,
JavaBlockDescr block,
int offset) {
addWhiteSpaces(originalCode, consequence, consequence.length(), block.getEnd() - offset);
}
private static void stripElseDescr(String originalCode,
StringBuilder consequence,
JavaElseBlockDescr block,
int offset) {
addWhiteSpaces(originalCode, consequence, consequence.length(), block.getEnd() - offset);
}
@SuppressWarnings("unchecked")
private static void rewriteInterfacePoint(final RuleBuildContext context,
final String originalCode,
final StringBuilder consequence,
final JavaInterfacePointsDescr ep) {
// rewriting it for proper exitPoints access
consequence.append("drools.get");
if (ep.getType() == JavaBlockDescr.BlockType.EXIT) {
consequence.append("ExitPoint( ");
} else if (ep.getType() == JavaBlockDescr.BlockType.ENTRY) {
consequence.append("EntryPoint( ");
} else if (ep.getType() == JavaBlockDescr.BlockType.CHANNEL) {
consequence.append("Channel( ");
} else {
context.addError(new DescrBuildError(context.getParentDescr(),
context.getRuleDescr(),
ep,
"Unable to rewrite code block: " + ep + "\n"));
return;
}
consequence.append(ep.getId());
consequence.append(" )");
// the following is a hack to preserve line breaks.
String originalBlock = originalCode.substring(ep.getStart() - 1,
ep.getEnd());
int end = originalBlock.indexOf("]");
addLineBreaks(consequence,
originalBlock.substring(0,
end));
}
    /**
     * Rewrites one modify/update/retract block into plain Java on the
     * {@code drools} helper: resolves the target expression's type with MVEL,
     * introduces a temp variable and/or a fact-handle lookup when the target
     * is not a known declaration, then delegates to the type-specific rewriter.
     *
     * @return true when the target expression resolved to a known declaration
     */
    private static boolean rewriteDescr(final RuleBuildContext context,
                                        final String originalCode,
                                        final MVELDialect mvel,
                                        final StringBuilder consequence,
                                        final JavaBlockDescr d,
                                        final BoundIdentifiers bindings,
                                        final Map<String, Declaration> decls) {
        if ( d.getEnd() == 0 ) {
            // do nothing, it was incorrectly parsed, but this error should be picked up else where
            return false;
        }

        // Temporarily disable type safety for the analysis; restored below.
        boolean typeSafety = context.isTypesafe();
        context.setTypesafe( false ); // we have to analyse in dynamic mode for now, as we cannot safely determine all input vars

        // Merge the block's inputs with any in-scope local variable declarations.
        Map<String, Class<?>> localTypes = d.getInputs();
        if( d.getInScopeLocalVars() != null && ! d.getInScopeLocalVars().isEmpty() ) {
            localTypes = new HashMap<String, Class<?>>( d.getInputs() != null ? d.getInputs() : Collections.EMPTY_MAP );
            for( JavaLocalDeclarationDescr local : d.getInScopeLocalVars() ) {
                // these are variables declared in the code itself that are in the scope for this expression
                try {
                    Class<?> type = context.getDialect( "java" ).getPackageRegistry().getTypeResolver().resolveType( local.getType() );
                    for( IdentifierDescr id : local.getIdentifiers() ) {
                        localTypes.put( id.getIdentifier(), type );
                    }
                } catch ( ClassNotFoundException e ) {
                    context.addError(new DescrBuildError(context.getRuleDescr(),
                                                         context.getParentDescr(),
                                                         null,
                                                         "Unable to resolve type " + local.getType() + ":\n" + e.getMessage()));
                }
            }
        }

        MVELAnalysisResult mvelAnalysis = ( MVELAnalysisResult ) mvel.analyzeBlock( context,
                                                                                    context.getRuleDescr(),
                                                                                    mvel.getInterceptors(),
                                                                                    d.getTargetExpression(),
                                                                                    bindings,
                                                                                    localTypes,
                                                                                    "drools",
                                                                                    KnowledgeHelper.class);
        context.setTypesafe( typeSafety );
        if ( mvelAnalysis == null ) {
           // something bad happened, issue already logged in errors
           return false;
        }

        Class ret = mvelAnalysis.getReturnType();

        if ( ret == null ) {
            // not possible to evaluate expression return value
            context.addError(new DescrBuildError(context.getParentDescr(),
                                                 context.getRuleDescr(),
                                                 originalCode,
                                                 "Unable to determine the resulting type of the expression: " + d.getTargetExpression() + "\n"));

            return false;
        }

        // adding modify expression
        String retString = ClassUtils.canonicalName( ret );
        // Strip one level of surrounding parentheses from the target expression
        // to obtain the bare declaration identifier.
        String declrString;
        if (d.getTargetExpression().charAt( 0 ) == '(' ) {
            declrString = d.getTargetExpression().substring( 1,d.getTargetExpression().length() -1 ).trim();
        } else {
            declrString = d.getTargetExpression();
        }
        String obj = declrString;
        Declaration declr = decls.get( declrString );

        consequence.append( "{ " );

        // Unknown declaration: materialize the expression into a "__obj__" temp.
        if ( declr == null ) {
            obj = "__obj__";
            consequence.append( retString );
            consequence.append( " " );
            consequence.append( obj);
            consequence.append( " = " );
            consequence.append( d.getTargetExpression() );
            consequence.append( "; " );
        }

        // Internal facts (and unknown targets) need an explicit handle lookup.
        if ( declr == null || declr.isInternalFact() ) {
            consequence.append( "org.kie.api.runtime.rule.FactHandle " );
            consequence.append( obj );
            consequence.append( "__Handle2__ = drools.getFactHandle(" );
            consequence.append( obj );
            consequence.append( ");" );
        }

        // the following is a hack to preserve line breaks.
        String originalBlock = originalCode.substring( d.getStart() - 1, d.getEnd() );

        switch (d.getType()) {
            case MODIFY:
                rewriteModifyDescr(context, d, originalBlock, consequence, declr, obj);
                break;
            case UPDATE:
                rewriteUpdateDescr(context, d, originalBlock, consequence, declr, obj);
                break;
            case RETRACT:
                rewriteRetractDescr( context, d, originalBlock, consequence, declr, obj );
                break;
        }

        return declr != null;
    }
    /**
     * Rewrites a {@code modify(target){ expr; ... }} block: each inner
     * expression is prefixed with the target object, property-reactivity bits
     * are accumulated for every setter, and the closing drools.update(...)
     * call is appended. Line breaks from the original text are preserved so
     * reported positions stay accurate.
     */
    private static void rewriteModifyDescr( RuleBuildContext context,
                                            JavaBlockDescr d,
                                            String originalBlock,
                                            StringBuilder consequence,
                                            Declaration declr,
                                            String obj ) {
        List<String> settableProperties = null;

        Class<?> typeClass = findModifiedClass(context, d, declr);
        TypeDeclaration typeDeclaration = typeClass == null ? null : context.getKnowledgeBuilder().getTypeDeclaration(typeClass);

        boolean isPropertyReactive = typeDeclaration != null && typeDeclaration.isPropertyReactive();
        if (isPropertyReactive) {
            typeDeclaration.setTypeClass(typeClass);
            settableProperties = typeDeclaration.getSettableProperties();
        }

        ConsequenceMetaData.Statement statement = null;
        if (typeDeclaration != null) {
            statement = new ConsequenceMetaData.Statement(ConsequenceMetaData.Statement.Type.MODIFY, typeClass);
            context.getRule().getConsequenceMetaData().addStatement(statement);
        }
        // Property-reactive types start from an empty mask; others assume every
        // property is modified.
        long modificationMask = isPropertyReactive ? 0 : Long.MAX_VALUE;

        int end = originalBlock.indexOf("{");
        if (end == -1) {
            // no block
            context.addError(new DescrBuildError(context.getParentDescr(),
                                                 context.getRuleDescr(),
                                                 null,
                                                 "Block missing after modify" + d.getTargetExpression() + " ?\n"));
            return;
        }

        // Copy line breaks up to the opening brace.
        addLineBreaks(consequence, originalBlock.substring(0, end));

        int start = end + 1;
        // adding each of the expressions:
        for (String exprStr : ((JavaModifyBlockDescr) d).getExpressions()) {
            end = originalBlock.indexOf(exprStr, start);
            addLineBreaks(consequence, originalBlock.substring(start, end));
            consequence.append(obj).append(".");
            consequence.append(exprStr);
            consequence.append("; ");
            start = end + exprStr.length();

            if (typeDeclaration != null) {
                modificationMask = parseModifiedProperties(statement, settableProperties, typeDeclaration, isPropertyReactive, modificationMask, exprStr);
            }
        }
        // Copy the line breaks trailing the last expression.
        addLineBreaks(consequence, originalBlock.substring(end));

        appendUpdateStatement(consequence, declr, obj, modificationMask, typeClass);
    }
private static void rewriteUpdateDescr(RuleBuildContext context,
JavaBlockDescr d,
String originalBlock,
StringBuilder consequence,
Declaration declr,
String obj) {
long modificationMask = Long.MAX_VALUE;
Class<?> typeClass = findModifiedClass(context, d, declr);
TypeDeclaration typeDeclaration = typeClass == null ? null : context.getKnowledgeBuilder().getTypeDeclaration(typeClass);
if (typeDeclaration != null) {
boolean isPropertyReactive = typeDeclaration != null && typeDeclaration.isPropertyReactive();
List<String> settableProperties = null;
if (isPropertyReactive) {
modificationMask = 0;
typeDeclaration.setTypeClass(typeClass);
settableProperties = typeDeclaration.getSettableProperties();
}
ConsequenceMetaData.Statement statement = new ConsequenceMetaData.Statement(ConsequenceMetaData.Statement.Type.MODIFY, typeClass);
context.getRule().getConsequenceMetaData().addStatement(statement);
for (String expr : splitStatements(consequence)) {
String updateExpr = expr.replaceFirst("^\\Q" + obj + "\\E\\s*\\.", "");
if (!updateExpr.equals(expr)) {
modificationMask = parseModifiedProperties(statement, settableProperties, typeDeclaration, isPropertyReactive, modificationMask, updateExpr);
}
}
}
appendUpdateStatement(consequence, declr, obj, modificationMask, typeClass);
}
/**
 * Appends the generated {@code drools.update( <obj><handle>, <mask>L, <type>.class ); }}
 * statement that closes a rewritten modify/update block.
 */
private static void appendUpdateStatement(StringBuilder consequence, Declaration declr, String obj, long modificationMask, Class<?> typeClass) {
    // Internal facts (or unknown declarations) are tracked through the
    // secondary handle variable.
    boolean internal = declr == null || declr.isInternalFact();
    String handlePart = internal ? "__Handle2__, " : "__Handle__, ";
    String typeName = typeClass != null ? typeClass.getCanonicalName() : "java.lang.Object";
    consequence.append("drools.update( ")
               .append(obj)
               .append(handlePart)
               .append(modificationMask)
               .append("L, ")
               .append(typeName)
               .append(".class")
               .append(" ); }");
}
/**
 * Inspects one statement invoked on the modified fact (e.g. "setAge( 37 )"
 * or "age = 37") and folds the affected property into the modification mask
 * and the consequence meta-data.
 *
 * @param statement          meta-data statement collecting modified fields
 * @param settableProperties settable properties of the type; may be null when
 *                           the type is not property-reactive
 * @param typeDeclaration    declaration of the modified type (non-null)
 * @param propertyReactive   whether property reactivity is enabled for the type
 * @param modificationMask   mask accumulated so far
 * @param exprStr            the statement text, with the "obj." prefix removed
 * @return the updated modification mask
 */
private static long parseModifiedProperties(ConsequenceMetaData.Statement statement,
                                            List<String> settableProperties,
                                            TypeDeclaration typeDeclaration,
                                            boolean propertyReactive,
                                            long modificationMask,
                                            String exprStr) {
    int endMethodName = exprStr.indexOf('(');
    if (endMethodName >= 0) {
        // Method invocation form, e.g. "setAge( 37 )".
        String methodName = exprStr.substring(0, endMethodName).trim();
        String propertyName = setter2property(methodName);
        int endMethodArgs = findEndOfMethodArgsIndex(exprStr, endMethodName);
        String methodParams = exprStr.substring(endMethodName+1, endMethodArgs).trim();
        List<String> args = splitArgumentsList(methodParams);
        int argsNr = args.size();
        // Not a setter: a getter followed by further navigation ("getX().y...")
        // still counts as touching property X.
        if (propertyName == null && exprStr.length() > endMethodArgs+1 && exprStr.substring(endMethodArgs+1).trim().startsWith(".")) {
            propertyName = getter2property(methodName);
        }
        if (propertyName != null) {
            modificationMask = updateModificationMask(settableProperties, propertyReactive, modificationMask, propertyName);
            // First argument (if any) is recorded as the new field value.
            statement.addField(propertyName, argsNr > 0 ? args.get(0) : null);
        }
        // Methods registered (by name and arity) as modifying extra properties
        // contribute those properties to the mask as well.
        String methodWithArgsNr = methodName + "_" + argsNr;
        List<String> modifiedProps = typeDeclaration.getTypeClassDef().getModifiedPropsByMethod(methodWithArgsNr);
        if (modifiedProps != null) {
            for (String modifiedProp : modifiedProps) {
                modificationMask = updateModificationMask(settableProperties, propertyReactive, modificationMask, modifiedProp);
                statement.addField(modifiedProp, argsNr > 0 ? args.get(0) : null);
            }
        }
    } else {
        // Direct field access form, e.g. "age = 37".
        String propertyName = extractFirstIdentifier(exprStr, 0);
        if (propertyName != null) {
            modificationMask = updateModificationMask(settableProperties, propertyReactive, modificationMask, propertyName);
            int equalPos = exprStr.indexOf('=');
            if (equalPos >= 0) {
                String value = exprStr.substring(equalPos+1).trim();
                statement.addField(propertyName, value);
            }
        }
    }
    return modificationMask;
}
/**
 * Sets the bit corresponding to {@code propertyName} in the mask, but only
 * when property reactivity is active and the property is settable.
 */
private static long updateModificationMask(List<String> settableProperties,
                                           boolean propertyReactive,
                                           long modificationMask,
                                           String propertyName) {
    // Without property reactivity the all-bits mask is left untouched.
    if (!propertyReactive) {
        return modificationMask;
    }
    int pos = settableProperties.indexOf(propertyName);
    return pos < 0 ? modificationMask : BitMaskUtil.set(modificationMask, pos);
}
/**
 * Resolves the class of the fact being modified/updated/retracted.
 * A bound declaration wins; otherwise the target expression is analysed:
 * enclosing parentheses are stripped, an explicit cast is honoured, and a
 * function call is resolved through its declared return type.
 */
private static Class<?> findModifiedClass(RuleBuildContext context, JavaBlockDescr d, Declaration declr) {
    if (declr != null) {
        return ((ClassWireable) declr.getPattern().getObjectType()).getClassType();
    }
    String targetId = d.getTargetExpression().trim();
    // Strip balanced enclosing parentheses: "((x))" -> "x".
    while (targetId.charAt(0) == '(' && targetId.charAt(targetId.length()-1) == ')') {
        targetId = targetId.substring(1, targetId.length()-1).trim();
    }
    // A leading '(' that survived the loop starts a cast, e.g. "(Person) x".
    if (targetId.charAt(0) == '(') {
        int endCast = targetId.indexOf(')');
        if (endCast > 0) {
            String castName = targetId.substring(1, endCast).trim();
            Class<?> cast = findClassByName(context, castName);
            if (cast != null) {
                return cast;
            }
            // Cast type not resolvable: fall back to analysing the cast target.
            targetId = targetId.substring(endCast+1).trim();
        }
    }
    // "foo(...)" -> function return type, otherwise a declared variable.
    return targetId.contains("(") ? findFunctionReturnedClass(context, targetId) : findDeclarationClass(context, d, targetId);
}
/**
 * Resolves the class of a plain identifier: first against the block's rule
 * inputs, then against the in-scope local variable declarations.
 */
private static Class<?> findDeclarationClass(RuleBuildContext context, JavaBlockDescr d, String statement) {
    // Rule-level inputs take precedence over local variables.
    Class<?> inputClass = d.getInputs() == null ? null : d.getInputs().get(statement);
    if (inputClass != null) {
        return inputClass;
    }
    List<JavaLocalDeclarationDescr> localDeclarationDescrs = d.getInScopeLocalVars();
    if (localDeclarationDescrs == null) {
        return null;
    }
    // Scan the in-scope locals for a declaration introducing this identifier.
    String className = null;
    search:
    for (JavaLocalDeclarationDescr localDeclr : localDeclarationDescrs) {
        for (IdentifierDescr idDescr : localDeclr.getIdentifiers()) {
            if (statement.equals(idDescr.getIdentifier())) {
                className = localDeclr.getType();
                break search;
            }
        }
    }
    return findClassByName(context, className);
}
/**
 * Resolves a class name against the rule's namespace first, then against the
 * imports declared by the package descriptors of that namespace.
 *
 * @return the resolved class, or null when it cannot be found
 */
public static Class<?> findClassByName(RuleBuildContext context, String className) {
    if (className == null) {
        return null;
    }
    String namespace = context.getRuleDescr().getNamespace();
    KnowledgeBuilderImpl packageBuilder = context.getKnowledgeBuilder();
    // First attempt: load directly, qualifying simple names with the namespace.
    String qualifiedName = className.indexOf('.') < 0 ? namespace + "." + className : className;
    try {
        return Class.forName(qualifiedName, false, packageBuilder.getRootClassLoader());
    } catch (ClassNotFoundException ignored) {
        // expected for imported types: retry against the package imports below
    }
    List<PackageDescr> pkgDescrs = packageBuilder.getPackageDescrs(namespace);
    if (pkgDescrs == null) {
        return null;
    }
    Set<String> imports = new HashSet<String>();
    for (PackageDescr pkgDescr : pkgDescrs) {
        for (ImportDescr importDescr : pkgDescr.getImports()) {
            imports.add(importDescr.getTarget());
        }
    }
    return findClass(className, imports, packageBuilder.getRootClassLoader());
}
/**
 * Resolves the declared return type of a function-call target expression,
 * e.g. "makePerson( ... )" -> the class of makePerson's return type.
 */
private static Class<?> findFunctionReturnedClass(RuleBuildContext context, String statement) {
    // Everything before the '(' is the function name.
    String functionName = statement.substring(0, statement.indexOf('('));
    FunctionDescr function = lookupFunction(context, functionName);
    if (function == null) {
        return null;
    }
    return findClassByName(context, function.getReturnType());
}
/**
 * Rewrites a {@code retract(obj)} block: records consequence meta-data when
 * the retracted type can be resolved and appends the generated
 * {@code drools.retract(...)} call.
 *
 * @return true when the retracted fact is a bound declaration
 */
private static boolean rewriteRetractDescr(RuleBuildContext context,
                                           JavaBlockDescr d,
                                           String originalBlock,
                                           StringBuilder consequence,
                                           Declaration declr,
                                           String obj) {
    Class<?> typeClass = findModifiedClass(context, d, declr);
    if (typeClass != null) {
        ConsequenceMetaData.Statement statement = new ConsequenceMetaData.Statement(ConsequenceMetaData.Statement.Type.RETRACT, typeClass);
        context.getRule().getConsequenceMetaData().addStatement(statement);
    }
    // Bound external facts use the primary handle; internal/unknown facts the
    // secondary one (mirrors appendUpdateStatement).
    boolean externalFact = declr != null && !declr.isInternalFact();
    String handle = externalFact ? "__Handle__ ); }" : "__Handle2__ ); }";
    consequence.append("drools.retract( ").append(obj).append(handle);
    return declr != null;
}
/**
 * Records consequence meta-data for an {@code insert( new Foo(...) )} block,
 * matching constructor arguments to the type's fields positionally.
 * Non-constructor inserts are ignored.
 */
private static void parseInsertDescr(RuleBuildContext context, JavaBlockDescr block, StringBuilder consequence) {
    String expr = block.getTargetExpression();
    if (expr.startsWith("new ")) {
        int argsStart = expr.indexOf('(');
        if (argsStart > 0) {
            String className = expr.substring(4, argsStart).trim();
            Class<?> typeClass = findClassByName(context, className);
            TypeDeclaration typeDeclaration = typeClass == null ? null : context.getKnowledgeBuilder().getTypeDeclaration(typeClass);
            if (typeDeclaration != null) {
                ConsequenceMetaData.Statement statement = new ConsequenceMetaData.Statement(ConsequenceMetaData.Statement.Type.INSERT, typeClass);
                context.getRule().getConsequenceMetaData().addStatement(statement);
                // Find the matching closing parenthesis: a plain indexOf(')')
                // would stop at the first ')' of a nested call such as
                // "new Foo(bar(1))" and truncate the argument list.
                int argsEnd = findEndOfMethodArgsIndex(expr, argsStart);
                String constructorParams = expr.substring(argsStart + 1, argsEnd).trim();
                List<String> args = splitArgumentsList(constructorParams);
                ClassDefinition classDefinition = typeDeclaration.getTypeClassDef();
                List<FactField> fields = classDefinition.getFields();
                // Arguments can only be mapped positionally when their count
                // matches the field count.
                if (args.size() == fields.size()) {
                    for (int i = 0; i < args.size(); i++) {
                        statement.addField(fields.get(i).getName(), args.get(i));
                    }
                }
            }
        }
    }
}
/**
 * Appends to {@code consequence} one newline for every line break found in
 * {@code chunk} — presumably to keep the line numbering of the generated
 * consequence aligned with the original source for error reporting
 * (NOTE(review): confirm against the callers' use of line numbers).
 *
 * @param consequence the buffer holding the rewritten consequence
 * @param chunk       the fragment of original source being skipped over
 */
private static void addLineBreaks(StringBuilder consequence,
                                  String chunk) {
    Matcher m = LINE_BREAK_FINDER.matcher(chunk);
    while (m.find()) {
        consequence.append("\n");
    }
}
/**
 * Copies the source position of the given descriptor onto the exception,
 * so compile errors point at the DRL location rather than generated code.
 * Only {@link CompileException} carries mutable position information.
 */
public static void copyErrorLocation(Exception e, BaseDescr descr) {
    if (!(e instanceof CompileException)) {
        return;
    }
    CompileException compileException = (CompileException) e;
    compileException.setLineNumber(descr.getLine());
    compileException.setColumn(descr.getColumn());
}
/**
 * Looks up a DRL function by name in the package descriptors of the rule's
 * package.
 *
 * @return the matching function descriptor, or null when not found
 */
private static FunctionDescr lookupFunction(RuleBuildContext context, String functionName) {
    String packageName = context.getRule().getPackageName();
    List<PackageDescr> pkgDescrs = context.getKnowledgeBuilder().getPackageDescrs(packageName);
    // getPackageDescrs() can return null for an unknown package (see the same
    // guard in findClassByName); avoid the NPE in the for-each below.
    if (pkgDescrs == null) {
        return null;
    }
    for (PackageDescr pkgDescr : pkgDescrs) {
        for (FunctionDescr function : pkgDescr.getFunctions()) {
            if (functionName.equals(function.getName())) {
                return function;
            }
        }
    }
    return null;
}
/**
 * Converts a rule name into a valid Java identifier fragment: spaces become
 * '_', '$' becomes "_dollar_", and any other non-identifier character is
 * escaped as "$u<codepoint>$".
 */
static String normalizeRuleName(String name) {
    // Spaces are common in rule names; fold them to underscores up front.
    String normalized = name.replace(' ', '_');
    // Fast path: nothing else to escape.
    if (!NON_ALPHA_REGEX.matcher(normalized).find()) {
        return normalized;
    }
    StringBuilder sb = new StringBuilder(normalized.length());
    for (int i = 0; i < normalized.length(); i++) {
        char ch = normalized.charAt(i);
        if (ch == '$') {
            sb.append("_dollar_");
        } else if (Character.isJavaIdentifierPart(ch)) {
            sb.append(ch);
        } else {
            sb.append("$u").append((int) ch).append("$");
        }
    }
    return sb.toString();
}
}
| |
package ru.stqa.pft.addressbook.appmanager;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.support.ui.Select;
import org.testng.Assert;
import ru.stqa.pft.addressbook.model.ContactData;
import ru.stqa.pft.addressbook.model.Contacts;
import java.util.List;
/**
 * Page-object helper for the addressbook contact pages: creation,
 * modification, deletion and reading contact data back from the UI.
 */
public class ContactHelper extends HelperBase {

  /** Cached result of {@link #all()}; reset whenever contacts are changed. */
  private Contacts contactCache = null;

  public ContactHelper(WebDriver wd) {
    super(wd);
  }

  /**
   * Fills the contact form with the data of the given contact.
   *
   * @param contactData values to type into the form fields
   * @param creation    true on the creation page (group selector present),
   *                    false on the modification page (no group selector)
   */
  public void fillContactForm(ContactData contactData, boolean creation) {
    type(By.name("firstname"), contactData.getFirstname());
    type(By.name("lastname"), contactData.getLastname());
    type(By.name("address"), contactData.getAddress());
    type(By.name("mobile"), contactData.getMobilePhone());
    type(By.name("work"), contactData.getWorkPhone());
    type(By.name("email"), contactData.getEmail());
    if (creation) {
      new Select(wd.findElement(By.name("new_group"))).selectByVisibleText(contactData.getGroup());
    } else {
      Assert.assertFalse(isElementPresent(By.name("new_group")));
    }
  }

  public void submitContactCreation() {
    click(By.xpath("//div[@id='content']/form/input[21]"));
  }

  /** Ticks the checkbox of the contact row with the given id. */
  public void selectContactById(int id) {
    wd.findElement(By.cssSelector("input[value='" + id + "']")).click();
  }

  /** Clicks the "Delete" button for the currently selected contacts. */
  public void deleteContact() {
    click(By.xpath("//div[@id='content']/form[2]/div[2]/input"));
  }

  public void acceptAlert() {
    wd.switchTo().alert().accept();
  }

  /** Opens the edit form of the n-th contact row (0-based). */
  public void initContactModification(int index) {
    wd.findElements(By.xpath("//img[@alt = 'Edit']")).get(index).click();
  }

  /** Opens the edit form of the contact with the given id. */
  public void initContactModificationById(int id) {
    wd.findElement(By.cssSelector(String.format("a[href='edit.php?id=%s']", id))).click();
  }

  public void submitContactModification() {
    click(By.xpath("//div[@id='content']/form[1]/input[22]"));
  }

  /**
   * Creates a new contact and returns to the home page.
   * NOTE(review): navigation to the creation form ({@link #create()}) is not
   * invoked here — callers are presumably already on the form; confirm.
   */
  public void create(ContactData contact, boolean b) {
    fillContactForm(contact, b);
    submitContactCreation();
    contactCache = null;
    goToHomePage();
  }

  /** Modifies an existing contact (matched by id) and returns to the home page. */
  public void modify(ContactData contact) {
    initContactModificationById(contact.getId());
    fillContactForm(contact, false);
    submitContactModification();
    contactCache = null;
    goToHomePage();
  }

  /** Deletes the given contact (matched by id) and returns to the home page. */
  public void deleteContact(ContactData contact) {
    selectContactById(contact.getId());
    deleteContact();
    acceptAlert();
    contactCache = null;
    goToHomePage();
  }

  public void goToHomePage() {
    click(By.linkText("home"));
  }

  /** Opens the contact creation form. */
  public void create() {
    click(By.linkText("add new"));
  }

  public boolean isThereAContact() {
    return isElementPresent(By.xpath("//div/div[4]/form[2]/table/tbody/tr[2]/td[1]/input"));
  }

  public int count() {
    return wd.findElements(By.xpath("//div/div[4]/form[2]/table/tbody/tr[2]/td[1]/input")).size();
  }

  /**
   * Returns all contacts listed on the home page. The result is cached; a
   * defensive copy is returned so callers cannot mutate the cache.
   */
  public Contacts all() {
    if (contactCache != null) {
      return new Contacts(contactCache);
    }
    contactCache = new Contacts();
    List<WebElement> rows = wd.findElements(By.xpath("//tr[@name = 'entry']"));
    for (WebElement row : rows) {
      List<WebElement> cells = row.findElements(By.tagName("td"));
      // Column layout: td[2]=lastname, td[3]=firstname, td[4]=address,
      // td[5]=emails, td[6]=phones (0-based indexes below).
      int id = Integer.parseInt(row.findElement(By.tagName("input")).getAttribute("value"));
      contactCache.add(new ContactData().withId(id)
          .withFirstname(cells.get(2).getText())
          .withLastname(cells.get(1).getText())
          .withAddress(cells.get(3).getText())
          .withAllPhones(cells.get(5).getText())
          .withAllEmails(cells.get(4).getText()));
    }
    return new Contacts(contactCache);
  }

  /** Reads the current value of the named input on the open edit form. */
  private String formValue(String fieldName) {
    return wd.findElement(By.name(fieldName)).getAttribute("value");
  }

  /** Prefixes a non-empty phone number ("H:"/"M:"/"W:"); empty stays empty. */
  private static String prefixed(String prefix, String phone) {
    return phone.equals("") ? phone : prefix + phone;
  }

  /** Reads the contact's data from its edit form, then navigates back. */
  public ContactData infoFromEditForm(ContactData contact) {
    initContactModificationById(contact.getId());
    ContactData info = new ContactData().withId(contact.getId())
        .withFirstname(formValue("firstname")).withLastname(formValue("lastname"))
        .withHomePhone(formValue("home")).withMobilePhone(formValue("mobile"))
        .withWorkPhone(formValue("work"))
        .withEmail(formValue("email")).withEmail2(formValue("email2")).withEmail3(formValue("email3"))
        .withAddress(formValue("address"));
    wd.navigate().back();
    return info;
  }

  /**
   * Same as {@link #infoFromEditForm(ContactData)} but non-empty phone numbers
   * are prefixed ("H:", "M:", "W:") to match the details-page rendering.
   */
  public ContactData infoFromEditFormForDetails(ContactData contact) {
    initContactModificationById(contact.getId());
    ContactData info = new ContactData().withId(contact.getId())
        .withFirstname(formValue("firstname")).withLastname(formValue("lastname"))
        .withHomePhone(prefixed("H:", formValue("home")))
        .withMobilePhone(prefixed("M:", formValue("mobile")))
        .withWorkPhone(prefixed("W:", formValue("work")))
        .withEmail(formValue("email")).withEmail2(formValue("email2")).withEmail3(formValue("email3"))
        .withAddress(formValue("address"));
    wd.navigate().back();
    return info;
  }

  /** Reads the full text of the contact's details view, then navigates back. */
  public ContactData infoFromDetailsForm(ContactData contact) {
    initContactInfoDetails(contact.getId());
    String allDetails = wd.findElement(By.xpath("//*[@id='content']")).getText();
    wd.navigate().back();
    return new ContactData().withId(contact.getId()).withAllDetails(allDetails);
  }

  /** Opens the details view of the contact with the given id. */
  public void initContactInfoDetails(int id) {
    wd.findElement(By.cssSelector(String.format("a[href='view.php?id=%s']", id))).click();
  }
}
/*
public void deleteContact(int index) {
selectContact(index);
deleteContact();
acceptAlert();
goToHomePage();
}
public List<ContactData> list() {
List<ContactData> contacts = new ArrayList<ContactData>();
List<WebElement> elements = wd.findElements(By.xpath("//tr[@name = 'entry']"));
for (WebElement element : elements){
String firstname = element.findElement(By.xpath(".//td[3]")).getText();
String lastname = element.findElement(By.xpath(".//td[2]")).getText();
int id = Integer.parseInt(element.findElement(By.tagName("input")).getAttribute("value"));
ContactData contact = new ContactData().withId(id).withFirstname(firstname).withLastname(lastname);
contacts.add(contact);
}
return contacts;
}
*/
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.benchmark.authentication.external;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import javax.annotation.CheckForNull;
import javax.annotation.Nonnull;
import javax.jcr.Credentials;
import javax.jcr.Repository;
import javax.jcr.Session;
import javax.jcr.SimpleCredentials;
import javax.security.auth.login.Configuration;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import org.apache.jackrabbit.api.JackrabbitSession;
import org.apache.jackrabbit.api.security.user.UserManager;
import org.apache.jackrabbit.oak.Oak;
import org.apache.jackrabbit.oak.benchmark.AbstractTest;
import org.apache.jackrabbit.oak.fixture.JcrCreator;
import org.apache.jackrabbit.oak.fixture.OakRepositoryFixture;
import org.apache.jackrabbit.oak.fixture.RepositoryFixture;
import org.apache.jackrabbit.oak.jcr.Jcr;
import org.apache.jackrabbit.oak.security.SecurityProviderImpl;
import org.apache.jackrabbit.oak.spi.security.ConfigurationParameters;
import org.apache.jackrabbit.oak.spi.security.SecurityProvider;
import org.apache.jackrabbit.oak.spi.security.authentication.external.ExternalGroup;
import org.apache.jackrabbit.oak.spi.security.authentication.external.ExternalIdentity;
import org.apache.jackrabbit.oak.spi.security.authentication.external.ExternalIdentityException;
import org.apache.jackrabbit.oak.spi.security.authentication.external.ExternalIdentityProvider;
import org.apache.jackrabbit.oak.spi.security.authentication.external.ExternalIdentityProviderManager;
import org.apache.jackrabbit.oak.spi.security.authentication.external.ExternalIdentityRef;
import org.apache.jackrabbit.oak.spi.security.authentication.external.ExternalUser;
import org.apache.jackrabbit.oak.spi.security.authentication.external.SyncHandler;
import org.apache.jackrabbit.oak.spi.security.authentication.external.SyncManager;
import org.apache.jackrabbit.oak.spi.security.authentication.external.basic.DefaultSyncConfig;
import org.apache.jackrabbit.oak.spi.security.authentication.external.impl.DefaultSyncConfigImpl;
import org.apache.jackrabbit.oak.spi.security.authentication.external.impl.DefaultSyncHandler;
import org.apache.jackrabbit.oak.spi.security.authentication.external.impl.ExternalIDPManagerImpl;
import org.apache.jackrabbit.oak.spi.security.authentication.external.impl.SyncHandlerMapping;
import org.apache.jackrabbit.oak.spi.security.authentication.external.impl.SyncManagerImpl;
import org.apache.jackrabbit.oak.spi.security.authentication.external.impl.principal.ExternalPrincipalConfiguration;
import org.apache.jackrabbit.oak.spi.security.principal.CompositePrincipalConfiguration;
import org.apache.jackrabbit.oak.spi.security.principal.PrincipalConfiguration;
import org.apache.jackrabbit.oak.spi.security.principal.PrincipalImpl;
import org.apache.jackrabbit.oak.spi.security.user.UserConstants;
import org.apache.jackrabbit.oak.spi.whiteboard.Whiteboard;
import org.apache.jackrabbit.oak.spi.whiteboard.WhiteboardUtils;
import org.apache.sling.testing.mock.osgi.context.OsgiContextImpl;
import static com.google.common.base.Preconditions.checkNotNull;
/**
* Base benchmark test for external authentication.
*
* The setup currently defines the following configuration options:
*
* - {@code numberOfUsers} : number of user accounts that are 'known' to the IDP
* - {@code numberOfGroups}: number of groups 'known' to the IDP and equally used to define the membershipSize of each user.
* - {@code expirationTime}: expiration time as set with
* {@link DefaultSyncConfig.Authorizable#setExpirationTime(long)}, used for both users and groups
* - {@code dynamicMembership}: boolean flag to enable dynamic membership (see OAK-4101)
*
* Note: by default the {@link DefaultSyncConfig.User#setMembershipNestingDepth(long)}
* is set to 1 and each user will become member of each of the groups as defined
* by {@code numberOfGroups}.
*/
abstract class AbstractExternalTest extends AbstractTest {

    // Intermediate path under which all synced users/groups are created, so
    // afterSuite() can clean up by removing a single subtree per root.
    private static final String PATH_PREFIX = "pathPrefix";

    private final Random random = new Random();

    private final ExternalPrincipalConfiguration externalPrincipalConfiguration = new ExternalPrincipalConfiguration();

    final DefaultSyncConfig syncConfig = new DefaultSyncConfig();
    final SyncHandler syncHandler = new DefaultSyncHandler(syncConfig);
    final ExternalIdentityProvider idp;

    // Populated during createRepository() when the Oak fixture is customized.
    SyncManagerImpl syncManager;
    ExternalIdentityProviderManager idpManager;

    protected AbstractExternalTest(int numberOfUsers, int numberOfGroups,
                                   long expTime, boolean dynamicMembership,
                                   @Nonnull List<String> autoMembership) {
        idp = new TestIdentityProvider(numberOfUsers, numberOfGroups);
        // Nesting depth 1: users are synced as direct members of the IDP
        // groups only (see class javadoc).
        syncConfig.user()
                .setMembershipNestingDepth(1)
                .setDynamicMembership(dynamicMembership)
                .setAutoMembership(autoMembership.toArray(new String[autoMembership.size()]))
                .setExpirationTime(expTime).setPathPrefix(PATH_PREFIX);
        syncConfig.group()
                .setExpirationTime(expTime).setPathPrefix(PATH_PREFIX);
    }

    /** Builds the JAAS configuration installed before the benchmark runs. */
    protected abstract Configuration createConfiguration();

    /** Returns a random user id ("u0" .. "u{numberOfUsers-1}") known to the IDP. */
    protected String getRandomUserId() {
        int index = random.nextInt(((TestIdentityProvider) idp).numberOfUsers);
        return "u" + index;
    }

    /** Returns a random group id ("g0" .. "g{membershipSize-1}") known to the IDP. */
    protected String getRandomGroupId() {
        int index = random.nextInt(((TestIdentityProvider) idp).membershipSize);
        return "g" + index;
    }

    @Override
    public void run(Iterable iterable, List concurrencyLevels) {
        // make sure the desired JAAS config is set
        Configuration.setConfiguration(createConfiguration());
        super.run(iterable, concurrencyLevels);
    }

    /**
     * Pre-creates the configured auto-membership groups (if any) below
     * PATH_PREFIX so synced users can be added to them.
     */
    @Override
    protected void beforeSuite() throws Exception {
        Set<String> autoMembership = syncConfig.user().getAutoMembership();
        if (!autoMembership.isEmpty()) {
            Session s = loginAdministrative();
            UserManager userManager = ((JackrabbitSession) s).getUserManager();
            for (String groupId : autoMembership) {
                userManager.createGroup(groupId, new PrincipalImpl(groupId), PATH_PREFIX);
            }
            s.save();
        }
    }

    /**
     * Remove any user/group accounts that have been synchronized into the repo.
     *
     * @throws Exception
     */
    @Override
    protected void afterSuite() throws Exception {
        Session s = loginAdministrative();
        // All synced accounts live under <root>/PATH_PREFIX for both roots.
        for (String creationRoot : new String[] {UserConstants.DEFAULT_USER_PATH, UserConstants.DEFAULT_GROUP_PATH}) {
            String path = creationRoot + "/" + PATH_PREFIX;
            if (s.nodeExists(path)) {
                s.getNode(path).remove();
            }
        }
        s.save();
    }

    @Override
    protected Repository[] createRepository(RepositoryFixture fixture) throws Exception {
        if (fixture instanceof OakRepositoryFixture) {
            return ((OakRepositoryFixture) fixture).setUpCluster(1, new JcrCreator() {
                @Override
                public Jcr customize(Oak oak) {
                    // Wire the sync manager, IDP manager, test IDP and sync
                    // handler into the repository whiteboard.
                    Whiteboard whiteboard = oak.getWhiteboard();
                    syncManager = new SyncManagerImpl(whiteboard);
                    whiteboard.register(SyncManager.class, syncManager, Collections.emptyMap());
                    idpManager = new ExternalIDPManagerImpl(whiteboard);
                    whiteboard.register(ExternalIdentityProviderManager.class, idpManager, Collections.emptyMap());
                    whiteboard.register(ExternalIdentityProvider.class, idp, Collections.emptyMap());
                    whiteboard.register(SyncHandler.class, syncHandler, Collections.emptyMap());
                    // assert proper init of the 'externalPrincipalConfiguration' if dynamic membership is enabled
                    if (syncConfig.user().getDynamicMembership()) {
                        OsgiContextImpl context = new OsgiContextImpl();
                        // register the ExternalPrincipal configuration in order to have it's
                        // activate method invoked.
                        context.registerInjectActivateService(externalPrincipalConfiguration);
                        // now register the sync-handler with the dynamic membership config
                        // in order to enable dynamic membership with the external principal configuration
                        Map props = ImmutableMap.of(
                                DefaultSyncConfigImpl.PARAM_USER_DYNAMIC_MEMBERSHIP, syncConfig.user().getDynamicMembership(),
                                DefaultSyncConfigImpl.PARAM_GROUP_AUTO_MEMBERSHIP, syncConfig.user().getAutoMembership());
                        context.registerService(SyncHandler.class, WhiteboardUtils.getService(whiteboard, SyncHandler.class), props);
                        Map shMappingProps = ImmutableMap.of(
                                SyncHandlerMapping.PARAM_IDP_NAME, idp.getName(),
                                SyncHandlerMapping.PARAM_SYNC_HANDLER_NAME, syncConfig.getName());
                        context.registerService(SyncHandlerMapping.class, new SyncHandlerMapping() {}, shMappingProps);
                    }
                    SecurityProvider sp = new TestSecurityProvider(ConfigurationParameters.EMPTY);
                    return new Jcr(oak).with(sp);
                }
            });
        } else {
            throw new UnsupportedOperationException("unsupported fixture" + fixture);
        }
    }

    /**
     * Security provider that binds the ExternalPrincipalConfiguration in
     * addition to the default principal configuration.
     */
    private final class TestSecurityProvider extends SecurityProviderImpl {
        public TestSecurityProvider(@Nonnull ConfigurationParameters configuration) {
            super(configuration);
            PrincipalConfiguration principalConfiguration = getConfiguration(PrincipalConfiguration.class);
            if (!(principalConfiguration instanceof CompositePrincipalConfiguration)) {
                throw new IllegalStateException();
            } else {
                PrincipalConfiguration defConfig = checkNotNull(((CompositePrincipalConfiguration) principalConfiguration).getDefaultConfig());
                bindPrincipalConfiguration(externalPrincipalConfiguration);
                bindPrincipalConfiguration(defConfig);
            }
        }
    }

    /**
     * In-memory IDP exposing {@code numberOfUsers} users ("u0", "u1", ...) and
     * {@code membershipSize} groups ("g0", "g1", ...); every user is a direct
     * member of every group.
     */
    private final class TestIdentityProvider implements ExternalIdentityProvider {

        private final int numberOfUsers;
        private final int membershipSize;

        private TestIdentityProvider(int numberOfUsers, int membershipSize) {
            this.numberOfUsers = numberOfUsers;
            this.membershipSize = membershipSize;
        }

        @Nonnull
        @Override
        public String getName() {
            return "test";
        }

        @CheckForNull
        @Override
        public ExternalIdentity getIdentity(@Nonnull ExternalIdentityRef ref) {
            // Ids are "u<n>" for users and "g<n>" for groups.
            String id = ref.getId();
            long index = Long.valueOf(id.substring(1));
            if (id.charAt(0) == 'u') {
                return new TestUser(index);
            } else {
                return new TestGroup(index);
            }
        }

        @CheckForNull
        @Override
        public ExternalUser getUser(@Nonnull String userId) {
            return new TestUser(Long.valueOf(userId.substring(1)));
        }

        @CheckForNull
        @Override
        public ExternalUser authenticate(@Nonnull Credentials credentials) {
            // No credential validation: any SimpleCredentials user id passes.
            return getUser(((SimpleCredentials) credentials).getUserID());
        }

        @CheckForNull
        @Override
        public ExternalGroup getGroup(@Nonnull String name) {
            return new TestGroup(Long.valueOf(name.substring(1)));
        }

        @Nonnull
        @Override
        public Iterator<ExternalUser> listUsers() {
            Set<ExternalUser> all = new HashSet<>();
            for (long i = 0; i < numberOfUsers; i++) {
                all.add(new TestUser(i));
            }
            return all.iterator();
        }

        @Nonnull
        @Override
        public Iterator<ExternalGroup> listGroups() {
            Set<ExternalGroup> all = new HashSet<>();
            for (long i = 0; i < membershipSize; i++) {
                all.add(new TestGroup(i));
            }
            return all.iterator();
        }

        // Users ("u...") belong to all groups; groups declare no memberships.
        Iterable<ExternalIdentityRef> getDeclaredGroupRefs(String userId) {
            if (userId.charAt(0) == 'u') {
                Set<ExternalIdentityRef> groupRefs = new HashSet<>();
                for (long i = 0; i < membershipSize; i++) {
                    groupRefs.add(new ExternalIdentityRef("g"+ i, idp.getName()));
                }
                return groupRefs;
            } else {
                return ImmutableSet.of();
            }
        }
    }

    /** Base external identity; the principal name is the id prefixed with "p_". */
    private class TestIdentity implements ExternalIdentity {

        private final String userId;
        private final String principalName;
        private final ExternalIdentityRef id;

        public TestIdentity(@Nonnull String userId) {
            this.userId = userId;
            this.principalName = "p_"+userId;
            id = new ExternalIdentityRef(userId, idp.getName());
        }

        @Nonnull
        @Override
        public String getId() {
            return userId;
        }

        @Nonnull
        @Override
        public String getPrincipalName() {
            return principalName;
        }

        @Nonnull
        @Override
        public ExternalIdentityRef getExternalId() {
            return id;
        }

        @Override
        public String getIntermediatePath() {
            return null;
        }

        @Nonnull
        @Override
        public Iterable<ExternalIdentityRef> getDeclaredGroups() {
            return ((TestIdentityProvider) idp).getDeclaredGroupRefs(userId);
        }

        @Nonnull
        @Override
        public Map<String, ?> getProperties() {
            return ImmutableMap.of();
        }
    }

    private class TestUser extends TestIdentity implements ExternalUser {
        public TestUser(long index) {
            super("u" + index);
        }
    }

    private class TestGroup extends TestIdentity implements ExternalGroup {
        public TestGroup(long index) {
            super("g" + index);
        }

        @Nonnull
        @Override
        public Iterable<ExternalIdentityRef> getDeclaredMembers() throws ExternalIdentityException {
            // Memberships are modeled only from the user side (getDeclaredGroups).
            return ImmutableSet.of();
        }
    }
}
| |
package assignment1;
import java.awt.Color;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.RenderingHints;
import java.awt.geom.Ellipse2D;
import java.awt.geom.Line2D;
import javax.swing.JPanel;
/**
* @author Christoph Riesinger (riesinge@in.tum.de)
* @since November 06, 2011
* @version 1.0
*
* This class is a simple implementation of a plotter. It accepts two
* arrays of float values which represent the x- and y-coordinates of
* points which should be plotted in a Cartesian coordinate system. x-
* and y-axis are scaled logarithmically. No interpolation between the
* dots is done!
*/
public class Plotter extends JPanel {
/* Just to avoid compiler warning. */
private static final long serialVersionUID = -8725968470735352529L;
/* Top, bottom, left and right margin of the coordinate system in the plot. */
private final int PADDING = 20;
/* Class members. Are set in class constructor. */
private float[] xData = null;
private float[] yData = null;
private float minX = Float.MAX_VALUE;
private float minY = Float.MAX_VALUE;
private float maxX = -Float.MAX_VALUE;
private float maxY = -Float.MAX_VALUE;
/**
* Constructor of this class. Assigns the passed x- and y-values of the
* points to plot to the internal private member variables.
*
* @param xData
* x-values of the points which should be plotted by this class.
* @param yData
* y-values of the points which should be plotted by this class.
* @throws InstantiationException
* The lengths of the x- and y-coordinate arrays have to be
* equal. Elsewise an exception is thrown.
*/
public Plotter(float[] xData, float[] yData) throws InstantiationException {
    /*
     * Make sure the arrays which contain the x- and y-coordinates which
     * should be plotted by this class have the same length.
     */
    if (xData.length != yData.length) {
        throw (new InstantiationException(
                "The arrays for the x- and y-components of the "
                        + "coordinates have to be of the same length."));
    }

    this.xData = xData;
    this.yData = yData;

    /*
     * Determine the smallest and largest value which should be plotted by
     * this class. These values are the boundaries of the axes of the
     * coordinate system which will be plotted.
     */
    for (int i = 0; i < xData.length; i++) {
        if (xData[i] < minX) {
            minX = xData[i];
        }
        if (xData[i] > maxX) {
            maxX = xData[i];
        }
        if (yData[i] < minY) {
            minY = yData[i];
        }
        if (yData[i] > maxY) {
            maxY = yData[i];
        }
    }

    /*
     * Widen the y-range so the values 1/sqrt(minX) and 1/sqrt(maxX) fall
     * inside it — NOTE(review): presumably so a 1/sqrt(x) reference curve
     * fits into the plot; confirm against paintComponent.
     */
    if (1.0d / Math.sqrt(maxX) < minY) {
        minY = (float) (1.0d / Math.sqrt(maxX));
    }
    if (1.0d / Math.sqrt(minX) < minY) {
        minY = (float) (1.0d / Math.sqrt(minX));
    }
    if (1.0d / Math.sqrt(maxX) > maxY) {
        maxY = (float) (1.0d / Math.sqrt(maxX));
    }
    if (1.0d / Math.sqrt(minX) > maxY) {
        maxY = (float) (1.0d / Math.sqrt(minX));
    }
}
/**
 * Paints a log-log coordinate system with tick marks and captions, then
 * draws the reference curve y = 1/sqrt(x) in green and the supplied data
 * points in red, each as small filled dots.
 *
 * <p>Uses {@code Float.toString(...)} for the captions instead of the
 * deprecated boxing constructor {@code new Float(...).toString()}; the
 * produced text is identical.
 *
 * @param g graphics context supplied by Swing; cast to Graphics2D for
 *          antialiased shape drawing.
 * @see javax.swing.JComponent#paintComponent(java.awt.Graphics)
 */
protected void paintComponent(Graphics g) {
    super.paintComponent(g);
    Graphics2D graphics = (Graphics2D) g;
    graphics.setRenderingHint(RenderingHints.KEY_ANTIALIASING,
            RenderingHints.VALUE_ANTIALIAS_ON);
    int height = getHeight();
    int width = getWidth();
    float x, y;
    /* vertical (y-) axis line at x = PADDING, then tick marks along the bottom */
    graphics.draw(new Line2D.Double(PADDING, PADDING, PADDING, height
            - PADDING));
    graphics.draw(new Line2D.Double(
            PADDING + 0.25f * (width - 2 * PADDING), height - PADDING - 5,
            PADDING + 0.25f * (width - 2 * PADDING), height - PADDING + 5));
    graphics.draw(new Line2D.Double(0.5f * width, height - PADDING - 5,
            0.5f * width, height - PADDING + 5));
    graphics.draw(new Line2D.Double(
            PADDING + 0.75f * (width - 2 * PADDING), height - PADDING - 5,
            PADDING + 0.75f * (width - 2 * PADDING), height - PADDING + 5));
    graphics.draw(new Line2D.Double(width - PADDING, height - PADDING - 5,
            width - PADDING, height - PADDING + 5));
    /* x-axis captions: minX, three interior logarithmically-spaced values, maxX */
    graphics.drawString(Float.toString(minX), PADDING + 2, height
            - PADDING + 12);
    graphics.drawString(
            Float.toString((float) Math.pow(10.0d, 0.25f * Math.log10(maxX) + 0.75f
                    * Math.log10(minX))), PADDING + 0.25f
                    * (width - 2 * PADDING) + 2, height - PADDING + 12);
    graphics.drawString(
            Float.toString((float) Math.pow(10.0d,
                    0.5f * (Math.log10(maxX) + Math.log10(minX)))),
            0.5f * width + 2, height - PADDING + 12);
    graphics.drawString(
            Float.toString((float) Math.pow(10.0d, 0.75f * Math.log10(maxX) + 0.25f
                    * Math.log10(minX))), PADDING + 0.75f
                    * (width - 2 * PADDING) + 2, height - PADDING + 12);
    graphics.drawString(Float.toString(maxX), width - PADDING + 2,
            height - PADDING + 12);
    /* horizontal (x-) axis line at the bottom, then tick marks on the left edge */
    graphics.draw(new Line2D.Double(PADDING, height - PADDING, width
            - PADDING, height - PADDING));
    graphics.draw(new Line2D.Double(PADDING - 5, height
            - (PADDING + 0.25f * (height - 2 * PADDING)), PADDING + 5,
            height - (PADDING + 0.25f * (height - 2 * PADDING))));
    graphics.draw(new Line2D.Double(PADDING - 5, 0.5f * height,
            PADDING + 5, 0.5f * height));
    graphics.draw(new Line2D.Double(PADDING - 5, height
            - (PADDING + 0.75f * (height - 2 * PADDING)), PADDING + 5,
            height - (PADDING + 0.75f * (height - 2 * PADDING))));
    graphics.draw(new Line2D.Double(PADDING - 5, height
            - (PADDING + (height - 2 * PADDING)), PADDING + 5, height
            - (PADDING + (height - 2 * PADDING))));
    /* y-axis captions: minY at the bottom up to maxY at the top */
    graphics.drawString(Float.toString(minY), PADDING + 2,
            (height - 2 * PADDING) + PADDING - 2);
    graphics.drawString(
            Float.toString((float) Math.pow(10.0d, 0.25f * Math.log10(maxY) + 0.75f
                    * Math.log10(minY))), PADDING + 2, height
                    - (PADDING + 0.25f * (height - 2 * PADDING)) - 2);
    graphics.drawString(
            Float.toString((float) Math.pow(10.0d,
                    0.5f * (Math.log10(maxY) + Math.log10(minY)))),
            PADDING + 2, 0.5f * height - 2);
    graphics.drawString(
            Float.toString((float) Math.pow(10.0d, 0.75f * Math.log10(maxY) + 0.25f
                    * Math.log10(minY))), PADDING + 2, height
                    - (PADDING + 0.75f * (height - 2 * PADDING)) - 2);
    graphics.drawString(Float.toString(maxY), PADDING + 2,
            PADDING - 2);
    /* draw "exact" solution y = 1/sqrt(x) as green dots */
    graphics.setPaint(Color.GREEN);
    for (int i = 0; i < xData.length; i++) {
        x = scaleX(xData[i], width);
        y = scaleY((float) (1.0d / Math.sqrt(xData[i])), height);
        graphics.fill(new Ellipse2D.Float(x - 1.0f, y - 1.0f, 2.0f, 2.0f));
    }
    /* draw assigned values as red dots */
    graphics.setPaint(Color.red);
    for (int i = 0; i < xData.length; i++) {
        x = scaleX(xData[i], width);
        y = scaleY(yData[i], height);
        graphics.fill(new Ellipse2D.Float(x - 1.0f, y - 1.0f, 2.0f, 2.0f));
    }
}
/**
 * Maps an x-value to its horizontal pixel position on a logarithmic axis.
 * The plottable area starts PADDING pixels from each edge; minX lands on
 * the left border of that area and maxX on the right.
 *
 * @param x
 *            Value which should be scaled logarithmically.
 * @param width
 *            Width of the plottable area.
 * @return Logarithmically scaled value of x.
 */
private float scaleX(float x, float width) {
    float decades = (float) (Math.log10(maxX) - Math.log10(minX));
    float pixelsPerDecade = (float) (width - 2 * PADDING) / decades;
    float logDistance = (float) -Math.log10(minX) + (float) Math.log10(x);
    return PADDING + pixelsPerDecade * logDistance;
}
/**
 * Maps a y-value to its vertical pixel position on a logarithmic axis.
 * Screen coordinates grow downwards, so the scaled offset is subtracted
 * from the component height: minY lands on the bottom border of the
 * plottable area and maxY on the top.
 *
 * @param y
 *            Value which should be scaled logarithmically.
 * @param height
 *            Height of the plottable area.
 * @return Logarithmically scaled value of y.
 */
private float scaleY(float y, float height) {
    float decades = (float) (Math.log10(maxY) - Math.log10(minY));
    float pixelsPerDecade = (float) (height - 2 * PADDING) / decades;
    float logDistance = (float) -Math.log10(minY) + (float) Math.log10(y);
    return height - (PADDING + pixelsPerDecade * logDistance);
}
}
| |
/*
* Copyright 2010 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.zxing.oned;
import com.google.zxing.BarcodeFormat;
import com.google.zxing.ChecksumException;
import com.google.zxing.DecodeHintType;
import com.google.zxing.FormatException;
import com.google.zxing.NotFoundException;
import com.google.zxing.Result;
import com.google.zxing.ResultPoint;
import com.google.zxing.common.BitArray;
import java.util.Arrays;
import java.util.Map;
/**
 * <p>Decodes Code 93 barcodes.</p>
 *
 * @author Sean Owen
 * @see Code39Reader
 */
public final class Code93Reader extends OneDReader {

  // Note that 'abcd' are dummy characters in place of control characters.
  static final String ALPHABET_STRING = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ-. $/+%abcd*";
  private static final char[] ALPHABET = ALPHABET_STRING.toCharArray();

  /**
   * These represent the encodings of characters, as patterns of wide and narrow bars.
   * The 9 least-significant bits of each int correspond to the pattern of wide and narrow.
   * Entries are positionally parallel to {@link #ALPHABET_STRING}.
   */
  static final int[] CHARACTER_ENCODINGS = {
      0x114, 0x148, 0x144, 0x142, 0x128, 0x124, 0x122, 0x150, 0x112, 0x10A, // 0-9
      0x1A8, 0x1A4, 0x1A2, 0x194, 0x192, 0x18A, 0x168, 0x164, 0x162, 0x134, // A-J
      0x11A, 0x158, 0x14C, 0x146, 0x12C, 0x116, 0x1B4, 0x1B2, 0x1AC, 0x1A6, // K-T
      0x196, 0x19A, 0x16C, 0x166, 0x136, 0x13A, // U-Z
      0x12E, 0x1D4, 0x1D2, 0x1CA, 0x16E, 0x176, 0x1AE, // - - %
      0x126, 0x1DA, 0x1D6, 0x132, 0x15E, // Control chars? $-*
  };
  // Encoding of the '*' start/stop character (index 47 in the alphabet).
  private static final int ASTERISK_ENCODING = CHARACTER_ENCODINGS[47];

  // Scratch buffers reused across decodeRow() calls to avoid re-allocation;
  // this makes instances of this reader non-thread-safe.
  private final StringBuilder decodeRowResult;
  private final int[] counters;

  public Code93Reader() {
    decodeRowResult = new StringBuilder(20);
    counters = new int[6];
  }

  /**
   * Decodes one row of pixels as a Code 93 barcode: locates the leading
   * asterisk, reads characters until the trailing asterisk, verifies the
   * two check characters ("C" and "K"), strips them, and expands any
   * extended-mode escape pairs.
   */
  @Override
  public Result decodeRow(int rowNumber, BitArray row, Map<DecodeHintType,?> hints)
      throws NotFoundException, ChecksumException, FormatException {

    int[] start = findAsteriskPattern(row);
    // Read off white space
    int nextStart = row.getNextSet(start[1]);
    int end = row.getSize();

    int[] theCounters = counters;
    Arrays.fill(theCounters, 0);
    StringBuilder result = decodeRowResult;
    result.setLength(0);

    char decodedChar;
    int lastStart;
    do {
      // Each character is 6 bars/spaces; record their widths and match them.
      recordPattern(row, nextStart, theCounters);
      int pattern = toPattern(theCounters);
      if (pattern < 0) {
        throw NotFoundException.getNotFoundInstance();
      }
      decodedChar = patternToChar(pattern);
      result.append(decodedChar);
      lastStart = nextStart;
      for (int counter : theCounters) {
        nextStart += counter;
      }
      // Read off white space
      nextStart = row.getNextSet(nextStart);
    } while (decodedChar != '*');
    result.deleteCharAt(result.length() - 1); // remove asterisk

    // Width of the stop character, used below to compute the right result point.
    int lastPatternSize = 0;
    for (int counter : theCounters) {
      lastPatternSize += counter;
    }

    // Should be at least one more black module
    if (nextStart == end || !row.get(nextStart)) {
      throw NotFoundException.getNotFoundInstance();
    }

    if (result.length() < 2) {
      // false positive -- need at least 2 checksum digits
      throw NotFoundException.getNotFoundInstance();
    }

    checkChecksums(result);
    // Remove checksum digits
    result.setLength(result.length() - 2);

    String resultString = decodeExtended(result);

    float left = (float) (start[1] + start[0]) / 2.0f;
    float right = lastStart + lastPatternSize / 2.0f;
    return new Result(
        resultString,
        null,
        new ResultPoint[]{
            new ResultPoint(left, (float) rowNumber),
            new ResultPoint(right, (float) rowNumber)},
        BarcodeFormat.CODE_93);
  }

  /**
   * Scans the row for the asterisk start pattern and returns its
   * [start, end) offsets; throws if no asterisk is found.
   */
  private int[] findAsteriskPattern(BitArray row) throws NotFoundException {
    int width = row.getSize();
    int rowOffset = row.getNextSet(0);

    Arrays.fill(counters, 0);
    int[] theCounters = counters;
    int patternStart = rowOffset;
    boolean isWhite = false;
    int patternLength = theCounters.length;

    int counterPosition = 0;
    for (int i = rowOffset; i < width; i++) {
      if (row.get(i) ^ isWhite) {
        // Same color as the current run: extend it.
        theCounters[counterPosition]++;
      } else {
        if (counterPosition == patternLength - 1) {
          if (toPattern(theCounters) == ASTERISK_ENCODING) {
            return new int[]{patternStart, i};
          }
          // No match: slide the window forward by one bar/space pair.
          patternStart += theCounters[0] + theCounters[1];
          System.arraycopy(theCounters, 2, theCounters, 0, patternLength - 2);
          theCounters[patternLength - 2] = 0;
          theCounters[patternLength - 1] = 0;
          counterPosition--;
        } else {
          counterPosition++;
        }
        theCounters[counterPosition] = 1;
        isWhite = !isWhite;
      }
    }
    throw NotFoundException.getNotFoundInstance();
  }

  /**
   * Normalizes the 6 run-length counters to a total of 9 modules and packs
   * them into the 9-bit pattern format of CHARACTER_ENCODINGS
   * (bars contribute 1-bits, spaces 0-bits). Returns -1 if any run scales
   * outside the legal 1..4 module range.
   */
  private static int toPattern(int[] counters) {
    int max = counters.length;
    int sum = 0;
    for (int counter : counters) {
      sum += counter;
    }
    int pattern = 0;
    for (int i = 0; i < max; i++) {
      int scaled = Math.round(counters[i] * 9.0f / sum);
      if (scaled < 1 || scaled > 4) {
        return -1;
      }
      if ((i & 0x01) == 0) {
        // Even index = bar: append 'scaled' one-bits.
        for (int j = 0; j < scaled; j++) {
          pattern = (pattern << 1) | 0x01;
        }
      } else {
        // Odd index = space: append 'scaled' zero-bits.
        pattern <<= scaled;
      }
    }
    return pattern;
  }

  /** Looks up the alphabet character for a 9-bit pattern; throws if unknown. */
  private static char patternToChar(int pattern) throws NotFoundException {
    for (int i = 0; i < CHARACTER_ENCODINGS.length; i++) {
      if (CHARACTER_ENCODINGS[i] == pattern) {
        return ALPHABET[i];
      }
    }
    throw NotFoundException.getNotFoundInstance();
  }

  /**
   * Expands extended-mode escapes: the dummy characters 'a'-'d' stand for
   * the shift characters ($, %, /, +) and combine with the following
   * letter to produce the full ASCII range.
   */
  private static String decodeExtended(CharSequence encoded) throws FormatException {
    int length = encoded.length();
    StringBuilder decoded = new StringBuilder(length);
    for (int i = 0; i < length; i++) {
      char c = encoded.charAt(i);
      if (c >= 'a' && c <= 'd') {
        if (i >= length - 1) {
          // A shift character must be followed by another character.
          throw FormatException.getFormatInstance();
        }
        char next = encoded.charAt(i + 1);
        char decodedChar = '\0';
        switch (c) {
          case 'd':
            // +A to +Z map to a to z
            if (next >= 'A' && next <= 'Z') {
              decodedChar = (char) (next + 32);
            } else {
              throw FormatException.getFormatInstance();
            }
            break;
          case 'a':
            // $A to $Z map to control codes SH to SB
            if (next >= 'A' && next <= 'Z') {
              decodedChar = (char) (next - 64);
            } else {
              throw FormatException.getFormatInstance();
            }
            break;
          case 'b':
            if (next >= 'A' && next <= 'E') {
              // %A to %E map to control codes ESC to USep
              decodedChar = (char) (next - 38);
            } else if (next >= 'F' && next <= 'J') {
              // %F to %J map to ; < = > ?
              decodedChar = (char) (next - 11);
            } else if (next >= 'K' && next <= 'O') {
              // %K to %O map to [ \ ] ^ _
              decodedChar = (char) (next + 16);
            } else if (next >= 'P' && next <= 'S') {
              // %P to %S map to { | } ~
              decodedChar = (char) (next + 43);
            } else if (next >= 'T' && next <= 'Z') {
              // %T to %Z all map to DEL (127)
              decodedChar = 127;
            } else {
              throw FormatException.getFormatInstance();
            }
            break;
          case 'c':
            // /A to /O map to ! to , and /Z maps to :
            if (next >= 'A' && next <= 'O') {
              decodedChar = (char) (next - 32);
            } else if (next == 'Z') {
              decodedChar = ':';
            } else {
              throw FormatException.getFormatInstance();
            }
            break;
        }
        decoded.append(decodedChar);
        // bump up i again since we read two characters
        i++;
      } else {
        decoded.append(c);
      }
    }
    return decoded.toString();
  }

  /**
   * Verifies the two Code 93 check characters: "C" (weights cycle at 20)
   * over the data, then "K" (weights cycle at 15) over data plus "C".
   */
  private static void checkChecksums(CharSequence result) throws ChecksumException {
    int length = result.length();
    checkOneChecksum(result, length - 2, 20);
    checkOneChecksum(result, length - 1, 15);
  }

  /**
   * Checks a single modulo-47 weighted checksum ending at checkPosition;
   * weights increase right-to-left and wrap after weightMax.
   */
  private static void checkOneChecksum(CharSequence result, int checkPosition, int weightMax)
      throws ChecksumException {
    int weight = 1;
    int total = 0;
    for (int i = checkPosition - 1; i >= 0; i--) {
      total += weight * ALPHABET_STRING.indexOf(result.charAt(i));
      if (++weight > weightMax) {
        weight = 1;
      }
    }
    if (result.charAt(checkPosition) != ALPHABET[total % 47]) {
      throw ChecksumException.getChecksumInstance();
    }
  }

}
| |
/*
* Copyright to the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.rioproject.examples.hospital.ui;
import org.rioproject.examples.hospital.Bed;
import org.rioproject.examples.hospital.Patient;
import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * A panel of beds. Beds are grouped into one sub-panel per room and shown as
 * buttons whose icon reflects occupancy. Clicking an occupied bed's button
 * notifies the registered {@link PatientListener} with the bed's patient.
 */
public class BedPanel extends JPanel {
    // All accesses (reads and writes) are guarded by synchronized(beds).
    private final List<BedComponent> beds = new ArrayList<BedComponent>();
    // Maps a room number to the panel holding that room's bed buttons.
    private final Map<String, JPanel> roomMap = new HashMap<String, JPanel>();
    private final ImageIcon availableBedIcon;
    private final ImageIcon occupiedBedIcon;
    private PatientListener listener;
    private final JPanel roomPanel;

    public BedPanel(List<Bed> beds) {
        super();
        setLayout(new BoxLayout(this, BoxLayout.Y_AXIS));
        ImageIcon bedIcon = Util.getImageIcon("images/hospital-bed.png");
        add(new JLabel(bedIcon));
        add(Box.createVerticalStrut(8));
        roomPanel = new JPanel(new FlowLayout(FlowLayout.LEFT));
        // Icons must be loaded before addBeds(), which uses them for each button.
        availableBedIcon = Util.getImageIcon("images/empty-bed.png");
        occupiedBedIcon = Util.getImageIcon("images/occupied-bed.png");
        addBeds(beds);
        add(roomPanel);
        setPreferredSize(new Dimension(500, 50));
    }

    /** @return the total number of beds known to this panel */
    int getBedCount() {
        int count;
        synchronized(beds) {
            count = beds.size();
        }
        return count;
    }

    /** @return the number of beds currently flagged as occupied */
    int getOccupiedBedCount() {
        int count = 0;
        synchronized(beds) {
            for(BedComponent b : beds) {
                if(b.isOccupied())
                    count++;
            }
        }
        return count;
    }

    /**
     * Adds all of the given beds to the panel.
     *
     * @return the number of beds successfully added
     */
    private int addBeds(List<Bed> beds) {
        int count = 0;
        for(Bed b : beds) {
            try {
                addBed(b);
                count++; // fix: previously never incremented, so 0 was always returned
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        return count;
    }

    /** Registers the listener notified when an occupied bed is clicked. */
    void registerListener(PatientListener listener) {
        this.listener = listener;
    }

    /**
     * Marks the patient's bed as occupied and updates its button.
     *
     * @return false if the patient's bed is unknown to this panel
     */
    boolean occupyBed(Patient p) {
        BedComponent b = getBedComponent(p.getBed());
        if(b==null)
            return false;
        b.button.setIcon(occupiedBedIcon);
        b.setOccupied(true);
        String s = p.getPatientInfo().getName();
        b.button.setToolTipText("Patient: "+s);
        b.button.repaint();
        return true;
    }

    /**
     * Marks the patient's bed as available and clears its button state.
     *
     * @return false if the patient's bed is unknown to this panel
     */
    boolean emptyBed(Patient p) {
        BedComponent b = getBedComponent(p.getBed());
        if(b==null)
            return false;
        b.button.setIcon(availableBedIcon);
        b.button.setToolTipText(null);
        b.button.repaint();
        b.setOccupied(false);
        return true;
    }

    /**
     * Adds a single bed, creating its room's panel on first use.
     * The throws clause is kept for API compatibility although IO errors
     * from the Bed proxy are currently reported via printStackTrace().
     */
    void addBed(Bed b) throws IOException {
        try {
            String roomNumber = b.getRoomNumber();
            JPanel panel = roomMap.get(roomNumber);
            if(panel==null) {
                panel = new JPanel(new FlowLayout(FlowLayout.LEFT));
                panel.setBackground(new Color(222, 227, 233));
                panel.setBorder(BorderFactory.createEtchedBorder());
                roomPanel.add(panel);
                roomMap.put(roomNumber, panel);
            }
            boolean empty = b.getPatient()==null;
            addBed(panel, b, empty);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /** Creates the button for a bed, wires its click handler, and records it. */
    private void addBed(JPanel roomPanel, Bed bed, boolean empty) {
        JButton b = new JButton();
        b.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent actionEvent) {
                Bed bed = findSelectedBed((JButton) actionEvent.getSource());
                if(bed!=null) {
                    try {
                        Patient p = bed.getPatient();
                        if(p!=null && listener!=null) {
                            listener.patientSelected(p);
                        }
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
            }
        });
        b.setIcon(empty?availableBedIcon:occupiedBedIcon);
        roomPanel.add(b);
        BedComponent bc = new BedComponent(b, bed);
        bc.setOccupied(!empty);
        // fix: readers iterate under this lock, so the write must take it too
        synchronized(beds) {
            beds.add(bc);
        }
    }

    /** @return the component tracking the given bed, or null if unknown */
    private BedComponent getBedComponent(Bed bed) {
        BedComponent bedC = null;
        synchronized(beds) {
            for(BedComponent b : beds) {
                if(b.bed.equals(bed)) {
                    bedC = b;
                    break;
                }
            }
        }
        return bedC;
    }

    /** @return the bed whose button was clicked, or null if not found */
    private Bed findSelectedBed(JButton button) {
        Bed bed = null;
        synchronized(beds) {
            for(BedComponent b : beds) {
                if(b.button.equals(button)) {
                    bed = b.bed;
                    break;
                }
            }
        }
        return bed;
    }

    /** Pairs a bed with its button and tracks the occupancy flag. */
    private class BedComponent {
        JButton button;
        Bed bed;
        boolean occupied = false;

        BedComponent(JButton button, Bed bed) {
            this.button = button;
            this.bed = bed;
        }

        boolean isOccupied() {
            return occupied;
        }

        void setOccupied(boolean occupied) {
            this.occupied = occupied;
        }
    }
}
| |
/*
* Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.sun.tools.doclets.formats.html;
import java.io.*;
import java.util.*;
import com.sun.javadoc.*;
import com.sun.tools.doclets.formats.html.markup.*;
import com.sun.tools.doclets.internal.toolkit.*;
import com.sun.tools.doclets.internal.toolkit.util.*;
/**
 * Class to generate file for each package contents in the right-hand
 * frame. This will list all the Class Kinds in the package. A click on any
 * class-kind will update the frame with the clicked class-kind page.
 *
 * <p><b>This is NOT part of any supported API.
 * If you write code that depends on this, you do so at your own risk.
 * This code and its internal interfaces are subject to change or
 * deletion without notice.</b>
 *
 * @author Atul M Dambalkar
 * @author Bhavesh Patel (Modified)
 */
public class PackageWriterImpl extends HtmlDocletWriter
    implements PackageSummaryWriter {

    /**
     * The prev package name in the alpha-order list.
     */
    protected PackageDoc prev;

    /**
     * The next package name in the alpha-order list.
     */
    protected PackageDoc next;

    /**
     * The package being documented.
     */
    protected PackageDoc packageDoc;

    /**
     * Constructor to construct PackageWriter object and to generate
     * "package-summary.html" file in the respective package directory.
     * For example for package "java.lang" this will generate file
     * "package-summary.html" file in the "java/lang" directory. It will also
     * create "java/lang" directory in the current or the destination directory
     * if it doesn't exist.
     *
     * @param configuration the configuration of the doclet.
     * @param packageDoc    PackageDoc under consideration.
     * @param prev          Previous package in the sorted array.
     * @param next            Next package in the sorted array.
     */
    public PackageWriterImpl(ConfigurationImpl configuration,
            PackageDoc packageDoc, PackageDoc prev, PackageDoc next)
            throws IOException {
        super(configuration, DocPath.forPackage(packageDoc).resolve(DocPaths.PACKAGE_SUMMARY));
        this.prev = prev;
        this.next = next;
        this.packageDoc = packageDoc;
    }

    /**
     * {@inheritDoc}
     */
    public Content getPackageHeader(String heading) {
        String pkgName = packageDoc.name();
        Content bodyTree = getBody(true, getWindowTitle(pkgName));
        addTop(bodyTree);
        addNavLinks(true, bodyTree);
        HtmlTree div = new HtmlTree(HtmlTag.DIV);
        div.addStyle(HtmlStyle.header);
        // Package-level annotations appear above the title.
        Content annotationContent = new HtmlTree(HtmlTag.P);
        addAnnotationInfo(packageDoc, annotationContent);
        div.addContent(annotationContent);
        Content tHeading = HtmlTree.HEADING(HtmlConstants.TITLE_HEADING, true,
                HtmlStyle.title, packageLabel);
        tHeading.addContent(getSpace());
        Content packageHead = new StringContent(heading);
        tHeading.addContent(packageHead);
        div.addContent(tHeading);
        addDeprecationInfo(div);
        if (packageDoc.inlineTags().length > 0 && ! configuration.nocomment) {
            // Show the first-sentence summary plus a "See: Description" link
            // to the full description section further down the page.
            HtmlTree docSummaryDiv = new HtmlTree(HtmlTag.DIV);
            docSummaryDiv.addStyle(HtmlStyle.docSummary);
            addSummaryComment(packageDoc, docSummaryDiv);
            div.addContent(docSummaryDiv);
            Content space = getSpace();
            Content descLink = getHyperLink(getDocLink(
                    SectionName.PACKAGE_DESCRIPTION),
                    descriptionLabel, "", "");
            Content descPara = new HtmlTree(HtmlTag.P, seeLabel, space, descLink);
            div.addContent(descPara);
        }
        bodyTree.addContent(div);
        return bodyTree;
    }

    /**
     * {@inheritDoc}
     */
    public Content getContentHeader() {
        HtmlTree div = new HtmlTree(HtmlTag.DIV);
        div.addStyle(HtmlStyle.contentContainer);
        return div;
    }

    /**
     * Add the package deprecation information to the documentation tree.
     *
     * @param div the content tree to which the deprecation information will be added
     */
    public void addDeprecationInfo(Content div) {
        Tag[] deprs = packageDoc.tags("deprecated");
        if (Util.isDeprecated(packageDoc)) {
            HtmlTree deprDiv = new HtmlTree(HtmlTag.DIV);
            deprDiv.addStyle(HtmlStyle.deprecatedContent);
            Content deprPhrase = HtmlTree.SPAN(HtmlStyle.deprecatedLabel, deprecatedPhrase);
            deprDiv.addContent(deprPhrase);
            if (deprs.length > 0) {
                // Append the first @deprecated tag's comment, if any.
                Tag[] commentTags = deprs[0].inlineTags();
                if (commentTags.length > 0) {
                    addInlineDeprecatedComment(packageDoc, deprs[0], deprDiv);
                }
            }
            div.addContent(deprDiv);
        }
    }

    /**
     * {@inheritDoc}
     */
    public Content getSummaryHeader() {
        HtmlTree ul = new HtmlTree(HtmlTag.UL);
        ul.addStyle(HtmlStyle.blockList);
        return ul;
    }

    /**
     * {@inheritDoc}
     */
    public void addClassesSummary(ClassDoc[] classes, String label,
            String tableSummary, String[] tableHeader, Content summaryContentTree) {
        if(classes.length > 0) {
            Arrays.sort(classes);
            Content caption = getTableCaption(new RawHtml(label));
            Content table = HtmlTree.TABLE(HtmlStyle.typeSummary, 0, 3, 0,
                    tableSummary, caption);
            table.addContent(getSummaryTableHeader(tableHeader, "col"));
            Content tbody = new HtmlTree(HtmlTag.TBODY);
            for (int i = 0; i < classes.length; i++) {
                if (!Util.isCoreClass(classes[i]) ||
                    !configuration.isGeneratedDoc(classes[i])) {
                    continue;
                }
                Content classContent = getLink(new LinkInfoImpl(
                        configuration, LinkInfoImpl.Kind.PACKAGE, classes[i]));
                Content tdClass = HtmlTree.TD(HtmlStyle.colFirst, classContent);
                HtmlTree tr = HtmlTree.TR(tdClass);
                // Alternate the row style for readability.
                if (i%2 == 0)
                    tr.addStyle(HtmlStyle.altColor);
                else
                    tr.addStyle(HtmlStyle.rowColor);
                HtmlTree tdClassDescription = new HtmlTree(HtmlTag.TD);
                tdClassDescription.addStyle(HtmlStyle.colLast);
                if (Util.isDeprecated(classes[i])) {
                    // Deprecated classes show the deprecation comment instead
                    // of the regular summary.
                    tdClassDescription.addContent(deprecatedLabel);
                    if (classes[i].tags("deprecated").length > 0) {
                        addSummaryDeprecatedComment(classes[i],
                            classes[i].tags("deprecated")[0], tdClassDescription);
                    }
                }
                else
                    addSummaryComment(classes[i], tdClassDescription);
                tr.addContent(tdClassDescription);
                tbody.addContent(tr);
            }
            table.addContent(tbody);
            Content li = HtmlTree.LI(HtmlStyle.blockList, table);
            summaryContentTree.addContent(li);
        }
    }

    /**
     * {@inheritDoc}
     */
    public void addPackageDescription(Content packageContentTree) {
        if (packageDoc.inlineTags().length > 0) {
            packageContentTree.addContent(
                    getMarkerAnchor(SectionName.PACKAGE_DESCRIPTION));
            Content h2Content = new StringContent(
                    configuration.getText("doclet.Package_Description",
                    packageDoc.name()));
            packageContentTree.addContent(HtmlTree.HEADING(HtmlConstants.PACKAGE_HEADING,
                    true, h2Content));
            addInlineComment(packageDoc, packageContentTree);
        }
    }

    /**
     * {@inheritDoc}
     */
    public void addPackageTags(Content packageContentTree) {
        addTagsInfo(packageDoc, packageContentTree);
    }

    /**
     * {@inheritDoc}
     */
    public void addPackageFooter(Content contentTree) {
        addNavLinks(false, contentTree);
        addBottom(contentTree);
    }

    /**
     * {@inheritDoc}
     */
    public void printDocument(Content contentTree) throws IOException {
        printHtmlDocument(configuration.metakeywords.getMetaKeywords(packageDoc),
                true, contentTree);
    }

    /**
     * Get "Use" link for this package in the navigation bar.
     *
     * @return a content tree for the class use link
     */
    protected Content getNavLinkClassUse() {
        Content useLink = getHyperLink(DocPaths.PACKAGE_USE,
                useLabel, "", "");
        Content li = HtmlTree.LI(useLink);
        return li;
    }

    /**
     * Get "PREV PACKAGE" link in the navigation bar.
     *
     * @return a content tree for the previous link
     */
    public Content getNavLinkPrevious() {
        Content li;
        if (prev == null) {
            // First package in the sorted list: render a plain, unlinked label.
            li = HtmlTree.LI(prevpackageLabel);
        } else {
            DocPath path = DocPath.relativePath(packageDoc, prev);
            li = HtmlTree.LI(getHyperLink(path.resolve(DocPaths.PACKAGE_SUMMARY),
                prevpackageLabel, "", ""));
        }
        return li;
    }

    /**
     * Get "NEXT PACKAGE" link in the navigation bar.
     *
     * @return a content tree for the next link
     */
    public Content getNavLinkNext() {
        Content li;
        if (next == null) {
            // Last package in the sorted list: render a plain, unlinked label.
            li = HtmlTree.LI(nextpackageLabel);
        } else {
            DocPath path = DocPath.relativePath(packageDoc, next);
            li = HtmlTree.LI(getHyperLink(path.resolve(DocPaths.PACKAGE_SUMMARY),
                nextpackageLabel, "", ""));
        }
        return li;
    }

    /**
     * Get "Tree" link in the navigation bar. This will be link to the package
     * tree file.
     *
     * @return a content tree for the tree link
     */
    protected Content getNavLinkTree() {
        Content useLink = getHyperLink(DocPaths.PACKAGE_TREE,
                treeLabel, "", "");
        Content li = HtmlTree.LI(useLink);
        return li;
    }

    /**
     * Highlight "Package" in the navigation bar, as this is the package page.
     *
     * @return a content tree for the package link
     */
    protected Content getNavLinkPackage() {
        Content li = HtmlTree.LI(HtmlStyle.navBarCell1Rev, packageLabel);
        return li;
    }
}
| |
/*
* Copyright [1999-2014] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ensembl.healthcheck.testcase.variation;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.ArrayList;
import org.ensembl.healthcheck.DatabaseRegistryEntry;
import org.ensembl.healthcheck.ReportManager;
import org.ensembl.healthcheck.Team;
import org.ensembl.healthcheck.testcase.SingleDatabaseTestCase;
import org.ensembl.healthcheck.util.DBUtils;
/**
 * Check that allele frequencies add up to 1
 */
public class AlleleFrequencies extends SingleDatabaseTestCase {

    /**
     * Creates a new instance of Check Allele Frequencies
     */
    public AlleleFrequencies() {
        //addToGroup("variation-long");
        setHintLongRunning(true);
        setDescription("Check that the allele frequencies add up to 1");
        setTeamResponsible(Team.VARIATION);
    }

    /**
     * Check that all allele/genotype frequencies add up to 1 for the same variation/subsnp and sample.
     *
     * @param dbre
     *          The database to use.
     * @return Result.
     */
    public boolean run(DatabaseRegistryEntry dbre) {
        boolean result = true;
        Connection con = dbre.getConnection();
        String[] tables = new String[] { "population_genotype", "allele" };
        // Set this flag to true if we want to count ALL failed frequencies and not just break as soon as we've found one
        boolean countAll = false;
        // Tolerance for the deviation from 1.0
        float tol = 0.025f;
        // Get the results in batches (determined by the variation_id)
        int chunk = 250000;
        try {
            Statement stmt = con.createStatement();
            // Get variations with allele/genotype frequencies that don't add up to 1 for the same variation_id, subsnp_id and population_id
            for (int i = 0; i < tables.length; i++) {
                // Get the maximum variation id so we know when the chunked scan is done
                String sql = "SELECT MAX(s.variation_id) FROM " + tables[i] + " s";
                sql = DBUtils.getRowColumnValue(con, sql);
                if (sql.length() == 0) {
                    sql = "0";
                }
                int maxId = Integer.parseInt(sql);
                // Query template; VIDLOWER/VIDUPPER are replaced per chunk below.
                // Ordering groups the rows of one (variation, subsnp, population) together.
                sql = "SELECT s.variation_id, s.subsnp_id, s.population_id, s.frequency FROM " + tables[i]
                        + " s USE INDEX (variation_idx,subsnp_idx) WHERE s.variation_id BETWEEN VIDLOWER AND VIDUPPER ORDER BY s.variation_id, s.subsnp_id, s.population_id";
                int offset = 1;
                // Count the number of failed
                int failed = 0;
                // Keep the failed entries as {variation_id, subsnp_id, population_id, 1000*sum}
                ArrayList<int[]> failedEntries = new ArrayList<int[]>();
                // Loop until we've reached the maximum variation_id
                while (offset <= maxId) {
                    // Replace the offsets in the SQL query
                    ResultSet rs = stmt.executeQuery(sql.replaceFirst("VIDLOWER", String.valueOf(offset)).replaceFirst("VIDUPPER", String.valueOf(offset + chunk)));
                    // Increase the offset with the chunk size and add 1 (BETWEEN is inclusive)
                    offset += chunk + 1;
                    int lastVid = 0;
                    int lastSSid = 0;
                    int lastSid = 0;
                    int curVid;
                    int curSSid;
                    int curSid;
                    float freq;
                    // sum starts at 1 so the very first group change (against the
                    // dummy lastVid=0 group) passes the tolerance check below.
                    float sum = 1.f;
                    int count = 0;
                    while (rs.next()) {
                        // Get the variation_id, subsnp_id, population_id and frequency. If any of these are NULL, they will be returned as 0
                        curVid = rs.getInt(1);
                        curSSid = rs.getInt(2);
                        curSid = rs.getInt(3);
                        freq = rs.getFloat(4);
                        // If any of the values was NULL, skip processing the row. For the frequency, we have to use the wasNull() function to
                        // check this. The ids it is sufficient to check if they were 0
                        if (curVid != 0 && curSSid != 0 && curSid != 0 && !rs.wasNull()) {
                            // If any of the ids is different from the last one, stop summing and check the sum of the latest variation
                            if (curVid != lastVid || curSSid != lastSSid || curSid != lastSid) {
                                // See if the sum of the frequencies deviates from 1 more than what we tolerate. In that case, count it as a failed
                                if (Math.abs(1.f - sum) > tol) {
                                    // Store the failed data in failedEntries (sum stored scaled by 1000 to fit the int[])
                                    failedEntries.add(new int[] { lastVid, lastSSid, lastSid, Math.round(1000 * sum) });
                                    failed++;
                                }
                                // Set the last ids to this one and reset the sum
                                lastVid = curVid;
                                lastSSid = curSSid;
                                lastSid = curSid;
                                sum = 0.f;
                            }
                            // Add the frequency to the sum
                            sum += freq;
                        }
                        count++;
                        // Break if we've encountered a failed frequency (unless flagged not to)
                        if (failed > 0 && !countAll) {
                            break;
                        }
                    }
                    rs.close();
                }
                if (failed == 0) {
                    // Report that the current table is ok
                    ReportManager.correct(this, con, "Frequencies in " + tables[i] + " all add up to 1");
                } else {
                    // Get an example and print it
                    int[] entry = failedEntries.get(0);
                    String example = "variation_id = " + String.valueOf(entry[0]) + ", subsnp_id = " + String.valueOf(entry[1]) + ", population_id = " + String.valueOf(entry[2]) + ", sum is "
                            + String.valueOf((0.001f * entry[3]));
                    ReportManager.problem(this, con, "There are " + String.valueOf(failed) + " variations in " + tables[i] + " where the frequencies don't add up to 1 +/- " + String.valueOf(tol) + " (e.g. "
                            + example + ")");
                    result = false;
                    // Loop over the failed entries and print a list of variation_id, subsnp_id, population_id and summed frequency to stdout
                    /*
                    for (int j = 0; j < failedEntries.size(); j++) {
                        entry = failedEntries.get(j);
                        System.out.println(String.valueOf(entry[0]) + "\t" + String.valueOf(entry[1]) + "\t" + String.valueOf(entry[2]) + "\t" + String.valueOf((0.001f * entry[3])));
                    }
                    */
                }
            }
            stmt.close();
        } catch (Exception e) {
            result = false;
            e.printStackTrace();
        }
        if (result) {
            ReportManager.correct(this, con, "Allele/Genotype frequency healthcheck passed without any problem");
        }
        return result;
    } // run

} // AlleleFrequencies
| |
/*
* Copyright (C) 2015 Information Management Services, Inc.
*/
package com.imsweb.naaccrxml.gui.pages;
import java.awt.BorderLayout;
import java.awt.CardLayout;
import java.awt.Color;
import java.awt.FlowLayout;
import java.awt.Font;
import java.io.File;
import java.io.IOException;
import java.io.LineNumberReader;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.atomic.AtomicInteger;
import javax.swing.Box;
import javax.swing.BoxLayout;
import javax.swing.JButton;
import javax.swing.JComboBox;
import javax.swing.JFileChooser;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JProgressBar;
import javax.swing.JScrollPane;
import javax.swing.JTabbedPane;
import javax.swing.JTextArea;
import javax.swing.JTextField;
import javax.swing.SwingUtilities;
import javax.swing.SwingWorker;
import javax.swing.border.EmptyBorder;
import javax.swing.border.LineBorder;
import javax.swing.border.TitledBorder;
import javax.swing.filechooser.FileFilter;
import org.apache.commons.lang3.StringUtils;
import com.imsweb.naaccrxml.NaaccrErrorUtils;
import com.imsweb.naaccrxml.NaaccrFormat;
import com.imsweb.naaccrxml.NaaccrIOException;
import com.imsweb.naaccrxml.NaaccrObserver;
import com.imsweb.naaccrxml.NaaccrOptions;
import com.imsweb.naaccrxml.NaaccrValidationError;
import com.imsweb.naaccrxml.NaaccrXmlDictionaryUtils;
import com.imsweb.naaccrxml.NaaccrXmlUtils;
import com.imsweb.naaccrxml.entity.Patient;
import com.imsweb.naaccrxml.entity.dictionary.NaaccrDictionary;
import com.imsweb.naaccrxml.gui.Standalone;
import com.imsweb.naaccrxml.gui.StandaloneOptions;
public abstract class AbstractProcessingPage extends AbstractPage {
// compression choices offered for the created target file
protected static final String _COMPRESSION_NONE = "None";
protected static final String _COMPRESSION_GZIP = "GZip";
protected static final String _COMPRESSION_XZ = "XZ (LZMA)";
// card IDs for the north panel (file-selection feedback: nothing selected, pre-analysis error, pre-analysis results)
protected static final String _NORTH_PANEL_ID_NO_FILE = "no-file";
protected static final String _NORTH_PANEL_ID_ERROR = "pre-analysis-error";
protected static final String _NORTH_PANEL_ID_ANALYSIS_RESULTS = "pre-analysis-results";
// card IDs for the center panel (help text, processing options, processing view)
protected static final String _CENTER_PANEL_ID_HELP = "help";
protected static final String _CENTER_PANEL_ID_OPTIONS = "options";
protected static final String _CENTER_PANEL_ID_PROCESSING = "processing";
// card IDs for the north part of the processing view (analysis, progress, results, interrupted, error)
protected static final String _NORTH_PROCESSING_PANEL_ID_ANALYSIS = "processing-analysis";
protected static final String _NORTH_PROCESSING_PANEL_ID_PROGRESS = "processing-progress";
protected static final String _NORTH_PROCESSING_PANEL_ID_RESULTS = "processing-results";
protected static final String _NORTH_PROCESSING_PANEL_ID_INTERRUPTED = "processing-interrupted";
protected static final String _NORTH_PROCESSING_PANEL_ID_ERROR = "processing-error";
// texts shown by the user-defined-dictionaries disclaimer label, depending on what the data file references
private static final String _TXT_DICT_NOT_NEEDED = "The data file does not reference user-defined dictionaries.";
private static final String _TXT_DICT_NEEDED = "The following user-defined dictionaries need to be provided (use the Browse button to select them):";
private static final String _TXT_DICT_PROVIDED = "All the user-defined dictionaries have been provided";
// file choosers: one for the source data file, one for user-defined dictionaries (XML-filtered)
protected JFileChooser _fileChooser, _dictionaryFileChooser;
// card layouts driving the three card panels below
protected CardLayout _northLayout, _centerLayout, _northProcessingLayout;
// the card panels themselves, plus the dictionary-selection panel (shown only when dictionaries are needed)
protected JPanel _northPnl, _centerPnl, _northProcessingPnl, _dictionaryPnl;
// source (input) and target (output) file path fields; _targetFld is null when showTargetInput() is false
protected JTextField _sourceFld, _targetFld;
// compression selector for the target file
protected JComboBox<String> _compressionBox;
// progress bars for the pre-processing analysis phase and the processing phase
protected JProgressBar _analysisBar, _processingBar;
// various informational/error labels populated during analysis and processing
protected JLabel _analysisErrorLbl, _processingErrorLbl, _processingResult1Lbl, _processingResult2Lbl, _formatLbl, _numLinesLbl, _fileSizeLbl;
protected JLabel _numPatLbl, _numTumLbl, _dictionaryLbl, _dictionaryDisclaimerLbl;
// text areas for the per-line warnings and for the aggregated warnings summary
protected JTextArea _warningsTextArea, _warningsSummaryTextArea;
// tabbed pane holding the two warning views
protected JTabbedPane _warningsPane;
// options component built by the subclass
protected StandaloneOptions _guiOptions;
// background workers for the analysis and processing phases (null when idle/cancelled)
protected transient SwingWorker<Void, Void> _analysisWorker;
protected transient SwingWorker<Void, Patient> _processingWorker;
// user-defined dictionaries provided so far, keyed by dictionary URI
protected Map<String, NaaccrDictionary> _userDictionaries = new HashMap<>();
// guards limiting how much text is appended to the warnings view
protected boolean _maxWarningsReached = false, _maxWarningsDiscAdded = false;
// warning counts per validation-error code, plus the NAACCR IDs involved per code
protected Map<String, AtomicInteger> _warningStats = new HashMap<>();
protected Map<String, Set<String>> _warningStatsDetails = new HashMap<>();
/**
 * Builds the page: a source-file selection row on top (with its feedback cards) and a
 * center card panel switching between help, options and processing views.
 * @param isSourceXml if true, the analysis-results card also shows patient/tumor counts
 */
public AbstractProcessingPage(boolean isSourceXml) {
super();
// chooser for the source data file
_fileChooser = new JFileChooser();
_fileChooser.setFileSelectionMode(JFileChooser.FILES_ONLY);
_fileChooser.setDialogTitle("Select File");
_fileChooser.setApproveButtonToolTipText("Select file");
_fileChooser.setMultiSelectionEnabled(false);
// chooser for user-defined dictionaries, restricted to XML files (and directories, for navigation)
_dictionaryFileChooser = new JFileChooser();
_dictionaryFileChooser.setFileSelectionMode(JFileChooser.FILES_ONLY);
_dictionaryFileChooser.setDialogTitle("Select File");
_dictionaryFileChooser.setApproveButtonToolTipText("Select file");
_dictionaryFileChooser.setMultiSelectionEnabled(false);
_dictionaryFileChooser.addChoosableFileFilter(new FileFilter() {
@Override
public String getDescription() {
return "XML files (*.xml)";
}
@Override
public boolean accept(File f) {
return f != null && (f.isDirectory() || f.getName().toLowerCase().endsWith(".xml"));
}
});
// top row: source file label, text field and Browse button; browsing triggers the pre-analysis
JPanel inputFilePnl = new JPanel(new BorderLayout());
JPanel sourceFilePnl = new JPanel();
sourceFilePnl.setOpaque(false);
sourceFilePnl.setBorder(null);
sourceFilePnl.setLayout(new FlowLayout(FlowLayout.LEADING, 0, 0));
sourceFilePnl.add(Standalone.createBoldLabel(getSourceLabelText()));
sourceFilePnl.add(Box.createHorizontalStrut(5));
_sourceFld = new JTextField(60);
_sourceFld.setBackground(Color.WHITE);
sourceFilePnl.add(_sourceFld);
sourceFilePnl.add(Box.createHorizontalStrut(5));
JButton browseBtn = new JButton("Browse...");
browseBtn.addActionListener(e -> {
if (_fileChooser.showDialog(AbstractProcessingPage.this, "Select") == JFileChooser.APPROVE_OPTION) {
_sourceFld.setText(_fileChooser.getSelectedFile().getAbsolutePath());
performPreAnalysis();
}
});
sourceFilePnl.add(browseBtn);
inputFilePnl.add(sourceFilePnl, BorderLayout.NORTH);
// north cards: no-file message, pre-analysis results, pre-analysis error
_northPnl = new JPanel();
_northLayout = new CardLayout();
_northPnl.setLayout(_northLayout);
_northPnl.add(_NORTH_PANEL_ID_NO_FILE, buildNoFileSelectedPanel());
_northPnl.add(_NORTH_PANEL_ID_ANALYSIS_RESULTS, buildAnalysisResultsPanel(isSourceXml));
_northPnl.add(_NORTH_PANEL_ID_ERROR, buildAnalysisErrorPanel());
inputFilePnl.add(_northPnl, BorderLayout.SOUTH);
this.add(inputFilePnl, BorderLayout.NORTH);
// center cards: help, options, processing
_centerPnl = new JPanel();
_centerLayout = new CardLayout();
_centerPnl.setLayout(_centerLayout);
_centerPnl.add(_CENTER_PANEL_ID_HELP, buildHelpPanel());
_centerPnl.add(_CENTER_PANEL_ID_OPTIONS, buildOptionsPanel());
_centerPnl.add(_CENTER_PANEL_ID_PROCESSING, buildProcessingPanel());
this.add(_centerPnl, BorderLayout.CENTER);
}
/**
 * Returns the caption shown next to the source (input) file field.
 */
protected abstract String getSourceLabelText();
/**
 * Returns the caption shown next to the target (output) file field on the options panel.
 */
protected abstract String getTargetLabelText();
/**
 * Wraps the given text into a label inside a left-aligned flow panel.
 * @param text text to display
 * @return the wrapping panel
 */
private JPanel buildTextPnl(String text) {
    JPanel textWrapperPnl = new JPanel(new FlowLayout(FlowLayout.LEADING, 0, 2));
    JLabel textLbl = new JLabel(text);
    textWrapperPnl.add(textLbl);
    return textWrapperPnl;
}
/**
 * Builds the north card shown while no source file has been selected yet.
 * @return the "no file" panel
 */
private JPanel buildNoFileSelectedPanel() {
    JPanel noFilePnl = new JPanel(new FlowLayout(FlowLayout.LEADING, 0, 0));
    noFilePnl.setBorder(new EmptyBorder(10, 10, 20, 0));
    JPanel messagePnl = buildTextPnl("No file selected; please use the Browse button to select one.");
    noFilePnl.add(messagePnl);
    return noFilePnl;
}
/**
 * Builds the center "help" card: static text listing the supported NAACCR versions,
 * supported compressions, and a note about the tool not being a data-conversion tool.
 */
private JPanel buildHelpPanel() {
JPanel pnl = new JPanel();
pnl.setBorder(new EmptyBorder(10, 10, 0, 0));
pnl.setLayout(new BoxLayout(pnl, BoxLayout.Y_AXIS));
pnl.add(buildTextPnl("The following NAACCR versions are supported:"));
pnl.add(buildTextPnl("    NAACCR 18"));
pnl.add(buildTextPnl("    NAACCR 16"));
pnl.add(buildTextPnl("    NAACCR 15"));
pnl.add(buildTextPnl("    NAACCR 14"));
pnl.add(buildTextPnl("The Abstract, Modified, Confidential and Incidence flavors are supported for those versions."));
pnl.add(Box.createVerticalStrut(25));
pnl.add(buildTextPnl("The following compressions are supported:"));
pnl.add(buildTextPnl("    GZip (\".gz\" extension)"));
pnl.add(buildTextPnl("    XZ (\".xz\" extension; this compression will usually produce smaller files than GZip but will take longer to process)"));
pnl.add(buildTextPnl("    Uncompressed (anything not ending in .gz or .xz will be treated as uncompressed)"));
pnl.add(Box.createVerticalStrut(25));
pnl.add(buildTextPnl("Note that this utility is not a data conversion tool, it simply translates one format into another."));
pnl.add(buildTextPnl("That means the created file (Flat or XML) will always have the same NAACCR version (and same data) as its source."));
// wrap in a BorderLayout NORTH slot so the text stays top-aligned instead of stretching
JPanel wrapperPnl = new JPanel(new BorderLayout());
wrapperPnl.add(pnl, BorderLayout.NORTH);
return wrapperPnl;
}
/**
 * Builds the north card showing the pre-analysis results (format, line count, file size,
 * and optionally patient/tumor counts).
 * @param isSourceXml if true, patient and tumor counters are shown as well
 * @return the analysis-results panel
 */
private JPanel buildAnalysisResultsPanel(boolean isSourceXml) {
    JPanel resultsPnl = new JPanel();
    resultsPnl.setBorder(new EmptyBorder(15, 25, 10, 0));
    resultsPnl.setLayout(new FlowLayout(FlowLayout.LEADING, 0, 0));
    _formatLbl = addAnalysisMetric(resultsPnl, "Source File format: ", true);
    _numLinesLbl = addAnalysisMetric(resultsPnl, "Num lines: ", true);
    if (isSourceXml) {
        _numPatLbl = addAnalysisMetric(resultsPnl, "Num patients: ", true);
        _numTumLbl = addAnalysisMetric(resultsPnl, "Num tumors: ", true);
    }
    _fileSizeLbl = addAnalysisMetric(resultsPnl, "File size: ", false);
    return resultsPnl;
}

/**
 * Adds a bold caption, a small gap and a placeholder value label to the given panel.
 * @param pnl panel receiving the components
 * @param caption bold caption text
 * @param trailingGap if true, a 25-pixel gap is appended after the value label
 * @return the value label, so the caller can keep a reference and update it later
 */
private JLabel addAnalysisMetric(JPanel pnl, String caption, boolean trailingGap) {
    pnl.add(Standalone.createBoldLabel(caption));
    pnl.add(Box.createHorizontalStrut(5));
    JLabel valueLbl = new JLabel(" ");
    pnl.add(valueLbl);
    if (trailingGap)
        pnl.add(Box.createHorizontalStrut(25));
    return valueLbl;
}
/**
 * Builds the north card shown when the file pre-analysis fails; the actual error text
 * is set later via _analysisErrorLbl.
 * @return the analysis-error panel
 */
private JPanel buildAnalysisErrorPanel() {
    JPanel errorPnl = new JPanel();
    errorPnl.setBorder(new EmptyBorder(10, 10, 0, 0));
    errorPnl.setLayout(new FlowLayout(FlowLayout.LEADING, 0, 0));
    JLabel captionLbl = Standalone.createBoldLabel("Error analysing file: ");
    captionLbl.setForeground(Color.RED);
    errorPnl.add(captionLbl);
    _analysisErrorLbl = new JLabel(" ");
    _analysisErrorLbl.setForeground(Color.RED);
    errorPnl.add(_analysisErrorLbl);
    return errorPnl;
}
/**
 * Builds the center "options" card: optional target-file row (path + compression),
 * the subclass-provided processing options, the user-defined-dictionaries section
 * and the "Process Source File" button.
 */
private JPanel buildOptionsPanel() {
JPanel pnl = new JPanel(new BorderLayout());
// header instruction line
JPanel headerPnl = new JPanel(new FlowLayout(FlowLayout.LEADING, 0, 0));
headerPnl.setBorder(new EmptyBorder(10, 0, 15, 0));
JLabel headerLbl = Standalone.createBoldLabel("Please review and/or change the following options. Once you are ready, click the process button at the bottom of the page.");
headerLbl.setForeground(new Color(150, 0, 0));
headerPnl.add(headerLbl);
pnl.add(headerPnl, BorderLayout.NORTH);
JPanel allOptionsPnl = new JPanel();
allOptionsPnl.setBorder(new EmptyBorder(0, 15, 0, 0));
allOptionsPnl.setLayout(new BoxLayout(allOptionsPnl, BoxLayout.Y_AXIS));
pnl.add(allOptionsPnl, BorderLayout.CENTER);
// target file row (subclasses without an output file hide it via showTargetInput())
if (showTargetInput()) {
JPanel targetFieldPnl = new JPanel();
targetFieldPnl.setOpaque(false);
targetFieldPnl.setBorder(null);
targetFieldPnl.setLayout(new FlowLayout(FlowLayout.LEADING, 0, 0));
targetFieldPnl.add(Standalone.createBoldLabel(getTargetLabelText()));
targetFieldPnl.add(Box.createHorizontalStrut(5));
_targetFld = new JTextField(60);
targetFieldPnl.add(_targetFld);
targetFieldPnl.add(Box.createHorizontalStrut(5));
JButton browseBtn = new JButton("Browse...");
browseBtn.addActionListener(e -> {
if (_fileChooser.showDialog(AbstractProcessingPage.this, "Select") == JFileChooser.APPROVE_OPTION)
_targetFld.setText(_fileChooser.getSelectedFile().getAbsolutePath());
});
targetFieldPnl.add(browseBtn);
targetFieldPnl.add(Box.createHorizontalStrut(10));
targetFieldPnl.add(Standalone.createBoldLabel("Compression:"));
targetFieldPnl.add(Box.createHorizontalStrut(5));
_compressionBox = new JComboBox<>(new String[] {_COMPRESSION_NONE, _COMPRESSION_GZIP, _COMPRESSION_XZ});
// keep the target filename's extension in sync with the selected compression
_compressionBox.addActionListener(e -> {
if (!_targetFld.getText().isEmpty())
_targetFld.setText(fixFileExtension(_targetFld.getText(), (String)_compressionBox.getSelectedItem()));
});
targetFieldPnl.add(_compressionBox);
allOptionsPnl.add(targetFieldPnl);
allOptionsPnl.add(Box.createVerticalStrut(15));
}
// subclass-provided options, in a titled border
JPanel optionsPnl = new JPanel(new BorderLayout());
Font font = new JLabel().getFont();
optionsPnl.setBorder(new TitledBorder(null, "Processing Options", TitledBorder.LEADING, TitledBorder.DEFAULT_POSITION, font.deriveFont(Font.BOLD), Color.BLACK));
_guiOptions = createOptions();
_guiOptions.setBorder(new EmptyBorder(10, 20, 10, 10));
optionsPnl.add(_guiOptions);
allOptionsPnl.add(optionsPnl);
// user-defined dictionaries disclaimer + selection row (row is hidden when none are needed)
JPanel dictionaryDisclaimerPnl = new JPanel(new FlowLayout(FlowLayout.LEADING, 0, 0));
dictionaryDisclaimerPnl.setBorder(new EmptyBorder(15, 0, 0, 0));
_dictionaryDisclaimerLbl = Standalone.createBoldLabel(_TXT_DICT_NOT_NEEDED);
dictionaryDisclaimerPnl.add(_dictionaryDisclaimerLbl);
allOptionsPnl.add(dictionaryDisclaimerPnl);
_dictionaryPnl = new JPanel();
_dictionaryPnl.setLayout(new BoxLayout(_dictionaryPnl, BoxLayout.Y_AXIS));
JPanel dictionarySelectionPnl = new JPanel();
dictionarySelectionPnl.setBorder(new EmptyBorder(15, 0, 0, 0));
dictionarySelectionPnl.setLayout(new FlowLayout(FlowLayout.LEADING, 0, 0));
dictionarySelectionPnl.add(Box.createHorizontalStrut(25));
_dictionaryLbl = new JLabel(" ");
dictionarySelectionPnl.add(_dictionaryLbl);
dictionarySelectionPnl.add(Box.createHorizontalStrut(10));
JButton dictionaryBrowseBtn = new JButton("Browse...");
dictionaryBrowseBtn.addActionListener(e -> {
// dictionaries tend to be in same folder as data files...
if (_targetFld != null)
_dictionaryFileChooser.setCurrentDirectory(new File(_targetFld.getText()));
if (_dictionaryFileChooser.showDialog(AbstractProcessingPage.this, "Select") == JFileChooser.APPROVE_OPTION) {
try {
// register the selected dictionary by URI, then re-compute which ones are still missing
NaaccrDictionary dictionary = NaaccrXmlDictionaryUtils.readDictionary(_dictionaryFileChooser.getSelectedFile());
_userDictionaries.put(dictionary.getDictionaryUri(), dictionary);
List<String> neededDictionaries = new ArrayList<>(getRequiredUserDefinedDictionaries(new File(_sourceFld.getText())));
neededDictionaries.remove(dictionary.getDictionaryUri());
if (neededDictionaries.isEmpty()) {
_dictionaryDisclaimerLbl.setText(_TXT_DICT_PROVIDED);
_dictionaryDisclaimerLbl.setForeground(Color.BLACK);
_dictionaryLbl.setText("");
_dictionaryPnl.setVisible(false);
}
else
_dictionaryLbl.setText(String.join(" ", neededDictionaries));
}
catch (IOException ex) {
String msg = "Unexpected error reading dictionary\n\n" + ex.getMessage();
JOptionPane.showMessageDialog(this, msg, "Error", JOptionPane.ERROR_MESSAGE);
}
}
});
dictionarySelectionPnl.add(dictionaryBrowseBtn);
_dictionaryPnl.add(dictionarySelectionPnl);
allOptionsPnl.add(_dictionaryPnl);
// the process button kicks off performAnalysis()
JPanel controlsPnl = new JPanel(new FlowLayout(FlowLayout.LEADING, 0, 0));
controlsPnl.setBorder(new EmptyBorder(25, 300, 10, 0));
JButton processBtn = new JButton("Process Source File");
processBtn.addActionListener(e -> performAnalysis());
controlsPnl.add(processBtn);
allOptionsPnl.add(controlsPnl);
// need this to make sure process button is visible; shouldn't need it, but I can't make it work otherwise!
pnl.add(new JLabel(" "), BorderLayout.SOUTH);
JPanel wrapperPnl = new JPanel(new BorderLayout());
wrapperPnl.add(pnl, BorderLayout.NORTH);
return wrapperPnl;
}
/**
 * Whether the options panel shows a target (output) file row; subclasses that do not
 * produce an output file can override this to return false (in which case _targetFld
 * and _compressionBox remain null).
 */
protected boolean showTargetInput() {
return true;
}
/**
 * Returns the URIs of the user-defined dictionaries referenced by the given data file;
 * this default implementation returns an empty list (no dictionaries required).
 * @param file source data file being analyzed
 * @return list of required dictionary URIs, possibly empty, never null
 */
protected List<String> getRequiredUserDefinedDictionaries(File file) {
return Collections.emptyList();
}
/**
 * Creates the processing-options component shown on the options panel.
 */
protected abstract StandaloneOptions createOptions();
/**
 * Builds the center "processing" card: status cards (analysis/progress/results/interrupted/error)
 * on top, and the Warnings/Summary tabs below.
 * @return the processing panel
 */
private JPanel buildProcessingPanel() {
    JPanel processingPnl = new JPanel(new BorderLayout());

    // status cards shown above the warnings area
    _northProcessingPnl = new JPanel();
    _northProcessingLayout = new CardLayout();
    _northProcessingPnl.setLayout(_northProcessingLayout);
    _northProcessingPnl.add(_NORTH_PROCESSING_PANEL_ID_ANALYSIS, buildProcessingAnalysisPanel());
    _northProcessingPnl.add(_NORTH_PROCESSING_PANEL_ID_PROGRESS, buildProcessingProgressPanel());
    _northProcessingPnl.add(_NORTH_PROCESSING_PANEL_ID_RESULTS, buildProcessingResultsPanel());
    _northProcessingPnl.add(_NORTH_PROCESSING_PANEL_ID_INTERRUPTED, buildProcessingInterruptedPanel());
    _northProcessingPnl.add(_NORTH_PROCESSING_PANEL_ID_ERROR, buildProcessingErrorPanel());
    processingPnl.add(_northProcessingPnl, BorderLayout.NORTH);

    // two tabs sharing the same look: raw warnings and aggregated summary
    JPanel tabsWrapperPnl = new JPanel(new BorderLayout());
    _warningsPane = new JTabbedPane();
    tabsWrapperPnl.add(_warningsPane, BorderLayout.CENTER);
    _warningsTextArea = createWarningsArea();
    _warningsPane.add("Warnings", wrapInScrollPanel(_warningsTextArea));
    _warningsSummaryTextArea = createWarningsArea();
    _warningsPane.add("Summary", wrapInScrollPanel(_warningsSummaryTextArea));
    processingPnl.add(tabsWrapperPnl, BorderLayout.CENTER);

    return processingPnl;
}

// creates a read-only gray text area used by both the warnings and the summary tabs
private JTextArea createWarningsArea() {
    JTextArea area = new JTextArea("Processing not starting...");
    area.setForeground(Color.GRAY);
    area.setEditable(false);
    area.setBorder(new EmptyBorder(2, 3, 2, 3));
    return area;
}

// wraps the given text area into a scroll pane sitting in a borderless panel
private JPanel wrapInScrollPanel(JTextArea area) {
    JPanel wrapperPnl = new JPanel(new BorderLayout());
    wrapperPnl.setBorder(null);
    JScrollPane scrollPane = new JScrollPane(area);
    scrollPane.setBorder(new LineBorder(Color.LIGHT_GRAY));
    wrapperPnl.add(scrollPane, BorderLayout.CENTER);
    return wrapperPnl;
}
/**
 * Builds the status card shown while the source file is being analyzed: an italic caption,
 * an (indeterminate) progress bar and a Cancel button.
 * @return the processing-analysis panel
 */
private JPanel buildProcessingAnalysisPanel() {
    JPanel analysisPnl = new JPanel(new BorderLayout());
    analysisPnl.setBorder(new EmptyBorder(0, 0, 10, 0));

    JPanel captionPnl = new JPanel(new FlowLayout(FlowLayout.LEADING, 0, 0));
    captionPnl.setBorder(null);
    captionPnl.add(Standalone.createItalicLabel("Analyzing file (this can take a while, especially when reading network resources)..."));
    analysisPnl.add(captionPnl, BorderLayout.NORTH);

    JPanel bodyPnl = new JPanel(new BorderLayout());
    JPanel barPnl = new JPanel(new BorderLayout());
    barPnl.setBorder(new EmptyBorder(5, 0, 5, 0));
    _analysisBar = new JProgressBar();
    barPnl.add(_analysisBar, BorderLayout.CENTER);
    bodyPnl.add(barPnl, BorderLayout.CENTER);

    JPanel buttonsPnl = new JPanel(new FlowLayout(FlowLayout.LEADING, 0, 0));
    buttonsPnl.setBorder(new EmptyBorder(0, 10, 0, 0));
    JButton cancelBtn = new JButton("Cancel");
    cancelBtn.addActionListener(e -> {
        // stop the background analysis and reset the page to its initial state
        if (_analysisWorker != null)
            _analysisWorker.cancel(true);
        _analysisWorker = null;
        _analysisBar.setMinimum(0);
        _analysisBar.setIndeterminate(true);
        _sourceFld.setText(null);
        _northLayout.show(_northPnl, _NORTH_PANEL_ID_NO_FILE);
        _centerLayout.show(_centerPnl, _CENTER_PANEL_ID_HELP);
    });
    buttonsPnl.add(cancelBtn);
    bodyPnl.add(buttonsPnl, BorderLayout.EAST);

    analysisPnl.add(bodyPnl, BorderLayout.CENTER);
    return analysisPnl;
}
/**
 * Builds the status card shown while the source file is being processed: an italic caption,
 * a determinate progress bar and a Cancel button.
 * @return the processing-progress panel
 */
private JPanel buildProcessingProgressPanel() {
    JPanel progressCardPnl = new JPanel(new BorderLayout());
    progressCardPnl.setBorder(new EmptyBorder(0, 0, 10, 0));

    JPanel captionPnl = new JPanel(new FlowLayout(FlowLayout.LEADING, 0, 0));
    captionPnl.setBorder(null);
    captionPnl.add(Standalone.createItalicLabel("Processing file..."));
    progressCardPnl.add(captionPnl, BorderLayout.NORTH);

    JPanel bodyPnl = new JPanel(new BorderLayout());
    JPanel barPnl = new JPanel(new BorderLayout());
    barPnl.setBorder(new EmptyBorder(5, 0, 5, 0));
    _processingBar = new JProgressBar();
    barPnl.add(_processingBar, BorderLayout.CENTER);
    bodyPnl.add(barPnl, BorderLayout.CENTER);

    JPanel buttonsPnl = new JPanel(new FlowLayout(FlowLayout.LEADING, 0, 0));
    buttonsPnl.setBorder(new EmptyBorder(0, 10, 0, 0));
    JButton cancelBtn = new JButton("Cancel");
    cancelBtn.addActionListener(e -> {
        // stop the background processing and reset the page to its initial state
        if (_processingWorker != null)
            _processingWorker.cancel(true);
        _processingWorker = null;
        _sourceFld.setText(null);
        _northLayout.show(_northPnl, _NORTH_PANEL_ID_NO_FILE);
        _centerLayout.show(_centerPnl, _CENTER_PANEL_ID_HELP);
    });
    buttonsPnl.add(cancelBtn);
    bodyPnl.add(buttonsPnl, BorderLayout.EAST);

    progressCardPnl.add(bodyPnl, BorderLayout.CENTER);
    return progressCardPnl;
}
/**
 * Builds the status card showing the two result lines once processing completed successfully.
 * @return the processing-results panel
 */
private JPanel buildProcessingResultsPanel() {
    JPanel resultsPnl = new JPanel();
    resultsPnl.setBorder(new EmptyBorder(10, 0, 10, 0));
    resultsPnl.setLayout(new BoxLayout(resultsPnl, BoxLayout.Y_AXIS));
    // first row: created file, size and timing summary
    JPanel firstRowPnl = new JPanel(new FlowLayout(FlowLayout.LEADING, 0, 0));
    _processingResult1Lbl = new JLabel(" ");
    firstRowPnl.add(_processingResult1Lbl);
    resultsPnl.add(firstRowPnl);
    // second row: optional extra information (subclass-provided)
    JPanel secondRowPnl = new JPanel(new FlowLayout(FlowLayout.LEADING, 0, 0));
    _processingResult2Lbl = new JLabel();
    secondRowPnl.add(_processingResult2Lbl);
    resultsPnl.add(secondRowPnl);
    return resultsPnl;
}
/**
 * Builds the status card shown when the processing was interrupted by the user.
 * @return the processing-interrupted panel
 */
private JPanel buildProcessingInterruptedPanel() {
    JPanel interruptedPnl = new JPanel(new FlowLayout(FlowLayout.LEADING, 0, 0));
    JLabel interruptedLbl = Standalone.createBoldLabel("Processing was interrupted.");
    interruptedPnl.add(interruptedLbl);
    return interruptedPnl;
}
/**
 * Builds the status card shown when the processing fails; the actual error text
 * is set later via _processingErrorLbl.
 * @return the processing-error panel
 */
private JPanel buildProcessingErrorPanel() {
    JPanel errorPnl = new JPanel();
    errorPnl.setBorder(new EmptyBorder(10, 10, 0, 0));
    errorPnl.setLayout(new FlowLayout(FlowLayout.LEADING, 0, 0));
    JLabel captionLbl = Standalone.createBoldLabel("Error processing file: ");
    captionLbl.setForeground(Color.RED);
    errorPnl.add(captionLbl);
    _processingErrorLbl = new JLabel(" ");
    _processingErrorLbl.setForeground(Color.RED);
    errorPnl.add(_processingErrorLbl);
    return errorPnl;
}
/**
 * Quick pre-analysis run right after a source file is selected: detects the format,
 * fills the analysis-results card with placeholders, resets the dictionaries section
 * and pre-populates the target file name and compression selector.
 */
private void performPreAnalysis() {
_centerPnl.setVisible(false);
File file = new File(_sourceFld.getText());
NaaccrFormat format = getFormatForInputFile(file);
if (format != null) { // if it's null, an error has already been reported to the user
if (file.getName().toLowerCase().endsWith(".gz") || file.getName().toLowerCase().endsWith(".xz"))
_formatLbl.setText("Compressed " + format.getDisplayName());
else
_formatLbl.setText(format.getDisplayName());
// line/patient/tumor counts are only known after the full analysis; show placeholders for now
_numLinesLbl.setText("<?>");
if (_numPatLbl != null)
_numPatLbl.setText("<?>");
if (_numTumLbl != null)
_numTumLbl.setText("<?>");
_fileSizeLbl.setText(Standalone.formatFileSize(file.length()));
_northLayout.show(_northPnl, _NORTH_PANEL_ID_ANALYSIS_RESULTS);
_centerPnl.setVisible(true);
// selecting a new source file invalidates any previously provided dictionaries
_userDictionaries.clear();
List<String> requiredDictionaries = getRequiredUserDefinedDictionaries(file);
if (!requiredDictionaries.isEmpty()) {
_dictionaryDisclaimerLbl.setText(_TXT_DICT_NEEDED);
_dictionaryDisclaimerLbl.setForeground(new Color(150, 0, 0));
_dictionaryLbl.setText(String.join(" ", requiredDictionaries));
_dictionaryPnl.setVisible(true);
}
else {
_dictionaryDisclaimerLbl.setText(_TXT_DICT_NOT_NEEDED);
_dictionaryDisclaimerLbl.setForeground(Color.BLACK);
_dictionaryLbl.setText("");
_dictionaryPnl.setVisible(false);
}
_centerLayout.show(_centerPnl, _CENTER_PANEL_ID_OPTIONS);
// pre-populate the target path (invertFilename presumably comes from AbstractPage — TODO confirm)
// and align the compression selector with the suggested extension
if (_targetFld != null) {
_targetFld.setText(invertFilename(file));
if (_targetFld.getText().endsWith(".gz"))
_compressionBox.setSelectedItem(_COMPRESSION_GZIP);
else if (_targetFld.getText().endsWith(".xz"))
_compressionBox.setSelectedItem(_COMPRESSION_XZ);
else
_compressionBox.setSelectedItem(_COMPRESSION_NONE);
}
}
}
/**
 * Detects the NAACCR format of the given source file; implementations return null after
 * reporting an error to the user when the format cannot be determined.
 */
protected abstract NaaccrFormat getFormatForInputFile(File file);
/**
 * Runs the full pre-processing analysis: confirms with the user if required dictionaries
 * are missing or if the target file would be overwritten, resets the processing GUI, then
 * counts the lines (and patient/tumor tags for XML sources) in a background worker before
 * chaining into {@link #performProcessing(File, long)}.
 * <p>
 * Fix: the label updates at the end of the analysis used to be done directly on the worker
 * thread; Swing components must only be touched on the event dispatch thread, so they are
 * now posted via SwingUtilities.invokeLater. The runnable is queued before the worker's
 * done() notification, so performProcessing (called from done()) still sees the updated
 * line count.
 */
private void performAnalysis() {
    // warn the user if some required user-defined dictionaries haven't been provided yet
    List<String> requiredDictionaries = new ArrayList<>(getRequiredUserDefinedDictionaries(new File(_sourceFld.getText())));
    requiredDictionaries.removeAll(_userDictionaries.keySet());
    if (!requiredDictionaries.isEmpty()) {
        StringBuilder message = new StringBuilder("The data file requires the following user-defined dictionaries:\n");
        for (String requiredDictionary : requiredDictionaries)
            message.append("\n - ").append(requiredDictionary);
        message.append("\n\nWithout those dictionaries, some data items might not be properly recognized and will be ignored.");
        message.append("\n\nAre you sure you want to continue without providing the dictionaries?");
        int result = JOptionPane.showConfirmDialog(this, message, "Confirmation",
                JOptionPane.YES_NO_OPTION);
        if (result != JOptionPane.YES_OPTION)
            return;
    }
    // confirm overwriting an existing target file
    if (_targetFld != null && new File(_targetFld.getText()).exists()) {
        int result = JOptionPane.showConfirmDialog(this, "Target file already exists, are you sure you want to replace it?", "Confirmation", JOptionPane.YES_NO_OPTION);
        if (result != JOptionPane.YES_OPTION)
            return;
    }
    // reset the processing view (results, warnings, progress bar)
    _centerPnl.setVisible(true);
    _centerLayout.show(_centerPnl, _CENTER_PANEL_ID_PROCESSING);
    _processingResult1Lbl.setText(null);
    _processingResult2Lbl.setText(null);
    _northProcessingPnl.setVisible(true);
    _northProcessingLayout.show(_northProcessingPnl, _NORTH_PROCESSING_PANEL_ID_ANALYSIS);
    _warningsTextArea.setText(null);
    _warningsTextArea.setForeground(new Color(150, 0, 0));
    _warningsSummaryTextArea.setText("Processing not done...");
    _warningsSummaryTextArea.setForeground(Color.GRAY);
    _analysisBar.setMinimum(0);
    _analysisBar.setIndeterminate(true);
    final File srcFile = new File(_sourceFld.getText());
    final long start = System.currentTimeMillis();
    _analysisWorker = new SwingWorker<Void, Void>() {
        @Override
        protected Void doInBackground() throws Exception {
            // count lines, and (for XML sources) Patient/Tumor opening tags
            int numLines = 0, numPat = 0, numTum = 0;
            try (LineNumberReader reader = new LineNumberReader(NaaccrXmlUtils.createReader(srcFile))) {
                String line = reader.readLine();
                while (line != null) {
                    numLines++;
                    if (_numPatLbl != null)
                        numPat += StringUtils.countMatches(line, "<Patient");
                    if (_numTumLbl != null)
                        numTum += StringUtils.countMatches(line, "<Tumor");
                    line = reader.readLine();
                }
            }
            // Swing components must be updated on the EDT, not on this worker thread;
            // this runnable is queued before the worker posts its done() event, so the
            // labels are guaranteed to be set before performProcessing() reads them
            final int finalNumLines = numLines, finalNumPat = numPat, finalNumTum = numTum;
            SwingUtilities.invokeLater(() -> {
                _numLinesLbl.setText(Standalone.formatNumber(finalNumLines));
                if (_numPatLbl != null)
                    _numPatLbl.setText(Standalone.formatNumber(finalNumPat));
                if (_numTumLbl != null)
                    _numTumLbl.setText(Standalone.formatNumber(finalNumTum));
            });
            return null;
        }

        @Override
        protected void done() {
            try {
                get(); // re-throws any exception raised in doInBackground
                _analysisBar.setMinimum(0);
                _analysisBar.setIndeterminate(true);
                // chain into the actual processing, passing the time spent on analysis
                performProcessing(srcFile, System.currentTimeMillis() - start);
            }
            catch (CancellationException | InterruptedException e) {
                // ignored - the Cancel button already reset the GUI
            }
            catch (ExecutionException e) {
                reportAnalysisError(e.getCause());
            }
            finally {
                _analysisWorker = null;
            }
        }
    };
    _analysisWorker.execute();
}
/**
 * Runs the actual file processing in a background worker: streams patients through the
 * subclass's runProcessing(), publishes read patients so their validation errors can be
 * shown incrementally, and renders the final results/summary when done.
 * @param srcFile source file to process
 * @param analysisTime time (ms) spent in the analysis phase, reported in the result line
 */
private void performProcessing(final File srcFile, final long analysisTime) {
_northProcessingLayout.show(_northProcessingPnl, _NORTH_PROCESSING_PANEL_ID_PROGRESS);
_processingBar.setMinimum(0);
// relies on the analysis phase having set _numLinesLbl to a formatted number (commas stripped)
_processingBar.setMaximum(Integer.parseInt(_numLinesLbl.getText().replaceAll(",", "")));
_processingBar.setValue(0);
_maxWarningsReached = _maxWarningsDiscAdded = false;
_warningStats.clear();
_warningStatsDetails.clear();
_processingWorker = new SwingWorker<Void, Patient>() {
@Override
protected Void doInBackground() throws Exception {
final File targetFile = _targetFld == null ? null : new File(fixFileExtension(_targetFld.getText(), (String)_compressionBox.getSelectedItem()));
List<NaaccrDictionary> userDictionaries = new ArrayList<>(_userDictionaries.values());
final long start = System.currentTimeMillis();
final AtomicInteger numPatients = new AtomicInteger();
final AtomicInteger numTumors = new AtomicInteger();
NaaccrFormat format = getFormatForInputFile(srcFile);
NaaccrDictionary baseDictionary = format == null ? null : NaaccrXmlDictionaryUtils.getBaseDictionaryByVersion(format.getNaaccrVersion());
// delegate the actual work to the subclass; the observer feeds this worker
runProcessing(srcFile, targetFile, _guiOptions.getOptions(baseDictionary, userDictionaries), userDictionaries, new NaaccrObserver() {
@Override
public void patientRead(Patient patient) {
// hand the patient to process() on the EDT so its warnings can be displayed
publish(patient);
}
@Override
public void patientWritten(Patient patient) {
numPatients.getAndIncrement();
numTumors.getAndAdd(patient.getTumors().size());
}
});
// update GUI
SwingUtilities.invokeLater(() -> {
long processingTime = System.currentTimeMillis() - start;
String size = targetFile == null ? null : Standalone.formatFileSize(targetFile.length());
String path = targetFile == null ? null : targetFile.getPath();
_processingResult1Lbl.setText(getProcessingResultRow1Text(path, analysisTime, processingTime, size));
String row2Text = getProcessingResultRow2Text(numPatients.get(), numTumors.get());
if (row2Text != null)
_processingResult2Lbl.setText(row2Text);
_northProcessingLayout.show(_northProcessingPnl, _NORTH_PROCESSING_PANEL_ID_RESULTS);
});
return null;
}
@Override
protected void done() {
try {
get();
// fill the two warning views now that processing has completed
SwingUtilities.invokeLater(() -> {
if (_warningsTextArea.getText().isEmpty()) {
_warningsTextArea.setForeground(Color.GRAY);
_warningsTextArea.setText("Found no warning, well done!");
}
if (_warningStats.isEmpty()) {
// NOTE(review): this sets _warningsTextArea's foreground while setting the summary
// area's text; presumably _warningsSummaryTextArea was intended - verify
_warningsTextArea.setForeground(Color.GRAY);
_warningsSummaryTextArea.setText("Found no warning, well done!");
}
else {
// build the per-code summary, in the order defined by NaaccrErrorUtils
_warningsSummaryTextArea.setForeground(Color.BLACK);
StringBuilder buf = new StringBuilder("Validation warning counts (0 counts not displayed):\n\n");
for (String code : NaaccrErrorUtils.getAllValidationErrors().keySet()) {
int count = _warningStats.containsKey(code) ? _warningStats.get(code).get() : 0;
if (count > 0) {
buf.append("   ").append(code).append(": ").append(Standalone.formatNumber(count)).append("\n");
if (_warningStatsDetails.containsKey(code)) {
List<String> list = new ArrayList<>(_warningStatsDetails.get(code));
Collections.sort(list);
buf.append("      ").append(list).append("\n");
}
}
}
_warningsSummaryTextArea.setText(buf.toString());
}
});
}
catch (CancellationException | InterruptedException e) {
_warningsSummaryTextArea.setText("Processing interrupted...");
}
catch (ExecutionException e) {
reportProcessingError(e.getCause());
_warningsSummaryTextArea.setText("Processing error...");
}
finally {
_processingWorker = null;
}
}
@Override
protected void process(final List<Patient> patients) {
final StringBuilder buf = new StringBuilder();
// extract errors
for (Patient patient : patients) {
for (NaaccrValidationError error : patient.getAllValidationErrors()) {
// this will be shown in the warnings view
buf.append("Line ").append(error.getLineNumber() == null ? "N/A" : error.getLineNumber());
if (error.getNaaccrId() != null) {
buf.append(", item '").append(error.getNaaccrId()).append("'");
if (error.getNaaccrNum() != null)
buf.append(" (#").append(error.getNaaccrNum()).append(")");
}
buf.append(": ").append(error.getMessage());
// long values are truncated to 250 characters to keep the view readable
if (error.getValue() != null && !error.getValue().isEmpty()) {
buf.append(" [").append(error.getValue().length() > 250 ? (error.getValue().substring(0, 250) + "...") : error.getValue()).append("]");
}
buf.append("\n");
// this will be used in the summary view
AtomicInteger count = _warningStats.get(error.getCode());
if (count == null)
_warningStats.put(error.getCode(), new AtomicInteger(1));
else
count.incrementAndGet();
// let's also keep track of more detailed information
if (error.getNaaccrId() != null)
_warningStatsDetails.computeIfAbsent(error.getCode(), k -> new HashSet<>()).add(error.getNaaccrId());
}
}
// update GUI (process bar and text area)
SwingUtilities.invokeLater(() -> {
// technically this should use the "endLineNumber", not the "startLineNumber", but that's close enough for a progress bar...
int processedLineNumber = 0;
for (Patient patient : patients)
processedLineNumber = Math.max(processedLineNumber, patient.getStartLineNumber());
_processingBar.setValue(processedLineNumber);
// cap the warnings view at ~5000 lines; after that a single disclaimer is appended
if (!_maxWarningsReached) {
_warningsTextArea.append(buf.toString());
if (_warningsTextArea.getLineCount() > 5000)
_maxWarningsReached = true;
}
else if (!_maxWarningsDiscAdded) {
_warningsTextArea.append("Reached maximum number of warnings that can be displayed; use the summary instead (available once the processing is done)...");
_maxWarningsDiscAdded = true;
}
});
}
};
_processingWorker.execute();
}
/**
 * Runs the actual file processing; implemented by the concrete page subclasses.
 *
 * @param source input data file
 * @param target output data file to create
 * @param options processing options
 * @param dictionaries user-defined dictionaries to apply (may be empty)
 * @param observer observer notified as patients are read/written
 * @throws NaaccrIOException if anything goes wrong during the processing
 */
protected abstract void runProcessing(File source, File target, NaaccrOptions options, List<NaaccrDictionary> dictionaries, NaaccrObserver observer) throws NaaccrIOException;
/**
 * Builds the first line of the success message, reporting the created file, its size
 * and the total/per-phase durations.
 */
protected String getProcessingResultRow1Text(String path, long analysisTime, long processingTime, String size) {
    String total = Standalone.formatTime(analysisTime + processingTime);
    return "Successfully created \"" + path + "\" (" + size + ") in " + total
            + " (analysis: " + Standalone.formatTime(analysisTime)
            + ", processing: " + Standalone.formatTime(processingTime) + ")";
}
/**
 * Optional second line of the success message; this default implementation returns
 * null (no second line). Subclasses may override it to report patient/tumor counts.
 *
 * @param numPatients number of patients processed
 * @param numTumors number of tumors processed
 * @return text for the second row, or null for none
 */
protected String getProcessingResultRow2Text(int numPatients, int numTumors) {
    return null;
}
/**
 * Switches the page into its analysis-error state: hides the center panel, stops the
 * indeterminate analysis bar and shows the extracted message in the north error card.
 * Touches Swing components, so it is expected to run on the EDT.
 */
protected void reportAnalysisError(Throwable e) {
    _centerPnl.setVisible(false);
    _analysisBar.setIndeterminate(false);
    _analysisErrorLbl.setText(extractMessageFromException(e));
    _northLayout.show(_northPnl, _NORTH_PANEL_ID_ERROR);
}
/**
 * Switches the processing area to its error card with the extracted message.
 * Touches Swing components, so it is expected to run on the EDT.
 */
protected void reportProcessingError(Throwable e) {
    _processingErrorLbl.setText(extractMessageFromException(e));
    _northProcessingLayout.show(_northProcessingPnl, _NORTH_PROCESSING_PANEL_ID_ERROR);
}
/**
 * Extracts a user-displayable message from an exception. NAACCR I/O errors that carry
 * a line number get it appended; a missing message falls back to "unexpected error".
 */
private String extractMessageFromException(Throwable e) {
    if (e == null)
        return "unexpected error";
    if (e instanceof NaaccrIOException) {
        NaaccrIOException ioe = (NaaccrIOException)e;
        // surface the offending line number when the exception provides one
        if (ioe.getMessage() != null && ioe.getLineNumber() != null)
            return ioe.getMessage() + " at line " + ioe.getLineNumber();
    }
    return e.getMessage() != null ? e.getMessage() : "unexpected error";
}
/**
 * Adjusts the file name so its extension matches the requested compression: adds
 * ".gz"/".xz" when needed and removes the other compression extension.
 * <p>
 * Note: only the <em>trailing</em> extension is stripped. The previous implementation
 * used {@link String#replace}, which removes every occurrence of ".gz"/".xz" anywhere
 * in the name (e.g. "a.xz.data.xz" would lose both), and inconsistently re-read
 * {@code filename} instead of {@code result} in one branch.
 *
 * @param filename original file name (never null)
 * @param compression one of the _COMPRESSION_* constants
 * @return the adjusted file name
 */
private String fixFileExtension(String filename, String compression) {
    String result = filename;
    if (_COMPRESSION_GZIP.equals(compression)) {
        result = stripSuffix(result, ".xz");
        if (!result.endsWith(".gz"))
            result = result + ".gz";
    }
    else if (_COMPRESSION_XZ.equals(compression)) {
        result = stripSuffix(result, ".gz");
        if (!result.endsWith(".xz"))
            result = result + ".xz";
    }
    else if (_COMPRESSION_NONE.equals(compression)) {
        if (result.endsWith(".gz"))
            result = stripSuffix(result, ".gz");
        else if (result.endsWith(".xz"))
            result = stripSuffix(result, ".xz");
    }
    return result;
}

/** Removes {@code suffix} from the end of {@code s}, or returns {@code s} unchanged. */
private static String stripSuffix(String s, String suffix) {
    return s.endsWith(suffix) ? s.substring(0, s.length() - suffix.length()) : s;
}
}
| |
package com.tamzid.android.spotifystreamer;
import android.app.Activity;
import android.app.Dialog;
import android.app.DialogFragment;
import android.content.BroadcastReceiver;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.ServiceConnection;
import android.graphics.Bitmap;
import android.graphics.drawable.Drawable;
import android.os.Bundle;
import android.os.Handler;
import android.os.IBinder;
import android.os.Looper;
import android.support.annotation.Nullable;
import android.support.v4.content.LocalBroadcastManager;
import android.support.v7.graphics.Palette;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.Window;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.SeekBar;
import android.widget.TextView;
import com.squareup.picasso.Picasso;
import com.squareup.picasso.Target;
import java.util.ArrayList;
import java.util.List;
/** Auto-play through a list of passed tracks.*/
public class PlayerDialogFragment extends DialogFragment {
private static final String LOG_TAG = PlayerDialogFragment.class.getSimpleName();
// Save instance state
private static final String SAVESTATE_TRACK_NOW_PLAYING = "savestateTrackNowPlaying";
private static final String SAVESTATE_TRACKLIST = "savestateTrackList";
private static final String SAVESTATE_IS_MEDIA_PLAYER_SERVICE_BOUND = "saveStateIsMediaPlayerServiceBound";
private static final String SAVESTATE_MAX_DURATION = "saveStateMaxDuration";
private static final String SAVESTATE_CURRENT_PROGRESS = "saveStateCurrentProgress";
// Fragment initialization parameters
private static final String ARG_TRACKLIST = "tracklist";
private static final String ARG_TRACK_NOW_PLAYING = "trackNowPlaying";
// Utilities
private List<TrackBundle> mTrackList;
private int mTrackNowPlaying;
private MediaPlayerService mMediaPlayerService;
private Intent mPlayMusicIntent;
private boolean mIsMediaPlayerServiceBound = false;
private ServiceConnection mMediaPlayerServiceConnection;
private boolean mIsPlaying = false;
private BroadcastReceiver mBroadcastReceiver;
// UI
private LinearLayout mBackgroundLinearLayout;
private TextView mArtistNameTextView;
private TextView mAlbumTitleTextView;
private ImageView mAlbumArtImageView;
private TextView mTrackTitleTextView;
private SeekBar mSeekBar;
private TextView mElapsedTimeTextView;
private TextView mDurationTextView;
private ImageButton mPlayPauseImageButton;
// Picasso target that swaps in the album art and tints the background with the
// image's dark-vibrant palette color. Held as a field because Picasso keeps only a
// weak reference to its Target (per Picasso docs); an inline anonymous target could
// be garbage-collected before the load completes.
private final Target mTarget = new Target() {
    @Override
    public void onBitmapLoaded(Bitmap bitmap, Picasso.LoadedFrom from) {
        mAlbumArtImageView.setImageBitmap(bitmap);
        Palette.generateAsync(bitmap, new Palette.PaletteAsyncListener() {
            @Override
            public void onGenerated(Palette palette) {
                // NOTE(review): getResources() throws IllegalStateException if the
                // fragment is detached when this async callback fires — consider an
                // isAdded() guard.
                int vibrantDark = palette.getDarkVibrantColor(getResources().getColor(R.color.background_material_dark));
                mBackgroundLinearLayout.setBackgroundColor(vibrantDark);
            }
        });
    }
    @Override
    public void onBitmapFailed(Drawable errorDrawable) {
        // no-op: keep whatever art/background is currently shown
    }
    @Override
    public void onPrepareLoad(Drawable placeHolderDrawable) {
        // no-op: no placeholder is used
    }
};
private int mMaxDuration = -1;
private int mCurrentProgress = -1;
private final Handler mMediaPlayerServiceHandler = new Handler(Looper.getMainLooper());
/**
 * Create and return a new instance of this fragment.
 *
 * @param trackList A list of {@link TrackBundle}s containing relevant track information
 * @param trackNowPlaying Index of currently playing track from {@code trackList}
 * @return A new instance of fragment PlayerDialogFragment.
 */
public static PlayerDialogFragment newInstance(List<TrackBundle> trackList, int trackNowPlaying) {
    PlayerDialogFragment fragment = new PlayerDialogFragment();
    Bundle args = new Bundle();
    // Copy into a concrete ArrayList instead of casting: the caller may hand us any
    // List implementation, and the old unchecked cast threw ClassCastException then.
    args.putParcelableArrayList(ARG_TRACKLIST, new ArrayList<>(trackList));
    args.putInt(ARG_TRACK_NOW_PLAYING, trackNowPlaying);
    fragment.setArguments(args);
    return fragment;
}
/**
 * Restores this fragment's fields from a previously saved instance-state bundle.
 * Counterpart of {@code onSaveInstanceState}; keys must stay in sync with it.
 */
private void loadSavedInstanceState(Bundle savedInstanceState) {
    mTrackList = savedInstanceState.getParcelableArrayList(SAVESTATE_TRACKLIST);
    mTrackNowPlaying = savedInstanceState.getInt(SAVESTATE_TRACK_NOW_PLAYING);
    mIsMediaPlayerServiceBound = savedInstanceState.getBoolean(SAVESTATE_IS_MEDIA_PLAYER_SERVICE_BOUND);
    mMaxDuration = savedInstanceState.getInt(SAVESTATE_MAX_DURATION);
    mCurrentProgress = savedInstanceState.getInt(SAVESTATE_CURRENT_PROGRESS);
}
@Override
public void onAttach(Activity activity) {
    super.onAttach(activity);
    // Kick off (or re-bind to) the playback service as soon as we are attached.
    startMediaPlayerService();
    // Start runnable which manages the seekbar once the service is bound
    // NOTE(review): this callback is never removed (no matching removeCallbacks
    // anywhere in the class), so the 1s update loop keeps running after detach —
    // consider removing it in onDetach(). Also, onAttach(Activity) is deprecated in
    // newer support libraries in favor of onAttach(Context).
    mMediaPlayerServiceHandler.postDelayed(mMusicPlayerUpdaterRunnable, 1000);
}
@Override
public void onActivityCreated(Bundle savedInstanceState) {
    super.onActivityCreated(savedInstanceState);
    // State is also restored in onCreate(); repeating it here covers the case where
    // the saved bundle is only delivered at this later lifecycle stage.
    if (savedInstanceState != null && !savedInstanceState.isEmpty()) {
        // If savedInstanceState has data, use it
        Debug.logD(LOG_TAG, "used saveInstanceState");
        loadSavedInstanceState(savedInstanceState);
    }
}
@Override
public void onDestroyView() {
    // Workaround for tablet to retain Dialog on rotation: with setRetainInstance(true)
    // the framework would otherwise dismiss the dialog when the view is destroyed, so
    // clear the dismiss message before the super call tears the view down.
    if (getDialog() != null && getRetainInstance()) {
        getDialog().setDismissMessage(null);
    }
    super.onDestroyView();
}
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // Retain the fragment instance so playback state survives configuration changes.
    setRetainInstance(true);
    if (savedInstanceState != null && !savedInstanceState.isEmpty()) {
        // If savedInstanceState has data, use it
        Debug.logD(LOG_TAG, "used saveInstanceState");
        loadSavedInstanceState(savedInstanceState);
    } else if (getArguments() != null) {
        // Otherwise, get it from the argument bundle
        mTrackList = getArguments().getParcelableArrayList(ARG_TRACKLIST);
        mTrackNowPlaying = getArguments().getInt(ARG_TRACK_NOW_PLAYING);
        Log.d(LOG_TAG, "Playing: " + mTrackNowPlaying);
        for (TrackBundle trackBundle : mTrackList) {
            Log.d(LOG_TAG, "Contains: " + trackBundle.name);
        }
    }
    // Receiver that re-enables the play/pause button and flips the playing flag once
    // the service broadcasts that the player is prepared. Registered in onStart().
    mBroadcastReceiver = new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            if (intent.getBooleanExtra(MediaPlayerService.INTENT_MEDIA_PLAYER_SERVICE_IS_PREPARED, false)) {
                mPlayPauseImageButton.setEnabled(true);
                mIsPlaying = true;
                setPlayPauseIcon();
            }
        }
    };
}
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
    Dialog dialog = super.onCreateDialog(savedInstanceState);
    // Request no titlebar when using as a dialog
    // http://stackoverflow.com/questions/15277460/how-to-create-a-dialogfragment-without-title
    // Dialog.getWindow() may return null, so guard before requesting the feature.
    Window window = dialog.getWindow();
    if (window != null) {
        window.requestFeature(Window.FEATURE_NO_TITLE);
    }
    return dialog;
}
@Nullable
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
    // Inflate the layout for this fragment
    View v = inflater.inflate(R.layout.fragment_dialog_player, container, false);
    // Look all views up once; bindView() below populates them with the current track.
    mBackgroundLinearLayout = (LinearLayout) v.findViewById(R.id.fragment_dialog_player_background);
    mArtistNameTextView = (TextView) v.findViewById(R.id.fragment_player_artist_textview);
    mAlbumTitleTextView = (TextView) v.findViewById(R.id.fragment_player_album_textview);
    mAlbumArtImageView = (ImageView) v.findViewById(R.id.fragment_player_albumart_imageview);
    mTrackTitleTextView = (TextView) v.findViewById(R.id.fragment_player_track_textview);
    mSeekBar = (SeekBar) v.findViewById(R.id.fragment_player_seekbar);
    mElapsedTimeTextView = (TextView) v.findViewById(R.id.fragment_player_elapsedtime_textview);
    mDurationTextView = (TextView) v.findViewById(R.id.fragment_player_duration_textview);
    bindView();
    // Transport controls: previous / play-pause / next.
    ImageButton previousImageButton = (ImageButton) v.findViewById(R.id.fragment_player_previous_imagebutton);
    previousImageButton.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            previousTrack();
        }
    });
    mPlayPauseImageButton = (ImageButton) v.findViewById(R.id.fragment_player_playpause_imagebutton);
    mPlayPauseImageButton.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            pauseOrPlayCurrentTrack();
        }
    });
    ImageButton nextImageButton = (ImageButton) v.findViewById(R.id.fragment_player_next_imagebutton);
    nextImageButton.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            nextTrack();
        }
    });
    setPlayPauseIcon();
    // Only user-initiated drags seek; programmatic updates from the updater runnable
    // arrive with fromUser == false and are ignored.
    mSeekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
        @Override
        public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
            if (fromUser) {
                seekTo(progress);
            }
        }
        @Override
        public void onStartTrackingTouch(SeekBar seekBar) {
        }
        @Override
        public void onStopTrackingTouch(SeekBar seekBar) {
        }
    });
    return v;
}
@Override
public void onSaveInstanceState(Bundle outState) {
    super.onSaveInstanceState(outState);
    // Keys must stay in sync with loadSavedInstanceState().
    // NOTE(review): the cast assumes mTrackList is an ArrayList — true when it came
    // from the argument/saved bundles; verify no caller replaces it with another List.
    outState.putParcelableArrayList(SAVESTATE_TRACKLIST, (ArrayList<TrackBundle>) mTrackList);
    outState.putInt(SAVESTATE_TRACK_NOW_PLAYING, mTrackNowPlaying);
    outState.putBoolean(SAVESTATE_IS_MEDIA_PLAYER_SERVICE_BOUND, mIsMediaPlayerServiceBound);
    outState.putInt(SAVESTATE_MAX_DURATION, mMaxDuration);
    outState.putInt(SAVESTATE_CURRENT_PROGRESS, mCurrentProgress);
    Debug.logD(LOG_TAG, "saved state");
}
@Override
public void onStart() {
    super.onStart();
    // Listen for "player prepared" broadcasts while visible; unregistered in onStop().
    LocalBroadcastManager.getInstance(getActivity()).registerReceiver(mBroadcastReceiver, new IntentFilter(MediaPlayerService.INTENT_MEDIA_PLAYER_SERVICE_BROADCAST));
}
@Override
public void onStop() {
    super.onStop();
    // Mirror of the registration done in onStart().
    LocalBroadcastManager.getInstance(getActivity()).unregisterReceiver(mBroadcastReceiver);
}
@Override
public void onPause() {
    // Cancel any in-flight album-art load so its callbacks don't fire while paused.
    Picasso.with(getActivity()).cancelRequest(mTarget);
    super.onPause();
}
//region LOCAL METHODS=========================================================================
/** Binds new data to the views */
private void bindView() {
    TrackBundle trackPlaying = mTrackList.get(mTrackNowPlaying);
    // NOTE(review): assumes artists and imageUrls are non-empty lists — confirm the
    // upstream code guarantees this before indexing get(0).
    mArtistNameTextView.setText(trackPlaying.artists.get(0));
    mAlbumTitleTextView.setText(trackPlaying.album);
    Picasso.with(getActivity()).load(trackPlaying.imageUrls.get(0)).into(mTarget);
    mTrackTitleTextView.setText(trackPlaying.name);
    // Both time labels start at 0:00; the 1s updater runnable fills in real values.
    mElapsedTimeTextView.setText(MediaPlayerUtilities.formatMillisToString(0));
    mDurationTextView.setText(MediaPlayerUtilities.formatMillisToString(0));
    if (mCurrentProgress == -1) {
        // -1 is the sentinel for "no progress recorded yet"
        mSeekBar.setProgress(0);
    } else {
        mSeekBar.setProgress(mCurrentProgress);
    }
}
/** Starts {@link MediaPlayerService} and begins playing with the current song */
private void startMediaPlayerService() {
    if (mPlayMusicIntent == null && !mIsMediaPlayerServiceBound) {
        // Start the music player service if the service is not already bound
        mPlayMusicIntent = new Intent(getActivity().getApplicationContext(), MediaPlayerService.class);
        mMediaPlayerServiceConnection = new ServiceConnection() {
            @Override
            public void onServiceConnected(ComponentName name, IBinder service) {
                MediaPlayerService.MediaPlayerBinder binder = (MediaPlayerService.MediaPlayerBinder) service;
                mMediaPlayerService = binder.getService();
                mIsMediaPlayerServiceBound = true;
                // NOTE(review): sets the service's paused flag to the *current playing*
                // flag — this looks inverted; verify against MediaPlayerService.playMusic().
                mMediaPlayerService.mIsPaused = mIsPlaying;
                String songUrl = mTrackList.get(mTrackNowPlaying).preview_url;
                mMediaPlayerService.playMusic(songUrl);
                Debug.logD(LOG_TAG, "MediaPlayerService bound");
            }
            @Override
            public void onServiceDisconnected(ComponentName name) {
                mIsMediaPlayerServiceBound = false;
                Debug.logD(LOG_TAG, "MediaPlayerService unbound");
            }
        };
        // Bind AND start so the service outlives this fragment's binding.
        getActivity().getApplicationContext().bindService(mPlayMusicIntent, mMediaPlayerServiceConnection, Context.BIND_AUTO_CREATE);
        getActivity().getApplicationContext().startService(mPlayMusicIntent);
    }
}
/** Stops and unbinds the {@link MediaPlayerService}, nullifies all objects and updates flags */
private void stopMediaPlayerService() {
    Debug.logD(LOG_TAG, "stopMediaPlayerService called");
    Context appContext = getActivity().getApplicationContext();
    // Guard against being called before the service was ever started:
    // stopService(null) / unbindService(null) would throw.
    if (mPlayMusicIntent != null) {
        appContext.stopService(mPlayMusicIntent);
    }
    if (mMediaPlayerServiceConnection != null) {
        appContext.unbindService(mMediaPlayerServiceConnection);
        mMediaPlayerServiceConnection = null;
    }
    mMediaPlayerService = null;
    mPlayMusicIntent = null;
    mIsMediaPlayerServiceBound = false;
    mIsPlaying = false;
}
/** Toggles playback on the bound service and updates the play/pause icon. */
private void pauseOrPlayCurrentTrack() {
    // The button can be tapped before the service connection completes (or after the
    // service was stopped); bail out instead of throwing an NPE.
    if (!mIsMediaPlayerServiceBound || mMediaPlayerService == null) {
        return;
    }
    mMediaPlayerService.pauseOrPlayCurrentTrack();
    mIsPlaying = !mIsPlaying;
    setPlayPauseIcon();
}
/** Reflects the current playback state on the play/pause toggle button. */
private void setPlayPauseIcon() {
    int icon = mIsPlaying
            ? android.R.drawable.ic_media_pause
            : android.R.drawable.ic_media_play;
    mPlayPauseImageButton.setImageResource(icon);
}
/** Stop current {@link MediaPlayerService} and start a new one with the next track */
private void nextTrack() {
    // Disable play/pause until the service broadcasts that the new track is prepared.
    mPlayPauseImageButton.setEnabled(false);
    stopMediaPlayerService();
    incrementTrack(true);
    bindView();
    startMediaPlayerService();
}
/** Stop current {@link MediaPlayerService} and start a new one with the previous track */
private void previousTrack() {
    // Disable play/pause until the service broadcasts that the new track is prepared.
    mPlayPauseImageButton.setEnabled(false);
    stopMediaPlayerService();
    incrementTrack(false);
    bindView();
    startMediaPlayerService();
}
/**
 * Moves the now-playing index one step forward or backward, wrapping around at either
 * end of the track list.
 *
 * @param incrementUp Enter <code>true</code> to increment up, <code>false</code> to increment
 *                    down.
 */
private void incrementTrack(boolean incrementUp) {
    int size = mTrackList.size();
    int step = incrementUp ? 1 : -1;
    // Adding size before the modulo keeps the backward step non-negative.
    mTrackNowPlaying = (mTrackNowPlaying + step + size) % size;
}
/** Updates the position of the time display and seekbar every 1 second. */
private final Runnable mMusicPlayerUpdaterRunnable = new Runnable() {
    @Override
    public void run() {
        // Both the bound flag and the reference are checked: the reference is nulled
        // by stopMediaPlayerService() while the flag may lag behind.
        if (mIsMediaPlayerServiceBound && mMediaPlayerService != null) {
            mMaxDuration = mMediaPlayerService.getMaxDuration();
            mSeekBar.setMax(mMaxDuration);
            mDurationTextView.setText(MediaPlayerUtilities.formatMillisToString(mMaxDuration));
            mCurrentProgress = mMediaPlayerService.getCurrentPosition();
            mSeekBar.setProgress(mCurrentProgress);
            mElapsedTimeTextView.setText(MediaPlayerUtilities.formatMillisToString(mCurrentProgress));
        } else {
            Debug.logD(LOG_TAG, "MediaPlayerService not yet bound");
        }
        // Re-arm unconditionally; first posted from onAttach().
        mMediaPlayerServiceHandler.postDelayed(this, 1000);
    }
};
/**
 * Seeks the bound media player to {@code position} (milliseconds).
 * No-op when the service is not available.
 */
private void seekTo(int position) {
    // The bound flag alone is not enough: mMediaPlayerService is nulled by
    // stopMediaPlayerService(), so check both (same guard as the updater runnable).
    if (mIsMediaPlayerServiceBound && mMediaPlayerService != null) {
        mMediaPlayerService.seekTo(position);
    }
}
//endregion
}
| |
// Copyright 2015 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.android;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.base.Throwables;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Multimap;
import com.google.common.collect.Ordering;
import java.io.IOException;
import java.nio.file.FileVisitOption;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.LinkOption;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.EnumSet;
import java.util.List;
import java.util.Map;
/**
* Filters a {@link MergedAndroidData} resource drawables to the specified densities.
*/
public class DensitySpecificResourceFilter {
/** Immutable description of one resource file parsed from its qualifier directory. */
private static class ResourceInfo {
    /** Path to an actual file resource, instead of a directory. */
    private final Path resource;
    /** Resource type, i.e. the first segment of the directory name ("drawable", ...). */
    private final String restype;
    /** Remaining config qualifiers with the density qualifier stripped out. */
    private final String qualifiers;
    /** Density qualifier ("hdpi", ...), or "" when the directory carries none. */
    private final String density;
    /** Resource identifier: the file name, extension included. */
    private final String resid;

    public ResourceInfo(Path resource, String restype, String qualifiers, String density,
        String resid) {
      this.resource = resource;
      this.restype = restype;
      this.qualifiers = qualifiers;
      this.density = density;
      this.resid = resid;
    }

    public Path getResource() {
      return this.resource;
    }

    public String getRestype() {
      return this.restype;
    }

    public String getQualifiers() {
      return this.qualifiers;
    }

    public String getDensity() {
      return this.density;
    }

    public String getResid() {
      return this.resid;
    }
}
/**
 * File visitor that mirrors every visited file under {@code copyToPath}, preserving
 * the layout relative to {@code root}, and records the destination paths.
 */
private static class RecursiveFileCopier extends SimpleFileVisitor<Path> {
    private final Path copyToPath;
    private final List<Path> copiedSourceFiles = new ArrayList<>();
    // never reassigned after construction, so final like copyToPath
    private final Path root;

    public RecursiveFileCopier(final Path copyToPath, final Path root) {
      this.copyToPath = copyToPath;
      this.root = root;
    }

    @Override
    public FileVisitResult visitFile(Path path, BasicFileAttributes attrs) throws IOException {
      Path copyTo = copyToPath.resolve(root.relativize(path));
      Files.createDirectories(copyTo.getParent());
      Files.copy(path, copyTo, LinkOption.NOFOLLOW_LINKS);
      copiedSourceFiles.add(copyTo);
      return FileVisitResult.CONTINUE;
    }

    /** Destination paths of every file copied so far. */
    public List<Path> getCopiedFiles() {
      return copiedSourceFiles;
    }
}
private final List<String> densities;
private final Path out;
private final Path working;
private static final Map<String, Integer> DENSITY_MAP =
new ImmutableMap.Builder<String, Integer>()
.put("nodpi", 0)
.put("ldpi", 120)
.put("mdpi", 160)
.put("tvdpi", 213)
.put("hdpi", 240)
.put("xhdpi", 320)
.put("400dpi", 400)
.put("420dpi", 420)
.put("xxhdpi", 480)
.put("560dpi", 560)
.put("xxxhdpi", 640)
.build();
private static final Function<ResourceInfo, String> GET_RESOURCE_ID =
new Function<ResourceInfo, String>() {
@Override
public String apply(ResourceInfo info) {
return info.getResid();
}
};
private static final Function<ResourceInfo, String> GET_RESOURCE_QUALIFIERS =
new Function<ResourceInfo, String>() {
@Override
public String apply(ResourceInfo info) {
return info.getQualifiers();
}
};
private static final Function<ResourceInfo, Path> GET_RESOURCE_PATH =
new Function<ResourceInfo, Path>() {
@Override
public Path apply(ResourceInfo info) {
return info.getResource();
}
};
/**
 * @param densities A list of string densities to use for filtering resources
 * @param out The path to use for name spacing the final resource directory.
 * @param working The path of the working directory for the filtering
 */
public DensitySpecificResourceFilter(List<String> densities, Path out, Path working) {
    this.densities = densities;
    this.out = out;
    this.working = working;
}
/**
 * Computes which of the given resource files should be deleted: within every
 * (resource id, qualifiers) group that has more than one density variant, everything
 * except the best-scoring variant for each requested density is marked for removal.
 * NOTE(review): removals are unioned across densities — with several requested
 * densities, the best match for one density can appear in another density's removal
 * sublist and be deleted; confirm multi-density behavior is intended.
 */
@VisibleForTesting
List<Path> getResourceToRemove(List<Path> resourcePaths) {
    List<ResourceInfo> resourceInfos = getResourceInfos(resourcePaths);
    List<ResourceInfo> densityResourceInfos = filterDensityResourceInfos(resourceInfos);
    List<ResourceInfo> resourceInfoToRemove = new ArrayList<>();
    // First group by resource id (file name), then by the non-density qualifiers.
    Multimap<String, ResourceInfo> fileGroups = groupResourceInfos(densityResourceInfos,
        GET_RESOURCE_ID);
    for (String key : fileGroups.keySet()) {
        Multimap<String, ResourceInfo> qualifierGroups = groupResourceInfos(fileGroups.get(key),
            GET_RESOURCE_QUALIFIERS);
        for (String qualifiers : qualifierGroups.keySet()) {
            Collection<ResourceInfo> qualifierResourceInfos = qualifierGroups.get(qualifiers);
            // A group with a single variant is always kept.
            if (qualifierResourceInfos.size() != 1) {
                for (final String density : densities) {
                    // Sort by match score (lower is better) and keep only the head.
                    List<ResourceInfo> sortedResourceInfos = Ordering.natural().onResultOf(
                        new Function<ResourceInfo, Double>() {
                            @Override
                            public Double apply(ResourceInfo info) {
                                return matchScore(info, density);
                            }
                        }).immutableSortedCopy(qualifierResourceInfos);
                    resourceInfoToRemove.addAll(sortedResourceInfos.subList(1, sortedResourceInfos.size()));
                }
            }
        }
    }
    return ImmutableList.copyOf(Lists.transform(resourceInfoToRemove, GET_RESOURCE_PATH));
}
/**
 * Deletes the given files. The previous implementation ignored the boolean returned
 * by {@code File.delete()}, so a failed deletion silently left a stale density
 * variant in the filtered output; {@link Files#delete} reports the failure instead.
 */
private static void removeResources(List<Path> resourceInfoToRemove) {
    for (Path resource : resourceInfoToRemove) {
        try {
            Files.delete(resource);
        } catch (IOException e) {
            // unchecked rethrow, consistent with the error handling in filter()
            throw new RuntimeException("Failed to delete " + resource, e);
        }
    }
}
/**
 * Groups the given resource infos by the key extracted with {@code keyFunction}
 * (resource id or qualifier string), returning an immutable multimap.
 */
private static Multimap<String, ResourceInfo> groupResourceInfos(
    final Collection<ResourceInfo> resourceInfos, Function<ResourceInfo, String> keyFunction) {
    Multimap<String, ResourceInfo> resourceGroups = ArrayListMultimap.create();
    for (ResourceInfo resourceInfo : resourceInfos) {
        resourceGroups.put(keyFunction.apply(resourceInfo), resourceInfo);
    }
    return ImmutableMultimap.copyOf(resourceGroups);
}
/**
 * Parses each resource file's parent directory name (e.g. "drawable-en-hdpi") into a
 * {@link ResourceInfo}: resource type, density qualifier, remaining qualifiers, and
 * the file name as resource id.
 */
private static List<ResourceInfo> getResourceInfos(final List<Path> resourcePaths) {
    List<ResourceInfo> resourceInfos = new ArrayList<>();
    for (Path resourcePath : resourcePaths) {
        String qualifiers = resourcePath.getParent().getFileName().toString();
        String density = "";
        // Pull the density qualifier (if any) out of the directory name.
        for (String densityName : DENSITY_MAP.keySet()) {
            if (qualifiers.contains("-" + densityName)) {
                qualifiers = qualifiers.replace("-" + densityName, "");
                density = densityName;
            }
        }
        String[] qualifierArray = qualifiers.split("-");
        String restype = qualifierArray[0];
        // Keep only the qualifiers that follow the resource type. The previous test
        // (length > 0) was always true — split() never returns an empty array here —
        // so the "" branch was dead; length > 1 is the intended check (both forms
        // yield "" for a bare directory name like "drawable").
        qualifiers = (qualifierArray.length) > 1 ? Joiner.on("-").join(Arrays.copyOfRange(
            qualifierArray, 1, qualifierArray.length)) : "";
        resourceInfos.add(new ResourceInfo(resourcePath, restype, qualifiers, density,
            resourcePath.getFileName().toString()));
    }
    return ImmutableList.copyOf(resourceInfos);
}
/**
 * Keeps only the resources that participate in density filtering: density-qualified
 * drawable files, excluding "nodpi" assets and XML drawables.
 */
private static List<ResourceInfo> filterDensityResourceInfos(
    final List<ResourceInfo> resourceInfos) {
    ImmutableList.Builder<ResourceInfo> densityResourceInfos = ImmutableList.builder();
    for (ResourceInfo info : resourceInfos) {
        // Only density-qualified drawables are candidates.
        if (!info.getRestype().equals("drawable") || info.getDensity().equals("")) {
            continue;
        }
        // nodpi assets and XML drawables are density-independent.
        if (info.getDensity().equals("nodpi") || info.getResid().endsWith(".xml")) {
            continue;
        }
        densityResourceInfos.add(info);
    }
    return densityResourceInfos.build();
}
/**
 * Scores how well {@code resource} matches the requested {@code density}; lower is
 * better. An exact density match scores -2 (best possible); otherwise the score is
 * derived from the log2 ratio of the two dpi values.
 * NOTE(review): assumes both density names are keys of DENSITY_MAP with non-zero
 * values — "nodpi" (mapped to 0) is filtered out before this is called.
 */
private static double matchScore(ResourceInfo resource, String density) {
    if (resource.getDensity().equals(density)) {
        return -2;
    }
    // log2 of the dpi ratio: negative when the candidate is denser than requested.
    double affinity =
        Math.log((double) (DENSITY_MAP.get(density)) / DENSITY_MAP.get(resource.getDensity()))
            / Math.log(2);
    if (affinity == -1) {
        // It's very efficient to downsample an image that's exactly 2x the screen
        // density, so we prefer that over other non-perfect matches
        return affinity;
    } else if (affinity < 0) {
        // We give a slight bump to images that have the same multiplier but are
        // higher quality.
        affinity = Math.abs(affinity + 0.01);
    }
    return affinity;
}
/** Filters the contents of a resource directory. */
public Path filter(Path unFilteredResourceDir) {
    // no densities to filter, so skip.
    if (densities.isEmpty()) {
        return unFilteredResourceDir;
    }
    // Mirror the whole tree under `out`, then delete the density variants that lose.
    final Path filteredResourceDir =
        out.resolve(working.relativize(unFilteredResourceDir));
    RecursiveFileCopier fileVisitor =
        new RecursiveFileCopier(filteredResourceDir, unFilteredResourceDir);
    try {
        Files.walkFileTree(unFilteredResourceDir, EnumSet.of(FileVisitOption.FOLLOW_LINKS),
            Integer.MAX_VALUE, fileVisitor);
    } catch (IOException e) {
        // rethrown unchecked: a failed copy makes the filtered output unusable
        throw Throwables.propagate(e);
    }
    removeResources(getResourceToRemove(fileVisitor.getCopiedFiles()));
    return filteredResourceDir;
}
}
| |
/*
* Copyright 2012-2016, the original author or authors.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.flipkart.flux.domain;
import org.hibernate.annotations.Type;
import javax.persistence.*;
import java.io.Serializable;
import java.sql.Timestamp;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
/**
* <code>State</code> represents the current state of the StateMachine. This implementation also supports integration with user defined code that is executed when the
* state transition happens. User code can be integrated using {@link Hook} and {@link Task}. Hooks are added on entry or exit of this State while Task is executed when the
* transition is in progress. The outcome of Hook execution does not impact state transition whereas a failed Task execution will abort the transition.
*
* @author Yogesh
* @author regunath.balasubramanian
* @author shyam.akirala
* @author kartik.bommepally
*/
@Entity
@Table(name = "States")
@IdClass(State.StatePK.class)
public class State {
/**
* Constants for storing replay retries attempted and maximum allowed for a replayable state.
*/
private static final short DEFAULT_ATTEMPTED_REPLAYABLE_RETRIES = 0;
private static final short MAX_REPLAYABLE_RETRIES = 5;
/* Defined by the User */
/**
* Unique identifier of the state
*/
@Id
private Long id;
/**
* Id of the state machine to which this state belongs
*/
@Id
private String stateMachineId;
/**
* Version for this State
*/
private Long version;
/**
* The name of this State
*/
private String name;
/**
* Description for this State
*/
private String description;
/**
* Name of Hook class that is executed on entry of this State, must be a public class
*/
private String onEntryHook;
/**
* Name of Task class that is executed when the transition happens to this State, must be a public class
*/
private String task;
/**
* Name of Hook class that is executed on exit of this State, must be a public class
*/
private String onExitHook;
/**
* The max retry count for a successful transition
*/
private Long retryCount;
/**
* Timeout for state transition
*/
private Long timeout;
/**
* List of event names this state is dependent on
*/
@Type(type = "ListJsonType") // This dependency on Hibernate is not a good option, however don't have a replacement for this in JPA
private List<String> dependencies;
private String outputEvent;
/* Maintained by the execution engine */
/**
* The Status of state transition execution
*/
@Enumerated(EnumType.STRING)
private Status status;
/**
* The rollback status
*/
@Enumerated(EnumType.STRING)
private Status rollbackStatus;
/**
* The number of retries attempted
*/
private Long attemptedNoOfRetries;
/**
* Execution version for this state
*/
private Long executionVersion;
/**
* Boolean value to indicate whether state is replayable or not
*/
private Boolean replayable;
/**
 * Time at which this State has been created
 */
private Timestamp createdAt;
/**
 * Time at which this State has been last updated
 * NOTE(review): {@code updatable = false} on the *last updated* timestamp (rather
 * than on createdAt) looks inverted — with this mapping JPA writes the column once
 * and never refreshes it on update; confirm which column the constraint was meant for.
 */
@Column(updatable = false)
private Timestamp updatedAt;
/***
* The number of retries attempted for a replayable state
*/
private Short attemptedNumOfReplayableRetries;
/***
* The max retry count for a replayable event
*/
private Short maxReplayableRetries;
/**
 * Constructors
 */
// Protected no-arg constructor for JPA/Hibernate; starts with an empty (never null)
// dependency list.
protected State() {
    super();
    dependencies = new LinkedList<>();
}
// Convenience constructor: non-replayable state with default replay retry counters
// and execution version 0.
public State(Long version, String name, String description, String onEntryHook, String task, String onExitHook, List<String> dependencies,
             Long retryCount, Long timeout, String outputEvent, Status status, Status rollbackStatus,
             Long attemptedNoOfRetries, String stateMachineId, Long id) {
    this(version, name, description, onEntryHook, task, onExitHook, dependencies, retryCount, timeout, outputEvent,
        status, rollbackStatus, attemptedNoOfRetries, stateMachineId, id, MAX_REPLAYABLE_RETRIES, DEFAULT_ATTEMPTED_REPLAYABLE_RETRIES, Boolean.FALSE, 0L);
}
// Convenience constructor: caller-provided replayable flag, default replay retry
// counters and execution version 0.
public State(Long version, String name, String description, String onEntryHook, String task, String onExitHook, List<String> dependencies,
             Long retryCount, Long timeout, String outputEvent, Status status, Status rollbackStatus,
             Long attemptedNoOfRetries, String stateMachineId, Long id, Boolean replayable) {
    this(version, name, description, onEntryHook, task, onExitHook, dependencies, retryCount, timeout, outputEvent,
        status, rollbackStatus, attemptedNoOfRetries, stateMachineId, id, MAX_REPLAYABLE_RETRIES, DEFAULT_ATTEMPTED_REPLAYABLE_RETRIES, replayable, 0L);
}
// Convenience constructor: caller-provided replay counters and flag, execution
// version defaulted to 0.
public State(Long version, String name, String description, String onEntryHook, String task, String onExitHook, List<String> dependencies,
             Long retryCount, Long timeout, String outputEvent, Status status, Status rollbackStatus,
             Long attemptedNoOfRetries, String stateMachineId, Long id, Short maxReplayableRetries, Short attemptedNumOfReplayableRetries, Boolean replayable) {
    this(version, name, description, onEntryHook, task, onExitHook, dependencies, retryCount, timeout, outputEvent,
        status, rollbackStatus, attemptedNoOfRetries, stateMachineId, id, maxReplayableRetries, attemptedNumOfReplayableRetries, replayable, 0L);
}
// Full constructor that all other constructors delegate to; sets every field
// explicitly. Note: the caller-supplied dependencies list replaces the empty list
// created by the no-arg constructor.
public State(Long version, String name, String description, String onEntryHook, String task, String onExitHook, List<String> dependencies,
             Long retryCount, Long timeout, String outputEvent, Status status, Status rollbackStatus,
             Long attemptedNoOfRetries, String stateMachineId, Long id, Short maxReplayableRetryCount, Short attemptedNumOfReplayableRetries,
             Boolean replayable, Long executionVersion) {
    this();
    this.version = version;
    this.name = name;
    this.description = description;
    this.onEntryHook = onEntryHook;
    this.task = task;
    this.onExitHook = onExitHook;
    this.dependencies = dependencies;
    this.retryCount = retryCount;
    this.timeout = timeout;
    this.outputEvent = outputEvent;
    this.status = status;
    this.rollbackStatus = rollbackStatus;
    this.attemptedNoOfRetries = attemptedNoOfRetries;
    this.stateMachineId = stateMachineId;
    this.id = id;
    this.executionVersion = executionVersion;
    this.maxReplayableRetries = maxReplayableRetryCount;
    this.attemptedNumOfReplayableRetries = attemptedNumOfReplayableRetries;
    this.replayable = replayable;
}
/**
 * Used to check whether the state has all its dependencies met based on the input set of event names
 *
 * @param receivedEvents - Input set containing event names of all events received so far
 * @return true if every event name in this state's dependency list is present in
 *         {@code receivedEvents} (trivially true for an empty dependency list)
 */
public boolean isDependencySatisfied(Set<String> receivedEvents) {
    return receivedEvents.containsAll(this.dependencies);
}
/**
 * Accessor/Mutator methods
 */
public Long getId() {
    return id;
}
public Long getVersion() {
    return version;
}
public void setVersion(Long version) {
    this.version = version;
}
public String getName() {
    return name;
}
public void setName(String name) {
    this.name = name;
}
public String getDescription() {
    return description;
}
public void setDescription(String description) {
    this.description = description;
}
public String getStateMachineId() {
    return stateMachineId;
}
public void setStateMachineId(String stateMachineId) {
    this.stateMachineId = stateMachineId;
}
public String getOnEntryHook() {
    return onEntryHook;
}
public void setOnEntryHook(String onEntryHook) {
    this.onEntryHook = onEntryHook;
}
public String getTask() {
    return task;
}
public void setTask(String task) {
    this.task = task;
}
public String getOnExitHook() {
    return onExitHook;
}
public void setOnExitHook(String onExitHook) {
    this.onExitHook = onExitHook;
}
public List<String> getDependencies() {
    return dependencies;
}
public void setDependencies(List<String> dependencies) {
    this.dependencies = dependencies;
}
public Long getRetryCount() {
    return retryCount;
}
public void setRetryCount(Long retryCount) {
    this.retryCount = retryCount;
}
public Long getTimeout() {
    return timeout;
}
public void setTimeout(Long timeout) {
    this.timeout = timeout;
}
public Status getStatus() {
    return status;
}
public void setStatus(Status status) {
    this.status = status;
}
public Status getRollbackStatus() {
    return rollbackStatus;
}
public void setRollbackStatus(Status rollbackStatus) {
    this.rollbackStatus = rollbackStatus;
}
// Accessor says "Num" while the backing field is "attemptedNoOfRetries".
public Long getAttemptedNumOfRetries() {
    return attemptedNoOfRetries;
}
public void setAttemptedNumOfRetries(Long attemptedNoOfRetries) {
    this.attemptedNoOfRetries = attemptedNoOfRetries;
}
// createdAt/updatedAt are read-only here — presumably maintained by the
// persistence layer; confirm against the entity mapping.
public Timestamp getCreatedAt() {
    return createdAt;
}
public Timestamp getUpdatedAt() {
    return updatedAt;
}
public String getOutputEvent() {
    return outputEvent;
}
public Long getExecutionVersion() {
    return executionVersion;
}
public void setExecutionVersion(Long executionVersion) {
    this.executionVersion = executionVersion;
}
public Boolean getReplayable() {
    return replayable;
}
public void setReplayable(Boolean replayable) {
    this.replayable = replayable;
}
// NOTE(review): returns primitive short by unboxing the Short field
// (equals()/hashCode() treat it as nullable) — throws NullPointerException
// when the field is null; confirm callers guarantee it is set.
public short getMaxReplayableRetries() {
    return maxReplayableRetries;
}
public void setMaxReplayableRetries(short maxReplayableRetries) {
    this.maxReplayableRetries = maxReplayableRetries;
}
// NOTE(review): same unboxing caveat as getMaxReplayableRetries().
public short getAttemptedNumOfReplayableRetries() {
    return attemptedNumOfReplayableRetries;
}
public void setAttemptedNumOfReplayableRetries(short attemptedNumOfReplayableRetries) {
    this.attemptedNumOfReplayableRetries = attemptedNumOfReplayableRetries;
}
/**
 * Value equality over the state's descriptive fields.
 * Mirrors the original field set exactly: {@code id} and
 * {@code dependencies} are intentionally not compared.
 */
@Override
public boolean equals(Object o) {
    if (this == o) {
        return true;
    }
    if (!(o instanceof State)) {
        return false;
    }
    State other = (State) o;
    // Enum fields compare by identity; everything else is a null-safe equals.
    return status == other.status
        && rollbackStatus == other.rollbackStatus
        && java.util.Objects.equals(createdAt, other.createdAt)
        && java.util.Objects.equals(description, other.description)
        && java.util.Objects.equals(name, other.name)
        && java.util.Objects.equals(attemptedNoOfRetries, other.attemptedNoOfRetries)
        && java.util.Objects.equals(onEntryHook, other.onEntryHook)
        && java.util.Objects.equals(onExitHook, other.onExitHook)
        && java.util.Objects.equals(outputEvent, other.outputEvent)
        && java.util.Objects.equals(retryCount, other.retryCount)
        && java.util.Objects.equals(stateMachineId, other.stateMachineId)
        && java.util.Objects.equals(task, other.task)
        && java.util.Objects.equals(timeout, other.timeout)
        && java.util.Objects.equals(updatedAt, other.updatedAt)
        && java.util.Objects.equals(version, other.version)
        && java.util.Objects.equals(executionVersion, other.executionVersion)
        && java.util.Objects.equals(replayable, other.replayable)
        && java.util.Objects.equals(attemptedNumOfReplayableRetries, other.attemptedNumOfReplayableRetries)
        && java.util.Objects.equals(maxReplayableRetries, other.maxReplayableRetries);
}
/**
 * Hash over the same fields, in the same order, as {@link #equals(Object)}'s
 * field set — Objects.hashCode(x) is the null-safe form of
 * (x != null ? x.hashCode() : 0), so the produced values are unchanged.
 */
@Override
public int hashCode() {
    int result = java.util.Objects.hashCode(version);
    result = 31 * result + java.util.Objects.hashCode(name);
    result = 31 * result + java.util.Objects.hashCode(description);
    result = 31 * result + java.util.Objects.hashCode(stateMachineId);
    result = 31 * result + java.util.Objects.hashCode(onEntryHook);
    result = 31 * result + java.util.Objects.hashCode(task);
    result = 31 * result + java.util.Objects.hashCode(onExitHook);
    result = 31 * result + java.util.Objects.hashCode(outputEvent);
    result = 31 * result + java.util.Objects.hashCode(retryCount);
    result = 31 * result + java.util.Objects.hashCode(timeout);
    result = 31 * result + java.util.Objects.hashCode(status);
    result = 31 * result + java.util.Objects.hashCode(rollbackStatus);
    result = 31 * result + java.util.Objects.hashCode(attemptedNoOfRetries);
    result = 31 * result + java.util.Objects.hashCode(executionVersion);
    result = 31 * result + java.util.Objects.hashCode(replayable);
    result = 31 * result + java.util.Objects.hashCode(createdAt);
    result = 31 * result + java.util.Objects.hashCode(updatedAt);
    result = 31 * result + java.util.Objects.hashCode(attemptedNumOfReplayableRetries);
    result = 31 * result + java.util.Objects.hashCode(maxReplayableRetries);
    return result;
}
/**
 * Debug representation of the state. Produces exactly the same text as
 * before, assembled with a StringBuilder instead of chained concatenation.
 */
@Override
public String toString() {
    StringBuilder sb = new StringBuilder("State{");
    sb.append("id=").append(id);
    sb.append(", version=").append(version);
    sb.append(", name='").append(name).append('\'');
    sb.append(", description='").append(description).append('\'');
    sb.append(", stateMachineId=").append(stateMachineId);
    sb.append(", onEntryHook='").append(onEntryHook).append('\'');
    sb.append(", task='").append(task).append('\'');
    sb.append(", onExitHook='").append(onExitHook).append('\'');
    sb.append(", outputEvent='").append(outputEvent).append('\'');
    sb.append(", retryCount=").append(retryCount);
    sb.append(", timeout=").append(timeout);
    sb.append(", dependencies=").append(dependencies);
    sb.append(", status=").append(status);
    sb.append(", rollbackStatus=").append(rollbackStatus);
    sb.append(", attemptedNoOfRetries=").append(attemptedNoOfRetries);
    sb.append(", executionVersion=").append(executionVersion);
    sb.append(", replayable=").append(replayable);
    sb.append(", createdAt=").append(createdAt);
    sb.append(", updatedAt=").append(updatedAt);
    sb.append(", attemptedNumOfReplayableRetries=").append(attemptedNumOfReplayableRetries);
    sb.append(", maxReplayableRetries=").append(maxReplayableRetries);
    sb.append('}');
    return sb.toString();
}
/**
 * <code>StatePK</code> is the composite primary key of "State" table in DB.
 */
static class StatePK implements Serializable {

    private Long id;
    private String stateMachineId;

    /**
     * for Hibernate
     */
    public StatePK() {
    }

    public StatePK(Long id, String stateMachineId) {
        this.id = id;
        this.stateMachineId = stateMachineId;
    }

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getStateMachineId() {
        return stateMachineId;
    }

    public void setStateMachineId(String stateMachineId) {
        this.stateMachineId = stateMachineId;
    }

    /**
     * Null-safe equality on (id, stateMachineId). The previous version called
     * getId().equals(...) / getStateMachineId().equals(...) unguarded and threw
     * NullPointerException for a key whose id was not yet assigned (e.g. a
     * transient entity before id generation).
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof StatePK)) return false;
        StatePK statePK = (StatePK) o;
        if (!java.util.Objects.equals(id, statePK.id)) return false;
        return java.util.Objects.equals(stateMachineId, statePK.stateMachineId);
    }

    /**
     * Hash consistent with equals(); identical to the old formula for non-null
     * fields, but tolerates nulls instead of throwing.
     */
    @Override
    public int hashCode() {
        int result = java.util.Objects.hashCode(id);
        result = 31 * result + java.util.Objects.hashCode(stateMachineId);
        return result;
    }
}
}
| |
/*
* Copyright (c) JForum Team
* All rights reserved.
*
* Redistribution and use in source and binary forms,
* with or without modification, are permitted provided
* that the following conditions are met:
*
* 1) Redistributions of source code must retain the above
* copyright notice, this list of conditions and the
* following disclaimer.
* 2) Redistributions in binary form must reproduce the
* above copyright notice, this list of conditions and
* the following disclaimer in the documentation and/or
* other materials provided with the distribution.
* 3) Neither the name of "Rafael Steil" nor
* the names of its contributors may be used to endorse
* or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT
* HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
* THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
* OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
* IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE
*
* Created on Feb 17, 2003 / 10:47:29 PM
* The JForum Project
* http://www.jforum.net
*/
package net.jforum.entities;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import net.jforum.SessionFacade;
import net.jforum.exceptions.ForumOrderChangedException;
import net.jforum.repository.SecurityRepository;
import net.jforum.security.PermissionControl;
import net.jforum.security.SecurityConstants;
import net.jforum.util.ForumOrderComparator;
/**
* Represents a category in the System.
* Each category holds a reference to all its forums, which
* can be retrieved by calling either @link #getForums(),
* @link #getForum(int) and related methods.
*
* <br />
*
* This class also controls the access to its forums, so a call
* to @link #getForums() will only return the forums accessible
* to the user who makes the call to the method.
*
* @author Rafael Steil
* @version $Id$
*/
public class Category implements Serializable, Comparable<Category>
{
    private static final long serialVersionUID = -4894230707020588049L;

    private int id;
    private int order;
    private boolean moderated;
    private String name;

    // Forums indexed by id for direct lookup; the TreeSet view keeps the same
    // instances sorted by display order (ForumOrderComparator).
    private Map<Integer, Forum> forumsIdMap = new HashMap<Integer, Forum>();
    private Set<Forum> forums = new TreeSet<Forum>(new ForumOrderComparator());

    public Category() {}

    public Category(int id) {
        this.id = id;
    }

    public Category(String name, int id) {
        this.name = name;
        this.id = id;
    }

    /**
     * Copy constructor: copies the scalar attributes and deep-copies each forum.
     * NOTE(review): iterates {@link #getForums()}, which filters by the current
     * session user's permissions — confirm the copy is meant to be filtered.
     *
     * @param category the category to copy
     */
    public Category(Category category) {
        this.name = category.getName();
        this.id = category.getId();
        this.order = category.getOrder();
        this.moderated = category.isModerated();

        for (Forum original : category.getForums()) {
            Forum forum = new Forum(original);
            this.forumsIdMap.put(Integer.valueOf(forum.getId()), forum);
            this.forums.add(forum);
        }
    }

    public void setModerated(boolean status)
    {
        this.moderated = status;
    }

    public boolean isModerated()
    {
        return this.moderated;
    }

    /**
     * @return int
     */
    public int getId() {
        return this.id;
    }

    /**
     * @return String
     */
    public String getName() {
        return this.name;
    }

    /**
     * @return int
     */
    public int getOrder() {
        return this.order;
    }

    /**
     * Sets the id.
     * @param id The id to set
     */
    public void setId(int id) {
        this.id = id;
    }

    /**
     * Sets the name.
     * @param name The name to set
     */
    public void setName(String name) {
        this.name = name;
    }

    /**
     * Sets the order.
     * @param order The order to set
     */
    public void setOrder(int order) {
        this.order = order;
    }

    /**
     * Adds a forum to this category
     *
     * @param forum Forum
     */
    public void addForum(Forum forum) {
        this.forumsIdMap.put(Integer.valueOf(forum.getId()), forum);
        this.forums.add(forum);
    }

    /**
     * Reloads a forum.
     * The forum should already be in the cache and <b>SHOULD NOT</b>
     * have its order changed. If the forum's order was changed,
     * then you <b>MUST CALL</b> @link #changeForumOrder(Forum) <b>BEFORE</b>
     * calling this method.
     *
     * @param forum The forum to reload its information
     * @see #changeForumOrder(Forum)
     */
    public void reloadForum(Forum forum) {
        Forum currentForum = this.getForum(forum.getId());

        if (forum.getOrder() != currentForum.getOrder()) {
            throw new ForumOrderChangedException("Forum #" + forum.getId() + " cannot be reloaded, since its "
                + "display order was changed. You must call Category#changeForumOrder(Forum)"
                + "first");
        }

        // Rebuild the sorted set so the comparator re-places the new instance.
        Set<Forum> tmpSet = new TreeSet<Forum>(new ForumOrderComparator());
        tmpSet.addAll(this.forums);
        tmpSet.remove(currentForum);
        tmpSet.add(forum);
        this.forumsIdMap.put(Integer.valueOf(forum.getId()), forum);

        this.forums = tmpSet;
    }

    /**
     * Changes a forum's display order.
     * This method changes the position of the
     * forum in the current display order of the
     * forum instance passed as argument, if applicable.
     *
     * @param forum The forum to change
     */
    public void changeForumOrder(Forum forum)
    {
        Forum current = this.getForum(forum.getId());
        Forum currentAtOrder = this.findByOrder(forum.getOrder());

        Set<Forum> tmpSet = new TreeSet<Forum>(new ForumOrderComparator());
        tmpSet.addAll(this.forums);

        // Remove the forum in the current order
        // where the changed forum will need to be
        if (currentAtOrder != null) {
            tmpSet.remove(currentAtOrder);
        }

        tmpSet.add(forum);
        this.forumsIdMap.put(Integer.valueOf(forum.getId()), forum);

        // Remove the forum in the position occupied
        // by the changed forum before its modification,
        // so then we can add the another forum into
        // its position
        if (currentAtOrder != null) {
            tmpSet.remove(current);
            currentAtOrder.setOrder(current.getOrder());
            tmpSet.add(currentAtOrder);
            this.forumsIdMap.put(Integer.valueOf(currentAtOrder.getId()), currentAtOrder);
        }

        this.forums = tmpSet;
    }

    /**
     * Finds the forum currently shown at the given display order.
     *
     * @param order the display order to look for
     * @return the matching forum, or null if no forum has that order
     */
    private Forum findByOrder(int order)
    {
        for (Forum forum : this.forums) {
            if (forum.getOrder() == order) {
                return forum;
            }
        }

        return null;
    }

    /**
     * Removes a forum from the list.
     * @param forumId int
     */
    public void removeForum(int forumId) {
        this.forums.remove(this.getForum(forumId));
        this.forumsIdMap.remove(Integer.valueOf(forumId));
    }

    /**
     * Gets a forum.
     *
     * @param userId The user's id who is trying to see the forum
     * @param forumId The id of the forum to get
     * @return The <code>Forum</code> instance if found, or <code>null</code>
     * otherwise.
     * @see #getForum(int)
     */
    public Forum getForum(int userId, int forumId)
    {
        PermissionControl pc = SecurityRepository.get(userId);

        if (pc.canAccess(SecurityConstants.PERM_FORUM, Integer.toString(forumId))) {
            return this.forumsIdMap.get(Integer.valueOf(forumId));
        }

        return null;
    }

    /**
     * Gets a forum.
     *
     * @param forumId The forum's id
     * @return The requested forum, if found, or <code>null</code> if
     * the forum does not exists or access to it is denied.
     * @see #getForum(int, int)
     */
    public Forum getForum(int forumId)
    {
        return this.getForum(SessionFacade.getUserSession().getUserId(), forumId);
    }

    /**
     * Get all forums from this category.
     *
     * @return All forums, regardless it is accessible
     * to the user or not.
     */
    public Collection<Forum> getForums()
    {
        // NOTE(review): the empty case returns the internal mutable set
        // directly (pre-existing behavior, kept for compatibility).
        if (this.forums.isEmpty()) {
            return this.forums;
        }

        return this.getForums(SessionFacade.getUserSession().getUserId());
    }

    /**
     * Gets all forums from this category.
     *
     * @return The forums available to the user who make the call
     * @see #getForums()
     * @param userId int
     */
    public Collection<Forum> getForums(int userId)
    {
        PermissionControl pc = SecurityRepository.get(userId);
        List<Forum> accessible = new ArrayList<Forum>(this.forums.size());

        for (Forum forum : this.forums) {
            if (pc.canAccess(SecurityConstants.PERM_FORUM, Integer.toString(forum.getId()))) {
                accessible.add(forum);
            }
        }

        return accessible;
    }

    /**
     * @see java.lang.Object#hashCode()
     */
    @Override
    public int hashCode()
    {
        return this.id;
    }

    /**
     * @see java.lang.Object#equals(java.lang.Object)
     */
    @Override
    public boolean equals(Object o)
    {
        return ((o instanceof Category) && (((Category)o).getId() == this.id));
    }

    /**
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        return "[" + this.name + ", id=" + this.id + ", order=" + this.order + "]";
    }

    /**
     * Orders categories by display order. Uses Integer.compare instead of the
     * old subtraction, which could overflow for extreme values.
     * Note: this ordering is by {@code order} while equals() is by {@code id},
     * so compareTo is not consistent with equals.
     */
    @Override
    public int compareTo(Category o) {
        return Integer.compare(this.getOrder(), o.getOrder());
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.net.InetAddress;
import junit.framework.TestCase;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.lib.IdentityMapper;
import org.apache.hadoop.mapred.lib.IdentityReducer;
import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
import org.apache.hadoop.mapreduce.server.tasktracker.TTConfig;
/**
* A JUnit test to test Map-Reduce empty jobs.
*/
public class TestEmptyJob extends TestCase {
  private static final Log LOG =
      LogFactory.getLog(TestEmptyJob.class.getName());

  // Root directory for test data; spaces are replaced with '+' so the value
  // stays usable as a URI component.
  private static String TEST_ROOT_DIR =
      new File(System.getProperty("test.build.data", "/tmp")).toURI()
          .toString().replace(' ', '+');

  MiniMRCluster mr = null;

  /** Committer with commit waiting on a signal
   */
  static class CommitterWithDelayCommit extends FileOutputCommitter {
    @Override
    public void commitJob(JobContext context) throws IOException {
      Configuration conf = context.getConfiguration();
      // The "share" path is the signal file: commit blocks (100 ms polls)
      // until the test creates it in launchEmptyJob().
      Path share = new Path(conf.get("share"));
      FileSystem fs = FileSystem.get(conf);

      while (true) {
        if (fs.exists(share)) {
          break;
        }
        UtilsForTests.waitFor(100);
      }
      super.commitJob(context);
    }
  }

  /**
   * Simple method running a MapReduce job with no input data. Used to test that
   * such a job is successful.
   *
   * @param fileSys filesystem URI the MiniMR cluster runs against
   * @param numMaps requested number of map tasks (no splits exist, so none run)
   * @param numReduces requested number of reduce tasks
   * @return true if the MR job is successful, otherwise false
   * @throws IOException
   */
  private boolean launchEmptyJob(URI fileSys, int numMaps, int numReduces)
      throws IOException {
    // create an empty input dir
    final Path inDir = new Path(TEST_ROOT_DIR, "testing/empty/input");
    final Path outDir = new Path(TEST_ROOT_DIR, "testing/empty/output");
    final Path inDir2 = new Path(TEST_ROOT_DIR, "testing/dummy/input");
    final Path outDir2 = new Path(TEST_ROOT_DIR, "testing/dummy/output");
    final Path share = new Path(TEST_ROOT_DIR, "share");

    JobConf conf = mr.createJobConf();
    FileSystem fs = FileSystem.get(fileSys, conf);
    fs.delete(new Path(TEST_ROOT_DIR), true);
    fs.delete(outDir, true);
    if (!fs.mkdirs(inDir)) {
      LOG.warn("Can't create " + inDir);
      return false;
    }

    // use WordCount example
    FileSystem.setDefaultUri(conf, fileSys);
    conf.setJobName("empty");
    // use an InputFormat which returns no split
    conf.setInputFormat(EmptyInputFormat.class);
    // Commit of this job is held back until the share file appears (above).
    conf.setOutputCommitter(CommitterWithDelayCommit.class);
    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(IntWritable.class);
    conf.setMapperClass(IdentityMapper.class);
    conf.setReducerClass(IdentityReducer.class);
    FileInputFormat.setInputPaths(conf, inDir);
    FileOutputFormat.setOutputPath(conf, outDir);
    conf.setNumMapTasks(numMaps);
    conf.setNumReduceTasks(numReduces);
    conf.set("share", share.toString());

    // run job and wait for completion
    JobClient jc = new JobClient(conf);
    RunningJob runningJob = jc.submitJob(conf);
    JobInProgress job = mr.getJobTrackerRunner().getJobTracker().getJob(runningJob.getID());

    InetAddress ip = InetAddress.getLocalHost();
    if (ip != null) {
      assertTrue(job.getJobSubmitHostAddress().equalsIgnoreCase(
          ip.getHostAddress()));
      assertTrue(job.getJobSubmitHostName().equalsIgnoreCase(ip.getHostName()));
    }

    // Busy-wait (100 ms polls) until the job's cleanup task has been launched.
    while (true) {
      if (job.isCleanupLaunched()) {
        LOG.info("Waiting for cleanup to be launched for job "
            + runningJob.getID());
        break;
      }
      UtilsForTests.waitFor(100);
    }

    // submit another job so that the map load increases and scheduling happens
    LOG.info("Launching dummy job ");
    RunningJob dJob = null;
    try {
      JobConf dConf = new JobConf(conf);
      // The dummy job uses the regular committer — it must not block on share.
      dConf.setOutputCommitter(FileOutputCommitter.class);
      dJob = UtilsForTests.runJob(dConf, inDir2, outDir2, 2, 0);
    } catch (Exception e) {
      LOG.info("Exception ", e);
      throw new IOException(e);
    }

    while (true) {
      LOG.info("Waiting for job " + dJob.getID() + " to complete");
      try {
        Thread.sleep(100);
      } catch (InterruptedException e) {
        // NOTE(review): interrupt is swallowed and polling continues;
        // convention is to re-interrupt the thread — confirm intent.
      }
      if (dJob.isComplete()) {
        break;
      }
    }

    // check if the second job is successful
    assertTrue(dJob.isSuccessful());

    // signal the cleanup: creating the share file unblocks
    // CommitterWithDelayCommit.commitJob for the first job
    fs.create(share).close();

    while (true) {
      LOG.info("Waiting for job " + runningJob.getID() + " to complete");
      try {
        Thread.sleep(100);
      } catch (InterruptedException e) {
        // NOTE(review): interrupt swallowed here as well — confirm intent.
      }
      if (runningJob.isComplete()) {
        break;
      }
    }

    assertTrue(runningJob.isComplete());
    assertTrue(runningJob.isSuccessful());
    JobID jobID = runningJob.getID();

    // Setup/cleanup each have 2 tips (map-side and reduce-side attempts).
    TaskReport[] jobSetupTasks = jc.getSetupTaskReports(jobID);
    assertTrue("Number of job-setup tips is not 2!", jobSetupTasks.length == 2);
    assertTrue("Setup progress is " + runningJob.setupProgress()
        + " and not 1.0", runningJob.setupProgress() == 1.0);
    assertTrue("Setup task is not finished!", mr.getJobTrackerRunner()
        .getJobTracker().getJob(jobID).isSetupFinished());

    assertTrue("Number of maps is not zero!", jc.getMapTaskReports(runningJob
        .getID()).length == 0);
    assertTrue(
        "Map progress is " + runningJob.mapProgress() + " and not 1.0!",
        runningJob.mapProgress() == 1.0);
    assertTrue("Reduce progress is " + runningJob.reduceProgress()
        + " and not 1.0!", runningJob.reduceProgress() == 1.0);
    assertTrue("Number of reduces is not " + numReduces, jc
        .getReduceTaskReports(runningJob.getID()).length == numReduces);

    TaskReport[] jobCleanupTasks = jc.getCleanupTaskReports(jobID);
    assertTrue("Number of job-cleanup tips is not 2!",
        jobCleanupTasks.length == 2);
    assertTrue("Cleanup progress is " + runningJob.cleanupProgress()
        + " and not 1.0", runningJob.cleanupProgress() == 1.0);

    assertTrue("Job output directory doesn't exit!", fs.exists(outDir));
    FileStatus[] list = fs.listStatus(outDir,
        new Utils.OutputFileUtils.OutputFilesFilter());
    assertTrue("Number of part-files is " + list.length + " and not "
        + numReduces, list.length == numReduces);

    // cleanup
    fs.delete(outDir, true);

    // return job result
    LOG.info("job is complete: " + runningJob.isSuccessful());
    return (runningJob.isSuccessful());
  }

  /**
   * Test that a job with no input data (and thus with no input split and no map
   * task to execute) is successful.
   *
   * @throws IOException
   */
  public void testEmptyJob()
      throws IOException {
    FileSystem fileSys = null;
    try {
      final int taskTrackers = 2;

      JobConf conf = new JobConf();
      fileSys = FileSystem.get(conf);

      conf.set(JTConfig.JT_IPC_HANDLER_COUNT, "1");
      conf.set(JTConfig.JT_IPC_ADDRESS, "127.0.0.1:0");
      conf.set(JTConfig.JT_HTTP_ADDRESS, "127.0.0.1:0");
      conf.set(TTConfig.TT_HTTP_ADDRESS, "127.0.0.1:0");

      mr =
          new MiniMRCluster(taskTrackers, fileSys.getUri().toString(), 1,
              null, null, conf);

      // Once with tasks requested, once fully degenerate (0 maps, 0 reduces).
      assertTrue(launchEmptyJob(fileSys.getUri(), 3, 1));
      assertTrue(launchEmptyJob(fileSys.getUri(), 0, 0));
    } finally {
      if (fileSys != null) {
        fileSys.close();
      }
      if (mr != null) {
        mr.shutdown();
      }
    }
  }
}
| |
/** JavaBean entity for the CLIENTE database table. */
package dbbeans.cliente;
import java.sql.ResultSet;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import org.apache.log4j.Logger;
public class dbCLIENTE implements org.nibble.main.dbInterfase {
    // JDBC plumbing: the connection is injected via setConnection(); the
    // statement/result-set fields are reused by each CRUD operation.
    private Connection conn;
    private ResultSet rSet;
    private PreparedStatement stmt;
    static Logger logger= Logger.getLogger(dbCLIENTE.class);

    // Columns of the CLIENTE table. Hungarian prefixes: i=int, si=short,
    // vch=varchar, ch=char, bit/b=boolean, dec=decimal.
    private int iidnocliente;            // primary key (see load()/store()/remove() WHERE clauses)
    private short siidpais;
    private short siidviaembarque;
    private int iidnivel;
    private int iidcatcliente;
    private String vchrfc;
    private String vchnombre;
    private String vchrazonsocial;
    private String vchcalle;
    private String vchcolonia;
    private String chcodigop;
    private String vchmail;
    private String vchdescregion;
    private String vchdescestado;
    private String vchtel1;
    private String vchtel2;
    private String vchfax;
    private String vchcurp;
    private boolean bitdadodebaja;
    // NOTE(review): float for credit amounts loses precision; BigDecimal is
    // the usual choice for money — confirm before changing the schema mapping.
    private float declimitecredito;
    private float deccreditutilizado;
    private short sidiadepago;
    private boolean bsuceptiblecredito;
    private short siplazodiaspago;
    private int iidestado;
    private int iidmunicipio;
    // Mapped to DB columns vchnumexterior / vchnuminterior (names differ).
    private String vchnumeroexterior;
    private String vchnumerointerior;
    private String vchlocalidad;
/**
 * Inserts the current bean state as a new CLIENTE row.
 * The generated primary key (iidnocliente) is not read back here — callers
 * presumably obtain it separately; confirm.
 *
 * Fix: the statement is now closed in a finally block so it is released even
 * when executeUpdate fails (the original leaked it on error and had a stray
 * double semicolon).
 */
public void create() throws Exception, java.sql.SQLException {
    StringBuffer query = new StringBuffer();
    query.append("INSERT INTO CLIENTE (siidpais,siidviaembarque,iidnivel,iidcatcliente,vchrfc,vchnombre,vchrazonsocial,vchcalle,vchcolonia,chcodigop,vchmail,vchdescregion,vchdescestado,vchtel1,vchtel2,vchfax,vchcurp,bitdadodebaja,declimitecredito,deccreditutilizado,sidiadepago,bsuceptiblecredito,siplazodiaspago,iidestado,iidmunicipio, vchnumexterior, vchnuminterior, vchlocalidad) ");
    query.append(" VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?) ");

    stmt = conn.prepareStatement(query.toString().toLowerCase());
    try {
        stmt.setShort(1,siidpais);
        stmt.setShort(2,siidviaembarque);
        stmt.setInt(3,iidnivel);
        stmt.setInt(4,iidcatcliente);
        stmt.setString(5,vchrfc);
        stmt.setString(6,vchnombre);
        stmt.setString(7,vchrazonsocial);
        stmt.setString(8,vchcalle);
        stmt.setString(9,vchcolonia);
        stmt.setString(10,chcodigop);
        stmt.setString(11,vchmail);
        stmt.setString(12,vchdescregion);
        stmt.setString(13,vchdescestado);
        stmt.setString(14,vchtel1);
        stmt.setString(15,vchtel2);
        stmt.setString(16,vchfax);
        stmt.setString(17,vchcurp);
        stmt.setBoolean(18, isBitdadodebaja());
        stmt.setFloat(19,declimitecredito);
        stmt.setFloat(20,deccreditutilizado);
        stmt.setShort(21,sidiadepago);
        stmt.setBoolean(22, isBsuceptiblecredito());
        stmt.setShort(23,siplazodiaspago);
        stmt.setInt(24,iidestado);
        stmt.setInt(25,iidmunicipio);
        stmt.setString(26,vchnumeroexterior);
        stmt.setString(27,vchnumerointerior);
        stmt.setString(28,vchlocalidad);
        stmt.executeUpdate();
    } finally {
        stmt.close();
    }
}
/**
 * Loads the CLIENTE row identified by {@code iidnocliente} into this bean.
 *
 * Fix: the statement and result set are now closed in finally blocks; the
 * original leaked both when no row matched (the throw happened before the
 * close calls) and on any SQL error.
 *
 * NOTE(review): the historical contract is preserved — the method returns
 * false even on a successful load and signals "not found" by throwing.
 * Callers may depend on this, so it was not changed.
 *
 * @throws Exception when no matching row exists
 */
public boolean load() throws Exception, java.sql.SQLException {
    StringBuffer query = new StringBuffer();
    query.append("SELECT * ");
    query.append("FROM CLIENTE ");
    query.append("WHERE iidnocliente=? ");
    //query.append("ORDER BY vchnombre ");

    stmt = conn.prepareStatement(query.toString().toLowerCase());
    try {
        stmt.setInt(1,iidnocliente);
        rSet = stmt.executeQuery();
        try {
            if (rSet.next()) {
                this.siidpais = rSet.getShort("siidpais");
                this.siidviaembarque = rSet.getShort("siidviaembarque");
                this.iidnivel = rSet.getInt("iidnivel");
                this.iidcatcliente = rSet.getInt("iidcatcliente");
                this.vchrfc = rSet.getString("vchrfc");
                this.vchnombre = rSet.getString("vchnombre");
                this.vchrazonsocial = rSet.getString("vchrazonsocial");
                this.vchcalle = rSet.getString("vchcalle");
                this.vchcolonia = rSet.getString("vchcolonia");
                this.chcodigop = rSet.getString("chcodigop");
                this.vchmail = rSet.getString("vchmail");
                this.vchdescregion = rSet.getString("vchdescregion");
                this.vchdescestado = rSet.getString("vchdescestado");
                this.vchtel1 = rSet.getString("vchtel1");
                this.vchtel2 = rSet.getString("vchtel2");
                this.vchfax = rSet.getString("vchfax");
                this.vchcurp = rSet.getString("vchcurp");
                this.bitdadodebaja = rSet.getBoolean("bitdadodebaja");
                this.declimitecredito = rSet.getFloat("declimitecredito");
                this.deccreditutilizado = rSet.getFloat("deccreditutilizado");
                this.sidiadepago = rSet.getShort("sidiadepago");
                this.bsuceptiblecredito = rSet.getBoolean("bsuceptiblecredito");
                this.siplazodiaspago = rSet.getShort("siplazodiaspago");
                this.iidestado = rSet.getInt("iidestado");
                this.iidmunicipio = rSet.getInt("iidmunicipio");
                this.vchnumeroexterior = rSet.getString("vchnumexterior");
                this.vchnumerointerior = rSet.getString("vchnuminterior");
                this.vchlocalidad = rSet.getString("vchlocalidad");
            }
            else
                throw new Exception ("No se encontro el registro");
        } finally {
            rSet.close();
        }
    } finally {
        stmt.close();
    }
    return false;
}
/**
 * Deletes the CLIENTE row identified by {@code iidnocliente}.
 *
 * Fix: the statement is closed in a finally block so it is released even
 * when executeUpdate throws (the original leaked it on error).
 */
public void remove() throws Exception, java.sql.SQLException {
    StringBuffer query = new StringBuffer();
    query.append("DELETE FROM CLIENTE ");
    query.append("WHERE iidnocliente=? ");

    stmt = conn.prepareStatement(query.toString().toLowerCase());
    try {
        stmt.setInt(1,iidnocliente);
        stmt.executeUpdate();
    } finally {
        stmt.close();
    }
}
/**
 * Updates the CLIENTE row identified by {@code iidnocliente} with the
 * current bean state.
 *
 * Fix: the statement is closed in a finally block so it is released even
 * when executeUpdate throws (the original leaked it on error).
 */
public void store() throws SQLException {
    StringBuffer query = new StringBuffer();
    query.append("UPDATE CLIENTE SET siidpais=? , siidviaembarque=? , iidnivel=? , iidcatcliente=? , vchrfc=? , vchnombre=? , vchrazonsocial=? , vchcalle=? , vchcolonia=? , chcodigop=? , vchmail=? , vchdescregion=? , vchdescestado=? , vchtel1=? , vchtel2=? , vchfax=? , vchcurp=? , bitdadodebaja=? , declimitecredito=? , deccreditutilizado=? , sidiadepago=? , bsuceptiblecredito=? , siplazodiaspago=? , iidestado=? , iidmunicipio=?, vchnumexterior=?, vchnuminterior=?, vchlocalidad=? ");
    query.append("WHERE iidnocliente=? ");
    logger.debug(query.toString().toLowerCase());

    stmt = conn.prepareStatement(query.toString().toLowerCase());
    try {
        stmt.setShort(1,siidpais);
        stmt.setShort(2,siidviaembarque);
        stmt.setInt(3,iidnivel);
        stmt.setInt(4,iidcatcliente);
        stmt.setString(5,vchrfc);
        stmt.setString(6,vchnombre);
        stmt.setString(7,vchrazonsocial);
        stmt.setString(8,vchcalle);
        stmt.setString(9,vchcolonia);
        stmt.setString(10,chcodigop);
        stmt.setString(11,vchmail);
        stmt.setString(12,vchdescregion);
        stmt.setString(13,vchdescestado);
        stmt.setString(14,vchtel1);
        stmt.setString(15,vchtel2);
        stmt.setString(16,vchfax);
        stmt.setString(17,vchcurp);
        stmt.setBoolean(18, isBitdadodebaja());
        stmt.setFloat(19,declimitecredito);
        stmt.setFloat(20,deccreditutilizado);
        stmt.setShort(21,sidiadepago);
        stmt.setBoolean(22, isBsuceptiblecredito());
        stmt.setShort(23,siplazodiaspago);
        stmt.setInt(24,iidestado);
        stmt.setInt(25,iidmunicipio);
        stmt.setString(26,vchnumeroexterior);
        stmt.setString(27,vchnumerointerior);
        stmt.setString(28,vchlocalidad);
        stmt.setInt(29,iidnocliente);
        logger.debug("EL ID DE CLIENTE ES " + iidnocliente);
        stmt.executeUpdate();
    } finally {
        stmt.close();
    }
}
// Injects the JDBC connection used by all CRUD methods; must be called
// before create()/load()/store()/remove().
public void setConnection(java.sql.Connection conn) {
    this.conn = conn;
}
// Plain JavaBean accessors for the CLIENTE columns.
public void setIidnocliente(int iidnocliente){
    this.iidnocliente=iidnocliente;
}
public int getIidnocliente(){
    return this.iidnocliente;
}
public void setSiidpais(short siidpais){
    this.siidpais=siidpais;
}
public short getSiidpais(){
    return this.siidpais;
}
public void setSiidviaembarque(short siidviaembarque){
    this.siidviaembarque=siidviaembarque;
}
public short getSiidviaembarque(){
    return this.siidviaembarque;
}
public void setIidnivel(int iidnivel){
    this.iidnivel=iidnivel;
}
public int getIidnivel(){
    return this.iidnivel;
}
public void setIidcatcliente(int iidcatcliente){
    this.iidcatcliente=iidcatcliente;
}
public int getIidcatcliente(){
    return this.iidcatcliente;
}
public void setVchrfc(String vchrfc){
    this.vchrfc=vchrfc;
}
public String getVchrfc(){
    return this.vchrfc;
}
public void setVchnombre(String vchnombre){
    this.vchnombre=vchnombre;
}
public String getVchnombre(){
    return this.vchnombre;
}
public void setVchrazonsocial(String vchrazonsocial){
    this.vchrazonsocial=vchrazonsocial;
}
public String getVchrazonsocial(){
    return this.vchrazonsocial;
}
public void setVchcalle(String vchcalle){
    this.vchcalle=vchcalle;
}
public String getVchcalle(){
    return this.vchcalle;
}
public void setVchcolonia(String vchcolonia){
    this.vchcolonia=vchcolonia;
}
public String getVchcolonia(){
    return this.vchcolonia;
}
public void setChcodigop(String chcodigop){
    this.chcodigop=chcodigop;
}
public String getChcodigop(){
    return this.chcodigop;
}
public void setVchmail(String vchmail){
    this.vchmail=vchmail;
}
public String getVchmail(){
    return this.vchmail;
}
public void setVchdescregion(String vchdescregion){
    this.vchdescregion=vchdescregion;
}
public String getVchdescregion(){
    return this.vchdescregion;
}
public void setVchdescestado(String vchdescestado){
    this.vchdescestado=vchdescestado;
}
public String getVchdescestado(){
    return this.vchdescestado;
}
public void setVchtel1(String vchtel1){
    this.vchtel1=vchtel1;
}
public String getVchtel1(){
    return this.vchtel1;
}
public void setVchtel2(String vchtel2){
    this.vchtel2=vchtel2;
}
public String getVchtel2(){
    return this.vchtel2;
}
public void setVchfax(String vchfax){
    this.vchfax=vchfax;
}
public String getVchfax(){
    return this.vchfax;
}
public void setVchcurp(String vchcurp){
    this.vchcurp=vchcurp;
}
public String getVchcurp(){
    return this.vchcurp;
}
public void setBitdadodebaja(boolean bitdadodebaja){
    this.bitdadodebaja=bitdadodebaja;
}
// Delegates to isBitdadodebaja() (defined outside this view).
public boolean getBitdadodebaja(){
    return this.isBitdadodebaja();
}
public void setDeclimitecredito(float declimitecredito){
this.declimitecredito=declimitecredito;
}
public float getDeclimitecredito(){
return this.declimitecredito;
}
public void setDeccreditutilizado(float deccreditutilizado){
this.deccreditutilizado=deccreditutilizado;
}
public float getDeccreditutilizado(){
return this.deccreditutilizado;
}
public void setSidiadepago(short sidiadepago){
this.sidiadepago=sidiadepago;
}
public short getSidiadepago(){
return this.sidiadepago;
}
public void setBsuceptiblecredito(boolean bsuceptiblecredito){
this.bsuceptiblecredito=bsuceptiblecredito;
}
public boolean getBsuceptiblecredito(){
return this.isBsuceptiblecredito();
}
public void setSiplazodiaspago(short siplazodiaspago){
this.siplazodiaspago=siplazodiaspago;
}
public short getSiplazodiaspago(){
return this.siplazodiaspago;
}
public void setIidestado(int iidestado){
this.iidestado=iidestado;
}
public int getIidestado(){
return this.iidestado;
}
public void setIidmunicipio(int iidmunicipio){
this.iidmunicipio=iidmunicipio;
}
public int getIidmunicipio(){
return this.iidmunicipio;
}
/**
* @return the bitdadodebaja
*/
public boolean isBitdadodebaja() {
return bitdadodebaja;
}
/**
* @return the bsuceptiblecredito
*/
public boolean isBsuceptiblecredito() {
return bsuceptiblecredito;
}
/**
* @return the vchnumeroexterior
*/
public String getVchnumeroexterior() {
return vchnumeroexterior;
}
/**
* @param vchnumeroexterior the vchnumeroexterior to set
*/
public void setVchnumeroexterior(String vchnumeroexterior) {
this.vchnumeroexterior = vchnumeroexterior;
}
/**
* @return the vchnumerointerior
*/
public String getVchnumerointerior() {
return vchnumerointerior;
}
/**
* @param vchnumerointerior the vchnumerointerior to set
*/
public void setVchnumerointerior(String vchnumerointerior) {
this.vchnumerointerior = vchnumerointerior;
}
/**
* @return the vchlocalidad
*/
public String getVchlocalidad() {
return vchlocalidad;
}
/**
* @param vchlocalidad the vchlocalidad to set
*/
public void setVchlocalidad(String vchlocalidad) {
this.vchlocalidad = vchlocalidad;
}
}
| |
package md.frolov.legume.client.ui.components;
import java.util.Date;
import java.util.Map;
import com.github.gwtbootstrap.client.ui.Button;
import com.github.gwtbootstrap.client.ui.ControlGroup;
import com.github.gwtbootstrap.datetimepicker.client.ui.DateTimeBox;
import com.google.gwt.core.client.GWT;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.MouseOutEvent;
import com.google.gwt.event.logical.shared.ResizeEvent;
import com.google.gwt.i18n.client.DateTimeFormat;
import com.google.gwt.i18n.client.NumberFormat;
import com.google.gwt.resources.client.CssResource;
import com.google.gwt.uibinder.client.UiBinder;
import com.google.gwt.uibinder.client.UiField;
import com.google.gwt.uibinder.client.UiHandler;
import com.google.gwt.user.client.Window;
import com.google.gwt.user.client.ui.Composite;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.InlineLabel;
import com.google.gwt.user.client.ui.Label;
import com.google.gwt.user.client.ui.ResizeLayoutPanel;
import com.google.gwt.user.client.ui.Widget;
import com.google.web.bindery.event.shared.EventBus;
import com.googlecode.gflot.client.DataPoint;
import com.googlecode.gflot.client.PlotSelectionArea;
import com.googlecode.gflot.client.Series;
import com.googlecode.gflot.client.SeriesHandler;
import com.googlecode.gflot.client.SimplePlot;
import com.googlecode.gflot.client.event.PlotClickListener;
import com.googlecode.gflot.client.event.PlotHoverListener;
import com.googlecode.gflot.client.event.PlotItem;
import com.googlecode.gflot.client.event.PlotPosition;
import com.googlecode.gflot.client.event.PlotSelectedListener;
import com.googlecode.gflot.client.jsni.Plot;
import com.googlecode.gflot.client.options.*;
import com.googlecode.gflot.client.options.side.IntegerSideOptions;
import md.frolov.legume.client.Application;
import md.frolov.legume.client.elastic.ElasticSearchService;
import md.frolov.legume.client.elastic.api.Callback;
import md.frolov.legume.client.elastic.api.HistogramInterval;
import md.frolov.legume.client.elastic.api.HistogramRequest;
import md.frolov.legume.client.elastic.api.HistogramResponse;
import md.frolov.legume.client.events.*;
import md.frolov.legume.client.gin.WidgetInjector;
import md.frolov.legume.client.model.Search;
import md.frolov.legume.client.ui.EventFlowPanel;
import md.frolov.legume.client.util.ConversionUtils;
/**
 * Panel that plots a time histogram of log-hit counts for the current search.
 *
 * <p>It listens on the application event bus for {@link UpdateSearchQuery},
 * {@link LogMessageHoverEvent} and {@link LogMessageOutEvent}, requests
 * histogram data from the {@link ElasticSearchService}, and renders it with a
 * gflot {@link SimplePlot}. Plot selections and clicks fire new search /
 * focus events back onto the bus; the date controls and "last N" buttons do
 * the same.
 *
 * @author Ivan Frolov (ifrolov@tacitknowledge.com)
 */
public class HistogramComponent extends Composite implements UpdateSearchQueryHandler, LogMessageHoverEventHandler, LogMessageOutEventHandler
{
// NOTE(review): UPPER_SNAKE_CASE but not a constant — it is assigned once in
// initMaxSteps() from the client width; a lowerCamelCase final field would be
// more conventional.
private int MAXIMUM_STEPS;
interface HistogramComponentUiBinder extends UiBinder<Widget, HistogramComponent>
{
}
// CSS class names injected from the UiBinder template.
interface Css extends CssResource {
String disabled();
String dateControlsVisible();
}
private static HistogramComponentUiBinder binder = GWT.create(HistogramComponentUiBinder.class);
// Formatters are stateless on the client; safe to share as statics.
private static final DateTimeFormat DATE_LABEL_DTF = DateTimeFormat.getFormat("dd/MM/yyyy HH:mm:ss");
private static final NumberFormat NUMBER_FORMAT = NumberFormat.getFormat("#,###");
// UiBinder-bound widgets; `plot` is provided manually because it needs
// programmatic PlotOptions (see initPlot()).
@UiField(provided = true)
SimplePlot plot;
@UiField
ResizeLayoutPanel resizePanel;
@UiField
FlowPanel loading;
@UiField
FlowPanel error;
@UiField
Button zoomIn;
@UiField
Button zoomOut;
@UiField
Button downloadImage;
@UiField
Button trackPosition;
@UiField
InlineLabel fromDateLabel;
@UiField
Label toDateLabel;
@UiField
InlineLabel hitsLabel;
@UiField
Button chooseDateButton;
@UiField
FlowPanel dateControlsPanel;
@UiField
Css css;
@UiField
Button hideDateButton;
@UiField
Button goButton;
@UiField
DateTimeBox fromBox;
@UiField
DateTimeBox toBox;
@UiField
FlowPanel hoverInfo;
@UiField
Label countHoverInfo;
@UiField
Label dateHoverInfo;
@UiField
EventFlowPanel plotPanel;
@UiField
ControlGroup datesControlGroup;
private EventBus eventBus = WidgetInjector.INSTANCE.eventBus();
private ElasticSearchService elasticSearchService = WidgetInjector.INSTANCE.elasticSearchService();
private Application application = WidgetInjector.INSTANCE.application();
private ConversionUtils conversionUtils = ConversionUtils.INSTANCE;
// Last search actually requested (with resolved real from/to dates).
private Search currentSearch;
// Single-slot queue: a search arriving while a request is in flight waits here.
private Search queuedSearch;
private boolean inprocess = false;
// Set when a plot selection fires, so the click event generated by the same
// mouse release does not also trigger a focus change.
private boolean ignoreClickEvent = false;
private HistogramInterval currentInterval;
public HistogramComponent()
{
initMaxSteps();
initPlot();
initWidget(binder.createAndBindUi(this));
eventBus.addHandler(UpdateSearchQuery.TYPE, this);
eventBus.addHandler(LogMessageHoverEvent.TYPE, this);
eventBus.addHandler(LogMessageOutEvent.TYPE, this);
}
/** Picks the histogram bucket count: twice the client width, clamped to [1000, 4000]. */
private void initMaxSteps()
{
int maxSteps = Window.getClientWidth()*2;
if(maxSteps<1000) {
maxSteps = 1000;
}
if(maxSteps>4000) {
maxSteps = 4000;
}
MAXIMUM_STEPS = maxSteps;
}
/** Builds the gflot plot (styling, selection, crosshair) and wires its listeners. */
private void initPlot()
{
PlotOptions plotOptions = PlotOptions.create();
// add tick formatter to the options
plotOptions.addYAxisOptions(AxisOptions.create().setTickColor("#fafafa"));
//Styling
plotOptions.setGridOptions(GridOptions.create().setBorderWidth(IntegerSideOptions.of(0, 0, 1, 0)).setBorderColor("#999")
.setClickable(true).setHoverable(true).setAutoHighlight(true));
// X-only selection: dragging selects a time range to zoom into.
plotOptions.setSelectionOptions(SelectionOptions.create().setMode(SelectionOptions.SelectionMode.X).setColor("#ccc"));
plotOptions.setGlobalSeriesOptions(GlobalSeriesOptions.create().setShadowSize(0).setLineSeriesOptions(
LineSeriesOptions.create().setFill(true).setSteps(true).setZero(true).setLineWidth(1)
));
//Crosshair
plotOptions.setCrosshairOptions(CrosshairOptions.create().setColor("hsl(210,60%,70%)").setLineWidth(1).setMode(CrosshairOptions.Mode.X));
// create the plot
plot = new SimplePlot(plotOptions);
//add listeners
// Range selection -> new search over the selected interval, focused at its start.
plot.addSelectedListener(new PlotSelectedListener()
{
@Override
public void onPlotSelected(final PlotSelectionArea area)
{
long from = area.getX().getFrom().longValue();
long to = area.getX().getTo().longValue();
// Suppress the click event that accompanies the mouse release.
ignoreClickEvent = true;
Search search = application.getCurrentSearch().clone();
search.setFromDate(from);
search.setToDate(to);
search.setFocusDate(from);
eventBus.fireEvent(new UpdateSearchQuery(search));
}
});
// Hover -> show count + date tooltip panel.
plot.addHoverListener(new PlotHoverListener()
{
@Override
public void onPlotHover(final Plot plot, final PlotPosition position, final PlotItem item)
{
if (item != null && item.getDataPoint().getY() >= 0)
{
String countStr = NUMBER_FORMAT.format(item.getDataPoint().getY()) + currentInterval.getDescription();
countHoverInfo.setText(countStr);
}
else
{
countHoverInfo.setText("");
}
if (position != null)
{
Date theDate = new Date(position.getX().longValue());
String dateStr = DATE_LABEL_DTF.format(theDate);
dateHoverInfo.setText(dateStr);
}
else
{
dateHoverInfo.setText("");
}
hoverInfo.setVisible(true);
}
}, false);
// Plain click -> focus the log view on the clicked instant.
plot.addClickListener(new PlotClickListener()
{
@Override
public void onPlotClick(final Plot plot, final PlotPosition position, final PlotItem item)
{
if(ignoreClickEvent) {
ignoreClickEvent = false;
return;
}
long focusDate = position.getX().longValue();
eventBus.fireEvent(new FocusOnDateEvent(focusDate));
}
}, false);
}
/** Replaces the plot series with the histogram response and updates the hit total. */
private void updateHistogramWithData(HistogramResponse response)
{
plot.getModel().removeAllSeries();
SeriesHandler handler = plot.getModel().addSeries(Series.of("", "#999"));
long total = 0;
for (Map.Entry<Long, Long> entry : response.getDateValueMap().entrySet())
{
handler.add(DataPoint.of(entry.getKey(), entry.getValue()));
total += entry.getValue();
}
// Re-apply x-axis options so tick labels match the server-chosen interval.
plot.getPlotOptions().setXAxesOptions(AxesOptions.create().addAxisOptions(TimeSeriesAxisOptions.create()
.setTickColor("#eee").setReserveSpace(true)
.setTimeZone("browser").setTimeFormat(response.getInterval().getDateTimeFormat())
));
currentInterval = response.getInterval();
loading.setVisible(false);
plot.removeStyleName(css.disabled());
plot.setVisible(true);
plot.redraw(true);
plot.clearCrosshair();
plot.lockCrosshair();
hitsLabel.setText(NUMBER_FORMAT.format(total));
}
/** Requests a new histogram when the query or date range actually changed. */
@Override
public void onUpdateSearchQuery(final UpdateSearchQuery event)
{
if(!isUpdateRequired(event.getSearchQuery())) {
return;
} else {
// Remember the search with its real (resolved) date range for comparison.
currentSearch = event.getSearchQuery().clone();
currentSearch.setFromDate(currentSearch.getRealFromDate());
currentSearch.setToDate(currentSearch.getRealToDate());
}
requestHistogram(event.getSearchQuery());
//update labels
String fromDateStr;
fromDateStr = DATE_LABEL_DTF.format(new Date(event.getSearchQuery().getRealFromDate()));
fromDateLabel.setText(fromDateStr);
String toDateStr;
toDateStr = DATE_LABEL_DTF.format(new Date(event.getSearchQuery().getRealToDate()));
toDateLabel.setText(toDateStr);
}
/** @return true when the query text or the from/to dates differ from the last request. */
private boolean isUpdateRequired(Search newSearch) {
if(currentSearch == null) {
return true;
}
return !(newSearch.getQuery().equals(currentSearch.getQuery())
&& newSearch.getFromDate() == currentSearch.getFromDate()
&& newSearch.getToDate() == currentSearch.getToDate()
);
}
/** Keeps the plot sized to the panel, reserving 220px for the side controls. */
@UiHandler("resizePanel")
public void onPanelResize(final ResizeEvent event)
{
plot.setWidth(event.getWidth() - 220);
plot.redraw();
}
/** Fires the queued search (if any) once the in-flight request finished. */
private void checkQueued() {
if(queuedSearch != null) {
requestHistogram(queuedSearch);
queuedSearch = null;
}
}
/**
 * Issues the histogram request. While one request is in flight, only the
 * latest incoming search is kept (single-slot queue) and replayed afterwards.
 */
private void requestHistogram(Search search)
{
if (inprocess)
{
queuedSearch = search;
return;
}
inprocess = true;
plot.addStyleName(css.disabled());
hoverInfo.setVisible(false);
error.setVisible(false);
loading.setVisible(true);
hitsLabel.setText("n/a");
HistogramRequest request = new HistogramRequest(search, MAXIMUM_STEPS);
elasticSearchService.query(request, new Callback<HistogramRequest, HistogramResponse>()
{
@Override
public void onFailure(final Throwable exception)
{
loading.setVisible(false);
error.setVisible(true);
plot.setVisible(false);
inprocess = false;
checkQueued();
}
@Override
public void onSuccess(final HistogramRequest query, final HistogramResponse response)
{
updateHistogramWithData(response);
inprocess = false;
checkQueued();
}
});
}
/**
 * When "track position" is toggled, follows the hovered log message: moves the
 * crosshair, or widens the search window when the message lies outside it.
 */
@Override
public void onLogMessageHover(final LogMessageHoverEvent event)
{
if (!trackPosition.isToggled())
{
return;
}
Search search = application.getCurrentSearch();
long selectionDate = event.getDate();
long toDate = search.getRealToDate();
long fromDate = search.getRealFromDate();
boolean update = false;
if (selectionDate > toDate)
{
update = true;
// Extend forward by half the current window, but never past "now".
long alltime = toDate - fromDate;
toDate = selectionDate + alltime / 2;
long now = new Date().getTime();
if (toDate > now)
{
toDate = now;
}
}
else if (selectionDate < fromDate)
{
update = true;
long alltime = toDate - fromDate;
fromDate = selectionDate - alltime / 2;
}
if (update)
{
Search newSearch = search.clone();
newSearch.setFromDate(fromDate);
newSearch.setToDate(toDate);
eventBus.fireEvent(new UpdateSearchQuery(newSearch));
}
else
{
plot.lockCrosshair(PlotPosition.of(event.getDate(), 0));
}
}
/** Clears the tracking crosshair when the mouse leaves a log message. */
@Override
public void onLogMessageOut(final LogMessageOutEvent event)
{
plot.clearCrosshair();
plot.lockCrosshair();
}
// NOTE(review): currentSearch may still be null here if the very first
// request failed before onUpdateSearchQuery stored it — looks like a
// potential NPE path; confirm against Callback semantics.
@UiHandler("tryAgain")
public void onTryAgain(final ClickEvent event) {
requestHistogram(currentSearch);
}
@UiHandler("zoomInError")
public void onZoomInError(final ClickEvent event) {
onZoomIn(event);
}
/** Halves the visible time range (keeps the centre), refusing below 100ms. */
@UiHandler("zoomIn")
public void onZoomIn(final ClickEvent event)
{
Search search = application.getCurrentSearch();
// fromDate == 0 appears to mean "all time"; keep that sentinel — TODO confirm in Search.
long from = search.getFromDate() == 0 ? 0 : search.getRealFromDate();
long to = search.getRealToDate();
if (to - from < 100)
{
return;
}
long allTime = to - from;
long fromDate;
long toDate;
if(from == 0) {
fromDate = 0;
toDate = to - allTime / 2;
} else {
fromDate = from + allTime / 4;
toDate = to - allTime / 4;
}
Search newSearch = search.clone();
newSearch.setFromDate(fromDate);
newSearch.setToDate(toDate);
eventBus.fireEvent(new UpdateSearchQuery(newSearch));
}
/** Doubles the visible time range around the centre, clamped to "now". */
@UiHandler("zoomOut")
public void onZoomOut(final ClickEvent event)
{
Search search = application.getCurrentSearch();
long from = search.getRealFromDate();
long to = search.getRealToDate();
long now = new Date().getTime();
long allTime = to - from;
from = from - allTime / 2;
to = to + allTime / 2;
if (to > now)
{
// Shift the surplus to the left so the window width is preserved.
from = from - (to - now);
to = now;
}
Search newSearch = search.clone();
newSearch.setFromDate(from);
newSearch.setToDate(to);
eventBus.fireEvent(new UpdateSearchQuery(newSearch));
}
/** Exports the plot as an image where the browser supports it. */
@UiHandler("downloadImage")
public void onDownloadImage(final ClickEvent event)
{
if (plot.isExportAsImageEnabled())
{
plot.saveAsImage();
}
else
{
Window.alert("Sorry. This is not supported in your browser");
}
}
/** Opens the manual date-range controls pre-filled with the current range. */
@UiHandler("chooseDateButton")
public void onChooseDateButtonClick(final ClickEvent event)
{
Search search = application.getCurrentSearch();
datesControlGroup.removeStyleName("error");
fromBox.setValue(new Date(search.getRealFromDate()));
toBox.setValue(new Date(search.getRealToDate()));
dateControlsPanel.addStyleName(css.dateControlsVisible());
hideDateButton.setVisible(true);
chooseDateButton.setVisible(false);
}
/** Hides the manual date-range controls. */
@UiHandler("hideDateButton")
public void onHideDateButtonClick(final ClickEvent event)
{
dateControlsPanel.removeStyleName(css.dateControlsVisible());
hideDateButton.setVisible(false);
chooseDateButton.setVisible(true);
}
/** Validates the manual from/to dates and submits them as a new search. */
@UiHandler("goButton")
public void onGoButtonClick(final ClickEvent event)
{
Date from = fromBox.getValue();
Date to = toBox.getValue();
if(from == null || to == null || from.getTime()>=to.getTime() || from.getTime()<0 || to.getTime()<0) {
datesControlGroup.addStyleName("error");
return;
}
datesControlGroup.removeStyleName("error");
Search search = application.getCurrentSearch().clone();
search.setFromDate(fromBox.getValue().getTime());
search.setToDate(toBox.getValue().getTime());
eventBus.fireEvent(new UpdateSearchQuery(search));
onHideDateButtonClick(null);
}
/**
 * Submits a relative "last N minutes" search. A negative from-date and zero
 * to-date appear to encode "N minutes back from now" / "now" — TODO confirm
 * against Search's date-resolution logic. mins == 0 is used for "all time".
 */
private void submitLastNmins(long mins) {
Search search = application.getCurrentSearch().clone();
search.setFromDate(-mins*60000);
search.setToDate(0);
search.setFocusDate(0);
eventBus.fireEvent(new UpdateSearchQuery(search));
onHideDateButtonClick(null);
}
// Quick-range buttons, each delegating to submitLastNmins with the range in minutes.
@UiHandler("last15m")
public void onLast15mClick(ClickEvent event) {
submitLastNmins(15);
}
@UiHandler("last30m")
public void onLast30mClick(ClickEvent event) {
submitLastNmins(30);
}
@UiHandler("last1h")
public void onLast1hClick(ClickEvent event) {
submitLastNmins(60);
}
@UiHandler("last2h")
public void onLast2hClick(ClickEvent event) {
submitLastNmins(120);
}
@UiHandler("last4h")
public void onLast4hClick(ClickEvent event) {
submitLastNmins(240);
}
@UiHandler("last6h")
public void onLast6hClick(ClickEvent event) {
submitLastNmins(360);
}
@UiHandler("last12h")
public void onLast12hClick(ClickEvent event) {
submitLastNmins(720);
}
@UiHandler("last24h")
public void onLast24hClick(ClickEvent event) {
submitLastNmins(1440);
}
@UiHandler("last2d")
public void onLast2dClick(ClickEvent event) {
submitLastNmins(2880);
}
@UiHandler("last3d")
public void onLast3dClick(ClickEvent event) {
submitLastNmins(4320);
}
@UiHandler("last5d")
public void onLast5dClick(ClickEvent event) {
submitLastNmins(7200);
}
@UiHandler("last7d")
public void onLast7dClick(ClickEvent event) {
submitLastNmins(10080);
}
@UiHandler("lastAllTime")
public void onLastAllTimeClick(ClickEvent event) {
submitLastNmins(0);
}
/** Hides the hover tooltip when the mouse leaves the plot panel. */
@UiHandler("plotPanel")
public void handleMouseOut(final MouseOutEvent event)
{
hoverInfo.setVisible(false);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.scram.internals;
import java.nio.charset.StandardCharsets;
import java.util.Base64;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.security.sasl.SaslException;
/**
 * SCRAM request/response message creation and parsing based on
 * <a href="https://tools.ietf.org/html/rfc5802">RFC 5802</a>
 *
 * <p>Each nested class models one of the four SCRAM handshake messages and
 * offers two constructors: one that parses the wire bytes (throwing
 * {@link SaslException} on malformed input) and one that builds the message
 * from its components. {@code toMessage()} serializes back to the wire form.
 */
public class ScramMessages {
static abstract class AbstractScramMessage {
// Regex equivalents of the ABNF terminals from RFC 5802 section 7.
static final String ALPHA = "[A-Za-z]+";
static final String VALUE_SAFE = "[\\x01-\\x7F&&[^=,]]+";
static final String VALUE = "[\\x01-\\x7F&&[^,]]+";
static final String PRINTABLE = "[\\x21-\\x7E&&[^,]]+";
// saslname: value-safe chars plus the escapes =2C (comma) and =3D (equals).
static final String SASLNAME = "(?:[\\x01-\\x7F&&[^=,]]|=2C|=3D)+";
static final String BASE64_CHAR = "[a-zA-Z0-9/+]";
static final String BASE64 = String.format("(?:%s{4})*(?:%s{3}=|%s{2}==)?", BASE64_CHAR, BASE64_CHAR, BASE64_CHAR);
// Optional reserved "m=" attribute, skipped when present.
static final String RESERVED = String.format("(m=%s,)?", VALUE);
static final String EXTENSIONS = String.format("(,%s=%s)*", ALPHA, VALUE);
// Wire representation of this message.
abstract String toMessage();
// SCRAM messages are exchanged as UTF-8 bytes.
public byte[] toBytes() {
return toMessage().getBytes(StandardCharsets.UTF_8);
}
protected String toMessage(byte[] messageBytes) {
return new String(messageBytes, StandardCharsets.UTF_8);
}
}
/**
 * Format:
 * gs2-header [reserved-mext ","] username "," nonce ["," extensions]
 * Limitations:
 * Only gs2-header "n" is supported.
 * Extensions are ignored.
 *
 */
public static class ClientFirstMessage extends AbstractScramMessage {
// Leading "n," pins the only supported gs2-header (no channel binding).
private static final Pattern PATTERN = Pattern.compile(String.format(
"n,(a=(?<authzid>%s))?,%sn=(?<saslname>%s),r=(?<nonce>%s)(?<extensions>%s)",
SASLNAME,
RESERVED,
SASLNAME,
PRINTABLE,
EXTENSIONS));
private final String saslName;
private final String nonce;
private final String authorizationId;
private final ScramExtensions extensions;
/** Parses a client-first message from its wire bytes. */
public ClientFirstMessage(byte[] messageBytes) throws SaslException {
String message = toMessage(messageBytes);
Matcher matcher = PATTERN.matcher(message);
if (!matcher.matches())
throw new SaslException("Invalid SCRAM client first message format: " + message);
String authzid = matcher.group("authzid");
this.authorizationId = authzid != null ? authzid : "";
this.saslName = matcher.group("saslname");
this.nonce = matcher.group("nonce");
// "extensions" captures a leading comma when extensions are present.
String extString = matcher.group("extensions");
this.extensions = extString.startsWith(",") ? new ScramExtensions(extString.substring(1)) : new ScramExtensions();
}
/** Builds a client-first message from its components (no authzid). */
public ClientFirstMessage(String saslName, String nonce, Map<String, String> extensions) {
this.saslName = saslName;
this.nonce = nonce;
this.extensions = new ScramExtensions(extensions);
this.authorizationId = ""; // Optional authzid not specified in gs2-header
}
public String saslName() {
return saslName;
}
public String nonce() {
return nonce;
}
public String authorizationId() {
return authorizationId;
}
public String gs2Header() {
return "n," + authorizationId + ",";
}
public ScramExtensions extensions() {
return extensions;
}
// client-first-message-bare: the part without the gs2 header, used in the
// auth message over which signatures are computed.
public String clientFirstMessageBare() {
String extensionStr = extensions.toString();
if (extensionStr.isEmpty())
return String.format("n=%s,r=%s", saslName, nonce);
else
return String.format("n=%s,r=%s,%s", saslName, nonce, extensionStr);
}
String toMessage() {
return gs2Header() + clientFirstMessageBare();
}
}
/**
 * Format:
 * [reserved-mext ","] nonce "," salt "," iteration-count ["," extensions]
 * Limitations:
 * Extensions are ignored.
 *
 */
public static class ServerFirstMessage extends AbstractScramMessage {
private static final Pattern PATTERN = Pattern.compile(String.format(
"%sr=(?<nonce>%s),s=(?<salt>%s),i=(?<iterations>[0-9]+)%s",
RESERVED,
PRINTABLE,
BASE64,
EXTENSIONS));
private final String nonce;
private final byte[] salt;
private final int iterations;
/** Parses a server-first message, validating that iterations is a positive int. */
public ServerFirstMessage(byte[] messageBytes) throws SaslException {
String message = toMessage(messageBytes);
Matcher matcher = PATTERN.matcher(message);
if (!matcher.matches())
throw new SaslException("Invalid SCRAM server first message format: " + message);
try {
this.iterations = Integer.parseInt(matcher.group("iterations"));
if (this.iterations <= 0)
throw new SaslException("Invalid SCRAM server first message format: invalid iterations " + iterations);
} catch (NumberFormatException e) {
// The regex only admits digits, but the value may still overflow int.
throw new SaslException("Invalid SCRAM server first message format: invalid iterations");
}
this.nonce = matcher.group("nonce");
String salt = matcher.group("salt");
this.salt = Base64.getDecoder().decode(salt);
}
/** Builds a server-first message; the nonce is client nonce + server nonce. */
public ServerFirstMessage(String clientNonce, String serverNonce, byte[] salt, int iterations) {
this.nonce = clientNonce + serverNonce;
this.salt = salt;
this.iterations = iterations;
}
public String nonce() {
return nonce;
}
public byte[] salt() {
return salt;
}
public int iterations() {
return iterations;
}
String toMessage() {
return String.format("r=%s,s=%s,i=%d", nonce, Base64.getEncoder().encodeToString(salt), iterations);
}
}
/**
 * Format:
 * channel-binding "," nonce ["," extensions]"," proof
 * Limitations:
 * Extensions are ignored.
 *
 */
public static class ClientFinalMessage extends AbstractScramMessage {
private static final Pattern PATTERN = Pattern.compile(String.format(
"c=(?<channel>%s),r=(?<nonce>%s)%s,p=(?<proof>%s)",
BASE64,
PRINTABLE,
EXTENSIONS,
BASE64));
private final byte[] channelBinding;
private final String nonce;
// Mutable: the proof is computed over clientFinalMessageWithoutProof() and
// set afterwards via proof(byte[]).
private byte[] proof;
/** Parses a client-final message from its wire bytes. */
public ClientFinalMessage(byte[] messageBytes) throws SaslException {
String message = toMessage(messageBytes);
Matcher matcher = PATTERN.matcher(message);
if (!matcher.matches())
throw new SaslException("Invalid SCRAM client final message format: " + message);
this.channelBinding = Base64.getDecoder().decode(matcher.group("channel"));
this.nonce = matcher.group("nonce");
this.proof = Base64.getDecoder().decode(matcher.group("proof"));
}
/** Builds a client-final message; the proof must be set separately. */
public ClientFinalMessage(byte[] channelBinding, String nonce) {
this.channelBinding = channelBinding;
this.nonce = nonce;
}
public byte[] channelBinding() {
return channelBinding;
}
public String nonce() {
return nonce;
}
public byte[] proof() {
return proof;
}
public void proof(byte[] proof) {
this.proof = proof;
}
public String clientFinalMessageWithoutProof() {
return String.format("c=%s,r=%s",
Base64.getEncoder().encodeToString(channelBinding),
nonce);
}
String toMessage() {
return String.format("%s,p=%s",
clientFinalMessageWithoutProof(),
Base64.getEncoder().encodeToString(proof));
}
}
/**
 * Format:
 * ("e=" server-error-value | "v=" base64_server_signature) ["," extensions]
 * Limitations:
 * Extensions are ignored.
 *
 */
public static class ServerFinalMessage extends AbstractScramMessage {
private static final Pattern PATTERN = Pattern.compile(String.format(
"(?:e=(?<error>%s))|(?:v=(?<signature>%s))%s",
VALUE_SAFE,
BASE64,
EXTENSIONS));
// Exactly one of these is non-null: error for failure, signature for success.
private final String error;
private final byte[] serverSignature;
/** Parses a server-final message (either an error or a server signature). */
public ServerFinalMessage(byte[] messageBytes) throws SaslException {
String message = toMessage(messageBytes);
Matcher matcher = PATTERN.matcher(message);
if (!matcher.matches())
throw new SaslException("Invalid SCRAM server final message format: " + message);
String error = null;
try {
error = matcher.group("error");
} catch (IllegalArgumentException e) {
// ignore
// NOTE(review): defensive only — the "error" group always exists in
// PATTERN, so this catch appears unreachable; confirm before removing.
}
if (error == null) {
this.serverSignature = Base64.getDecoder().decode(matcher.group("signature"));
this.error = null;
} else {
this.serverSignature = null;
this.error = error;
}
}
/** Builds a server-final message; pass a null error for the success form. */
public ServerFinalMessage(String error, byte[] serverSignature) {
this.error = error;
this.serverSignature = serverSignature;
}
public String error() {
return error;
}
public byte[] serverSignature() {
return serverSignature;
}
String toMessage() {
if (error != null)
return "e=" + error;
else
return "v=" + Base64.getEncoder().encodeToString(serverSignature);
}
}
}
| |
package com.gildedgames.fuzzyjava.core.evaluation;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import com.gildedgames.fuzzyjava.api.evaluation.FFuncAnt;
import com.gildedgames.fuzzyjava.api.evaluation.FFuncCons;
import com.gildedgames.fuzzyjava.api.evaluation.FFuncProp;
import com.gildedgames.fuzzyjava.api.evaluation.IProperty;
import com.gildedgames.fuzzyjava.api.evaluation.IRuleBuilder;
import com.gildedgames.fuzzyjava.api.evaluation.IRuleSet;
import com.gildedgames.fuzzyjava.api.evaluation.Parameter;
import com.gildedgames.fuzzyjava.api.evaluation.Variable;
import com.gildedgames.fuzzyjava.api.functions.FFunction;
import com.gildedgames.fuzzyjava.core.Ops;
import com.gildedgames.fuzzyjava.core.evaluation.exceptions.BoundedVarUsedFreeException;
import com.gildedgames.fuzzyjava.util.Pair;
/**
* Uses tonnes of inner classes to build
* new propositions out of multiple
* functions.
* @author Emile
*
*/
public class RuleBuilder implements IRuleBuilder
{
/**
 * Base for antecedents that combine several child functions: it gathers the
 * property/variable pairs of every child into a single merged set, which the
 * concrete subclass exposes unchanged.
 */
private static abstract class FuncAntMerge<E> implements FFuncAnt<E>
{
    private final Set<Entry<IProperty<E>, Parameter[]>> merged;

    private FuncAntMerge(FFuncAnt<E>... children)
    {
        this.merged = new HashSet<>();
        for (final FFuncAnt<E> child : children)
        {
            this.merged.addAll(child.propertiesWithVars());
        }
    }

    @Override
    public Set<Entry<IProperty<E>, Parameter[]>> propertiesWithVars()
    {
        return this.merged;
    }
}
/**
 * Decorator base for antecedents that wrap a single function: property and
 * variable lookups are forwarded to the wrapped delegate.
 */
private static abstract class FuncAntBase<E> implements FFuncAnt<E>
{
    private final FFuncAnt<E> delegate;

    private FuncAntBase(FFuncAnt<E> delegate)
    {
        this.delegate = delegate;
    }

    @Override
    public Set<Entry<IProperty<E>, Parameter[]>> propertiesWithVars()
    {
        return this.delegate.propertiesWithVars();
    }
}
/**
 * Builds the fuzzy conjunction of the given antecedents: membership and
 * evaluation both fold the children together with {@link Ops#and}, starting
 * from the conjunction identity 1.0.
 */
@Override
public <E> FFuncAnt<E> and(final FFuncAnt<E>... functions)
{
    return new FuncAntMerge<E>(functions)
    {
        @Override
        public float membershipOf(Object[] element)
        {
            float result = 1.0f;
            for (final FFuncAnt<E> part : functions)
            {
                result = Ops.and(result, part.membershipOf(element));
            }
            return result;
        }

        @Override
        public float evaluate(Map<Variable, ?> env, IRuleSet<E> ruleSet, Set<Entry<List<Object>, IProperty<E>>> inferred)
        {
            float result = 1.0f;
            for (final FFuncAnt<E> part : functions)
            {
                result = Ops.and(result, part.evaluate(env, ruleSet, inferred));
            }
            return result;
        }
    };
}
/**
 * Builds the fuzzy disjunction of the given antecedents: membership and
 * evaluation both fold the children together with {@link Ops#or}, starting
 * from the disjunction identity 0.0.
 */
@Override
public <E> FFuncAnt<E> or(final FFuncAnt<E>... functions)
{
    return new FuncAntMerge<E>(functions)
    {
        @Override
        public float membershipOf(Object[] element)
        {
            float result = 0.0f;
            for (final FFuncAnt<E> part : functions)
            {
                result = Ops.or(result, part.membershipOf(element));
            }
            return result;
        }

        @Override
        public float evaluate(Map<Variable, ?> env, IRuleSet<E> ruleSet, Set<Entry<List<Object>, IProperty<E>>> inferred)
        {
            float result = 0.0f;
            for (final FFuncAnt<E> part : functions)
            {
                result = Ops.or(result, part.evaluate(env, ruleSet, inferred));
            }
            return result;
        }
    };
}
/**
 * Builds the fuzzy implication function1 -> function2, realized in the
 * classical form (not A) or B. The returned antecedent reports the union of
 * both operands' property/variable pairs.
 */
@Override
public <E> FFuncAnt<E> implies(final FFuncAnt<E> function1, final FFuncAnt<E> function2)
{
    final Set<Entry<IProperty<E>, Parameter[]>> combined = new HashSet<>(function1.propertiesWithVars());
    combined.addAll(function2.propertiesWithVars());
    return new FFuncAnt<E>()
    {
        @Override
        public float membershipOf(Object[] element)
        {
            final float antecedent = function1.membershipOf(element);
            final float consequent = function2.membershipOf(element);
            return Ops.or(Ops.not(antecedent), consequent);
        }

        @Override
        public float evaluate(Map<Variable, ?> env, IRuleSet<E> ruleSet, Set<Entry<List<Object>, IProperty<E>>> inferred)
        {
            final float antecedent = function1.evaluate(env, ruleSet, inferred);
            final float consequent = function2.evaluate(env, ruleSet, inferred);
            return Ops.or(Ops.not(antecedent), consequent);
        }

        @Override
        public Set<Entry<IProperty<E>, Parameter[]>> propertiesWithVars()
        {
            return combined;
        }
    };
}
/**
 * Builds the fuzzy negation of an antecedent: membership and evaluation are
 * the {@link Ops#not} of the wrapped function's results; property/variable
 * pairs pass through via {@link FuncAntBase}.
 */
@Override
public <E> FFuncAnt<E> not(final FFuncAnt<E> function)
{
    return new FuncAntBase<E>(function)
    {
        @Override
        public float membershipOf(Object[] element)
        {
            return Ops.not(function.membershipOf(element));
        }

        @Override
        public float evaluate(Map<Variable, ?> env, IRuleSet<E> ruleSet, Set<Entry<List<Object>, IProperty<E>>> inferred)
        {
            return Ops.not(function.evaluate(env, ruleSet, inferred));
        }
    };
}
/**
 * Builds the fuzzy negation of a consequent: both membership views are
 * negated with {@link Ops#not}, while the property function and the bound
 * variables are forwarded to the wrapped consequent unchanged.
 */
@Override
public <E> FFuncCons<E> notC(final FFuncCons<E> function)
{
    return new FFuncCons<E>()
    {
        @Override
        public float membershipOf(Object[] element)
        {
            return Ops.not(function.membershipOf(element));
        }

        @Override
        public float membershipOfFloat(float element)
        {
            return Ops.not(function.membershipOfFloat(element));
        }

        @Override
        public FFuncProp<E> getPropFunc()
        {
            return function.getPropFunc();
        }

        @Override
        public Variable[] variables()
        {
            return function.variables();
        }
    };
}
/**
 * Wraps a fuzzy membership function over floats together with the property
 * it grades, producing a property function usable inside rules.
 */
@Override
public <E> FFuncProp<E> prop(FFunction<Float> function, IProperty<E> property)
{
return new FuncProp<>(function, property);
}
/**
 * Builds a single-parameter antecedent from a membership function, the
 * property it grades (may be null — nothing is registered for inference
 * then) and the parameter supplying the graded value.
 */
@Override
public <E> FFuncAnt<E> ant(final FFunction<E> function, final IProperty<E> property, final Parameter param)
{
    // Register which property this antecedent constrains, along with its
    // single parameter, so inference can locate it.
    final Set<Entry<IProperty<E>, Parameter[]>> set = new HashSet<>(1);
    if (property != null)
    {
        set.add(new Pair<>(property, new Parameter[] { param }));
    }
    return new FFuncAnt<E>()
    {
        // NOTE(review): assumes element[0] is an E — the cast is unchecked
        // and fails at use-site if a different type is passed; confirm with
        // callers.
        @SuppressWarnings("unchecked")
        @Override
        public float membershipOf(Object[] element)
        {
            return function.membershipOf((E) element[0]);
        }
        @Override
        public Set<Entry<IProperty<E>, Parameter[]>> propertiesWithVars()
        {
            return set;
        }
        // Resolves the parameter against the variable environment and grades
        // the resolved value; ruleSet/inferred are not consulted here.
        @SuppressWarnings("unchecked")
        @Override
        public float evaluate(Map<Variable, ?> env, IRuleSet<E> ruleSet, Set<Entry<List<Object>, IProperty<E>>> inferred)
        {
            return function.membershipOf((E) param.getValue(env));
        }
    };
}
/**
 * Convenience overload: builds a single-parameter antecedent with no
 * associated property (nothing is registered for inference).
 */
@Override
public <E> FFuncAnt<E> ant(FFunction<E> function, Parameter param)
{
return this.ant(function, null, param);
}
@Override
public <E> FFuncAnt<E> ant(final FFunction<Object[]> function, final IProperty<E> property, final Parameter... param)
{
    // Register the graded property together with all of its parameters,
    // unless no property was supplied.
    final Set<Entry<IProperty<E>, Parameter[]>> properties = new HashSet<>();
    if (property != null)
    {
        properties.add(new Pair<>(property, param));
    }
    return new FFuncAnt<E>()
    {
        @Override
        public float membershipOf(Object[] element)
        {
            return function.membershipOf(element);
        }
        @Override
        public Set<Entry<IProperty<E>, Parameter[]>> propertiesWithVars()
        {
            return properties;
        }
        @Override
        public float evaluate(Map<Variable, ?> env, IRuleSet<E> ruleSet, Set<Entry<List<Object>, IProperty<E>>> inferred)
        {
            // Resolve every parameter against the environment, then grade
            // the resolved tuple.
            final Object[] resolved = new Object[param.length];
            int index = 0;
            for (final Parameter p : param)
            {
                resolved[index++] = p.getValue(env);
            }
            return this.membershipOf(resolved);
        }
    };
}
/**
 * Convenience overload: builds a multi-parameter antecedent with no
 * associated property (nothing is registered for inference).
 */
@Override
public <E> FFuncAnt<E> ant(FFunction<Object[]> function, Parameter... param)
{
return this.ant(function, null, param);
}
// Shared empty binding returned by parameters that bind no variables.
private static final Map<Variable, ?> emptyMap = new HashMap<>(0);
/**
 * Creates a parameter that always resolves to the given constant value,
 * binds no variables, and infers nothing.
 */
@Override
public Parameter constant(final Object constant)
{
return new Parameter()
{
@Override
public Variable[] variables()
{
// A constant depends on no variables.
return new Variable[0];
}
@Override
public Object getValue(Map<Variable, ?> env)
{
// The environment is irrelevant for a constant.
return constant;
}
@Override
public Map<Variable, ?> tryInferVars(Object paramValue)
{
// Nothing can be inferred from a constant.
return RuleBuilder.emptyMap;
}
};
}
/**
 * Creates a fresh, unbound variable for use in rule parameters.
 */
@Override
public Variable createVar()
{
return new Variable();
}
@Override
public <E> FFuncAnt<E> very(final FFuncAnt<E> function)
{
    // Linguistic hedge "very" (concentration): square the membership grade.
    return new FuncAntBase<E>(function)
    {
        @Override
        public float membershipOf(Object[] element)
        {
            final float grade = function.membershipOf(element);
            return (float) Math.pow(grade, 2);
        }
        @Override
        public float evaluate(Map<Variable, ?> interpretation, IRuleSet<E> ruleSet, Set<Entry<List<Object>, IProperty<E>>> inferred)
        {
            final float grade = function.evaluate(interpretation, ruleSet, inferred);
            return (float) Math.pow(grade, 2);
        }
    };
}
@Override
public <E> FFuncAnt<E> slightly(final FFuncAnt<E> function)
{
    // Linguistic hedge "slightly" (dilation): square root of the grade.
    return new FuncAntBase<E>(function)
    {
        @Override
        public float membershipOf(Object[] element)
        {
            final float grade = function.membershipOf(element);
            return (float) Math.pow(grade, 0.5f);
        }
        @Override
        public float evaluate(Map<Variable, ?> interpretation, IRuleSet<E> ruleSet, Set<Entry<List<Object>, IProperty<E>>> inferred)
        {
            final float grade = function.evaluate(interpretation, ruleSet, inferred);
            return (float) Math.pow(grade, 0.5f);
        }
    };
}
/**
 * Universal quantifier: "for all var, function holds". The bound variable
 * is stripped from every parameter that mentions it so callers no longer
 * see it as free, and evaluation folds the body over the entire universe
 * with fuzzy AND.
 *
 * NOTE(review): this rewrites the set returned by
 * function.propertiesWithVars() and the Parameter arrays inside it in
 * place, so the wrapped antecedent observes the substitution too — behavior
 * preserved from the original implementation.
 */
@Override
public <E> FFuncAnt<E> all(final Variable var, final FFuncAnt<E> function)
{
    final Set<Entry<IProperty<E>, Parameter[]>> set = function.propertiesWithVars();
    for (final Entry<IProperty<E>, Parameter[]> entry : set)
    {
        final Parameter[] params = entry.getValue();
        for (int i = 0; i < params.length; i++)
        {
            final Parameter param = params[i];
            // Does this parameter reference the bound variable at all?
            boolean replace = false;
            for (final Variable varI : param.variables())
            {
                replace = replace || varI == var;
            }
            if (replace)
            {
                // Copy every variable except the bound one.
                // BUG FIX: 'count' was previously declared 'final int count
                // = 0' and never advanced, so all surviving variables
                // overwrote slot 0 and the remaining slots stayed null.
                // NOTE(review): assumes 'var' occurs at most once in
                // param.variables(); otherwise the array is oversized —
                // confirm with Parameter implementations.
                final Variable[] newVarA = new Variable[param.variables().length - 1];
                int count = 0;
                for (final Variable varI : param.variables())
                {
                    if (varI != var)
                    {
                        newVarA[count++] = varI;
                    }
                }
                params[i] = new Parameter()
                {
                    @Override
                    public Variable[] variables()
                    {
                        // The bound variable is no longer reported as free.
                        return newVarA;
                    }
                    @Override
                    public Object getValue(Map<Variable, ?> env)
                    {
                        return param.getValue(env);
                    }
                    @Override
                    public Map<Variable, ?> tryInferVars(Object paramValue)
                    {
                        return RuleBuilder.emptyMap;
                    }
                };
            }
        }
    }
    return new FFuncAnt<E>()
    {
        @Override
        public float membershipOf(Object[] element)
        {
            return function.membershipOf(element);
        }
        @Override
        public Set<Entry<IProperty<E>, Parameter[]>> propertiesWithVars()
        {
            return set;
        }
        @Override
        public float evaluate(Map<Variable, ?> interpretation, IRuleSet<E> ruleSet, Set<Entry<List<Object>, IProperty<E>>> inferred)
        {
            // A variable bound by this quantifier must not also appear free
            // in the surrounding interpretation.
            if (interpretation.containsKey(var))
            {
                throw new BoundedVarUsedFreeException();
            }
            // Extend the interpretation with each element of the universe in
            // turn and AND the memberships together.
            final Map<Variable, Object> newInterpretation = new HashMap<>(interpretation);
            float and = 1.0f;
            for (final E el : ruleSet.getUniverse())
            {
                newInterpretation.put(var, el);
                final float membership = ruleSet.getMembership(function, newInterpretation, inferred);
                and = Ops.and(and, membership);
            }
            return and;
        }
    };
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.stateful.analysis;
import org.apache.nifi.components.state.Scope;
import org.apache.nifi.processor.ProcessSessionFactory;
import org.apache.nifi.state.MockStateManager;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.Test;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import static org.apache.nifi.processors.stateful.analysis.AttributeRollingWindow.REL_FAILED_SET_STATE;
import static org.apache.nifi.processors.stateful.analysis.AttributeRollingWindow.ROLLING_WINDOW_COUNT_KEY;
import static org.apache.nifi.processors.stateful.analysis.AttributeRollingWindow.ROLLING_WINDOW_MEAN_KEY;
import static org.apache.nifi.processors.stateful.analysis.AttributeRollingWindow.ROLLING_WINDOW_VALUE_KEY;
/**
 * Unit tests for {@link AttributeRollingWindow}: attribute-parse failures,
 * state-manager failures, basic windowing, per-trigger count verification and
 * micro-batched (sub-window) aggregation.
 *
 * NOTE(review): several tests depend on wall-clock sleeps relative to the
 * configured window sizes and are therefore timing sensitive.
 */
public class TestAttributeRollingWindow {

    /**
     * Asserts the three rolling-window attributes on a flowfile.
     *
     * @param flowFile flowfile to inspect.
     * @param value expected rolling window value attribute.
     * @param count expected rolling window count attribute.
     * @param mean expected rolling window mean attribute.
     */
    private static void assertWindowAttributes(final MockFlowFile flowFile, final String value, final String count, final String mean) {
        flowFile.assertAttributeEquals(ROLLING_WINDOW_VALUE_KEY, value);
        flowFile.assertAttributeEquals(ROLLING_WINDOW_COUNT_KEY, count);
        flowFile.assertAttributeEquals(ROLLING_WINDOW_MEAN_KEY, mean);
    }

    /**
     * Runs the queued flowfile once, asserts it was routed to success, and
     * returns it after clearing the transfer state for the next round.
     */
    private static MockFlowFile runOnceAndGetSuccess(final TestRunner runner) {
        runner.run(1);
        runner.assertAllFlowFilesTransferred(AttributeRollingWindow.REL_SUCCESS, 1);
        final MockFlowFile flowFile = runner.getFlowFilesForRelationship(AttributeRollingWindow.REL_SUCCESS).get(0);
        runner.clearTransferState();
        return flowFile;
    }

    /**
     * A non-numeric tracked value must route the flowfile to failure.
     */
    @Test
    public void testFailureDueToBadAttribute() throws InterruptedException {
        final TestRunner runner = TestRunners.newTestRunner(AttributeRollingWindow.class);
        runner.setProperty(AttributeRollingWindow.VALUE_TO_TRACK, "${value}");
        runner.setProperty(AttributeRollingWindow.TIME_WINDOW, "3 sec");
        final Map<String, String> attributes = new HashMap<>();
        attributes.put("value", "bad");
        runner.enqueue("1".getBytes(), attributes);
        runner.run(1);
        runner.assertAllFlowFilesTransferred(AttributeRollingWindow.REL_FAILURE);
    }

    /**
     * Simulates state-manager failures: a failed state read leaves the
     * flowfile queued; a failed state write routes it to the failed-set-state
     * relationship without any rolling-window attributes.
     */
    @Test
    public void testStateFailures() throws InterruptedException, IOException {
        final TestRunner runner = TestRunners.newTestRunner(AttributeRollingWindow.class);
        final MockStateManager mockStateManager = runner.getStateManager();
        final AttributeRollingWindow processor = (AttributeRollingWindow) runner.getProcessor();
        final ProcessSessionFactory processSessionFactory = runner.getProcessSessionFactory();
        runner.setProperty(AttributeRollingWindow.VALUE_TO_TRACK, "${value}");
        runner.setProperty(AttributeRollingWindow.TIME_WINDOW, "3 sec");
        processor.onScheduled(runner.getProcessContext());
        final Map<String, String> attributes = new HashMap<>();
        attributes.put("value", "1");
        // First trigger: the state read fails, so the flowfile must stay
        // queued.
        mockStateManager.setFailOnStateGet(Scope.LOCAL, true);
        runner.enqueue(new byte[0], attributes);
        processor.onTrigger(runner.getProcessContext(), processSessionFactory.createSession());
        runner.assertQueueNotEmpty();
        // Second trigger: the read succeeds but the state write fails; the
        // flowfile is routed to the failure relationship with no attributes.
        mockStateManager.setFailOnStateGet(Scope.LOCAL, false);
        mockStateManager.setFailOnStateSet(Scope.LOCAL, true);
        processor.onTrigger(runner.getProcessContext(), processSessionFactory.createSession());
        runner.assertQueueEmpty();
        runner.assertAllFlowFilesTransferred(REL_FAILED_SET_STATE, 1);
        final MockFlowFile mockFlowFile = runner.getFlowFilesForRelationship(REL_FAILED_SET_STATE).get(0);
        mockFlowFile.assertAttributeNotExists(ROLLING_WINDOW_VALUE_KEY);
        mockFlowFile.assertAttributeNotExists(ROLLING_WINDOW_COUNT_KEY);
        mockFlowFile.assertAttributeNotExists(ROLLING_WINDOW_MEAN_KEY);
    }

    /**
     * Basic windowing: two values accumulate inside the 300 ms window, then a
     * sleep longer than the window lets them age out.
     */
    @Test
    public void testBasic() throws InterruptedException {
        final TestRunner runner = TestRunners.newTestRunner(AttributeRollingWindow.class);
        runner.setProperty(AttributeRollingWindow.VALUE_TO_TRACK, "${value}");
        runner.setProperty(AttributeRollingWindow.TIME_WINDOW, "300 ms");
        final Map<String, String> attributes = new HashMap<>();
        attributes.put("value", "1");
        runner.enqueue("1".getBytes(), attributes);
        MockFlowFile flowFile = runOnceAndGetSuccess(runner);
        assertWindowAttributes(flowFile, "1.0", "1", "1.0");
        runner.enqueue("2".getBytes(), attributes);
        flowFile = runOnceAndGetSuccess(runner);
        assertWindowAttributes(flowFile, "2.0", "2", "1.0");
        // Let the 300 ms window lapse so the earlier values age out.
        Thread.sleep(500L);
        runner.enqueue("2".getBytes(), attributes);
        flowFile = runOnceAndGetSuccess(runner);
        assertWindowAttributes(flowFile, "1.0", "1", "1.0");
    }

    /**
     * Issues 60 flowfiles each carrying tracked value 1 and verifies the
     * window's running value, count and mean after every trigger, then
     * repeats with an explicit sub-window length configured.
     */
    @Test
    public void testVerifyCount() throws InterruptedException {
        final TestRunner runner = TestRunners.newTestRunner(AttributeRollingWindow.class);
        runner.setProperty(AttributeRollingWindow.VALUE_TO_TRACK, "${value}");
        runner.setProperty(AttributeRollingWindow.TIME_WINDOW, "10 sec");
        MockFlowFile flowFile;
        final Map<String, String> attributes = new HashMap<>();
        attributes.put("value", "1");
        for (int i = 1; i < 61; i++) {
            runner.enqueue(String.valueOf(i).getBytes(), attributes);
            runner.run();
            flowFile = runner.getFlowFilesForRelationship(AttributeRollingWindow.REL_SUCCESS).get(0);
            runner.clearTransferState();
            final Double value = (double) i;
            final Double mean = value / i;
            assertWindowAttributes(flowFile, String.valueOf(value), String.valueOf(i), String.valueOf(mean));
            Thread.sleep(10L);
        }
        runner.setProperty(AttributeRollingWindow.VALUE_TO_TRACK, "${value}");
        runner.setProperty(AttributeRollingWindow.SUB_WINDOW_LENGTH, "500 ms");
        runner.setProperty(AttributeRollingWindow.TIME_WINDOW, "10 sec");
        for (int i = 1; i < 10; i++) {
            runner.enqueue(String.valueOf(i).getBytes(), attributes);
            runner.run();
            flowFile = runner.getFlowFilesForRelationship(AttributeRollingWindow.REL_SUCCESS).get(0);
            runner.clearTransferState();
            final Double value = (double) i;
            final Double mean = value / i;
            // Previously asserted String.valueOf(Double.valueOf(i)) here;
            // String.valueOf(value) is the identical string and matches the
            // first loop.
            assertWindowAttributes(flowFile, String.valueOf(value), String.valueOf(i), String.valueOf(mean));
            Thread.sleep(10L);
        }
    }

    /**
     * Exercises sub-window (micro-batch) aggregation with a 500 ms sub-window
     * inside a 1 sec window, interleaving sleeps so values roll between
     * sub-windows and eventually age out of the full window.
     */
    @Test
    public void testMicroBatching() throws InterruptedException {
        final TestRunner runner = TestRunners.newTestRunner(AttributeRollingWindow.class);
        runner.setProperty(AttributeRollingWindow.VALUE_TO_TRACK, "${value}");
        runner.setProperty(AttributeRollingWindow.SUB_WINDOW_LENGTH, "500 ms");
        runner.setProperty(AttributeRollingWindow.TIME_WINDOW, "1 sec");
        final Map<String, String> attributes = new HashMap<>();
        attributes.put("value", "2");
        runner.enqueue("1".getBytes(), attributes);
        MockFlowFile flowFile = runOnceAndGetSuccess(runner);
        assertWindowAttributes(flowFile, "2.0", "1", "2.0");
        Thread.sleep(200L);
        runner.enqueue("2".getBytes(), attributes);
        flowFile = runOnceAndGetSuccess(runner);
        assertWindowAttributes(flowFile, "4.0", "2", "2.0");
        Thread.sleep(300L);
        runner.enqueue("2".getBytes(), attributes);
        flowFile = runOnceAndGetSuccess(runner);
        assertWindowAttributes(flowFile, "6.0", "3", "2.0");
        Thread.sleep(200L);
        runner.enqueue("2".getBytes(), attributes);
        flowFile = runOnceAndGetSuccess(runner);
        assertWindowAttributes(flowFile, "8.0", "4", "2.0");
        Thread.sleep(300L);
        // By now the oldest value has aged out of the 1 sec window.
        runner.enqueue("2".getBytes(), attributes);
        flowFile = runOnceAndGetSuccess(runner);
        assertWindowAttributes(flowFile, "6.0", "3", "2.0");
        runner.enqueue("2".getBytes(), attributes);
        flowFile = runOnceAndGetSuccess(runner);
        assertWindowAttributes(flowFile, "8.0", "4", "2.0");
    }
}
| |
/**
* Copyright (c) 2012-2020 Netflix, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.msl.util;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import java.util.Collection;
import java.util.HashSet;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import com.netflix.msl.MslConstants;
import com.netflix.msl.MslCryptoException;
import com.netflix.msl.MslEncodingException;
import com.netflix.msl.MslError;
import com.netflix.msl.MslException;
import com.netflix.msl.MslInternalException;
import com.netflix.msl.MslMasterTokenException;
import com.netflix.msl.crypto.ICryptoContext;
import com.netflix.msl.crypto.NullCryptoContext;
import com.netflix.msl.crypto.SessionCryptoContext;
import com.netflix.msl.crypto.SymmetricCryptoContext;
import com.netflix.msl.entityauth.EntityAuthenticationScheme;
import com.netflix.msl.test.ExpectedMslException;
import com.netflix.msl.tokens.MasterToken;
import com.netflix.msl.tokens.ServiceToken;
import com.netflix.msl.tokens.UserIdToken;
import com.netflix.msl.userauth.MockEmailPasswordAuthenticationFactory;
/**
* Simple MSL store unit tests.
*
* @author Wesley Miaw <wmiaw@netflix.com>
*/
public class SimpleMslStoreTest {
/** Keyset identifier used when building symmetric crypto contexts. */
private static final String KEYSET_ID = "keyset";
/** Local user ID under which user ID tokens are added to the store. */
private static final String USER_ID = "userid";
/** Maximum number of randomly generated tokens. */
private static final int MAX_TOKENS = 3;
/** Stress test pool shutdown timeout in milliseconds. */
private static final int STRESS_TIMEOUT_MILLIS = 3000;
/**
 * @param c1 first collection.
 * @param c2 second collection.
 * @return true if each collection contains all elements found in the other.
 */
private static boolean equal(final Collection<? extends Object> c1, final Collection<? extends Object> c2) {
    // Mutual containment: equality ignoring order and element positions.
    if (!c1.containsAll(c2))
        return false;
    return c2.containsAll(c1);
}
// Expected-exception rule used by tests that verify specific MSL errors.
@Rule
public ExpectedMslException thrown = ExpectedMslException.none();
/**
 * Creates the shared mock MSL context once for the whole test class.
 */
@BeforeClass
public static void setup() throws MslEncodingException, MslCryptoException {
ctx = new MockMslContext(EntityAuthenticationScheme.NONE, false);
}
/**
 * Releases the shared mock MSL context after the whole test class.
 */
@AfterClass
public static void teardown() {
ctx = null;
}
/**
 * Creates a fresh store before each test so state never leaks between tests.
 */
@Before
public void createStore() {
store = new SimpleMslStore();
}
/**
 * Drops the store reference after each test.
 */
@After
public void destroyStore() {
store = null;
}
@Test
public void storeCryptoContext() throws MslEncodingException, MslCryptoException {
    // A token with no context yields null; after storing, the identical
    // instance comes back and the token is reported as the master token.
    final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1);
    assertNull(store.getCryptoContext(masterToken));
    final ICryptoContext stored = new SymmetricCryptoContext(ctx, KEYSET_ID, masterToken.getEncryptionKey(), masterToken.getSignatureKey(), null);
    store.setCryptoContext(masterToken, stored);
    final ICryptoContext retrieved = store.getCryptoContext(masterToken);
    assertNotNull(retrieved);
    assertSame(stored, retrieved);
    assertEquals(masterToken, store.getMasterToken());
}
@Test
public void replaceCryptoContext() throws MslEncodingException, MslCryptoException {
    // Setting a second context for the same token replaces the first.
    final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1);
    final ICryptoContext original = new SymmetricCryptoContext(ctx, KEYSET_ID, masterToken.getEncryptionKey(), masterToken.getSignatureKey(), null);
    final ICryptoContext replacement = new NullCryptoContext();
    store.setCryptoContext(masterToken, original);
    final ICryptoContext beforeReplace = store.getCryptoContext(masterToken);
    assertSame(original, beforeReplace);
    assertNotSame(replacement, beforeReplace);
    store.setCryptoContext(masterToken, replacement);
    final ICryptoContext afterReplace = store.getCryptoContext(masterToken);
    assertNotSame(original, afterReplace);
    assertSame(replacement, afterReplace);
    assertEquals(masterToken, store.getMasterToken());
}
@Test
public void removeCryptoContext() throws MslEncodingException, MslCryptoException {
    // Removing the only context also clears the store's master token.
    final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1);
    store.setCryptoContext(masterToken, new NullCryptoContext());
    store.removeCryptoContext(masterToken);
    assertNull(store.getMasterToken());
    assertNull(store.getCryptoContext(masterToken));
}
@Test
public void clearCryptoContext() throws MslEncodingException, MslCryptoException {
    // Clearing every context also drops the master token.
    final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1);
    final ICryptoContext context = new SymmetricCryptoContext(ctx, KEYSET_ID, masterToken.getEncryptionKey(), masterToken.getSignatureKey(), null);
    store.setCryptoContext(masterToken, context);
    store.clearCryptoContexts();
    assertNull(store.getCryptoContext(masterToken));
    assertNull(store.getMasterToken());
}
@Test
public void twoCryptoContexts() throws MslEncodingException, MslCryptoException, MslMasterTokenException {
    // Contexts for two distinct master tokens coexist; the store reports mtB
    // as its master token after both are set.
    final MasterToken mtA = MslTestUtils.getMasterToken(ctx, 1, 1);
    final MasterToken mtB = MslTestUtils.getMasterToken(ctx, 2, 1);
    final ICryptoContext contextA = new SessionCryptoContext(ctx, mtA);
    final ICryptoContext contextB = new SessionCryptoContext(ctx, mtB);
    store.setCryptoContext(mtA, contextA);
    store.setCryptoContext(mtB, contextB);
    final ICryptoContext retrievedA = store.getCryptoContext(mtA);
    assertNotNull(retrievedA);
    assertSame(contextA, retrievedA);
    final ICryptoContext retrievedB = store.getCryptoContext(mtB);
    assertNotNull(retrievedB);
    assertSame(contextB, retrievedB);
    assertEquals(mtB, store.getMasterToken());
}
@Test
public void replaceTwoCryptoContexts() throws MslEncodingException, MslCryptoException, MslMasterTokenException {
    // Replacing one token's context must not disturb the other token's
    // context or the store's master token.
    final MasterToken mtA = MslTestUtils.getMasterToken(ctx, 1, 1);
    final MasterToken mtB = MslTestUtils.getMasterToken(ctx, 2, 1);
    final ICryptoContext contextA = new SessionCryptoContext(ctx, mtA);
    final ICryptoContext contextB = new SessionCryptoContext(ctx, mtB);
    store.setCryptoContext(mtA, contextA);
    store.setCryptoContext(mtB, contextB);
    assertEquals(mtB, store.getMasterToken());
    final ICryptoContext replacement = new NullCryptoContext();
    store.setCryptoContext(mtA, replacement);
    final ICryptoContext retrievedA = store.getCryptoContext(mtA);
    assertNotNull(retrievedA);
    assertNotSame(contextA, retrievedA);
    assertSame(replacement, retrievedA);
    final ICryptoContext retrievedB = store.getCryptoContext(mtB);
    assertNotNull(retrievedB);
    assertSame(contextB, retrievedB);
    assertEquals(mtB, store.getMasterToken());
}
@Test
public void clearTwoCryptoContexts() throws MslEncodingException, MslCryptoException, MslMasterTokenException {
    // Clearing removes both contexts and the master token.
    final MasterToken mtA = MslTestUtils.getMasterToken(ctx, 1, 1);
    final MasterToken mtB = MslTestUtils.getMasterToken(ctx, 2, 1);
    store.setCryptoContext(mtA, new SessionCryptoContext(ctx, mtA));
    store.setCryptoContext(mtB, new SessionCryptoContext(ctx, mtB));
    store.clearCryptoContexts();
    assertNull(store.getCryptoContext(mtA));
    assertNull(store.getCryptoContext(mtB));
    assertNull(store.getMasterToken());
}
/**
 * Removing one token's context leaves the other token's context intact.
 */
@Test
public void removeTwoCryptoContexts() throws MslEncodingException, MslCryptoException, MslMasterTokenException {
    final MasterToken mtA = MslTestUtils.getMasterToken(ctx, 1, 1);
    final MasterToken mtB = MslTestUtils.getMasterToken(ctx, 2, 1);
    final ICryptoContext ccMtA1 = new SessionCryptoContext(ctx, mtA);
    final ICryptoContext ccMtB1 = new SessionCryptoContext(ctx, mtB);
    store.setCryptoContext(mtA, ccMtA1);
    store.setCryptoContext(mtB, ccMtB1);
    store.removeCryptoContext(mtA);
    assertNull(store.getCryptoContext(mtA));
    // Consistency fix: sibling tests assert context identity with
    // assertSame; assertEquals was used here even though the intent is the
    // same-instance check.
    assertSame(ccMtB1, store.getCryptoContext(mtB));
}
/**
 * Crypto context add/remove stress test runner.
 *
 * Randomly adds or removes a crypto context for one of many master tokens
 * (by master token entity identity). Also iterates through the set crypto
 * contexts.
 */
private static class CryptoContextStressor implements Runnable {
    /**
     * Create a new crypto context stressor.
     *
     * @param ctx MSL context.
     * @param store MSL store.
     * @param count the number of master token identities to stress.
     */
    public CryptoContextStressor(final MslContext ctx, final MslStore store, final int count) {
        this.ctx = ctx;
        this.store = store;
        this.count = count;
    }

    /* (non-Javadoc)
     * @see java.lang.Runnable#run()
     */
    @Override
    public void run() {
        final Random rng = new Random();
        try {
            // Perform 10 random operations per master token identity. The
            // RNG call order (token index first, then operation) matches the
            // original implementation.
            for (int iteration = 0; iteration < 10 * count; ++iteration) {
                final int tokenIndex = rng.nextInt(count);
                final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, tokenIndex, 1);
                final int option = rng.nextInt(4);
                if (option == 0) {
                    // Clear any crypto context for this master token.
                    store.setCryptoContext(masterToken, null);
                } else if (option == 1) {
                    // Install a fresh session crypto context.
                    store.setCryptoContext(masterToken, new SessionCryptoContext(ctx, masterToken));
                } else if (option == 2) {
                    // Read-only lookup.
                    store.getCryptoContext(masterToken);
                } else {
                    store.removeCryptoContext(masterToken);
                }
            }
        } catch (final MslMasterTokenException e) {
            throw new MslInternalException("Unexpected master token exception.", e);
        } catch (final MslEncodingException e) {
            throw new MslInternalException("Unexpected master token encoding exception.", e);
        } catch (final MslCryptoException e) {
            throw new MslInternalException("Unexpected master token creation exception.", e);
        }
    }

    /** MSL context. */
    private final MslContext ctx;
    /** MSL store. */
    private final MslStore store;
    /** Number of crypto context identities. */
    private final int count;
}
@Test
public void stressCryptoContexts() throws InterruptedException, MslEncodingException, MslCryptoException {
    // Hammer the store from many concurrent stressors and require the pool
    // to drain within the stress timeout.
    final ExecutorService service = Executors.newCachedThreadPool();
    int remaining = 10 * MAX_TOKENS;
    while (remaining-- > 0) {
        service.execute(new CryptoContextStressor(ctx, store, MAX_TOKENS));
    }
    service.shutdown();
    assertTrue(service.awaitTermination(STRESS_TIMEOUT_MILLIS, TimeUnit.MILLISECONDS));
}
@Test
public void nonReplayableId() throws MslEncodingException, MslCryptoException {
    // IDs start at 1 and advance by one per request.
    final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1);
    for (int expected = 1; expected < 10; ++expected) {
        assertEquals(expected, store.getNonReplayableId(masterToken));
    }
}
/**
 * Verifies the non-replayable ID wraps from MAX_LONG_VALUE back to zero.
 *
 * Ignored: it issues MAX_LONG_VALUE sequential IDs, which is far too slow
 * to run routinely.
 */
@Ignore
@Test
public void wrappedNonReplayableId() throws MslEncodingException, MslCryptoException {
final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1);
// Advance the counter to one below the maximum value.
for (long i = 1; i < MslConstants.MAX_LONG_VALUE; ++i)
store.getNonReplayableId(masterToken);
// The next three IDs demonstrate the wrap: max, then 0, then 1.
assertEquals(MslConstants.MAX_LONG_VALUE, store.getNonReplayableId(masterToken));
assertEquals(0, store.getNonReplayableId(masterToken));
assertEquals(1, store.getNonReplayableId(masterToken));
}
@Test
public void twoNonReplayableIds() throws MslEncodingException, MslCryptoException {
    // Two distinct master tokens advance independent ID counters.
    final MasterToken masterTokenA = MslTestUtils.getMasterToken(ctx, 1, 1);
    final MasterToken masterTokenB = MslTestUtils.getMasterToken(ctx, 1, 2);
    for (int expected = 1; expected < 10; ++expected) {
        assertEquals(expected, store.getNonReplayableId(masterTokenA));
        assertEquals(expected, store.getNonReplayableId(masterTokenB));
    }
}
@Test
public void addUserIdToken() throws MslException {
    // A stored user ID token is retrievable under its user ID only.
    final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1);
    final UserIdToken userIdToken = MslTestUtils.getUserIdToken(ctx, masterToken, 1, MockEmailPasswordAuthenticationFactory.USER);
    store.setCryptoContext(masterToken, new NullCryptoContext());
    store.addUserIdToken(USER_ID, userIdToken);
    assertEquals(userIdToken, store.getUserIdToken(USER_ID));
    assertNull(store.getUserIdToken(USER_ID + "x"));
}
@Test
public void removeUserIdToken() throws MslEncodingException, MslCryptoException, MslException {
    // After removal the user ID no longer resolves to a token.
    final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1);
    final UserIdToken userIdToken = MslTestUtils.getUserIdToken(ctx, masterToken, 1, MockEmailPasswordAuthenticationFactory.USER);
    store.setCryptoContext(masterToken, new NullCryptoContext());
    store.addUserIdToken(USER_ID, userIdToken);
    store.removeUserIdToken(userIdToken);
    assertNull(store.getUserIdToken(USER_ID));
}
@Test
public void replaceUserIdToken() throws MslEncodingException, MslCryptoException, MslException {
    // Adding a second token under the same user ID replaces the first.
    final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1);
    final UserIdToken firstToken = MslTestUtils.getUserIdToken(ctx, masterToken, 1, MockEmailPasswordAuthenticationFactory.USER);
    final UserIdToken secondToken = MslTestUtils.getUserIdToken(ctx, masterToken, 2, MockEmailPasswordAuthenticationFactory.USER);
    store.setCryptoContext(masterToken, new NullCryptoContext());
    store.addUserIdToken(USER_ID, firstToken);
    store.addUserIdToken(USER_ID, secondToken);
    assertEquals(secondToken, store.getUserIdToken(USER_ID));
}
@Test
public void twoUserIdTokens() throws MslEncodingException, MslCryptoException, MslException {
    // Tokens stored under different user IDs do not interfere.
    final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1);
    final String userIdA = USER_ID + "A";
    final String userIdB = USER_ID + "B";
    final UserIdToken userIdTokenA = MslTestUtils.getUserIdToken(ctx, masterToken, 1, MockEmailPasswordAuthenticationFactory.USER);
    final UserIdToken userIdTokenB = MslTestUtils.getUserIdToken(ctx, masterToken, 2, MockEmailPasswordAuthenticationFactory.USER);
    store.setCryptoContext(masterToken, new NullCryptoContext());
    store.addUserIdToken(userIdA, userIdTokenA);
    store.addUserIdToken(userIdB, userIdTokenB);
    assertEquals(userIdTokenA, store.getUserIdToken(userIdA));
    assertEquals(userIdTokenB, store.getUserIdToken(userIdB));
}
@Test
public void replaceTwoUserIdTokens() throws MslEncodingException, MslCryptoException, MslException {
    // Replacing the token under one user ID leaves the other untouched.
    final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1);
    final String userIdA = USER_ID + "A";
    final String userIdB = USER_ID + "B";
    final UserIdToken userIdTokenA = MslTestUtils.getUserIdToken(ctx, masterToken, 1, MockEmailPasswordAuthenticationFactory.USER);
    final UserIdToken userIdTokenB = MslTestUtils.getUserIdToken(ctx, masterToken, 2, MockEmailPasswordAuthenticationFactory.USER);
    store.setCryptoContext(masterToken, new NullCryptoContext());
    store.addUserIdToken(userIdA, userIdTokenA);
    store.addUserIdToken(userIdB, userIdTokenB);
    final UserIdToken userIdTokenC = MslTestUtils.getUserIdToken(ctx, masterToken, 3, MockEmailPasswordAuthenticationFactory.USER);
    store.addUserIdToken(userIdA, userIdTokenC);
    assertEquals(userIdTokenC, store.getUserIdToken(userIdA));
    assertEquals(userIdTokenB, store.getUserIdToken(userIdB));
}
@Test
public void removeTwoUserIdTokens() throws MslEncodingException, MslCryptoException, MslException {
    // Removing one user's token leaves the other user's token in place.
    final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1);
    final String userIdA = USER_ID + "A";
    final String userIdB = USER_ID + "B";
    final UserIdToken userIdTokenA = MslTestUtils.getUserIdToken(ctx, masterToken, 1, MockEmailPasswordAuthenticationFactory.USER);
    final UserIdToken userIdTokenB = MslTestUtils.getUserIdToken(ctx, masterToken, 2, MockEmailPasswordAuthenticationFactory.USER);
    store.setCryptoContext(masterToken, new NullCryptoContext());
    store.addUserIdToken(userIdA, userIdTokenA);
    store.addUserIdToken(userIdB, userIdTokenB);
    store.removeUserIdToken(userIdTokenA);
    assertNull(store.getUserIdToken(userIdA));
    assertEquals(userIdTokenB, store.getUserIdToken(userIdB));
}
@Test
public void clearUserIdTokens() throws MslEncodingException, MslCryptoException, MslException {
    // Clearing removes every stored user ID token.
    final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1);
    final String userIdA = USER_ID + "A";
    final String userIdB = USER_ID + "B";
    final UserIdToken userIdTokenA = MslTestUtils.getUserIdToken(ctx, masterToken, 1, MockEmailPasswordAuthenticationFactory.USER);
    final UserIdToken userIdTokenB = MslTestUtils.getUserIdToken(ctx, masterToken, 2, MockEmailPasswordAuthenticationFactory.USER);
    store.setCryptoContext(masterToken, new NullCryptoContext());
    store.addUserIdToken(userIdA, userIdTokenA);
    store.addUserIdToken(userIdB, userIdTokenB);
    store.clearUserIdTokens();
    assertNull(store.getUserIdToken(userIdA));
    assertNull(store.getUserIdToken(userIdB));
}
/**
 * Adding a user ID token whose master token has no stored crypto context
 * must be rejected with USERIDTOKEN_MASTERTOKEN_NOT_FOUND.
 */
@Test
public void unknownMasterTokenUserIdToken() throws MslEncodingException, MslCryptoException, MslException {
    thrown.expect(MslException.class);
    thrown.expectMslError(MslError.USERIDTOKEN_MASTERTOKEN_NOT_FOUND);

    // Deliberately skip store.setCryptoContext() so the master token is
    // unknown to the store when the user ID token is added.
    final MasterToken unknownMasterToken = MslTestUtils.getMasterToken(ctx, 1, 1);
    final UserIdToken boundToken = MslTestUtils.getUserIdToken(ctx, unknownMasterToken, 1, MockEmailPasswordAuthenticationFactory.USER);
    store.addUserIdToken(USER_ID, boundToken);
}
/**
 * Removing the crypto context of master token A must not delete any user
 * ID tokens, because master token B carries the same serial number and is
 * still in the store.
 */
@Test
public void removeMasterTokenSameSerialNumberUserIdTokens() throws MslEncodingException, MslCryptoException, MslException {
    // Master tokens A and B share serial number 1 (different sequence
    // numbers), so A's removal does not retire the serial number.
    final MasterToken masterTokenA = MslTestUtils.getMasterToken(ctx, 1, 1);
    final MasterToken masterTokenB = MslTestUtils.getMasterToken(ctx, 2, 1);
    final ICryptoContext cryptoContext = new NullCryptoContext();
    final String userIdA = USER_ID + "A";
    final String userIdB = USER_ID + "B";
    final String userIdC = USER_ID + "C";
    final UserIdToken userIdTokenA = MslTestUtils.getUserIdToken(ctx, masterTokenA, 1, MockEmailPasswordAuthenticationFactory.USER);
    final UserIdToken userIdTokenB = MslTestUtils.getUserIdToken(ctx, masterTokenA, 2, MockEmailPasswordAuthenticationFactory.USER);
    final UserIdToken userIdTokenC = MslTestUtils.getUserIdToken(ctx, masterTokenB, 1, MockEmailPasswordAuthenticationFactory.USER);
    store.setCryptoContext(masterTokenA, cryptoContext);
    store.setCryptoContext(masterTokenB, cryptoContext);
    store.addUserIdToken(userIdA, userIdTokenA);
    store.addUserIdToken(userIdB, userIdTokenB);
    store.addUserIdToken(userIdC, userIdTokenC);
    // We still have a master token with serial number 1 so no user ID
    // tokens should be deleted.
    store.removeCryptoContext(masterTokenA);
    assertEquals(userIdTokenA, store.getUserIdToken(userIdA));
    assertEquals(userIdTokenB, store.getUserIdToken(userIdB));
    assertEquals(userIdTokenC, store.getUserIdToken(userIdC));
}
/**
 * Removing the crypto context of a master token whose serial number is no
 * longer represented in the store must also delete that master token's
 * user ID tokens.
 */
@Test
public void removeMasterTokenReissuedUserIdTokens() throws MslEncodingException, MslCryptoException, MslException {
    // Master token B has a new serial number, to invalidate the old master
    // token and its user ID tokens.
    final MasterToken masterTokenA = MslTestUtils.getMasterToken(ctx, 1, 1);
    final MasterToken masterTokenB = MslTestUtils.getMasterToken(ctx, 1, 2);
    final ICryptoContext cryptoContext = new NullCryptoContext();
    final String userIdA = USER_ID + "A";
    final String userIdB = USER_ID + "B";
    final String userIdC = USER_ID + "C";
    final UserIdToken userIdTokenA = MslTestUtils.getUserIdToken(ctx, masterTokenA, 1, MockEmailPasswordAuthenticationFactory.USER);
    final UserIdToken userIdTokenB = MslTestUtils.getUserIdToken(ctx, masterTokenA, 2, MockEmailPasswordAuthenticationFactory.USER);
    final UserIdToken userIdTokenC = MslTestUtils.getUserIdToken(ctx, masterTokenB, 1, MockEmailPasswordAuthenticationFactory.USER);
    store.setCryptoContext(masterTokenA, cryptoContext);
    store.addUserIdToken(userIdA, userIdTokenA);
    store.addUserIdToken(userIdB, userIdTokenB);
    store.setCryptoContext(masterTokenB, cryptoContext);
    store.addUserIdToken(userIdC, userIdTokenC);
    // All of master token A's user ID tokens should be deleted.
    store.removeCryptoContext(masterTokenA);
    assertNull(store.getUserIdToken(userIdA));
    assertNull(store.getUserIdToken(userIdB));
    assertEquals(userIdTokenC, store.getUserIdToken(userIdC));
}
/**
 * Clearing all crypto contexts must also delete all user ID tokens, since
 * no master tokens remain for them to be bound to.
 */
@Test
public void clearCryptoContextsUserIdTokens() throws MslEncodingException, MslCryptoException, MslException {
    // Two master tokens with the same serial number but different sequence
    // numbers; each carries one user ID token.
    final MasterToken masterTokenA = MslTestUtils.getMasterToken(ctx, 1, 1);
    final MasterToken masterTokenB = MslTestUtils.getMasterToken(ctx, 1, 2);
    final ICryptoContext cryptoContext = new NullCryptoContext();
    final String userIdA = USER_ID + "A";
    final String userIdB = USER_ID + "B";
    final UserIdToken userIdTokenA = MslTestUtils.getUserIdToken(ctx, masterTokenA, 1, MockEmailPasswordAuthenticationFactory.USER);
    final UserIdToken userIdTokenB = MslTestUtils.getUserIdToken(ctx, masterTokenB, 2, MockEmailPasswordAuthenticationFactory.USER);
    store.setCryptoContext(masterTokenA, cryptoContext);
    store.setCryptoContext(masterTokenB, cryptoContext);
    store.addUserIdToken(userIdA, userIdTokenA);
    store.addUserIdToken(userIdB, userIdTokenB);
    // All user ID tokens should be deleted.
    store.clearCryptoContexts();
    assertNull(store.getUserIdToken(userIdA));
    assertNull(store.getUserIdToken(userIdB));
}
/**
 * User ID token add/remove stress test runner.
 *
 * Randomly adds or removes user ID tokens. Also iterates through the user
 * ID tokens.
 */
private static class UserIdTokenStressor implements Runnable {
    /**
     * Create a new user ID token stressor.
     *
     * @param ctx MSL context.
     * @param store MSL store.
     * @param count the number of master token and user ID tokens to create
     *        combinations of.
     */
    public UserIdTokenStressor(final MslContext ctx, final MslStore store, final int count) {
        this.ctx = ctx;
        this.store = store;
        this.count = count;
    }

    /* (non-Javadoc)
     * @see java.lang.Runnable#run()
     */
    @Override
    public void run() {
        final Random r = new Random();
        try {
            // Perform 10 * count random store operations against randomly
            // chosen master token / user ID token combinations.
            for (int i = 0; i < 10 * count; ++i) {
                final int tokenIndex = r.nextInt(count);
                final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, tokenIndex, 1);
                final long userId = r.nextInt(count);
                final UserIdToken userIdToken = MslTestUtils.getUserIdToken(ctx, masterToken, userId, MockEmailPasswordAuthenticationFactory.USER);
                final int option = r.nextInt(3);
                switch (option) {
                    case 0:
                    {
                        // Add: register the crypto context first so the
                        // user ID token add is accepted.
                        store.setCryptoContext(masterToken, new NullCryptoContext());
                        store.addUserIdToken(USER_ID + userId, userIdToken);
                        break;
                    }
                    case 1:
                    {
                        // Query (result intentionally ignored).
                        store.getUserIdToken(USER_ID + userId);
                        break;
                    }
                    case 2:
                    {
                        // Remove (may be a no-op if not present).
                        store.removeUserIdToken(userIdToken);
                        break;
                    }
                }
            }
        } catch (final MslMasterTokenException e) {
            throw new MslInternalException("Unexpected master token exception.", e);
        } catch (final MslEncodingException e) {
            throw new MslInternalException("Unexpected master token encoding exception.", e);
        } catch (final MslCryptoException e) {
            throw new MslInternalException("Unexpected master token creation exception.", e);
        } catch (final MslException e) {
            throw new MslInternalException("Master token / user ID token service token query mismatch.", e);
        }
    }

    /** MSL context. */
    private final MslContext ctx;
    /** MSL store. */
    private final MslStore store;
    /** Number of master token and user ID token identities. */
    private final int count;
}
/**
 * Run many concurrent user ID token stressors against the shared store
 * and require all of them to complete within the stress timeout.
 */
@Test
public void stressUserIdTokens() throws InterruptedException {
    final ExecutorService executor = Executors.newCachedThreadPool();
    final int runners = 10 * MAX_TOKENS;
    for (int run = 0; run < runners; ++run) {
        executor.execute(new UserIdTokenStressor(ctx, store, MAX_TOKENS));
    }
    executor.shutdown();
    // A timeout here indicates a deadlock or severe contention in the store.
    assertTrue(executor.awaitTermination(STRESS_TIMEOUT_MILLIS, TimeUnit.MILLISECONDS));
}
/**
 * Master-token-bound service tokens are retrievable once added, and the
 * store reports an empty (never null) set before any are added.
 */
@Test
public void masterBoundServiceTokens() throws MslException {
    final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1);
    final Set<ServiceToken> boundTokens = MslTestUtils.getServiceTokens(ctx, masterToken, null);
    store.setCryptoContext(masterToken, new NullCryptoContext());

    // Before adding anything the result is an empty set, not null.
    final Set<ServiceToken> before = store.getServiceTokens(masterToken, null);
    assertNotNull(before);
    assertEquals(0, before.size());

    store.addServiceTokens(boundTokens);
    final Set<ServiceToken> after = store.getServiceTokens(masterToken, null);
    assertNotNull(after);
    assertTrue(equal(boundTokens, after));
}
/**
 * Adding master-bound service tokens without first registering the master
 * token's crypto context must fail and leave the store unchanged.
 */
@Test
public void missingMasterTokenAddServiceTokens() throws MslException {
    final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1);
    final Set<ServiceToken> boundTokens = MslTestUtils.getServiceTokens(ctx, masterToken, null);

    // Catch manually (instead of the thrown rule) so the post-failure
    // store state can still be asserted.
    MslException caught = null;
    try {
        store.addServiceTokens(boundTokens);
    } catch (final MslException e) {
        caught = e;
    }
    assertNotNull(caught);

    final Set<ServiceToken> stored = store.getServiceTokens(masterToken, null);
    assertNotNull(stored);
    assertEquals(0, stored.size());
}
/**
 * User-ID-token-bound service tokens are retrievable once the master
 * token crypto context and user ID token have both been registered.
 */
@Test
public void userBoundServiceTokens() throws MslEncodingException, MslCryptoException, MslException {
    final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1);
    final UserIdToken userIdToken = MslTestUtils.getUserIdToken(ctx, masterToken, 1, MockEmailPasswordAuthenticationFactory.USER);
    final Set<ServiceToken> boundTokens = MslTestUtils.getServiceTokens(ctx, masterToken, userIdToken);

    store.setCryptoContext(masterToken, new NullCryptoContext());
    store.addUserIdToken(USER_ID, userIdToken);

    // Before adding anything the result is an empty set, not null.
    final Set<ServiceToken> before = store.getServiceTokens(masterToken, userIdToken);
    assertNotNull(before);
    assertEquals(0, before.size());

    store.addServiceTokens(boundTokens);
    final Set<ServiceToken> after = store.getServiceTokens(masterToken, userIdToken);
    assertNotNull(after);
    assertTrue(equal(boundTokens, after));
}
/**
 * Adding user-bound service tokens without first registering the user ID
 * token must fail and leave the store unchanged.
 */
@Test
public void missingUserIdTokenAddServiceTokens() throws MslException {
    final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1);
    final UserIdToken userIdToken = MslTestUtils.getUserIdToken(ctx, masterToken, 1, MockEmailPasswordAuthenticationFactory.USER);
    final Set<ServiceToken> boundTokens = MslTestUtils.getServiceTokens(ctx, masterToken, userIdToken);

    // The master token is known but the user ID token is deliberately
    // never added to the store.
    store.setCryptoContext(masterToken, new NullCryptoContext());

    MslException caught = null;
    try {
        store.addServiceTokens(boundTokens);
    } catch (final MslException e) {
        caught = e;
    }
    assertNotNull(caught);

    final Set<ServiceToken> stored = store.getServiceTokens(masterToken, null);
    assertNotNull(stored);
    assertEquals(0, stored.size());
}
/**
 * Unbound service tokens require no master token or user ID token and are
 * retrievable with null/null arguments.
 */
@Test
public void unboundServiceTokens() throws MslException {
    final Set<ServiceToken> unbound = MslTestUtils.getServiceTokens(ctx, null, null);

    // Before adding anything the result is an empty set, not null.
    final Set<ServiceToken> before = store.getServiceTokens(null, null);
    assertNotNull(before);
    assertEquals(0, before.size());

    store.addServiceTokens(unbound);
    final Set<ServiceToken> after = store.getServiceTokens(null, null);
    assertNotNull(after);
    assertTrue(equal(unbound, after));
}
/**
 * Removing service tokens by master token (name and user ID token null)
 * must delete only the tokens bound solely to that master token; unbound
 * and user-bound tokens survive.
 */
@Test
public void removeMasterBoundServiceTokens() throws MslException {
    final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1);
    final UserIdToken userIdToken = MslTestUtils.getUserIdToken(ctx, masterToken, 1, MockEmailPasswordAuthenticationFactory.USER);
    final ICryptoContext cryptoContext = new NullCryptoContext();
    final Set<ServiceToken> masterBoundTokens = MslTestUtils.getMasterBoundServiceTokens(ctx, masterToken);
    final Set<ServiceToken> userBoundTokens = MslTestUtils.getUserBoundServiceTokens(ctx, masterToken, userIdToken);
    final Set<ServiceToken> unboundTokens = MslTestUtils.getServiceTokens(ctx, null, null);
    store.setCryptoContext(masterToken, cryptoContext);
    store.addUserIdToken(USER_ID, userIdToken);
    store.addServiceTokens(masterBoundTokens);
    store.addServiceTokens(userBoundTokens);
    store.addServiceTokens(unboundTokens);

    store.removeServiceTokens(null, masterToken, null);

    // This should only return the unbound tokens.
    final Set<ServiceToken> storedMasterBoundTokens = store.getServiceTokens(masterToken, null);
    assertNotNull(storedMasterBoundTokens);
    assertTrue(equal(unboundTokens, storedMasterBoundTokens));

    // This should only return the unbound and user-bound tokens.
    final Set<ServiceToken> unboundAndUserBoundTokens = new HashSet<ServiceToken>();
    unboundAndUserBoundTokens.addAll(unboundTokens);
    unboundAndUserBoundTokens.addAll(userBoundTokens);
    final Set<ServiceToken> storedUserBoundTokens = store.getServiceTokens(masterToken, userIdToken);
    // Null-check added for consistency with the sibling tests, which all
    // guard the comparison against a null result.
    assertNotNull(storedUserBoundTokens);
    assertTrue(equal(unboundAndUserBoundTokens, storedUserBoundTokens));

    // This should only return the unbound tokens.
    final Set<ServiceToken> storedUnboundTokens = store.getServiceTokens(null, null);
    assertNotNull(storedUnboundTokens);
    assertTrue(equal(unboundTokens, storedUnboundTokens));
}
/**
 * Removing service tokens by user ID token (name and master token null)
 * must delete only the user-bound tokens; unbound and master-bound tokens
 * survive.
 */
@Test
public void removeUserBoundServiceTokens() throws MslException {
    final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1);
    final UserIdToken userIdToken = MslTestUtils.getUserIdToken(ctx, masterToken, 1, MockEmailPasswordAuthenticationFactory.USER);
    final ICryptoContext cryptoContext = new NullCryptoContext();
    final Set<ServiceToken> masterBoundTokens = MslTestUtils.getMasterBoundServiceTokens(ctx, masterToken);
    final Set<ServiceToken> userBoundTokens = MslTestUtils.getUserBoundServiceTokens(ctx, masterToken, userIdToken);
    final Set<ServiceToken> unboundTokens = MslTestUtils.getServiceTokens(ctx, null, null);
    store.setCryptoContext(masterToken, cryptoContext);
    store.addUserIdToken(USER_ID, userIdToken);
    store.addServiceTokens(masterBoundTokens);
    store.addServiceTokens(userBoundTokens);
    store.addServiceTokens(unboundTokens);
    store.removeServiceTokens(null, null, userIdToken);
    // This should only return the unbound and master bound-only tokens.
    final Set<ServiceToken> storedMasterBoundTokens = store.getServiceTokens(masterToken, null);
    assertNotNull(storedMasterBoundTokens);
    final Set<ServiceToken> unboundAndMasterBoundTokens = new HashSet<ServiceToken>();
    unboundAndMasterBoundTokens.addAll(unboundTokens);
    unboundAndMasterBoundTokens.addAll(masterBoundTokens);
    assertTrue(equal(unboundAndMasterBoundTokens, storedMasterBoundTokens));
    // This should only return the unbound and master bound-only tokens,
    // since the user-bound tokens were just removed.
    final Set<ServiceToken> storedUserBoundTokens = store.getServiceTokens(masterToken, userIdToken);
    assertNotNull(storedUserBoundTokens);
    assertTrue(equal(unboundAndMasterBoundTokens, storedUserBoundTokens));
    // This should only return the unbound tokens.
    final Set<ServiceToken> storedUnboundTokens = store.getServiceTokens(null, null);
    assertNotNull(storedUnboundTokens);
    assertTrue(equal(unboundTokens, storedUnboundTokens));
}
/**
 * Removing service tokens with all-null criteria must be a no-op: every
 * stored token remains retrievable afterwards.
 */
@Test
public void removeNoServiceTokens() throws MslException {
    final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1);
    final UserIdToken userIdToken = MslTestUtils.getUserIdToken(ctx, masterToken, 1, MockEmailPasswordAuthenticationFactory.USER);
    final ICryptoContext cryptoContext = new NullCryptoContext();
    final Set<ServiceToken> masterBoundTokens = MslTestUtils.getMasterBoundServiceTokens(ctx, masterToken);
    final Set<ServiceToken> userBoundTokens = MslTestUtils.getUserBoundServiceTokens(ctx, masterToken, userIdToken);
    final Set<ServiceToken> unboundTokens = MslTestUtils.getServiceTokens(ctx, null, null);
    store.setCryptoContext(masterToken, cryptoContext);
    store.addUserIdToken(USER_ID, userIdToken);
    store.addServiceTokens(masterBoundTokens);
    store.addServiceTokens(userBoundTokens);
    store.addServiceTokens(unboundTokens);
    // No criteria: nothing should be removed.
    store.removeServiceTokens(null, null, null);
    // This should only return the unbound and master bound tokens.
    final Set<ServiceToken> storedMasterBoundTokens = store.getServiceTokens(masterToken, null);
    assertNotNull(storedMasterBoundTokens);
    final Set<ServiceToken> unboundAndMasterBoundTokens = new HashSet<ServiceToken>();
    unboundAndMasterBoundTokens.addAll(unboundTokens);
    unboundAndMasterBoundTokens.addAll(masterBoundTokens);
    assertTrue(equal(unboundAndMasterBoundTokens, storedMasterBoundTokens));
    // This should return all of the tokens.
    final Set<ServiceToken> storedUserBoundTokens = store.getServiceTokens(masterToken, userIdToken);
    assertNotNull(storedUserBoundTokens);
    final Set<ServiceToken> allTokens = new HashSet<ServiceToken>();
    allTokens.addAll(unboundTokens);
    allTokens.addAll(userBoundTokens);
    allTokens.addAll(masterBoundTokens);
    assertTrue(equal(allTokens, storedUserBoundTokens));
    // This should only return the unbound tokens.
    final Set<ServiceToken> storedUnboundTokens = store.getServiceTokens(null, null);
    assertNotNull(storedUnboundTokens);
    assertTrue(equal(unboundTokens, storedUnboundTokens));
}
/**
 * Removing service tokens by name (with matching master token / user ID
 * token binding) must delete exactly the named tokens and no others.
 */
@Test
public void removeNamedServiceTokens() throws MslException {
    final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1);
    final UserIdToken userIdToken = MslTestUtils.getUserIdToken(ctx, masterToken, 1, MockEmailPasswordAuthenticationFactory.USER);
    final ICryptoContext cryptoContext = new NullCryptoContext();
    final Set<ServiceToken> masterBoundTokens = MslTestUtils.getMasterBoundServiceTokens(ctx, masterToken);
    final Set<ServiceToken> userBoundTokens = MslTestUtils.getUserBoundServiceTokens(ctx, masterToken, userIdToken);
    final Set<ServiceToken> unboundTokens = MslTestUtils.getServiceTokens(ctx, null, null);
    store.setCryptoContext(masterToken, cryptoContext);
    store.addUserIdToken(USER_ID, userIdToken);
    store.addServiceTokens(masterBoundTokens);
    store.addServiceTokens(userBoundTokens);
    store.addServiceTokens(unboundTokens);
    final Set<ServiceToken> allTokens = new HashSet<ServiceToken>();
    allTokens.addAll(masterBoundTokens);
    allTokens.addAll(userBoundTokens);
    allTokens.addAll(unboundTokens);
    // Remove a random subset of tokens by name, passing the binding
    // tokens that match each token's own binding.
    final Random random = new Random();
    final Set<ServiceToken> removedTokens = new HashSet<ServiceToken>();
    for (final ServiceToken token : allTokens) {
        if (random.nextBoolean()) continue;
        store.removeServiceTokens(token.getName(), token.isMasterTokenBound() ? masterToken : null, token.isUserIdTokenBound() ? userIdToken : null);
        removedTokens.add(token);
    }
    // removeAll() returns true only if it actually removed something, so
    // assertFalse() verifies none of the removed tokens were returned.
    // This should only return tokens that haven't been removed.
    final Set<ServiceToken> storedMasterBoundTokens = store.getServiceTokens(masterToken, null);
    assertNotNull(storedMasterBoundTokens);
    assertFalse(storedMasterBoundTokens.removeAll(removedTokens));
    // This should only return tokens that haven't been removed.
    final Set<ServiceToken> storedUserBoundTokens = store.getServiceTokens(masterToken, userIdToken);
    assertNotNull(storedUserBoundTokens);
    assertFalse(storedUserBoundTokens.removeAll(removedTokens));
    // This should only return tokens that haven't been removed.
    final Set<ServiceToken> storedUnboundTokens = store.getServiceTokens(null, null);
    assertNotNull(storedUnboundTokens);
    assertFalse(storedUnboundTokens.removeAll(removedTokens));
}
/**
 * Clearing service tokens must delete all of them: master-bound,
 * user-bound, and unbound alike.
 */
@Test
public void clearServiceTokens() throws MslException {
    final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1);
    final UserIdToken userIdToken = MslTestUtils.getUserIdToken(ctx, masterToken, 1, MockEmailPasswordAuthenticationFactory.USER);
    final ICryptoContext cryptoContext = new NullCryptoContext();
    final Set<ServiceToken> masterBoundTokens = MslTestUtils.getMasterBoundServiceTokens(ctx, masterToken);
    final Set<ServiceToken> userBoundTokens = MslTestUtils.getUserBoundServiceTokens(ctx, masterToken, userIdToken);
    final Set<ServiceToken> unboundTokens = MslTestUtils.getServiceTokens(ctx, null, null);
    store.setCryptoContext(masterToken, cryptoContext);
    store.addUserIdToken(USER_ID, userIdToken);
    store.addServiceTokens(masterBoundTokens);
    store.addServiceTokens(userBoundTokens);
    store.addServiceTokens(unboundTokens);

    store.clearServiceTokens();

    final Set<ServiceToken> storedMasterBoundTokens = store.getServiceTokens(masterToken, null);
    assertNotNull(storedMasterBoundTokens);
    assertEquals(0, storedMasterBoundTokens.size());
    final Set<ServiceToken> storedUserBoundTokens = store.getServiceTokens(masterToken, userIdToken);
    assertNotNull(storedUserBoundTokens);
    assertEquals(0, storedUserBoundTokens.size());
    final Set<ServiceToken> storedUnboundTokens = store.getServiceTokens(null, null);
    assertNotNull(storedUnboundTokens);
    // Bug fix: this previously re-asserted storedUserBoundTokens.size(),
    // so the unbound tokens' removal was never actually verified.
    assertEquals(0, storedUnboundTokens.size());
}
/**
 * Querying service tokens with a user ID token that is not bound to the
 * provided master token must fail with USERIDTOKEN_MASTERTOKEN_MISMATCH.
 */
@Test
public void mismatchedGetServiceTokens() throws MslException {
    thrown.expect(MslException.class);
    thrown.expectMslError(MslError.USERIDTOKEN_MASTERTOKEN_MISMATCH);

    // The user ID token is bound to a master token with serial number 1;
    // the query supplies a different master token (serial number 2).
    final MasterToken boundMasterToken = MslTestUtils.getMasterToken(ctx, 1, 1);
    final UserIdToken userIdToken = MslTestUtils.getUserIdToken(ctx, boundMasterToken, 1, MockEmailPasswordAuthenticationFactory.USER);
    final MasterToken otherMasterToken = MslTestUtils.getMasterToken(ctx, 2, 2);
    store.getServiceTokens(otherMasterToken, userIdToken);
}
/**
 * Querying service tokens with a user ID token but no master token must
 * fail with USERIDTOKEN_MASTERTOKEN_NULL.
 */
@Test
public void missingMasterTokenGetServiceTokens() throws MslException {
    thrown.expect(MslException.class);
    thrown.expectMslError(MslError.USERIDTOKEN_MASTERTOKEN_NULL);

    final MasterToken boundMasterToken = MslTestUtils.getMasterToken(ctx, 1, 1);
    final UserIdToken userIdToken = MslTestUtils.getUserIdToken(ctx, boundMasterToken, 1, MockEmailPasswordAuthenticationFactory.USER);
    store.getServiceTokens(null, userIdToken);
}
/**
 * Removing service tokens with a user ID token that is not bound to the
 * provided master token must fail with USERIDTOKEN_MASTERTOKEN_MISMATCH.
 */
@Test
public void mismatchedRemoveServiceTokens() throws MslException {
    thrown.expect(MslException.class);
    thrown.expectMslError(MslError.USERIDTOKEN_MASTERTOKEN_MISMATCH);

    // The user ID token is bound to a master token with serial number 1;
    // the removal supplies a different master token (serial number 2).
    final MasterToken boundMasterToken = MslTestUtils.getMasterToken(ctx, 1, 1);
    final UserIdToken userIdToken = MslTestUtils.getUserIdToken(ctx, boundMasterToken, 1, MockEmailPasswordAuthenticationFactory.USER);
    final MasterToken otherMasterToken = MslTestUtils.getMasterToken(ctx, 2, 2);
    store.removeServiceTokens(null, otherMasterToken, userIdToken);
}
/**
 * Removing the crypto context of master token A must not delete any
 * service tokens when master token B shares the same serial number, since
 * that serial number is still represented in the store.
 */
@Test
public void removeMasterTokenSameSerialNumberServiceTokens() throws MslEncodingException, MslCryptoException, MslException {
    // Master tokens A and B share serial number 1 (different sequence
    // numbers).
    final MasterToken masterTokenA = MslTestUtils.getMasterToken(ctx, 1, 1);
    final MasterToken masterTokenB = MslTestUtils.getMasterToken(ctx, 2, 1);
    final ICryptoContext cryptoContext = new NullCryptoContext();
    final String userIdA = USER_ID + "A";
    final String userIdB = USER_ID + "B";
    final UserIdToken userIdTokenA = MslTestUtils.getUserIdToken(ctx, masterTokenA, 1, MockEmailPasswordAuthenticationFactory.USER);
    final UserIdToken userIdTokenB = MslTestUtils.getUserIdToken(ctx, masterTokenB, 2, MockEmailPasswordAuthenticationFactory.USER);
    final Set<ServiceToken> masterBoundServiceTokens = MslTestUtils.getMasterBoundServiceTokens(ctx, masterTokenA);
    final Set<ServiceToken> serviceTokensA = MslTestUtils.getUserBoundServiceTokens(ctx, masterTokenA, userIdTokenA);
    final Set<ServiceToken> serviceTokensB = MslTestUtils.getUserBoundServiceTokens(ctx, masterTokenB, userIdTokenB);
    store.setCryptoContext(masterTokenA, cryptoContext);
    store.setCryptoContext(masterTokenB, cryptoContext);
    store.addUserIdToken(userIdA, userIdTokenA);
    store.addUserIdToken(userIdB, userIdTokenB);
    store.addServiceTokens(masterBoundServiceTokens);
    store.addServiceTokens(serviceTokensA);
    store.addServiceTokens(serviceTokensB);
    // We still have a master token with serial number 1 so no service
    // tokens should have been deleted.
    store.removeCryptoContext(masterTokenA);
    final Set<ServiceToken> storedServiceTokensA = store.getServiceTokens(masterTokenB, userIdTokenA);
    final Set<ServiceToken> storedServiceTokensB = store.getServiceTokens(masterTokenB, userIdTokenB);
    // Each user's query returns the master-bound tokens plus that user's
    // own user-bound tokens.
    final Set<ServiceToken> expectedServiceTokensA = new HashSet<ServiceToken>(masterBoundServiceTokens);
    expectedServiceTokensA.addAll(serviceTokensA);
    assertEquals(expectedServiceTokensA, storedServiceTokensA);
    final Set<ServiceToken> expectedServiceTokensB = new HashSet<ServiceToken>(masterBoundServiceTokens);
    expectedServiceTokensB.addAll(serviceTokensB);
    assertEquals(expectedServiceTokensB, storedServiceTokensB);
}
/**
 * Removing the crypto context of a master token whose serial number is no
 * longer represented in the store must also delete the service tokens
 * bound to it (directly or via its user ID tokens).
 */
@Test
public void removeMasterTokenReissuedServiceTokens() throws MslEncodingException, MslCryptoException, MslException {
    // Master token B has a new serial number, to invalidate the old master
    // token and its user ID tokens.
    final MasterToken masterTokenA = MslTestUtils.getMasterToken(ctx, 1, 1);
    final MasterToken masterTokenB = MslTestUtils.getMasterToken(ctx, 1, 2);
    final ICryptoContext cryptoContext = new NullCryptoContext();
    final String userIdA = USER_ID + "A";
    final String userIdB = USER_ID + "B";
    final UserIdToken userIdTokenA = MslTestUtils.getUserIdToken(ctx, masterTokenA, 1, MockEmailPasswordAuthenticationFactory.USER);
    final UserIdToken userIdTokenB = MslTestUtils.getUserIdToken(ctx, masterTokenB, 2, MockEmailPasswordAuthenticationFactory.USER);
    final Set<ServiceToken> masterBoundServiceTokens = MslTestUtils.getMasterBoundServiceTokens(ctx, masterTokenA);
    final Set<ServiceToken> serviceTokensA = MslTestUtils.getUserBoundServiceTokens(ctx, masterTokenA, userIdTokenA);
    final Set<ServiceToken> serviceTokensB = MslTestUtils.getUserBoundServiceTokens(ctx, masterTokenB, userIdTokenB);
    store.setCryptoContext(masterTokenA, cryptoContext);
    store.setCryptoContext(masterTokenB, cryptoContext);
    store.addUserIdToken(userIdA, userIdTokenA);
    store.addUserIdToken(userIdB, userIdTokenB);
    store.addServiceTokens(masterBoundServiceTokens);
    store.addServiceTokens(serviceTokensA);
    store.addServiceTokens(serviceTokensB);
    // All of master token A's user ID tokens should be deleted.
    store.removeCryptoContext(masterTokenA);
    assertTrue(store.getServiceTokens(masterTokenA, userIdTokenA).isEmpty());
    final Set<ServiceToken> storedServiceTokensB = store.getServiceTokens(masterTokenB, userIdTokenB);
    assertEquals(serviceTokensB, storedServiceTokensB);
}
/**
 * Clearing all crypto contexts must delete every bound service token;
 * only the unbound service tokens remain retrievable.
 */
@Test
public void clearCryptoContextsServiceTokens() throws MslEncodingException, MslCryptoException, MslException {
    // Master token B has a new serial number, to invalidate the old master
    // token and its user ID tokens.
    final MasterToken masterTokenA = MslTestUtils.getMasterToken(ctx, 1, 1);
    final MasterToken masterTokenB = MslTestUtils.getMasterToken(ctx, 1, 2);
    final ICryptoContext cryptoContext = new NullCryptoContext();
    final String userIdA = USER_ID + "A";
    final String userIdB = USER_ID + "B";
    final UserIdToken userIdTokenA = MslTestUtils.getUserIdToken(ctx, masterTokenA, 1, MockEmailPasswordAuthenticationFactory.USER);
    final UserIdToken userIdTokenB = MslTestUtils.getUserIdToken(ctx, masterTokenB, 2, MockEmailPasswordAuthenticationFactory.USER);
    final Set<ServiceToken> unboundServiceTokens = MslTestUtils.getServiceTokens(ctx, null, null);
    final Set<ServiceToken> serviceTokensA = MslTestUtils.getUserBoundServiceTokens(ctx, masterTokenA, userIdTokenA);
    final Set<ServiceToken> serviceTokensB = MslTestUtils.getUserBoundServiceTokens(ctx, masterTokenB, userIdTokenB);
    store.setCryptoContext(masterTokenA, cryptoContext);
    store.setCryptoContext(masterTokenB, cryptoContext);
    store.addUserIdToken(userIdA, userIdTokenA);
    store.addUserIdToken(userIdB, userIdTokenB);
    store.addServiceTokens(unboundServiceTokens);
    store.addServiceTokens(serviceTokensA);
    store.addServiceTokens(serviceTokensB);
    // All bound service tokens should be deleted.
    store.clearCryptoContexts();
    assertEquals(unboundServiceTokens, store.getServiceTokens(masterTokenA, userIdTokenA));
    assertEquals(unboundServiceTokens, store.getServiceTokens(masterTokenB, userIdTokenB));
    final Set<ServiceToken> storedServiceTokens = store.getServiceTokens(null, null);
    assertEquals(unboundServiceTokens, storedServiceTokens);
}
/**
 * Removing a user ID token must delete only the service tokens bound to
 * that user ID token; master-bound tokens and other users' tokens remain.
 */
@Test
public void removeUserIdTokenServiceTokens() throws MslEncodingException, MslCryptoException, MslException {
    final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1);
    final ICryptoContext cryptoContext = new NullCryptoContext();
    final String userIdA = USER_ID + "A";
    final String userIdB = USER_ID + "B";
    final UserIdToken userIdTokenA = MslTestUtils.getUserIdToken(ctx, masterToken, 1, MockEmailPasswordAuthenticationFactory.USER);
    final UserIdToken userIdTokenB = MslTestUtils.getUserIdToken(ctx, masterToken, 2, MockEmailPasswordAuthenticationFactory.USER);
    final Set<ServiceToken> masterBoundServiceTokens = MslTestUtils.getMasterBoundServiceTokens(ctx, masterToken);
    final Set<ServiceToken> serviceTokensA = MslTestUtils.getUserBoundServiceTokens(ctx, masterToken, userIdTokenA);
    final Set<ServiceToken> serviceTokensB = MslTestUtils.getUserBoundServiceTokens(ctx, masterToken, userIdTokenB);
    store.setCryptoContext(masterToken, cryptoContext);
    store.addUserIdToken(userIdA, userIdTokenA);
    store.addUserIdToken(userIdB, userIdTokenB);
    store.addServiceTokens(masterBoundServiceTokens);
    store.addServiceTokens(serviceTokensA);
    store.addServiceTokens(serviceTokensB);
    // We should still have all the master token bound and user ID token B
    // bound service tokens.
    store.removeUserIdToken(userIdTokenA);
    final Set<ServiceToken> storedServiceTokens = store.getServiceTokens(masterToken, userIdTokenB);
    final Set<ServiceToken> expectedServiceTokens = new HashSet<ServiceToken>(masterBoundServiceTokens);
    expectedServiceTokens.addAll(serviceTokensB);
    assertEquals(expectedServiceTokens, storedServiceTokens);
}
/**
 * Clearing all user ID tokens must delete every user-bound service token,
 * leaving only the master-bound service tokens.
 */
@Test
public void clearUserIdTokensServiceTokens() throws MslEncodingException, MslCryptoException, MslException {
    final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1);
    final ICryptoContext cryptoContext = new NullCryptoContext();
    final String userIdA = USER_ID + "A";
    final String userIdB = USER_ID + "B";
    final UserIdToken userIdTokenA = MslTestUtils.getUserIdToken(ctx, masterToken, 1, MockEmailPasswordAuthenticationFactory.USER);
    final UserIdToken userIdTokenB = MslTestUtils.getUserIdToken(ctx, masterToken, 2, MockEmailPasswordAuthenticationFactory.USER);
    final Set<ServiceToken> masterBoundServiceTokens = MslTestUtils.getMasterBoundServiceTokens(ctx, masterToken);
    final Set<ServiceToken> serviceTokensA = MslTestUtils.getUserBoundServiceTokens(ctx, masterToken, userIdTokenA);
    final Set<ServiceToken> serviceTokensB = MslTestUtils.getUserBoundServiceTokens(ctx, masterToken, userIdTokenB);
    store.setCryptoContext(masterToken, cryptoContext);
    store.addUserIdToken(userIdA, userIdTokenA);
    store.addUserIdToken(userIdB, userIdTokenB);
    store.addServiceTokens(masterBoundServiceTokens);
    store.addServiceTokens(serviceTokensA);
    store.addServiceTokens(serviceTokensB);
    // Only the master token bound service tokens should be left.
    store.clearUserIdTokens();
    final Set<ServiceToken> storedServiceTokens = store.getServiceTokens(masterToken, userIdTokenB);
    assertEquals(masterBoundServiceTokens, storedServiceTokens);
}
/**
 * Service token add/remove stress test runner.
 *
 * Randomly adds or removes service tokens in combinations of unbound,
 * master token bound, and user ID token bound. Also iterates through the
 * service tokens.
 */
private static class ServiceTokenStressor implements Runnable {
    /**
     * Create a new service token stressor.
     *
     * @param ctx MSL context.
     * @param store MSL store.
     * @param count the number of master token and user ID tokens to create
     *        combinations of.
     */
    public ServiceTokenStressor(final MslContext ctx, final MslStore store, final int count) {
        this.ctx = ctx;
        this.store = store;
        this.count = count;
    }

    /* (non-Javadoc)
     * @see java.lang.Runnable#run()
     */
    @Override
    public void run() {
        final Random r = new Random();
        try {
            // Perform 10 * count random store operations against randomly
            // chosen master token / user ID token combinations.
            for (int i = 0; i < 10 * count; ++i) {
                final int tokenIndex = r.nextInt(count);
                final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, tokenIndex, 1);
                final long userId = r.nextInt(count);
                final UserIdToken userIdToken = MslTestUtils.getUserIdToken(ctx, masterToken, userId, MockEmailPasswordAuthenticationFactory.USER);
                final int option = r.nextInt(6);
                switch (option) {
                    case 0:
                    {
                        // Add unbound service tokens.
                        final Set<ServiceToken> tokens = MslTestUtils.getServiceTokens(ctx, null, null);
                        store.addServiceTokens(tokens);
                        break;
                    }
                    case 1:
                    {
                        // Add master-bound service tokens; the crypto
                        // context must be registered first.
                        store.setCryptoContext(masterToken, new NullCryptoContext());
                        final Set<ServiceToken> tokens = MslTestUtils.getServiceTokens(ctx, masterToken, null);
                        store.addServiceTokens(tokens);
                        break;
                    }
                    case 2:
                    {
                        // Add user-bound service tokens; both the crypto
                        // context and the user ID token must be registered.
                        store.setCryptoContext(masterToken, new NullCryptoContext());
                        store.addUserIdToken(USER_ID + userId, userIdToken);
                        final Set<ServiceToken> tokens = MslTestUtils.getServiceTokens(ctx, masterToken, userIdToken);
                        store.addServiceTokens(tokens);
                        break;
                    }
                    case 3:
                    {
                        // Query unbound tokens (result intentionally ignored).
                        store.getServiceTokens(null, null);
                        break;
                    }
                    case 4:
                    {
                        // Query master-bound tokens.
                        store.getServiceTokens(masterToken, null);
                        break;
                    }
                    case 5:
                    {
                        // Query user-bound tokens.
                        store.getServiceTokens(masterToken, userIdToken);
                        break;
                    }
                }
            }
        } catch (final MslMasterTokenException e) {
            throw new MslInternalException("Unexpected master token exception.", e);
        } catch (final MslEncodingException e) {
            throw new MslInternalException("Unexpected master token encoding exception.", e);
        } catch (final MslCryptoException e) {
            throw new MslInternalException("Unexpected master token creation exception.", e);
        } catch (final MslException e) {
            throw new MslInternalException("Master token / user ID token service token query mismatch.", e);
        }
    }

    /** MSL context. */
    private final MslContext ctx;
    /** MSL store. */
    private final MslStore store;
    /** Number of master token and user ID token identities. */
    private final int count;
}
/** Hammers the store's service token operations from many concurrent threads. */
@Test
public void stressServiceTokens() throws InterruptedException {
    final ExecutorService service = Executors.newCachedThreadPool();
    for (int i = 0; i < 10 * MAX_TOKENS; ++i) {
        service.execute(new ServiceTokenStressor(ctx, store, MAX_TOKENS));
    }
    service.shutdown();
    // Every stressor must complete within the timeout; a hang or deadlock
    // inside the store fails the assertion.
    assertTrue(service.awaitTermination(STRESS_TIMEOUT_MILLIS, TimeUnit.MILLISECONDS));
}
/** MSL context. */
private static MslContext ctx;
/** MSL store. */
private MslStore store;
}
| |
package io.mewbase.server.impl;
import io.mewbase.bson.BsonArray;
import io.mewbase.bson.BsonObject;
import io.mewbase.client.Client;
import io.mewbase.common.SubDescriptor;
import io.mewbase.server.Binder;
import io.mewbase.server.Log;
import io.mewbase.server.MewbaseAuthProvider;
import io.mewbase.server.MewbaseUser;
import io.mewbase.server.impl.auth.UnauthorizedUser;
import io.mewbase.server.impl.cqrs.QueryImpl;
import io.vertx.core.Context;
import io.vertx.core.Vertx;
import io.vertx.core.buffer.Buffer;
import io.vertx.core.parsetools.RecordParser;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
/**
* Created by tim on 23/09/16.
*/
/**
 * Server side of a single client connection.
 * <p>
 * Decodes protocol frames arriving on the transport, authenticates the client
 * on CONNECT, authorises every subsequent frame against the authenticated
 * user, and dispatches each frame to the matching handler. All work happens on
 * the connection's Vert.x context; {@link #checkContext()} enforces this.
 * <p>
 * Created by tim on 23/09/16.
 */
public class ConnectionImpl implements ServerFrameHandler {

    private final static Logger logger = LoggerFactory.getLogger(ConnectionImpl.class);

    private final ServerImpl server;
    private final TransportConnection transportConnection;
    private final Context context;
    /** Live subscriptions keyed by subscription ID. */
    private final Map<Integer, SubscriptionImpl> subscriptionMap = new HashMap<>();
    /** In-flight query executions keyed by query ID. */
    private final Map<Integer, QueryExecution> queryStates = new HashMap<>();
    private boolean closed;
    private MewbaseAuthProvider authProvider;
    /** Authenticated user; set on successful CONNECT, replaced on close. */
    private MewbaseUser user;
    /** Monotonically increasing subscription ID sequence. */
    private int subSeq;

    public ConnectionImpl(ServerImpl server, TransportConnection transportConnection, Context context,
                          MewbaseAuthProvider authProvider) {
        // NOTE(review): 'this' escapes to Protocol before construction
        // completes; safe only if no frame can be delivered synchronously
        // from handler registration — confirm.
        Protocol protocol = new Protocol(this);
        RecordParser recordParser = protocol.recordParser();
        transportConnection.handler(recordParser::handle);
        this.server = server;
        this.transportConnection = transportConnection;
        this.context = context;
        this.authProvider = authProvider;
        transportConnection.closeHandler(this::close);
    }

    /**
     * Handles the CONNECT frame: checks protocol version compatibility and
     * authenticates the client via the configured auth provider.
     */
    @Override
    public void handleConnect(BsonObject frame) {
        checkContext();
        String clientVersion = (String)frame.getValue(Protocol.CONNECT_VERSION);
        BsonObject authInfo = (BsonObject)frame.getValue(Protocol.CONNECT_AUTH_INFO);
        CompletableFuture<MewbaseUser> cf = authProvider.authenticate(authInfo);
        cf.whenComplete((result, ex) -> {
            checkContext();
            // Reject incompatible clients before considering the auth outcome.
            if (!ServerVersionProvider.isCompatibleWith(clientVersion)) {
                final String errorMsg = "Client version not supported";
                sendConnectErrorResponse(Client.ERR_SERVER_ERROR, errorMsg);
                logAndClose(errorMsg);
                return;
            }
            if (ex != null) {
                sendConnectErrorResponse(Client.ERR_AUTHENTICATION_FAILED, "Authentication failed");
                logAndClose(ex.getMessage());
            } else if (result != null) {
                user = result;
                BsonObject response = new BsonObject();
                response.put(Protocol.RESPONSE_OK, true);
                writeResponse(Protocol.RESPONSE_FRAME, response);
            } else {
                // A provider must never complete successfully with null.
                String nullUserMsg = "AuthProvider returned a null user";
                logAndClose(nullUserMsg);
                throw new IllegalStateException(nullUserMsg);
            }
        });
    }

    /** Publishes an event to a channel's log. */
    @Override
    public void handlePublish(BsonObject frame) {
        checkContext();
        authoriseThenHandle(Protocol.PUBLISH_FRAME, frame, () -> {
            String channel = frame.getString(Protocol.PUBLISH_CHANNEL);
            BsonObject event = frame.getBsonObject(Protocol.PUBLISH_EVENT);
            Integer requestID = frame.getInteger(Protocol.REQUEST_REQUEST_ID);
            // (Removed unused read of Protocol.PUBLISH_SESSID.)
            if (channel == null) {
                missingField(Protocol.PUBLISH_CHANNEL, Protocol.PUBLISH_FRAME);
                return;
            }
            if (event == null) {
                missingField(Protocol.PUBLISH_EVENT, Protocol.PUBLISH_FRAME);
                return;
            }
            if (requestID == null) {
                missingField(Protocol.REQUEST_REQUEST_ID, Protocol.PUBLISH_FRAME);
                return;
            }
            Log log = server.getLog(channel);
            if (log == null) {
                sendErrorResponse(Client.ERR_NO_SUCH_CHANNEL, "no such channel " + channel, frame);
                return;
            }
            CompletableFuture<Long> cf = server.publishEvent(log, event);
            cf.handle((v, ex) -> {
                if (ex == null) {
                    writeResponseOK(frame);
                } else {
                    sendErrorResponse(Client.ERR_SERVER_ERROR, "failed to persist", frame);
                }
                return null;
            });
        });
    }

    /** Transactions are not yet supported. */
    @Override
    public void handleStartTx(BsonObject frame) {
        checkContext();
        throw new UnsupportedOperationException();
    }

    /** Transactions are not yet supported. */
    @Override
    public void handleCommitTx(BsonObject frame) {
        checkContext();
        throw new UnsupportedOperationException();
    }

    /** Transactions are not yet supported. */
    @Override
    public void handleAbortTx(BsonObject frame) {
        checkContext();
        throw new UnsupportedOperationException();
    }

    /**
     * Creates a subscription on a channel. Rejects frames that set both a
     * non-default start position and a start timestamp, and rejects unknown
     * channels before any subscription state is allocated.
     */
    @Override
    public void handleSubscribe(BsonObject frame) {
        checkContext();
        authoriseThenHandle(Protocol.SUBSCRIBE_FRAME, frame, () -> {
            String channel = frame.getString(Protocol.SUBSCRIBE_CHANNEL);
            if (channel == null) {
                missingField(Protocol.SUBSCRIBE_CHANNEL, Protocol.SUBSCRIBE_FRAME);
                return;
            }
            Long startSeq = frame.getLong(Protocol.SUBSCRIBE_STARTPOS);
            Long startTimestamp = frame.getLong(Protocol.SUBSCRIBE_STARTTIMESTAMP);
            String durableID = frame.getString(Protocol.SUBSCRIBE_DURABLEID);
            String filterName = frame.getString(Protocol.SUBSCRIBE_FILTER_NAME);
            SubDescriptor subDescriptor = new SubDescriptor()
                    .setStartEventNum(startSeq == null ? SubDescriptor.DEFAULT_START_NUM : startSeq)
                    .setStartTimestamp(startTimestamp)
                    .setFilterName(filterName)
                    .setDurableID(durableID)
                    .setChannel(channel);
            // Start position and start timestamp are mutually exclusive.
            if (subDescriptor.getStartEventNum() != SubDescriptor.DEFAULT_START_NUM &&
                    subDescriptor.getStartTimestamp() != SubDescriptor.DEFAULT_START_TIME) {
                logAndClose("Cannot set both non default startPosition and timestamp in Subscription");
                return;
            }
            // FIX: validate the channel before allocating a subscription. The
            // previous code sent the error response but fell through and
            // still registered a subscription (and wrote an OK sub-response)
            // for a nonexistent channel.
            Log log = server.getLog(channel);
            if (log == null) {
                sendErrorResponse(Client.ERR_NO_SUCH_CHANNEL, "no such channel " + channel, frame);
                return;
            }
            int subID = subSeq++;
            checkWrap(subSeq);
            SubscriptionImpl subscription = new SubscriptionImpl(this, subID, subDescriptor);
            subscriptionMap.put(subID, subscription);
            BsonObject resp = new BsonObject();
            resp.put(Protocol.RESPONSE_OK, true);
            resp.put(Protocol.SUBRESPONSE_SUBID, subID);
            writeResponse0(Protocol.SUBRESPONSE_FRAME, frame, resp);
            logger.trace("Subscribed channel: {} startSeq {}", channel, startSeq);
        });
    }

    /** Closes a subscription but keeps any durable state. */
    @Override
    public void handleSubClose(BsonObject frame) {
        checkContext();
        authoriseThenHandle(Protocol.SUBCLOSE_FRAME, frame, () -> {
            Integer subID = frame.getInteger(Protocol.SUBCLOSE_SUBID);
            if (subID == null) {
                missingField(Protocol.SUBCLOSE_SUBID, Protocol.SUBCLOSE_FRAME);
                return;
            }
            SubscriptionImpl subscription = subscriptionMap.remove(subID);
            if (subscription == null) {
                invalidField(Protocol.SUBCLOSE_SUBID, Protocol.SUBCLOSE_FRAME);
                return;
            }
            subscription.close();
            writeResponseOK(frame);
        });
    }

    /** Closes a subscription and removes it entirely (including durable state). */
    @Override
    public void handleUnsubscribe(BsonObject frame) {
        checkContext();
        authoriseThenHandle(Protocol.UNSUBSCRIBE_FRAME, frame, () -> {
            Integer subID = frame.getInteger(Protocol.UNSUBSCRIBE_SUBID);
            if (subID == null) {
                missingField(Protocol.UNSUBSCRIBE_SUBID, Protocol.UNSUBSCRIBE_FRAME);
                return;
            }
            SubscriptionImpl subscription = subscriptionMap.remove(subID);
            if (subscription == null) {
                invalidField(Protocol.UNSUBSCRIBE_SUBID, Protocol.UNSUBSCRIBE_FRAME);
                return;
            }
            subscription.close();
            subscription.unsubscribe();
            writeResponseOK(frame);
        });
    }

    /** Acknowledges delivery of event bytes for flow control on a subscription. */
    @Override
    public void handleAckEv(BsonObject frame) {
        checkContext();
        authoriseThenHandle(Protocol.ACKEV_FRAME, frame, () -> {
            Integer subID = frame.getInteger(Protocol.ACKEV_SUBID);
            if (subID == null) {
                missingField(Protocol.ACKEV_SUBID, Protocol.ACKEV_FRAME);
                return;
            }
            Integer bytes = frame.getInteger(Protocol.ACKEV_BYTES);
            if (bytes == null) {
                missingField(Protocol.ACKEV_BYTES, Protocol.ACKEV_FRAME);
                return;
            }
            Long pos = frame.getLong(Protocol.ACKEV_POS);
            if (pos == null) {
                missingField(Protocol.ACKEV_POS, Protocol.ACKEV_FRAME);
                return;
            }
            SubscriptionImpl subscription = subscriptionMap.get(subID);
            if (subscription == null) {
                invalidField(Protocol.ACKEV_SUBID, Protocol.ACKEV_FRAME);
                return;
            }
            subscription.handleAckEv(pos, bytes);
        });
    }

    /** Starts execution of a named CQRS query. */
    @Override
    public void handleQuery(BsonObject frame) {
        checkContext();
        authoriseThenHandle(Protocol.QUERY_FRAME, frame, () -> {
            Integer queryID = frame.getInteger(Protocol.QUERY_QUERYID);
            if (queryID == null) {
                missingField(Protocol.QUERY_QUERYID, Protocol.QUERY_FRAME);
                return;
            }
            String queryName = frame.getString(Protocol.QUERY_NAME);
            if (queryName == null) {
                missingField(Protocol.QUERY_NAME, Protocol.QUERY_FRAME);
                return;
            }
            BsonObject params = frame.getBsonObject(Protocol.QUERY_PARAMS);
            if (params == null) {
                missingField(Protocol.QUERY_PARAMS, Protocol.QUERY_FRAME);
                return;
            }
            QueryImpl query = server.getCqrsManager().getQuery(queryName);
            if (query == null) {
                writeQueryError(Client.ERR_NO_SUCH_QUERY, "No such query " + queryName, queryID);
            } else {
                QueryExecution qe = new ConnectionQueryExecution(this, queryID, query, params,
                        server.getServerOptions().getQueryMaxUnackedBytes());
                queryStates.put(queryID, qe);
                qe.start();
            }
        });
    }

    /** Looks up a single document by ID in a binder. */
    @Override
    public void handleFindByID(BsonObject frame) {
        checkContext();
        authoriseThenHandle(Protocol.FINDBYID_FRAME, frame, () -> {
            String docID = frame.getString(Protocol.FINDBYID_DOCID);
            if (docID == null) {
                missingField(Protocol.FINDBYID_DOCID, Protocol.FINDBYID_FRAME);
                return;
            }
            String binderName = frame.getString(Protocol.FINDBYID_BINDER);
            if (binderName == null) {
                missingField(Protocol.FINDBYID_BINDER, Protocol.FINDBYID_FRAME);
                return;
            }
            Binder binder = server.getBinder(binderName);
            if (binder != null) {
                CompletableFuture<BsonObject> cf = binder.get(docID);
                // A missing document still yields an OK response with a null
                // result field, matching the original behavior.
                cf.thenAccept(doc -> {
                    BsonObject resp = new BsonObject();
                    resp.put(Protocol.RESPONSE_OK, true);
                    resp.put(Protocol.FINDRESPONSE_RESULT, doc);
                    writeResponse0(Protocol.RESPONSE_FRAME, frame, resp);
                });
            } else {
                sendErrorResponse(Client.ERR_NO_SUCH_BINDER, "No such binder " + binderName, frame);
            }
        });
    }

    /** Acknowledges query result bytes for flow control. */
    @Override
    public void handleQueryAck(BsonObject frame) {
        checkContext();
        authoriseThenHandle(Protocol.QUERYACK_FRAME, frame, () -> {
            Integer queryID = frame.getInteger(Protocol.QUERYACK_QUERYID);
            if (queryID == null) {
                missingField(Protocol.QUERYACK_QUERYID, Protocol.QUERYACK_FRAME);
                return;
            }
            Integer bytes = frame.getInteger(Protocol.QUERYACK_BYTES);
            if (bytes == null) {
                missingField(Protocol.QUERYACK_BYTES, Protocol.QUERYACK_FRAME);
                return;
            }
            // Acks for already-completed queries are silently ignored.
            QueryExecution queryState = queryStates.get(queryID);
            if (queryState != null) {
                queryState.handleAck(bytes);
            }
        });
    }

    /** Keep-alive; no response is required. */
    @Override
    public void handlePing(BsonObject frame) {
        checkContext();
    }

    /** Dispatches a CQRS command to its registered handler. */
    @Override
    public void handleCommand(BsonObject frame) {
        checkContext();
        authoriseThenHandle(Protocol.COMMAND_FRAME, frame, () -> {
            String commandName = frame.getString(Protocol.COMMAND_NAME);
            if (commandName == null) {
                missingField(Protocol.COMMAND_NAME, Protocol.COMMAND_FRAME);
                return;
            }
            BsonObject command = frame.getBsonObject(Protocol.COMMAND_COMMAND);
            if (command == null) {
                missingField(Protocol.COMMAND_COMMAND, Protocol.COMMAND_FRAME);
                return;
            }
            CompletableFuture<Void> cf = server.getCqrsManager().callCommandHandler(commandName, command);
            cf.handle((res, t) -> {
                if (t != null) {
                    sendErrorResponse(Client.ERR_COMMAND_NOT_PROCESSED, t.getMessage(), frame);
                } else {
                    writeResponseOK(frame);
                }
                return null;
            });
        });
    }

    // Admin operations

    @Override
    public void handleListBinders(BsonObject frame) {
        checkContext();
        authoriseThenHandle(Protocol.LIST_BINDERS_FRAME, frame, () -> {
            BsonObject resp = new BsonObject();
            resp.put(Protocol.RESPONSE_OK, true);
            BsonArray arr = new BsonArray(server.listBinders());
            resp.put(Protocol.LISTBINDERS_BINDERS, arr);
            writeResponse0(Protocol.RESPONSE_FRAME, frame, resp);
        });
    }

    @Override
    public void handleCreateBinder(BsonObject frame) {
        checkContext();
        // FIX: authorise against the frame-type constant, consistent with the
        // other handlers (was Protocol.CREATEBINDER_NAME, the field name).
        authoriseThenHandle(Protocol.CREATE_BINDER_FRAME, frame, () -> {
            String binderName = frame.getString(Protocol.CREATEBINDER_NAME);
            if (binderName == null) {
                missingField(Protocol.CREATEBINDER_NAME, Protocol.CREATE_BINDER_FRAME);
                return;
            }
            CompletableFuture<Boolean> cf = server.createBinder(binderName);
            cf.handle((res, t) -> {
                if (t != null) {
                    sendErrorResponse(Client.ERR_SERVER_ERROR, "failed to create binder", frame);
                } else {
                    BsonObject resp = new BsonObject();
                    resp.put(Protocol.RESPONSE_OK, true);
                    // res == true means newly created, so "exists" is its negation.
                    resp.put(Protocol.CREATEBINDER_RESPONSE_EXISTS, !res);
                    writeResponse0(Protocol.RESPONSE_FRAME, frame, resp);
                }
                return null;
            });
        });
    }

    @Override
    public void handleListChannels(BsonObject frame) {
        checkContext();
        authoriseThenHandle(Protocol.LIST_CHANNELS_FRAME, frame, () -> {
            BsonObject resp = new BsonObject();
            resp.put(Protocol.RESPONSE_OK, true);
            BsonArray arr = new BsonArray(server.listChannels());
            resp.put(Protocol.LISTCHANNELS_CHANNELS, arr);
            writeResponse0(Protocol.RESPONSE_FRAME, frame, resp);
        });
    }

    @Override
    public void handleCreateChannel(BsonObject frame) {
        checkContext();
        // FIX: authorise against the frame-type constant, consistent with the
        // other handlers (was Protocol.CREATECHANNEL_NAME, the field name).
        authoriseThenHandle(Protocol.CREATE_CHANNEL_FRAME, frame, () -> {
            String channelName = frame.getString(Protocol.CREATECHANNEL_NAME);
            if (channelName == null) {
                missingField(Protocol.CREATECHANNEL_NAME, Protocol.CREATE_CHANNEL_FRAME);
                return;
            }
            CompletableFuture<Boolean> cf = server.createChannel(channelName);
            cf.handle((res, t) -> {
                if (t != null) {
                    sendErrorResponse(Client.ERR_SERVER_ERROR, "failed to create channel", frame);
                } else {
                    BsonObject resp = new BsonObject();
                    resp.put(Protocol.RESPONSE_OK, true);
                    resp.put(Protocol.CREATECHANNEL_RESPONSE_EXISTS, !res);
                    writeResponse0(Protocol.RESPONSE_FRAME, frame, resp);
                }
                return null;
            });
        });
    }

    /**
     * Copies the request ID from the request frame into the response and
     * writes it; closes the connection if the request carried no request ID.
     */
    private void writeResponse0(String responseFrameType, BsonObject requestFrame, BsonObject responseFrame) {
        Integer requestID = requestFrame.getInteger(Protocol.REQUEST_REQUEST_ID);
        if (requestID != null) {
            responseFrame.put(Protocol.RESPONSE_REQUEST_ID, requestID);
            writeResponse(responseFrameType, responseFrame);
        } else {
            logAndClose("No request id in frame");
        }
    }

    private void writeResponseOK(BsonObject frame) {
        BsonObject resp = new BsonObject();
        resp.put(Protocol.RESPONSE_OK, true);
        writeResponse0(Protocol.RESPONSE_FRAME, frame, resp);
    }

    /**
     * Checks the authenticated user may issue the given frame type and runs
     * the action; on failure or error the connection is closed.
     */
    private void authoriseThenHandle(String frameType, BsonObject frame, Runnable action) {
        // NOTE(review): 'user' is null until a successful CONNECT; a client
        // that sends another frame first would trigger an NPE here — confirm
        // the protocol layer guarantees CONNECT arrives first.
        CompletableFuture<Boolean> authorisedCF = user.isAuthorised(frameType);
        authorisedCF.whenComplete((res, ex) -> {
            if (ex != null) {
                String msg = "Authorisation failed";
                logger.error(msg, ex);
                sendErrorResponse(Client.ERR_SERVER_ERROR, msg, frame);
                close();
            } else if (!res) {
                sendErrorResponse(Client.ERR_NOT_AUTHORISED, "User is not authorised", frame);
                close();
            } else {
                action.run();
            }
        });
    }

    /** Writes one query result document; {@code last} marks the final result. */
    protected Buffer writeQueryResult(BsonObject doc, int queryID, boolean last) {
        BsonObject res = new BsonObject();
        res.put(Protocol.QUERYRESULT_OK, true);
        res.put(Protocol.QUERYRESULT_QUERYID, queryID);
        res.put(Protocol.QUERYRESULT_RESULT, doc);
        res.put(Protocol.QUERYRESULT_LAST, last);
        return writeResponse(Protocol.QUERYRESULT_FRAME, res);
    }

    /** Writes a terminal error result for a query. */
    private Buffer writeQueryError(int errCode, String errMsg, int queryID) {
        BsonObject res = new BsonObject();
        res.put(Protocol.QUERYRESULT_OK, false);
        res.put(Protocol.QUERYRESULT_QUERYID, queryID);
        res.put(Protocol.QUERYRESULT_LAST, true);
        res.put(Protocol.RESPONSE_ERRCODE, errCode);
        res.put(Protocol.RESPONSE_ERRMSG, errMsg);
        return writeResponse(Protocol.QUERYRESULT_FRAME, res);
    }

    /**
     * Encodes and writes a frame, hopping onto the connection's context if
     * called from elsewhere.
     */
    protected Buffer writeResponse(String frameName, BsonObject frame) {
        Buffer buff = Protocol.encodeFrame(frameName, frame);
        // TODO compare performance of writing directly in all cases and via context
        Context curr = Vertx.currentContext();
        if (curr != context) {
            context.runOnContext(v -> transportConnection.write(buff));
        } else {
            transportConnection.write(buff);
        }
        return buff;
    }

    private void checkWrap(int i) {
        // Sanity check - wrap around - won't happen but better to close connection than give incorrect behaviour
        if (i == Integer.MIN_VALUE) {
            String msg = "int wrapped!";
            logger.error(msg);
            close();
        }
    }

    private void missingField(String fieldName, String frameType) {
        logger.warn("protocol error: missing {} in {}. connection will be closed", fieldName, frameType);
        close();
    }

    private void invalidField(String fieldName, String frameType) {
        logger.warn("protocol error: invalid {} in {}. connection will be closed", fieldName, frameType);
        close();
    }

    private void logAndClose(String exceptionMessage) {
        logger.error("{}, Connection will be closed", exceptionMessage);
        close();
    }

    /** Error response for CONNECT, which has no request ID to echo. */
    private void sendConnectErrorResponse(int errCode, String errMsg) {
        BsonObject resp = new BsonObject();
        resp.put(Protocol.RESPONSE_OK, false);
        resp.put(Protocol.RESPONSE_ERRCODE, errCode);
        resp.put(Protocol.RESPONSE_ERRMSG, errMsg);
        writeResponse(Protocol.RESPONSE_FRAME, resp);
    }

    private void sendErrorResponse(int errCode, String errMsg, BsonObject frame) {
        Integer requestID = frame.getInteger(Protocol.REQUEST_REQUEST_ID);
        if (requestID != null) {
            BsonObject resp = new BsonObject();
            resp.put(Protocol.RESPONSE_REQUEST_ID, requestID);
            resp.put(Protocol.RESPONSE_OK, false);
            resp.put(Protocol.RESPONSE_ERRCODE, errCode);
            resp.put(Protocol.RESPONSE_ERRMSG, errMsg);
            writeResponse(Protocol.RESPONSE_FRAME, resp);
        } else {
            // Cannot correlate the error to a request; close instead.
            logAndClose(errMsg + ": " + errCode);
        }
    }

    // Sanity check - this should always be executed using the correct context
    private void checkContext() {
        if (Vertx.currentContext() != context) {
            logger.trace("Wrong context!! " + Thread.currentThread() + " expected " + context, new Exception());
            throw new IllegalStateException("Wrong context!");
        }
    }

    protected void removeQueryState(int queryID) {
        checkContext();
        queryStates.remove(queryID);
    }

    /** Idempotent teardown of all connection state. */
    private void close() {
        checkContext();
        if (closed) {
            return;
        }
        // Drop authorisation immediately so no further frames are processed.
        user = new UnauthorizedUser();
        // FIX: also close live subscriptions, not just query executions, so
        // their resources are released when the connection goes away.
        for (SubscriptionImpl subscription : subscriptionMap.values()) {
            subscription.close();
        }
        subscriptionMap.clear();
        for (QueryExecution queryState : queryStates.values()) {
            queryState.close();
        }
        queryStates.clear();
        closed = true;
        transportConnection.close();
    }

    protected ServerImpl server() {
        return server;
    }
}
| |
/*
* Copyright 2012 - 2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.solr.core;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.SpatialParams;
import org.springframework.core.convert.converter.Converter;
import org.springframework.core.convert.support.GenericConversionService;
import org.springframework.data.geo.Box;
import org.springframework.data.geo.Distance;
import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.solr.VersionUtil;
import org.springframework.data.solr.core.convert.DateTimeConverters;
import org.springframework.data.solr.core.convert.NumberConverters;
import org.springframework.data.solr.core.geo.GeoConverters;
import org.springframework.data.solr.core.mapping.SolrPersistentEntity;
import org.springframework.data.solr.core.mapping.SolrPersistentProperty;
import org.springframework.data.solr.core.query.*;
import org.springframework.data.solr.core.query.Criteria.OperationKey;
import org.springframework.data.solr.core.query.Criteria.Predicate;
import org.springframework.data.solr.core.query.Query.Operator;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.CollectionUtils;
/**
* Base Implementation of {@link QueryParser} providing common functions for creating
* {@link org.apache.solr.client.solrj.SolrQuery}.
*
* @author Christoph Strobl
* @author Francisco Spaeth
* @author Radek Mensik
* @author David Webb
* @author Michael Rocke
*/
public abstract class QueryParserBase<QUERYTPYE extends SolrDataQuery> implements QueryParser {
protected static final String CRITERIA_VALUE_SEPERATOR = " ";
protected static final String DELIMINATOR = ":";
protected static final String NOT = "-";
protected static final String BOOST = "^";
protected final GenericConversionService conversionService = new GenericConversionService();
private final List<PredicateProcessor> critieraEntryProcessors = new ArrayList<>();
private final PredicateProcessor defaultProcessor = new DefaultProcessor();
private final @Nullable MappingContext<? extends SolrPersistentEntity<?>, SolrPersistentProperty> mappingContext;
// Instance initializer: runs before the constructor body for every instance.
// Registers default converters (only when no equivalent converter is already
// present) and the built-in predicate processors. Registration order of the
// processors is significant: the first processor whose canProcess(...)
// accepts a predicate handles it.
{
    if (!conversionService.canConvert(java.util.Date.class, String.class)) {
        conversionService.addConverter(DateTimeConverters.JavaDateConverter.INSTANCE);
    }
    if (!conversionService.canConvert(Number.class, String.class)) {
        conversionService.addConverter(NumberConverters.NumberConverter.INSTANCE);
    }
    if (!conversionService.canConvert(Distance.class, String.class)) {
        conversionService.addConverter(GeoConverters.DistanceToStringConverter.INSTANCE);
    }
    if (!conversionService.canConvert(org.springframework.data.geo.Point.class, String.class)) {
        conversionService.addConverter(GeoConverters.Point3DToStringConverter.INSTANCE);
    }
    // Joda-Time converters are registered only when Joda-Time is on the classpath.
    if (VersionUtil.isJodaTimeAvailable()) {
        if (!conversionService.canConvert(org.joda.time.ReadableInstant.class, String.class)) {
            conversionService.addConverter(DateTimeConverters.JodaDateTimeConverter.INSTANCE);
        }
        if (!conversionService.canConvert(org.joda.time.LocalDateTime.class, String.class)) {
            conversionService.addConverter(DateTimeConverters.JodaLocalDateTimeConverter.INSTANCE);
        }
    }
    critieraEntryProcessors.add(new ExpressionProcessor());
    critieraEntryProcessors.add(new BetweenProcessor());
    critieraEntryProcessors.add(new NearProcessor());
    critieraEntryProcessors.add(new WithinProcessor());
    critieraEntryProcessors.add(new FuzzyProcessor());
    critieraEntryProcessors.add(new SloppyProcessor());
    critieraEntryProcessors.add(new WildcardProcessor());
    critieraEntryProcessors.add(new FunctionProcessor());
}
/**
 * Creates a new {@link QueryParserBase}.
 *
 * @param mappingContext mapping metadata used to translate domain property names into Solr field
 *          names; may be {@literal null}, in which case property names are used as-is.
 * @since 4.0
 */
public QueryParserBase(
        @Nullable MappingContext<? extends SolrPersistentEntity<?>, SolrPersistentProperty> mappingContext) {
    this.mappingContext = mappingContext;
}
/**
 * Render the complete query string for the given query, prepending join
 * syntax when the query defines one. Returns {@literal null} when the query
 * carries no criteria.
 */
@Override
public String getQueryString(SolrDataQuery query, @Nullable Class<?> domainType) {
    // Without criteria there is nothing to render.
    if (query.getCriteria() == null) {
        return null;
    }
    String rendered = createQueryStringFromNode(query.getCriteria(), domainType);
    return prependJoin(rendered, query, domainType);
}
/**
 * Register an additional {@link Converter} used when rendering criteria values
 * into their string representation.
 *
 * @param converter must not be {@literal null}.
 */
@Override
public void registerConverter(Converter<?, ?> converter) {
    conversionService.addConverter(converter);
}
/**
 * add another {@link PredicateProcessor}
 * <p>
 * Custom processors are consulted after the built-in processors registered in
 * the instance initializer; the first processor accepting a predicate wins.
 *
 * @param processor must not be {@literal null}.
 */
public void addPredicateProcessor(PredicateProcessor processor) {
    this.critieraEntryProcessors.add(processor);
}
/**
 * Create the plain query string representation of the given node.
 *
 * @param node must not be {@literal null}.
 * @return the query string fragment for {@code node}.
 * @deprecated since 4.0. Use {@link #createQueryStringFromNode(Node, Class)} instead
 */
@Deprecated
public String createQueryStringFromNode(Node node) {
    // Delegates with no domain type, i.e. no property-name mapping applied.
    return createQueryStringFromNode(node, null);
}
/**
 * Create the plain query string representation of the given node using mapping information derived from the domain
 * type.
 *
 * @param node must not be {@literal null}.
 * @param domainType can be {@literal null}.
 * @return the query string fragment for {@code node}.
 * @since 4.0
 */
public String createQueryStringFromNode(Node node, @Nullable Class<?> domainType) {
    // Position 0 marks the node as first within its parent (no leading AND/OR).
    return createQueryStringFromNode(node, 0, domainType);
}
/**
 * Create the plain query string representation of the given node.
 *
 * @param node must not be {@literal null}.
 * @param position position of the node within its parent; values greater than zero prepend a conjunction.
 * @return the query string fragment for {@code node}.
 * @deprecated since 4.0. Use {@link #createQueryStringFromNode(Node, int, Class)} instead.
 */
@Deprecated
public String createQueryStringFromNode(Node node, int position) {
    return createQueryStringFromNode(node, position, null);
}
/**
 * Create the plain query string representation of the given node using mapping information derived from the domain
 * type. Sibling groups are rendered recursively, joined with {@code AND}/{@code OR} and parenthesized where
 * grouping is required.
 *
 * @param node must not be {@literal null}.
 * @param position position of the node within its parent; values greater than zero prepend a conjunction.
 * @param domainType can be {@literal null}.
 * @return the query string fragment for {@code node}.
 * @since 4.0
 */
public String createQueryStringFromNode(Node node, int position, @Nullable Class<?> domainType) {
    StringBuilder rendered = new StringBuilder();
    if (position > 0) {
        rendered.append(node.isOr() ? " OR " : " AND ");
    }
    // Leaf node: render the criteria fragment directly.
    if (!node.hasSiblings()) {
        return rendered.append(createQueryFragmentForCriteria((Criteria) node, domainType)).toString();
    }
    if (node.isNegating()) {
        rendered.append("-");
    }
    // Non-root groups and negated roots need explicit grouping parentheses.
    boolean parenthesize = !node.isRoot() || node.isNegating();
    if (parenthesize) {
        rendered.append('(');
    }
    int childPosition = 0;
    for (Node child : node.getSiblings()) {
        rendered.append(createQueryStringFromNode(child, childPosition++, domainType));
    }
    if (parenthesize) {
        rendered.append(')');
    }
    return rendered.toString();
}
/**
 * Iterates criteria list and concats query string fragments to form a valid query string to be used with
 * {@link org.apache.solr.client.solrj.SolrQuery#setQuery(String)}
 *
 * @param criteria must not be {@literal null}.
 * @return the query string for {@code criteria}.
 * @deprecated since 4.0. Use {@link #createQueryStringFromCriteria(Criteria, Class)} instead.
 */
@Deprecated
protected String createQueryStringFromCriteria(Criteria criteria) {
    return createQueryStringFromCriteria(criteria, null);
}
/**
 * Iterates criteria list and concats query string fragments to form a valid query string to be used with
 * {@link org.apache.solr.client.solrj.SolrQuery#setQuery(String)}
 *
 * @param criteria must not be {@literal null}.
 * @param domainType can be {@literal null}.
 * @return the query string for {@code criteria}.
 * @since 4.0
 */
protected String createQueryStringFromCriteria(Criteria criteria, @Nullable Class<?> domainType) {
    // A Criteria is a Node, so rendering starts at the generic node walker.
    return createQueryStringFromNode(criteria, domainType);
}
/**
 * Creates query string representation of a single criteria, e.g.
 * {@code field:(value1 value2)^boost}.
 *
 * @param part must not be {@literal null}.
 * @param domainType can be {@literal null}.
 * @return the rendered fragment.
 */
protected String createQueryFragmentForCriteria(Criteria part, @Nullable Class<?> domainType) {
    Criteria criteria = part;
    // A raw query string holder is emitted verbatim.
    if (criteria instanceof QueryStringHolder) {
        return ((QueryStringHolder) criteria).getQueryString();
    }
    StringBuilder fragment = new StringBuilder();
    boolean hasSinglePredicate = criteria.getPredicates().size() == 1;
    String fieldName = getNullsafeFieldName(criteria.getField(), domainType);
    if (criteria.isNegating()) {
        fieldName = NOT + fieldName;
    }
    // Function criteria (within/near) carry their own field reference.
    if (!StringUtils.isEmpty(fieldName) && !containsFunctionCriteria(criteria.getPredicates())) {
        fragment.append(fieldName);
        fragment.append(DELIMINATOR);
    }
    // No predicates defaults to "field has any value".
    if (criteria.getPredicates().isEmpty()) {
        fragment.append("[* TO *]");
        return fragment.toString();
    }
    if (!hasSinglePredicate) {
        fragment.append("(");
    }
    CriteriaQueryStringValueProvider valueProvider = new CriteriaQueryStringValueProvider(criteria, domainType);
    while (valueProvider.hasNext()) {
        fragment.append(valueProvider.next());
        if (valueProvider.hasNext()) {
            fragment.append(CRITERIA_VALUE_SEPERATOR);
        }
    }
    if (!hasSinglePredicate) {
        fragment.append(")");
    }
    // NaN is the sentinel for "no boost set".
    if (!Float.isNaN(criteria.getBoost())) {
        fragment.append(BOOST).append(criteria.getBoost());
    }
    return fragment.toString();
}
/**
 * Resolve the (possibly mapped) field name, falling back to an empty string
 * when no field or field name is present.
 *
 * @param field can be {@literal null}.
 * @param domainType can be {@literal null}. FIX: annotated {@code @Nullable}
 *          for consistency — every caller forwards a nullable domain type.
 * @return the mapped field name, or {@code ""} if none.
 */
private String getNullsafeFieldName(@Nullable Field field, @Nullable Class<?> domainType) {
    if (field == null || field.getName() == null) {
        return "";
    }
    return getMappedFieldName(field, domainType);
}
/**
 * Get the mapped field name using meta information derived from the given domain type.
 *
 * @param field must not be {@literal null}.
 * @param domainType can be {@literal null}.
 * @return the mapped field name, or the field's own name if no mapping applies.
 * @since 4.0
 */
protected String getMappedFieldName(Field field, @Nullable Class<?> domainType) {
    return getMappedFieldName(field.getName(), domainType);
}
/**
 * Get the mapped field name using meta information derived from the given domain type. Falls back to the given
 * field name whenever no mapping metadata is available.
 *
 * @param fieldName must not be {@literal null}.
 * @param domainType can be {@literal null}.
 * @return the Solr field name the property maps to, or {@code fieldName} itself.
 * @since 4.0
 */
protected String getMappedFieldName(String fieldName, @Nullable Class<?> domainType) {
    if (domainType == null || mappingContext == null) {
        return fieldName;
    }
    // FIX: use the wildcard-parameterized type instead of the raw type to
    // avoid the unchecked/raw-type warning.
    SolrPersistentEntity<?> entity = mappingContext.getPersistentEntity(domainType);
    if (entity == null) {
        return fieldName;
    }
    SolrPersistentProperty property = entity.getPersistentProperty(fieldName);
    return property != null ? property.getFieldName() : fieldName;
}
/**
 * Create {@link SolrClient} readable String representation for {@link CalculatedField}, rendering
 * {@code alias:function(...)} when an alias is present, otherwise just the function fragment.
 *
 * @param calculatedField must not be {@literal null}.
 * @param domainType can be {@literal null}.
 * @return the rendered fragment.
 * @since 1.1
 */
protected String createCalculatedFieldFragment(CalculatedField calculatedField, @Nullable Class<?> domainType) {
    String functionFragment = createFunctionFragment(calculatedField.getFunction(), 0, domainType);
    if (StringUtils.isNotBlank(calculatedField.getAlias())) {
        return calculatedField.getAlias() + ":" + functionFragment;
    }
    return functionFragment;
}
/**
 * Create {@link SolrClient} readable String representation for {@link Function}. Arguments are rendered
 * recursively; the top-level call (level 0) is prefixed with {@code {!func}}.
 *
 * @param function must not be {@literal null}.
 * @param level nesting depth; zero for the outermost function.
 * @param domainType can be {@literal null}.
 * @return the rendered function fragment.
 * @since 1.1
 */
protected String createFunctionFragment(Function function, int level, @Nullable Class<?> domainType) {
    StringBuilder fragment = new StringBuilder();
    // Only the outermost function needs the function-query parser hint.
    if (level <= 0) {
        fragment.append("{!func}");
    }
    fragment.append(function.getOperation());
    fragment.append('(');
    if (function.hasArguments()) {
        List<String> renderedArgs = new ArrayList<>();
        for (Object argument : function.getArguments()) {
            Assert.notNull(argument, "Unable to parse 'null' within function arguments.");
            if (argument instanceof Function) {
                renderedArgs.add(createFunctionFragment((Function) argument, level + 1, domainType));
            } else if (argument instanceof Criteria) {
                renderedArgs.add(createQueryStringFromNode((Criteria) argument, domainType));
            } else if (argument instanceof Field) {
                renderedArgs.add(getMappedFieldName((Field) argument, domainType));
            } else if (argument instanceof Query) {
                renderedArgs.add(getQueryString((Query) argument, domainType));
            } else if (argument instanceof String || !conversionService.canConvert(argument.getClass(), String.class)) {
                // Strings and inconvertible values are emitted via toString().
                renderedArgs.add(argument.toString());
            } else {
                renderedArgs.add(conversionService.convert(argument, String.class));
            }
        }
        fragment.append(StringUtils.join(renderedArgs, ','));
    }
    fragment.append(')');
    return fragment.toString();
}
/**
 * Prepend {@code !join from= to=} to given queryString when the query defines a join; otherwise the
 * query string is returned unchanged.
 *
 * @param queryString the already rendered query string.
 * @param query can be {@literal null}.
 * @param domainType can be {@literal null}.
 * @return the query string, prefixed with join syntax when applicable.
 */
protected String prependJoin(String queryString, @Nullable SolrDataQuery query, @Nullable Class<?> domainType) {
    if (query == null || query.getJoin() == null) {
        return queryString;
    }
    // Optional cross-collection join target.
    String fromIndex = "";
    if (query.getJoin().getFromIndex() != null) {
        fromIndex = " fromIndex=" + query.getJoin().getFromIndex();
    }
    StringBuilder joined = new StringBuilder("{!join from=");
    joined.append(getMappedFieldName(query.getJoin().getFrom(), domainType));
    joined.append(" to=").append(getMappedFieldName(query.getJoin().getTo(), domainType));
    joined.append(fromIndex).append('}').append(queryString);
    return joined.toString();
}
/**
 * Append pagination information {@code start, rows} to {@link SolrQuery}
 *
 * @param query must not be {@literal null}.
 * @param offset zero-based start offset; ignored when {@literal null} or negative.
 * @param rows maximum number of rows; ignored when {@literal null} or negative.
 */
protected void appendPagination(SolrQuery query, @Nullable Long offset, @Nullable Integer rows) {
    // NOTE(review): the long offset is truncated to int before the sign
    // check, so offsets beyond Integer.MAX_VALUE silently wrap — confirm
    // callers never exceed the int range.
    if (offset != null && offset.intValue() >= 0) {
        query.setStart(offset.intValue());
    }
    if (rows != null && rows >= 0) {
        query.setRows(rows);
    }
}
/**
 * Append field list to {@link SolrQuery} without domain type mapping information.
 *
 * @param solrQuery must not be {@literal null}.
 * @param fields can be {@literal null} or empty.
 * @deprecated Use {@link #appendProjectionOnFields(SolrQuery, List, Class)} instead.
 */
@Deprecated
protected void appendProjectionOnFields(SolrQuery solrQuery, List<Field> fields) {
    appendProjectionOnFields(solrQuery, fields, null);
}
/**
 * Append field list to {@link SolrQuery} as the {@code fl} parameter. Calculated fields are rendered as
 * {@code alias:function(...)}; plain fields are mapped to their Solr field names.
 *
 * @param solrQuery must not be {@literal null}.
 * @param fields can be {@literal null} or empty (no-op in that case).
 * @param domainType can be {@literal null}.
 */
protected void appendProjectionOnFields(SolrQuery solrQuery, List<Field> fields, @Nullable Class<?> domainType) {
    if (CollectionUtils.isEmpty(fields)) {
        return;
    }
    List<String> projection = new ArrayList<>(fields.size());
    for (Field field : fields) {
        projection.add(field instanceof CalculatedField
                ? createCalculatedFieldFragment((CalculatedField) field, domainType)
                : getMappedFieldName(field, domainType));
    }
    solrQuery.setParam(CommonParams.FL, StringUtils.join(projection, ","));
}
/**
 * Set {@code q.op} parameter for {@link SolrQuery}; skipped for {@literal null} or
 * {@link Query.Operator#NONE}.
 *
 * @param solrQuery must not be {@literal null}.
 * @param defaultOperator can be {@literal null}.
 */
protected void appendDefaultOperator(SolrQuery solrQuery, @Nullable Operator defaultOperator) {
    if (defaultOperator == null || Query.Operator.NONE.equals(defaultOperator)) {
        return;
    }
    solrQuery.set("q.op", defaultOperator.asQueryStringRepresentation());
}
/**
 * Apply {@link SolrQuery#setTimeAllowed(Integer)} when a time limit is given.
 *
 * @param solrQuery target query, must not be null
 * @param timeAllowed maximum search time in milliseconds, may be null (no limit set)
 */
protected void appendTimeAllowed(SolrQuery solrQuery, @Nullable Integer timeAllowed) {
    if (timeAllowed == null) {
        return;
    }
    solrQuery.setTimeAllowed(timeAllowed);
}
/**
 * Set the {@code defType} (query parser type) parameter on the given {@link SolrQuery}.
 * Blank values are ignored.
 *
 * @param solrQuery target query, must not be null
 * @param defType parser name (e.g. {@code edismax}), may be null or blank
 */
protected void appendDefType(SolrQuery solrQuery, @Nullable String defType) {
    if (StringUtils.isBlank(defType)) {
        return;
    }
    solrQuery.set("defType", defType);
}
/**
 * Add the request handler ({@code qt}) parameter to the given {@link SolrQuery}.
 * Blank values are ignored.
 *
 * @param solrQuery target query, must not be null
 * @param requestHandler handler path, may be null or blank
 */
protected void appendRequestHandler(SolrQuery solrQuery, @Nullable String requestHandler) {
    if (StringUtils.isBlank(requestHandler)) {
        return;
    }
    solrQuery.add(CommonParams.QT, requestHandler);
}
/**
 * Check whether any of the given predicates requires spatial function handling, i.e. carries a
 * {@link OperationKey#WITHIN} or {@link OperationKey#NEAR} key.
 *
 * @param chainedCriterias predicates to inspect, must not be null
 * @return true if at least one WITHIN or NEAR predicate is present
 */
private boolean containsFunctionCriteria(Set<Predicate> chainedCriterias) {
    for (Predicate entry : chainedCriterias) {
        // Both keys render as spatial function queries and need special treatment;
        // the two formerly-duplicated branches are merged into one condition.
        if (StringUtils.equals(OperationKey.WITHIN.getKey(), entry.getKey())
                || StringUtils.equals(OperationKey.NEAR.getKey(), entry.getKey())) {
            return true;
        }
    }
    return false;
}
// Entry point used by callers holding only the SolrDataQuery abstraction; narrows to the
// parser's concrete query type (generic parameter is misspelled "QUERYTPYE" in the class
// declaration). Throws ClassCastException if handed a query of a foreign type.
@SuppressWarnings("unchecked")
@Override
public SolrQuery constructSolrQuery(SolrDataQuery query, @Nullable Class<?> domainType) {
return doConstructSolrQuery((QUERYTPYE) query, domainType);
}
/**
 * Construct the {@link SolrQuery} for the given parser-specific query type.
 *
 * @param query the query to convert
 * @param domainType domain type used for field name mapping, may be null
 * @return the populated Solr query
 */
public abstract SolrQuery doConstructSolrQuery(QUERYTPYE query, @Nullable Class<?> domainType);
/**
 * A {@link PredicateProcessor} creates a Solr-readable query string representation for a given
 * {@link Predicate}. Registered processors are consulted in order; the first one whose
 * {@link #canProcess(Predicate)} returns {@code true} renders the predicate.
 *
 * @author Christoph Strobl
 */
public interface PredicateProcessor {
/**
 * @param predicate the predicate to inspect, may be null
 * @return true if this processor can handle the given predicate
 */
boolean canProcess(@Nullable Predicate predicate);
/**
 * Create the query string representation of the given {@link Predicate}.
 *
 * @param predicate the predicate to render, may be null
 * @param field the field the predicate applies to, may be null
 * @param domainType domain type used for property-to-field name mapping
 * @return the rendered query fragment, or null when the predicate carries no value
 */
Object process(@Nullable Predicate predicate, @Nullable Field field, Class<?> domainType);
}
/**
 * Iterator yielding the Solr string representation of each {@link Predicate} of one
 * {@link Criteria}, delegating rendering of every predicate to the first matching
 * {@link PredicateProcessor}.
 *
 * @author Christoph Strobl
 */
class CriteriaQueryStringValueProvider implements Iterator<String> {
private final Criteria criteria;
// Underlying iterator over the criteria's predicates; this provider is a thin adapter around it.
private Iterator<Predicate> delegate;
// Domain type used for property-to-field name mapping; may be null.
private @Nullable Class<?> domainType;
CriteriaQueryStringValueProvider(Criteria criteria, @Nullable Class<?> domainType) {
Assert.notNull(criteria, "Unable to provide values for 'null' criteria");
this.criteria = criteria;
this.delegate = criteria.getPredicates().iterator();
this.domainType = domainType;
}
@SuppressWarnings("unchecked")
@Nullable
private <T> T getPredicateValue(Predicate predicate) {
PredicateProcessor processor = findMatchingProcessor(predicate);
return (T) processor.process(predicate, criteria.getField(), domainType);
}
// Picks the first registered processor that accepts the predicate, falling back to the default.
// (Field name "critieraEntryProcessors" is misspelled at its declaration elsewhere in this class.)
private PredicateProcessor findMatchingProcessor(Predicate predicate) {
for (PredicateProcessor processor : critieraEntryProcessors) {
if (processor.canProcess(predicate)) {
return processor;
}
}
return defaultProcessor;
}
@Override
public boolean hasNext() {
return this.delegate.hasNext();
}
@Override
public String next() {
// NOTE(review): may return null when the processor yields no value for the predicate,
// which bends the usual Iterator contract — callers must tolerate null elements.
Object o = getPredicateValue(this.delegate.next());
return o != null ? o.toString() : null;
}
@Override
public void remove() {
this.delegate.remove();
}
}
/**
 * Base implementation of {@link PredicateProcessor} handling null values and delegating calls to
 * {@link BasePredicateProcessor#doProcess(Predicate, Field, Class)}. Also provides escaping of
 * Solr query-syntax characters and quoting of values containing whitespace or bare boolean
 * operator tokens.
 *
 * @author Christoph Strobl
 */
abstract class BasePredicateProcessor implements PredicateProcessor {
protected static final String DOUBLEQUOTE = "\"";
// Bare operator words must be quoted, otherwise Solr would parse them as boolean operators.
protected final Set<String> BOOLEAN_OPERATORS = new HashSet<>(Arrays.asList("NOT", "AND", "OR"));
// RESERVED_CHARS and RESERVED_CHARS_REPLACEMENT are parallel arrays consumed pairwise by
// StringUtils.replaceEach — keep both orderings strictly in sync.
protected final String[] RESERVED_CHARS = { DOUBLEQUOTE, "+", "-", "&&", "||", "!", "(", ")", "{", "}", "[", "]",
"^", "~", "*", "?", ":", "\\" };
protected String[] RESERVED_CHARS_REPLACEMENT = { "\\" + DOUBLEQUOTE, "\\+", "\\-", "\\&\\&", "\\|\\|", "\\!",
"\\(", "\\)", "\\{", "\\}", "\\[", "\\]", "\\^", "\\~", "\\*", "\\?", "\\:", "\\\\" };
@Override
public Object process(@Nullable Predicate predicate, @Nullable Field field, @Nullable Class<?> domainType) {
// Null predicate or null value short-circuits to null; doProcess never sees either.
if (predicate == null || predicate.getValue() == null) {
return null;
}
return doProcess(predicate, field, domainType);
}
// Escapes String values for Solr; non-String values are converted to String when a converter
// exists, otherwise returned unchanged.
protected Object filterCriteriaValue(Object criteriaValue) {
if (!(criteriaValue instanceof String)) {
if (conversionService.canConvert(criteriaValue.getClass(), String.class)) {
return conversionService.convert(criteriaValue, String.class);
}
return criteriaValue;
}
String value = escapeCriteriaValue((String) criteriaValue);
return processWhiteSpaces(value);
}
private String escapeCriteriaValue(String criteriaValue) {
return StringUtils.replaceEach(criteriaValue, RESERVED_CHARS, RESERVED_CHARS_REPLACEMENT);
}
// Wraps the value in double quotes when it contains the separator or is a bare boolean operator.
private String processWhiteSpaces(String criteriaValue) {
if (StringUtils.contains(criteriaValue, CRITERIA_VALUE_SEPERATOR) || BOOLEAN_OPERATORS.contains(criteriaValue)) {
return DOUBLEQUOTE + criteriaValue + DOUBLEQUOTE;
}
return criteriaValue;
}
/**
 * Render the (non-null-valued) predicate; invoked only after the null checks in
 * {@link #process(Predicate, Field, Class)}.
 */
@Nullable
protected abstract Object doProcess(@Nullable Predicate predicate, Field field, @Nullable Class<?> domainType);
}
/**
 * Fallback {@link PredicateProcessor} that accepts every predicate and simply escapes its value.
 *
 * @author Christoph Strobl
 */
class DefaultProcessor extends BasePredicateProcessor {
    @Override
    public boolean canProcess(@Nullable Predicate predicate) {
        // Catch-all: used when no specialized processor claims the predicate.
        return true;
    }
    @Override
    public Object doProcess(@Nullable Predicate predicate, Field field, @Nullable Class<?> domainType) {
        Assert.notNull(predicate, "Predicate must not be null!");
        Object rawValue = predicate.getValue();
        return filterCriteriaValue(rawValue);
    }
}
/**
 * Handles {@link Criteria}s with {@link OperationKey#EXPRESSION}: the raw expression value is
 * emitted verbatim, without any escaping.
 *
 * @author Christoph Strobl
 */
class ExpressionProcessor extends BasePredicateProcessor {
    @Override
    public boolean canProcess(@Nullable Predicate predicate) {
        if (predicate == null) {
            return false;
        }
        return OperationKey.EXPRESSION.getKey().equals(predicate.getKey());
    }
    @Override
    public Object doProcess(@Nullable Predicate predicate, Field field, @Nullable Class<?> domainType) {
        Assert.notNull(predicate, "Predicate must not be null!");
        // Expressions are trusted as-is; no reserved-character escaping is applied.
        Object expression = predicate.getValue();
        return expression.toString();
    }
}
/**
 * Handles {@link Criteria}s with {@link OperationKey#BETWEEN}, rendering a Solr range fragment
 * such as {@code [lower TO upper]} (inclusive) or {@code {lower TO upper}} (exclusive).
 *
 * @author Christoph Strobl
 */
class BetweenProcessor extends BasePredicateProcessor {
    private static final String RANGE_OPERATOR = " TO ";
    @Override
    public boolean canProcess(@Nullable Predicate predicate) {
        return predicate != null && OperationKey.BETWEEN.getKey().equals(predicate.getKey());
    }
    @Override
    public Object doProcess(@Nullable Predicate predicate, Field field, @Nullable Class<?> domainType) {
        // Added for consistency with the sibling processors: fail fast with a clear message
        // instead of an anonymous NPE on predicate.getValue().
        Assert.notNull(predicate, "Predicate must not be null!");
        // args layout: [0] lower bound, [1] upper bound, [2] include lower, [3] include upper
        Object[] args = (Object[]) predicate.getValue();
        String rangeFragment = (Boolean) args[2] ? "[" : "{";
        rangeFragment += createRangeFragment(args[0], args[1]);
        rangeFragment += (Boolean) args[3] ? "]" : "}";
        return rangeFragment;
    }
    /**
     * Render {@code lower TO upper}, substituting {@link Criteria#WILDCARD} for an unbounded end.
     */
    protected String createRangeFragment(@Nullable Object rangeStart, @Nullable Object rangeEnd) {
        String rangeFragment = "";
        rangeFragment += (rangeStart != null ? filterCriteriaValue(rangeStart) : Criteria.WILDCARD);
        rangeFragment += RANGE_OPERATOR;
        rangeFragment += (rangeEnd != null ? filterCriteriaValue(rangeEnd) : Criteria.WILDCARD);
        return rangeFragment;
    }
}
/**
 * Handles {@link Criteria}s with {@link OperationKey#NEAR}: either a bounding-box range query
 * ({@code field:[corner1 TO corner2]}) for {@link Box} arguments, or a {@code {!bbox}} spatial
 * function for point/distance arguments.
 *
 * @author Christoph Strobl
 */
class NearProcessor extends BetweenProcessor {
    @Override
    public boolean canProcess(@Nullable Predicate predicate) {
        return predicate != null && OperationKey.NEAR.getKey().equals(predicate.getKey());
    }
    @Override
    public Object doProcess(@Nullable Predicate predicate, Field field, @Nullable Class<?> domainType) {
        Assert.notNull(predicate, "Predicate must not be null!");
        Object[] args = (Object[]) predicate.getValue();
        if (args[0] instanceof Box) {
            // Bounding-box form: render as a plain range query over the mapped field.
            Box box = (Box) args[0];
            StringBuilder boxFragment = new StringBuilder(getMappedFieldName(field, domainType));
            boxFragment.append(":[");
            boxFragment.append(createRangeFragment(box.getFirst(), box.getSecond()));
            boxFragment.append("]");
            return boxFragment.toString();
        }
        // Point/distance form: delegate to the {!bbox} spatial function.
        return createSpatialFunctionFragment(getMappedFieldName(field, domainType),
                (org.springframework.data.geo.Point) args[0], (Distance) args[1], "bbox");
    }
    /**
     * Render a Solr spatial function fragment of the form
     * {@code {!function pt=... sfield=... d=...}}.
     */
    protected String createSpatialFunctionFragment(@Nullable String fieldName,
            org.springframework.data.geo.Point location, Distance distance, String function) {
        StringBuilder spatialFragment = new StringBuilder("{!").append(function);
        spatialFragment.append(" ").append(SpatialParams.POINT).append("=").append(filterCriteriaValue(location));
        spatialFragment.append(" ").append(SpatialParams.FIELD).append("=").append(fieldName);
        spatialFragment.append(" ").append(SpatialParams.DISTANCE).append("=").append(filterCriteriaValue(distance));
        spatialFragment.append("}");
        return spatialFragment.toString();
    }
}
/**
 * Handles {@link Criteria}s with {@link OperationKey#WITHIN}, rendering a {@code {!geofilt}}
 * spatial function fragment.
 *
 * @author Christoph Strobl
 */
class WithinProcessor extends NearProcessor {
    @Override
    public boolean canProcess(@Nullable Predicate predicate) {
        // Null guard added: the parameter is @Nullable and every sibling processor checks it;
        // the previous code dereferenced predicate unconditionally and could NPE.
        return predicate != null && OperationKey.WITHIN.getKey().equals(predicate.getKey());
    }
    @Nullable
    @Override
    public Object doProcess(@Nullable Predicate predicate, Field field, @Nullable Class<?> domainType) {
        Assert.notNull(predicate, "Predicate must not be null!");
        // args layout: [0] center point, [1] distance radius
        Object[] args = (Object[]) predicate.getValue();
        return createSpatialFunctionFragment(getMappedFieldName(field, domainType),
                (org.springframework.data.geo.Point) args[0], (Distance) args[1], "geofilt");
    }
}
/**
 * Handles {@link Criteria}s with {@link OperationKey#FUZZY}, appending the {@code ~} fuzzy
 * operator (optionally with an edit distance) to the escaped term.
 *
 * @author Christoph Strobl
 */
class FuzzyProcessor extends BasePredicateProcessor {
    @Override
    public boolean canProcess(@Nullable Predicate predicate) {
        if (predicate == null) {
            return false;
        }
        return OperationKey.FUZZY.getKey().equals(predicate.getKey());
    }
    @Nullable
    @Override
    protected Object doProcess(@Nullable Predicate predicate, Field field, @Nullable Class<?> domainType) {
        Assert.notNull(predicate, "Predicate must not be null!");
        // args layout: [0] search term, [1] edit distance (NaN = use Solr's default)
        Object[] args = (Object[]) predicate.getValue();
        Float distance = (Float) args[1];
        String distanceSuffix = distance.isNaN() ? "" : distance.toString();
        return filterCriteriaValue(args[0]) + "~" + distanceSuffix;
    }
}
/**
 * Handles {@link Criteria}s with {@link OperationKey#SLOPPY}, appending the {@code ~slop}
 * proximity operator to the escaped phrase.
 *
 * @author Christoph Strobl
 */
class SloppyProcessor extends BasePredicateProcessor {
    @Override
    public boolean canProcess(@Nullable Predicate predicate) {
        if (predicate == null) {
            return false;
        }
        return OperationKey.SLOPPY.getKey().equals(predicate.getKey());
    }
    @Nullable
    @Override
    protected Object doProcess(@Nullable Predicate predicate, Field field, @Nullable Class<?> domainType) {
        Assert.notNull(predicate, "Predicate must not be null!");
        // args layout: [0] phrase, [1] slop (max word-movement distance)
        Object[] args = (Object[]) predicate.getValue();
        Integer slop = (Integer) args[1];
        return filterCriteriaValue(args[0]) + "~" + slop;
    }
}
/**
 * Handles {@link Criteria}s with {@link OperationKey#CONTAINS}, {@link OperationKey#STARTS_WITH}
 * and {@link OperationKey#ENDS_WITH} by wrapping the escaped value with wildcard markers.
 *
 * @author Christoph Strobl
 */
class WildcardProcessor extends BasePredicateProcessor {
    @Override
    public boolean canProcess(@Nullable Predicate predicate) {
        if (predicate == null) {
            return false;
        }
        String key = predicate.getKey();
        return OperationKey.CONTAINS.getKey().equals(key)
                || OperationKey.STARTS_WITH.getKey().equals(key)
                || OperationKey.ENDS_WITH.getKey().equals(key);
    }
    @Override
    protected Object doProcess(@Nullable Predicate predicate, Field field, @Nullable Class<?> domainType) {
        Assert.notNull(predicate, "Predicate must not be null!");
        String key = predicate.getKey();
        Object filteredValue = filterCriteriaValue(predicate.getValue());
        // CONTAINS wraps both ends; STARTS_WITH trails; ENDS_WITH leads.
        if (OperationKey.CONTAINS.getKey().equals(key)) {
            return Criteria.WILDCARD + filteredValue + Criteria.WILDCARD;
        }
        if (OperationKey.STARTS_WITH.getKey().equals(key)) {
            return filteredValue + Criteria.WILDCARD;
        }
        if (OperationKey.ENDS_WITH.getKey().equals(key)) {
            return Criteria.WILDCARD + filteredValue;
        }
        return filteredValue;
    }
}
/**
 * Handles {@link Criteria} with {@link OperationKey#FUNCTION}, delegating to the shared
 * function-fragment rendering.
 *
 * @since 1.1
 */
class FunctionProcessor extends BasePredicateProcessor {
    @Override
    public boolean canProcess(@Nullable Predicate predicate) {
        if (predicate == null) {
            return false;
        }
        return OperationKey.FUNCTION.getKey().equals(predicate.getKey());
    }
    @Override
    @Nullable
    protected Object doProcess(@Nullable Predicate predicate, Field field, @Nullable Class<?> domainType) {
        Assert.notNull(predicate, "Predicate must not be null!");
        Function function = (Function) predicate.getValue();
        return createFunctionFragment(function, 0, domainType);
    }
}
// Register the given object under its assigned name. Note the map is keyed by name
// (name -> object), despite the parameter order suggesting otherwise.
private static void setObjectName(Map<String, Object> namesAssociation, Object object, String name) {
namesAssociation.put(name, object);
}
/**
 * Contract for query decorators that track user-assigned names for query components,
 * exposing the resulting name-to-object association.
 *
 * @author Francisco Spaeth
 * @since 1.4
 */
interface NamedObjects {
// Associate the given object with the supplied name.
void setName(Object object, String name);
// Returns the name -> object association map.
Map<String, Object> getNamesAssociation();
}
/**
 * {@link Query} decorator additionally recording name -> object associations.
 *
 * @author Francisco Spaeth
 * @since 1.4
 */
static class NamedObjectsQuery extends AbstractQueryDecorator implements NamedObjects {
private Map<String, Object> namesAssociation = new HashMap<>();
public NamedObjectsQuery(Query query) {
super(query);
// NOTE(review): message says "group query" — likely copied from another decorator; the
// check is also after super(query), so a null query may already have failed earlier.
Assert.notNull(query, "group query shall not be null");
}
@Override
public void setName(Object object, String name) {
setObjectName(namesAssociation, object, name);
}
@Override
public Map<String, Object> getNamesAssociation() {
// Unmodifiable view over the live map; reflects later setName calls.
return Collections.unmodifiableMap(namesAssociation);
}
}
/**
 * {@link FacetQuery} decorator additionally recording name -> object associations.
 *
 * @author Francisco Spaeth
 * @since 1.4
 */
static class NamedObjectsFacetQuery extends AbstractFacetQueryDecorator implements NamedObjects {
private Map<String, Object> namesAssociation = new HashMap<>();
public NamedObjectsFacetQuery(FacetQuery query) {
super(query);
}
@Override
public void setName(Object object, String name) {
setObjectName(namesAssociation, object, name);
}
@Override
public Map<String, Object> getNamesAssociation() {
// Unmodifiable view over the live map; reflects later setName calls.
return Collections.unmodifiableMap(namesAssociation);
}
}
/**
 * {@link HighlightQuery} decorator additionally recording name -> object associations.
 *
 * @author Francisco Spaeth
 * @since 1.4
 */
static class NamedObjectsHighlightQuery extends AbstractHighlightQueryDecorator implements NamedObjects {
private Map<String, Object> namesAssociation = new HashMap<>();
public NamedObjectsHighlightQuery(HighlightQuery query) {
super(query);
}
@Override
public void setName(Object object, String name) {
setObjectName(namesAssociation, object, name);
}
@Override
public Map<String, Object> getNamesAssociation() {
// Unmodifiable view over the live map; reflects later setName calls.
return Collections.unmodifiableMap(namesAssociation);
}
}
/**
 * {@link FacetAndHighlightQuery} decorator additionally recording name -> object associations.
 *
 * @author David Webb
 * @since 2.1
 */
static class NamedObjectsFacetAndHighlightQuery extends AbstractFacetAndHighlightQueryDecorator
implements NamedObjects {
private Map<String, Object> namesAssociation = new HashMap<>();
public NamedObjectsFacetAndHighlightQuery(FacetAndHighlightQuery query) {
super(query);
}
/*
 * (non-Javadoc)
 * @see org.springframework.data.solr.core.QueryParserBase.NamedObjects#setName(java.lang.Object, java.lang.String)
 */
@Override
public void setName(Object object, String name) {
setObjectName(namesAssociation, object, name);
}
/*
 * (non-Javadoc)
 * @see org.springframework.data.solr.core.QueryParserBase.NamedObjects#getNamesAssociation()
 */
@Override
public Map<String, Object> getNamesAssociation() {
// Unmodifiable view over the live map; reflects later setName calls.
return Collections.unmodifiableMap(namesAssociation);
}
}
}
| |
package com.emc.ecs.cloudfoundry.broker.service;
import com.emc.ecs.cloudfoundry.broker.EcsManagementClientException;
import com.emc.ecs.cloudfoundry.broker.EcsManagementResourceNotFoundException;
import com.emc.ecs.cloudfoundry.broker.config.BrokerConfig;
import com.emc.ecs.cloudfoundry.broker.config.CatalogConfig;
import com.emc.ecs.cloudfoundry.broker.model.PlanProxy;
import com.emc.ecs.cloudfoundry.broker.model.ServiceDefinitionProxy;
import com.emc.ecs.management.sdk.*;
import com.emc.ecs.management.sdk.model.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cloud.servicebroker.exception.ServiceBrokerException;
import org.springframework.cloud.servicebroker.exception.ServiceInstanceExistsException;
import org.springframework.stereotype.Service;
import javax.annotation.PostConstruct;
import java.util.*;
import java.util.stream.Collectors;
/**
 * Service façade over the ECS management API used by the Cloud Foundry broker: creates and
 * deletes buckets, namespaces and object users, manages quotas/retention/ACLs, and resolves
 * the object endpoint and replication group at startup.
 */
@Service
public class EcsService {

    private static final Logger logger = LoggerFactory.getLogger(EcsService.class);

    private static final String UNCHECKED = "unchecked";
    private static final String WARN = "warn";
    private static final String LIMIT = "limit";
    private static final String QUOTA = "quota";
    private static final String RETENTION = "retention";
    private static final String SERVICE_NOT_FOUND =
            "No service matching service id: ";
    private static final String DEFAULT_RETENTION = "default-retention";

    @Autowired
    private Connection connection;

    @Autowired
    private BrokerConfig broker;

    @Autowired
    private CatalogConfig catalog;

    // Resolved once at startup from the configured replication group name.
    private String replicationGroupID;

    // Object API endpoint, either configured directly or derived from an ECS base URL.
    private String objectEndpoint;

    String getObjectEndpoint() {
        return objectEndpoint;
    }

    String getNfsMountHost() {
        return broker.getNfsMountHost();
    }

    /**
     * Resolve endpoints/replication group and ensure the broker's internal repository bucket
     * and user exist. Failures abort application startup via ServiceBrokerException.
     */
    @PostConstruct
    void initialize() {
        try {
            lookupObjectEndpoints();
            lookupReplicationGroupID();
            prepareRepository();
        } catch (EcsManagementClientException | EcsManagementResourceNotFoundException e) {
            throw new ServiceBrokerException(e);
        }
    }

    void deleteBucket(String id) {
        try {
            BucketAction.delete(connection, prefix(id), broker.getNamespace());
        } catch (Exception e) {
            throw new ServiceBrokerException(e);
        }
    }

    Boolean getBucketFileEnabled(String id) throws EcsManagementClientException {
        ObjectBucketInfo b = BucketAction.get(connection, prefix(id), broker.getNamespace());
        return b.getFsAccessEnabled();
    }

    /**
     * Create a bucket for a service instance, applying plan/service settings plus optional
     * quota and default retention from the merged parameters.
     *
     * @return the merged parameter map (caller-supplied + plan + service settings)
     */
    Map<String, Object> createBucket(String id, ServiceDefinitionProxy service,
                                     PlanProxy plan, Map<String, Object> parameters) {
        if (parameters == null) parameters = new HashMap<>();

        logger.info(String.format("Creating bucket %s", id));
        try {
            if (bucketExists(id)) {
                throw new ServiceInstanceExistsException(id, service.getId());
            }
            // Plan settings override caller parameters; service settings override both.
            parameters.putAll(plan.getServiceSettings());
            parameters.putAll(service.getServiceSettings());

            BucketAction.create(connection, new ObjectBucketCreate(prefix(id),
                    broker.getNamespace(), replicationGroupID, parameters));

            if (parameters.containsKey(QUOTA)) {
                logger.info("Applying quota");
                @SuppressWarnings(UNCHECKED)
                Map<String, Integer> quota = (Map<String, Integer>) parameters
                        .get(QUOTA);
                BucketQuotaAction.create(connection, prefix(id),
                        broker.getNamespace(), quota.get(LIMIT), quota.get(WARN));
            }

            if (parameters.containsKey(DEFAULT_RETENTION)) {
                logger.info("Applying retention policy");
                BucketRetentionAction.update(connection, broker.getNamespace(),
                        prefix(id), (int) parameters.get(DEFAULT_RETENTION));
            }
        } catch (Exception e) {
            logger.error(String.format("Failed to create bucket %s", id), e);
            throw new ServiceBrokerException(e);
        }
        return parameters;
    }

    /**
     * Change a bucket's plan: quota limit/warn of -1 (or absent) removes the quota,
     * anything else (re)creates it.
     */
    Map<String, Object> changeBucketPlan(String id, ServiceDefinitionProxy service,
                                         PlanProxy plan, Map<String, Object> parameters) {
        // NOTE(review): unlike createBucket, parameters is assumed non-null here — confirm callers.
        parameters.putAll(plan.getServiceSettings());
        parameters.putAll(service.getServiceSettings());
        @SuppressWarnings(UNCHECKED)
        Map<String, Object> quota = (Map<String, Object>) parameters
                .getOrDefault(QUOTA, new HashMap<>());
        int limit = (int) quota.getOrDefault(LIMIT, -1);
        int warn = (int) quota.getOrDefault(WARN, -1);

        try {
            if (limit == -1 && warn == -1) {
                parameters.remove(QUOTA);
                BucketQuotaAction.delete(connection, prefix(id),
                        broker.getNamespace());
            } else {
                BucketQuotaAction.create(connection, prefix(id),
                        broker.getNamespace(), limit, warn);
            }
        } catch (EcsManagementClientException e) {
            throw new ServiceBrokerException(e);
        }

        return parameters;
    }

    private boolean bucketExists(String id) throws EcsManagementClientException {
        return BucketAction.exists(connection, prefix(id),
                broker.getNamespace());
    }

    /**
     * Create an object user in the broker's namespace and return its first secret key.
     */
    UserSecretKey createUser(String id) {
        try {
            ObjectUserAction.create(connection, prefix(id), broker.getNamespace());
            ObjectUserSecretAction.create(connection, prefix(id));
            return ObjectUserSecretAction.list(connection, prefix(id)).get(0);
        } catch (Exception e) {
            throw new ServiceBrokerException(e);
        }
    }

    /**
     * Create an object user in the given (prefixed) namespace and return its first secret key.
     */
    UserSecretKey createUser(String id, String namespace)
            throws EcsManagementClientException {
        ObjectUserAction.create(connection, prefix(id), prefix(namespace));
        ObjectUserSecretAction.create(connection, prefix(id));
        return ObjectUserSecretAction.list(connection, prefix(id)).get(0);
    }

    void createUserMap(String id, int uid)
            throws EcsManagementClientException {
        ObjectUserMapAction.create(connection, prefix(id), uid, broker.getNamespace());
    }

    void deleteUserMap(String id, String uid)
            throws EcsManagementClientException {
        ObjectUserMapAction.delete(connection, prefix(id), uid, broker.getNamespace());
    }

    Boolean userExists(String id) throws ServiceBrokerException {
        try {
            return ObjectUserAction.exists(connection, prefix(id),
                    broker.getNamespace());
        } catch (Exception e) {
            throw new ServiceBrokerException(e);
        }
    }

    void deleteUser(String id) throws EcsManagementClientException {
        ObjectUserAction.delete(connection, prefix(id));
    }

    /** Grant the user full control on the bucket. */
    void addUserToBucket(String id, String username) {
        try {
            addUserToBucket(id, username, Collections.singletonList("full_control"));
        } catch (Exception e) {
            throw new ServiceBrokerException(e);
        }
    }

    /** Append the user with the given permissions to the bucket's ACL. */
    void addUserToBucket(String id, String username,
                         List<String> permissions) throws EcsManagementClientException {
        BucketAcl acl = BucketAclAction.get(connection, prefix(id),
                broker.getNamespace());
        List<BucketUserAcl> userAcl = acl.getAcl().getUserAccessList();
        userAcl.add(new BucketUserAcl(prefix(username), permissions));
        acl.getAcl().setUserAccessList(userAcl);
        BucketAclAction.update(connection, prefix(id), acl);
    }

    /** Remove every ACL entry of the (prefixed) user from the bucket's ACL. */
    void removeUserFromBucket(String id, String username)
            throws EcsManagementClientException {
        BucketAcl acl = BucketAclAction.get(connection, prefix(id),
                broker.getNamespace());
        List<BucketUserAcl> newUserAcl = acl.getAcl().getUserAccessList()
                .stream().filter(a -> !a.getUser().equals(prefix(username)))
                .collect(Collectors.toList());
        acl.getAcl().setUserAccessList(newUserAcl);
        BucketAclAction.update(connection, prefix(id), acl);
    }

    /** Apply the broker-wide resource-name prefix. */
    String prefix(String string) {
        return broker.getPrefix() + string;
    }

    /**
     * Resolve the object endpoint: use the configured endpoint if present, otherwise derive it
     * from the configured (or auto-detected) ECS base URL. Also defaults the repository endpoint.
     */
    private void lookupObjectEndpoints() throws EcsManagementClientException,
            EcsManagementResourceNotFoundException {
        if (broker.getObjectEndpoint() != null) {
            objectEndpoint = broker.getObjectEndpoint();
        } else {
            List<BaseUrl> baseUrlList = BaseUrlAction.list(connection);
            String urlId;

            if (baseUrlList.isEmpty()) {
                throw new ServiceBrokerException(
                        "No object endpoint or base URL available");
            } else if (broker.getBaseUrl() != null) {
                urlId = baseUrlList.stream()
                        .filter(b -> broker.getBaseUrl().equals(b.getName()))
                        .findFirst()
                        .orElseThrow(() -> new ServiceBrokerException("configured ECS Base URL not found"))
                        .getId();
            } else {
                urlId = detectDefaultBaseUrlId(baseUrlList);
            }

            objectEndpoint = BaseUrlAction.get(connection, urlId)
                    .getNamespaceUrl(broker.getNamespace(), false);
        }
        if (broker.getRepositoryEndpoint() == null)
            broker.setRepositoryEndpoint(objectEndpoint);
    }

    String getNamespaceURL(String namespace, ServiceDefinitionProxy service,
                           PlanProxy plan, Map<String, Object> parameters) {
        parameters.putAll(plan.getServiceSettings());
        parameters.putAll(service.getServiceSettings());

        try {
            return getNamespaceURL(namespace, parameters);
        } catch (EcsManagementClientException e) {
            throw new ServiceBrokerException(e);
        }
    }

    private String getNamespaceURL(String namespace,
                                   Map<String, Object> parameters)
            throws EcsManagementClientException {
        String baseUrl = (String) parameters.getOrDefault("base-url",
                broker.getBaseUrl());
        Boolean useSSL = (Boolean) parameters.getOrDefault("use-ssl", false);
        return getNamespaceURL(namespace, useSSL, baseUrl);
    }

    private String getNamespaceURL(String namespace, Boolean useSSL, String baseURL)
            throws EcsManagementClientException {
        List<BaseUrl> baseUrlList = BaseUrlAction.list(connection);
        String urlId = baseUrlList.stream()
                .filter(b -> baseURL.equals(b.getName()))
                .findFirst()
                .orElseThrow(() -> new ServiceBrokerException("Configured ECS namespace not found."))
                .getId();
        return BaseUrlAction.get(connection, urlId).getNamespaceUrl(namespace, useSSL);
    }

    private void lookupReplicationGroupID()
            throws EcsManagementClientException {
        replicationGroupID = ReplicationGroupAction.list(connection).stream()
                .filter(r -> broker.getReplicationGroup().equals(r.getName()))
                .findFirst()
                .orElseThrow(() -> new ServiceBrokerException("Configured ECS replication group not found."))
                .getId();
    }

    /** Ensure the broker's repository bucket and user exist, capturing the user's secret. */
    private void prepareRepository() throws EcsManagementClientException,
            EcsManagementResourceNotFoundException {
        String bucketName = broker.getRepositoryBucket();
        String userName = broker.getRepositoryUser();
        if (!bucketExists(bucketName)) {
            ServiceDefinitionProxy service = lookupServiceDefinition(
                    broker.getRepositoryServiceId());
            Map<String, Object> parameters = new HashMap<>();
            createBucket(bucketName, service,
                    service.findPlan(broker.getRepositoryPlanId()), parameters);
        }

        if (!userExists(userName)) {
            UserSecretKey secretKey = createUser(userName);
            addUserToBucket(bucketName, userName);
            broker.setRepositorySecret(secretKey.getSecretKey());
        } else {
            broker.setRepositorySecret(getUserSecret(userName));
        }
    }

    private String getUserSecret(String id)
            throws EcsManagementClientException {
        return ObjectUserSecretAction.list(connection, prefix(id)).get(0)
                .getSecretKey();
    }

    /** Prefer a base URL literally named "DefaultBaseUrl", otherwise fall back to the first. */
    private String detectDefaultBaseUrlId(List<BaseUrl> baseUrlList) {
        Optional<BaseUrl> maybeBaseUrl = baseUrlList.stream()
                .filter(b -> "DefaultBaseUrl".equals(b.getName())).findAny();
        if (maybeBaseUrl.isPresent()) {
            return maybeBaseUrl.get().getId();
        }
        return baseUrlList.get(0).getId();
    }

    private Boolean namespaceExists(String id)
            throws EcsManagementClientException {
        return NamespaceAction.exists(connection, prefix(id));
    }

    /**
     * Create a namespace for a service instance, applying plan/service settings plus optional
     * quota and retention classes from the merged parameters.
     *
     * @return the merged parameter map (caller-supplied + plan + service settings)
     */
    Map<String, Object> createNamespace(String id, ServiceDefinitionProxy service,
                                        PlanProxy plan, Map<String, Object> parameters)
            throws EcsManagementClientException {
        if (namespaceExists(id))
            throw new ServiceInstanceExistsException(id, service.getId());

        if (parameters == null) parameters = new HashMap<>();
        parameters.putAll(plan.getServiceSettings());
        parameters.putAll(service.getServiceSettings());
        NamespaceAction.create(connection, new NamespaceCreate(prefix(id),
                replicationGroupID, parameters));

        if (parameters.containsKey(QUOTA)) {
            @SuppressWarnings(UNCHECKED)
            Map<String, Integer> quota = (Map<String, Integer>) parameters
                    .get(QUOTA);
            // NOTE(review): the quota param body carries the unprefixed id while the action URL
            // uses prefix(id) — confirm against the ECS management API whether this is intended.
            NamespaceQuotaParam quotaParam = new NamespaceQuotaParam(id,
                    quota.get(LIMIT), quota.get(WARN));
            NamespaceQuotaAction.create(connection, prefix(id), quotaParam);
        }

        if (parameters.containsKey(RETENTION)) {
            @SuppressWarnings(UNCHECKED)
            Map<String, Integer> retention = (Map<String, Integer>) parameters
                    .get(RETENTION);
            for (Map.Entry<String, Integer> entry : retention.entrySet()) {
                NamespaceRetentionAction.create(connection, prefix(id),
                        new RetentionClassCreate(entry.getKey(),
                                entry.getValue()));
            }
        }
        return parameters;
    }

    void deleteNamespace(String id) throws EcsManagementClientException {
        NamespaceAction.delete(connection, prefix(id));
    }

    /**
     * Change a namespace's plan. Retention entries with value -1 delete the retention class;
     * existing classes are updated; missing ones are created.
     */
    Map<String, Object> changeNamespacePlan(String id, ServiceDefinitionProxy service,
                                            PlanProxy plan, Map<String, Object> parameters)
            throws EcsManagementClientException {
        parameters.putAll(plan.getServiceSettings());
        parameters.putAll(service.getServiceSettings());
        NamespaceAction.update(connection, prefix(id),
                new NamespaceUpdate(parameters));

        if (parameters.containsKey(RETENTION)) {
            @SuppressWarnings(UNCHECKED)
            Map<String, Integer> retention = (Map<String, Integer>) parameters
                    .get(RETENTION);
            for (Map.Entry<String, Integer> entry : retention.entrySet()) {
                // Fixed: look up the retention class on the prefixed namespace id, matching the
                // delete/update/create calls below (previously the unprefixed id was checked,
                // so existing classes were never found and were re-created instead).
                if (NamespaceRetentionAction.exists(connection, prefix(id),
                        entry.getKey())) {
                    if (-1 == entry.getValue()) {
                        NamespaceRetentionAction.delete(connection, prefix(id),
                                entry.getKey());
                        parameters.remove(RETENTION);
                    } else {
                        NamespaceRetentionAction.update(connection, prefix(id),
                                entry.getKey(),
                                new RetentionClassUpdate(entry.getValue()));
                    }
                } else {
                    NamespaceRetentionAction.create(connection, prefix(id),
                            new RetentionClassCreate(entry.getKey(),
                                    entry.getValue()));
                }
            }
        }
        return parameters;
    }

    ServiceDefinitionProxy lookupServiceDefinition(
            String serviceDefinitionId) throws ServiceBrokerException {
        ServiceDefinitionProxy service = catalog
                .findServiceDefinition(serviceDefinitionId);
        if (service == null)
            throw new ServiceBrokerException(SERVICE_NOT_FOUND + serviceDefinitionId);
        return service;
    }

    /**
     * Ensure an NFS export exists for the instance's bucket path and return its absolute path.
     */
    String addExportToBucket(String instanceId, String relativeExportPath) throws EcsManagementClientException {
        String namespace = broker.getNamespace();
        String absoluteExportPath = "/" + namespace + "/" + prefix(instanceId) + "/" + relativeExportPath;
        List<NFSExport> exports = NFSExportAction.list(connection, absoluteExportPath);
        if (exports == null) {
            NFSExportAction.create(connection, absoluteExportPath);
        }
        return absoluteExportPath;
    }
}
| |
/*
* Copyright 2006-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.consol.citrus.integration.runner;
import com.consol.citrus.annotations.CitrusTest;
import com.consol.citrus.dsl.testng.TestNGCitrusTestRunner;
import com.consol.citrus.ws.validation.SoapFaultValidator;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.testng.annotations.Test;
/**
 * Integration tests for SOAP faults raised by the server side. Covers fault
 * code/string assertions, fault actor support, and the different ways a fault
 * detail can be validated: default XML validator, schema validation skip,
 * explicit schema repository and explicit schema instance.
 *
 * <p>The request/response payloads and fault details are identical across the
 * scenarios, so they are extracted into constants below.
 *
 * @author Christoph Deppisch
 */
@Test
public class ServerSoapFaultTestRunnerIT extends TestNGCitrusTestRunner {

    /** Fault code the client asserts on received faults. */
    private static final String FAULT_CODE = "{http://www.citrusframework.org/faults}TEC-1000";

    /** Fault code (with namespace prefix) used when the server sends the fault. */
    private static final String SERVER_FAULT_CODE = "{http://www.citrusframework.org/faults}citrus-ns:TEC-1000";

    /** Fault reason string used throughout. */
    private static final String FAULT_STRING = "Invalid request";

    /** Hello request payload sent by the client and expected at the server endpoint. */
    private static final String HELLO_REQUEST =
            "<ns0:HelloRequest xmlns:ns0=\"http://www.consol.de/schemas/samples/sayHello.xsd\">" +
            "<ns0:MessageId>${messageId}</ns0:MessageId>" +
            "<ns0:CorrelationId>${correlationId}</ns0:CorrelationId>" +
            "<ns0:User>${user}</ns0:User>" +
            "<ns0:Text>Hello WebServer</ns0:Text>" +
            "</ns0:HelloRequest>";

    /** Fault detail conforming to the basic sayHello schema. */
    private static final String FAULT_DETAIL =
            "<ns0:FaultDetail xmlns:ns0=\"http://www.consol.de/schemas/samples/sayHello.xsd\">" +
            "<ns0:MessageId>${messageId}</ns0:MessageId>" +
            "<ns0:CorrelationId>${correlationId}</ns0:CorrelationId>" +
            "<ns0:ErrorCode>TEC-1000</ns0:ErrorCode>" +
            "<ns0:Text>Invalid request</ns0:Text>" +
            "</ns0:FaultDetail>";

    /** Fault detail conforming to the extended sayHello schema (adds a Reason element). */
    private static final String FAULT_DETAIL_EXTENDED =
            "<ns0:FaultDetail xmlns:ns0=\"http://www.consol.de/schemas/samples/sayHelloExtended.xsd\">" +
            "<ns0:MessageId>${messageId}</ns0:MessageId>" +
            "<ns0:CorrelationId>${correlationId}</ns0:CorrelationId>" +
            "<ns0:ErrorCode>TEC-1000</ns0:ErrorCode>" +
            "<ns0:Text>Invalid request</ns0:Text>" +
            "<ns0:Reason>Client</ns0:Reason>" +
            "</ns0:FaultDetail>";

    @Autowired
    @Qualifier("xmlSoapFaultValidator")
    private SoapFaultValidator soapFaultValidator;

    @CitrusTest
    public void serverSoapFault() {
        variable("correlationId", "citrus:randomNumber(10)");
        variable("messageId", "citrus:randomNumber(10)");
        variable("user", "Christoph");

        // Basic fault code/string assertion.
        parallel().actions(
            assertSoapFault().faultCode(FAULT_CODE)
                .faultString(FAULT_STRING)
                .when(
                    send(builder -> builder.endpoint("webServiceClient")
                        .payload(HELLO_REQUEST))
                ),
            sequential().actions(
                receive(builder -> builder.endpoint("webServiceRequestReceiver")
                    .payload(HELLO_REQUEST)
                    .schemaValidation(false)
                    .extractFromHeader("citrus_jms_messageId", "internal_correlation_id")),
                soap(builder -> builder.server("webServiceResponseSender")
                    .sendFault()
                    .faultCode(SERVER_FAULT_CODE)
                    .faultString(FAULT_STRING)
                    .faultDetail(FAULT_DETAIL)
                    .header("citrus_jms_correlationId", "${internal_correlation_id}"))
            )
        );

        echo("Test Soap fault actor support");
        parallel().actions(
            assertSoapFault().faultCode(FAULT_CODE)
                .faultString(FAULT_STRING)
                .faultActor("SERVER")
                .when(
                    send(builder -> builder.endpoint("webServiceClient")
                        .payload(HELLO_REQUEST))
                ),
            sequential().actions(
                receive(builder -> builder.endpoint("webServiceRequestReceiver")
                    .payload(HELLO_REQUEST)
                    .schemaValidation(false)
                    .extractFromHeader("citrus_jms_messageId", "internal_correlation_id")),
                soap(builder -> builder.server("webServiceResponseSender")
                    .sendFault()
                    .faultCode(SERVER_FAULT_CODE)
                    .faultString(FAULT_STRING)
                    .faultActor("SERVER")
                    .faultDetail(FAULT_DETAIL)
                    .header("citrus_jms_correlationId", "${internal_correlation_id}"))
            )
        );

        echo("Test XML Soap fault validation");
        parallel().actions(
            assertSoapFault().faultCode(FAULT_CODE)
                .faultString(FAULT_STRING)
                .faultDetail(FAULT_DETAIL)
                .validator(soapFaultValidator)
                .when(
                    send(builder -> builder.endpoint("webServiceClient")
                        .payload(HELLO_REQUEST))
                ),
            sequential().actions(
                receive(builder -> builder.endpoint("webServiceRequestReceiver")
                    .payload(HELLO_REQUEST)
                    .schemaValidation(false)
                    .extractFromHeader("citrus_jms_messageId", "internal_correlation_id")),
                soap(builder -> builder.server("webServiceResponseSender")
                    .sendFault()
                    .faultCode(SERVER_FAULT_CODE)
                    .faultString(FAULT_STRING)
                    .faultDetail(FAULT_DETAIL)
                    .header("citrus_jms_correlationId", "${internal_correlation_id}"))
            )
        );

        echo("Test XML schema validation skip");
        parallel().actions(
            assertSoapFault().faultCode(FAULT_CODE)
                .faultString(FAULT_STRING)
                .faultDetail(FAULT_DETAIL)
                .schemaValidation(false)
                .when(
                    send(builder -> builder.endpoint("webServiceClient")
                        .payload(HELLO_REQUEST))
                ),
            sequential().actions(
                receive(builder -> builder.endpoint("webServiceRequestReceiver")
                    .payload(HELLO_REQUEST)
                    .schemaValidation(false)
                    .extractFromHeader("citrus_jms_messageId", "internal_correlation_id")),
                soap(builder -> builder.server("webServiceResponseSender")
                    .sendFault()
                    .faultCode(SERVER_FAULT_CODE)
                    .faultString(FAULT_STRING)
                    .faultDetail(FAULT_DETAIL)
                    .header("citrus_jms_correlationId", "${internal_correlation_id}"))
            )
        );

        echo("Test explicit XML schema repository");
        parallel().actions(
            assertSoapFault().faultCode(FAULT_CODE)
                .faultString(FAULT_STRING)
                .faultDetail(FAULT_DETAIL_EXTENDED)
                .validator(soapFaultValidator)
                .xsdSchemaRepository("helloSchemaRepository")
                .when(
                    send(builder -> builder.endpoint("webServiceClient")
                        .payload(HELLO_REQUEST))
                ),
            sequential().actions(
                receive(builder -> builder.endpoint("webServiceRequestReceiver")
                    .payload(HELLO_REQUEST)
                    .schemaValidation(false)
                    .extractFromHeader("citrus_jms_messageId", "internal_correlation_id")),
                soap(builder -> builder.server("webServiceResponseSender")
                    .sendFault()
                    .faultCode(SERVER_FAULT_CODE)
                    .faultString(FAULT_STRING)
                    .faultDetail(FAULT_DETAIL_EXTENDED)
                    .header("citrus_jms_correlationId", "${internal_correlation_id}"))
            )
        );

        echo("Test explicit XML schema instance");
        parallel().actions(
            assertSoapFault().faultCode(FAULT_CODE)
                .faultString(FAULT_STRING)
                .faultDetail(FAULT_DETAIL_EXTENDED)
                .validator(soapFaultValidator)
                .xsd("helloSchemaExtended")
                .when(
                    send(builder -> builder.endpoint("webServiceClient")
                        .payload(HELLO_REQUEST))
                ),
            sequential().actions(
                receive(builder -> builder.endpoint("webServiceRequestReceiver")
                    .payload(HELLO_REQUEST)
                    .schemaValidation(false)
                    .extractFromHeader("citrus_jms_messageId", "internal_correlation_id")),
                soap(builder -> builder.server("webServiceResponseSender")
                    .sendFault()
                    .faultCode(SERVER_FAULT_CODE)
                    .faultString(FAULT_STRING)
                    .faultDetail(FAULT_DETAIL_EXTENDED)
                    .header("citrus_jms_correlationId", "${internal_correlation_id}"))
            )
        );

        echo("Test XML multiple soap fault detail elements validation");
        //TODO code test
    }
}
| |
/*
* JBoss, Home of Professional Open Source.
* Copyright 2014 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.undertow.websockets.core;
import io.undertow.util.ImmediatePooled;
import org.xnio.ChannelListener;
import io.undertow.connector.PooledByteBuffer;
import org.xnio.Pooled;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* A buffered binary message.
*
* @author Stuart Douglas
*/
public class BufferedBinaryMessage {
    // When true the whole message is accumulated across frames before delivery;
    // when false the caller is handed data as soon as the current buffer fills.
    private final boolean bufferFullMessage;
    // Filled-and-flipped buffers accumulated so far (only grows when bufferFullMessage).
    private List<PooledByteBuffer> data = new ArrayList<>(1);
    // Buffer currently being filled; null when ownership has been handed off via getData().
    private PooledByteBuffer current;
    // Maximum allowed message size in bytes; values <= 0 disable the limit (see checkMaxSize).
    private final long maxMessageSize;
    // Total bytes read for this message so far.
    private long currentSize;
    // Set once the final frame has been fully consumed (a read returned -1).
    private boolean complete;
    // NOTE(review): never written; only referenced from the commented-out code in
    // handleNewFrame below.
    private int frameCount;

    /**
     * @param maxMessageSize maximum message size in bytes; <= 0 for unlimited
     * @param bufferFullMessage whether to buffer the complete message before delivery
     */
    public BufferedBinaryMessage(long maxMessageSize, boolean bufferFullMessage) {
        this.bufferFullMessage = bufferFullMessage;
        this.maxMessageSize = maxMessageSize;
    }

    public BufferedBinaryMessage(boolean bufferFullMessage) {
        // -1 means "no size limit" (checkMaxSize only enforces positive limits).
        this(-1, bufferFullMessage);
    }

    /**
     * Reads from the channel, blocking until either the message is complete or
     * (when not buffering the full message) the current buffer is full.
     *
     * @param channel the frame channel to read from
     * @throws IOException on read failure or when the message exceeds the size limit
     */
    public void readBlocking(StreamSourceFrameChannel channel) throws IOException {
        if (current == null) {
            current = channel.getWebSocketChannel().getBufferPool().allocate();
        }
        for (; ; ) {
            int res = channel.read(current.getBuffer());
            if (res == -1) {
                complete = true;
                return;
            } else if (res == 0) {
                // Nothing available yet; block until readable. Execution then falls
                // through below with res == 0, which is harmless (adds zero bytes).
                channel.awaitReadable();
            }
            checkMaxSize(channel, res);
            if (bufferFullMessage) {
                dealWithFullBuffer(channel);
            } else if (!current.getBuffer().hasRemaining()) {
                // Partial-delivery mode: return to the caller once the buffer fills.
                return;
            }
        }
    }

    // Rotates a filled buffer into the accumulated list and allocates a fresh one.
    private void dealWithFullBuffer(StreamSourceFrameChannel channel) {
        if (!current.getBuffer().hasRemaining()) {
            current.getBuffer().flip();
            data.add(current);
            current = channel.getWebSocketChannel().getBufferPool().allocate();
        }
    }

    /**
     * Non-blocking read. Reads as much as is currently available; if the channel
     * has no data, installs a read listener that resumes the same loop and
     * invokes the callback on completion (or, in partial-delivery mode, whenever
     * a buffer fills) or on error.
     *
     * @param channel the frame channel to read from
     * @param callback invoked with this message on completion/partial data/error
     */
    public void read(final StreamSourceFrameChannel channel, final WebSocketCallback<BufferedBinaryMessage> callback) {
        try {
            for (; ; ) {
                if (current == null) {
                    current = channel.getWebSocketChannel().getBufferPool().allocate();
                }
                int res = channel.read(current.getBuffer());
                if (res == -1) {
                    this.complete = true;
                    callback.complete(channel.getWebSocketChannel(), this);
                    return;
                } else if (res == 0) {
                    // No data available right now: switch to listener-driven reads.
                    // The listener body mirrors the loop above.
                    channel.getReadSetter().set(new ChannelListener<StreamSourceFrameChannel>() {
                        @Override
                        public void handleEvent(StreamSourceFrameChannel channel) {
                            // Guard against spurious events after completion.
                            if(complete ) {
                                return;
                            }
                            try {
                                for (; ; ) {
                                    if (current == null) {
                                        current = channel.getWebSocketChannel().getBufferPool().allocate();
                                    }
                                    int res = channel.read(current.getBuffer());
                                    if (res == -1) {
                                        complete = true;
                                        channel.suspendReads();
                                        callback.complete(channel.getWebSocketChannel(), BufferedBinaryMessage.this);
                                        return;
                                    } else if (res == 0) {
                                        // Wait for the next readiness notification.
                                        return;
                                    }
                                    checkMaxSize(channel, res);
                                    if (bufferFullMessage) {
                                        dealWithFullBuffer(channel);
                                    } else if (!current.getBuffer().hasRemaining()) {
                                        // Partial delivery: hand the filled buffer to the callback.
                                        callback.complete(channel.getWebSocketChannel(), BufferedBinaryMessage.this);
                                    } else {
                                        handleNewFrame(channel, callback);
                                    }
                                }
                            } catch (IOException e) {
                                channel.suspendReads();
                                callback.onError(channel.getWebSocketChannel(), BufferedBinaryMessage.this, e);
                            }
                        }
                    });
                    channel.resumeReads();
                    return;
                }
                checkMaxSize(channel, res);
                if (bufferFullMessage) {
                    dealWithFullBuffer(channel);
                } else if (!current.getBuffer().hasRemaining()) {
                    // Partial delivery: hand the filled buffer to the callback.
                    callback.complete(channel.getWebSocketChannel(), BufferedBinaryMessage.this);
                } else {
                    handleNewFrame(channel, callback);
                }
            }
        } catch (IOException e) {
            callback.onError(channel.getWebSocketChannel(), this, e);
        }
    }

    // Currently a no-op; retained as a hook for per-frame delivery (see comment).
    private void handleNewFrame(StreamSourceFrameChannel channel, final WebSocketCallback<BufferedBinaryMessage> callback) {
        //TODO: remove this crap
        //basically some bogus web sockets TCK tests assume that messages will be broken up into frames
        //even if we have the full message available.
        //        if(!bufferFullMessage) {
        //            if(channel.getWebSocketFrameCount() != frameCount && current != null && !channel.isFinalFragment()) {
        //                frameCount = channel.getWebSocketFrameCount();
        //                callback.complete(channel.getWebSocketChannel(), this);
        //            }
        //        }
    }

    // Enforces maxMessageSize: frees buffered data, initiates a close handshake
    // with MSG_TOO_BIG and throws when the accumulated size exceeds the limit.
    private void checkMaxSize(StreamSourceFrameChannel channel, int res) throws IOException {
        currentSize += res;
        if (maxMessageSize > 0 && currentSize > maxMessageSize) {
            getData().free();
            WebSockets.sendClose(new CloseMessage(CloseMessage.MSG_TOO_BIG, WebSocketMessages.MESSAGES.messageToBig(maxMessageSize)), channel.getWebSocketChannel(), null);
            throw new IOException(WebSocketMessages.MESSAGES.messageToBig(maxMessageSize));
        }
    }

    /**
     * Hands all buffered data to the caller as a pooled ByteBuffer array.
     * Ownership of the pooled buffers transfers to the returned Pooled (its
     * free()/close() releases them) and internal state is reset for reuse.
     */
    public Pooled<ByteBuffer[]> getData() {
        if (current == null) {
            return new ImmediatePooled<>(new ByteBuffer[0]);
        }
        if (data.isEmpty()) {
            // Single-buffer fast path: avoid building a list copy.
            final PooledByteBuffer current = this.current;
            current.getBuffer().flip();
            this.current = null;
            final ByteBuffer[] data = new ByteBuffer[]{current.getBuffer()};
            return new PooledByteBufferArray(Collections.singletonList(current), data);
        }
        current.getBuffer().flip();
        data.add(current);
        current = null;
        ByteBuffer[] ret = new ByteBuffer[data.size()];
        for (int i = 0; i < data.size(); ++i) {
            ret[i] = data.get(i).getBuffer();
        }
        // Detach the accumulated list; the local 'data' shadows the field from here on.
        List<PooledByteBuffer> data = this.data;
        this.data = new ArrayList<>();
        return new PooledByteBufferArray(data, ret);
    }

    /** @return true once the final frame of the message has been consumed. */
    public boolean isComplete() {
        return complete;
    }

    // Adapts a list of pooled buffers to the Pooled<ByteBuffer[]> interface,
    // releasing every underlying pooled buffer on free/discard/close.
    private static final class PooledByteBufferArray implements Pooled<ByteBuffer[]> {
        private final List<PooledByteBuffer> pooled;
        private final ByteBuffer[] data;

        private PooledByteBufferArray(List<PooledByteBuffer> pooled, ByteBuffer[] data) {
            this.pooled = pooled;
            this.data = data;
        }

        @Override
        public void discard() {
            for (PooledByteBuffer item : pooled) {
                item.close();
            }
        }

        @Override
        public void free() {
            for (PooledByteBuffer item : pooled) {
                item.close();
            }
        }

        @Override
        public ByteBuffer[] getResource() throws IllegalStateException {
            return data;
        }

        @Override
        public void close() {
            free();
        }
    }
}
| |
/*******************************************************************************
* Copyright 2002-2015, OpenNebula Project (OpenNebula.org), C12G Labs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package org.opennebula.client.vdc;
import org.opennebula.client.Client;
import org.opennebula.client.OneResponse;
import org.opennebula.client.PoolElement;
import org.w3c.dom.Node;
/**
 * This class represents an OpenNebula vdc.
 * It also offers static XML-RPC call wrappers.
 */
public class Vdc extends PoolElement{

    private static final String METHOD_PREFIX = "vdc.";
    private static final String ALLOCATE = METHOD_PREFIX + "allocate";
    private static final String INFO = METHOD_PREFIX + "info";
    private static final String DELETE = METHOD_PREFIX + "delete";
    private static final String UPDATE = METHOD_PREFIX + "update";
    private static final String RENAME = METHOD_PREFIX + "rename";
    private static final String ADDGROUP = METHOD_PREFIX + "addgroup";
    private static final String DELGROUP = METHOD_PREFIX + "delgroup";
    private static final String ADDCLUSTER = METHOD_PREFIX + "addcluster";
    private static final String DELCLUSTER = METHOD_PREFIX + "delcluster";
    private static final String ADDHOST = METHOD_PREFIX + "addhost";
    private static final String DELHOST = METHOD_PREFIX + "delhost";
    private static final String ADDDATASTORE = METHOD_PREFIX + "adddatastore";
    private static final String DELDATASTORE = METHOD_PREFIX + "deldatastore";
    private static final String ADDVNET = METHOD_PREFIX + "addvnet";
    private static final String DELVNET = METHOD_PREFIX + "delvnet";

    /**
     * Creates a new Vdc representation.
     *
     * @param id The vdc id.
     * @param client XML-RPC Client.
     */
    public Vdc(int id, Client client)
    {
        super(id, client);
    }

    /**
     * @see PoolElement
     */
    protected Vdc(Node xmlElement, Client client)
    {
        super(xmlElement, client);
    }

    // =================================
    // Static XML-RPC methods
    // =================================

    /**
     * Allocates a new vdc in OpenNebula
     *
     * @param client XML-RPC Client.
     * @param description A string containing the template of the vdc.
     * @return If successful the message contains the associated
     * id generated for this vdc.
     */
    public static OneResponse allocate(Client client, String description)
    {
        return client.call(ALLOCATE, description);
    }

    /**
     * Retrieves the information of the given vdc.
     *
     * @param client XML-RPC Client.
     * @param id The vdc id.
     * @return If successful the message contains the string
     * with the information returned by OpenNebula.
     */
    public static OneResponse info(Client client, int id)
    {
        return client.call(INFO, id);
    }

    /**
     * Deletes a vdc from OpenNebula.
     *
     * @param client XML-RPC Client.
     * @param id The vdc id.
     * @return A encapsulated response.
     */
    public static OneResponse delete(Client client, int id)
    {
        return client.call(DELETE, id);
    }

    /**
     * Replaces the template contents.
     *
     * @param client XML-RPC Client.
     * @param id The vdc id of the target vdc we want to modify.
     * @param new_template New template contents
     * @param append True to append new attributes instead of replace the whole template
     * @return If successful the message contains the vdc id.
     */
    public static OneResponse update(Client client, int id, String new_template,
        boolean append)
    {
        // The XML-RPC API expects the append flag as an integer (1/0).
        return client.call(UPDATE, id, new_template, append ? 1 : 0);
    }

    /**
     * Renames this vdc
     *
     * @param client XML-RPC Client.
     * @param id The vdc id of the target vdc
     * @param name New name for the vdc.
     * @return If an error occurs the error message contains the reason.
     */
    public static OneResponse rename(Client client, int id, String name)
    {
        return client.call(RENAME, id, name);
    }

    /**
     * Adds a group to this VDC
     *
     * @param client XML-RPC Client.
     * @param id The vdc id of the target vdc
     * @param groupId The group to add
     * @return If an error occurs the error message contains the reason.
     */
    public static OneResponse addGroup(Client client, int id, int groupId)
    {
        return client.call(ADDGROUP, id, groupId);
    }

    /**
     * Deletes a group from this VDC
     *
     * @param client XML-RPC Client.
     * @param id The vdc id of the target vdc
     * @param groupId The group to delete
     * @return If an error occurs the error message contains the reason.
     */
    public static OneResponse delGroup(Client client, int id, int groupId)
    {
        return client.call(DELGROUP, id, groupId);
    }

    /**
     * Adds a cluster to this VDC
     *
     * @param client XML-RPC Client.
     * @param id The vdc id of the target vdc
     * @param zoneId The cluster's zone
     * @param clusterId The cluster to add
     * @return If an error occurs the error message contains the reason.
     */
    public static OneResponse addCluster(Client client, int id, int zoneId, int clusterId)
    {
        return client.call(ADDCLUSTER, id, zoneId, clusterId);
    }

    /**
     * Deletes a cluster from this VDC
     *
     * @param client XML-RPC Client.
     * @param id The vdc id of the target vdc
     * @param zoneId The cluster's zone
     * @param clusterId The cluster to delete
     * @return If an error occurs the error message contains the reason.
     */
    public static OneResponse delCluster(Client client, int id, int zoneId, int clusterId)
    {
        return client.call(DELCLUSTER, id, zoneId, clusterId);
    }

    /**
     * Adds a host to this VDC
     *
     * @param client XML-RPC Client.
     * @param id The vdc id of the target vdc
     * @param zoneId The host's zone
     * @param hostId The host to add
     * @return If an error occurs the error message contains the reason.
     */
    public static OneResponse addHost(Client client, int id, int zoneId, int hostId)
    {
        return client.call(ADDHOST, id, zoneId, hostId);
    }

    /**
     * Deletes a host from this VDC
     *
     * @param client XML-RPC Client.
     * @param id The vdc id of the target vdc
     * @param zoneId The host's zone
     * @param hostId The host to delete
     * @return If an error occurs the error message contains the reason.
     */
    public static OneResponse delHost(Client client, int id, int zoneId, int hostId)
    {
        return client.call(DELHOST, id, zoneId, hostId);
    }

    /**
     * Adds a vnet to this VDC
     *
     * @param client XML-RPC Client.
     * @param id The vdc id of the target vdc
     * @param zoneId The vnet's zone
     * @param vnetId The vnet to add
     * @return If an error occurs the error message contains the reason.
     */
    public static OneResponse addVnet(Client client, int id, int zoneId, int vnetId)
    {
        return client.call(ADDVNET, id, zoneId, vnetId);
    }

    /**
     * Deletes a vnet from this VDC
     *
     * @param client XML-RPC Client.
     * @param id The vdc id of the target vdc
     * @param zoneId The vnet's zone
     * @param vnetId The vnet to delete
     * @return If an error occurs the error message contains the reason.
     */
    public static OneResponse delVnet(Client client, int id, int zoneId, int vnetId)
    {
        return client.call(DELVNET, id, zoneId, vnetId);
    }

    /**
     * Adds a datastore to this VDC
     *
     * @param client XML-RPC Client.
     * @param id The vdc id of the target vdc
     * @param zoneId The datastore's zone
     * @param datastoreId The datastore to add
     * @return If an error occurs the error message contains the reason.
     */
    public static OneResponse addDatastore(Client client, int id, int zoneId, int datastoreId)
    {
        return client.call(ADDDATASTORE, id, zoneId, datastoreId);
    }

    /**
     * Deletes a datastore from this VDC
     *
     * @param client XML-RPC Client.
     * @param id The vdc id of the target vdc
     * @param zoneId The datastore's zone
     * @param datastoreId The datastore to delete
     * @return If an error occurs the error message contains the reason.
     */
    public static OneResponse delDatastore(Client client, int id, int zoneId, int datastoreId)
    {
        return client.call(DELDATASTORE, id, zoneId, datastoreId);
    }

    // =================================
    // Instanced object XML-RPC methods
    // =================================
    // All instance methods delegate to the static wrappers above (consistent
    // with info()/delete()/update()/rename()) so the XML-RPC call logic lives
    // in exactly one place per operation.

    /**
     * Loads the xml representation of the vdc.
     * The info is also stored internally.
     *
     * @see Vdc#info(Client, int)
     */
    public OneResponse info()
    {
        OneResponse response = info(client, id);
        super.processInfo(response);
        return response;
    }

    /**
     * Deletes the vdc from OpenNebula.
     *
     * @see Vdc#delete(Client, int)
     */
    public OneResponse delete()
    {
        return delete(client, id);
    }

    /**
     * Replaces the template contents.
     *
     * @param new_template New template contents
     * @return If successful the message contains the vdc id.
     */
    public OneResponse update(String new_template)
    {
        return update(new_template, false);
    }

    /**
     * Replaces the template contents.
     *
     * @param new_template New template contents
     * @param append True to append new attributes instead of replace the whole template
     * @return If successful the message contains the vdc id.
     */
    public OneResponse update(String new_template, boolean append)
    {
        return update(client, id, new_template, append);
    }

    /**
     * Renames this Vdc
     *
     * @param name New name for the vdc.
     * @return If an error occurs the error message contains the reason.
     */
    public OneResponse rename(String name)
    {
        return rename(client, id, name);
    }

    /**
     * Adds a group to this VDC
     *
     * @param groupId The group to add
     * @return If an error occurs the error message contains the reason.
     */
    public OneResponse addGroup(int groupId)
    {
        return addGroup(client, id, groupId);
    }

    /**
     * Deletes a group from this VDC
     *
     * @param groupId The group to delete
     * @return If an error occurs the error message contains the reason.
     */
    public OneResponse delGroup(int groupId)
    {
        return delGroup(client, id, groupId);
    }

    /**
     * Adds a cluster to this VDC
     *
     * @param zoneId The cluster's zone
     * @param clusterId The cluster to add
     * @return If an error occurs the error message contains the reason.
     */
    public OneResponse addCluster(int zoneId, int clusterId)
    {
        return addCluster(client, id, zoneId, clusterId);
    }

    /**
     * Deletes a cluster from this VDC
     *
     * @param zoneId The cluster's zone
     * @param clusterId The cluster to delete
     * @return If an error occurs the error message contains the reason.
     */
    public OneResponse delCluster(int zoneId, int clusterId)
    {
        return delCluster(client, id, zoneId, clusterId);
    }

    /**
     * Adds a host to this VDC
     *
     * @param zoneId The host's zone
     * @param hostId The host to add
     * @return If an error occurs the error message contains the reason.
     */
    public OneResponse addHost(int zoneId, int hostId)
    {
        return addHost(client, id, zoneId, hostId);
    }

    /**
     * Deletes a host from this VDC
     *
     * @param zoneId The host's zone
     * @param hostId The host to delete
     * @return If an error occurs the error message contains the reason.
     */
    public OneResponse delHost(int zoneId, int hostId)
    {
        return delHost(client, id, zoneId, hostId);
    }

    /**
     * Adds a vnet to this VDC
     *
     * @param zoneId The vnet's zone
     * @param vnetId The vnet to add
     * @return If an error occurs the error message contains the reason.
     */
    public OneResponse addVnet(int zoneId, int vnetId)
    {
        return addVnet(client, id, zoneId, vnetId);
    }

    /**
     * Deletes a vnet from this VDC
     *
     * @param zoneId The vnet's zone
     * @param vnetId The vnet to delete
     * @return If an error occurs the error message contains the reason.
     */
    public OneResponse delVnet(int zoneId, int vnetId)
    {
        return delVnet(client, id, zoneId, vnetId);
    }

    /**
     * Adds a datastore to this VDC
     *
     * @param zoneId The datastore's zone
     * @param datastoreId The datastore to add
     * @return If an error occurs the error message contains the reason.
     */
    public OneResponse addDatastore(int zoneId, int datastoreId)
    {
        return addDatastore(client, id, zoneId, datastoreId);
    }

    /**
     * Deletes a datastore from this VDC
     *
     * @param zoneId The datastore's zone
     * @param datastoreId The datastore to delete
     * @return If an error occurs the error message contains the reason.
     */
    public OneResponse delDatastore(int zoneId, int datastoreId)
    {
        return delDatastore(client, id, zoneId, datastoreId);
    }

    // =================================
    // Helpers
    // =================================

}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.namenode;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.metrics2.lib.MutableRatesWithAggregation;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Timer;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_LOCK_SUPPRESS_WARNING_INTERVAL_DEFAULT;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_LOCK_SUPPRESS_WARNING_INTERVAL_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_FSLOCK_FAIR_DEFAULT;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_FSLOCK_FAIR_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_LOCK_DETAILED_METRICS_DEFAULT;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_LOCK_DETAILED_METRICS_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_READ_LOCK_REPORTING_THRESHOLD_MS_DEFAULT;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_READ_LOCK_REPORTING_THRESHOLD_MS_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_WRITE_LOCK_REPORTING_THRESHOLD_MS_DEFAULT;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_WRITE_LOCK_REPORTING_THRESHOLD_MS_KEY;
/**
* Mimics a ReentrantReadWriteLock but does not directly implement the interface
* so more sophisticated locking capabilities and logging/metrics are possible.
 * If {@link org.apache.hadoop.hdfs.DFSConfigKeys#DFS_NAMENODE_LOCK_DETAILED_METRICS_KEY}
 * is set to be true, metrics will be emitted into the FSNamesystem metrics registry
* for each operation which acquires this lock indicating how long the operation
* held the lock for. These metrics have names of the form
* FSN(Read|Write)LockNanosOperationName, where OperationName denotes the name
* of the operation that initiated the lock hold (this will be OTHER for certain
* uncategorized operations) and they export the hold time values in
* nanoseconds. Note that if a thread dies, metrics produced after the
* most recent snapshot will be lost due to the use of
* {@link MutableRatesWithAggregation}. However since threads are re-used
* between operations this should not generally be an issue.
*/
class FSNamesystemLock {
  /** Underlying lock; exposed (protected) so tests can inspect or replace it. */
  @VisibleForTesting
  protected ReentrantReadWriteLock coarseLock;

  /** Whether per-operation hold-time metrics are recorded (see class javadoc). */
  private final boolean metricsEnabled;
  /** Sink for per-operation lock hold times, reported in nanoseconds. */
  private final MutableRatesWithAggregation detailedHoldTimeMetrics;
  /** Time source; injectable through the @VisibleForTesting constructor. */
  private final Timer timer;

  /**
   * Log statements about long lock hold times will not be produced more
   * frequently than this interval.
   */
  private final long lockSuppressWarningIntervalMs;

  /** Threshold (ms) for long holding write lock report. */
  private final long writeLockReportingThresholdMs;
  /** Last time stamp for write lock. Keep the longest one for multi-entrance.*/
  private long writeLockHeldTimeStampNanos;
  // Write-side report bookkeeping is plain (non-atomic): it is only read and
  // written while the current thread still holds the write lock.
  private int numWriteLockWarningsSuppressed = 0;
  /** Time stamp (ms) of the last time a write lock report was written. */
  private long timeStampOfLastWriteLockReportMs = 0;
  /** Longest time (ms) a write lock was held since the last report. */
  private long longestWriteLockHeldIntervalMs = 0;

  /** Threshold (ms) for long holding read lock report. */
  private final long readLockReportingThresholdMs;
  /**
   * Last time stamp for read lock. Keep the longest one for
   * multi-entrance. This is ThreadLocal since there could be
   * many read locks held simultaneously.
   */
  private final ThreadLocal<Long> readLockHeldTimeStampNanos =
      new ThreadLocal<Long>() {
        @Override
        public Long initialValue() {
          // MAX_VALUE marks "no outermost acquisition stamped yet".
          return Long.MAX_VALUE;
        }
      };
  // Read-side report bookkeeping must be atomic: many reader threads may
  // release the lock and attempt to report concurrently.
  private final AtomicInteger numReadLockWarningsSuppressed =
      new AtomicInteger(0);
  /** Time stamp (ms) of the last time a read lock report was written. */
  private final AtomicLong timeStampOfLastReadLockReportMs = new AtomicLong(0);
  /** Longest time (ms) a read lock was held since the last report. */
  private final AtomicLong longestReadLockHeldIntervalMs = new AtomicLong(0);

  /** Operation name used for metrics when the caller does not supply one. */
  @VisibleForTesting
  static final String OP_NAME_OTHER = "OTHER";
  // Metric names take the form FSN(Read|Write)Lock<OperationName>Nanos.
  private static final String READ_LOCK_METRIC_PREFIX = "FSNReadLock";
  private static final String WRITE_LOCK_METRIC_PREFIX = "FSNWriteLock";
  private static final String LOCK_METRIC_SUFFIX = "Nanos";
  private static final String OVERALL_METRIC_NAME = "Overall";

  FSNamesystemLock(Configuration conf,
      MutableRatesWithAggregation detailedHoldTimeMetrics) {
    this(conf, detailedHoldTimeMetrics, new Timer());
  }

  /**
   * Constructor allowing a caller-supplied {@link Timer}; used by tests to
   * control the clock.
   *
   * @param conf configuration supplying fairness, thresholds and intervals
   * @param detailedHoldTimeMetrics sink for per-operation hold times
   * @param timer time source for all interval measurements
   */
  @VisibleForTesting
  FSNamesystemLock(Configuration conf,
      MutableRatesWithAggregation detailedHoldTimeMetrics, Timer timer) {
    boolean fair = conf.getBoolean(DFS_NAMENODE_FSLOCK_FAIR_KEY,
        DFS_NAMENODE_FSLOCK_FAIR_DEFAULT);
    FSNamesystem.LOG.info("fsLock is fair: " + fair);
    this.coarseLock = new ReentrantReadWriteLock(fair);
    this.timer = timer;
    this.writeLockReportingThresholdMs = conf.getLong(
        DFS_NAMENODE_WRITE_LOCK_REPORTING_THRESHOLD_MS_KEY,
        DFS_NAMENODE_WRITE_LOCK_REPORTING_THRESHOLD_MS_DEFAULT);
    this.readLockReportingThresholdMs = conf.getLong(
        DFS_NAMENODE_READ_LOCK_REPORTING_THRESHOLD_MS_KEY,
        DFS_NAMENODE_READ_LOCK_REPORTING_THRESHOLD_MS_DEFAULT);
    this.lockSuppressWarningIntervalMs = conf.getTimeDuration(
        DFS_LOCK_SUPPRESS_WARNING_INTERVAL_KEY,
        DFS_LOCK_SUPPRESS_WARNING_INTERVAL_DEFAULT, TimeUnit.MILLISECONDS);
    this.metricsEnabled = conf.getBoolean(
        DFS_NAMENODE_LOCK_DETAILED_METRICS_KEY,
        DFS_NAMENODE_LOCK_DETAILED_METRICS_DEFAULT);
    FSNamesystem.LOG.info("Detailed lock hold time metrics enabled: " +
        this.metricsEnabled);
    this.detailedHoldTimeMetrics = detailedHoldTimeMetrics;
  }

  /** Acquire the read lock, stamping the start time on the outermost entry. */
  public void readLock() {
    coarseLock.readLock().lock();
    // Only stamp the outermost acquisition so re-entrant holds measure the
    // full interval from first entry.
    if (coarseLock.getReadHoldCount() == 1) {
      readLockHeldTimeStampNanos.set(timer.monotonicNowNanos());
    }
  }

  /**
   * Acquire the read lock interruptibly, stamping the start time on the
   * outermost entry.
   *
   * @throws InterruptedException if the thread is interrupted while waiting
   */
  public void readLockInterruptibly() throws InterruptedException {
    coarseLock.readLock().lockInterruptibly();
    if (coarseLock.getReadHoldCount() == 1) {
      readLockHeldTimeStampNanos.set(timer.monotonicNowNanos());
    }
  }

  /** Release the read lock, attributing the hold time to {@link #OP_NAME_OTHER}. */
  public void readUnlock() {
    readUnlock(OP_NAME_OTHER);
  }

  /**
   * Release the read lock. On the outermost release the hold time is added to
   * the metrics under {@code opName}, and a log report is produced when the
   * hold exceeded the configured threshold (rate-limited by
   * {@link #lockSuppressWarningIntervalMs}).
   *
   * @param opName operation name used for the hold-time metric
   */
  public void readUnlock(String opName) {
    // Hold count must be sampled before unlocking; == 1 means this release is
    // the outermost one for this thread.
    final boolean needReport = coarseLock.getReadHoldCount() == 1;
    final long readLockIntervalNanos =
        timer.monotonicNowNanos() - readLockHeldTimeStampNanos.get();
    coarseLock.readLock().unlock();
    if (needReport) {
      addMetric(opName, readLockIntervalNanos, false);
      // Clean up the ThreadLocal so a pooled thread starts fresh next time.
      readLockHeldTimeStampNanos.remove();
    }
    final long readLockIntervalMs =
        TimeUnit.NANOSECONDS.toMillis(readLockIntervalNanos);
    if (needReport && readLockIntervalMs >= this.readLockReportingThresholdMs) {
      // CAS loop: only replace the recorded longest interval if ours is
      // larger (subtraction form avoids issues if another thread raced us).
      long localLongestReadLock;
      do {
        localLongestReadLock = longestReadLockHeldIntervalMs.get();
      } while (localLongestReadLock - readLockIntervalMs < 0 &&
          !longestReadLockHeldIntervalMs.compareAndSet(localLongestReadLock,
              readLockIntervalMs));
      // CAS loop: claim the right to log. If another thread reported within
      // the suppression window, just bump the suppressed counter and return.
      long localTimeStampOfLastReadLockReport;
      long nowMs;
      do {
        nowMs = timer.monotonicNow();
        localTimeStampOfLastReadLockReport =
            timeStampOfLastReadLockReportMs.get();
        if (nowMs - localTimeStampOfLastReadLockReport <
            lockSuppressWarningIntervalMs) {
          numReadLockWarningsSuppressed.incrementAndGet();
          return;
        }
      } while (!timeStampOfLastReadLockReportMs.compareAndSet(
          localTimeStampOfLastReadLockReport, nowMs));
      // We won the CAS: reset counters and emit the report.
      int numSuppressedWarnings = numReadLockWarningsSuppressed.getAndSet(0);
      long longestLockHeldIntervalMs =
          longestReadLockHeldIntervalMs.getAndSet(0);
      FSNamesystem.LOG.info("FSNamesystem read lock held for " +
          readLockIntervalMs + " ms via\n" +
          StringUtils.getStackTrace(Thread.currentThread()) +
          "\tNumber of suppressed read-lock reports: " + numSuppressedWarnings +
          "\n\tLongest read-lock held interval: " + longestLockHeldIntervalMs);
    }
  }

  /** Acquire the write lock, stamping the start time on the outermost entry. */
  public void writeLock() {
    coarseLock.writeLock().lock();
    if (coarseLock.getWriteHoldCount() == 1) {
      writeLockHeldTimeStampNanos = timer.monotonicNowNanos();
    }
  }

  /**
   * Acquire the write lock interruptibly, stamping the start time on the
   * outermost entry.
   *
   * @throws InterruptedException if the thread is interrupted while waiting
   */
  public void writeLockInterruptibly() throws InterruptedException {
    coarseLock.writeLock().lockInterruptibly();
    if (coarseLock.getWriteHoldCount() == 1) {
      writeLockHeldTimeStampNanos = timer.monotonicNowNanos();
    }
  }

  /**
   * Unlocks FSNameSystem write lock. This internally calls {@link
   * FSNamesystemLock#writeUnlock(String, boolean)}
   */
  public void writeUnlock() {
    writeUnlock(OP_NAME_OTHER, false);
  }

  /**
   * Unlocks FSNameSystem write lock. This internally calls {@link
   * FSNamesystemLock#writeUnlock(String, boolean)}
   *
   * @param opName Operation name.
   */
  public void writeUnlock(String opName) {
    writeUnlock(opName, false);
  }

  /**
   * Unlocks FSNameSystem write lock.
   *
   * @param opName Operation name
   * @param suppressWriteLockReport When false, event of write lock being held
   * for long time will be logged in logs and metrics.
   */
  public void writeUnlock(String opName, boolean suppressWriteLockReport) {
    // Report only on the outermost release and only when this thread truly
    // holds the write lock; otherwise unlock() below would throw anyway.
    final boolean needReport = !suppressWriteLockReport && coarseLock
        .getWriteHoldCount() == 1 && coarseLock.isWriteLockedByCurrentThread();
    final long currentTimeNanos = timer.monotonicNowNanos();
    final long writeLockIntervalNanos =
        currentTimeNanos - writeLockHeldTimeStampNanos;
    final long currentTimeMs = TimeUnit.NANOSECONDS.toMillis(currentTimeNanos);
    final long writeLockIntervalMs =
        TimeUnit.NANOSECONDS.toMillis(writeLockIntervalNanos);

    boolean logReport = false;
    int numSuppressedWarnings = 0;
    long longestLockHeldIntervalMs = 0;
    // All bookkeeping below happens before unlock(), i.e. while still holding
    // the write lock, so the plain (non-atomic) fields are safe to mutate.
    if (needReport &&
        writeLockIntervalMs >= this.writeLockReportingThresholdMs) {
      if (writeLockIntervalMs > longestWriteLockHeldIntervalMs) {
        longestWriteLockHeldIntervalMs = writeLockIntervalMs;
      }
      if (currentTimeMs - timeStampOfLastWriteLockReportMs >
          this.lockSuppressWarningIntervalMs) {
        logReport = true;
        numSuppressedWarnings = numWriteLockWarningsSuppressed;
        numWriteLockWarningsSuppressed = 0;
        longestLockHeldIntervalMs = longestWriteLockHeldIntervalMs;
        longestWriteLockHeldIntervalMs = 0;
        timeStampOfLastWriteLockReportMs = currentTimeMs;
      } else {
        numWriteLockWarningsSuppressed++;
      }
    }
    coarseLock.writeLock().unlock();
    if (needReport) {
      addMetric(opName, writeLockIntervalNanos, true);
    }
    // Logging is deliberately done after unlock() to keep the hold short.
    if (logReport) {
      FSNamesystem.LOG.info("FSNamesystem write lock held for " +
          writeLockIntervalMs + " ms via\n" +
          StringUtils.getStackTrace(Thread.currentThread()) +
          "\tNumber of suppressed write-lock reports: " +
          numSuppressedWarnings + "\n\tLongest write-lock held interval: " +
          longestLockHeldIntervalMs);
    }
  }

  /** @return the number of read-lock holds by the current thread. */
  public int getReadHoldCount() {
    return coarseLock.getReadHoldCount();
  }

  /** @return the number of write-lock holds by the current thread. */
  public int getWriteHoldCount() {
    return coarseLock.getWriteHoldCount();
  }

  /** @return true if the current thread holds the write lock. */
  public boolean isWriteLockedByCurrentThread() {
    return coarseLock.isWriteLockedByCurrentThread();
  }

  /** @return a new {@link Condition} bound to the write lock. */
  public Condition newWriteLockCondition() {
    return coarseLock.writeLock().newCondition();
  }

  /**
   * Returns the QueueLength of waiting threads.
   *
   * A larger number indicates greater lock contention.
   *
   * @return int - Number of threads waiting on this lock
   */
  public int getQueueLength() {
    return coarseLock.getQueueLength();
  }

  /**
   * Add the lock hold time for a recent operation to the metrics.
   * @param operationName Name of the operation for which to record the time
   * @param value Length of time the lock was held (nanoseconds)
   * @param isWrite true for the write lock, false for the read lock
   */
  private void addMetric(String operationName, long value, boolean isWrite) {
    if (metricsEnabled) {
      String opMetric = getMetricName(operationName, isWrite);
      detailedHoldTimeMetrics.add(opMetric, value);
      // Also fold every sample into the aggregate "Overall" metric.
      String overallMetric = getMetricName(OVERALL_METRIC_NAME, isWrite);
      detailedHoldTimeMetrics.add(overallMetric, value);
    }
  }

  /** Builds FSN(Read|Write)Lock<OperationName>Nanos metric names. */
  private static String getMetricName(String operationName, boolean isWrite) {
    return (isWrite ? WRITE_LOCK_METRIC_PREFIX : READ_LOCK_METRIC_PREFIX) +
        org.apache.commons.lang3.StringUtils.capitalize(operationName) +
        LOCK_METRIC_SUFFIX;
  }
}
| |
package net.thecodersbreakfast.seren;
import static java.lang.reflect.Modifier.*;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.lang.instrument.ClassFileTransformer;
import java.lang.instrument.IllegalClassFormatException;
import java.security.ProtectionDomain;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javassist.CannotCompileException;
import javassist.ClassPool;
import javassist.CtClass;
import javassist.CtField;
import javassist.CtMethod;
import javassist.LoaderClassPath;
import javassist.NotFoundException;
import net.thecodersbreakfast.seren.filter.ClassFilter;
/**
* A {@link ClassFileTransformer} that enhances the serialization speed by injecting optimized writeObject/readObject
* methods.
*
* @author Olivier Croisier
*/
public class SerenClassTransformer implements ClassFileTransformer {

    /** Decides which classes are eligible for enhancement. */
    private final ClassFilter filter;
    /** When true, progress messages and full stack traces are printed. */
    private final boolean verbose;
    /**
     * One {@link ClassPool} per ClassLoader. Access is confined to the
     * synchronized {@link #getClassPool(ClassLoader)}: the JVM may invoke
     * {@link #transform} concurrently from several class-loading threads,
     * and an unsynchronized HashMap is not safe under concurrent mutation.
     */
    private final Map<ClassLoader, ClassPool> pools = new HashMap<ClassLoader, ClassPool>();

    /**
     * @param filter  selects the classes to enhance (must not be null)
     * @param verbose enables diagnostic output on System.out/System.err
     */
    public SerenClassTransformer(ClassFilter filter, boolean verbose) {
        this.filter = filter;
        this.verbose = verbose;
    }

    /**
     * Injects optimized writeObject/readObject methods into accepted classes.
     * On any failure the original, unmodified bytecode is returned so class
     * loading never breaks because of this transformer.
     *
     * @return the (possibly) enhanced class bytes
     */
    @Override
    public byte[] transform(ClassLoader loader, String className, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] classBytes) throws IllegalClassFormatException {
        CtClass cl = null;
        ClassPool pool = getClassPool(loader);
        try {
            cl = pool.makeClass(new ByteArrayInputStream(classBytes));
            if (filter.acceptClass(loader, cl)) {
                if (verbose) {
                    System.out.println("[SEREN] Enhancing class : " + cl.getName());
                }
                List<FieldInfo> serializableFields = findSerializableFields(cl);
                createCustomSerializationMethods(cl, serializableFields);
                classBytes = cl.toBytecode();
            }
        } catch (Exception e) {
            // Deliberate best-effort: an enhancement failure must not abort
            // class loading, so fall through and return the original bytes.
            System.err.println("[SEREN] Warning: could not enhance class " + className + " : " + e.getMessage());
            if (verbose) {
                e.printStackTrace();
            }
        } finally {
            if (cl != null) {
                cl.detach();
            }
        }
        return classBytes;
    }

    /**
     * Returns (lazily creating) the ClassPool for the given loader.
     * Synchronized because {@link #transform} may run on multiple threads.
     */
    private synchronized ClassPool getClassPool(ClassLoader loader) {
        ClassPool pool = pools.get(loader);
        if (pool == null) {
            pool = new ClassPool(true);
            pool.appendClassPath(new LoaderClassPath(loader));
            pools.put(loader, pool);
        }
        return pool;
    }

    /**
     * Generates and attaches writeObject/readObject methods covering the
     * given fields. Does nothing when there is nothing to serialize.
     *
     * @throws CannotCompileException if javassist rejects the generated code
     */
    private void createCustomSerializationMethods(CtClass cl,
            List<FieldInfo> serializableFields) throws CannotCompileException,
            IOException {
        if (serializableFields == null || serializableFields.isEmpty()) {
            return;
        }
        StringBuilder serializationCode = new StringBuilder();
        StringBuilder deserializationCode = new StringBuilder();
        generateCodeForFields(serializableFields, serializationCode, deserializationCode);
        String serCode = serializationCode.toString();
        String deserCode = deserializationCode.toString();
        CtMethod writeObjectMethod = CtMethod.make(serCode, cl);
        cl.addMethod(writeObjectMethod);
        CtMethod readObjectMethod = CtMethod.make(deserCode, cl);
        cl.addMethod(readObjectMethod);
    }

    /**
     * Emits the full bodies of writeObject/readObject, dispatching each field
     * to the specialized generator for its kind (primitive/wrapper/String/
     * generic object).
     */
    private void generateCodeForFields(List<FieldInfo> serializableFields, Appendable serializationCode, Appendable deserializationCode) throws IOException {
        serializationCode.append("private void writeObject (java.io.ObjectOutputStream out) throws java.io.IOException { \n");
        deserializationCode.append("private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException { \n");
        for (FieldInfo field : serializableFields) {
            if (field.isPrimitive()) {
                appendCodeForPrimitiveField(field, serializationCode, deserializationCode);
            } else if (field.isWrapper()) {
                appendCodeForWrapperField(field, serializationCode, deserializationCode);
            } else if (field.isString()) {
                appendCodeForStringField(field, serializationCode, deserializationCode);
            } else {
                appendCodeForGenericField(field, serializationCode, deserializationCode);
            }
        }
        serializationCode.append("} \n");
        deserializationCode.append("} \n");
    }

    /** Primitive field: plain out.writeX(field) / field = in.readX(). */
    private void appendCodeForPrimitiveField(FieldInfo field, Appendable serializationCode, Appendable deserializationCode) throws IOException {
        String capitalizedType = capitalize(field.getSimpleType());
        serializationCode.append("out.write");
        serializationCode.append(capitalizedType);
        serializationCode.append("(");
        serializationCode.append(field.getName());
        serializationCode.append("); \n");
        deserializationCode.append(field.getName());
        deserializationCode.append(" = in.read");
        deserializationCode.append(capitalizedType);
        deserializationCode.append("(); \n");
    }

    /**
     * Wrapper field (Integer, Long, ...): a boolean null-marker followed by
     * the unboxed primitive; reconstructed with valueOf() on read.
     */
    private void appendCodeForWrapperField(FieldInfo field, Appendable serializationCode, Appendable deserializationCode) throws IOException {
        String name = field.getName();
        String type = field.toPrimitiveType();
        serializationCode.append("out.writeBoolean(").append(name).append(" != null); \n");
        serializationCode.append("if (").append(name).append(" != null) { \n");
        serializationCode.append("    out.write").append(capitalize(type)).append("(").append(name).append(".").append(type).append("Value()); \n");
        serializationCode.append("} \n");
        deserializationCode.append("if (in.readBoolean()) { \n");
        deserializationCode.append(name + " = " + field.getType() + ".valueOf(in.read").append(capitalize(type)).append("()); \n");
        deserializationCode.append("} else { \n");
        deserializationCode.append(name + " = null; \n");
        deserializationCode.append("} \n");
    }

    /**
     * String field: null-marker, then writeUTF for short strings; strings
     * longer than 0xFFFF exceed writeUTF's encoded-length limit and are
     * written with writeObject instead (a second boolean selects the path).
     */
    private void appendCodeForStringField(FieldInfo field, Appendable serializationCode, Appendable deserializationCode) throws IOException {
        String name = field.getName();
        serializationCode.append("out.writeBoolean(").append(name).append(" != null); \n");
        serializationCode.append("if (").append(name).append(" != null) { \n");
        serializationCode.append("    out.writeBoolean(").append(name).append(".length() > 0xFFFF); \n");
        serializationCode.append("    if (").append(name).append(".length() > 0xFFFF) { \n");
        serializationCode.append("        out.writeObject(").append(name).append("); \n");
        serializationCode.append("    } else { \n");
        serializationCode.append("        out.writeUTF(").append(name).append("); \n");
        serializationCode.append("    } \n");
        serializationCode.append("} \n");
        deserializationCode.append("if (in.readBoolean()) { \n");
        deserializationCode.append("    if (in.readBoolean()) { \n");
        deserializationCode.append(name).append(" = (String) in.readObject(); \n");
        deserializationCode.append("    } else { \n");
        deserializationCode.append(name).append("= in.readUTF(); \n");
        deserializationCode.append("    } \n");
        deserializationCode.append("} else { \n");
        deserializationCode.append(name + " = null; \n");
        deserializationCode.append("} \n");
    }

    /** Any other object field: null-marker plus writeObject/readObject+cast. */
    private void appendCodeForGenericField(FieldInfo field, Appendable serializationCode, Appendable deserializationCode) throws IOException {
        String name = field.getName();
        serializationCode.append("out.writeBoolean(").append(name).append(" != null); \n");
        serializationCode.append("if (").append(name).append(" != null) { \n");
        serializationCode.append("out.writeObject(").append(name).append("); \n");
        serializationCode.append("} \n");
        deserializationCode.append("if (in.readBoolean()) { \n");
        deserializationCode.append(name + " = (" + field.getType() + ") in.readObject(); \n");
        deserializationCode.append("} else { \n");
        deserializationCode.append(name + " = null; \n");
        deserializationCode.append("} \n");
    }

    /**
     * Collects every non-static, non-transient declared field of the class.
     * NOTE(review): final fields are included and flagged via isFinalField —
     * presumably FieldInfo consumers handle finals specially; verify, since
     * the generated readObject assigns fields directly.
     */
    private List<FieldInfo> findSerializableFields(CtClass cl) throws NotFoundException {
        List<FieldInfo> serializableFields = new ArrayList<FieldInfo>();
        CtField[] fields = cl.getDeclaredFields();
        if (fields != null) {
            for (CtField field : fields) {
                if (isSerializableField(field)) {
                    serializableFields.add(new FieldInfo(field.getName(), field.getType().getName(), isFinalField(field)));
                }
            }
        }
        return serializableFields;
    }

    /** A field takes part in serialization unless it is static or transient. */
    private boolean isSerializableField(CtField field) {
        return !isStatic(field.getModifiers()) && !isTransient(field.getModifiers());
    }

    /** @return true if the field carries the final modifier. */
    private boolean isFinalField(CtField field) {
        return isFinal(field.getModifiers());
    }

    /**
     * Upper-cases the first character ("int" -> "Int"). Null-safe, and safe
     * for the empty string (previously threw StringIndexOutOfBoundsException).
     */
    private static String capitalize(String s) {
        if (s == null) return null;
        if (s.length() == 0) return s;
        if (s.length() == 1) return s.toUpperCase();
        return Character.toUpperCase(s.charAt(0)) + s.substring(1);
    }
}
| |
package org.opencb.opencga.storage.hadoop.variant.index.sample;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.Job;
import org.opencb.biodata.models.variant.Variant;
import org.opencb.opencga.storage.core.exceptions.StorageEngineException;
import org.opencb.opencga.storage.core.variant.VariantStorageOptions;
import org.opencb.opencga.storage.hadoop.variant.AbstractVariantsTableDriver;
import org.opencb.opencga.storage.hadoop.variant.GenomeHelper;
import org.opencb.opencga.storage.hadoop.variant.HadoopVariantStorageOptions;
import org.opencb.opencga.storage.hadoop.variant.gaps.FillGapsFromArchiveMapper;
import org.opencb.opencga.storage.hadoop.variant.mr.VariantsTableMapReduceHelper;
import org.opencb.opencga.storage.hadoop.variant.mr.VariantMapReduceUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.*;
/**
* Creates and executes a MR job that finishes loading all the pending variants in the SampleIndex table.
 * For each sample row, reads all pending variants like _V_{VARIANT}_{GT}, and updates the list of variants
* for that genotype and updates the genotype counters.
*
* Only genotypes without the main alternate (0/2, 2/3, ...) should be found as pending.
* Genotypes with the main alternate (0/1, 1/1, 1/2, ...) are already loaded by {@link SampleIndexDBLoader}
*
* Created on 30/05/18.
*
* @author Jacobo Coll <jacobo167@gmail.com>
*/
public class SampleIndexConsolidationDrive extends AbstractVariantsTableDriver {

    /** Counter group whose counter names are the genotypes seen by the job. */
    public static final String GENOTYPES_COUNTER_GROUP_NAME = "genotypes";
    private static final Logger LOG = LoggerFactory.getLogger(SampleIndexConsolidationDrive.class);

    // Sample-index table name; the MR job both reads from and writes to it.
    private String sampleIndexTable;
    // Explicit sample ids to consolidate; ignored when allSamples is true.
    private int[] samples;
    // True when no sample list was configured: scan the entire table.
    private boolean allSamples;

    /**
     * Resolves the sample-index table name and the optional sample list.
     *
     * @throws IllegalArgumentException if the table name cannot be resolved
     */
    @Override
    protected void parseAndValidateParameters() throws IOException {
        super.parseAndValidateParameters();
        sampleIndexTable = generator.getSampleIndexTableName(getStudyId());
        if (sampleIndexTable == null || sampleIndexTable.isEmpty()) {
            throw new IllegalArgumentException("Missing sampleIndex table!");
        }
        samples = getConf().getInts(FillGapsFromArchiveMapper.SAMPLES);
        if (samples == null || samples.length == 0) {
            allSamples = true;
        } else {
            allSamples = false;
        }
    }

    @Override
    protected Class<SampleIndexConsolidationMapper> getMapperClass() {
        return SampleIndexConsolidationMapper.class;
    }

    /**
     * Configures the table-mapper job: one full-table scan when consolidating
     * all samples, otherwise one row-prefix scan per requested sample.
     */
    @Override
    protected Job setupJob(Job job, String archiveTable, String variantTable) throws IOException {
        List<Scan> scans = new ArrayList<>();
        int caching = job.getConfiguration().getInt(HadoopVariantStorageOptions.MR_HBASE_SCAN_CACHING.key(), 100);
        LOG.info("Scan set Caching to " + caching);
        Scan templateScan = new Scan();
        templateScan.setCaching(caching);        // 1 is the default in Scan
        templateScan.setCacheBlocks(false);      // don't set to true for MR jobs
        if (allSamples) {
            scans.add(templateScan);
        } else {
            for (int sample : samples) {
                // Each sample owns a contiguous row-key range, addressed by prefix.
                Scan newScan = new Scan(templateScan);
                newScan.setRowPrefixFilter(SampleIndexSchema.toRowKey(sample));
                scans.add(newScan);
            }
        }
        for (int i = 0; i < scans.size(); i++) {
            Scan s = scans.get(i);
            LOG.info("scan[" + i + "]= " + s.toJSON());
        }
        // set other scan attrs
        VariantMapReduceUtil.initTableMapperJob(job, sampleIndexTable, sampleIndexTable, scans, getMapperClass());
        // Speculative tasks would issue duplicate mutations against HBase.
        job.setSpeculativeExecution(false);
        // job.getConfiguration().setInt(MRJobConfig.TASK_TIMEOUT, 20 * 60 * 1000);
        return job;
    }

    /**
     * After a successful run, merges the genotypes observed by the mappers
     * (exported through the "genotypes" counter group) into the study
     * metadata's LOADED_GENOTYPES attribute.
     */
    @Override
    protected void postExecution(Job job) throws IOException, StorageEngineException {
        super.postExecution(job);
        // Update list of loaded genotypes
        Set<String> gts = new HashSet<>();
        if (job.isSuccessful()) {
            for (Counter counter : job.getCounters().getGroup(GENOTYPES_COUNTER_GROUP_NAME)) {
                gts.add(counter.getName());
            }
            if (!gts.isEmpty()) {
                getMetadataManager().updateStudyMetadata(getStudyId(), sm -> {
                    gts.addAll(sm.getAttributes().getAsStringList(VariantStorageOptions.LOADED_GENOTYPES.key()));
                    sm.getAttributes().put(VariantStorageOptions.LOADED_GENOTYPES.key(), gts);
                    return sm;
                });
            }
        }
    }

    @Override
    protected String getJobOperationName() {
        return "consolidate_sample_index";
    }

    /**
     * Mapper that, for each sample-index row, folds the pending "_V_" columns
     * into the per-genotype variant lists and counters, then deletes the
     * pending columns.
     */
    public static class SampleIndexConsolidationMapper extends TableMapper<ImmutableBytesWritable, Mutation> {

        private byte[] family;
        private SampleIndexVariantBiConverter converter;

        @Override
        protected void setup(Context context) throws IOException, InterruptedException {
            new GenomeHelper(context.getConfiguration());
            family = GenomeHelper.COLUMN_FAMILY_BYTES;
            converter = new SampleIndexVariantBiConverter();
        }

        @Override
        protected void map(ImmutableBytesWritable k, Result result, Context context) throws IOException, InterruptedException {
            // genotype -> pending variant strings found in this row
            Map<String, List<String>> map = new HashMap<>();
            // non-pending cells of the row, keyed by qualifier string
            Map<String, Cell> otherCells = new HashMap<>();
            Delete delete = new Delete(result.getRow());
            for (Cell cell : result.rawCells()) {
                byte[] column = CellUtil.cloneQualifier(cell);
                Pair<String, String> pair = HBaseToSampleIndexConverter.parsePendingColumn(column);
                if (pair != null) {
                    // Pending column: schedule it for deletion and bucket its
                    // variant under its genotype.
                    delete.addColumn(family, column);
                    String variant = pair.getKey();
                    String gt = pair.getValue();
                    map.computeIfAbsent(gt, key -> new ArrayList<>()).add(variant);
                } else {
                    otherCells.put(Bytes.toString(column), cell);
                }
            }
            if (!delete.isEmpty()) {
                context.getCounter(VariantsTableMapReduceHelper.COUNTER_GROUP_NAME, "consolidation").increment(1);
                Put put = new Put(result.getRow());
                for (Map.Entry<String, List<String>> entry : map.entrySet()) {
                    String gt = entry.getKey();
                    context.getCounter(GENOTYPES_COUNTER_GROUP_NAME, gt).increment(entry.getValue().size());
                    List<String> variants = entry.getValue();
                    Cell cell = otherCells.get(gt);
                    if (cell == null) {
                        // Genotype column did not exist yet: create it.
                        // NOTE(review): this branch writes the qualifier as
                        // Bytes.toBytes(gt) while the merge branch uses
                        // SampleIndexSchema.toGenotypeColumn(gt) — confirm the
                        // two produce the same qualifier.
                        context.getCounter(VariantsTableMapReduceHelper.COUNTER_GROUP_NAME, "new_gt").increment(1);
                        put.addColumn(family, Bytes.toBytes(gt), converter.toBytesFromStrings(variants));
                        put.addColumn(family, SampleIndexSchema.toGenotypeCountColumn(gt), Bytes.toBytes(variants.size()));
                    } else {
                        context.getCounter(VariantsTableMapReduceHelper.COUNTER_GROUP_NAME, "merged_gt").increment(1);
                        // Merge with existing values
                        TreeSet<Variant> variantsSet = new TreeSet<>(SampleIndexSchema.INTRA_CHROMOSOME_VARIANT_COMPARATOR);
                        List<Variant> loadedVariants = converter.toVariants(cell);
                        variantsSet.addAll(loadedVariants);
                        for (String variant : variants) {
                            variantsSet.add(new Variant(variant));
                        }
                        if (loadedVariants.size() == variantsSet.size()) {
                            // Every pending variant was already loaded: nothing to write.
                            context.getCounter(VariantsTableMapReduceHelper.COUNTER_GROUP_NAME, "merged_gt_skip").increment(1);
                        } else {
                            put.addColumn(family, SampleIndexSchema.toGenotypeColumn(gt), converter.toBytes(variantsSet));
                            put.addColumn(family, SampleIndexSchema.toGenotypeCountColumn(gt), Bytes.toBytes(variantsSet.size()));
                        }
                    }
                }
                if (!put.isEmpty()) {
                    context.write(k, put);
                }
                // Always emit the delete so the pending columns are removed.
                context.write(k, delete);
            }
        }
    }

    public static void main(String[] args) {
        try {
            System.exit(new SampleIndexConsolidationDrive().privateMain(args));
        } catch (Exception e) {
            LOG.error("Error executing " + SampleIndexConsolidationDrive.class, e);
            System.exit(1);
        }
    }
}
| |
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.server;
import java.io.FileDescriptor;
import java.io.PrintWriter;
import java.net.InetAddress;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.pm.PackageManager;
import android.net.ConnectivityManager;
import android.net.IConnectivityManager;
import android.net.INetworkManagementEventObserver;
import android.net.InterfaceConfiguration;
import android.net.NetworkInfo;
import android.os.Binder;
import android.os.CommonTimeConfig;
import android.os.Handler;
import android.os.IBinder;
import android.os.INetworkManagementService;
import android.os.RemoteException;
import android.os.ServiceManager;
import android.os.SystemProperties;
import android.util.Log;
import com.android.server.net.BaseNetworkObserver;
/**
* @hide
* <p>CommonTimeManagementService manages the configuration of the native Common Time service,
* reconfiguring the native service as appropriate in response to changes in network configuration.
*/
class CommonTimeManagementService extends Binder {
/*
* Constants and globals.
*/
    private static final String TAG = CommonTimeManagementService.class.getSimpleName();
    /** Delay (ms) before retrying a connection to the native service. */
    private static final int NATIVE_SERVICE_RECONNECT_TIMEOUT = 5000;
    // Read-only system properties controlling common-time behavior.
    private static final String AUTO_DISABLE_PROP = "ro.common_time.auto_disable";
    private static final String ALLOW_WIFI_PROP = "ro.common_time.allow_wifi";
    private static final String SERVER_PRIO_PROP = "ro.common_time.server_prio";
    private static final String NO_INTERFACE_TIMEOUT_PROP = "ro.common_time.no_iface_timeout";
    private static final boolean AUTO_DISABLE;
    private static final boolean ALLOW_WIFI;
    private static final byte BASE_SERVER_PRIO;
    private static final int NO_INTERFACE_TIMEOUT;
    private static final InterfaceScoreRule[] IFACE_SCORE_RULES;

    static {
        int tmp;
        AUTO_DISABLE = (0 != SystemProperties.getInt(AUTO_DISABLE_PROP, 1));
        ALLOW_WIFI = (0 != SystemProperties.getInt(ALLOW_WIFI_PROP, 0));
        tmp = SystemProperties.getInt(SERVER_PRIO_PROP, 1);
        NO_INTERFACE_TIMEOUT = SystemProperties.getInt(NO_INTERFACE_TIMEOUT_PROP, 60000);

        // Clamp the configured master-election priority to [1, 30].
        if (tmp < 1)
            BASE_SERVER_PRIO = 1;
        else
        if (tmp > 30)
            BASE_SERVER_PRIO = 30;
        else
            BASE_SERVER_PRIO = (byte)tmp;

        // Wired interfaces always outscore wireless; wlan only participates
        // when explicitly allowed by the system property.
        if (ALLOW_WIFI) {
            IFACE_SCORE_RULES = new InterfaceScoreRule[] {
                new InterfaceScoreRule("wlan", (byte)1),
                new InterfaceScoreRule("eth", (byte)2),
            };
        } else {
            IFACE_SCORE_RULES = new InterfaceScoreRule[] {
                new InterfaceScoreRule("eth", (byte)2),
            };
        }
    };
/*
* Internal state
*/
    private final Context mContext;
    /** Handle to the network-management service; set in systemRunning(). */
    private INetworkManagementService mNetMgr;
    /** Live connection to the native common-time config service; null while reconnecting. */
    private CommonTimeConfig mCTConfig;
    /** Interface the native service is currently bound to; null when unbound. */
    private String mCurIface;
    /** Schedules reconnect attempts after the native service dies. */
    private Handler mReconnectHandler = new Handler();
    /** Schedules the no-suitable-interface timeout. */
    private Handler mNoInterfaceHandler = new Handler();
    /** Guards mCTConfig/mCurIface state shared with dump(). */
    private Object mLock = new Object();
    /** True if the native service was present when systemRunning() ran. */
    private boolean mDetectedAtStartup = false;
    /** Current master-election priority; starts at the clamped base priority. */
    private byte mEffectivePrio = BASE_SERVER_PRIO;
/*
* Callback handler implementations.
*/
    // Any change in interface status/link/membership can change which
    // interface is best for time sync, so re-evaluate on every event.
    private INetworkManagementEventObserver mIfaceObserver = new BaseNetworkObserver() {
        public void interfaceStatusChanged(String iface, boolean up) {
            reevaluateServiceState();
        }
        public void interfaceLinkStateChanged(String iface, boolean up) {
            reevaluateServiceState();
        }
        public void interfaceAdded(String iface) {
            reevaluateServiceState();
        }
        public void interfaceRemoved(String iface) {
            reevaluateServiceState();
        }
    };
    // Connectivity-changed broadcasts also trigger a re-evaluation.
    private BroadcastReceiver mConnectivityMangerObserver = new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            reevaluateServiceState();
        }
    };
    // If the native service dies, schedule a delayed reconnect attempt.
    private CommonTimeConfig.OnServerDiedListener mCTServerDiedListener =
        new CommonTimeConfig.OnServerDiedListener() {
            public void onServerDied() {
                scheduleTimeConfigReconnect();
            }
        };
    /** Posted by scheduleTimeConfigReconnect() to retry the native connection. */
    private Runnable mReconnectRunnable = new Runnable() {
        public void run() { connectToTimeConfig(); }
    };

    /** Posted when no suitable interface appears within NO_INTERFACE_TIMEOUT. */
    private Runnable mNoInterfaceRunnable = new Runnable() {
        public void run() { handleNoInterfaceTimeout(); }
    };
/*
* Public interface (constructor, systemReady and dump)
*/
    /** Stores the context; real initialization is deferred to systemRunning(). */
    public CommonTimeManagementService(Context context) {
        mContext = context;
    }
    /**
     * One-time initialization once the system is up: bails out if the native
     * common-time service is absent, otherwise registers for network events
     * and connects to the native config service.
     */
    void systemRunning() {
        if (ServiceManager.checkService(CommonTimeConfig.SERVICE_NAME) == null) {
            Log.i(TAG, "No common time service detected on this platform.  " +
                       "Common time services will be unavailable.");
            return;
        }

        mDetectedAtStartup = true;

        IBinder b = ServiceManager.getService(Context.NETWORKMANAGEMENT_SERVICE);
        mNetMgr = INetworkManagementService.Stub.asInterface(b);

        // Network manager is running along-side us, so we should never receiver a remote exception
        // while trying to register this observer.
        try {
            mNetMgr.registerObserver(mIfaceObserver);
        }
        catch (RemoteException e) { } // ignored: local in-process service (see comment above)

        // Register with the connectivity manager for connectivity changed intents.
        IntentFilter filter = new IntentFilter();
        filter.addAction(ConnectivityManager.CONNECTIVITY_ACTION);
        mContext.registerReceiver(mConnectivityMangerObserver, filter);

        // Connect to the common time config service and apply the initial configuration.
        connectToTimeConfig();
    }
@Override
protected void dump(FileDescriptor fd, PrintWriter pw, String[] args) {
if (mContext.checkCallingOrSelfPermission(android.Manifest.permission.DUMP)
!= PackageManager.PERMISSION_GRANTED) {
pw.println(String.format(
"Permission Denial: can't dump CommonTimeManagement service from from " +
"pid=%d, uid=%d", Binder.getCallingPid(), Binder.getCallingUid()));
return;
}
if (!mDetectedAtStartup) {
pw.println("Native Common Time service was not detected at startup. " +
"Service is unavailable");
return;
}
synchronized (mLock) {
pw.println("Current Common Time Management Service Config:");
pw.println(String.format(" Native service : %s",
(null == mCTConfig) ? "reconnecting"
: "alive"));
pw.println(String.format(" Bound interface : %s",
(null == mCurIface ? "unbound" : mCurIface)));
pw.println(String.format(" Allow WiFi : %s", ALLOW_WIFI ? "yes" : "no"));
pw.println(String.format(" Allow Auto Disable : %s", AUTO_DISABLE ? "yes" : "no"));
pw.println(String.format(" Server Priority : %d", mEffectivePrio));
pw.println(String.format(" No iface timeout : %d", NO_INTERFACE_TIMEOUT));
}
}
/*
* Inner helper classes
*/
/**
 * Immutable pairing of an interface-name substring (e.g. "wlan", "eth") with
 * the score assigned to interfaces matching it. Used by
 * reevaluateServiceState() to rank candidate network interfaces; a higher
 * score is preferred when choosing the binding for the common time service.
 */
private static class InterfaceScoreRule {
    // Substring searched for in the interface name.
    public final String mPrefix;
    // Score granted when the name contains mPrefix.
    public final byte mScore;

    public InterfaceScoreRule(String prefix, byte score) {
        mPrefix = prefix;
        mScore = score;
    }
};
/*
* Internal implementation
*/
/**
 * Tears down the current connection to the native common time config service:
 * cancels any pending reconnect and no-interface timeout callbacks, then
 * releases and clears the config binding if one exists.
 */
private void cleanupTimeConfig() {
    // Cancel both timers first so neither fires against a released config.
    mReconnectHandler.removeCallbacks(mReconnectRunnable);
    mNoInterfaceHandler.removeCallbacks(mNoInterfaceRunnable);
    if (null != mCTConfig) {
        mCTConfig.release();
        mCTConfig = null;
    }
}
/**
 * (Re)connects to the native common time configuration service and applies
 * the initial configuration (died-listener, auto-disable, election priority).
 * On a RemoteException (service crashed or not running for w/e reason), a
 * reconnect attempt is scheduled for the future instead.
 */
private void connectToTimeConfig() {
    // Get access to the common time service configuration interface. If we catch a remote
    // exception in the process, schedule an attempt to reconnect in the future.
    cleanupTimeConfig();
    try {
        // Mutate the shared config/binding state under the lock so dump() and
        // reevaluateServiceState() observe a consistent view.
        synchronized (mLock) {
            mCTConfig = new CommonTimeConfig();
            mCTConfig.setServerDiedListener(mCTServerDiedListener);
            mCurIface = mCTConfig.getInterfaceBinding();
            mCTConfig.setAutoDisable(AUTO_DISABLE);
            mCTConfig.setMasterElectionPriority(mEffectivePrio);
        }
        // Negative NO_INTERFACE_TIMEOUT disables the networkless-master fallback.
        if (NO_INTERFACE_TIMEOUT >= 0)
            mNoInterfaceHandler.postDelayed(mNoInterfaceRunnable, NO_INTERFACE_TIMEOUT);
        reevaluateServiceState();
    }
    catch (RemoteException e) {
        scheduleTimeConfigReconnect();
    }
}
/**
 * Drops the current (dead) native service binding and arms a one-shot timer
 * to retry the connection after NATIVE_SERVICE_RECONNECT_TIMEOUT msec.
 */
private void scheduleTimeConfigReconnect() {
    cleanupTimeConfig();
    Log.w(TAG, String.format("Native service died, will reconnect in %d mSec",
                             NATIVE_SERVICE_RECONNECT_TIMEOUT));
    mReconnectHandler.postDelayed(mReconnectRunnable,
                                  NATIVE_SERVICE_RECONNECT_TIMEOUT);
}
/**
 * Invoked when no suitable network interface came up before the timeout
 * expired: forces the native service into networkless master mode. If the
 * native service turns out to be dead, schedules a reconnect instead.
 */
private void handleNoInterfaceTimeout() {
    // Nothing to do without a live binding to the native config service.
    if (mCTConfig == null) {
        return;
    }

    Log.i(TAG, "Timeout waiting for interface to come up. " +
            "Forcing networkless master mode.");
    if (mCTConfig.forceNetworklessMasterMode() == CommonTimeConfig.ERROR_DEAD_OBJECT) {
        scheduleTimeConfigReconnect();
    }
}
/**
 * Re-scores all network interfaces, picks the best candidate for common time
 * synchronization, and rebinds the native service if the choice changed.
 * Also recomputes the master election priority from the winning score.
 */
private void reevaluateServiceState() {
    String bindIface = null;
    byte bestScore = -1;
    try {
        // Check to see if this interface is suitable to use for time synchronization.
        //
        // TODO : This selection algorithm needs to be enhanced for use with mobile devices. In
        // particular, the choice of whether to use a wireless interface or not should not be an
        // all or nothing thing controlled by properties. It would probably be better if the
        // platform had some concept of public wireless networks vs. home or friendly wireless
        // networks (something a user would configure in settings or when a new interface is
        // added). Then this algorithm could pick only wireless interfaces which were flagged
        // as friendly, and be dormant when on public wireless networks.
        //
        // Another issue which needs to be dealt with is the use of driver supplied interface
        // name to determine the network type. The fact that the wireless interface on a device
        // is named "wlan0" is just a matter of convention; it's not a 100% rule. For example,
        // there are devices out there where the wireless is named "tiwlan0", not "wlan0". The
        // internal network management interfaces in Android have all of the information needed
        // to make a proper classification, there is just no way (currently) to fetch an
        // interface's type (available from the ConnectionManager) as well as its address
        // (available from either the java.net interfaces or from the NetworkManagement
        // service). Both can enumerate interfaces, but there is no way to correlate their
        // results (no common shared key; although using the interface name in the connection
        // manager would be a good start). Until this gets resolved, we resort to substring
        // searching for tags like wlan and eth.
        //
        String ifaceList[] = mNetMgr.listInterfaces();
        if (null != ifaceList) {
            for (String iface : ifaceList) {
                // Score the interface name against the static rule table.
                byte thisScore = -1;
                for (InterfaceScoreRule r : IFACE_SCORE_RULES) {
                    if (iface.contains(r.mPrefix)) {
                        thisScore = r.mScore;
                        break;
                    }
                }

                // Skip interfaces which cannot beat the current best candidate.
                if (thisScore <= bestScore)
                    continue;

                InterfaceConfiguration config = mNetMgr.getInterfaceConfig(iface);
                if (null == config)
                    continue;

                // Only active interfaces are eligible for binding.
                if (config.isActive()) {
                    bindIface = iface;
                    bestScore = thisScore;
                }
            }
        }
    }
    catch (RemoteException e) {
        // Bad news; we should not be getting remote exceptions from the connectivity manager
        // since it is running in SystemServer along side of us. It probably does not matter
        // what we do here, but go ahead and unbind the common time service in this case, just
        // so we have some defined behavior.
        bindIface = null;
    }

    // Compare the new candidate with the current binding under the lock and
    // record whether an actual rebind is required (bind, unbind, or switch).
    boolean doRebind = true;
    synchronized (mLock) {
        if ((null != bindIface) && (null == mCurIface)) {
            Log.e(TAG, String.format("Binding common time service to %s.", bindIface));
            mCurIface = bindIface;
        } else
        if ((null == bindIface) && (null != mCurIface)) {
            Log.e(TAG, "Unbinding common time service.");
            mCurIface = null;
        } else
        if ((null != bindIface) && (null != mCurIface) && !bindIface.equals(mCurIface)) {
            Log.e(TAG, String.format("Switching common time service binding from %s to %s.",
                                     mCurIface, bindIface));
            mCurIface = bindIface;
        } else {
            doRebind = false;
        }
    }

    if (doRebind && (null != mCTConfig)) {
        // Scale the election priority by the winning score (if any), push it to
        // the native service if it changed, then apply the new network binding.
        byte newPrio = (bestScore > 0)
                     ? (byte)(bestScore * BASE_SERVER_PRIO)
                     : BASE_SERVER_PRIO;
        if (newPrio != mEffectivePrio) {
            mEffectivePrio = newPrio;
            mCTConfig.setMasterElectionPriority(mEffectivePrio);
        }

        int res = mCTConfig.setNetworkBinding(mCurIface);
        if (res != CommonTimeConfig.SUCCESS)
            scheduleTimeConfigReconnect();
        else if (NO_INTERFACE_TIMEOUT >= 0) {
            // Re-arm the networkless-master fallback only while unbound.
            mNoInterfaceHandler.removeCallbacks(mNoInterfaceRunnable);
            if (null == mCurIface)
                mNoInterfaceHandler.postDelayed(mNoInterfaceRunnable, NO_INTERFACE_TIMEOUT);
        }
    }
}
}
| |
package th.in.llun.thorfun;
import java.util.ArrayList;
import java.util.List;
import org.json.JSONException;
import org.json.JSONObject;
import org.ocpsoft.prettytime.PrettyTime;
import th.in.llun.thorfun.api.Thorfun;
import th.in.llun.thorfun.api.model.Post;
import th.in.llun.thorfun.api.model.RemoteCollection;
import th.in.llun.thorfun.api.model.Reply;
import th.in.llun.thorfun.api.model.ThorfunResult;
import th.in.llun.thorfun.utils.ImageLoader;
import android.app.Activity;
import android.os.Bundle;
import android.text.Html;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.RelativeLayout;
import android.widget.TextView;
import com.actionbarsherlock.app.ActionBar;
import com.actionbarsherlock.app.SherlockActivity;
import com.actionbarsherlock.view.MenuItem;
/**
 * Shows a single board {@link Post}: a header (author avatar, title, author
 * name, relative timestamp), a lazily-paged list of {@link Reply} rows, and —
 * when the user is logged in — an input field to submit a new comment.
 *
 * The post itself is passed in as raw JSON via the {@link #KEY_POST} intent
 * extra; if it cannot be parsed the activity finishes immediately.
 */
public class PostActivity extends SherlockActivity {

    /** Intent extra key carrying the raw JSON of the post to display. */
    public static final String KEY_POST = "post";

    private Thorfun mThorfun;
    private Post mPost;
    private List<Reply> mReplies;
    private ReplyAdapter mAdapter;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_post_view);

        ActionBar actionBar = getSupportActionBar();
        actionBar.setDisplayHomeAsUpEnabled(true);
        setTitle(getString(R.string.board_post_title));

        String rawPost = getIntent().getStringExtra(KEY_POST);
        try {
            mPost = new Post(new JSONObject(rawPost));

            // Header: avatar (loaded asynchronously), title, author, time.
            ImageView icon = (ImageView) findViewById(R.id.board_post_avatar);
            ViewGroup loading = (ViewGroup) findViewById(R.id.board_post_loading);
            loading.setVisibility(View.VISIBLE);
            new ImageLoader(icon, loading).execute(mPost.getNeightbour()
                .getImageURL());

            TextView title = (TextView) findViewById(R.id.board_post_title);
            title.setText(Html.fromHtml(mPost.getTitle()));

            TextView username = (TextView) findViewById(R.id.board_post_username_text);
            username.setText(mPost.getNeightbour().getName());

            PrettyTime prettyTime = new PrettyTime();
            TextView time = (TextView) findViewById(R.id.board_post_timestamp);
            time.setText(prettyTime.format(mPost.getTime()));

            mThorfun = Thorfun.getInstance(this);
            mReplies = new ArrayList<Reply>();
            mAdapter = new ReplyAdapter(this, getLayoutInflater(), mPost, mReplies);

            ListView repliesView = (ListView) findViewById(R.id.board_post_replies);
            repliesView.setAdapter(mAdapter);

            // Load the first page of replies; later pages are fetched by the
            // adapter when the loading row becomes visible.
            mThorfun.loadPostReplies(mPost, null,
                new ThorfunResult<RemoteCollection<Reply>>() {
                    @Override
                    public void onResponse(RemoteCollection<Reply> response) {
                        mReplies.addAll(response.collection());
                        mAdapter.notifyDataSetChanged();
                    }
                });

            // The reply field is only shown to logged-in users.
            View inputView = findViewById(R.id.board_post_reply_field);
            if (mThorfun.isLoggedIn()) {
                inputView.setVisibility(View.VISIBLE);
            } else {
                inputView.setVisibility(View.GONE);
            }

            final EditText inputField = (EditText) findViewById(R.id.board_post_reply_input);
            final Button inputButton = (Button) findViewById(R.id.board_post_reply_submit);
            inputButton.setOnClickListener(new OnClickListener() {
                @Override
                public void onClick(View v) {
                    String text = inputField.getText().toString().trim();
                    if (text.length() > 0) {
                        // Disable the button while the comment is in flight to
                        // prevent duplicate submissions.
                        inputField.setText("");
                        inputButton.setEnabled(false);
                        mThorfun.commentPost(mPost, text, new ThorfunResult<Reply>() {
                            @Override
                            public void onResponse(Reply response) {
                                inputButton.setEnabled(true);
                                mReplies.add(response);
                                mAdapter.notifyDataSetChanged();
                            }
                        });
                    }
                }
            });
        } catch (JSONException e) {
            Log.e(Thorfun.LOG_TAG, "Can't parse post json", e);
            finish();
        }
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle item selection
        switch (item.getItemId()) {
        case android.R.id.home:
            finish();
            return true;
        default:
            return super.onOptionsItemSelected(item);
        }
    }

    /**
     * Backs the replies list. Positions [0, size) are reply rows; one extra
     * synthetic "loading" row is appended at the end which triggers fetching
     * of the next page when rendered.
     */
    private static class ReplyAdapter extends BaseAdapter {

        private Activity mActivity;
        private LayoutInflater mInflater;
        private Post mPost;
        private List<Reply> mReplies;
        // True while a page request is in flight (guards duplicate loads).
        private boolean mIsLoading = false;
        // True once a page returned fewer items than requested.
        private boolean mIsLastPage = false;

        public ReplyAdapter(Activity activity, LayoutInflater inflater, Post post,
            List<Reply> replies) {
            mActivity = activity;
            mInflater = inflater;
            mPost = post;
            mReplies = replies;
        }

        @Override
        public View getView(int position, View convertView, ViewGroup parent) {
            if (position == getCount() - 1) {
                // Synthetic loading row: kick off the next page fetch.
                RelativeLayout row = (RelativeLayout) convertView;
                if (row == null) {
                    row = (RelativeLayout) mInflater.inflate(
                        R.layout.fragment_loading_row, parent, false);
                }

                if (mReplies.size() < Thorfun.DEFAULT_PAGE_LIMIT) {
                    mIsLastPage = true;
                }

                final BaseAdapter self = this;
                if (!mIsLoading && !mIsLastPage) {
                    mIsLoading = true;
                    Thorfun.getInstance(mActivity).loadPostReplies(mPost,
                        mReplies.get(mReplies.size() - 1),
                        new ThorfunResult<RemoteCollection<Reply>>() {
                            @Override
                            public void onResponse(RemoteCollection<Reply> response) {
                                mIsLoading = false;
                                List<Reply> next = response.collection();
                                if (next.size() > 0) {
                                    mReplies.addAll(next);
                                    // notifyDataSetChanged must run on the UI thread.
                                    mActivity.runOnUiThread(new Runnable() {
                                        @Override
                                        public void run() {
                                            self.notifyDataSetChanged();
                                        }
                                    });
                                } else {
                                    mIsLastPage = true;
                                }
                            }
                        });
                }

                if (mIsLastPage) {
                    row.setVisibility(View.GONE);
                }
                return row;
            } else {
                // Regular reply row.
                View row = convertView;
                if (row == null) {
                    row = mInflater.inflate(R.layout.story_comment_reply_row, parent,
                        false);
                }

                Reply reply = mReplies.get(position);

                ImageView icon = (ImageView) row
                    .findViewById(R.id.story_comment_avatar);
                ViewGroup loading = (ViewGroup) row
                    .findViewById(R.id.story_comment_progress_box);
                loading.setVisibility(View.VISIBLE);
                new ImageLoader(icon, loading).execute(reply.getNeightbour()
                    .getImageURL());

                TextView usernameText = (TextView) row
                    .findViewById(R.id.story_comment_user);
                usernameText.setText(reply.getNeightbour().getName());

                TextView commentText = (TextView) row
                    .findViewById(R.id.story_comment_text);
                commentText.setText(Html.fromHtml(reply.getText()));

                TextView timeText = (TextView) row
                    .findViewById(R.id.story_comment_time);
                timeText.setText(new PrettyTime().format(reply.getTime()));

                return row;
            }
        }

        @Override
        public long getItemId(int position) {
            // Fix: the last position is the synthetic loading row, not a reply;
            // indexing mReplies with it used to throw IndexOutOfBoundsException.
            if (position >= mReplies.size()) {
                return -1;
            }
            return mReplies.get(position).getID();
        }

        @Override
        public Object getItem(int position) {
            // Fix: guard the synthetic loading row (see getItemId).
            if (position >= mReplies.size()) {
                return null;
            }
            return mReplies.get(position);
        }

        @Override
        public int getCount() {
            // One extra slot for the loading row, but only when there is data.
            if (mReplies.size() > 0) {
                return mReplies.size() + 1;
            }
            return 0;
        }

        @Override
        public int getViewTypeCount() {
            // Two row types: loading row and reply row.
            return 2;
        }

        @Override
        public int getItemViewType(int position) {
            if (position == getCount() - 1) {
                return 0;
            }
            return 1;
        }
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.search;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.ExtendedCommonTermsQuery;
import org.apache.lucene.search.*;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.util.QueryBuilder;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.support.QueryParsers;
import java.io.IOException;
import java.util.List;
/**
 * Builds Lucene queries for Elasticsearch "match" style queries: boolean,
 * phrase, and phrase-prefix matching, with optional fuzziness, common-terms
 * cutoff, and lenient handling of field-specific term query failures.
 */
public class MatchQuery {

    /** How the analyzed terms are combined into the final query. */
    public static enum Type {
        BOOLEAN,
        PHRASE,
        PHRASE_PREFIX
    }

    /** What to return when analysis produces zero terms: match none or match all. */
    public static enum ZeroTermsQuery {
        NONE,
        ALL
    }

    protected final QueryParseContext parseContext;

    // Name of an explicitly requested analyzer; null means "derive from the field".
    protected String analyzer;

    protected BooleanClause.Occur occur = BooleanClause.Occur.SHOULD;

    protected boolean enablePositionIncrements = true;

    protected int phraseSlop = 0;

    // Non-null enables fuzzy term queries in blendTermQuery().
    protected Fuzziness fuzziness = null;

    protected int fuzzyPrefixLength = FuzzyQuery.defaultPrefixLength;

    protected int maxExpansions = FuzzyQuery.defaultMaxExpansions;

    protected boolean transpositions = FuzzyQuery.defaultTranspositions;

    protected MultiTermQuery.RewriteMethod fuzzyRewriteMethod;

    // When true, RuntimeExceptions from field term-query creation yield null
    // instead of propagating.
    protected boolean lenient;

    protected ZeroTermsQuery zeroTermsQuery = ZeroTermsQuery.NONE;

    // Non-null switches BOOLEAN matching to an ExtendedCommonTermsQuery.
    protected Float commonTermsCutoff = null;

    public MatchQuery(QueryParseContext parseContext) {
        this.parseContext = parseContext;
    }

    public void setAnalyzer(String analyzer) {
        this.analyzer = analyzer;
    }

    public void setOccur(BooleanClause.Occur occur) {
        this.occur = occur;
    }

    public void setCommonTermsCutoff(float cutoff) {
        this.commonTermsCutoff = Float.valueOf(cutoff);
    }

    public void setEnablePositionIncrements(boolean enablePositionIncrements) {
        this.enablePositionIncrements = enablePositionIncrements;
    }

    public void setPhraseSlop(int phraseSlop) {
        this.phraseSlop = phraseSlop;
    }

    public void setFuzziness(Fuzziness fuzziness) {
        this.fuzziness = fuzziness;
    }

    public void setFuzzyPrefixLength(int fuzzyPrefixLength) {
        this.fuzzyPrefixLength = fuzzyPrefixLength;
    }

    public void setMaxExpansions(int maxExpansions) {
        this.maxExpansions = maxExpansions;
    }

    public void setTranspositions(boolean transpositions) {
        this.transpositions = transpositions;
    }

    public void setFuzzyRewriteMethod(MultiTermQuery.RewriteMethod fuzzyRewriteMethod) {
        this.fuzzyRewriteMethod = fuzzyRewriteMethod;
    }

    public void setLenient(boolean lenient) {
        this.lenient = lenient;
    }

    public void setZeroTermsQuery(ZeroTermsQuery zeroTermsQuery) {
        this.zeroTermsQuery = zeroTermsQuery;
    }

    /**
     * Hook for subclasses: when true, the query string is always analyzed even
     * for fields that would normally take a plain term query.
     */
    protected boolean forceAnalyzeQueryString() {
        return false;
    }

    /**
     * Resolves the analyzer to use: an explicitly named one if set (must exist),
     * otherwise the field's search analyzer, falling back to the default search
     * analyzer when no field type is available.
     */
    protected Analyzer getAnalyzer(MappedFieldType fieldType) {
        if (this.analyzer == null) {
            if (fieldType != null) {
                return parseContext.getSearchAnalyzer(fieldType);
            }
            return parseContext.mapperService().searchAnalyzer();
        } else {
            Analyzer analyzer = parseContext.mapperService().analysisService().analyzer(this.analyzer);
            if (analyzer == null) {
                throw new IllegalArgumentException("No analyzer found for [" + this.analyzer + "]");
            }
            return analyzer;
        }
    }

    /**
     * Builds the match query of the given {@link Type} for the field/value.
     * Returns the configured zero-terms query when analysis yields no terms;
     * may return null when lenient and the field's term query creation fails.
     */
    public Query parse(Type type, String fieldName, Object value) throws IOException {
        // Resolve the concrete index name for the field if it is mapped.
        final String field;
        MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
        if (fieldType != null) {
            field = fieldType.names().indexName();
        } else {
            field = fieldName;
        }

        // Fields that prefer raw term queries (and no forced analysis) bypass
        // the query builder entirely.
        if (fieldType != null && fieldType.useTermQueryWithQueryString() && !forceAnalyzeQueryString()) {
            try {
                return fieldType.termQuery(value, parseContext);
            } catch (RuntimeException e) {
                if (lenient) {
                    return null;
                }
                throw e;
            }
        }
        Analyzer analyzer = getAnalyzer(fieldType);
        assert analyzer != null;
        MatchQueryBuilder builder = new MatchQueryBuilder(analyzer, fieldType);
        builder.setEnablePositionIncrements(this.enablePositionIncrements);

        Query query = null;
        switch (type) {
            case BOOLEAN:
                if (commonTermsCutoff == null) {
                    query = builder.createBooleanQuery(field, value.toString(), occur);
                } else {
                    query = builder.createCommonTermsQuery(field, value.toString(), occur, occur, commonTermsCutoff, fieldType);
                }
                break;
            case PHRASE:
                query = builder.createPhraseQuery(field, value.toString(), phraseSlop);
                break;
            case PHRASE_PREFIX:
                query = builder.createPhrasePrefixQuery(field, value.toString(), phraseSlop, maxExpansions);
                break;
            default:
                throw new IllegalStateException("No type found for [" + type + "]");
        }

        // A null query here means analysis produced zero terms.
        if (query == null) {
            return zeroTermsQuery();
        } else {
            return query;
        }
    }

    /** Query used when the analyzed input has no terms (match none vs. all). */
    protected Query zeroTermsQuery() {
        return zeroTermsQuery == ZeroTermsQuery.NONE ? Queries.newMatchNoDocsQuery() : Queries.newMatchAllQuery();
    }

    /**
     * Lucene QueryBuilder subclass that routes term creation through
     * {@link MatchQuery#blendTermQuery} and adds phrase-prefix and
     * common-terms query construction.
     */
    private class MatchQueryBuilder extends QueryBuilder {

        private final MappedFieldType mapper;

        /**
         * Creates a new QueryBuilder using the given analyzer.
         */
        public MatchQueryBuilder(Analyzer analyzer, @Nullable MappedFieldType mapper) {
            super(analyzer);
            this.mapper = mapper;
        }

        @Override
        protected Query newTermQuery(Term term) {
            // Delegate so fuzziness / field-specific term queries apply.
            return blendTermQuery(term, mapper);
        }

        /**
         * Builds a phrase query from the analyzed text and converts it into a
         * MultiPhrasePrefixQuery so the last position matches as a prefix.
         * Falls through for query shapes it does not recognize.
         */
        public Query createPhrasePrefixQuery(String field, String queryText, int phraseSlop, int maxExpansions) {
            final Query query = createFieldQuery(getAnalyzer(), Occur.MUST, field, queryText, true, phraseSlop);
            final MultiPhrasePrefixQuery prefixQuery = new MultiPhrasePrefixQuery();
            prefixQuery.setMaxExpansions(maxExpansions);
            prefixQuery.setSlop(phraseSlop);
            if (query instanceof PhraseQuery) {
                PhraseQuery pq = (PhraseQuery)query;
                Term[] terms = pq.getTerms();
                int[] positions = pq.getPositions();
                for (int i = 0; i < terms.length; i++) {
                    prefixQuery.add(new Term[] {terms[i]}, positions[i]);
                }
                return prefixQuery;
            } else if (query instanceof MultiPhraseQuery) {
                MultiPhraseQuery pq = (MultiPhraseQuery)query;
                List<Term[]> terms = pq.getTermArrays();
                int[] positions = pq.getPositions();
                for (int i = 0; i < terms.size(); i++) {
                    prefixQuery.add(terms.get(i), positions[i]);
                }
                return prefixQuery;
            } else if (query instanceof TermQuery) {
                prefixQuery.add(((TermQuery) query).getTerm());
                return prefixQuery;
            }
            return query;
        }

        /**
         * Builds a boolean query and, when it consists purely of term clauses,
         * rewrites it as an ExtendedCommonTermsQuery with the given high/low
         * frequency occurs and cutoff; otherwise returns the boolean query.
         */
        public Query createCommonTermsQuery(String field, String queryText, Occur highFreqOccur, Occur lowFreqOccur, float maxTermFrequency, MappedFieldType fieldType) {
            Query booleanQuery = createBooleanQuery(field, queryText, lowFreqOccur);
            if (booleanQuery != null && booleanQuery instanceof BooleanQuery) {
                BooleanQuery bq = (BooleanQuery) booleanQuery;
                ExtendedCommonTermsQuery query = new ExtendedCommonTermsQuery(highFreqOccur, lowFreqOccur, maxTermFrequency, ((BooleanQuery)booleanQuery).isCoordDisabled(), fieldType);
                for (BooleanClause clause : bq.clauses()) {
                    if (!(clause.getQuery() instanceof TermQuery)) {
                        // Non-term clause (e.g. synonym expansion): keep the original.
                        return booleanQuery;
                    }
                    query.add(((TermQuery) clause.getQuery()).getTerm());
                }
                return query;
            }
            return booleanQuery;
        }
    }

    /**
     * Produces the per-term query: a fuzzy query when fuzziness is configured
     * (field-aware when possible), else the field's own term query if it
     * defines one, else a plain TermQuery.
     */
    protected Query blendTermQuery(Term term, MappedFieldType fieldType) {
        if (fuzziness != null) {
            if (fieldType != null) {
                Query query = fieldType.fuzzyQuery(term.text(), fuzziness, fuzzyPrefixLength, maxExpansions, transpositions);
                if (query instanceof FuzzyQuery) {
                    QueryParsers.setRewriteMethod((FuzzyQuery) query, fuzzyRewriteMethod);
                }
                return query;
            }
            int edits = fuzziness.asDistance(term.text());
            FuzzyQuery query = new FuzzyQuery(term, edits, fuzzyPrefixLength, maxExpansions, transpositions);
            QueryParsers.setRewriteMethod(query, fuzzyRewriteMethod);
            return query;
        }
        if (fieldType != null) {
            Query termQuery = fieldType.queryStringTermQuery(term);
            if (termQuery != null) {
                return termQuery;
            }
        }
        return new TermQuery(term);
    }
}
| |
/* Copyright (c) 2009 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.appengine.demos.sticky.client.model;
import java.io.Serializable;
import java.util.Date;
import com.google.gwt.core.client.GWT;
import com.google.gwt.user.client.rpc.RemoteService;
import com.google.gwt.user.client.rpc.RemoteServiceRelativePath;
/**
* The RPC api available to the client. The asynchronous version that is used
* directly by the client is {@link ServiceAsync}.
*
*/
@RemoteServiceRelativePath("service")
public interface Service extends RemoteService {

    /**
     * An exception that is thrown by the server whenever the current user is not
     * logged in, or if the RPC requests an operation that cannot be carried out
     * for the user (i.e. putting notes on someone else's surface).
     */
    @SuppressWarnings("serial")
    static class AccessDeniedException extends Exception {
    }

    /**
     * Encapsulates a response for
     * {@link Service#addAuthorToSurface(String, String)}.
     */
    @SuppressWarnings("serial")
    static class AddAuthorToSurfaceResult implements Serializable {

        private String authorName;

        private Date updatedAt;

        /**
         * Constructs a new result. This constructor can only be invoked on the
         * server.
         *
         * @param authorName
         *          the name of the author that was added to the surface
         * @param updatedAt
         *          the new last updated time for the surface
         */
        public AddAuthorToSurfaceResult(String authorName, Date updatedAt) {
            assert !GWT.isClient();
            this.authorName = authorName;
            this.updatedAt = updatedAt;
        }

        /**
         * Needed for RPC serialization.
         */
        @SuppressWarnings("unused")
        private AddAuthorToSurfaceResult() {
        }

        /**
         * The name of the {@link Author} that was added to the surface.
         *
         * @return the author's name
         */
        public String getAuthorName() {
            return authorName;
        }

        /**
         * The new last updated time for the surface that was modified.
         *
         * @return the last updated time
         */
        public Date getUpdatedAt() {
            return updatedAt;
        }
    }

    /**
     * Encapsulates a response from
     * {@link Service#createNote(String, int, int, int, int)}.
     */
    @SuppressWarnings("serial")
    static class CreateObjectResult implements Serializable {

        private String key;

        private Date updateTime;

        /**
         * Constructs a new result. This constructor can only be invoked on the
         * server.
         *
         * @param key
         *          the key that was assigned to the new {@link Note}
         * @param updateTime
         *          the time assigned to {@link Note#getLastUpdatedAt()}
         */
        public CreateObjectResult(String key, Date updateTime) {
            assert !GWT.isClient();
            this.key = key;
            this.updateTime = updateTime;
        }

        /**
         * Needed for RPC serialization.
         */
        @SuppressWarnings("unused")
        private CreateObjectResult() {
        }

        /**
         * Returns the key that was assigned to the new {@link Note}.
         *
         * @return the assigned key
         */
        public String getKey() {
            return key;
        }

        /**
         * Returns the {@link Date} that was assigned to
         * {@link Note#getLastUpdatedAt()} by the server.
         *
         * @return the update time
         */
        public Date getUpdateTime() {
            return updateTime;
        }
    }

    /**
     * Encapsulates a response from {@link Service#getNotes(String, String)}.
     */
    @SuppressWarnings("serial")
    static class GetNotesResult implements Serializable {

        private String timestamp;

        private Note[] notes;

        /**
         * Constructs a new result. This constructor can only be invoked on the
         * server.
         *
         * @param timestamp
         *          an opaque timestamp
         * @param notes
         *          the list of notes to return
         */
        public GetNotesResult(String timestamp, Note[] notes) {
            assert !GWT.isClient();
            this.timestamp = timestamp;
            this.notes = notes;
        }

        /**
         * Needed for RPC serialization.
         */
        @SuppressWarnings("unused")
        private GetNotesResult() {
        }

        /**
         * Returns the notes that were returned by the server. This can be
         * zero-length, but will not be null.
         *
         * @return the notes
         */
        public Note[] getNotes() {
            return notes;
        }

        /**
         * Returns an opaque timestamp that should be included in future calls to
         * {@link Service#getNotes(String, String)}.
         *
         * @return the opaque timestamp
         */
        public String getTimestamp() {
            return timestamp;
        }
    }

    /**
     * Encapsulates a response to {@link Service#getSurfaces(String)}.
     */
    @SuppressWarnings("serial")
    static class GetSurfacesResult implements Serializable {

        private String timestamp;

        private Surface[] surfaces;

        /**
         * Constructs a new result. This constructor can only be invoked on the
         * server.
         *
         * @param timestamp
         *          an opaque timestamp
         * @param surfaces
         *          a list of surfaces for the current author
         */
        public GetSurfacesResult(String timestamp, Surface[] surfaces) {
            assert !GWT.isClient();
            this.timestamp = timestamp;
            this.surfaces = surfaces;
        }

        /**
         * Needed for RPC serialization.
         */
        @SuppressWarnings("unused")
        private GetSurfacesResult() {
        }

        /**
         * Returns a list of surfaces for the current author.
         *
         * @return the surfaces
         */
        public Surface[] getSurfaces() {
            return surfaces;
        }

        /**
         * Returns an opaque timestamp.
         *
         * @return the opaque timestamp
         */
        public String getTimestamp() {
            return timestamp;
        }
    }

    /**
     * Encapsulates a response for {@link Service#getUserInfo()}.
     */
    @SuppressWarnings("serial")
    static class UserInfoResult implements Serializable {

        private Author author;

        private Surface surface;

        private String logoutUrl;

        /**
         * Constructs a new response. This constructor can only be invoked on the
         * server.
         *
         * @param author
         *          the current author
         * @param surface
         *          the initially selected {@link Surface}
         * @param logoutUrl
         *          a url that can be used to log the current user out
         */
        public UserInfoResult(Author author, Surface surface, String logoutUrl) {
            assert !GWT.isClient();
            this.author = author;
            this.surface = surface;
            this.logoutUrl = logoutUrl;
        }

        /**
         * Needed for RPC serialization.
         */
        @SuppressWarnings("unused")
        private UserInfoResult() {
        }

        /**
         * Returns the current author.
         *
         * @return the current author
         */
        public Author getAuthor() {
            return author;
        }

        /**
         * Returns a url that can be used to log the author out.
         *
         * @return the logout url
         */
        public String getLogoutUrl() {
            return logoutUrl;
        }

        /**
         * Returns the default surface for the author. This is the surface that will
         * be selected when the application first loads.
         *
         * @return the default surface
         */
        public Surface getSurface() {
            return surface;
        }
    }

    /**
     * Add an author to the author list of a surface.
     *
     * @param surfaceKey
     *          the key of the surface being modified
     * @param email
     *          the email address of the author being added
     * @return a result object
     * @throws AccessDeniedException
     */
    AddAuthorToSurfaceResult addAuthorToSurface(String surfaceKey, String email)
            throws AccessDeniedException;

    /**
     * Updates the content of a {@link Note}.
     *
     * @param noteKey
     *          the key of the note to modify
     * @param content
     *          the new content to assign
     * @return the new last updated date for the note that was modified
     * @throws AccessDeniedException
     */
    Date changeNoteContent(String noteKey, String content)
            throws AccessDeniedException;

    /**
     * Updates the position for a {@link Note}.
     *
     * @param noteKey
     *          the key of the note to modify
     * @param x
     *          the new x value
     * @param y
     *          the new y value
     * @param width
     *          the new width value
     * @param height
     *          the new height value
     * @return the new last updated date for the note that was modified
     * @throws AccessDeniedException
     */
    Date changeNotePosition(String noteKey, int x, int y, int width, int height)
            throws AccessDeniedException;

    /**
     * Creates a new {@link Note}.
     *
     * @param surfaceKey
     *          the key of the surface where this note will be created
     * @param x
     *          the x position for the new note
     * @param y
     *          the y position for the new note
     * @param width
     *          the width of the new note
     * @param height
     *          the height of the new note
     * @return a result object
     * @throws AccessDeniedException
     */
    CreateObjectResult createNote(String surfaceKey, int x, int y, int width,
            int height) throws AccessDeniedException;

    /**
     * Create a new {@link Surface}.
     *
     * @param title
     *          the title of the surface
     * @return a result object
     * @throws AccessDeniedException
     */
    CreateObjectResult createSurface(String title) throws AccessDeniedException;

    /**
     * Get all notes for the currently logged in author. <code>timestamp</code> is
     * an opaque timestamp used by the server to optimize the set of results that
     * are returned. Callers should pass a timestamp from
     * {@link GetNotesResult#getTimestamp()}. For the initial call, or to simply
     * receive the full set of notes, pass <code>null</code>.
     *
     * @param surfaceKey
     *          the surface to query
     * @param timestamp
     *          an opaque timestamp
     * @return a result object
     * @throws AccessDeniedException
     */
    GetNotesResult getNotes(String surfaceKey, String timestamp)
            throws AccessDeniedException;

    /**
     * Get all the surfaces for the currently logged in author.
     * <code>timestamp</code> is an opaque timestamp used by the server to
     * optimize the number of results that are returned. Callers should pass a
     * timestamp from {@link GetSurfacesResult#getTimestamp()}. For the initial
     * call, or to simply receive the full set of surfaces, pass <code>null</code>
     * .
     *
     * @param timestamp
     *          an opaque timestamp
     * @return a result object
     * @throws AccessDeniedException
     */
    GetSurfacesResult getSurfaces(String timestamp) throws AccessDeniedException;

    /**
     * Returns the information needed to load the application.
     *
     * @return a result object
     * @throws AccessDeniedException
     */
    UserInfoResult getUserInfo() throws AccessDeniedException;
}
| |
/*
* Copyright (C) 2007 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import static com.google.common.collect.BoundType.CLOSED;
import static com.google.common.truth.Truth.assertThat;
import static java.util.Collections.sort;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.collect.testing.Helpers.NullsBeforeB;
import com.google.common.collect.testing.NavigableSetTestSuiteBuilder;
import com.google.common.collect.testing.TestStringSetGenerator;
import com.google.common.collect.testing.features.CollectionFeature;
import com.google.common.collect.testing.features.CollectionSize;
import com.google.common.collect.testing.google.MultisetFeature;
import com.google.common.collect.testing.google.SortedMultisetTestSuiteBuilder;
import com.google.common.collect.testing.google.TestStringMultisetGenerator;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Set;
import java.util.SortedSet;
/**
* Unit test for {@link TreeMultiset}.
*
* @author Neal Kanodia
*/
@GwtCompatible(emulated = true)
public class TreeMultisetTest extends TestCase {
@GwtIncompatible("suite")
public static Test suite() {
    // Aggregate suite: three generated conformance suites (natural ordering,
    // a null-friendly comparator, and the elementSet() navigable-set view),
    // plus the hand-written cases in this class.
    TestSuite suite = new TestSuite();
    suite.addTest(SortedMultisetTestSuiteBuilder
        .using(new TestStringMultisetGenerator() {
            @Override
            protected Multiset<String> create(String[] elements) {
                return TreeMultiset.create(Arrays.asList(elements));
            }

            @Override
            public List<String> order(List<String> insertionOrder) {
                return Ordering.natural().sortedCopy(insertionOrder);
            }
        })
        .withFeatures(CollectionSize.ANY, CollectionFeature.KNOWN_ORDER,
            CollectionFeature.GENERAL_PURPOSE,
            CollectionFeature.SERIALIZABLE,
            CollectionFeature.ALLOWS_NULL_QUERIES,
            MultisetFeature.ENTRIES_ARE_VIEWS)
        .named("TreeMultiset, Ordering.natural")
        .createTestSuite());
    // Same multiset under an explicit comparator that permits null elements.
    suite.addTest(SortedMultisetTestSuiteBuilder
        .using(new TestStringMultisetGenerator() {
            @Override
            protected Multiset<String> create(String[] elements) {
                Multiset<String> result = TreeMultiset.create(NullsBeforeB.INSTANCE);
                Collections.addAll(result, elements);
                return result;
            }

            @Override
            public List<String> order(List<String> insertionOrder) {
                sort(insertionOrder, NullsBeforeB.INSTANCE);
                return insertionOrder;
            }
        })
        .withFeatures(CollectionSize.ANY, CollectionFeature.KNOWN_ORDER,
            CollectionFeature.GENERAL_PURPOSE,
            CollectionFeature.SERIALIZABLE,
            CollectionFeature.ALLOWS_NULL_VALUES,
            MultisetFeature.ENTRIES_ARE_VIEWS)
        .named("TreeMultiset, NullsBeforeB")
        .createTestSuite());
    // Conformance tests for the elementSet() view as a NavigableSet.
    suite.addTest(NavigableSetTestSuiteBuilder.using(new TestStringSetGenerator() {
        @Override
        protected Set<String> create(String[] elements) {
            return TreeMultiset.create(Arrays.asList(elements)).elementSet();
        }

        @Override
        public List<String> order(List<String> insertionOrder) {
            return Lists.newArrayList(Sets.newTreeSet(insertionOrder));
        }
    })
        .named("TreeMultiset[Ordering.natural].elementSet")
        .withFeatures(
            CollectionSize.ANY,
            CollectionFeature.REMOVE_OPERATIONS,
            CollectionFeature.ALLOWS_NULL_QUERIES)
        .createTestSuite());
    suite.addTestSuite(TreeMultisetTest.class);
    return suite;
}
public void testCreate() {
TreeMultiset<String> multiset = TreeMultiset.create();
multiset.add("foo", 2);
multiset.add("bar");
assertEquals(3, multiset.size());
assertEquals(2, multiset.count("foo"));
assertEquals(Ordering.natural(), multiset.comparator());
assertEquals("[bar, foo x 2]", multiset.toString());
}
public void testCreateWithComparator() {
Multiset<String> multiset = TreeMultiset.create(Collections.reverseOrder());
multiset.add("foo", 2);
multiset.add("bar");
assertEquals(3, multiset.size());
assertEquals(2, multiset.count("foo"));
assertEquals("[foo x 2, bar]", multiset.toString());
}
public void testCreateFromIterable() {
Multiset<String> multiset
= TreeMultiset.create(Arrays.asList("foo", "bar", "foo"));
assertEquals(3, multiset.size());
assertEquals(2, multiset.count("foo"));
assertEquals("[bar, foo x 2]", multiset.toString());
}
public void testToString() {
Multiset<String> ms = TreeMultiset.create();
ms.add("a", 3);
ms.add("c", 1);
ms.add("b", 2);
assertEquals("[a x 3, b x 2, c]", ms.toString());
}
public void testElementSetSortedSetMethods() {
TreeMultiset<String> ms = TreeMultiset.create();
ms.add("c", 1);
ms.add("a", 3);
ms.add("b", 2);
SortedSet<String> elementSet = ms.elementSet();
assertEquals("a", elementSet.first());
assertEquals("c", elementSet.last());
assertEquals(Ordering.natural(), elementSet.comparator());
assertThat(elementSet.headSet("b")).has().exactly("a").inOrder();
assertThat(elementSet.tailSet("b")).has().exactly("b", "c").inOrder();
assertThat(elementSet.subSet("a", "c")).has().exactly("a", "b").inOrder();
}
public void testElementSetSubsetRemove() {
TreeMultiset<String> ms = TreeMultiset.create();
ms.add("a", 1);
ms.add("b", 3);
ms.add("c", 2);
ms.add("d", 1);
ms.add("e", 3);
ms.add("f", 2);
SortedSet<String> elementSet = ms.elementSet();
assertThat(elementSet).has().exactly("a", "b", "c", "d", "e", "f").inOrder();
SortedSet<String> subset = elementSet.subSet("b", "f");
assertThat(subset).has().exactly("b", "c", "d", "e").inOrder();
assertTrue(subset.remove("c"));
assertThat(elementSet).has().exactly("a", "b", "d", "e", "f").inOrder();
assertThat(subset).has().exactly("b", "d", "e").inOrder();
assertEquals(10, ms.size());
assertFalse(subset.remove("a"));
assertThat(elementSet).has().exactly("a", "b", "d", "e", "f").inOrder();
assertThat(subset).has().exactly("b", "d", "e").inOrder();
assertEquals(10, ms.size());
}
public void testElementSetSubsetRemoveAll() {
TreeMultiset<String> ms = TreeMultiset.create();
ms.add("a", 1);
ms.add("b", 3);
ms.add("c", 2);
ms.add("d", 1);
ms.add("e", 3);
ms.add("f", 2);
SortedSet<String> elementSet = ms.elementSet();
assertThat(elementSet).has().exactly("a", "b", "c", "d", "e", "f").inOrder();
SortedSet<String> subset = elementSet.subSet("b", "f");
assertThat(subset).has().exactly("b", "c", "d", "e").inOrder();
assertTrue(subset.removeAll(Arrays.asList("a", "c")));
assertThat(elementSet).has().exactly("a", "b", "d", "e", "f").inOrder();
assertThat(subset).has().exactly("b", "d", "e").inOrder();
assertEquals(10, ms.size());
}
public void testElementSetSubsetRetainAll() {
TreeMultiset<String> ms = TreeMultiset.create();
ms.add("a", 1);
ms.add("b", 3);
ms.add("c", 2);
ms.add("d", 1);
ms.add("e", 3);
ms.add("f", 2);
SortedSet<String> elementSet = ms.elementSet();
assertThat(elementSet).has().exactly("a", "b", "c", "d", "e", "f").inOrder();
SortedSet<String> subset = elementSet.subSet("b", "f");
assertThat(subset).has().exactly("b", "c", "d", "e").inOrder();
assertTrue(subset.retainAll(Arrays.asList("a", "c")));
assertThat(elementSet).has().exactly("a", "c", "f").inOrder();
assertThat(subset).has().exactly("c").inOrder();
assertEquals(5, ms.size());
}
public void testElementSetSubsetClear() {
TreeMultiset<String> ms = TreeMultiset.create();
ms.add("a", 1);
ms.add("b", 3);
ms.add("c", 2);
ms.add("d", 1);
ms.add("e", 3);
ms.add("f", 2);
SortedSet<String> elementSet = ms.elementSet();
assertThat(elementSet).has().exactly("a", "b", "c", "d", "e", "f").inOrder();
SortedSet<String> subset = elementSet.subSet("b", "f");
assertThat(subset).has().exactly("b", "c", "d", "e").inOrder();
subset.clear();
assertThat(elementSet).has().exactly("a", "f").inOrder();
assertThat(subset).isEmpty();
assertEquals(3, ms.size());
}
public void testCustomComparator() throws Exception {
Comparator<String> comparator = new Comparator<String>() {
@Override
public int compare(String o1, String o2) {
return o2.compareTo(o1);
}
};
TreeMultiset<String> ms = TreeMultiset.create(comparator);
ms.add("b");
ms.add("c");
ms.add("a");
ms.add("b");
ms.add("d");
assertThat(ms).has().exactly("d", "c", "b", "b", "a").inOrder();
SortedSet<String> elementSet = ms.elementSet();
assertEquals("d", elementSet.first());
assertEquals("a", elementSet.last());
assertEquals(comparator, elementSet.comparator());
}
public void testNullAcceptingComparator() throws Exception {
Comparator<String> comparator = Ordering.<String>natural().nullsFirst();
TreeMultiset<String> ms = TreeMultiset.create(comparator);
ms.add("b");
ms.add(null);
ms.add("a");
ms.add("b");
ms.add(null, 2);
assertThat(ms).has().exactly(null, null, null, "a", "b", "b").inOrder();
assertEquals(3, ms.count(null));
SortedSet<String> elementSet = ms.elementSet();
assertEquals(null, elementSet.first());
assertEquals("b", elementSet.last());
assertEquals(comparator, elementSet.comparator());
}
private static final Comparator<String> DEGENERATE_COMPARATOR =
new Comparator<String>() {
@Override
public int compare(String o1, String o2) {
return o1.length() - o2.length();
}
};
/**
* Test a TreeMultiset with a comparator that can return 0 when comparing
* unequal values.
*/
public void testDegenerateComparator() throws Exception {
TreeMultiset<String> ms = TreeMultiset.create(DEGENERATE_COMPARATOR);
ms.add("foo");
ms.add("a");
ms.add("bar");
ms.add("b");
ms.add("c");
assertEquals(2, ms.count("bar"));
assertEquals(3, ms.count("b"));
Multiset<String> ms2 = TreeMultiset.create(DEGENERATE_COMPARATOR);
ms2.add("cat", 2);
ms2.add("x", 3);
assertEquals(ms, ms2);
assertEquals(ms2, ms);
SortedSet<String> elementSet = ms.elementSet();
assertEquals("a", elementSet.first());
assertEquals("foo", elementSet.last());
assertEquals(DEGENERATE_COMPARATOR, elementSet.comparator());
}
public void testSubMultisetSize() {
TreeMultiset<String> ms = TreeMultiset.create();
ms.add("a", Integer.MAX_VALUE);
ms.add("b", Integer.MAX_VALUE);
ms.add("c", 3);
assertEquals(Integer.MAX_VALUE, ms.count("a"));
assertEquals(Integer.MAX_VALUE, ms.count("b"));
assertEquals(3, ms.count("c"));
assertEquals(Integer.MAX_VALUE, ms.headMultiset("c", CLOSED).size());
assertEquals(Integer.MAX_VALUE, ms.headMultiset("b", CLOSED).size());
assertEquals(Integer.MAX_VALUE, ms.headMultiset("a", CLOSED).size());
assertEquals(3, ms.tailMultiset("c", CLOSED).size());
assertEquals(Integer.MAX_VALUE, ms.tailMultiset("b", CLOSED).size());
assertEquals(Integer.MAX_VALUE, ms.tailMultiset("a", CLOSED).size());
}
@GwtIncompatible("reflection")
public void testElementSetBridgeMethods() {
for (Method m : TreeMultiset.class.getMethods()) {
if (m.getName().equals("elementSet") && m.getReturnType().equals(SortedSet.class)) {
return;
}
}
fail("No bridge method found");
}
}
| |
/*
* Copyright (c) 2013 by Gerrit Grunwald
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.hansolo.enzo.sevensegment;
import eu.hansolo.enzo.sevensegment.skin.SevenSegmentSkin;
import javafx.beans.property.BooleanProperty;
import javafx.beans.property.ObjectProperty;
import javafx.beans.property.ReadOnlyStringProperty;
import javafx.beans.property.SimpleBooleanProperty;
import javafx.beans.property.SimpleObjectProperty;
import javafx.beans.property.SimpleStringProperty;
import javafx.beans.property.StringProperty;
import javafx.scene.control.Control;
import javafx.scene.control.Skin;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * A seven-segment (plus decimal dot) display control. The shown character is
 * limited to a single char; the control maps supported ascii codes (space,
 * '.', '0'-'9') to the set of segments that should be lit.
 */
public class SevenSegment extends Control {
    public static final String STYLE_CLASS_RED = "red";
    public static final String STYLE_CLASS_GREEN = "green";
    public static final String STYLE_CLASS_BLUE = "blue";
    public static final String STYLE_CLASS_YELLOW = "yellow";
    public static final String STYLE_CLASS_ORANGE = "orange";
    public static final String STYLE_CLASS_CYAN = "cyan";
    public static final String STYLE_CLASS_MAGENTA = "magenta";
    public static final String STYLE_CLASS_WHITE = "white";
    public static final String STYLE_CLASS_BLACK = "black";

    /** The seven display segments A-G plus the decimal dot. */
    public static enum Segment { A, B, C, D, E, F, G, DOT }

    /** Color styles; each carries the CSS class for its lit and unlit state. */
    public static enum SegmentStyle {
        RED(STYLE_CLASS_RED),
        GREEN(STYLE_CLASS_GREEN),
        BLUE(STYLE_CLASS_BLUE),
        YELLOW(STYLE_CLASS_YELLOW),
        ORANGE(STYLE_CLASS_ORANGE),
        CYAN(STYLE_CLASS_CYAN),
        MAGENTA(STYLE_CLASS_MAGENTA),
        WHITE(STYLE_CLASS_WHITE),
        BLACK(STYLE_CLASS_BLACK);

        public final String ON_CLASS;   // CSS class of a lit segment
        public final String OFF_CLASS;  // CSS class of an unlit segment

        private SegmentStyle(final String CLASS_NAME) {
            ON_CLASS = CLASS_NAME;
            OFF_CLASS = CLASS_NAME + "-off";
        }
    }

    private boolean keepAspect;
    // Lazily-created JavaFX properties; the plain "_" fields hold the value
    // until the corresponding property object is first requested.
    private String _character = " ";
    private StringProperty character;
    private boolean _dotOn = false;
    private BooleanProperty dotOn;
    private SegmentStyle _segmentStyle;
    private ObjectProperty<SegmentStyle> segmentStyle;
    // ascii code -> segments to light for that character
    private Map<Integer, List<Segment>> mapping;

    // ******************** Constructors **************************************
    public SevenSegment() {
        this(" ", SegmentStyle.RED);
    }

    public SevenSegment(final String CHARACTER) {
        this(CHARACTER, SegmentStyle.RED);
    }

    public SevenSegment(final Character CHARACTER) {
        this(CHARACTER, SegmentStyle.RED);
    }

    public SevenSegment(final int CHARACTER) {
        this(Integer.toString(CHARACTER < 0 ? 0 : (CHARACTER > 9 ? 9 : CHARACTER)), SegmentStyle.RED);
    }

    public SevenSegment(final Character CHARACTER, final SegmentStyle SEGMENT_STYLE) {
        this(String.valueOf(CHARACTER), SEGMENT_STYLE);
    }

    public SevenSegment(final int CHARACTER, final SegmentStyle SEGMENT_STYLE) {
        // Integer input is clamped to the displayable range 0-9.
        this(Integer.toString(CHARACTER < 0 ? 0 : (CHARACTER > 9 ? 9 : CHARACTER)), SEGMENT_STYLE);
    }

    public SevenSegment(final String CHARACTER, final SegmentStyle SEGMENT_STYLE) {
        getStyleClass().add("seven-segment");
        keepAspect = true;
        _character = firstChar(CHARACTER);
        _segmentStyle = SEGMENT_STYLE;
        mapping = new HashMap<>(48);
        initMapping();
    }

    /**
     * Returns the first character of INPUT as a one-char string; a null or
     * empty input yields a single space (previously this threw on empty input).
     */
    private static String firstChar(final String INPUT) {
        return (null == INPUT || INPUT.isEmpty()) ? " " : INPUT.substring(0, 1);
    }

    // ******************** Initialization ************************************
    private void initMapping() {
        // Space: ASCII space is 32 (0x20); the original key 20 looks like the
        // hex value written as decimal. 20 is kept for backward compatibility,
        // 32 is what (int) ' ' actually yields.
        mapping.put(20, Arrays.asList(new Segment[] {}));
        mapping.put(32, Arrays.asList(new Segment[] {}));
        // '.' (ASCII 46)
        mapping.put(46, Arrays.asList(new Segment[]{Segment.DOT}));
        // '0' - '9' (ASCII 48 - 57)
        mapping.put(48, Arrays.asList(new Segment[]{Segment.A, Segment.B, Segment.C, Segment.D, Segment.E, Segment.F}));
        mapping.put(49, Arrays.asList(new Segment[]{Segment.B, Segment.C}));
        mapping.put(50, Arrays.asList(new Segment[]{Segment.A, Segment.B, Segment.D, Segment.E, Segment.G}));
        mapping.put(51, Arrays.asList(new Segment[]{Segment.A, Segment.B, Segment.C, Segment.D, Segment.G}));
        mapping.put(52, Arrays.asList(new Segment[]{Segment.B, Segment.C, Segment.F, Segment.G}));
        mapping.put(53, Arrays.asList(new Segment[]{Segment.A, Segment.C, Segment.D, Segment.F, Segment.G}));
        mapping.put(54, Arrays.asList(new Segment[]{Segment.A, Segment.C, Segment.D, Segment.E, Segment.F, Segment.G}));
        mapping.put(55, Arrays.asList(new Segment[]{Segment.A, Segment.B, Segment.C}));
        mapping.put(56, Arrays.asList(new Segment[]{Segment.A, Segment.B, Segment.C, Segment.D, Segment.E, Segment.F, Segment.G}));
        mapping.put(57, Arrays.asList(new Segment[]{Segment.A, Segment.B, Segment.C, Segment.D, Segment.F, Segment.G}));
    }

    // ******************** Methods *******************************************
    public final boolean isKeepAspect() {
        return keepAspect;
    }

    public final void setKeepAspect(final boolean KEEP_ASPECT) {
        keepAspect = KEEP_ASPECT;
    }

    public final String getCharacter() {
        return null == character ? _character : character.get();
    }

    /** Sets the displayed character to the first char of CHARACTER. */
    public final void setCharacter(final String CHARACTER) {
        if (null == character) {
            _character = firstChar(CHARACTER);
        } else {
            character.set(firstChar(CHARACTER));
        }
    }

    public final void setCharacter(final Character CHARACTER) {
        if (null == character) {
            _character = String.valueOf(CHARACTER);
        } else {
            character.set(String.valueOf(CHARACTER));
        }
    }

    /** Sets the displayed digit; values outside 0-9 are clamped. */
    public final void setCharacter(final int CHARACTER) {
        if (null == character) {
            _character = Integer.toString(CHARACTER < 0 ? 0 : (CHARACTER > 9 ? 9 : CHARACTER));
        } else {
            character.set(Integer.toString(CHARACTER < 0 ? 0 : (CHARACTER > 9 ? 9 : CHARACTER)));
        }
    }

    public final ReadOnlyStringProperty characterProperty() {
        if (null == character) {
            character = new SimpleStringProperty(this, "character", _character);
        }
        return character;
    }

    public final boolean isDotOn() {
        return null == dotOn ? _dotOn : dotOn.get();
    }

    public final void setDotOn(final boolean DOT_ON) {
        if (null == dotOn) {
            _dotOn = DOT_ON;
        } else {
            dotOn.set(DOT_ON);
        }
    }

    public final BooleanProperty dotOnProperty() {
        if (null == dotOn) {
            dotOn = new SimpleBooleanProperty(this, "dotOn", _dotOn);
        }
        return dotOn;
    }

    public final SegmentStyle getSegmentStyle() {
        return null == segmentStyle ? _segmentStyle : segmentStyle.get();
    }

    public final void setSegmentStyle(final SegmentStyle SEGMENT_STYLE) {
        if (null == segmentStyle) {
            _segmentStyle = SEGMENT_STYLE;
        } else {
            segmentStyle.set(SEGMENT_STYLE);
        }
    }

    public final ObjectProperty<SegmentStyle> segmentStyleProperty() {
        if (null == segmentStyle) {
            segmentStyle = new SimpleObjectProperty<>(this, "segmentStyle", _segmentStyle);
        }
        return segmentStyle;
    }

    /**
     * Returns a Map that contains the default mapping from ascii integers to lcd segments.
     * The segments are defined as follows:
     *
     *         AAAAAAAAAA
     *        F          B
     *        F          B
     *        F          B
     *        F          B
     *         GGGGGGGGGG
     *        E          C
     *        E          C
     *        E          C
     *        E          C
     *         DDDDDDDDDD
     *
     * @return a Map that contains the default mapping from ascii integers to segments
     */
    public final Map<Integer, List<Segment>> getSegmentMapping() {
        // Defensive copy so callers cannot mutate the internal mapping.
        return new HashMap<>(mapping);
    }

    @Override public boolean isResizable() {
        return true;
    }

    // ******************** Style related *************************************
    @Override protected Skin<?> createDefaultSkin() {
        return new SevenSegmentSkin(this);
    }

    @Override protected String getUserAgentStylesheet() {
        return getClass().getResource(getClass().getSimpleName().toLowerCase() + ".css").toExternalForm();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.config;
import org.apache.dubbo.common.constants.CommonConstants;
import org.apache.dubbo.common.utils.StringUtils;
import org.apache.dubbo.config.annotation.Method;
import org.apache.dubbo.config.support.Parameter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import static org.apache.dubbo.config.Constants.ON_INVOKE_INSTANCE_KEY;
import static org.apache.dubbo.config.Constants.ON_INVOKE_METHOD_KEY;
import static org.apache.dubbo.config.Constants.ON_RETURN_INSTANCE_KEY;
import static org.apache.dubbo.config.Constants.ON_RETURN_METHOD_KEY;
import static org.apache.dubbo.config.Constants.ON_THROW_INSTANCE_KEY;
import static org.apache.dubbo.config.Constants.ON_THROW_METHOD_KEY;
/**
 * The method configuration
 *
 * @export
 */
public class MethodConfig extends AbstractMethodConfig {
    private static final long serialVersionUID = 884908855422675941L;
    /**
     * The method name
     */
    private String name;
    /**
     * Stat
     */
    private Integer stat;
    /**
     * Whether to retry
     */
    private Boolean retry;
    /**
     * If it's reliable
     */
    private Boolean reliable;
    /**
     * Thread limits for method invocations
     */
    private Integer executes;
    /**
     * If it's deprecated
     */
    private Boolean deprecated;
    /**
     * Whether to enable sticky
     */
    private Boolean sticky;
    /**
     * Whether need to return
     */
    private Boolean isReturn;
    /**
     * Callback instance when async-call is invoked
     */
    private Object oninvoke;
    /**
     * Callback method when async-call is invoked
     */
    private String oninvokeMethod;
    /**
     * Callback instance when async-call is returned
     */
    private Object onreturn;
    /**
     * Callback method when async-call is returned
     */
    private String onreturnMethod;
    /**
     * Callback instance when async-call has exception thrown
     */
    private Object onthrow;
    /**
     * Callback method when async-call has exception thrown
     */
    private String onthrowMethod;
    /**
     * The method arguments
     */
    private List<ArgumentConfig> arguments;
    /**
     * These properties come from MethodConfig's parent Config module, they will neither be collected directly from xml or API nor be delivered to url
     */
    private String service;
    private String serviceId;

    @Parameter(excluded = true)
    public String getName() {
        return name;
    }

    public MethodConfig() {
    }

    /**
     * Builds a MethodConfig from the {@link Method} annotation, copying the
     * annotation attributes and the optional callback/argument settings.
     */
    public MethodConfig(Method method) {
        appendAnnotation(Method.class, method);
        this.setReturn(method.isReturn());
        // Empty string means "not configured" for the callback attributes.
        if (!"".equals(method.oninvoke())) {
            this.setOninvoke(method.oninvoke());
        }
        if (!"".equals(method.onreturn())) {
            this.setOnreturn(method.onreturn());
        }
        if (!"".equals(method.onthrow())) {
            this.setOnthrow(method.onthrow());
        }
        if (method.arguments() != null && method.arguments().length != 0) {
            List<ArgumentConfig> argumentConfigs = new ArrayList<ArgumentConfig>(method.arguments().length);
            this.setArguments(argumentConfigs);
            for (int i = 0; i < method.arguments().length; i++) {
                ArgumentConfig argumentConfig = new ArgumentConfig(method.arguments()[i]);
                argumentConfigs.add(argumentConfig);
            }
        }
    }

    /**
     * Converts an array of {@link Method} annotations to MethodConfigs.
     *
     * @param methods the annotations to convert; may be null or empty
     * @return the converted configs, or an empty list when there is no input
     */
    public static List<MethodConfig> constructMethodConfig(Method[] methods) {
        if (methods != null && methods.length != 0) {
            List<MethodConfig> methodConfigs = new ArrayList<MethodConfig>(methods.length);
            for (Method method : methods) {
                methodConfigs.add(new MethodConfig(method));
            }
            return methodConfigs;
        }
        return Collections.emptyList();
    }

    /** Sets the method name; also used as the config id when none is set. */
    public void setName(String name) {
        checkMethodName("name", name);
        this.name = name;
        if (StringUtils.isEmpty(id)) {
            id = name;
        }
    }

    // Deprecated to match the deprecated setter for this legacy property.
    @Deprecated
    public Integer getStat() {
        return stat;
    }

    @Deprecated
    public void setStat(Integer stat) {
        this.stat = stat;
    }

    @Deprecated
    public Boolean isRetry() {
        return retry;
    }

    @Deprecated
    public void setRetry(Boolean retry) {
        this.retry = retry;
    }

    @Deprecated
    public Boolean isReliable() {
        return reliable;
    }

    @Deprecated
    public void setReliable(Boolean reliable) {
        this.reliable = reliable;
    }

    public Integer getExecutes() {
        return executes;
    }

    public void setExecutes(Integer executes) {
        this.executes = executes;
    }

    public Boolean getDeprecated() {
        return deprecated;
    }

    public void setDeprecated(Boolean deprecated) {
        this.deprecated = deprecated;
    }

    public List<ArgumentConfig> getArguments() {
        return arguments;
    }

    @SuppressWarnings("unchecked")
    public void setArguments(List<? extends ArgumentConfig> arguments) {
        this.arguments = (List<ArgumentConfig>) arguments;
    }

    public Boolean getSticky() {
        return sticky;
    }

    public void setSticky(Boolean sticky) {
        this.sticky = sticky;
    }

    @Parameter(key = ON_RETURN_INSTANCE_KEY, excluded = true, attribute = true)
    public Object getOnreturn() {
        return onreturn;
    }

    public void setOnreturn(Object onreturn) {
        this.onreturn = onreturn;
    }

    @Parameter(key = ON_RETURN_METHOD_KEY, excluded = true, attribute = true)
    public String getOnreturnMethod() {
        return onreturnMethod;
    }

    public void setOnreturnMethod(String onreturnMethod) {
        this.onreturnMethod = onreturnMethod;
    }

    @Parameter(key = ON_THROW_INSTANCE_KEY, excluded = true, attribute = true)
    public Object getOnthrow() {
        return onthrow;
    }

    public void setOnthrow(Object onthrow) {
        this.onthrow = onthrow;
    }

    @Parameter(key = ON_THROW_METHOD_KEY, excluded = true, attribute = true)
    public String getOnthrowMethod() {
        return onthrowMethod;
    }

    public void setOnthrowMethod(String onthrowMethod) {
        this.onthrowMethod = onthrowMethod;
    }

    @Parameter(key = ON_INVOKE_INSTANCE_KEY, excluded = true, attribute = true)
    public Object getOninvoke() {
        return oninvoke;
    }

    public void setOninvoke(Object oninvoke) {
        this.oninvoke = oninvoke;
    }

    @Parameter(key = ON_INVOKE_METHOD_KEY, excluded = true, attribute = true)
    public String getOninvokeMethod() {
        return oninvokeMethod;
    }

    public void setOninvokeMethod(String oninvokeMethod) {
        this.oninvokeMethod = oninvokeMethod;
    }

    public Boolean isReturn() {
        return isReturn;
    }

    public void setReturn(Boolean isReturn) {
        this.isReturn = isReturn;
    }

    @Parameter(excluded = true)
    public String getService() {
        return service;
    }

    public void setService(String service) {
        this.service = service;
    }

    @Parameter(excluded = true)
    public String getServiceId() {
        return serviceId;
    }

    public void setServiceId(String serviceId) {
        this.serviceId = serviceId;
    }

    /**
     * service and name must not be null.
     *
     * @return the prefix "dubbo.&lt;service&gt;[.&lt;serviceId&gt;].&lt;name&gt;"
     */
    @Override
    @Parameter(excluded = true)
    public String getPrefix() {
        return CommonConstants.DUBBO + "." + service
                + (StringUtils.isEmpty(serviceId) ? "" : ("." + serviceId))
                + "." + getName();
    }
}
| |
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.search.querytransform;
import com.yahoo.prelude.query.Limit;
import com.yahoo.prelude.IndexFacts;
import com.yahoo.prelude.query.AndItem;
import com.yahoo.prelude.query.CompositeItem;
import com.yahoo.prelude.query.FalseItem;
import com.yahoo.prelude.query.IntItem;
import com.yahoo.prelude.query.Item;
import com.yahoo.prelude.query.QueryCanonicalizer;
import com.yahoo.search.Query;
import com.yahoo.search.Result;
import com.yahoo.search.Searcher;
import com.yahoo.search.searchchain.Execution;
import com.yahoo.search.searchchain.PhaseNames;
import com.yahoo.yolean.chain.After;
import com.yahoo.yolean.chain.Before;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
/**
 * Finds and optimizes ranges in queries:
 * For single value attributes c1 &lt; x AND x &gt; c2 becomes x IN &lt;c1; c2&gt;.
 * The query cost saving from this has been shown to be 2 orders of magnitude in real cases.
 *
 * @author bratseth
 */
@Before(QueryCanonicalizer.queryCanonicalization)
@After(PhaseNames.TRANSFORMED_QUERY)
public class RangeQueryOptimizer extends Searcher {

    @Override
    public Result search(Query query, Execution execution) {
        if (execution.context().getIndexFacts() == null) return execution.search(query); // this is a test query
        boolean optimized = recursiveOptimize(query.getModel().getQueryTree(), execution.context().getIndexFacts().newSession(query));
        if (optimized)
            query.trace("Optimized query ranges", true, 2);
        return execution.search(query);
    }

    /** Recursively performs the range optimization on this query tree and returns whether at least one optimization was done */
    private boolean recursiveOptimize(Item item, IndexFacts.Session indexFacts) {
        if ( ! (item instanceof CompositeItem)) return false;
        boolean optimized = false;
        // Optimize children first, then this level (bottom-up).
        for (Iterator<Item> i = ((CompositeItem) item).getItemIterator(); i.hasNext(); )
            optimized |= recursiveOptimize(i.next(), indexFacts);
        if (item instanceof AndItem)
            optimized |= optimizeAnd((AndItem)item, indexFacts);
        return optimized;
    }

    /**
     * Collects the compatible range terms directly under this AND into
     * consolidated per-field ranges, removes the originals, and adds one
     * item per consolidated range back. Returns whether anything was merged.
     */
    private boolean optimizeAnd(AndItem and, IndexFacts.Session indexFacts) {
        // Find consolidated ranges by collecting a list of compatible ranges
        List<FieldRange> fieldRanges = null;
        for (Iterator<Item> i = and.getItemIterator(); i.hasNext(); ) {
            Item item = i.next();
            if ( ! (item instanceof IntItem)) continue;
            IntItem intItem = (IntItem)item;
            if (intItem.getHitLimit() != 0) continue; // each such op gets a different partial set: Cannot be optimized
            if (intItem.getFromLimit().equals(intItem.getToLimit())) continue; // don't optimize searches for single numbers
            if (indexFacts.getIndex(intItem.getIndexName()).isMultivalue()) continue; // May match different values in each range
            if (fieldRanges == null) fieldRanges = new ArrayList<>();
            Optional<FieldRange> compatibleRange = findCompatibleRange(intItem, fieldRanges);
            if (compatibleRange.isPresent())
                compatibleRange.get().addRange(intItem);
            else
                fieldRanges.add(new FieldRange(intItem));
            i.remove();
        }
        // Add consolidated ranges
        if (fieldRanges == null) return false;
        boolean optimized = false;
        for (FieldRange fieldRange : fieldRanges) {
            and.addItem(fieldRange.toItem());
            optimized |= fieldRange.isOptimization();
        }
        return optimized;
    }

    /** Returns the collected range this item can be merged into, if any. */
    private Optional<FieldRange> findCompatibleRange(IntItem item, List<FieldRange> fieldRanges) {
        for (FieldRange fieldRange : fieldRanges) {
            if (fieldRange.isCompatibleWith(item))
                return Optional.of(fieldRange);
        }
        return Optional.empty();
    }

    /** Represents the ranges searched in a single field */
    private static final class FieldRange {

        // Starts unbounded and is narrowed by intersection as ranges are added.
        private Range range = new Range(new Limit(Double.NEGATIVE_INFINITY, false), new Limit(Double.POSITIVE_INFINITY, false));
        private int sourceRangeCount = 0;

        // IntItem fields which must be preserved in the produced item.
        // This is an unfortunate coupling and ideally we should delegate this (creation, compatibility)
        // to the Item classes
        private final String indexName;
        private final Item.ItemCreator creator;
        private final boolean ranked;
        private final int weight;

        public FieldRange(IntItem item) {
            this.indexName = item.getIndexName();
            this.creator = item.getCreator();
            this.ranked = item.isRanked();
            this.weight = item.getWeight();
            addRange(item);
        }

        public String getIndexName() { return indexName; }

        /** Items may only be merged when all the preserved attributes agree. */
        public boolean isCompatibleWith(IntItem item) {
            if ( ! indexName.equals(item.getIndexName())) return false;
            if (creator != item.getCreator()) return false;
            if (ranked != item.isRanked()) return false;
            if (weight != item.getWeight()) return false;
            return true;
        }

        /** Adds a range for this field */
        public void addRange(IntItem item) {
            range = range.intersection(new Range(item));
            sourceRangeCount++;
        }

        public Item toItem() {
            Item item = range.toItem(indexName);
            item.setCreator(creator);
            item.setRanked(ranked);
            item.setWeight(weight);
            return item;
        }

        /** Returns whether this range is actually an optimization over what was in the source query */
        public boolean isOptimization() { return sourceRangeCount > 1; }

    }

    /** An immutable numerical range */
    private static class Range {

        private final Limit from;
        private final Limit to;

        private static final Range empty = new EmptyRange();

        public Range(Limit from, Limit to) {
            this.from = from;
            this.to = to;
        }

        public Range(IntItem range) {
            from = range.getFromLimit();
            to = range.getToLimit();
        }

        /** Returns true if these two ranges overlap */
        public boolean overlaps(Range other) {
            // The original code tested this same symmetric condition twice
            // with the operands swapped; one check suffices.
            return other.from.isSmallerOrEqualTo(this.to) && other.to.isLargerOrEqualTo(this.from);
        }

        /**
         * Returns the intersection of this and the given range.
         * If the ranges does not overlap, an empty range is returned.
         */
        public Range intersection(Range other) {
            if ( ! overlaps(other)) return empty;
            return new Range(from.max(other.from), to.min(other.to));
        }

        public Item toItem(String fieldName) {
            return IntItem.from(fieldName, from, to, 0);
        }

        @Override
        public String toString() { return "[" + from + ";" + to + "]"; }

    }

    /** A range which contains nothing; it never overlaps and matches nothing. */
    private static class EmptyRange extends Range {

        public EmptyRange() {
            super(new Limit(0, false), new Limit(0, false)); // the to and from of an empty range is never used.
        }

        @Override
        public boolean overlaps(Range other) { return false; }

        @Override
        public Range intersection(Range other) { return this; }

        @Override
        public Item toItem(String fieldName) { return new FalseItem(); }

        @Override
        public String toString() { return "(empty)"; }

    }

}
| |
// Copyright 2017 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.remote;
import static com.google.common.truth.Truth.assertThat;
import static java.nio.charset.StandardCharsets.ISO_8859_1;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyZeroInteractions;
import static org.mockito.Mockito.when;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.eventbus.EventBus;
import com.google.common.io.ByteStreams;
import com.google.common.util.concurrent.ListeningScheduledExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.SettableFuture;
import com.google.devtools.build.lib.actions.ActionInput;
import com.google.devtools.build.lib.actions.Artifact.ArtifactExpander;
import com.google.devtools.build.lib.actions.CommandLines.ParamFileActionInput;
import com.google.devtools.build.lib.actions.EnvironmentalExecException;
import com.google.devtools.build.lib.actions.ExecutionRequirements;
import com.google.devtools.build.lib.actions.MetadataProvider;
import com.google.devtools.build.lib.actions.ParameterFile.ParameterFileType;
import com.google.devtools.build.lib.actions.ResourceSet;
import com.google.devtools.build.lib.actions.SimpleSpawn;
import com.google.devtools.build.lib.actions.Spawn;
import com.google.devtools.build.lib.actions.SpawnResult;
import com.google.devtools.build.lib.actions.SpawnResult.Status;
import com.google.devtools.build.lib.clock.JavaClock;
import com.google.devtools.build.lib.events.Event;
import com.google.devtools.build.lib.events.EventKind;
import com.google.devtools.build.lib.events.Reporter;
import com.google.devtools.build.lib.events.StoredEventHandler;
import com.google.devtools.build.lib.exec.ExecutionOptions;
import com.google.devtools.build.lib.exec.SpawnExecException;
import com.google.devtools.build.lib.exec.SpawnInputExpander;
import com.google.devtools.build.lib.exec.SpawnRunner;
import com.google.devtools.build.lib.exec.SpawnRunner.ProgressStatus;
import com.google.devtools.build.lib.exec.SpawnRunner.SpawnExecutionContext;
import com.google.devtools.build.lib.exec.util.FakeOwner;
import com.google.devtools.build.lib.remote.util.DigestUtil;
import com.google.devtools.build.lib.remote.util.DigestUtil.ActionKey;
import com.google.devtools.build.lib.util.ExitCode;
import com.google.devtools.build.lib.util.io.FileOutErr;
import com.google.devtools.build.lib.vfs.DigestHashFunction;
import com.google.devtools.build.lib.vfs.FileSystem;
import com.google.devtools.build.lib.vfs.FileSystemUtils;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.build.lib.vfs.inmemoryfs.InMemoryFileSystem;
import com.google.devtools.common.options.Options;
import com.google.devtools.remoteexecution.v1test.ActionResult;
import com.google.devtools.remoteexecution.v1test.Digest;
import com.google.devtools.remoteexecution.v1test.ExecuteRequest;
import com.google.devtools.remoteexecution.v1test.ExecuteResponse;
import com.google.devtools.remoteexecution.v1test.LogFile;
import com.google.protobuf.ByteString;
import com.google.rpc.Code;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.util.Collection;
import java.util.SortedMap;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicReference;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;
/** Tests for {@link com.google.devtools.build.lib.remote.RemoteSpawnRunner} */
@RunWith(JUnit4.class)
public class RemoteSpawnRunnerTest {
private static final ImmutableMap<String, String> NO_CACHE =
ImmutableMap.of(ExecutionRequirements.NO_CACHE, "");
private static ListeningScheduledExecutorService retryService;
private Path execRoot;
private Path logDir;
private DigestUtil digestUtil;
private FakeActionInputFileCache fakeFileCache;
private FileOutErr outErr;
private RemoteOptions options;
private RemoteRetrier retrier;
@Mock private AbstractRemoteActionCache cache;
@Mock
private GrpcRemoteExecutor executor;
@Mock
private SpawnRunner localRunner;
// The action key of the Spawn returned by newSimpleSpawn().
private final String simpleActionId =
"eb45b20cc979d504f96b9efc9a08c48103c6f017afa09c0df5c70a5f92a98ea8";
@BeforeClass
public static void beforeEverything() {
retryService = MoreExecutors.listeningDecorator(Executors.newScheduledThreadPool(1));
}
@Before
public final void setUp() throws Exception {
  MockitoAnnotations.initMocks(this);
  digestUtil = new DigestUtil(DigestHashFunction.SHA256);
  // All paths live on an in-memory file system so the tests never touch disk.
  FileSystem fs = new InMemoryFileSystem(new JavaClock(), DigestHashFunction.SHA256);
  execRoot = fs.getPath("/exec/root");
  logDir = fs.getPath("/server-logs");
  FileSystemUtils.createDirectoryAndParents(execRoot);
  fakeFileCache = new FakeActionInputFileCache(execRoot);
  Path stdout = fs.getPath("/tmp/stdout");
  Path stderr = fs.getPath("/tmp/stderr");
  FileSystemUtils.createDirectoryAndParents(stdout.getParentDirectory());
  FileSystemUtils.createDirectoryAndParents(stderr.getParentDirectory());
  outErr = new FileOutErr(stdout, stderr);
  // Default remote options; individual tests flip the flags they care about.
  options = Options.getDefaults(RemoteOptions.class);
  retrier = RemoteModule.createExecuteRetrier(options, retryService);
}
@AfterClass
public static void afterEverything() {
retryService.shutdownNow();
}
@Test
@SuppressWarnings("unchecked")
public void nonCachableSpawnsShouldNotBeCached_remote() throws Exception {
  // Test that if a spawn is marked "NO_CACHE" then it's not fetched from a remote cache.
  // It should be executed remotely, but marked non-cacheable to remote execution, so that
  // the action result is not saved in the remote cache.
  options.remoteAcceptCached = true;
  options.remoteLocalFallback = false;
  options.remoteUploadLocalResults = true;
  RemoteSpawnRunner runner =
      new RemoteSpawnRunner(
          execRoot,
          options,
          Options.getDefaults(ExecutionOptions.class),
          new AtomicReference<>(localRunner),
          true,
          /*cmdlineReporter=*/ null,
          "build-req-id",
          "command-id",
          cache,
          executor,
          retrier,
          digestUtil,
          logDir);
  ExecuteResponse succeeded = ExecuteResponse.newBuilder().setResult(
      ActionResult.newBuilder().setExitCode(0).build()).build();
  when(executor.executeRemotely(any(ExecuteRequest.class))).thenReturn(succeeded);
  Spawn spawn = new SimpleSpawn(
      new FakeOwner("foo", "bar"),
      /*arguments=*/ ImmutableList.of(),
      /*environment=*/ ImmutableMap.of(),
      NO_CACHE,
      /*inputs=*/ ImmutableList.of(),
      /*outputs=*/ ImmutableList.<ActionInput>of(),
      ResourceSet.ZERO);
  SpawnExecutionContext policy = new FakeSpawnExecutionContext(spawn);
  runner.exec(spawn, policy);
  ArgumentCaptor<ExecuteRequest> requestCaptor = ArgumentCaptor.forClass(ExecuteRequest.class);
  verify(executor).executeRemotely(requestCaptor.capture());
  // The request must bypass the remote action cache on both the read path
  // (skip_cache_lookup) and the write path (do_not_cache).
  assertThat(requestCaptor.getValue().getSkipCacheLookup()).isTrue();
  assertThat(requestCaptor.getValue().getAction().getDoNotCache()).isTrue();
  // Neither a cache lookup nor a result upload may happen for NO_CACHE spawns.
  verify(cache, never())
      .getCachedActionResult(any(ActionKey.class));
  verify(cache, never())
      .upload(
          any(ActionKey.class),
          any(Path.class),
          any(Collection.class),
          any(FileOutErr.class),
          any(Boolean.class));
  verifyZeroInteractions(localRunner);
}
@Test
@SuppressWarnings("unchecked")
public void nonCachableSpawnsShouldNotBeCached_local() throws Exception {
  // Test that if a spawn is executed locally, due to the local fallback, that its result is not
  // uploaded to the remote cache. However, the artifacts should still be uploaded.
  options.remoteAcceptCached = true;
  options.remoteLocalFallback = true;
  options.remoteUploadLocalResults = true;
  // The runner is constructed without a remote executor (null), so execution always takes the
  // local-fallback path. (A previous version also stubbed executor.executeRemotely() to throw,
  // but that stub was dead code: the executor mock is never handed to the runner.)
  RemoteSpawnRunner runner =
      new RemoteSpawnRunner(
          execRoot,
          options,
          Options.getDefaults(ExecutionOptions.class),
          new AtomicReference<>(localRunner),
          true,
          /*cmdlineReporter=*/ null,
          "build-req-id",
          "command-id",
          cache,
          /*executor=*/ null,
          retrier,
          digestUtil,
          logDir);
  Spawn spawn = new SimpleSpawn(
      new FakeOwner("foo", "bar"),
      /*arguments=*/ ImmutableList.of(),
      /*environment=*/ ImmutableMap.of(),
      NO_CACHE,
      /*inputs=*/ ImmutableList.of(),
      /*outputs=*/ ImmutableList.<ActionInput>of(),
      ResourceSet.ZERO);
  SpawnExecutionContext policy = new FakeSpawnExecutionContext(spawn);
  runner.exec(spawn, policy);
  verify(localRunner).exec(spawn, policy);
  // NO_CACHE: no cache lookup, and the action result is not uploaded
  // (uploadAction == false), although artifacts still are.
  verify(cache, never())
      .getCachedActionResult(any(ActionKey.class));
  verify(cache)
      .upload(
          any(ActionKey.class),
          any(Path.class),
          any(Collection.class),
          any(FileOutErr.class),
          eq(false));
  verifyZeroInteractions(executor);
}
@Test
@SuppressWarnings("unchecked")
public void failedActionShouldOnlyUploadOutputs() throws Exception {
  // Test that the outputs of a failed locally executed action are uploaded to a remote cache,
  // but the action result itself is not.
  options.remoteUploadLocalResults = true;
  RemoteSpawnRunner runner =
      spy(
          new RemoteSpawnRunner(
              execRoot,
              options,
              Options.getDefaults(ExecutionOptions.class),
              new AtomicReference<>(localRunner),
              true,
              /*cmdlineReporter=*/ null,
              "build-req-id",
              "command-id",
              cache,
              null,
              retrier,
              digestUtil,
              logDir));
  Spawn spawn = newSimpleSpawn();
  SpawnExecutionContext policy = new FakeSpawnExecutionContext(spawn);
  // Simulate a local execution that ran but failed (exit code 1).
  SpawnResult res = Mockito.mock(SpawnResult.class);
  when(res.exitCode()).thenReturn(1);
  when(res.status()).thenReturn(Status.EXECUTION_FAILED);
  when(localRunner.exec(eq(spawn), eq(policy))).thenReturn(res);
  assertThat(runner.exec(spawn, policy)).isSameAs(res);
  verify(localRunner).exec(eq(spawn), eq(policy));
  verify(runner).execLocallyAndUpload(eq(spawn), eq(policy), any(SortedMap.class), eq(cache),
      any(ActionKey.class));
  // uploadAction == false: outputs go to the cache, the failed action result does not.
  verify(cache)
      .upload(
          any(ActionKey.class),
          any(Path.class),
          any(Collection.class),
          any(FileOutErr.class),
          eq(false));
}
@Test
public void dontAcceptFailedCachedAction() throws Exception {
  // Test that bazel fails if the remote cache serves a failed action.
  // Uses the shared "options" field initialized in setUp(); a previous local
  // variable of the same name shadowed it with an identical default value.
  ActionResult failedAction = ActionResult.newBuilder().setExitCode(1).build();
  when(cache.getCachedActionResult(any(ActionKey.class))).thenReturn(failedAction);
  Spawn spawn = newSimpleSpawn();
  SpawnExecutionContext policy = new FakeSpawnExecutionContext(spawn);
  RemoteSpawnRunner runner =
      spy(
          new RemoteSpawnRunner(
              execRoot,
              options,
              Options.getDefaults(ExecutionOptions.class),
              new AtomicReference<>(localRunner),
              true,
              /*cmdlineReporter=*/ null,
              "build-req-id",
              "command-id",
              cache,
              null,
              retrier,
              digestUtil,
              logDir));
  try {
    runner.exec(spawn, policy);
    fail("Expected exception");
  } catch (EnvironmentalExecException expected) {
    // Intentionally left empty.
  }
}
@Test
@SuppressWarnings("unchecked")
public void printWarningIfCacheIsDown() throws Exception {
  // If we try to upload to a local cache that is down, a warning should be printed.
  options.remoteUploadLocalResults = true;
  options.remoteLocalFallback = true;
  Reporter reporter = new Reporter(new EventBus());
  StoredEventHandler eventHandler = new StoredEventHandler();
  reporter.addHandler(eventHandler);
  RemoteSpawnRunner runner =
      new RemoteSpawnRunner(
          execRoot,
          options,
          Options.getDefaults(ExecutionOptions.class),
          new AtomicReference<>(localRunner),
          false,
          reporter,
          "build-req-id",
          "command-id",
          cache,
          null,
          retrier,
          digestUtil,
          logDir);
  Spawn spawn = newSimpleSpawn();
  SpawnExecutionContext policy = new FakeSpawnExecutionContext(spawn);
  // Both the cache read and the subsequent upload fail with "cache down".
  when(cache.getCachedActionResult(any(ActionKey.class)))
      .thenThrow(new IOException("cache down"));
  doThrow(new IOException("cache down"))
      .when(cache)
      .upload(
          any(ActionKey.class),
          any(Path.class),
          any(Collection.class),
          any(FileOutErr.class),
          eq(true));
  SpawnResult res =
      new SpawnResult.Builder()
          .setStatus(Status.SUCCESS)
          .setExitCode(0)
          .setRunnerName("test")
          .build();
  when(localRunner.exec(eq(spawn), eq(policy))).thenReturn(res);
  // The local result still comes back successfully...
  assertThat(runner.exec(spawn, policy)).isEqualTo(res);
  verify(localRunner).exec(eq(spawn), eq(policy));
  // ...and exactly one WARNING about the failed upload is reported.
  assertThat(eventHandler.getEvents()).hasSize(1);
  Event evt = eventHandler.getEvents().get(0);
  assertThat(evt.getKind()).isEqualTo(EventKind.WARNING);
  assertThat(evt.getMessage()).contains("fail");
  assertThat(evt.getMessage()).contains("upload");
}
@Test
public void noRemoteExecutorFallbackFails() throws Exception {
  // Errors from the fallback runner should be propagated out of the remote runner.
  options.remoteUploadLocalResults = true;
  options.remoteLocalFallback = true;
  // No remote executor (null): execution always goes to the local fallback runner.
  RemoteSpawnRunner runner =
      new RemoteSpawnRunner(
          execRoot,
          options,
          Options.getDefaults(ExecutionOptions.class),
          new AtomicReference<>(localRunner),
          true,
          /*cmdlineReporter=*/ null,
          "build-req-id",
          "command-id",
          cache,
          null,
          retrier,
          digestUtil,
          logDir);
  Spawn spawn = newSimpleSpawn();
  SpawnExecutionContext policy = new FakeSpawnExecutionContext(spawn);
  when(cache.getCachedActionResult(any(ActionKey.class))).thenReturn(null);
  IOException err = new IOException("local execution error");
  when(localRunner.exec(eq(spawn), eq(policy))).thenThrow(err);
  try {
    runner.exec(spawn, policy);
    fail("expected IOException to be raised");
  } catch (IOException e) {
    // The exact exception instance must surface unchanged.
    assertThat(e).isSameAs(err);
  }
  verify(localRunner).exec(eq(spawn), eq(policy));
}
@Test
public void remoteCacheErrorFallbackFails() throws Exception {
  // Errors from the fallback runner should be propagated out of the remote runner,
  // even when the fallback was itself triggered by a remote-cache error.
  options.remoteUploadLocalResults = true;
  options.remoteLocalFallback = true;
  RemoteSpawnRunner runner =
      new RemoteSpawnRunner(
          execRoot,
          options,
          Options.getDefaults(ExecutionOptions.class),
          new AtomicReference<>(localRunner),
          true,
          /*cmdlineReporter=*/ null,
          "build-req-id",
          "command-id",
          cache,
          null,
          retrier,
          digestUtil,
          logDir);
  Spawn spawn = newSimpleSpawn();
  SpawnExecutionContext policy = new FakeSpawnExecutionContext(spawn);
  // Cache lookup fails -> local fallback runs -> local execution fails.
  when(cache.getCachedActionResult(any(ActionKey.class))).thenThrow(new IOException());
  IOException err = new IOException("local execution error");
  when(localRunner.exec(eq(spawn), eq(policy))).thenThrow(err);
  try {
    runner.exec(spawn, policy);
    fail("expected IOException to be raised");
  } catch (IOException e) {
    // The local error (not the cache error) must surface unchanged.
    assertThat(e).isSameAs(err);
  }
  verify(localRunner).exec(eq(spawn), eq(policy));
}
@Test
public void testLocalFallbackFailureRemoteExecutorFailure() throws Exception {
options.remoteLocalFallback = true;
RemoteSpawnRunner runner =
new RemoteSpawnRunner(
execRoot,
options,
Options.getDefaults(ExecutionOptions.class),
new AtomicReference<>(localRunner),
true,
/*cmdlineReporter=*/ null,
"build-req-id",
"command-id",
cache,
executor,
retrier,
digestUtil,
logDir);
when(cache.getCachedActionResult(any(ActionKey.class))).thenReturn(null);
when(executor.executeRemotely(any(ExecuteRequest.class))).thenThrow(new IOException());
Spawn spawn = newSimpleSpawn();
SpawnExecutionContext policy = new FakeSpawnExecutionContext(spawn);
IOException err = new IOException("local execution error");
when(localRunner.exec(eq(spawn), eq(policy))).thenThrow(err);
try {
runner.exec(spawn, policy);
fail("expected IOException to be raised");
} catch (IOException e) {
assertThat(e).isSameAs(err);
}
verify(localRunner).exec(eq(spawn), eq(policy));
}
@Test
public void testHumanReadableServerLogsSavedForFailingAction() throws Exception {
RemoteSpawnRunner runner =
new RemoteSpawnRunner(
execRoot,
Options.getDefaults(RemoteOptions.class),
Options.getDefaults(ExecutionOptions.class),
new AtomicReference<>(localRunner),
true,
/*cmdlineReporter=*/ null,
"build-req-id",
"command-id",
cache,
executor,
retrier,
digestUtil,
logDir);
Digest logDigest = digestUtil.computeAsUtf8("bla");
Path logPath = logDir.getRelative(simpleActionId).getRelative("logname");
when(executor.executeRemotely(any(ExecuteRequest.class)))
.thenReturn(
ExecuteResponse.newBuilder()
.putServerLogs(
"logname",
LogFile.newBuilder().setHumanReadable(true).setDigest(logDigest).build())
.setResult(ActionResult.newBuilder().setExitCode(31).build())
.build());
SettableFuture<Void> completed = SettableFuture.create();
completed.set(null);
when(cache.downloadFile(eq(logPath), eq(logDigest), eq(null))).thenReturn(completed);
Spawn spawn = newSimpleSpawn();
SpawnExecutionContext policy = new FakeSpawnExecutionContext(spawn);
SpawnResult res = runner.exec(spawn, policy);
assertThat(res.status()).isEqualTo(Status.NON_ZERO_EXIT);
verify(executor).executeRemotely(any(ExecuteRequest.class));
verify(cache).downloadFile(eq(logPath), eq(logDigest), eq(null));
}
@Test
public void testHumanReadableServerLogsSavedForFailingActionWithStatus() throws Exception {
RemoteSpawnRunner runner =
new RemoteSpawnRunner(
execRoot,
Options.getDefaults(RemoteOptions.class),
Options.getDefaults(ExecutionOptions.class),
new AtomicReference<>(localRunner),
true,
/*cmdlineReporter=*/ null,
"build-req-id",
"command-id",
cache,
executor,
retrier,
digestUtil,
logDir);
Digest logDigest = digestUtil.computeAsUtf8("bla");
Path logPath = logDir.getRelative(simpleActionId).getRelative("logname");
com.google.rpc.Status timeoutStatus =
com.google.rpc.Status.newBuilder().setCode(Code.DEADLINE_EXCEEDED.getNumber()).build();
ExecuteResponse resp =
ExecuteResponse.newBuilder()
.putServerLogs(
"logname", LogFile.newBuilder().setHumanReadable(true).setDigest(logDigest).build())
.setStatus(timeoutStatus)
.build();
when(executor.executeRemotely(any(ExecuteRequest.class)))
.thenThrow(new Retrier.RetryException(
"", 1, new ExecutionStatusException(resp.getStatus(), resp)));
SettableFuture<Void> completed = SettableFuture.create();
completed.set(null);
when(cache.downloadFile(eq(logPath), eq(logDigest), eq(null))).thenReturn(completed);
Spawn spawn = newSimpleSpawn();
SpawnExecutionContext policy = new FakeSpawnExecutionContext(spawn);
SpawnResult res = runner.exec(spawn, policy);
assertThat(res.status()).isEqualTo(Status.TIMEOUT);
verify(executor).executeRemotely(any(ExecuteRequest.class));
verify(cache).downloadFile(eq(logPath), eq(logDigest), eq(null));
}
@Test
public void testNonHumanReadableServerLogsNotSaved() throws Exception {
RemoteSpawnRunner runner =
new RemoteSpawnRunner(
execRoot,
Options.getDefaults(RemoteOptions.class),
Options.getDefaults(ExecutionOptions.class),
new AtomicReference<>(localRunner),
true,
/*cmdlineReporter=*/ null,
"build-req-id",
"command-id",
cache,
executor,
retrier,
digestUtil,
logDir);
Digest logDigest = digestUtil.computeAsUtf8("bla");
ActionResult result = ActionResult.newBuilder().setExitCode(31).build();
when(executor.executeRemotely(any(ExecuteRequest.class)))
.thenReturn(
ExecuteResponse.newBuilder()
.putServerLogs(
"logname",
LogFile.newBuilder().setDigest(logDigest).build())
.setResult(result)
.build());
Spawn spawn = newSimpleSpawn();
SpawnExecutionContext policy = new FakeSpawnExecutionContext(spawn);
SpawnResult res = runner.exec(spawn, policy);
assertThat(res.status()).isEqualTo(Status.NON_ZERO_EXIT);
verify(executor).executeRemotely(any(ExecuteRequest.class));
verify(cache).download(eq(result), eq(execRoot), any(FileOutErr.class));
verify(cache, never()).downloadFile(any(Path.class), any(Digest.class), any(ByteString.class));
}
@Test
public void testServerLogsNotSavedForSuccessfulAction() throws Exception {
RemoteSpawnRunner runner =
new RemoteSpawnRunner(
execRoot,
Options.getDefaults(RemoteOptions.class),
Options.getDefaults(ExecutionOptions.class),
new AtomicReference<>(localRunner),
true,
/*cmdlineReporter=*/ null,
"build-req-id",
"command-id",
cache,
executor,
retrier,
digestUtil,
logDir);
Digest logDigest = digestUtil.computeAsUtf8("bla");
ActionResult result = ActionResult.newBuilder().setExitCode(0).build();
when(executor.executeRemotely(any(ExecuteRequest.class)))
.thenReturn(
ExecuteResponse.newBuilder()
.putServerLogs(
"logname",
LogFile.newBuilder().setHumanReadable(true).setDigest(logDigest).build())
.setResult(result)
.build());
Spawn spawn = newSimpleSpawn();
SpawnExecutionContext policy = new FakeSpawnExecutionContext(spawn);
SpawnResult res = runner.exec(spawn, policy);
assertThat(res.status()).isEqualTo(Status.SUCCESS);
verify(executor).executeRemotely(any(ExecuteRequest.class));
verify(cache).download(eq(result), eq(execRoot), any(FileOutErr.class));
verify(cache, never()).downloadFile(any(Path.class), any(Digest.class), any(ByteString.class));
}
@Test
public void cacheDownloadFailureTriggersRemoteExecution() throws Exception {
// If downloading a cached action fails, remote execution should be tried.
RemoteSpawnRunner runner =
new RemoteSpawnRunner(
execRoot,
options,
Options.getDefaults(ExecutionOptions.class),
new AtomicReference<>(localRunner),
true,
/*cmdlineReporter=*/ null,
"build-req-id",
"command-id",
cache,
executor,
retrier,
digestUtil,
logDir);
ActionResult cachedResult = ActionResult.newBuilder().setExitCode(0).build();
when(cache.getCachedActionResult(any(ActionKey.class))).thenReturn(cachedResult);
doThrow(CacheNotFoundException.class)
.when(cache)
.download(eq(cachedResult), any(Path.class), any(FileOutErr.class));
ActionResult execResult = ActionResult.newBuilder().setExitCode(31).build();
ExecuteResponse succeeded = ExecuteResponse.newBuilder().setResult(execResult).build();
when(executor.executeRemotely(any(ExecuteRequest.class))).thenReturn(succeeded);
doNothing().when(cache).download(eq(execResult), any(Path.class), any(FileOutErr.class));
Spawn spawn = newSimpleSpawn();
SpawnExecutionContext policy = new FakeSpawnExecutionContext(spawn);
SpawnResult res = runner.exec(spawn, policy);
assertThat(res.status()).isEqualTo(Status.NON_ZERO_EXIT);
assertThat(res.exitCode()).isEqualTo(31);
verify(executor).executeRemotely(any(ExecuteRequest.class));
}
@Test
public void testRemoteExecutionTimeout() throws Exception {
// If remote execution times out the SpawnResult status should be TIMEOUT.
options.remoteLocalFallback = false;
RemoteSpawnRunner runner =
new RemoteSpawnRunner(
execRoot,
options,
Options.getDefaults(ExecutionOptions.class),
new AtomicReference<>(localRunner),
true,
/*cmdlineReporter=*/ null,
"build-req-id",
"command-id",
cache,
executor,
retrier,
digestUtil,
logDir);
ActionResult cachedResult = ActionResult.newBuilder().setExitCode(0).build();
when(cache.getCachedActionResult(any(ActionKey.class))).thenReturn(null);
ExecuteResponse resp =
ExecuteResponse.newBuilder()
.setResult(cachedResult)
.setStatus(
com.google.rpc.Status.newBuilder()
.setCode(Code.DEADLINE_EXCEEDED.getNumber())
.build())
.build();
when(executor.executeRemotely(any(ExecuteRequest.class)))
.thenThrow(
new Retrier.RetryException(
"", 1, new ExecutionStatusException(resp.getStatus(), resp)));
Spawn spawn = newSimpleSpawn();
SpawnExecutionContext policy = new FakeSpawnExecutionContext(spawn);
SpawnResult res = runner.exec(spawn, policy);
assertThat(res.status()).isEqualTo(Status.TIMEOUT);
verify(executor).executeRemotely(any(ExecuteRequest.class));
verify(cache).download(eq(cachedResult), eq(execRoot), any(FileOutErr.class));
}
@Test
public void testRemoteExecutionTimeoutDoesNotTriggerFallback() throws Exception {
  // If remote execution times out the SpawnResult status should be TIMEOUT, regardless of local
  // fallback option.
  options.remoteLocalFallback = true;
  RemoteSpawnRunner runner =
      new RemoteSpawnRunner(
          execRoot,
          options,
          Options.getDefaults(ExecutionOptions.class),
          new AtomicReference<>(localRunner),
          true,
          /*cmdlineReporter=*/ null,
          "build-req-id",
          "command-id",
          cache,
          executor,
          retrier,
          digestUtil,
          logDir);
  ActionResult cachedResult = ActionResult.newBuilder().setExitCode(0).build();
  when(cache.getCachedActionResult(any(ActionKey.class))).thenReturn(null);
  // The executor reports DEADLINE_EXCEEDED via an ExecutionStatusException.
  ExecuteResponse resp =
      ExecuteResponse.newBuilder()
          .setResult(cachedResult)
          .setStatus(
              com.google.rpc.Status.newBuilder()
                  .setCode(Code.DEADLINE_EXCEEDED.getNumber())
                  .build())
          .build();
  when(executor.executeRemotely(any(ExecuteRequest.class)))
      .thenThrow(
          new Retrier.RetryException(
              "", 1, new ExecutionStatusException(resp.getStatus(), resp)));
  Spawn spawn = newSimpleSpawn();
  SpawnExecutionContext policy = new FakeSpawnExecutionContext(spawn);
  SpawnResult res = runner.exec(spawn, policy);
  assertThat(res.status()).isEqualTo(Status.TIMEOUT);
  verify(executor).executeRemotely(any(ExecuteRequest.class));
  // Partial outputs attached to the timeout response are still downloaded.
  verify(cache).download(eq(cachedResult), eq(execRoot), any(FileOutErr.class));
  // The local fallback must NOT run for a timeout, despite remoteLocalFallback=true.
  verify(localRunner, never()).exec(eq(spawn), eq(policy));
}
@Test
public void testRemoteExecutionCommandFailureDoesNotTriggerFallback() throws Exception {
options.remoteLocalFallback = true;
RemoteSpawnRunner runner =
new RemoteSpawnRunner(
execRoot,
options,
Options.getDefaults(ExecutionOptions.class),
new AtomicReference<>(localRunner),
true,
/*cmdlineReporter=*/ null,
"build-req-id",
"command-id",
cache,
executor,
retrier,
digestUtil,
logDir);
ActionResult cachedResult = ActionResult.newBuilder().setExitCode(0).build();
when(cache.getCachedActionResult(any(ActionKey.class))).thenReturn(null);
ExecuteResponse failed = ExecuteResponse.newBuilder().setResult(
ActionResult.newBuilder().setExitCode(33).build()).build();
when(executor.executeRemotely(any(ExecuteRequest.class))).thenReturn(failed);
Spawn spawn = newSimpleSpawn();
SpawnExecutionContext policy = new FakeSpawnExecutionContext(spawn);
SpawnResult res = runner.exec(spawn, policy);
assertThat(res.status()).isEqualTo(Status.NON_ZERO_EXIT);
assertThat(res.exitCode()).isEqualTo(33);
verify(executor).executeRemotely(any(ExecuteRequest.class));
verify(cache, never()).download(eq(cachedResult), eq(execRoot), any(FileOutErr.class));
verify(localRunner, never()).exec(eq(spawn), eq(policy));
}
@Test
public void testExitCode_executorfailure() throws Exception {
// If we get a failure due to the remote cache not working, the exit code should be
// ExitCode.REMOTE_ERROR.
options.remoteLocalFallback = false;
RemoteSpawnRunner runner =
new RemoteSpawnRunner(
execRoot,
options,
Options.getDefaults(ExecutionOptions.class),
new AtomicReference<>(localRunner),
true,
/*cmdlineReporter=*/ null,
"build-req-id",
"command-id",
cache,
executor,
retrier,
digestUtil,
logDir);
when(cache.getCachedActionResult(any(ActionKey.class))).thenReturn(null);
when(executor.executeRemotely(any(ExecuteRequest.class))).thenThrow(new IOException());
Spawn spawn = newSimpleSpawn();
SpawnExecutionContext policy = new FakeSpawnExecutionContext(spawn);
try {
runner.exec(spawn, policy);
fail("Exception expected");
} catch (SpawnExecException e) {
assertThat(e.getSpawnResult().exitCode())
.isEqualTo(ExitCode.REMOTE_ERROR.getNumericExitCode());
}
}
@Test
public void testExitCode_executionfailure() throws Exception {
// If we get a failure due to the remote executor not working, the exit code should be
// ExitCode.REMOTE_ERROR.
options.remoteLocalFallback = false;
RemoteSpawnRunner runner =
new RemoteSpawnRunner(
execRoot,
options,
Options.getDefaults(ExecutionOptions.class),
new AtomicReference<>(localRunner),
true,
/*cmdlineReporter=*/ null,
"build-req-id",
"command-id",
cache,
executor,
retrier,
digestUtil,
logDir);
when(cache.getCachedActionResult(any(ActionKey.class))).thenThrow(new IOException());
Spawn spawn = newSimpleSpawn();
SpawnExecutionContext policy = new FakeSpawnExecutionContext(spawn);
try {
runner.exec(spawn, policy);
fail("Exception expected");
} catch (SpawnExecException e) {
assertThat(e.getSpawnResult().exitCode())
.isEqualTo(ExitCode.REMOTE_ERROR.getNumericExitCode());
}
}
@Test
public void testMaterializeParamFiles() throws Exception {
ExecutionOptions executionOptions =
Options.parse(ExecutionOptions.class, "--materialize_param_files").getOptions();
executionOptions.materializeParamFiles = true;
RemoteSpawnRunner runner =
new RemoteSpawnRunner(
execRoot,
Options.getDefaults(RemoteOptions.class),
executionOptions,
new AtomicReference<>(localRunner),
true,
/*cmdlineReporter=*/ null,
"build-req-id",
"command-id",
cache,
executor,
retrier,
digestUtil,
logDir);
ExecuteResponse succeeded =
ExecuteResponse.newBuilder()
.setResult(ActionResult.newBuilder().setExitCode(0).build())
.build();
when(executor.executeRemotely(any(ExecuteRequest.class))).thenReturn(succeeded);
ImmutableList<String> args = ImmutableList.of("--foo", "--bar");
ParamFileActionInput input =
new ParamFileActionInput(
PathFragment.create("out/param_file"), args, ParameterFileType.UNQUOTED, ISO_8859_1);
Spawn spawn =
new SimpleSpawn(
new FakeOwner("foo", "bar"),
/*arguments=*/ ImmutableList.of(),
/*environment=*/ ImmutableMap.of(),
/*executionInfo=*/ ImmutableMap.of(),
ImmutableList.of(input),
/*outputs=*/ ImmutableList.<ActionInput>of(),
ResourceSet.ZERO);
SpawnExecutionContext policy = new FakeSpawnExecutionContext(spawn);
SpawnResult res = runner.exec(spawn, policy);
assertThat(res.status()).isEqualTo(Status.SUCCESS);
Path paramFile = execRoot.getRelative("out/param_file");
assertThat(paramFile.exists()).isTrue();
try (InputStream inputStream = paramFile.getInputStream()) {
assertThat(
new String(ByteStreams.toByteArray(inputStream), StandardCharsets.UTF_8).split("\n"))
.asList()
.containsExactly("--foo", "--bar");
}
}
/** Builds a minimal Spawn (no args, env, inputs, or outputs) for use in tests. */
private static Spawn newSimpleSpawn() {
  return new SimpleSpawn(
      new FakeOwner("foo", "bar"),
      /*arguments=*/ ImmutableList.of(),
      /*environment=*/ ImmutableMap.of(),
      /*executionInfo=*/ ImmutableMap.of(),
      /*inputs=*/ ImmutableList.of(),
      /*outputs=*/ ImmutableList.<ActionInput>of(),
      ResourceSet.ZERO);
}
// TODO(buchgr): Extract a common class to be used for testing.
/**
 * Minimal {@link SpawnExecutionContext} for driving RemoteSpawnRunner in tests.
 * Only the members the runner actually uses are implemented; the rest throw
 * {@link UnsupportedOperationException} so unexpected use fails loudly.
 */
class FakeSpawnExecutionContext implements SpawnExecutionContext {
  // Identity expander: each artifact expands to itself.
  private final ArtifactExpander artifactExpander =
      (artifact, output) -> output.add(artifact);
  private final Spawn spawn;
  FakeSpawnExecutionContext(Spawn spawn) {
    this.spawn = spawn;
  }
  @Override
  public int getId() {
    return 0;
  }
  @Override
  public void prefetchInputs() throws IOException {
    throw new UnsupportedOperationException();
  }
  @Override
  public void lockOutputFiles() throws InterruptedException {
    throw new UnsupportedOperationException();
  }
  @Override
  public boolean speculating() {
    return false;
  }
  @Override
  public MetadataProvider getMetadataProvider() {
    return fakeFileCache;
  }
  @Override
  public ArtifactExpander getArtifactExpander() {
    throw new UnsupportedOperationException();
  }
  @Override
  public Duration getTimeout() {
    // Duration.ZERO means "no timeout" for these tests.
    return Duration.ZERO;
  }
  @Override
  public FileOutErr getFileOutErr() {
    return outErr;
  }
  @Override
  public SortedMap<PathFragment, ActionInput> getInputMapping() throws IOException {
    return new SpawnInputExpander(execRoot, /*strict*/ false)
        .getInputMapping(spawn, artifactExpander, fakeFileCache);
  }
  @Override
  public void report(ProgressStatus state, String name) {
    // The runner is expected to report only the EXECUTING transition.
    assertThat(state).isEqualTo(ProgressStatus.EXECUTING);
  }
}
}
| |
package pw.koj.jetstreem.compiler;
import java.util.*;
import pw.koj.jetstreem.parser.*;
import pw.koj.jetstreem.compiler.ir.*;
public class AstToIrVisitor implements Visitor {
// dummy standard lib symbols
private static final String[] BUILTIN = {
    "stdin", "stdout", "seq", "map", "each", "filter",
    "tcp_server", "tcp_socket", "chan", "print"
};
// Shared reference table holding the built-in names above; consulted as a
// fallback when an identifier is not found in any user scope.
private static RefTable builtIn;
static {
    builtIn = new NsRefTable("pw/koj/jetstreem/runtime/BuiltIn");
    for (String s : BUILTIN) {
        builtIn.addLocal(s);
    }
}
// Per-transform() compilation state.
private Context ctx;
private ArrayList<String> pvs;
private int vlvarIndex;
// Entry point
public IrNode transform(NamespaceNode ast) throws CompileError {
    // Fresh context per compilation; lowering the root namespace yields the
    // IR tree for the whole program.
    ctx = new Context();
    return visit(ast);
}
/**
 * Lowers a namespace definition: creates (or rejects a duplicate of) the
 * Namespace IR node, then lowers its statements inside that namespace scope.
 */
public IrNode visit(NamespaceNode nsNode) throws CompileError {
    String nsName = nsNode.getName();
    List<IrNode> stmts = new LinkedList<>();
    NsRefTable refTable = new NsRefTable(nsName);
    Namespace currentNs = ctx.peekNsStack();
    Namespace ns;
    if (currentNs == null) {
        // Root namespace: no parent.
        ns = new Namespace(nsName, stmts, refTable, null);
    }
    else if (currentNs.hasChild(nsName)) {
        throw new CompileError("duplicate namespace definition");
    }
    else {
        // Nested namespace: link it under the enclosing one.
        ns = new Namespace(nsName, stmts, refTable, currentNs);
        currentNs.addChild(ns);
    }
    // The stmts list is shared with the Namespace node, so visiting the
    // children fills the namespace body in place.
    ctx.enterNsTo(ns);
    for (Node stmt : nsNode.getStmts()) {
        stmts.add(stmt.accept(this));
    }
    ctx.exitNs();
    return ns;
}
/**
 * Lowers an import: resolves the identifier against the current namespace
 * and wraps the resolved namespace in an Import IR node.
 */
public IrNode visit(ImportNode imp) throws CompileError {
    String target = imp.getIdentifier();
    Namespace resolved = ctx.peekNsStack().lookupNs(target);
    if (resolved != null) {
        return new Import(resolved);
    }
    throw new CompileError("namespace not found: " + target);
}
/**
 * Lowers `let lhs = rhs`. The lhs must be a fresh name in the current
 * reference table; re-binding an existing name is a compile error.
 */
public IrNode visit(LetNode let) throws CompileError {
    RefTable refTable = ctx.peekRefTableStack();
    String name = ((IdentifierNode)let.getLhs()).getName();
    if (refTable.hasLocal(name)) {
        throw new CompileError("duplicate assignment: " + name);
    }
    // NOTE(review): the name is registered before the rhs is visited, which
    // makes it visible inside its own initializer — presumably intentional
    // (e.g. recursive lambdas); confirm.
    refTable.addLocal(name);
    IrNode rhs = let.getRhs().accept(this);
    return new Let(name, rhs);
}
/** A `skip` statement carries no data; lower it to a bare Skip node. */
public IrNode visit(SkipNode skp) throws CompileError {
    return new Skip();
}
/**
 * Lowers an emit statement: each argument expression is converted to IR and
 * bundled into an Emit node.
 */
public IrNode visit(EmitNode emt) throws CompileError {
    List<IrNode> lowered = new ArrayList<>();
    for (Node arg : emt.getArgs().getData()) {
        lowered.add(arg.accept(this));
    }
    return new Emit(lowered);
}
/** Lowers a {@code return} statement by lowering each returned value. */
public IrNode visit(ReturnNode ret) throws CompileError {
    final List<IrNode> lowered = new ArrayList<>();
    for (Node arg : ret.getArgs().getData()) {
        lowered.add(arg.accept(this));
    }
    return new Return(lowered);
}
/**
 * Lowers a lambda. A parameterless brace block becomes a {@code Block}
 * and introduces no new scope; a real lambda gets a fresh function
 * ref-table holding its arguments while the body is lowered.
 */
public IrNode visit(LambdaNode lambda) throws CompileError {
    if (lambda.isBlock()) {
        // Plain block: lower statements in the surrounding scope.
        final List<IrNode> stmts = new LinkedList<>();
        for (Node stmt : lambda.getBody()) {
            stmts.add(stmt.accept(this));
        }
        return new Block(stmts);
    }
    final FuncRefTable table = new FuncRefTable();
    for (IdentifierNode arg : lambda.getArgList()) {
        table.addArg(arg.getName());
    }
    ctx.enterScopeTo(table);
    final List<IrNode> body = new LinkedList<>();
    for (Node stmt : lambda.getBody()) {
        body.add(stmt.accept(this));
    }
    ctx.exitScope();
    return new Function(body, table);
}
/**
 * Resolves an identifier against the current scope, falling back to the
 * runtime built-ins.
 *
 * @throws CompileError if the name resolves nowhere; the message now
 *         names the identifier, matching the "...: name" style of the
 *         other diagnostics in this visitor (e.g. "namespace not found:")
 */
public IrNode visit(IdentifierNode id) throws CompileError {
    RefTable current = ctx.peekRefTableStack();
    String name = id.getName();
    RefTable ref = current.resolveRef(name);
    if (ref == null) {
        // Not a user-defined name; try the built-in table before failing.
        ref = builtIn.resolveRef(name);
        if (ref == null) {
            throw new CompileError("variable not defined: " + name);
        }
    }
    return new VarRef(name, ref);
}
/**
 * Lowers an array/record literal: elements are lowered in order, headers
 * are carried over, and an optional namespace qualifier is resolved.
 *
 * @throws CompileError if the qualifier names an unknown namespace; the
 *         message now includes the name, consistent with the ImportNode
 *         diagnostic ("namespace not found: " + id)
 */
public IrNode visit(ArrayNode arr) throws CompileError {
    GenArray ar = new GenArray();
    for (Node expr : arr.getData()) {
        ar.add(expr.accept(this));
    }
    ar.setHeaders(arr.getHeaders());
    String nsName = arr.getNs();
    if (nsName != null) {
        Namespace ns = ctx.peekNsStack().lookupNs(nsName);
        if (ns == null) {
            throw new CompileError("namespace not found: " + nsName);
        }
        ar.setNs(ns);
    }
    return ar;
}
/** Lowers a key/value pair; only the value is an expression to lower. */
public IrNode visit(PairNode pair) throws CompileError {
    return new Pair(pair.getKey(), pair.getValue().accept(this));
}
/** Lowers a splat ({@code *expr}) by lowering its inner expression. */
public IrNode visit(SplatNode splt) throws CompileError {
    return new Splat(splt.getNode().accept(this));
}
/**
 * Lowers a conditional: the two-argument {@code CondBranch} form is used
 * when there is no else-part.
 */
public IrNode visit(IfNode ifn) throws CompileError {
    final IrNode condition = ifn.getCond().accept(this);
    final IrNode thenPart = ifn.getThenBody().accept(this);
    final Node elseBody = ifn.getElseBody();
    return elseBody == null
        ? new CondBranch(condition, thenPart)
        : new CondBranch(condition, thenPart, elseBody.accept(this));
}
/** Lowers a binary operation; operands are visited left then right. */
public IrNode visit(BinaryOpNode bin) throws CompileError {
    final String op = bin.getOperator();
    return new BinaryOp(op, bin.getLhs().accept(this), bin.getRhs().accept(this));
}
/** Lowers a unary operation. */
public IrNode visit(UnaryOpNode una) throws CompileError {
    return new UnaryOp(una.getOperator(), una.getExpr().accept(this));
}
/**
 * Lowers a pipeline call. An unresolved reference is tolerated (left
 * null) rather than rejected — see the TBD below about runtime resolve.
 */
public IrNode visit(CallNode call) throws CompileError {
    //TBD need modification, runtime ref resolve
    final String name = call.getIdentifier().getName();
    RefTable ref = ctx.peekRefTableStack().resolveRef(name);
    if (ref == null) {
        ref = builtIn.resolveRef(name);
    }
    final ArrayNode argArray = call.getArgs();
    final ArrayList<IrNode> lowered = new ArrayList<>();
    for (Node arg : argArray.getData()) {
        lowered.add(arg.accept(this));
    }
    return new Call(name, ref, lowered, argArray.getHeaders());
}
/** Lowers a string literal to its IR constant. */
public IrNode visit(StringLiteralNode strn) throws CompileError {
    return new StringConstant(strn.getValue());
}
/** Lowers an integer literal to its IR constant. */
public IrNode visit(IntegerLiteralNode intn) throws CompileError {
    return new IntegerConstant(intn.getValue());
}
/** Lowers a floating-point literal to its IR constant. */
public IrNode visit(DoubleLiteralNode doublen) throws CompileError {
    return new DoubleConstant(doublen.getValue());
}
/** Lowers a time literal to its IR constant. */
public IrNode visit(TimeLiteralNode time) throws CompileError {
    return new TimeConstant(time.getValue());
}
/** Lowers the {@code nil} literal. */
public IrNode visit(NilNode nil) throws CompileError {
    return new Nil();
}
/** Lowers a boolean literal to its IR constant. */
public IrNode visit(BoolNode bool) throws CompileError {
    return new BoolConstant(bool.getValue());
}
/**
 * Lowers a generic-function reference, falling back to the built-in
 * table when the name is not in scope (the result may still be null).
 */
public IrNode visit(GenFuncNode genf) throws CompileError {
    final String name = genf.getIdentifier().getName();
    RefTable ref = ctx.peekRefTableStack().resolveRef(name);
    if (ref == null) {
        ref = builtIn.resolveRef(name);
    }
    return new GenericFunc(name, ref);
}
/**
 * Lowers a function call written with an explicit argument list.
 * NOTE(review): unlike the CallNode/GenFuncNode paths, this does not
 * fall back to {@code builtIn.resolveRef} when the name is unresolved,
 * so {@code ref} may be null here even for a built-in — confirm whether
 * that asymmetry is intentional.
 */
public IrNode visit(FunCallNode fcall) throws CompileError {
    RefTable current = ctx.peekRefTableStack();
    String name = fcall.getId().getName();
    RefTable ref = current.resolveRef(name);
    ArrayNode ar = fcall.getArgs();
    List<Node> data = ar.getData();
    ArrayList<IrNode> args = new ArrayList<>();
    for (Node a : data) {
        args.add(a.accept(this));
    }
    return new FunCall(name, ref, args, ar.getHeaders());
}
/**
 * Lowers a chain of pattern-lambda arms into a single {@code PatternFunc}.
 * Each arm owns one slot of locals in the shared PatternFuncRefTable;
 * variables collected into {@code pvs} while the arm's pattern is visited
 * become that slot's locals before the condition/body are lowered.
 */
public IrNode visit(PatternLambdaNode plambda) throws CompileError {
    // Flatten the linked arms into a list and count them.
    List<PatternLambdaNode> plst = new ArrayList<>();
    int nArms = 0;
    for (PatternLambdaNode p = plambda; p != null; p = p.getNext()) {
        plst.add(p);
        nArms++;
    }
    PatternFuncRefTable refTable = new PatternFuncRefTable(nArms);
    PatternFunc pf = new PatternFunc(refTable);
    ctx.enterScopeTo(refTable);
    int idx = 0;
    for (PatternLambdaNode pl : plst) {
        // Reset per-arm bookkeeping: pattern visits append to pvs and may
        // set vlvarIndex (the "*var" slot) as a side effect.
        pvs = new ArrayList<>();
        vlvarIndex = -1;
        FuncArm arm = new FuncArm(pl.getPattern().accept(this));
        // Switch the table to this arm's slot, then register the pattern
        // variables so the condition and body can reference them.
        refTable.switchToNewLocalRefs(idx++);
        for (String arg : pvs) {
            refTable.addLocal(arg);
        }
        if (pl.getCondition() != null) {
            arm.setCondition(pl.getCondition().accept(this));
        }
        List<IrNode> body = new LinkedList<>();
        for (Node stmt : pl.getBody()) {
            body.add(stmt.accept(this));
        }
        arm.setBody(body);
        pf.add(arm);
    }
    ctx.exitScope();
    return pf;
}
/**
 * Lowers a splat pattern (head, "*mid", tail). The legal shapes are:
 * struct head with a vlen tail; array head with optional array tail;
 * no head with an array tail; or a bare "*mid". Anything else is an
 * error.
 */
public IrNode visit(PatternSplatNode psplt) throws CompileError {
    Node headNode = psplt.getHead();
    PatternVlenVarNode midNode = psplt.getMid();
    Node tailNode = psplt.getTail();
    if (headNode instanceof PatternStructNode) {
        // {k: v, ... , *rest} — struct pattern with a vlen remainder.
        IrNode head = headNode.accept(this);
        IrNode vvar = midNode.accept(this);
        return new PatternStruct((PatternStruct)head, vvar);
    }
    else if (headNode instanceof PatternArrayNode) {
        // [a, b, *rest] possibly followed by a fixed tail [.., x, y].
        IrNode head = headNode.accept(this);
        IrNode vvar = midNode.accept(this);
        if (tailNode instanceof PatternArrayNode) {
            IrNode tail = tailNode.accept(this);
            return new PatternArray(head, vvar, tail);
        }
        return new PatternArray(head, vvar, null);
    }
    else if (headNode == null) {
        if (tailNode instanceof PatternArrayNode) {
            // [*rest, x, y] — vlen prefix with a fixed tail.
            IrNode vvar = midNode.accept(this);
            IrNode tail = tailNode.accept(this);
            return new PatternArray(null, vvar, tail);
        }
        else if (tailNode == null) {
            // [*rest] — the whole value binds to the vlen variable.
            return new PatternArray(null, midNode.accept(this), null);
        }
    }
    throw new CompileError("illegal splat pattern");
}
/** Lowers a fixed-length array pattern element by element. */
public IrNode visit(PatternArrayNode parr) throws CompileError {
    final PatternArray result = new PatternArray();
    for (Node element : parr.getData()) {
        result.add(element.accept(this));
    }
    return result;
}
/** Lowers a struct pattern; every entry is a key paired with a sub-pattern. */
public IrNode visit(PatternStructNode pstruct) throws CompileError {
    final PatternStruct result = new PatternStruct();
    for (Node entry : pstruct.getData()) {
        final PairNode pair = (PairNode) entry;
        result.add(pair.getKey(), pair.getValue().accept(this));
    }
    return result;
}
/** Lowers a namespace-qualified pattern ({@code Ns:pattern}). */
public IrNode visit(PatternNamespaceNode pns) throws CompileError {
    final PatternNamespace result = new PatternNamespace();
    result.setName(pns.getName());
    result.setPattern(pns.getPattern().accept(this));
    return result;
}
/**
 * Lowers a plain pattern variable: {@code _} matches anything without
 * binding; a fresh name binds a new slot; a repeated name becomes a
 * reference that must equal the earlier binding.
 *
 * @throws CompileError when the name was already taken by the arm's
 *         variable-length ({@code *name}) variable
 */
public IrNode visit(PatternVarNode pvar) throws CompileError {
    IrNode pattern;
    String name = pvar.getName();
    if (name.equals("_")) { return new PatternVarBind(-1); }
    int idx = pvs.lastIndexOf(name);
    if (idx < 0) {
        pattern = new PatternVarBind(pvs.size());
        pvs.add(name);
    }
    else if (idx != vlvarIndex) {
        pattern = new PatternVarRef(idx);
    }
    else {
        // Fixed typo in diagnostic: "occured" -> "occurred".
        throw new CompileError("invalid pattern: *" + name + " already occurred");
    }
    return pattern;
}
/**
 * Binds a variable-length pattern variable ({@code *name}); the name
 * must be fresh, and {@code *_} is rejected outright.
 */
public IrNode visit(PatternVlenVarNode pvlvar) throws CompileError {
    final String name = pvlvar.getName();
    if (name.equals("_")) {
        throw new CompileError("invalid pattern: *_");
    }
    if (pvs.lastIndexOf(name) >= 0) {
        throw new CompileError("invalid pattern: *" + name);
    }
    // Remember which slot holds the vlen variable, then register it.
    final int slot = pvs.size();
    vlvarIndex = slot;
    pvs.add(name);
    return new PatternVlenVarBind(slot);
}
/** Lowers a numeric pattern into an integer or double constant pattern. */
public IrNode visit(PatternNumberNode pnum) throws CompileError {
    final NumberLiteralNode num = pnum.getNumber();
    if (num instanceof IntegerLiteralNode) {
        return new PatternInteger(((IntegerLiteralNode) num).getValue());
    }
    if (num instanceof DoubleLiteralNode) {
        return new PatternDouble(((DoubleLiteralNode) num).getValue());
    }
    throw new CompileError("not a number pattern");
}
/** Lowers a string pattern. */
public IrNode visit(PatternStringNode pstr) throws CompileError {
    return new PatternString(pstr.getStr());
}
/** Lowers the {@code nil} pattern. */
public IrNode visit(PatternNilNode pnil) throws CompileError {
    return new PatternNil();
}
/** Lowers a boolean pattern. */
public IrNode visit(PatternBoolNode pbool) throws CompileError {
    return new PatternBool(pbool.getBool());
}
/** ArgsNode is consumed by its parent visits and must never reach here. */
public IrNode visit(ArgsNode node) throws CompileError {
    throw new CompileError("internal error: AST to IR ArgsNode");
}
/** TypeNode is consumed by its parent visits and must never reach here. */
public IrNode visit(TypeNode node) throws CompileError {
    throw new CompileError("internal error: AST to IR TypeNode");
}
}
| |
/*
Copyright 2016 Goldman Sachs.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
// Portions copyright Hiroshi Ito. Licensed under Apache 2.0 license
package com.gs.fw.common.mithra.attribute;
import com.gs.fw.common.mithra.AggregateData;
import com.gs.fw.common.mithra.MithraBusinessException;
import com.gs.fw.common.mithra.MithraDataObject;
import com.gs.fw.common.mithra.aggregate.attribute.LongAggregateAttribute;
import com.gs.fw.common.mithra.attribute.calculator.AbsoluteValueCalculatorLong;
import com.gs.fw.common.mithra.attribute.calculator.aggregateFunction.AverageCalculatorNumeric;
import com.gs.fw.common.mithra.attribute.calculator.aggregateFunction.MaxCalculatorNumeric;
import com.gs.fw.common.mithra.attribute.calculator.aggregateFunction.MinCalculatorNumeric;
import com.gs.fw.common.mithra.attribute.calculator.aggregateFunction.SumCalculatorNumeric;
import com.gs.fw.common.mithra.attribute.numericType.BigDecimalNumericType;
import com.gs.fw.common.mithra.attribute.numericType.DoubleNumericType;
import com.gs.fw.common.mithra.attribute.numericType.FloatNumericType;
import com.gs.fw.common.mithra.attribute.numericType.LongNumericType;
import com.gs.fw.common.mithra.attribute.numericType.NumericType;
import com.gs.fw.common.mithra.attribute.update.AttributeUpdateWrapper;
import com.gs.fw.common.mithra.attribute.update.LongNullUpdateWrapper;
import com.gs.fw.common.mithra.databasetype.DatabaseType;
import com.gs.fw.common.mithra.extractor.Extractor;
import com.gs.fw.common.mithra.extractor.LongExtractor;
import com.gs.fw.common.mithra.finder.None;
import com.gs.fw.common.mithra.finder.Operation;
import com.gs.fw.common.mithra.finder.orderby.LongOrderBy;
import com.gs.fw.common.mithra.finder.orderby.OrderBy;
import com.gs.fw.common.mithra.util.HashUtil;
import com.gs.fw.common.mithra.util.MutableLong;
import com.gs.fw.common.mithra.util.MutableNumber;
import com.gs.fw.common.mithra.util.Nullable;
import com.gs.fw.common.mithra.util.serializer.ReladomoSerializationContext;
import com.gs.fw.common.mithra.util.serializer.SerialWriter;
import org.eclipse.collections.api.set.primitive.LongSet;
import org.eclipse.collections.api.set.primitive.MutableLongSet;
import org.eclipse.collections.impl.set.mutable.primitive.LongHashSet;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.text.Format;
import java.text.ParseException;
import java.util.List;
import java.util.Map;
import java.util.TimeZone;
/**
 * Base class for {@code long}-valued Reladomo attributes. Supplies the
 * primitive plumbing (serialization, null handling, hashing, ordering,
 * in-clause construction, aggregation, and arithmetic combination with
 * the other numeric attribute types); concrete finder-generated
 * subclasses implement the abstract comparison/join operations.
 *
 * Fix in this revision: the InvocationTargetException message in
 * zPopulateValueFromResultSet was missing a space ("...of class").
 */
public abstract class LongAttribute<T> extends PrimitiveNumericAttribute<T, Long> implements com.gs.fw.finder.attribute.LongAttribute<T>, LongExtractor<T, Long>
{
    // Lazily built and cached; transient so they are rebuilt after
    // deserialization.
    private transient OrderBy ascendingOrderBy;
    private transient OrderBy descendingOrderBy;
    private static final long serialVersionUID = -302964650737340928L;

    @Override
    public Class valueType()
    {
        return Long.class;
    }

    @Override
    protected void serializedNonNullValue(T o, ObjectOutput out) throws IOException
    {
        out.writeLong(this.longValueOf(o));
    }

    @Override
    protected void deserializedNonNullValue(T o, ObjectInput in) throws IOException
    {
        this.setLongValue(o, in.readLong());
    }

    @Override
    public Operation nonPrimitiveEq(Object other)
    {
        if (other == null) return this.isNull();
        return this.eq(((Number) other).longValue());
    }

    public abstract Operation eq(long other);

    public abstract Operation notEq(long other);

    @Override
    public abstract Operation in(LongSet longSet);

    @Override
    public abstract Operation notIn(LongSet longSet);

    public abstract Operation greaterThan(long target);

    public abstract Operation greaterThanEquals(long target);

    public abstract Operation lessThan(long target);

    public abstract Operation lessThanEquals(long target);

    // join operation:
    /**
     * @deprecated use joinEq or filterEq instead
     * @param other Attribute to join to
     * @return Operation corresponding to the join
     **/
    @Deprecated
    public abstract Operation eq(LongAttribute other);

    public abstract Operation joinEq(LongAttribute other);

    public abstract Operation filterEq(LongAttribute other);

    public abstract Operation notEq(LongAttribute other);

    @Override
    public void copyValueFrom(T dest, T src)
    {
        if (this.isAttributeNull(src))
        {
            this.setValueNull(dest);
        }
        else
        {
            this.setValue(dest, this.longValueOf(src));
        }
    }

    public Long valueOf(T o)
    {
        return isAttributeNull(o) ? null : Long.valueOf(this.longValueOf(o));
    }

    public void setValue(T o, Long newValue)
    {
        this.setLongValue(o, newValue.longValue());
    }

    public int valueHashCode(T o)
    {
        return (this.isAttributeNull(o)) ? HashUtil.NULL_HASH : HashUtil.hash(this.longValueOf(o));
    }

    @Override
    protected boolean primitiveValueEquals(T first, T second)
    {
        return this.longValueOf(first) == this.longValueOf(second);
    }

    @Override
    protected <O> boolean primitiveValueEquals(T first, O second, Extractor<O, Long> secondExtractor)
    {
        return this.longValueOf(first) == ((LongExtractor) secondExtractor).longValueOf(second);
    }

    @Override
    public OrderBy ascendingOrderBy()
    {
        if (this.ascendingOrderBy == null)
        {
            this.ascendingOrderBy = new LongOrderBy(this, true);
        }
        return this.ascendingOrderBy;
    }

    @Override
    public OrderBy descendingOrderBy()
    {
        if (this.descendingOrderBy == null)
        {
            this.descendingOrderBy = new LongOrderBy(this, false);
        }
        return this.descendingOrderBy;
    }

    @Override
    public Operation in(final List objects, final Extractor extractor)
    {
        // Null attribute values are skipped: they can never match an IN.
        final LongExtractor longExtractor = (LongExtractor) extractor;
        final MutableLongSet set = new LongHashSet();
        for (int i = 0, n = objects.size(); i < n; i++)
        {
            final Object o = objects.get(i);
            if (!longExtractor.isAttributeNull(o))
            {
                set.add(longExtractor.longValueOf(o));
            }
        }
        return this.in(set);
    }

    @Override
    public Operation in(final Iterable objects, final Extractor extractor)
    {
        final LongExtractor longExtractor = (LongExtractor) extractor;
        final MutableLongSet set = new LongHashSet();
        for (Object o : objects)
        {
            if (!longExtractor.isAttributeNull(o))
            {
                set.add(longExtractor.longValueOf(o));
            }
        }
        return this.in(set);
    }

    @Override
    public Operation zInWithMax(int maxInClause, List objects, Extractor extractor)
    {
        // Bails out with None once the distinct-value count exceeds the
        // database's IN-clause limit.
        LongExtractor longExtractor = (LongExtractor) extractor;
        MutableLongSet set = new LongHashSet();
        for (int i = 0; i < objects.size(); i++)
        {
            Object o = objects.get(i);
            if (!longExtractor.isAttributeNull(o))
            {
                set.add(longExtractor.longValueOf(o));
                if (set.size() > maxInClause)
                {
                    return new None(this);
                }
            }
        }
        return this.in(set);
    }

    @Override
    public void parseNumberAndSet(double value, T data, int lineNumber) throws ParseException
    {
        // Reject values outside long range or with a fractional part.
        if (value > Long.MAX_VALUE || value < Long.MIN_VALUE || Math.floor(value) != value)
        {
            throw new ParseException("Incorrect long value " + value + " on line " +
                    lineNumber + " for attribute " + this.getClass().getName(), lineNumber);
        }
        this.setLongValue(data, (long) value);
    }

    public void populateValueFromResultSet(int position, ResultSet rs, Object[] values)
            throws SQLException
    {
        values[position] = Long.valueOf(rs.getLong(position));
    }

    @Override
    public void parseStringAndSet(String value, T data, int lineNumber, Format format) throws ParseException
    {
        this.setLongValue(data, Long.parseLong(value));
    }

    @Override
    public void setValueUntil(T o, Long newValue, Timestamp exclusiveUntil)
    {
        this.setUntil(o, newValue.longValue(), exclusiveUntil);
    }

    protected void setUntil(Object o, long l, Timestamp exclusiveUntil)
    {
        throw new RuntimeException("not implemented");
    }

    public String valueOfAsString(T object, Formatter formatter)
    {
        return formatter.format(this.longValueOf(object));
    }

    @Override
    public int zCountUniqueInstances(MithraDataObject[] dataObjects)
    {
        // Fast path: only allocate a set once a second distinct value is
        // seen; a null first value short-circuits to 1.
        if (this.isAttributeNull((T) dataObjects[0]))
        {
            return 1;
        }
        long firstValue = this.longValueOf((T) dataObjects[0]);
        MutableLongSet set = null;
        for (int i = 1; i < dataObjects.length; i++)
        {
            long nextValue = this.longValueOf((T) dataObjects[i]);
            if (set != null)
            {
                set.add(nextValue);
            }
            else if (nextValue != firstValue)
            {
                set = new LongHashSet();
                set.add(firstValue);
                set.add(nextValue);
            }
        }
        if (set != null)
        {
            return set.size();
        }
        return 1;
    }

    @Override
    public void zPopulateAggregateDataValue(int position, Object value, AggregateData data)
    {
        data.setValueAt((position), new MutableLong((Long) value));
    }

    public NumericType getNumericType()
    {
        return LongNumericType.getInstance();
    }

    @Override
    public void zPopulateValueFromResultSet(int resultSetPosition, int dataPosition, ResultSet rs, Object object, Method method, TimeZone databaseTimezone, DatabaseType dt, Object[] tempArray) throws SQLException
    {
        long l = rs.getLong(resultSetPosition);
        if (rs.wasNull())
        {
            tempArray[0] = null;
        }
        else
        {
            tempArray[0] = l;
        }
        try
        {
            method.invoke(object, tempArray);
        }
        catch (IllegalArgumentException e)
        {
            if (tempArray[0] == null && method.getParameterTypes()[0].isPrimitive())
            {
                throw new MithraBusinessException("Aggregate result returned null for " + method.getName() + " of class " + object.getClass().getName() + " which cannot be set as primitive", e);
            }
            throw new MithraBusinessException("Invalid argument " + tempArray[0] + " passed in invoking method " + method.getName() + " of class " + object.getClass().getName(), e);
        }
        catch (IllegalAccessException e)
        {
            throw new MithraBusinessException("No valid access to invoke method " + method.getName() + " of class " + object.getClass().getName(), e);
        }
        catch (InvocationTargetException e)
        {
            // Fixed: message previously read "...getName() + \"of class\"" with no space.
            throw new MithraBusinessException("Error invoking method " + method.getName() + " of class " + object.getClass().getName(), e);
        }
    }

    @Override
    public void zPopulateValueFromResultSet(int resultSetPosition, int dataPosition, ResultSet rs, AggregateData data, TimeZone databaseTimezone, DatabaseType dt)
            throws SQLException
    {
        MutableLong obj;
        long i = rs.getLong(resultSetPosition);
        if (rs.wasNull())
        {
            obj = new MutableLong();
        }
        else
        {
            obj = new MutableLong(i);
        }
        data.setValueAt(dataPosition, obj);
    }

    @Override
    public void serializeNonNullAggregateDataValue(Nullable valueWrappedInNullable, ObjectOutput out) throws IOException
    {
        out.writeLong(((MutableNumber)valueWrappedInNullable).longValue());
    }

    @Override
    public Nullable deserializeNonNullAggregateDataValue(ObjectInput in) throws IOException, ClassNotFoundException
    {
        return new MutableLong(in.readLong());
    }

    // ByteAttribute operands
    public LongAttribute plus(com.gs.fw.finder.attribute.ByteAttribute attribute)
    {
        return LongNumericType.getInstance().createCalculatedAttribute(LongNumericType.getInstance().createAdditionCalculator(this, (ByteAttribute) attribute));
    }

    public LongAttribute minus(com.gs.fw.finder.attribute.ByteAttribute attribute)
    {
        return LongNumericType.getInstance().createCalculatedAttribute(LongNumericType.getInstance().createSubtractionCalculator(this, (ByteAttribute) attribute));
    }

    public LongAttribute times(com.gs.fw.finder.attribute.ByteAttribute attribute)
    {
        return LongNumericType.getInstance().createCalculatedAttribute(LongNumericType.getInstance().createMultiplicationCalculator(this, (ByteAttribute) attribute));
    }

    public LongAttribute dividedBy(com.gs.fw.finder.attribute.ByteAttribute attribute)
    {
        return LongNumericType.getInstance().createCalculatedAttribute(LongNumericType.getInstance().createDivisionCalculator(this, (ByteAttribute) attribute));
    }

    // ShortAttribute operands
    public LongAttribute plus(com.gs.fw.finder.attribute.ShortAttribute attribute)
    {
        return LongNumericType.getInstance().createCalculatedAttribute(LongNumericType.getInstance().createAdditionCalculator(this, (ShortAttribute) attribute));
    }

    public LongAttribute minus(com.gs.fw.finder.attribute.ShortAttribute attribute)
    {
        return LongNumericType.getInstance().createCalculatedAttribute(LongNumericType.getInstance().createSubtractionCalculator(this, (ShortAttribute) attribute));
    }

    public LongAttribute times(com.gs.fw.finder.attribute.ShortAttribute attribute)
    {
        return LongNumericType.getInstance().createCalculatedAttribute(LongNumericType.getInstance().createMultiplicationCalculator(this, (ShortAttribute) attribute));
    }

    public LongAttribute dividedBy(com.gs.fw.finder.attribute.ShortAttribute attribute)
    {
        return LongNumericType.getInstance().createCalculatedAttribute(LongNumericType.getInstance().createDivisionCalculator(this, (ShortAttribute) attribute));
    }

    // IntegerAttribute operands
    public LongAttribute plus(com.gs.fw.finder.attribute.IntegerAttribute attribute)
    {
        return LongNumericType.getInstance().createCalculatedAttribute(LongNumericType.getInstance().createAdditionCalculator(this, (IntegerAttribute) attribute));
    }

    public LongAttribute minus(com.gs.fw.finder.attribute.IntegerAttribute attribute)
    {
        return LongNumericType.getInstance().createCalculatedAttribute(LongNumericType.getInstance().createSubtractionCalculator(this, (IntegerAttribute) attribute));
    }

    public LongAttribute times(com.gs.fw.finder.attribute.IntegerAttribute attribute)
    {
        return LongNumericType.getInstance().createCalculatedAttribute(LongNumericType.getInstance().createMultiplicationCalculator(this, (IntegerAttribute) attribute));
    }

    public LongAttribute dividedBy(com.gs.fw.finder.attribute.IntegerAttribute attribute)
    {
        return LongNumericType.getInstance().createCalculatedAttribute(LongNumericType.getInstance().createDivisionCalculator(this, (IntegerAttribute) attribute));
    }

    // LongAttribute operands
    public LongAttribute plus(com.gs.fw.finder.attribute.LongAttribute attribute)
    {
        return LongNumericType.getInstance().createCalculatedAttribute(LongNumericType.getInstance().createAdditionCalculator(this, (LongAttribute) attribute));
    }

    public LongAttribute minus(com.gs.fw.finder.attribute.LongAttribute attribute)
    {
        return LongNumericType.getInstance().createCalculatedAttribute(LongNumericType.getInstance().createSubtractionCalculator(this, (LongAttribute) attribute));
    }

    public LongAttribute times(com.gs.fw.finder.attribute.LongAttribute attribute)
    {
        return LongNumericType.getInstance().createCalculatedAttribute(LongNumericType.getInstance().createMultiplicationCalculator(this, (LongAttribute) attribute));
    }

    public LongAttribute dividedBy(com.gs.fw.finder.attribute.LongAttribute attribute)
    {
        return LongNumericType.getInstance().createCalculatedAttribute(LongNumericType.getInstance().createDivisionCalculator(this, (LongAttribute) attribute));
    }

    // FloatAttribute operands
    public FloatAttribute plus(com.gs.fw.finder.attribute.FloatAttribute attribute)
    {
        return FloatNumericType.getInstance().createCalculatedAttribute(FloatNumericType.getInstance().createAdditionCalculator(this, (FloatAttribute) attribute));
    }

    public FloatAttribute minus(com.gs.fw.finder.attribute.FloatAttribute attribute)
    {
        return FloatNumericType.getInstance().createCalculatedAttribute(FloatNumericType.getInstance().createSubtractionCalculator(this, (FloatAttribute) attribute));
    }

    public FloatAttribute times(com.gs.fw.finder.attribute.FloatAttribute attribute)
    {
        return FloatNumericType.getInstance().createCalculatedAttribute(FloatNumericType.getInstance().createMultiplicationCalculator(this, (FloatAttribute) attribute));
    }

    public FloatAttribute dividedBy(com.gs.fw.finder.attribute.FloatAttribute attribute)
    {
        return FloatNumericType.getInstance().createCalculatedAttribute(FloatNumericType.getInstance().createDivisionCalculator(this, (FloatAttribute) attribute));
    }

    // DoubleAttribute operands
    public DoubleAttribute plus(com.gs.fw.finder.attribute.DoubleAttribute attribute)
    {
        return DoubleNumericType.getInstance().createCalculatedAttribute(DoubleNumericType.getInstance().createAdditionCalculator(this, (DoubleAttribute) attribute));
    }

    public DoubleAttribute minus(com.gs.fw.finder.attribute.DoubleAttribute attribute)
    {
        return DoubleNumericType.getInstance().createCalculatedAttribute(DoubleNumericType.getInstance().createSubtractionCalculator(this, (DoubleAttribute) attribute));
    }

    public DoubleAttribute times(com.gs.fw.finder.attribute.DoubleAttribute attribute)
    {
        return DoubleNumericType.getInstance().createCalculatedAttribute(DoubleNumericType.getInstance().createMultiplicationCalculator(this, (DoubleAttribute) attribute));
    }

    public DoubleAttribute dividedBy(com.gs.fw.finder.attribute.DoubleAttribute attribute)
    {
        return DoubleNumericType.getInstance().createCalculatedAttribute(DoubleNumericType.getInstance().createDivisionCalculator(this, (DoubleAttribute) attribute));
    }

    // BigDecimalAttribute operands
    public BigDecimalAttribute plus(BigDecimalAttribute attribute)
    {
        return BigDecimalNumericType.getInstance().createCalculatedAttribute(BigDecimalNumericType.getInstance().createAdditionCalculator(this, attribute));
    }

    public BigDecimalAttribute minus(BigDecimalAttribute attribute)
    {
        return BigDecimalNumericType.getInstance().createCalculatedAttribute(BigDecimalNumericType.getInstance().createSubtractionCalculator(this, attribute));
    }

    public BigDecimalAttribute times(BigDecimalAttribute attribute)
    {
        return BigDecimalNumericType.getInstance().createCalculatedAttribute(BigDecimalNumericType.getInstance().createMultiplicationCalculator(this, attribute));
    }

    public BigDecimalAttribute dividedBy(BigDecimalAttribute attribute)
    {
        return BigDecimalNumericType.getInstance().createCalculatedAttribute(BigDecimalNumericType.getInstance().createDivisionCalculator(this, attribute, attribute.getScale()));
    }

    @Override
    public LongAttribute absoluteValue()
    {
        return new CalculatedLongAttribute(new AbsoluteValueCalculatorLong(this));
    }

    // Double-dispatch helpers so NumericAttribute arithmetic picks the
    // correct overload for this operand type.
    public NumericAttribute zDispatchAddTo(NumericAttribute firstAddend)
    {
        return firstAddend.plus(this);
    }

    public NumericAttribute zDispatchSubtractFrom(NumericAttribute minuend)
    {
        return minuend.minus(this);
    }

    public NumericAttribute zDispatchMultiplyBy(NumericAttribute firstMultiplicand)
    {
        return firstMultiplicand.times(this);
    }

    public NumericAttribute zDispatchDivideInto(NumericAttribute dividend)
    {
        return dividend.dividedBy(this);
    }

    @Override
    public String zGetSqlForDatabaseType(DatabaseType databaseType)
    {
        return databaseType.getSqlDataTypeForLong();
    }

    @Override
    public AttributeUpdateWrapper zConstructNullUpdateWrapper(MithraDataObject data)
    {
        return new LongNullUpdateWrapper(this, data);
    }

    @Override
    public Operation zGetOperationFromOriginal(Object original, Attribute left, Map tempOperationPool)
    {
        if (left.isAttributeNull(original))
        {
            return this.isNull();
        }
        return this.eq(((LongAttribute)left).longValueOf(original));
    }

    @Override
    public Operation zGetPrototypeOperation(Map<Attribute, Object> tempOperationPool)
    {
        return this.eq(0);
    }

    @Override
    public Operation zGetOperationFromResult(T result, Map<Attribute, Object> tempOperationPool)
    {
        if (this.isAttributeNull(result))
        {
            return this.isNull();
        }
        return this.eq(this.longValueOf(result));
    }

    @Override
    public LongAggregateAttribute min()
    {
        return new LongAggregateAttribute(new MinCalculatorNumeric(this));
    }

    @Override
    public LongAggregateAttribute max()
    {
        return new LongAggregateAttribute(new MaxCalculatorNumeric(this));
    }

    public LongAggregateAttribute sum()
    {
        return new LongAggregateAttribute(new SumCalculatorNumeric(this));
    }

    public LongAggregateAttribute avg()
    {
        return new LongAggregateAttribute(new AverageCalculatorNumeric(this));
    }

    @Override
    protected void zWriteNonNullSerial(ReladomoSerializationContext context, SerialWriter writer, T reladomoObject) throws IOException
    {
        writer.writeLong(context, this.getAttributeName(), this.longValueOf(reladomoObject));
    }
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.refactoring.introduceField;
import com.intellij.codeInsight.highlighting.HighlightManager;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.colors.EditorColors;
import com.intellij.openapi.editor.colors.EditorColorsManager;
import com.intellij.openapi.editor.markup.RangeHighlighter;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.wm.WindowManager;
import com.intellij.psi.*;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.refactoring.HelpID;
import com.intellij.refactoring.RefactoringBundle;
import com.intellij.refactoring.ui.TypeSelectorManagerImpl;
import com.intellij.refactoring.util.CommonRefactoringUtil;
import com.intellij.refactoring.util.RefactoringUtil;
import com.intellij.refactoring.util.classMembers.ClassMemberReferencesVisitor;
import com.intellij.refactoring.util.occurences.ExpressionOccurenceManager;
import com.intellij.refactoring.util.occurences.OccurenceManager;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
public class IntroduceConstantHandler extends BaseExpressionToFieldHandler {
/** User-visible title of the Introduce Constant refactoring. */
public static final String REFACTORING_NAME = RefactoringBundle.message("introduce.constant.title");
/** @return help topic id used by this refactoring's dialogs and error hints. */
protected String getHelpID() {
    return HelpID.INTRODUCE_CONSTANT;
}
/**
 * Batch entry point. Aborts silently when any containing file cannot be
 * made writable; otherwise commits pending documents and delegates to
 * the base handler.
 */
public void invoke(Project project, PsiExpression[] expressions) {
    for (PsiExpression expression : expressions) {
        if (!CommonRefactoringUtil.checkReadOnlyStatus(project, expression.getContainingFile())) {
            return;
        }
    }
    PsiDocumentManager.getInstance(project).commitAllDocuments();
    super.invoke(project, expressions, null);
}
/**
 * Editor entry point: verifies the file is writable, commits pending
 * documents, then lets ElementToWorkOn pick the expression/variable at
 * the caret and hand it to this handler's element processor.
 */
public void invoke(@NotNull final Project project, final Editor editor, PsiFile file, DataContext dataContext) {
    if (!CommonRefactoringUtil.checkReadOnlyStatus(project, file)) return;
    PsiDocumentManager.getInstance(project).commitAllDocuments();
    ElementToWorkOn.processElementToWorkOn(editor, file, REFACTORING_NAME, getHelpID(), project, getElementProcessor(project, editor));
}
/**
 * Converts an existing local variable into a constant. The anonymous
 * LocalToFieldHandler (created with isConstant = true — presumably; the
 * boolean's meaning is defined in LocalToFieldHandler, confirm there)
 * routes its dialog through this handler's showRefactoringDialog so the
 * constant-specific UI and checks are applied.
 *
 * @return whether the conversion was performed
 */
protected boolean invokeImpl(final Project project, final PsiLocalVariable localVariable, final Editor editor) {
    final LocalToFieldHandler localToFieldHandler = new LocalToFieldHandler(project, true){
        @Override
        protected Settings showRefactoringDialog(PsiClass aClass,
                                                 PsiLocalVariable local,
                                                 PsiExpression[] occurences,
                                                 boolean isStatic) {
            return IntroduceConstantHandler.this.showRefactoringDialog(project, editor, aClass, local.getInitializer(), local.getType(), occurences, local, null);
        }
    };
    return localToFieldHandler.convertLocalToField(localVariable, editor);
}
protected Settings showRefactoringDialog(Project project,
Editor editor,
PsiClass parentClass,
PsiExpression expr,
PsiType type,
PsiExpression[] occurences,
PsiElement anchorElement,
PsiElement anchorElementIfAll) {
final PsiMethod containingMethod = PsiTreeUtil.getParentOfType(expr != null ? expr : anchorElement, PsiMethod.class);
for (PsiExpression occurrence : occurences) {
if (RefactoringUtil.isAssignmentLHS(occurrence)) {
String message =
RefactoringBundle.getCannotRefactorMessage("Selected expression is used for write");
CommonRefactoringUtil.showErrorHint(project, editor, message, REFACTORING_NAME, getHelpID());
highlightError(project, editor, occurrence);
return null;
}
}
PsiLocalVariable localVariable = null;
if (expr instanceof PsiReferenceExpression) {
PsiElement ref = ((PsiReferenceExpression)expr).resolve();
if (ref instanceof PsiLocalVariable) {
localVariable = (PsiLocalVariable)ref;
}
} else if (anchorElement instanceof PsiLocalVariable) {
localVariable = (PsiLocalVariable)anchorElement;
}
if (localVariable == null) {
final PsiElement errorElement = isStaticFinalInitializer(expr);
if (errorElement != null) {
String message =
RefactoringBundle.getCannotRefactorMessage(RefactoringBundle.message("selected.expression.cannot.be.a.constant.initializer"));
CommonRefactoringUtil.showErrorHint(project, editor, message, REFACTORING_NAME, getHelpID());
highlightError(project, editor, errorElement);
return null;
}
}
else {
final PsiExpression initializer = localVariable.getInitializer();
if (initializer == null) {
String message = RefactoringBundle
.getCannotRefactorMessage(RefactoringBundle.message("variable.does.not.have.an.initializer", localVariable.getName()));
CommonRefactoringUtil.showErrorHint(project, editor, message, REFACTORING_NAME, getHelpID());
return null;
}
final PsiElement errorElement = isStaticFinalInitializer(initializer);
if (errorElement != null) {
String message = RefactoringBundle.getCannotRefactorMessage(
RefactoringBundle.message("initializer.for.variable.cannot.be.a.constant.initializer", localVariable.getName()));
CommonRefactoringUtil.showErrorHint(project, editor, message, REFACTORING_NAME, getHelpID());
highlightError(project, editor, errorElement);
return null;
}
}
final IntroduceConstantDialog dialog =
new IntroduceConstantDialog(project, parentClass, expr, localVariable, false, occurences, getParentClass(),
new TypeSelectorManagerImpl(project, type, containingMethod, expr, occurences));
dialog.show();
if (!dialog.isOK()) {
if (occurences.length > 1) {
WindowManager.getInstance().getStatusBar(project).setInfo(RefactoringBundle.message("press.escape.to.remove.the.highlighting"));
}
return null;
}
return new Settings(dialog.getEnteredName(), dialog.isReplaceAllOccurrences(), true, true,
BaseExpressionToFieldHandler.InitializationPlace.IN_FIELD_DECLARATION, dialog.getFieldVisibility(), localVariable,
dialog.getSelectedType(), dialog.isDeleteVariable(), dialog.getDestinationClass(), dialog.isAnnotateAsNonNls(),
dialog.introduceEnumConstant());
}
private static void highlightError(Project project, Editor editor, PsiElement errorElement) {
if (editor != null) {
final TextAttributes attributes = EditorColorsManager.getInstance().getGlobalScheme().getAttributes(EditorColors.SEARCH_RESULT_ATTRIBUTES);
final TextRange textRange = errorElement.getTextRange();
HighlightManager.getInstance(project).addRangeHighlight(editor, textRange.getStartOffset(), textRange.getEndOffset(), attributes, true, new ArrayList<RangeHighlighter>());
}
}
protected String getRefactoringName() {
return REFACTORING_NAME;
}
@Nullable
private PsiElement isStaticFinalInitializer(PsiExpression expr) {
PsiClass parentClass = expr != null ? getParentClass(expr) : null;
if (parentClass == null) return null;
IsStaticFinalInitializerExpression visitor = new IsStaticFinalInitializerExpression(parentClass, expr);
expr.accept(visitor);
return visitor.getElementReference();
}
protected OccurenceManager createOccurenceManager(final PsiExpression selectedExpr, final PsiClass parentClass) {
return new ExpressionOccurenceManager(selectedExpr, parentClass, null);
}
private static class IsStaticFinalInitializerExpression extends ClassMemberReferencesVisitor {
private PsiElement myElementReference = null;
private final PsiExpression myInitializer;
public IsStaticFinalInitializerExpression(PsiClass aClass, PsiExpression initializer) {
super(aClass);
myInitializer = initializer;
}
@Override
public void visitReferenceExpression(PsiReferenceExpression expression) {
final PsiElement psiElement = expression.resolve();
if ((psiElement instanceof PsiLocalVariable || psiElement instanceof PsiParameter) &&
!PsiTreeUtil.isAncestor(myInitializer, psiElement, false)) {
myElementReference = expression;
}
else {
super.visitReferenceExpression(expression);
}
}
protected void visitClassMemberReferenceElement(PsiMember classMember, PsiJavaCodeReferenceElement classMemberReference) {
if (!classMember.hasModifierProperty(PsiModifier.STATIC)) {
myElementReference = classMemberReference;
}
}
@Override
public void visitElement(PsiElement element) {
if (myElementReference != null) return;
super.visitElement(element);
}
@Nullable
public PsiElement getElementReference() {
return myElementReference;
}
}
public PsiClass getParentClass(@NotNull PsiExpression initializerExpression) {
final PsiType type = initializerExpression.getType();
if (type != null && PsiUtil.isConstantExpression(initializerExpression)) {
if (type instanceof PsiPrimitiveType ||
PsiType.getJavaLangString(initializerExpression.getManager(), initializerExpression.getResolveScope()).equals(type)) {
return super.getParentClass(initializerExpression);
}
}
PsiElement parent = initializerExpression.getUserData(ElementToWorkOn.PARENT);
if (parent == null) parent = initializerExpression;
PsiClass aClass = PsiTreeUtil.getParentOfType(parent, PsiClass.class);
while (aClass != null) {
if (aClass.hasModifierProperty(PsiModifier.STATIC)) return aClass;
if (aClass.getParent() instanceof PsiJavaFile) return aClass;
aClass = PsiTreeUtil.getParentOfType(aClass, PsiClass.class);
}
return null;
}
protected boolean validClass(PsiClass parentClass, Editor editor) {
return true;
}
}
| |
// Copyright (C) 2015 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.extensions.api.groups;
import com.google.gerrit.extensions.client.ListGroupsOption;
import com.google.gerrit.extensions.common.GroupInfo;
import com.google.gerrit.extensions.restapi.NotImplementedException;
import com.google.gerrit.extensions.restapi.RestApiException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumSet;
import java.util.List;
import java.util.Map;
/** Top-level API entry point for Gerrit group operations. */
public interface Groups {
  /**
   * Look up a group by ID.
   * <p>
   * <strong>Note:</strong> This method eagerly reads the group. Methods that
   * mutate the group do not necessarily re-read the group. Therefore, calling a
   * getter method on an instance after calling a mutation method on that same
   * instance is not guaranteed to reflect the mutation. It is not recommended
   * to store references to {@code groupApi} instances.
   *
   * @param id any identifier supported by the REST API, including group name or
   *     UUID.
   * @return API for accessing the group.
   * @throws RestApiException if an error occurred.
   */
  GroupApi id(String id) throws RestApiException;

  /** Create a new group with the given name and default options. */
  GroupApi create(String name) throws RestApiException;

  /** Create a new group. */
  GroupApi create(GroupInput input) throws RestApiException;

  /** @return new request for listing groups. */
  ListRequest list();

  /**
   * Fluent builder describing a group-listing query. Subclasses implement
   * {@link #getAsMap()}; all other accessors just expose the collected state.
   */
  abstract class ListRequest {
    private final EnumSet<ListGroupsOption> options =
        EnumSet.noneOf(ListGroupsOption.class);
    private final List<String> projects = new ArrayList<>();
    private final List<String> groups = new ArrayList<>();

    private boolean visibleToAll;
    private String user;
    private boolean owned;
    private int limit;
    private int start;
    private String substring;
    private String suggest;

    /**
     * Executes the request and returns the matching groups as an immutable
     * list, restoring each group's name from its map key.
     */
    public List<GroupInfo> get() throws RestApiException {
      Map<String, GroupInfo> byName = getAsMap();
      List<GroupInfo> infos = new ArrayList<>(byName.size());
      for (Map.Entry<String, GroupInfo> entry : byName.entrySet()) {
        GroupInfo info = entry.getValue();
        // ListGroups "helpfully" nulls out names when converting to a map.
        info.name = entry.getKey();
        infos.add(info);
      }
      return Collections.unmodifiableList(infos);
    }

    /** Executes the request and returns the matching groups keyed by name. */
    public abstract Map<String, GroupInfo> getAsMap() throws RestApiException;

    /** Requests an additional output option. */
    public ListRequest addOption(ListGroupsOption option) {
      options.add(option);
      return this;
    }

    /** Requests several additional output options at once. */
    public ListRequest addOptions(ListGroupsOption... options) {
      return addOptions(Arrays.asList(options));
    }

    /** Requests several additional output options at once. */
    public ListRequest addOptions(Iterable<ListGroupsOption> options) {
      for (ListGroupsOption option : options) {
        addOption(option);
      }
      return this;
    }

    /** Restricts results to groups visible on the given project. */
    public ListRequest withProject(String project) {
      projects.add(project);
      return this;
    }

    /** Restricts results to the group with the given UUID (repeatable). */
    public ListRequest addGroup(String uuid) {
      groups.add(uuid);
      return this;
    }

    /** Restricts results to groups that are (or are not) visible to all. */
    public ListRequest withVisibleToAll(boolean visible) {
      visibleToAll = visible;
      return this;
    }

    /** Restricts results to groups containing the given user. */
    public ListRequest withUser(String user) {
      this.user = user;
      return this;
    }

    /** Restricts results to groups owned by the calling user. */
    public ListRequest withOwned(boolean owned) {
      this.owned = owned;
      return this;
    }

    /** Caps the number of returned groups. */
    public ListRequest withLimit(int limit) {
      this.limit = limit;
      return this;
    }

    /** Skips the first {@code start} results (for paging). */
    public ListRequest withStart(int start) {
      this.start = start;
      return this;
    }

    /** Restricts results to group names containing the given substring. */
    public ListRequest withSubstring(String substring) {
      this.substring = substring;
      return this;
    }

    /** Enables suggestion mode for the given prefix. */
    public ListRequest withSuggest(String suggest) {
      this.suggest = suggest;
      return this;
    }

    public EnumSet<ListGroupsOption> getOptions() {
      return options;
    }

    public List<String> getProjects() {
      return Collections.unmodifiableList(projects);
    }

    public List<String> getGroups() {
      return Collections.unmodifiableList(groups);
    }

    public boolean getVisibleToAll() {
      return visibleToAll;
    }

    public String getUser() {
      return user;
    }

    public boolean getOwned() {
      return owned;
    }

    public int getLimit() {
      return limit;
    }

    public int getStart() {
      return start;
    }

    public String getSubstring() {
      return substring;
    }

    public String getSuggest() {
      return suggest;
    }
  }

  /**
   * A default implementation which allows source compatibility
   * when adding new methods to the interface.
   **/
  class NotImplemented implements Groups {
    @Override
    public GroupApi id(String id) throws RestApiException {
      throw new NotImplementedException();
    }

    @Override
    public GroupApi create(String name) throws RestApiException {
      throw new NotImplementedException();
    }

    @Override
    public GroupApi create(GroupInput input) throws RestApiException {
      throw new NotImplementedException();
    }

    @Override
    public ListRequest list() {
      throw new NotImplementedException();
    }
  }
}
| |
/**
* Copyright 2014-2016 CyberVision, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaaproject.kaa.demo.events.activities;
import android.app.Dialog;
import android.content.Context;
import android.content.DialogInterface;
import android.support.annotation.NonNull;
import android.support.annotation.StringRes;
import android.support.design.widget.FloatingActionButton;
import android.support.design.widget.TextInputLayout;
import android.support.v7.app.AlertDialog;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.Toolbar;
import android.view.LayoutInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.EditText;
import android.widget.TextView;
import org.kaaproject.kaa.demo.events.EventsDemoApp;
import org.kaaproject.kaa.demo.events.R;
import org.kaaproject.kaa.demo.events.utils.KaaChatManager;
import org.kaaproject.kaa.examples.event.Chat;
import org.kaaproject.kaa.examples.event.ChatEvent;
import org.kaaproject.kaa.examples.event.Message;
/**
 * Main screen of the events demo: shows the list of chat rooms, lets the user
 * create/delete rooms and edit their nickname, and refreshes the list when
 * CREATE/DELETE chat events arrive via {@link KaaChatManager}.
 */
public class MainActivity extends AppCompatActivity implements Chat.Listener {

    Toolbar mToolbar;
    FloatingActionButton mFloatingActionButton;
    RecyclerView mRecyclerView;

    private KaaChatManager mKaaChatManager;
    private ChatAdapter mChatAdapter;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        mKaaChatManager = EventsDemoApp.app(this).getKaaChatManager();

        // Toolbar shows the current username and hosts the "edit nickname" action.
        mToolbar = (Toolbar) findViewById(R.id.toolbar);
        mToolbar.setTitle(EventsDemoApp.app(this).username());
        mToolbar.inflateMenu(R.menu.menu_main_activity);
        mToolbar.setOnMenuItemClickListener(new Toolbar.OnMenuItemClickListener() {
            @Override
            public boolean onMenuItemClick(MenuItem item) {
                switch (item.getItemId()) {
                    case R.id.edit_nickname:
                        // Prompt for a new nickname, persist it and update the title.
                        showDialog(MainActivity.this, R.string.activity_main_edit_nickname_hint,
                                new OnTextInputListener() {
                                    @Override
                                    public void onTextInput(String text) {
                                        EventsDemoApp.app(MainActivity.this).newUsername(text);
                                        mToolbar.setTitle(text);
                                    }
                                });
                        break;
                    default:
                        return false;
                }
                return true;
            }
        });

        // FAB prompts for a room name and creates a new chat room.
        mFloatingActionButton = (FloatingActionButton) findViewById(R.id.fab);
        mFloatingActionButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                showDialog(MainActivity.this, R.string.activity_main_new_chat_hint,
                        new OnTextInputListener() {
                            @Override
                            public void onTextInput(String text) {
                                mKaaChatManager.createChatRoom(text);
                                mChatAdapter.notifyDataSetChanged();
                            }
                        });
            }
        });

        mRecyclerView = (RecyclerView) findViewById(R.id.chats);
        mRecyclerView.setLayoutManager(new LinearLayoutManager(this));
        mChatAdapter = new ChatAdapter(mKaaChatManager);
        mRecyclerView.setAdapter(mChatAdapter);
    }

    @Override
    protected void onStart() {
        super.onStart();
        // Refresh in case rooms changed while this activity was stopped, then
        // start listening for chat events.
        mChatAdapter.notifyDataSetChanged();
        mKaaChatManager.addChatListener(this);
    }

    @Override
    protected void onStop() {
        super.onStop();
        // Stop receiving chat events while not visible.
        mKaaChatManager.removeChatListener(this);
    }

    @Override
    public void onEvent(final ChatEvent chatEvent, String s) {
        // Only room-level CREATE/DELETE events affect this screen's list.
        // NOTE(review): invoked from a Kaa callback — presumably on the main
        // thread, since notifyDataSetChanged() requires it; confirm upstream.
        switch (chatEvent.getEventType()) {
            case CREATE:
            case DELETE:
                mChatAdapter.notifyDataSetChanged();
                break;
            default:
                break;
        }
    }

    @Override
    public void onEvent(Message message, String s) {
        // messages are ignored
    }

    /**
     * Shows a single-text-input dialog.
     *
     * @param context  context used to build the dialog
     * @param hint     string resource for the input field's hint
     * @param callback invoked with the entered text when OK is pressed
     */
    static void showDialog(Context context,
                           @StringRes int hint,
                           @NonNull final OnTextInputListener callback) {
        final View promptsView =
                LayoutInflater.from(context).inflate(R.layout.dialog_input_text, null);

        final AlertDialog.Builder alertDialogBuilder = new AlertDialog.Builder(
                context);
        alertDialogBuilder.setView(promptsView);

        final TextInputLayout textInputLayout =
                (TextInputLayout) promptsView.findViewById(R.id.input_text_layout);
        textInputLayout.setHint(context.getString(hint));

        final EditText userInputEditText = (EditText) promptsView
                .findViewById(R.id.input_text);

        final DialogInterface.OnClickListener clickListener = new DialogInterface.OnClickListener() {
            @Override
            public void onClick(DialogInterface dialog, int which) {
                switch (which) {
                    case Dialog.BUTTON_POSITIVE:
                        callback.onTextInput(userInputEditText.getText().toString());
                        //fallthrough
                    case Dialog.BUTTON_NEGATIVE:
                        dialog.dismiss();
                        break;
                }
            }
        };

        alertDialogBuilder
                .setPositiveButton(R.string.input_text_dialog_ok, clickListener)
                .setNegativeButton(R.string.input_text_dialog_cancel, clickListener);

        final AlertDialog alertDialog = alertDialogBuilder.create();
        alertDialog.show();
    }

    /** Callback for {@link #showDialog}: receives the text the user entered. */
    interface OnTextInputListener {
        void onTextInput(String text);
    }

    /**
     * Adapter for the chat list
     */
    static final class ChatAdapter extends RecyclerView.Adapter<ChatAdapter.ChatViewHolder> {

        private final KaaChatManager mKaaChatManager;

        ChatAdapter(KaaChatManager kaaChatManager) {
            mKaaChatManager = kaaChatManager;
        }

        @Override
        public ChatViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
            // Each row is a plain single-line text item.
            return new ChatViewHolder(LayoutInflater.from(parent.getContext())
                    .inflate(android.R.layout.simple_list_item_1, parent, false));
        }

        @Override
        public void onBindViewHolder(ChatViewHolder holder, int position) {
            final String chatName = mKaaChatManager.getChats().get(position);
            ((TextView) holder.itemView).setText(chatName);
            // Long-press deletes the room and refreshes the list.
            holder.itemView.setOnLongClickListener(new View.OnLongClickListener() {
                @Override
                public boolean onLongClick(View v) {
                    mKaaChatManager.deleteChatRoom(chatName);
                    notifyDataSetChanged();
                    return true;
                }
            });
        }

        @Override
        public int getItemCount() {
            return mKaaChatManager.getChats().size();
        }

        /** Row holder: tapping a row opens the corresponding chat screen. */
        static final class ChatViewHolder extends RecyclerView.ViewHolder
                implements View.OnClickListener {

            ChatViewHolder(View itemView) {
                super(itemView);
                itemView.setOnClickListener(this);
            }

            @Override
            public void onClick(View v) {
                ChatScreenActivity.open(v.getContext(), ((TextView) itemView).getText().toString());
            }
        }
    }
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.engine.test.api.event;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertNotEquals;
import java.util.ArrayList;
import java.util.List;
import org.flowable.bpmn.model.FlowNode;
import org.flowable.engine.common.api.delegate.event.FlowableEntityEvent;
import org.flowable.engine.common.api.delegate.event.FlowableEvent;
import org.flowable.engine.common.api.delegate.event.FlowableEventListener;
import org.flowable.engine.delegate.event.FlowableActivityCancelledEvent;
import org.flowable.engine.delegate.event.FlowableCancelledEvent;
import org.flowable.engine.delegate.event.FlowableEngineEventType;
import org.flowable.engine.delegate.event.FlowableProcessStartedEvent;
import org.flowable.engine.delegate.event.impl.FlowableActivityCancelledEventImpl;
import org.flowable.engine.impl.delegate.event.FlowableEngineEntityEvent;
import org.flowable.engine.impl.persistence.entity.ExecutionEntity;
import org.flowable.engine.impl.test.PluggableFlowableTestCase;
import org.flowable.engine.repository.ProcessDefinition;
import org.flowable.engine.runtime.Execution;
import org.flowable.engine.runtime.ProcessInstance;
import org.flowable.engine.task.Task;
import org.flowable.engine.test.Deployment;
/**
* Test case for all {@link FlowableEvent}s related to process instances.
*
* @author Tijs Rademakers
*/
public class ProcessInstanceEventsTest extends PluggableFlowableTestCase {
    // Records engine events fired during each test; presumably registered and
    // cleared in the test setup/teardown, which is not visible in this chunk.
    private TestInitializedEntityEventListener listener;
    /**
     * Test create, update and delete events of process instances: verifies the
     * exact event sequence for start, suspend/activate (directly and cascaded
     * via the process definition), business-key update and deletion.
     */
    @Deployment(resources = { "org/flowable/engine/test/api/runtime/oneTaskProcess.bpmn20.xml" })
    public void testProcessInstanceEvents() throws Exception {
        ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess");

        assertNotNull(processInstance);

        // Check create-event: 3 events for the process instance execution,
        // 2 for the start-event execution, plus the PROCESS_STARTED event.
        assertEquals(6, listener.getEventsReceived().size());
        assertTrue(listener.getEventsReceived().get(0) instanceof FlowableEngineEntityEvent);

        // process instance create event
        FlowableEngineEntityEvent event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(0);
        assertEquals(FlowableEngineEventType.ENTITY_CREATED, event.getType());
        assertEquals(processInstance.getId(), ((ProcessInstance) event.getEntity()).getId());
        assertEquals(processInstance.getId(), event.getProcessInstanceId());
        assertEquals(processInstance.getId(), event.getExecutionId());
        assertEquals(processInstance.getProcessDefinitionId(), event.getProcessDefinitionId());

        event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(1);
        assertEquals(FlowableEngineEventType.PROCESS_CREATED, event.getType());
        assertEquals(processInstance.getId(), event.getProcessInstanceId());
        assertEquals(processInstance.getId(), event.getExecutionId());
        assertEquals(processInstance.getProcessDefinitionId(), event.getProcessDefinitionId());

        event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(2);
        assertEquals(FlowableEngineEventType.ENTITY_INITIALIZED, event.getType());
        assertEquals(processInstance.getId(), event.getProcessInstanceId());
        assertEquals(processInstance.getId(), event.getExecutionId());
        assertEquals(processInstance.getProcessDefinitionId(), event.getProcessDefinitionId());

        // start event create event (child execution, hence execution id differs)
        event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(3);
        assertEquals(FlowableEngineEventType.ENTITY_CREATED, event.getType());
        assertEquals(processInstance.getId(), event.getProcessInstanceId());
        assertNotEquals(processInstance.getId(), event.getExecutionId());
        assertEquals(processInstance.getProcessDefinitionId(), event.getProcessDefinitionId());

        // start event create initialized
        event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(4);
        assertEquals(FlowableEngineEventType.ENTITY_INITIALIZED, event.getType());
        assertEquals(processInstance.getId(), event.getProcessInstanceId());
        assertNotEquals(processInstance.getId(), event.getExecutionId());
        assertEquals(processInstance.getProcessDefinitionId(), event.getProcessDefinitionId());

        // PROCESS_STARTED carries no nested-process info for a top-level start.
        event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(5);
        assertEquals(FlowableEngineEventType.PROCESS_STARTED, event.getType());
        assertEquals(processInstance.getId(), event.getProcessInstanceId());
        assertEquals(processInstance.getProcessDefinitionId(), event.getProcessDefinitionId());
        assertTrue(event instanceof FlowableProcessStartedEvent);
        assertNull(((FlowableProcessStartedEvent) event).getNestedProcessDefinitionId());
        assertNull(((FlowableProcessStartedEvent) event).getNestedProcessInstanceId());
        listener.clearEventsReceived();

        // Check update event when suspended/activated: both the process instance
        // and its child execution emit SUSPENDED/ACTIVATED events.
        runtimeService.suspendProcessInstanceById(processInstance.getId());
        runtimeService.activateProcessInstanceById(processInstance.getId());

        assertEquals(4, listener.getEventsReceived().size());
        event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(0);
        assertEquals(processInstance.getId(), ((ProcessInstance) event.getEntity()).getId());
        assertEquals(FlowableEngineEventType.ENTITY_SUSPENDED, event.getType());
        assertEquals(processInstance.getId(), event.getProcessInstanceId());
        assertEquals(processInstance.getId(), event.getExecutionId());
        assertEquals(processInstance.getProcessDefinitionId(), event.getProcessDefinitionId());
        event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(1);
        assertEquals(FlowableEngineEventType.ENTITY_SUSPENDED, event.getType());
        assertEquals(processInstance.getId(), event.getProcessInstanceId());
        assertNotEquals(processInstance.getId(), event.getExecutionId());
        assertEquals(processInstance.getProcessDefinitionId(), event.getProcessDefinitionId());
        event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(2);
        assertEquals(FlowableEngineEventType.ENTITY_ACTIVATED, event.getType());
        assertEquals(processInstance.getId(), ((ProcessInstance) event.getEntity()).getId());
        assertEquals(processInstance.getId(), event.getProcessInstanceId());
        assertEquals(processInstance.getId(), event.getExecutionId());
        assertEquals(processInstance.getProcessDefinitionId(), event.getProcessDefinitionId());
        event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(3);
        assertEquals(FlowableEngineEventType.ENTITY_ACTIVATED, event.getType());
        assertEquals(processInstance.getId(), event.getProcessInstanceId());
        assertNotEquals(processInstance.getId(), event.getExecutionId());
        assertEquals(processInstance.getProcessDefinitionId(), event.getProcessDefinitionId());
        listener.clearEventsReceived();

        // Check update event when process-definition is suspended (should
        // cascade suspend/activate all process instances)
        repositoryService.suspendProcessDefinitionById(processInstance.getProcessDefinitionId(), true, null);
        repositoryService.activateProcessDefinitionById(processInstance.getProcessDefinitionId(), true, null);

        assertEquals(4, listener.getEventsReceived().size());
        event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(0);
        assertEquals(processInstance.getId(), ((ProcessInstance) event.getEntity()).getId());
        assertEquals(FlowableEngineEventType.ENTITY_SUSPENDED, event.getType());
        assertEquals(processInstance.getId(), event.getProcessInstanceId());
        assertEquals(processInstance.getId(), event.getExecutionId());
        assertEquals(processInstance.getProcessDefinitionId(), event.getProcessDefinitionId());
        event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(1);
        assertEquals(FlowableEngineEventType.ENTITY_SUSPENDED, event.getType());
        assertEquals(processInstance.getId(), event.getProcessInstanceId());
        assertNotEquals(processInstance.getId(), event.getExecutionId());
        assertEquals(processInstance.getProcessDefinitionId(), event.getProcessDefinitionId());
        event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(2);
        assertEquals(FlowableEngineEventType.ENTITY_ACTIVATED, event.getType());
        assertEquals(processInstance.getId(), ((ProcessInstance) event.getEntity()).getId());
        assertEquals(processInstance.getId(), event.getProcessInstanceId());
        assertEquals(processInstance.getId(), event.getExecutionId());
        assertEquals(processInstance.getProcessDefinitionId(), event.getProcessDefinitionId());
        event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(3);
        assertEquals(FlowableEngineEventType.ENTITY_ACTIVATED, event.getType());
        assertEquals(processInstance.getId(), event.getProcessInstanceId());
        assertNotEquals(processInstance.getId(), event.getExecutionId());
        assertEquals(processInstance.getProcessDefinitionId(), event.getProcessDefinitionId());
        listener.clearEventsReceived();

        // Check update-event when business-key is updated
        runtimeService.updateBusinessKey(processInstance.getId(), "thekey");
        assertEquals(1, listener.getEventsReceived().size());
        event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(0);
        assertEquals(processInstance.getId(), ((ProcessInstance) event.getEntity()).getId());
        assertEquals(FlowableEngineEventType.ENTITY_UPDATED, event.getType());
        assertEquals(processInstance.getId(), event.getProcessInstanceId());
        assertEquals(processInstance.getId(), event.getExecutionId());
        assertEquals(processInstance.getProcessDefinitionId(), event.getProcessDefinitionId());
        listener.clearEventsReceived();

        // Deleting the instance must emit exactly one PROCESS_CANCELLED event.
        runtimeService.deleteProcessInstance(processInstance.getId(), "Testing events");

        List<FlowableEvent> processCancelledEvents = listener.filterEvents(FlowableEngineEventType.PROCESS_CANCELLED);
        assertEquals(1, processCancelledEvents.size());
        FlowableCancelledEvent cancelledEvent = (FlowableCancelledEvent) processCancelledEvents.get(0);
        assertEquals(FlowableEngineEventType.PROCESS_CANCELLED, cancelledEvent.getType());
        assertEquals(processInstance.getId(), cancelledEvent.getProcessInstanceId());
        assertEquals(processInstance.getId(), cancelledEvent.getExecutionId());
        listener.clearEventsReceived();
    }
    /**
     * Test create and start events when a process spawns a nested sub-process
     * instance: verifies the event sequence for the parent instance, and that
     * the sub-process events carry the parent as the "nested" process info.
     */
    @Deployment(resources = { "org/flowable/engine/test/api/runtime/nestedSubProcess.bpmn20.xml", "org/flowable/engine/test/api/runtime/subProcess.bpmn20.xml" })
    public void testSubProcessInstanceEvents() throws Exception {
        ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("nestedSimpleSubProcess");
        assertNotNull(processInstance);
        String processDefinitionId = processInstance.getProcessDefinitionId();

        // Check create-event one main process the second one Scope execution, and the third one subprocess
        assertEquals(11, listener.getEventsReceived().size());
        assertTrue(listener.getEventsReceived().get(0) instanceof FlowableEngineEntityEvent);

        // process instance created event
        FlowableEngineEntityEvent event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(0);
        assertEquals(FlowableEngineEventType.ENTITY_CREATED, event.getType());
        assertEquals(processInstance.getId(), ((ProcessInstance) event.getEntity()).getId());
        assertEquals(processInstance.getId(), event.getProcessInstanceId());
        assertEquals(processInstance.getId(), event.getExecutionId());
        assertEquals(processDefinitionId, event.getProcessDefinitionId());

        event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(1);
        String processExecutionId = event.getExecutionId();
        assertEquals(FlowableEngineEventType.PROCESS_CREATED, event.getType());
        assertEquals(processInstance.getId(), event.getProcessInstanceId());
        assertEquals(processInstance.getId(), processExecutionId);
        assertEquals(processDefinitionId, event.getProcessDefinitionId());

        event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(2);
        processExecutionId = event.getExecutionId();
        assertEquals(FlowableEngineEventType.ENTITY_INITIALIZED, event.getType());
        assertEquals(processInstance.getId(), event.getProcessInstanceId());
        assertEquals(processInstance.getId(), processExecutionId);
        assertEquals(processDefinitionId, event.getProcessDefinitionId());

        // start event created event (child execution of the parent instance)
        event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(3);
        processExecutionId = event.getExecutionId();
        assertEquals(FlowableEngineEventType.ENTITY_CREATED, event.getType());
        assertEquals(processInstance.getId(), event.getProcessInstanceId());
        assertNotEquals(processInstance.getId(), processExecutionId);
        assertEquals(processDefinitionId, event.getProcessDefinitionId());

        // start event initialized event
        event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(4);
        assertEquals(FlowableEngineEventType.ENTITY_INITIALIZED, event.getType());
        assertEquals(processInstance.getId(), event.getProcessInstanceId());
        assertNotEquals(processInstance.getId(), ((ExecutionEntity) event.getEntity()).getId());

        // Process start: top-level start, so no nested-process info yet.
        event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(5);
        assertEquals(FlowableEngineEventType.PROCESS_STARTED, event.getType());
        assertEquals(processInstance.getId(), event.getProcessInstanceId());
        assertEquals(processInstance.getProcessDefinitionId(), event.getProcessDefinitionId());
        assertTrue(event instanceof FlowableProcessStartedEvent);
        assertNull(((FlowableProcessStartedEvent) event).getNestedProcessDefinitionId());
        assertNull(((FlowableProcessStartedEvent) event).getNestedProcessInstanceId());

        // sub process instance created event; its super-execution is the
        // parent's call-activity execution captured above.
        event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(6);
        assertEquals(FlowableEngineEventType.ENTITY_CREATED, event.getType());
        ExecutionEntity subProcessEntity = (ExecutionEntity) event.getEntity();
        assertEquals(processExecutionId, subProcessEntity.getSuperExecutionId());
        String subProcessInstanceId = subProcessEntity.getProcessInstanceId();

        event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(7);
        assertEquals(FlowableEngineEventType.PROCESS_CREATED, event.getType());
        subProcessEntity = (ExecutionEntity) event.getEntity();
        assertEquals(processExecutionId, subProcessEntity.getSuperExecutionId());

        // sub process instance start created event
        event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(8);
        assertEquals(FlowableEngineEventType.ENTITY_CREATED, event.getType());
        assertEquals(subProcessInstanceId, event.getProcessInstanceId());
        assertNotEquals(subProcessInstanceId, event.getExecutionId());
        String subProcessDefinitionId = ((ExecutionEntity) event.getEntity()).getProcessDefinitionId();
        assertNotNull(subProcessDefinitionId);
        ProcessDefinition subProcessDefinition = repositoryService.getProcessDefinition(subProcessDefinitionId);
        assertEquals("simpleSubProcess", subProcessDefinition.getKey());

        // sub process instance start initialized event
        event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(9);
        assertEquals(FlowableEngineEventType.ENTITY_INITIALIZED, event.getType());
        assertEquals(subProcessInstanceId, event.getProcessInstanceId());
        assertNotEquals(subProcessInstanceId, event.getExecutionId());
        subProcessDefinitionId = ((ExecutionEntity) event.getEntity()).getProcessDefinitionId();
        assertNotNull(subProcessDefinitionId);

        // The sub-process PROCESS_STARTED event must reference the parent as
        // the nested process definition/instance.
        event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(10);
        assertEquals(FlowableEngineEventType.PROCESS_STARTED, event.getType());
        assertEquals(subProcessInstanceId, event.getProcessInstanceId());
        assertEquals(subProcessDefinitionId, event.getProcessDefinitionId());
        assertTrue(event instanceof FlowableProcessStartedEvent);
        assertEquals(processDefinitionId, ((FlowableProcessStartedEvent) event).getNestedProcessDefinitionId());
        assertEquals(processInstance.getId(), ((FlowableProcessStartedEvent) event).getNestedProcessInstanceId());

        listener.clearEventsReceived();
    }
/**
 * Test process with signals start.
 */
@Deployment(resources = { "org/flowable/engine/test/bpmn/event/signal/SignalEventTest.testSignalWithGlobalScope.bpmn20.xml" })
public void testSignalProcessInstanceStart() throws Exception {
    // Start the catching process first so the thrown signal has a live subscription.
    runtimeService.startProcessInstanceByKey("processWithSignalCatch");
    listener.clearEventsReceived();

    // Throwing the global signal must dispatch events without failing the listener.
    runtimeService.startProcessInstanceByKey("processWithSignalThrow");
    listener.clearEventsReceived();
}
/**
 * Test Start->End process on PROCESS_COMPLETED event
 */
@Deployment(resources = { "org/flowable/engine/test/api/event/ProcessInstanceEventsTest.noneTaskProcess.bpmn20.xml" })
public void testProcessCompleted_StartEnd() throws Exception {
    // A process with no wait states completes synchronously on start.
    runtimeService.startProcessInstanceByKey("noneTaskProcess");

    // Message fixed: the enum is FlowableEngineEventType (stale "ActivitiEventType" from before the rename).
    assertEquals("FlowableEngineEventType.PROCESS_COMPLETED was expected 1 time.", 1,
            listener.filterEvents(FlowableEngineEventType.PROCESS_COMPLETED).size());
}
/**
 * Test Start->User Task process on PROCESS_COMPLETED event
 */
@Deployment(resources = { "org/flowable/engine/test/api/event/ProcessInstanceEventsTest.noEndProcess.bpmn20.xml" })
public void testProcessCompleted_NoEnd() throws Exception {
    ProcessInstance noEndProcess = runtimeService.startProcessInstanceByKey("noEndProcess");

    // Completing the single user task finishes the instance even though the
    // model has no explicit end event.
    Task task = taskService.createTaskQuery().processInstanceId(noEndProcess.getId()).singleResult();
    taskService.complete(task.getId());

    // Message fixed: the enum is FlowableEngineEventType (stale "ActivitiEventType" from before the rename).
    assertEquals("FlowableEngineEventType.PROCESS_COMPLETED was expected 1 time.", 1,
            listener.filterEvents(FlowableEngineEventType.PROCESS_COMPLETED).size());
}
/**
 * Test +-->Task1 Start-<> +-->Task1
 *
 * process on PROCESS_COMPLETED event
 */
@Deployment(resources = { "org/flowable/engine/test/api/event/ProcessInstanceEventsTest.parallelGatewayNoEndProcess.bpmn20.xml" })
public void testProcessCompleted_ParallelGatewayNoEnd() throws Exception {
    // Both parallel branches run to their implicit ends synchronously; the
    // instance must still produce exactly one completion event.
    runtimeService.startProcessInstanceByKey("noEndProcess");

    // Message fixed: the enum is FlowableEngineEventType (stale "ActivitiEventType" from before the rename).
    assertEquals("FlowableEngineEventType.PROCESS_COMPLETED was expected 1 time.", 1,
            listener.filterEvents(FlowableEngineEventType.PROCESS_COMPLETED).size());
}
/**
 * Test +-->End1 Start-<> +-->End2
 * <p/>
 * process on PROCESS_COMPLETED event
 */
@Deployment(resources = { "org/flowable/engine/test/api/event/ProcessInstanceEventsTest.parallelGatewayTwoEndsProcess.bpmn20.xml" })
public void testProcessCompleted_ParallelGatewayTwoEnds() throws Exception {
    // Two parallel branches each reach their own end event; only one
    // PROCESS_COMPLETED must be fired for the instance as a whole.
    runtimeService.startProcessInstanceByKey("noEndProcess");

    List<FlowableEvent> events = listener.filterEvents(FlowableEngineEventType.PROCESS_COMPLETED);
    // Message fixed: the enum is FlowableEngineEventType (stale "ActivitiEventType" from before the rename).
    assertEquals("FlowableEngineEventType.PROCESS_COMPLETED was expected 1 time.", 1, events.size());
}
@Deployment(resources = {
        "org/flowable/engine/test/bpmn/event/end/TerminateEndEventTest.testTerminateInCallActivityMulitInstance.bpmn",
        "org/flowable/engine/test/bpmn/event/end/TerminateEndEventTest.subProcessTerminateTerminateAll.bpmn20.xml"})
public void testProcessCompleted_TerminateInCallActivityMultiInstanceTerminateAll() throws Exception {
    // Starting the parent fires the multi-instance call activity; its
    // "terminate all" end events complete every instance immediately.
    runtimeService.startProcessInstanceByKey("terminateEndEventExample");

    List<FlowableEvent> terminateEvents = listener.filterEvents(FlowableEngineEventType.PROCESS_COMPLETED_WITH_TERMINATE_END_EVENT);
    assertEquals("FlowableEventType.PROCESS_COMPLETED_WITH_TERMINATE_END_EVENT was expected 6 times.", 6, terminateEvents.size());
}
/**
 * Deleting a running single-task instance must fire exactly one
 * PROCESS_CANCELLED and one ACTIVITY_CANCELLED event, both carrying the
 * deleted instance's ids and the delete reason as cause.
 */
@Deployment(resources = { "org/flowable/engine/test/api/runtime/oneTaskProcess.bpmn20.xml" })
public void testProcessInstanceCancelledEvents_cancel() throws Exception {
    ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess");
    assertNotNull(processInstance);
    listener.clearEventsReceived();

    runtimeService.deleteProcessInstance(processInstance.getId(), "delete_test");

    // Message fixed: the enum is FlowableEngineEventType (stale "ActivitiEventType" from before the rename).
    List<FlowableEvent> processCancelledEvents = listener.filterEvents(FlowableEngineEventType.PROCESS_CANCELLED);
    assertEquals("FlowableEngineEventType.PROCESS_CANCELLED was expected 1 time.", 1, processCancelledEvents.size());
    FlowableCancelledEvent processCancelledEvent = (FlowableCancelledEvent) processCancelledEvents.get(0);
    assertTrue("The cause has to be the same as deleteProcessInstance method call", FlowableCancelledEvent.class.isAssignableFrom(processCancelledEvent.getClass()));
    assertEquals("The process instance has to be the same as in deleteProcessInstance method call", processInstance.getId(), processCancelledEvent.getProcessInstanceId());
    assertEquals("The execution instance has to be the same as in deleteProcessInstance method call", processInstance.getId(), processCancelledEvent.getExecutionId());
    assertEquals("The cause has to be the same as in deleteProcessInstance method call", "delete_test", processCancelledEvent.getCause());

    // The active user task is reported as a cancelled activity with the same cause.
    List<FlowableEvent> taskCancelledEvents = listener.filterEvents(FlowableEngineEventType.ACTIVITY_CANCELLED);
    assertEquals("FlowableEngineEventType.ACTIVITY_CANCELLED was expected 1 time.", 1, taskCancelledEvents.size());
    FlowableActivityCancelledEvent activityCancelledEvent = (FlowableActivityCancelledEvent) taskCancelledEvents.get(0);
    assertTrue("The cause has to be the same as deleteProcessInstance method call", FlowableActivityCancelledEvent.class.isAssignableFrom(activityCancelledEvent.getClass()));
    assertEquals("The process instance has to be the same as in deleteProcessInstance method call", processInstance.getId(), activityCancelledEvent.getProcessInstanceId());
    assertEquals("The cause has to be the same as in deleteProcessInstance method call", "delete_test", activityCancelledEvent.getCause());

    listener.clearEventsReceived();
}
/**
 * Deleting the parent of a call-activity hierarchy cancels the whole
 * hierarchy: the called instance is cancelled first, then the parent.
 */
@Deployment(resources = { "org/flowable/engine/test/api/runtime/nestedSubProcess.bpmn20.xml", "org/flowable/engine/test/api/runtime/subProcess.bpmn20.xml" })
public void testProcessInstanceCancelledEvents_cancelProcessHierarchy() throws Exception {
    ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("nestedSimpleSubProcess");
    ProcessInstance subProcess = runtimeService.createProcessInstanceQuery().superProcessInstanceId(processInstance.getId()).singleResult();
    assertNotNull(processInstance);
    listener.clearEventsReceived();

    runtimeService.deleteProcessInstance(processInstance.getId(), "delete_test");

    // Message fixed: the enum is FlowableEngineEventType (stale "ActivitiEventType" from before the rename).
    List<FlowableEvent> processCancelledEvents = listener.filterEvents(FlowableEngineEventType.PROCESS_CANCELLED);
    assertEquals("FlowableEngineEventType.PROCESS_CANCELLED was expected 2 times.", 2, processCancelledEvents.size());

    // Index 0: the called (sub) process instance is cancelled first.
    FlowableCancelledEvent processCancelledEvent = (FlowableCancelledEvent) processCancelledEvents.get(0);
    assertTrue("The cause has to be the same as deleteProcessInstance method call", FlowableCancelledEvent.class.isAssignableFrom(processCancelledEvent.getClass()));
    assertEquals("The process instance has to be the same as in deleteProcessInstance method call", subProcess.getId(), processCancelledEvent.getProcessInstanceId());
    assertEquals("The execution instance has to be the same as in deleteProcessInstance method call", subProcess.getId(), processCancelledEvent.getExecutionId());
    assertEquals("The cause has to be the same as in deleteProcessInstance method call", "delete_test", processCancelledEvent.getCause());

    // Index 1: then the parent instance itself.
    processCancelledEvent = (FlowableCancelledEvent) processCancelledEvents.get(1);
    assertTrue("The cause has to be the same as deleteProcessInstance method call", FlowableCancelledEvent.class.isAssignableFrom(processCancelledEvent.getClass()));
    assertEquals("The process instance has to be the same as in deleteProcessInstance method call", processInstance.getId(), processCancelledEvent.getProcessInstanceId());
    assertEquals("The execution instance has to be the same as in deleteProcessInstance method call", processInstance.getId(), processCancelledEvent.getExecutionId());
    assertEquals("The cause has to be the same as in deleteProcessInstance method call", "delete_test", processCancelledEvent.getCause());

    assertEquals("No task can be active for deleted process.", 0, this.taskService.createTaskQuery().processInstanceId(processInstance.getId()).count());

    // Only the active task inside the called process shows up as a cancelled activity.
    List<FlowableEvent> taskCancelledEvents = listener.filterEvents(FlowableEngineEventType.ACTIVITY_CANCELLED);
    assertEquals("FlowableEngineEventType.ACTIVITY_CANCELLED was expected 1 time.", 1, taskCancelledEvents.size());
    FlowableActivityCancelledEvent activityCancelledEvent = (FlowableActivityCancelledEvent) taskCancelledEvents.get(0);
    assertTrue("The cause has to be the same as deleteProcessInstance method call", FlowableActivityCancelledEvent.class.isAssignableFrom(activityCancelledEvent.getClass()));
    assertEquals("The process instance has to point to the subprocess", subProcess.getId(), activityCancelledEvent.getProcessInstanceId());
    assertEquals("The cause has to be the same as in deleteProcessInstance method call", "delete_test", activityCancelledEvent.getCause());

    listener.clearEventsReceived();
}
@Deployment(resources = { "org/flowable/engine/test/api/runtime/oneTaskProcess.bpmn20.xml" })
public void testProcessInstanceCancelledEvents_complete() throws Exception {
    ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess");
    assertNotNull(processInstance);

    // Drive the single-task process to its normal end.
    Task userTask = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
    taskService.complete(userTask.getId());

    // A normal completion must not be reported as any kind of cancellation.
    List<FlowableEvent> processCancelledEvents = listener.filterEvents(FlowableEngineEventType.PROCESS_CANCELLED);
    assertEquals("There should be no FlowableEventType.PROCESS_CANCELLED event after process complete.", 0, processCancelledEvents.size());

    List<FlowableEvent> taskCancelledEvents = listener.filterEvents(FlowableEngineEventType.ACTIVITY_CANCELLED);
    assertEquals("There should be no FlowableEventType.ACTIVITY_CANCELLED event.", 0, taskCancelledEvents.size());
}
@Deployment(resources = { "org/flowable/engine/test/api/runtime/oneTaskProcess.bpmn20.xml" })
public void testProcessInstanceTerminatedEvents_complete() throws Exception {
    ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess");
    assertNotNull(processInstance);

    // Drive the single-task process to its normal end.
    Task task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
    taskService.complete(task.getId());

    // A normally completed process must not emit a cancellation event.
    // Message fixed: the assertion filters PROCESS_CANCELLED, but the old
    // message claimed "PROCESS_TERMINATED" — a type that is never filtered here.
    List<FlowableEvent> processCancelledEvents = listener.filterEvents(FlowableEngineEventType.PROCESS_CANCELLED);
    assertEquals("There should be no FlowableEventType.PROCESS_CANCELLED event after process complete.", 0, processCancelledEvents.size());
}
// Verifies that reaching a terminate end event completes the whole instance with
// PROCESS_COMPLETED_WITH_TERMINATE_END_EVENT and cancels the still-active activities.
@Deployment(resources = "org/flowable/engine/test/bpmn/event/end/TerminateEndEventTest.testProcessTerminate.bpmn")
public void testProcessInstanceTerminatedEvents() throws Exception {
ProcessInstance pi = runtimeService.startProcessInstanceByKey("terminateEndEventExample");
// Sanity check on the started state: three live executions (presumably root + two branches — confirm against the bpmn model).
long executionEntities = runtimeService.createExecutionQuery().processInstanceId(pi.getId()).count();
assertEquals(3, executionEntities);
// Completing this task routes its branch into the terminate end event.
Task task = taskService.createTaskQuery().processInstanceId(pi.getId()).taskDefinitionKey("preTerminateTask").singleResult();
taskService.complete(task.getId());
// Exactly one terminate-completion event, and it belongs to this process instance.
List<FlowableEvent> processTerminatedEvents = listener.filterEvents(FlowableEngineEventType.PROCESS_COMPLETED_WITH_TERMINATE_END_EVENT);
assertEquals("There should be exactly one FlowableEventType.PROCESS_COMPLETED_WITH_TERMINATE_END_EVENT event after the task complete.", 1,
processTerminatedEvents.size());
FlowableEngineEntityEvent processCompletedEvent = (FlowableEngineEntityEvent) processTerminatedEvents.get(0);
assertThat(processCompletedEvent.getProcessInstanceId(), is(pi.getProcessInstanceId()));
// Two activities are reported cancelled: the still-waiting user task on the
// other branch and the terminate end event itself, both caused by 'EndEvent_2'.
List<FlowableEvent> activityTerminatedEvents = listener.filterEvents(FlowableEngineEventType.ACTIVITY_CANCELLED);
assertThat("There should be exactly two FlowableEventType.ACTIVITY_CANCELLED event after the task complete.", activityTerminatedEvents.size(), is(2));
for (FlowableEvent event : activityTerminatedEvents) {
FlowableActivityCancelledEventImpl activityEvent = (FlowableActivityCancelledEventImpl) event;
if (activityEvent.getActivityId().equals("preNormalTerminateTask")) {
assertThat("The user task must be terminated", activityEvent.getActivityId(), is("preNormalTerminateTask"));
// The cancellation cause is the terminate end event flow node.
assertThat("The cause must be terminate end event", ((FlowNode) activityEvent.getCause()).getId(), is("EndEvent_2"));
} else if (activityEvent.getActivityId().equals("EndEvent_2")) {
assertThat("The end event must be terminated", activityEvent.getActivityId(), is("EndEvent_2"));
assertThat("The cause must be terminate end event", ((FlowNode) activityEvent.getCause()).getId(), is("EndEvent_2"));
}
}
}
@Deployment(resources = { "org/flowable/engine/test/bpmn/event/end/TerminateEndEventTest.testTerminateInCallActivity.bpmn",
        "org/flowable/engine/test/bpmn/event/end/TerminateEndEventTest.subProcessTerminate.bpmn" })
public void testProcessInstanceTerminatedEvents_callActivity() throws Exception {
    ProcessInstance pi = runtimeService.startProcessInstanceByKey("terminateEndEventExample");

    // Completing this task sends the called process into its terminate end event.
    Task preNormalEndTask = taskService.createTaskQuery().processInstanceId(pi.getId()).taskDefinitionKey("preNormalEnd").singleResult();
    taskService.complete(preNormalEndTask.getId());
    assertProcessEnded(pi.getId());

    // The terminate-completion event belongs to the called process, not the parent.
    List<FlowableEvent> processTerminatedEvents = listener.filterEvents(FlowableEngineEventType.PROCESS_COMPLETED_WITH_TERMINATE_END_EVENT);
    assertEquals("There should be exactly one FlowableEventType.PROCESS_COMPLETED_WITH_TERMINATE_END_EVENT event after the task complete.", 1,
            processTerminatedEvents.size());
    FlowableEngineEntityEvent terminateEvent = (FlowableEngineEntityEvent) processTerminatedEvents.get(0);
    assertNotEquals(pi.getProcessInstanceId(), terminateEvent.getProcessInstanceId());
    assertThat(terminateEvent.getProcessDefinitionId(), containsString("terminateEndEventSubprocessExample"));
}
@Deployment(resources = { "org/flowable/engine/test/bpmn/event/end/TerminateEndEventTest.testTerminateInSubProcessWithBoundaryTerminateAll.bpmn20.xml"})
public void testTerminateAllInSubProcess() throws Exception {
    ProcessInstance pi = runtimeService.startProcessInstanceByKey("terminateEndEventWithBoundary");

    // Completing this task reaches the "terminate all" end event inside the embedded sub process.
    Task innerTask = taskService.createTaskQuery().processInstanceId(pi.getId()).taskDefinitionKey("preTermInnerTask").singleResult();
    taskService.complete(innerTask.getId());
    assertProcessEnded(pi.getId());

    // Terminate-all ends the root instance itself, so the single
    // terminate-completion event carries the root process instance id.
    List<FlowableEvent> processTerminatedEvents = listener.filterEvents(FlowableEngineEventType.PROCESS_COMPLETED_WITH_TERMINATE_END_EVENT);
    assertEquals("There should be exactly one FlowableEventType.PROCESS_COMPLETED_WITH_TERMINATE_END_EVENT event after the task complete.", 1,
            processTerminatedEvents.size());
    FlowableEngineEntityEvent completedEvent = (FlowableEngineEntityEvent) processTerminatedEvents.get(0);
    assertEquals(pi.getProcessInstanceId(), completedEvent.getProcessInstanceId());
}
// Verifies the terminate end event in the PARENT process: both the called process
// and the parent finish with terminate-completion events, and the activities that
// were still active are reported cancelled with the end event as cause.
@Deployment(resources = { "org/flowable/engine/test/bpmn/event/end/TerminateEndEventTest.testTerminateInParentProcess.bpmn",
"org/flowable/engine/test/api/runtime/oneTaskProcess.bpmn20.xml" })
public void testProcessInstanceTerminatedEvents_terminateInParentProcess() throws Exception {
ProcessInstance pi = runtimeService.startProcessInstanceByKey("terminateParentProcess");
// should terminate the called process and continue the parent
Task task = taskService.createTaskQuery().processInstanceId(pi.getId()).taskDefinitionKey("preTerminateEnd").singleResult();
taskService.complete(task.getId());
assertProcessEnded(pi.getId());
// Two terminate-completion events are expected; index 1 is asserted below to
// be the parent's own completion (presumably index 0 is the called process — confirm ordering).
List<FlowableEvent> processTerminatedEvents = listener.filterEvents(FlowableEngineEventType.PROCESS_COMPLETED_WITH_TERMINATE_END_EVENT);
assertEquals("There should be exactly two FlowableEventType.PROCESS_COMPLETED_WITH_TERMINATE_END_EVENT events after the task complete.", 2,
processTerminatedEvents.size());
FlowableEngineEntityEvent processCompletedEvent = (FlowableEngineEntityEvent) processTerminatedEvents.get(1);
assertThat(processCompletedEvent.getProcessInstanceId(), is(pi.getProcessInstanceId()));
assertThat(processCompletedEvent.getProcessDefinitionId(), containsString("terminateParentProcess"));
// Three cancellations: the user task inside the called process, the call
// activity itself, and the terminate end event — each caused by 'EndEvent_3'.
List<FlowableEvent> activityTerminatedEvents = listener.filterEvents(FlowableEngineEventType.ACTIVITY_CANCELLED);
assertThat("3 activities must be cancelled.", activityTerminatedEvents.size(), is(3));
for (FlowableEvent event : activityTerminatedEvents) {
FlowableActivityCancelledEventImpl activityEvent = (FlowableActivityCancelledEventImpl) event;
if (activityEvent.getActivityId().equals("theTask")) {
assertThat("The user task must be terminated in the called sub process.", activityEvent.getActivityId(), is("theTask"));
assertThat("The cause must be terminate end event", ((FlowNode) activityEvent.getCause()).getId(), is("EndEvent_3"));
} else if (activityEvent.getActivityId().equals("CallActivity_1")) {
assertThat("The call activity must be terminated", activityEvent.getActivityId(), is("CallActivity_1"));
assertThat("The cause must be terminate end event", ((FlowNode) activityEvent.getCause()).getId(), is("EndEvent_3"));
} else if (activityEvent.getActivityId().equals("EndEvent_3")) {
assertThat("The end event must be terminated", activityEvent.getActivityId(), is("EndEvent_3"));
assertThat("The cause must be terminate end event", ((FlowNode) activityEvent.getCause()).getId(), is("EndEvent_3"));
}
}
}
// Verifies that a called process ending in an error end event (caught on the call
// activity boundary) completes with PROCESS_COMPLETED_WITH_ERROR_END_EVENT.
@Deployment(resources = {
"org/flowable/engine/test/bpmn/event/error/BoundaryErrorEventTest.testCatchErrorOnCallActivity-parent.bpmn20.xml",
"org/flowable/engine/test/bpmn/event/error/BoundaryErrorEventTest.subprocess.bpmn20.xml"
})
public void testProcessCompletedEvents_callActivityErrorEndEvent() throws Exception {
ProcessInstance pi = runtimeService.startProcessInstanceByKey("catchErrorOnCallActivity");
Task task = taskService.createTaskQuery().singleResult();
assertEquals("Task in subprocess", task.getName());
// Exactly one called process instance was spawned by the call activity.
List<ProcessInstance> subProcesses = runtimeService.createProcessInstanceQuery().superProcessInstanceId(pi.getId()).list();
assertEquals(1, subProcesses.size());
// Completing the task will reach the end error event,
// which is caught on the call activity boundary
taskService.complete(task.getId());
// The error-end completion event is dispatched for the called process execution.
List<FlowableEvent> processCompletedEvents = listener.filterEvents(FlowableEngineEventType.PROCESS_COMPLETED_WITH_ERROR_END_EVENT);
assertEquals("There should be exactly an FlowableEventType.PROCESS_COMPLETED_WITH_ERROR_END_EVENT event after the task complete.", 1,
processCompletedEvents.size());
FlowableEngineEntityEvent processCompletedEvent = (FlowableEngineEntityEvent) processCompletedEvents.get(0);
assertEquals(subProcesses.get(0).getId(), processCompletedEvent.getExecutionId());
// The boundary error event routed the parent process to its escalation task.
task = taskService.createTaskQuery().singleResult();
assertEquals("Escalated Task", task.getName());
// Completing the task will end the process instance
taskService.complete(task.getId());
assertProcessEnded(pi.getId());
}
// Verifies that deleting a parent with a running parallel multi-instance call
// activity fires cancellation events and removes every instance and task.
@Deployment(resources = {
"org/flowable/engine/test/bpmn/multiinstance/MultiInstanceTest.testParallelCallActivity.bpmn20.xml",
"org/flowable/engine/test/bpmn/multiinstance/MultiInstanceTest.externalSubProcess.bpmn20.xml" })
public void testDeleteMultiInstanceCallActivityProcessInstance() {
assertEquals(0, taskService.createTaskQuery().count());
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("miParallelCallActivity");
// 7 instances and 12 tasks after start — presumably 1 parent + 6 MI call-activity
// instances with 2 tasks each; confirm against the bpmn models.
assertEquals(7, runtimeService.createProcessInstanceQuery().count());
assertEquals(12, taskService.createTaskQuery().count());
this.listener.clearEventsReceived();
runtimeService.deleteProcessInstance(processInstance.getId(), "testing instance deletion");
// NOTE(review): these asserts depend on the exact dispatch order of the first
// three events (index 1 is not inspected) — verify if the engine's ordering changes.
assertEquals("Task cancelled event has to be fired.", FlowableEngineEventType.ACTIVITY_CANCELLED, this.listener.getEventsReceived().get(0).getType());
assertEquals("SubProcess cancelled event has to be fired.", FlowableEngineEventType.PROCESS_CANCELLED, this.listener.getEventsReceived().get(2).getType());
assertEquals(0, runtimeService.createProcessInstanceQuery().count());
assertEquals(0, taskService.createTaskQuery().count());
}
// Verifies that cancelling an embedded sub process via a message emits
// ACTIVITY_CANCELLED for the end event, the user task, and the sub process itself.
@Deployment(resources = "org/flowable/engine/test/api/runtime/subProcessWithTerminateEnd.bpmn20.xml")
public void testProcessInstanceTerminatedEventInSubProcess() throws Exception {
ProcessInstance pi = runtimeService.startProcessInstanceByKey("subProcessWithTerminateEndTest");
// Sanity check on the started state: four live executions (per this model — confirm against the bpmn).
long executionEntities = runtimeService.createExecutionQuery().processInstanceId(pi.getId()).count();
assertEquals(4, executionEntities);
List<Task> tasks = taskService.createTaskQuery().processInstanceId(pi.getId()).list();
assertEquals(1, tasks.size());
// Locate the execution subscribed to the 'cancel' message.
Execution execution = runtimeService.createExecutionQuery().messageEventSubscriptionName("cancel").singleResult();
assertNotNull(execution);
// message received cancels the SubProcess. We expect an event for all flow elements
// when the process state changes. We expect the activity cancelled event for the task within the
// Subprocess and the SubProcess itself
runtimeService.messageEventReceived("cancel", execution.getId());
List<FlowableEvent> activityTerminatedEvents = listener.filterEvents(FlowableEngineEventType.ACTIVITY_CANCELLED);
assertEquals(3, activityTerminatedEvents.size());
// Events may arrive in any order, so flag each expected activity type as it is seen.
boolean endEventFound = false;
boolean taskFound = false;
boolean subProcessFound = false;
for (FlowableEvent terminatedEvent : activityTerminatedEvents) {
FlowableActivityCancelledEvent activityEvent = (FlowableActivityCancelledEvent) terminatedEvent;
if ("endEvent".equals(activityEvent.getActivityType())) {
endEventFound = true;
} else if ("userTask".equals(activityEvent.getActivityType())) {
taskFound = true;
assertEquals("task", activityEvent.getActivityId());
} else if ("subProcess".equals(activityEvent.getActivityType())) {
subProcessFound = true;
assertEquals("embeddedSubprocess", activityEvent.getActivityId());
}
}
assertTrue(endEventFound);
assertTrue(taskFound);
assertTrue(subProcessFound);
}
// Verifies that a terminate end event reached from one embedded sub process cancels
// the sibling sub process with all its active/attached flow elements, and that the
// instance completes with the terminate-specific completion event only.
@Deployment(resources = "org/flowable/engine/test/api/runtime/multipleSubprocessTerminateEnd.bpmn20.xml")
public void testProcessInstanceWithMultipleSubprocessAndTerminateEnd2() throws Exception {
ProcessInstance pi = runtimeService.startProcessInstanceByKey("multiplesubProcessWithTerminateEndTest");
// Both embedded sub processes are active right after start, each with one user task.
List<Execution> subprocesses = runtimeService.createExecutionQuery().processInstanceId(pi.getId())
.onlySubProcessExecutions().list();
assertEquals(2, subprocesses.size());
List<Task> tasks = taskService.createTaskQuery().processInstanceId(pi.getId()).list();
assertEquals(2, tasks.size());
// Pick the task that lives in subprocess2.
Task task2 = null;
for (Task task : tasks) {
if ("Task in subprocess2".equals(task.getName())) {
task2 = task;
break;
}
}
// Complete user task in subprocess2. This flows out of subprocess2 to
// the terminate end event. This will cause subprocess1 to be cancelled along
// with the user task, boundary event and intermediate catch event defined in or
// on subprocess1.
assertNotNull(task2);
taskService.complete(task2.getId());
// Subprocess2 completed and transitioned to terminate end. We expect
// ACTIVITY_CANCELLED for Subprocess1, task1 defined in subprocess1, boundary event defined on
// and the timer intermediate catch event defined in subprocess1
boolean endEventFound = false;
boolean userTaskFound = false;
boolean subprocessFound = false;
boolean timerCatchEventFound = false;
boolean boundaryEventFound = false;
List<FlowableEvent> activityTerminatedEvents = listener
.filterEvents(FlowableEngineEventType.ACTIVITY_CANCELLED);
assertEquals(5, activityTerminatedEvents.size());
// Events may arrive in any order; match each expected activity by its type.
for (FlowableEvent flowableEvent: activityTerminatedEvents)
{
FlowableActivityCancelledEvent activityCancelledEvent = (FlowableActivityCancelledEvent) flowableEvent;
if ("endEvent".equals(activityCancelledEvent.getActivityType())) {
assertEquals("End", activityCancelledEvent.getActivityName());
endEventFound = true;
}
else if ("intermediateCatchEvent".equals(activityCancelledEvent.getActivityType())) {
assertEquals("timer", activityCancelledEvent.getActivityId());
timerCatchEventFound = true;
}
else if ("boundaryEvent".equals(activityCancelledEvent.getActivityType())) {
boundaryEventFound = true;
}
else if ("userTask".equals(activityCancelledEvent.getActivityType())) {
assertEquals("Task in subprocess1", activityCancelledEvent.getActivityName());
userTaskFound = true;
}
else if ("subProcess".equals(activityCancelledEvent.getActivityType())) {
assertEquals("subprocess1", activityCancelledEvent.getActivityId());
subprocessFound = true;
}
}
assertTrue(endEventFound);
assertTrue(timerCatchEventFound);
assertTrue(boundaryEventFound);
assertTrue(userTaskFound);
assertTrue(subprocessFound);
// The instance finished through a terminate end event, so no plain
// PROCESS_COMPLETED is fired — only the terminate-specific completion.
List<FlowableEvent> processCompletedEvents = listener.filterEvents(FlowableEngineEventType.PROCESS_COMPLETED);
assertEquals(0, processCompletedEvents.size());
List<FlowableEvent> processCompletedTerminateEndEvents = listener
.filterEvents(FlowableEngineEventType.PROCESS_COMPLETED_WITH_TERMINATE_END_EVENT);
assertEquals(1, processCompletedTerminateEndEvents.size());
// Only expect PROCESS_COMPLETED_WITH_TERMINATE_END_EVENT, not
// PROCESS_CANCELLED.
List<FlowableEvent> processCanceledEvents = listener.filterEvents(FlowableEngineEventType.PROCESS_CANCELLED);
assertEquals(0, processCanceledEvents.size());
}
@Override
protected void initializeServices() {
    super.initializeServices();

    // Register a fresh capture listener on the engine before every test.
    listener = new TestInitializedEntityEventListener();
    processEngineConfiguration.getEventDispatcher().addEventListener(listener);
}
@Override
protected void tearDown() throws Exception {
    super.tearDown();

    if (listener == null) {
        return;
    }
    // Drop captured events and detach the listener so it cannot leak into other tests.
    listener.clearEventsReceived();
    processEngineConfiguration.getEventDispatcher().removeEventListener(listener);
}
/**
 * Captures process-instance entity events (asserting the entity is initialized
 * when dispatched) plus PROCESS_CANCELLED and ACTIVITY_CANCELLED events.
 */
private class TestInitializedEntityEventListener implements FlowableEventListener {

    // Events recorded by onEvent; now final since it is never reassigned.
    private final List<FlowableEvent> eventsReceived;

    public TestInitializedEntityEventListener() {
        eventsReceived = new ArrayList<FlowableEvent>();
    }

    public List<FlowableEvent> getEventsReceived() {
        return eventsReceived;
    }

    public void clearEventsReceived() {
        eventsReceived.clear();
    }

    @Override
    public void onEvent(FlowableEvent event) {
        if (event instanceof FlowableEntityEvent && ProcessInstance.class.isAssignableFrom(((FlowableEntityEvent) event).getEntity().getClass())) {
            // check whether entity in the event is initialized before
            // adding to the list.
            assertNotNull(((ExecutionEntity) ((FlowableEntityEvent) event).getEntity()).getId());
            eventsReceived.add(event);
        } else if (FlowableEngineEventType.PROCESS_CANCELLED == event.getType() || FlowableEngineEventType.ACTIVITY_CANCELLED == event.getType()) {
            eventsReceived.add(event);
        }
    }

    @Override
    public boolean isFailOnException() {
        return true;
    }

    /**
     * Returns the received events of the given type (e.g. to count timer
     * cancelled events).
     */
    public List<FlowableEvent> filterEvents(FlowableEngineEventType eventType) {
        List<FlowableEvent> filteredEvents = new ArrayList<FlowableEvent>();
        // Fix: iterate this instance's own list instead of going through the
        // outer class's 'listener' field — the old indirection would read a
        // different instance's events if the field were ever reassigned.
        for (FlowableEvent eventReceived : eventsReceived) {
            if (eventType == eventReceived.getType()) {
                filteredEvents.add(eventReceived);
            }
        }
        return filteredEvents;
    }
}
}
| |
package com.github.chrisdchristo.pipe;
import com.google.gson.reflect.TypeToken;
import io.undertow.Handlers;
import io.undertow.Undertow;
import io.undertow.server.HttpServerExchange;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import java.io.InputStream;
import java.lang.reflect.Type;
import java.util.Set;
import static io.restassured.RestAssured.given;
import static org.hamcrest.core.IsEqual.equalTo;
import static org.junit.Assert.assertNull;
public class JsonMarshallerTest {
public static final String ROUTE_BOUNCE = "/bounce";

/** Duplex endpoint that echoes the unmarshalled car back for any supported method. */
public static final class BounceAPI implements Pipe.Duplex<Mock.Car, Mock.Car> {

    @Override
    public Class<Mock.Car> inputType() {
        return Mock.Car.class;
    }

    @Override
    public Set<Marshaller> marshallers() {
        return Mock.marshallers;
    }

    public Mock.Car handle(final HttpServerExchange exchange, final Pipe.Method method, final Mock.Car input) {
        // Bounce the payload straight back to the caller.
        return input;
    }
}
/** Second echo handler used to exercise duplicate route registration. */
public static final class BounceDuplicateAPI implements Pipe.Duplex<Mock.Car, Mock.Car> {

    @Override
    public Class<Mock.Car> inputType() {
        return Mock.Car.class;
    }

    @Override
    public Set<Marshaller> marshallers() {
        return Mock.marshallers;
    }

    public Mock.Car handle(final HttpServerExchange exchange, final Pipe.Method method, final Mock.Car input) {
        // Same behavior as BounceAPI: echo the payload.
        return input;
    }
}
public static final String ROUTE_BOUNCE_POST = "/bounce_post";

/** Echo endpoint restricted to POST. */
public static final class BouncePostAPI implements Pipe.Duplex.Post<Mock.Car, Mock.Car> {

    @Override
    public Class<Mock.Car> inputType() {
        return Mock.Car.class;
    }

    @Override
    public Set<Marshaller> marshallers() {
        return Mock.marshallers;
    }

    public Mock.Car handle(final HttpServerExchange exchange, final Mock.Car input) {
        // Echo the payload back.
        return input;
    }
}
public static final String ROUTE_BOUNCE_PUT = "/bounce_put";

/** Echo endpoint restricted to PUT. */
public static final class BouncePutAPI implements Pipe.Duplex.Put<Mock.Car, Mock.Car> {

    @Override
    public Class<Mock.Car> inputType() {
        return Mock.Car.class;
    }

    @Override
    public Set<Marshaller> marshallers() {
        return Mock.marshallers;
    }

    public Mock.Car handle(final HttpServerExchange exchange, final Mock.Car input) {
        // Echo the payload back.
        return input;
    }
}
public static final String ROUTE_BOUNCE_DELETE = "/bounce_delete";

/** Echo endpoint restricted to DELETE. */
public static final class BounceDeleteAPI implements Pipe.Duplex.Delete<Mock.Car, Mock.Car> {

    @Override
    public Class<Mock.Car> inputType() {
        return Mock.Car.class;
    }

    @Override
    public Set<Marshaller> marshallers() {
        return Mock.marshallers;
    }

    public Mock.Car handle(final HttpServerExchange exchange, final Mock.Car input) {
        // Echo the payload back.
        return input;
    }
}
// Shared server instance for all tests in this class; it is started/stopped
// around each test by the @Before/@After methods below.
public static final Undertow undertow = Undertow.builder()
.addHttpListener(Mock.PORT, Mock.SERVER)
.setHandler(Handlers.path()
// ROUTE_BOUNCE is deliberately registered three times: re-registering an
// exact path replaces the earlier handler, so BounceDuplicateAPI serves it.
.addExactPath(ROUTE_BOUNCE, new BounceAPI())
.addExactPath(ROUTE_BOUNCE, new BounceAPI()) // duplicate - OK!
.addExactPath(ROUTE_BOUNCE, new BounceDuplicateAPI()) // duplicate but different handler - will override previous prefix OK!
.addExactPath(ROUTE_BOUNCE_POST, new BouncePostAPI())
.addExactPath(ROUTE_BOUNCE_PUT, new BouncePutAPI())
.addExactPath(ROUTE_BOUNCE_DELETE, new BounceDeleteAPI())
).build();
// Start the shared Undertow server before each test.
@Before
public final void before() {
undertow.start();
}
// Stop the shared Undertow server after each test so the port is released.
@After
public final void stop() {
undertow.stop();
}
@Test
public final void bounce_Get() {
    // GET is not supported on the duplex route: an error payload is returned (still HTTP 200).
    final String expected = Mock.jsonMarshaller.data(Pipe.Error.of("unsupported-method|GET"));
    given().port(Mock.PORT)
            .get(ROUTE_BOUNCE)
            .then()
            .statusCode(200)
            .body(equalTo(expected));
}
@Test
public final void bounce_Post() {
    // POST on the duplex route echoes the marshalled car back unchanged.
    final String payload = Mock.jsonMarshaller.data(Mock.CAR);
    given().port(Mock.PORT)
            .body(payload)
            .post(ROUTE_BOUNCE)
            .then()
            .statusCode(200)
            .body(equalTo(payload));
}
@Test
public final void bounce_Put() {
    // PUT on the duplex route echoes the marshalled car back unchanged.
    final String payload = Mock.jsonMarshaller.data(Mock.CAR);
    given().port(Mock.PORT)
            .body(payload)
            .put(ROUTE_BOUNCE)
            .then()
            .statusCode(200)
            .body(equalTo(payload));
}
@Test
public final void bounce_Delete() {
given().port(Mock.PORT)
.body(Mock.jsonMarshaller.data(Mock.CAR))
.delete(ROUTE_BOUNCE)
.then()
.body(equalTo(Mock.jsonMarshaller.data(Mock.CAR)))
.statusCode(200);
}
@Test
public final void bouncePost_Get() {
given().port(Mock.PORT)
.get(ROUTE_BOUNCE_POST)
.then()
.body(equalTo(Mock.jsonMarshaller.data(Pipe.Error.of("unsupported-method|GET"))))
.statusCode(200);
}
@Test
public final void bouncePost_Post() {
given().port(Mock.PORT)
.body(Mock.jsonMarshaller.data(Mock.CAR))
.post(ROUTE_BOUNCE_POST)
.then()
.body(equalTo(Mock.jsonMarshaller.data(Mock.CAR)))
.statusCode(200);
}
@Test
public final void bouncePost_Put() {
given().port(Mock.PORT)
.body(Mock.jsonMarshaller.data(Mock.CAR))
.put(ROUTE_BOUNCE_POST)
.then()
.body(equalTo(Mock.jsonMarshaller.data(Pipe.Error.of("unsupported-method|PUT"))))
.statusCode(200);
}
@Test
public final void bouncePost_Delete() {
given().port(Mock.PORT)
.body(Mock.jsonMarshaller.data(Mock.CAR))
.delete(ROUTE_BOUNCE_POST)
.then()
.body(equalTo(Mock.jsonMarshaller.data(Pipe.Error.of("unsupported-method|DELETE"))))
.statusCode(200);
}
@Test
public final void bouncePut_Get() {
given().port(Mock.PORT)
.get(ROUTE_BOUNCE_PUT)
.then()
.body(equalTo(Mock.jsonMarshaller.data(Pipe.Error.of("unsupported-method|GET"))))
.statusCode(200);
}
@Test
public final void bouncePut_Post() {
given().port(Mock.PORT)
.body(Mock.jsonMarshaller.data(Mock.CAR))
.post(ROUTE_BOUNCE_PUT)
.then()
.body(equalTo(Mock.jsonMarshaller.data(Pipe.Error.of("unsupported-method|POST"))))
.statusCode(200);
}
@Test
public final void bouncePut_Put() {
given().port(Mock.PORT)
.body(Mock.jsonMarshaller.data(Mock.CAR))
.put(ROUTE_BOUNCE_PUT)
.then()
.body(equalTo(Mock.jsonMarshaller.data(Mock.CAR)))
.statusCode(200);
}
@Test
public final void bouncePut_Delete() {
given().port(Mock.PORT)
.body(Mock.jsonMarshaller.data(Mock.CAR))
.delete(ROUTE_BOUNCE_PUT)
.then()
.body(equalTo(Mock.jsonMarshaller.data(Pipe.Error.of("unsupported-method|DELETE"))))
.statusCode(200);
}
@Test
public final void bounceDelete_Get() {
given().port(Mock.PORT)
.get(ROUTE_BOUNCE_DELETE)
.then()
.body(equalTo(Mock.jsonMarshaller.data(Pipe.Error.of("unsupported-method|GET"))))
.statusCode(200);
}
@Test
public final void bounceDelete_Post() {
given().port(Mock.PORT)
.body(Mock.jsonMarshaller.data(Mock.CAR))
.post(ROUTE_BOUNCE_DELETE)
.then()
.body(equalTo(Mock.jsonMarshaller.data(Pipe.Error.of("unsupported-method|POST"))))
.statusCode(200);
}
@Test
public final void bounceDelete_Put() {
given().port(Mock.PORT)
.body(Mock.jsonMarshaller.data(Mock.CAR))
.put(ROUTE_BOUNCE_DELETE)
.then()
.body(equalTo(Mock.jsonMarshaller.data(Pipe.Error.of("unsupported-method|PUT"))))
.statusCode(200);
}
@Test
public final void bounceDelete_Delete() {
given().port(Mock.PORT)
.body(Mock.jsonMarshaller.data(Mock.CAR))
.delete(ROUTE_BOUNCE_DELETE)
.then()
.body(equalTo(Mock.jsonMarshaller.data(Mock.CAR)))
.statusCode(200);
}
    /** null in -> null out, for every data(...) overload. */
    @Test
    public final void json_FromNull() {
        // the casts are significant: each line pins a different overload
        assertNull(Mock.jsonMarshaller.data((String) null));
        assertNull(Mock.jsonMarshaller.data((InputStream) null));
        assertNull(Mock.jsonMarshaller.data((Mock.Car) null));
        assertNull(Mock.jsonMarshaller.data((Object) null));
    }
    /** Serializing the mock car must produce the canonical JSON fixture. */
    @Test
    public final void json_FromObject() {
        Assert.assertEquals(Mock.JSON, Mock.jsonMarshaller.data(Mock.CAR));
    }
    /** Deserializing from an InputStream must reproduce the mock car. */
    @Test
    public final void pojo_FromInputStream() {
        Assert.assertEquals(Mock.CAR, Mock.jsonMarshaller.pojo(Mock.IS, Mock.Car.class));
    }
    /** Deserializing from the JSON fixture must reproduce the mock car. */
    @Test
    public final void pojo_FromJson() {
        Assert.assertEquals(Mock.CAR, Mock.jsonMarshaller.pojo(Mock.JSON, Mock.Car.class));
    }
    /** Every pojo(...) overload must return null when either argument is null. */
    @Test
    public final void pojo_Null() {
        // again, the casts select specific overloads of pojo(...)
        assertNull(Mock.jsonMarshaller.pojo((String)null, (Class)null));
        assertNull(Mock.jsonMarshaller.pojo((String)null, (TypeToken)null));
        assertNull(Mock.jsonMarshaller.pojo((String)null, (Type)null));
        assertNull(Mock.jsonMarshaller.pojo((String)null, (Class<Mock.Car>)null));
        assertNull(Mock.jsonMarshaller.pojo((InputStream)null, (Class)null));
        assertNull(Mock.jsonMarshaller.pojo((InputStream)null, (TypeToken)null));
        assertNull(Mock.jsonMarshaller.pojo((InputStream)null, (Type)null));
        assertNull(Mock.jsonMarshaller.pojo((InputStream)null, (Class<Mock.Car>)null));
        assertNull(Mock.jsonMarshaller.pojo(Mock.JSON, (Class)null));
        assertNull(Mock.jsonMarshaller.pojo(Mock.JSON, (TypeToken)null));
        assertNull(Mock.jsonMarshaller.pojo(Mock.JSON, (Type)null));
        assertNull(Mock.jsonMarshaller.pojo(Mock.IS, (Class)null));
        assertNull(Mock.jsonMarshaller.pojo(Mock.IS, (Type)null));
        assertNull(Mock.jsonMarshaller.pojo(Mock.IS, (TypeToken)null));
        assertNull(Mock.jsonMarshaller.pojo((String)null, Mock.Car.class));
        assertNull(Mock.jsonMarshaller.pojo((InputStream)null, Mock.Car.class));
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.lens.cube.metadata.timeline;
import java.util.*;
import org.apache.lens.cube.metadata.MetastoreUtil;
import org.apache.lens.cube.metadata.TimePartition;
import org.apache.lens.cube.metadata.TimePartitionRange;
import org.apache.lens.cube.metadata.UpdatePeriod;
import org.apache.lens.server.api.error.LensException;
import org.apache.hadoop.hive.ql.metadata.Table;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import lombok.Data;
import lombok.NonNull;
import lombok.extern.slf4j.Slf4j;
/**
 * Represents the in-memory data structure that represents timeline of all existing partitions for a given storage
 * table, update period, partition column. Is an Abstract class. Can be implemented in multiple ways.
 *
 * @see org.apache.lens.cube.metadata.timeline.EndsAndHolesPartitionTimeline
 * @see org.apache.lens.cube.metadata.timeline.StoreAllPartitionTimeline
 */
@Data
@Slf4j
public abstract class PartitionTimeline implements Iterable<TimePartition> {
  // Identity of this timeline: which storage table / update period / partition column it tracks.
  // NOTE: @Data derives the public accessors (getStorageTableName() etc.) from these field names,
  // so the field names are effectively part of the public API.
  private final String storageTableName;
  private final UpdatePeriod updatePeriod;
  private final String partCol;
  // Staging area for batch additions; null whenever no batch is pending.
  // See addForBatchAddition(...) and commitBatchAdditions().
  private TreeSet<TimePartition> all;
  /** wrapper on latest data */
  public Date getLatestDate() {
    // NOTE(review): latest() is evaluated twice; assumed cheap/side-effect-free in implementations.
    return latest() == null ? null : latest().getDate();
  }
  /**
   * Sets PartitionTimeline implementation class's name and specific params in table param.
   *
   * @param table the hive table whose parameters are updated in place
   * @see #init(org.apache.hadoop.hive.ql.metadata.Table)
   */
  public void updateTableParams(Table table) {
    String prefix = MetastoreUtil.getPartitionInfoKeyPrefix(getUpdatePeriod(), getPartCol());
    String storageClass = MetastoreUtil.getPartitionTimelineStorageClassKey(getUpdatePeriod(), getPartCol());
    // record which concrete timeline implementation serialized these properties
    table.getParameters().put(storageClass, this.getClass().getCanonicalName());
    // persist each implementation-specific property under the common key prefix
    for (Map.Entry<String, String> entry : toProperties().entrySet()) {
      table.getParameters().put(prefix + entry
        .getKey(), entry.getValue());
    }
  }
  /**
   * Extracts timeline implementation class from table params and instantiates it with other arguments, also in table
   * params.
   *
   * @param table the hive table to read serialized timeline properties from
   * @throws LensException if deserialization of the properties fails
   * @see #updateTableParams(org.apache.hadoop.hive.ql.metadata.Table)
   */
  public void init(Table table) throws LensException {
    HashMap<String, String> props = Maps.newHashMap();
    String prefix = MetastoreUtil.getPartitionInfoKeyPrefix(getUpdatePeriod(), getPartCol());
    // collect only the parameters belonging to this (update period, part col) pair,
    // stripping the prefix so implementations see their own short keys
    for (Map.Entry<String, String> entry : table.getParameters().entrySet()) {
      if (entry.getKey().startsWith(prefix)) {
        props.put(entry.getKey().substring(prefix.length()), entry.getValue());
      }
    }
    log.info("initializing timeline from table properties: {},{},{}",
      getStorageTableName(), getUpdatePeriod(), getPartCol());
    initFromProperties(props);
    log.info("initialized to: {}", this);
  }
  /**
   * Add partition to local memory to be sent for batch addition.
   *
   * @see #commitBatchAdditions()
   */
  public void addForBatchAddition(TimePartition partition) {
    // lazily create the staging set; TreeSet keeps staged partitions sorted and de-duplicated
    if (all == null) {
      all = Sets.newTreeSet();
    }
    all.add(partition);
  }
  /**
   * Commit all partitions that were added to batch addition queue. //TODO: improve batch addition implementation.
   *
   * @return true if all the partitions were added successfully, or no partitions needed to be added
   * @throws LensException
   */
  public boolean commitBatchAdditions() throws LensException {
    // nothing staged -> trivially successful
    if (getAll() == null) {
      return true;
    }
    log.info("initializing timeline from batch addition: {},{},{}",
      getStorageTableName(), getUpdatePeriod(), getPartCol());
    boolean result = add(getAll());
    // clear the staging area regardless of individual add results
    all = null;
    log.info("initialized to: {}", this);
    return result;
  }
  /**
   * Add partition to timeline
   *
   * @param partition partition to add; must not be null
   * @return whether add was successful
   * @throws LensException
   */
  public abstract boolean add(@NonNull TimePartition partition) throws LensException;
  /**
   * Add multiple partitions to timeline
   *
   * @param partitions partitions to add; must not be null
   * @return whether add was successful for every partition
   * @throws LensException
   */
  public boolean add(@NonNull Collection<TimePartition> partitions) throws LensException {
    boolean result = true;
    // attempt every partition even after a failure; result reflects overall success
    for (TimePartition partition : partitions) {
      result &= add(partition);
    }
    // Can also return the failed to add items.
    return result;
  }
  /**
   * Add partition range to the timeline. Default implementation is to iterate over the range and add
   * each time partition belonging to the given range. Implementing classes can override.
   *
   * @param partitionRange range of partitions to add
   * @return whether add was successful for every partition in the range
   * @throws LensException
   */
  boolean add(TimePartitionRange partitionRange) throws LensException {
    boolean ret = true;
    for (TimePartition part : partitionRange) {
      ret &= add(part);
    }
    return ret;
  }
  /**
   * drop partition.
   *
   * @param toDrop partition to drop; must not be null
   * @return whether drop was successful
   * @throws LensException
   */
  public abstract boolean drop(@NonNull TimePartition toDrop) throws LensException;
  /**
   * latest partition. will be null if no partitions exist.
   *
   * @return the latest partition, or null when the timeline is empty
   */
  public abstract TimePartition latest();
  /**
   * serialize member objects as map
   *
   * @return implementation-specific properties; keys are stored prefixed via updateTableParams
   */
  public abstract Map<String, String> toProperties();
  /**
   * deserialize member variables from given map
   *
   * @param properties prefix-stripped properties previously produced by toProperties
   * @return true if after deserializing, the timeline is in consistent state
   * @throws LensException
   * @see #isConsistent()
   */
  public abstract boolean initFromProperties(Map<String, String> properties) throws LensException;
  /**
   * Whether No partitions have been registered
   *
   * @return true when the timeline contains no partitions
   */
  public abstract boolean isEmpty();
  /**
   * whether timeline is in consistent state
   *
   * @return true when internal invariants of the implementation hold
   */
  public abstract boolean isConsistent();
  /**
   * Checks partition existance
   *
   * @param partition partition to look up
   * @return true if the partition is present in the timeline
   */
  public abstract boolean exists(TimePartition partition);
}
| |
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver12;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import org.jboss.netty.buffer.ChannelBuffer;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
// NOTE(review): this class is generated by LoxiGen ("Do not modify" per file header).
// Comments below are reading aids only and will be lost on regeneration.
class OFOxmArpShaVer12 implements OFOxmArpSha {
    private static final Logger logger = LoggerFactory.getLogger(OFOxmArpShaVer12.class);
    // version: 1.2
    final static byte WIRE_VERSION = 3;
    // total wire length: 4-byte OXM header + 6-byte MAC value
    final static int LENGTH = 10;
    private final static MacAddress DEFAULT_VALUE = MacAddress.NONE;
    // OF message fields
    private final MacAddress value;
    //
    // Immutable default instance
    final static OFOxmArpShaVer12 DEFAULT = new OFOxmArpShaVer12(
        DEFAULT_VALUE
    );
    // package private constructor - used by readers, builders, and factory
    OFOxmArpShaVer12(MacAddress value) {
        if(value == null) {
            throw new NullPointerException("OFOxmArpShaVer12: property value cannot be null");
        }
        this.value = value;
    }
    // Accessors for OF message fields
    @Override
    public long getTypeLen() {
        // fixed OXM header encoding class/field/hasMask/length for ARP_SHA
        return 0x80003006L;
    }
    @Override
    public MacAddress getValue() {
        return value;
    }
    @Override
    public MatchField<MacAddress> getMatchField() {
        return MatchField.ARP_SHA;
    }
    @Override
    public boolean isMasked() {
        return false;
    }
    public OFOxm<MacAddress> getCanonical() {
        // exact match OXM is always canonical
        return this;
    }
    @Override
    public MacAddress getMask()throws UnsupportedOperationException {
        throw new UnsupportedOperationException("Property mask not supported in version 1.2");
    }
    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_12;
    }
    public OFOxmArpSha.Builder createBuilder() {
        return new BuilderWithParent(this);
    }
    // Builder seeded from an existing message: unset fields fall back to the parent's values.
    static class BuilderWithParent implements OFOxmArpSha.Builder {
        final OFOxmArpShaVer12 parentMessage;
        // OF message fields
        private boolean valueSet;
        private MacAddress value;
        BuilderWithParent(OFOxmArpShaVer12 parentMessage) {
            this.parentMessage = parentMessage;
        }
        @Override
        public long getTypeLen() {
            return 0x80003006L;
        }
        @Override
        public MacAddress getValue() {
            return value;
        }
        @Override
        public OFOxmArpSha.Builder setValue(MacAddress value) {
            this.value = value;
            this.valueSet = true;
            return this;
        }
        @Override
        public MatchField<MacAddress> getMatchField() {
            return MatchField.ARP_SHA;
        }
        @Override
        public boolean isMasked() {
            return false;
        }
        @Override
        public OFOxm<MacAddress> getCanonical()throws UnsupportedOperationException {
            throw new UnsupportedOperationException("Property canonical not supported in version 1.2");
        }
        @Override
        public MacAddress getMask()throws UnsupportedOperationException {
            throw new UnsupportedOperationException("Property mask not supported in version 1.2");
        }
        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_12;
        }
        @Override
        public OFOxmArpSha build() {
            // use the explicitly set value if present, otherwise inherit from parent
            MacAddress value = this.valueSet ? this.value : parentMessage.value;
            if(value == null)
                throw new NullPointerException("Property value must not be null");
            //
            return new OFOxmArpShaVer12(
                    value
                );
        }
    }
    // Stand-alone builder: unset fields fall back to the class defaults.
    static class Builder implements OFOxmArpSha.Builder {
        // OF message fields
        private boolean valueSet;
        private MacAddress value;
        @Override
        public long getTypeLen() {
            return 0x80003006L;
        }
        @Override
        public MacAddress getValue() {
            return value;
        }
        @Override
        public OFOxmArpSha.Builder setValue(MacAddress value) {
            this.value = value;
            this.valueSet = true;
            return this;
        }
        @Override
        public MatchField<MacAddress> getMatchField() {
            return MatchField.ARP_SHA;
        }
        @Override
        public boolean isMasked() {
            return false;
        }
        @Override
        public OFOxm<MacAddress> getCanonical()throws UnsupportedOperationException {
            throw new UnsupportedOperationException("Property canonical not supported in version 1.2");
        }
        @Override
        public MacAddress getMask()throws UnsupportedOperationException {
            throw new UnsupportedOperationException("Property mask not supported in version 1.2");
        }
        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_12;
        }
        //
        @Override
        public OFOxmArpSha build() {
            MacAddress value = this.valueSet ? this.value : DEFAULT_VALUE;
            if(value == null)
                throw new NullPointerException("Property value must not be null");
            return new OFOxmArpShaVer12(
                    value
                );
        }
    }
    final static Reader READER = new Reader();
    // Deserializes the fixed 4-byte typeLen header, then the 6-byte MAC value.
    static class Reader implements OFMessageReader<OFOxmArpSha> {
        @Override
        public OFOxmArpSha readFrom(ChannelBuffer bb) throws OFParseError {
            // fixed value property typeLen == 0x80003006L
            int typeLen = bb.readInt();
            if(typeLen != (int) 0x80003006)
                throw new OFParseError("Wrong typeLen: Expected=0x80003006L(0x80003006L), got="+typeLen);
            MacAddress value = MacAddress.read6Bytes(bb);
            OFOxmArpShaVer12 oxmArpShaVer12 = new OFOxmArpShaVer12(
                    value
                    );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", oxmArpShaVer12);
            return oxmArpShaVer12;
        }
    }
    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }
    final static OFOxmArpShaVer12Funnel FUNNEL = new OFOxmArpShaVer12Funnel();
    // Feeds the same fields that writeTo serializes into a Guava hashing sink.
    static class OFOxmArpShaVer12Funnel implements Funnel<OFOxmArpShaVer12> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFOxmArpShaVer12 message, PrimitiveSink sink) {
            // fixed value property typeLen = 0x80003006L
            sink.putInt((int) 0x80003006);
            message.value.putTo(sink);
        }
    }
    public void writeTo(ChannelBuffer bb) {
        WRITER.write(bb, this);
    }
    final static Writer WRITER = new Writer();
    // Serialization mirror of Reader: header int followed by the 6-byte MAC.
    static class Writer implements OFMessageWriter<OFOxmArpShaVer12> {
        @Override
        public void write(ChannelBuffer bb, OFOxmArpShaVer12 message) {
            // fixed value property typeLen = 0x80003006L
            bb.writeInt((int) 0x80003006);
            message.value.write6Bytes(bb);
        }
    }
    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFOxmArpShaVer12(");
        b.append("value=").append(value);
        b.append(")");
        return b.toString();
    }
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFOxmArpShaVer12 other = (OFOxmArpShaVer12) obj;
        // value is the only message field, so equality reduces to comparing it
        if (value == null) {
            if (other.value != null)
                return false;
        } else if (!value.equals(other.value))
            return false;
        return true;
    }
    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((value == null) ? 0 : value.hashCode());
        return result;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.processor;
import java.util.ArrayList;
import java.util.Date;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.RejectedExecutionException;
import org.apache.camel.AsyncCallback;
import org.apache.camel.CamelContext;
import org.apache.camel.Exchange;
import org.apache.camel.Message;
import org.apache.camel.MessageHistory;
import org.apache.camel.Ordered;
import org.apache.camel.Processor;
import org.apache.camel.Route;
import org.apache.camel.StatefulService;
import org.apache.camel.StreamCache;
import org.apache.camel.api.management.PerformanceCounter;
import org.apache.camel.management.DelegatePerformanceCounter;
import org.apache.camel.management.mbean.ManagedPerformanceCounter;
import org.apache.camel.model.ProcessorDefinition;
import org.apache.camel.model.ProcessorDefinitionHelper;
import org.apache.camel.processor.interceptor.BacklogDebugger;
import org.apache.camel.processor.interceptor.BacklogTracer;
import org.apache.camel.processor.interceptor.DefaultBacklogTracerEventMessage;
import org.apache.camel.spi.Contract;
import org.apache.camel.spi.DataType;
import org.apache.camel.spi.InflightRepository;
import org.apache.camel.spi.MessageHistoryFactory;
import org.apache.camel.spi.RouteContext;
import org.apache.camel.spi.RoutePolicy;
import org.apache.camel.spi.StreamCachingStrategy;
import org.apache.camel.spi.Transformer;
import org.apache.camel.spi.UnitOfWork;
import org.apache.camel.util.MessageHelper;
import org.apache.camel.util.OrderedComparator;
import org.apache.camel.util.StopWatch;
import org.apache.camel.util.UnitOfWorkHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Internal {@link Processor} that Camel routing engine used during routing for cross cutting functionality such as:
* <ul>
* <li>Execute {@link UnitOfWork}</li>
* <li>Keeping track which route currently is being routed</li>
* <li>Execute {@link RoutePolicy}</li>
* <li>Gather JMX performance statics</li>
* <li>Tracing</li>
* <li>Debugging</li>
* <li>Message History</li>
* <li>Stream Caching</li>
* <li>{@link Transformer}</li>
* </ul>
* ... and more.
* <p/>
* This implementation executes this cross cutting functionality as a {@link CamelInternalProcessorAdvice} advice (before and after advice)
* by executing the {@link CamelInternalProcessorAdvice#before(org.apache.camel.Exchange)} and
* {@link CamelInternalProcessorAdvice#after(org.apache.camel.Exchange, Object)} callbacks in correct order during routing.
* This reduces number of stack frames needed during routing, and reduce the number of lines in stacktraces, as well
* makes debugging the routing engine easier for end users.
* <p/>
* <b>Debugging tips:</b> Camel end users whom want to debug their Camel applications with the Camel source code, then make sure to
* read the source code of this class about the debugging tips, which you can find in the
* {@link #process(org.apache.camel.Exchange, org.apache.camel.AsyncCallback)} method.
* <p/>
* The added advices can implement {@link Ordered} to control in which order the advices are executed.
*/
public class CamelInternalProcessor extends DelegateAsyncProcessor {
    private static final Logger LOG = LoggerFactory.getLogger(CamelInternalProcessor.class);
    // advices executed around the wrapped processor; kept sorted by OrderedComparator (see addAdvice)
    private final List<CamelInternalProcessorAdvice> advices = new ArrayList<CamelInternalProcessorAdvice>();
    /** Creates the internal processor without a delegate processor. */
    public CamelInternalProcessor() {
    }
    /** Creates the internal processor delegating to the given processor. */
    public CamelInternalProcessor(Processor processor) {
        super(processor);
    }
    /**
     * Adds an {@link CamelInternalProcessorAdvice} advice to the list of advices to execute by this internal processor.
     *
     * @param advice the advice to add
     */
    public void addAdvice(CamelInternalProcessorAdvice advice) {
        advices.add(advice);
        // ensure advices are sorted so they are in the order we want
        // (re-sorting on every add is cheap: the list is small and rarely mutated)
        advices.sort(new OrderedComparator());
    }
/**
* Gets the advice with the given type.
*
* @param type the type of the advice
* @return the advice if exists, or <tt>null</tt> if no advices has been added with the given type.
*/
public <T> T getAdvice(Class<T> type) {
for (CamelInternalProcessorAdvice task : advices) {
if (type.isInstance(task)) {
return type.cast(task);
}
}
return null;
}
    /**
     * Routes the exchange: runs every advice's {@code before}, delegates to the wrapped
     * processor (synchronously for transacted exchanges, otherwise possibly asynchronously),
     * and arranges for the advices' {@code after} callbacks to run in reverse order via
     * {@link InternalCallback}.
     *
     * @return whether processing completed synchronously
     */
    @Override
    public boolean process(Exchange exchange, AsyncCallback callback) {
        // ----------------------------------------------------------
        // CAMEL END USER - READ ME FOR DEBUGGING TIPS
        // ----------------------------------------------------------
        // If you want to debug the Camel routing engine, then there is a lot of internal functionality
        // the routing engine executes during routing messages. You can skip debugging this internal
        // functionality and instead debug where the routing engine continues routing to the next node
        // in the routes. The CamelInternalProcessor is a vital part of the routing engine, as its
        // being used in between the nodes. As an end user you can just debug the code in this class
        // in between the:
        //   CAMEL END USER - DEBUG ME HERE +++ START +++
        //   CAMEL END USER - DEBUG ME HERE +++ END +++
        // you can see in the code below.
        // ----------------------------------------------------------
        if (processor == null || !continueProcessing(exchange)) {
            // no processor or we should not continue then we are done
            callback.done(true);
            return true;
        }
        // run before() on every advice, capturing each advice's state for its matching after()
        final List<Object> states = new ArrayList<Object>(advices.size());
        for (CamelInternalProcessorAdvice task : advices) {
            try {
                Object state = task.before(exchange);
                states.add(state);
            } catch (Throwable e) {
                // a failing before() aborts routing; the exception is stored on the exchange
                exchange.setException(e);
                callback.done(true);
                return true;
            }
        }
        // create internal callback which will execute the advices in reverse order when done
        callback = new InternalCallback(states, exchange, callback);
        // UNIT_OF_WORK_PROCESS_SYNC is @deprecated and we should remove it from Camel 3.0
        Object synchronous = exchange.removeProperty(Exchange.UNIT_OF_WORK_PROCESS_SYNC);
        if (exchange.isTransacted() || synchronous != null) {
            // must be synchronized for transacted exchanges
            if (LOG.isTraceEnabled()) {
                if (exchange.isTransacted()) {
                    LOG.trace("Transacted Exchange must be routed synchronously for exchangeId: {} -> {}", exchange.getExchangeId(), exchange);
                } else {
                    LOG.trace("Synchronous UnitOfWork Exchange must be routed synchronously for exchangeId: {} -> {}", exchange.getExchangeId(), exchange);
                }
            }
            // ----------------------------------------------------------
            // CAMEL END USER - DEBUG ME HERE +++ START +++
            // ----------------------------------------------------------
            try {
                processor.process(exchange);
            } catch (Throwable e) {
                exchange.setException(e);
            }
            // ----------------------------------------------------------
            // CAMEL END USER - DEBUG ME HERE +++ END +++
            // ----------------------------------------------------------
            callback.done(true);
            return true;
        } else {
            final UnitOfWork uow = exchange.getUnitOfWork();
            // allow unit of work to wrap callback in case it need to do some special work
            // for example the MDCUnitOfWork
            AsyncCallback async = callback;
            if (uow != null) {
                async = uow.beforeProcess(processor, exchange, callback);
            }
            // ----------------------------------------------------------
            // CAMEL END USER - DEBUG ME HERE +++ START +++
            // ----------------------------------------------------------
            if (LOG.isTraceEnabled()) {
                LOG.trace("Processing exchange for exchangeId: {} -> {}", exchange.getExchangeId(), exchange);
            }
            boolean sync = processor.process(exchange, async);
            // ----------------------------------------------------------
            // CAMEL END USER - DEBUG ME HERE +++ END +++
            // ----------------------------------------------------------
            // execute any after processor work (in current thread, not in the callback)
            if (uow != null) {
                uow.afterProcess(processor, exchange, callback, sync);
            }
            if (LOG.isTraceEnabled()) {
                LOG.trace("Exchange processed and is continued routed {} for exchangeId: {} -> {}",
                        new Object[]{sync ? "synchronously" : "asynchronously", exchange.getExchangeId(), exchange});
            }
            return sync;
        }
    }
@Override
public String toString() {
return processor != null ? processor.toString() : super.toString();
}
    /**
     * Internal callback that executes the after advices.
     * <p/>
     * The states list is parallel to the advices list: states.get(i) is the value
     * returned by advices.get(i).before(exchange), and is handed back to that same
     * advice's after(...) here, in reverse order.
     */
    private final class InternalCallback implements AsyncCallback {
        private final List<Object> states;
        private final Exchange exchange;
        private final AsyncCallback callback;
        private InternalCallback(List<Object> states, Exchange exchange, AsyncCallback callback) {
            this.states = states;
            this.exchange = exchange;
            this.callback = callback;
        }
        @Override
        public void done(boolean doneSync) {
            // NOTE: if you are debugging Camel routes, then all the code in the for loop below is internal only
            // so you can step straight to the finally block and invoke the callback
            // we should call after in reverse order
            try {
                for (int i = advices.size() - 1; i >= 0; i--) {
                    CamelInternalProcessorAdvice task = advices.get(i);
                    Object state = states.get(i);
                    try {
                        task.after(exchange, state);
                    } catch (Exception e) {
                        exchange.setException(e);
                        // allow all advices to complete even if there was an exception
                    }
                }
            } finally {
                // ----------------------------------------------------------
                // CAMEL END USER - DEBUG ME HERE +++ START +++
                // ----------------------------------------------------------
                // callback must be called
                callback.done(doneSync);
                // ----------------------------------------------------------
                // CAMEL END USER - DEBUG ME HERE +++ END +++
                // ----------------------------------------------------------
            }
        }
    }
/**
* Strategy to determine if we should continue processing the {@link Exchange}.
*/
protected boolean continueProcessing(Exchange exchange) {
Object stop = exchange.getProperty(Exchange.ROUTE_STOP);
if (stop != null) {
boolean doStop = exchange.getContext().getTypeConverter().convertTo(Boolean.class, stop);
if (doStop) {
LOG.debug("Exchange is marked to stop routing: {}", exchange);
return false;
}
}
// determine if we can still run, or the camel context is forcing a shutdown
boolean forceShutdown = exchange.getContext().getShutdownStrategy().forceShutdown(this);
if (forceShutdown) {
String msg = "Run not allowed as ShutdownStrategy is forcing shutting down, will reject executing exchange: " + exchange;
LOG.debug(msg);
if (exchange.getException() == null) {
exchange.setException(new RejectedExecutionException(msg));
}
return false;
}
// yes we can continue
return true;
}
/**
* Advice to invoke callbacks for before and after routing.
*/
public static class RouteLifecycleAdvice implements CamelInternalProcessorAdvice<Object> {
private Route route;
public void setRoute(Route route) {
this.route = route;
}
@Override
public Object before(Exchange exchange) throws Exception {
UnitOfWork uow = exchange.getUnitOfWork();
if (uow != null) {
uow.beforeRoute(exchange, route);
}
return null;
}
@Override
public void after(Exchange exchange, Object object) throws Exception {
UnitOfWork uow = exchange.getUnitOfWork();
if (uow != null) {
uow.afterRoute(exchange, route);
}
}
}
    /**
     * Advice for JMX instrumentation of the process being invoked.
     * <p/>
     * This advice keeps track of JMX metrics for performance statistics.
     * <p/>
     * The current implementation of this advice is only used for route level statistics. For processor levels
     * they are still wrapped in the route processor chains.
     */
    public static class InstrumentationAdvice implements CamelInternalProcessorAdvice<StopWatch> {
        private PerformanceCounter counter;
        // label used only for trace logging, e.g. a route id
        private String type;
        public InstrumentationAdvice(String type) {
            this.type = type;
        }
        /**
         * Installs the counter to record statistics into. The branch order below is
         * significant: an existing DelegatePerformanceCounter is re-targeted first,
         * then a managed counter wins over a plain PerformanceCounter.
         */
        public void setCounter(Object counter) {
            ManagedPerformanceCounter mpc = null;
            if (counter instanceof ManagedPerformanceCounter) {
                mpc = (ManagedPerformanceCounter) counter;
            }
            if (this.counter instanceof DelegatePerformanceCounter) {
                ((DelegatePerformanceCounter) this.counter).setCounter(mpc);
            } else if (mpc != null) {
                this.counter = mpc;
            } else if (counter instanceof PerformanceCounter) {
                this.counter = (PerformanceCounter) counter;
            }
        }
        /** Marks the start of processing on the counter. */
        protected void beginTime(Exchange exchange) {
            counter.processExchange(exchange);
        }
        /** Records the measured duration as completed or failed depending on the exchange outcome. */
        protected void recordTime(Exchange exchange, long duration) {
            if (LOG.isTraceEnabled()) {
                LOG.trace("{}Recording duration: {} millis for exchange: {}", new Object[]{type != null ? type + ": " : "", duration, exchange});
            }
            if (!exchange.isFailed() && exchange.getException() == null) {
                counter.completedExchange(exchange, duration);
            } else {
                counter.failedExchange(exchange);
            }
        }
        public String getType() {
            return type;
        }
        public void setType(String type) {
            this.type = type;
        }
        @Override
        public StopWatch before(Exchange exchange) throws Exception {
            // only record time if stats is enabled
            StopWatch answer = counter != null && counter.isStatisticsEnabled() ? new StopWatch() : null;
            if (answer != null) {
                beginTime(exchange);
            }
            return answer;
        }
        @Override
        public void after(Exchange exchange, StopWatch watch) throws Exception {
            // record end time; watch is null when stats were disabled in before()
            if (watch != null) {
                recordTime(exchange, watch.stop());
            }
        }
    }
/**
 * Advice to inject the current {@link RouteContext} into the {@link UnitOfWork} on the {@link Exchange}
 *
 * @deprecated this logic has been merged into {@link org.apache.camel.processor.CamelInternalProcessor.UnitOfWorkProcessorAdvice}
 */
@Deprecated
public static class RouteContextAdvice implements CamelInternalProcessorAdvice<UnitOfWork> {

    private final RouteContext routeContext;

    public RouteContextAdvice(RouteContext routeContext) {
        this.routeContext = routeContext;
    }

    @Override
    public UnitOfWork before(Exchange exchange) throws Exception {
        // make this route context the current one while the exchange is being routed
        UnitOfWork uow = exchange.getUnitOfWork();
        if (uow != null) {
            uow.pushRouteContext(routeContext);
        }
        return uow;
    }

    @Override
    public void after(Exchange exchange, UnitOfWork uow) throws Exception {
        // restore whatever route context was current before we pushed ours
        if (uow != null) {
            uow.popRouteContext();
        }
    }
}
/**
 * Advice to keep the {@link InflightRepository} up to date.
 */
public static class RouteInflightRepositoryAdvice implements CamelInternalProcessorAdvice {

    private final String routeId;
    private final InflightRepository repository;

    public RouteInflightRepositoryAdvice(InflightRepository inflightRepository, String id) {
        this.repository = inflightRepository;
        this.routeId = id;
    }

    @Override
    public Object before(Exchange exchange) throws Exception {
        // register the exchange as in-flight on this route
        repository.add(exchange, routeId);
        return null;
    }

    @Override
    public void after(Exchange exchange, Object state) throws Exception {
        // de-register when routing of the exchange is finished
        repository.remove(exchange, routeId);
    }
}
/**
 * Advice to execute any {@link RoutePolicy} a route may have been configured with.
 */
public static class RoutePolicyAdvice implements CamelInternalProcessorAdvice {

    // the policies configured on the route; each is invoked on exchange begin/done
    private final List<RoutePolicy> routePolicies;
    // set after construction via setRoute
    private Route route;

    public RoutePolicyAdvice(List<RoutePolicy> routePolicies) {
        this.routePolicies = routePolicies;
    }

    public void setRoute(Route route) {
        this.route = route;
    }

    /**
     * Strategy to determine if this policy is allowed to run
     *
     * @param policy the policy
     * @return <tt>true</tt> to run
     */
    protected boolean isRoutePolicyRunAllowed(RoutePolicy policy) {
        if (policy instanceof StatefulService) {
            StatefulService ss = (StatefulService) policy;
            return ss.isRunAllowed();
        }
        return true;
    }

    @Override
    public Object before(Exchange exchange) throws Exception {
        // invoke begin on each policy; a failing policy must not break routing, so log and continue
        for (RoutePolicy policy : routePolicies) {
            try {
                if (isRoutePolicyRunAllowed(policy)) {
                    policy.onExchangeBegin(route, exchange);
                }
            } catch (Exception e) {
                LOG.warn("Error occurred during onExchangeBegin on RoutePolicy: " + policy
                        + ". This exception will be ignored", e);
            }
        }
        return null;
    }

    @Override
    public void after(Exchange exchange, Object data) throws Exception {
        // do not invoke it if Camel is stopping as we don't want
        // the policy to start a consumer during Camel is stopping
        if (isCamelStopping(exchange.getContext())) {
            return;
        }

        // invoke done on each policy; exceptions are logged and ignored as above
        for (RoutePolicy policy : routePolicies) {
            try {
                if (isRoutePolicyRunAllowed(policy)) {
                    policy.onExchangeDone(route, exchange);
                }
            } catch (Exception e) {
                LOG.warn("Error occurred during onExchangeDone on RoutePolicy: " + policy
                        + ". This exception will be ignored", e);
            }
        }
    }

    // a CamelContext that is a StatefulService exposes its stopping/stopped state
    private static boolean isCamelStopping(CamelContext context) {
        if (context instanceof StatefulService) {
            StatefulService ss = (StatefulService) context;
            return ss.isStopping() || ss.isStopped();
        }
        return false;
    }
}
/**
 * Advice to execute the {@link BacklogTracer} if enabled.
 */
public static final class BacklogTracerAdvice implements CamelInternalProcessorAdvice, Ordered {

    private final BacklogTracer backlogTracer;
    private final ProcessorDefinition<?> processorDefinition;
    private final ProcessorDefinition<?> routeDefinition;
    // true when this advice wraps the first processor of the route
    private final boolean first;

    public BacklogTracerAdvice(BacklogTracer backlogTracer, ProcessorDefinition<?> processorDefinition,
                               ProcessorDefinition<?> routeDefinition, boolean first) {
        this.backlogTracer = backlogTracer;
        this.processorDefinition = processorDefinition;
        this.routeDefinition = routeDefinition;
        this.first = first;
    }

    @Override
    public Object before(Exchange exchange) throws Exception {
        if (backlogTracer.shouldTrace(processorDefinition, exchange)) {
            Date timestamp = new Date();
            String toNode = processorDefinition.getId();
            String exchangeId = exchange.getExchangeId();
            // snapshot the message as XML; body inclusion/size limits come from the tracer settings
            String messageAsXml = MessageHelper.dumpAsXml(exchange.getIn(), true, 4,
                    backlogTracer.isBodyIncludeStreams(), backlogTracer.isBodyIncludeFiles(), backlogTracer.getBodyMaxChars());

            // if first we should add a pseudo trace message as well, so we have a starting message (eg from the route)
            String routeId = routeDefinition != null ? routeDefinition.getId() : null;
            if (first) {
                // the pseudo event uses the exchange creation timestamp and a null node id
                Date created = exchange.getProperty(Exchange.CREATED_TIMESTAMP, timestamp, Date.class);
                DefaultBacklogTracerEventMessage pseudo = new DefaultBacklogTracerEventMessage(backlogTracer.incrementTraceCounter(), created, routeId, null, exchangeId, messageAsXml);
                backlogTracer.traceEvent(pseudo);
            }
            DefaultBacklogTracerEventMessage event = new DefaultBacklogTracerEventMessage(backlogTracer.incrementTraceCounter(), timestamp, routeId, toNode, exchangeId, messageAsXml);
            backlogTracer.traceEvent(event);
        }

        return null;
    }

    @Override
    public void after(Exchange exchange, Object data) throws Exception {
        // noop
    }

    @Override
    public int getOrder() {
        // we want tracer just before calling the processor
        return Ordered.LOWEST - 1;
    }
}
/**
 * Advice to execute the {@link org.apache.camel.processor.interceptor.BacklogDebugger} if enabled.
 */
public static final class BacklogDebuggerAdvice implements CamelInternalProcessorAdvice<StopWatch>, Ordered {

    private final BacklogDebugger backlogDebugger;
    private final Processor target;
    private final ProcessorDefinition<?> definition;
    private final String nodeId;

    public BacklogDebuggerAdvice(BacklogDebugger backlogDebugger, Processor target, ProcessorDefinition<?> definition) {
        this.backlogDebugger = backlogDebugger;
        this.target = target;
        this.definition = definition;
        this.nodeId = definition.getId();
    }

    @Override
    public StopWatch before(Exchange exchange) throws Exception {
        // only engage the debugger when it is enabled and this node is being debugged
        boolean engage = backlogDebugger.isEnabled()
                && (backlogDebugger.hasBreakpoint(nodeId) || backlogDebugger.isSingleStepMode());
        if (!engage) {
            return null;
        }
        StopWatch watch = new StopWatch();
        backlogDebugger.beforeProcess(exchange, target, definition);
        return watch;
    }

    @Override
    public void after(Exchange exchange, StopWatch stopWatch) throws Exception {
        // a watch is only present when the debugger engaged in before
        if (stopWatch != null) {
            backlogDebugger.afterProcess(exchange, target, definition, stopWatch.stop());
        }
    }

    @Override
    public int getOrder() {
        // we want debugger just before calling the processor
        return Ordered.LOWEST;
    }
}
/**
 * Advice to inject new {@link UnitOfWork} to the {@link Exchange} if needed, and as well to ensure
 * the {@link UnitOfWork} is done and stopped.
 */
public static class UnitOfWorkProcessorAdvice implements CamelInternalProcessorAdvice<UnitOfWork> {

    // optional; when set this advice also maintains the route context stack on the UoW
    private final RouteContext routeContext;

    public UnitOfWorkProcessorAdvice(RouteContext routeContext) {
        this.routeContext = routeContext;
    }

    @Override
    public UnitOfWork before(Exchange exchange) throws Exception {
        // if the exchange doesn't have from route id set, then set it if it originated
        // from this unit of work
        if (routeContext != null && exchange.getFromRouteId() == null) {
            String routeId = routeContext.getRoute().idOrCreate(routeContext.getCamelContext().getNodeIdFactory());
            exchange.setFromRouteId(routeId);
        }

        // only return UnitOfWork if we created a new as then its us that handle the lifecycle to done the created UoW
        UnitOfWork created = null;

        if (exchange.getUnitOfWork() == null) {
            // If there is no existing UoW, then we should start one and
            // terminate it once processing is completed for the exchange.
            created = createUnitOfWork(exchange);
            exchange.setUnitOfWork(created);
            created.start();
        }

        // for any exchange we should push/pop route context so we can keep track of which route we are routing
        if (routeContext != null) {
            UnitOfWork existing = exchange.getUnitOfWork();
            if (existing != null) {
                existing.pushRouteContext(routeContext);
            }
        }

        return created;
    }

    @Override
    public void after(Exchange exchange, UnitOfWork uow) throws Exception {
        UnitOfWork existing = exchange.getUnitOfWork();

        // execute done on uow if we created it, and the consumer is not doing it
        if (uow != null) {
            UnitOfWorkHelper.doneUow(uow, exchange);
        }

        // after UoW is done lets pop the route context which must be done on every existing UoW
        if (routeContext != null && existing != null) {
            existing.popRouteContext();
        }
    }

    // factory method; overridden by ChildUnitOfWorkProcessorAdvice to create a child UoW instead
    protected UnitOfWork createUnitOfWork(Exchange exchange) {
        return exchange.getContext().getUnitOfWorkFactory().createUnitOfWork(exchange);
    }
}
/**
 * Advice when an EIP uses the <tt>shareUnitOfWork</tt> functionality.
 */
public static class ChildUnitOfWorkProcessorAdvice extends UnitOfWorkProcessorAdvice {

    // the parent unit of work that newly created units of work are attached to
    private final UnitOfWork parent;

    public ChildUnitOfWorkProcessorAdvice(RouteContext routeContext, UnitOfWork parent) {
        super(routeContext);
        this.parent = parent;
    }

    @Override
    protected UnitOfWork createUnitOfWork(Exchange exchange) {
        // let the parent create a child unit of work to be used
        return parent.createChildUnitOfWork(exchange);
    }
}
/**
 * Advice when an EIP uses the <tt>shareUnitOfWork</tt> functionality.
 */
public static class SubUnitOfWorkProcessorAdvice implements CamelInternalProcessorAdvice<UnitOfWork> {

    @Override
    public UnitOfWork before(Exchange exchange) throws Exception {
        // mark a savepoint on the current unit of work
        UnitOfWork uow = exchange.getUnitOfWork();
        uow.beginSubUnitOfWork(exchange);
        return uow;
    }

    @Override
    public void after(Exchange exchange, UnitOfWork unitOfWork) throws Exception {
        // close the savepoint again
        unitOfWork.endSubUnitOfWork(exchange);
    }
}
/**
 * Advice when Message History has been enabled.
 */
@SuppressWarnings("unchecked")
public static class MessageHistoryAdvice implements CamelInternalProcessorAdvice<MessageHistory> {

    private final MessageHistoryFactory factory;
    private final ProcessorDefinition<?> definition;
    private final String routeId;

    public MessageHistoryAdvice(MessageHistoryFactory factory, ProcessorDefinition<?> definition) {
        this.factory = factory;
        this.definition = definition;
        this.routeId = ProcessorDefinitionHelper.getRouteId(definition);
    }

    @Override
    public MessageHistory before(Exchange exchange) throws Exception {
        // lazily create the history list on the first node the exchange passes through
        List<MessageHistory> history = exchange.getProperty(Exchange.MESSAGE_HISTORY, List.class);
        if (history == null) {
            history = new LinkedList<>();
            exchange.setProperty(Exchange.MESSAGE_HISTORY, history);
        }

        // we may be routing outside a route in an onException or interceptor and if so then grab
        // route id from the exchange UoW state
        String id = this.routeId;
        if (id == null) {
            UnitOfWork uow = exchange.getUnitOfWork();
            if (uow != null && uow.getRouteContext() != null) {
                id = uow.getRouteContext().getRoute().getId();
            }
        }

        MessageHistory node = factory.newMessageHistory(id, definition, new Date());
        history.add(node);
        return node;
    }

    @Override
    public void after(Exchange exchange, MessageHistory history) throws Exception {
        // mark the node as done so elapsed time can be computed
        if (history != null) {
            history.nodeProcessingDone();
        }
    }
}
/**
 * Advice for {@link org.apache.camel.spi.StreamCachingStrategy}
 * <p/>
 * Converts the message body into a re-readable {@link StreamCache} before processing and
 * resets the cache again after processing, so the body can be read multiple times.
 */
public static class StreamCachingAdvice implements CamelInternalProcessorAdvice<StreamCache>, Ordered {

    private final StreamCachingStrategy strategy;

    public StreamCachingAdvice(StreamCachingStrategy strategy) {
        this.strategy = strategy;
    }

    @Override
    public StreamCache before(Exchange exchange) throws Exception {
        // check if body is already cached
        Object body = exchange.getIn().getBody();
        if (body == null) {
            return null;
        } else if (body instanceof StreamCache) {
            StreamCache sc = (StreamCache) body;
            // reset so the cache is ready to be used before processing
            sc.reset();
            return sc;
        }
        // cache the body and if we could do that replace it as the new body
        StreamCache sc = strategy.cache(exchange);
        if (sc != null) {
            exchange.getIn().setBody(sc);
        }
        return sc;
    }

    @Override
    public void after(Exchange exchange, StreamCache sc) throws Exception {
        // the body may have been replaced during routing; fetch the current one
        Object body = exchange.hasOut() ? exchange.getOut().getBody() : exchange.getIn().getBody();
        // instanceof is null-safe, so no separate null check is needed
        if (body instanceof StreamCache) {
            // reset so the cache is ready to be reused after processing
            ((StreamCache) body).reset();
        }
    }

    @Override
    public int getOrder() {
        // we want stream caching first
        return Ordered.HIGHEST;
    }
}
/**
 * Advice for delaying
 */
public static class DelayerAdvice implements CamelInternalProcessorAdvice {

    private final long delay;

    public DelayerAdvice(long delay) {
        this.delay = delay;
    }

    @Override
    public Object before(Exchange exchange) throws Exception {
        try {
            LOG.trace("Sleeping for: {} millis", delay);
            Thread.sleep(delay);
        } catch (InterruptedException ie) {
            LOG.debug("Sleep interrupted");
            // restore the interrupt flag before propagating
            Thread.currentThread().interrupt();
            throw ie;
        }
        return null;
    }

    @Override
    public void after(Exchange exchange, Object data) throws Exception {
        // noop
    }
}
/**
 * Advice for data type contract
 * TODO add declarative validation
 * <p/>
 * Transforms the IN message to the contract's declared input type before processing, and the
 * result message to the declared output type after processing.
 */
public static class ContractAdvice implements CamelInternalProcessorAdvice {

    private final Contract contract;

    public ContractAdvice(Contract contract) {
        this.contract = contract;
    }

    @Override
    public Object before(Exchange exchange) throws Exception {
        // transform the IN message to the declared input type when it differs from the current type
        DataType from = getCurrentType(exchange, Exchange.INPUT_TYPE);
        DataType to = contract.getInputType();
        if (to != null && !to.equals(from)) {
            LOG.debug("Looking for transformer for INPUT: from='{}', to='{}'", from, to);
            doTransform(exchange.getIn(), from, to);
            exchange.setProperty(Exchange.INPUT_TYPE, to);
        }
        return null;
    }

    @Override
    public void after(Exchange exchange, Object data) throws Exception {
        // transform the result message to the declared output type when it differs from the current type
        Message target = exchange.hasOut() ? exchange.getOut() : exchange.getIn();
        DataType from = getCurrentType(exchange, exchange.hasOut() ? Exchange.OUTPUT_TYPE : Exchange.INPUT_TYPE);
        DataType to = contract.getOutputType();
        if (to != null && !to.equals(from)) {
            LOG.debug("Looking for transformer for OUTPUT: from='{}', to='{}'", from, to);
            doTransform(target, from, to);
            exchange.setProperty(exchange.hasOut() ? Exchange.OUTPUT_TYPE : Exchange.INPUT_TYPE, to);
        }
    }

    /**
     * Transforms the message body from the 'from' data type to the 'to' data type by resolving an
     * appropriate {@link Transformer} (or chain of transformers).
     *
     * @throws IllegalArgumentException if no suitable transformer could be resolved
     */
    private void doTransform(Message message, DataType from, DataType to) throws Exception {
        // transform into 'from' type before performing declared transformation
        convertIfRequired(message, from);

        if (applyExactlyMatchedTransformer(message, from, to)) {
            // Found exactly matched transformer. Java-Java transformer is also allowed.
            return;
        } else if (from == null || from.isJavaType()) {
            if (convertIfRequired(message, to)) {
                // Java->Java transformation just relies on TypeConverter if no explicit transformer
                return;
            } else if (from == null) {
                // {undefined}->Other transformation - assuming it's already in expected shape
                return;
            } else if (applyTransformerByToModel(message, from, to)) {
                // Java->Other transformation - found a transformer supports 'to' data model
                return;
            }
        } else {
            // here 'from' is guaranteed non-null and not a Java type
            // (the original 'else if (from != null)' condition was always true at this point)
            if (to.isJavaType()) {
                if (applyTransformerByFromModel(message, from, to)) {
                    // Other->Java transformation - found a transformer that supports 'from' data model
                    return;
                }
            } else if (applyTransformerChain(message, from, to)) {
                // Other->Other transformation - found a transformer chain
                return;
            }
        }

        throw new IllegalArgumentException("No Transformer found for [from='" + from + "', to='" + to + "']");
    }

    // converts the body to the given Java type via the TypeConverter; returns true if a conversion happened
    private boolean convertIfRequired(Message message, DataType type) throws Exception {
        // TODO for better performance it may be better to add TypeConverterTransformer
        // into transformer registry automatically to avoid unnecessary scan in transformer registry
        CamelContext context = message.getExchange().getContext();
        if (type != null && type.isJavaType() && type.getName() != null) {
            Class<?> typeJava = getClazz(type.getName(), context);
            Object body = message.getBody();
            // guard against NPE on a null body; there is nothing to convert then
            if (body != null && !typeJava.isAssignableFrom(body.getClass())) {
                LOG.debug("Converting to '{}'", typeJava.getName());
                message.setBody(message.getMandatoryBody(typeJava));
                return true;
            }
        }
        return false;
    }

    private boolean applyTransformer(Transformer transformer, Message message, DataType from, DataType to) throws Exception {
        if (transformer != null) {
            LOG.debug("Applying transformer: from='{}', to='{}', transformer='{}'", from, to, transformer);
            transformer.transform(message, from, to);
            return true;
        }
        return false;
    }

    // resolves a transformer registered for the exact (from, to) pair
    private boolean applyExactlyMatchedTransformer(Message message, DataType from, DataType to) throws Exception {
        Transformer transformer = message.getExchange().getContext().resolveTransformer(from, to);
        return applyTransformer(transformer, message, from, to);
    }

    // resolves a transformer by the 'to' side data model only
    private boolean applyTransformerByToModel(Message message, DataType from, DataType to) throws Exception {
        Transformer transformer = message.getExchange().getContext().resolveTransformer(to.getModel());
        return applyTransformer(transformer, message, from, to);
    }

    // resolves a transformer by the 'from' side data model only
    private boolean applyTransformerByFromModel(Message message, DataType from, DataType to) throws Exception {
        Transformer transformer = message.getExchange().getContext().resolveTransformer(from.getModel());
        return applyTransformer(transformer, message, from, to);
    }

    // applies from->Java followed by Java->to when both individual transformers exist
    private boolean applyTransformerChain(Message message, DataType from, DataType to) throws Exception {
        CamelContext context = message.getExchange().getContext();
        Transformer fromTransformer = context.resolveTransformer(from.getModel());
        Transformer toTransformer = context.resolveTransformer(to.getModel());
        if (fromTransformer != null && toTransformer != null) {
            LOG.debug("Applying transformer 1/2: from='{}', to='{}', transformer='{}'", from, to, fromTransformer);
            fromTransformer.transform(message, from, new DataType(Object.class));
            LOG.debug("Applying transformer 2/2: from='{}', to='{}', transformer='{}'", from, to, toTransformer);
            toTransformer.transform(message, new DataType(Object.class), to);
            return true;
        }
        return false;
    }

    private Class<?> getClazz(String type, CamelContext context) throws Exception {
        return context.getClassResolver().resolveMandatoryClass(type);
    }

    // returns the DataType stored in the exchange property, promoting a String value to a DataType
    private DataType getCurrentType(Exchange exchange, String name) {
        Object prop = exchange.getProperty(name);
        if (prop instanceof DataType) {
            return (DataType) prop;
        } else if (prop instanceof String) {
            DataType answer = new DataType((String) prop);
            exchange.setProperty(name, answer);
            return answer;
        }
        return null;
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hyracks.dataflow.std.misc;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.nio.ByteBuffer;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.dataflow.ActivityId;
import org.apache.hyracks.api.dataflow.IOperatorNodePushable;
import org.apache.hyracks.api.dataflow.TaskId;
import org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider;
import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.job.IOperatorDescriptorRegistry;
import org.apache.hyracks.api.job.JobId;
import org.apache.hyracks.data.std.primitive.ByteArrayPointable;
import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
import org.apache.hyracks.dataflow.common.data.accessors.FrameTupleReference;
import org.apache.hyracks.dataflow.common.data.marshalling.ByteArraySerializerDeserializer;
import org.apache.hyracks.dataflow.common.data.marshalling.DoubleArraySerializerDeserializer;
import org.apache.hyracks.dataflow.common.data.marshalling.IntArraySerializerDeserializer;
import org.apache.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
import org.apache.hyracks.dataflow.common.data.partition.range.RangeMap;
import org.apache.hyracks.dataflow.common.utils.TaskUtil;
import org.apache.hyracks.dataflow.std.base.AbstractActivityNode;
import org.apache.hyracks.dataflow.std.base.AbstractForwardOperatorDescriptor;
import org.apache.hyracks.dataflow.std.base.AbstractStateObject;
import org.apache.hyracks.dataflow.std.base.AbstractUnaryInputSinkOperatorNodePushable;
import org.apache.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
// TODO(ali): forward operator should probably be moved to asterix layer
public class SortForwardOperatorDescriptor extends AbstractForwardOperatorDescriptor {

    private static final long serialVersionUID = 1L;

    /**
     * Forward operator whose side data is a {@link RangeMap}. The side-data activity consumes and deserializes
     * the range map; the forward activity deposits it into the task context under {@code sideDataKey} and then
     * forwards data frames unchanged.
     *
     * @param spec used to create the operator id.
     * @param sideDataKey the unique key to store the range map in the shared map & transfer it to partitioner.
     * @param outputRecordDescriptor the output schema of this operator.
     */
    public SortForwardOperatorDescriptor(IOperatorDescriptorRegistry spec, String sideDataKey,
            RecordDescriptor outputRecordDescriptor) {
        super(spec, sideDataKey, outputRecordDescriptor);
    }

    @Override
    public AbstractActivityNode createForwardDataActivity() {
        // activity that forwards data frames downstream once the range map is available
        return new ForwardDataActivity(new ActivityId(odId, FORWARD_DATA_ACTIVITY_ID));
    }

    @Override
    public AbstractActivityNode createSideDataActivity() {
        // activity that consumes and deserializes the range map (the side data)
        return new RangeMapReaderActivity(new ActivityId(odId, SIDE_DATA_ACTIVITY_ID));
    }

    /**
     * Internal class that is used to transfer the {@link RangeMap} object between activities in different ctx but in
     * the same NC, from {@link RangeMapReaderActivity} to {@link ForwardDataActivity}. These activities will share
     * the {@link org.apache.hyracks.api.job.IOperatorEnvironment} of the {@link org.apache.hyracks.control.nc.Joblet}
     * where the range map will be stored.
     */
    private class RangeMapState extends AbstractStateObject {
        // the deserialized range map produced by the reader activity, consumed by the forward activity
        RangeMap rangeMap;

        private RangeMapState(JobId jobId, TaskId stateObjectKey) {
            super(jobId, stateObjectKey);
        }
    }

    /**
     * Range map reader activity. {@see {@link RangeMapReaderActivityNodePushable}}
     */
    private class RangeMapReaderActivity extends AbstractActivityNode {
        private static final long serialVersionUID = 1L;

        private RangeMapReaderActivity(ActivityId activityId) {
            super(activityId);
        }

        @Override
        public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
                IRecordDescriptorProvider recordDescProvider, final int partition, int nPartitions)
                throws HyracksDataException {
            RecordDescriptor inputRecordDescriptor = recordDescProvider.getInputRecordDescriptor(getActivityId(), 0);
            return new RangeMapReaderActivityNodePushable(ctx, inputRecordDescriptor, getActivityId(), partition);
        }
    }

    /**
     * Forward data activity. {@see {@link ForwardDataActivityNodePushable}}
     */
    private class ForwardDataActivity extends AbstractActivityNode {
        private static final long serialVersionUID = 1L;

        private ForwardDataActivity(ActivityId activityId) {
            super(activityId);
        }

        @Override
        public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
                IRecordDescriptorProvider recordDescProvider, final int partition, int nPartitions)
                throws HyracksDataException {
            return new ForwardDataActivityNodePushable(ctx, partition);
        }
    }

    /**
     * Sink that consumes the single-tuple frame carrying the serialized range map and deserializes its
     * components (field count, split values, split value end offsets, percentages). On close, the reconstructed
     * {@link RangeMap} is stored as a task state object so {@link ForwardDataActivityNodePushable} can retrieve it.
     */
    private class RangeMapReaderActivityNodePushable extends AbstractUnaryInputSinkOperatorNodePushable {
        private final FrameTupleAccessor frameTupleAccessor;
        private final FrameTupleReference frameTupleReference;
        private final IHyracksTaskContext ctx;
        private final ActivityId activityId;
        private final int partition;
        // -1 until a range map frame has been received; checked in close() to detect a missing range map
        private int numFields;
        private byte[] splitValues;
        private int[] splitValuesEndOffsets;
        private double[] percentages;

        private RangeMapReaderActivityNodePushable(IHyracksTaskContext ctx, RecordDescriptor inputRecordDescriptor,
                ActivityId activityId, int partition) {
            this.ctx = ctx;
            this.frameTupleAccessor = new FrameTupleAccessor(inputRecordDescriptor);
            this.frameTupleReference = new FrameTupleReference();
            this.activityId = activityId;
            this.partition = partition;
            this.numFields = -1;
        }

        @Override
        public void open() throws HyracksDataException {
            // this activity does not have a consumer to open (it's a sink), and nothing to initialize
        }

        @Override
        public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
            // "buffer" contains the serialized range map sent by a range map computer function.
            // deserialize the range map
            frameTupleAccessor.reset(buffer);
            if (frameTupleAccessor.getTupleCount() != 1) {
                throw HyracksDataException.create(ErrorCode.ONE_TUPLE_RANGEMAP_EXPECTED, sourceLoc);
            }
            frameTupleReference.reset(frameTupleAccessor, 0);
            byte[] rangeMap = frameTupleReference.getFieldData(0);
            int offset = frameTupleReference.getFieldStart(0);
            int length = frameTupleReference.getFieldLength(0);

            // the first byte of the field is skipped (offset + 1, length - 1); presumably a type
            // tag preceding the byte-array payload -- NOTE(review): confirm against the serializer
            ByteArrayPointable pointable = new ByteArrayPointable();
            pointable.set(rangeMap, offset + 1, length - 1);
            ByteArrayInputStream rangeMapIn = new ByteArrayInputStream(pointable.getByteArray(),
                    pointable.getContentStartOffset(), pointable.getContentLength());
            DataInputStream dataInputStream = new DataInputStream(rangeMapIn);
            numFields = IntegerSerializerDeserializer.read(dataInputStream);
            splitValues = ByteArraySerializerDeserializer.read(dataInputStream);
            splitValuesEndOffsets = IntArraySerializerDeserializer.read(dataInputStream);
            percentages = DoubleArraySerializerDeserializer.read(dataInputStream);
        }

        @Override
        public void fail() throws HyracksDataException {
            // it's a sink node pushable, nothing to fail
        }

        @Override
        public void close() throws HyracksDataException {
            // expecting a range map
            if (numFields <= 0 || splitValues == null || splitValuesEndOffsets == null) {
                throw HyracksDataException.create(ErrorCode.NO_RANGEMAP_PRODUCED, sourceLoc);
            }
            // store the range map in the state object of ctx so that next activity (forward) could retrieve it
            TaskId rangeMapReaderTaskId = new TaskId(activityId, partition);
            RangeMapState rangeMapState = new RangeMapState(ctx.getJobletContext().getJobId(), rangeMapReaderTaskId);
            rangeMapState.rangeMap = new RangeMap(numFields, splitValues, splitValuesEndOffsets, percentages);
            ctx.setStateObject(rangeMapState);
        }
    }

    /**
     * Pass-through operator that, on open, retrieves the range map stored by the reader activity and
     * publishes it in the task context before forwarding frames unchanged.
     */
    private class ForwardDataActivityNodePushable extends AbstractUnaryInputUnaryOutputOperatorNodePushable {
        private final IHyracksTaskContext ctx;
        private final int partition;

        /**
         * @param ctx used to retrieve the range map stored by the range reader activity.
         * @param partition used to create the same task id used by the range reader activity for storing the range.
         */
        private ForwardDataActivityNodePushable(IHyracksTaskContext ctx, int partition) {
            this.ctx = ctx;
            this.partition = partition;
        }

        @Override
        public void open() throws HyracksDataException {
            // retrieve the range map from the state object (previous activity should have already stored it)
            // then deposit it into the ctx so that MToN-partition can pick it up
            Object stateObjKey = new TaskId(new ActivityId(odId, SIDE_DATA_ACTIVITY_ID), partition);
            RangeMapState rangeMapState = (RangeMapState) ctx.getStateObject(stateObjKey);
            TaskUtil.put(sideDataKey, rangeMapState.rangeMap, ctx);
            writer.open();
        }

        @Override
        public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
            // data frames pass through unchanged
            writer.nextFrame(buffer);
        }

        @Override
        public void fail() throws HyracksDataException {
            writer.fail();
        }

        @Override
        public void close() throws HyracksDataException {
            writer.close();
        }

        @Override
        public void flush() throws HyracksDataException {
            writer.flush();
        }
    }
}
| |
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.io;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.HFileScanner;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@Category(SmallTests.class)
public class TestHalfStoreFileReader {
/**
* Test the scanner and reseek of a half hfile scanner. The scanner API
* demands that seekTo and reseekTo() only return < 0 if the key lies
* before the start of the file (with no position on the scanner). Returning
* 0 if perfect match (rare), and return > 1 if we got an imperfect match.
*
* The latter case being the most common, we should generally be returning 1,
* and if we do, there may or may not be a 'next' in the scanner/file.
*
* A bug in the half file scanner was returning -1 at the end of the bottom
* half, and that was causing the infrastructure above to go null causing NPEs
* and other problems. This test reproduces that failure, and also tests
* both the bottom and top of the file while we are at it.
*
* @throws IOException
*/
@Test
public void testHalfScanAndReseek() throws IOException {
    // write a small hfile of generated keys
    HBaseTestingUtility test_util = new HBaseTestingUtility();
    String root_dir = test_util.getDataTestDir("TestHalfStoreFile").toString();
    Path p = new Path(root_dir, "test");

    Configuration conf = test_util.getConfiguration();
    FileSystem fs = FileSystem.get(conf);
    CacheConfig cacheConf = new CacheConfig(conf);

    HFile.Writer w = HFile.getWriterFactory(conf, cacheConf)
        .withPath(fs, p)
        .withBlockSize(1024)
        .withComparator(KeyValue.KEY_COMPARATOR)
        .create();

    // write some things.
    List<KeyValue> items = genSomeKeys();
    for (KeyValue kv : items) {
        w.append(kv);
    }
    w.close();

    // split the file at its midkey and exercise both halves
    HFile.Reader r = HFile.createReader(fs, p, cacheConf);
    r.loadFileInfo();
    byte [] midkey = r.midkey();
    KeyValue midKV = KeyValue.createKeyValueFromKey(midkey);
    midkey = midKV.getRow();

    //System.out.println("midkey: " + midKV + " or: " + Bytes.toStringBinary(midkey));

    Reference bottom = new Reference(midkey, Reference.Range.bottom);
    doTestOfScanAndReseek(p, fs, bottom, cacheConf);

    Reference top = new Reference(midkey, Reference.Range.top);
    doTestOfScanAndReseek(p, fs, top, cacheConf);

    r.close();
}
/**
 * Scans the given half of the file and verifies that reseekTo on the last key of each
 * column always reports an imperfect match (return value &gt; 0), including once more
 * after the scanner is exhausted.
 */
private void doTestOfScanAndReseek(Path p, FileSystem fs, Reference ref,
    CacheConfig cacheConf)
    throws IOException {
  final HalfStoreFileReader reader =
      new HalfStoreFileReader(fs, p, cacheConf, ref, DataBlockEncoding.NONE);
  reader.loadFileInfo();
  final HFileScanner scanner = reader.getScanner(false, false);

  scanner.seekTo();
  KeyValue current;
  do {
    current = scanner.getKeyValue();
    int result = scanner.reseekTo(getLastOnCol(current).getKey());
    assertTrue("reseek to returned: " + result, result > 0);
    //System.out.println(current + ": " + result);
  } while (scanner.next());

  // one final reseek past the end must still report an imperfect match
  int result = scanner.reseekTo(getLastOnCol(current).getKey());
  //System.out.println("Last reseek: " + result);
  assertTrue(result > 0);

  reader.close(true);
}
// Tests the scanner on an HFile that is backed by HalfStoreFiles
@Test
public void testHalfScanner() throws IOException {
  HBaseTestingUtility test_util = new HBaseTestingUtility();
  String root_dir = test_util.getDataTestDir("TestHalfStoreFileScanBefore").toString();
  Path p = new Path(root_dir, "test");
  Configuration conf = test_util.getConfiguration();
  FileSystem fs = FileSystem.get(conf);
  CacheConfig cacheConf = new CacheConfig(conf);
  HFile.Writer w = HFile.getWriterFactory(conf, cacheConf)
      .withPath(fs, p)
      .withBlockSize(1024)
      .withComparator(KeyValue.KEY_COMPARATOR)
      .create();
  // write some things.
  List<KeyValue> items = genSomeKeys();
  try {
    for (KeyValue kv : items) {
      w.append(kv);
    }
  } finally {
    w.close();
  }
  // The full-file reader is only needed to compute the midkey; close it as
  // soon as we have it. Previously this reader was never closed at all.
  byte[] midkey;
  KeyValue midKV;
  HFile.Reader r = HFile.createReader(fs, p, cacheConf);
  try {
    r.loadFileInfo();
    midkey = r.midkey();
    midKV = KeyValue.createKeyValueFromKey(midkey);
    midkey = midKV.getRow();
  } finally {
    r.close();
  }
  Reference bottom = new Reference(midkey, Reference.Range.bottom);
  Reference top = new Reference(midkey, Reference.Range.top);
  // Ugly code to get the item before the midkey
  KeyValue beforeMidKey = null;
  for (KeyValue item : items) {
    if (item.equals(midKV)) {
      break;
    }
    beforeMidKey = item;
  }
  // Seek on the splitKey, should be in top, not in bottom
  KeyValue foundKeyValue = doTestOfSeekBefore(p, fs, bottom, midKV, cacheConf);
  assertEquals(beforeMidKey, foundKeyValue);
  // Seek to the last thing: should be the penultimate on the top, the one before the midkey on the bottom.
  foundKeyValue = doTestOfSeekBefore(p, fs, top, items.get(items.size() - 1), cacheConf);
  assertEquals(items.get(items.size() - 2), foundKeyValue);
  foundKeyValue = doTestOfSeekBefore(p, fs, bottom, items.get(items.size() - 1), cacheConf);
  assertEquals(beforeMidKey, foundKeyValue);
  // Try and seek before something that is in the bottom.
  foundKeyValue = doTestOfSeekBefore(p, fs, top, items.get(0), cacheConf);
  assertNull(foundKeyValue);
  // Try and seek before the first thing.
  foundKeyValue = doTestOfSeekBefore(p, fs, bottom, items.get(0), cacheConf);
  assertNull(foundKeyValue);
  // Try and seek before the second thing in the top and bottom.
  foundKeyValue = doTestOfSeekBefore(p, fs, top, items.get(1), cacheConf);
  assertNull(foundKeyValue);
  foundKeyValue = doTestOfSeekBefore(p, fs, bottom, items.get(1), cacheConf);
  assertEquals(items.get(0), foundKeyValue);
  // Try to seek before the splitKey in the top file
  foundKeyValue = doTestOfSeekBefore(p, fs, top, midKV, cacheConf);
  assertNull(foundKeyValue);
}
/**
 * Opens a HalfStoreFileReader over the half of {@code p} described by
 * {@code bottom}, seeks before {@code seekBefore}'s key, and returns the
 * KeyValue the scanner lands on — null when nothing precedes it in that half.
 */
// NOTE(review): unlike doTestOfScanAndReseek, the halfreader is never closed
// here — looks like a reader leak per invocation. Confirm whether closing
// before returning the scanner-backed KeyValue is safe, then close it.
private KeyValue doTestOfSeekBefore(Path p, FileSystem fs, Reference bottom, KeyValue seekBefore,
CacheConfig cacheConfig)
throws IOException {
final HalfStoreFileReader halfreader = new HalfStoreFileReader(fs, p,
cacheConfig, bottom, DataBlockEncoding.NONE);
halfreader.loadFileInfo();
final HFileScanner scanner = halfreader.getScanner(false, false);
// Position just before the given key and report where we landed.
scanner.seekBefore(seekBefore.getKey());
return scanner.getKeyValue();
}
/**
 * Builds the largest possible KeyValue on the same row/family/qualifier as
 * {@code curr}, used as a reseek target that never matches exactly.
 */
private KeyValue getLastOnCol(KeyValue curr) {
  byte[] buf = curr.getBuffer();
  return KeyValue.createLastOnRow(
      buf, curr.getRowOffset(), curr.getRowLength(),
      buf, curr.getFamilyOffset(), curr.getFamilyLength(),
      buf, curr.getQualifierOffset(), curr.getQualifierLength());
}
// Number of KeyValues written to the test file by genSomeKeys().
static final int SIZE = 1000;
// Shorthand for converting a String to its byte representation.
static byte[] _b(String s) {
return Bytes.toBytes(s);
}
/**
 * Generates SIZE KeyValues with ascending rows "row_0000".."row_0999", a
 * fixed family/qualifier/value and a constant timestamp of 1000.
 */
List<KeyValue> genSomeKeys() {
  List<KeyValue> keys = new ArrayList<KeyValue>(SIZE);
  for (int idx = 0; idx < SIZE; idx++) {
    keys.add(new KeyValue(
        _b(String.format("row_%04d", idx)),
        _b("family"),
        _b("qualifier"),
        1000, // timestamp
        _b("value")));
  }
  return keys;
}
}
| |
/*
* Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
*/
package com.intellij.codeInsight.completion;
import com.intellij.codeInsight.ExpectedTypeInfo;
import com.intellij.codeInsight.ExpectedTypesProvider;
import com.intellij.codeInsight.TailType;
import com.intellij.codeInsight.completion.scope.JavaCompletionProcessor;
import com.intellij.codeInsight.daemon.impl.quickfix.ImportClassFix;
import com.intellij.codeInsight.lookup.*;
import com.intellij.featureStatistics.FeatureUsageTracker;
import com.intellij.icons.AllIcons;
import com.intellij.lang.LangBundle;
import com.intellij.lang.java.JavaLanguage;
import com.intellij.openapi.actionSystem.IdeActions;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.openapi.editor.highlighter.HighlighterIterator;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleUtilCore;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ModuleRootManager;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.patterns.ElementPattern;
import com.intellij.patterns.PatternCondition;
import com.intellij.patterns.PsiJavaElementPattern;
import com.intellij.patterns.PsiNameValuePairPattern;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.filters.*;
import com.intellij.psi.filters.classes.AnnotationTypeFilter;
import com.intellij.psi.filters.classes.AssignableFromContextFilter;
import com.intellij.psi.filters.element.ModifierFilter;
import com.intellij.psi.filters.getters.ExpectedTypesGetter;
import com.intellij.psi.filters.getters.JavaMembersGetter;
import com.intellij.psi.impl.java.stubs.index.JavaAutoModuleNameIndex;
import com.intellij.psi.impl.java.stubs.index.JavaModuleNameIndex;
import com.intellij.psi.impl.source.PsiJavaCodeReferenceElementImpl;
import com.intellij.psi.impl.source.PsiLabelReference;
import com.intellij.psi.impl.source.tree.ElementType;
import com.intellij.psi.scope.ElementClassFilter;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.ProjectScope;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.psi.util.TypeConversionUtil;
import com.intellij.util.Consumer;
import com.intellij.util.DocumentUtil;
import com.intellij.util.ProcessingContext;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.MultiMap;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
import static com.intellij.patterns.PsiJavaPatterns.*;
import static com.intellij.util.ObjectUtils.assertNotNull;
/**
* @author peter
*/
public class JavaCompletionContributor extends CompletionContributor {
private static final Logger LOG = Logger.getInstance("#com.intellij.codeInsight.completion.JavaCompletionContributor");
// Reference name directly following '@' in an annotation.
static final ElementPattern<PsiElement> ANNOTATION_NAME =
psiElement().withParents(PsiJavaCodeReferenceElement.class, PsiAnnotation.class).afterLeaf("@");
// A reference right after '.' starting an expression statement; completion is suppressed there.
private static final PsiJavaElementPattern.Capture<PsiElement> UNEXPECTED_REFERENCE_AFTER_DOT =
psiElement().afterLeaf(".").insideStarting(psiExpressionStatement());
// A name=value pair directly inside an annotation's parameter list.
private static final PsiNameValuePairPattern NAME_VALUE_PAIR =
psiNameValuePair().withSuperParent(2, psiElement(PsiAnnotation.class));
// The attribute-name position of an annotation: either the identifier of a
// name-value pair, or a reference right after the opening '('.
private static final ElementPattern<PsiElement> ANNOTATION_ATTRIBUTE_NAME =
or(psiElement(PsiIdentifier.class).withParent(NAME_VALUE_PAIR),
psiElement().afterLeaf("(").withParent(psiReferenceExpression().withParent(NAME_VALUE_PAIR)));
// A case label of a switch whose selector expression has an enum type.
private static final ElementPattern SWITCH_LABEL =
psiElement().withSuperParent(2, psiElement(PsiSwitchLabelStatement.class).withSuperParent(2,
psiElement(PsiSwitchStatement.class).with(new PatternCondition<PsiSwitchStatement>("enumExpressionType") {
@Override
public boolean accepts(@NotNull PsiSwitchStatement psiSwitchStatement, ProcessingContext context) {
PsiExpression expression = psiSwitchStatement.getExpression();
if (expression == null) return false;
PsiClass aClass = PsiUtil.resolveClassInClassTypeOnly(expression.getType());
return aClass != null && aClass.isEnum();
}
})));
// Position right after a numeric literal; completion is suppressed there.
private static final ElementPattern<PsiElement> AFTER_NUMBER_LITERAL =
psiElement().afterLeaf(psiElement().withElementType(
elementType().oneOf(JavaTokenType.DOUBLE_LITERAL, JavaTokenType.LONG_LITERAL, JavaTokenType.FLOAT_LITERAL, JavaTokenType.INTEGER_LITERAL)));
// The reference inside an import statement (static or regular).
private static final ElementPattern<PsiElement> IMPORT_REFERENCE =
psiElement().withParent(psiElement(PsiJavaCodeReferenceElement.class).withParent(PsiImportStatementBase.class));
// Position right after the closing '}' of a try block, where 'catch'/'finally' may follow.
private static final ElementPattern<PsiElement> CATCH_OR_FINALLY = psiElement().afterLeaf(
psiElement().withText("}").withParent(
psiElement(PsiCodeBlock.class).afterLeaf(PsiKeyword.TRY)));
// Any position lexically inside a constructor body.
private static final ElementPattern<PsiElement> INSIDE_CONSTRUCTOR = psiElement().inside(psiMethod().constructor(true));
/**
 * Chooses which declaration kinds a Java reference at {@code position} may
 * complete to, based on the syntactic context. Returns null when reference
 * completion should be suppressed entirely, and TrueFilter.INSTANCE when
 * anything is allowed. The checks go from most to least specific; the order
 * matters — do not reorder them.
 */
@Nullable
public static ElementFilter getReferenceFilter(PsiElement position) {
// Completion after extends in interface, type parameter and implements in class
PsiClass containingClass = PsiTreeUtil.getParentOfType(
position, PsiClass.class, false, PsiCodeBlock.class, PsiMethod.class, PsiExpressionList.class, PsiVariable.class, PsiAnnotation.class);
if (containingClass != null && psiElement().afterLeaf(PsiKeyword.EXTENDS, PsiKeyword.IMPLEMENTS, ",", "&").accepts(position)) {
// Only classes not already in the context's own hierarchy make sense here.
return new AndFilter(ElementClassFilter.CLASS, new NotFilter(new AssignableFromContextFilter()));
}
if (ANNOTATION_NAME.accepts(position)) {
return new AnnotationTypeFilter();
}
// Declaration starts, parameter lists and annotation names accept types and packages.
if (JavaKeywordCompletion.isDeclarationStart(position) ||
JavaKeywordCompletion.isInsideParameterList(position) ||
isInsideAnnotationName(position)) {
return new OrFilter(ElementClassFilter.CLASS, ElementClassFilter.PACKAGE);
}
if (psiElement().afterLeaf(PsiKeyword.INSTANCEOF).accepts(position)) {
return new ElementExtractorFilter(ElementClassFilter.CLASS);
}
if (JavaKeywordCompletion.VARIABLE_AFTER_FINAL.accepts(position)) {
return ElementClassFilter.CLASS;
}
// Contexts where reference completion is suppressed altogether.
if (CATCH_OR_FINALLY.accepts(position) ||
JavaKeywordCompletion.START_SWITCH.accepts(position) ||
JavaKeywordCompletion.isInstanceofPlace(position) ||
JavaKeywordCompletion.isAfterPrimitiveOrArrayType(position)) {
return null;
}
if (JavaKeywordCompletion.START_FOR.withParents(PsiJavaCodeReferenceElement.class, PsiExpressionStatement.class, PsiForStatement.class).accepts(position)) {
return new OrFilter(ElementClassFilter.CLASS, ElementClassFilter.VARIABLE);
}
if (JavaSmartCompletionContributor.AFTER_NEW.accepts(position)) {
return ElementClassFilter.CLASS;
}
if (psiElement().inside(PsiReferenceParameterList.class).accepts(position)) {
return ElementClassFilter.CLASS;
}
if (psiElement().inside(PsiAnnotationParameterList.class).accepts(position)) {
return createAnnotationFilter(position);
}
// A variable must not be suggested inside its own initializer.
PsiVariable var = PsiTreeUtil.getParentOfType(position, PsiVariable.class, false, PsiClass.class);
if (var != null && PsiTreeUtil.isAncestor(var.getInitializer(), position, false)) {
return new ExcludeFilter(var);
}
// Enum switch labels complete only to enum constants.
if (SWITCH_LABEL.accepts(position)) {
return new ClassFilter(PsiField.class) {
@Override
public boolean isAcceptable(Object element, PsiElement context) {
return element instanceof PsiEnumConstant;
}
};
}
// The foreach variable is excluded inside the iterated expression.
PsiForeachStatement loop = PsiTreeUtil.getParentOfType(position, PsiForeachStatement.class);
if (loop != null && PsiTreeUtil.isAncestor(loop.getIteratedValue(), position, false)) {
return new ExcludeFilter(loop.getIterationParameter());
}
// Module-info statements: exports/opens take packages; uses/provides take classes too.
if (PsiTreeUtil.getParentOfType(position, PsiPackageAccessibilityStatement.class) != null) {
return applyScopeFilter(ElementClassFilter.PACKAGE, position);
}
if (PsiTreeUtil.getParentOfType(position, PsiUsesStatement.class, PsiProvidesStatement.class) != null) {
ElementFilter filter = new OrFilter(ElementClassFilter.CLASS, ElementClassFilter.PACKAGE);
if (PsiTreeUtil.getParentOfType(position, PsiReferenceList.class) != null) {
filter = applyScopeFilter(filter, position);
}
return filter;
}
return TrueFilter.INSTANCE;
}
/**
 * Returns true when {@code position} lies inside the name reference of an
 * annotation, searching no further up the tree than the enclosing member.
 */
private static boolean isInsideAnnotationName(PsiElement position) {
  PsiAnnotation annotation = PsiTreeUtil.getParentOfType(position, PsiAnnotation.class, true, PsiMember.class);
  if (annotation == null) {
    return false;
  }
  return PsiTreeUtil.isAncestor(annotation.getNameReferenceElement(), position, true);
}
/**
 * Builds the filter for values inside an annotation's parameter list:
 * classes, packages, static final fields, and — only at the very start of a
 * name=value pair — the annotation's own attribute methods.
 */
private static ElementFilter createAnnotationFilter(PsiElement position) {
  List<ElementFilter> filters = ContainerUtil.newArrayList();
  filters.add(ElementClassFilter.CLASS);
  filters.add(ElementClassFilter.PACKAGE);
  filters.add(new AndFilter(new ClassFilter(PsiField.class), new ModifierFilter(PsiModifier.STATIC, PsiModifier.FINAL)));
  boolean atPairStart = psiElement().insideStarting(psiNameValuePair()).accepts(position);
  if (atPairStart) {
    filters.add(new ClassFilter(PsiAnnotationMethod.class) {
      @Override
      public boolean isAcceptable(Object element, PsiElement context) {
        return element instanceof PsiAnnotationMethod && PsiUtil.isAnnotationMethod((PsiElement)element);
      }
    });
  }
  return new OrFilter(filters.toArray(ElementFilter.EMPTY_ARRAY));
}
/**
 * Restricts {@code filter} to the module scope of the module containing
 * {@code position}; returns the filter unchanged when no module is found.
 */
public static ElementFilter applyScopeFilter(ElementFilter filter, PsiElement position) {
  Module module = ModuleUtilCore.findModuleForPsiElement(position);
  if (module == null) {
    return filter;
  }
  return new AndFilter(filter, new SearchScopeFilter(module.getModuleScope()));
}
/**
 * Main entry point for basic completion in Java contexts. Bails out early
 * for non-basic completion, non-Java positions and suppressed spots, then
 * contributes, in a deliberate order: annotation attribute names, wildcard
 * bounds, identifier variants, reference variants, word/literal completion,
 * generated members, class names, functional expressions, global static
 * members and module references. The ordering is significant — do not
 * reorder the contribution steps.
 */
@Override
public void fillCompletionVariants(@NotNull final CompletionParameters parameters, @NotNull final CompletionResultSet _result) {
if (parameters.getCompletionType() != CompletionType.BASIC) {
return;
}
final PsiElement position = parameters.getPosition();
if (!isInJavaContext(position)) {
return;
}
// No suggestions right after a number literal or after a stray '.'.
if (AFTER_NUMBER_LITERAL.accepts(position) || UNEXPECTED_REFERENCE_AFTER_DOT.accepts(position)) {
_result.stopHere();
return;
}
final CompletionResultSet result = JavaCompletionSorting.addJavaSorting(parameters, _result);
JavaCompletionSession session = new JavaCompletionSession(result);
// Annotation attribute names are a closed world: complete them and stop.
if (ANNOTATION_ATTRIBUTE_NAME.accepts(position) && !JavaKeywordCompletion.isAfterPrimitiveOrArrayType(position)) {
addExpectedTypeMembers(parameters, result);
JavaKeywordCompletion.addPrimitiveTypes(result, position, session);
completeAnnotationAttributeName(result, position, parameters);
result.stopHere();
return;
}
PrefixMatcher matcher = result.getPrefixMatcher();
PsiElement parent = position.getParent();
if (new JavaKeywordCompletion(parameters, session).addWildcardExtendsSuper(result, position)) {
return;
}
if (position instanceof PsiIdentifier) {
addIdentifierVariants(parameters, position, result, session, matcher);
}
MultiMap<CompletionResultSet, LookupElement> referenceVariants = addReferenceVariants(parameters, result, session);
// Remember the words already suggested so literal word completion can skip them.
Set<String> usedWords = ContainerUtil.map2Set(referenceVariants.values(), LookupElement::getLookupString);
for (Map.Entry<CompletionResultSet, Collection<LookupElement>> entry : referenceVariants.entrySet()) {
session.registerBatchItems(entry.getKey(), entry.getValue());
}
session.flushBatchItems();
// Inside string literals, offer plain words when there is no hard reference.
if (psiElement().inside(PsiLiteralExpression.class).accepts(position)) {
PsiReference reference = position.getContainingFile().findReferenceAt(parameters.getOffset());
if (reference == null || reference.isSoft()) {
WordCompletionContributor.addWordCompletionVariants(result, parameters, usedWords);
}
}
if (position instanceof PsiIdentifier) {
JavaGenerateMemberCompletionContributor.fillCompletionVariants(parameters, result);
}
addAllClasses(parameters, result, session);
if (position instanceof PsiIdentifier) {
FunctionalExpressionCompletionProvider.addFunctionalVariants(parameters, false, true, result.getPrefixMatcher(), result);
}
// Extended (second) completion on an unqualified reference also pulls in
// accessible static members from everywhere.
if (position instanceof PsiIdentifier &&
parent instanceof PsiReferenceExpression &&
!((PsiReferenceExpression)parent).isQualified() &&
parameters.isExtendedCompletion() &&
StringUtil.isNotEmpty(matcher.getPrefix())) {
new JavaStaticMemberProcessor(parameters).processStaticMethodsGlobally(matcher, result);
}
if (parent instanceof PsiJavaModuleReferenceElement) {
addModuleReferences(parent, parameters.getOriginalFile(), result);
}
result.stopHere();
}
/**
 * Contributes suggestions for identifier positions: the fast variants in one
 * batch first, then (after 'new') inheritor constructors, then smart casts.
 * The flushBatchItems() call must precede the inheritor generation so batch
 * ordering is preserved — do not move it.
 */
private static void addIdentifierVariants(@NotNull CompletionParameters parameters,
PsiElement position,
CompletionResultSet result,
JavaCompletionSession session, PrefixMatcher matcher) {
session.registerBatchItems(result, getFastIdentifierVariants(parameters, position, matcher, position.getParent(), session));
if (JavaSmartCompletionContributor.AFTER_NEW.accepts(position)) {
session.flushBatchItems();
new JavaInheritorsGetter(ConstructorInsertHandler.BASIC_INSTANCE).generateVariants(parameters, matcher, lookupElement -> {
// Skip array-of-primitive types: the keyword contributor already offers those.
if (!isSuggestedByKeywordCompletion(lookupElement)) {
session.addClassItem(lookupElement);
}
});
}
suggestSmartCast(parameters, session, false, result);
}
/**
 * True when {@code lookupElement} is an array-of-primitive type item
 * (e.g. int[]), which keyword completion already covers.
 */
private static boolean isSuggestedByKeywordCompletion(LookupElement lookupElement) {
  if (!(lookupElement instanceof PsiTypeLookupItem)) {
    return false;
  }
  PsiType type = ((PsiTypeLookupItem)lookupElement).getType();
  if (!(type instanceof PsiArrayType)) {
    return false;
  }
  return ((PsiArrayType)type).getComponentType() instanceof PsiPrimitiveType;
}
/**
 * Offers cast expressions ("((Foo)bar).") where a cast would make the
 * reference resolve. Flushes pending batch items first so cast items come
 * after them, registers each cast's class with the session to avoid
 * duplicates, and boosts the items' priority slightly.
 */
private static void suggestSmartCast(CompletionParameters parameters, JavaCompletionSession session, boolean quick, Consumer<LookupElement> result) {
if (SmartCastProvider.shouldSuggestCast(parameters)) {
session.flushBatchItems();
SmartCastProvider.addCastVariants(parameters, session.getMatcher(), element -> {
registerClassFromTypeElement(element, session);
result.consume(PrioritizedLookupElement.withPriority(element, 1));
}, quick);
}
}
/**
 * Gathers the quickly-computable identifier suggestions: type arguments,
 * functional expressions, probable method return types, smart casts, stream
 * collect conversions, the "*" of static imports, keywords and expression
 * variants. Returned as a list so the caller can register them as one batch.
 */
private static List<LookupElement> getFastIdentifierVariants(@NotNull CompletionParameters parameters,
PsiElement position,
PrefixMatcher matcher,
PsiElement parent,
@NotNull JavaCompletionSession session) {
List<LookupElement> items = new ArrayList<>();
if (TypeArgumentCompletionProvider.IN_TYPE_ARGS.accepts(position)) {
new TypeArgumentCompletionProvider(false, session).addTypeArgumentVariants(parameters, items::add, matcher);
}
FunctionalExpressionCompletionProvider.addFunctionalVariants(parameters, false, false, matcher, items::add);
if (MethodReturnTypeProvider.IN_METHOD_RETURN_TYPE.accepts(position)) {
MethodReturnTypeProvider.addProbableReturnTypes(parameters, element -> {
registerClassFromTypeElement(element, session);
items.add(element);
});
}
suggestSmartCast(parameters, session, true, items::add);
// On a reference expression, offer collect(...) conversions decorated for the expected types.
if (parent instanceof PsiReferenceExpression) {
final List<ExpectedTypeInfo> expected = Arrays.asList(ExpectedTypesProvider.getExpectedTypes((PsiExpression)parent, true));
CollectConversion.addCollectConversion((PsiReferenceExpression)parent, expected,
lookupElement -> items.add(JavaSmartCompletionContributor.decorate(lookupElement, expected)));
}
// Inside an import statement, "*" (on-demand import) is a valid completion.
if (IMPORT_REFERENCE.accepts(position)) {
items.add(LookupElementBuilder.create("*"));
}
items.addAll(new JavaKeywordCompletion(parameters, session).getResults());
addExpressionVariants(parameters, position, items::add);
return items;
}
/**
 * Records the type suggested by {@code element} in the session so it is not
 * offered twice: primitive types by their keyword, raw (non-generic) class
 * types by their class.
 */
private static void registerClassFromTypeElement(LookupElement element, JavaCompletionSession session) {
  PsiType type = assertNotNull(element.as(PsiTypeLookupItem.CLASS_CONDITION_KEY)).getType();
  if (type instanceof PsiPrimitiveType) {
    session.registerKeyword(type.getCanonicalText(false));
    return;
  }
  if (!(type instanceof PsiClassType) || ((PsiClassType)type).getParameterCount() != 0) {
    return;
  }
  PsiClass resolved = ((PsiClassType)type).resolve();
  if (resolved != null) {
    session.registerClass(resolved);
  }
}
/**
 * Adds expression-position suggestions: members of the expected types and,
 * inside a call argument list, parameter sets copied from a same-signature
 * call. Skipped after '.' and where a cast suggestion applies instead.
 */
private static void addExpressionVariants(@NotNull CompletionParameters parameters, PsiElement position, Consumer<LookupElement> result) {
  if (!JavaSmartCompletionContributor.INSIDE_EXPRESSION.accepts(position)) {
    return;
  }
  if (JavaKeywordCompletion.AFTER_DOT.accepts(position) || SmartCastProvider.shouldSuggestCast(parameters)) {
    return;
  }
  addExpectedTypeMembers(parameters, result);
  if (SameSignatureCallParametersProvider.IN_CALL_ARGUMENT.accepts(position)) {
    new SameSignatureCallParametersProvider().addSignatureItems(parameters, result);
  }
}
/** Returns whether the language at {@code position} is Java or a Java dialect. */
public static boolean isInJavaContext(PsiElement position) {
  return PsiUtilCore.findLanguageFromElement(position)
      .isKindOf(JavaLanguage.INSTANCE);
}
/**
 * On second (or later) invocation, adds all project classes matching the
 * prefix; on the first invocation only advertises that pressing completion
 * again would show them. Requires a class-name-capable position and a
 * non-empty prefix.
 */
public static void addAllClasses(CompletionParameters parameters, CompletionResultSet result, JavaCompletionSession session) {
if (!isClassNamePossible(parameters) || !mayStartClassName(result)) {
return;
}
if (parameters.getInvocationCount() >= 2) {
// On exactly the second invocation, filter by scope (flag is true); later ones search everywhere.
JavaClassNameCompletionContributor.addAllClasses(parameters, parameters.getInvocationCount() <= 2, result.getPrefixMatcher(), element -> {
if (!session.alreadyProcessed(element)) {
result.addElement(JavaCompletionUtil.highlightIfNeeded(null, element, element.getObject(), parameters.getPosition()));
}
});
}
else {
advertiseSecondCompletion(parameters.getPosition().getProject(), result);
}
}
/**
 * Shows the lookup advertisement for second basic completion ("see
 * non-imported classes") when the feature tracker says it should be shown.
 */
public static void advertiseSecondCompletion(Project project, CompletionResultSet result) {
  boolean advertise =
      FeatureUsageTracker.getInstance().isToBeAdvertisedInLookup(CodeCompletionFeatures.SECOND_BASIC_COMPLETION, project);
  if (advertise) {
    String shortcut = getActionShortcut(IdeActions.ACTION_CODE_COMPLETION);
    result.addLookupAdvertisement("Press " + shortcut + " to see non-imported classes");
  }
}
/**
 * Produces the lookup elements coming from the references at the caret,
 * grouped by the result set they belong to. Java references go through the
 * context-specific filter from getReferenceFilter(); label references and
 * plain getVariants()-based references are handled separately. Switch labels
 * get a ':' tail, and generic method calls get an inference substitutor when
 * an expected type matches their erased return type.
 */
private static MultiMap<CompletionResultSet, LookupElement> addReferenceVariants(final CompletionParameters parameters,
CompletionResultSet result,
JavaCompletionSession session) {
MultiMap<CompletionResultSet, LookupElement> items = MultiMap.create();
final PsiElement position = parameters.getPosition();
final boolean first = parameters.getInvocationCount() <= 1;
final boolean isSwitchLabel = SWITCH_LABEL.accepts(position);
final boolean isAfterNew = JavaClassNameCompletionContributor.AFTER_NEW.accepts(position);
final boolean pkgContext = JavaCompletionUtil.inSomePackage(position);
final PsiType[] expectedTypes = ExpectedTypesGetter.getExpectedTypes(parameters.getPosition(), true);
LegacyCompletionContributor.processReferences(parameters, result, (reference, result1) -> {
if (reference instanceof PsiJavaReference) {
// Filtering by context
ElementFilter filter = getReferenceFilter(position);
if (filter != null) {
// Inside constructors (on first invocation), only initialized fields are suggested.
if (INSIDE_CONSTRUCTOR.accepts(position) &&
(parameters.getInvocationCount() <= 1 || CheckInitialized.isInsideConstructorCall(position))) {
filter = new AndFilter(filter, new CheckInitialized(position));
}
final PsiFile originalFile = parameters.getOriginalFile();
// First invocation is strict (access checks, static-after-instance filtering);
// later invocations loosen up and show instance members in static context.
JavaCompletionProcessor.Options options =
JavaCompletionProcessor.Options.DEFAULT_OPTIONS
.withCheckAccess(first)
.withFilterStaticAfterInstance(first)
.withShowInstanceInStaticContext(!first);
for (LookupElement element : JavaCompletionUtil.processJavaReference(position,
(PsiJavaReference)reference,
new ElementExtractorFilter(filter),
options,
result1.getPrefixMatcher(), parameters)) {
if (session.alreadyProcessed(element)) {
continue;
}
if (isSwitchLabel) {
// Enum constants in switch labels complete with a trailing ':'.
items.putValue(result1, new IndentingDecorator(TailTypeDecorator.withTail(element, TailType.createSimpleTailType(':'))));
}
else {
final LookupItem item = element.as(LookupItem.CLASS_CONDITION_KEY);
if (originalFile instanceof PsiJavaCodeReferenceCodeFragment &&
!((PsiJavaCodeReferenceCodeFragment)originalFile).isClassesAccepted() && item != null) {
item.setTailType(TailType.NONE);
}
if (item instanceof JavaMethodCallElement) {
JavaMethodCallElement call = (JavaMethodCallElement)item;
final PsiMethod method = call.getObject();
// For generic methods, pre-infer the substitutor from a matching expected type.
if (method.getTypeParameters().length > 0) {
final PsiType returned = TypeConversionUtil.erasure(method.getReturnType());
PsiType matchingExpectation = returned == null
? null
: ContainerUtil.find(expectedTypes, type -> type.isAssignableFrom(returned));
if (matchingExpectation != null) {
call.setInferenceSubstitutorFromExpectedType(position, matchingExpectation);
}
}
}
items.putValue(result1, element);
}
}
}
return;
}
if (reference instanceof PsiLabelReference) {
items.putValues(result1, LabelReferenceCompletion.processLabelReference((PsiLabelReference)reference));
return;
}
// Generic (non-Java-specific) references: fall back to getVariants().
final Object[] variants = reference.getVariants();
//noinspection ConstantConditions
if (variants == null) {
LOG.error("Reference=" + reference);
}
for (Object completion : variants) {
if (completion == null) {
LOG.error("Position=" + position + "\n;Reference=" + reference + "\n;variants=" + Arrays.toString(variants));
}
if (completion instanceof LookupElement && !session.alreadyProcessed((LookupElement)completion)) {
items.putValue(result1, (LookupElement)completion);
}
else if (completion instanceof PsiClass) {
// Classes still go through accessibility and duplicate checks.
Condition<PsiClass> condition = psiClass -> !session.alreadyProcessed(psiClass) &&
JavaCompletionUtil.isSourceLevelAccessible(position, psiClass, pkgContext);
items.putValues(result1, JavaClassNameCompletionContributor.createClassLookupItems(
(PsiClass)completion,
isAfterNew,
JavaClassNameInsertHandler.JAVA_CLASS_INSERT_HANDLER,
condition));
}
else {
//noinspection deprecation
items.putValue(result1, LookupItemUtil.objectToLookupItem(completion));
}
}
});
return items;
}
/**
 * Decides whether class-name completion makes sense at the caret. Several
 * contexts never accept class names (instanceof place, type parameter names,
 * package-name references, switch labels, qualified 'new', after a
 * primitive/array type); some only on second invocation (non-reference
 * parents, qualified references, imports). Ordered guards — do not reorder.
 */
static boolean isClassNamePossible(CompletionParameters parameters) {
boolean isSecondCompletion = parameters.getInvocationCount() >= 2;
PsiElement position = parameters.getPosition();
if (JavaKeywordCompletion.isInstanceofPlace(position) || JavaMemberNameCompletionContributor.INSIDE_TYPE_PARAMS_PATTERN.accepts(position)) {
return false;
}
final PsiElement parent = position.getParent();
if (!(parent instanceof PsiJavaCodeReferenceElement)) return isSecondCompletion;
if (((PsiJavaCodeReferenceElement)parent).getQualifier() != null) return isSecondCompletion;
// Package-name positions never complete to classes.
if (parent instanceof PsiJavaCodeReferenceElementImpl &&
((PsiJavaCodeReferenceElementImpl)parent).getKind(parent.getContainingFile()) == PsiJavaCodeReferenceElementImpl.PACKAGE_NAME_KIND) {
return false;
}
PsiElement grand = parent.getParent();
if (grand instanceof PsiSwitchLabelStatement) {
return false;
}
if (psiElement().inside(PsiImportStatement.class).accepts(parent)) {
return isSecondCompletion;
}
// Look through anonymous classes to the enclosing 'new' expression.
if (grand instanceof PsiAnonymousClass) {
grand = grand.getParent();
}
if (grand instanceof PsiNewExpression && ((PsiNewExpression)grand).getQualifier() != null) {
return false;
}
if (JavaKeywordCompletion.isAfterPrimitiveOrArrayType(position)) {
return false;
}
return true;
}
/** Class-name completion only runs when the user has typed a non-empty prefix. */
public static boolean mayStartClassName(CompletionResultSet result) {
  String prefix = result.getPrefixMatcher().getPrefix();
  return StringUtil.isNotEmpty(prefix);
}
/**
 * Completes inside an annotation's parameter list: attribute names of the
 * annotation class (those not already present), each with an '=' inserted on
 * selection, plus — right after '(' when the annotation has a 'value'
 * attribute — class and constant suggestions for that implicit value.
 */
private static void completeAnnotationAttributeName(CompletionResultSet result, PsiElement insertedElement,
CompletionParameters parameters) {
PsiNameValuePair pair = PsiTreeUtil.getParentOfType(insertedElement, PsiNameValuePair.class);
PsiAnnotationParameterList parameterList = (PsiAnnotationParameterList)assertNotNull(pair).getParent();
PsiAnnotation anno = (PsiAnnotation)parameterList.getParent();
// Right after '(' the token may be the implicit "value=..." — then classes are allowed too.
boolean showClasses = psiElement().afterLeaf("(").accepts(insertedElement);
PsiClass annoClass = null;
final PsiJavaCodeReferenceElement referenceElement = anno.getNameReferenceElement();
if (referenceElement != null) {
final PsiElement element = referenceElement.resolve();
if (element instanceof PsiClass) {
annoClass = (PsiClass)element;
// Without a 'value' attribute there is no implicit value, so no class suggestions.
if (annoClass.findMethodsByName("value", false).length == 0) {
showClasses = false;
}
}
}
if (showClasses && insertedElement.getParent() instanceof PsiReferenceExpression) {
final Set<LookupElement> set = JavaCompletionUtil.processJavaReference(
insertedElement, (PsiJavaReference)insertedElement.getParent(), new ElementExtractorFilter(createAnnotationFilter(insertedElement)),
JavaCompletionProcessor.Options.DEFAULT_OPTIONS, result.getPrefixMatcher(), parameters);
for (final LookupElement element : set) {
result.addElement(element);
}
addAllClasses(parameters, result, new JavaCompletionSession(result));
}
if (annoClass != null && annoClass.isAnnotationType()) {
final PsiNameValuePair[] existingPairs = parameterList.getAttributes();
// Suggest each annotation attribute that is not already specified.
methods: for (PsiMethod method : annoClass.getMethods()) {
if (!(method instanceof PsiAnnotationMethod)) continue;
final String attrName = method.getName();
for (PsiNameValuePair existingAttr : existingPairs) {
// The pair being completed doesn't count as "existing".
if (PsiTreeUtil.isAncestor(existingAttr, insertedElement, false)) break;
if (Comparing.equal(existingAttr.getName(), attrName) ||
PsiAnnotation.DEFAULT_REFERENCED_METHOD_NAME.equals(attrName) && existingAttr.getName() == null) continue methods;
}
LookupElementBuilder element = LookupElementBuilder.createWithIcon(method).withInsertHandler(new InsertHandler<LookupElement>() {
@Override
public void handleInsert(InsertionContext context, LookupElement item) {
// Append '=' after the chosen attribute name.
final Editor editor = context.getEditor();
TailType.EQ.processTail(editor, editor.getCaretModel().getOffset());
context.setAddCompletionChar(false);
context.commitDocument();
// If an unnamed (implicit "value") attribute follows, name it explicitly.
PsiAnnotationParameterList paramList =
PsiTreeUtil.findElementOfClassAtOffset(context.getFile(), context.getStartOffset(), PsiAnnotationParameterList.class, false);
if (paramList != null && paramList.getAttributes().length > 0 && paramList.getAttributes()[0].getName() == null) {
int valueOffset = paramList.getAttributes()[0].getTextRange().getStartOffset();
context.getDocument().insertString(valueOffset, PsiAnnotation.DEFAULT_REFERENCED_METHOD_NAME);
TailType.EQ.processTail(editor, valueOffset + PsiAnnotation.DEFAULT_REFERENCED_METHOD_NAME.length());
}
}
});
PsiAnnotationMemberValue defaultValue = ((PsiAnnotationMethod)method).getDefaultValue();
if (defaultValue != null) {
element = element.withTailText(" default " + defaultValue.getText(), true);
}
result.addElement(element);
}
}
}
/**
 * Returns an advertisement string for the completion popup, or null:
 * global-static-member hint for qualified references in basic completion,
 * a smart-completion hint outside smart mode, and — in first-invocation
 * smart mode — hints about iterable/array/chain second smart completion
 * derived from the expected types.
 */
@Override
public String advertise(@NotNull final CompletionParameters parameters) {
if (!(parameters.getOriginalFile() instanceof PsiJavaFile)) return null;
if (parameters.getCompletionType() == CompletionType.BASIC && parameters.getInvocationCount() > 0) {
PsiElement position = parameters.getPosition();
// Class-qualified reference: advertise global static member completion.
if (psiElement().withParent(psiReferenceExpression().withFirstChild(psiReferenceExpression().referencing(psiClass()))).accepts(position)) {
if (CompletionUtil.shouldShowFeature(parameters, JavaCompletionFeatures.GLOBAL_MEMBER_NAME)) {
final String shortcut = getActionShortcut(IdeActions.ACTION_CODE_COMPLETION);
if (StringUtil.isNotEmpty(shortcut)) {
return "Pressing " + shortcut + " twice without a class qualifier would show all accessible static methods";
}
}
}
}
if (parameters.getCompletionType() != CompletionType.SMART && shouldSuggestSmartCompletion(parameters.getPosition())) {
if (CompletionUtil.shouldShowFeature(parameters, CodeCompletionFeatures.EDITING_COMPLETION_SMARTTYPE_GENERAL)) {
final String shortcut = getActionShortcut(IdeActions.ACTION_SMART_TYPE_COMPLETION);
if (StringUtil.isNotEmpty(shortcut)) {
return CompletionBundle.message("completion.smart.hint", shortcut);
}
}
}
if (parameters.getCompletionType() == CompletionType.SMART && parameters.getInvocationCount() == 1) {
final PsiType[] psiTypes = ExpectedTypesGetter.getExpectedTypes(parameters.getPosition(), true);
if (psiTypes.length > 0) {
// An iterable expected type: advertise the as-list second smart completion.
if (CompletionUtil.shouldShowFeature(parameters, JavaCompletionFeatures.SECOND_SMART_COMPLETION_TOAR)) {
final String shortcut = getActionShortcut(IdeActions.ACTION_SMART_TYPE_COMPLETION);
if (StringUtil.isNotEmpty(shortcut)) {
for (final PsiType psiType : psiTypes) {
final PsiType type = PsiUtil.extractIterableTypeParameter(psiType, false);
if (type != null) {
return CompletionBundle.message("completion.smart.aslist.hint", shortcut, type.getPresentableText());
}
}
}
}
// An object-array expected type: advertise the to-array second smart completion.
if (CompletionUtil.shouldShowFeature(parameters, JavaCompletionFeatures.SECOND_SMART_COMPLETION_ASLIST)) {
final String shortcut = getActionShortcut(IdeActions.ACTION_SMART_TYPE_COMPLETION);
if (StringUtil.isNotEmpty(shortcut)) {
for (final PsiType psiType : psiTypes) {
if (psiType instanceof PsiArrayType) {
final PsiType componentType = ((PsiArrayType)psiType).getComponentType();
if (!(componentType instanceof PsiPrimitiveType)) {
return CompletionBundle.message("completion.smart.toar.hint", shortcut, componentType.getPresentableText());
}
}
}
}
}
if (CompletionUtil.shouldShowFeature(parameters, JavaCompletionFeatures.SECOND_SMART_COMPLETION_CHAIN)) {
final String shortcut = getActionShortcut(IdeActions.ACTION_SMART_TYPE_COMPLETION);
if (StringUtil.isNotEmpty(shortcut)) {
return CompletionBundle.message("completion.smart.chain.hint", shortcut);
}
}
}
}
return null;
}
@Override
public String handleEmptyLookup(@NotNull final CompletionParameters parameters, final Editor editor) {
  // Produces the "no suggestions" message shown when completion yields no items.
  // Non-Java files are left to the platform default (null).
  if (!(parameters.getOriginalFile() instanceof PsiJavaFile)) return null;
  // Advertisement text (e.g. a shortcut hint) is appended to every message below.
  final String ad = advertise(parameters);
  final String suffix = ad == null ? "" : "; " + StringUtil.decapitalize(ad);
  if (parameters.getCompletionType() == CompletionType.SMART) {
    // Inside a literal there is nothing smart completion could offer.
    PsiExpression expression = PsiTreeUtil.getContextOfType(parameters.getPosition(), PsiExpression.class, true);
    if (expression instanceof PsiLiteralExpression) {
      return LangBundle.message("completion.no.suggestions") + suffix;
    }
    // The type position of "x instanceof <caret>" has no expected-type info either.
    if (expression instanceof PsiInstanceOfExpression) {
      final PsiInstanceOfExpression instanceOfExpression = (PsiInstanceOfExpression)expression;
      if (PsiTreeUtil.isAncestor(instanceOfExpression.getCheckType(), parameters.getPosition(), false)) {
        return LangBundle.message("completion.no.suggestions") + suffix;
      }
    }
    // When exactly one expected type is known, mention it in the message.
    final Set<PsiType> expectedTypes = JavaCompletionUtil.getExpectedTypes(parameters);
    if (expectedTypes != null) {
      PsiType type = expectedTypes.size() == 1 ? expectedTypes.iterator().next() : null;
      if (type != null) {
        final PsiType deepComponentType = type.getDeepComponentType();
        String expectedType = type.getPresentableText();
        // The dummy identifier in the type text means it is an artifact of completion itself.
        if (expectedType.contains(CompletionUtil.DUMMY_IDENTIFIER_TRIMMED)) {
          return null;
        }
        if (deepComponentType instanceof PsiClassType) {
          if (((PsiClassType)deepComponentType).resolve() != null) {
            return CompletionBundle.message("completion.no.suggestions.of.type", expectedType) + suffix;
          }
          // Unresolvable class type: report it as unknown rather than "no suggestions".
          return CompletionBundle.message("completion.unknown.type", expectedType) + suffix;
        }
        if (!PsiType.NULL.equals(type)) {
          return CompletionBundle.message("completion.no.suggestions.of.type", expectedType) + suffix;
        }
      }
    }
  }
  return LangBundle.message("completion.no.suggestions") + suffix;
}
@Override
public boolean invokeAutoPopup(@NotNull PsiElement position, char typeChar) {
  // Auto-popup only when a ':' was typed and the caret sits on a COLON token.
  if (typeChar != ':') {
    return false;
  }
  return position.getNode().getElementType() == JavaTokenType.COLON;
}
private static boolean shouldSuggestSmartCompletion(final PsiElement element) {
  // Class-name completion takes precedence; never advertise smart completion there.
  if (shouldSuggestClassNameCompletion(element)) {
    return false;
  }
  final PsiElement parent = element.getParent();
  if (parent instanceof PsiReferenceExpression) {
    final PsiReferenceExpression refExpr = (PsiReferenceExpression)parent;
    // Qualified references (a.b<caret>) are not smart-completion candidates.
    if (refExpr.getQualifier() != null) {
      return false;
    }
    // A reference nested directly inside another reference is a candidate.
    if (refExpr.getParent() instanceof PsiReferenceExpression) {
      return true;
    }
  }
  // Otherwise advertise smart completion only if some type is expected here.
  return ExpectedTypesGetter.getExpectedTypes(element, false).length > 0;
}
private static boolean shouldSuggestClassNameCompletion(final PsiElement element) {
  // True when the caret's grandparent is a position where a class name is expected:
  // a type element, an expression statement, or a reference list (extends/implements/throws).
  if (element == null) {
    return false;
  }
  final PsiElement parent = element.getParent();
  if (parent == null) {
    return false;
  }
  final PsiElement grandParent = parent.getParent();
  return grandParent instanceof PsiTypeElement
      || grandParent instanceof PsiExpressionStatement
      || grandParent instanceof PsiReferenceList;
}
@Override
public void beforeCompletion(@NotNull final CompletionInitializationContext context) {
  // Prepares a Java file before completion runs: auto-imports unresolved references
  // on explicit invocation, then picks an appropriate dummy identifier for parsing.
  final PsiFile file = context.getFile();
  if (file instanceof PsiJavaFile) {
    if (context.getInvocationCount() > 0) {
      // Explicit invocation: try to import the qualifier to the left of the caret ...
      autoImport(file, context.getStartOffset() - 1, context.getEditor());
      // ... and, if the caret is inside a variable whose declared type does not
      // resolve, try to import that type as well.
      PsiElement leaf = file.findElementAt(context.getStartOffset() - 1);
      if (leaf != null) leaf = PsiTreeUtil.prevVisibleLeaf(leaf);
      PsiVariable variable = PsiTreeUtil.getParentOfType(leaf, PsiVariable.class);
      if (variable != null) {
        PsiTypeElement typeElement = variable.getTypeElement();
        if (typeElement != null) {
          PsiType type = typeElement.getType();
          if (type instanceof PsiClassType && ((PsiClassType)type).resolve() == null) {
            autoImportReference(file, context.getEditor(), typeElement.getInnermostComponentReferenceElement());
          }
        }
      }
    }
    if (context.getCompletionType() == CompletionType.BASIC) {
      // Inside a type-argument list the trimmed dummy keeps the list parseable.
      if (PsiTreeUtil.findElementOfClassAtOffset(file, context.getStartOffset() - 1, PsiReferenceParameterList.class, false) != null) {
        context.setDummyIdentifier(CompletionInitializationContext.DUMMY_IDENTIFIER_TRIMMED);
        return;
      }
      // Positions that parse like a declaration need a trailing ';' in the dummy
      // text so the code after the caret is not swallowed into one statement.
      if (semicolonNeeded(context.getEditor(), file, context.getStartOffset())) {
        context.setDummyIdentifier(CompletionInitializationContext.DUMMY_IDENTIFIER.trim() + ";");
        return;
      }
      PsiJavaCodeReferenceElement ref = PsiTreeUtil.findElementOfClassAtOffset(file, context.getStartOffset(), PsiJavaCodeReferenceElement.class, false);
      if (ref != null && !(ref instanceof PsiReferenceExpression)) {
        if (JavaSmartCompletionContributor.AFTER_NEW.accepts(ref)) {
          // Remember where the explicit type-argument list of "new Foo<...>" sits
          // so the constructor insert handler can preserve it.
          final PsiReferenceParameterList paramList = ref.getParameterList();
          if (paramList != null && paramList.getTextLength() > 0) {
            context.getOffsetMap().addOffset(ConstructorInsertHandler.PARAM_LIST_START, paramList.getTextRange().getStartOffset());
            context.getOffsetMap().addOffset(ConstructorInsertHandler.PARAM_LIST_END, paramList.getTextRange().getEndOffset());
          }
        }
        return;
      }
      // Annotation positions keep the default dummy identifier.
      final PsiElement element = file.findElementAt(context.getStartOffset());
      if (psiElement().inside(PsiAnnotation.class).accepts(element)) {
        return;
      }
      context.setDummyIdentifier(CompletionInitializationContext.DUMMY_IDENTIFIER_TRIMMED);
    }
  }
}
public static boolean semicolonNeeded(Editor editor, PsiFile file, int startOffset) {
  // Decides whether the completion dummy identifier must end with ';' so that the
  // text after the caret still parses as a separate statement/declaration.
  PsiJavaCodeReferenceElement ref = PsiTreeUtil.findElementOfClassAtOffset(file, startOffset, PsiJavaCodeReferenceElement.class, false);
  if (ref != null && !(ref instanceof PsiReferenceExpression)) {
    // A plain (non-expression) reference inside a type element looks like the start
    // of a declaration.
    if (ref.getParent() instanceof PsiTypeElement) {
      return true;
    }
  }
  // Completing a parameter name also behaves like a declaration position.
  if (psiElement(PsiIdentifier.class).withParent(psiParameter()).accepts(file.findElementAt(startOffset))) {
    return true;
  }
  // Otherwise lex forward from the caret using the editor highlighter.
  HighlighterIterator iterator = ((EditorEx)editor).getHighlighter().createIterator(startOffset);
  if (iterator.atEnd()) return false;
  // Skip the identifier being completed, then any comments/whitespace.
  if (iterator.getTokenType() == JavaTokenType.IDENTIFIER) {
    iterator.advance();
  }
  while (!iterator.atEnd() && ElementType.JAVA_COMMENT_OR_WHITESPACE_BIT_SET.contains(iterator.getTokenType())) {
    iterator.advance();
  }
  if (!iterator.atEnd() &&
      iterator.getTokenType() == JavaTokenType.LPARENTH &&
      PsiTreeUtil.getParentOfType(ref, PsiExpression.class, PsiClass.class) == null) {
    // looks like a method declaration, e.g. StringBui<caret>methodName() inside a class
    return true;
  }
  if (!iterator.atEnd() &&
      iterator.getTokenType() == JavaTokenType.COLON &&
      PsiTreeUtil.findElementOfClassAtOffset(file, startOffset, PsiConditionalExpression.class, false) == null) {
    // A ':' that is not part of a conditional ("?:") also forces the semicolon.
    return true;
  }
  // NOTE(review): the iterator is already on a non-whitespace token here, so this
  // second skip loop appears redundant; kept as-is to preserve behavior.
  while (!iterator.atEnd() && ElementType.JAVA_COMMENT_OR_WHITESPACE_BIT_SET.contains(iterator.getTokenType())) {
    iterator.advance();
  }
  // Finally look for the "<caret> name =" shape: identifier followed by '='.
  if (iterator.atEnd() || iterator.getTokenType() != JavaTokenType.IDENTIFIER) return false;
  iterator.advance();
  while (!iterator.atEnd() && ElementType.JAVA_COMMENT_OR_WHITESPACE_BIT_SET.contains(iterator.getTokenType())) {
    iterator.advance();
  }
  if (iterator.atEnd()) return false;
  return iterator.getTokenType() == JavaTokenType.EQ; // <caret> foo = something, we don't want the reference to be treated as a type
}
private static void autoImport(@NotNull final PsiFile file, int offset, @NotNull final Editor editor) {
  // Scans left from 'offset' past the identifier being completed; if the caret turns
  // out to be qualified ("Foo.<caret>"), tries to auto-import the qualifier.
  final CharSequence text = editor.getDocument().getCharsSequence();
  // Skip the identifier chars under/left of the caret.
  while (offset > 0 && Character.isJavaIdentifierPart(text.charAt(offset))) offset--;
  if (offset <= 0) return;
  // Skip whitespace; the next significant char must be the qualifying dot.
  while (offset > 0 && Character.isWhitespace(text.charAt(offset))) offset--;
  if (offset <= 0 || text.charAt(offset) != '.') return;
  offset--;
  // Skip whitespace between the dot and the qualifier expression.
  while (offset > 0 && Character.isWhitespace(text.charAt(offset))) offset--;
  if (offset <= 0) return;
  autoImportReference(file, editor, extractReference(PsiTreeUtil.findElementOfClassAtOffset(file, offset, PsiExpression.class, false)));
}
private static void autoImportReference(@NotNull PsiFile file, @NotNull Editor editor, @Nullable PsiJavaCodeReferenceElement element) {
  // Auto-imports the innermost (leftmost) unresolved qualifier of the given reference.
  if (element == null) {
    return;
  }
  // Walk down the qualifier chain: for "a.b.c" the candidate to import is "a".
  PsiJavaCodeReferenceElement deepest = element;
  PsiJavaCodeReferenceElement qualifier = extractReference(deepest.getQualifier());
  while (qualifier != null) {
    deepest = qualifier;
    qualifier = extractReference(deepest.getQualifier());
  }
  // Leave method-call references alone; only unresolved plain references get a fix.
  if (deepest.getParent() instanceof PsiMethodCallExpression || deepest.multiResolve(true).length != 0) {
    return;
  }
  new ImportClassFix(deepest).doFix(editor, false, false);
  PsiDocumentManager.getInstance(file.getProject()).commitDocument(editor.getDocument());
}
@Nullable
private static PsiJavaCodeReferenceElement extractReference(@Nullable PsiElement expression) {
  // Method calls yield their callee reference; direct references pass through;
  // anything else (including null) yields null. The two instanceof branches are
  // disjoint, so the check order does not matter.
  if (expression instanceof PsiMethodCallExpression) {
    return ((PsiMethodCallExpression)expression).getMethodExpression();
  }
  return expression instanceof PsiJavaCodeReferenceElement ? (PsiJavaCodeReferenceElement)expression : null;
}
private static void addExpectedTypeMembers(CompletionParameters parameters, final Consumer<LookupElement> result) {
  // Suggests members of the expected types (constants, factory methods, ...) on the
  // first completion invocation only.
  if (parameters.getInvocationCount() <= 1) { // on second completion, StaticMemberProcessor will suggest those
    for (final ExpectedTypeInfo info : JavaSmartCompletionContributor.getExpectedTypes(parameters)) {
      new JavaMembersGetter(info.getDefaultType(), parameters).addMembers(false, result);
    }
  }
}
private static void addModuleReferences(PsiElement moduleRef, PsiFile originalFile, CompletionResultSet result) {
  // Completes module names in "requires" and package-accessibility ("exports ... to",
  // "opens ... to") statements of a module-info file.
  PsiElement statement = moduleRef.getParent();
  boolean requires;
  // NOTE: the assignment inside the condition records whether this is a requires-statement.
  if ((requires = statement instanceof PsiRequiresStatement) || statement instanceof PsiPackageAccessibilityStatement) {
    PsiElement parent = statement.getParent();
    if (parent != null) {
      Project project = moduleRef.getProject();
      // 'filter' both de-duplicates results and excludes the declaring module itself.
      Set<String> filter = new HashSet<>();
      filter.add(((PsiJavaModule)parent).getName());
      // Explicitly declared modules, found via the module-name index.
      JavaModuleNameIndex index = JavaModuleNameIndex.getInstance();
      GlobalSearchScope scope = ProjectScope.getAllScope(project);
      for (String name : index.getAllKeys(project)) {
        if (index.get(name, project, scope).size() > 0 && filter.add(name)) {
          LookupElement lookup = LookupElementBuilder.create(name).withIcon(AllIcons.Nodes.JavaModule);
          // Only "requires" statements end with ';' right after the module name.
          if (requires) lookup = TailTypeDecorator.withTail(lookup, TailType.SEMICOLON);
          result.addElement(lookup);
        }
      }
      if (requires) {
        // For "requires", also offer automatic modules derived from library jars of
        // the current IDE module; these rank below the explicitly declared ones.
        Module module = ModuleUtilCore.findModuleForFile(originalFile);
        if (module != null) {
          VirtualFile[] roots = ModuleRootManager.getInstance(module).orderEntries().withoutSdk().librariesOnly().getClassesRoots();
          scope = GlobalSearchScope.filesScope(project, Arrays.asList(roots));
          for (String name : JavaAutoModuleNameIndex.getAllKeys(project)) {
            if (JavaAutoModuleNameIndex.getFilesByKey(name, scope).size() > 0 &&
                PsiNameHelper.isValidModuleName(name, parent) &&
                filter.add(name)) {
              LookupElement lookup = LookupElementBuilder.create(name).withIcon(AllIcons.FileTypes.Archive);
              lookup = TailTypeDecorator.withTail(lookup, TailType.SEMICOLON);
              lookup = PrioritizedLookupElement.withPriority(lookup, -1);
              result.addElement(lookup);
            }
          }
        }
      }
    }
  }
}
/**
 * Lookup element decorator that re-indents the line an element was inserted on,
 * using the project's code style, after the insertion itself has been handled.
 */
static class IndentingDecorator extends LookupElementDecorator<LookupElement> {
  public IndentingDecorator(LookupElement delegate) {
    super(delegate);
  }

  @Override
  public void handleInsert(InsertionContext context) {
    super.handleInsert(context);
    final Project project = context.getProject();
    final Document document = context.getDocument();
    final int lineStart = DocumentUtil.getLineStartOffset(context.getStartOffset(), document);
    // PSI must be committed before asking the code-style manager to adjust indent.
    PsiDocumentManager.getInstance(project).commitDocument(document);
    CodeStyleManager.getInstance(project).adjustLineIndent(context.getFile(), lineStart);
  }
}
/**
 * Element filter that accepts only elements physically contained in a given
 * {@link GlobalSearchScope}.
 */
private static class SearchScopeFilter implements ElementFilter {
  /** Scope that accepted elements must belong to. */
  private final GlobalSearchScope myScope;

  public SearchScopeFilter(GlobalSearchScope scope) {
    myScope = scope;
  }

  @Override
  public boolean isAcceptable(Object element, @Nullable PsiElement context) {
    // Packages are accepted when they own at least one directory inside the scope.
    if (element instanceof PsiPackage) {
      return ((PsiDirectoryContainer)element).getDirectories(myScope).length > 0;
    }
    if (!(element instanceof PsiElement)) {
      return false;
    }
    // Ordinary elements are accepted when their containing file lies in the scope.
    final PsiFile psiFile = ((PsiElement)element).getContainingFile();
    if (psiFile == null) {
      return false;
    }
    final VirtualFile file = psiFile.getVirtualFile();
    return file != null && myScope.contains(file);
  }

  @Override
  public boolean isClassAcceptable(Class hintClass) {
    return true;
  }
}
}
| |
/*
*
* Paros and its related class files.
*
* Paros is an HTTP/HTTPS proxy for assessing web application security.
* Copyright (C) 2003-2004 Chinotec Technologies Company
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the Clarified Artistic License
* as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* Clarified Artistic License for more details.
*
* You should have received a copy of the Clarified Artistic License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
// ZAP: 2011/12/04 Support deleting alerts
// ZAP: 2012/01/02 Separate param and attack
// ZAP: 2012/01/23 Changed the method compareTo to compare the fields correctly
// with each other.
// ZAP: 2012/03/15 Changed the methods toPluginXML and getUrlParamXML to use the class
// StringBuilder instead of StringBuffer and replaced some string concatenations with
// calls to the method append of the class StringBuilder.
// ZAP: 2012/04/25 Added @Override annotation to all appropriate methods.
// ZAP: 2012/05/02 Changed to not create a new String in the setters.
// ZAP: 2012/07/10 Issue 323: Added getIconUrl()
// ZAP: 2012/10/08 Issue 391: Performance improvements
// ZAP: 2012/12/19 Code Cleanup: Moved array brackets from variable name to type
// ZAP: 2013/07/12 Issue 713: Add CWE and WASC numbers to issues
// ZAP: 2013/09/08 Issue 691: Handle old plugins
// ZAP: 2013/11/16 Issue 866: Alert keeps HttpMessage longer than needed when HistoryReference is set/available
// ZAP: 2014/04/10 Issue 1042: Having significant issues opening a previous session
// ZAP: 2014/05/23 Issue 1209: Reliability becomes Confidence and add levels
// ZAP: 2015/01/04 Issue 1419: Include alert's evidence in HTML report
// ZAP: 2014/01/04 Issue 1475: Alerts with different name from same scanner might not be shown in report
// ZAP: 2015/02/09 Issue 1525: Introduce a database interface layer to allow for alternative implementations
// ZAP: 2015/08/24 Issue 1849: Option to merge related issues in reports
// ZAP: 2015/11/16 Issue 1555: Rework inclusion of HTML tags in reports
// ZAP: 2016/02/26 Deprecate alert as an element of Alert in favour of name
// ZAP: 2016/05/25 Normalise equals/hashCode/compareTo
package org.parosproxy.paros.core.scanner;
import java.net.URL;
import java.util.Locale;

import org.apache.commons.httpclient.URI;
import org.apache.log4j.Logger;

import org.parosproxy.paros.Constant;
import org.parosproxy.paros.db.DatabaseException;
import org.parosproxy.paros.db.RecordAlert;
import org.parosproxy.paros.extension.report.ReportGenerator;
import org.parosproxy.paros.model.HistoryReference;
import org.parosproxy.paros.network.HttpMalformedHeaderException;
import org.parosproxy.paros.network.HttpMessage;
/**
 * A security finding raised by a scanner plugin: risk, confidence, location
 * (URI/param/attack/evidence) and descriptive text, persisted via {@link RecordAlert}.
 *
 * <p>Fixes in this revision:
 * <ul>
 * <li>the deprecated {@link #setRiskReliability(int, int)} now also updates the
 *     deprecated {@code reliability} field it was meant to set (previously the field
 *     was never written, so {@link #getIconUrl()}'s reliability check was dead);</li>
 * <li>{@link #hashCode()} now case-normalises uri/param/otherInfo to match the
 *     case-insensitive comparisons in {@link #equals(Object)};</li>
 * <li>{@link #paragraph(String)} tolerates {@code null} instead of throwing NPE.</li>
 * </ul>
 */
public class Alert implements Comparable<Alert> {

    public static final int RISK_INFO = 0;
    public static final int RISK_LOW = 1;
    public static final int RISK_MEDIUM = 2;
    public static final int RISK_HIGH = 3;

    // ZAP: Added FALSE_POSITIVE
    public static final int CONFIDENCE_FALSE_POSITIVE = 0;
    /**
     * @deprecated (2.4.0) Replaced by {@link #CONFIDENCE_LOW} confidence.
     * SUSPICIOUS reliability has been deprecated in favour of using CONFIDENCE_LOW confidence.
     */
    @Deprecated
    public static final int SUSPICIOUS = 1;
    public static final int CONFIDENCE_LOW = 1;
    /**
     * @deprecated (2.4.0) Replaced by {@link #CONFIDENCE_MEDIUM} confidence.
     * WARNING reliability has been deprecated in favour of using CONFIDENCE_MEDIUM confidence.
     */
    @Deprecated
    public static final int WARNING = 2;
    public static final int CONFIDENCE_MEDIUM = 2;
    public static final int CONFIDENCE_HIGH = 3;
    public static final int CONFIDENCE_USER_CONFIRMED = 4;

    /** Display names indexed by the {@code RISK_*} constants. */
    public static final String[] MSG_RISK = {"Informational", "Low", "Medium", "High"};
    // ZAP: Added "false positive"
    /**
     * @deprecated (2.4.0) Replaced by {@link #MSG_CONFIDENCE}.
     * Use of reliability has been deprecated in favour of using confidence.
     */
    @Deprecated
    public static final String[] MSG_RELIABILITY = {"False Positive", "Low", "Medium", "High", "Confirmed"};
    /** Display names indexed by the {@code CONFIDENCE_*} constants. */
    public static final String[] MSG_CONFIDENCE = {"False Positive", "Low", "Medium", "High", "Confirmed"};

    private int alertId = -1; // ZAP: Changed default alertId
    private int pluginId = 0;
    private String name = "";
    private int risk = RISK_INFO;
    /**
     * @deprecated
     * Use of reliability has been deprecated in favour of using confidence
     */
    @Deprecated
    private int reliability = CONFIDENCE_MEDIUM;
    private int confidence = CONFIDENCE_MEDIUM;
    private String description = "";
    private String uri = "";
    private String param = "";
    private String attack = "";
    private String otherInfo = "";
    private String solution = "";
    private String reference = "";
    private String evidence = "";
    private int cweId = -1;
    private int wascId = -1;
    // Temporary ref - should be cleared asap after use
    private HttpMessage message = null;
    // ZAP: Added sourceHistoryId to Alert
    private int sourceHistoryId = 0;
    private HistoryReference historyRef = null;
    // ZAP: Added logger
    private static final Logger logger = Logger.getLogger(Alert.class);
    // Cache this info so that we don't have to keep a ref to the HttpMessage
    private String method = null;
    private String postData;
    private URI msgUri = null;

    public Alert(int pluginId) {
        this.pluginId = pluginId;
    }

    public Alert(int pluginId, int risk, int confidence, String name) {
        this(pluginId);
        setRiskConfidence(risk, confidence);
        setName(name);
    }

    /**
     * Rebuilds an alert from its database record, loading the associated history
     * reference (and through it the request/response data) lazily.
     */
    public Alert(RecordAlert recordAlert) {
        this(recordAlert.getPluginId(), recordAlert.getRisk(), recordAlert.getConfidence(), recordAlert.getAlert());
        // ZAP: Set the alertId
        this.alertId = recordAlert.getAlertId();
        try {
            HistoryReference hRef = new HistoryReference(recordAlert.getHistoryId());
            setDetail(recordAlert.getDescription(), recordAlert.getUri(),
                    recordAlert.getParam(), recordAlert.getAttack(), recordAlert.getOtherInfo(),
                    recordAlert.getSolution(), recordAlert.getReference(),
                    recordAlert.getEvidence(), recordAlert.getCweId(), recordAlert.getWascId(),
                    null);
            setHistoryRef(hRef);
        } catch (HttpMalformedHeaderException e) {
            // ZAP: Just an indication the history record doesn't exist
            logger.debug(e.getMessage(), e);
        } catch (Exception e) {
            // ZAP: Log the exception
            logger.error(e.getMessage(), e);
        }
    }

    /**
     * Rebuilds an alert from its database record using an already loaded history
     * reference, avoiding a database round trip.
     */
    public Alert(RecordAlert recordAlert, HistoryReference ref) {
        this(recordAlert.getPluginId(), recordAlert.getRisk(), recordAlert.getConfidence(), recordAlert.getAlert());
        // ZAP: Set the alertId
        this.alertId = recordAlert.getAlertId();
        setDetail(recordAlert.getDescription(), recordAlert.getUri(),
                recordAlert.getParam(), recordAlert.getAttack(), recordAlert.getOtherInfo(),
                recordAlert.getSolution(), recordAlert.getReference(),
                recordAlert.getEvidence(), recordAlert.getCweId(), recordAlert.getWascId(),
                null);
        setHistoryRef(ref);
    }

    /**
     * @deprecated (2.4.0) Replaced by {@link #setRiskConfidence(int, int)}.
     * Use of reliability has been deprecated in favour of using confidence
     */
    @Deprecated
    public void setRiskReliability(int risk, int confidence) {
        this.risk = risk;
        // Keep the deprecated reliability field in sync - previously it was never
        // assigned, leaving the reliability check in getIconUrl() dead.
        this.reliability = confidence;
        this.confidence = confidence;
    }

    public void setRiskConfidence(int risk, int confidence) {
        this.risk = risk;
        this.confidence = confidence;
    }

    /**
     * @deprecated (2.5.0) Replaced by {@link #setName}.
     * Use of alert has been deprecated in favour of using name.
     */
    @Deprecated
    public void setAlert(String alert) {
        setName(alert);
    }

    /**
     * Sets the name of the alert to name
     * @param name the name to set for the alert; ignored if {@code null}
     * @since 2.5.0
     */
    public void setName(String name) {
        if (name == null) return;
        this.name = name;
    }

    /**
     * @deprecated (2.2.0) Replaced by
     * {@link #setDetail(String, String, String, String, String, String, String, String, int, int, HttpMessage)}. It
     * will be removed in a future release.
     */
    @Deprecated
    @SuppressWarnings("javadoc")
    public void setDetail(String description, String uri, String param, String attack, String otherInfo,
            String solution, String reference, HttpMessage msg) {
        setDetail(description, uri, param, attack, otherInfo, solution, reference, "", -1, -1, msg);
    }

    /**
     * Sets all alert details in one call. {@code null} strings leave the
     * corresponding field unchanged (see the individual setters).
     *
     * @since 2.2.0
     */
    public void setDetail(String description, String uri, String param, String attack, String otherInfo,
            String solution, String reference, String evidence, int cweId, int wascId, HttpMessage msg) {
        setDescription(description);
        setUri(uri);
        setParam(param);
        setAttack(attack);
        setOtherInfo(otherInfo);
        setSolution(solution);
        setReference(reference);
        setMessage(msg);
        setEvidence(evidence);
        setCweId(cweId);
        setWascId(wascId);
        if (msg != null) {
            setHistoryRef(msg.getHistoryRef());
        }
    }

    // Variant used by newInstance(): detail fields plus a history reference.
    private void setDetail(String description, String uri, String param, String attack, String otherInfo,
            String solution, String reference, HistoryReference href) {
        setDescription(description);
        setUri(uri);
        setParam(param);
        setAttack(attack);
        setOtherInfo(otherInfo);
        setSolution(solution);
        setReference(reference);
        setHistoryRef(href);
    }

    public void setUri(String uri) {
        // ZAP: Cope with null
        if (uri == null) return;
        // ZAP: Changed to not create a new String.
        this.uri = uri;
    }

    public void setDescription(String description) {
        if (description == null) return;
        // ZAP: Changed to not create a new String.
        this.description = description;
    }

    public void setParam(String param) {
        if (param == null) return;
        // ZAP: Changed to not create a new String.
        this.param = param;
    }

    public void setOtherInfo(String otherInfo) {
        if (otherInfo == null) return;
        // ZAP: Changed to not create a new String.
        this.otherInfo = otherInfo;
    }

    public void setSolution(String solution) {
        if (solution == null) return;
        // ZAP: Changed to not create a new String.
        this.solution = solution;
    }

    public void setReference(String reference) {
        if (reference == null) return;
        // ZAP: Changed to not create a new String.
        this.reference = reference;
    }

    /**
     * Sets the message this alert was raised for, caching its method, body and URI
     * so the heavyweight message reference can be dropped later. Passing
     * {@code null} clears the reference but keeps the cached values.
     */
    public void setMessage(HttpMessage message) {
        if (message != null) {
            this.message = message;
            this.method = message.getRequestHeader().getMethod();
            this.postData = message.getRequestBody().toString();
            this.msgUri = message.getRequestHeader().getURI();
        } else {
            // Used to clear the ref so we don't hold onto it
            this.message = null;
        }
    }

    /**
     * Orders alerts by risk, confidence, plugin id, then (case-insensitively) by
     * name, uri, param and otherInfo, and finally by evidence and attack.
     * NOTE(review): name is compared case-insensitively here but case-sensitively
     * in equals(); left as-is to preserve existing sort behaviour.
     */
    @Override
    public int compareTo(Alert alert2) {
        if (risk < alert2.risk) {
            return -1;
        } else if (risk > alert2.risk) {
            return 1;
        }
        if (confidence < alert2.confidence) {
            return -1;
        } else if (confidence > alert2.confidence) {
            return 1;
        }
        if (pluginId < alert2.pluginId) {
            return -1;
        } else if (pluginId > alert2.pluginId) {
            return 1;
        }
        int result = name.compareToIgnoreCase(alert2.name);
        if (result != 0) {
            return result;
        }
        // ZAP: changed to compare the field uri with alert2.uri
        result = uri.compareToIgnoreCase(alert2.uri);
        if (result != 0) {
            return result;
        }
        // ZAP: changed to compare the field param with alert2.param
        result = param.compareToIgnoreCase(alert2.param);
        if (result != 0) {
            return result;
        }
        result = otherInfo.compareToIgnoreCase(alert2.otherInfo);
        if (result != 0) {
            return result;
        }
        result = compareStrings(evidence, alert2.evidence);
        if (result != 0) {
            return result;
        }
        return compareStrings(attack, alert2.attack);
    }

    // Null-safe natural-order comparison: null sorts before any non-null string.
    private int compareStrings(String string, String otherString) {
        if (string == null) {
            if (otherString == null) {
                return 0;
            }
            return -1;
        } else if (otherString == null) {
            return 1;
        }
        return string.compareTo(otherString);
    }

    /**
     Override equals. Alerts are equal if the plugin id, alert, other info, uri and param is the same.
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        Alert item = (Alert) obj;
        if (risk != item.risk) {
            return false;
        }
        if (confidence != item.confidence) {
            return false;
        }
        if (pluginId != item.pluginId) {
            return false;
        }
        if (!name.equals(item.name)) {
            return false;
        }
        if (!uri.equalsIgnoreCase(item.uri)) {
            return false;
        }
        if (!param.equalsIgnoreCase(item.param)) {
            return false;
        }
        if (!otherInfo.equalsIgnoreCase(item.otherInfo)) {
            return false;
        }
        if (evidence == null) {
            if (item.evidence != null) {
                return false;
            }
        } else if (!evidence.equals(item.evidence)) {
            return false;
        }
        if (attack == null) {
            if (item.attack != null) {
                return false;
            }
        } else if (!attack.equals(item.attack)) {
            return false;
        }
        return true;
    }

    /**
     * Hash code consistent with {@link #equals(Object)}: uri, param and otherInfo
     * are compared case-insensitively there, so they are lower-cased (with a
     * locale-independent mapping) before hashing. Previously they were hashed
     * case-sensitively, so equal alerts could land in different hash buckets.
     */
    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + risk;
        result = prime * result + confidence;
        result = prime * result + ((evidence == null) ? 0 : evidence.hashCode());
        result = prime * result + name.hashCode();
        result = prime * result + otherInfo.toLowerCase(Locale.ROOT).hashCode();
        result = prime * result + param.toLowerCase(Locale.ROOT).hashCode();
        result = prime * result + pluginId;
        result = prime * result + uri.toLowerCase(Locale.ROOT).hashCode();
        result = prime * result + ((attack == null) ? 0 : attack.hashCode());
        return result;
    }

    /**
     Create a new instance of AlertItem with same members.
     */
    public Alert newInstance() {
        Alert item = new Alert(this.pluginId);
        item.setRiskConfidence(this.risk, this.confidence);
        item.setName(this.name);
        item.setDetail(this.description, this.uri, this.param, this.attack, this.otherInfo, this.solution, this.reference, this.historyRef);
        return item;
    }

    /**
     * Serialises this alert as an XML report fragment; {@code urls} is the
     * pre-rendered block of URL/param entries to embed.
     */
    public String toPluginXML(String urls) {
        StringBuilder sb = new StringBuilder(150); // ZAP: Changed the type to StringBuilder.
        sb.append("<alertitem>\r\n");
        sb.append("  <pluginid>").append(pluginId).append("</pluginid>\r\n");
        sb.append("  <alert>").append(replaceEntity(name)).append("</alert>\r\n"); //Deprecated in 2.5.0, maintain for compatibility with custom code
        sb.append("  <name>").append(replaceEntity(name)).append("</name>\r\n");
        sb.append("  <riskcode>").append(risk).append("</riskcode>\r\n");
        sb.append("  <confidence>").append(confidence).append("</confidence>\r\n");
        sb.append("  <riskdesc>").append(replaceEntity(MSG_RISK[risk] + " (" + MSG_CONFIDENCE[confidence] + ")")).append("</riskdesc>\r\n");
        sb.append("  <desc>").append(replaceEntity(paragraph(description))).append("</desc>\r\n");
        sb.append(urls);
        sb.append("  <solution>").append(replaceEntity(paragraph(solution))).append("</solution>\r\n");
        // ZAP: Added otherInfo to the report
        if (otherInfo != null && otherInfo.length() > 0) {
            sb.append("  <otherinfo>").append(replaceEntity(paragraph(otherInfo))).append("</otherinfo>\r\n");
        }
        sb.append("  <reference>" ).append(replaceEntity(paragraph(reference))).append("</reference>\r\n");
        if (cweId > 0) {
            sb.append("  <cweid>" ).append(cweId).append("</cweid>\r\n");
        }
        if (wascId > 0) {
            sb.append("  <wascid>" ).append(wascId).append("</wascid>\r\n");
        }
        sb.append("</alertitem>\r\n");
        return sb.toString();
    }

    /** XML-entity-encodes the given text; returns {@code null} for {@code null}. */
    public String replaceEntity(String text) {
        String result = null;
        if (text != null) {
            result = ReportGenerator.entityEncode(text);
        }
        return result;
    }

    /**
     * Wraps the text in HTML paragraph tags, turning line breaks into paragraph
     * boundaries. {@code null} is treated as empty (previously it caused an NPE).
     */
    public String paragraph(String text) {
        if (text == null) {
            return "";
        }
        return "<p>" + text.replaceAll("\\r\\n","</p><p>").replaceAll("\\n","</p><p>") + "</p>";
    }

    /**
     * @deprecated (2.5.0) Replaced by {@link #getName}.
     * Use of alert has been deprecated in favour of using name.
     * @return Returns the alert.
     */
    @Deprecated
    public String getAlert() {
        return name;
    }

    /**
     * @return Returns the name of the alert.
     * @since 2.5.0
     */
    public String getName() {
        return name;
    }

    /**
     * @return Returns the description.
     */
    public String getDescription() {
        return description;
    }

    /**
     * @return Returns the id.
     */
    public int getPluginId() {
        return pluginId;
    }

    /**
     * @return the cached message, or the one loaded from the history reference,
     *         or {@code null} if neither is available.
     */
    public HttpMessage getMessage() {
        if (this.message != null) {
            return this.message;
        }
        if (this.historyRef != null) {
            try {
                return this.historyRef.getHttpMessage();
            } catch (HttpMalformedHeaderException | DatabaseException e) {
                logger.error(e.getMessage(), e);
            }
        }
        return null;
    }

    /**
     * @return Returns the otherInfo.
     */
    public String getOtherInfo() {
        return otherInfo;
    }

    /**
     * @return Returns the param.
     */
    public String getParam() {
        return param;
    }

    /**
     * @return Returns the reference.
     */
    public String getReference() {
        return reference;
    }

    /**
     * @deprecated (2.4.0) Replaced by {@link #getConfidence()}.
     * @return the reliability.
     */
    @Deprecated
    public int getReliability() {
        return confidence;
    }

    /**
     * @return Returns the confidence.
     */
    public int getConfidence() {
        return confidence;
    }

    /**
     * @return Returns the risk.
     */
    public int getRisk() {
        return risk;
    }

    /** @return the flag icon URL matching this alert's risk/confidence. */
    public URL getIconUrl() {
        //TODO: Shouldn't be necessary to check both but let's be careful
        if (reliability == Alert.CONFIDENCE_FALSE_POSITIVE || confidence == Alert.CONFIDENCE_FALSE_POSITIVE) {
            // Special case - there's no risk - use the green flag
            return Constant.OK_FLAG_IMAGE_URL;
        }
        switch (risk) {
        case Alert.RISK_INFO:
            return Constant.INFO_FLAG_IMAGE_URL;
        case Alert.RISK_LOW:
            return Constant.LOW_FLAG_IMAGE_URL;
        case Alert.RISK_MEDIUM:
            return Constant.MED_FLAG_IMAGE_URL;
        case Alert.RISK_HIGH:
            return Constant.HIGH_FLAG_IMAGE_URL;
        }
        return null;
    }

    /**
     * @return Returns the solution.
     */
    public String getSolution() {
        return solution;
    }

    /**
     * @return Returns the uri.
     */
    public String getUri() {
        return uri;
    }

    /**
     * @return Returns the alertId.
     */
    public int getAlertId() {
        return alertId;
    }

    /**
     * @param alertId The alertId to set.
     */
    public void setAlertId(int alertId) {
        this.alertId = alertId;
    }

    /** @return the uri/param/attack/evidence fields as an XML report fragment. */
    public String getUrlParamXML() {
        StringBuilder sb = new StringBuilder(200); // ZAP: Changed the type to StringBuilder.
        sb.append("  <uri>").append(replaceEntity(uri)).append("</uri>\r\n");
        if (param != null && param.length() > 0) {
            sb.append("  <param>").append(replaceEntity(param)).append("</param>\r\n");
        }
        if (attack != null && attack.length() > 0) {
            sb.append("  <attack>").append(replaceEntity(attack)).append("</attack>\r\n");
        }
        if (evidence != null && evidence.length() > 0) {
            sb.append("  <evidence>").append(replaceEntity(evidence)).append("</evidence>\r\n");
        }
        return sb.toString();
    }

    public int getSourceHistoryId() {
        return sourceHistoryId;
    }

    public void setSourceHistoryId(int sourceHistoryId) {
        this.sourceHistoryId = sourceHistoryId;
    }

    public HistoryReference getHistoryRef () {
        return this.historyRef;
    }

    /**
     * Sets the history reference, caching its method, URI, body and history id
     * and releasing any cached {@link HttpMessage}.
     */
    public void setHistoryRef(HistoryReference historyRef) {
        this.historyRef = historyRef;
        if (historyRef != null) {
            this.message = null;
            this.method = historyRef.getMethod();
            this.msgUri = historyRef.getURI();
            this.postData = historyRef.getRequestBody();
            this.sourceHistoryId = historyRef.getHistoryId();
        }
    }

    public String getAttack() {
        return attack;
    }

    public void setAttack(String attack) {
        // NOTE(review): unlike the other setters this accepts null (callers may rely
        // on clearing the attack); equals()/compareTo() handle the null case.
        this.attack = attack;
    }

    public String getMethod() {
        return method;
    }

    public String getPostData() {
        return postData;
    }

    public URI getMsgUri() {
        return msgUri;
    }

    public String getEvidence() {
        return evidence;
    }

    public void setEvidence(String evidence) {
        // NOTE(review): accepts null, mirroring setAttack(); handled by equals()/compareTo().
        this.evidence = evidence;
    }

    public int getCweId() {
        return cweId;
    }

    public void setCweId(int cweId) {
        this.cweId = cweId;
    }

    public int getWascId() {
        return wascId;
    }

    public void setWascId(int wascId) {
        this.wascId = wascId;
    }
}
| |
/*
* Copyright 2009 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.jboss.netty.buffer;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.CharBuffer;
import java.nio.charset.CharacterCodingException;
import java.nio.charset.Charset;
import java.nio.charset.CharsetDecoder;
import java.nio.charset.CharsetEncoder;
import java.nio.charset.CoderResult;
import java.util.ArrayList;
import java.util.List;
import org.jboss.netty.util.CharsetUtil;
/**
* Creates a new {@link ChannelBuffer} by allocating new space or by wrapping
* or copying existing byte arrays, byte buffers and a string.
*
* <h3>Use static import</h3>
 * This class is intended to be used with the Java 5 static import statement:
*
* <pre>
* import static org.jboss.netty.buffer.{@link ChannelBuffers}.*;
*
* {@link ChannelBuffer} heapBuffer = buffer(128);
* {@link ChannelBuffer} directBuffer = directBuffer(256);
* {@link ChannelBuffer} dynamicBuffer = dynamicBuffer(512);
* {@link ChannelBuffer} wrappedBuffer = wrappedBuffer(new byte[128], new byte[256]);
 * {@link ChannelBuffer} copiedBuffer  = copiedBuffer({@link ByteBuffer}.allocate(128));
* </pre>
*
* <h3>Allocating a new buffer</h3>
*
* Three buffer types are provided out of the box.
*
* <ul>
* <li>{@link #buffer(int)} allocates a new fixed-capacity heap buffer.</li>
* <li>{@link #directBuffer(int)} allocates a new fixed-capacity direct buffer.</li>
* <li>{@link #dynamicBuffer(int)} allocates a new dynamic-capacity heap
* buffer, whose capacity increases automatically as needed by a write
* operation.</li>
* </ul>
*
* <h3>Creating a wrapped buffer</h3>
*
* Wrapped buffer is a buffer which is a view of one or more existing
* byte arrays and byte buffers. Any changes in the content of the original
* array or buffer will be visible in the wrapped buffer. Various wrapper
* methods are provided and their name is all {@code wrappedBuffer()}.
* You might want to take a look at the methods that accept varargs closely if
* you want to create a buffer which is composed of more than one array to
* reduce the number of memory copy.
*
* <h3>Creating a copied buffer</h3>
*
* Copied buffer is a deep copy of one or more existing byte arrays, byte
* buffers or a string. Unlike a wrapped buffer, there's no shared data
* between the original data and the copied buffer. Various copy methods are
* provided and their name is all {@code copiedBuffer()}. It is also convenient
* to use this operation to merge multiple buffers into one buffer.
*
* <h3>Miscellaneous utility methods</h3>
*
* This class also provides various utility methods to help implementation
* of a new buffer type, generation of hex dump and swapping an integer's
* byte order.
*
* @author <a href="http://www.jboss.org/netty/">The Netty Project</a>
* @author <a href="http://gleamynode.net/">Trustin Lee</a>
*
* @version $Rev: 2269 $, $Date: 2010-05-06 16:37:27 +0900 (Thu, 06 May 2010) $
*
* @apiviz.landmark
* @apiviz.has org.jboss.netty.buffer.ChannelBuffer oneway - - creates
*/
public class ChannelBuffers {
/**
* Big endian byte order.
*/
public static final ByteOrder BIG_ENDIAN = ByteOrder.BIG_ENDIAN;
/**
* Little endian byte order.
*/
public static final ByteOrder LITTLE_ENDIAN = ByteOrder.LITTLE_ENDIAN;
/**
* A buffer whose capacity is {@code 0}.
*/
public static final ChannelBuffer EMPTY_BUFFER = new BigEndianHeapChannelBuffer(0);
private static final char[] HEXDUMP_TABLE = new char[256 * 4];
static {
final char[] DIGITS = "0123456789abcdef".toCharArray();
for (int i = 0; i < 256; i ++) {
HEXDUMP_TABLE[(i << 1) + 0] = DIGITS[i >>> 4 & 0x0F];
HEXDUMP_TABLE[(i << 1) + 1] = DIGITS[i >>> 0 & 0x0F];
}
}
/**
* Creates a new big-endian Java heap buffer with the specified
* {@code capacity}. The new buffer's {@code readerIndex} and
* {@code writerIndex} are {@code 0}.
*/
public static ChannelBuffer buffer(int capacity) {
return buffer(BIG_ENDIAN, capacity);
}
/**
* Creates a new Java heap buffer with the specified {@code endianness}
* and {@code capacity}. The new buffer's {@code readerIndex} and
* {@code writerIndex} are {@code 0}.
*/
public static ChannelBuffer buffer(ByteOrder endianness, int capacity) {
if (endianness == BIG_ENDIAN) {
if (capacity == 0) {
return EMPTY_BUFFER;
}
return new BigEndianHeapChannelBuffer(capacity);
} else if (endianness == LITTLE_ENDIAN) {
if (capacity == 0) {
return EMPTY_BUFFER;
}
return new LittleEndianHeapChannelBuffer(capacity);
} else {
throw new NullPointerException("endianness");
}
}
/**
* Creates a new big-endian direct buffer with the specified
* {@code capacity}. The new buffer's {@code readerIndex} and
* {@code writerIndex} are {@code 0}.
*/
public static ChannelBuffer directBuffer(int capacity) {
return directBuffer(BIG_ENDIAN, capacity);
}
/**
* Creates a new direct buffer with the specified {@code endianness} and
* {@code capacity}. The new buffer's {@code readerIndex} and
* {@code writerIndex} are {@code 0}.
*/
public static ChannelBuffer directBuffer(ByteOrder endianness, int capacity) {
if (endianness == null) {
throw new NullPointerException("endianness");
}
if (capacity == 0) {
return EMPTY_BUFFER;
}
ChannelBuffer buffer = new ByteBufferBackedChannelBuffer(
ByteBuffer.allocateDirect(capacity).order(endianness));
buffer.clear();
return buffer;
}
/**
* Creates a new big-endian dynamic buffer whose estimated data length is
* {@code 256} bytes. The new buffer's {@code readerIndex} and
* {@code writerIndex} are {@code 0}.
*/
public static ChannelBuffer dynamicBuffer() {
return dynamicBuffer(BIG_ENDIAN, 256);
}
public static ChannelBuffer dynamicBuffer(ChannelBufferFactory factory) {
if (factory == null) {
throw new NullPointerException("factory");
}
return new DynamicChannelBuffer(factory.getDefaultOrder(), 256, factory);
}
/**
* Creates a new big-endian dynamic buffer with the specified estimated
* data length. More accurate estimation yields less unexpected
* reallocation overhead. The new buffer's {@code readerIndex} and
* {@code writerIndex} are {@code 0}.
*/
public static ChannelBuffer dynamicBuffer(int estimatedLength) {
return dynamicBuffer(BIG_ENDIAN, estimatedLength);
}
/**
* Creates a new dynamic buffer with the specified endianness and
* the specified estimated data length. More accurate estimation yields
* less unexpected reallocation overhead. The new buffer's
* {@code readerIndex} and {@code writerIndex} are {@code 0}.
*/
public static ChannelBuffer dynamicBuffer(ByteOrder endianness, int estimatedLength) {
return new DynamicChannelBuffer(endianness, estimatedLength);
}
/**
* Creates a new big-endian dynamic buffer with the specified estimated
* data length using the specified factory. More accurate estimation yields
* less unexpected reallocation overhead. The new buffer's {@code readerIndex}
* and {@code writerIndex} are {@code 0}.
*/
public static ChannelBuffer dynamicBuffer(int estimatedLength, ChannelBufferFactory factory) {
if (factory == null) {
throw new NullPointerException("factory");
}
return new DynamicChannelBuffer(factory.getDefaultOrder(), estimatedLength, factory);
}
/**
* Creates a new dynamic buffer with the specified endianness and
* the specified estimated data length using the specified factory.
* More accurate estimation yields less unexpected reallocation overhead.
* The new buffer's {@code readerIndex} and {@code writerIndex} are {@code 0}.
*/
public static ChannelBuffer dynamicBuffer(ByteOrder endianness, int estimatedLength, ChannelBufferFactory factory) {
return new DynamicChannelBuffer(endianness, estimatedLength, factory);
}
/**
* Creates a new big-endian buffer which wraps the specified {@code array}.
* A modification on the specified array's content will be visible to the
* returned buffer.
*/
public static ChannelBuffer wrappedBuffer(byte[] array) {
return wrappedBuffer(BIG_ENDIAN, array);
}
/**
* Creates a new buffer which wraps the specified {@code array} with the
* specified {@code endianness}. A modification on the specified array's
* content will be visible to the returned buffer.
*/
public static ChannelBuffer wrappedBuffer(ByteOrder endianness, byte[] array) {
if (endianness == BIG_ENDIAN) {
if (array.length == 0) {
return EMPTY_BUFFER;
}
return new BigEndianHeapChannelBuffer(array);
} else if (endianness == LITTLE_ENDIAN) {
if (array.length == 0) {
return EMPTY_BUFFER;
}
return new LittleEndianHeapChannelBuffer(array);
} else {
throw new NullPointerException("endianness");
}
}
/**
* Creates a new big-endian buffer which wraps the sub-region of the
* specified {@code array}. A modification on the specified array's
* content will be visible to the returned buffer.
*/
public static ChannelBuffer wrappedBuffer(byte[] array, int offset, int length) {
return wrappedBuffer(BIG_ENDIAN, array, offset, length);
}
/**
* Creates a new buffer which wraps the sub-region of the specified
* {@code array} with the specified {@code endianness}. A modification on
* the specified array's content will be visible to the returned buffer.
*/
public static ChannelBuffer wrappedBuffer(ByteOrder endianness, byte[] array, int offset, int length) {
if (endianness == null) {
throw new NullPointerException("endianness");
}
if (offset == 0) {
if (length == array.length) {
return wrappedBuffer(endianness, array);
} else {
if (length == 0) {
return EMPTY_BUFFER;
} else {
return new TruncatedChannelBuffer(wrappedBuffer(endianness, array), length);
}
}
} else {
if (length == 0) {
return EMPTY_BUFFER;
} else {
return new SlicedChannelBuffer(wrappedBuffer(endianness, array), offset, length);
}
}
}
/**
* Creates a new buffer which wraps the specified NIO buffer's current
* slice. A modification on the specified buffer's content will be
* visible to the returned buffer.
*/
public static ChannelBuffer wrappedBuffer(ByteBuffer buffer) {
if (!buffer.hasRemaining()) {
return EMPTY_BUFFER;
}
if (buffer.hasArray()) {
return wrappedBuffer(buffer.order(), buffer.array(), buffer.arrayOffset() + buffer.position(),buffer.remaining());
} else {
return new ByteBufferBackedChannelBuffer(buffer);
}
}
/**
* Creates a new buffer which wraps the specified buffer's readable bytes.
* A modification on the specified buffer's content will be visible to the
* returned buffer.
*/
public static ChannelBuffer wrappedBuffer(ChannelBuffer buffer) {
if (buffer.readable()) {
return buffer.slice();
} else {
return EMPTY_BUFFER;
}
}
/**
* Creates a new big-endian composite buffer which wraps the specified
* arrays without copying them. A modification on the specified arrays'
* content will be visible to the returned buffer.
*/
public static ChannelBuffer wrappedBuffer(byte[]... arrays) {
return wrappedBuffer(BIG_ENDIAN, arrays);
}
/**
* Creates a new composite buffer which wraps the specified arrays without
* copying them. A modification on the specified arrays' content will be
* visible to the returned buffer.
*
* @param endianness the endianness of the new buffer
*/
public static ChannelBuffer wrappedBuffer(ByteOrder endianness, byte[]... arrays) {
switch (arrays.length) {
case 0:
break;
case 1:
if (arrays[0].length != 0) {
return wrappedBuffer(endianness, arrays[0]);
}
break;
default:
// Get the list of the component, while guessing the byte order.
final List<ChannelBuffer> components = new ArrayList<ChannelBuffer>(arrays.length);
for (byte[] a: arrays) {
if (a == null) {
break;
}
if (a.length > 0) {
components.add(wrappedBuffer(endianness, a));
}
}
return compositeBuffer(endianness, components);
}
return EMPTY_BUFFER;
}
private static ChannelBuffer compositeBuffer(
ByteOrder endianness, List<ChannelBuffer> components) {
switch (components.size()) {
case 0:
return EMPTY_BUFFER;
case 1:
return components.get(0);
default:
return new CompositeChannelBuffer(endianness, components);
}
}
/**
* Creates a new composite buffer which wraps the readable bytes of the
* specified buffers without copying them. A modification on the content
* of the specified buffers will be visible to the returned buffer.
*
* @throws IllegalArgumentException
* if the specified buffers' endianness are different from each
* other
*/
public static ChannelBuffer wrappedBuffer(ChannelBuffer... buffers) {
switch (buffers.length) {
case 0:
break;
case 1:
if (buffers[0].readable()) {
return wrappedBuffer(buffers[0]);
}
break;
default:
ByteOrder order = null;
final List<ChannelBuffer> components = new ArrayList<ChannelBuffer>(buffers.length);
for (ChannelBuffer c: buffers) {
if (c == null) {
break;
}
if (c.readable()) {
if (order != null) {
if (!order.equals(c.order())) {
throw new IllegalArgumentException(
"inconsistent byte order");
}
} else {
order = c.order();
}
if (c instanceof CompositeChannelBuffer) {
// Expand nested composition.
components.addAll(
((CompositeChannelBuffer) c).decompose(
c.readerIndex(), c.readableBytes()));
} else {
// An ordinary buffer (non-composite)
components.add(c.slice());
}
}
}
return compositeBuffer(order, components);
}
return EMPTY_BUFFER;
}
/**
* Creates a new composite buffer which wraps the slices of the specified
* NIO buffers without copying them. A modification on the content of the
* specified buffers will be visible to the returned buffer.
*
* @throws IllegalArgumentException
* if the specified buffers' endianness are different from each
* other
*/
public static ChannelBuffer wrappedBuffer(ByteBuffer... buffers) {
switch (buffers.length) {
case 0:
break;
case 1:
if (buffers[0].hasRemaining()) {
return wrappedBuffer(buffers[0]);
}
break;
default:
ByteOrder order = null;
final List<ChannelBuffer> components = new ArrayList<ChannelBuffer>(buffers.length);
for (ByteBuffer b: buffers) {
if (b == null) {
break;
}
if (b.hasRemaining()) {
if (order != null) {
if (!order.equals(b.order())) {
throw new IllegalArgumentException(
"inconsistent byte order");
}
} else {
order = b.order();
}
components.add(wrappedBuffer(b));
}
}
return compositeBuffer(order, components);
}
return EMPTY_BUFFER;
}
/**
* Creates a new big-endian buffer whose content is a copy of the
* specified {@code array}. The new buffer's {@code readerIndex} and
* {@code writerIndex} are {@code 0} and {@code array.length} respectively.
*/
public static ChannelBuffer copiedBuffer(byte[] array) {
return copiedBuffer(BIG_ENDIAN, array);
}
/**
* Creates a new buffer with the specified {@code endianness} whose
* content is a copy of the specified {@code array}. The new buffer's
* {@code readerIndex} and {@code writerIndex} are {@code 0} and
* {@code array.length} respectively.
*/
public static ChannelBuffer copiedBuffer(ByteOrder endianness, byte[] array) {
if (endianness == BIG_ENDIAN) {
if (array.length == 0) {
return EMPTY_BUFFER;
}
return new BigEndianHeapChannelBuffer(array.clone());
} else if (endianness == LITTLE_ENDIAN) {
if (array.length == 0) {
return EMPTY_BUFFER;
}
return new LittleEndianHeapChannelBuffer(array.clone());
} else {
throw new NullPointerException("endianness");
}
}
/**
* Creates a new big-endian buffer whose content is a copy of the
* specified {@code array}'s sub-region. The new buffer's
* {@code readerIndex} and {@code writerIndex} are {@code 0} and
* the specified {@code length} respectively.
*/
public static ChannelBuffer copiedBuffer(byte[] array, int offset, int length) {
return copiedBuffer(BIG_ENDIAN, array, offset, length);
}
/**
* Creates a new buffer with the specified {@code endianness} whose
* content is a copy of the specified {@code array}'s sub-region. The new
* buffer's {@code readerIndex} and {@code writerIndex} are {@code 0} and
* the specified {@code length} respectively.
*/
public static ChannelBuffer copiedBuffer(ByteOrder endianness, byte[] array, int offset, int length) {
if (endianness == null) {
throw new NullPointerException("endianness");
}
if (length == 0) {
return EMPTY_BUFFER;
}
byte[] copy = new byte[length];
System.arraycopy(array, offset, copy, 0, length);
return wrappedBuffer(endianness, copy);
}
/**
* Creates a new buffer whose content is a copy of the specified
* {@code buffer}'s current slice. The new buffer's {@code readerIndex}
* and {@code writerIndex} are {@code 0} and {@code buffer.remaining}
* respectively.
*/
public static ChannelBuffer copiedBuffer(ByteBuffer buffer) {
int length = buffer.remaining();
if (length == 0) {
return EMPTY_BUFFER;
}
byte[] copy = new byte[length];
int position = buffer.position();
try {
buffer.get(copy);
} finally {
buffer.position(position);
}
return wrappedBuffer(buffer.order(), copy);
}
/**
* Creates a new buffer whose content is a copy of the specified
* {@code buffer}'s readable bytes. The new buffer's {@code readerIndex}
* and {@code writerIndex} are {@code 0} and {@code buffer.readableBytes}
* respectively.
*/
public static ChannelBuffer copiedBuffer(ChannelBuffer buffer) {
if (buffer.readable()) {
return buffer.copy();
} else {
return EMPTY_BUFFER;
}
}
/**
* Creates a new big-endian buffer whose content is a merged copy of
* the specified {@code arrays}. The new buffer's {@code readerIndex}
* and {@code writerIndex} are {@code 0} and the sum of all arrays'
* {@code length} respectively.
*/
public static ChannelBuffer copiedBuffer(byte[]... arrays) {
return copiedBuffer(BIG_ENDIAN, arrays);
}
/**
* Creates a new buffer with the specified {@code endianness} whose
* content is a merged copy of the specified {@code arrays}. The new
* buffer's {@code readerIndex} and {@code writerIndex} are {@code 0}
* and the sum of all arrays' {@code length} respectively.
*/
public static ChannelBuffer copiedBuffer(ByteOrder endianness, byte[]... arrays) {
switch (arrays.length) {
case 0:
return EMPTY_BUFFER;
case 1:
if (arrays[0].length == 0) {
return EMPTY_BUFFER;
} else {
return copiedBuffer(endianness, arrays[0]);
}
}
// Merge the specified arrays into one array.
int length = 0;
for (byte[] a: arrays) {
if (Integer.MAX_VALUE - length < a.length) {
throw new IllegalArgumentException(
"The total length of the specified arrays is too big.");
}
length += a.length;
}
if (length == 0) {
return EMPTY_BUFFER;
}
byte[] mergedArray = new byte[length];
for (int i = 0, j = 0; i < arrays.length; i ++) {
byte[] a = arrays[i];
System.arraycopy(a, 0, mergedArray, j, a.length);
j += a.length;
}
return wrappedBuffer(endianness, mergedArray);
}
/**
* Creates a new buffer whose content is a merged copy of the specified
* {@code buffers}' readable bytes. The new buffer's {@code readerIndex}
* and {@code writerIndex} are {@code 0} and the sum of all buffers'
* {@code readableBytes} respectively.
*
* @throws IllegalArgumentException
* if the specified buffers' endianness are different from each
* other
*/
public static ChannelBuffer copiedBuffer(ChannelBuffer... buffers) {
switch (buffers.length) {
case 0:
return EMPTY_BUFFER;
case 1:
return copiedBuffer(buffers[0]);
}
ChannelBuffer[] copiedBuffers = new ChannelBuffer[buffers.length];
for (int i = 0; i < buffers.length; i ++) {
copiedBuffers[i] = copiedBuffer(buffers[i]);
}
return wrappedBuffer(copiedBuffers);
}
/**
* Creates a new buffer whose content is a merged copy of the specified
* {@code buffers}' slices. The new buffer's {@code readerIndex} and
* {@code writerIndex} are {@code 0} and the sum of all buffers'
* {@code remaining} respectively.
*
* @throws IllegalArgumentException
* if the specified buffers' endianness are different from each
* other
*/
public static ChannelBuffer copiedBuffer(ByteBuffer... buffers) {
switch (buffers.length) {
case 0:
return EMPTY_BUFFER;
case 1:
return copiedBuffer(buffers[0]);
}
ChannelBuffer[] copiedBuffers = new ChannelBuffer[buffers.length];
for (int i = 0; i < buffers.length; i ++) {
copiedBuffers[i] = copiedBuffer(buffers[i]);
}
return wrappedBuffer(copiedBuffers);
}
/**
* Creates a new big-endian buffer whose content is the specified
* {@code string} encoded in the specified {@code charset}.
* The new buffer's {@code readerIndex} and {@code writerIndex} are
* {@code 0} and the length of the encoded string respectively.
*/
public static ChannelBuffer copiedBuffer(CharSequence string, Charset charset) {
return copiedBuffer(BIG_ENDIAN, string, charset);
}
/**
* Creates a new big-endian buffer whose content is a subregion of
* the specified {@code string} encoded in the specified {@code charset}.
* The new buffer's {@code readerIndex} and {@code writerIndex} are
* {@code 0} and the length of the encoded string respectively.
*/
public static ChannelBuffer copiedBuffer(
CharSequence string, int offset, int length, Charset charset) {
return copiedBuffer(BIG_ENDIAN, string, offset, length, charset);
}
/**
* Creates a new buffer with the specified {@code endianness} whose
* content is the specified {@code string} encoded in the specified
* {@code charset}. The new buffer's {@code readerIndex} and
* {@code writerIndex} are {@code 0} and the length of the encoded string
* respectively.
*/
public static ChannelBuffer copiedBuffer(ByteOrder endianness, CharSequence string, Charset charset) {
if (string == null) {
throw new NullPointerException("string");
}
if (string instanceof CharBuffer) {
return copiedBuffer(endianness, (CharBuffer) string, charset);
}
return copiedBuffer(endianness, CharBuffer.wrap(string), charset);
}
/**
* Creates a new buffer with the specified {@code endianness} whose
* content is a subregion of the specified {@code string} encoded in the
* specified {@code charset}. The new buffer's {@code readerIndex} and
* {@code writerIndex} are {@code 0} and the length of the encoded string
* respectively.
*/
public static ChannelBuffer copiedBuffer(
ByteOrder endianness, CharSequence string, int offset, int length, Charset charset) {
if (string == null) {
throw new NullPointerException("string");
}
if (length == 0) {
return EMPTY_BUFFER;
}
if (string instanceof CharBuffer) {
CharBuffer buf = (CharBuffer) string;
if (buf.hasArray()) {
return copiedBuffer(
endianness,
buf.array(),
buf.arrayOffset() + buf.position() + offset,
length, charset);
}
buf = buf.slice();
buf.limit(length);
buf.position(offset);
return copiedBuffer(endianness, buf, charset);
}
return copiedBuffer(
endianness, CharBuffer.wrap(string, offset, offset + length),
charset);
}
/**
* Creates a new big-endian buffer whose content is the specified
* {@code array} encoded in the specified {@code charset}.
* The new buffer's {@code readerIndex} and {@code writerIndex} are
* {@code 0} and the length of the encoded string respectively.
*/
public static ChannelBuffer copiedBuffer(char[] array, Charset charset) {
return copiedBuffer(BIG_ENDIAN, array, 0, array.length, charset);
}
/**
* Creates a new big-endian buffer whose content is a subregion of
* the specified {@code array} encoded in the specified {@code charset}.
* The new buffer's {@code readerIndex} and {@code writerIndex} are
* {@code 0} and the length of the encoded string respectively.
*/
public static ChannelBuffer copiedBuffer(
char[] array, int offset, int length, Charset charset) {
return copiedBuffer(BIG_ENDIAN, array, offset, length, charset);
}
/**
* Creates a new buffer with the specified {@code endianness} whose
* content is the specified {@code array} encoded in the specified
* {@code charset}. The new buffer's {@code readerIndex} and
* {@code writerIndex} are {@code 0} and the length of the encoded string
* respectively.
*/
public static ChannelBuffer copiedBuffer(ByteOrder endianness, char[] array, Charset charset) {
return copiedBuffer(endianness, array, 0, array.length, charset);
}
/**
* Creates a new buffer with the specified {@code endianness} whose
* content is a subregion of the specified {@code array} encoded in the
* specified {@code charset}. The new buffer's {@code readerIndex} and
* {@code writerIndex} are {@code 0} and the length of the encoded string
* respectively.
*/
public static ChannelBuffer copiedBuffer(
ByteOrder endianness, char[] array, int offset, int length, Charset charset) {
if (array == null) {
throw new NullPointerException("array");
}
if (length == 0) {
return EMPTY_BUFFER;
}
return copiedBuffer(
endianness, CharBuffer.wrap(array, offset, length), charset);
}
private static ChannelBuffer copiedBuffer(ByteOrder endianness, CharBuffer buffer, Charset charset) {
CharBuffer src = buffer;
ByteBuffer dst = ChannelBuffers.encodeString(src, charset);
ChannelBuffer result = wrappedBuffer(endianness, dst.array());
result.writerIndex(dst.remaining());
return result;
}
/**
* @deprecated Use {@link #copiedBuffer(CharSequence, Charset)} instead.
*/
@Deprecated
public static ChannelBuffer copiedBuffer(String string, String charsetName) {
return copiedBuffer(string, Charset.forName(charsetName));
}
/**
* @deprecated Use {@link #copiedBuffer(ByteOrder, CharSequence, Charset)} instead.
*/
@Deprecated
public static ChannelBuffer copiedBuffer(ByteOrder endianness, String string, String charsetName) {
return copiedBuffer(endianness, string, Charset.forName(charsetName));
}
/**
* Creates a read-only buffer which disallows any modification operations
* on the specified {@code buffer}. The new buffer has the same
* {@code readerIndex} and {@code writerIndex} with the specified
* {@code buffer}.
*/
public static ChannelBuffer unmodifiableBuffer(ChannelBuffer buffer) {
if (buffer instanceof ReadOnlyChannelBuffer) {
buffer = ((ReadOnlyChannelBuffer) buffer).unwrap();
}
return new ReadOnlyChannelBuffer(buffer);
}
/**
* Returns a <a href="http://en.wikipedia.org/wiki/Hex_dump">hex dump</a>
* of the specified buffer's readable bytes.
*/
public static String hexDump(ChannelBuffer buffer) {
return hexDump(buffer, buffer.readerIndex(), buffer.readableBytes());
}
/**
* Returns a <a href="http://en.wikipedia.org/wiki/Hex_dump">hex dump</a>
* of the specified buffer's sub-region.
*/
public static String hexDump(ChannelBuffer buffer, int fromIndex, int length) {
if (length < 0) {
throw new IllegalArgumentException("length: " + length);
}
if (length == 0) {
return "";
}
int endIndex = fromIndex + length;
char[] buf = new char[length << 1];
int srcIdx = fromIndex;
int dstIdx = 0;
for (; srcIdx < endIndex; srcIdx ++, dstIdx += 2) {
System.arraycopy(
HEXDUMP_TABLE, buffer.getUnsignedByte(srcIdx) << 1,
buf, dstIdx, 2);
}
return new String(buf);
}
/**
* Calculates the hash code of the specified buffer. This method is
* useful when implementing a new buffer type.
*/
public static int hashCode(ChannelBuffer buffer) {
final int aLen = buffer.readableBytes();
final int intCount = aLen >>> 2;
final int byteCount = aLen & 3;
int hashCode = 1;
int arrayIndex = buffer.readerIndex();
if (buffer.order() == BIG_ENDIAN) {
for (int i = intCount; i > 0; i --) {
hashCode = 31 * hashCode + buffer.getInt(arrayIndex);
arrayIndex += 4;
}
} else {
for (int i = intCount; i > 0; i --) {
hashCode = 31 * hashCode + swapInt(buffer.getInt(arrayIndex));
arrayIndex += 4;
}
}
for (int i = byteCount; i > 0; i --) {
hashCode = 31 * hashCode + buffer.getByte(arrayIndex ++);
}
if (hashCode == 0) {
hashCode = 1;
}
return hashCode;
}
/**
* Returns {@code true} if and only if the two specified buffers are
* identical to each other as described in {@code ChannelBuffer#equals(Object)}.
* This method is useful when implementing a new buffer type.
*/
public static boolean equals(ChannelBuffer bufferA, ChannelBuffer bufferB) {
final int aLen = bufferA.readableBytes();
if (aLen != bufferB.readableBytes()) {
return false;
}
final int longCount = aLen >>> 3;
final int byteCount = aLen & 7;
int aIndex = bufferA.readerIndex();
int bIndex = bufferB.readerIndex();
if (bufferA.order() == bufferB.order()) {
for (int i = longCount; i > 0; i --) {
if (bufferA.getLong(aIndex) != bufferB.getLong(bIndex)) {
return false;
}
aIndex += 8;
bIndex += 8;
}
} else {
for (int i = longCount; i > 0; i --) {
if (bufferA.getLong(aIndex) != swapLong(bufferB.getLong(bIndex))) {
return false;
}
aIndex += 8;
bIndex += 8;
}
}
for (int i = byteCount; i > 0; i --) {
if (bufferA.getByte(aIndex) != bufferB.getByte(bIndex)) {
return false;
}
aIndex ++;
bIndex ++;
}
return true;
}
/**
* Compares the two specified buffers as described in {@link ChannelBuffer#compareTo(ChannelBuffer)}.
* This method is useful when implementing a new buffer type.
*/
public static int compare(ChannelBuffer bufferA, ChannelBuffer bufferB) {
final int aLen = bufferA.readableBytes();
final int bLen = bufferB.readableBytes();
final int minLength = Math.min(aLen, bLen);
final int uintCount = minLength >>> 2;
final int byteCount = minLength & 3;
int aIndex = bufferA.readerIndex();
int bIndex = bufferB.readerIndex();
if (bufferA.order() == bufferB.order()) {
for (int i = uintCount; i > 0; i --) {
long va = bufferA.getUnsignedInt(aIndex);
long vb = bufferB.getUnsignedInt(bIndex);
if (va > vb) {
return 1;
} else if (va < vb) {
return -1;
}
aIndex += 4;
bIndex += 4;
}
} else {
for (int i = uintCount; i > 0; i --) {
long va = bufferA.getUnsignedInt(aIndex);
long vb = swapInt(bufferB.getInt(bIndex)) & 0xFFFFFFFFL;
if (va > vb) {
return 1;
} else if (va < vb) {
return -1;
}
aIndex += 4;
bIndex += 4;
}
}
for (int i = byteCount; i > 0; i --) {
short va = bufferA.getUnsignedByte(aIndex);
short vb = bufferB.getUnsignedByte(bIndex);
if (va > vb) {
return 1;
} else if (va < vb) {
return -1;
}
aIndex ++;
bIndex ++;
}
return aLen - bLen;
}
/**
* The default implementation of {@link ChannelBuffer#indexOf(int, int, byte)}.
* This method is useful when implementing a new buffer type.
*/
public static int indexOf(ChannelBuffer buffer, int fromIndex, int toIndex, byte value) {
if (fromIndex <= toIndex) {
return firstIndexOf(buffer, fromIndex, toIndex, value);
} else {
return lastIndexOf(buffer, fromIndex, toIndex, value);
}
}
/**
* The default implementation of {@link ChannelBuffer#indexOf(int, int, ChannelBufferIndexFinder)}.
* This method is useful when implementing a new buffer type.
*/
public static int indexOf(ChannelBuffer buffer, int fromIndex, int toIndex, ChannelBufferIndexFinder indexFinder) {
if (fromIndex <= toIndex) {
return firstIndexOf(buffer, fromIndex, toIndex, indexFinder);
} else {
return lastIndexOf(buffer, fromIndex, toIndex, indexFinder);
}
}
/**
* Toggles the endianness of the specified 16-bit short integer.
*/
public static short swapShort(short value) {
return (short) (value << 8 | value >>> 8 & 0xff);
}
/**
* Toggles the endianness of the specified 24-bit medium integer.
*/
public static int swapMedium(int value) {
return value << 16 & 0xff0000 | value & 0xff00 | value >>> 16 & 0xff;
}
/**
* Toggles the endianness of the specified 32-bit integer.
*/
public static int swapInt(int value) {
return swapShort((short) value) << 16 |
swapShort((short) (value >>> 16)) & 0xffff;
}
/**
* Toggles the endianness of the specified 64-bit long integer.
*/
public static long swapLong(long value) {
return (long) swapInt((int) value) << 32 |
swapInt((int) (value >>> 32)) & 0xffffffffL;
}
private static int firstIndexOf(ChannelBuffer buffer, int fromIndex, int toIndex, byte value) {
fromIndex = Math.max(fromIndex, 0);
if (fromIndex >= toIndex || buffer.capacity() == 0) {
return -1;
}
for (int i = fromIndex; i < toIndex; i ++) {
if (buffer.getByte(i) == value) {
return i;
}
}
return -1;
}
private static int lastIndexOf(ChannelBuffer buffer, int fromIndex, int toIndex, byte value) {
fromIndex = Math.min(fromIndex, buffer.capacity());
if (fromIndex < 0 || buffer.capacity() == 0) {
return -1;
}
for (int i = fromIndex - 1; i >= toIndex; i --) {
if (buffer.getByte(i) == value) {
return i;
}
}
return -1;
}
private static int firstIndexOf(ChannelBuffer buffer, int fromIndex, int toIndex, ChannelBufferIndexFinder indexFinder) {
fromIndex = Math.max(fromIndex, 0);
if (fromIndex >= toIndex || buffer.capacity() == 0) {
return -1;
}
for (int i = fromIndex; i < toIndex; i ++) {
if (indexFinder.find(buffer, i)) {
return i;
}
}
return -1;
}
private static int lastIndexOf(ChannelBuffer buffer, int fromIndex, int toIndex, ChannelBufferIndexFinder indexFinder) {
fromIndex = Math.min(fromIndex, buffer.capacity());
if (fromIndex < 0 || buffer.capacity() == 0) {
return -1;
}
for (int i = fromIndex - 1; i >= toIndex; i --) {
if (indexFinder.find(buffer, i)) {
return i;
}
}
return -1;
}
static ByteBuffer encodeString(CharBuffer src, Charset charset) {
final CharsetEncoder encoder = CharsetUtil.getEncoder(charset);
final ByteBuffer dst = ByteBuffer.allocate(
(int) ((double) src.remaining() * encoder.maxBytesPerChar()));
try {
CoderResult cr = encoder.encode(src, dst, true);
if (!cr.isUnderflow()) {
cr.throwException();
}
cr = encoder.flush(dst);
if (!cr.isUnderflow()) {
cr.throwException();
}
} catch (CharacterCodingException x) {
throw new IllegalStateException(x);
}
dst.flip();
return dst;
}
static String decodeString(ByteBuffer src, Charset charset) {
final CharsetDecoder decoder = CharsetUtil.getDecoder(charset);
final CharBuffer dst = CharBuffer.allocate(
(int) ((double) src.remaining() * decoder.maxCharsPerByte()));
try {
CoderResult cr = decoder.decode(src, dst, true);
if (!cr.isUnderflow()) {
cr.throwException();
}
cr = decoder.flush(dst);
if (!cr.isUnderflow()) {
cr.throwException();
}
} catch (CharacterCodingException x) {
throw new IllegalStateException(x);
}
return dst.flip().toString();
}
    // Private constructor: this is a static utility class and must never be
    // instantiated.
    private ChannelBuffers() {
        // Unused
    }
}
| |
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.compute.v1.stub;
import static com.google.cloud.compute.v1.RegionNetworkEndpointGroupsClient.ListPagedResponse;
import com.google.api.client.http.HttpMethods;
import com.google.api.core.BetaApi;
import com.google.api.core.InternalApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.httpjson.ApiMethodDescriptor;
import com.google.api.gax.httpjson.HttpJsonCallSettings;
import com.google.api.gax.httpjson.HttpJsonOperationSnapshot;
import com.google.api.gax.httpjson.HttpJsonStubCallableFactory;
import com.google.api.gax.httpjson.ProtoMessageRequestFormatter;
import com.google.api.gax.httpjson.ProtoMessageResponseParser;
import com.google.api.gax.httpjson.ProtoRestSerializer;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallable;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.compute.v1.DeleteRegionNetworkEndpointGroupRequest;
import com.google.cloud.compute.v1.GetRegionNetworkEndpointGroupRequest;
import com.google.cloud.compute.v1.InsertRegionNetworkEndpointGroupRequest;
import com.google.cloud.compute.v1.ListRegionNetworkEndpointGroupsRequest;
import com.google.cloud.compute.v1.NetworkEndpointGroup;
import com.google.cloud.compute.v1.NetworkEndpointGroupList;
import com.google.cloud.compute.v1.Operation;
import com.google.cloud.compute.v1.Operation.Status;
import com.google.protobuf.TypeRegistry;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* REST stub implementation for the RegionNetworkEndpointGroups service API.
*
* <p>This class is for advanced usage and reflects the underlying API directly.
*/
@Generated("by gapic-generator-java")
@BetaApi("A restructuring of stub classes is planned, so this may break in the future")
public class HttpJsonRegionNetworkEndpointGroupsStub extends RegionNetworkEndpointGroupsStub {
  // Type registry handed to every response parser; registers Operation so its
  // descriptor can be resolved when parsing responses.
  private static final TypeRegistry typeRegistry =
      TypeRegistry.newBuilder().add(Operation.getDescriptor()).build();
  // Delete: HTTP DELETE on a single network endpoint group resource. Optional
  // "requestId" query parameter; returns a long-running Operation whose
  // snapshot name is "<operationName>:<project>:<region>".
  private static final ApiMethodDescriptor<DeleteRegionNetworkEndpointGroupRequest, Operation>
      deleteMethodDescriptor =
          ApiMethodDescriptor.<DeleteRegionNetworkEndpointGroupRequest, Operation>newBuilder()
              .setFullMethodName("google.cloud.compute.v1.RegionNetworkEndpointGroups/Delete")
              .setHttpMethod(HttpMethods.DELETE)
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<DeleteRegionNetworkEndpointGroupRequest>newBuilder()
                      .setPath(
                          "/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups/{networkEndpointGroup}",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<DeleteRegionNetworkEndpointGroupRequest>
                                serializer = ProtoRestSerializer.create();
                            serializer.putPathParam(
                                fields, "networkEndpointGroup", request.getNetworkEndpointGroup());
                            serializer.putPathParam(fields, "project", request.getProject());
                            serializer.putPathParam(fields, "region", request.getRegion());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<DeleteRegionNetworkEndpointGroupRequest>
                                serializer = ProtoRestSerializer.create();
                            if (request.hasRequestId()) {
                              serializer.putQueryParam(fields, "requestId", request.getRequestId());
                            }
                            return fields;
                          })
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Operation>newBuilder()
                      .setDefaultInstance(Operation.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .setOperationSnapshotFactory(
                  (DeleteRegionNetworkEndpointGroupRequest request, Operation response) -> {
                    // Compose a unique LRO name from the operation plus its scope.
                    StringBuilder opName = new StringBuilder(response.getName());
                    opName.append(":").append(request.getProject());
                    opName.append(":").append(request.getRegion());
                    return HttpJsonOperationSnapshot.newBuilder()
                        .setName(opName.toString())
                        .setMetadata(response)
                        .setDone(Status.DONE.equals(response.getStatus()))
                        .setResponse(response)
                        .setError(response.getHttpErrorStatusCode(), response.getHttpErrorMessage())
                        .build();
                  })
              .build();
  // Get: HTTP GET on a single network endpoint group; no query parameters,
  // plain unary response (no long-running operation).
  private static final ApiMethodDescriptor<
          GetRegionNetworkEndpointGroupRequest, NetworkEndpointGroup>
      getMethodDescriptor =
          ApiMethodDescriptor
              .<GetRegionNetworkEndpointGroupRequest, NetworkEndpointGroup>newBuilder()
              .setFullMethodName("google.cloud.compute.v1.RegionNetworkEndpointGroups/Get")
              .setHttpMethod(HttpMethods.GET)
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<GetRegionNetworkEndpointGroupRequest>newBuilder()
                      .setPath(
                          "/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups/{networkEndpointGroup}",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<GetRegionNetworkEndpointGroupRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(
                                fields, "networkEndpointGroup", request.getNetworkEndpointGroup());
                            serializer.putPathParam(fields, "project", request.getProject());
                            serializer.putPathParam(fields, "region", request.getRegion());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<GetRegionNetworkEndpointGroupRequest> serializer =
                                ProtoRestSerializer.create();
                            return fields;
                          })
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<NetworkEndpointGroup>newBuilder()
                      .setDefaultInstance(NetworkEndpointGroup.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();
  // Insert: HTTP POST to the collection URL with the NetworkEndpointGroup
  // resource as the request body; optional "requestId" query parameter;
  // returns a long-running Operation (same snapshot naming as Delete).
  private static final ApiMethodDescriptor<InsertRegionNetworkEndpointGroupRequest, Operation>
      insertMethodDescriptor =
          ApiMethodDescriptor.<InsertRegionNetworkEndpointGroupRequest, Operation>newBuilder()
              .setFullMethodName("google.cloud.compute.v1.RegionNetworkEndpointGroups/Insert")
              .setHttpMethod(HttpMethods.POST)
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<InsertRegionNetworkEndpointGroupRequest>newBuilder()
                      .setPath(
                          "/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<InsertRegionNetworkEndpointGroupRequest>
                                serializer = ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "project", request.getProject());
                            serializer.putPathParam(fields, "region", request.getRegion());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<InsertRegionNetworkEndpointGroupRequest>
                                serializer = ProtoRestSerializer.create();
                            if (request.hasRequestId()) {
                              serializer.putQueryParam(fields, "requestId", request.getRequestId());
                            }
                            return fields;
                          })
                      .setRequestBodyExtractor(
                          request ->
                              ProtoRestSerializer.create()
                                  .toBody(
                                      "networkEndpointGroupResource",
                                      request.getNetworkEndpointGroupResource()))
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Operation>newBuilder()
                      .setDefaultInstance(Operation.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .setOperationSnapshotFactory(
                  (InsertRegionNetworkEndpointGroupRequest request, Operation response) -> {
                    // Compose a unique LRO name from the operation plus its scope.
                    StringBuilder opName = new StringBuilder(response.getName());
                    opName.append(":").append(request.getProject());
                    opName.append(":").append(request.getRegion());
                    return HttpJsonOperationSnapshot.newBuilder()
                        .setName(opName.toString())
                        .setMetadata(response)
                        .setDone(Status.DONE.equals(response.getStatus()))
                        .setResponse(response)
                        .setError(response.getHttpErrorStatusCode(), response.getHttpErrorMessage())
                        .build();
                  })
              .build();
  // List: HTTP GET on the collection URL with the standard paging/filter query
  // parameters (filter, maxResults, orderBy, pageToken, returnPartialSuccess).
  private static final ApiMethodDescriptor<
          ListRegionNetworkEndpointGroupsRequest, NetworkEndpointGroupList>
      listMethodDescriptor =
          ApiMethodDescriptor
              .<ListRegionNetworkEndpointGroupsRequest, NetworkEndpointGroupList>newBuilder()
              .setFullMethodName("google.cloud.compute.v1.RegionNetworkEndpointGroups/List")
              .setHttpMethod(HttpMethods.GET)
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<ListRegionNetworkEndpointGroupsRequest>newBuilder()
                      .setPath(
                          "/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<ListRegionNetworkEndpointGroupsRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "project", request.getProject());
                            serializer.putPathParam(fields, "region", request.getRegion());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<ListRegionNetworkEndpointGroupsRequest> serializer =
                                ProtoRestSerializer.create();
                            if (request.hasFilter()) {
                              serializer.putQueryParam(fields, "filter", request.getFilter());
                            }
                            if (request.hasMaxResults()) {
                              serializer.putQueryParam(
                                  fields, "maxResults", request.getMaxResults());
                            }
                            if (request.hasOrderBy()) {
                              serializer.putQueryParam(fields, "orderBy", request.getOrderBy());
                            }
                            if (request.hasPageToken()) {
                              serializer.putQueryParam(fields, "pageToken", request.getPageToken());
                            }
                            if (request.hasReturnPartialSuccess()) {
                              serializer.putQueryParam(
                                  fields,
                                  "returnPartialSuccess",
                                  request.getReturnPartialSuccess());
                            }
                            return fields;
                          })
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<NetworkEndpointGroupList>newBuilder()
                      .setDefaultInstance(NetworkEndpointGroupList.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();
  // Callables built in the constructor from the descriptors above; one plain
  // unary callable per RPC, plus operation callables for Delete/Insert and a
  // paged variant for List.
  private final UnaryCallable<DeleteRegionNetworkEndpointGroupRequest, Operation> deleteCallable;
  private final OperationCallable<DeleteRegionNetworkEndpointGroupRequest, Operation, Operation>
      deleteOperationCallable;
  private final UnaryCallable<GetRegionNetworkEndpointGroupRequest, NetworkEndpointGroup>
      getCallable;
  private final UnaryCallable<InsertRegionNetworkEndpointGroupRequest, Operation> insertCallable;
  private final OperationCallable<InsertRegionNetworkEndpointGroupRequest, Operation, Operation>
      insertOperationCallable;
  private final UnaryCallable<ListRegionNetworkEndpointGroupsRequest, NetworkEndpointGroupList>
      listCallable;
  private final UnaryCallable<ListRegionNetworkEndpointGroupsRequest, ListPagedResponse>
      listPagedCallable;
  private final BackgroundResource backgroundResources;
  // Stub used to poll the region operations service for Delete/Insert LROs.
  private final HttpJsonRegionOperationsStub httpJsonOperationsStub;
  private final HttpJsonStubCallableFactory callableFactory;
  public static final HttpJsonRegionNetworkEndpointGroupsStub create(
      RegionNetworkEndpointGroupsStubSettings settings) throws IOException {
    return new HttpJsonRegionNetworkEndpointGroupsStub(settings, ClientContext.create(settings));
  }
  public static final HttpJsonRegionNetworkEndpointGroupsStub create(ClientContext clientContext)
      throws IOException {
    return new HttpJsonRegionNetworkEndpointGroupsStub(
        RegionNetworkEndpointGroupsStubSettings.newBuilder().build(), clientContext);
  }
  public static final HttpJsonRegionNetworkEndpointGroupsStub create(
      ClientContext clientContext, HttpJsonStubCallableFactory callableFactory) throws IOException {
    return new HttpJsonRegionNetworkEndpointGroupsStub(
        RegionNetworkEndpointGroupsStubSettings.newBuilder().build(),
        clientContext,
        callableFactory);
  }
  /**
   * Constructs an instance of HttpJsonRegionNetworkEndpointGroupsStub, using the given settings.
   * This is protected so that it is easy to make a subclass, but otherwise, the static factory
   * methods should be preferred.
   */
  protected HttpJsonRegionNetworkEndpointGroupsStub(
      RegionNetworkEndpointGroupsStubSettings settings, ClientContext clientContext)
      throws IOException {
    this(settings, clientContext, new HttpJsonRegionNetworkEndpointGroupsCallableFactory());
  }
  /**
   * Constructs an instance of HttpJsonRegionNetworkEndpointGroupsStub, using the given settings.
   * This is protected so that it is easy to make a subclass, but otherwise, the static factory
   * methods should be preferred.
   */
  protected HttpJsonRegionNetworkEndpointGroupsStub(
      RegionNetworkEndpointGroupsStubSettings settings,
      ClientContext clientContext,
      HttpJsonStubCallableFactory callableFactory)
      throws IOException {
    this.callableFactory = callableFactory;
    this.httpJsonOperationsStub =
        HttpJsonRegionOperationsStub.create(clientContext, callableFactory);
    // Bind each method descriptor plus the shared type registry into transport
    // settings, then let the callable factory combine them with the retry /
    // timeout configuration from the stub settings.
    HttpJsonCallSettings<DeleteRegionNetworkEndpointGroupRequest, Operation>
        deleteTransportSettings =
            HttpJsonCallSettings.<DeleteRegionNetworkEndpointGroupRequest, Operation>newBuilder()
                .setMethodDescriptor(deleteMethodDescriptor)
                .setTypeRegistry(typeRegistry)
                .build();
    HttpJsonCallSettings<GetRegionNetworkEndpointGroupRequest, NetworkEndpointGroup>
        getTransportSettings =
            HttpJsonCallSettings
                .<GetRegionNetworkEndpointGroupRequest, NetworkEndpointGroup>newBuilder()
                .setMethodDescriptor(getMethodDescriptor)
                .setTypeRegistry(typeRegistry)
                .build();
    HttpJsonCallSettings<InsertRegionNetworkEndpointGroupRequest, Operation>
        insertTransportSettings =
            HttpJsonCallSettings.<InsertRegionNetworkEndpointGroupRequest, Operation>newBuilder()
                .setMethodDescriptor(insertMethodDescriptor)
                .setTypeRegistry(typeRegistry)
                .build();
    HttpJsonCallSettings<ListRegionNetworkEndpointGroupsRequest, NetworkEndpointGroupList>
        listTransportSettings =
            HttpJsonCallSettings
                .<ListRegionNetworkEndpointGroupsRequest, NetworkEndpointGroupList>newBuilder()
                .setMethodDescriptor(listMethodDescriptor)
                .setTypeRegistry(typeRegistry)
                .build();
    this.deleteCallable =
        callableFactory.createUnaryCallable(
            deleteTransportSettings, settings.deleteSettings(), clientContext);
    this.deleteOperationCallable =
        callableFactory.createOperationCallable(
            deleteTransportSettings,
            settings.deleteOperationSettings(),
            clientContext,
            httpJsonOperationsStub);
    this.getCallable =
        callableFactory.createUnaryCallable(
            getTransportSettings, settings.getSettings(), clientContext);
    this.insertCallable =
        callableFactory.createUnaryCallable(
            insertTransportSettings, settings.insertSettings(), clientContext);
    this.insertOperationCallable =
        callableFactory.createOperationCallable(
            insertTransportSettings,
            settings.insertOperationSettings(),
            clientContext,
            httpJsonOperationsStub);
    this.listCallable =
        callableFactory.createUnaryCallable(
            listTransportSettings, settings.listSettings(), clientContext);
    this.listPagedCallable =
        callableFactory.createPagedCallable(
            listTransportSettings, settings.listSettings(), clientContext);
    this.backgroundResources =
        new BackgroundResourceAggregation(clientContext.getBackgroundResources());
  }
  // Exposes the REST method descriptors defined by this stub (e.g. for test
  // fixtures or mock transports).
  @InternalApi
  public static List<ApiMethodDescriptor> getMethodDescriptors() {
    List<ApiMethodDescriptor> methodDescriptors = new ArrayList<>();
    methodDescriptors.add(deleteMethodDescriptor);
    methodDescriptors.add(getMethodDescriptor);
    methodDescriptors.add(insertMethodDescriptor);
    methodDescriptors.add(listMethodDescriptor);
    return methodDescriptors;
  }
  @Override
  public UnaryCallable<DeleteRegionNetworkEndpointGroupRequest, Operation> deleteCallable() {
    return deleteCallable;
  }
  @Override
  public OperationCallable<DeleteRegionNetworkEndpointGroupRequest, Operation, Operation>
      deleteOperationCallable() {
    return deleteOperationCallable;
  }
  @Override
  public UnaryCallable<GetRegionNetworkEndpointGroupRequest, NetworkEndpointGroup> getCallable() {
    return getCallable;
  }
  @Override
  public UnaryCallable<InsertRegionNetworkEndpointGroupRequest, Operation> insertCallable() {
    return insertCallable;
  }
  @Override
  public OperationCallable<InsertRegionNetworkEndpointGroupRequest, Operation, Operation>
      insertOperationCallable() {
    return insertOperationCallable;
  }
  @Override
  public UnaryCallable<ListRegionNetworkEndpointGroupsRequest, NetworkEndpointGroupList>
      listCallable() {
    return listCallable;
  }
  @Override
  public UnaryCallable<ListRegionNetworkEndpointGroupsRequest, ListPagedResponse>
      listPagedCallable() {
    return listPagedCallable;
  }
  // Closes all aggregated background resources; checked exceptions are wrapped
  // because close() itself declares none.
  @Override
  public final void close() {
    try {
      backgroundResources.close();
    } catch (RuntimeException e) {
      throw e;
    } catch (Exception e) {
      throw new IllegalStateException("Failed to close resource", e);
    }
  }
  @Override
  public void shutdown() {
    backgroundResources.shutdown();
  }
  @Override
  public boolean isShutdown() {
    return backgroundResources.isShutdown();
  }
  @Override
  public boolean isTerminated() {
    return backgroundResources.isTerminated();
  }
  @Override
  public void shutdownNow() {
    backgroundResources.shutdownNow();
  }
  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return backgroundResources.awaitTermination(duration, unit);
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db.compaction;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.db.ColumnFamilyStore;
import org.apache.cassandra.db.DataTracker;
import org.apache.cassandra.db.DecoratedKey;
import org.apache.cassandra.io.sstable.SSTableReader;
import org.apache.cassandra.utils.AlwaysPresentFilter;
import org.apache.cassandra.utils.concurrent.Refs;
/**
* Manage compaction options.
*/
public class CompactionController implements AutoCloseable
{
    private static final Logger logger = LoggerFactory.getLogger(CompactionController.class);
    public final ColumnFamilyStore cfs;
    // Interval tree over the overlapping sstables; queried per-key in
    // maxPurgeableTimestamp().
    private DataTracker.SSTableIntervalTree overlappingTree;
    // Ref-counted handles on the sstables overlapping the compacting set.
    // Acquired in refreshOverlaps() and released there and in close().
    private Refs<SSTableReader> overlappingSSTables;
    private final Iterable<SSTableReader> compacting;
    // Tombstones with a local deletion time before this may be purged.
    public final int gcBefore;
    // NOTE(review): the parameter is forwarded as gcBefore; the name suggests
    // callers pass Integer.MAX_VALUE to disable purge limits — confirm at call sites.
    protected CompactionController(ColumnFamilyStore cfs, int maxValue)
    {
        this(cfs, null, maxValue);
    }
    public CompactionController(ColumnFamilyStore cfs, Set<SSTableReader> compacting, int gcBefore)
    {
        assert cfs != null;
        this.cfs = cfs;
        this.gcBefore = gcBefore;
        this.compacting = compacting;
        refreshOverlaps();
    }
    // Re-resolve the overlapping set if any sstable we currently reference has
    // since been compacted away (its data now lives in replacement sstables).
    void maybeRefreshOverlaps()
    {
        for (SSTableReader reader : overlappingSSTables)
        {
            if (reader.isMarkedCompacted())
            {
                refreshOverlaps();
                return;
            }
        }
    }
    // Releases any previously held overlap references, re-acquires references
    // to the sstables overlapping the compacting set, and rebuilds the
    // interval tree used by maxPurgeableTimestamp().
    private void refreshOverlaps()
    {
        if (this.overlappingSSTables != null)
            overlappingSSTables.release();
        if (compacting == null)
            overlappingSSTables = Refs.tryRef(Collections.<SSTableReader>emptyList());
        else
            overlappingSSTables = cfs.getAndReferenceOverlappingSSTables(compacting);
        this.overlappingTree = DataTracker.buildIntervalTree(overlappingSSTables);
    }
    public Set<SSTableReader> getFullyExpiredSSTables()
    {
        return getFullyExpiredSSTables(cfs, compacting, overlappingSSTables, gcBefore);
    }
    /**
     * Finds expired sstables
     *
     * works something like this;
     * 1. find "global" minTimestamp of overlapping sstables and compacting sstables containing any non-expired data
     * 2. build a list of fully expired candidates
     * 3. check if the candidates to be dropped actually can be dropped (maxTimestamp < global minTimestamp)
     *    - if not droppable, remove from candidates
     * 4. return candidates.
     *
     * @param cfStore
     * @param compacting we take the drop-candidates from this set, it is usually the sstables included in the compaction
     * @param overlapping the sstables that overlap the ones in compacting.
     * @param gcBefore tombstones with localDeletionTime before this may be purged
     * @return the subset of {@code compacting} that is fully expired and safe to drop
     */
    public static Set<SSTableReader> getFullyExpiredSSTables(ColumnFamilyStore cfStore, Iterable<SSTableReader> compacting, Iterable<SSTableReader> overlapping, int gcBefore)
    {
        logger.debug("Checking droppable sstables in {}", cfStore);
        if (compacting == null)
            return Collections.<SSTableReader>emptySet();
        List<SSTableReader> candidates = new ArrayList<SSTableReader>();
        long minTimestamp = Long.MAX_VALUE;
        for (SSTableReader sstable : overlapping)
        {
            // Overlapping might include fully expired sstables. What we care about here is
            // the min timestamp of the overlapping sstables that actually contain live data.
            if (sstable.getSSTableMetadata().maxLocalDeletionTime >= gcBefore)
                minTimestamp = Math.min(minTimestamp, sstable.getMinTimestamp());
        }
        for (SSTableReader candidate : compacting)
        {
            // Everything in a candidate sstable is already expired w.r.t. gcBefore.
            if (candidate.getSSTableMetadata().maxLocalDeletionTime < gcBefore)
                candidates.add(candidate);
            else
                minTimestamp = Math.min(minTimestamp, candidate.getMinTimestamp());
        }
        // At this point, minTimestamp denotes the lowest timestamp of any relevant
        // SSTable that contains a constructive value. candidates contains all the
        // candidates with no constructive values. The ones out of these that have
        // (getMaxTimestamp() < minTimestamp) serve no purpose anymore.
        Iterator<SSTableReader> iterator = candidates.iterator();
        while (iterator.hasNext())
        {
            SSTableReader candidate = iterator.next();
            if (candidate.getMaxTimestamp() >= minTimestamp)
            {
                // Its tombstones may still shadow newer data elsewhere; keep it.
                iterator.remove();
            }
            else
            {
                logger.debug("Dropping expired SSTable {} (maxLocalDeletionTime={}, gcBefore={})",
                        candidate, candidate.getSSTableMetadata().maxLocalDeletionTime, gcBefore);
            }
        }
        return new HashSet<>(candidates);
    }
    public String getKeyspace()
    {
        return cfs.keyspace.getName();
    }
    public String getColumnFamily()
    {
        return cfs.name;
    }
    /**
     * @return the largest timestamp before which it's okay to drop tombstones for the given partition;
     * i.e., after the maxPurgeableTimestamp there may exist newer data that still needs to be suppressed
     * in other sstables.  This returns the minimum timestamp for any SSTable that contains this partition and is not
     * participating in this compaction, or LONG.MAX_VALUE if no such SSTable exists.
     */
    public long maxPurgeableTimestamp(DecoratedKey key)
    {
        List<SSTableReader> filteredSSTables = overlappingTree.search(key);
        long min = Long.MAX_VALUE;
        for (SSTableReader sstable : filteredSSTables)
        {
            // if we don't have bloom filter(bf_fp_chance=1.0 or filter file is missing),
            // we check index file instead.
            if (sstable.getBloomFilter() instanceof AlwaysPresentFilter && sstable.getPosition(key, SSTableReader.Operator.EQ, false) != null)
                min = Math.min(min, sstable.getMinTimestamp());
            else if (sstable.getBloomFilter().isPresent(key.getKey()))
                min = Math.min(min, sstable.getMinTimestamp());
        }
        return min;
    }
    public void invalidateCachedRow(DecoratedKey key)
    {
        cfs.invalidateCachedRow(key);
    }
    // Releases the sstable references acquired in refreshOverlaps().
    public void close()
    {
        overlappingSSTables.release();
    }
}
| |
/*
* P4Java - java integration with Perforce SCM
* Copyright (C) 2007-, Mike Wille, Tek42
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*
* You can contact the author at:
*
* Web: http://tek42.com
* Email: mike@tek42.com
* Mail: 755 W Big Beaver Road
* Suite 1110
* Troy, MI 48084
*/
package com.tek42.perforce.parse;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FilterWriter;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.io.IOException;
import java.io.Writer;
import java.util.ArrayList;
import java.util.List;
import java.util.StringTokenizer;
import org.slf4j.Logger;
import com.tek42.perforce.Depot;
import com.tek42.perforce.PerforceException;
import com.tek42.perforce.process.Executor;
import org.slf4j.LoggerFactory;
/**
* Provides default functionality for interacting with Perforce using the template design pattern.
*
* @author Mike Wille
*/
public abstract class AbstractPerforceTemplate {
	// Known fatal/authentication error strings emitted by the Perforce server.
	// NOTE(review): '*' appears to act as a wildcard placeholder in some
	// entries; the matching logic lives in checkAuthnErrors (defined elsewhere
	// in this class) — confirm semantics there.
	private static final String p4errors[] = new String[] {
			"Connect to server failed; check $P4PORT",
			"Perforce password (P4PASSWD) invalid or unset.",
			"Password not allowed at this server security level, use 'p4 login'",
			"Can't create a new user - over license quota.",
			"Client '*' can only be used from host '*'",
			"Access for user '",
			"Your session has expired, please login again.",
			"You don't have permission for this operation.",
			"Password invalid.",
	};
	@SuppressWarnings("unused")
	private transient Logger logger;	// Obsolete field, present just to keep demarshaller happy
	@SuppressWarnings("unused")
	private transient String errors[];	// Obsolete field, present just to keep demarshaller happy
	// Connection/configuration holder: supplies the executor factory, the
	// login ticket and an optional logger (see getLogger()).
	private final Depot depot;
	// Prefix the server prints when a request exceeds its result limit; see hitMax().
	final String maxError = "Request too large";
	/**
	 * Creates a template bound to the given depot configuration.
	 *
	 * @param depot the depot to run Perforce commands against
	 */
	public AbstractPerforceTemplate(Depot depot) {
		this.depot = depot;
	}
public Logger getLogger()
{
if(depot.getLogger() != null){
return depot.getLogger();
} else {
return LoggerFactory.getLogger(this.getClass());
}
}
/**
* Parses lines of formatted text for a list of values. Tokenizes each line into columns and adds the column
* specified by index to the list.
*
* @param response The response from perforce to parse
* @param index The column index to add to the list
* @return A List of strings parsed from the response
*/
protected List<String> parseList(StringBuilder response, int index) {
StringTokenizer lines = new StringTokenizer(response.toString(), "\n\r");
List<String> list = new ArrayList<String>(100);
while(lines.hasMoreElements()) {
StringTokenizer columns = new StringTokenizer(lines.nextToken());
for(int column = 0; column < index; column++) {
columns.nextToken();
}
list.add(columns.nextToken());
}
return list;
}
/**
* Check to see if the perforce request resulted in a "too many results" error. If so, special handling needs
* to happen.
*
* @param response The response from perforce
* @return True if the limit was reached, false otherwise.
*/
protected boolean hitMax(StringBuilder response) {
return response.toString().startsWith(maxError);
}
/**
* Used to filter the response from perforce so the API can throw out
* useless lines and thus save memory during large operations.
* ie. synced/refreshed lines from 'p4 sync'
*/
public abstract static class ResponseFilter {
public abstract boolean accept(String line);
public boolean reject(String line){
return !accept(line);
}
}
/**
* Adds any extra parameters that need to be applied to all perforce commands. For example, adding the login ticket
* to authenticate with.
*
* @param cmd
* String array that will be executed
* @return A (possibly) modified string array to be executed in place of the original.
*/
protected String[] getExtraParams(String cmd[]) {
String ticket = depot.getP4Ticket();
if(ticket != null) {
// Insert the ticket for the password if tickets are being used...
String newCmds[] = new String[cmd.length + 2];
newCmds[0] = getP4Exe();
newCmds[1] = "-P";
newCmds[2] = ticket;
for(int i = 3; (i - 2) < cmd.length; i++) {
newCmds[i] = cmd[i - 2];
}
cmd = newCmds;
} else {
cmd[0] = getP4Exe();
}
return cmd;
}
/**
* Handles the IO for opening a process, writing to it, flushing, closing, and then handling any errors.
*
* @param object The perforce object to save
* @param builder The builder responsible for saving the object
* @throws PerforceException If there is any errors thrown from perforce
*/
	@SuppressWarnings("unchecked")
	protected void saveToPerforce(Object object, Builder builder) throws PerforceException {
		// loop is set only when we re-run the command after a successful "p4 login";
		// attemptLogin guarantees at most one such retry.
		boolean loop = false;
		boolean attemptLogin = true;
		//StringBuilder response = new StringBuilder();
		do {
			int mesgIndex = -1;//, count = 0;
			Executor p4 = depot.getExecFactory().newExecutor();
			String debugCmd = "";
			try {
				String cmds[] = getExtraParams(builder.getSaveCmd(getP4Exe(), object));
				// for exception reporting...
				for(String cm : cmds) {
					debugCmd += cm + " ";
				}
				// back to our regularly scheduled programming...
				p4.exec(cmds);
				BufferedReader reader = p4.getReader();
				// Maintain a log of what was sent to p4 on std input
				final StringBuilder log = new StringBuilder();
				// Conditional use of std input for saving the perforce entity
				if(builder.requiresStandardInput()) {
					BufferedWriter writer = p4.getWriter();
					// Tee everything written to p4's stdin into `log` so it can be
					// included in the error report below.
					Writer fwriter = new FilterWriter(writer) {
						public void write(String str) throws IOException {
							log.append(str);
							out.write(str);
						}
					};
					builder.save(object, fwriter);
					fwriter.flush();
					fwriter.close();
				}
				String line;
				StringBuilder error = new StringBuilder();
				StringBuilder info = new StringBuilder();
				int exitCode = 0;
				// Classify each output line: authentication errors, "error ..." lines,
				// the trailing "exit <code>" line, or informational output.
				while((line = reader.readLine()) != null) {
					// Check for authentication errors...
					if (mesgIndex == -1)
						mesgIndex = checkAuthnErrors(line);
					if (mesgIndex != -1) {
						error.append(line);
					} else if(line.startsWith("error")) {
						// Ignore benign "up-to-date" / "no file(s) to resolve" errors.
						if(!line.trim().equals("") && (line.indexOf("up-to-date") < 0) && (line.indexOf("no file(s) to resolve") < 0)) {
							error.append(line.substring(6));
						}
					} else if(line.startsWith("exit")) {
						exitCode = Integer.parseInt(line.substring(line.indexOf(" ") + 1, line.length()));
					} else {
						if(line.indexOf(":") > -1)
							info.append(line.substring(line.indexOf(":")));
						else
							info.append(line);
					}
				}
				reader.close();
				loop = false;
				// If we failed to execute because of an authentication issue, try a p4 login.
				// NOTE(review): indices 1, 2 and 6 line up with the password/login
				// entries of p4errors — assumes checkAuthnErrors (defined elsewhere)
				// returns an index into that array; confirm.
				if(mesgIndex == 1 || mesgIndex == 2 || mesgIndex == 6) {
					if (attemptLogin) {
						// password is unset means that perforce isn't using the environment var P4PASSWD
						// Instead it is using tickets. We must attempt to login via p4 login, then
						// retry this cmd.
						p4.close();
						login();
						loop = true;
						attemptLogin = false;
						mesgIndex = -1; // cancel this error for now
						continue;
					}
				}
				// Any unhandled auth error or non-zero exit code is fatal; report the
				// command (and the stdin we fed it, if any) for diagnosis.
				if(mesgIndex != -1 || exitCode != 0) {
					if(error.length() != 0) {
						error.append("\nFor Command: ").append(debugCmd);
						if (log.length() > 0) {
							error.append("\nWith Data:\n===================\n");
							error.append(log);
							error.append("\n===================\n");
						}
						throw new PerforceException(error.toString());
					}
					throw new PerforceException(info.toString());
				}
			} catch(IOException e) {
				throw new PerforceException("Failed to open connection to perforce", e);
			} finally {
				// Best-effort cleanup of both pipes before releasing the executor.
				try{
					p4.getWriter().close();
				} catch (IOException e) {
					//failed to close pipe, but we can't do much about that
				}
				try{
					p4.getReader().close();
				} catch (IOException e) {
					//failed to close pipe, but we can't do much about that
				}
				p4.close();
			}
		} while(loop);
	}
/**
* Executes a perforce command and returns the output as a StringBuilder.
*
* @param cmd The perforce command to execute. Each command and argument is its own array element
* @return The response from perforce as a StringBuilder
* @throws PerforceException If perforce throws any errors
*/
protected StringBuilder getPerforceResponse(String cmd[]) throws PerforceException {
    // Delegate to the filtering overload with a pass-through filter that
    // keeps every response line.
    ResponseFilter acceptEverything = new ResponseFilter() {
        @Override
        public boolean accept(String line) {
            return true;
        }
    };
    return getPerforceResponse(cmd, acceptEverything);
}
/**
 * Executes a perforce command and returns the accepted output lines joined
 * with newlines. Scans every line for known authentication errors and, on
 * the first auth failure, performs a single automatic {@code p4 login}
 * retry of the whole command.
 *
 * @param cmd the perforce command; each command/argument is its own array element
 * @param filter decides which response lines are kept in the result
 * @return the filtered response as a StringBuilder
 * @throws PerforceException on auth failure (after the retry), known p4
 *         errors, or when the command produced no output at all
 */
protected StringBuilder getPerforceResponse(String cmd[], ResponseFilter filter) throws PerforceException {
    // TODO: Create a way to wildcard portions of the error checking. Add method to check for these errors.
    boolean loop = false;          // true only when a login retry is scheduled
    boolean attemptLogin = true;   // permits exactly one automatic login retry
    List<String> lines = null;
    int totalLength = 0;
    do {
        int mesgIndex = -1, count = 0;
        Executor p4 = depot.getExecFactory().newExecutor();
        String debugCmd = "";
        // get entire cmd to execute
        cmd = getExtraParams(cmd);
        // setup information for logging...
        for(String cm : cmd) {
            debugCmd += cm + " ";
        }
        // Perform execution and IO
        p4.exec(cmd);
        BufferedReader reader = p4.getReader();
        String line = null;
        totalLength = 0;
        lines = new ArrayList<String>(1024);
        try
        {
            // Nothing is written to the child's stdin; close it so p4
            // sees EOF instead of blocking for input.
            p4.getWriter().close();
            while((line = reader.readLine()) != null) {
                // only check for errors if we have not found one already
                if (mesgIndex == -1)
                    mesgIndex = checkAuthnErrors(line);
                if(filter.reject(line)) continue;
                lines.add(line);
                totalLength += line.length();
                count++;
            }
        }
        catch(IOException ioe)
        {
            //this is generally not anything to worry about. The underlying
            //perforce process terminated and that causes java to be angry
            StringWriter sw = new StringWriter();
            PrintWriter pw = new PrintWriter(sw, true);
            ioe.printStackTrace(pw);
            pw.flush();
            sw.flush();
            getLogger().warn("Perforce process terminated suddenly");
            getLogger().warn(sw.toString());
        }
        finally{
            // Best-effort close of both pipes, then reap the process.
            try{
                p4.getWriter().close();
            } catch (IOException e) {
                getLogger().warn("Write pipe failed to close.");
            }
            try{
                p4.getReader().close();
            } catch (IOException e) {
                getLogger().warn("Read pipe failed to close.");
            }
            p4.close();
        }
        loop = false;
        // If we failed to execute because of an authentication issue, try a p4 login.
        // Indices 1, 2, 6 in p4errors are the auth-related messages.
        if(attemptLogin && (mesgIndex == 1 || mesgIndex == 2 || mesgIndex == 6)) {
            // password is unset means that perforce isn't using the environment var P4PASSWD
            // Instead it is using tickets. We must attempt to login via p4 login, then
            // retry this cmd.
            p4.close();
            login();
            loop = true;
            attemptLogin = false;
            continue;
        }
        // We aren't using the exact message because we want to add the username for more info
        if(mesgIndex == 4)
            throw new PerforceException("Access for user '" + depot.getUser() + "' has not been enabled by 'p4 protect'");
        if(mesgIndex != -1)
            throw new PerforceException(p4errors[mesgIndex]);
        if(count == 0)
            throw new PerforceException("No output for: " + debugCmd);
    } while(loop);
    // Presize for the content plus one newline per line.
    StringBuilder response = new StringBuilder(totalLength + lines.size());
    for (String line : lines)
    {
        response.append(line);
        response.append("\n");
    }
    return response;
}
/**
* Executes a p4 command and returns the output as list of lines.
*
* TODO Introduce a method that handles prefixed messages (i.e. "p4 -s <sub-command>"),
* and can thus stop reading once it reads the "exit: <exit-code>" line, which
* should avoid the "expected" Exception at EOF.
*
* @param cmd
* The perforce command to execute. The command and arguments are
* each in their own array element (e.g. cmd = {"p4", "info"}).
* @return
* The response from perforce as a list
* @throws PerforceException
*/
protected List<String> getRawPerforceResponseLines(String cmd[]) throws PerforceException {
    List<String> lines = new ArrayList<String>(1024);
    Executor p4 = depot.getExecFactory().newExecutor();
    String debugCmd = "";
    // get entire cmd to execute
    cmd = getExtraParams(cmd);
    // setup information for logging...
    // NOTE(review): debugCmd is built but not used in this method; kept for
    // parity with getPerforceResponse — confirm before removing.
    for(String cm : cmd) {
        debugCmd += cm + " ";
    }
    // Perform execution and IO
    p4.exec(cmd);
    try
    {
        BufferedReader reader = p4.getReader();
        // No input is written to the child's stdin; close it so p4 sees
        // EOF and does not block waiting for input.
        p4.getWriter().close();
        String line = null;
        // Unlike getPerforceResponse(), lines are returned raw: no error
        // scanning, no filtering, and no automatic login retry.
        while((line = reader.readLine()) != null) {
            lines.add(line);
        }
    }
    catch(IOException ioe)
    {
        //this is generally not anything to worry about. The underlying
        //perforce process terminated and that causes java to be angry.
        // TODO Given the above comment, should we bother to log a warning?
        // See this blog for a discussion of IOException with message "Write end dead" from pipes:
        // http://techtavern.wordpress.com/2008/07/16/whats-this-ioexception-write-end-dead/
        StringWriter sw = new StringWriter();
        PrintWriter pw = new PrintWriter(sw, true);
        ioe.printStackTrace(pw);
        pw.flush();
        sw.flush();
        getLogger().warn("IOException reading from Perforce process (may just be EOF)");
        getLogger().warn(sw.toString());
    }
    finally{
        // Best-effort close of both pipes, then reap the process.
        try{
            p4.getWriter().close();
        } catch (IOException e) {
            getLogger().warn("Write pipe failed to close.");
        }
        try{
            p4.getReader().close();
        } catch (IOException e) {
            getLogger().warn("Read pipe failed to close.");
        }
        p4.close();
    }
    return lines;
}
/**
* Tries to perform a p4 login if the security level on the server is set to level 3 and no ticket was set via
* depot.setP4Ticket().
* <p>
* Unfortunately, this likely doesn't work on windows.
*
* @throws PerforceException If perforce throws any errors
*/
protected void login() throws PerforceException {
    try {
        // try the default location for p4 executable
        String ticket = null;
        try {
            ticket = p4Login(getP4Exe());
        } catch (PerforceException e) {
            // Strange error under hudson's execution of unit tests. It appears
            // that the environment is not setup correctly from within hudson. The sh shell
            // cannot find the p4 executable. So we'll try again with a hard coded path.
            // Though, I don't believe this problem exists outside of the build environment,
            // and wouldn't normally worry, I still want to be able to test security level 3
            // from the automated build...
            getLogger().warn("Login with '" + getP4Exe() + "' failed: " + e.getMessage());
            try {
                ticket = p4Login("/usr/bin/p4");
            } catch (PerforceException e1) {
                // throw the original exception and not the one caused by the workaround
                getLogger().warn("Attempt to workaround p4 executable location failed", e1);
                throw e;
            }
        }
        // if we obtained a ticket, save it for later use. Our environment setup by Depot can't usually
        // see the .p4tickets file.
        if (ticket != null && !ticket.contains("Enter password:")) {
            getLogger().warn("Using p4 issued ticket.");
            depot.setP4Ticket(ticket);
        }
    } catch(IOException e) {
        // Chain the cause instead of flattening it into the message so the
        // underlying I/O failure survives in the stack trace.
        throw new PerforceException("Unable to login via p4 login due to IOException: " + e.getMessage(), e);
    }
}
/**
* Read the last line of output which should be the ticket.
*
* @param p4Exe the perforce executable with or without full path information
* @return the p4 ticket
* @throws IOException if an I/O error prevents this from working
* @throws PerforceException if the execution of the p4Exe fails
*/
private String p4Login(String p4Exe) throws IOException, PerforceException {
    Executor login = depot.getExecFactory().newExecutor();
    // "-p" makes p4 print the ticket to stdout instead of writing .p4tickets.
    login.exec(new String[] { p4Exe, "login", "-p" });
    try {
        // "echo" the password for the p4 process to read
        BufferedWriter writer = login.getWriter();
        try {
            writer.write(depot.getPassword() + "\n");
        } finally {
            // help the writer move the data
            // NOTE(review): only flushed, not closed — closing would also
            // signal EOF to the child; confirm whether p4 needs that.
            writer.flush();
        }
        // read the ticket from the output
        String ticket = null;
        BufferedReader reader = login.getReader();
        String line;
        // The last line output from p4 login will be the ticket
        while ((line = reader.readLine()) != null) {
            // Any recognized authentication error aborts the login attempt.
            int error = checkAuthnErrors(line);
            if (error != -1)
                throw new PerforceException("Login attempt failed: " + line);
            ticket = line;
        }
        return ticket;
    } finally {
        // Always reap the child process and its pipes.
        login.close();
    }
}
/**
* Check for authentication errors.
*
* @param line the perforce response line
* @return the index in the p4errors array or -1
*/
private int checkAuthnErrors(String line) {
    // Scan the known error messages in order; first substring match wins.
    int index = 0;
    for (String error : p4errors) {
        if (line.contains(error)) {
            return index;
        }
        index++;
    }
    return -1;
}
/**
 * Returns the configured perforce executable (bare name or full path)
 * from the depot configuration.
 */
protected String getP4Exe() {
    return depot.getExecutable();
}
}
| |
/* Probar ElementValuePairs
*/
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0)
@Anotacion (q = 5, k = 7, l = 0) package NombrePaquete;
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
*     http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.orc;
import org.apache.orc.impl.HadoopShims;
import org.apache.orc.impl.KeyProvider;
import org.apache.orc.impl.LocalKey;
import javax.crypto.BadPaddingException;
import javax.crypto.Cipher;
import javax.crypto.IllegalBlockSizeException;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.SecretKeySpec;
import java.io.IOException;
import java.security.InvalidAlgorithmParameterException;
import java.security.InvalidKeyException;
import java.security.Key;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.TreeMap;
/**
* This is an in-memory implementation of {@link KeyProvider}.
*
* The primary use of this class is for when the user doesn't have a
* Hadoop KMS running and wishes to use encryption. It is also useful for
* testing.
*
* The local keys for this class are encrypted/decrypted using the cipher
* in CBC/NoPadding mode and a constant IV. Since the key is random, the
* constant IV is not a problem.
*
* This class is not thread safe.
*/
public class InMemoryKeystore implements KeyProvider {

  /**
   * True when the JVM's crypto policy allows 256 bit AES keys.
   */
  public static final boolean SUPPORTS_AES_256;

  static {
    try {
      SUPPORTS_AES_256 = Cipher.getMaxAllowedKeyLength("AES") >= 256;
    } catch (final NoSuchAlgorithmException e) {
      throw new IllegalArgumentException("Unknown algorithm", e);
    }
  }

  // Source of randomness for generating local keys. A SecureRandom except
  // in tests (see the constructors).
  private final Random random;

  /**
   * A map that stores the 'keyName@version'
   * and 'metadata + material' mapping.
   */
  private final TreeMap<String, KeyVersion> keys = new TreeMap<>();

  /**
   * A map from the keyName (without version) to the currentVersion.
   */
  private final Map<String, Integer> currentVersion = new HashMap<>();

  /**
   * Create a new InMemoryKeystore.
   */
  public InMemoryKeystore() {
    this(new SecureRandom());
  }

  /**
   * Create an InMemoryKeystore with the given random generator.
   * Except for testing, this must be a SecureRandom.
   */
  public InMemoryKeystore(Random random) {
    this.random = random;
  }

  /**
   * Build a version string from a basename and version number. Converts
   * "/aaa/bbb" and 3 to "/aaa/bbb@3".
   *
   * @param name the basename of the key
   * @param version the version of the key
   * @return the versionName of the key.
   */
  private static String buildVersionName(final String name,
                                         final int version) {
    return name + "@" + version;
  }

  /**
   * Get the list of key names from the key provider.
   *
   * @return a list of key names
   */
  @Override
  public List<String> getKeyNames() {
    return new ArrayList<>(currentVersion.keySet());
  }

  /**
   * Get the current metadata for a given key. This is used when encrypting
   * new data.
   *
   * @param keyName the name of a key
   * @return metadata for the current version of the key
   * @throws IllegalArgumentException if the key is unknown
   */
  @Override
  public HadoopShims.KeyMetadata getCurrentKeyVersion(final String keyName) {
    // Check the name map first; unboxing a missing entry directly would
    // surface as a NullPointerException instead of the intended error.
    final Integer version = currentVersion.get(keyName);
    if (version == null) {
      throw new IllegalArgumentException("Unknown key " + keyName);
    }
    final KeyVersion keyVersion = keys.get(buildVersionName(keyName, version));
    if (keyVersion == null) {
      throw new IllegalArgumentException("Unknown key " + keyName);
    }
    return keyVersion;
  }

  /**
   * Create a local key for the given key version.
   *
   * The local key material is random; its encrypted form doubles as the
   * IV source (the IV is the prefix of the encrypted key).
   *
   * @param key the master key version
   * @return the local key's material
   */
  @Override
  public LocalKey createLocalKey(final HadoopShims.KeyMetadata key) {
    final String keyVersion = buildVersionName(key.getKeyName(), key.getVersion());
    final KeyVersion secret = keys.get(keyVersion);
    if (secret == null) {
      throw new IllegalArgumentException("Unknown key " + key);
    }
    final EncryptionAlgorithm algorithm = secret.getAlgorithm();
    byte[] encryptedKey = new byte[algorithm.keyLength()];
    random.nextBytes(encryptedKey);
    byte[] iv = new byte[algorithm.getIvLength()];
    System.arraycopy(encryptedKey, 0, iv, 0, iv.length);
    final Cipher localCipher = createDecryptCipher(secret, iv, keyVersion);
    try {
      byte[] decryptedKey = localCipher.doFinal(encryptedKey);
      return new LocalKey(algorithm, decryptedKey, encryptedKey);
    } catch (final IllegalBlockSizeException e) {
      throw new IllegalStateException(
          "ORC bad block size for " + keyVersion, e);
    } catch (final BadPaddingException e) {
      throw new IllegalStateException(
          "ORC bad padding for " + keyVersion, e);
    }
  }

  /**
   * Create a local key for the given key version and initialization vector.
   * Given a probabilistically unique iv, it will generate a unique key
   * with the master key at the specified version. This allows the encryption
   * to use this local key for the encryption and decryption without ever
   * having access to the master key.
   * <p>
   * This uses KeyProviderCryptoExtension.decryptEncryptedKey with a fixed key
   * of the appropriate length.
   *
   * @param key the master key version
   * @param encryptedKey the unique initialization vector
   * @return the local key's material, or null if the key is unknown
   */
  @Override
  public Key decryptLocalKey(HadoopShims.KeyMetadata key,
                             byte[] encryptedKey) {
    final String keyVersion = buildVersionName(key.getKeyName(), key.getVersion());
    final KeyVersion secret = keys.get(keyVersion);
    if (secret == null) {
      return null;
    }
    final EncryptionAlgorithm algorithm = secret.getAlgorithm();
    // The IV is the prefix of the encrypted key (see createLocalKey).
    byte[] iv = new byte[algorithm.getIvLength()];
    System.arraycopy(encryptedKey, 0, iv, 0, iv.length);
    final Cipher localCipher = createDecryptCipher(secret, iv, keyVersion);
    try {
      byte[] decryptedKey = localCipher.doFinal(encryptedKey);
      return new SecretKeySpec(decryptedKey, algorithm.getAlgorithm());
    } catch (final IllegalBlockSizeException e) {
      throw new IllegalStateException(
          "ORC bad block size for " + keyVersion, e);
    } catch (final BadPaddingException e) {
      throw new IllegalStateException(
          "ORC bad padding for " + keyVersion, e);
    }
  }

  /**
   * Initialize a cipher in DECRYPT_MODE with the master key material of
   * {@code secret} and the given IV. Shared by {@link #createLocalKey} and
   * {@link #decryptLocalKey}, which perform the same derivation.
   *
   * @param secret the master key version holding the material
   * @param iv the initialization vector
   * @param keyVersion the "name@version" string, for error messages
   * @return the initialized cipher
   */
  private static Cipher createDecryptCipher(final KeyVersion secret,
                                            final byte[] iv,
                                            final String keyVersion) {
    final EncryptionAlgorithm algorithm = secret.getAlgorithm();
    final Cipher cipher = algorithm.createCipher();
    try {
      cipher.init(Cipher.DECRYPT_MODE,
          new SecretKeySpec(secret.getMaterial(),
              algorithm.getAlgorithm()), new IvParameterSpec(iv));
    } catch (final InvalidKeyException e) {
      throw new IllegalStateException(
          "ORC bad encryption key for " + keyVersion, e);
    } catch (final InvalidAlgorithmParameterException e) {
      throw new IllegalStateException(
          "ORC bad encryption parameter for " + keyVersion, e);
    }
    return cipher;
  }

  @Override
  public HadoopShims.KeyProviderKind getKind() {
    return HadoopShims.KeyProviderKind.HADOOP;
  }

  /**
   * Function that takes care of adding a new key.<br>
   * A new key can be added only if:
   * <ul>
   * <li>This is a new key and no prior key version exist.</li>
   * <li>If the key exists (has versions), then the new version to be added should be greater than
   * the version that already exists.</li>
   * </ul>
   *
   * @param keyName Name of the key to be added
   * @param algorithm Algorithm used
   * @param masterKey Master key
   * @return this
   */
  public InMemoryKeystore addKey(String keyName, EncryptionAlgorithm algorithm,
                                 byte[] masterKey) throws IOException {
    return addKey(keyName, 0, algorithm, masterKey);
  }

  /**
   * Function that takes care of adding a new key.<br>
   * A new key can be added only if:
   * <ul>
   * <li>This is a new key and no prior key version exist.</li>
   * <li>If the key exists (has versions), then the new version to be added should be greater than
   * the version that already exists.</li>
   * </ul>
   *
   * @param keyName Name of the key to be added
   * @param version Key Version
   * @param algorithm Algorithm used
   * @param masterKey Master key
   * @return this
   */
  public InMemoryKeystore addKey(String keyName, int version,
                                 EncryptionAlgorithm algorithm,
                                 byte[] masterKey) throws IOException {
    /* Test whether platform supports the algorithm; fall back to 128 bit */
    if (!SUPPORTS_AES_256 && (algorithm != EncryptionAlgorithm.AES_CTR_128)) {
      algorithm = EncryptionAlgorithm.AES_CTR_128;
    }
    // Normalize the material to exactly keyLength() bytes: truncate a long
    // master key, zero-pad a short one. A freshly allocated byte[] is
    // already zero-filled, so no explicit Arrays.fill is needed (the
    // previous fill also stopped one byte short of the end).
    final byte[] buffer = new byte[algorithm.keyLength()];
    System.arraycopy(masterKey, 0, buffer, 0,
        Math.min(masterKey.length, buffer.length));
    final KeyVersion key = new KeyVersion(keyName, version, algorithm,
        buffer);
    /* Check whether the key is already present and has a smaller version */
    Integer currentKeyVersion = currentVersion.get(keyName);
    if (currentKeyVersion != null && currentKeyVersion >= version) {
      throw new IOException(String
          .format("Key %s with equal or higher version %d already exists",
              keyName, version));
    }
    keys.put(buildVersionName(keyName, version), key);
    currentVersion.put(keyName, version);
    return this;
  }

  /**
   * This class contains the meta-data and the material for the key.
   */
  static class KeyVersion extends HadoopShims.KeyMetadata {
    // Raw key material, exactly keyLength() bytes long (see addKey).
    private final byte[] material;

    KeyVersion(final String keyName, final int version,
               final EncryptionAlgorithm algorithm, final byte[] material) {
      super(keyName, version, algorithm);
      this.material = material;
    }

    /**
     * Get the material for the key
     *
     * @return the material
     */
    private byte[] getMaterial() {
      return material;
    }
  }
}
| |
// Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.jetbrains.protocolReader;
import org.jetbrains.protocolReader.TextOutput;
import org.jetbrains.jsonProtocol.ItemDescriptor;
import java.io.IOException;
import java.nio.file.FileSystems;
import java.util.*;
import static org.jetbrains.jsonProtocol.ProtocolMetaModel.*;
/**
* Reads the metamodel and generates a set of files with Java classes/interfaces for the protocol.
*/
class Generator {
private static final String PARSER_INTERFACE_LIST_CLASS_NAME = "GeneratedReaderInterfaceList";
static final String READER_INTERFACE_NAME = "ProtocolReponseReader";
final List<String> jsonProtocolParserClassNames = new ArrayList<>();
final List<ParserRootInterfaceItem> parserRootInterfaceItems = new ArrayList<>();
final TypeMap typeMap = new TypeMap();
private final FileSet fileSet;
private final Naming naming;
/**
 * Creates a generator that writes into {@code outputDir}, placing all
 * generated classes under {@code rootPackage} and naming the request
 * class {@code requestClassName}.
 */
Generator(String outputDir, String rootPackage, String requestClassName) throws IOException {
    fileSet = new FileSet(FileSystems.getDefault().getPath(outputDir));
    naming = new Naming(rootPackage, requestClassName);
}
/** Returns the naming schemes used for the generated classes. */
public Naming getNaming() {
    return naming;
}
/**
 * Naming schemes for generated classes. Output (request/params) classes,
 * input (response/event) classes and shared typedefs each use their own
 * {@link ClassNameScheme}; all of them live under the single root package.
 */
public static final class Naming {
    public final ClassNameScheme params;
    public final ClassNameScheme additionalParam;
    public final ClassNameScheme outputTypedef;

    public final ClassNameScheme.Input commandResult;
    public final ClassNameScheme.Input eventData;
    public final ClassNameScheme inputValue;
    public final ClassNameScheme inputEnum;
    public final ClassNameScheme inputTypedef;

    public final ClassNameScheme commonTypedef;

    public final String inputPackage;
    public final String requestClassName;

    private Naming(String rootPackage, String requestClassName) {
        this.requestClassName = requestClassName;

        // Output-side schemes all share the root package.
        params = new ClassNameScheme.Output("", rootPackage);
        additionalParam = new ClassNameScheme.Output("", rootPackage);
        outputTypedef = new ClassNameScheme.Output("Typedef", rootPackage);
        commonTypedef = new ClassNameScheme.Common("Typedef", rootPackage);

        // Input-side schemes use the same package, exposed as inputPackage.
        inputPackage = rootPackage;
        commandResult = new ClassNameScheme.Input("Result", inputPackage);
        eventData = new ClassNameScheme.Input("EventData", inputPackage);
        inputValue = new ClassNameScheme.Input("Value", inputPackage);
        inputEnum = new ClassNameScheme.Input("", inputPackage);
        inputTypedef = new ClassNameScheme.Input("Typedef", inputPackage);
    }
}
/**
 * Entry point: reads the metamodel and writes out all generated sources.
 * Hidden domains and DOMDebugger are skipped entirely.
 */
void go(Root metamodel) throws IOException {
    initializeKnownTypes();
    List<Domain> domainList = metamodel.domains();
    Map<String, DomainGenerator> domainGeneratorMap = new HashMap<>();
    // First pass: register all types so cross-domain references resolve later.
    for (Domain domain : domainList) {
        // todo DOMDebugger
        if (domain.hidden() || domain.domain().equals("DOMDebugger")) {
            System.out.println("Domain skipped: " + domain.domain());
            continue;
        }
        DomainGenerator domainGenerator = new DomainGenerator(this, domain);
        domainGeneratorMap.put(domain.domain(), domainGenerator);
        domainGenerator.registerTypes();
    }
    // Log the domains that will be generated (mirrors the skip filter above).
    for (Domain domain : domainList) {
        if (!domain.hidden() && !domain.domain().equals("DOMDebugger")) {
            System.out.println("Domain generated: " + domain.domain());
        }
    }
    typeMap.setDomainGeneratorMap(domainGeneratorMap);
    // Second pass: emit commands/events, then any types requested on demand.
    for (DomainGenerator domainGenerator : domainGeneratorMap.values()) {
        domainGenerator.generateCommandsAndEvents();
    }
    typeMap.generateRequestedTypes();
    generateParserInterfaceList();
    generateParserRoot(parserRootInterfaceItems);
    // Remove stale files left over from previous runs.
    fileSet.deleteOtherFiles();
}
/**
 * Resolves the protocol type of {@code typedObject} into a Java type,
 * generating nested enums/objects through {@code scope} where needed.
 * The result carries the member's "optional" attribute.
 */
QualifiedTypeData resolveType(final ItemDescriptor typedObject, final ResolveAndGenerateScope scope) {
    UnqualifiedTypeData unqualifiedType = switchByType(typedObject, new TypeVisitor<UnqualifiedTypeData>() {
        @Override
        public UnqualifiedTypeData visitRef(String refName) {
            // "$ref" — resolve against this or another domain.
            return new UnqualifiedTypeData(resolveRefType(scope.getDomainName(), refName, scope.getTypeDirection()));
        }
        @Override
        public UnqualifiedTypeData visitBoolean() {
            return UnqualifiedTypeData.BOOLEAN;
        }
        @Override
        public UnqualifiedTypeData visitEnum(List<String> enumConstants) {
            // Enums can only be generated from a member scope.
            assert scope instanceof MemberScope;
            return new UnqualifiedTypeData(((MemberScope)scope).generateEnum(typedObject.description(), enumConstants));
        }
        @Override
        public UnqualifiedTypeData visitString() {
            return UnqualifiedTypeData.STRING;
        }
        @Override
        public UnqualifiedTypeData visitInteger() {
            return UnqualifiedTypeData.INT;
        }
        @Override
        public UnqualifiedTypeData visitNumber() {
            return UnqualifiedTypeData.NUMBER;
        }
        @Override
        public UnqualifiedTypeData visitArray(ArrayItemType items) {
            // Recursively resolve the element type and wrap it in a list.
            return new UnqualifiedTypeData(BoxableType.createList(scope.resolveType(items).getJavaType()));
        }
        @Override
        public UnqualifiedTypeData visitObject(List<ObjectProperty> properties) {
            // Inline object literal — generate a nested type for it.
            BoxableType nestedObjectName;
            try {
                nestedObjectName = scope.generateNestedObject(typedObject.description(), properties);
            }
            catch (IOException e) {
                // Visitor methods cannot throw IOException; wrap it.
                throw new RuntimeException(e);
            }
            return new UnqualifiedTypeData(nestedObjectName, false);
        }
        @Override
        public UnqualifiedTypeData visitUnknown() {
            return UnqualifiedTypeData.ANY;
        }
    });
    return unqualifiedType.getQualifiedType(typedObject instanceof ItemDescriptor.Named && ((ItemDescriptor.Named)typedObject).optional());
}
/**
 * A resolved type before optionality is applied: pairs the Java type with
 * a nullability flag. {@link #getQualifiedType} adds the member's
 * "optional" attribute to produce the final {@link QualifiedTypeData}.
 */
private static class UnqualifiedTypeData {
    private final BoxableType typeRef;
    private final boolean nullable;
    UnqualifiedTypeData(BoxableType typeRef) {
        this(typeRef, false);
    }
    UnqualifiedTypeData(BoxableType typeRef, boolean nullable) {
        this.typeRef = typeRef;
        this.nullable = nullable;
    }
    /** Combines this type with the member's "optional" attribute. */
    QualifiedTypeData getQualifiedType(boolean optional) {
        return new QualifiedTypeData(typeRef, optional, nullable);
    }
    // Shared singletons for the primitive protocol types.
    static final UnqualifiedTypeData BOOLEAN = new UnqualifiedTypeData(BoxableType.BOOLEAN, false);
    static final UnqualifiedTypeData STRING = new UnqualifiedTypeData(BoxableType.STRING, false);
    static final UnqualifiedTypeData INT = new UnqualifiedTypeData(BoxableType.INT, false);
    static final UnqualifiedTypeData NUMBER = new UnqualifiedTypeData(BoxableType.NUMBER, false);
    // "any" values may legitimately be null.
    static final UnqualifiedTypeData ANY = new UnqualifiedTypeData(BoxableType.OBJECT, true);
}
/**
 * Writes GeneratedReaderInterfaceList: a class exposing a Class[] of all
 * generated parser interfaces, collected in jsonProtocolParserClassNames.
 */
private void generateParserInterfaceList() throws IOException {
    FileUpdater fileUpdater = startJavaFile(getNaming().inputPackage, PARSER_INTERFACE_LIST_CLASS_NAME + ".java");
    // Write classes in stable order.
    Collections.sort(jsonProtocolParserClassNames);
    TextOutput out = fileUpdater.out;
    out.append("public class ").append(PARSER_INTERFACE_LIST_CLASS_NAME).openBlock();
    out.append("public static final Class<?>[] LIST =").openBlock();
    for (String name : jsonProtocolParserClassNames) {
        out.append(name).append(".class,").newLine();
    }
    out.closeBlock();
    out.semi();
    out.closeBlock();
    fileUpdater.update();
}
/**
 * Writes the root reader interface ({@value #READER_INTERFACE_NAME}),
 * containing one parse method per collected root interface item.
 */
private void generateParserRoot(List<ParserRootInterfaceItem> parserRootInterfaceItems) throws IOException {
    FileUpdater fileUpdater = startJavaFile(getNaming().inputPackage, READER_INTERFACE_NAME + ".java");
    // Write classes in stable order.
    Collections.sort(parserRootInterfaceItems);
    TextOutput out = fileUpdater.out;
    out.append("@org.chromium.protocolReader.JsonParserRoot").newLine();
    out.append("public interface ").append(READER_INTERFACE_NAME).openBlock();
    for (ParserRootInterfaceItem item : parserRootInterfaceItems) {
        item.writeCode(out);
    }
    out.closeBlock();
    fileUpdater.update();
}
/**
 * Resolves an absolute ({@code DOMAIN.TYPE}) or relative ({@code TYPE}) type
 * name against the type map. Relative names are resolved inside
 * {@code scopeDomainName}.
 */
private BoxableType resolveRefType(String scopeDomainName, String refName,
    TypeData.Direction direction) {
  int dot = refName.indexOf('.');
  boolean qualified = dot != -1;
  String domainName = qualified ? refName.substring(0, dot) : scopeDomainName;
  String shortName = qualified ? refName.substring(dot + 1) : refName;
  return typeMap.resolve(domainName, shortName, direction);
}
/**
 * Returns a safe getter name for a JSON property. Names that collide with Java
 * keywords (see BAD_METHOD_NAMES) are rewritten as {@code getXxx} and a
 * {@code @JsonField} annotation carrying the original literal is emitted first.
 */
static String generateMethodNameSubstitute(String originalName, TextOutput out) {
  if (BAD_METHOD_NAMES.contains(originalName)) {
    out.append("@org.chromium.protocolReader.JsonField(jsonLiteralName=\"").append(originalName).append("\")").newLine();
    return "get" + Character.toUpperCase(originalName.charAt(0)) + originalName.substring(1);
  }
  return originalName;
}
/**
 * Upper-cases the first character of {@code s} when it is a lowercase letter;
 * otherwise (empty string, already capitalized, digit, symbol) returns the
 * string unchanged.
 */
static String capitalizeFirstChar(String s) {
  if (s.isEmpty() || !Character.isLowerCase(s.charAt(0))) {
    return s;
  }
  return Character.toUpperCase(s.charAt(0)) + s.substring(1);
}
/**
 * Opens a generated-source file for {@code baseName} in the package derived
 * from {@code domain} by the naming scheme.
 */
FileUpdater startJavaFile(ClassNameScheme nameScheme, Domain domain, String baseName) throws IOException {
  String packageName = nameScheme.getPackageNameVirtual(domain.domain());
  String fileName = nameScheme.getShortName(baseName) + ".java";
  return startJavaFile(packageName, fileName);
}
/**
 * Creates a FileUpdater for {@code packageName/filename} and writes the
 * standard "// Generated source" banner plus the package declaration.
 */
private FileUpdater startJavaFile(String packageName, String filename) {
  String relativePath = packageName.replace('.', '/') + "/" + filename;
  FileUpdater updater = fileSet.createFileUpdater(relativePath);
  updater.out.append("// Generated source").newLine().append("package ").append(packageName).semi().newLine().newLine();
  return updater;
}
/**
 * Dispatches on the declared protocol type of {@code typedObject}, invoking the
 * matching callback of {@code visitor} and returning its result. A "$ref" to
 * another declared type takes precedence over any primitive type name.
 *
 * @throws RuntimeException if the type name is not one of the known constants
 */
static <R> R switchByType(ItemDescriptor typedObject, TypeVisitor<R> visitor) {
  String refName = null;
  if (typedObject instanceof ItemDescriptor.Referenceable) {
    refName = ((ItemDescriptor.Referenceable) typedObject).ref();
  }
  if (refName != null) {
    return visitor.visitRef(refName);
  }
  String typeName = typedObject.type();
  switch (typeName) {
    case BOOLEAN_TYPE:
      return visitor.visitBoolean();
    case STRING_TYPE:
      // A string with an enumeration of allowed values is treated as an enum.
      if (typedObject.getEnum() != null) {
        return visitor.visitEnum(typedObject.getEnum());
      }
      return visitor.visitString();
    case INTEGER_TYPE:
      return visitor.visitInteger();
    case NUMBER_TYPE:
      return visitor.visitNumber();
    case ARRAY_TYPE:
      return visitor.visitArray(typedObject.items());
    case OBJECT_TYPE:
      // Only full Type descriptors carry properties; others get a null list.
      if (typedObject instanceof ItemDescriptor.Type) {
        return visitor.visitObject(((ItemDescriptor.Type) typedObject).properties());
      }
      return visitor.visitObject(null);
    case ANY_TYPE:
    case UNKNOWN_TYPE:
      return visitor.visitUnknown();
    default:
      throw new RuntimeException("Unrecognized type " + typeName);
  }
}
/**
 * Hook for hard-coding the Java type of specific protocol types.
 * Currently empty; kept as an extension point (see the commented example).
 */
private static void initializeKnownTypes() {
// Code example:
// typeMap.getTypeData("Page", "Cookie").getInput().setJavaTypeName("Object");
}
// Property names that cannot be used verbatim as Java getter names; getters for
// these are renamed by generateMethodNameSubstitute (e.g. "this" -> getThis()).
private static final Set<String> BAD_METHOD_NAMES = new HashSet<>(Arrays.asList(
"this"
));
}
| |
/*
* #%L
* Native ARchive plugin for Maven
* %%
* Copyright (C) 2002 - 2014 NAR Maven Plugin developers.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package com.github.maven_nar.cpptasks.platforms;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import com.github.maven_nar.cpptasks.CUtil;
import com.github.maven_nar.cpptasks.TargetMatcher;
import com.github.maven_nar.cpptasks.VersionInfo;
import com.github.maven_nar.cpptasks.compiler.LinkType;
/**
 * Platform specific behavior for Microsoft Windows: generation of the
 * "versioninfo.rc" VERSIONINFO resource embedded into executables and DLLs.
 *
 * @author Curt Arnold
 */
public final class WindowsPlatform {
  /**
   * Adds the generated version resource to the bidded fileset to support
   * version information.
   *
   * The resource text is first rendered in memory and only written to disk
   * when it differs from the existing file, so unchanged builds do not
   * invalidate the file's timestamp.
   *
   * @param versionInfo version information, may not be null
   * @param linkType link type, may not be null
   * @param isDebug true if debug build
   * @param outputFile name of generated executable, may not be null
   * @param objDir directory for generated files, may not be null
   * @param matcher bidded fileset, may be null
   * @throws IOException if unable to write version resource
   */
  public static void addVersionFiles(final VersionInfo versionInfo, final LinkType linkType, final File outputFile,
      final boolean isDebug, final File objDir, final TargetMatcher matcher) throws IOException {
    if (versionInfo == null) {
      throw new NullPointerException("versionInfo");
    }
    if (linkType == null) {
      throw new NullPointerException("linkType");
    }
    if (outputFile == null) {
      throw new NullPointerException("outputFile");
    }
    if (objDir == null) {
      throw new NullPointerException("objDir");
    }
    // Fully resolve version info before rendering it.
    final VersionInfo mergedInfo = versionInfo.merge();
    final File versionResource = new File(objDir, "versioninfo.rc");
    boolean notChanged = false;
    //
    // if the resource exists, render the proposed content in memory and
    // compare it with what is on disk to decide whether a rewrite is needed
    //
    if (versionResource.exists()) {
      final ByteArrayOutputStream memStream = new ByteArrayOutputStream();
      final Writer writer = new BufferedWriter(new OutputStreamWriter(memStream));
      writeResource(writer, mergedInfo, outputFile, isDebug, linkType);
      writer.close();
      final InputStream proposedResource = new ByteArrayInputStream(memStream.toByteArray());
      final InputStream existingResource = new FileInputStream(versionResource);
      try {
        notChanged = hasSameContent(proposedResource, existingResource);
      } finally {
        // Close in finally so the handle is not leaked when comparison throws.
        existingResource.close();
      }
    }
    //
    // if the resource file did not exist or will be changed then write it
    //
    if (!notChanged) {
      final Writer writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(versionResource)));
      try {
        writeResource(writer, mergedInfo, outputFile, isDebug, linkType);
      } finally {
        writer.close();
      }
    }
    if (matcher != null) {
      matcher.visit(new File(versionResource.getParent()), versionResource.getName());
    }
  }

  /**
   * Converts parsed version information into the comma-separated form used by
   * the FILEVERSION / PRODUCTVERSION statements ("a,b,c,d").
   *
   * @param buf string buffer to receive version number
   * @param version four-element array
   */
  private static void encodeVersion(final StringBuffer buf, final short[] version) {
    for (int i = 0; i < 3; i++) {
      buf.append(Short.toString(version[i]));
      buf.append(',');
    }
    buf.append(Short.toString(version[3]));
  }

  /**
   * Compare two input streams for duplicate content.
   *
   * Naive byte-by-byte implementation, but should not be a performance issue.
   *
   * @param stream1 stream
   * @param stream2 stream
   * @return true if streams are identical in content
   * @throws IOException if error reading streams
   */
  private static boolean hasSameContent(final InputStream stream1, final InputStream stream2) throws IOException {
    int byte1 = -1;
    int byte2 = -1;
    do {
      byte1 = stream1.read();
      byte2 = stream2.read();
    } while (byte1 == byte2 && byte1 != -1);
    // Loop stops at the first mismatch or at simultaneous end-of-stream.
    return byte1 == byte2;
  }

  /**
   * Parse version string into array of four short values.
   *
   * The final component may carry a non-numeric suffix (for example
   * "1.2.3.4beta"); the longest numeric prefix of that component is used.
   * Missing or unparsable components are left as 0.
   *
   * @param version version string, may be null
   * @return four-element array, never null
   */
  public static short[] parseVersion(final String version) {
    final short[] values = new short[] {
        0, 0, 0, 0
    };
    if (version != null) {
      final StringBuffer buf = new StringBuffer(version);
      int start = 0;
      for (int i = 0; i < 4; i++) {
        int end = version.indexOf('.', start);
        if (end <= 0) {
          end = version.length();
          // Last component: retry progressively shorter prefixes so a
          // trailing qualifier such as "4beta" still yields 4. (The previous
          // code always re-parsed the full component and so always failed.)
          for (int j = end; j > start; j--) {
            final String part = buf.substring(start, j);
            try {
              values[i] = Short.parseShort(part);
              break;
            } catch (final NumberFormatException ex) {
              values[i] = 0;
            }
          }
          break;
        } else {
          final String part = buf.substring(start, end);
          try {
            values[i] = Short.parseShort(part);
            start = end + 1;
          } catch (final NumberFormatException ex) {
            break;
          }
        }
      }
    }
    return values;
  }

  /**
   * Writes one "VALUE" entry of the StringFileInfo block, skipping it when the
   * value is null (i.e. not configured).
   */
  private static void writeStringValue(final Writer writer, final String name, final String value) throws IOException {
    if (value != null) {
      writer.write("VALUE \"" + name + "\", \"");
      writer.write(value);
      writer.write("\\0\"\n");
    }
  }

  /**
   * Writes windows resource.
   *
   * @param writer writer, may not be null
   * @param versionInfo version information
   * @param outputFile executable file
   * @param isDebug true if debug
   * @param linkType link type
   * @throws IOException if error writing resource file
   */
  public static void writeResource(final Writer writer, final VersionInfo versionInfo, final File outputFile,
      final boolean isDebug, final LinkType linkType) throws IOException {
    // writer.write("#include \"windows.h\"\n");
    writer.write("VS_VERSION_INFO VERSIONINFO\n");
    final StringBuffer buf = new StringBuffer("FILEVERSION ");
    encodeVersion(buf, parseVersion(versionInfo.getFileversion()));
    buf.append("\nPRODUCTVERSION ");
    encodeVersion(buf, parseVersion(versionInfo.getProductversion()));
    buf.append("\n");
    writer.write(buf.toString());
    buf.setLength(0);
    // FILEFLAGSMASK advertises which FILEFLAGS bits are meaningful; a bit is
    // included whenever the corresponding setting was configured at all.
    buf.append("FILEFLAGSMASK 0x1L /* VS_FF_DEBUG */");
    final Boolean patched = versionInfo.getPatched();
    final Boolean prerelease = versionInfo.getPrerelease();
    if (patched != null) {
      buf.append(" | 0x4L /* VS_FF_PATCHED */");
    }
    if (prerelease != null) {
      buf.append(" | 0x2L /* VS_FF_PRERELEASE */");
    }
    if (versionInfo.getPrivatebuild() != null) {
      buf.append(" | 0x8L /* VS_FF_PRIVATEBUILD */");
    }
    if (versionInfo.getSpecialbuild() != null) {
      buf.append(" | 0x20L /* VS_FF_SPECIALBUILD */");
    }
    buf.append('\n');
    writer.write(buf.toString());
    buf.setLength(0);
    buf.append("FILEFLAGS ");
    if (isDebug) {
      buf.append("0x1L /* VS_FF_DEBUG */ | ");
    }
    if (Boolean.TRUE.equals(patched)) {
      buf.append("0x4L /* VS_FF_PATCHED */ | ");
    }
    if (Boolean.TRUE.equals(prerelease)) {
      buf.append("0x2L /* VS_FF_PRERELEASE */ | ");
    }
    // FIXME: What are the possible values of private build? Would it be sufficient to check if private build is empty?
    if (Boolean.TRUE.equals(versionInfo.getPrivatebuild())) {
      buf.append("0x8L /* VS_FF_PRIVATEBUILD */ | ");
    }
    // FIXME: What are the possible values of special build? Would it be sufficient to check if special build is empty?
    if (Boolean.TRUE.equals(versionInfo.getSpecialbuild())) {
      buf.append("0x20L /* VS_FF_SPECIALBUILD */ | ");
    }
    // "FILEFLAGS " is 10 characters; anything longer means at least one flag
    // was appended and the trailing " | " must be stripped.
    if (buf.length() > 10) {
      buf.setLength(buf.length() - 3);
      buf.append('\n');
    } else {
      buf.append("0\n");
    }
    writer.write(buf.toString());
    buf.setLength(0);
    writer.write("FILEOS 0x40004 /* VOS_NT_WINDOWS32 */\nFILETYPE ");
    if (linkType.isExecutable()) {
      writer.write("0x1L /* VFT_APP */\n");
    } else {
      if (linkType.isSharedLibrary()) {
        writer.write("0x2L /* VFT_DLL */\n");
      } else if (linkType.isStaticLibrary()) {
        writer.write("0x7L /* VFT_STATIC_LIB */\n");
      } else {
        writer.write("0x0L /* VFT_UNKNOWN */\n");
      }
    }
    writer.write("FILESUBTYPE 0x0L\n");
    writer.write("BEGIN\n");
    writer.write("BLOCK \"StringFileInfo\"\n");
    writer.write(" BEGIN\n#ifdef UNICODE\nBLOCK \"040904B0\"\n");
    writer.write("#else\nBLOCK \"040904E4\"\n#endif\n");
    writer.write("BEGIN\n");
    // Optional entries are emitted only when configured.
    writeStringValue(writer, "Comments", versionInfo.getFilecomments());
    writeStringValue(writer, "CompanyName", versionInfo.getCompanyname());
    writeStringValue(writer, "FileDescription", versionInfo.getFiledescription());
    writeStringValue(writer, "FileVersion", versionInfo.getFileversion());
    final String baseName = CUtil.getBasename(outputFile);
    String internalName = versionInfo.getInternalname();
    if (internalName == null) {
      internalName = baseName;
    }
    // InternalName and OriginalFilename are always emitted.
    writeStringValue(writer, "InternalName", internalName);
    writeStringValue(writer, "LegalCopyright", versionInfo.getLegalcopyright());
    writeStringValue(writer, "LegalTrademarks", versionInfo.getLegaltrademarks());
    writeStringValue(writer, "OriginalFilename", baseName);
    writeStringValue(writer, "PrivateBuild", versionInfo.getPrivatebuild());
    writeStringValue(writer, "ProductName", versionInfo.getProductname());
    writeStringValue(writer, "ProductVersion", versionInfo.getProductversion());
    writeStringValue(writer, "SpecialBuild", versionInfo.getSpecialbuild());
    writer.write("END\n");
    writer.write("END\n");
    writer.write("BLOCK \"VarFileInfo\"\n");
    writer.write("BEGIN\n#ifdef UNICODE\n");
    writer.write("VALUE \"Translation\", 0x409, 1200\n");
    writer.write("#else\n");
    writer.write("VALUE \"Translation\", 0x409, 1252\n");
    writer.write("#endif\n");
    writer.write("END\n");
    writer.write("END\n");
  }

  /**
   * Constructor: utility class, not instantiable.
   */
  private WindowsPlatform() {
  }
}
| |
/*
* Portions Copyright (c) 2009-10 Zepheira LLC and James Leigh, Some
Rights Reserved
* Portions Copyright (c) 2010-11 Talis Inc, Some Rights Reserved
* Portions Copyright (c) 2012 3 Round Stones Inc., Some Rights Reserved
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.callimachusproject.engine.helpers;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
import org.callimachusproject.engine.RDFEventReader;
import org.callimachusproject.engine.RDFParseException;
import org.callimachusproject.engine.events.RDFEvent;
import org.callimachusproject.engine.events.Triple;
import org.callimachusproject.engine.events.TriplePattern;
import org.callimachusproject.engine.model.AbsoluteTermFactory;
import org.callimachusproject.engine.model.GraphNodePath;
import org.callimachusproject.engine.model.IRI;
import org.callimachusproject.engine.model.Node;
import org.callimachusproject.engine.model.Term;
import org.openrdf.http.object.exceptions.BadRequest;
import org.openrdf.model.Literal;
import org.openrdf.model.Resource;
import org.openrdf.model.Statement;
import org.openrdf.model.URI;
import org.openrdf.model.Value;
import org.openrdf.model.impl.StatementImpl;
import org.openrdf.model.impl.URIImpl;
import org.openrdf.model.vocabulary.RDF;
import org.openrdf.query.GraphQueryResult;
import org.openrdf.query.QueryEvaluationException;
import org.openrdf.rio.RDFHandlerException;
/**
 * Track what node the triples are about and ensures they match one of the given
 * patterns.
 *
 * @author James Leigh
 *
 */
public final class TripleVerifier implements Cloneable {
// LDP constraint URI reported when a rejected triple is absent from the edit template.
private static final String NOT_IN_EDIT_TEMPLATE = "http://callimachusproject.org/callimachus-for-web-developers#Edit_template";
private static final String LDP = "http://www.w3.org/ns/ldp#";
private static final String LDP_CONTAINS = LDP + "contains";
private final AbsoluteTermFactory tf = AbsoluteTermFactory.newInstance();
// URI subjects of all verified triples.
private final Set<URI> subjects;
// URI objects seen so far; fragment URIs are collapsed to their document URI.
private final Set<URI> partners;
// Statements whose non-URI object has been reached from a URI subject, keyed by that object.
private final Map<Resource, Set<Statement>> connected;
// Statements whose blank-node subject has not (yet) been reached from a URI subject.
private final Map<Resource, Set<Statement>> disconnected;
// All rdf:type objects seen, in first-seen order.
private final Set<URI> allTypes;
// rdf:type objects grouped by subject.
private final Map<Resource, Set<URI>> types;
// LDP-vocabulary URIs observed as predicate or rdf:type object.
private final Set<URI> ldpURIs;
// True until the first triple has been verified.
private boolean empty = true;
// Accepted triple patterns; null means "accept anything".
private Set<TriplePattern> patterns;
/**
 * Creates a verifier with empty state and no patterns registered.
 */
public TripleVerifier() {
subjects = new HashSet<URI>();
partners = new HashSet<URI>();
connected = new HashMap<Resource, Set<Statement>>();
disconnected = new HashMap<Resource, Set<Statement>>();
allTypes = new LinkedHashSet<URI>();
types = new HashMap<Resource, Set<URI>>();
ldpURIs = new HashSet<URI>();
empty = true;
}
/**
 * Returns an independent copy of this verifier via the private copy constructor.
 */
@Override
public TripleVerifier clone() {
return new TripleVerifier(this);
}
/**
 * Copy constructor; collections are copied shallowly (their elements are
 * shared, the container objects are not). The patterns set itself is shared.
 */
private TripleVerifier(TripleVerifier cloned) {
subjects = new HashSet<URI>(cloned.subjects);
partners = new HashSet<URI>(cloned.partners);
connected = new HashMap<Resource, Set<Statement>>(cloned.connected);
disconnected = new HashMap<Resource, Set<Statement>>(cloned.disconnected);
allTypes = new LinkedHashSet<URI>(cloned.allTypes);
types = new HashMap<Resource, Set<URI>>(cloned.types);
ldpURIs = new HashSet<URI>(cloned.ldpURIs);
empty = cloned.empty;
patterns = cloned.patterns;
}
// Shows the verified subjects; intended for debugging output.
public String toString() {
return subjects.toString();
}
/**
 * Registers every non-inverse triple pattern with an IRI property from the
 * reader as an accepted pattern. Always closes the reader.
 *
 * @throws RDFParseException if the reader fails while parsing
 */
public void accept(RDFEventReader reader) throws RDFParseException {
if (patterns == null) {
patterns = new LinkedHashSet<TriplePattern>();
}
try {
while (reader.hasNext()) {
RDFEvent next = reader.next();
if (next.isTriplePattern()) {
TriplePattern tp = next.asTriplePattern();
GraphNodePath pred = tp.getProperty();
if (!tp.isInverse()) {
if (pred.isIRI()) {
accept(tp);
}
}
}
}
} finally {
reader.close();
}
}
/**
 * Registers every statement of the query result as an accepted pattern,
 * converting openrdf values to term-model objects. Always closes the result.
 */
public void accept(GraphQueryResult reader) throws RDFParseException,
QueryEvaluationException {
if (patterns == null) {
patterns = new LinkedHashSet<TriplePattern>();
}
try {
while (reader.hasNext()) {
Statement st = reader.next();
Resource subj = st.getSubject();
URI pred = st.getPredicate();
Value obj = st.getObject();
accept(new Triple((Node) asTerm(subj), (IRI) asTerm(pred),
asTerm(obj)));
}
} finally {
reader.close();
}
}
/**
 * Registers a single accepted pattern, lazily creating the pattern set (which
 * switches the verifier from accept-anything to pattern-checking mode).
 */
public void accept(TriplePattern pattern) {
if (patterns == null) {
patterns = new LinkedHashSet<TriplePattern>();
}
patterns.add(pattern);
}
// True if some blank-node subject was never reached from a URI subject.
public boolean isDisconnectedNodePresent() {
return !disconnected.isEmpty();
}
/**
 * Returns all statements that connect blank nodes into the graph.
 */
public Set<Statement> getConnections() {
Set<Statement> set = new HashSet<Statement>();
for (Set<Statement> nodes : connected.values()) {
set.addAll(nodes);
}
return set;
}
/**
 * True if the verified triples describe the given resource: trivially true
 * when empty; otherwise requires a single subject equal to {@code about} or
 * inside its fragment namespace.
 */
public boolean isAbout(Resource about) {
if (isEmpty())
return true;
if (!isSingleton())
return false;
URI subject = getSubject();
if (subject.equals(about))
return true;
if (subject.getNamespace().equals(about.stringValue() + '#'))
return true;
return false;
}
// True until verify() has accepted at least one triple.
public boolean isEmpty() {
return empty;
}
/**
 * True if all subjects refer to one document: either a single subject, or
 * every other subject lives in the primary subject's "#" namespace.
 */
public boolean isSingleton() {
if (subjects.isEmpty())
return false;
if (subjects.size() == 1)
return true;
URI about = getSubject();
String hash = about.stringValue() + "#";
for (URI subj : subjects) {
if (subj.equals(about))
continue;
if (subj.getNamespace().equals(hash))
continue;
return false;
}
return true;
}
// True if an ldp:contains triple (as rdf:type or predicate) was observed.
public boolean isContainmentTriplePresent() {
return ldpURIs.contains(new URIImpl(LDP_CONTAINS));
}
/**
 * Returns the primary subject: the first subject without a fragment
 * namespace, or the common document URI of two fragment subjects, or the
 * first fragment subject seen.
 */
public URI getSubject() {
URI about = null;
for (URI subj : subjects) {
String ns = subj.getNamespace();
if (!ns.endsWith("#")) {
return subj;
} else if (about == null) {
about = subj;
} else if (ns.equals(about.getNamespace())) {
// Two subjects share a "#" namespace: strip the '#' to get the document URI.
return new URIImpl(ns.substring(0, ns.length() - 1));
}
}
return about;
}
// Records a subject without running any verification.
public void addSubject(URI subj) {
subjects.add(subj);
}
// All rdf:type objects seen, in first-seen order.
public Set<URI> getAllTypes() {
return allTypes;
}
/**
 * Returns the rdf:type objects recorded for the given subject, or an empty
 * set when none were seen.
 */
public Set<URI> getTypes(URI subject) {
if (types.containsKey(subject))
return types.get(subject);
return Collections.emptySet();
}
// URI objects referenced by the verified triples (fragment URIs collapsed).
public Set<URI> getPartners() {
return partners;
}
/**
 * Verifies a triple against the accepted patterns and records it.
 *
 * @throws BadRequest if patterns are registered and the triple matches none
 *         of them (with the edit-template LDP constraint attached when no
 *         pattern even shares the predicate)
 * @throws RDFHandlerException if the matching pattern is inverse
 */
public void verify(Resource subj, URI pred, Value obj)
throws RDFHandlerException {
Set<TriplePattern> alternatives = findAlternatives(subj, pred, obj);
// Empty alternatives: no registered pattern uses this predicate at all.
if (alternatives != null && alternatives.isEmpty())
throw new BadRequest("Triple pattern " + subj + " " + pred + " "
+ obj + " must be present in template to use it")
.addLdpConstraint(NOT_IN_EDIT_TEMPLATE);
// Non-null alternatives: patterns share the predicate but none matched fully.
if (alternatives != null)
throw new BadRequest("Triple " + subj + " " + pred + " " + obj
+ " must match one of " + alternatives);
if (subj instanceof URI) {
addSubject((URI) subj);
}
if (RDF.TYPE.equals(pred) && obj instanceof URI) {
if (!types.containsKey(subj)) {
types.put(subj, new HashSet<URI>());
}
types.get(subj).add((URI) obj);
allTypes.add((URI) obj);
if (obj.stringValue().startsWith(LDP)) {
ldpURIs.add((URI) obj);
}
} else if (pred.stringValue().startsWith(LDP)) {
ldpURIs.add(pred);
}
link(subj, pred, obj);
empty = false;
}
/**
 * Returns null when the triple matches a registered pattern (or no patterns
 * are registered); otherwise returns the candidate patterns the triple was
 * expected to match: those sharing subject or object too, else those merely
 * sharing the predicate (empty when none do).
 */
private Set<TriplePattern> findAlternatives(Resource subj, URI pred, Value obj) throws RDFHandlerException {
if (patterns == null)
return null;
Term sterm = asTerm(subj);
Term pterm = asTerm(pred);
Term oterm = asTerm(obj);
for (TriplePattern tp : patterns) {
// Concrete (IRI/literal) positions must match; variable positions match anything.
if (tp.getSubject().isIRI()) {
if (!tp.getSubject().equals(sterm))
continue;
}
if (tp.getProperty().isIRI()) {
if (!tp.getProperty().equals(pterm))
continue;
}
if (tp.getObject().isIRI() || tp.getObject().isLiteral()) {
if (!tp.getObject().equals(oterm))
continue;
}
if (tp.isInverse())
throw new RDFHandlerException("Inverse relationships cannot be used here");
return null;
}
Set<TriplePattern> alt1 = new LinkedHashSet<TriplePattern>();
Set<TriplePattern> alt2 = new LinkedHashSet<TriplePattern>();
for (TriplePattern tp : patterns) {
if (tp.getProperty().equals(pterm)) {
alt1.add(tp);
if (tp.getSubject().equals(sterm)
|| tp.getObject().equals(oterm)) {
alt2.add(tp);
}
}
}
return alt2.isEmpty() ? alt1 : alt2;
}
/**
 * Converts an openrdf Value into the term model: literals keep datatype or
 * language, URIs become IRIs, anything else becomes a (blank) node.
 */
private Term asTerm(Value obj) {
if (obj instanceof Literal) {
Literal lit = (Literal) obj;
if (lit.getDatatype() != null) {
return tf.literal(obj.stringValue(), tf.iri(lit.getDatatype()
.stringValue()));
} else if (lit.getLanguage() != null) {
return tf.literal(obj.stringValue(), lit.getLanguage());
} else {
return tf.literal(obj.stringValue());
}
} else if (obj instanceof URI) {
return tf.iri(obj.stringValue());
} else {
return tf.node(obj.stringValue());
}
}
/**
 * Tracks graph connectivity for a verified triple: URI objects are recorded
 * as partners; blank-node objects are marked connected when the subject is,
 * otherwise parked under the disconnected subject.
 */
private void link(Resource subj, URI pred, Value obj) {
// URI subjects are connected by definition; blank nodes only once reached.
boolean subjConnected = subj instanceof URI || connected.containsKey(subj);
if (!subjConnected && !disconnected.containsKey(subj)) {
disconnected.put(subj, new HashSet<Statement>());
}
if (obj instanceof URI) {
URI uri = (URI) obj;
String ns = uri.getNamespace();
if (ns.endsWith("#")) {
// Collapse a fragment URI to its enclosing document URI.
partners.add(new URIImpl(ns.substring(0, ns.length() - 1)));
} else {
partners.add(uri);
}
} else if (obj instanceof Resource) {
if (subjConnected) {
connect(new StatementImpl(subj, pred, obj));
} else {
disconnected.get(subj).add(new StatementImpl(subj, pred, obj));
}
}
}
/**
 * Marks the statement's object as connected and recursively promotes any
 * statements that were parked waiting for that node to become reachable.
 */
private void connect(Statement st) {
Set<Statement> set = connected.get(st.getObject());
if (set == null) {
connected.put((Resource) st.getObject(), set = new HashSet<Statement>());
}
set.add(st);
Set<Statement> removed = disconnected.remove(st.getObject());
if (removed != null) {
for (Statement connecting : removed) {
connect(connecting);
}
}
}
}
| |
package edu.buffalo.cse.cse486586.simpledynamo;
import android.app.Activity;
import android.content.BroadcastReceiver;
import android.content.ContentValues;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.res.Resources;
import android.database.Cursor;
import android.os.Bundle;
import android.support.v4.content.LocalBroadcastManager;
import android.telephony.TelephonyManager;
import android.text.Html;
import android.text.method.ScrollingMovementMethod;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
import java.util.ArrayList;
import java.util.HashMap;
class RamanKey {
private String hashNode;
private String strNode;
RamanKey(String hashNode, String strNode) {
this.hashNode = hashNode;
this.strNode = strNode;
}
public void setHashNode(String hashNode) {
this.hashNode = hashNode;
}
public void setStrNode(String strNode) {
this.strNode = strNode;
}
public String getHashNode() {
return hashNode;
}
public String getStrNode() {
return strNode;
}
public int hashCode() {
return hashNode.hashCode();
}
public boolean equals(Object obj) {
if (obj instanceof RamanKey) {
RamanKey ramanKey = (RamanKey) obj;
return (ramanKey.getStrNode().equals(this.getStrNode()));
} else {
return false;
}
}
}
/**
 * Message envelope exchanged between Dynamo nodes. Carries routing fields
 * (coordinator, sender, receiver), the request text, an optional key/value
 * payload, and response bookkeeping. The telescoping constructors delegate to
 * one another, each adding one more field.
 */
class RamanMessage {
    private String coordinatorNode;
    private String senderNode;
    private String receiverNode;
    private String message;
    private String key;
    private String value;
    private ArrayList<String> replicaList;
    private int rowsDeleted;
    private boolean responseReceived;
    private String jsonResponse;
    // Aggregated key/value results of a query, filled via addQueryResponse().
    private HashMap<String, String> queryResponseMap = new HashMap<String, String>();

    /** Initiator message carrying only coordinator and sender. */
    RamanMessage(String coordinatorNode, String senderNode) {
        this.coordinatorNode = coordinatorNode;
        this.senderNode = senderNode;
    }

    /** Initiator message with a request text. */
    RamanMessage(String coordinatorNode, String senderNode, String message) {
        this(coordinatorNode, senderNode);
        this.message = message;
    }

    /** Normal request addressed to a specific receiver. */
    RamanMessage(String coordinatorNode, String senderNode, String receiverNode, String message) {
        this(coordinatorNode, senderNode, message);
        this.receiverNode = receiverNode;
    }

    /** Query request for a single key. */
    RamanMessage(String coordinatorNode, String senderNode, String receiverNode, String message, String key) {
        this(coordinatorNode, senderNode, receiverNode, message);
        this.key = key;
    }

    /** Insert request carrying a key/value pair. */
    RamanMessage(String coordinatorNode, String senderNode, String receiverNode, String message, String key, String value) {
        this(coordinatorNode, senderNode, receiverNode, message, key);
        this.value = value;
    }

    public void setCoordinatorNode(String coordinatorNode) {
        this.coordinatorNode = coordinatorNode;
    }

    public void setSenderNode(String senderNode) {
        this.senderNode = senderNode;
    }

    public void setReceiverNode(String receiverNode) {
        this.receiverNode = receiverNode;
    }

    public void setMessage(String message) {
        this.message = message;
    }

    public void setKey(String key) {
        this.key = key;
    }

    public void setValue(String value) {
        this.value = value;
    }

    public void setReplicaList(ArrayList<String> replicaList) {
        this.replicaList = replicaList;
    }

    public void setRowsDeleted(int rowsDeleted) {
        this.rowsDeleted = rowsDeleted;
    }

    public void setResponseReceived(boolean responseReceived) {
        this.responseReceived = responseReceived;
    }

    public void setJsonResponse(String jsonResponse) {
        this.jsonResponse = jsonResponse;
    }

    /** Records one key/value pair of a query response. */
    public void addQueryResponse(String key, String value) {
        queryResponseMap.put(key, value);
    }

    public String getCoordinatorNode() {
        return coordinatorNode;
    }

    public String getSenderNode() {
        return senderNode;
    }

    public String getReceiverNode() {
        return receiverNode;
    }

    public String getMessage() {
        return message;
    }

    public String getKey() {
        return key;
    }

    public String getValue() {
        return value;
    }

    public ArrayList<String> getReplicaList() {
        return replicaList;
    }

    public int getRowsDeleted() {
        return rowsDeleted;
    }

    public boolean getResponseReceived() {
        return responseReceived;
    }

    public String getJsonResponse() {
        return jsonResponse;
    }

    public HashMap<String, String> getQueryResponseMap() {
        return queryResponseMap;
    }
}
public class SimpleDynamoActivity extends Activity implements View.OnClickListener {
private Resources res;
// Dump / delete buttons and the two PA test hooks.
private Button btnDump, btnDelete, btnTestInsert, btnTestQuery;
// mTextView is the scrolling on-screen log; the txt* views show ring state.
private TextView mTextView, txtPrevNode, txtNextNode, txtDynamoRingNodes;
private EditText mEditText;
// MY_EMULATOR_NODE is the AVD id from the phone number; MY_PORT is twice that
// (the socket port), both derived in onCreate().
private String MY_PORT, MY_EMULATOR_NODE;
/**
 * Wires up the UI views, derives this AVD's node id and listening port from
 * its phone number, and registers for dynamo-ring update broadcasts.
 */
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_simple_dynamo);
res = getResources();
mTextView = (TextView) findViewById(R.id.textView1);
mTextView.setMovementMethod(new ScrollingMovementMethod());
txtPrevNode = (TextView) findViewById(R.id.txt_prev_node);
txtNextNode = (TextView) findViewById(R.id.txt_next_node);
txtDynamoRingNodes = (TextView) findViewById(R.id.txt_dynamo_ring_nodes);
mEditText = (EditText) findViewById(R.id.edit_txt);
btnDump = (Button) findViewById(R.id.button1);
btnDelete = (Button) findViewById(R.id.button2);
btnTestInsert = (Button) findViewById(R.id.btn_test_insert);
btnTestQuery = (Button) findViewById(R.id.btn_test_query);
/*
 * Calculate the port number that this AVD listens on.
 * It is just a hack that I came up with to get around the networking limitations of AVDs.
 * The explanation is provided in the PA1 spec.
 */
TelephonyManager tel = (TelephonyManager) this.getSystemService(Context.TELEPHONY_SERVICE);
// Last four digits of the line number identify the emulator instance.
String portStr = tel.getLine1Number().substring(tel.getLine1Number().length() - 4);
MY_EMULATOR_NODE = String.valueOf((Integer.parseInt(portStr)));
MY_PORT = String.valueOf((Integer.parseInt(portStr) * 2));
LocalBroadcastManager.getInstance(this).registerReceiver(dynamoRingUpdateListener, new IntentFilter(Globals.DYNAMO_RING_UPDATE_LISTENER));
}
/**
 * Attaches this activity as the click listener of every action button and
 * refreshes the displayed list of ring replicas.
 */
@Override
protected void onResume() {
    super.onResume();
    final Button[] actionButtons = { btnDump, btnDelete, btnTestInsert, btnTestQuery };
    for (final Button button : actionButtons) {
        button.setOnClickListener(this);
    }
    txtDynamoRingNodes.setText(Globals.LIST_REPLICAS_GLOBAL.toString());
}
// Will be called whenever the next prev node gets change
// NOTE(review): this receiver is never registered in the visible code and its
// onReceive body is empty — looks like a leftover placeholder; confirm before
// removing.
private BroadcastReceiver nextPrevNodeListener = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
}
};
// Will be called whenever the dynamo ring gets updated; registered in
// onCreate() for the Globals.DYNAMO_RING_UPDATE_LISTENER action. Copies the
// broadcast extras into the prev/next/ring TextViews.
private BroadcastReceiver dynamoRingUpdateListener = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
String strPrevNode = intent.getStringExtra(Globals.TXT_PREV_NODE);
String strNextNode = intent.getStringExtra(Globals.TXT_NEXT_NODE);
String strDynamoRingNodes = intent.getStringExtra(Globals.TXT_DYNAMO_RING_NODES);
txtPrevNode.setText(strPrevNode);
txtNextNode.setText(strNextNode);
txtDynamoRingNodes.setText(strDynamoRingNodes);
}
};
/**
 * Dispatches the four action buttons: button1 runs a local ("@") or global
 * ("*") dump depending on the text field, button2 deletes by the entered
 * selection, and the two test buttons run the insert/query smoke tests.
 * The text field is cleared after every action.
 */
@Override
public void onClick(View v) {
switch (v.getId()) {
case R.id.button1:
if (mEditText.getText().toString().equalsIgnoreCase(Globals.LOCAL_QUERY)) {
// Local Dump
if (showDump(Globals.LOCAL_QUERY)) {
Log.v(Globals.TAG, "Local Dump Success");
mTextView.append("\nLocal Dump Success\n");
} else {
Log.v(Globals.TAG, "Local Dump Fail");
mTextView.append("\nLocal Dump Fail\n");
}
} else if (mEditText.getText().toString().equalsIgnoreCase(Globals.GLOBAL_QUERY)) {
// Global Dump
if (showDump(Globals.GLOBAL_QUERY)) {
Log.v(Globals.TAG, "Global Dump Success");
mTextView.append("\nGlobal Dump Success\n");
} else {
Log.v(Globals.TAG, "Global Dump Fail");
mTextView.append("\nGlobal Dump Fail\n");
}
} else {
// Anything other than the two magic selections is rejected.
mTextView.append("\nEntered Key is not correct. It can handle either @ or * queries\n");
}
break;
case R.id.button2:
deleteData(mEditText.getText().toString());
break;
case R.id.btn_test_insert:
// Test Insert
if (testInsert()) {
Log.v(Globals.TAG, "Insert Success");
mTextView.append("\nInsert Success\n");
} else {
Log.v(Globals.TAG, "Insert Fail");
mTextView.append("\nInsert Fail\n");
}
break;
case R.id.btn_test_query:
// Test Query
if (testQuery()) {
Log.v(Globals.TAG, "Query Success");
mTextView.append("\nQuery success\n");
} else {
Log.v(Globals.TAG, "Query fail");
mTextView.append("\nQuery fail\n");
}
break;
}
mEditText.getText().clear();
}
/**
 * Queries the content provider with the given query string and appends every
 * (key, value) row to the on-screen log, color-coded by this device's port.
 *
 * @param query either the local (@) or global (*) dump query
 * @return true when the dump completed, false on a null cursor, missing
 *         columns, or any unexpected exception
 */
private boolean showDump(String query) {
    Cursor resultCursor = null;
    try {
        resultCursor = getContentResolver().query(Globals.mUri, null, query, null, null);
        if (resultCursor == null) {
            Log.v(Globals.TAG, "Result null");
            return false;
        }
        // Column indices are loop-invariant; resolve them once up front
        // (previously re-resolved per row, with exceptions used as control flow).
        int keyIndex = resultCursor.getColumnIndex(Globals.KEY_FIELD);
        int valueIndex = resultCursor.getColumnIndex(Globals.VALUE_FIELD);
        if (keyIndex == -1 || valueIndex == -1) {
            Log.v(Globals.TAG, "Wrong columns");
            return false;
        }
        if (resultCursor.moveToFirst()) {
            do {
                String strKey = resultCursor.getString(keyIndex);
                String strValue = resultCursor.getString(valueIndex);
                String displayedMsg = "\nKey : " + strKey + "\nValue : " + strValue;
                // Displaying Color text so as to differentiate messages sent by different devices
                String colorStrReceived = "<font color='" + getColor(MY_PORT) + "'>" + displayedMsg + "</font>";
                mTextView.append("\n ");
                mTextView.append(Html.fromHtml(colorStrReceived));
            } while (resultCursor.moveToNext());
        }
        return true;
    } catch (Exception e) {
        // Preserve the cause in the log instead of only printing the trace.
        Log.v(Globals.TAG, "Exception in showDump()", e);
        return false;
    } finally {
        // Single closing point; the cursor is guaranteed closed on every path.
        if (resultCursor != null && !resultCursor.isClosed()) {
            resultCursor.close();
        }
    }
}
/**
 * Deletes rows matching {@code query} through the content provider and echoes
 * the number of deleted rows (color-coded by this device's port) into the
 * on-screen log.
 *
 * @param query the selection string forwarded to the provider's delete()
 */
private void deleteData(String query) {
    try {
        int rowsDeleted = getContentResolver().delete(Globals.mUri, query, null);
        String displayedMsg = "\nDeleted : " + rowsDeleted + " rows";
        // Displaying Color text so as to differentiate messages sent by different devices
        String colorStrReceived = "<font color='" + getColor(MY_PORT) + "'>" + displayedMsg + "</font>";
        mTextView.append("\n ");
        mTextView.append(Html.fromHtml(colorStrReceived));
    } catch (Exception e) {
        // Fix: the failure was silently swallowed before; keep the user-visible
        // message but also log the cause for debugging.
        Log.e(Globals.TAG, "Delete failed for query: " + query, e);
        mTextView.append("\nDelete Query Failed\n");
    }
}
/**
 * Inserts the typed key as both key and value through the content provider.
 *
 * @return true when the insert call completed, false when it threw
 */
private boolean testInsert() {
    boolean inserted = true;
    try {
        final String typedKey = mEditText.getText().toString();
        final ContentValues values = new ContentValues();
        values.put(Globals.KEY_FIELD, typedKey);
        values.put(Globals.VALUE_FIELD, typedKey);
        getContentResolver().insert(Globals.mUri, values);
    } catch (Exception e) {
        Log.e(Globals.TAG, e.toString());
        inserted = false;
    }
    return inserted;
}
/**
 * Queries the provider for the typed key and verifies that exactly one row
 * comes back whose key and value both equal the typed key.
 *
 * @return true when the round-trip matches, false otherwise
 */
private boolean testQuery() {
    Cursor resultCursor = null;
    try {
        String key = mEditText.getText().toString();
        resultCursor = getContentResolver().query(Globals.mUri, null,
                key, null, null);
        if (resultCursor == null) {
            Log.e(Globals.TAG, "Result null");
            return false;
        }
        int keyIndex = resultCursor.getColumnIndex(Globals.KEY_FIELD);
        int valueIndex = resultCursor.getColumnIndex(Globals.VALUE_FIELD);
        if (keyIndex == -1 || valueIndex == -1) {
            Log.e(Globals.TAG, "Wrong columns");
            return false;
        }
        // Exactly one row expected: after moveToFirst the row must be both
        // first and last. moveToFirst() is false on an empty cursor, which
        // makes isFirst() false and trips this check as intended.
        resultCursor.moveToFirst();
        if (!(resultCursor.isFirst() && resultCursor.isLast())) {
            Log.e(Globals.TAG, "Wrong number of rows");
            return false;
        }
        String returnKey = resultCursor.getString(keyIndex);
        String returnValue = resultCursor.getString(valueIndex);
        if (!(returnKey.equals(key) && returnValue.equals(key))) {
            Log.e(Globals.TAG, "(key, value) pairs don't match\n");
            return false;
        }
        return true;
    } catch (Exception e) {
        // Fix: previously swallowed with no log; also record the cause.
        Log.e(Globals.TAG, "Exception in testQuery()", e);
        return false;
    } finally {
        // Fix: the cursor leaked whenever an unexpected exception fired after
        // the query (e.g. getString throwing); close it on every path.
        if (resultCursor != null && !resultCursor.isClosed()) {
            resultCursor.close();
        }
    }
}
/**
 * Maps a device port to its display color so on-screen messages can be told
 * apart by originating device. Any port that matches none of the remote-port
 * constants falls back to this device's own color.
 */
private int getColor(String port) {
    if (Globals.REMOTE_PORT0.contains(port)) {
        return res.getColor(R.color.remote_port0);
    }
    if (Globals.REMOTE_PORT1.contains(port)) {
        return res.getColor(R.color.remote_port1);
    }
    if (Globals.REMOTE_PORT2.contains(port)) {
        return res.getColor(R.color.remote_port2);
    }
    if (Globals.REMOTE_PORT3.contains(port)) {
        return res.getColor(R.color.remote_port3);
    }
    if (Globals.REMOTE_PORT4.contains(port)) {
        return res.getColor(R.color.remote_port4);
    }
    return res.getColor(R.color.my_port);
}
/**
 * Lifecycle callback — only logs that the activity stopped.
 */
@Override  // Fix: missing @Override on an Activity lifecycle override.
public void onStop() {
    super.onStop();
    Log.v(Globals.TAG, "onStop()");
}
}
| |
package com.joanfuentes.hintcaseexample;
import android.app.Activity;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.support.design.widget.Snackbar;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.Gravity;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.Button;
import android.widget.CompoundButton;
import android.widget.Switch;
import com.joanfuentes.hintcase.HintCase;
import com.joanfuentes.hintcaseassets.contentholderanimators.FadeInContentHolderAnimator;
import com.joanfuentes.hintcaseassets.hintcontentholders.SimpleHintContentHolder;
import com.joanfuentes.hintcaseassets.shapeanimators.RevealCircleShapeAnimator;
import com.joanfuentes.hintcaseassets.shapeanimators.RevealRectangularShapeAnimator;
import com.joanfuentes.hintcaseassets.shapeanimators.UnrevealCircleShapeAnimator;
import com.joanfuentes.hintcaseassets.shapeanimators.UnrevealRectangularShapeAnimator;
import com.joanfuentes.hintcaseassets.shapes.CircularShape;
/**
 * Demo screen for the HintCase library: each example shows a hint overlay
 * anchored to a concrete target view (toolbar actions, buttons, a text view,
 * a switch, a radio button and a FAB), each with its own shape, background and
 * animators.
 */
public class TargetHintActivity extends AppCompatActivity {

    /** Convenience accessor used by the anonymous listeners below. */
    public Activity getActivity() {
        return this;
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_target_hint);
        configureToolbar();
        setViews();
    }

    /** Installs the toolbar as the support action bar with an "up" arrow. */
    private void configureToolbar() {
        Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
        if (toolbar != null) {
            toolbar.setTitle("Hintcase - Target View");
            setSupportActionBar(toolbar);
            getSupportActionBar().setDisplayHomeAsUpEnabled(true);
            getSupportActionBar().setDisplayShowHomeEnabled(true);
        }
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case android.R.id.home:
                // The "up" arrow behaves like back in this demo.
                onBackPressed();
                return true;
            default:
                return super.onOptionsItemSelected(item);
        }
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        super.onCreateOptionsMenu(menu);
        getMenuInflater().inflate(R.menu.menu_target_hint, menu);
        // Menu item views only exist after inflation, so the automatic hint
        // over the search action is scheduled from here.
        launchAutomaticHint();
        return true;
    }

    /**
     * Shows a hint over the toolbar "search" action shortly after the menu is
     * inflated. The 500 ms delay gives the toolbar time to lay out its item
     * views so {@code findViewById(R.id.action_search)} can succeed.
     */
    private void launchAutomaticHint() {
        // Fix: the no-arg Handler() constructor is deprecated (implicitly binds
        // to the calling thread's looper); bind explicitly to the main looper.
        new Handler(Looper.getMainLooper()).postDelayed(new Runnable() {
            @Override
            public void run() {
                View actionSearchView = findViewById(R.id.action_search);
                if (actionSearchView != null) {
                    SimpleHintContentHolder blockInfo =
                            new SimpleHintContentHolder.Builder(actionSearchView.getContext())
                                    .setContentTitle("Search")
                                    .setContentText("This is an automatic example of a hint over a toolbar item")
                                    .setTitleStyle(R.style.title)
                                    .setContentStyle(R.style.content)
                                    .setMarginByResourcesId(R.dimen.activity_vertical_margin,
                                            R.dimen.activity_horizontal_margin,
                                            R.dimen.activity_vertical_margin,
                                            R.dimen.activity_horizontal_margin)
                                    .build();
                    new HintCase(actionSearchView.getRootView())
                            .setTarget(actionSearchView, new CircularShape())
                            .setShapeAnimators(new RevealCircleShapeAnimator(),
                                    new UnrevealCircleShapeAnimator())
                            .setHintBlock(blockInfo)
                            .show();
                }
            }
        }, 500);
    }

    /** Wires each example button (and the demo switch) to the hint it shows. */
    private void setViews() {
        // Example 1: rectangular reveal over a non-clickable button target.
        Button buttonExample1 = (Button) findViewById(R.id.button_example_1);
        if (buttonExample1 != null) {
            buttonExample1.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View view) {
                    SimpleHintContentHolder blockInfo = new SimpleHintContentHolder.Builder(view.getContext())
                            .setContentTitle("Attention!")
                            .setContentText("This is a hint related with a button.")
                            .setTitleStyle(R.style.title)
                            .setContentStyle(R.style.content)
                            .build();
                    new HintCase(view.getRootView())
                            .setTarget(findViewById(R.id.button), HintCase.TARGET_IS_NOT_CLICKABLE)
                            .setBackgroundColorByResourceId(R.color.colorPrimary)
                            .setShapeAnimators(new RevealRectangularShapeAnimator(), new UnrevealRectangularShapeAnimator())
                            .setHintBlock(blockInfo, new FadeInContentHolderAnimator())
                            .show();
                }
            });
        }
        // Example 2: circular reveal over a non-clickable text view target.
        Button buttonExample2 = (Button) findViewById(R.id.button_example_2);
        if (buttonExample2 != null) {
            buttonExample2.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View view) {
                    SimpleHintContentHolder blockInfo = new SimpleHintContentHolder.Builder(view.getContext())
                            .setContentTitle("Attention!")
                            .setContentText("This is a hint related with a text.. Please, be careful")
                            .setTitleStyle(R.style.title)
                            .setContentStyle(R.style.content)
                            .build();
                    new HintCase(view.getRootView())
                            .setTarget(findViewById(R.id.textView), new CircularShape(), HintCase.TARGET_IS_NOT_CLICKABLE)
                            .setBackgroundColorByResourceId(R.color.colorPrimary)
                            .setShapeAnimators(new RevealCircleShapeAnimator(), new UnrevealCircleShapeAnimator())
                            .setHintBlock(blockInfo, new FadeInContentHolderAnimator())
                            .show();
                }
            });
        }
        // The switch used as a clickable target by example 3.
        Switch switchButton = (Switch) findViewById(R.id.switch_button);
        if (switchButton != null) {
            switchButton.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
                @Override
                public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
                    Snackbar.make(getActivity().getWindow().getDecorView(), "Switch was changed", Snackbar.LENGTH_SHORT).show();
                }
            });
        }
        // Example 3: hint over the switch that stays clickable while shown.
        Button buttonExample3 = (Button) findViewById(R.id.button_example_3);
        if (buttonExample3 != null) {
            buttonExample3.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View view) {
                    SimpleHintContentHolder blockInfo = new SimpleHintContentHolder.Builder(view.getContext())
                            .setContentTitle("Activate your powers!")
                            .setContentText("you have the full control over your power. On to be a Hero, Off to be a looser.")
                            .setTitleStyle(R.style.title_light)
                            .setContentStyle(R.style.content_light)
                            .setGravity(Gravity.CENTER)
                            .setMarginByResourcesId(R.dimen.activity_vertical_margin,
                                    R.dimen.activity_horizontal_margin,
                                    R.dimen.activity_vertical_margin,
                                    R.dimen.activity_horizontal_margin)
                            .build();
                    new HintCase(view.getRootView())
                            .setTarget(findViewById(R.id.switch_button), HintCase.TARGET_IS_CLICKABLE)
                            .setBackgroundColorByResourceId(android.R.color.holo_blue_dark)
                            .setShapeAnimators(new RevealRectangularShapeAnimator(),
                                    new UnrevealRectangularShapeAnimator())
                            .setHintBlock(blockInfo, new FadeInContentHolderAnimator())
                            .show();
                }
            });
        }
        // Example 4: clickable radio-button target with a literal ARGB background.
        Button buttonExample4 = (Button) findViewById(R.id.button_example_4);
        if (buttonExample4 != null) {
            buttonExample4.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View view) {
                    SimpleHintContentHolder blockInfo = new SimpleHintContentHolder.Builder(view.getContext())
                            .setContentTitle("Attention!")
                            .setContentText("This is a hint related with a radio button.")
                            .setTitleStyle(R.style.title)
                            .setContentStyle(R.style.content)
                            .setGravity(Gravity.CENTER)
                            .setMarginByResourcesId(R.dimen.activity_vertical_margin,
                                    R.dimen.activity_horizontal_margin,
                                    R.dimen.activity_vertical_margin,
                                    R.dimen.activity_horizontal_margin)
                            .build();
                    new HintCase(view.getRootView())
                            .setTarget(findViewById(R.id.radio_button), HintCase.TARGET_IS_CLICKABLE)
                            .setBackgroundColor(0xCC000000)
                            .setShapeAnimators(new RevealRectangularShapeAnimator(), new UnrevealRectangularShapeAnimator())
                            .setHintBlock(blockInfo, new FadeInContentHolderAnimator())
                            .show();
                }
            });
        }
        // Example 5: circular hint over the FAB with the default background.
        Button buttonExample5 = (Button) findViewById(R.id.button_example_5);
        if (buttonExample5 != null) {
            buttonExample5.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View view) {
                    SimpleHintContentHolder blockInfo = new SimpleHintContentHolder.Builder(view.getContext())
                            .setContentTitle("FAB button power!")
                            .setContentText("The FAB button is gonna help you with the main action of every screen.")
                            .setTitleStyle(R.style.title)
                            .setContentStyle(R.style.content)
                            .build();
                    new HintCase(view.getRootView())
                            .setTarget(findViewById(R.id.fab), new CircularShape())
                            .setShapeAnimators(new RevealCircleShapeAnimator(),
                                    new UnrevealCircleShapeAnimator())
                            .setHintBlock(blockInfo)
                            .show();
                }
            });
        }
        // Example 6: hint over a toolbar action that remains clickable; the
        // action itself gets a listener so taps show a Snackbar.
        Button buttonExample6 = (Button) findViewById(R.id.button_example_6);
        if (buttonExample6 != null) {
            buttonExample6.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View view) {
                    View actionCameraView = findViewById(R.id.action_camera);
                    if (actionCameraView != null) {
                        actionCameraView.setOnClickListener(new View.OnClickListener() {
                            @Override
                            public void onClick(View v) {
                                Snackbar.make(getActivity().getWindow().getDecorView(), "Camera was clicked", Snackbar.LENGTH_SHORT).show();
                            }
                        });
                        SimpleHintContentHolder blockInfo =
                                new SimpleHintContentHolder.Builder(actionCameraView.getContext())
                                        .setContentTitle("Camera icon!")
                                        .setContentText("This is an example of a hint over a toolbar item")
                                        .setTitleStyle(R.style.title)
                                        .setContentStyle(R.style.content)
                                        .setMarginByResourcesId(R.dimen.activity_vertical_margin,
                                                R.dimen.activity_horizontal_margin,
                                                R.dimen.activity_vertical_margin,
                                                R.dimen.activity_horizontal_margin)
                                        .build();
                        new HintCase(actionCameraView.getRootView())
                                .setTarget(actionCameraView, new CircularShape(), HintCase.TARGET_IS_CLICKABLE)
                                .setShapeAnimators(new RevealCircleShapeAnimator(),
                                        new UnrevealCircleShapeAnimator())
                                .setHintBlock(blockInfo)
                                .show();
                    }
                }
            });
        }
    }
}
| |
/*
* Copyright 2015-2019 Jeeva Kandasamy (jkandasa@gmail.com)
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mycontroller.standalone.api;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import org.mycontroller.standalone.AppProperties.RESOURCE_TYPE;
import org.mycontroller.standalone.McObjectManager;
import org.mycontroller.standalone.api.jaxrs.model.Query;
import org.mycontroller.standalone.api.jaxrs.model.QueryResponse;
import org.mycontroller.standalone.api.jaxrs.model.ResourcePurgeConf;
import org.mycontroller.standalone.api.jaxrs.model.SensorVariableJson;
import org.mycontroller.standalone.db.DB_QUERY;
import org.mycontroller.standalone.db.DaoUtils;
import org.mycontroller.standalone.db.DeleteResourceUtils;
import org.mycontroller.standalone.db.SensorUtils;
import org.mycontroller.standalone.db.tables.Node;
import org.mycontroller.standalone.db.tables.Room;
import org.mycontroller.standalone.db.tables.Sensor;
import org.mycontroller.standalone.db.tables.SensorVariable;
import org.mycontroller.standalone.exceptions.McBadRequestException;
import org.mycontroller.standalone.exceptions.McDuplicateException;
import org.mycontroller.standalone.exceptions.McException;
import org.mycontroller.standalone.exceptions.McInvalidException;
import org.mycontroller.standalone.message.IMessage;
import org.mycontroller.standalone.metrics.MetricsUtils;
import org.mycontroller.standalone.metrics.MetricsUtils.METRIC_TYPE;
import org.mycontroller.standalone.model.ResourceModel;
import org.mycontroller.standalone.utils.McUtils;
import lombok.extern.slf4j.Slf4j;
/**
* @author Jeeva Kandasamy (jkandasa)
* @since 0.0.3
*/
@Slf4j
public class SensorApi {

    /**
     * Returns sensors matching the given filters. Node EUI/name live on the
     * joined node table, so ordering by them needs a raw ORDER BY query
     * instead of a plain column name.
     */
    public QueryResponse getAll(HashMap<String, Object> filters) {
        Query query = Query.get(filters);
        if (query.getOrderBy().equalsIgnoreCase(Sensor.KEY_NODE_EUI)) {
            query.setOrderByRawQuery(DB_QUERY.getQuery(DB_QUERY.ORDER_BY_NODE_EUI));
        } else if (query.getOrderBy().equalsIgnoreCase(Sensor.KEY_NODE_NAME)) {
            query.setOrderByRawQuery(DB_QUERY.getQuery(DB_QUERY.ORDER_BY_NODE_NAME));
        }
        return DaoUtils.getSensorDao().getAll(query);
    }

    /** Returns the first sensor matching the filters, or null when none match. */
    public Sensor get(HashMap<String, Object> filters) {
        QueryResponse response = getAll(filters);
        @SuppressWarnings("unchecked")
        List<Sensor> items = (List<Sensor>) response.getData();
        if (items != null && !items.isEmpty()) {
            return items.get(0);
        }
        return null;
    }

    /** Returns the sensor with the given database id, or null. */
    public Sensor get(int id) {
        return DaoUtils.getSensorDao().getById(id);
    }

    /** Deletes the sensors with the given ids (including dependent resources). */
    public void deleteIds(List<Integer> ids) {
        DeleteResourceUtils.deleteSensors(ids);
    }

    /**
     * Updates an existing sensor after checking that its (node, sensorId) pair
     * does not collide with a different sensor.
     *
     * @throws McDuplicateException when another sensor already uses the id
     * @throws McException when gateway validation or the update fails
     */
    public void update(Sensor sensor) throws McException {
        Sensor availabilityCheck = DaoUtils.getSensorDao().get(sensor.getNode().getId(), sensor.getSensorId());
        if (availabilityCheck != null && !sensor.getId().equals(availabilityCheck.getId())) {
            throw new McDuplicateException("A sensor available with this sensor id!");
        }
        try {
            if (McObjectManager.getEngine(sensor.getNode().getGatewayTable().getId()).validate(sensor)) {
                DaoUtils.getSensorDao().update(sensor);
                // Update Variable Types
                SensorUtils.updateSensorVariables(sensor);
            }
        } catch (Exception ex) {
            throw new McException(ex);
        }
    }

    /**
     * Creates a new sensor and its variable types.
     *
     * @throws McDuplicateException when a sensor with this id already exists
     * @throws McException when gateway validation or persistence fails
     */
    public void add(Sensor sensor) throws McException {
        Sensor availabilityCheck = DaoUtils.getSensorDao().get(sensor.getNode().getId(), sensor.getSensorId());
        if (availabilityCheck != null) {
            throw new McDuplicateException("A sensor available with this sensor id!");
        }
        try {
            Node node = DaoUtils.getNodeDao().getById(sensor.getNode().getId());
            //Take variable types reference (create() does not persist them)
            List<String> variableTypes = sensor.getVariableTypes();
            if (McObjectManager.getEngine(node.getGatewayTable().getId()).validate(sensor)) {
                DaoUtils.getSensorDao().create(sensor);
                GoogleAnalyticsApi.instance().trackSensorCreation("manual");
                // Re-read to pick up the generated id before attaching variables.
                sensor = DaoUtils.getSensorDao().get(sensor.getNode().getId(), sensor.getSensorId());
                // Update Variable Types
                sensor.setVariableTypes(variableTypes);
                //Update into database
                SensorUtils.updateSensorVariables(sensor);
            }
        } catch (Exception ex) {
            _logger.error("Exception,", ex);
            throw new McException(ex);
        }
    }

    /** Returns the given sensor variables converted to their JSON wrappers. */
    public List<SensorVariableJson> getVariables(List<Integer> ids) {
        List<SensorVariable> sensorVariables = DaoUtils.getSensorVariableDao().getAll(ids);
        List<SensorVariableJson> sensorVariableJson = new ArrayList<SensorVariableJson>();
        //Convert to SensorVariableJson
        if (sensorVariables != null) {
            for (SensorVariable sensorVariable : sensorVariables) {
                sensorVariableJson.add(new SensorVariableJson(sensorVariable));
            }
        }
        return sensorVariableJson;
    }

    /** Queries sensor variables and replaces the payload with JSON wrappers. */
    public QueryResponse getVariables(HashMap<String, Object> filters) {
        Query query = Query.get(filters);
        QueryResponse queryResponse = DaoUtils.getSensorVariableDao().getAll(query);
        if (queryResponse != null) {
            @SuppressWarnings("unchecked")
            List<SensorVariable> variables = (List<SensorVariable>) queryResponse.getData();
            List<SensorVariableJson> variablesJson = new ArrayList<SensorVariableJson>();
            for (SensorVariable variable : variables) {
                variablesJson.add(new SensorVariableJson(variable));
            }
            queryResponse.setData(variablesJson);
        }
        return queryResponse;
    }

    /** Returns one sensor variable as its JSON wrapper. */
    public SensorVariableJson getVariable(Integer id) {
        SensorVariable sensorVariable = DaoUtils.getSensorVariableDao().get(id);
        //Convert to SensorVariableJson
        return new SensorVariableJson(sensorVariable);
    }

    /**
     * Resolves the JSON wrapper to its database variable, copies the value and
     * delegates to {@link #sendPayload(SensorVariable)}.
     *
     * @throws McBadRequestException when the variable id is unknown
     */
    public String sendPayload(SensorVariableJson sensorVariableJson) throws McInvalidException, McBadRequestException {
        SensorVariable sensorVariable = DaoUtils.getSensorVariableDao().get(sensorVariableJson.getId());
        if (sensorVariable != null) {
            sensorVariable.setValue(String.valueOf(sensorVariableJson.getValue()));
            return sendPayload(sensorVariable);
        } else {
            throw new McBadRequestException("null not allowed");
        }
    }

    /**
     * Validates the variable's value against its metric type and forwards the
     * payload to the action engine.
     *
     * @throws McInvalidException when the value does not parse for the type
     * @throws McBadRequestException when the variable is null
     */
    public String sendPayload(SensorVariable sensorVariable) throws McInvalidException, McBadRequestException {
        if (sensorVariable != null) {
            switch (sensorVariable.getMetricType()) {
                case BINARY:
                    // Fix: was `McUtils.getBoolean(sensorVariable.getValue() == null)`,
                    // which passed a boolean into getBoolean and never validated the
                    // payload. Mirror the DOUBLE branch: reject values that do not
                    // parse as a boolean.
                    if (McUtils.getBoolean(sensorVariable.getValue()) == null) {
                        throw new McInvalidException("Invalid value: " + sensorVariable.getValue());
                    }
                    break;
                case DOUBLE:
                    if (McUtils.getDouble(sensorVariable.getValue()) == null) {
                        throw new McInvalidException("Invalid value: " + sensorVariable.getValue());
                    }
                    break;
                default:
                    break;
            }
            sensorVariable.setValue(String.valueOf(sensorVariable.getValue()));
            return McObjectManager.getMcActionEngine().sendPayload(sensorVariable);
        } else {
            throw new McBadRequestException("null not allowed");
        }
    }

    /**
     * Updates a sensor variable's settings. A metric-type change purges the
     * variable's existing metric data first, since old samples would no longer
     * match the new type.
     *
     * @throws McBadRequestException when the variable id is unknown
     */
    public void updateVariable(SensorVariableJson sensorVariableJson) throws McBadRequestException {
        SensorVariable sensorVariable = DaoUtils.getSensorVariableDao().get(sensorVariableJson.getId());
        if (sensorVariable != null) {
            if (!sensorVariable.getMetricType().getText().equalsIgnoreCase(sensorVariableJson.getMetricType())) {
                //clear existing data
                MetricsUtils.engine().purge(new ResourceModel(RESOURCE_TYPE.SENSOR_VARIABLE, sensorVariable));
                //Update new metric type
                sensorVariable.setMetricType(METRIC_TYPE.fromString(sensorVariableJson.getMetricType()));
            }
            //Update Unit type
            sensorVariable.setUnitType(sensorVariableJson.getUnitType());
            //Update sensor variable readOnly option
            sensorVariable.setReadOnly(sensorVariableJson.getReadOnly());
            //Update offset
            sensorVariable.setOffset(sensorVariableJson.getOffset());
            //Update priority
            sensorVariable.setPriority(sensorVariableJson.getPriority());
            //Update Graph settings
            sensorVariable.setProperties(sensorVariableJson.getProperties());
            //Update sensor variable
            DaoUtils.getSensorVariableDao().update(sensorVariable);
        } else {
            throw new McBadRequestException("null not allowed");
        }
    }

    /** Looks up a sensor by name inside the room addressed by its name path. */
    public Sensor getSensor(String sensorName, String... roomsName) {
        RoomApi roomApi = new RoomApi();
        Room room = roomApi.getRoom(roomsName);
        return getSensor(sensorName, room.getId());
    }

    /** Looks up a sensor by name inside the given room id. */
    public Sensor getSensor(String sensorName, Integer roomId) {
        return DaoUtils.getSensorDao().getByRoomId(sensorName, roomId);
    }

    /** Looks up a sensor by name that is not assigned to any room. */
    public Sensor getSensor(String sensorName) {
        return DaoUtils.getSensorDao().getByRoomId(sensorName, null);
    }

    /** Returns the sensor's variable of the given type, or null when absent. */
    private SensorVariable getSensorVariable(Sensor sensor, String variableType) {
        for (SensorVariable sv : sensor.getVariables()) {
            if (sv.getVariableType().getText().equalsIgnoreCase(variableType)) {
                return sv;
            }
        }
        return null;
    }

    /** Resolves a variable by sensor name, type and room name path. */
    public SensorVariable getSensorVariable(String sensorName, String variableType, String... roomsName) {
        return getSensorVariable(getSensor(sensorName, roomsName), variableType);
    }

    /** Resolves a variable by sensor name, type and room id. */
    public SensorVariable getSensorVariable(String sensorName, String variableType, Integer roomId) {
        return getSensorVariable(getSensor(sensorName, roomId), variableType);
    }

    /**
     * Sends a raw message through the node's gateway engine. Smart-sleep nodes
     * get the message queued for their next wake-up instead of immediate send.
     *
     * @throws McBadRequestException when the message fails validation
     */
    public void sendRawMessage(IMessage message) throws McBadRequestException {
        message.setTxMessage(true);
        if (message.isValid()) {
            // check does it a sleeping node or normal node
            Node node = DaoUtils.getNodeDao().get(message.getGatewayId(), message.getNodeEui());
            if (node != null && node.getSmartSleepEnabled()) {
                McObjectManager.getEngine(message.getGatewayId()).sendSleepNode(message);
            } else {
                McObjectManager.getEngine(message.getGatewayId()).send(message);
            }
        } else {
            throw new McBadRequestException("Required field is missing! " + message);
        }
    }

    /** Deletes the given sensor variables one by one, logging each deletion. */
    public void deleteSensorVariable(Integer... ids) {
        for (Integer id : ids) {
            _logger.info("Delete Sensor Variable initiated for the id:{}", id);
            DaoUtils.getSensorVariableDao().deleteById(id);
        }
    }

    /**
     * Purges metric data of one sensor variable according to the purge config.
     *
     * @throws McBadRequestException when the id is missing or unknown
     */
    public void purgeSensorVariable(ResourcePurgeConf purge) throws McBadRequestException {
        _logger.debug("{}", purge);
        if (purge.getId() == null) {
            throw new McBadRequestException("Required field is missing! " + purge);
        }
        SensorVariable sVar = DaoUtils.getSensorVariableDao().getById(purge.getId());
        if (sVar == null) {
            throw new McBadRequestException("Selected sensor variable is not found! " + purge);
        }
        MetricsUtils.engine().purge(new ResourceModel(RESOURCE_TYPE.SENSOR_VARIABLE, sVar), purge);
    }
}
| |
/**
* Copyright (C) 2013
* by 52 North Initiative for Geospatial Open Source Software GmbH
*
* Contact: Andreas Wytzisk
* 52 North Initiative for Geospatial Open Source Software GmbH
* Martin-Luther-King-Weg 24
* 48155 Muenster, Germany
* info@52north.org
*
* This program is free software; you can redistribute and/or modify it under
* the terms of the GNU General Public License version 2 as published by the
* Free Software Foundation.
*
* This program is distributed WITHOUT ANY WARRANTY; even without the implied
* WARRANTY OF MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with
* this program (see gnu-gpl v2.txt). If not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA or
* visit the Free Software Foundation web page, http://www.fsf.org.
*/
package org.n52.sos.service.operator;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
import org.n52.sos.exception.ConfigurationException;
import org.n52.sos.ogc.ows.OwsExceptionReport;
import org.n52.sos.request.operator.RequestOperatorRepository;
import org.n52.sos.util.AbstractConfiguringServiceLoaderRepository;
import org.n52.sos.util.CollectionHelper;
import org.n52.sos.util.MultiMaps;
import org.n52.sos.util.SetMultiMap;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
/**
* @author Christian Autermann <c.autermann@52north.org>
*
* @since 4.0.0
*/
public class ServiceOperatorRepository extends AbstractConfiguringServiceLoaderRepository<ServiceOperator> {
    private static ServiceOperatorRepository instance;

    /**
     * Implemented ServiceOperator
     */
    private final Map<ServiceOperatorKey, ServiceOperator> serviceOperators = Maps.newHashMap();

    /** supported SOS versions */
    private final SetMultiMap<String, String> supportedVersions = MultiMaps.newSetMultiMap();

    /** supported services */
    private final Set<String> supportedServices = Sets.newHashSet();

    /**
     * Load implemented request listener
     *
     * @throws ConfigurationException
     *             If no request listener is implemented
     */
    private ServiceOperatorRepository() throws ConfigurationException {
        super(ServiceOperator.class, false);
        load(false);
    }

    /**
     * Returns the singleton instance, creating it lazily on first use.
     * Fix: made {@code synchronized} — unsynchronized lazy initialization
     * could create two instances (and publish a partially constructed one)
     * under concurrent first access.
     */
    public static synchronized ServiceOperatorRepository getInstance() {
        if (instance == null) {
            instance = new ServiceOperatorRepository();
        }
        return instance;
    }

    /**
     * Load the implemented request listener and add them to a map with
     * operation name as key
     *
     * @param implementations
     *            the loaded implementations
     *
     * @throws ConfigurationException
     *             If no request listener is implemented
     */
    @Override
    protected void processConfiguredImplementations(Set<ServiceOperator> implementations)
            throws ConfigurationException {
        // Rebuild all derived collections from scratch on every (re)load.
        this.serviceOperators.clear();
        this.supportedServices.clear();
        this.supportedVersions.clear();
        for (ServiceOperator so : implementations) {
            this.serviceOperators.put(so.getServiceOperatorKey(), so);
            this.supportedVersions.add(so.getServiceOperatorKey().getService(), so.getServiceOperatorKey()
                    .getVersion());
            this.supportedServices.add(so.getServiceOperatorKey().getService());
        }
    }

    /**
     * Update/reload the implemented request listener
     *
     * @throws ConfigurationException
     *             If no request listener is implemented
     */
    @Override
    public void update() throws ConfigurationException {
        // Request operators must be refreshed first; they feed this repository.
        RequestOperatorRepository.getInstance().update();
        super.update();
    }

    /**
     * @return the implemented request listener (unmodifiable view)
     */
    public Map<ServiceOperatorKey, ServiceOperator> getServiceOperators() {
        return Collections.unmodifiableMap(serviceOperators);
    }

    /** @return the keys of all registered service operators */
    public Set<ServiceOperatorKey> getServiceOperatorKeyTypes() {
        return getServiceOperators().keySet();
    }

    /** @return the operator registered for the given key, or null */
    public ServiceOperator getServiceOperator(ServiceOperatorKey sok) {
        return serviceOperators.get(sok);
    }

    /**
     * @param service
     *            the service
     * @param version
     *            the version
     * @return the implemented request listener
     *
     *
     * @throws OwsExceptionReport
     */
    public ServiceOperator getServiceOperator(String service, String version) throws OwsExceptionReport {
        return getServiceOperator(new ServiceOperatorKey(service, version));
    }

    /**
     * @return the supportedVersions
     *
     * @deprecated use getSupporteVersions(String service)
     */
    @Deprecated
    public Set<String> getSupportedVersions() {
        return getAllSupportedVersions();
    }

    /** @return the union of supported versions across all services */
    public Set<String> getAllSupportedVersions() {
        return CollectionHelper.union(this.supportedVersions.values());
    }

    /**
     * @param service
     *            the service
     * @return the supportedVersions (empty set for unknown services)
     *
     */
    public Set<String> getSupportedVersions(String service) {
        if (isServiceSupported(service)) {
            return Collections.unmodifiableSet(supportedVersions.get(service));
        }
        return Sets.newHashSet();
    }

    /**
     * @param version
     *            the version
     * @return the supportedVersions
     *
     * @deprecated use isVersionSupported(String service, String version)
     */
    @Deprecated
    public boolean isVersionSupported(String version) {
        return getAllSupportedVersions().contains(version);
    }

    /**
     * @param service
     *            the service
     * @param version
     *            the version
     * @return whether the version is supported for the given service
     *
     */
    public boolean isVersionSupported(String service, String version) {
        return isServiceSupported(service) && supportedVersions.get(service).contains(version);
    }

    /**
     * @return the supported services (unmodifiable view)
     */
    public Set<String> getSupportedServices() {
        return Collections.unmodifiableSet(this.supportedServices);
    }

    /** @return whether any operator is registered for the given service */
    public boolean isServiceSupported(String service) {
        return this.supportedServices.contains(service);
    }
}
| |
package org.moxie.tests;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import junit.framework.TestCase;
import org.moxie.ArtifactVersion;
/**
* Test DefaultArtifactVersion.
*
* @author <a href="mailto:brett@apache.org">Brett Porter</a>
*/
public class ArtifactVersionTest
extends TestCase
{
/** Builds the {@link ArtifactVersion} under test from its string form. */
private ArtifactVersion newArtifactVersion( String version )
{
    final ArtifactVersion parsed = new ArtifactVersion( version );
    return parsed;
}
/**
 * Parses {@code version} and asserts every extracted component (major, minor,
 * incremental, build number, qualifier) plus the round-trip toString(). Each
 * assertion message embeds the full parse result for readable failures.
 */
private void checkVersionParsing( String version, int major, int minor, int incremental, int buildnumber,
                                  String qualifier )
{
    final ArtifactVersion actual = newArtifactVersion( version );
    final StringBuilder prefix = new StringBuilder();
    prefix.append( "'" ).append( version )
          .append( "' parsed as ('" ).append( actual.getMajorVersion() )
          .append( "', '" ).append( actual.getMinorVersion() )
          .append( "', '" ).append( actual.getIncrementalVersion() )
          .append( "', '" ).append( actual.getBuildNumber() )
          .append( "', '" ).append( actual.getQualifier() )
          .append( "'), " );
    final String parsed = prefix.toString();
    assertEquals( parsed + "check major version", major, actual.getMajorVersion() );
    assertEquals( parsed + "check minor version", minor, actual.getMinorVersion() );
    assertEquals( parsed + "check incremental version", incremental, actual.getIncrementalVersion() );
    assertEquals( parsed + "check build number", buildnumber, actual.getBuildNumber() );
    assertEquals( parsed + "check qualifier", qualifier, actual.getQualifier() );
    assertEquals( "check " + version + " string value", version, actual.toString() );
}
/**
 * Table of parsing cases: each line asserts the (major, minor, incremental,
 * buildNumber, qualifier) decomposition of one version string. A version that
 * does not fit the numeric scheme is kept whole as the qualifier.
 */
public void testVersionParsing()
{
    checkVersionParsing( "1" , 1, 0, 0, 0, null );
    checkVersionParsing( "1.2" , 1, 2, 0, 0, null );
    checkVersionParsing( "1.2.3" , 1, 2, 3, 0, null );
    checkVersionParsing( "1.2.3-1" , 1, 2, 3, 1, null );
    checkVersionParsing( "1.2.3-alpha-1" , 1, 2, 3, 0, "alpha-1" );
    checkVersionParsing( "1.2-alpha-1" , 1, 2, 0, 0, "alpha-1" );
    checkVersionParsing( "1.2-alpha-1-20050205.060708-1" , 1, 2, 0, 0, "alpha-1-20050205.060708-1" );
    checkVersionParsing( "RELEASE" , 0, 0, 0, 0, "RELEASE" );
    checkVersionParsing( "2.0-1" , 2, 0, 0, 1, null );
    // 0 at the beginning of a number has a special handling: the whole
    // version string is demoted to a qualifier instead of parsed numerically.
    checkVersionParsing( "02" , 0, 0, 0, 0, "02" );
    checkVersionParsing( "0.09" , 0, 0, 0, 0, "0.09" );
    checkVersionParsing( "0.2.09" , 0, 0, 0, 0, "0.2.09" );
    checkVersionParsing( "2.0-01" , 2, 0, 0, 0, "01" );
    // version schemes not really supported: fully transformed as qualifier
    // (trailing letters, four-part versions, empty segments, etc.)
    checkVersionParsing( "1.0.1b" , 0, 0, 0, 0, "1.0.1b" );
    checkVersionParsing( "1.0M2" , 0, 0, 0, 0, "1.0M2" );
    checkVersionParsing( "1.0RC2" , 0, 0, 0, 0, "1.0RC2" );
    checkVersionParsing( "1.7.3.0" , 0, 0, 0, 0, "1.7.3.0" );
    checkVersionParsing( "1.7.3.0-1" , 0, 0, 0, 0, "1.7.3.0-1" );
    checkVersionParsing( "PATCH-1193602" , 0, 0, 0, 0, "PATCH-1193602" );
    checkVersionParsing( "5.0.0alpha-2006020117" , 0, 0, 0, 0, "5.0.0alpha-2006020117" );
    checkVersionParsing( "1.0.0.-SNAPSHOT", 0, 0, 0, 0, "1.0.0.-SNAPSHOT" );
    checkVersionParsing( "1..0-SNAPSHOT", 0, 0, 0, 0, "1..0-SNAPSHOT" );
    checkVersionParsing( "1.0.-SNAPSHOT", 0, 0, 0, 0, "1.0.-SNAPSHOT" );
    checkVersionParsing( ".1.0-SNAPSHOT", 0, 0, 0, 0, ".1.0-SNAPSHOT" );
    checkVersionParsing( "1.2.3.200705301630" , 0, 0, 0, 0, "1.2.3.200705301630" );
    checkVersionParsing( "1.2.3-200705301630" , 1, 2, 3, 0, "200705301630" );
}
public void testVersionComparing()
{
assertVersionEqual( "1", "1" );
assertVersionOlder( "1", "2" );
assertVersionOlder( "1.5", "2" );
assertVersionOlder( "1", "2.5" );
assertVersionEqual( "1", "1.0" );
assertVersionEqual( "1", "1.0.0" );
assertVersionOlder( "1.0", "1.1" );
assertVersionOlder( "1.1", "1.2" );
assertVersionOlder( "1.0.0", "1.1" );
assertVersionOlder( "1.1", "1.2.0" );
assertVersionOlder( "1.0-alpha-1", "1.0" );
assertVersionOlder( "1.0-alpha-1", "1.0-alpha-2" );
assertVersionOlder( "1.0-alpha-2", "1.0-alpha-15" );
assertVersionOlder( "1.0-alpha-1", "1.0-beta-1" );
assertVersionOlder( "1.0-beta-1", "1.0-SNAPSHOT" );
assertVersionOlder( "1.0-SNAPSHOT", "1.0" );
assertVersionOlder( "1.0-alpha-1-SNAPSHOT", "1.0-alpha-1" );
assertVersionOlder( "1.0", "1.0-1" );
assertVersionOlder( "1.0-1", "1.0-2" );
assertVersionEqual( "2.0-0", "2.0" );
assertVersionOlder( "2.0", "2.0-1" );
assertVersionOlder( "2.0.0", "2.0-1" );
assertVersionOlder( "2.0-1", "2.0.1" );
assertVersionOlder( "2.0.1-klm", "2.0.1-lmn" );
assertVersionOlder( "2.0.1", "2.0.1-xyz" );
assertVersionOlder( "2.0.1-xyz-1", "2.0.1-1-xyz" );
assertVersionOlder( "2.0.1", "2.0.1-123" );
assertVersionOlder( "2.0.1-xyz", "2.0.1-123" );
assertVersionOlder( "1.2.3-10000000000", "1.2.3-10000000001" );
assertVersionOlder( "1.2.3-1", "1.2.3-10000000001" );
assertVersionOlder( "2.3.0-v200706262000", "2.3.0-v200706262130" ); // org.eclipse:emf:2.3.0-v200706262000
// org.eclipse.wst.common_core.feature_2.0.0.v200706041905-7C78EK9E_EkMNfNOd2d8qq
assertVersionOlder( "2.0.0.v200706041905-7C78EK9E_EkMNfNOd2d8qq", "2.0.0.v200706041906-7C78EK9E_EkMNfNOd2d8qq" );
}
public void testVersionSnapshotComparing()
{
assertVersionEqual( "1-SNAPSHOT", "1-SNAPSHOT" );
assertVersionOlder( "1-SNAPSHOT", "2-SNAPSHOT" );
assertVersionOlder( "1.5-SNAPSHOT", "2-SNAPSHOT" );
assertVersionOlder( "1-SNAPSHOT", "2.5-SNAPSHOT" );
assertVersionEqual( "1-SNAPSHOT", "1.0-SNAPSHOT" );
assertVersionEqual( "1-SNAPSHOT", "1.0.0-SNAPSHOT" );
assertVersionOlder( "1.0-SNAPSHOT", "1.1-SNAPSHOT" );
assertVersionOlder( "1.1-SNAPSHOT", "1.2-SNAPSHOT" );
assertVersionOlder( "1.0.0-SNAPSHOT", "1.1-SNAPSHOT" );
assertVersionOlder( "1.1-SNAPSHOT", "1.2.0-SNAPSHOT" );
//assertVersionOlder( "1.0-alpha-1-SNAPSHOT", "1.0-SNAPSHOT" );
assertVersionOlder( "1.0-alpha-1-SNAPSHOT", "1.0-alpha-2-SNAPSHOT" );
assertVersionOlder( "1.0-alpha-1-SNAPSHOT", "1.0-beta-1-SNAPSHOT" );
assertVersionOlder( "1.0-beta-1-SNAPSHOT", "1.0-SNAPSHOT-SNAPSHOT" );
assertVersionOlder( "1.0-SNAPSHOT-SNAPSHOT", "1.0-SNAPSHOT" );
assertVersionOlder( "1.0-alpha-1-SNAPSHOT-SNAPSHOT", "1.0-alpha-1-SNAPSHOT" );
assertVersionOlder( "1.0-SNAPSHOT", "1.0-1-SNAPSHOT" );
assertVersionOlder( "1.0-1-SNAPSHOT", "1.0-2-SNAPSHOT" );
//assertVersionEqual( "2.0-0-SNAPSHOT", "2.0-SNAPSHOT" );
assertVersionOlder( "2.0-SNAPSHOT", "2.0-1-SNAPSHOT" );
assertVersionOlder( "2.0.0-SNAPSHOT", "2.0-1-SNAPSHOT" );
assertVersionOlder( "2.0-1-SNAPSHOT", "2.0.1-SNAPSHOT" );
assertVersionOlder( "2.0.1-klm-SNAPSHOT", "2.0.1-lmn-SNAPSHOT" );
// assertVersionOlder( "2.0.1-xyz-SNAPSHOT", "2.0.1-SNAPSHOT" );
assertVersionOlder( "2.0.1-SNAPSHOT", "2.0.1-123-SNAPSHOT" );
assertVersionOlder( "2.0.1-xyz-SNAPSHOT", "2.0.1-123-SNAPSHOT" );
}
public void testSnapshotVsReleases()
{
assertVersionOlder( "1.0-RC1", "1.0-SNAPSHOT" );
assertVersionOlder( "1.0-rc1", "1.0-SNAPSHOT" );
assertVersionOlder( "1.0-rc-1", "1.0-SNAPSHOT" );
}
public void testHashCode()
{
ArtifactVersion v1 = newArtifactVersion( "1" );
ArtifactVersion v2 = newArtifactVersion( "1.0" );
assertEquals( true, v1.equals( v2 ) );
assertEquals( v1.hashCode(), v2.hashCode() );
}
public void testEqualsNullSafe()
{
assertFalse( newArtifactVersion( "1" ).equals( null ) );
}
public void testEqualsTypeSafe()
{
assertFalse( newArtifactVersion( "1" ).equals( "non-an-artifact-version-instance" ) );
}
private void assertVersionOlder( String left, String right )
{
assertTrue( left + " should be older than " + right,
newArtifactVersion( left ).compareTo( newArtifactVersion( right ) ) < 0 );
assertTrue( right + " should be newer than " + left,
newArtifactVersion( right ).compareTo( newArtifactVersion( left ) ) > 0 );
}
private void assertVersionEqual( String left, String right )
{
assertTrue( left + " should be equal to " + right,
newArtifactVersion( left ).compareTo( newArtifactVersion( right ) ) == 0 );
assertTrue( right + " should be equal to " + left,
newArtifactVersion( right ).compareTo( newArtifactVersion( left ) ) == 0 );
}
}
| |
/*
* Gray8GaussDeblurHoriz.java
*
* Created on November 3, 2007, 3:07 PM
*
* To change this template, choose Tools | Template Manager
* and open the template in the editor.
*
* Copyright 2007 by Jon A. Webb
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the Lesser GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package jjil.algorithm;
import jjil.core.Complex;
import jjil.core.Complex32Image;
import jjil.core.Error;
import jjil.core.Gray8Image;
import jjil.core.Image;
import jjil.core.PipelineStage;
/**
* Uses deconvolution to remove blur from a Gray8Image. The blur removed is a
* horizontal Gaussian blur with a given standard deviation. The background noise
* level in the input image can be adjusted. The output Gray8Image is rescaled so the
* maximum and minimum values fill the range from Byte.MIN_VALUE to Byte.MAX_VALUE.
* @author webb
*/
public class Gray8GaussDeblurHoriz extends PipelineStage {
    // Noise floor: frequency-domain coefficients <= this value are not divided by.
    private int nNoise;
    // Index into rxnCoeffs; standard deviation of the blur, scaled by 100.
    private int nStdDev;
    // Forward FFT (Gray8Image -> Complex32Image).
    Gray8Fft fft;
    // Inverse FFT, configured to rescale output to the full byte range.
    Complex32IFft ifft;
    /**
     * These coefficients are the Fourier transform of the Gaussian
     * f(t) = exp(-t**2 / (2*sigma**2))
     * which is the Gaussian
     * F(k) = sigma * sqrt(2*pi) * exp(-2*(pi*k*sigma)**2)
     * where t ranges from 0.01 to 1.0 (t is multiplied by 100).
     * They have been multiplied by 256 and rounded to the nearest integer.
     * Coefficients of zero have been dropped.
     * They were computed by putting the Excel macro
     * =ROW(A1)/100*SQRT(2*PI())*EXP(-2*POWER(PI()*(COLUMN(A1)-1)*ROW(A1)/100,2))
     * in cell A1, filling to create a table with 100 rows, then rounding with
     * =ROUND(A1*256,0)
     */
    private int rxnCoeffs[][] = {
        {6,6,6,6,6,6,6,6,6,5,5,5,5,5,4,4,4,4,3,3,3,3,2,2,2,2,2,2,1,1,1,1,1,1,1,1},
        {13,13,12,12,11,11,10,9,8,7,6,5,4,3,3,2,2,1,1,1,1},
        {19,19,18,16,14,12,10,8,6,5,3,2,1,1,1},
        {26,25,23,19,15,12,8,5,3,2,1,1},
        {32,31,26,21,15,9,5,3,1,1},
        {39,36,29,20,12,7,3,1},
        {45,41,31,19,10,4,1},
        {51,45,31,16,7,2,1},
        {58,49,30,14,4,1},
        {64,53,29,11,3},
        {71,56,27,8,2},
        {77,58,25,6,1},
        {83,60,22,4},
        {90,61,19,3},
        {96,62,16,2},
        {103,62,14,1},
        {109,62,11,1},
        {116,61,9},
        {122,60,7},
        {128,58,5},
        {135,56,4},
        {141,54,3},
        {148,52,2},
        {154,49,2},
        {160,47,1},
        {167,44,1},
        {173,41,1},
        {180,38},
        {186,35},
        {193,33},
        {199,30},
        {205,27},
        {212,25},
        {218,22},
        {225,20},
        {231,18},
        {237,16},
        {244,14},
        {250,12},
        {257,11},
        {263,10},
        {270,8},
        {276,7},
        {282,6},
        {289,5},
        {295,5},
        {302,4},
        {308,3},
        {314,3},
        {321,2},
        {327,2},
        {334,2},
        {340,1},
        {347,1},
        {353,1},
        {359,1},
        {366,1},
        {372},
        {379},
        {385},
        {391},
        {398},
        {404},
        {411},
        {417},
        {424},
        {430},
        {436},
        {443},
        {449},
        {456},
        {462},
        {468},
        {475},
        {481},
        {488},
        {494},
        {501},
        {507},
        {513},
        {520},
        {526},
        {533},
        {539},
        {545},
        {552},
        {558},
        {565},
        {571},
        {578},
        {584},
        {590},
        {597},
        {603},
        {610},
        {616},
        {622},
        {629},
        {635},
    };
    /**
     * Creates a new instance of Gray8GaussDeblurHoriz.
     * @param nStdDev Standard deviation of the Gaussian blur operator to deblur with. The
     * value is multiplied by 100 so a value of 5 corresponds to a standard deviation
     * of the Gaussian of 0.05.
     * @param nNoise The expected noise level in the input image. The deconvolution has the
     * potential to amplify noise levels since it is a high pass filter. The noise
     * parameter limits this by not dividing by any Gaussian element (in the
     * frequency domain) less than this value. The Gaussian elements have been scaled
     * by 256, so a value equal to, say, 64 keeps the maximum amplification of the
     * deconvolution less than 256/64 = 4.
     * @throws jjil.core.Error if the standard deviation parameter is out of range.
     */
    public Gray8GaussDeblurHoriz(int nStdDev, int nNoise) throws jjil.core.Error {
        setStdDev(nStdDev);
        this.nNoise = nNoise;
        this.fft = new Gray8Fft();
        this.ifft = new Complex32IFft(true);
    }
    /**
     * Deblurs an input Gray8Image which has been blurred by a horizontal Gaussian
     * of the given standard deviation and which has a background noise level less
     * than the given level.
     * @param im Input Gray8Image.
     * @throws jjil.core.Error if the input is not a Gray8Image or is not square.
     */
    public void push(Image im) throws jjil.core.Error {
        // FFT requires a square image.
        if (im.getWidth() != im.getHeight()) {
            throw new Error(
                            Error.PACKAGE.ALGORITHM,
                            ErrorCodes.IMAGE_NOT_SQUARE,
                            im.toString(),
                            null,
                            null);
        }
        if (!(im instanceof Gray8Image)) {
            throw new Error(
                            Error.PACKAGE.ALGORITHM,
                            ErrorCodes.IMAGE_NOT_GRAY8IMAGE,
                            im.toString(),
                            null,
                            null);
        }
        this.fft.push(im);
        Complex32Image cxmIm = (Complex32Image) this.fft.getFront();
        Complex cxIn[] = cxmIm.getData();
        Complex32Image cxmResult = new Complex32Image(im.getWidth(), im.getHeight());
        Complex cxOut[] = cxmResult.getData();
        // compute inverse filter: divide each frequency element by the Gaussian's
        // transform, except where the coefficient is at or below the noise floor.
        int rnCoeff[] = this.rxnCoeffs[this.nStdDev];
        for (int i=0; i<cxmIm.getHeight(); i++) {
            int nRow = i * cxmIm.getWidth();
            for (int j=0; j<cxmIm.getWidth(); j++) {
                int nCoeff;
                // The FFT spectrum is symmetric; columns past the midpoint mirror
                // the low frequencies.
                if (j < cxmIm.getWidth()/2) {
                    nCoeff = j;
                } else {
                    nCoeff = cxmIm.getWidth() - j;
                }
                if (nCoeff < rnCoeff.length &&
                    rnCoeff[nCoeff] > this.nNoise) {
                    // lsh(8) compensates for the coefficients being scaled by 256.
                    cxOut[nRow + j] = cxIn[nRow + j].lsh(8).div(rnCoeff[nCoeff]);
                } else {
                    // Coefficient dropped (zero) or below noise floor: pass through.
                    cxOut[nRow + j] = cxIn[nRow + j];
                }
            }
        }
        // inverse FFT to get result
        this.ifft.push(cxmResult);
        super.setOutput(this.ifft.getFront());
    }
    /**
     * Changes current standard deviation value.
     * @param nStdDev Input standard deviation, multiplied by 100. Must be a valid
     * index into the coefficient table, i.e. 0 &lt;= nStdDev &lt; 100.
     * @throws jjil.core.Error if the parameter is out of range.
     */
    public void setStdDev(int nStdDev) throws jjil.core.Error {
        // Valid indices are 0 .. rxnCoeffs.length-1. The previous check used
        // '>' and so accepted nStdDev == rxnCoeffs.length, which would cause an
        // ArrayIndexOutOfBoundsException later in push().
        if (nStdDev < 0 || nStdDev >= this.rxnCoeffs.length) {
            throw new Error(
                            Error.PACKAGE.ALGORITHM,
                            ErrorCodes.PARAMETER_OUT_OF_RANGE,
                            Integer.toString(nStdDev),
                            Integer.toString(0),
                            Integer.toString(this.rxnCoeffs.length - 1));
        }
        this.nStdDev = nStdDev;
    }
}
| |
package com.reason.lang.rescript;
import com.intellij.psi.*;
import com.intellij.psi.util.*;
import com.reason.lang.core.*;
import com.reason.lang.core.psi.*;
import com.reason.lang.core.psi.impl.*;
import com.reason.lang.reason.*;
import java.util.*;
@SuppressWarnings("ConstantConditions")
/**
 * Parser tests for JSX-style tags in ReScript source: tag start/body/close
 * decomposition, dotted component names, property (attribute) parsing,
 * optional props, and fragments. Each test parses a snippet and inspects the
 * resulting PSI tree.
 */
@SuppressWarnings("ConstantConditions")
public class JsxParsingTest extends ResParsingTestCase {
    // A simple lower-case tag splits into start tag, body text, and close tag.
    public void test_empty_tag() {
        PsiTag e = (PsiTag) firstElement(parseCode("<div>children</div>"));
        PsiTagStart tag = PsiTreeUtil.findChildOfType(e, PsiTagStart.class);
        assertEquals("<div>", tag.getText());
        assertNotNull(ORUtil.nextSiblingWithTokenType(tag.getFirstChild(), m_types.TAG_GT));
        assertEquals("children", PsiTreeUtil.findChildOfType(e, PsiTagBody.class).getText());
        assertEquals("</div>", PsiTreeUtil.findChildOfType(e, PsiTagClose.class).getText());
    }
    // Upper-case component name is exposed via the start tag's name identifier,
    // even with a nested tag inside a prop value.
    public void test_tag_name() {
        PsiTag e = (PsiTag) firstElement(parseCode("<Comp render={() => <Another/>}/>"));
        PsiTagStart tag = PsiTreeUtil.findChildOfType(e, PsiTagStart.class);
        assertEquals("Comp", tag.getNameIdentifier().getText());
    }
    // A self-closing child tag belongs to the outer tag's body, not its close tag.
    public void test_inner_closing_tag() {
        PsiTag e = (PsiTag) firstElement(parseCode("<div><div/></div>"));
        assertEquals("<div>", PsiTreeUtil.findChildOfType(e, PsiTagStart.class).getText());
        assertEquals("<div/>", PsiTreeUtil.findChildOfType(e, PsiTagBody.class).getText());
        assertEquals("</div>", PsiTreeUtil.findChildOfType(e, PsiTagClose.class).getText());
    }
    // Nested open/close pairs: only the outermost close tag terminates the outer tag.
    public void test_multiple_closing_tag() {
        PsiTag e = (PsiTag) firstElement(parseCode("<div><div></div></div>"));
        assertEquals("<div>", PsiTreeUtil.findChildOfType(e, PsiTagStart.class).getText());
        assertEquals("<div></div>", PsiTreeUtil.findChildOfType(e, PsiTagBody.class).getText());
        assertEquals("</div>", PsiTreeUtil.findChildOfType(e, PsiTagClose.class).getText());
    }
    // "option" must be parsed as a tag name here, not as a language keyword.
    public void test_option_tag() {
        PsiTag e = (PsiTag) firstElement(parseCode("<option>children</option>"));
        PsiTagStart tag = PsiTreeUtil.findChildOfType(e, PsiTagStart.class);
        assertEquals("<option>", tag.getText());
        assertNotNull(ORUtil.nextSiblingWithTokenType(tag.getFirstChild(), m_types.TAG_GT));
        assertEquals("children", PsiTreeUtil.findChildOfType(e, PsiTagBody.class).getText());
        assertEquals("</option>", PsiTreeUtil.findChildOfType(e, PsiTagClose.class).getText());
    }
    public void test_option_closeable_tag() {
        // option here is not a Rescript keyword
        PsiLet let = first(letExpressions(parseCode("let _ = <option className/>")));
        PsiTagStart jsx = first(PsiTreeUtil.findChildrenOfType(let, PsiTagStart.class));
        assertNotNull(jsx);
    }
    public void test_tag_name_with_dot() {
        // Container.Test is a dotted module path; each segment must get a TAG_NAME
        // node and the name identifier is the last (upper) segment.
        PsiLet let = first(letExpressions(parseCode("let _ = <Container.Test></Container.Test>")));
        PsiTagStart tagStart = first(PsiTreeUtil.findChildrenOfType(let, PsiTagStart.class));
        assertInstanceOf(tagStart.getNameIdentifier(), PsiUpperSymbol.class);
        assertEquals("Test", tagStart.getNameIdentifier().getText());
        PsiElement nextSibling = tagStart.getFirstChild().getNextSibling();
        assertEquals(m_types.TAG_NAME, nextSibling.getFirstChild().getNode().getElementType());
        nextSibling = nextSibling.getNextSibling().getNextSibling();
        assertEquals(m_types.TAG_NAME, nextSibling.getFirstChild().getNode().getElementType());
        PsiTagClose tagClose = first(PsiTreeUtil.findChildrenOfType(let, PsiTagClose.class));
        nextSibling = tagClose.getFirstChild().getNextSibling();
        assertEquals(m_types.TAG_NAME, nextSibling.getFirstChild().getNode().getElementType());
        nextSibling = nextSibling.getNextSibling().getNextSibling();
        assertEquals(m_types.TAG_NAME, nextSibling.getFirstChild().getNode().getElementType());
    }
    // Parenthesized and dotted prop values each stay inside their own property node.
    public void test_tag_prop_with_paren() {
        PsiTag tag = (PsiTag) firstElement(parseCode("<div style=(x) onFocus=a11y.onFocus/>"));
        Collection<PsiTagProperty> properties =
            PsiTreeUtil.findChildrenOfType(tag, PsiTagProperty.class);
        assertEquals(2, properties.size());
        Iterator<PsiTagProperty> itProperties = properties.iterator();
        assertEquals("style=(x)", itProperties.next().getText());
        assertEquals("onFocus=a11y.onFocus", itProperties.next().getText());
    }
    // A dotted value (Styles.link) must not swallow the following prop names.
    public void test_tag_props_with_dot() {
        PsiTag e = (PsiTag) firstElement(parseCode("<a className=Styles.link href=h download=d></a>"));
        List<PsiTagProperty> props = new ArrayList<>(e.getProperties());
        assertSize(3, props);
        assertEquals("className", props.get(0).getName());
        assertEquals("Styles.link", props.get(0).getValue().getText());
        assertEquals("href", props.get(1).getName());
        assertEquals("h", props.get(1).getValue().getText());
        assertEquals("download", props.get(2).getName());
        assertEquals("d", props.get(2).getValue().getText());
    }
    // '?' marks optional props; it must not be parsed as a ternary operator.
    public void test_optional_prop() {
        PsiTag e = (PsiTag) firstElement(parseCode("<div ?layout ?style onClick=?cb ?other></div>"));
        List<PsiTagProperty> props = new ArrayList<>(e.getProperties());
        assertSize(4, props);
        assertEquals("?layout", props.get(0).getText());
        assertEquals("layout", props.get(0).getName());
        assertEquals("?style", props.get(1).getText());
        assertEquals("onClick=?cb", props.get(2).getText());
        assertEquals("?other", props.get(3).getText());
        assertNull(PsiTreeUtil.findChildOfType(e, PsiTernary.class));
    }
    // Same optional-prop handling for a self-closing tag.
    public void test_optional_prop_autoclose() {
        PsiTag e = (PsiTag) firstElement(parseCode("<div ?layout ?style onClick=?cb ?other/>"));
        List<PsiTagProperty> props = new ArrayList<>(e.getProperties());
        assertSize(4, props);
        assertEquals("?layout", props.get(0).getText());
        assertEquals("layout", props.get(0).getName());
        assertEquals("?style", props.get(1).getText());
        assertEquals("style", props.get(1).getName());
        assertEquals("onClick=?cb", props.get(2).getText());
        assertEquals("?other", props.get(3).getText());
        assertEquals("other", props.get(3).getName());
        assertNull(PsiTreeUtil.findChildOfType(e, PsiTernary.class));
    }
    // Local open syntax Module.(expr) used as a prop value still yields a property value node.
    public void test_tag_props_with_local_open() {
        PsiTag e = (PsiTag) firstElement(parseCode("<Icon width=Dimensions.(3->px) height=Dimensions.(2->rem)>"));
        List<PsiTagProperty> props = new ArrayList<>(e.getProperties());
        assertSize(2, props);
        assertNotNull(PsiTreeUtil.findChildrenOfType(props.get(0), PsiTagPropertyValue.class));
        assertNotNull(PsiTreeUtil.findChildrenOfType(props.get(1), PsiTagPropertyValue.class));
    }
    // A tag inside one module must not break parsing of the following module.
    public void test_tag_chaining() {
        Collection<PsiModule> psiModules =
            moduleExpressions(
                parseCode(
                    "module GalleryItem = { let make = () => { let x = <div/>; }; };\nmodule GalleryContainer = {};"));
        assertEquals(2, psiModules.size());
    }
    // A bare (value-less) prop followed by a valued prop still yields two properties.
    public void test_incorrect_prop() {
        PsiTag e = (PsiTag) firstElement(parseCode("<MyComp prunningProp prop=1/>"));
        Collection<PsiTagProperty> properties = PsiTreeUtil.findChildrenOfType(e, PsiTagProperty.class);
        assertEquals(2, properties.size());
    }
    // Braced prop values containing whole nested tags stay within their property.
    public void test_prop02() {
        PsiTag e =
            (PsiTag)
                firstElement(
                    parseCode(
                        "<Splitter left={<NotificationsList notifications />} right={<div> {ReasonReact.string(\"switch inside\")} </div>}/>"));
        List<PsiTagProperty> properties = ((PsiTagStart) e.getFirstChild()).getProperties();
        assertEquals(2, properties.size());
        assertEquals("{<NotificationsList notifications />}", properties.get(0).getValue().getText());
        assertEquals(
            "{<div> {ReasonReact.string(\"switch inside\")} </div>}",
            properties.get(1).getValue().getText());
    }
    // Braced function-call value followed by a string-literal value.
    public void test_prop03() {
        PsiTag e =
            (PsiTag)
                firstElement(
                    parseCode(
                        "<PageContentGrid height={computePageHeight(miniDashboardHeight)} title=\"X\"/>"));
        List<PsiTagProperty> properties = ((PsiTagStart) e.getFirstChild()).getProperties();
        assertEquals(2, properties.size());
        assertEquals(
            "{computePageHeight(miniDashboardHeight)}", properties.get(0).getValue().getText());
        assertEquals("\"X\"", properties.get(1).getValue().getText());
    }
    // Array-literal ([|...|]) prop value parses as a single property value.
    public void test_prop04() {
        PsiTag e = (PsiTag) firstElement(parseCode("<Icon colors=[|white, red|] />"));
        List<PsiTagProperty> properties = ((PsiTagStart) e.getFirstChild()).getProperties();
        assertEquals(1, properties.size());
        assertEquals("[|white, red|]", properties.get(0).getValue().getText());
    }
    // "ref" must be treated as a prop name, not as the ref keyword.
    public void test_prop_ref() {
        PsiTag e = (PsiTag) firstElement(parseCode("<div ref={ReactDOMRe.Ref.domRef(formRef)}/>"));
        Collection<PsiTagProperty> properties = PsiTreeUtil.findChildrenOfType(e, PsiTagProperty.class);
        PsiTagProperty prop = properties.iterator().next();
        assertEquals("ref={ReactDOMRe.Ref.domRef(formRef)}", prop.getText());
    }
    // Empty fragment <></> still produces start and close tag nodes.
    public void test_fragment() {
        PsiTag e = (PsiTag) firstElement(parseCode("<></>"));
        assertEquals("<></>", e.getText());
        assertNotNull(PsiTreeUtil.findChildOfType(e, PsiTagStart.class));
        assertNotNull(PsiTreeUtil.findChildOfType(e, PsiTagClose.class));
    }
}
| |
/**
* Licensed to Cloudera, Inc. under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Cloudera, Inc. licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cloudera.flume.agent;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import org.apache.thrift.transport.TTransportException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.cloudera.flume.conf.FlumeConfigData;
import com.cloudera.flume.conf.FlumeConfiguration;
import com.cloudera.flume.handlers.endtoend.AckListener;
import com.cloudera.flume.reporter.ReportEvent;
import com.cloudera.util.FixedPeriodBackoff;
import com.cloudera.util.Pair;
import com.cloudera.util.ResultRetryable;
import com.cloudera.util.RetryHarness;
import com.google.common.base.Preconditions;
/**
* This wraps a SingleMasterRPC and provides failover from one master to
* another.
*/
public class MultiMasterRPC implements MasterRPC {
  static final Logger LOG = LoggerFactory.getLogger(MultiMasterRPC.class);

  // Retry policy: sweep the master list up to MAX_RETRIES times, pausing
  // RETRY_PAUSE_MS between sweeps.
  final protected int MAX_RETRIES;
  final protected int RETRY_PAUSE_MS;
  // Either RPC_TYPE_THRIFT or RPC_TYPE_AVRO, taken from configuration.
  final String rpcProtocol;

  // Currently-connected delegate; null when not connected.
  protected MasterRPC masterRPC;
  protected final List<Pair<String, Integer>> masterAddresses;

  // Index of next master to try - wraps round.
  protected int nextMaster = 0;
  protected String curHost;
  protected int curPort = 0;

  /**
   * Reads the set of master addresses from the configuration. If randomize is
   * set, it will shuffle the list. When a failure is detected, the entire set
   * of other masters will be tried maxRetries times, with a pause of
   * retryPauseMS between sweeps.
   */
  public MultiMasterRPC(FlumeConfiguration conf, boolean randomize,
      int maxRetries, int retryPauseMS) {
    masterAddresses = conf.getMasterHeartbeatServersList();
    if (randomize) {
      Collections.shuffle(masterAddresses);
    }
    this.MAX_RETRIES = maxRetries;
    this.RETRY_PAUSE_MS = retryPauseMS;
    this.rpcProtocol = conf.getMasterHeartbeatRPC();
  }

  /**
   * Reads the set of master addresses from the configuration. If randomize is
   * set, it will shuffle the list. Retry policy comes from the configuration.
   */
  public MultiMasterRPC(FlumeConfiguration conf, boolean randomize) {
    this(conf, randomize, conf.getAgentMultimasterMaxRetries(), conf
        .getAgentMultimasterRetryBackoff());
  }

  /**
   * Will return null if not connected
   */
  public synchronized String getCurHost() {
    return curHost;
  }

  /**
   * Will return 0 if not connected
   */
  public synchronized int getCurPort() {
    return curPort;
  }

  /**
   * Tries each configured master once, starting from nextMaster, and connects
   * to the first one that accepts. Any existing connection is closed first.
   *
   * @return the newly-connected delegate RPC client.
   * @throws IOException if no master could be reached in one full sweep.
   */
  protected synchronized MasterRPC findServer() throws IOException {
    List<String> failedMasters = new ArrayList<String>();
    for (int i = 0; i < masterAddresses.size(); ++i) {
      Pair<String, Integer> host = masterAddresses.get(nextMaster);
      try {
        // Next time we need to try the next master along
        nextMaster = (nextMaster + 1) % masterAddresses.size();

        // We don't know for sure what state the connection is in at this
        // point, so to be safe force a close.
        close();
        MasterRPC out = null;
        if (FlumeConfiguration.RPC_TYPE_THRIFT.equals(rpcProtocol)) {
          out = new ThriftMasterRPC(host.getLeft(), host.getRight());
        } else if (FlumeConfiguration.RPC_TYPE_AVRO.equals(rpcProtocol)) {
          out = new AvroMasterRPC(host.getLeft(), host.getRight());
        } else {
          LOG.error("No valid RPC protocol in configurations.");
          continue;
        }
        curHost = host.getLeft();
        curPort = host.getRight();
        this.masterRPC = out;
        return out;
      } catch (Exception e) {
        failedMasters.add(host.getLeft() + ":" + host.getRight());
        LOG.debug("Couldn't connect to master at " + host.getLeft() + ":"
            + host.getRight() + " because: " + e.getMessage());
      }
    }
    throw new IOException("Could not connect to any master nodes (tried "
        + masterAddresses.size() + ": " + failedMasters + ")");
  }

  /**
   * Returns the current connection, establishing one if necessary.
   */
  protected synchronized MasterRPC ensureConnected()
      throws TTransportException, IOException {
    return (masterRPC != null) ? masterRPC : findServer();
  }

  public synchronized void close() {
    // multiple close is ok.
    if (this.masterRPC != null) {
      try {
        this.masterRPC.close();
      } catch (IOException e) {
        LOG.warn("Failed to close connection with RPC master " + curHost);
      }
    }
    curHost = null;
    curPort = 0;
  }

  /**
   * A word about the pattern used here. Each RPC call could fail. If this is
   * detected we want to fail over the another master server.
   *
   * We use Retryables (not perfect, but good enough!) for this. Once a call
   * fails by throwing a TException, we try to find another server and fail the
   * current attempt. Each attempt to find another server goes in the worst case
   * around the list of servers once.
   *
   * This pattern itself should repeat (otherwise if there are two consecutive
   * server failures, due to taking two or more offline, we'll see exceptions
   * propagated back to the caller). So we use a retry policy that retries every
   * 5s, up to 12 times. The idea is that if a node can't reach any masters for
   * 1 minute it's problematic. At this point an exception goes back to the
   * caller, and it's their responsibility to deal with the loss.
   *
   */
  abstract class RPCRetryable<T> extends ResultRetryable<T> {
    /**
     * Implement RPC call here.
     */
    abstract public T doRPC() throws IOException;

    public boolean doTry() {
      /**
       * Getting the locking efficient here is difficult because of subtle race
       * conditions. Since all access to MasterRPC is synchronized, we can
       * afford to serialize access to this block.
       */
      synchronized (MultiMasterRPC.this) {
        try {
          result = doRPC();
          return true;
        } catch (Exception e) {
          /**
           * A subtle race condition - if two RPC calls have failed and fallen
           * through to here, both might try and call findServer and race on the
           * next good server. This is why we synchronize the whole enclosing
           * try block.
           */
          try {
            LOG.info(e.getMessage());
            LOG.debug(e.getMessage(), e);
            findServer();
          } catch (IOException e1) {
            LOG.warn(e1.getMessage());
          }
        }
        return false;
      }
    }
  }

  /**
   * Runs the given retryable RPC under the standard fixed-period retry policy,
   * translating any final failure into an IOException for the caller. This
   * factors out the harness boilerplate previously duplicated in every RPC
   * method.
   *
   * @return the RPC's result (null for void RPCs).
   * @throws IOException if all retries are exhausted.
   */
  private <T> T attemptRpc(RPCRetryable<T> retry) throws IOException {
    RetryHarness harness = new RetryHarness(retry, new FixedPeriodBackoff(
        RETRY_PAUSE_MS, MAX_RETRIES), true);
    try {
      harness.attempt();
      return retry.getResult();
    } catch (Exception e) {
      throw new IOException(e);
    }
  }

  /**
   * Fetches the configuration for logical node n from the current master,
   * failing over to other masters as needed.
   */
  public FlumeConfigData getConfig(final String n) throws IOException {
    return attemptRpc(new RPCRetryable<FlumeConfigData>() {
      public FlumeConfigData doRPC() throws IOException {
        Preconditions.checkState(masterRPC != null,
            "No active master RPC connection");
        return masterRPC.getConfig(n);
      }
    });
  }

  /**
   * This checks for an ack with a given ackid at the master
   */
  public boolean checkAck(final String ackid) throws IOException {
    return attemptRpc(new RPCRetryable<Boolean>() {
      public Boolean doRPC() throws IOException {
        Preconditions.checkState(masterRPC != null,
            "No active master RPC connection");
        return masterRPC.checkAck(ackid);
      }
    });
  }

  /**
   * Returns the logical nodes mapped to the given physical node.
   */
  public List<String> getLogicalNodes(final String physicalNode)
      throws IOException {
    return attemptRpc(new RPCRetryable<List<String>>() {
      public List<String> doRPC() throws IOException {
        Preconditions.checkState(masterRPC != null,
            "No active master RPC connection");
        return masterRPC.getLogicalNodes(physicalNode);
      }
    });
  }

  /**
   * This method returns the ChokeId->limit (in KB/sec) map for the given
   * physical node. This limit puts an approximate upperbound on the number of
   * bytes which can be shipped across a choke decorator.
   */
  public Map<String, Integer> getChokeMap(final String physicalNode)
      throws IOException {
    return attemptRpc(new RPCRetryable<Map<String, Integer>>() {
      public Map<String, Integer> doRPC() throws IOException {
        Preconditions.checkState(masterRPC != null,
            "No active master RPC connection");
        return masterRPC.getChokeMap(physicalNode);
      }
    });
  }

  /**
   * Sends a heartbeat for the given logical node to the master.
   */
  public boolean heartbeat(final LogicalNode n) throws IOException {
    return attemptRpc(new RPCRetryable<Boolean>() {
      public Boolean doRPC() throws IOException {
        Preconditions.checkState(masterRPC != null,
            "No active master RPC connection");
        return masterRPC.heartbeat(n);
      }
    });
  }

  /**
   * Acknowledges the given ack group at the master.
   */
  public void acknowledge(final String group) throws IOException {
    attemptRpc(new RPCRetryable<Void>() {
      public Void doRPC() throws IOException {
        Preconditions.checkState(masterRPC != null,
            "No active master RPC connection");
        masterRPC.acknowledge(group);
        return null; // void RPC; no result for the caller to check.
      }
    });
  }

  /**
   * Ships the given reports to the master.
   */
  public void putReports(final Map<String, ReportEvent> reports)
      throws IOException {
    attemptRpc(new RPCRetryable<Void>() {
      public Void doRPC() throws IOException {
        Preconditions.checkState(masterRPC != null,
            "No active master RPC connection");
        masterRPC.putReports(reports);
        return null; // void RPC; no result for the caller to check.
      }
    });
  }

  public AckListener createAckListener() {
    // NOTE(review): unlike the other RPC calls this is not wrapped in the
    // retry harness and throws NullPointerException if invoked before any
    // master connection exists — confirm callers only use it after a
    // successful heartbeat/connect.
    return masterRPC.createAckListener();
  }
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/monitoring/v3/group_service.proto
package com.google.monitoring.v3;
/**
 *
 *
 * <pre>
 * The `GetGroup` request.
 * </pre>
 *
 * Protobuf type {@code google.monitoring.v3.GetGroupRequest}
 */
public final class GetGroupRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.monitoring.v3.GetGroupRequest)
    GetGroupRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use GetGroupRequest.newBuilder() to construct.
  private GetGroupRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default instance: name_ starts as the empty string (proto3 string default).
  private GetGroupRequest() {
    name_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new GetGroupRequest();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  // Wire-format parsing constructor. Tag 26 is field number 3 (name) with
  // wire type 2 (length-delimited); unrecognized tags are preserved in
  // unknownFields so round-tripping does not drop data.
  private GetGroupRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            // Tag 0 marks end of input.
            done = true;
            break;
          case 26:
            {
              java.lang.String s = input.readStringRequireUtf8();
              name_ = s;
              break;
            }
          default:
            {
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      // Always freeze whatever was parsed, even on failure, so the
      // unfinished message attached to the exception is usable.
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.monitoring.v3.GroupServiceProto
        .internal_static_google_monitoring_v3_GetGroupRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.monitoring.v3.GroupServiceProto
        .internal_static_google_monitoring_v3_GetGroupRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.monitoring.v3.GetGroupRequest.class,
            com.google.monitoring.v3.GetGroupRequest.Builder.class);
  }

  public static final int NAME_FIELD_NUMBER = 3;

  // Holds either a String or a ByteString; lazily converted and cached in
  // whichever form was last requested (standard generated-code idiom).
  private volatile java.lang.Object name_;

  /**
   *
   *
   * <pre>
   * Required. The group to retrieve. The format is:
   * projects/[PROJECT_ID_OR_NUMBER]/groups/[GROUP_ID]
   * </pre>
   *
   * <code>
   * string name = 3 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The name.
   */
  @java.lang.Override
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Cached as ByteString; decode once and cache the String form.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      name_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. The group to retrieve. The format is:
   * projects/[PROJECT_ID_OR_NUMBER]/groups/[GROUP_ID]
   * </pre>
   *
   * <code>
   * string name = 3 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for name.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      // Cached as String; encode once and cache the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      name_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // -1 = not computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Proto3: default-valued (empty) strings are not serialized.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, name_);
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, name_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.monitoring.v3.GetGroupRequest)) {
      return super.equals(obj);
    }
    com.google.monitoring.v3.GetGroupRequest other = (com.google.monitoring.v3.GetGroupRequest) obj;

    if (!getName().equals(other.getName())) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    // Memoized; a message is immutable once built, so computing once is safe.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // ----- Static parse entry points (delegate to PARSER) -----

  public static com.google.monitoring.v3.GetGroupRequest parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.monitoring.v3.GetGroupRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.monitoring.v3.GetGroupRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.monitoring.v3.GetGroupRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.monitoring.v3.GetGroupRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.monitoring.v3.GetGroupRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.monitoring.v3.GetGroupRequest parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.monitoring.v3.GetGroupRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.monitoring.v3.GetGroupRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.monitoring.v3.GetGroupRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.monitoring.v3.GetGroupRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.monitoring.v3.GetGroupRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.monitoring.v3.GetGroupRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * The `GetGroup` request.
   * </pre>
   *
   * Protobuf type {@code google.monitoring.v3.GetGroupRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.monitoring.v3.GetGroupRequest)
      com.google.monitoring.v3.GetGroupRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.monitoring.v3.GroupServiceProto
          .internal_static_google_monitoring_v3_GetGroupRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.monitoring.v3.GroupServiceProto
          .internal_static_google_monitoring_v3_GetGroupRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.monitoring.v3.GetGroupRequest.class,
              com.google.monitoring.v3.GetGroupRequest.Builder.class);
    }

    // Construct using com.google.monitoring.v3.GetGroupRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      // No nested-message field builders to force-initialize for this type.
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      name_ = "";

      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.monitoring.v3.GroupServiceProto
          .internal_static_google_monitoring_v3_GetGroupRequest_descriptor;
    }

    @java.lang.Override
    public com.google.monitoring.v3.GetGroupRequest getDefaultInstanceForType() {
      return com.google.monitoring.v3.GetGroupRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.monitoring.v3.GetGroupRequest build() {
      com.google.monitoring.v3.GetGroupRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.monitoring.v3.GetGroupRequest buildPartial() {
      com.google.monitoring.v3.GetGroupRequest result =
          new com.google.monitoring.v3.GetGroupRequest(this);
      result.name_ = name_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.monitoring.v3.GetGroupRequest) {
        return mergeFrom((com.google.monitoring.v3.GetGroupRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.monitoring.v3.GetGroupRequest other) {
      if (other == com.google.monitoring.v3.GetGroupRequest.getDefaultInstance()) return this;
      // Proto3 merge semantics: only non-default (non-empty) fields overwrite.
      if (!other.getName().isEmpty()) {
        name_ = other.name_;
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.monitoring.v3.GetGroupRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Keep whatever was parsed before the failure so it can be merged below.
        parsedMessage = (com.google.monitoring.v3.GetGroupRequest) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    // See the message-level name_ field: String or ByteString, lazily converted.
    private java.lang.Object name_ = "";

    /**
     *
     *
     * <pre>
     * Required. The group to retrieve. The format is:
     * projects/[PROJECT_ID_OR_NUMBER]/groups/[GROUP_ID]
     * </pre>
     *
     * <code>
     * string name = 3 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The name.
     */
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        name_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The group to retrieve. The format is:
     * projects/[PROJECT_ID_OR_NUMBER]/groups/[GROUP_ID]
     * </pre>
     *
     * <code>
     * string name = 3 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for name.
     */
    public com.google.protobuf.ByteString getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        name_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The group to retrieve. The format is:
     * projects/[PROJECT_ID_OR_NUMBER]/groups/[GROUP_ID]
     * </pre>
     *
     * <code>
     * string name = 3 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The name to set.
     * @return This builder for chaining.
     */
    public Builder setName(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }

      name_ = value;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The group to retrieve. The format is:
     * projects/[PROJECT_ID_OR_NUMBER]/groups/[GROUP_ID]
     * </pre>
     *
     * <code>
     * string name = 3 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearName() {
      name_ = getDefaultInstance().getName();
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The group to retrieve. The format is:
     * projects/[PROJECT_ID_OR_NUMBER]/groups/[GROUP_ID]
     * </pre>
     *
     * <code>
     * string name = 3 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for name to set.
     * @return This builder for chaining.
     */
    public Builder setNameBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Reject bytes that are not valid UTF-8 up front (proto3 string contract).
      checkByteStringIsUtf8(value);

      name_ = value;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.monitoring.v3.GetGroupRequest)
  }

  // @@protoc_insertion_point(class_scope:google.monitoring.v3.GetGroupRequest)
  private static final com.google.monitoring.v3.GetGroupRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.monitoring.v3.GetGroupRequest();
  }

  public static com.google.monitoring.v3.GetGroupRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<GetGroupRequest> PARSER =
      new com.google.protobuf.AbstractParser<GetGroupRequest>() {
        @java.lang.Override
        public GetGroupRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new GetGroupRequest(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<GetGroupRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<GetGroupRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.monitoring.v3.GetGroupRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/tasks/v2/cloudtasks.proto
package com.google.cloud.tasks.v2;
/**
*
*
* <pre>
* Request message for [PurgeQueue][google.cloud.tasks.v2.CloudTasks.PurgeQueue].
* </pre>
*
* Protobuf type {@code google.cloud.tasks.v2.PurgeQueueRequest}
*/
public final class PurgeQueueRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.tasks.v2.PurgeQueueRequest)
PurgeQueueRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use PurgeQueueRequest.newBuilder() to construct.
private PurgeQueueRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private PurgeQueueRequest() {
name_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new PurgeQueueRequest();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
private PurgeQueueRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
java.lang.String s = input.readStringRequireUtf8();
name_ = s;
break;
}
default:
{
if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.tasks.v2.CloudTasksProto
.internal_static_google_cloud_tasks_v2_PurgeQueueRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.tasks.v2.CloudTasksProto
.internal_static_google_cloud_tasks_v2_PurgeQueueRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.tasks.v2.PurgeQueueRequest.class,
com.google.cloud.tasks.v2.PurgeQueueRequest.Builder.class);
}
public static final int NAME_FIELD_NUMBER = 1;
private volatile java.lang.Object name_;
/**
*
*
* <pre>
* Required. The queue name. For example:
* `projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The name.
*/
@java.lang.Override
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The queue name. For example:
* `projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for name.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.tasks.v2.PurgeQueueRequest)) {
return super.equals(obj);
}
com.google.cloud.tasks.v2.PurgeQueueRequest other =
(com.google.cloud.tasks.v2.PurgeQueueRequest) obj;
if (!getName().equals(other.getName())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + NAME_FIELD_NUMBER;
hash = (53 * hash) + getName().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.tasks.v2.PurgeQueueRequest parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.tasks.v2.PurgeQueueRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.tasks.v2.PurgeQueueRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.tasks.v2.PurgeQueueRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.tasks.v2.PurgeQueueRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.tasks.v2.PurgeQueueRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.tasks.v2.PurgeQueueRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.tasks.v2.PurgeQueueRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.tasks.v2.PurgeQueueRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.tasks.v2.PurgeQueueRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.tasks.v2.PurgeQueueRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.tasks.v2.PurgeQueueRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.tasks.v2.PurgeQueueRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for [PurgeQueue][google.cloud.tasks.v2.CloudTasks.PurgeQueue].
* </pre>
*
* Protobuf type {@code google.cloud.tasks.v2.PurgeQueueRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.tasks.v2.PurgeQueueRequest)
com.google.cloud.tasks.v2.PurgeQueueRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.tasks.v2.CloudTasksProto
.internal_static_google_cloud_tasks_v2_PurgeQueueRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.tasks.v2.CloudTasksProto
.internal_static_google_cloud_tasks_v2_PurgeQueueRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.tasks.v2.PurgeQueueRequest.class,
com.google.cloud.tasks.v2.PurgeQueueRequest.Builder.class);
}
// Construct using com.google.cloud.tasks.v2.PurgeQueueRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
@java.lang.Override
public Builder clear() {
super.clear();
name_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.tasks.v2.CloudTasksProto
.internal_static_google_cloud_tasks_v2_PurgeQueueRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.tasks.v2.PurgeQueueRequest getDefaultInstanceForType() {
return com.google.cloud.tasks.v2.PurgeQueueRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.tasks.v2.PurgeQueueRequest build() {
com.google.cloud.tasks.v2.PurgeQueueRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.tasks.v2.PurgeQueueRequest buildPartial() {
com.google.cloud.tasks.v2.PurgeQueueRequest result =
new com.google.cloud.tasks.v2.PurgeQueueRequest(this);
result.name_ = name_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.tasks.v2.PurgeQueueRequest) {
return mergeFrom((com.google.cloud.tasks.v2.PurgeQueueRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.tasks.v2.PurgeQueueRequest other) {
if (other == com.google.cloud.tasks.v2.PurgeQueueRequest.getDefaultInstance()) return this;
if (!other.getName().isEmpty()) {
name_ = other.name_;
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.cloud.tasks.v2.PurgeQueueRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.cloud.tasks.v2.PurgeQueueRequest) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private java.lang.Object name_ = "";
/**
*
*
* <pre>
* Required. The queue name. For example:
* `projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The name.
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The queue name. For example:
* `projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for name.
*/
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The queue name. For example:
* `projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The name to set.
* @return This builder for chaining.
*/
public Builder setName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Required. The queue name. For example:
 * `projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`
 * </pre>
 *
 * <code>
 * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return This builder for chaining.
 */
public Builder clearName() {
// Reset to the proto default (empty string) taken from the default instance.
name_ = getDefaultInstance().getName();
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Required. The queue name. For example:
 * `projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`
 * </pre>
 *
 * <code>
 * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @param value The bytes for name to set.
 * @return This builder for chaining.
 */
public Builder setNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Validate the raw bytes are well-formed UTF-8 before accepting them for a string field.
checkByteStringIsUtf8(value);
name_ = value;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
// Delegates to the generated message builder base; kept final by the protobuf codegen.
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
// Delegates to the generated message builder base; kept final by the protobuf codegen.
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.tasks.v2.PurgeQueueRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.PurgeQueueRequest)
// Shared immutable default instance (all fields at proto defaults), created eagerly at class load.
private static final com.google.cloud.tasks.v2.PurgeQueueRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.tasks.v2.PurgeQueueRequest();
}
public static com.google.cloud.tasks.v2.PurgeQueueRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Singleton parser used by the protobuf runtime to deserialize PurgeQueueRequest messages.
private static final com.google.protobuf.Parser<PurgeQueueRequest> PARSER =
new com.google.protobuf.AbstractParser<PurgeQueueRequest>() {
@java.lang.Override
public PurgeQueueRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
// The parsing constructor reads fields directly from the coded stream.
return new PurgeQueueRequest(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<PurgeQueueRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<PurgeQueueRequest> getParserForType() {
// Instance-level accessor required by the Message interface; returns the shared parser.
return PARSER;
}
@java.lang.Override
public com.google.cloud.tasks.v2.PurgeQueueRequest getDefaultInstanceForType() {
// Instance-level accessor required by the Message interface; returns the shared default.
return DEFAULT_INSTANCE;
}
}
| |
package org.virtue.game.entity.combat;
import java.util.ArrayList;
import java.util.List;
import org.virtue.game.World;
import org.virtue.game.entity.Entity;
import org.virtue.game.entity.combat.death.DeathHandler;
import org.virtue.game.entity.combat.hit.Hit;
import org.virtue.game.entity.combat.hit.Hit.HitType;
import org.virtue.game.entity.combat.impl.ImpactInfo;
import org.virtue.game.entity.player.Player;
import org.virtue.game.entity.player.var.VarKey;
import org.virtue.network.protocol.update.block.HitMarkBlock;
import org.virtue.network.protocol.update.ref.Bar;
/**
* Handles impacts an entity has received.
* @author Emperor
*
*/
public class ImpactHandler {

    /** The entity whose incoming impacts are managed. */
    private final Entity entity;

    /** Runs the death sequence once lifepoints reach zero. */
    private final DeathHandler deathHandler;

    /** Impacts waiting for their scheduled tick before being applied. */
    private List<ImpactInfo> queuedImpacts;

    /** Lifepoint bars queued for the next hitmark update block. */
    private List<Bar> queuedBars;

    /** Hit markers queued for the next hitmark update block. */
    private List<Hit> queuedHits;

    /** The maximum amount of lifepoints. */
    private int maximumLifepoints;

    /** The entity's current lifepoints. */
    private int lifepoints;

    /** Whether the hitmark block has already been scheduled this game cycle. */
    private boolean scheduledBlock;

    /** Whether the entity is currently engaged in combat. */
    private boolean inCombat = false;

    /**
     * Constructs a new {@code ImpactHandler} {@code Object}.
     * @param entity The entity receiving the impacts.
     */
    public ImpactHandler(Entity entity) {
        this.entity = entity;
        this.deathHandler = new DeathHandler(entity);
        this.queuedImpacts = new ArrayList<>();
        this.queuedBars = new ArrayList<>();
        this.queuedHits = new ArrayList<>();
    }

    /**
     * Restores lifepoints to the maximum, queuing a lifepoints bar update
     * when the resulting amount is positive.
     */
    public void restoreLifepoints() {
        final int previous = lifepoints;
        updateLifepoints(maximumLifepoints);
        if (lifepoints > 0) {
            queuedBars.add(new Bar(Bar.HITPOINTS, previous, lifepoints, 0));
            scheduleBlock();
        }
    }

    /**
     * Heals the entity, capping so lifepoints never exceed the maximum.
     * @param amount The requested heal amount.
     * @param showHeal Whether to queue a heal hitmark and bar update.
     */
    public void heal(int amount, boolean showHeal) {
        final int previous = lifepoints;
        // Cap the heal at the missing lifepoints (same as the original overflow check).
        final int applied = Math.min(amount, maximumLifepoints - lifepoints);
        updateLifepoints(lifepoints + applied);
        if (showHeal && applied > 0) {
            queuedHits.add(new Hit(applied, HitType.HEALED_DAMAGE));
            queuedBars.add(new Bar(Bar.HITPOINTS, previous, lifepoints, 0));
            scheduleBlock();
        }
    }

    /**
     * Applies a plain hit, capped at the current lifepoints, and queues the
     * hitmark and lifepoints bar.
     * @param hitAmount The damage to apply.
     */
    public void hit(int hitAmount) {
        final int previous = lifepoints;
        final int damage = hitAmount > lifepoints ? lifepoints : hitAmount;
        queuedHits.add(new Hit(damage, 0, HitType.REGULAR_DAMAGE));
        decrementLifepoints(damage);
        queuedBars.add(new Bar(Bar.HITPOINTS, previous, lifepoints, 0));
        scheduleBlock();
    }

    /**
     * Handles a hit described by the given impact information.
     * @param impact The impact information.
     */
    public void hit(ImpactInfo impact) {
        final int previous = lifepoints;
        int damage = impact.getHit();
        if (damage > lifepoints) {
            damage = lifepoints; // never hit below zero lifepoints
        }
        queuedHits.add(impact.selectHitMark(damage, 0));
        decrementLifepoints(damage);
        queuedBars.add(new Bar(Bar.HITPOINTS, previous, lifepoints, 0));
        scheduleBlock();
    }

    /**
     * Decrements the lifepoints, triggering the death handler when the
     * amount would drop them to zero or below.
     * @param amount The amount of lifepoints to decrement.
     */
    private void decrementLifepoints(int amount) {
        if (lifepoints - amount <= 0) {
            amount = lifepoints;
            if (!isDead()) {
                deathHandler.run();
            }
        }
        updateLifepoints(lifepoints - amount);
    }

    /**
     * Sets the lifepoints amount, keeping the player's hitpoints varbit in sync.
     * @param lifepoints The amount of lifepoints.
     */
    public void updateLifepoints(int lifepoints) {
        setLifepoints(lifepoints);
        if (entity instanceof Player) {
            ((Player) entity).getVars().setVarBitValue(VarKey.Bit.PLAYER_HITPOINTS, lifepoints);
        }
    }

    /**
     * Applies every impact whose scheduled tick has been reached and keeps
     * the rest queued for a later cycle.
     */
    public void updateImpacts() {
        final List<ImpactInfo> stillPending = new ArrayList<>();
        for (ImpactInfo info : queuedImpacts) {
            if (info.getScheduledTick() <= World.getInstance().getCycleCount()) {
                hit(info);
            } else {
                stillPending.add(info);
            }
        }
        queuedImpacts = stillPending;
    }

    /**
     * Clears the queued bars and hits and allows a new hitmark block to be scheduled.
     */
    public void resetQueue() {
        queuedBars.clear();
        queuedHits.clear();
        scheduledBlock = false;
    }

    /**
     * Queues an impact, scheduling it by its delay relative to the current world cycle.
     * @param impact The impact information.
     */
    public void queue(ImpactInfo impact) {
        impact.setScheduledTick(World.getInstance().getCycleCount() + impact.getDelay());
        queuedImpacts.add(impact);
    }

    /**
     * Schedules the hitmark update block, at most once per game cycle.
     */
    public void scheduleBlock() {
        if (scheduledBlock) {
            return;
        }
        entity.queueUpdateBlock(new HitMarkBlock());
        scheduledBlock = true;
    }

    /**
     * Gets the entity value.
     * @return The entity.
     */
    public Entity getEntity() {
        return entity;
    }

    /**
     * Sets the lifepoints amount directly (no varbit sync).
     * @param lifepoints The amount of lifepoints.
     */
    public void setLifepoints(int lifepoints) {
        this.lifepoints = lifepoints;
    }

    /**
     * Gets the lifepoints value.
     * @return The lifepoints.
     */
    public int getLifepoints() {
        return lifepoints;
    }

    /**
     * Gets the maximum lifepoints, clamped to at least 1.
     * @return The maximumLifepoints (never below 1).
     */
    public int getMaximumLifepoints() {
        return maximumLifepoints < 1 ? 1 : maximumLifepoints;
    }

    /**
     * Sets the maximumLifepoints value.
     * @param maximumLifepoints The maximumLifepoints to set.
     */
    public void setMaximumLifepoints(int maximumLifepoints) {
        this.maximumLifepoints = maximumLifepoints;
    }

    /**
     * Gets the currently queued impacts.
     * @return The impacts queue.
     */
    public List<ImpactInfo> getQueuedImpacts() {
        return queuedImpacts;
    }

    /**
     * Gets the queuedBars value.
     * @return The queuedBars.
     */
    public List<Bar> getQueuedBars() {
        return queuedBars;
    }

    /**
     * Gets the dead value from the death handler.
     * @return The dead.
     */
    public boolean isDead() {
        return deathHandler.isDead();
    }

    /**
     * Sets the dead value on the death handler.
     * @param dead The dead to set.
     */
    public void setDead(boolean dead) {
        deathHandler.setDead(dead);
    }

    /**
     * Gets the deathHandler value.
     * @return The deathHandler.
     */
    public DeathHandler getDeathHandler() {
        return deathHandler;
    }

    /**
     * Gets whether the entity is currently in combat.
     * @return The inCombat flag.
     */
    public boolean inCombat() {
        return inCombat;
    }

    /**
     * Sets whether the entity is currently in combat.
     * @param inCombat The inCombat flag to set.
     */
    public void setInCombat(boolean inCombat) {
        this.inCombat = inCombat;
    }

    /**
     * Gets the queuedHits value.
     * @return The queuedHits.
     */
    public List<Hit> getQueuedHits() {
        return queuedHits;
    }
}
| |
/*
* Copyright 2015-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.lastaflute.web.ruts.process.formcoins;
import java.lang.reflect.Array;
import java.lang.reflect.Field;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import org.dbflute.helper.message.ExceptionMessageBuilder;
import org.dbflute.jdbc.Classification;
import org.dbflute.optional.OptionalThing;
import org.dbflute.util.DfCollectionUtil;
import org.dbflute.util.DfReflectionUtil;
import org.dbflute.util.DfTypeUtil.ParseBooleanException;
import org.dbflute.util.DfTypeUtil.ParseDateException;
import org.dbflute.util.Srl;
import org.lastaflute.core.direction.FwAssistantDirector;
import org.lastaflute.core.json.JsonManager;
import org.lastaflute.core.json.JsonObjectConvertible;
import org.lastaflute.core.message.UserMessages;
import org.lastaflute.core.util.ContainerUtil;
import org.lastaflute.core.util.LaClassificationUtil;
import org.lastaflute.core.util.LaClassificationUtil.ClassificationUnknownCodeException;
import org.lastaflute.di.helper.beans.BeanDesc;
import org.lastaflute.di.helper.beans.ParameterizedClassDesc;
import org.lastaflute.di.helper.beans.PropertyDesc;
import org.lastaflute.di.helper.beans.factory.BeanDescFactory;
import org.lastaflute.di.helper.misc.ParameterizedRef;
import org.lastaflute.di.util.LdiClassUtil;
import org.lastaflute.di.util.LdiModifierUtil;
import org.lastaflute.web.exception.Forced400BadRequestException;
import org.lastaflute.web.exception.IndexedPropertyNonParameterizedListException;
import org.lastaflute.web.exception.IndexedPropertyNotListArrayException;
import org.lastaflute.web.exception.JsonBodyCannotReadFromRequestException;
import org.lastaflute.web.exception.RequestClassifiationConvertFailureException;
import org.lastaflute.web.exception.RequestJsonParseFailureException;
import org.lastaflute.web.exception.RequestPropertyMappingFailureException;
import org.lastaflute.web.path.FormMappingOption;
import org.lastaflute.web.ruts.VirtualForm;
import org.lastaflute.web.ruts.config.ActionFormProperty;
import org.lastaflute.web.ruts.multipart.MultipartRequestHandler;
import org.lastaflute.web.ruts.process.ActionRuntime;
import org.lastaflute.web.ruts.process.debugchallenge.JsonDebugChallenge;
import org.lastaflute.web.ruts.process.exception.ActionFormPopulateFailureException;
import org.lastaflute.web.ruts.process.exception.RequestUndefinedParameterInFormException;
import org.lastaflute.web.servlet.filter.RequestLoggingFilter.RequestClientErrorException;
import org.lastaflute.web.servlet.request.RequestManager;
/**
* @author jflute
* @since 1.1.2 (2019/05/03 Friday)
*/
public class FormCoinsHelper { // keep singleton-able to be simple
// ===================================================================================
// Definition
// ==========
protected static final String LF = "\n";
// ===================================================================================
// Attribute
// =========
protected final FwAssistantDirector assistantDirector;
protected final RequestManager requestManager;
// ===================================================================================
// Constructor
// ===========
/**
 * Constructs the helper with framework components.
 * @param assistantDirector The assistant director of framework. (NotNull)
 * @param requestManager The manager of request. (NotNull)
 */
public FormCoinsHelper(FwAssistantDirector assistantDirector, RequestManager requestManager) {
this.assistantDirector = assistantDirector;
this.requestManager = requestManager;
}
// ===================================================================================
// Populate
// ========
/**
 * Prepares the map of request parameters for form population,
 * merging multipart elements and applying the option's filter when present.
 * @param multipartHandler The handler of multipart request. (NullAllowed: when not multipart)
 * @param option The option of form mapping, which may provide a parameter-map filter. (NotNull)
 * @return The ordered map of parameter name to values. (NotNull)
 */
public Map<String, Object> prepareRequestParameterMap(MultipartRequestHandler multipartHandler, FormMappingOption option) {
    final HttpServletRequest request = requestManager.getRequest();
    final Map<String, Object> paramMap = new LinkedHashMap<String, Object>();
    final Enumeration<String> parameterNames = request.getParameterNames();
    while (parameterNames.hasMoreElements()) {
        final String parameterName = parameterNames.nextElement();
        paramMap.put(parameterName, request.getParameterValues(parameterName));
    }
    if (multipartHandler != null) {
        paramMap.putAll(multipartHandler.getAllElements());
    }
    final OptionalThing<Function<Map<String, Object>, Map<String, Object>>> optFilter = option.getRequestParameterMapFilter();
    if (!optFilter.isPresent()) { // normally here; no map() to keep the normal route simple
        return paramMap;
    }
    final Map<String, Object> filteredMap = optFilter.get().apply(Collections.unmodifiableMap(paramMap));
    return filteredMap != null ? filteredMap : paramMap; // null filter result means no filtering
}
/**
 * Handles an exception raised while populating one request parameter into the form.
 * Client-error causes are rethrown as ServletException; others become a framework failure.
 * @param form The action form being populated. (NotNull)
 * @param name The name of the request parameter. (NotNull)
 * @param value The value of the request parameter. (NullAllowed)
 * @param runtime The runtime of the current action. (NotNull)
 * @param cause The cause of the population failure. (NotNull)
 * @throws ServletException When the cause is a request client error.
 */
public void handleIllegalPropertyPopulateException(Object form, String name, Object value, ActionRuntime runtime, Throwable cause)
throws ServletException {
if (isRequestClientErrorException(cause)) { // for indexed property check
throw new ServletException(cause);
}
throwActionFormPopulateFailureException(form, name, value, runtime, cause);
}
/**
 * Does the cause mean a client error of request (e.g. bad request)?
 * @param cause The cause to judge. (NotNull)
 * @return The determination, true or false.
 */
protected boolean isRequestClientErrorException(Throwable cause) {
return cause instanceof RequestClientErrorException;
}
/**
 * Throws the populate-failure exception with a detailed message about the parameter.
 * @param form The action form being populated. (NotNull)
 * @param name The name of the request parameter. (NotNull)
 * @param value The value of the request parameter. (NullAllowed)
 * @param runtime The runtime of the current action. (NotNull)
 * @param cause The cause of the population failure. (NotNull)
 * @throws ActionFormPopulateFailureException Always.
 */
protected void throwActionFormPopulateFailureException(Object form, String name, Object value, ActionRuntime runtime, Throwable cause) {
final ExceptionMessageBuilder br = new ExceptionMessageBuilder();
br.addNotice("Failed to populate the parameter to the form.");
br.addItem("Action Runtime");
br.addElement(runtime);
br.addItem("Action Form");
br.addElement(form);
br.addItem("Property Name");
br.addElement(name);
br.addItem("Property Value");
final Object valueObj;
if (value instanceof String[]) {
// single-element arrays (the common servlet case) are shown as the plain value
final List<Object> objList = DfCollectionUtil.toListFromArray(value);
valueObj = objList.size() == 1 ? objList.get(0) : objList;
} else {
valueObj = value;
}
br.addElement(valueObj);
final String msg = br.buildExceptionMessage();
throw new ActionFormPopulateFailureException(msg, cause);
}
// ===================================================================================
// JSON Body
// =========
/**
 * Throws the exception for a request body that cannot be read as JSON.
 * @param virtualForm The virtual form expecting the JSON body. (NotNull)
 * @param e The cause of the read failure. (NotNull)
 * @throws JsonBodyCannotReadFromRequestException Always.
 */
public void throwJsonBodyCannotReadFromRequestException(VirtualForm virtualForm, RuntimeException e) {
final ExceptionMessageBuilder br = new ExceptionMessageBuilder();
br.addNotice("Cannot read request body for JSON.");
br.addItem("Advice");
br.addElement("Your action expects JSON string on request body.");
br.addElement("Make sure your request for JSON body.");
br.addElement("Or it should be form...? e.g. SeaBody => SeaForm");
br.addItem("Body Class");
br.addElement(virtualForm);
final String msg = br.buildExceptionMessage();
throw new JsonBodyCannotReadFromRequestException(msg, e);
}
/**
 * Builds the debug display of the JSON body for exception messages and logging.
 * The body is trimmed and cut at 800 characters to avoid huge logging.
 * @param value The JSON body as plain text. (NotNull, EmptyAllowed)
 * @return The display string, prefixed by a line feed when non-empty. (NotNull)
 */
public String buildJsonBodyDebugDisplay(String value) {
    // want to show all as parameter, but limit just in case to avoid large logging
    final String trimmed = value.trim();
    // use the class's LF constant (was a literal "\n", inconsistent with the rest of the class)
    return !trimmed.isEmpty() ? LF + Srl.cut(trimmed, 800, "...") : " *empty body"; // might have rear LF
}
// -----------------------------------------------------
// Bean JSON
// ---------
/**
 * Throws the parse-failure exception for a JSON body, with debug challenges
 * built by retrying the JSON as a generic map (best effort).
 * @param runtime The runtime of the current action. (NotNull)
 * @param virtualForm The virtual form expecting the JSON body. (NotNull)
 * @param json The JSON text that failed to parse. (NotNull)
 * @param e The cause of the parse failure. (NotNull)
 * @throws RequestJsonParseFailureException Always (via the delegated thrower).
 */
public void throwJsonBodyParseFailureException(ActionRuntime runtime, VirtualForm virtualForm, String json, RuntimeException e) {
final StringBuilder sb = new StringBuilder();
sb.append("Cannot parse json on the request body.");
sb.append(LF).append(LF).append("[JsonBody Parse Failure]");
sb.append(LF).append(runtime);
sb.append(LF).append(virtualForm);
sb.append(LF).append(json);
// best-effort retry as plain map; empty when even that fails
final Map<String, Object> retryMap = retryJsonAsMapForDebug(json);
List<JsonDebugChallenge> challengeList = new ArrayList<JsonDebugChallenge>();
if (!retryMap.isEmpty()) {
sb.append(LF).append(buildDebugChallengeTitle());
final List<JsonDebugChallenge> nestedList = prepareJsonBodyDebugChallengeList(virtualForm, retryMap, null);
for (JsonDebugChallenge challenge : nestedList) {
sb.append(challenge.toChallengeDisp());
}
challengeList.addAll(nestedList);
}
throwRequestJsonParseFailureException(sb.toString(), challengeList, e);
}
// -----------------------------------------------------
// List JSON
// ---------
/**
 * Throws the parse-failure exception for a list JSON body, with per-element
 * debug challenges built by retrying the JSON as a list of maps (best effort).
 * @param runtime The runtime of the current action. (NotNull)
 * @param virtualForm The virtual form expecting the JSON body. (NotNull)
 * @param json The JSON text that failed to parse. (NotNull)
 * @param e The cause of the parse failure. (NotNull)
 * @throws RequestJsonParseFailureException Always (via the delegated thrower).
 */
public void throwListJsonBodyParseFailureException(ActionRuntime runtime, VirtualForm virtualForm, String json, RuntimeException e) {
final StringBuilder sb = new StringBuilder();
sb.append("Cannot parse list json on the request body.");
sb.append(LF).append(LF).append("[List JsonBody Parse Failure]");
sb.append(LF).append(runtime);
sb.append(LF).append(virtualForm);
sb.append(LF).append(json);
final List<Map<String, Object>> retryList = retryJsonListAsMapForDebug(json);
final List<JsonDebugChallenge> challengeList = new ArrayList<JsonDebugChallenge>();
if (!retryList.isEmpty()) {
sb.append(LF).append(buildDebugChallengeTitle());
int index = 1; // display is 1-based here
for (Map<String, Object> retryMap : retryList) {
sb.append(LF).append(" (index: ").append(index).append(")");
final List<JsonDebugChallenge> nestedList = prepareJsonBodyDebugChallengeList(virtualForm, retryMap, index);
challengeList.addAll(nestedList);
nestedList.forEach(challenge -> sb.append(challenge.toChallengeDisp()));
++index;
}
}
throwRequestJsonParseFailureException(sb.toString(), challengeList, e);
}
// -----------------------------------------------------
// Debug Challenge
// ---------------
/**
 * Prepares the debug challenges for a JSON body, one per form property,
 * matching each property against the retried generic map.
 * @param virtualForm The virtual form whose properties are challenged. (NotNull)
 * @param retryMap The map retried from the broken JSON. (NotNull, EmptyAllowed)
 * @param elementIndex The index of the list element. (NullAllowed: when not list JSON)
 * @return The read-only list of challenges, empty when the retry map is empty. (NotNull)
 */
protected List<JsonDebugChallenge> prepareJsonBodyDebugChallengeList(VirtualForm virtualForm, Map<String, Object> retryMap,
        Integer elementIndex) {
    if (retryMap.isEmpty()) {
        return Collections.emptyList();
    }
    final List<JsonDebugChallenge> challengeList = new ArrayList<JsonDebugChallenge>();
    for (ActionFormProperty property : virtualForm.getFormMeta().properties()) {
        final String propertyName = property.getPropertyName();
        final Class<?> propertyType = property.getPropertyDesc().getPropertyType();
        challengeList.add(createJsonDebugChallenge(retryMap, propertyName, propertyType, elementIndex));
    }
    return Collections.unmodifiableList(challengeList);
}
// ===================================================================================
// Property Set
// ============
/**
 * Returns the smaller meaningful index, treating a negative index as not-found:
 * when exactly one index is negative, the non-negative one wins;
 * otherwise the smaller of the two is returned.
 * @param index1 The first index (minus allowed: means not found).
 * @param index2 The second index (minus allowed: means not found).
 * @return The selected index.
 */
public int minIndex(int index1, int index2) {
    if (index1 < 0) {
        return index2 < 0 ? Math.min(index1, index2) : index2;
    }
    return index2 < 0 ? index1 : Math.min(index1, index2);
}
// ===================================================================================
// Simple Property
// ===============
// -----------------------------------------------------
// List JSON
// ---------
/**
 * Throws the exception for a list property without generics that received JSON.
 * @param bean The action form containing the property. (NotNull)
 * @param name The name of the property. (NotNull)
 * @param json The JSON text that cannot be handled. (NotNull)
 * @param pd The description of the non-generic property. (NotNull)
 * @throws ActionFormPopulateFailureException Always.
 */
public void throwListJsonPropertyNonGenericException(Object bean, String name, String json, PropertyDesc pd) {
final ExceptionMessageBuilder br = new ExceptionMessageBuilder();
br.addNotice("Non-generic list cannot handle the JSON.");
br.addItem("Action Form");
br.addElement(bean);
br.addItem("NonGeneric Property");
br.addElement(pd);
br.addItem("Unhandled JSON");
br.addElement(json);
final String msg = br.buildExceptionMessage();
throw new ActionFormPopulateFailureException(msg);
}
/**
 * Throws the exception for a non-parameterized list property that received JSON.
 * @param bean The action form containing the property. (NotNull)
 * @param name The name of the property. (NotNull)
 * @param json The JSON text that cannot be handled. (NotNull)
 * @param pd The description of the property. (NotNull)
 * @param plainType The plain (non-parameterized) type of the property. (NotNull)
 * @throws ActionFormPopulateFailureException Always.
 */
public void throwListJsonPropertyNonParameterizedException(Object bean, String name, String json, PropertyDesc pd, Type plainType) {
final ExceptionMessageBuilder br = new ExceptionMessageBuilder();
br.addNotice("Non-parameterized list cannot handle the JSON.");
br.addItem("Action Form");
br.addElement(bean);
br.addItem("NonParameterized Property");
br.addElement(pd);
br.addItem("NonParameterized Type");
br.addElement(plainType);
br.addItem("Unhandled JSON");
br.addElement(json);
final String msg = br.buildExceptionMessage();
throw new ActionFormPopulateFailureException(msg);
}
/**
 * Throws the exception for a list JSON parameter whose generic type is not scalar.
 * @param bean The action form containing the property. (NotNull)
 * @param name The name of the property. (NotNull)
 * @param json The JSON text that cannot be handled. (NotNull)
 * @param pd The description of the generic property. (NotNull)
 * @param paramedType The parameterized type of the property. (NotNull)
 * @throws ActionFormPopulateFailureException Always.
 */
public void throwListJsonPropertyGenericNotScalarException(Object bean, String name, String json, PropertyDesc pd,
        ParameterizedType paramedType) {
    final ExceptionMessageBuilder br = new ExceptionMessageBuilder();
    br.addNotice("Not scalar generic type for the list JSON parameter.");
    br.addItem("Action Form");
    br.addElement(bean);
    br.addItem("Generic Property");
    br.addElement(pd);
    br.addItem("Parameterized Type"); // fixed typo: was "Parameterizd Type"
    br.addElement(paramedType);
    br.addItem("Unhandled JSON");
    br.addElement(json);
    final String msg = br.buildExceptionMessage();
    throw new ActionFormPopulateFailureException(msg);
}
/**
 * Throws the parse-failure exception for a list JSON request parameter,
 * with per-element debug challenges built by retrying the JSON (best effort).
 * @param bean The action form containing the property. (NotNull)
 * @param name The name of the property. (NotNull)
 * @param json The JSON text that failed to parse. (NotNull)
 * @param propertyType The generic type of the list property. (NotNull)
 * @param e The cause of the parse failure. (NotNull)
 * @throws RequestJsonParseFailureException Always (via the delegated thrower).
 */
public void throwListJsonParameterParseFailureException(Object bean, String name, String json, Type propertyType, RuntimeException e) {
final StringBuilder sb = new StringBuilder();
sb.append("Cannot parse list json of the request parameter.");
final List<Map<String, Object>> retryList = retryJsonListAsMapForDebug(json);
final List<JsonDebugChallenge> challengeList = new ArrayList<JsonDebugChallenge>();
final StringBuilder challengeSb = new StringBuilder();
if (!retryList.isEmpty()) {
final Class<?> elementType = DfReflectionUtil.getGenericFirstClass(propertyType);
if (elementType != null) { // just in case
// NOTE(review): display index starts at 0 here but at 1 in the body variant
// (throwListJsonBodyParseFailureException) - confirm which is intended
int index = 0;
for (Map<String, Object> retryMap : retryList) {
challengeSb.append(LF).append(" (index: ").append(index).append(")");
final List<JsonDebugChallenge> nestedList = prepareJsonParameterDebugChallengeList(retryMap, elementType, json, index);
challengeList.addAll(nestedList);
nestedList.forEach(challenge -> challengeSb.append(challenge.toChallengeDisp()));
++index;
}
}
}
final String challengeDisp = challengeSb.toString();
buildClientErrorHeader(sb, "List JsonParameter Parse Failure", bean, name, json, propertyType, challengeDisp);
throwRequestJsonParseFailureException(sb.toString(), challengeList, e);
}
// -----------------------------------------------------
// Bean JSON
// ---------
/**
 * Throws the parse-failure exception for a JSON request parameter,
 * with debug challenges built by retrying the JSON as a map (best effort).
 * @param bean The action form containing the property. (NotNull)
 * @param name The name of the property. (NotNull)
 * @param json The JSON text that failed to parse. (NotNull)
 * @param propertyType The type of the JSON-mapped property. (NotNull)
 * @param e The cause of the parse failure. (NotNull)
 * @throws RequestJsonParseFailureException Always (via the delegated thrower).
 */
public void throwJsonParameterParseFailureException(Object bean, String name, String json, Class<?> propertyType, RuntimeException e) {
final StringBuilder sb = new StringBuilder();
sb.append("Cannot parse json of the request parameter.");
final Map<String, Object> retryMap = retryJsonAsMapForDebug(json);
final List<JsonDebugChallenge> challengeList = prepareJsonParameterDebugChallengeList(retryMap, propertyType, json, null);
final String challengeDisp = buildJsonParameterDebugChallengeDisp(challengeList);
buildClientErrorHeader(sb, "JsonParameter Parse Failure", bean, name, json, propertyType, challengeDisp);
throwRequestJsonParseFailureException(sb.toString(), challengeList, e);
}
/**
 * Prepares the debug challenges for a JSON parameter, one per declared field
 * of the bean type, matching each field against the retried generic map.
 * @param retryMap The map retried from the broken JSON. (NotNull, EmptyAllowed)
 * @param beanType The type of the JSON-mapped bean. (NotNull)
 * @param json The broken JSON text (for context). (NotNull)
 * @param elementIndex The index of the list element. (NullAllowed: when not list JSON)
 * @return The read-only list of challenges, empty when the retry map is empty. (NotNull)
 */
protected List<JsonDebugChallenge> prepareJsonParameterDebugChallengeList(Map<String, Object> retryMap, Class<?> beanType, String json,
        Integer elementIndex) {
    if (retryMap.isEmpty()) {
        return Collections.emptyList();
    }
    final BeanDesc beanDesc = BeanDescFactory.getBeanDesc(beanType);
    final int fieldSize = beanDesc.getFieldSize();
    final List<JsonDebugChallenge> challengeList = new ArrayList<JsonDebugChallenge>(fieldSize);
    for (int fieldIndex = 0; fieldIndex < fieldSize; fieldIndex++) {
        final Field declaredField = beanDesc.getField(fieldIndex);
        challengeList.add(createJsonDebugChallenge(retryMap, declaredField.getName(), declaredField.getType(), elementIndex));
    }
    return Collections.unmodifiableList(challengeList);
}
/**
 * Builds the joined display of the debug challenges for exception messages.
 * @param challengeList The list of challenges to display. (NotNull, EmptyAllowed)
 * @return The joined display string. (NullAllowed: when the list is empty)
 */
protected String buildJsonParameterDebugChallengeDisp(List<JsonDebugChallenge> challengeList) {
    if (challengeList.isEmpty()) {
        return null; // means no display
    }
    final StringBuilder disp = new StringBuilder();
    for (JsonDebugChallenge challenge : challengeList) {
        disp.append(challenge.toChallengeDisp());
    }
    return disp.toString();
}
// -----------------------------------------------------
// Classification
// --------------
/**
 * Is the property type a DBFlute classification (CDef)?
 * @param propertyType The type of the property. (NotNull)
 * @return The determination, true or false.
 */
public boolean isClassificationProperty(Class<?> propertyType) {
return LaClassificationUtil.isCls(propertyType);
}
/**
 * Converts the code to the classification, translating an unknown code
 * to the conversion-failure exception (caught later as type failure).
 * @param bean The action form containing the property. (NotNull)
 * @param name The name of the property. (NotNull)
 * @param code The code to convert. (NotNull)
 * @param propertyType The classification type of the property. (NotNull)
 * @return The converted classification. (NotNull: exception when unknown code)
 */
public Classification toVerifiedClassification(Object bean, String name, Object code, Class<?> propertyType) {
try {
return LaClassificationUtil.toCls(propertyType, code);
} catch (ClassificationUnknownCodeException e) { // simple message because of catched later
String msg = "Cannot convert the code to the classification: " + code + " to " + propertyType.getSimpleName();
throwRequestClassifiationConvertFailureException(msg, e);
return null; // unreachable
}
}
/**
 * Throws the classification conversion-failure exception.
 * (the exception class name keeps its historical misspelling "Classifiation")
 * @param msg The exception message. (NotNull)
 * @param e The cause of the conversion failure. (NotNull)
 * @throws RequestClassifiationConvertFailureException Always.
 */
protected void throwRequestClassifiationConvertFailureException(String msg, Exception e) {
throw new RequestClassifiationConvertFailureException(msg, e);
}
// -----------------------------------------------------
// Type Failure
// ------------
/**
 * Treats a type-conversion failure as a bad request (client error),
 * rethrowing the cause as-is when it is already a bad-request exception.
 * @param bean The action form containing the property. (NullAllowed)
 * @param propertyPath The path of the property. (NotNull)
 * @param propertyType The type of the property. (NotNull)
 * @param exp The expression of the failed value (for the message). (NullAllowed)
 * @param cause The cause of the type failure. (NotNull)
 */
public void throwTypeFailureBadRequest(Object bean, String propertyPath, Class<?> propertyType, Object exp, RuntimeException cause) {
if (cause instanceof Forced400BadRequestException) { // already bad request so no need to new
throw cause; // e.g. classification's exception
}
final StringBuilder sb = new StringBuilder();
sb.append("The property cannot be the type: property=");
sb.append(bean != null ? bean.getClass().getSimpleName() : null);
sb.append("@").append(propertyPath).append("(").append(propertyType.getSimpleName()).append(") value=").append(exp);
throwRequestPropertyMappingFailureException(sb.toString(), cause); // though bad request
}
// -----------------------------------------------------
// Mapping Failure
// ---------------
/**
 * Handles a mapping failure while setting a value to a property:
 * type-failure causes become a bad request, others are rethrown as-is.
 * @param bean The action form containing the property. (NullAllowed)
 * @param name The name of the property. (NotNull)
 * @param value The value that failed to map. (NullAllowed)
 * @param pathSb The string builder of the property path. (NotNull)
 * @param pd The description of the property. (NotNull)
 * @param e The mapping failure. (NotNull)
 */
public void handleMappingFailureException(Object bean, String name, Object value, StringBuilder pathSb, PropertyDesc pd,
RuntimeException e) {
if (!isBadRequestMappingFailureException(e)) {
throw e;
}
// e.g. non-number GET but number type property
// suppress easy 500 error by e.g. non-number GET parameter (similar with path parameter)
// (o): ?seaId=123
// (x): ?seaId=abc *this case
final String beanExp = bean != null ? bean.getClass().getName() : null; // null check just in case
final Object dispValue = value instanceof Object[] ? Arrays.asList((Object[]) value).toString() : value;
final StringBuilder sb = new StringBuilder();
sb.append("Failed to set the value to the property.");
buildClientErrorHeader(sb, "Form Mapping Failure", beanExp, name, dispValue, pd.getPropertyType(), null);
throwRequestPropertyMappingFailureException(sb.toString(), e);
}
/**
 * Is the mapping failure one that should be treated as a bad request?
 * @param e The mapping failure to judge. (NotNull)
 * @return The determination, true or false.
 */
protected boolean isBadRequestMappingFailureException(RuntimeException e) {
// may be BeanIllegalPropertyException so also check nested exception
return isTypeFailureException(e) || isTypeFailureException(e.getCause());
}
/**
 * Is the cause a simple type-conversion failure?
 * (null-safe: instanceof on null is simply false)
 * @param cause The cause to judge. (NullAllowed)
 * @return The determination, true or false.
 */
public boolean isTypeFailureException(Throwable cause) { // except classification here
return cause instanceof NumberFormatException // e.g. Integer, Long
|| cause instanceof ParseDateException // e.g. LocalDate
|| cause instanceof ParseBooleanException // e.g. Boolean
|| cause instanceof RequestClassifiationConvertFailureException // e.g. CDef
;
}
// ===================================================================================
// Parse Index
// ===========
/**
 * Throws the mapping-failure exception for a non-number index of an indexed property.
 * @param name The name of the indexed property. (NotNull)
 * @param e The number-format failure of the index. (NotNull)
 */
public void throwIndexedPropertyNonNumberIndexException(String name, NumberFormatException e) {
String msg = "Non number index of the indexed property: name=" + name + LF + e.getMessage();
throwRequestPropertyMappingFailureException(msg, e);
}
/**
 * Throws the mapping-failure exception for a minus index of an indexed property.
 * @param name The name of the indexed property. (NotNull)
 * @param index The offending (negative) index.
 */
public void throwIndexedPropertyMinusIndexException(String name, int index) {
    // include the index like the size-over sibling; it was previously accepted but unused
    String msg = "Minus index of the indexed property: name=" + name + ", index=" + index;
    throwRequestPropertyMappingFailureException(msg);
}
/**
 * Throws the mapping-failure exception for a too-large index of an indexed property.
 * @param name The name of the indexed property. (NotNull)
 * @param index The offending index.
 */
public void throwIndexedPropertySizeOverException(String name, int index) {
String msg = "Too large size of the indexed property: name=" + name + ", index=" + index;
throwRequestPropertyMappingFailureException(msg);
}
// ===================================================================================
// Indexed Property
// ================
// -----------------------------------------------------
// Set Indexed Property
// --------------------
/**
 * Sets the value into the nested array position specified by the indexes,
 * traversing one array dimension per index and writing at the last one.
 * @param array The root array to traverse. (NotNull)
 * @param indexes The indexes per nesting level; the last one is the write position. (NotNull)
 * @param value The value to set at the leaf position. (NullAllowed)
 */
public void setArrayValue(Object array, int[] indexes, Object value) {
    Object target = array;
    final int leaf = indexes.length - 1;
    for (int level = 0; level < leaf; level++) {
        target = Array.get(target, indexes[level]);
    }
    Array.set(target, indexes[leaf], value);
}
// -----------------------------------------------------
// Prepare Indexed Property
// ------------------------
/**
 * Gets the nested array element for the indexes, lazily creating the leaf
 * element when it is null.
 * @param array The root array to traverse. (NotNull)
 * @param indexes The indexes per nesting level. (NotNull)
 * @param elementType The type used to instantiate a missing leaf element. (NotNull)
 * @return The element at the indexed position. (NotNull: leaf is created when null)
 */
public Object getArrayValue(Object array, int[] indexes, Class<?> elementType) {
Object value = array;
elementType = convertArrayClass(elementType); // abstract Map becomes HashMap for instantiation
for (int i = 0; i < indexes.length; i++) {
Object element = Array.get(value, indexes[i]);
if (i == indexes.length - 1 && element == null) {
// only the leaf is lazily created here
element = LdiClassUtil.newInstance(elementType);
Array.set(value, indexes[i], element);
}
value = element;
}
return value;
}
// -----------------------------------------------------
// Exception
// ---------
/**
 * Throws the exception for an indexed property whose list is not parameterized,
 * including the available parameterized info for diagnosis.
 * @param beanDesc The description of the form bean. (NotNull)
 * @param pd The description of the indexed property. (NotNull)
 * @throws IndexedPropertyNonParameterizedListException Always.
 */
public void throwIndexedPropertyNonParameterizedListException(BeanDesc beanDesc, PropertyDesc pd) {
final ExceptionMessageBuilder br = new ExceptionMessageBuilder();
br.addNotice("The list of indexed property was not parameterized.");
br.addItem("ActionForm");
br.addElement(toIndexedBeanRealClass(beanDesc.getBeanClass()));
br.addItem("Property");
br.addElement(pd);
br.addItem("Parameterized"); // parameterized info is important here
final ParameterizedClassDesc paramDesc = pd.getParameterizedClassDesc();
if (paramDesc != null) {
br.addElement("isParameterizedClass: " + paramDesc.isParameterizedClass());
br.addElement("parameterizedType: " + paramDesc.getParameterizedType());
br.addElement("rawClass: " + paramDesc.getRawClass());
final ParameterizedClassDesc[] arguments = paramDesc.getArguments();
if (arguments != null && arguments.length > 0) {
int index = 0;
for (ParameterizedClassDesc arg : arguments) {
br.addElement("argument" + index + ": " + arg.getParameterizedType());
++index;
}
}
} else {
br.addElement("getParameterizedClassDesc() returns null");
}
final String msg = br.buildExceptionMessage();
throw new IndexedPropertyNonParameterizedListException(msg);
}
/**
 * Throws the exception for an indexed property that is neither a list nor an array,
 * with an extra hint when the property uses Eclipse Collections' ImmutableList.
 * @param beanDesc The description of the form bean. (NotNull)
 * @param pd The description of the indexed property. (NotNull)
 * @throws IndexedPropertyNotListArrayException Always.
 */
public void throwIndexedPropertyNotListArrayException(BeanDesc beanDesc, PropertyDesc pd) {
final ExceptionMessageBuilder br = new ExceptionMessageBuilder();
br.addNotice("The indexed property was not list or array.");
br.addItem("Advice");
br.addElement("Confirm the property type in your form.");
if ("ImmutableList".equals(pd.getPropertyType().getSimpleName())) { // patch message
br.addElement("And if you use ImmutableList of Eclipse Collections (as option),");
br.addElement("unfortunately it is not supported as indexed property.");
br.addElement("So use 'java.util.List'.");
}
br.addItem("ActionForm");
br.addElement(toIndexedBeanRealClass(beanDesc.getBeanClass()));
br.addItem("Property");
br.addElement(pd);
final String msg = br.buildExceptionMessage();
throw new IndexedPropertyNotListArrayException(msg);
}
// -----------------------------------------------------
// Real Class
// ----------
/**
 * Converts the (possibly container-enhanced) bean class to its real class for display.
 * @param clazz The class to convert. (NotNull)
 * @return The real class. (NotNull)
 */
protected Class<?> toIndexedBeanRealClass(Class<?> clazz) {
return ContainerUtil.toRealClassIfEnhanced(clazz);
}
// -----------------------------------------------------
// Array Helper
// ------------
/**
 * Digs into the component type of the array class by the specified depth.
 * @param clazz The array class to unwrap. (NotNull)
 * @param depth The count of array dimensions to unwrap.
 * @return The component type after unwrapping. (NullAllowed: when unwrapped past an array)
 */
public Class<?> getArrayElementType(Class<?> clazz, int depth) {
    Class<?> componentType = clazz;
    for (int level = 0; level < depth; level++) {
        componentType = componentType.getComponentType();
    }
    return componentType;
}
/**
 * Expands the array so that indexes[0] becomes a valid position,
 * recursing into nested dimensions for multi-dimensional indexes.
 * @param array The array to expand. (NotNull)
 * @param indexes The requested indexes per dimension. (NotNull, NotEmpty)
 * @param elementType The component type of the deepest dimension. (NotNull)
 * @return The (possibly re-created) array large enough for the indexes. (NotNull)
 */
public Object expandArray(Object array, int[] indexes, Class<?> elementType) {
int length = Array.getLength(array);
if (length <= indexes[0]) {
int[] newIndexes = new int[indexes.length];
newIndexes[0] = indexes[0] + 1; // grow to exactly fit the requested index
// deeper dimensions stay 0 here; they are expanded by the recursion below
Object newArray = Array.newInstance(elementType, newIndexes);
System.arraycopy(array, 0, newArray, 0, length);
array = newArray;
}
if (indexes.length > 1) {
// drop the first index and recurse into the nested array at that position
int[] newIndexes = new int[indexes.length - 1];
for (int i = 1; i < indexes.length; i++) {
newIndexes[i - 1] = indexes[i];
}
Array.set(array, indexes[0], expandArray(Array.get(array, indexes[0]), newIndexes, elementType));
}
return array;
}
/**
 * Converts an abstract Map type to a concrete HashMap so it can be instantiated;
 * other types are returned as-is.
 * @param clazz The class to convert. (NotNull)
 * @return The instantiable class. (NotNull)
 */
public Class<?> convertArrayClass(Class<?> clazz) {
return LdiModifierUtil.isAbstract(clazz) && Map.class.isAssignableFrom(clazz) ? HashMap.class : clazz;
}
// ===================================================================================
// JSON Assist
// ===========
/**
 * Chooses the JSON engine used to parse the request body.
 * An application-provided engine from the mapping option takes precedence;
 * otherwise the framework-managed JsonManager is used.
 * @param runtime The runtime of the current action. (NotNull)
 * @param option The form mapping option that may hold a custom engine provider. (NotNull)
 * @return The JSON engine to use for request mapping. (NotNull)
 */
public JsonObjectConvertible chooseJsonObjectConvertible(ActionRuntime runtime, FormMappingOption option) {
    return option.getRequestJsonEngineProvider()
            .map(engineProvider -> (JsonObjectConvertible) engineProvider.apply(runtime))
            .orElseGet(this::getJsonManager);
}
/**
 * Provides the framework's JSON manager via the request manager.
 * @return The JSON manager component. (NotNull)
 */
protected JsonManager getJsonManager() {
return requestManager.getJsonManager();
}
/**
 * Re-parses the request JSON as a plain map for debug display after typed mapping failed.
 * Parse failures are swallowed on purpose: this is a best-effort retry used
 * only to enrich error messages.
 * @param json The request body that failed typed mapping. (NotNull)
 * @return The parsed map, or an empty map when parsing fails again. (NotNull)
 */
@SuppressWarnings("unchecked")
protected Map<String, Object> retryJsonAsMapForDebug(String json) {
    Map<String, Object> mapped;
    try {
        mapped = getJsonManager().fromJson(json, Map.class);
    } catch (RuntimeException ignored) { // debug purpose only, never propagate
        mapped = Collections.emptyMap();
    }
    return mapped;
}
/**
 * Re-parses the request JSON as a generic list of maps for debug display
 * after typed mapping failed. Failures are swallowed because this is a
 * best-effort retry used only to enrich error messages.
 * @param json The request body that failed typed mapping. (NotNull)
 * @return The parsed list of maps, or an empty list when parsing fails again. (NotNull)
 */
protected List<Map<String, Object>> retryJsonListAsMapForDebug(String json) {
    final Type listType = new ParameterizedRef<List<Map<String, Object>>>() {
    }.getType();
    try {
        return getJsonManager().fromJsonParameteried(json, listType);
    } catch (RuntimeException ignored) { // debug purpose only, never propagate
        return Collections.emptyList();
    }
}
/**
 * Creates one debug challenge entry for the JSON parse-failure display.
 * @param retryMap The map re-parsed from the failed JSON, source of the mapped value. (NotNull)
 * @param propertyName The name of the form property being challenged. (NotNull)
 * @param propertyType The declared type of the property. (NotNull)
 * @param elementIndex The index when the property is a list element. (NullAllowed)
 * @return The new-created challenge for the property. (NotNull)
 */
protected JsonDebugChallenge createJsonDebugChallenge(Map<String, Object> retryMap, String propertyName, Class<?> propertyType,
        Integer elementIndex) {
    // use the shared accessor for consistency with retryJsonAsMapForDebug()/retryJsonListAsMapForDebug()
    final JsonManager jsonManager = getJsonManager();
    final Object mappedValue = retryMap.get(propertyName);
    return new JsonDebugChallenge(jsonManager, propertyName, propertyType, mappedValue, elementIndex);
}
// ===================================================================================
// Client Error
// ============
/**
 * Appends the standard client-error header to the message buffer:
 * title, mapping target (form class and property with its type),
 * the requested value, and the optional debug-challenge display.
 * @param sb The message buffer to append to. (NotNull)
 * @param title The headline shown in brackets. (NotNull)
 * @param bean The action form instance being mapped. (NotNull)
 * @param name The property name being mapped. (NotNull)
 * @param value The requested value. (NullAllowed: then "null" is shown)
 * @param propertyType The declared type of the property. (NotNull)
 * @param challengeDisp The debug-challenge display, beginning with LF. (NullAllowed, EmptyAllowed)
 */
public void buildClientErrorHeader(StringBuilder sb, String title, Object bean, String name, Object value, Type propertyType,
        String challengeDisp) {
    sb.append(LF).append(LF);
    sb.append("[").append(title).append("]");
    sb.append(LF).append("Mapping To: ");
    sb.append(bean.getClass().getSimpleName());
    sb.append("@").append(name);
    sb.append(" (").append(propertyType.getTypeName()).append(")");
    sb.append(LF).append("Requested Value: ");
    if (value == null) {
        sb.append("null");
    } else {
        final String exp = value.toString();
        if (exp.contains(LF)) { // multi-line value: start it on its own line
            sb.append(LF);
        }
        sb.append(exp);
    }
    final boolean hasChallenge = challengeDisp != null && challengeDisp.length() > 0;
    if (hasChallenge) {
        sb.append(LF).append(buildDebugChallengeTitle());
        sb.append(challengeDisp); // challenge display already starts with LF
    }
}
/**
 * Builds the legend line shown above the debug-challenge display.
 * @return The fixed title text explaining the challenge marks. (NotNull)
 */
public String buildDebugChallengeTitle() {
return "Debug Challenge: (o: maybe assignable, x: cannot, v: no value, ?: unknown)";
}
// not a server error because this can be triggered by a user's trick;
// it is most likely a client bug (or server), so report it as a client error
/**
 * Throws the client-error exception for a request JSON body that could not be parsed.
 * @param msg The exception message already built by the caller. (NotNull)
 * @param challengeList The per-property debug challenges for the failure display. (NotNull)
 * @param cause The parse failure thrown by the JSON engine. (NotNull)
 */
public void throwRequestJsonParseFailureException(String msg, List<JsonDebugChallenge> challengeList, RuntimeException cause) {
throw new RequestJsonParseFailureException(msg, getRequestJsonParseFailureMessages(), cause).withChallengeList(challengeList);
}
/**
 * Provides the user messages attached to the JSON parse-failure exception.
 * Empty by default; override to supply localized messages.
 * @return The user messages for the client-error response. (NotNull, EmptyAllowed)
 */
protected UserMessages getRequestJsonParseFailureMessages() {
return UserMessages.empty();
}
/**
 * Throws the client-error exception for a request property that could not be mapped.
 * @param msg The exception message already built by the caller. (NotNull)
 */
public void throwRequestPropertyMappingFailureException(String msg) {
throw new RequestPropertyMappingFailureException(msg, getRequestPropertyMappingFailureMessages());
}
/**
 * Throws the client-error exception for a request property that could not be mapped.
 * @param msg The exception message already built by the caller. (NotNull)
 * @param cause The underlying failure, kept as the exception cause. (NotNull)
 */
public void throwRequestPropertyMappingFailureException(String msg, RuntimeException cause) {
throw new RequestPropertyMappingFailureException(msg, getRequestPropertyMappingFailureMessages(), cause);
}
/**
 * Provides the user messages attached to the property mapping-failure exception.
 * Empty by default; override to supply localized messages.
 * @return The user messages for the client-error response. (NotNull, EmptyAllowed)
 */
protected UserMessages getRequestPropertyMappingFailureMessages() {
return UserMessages.empty();
}
/**
 * Throws when the request has a parameter with no corresponding property in the form.
 * The message lists the form's defined properties and the offending parameter
 * so the developer can spot the mismatch quickly.
 * @param bean The action form instance being mapped. (NotNull)
 * @param name The undefined request parameter name. (NotNull)
 * @param value The parameter value; may be an array for multi-value parameters. (NullAllowed)
 * @param option The form mapping option in effect, shown for diagnosis. (NotNull)
 * @param beanDesc The bean description of the form class. (NotNull)
 */
public void throwRequestUndefinedParameterInFormException(Object bean, String name, Object value, FormMappingOption option,
BeanDesc beanDesc) {
final ExceptionMessageBuilder br = new ExceptionMessageBuilder();
br.addNotice("Undefined parameter in the form.");
br.addItem("Advice");
br.addElement("Request parameters should be related to any property of form.");
br.addElement("For example:");
br.addElement("  (x): ?sea=mystic&land=oneman");
br.addElement("  public class MaihamaForm { // *Bad: 'land' is undefined");
br.addElement("    public String sea;");
br.addElement("  }");
br.addElement("  (o): ?sea=mystic&land=oneman");
br.addElement("  public class MaihamaForm {");
br.addElement("    public String sea;");
br.addElement("    public String land; // Good");
br.addElement("  }");
br.addElement("");
br.addElement("If you want to ignore the parameter from this check,");
br.addElement("adjust FormMappingOption in ActionAdjustmentProvider.");
br.addItem("Action Form");
br.addElement(bean.getClass().getName());
br.addItem("Defined Property");
// list defined property names, five per line
final StringBuilder propertySb = new StringBuilder();
for (int i = 0; i < beanDesc.getPropertyDescSize(); i++) {
propertySb.append(i % 5 == 4 ? "\n" : ""); // NOTE(review): break is appended before the comma separator — confirm intended formatting
propertySb.append(i > 0 ? ", " : "");
propertySb.append(beanDesc.getPropertyDesc(i).getPropertyName());
}
br.addElement(propertySb);
br.addItem("Requested Parameter");
br.addElement(name + "=" + (value instanceof Object[] ? Arrays.asList((Object[]) value) : value));
br.addItem("Mapping Option");
br.addElement(option);
final String msg = br.buildExceptionMessage();
throw new RequestUndefinedParameterInFormException(msg, getRequestUndefinedParameterInFormMessages());
}
/**
 * Provides the user messages attached to the undefined-parameter exception.
 * Empty by default; override to supply localized messages.
 * @return The user messages for the client-error response. (NotNull, EmptyAllowed)
 */
protected UserMessages getRequestUndefinedParameterInFormMessages() {
return UserMessages.empty();
}
}
| |
package com.example.ligang.commonlibrary.swipeback;
import android.app.Activity;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.graphics.drawable.Drawable;
import android.support.v4.view.ViewCompat;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import com.example.ligang.commonlibrary.R;
import java.util.ArrayList;
import java.util.List;
/**
 * A FrameLayout wrapped around an activity's decor content that lets the user
 * drag the activity away from a tracked edge (left, right and/or bottom) and
 * finishes the activity once the drag passes the scroll threshold.
 */
public class SwipeBackLayout extends FrameLayout {
    /**
     * Minimum velocity that will be detected as a fling
     */
    private static final int MIN_FLING_VELOCITY = 400; // dips per second

    private static final int DEFAULT_SCRIM_COLOR = 0x99000000;

    /**
     * Edge flag indicating that the left edge should be affected.
     */
    public static final int EDGE_LEFT = ViewDragHelper.EDGE_LEFT;

    /**
     * Edge flag indicating that the right edge should be affected.
     */
    public static final int EDGE_RIGHT = ViewDragHelper.EDGE_RIGHT;

    /**
     * Edge flag indicating that the bottom edge should be affected.
     */
    public static final int EDGE_BOTTOM = ViewDragHelper.EDGE_BOTTOM;

    /**
     * Edge flag set indicating all edges should be affected.
     */
    public static final int EDGE_ALL = EDGE_LEFT | EDGE_RIGHT | EDGE_BOTTOM;

    /**
     * A view is not currently being dragged or animating as a result of a
     * fling/snap.
     */
    public static final int STATE_IDLE = ViewDragHelper.STATE_IDLE;

    /**
     * A view is currently being dragged. The position is currently changing as
     * a result of user input or simulated user input.
     */
    public static final int STATE_DRAGGING = ViewDragHelper.STATE_DRAGGING;

    /**
     * A view is currently settling into place as a result of a fling or
     * predefined non-interactive motion.
     */
    public static final int STATE_SETTLING = ViewDragHelper.STATE_SETTLING;

    /**
     * Default threshold of scroll
     */
    private static final float DEFAULT_SCROLL_THRESHOLD = 0.3f;

    // extra pixels the content travels past the screen edge when settling out
    private static final int OVERSCROLL_DISTANCE = 10;

    // index order must match the 'edge_flag' attribute's enum values
    private static final int[] EDGE_FLAGS = { EDGE_LEFT, EDGE_RIGHT,
            EDGE_BOTTOM, EDGE_ALL };

    private int mEdgeFlag;

    /**
     * Threshold of scroll, we will close the activity, when scrollPercent over
     * this value;
     */
    private float mScrollThreshold = DEFAULT_SCROLL_THRESHOLD;

    private Activity mActivity;

    private boolean mEnable = true;

    private View mContentView;

    private ViewDragHelper mDragHelper;

    private float mScrollPercent;

    private int mContentLeft;

    private int mContentTop;

    /**
     * The set of listeners to be sent events through.
     */
    private List<SwipeListener> mListeners;

    private Drawable mShadowLeft;

    private Drawable mShadowRight;

    private Drawable mShadowBottom;

    private float mScrimOpacity;

    private int mScrimColor = DEFAULT_SCRIM_COLOR;

    private boolean mInLayout;

    private Rect mTmpRect = new Rect();

    /**
     * Edge being dragged
     */
    private int mTrackingEdge;

    public SwipeBackLayout(Context context) {
        this(context, null);
    }

    public SwipeBackLayout(Context context, AttributeSet attrs) {
        this(context, attrs, R.attr.SwipeBackLayoutStyle);
    }

    public SwipeBackLayout(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs);
        mDragHelper = ViewDragHelper.create(this, new ViewDragCallback());

        TypedArray a = context.obtainStyledAttributes(attrs,
                R.styleable.SwipeBackLayout, defStyle, R.style.SwipeBackLayout);

        int edgeSize = a.getDimensionPixelSize(
                R.styleable.SwipeBackLayout_edge_size, -1);
        if (edgeSize > 0)
            setEdgeSize(edgeSize);
        int mode = EDGE_FLAGS[a
                .getInt(R.styleable.SwipeBackLayout_edge_flag, 0)];
        setEdgeTrackingEnabled(mode);

        int shadowLeft = a
                .getResourceId(R.styleable.SwipeBackLayout_shadow_left,
                        R.drawable.shadow_left);
        int shadowRight = a.getResourceId(
                R.styleable.SwipeBackLayout_shadow_right,
                R.drawable.shadow_right);
        int shadowBottom = a.getResourceId(
                R.styleable.SwipeBackLayout_shadow_bottom,
                R.drawable.shadow_bottom);
        setShadow(shadowLeft, EDGE_LEFT);
        setShadow(shadowRight, EDGE_RIGHT);
        setShadow(shadowBottom, EDGE_BOTTOM);
        a.recycle();

        final float density = getResources().getDisplayMetrics().density;
        final float minVel = MIN_FLING_VELOCITY * density;
        mDragHelper.setMinVelocity(minVel);
    }

    /**
     * Set up contentView which will be moved by user gesture
     *
     * @param view the view to drag
     */
    private void setContentView(View view) {
        mContentView = view;
    }

    /**
     * Enable or disable the swipe-back gesture entirely.
     *
     * @param enable true to react to touch events, false to ignore them
     */
    public void setEnableGesture(boolean enable) {
        mEnable = enable;
    }

    /**
     * Enable edge tracking for the selected edges of the parent view. The
     * callback's
     * methods will only be invoked for edges for which edge tracking has been
     * enabled.
     *
     * @param edgeFlags
     *            Combination of edge flags describing the edges to watch
     * @see #EDGE_LEFT
     * @see #EDGE_RIGHT
     * @see #EDGE_BOTTOM
     */
    public void setEdgeTrackingEnabled(int edgeFlags) {
        mEdgeFlag = edgeFlags;
        mDragHelper.setEdgeTrackingEnabled(mEdgeFlag);
    }

    /**
     * Set a color to use for the scrim that obscures primary content while a
     * drawer is open.
     *
     * @param color
     *            Color to use in 0xAARRGGBB format.
     */
    public void setScrimColor(int color) {
        mScrimColor = color;
        invalidate();
    }

    /**
     * Set the size of an edge. This is the range in pixels along the edges of
     * this view that will actively detect edge touches or drags if edge
     * tracking is enabled.
     *
     * @param size
     *            The size of an edge in pixels
     */
    public void setEdgeSize(int size) {
        mDragHelper.setEdgeSize(size);
    }

    /**
     * Register a callback to be invoked when a swipe event is sent to this
     * view.
     *
     * @param listener
     *            the swipe listener to attach to this view
     * @deprecated use {@link #addSwipeListener} instead
     */
    @Deprecated
    public void setSwipeListener(SwipeListener listener) {
        addSwipeListener(listener);
    }

    /**
     * Add a callback to be invoked when a swipe event is sent to this view.
     *
     * @param listener
     *            the swipe listener to attach to this view
     */
    public void addSwipeListener(SwipeListener listener) {
        if (mListeners == null) {
            mListeners = new ArrayList<SwipeListener>();
        }
        mListeners.add(listener);
    }

    /**
     * Removes a listener from the set of listeners
     *
     * @param listener the listener to remove; no-op if never added
     */
    public void removeSwipeListener(SwipeListener listener) {
        if (mListeners == null) {
            return;
        }
        mListeners.remove(listener);
    }

    public interface SwipeListener {
        /**
         * Invoke when state change
         *
         * @param state
         *            flag to describe scroll state
         * @see #STATE_IDLE
         * @see #STATE_DRAGGING
         * @see #STATE_SETTLING
         * @param scrollPercent
         *            scroll percent of this view
         */
        public void onScrollStateChange(int state, float scrollPercent);

        /**
         * Invoke when edge touched
         *
         * @param edgeFlag
         *            edge flag describing the edge being touched
         * @see #EDGE_LEFT
         * @see #EDGE_RIGHT
         * @see #EDGE_BOTTOM
         */
        public void onEdgeTouch(int edgeFlag);

        /**
         * Invoke when scroll percent over the threshold for the first time
         */
        public void onScrollOverThreshold();
    }

    /**
     * Set scroll threshold, we will close the activity, when scrollPercent over
     * this value
     *
     * @param threshold a value in the exclusive range (0, 1)
     */
    public void setScrollThresHold(float threshold) {
        if (threshold >= 1.0f || threshold <= 0) {
            throw new IllegalArgumentException(
                    "Threshold value should be between 0 and 1.0");
        }
        mScrollThreshold = threshold;
    }

    /**
     * Set a drawable used for edge shadow.
     *
     * @param shadow
     *            Drawable to use
     * @param edgeFlag
     *            Combination of edge flags describing the edge to set
     * @see #EDGE_LEFT
     * @see #EDGE_RIGHT
     * @see #EDGE_BOTTOM
     */
    public void setShadow(Drawable shadow, int edgeFlag) {
        if ((edgeFlag & EDGE_LEFT) != 0) {
            mShadowLeft = shadow;
        } else if ((edgeFlag & EDGE_RIGHT) != 0) {
            mShadowRight = shadow;
        } else if ((edgeFlag & EDGE_BOTTOM) != 0) {
            mShadowBottom = shadow;
        }
        invalidate();
    }

    /**
     * Set a drawable used for edge shadow.
     *
     * @param resId
     *            Resource of drawable to use
     * @param edgeFlag
     *            Combination of edge flags describing the edge to set
     * @see #EDGE_LEFT
     * @see #EDGE_RIGHT
     * @see #EDGE_BOTTOM
     */
    public void setShadow(int resId, int edgeFlag) {
        setShadow(getResources().getDrawable(resId), edgeFlag);
    }

    /**
     * Scroll out contentView and finish the activity
     */
    public void scrollToFinishActivity() {
        final int childWidth = mContentView.getWidth();
        final int childHeight = mContentView.getHeight();

        int left = 0, top = 0;
        if ((mEdgeFlag & EDGE_LEFT) != 0) {
            left = childWidth + mShadowLeft.getIntrinsicWidth()
                    + OVERSCROLL_DISTANCE;
            mTrackingEdge = EDGE_LEFT;
        } else if ((mEdgeFlag & EDGE_RIGHT) != 0) {
            left = -childWidth - mShadowRight.getIntrinsicWidth()
                    - OVERSCROLL_DISTANCE;
            mTrackingEdge = EDGE_RIGHT;
        } else if ((mEdgeFlag & EDGE_BOTTOM) != 0) {
            top = -childHeight - mShadowBottom.getIntrinsicHeight()
                    - OVERSCROLL_DISTANCE;
            mTrackingEdge = EDGE_BOTTOM;
        }

        mDragHelper.smoothSlideViewTo(mContentView, left, top);
        invalidate();
    }

    @Override
    public boolean onInterceptTouchEvent(MotionEvent event) {
        if (!mEnable) {
            return false;
        }
        try {
            return mDragHelper.shouldInterceptTouchEvent(event);
        } catch (ArrayIndexOutOfBoundsException e) {
            // FIXME: handle exception
            // issues #9
            return false;
        }
    }

    @Override
    public boolean onTouchEvent(MotionEvent event) {
        if (!mEnable) {
            return false;
        }
        mDragHelper.processTouchEvent(event);
        return true;
    }

    @Override
    protected void onLayout(boolean changed, int left, int top, int right,
            int bottom) {
        mInLayout = true;
        if (mContentView != null)
            mContentView.layout(mContentLeft, mContentTop, mContentLeft
                    + mContentView.getMeasuredWidth(), mContentTop
                    + mContentView.getMeasuredHeight());
        mInLayout = false;
    }

    @Override
    public void requestLayout() {
        // suppress re-entrant layout requests while we reposition the content
        if (!mInLayout) {
            super.requestLayout();
        }
    }

    @Override
    protected boolean drawChild(Canvas canvas, View child, long drawingTime) {
        final boolean drawContent = child == mContentView;

        drawShadow(canvas, child);
        boolean ret = super.drawChild(canvas, child, drawingTime);
        if (mScrimOpacity > 0 && drawContent
                && mDragHelper.getViewDragState() != ViewDragHelper.STATE_IDLE) {
            drawScrim(canvas, child);
        }
        return ret;
    }

    // darken the area the content has been dragged away from
    private void drawScrim(Canvas canvas, View child) {
        final int baseAlpha = (mScrimColor & 0xff000000) >>> 24;
        final int alpha = (int) (baseAlpha * mScrimOpacity);
        final int color = alpha << 24 | (mScrimColor & 0xffffff);

        if ((mTrackingEdge & EDGE_LEFT) != 0) {
            canvas.clipRect(0, 0, child.getLeft(), getHeight());
        } else if ((mTrackingEdge & EDGE_RIGHT) != 0) {
            canvas.clipRect(child.getRight(), 0, getRight(), getHeight());
        } else if ((mTrackingEdge & EDGE_BOTTOM) != 0) {
            canvas.clipRect(child.getLeft(), child.getBottom(), getRight(),
                    getHeight());
        }
        canvas.drawColor(color);
    }

    // draw the edge shadow just outside the content's current bounds
    private void drawShadow(Canvas canvas, View child) {
        final Rect childRect = mTmpRect;
        child.getHitRect(childRect);

        if ((mEdgeFlag & EDGE_LEFT) != 0) {
            mShadowLeft.setBounds(
                    childRect.left - mShadowLeft.getIntrinsicWidth(),
                    childRect.top, childRect.left, childRect.bottom);
            mShadowLeft.draw(canvas);
        }

        if ((mEdgeFlag & EDGE_RIGHT) != 0) {
            mShadowRight.setBounds(childRect.right, childRect.top,
                    childRect.right + mShadowRight.getIntrinsicWidth(),
                    childRect.bottom);
            mShadowRight.draw(canvas);
        }

        if ((mEdgeFlag & EDGE_BOTTOM) != 0) {
            mShadowBottom.setBounds(childRect.left, childRect.bottom,
                    childRect.right,
                    childRect.bottom + mShadowBottom.getIntrinsicHeight());
            mShadowBottom.draw(canvas);
        }
    }

    /**
     * Re-parents the activity's decor child into this layout so that swiping
     * moves the whole activity content. Call once after setContentView().
     *
     * @param activity the activity to attach to
     */
    public void attachToActivity(Activity activity) {
        mActivity = activity;
        TypedArray a = activity.getTheme().obtainStyledAttributes(
                new int[] { android.R.attr.windowBackground });
        int background = a.getResourceId(0, 0);
        a.recycle();

        ViewGroup decor = (ViewGroup) activity.getWindow().getDecorView();
        ViewGroup decorChild = (ViewGroup) decor.getChildAt(0);
        decorChild.setBackgroundResource(background);
        decor.removeView(decorChild);
        addView(decorChild);
        setContentView(decorChild);
        decor.addView(this);
    }

    @Override
    public void computeScroll() {
        // scrim fades out as the content moves away
        mScrimOpacity = 1 - mScrollPercent;
        if (mDragHelper.continueSettling(true)) {
            ViewCompat.postInvalidateOnAnimation(this);
        }
    }

    /** Bridges ViewDragHelper callbacks to the swipe-back behavior. */
    private class ViewDragCallback extends ViewDragHelper.Callback {
        // reset on each capture; guards onScrollOverThreshold from firing repeatedly
        private boolean mIsScrollOverValid;

        @Override
        public boolean tryCaptureView(View view, int pointerId) {
            boolean ret = mDragHelper.isEdgeTouched(mEdgeFlag, pointerId);
            if (ret) {
                if (mDragHelper.isEdgeTouched(EDGE_LEFT, pointerId)) {
                    mTrackingEdge = EDGE_LEFT;
                } else if (mDragHelper.isEdgeTouched(EDGE_RIGHT, pointerId)) {
                    mTrackingEdge = EDGE_RIGHT;
                } else if (mDragHelper.isEdgeTouched(EDGE_BOTTOM, pointerId)) {
                    mTrackingEdge = EDGE_BOTTOM;
                }
                if (mListeners != null && !mListeners.isEmpty()) {
                    for (SwipeListener listener : mListeners) {
                        listener.onEdgeTouch(mTrackingEdge);
                    }
                }
                mIsScrollOverValid = true;
            }
            return ret;
        }

        @Override
        public int getViewHorizontalDragRange(View child) {
            return mEdgeFlag & (EDGE_LEFT | EDGE_RIGHT);
        }

        @Override
        public int getViewVerticalDragRange(View child) {
            return mEdgeFlag & EDGE_BOTTOM;
        }

        @Override
        public void onViewPositionChanged(View changedView, int left, int top,
                int dx, int dy) {
            super.onViewPositionChanged(changedView, left, top, dx, dy);
            if ((mTrackingEdge & EDGE_LEFT) != 0) {
                mScrollPercent = Math.abs((float) left
                        / (mContentView.getWidth() + mShadowLeft
                                .getIntrinsicWidth()));
            } else if ((mTrackingEdge & EDGE_RIGHT) != 0) {
                mScrollPercent = Math.abs((float) left
                        / (mContentView.getWidth() + mShadowRight
                                .getIntrinsicWidth()));
            } else if ((mTrackingEdge & EDGE_BOTTOM) != 0) {
                mScrollPercent = Math.abs((float) top
                        / (mContentView.getHeight() + mShadowBottom
                                .getIntrinsicHeight()));
            }
            mContentLeft = left;
            mContentTop = top;
            invalidate();
            if (mScrollPercent < mScrollThreshold && !mIsScrollOverValid) {
                mIsScrollOverValid = true;
            }
            if (mListeners != null && !mListeners.isEmpty()
                    && mDragHelper.getViewDragState() == STATE_DRAGGING
                    && mScrollPercent >= mScrollThreshold && mIsScrollOverValid) {
                mIsScrollOverValid = false;
                for (SwipeListener listener : mListeners) {
                    listener.onScrollOverThreshold();
                }
            }

            if (mScrollPercent >= 1) {
                if (!mActivity.isFinishing())
                    mActivity.finish();
            }
        }

        @Override
        public void onViewReleased(View releasedChild, float xvel, float yvel) {
            final int childWidth = releasedChild.getWidth();
            final int childHeight = releasedChild.getHeight();

            int left = 0, top = 0;
            if ((mTrackingEdge & EDGE_LEFT) != 0) {
                left = xvel > 0 || xvel == 0
                        && mScrollPercent > mScrollThreshold ? childWidth
                        + mShadowLeft.getIntrinsicWidth() + OVERSCROLL_DISTANCE
                        : 0;
            } else if ((mTrackingEdge & EDGE_RIGHT) != 0) {
                // FIX: previously used mShadowLeft here; the right edge must account
                // for the right shadow's width (matches scrollToFinishActivity and
                // the EDGE_RIGHT branch of onViewPositionChanged)
                left = xvel < 0 || xvel == 0
                        && mScrollPercent > mScrollThreshold ? -(childWidth
                        + mShadowRight.getIntrinsicWidth() + OVERSCROLL_DISTANCE)
                        : 0;
            } else if ((mTrackingEdge & EDGE_BOTTOM) != 0) {
                top = yvel < 0 || yvel == 0
                        && mScrollPercent > mScrollThreshold ? -(childHeight
                        + mShadowBottom.getIntrinsicHeight() + OVERSCROLL_DISTANCE)
                        : 0;
            }

            mDragHelper.settleCapturedViewAt(left, top);
            invalidate();
        }

        @Override
        public int clampViewPositionHorizontal(View child, int left, int dx) {
            int ret = 0;
            if ((mTrackingEdge & EDGE_LEFT) != 0) {
                ret = Math.min(child.getWidth(), Math.max(left, 0));
            } else if ((mTrackingEdge & EDGE_RIGHT) != 0) {
                ret = Math.min(0, Math.max(left, -child.getWidth()));
            }
            return ret;
        }

        @Override
        public int clampViewPositionVertical(View child, int top, int dy) {
            int ret = 0;
            if ((mTrackingEdge & EDGE_BOTTOM) != 0) {
                ret = Math.min(0, Math.max(top, -child.getHeight()));
            }
            return ret;
        }

        @Override
        public void onViewDragStateChanged(int state) {
            super.onViewDragStateChanged(state);
            if (mListeners != null && !mListeners.isEmpty()) {
                for (SwipeListener listener : mListeners) {
                    listener.onScrollStateChange(state, mScrollPercent);
                }
            }
        }
    }
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: GetProductVersionCommand.proto
package com.alachisoft.tayzgrid.common.protobuf;
public final class GetProductVersionCommandProtocol {
private GetProductVersionCommandProtocol() {}
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistry registry) {
}
public static final class GetProductVersionCommand extends
com.google.protobuf.GeneratedMessage {
// Use GetProductVersionCommand.newBuilder() to construct.
private GetProductVersionCommand() {
initFields();
}
private GetProductVersionCommand(boolean noInit) {}
private static final GetProductVersionCommand defaultInstance;
public static GetProductVersionCommand getDefaultInstance() {
return defaultInstance;
}
public GetProductVersionCommand getDefaultInstanceForType() {
return defaultInstance;
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.alachisoft.tayzgrid.common.protobuf.GetProductVersionCommandProtocol.internal_static_com_alachisoft_tayzgrid_common_protobuf_GetProductVersionCommand_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.alachisoft.tayzgrid.common.protobuf.GetProductVersionCommandProtocol.internal_static_com_alachisoft_tayzgrid_common_protobuf_GetProductVersionCommand_fieldAccessorTable;
}
// optional int64 requestId = 1;
public static final int REQUESTID_FIELD_NUMBER = 1;
private boolean hasRequestId;
private long requestId_ = 0L;
public boolean hasRequestId() { return hasRequestId; }
public long getRequestId() { return requestId_; }
// optional string userId = 2 [default = "dummyUser"];
public static final int USERID_FIELD_NUMBER = 2;
private boolean hasUserId;
private java.lang.String userId_ = "dummyUser";
public boolean hasUserId() { return hasUserId; }
public java.lang.String getUserId() { return userId_; }
// optional string pwd = 3 [default = "dummypassword"];
public static final int PWD_FIELD_NUMBER = 3;
private boolean hasPwd;
private java.lang.String pwd_ = "dummypassword";
public boolean hasPwd() { return hasPwd; }
public java.lang.String getPwd() { return pwd_; }
private void initFields() {
}
public final boolean isInitialized() {
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (hasRequestId()) {
output.writeInt64(1, getRequestId());
}
if (hasUserId()) {
output.writeString(2, getUserId());
}
if (hasPwd()) {
output.writeString(3, getPwd());
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (hasRequestId()) {
size += com.google.protobuf.CodedOutputStream
.computeInt64Size(1, getRequestId());
}
if (hasUserId()) {
size += com.google.protobuf.CodedOutputStream
.computeStringSize(2, getUserId());
}
if (hasPwd()) {
size += com.google.protobuf.CodedOutputStream
.computeStringSize(3, getPwd());
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
public static com.alachisoft.tayzgrid.common.protobuf.GetProductVersionCommandProtocol.GetProductVersionCommand parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
}
public static com.alachisoft.tayzgrid.common.protobuf.GetProductVersionCommandProtocol.GetProductVersionCommand parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static com.alachisoft.tayzgrid.common.protobuf.GetProductVersionCommandProtocol.GetProductVersionCommand parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
}
public static com.alachisoft.tayzgrid.common.protobuf.GetProductVersionCommandProtocol.GetProductVersionCommand parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static com.alachisoft.tayzgrid.common.protobuf.GetProductVersionCommandProtocol.GetProductVersionCommand parseFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
}
public static com.alachisoft.tayzgrid.common.protobuf.GetProductVersionCommandProtocol.GetProductVersionCommand parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static com.alachisoft.tayzgrid.common.protobuf.GetProductVersionCommandProtocol.GetProductVersionCommand parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
public static com.alachisoft.tayzgrid.common.protobuf.GetProductVersionCommandProtocol.GetProductVersionCommand parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
}
public static com.alachisoft.tayzgrid.common.protobuf.GetProductVersionCommandProtocol.GetProductVersionCommand parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
}
public static com.alachisoft.tayzgrid.common.protobuf.GetProductVersionCommandProtocol.GetProductVersionCommand parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(com.alachisoft.tayzgrid.common.protobuf.GetProductVersionCommandProtocol.GetProductVersionCommand prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder> {
private com.alachisoft.tayzgrid.common.protobuf.GetProductVersionCommandProtocol.GetProductVersionCommand result;
// Construct using com.alachisoft.tayzgrid.common.protobuf.GetProductVersionCommandProtocol.GetProductVersionCommand.newBuilder()
private Builder() {}
private static Builder create() {
Builder builder = new Builder();
builder.result = new com.alachisoft.tayzgrid.common.protobuf.GetProductVersionCommandProtocol.GetProductVersionCommand();
return builder;
}
protected com.alachisoft.tayzgrid.common.protobuf.GetProductVersionCommandProtocol.GetProductVersionCommand internalGetResult() {
return result;
}
public Builder clear() {
if (result == null) {
throw new IllegalStateException(
"Cannot call clear() after build().");
}
result = new com.alachisoft.tayzgrid.common.protobuf.GetProductVersionCommandProtocol.GetProductVersionCommand();
return this;
}
public Builder clone() {
return create().mergeFrom(result);
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.alachisoft.tayzgrid.common.protobuf.GetProductVersionCommandProtocol.GetProductVersionCommand.getDescriptor();
}
public com.alachisoft.tayzgrid.common.protobuf.GetProductVersionCommandProtocol.GetProductVersionCommand getDefaultInstanceForType() {
return com.alachisoft.tayzgrid.common.protobuf.GetProductVersionCommandProtocol.GetProductVersionCommand.getDefaultInstance();
}
public boolean isInitialized() {
return result.isInitialized();
}
public com.alachisoft.tayzgrid.common.protobuf.GetProductVersionCommandProtocol.GetProductVersionCommand build() {
if (result != null && !isInitialized()) {
throw newUninitializedMessageException(result);
}
return buildPartial();
}
private com.alachisoft.tayzgrid.common.protobuf.GetProductVersionCommandProtocol.GetProductVersionCommand buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
if (!isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return buildPartial();
}
public com.alachisoft.tayzgrid.common.protobuf.GetProductVersionCommandProtocol.GetProductVersionCommand buildPartial() {
if (result == null) {
throw new IllegalStateException(
"build() has already been called on this Builder.");
}
com.alachisoft.tayzgrid.common.protobuf.GetProductVersionCommandProtocol.GetProductVersionCommand returnMe = result;
result = null;
return returnMe;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.alachisoft.tayzgrid.common.protobuf.GetProductVersionCommandProtocol.GetProductVersionCommand) {
return mergeFrom((com.alachisoft.tayzgrid.common.protobuf.GetProductVersionCommandProtocol.GetProductVersionCommand)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.alachisoft.tayzgrid.common.protobuf.GetProductVersionCommandProtocol.GetProductVersionCommand other) {
if (other == com.alachisoft.tayzgrid.common.protobuf.GetProductVersionCommandProtocol.GetProductVersionCommand.getDefaultInstance()) return this;
if (other.hasRequestId()) {
setRequestId(other.getRequestId());
}
if (other.hasUserId()) {
setUserId(other.getUserId());
}
if (other.hasPwd()) {
setPwd(other.getPwd());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
return this;
}
break;
}
case 8: {
setRequestId(input.readInt64());
break;
}
case 18: {
setUserId(input.readString());
break;
}
case 26: {
setPwd(input.readString());
break;
}
}
}
}
// optional int64 requestId = 1;
public boolean hasRequestId() {
return result.hasRequestId();
}
public long getRequestId() {
return result.getRequestId();
}
public Builder setRequestId(long value) {
result.hasRequestId = true;
result.requestId_ = value;
return this;
}
public Builder clearRequestId() {
result.hasRequestId = false;
result.requestId_ = 0L;
return this;
}
// optional string userId = 2 [default = "dummyUser"];
public boolean hasUserId() {
return result.hasUserId();
}
public java.lang.String getUserId() {
return result.getUserId();
}
public Builder setUserId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
result.hasUserId = true;
result.userId_ = value;
return this;
}
public Builder clearUserId() {
result.hasUserId = false;
result.userId_ = getDefaultInstance().getUserId();
return this;
}
// optional string pwd = 3 [default = "dummypassword"];
// NOTE(review): the password travels as a plain proto string; confirm the
// transport is secured at a higher layer.
/** @return whether pwd was explicitly set on the message being built. */
public boolean hasPwd() {
  return result.hasPwd();
}
/** @return the pwd, or the proto default "dummypassword" if unset. */
public java.lang.String getPwd() {
  return result.getPwd();
}
/**
 * Sets pwd and marks it present.
 * @throws NullPointerException if {@code value} is null
 */
public Builder setPwd(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  result.hasPwd = true;
  result.pwd_ = value;
  return this;
}
/** Clears pwd back to the proto default taken from the default instance. */
public Builder clearPwd() {
  result.hasPwd = false;
  result.pwd_ = getDefaultInstance().getPwd();
  return this;
}
// @@protoc_insertion_point(builder_scope:com.alachisoft.tayzgrid.common.protobuf.GetProductVersionCommand)
}
static {
  // Eagerly build the shared default instance; force the outer protocol
  // class's static initializers to run first so descriptors exist before
  // initFields() reads the proto defaults.
  defaultInstance = new GetProductVersionCommand(true);
  com.alachisoft.tayzgrid.common.protobuf.GetProductVersionCommandProtocol.internalForceInit();
  defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:com.alachisoft.tayzgrid.common.protobuf.GetProductVersionCommand)
}
// Reflection plumbing: filled in by the descriptor-assigning static
// initializer below.
private static com.google.protobuf.Descriptors.Descriptor
    internal_static_com_alachisoft_tayzgrid_common_protobuf_GetProductVersionCommand_descriptor;
private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
    internal_static_com_alachisoft_tayzgrid_common_protobuf_GetProductVersionCommand_fieldAccessorTable;
/** @return the FileDescriptor for GetProductVersionCommand.proto. */
public static com.google.protobuf.Descriptors.FileDescriptor
    getDescriptor() {
  return descriptor;
}
private static com.google.protobuf.Descriptors.FileDescriptor
    descriptor;
static {
  // Serialized FileDescriptorProto for GetProductVersionCommand.proto,
  // embedded as an escaped byte string. Generated data — do not edit.
  java.lang.String[] descriptorData = {
    "\n\036GetProductVersionCommand.proto\022\'com.al" +
    "achisoft.tayzgrid.common.protobuf\"d\n\030Get" +
    "ProductVersionCommand\022\021\n\trequestId\030\001 \001(\003" +
    "\022\031\n\006userId\030\002 \001(\t:\tdummyUser\022\032\n\003pwd\030\003 \001(\t" +
    ":\rdummypasswordB\"B GetProductVersionComm" +
    "andProtocol"
  };
  // Callback invoked once the file descriptor is built; wires up the
  // message descriptor and the reflection accessor table.
  com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
            com.google.protobuf.Descriptors.FileDescriptor root) {
          descriptor = root;
          internal_static_com_alachisoft_tayzgrid_common_protobuf_GetProductVersionCommand_descriptor =
              getDescriptor().getMessageTypes().get(0);
          internal_static_com_alachisoft_tayzgrid_common_protobuf_GetProductVersionCommand_fieldAccessorTable = new
              com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                  internal_static_com_alachisoft_tayzgrid_common_protobuf_GetProductVersionCommand_descriptor,
                  new java.lang.String[] { "RequestId", "UserId", "Pwd", },
                  com.alachisoft.tayzgrid.common.protobuf.GetProductVersionCommandProtocol.GetProductVersionCommand.class,
                  com.alachisoft.tayzgrid.common.protobuf.GetProductVersionCommandProtocol.GetProductVersionCommand.Builder.class);
          // No extensions in this file.
          return null;
        }
      };
  com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
          new com.google.protobuf.Descriptors.FileDescriptor[] {
          }, assigner);
}
/** No-op whose only purpose is to trigger this class's static initializers. */
public static void internalForceInit() {}
// @@protoc_insertion_point(outer_class_scope)
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.util;
import static org.apache.hadoop.fs.CreateFlag.CREATE;
import static org.apache.hadoop.fs.CreateFlag.OVERWRITE;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeTrue;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.jar.JarEntry;
import java.util.jar.JarOutputStream;
import java.util.jar.Manifest;
import java.util.zip.GZIPOutputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import org.apache.hadoop.util.concurrent.HadoopExecutors;
import org.apache.hadoop.yarn.api.records.URL;
import org.junit.Assert;
import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalDirAllocator;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.api.records.LocalResourceType;
import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.junit.AfterClass;
import org.junit.Test;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
/**
* Unit test for the FSDownload class.
*/
public class TestFSDownload {
private static final Logger LOG =
    LoggerFactory.getLogger(TestFSDownload.class);
// Seeded with wall-clock time so each run produces fresh, unique
// destination sub-directory names.
private static AtomicLong uniqueNumberGenerator =
    new AtomicLong(System.currentTimeMillis());
// Archive flavors exercised by downloadWithFileType().
private enum TEST_FILE_TYPE {
  TAR, JAR, ZIP, TGZ
};
// Per-instance configuration; tests mutate it (umask, local dirs) freely.
private Configuration conf = new Configuration();
@AfterClass
public static void deleteTestDir() throws IOException {
  // Recursively remove everything the suite wrote under target/.
  final Path testDir = new Path("target", TestFSDownload.class.getSimpleName());
  FileContext.getLocalFSFileContext().delete(testDir, true);
}
// Shared factory for instantiating YARN records (LocalResource etc.).
static final RecordFactory recordFactory =
    RecordFactoryProvider.getRecordFactory(null);
/**
 * Writes {@code len} random bytes to {@code p} and wraps the file in a
 * FILE-type LocalResource with the given visibility and the file's
 * actual modification time as its timestamp.
 */
static LocalResource createFile(FileContext files, Path p, int len,
    Random r, LocalResourceVisibility vis) throws IOException {
  createFile(files, p, len, r);
  LocalResource ret = recordFactory.newRecordInstance(LocalResource.class);
  ret.setResource(URL.fromPath(p));
  ret.setSize(len);
  ret.setType(LocalResourceType.FILE);
  ret.setVisibility(vis);
  ret.setTimestamp(files.getFileStatus(p).getModificationTime());
  return ret;
}
/**
 * Creates (or overwrites) {@code p} and fills it with {@code len} random
 * bytes drawn from {@code r}.
 *
 * @throws IOException if the file cannot be created or written
 */
static void createFile(FileContext files, Path p, int len, Random r)
    throws IOException {
  byte[] bytes = new byte[len];
  r.nextBytes(bytes);
  // try-with-resources replaces the manual try/finally close, guaranteeing
  // the stream is closed even if write() throws.
  try (FSDataOutputStream out = files.create(p, EnumSet.of(CREATE, OVERWRITE))) {
    out.write(bytes);
  }
}
/**
 * Writes a jar at {@code p} containing two tiny entries
 * ("classes/1.class", "classes/2.class") and returns a PATTERN-type
 * LocalResource whose pattern matches "classes/.*".
 */
static LocalResource createJar(FileContext files, Path p,
    LocalResourceVisibility vis) throws IOException {
  LOG.info("Create jar file " + p);
  File jarFile = new File((files.makeQualified(p)).toUri());
  LOG.info("Create jar out stream ");
  // BUG FIX: the original leaked both streams when an entry write threw;
  // try-with-resources closes them (inner first) on every path.
  try (FileOutputStream stream = new FileOutputStream(jarFile);
      JarOutputStream out = new JarOutputStream(stream, new Manifest())) {
    for (String entryName : new String[] {"classes/1.class", "classes/2.class"}) {
      out.putNextEntry(new ZipEntry(entryName));
      // Identical 3-byte payload per entry; content is irrelevant to the tests.
      out.write(1);
      out.write(2);
      out.write(3);
      out.closeEntry();
    }
    LOG.info("Done writing jar stream ");
  }
  LocalResource ret = recordFactory.newRecordInstance(LocalResource.class);
  ret.setResource(URL.fromPath(p));
  FileStatus status = files.getFileStatus(p);
  ret.setSize(status.getLen());
  ret.setTimestamp(status.getModificationTime());
  ret.setType(LocalResourceType.PATTERN);
  ret.setVisibility(vis);
  ret.setPattern("classes/.*");
  return ret;
}
/**
 * Creates "{@code p}.tar" holding a single entry of {@code len} random
 * bytes and returns an ARCHIVE-type LocalResource for it. Note the
 * reported size is the payload length, not the tar file length.
 */
static LocalResource createTarFile(FileContext files, Path p, int len,
    Random r, LocalResourceVisibility vis) throws IOException,
    URISyntaxException {
  byte[] bytes = new byte[len];
  r.nextBytes(bytes);
  File archiveFile = new File(p.toUri().getPath() + ".tar");
  archiveFile.createNewFile();
  // BUG FIX: the original leaked the streams on a failed write;
  // try-with-resources closes both on every path.
  try (FileOutputStream fos = new FileOutputStream(archiveFile);
      TarArchiveOutputStream out = new TarArchiveOutputStream(fos)) {
    TarArchiveEntry entry = new TarArchiveEntry(p.getName());
    entry.setSize(bytes.length);
    out.putArchiveEntry(entry);
    out.write(bytes);
    out.closeArchiveEntry();
  }
  LocalResource ret = recordFactory.newRecordInstance(LocalResource.class);
  ret.setResource(URL.fromPath(new Path(p.toString()
      + ".tar")));
  ret.setSize(len);
  ret.setType(LocalResourceType.ARCHIVE);
  ret.setVisibility(vis);
  ret.setTimestamp(files.getFileStatus(new Path(p.toString() + ".tar"))
      .getModificationTime());
  return ret;
}
/**
 * Creates "{@code p}.tar.gz" holding a single entry of {@code len} random
 * bytes and returns an ARCHIVE-type LocalResource for it.
 */
static LocalResource createTgzFile(FileContext files, Path p, int len,
    Random r, LocalResourceVisibility vis) throws IOException,
    URISyntaxException {
  byte[] bytes = new byte[len];
  r.nextBytes(bytes);
  File gzipFile = new File(p.toUri().getPath() + ".tar.gz");
  gzipFile.createNewFile();
  // BUG FIX: the original leaked the stream chain on a failed write;
  // try-with-resources closes every layer (inner first) on all paths.
  try (FileOutputStream fos = new FileOutputStream(gzipFile);
      GZIPOutputStream gzip = new GZIPOutputStream(fos);
      TarArchiveOutputStream out = new TarArchiveOutputStream(gzip)) {
    TarArchiveEntry entry = new TarArchiveEntry(p.getName());
    entry.setSize(bytes.length);
    out.putArchiveEntry(entry);
    out.write(bytes);
    out.closeArchiveEntry();
  }
  LocalResource ret = recordFactory.newRecordInstance(LocalResource.class);
  ret.setResource(URL.fromPath(new Path(p.toString()
      + ".tar.gz")));
  ret.setSize(len);
  ret.setType(LocalResourceType.ARCHIVE);
  ret.setVisibility(vis);
  ret.setTimestamp(files.getFileStatus(new Path(p.toString() + ".tar.gz"))
      .getModificationTime());
  return ret;
}
/**
 * Creates "{@code p}.jar" holding a single entry of {@code len} random
 * bytes and returns an ARCHIVE-type LocalResource for it.
 */
static LocalResource createJarFile(FileContext files, Path p, int len,
    Random r, LocalResourceVisibility vis) throws IOException,
    URISyntaxException {
  byte[] bytes = new byte[len];
  r.nextBytes(bytes);
  File archiveFile = new File(p.toUri().getPath() + ".jar");
  archiveFile.createNewFile();
  // BUG FIX: the original leaked the streams on a failed write;
  // try-with-resources closes both on every path.
  try (FileOutputStream fos = new FileOutputStream(archiveFile);
      JarOutputStream out = new JarOutputStream(fos)) {
    out.putNextEntry(new JarEntry(p.getName()));
    out.write(bytes);
    out.closeEntry();
  }
  LocalResource ret = recordFactory.newRecordInstance(LocalResource.class);
  ret.setResource(URL.fromPath(new Path(p.toString()
      + ".jar")));
  ret.setSize(len);
  ret.setType(LocalResourceType.ARCHIVE);
  ret.setVisibility(vis);
  ret.setTimestamp(files.getFileStatus(new Path(p.toString() + ".jar"))
      .getModificationTime());
  return ret;
}
/**
 * Creates "{@code p}.ZIP" (upper-case extension is deliberate — see the
 * Turkish-locale test) holding a single entry of {@code len} random bytes
 * and returns an ARCHIVE-type LocalResource for it.
 */
static LocalResource createZipFile(FileContext files, Path p, int len,
    Random r, LocalResourceVisibility vis) throws IOException,
    URISyntaxException {
  byte[] bytes = new byte[len];
  r.nextBytes(bytes);
  File archiveFile = new File(p.toUri().getPath() + ".ZIP");
  archiveFile.createNewFile();
  // BUG FIX: the original leaked the streams on a failed write;
  // try-with-resources closes both on every path.
  try (FileOutputStream fos = new FileOutputStream(archiveFile);
      ZipOutputStream out = new ZipOutputStream(fos)) {
    out.putNextEntry(new ZipEntry(p.getName()));
    out.write(bytes);
    out.closeEntry();
  }
  LocalResource ret = recordFactory.newRecordInstance(LocalResource.class);
  ret.setResource(URL.fromPath(new Path(p.toString()
      + ".ZIP")));
  ret.setSize(len);
  ret.setType(LocalResourceType.ARCHIVE);
  ret.setVisibility(vis);
  ret.setTimestamp(files.getFileStatus(new Path(p.toString() + ".ZIP"))
      .getModificationTime());
  return ret;
}
/**
 * Localizing a PUBLIC resource from a directory that is not world-readable
 * (umask 077) must fail: the download future is expected to complete
 * exceptionally with an IOException cause.
 */
@Test (timeout=10000)
public void testDownloadBadPublic() throws IOException, URISyntaxException,
    InterruptedException {
  // Restrictive umask makes the created file non-public on disk.
  conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "077");
  FileContext files = FileContext.getLocalFSFileContext(conf);
  final Path basedir = files.makeQualified(new Path("target",
      TestFSDownload.class.getSimpleName()));
  files.mkdir(basedir, null, true);
  conf.setStrings(TestFSDownload.class.getName(), basedir.toString());
  Map<LocalResource, LocalResourceVisibility> rsrcVis =
      new HashMap<LocalResource, LocalResourceVisibility>();
  Random rand = new Random();
  long sharedSeed = rand.nextLong();
  rand.setSeed(sharedSeed);
  // Print the seed so a failing run can be reproduced.
  System.out.println("SEED: " + sharedSeed);
  Map<LocalResource,Future<Path>> pending =
      new HashMap<LocalResource,Future<Path>>();
  ExecutorService exec = HadoopExecutors.newSingleThreadExecutor();
  LocalDirAllocator dirs =
      new LocalDirAllocator(TestFSDownload.class.getName());
  int size = 512;
  LocalResourceVisibility vis = LocalResourceVisibility.PUBLIC;
  Path path = new Path(basedir, "test-file");
  LocalResource rsrc = createFile(files, path, size, rand, vis);
  rsrcVis.put(rsrc, vis);
  Path destPath = dirs.getLocalPathForWrite(
      basedir.toString(), size, conf);
  // Unique leaf so concurrent/repeated runs never collide.
  destPath = new Path (destPath,
      Long.toString(uniqueNumberGenerator.incrementAndGet()));
  FSDownload fsd =
      new FSDownload(files, UserGroupInformation.getCurrentUser(), conf,
          destPath, rsrc);
  pending.put(rsrc, exec.submit(fsd));
  exec.shutdown();
  // Busy-wait (bounded by the @Test timeout) until the download finishes.
  while (!exec.awaitTermination(1000, TimeUnit.MILLISECONDS));
  Assert.assertTrue(pending.get(rsrc).isDone());
  try {
    for (Map.Entry<LocalResource,Future<Path>> p : pending.entrySet()) {
      // get() must throw: the resource claims PUBLIC but is not readable.
      p.getValue().get();
      Assert.fail("We localized a file that is not public.");
    }
  } catch (ExecutionException e) {
    Assert.assertTrue(e.getCause() instanceof IOException);
  }
}
/**
 * Verifies that concurrent FSDownload.isPublic() calls share the stat
 * cache: each of several files must trigger exactly one load through the
 * counting cache loader, and every file must be reported public.
 */
@Test (timeout=60000)
public void testDownloadPublicWithStatCache() throws IOException,
    URISyntaxException, InterruptedException, ExecutionException {
  FileContext files = FileContext.getLocalFSFileContext(conf);
  Path basedir = files.makeQualified(new Path("target",
      TestFSDownload.class.getSimpleName()));
  // if test directory doesn't have ancestor permission, skip this test
  FileSystem f = basedir.getFileSystem(conf);
  assumeTrue(FSDownload.ancestorsHaveExecutePermissions(f, basedir, null));
  files.mkdir(basedir, null, true);
  conf.setStrings(TestFSDownload.class.getName(), basedir.toString());
  int size = 512;
  // Tracks how many times the cache loader ran per path.
  final ConcurrentMap<Path,AtomicInteger> counts =
      new ConcurrentHashMap<Path,AtomicInteger>();
  final CacheLoader<Path,Future<FileStatus>> loader =
      FSDownload.createStatusCacheLoader(conf);
  final LoadingCache<Path,Future<FileStatus>> statCache =
      CacheBuilder.newBuilder().build(new CacheLoader<Path,Future<FileStatus>>() {
    public Future<FileStatus> load(Path path) throws Exception {
      // increment the count
      AtomicInteger count = counts.get(path);
      if (count == null) {
        count = new AtomicInteger(0);
        AtomicInteger existing = counts.putIfAbsent(path, count);
        if (existing != null) {
          count = existing;
        }
      }
      count.incrementAndGet();
      // use the default loader
      return loader.load(path);
    }
  });
  // test FSDownload.isPublic() concurrently
  final int fileCount = 3;
  List<Callable<Boolean>> tasks = new ArrayList<Callable<Boolean>>();
  for (int i = 0; i < fileCount; i++) {
    Random rand = new Random();
    long sharedSeed = rand.nextLong();
    rand.setSeed(sharedSeed);
    System.out.println("SEED: " + sharedSeed);
    final Path path = new Path(basedir, "test-file-" + i);
    createFile(files, path, size, rand);
    final FileSystem fs = path.getFileSystem(conf);
    final FileStatus sStat = fs.getFileStatus(path);
    tasks.add(new Callable<Boolean>() {
      public Boolean call() throws IOException {
        return FSDownload.isPublic(fs, path, sStat, statCache);
      }
    });
  }
  ExecutorService exec = HadoopExecutors.newFixedThreadPool(fileCount);
  try {
    List<Future<Boolean>> futures = exec.invokeAll(tasks);
    // files should be public
    for (Future<Boolean> future: futures) {
      assertTrue(future.get());
    }
    // for each path exactly one file status call should be made
    for (AtomicInteger count: counts.values()) {
      // BUG FIX: assertSame compared autoboxed Integers by identity (it only
      // passed thanks to the small-integer cache) and reversed the
      // expected/actual order; compare by value instead.
      assertTrue("expected exactly one stat call but saw " + count.get(),
          count.get() == 1);
    }
  } finally {
    exec.shutdown();
  }
}
/**
 * Localizes ten random files of varying visibility, then verifies the
 * localized sizes, the 0755 cache-directory permissions, and the private
 * file permissions FSDownload is expected to apply.
 */
@Test (timeout=10000)
public void testDownload() throws IOException, URISyntaxException,
    InterruptedException {
  conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "077");
  FileContext files = FileContext.getLocalFSFileContext(conf);
  final Path basedir = files.makeQualified(new Path("target",
      TestFSDownload.class.getSimpleName()));
  files.mkdir(basedir, null, true);
  conf.setStrings(TestFSDownload.class.getName(), basedir.toString());
  Map<LocalResource, LocalResourceVisibility> rsrcVis =
      new HashMap<LocalResource, LocalResourceVisibility>();
  Random rand = new Random();
  long sharedSeed = rand.nextLong();
  rand.setSeed(sharedSeed);
  // Print the seed so a failing run can be reproduced.
  System.out.println("SEED: " + sharedSeed);
  Map<LocalResource,Future<Path>> pending =
      new HashMap<LocalResource,Future<Path>>();
  ExecutorService exec = HadoopExecutors.newSingleThreadExecutor();
  LocalDirAllocator dirs =
      new LocalDirAllocator(TestFSDownload.class.getName());
  int[] sizes = new int[10];
  for (int i = 0; i < 10; ++i) {
    sizes[i] = rand.nextInt(512) + 512;
    // Alternate PRIVATE/APPLICATION visibility across the ten files.
    LocalResourceVisibility vis = LocalResourceVisibility.PRIVATE;
    if (i%2 == 1) {
      vis = LocalResourceVisibility.APPLICATION;
    }
    Path p = new Path(basedir, "" + i);
    LocalResource rsrc = createFile(files, p, sizes[i], rand, vis);
    rsrcVis.put(rsrc, vis);
    Path destPath = dirs.getLocalPathForWrite(
        basedir.toString(), sizes[i], conf);
    destPath = new Path (destPath,
        Long.toString(uniqueNumberGenerator.incrementAndGet()));
    FSDownload fsd =
        new FSDownload(files, UserGroupInformation.getCurrentUser(), conf,
            destPath, rsrc);
    pending.put(rsrc, exec.submit(fsd));
  }
  exec.shutdown();
  while (!exec.awaitTermination(1000, TimeUnit.MILLISECONDS));
  for (Future<Path> path: pending.values()) {
    Assert.assertTrue(path.isDone());
  }
  try {
    for (Map.Entry<LocalResource,Future<Path>> p : pending.entrySet()) {
      Path localized = p.getValue().get();
      // The localized file is named after its index; check the size matches.
      assertEquals(sizes[Integer.parseInt(localized.getName())], p.getKey()
          .getSize());
      FileStatus status = files.getFileStatus(localized.getParent());
      FsPermission perm = status.getPermission();
      assertEquals("Cache directory permissions are incorrect",
          new FsPermission((short)0755), perm);
      status = files.getFileStatus(localized);
      perm = status.getPermission();
      System.out.println("File permission " + perm +
          " for rsrc vis " + p.getKey().getVisibility().name());
      // BUG FIX: was a bare `assert`, silently skipped unless the JVM runs
      // with -ea; use a JUnit assertion so the check always executes.
      Assert.assertTrue("resource was never registered in rsrcVis",
          rsrcVis.containsKey(p.getKey()));
      Assert.assertTrue("Private file should be 500",
          perm.toShort() == FSDownload.PRIVATE_FILE_PERMS.toShort());
    }
  } catch (ExecutionException e) {
    throw new IOException("Failed exec", e);
  }
}
/**
 * Localizes one archive of the given type into a unique destination and
 * verifies that (a) a ZIP archive is unpacked into a directory named after
 * the archive rather than left as a flat file, and (b) no temporary
 * "tmp*" working file survives the download.
 */
private void downloadWithFileType(TEST_FILE_TYPE fileType) throws IOException,
    URISyntaxException, InterruptedException{
  conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "077");
  FileContext files = FileContext.getLocalFSFileContext(conf);
  final Path basedir = files.makeQualified(new Path("target",
      TestFSDownload.class.getSimpleName()));
  files.mkdir(basedir, null, true);
  conf.setStrings(TestFSDownload.class.getName(), basedir.toString());
  Random rand = new Random();
  long sharedSeed = rand.nextLong();
  rand.setSeed(sharedSeed);
  // Print the seed so a failing run can be reproduced.
  System.out.println("SEED: " + sharedSeed);
  Map<LocalResource, Future<Path>> pending = new HashMap<LocalResource, Future<Path>>();
  ExecutorService exec = HadoopExecutors.newSingleThreadExecutor();
  LocalDirAllocator dirs = new LocalDirAllocator(
      TestFSDownload.class.getName());
  int size = rand.nextInt(512) + 512;
  LocalResourceVisibility vis = LocalResourceVisibility.PRIVATE;
  Path p = new Path(basedir, "" + 1);
  // Only the ZIP case records a file name; the flat-file check below is a
  // no-op for the other archive types.
  String strFileName = "";
  LocalResource rsrc = null;
  switch (fileType) {
  case TAR:
    rsrc = createTarFile(files, p, size, rand, vis);
    break;
  case JAR:
    rsrc = createJarFile(files, p, size, rand, vis);
    rsrc.setType(LocalResourceType.PATTERN);
    break;
  case ZIP:
    rsrc = createZipFile(files, p, size, rand, vis);
    strFileName = p.getName() + ".ZIP";
    break;
  case TGZ:
    rsrc = createTgzFile(files, p, size, rand, vis);
    break;
  }
  Path destPath = dirs.getLocalPathForWrite(basedir.toString(), size, conf);
  destPath = new Path (destPath,
      Long.toString(uniqueNumberGenerator.incrementAndGet()));
  FSDownload fsd = new FSDownload(files,
      UserGroupInformation.getCurrentUser(), conf, destPath, rsrc);
  pending.put(rsrc, exec.submit(fsd));
  exec.shutdown();
  // Busy-wait (bounded by the @Test timeout) until the download finishes.
  while (!exec.awaitTermination(1000, TimeUnit.MILLISECONDS));
  try {
    pending.get(rsrc).get(); // see if there was an Exception during download
    FileStatus[] filesstatus = files.getDefaultFileSystem().listStatus(
        basedir);
    for (FileStatus filestatus : filesstatus) {
      if (filestatus.isDirectory()) {
        FileStatus[] childFiles = files.getDefaultFileSystem().listStatus(
            filestatus.getPath());
        for (FileStatus childfile : childFiles) {
          // A localized ZIP must become a directory, not remain a file.
          if(strFileName.endsWith(".ZIP") &&
              childfile.getPath().getName().equals(strFileName) &&
              !childfile.isDirectory()) {
            Assert.fail("Failure...After unzip, there should have been a" +
                " directory formed with zip file name but found a file. "
                + childfile.getPath());
          }
          // Temporary working files must be cleaned up after localization.
          if (childfile.getPath().getName().startsWith("tmp")) {
            Assert.fail("Tmp File should not have been there "
                + childfile.getPath());
          }
        }
      }
    }
  }catch (Exception e) {
    throw new IOException("Failed exec", e);
  }
}
/** Localizes a .tar archive end to end. */
@Test (timeout=10000)
public void testDownloadArchive() throws IOException, URISyntaxException,
    InterruptedException {
  downloadWithFileType(TEST_FILE_TYPE.TAR);
}
/** Localizes a PATTERN-type jar end to end. */
@Test (timeout=10000)
public void testDownloadPatternJar() throws IOException, URISyntaxException,
    InterruptedException {
  downloadWithFileType(TEST_FILE_TYPE.JAR);
}
/** Localizes a .ZIP archive end to end. */
@Test (timeout=10000)
public void testDownloadArchiveZip() throws IOException, URISyntaxException,
    InterruptedException {
  downloadWithFileType(TEST_FILE_TYPE.ZIP);
}
/*
 * To test fix for YARN-3029
 */
/**
 * Runs the ZIP localization under the Turkish locale, whose lower-casing
 * rules (dotless i) broke case-insensitive ".ZIP" extension matching.
 */
@Test (timeout=10000)
public void testDownloadArchiveZipWithTurkishLocale() throws IOException,
    URISyntaxException, InterruptedException {
  Locale defaultLocale = Locale.getDefault();
  // Set to Turkish
  Locale turkishLocale = new Locale("tr", "TR");
  Locale.setDefault(turkishLocale);
  try {
    downloadWithFileType(TEST_FILE_TYPE.ZIP);
  } finally {
    // BUG FIX: restore in a finally block — the original left the JVM in
    // the Turkish locale for every subsequent test when the download threw.
    Locale.setDefault(defaultLocale);
  }
}
/** Localizes a .tar.gz archive end to end. */
@Test (timeout=10000)
public void testDownloadArchiveTgz() throws IOException, URISyntaxException,
    InterruptedException {
  downloadWithFileType(TEST_FILE_TYPE.TGZ);
}
/**
 * Recursively asserts that every file and directory under {@code p}
 * carries the permissions FSDownload assigns for the given visibility.
 * Symlinked directories are not descended into.
 */
private void verifyPermsRecursively(FileSystem fs,
    FileContext files, Path p,
    LocalResourceVisibility vis) throws IOException {
  FileStatus status = files.getFileStatus(p);
  if (status.isDirectory()) {
    short expected = vis == LocalResourceVisibility.PUBLIC
        ? FSDownload.PUBLIC_DIR_PERMS.toShort()
        : FSDownload.PRIVATE_DIR_PERMS.toShort();
    // assertEquals (was assertTrue on ==) reports both values on failure.
    Assert.assertEquals("wrong directory permissions for " + p,
        expected, status.getPermission().toShort());
    if (!status.isSymlink()) {
      FileStatus[] statuses = fs.listStatus(p);
      for (FileStatus stat : statuses) {
        verifyPermsRecursively(fs, files, stat.getPath(), vis);
      }
    }
  }
  else {
    short expected = vis == LocalResourceVisibility.PUBLIC
        ? FSDownload.PUBLIC_FILE_PERMS.toShort()
        : FSDownload.PRIVATE_FILE_PERMS.toShort();
    Assert.assertEquals("wrong file permissions for " + p,
        expected, status.getPermission().toShort());
  }
}
/**
 * Localizes several PATTERN-type jars and verifies each becomes a
 * directory whose contents carry visibility-appropriate permissions.
 */
@Test (timeout=10000)
public void testDirDownload() throws IOException, InterruptedException {
  FileContext files = FileContext.getLocalFSFileContext(conf);
  final Path basedir = files.makeQualified(new Path("target",
      TestFSDownload.class.getSimpleName()));
  files.mkdir(basedir, null, true);
  conf.setStrings(TestFSDownload.class.getName(), basedir.toString());
  Map<LocalResource, LocalResourceVisibility> rsrcVis =
      new HashMap<LocalResource, LocalResourceVisibility>();
  Random rand = new Random();
  long sharedSeed = rand.nextLong();
  rand.setSeed(sharedSeed);
  // Print the seed so a failing run can be reproduced.
  System.out.println("SEED: " + sharedSeed);
  Map<LocalResource,Future<Path>> pending =
      new HashMap<LocalResource,Future<Path>>();
  ExecutorService exec = HadoopExecutors.newSingleThreadExecutor();
  LocalDirAllocator dirs =
      new LocalDirAllocator(TestFSDownload.class.getName());
  for (int i = 0; i < 5; ++i) {
    // Alternate PRIVATE/APPLICATION visibility across the five jars.
    LocalResourceVisibility vis = LocalResourceVisibility.PRIVATE;
    if (i%2 == 1) {
      vis = LocalResourceVisibility.APPLICATION;
    }
    Path p = new Path(basedir, "dir" + i + ".jar");
    LocalResource rsrc = createJar(files, p, vis);
    rsrcVis.put(rsrc, vis);
    Path destPath = dirs.getLocalPathForWrite(
        basedir.toString(), conf);
    destPath = new Path (destPath,
        Long.toString(uniqueNumberGenerator.incrementAndGet()));
    FSDownload fsd =
        new FSDownload(files, UserGroupInformation.getCurrentUser(), conf,
            destPath, rsrc);
    pending.put(rsrc, exec.submit(fsd));
  }
  exec.shutdown();
  while (!exec.awaitTermination(1000, TimeUnit.MILLISECONDS));
  for (Future<Path> path: pending.values()) {
    Assert.assertTrue(path.isDone());
  }
  try {
    for (Map.Entry<LocalResource,Future<Path>> p : pending.entrySet()) {
      Path localized = p.getValue().get();
      FileStatus status = files.getFileStatus(localized);
      System.out.println("Testing path " + localized);
      // BUG FIX: were bare `assert` statements, silently skipped unless the
      // JVM runs with -ea; use JUnit assertions so they always execute.
      Assert.assertTrue("localized path should be a directory",
          status.isDirectory());
      Assert.assertTrue("resource was never registered in rsrcVis",
          rsrcVis.containsKey(p.getKey()));
      verifyPermsRecursively(localized.getFileSystem(conf),
          files, localized, rsrcVis.get(p.getKey()));
    }
  } catch (ExecutionException e) {
    throw new IOException("Failed exec", e);
  }
}
/**
 * Verifies that FSDownload localizes directly into the caller-supplied
 * destination directory instead of creating an extra random subdirectory
 * beneath it.
 */
@Test (timeout=10000)
public void testUniqueDestinationPath() throws Exception {
  FileContext files = FileContext.getLocalFSFileContext(conf);
  final Path basedir = files.makeQualified(new Path("target",
      TestFSDownload.class.getSimpleName()));
  files.mkdir(basedir, null, true);
  conf.setStrings(TestFSDownload.class.getName(), basedir.toString());
  ExecutorService singleThreadedExec = HadoopExecutors
      .newSingleThreadExecutor();
  LocalDirAllocator dirs =
      new LocalDirAllocator(TestFSDownload.class.getName());
  Path destPath = dirs.getLocalPathForWrite(basedir.toString(), conf);
  // Unique leaf so repeated runs never collide.
  destPath =
      new Path(destPath, Long.toString(uniqueNumberGenerator
          .incrementAndGet()));
  Path p = new Path(basedir, "dir" + 0 + ".jar");
  LocalResourceVisibility vis = LocalResourceVisibility.PRIVATE;
  LocalResource rsrc = createJar(files, p, vis);
  FSDownload fsd =
      new FSDownload(files, UserGroupInformation.getCurrentUser(), conf,
          destPath, rsrc);
  Future<Path> rPath = singleThreadedExec.submit(fsd);
  singleThreadedExec.shutdown();
  // Busy-wait (bounded by the @Test timeout) until the download finishes.
  while (!singleThreadedExec.awaitTermination(1000, TimeUnit.MILLISECONDS));
  Assert.assertTrue(rPath.isDone());
  // Now FSDownload will not create a random directory to localize the
  // resource. Therefore the final localizedPath for the resource should be
  // destination directory (passed as an argument) + file name.
  Assert.assertEquals(destPath, rPath.get().getParent());
}
}
| |
/*
* Copyright 2015 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.common.collect.Lists;
import java.util.List;
/**
* Tests for {@link J2clPass}.
*/
public class J2clPassTest extends CompilerTestCase {
public J2clPassTest() {
  // Normalize the AST before the pass runs so it sees a canonical shape.
  this.enableNormalize();
}
/** Asserts that running the pass over {@code js} leaves it unchanged. */
private void testDoesntChange(List<SourceFile> js) {
  test(js, js);
}
@Override
protected CompilerPass getProcessor(final Compiler compiler) {
  // The pass under test.
  return new J2clPass(compiler);
}
/**
 * J2clPass should inline calls to the well-known Arrays/Casts bootstrap
 * functions, and rewrite Interface.$markImplementor(Class) calls into the
 * direct flag assignment, when the callees are defined as qualified global
 * names in their expected source files.
 */
public void testQualifiedInlines() {
  // Arrays functions.
  test(
      Lists.newArrayList(
          SourceFile.fromCode(
              "j2cl/transpiler/vmbootstrap/Arrays.impl.js",
              LINE_JOINER.join(
                  // Function definitions and calls are qualified globals.
                  "var Arrays = function() {};",
                  "Arrays.$create = function() { return 1; }",
                  "Arrays.$init = function() { return 2; }",
                  "Arrays.$instanceIsOfType = function() { return 3; }",
                  "Arrays.$castTo = function() { return 4; }",
                  "",
                  "alert(Arrays.$create());",
                  "alert(Arrays.$init());",
                  "alert(Arrays.$instanceIsOfType());",
                  "alert(Arrays.$castTo());"))),
      Lists.newArrayList(
          SourceFile.fromCode(
              "j2cl/transpiler/vmbootstrap/Arrays.impl.js",
              LINE_JOINER.join(
                  // Function definitions and calls are qualified globals.
                  "var Arrays = function() {};",
                  "Arrays.$create = function() { return 1; }",
                  "Arrays.$init = function() { return 2; }",
                  "Arrays.$instanceIsOfType = function() { return 3; }",
                  "Arrays.$castTo = function() { return 4; }",
                  "",
                  "alert(1);",
                  "alert(2);",
                  "alert(3);",
                  "alert(4);"))));
  // Casts functions.
  test(
      Lists.newArrayList(
          SourceFile.fromCode(
              "j2cl/transpiler/vmbootstrap/Casts.impl.js",
              LINE_JOINER.join(
                  // Function definitions and calls are qualified globals.
                  "var Casts = function() {};",
                  "Casts.to = function() { return 1; }",
                  "",
                  "alert(Casts.to());"))),
      Lists.newArrayList(
          SourceFile.fromCode(
              "j2cl/transpiler/vmbootstrap/Casts.impl.js",
              LINE_JOINER.join(
                  // Function definitions and calls are qualified globals.
                  "var Casts = function() {};",
                  "Casts.to = function() { return 1; }",
                  "",
                  "alert(1);"))));
  // Interface $markImplementor() functions.
  test(
      Lists.newArrayList(
          SourceFile.fromCode(
              "name/doesnt/matter/Foo.impl.js",
              LINE_JOINER.join(
                  // Function definitions and calls are qualified globals.
                  "var FooInterface = function() {};",
                  "FooInterface.$markImplementor = function(classDef) {",
                  " classDef.$implements__FooInterface = true;",
                  "}",
                  "",
                  "var Foo = function() {};",
                  "FooInterface.$markImplementor(Foo);"))),
      Lists.newArrayList(
          SourceFile.fromCode(
              "name/doesnt/matter/Foo.impl.js",
              LINE_JOINER.join(
                  // Function definitions and calls are qualified globals.
                  "var FooInterface = function() {};",
                  "FooInterface.$markImplementor = function(classDef) {",
                  " classDef.$implements__FooInterface = true;",
                  "}",
                  "",
                  "var Foo = function() {};",
                  "{Foo.$implements__FooInterface = true;}"))));
}
/**
 * Inlining must still fire when the bootstrap classes live behind a
 * renamed/aliased qualifier ($jscomp.scope.*) rather than a bare global.
 *
 * NOTE(review): the "Casts functions" case below uses the plain `Casts`
 * qualifier, identical to testQualifiedInlines — it does not actually
 * exercise a renamed qualifier; confirm whether a $jscomp.scope variant
 * was intended.
 */
public void testRenamedQualifierStillInlines() {
  // Arrays functions.
  test(
      Lists.newArrayList(
          SourceFile.fromCode(
              "j2cl/transpiler/vmbootstrap/Arrays.impl.js",
              LINE_JOINER.join(
                  // Function definitions and calls are qualified globals.
                  "var $jscomp = {};",
                  "$jscomp.scope = {};",
                  "$jscomp.scope.Arrays = {};",
                  "$jscomp.scope.Arrays.$create = function() { return 1; }",
                  "$jscomp.scope.Arrays.$init = function() { return 2; }",
                  "$jscomp.scope.Arrays.$instanceIsOfType = function() { return 3; }",
                  "$jscomp.scope.Arrays.$castTo = function() { return 4; }",
                  "",
                  "alert($jscomp.scope.Arrays.$create());",
                  "alert($jscomp.scope.Arrays.$init());",
                  "alert($jscomp.scope.Arrays.$instanceIsOfType());",
                  "alert($jscomp.scope.Arrays.$castTo());"))),
      Lists.newArrayList(
          SourceFile.fromCode(
              "j2cl/transpiler/vmbootstrap/Arrays.impl.js",
              LINE_JOINER.join(
                  // Function definitions and calls are qualified globals.
                  "var $jscomp = {};",
                  "$jscomp.scope = {};",
                  "$jscomp.scope.Arrays = {};",
                  "$jscomp.scope.Arrays.$create = function() { return 1; }",
                  "$jscomp.scope.Arrays.$init = function() { return 2; }",
                  "$jscomp.scope.Arrays.$instanceIsOfType = function() { return 3; }",
                  "$jscomp.scope.Arrays.$castTo = function() { return 4; }",
                  "",
                  "alert(1);",
                  "alert(2);",
                  "alert(3);",
                  "alert(4);"))));
  // Casts functions.
  test(
      Lists.newArrayList(
          SourceFile.fromCode(
              "j2cl/transpiler/vmbootstrap/Casts.impl.js",
              LINE_JOINER.join(
                  // Function definitions and calls are qualified globals.
                  "var Casts = function() {};",
                  "Casts.to = function() { return 1; }",
                  "",
                  "alert(Casts.to());"))),
      Lists.newArrayList(
          SourceFile.fromCode(
              "j2cl/transpiler/vmbootstrap/Casts.impl.js",
              LINE_JOINER.join(
                  // Function definitions and calls are qualified globals.
                  "var Casts = function() {};",
                  "Casts.to = function() { return 1; }",
                  "",
                  "alert(1);"))));
  // Interface $markImplementor() functions.
  test(
      Lists.newArrayList(
          SourceFile.fromCode(
              "name/doesnt/matter/Foo.impl.js",
              LINE_JOINER.join(
                  // Function definitions and calls are qualified globals.
                  "var $jscomp = {};",
                  "$jscomp.scope = {};",
                  "$jscomp.scope.FooInterface = function() {};",
                  "$jscomp.scope.FooInterface.$markImplementor = function(classDef) {",
                  " classDef.$implements__FooInterface = true;",
                  "}",
                  "",
                  "$jscomp.scope.Foo = function() {};",
                  "$jscomp.scope.FooInterface.$markImplementor($jscomp.scope.Foo);"))),
      Lists.newArrayList(
          SourceFile.fromCode(
              "name/doesnt/matter/Foo.impl.js",
              LINE_JOINER.join(
                  // Function definitions and calls are qualified globals.
                  "var $jscomp = {};",
                  "$jscomp.scope = {};",
                  "$jscomp.scope.FooInterface = function() {};",
                  "$jscomp.scope.FooInterface.$markImplementor = function(classDef) {",
                  " classDef.$implements__FooInterface = true;",
                  "}",
                  "",
                  "$jscomp.scope.Foo = function() {};",
                  "{$jscomp.scope.Foo.$implements__FooInterface = true;}"))));
}
public void testUnexpectedFunctionDoesntInline() {
    // A function whose name is not one of the known inlinable ones must be left
    // untouched even when it lives in a recognized vmbootstrap file.

    // Arrays functions.
    String arraysSource =
        LINE_JOINER.join(
            // Function definitions and calls are qualified globals.
            "var Arrays = function() {};",
            "Arrays.fooBar = function() { return 4; }",
            "",
            "alert(Arrays.fooBar());");
    testDoesntChange(
        Lists.newArrayList(
            SourceFile.fromCode("j2cl/transpiler/vmbootstrap/Arrays.impl.js", arraysSource)));

    // Casts functions.
    String castsSource =
        LINE_JOINER.join(
            // Function definitions and calls are qualified globals.
            "var Casts = function() {};",
            "Casts.fooBar = function() { return 4; }",
            "",
            "alert(Casts.fooBar());");
    testDoesntChange(
        Lists.newArrayList(
            SourceFile.fromCode("j2cl/transpiler/vmbootstrap/Casts.impl.js", castsSource)));

    // Not applicable for $markImplementor() inlining since it is not limited to just certain class
    // files and so there are no specific files in which "other" functions should be ignored.
  }
public void testUnqualifiedDoesntInline() {
    // Inlining only applies to *qualified* names (e.g. Arrays.$create); bare
    // function names must not be rewritten even inside recognized files.

    // Arrays functions.
    testDoesntChange(
        Lists.newArrayList(
            SourceFile.fromCode(
                "j2cl/transpiler/vmbootstrap/Arrays.impl.js",
                LINE_JOINER.join(
                    // Function definitions and calls are unqualified here, so nothing inlines.
                    "var $create = function() { return 1; }",
                    "var $init = function() { return 2; }",
                    "var $instanceIsOfType = function() { return 3; }",
                    "var $castTo = function() { return 4; }",
                    "",
                    "alert($create());",
                    "alert($init());",
                    "alert($instanceIsOfType());",
                    "alert($castTo());"))));
    // Casts functions.
    testDoesntChange(
        Lists.newArrayList(
            SourceFile.fromCode(
                "j2cl/transpiler/vmbootstrap/Casts.impl.js",
                LINE_JOINER.join(
                    // Unqualified definition and call: must be left untouched.
                    "var to = function() { return 1; }", "", "alert(to());"))));
    // Interface $markImplementor() functions.
    // Note: input and expected output are identical, i.e. this asserts "no change"
    // for an unqualified $markImplementor definition/call.
    test(
        Lists.newArrayList(
            SourceFile.fromCode(
                "name/doesnt/matter/Foo.impl.js",
                LINE_JOINER.join(
                    // Unqualified $markImplementor: not a candidate for inlining.
                    "var $markImplementor = function(classDef) {",
                    "  classDef.$implements__FooInterface = true;",
                    "}",
                    "",
                    "var Foo = function() {};",
                    "$markImplementor(Foo);"))),
        Lists.newArrayList(
            SourceFile.fromCode(
                "name/doesnt/matter/Foo.impl.js",
                LINE_JOINER.join(
                    // Expected output is byte-identical to the input above.
                    "var $markImplementor = function(classDef) {",
                    "  classDef.$implements__FooInterface = true;",
                    "}",
                    "",
                    "var Foo = function() {};",
                    "$markImplementor(Foo);"))));
  }
public void testWrongFileNameDoesntInline() {
    // Inlining of Arrays/Casts helpers is keyed on the full vmbootstrap file path;
    // a bare "Arrays.impl.js"/"Casts.impl.js" outside that path must not inline.

    // Arrays functions.
    testDoesntChange(
        Lists.newArrayList(
            SourceFile.fromCode(
                "Arrays.impl.js",  // missing the j2cl/transpiler/vmbootstrap/ prefix
                LINE_JOINER.join(
                    // Function definitions and calls are qualified globals.
                    "var Arrays = function() {};",
                    "Arrays.$create = function() { return 1; }",
                    "Arrays.$init = function() { return 2; }",
                    "Arrays.$instanceIsOfType = function() { return 3; }",
                    "Arrays.$castTo = function() { return 4; }",
                    "",
                    "alert(Arrays.$create());",
                    "alert(Arrays.$init());",
                    "alert(Arrays.$instanceIsOfType());",
                    "alert(Arrays.$castTo());"))));
    // Casts functions.
    testDoesntChange(
        Lists.newArrayList(
            SourceFile.fromCode(
                "Casts.impl.js",  // missing the j2cl/transpiler/vmbootstrap/ prefix
                LINE_JOINER.join(
                    // Function definitions and calls are qualified globals.
                    "var Casts = function() {};",
                    "Casts.to = function() { return 1; }",
                    "",
                    "alert(Casts.to());"))));
    // Not applicable for $markImplementor() inlining since it is not limited to just certain class
    // files.
  }
}
| |
// Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.thoughtworks.selenium;
import java.io.File;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.lang.reflect.Method;
import java.util.regex.Pattern;
/**
* Provides a base class that implements some handy functionality for Selenium testing (you are
* <i>not</i> required to extend this class).
*
* <p>
* This class adds a number of "verify" commands, which are like "assert" commands, but they don't
* stop the test when they fail. Instead, verification errors are all thrown at once during
* tearDown.
* </p>
*
* @author Nelson Sproul (nsproul@bea.com) Mar 13-06
* @deprecated The RC interface will be removed in Selenium 3.0. Please migrate to using WebDriver.
*/
@Deprecated
@SuppressWarnings("JavaDoc")
public class SeleneseTestBase {

  /** True when running on Windows (path separator ";"); used to pick the default browser. */
  private static final boolean THIS_IS_WINDOWS = File.pathSeparator.equals(";");

  private boolean captureScreenShotOnFailure = false;

  /** Use this object to run all of your selenium tests */
  protected Selenium selenium;

  /** Accumulates "verify" failures; they are all reported at once during tearDown. */
  protected StringBuffer verificationErrors = new StringBuffer();

  public SeleneseTestBase() {
    super();
  }

  /**
   * Calls this.setUp(null)
   *
   * @see #setUp(String)
   * @throws Exception because why not
   */
  public void setUp() throws Exception {
    this.setUp(null);
  }

  /**
   * Calls this.setUp with the specified url and a default browser. On Windows, the default browser
   * is *iexplore; otherwise, the default browser is *firefox.
   *
   * @see #setUp(String, String)
   * @param url the baseUrl to use for your Selenium tests
   * @throws Exception just in case
   */
  public void setUp(String url) throws Exception {
    setUp(url, runtimeBrowserString());
  }

  /**
   * Resolves the browser string to use: the "selenium.defaultBrowser" system property if set
   * (unexpanded Ant-style "${...}" placeholders are treated as unset), otherwise a per-OS default.
   *
   * @return the browser launcher string, e.g. *firefox
   */
  protected String runtimeBrowserString() {
    String defaultBrowser = System.getProperty("selenium.defaultBrowser");
    if (null != defaultBrowser && defaultBrowser.startsWith("${")) {
      // Property came through an un-substituted build placeholder; ignore it.
      defaultBrowser = null;
    }
    if (defaultBrowser == null) {
      defaultBrowser = THIS_IS_WINDOWS ? "*iexplore" : "*firefox";
    }
    return defaultBrowser;
  }

  /**
   * Creates a new DefaultSelenium object and starts it using the specified baseUrl and browser
   * string. The port is selected as follows: if the server package's RemoteControlConfiguration
   * class is on the classpath, that class' default port is used. Otherwise, if the "server.port"
   * system property is specified, that is used - failing that, the default of 4444 is used.
   *
   * @see #setUp(String, String, int)
   * @param url the baseUrl for your tests
   * @param browserString the browser to use, e.g. *firefox
   * @throws Exception throws them all!
   */
  public void setUp(String url, String browserString) throws Exception {
    setUp(url, browserString, getDefaultPort());
  }

  /**
   * Determines the RC port reflectively from RemoteControlConfiguration when available, falling
   * back to the "selenium.port" system property or 4444.
   *
   * @return the port the Selenium server is expected to listen on
   */
  protected int getDefaultPort() {
    try {
      Class<?> c = Class.forName("org.openqa.selenium.server.RemoteControlConfiguration");
      Method getDefaultPort = c.getMethod("getDefaultPort");
      Integer portNumber = (Integer) getDefaultPort.invoke(null);
      return portNumber.intValue();
    } catch (Exception e) {
      // Server classes not on the classpath (or reflection failed): use the property/default.
      return Integer.getInteger("selenium.port", 4444).intValue();
    }
  }

  /**
   * Creates a new DefaultSelenium object and starts it using the specified baseUrl and browser
   * string. The port is selected as follows: if the server package's RemoteControlConfiguration
   * class is on the classpath, that class' default port is used. Otherwise, if the "server.port"
   * system property is specified, that is used - failing that, the default of 4444 is used.
   *
   * @see #setUp(String, String, int)
   * @param url the baseUrl for your tests
   * @param browserString the browser to use, e.g. *firefox
   * @param port the port that you want to run your tests on
   * @throws Exception exception all the things!
   */
  public void setUp(String url, String browserString, int port) throws Exception {
    if (url == null) {
      url = "http://localhost:" + port;
    }
    selenium = new DefaultSelenium("localhost", port, browserString, url);
    selenium.start();
  }

  /** Like assertTrue, but fails at the end of the test (during tearDown)
   * @param b boolean to verify is true
   */
  public void verifyTrue(boolean b) {
    try {
      assertTrue(b);
    } catch (Error e) {
      verificationErrors.append(throwableToString(e));
    }
  }

  /** Like assertFalse, but fails at the end of the test (during tearDown)
   * @param b boolean to verify is false
   */
  public void verifyFalse(boolean b) {
    try {
      assertFalse(b);
    } catch (Error e) {
      verificationErrors.append(throwableToString(e));
    }
  }

  /** @return the body text of the current page */
  public String getText() {
    return selenium.getEval("this.page().bodyText()");
  }

  /** Like assertEquals, but fails at the end of the test (during tearDown)
   * @param actual the actual object expected
   * @param expected object that you want to compare to actual
   */
  public void verifyEquals(Object expected, Object actual) {
    try {
      assertEquals(expected, actual);
    } catch (Error e) {
      verificationErrors.append(throwableToString(e));
    }
  }

  /** Like assertEquals, but fails at the end of the test (during tearDown)
   * @param actual the actual object expected
   * @param expected object that you want to compare to actual
   */
  public void verifyEquals(boolean expected, boolean actual) {
    try {
      assertEquals(Boolean.valueOf(expected), Boolean.valueOf(actual));
    } catch (Error e) {
      verificationErrors.append(throwableToString(e));
    }
  }

  /** Like JUnit's Assert.assertEquals, but knows how to compare string arrays
   * @param actual the actual object expected
   * @param expected object that you want to compare to actual
   */
  public static void assertEquals(Object expected, Object actual) {
    if (expected == null) {
      assertTrue("Expected \"" + expected + "\" but saw \"" + actual + "\" instead", actual == null);
    } else if (expected instanceof String && actual instanceof String) {
      assertEquals((String) expected, (String) actual);
    } else if (expected instanceof String && actual instanceof String[]) {
      assertEquals((String) expected, (String[]) actual);
    } else if (expected instanceof String && actual instanceof Number) {
      assertEquals((String) expected, actual.toString());
    } else if (expected instanceof Number && actual instanceof String) {
      assertEquals(expected.toString(), (String) actual);
    } else if (expected instanceof String[] && actual instanceof String[]) {
      assertEquals((String[]) expected, (String[]) actual);
    } else {
      assertTrue("Expected \"" + expected + "\" but saw \"" + actual + "\" instead",
          expected.equals(actual));
    }
  }

  /** Like JUnit's Assert.assertEquals, but handles "regexp:" strings like HTML Selenese
   * @param actual the actual object expected
   * @param expected object that you want to compare to actual
   */
  public static void assertEquals(String expected, String actual) {
    assertTrue("Expected \"" + expected + "\" but saw \"" + actual + "\" instead",
        seleniumEquals(expected, actual));
  }

  /**
   * Like JUnit's Assert.assertEquals, but joins the string array with commas, and handles "regexp:"
   * strings like HTML Selenese
   * @param actual the actual object expected
   * @param expected object that you want to compare to actual
   */
  public static void assertEquals(String expected, String[] actual) {
    assertEquals(expected, join(actual, ','));
  }

  /**
   * Compares two strings, but handles "regexp:" strings like HTML Selenese
   *
   * @param expectedPattern expression of expected
   * @param actual expression of actual
   * @return true if actual matches the expectedPattern, or false otherwise
   */
  public static boolean seleniumEquals(String expectedPattern, String actual) {
    if (expectedPattern == null || actual == null) {
      return expectedPattern == null && actual == null;
    }
    if (actual.startsWith("regexp:") || actual.startsWith("regex:")
        || actual.startsWith("regexpi:") || actual.startsWith("regexi:")) {
      // The pattern ended up on the "actual" side; swap 'em so the prefix logic below works.
      String tmp = actual;
      actual = expectedPattern;
      expectedPattern = tmp;
    }
    Boolean b;
    b = handleRegex("regexp:", expectedPattern, actual, 0);
    if (b != null) {
      return b.booleanValue();
    }
    b = handleRegex("regex:", expectedPattern, actual, 0);
    if (b != null) {
      return b.booleanValue();
    }
    b = handleRegex("regexpi:", expectedPattern, actual, Pattern.CASE_INSENSITIVE);
    if (b != null) {
      return b.booleanValue();
    }
    b = handleRegex("regexi:", expectedPattern, actual, Pattern.CASE_INSENSITIVE);
    if (b != null) {
      return b.booleanValue();
    }

    if (expectedPattern.startsWith("exact:")) {
      String expectedExact = expectedPattern.replaceFirst("exact:", "");
      if (!expectedExact.equals(actual)) {
        System.out.println("expected " + actual + " to match " + expectedPattern);
        return false;
      }
      return true;
    }

    // Default: glob matching. Escape regex metacharacters, then translate glob wildcards.
    String expectedGlob = expectedPattern.replaceFirst("glob:", "");
    expectedGlob = expectedGlob.replaceAll("([\\]\\[\\\\{\\}$\\(\\)\\|\\^\\+.])", "\\\\$1");
    expectedGlob = expectedGlob.replaceAll("\\*", ".*");
    expectedGlob = expectedGlob.replaceAll("\\?", ".");
    if (!Pattern.compile(expectedGlob, Pattern.DOTALL).matcher(actual).matches()) {
      System.out.println("expected \"" + actual + "\" to match glob \"" + expectedPattern
          + "\" (had transformed the glob into regexp \"" + expectedGlob + "\"");
      return false;
    }
    return true;
  }

  /**
   * If expectedPattern carries the given prefix, performs a (partial, ".*"-padded) regex match
   * of actual against the remainder; otherwise returns null so the caller can try other prefixes.
   *
   * @param prefix the Selenese pattern prefix, e.g. "regexp:"
   * @param expectedPattern the full expected pattern string
   * @param actual the value to match
   * @param flags Pattern flags (e.g. Pattern.CASE_INSENSITIVE)
   * @return TRUE/FALSE for a match attempt, or null if the prefix didn't apply
   */
  private static Boolean handleRegex(String prefix, String expectedPattern, String actual, int flags) {
    if (expectedPattern.startsWith(prefix)) {
      String expectedRegEx = expectedPattern.replaceFirst(prefix, ".*") + ".*";
      Pattern p = Pattern.compile(expectedRegEx, flags);
      if (!p.matcher(actual).matches()) {
        System.out.println("expected " + actual + " to match regexp " + expectedPattern);
        return Boolean.FALSE;
      }
      return Boolean.TRUE;
    }
    return null;
  }

  /**
   * Compares two objects, but handles "regexp:" strings like HTML Selenese
   *
   * @see #seleniumEquals(String, String)
   * @param actual the actual object expected
   * @param expected object that you want to compare to actual
   * @return true if actual matches the expectedPattern, or false otherwise
   */
  public static boolean seleniumEquals(Object expected, Object actual) {
    if (expected == null) {
      return actual == null;
    }
    if (expected instanceof String && actual instanceof String) {
      return seleniumEquals((String) expected, (String) actual);
    }
    return expected.equals(actual);
  }

  /** Asserts that two string arrays have identical string contents
   * @param actual the actual object expected
   * @param expected object that you want to compare to actual
   */
  public static void assertEquals(String[] expected, String[] actual) {
    String comparisonDumpIfNotEqual = verifyEqualsAndReturnComparisonDumpIfNot(expected, actual);
    if (comparisonDumpIfNotEqual != null) {
      throw new AssertionError(comparisonDumpIfNotEqual);
    }
  }

  /**
   * Asserts that two string arrays have identical string contents (fails at the end of the test,
   * during tearDown)
   * @param actual the actual object expected
   * @param expected object that you want to compare to actual
   */
  public void verifyEquals(String[] expected, String[] actual) {
    String comparisonDumpIfNotEqual = verifyEqualsAndReturnComparisonDumpIfNot(expected, actual);
    if (comparisonDumpIfNotEqual != null) {
      verificationErrors.append(comparisonDumpIfNotEqual);
    }
  }

  /**
   * Compares the two arrays element-wise via seleniumEquals.
   *
   * @return a human-readable comparison dump when the arrays differ, or null when they match
   */
  private static String verifyEqualsAndReturnComparisonDumpIfNot(String[] expected, String[] actual) {
    // Note: only walk the elements when the lengths agree; the original iterated over
    // expected.length regardless, which threw ArrayIndexOutOfBoundsException instead of
    // reporting a mismatch when expected was longer than actual.
    boolean misMatch = expected.length != actual.length;
    if (!misMatch) {
      for (int j = 0; j < expected.length; j++) {
        if (!seleniumEquals(expected[j], actual[j])) {
          misMatch = true;
          break;
        }
      }
    }
    if (misMatch) {
      return "Expected " + stringArrayToString(expected) + " but saw "
          + stringArrayToString(actual);
    }
    return null;
  }

  /** Renders a string array as {@code { "a" "b" }} for failure messages. */
  private static String stringArrayToString(String[] sa) {
    StringBuilder sb = new StringBuilder("{");
    for (int j = 0; j < sa.length; j++) {
      sb.append(" ").append("\"").append(sa[j]).append("\"");
    }
    sb.append(" }");
    return sb.toString();
  }

  /** Renders a throwable's full stack trace as a string for the verification-error buffer. */
  private static String throwableToString(Throwable t) {
    StringWriter sw = new StringWriter();
    PrintWriter pw = new PrintWriter(sw);
    t.printStackTrace(pw);
    return sw.toString();
  }

  /**
   * Joins the array elements with the given separator character.
   *
   * @param sa the strings to join
   * @param c the separator
   * @return the joined string
   */
  public static String join(String[] sa, char c) {
    StringBuilder sb = new StringBuilder();
    for (int j = 0; j < sa.length; j++) {
      sb.append(sa[j]);
      if (j < sa.length - 1) {
        sb.append(c);
      }
    }
    return sb.toString();
  }

  /** Like assertNotEquals, but fails at the end of the test (during tearDown)
   * @param actual the actual object expected
   * @param expected object that you want to compare to actual
   */
  public void verifyNotEquals(Object expected, Object actual) {
    try {
      assertNotEquals(expected, actual);
    } catch (AssertionError e) {
      verificationErrors.append(throwableToString(e));
    }
  }

  /** Like assertNotEquals, but fails at the end of the test (during tearDown)
   * @param actual the actual object expected
   * @param expected object that you want to compare to actual
   */
  public void verifyNotEquals(boolean expected, boolean actual) {
    try {
      assertNotEquals(Boolean.valueOf(expected), Boolean.valueOf(actual));
    } catch (AssertionError e) {
      verificationErrors.append(throwableToString(e));
    }
  }

  /** Asserts that two objects are not the same (compares using .equals())
   * @param actual the actual object expected
   * @param expected object that you want to compare to actual
   */
  public static void assertNotEquals(Object expected, Object actual) {
    if (expected == null) {
      assertFalse("did not expect null to be null", actual == null);
    } else if (expected.equals(actual)) {
      fail("did not expect (" + actual + ") to be equal to (" + expected + ")");
    }
  }

  /** Fails the test immediately with the given message. */
  public static void fail(String message) {
    throw new AssertionError(message);
  }

  public static void assertTrue(String message, boolean condition) {
    if (!condition) fail(message);
  }

  public static void assertTrue(boolean condition) {
    assertTrue(null, condition);
  }

  public static void assertFalse(String message, boolean condition) {
    assertTrue(message, !condition);
  }

  public static void assertFalse(boolean condition) {
    assertTrue(null, !condition);
  }

  /** Asserts that two booleans are not the same
   * @param actual the actual object expected
   * @param expected object that you want to compare to actual
   */
  public static void assertNotEquals(boolean expected, boolean actual) {
    assertNotEquals(Boolean.valueOf(expected), Boolean.valueOf(actual));
  }

  /** Sleeps for the specified number of milliseconds
   * @param millisecs number of
   */
  public void pause(int millisecs) {
    try {
      Thread.sleep(millisecs);
    } catch (InterruptedException e) {
      // Restore the interrupt status so callers can still observe the interruption.
      Thread.currentThread().interrupt();
    }
  }

  /**
   * Asserts that there were no verification errors during the current test, failing immediately if
   * any are found
   */
  public void checkForVerificationErrors() {
    String verificationErrorString = verificationErrors.toString();
    clearVerificationErrors();
    if (!"".equals(verificationErrorString)) {
      fail(verificationErrorString);
    }
  }

  /** Clears out the list of verification errors */
  public void clearVerificationErrors() {
    verificationErrors = new StringBuffer();
  }

  /** checks for verification errors and stops the browser
   * @throws Exception actually, just AssertionError, but someone was lazy?
   */
  public void tearDown() throws Exception {
    try {
      checkForVerificationErrors();
    } finally {
      // Always stop the browser, even when verification errors fail the test.
      if (selenium != null) {
        selenium.stop();
        selenium = null;
      }
    }
  }

  protected boolean isCaptureScreenShotOnFailure() {
    return captureScreenShotOnFailure;
  }

  protected void setCaptureScreenShotOnFailure(boolean captureScreenShotOnFailure) {
    this.captureScreenShotOnFailure = captureScreenShotOnFailure;
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.wal;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSUtils;
import org.codehaus.jackson.map.ObjectMapper;
// imports for things that haven't moved yet.
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
/**
* WALPrettyPrinter prints the contents of a given WAL with a variety of
* options affecting formatting and extent of content.
*
* It targets two usage cases: pretty printing for ease of debugging directly by
* humans, and JSON output for consumption by monitoring and/or maintenance
* scripts.
*
* It can filter by row, region, or sequence id.
*
* It can also toggle output of values.
*
*/
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
@InterfaceStability.Evolving
public class WALPrettyPrinter {
  private boolean outputValues;
  private boolean outputJSON;
  // The following enable filtering by sequence, region, and row, respectively
  private long sequence;
  private String region;
  private String row;
  // enable in order to output a single list of transactions from several files
  private boolean persistentOutput;
  private boolean firstTxn;
  // useful for programmatic capture of JSON output
  private PrintStream out;
  // for JSON encoding
  private static final ObjectMapper MAPPER = new ObjectMapper();

  /**
   * Basic constructor that simply initializes values to reasonable defaults.
   */
  public WALPrettyPrinter() {
    outputValues = false;
    outputJSON = false;
    sequence = -1;
    region = null;
    row = null;
    persistentOutput = false;
    firstTxn = true;
    out = System.out;
  }

  /**
   * Fully specified constructor.
   *
   * @param outputValues
   *          when true, enables output of values along with other log
   *          information
   * @param outputJSON
   *          when true, enables output in JSON format rather than a
   *          "pretty string"
   * @param sequence
   *          when nonnegative, serves as a filter; only log entries with this
   *          sequence id will be printed
   * @param region
   *          when not null, serves as a filter; only log entries from this
   *          region will be printed
   * @param row
   *          when not null, serves as a filter; only log entries from this row
   *          will be printed
   * @param persistentOutput
   *          keeps a single list running for multiple files. if enabled, the
   *          endPersistentOutput() method must be used!
   * @param out
   *          Specifies an alternative to stdout for the destination of this
   *          PrettyPrinter's output.
   */
  public WALPrettyPrinter(boolean outputValues, boolean outputJSON,
      long sequence, String region, String row, boolean persistentOutput,
      PrintStream out) {
    this.outputValues = outputValues;
    this.outputJSON = outputJSON;
    this.sequence = sequence;
    this.region = region;
    this.row = row;
    // Assign the output stream and txn state BEFORE starting persistent output:
    // beginPersistentOutput() writes to this.out and early-returns if the
    // persistentOutput flag is already set. The previous ordering set the flag
    // first, which made the call a silent no-op (the opening "[" was never
    // emitted) and would have written to a null stream otherwise.
    this.out = out;
    this.firstTxn = true;
    this.persistentOutput = false;
    if (persistentOutput) {
      beginPersistentOutput();
    }
  }

  /**
   * turns value output on
   */
  public void enableValues() {
    outputValues = true;
  }

  /**
   * turns value output off
   */
  public void disableValues() {
    outputValues = false;
  }

  /**
   * turns JSON output on
   */
  public void enableJSON() {
    outputJSON = true;
  }

  /**
   * turns JSON output off, and turns on "pretty strings" for human consumption
   */
  public void disableJSON() {
    outputJSON = false;
  }

  /**
   * sets the sequence by which output will be filtered
   *
   * @param sequence
   *          when nonnegative, serves as a filter; only log entries with this
   *          sequence id will be printed
   */
  public void setSequenceFilter(long sequence) {
    this.sequence = sequence;
  }

  /**
   * sets the region by which output will be filtered
   *
   * @param region
   *          when not null, serves as a filter; only log entries from this
   *          region will be printed
   */
  public void setRegionFilter(String region) {
    this.region = region;
  }

  /**
   * sets the row by which output will be filtered
   *
   * @param row
   *          when not null, serves as a filter; only log entries from this row
   *          will be printed
   */
  public void setRowFilter(String row) {
    this.row = row;
  }

  /**
   * enables output as a single, persistent list. at present, only relevant in
   * the case of JSON output.
   */
  public void beginPersistentOutput() {
    if (persistentOutput)
      return;
    persistentOutput = true;
    firstTxn = true;
    if (outputJSON)
      out.print("[");
  }

  /**
   * ends output of a single, persistent list. at present, only relevant in the
   * case of JSON output.
   */
  public void endPersistentOutput() {
    if (!persistentOutput)
      return;
    persistentOutput = false;
    if (outputJSON)
      out.print("]");
  }

  /**
   * reads a log file and outputs its contents, one transaction at a time, as
   * specified by the currently configured options
   *
   * @param conf
   *          the HBase configuration relevant to this log file
   * @param p
   *          the path of the log file to be read
   * @throws IOException
   *           may be unable to access the configured filesystem or requested
   *           file.
   */
  public void processFile(final Configuration conf, final Path p)
      throws IOException {
    FileSystem fs = FileSystem.get(conf);
    if (!fs.exists(p)) {
      throw new FileNotFoundException(p.toString());
    }
    if (!fs.isFile(p)) {
      throw new IOException(p + " is not a file");
    }
    if (outputJSON && !persistentOutput) {
      out.print("[");
      firstTxn = true;
    }
    WAL.Reader log = WALFactory.createReader(fs, p, conf);
    try {
      WAL.Entry entry;
      while ((entry = log.next()) != null) {
        WALKey key = entry.getKey();
        WALEdit edit = entry.getEdit();
        // begin building a transaction structure
        Map<String, Object> txn = key.toStringMap();
        long writeTime = key.getWriteTime();
        // check output filters
        if (sequence >= 0 && ((Long) txn.get("sequence")) != sequence)
          continue;
        if (region != null && !((String) txn.get("region")).equals(region))
          continue;
        // initialize list into which we will store atomic actions
        List<Map<String, Object>> actions = new ArrayList<Map<String, Object>>();
        for (Cell cell : edit.getCells()) {
          // add atomic operation to txn
          Map<String, Object> op = new HashMap<String, Object>(toStringMap(cell));
          if (outputValues) op.put("value", Bytes.toStringBinary(cell.getValue()));
          // check row output filter
          if (row == null || ((String) op.get("row")).equals(row)) {
            actions.add(op);
          }
        }
        // skip transactions in which every action was filtered out
        if (actions.size() == 0)
          continue;
        txn.put("actions", actions);
        if (outputJSON) {
          // JSON output is a straightforward "toString" on the txn object
          if (firstTxn)
            firstTxn = false;
          else
            out.print(",");
          // encode and print JSON
          out.print(MAPPER.writeValueAsString(txn));
        } else {
          // Pretty output, complete with indentation by atomic action
          out.println("Sequence=" + txn.get("sequence") + " "
              + ", region=" + txn.get("region") + " at write timestamp=" + new Date(writeTime));
          for (int i = 0; i < actions.size(); i++) {
            Map<String, Object> op = actions.get(i);
            out.println("row=" + op.get("row") +
                ", column=" + op.get("family") + ":" + op.get("qualifier"));
            if (op.get("tag") != null) {
              out.println("    tag: " + op.get("tag"));
            }
            if (outputValues) out.println("    value: " + op.get("value"));
          }
        }
      }
    } finally {
      log.close();
    }
    if (outputJSON && !persistentOutput) {
      out.print("]");
    }
  }

  /**
   * Converts a cell's coordinates (row, family, qualifier, timestamp, value
   * length, and any tags) into a string map suitable for printing or JSON
   * encoding. The value itself is added separately by the caller, subject to
   * the outputValues flag.
   *
   * @param cell the cell to describe
   * @return a map of printable cell attributes
   */
  private static Map<String, Object> toStringMap(Cell cell) {
    Map<String, Object> stringMap = new HashMap<String, Object>();
    stringMap.put("row",
        Bytes.toStringBinary(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()));
    stringMap.put("family", Bytes.toStringBinary(cell.getFamilyArray(), cell.getFamilyOffset(),
                cell.getFamilyLength()));
    stringMap.put("qualifier",
        Bytes.toStringBinary(cell.getQualifierArray(), cell.getQualifierOffset(),
            cell.getQualifierLength()));
    stringMap.put("timestamp", cell.getTimestamp());
    stringMap.put("vlen", cell.getValueLength());
    if (cell.getTagsLength() > 0) {
      List<String> tagsString = new ArrayList<String>();
      Iterator<Tag> tagsIterator = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(),
          cell.getTagsLength());
      while (tagsIterator.hasNext()) {
        Tag tag = tagsIterator.next();
        tagsString.add((tag.getType()) + ":"
            + Bytes.toStringBinary(tag.getBuffer(), tag.getTagOffset(), tag.getTagLength()));
      }
      stringMap.put("tag", tagsString);
    }
    return stringMap;
  }

  public static void main(String[] args) throws IOException {
    run(args);
  }

  /**
   * Pass one or more log file names and formatting options and it will dump out
   * a text version of the contents on <code>stdout</code>.
   *
   * @param args
   *          Command line arguments
   * @throws IOException
   *           Thrown upon file system errors etc.
   */
  public static void run(String[] args) throws IOException {
    // create options
    Options options = new Options();
    options.addOption("h", "help", false, "Output help message");
    options.addOption("j", "json", false, "Output JSON");
    options.addOption("p", "printvals", false, "Print values");
    options.addOption("r", "region", true,
        "Region to filter by. Pass encoded region name; e.g. '9192caead6a5a20acb4454ffbc79fa14'");
    options.addOption("s", "sequence", true,
        "Sequence to filter by. Pass sequence number.");
    options.addOption("w", "row", true, "Row to filter by. Pass row name.");

    WALPrettyPrinter printer = new WALPrettyPrinter();
    CommandLineParser parser = new PosixParser();
    List<?> files = null;
    try {
      CommandLine cmd = parser.parse(options, args);
      files = cmd.getArgList();
      if (files.size() == 0 || cmd.hasOption("h")) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("WAL <filename...>", options, true);
        System.exit(-1);
      }
      // configure the pretty printer using command line options
      if (cmd.hasOption("p"))
        printer.enableValues();
      if (cmd.hasOption("j"))
        printer.enableJSON();
      if (cmd.hasOption("r"))
        printer.setRegionFilter(cmd.getOptionValue("r"));
      if (cmd.hasOption("s"))
        printer.setSequenceFilter(Long.parseLong(cmd.getOptionValue("s")));
      if (cmd.hasOption("w"))
        printer.setRowFilter(cmd.getOptionValue("w"));
    } catch (ParseException e) {
      e.printStackTrace();
      HelpFormatter formatter = new HelpFormatter();
      formatter.printHelp("HFile filename(s) ", options, true);
      System.exit(-1);
    }
    // get configuration, file system, and process the given files
    Configuration conf = HBaseConfiguration.create();
    FSUtils.setFsDefault(conf, FSUtils.getRootDir(conf));

    // begin output
    printer.beginPersistentOutput();
    for (Object f : files) {
      Path file = new Path((String) f);
      FileSystem fs = file.getFileSystem(conf);
      if (!fs.exists(file)) {
        System.err.println("ERROR, file doesnt exist: " + file);
        // Close the persistent list before bailing so JSON output stays
        // well-formed; the original returned here and left the "[" unclosed.
        printer.endPersistentOutput();
        return;
      }
      printer.processFile(conf, file);
    }
    printer.endPersistentOutput();
  }
}
| |
/*******************************************************************************
* Copyright 2016 Jalian Systems Pvt. Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package net.sourceforge.marathon;
/*******************************************************************************
*
* Copyright (C) 2010 Jalian Systems Private Ltd.
* Copyright (C) 2010 Contributors to Marathon OSS Project
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*
* Project website: http://www.marathontesting.com
* Help: Marathon help forum @ http://groups.google.com/group/marathon-testing
*
*******************************************************************************/
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.List;
import java.util.Properties;
import java.util.logging.Logger;
import org.junit.runner.Result;
import com.google.inject.Guice;
import com.google.inject.Injector;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.scene.control.Alert.AlertType;
import javafx.stage.Stage;
import net.sourceforge.marathon.api.GuiceInjector;
import net.sourceforge.marathon.display.DisplayWindow;
import net.sourceforge.marathon.fx.api.FXUIUtils;
import net.sourceforge.marathon.fx.display.VersionInfo;
import net.sourceforge.marathon.fx.projectselection.EditProjectHandler;
import net.sourceforge.marathon.fx.projectselection.NewProjectHandler;
import net.sourceforge.marathon.fx.projectselection.ProjectInfo;
import net.sourceforge.marathon.fx.projectselection.ProjectSelection;
import net.sourceforge.marathon.junit.textui.TestRunner;
import net.sourceforge.marathon.runtime.api.Constants;
import net.sourceforge.marathon.runtime.api.MPFUtils;
import net.sourceforge.marathon.runtime.api.NullLogger;
import net.sourceforge.marathon.runtime.api.OSUtils;
import net.sourceforge.marathon.runtime.api.Preferences;
import net.sourceforge.marathon.runtime.api.ProjectFile;
import net.sourceforge.marathon.runtime.api.RuntimeLogger;
/**
* Main entry point into Marathon application.
*/
public class RealMain {

    public static final Logger LOGGER = Logger.getLogger(RealMain.class.getName());

    private static ArgumentProcessor argProcessor = new ArgumentProcessor();

    /**
     * Entry point into Marathon application. Invoke main on the command line
     * using the <code>net.sourceforge.marathon.Main</code> class.
     *
     * @param args
     *            - Arguments passed on the command line. Invoke with
     *            <code>-help</code> to see available options.
     */
    public static void realmain(String[] args) {
        // Work around Apple-JVM code paths that expect mrj.version to be set.
        String vers = System.getProperty("mrj.version");
        if (vers == null) {
            System.setProperty("mrj.version", "1070.1.6.0_26-383");
        }
        argProcessor.process(args);
        if (!argProcessor.isBatchMode()) {
            runGUIMode();
        } else {
            runBatchMode();
        }
    }

    /**
     * Run Marathon in batch mode: validate the project folder, run the tests
     * and exit with a JUnit-style exit code.
     */
    private static void runBatchMode() {
        String projectDir = argProcessor.getProjectDirectory();
        if (projectDir == null) {
            argProcessor.help("No project directory");
            return;
        }
        // Check the more specific ".mpf file" case before the generic validity
        // check: a project *file* is never a valid project *folder*, so in the
        // original order this targeted message was unreachable.
        if (projectDir.endsWith(".mpf") && new File(projectDir).isFile()) {
            argProcessor.help("A marathon project file is given.\nUse project directory instead");
            return;
        }
        if (!ProjectFile.isValidProjectDirectory(new File(projectDir))) {
            argProcessor.help("`" + projectDir + "` is an invalid project folder. Please provide a valid Marathon project folder.");
            return;
        }
        processMPF(projectDir, true);
        OSUtils.setLogConfiguration(projectDir);
        RuntimeLogger.setRuntimeLogger(new NullLogger());
        cleanResultFolder();
        TestRunner aTestRunner = createTestRunner();
        try {
            Result r = aTestRunner.runTests(argProcessor);
            if (!r.wasSuccessful()) {
                System.exit(junit.textui.TestRunner.FAILURE_EXIT);
            }
            System.exit(junit.textui.TestRunner.SUCCESS_EXIT);
        } catch (Exception e) {
            LOGGER.severe(e.getMessage());
            System.exit(junit.textui.TestRunner.EXCEPTION_EXIT);
        }
    }

    /**
     * Delete and recreate the report folder so results from a previous run do
     * not survive into this one.
     */
    private static void cleanResultFolder() {
        File folder = new File(argProcessor.getReportDir());
        deleteFolder(folder);
        folder.mkdirs();
    }

    /**
     * Recursively delete {@code folder} and everything below it. Files are
     * deleted bottom-up; failures are ignored (File.delete returns false).
     */
    private static void deleteFolder(File folder) {
        if (folder.isDirectory()) {
            File[] listFiles = folder.listFiles();
            if (listFiles != null) {
                for (File file : listFiles) {
                    deleteFolder(file);
                }
            }
        }
        folder.delete();
    }

    /** Factory for the batch-mode test runner. */
    private static TestRunner createTestRunner() {
        return new TestRunner();
    }

    /**
     * Run Marathon in GUI mode with all bells and whistles.
     */
    private static void runGUIMode() {
        showSplash();
        String projectDir = getProjectDirectory(argProcessor.getProjectDirectory());
        if (projectDir == null) {
            // User cancelled project selection.
            System.exit(0);
        }
        processMPF(projectDir, false);
        initializeInjector();
        OSUtils.setLogConfiguration(projectDir);
        Injector injector = GuiceInjector.get();
        final DisplayWindow display = injector.getInstance(DisplayWindow.class);
        display.setVisible(true);
    }

    /** Install the application-wide Guice injector. */
    private static void initializeInjector() {
        GuiceInjector.set(Guice.createInjector(new MarathonGuiceModule()));
    }

    /**
     * Show the splash screen. The splash can be suppressed by giving
     * <code>-nosplash</code> on command line. Blocks until the splash is
     * dismissed.
     */
    private static void showSplash() {
        if (argProcessor.showSplash()) {
            MarathonSplashScreen splashScreen = new MarathonSplashScreen(new VersionInfo(Version.id(), Version.blurbTitle(),
                    Version.blurbCompany(), Version.blurbWebsite(), Version.blurbCredits()));
            splashScreen.getStage().showAndWait();
        }
    }

    /**
     * Resolve the project directory to use. When none is given on the command
     * line, pops up a project-selection dialog.
     *
     * @param arg
     *            the project directory given on command line, null if none
     * @return project directory selected by the user. Can be null (cancelled).
     */
    private static String getProjectDirectory(final String arg) {
        if (arg != null && !ProjectFile.isValidProjectDirectory(new File(arg))) {
            // NOTE(review): help() is expected to terminate the VM here;
            // otherwise the invalid path is returned below — confirm.
            argProcessor.help("`" + arg + "`Please provide a Marathon project folder.");
        }
        if (arg != null) {
            return arg;
        }
        List<String> selectedProjects = new ArrayList<>();
        ObservableList<ProjectInfo> projectList = FXCollections.observableArrayList();
        List<List<String>> frameworks = Arrays.asList(Arrays.asList("Java/Swing Project", Constants.FRAMEWORK_SWING),
                Arrays.asList("Java/FX Project", Constants.FRAMEWORK_FX),
                Arrays.asList("Web Application Project", Constants.FRAMEWORK_WEB));
        ProjectSelection selection = new ProjectSelection(projectList, frameworks) {
            @Override
            protected void onSelect(ProjectInfo selected) {
                super.onSelect(selected);
                selectedProjects.add(selected.getFolder());
                dispose();
            }
        };
        Stage stage = selection.getStage();
        selection.setNewProjectHandler(new NewProjectHandler(stage));
        selection.setEditProjectHandler(new EditProjectHandler(stage));
        stage.showAndWait();
        if (selectedProjects.size() == 0) {
            return null;
        }
        Preferences.resetInstance();
        return selectedProjects.get(0);
    }

    /**
     * Load the Marathon project from the given directory into system
     * properties: reads the project properties, performs path/environment
     * substitution and merges them into {@link System#getProperties()}.
     * Exits the VM on unreadable project files or missing directories.
     *
     * @param projectDir
     *            the Marathon project directory
     * @param batchMode
     *            when true, errors go to stderr instead of dialogs
     */
    public static void processMPF(String projectDir, boolean batchMode) {
        try {
            File file = new File(projectDir);
            projectDir = file.getCanonicalPath();
            System.setProperty(Constants.PROP_PROJECT_DIR, projectDir);
            Properties mpfProps = ProjectFile.getProjectProperties();
            checkForScriptModel(projectDir, mpfProps, batchMode);
            MPFUtils.convertPathChar(mpfProps);
            MPFUtils.replaceEnviron(mpfProps);
            Properties props = System.getProperties();
            props.putAll(mpfProps);
            System.setProperties(props);
        } catch (FileNotFoundException e) {
            if (batchMode)
                System.err.println("Unable to open Marathon Project File " + e.getMessage());
            else
                FXUIUtils.showMessageDialog(null, "Unable to open Marathon Project File " + e.getMessage(), "Error",
                        AlertType.ERROR);
            System.exit(1);
        } catch (IOException e) {
            if (batchMode)
                System.err.println("Unable to read Marathon Project File " + e.getMessage());
            else
                FXUIUtils.showMessageDialog(null, "Unable to read Marathon Project File " + e.getMessage(), "Error",
                        AlertType.ERROR);
            System.exit(1);
        }
        String userDir = System.getProperty(Constants.PROP_PROJECT_DIR);
        if (userDir != null && !userDir.equals("") && System.getProperty("user.dir") == null) {
            System.setProperty("user.dir", userDir);
        }
        checkForProperties(batchMode);
        if (!dirExists(Constants.PROP_MODULE_DIRS, batchMode) || !dirExists(Constants.PROP_TEST_DIR, batchMode)
                || !dirExists(Constants.PROP_FIXTURE_DIR, batchMode) || !dirExists(Constants.PROP_CHECKLIST_DIR, batchMode)) {
            System.exit(1);
        }
    }

    /**
     * Refuse to open projects configured for a script model other than Ruby
     * (i.e. projects created with MarathonITE). Exits the VM on mismatch.
     */
    private static void checkForScriptModel(String projectDir, Properties mpfProps, boolean batchMode) {
        String scriptModel = mpfProps.getProperty(Constants.PROP_PROJECT_SCRIPT_MODEL);
        if ("net.sourceforge.marathon.ruby.RubyScriptModel".equals(scriptModel)) {
            return;
        }
        // Fixed typo in the user-facing message: "MarahtonITE" -> "MarathonITE".
        String message = "This project is configured with MarathonITE.\n" + "You can't use Marathon to open it.";
        if (batchMode)
            System.err.println(message);
        else
            FXUIUtils.showMessageDialog(null, message, "Script Model", AlertType.ERROR);
        System.exit(1);
    }

    /**
     * The user selected properties are set with 'marathon.properties' prefix in
     * the MPF files. This function removes this prefix (if exist). A prefixed
     * key always wins over an unprefixed key of the same name, regardless of
     * iteration order.
     *
     * @param mpfProps
     *            , properties for which the substitution need to be performed.
     * @return new property list.
     */
    public static Properties removePrefixes(Properties mpfProps) {
        Enumeration<Object> enumeration = mpfProps.keys();
        Properties props = new Properties();
        while (enumeration.hasMoreElements()) {
            String key = (String) enumeration.nextElement();
            String value = mpfProps.getProperty(key);
            if (key.startsWith(Constants.PROP_PROPPREFIX)) {
                key = key.substring(Constants.PROP_PROPPREFIX.length());
                props.setProperty(key, value);
            } else if (!props.containsKey(key)) {
                props.setProperty(key, value);
            }
        }
        return props;
    }

    /**
     * Given a directory key like marathon.test.dir check whether every
     * directory named by that property exists. Path separators in the property
     * value are normalized to the platform's conventions as a side effect.
     *
     * @param dirKey
     *            , a property key
     * @param batchMode
     *            when true, errors go to stderr instead of dialogs
     * @return true, if all the directories exist
     */
    private static boolean dirExists(String dirKey, boolean batchMode) {
        String dirName = System.getProperty(dirKey);
        // Fix: the original tested dirKey != null (always true for the constant
        // keys passed in) and then dereferenced dirName, throwing NPE when the
        // property was missing. Test the property value and fail gracefully.
        if (dirName == null) {
            if (batchMode)
                System.err.println("No directory specified for " + dirKey);
            else
                FXUIUtils.showMessageDialog(null, "No directory specified for " + dirKey, "Error", AlertType.ERROR);
            return false;
        }
        dirName = dirName.replace(';', File.pathSeparatorChar);
        dirName = dirName.replace('/', File.separatorChar);
        System.setProperty(dirKey, dirName);
        String[] values = dirName.split(String.valueOf(File.pathSeparatorChar));
        for (String value : values) {
            File dir = new File(value);
            if (!dir.exists() || !dir.isDirectory()) {
                if (batchMode)
                    System.err.println("Invalid directory specified for " + dirKey + " - " + dirName);
                else
                    FXUIUtils.showMessageDialog(null, "Invalid directory specified for " + dirKey + " - " + dirName, "Error",
                            AlertType.ERROR);
                return false;
            }
        }
        return true;
    }

    /**
     * Check whether the mandatory directory properties are set; exits the VM
     * when any are missing.
     *
     * @param batchMode
     *            when true, errors go to stderr instead of dialogs
     */
    private static void checkForProperties(boolean batchMode) {
        List<String> missingProperties = new ArrayList<String>();
        // First element is the message header; anything beyond it is a miss.
        missingProperties.add("The following properties are not given.");
        String[] reqdProperties = { Constants.PROP_FIXTURE_DIR, Constants.PROP_TEST_DIR, Constants.PROP_MODULE_DIRS,
                Constants.PROP_CHECKLIST_DIR };
        for (String reqdPropertie : reqdProperties) {
            if (System.getProperty(reqdPropertie) == null) {
                missingProperties.add(reqdPropertie);
            }
        }
        if (missingProperties.size() > 1) {
            if (batchMode)
                System.err.println(missingProperties.toString());
            else
                FXUIUtils.showMessageDialog(null, missingProperties.toString(), "Missing Properties", AlertType.ERROR);
            System.exit(1);
        }
    }
}
| |
// Copyright (C) 2014 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.query.change;
import static com.google.gerrit.server.index.ChangeField.SUBMISSIONID;
import static com.google.gerrit.server.query.Predicate.and;
import static com.google.gerrit.server.query.Predicate.not;
import static com.google.gerrit.server.query.Predicate.or;
import static com.google.gerrit.server.query.change.ChangeStatusPredicate.open;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Strings;
import com.google.common.collect.Sets;
import com.google.gerrit.common.Nullable;
import com.google.gerrit.reviewdb.client.Branch;
import com.google.gerrit.reviewdb.client.Change;
import com.google.gerrit.reviewdb.client.Project;
import com.google.gerrit.reviewdb.client.RefNames;
import com.google.gerrit.reviewdb.server.ReviewDb;
import com.google.gerrit.server.index.ChangeIndex;
import com.google.gerrit.server.index.IndexCollection;
import com.google.gerrit.server.index.IndexConfig;
import com.google.gerrit.server.index.Schema;
import com.google.gerrit.server.query.Predicate;
import com.google.gerrit.server.query.QueryParseException;
import com.google.gwtorm.server.OrmException;
import com.google.inject.Inject;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.Repository;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Set;
/**
* Execute a single query over changes, for use by Gerrit internals.
* <p>
* By default, visibility of returned changes is not enforced (unlike in {@link
* QueryProcessor}). The methods in this class are not typically used by
* user-facing paths, but rather by internal callers that need to process all
* matching results.
*/
public class InternalChangeQuery {
  // ---- Small factories for the predicates composed by the query methods ----

  /** Predicate matching changes destined for the given branch ref. */
  private static Predicate<ChangeData> ref(Branch.NameKey branch) {
    return new RefPredicate(branch.get());
  }

  /** Predicate matching changes with the given Change-Id key. */
  private static Predicate<ChangeData> change(Change.Key key) {
    return new ChangeIdPredicate(key.get());
  }

  /** Predicate matching changes in the given project. */
  private static Predicate<ChangeData> project(Project.NameKey project) {
    return new ProjectPredicate(project.get());
  }

  /** Predicate matching changes with the given status. */
  private static Predicate<ChangeData> status(Change.Status status) {
    return new ChangeStatusPredicate(status);
  }

  /** Predicate matching changes whose patch set matches the given commit id. */
  private static Predicate<ChangeData> commit(Schema<ChangeData> schema,
      String id) {
    return new CommitPredicate(schema, id);
  }

  private final IndexConfig indexConfig;
  private final QueryProcessor qp;
  private final IndexCollection indexes;
  private final ChangeData.Factory changeDataFactory;

  @Inject
  InternalChangeQuery(IndexConfig indexConfig,
      QueryProcessor queryProcessor,
      IndexCollection indexes,
      ChangeData.Factory changeDataFactory) {
    this.indexConfig = indexConfig;
    // Internal queries skip visibility checks by default; see class javadoc.
    qp = queryProcessor.enforceVisibility(false);
    this.indexes = indexes;
    this.changeDataFactory = changeDataFactory;
  }

  /** Limit the number of results returned by subsequent queries. */
  public InternalChangeQuery setLimit(int n) {
    qp.setLimit(n);
    return this;
  }

  /** Opt back in to (or explicitly out of) per-user visibility filtering. */
  public InternalChangeQuery enforceVisibility(boolean enforce) {
    qp.enforceVisibility(enforce);
    return this;
  }

  /** Changes whose Change-Id matches the given key (prefix match). */
  public List<ChangeData> byKey(Change.Key key) throws OrmException {
    return byKeyPrefix(key.get());
  }

  /** Changes whose Change-Id starts with the given prefix. */
  public List<ChangeData> byKeyPrefix(String prefix) throws OrmException {
    return query(new ChangeIdPredicate(prefix));
  }

  /** Changes with the given key destined for the given branch. */
  public List<ChangeData> byBranchKey(Branch.NameKey branch, Change.Key key)
      throws OrmException {
    return query(and(
        ref(branch),
        project(branch.getParentKey()),
        change(key)));
  }

  /** All changes in the given project. */
  public List<ChangeData> byProject(Project.NameKey project)
      throws OrmException {
    return query(project(project));
  }

  /** Open changes destined for the given branch. */
  public List<ChangeData> byBranchOpen(Branch.NameKey branch)
      throws OrmException {
    return query(and(
        ref(branch),
        project(branch.getParentKey()),
        open()));
  }

  /**
   * Changes on the given branch whose commits are in {@code hashes} and that
   * are not merged. Falls back to a database scan when the number of hashes
   * would exceed the index's term limit.
   */
  public Iterable<ChangeData> byCommitsOnBranchNotMerged(Repository repo,
      ReviewDb db, Branch.NameKey branch, List<String> hashes)
      throws OrmException, IOException {
    return byCommitsOnBranchNotMerged(repo, db, branch, hashes,
        // Account for all commit predicates plus ref, project, status.
        indexConfig.maxTerms() - 3);
  }

  @VisibleForTesting
  Iterable<ChangeData> byCommitsOnBranchNotMerged(Repository repo, ReviewDb db,
      Branch.NameKey branch, List<String> hashes, int indexLimit)
      throws OrmException, IOException {
    if (hashes.size() > indexLimit) {
      return byCommitsOnBranchNotMergedFromDatabase(repo, db, branch, hashes);
    } else {
      return byCommitsOnBranchNotMergedFromIndex(branch, hashes);
    }
  }

  /**
   * Database fallback: scan refs/changes/* for refs pointing at one of the
   * given commits, collect the change ids, then filter by destination branch
   * and non-MERGED status.
   */
  private Iterable<ChangeData> byCommitsOnBranchNotMergedFromDatabase(
      Repository repo, ReviewDb db, Branch.NameKey branch, List<String> hashes)
      throws OrmException, IOException {
    Set<Change.Id> changeIds = Sets.newHashSetWithExpectedSize(hashes.size());
    String lastPrefix = null;
    // Refs are iterated in sorted order, so all patch-set refs of one change
    // are adjacent; once a change is matched, lastPrefix lets us skip its
    // remaining refs without parsing them.
    for (Ref ref :
        repo.getRefDatabase().getRefs(RefNames.REFS_CHANGES).values()) {
      String r = ref.getName();
      if ((lastPrefix != null && r.startsWith(lastPrefix))
          || !hashes.contains(ref.getObjectId().name())) {
        continue;
      }
      Change.Id id = Change.Id.fromRef(r);
      if (id == null) {
        continue;
      }
      if (changeIds.add(id)) {
        lastPrefix = r.substring(0, r.lastIndexOf('/'));
      }
    }
    List<ChangeData> cds = new ArrayList<>(hashes.size());
    for (Change c : db.changes().get(changeIds)) {
      if (c.getDest().equals(branch) && c.getStatus() != Change.Status.MERGED) {
        cds.add(changeDataFactory.create(db, c));
      }
    }
    return cds;
  }

  /** Index path: one OR'd commit predicate per hash, plus branch filters. */
  private Iterable<ChangeData> byCommitsOnBranchNotMergedFromIndex(
      Branch.NameKey branch, List<String> hashes) throws OrmException {
    return query(and(
        ref(branch),
        project(branch.getParentKey()),
        not(status(Change.Status.MERGED)),
        or(commits(schema(indexes), hashes))));
  }

  /** One commit predicate per hash, for OR composition. */
  private static List<Predicate<ChangeData>> commits(Schema<ChangeData> schema,
      List<String> hashes) {
    List<Predicate<ChangeData>> commits = new ArrayList<>(hashes.size());
    for (String s : hashes) {
      commits.add(commit(schema, s));
    }
    return commits;
  }

  /** Open changes in the given project. */
  public List<ChangeData> byProjectOpen(Project.NameKey project)
      throws OrmException {
    return query(and(project(project), open()));
  }

  /** Open changes with exactly the given topic. */
  public List<ChangeData> byTopicOpen(String topic)
      throws OrmException {
    return query(and(new ExactTopicPredicate(schema(indexes), topic), open()));
  }

  /** Changes whose patch set matches the given commit. */
  public List<ChangeData> byCommit(ObjectId id) throws OrmException {
    return query(commit(schema(indexes), id.name()));
  }

  /**
   * Changes with the given submission id; empty when the id is blank or the
   * current index schema does not carry the SUBMISSIONID field.
   */
  public List<ChangeData> bySubmissionId(String cs) throws OrmException {
    if (Strings.isNullOrEmpty(cs) || !schema(indexes).hasField(SUBMISSIONID)) {
      return Collections.emptyList();
    } else {
      return query(new SubmissionIdPredicate(cs));
    }
  }

  /** Changes in the given project belonging to any of the given groups. */
  public List<ChangeData> byProjectGroups(Project.NameKey project,
      Collection<String> groups) throws OrmException {
    List<GroupPredicate> groupPredicates = new ArrayList<>(groups.size());
    for (String g : groups) {
      groupPredicates.add(new GroupPredicate(g));
    }
    return query(and(project(project), or(groupPredicates)));
  }

  /** Execute the predicate, wrapping parse failures as OrmException. */
  private List<ChangeData> query(Predicate<ChangeData> p) throws OrmException {
    try {
      return qp.queryChanges(p).changes();
    } catch (QueryParseException e) {
      throw new OrmException(e);
    }
  }

  /** Schema of the active search index; null when no index is configured. */
  private static Schema<ChangeData> schema(@Nullable IndexCollection indexes) {
    ChangeIndex index = indexes != null ? indexes.getSearchIndex() : null;
    return index != null ? index.getSchema() : null;
  }
}
| |
/*
* Copyright 2007 Yusuke Yamamoto
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package twitter4j.internal.http.alternative;
import com.google.appengine.api.urlfetch.HTTPHeader;
import com.google.appengine.api.urlfetch.HTTPResponse;
import twitter4j.TwitterException;
import twitter4j.TwitterRuntimeException;
import twitter4j.conf.ConfigurationContext;
import twitter4j.internal.http.HttpResponse;
import twitter4j.internal.http.HttpResponseCode;
import twitter4j.internal.logging.Logger;
import twitter4j.internal.org.json.JSONArray;
import twitter4j.internal.org.json.JSONObject;
import java.io.*;
import java.util.*;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.zip.GZIPInputStream;
/**
* @author Takao Nakaguchi - takao.nakaguchi at gmail.com
* @since Twitter4J 2.2.4
*/
/**
 * HttpResponse backed by a Google App Engine asynchronous URL-fetch future.
 * The future is not resolved until the first accessor is called; the outcome
 * (or failure) of that resolution is cached for subsequent calls.
 */
final class AppEngineHttpResponseImpl extends HttpResponse implements HttpResponseCode {
    private Future<HTTPResponse> future;
    private boolean responseGot;
    // Header names are stored lower-cased; see ensureResponseEvaluated().
    private Map<String, String> headers;
    private static Logger logger = Logger.getLogger(AppEngineHttpResponseImpl.class);

    AppEngineHttpResponseImpl(Future<HTTPResponse> futureResponse) {
        super(ConfigurationContext.getInstance());
        this.future = futureResponse;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public int getStatusCode() {
        ensureResponseEvaluated();
        return statusCode;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String getResponseHeader(String name) {
        ensureResponseEvaluated();
        // Fix: headers are keyed by lower-cased names, so a lookup with a
        // mixed-case name ("Content-Type") always missed. HTTP header names
        // are case-insensitive; normalize the lookup key too.
        return name == null ? null : headers.get(name.toLowerCase(Locale.ENGLISH));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Map<String, List<String>> getResponseHeaderFields() {
        ensureResponseEvaluated();
        Map<String, List<String>> ret = new TreeMap<String, List<String>>();
        for (Map.Entry<String, String> entry : headers.entrySet()) {
            ret.put(entry.getKey(), Arrays.asList(entry.getValue()));
        }
        return ret;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public InputStream asStream() {
        ensureResponseEvaluated();
        return super.asStream();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String asString() throws TwitterException {
        ensureResponseEvaluated();
        return super.asString();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public final JSONObject asJSONObject() throws TwitterException {
        ensureResponseEvaluated();
        return super.asJSONObject();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public final JSONArray asJSONArray() throws TwitterException {
        ensureResponseEvaluated();
        return super.asJSONArray();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public final Reader asReader() {
        ensureResponseEvaluated();
        return super.asReader();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void disconnect() throws IOException {
        if (!future.isDone() && !future.isCancelled()) {
            future.cancel(true);
        }
    }

    // Cached failure from the first evaluation; rethrown on later accesses.
    private Throwable th = null;

    /**
     * Resolve the URL-fetch future (once), populating status code, headers and
     * body. Throws TwitterRuntimeException wrapping the stored failure on any
     * error, including on repeated calls after a failed first evaluation.
     */
    private void ensureResponseEvaluated() {
        if (th != null) {
            throw new TwitterRuntimeException(th);
        }
        if (responseGot) {
            return;
        }
        responseGot = true;
        if (future.isCancelled()) {
            th = new TwitterException("HttpResponse already disconnected.");
            throw new TwitterRuntimeException(th);
        }
        try {
            HTTPResponse r = future.get();
            statusCode = r.getResponseCode();
            headers = new HashMap<String, String>();
            for (HTTPHeader h : r.getHeaders()) {
                headers.put(h.getName().toLowerCase(Locale.ENGLISH), h.getValue());
            }
            byte[] content = r.getContent();
            is = new ByteArrayInputStream(content);
            if ("gzip".equals(headers.get("content-encoding"))) {
                // the response is gzipped
                try {
                    is = new GZIPInputStream(is);
                } catch (IOException e) {
                    th = e;
                    throw new TwitterRuntimeException(th);
                }
            }
            responseAsString = inputStreamToString(is);
            if (statusCode < OK || (statusCode != FOUND && MULTIPLE_CHOICES <= statusCode)) {
                if (statusCode == ENHANCE_YOUR_CLAIM ||
                        statusCode == BAD_REQUEST ||
                        statusCode < INTERNAL_SERVER_ERROR) {
                    th = new TwitterException(responseAsString, null, statusCode);
                    throw new TwitterRuntimeException(th);
                }
            }
        } catch (ExecutionException e) {
            // ExecutionException is guaranteed to carry the underlying cause.
            th = e.getCause();
        } catch (InterruptedException e) {
            // Fix: the original stored e.getCause(), which is normally null for
            // InterruptedException, so the failure was silently swallowed and
            // the interrupt flag was dropped. Store the exception itself and
            // restore the thread's interrupted status.
            Thread.currentThread().interrupt();
            th = e;
        }
        if (th != null) {
            throw new TwitterRuntimeException(th);
        }
    }

    /**
     * Read the stream into responseAsString (memoized). Returns null if
     * reading fails. Line terminators are dropped, matching the original
     * line-by-line concatenation.
     */
    private String inputStreamToString(InputStream is) {
        if (responseAsString == null) {
            StringBuilder buf = new StringBuilder();
            InputStreamReader isr;
            try {
                // Fix: decode as UTF-8 (the Twitter API's encoding) instead of
                // the platform default charset.
                isr = new InputStreamReader(is, "UTF-8");
            } catch (UnsupportedEncodingException neverHappens) {
                // UTF-8 is required to be supported by every JVM.
                isr = new InputStreamReader(is);
            }
            BufferedReader br = new BufferedReader(isr);
            String line;
            try {
                while ((line = br.readLine()) != null) {
                    buf.append(line);
                }
            } catch (IOException e) {
                return null;
            }
            responseAsString = buf.toString();
        }
        return responseAsString;
    }

    @Override
    public String toString() {
        return "GAEHttpResponse{" +
                "future=" + future +
                ", responseGot=" + responseGot +
                ", headers=" + headers +
                '}';
    }
}
| |
package org.yeastrc.paws.server_communication;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.params.HttpConnectionParams;
import org.apache.http.params.HttpParams;
import org.apache.log4j.Logger;
import org.yeastrc.paws.base.client_server_shared_objects.GetDataForTrackingIdServerResponse;
import org.yeastrc.paws.base.constants.InternalRestWebServicePathsConstants;
import org.yeastrc.paws.base.constants.RestWebServiceQueryStringAndFormFieldParamsConstants;
import org.yeastrc.paws.constants.ServerSendReceiveConstants;
import org.yeastrc.paws.exceptions.PawsTrackingIdNotFoundException;
import com.fasterxml.jackson.databind.ObjectMapper;
/**
* Get Annotation Parts from server for Tracking Id
*
* If the Annotation data hasn't been computed, returns the protein sequence
*/
public class GetAnnotationDataPartsForTrackingId {

    private static Logger log = Logger.getLogger(GetAnnotationDataPartsForTrackingId.class);

    /**
     * Result holder: the protein sequence for the tracking id, and whether the
     * annotation data was already computed on the server.
     */
    public static class GetAnnotationDataPartsForTrackingIdResult {

        String sequence;
        boolean alreadyComputed;

        public String getSequence() {
            return sequence;
        }
        public boolean isAlreadyComputed() {
            return alreadyComputed;
        }
    }

    //  Set from another thread; checked by the retry loop to stop promptly.
    private static volatile boolean shutdownRequested = false;

    /**
     * Request shutdown and wake any waiting threads.
     */
    public static void shutdown() {
        log.warn( "shutdown() called" );
        shutdownRequested = true;
        awaken();
    }

    /**
     * Request that processing stop after the current job completes.
     */
    public static void stopRunningAfterProcessingJob() {
        log.warn( "stopRunningAfterProcessingJob() called" );
        shutdownRequested = true;
        awaken();
    }

    /**
     * Wake threads waiting on the shared monitor.
     * NOTE(review): synchronizes on SendResultsToServer.class rather than this
     * class — looks copy-pasted; confirm the waiters really use that monitor.
     */
    public static void awaken() {
        log.warn( "awaken() called" );
        synchronized (SendResultsToServer.class) {
            SendResultsToServer.class.notifyAll();
        }
    }

    /**
     * Fetch the data-to-process for the given tracking id from the server,
     * retrying on timeouts up to the configured limit.
     *
     * @param trackingId the server-side tracking id to fetch data for
     * @param serverBaseURL base URL of the server's internal REST service
     * @return the protein sequence and already-computed flag for the tracking id
     * @throws PawsTrackingIdNotFoundException when the server has no record
     *         for the tracking id, or reports failure
     * @throws Throwable on HTTP errors, retry exhaustion, or shutdown
     */
    public static GetAnnotationDataPartsForTrackingIdResult getProteinSequenceForTrackingId(
            int trackingId,
            String serverBaseURL ) throws Throwable {

        String serverURL = serverBaseURL + InternalRestWebServicePathsConstants.SERVER_URL_EXTENSION__GET_DATA_TO_PROCESS
                + "?" + RestWebServiceQueryStringAndFormFieldParamsConstants.REQUEST_PARAM_TRACKING_ID + "=" + trackingId;

        GetAnnotationDataPartsForTrackingIdResult getAnnotationDataPartsForTrackingIdResult = new GetAnnotationDataPartsForTrackingIdResult();

        if ( log.isInfoEnabled() ) {
            log.info( "getProteinSequenceForTrackingId. trackingId: " + trackingId );
        }

        HttpClient client = null;
        HttpGet httpGet = null;

        try {
            client = new DefaultHttpClient();

            HttpParams httpParams = client.getParams();
            HttpConnectionParams.setConnectionTimeout(httpParams, ServerSendReceiveConstants.HTTP_CONNECTION_TIMEOUT_MILLIS);
            HttpConnectionParams.setSoTimeout(httpParams, ServerSendReceiveConstants.HTTP_SOCKET_TIMEOUT_MILLIS);

            httpGet = new HttpGet( serverURL );

            HttpResponse httpResponse = null;

            int insertInProgressRetryCount = 0;
            boolean sendWithoutInsertInProgressResponse = false;

            // NOTE: this outer loop currently runs at most once — the flag is
            // set true at the top and the body ends in a return/throw. Kept
            // for structural compatibility with the insert-in-progress retry
            // pattern used elsewhere.
            while ( ! sendWithoutInsertInProgressResponse ) {
                sendWithoutInsertInProgressResponse = true;
                insertInProgressRetryCount++;

                int timeoutRetryCount = 0;
                boolean sendWithoutException = false;

                while ( !shutdownRequested && ! sendWithoutException ) {
                    timeoutRetryCount++;
                    try {
                        if ( log.isInfoEnabled() ) {
                            log.info( "About to Send: timeoutRetryCount: " + timeoutRetryCount
                                    + ", Send the result to the server. trackingId: " + trackingId );
                        }
                        httpResponse = client.execute(httpGet);
                        sendWithoutException = true; // if got response without exception, exit loop
                        if ( log.isInfoEnabled() ) {
                            log.info( "Send without Exception: Send the result to the server. trackingId: " + trackingId );
                        }
                    } catch (Throwable t) {
                        if ( timeoutRetryCount > ServerSendReceiveConstants.SEND_RECEIVE_RESULTS_TIMEOUT_RETRY_COUNT ) {
                            // if retry count exceeded, rethrow the exception to exit the retry loop
                            String msg = "Timeout in HTTP Send. Failed to send Program Results String, retry count exceeded so failing job. "
                                    + " trackingId = "
                                    + trackingId + ", timeoutRetryCount = " + timeoutRetryCount + ", timeout retry count max = "
                                    + ServerSendReceiveConstants.SEND_RECEIVE_RESULTS_TIMEOUT_RETRY_COUNT;
                            log.error( msg, t);
                            throw new Exception( msg, t );
                        }
                        log.error("Timeout in HTTP Send. Failed to send Program Results String, retry count NOT exceeded so retrying send. "
                                + " trackingId = "
                                + trackingId + ", timeoutRetryCount = " + timeoutRetryCount + ", timeout retry count max = "
                                + ServerSendReceiveConstants.SEND_RECEIVE_RESULTS_TIMEOUT_RETRY_COUNT, t);
                    }
                }

                // Fix: when shutdownRequested stops the retry loop before any
                // response arrives, httpResponse is null and the original code
                // threw NullPointerException below. Fail with a clear message.
                if ( httpResponse == null ) {
                    String msg = "Shutdown requested before a server response was received. trackingId: " + trackingId;
                    log.error( msg );
                    throw new Exception( msg );
                }

                int httpStatusCode = httpResponse.getStatusLine().getStatusCode();

                ByteArrayOutputStream serverResponse = new ByteArrayOutputStream( 40000 );
                InputStream responseInputStream = null;
                byte[] responseBytes = new byte[100000];
                try {
                    responseInputStream = httpResponse.getEntity().getContent();
                    while (true) {
                        int bytesRead = responseInputStream.read( responseBytes );
                        if ( bytesRead == -1 ) {
                            break;
                        }
                        serverResponse.write(responseBytes, 0, bytesRead);
                    }
                } catch ( Exception e ) {
                    String msg = "Failed reading response from server";
                    log.error( msg, e );
                    throw e;
                } finally {
                    if ( responseInputStream != null ) {
                        responseInputStream.close();
                    }
                }

                // The HttpStatus should be 200 ( HttpStatus.SC_OK )
                if ( httpStatusCode != HttpStatus.SC_OK ) {
                    String msg = "Fail Get data from server. httpStatusCode != HttpStatus.SC_OK. httpStatusCode: " + httpStatusCode
                            + ", serverURL: " + serverURL + ", trackingId: " + trackingId
                            ;
                    log.error( msg );
                    throw new Exception( msg );
                }

                ObjectMapper jacksonJSON_Mapper = new ObjectMapper(); // Jackson JSON library object

                GetDataForTrackingIdServerResponse getDataForTrackingIdServerResponse =
                        jacksonJSON_Mapper.readValue( serverResponse.toByteArray(), GetDataForTrackingIdServerResponse.class );

                if ( getDataForTrackingIdServerResponse.isNoRecordForTrackingId() ) {
                    String msg = "No record for tracking id: " + trackingId;
                    log.error( msg );
                    throw new PawsTrackingIdNotFoundException( msg );
                }
                if ( ! getDataForTrackingIdServerResponse.isSuccess() ) {
                    String msg = "Get record for tracking id failed. tracking id: " + trackingId;
                    log.error( msg );
                    throw new PawsTrackingIdNotFoundException( msg );
                }

                getAnnotationDataPartsForTrackingIdResult.alreadyComputed = getDataForTrackingIdServerResponse.isDataAlreadyProcessed();
                getAnnotationDataPartsForTrackingIdResult.sequence = getDataForTrackingIdServerResponse.getSequence();

                return getAnnotationDataPartsForTrackingIdResult;
            } //

        } catch (Throwable t) {
            log.error("Failed to get data. trackingId = " + trackingId, t);
            throw t;
        } finally {
            // Fix: the original left this finally block empty, leaking the
            // HttpClient's connection manager resources on every call.
            if ( client != null ) {
                try {
                    client.getConnectionManager().shutdown();
                } catch ( Throwable t ) {
                    log.error( "Failed to shut down HttpClient connection manager. trackingId = " + trackingId, t );
                }
            }
        }

        // Unreachable in practice (the loop body always returns or throws),
        // but required by the compiler for the loop-exit path.
        if ( log.isInfoEnabled() ) {
            log.info( "SUCCESSFUL: Get without Exception: Get the data to process from the server. trackingId: " + trackingId );
        }
        return getAnnotationDataPartsForTrackingIdResult;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_HOST_NAME_KEY;
import static org.apache.hadoop.hdfs.server.common.Util.fileAsURI;
import java.io.File;
import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.hdfs.server.datanode.SecureDataNodeStarter;
import org.apache.hadoop.hdfs.server.datanode.SimulatedFSDataset;
import org.apache.hadoop.hdfs.server.datanode.SecureDataNodeStarter.SecureResources;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.net.StaticMapping;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.ssl.SSLFactory;
/**
 * A {@link MiniDFSCluster} variant that registers each started DataNode in a
 * two-level "rack + node group" topology, so that node-group-aware block
 * placement policies can be exercised in tests.
 *
 * <p>Node groups must be supplied via {@link #setNodeGroups(String[])}
 * <em>before</em> the cluster is built, because the parent class drives
 * datanode startup through the overridden
 * {@link #startDataNodes(Configuration, int, StorageType, boolean,
 * StartupOption, String[], String[], long[], boolean, boolean, boolean)}
 * entry point during construction.
 */
public class MiniDFSClusterWithNodeGroup extends MiniDFSCluster {
  /** Node group assigned to each datanode; consumed by the override below. */
  private static String[] NODE_GROUPS = null;
  private static final Log LOG = LogFactory.getLog(MiniDFSClusterWithNodeGroup.class);

  public MiniDFSClusterWithNodeGroup(Builder builder) throws IOException {
    super(builder);
  }

  /**
   * Sets the node group for each datanode the cluster will start.
   * Must be called before the cluster is constructed (see class javadoc).
   *
   * @param nodeGroups one node-group name per datanode, in start order
   */
  public static void setNodeGroups (String[] nodeGroups) {
    NODE_GROUPS = nodeGroups;
  }

  /**
   * Starts {@code numDataNodes} datanodes and registers each one in the
   * static topology mapping under the concatenation of its rack and node
   * group, both by hostname and by IP:port.
   *
   * @param conf the base configuration cloned for each datanode
   * @param numDataNodes number of datanodes to start
   * @param storageType storage type for managed data directories
   * @param manageDfsDirs whether this class creates/configures data dirs
   * @param operation startup option; only ROLLBACK is forwarded as an arg
   * @param racks per-datanode rack names, or null for no topology
   * @param nodeGroups per-datanode node groups, or null for rack-only
   * @param hosts per-datanode hostnames; generated when null and racks set
   * @param simulatedCapacities per-datanode simulated storage capacities,
   *        or null to use real storage
   * @param setupHostsFile whether to write the datanode into the hosts file
   * @param checkDataNodeAddrConfig whether to honor pre-configured addresses
   * @param checkDataNodeHostConfig whether to honor a pre-configured hostname
   * @throws IOException if a datanode cannot be instantiated
   * @throws IllegalArgumentException if any per-datanode array is shorter
   *         than {@code numDataNodes}
   */
  public synchronized void startDataNodes(Configuration conf, int numDataNodes,
      StorageType storageType, boolean manageDfsDirs, StartupOption operation,
      String[] racks, String[] nodeGroups, String[] hosts,
      long[] simulatedCapacities,
      boolean setupHostsFile,
      boolean checkDataNodeAddrConfig,
      boolean checkDataNodeHostConfig) throws IOException {
    // Recovery mode starts no datanodes.
    if (operation == StartupOption.RECOVER) {
      return;
    }
    if (checkDataNodeHostConfig) {
      conf.setIfUnset(DFS_DATANODE_HOST_NAME_KEY, "127.0.0.1");
    } else {
      conf.set(DFS_DATANODE_HOST_NAME_KEY, "127.0.0.1");
    }
    // BUG FIX: an unconditional conf.set(DFS_DATANODE_HOST_NAME_KEY,
    // "127.0.0.1") used to follow here; it clobbered any pre-configured
    // hostname and made the checkDataNodeHostConfig branch above a no-op.
    int curDatanodesNum = dataNodes.size();
    // for mincluster's the default initialDelay for BRs is 0
    if (conf.get(DFSConfigKeys.DFS_BLOCKREPORT_INITIAL_DELAY_KEY) == null) {
      conf.setLong(DFSConfigKeys.DFS_BLOCKREPORT_INITIAL_DELAY_KEY, 0);
    }
    // If minicluster's name node is null assume that the conf has been
    // set with the right address:port of the name node.
    //
    if (racks != null && numDataNodes > racks.length ) {
      throw new IllegalArgumentException( "The length of racks [" + racks.length
          + "] is less than the number of datanodes [" + numDataNodes + "].");
    }
    if (nodeGroups != null && numDataNodes > nodeGroups.length ) {
      throw new IllegalArgumentException( "The length of nodeGroups [" + nodeGroups.length
          + "] is less than the number of datanodes [" + numDataNodes + "].");
    }
    if (hosts != null && numDataNodes > hosts.length ) {
      throw new IllegalArgumentException( "The length of hosts [" + hosts.length
          + "] is less than the number of datanodes [" + numDataNodes + "].");
    }
    // Generate some hostnames if required
    if (racks != null && hosts == null) {
      hosts = new String[numDataNodes];
      for (int i = curDatanodesNum; i < curDatanodesNum + numDataNodes; i++) {
        hosts[i - curDatanodesNum] = "host" + i + ".foo.com";
      }
    }
    if (simulatedCapacities != null
        && numDataNodes > simulatedCapacities.length) {
      throw new IllegalArgumentException( "The length of simulatedCapacities ["
          + simulatedCapacities.length
          + "] is less than the number of datanodes [" + numDataNodes + "].");
    }
    // Command-line args are only forwarded for ROLLBACK; otherwise null.
    String [] dnArgs = (operation == null ||
        operation != StartupOption.ROLLBACK) ?
        null : new String[] {operation.getName()};
    for (int i = curDatanodesNum; i < curDatanodesNum+numDataNodes; i++) {
      Configuration dnConf = new HdfsConfiguration(conf);
      // Set up datanode address
      setupDatanodeAddress(dnConf, setupHostsFile, checkDataNodeAddrConfig);
      if (manageDfsDirs) {
        String dirs = makeDataNodeDirs(i, storageType);
        dnConf.set(DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY, dirs);
        conf.set(DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY, dirs);
      }
      if (simulatedCapacities != null) {
        // Use the in-memory simulated dataset instead of real storage.
        SimulatedFSDataset.setFactory(dnConf);
        dnConf.setLong(SimulatedFSDataset.CONFIG_PROPERTY_CAPACITY,
            simulatedCapacities[i-curDatanodesNum]);
      }
      LOG.info("Starting DataNode " + i + " with "
          + DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY + ": "
          + dnConf.get(DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY));
      if (hosts != null) {
        dnConf.set(DFSConfigKeys.DFS_DATANODE_HOST_NAME_KEY, hosts[i - curDatanodesNum]);
        LOG.info("Starting DataNode " + i + " with hostname set to: "
            + dnConf.get(DFSConfigKeys.DFS_DATANODE_HOST_NAME_KEY));
      }
      if (racks != null) {
        // hosts is guaranteed non-null here: it was generated above
        // whenever racks != null and hosts == null.
        String name = hosts[i - curDatanodesNum];
        if (nodeGroups == null) {
          LOG.info("Adding node with hostname : " + name + " to rack " +
              racks[i-curDatanodesNum]);
          StaticMapping.addNodeToRack(name,racks[i-curDatanodesNum]);
        } else {
          // Rack and node group are concatenated into one topology path.
          LOG.info("Adding node with hostname : " + name + " to serverGroup " +
              nodeGroups[i-curDatanodesNum] + " and rack " +
              racks[i-curDatanodesNum]);
          StaticMapping.addNodeToRack(name,racks[i-curDatanodesNum] +
              nodeGroups[i-curDatanodesNum]);
        }
      }
      Configuration newconf = new HdfsConfiguration(dnConf); // save config
      if (hosts != null) {
        NetUtils.addStaticResolution(hosts[i - curDatanodesNum], "localhost");
      }
      SecureResources secureResources = null;
      if (UserGroupInformation.isSecurityEnabled()) {
        try {
          secureResources = SecureDataNodeStarter.getSecureResources(dnConf);
        } catch (Exception ex) {
          // NOTE(review): the failure is only printed and the datanode is
          // then started without secure resources — consider failing fast.
          ex.printStackTrace();
        }
      }
      DataNode dn = DataNode.instantiateDataNode(dnArgs, dnConf, secureResources);
      if(dn == null)
        throw new IOException("Cannot start DataNode in "
            + dnConf.get(DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY));
      //since the HDFS does things based on IP:port, we need to add the mapping
      //for IP:port to rackId
      String ipAddr = dn.getXferAddress().getAddress().getHostAddress();
      if (racks != null) {
        int port = dn.getXferAddress().getPort();
        if (nodeGroups == null) {
          LOG.info("Adding node with IP:port : " + ipAddr + ":" + port +
              " to rack " + racks[i-curDatanodesNum]);
          StaticMapping.addNodeToRack(ipAddr + ":" + port,
              racks[i-curDatanodesNum]);
        } else {
          LOG.info("Adding node with IP:port : " + ipAddr + ":" + port + " to nodeGroup " +
              nodeGroups[i-curDatanodesNum] + " and rack " + racks[i-curDatanodesNum]);
          StaticMapping.addNodeToRack(ipAddr + ":" + port, racks[i-curDatanodesNum] +
              nodeGroups[i-curDatanodesNum]);
        }
      }
      dn.runDatanodeDaemon();
      dataNodes.add(new DataNodeProperties(dn, newconf, dnArgs, secureResources));
    }
    curDatanodesNum += numDataNodes;
    this.numDataNodes += numDataNodes;
    waitActive();
  }

  /**
   * Convenience overload using {@link StorageType#DEFAULT} and no
   * address/hostname config checks.
   */
  public synchronized void startDataNodes(Configuration conf, int numDataNodes,
      boolean manageDfsDirs, StartupOption operation,
      String[] racks, String[] nodeGroups, String[] hosts,
      long[] simulatedCapacities,
      boolean setupHostsFile) throws IOException {
    startDataNodes(conf, numDataNodes, StorageType.DEFAULT, manageDfsDirs, operation, racks, nodeGroups,
        hosts, simulatedCapacities, setupHostsFile, false, false);
  }

  /**
   * Convenience overload with no explicit hostnames and no hosts-file setup.
   */
  public void startDataNodes(Configuration conf, int numDataNodes,
      boolean manageDfsDirs, StartupOption operation,
      String[] racks, long[] simulatedCapacities,
      String[] nodeGroups) throws IOException {
    startDataNodes(conf, numDataNodes, manageDfsDirs, operation, racks, nodeGroups,
        null, simulatedCapacities, false);
  }

  // This is for initialize from parent class.
  @Override
  public synchronized void startDataNodes(Configuration conf, int numDataNodes,
      StorageType storageType, boolean manageDfsDirs, StartupOption operation,
      String[] racks, String[] hosts,
      long[] simulatedCapacities,
      boolean setupHostsFile,
      boolean checkDataNodeAddrConfig,
      boolean checkDataNodeHostConfig) throws IOException {
    // Inject the statically-configured node groups into the full overload.
    startDataNodes(conf, numDataNodes, storageType, manageDfsDirs, operation, racks,
        NODE_GROUPS, hosts, simulatedCapacities, setupHostsFile,
        checkDataNodeAddrConfig, checkDataNodeHostConfig);
  }
}
| |
package com.drejkim.androidwearmotionsensors;
import android.app.Fragment;
import android.content.Context;
import android.graphics.Color;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.os.Bundle;
import android.util.FloatMath;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.List;
import java.lang.Object;
/**
 * Fragment that displays live values for one hardware sensor and flashes its
 * background when a shake (accelerometer) or fast rotation (gyroscope) is
 * detected. Shake events are also persisted through {@link DatabaseHelper}.
 */
public class SensorFragment extends Fragment implements SensorEventListener {
    // A shake is registered when the measured g-force exceeds this multiple of 1 g.
    private static final float SHAKE_THRESHOLD = 1.1f;
    // Minimum interval between two processed shake samples.
    private static final int SHAKE_WAIT_TIME_MS = 250;
    // Rotation threshold (rad/s) on any single axis.
    private static final float ROTATION_THRESHOLD = 2.0f;
    // Minimum interval between two processed rotation samples.
    private static final int ROTATION_WAIT_TIME_MS = 100;

    private View mView;
    private TextView mTextTitle;
    private TextView mTextValues;
    private SensorManager mSensorManager;
    private Sensor mSensor;
    private int mSensorType;
    private long mShakeTime = 0;
    private long mRotationTime = 0;
    // Timestamp (ms since epoch) of the most recent recorded shake.
    long unixTime;
    // Persistence helper for contacts and shake samples.
    DatabaseHelper db ;

    /**
     * Creates a fragment for the given {@link Sensor} type constant,
     * passing the type through the fragment's argument bundle.
     */
    public static SensorFragment newInstance(int sensorType) {
        SensorFragment f = new SensorFragment();
        // Supply sensorType as an argument
        Bundle args = new Bundle();
        args.putInt("sensorType", sensorType);
        f.setArguments(args);
        return f;
    }

    @Override public void onCreate(Bundle savedInstanceState) {
        db = new DatabaseHelper();
        super.onCreate(savedInstanceState);
        Bundle args = getArguments();
        if(args != null) {
            mSensorType = args.getInt("sensorType");
        }
        mSensorManager = (SensorManager) getActivity().getSystemService(Context.SENSOR_SERVICE);
        mSensor = mSensorManager.getDefaultSensor(mSensorType);
        // Database smoke test: insert a few contacts and read them back.
        // NOTE(review): this test data is inserted on EVERY onCreate —
        // remove once the database layer is verified.
        Log.d("Insert: ", "Inserting ..");
        db.addContact(new Contact("Ravi", "5"));
        db.addContact(new Contact("Srinivas", "6"));
        db.addContact(new Contact("Tommy", "7"));
        db.addContact(new Contact("Karthik", "8"));
        // Reading all contacts
        Log.d("Reading: ", "Reading all contacts..");
        List<Contact> contacts = db.getAllContacts();
        for (Contact cn : contacts) {
            String log = "Id: " + cn.getID() + " ,Name: " + cn.getName() + " ,Phone: " + cn.getPhoneNumber();
            // Writing Contacts to log
            Log.d("Name: ", log);
        }
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        mView = inflater.inflate(R.layout.sensor, container, false);
        mTextTitle = (TextView) mView.findViewById(R.id.text_title);
        mTextTitle.setText(mSensor.getStringType());
        mTextValues = (TextView) mView.findViewById(R.id.text_values);
        return mView;
    }

    @Override
    public void onResume() {
        super.onResume();
        mSensorManager.registerListener(this, mSensor, SensorManager.SENSOR_DELAY_NORMAL);
    }

    @Override
    public void onPause() {
        super.onPause();
        // Always unregister to avoid draining the battery in the background.
        mSensorManager.unregisterListener(this);
    }

    @Override
    public void onSensorChanged(SensorEvent event) {
        // If sensor is unreliable, then just return
        if (event.accuracy == SensorManager.SENSOR_STATUS_UNRELIABLE)
        {
            return;
        }
        mTextValues.setText(
                "x = " + Float.toString(event.values[0]) + "\n" +
                "y = " + Float.toString(event.values[1]) + "\n" +
                "z = " + Float.toString(event.values[2]) + "\n"
        );
        if(event.sensor.getType() == Sensor.TYPE_ACCELEROMETER) {
            detectShake(event);
        }
        else if(event.sensor.getType() == Sensor.TYPE_GYROSCOPE) {
            detectRotation(event);
        }
    }

    @Override
    public void onAccuracyChanged(Sensor sensor, int accuracy) {
    }

    // References:
    // - http://jasonmcreynolds.com/?p=388
    // - http://code.tutsplus.com/tutorials/using-the-accelerometer-on-android--mobile-22125
    /**
     * Flags a shake when the total acceleration exceeds SHAKE_THRESHOLD g,
     * rate-limited to one evaluation per SHAKE_WAIT_TIME_MS. A detected shake
     * turns the background blue and records the sample in the database.
     */
    private void detectShake(SensorEvent event) {
        long now = System.currentTimeMillis();
        if((now - mShakeTime) > SHAKE_WAIT_TIME_MS) {
            mShakeTime = now;
            float gX = event.values[0] / SensorManager.GRAVITY_EARTH;
            float gY = event.values[1] / SensorManager.GRAVITY_EARTH;
            float gZ = event.values[2] / SensorManager.GRAVITY_EARTH;
            // gForce will be close to 1 when there is no movement.
            // BUG FIX: FloatMath.sqrt is deprecated and removed in API 23;
            // use Math.sqrt with a cast instead.
            float gForce = (float) Math.sqrt(gX*gX + gY*gY + gZ*gZ);
            // Change background color if gForce exceeds threshold;
            // otherwise, reset the color
            if(gForce > SHAKE_THRESHOLD) {
                mView.setBackgroundColor(Color.rgb(0, 0, 100));
                unixTime = System.currentTimeMillis();
                Log.d("Insert: ", "Inserting data"+Long.toString(unixTime));
                db.addData(new Data(unixTime,gX, gY, gZ));
            }
            else {
                mView.setBackgroundColor(Color.BLACK);
            }
        }
    }

    /**
     * Flags a rotation when the angular rate on any axis exceeds
     * ROTATION_THRESHOLD, rate-limited to one evaluation per
     * ROTATION_WAIT_TIME_MS.
     */
    private void detectRotation(SensorEvent event) {
        long now = System.currentTimeMillis();
        if((now - mRotationTime) > ROTATION_WAIT_TIME_MS) {
            mRotationTime = now;
            // Change background color if rate of rotation around any
            // axis and in any direction exceeds threshold;
            // otherwise, reset the color
            if(Math.abs(event.values[0]) > ROTATION_THRESHOLD ||
               Math.abs(event.values[1]) > ROTATION_THRESHOLD ||
               Math.abs(event.values[2]) > ROTATION_THRESHOLD) {
                mView.setBackgroundColor(Color.rgb(0, 0, 100));
            }
            else {
                mView.setBackgroundColor(Color.BLACK);
            }
        }
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
    }

    /**
     * NOTE(review): dead code — never called, and the actual byte-copy loop
     * was commented out, so this only opens and closes a stream. Prefer
     * {@link #copyDatabaseFromAssets(Context, String, boolean)}.
     */
    private void copyDataBase()
    {
        Log.i("DB", "New database is being copied to device!");
        try
        {
            File dbFile = getActivity().getDatabasePath(DataBaseHandler.DATABASE_NAME);
            Log.d("DB",dbFile.getPath());
            Log.d("DB","Trying to copy");
            // NOTE(review): AssetManager.open() expects an asset-relative
            // path, not the absolute database path — this throws if executed.
            InputStream myInput = getActivity().getAssets().open(dbFile.getPath());
            myInput.close();
            Log.d("DB", "New database has been copied to device!");
        }
        catch(IOException e)
        {
            e.printStackTrace();
        }
    }

    /**
     * Copies a database shipped in the app's assets into the app's database
     * directory, writing to a ".temp" file first and renaming on success.
     *
     * @param context      context used to resolve assets and database paths
     * @param databaseName asset name and target database file name
     * @param overwrite    when false, an existing database is kept as-is
     * @return true if the database exists (already or after copying),
     *         false if the copy failed
     */
    private boolean copyDatabaseFromAssets(Context context, String databaseName , boolean overwrite) {
        File outputFile = context.getDatabasePath(databaseName);
        if (outputFile.exists() && !overwrite) {
            return true;
        }
        // BUG FIX: point outputFile at the temp path BEFORE opening the asset.
        // Previously, if the asset open threw, the catch block deleted the
        // REAL database file because outputFile still referenced it.
        outputFile = context.getDatabasePath(databaseName + ".temp");
        try {
            outputFile.getParentFile().mkdirs();
            Log.d("DB","Trying to copy");
            // BUG FIX: try-with-resources closes both streams even when the
            // copy loop throws; the old code leaked them on any IOException.
            try (InputStream inputStream = context.getAssets().open(databaseName);
                 OutputStream outputStream = new FileOutputStream(outputFile)) {
                // transfer bytes from the input stream into the output stream
                byte[] buffer = new byte[1024];
                int length;
                while ((length = inputStream.read(buffer)) > 0) {
                    outputStream.write(buffer, 0, length);
                }
                outputStream.flush();
            }
            outputFile.renameTo(context.getDatabasePath(databaseName));
            Log.d("DB","Database copied");
        } catch (IOException e) {
            Log.d("DB","Couldn't copy database");
            if (outputFile.exists()) {
                outputFile.delete();
            }
            return false;
        }
        return true;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.web.security.x509;
import org.apache.commons.lang3.StringUtils;
import org.apache.nifi.authentication.AuthenticationResponse;
import org.apache.nifi.authorization.AuthorizationRequest;
import org.apache.nifi.authorization.AuthorizationResult;
import org.apache.nifi.authorization.Authorizer;
import org.apache.nifi.authorization.user.NiFiUser;
import org.apache.nifi.authorization.user.NiFiUserDetails;
import org.apache.nifi.authorization.user.StandardNiFiUser;
import org.apache.nifi.util.NiFiProperties;
import org.apache.nifi.web.security.InvalidAuthenticationException;
import org.apache.nifi.web.security.UntrustedProxyException;
import org.apache.nifi.web.security.token.NiFiAuthenticationToken;
import org.junit.Before;
import org.junit.Test;
import java.security.Principal;
import java.security.cert.X509Certificate;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@link X509AuthenticationProvider}: direct client
 * certificates, proxy chains (including anonymous entries), untrusted
 * proxies, and the static {@code createUser} factory.
 */
public class X509AuthenticationProviderTest {
    private static final String INVALID_CERTIFICATE = "invalid-certificate";
    private static final String IDENTITY_1 = "identity-1";
    private static final String ANONYMOUS = "";
    private static final String UNTRUSTED_PROXY = "untrusted-proxy";
    private static final String PROXY_1 = "proxy-1";
    private static final String PROXY_2 = "proxy-2";
    // Regex patterns and replacement strings used to (un)escape the <> that
    // delimit DN entries in a proxy chain.
    private static final String GT = ">";
    private static final String ESCAPED_GT = "\\\\>";
    private static final String LT = "<";
    private static final String ESCAPED_LT = "\\\\<";

    private X509AuthenticationProvider x509AuthenticationProvider;
    private X509IdentityProvider certificateIdentityProvider;
    private SubjectDnX509PrincipalExtractor extractor;
    private Authorizer authorizer;

    /**
     * Builds the provider under test with a mocked identity provider (rejects
     * INVALID_CERTIFICATE, echoes any other DN back as the identity) and a
     * mocked authorizer (denies only UNTRUSTED_PROXY).
     */
    @Before
    public void setup() {
        extractor = new SubjectDnX509PrincipalExtractor();
        certificateIdentityProvider = mock(X509IdentityProvider.class);
        when(certificateIdentityProvider.authenticate(any(X509Certificate[].class))).then(invocation -> {
            final X509Certificate[] certChain = invocation.getArgumentAt(0, X509Certificate[].class);
            final String identity = extractor.extractPrincipal(certChain[0]).toString();
            if (INVALID_CERTIFICATE.equals(identity)) {
                throw new IllegalArgumentException();
            }
            return new AuthenticationResponse(identity, identity, TimeUnit.MILLISECONDS.convert(12, TimeUnit.HOURS), "");
        });
        authorizer = mock(Authorizer.class);
        when(authorizer.authorize(any(AuthorizationRequest.class))).then(invocation -> {
            final AuthorizationRequest request = invocation.getArgumentAt(0, AuthorizationRequest.class);
            if (UNTRUSTED_PROXY.equals(request.getIdentity())) {
                return AuthorizationResult.denied();
            }
            return AuthorizationResult.approved();
        });
        x509AuthenticationProvider = new X509AuthenticationProvider(certificateIdentityProvider, authorizer, NiFiProperties.createBasicNiFiProperties(null, null));
    }

    @Test(expected = InvalidAuthenticationException.class)
    public void testInvalidCertificate() {
        x509AuthenticationProvider.authenticate(getX509Request("", INVALID_CERTIFICATE));
    }

    @Test
    public void testNoProxyChain() {
        final NiFiAuthenticationToken auth = (NiFiAuthenticationToken) x509AuthenticationProvider.authenticate(getX509Request("", IDENTITY_1));
        final NiFiUser user = ((NiFiUserDetails) auth.getDetails()).getNiFiUser();
        assertNotNull(user);
        assertEquals(IDENTITY_1, user.getIdentity());
        assertFalse(user.isAnonymous());
    }

    @Test(expected = UntrustedProxyException.class)
    public void testUntrustedProxy() {
        x509AuthenticationProvider.authenticate(getX509Request(buildProxyChain(IDENTITY_1), UNTRUSTED_PROXY));
    }

    @Test
    public void testOneProxy() {
        final NiFiAuthenticationToken auth = (NiFiAuthenticationToken) x509AuthenticationProvider.authenticate(getX509Request(buildProxyChain(IDENTITY_1), PROXY_1));
        final NiFiUser user = ((NiFiUserDetails) auth.getDetails()).getNiFiUser();
        assertNotNull(user);
        assertEquals(IDENTITY_1, user.getIdentity());
        assertFalse(user.isAnonymous());
        assertNotNull(user.getChain());
        assertEquals(PROXY_1, user.getChain().getIdentity());
        assertFalse(user.getChain().isAnonymous());
    }

    @Test
    public void testAnonymousWithOneProxy() {
        final NiFiAuthenticationToken auth = (NiFiAuthenticationToken) x509AuthenticationProvider.authenticate(getX509Request(buildProxyChain(ANONYMOUS), PROXY_1));
        final NiFiUser user = ((NiFiUserDetails) auth.getDetails()).getNiFiUser();
        assertNotNull(user);
        assertEquals(StandardNiFiUser.ANONYMOUS_IDENTITY, user.getIdentity());
        assertTrue(user.isAnonymous());
        assertNotNull(user.getChain());
        assertEquals(PROXY_1, user.getChain().getIdentity());
        assertFalse(user.getChain().isAnonymous());
    }

    @Test
    public void testTwoProxies() {
        final NiFiAuthenticationToken auth = (NiFiAuthenticationToken) x509AuthenticationProvider.authenticate(getX509Request(buildProxyChain(IDENTITY_1, PROXY_2), PROXY_1));
        final NiFiUser user = ((NiFiUserDetails) auth.getDetails()).getNiFiUser();
        assertNotNull(user);
        assertEquals(IDENTITY_1, user.getIdentity());
        assertFalse(user.isAnonymous());
        assertNotNull(user.getChain());
        assertEquals(PROXY_2, user.getChain().getIdentity());
        assertFalse(user.getChain().isAnonymous());
        assertNotNull(user.getChain().getChain());
        assertEquals(PROXY_1, user.getChain().getChain().getIdentity());
        assertFalse(user.getChain().getChain().isAnonymous());
    }

    @Test(expected = UntrustedProxyException.class)
    public void testUntrustedProxyInChain() {
        x509AuthenticationProvider.authenticate(getX509Request(buildProxyChain(IDENTITY_1, UNTRUSTED_PROXY), PROXY_1));
    }

    @Test
    public void testAnonymousProxyInChain() {
        final NiFiAuthenticationToken auth = (NiFiAuthenticationToken) x509AuthenticationProvider.authenticate(getX509Request(buildProxyChain(IDENTITY_1, ANONYMOUS), PROXY_1));
        final NiFiUser user = ((NiFiUserDetails) auth.getDetails()).getNiFiUser();
        assertNotNull(user);
        assertEquals(IDENTITY_1, user.getIdentity());
        assertFalse(user.isAnonymous());
        assertNotNull(user.getChain());
        assertEquals(StandardNiFiUser.ANONYMOUS_IDENTITY, user.getChain().getIdentity());
        assertTrue(user.getChain().isAnonymous());
        assertNotNull(user.getChain().getChain());
        assertEquals(PROXY_1, user.getChain().getChain().getIdentity());
        assertFalse(user.getChain().getChain().isAnonymous());
    }

    @Test
    public void testShouldCreateAnonymousUser() {
        // Arrange
        String identity = "someone";
        // Act
        NiFiUser user = X509AuthenticationProvider.createUser(identity, null, null, null, true);
        // Assert
        // BUG FIX: these were plain Java `assert` statements, which are
        // silently skipped unless the JVM runs with -ea; use the JUnit
        // assertions (already statically imported) so they always execute.
        assertNotNull(user);
        assertTrue(user instanceof StandardNiFiUser);
        assertEquals(StandardNiFiUser.ANONYMOUS_IDENTITY, user.getIdentity());
        assertTrue(user.isAnonymous());
    }

    @Test
    public void testShouldCreateKnownUser() {
        // Arrange
        String identity = "someone";
        // Act
        NiFiUser user = X509AuthenticationProvider.createUser(identity, null, null, null, false);
        // Assert
        // BUG FIX: converted from plain Java `assert` statements (see above).
        assertNotNull(user);
        assertTrue(user instanceof StandardNiFiUser);
        assertEquals(identity, user.getIdentity());
        assertFalse(user.isAnonymous());
    }

    /** Joins the given identities into a {@code <dn1><dn2>...} proxy chain. */
    private String buildProxyChain(final String... identities) {
        List<String> elements = Arrays.asList(identities);
        return StringUtils.join(elements.stream().map(X509AuthenticationProviderTest::formatDn).collect(Collectors.toList()), "");
    }

    /** Wraps a sanitized DN in the {@code <...>} chain delimiters. */
    private static String formatDn(String rawDn) {
        return "<" + sanitizeDn(rawDn) + ">";
    }

    /**
     * If a user provides a DN with the sequence '><', they could escape the tokenization process and impersonate another user.
     * <p>
     * Example:
     * <p>
     * Provided DN: {@code jdoe><alopresto} -> {@code <jdoe><alopresto><proxy...>} would allow the user to impersonate jdoe
     *
     * @param rawDn the unsanitized DN
     * @return the sanitized DN
     */
    private static String sanitizeDn(String rawDn) {
        if (StringUtils.isEmpty(rawDn)) {
            return rawDn;
        } else {
            return rawDn.replaceAll(GT, ESCAPED_GT).replaceAll(LT, ESCAPED_LT);
        }
    }

    /**
     * Reconstitutes the original DN from the sanitized version passed in the proxy chain.
     * <p>
     * Example:
     * <p>
     * {@code alopresto\>\<proxy1} -> {@code alopresto><proxy1}
     *
     * @param sanitizedDn the sanitized DN
     * @return the original DN
     */
    private static String unsanitizeDn(String sanitizedDn) {
        if (StringUtils.isEmpty(sanitizedDn)) {
            return sanitizedDn;
        } else {
            return sanitizedDn.replaceAll(ESCAPED_GT, GT).replaceAll(ESCAPED_LT, LT);
        }
    }

    /** Builds an authentication request token carrying a mocked certificate. */
    private X509AuthenticationRequestToken getX509Request(final String proxyChain, final String identity) {
        return new X509AuthenticationRequestToken(proxyChain, extractor, new X509Certificate[]{getX509Certificate(identity)}, "");
    }

    /** Mocks an {@link X509Certificate} whose subject DN is {@code identity}. */
    private X509Certificate getX509Certificate(final String identity) {
        final X509Certificate certificate = mock(X509Certificate.class);
        when(certificate.getSubjectDN()).then(invocation -> {
            final Principal principal = mock(Principal.class);
            when(principal.getName()).thenReturn(identity);
            return principal;
        });
        return certificate;
    }
}
| |
/*
* Copyright 2002-2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.web.servlet.mvc.method.annotation;
import java.io.IOException;
import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.util.concurrent.Callable;
import org.springframework.http.HttpStatus;
import org.springframework.util.ClassUtils;
import org.springframework.util.StringUtils;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.context.request.ServletWebRequest;
import org.springframework.web.method.HandlerMethod;
import org.springframework.web.method.support.HandlerMethodReturnValueHandler;
import org.springframework.web.method.support.HandlerMethodReturnValueHandlerComposite;
import org.springframework.web.method.support.InvocableHandlerMethod;
import org.springframework.web.method.support.ModelAndViewContainer;
import org.springframework.web.servlet.View;
import org.springframework.web.util.NestedServletException;
/**
* Extends {@link InvocableHandlerMethod} with the ability to handle return
* values through a registered {@link HandlerMethodReturnValueHandler} and
* also supports setting the response status based on a method-level
* {@code @ResponseStatus} annotation.
*
* <p>A {@code null} return value (including void) may be interpreted as the
* end of request processing in combination with a {@code @ResponseStatus}
* annotation, a not-modified check condition
* (see {@link ServletWebRequest#checkNotModified(long)}), or
* a method argument that provides access to the response stream.
*
* @author Rossen Stoyanchev
* @since 3.1
*/
public class ServletInvocableHandlerMethod extends InvocableHandlerMethod {

	// Status/reason resolved once from the method-level @ResponseStatus, if any.
	private HttpStatus responseStatus;

	private String responseReason;

	// Composite of handlers consulted to process the method's return value.
	private HandlerMethodReturnValueHandlerComposite returnValueHandlers;

	/**
	 * Creates an instance from the given handler and method.
	 */
	public ServletInvocableHandlerMethod(Object handler, Method method) {
		super(handler, method);
		initResponseStatus();
	}

	/**
	 * Create an instance from a {@code HandlerMethod}.
	 */
	public ServletInvocableHandlerMethod(HandlerMethod handlerMethod) {
		super(handlerMethod);
		initResponseStatus();
	}

	// Caches the @ResponseStatus value/reason so they need not be re-resolved
	// on every invocation.
	private void initResponseStatus() {
		ResponseStatus annot = getMethodAnnotation(ResponseStatus.class);
		if (annot != null) {
			this.responseStatus = annot.value();
			this.responseReason = annot.reason();
		}
	}

	/**
	 * Register {@link HandlerMethodReturnValueHandler} instances to use to
	 * handle return values.
	 */
	public void setHandlerMethodReturnValueHandlers(HandlerMethodReturnValueHandlerComposite returnValueHandlers) {
		this.returnValueHandlers = returnValueHandlers;
	}

	/**
	 * Invokes the method and handles the return value through a registered
	 * {@link HandlerMethodReturnValueHandler}.
	 *
	 * @param webRequest the current request
	 * @param mavContainer the ModelAndViewContainer for this request
	 * @param providedArgs "given" arguments matched by type, not resolved
	 * @throws Exception any exception raised by the target method or by
	 * return-value handling (re-thrown after trace logging)
	 */
	public final void invokeAndHandle(ServletWebRequest webRequest,
			ModelAndViewContainer mavContainer, Object... providedArgs) throws Exception {

		Object returnValue = invokeForRequest(webRequest, mavContainer, providedArgs);
		setResponseStatus(webRequest);

		// A null return value combined with a not-modified condition, an
		// @ResponseStatus, or an already-handled request means processing is
		// complete — skip return-value handling entirely.
		if (returnValue == null) {
			if (isRequestNotModified(webRequest) || hasResponseStatus() || mavContainer.isRequestHandled()) {
				mavContainer.setRequestHandled(true);
				return;
			}
		}
		else if (StringUtils.hasText(this.responseReason)) {
			// setResponseStatus() already called sendError() with the reason;
			// the return value is intentionally ignored.
			mavContainer.setRequestHandled(true);
			return;
		}

		// Reset in case the same container was marked handled earlier in the chain.
		mavContainer.setRequestHandled(false);
		try {
			this.returnValueHandlers.handleReturnValue(returnValue, getReturnValueType(returnValue), mavContainer, webRequest);
		}
		catch (Exception ex) {
			if (logger.isTraceEnabled()) {
				logger.trace(getReturnValueHandlingErrorMessage("Error handling return value", returnValue), ex);
			}
			throw ex;
		}
	}

	/**
	 * Set the response status according to the {@link ResponseStatus} annotation.
	 * A non-empty reason triggers {@code sendError}, committing the response;
	 * otherwise only the status code is set.
	 */
	private void setResponseStatus(ServletWebRequest webRequest) throws IOException {
		if (this.responseStatus == null) {
			return;
		}
		if (StringUtils.hasText(this.responseReason)) {
			webRequest.getResponse().sendError(this.responseStatus.value(), this.responseReason);
		}
		else {
			webRequest.getResponse().setStatus(this.responseStatus.value());
		}
		// to be picked up by the RedirectView
		webRequest.getRequest().setAttribute(View.RESPONSE_STATUS_ATTRIBUTE, this.responseStatus);
	}

	/**
	 * Does the given request qualify as "not modified"?
	 * @see ServletWebRequest#checkNotModified(long)
	 * @see ServletWebRequest#checkNotModified(String)
	 */
	private boolean isRequestNotModified(ServletWebRequest webRequest) {
		return webRequest.isNotModified();
	}

	/**
	 * Does this method have the response status instruction?
	 */
	private boolean hasResponseStatus() {
		return responseStatus != null;
	}

	// Builds "message [type=...] [value=...]" for trace logging of
	// return-value handling failures.
	private String getReturnValueHandlingErrorMessage(String message, Object returnValue) {
		StringBuilder sb = new StringBuilder(message);
		if (returnValue != null) {
			sb.append(" [type=" + returnValue.getClass().getName() + "] ");
		}
		sb.append("[value=" + returnValue + "]");
		return getDetailedErrorMessage(sb.toString());
	}

	/**
	 * Create a ServletInvocableHandlerMethod that will return the given value from an
	 * async operation instead of invoking the controller method again. The async result
	 * value is then either processed as if the controller method returned it or an
	 * exception is raised if the async result value itself is an Exception.
	 */
	ServletInvocableHandlerMethod wrapConcurrentResult(final Object result) {
		return new CallableHandlerMethod(new Callable<Object>() {
			@Override
			public Object call() throws Exception {
				if (result instanceof Exception) {
					throw (Exception) result;
				}
				else if (result instanceof Throwable) {
					// Non-Exception Throwables must be wrapped to fit call()'s signature.
					throw new NestedServletException("Async processing failed", (Throwable) result);
				}
				return result;
			}
		});
	}

	/**
	 * A sub-class of {@link HandlerMethod} that invokes the given {@link Callable}
	 * instead of the target controller method. This is useful for resuming processing
	 * with the result of an async operation. The goal is to process the value returned
	 * from the Callable as if it was returned by the target controller method, i.e.
	 * taking into consideration both method and type-level controller annotations (e.g.
	 * {@code @ResponseBody}, {@code @ResponseStatus}, etc).
	 */
	private class CallableHandlerMethod extends ServletInvocableHandlerMethod {

		public CallableHandlerMethod(Callable<?> callable) {
			// The Callable's call() method becomes the "handler method";
			// annotation lookups are bridged back to the outer instance below.
			super(callable, ClassUtils.getMethod(callable.getClass(), "call"));
			this.setHandlerMethodReturnValueHandlers(ServletInvocableHandlerMethod.this.returnValueHandlers);
		}

		/**
		 * Bridge to type-level annotations of the target controller method.
		 */
		@Override
		public Class<?> getBeanType() {
			return ServletInvocableHandlerMethod.this.getBeanType();
		}

		/**
		 * Bridge to method-level annotations of the target controller method.
		 */
		@Override
		public <A extends Annotation> A getMethodAnnotation(Class<A> annotationType) {
			return ServletInvocableHandlerMethod.this.getMethodAnnotation(annotationType);
		}
	}
}
| |
/*
* Copyright (c) 2014 Allette Systems pty. ltd.
*/
package org.pageseeder.docx;
import org.pageseeder.docx.util.Files;
import org.pageseeder.docx.util.XSLT;
import org.pageseeder.docx.util.ZipUtils;
import javax.xml.transform.Templates;
import java.io.File;
import java.io.IOException;
import java.io.StringWriter;
import java.io.Writer;
import java.net.URLDecoder;
import java.util.HashMap;
import java.util.Map;
import java.util.Scanner;
/**
 * Converts DocX to PSML.
 *
 * <p>Instances are built through the nested {@link Builder}; call
 * {@link #process()} to run the conversion and {@link #getLog()} to
 * retrieve the processing log afterwards.
 *
 * @author Ciber Cai
 * @author Hugo Inacio
 * @author Philip Rutherford
 */
public final class PSMLProcessor {

  /**
   * The builder carrying all the conversion settings.
   */
  private final Builder _builder;

  /**
   * A writer to store the log
   */
  private final Writer _log;

  /**
   * Creates a processor logging to an in-memory {@link StringWriter}.
   */
  private PSMLProcessor(Builder producer) {
    this(producer, new StringWriter());
  }

  /**
   * @param producer the builder holding source/destination and options
   * @param log      the writer receiving log messages
   * @throws NullPointerException if the source or destination is null
   */
  private PSMLProcessor(Builder producer, Writer log) {
    if (producer.source() == null) { throw new NullPointerException("source is null"); }
    if (producer.destination() == null) { throw new NullPointerException("destination is null"); }
    this._builder = producer;
    this._log = log;
  }

  /**
   * Flushes and closes the log writer and returns its content.
   *
   * <p>NOTE(review): this closes the writer, so log appends after calling
   * this method may fail for writers where close is not a no-op.
   *
   * @return the log in string, or "" when flushing/closing failed
   */
  public String getLog() {
    if (this._log != null) {
      try {
        this._log.flush();
        this._log.close();
      } catch (IOException e) {
        return "";
      }
      return this._log.toString();
    } else {
      return "";
    }
  }

  /**
   * Generates the PSML from the DOCX source.
   *
   * <p>Steps: unzip the DOCX, sanity-check its structure, un-nest the main
   * document (plus endnotes/footnotes when readable), copy the media files
   * and finally run the import XSLT to produce the {@code .psml} file.
   *
   * @throws IOException should an I/O error occur while logging
   * @throws DOCXException if the source is not a valid DOCX or media copying fails
   */
  public void process() throws IOException {
    // Defaulting destination directory.
    // NOTE(review): destination() lazily defaults to "<source parent>/output.psml",
    // so this branch appears unreachable -- kept defensively, TODO confirm and remove.
    if (this._builder.destination() == null) {
      this._builder.destination = this._builder.source.getParentFile();
      log("Destination set to source directory " + this._builder.destination.getAbsolutePath());
    }
    // The name of the presentation (strip the ".docx" extension when present)
    String sourcename = this._builder.source().getName();
    if (sourcename.toLowerCase().endsWith(".docx")) {
      sourcename = sourcename.substring(0, sourcename.length()-5);
    }
    // Resolve the output folder and base filename: either the destination is the
    // target ".psml" file itself, or a folder in which a name is derived from the source.
    File folder;
    String filename;
    if (this._builder.destination().getName().endsWith(".psml")) {
      folder = this._builder.destination().getParentFile();
      filename = this._builder.destination().getName().substring(0, this._builder.destination().getName().length() - 5);
    } else {
      folder = this._builder.destination();
      filename = sourcename.replaceAll(" ", "_").toLowerCase();
    }
    // Ensure that output folder exists
    if (!folder.exists()) {
      folder.mkdirs();
    }
    // 1. Unzip file
    log("Extracting DOCX: " + this._builder.source().getName());
    File unpacked = new File(this._builder.working(), "unpacked");
    unpacked.mkdir();
    ZipUtils.unzip(this._builder.source(), unpacked);
    // 2. Sanity check: a valid DOCX package must contain these two entries
    log("Checking docx");
    File contentTypes = new File(unpacked, "[Content_Types].xml");
    File relationships = new File(unpacked, "_rels/.rels");
    if (!contentTypes.exists()) throw new DOCXException("Not a valid DOCX: unable to find [Content_Types].xml");
    if (!relationships.exists()) throw new DOCXException("Not a valid DOCX: unable to find _rels/.rels");
    // Parse templates
    Templates templates = XSLT.getTemplatesFromResource("org/pageseeder/docx/xslt/import.xsl");
    String outuri = folder.toURI().toString();
    // Folder names default to "components" / "images"; an empty media value
    // means "<filename>_files".
    String componentFolderName = this._builder.component() == null ? "components" : this._builder.component();
    String mediaFolderName = this._builder.media() == null ? "images" :
        ("".equals(this._builder.media()) ? filename + "_files" : this._builder.media());
    // Initiate parameters passed to the XSLT
    Map<String, String> parameters = new HashMap<>();
    parameters.put("_rootfolder", unpacked.toURI().toString());
    parameters.put("_outputfolder", outuri);
    parameters.put("_docxfilename", sourcename);
    parameters.put("_mediafoldername", mediaFolderName);
    parameters.put("_componentfoldername", componentFolderName);
    if (this._builder.config() != null) {
      parameters.put("_configfileurl", this._builder.config().toURI().toString());
    }
    // Add custom parameters (may override the built-in ones above)
    parameters.putAll(this._builder.params());
    // 3. Unnest
    log("Unnest");
    Templates unnest = XSLT.getTemplatesFromResource("org/pageseeder/docx/xslt/import-unnest.xsl");
    File document = new File(unpacked, "word/document.xml");
    File newDocument = new File(unpacked, "word/new-document.xml");
    XSLT.transform(document, newDocument, unnest, parameters);
    // 3.1 Unnest Endnotes file if it exists
    File endnotes = new File(unpacked, "word/endnotes.xml");
    if (endnotes.canRead()) {
      XSLT.transform(endnotes, new File(unpacked, "word/new-endnotes.xml"), unnest, parameters);
    }
    // 3.2 Unnest Footnotes file if it exists
    File footnotes = new File(unpacked, "word/footnotes.xml");
    if (footnotes.canRead()) {
      XSLT.transform(footnotes, new File(unpacked, "word/new-footnotes.xml"), unnest, parameters);
    }
    // 4. Copy the media files
    log("Copy media");
    copyMedia(unpacked, folder, mediaFolderName);
    // 5. Process the files
    log("Process with XSLT (this may take several minutes)");
    XSLT.transform(contentTypes, new File(folder, filename + ".psml"), templates, parameters);
  }

  // Helpers
  // ----------------------------------------------------------------------------------------------

  /**
   * Appends a line to the log writer.
   */
  private void log(String log) throws IOException {
    this._log.append(log).append("\n");
  }

  /**
   * Copies the media files of the unpacked DOCX to the output media folder,
   * skipping template images and decoding percent-encoded filenames.
   *
   * @param from   the unpacked DOCX folder
   * @param to     the output folder
   * @param folder the media folder name within the output folder
   * @throws DOCXException wrapping any I/O error
   */
  private static void copyMedia(File from, File to, String folder) {
    File media = new File(from, "word/media");
    if (!media.exists()) return;
    File mediaOut = new File(to, folder);
    try {
      Files.ensureDirectoryExists(mediaOut);
      File[] files = media.listFiles();
      if (files != null) {
        for (File m : files) {
          // don't import template images
          if (!m.getName().startsWith(DOCXProcessor.MEDIA_PREFIX)) {
            // decode filename because the image/@src will be decoded by PageSeeder
            Files.copy(m, new File(mediaOut, URLDecoder.decode(m.getName(), "UTF-8").toLowerCase()));
          }
        }
      }
    } catch (IOException ex) {
      // TODO clean up files
      throw new DOCXException(ex);
    }
  }

  /**
   * Collects the settings for a {@link PSMLProcessor}.
   */
  public static class Builder {

    /**
     * The Word (DOCX) document to import.
     */
    private File source;

    /**
     * The PSML document (or folder) to generate.
     */
    private File destination;

    /**
     * The name of the working directory
     */
    private File working;

    /**
     * The configuration.
     */
    private File config;

    /**
     * The media files folder location.
     */
    private String media;

    /**
     * The component files folder location.
     */
    private String component;

    /**
     * List of custom parameters specified that can be specified from the command-line
     */
    private Map<String, String> params;

    /**
     * A writer to store the log
     */
    private Writer log;

    /**
     * @return the source
     */
    private File source() {
      return this.source;
    }

    /**
     * @return destination (lazily defaults to "output.psml" next to the source)
     */
    private File destination() {
      if (this.destination == null) {
        this.destination = new File(this.source.getParentFile(), "output.psml");
      }
      return this.destination;
    }

    /**
     * @return working (lazily defaults to a fresh temp directory, created on demand)
     */
    private File working() {
      if (this.working == null) {
        String tmp = "docx-" + System.currentTimeMillis();
        this.working = new File(System.getProperty("java.io.tmpdir"), tmp);
      }
      if (!this.working.exists()) {
        this.working.mkdirs();
      }
      return this.working;
    }

    /**
     * @return the configuration file, or null when unset or missing on disk
     */
    private File config() {
      // check whether the file exists
      if (this.config != null && this.config.exists()) {
        return this.config;
      } else {
        return null;
      }
    }

    /**
     * @return the media folder
     */
    private String media() {
      return this.media;
    }

    /**
     * @return the component folder
     */
    private String component() {
      return this.component;
    }

    /**
     * @return the custom parameters for docx (never null).
     */
    private Map<String, String> params() {
      if (this.params == null) {
        this.params = new HashMap<>();
      }
      return this.params;
    }

    /**
     * @param log the Writer to store the log
     * @return {@link Builder}
     */
    public Builder log(Writer log) {
      this.log = log;
      return this;
    }

    /**
     * @param source set the source
     * @return {@link Builder}
     */
    public Builder source(File source) {
      this.source = source;
      return this;
    }

    /**
     * @param destination set the destination
     * @return {@link Builder}
     */
    public Builder destination(File destination) {
      this.destination = destination;
      return this;
    }

    /**
     * @param working set the working folder
     * @return {@link Builder}
     */
    public Builder working(File working) {
      this.working = working;
      return this;
    }

    /**
     * @param config set the configuration file
     * @return {@link Builder}
     */
    public Builder config(File config) {
      this.config = config;
      return this;
    }

    /**
     * @param media the media folder
     * @return {@link Builder}
     */
    public Builder media(String media) {
      this.media = media;
      return this;
    }

    /**
     * @param component the component folder
     * @return {@link Builder}
     */
    public Builder component(String component) {
      this.component = component;
      return this;
    }

    /**
     * @param params the custom parameters to pass to the XSLT
     * @return {@link Builder}
     */
    public Builder params(Map<String, String> params) {
      this.params = params;
      return this;
    }

    /**
     * @return the PSMLProcessor
     */
    public PSMLProcessor build() {
      if (this.log != null) {
        return new PSMLProcessor(this, this.log);
      } else {
        return new PSMLProcessor(this);
      }
    }
  }
}
| |
package org.uberfire.io.impl;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URI;
import java.util.HashMap;
import java.util.List;
import java.util.Random;
import org.apache.commons.io.FileUtils;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.mockito.Mockito;
import org.uberfire.io.CommonIOServiceDotFileTest;
import org.uberfire.io.IOService;
import org.uberfire.io.impl.IOServiceDotFileImpl;
import org.uberfire.java.nio.base.options.CommentedOption;
import org.uberfire.java.nio.base.version.VersionAttributeView;
import org.uberfire.java.nio.file.FileSystem;
import org.uberfire.java.nio.file.Path;
import org.uberfire.java.nio.file.WatchEvent;
import org.uberfire.java.nio.file.WatchService;
import org.uberfire.java.nio.file.api.FileSystemProviders;
import org.uberfire.java.nio.fs.jgit.JGitFileSystem;
import org.uberfire.java.nio.fs.jgit.JGitFileSystemProvider;
import static org.junit.Assert.*;
import static org.mockito.Mockito.spy;
/**
 * Integration tests for {@link IOService} batch operations over JGit-backed
 * file systems.
 *
 * <p>NOTE(review): the three git repositories are shared between test methods,
 * so some of the history-size assertions depend on the order in which the test
 * methods run -- TODO confirm a fixed method order is in effect.
 */
public class BatchTest {

    final static IOService ioService = new IOServiceDotFileImpl();
    private static File path = null;

    static FileSystem fs1;
    static JGitFileSystem fs1Batch;
    static FileSystem fs2;
    static JGitFileSystem fs2Batch;
    static FileSystem fs3;
    static JGitFileSystem fs3Batch;

    /**
     * Creates three git repositories and writes an initial file into each so
     * they are non-empty.
     */
    @BeforeClass
    public static void setup() throws IOException {
        path = CommonIOServiceDotFileTest.createTempDirectory();
        // XXX this is shaky at best: FileSystemProviders bootstraps the JGit FS in a static initializer.
        // if anything has referenced it before now, setting this system property will have no effect.
        System.setProperty( "org.uberfire.nio.git.dir", path.getAbsolutePath() );
        System.out.println( ".niogit: " + path.getAbsolutePath() );
        final URI newRepo = URI.create( "git://amend-repo-test" );
        fs1 = ioService.newFileSystem( newRepo, new HashMap<String, Object>() );
        fs1Batch = (JGitFileSystem) fs1;
        Path init = ioService.get( URI.create( "git://amend-repo-test/init.file" ) );
        ioService.write( init, "setupFS!" );
        final URI newRepo2 = URI.create( "git://check-amend-repo-test" );
        fs2 = ioService.newFileSystem( newRepo2, new HashMap<String, Object>() {{
            put( "init", "true" );
        }} );
        fs2Batch = (JGitFileSystem) fs2;
        init = ioService.get( URI.create( "git://check-amend-repo-test/init.file" ) );
        ioService.write( init, "setupFS!" );
        final URI newRepo3 = URI.create( "git://check-amend-repo-test-2" );
        fs3 = ioService.newFileSystem( newRepo3, new HashMap<String, Object>() {{
            put( "init", "true" );
        }} );
        fs3Batch = (JGitFileSystem) fs3;
        init = ioService.get( URI.create( "git://check-amend-repo-test-2/init.file" ) );
        ioService.write( init, "setupFS!" );
    }

    /**
     * Shuts the provider down and wipes the repositories created by {@link #setup()}.
     */
    @AfterClass
    public static void cleanup() {
        FileUtils.deleteQuietly( path );
        JGitFileSystemProvider gitFsProvider = (JGitFileSystemProvider) FileSystemProviders.resolveProvider( URI.create( "git://whatever" ) );
        gitFsProvider.shutdown();
        FileUtils.deleteQuietly( gitFsProvider.getGitRepoContainerDir() );
        gitFsProvider.rescanForExistingRepositories();
    }

    /**
     * Writes inside a batch produce no watch events until the batch ends, and
     * all the batched writes are collapsed into a single history record per file.
     */
    @Test
    public void testBatch() throws IOException, InterruptedException {
        final Path init = ioService.get( URI.create( "git://amend-repo-test/readme.txt" ) );
        final WatchService ws = init.getFileSystem().newWatchService();
        ioService.write( init, "init!", new CommentedOption( "User Tester", "message1" ) );
        ioService.write( init, "init 2!", new CommentedOption( "User Tester", "message2" ) );
        {
            List<WatchEvent<?>> events = ws.poll().pollEvents();
            assertEquals( 1, events.size() );//modify readme
        }
        final Path init2 = ioService.get( URI.create( "git://amend-repo-test/readme2.txt" ) );
        ioService.write( init2, "init 3!", new CommentedOption( "User Tester", "message3" ) );
        {
            List<WatchEvent<?>> events = ws.poll().pollEvents();
            assertEquals( 1, events.size() ); // add file
        }
        ioService.write( init2, "init 4!", new CommentedOption( "User Tester", "message4" ) );
        {
            List<WatchEvent<?>> events = ws.poll().pollEvents();
            assertEquals( 1, events.size() );// modify file
        }
        final VersionAttributeView vinit = ioService.getFileAttributeView( init, VersionAttributeView.class );
        // FIX: was inspecting "init" (copy-paste); vinit2 must view init2
        final VersionAttributeView vinit2 = ioService.getFileAttributeView( init2, VersionAttributeView.class );
        assertEquals( "init 2!", ioService.readAllString( init ) );
        assertNotNull( vinit );
        assertEquals( 2, vinit.readAttributes().history().records().size() );
        assertNotNull( vinit2 );
        assertEquals( 2, vinit2.readAttributes().history().records().size() );
        ioService.startBatch( new FileSystem[]{ init.getFileSystem() } );
        final Path path = ioService.get( URI.create( "git://amend-repo-test/mybatch" + new Random( 10L ).nextInt() + ".txt" ) );
        final Path path2 = ioService.get( URI.create( "git://amend-repo-test/mybatch2" + new Random( 10L ).nextInt() + ".txt" ) );
        ioService.write( path, "ooooo!" );
        //init.file event
        assertNotNull( ws.poll() );
        ioService.write( path, "ooooo wdfs fg sdf!" );
        assertNull( ws.poll() );
        ioService.write( path2, "ooooo222!" );
        assertNull( ws.poll() );
        ioService.write( path2, " sdfsdg sdg ooooo222!" );
        assertNull( ws.poll() );
        ioService.endBatch();
        {
            List<WatchEvent<?>> events = ws.poll().pollEvents();
            assertEquals( 2, events.size() ); //adds files
        }
        final VersionAttributeView v = ioService.getFileAttributeView( path, VersionAttributeView.class );
        final VersionAttributeView v2 = ioService.getFileAttributeView( path2, VersionAttributeView.class );
        assertNotNull( v );
        assertNotNull( v2 );
        assertEquals( 1, v.readAttributes().history().records().size() );
        assertEquals( 1, v2.readAttributes().history().records().size() );
    }

    /**
     * Two consecutive batches on the same file system: each batch flushes its
     * events only at {@code endBatch()}, and a re-write with identical content
     * produces no event and no new history record.
     */
    @Test
    public void testBatch2() throws IOException, InterruptedException {
        final Path f1 = ioService.get( URI.create( "git://check-amend-repo-test/f1.txt" ) );
        final Path f2 = ioService.get( URI.create( "git://check-amend-repo-test/f2.txt" ) );
        final Path f3 = ioService.get( URI.create( "git://check-amend-repo-test/f3.txt" ) );
        // XXX: Workaround for UF-70: amend-test-repo has to contain something so it can receive the BATCH
        ioService.write( f1, "init f1!" );
        ioService.write( f2, "init f2!" );
        // END workaround
        final WatchService ws = f1.getFileSystem().newWatchService();
        ioService.startBatch( new FileSystem[]{ f1.getFileSystem() } );
        ioService.write( f1, "f1-u1!" );
        assertNull( ws.poll() );
        ioService.write( f2, "f2-u1!" );
        assertNull( ws.poll() );
        ioService.write( f3, "f3-u1!" );
        assertNull( ws.poll() );
        ioService.endBatch();
        {
            List<WatchEvent<?>> events = ws.poll().pollEvents();
            assertEquals( 3, events.size() ); // two modifications + one added file
            final VersionAttributeView v = ioService.getFileAttributeView( f1, VersionAttributeView.class );
            assertNotNull( v );
            assertEquals( 2, v.readAttributes().history().records().size() );
            final VersionAttributeView v2 = ioService.getFileAttributeView( f2, VersionAttributeView.class );
            assertNotNull( v2 );
            assertEquals( 2, v2.readAttributes().history().records().size() );
            final VersionAttributeView v3 = ioService.getFileAttributeView( f3, VersionAttributeView.class );
            assertNotNull( v3 );
            assertEquals( 1, v3.readAttributes().history().records().size() );
        }
        ioService.startBatch( new FileSystem[]{ f1.getFileSystem() } );
        ioService.write( f1, "f1-u1!" );
        assertNull( ws.poll() );
        ioService.write( f2, "f2-u2!" );
        assertNull( ws.poll() );
        ioService.write( f3, "f3-u2!" );
        assertNull( ws.poll() );
        ioService.endBatch();
        {
            List<WatchEvent<?>> events = ws.poll().pollEvents();
            // only 2 events: the f1 write repeats the same content, so no change
            assertEquals( 2, events.size() );
            final VersionAttributeView v = ioService.getFileAttributeView( f1, VersionAttributeView.class );
            assertNotNull( v );
            assertEquals( 2, v.readAttributes().history().records().size() );
            final VersionAttributeView v2 = ioService.getFileAttributeView( f2, VersionAttributeView.class );
            assertNotNull( v2 );
            assertEquals( 3, v2.readAttributes().history().records().size() );
            final VersionAttributeView v3 = ioService.getFileAttributeView( f3, VersionAttributeView.class );
            assertNotNull( v3 );
            assertEquals( 2, v3.readAttributes().history().records().size() );
        }
    }

    /**
     * startBatch/endBatch toggles the file system's batch flag.
     */
    @Test
    public void batchTest() throws IOException, InterruptedException {
        final Path init = ioService.get( URI.create( "git://amend-repo-test/readme.txt" ) );
        ioService.write( init, "init!", new CommentedOption( "User Tester", "message1" ) );
        ioService.startBatch( new FileSystem[]{ fs1 } );
        assertTrue( fs1Batch.isOnBatch() );
        ioService.endBatch();
        assertFalse( fs1Batch.isOnBatch() );
    }

    /**
     * Starting a batch on one file system must not flag the others.
     */
    @Test
    public void justOneFSOnBatchTest() throws IOException, InterruptedException {
        Path init = ioService.get( URI.create( "git://amend-repo-test/readme.txt" ) );
        ioService.write( init, "init!", new CommentedOption( "User Tester", "message1" ) );
        init = ioService.get( URI.create( "git://check-amend-repo-test/readme.txt" ) );
        ioService.write( init, "init!", new CommentedOption( "User Tester", "message1" ) );
        ioService.startBatch( new FileSystem[]{ fs1 } );
        assertTrue( fs1Batch.isOnBatch() );
        assertFalse( fs2Batch.isOnBatch() );
        ioService.endBatch();
        assertFalse( fs1Batch.isOnBatch() );
        assertFalse( fs2Batch.isOnBatch() );
    }

    /**
     * Nested batches: the batch flag is only cleared by the outermost endBatch.
     */
    @Test
    public void testInnerBatch() throws IOException, InterruptedException {
        Path init = ioService.get( URI.create( "git://amend-repo-test/readme.txt" ) );
        ioService.write( init, "init!", new CommentedOption( "User Tester", "message1" ) );
        init = ioService.get( URI.create( "git://check-amend-repo-test/readme.txt" ) );
        ioService.write( init, "init!", new CommentedOption( "User Tester", "message1" ) );
        ioService.startBatch( new FileSystem[]{ fs1 } );
        assertTrue( fs1Batch.isOnBatch() );
        ioService.startBatch( new FileSystem[]{ fs1 } );
        assertTrue( fs1Batch.isOnBatch() );
        ioService.endBatch();
        assertTrue( fs1Batch.isOnBatch() );
        ioService.endBatch();
        assertFalse( fs1Batch.isOnBatch() );
    }

    /**
     * Writes within a batch (including a nested batch) are amended into the
     * enclosing batch's commit, so only the batch boundaries create records.
     */
    @Test
    public void assertNumberOfCommitsOnInnerBatch() throws IOException, InterruptedException {
        final Path f11 = ioService.get( URI.create( "git://check-amend-repo-test/f11.txt" ) );
        // XXX: Workaround for UF-70: amend-test-repo has to contain something so it can receive the BATCH
        ioService.write( f11, "init f1!" );
        // END workaround
        ioService.startBatch( new FileSystem[]{ f11.getFileSystem() } );
        ioService.write( f11, "f1-u1!" );
        ioService.endBatch();
        VersionAttributeView v = ioService.getFileAttributeView( f11, VersionAttributeView.class );
        assertNotNull( v );
        assertEquals( 2, v.readAttributes().history().records().size() );
        ioService.startBatch( new FileSystem[]{ f11.getFileSystem() } );
        ioService.write( f11, "f2-u2!" );
        //inner batch (same commit)
        ioService.startBatch( new FileSystem[]{ f11.getFileSystem() } );
        ioService.write( f11, "f2-u2 - inner batch!" );
        ioService.write( f11, "f2-u2 - inner 2 batch!" );
        ioService.endBatch();
        ioService.write( f11, "f2-u2 - inner batch! last" );
        ioService.endBatch();
        assertEquals( "f2-u2 - inner batch! last", ioService.readAllString( f11 ) );
        v = ioService.getFileAttributeView( f11, VersionAttributeView.class );
        assertNotNull( v );
        assertEquals( 4, v.readAttributes().history().records().size() );
    }

    /**
     * The same thread may start the same batch twice (re-entrant), but an
     * unbalanced extra endBatch must fail.
     */
    @Test
    public void testTwoStartedFsOnBatchByTheSameThread() throws IOException, InterruptedException {
        Path init = ioService.get( URI.create( "git://amend-repo-test/readme.txt" ) );
        ioService.write( init, "init!", new CommentedOption( "User Tester", "message1" ) );
        init = ioService.get( URI.create( "git://check-amend-repo-test/readme.txt" ) );
        ioService.write( init, "init!", new CommentedOption( "User Tester", "message1" ) );
        ioService.startBatch( new FileSystem[]{ fs1 } );
        try {
            ioService.startBatch( new FileSystem[]{ fs1 } );
        } catch ( final Exception e ) {
            fail();
        }
        ioService.endBatch();
        ioService.endBatch();
        try {
            ioService.endBatch();
            fail();
        } catch ( final Exception e ) {
            // expected: no batch is active any more
        }
    }

    /**
     * The same thread may run sequential batches on different file systems.
     */
    @Test
    public void testTwoFsOnBatchByTheSameThread() throws IOException, InterruptedException {
        Path init = ioService.get( URI.create( "git://amend-repo-test/readme.txt" ) );
        ioService.write( init, "init!", new CommentedOption( "User Tester", "message1" ) );
        init = ioService.get( URI.create( "git://check-amend-repo-test/readme.txt" ) );
        ioService.write( init, "init!", new CommentedOption( "User Tester", "message1" ) );
        ioService.startBatch( new FileSystem[]{ fs1 } );
        assertTrue( fs1Batch.isOnBatch() );
        ioService.endBatch();
        ioService.startBatch( new FileSystem[]{ fs2 } );
        assertTrue( fs2Batch.isOnBatch() );
        ioService.endBatch();
    }

    /**
     * A single startBatch call may lock several file systems at once.
     */
    @Test
    public void iCanLockMultipleFS() throws IOException, InterruptedException {
        Path init = ioService.get( URI.create( "git://amend-repo-test/readme.txt" ) );
        ioService.write( init, "init!", new CommentedOption( "User Tester", "message1" ) );
        init = ioService.get( URI.create( "git://check-amend-repo-test/readme.txt" ) );
        ioService.write( init, "init!", new CommentedOption( "User Tester", "message1" ) );
        ioService.startBatch( new FileSystem[]{ fs1, fs2 } );
        assertTrue( fs1Batch.isOnBatch() );
        assertTrue( fs2Batch.isOnBatch() );
        ioService.endBatch();
        assertFalse( fs1Batch.isOnBatch() );
        assertFalse( fs2Batch.isOnBatch() );
    }

    /**
     * Smoke test: a second thread entering a batch while the main thread holds
     * one must not deadlock or corrupt the repository.
     */
    @Test
    public void testDifferentThreads() throws IOException, InterruptedException {
        final Path init = ioService.get( URI.create( "git://amend-repo-test/readme.txt" ) );
        ioService.write( init, "init!" );
        ioService.startBatch( new FileSystem[]{ fs1 } );
        System.out.println( "After start batch" );
        new Thread( "second" ) {
            @Override
            public void run() {
                try {
                    System.out.println( "Inner starting" );
                    ioService.startBatch( new FileSystem[]{ fs1 } );
                    System.out.println( "Inner after batch" );
                    final OutputStream innerOut = ioService.newOutputStream( init );
                    for ( int i = 0; i < 100; i++ ) {
                        innerOut.write( ( "sss" + i ).getBytes() );
                    }
                    System.out.println( "Inner after write" );
                    innerOut.close();
                    System.out.println( "Inner after close" );
                    ioService.endBatch();
                    System.out.println( "Inner after end batch" );
                } catch ( Exception ex ) {
                    ex.printStackTrace();
                }
            }
        }.start();
        System.out.println( "After start 2nd Thread" );
        for ( int i = 0; i < 100; i++ ) {
            if ( i % 20 == 0 ) {
                Thread.sleep( 10 );
            }
            ioService.write( init, ( "sss" + i ).getBytes() );
        }
        System.out.println( "After writes" );
        ioService.endBatch();
        System.out.println( "After end batch" );
    }

    /**
     * Smoke test: concurrent un-batched writes from two threads must not
     * deadlock or corrupt the repository.
     */
    @Test
    public void testDifferentThreadsWithoutBatch() throws IOException, InterruptedException {
        final Path init = ioService.get( URI.create( "git://amend-repo-test/readme.txt" ) );
        ioService.write( init, "init!" );
        new Thread( "second" ) {
            @Override
            public void run() {
                try {
                    System.out.println( "Inner starting" );
                    final OutputStream innerOut = ioService.newOutputStream( init );
                    for ( int i = 0; i < 100; i++ ) {
                        innerOut.write( ( "sss" + i ).getBytes() );
                    }
                    System.out.println( "Inner after write" );
                    innerOut.close();
                } catch ( Exception ex ) {
                    ex.printStackTrace();
                }
            }
        }.start();
        System.out.println( "After start 2nd Thread" );
        for ( int i = 0; i < 100; i++ ) {
            if ( i % 20 == 0 ) {
                Thread.sleep( 10 );
            }
            ioService.write( init, ( "sss" + i ).getBytes() );
        }
        System.out.println( "After writes" );
    }

    /**
     * Smoke test: two extra threads each batching against fs1 while the main
     * thread holds a batch; all must complete without deadlock.
     */
    @Test
    public void testDifferentThreads3() throws IOException, InterruptedException {
        final Path init = ioService.get( URI.create( "git://amend-repo-test/readme.txt" ) );
        ioService.write( init, "init!" );
        ioService.startBatch( new FileSystem[]{ fs1 } );
        System.out.println( "After start batch" );
        final Runnable runnable = new Runnable() {
            @Override
            public void run() {
                try {
                    System.out.println( "Inner starting" );
                    ioService.startBatch( new FileSystem[]{ fs1 } );
                    System.out.println( "Inner after batch" );
                    final OutputStream innerOut = ioService.newOutputStream( init );
                    for ( int i = 0; i < 100; i++ ) {
                        innerOut.write( ( "sss" + i ).getBytes() );
                    }
                    System.out.println( "Inner after write" );
                    innerOut.close();
                    System.out.println( "Inner after close" );
                    ioService.endBatch();
                    System.out.println( "Inner after end batch" );
                } catch ( Exception ex ) {
                    ex.printStackTrace();
                }
            }
        };
        final Thread thread = new Thread( runnable, "second" );
        final Thread thread2 = new Thread( runnable, "third" );
        thread.start();
        Thread.sleep( 100 );
        thread2.start();
        Thread.sleep( 100 );
        System.out.println( "After start 2nd Thread" );
        for ( int i = 0; i < 100; i++ ) {
            if ( i % 20 == 0 ) {
                Thread.sleep( 10 );
            }
            ioService.write( init, ( "sss" + i ).getBytes() );
        }
        System.out.println( "After writes" );
        ioService.endBatch();
        System.out.println( "After end batch" );
    }

    /**
     * Smoke test: main thread batches while two other threads write without a
     * batch of their own.
     */
    @Test
    public void testDifferentThreadsNotBatchInners() throws IOException, InterruptedException {
        final Path init = ioService.get( URI.create( "git://amend-repo-test/readme.txt" ) );
        ioService.write( init, "init!" );
        ioService.startBatch( new FileSystem[]{ fs1 } );
        System.out.println( "After start batch" );
        final Runnable runnable = new Runnable() {
            @Override
            public void run() {
                try {
                    System.out.println( "Inner starting" );
                    final OutputStream innerOut = ioService.newOutputStream( init );
                    for ( int i = 0; i < 100; i++ ) {
                        innerOut.write( ( "sss" + i ).getBytes() );
                    }
                    System.out.println( "Inner after write" );
                    innerOut.close();
                    System.out.println( "Inner after end batch" );
                } catch ( Exception ex ) {
                    ex.printStackTrace();
                }
            }
        };
        final Thread thread = new Thread( runnable, "second" );
        final Thread thread2 = new Thread( runnable, "third" );
        thread.start();
        Thread.sleep( 100 );
        thread2.start();
        Thread.sleep( 100 );
        System.out.println( "After start 2nd Thread" );
        for ( int i = 0; i < 100; i++ ) {
            if ( i % 20 == 0 ) {
                Thread.sleep( 10 );
            }
            ioService.write( init, ( "sss" + i ).getBytes() );
        }
        System.out.println( "After writes" );
        ioService.endBatch();
        System.out.println( "After end batch" );
    }

    /**
     * Smoke test: two threads batch while the main thread writes without one.
     */
    @Test
    public void testDifferentThreadsNotBatchOuter() throws IOException, InterruptedException {
        final Path init = ioService.get( URI.create( "git://amend-repo-test/readme.txt" ) );
        ioService.write( init, "init!" );
        final Runnable runnable = new Runnable() {
            @Override
            public void run() {
                try {
                    System.out.println( "Inner starting" );
                    ioService.startBatch( new FileSystem[]{ fs1 } );
                    System.out.println( "Inner after batch" );
                    final OutputStream innerOut = ioService.newOutputStream( init );
                    for ( int i = 0; i < 100; i++ ) {
                        ioService.write( init, ( "sss" + i ).getBytes() );
                    }
                    System.out.println( "Inner after write" );
                    innerOut.close();
                    System.out.println( "Inner after close" );
                    ioService.endBatch();
                    System.out.println( "Inner after end batch" );
                } catch ( Exception ex ) {
                    ex.printStackTrace();
                }
            }
        };
        final Thread thread = new Thread( runnable, "second" );
        final Thread thread2 = new Thread( runnable, "third" );
        thread.start();
        Thread.sleep( 100 );
        thread2.start();
        Thread.sleep( 100 );
        System.out.println( "After start 2nd Thread" );
        for ( int i = 0; i < 100; i++ ) {
            if ( i % 20 == 0 ) {
                Thread.sleep( 10 );
            }
            ioService.write( init, ( "sss" + i ).getBytes() );
        }
        System.out.println( "After writes" );
    }

    /**
     * Even if cleanup inside endBatch throws, the batch lock must be released.
     */
    @Test
    public void exceptionOnCleanUpAndUnsetBatchModeOnFileSystemsShouldReleaseLock() throws IOException, InterruptedException {
        IOServiceDotFileImpl ioServiceSpy = spy( (IOServiceDotFileImpl) ioService );
        Mockito.doThrow( new RuntimeException() ).when( ioServiceSpy ).unsetBatchModeOn( fs1Batch );
        final Path init = ioService.get( URI.create( "git://amend-repo-test/readme.txt" ) );
        ioServiceSpy.write( init, "init!", new CommentedOption( "User Tester", "message1" ) );
        ioServiceSpy.startBatch( new FileSystem[]{ fs1 } );
        assertTrue( ioServiceSpy.getLockControl().isLocked() );
        try {
            ioServiceSpy.endBatch();
        }
        catch (Exception e){
            // expected: the stubbed unsetBatchModeOn throws
        }
        assertFalse( ioServiceSpy.getLockControl().isLocked() );
    }
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Class MethodEvaluator
* @author Jeka
*/
package com.intellij.debugger.engine.evaluation.expression;
import com.intellij.Patches;
import com.intellij.debugger.DebuggerBundle;
import com.intellij.debugger.engine.DebugProcess;
import com.intellij.debugger.engine.DebugProcessImpl;
import com.intellij.debugger.engine.DebuggerUtils;
import com.intellij.debugger.engine.JVMName;
import com.intellij.debugger.engine.evaluation.*;
import com.intellij.debugger.impl.ClassLoadingUtils;
import com.intellij.debugger.impl.DebuggerUtilsEx;
import com.intellij.debugger.jdi.VirtualMachineProxyImpl;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.rt.debugger.DefaultMethodInvoker;
import com.intellij.util.containers.ContainerUtil;
import com.sun.jdi.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
public class MethodEvaluator implements Evaluator {
  private static final Logger LOG = Logger.getInstance("#com.intellij.debugger.engine.evaluation.expression.MethodEvaluator");
  // JVM name of the class to resolve the method on; may be null (evaluate() checks).
  private final JVMName myClassName;
  // JVM signature of the target method; may be null, then resolution is by name only.
  private final JVMName myMethodSignature;
  // Simple name of the method to invoke.
  private final String myMethodName;
  // Evaluators producing the call arguments, in declaration order.
  private final Evaluator[] myArgumentEvaluators;
  // Evaluator producing the receiver; the constructor wraps it in DisableGC.
  private final Evaluator myObjectEvaluator;
  // Whether to also look for a default interface method implementation.
  private final boolean myCheckDefaultInterfaceMethod;
  // Whether the resolved method is required to be a varargs method.
  private final boolean myMustBeVararg;
  /**
   * Convenience constructor: no default-interface-method check and no
   * varargs requirement.
   */
  public MethodEvaluator(Evaluator objectEvaluator,
                         JVMName className,
                         String methodName,
                         JVMName signature,
                         Evaluator[] argumentEvaluators) {
    this(objectEvaluator, className, methodName, signature, argumentEvaluators, false, false);
  }
  /**
   * @param objectEvaluator produces the receiver; wrapped in {@link DisableGC} so the
   *                        evaluated reference is kept alive during the call
   * @param className JVM name of the declaring class (may be null)
   * @param methodName simple name of the method to invoke
   * @param signature JVM signature of the method (may be null)
   * @param argumentEvaluators evaluators for the call arguments, in order
   * @param checkDefaultInterfaceMethod whether to consider default interface methods
   * @param mustBeVararg whether the resolved method must be varargs
   */
  public MethodEvaluator(Evaluator objectEvaluator,
                         JVMName className,
                         String methodName,
                         JVMName signature,
                         Evaluator[] argumentEvaluators,
                         boolean checkDefaultInterfaceMethod,
                         boolean mustBeVararg) {
    myObjectEvaluator = new DisableGC(objectEvaluator);
    myClassName = className;
    myMethodName = methodName;
    myMethodSignature = signature;
    myArgumentEvaluators = argumentEvaluators;
    myCheckDefaultInterfaceMethod = checkDefaultInterfaceMethod;
    myMustBeVararg = mustBeVararg;
  }
  /**
   * A method invocation yields no assignable l-value, so there is no modifier.
   */
  @Override
  public Modifier getModifier() {
    return null;
  }
/**
 * Resolves the target method in the debuggee VM and invokes it.
 *
 * Handles four cases: static calls on a class/interface receiver,
 * regular instance calls, super-qualified calls (invoked non-virtually on
 * the superclass), and default interface methods (via a helper class when
 * the JDK bug 8042123 workaround is enabled).
 *
 * @return the invocation result, or null if the debuggee is detached
 * @throws EvaluateException if the receiver is null or not invokable, the
 *         method cannot be resolved, or the invocation itself fails
 */
@Override
public Object evaluate(EvaluationContextImpl context) throws EvaluateException {
// Nothing to do once the debuggee has detached.
if(!context.getDebugProcess().isAttached()) return null;
DebugProcessImpl debugProcess = context.getDebugProcess();
// super.m() must be dispatched non-virtually; the object evaluator may be
// wrapped in DisableGC (see constructor), so check the delegate as well.
final boolean requiresSuperObject =
myObjectEvaluator instanceof SuperEvaluator ||
(myObjectEvaluator instanceof DisableGC && ((DisableGC)myObjectEvaluator).getDelegate() instanceof SuperEvaluator);
final Object object = myObjectEvaluator.evaluate(context);
if (LOG.isDebugEnabled()) {
LOG.debug("MethodEvaluator: object = " + object);
}
if(object == null) {
throw EvaluateExceptionUtil.createEvaluateException(new NullPointerException());
}
// The receiver must be an instance, or a class/interface type (static call).
if (!(object instanceof ObjectReference || isInvokableType(object))) {
throw EvaluateExceptionUtil.createEvaluateException(DebuggerBundle.message("evaluation.error.evaluating.method", myMethodName));
}
// Evaluate all arguments up front, in declaration order.
List args = new ArrayList(myArgumentEvaluators.length);
for (Evaluator evaluator : myArgumentEvaluators) {
args.add(evaluator.evaluate(context));
}
try {
// Resolve the reference type that declares (or inherits) the target method.
ReferenceType referenceType = null;
if(object instanceof ObjectReference) {
// it seems that if we have an object of the class, the class must be ready, so no need to use findClass here
referenceType = ((ObjectReference)object).referenceType();
}
else if (isInvokableType(object)) {
referenceType = debugProcess.findClass(context, ((ReferenceType)object).name(), context.getClassLoader());
}
else {
// NOTE(review): this branch appears unreachable - the guard above already
// rejects anything that is neither an ObjectReference nor an invokable type.
final String className = myClassName != null? myClassName.getName(debugProcess) : null;
if (className != null) {
referenceType = debugProcess.findClass(context, className, context.getClassLoader());
}
}
if (referenceType == null) {
throw new EvaluateRuntimeException(EvaluateExceptionUtil.createEvaluateException(
DebuggerBundle.message("evaluation.error.cannot.evaluate.qualifier", myMethodName))
);
}
final String signature = myMethodSignature != null ? myMethodSignature.getName(debugProcess) : null;
// Human-readable name used only in the error messages below.
final String methodName = DebuggerUtilsEx.methodName(referenceType.name(), myMethodName, signature);
// Static call path: the receiver is itself a class or interface type.
if (isInvokableType(object)) {
if (isInvokableType(referenceType)) {
Method jdiMethod;
if (signature != null) {
if (referenceType instanceof ClassType) {
jdiMethod = ((ClassType)referenceType).concreteMethodByName(myMethodName, signature);
}
else {
jdiMethod = ContainerUtil.getFirstItem(referenceType.methodsByName(myMethodName, signature));
}
}
else {
// No signature known: take the first method with a matching name.
jdiMethod = ContainerUtil.getFirstItem(referenceType.methodsByName(myMethodName));
}
if (jdiMethod != null && jdiMethod.isStatic()) {
if (referenceType instanceof ClassType) {
return debugProcess.invokeMethod(context, (ClassType)referenceType, jdiMethod, args);
}
else {
return debugProcess.invokeMethod(context, (InterfaceType)referenceType, jdiMethod, args);
}
}
}
throw EvaluateExceptionUtil.createEvaluateException(DebuggerBundle.message("evaluation.error.no.static.method", methodName));
}
// object should be an ObjectReference
final ObjectReference objRef = (ObjectReference)object;
// For super calls, start the method lookup from the superclass.
ReferenceType _refType = referenceType;
if (requiresSuperObject && (referenceType instanceof ClassType)) {
_refType = ((ClassType)referenceType).superclass();
}
Method jdiMethod = DebuggerUtils.findMethod(_refType, myMethodName, signature);
if (signature == null) {
// we know nothing about expected method's signature, so trying to match my method name and parameter count
// dummy matching, may be improved with types matching later
// IMPORTANT! using argumentTypeNames() instead of argumentTypes() to avoid type resolution inside JDI, which may be time-consuming
if (jdiMethod == null || jdiMethod.argumentTypeNames().size() != args.size()) {
for (Method method : _refType.methodsByName(myMethodName)) {
if (method.argumentTypeNames().size() == args.size()) {
jdiMethod = method;
break;
}
}
}
}
else if (myMustBeVararg && jdiMethod != null && !jdiMethod.isVarArgs() && jdiMethod.isBridge()) {
// see IDEA-129869, avoid bridge methods for varargs
// Match on the parameter portion of the signature only (drop the return type).
int retTypePos = signature.lastIndexOf(")");
if (retTypePos >= 0) {
String signatureNoRetType = signature.substring(0, retTypePos + 1);
for (Method method : _refType.visibleMethods()) {
if (method.name().equals(myMethodName) && method.signature().startsWith(signatureNoRetType) && !method.isBridge() && !method.isAbstract()) {
jdiMethod = method;
break;
}
}
}
}
if (jdiMethod == null) {
throw EvaluateExceptionUtil.createEvaluateException(DebuggerBundle.message("evaluation.error.no.instance.method", methodName));
}
if (requiresSuperObject) {
// super.m() semantics: invoke on the instance without virtual dispatch.
return debugProcess.invokeInstanceMethod(context, objRef, jdiMethod, args, ObjectReference.INVOKE_NONVIRTUAL);
}
// fix for default methods in interfaces, see IDEA-124066
if (Patches.JDK_BUG_ID_8042123 && myCheckDefaultInterfaceMethod && jdiMethod.declaringType() instanceof InterfaceType) {
try {
return invokeDefaultMethod(debugProcess, context, objRef, myMethodName);
} catch (EvaluateException e) {
// Helper failed - log and fall back to the regular invocation below.
LOG.info(e);
}
}
return debugProcess.invokeMethod(context, objRef, jdiMethod, args);
}
catch (Exception e) {
if (LOG.isDebugEnabled()) {
LOG.debug(e);
}
// Wrap any JDI/runtime failure in the debugger's evaluate exception.
throw EvaluateExceptionUtil.createEvaluateException(e);
}
}
// True when the evaluated qualifier denotes a type that can receive a
// static method invocation, i.e. a JDI class or interface type.
private static boolean isInvokableType(Object type) {
    if (type instanceof ClassType) {
        return true;
    }
    return type instanceof InterfaceType;
}
// only methods without arguments for now
// Invokes a no-argument default interface method via the DefaultMethodInvoker
// helper class loaded into the debuggee (JDK bug 8042123 workaround).
// Returns null when the helper class or its "invoke" method cannot be found.
private static Value invokeDefaultMethod(DebugProcess debugProcess, EvaluationContext evaluationContext,
                                         Value obj, String name)
        throws EvaluateException {
    ClassType invokerClass = ClassLoadingUtils.getHelperClass(DefaultMethodInvoker.class.getName(), evaluationContext, debugProcess);
    if (invokerClass == null) {
        return null;
    }
    List<Method> invokeCandidates = invokerClass.methodsByName("invoke");
    if (invokeCandidates.isEmpty()) {
        return null;
    }
    // Helper signature: invoke(Object target, String methodName)
    Value methodNameMirror = ((VirtualMachineProxyImpl)debugProcess.getVirtualMachineProxy()).mirrorOf(name);
    return debugProcess.invokeMethod(evaluationContext, invokerClass, invokeCandidates.get(0),
                                     Arrays.asList(obj, methodNameMirror));
}
/**
 * Short human-readable form used in evaluator debug output.
 */
@Override
public String toString() {
    return new StringBuilder("call ").append(myMethodName).toString();
}
}
| |
/* Licensed Materials - Property of IBM */
/* */
/* SAMPLE */
/* */
/* (c) Copyright IBM Corp. 2017 All Rights Reserved */
/* */
/* US Government Users Restricted Rights - Use, duplication or disclosure */
/* restricted by GSA ADP Schedule Contract with IBM Corp */
/* */
package com.ibm.cicsdev.restappext;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import com.ibm.cics.server.CicsConditionException;
import com.ibm.cics.server.DuplicateRecordException;
import com.ibm.cics.server.EndOfFileException;
import com.ibm.cics.server.KSDS;
import com.ibm.cics.server.KeyHolder;
import com.ibm.cics.server.KeyedFileBrowse;
import com.ibm.cics.server.RecordHolder;
import com.ibm.cics.server.RecordNotFoundException;
import com.ibm.cics.server.SearchType;
import com.ibm.cics.server.Task;
import com.ibm.cicsdev.restappext.bean.StockPartCollection;
import com.ibm.cicsdev.restappext.generated.StockPart;
import com.ibm.cicsdev.restappext.helper.StockPartHelper;
/**
 * Simple RESTful-style resource demonstrating CICS KSDS (key-sequenced VSAM)
 * file support from Java using the JCICS API.
 */
@Path("ksds")
@Produces(MediaType.APPLICATION_JSON)
public class VsamKsdsFileResource
{
    /**
     * Example to demonstrate writing a new record to a VSAM KSDS file.
     *
     * @return A StockPartCollection instance representing the updated VSAM file.
     */
    @GET
    @Path("write")
    public StockPartCollection writeNewRecord() {

        // Generate a record containing random data
        StockPart stockPart = StockPartHelper.generate();

        // Flatten the JZOS object into its byte representation and extract its key
        byte[] recordBytes = stockPart.getByteBuffer();
        byte[] recordKey = StockPartHelper.getKey(stockPart);

        // Reference to the CICS file definition
        KSDS file = new KSDS();
        file.setName("SMPLXMPL");

        try {
            // Store the record in the file under its key
            file.write(recordKey, recordBytes);

            // Commit now so the write is isolated in the current unit of work
            // and any locks we hold are released
            Task.getTask().commit();
        }
        catch (DuplicateRecordException dre) {
            // Generated key collided with an existing record - log and skip the insert
            String strMsg = "Tried to insert duplicate key %d";
            System.out.println( String.format(strMsg, stockPart.getPartId()) );
        }
        catch (CicsConditionException cce) {
            // Unexpected CICS failure - turn it into an HTTP 500 response
            String err = String.format("Error writing record : %s", cce.getMessage());
            Response r = Response.serverError().entity(err).build();
            // Propagate back up the handler chain (JAX-RS 1.0)
            throw new WebApplicationException(cce, r);
        }

        // Report the resulting contents of the file
        return queryFile(file);
    }

    /**
     * Example demonstrating how to delete a record in a VSAM KSDS file.
     *
     * @return A StockPartCollection instance representing the updated VSAM file.
     */
    @GET
    @Path("delete")
    public StockPartCollection deleteRecord() {

        // Reference to the CICS file definition
        KSDS file = new KSDS();
        file.setName("SMPLXMPL");

        // Holder that receives the record data
        RecordHolder holder = new RecordHolder();

        // Lowest possible key - used to locate the first record
        byte[] lowestKey = StockPartHelper.getKeyZero();

        try {
            // Read (with update intent) the first record at or after key zero
            file.readForUpdate(lowestKey, SearchType.GTEQ, holder);

            // Remove the record we just read
            file.delete();

            // Commit now so the delete is isolated in the current unit of work
            // and any locks we hold are released
            Task.getTask().commit();
        }
        catch (RecordNotFoundException rnfe) {
            // The file holds no records - nothing to delete, so ignore
        }
        catch (CicsConditionException cce) {
            // Some other CICS failure - turn it into an HTTP 500 response
            String err = String.format("Error deleting record : %s", cce.getMessage());
            Response r = Response.serverError().entity(err).build();
            // Propagate back up the handler chain (JAX-RS 1.0);
            // with JAX-RS 2.0 this could instead be:
            // throw new InternalServerErrorException(cce)
            throw new WebApplicationException(cce, r);
        }

        // Report the resulting contents of the file
        return queryFile(file);
    }

    /**
     * Example showing how to update a record in a VSAM KSDS file.
     *
     * @return A StockPartCollection instance representing the updated VSAM file.
     */
    @GET
    @Path("update")
    public StockPartCollection updateRecord() {

        // Reference to the CICS file definition
        KSDS file = new KSDS();
        file.setName("SMPLXMPL");

        // Holder that receives the record data
        RecordHolder holder = new RecordHolder();

        // Lowest possible key - used to locate the first record
        byte[] lowestKey = StockPartHelper.getKeyZero();

        try {
            // Read (with update intent) the first record at or after key zero
            file.readForUpdate(lowestKey, SearchType.GTEQ, holder);

            // Rebuild the raw record bytes as a StockPart object
            StockPart current = new StockPart( holder.getValue() );

            // Generate replacement data, preserving the key of the record we read
            StockPart replacement = StockPartHelper.generate();
            replacement.setPartId( current.getPartId() );

            // Rewrite the record we just read with the new data
            file.rewrite( replacement.getByteBuffer() );

            // Commit now so the update is isolated in the current unit of work
            // and any locks we hold are released
            Task.getTask().commit();
        }
        catch (RecordNotFoundException rnfe) {
            // The file holds no records - nothing to update, so ignore
        }
        catch (CicsConditionException cce) {
            // Some other CICS failure - turn it into an HTTP 500 response
            String err = String.format("Error updating record : %s", cce.getMessage());
            Response r = Response.serverError().entity(err).build();
            // Propagate back up the handler chain (JAX-RS 1.0)
            throw new WebApplicationException(cce, r);
        }

        // Report the resulting contents of the file
        return queryFile(file);
    }

    /**
     * Creates a {@link StockPartCollection} instance to represent the contents
     * of the CICS VSAM file.
     *
     * Note this pattern is for demonstration purposes only: it is not good
     * practice to browse through an entire file.
     *
     * @param ksds an instance of the JCICS {@link KSDS} class representing
     * the file to be browsed.
     *
     * @return A StockPartCollection instance representing the contents of
     * the supplied KSDS file. This method assumes the file contains records
     * which match the copybook used to generate the StockPart class.
     */
    private StockPartCollection queryFile(KSDS ksds) {

        // Collection to be returned, tagged with the file name
        StockPartCollection collection = new StockPartCollection();
        collection.setResourceName(ksds.getName());

        // Holders that receive each record and its key during the browse
        RecordHolder recordHolder = new RecordHolder();
        KeyHolder keyHolder = new KeyHolder();

        // Lowest possible key - the browse starts here
        byte[] lowestKey = StockPartHelper.getKeyZero();

        try {
            // Begin browsing at the first record at or after key zero
            KeyedFileBrowse browse = ksds.startBrowse(lowestKey, SearchType.GTEQ);

            // Keep reading until an end-of-file condition terminates the loop
            while ( true ) {
                // Fetch the next record and key from the file
                browse.next(recordHolder, keyHolder);

                // Convert the raw record bytes to an object and collect it
                collection.add( new StockPart(recordHolder.getValue()) );
            }
        }
        catch (RecordNotFoundException rnfe) {
            // Initial browse failed - the file holds no records
        }
        catch (EndOfFileException eof) {
            // Normal loop termination - no further records
        }
        catch (CicsConditionException cce) {
            // Some other CICS failure - turn it into an HTTP 500 response
            String err = String.format("Error querying file : %s", cce.getMessage());
            Response r = Response.serverError().entity(err).build();
            // Propagate back up the handler chain (JAX-RS 1.0)
            throw new WebApplicationException(cce, r);
        }

        // Return the constructed collection
        return collection;
    }
}
| |
/*
* Copyright 2009 Joubin Houshyar
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jredis.ri;
import static org.testng.Assert.fail;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Random;
import java.util.Set;
import org.jredis.ri.alphazero.support.Log;
import org.testng.annotations.BeforeSuite;
import org.testng.annotations.Parameters;
/**
 * The grand daddy of all TestNG test classes for the RI test suites and classes,
 * this class will get loaded with all the <b>general</b> parameters we use for
 * testing, namely host, port, password, and the DBs we will use to test (which
 * will be flushed!)
 * <p>
 * Defaults for values are defined in this class so no testng.xml is required. Change
 * values in master pom as required.
 * <p>
 * [Note: as of now, these are defined in the master pom.]
 *
 * @author Joubin Houshyar (alphazero@sensesay.net)
 * @version alpha.0, Apr 17, 2009
 * @since alpha.0
 *
 */
//TODO: get rid of NG in class name
public abstract class JRedisTestSuiteBase<T> extends ProviderTestBase<T>{
// ------------------------------------------------------------------------
// General RI Test Suite Parameters with default values
// ------------------------------------------------------------------------
// Connection settings; overwritten by TestNG @Parameters in suiteParametersInit.
protected String password = "jredis";
protected String host = "localhost";
protected int port = 6379;
// DB indices used for testing (intended to be flushed by the suite).
protected int db1 = 13;
protected int db2 = 10;
// ------------------------------------------------------------------------
// General RI Test Suite Parameters with default values to avoid XML
// ------------------------------------------------------------------------
// Payload sizes (bytes) and iteration counts for small/medium/large cases.
protected int SMALL_DATA = 128;
protected int MEDIUM_DATA = 1024 * 2;
protected int LARGE_DATA = 1024 * 512;
protected int SMALL_CNT = 10;
protected int MEDIUM_CNT = 1000;
protected int LARGE_CNT = 100000;
// Key-expiry tests: TTL in seconds and how long to wait for it to fire.
protected int expire_secs = 1;
protected long expire_wait_millisecs = 1;
// Time-seeded PRNG shared by all data-generation helpers below.
protected final Random random = new Random(System.currentTimeMillis());
// we'll use these for values
protected final byte[] emptyBytes = new byte[0];
protected final String emptyString = "";
// Random fixtures populated once per suite by setupTestSuiteData().
protected final List<byte[]> dataList = new ArrayList<byte[]>();
protected final List<byte[]> sparseList = new ArrayList<byte[]>();
protected final List<TestBean> objectList = new ArrayList<TestBean>();
protected final List<String> stringList = new ArrayList<String>();
protected final List<String> patternList = new ArrayList<String>();
protected final List<Integer> intList = new ArrayList<Integer>();
protected final List<Long> longList= new ArrayList<Long>();
protected final List<Double> doubleList= new ArrayList<Double>();
// uniqueSet stands alone; commonSet members are added to both set1 and set2.
protected final Set<String> uniqueSet = new HashSet<String> ();
protected final Set<String> commonSet = new HashSet<String> ();
protected final Set<String> set1 = new HashSet<String> ();
protected final Set<String> set2 = new HashSet<String> ();
// Marker substring embedded in every patternList entry (see setupTestSuiteData).
protected final String patternA = "_AAA_";
// NOTE(review): these buffers are sized from the *default* *_DATA values at
// construction time; suiteParametersInit later reassigns SMALL_DATA et al.
// but never resizes these buffers - confirm this is intended.
protected final byte[] smallData = getRandomBytes(SMALL_DATA);
protected final byte[] mediumData = getRandomBytes(MEDIUM_DATA);
protected final byte[] largeData = getRandomBytes(LARGE_DATA);
protected final List<String> keys = new ArrayList<String>();
protected int cnt;
protected String key = null;
// Scratch fields kept around for subclasses/history; currently unread here.
@SuppressWarnings("unused")
private byte bytevalue;
@SuppressWarnings("unused")
private String stringvalue;
@SuppressWarnings("unused")
private int intValue;
@SuppressWarnings("unused")
private long longValue;
@SuppressWarnings("unused")
private TestBean objectvalue;
// ------------------------------------------------------------------------
// General RI Test Suite Parameters init
// ------------------------------------------------------------------------
/**
 * This method sets up all the general test parameters for all
 * classes that inherit from it, then generates the suite's random test data.
 * @param password password we'll use to authenticate
 * @param host host name
 * @param port port number
 * @param db1 db index for testing - will be flushed
 * @param db2 db index for testing - will be flushed
 * @param small_data size in bytes of the small test payload
 * @param medium_data size in bytes of the medium test payload
 * @param large_data size in bytes of the large test payload
 * @param small_cnt iteration count for small test runs
 * @param medium_cnt iteration count for medium test runs
 * @param large_cnt iteration count for large test runs
 * @param expire_secs TTL in seconds used by expiration tests
 * @param expire_wait_millisecs wait time used by expiration tests
 */
@Parameters({
"jredis.test.password",
"jredis.test.host",
"jredis.test.port",
"jredis.test.db.1",
"jredis.test.db.2",
"jredis.test.datasize.small",
"jredis.test.datasize.medium",
"jredis.test.datasize.large",
"jredis.test.cnt.small",
"jredis.test.cnt.medium",
"jredis.test.cnt.large",
"jredis.test.expire.secs",
"jredis.test.expire.wait.millisecs"
})
@BeforeSuite
public void suiteParametersInit(
String password,
String host,
int port,
int db1,
int db2,
int small_data,
int medium_data,
int large_data,
int small_cnt,
int medium_cnt,
int large_cnt,
int expire_secs,
int expire_wait_millisecs
)
{
// Copy the injected TestNG parameters over the field defaults.
this.password = password;
this.host = host;
this.port = port;
this.db1 = db1;
this.db2 = db2;
this.SMALL_DATA = small_data;
this.MEDIUM_DATA = medium_data;
this.LARGE_DATA = large_data;
this.SMALL_CNT = small_cnt;
this.MEDIUM_CNT = medium_cnt;
this.LARGE_CNT = large_cnt;
this.expire_secs = expire_secs;
this.expire_wait_millisecs = expire_wait_millisecs;
Log.log("Suite parameters initialized <suiteParametersInit>");
// Generate the random fixtures once for the whole suite.
setupTestSuiteData();
}
// ------------------------------------------------------------------------
// Test data setup methods
// ------------------------------------------------------------------------
/**
 * All providers to be tested with the same degree of test data.
 * We're using random data and can't guarantee exact test data.
 * TODO: flip switch to use random or deterministic data.
 */
@SuppressWarnings("boxing")
private final void setupTestSuiteData () {
/** setup data */
cnt = MEDIUM_CNT;
byte[] zerobytes = new byte[0];
for(int i=0; i<cnt; i++){
// Random keys, plus pattern strings embedding the patternA marker.
keys.add(getRandomAsciiString (48));
patternList.add(getRandomAsciiString(random.nextInt(10)+2) + patternA + getRandomAsciiString(random.nextInt(10)+2));
uniqueSet.add(getRandomAsciiString(48));
commonSet.add(getRandomAsciiString(48));
set1.add("set_1" + getRandomAsciiString(20));
set2.add("set_2" + getRandomAsciiString(20));
dataList.add(getRandomBytes (128));
// sparseList randomly mixes empty entries with 128-byte entries.
if(random.nextBoolean())
sparseList.add(zerobytes);
else
sparseList.add(getRandomBytes (128));
stringList.add(getRandomAsciiString (128));
objectList.add(new TestBean("testbean." + i));
intList.add(random.nextInt());
longList.add(random.nextLong());
doubleList.add(random.nextDouble());
}
// Add every commonSet member to both sets so they share an intersection.
for(String m : commonSet) {
set1.add(m);
set2.add(m);
}
Log.log("TEST-SUITE-INIT: JRedis Provider Test Suite random test data created");
}
// Currently disabled: AUTH/SELECT/FLUSHDB preparation is left commented out.
protected final void prepTestDBs() {
// try {
// jredis.auth(password);
// Log.log("TEST-PREP: AUTH with password %s" + password);
// }
// catch (RedisException e) {
// Log.error("AUTH with password " + password + " => " + e.getLocalizedMessage());
// fail("AUTH with password: " + password, e);
// }
// try {
// jredis.select(db1).flushdb().select(db2).flushdb().select(db1);
// Log.log("TEST-PREP: %s:%d Redis server DB %d & %d flushed", host, port, db1, db2);
// }
// catch (RedisException e) {
// Log.error("SELECT/FLUSHDB for test prep" + password);
// fail("SELECT/FLUSHDB for test prep", e);
// }
}
// ------------------------------------------------------------------------
// Helper methods
// ------------------------------------------------------------------------
/**
 * Creates a random ascii string of printable characters (codes 33-125).
 * @param length number of characters to generate
 * @return the generated string
 */
protected String getRandomAsciiString (int length) {
StringBuilder builder = new StringBuilder(length);
for(int i = 0; i<length; i++){
char c = (char) (random.nextInt(126-33) + 33);
builder.append(c);
}
return builder.toString();
}
/**
 * Creates a buffer of given size filled with random byte values
 * @param size buffer length in bytes
 * @return the filled buffer
 */
protected byte[] getRandomBytes(int size) {
int len = size;
byte[] buff = new byte[len];
random.nextBytes(buff);
return buff;
}
/**
 * Runs the given test and asserts that it raised a RuntimeException of
 * (or assignable to) the expected type; any other outcome fails.
 */
protected final <FAULT extends RuntimeException> void assertDidRaiseRuntimeError (Runnable test, Class<FAULT> errtype){
boolean didRaiseError = false;
try { test.run(); }
catch (RuntimeException t){
// Only the expected type counts; other runtime errors fall through to finally.
if(errtype.isAssignableFrom(t.getClass()))
didRaiseError = true;
}
catch (Exception e){ fail("Unexpected exception", e); }
finally {
if(!didRaiseError) { fail("Failed to raise expected RuntimeError " + errtype.getCanonicalName()); }
}
}
// ------------------------------------------------------------------------
// INNER TYPES USED FOR TESTING
// ============================================================== TestBean
// ------------------------------------------------------------------------
/**
 * This is a simple {@link Serializable} class that we use to test object
 * values. Equality and hash are based on the name and creation timestamp
 * only (the data field is excluded).
 *
 * @author Joubin Houshyar (alphazero@sensesay.net)
 * @version alpha.0, Apr 18, 2009
 * @since alpha.0
 *
 */
public static class TestBean implements Serializable {
/** */
private static final long serialVersionUID = 4457509786469904810L;
public final long getCreated_on() {return named_on;}
public final void setCreated_on(long created_on) {this.named_on = created_on;}
public final String getName() {return name;}
public final void setName(String name) {this.name = name;}
public final byte[] getData() { return data;}
public final void setData(byte[] data) { this.data = data;}
// Creation timestamp; participates in equals/hashCode.
private long named_on;
private String name;
private byte[] data;
public TestBean() {
// named_on = System.currentTimeMillis();
}
public TestBean(String string) {
this(); name = string;
named_on = System.currentTimeMillis();
}
@Override public String toString() { return "[" + getClass().getSimpleName() + " | name: " + getName() + " created on: " + getCreated_on() + "]"; }
@Override public boolean equals (Object o) {
if(o instanceof TestBean) {
TestBean isItMe = (TestBean) o;
return isItMe.getName().equals(name) && isItMe.getCreated_on()==this.named_on;
}
return false;
}
// NOTE(review): data is excluded from equals/hashCode - confirm intended.
@Override public int hashCode() {
return name.hashCode() ^ (int)named_on;
}
}
// ------------------------------------------------------------------------
// Test support - mildly enhanced TESTNG Assert semantics
// ------------------------------------------------------------------------
// TODO: check latest version of testng
// // notNull
// public static final void assertNotNull(Object object, String msgfmt, Object...optionalFmtArgs){
// String message = String.format(msgfmt, optionalFmtArgs);
// Assert.assertNotNull (object, message);
// }
// // null
// public static final void assertNull(Object object, String msgfmt, Object...optionalFmtArgs){
// String message = String.format(msgfmt, optionalFmtArgs);
// Assert.assertNull (object, message); // << has bug. reports a boolean comp result -- TODO: fix and patch.
// }
//
// // equals
// public static final void assertEquals(Object actual, Object expected, String msgfmt, Object...optionalFmtArgs){
// String message = String.format(msgfmt, optionalFmtArgs);
// Assert.assertEquals (actual, expected, message);
// }
// public static final void assertEquals(byte[] actual, byte[] expected, String msgfmt, Object...optionalFmtArgs){
// String message = String.format(msgfmt, optionalFmtArgs);
// Assert.assertEquals (actual, expected, message);
// }
//
// // true/false
// public static final void assertTrue(boolean condition, String msgfmt, Object...optionalFmtArgs){
// String message = String.format(msgfmt, optionalFmtArgs);
// Assert.assertTrue (condition, message);
// }
// public static final void assertFalse(boolean condition, String msgfmt, Object...optionalFmtArgs){
// String message = String.format(msgfmt, optionalFmtArgs);
// Assert.assertFalse (condition, message);
// }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.jdbc;
import com.facebook.presto.execution.QueryState;
import com.facebook.presto.plugin.blackhole.BlackHolePlugin;
import com.facebook.presto.server.testing.TestingPrestoServer;
import com.facebook.presto.spi.type.BigintType;
import com.facebook.presto.spi.type.BooleanType;
import com.facebook.presto.spi.type.DateType;
import com.facebook.presto.spi.type.DoubleType;
import com.facebook.presto.spi.type.IntegerType;
import com.facebook.presto.spi.type.RealType;
import com.facebook.presto.spi.type.SmallintType;
import com.facebook.presto.spi.type.TimeType;
import com.facebook.presto.spi.type.TimeWithTimeZoneType;
import com.facebook.presto.spi.type.TimeZoneKey;
import com.facebook.presto.spi.type.TimestampType;
import com.facebook.presto.spi.type.TimestampWithTimeZoneType;
import com.facebook.presto.spi.type.TinyintType;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.spi.type.VarbinaryType;
import com.facebook.presto.tpch.TpchMetadata;
import com.facebook.presto.tpch.TpchPlugin;
import com.facebook.presto.type.ArrayType;
import com.facebook.presto.type.ColorType;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import io.airlift.log.Logging;
import io.airlift.units.Duration;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.math.BigDecimal;
import java.sql.Connection;
import java.sql.Date;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.sql.Statement;
import java.sql.Time;
import java.sql.Timestamp;
import java.sql.Types;
import java.util.ArrayList;
import java.util.GregorianCalendar;
import java.util.List;
import java.util.Set;
import java.util.TimeZone;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicReference;
import static com.facebook.presto.execution.QueryState.FAILED;
import static com.facebook.presto.spi.type.CharType.createCharType;
import static com.facebook.presto.spi.type.DecimalType.createDecimalType;
import static com.facebook.presto.spi.type.VarcharType.createUnboundedVarcharType;
import static com.facebook.presto.spi.type.VarcharType.createVarcharType;
import static io.airlift.concurrent.Threads.daemonThreadsNamed;
import static io.airlift.testing.Assertions.assertInstanceOf;
import static io.airlift.testing.Assertions.assertLessThan;
import static io.airlift.units.Duration.nanosSince;
import static java.lang.Float.POSITIVE_INFINITY;
import static java.lang.String.format;
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.util.Arrays.asList;
import static java.util.Objects.requireNonNull;
import static java.util.concurrent.Executors.newCachedThreadPool;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
import static java.util.stream.Collectors.toList;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
public class TestDriver
{
// Non-default time zone/calendar used to exercise Calendar-aware JDBC getters.
private static final DateTimeZone ASIA_ORAL_ZONE = DateTimeZone.forID("Asia/Oral");
private static final GregorianCalendar ASIA_ORAL_CALENDAR = new GregorianCalendar(ASIA_ORAL_ZONE.toTimeZone());
// Catalog name registered against the TPCH connector in setup().
private static final String TEST_CATALOG = "test_catalog";
// Embedded server and executor created in setup(), released in teardown().
private TestingPrestoServer server;
private ExecutorService executorService;
@BeforeClass
public void setup()
throws Exception
{
// Start an in-process Presto server with the TPCH and blackhole connectors.
Logging.initialize();
server = new TestingPrestoServer();
server.installPlugin(new TpchPlugin());
server.createCatalog(TEST_CATALOG, "tpch");
server.installPlugin(new BlackHolePlugin());
server.createCatalog("blackhole", "blackhole");
// Block until the server reports at least one active node before using it.
waitForNodeRefresh(server);
setupTestTables();
// Executor used by tests that run queries concurrently.
executorService = newCachedThreadPool(daemonThreadsNamed("test-%s"));
}
/**
 * Blocks until the server reports at least one active node, polling every
 * 10ms and failing once discovery has taken longer than 10 seconds.
 */
private static void waitForNodeRefresh(TestingPrestoServer server)
        throws InterruptedException
{
    long start = System.nanoTime();
    for (;;) {
        if (!server.refreshNodes().getActiveNodes().isEmpty()) {
            return;
        }
        assertLessThan(nanosSince(start), new Duration(10, SECONDS));
        MILLISECONDS.sleep(10);
    }
}
private void setupTestTables()
throws SQLException
{
// Create a blackhole-backed table used by tests; executeUpdate returns the
// affected row count, which is 0 for DDL.
try (Connection connection = createConnection("blackhole", "blackhole");
Statement statement = connection.createStatement()) {
assertEquals(statement.executeUpdate("CREATE TABLE test_table (x bigint)"), 0);
}
}
@AfterClass(alwaysRun = true)
public void teardown()
throws Exception
{
// Stop the embedded server and cancel any still-running background tasks.
closeQuietly(server);
executorService.shutdownNow();
}
@Test
public void testDriverManager()
throws Exception
{
// End-to-end check of driver registration, statement execution, result-set
// metadata, and typed accessors across a representative set of Presto types.
try (Connection connection = createConnection()) {
try (Statement statement = connection.createStatement()) {
try (ResultSet rs = statement.executeQuery("" +
"SELECT " +
"  123 _integer" +
", 12300000000 _bigint" +
", 'foo' _varchar" +
", 0.1 _double" +
", true _boolean" +
", cast('hello' as varbinary) _varbinary" +
", DECIMAL '1234567890.1234567' _decimal_short" +
", DECIMAL '.12345678901234567890123456789012345678' _decimal_long" +
", approx_set(42) _hll" +
", cast('foo' as char(5)) _char")) {
// Column labels and JDBC type mappings reported by the metadata.
ResultSetMetaData metadata = rs.getMetaData();
assertEquals(metadata.getColumnCount(), 10);
assertEquals(metadata.getColumnLabel(1), "_integer");
assertEquals(metadata.getColumnType(1), Types.INTEGER);
assertEquals(metadata.getColumnLabel(2), "_bigint");
assertEquals(metadata.getColumnType(2), Types.BIGINT);
assertEquals(metadata.getColumnLabel(3), "_varchar");
assertEquals(metadata.getColumnType(3), Types.LONGNVARCHAR);
assertEquals(metadata.getColumnLabel(4), "_double");
assertEquals(metadata.getColumnType(4), Types.DOUBLE);
assertEquals(metadata.getColumnLabel(5), "_boolean");
assertEquals(metadata.getColumnType(5), Types.BOOLEAN);
assertEquals(metadata.getColumnLabel(6), "_varbinary");
assertEquals(metadata.getColumnType(6), Types.LONGVARBINARY);
assertEquals(metadata.getColumnLabel(7), "_decimal_short");
assertEquals(metadata.getColumnType(7), Types.DECIMAL);
assertEquals(metadata.getColumnLabel(8), "_decimal_long");
assertEquals(metadata.getColumnType(8), Types.DECIMAL);
assertEquals(metadata.getColumnLabel(9), "_hll");
assertEquals(metadata.getColumnType(9), Types.JAVA_OBJECT);
assertEquals(metadata.getColumnLabel(10), "_char");
assertEquals(metadata.getColumnType(10), Types.CHAR);
// Row values via getObject plus every applicable typed accessor,
// addressed both by index and by label.
assertTrue(rs.next());
assertEquals(rs.getObject(1), 123);
assertEquals(rs.getObject("_integer"), 123);
assertEquals(rs.getInt(1), 123);
assertEquals(rs.getInt("_integer"), 123);
assertEquals(rs.getLong(1), 123L);
assertEquals(rs.getLong("_integer"), 123L);
assertEquals(rs.getObject(2), 12300000000L);
assertEquals(rs.getObject("_bigint"), 12300000000L);
assertEquals(rs.getLong(2), 12300000000L);
assertEquals(rs.getLong("_bigint"), 12300000000L);
assertEquals(rs.getObject(3), "foo");
assertEquals(rs.getObject("_varchar"), "foo");
assertEquals(rs.getString(3), "foo");
assertEquals(rs.getString("_varchar"), "foo");
assertEquals(rs.getObject(4), 0.1);
assertEquals(rs.getObject("_double"), 0.1);
assertEquals(rs.getDouble(4), 0.1);
assertEquals(rs.getDouble("_double"), 0.1);
assertEquals(rs.getObject(5), true);
assertEquals(rs.getObject("_boolean"), true);
assertEquals(rs.getBoolean(5), true);
assertEquals(rs.getBoolean("_boolean"), true);
// Booleans coerce to 1 across all numeric accessors.
assertEquals(rs.getByte("_boolean"), 1);
assertEquals(rs.getShort("_boolean"), 1);
assertEquals(rs.getInt("_boolean"), 1);
assertEquals(rs.getLong("_boolean"), 1L);
assertEquals(rs.getFloat("_boolean"), 1.0f);
assertEquals(rs.getDouble("_boolean"), 1.0);
assertEquals(rs.getObject(6), "hello".getBytes(UTF_8));
assertEquals(rs.getObject("_varbinary"), "hello".getBytes(UTF_8));
assertEquals(rs.getBytes(6), "hello".getBytes(UTF_8));
assertEquals(rs.getBytes("_varbinary"), "hello".getBytes(UTF_8));
assertEquals(rs.getObject(7), new BigDecimal("1234567890.1234567"));
assertEquals(rs.getObject("_decimal_short"), new BigDecimal("1234567890.1234567"));
assertEquals(rs.getBigDecimal(7), new BigDecimal("1234567890.1234567"));
assertEquals(rs.getBigDecimal("_decimal_short"), new BigDecimal("1234567890.1234567"));
// The two-argument getBigDecimal rescales to the requested scale.
assertEquals(rs.getBigDecimal(7, 1), new BigDecimal("1234567890.1"));
assertEquals(rs.getBigDecimal("_decimal_short", 1), new BigDecimal("1234567890.1"));
assertEquals(rs.getObject(8), new BigDecimal(".12345678901234567890123456789012345678"));
assertEquals(rs.getObject("_decimal_long"), new BigDecimal(".12345678901234567890123456789012345678"));
assertEquals(rs.getBigDecimal(8), new BigDecimal(".12345678901234567890123456789012345678"));
assertEquals(rs.getBigDecimal("_decimal_long"), new BigDecimal(".12345678901234567890123456789012345678"));
assertEquals(rs.getBigDecimal(8, 6), new BigDecimal(".123457"));
assertEquals(rs.getBigDecimal("_decimal_long", 6), new BigDecimal(".123457"));
// HyperLogLog values surface as raw byte arrays.
assertInstanceOf(rs.getObject(9), byte[].class);
assertInstanceOf(rs.getObject("_hll"), byte[].class);
assertInstanceOf(rs.getBytes(9), byte[].class);
assertInstanceOf(rs.getBytes("_hll"), byte[].class);
// char(5) values come back space-padded to the declared length.
assertEquals(rs.getObject(10), "foo  ");
assertEquals(rs.getObject("_char"), "foo  ");
assertEquals(rs.getString(10), "foo  ");
assertEquals(rs.getString("_char"), "foo  ");
assertFalse(rs.next());
}
}
}
}
    /**
     * Verifies JDBC accessor mappings for temporal, interval, and real types:
     * TIME / TIMESTAMP (with and without time zone), DATE, YEAR TO MONTH and
     * DAY TO SECOND intervals, and REAL including infinity.
     */
    @Test
    public void testTypes()
            throws Exception
    {
        try (Connection connection = createConnection()) {
            try (Statement statement = connection.createStatement()) {
                try (ResultSet rs = statement.executeQuery("SELECT " +
                        " TIME '3:04:05' as a" +
                        ", TIME '6:07:08 +06:17' as b" +
                        ", TIME '9:10:11 Europe/Berlin' as c" +
                        ", TIMESTAMP '2001-02-03 3:04:05' as d" +
                        ", TIMESTAMP '2004-05-06 6:07:08 +06:17' as e" +
                        ", TIMESTAMP '2007-08-09 9:10:11 Europe/Berlin' as f" +
                        ", DATE '2013-03-22' as g" +
                        ", INTERVAL '123-11' YEAR TO MONTH as h" +
                        ", INTERVAL '11 22:33:44.555' DAY TO SECOND as i" +
                        ", REAL '123.45' as j" +
                        ", REAL 'Infinity' as k" +
                        "")) {
                    assertTrue(rs.next());
                    // TIME without zone: a supplied Calendar shifts the value into its zone
                    assertEquals(rs.getTime(1), new Time(new DateTime(1970, 1, 1, 3, 4, 5).getMillis()));
                    assertEquals(rs.getTime(1, ASIA_ORAL_CALENDAR), new Time(new DateTime(1970, 1, 1, 3, 4, 5, ASIA_ORAL_ZONE).getMillis()));
                    assertEquals(rs.getObject(1), new Time(new DateTime(1970, 1, 1, 3, 4, 5).getMillis()));
                    assertEquals(rs.getTime("a"), new Time(new DateTime(1970, 1, 1, 3, 4, 5).getMillis()));
                    assertEquals(rs.getTime("a", ASIA_ORAL_CALENDAR), new Time(new DateTime(1970, 1, 1, 3, 4, 5, ASIA_ORAL_ZONE).getMillis()));
                    assertEquals(rs.getObject("a"), new Time(new DateTime(1970, 1, 1, 3, 4, 5).getMillis()));
                    // TIME WITH TIME ZONE (explicit offset): the literal's zone wins; the Calendar is ignored
                    assertEquals(rs.getTime(2), new Time(new DateTime(1970, 1, 1, 6, 7, 8, DateTimeZone.forOffsetHoursMinutes(6, 17)).getMillis()));
                    assertEquals(rs.getTime(2, ASIA_ORAL_CALENDAR), new Time(new DateTime(1970, 1, 1, 6, 7, 8, DateTimeZone.forOffsetHoursMinutes(6, 17)).getMillis()));
                    assertEquals(rs.getObject(2), new Time(new DateTime(1970, 1, 1, 6, 7, 8, DateTimeZone.forOffsetHoursMinutes(6, 17)).getMillis()));
                    assertEquals(rs.getTime("b"), new Time(new DateTime(1970, 1, 1, 6, 7, 8, DateTimeZone.forOffsetHoursMinutes(6, 17)).getMillis()));
                    assertEquals(rs.getTime("b", ASIA_ORAL_CALENDAR), new Time(new DateTime(1970, 1, 1, 6, 7, 8, DateTimeZone.forOffsetHoursMinutes(6, 17)).getMillis()));
                    assertEquals(rs.getObject("b"), new Time(new DateTime(1970, 1, 1, 6, 7, 8, DateTimeZone.forOffsetHoursMinutes(6, 17)).getMillis()));
                    // TIME WITH TIME ZONE (named zone)
                    assertEquals(rs.getTime(3), new Time(new DateTime(1970, 1, 1, 9, 10, 11, DateTimeZone.forID("Europe/Berlin")).getMillis()));
                    assertEquals(rs.getTime(3, ASIA_ORAL_CALENDAR), new Time(new DateTime(1970, 1, 1, 9, 10, 11, DateTimeZone.forID("Europe/Berlin")).getMillis()));
                    assertEquals(rs.getObject(3), new Time(new DateTime(1970, 1, 1, 9, 10, 11, DateTimeZone.forID("Europe/Berlin")).getMillis()));
                    assertEquals(rs.getTime("c"), new Time(new DateTime(1970, 1, 1, 9, 10, 11, DateTimeZone.forID("Europe/Berlin")).getMillis()));
                    assertEquals(rs.getTime("c", ASIA_ORAL_CALENDAR), new Time(new DateTime(1970, 1, 1, 9, 10, 11, DateTimeZone.forID("Europe/Berlin")).getMillis()));
                    assertEquals(rs.getObject("c"), new Time(new DateTime(1970, 1, 1, 9, 10, 11, DateTimeZone.forID("Europe/Berlin")).getMillis()));
                    // TIMESTAMP without zone: the Calendar shifts the value, as for TIME
                    assertEquals(rs.getTimestamp(4), new Timestamp(new DateTime(2001, 2, 3, 3, 4, 5).getMillis()));
                    assertEquals(rs.getTimestamp(4, ASIA_ORAL_CALENDAR), new Timestamp(new DateTime(2001, 2, 3, 3, 4, 5, ASIA_ORAL_ZONE).getMillis()));
                    assertEquals(rs.getObject(4), new Timestamp(new DateTime(2001, 2, 3, 3, 4, 5).getMillis()));
                    assertEquals(rs.getTimestamp("d"), new Timestamp(new DateTime(2001, 2, 3, 3, 4, 5).getMillis()));
                    assertEquals(rs.getTimestamp("d", ASIA_ORAL_CALENDAR), new Timestamp(new DateTime(2001, 2, 3, 3, 4, 5, ASIA_ORAL_ZONE).getMillis()));
                    assertEquals(rs.getObject("d"), new Timestamp(new DateTime(2001, 2, 3, 3, 4, 5).getMillis()));
                    // TIMESTAMP WITH TIME ZONE (explicit offset): Calendar ignored
                    assertEquals(rs.getTimestamp(5), new Timestamp(new DateTime(2004, 5, 6, 6, 7, 8, DateTimeZone.forOffsetHoursMinutes(6, 17)).getMillis()));
                    assertEquals(rs.getTimestamp(5, ASIA_ORAL_CALENDAR), new Timestamp(new DateTime(2004, 5, 6, 6, 7, 8, DateTimeZone.forOffsetHoursMinutes(6, 17)).getMillis()));
                    assertEquals(rs.getObject(5), new Timestamp(new DateTime(2004, 5, 6, 6, 7, 8, DateTimeZone.forOffsetHoursMinutes(6, 17)).getMillis()));
                    assertEquals(rs.getTimestamp("e"), new Timestamp(new DateTime(2004, 5, 6, 6, 7, 8, DateTimeZone.forOffsetHoursMinutes(6, 17)).getMillis()));
                    assertEquals(rs.getTimestamp("e", ASIA_ORAL_CALENDAR), new Timestamp(new DateTime(2004, 5, 6, 6, 7, 8, DateTimeZone.forOffsetHoursMinutes(6, 17)).getMillis()));
                    assertEquals(rs.getObject("e"), new Timestamp(new DateTime(2004, 5, 6, 6, 7, 8, DateTimeZone.forOffsetHoursMinutes(6, 17)).getMillis()));
                    // TIMESTAMP WITH TIME ZONE (named zone)
                    assertEquals(rs.getTimestamp(6), new Timestamp(new DateTime(2007, 8, 9, 9, 10, 11, DateTimeZone.forID("Europe/Berlin")).getMillis()));
                    assertEquals(rs.getTimestamp(6, ASIA_ORAL_CALENDAR), new Timestamp(new DateTime(2007, 8, 9, 9, 10, 11, DateTimeZone.forID("Europe/Berlin")).getMillis()));
                    assertEquals(rs.getObject(6), new Timestamp(new DateTime(2007, 8, 9, 9, 10, 11, DateTimeZone.forID("Europe/Berlin")).getMillis()));
                    assertEquals(rs.getTimestamp("f"), new Timestamp(new DateTime(2007, 8, 9, 9, 10, 11, DateTimeZone.forID("Europe/Berlin")).getMillis()));
                    assertEquals(rs.getTimestamp("f", ASIA_ORAL_CALENDAR), new Timestamp(new DateTime(2007, 8, 9, 9, 10, 11, DateTimeZone.forID("Europe/Berlin")).getMillis()));
                    assertEquals(rs.getObject("f"), new Timestamp(new DateTime(2007, 8, 9, 9, 10, 11, DateTimeZone.forID("Europe/Berlin")).getMillis()));
                    // DATE: midnight in the session zone, or in the Calendar's zone when supplied
                    assertEquals(rs.getDate(7), new Date(new DateTime(2013, 3, 22, 0, 0).getMillis()));
                    assertEquals(rs.getDate(7, ASIA_ORAL_CALENDAR), new Date(new DateTime(2013, 3, 22, 0, 0, ASIA_ORAL_ZONE).getMillis()));
                    assertEquals(rs.getObject(7), new Date(new DateTime(2013, 3, 22, 0, 0).getMillis()));
                    assertEquals(rs.getDate("g"), new Date(new DateTime(2013, 3, 22, 0, 0).getMillis()));
                    assertEquals(rs.getDate("g", ASIA_ORAL_CALENDAR), new Date(new DateTime(2013, 3, 22, 0, 0, ASIA_ORAL_ZONE).getMillis()));
                    assertEquals(rs.getObject("g"), new Date(new DateTime(2013, 3, 22, 0, 0).getMillis()));
                    // intervals map to Presto-specific value classes
                    assertEquals(rs.getObject(8), new PrestoIntervalYearMonth(123, 11));
                    assertEquals(rs.getObject("h"), new PrestoIntervalYearMonth(123, 11));
                    assertEquals(rs.getObject(9), new PrestoIntervalDayTime(11, 22, 33, 44, 555));
                    assertEquals(rs.getObject("i"), new PrestoIntervalDayTime(11, 22, 33, 44, 555));
                    // REAL maps to Java float, including infinity
                    assertEquals(rs.getFloat(10), 123.45f);
                    assertEquals(rs.getObject(10), 123.45f);
                    assertEquals(rs.getFloat("j"), 123.45f);
                    assertEquals(rs.getObject("j"), 123.45f);
                    assertEquals(rs.getFloat(11), POSITIVE_INFINITY);
                    assertEquals(rs.getObject(11), POSITIVE_INFINITY);
                    assertEquals(rs.getFloat("k"), POSITIVE_INFINITY);
                    assertEquals(rs.getObject("k"), POSITIVE_INFINITY);
                    assertFalse(rs.next());
                }
            }
        }
    }
@Test
public void testGetCatalogs()
throws Exception
{
try (Connection connection = createConnection()) {
try (ResultSet rs = connection.getMetaData().getCatalogs()) {
assertEquals(readRows(rs), list(list("blackhole"), list("system"), list(TEST_CATALOG)));
ResultSetMetaData metadata = rs.getMetaData();
assertEquals(metadata.getColumnCount(), 1);
assertEquals(metadata.getColumnLabel(1), "TABLE_CAT");
assertEquals(metadata.getColumnType(1), Types.LONGNVARCHAR);
}
}
}
@Test
public void testGetDatabaseProductVersion()
throws Exception
{
try (Connection connection = createConnection()) {
assertNotNull(connection.getMetaData().getDatabaseProductVersion());
}
}
@Test
public void testGetSchemas()
throws Exception
{
List<List<String>> system = new ArrayList<>();
system.add(list("system", "information_schema"));
system.add(list("system", "jdbc"));
system.add(list("system", "metadata"));
system.add(list("system", "runtime"));
List<List<String>> blackhole = new ArrayList<>();
blackhole.add(list("blackhole", "information_schema"));
blackhole.add(list("blackhole", "default"));
List<List<String>> test = new ArrayList<>();
test.add(list(TEST_CATALOG, "information_schema"));
for (String schema : TpchMetadata.SCHEMA_NAMES) {
test.add(list(TEST_CATALOG, schema));
}
List<List<String>> all = new ArrayList<>();
all.addAll(system);
all.addAll(test);
all.addAll(blackhole);
try (Connection connection = createConnection()) {
try (ResultSet rs = connection.getMetaData().getSchemas()) {
assertGetSchemasResult(rs, all);
}
try (ResultSet rs = connection.getMetaData().getSchemas(null, null)) {
assertGetSchemasResult(rs, all);
}
try (ResultSet rs = connection.getMetaData().getSchemas(TEST_CATALOG, null)) {
assertGetSchemasResult(rs, test);
}
try (ResultSet rs = connection.getMetaData().getSchemas("", null)) {
// all schemas in presto have a catalog name
assertGetSchemasResult(rs, list());
}
try (ResultSet rs = connection.getMetaData().getSchemas(TEST_CATALOG, "information_schema")) {
assertGetSchemasResult(rs, list(list(TEST_CATALOG, "information_schema")));
}
try (ResultSet rs = connection.getMetaData().getSchemas(null, "information_schema")) {
assertGetSchemasResult(rs, list(
list(TEST_CATALOG, "information_schema"),
list("blackhole", "information_schema"),
list("system", "information_schema")));
}
try (ResultSet rs = connection.getMetaData().getSchemas(null, "sf_")) {
assertGetSchemasResult(rs, list(list(TEST_CATALOG, "sf1")));
}
try (ResultSet rs = connection.getMetaData().getSchemas(null, "sf%")) {
List<List<String>> expected = test.stream()
.filter(item -> item.get(1).startsWith("sf"))
.collect(toList());
assertGetSchemasResult(rs, expected);
}
try (ResultSet rs = connection.getMetaData().getSchemas("unknown", null)) {
assertGetSchemasResult(rs, list());
}
try (ResultSet rs = connection.getMetaData().getSchemas(null, "unknown")) {
assertGetSchemasResult(rs, list());
}
try (ResultSet rs = connection.getMetaData().getSchemas(TEST_CATALOG, "unknown")) {
assertGetSchemasResult(rs, list());
}
try (ResultSet rs = connection.getMetaData().getSchemas("unknown", "unknown")) {
assertGetSchemasResult(rs, list());
}
}
}
private static void assertGetSchemasResult(ResultSet rs, List<List<String>> expectedSchemas)
throws SQLException
{
List<List<Object>> data = readRows(rs);
assertEquals(data.size(), expectedSchemas.size());
for (List<Object> row : data) {
assertTrue(expectedSchemas.contains(list((String) row.get(1), (String) row.get(0))));
}
ResultSetMetaData metadata = rs.getMetaData();
assertEquals(metadata.getColumnCount(), 2);
assertEquals(metadata.getColumnLabel(1), "TABLE_SCHEM");
assertEquals(metadata.getColumnType(1), Types.LONGNVARCHAR);
assertEquals(metadata.getColumnLabel(2), "TABLE_CATALOG");
assertEquals(metadata.getColumnType(2), Types.LONGNVARCHAR);
}
@Test
public void testGetTables()
throws Exception
{
try (Connection connection = createConnection()) {
try (ResultSet rs = connection.getMetaData().getTables(null, null, null, null)) {
assertTableMetadata(rs);
Set<List<Object>> rows = ImmutableSet.copyOf(readRows(rs));
assertTrue(rows.contains(getTablesRow("information_schema", "tables")));
assertTrue(rows.contains(getTablesRow("information_schema", "schemata")));
}
}
try (Connection connection = createConnection()) {
try (ResultSet rs = connection.getMetaData().getTables(TEST_CATALOG, null, null, null)) {
assertTableMetadata(rs);
Set<List<Object>> rows = ImmutableSet.copyOf(readRows(rs));
assertTrue(rows.contains(getTablesRow("information_schema", "tables")));
assertTrue(rows.contains(getTablesRow("information_schema", "schemata")));
}
}
// no tables have an empty catalog
try (Connection connection = createConnection()) {
try (ResultSet rs = connection.getMetaData().getTables("", null, null, null)) {
assertTableMetadata(rs);
assertEquals(readRows(rs).size(), 0);
}
}
try (Connection connection = createConnection()) {
try (ResultSet rs = connection.getMetaData().getTables(TEST_CATALOG, "information_schema", null, null)) {
assertTableMetadata(rs);
Set<List<Object>> rows = ImmutableSet.copyOf(readRows(rs));
assertTrue(rows.contains(getTablesRow("information_schema", "tables")));
assertTrue(rows.contains(getTablesRow("information_schema", "schemata")));
}
}
// no tables have an empty schema
try (Connection connection = createConnection()) {
try (ResultSet rs = connection.getMetaData().getTables(TEST_CATALOG, "", null, null)) {
assertTableMetadata(rs);
assertEquals(readRows(rs).size(), 0);
}
}
try (Connection connection = createConnection()) {
try (ResultSet rs = connection.getMetaData().getTables(TEST_CATALOG, "information_schema", "tables", null)) {
assertTableMetadata(rs);
Set<List<Object>> rows = ImmutableSet.copyOf(readRows(rs));
assertTrue(rows.contains(getTablesRow("information_schema", "tables")));
assertFalse(rows.contains(getTablesRow("information_schema", "schemata")));
}
}
try (Connection connection = createConnection()) {
try (ResultSet rs = connection.getMetaData().getTables(TEST_CATALOG, "information_schema", "tables", array("TABLE"))) {
assertTableMetadata(rs);
Set<List<Object>> rows = ImmutableSet.copyOf(readRows(rs));
assertTrue(rows.contains(getTablesRow("information_schema", "tables")));
assertFalse(rows.contains(getTablesRow("information_schema", "schemata")));
}
}
try (Connection connection = createConnection()) {
try (ResultSet rs = connection.getMetaData().getTables(null, "information_schema", null, null)) {
assertTableMetadata(rs);
Set<List<Object>> rows = ImmutableSet.copyOf(readRows(rs));
assertTrue(rows.contains(getTablesRow("information_schema", "tables")));
assertTrue(rows.contains(getTablesRow("information_schema", "schemata")));
}
}
try (Connection connection = createConnection()) {
try (ResultSet rs = connection.getMetaData().getTables(null, null, "tables", null)) {
assertTableMetadata(rs);
Set<List<Object>> rows = ImmutableSet.copyOf(readRows(rs));
assertTrue(rows.contains(getTablesRow("information_schema", "tables")));
assertFalse(rows.contains(getTablesRow("information_schema", "schemata")));
}
}
try (Connection connection = createConnection()) {
try (ResultSet rs = connection.getMetaData().getTables(null, null, null, array("TABLE"))) {
assertTableMetadata(rs);
Set<List<Object>> rows = ImmutableSet.copyOf(readRows(rs));
assertTrue(rows.contains(getTablesRow("information_schema", "tables")));
assertTrue(rows.contains(getTablesRow("information_schema", "schemata")));
}
}
try (Connection connection = createConnection()) {
try (ResultSet rs = connection.getMetaData().getTables(TEST_CATALOG, "inf%", "tables", null)) {
assertTableMetadata(rs);
Set<List<Object>> rows = ImmutableSet.copyOf(readRows(rs));
assertTrue(rows.contains(getTablesRow("information_schema", "tables")));
assertFalse(rows.contains(getTablesRow("information_schema", "schemata")));
}
}
try (Connection connection = createConnection()) {
try (ResultSet rs = connection.getMetaData().getTables(TEST_CATALOG, "information_schema", "tab%", null)) {
assertTableMetadata(rs);
Set<List<Object>> rows = ImmutableSet.copyOf(readRows(rs));
assertTrue(rows.contains(getTablesRow("information_schema", "tables")));
assertFalse(rows.contains(getTablesRow("information_schema", "schemata")));
}
}
// no matching catalog
try (Connection connection = createConnection()) {
try (ResultSet rs = connection.getMetaData().getTables("unknown", "information_schema", "tables", array("TABLE"))) {
assertTableMetadata(rs);
assertEquals(readRows(rs).size(), 0);
}
}
// no matching schema
try (Connection connection = createConnection()) {
try (ResultSet rs = connection.getMetaData().getTables(TEST_CATALOG, "unknown", "tables", array("TABLE"))) {
assertTableMetadata(rs);
assertEquals(readRows(rs).size(), 0);
}
}
// no matching table
try (Connection connection = createConnection()) {
try (ResultSet rs = connection.getMetaData().getTables(TEST_CATALOG, "information_schema", "unknown", array("TABLE"))) {
assertTableMetadata(rs);
assertEquals(readRows(rs).size(), 0);
}
}
// no matching type
try (Connection connection = createConnection()) {
try (ResultSet rs = connection.getMetaData().getTables(TEST_CATALOG, "information_schema", "tables", array("unknown"))) {
assertTableMetadata(rs);
assertEquals(readRows(rs).size(), 0);
}
}
try (Connection connection = createConnection()) {
try (ResultSet rs = connection.getMetaData().getTables(TEST_CATALOG, "information_schema", "tables", array("unknown", "TABLE"))) {
assertTableMetadata(rs);
Set<List<Object>> rows = ImmutableSet.copyOf(readRows(rs));
assertTrue(rows.contains(getTablesRow("information_schema", "tables")));
assertFalse(rows.contains(getTablesRow("information_schema", "schemata")));
}
}
// empty type list
try (Connection connection = createConnection()) {
try (ResultSet rs = connection.getMetaData().getTables(TEST_CATALOG, "information_schema", "tables", array())) {
assertTableMetadata(rs);
assertEquals(readRows(rs).size(), 0);
}
}
}
private static List<Object> getTablesRow(String schema, String table)
{
return list(TEST_CATALOG, schema, table, "TABLE", null, null, null, null, null, null);
}
private static void assertTableMetadata(ResultSet rs)
throws SQLException
{
ResultSetMetaData metadata = rs.getMetaData();
assertEquals(metadata.getColumnCount(), 10);
assertEquals(metadata.getColumnLabel(1), "TABLE_CAT");
assertEquals(metadata.getColumnType(1), Types.LONGNVARCHAR);
assertEquals(metadata.getColumnLabel(2), "TABLE_SCHEM");
assertEquals(metadata.getColumnType(2), Types.LONGNVARCHAR);
assertEquals(metadata.getColumnLabel(3), "TABLE_NAME");
assertEquals(metadata.getColumnType(3), Types.LONGNVARCHAR);
assertEquals(metadata.getColumnLabel(4), "TABLE_TYPE");
assertEquals(metadata.getColumnType(4), Types.LONGNVARCHAR);
assertEquals(metadata.getColumnLabel(5), "REMARKS");
assertEquals(metadata.getColumnType(5), Types.LONGNVARCHAR);
assertEquals(metadata.getColumnLabel(6), "TYPE_CAT");
assertEquals(metadata.getColumnType(6), Types.LONGNVARCHAR);
assertEquals(metadata.getColumnLabel(7), "TYPE_SCHEM");
assertEquals(metadata.getColumnType(7), Types.LONGNVARCHAR);
assertEquals(metadata.getColumnLabel(8), "TYPE_NAME");
assertEquals(metadata.getColumnType(8), Types.LONGNVARCHAR);
assertEquals(metadata.getColumnLabel(9), "SELF_REFERENCING_COL_NAME");
assertEquals(metadata.getColumnType(9), Types.LONGNVARCHAR);
assertEquals(metadata.getColumnLabel(10), "REF_GENERATION");
assertEquals(metadata.getColumnType(10), Types.LONGNVARCHAR);
}
@Test
public void testGetTableTypes()
throws Exception
{
try (Connection connection = createConnection()) {
try (ResultSet tableTypes = connection.getMetaData().getTableTypes()) {
List<List<Object>> data = readRows(tableTypes);
assertEquals(data, list(list("TABLE"), list("VIEW")));
ResultSetMetaData metadata = tableTypes.getMetaData();
assertEquals(metadata.getColumnCount(), 1);
assertEquals(metadata.getColumnLabel(1), "TABLE_TYPE");
assertEquals(metadata.getColumnType(1), Types.LONGNVARCHAR);
}
}
}
    /**
     * Exercises DatabaseMetaData.getColumns filtering (catalog, schema,
     * table, column — exact and LIKE patterns) and the reported type
     * descriptors (DATA_TYPE, COLUMN_SIZE, NUM_PREC_RADIX, DECIMAL_DIGITS,
     * CHAR_OCTET_LENGTH) for every supported column type.
     */
    @Test
    public void testGetColumns()
            throws Exception
    {
        // null catalog/schema: "tables.table_name" matches in every catalog,
        // ordered blackhole, system (information_schema then jdbc), then TEST_CATALOG
        try (Connection connection = createConnection()) {
            try (ResultSet rs = connection.getMetaData().getColumns(null, null, "tables", "table_name")) {
                assertColumnMetadata(rs);
                assertTrue(rs.next());
                assertEquals(rs.getString("TABLE_CAT"), "blackhole");
                assertEquals(rs.getString("TABLE_SCHEM"), "information_schema");
                assertEquals(rs.getString("TABLE_NAME"), "tables");
                assertEquals(rs.getString("COLUMN_NAME"), "table_name");
                assertEquals(rs.getInt("DATA_TYPE"), Types.LONGNVARCHAR);
                assertTrue(rs.next());
                assertEquals(rs.getString("TABLE_CAT"), "system");
                assertEquals(rs.getString("TABLE_SCHEM"), "information_schema");
                assertTrue(rs.next());
                assertEquals(rs.getString("TABLE_CAT"), "system");
                assertEquals(rs.getString("TABLE_SCHEM"), "jdbc");
                assertTrue(rs.next());
                assertEquals(rs.getString("TABLE_CAT"), TEST_CATALOG);
                assertEquals(rs.getString("TABLE_SCHEM"), "information_schema");
                assertFalse(rs.next());
            }
        }
        // exact catalog narrows the match to one row
        try (Connection connection = createConnection()) {
            try (ResultSet rs = connection.getMetaData().getColumns(TEST_CATALOG, null, "tables", "table_name")) {
                assertColumnMetadata(rs);
                assertEquals(readRows(rs).size(), 1);
            }
        }
        // exact schema across all catalogs: one row per catalog
        try (Connection connection = createConnection()) {
            try (ResultSet rs = connection.getMetaData().getColumns(null, "information_schema", "tables", "table_name")) {
                assertColumnMetadata(rs);
                assertEquals(readRows(rs).size(), 3);
            }
        }
        try (Connection connection = createConnection()) {
            try (ResultSet rs = connection.getMetaData().getColumns(TEST_CATALOG, "information_schema", "tables", "table_name")) {
                assertColumnMetadata(rs);
                assertEquals(readRows(rs).size(), 1);
            }
        }
        // schema and table name LIKE patterns
        try (Connection connection = createConnection()) {
            try (ResultSet rs = connection.getMetaData().getColumns(TEST_CATALOG, "inf%", "tables", "table_name")) {
                assertColumnMetadata(rs);
                assertEquals(readRows(rs).size(), 1);
            }
        }
        try (Connection connection = createConnection()) {
            try (ResultSet rs = connection.getMetaData().getColumns(TEST_CATALOG, "information_schema", "tab%", "table_name")) {
                assertColumnMetadata(rs);
                assertEquals(readRows(rs).size(), 1);
            }
        }
        // column name LIKE pattern
        try (Connection connection = createConnection()) {
            try (ResultSet rs = connection.getMetaData().getColumns(TEST_CATALOG, "information_schema", "tables", "%m%")) {
                assertColumnMetadata(rs);
                assertTrue(rs.next());
                assertEquals(rs.getString("COLUMN_NAME"), "table_schema");
                assertTrue(rs.next());
                assertEquals(rs.getString("COLUMN_NAME"), "table_name");
                assertFalse(rs.next());
            }
        }
        // verify the reported type descriptor for every supported column type;
        // assertColumnSpec calls must stay in the CREATE TABLE column order
        try (Connection connection = createConnection("blackhole", "blackhole");
                Statement statement = connection.createStatement()) {
            assertEquals(statement.executeUpdate(
                    "CREATE TABLE test_get_columns_table (" +
                            "c_boolean boolean, " +
                            "c_bigint bigint, " +
                            "c_integer integer, " +
                            "c_smallint smallint, " +
                            "c_tinyint tinyint, " +
                            "c_real real, " +
                            "c_double double, " +
                            "c_varchar_1234 varchar(1234), " +
                            "c_varchar varchar, " +
                            "c_char_345 char(345), " +
                            "c_varbinary varbinary, " +
                            "c_time time, " +
                            "c_time_with_time_zone \"time with time zone\", " +
                            "c_timestamp timestamp, " +
                            "c_timestamp_with_time_zone \"timestamp with time zone\", " +
                            "c_date date, " +
                            "c_decimal_8_2 decimal(8,2), " +
                            "c_decimal_38_0 decimal(38,0), " +
                            "c_array array<bigint>, " +
                            "c_color color" +
                            ")"), 0);
            try (ResultSet rs = connection.getMetaData().getColumns("blackhole", "blackhole", "test_get_columns_table", null)) {
                assertColumnMetadata(rs);
                assertColumnSpec(rs, Types.BOOLEAN, null, null, null, null, BooleanType.BOOLEAN);
                assertColumnSpec(rs, Types.BIGINT, 19L, 10L, null, null, BigintType.BIGINT);
                assertColumnSpec(rs, Types.INTEGER, 10L, 10L, null, null, IntegerType.INTEGER);
                assertColumnSpec(rs, Types.SMALLINT, 5L, 10L, null, null, SmallintType.SMALLINT);
                assertColumnSpec(rs, Types.TINYINT, 3L, 10L, null, null, TinyintType.TINYINT);
                assertColumnSpec(rs, Types.REAL, 24L, 2L, null, null, RealType.REAL);
                assertColumnSpec(rs, Types.DOUBLE, 53L, 2L, null, null, DoubleType.DOUBLE);
                assertColumnSpec(rs, Types.LONGNVARCHAR, 1234L, null, null, 1234L, createVarcharType(1234));
                assertColumnSpec(rs, Types.LONGNVARCHAR, (long) Integer.MAX_VALUE, null, null, (long) Integer.MAX_VALUE, createUnboundedVarcharType());
                assertColumnSpec(rs, Types.CHAR, 345L, null, null, 345L, createCharType(345));
                assertColumnSpec(rs, Types.LONGVARBINARY, (long) Integer.MAX_VALUE, null, null, (long) Integer.MAX_VALUE, VarbinaryType.VARBINARY);
                assertColumnSpec(rs, Types.TIME, 8L, null, null, null, TimeType.TIME);
                assertColumnSpec(rs, Types.TIME_WITH_TIMEZONE, 14L, null, null, null, TimeWithTimeZoneType.TIME_WITH_TIME_ZONE);
                assertColumnSpec(rs, Types.TIMESTAMP, 23L, null, null, null, TimestampType.TIMESTAMP);
                assertColumnSpec(rs, Types.TIMESTAMP_WITH_TIMEZONE, 29L, null, null, null, TimestampWithTimeZoneType.TIMESTAMP_WITH_TIME_ZONE);
                assertColumnSpec(rs, Types.DATE, 14L, null, null, null, DateType.DATE);
                assertColumnSpec(rs, Types.DECIMAL, 8L, 10L, 2L, null, createDecimalType(8, 2));
                assertColumnSpec(rs, Types.DECIMAL, 38L, 10L, 0L, null, createDecimalType(38, 0));
                assertColumnSpec(rs, Types.ARRAY, null, null, null, null, new ArrayType(BigintType.BIGINT));
                assertColumnSpec(rs, Types.JAVA_OBJECT, null, null, null, null, ColorType.COLOR);
                assertFalse(rs.next());
            }
        }
    }
private static void assertColumnSpec(ResultSet rs, int jdbcType, Long columnSize, Long numPrecRadix, Long decimalDigits, Long charOctetLength, Type type)
throws SQLException
{
String message = " of " + type.getDisplayName() + ": ";
assertTrue(rs.next());
assertEquals(rs.getObject("DATA_TYPE"), (long) jdbcType, "DATA_TYPE" + message);
assertEquals(rs.getObject("COLUMN_SIZE"), columnSize, "COLUMN_SIZE" + message);
assertEquals(rs.getObject("NUM_PREC_RADIX"), numPrecRadix, "NUM_PREC_RADIX" + message);
assertEquals(rs.getObject("DECIMAL_DIGITS"), decimalDigits, "DECIMAL_DIGITS" + message);
assertEquals(rs.getObject("CHAR_OCTET_LENGTH"), charOctetLength, "CHAR_OCTET_LENGTH" + message);
}
private static void assertColumnMetadata(ResultSet rs)
throws SQLException
{
ResultSetMetaData metadata = rs.getMetaData();
assertEquals(metadata.getColumnCount(), 24);
assertEquals(metadata.getColumnLabel(1), "TABLE_CAT");
assertEquals(metadata.getColumnType(1), Types.LONGNVARCHAR);
assertEquals(metadata.getColumnLabel(2), "TABLE_SCHEM");
assertEquals(metadata.getColumnType(2), Types.LONGNVARCHAR);
assertEquals(metadata.getColumnLabel(3), "TABLE_NAME");
assertEquals(metadata.getColumnType(3), Types.LONGNVARCHAR);
assertEquals(metadata.getColumnLabel(4), "COLUMN_NAME");
assertEquals(metadata.getColumnType(4), Types.LONGNVARCHAR);
assertEquals(metadata.getColumnLabel(5), "DATA_TYPE");
assertEquals(metadata.getColumnType(5), Types.BIGINT);
assertEquals(metadata.getColumnLabel(6), "TYPE_NAME");
assertEquals(metadata.getColumnType(6), Types.LONGNVARCHAR);
assertEquals(metadata.getColumnLabel(7), "COLUMN_SIZE");
assertEquals(metadata.getColumnType(7), Types.BIGINT);
assertEquals(metadata.getColumnLabel(8), "BUFFER_LENGTH");
assertEquals(metadata.getColumnType(8), Types.BIGINT);
assertEquals(metadata.getColumnLabel(9), "DECIMAL_DIGITS");
assertEquals(metadata.getColumnType(9), Types.BIGINT);
assertEquals(metadata.getColumnLabel(10), "NUM_PREC_RADIX");
assertEquals(metadata.getColumnType(10), Types.BIGINT);
assertEquals(metadata.getColumnLabel(11), "NULLABLE");
assertEquals(metadata.getColumnType(11), Types.BIGINT);
assertEquals(metadata.getColumnLabel(12), "REMARKS");
assertEquals(metadata.getColumnType(12), Types.LONGNVARCHAR);
assertEquals(metadata.getColumnLabel(13), "COLUMN_DEF");
assertEquals(metadata.getColumnType(13), Types.LONGNVARCHAR);
assertEquals(metadata.getColumnLabel(14), "SQL_DATA_TYPE");
assertEquals(metadata.getColumnType(14), Types.BIGINT);
assertEquals(metadata.getColumnLabel(15), "SQL_DATETIME_SUB");
assertEquals(metadata.getColumnType(15), Types.BIGINT);
assertEquals(metadata.getColumnLabel(16), "CHAR_OCTET_LENGTH");
assertEquals(metadata.getColumnType(16), Types.BIGINT);
assertEquals(metadata.getColumnLabel(17), "ORDINAL_POSITION");
assertEquals(metadata.getColumnType(17), Types.BIGINT);
assertEquals(metadata.getColumnLabel(18), "IS_NULLABLE");
assertEquals(metadata.getColumnType(18), Types.LONGNVARCHAR);
assertEquals(metadata.getColumnLabel(19), "SCOPE_CATALOG");
assertEquals(metadata.getColumnType(19), Types.LONGNVARCHAR);
assertEquals(metadata.getColumnLabel(20), "SCOPE_SCHEMA");
assertEquals(metadata.getColumnType(20), Types.LONGNVARCHAR);
assertEquals(metadata.getColumnLabel(21), "SCOPE_TABLE");
assertEquals(metadata.getColumnType(21), Types.LONGNVARCHAR);
assertEquals(metadata.getColumnLabel(22), "SOURCE_DATA_TYPE");
assertEquals(metadata.getColumnType(22), Types.BIGINT);
assertEquals(metadata.getColumnLabel(23), "IS_AUTOINCREMENT");
assertEquals(metadata.getColumnType(23), Types.LONGNVARCHAR);
assertEquals(metadata.getColumnLabel(24), "IS_GENERATEDCOLUMN");
assertEquals(metadata.getColumnType(24), Types.LONGNVARCHAR);
}
@Test
public void testGetPseudoColumns()
throws Exception
{
try (Connection connection = createConnection()) {
try (ResultSet rs = connection.getMetaData().getPseudoColumns(null, null, null, null)) {
assertFalse(rs.next());
}
}
}
@Test
public void testGetProcedures()
throws Exception
{
try (Connection connection = createConnection()) {
try (ResultSet rs = connection.getMetaData().getProcedures(null, null, null)) {
assertFalse(rs.next());
}
}
}
@Test
public void testGetProcedureColumns()
throws Exception
{
try (Connection connection = createConnection()) {
try (ResultSet rs = connection.getMetaData().getProcedureColumns(null, null, null, null)) {
assertFalse(rs.next());
}
}
}
@Test
public void testGetSuperTables()
throws Exception
{
try (Connection connection = createConnection()) {
try (ResultSet rs = connection.getMetaData().getSuperTables(null, null, null)) {
assertFalse(rs.next());
}
}
}
@Test
public void testGetUdts()
throws Exception
{
try (Connection connection = createConnection()) {
try (ResultSet rs = connection.getMetaData().getUDTs(null, null, null, null)) {
assertFalse(rs.next());
}
}
}
@Test
public void testGetAttributes()
throws Exception
{
try (Connection connection = createConnection()) {
try (ResultSet rs = connection.getMetaData().getAttributes(null, null, null, null)) {
assertFalse(rs.next());
}
}
}
@Test
public void testGetSuperTypes()
throws Exception
{
try (Connection connection = createConnection()) {
try (ResultSet rs = connection.getMetaData().getSuperTypes(null, null, null)) {
assertFalse(rs.next());
}
}
}
@Test
public void testExecuteWithQuery()
throws Exception
{
try (Connection connection = createConnection()) {
try (Statement statement = connection.createStatement()) {
assertTrue(statement.execute("SELECT 123 x, 'foo' y, CAST(NULL AS bigint) z"));
ResultSet rs = statement.getResultSet();
assertEquals(statement.getUpdateCount(), -1);
assertEquals(statement.getLargeUpdateCount(), -1);
assertTrue(rs.next());
assertEquals(rs.getLong(1), 123);
assertFalse(rs.wasNull());
assertEquals(rs.getLong("x"), 123);
assertFalse(rs.wasNull());
assertEquals(rs.getLong(3), 0);
assertTrue(rs.wasNull());
assertEquals(rs.getLong("z"), 0);
assertTrue(rs.wasNull());
assertNull(rs.getObject("z"));
assertTrue(rs.wasNull());
assertEquals(rs.getString(2), "foo");
assertFalse(rs.wasNull());
assertEquals(rs.getString("y"), "foo");
assertFalse(rs.wasNull());
assertFalse(rs.next());
}
}
}
@Test
public void testExecuteUpdateWithInsert()
throws Exception
{
try (Connection connection = createConnection("blackhole", "blackhole")) {
try (Statement statement = connection.createStatement()) {
assertEquals(statement.executeUpdate("INSERT INTO test_table VALUES (1), (2)"), 2);
assertNull(statement.getResultSet());
assertEquals(statement.getUpdateCount(), 2);
assertEquals(statement.getLargeUpdateCount(), 2);
}
}
}
@Test
public void testExecuteUpdateWithCreateTable()
throws Exception
{
try (Connection connection = createConnection("blackhole", "blackhole")) {
try (Statement statement = connection.createStatement()) {
assertEquals(statement.executeUpdate("CREATE TABLE test_execute_create (x bigint)"), 0);
assertNull(statement.getResultSet());
assertEquals(statement.getUpdateCount(), 0);
assertEquals(statement.getLargeUpdateCount(), 0);
}
}
}
@Test
public void testExecuteUpdateWithQuery()
throws Exception
{
try (Connection connection = createConnection("blackhole", "blackhole")) {
try (Statement statement = connection.createStatement()) {
String sql = "SELECT 123 x, 'foo' y, CAST(NULL AS bigint) z";
try {
statement.executeUpdate(sql);
fail("expected exception");
}
catch (SQLException e) {
assertEquals(e.getMessage(), "SQL is not an update statement: " + sql);
}
}
}
}
@Test
public void testExecuteQueryWithInsert()
throws Exception
{
try (Connection connection = createConnection("blackhole", "blackhole")) {
try (Statement statement = connection.createStatement()) {
String sql = "INSERT INTO test_table VALUES (1)";
try {
statement.executeQuery(sql);
fail("expected exception");
}
catch (SQLException e) {
assertEquals(e.getMessage(), "SQL statement is not a query: " + sql);
}
}
}
}
@Test
public void testStatementReuse()
        throws Exception
{
    // A single Statement must be reusable across updates and queries,
    // with getUpdateCount()/getResultSet() reflecting only the last use.
    try (Connection connection = createConnection("blackhole", "blackhole");
            Statement statement = connection.createStatement()) {
        // first use: update
        assertFalse(statement.execute("INSERT INTO test_table VALUES (1), (2)"));
        assertNull(statement.getResultSet());
        assertEquals(statement.getUpdateCount(), 2);
        assertEquals(statement.getLargeUpdateCount(), 2);

        // second use: query
        assertTrue(statement.execute("SELECT 123 x, 'foo' y, CAST(NULL AS bigint) z"));
        ResultSet resultSet = statement.getResultSet();
        assertNotNull(resultSet);
        assertEquals(statement.getUpdateCount(), -1);
        assertEquals(statement.getLargeUpdateCount(), -1);
        resultSet.close();

        // third use: update again
        assertFalse(statement.execute("INSERT INTO test_table VALUES (1), (2), (3)"));
        assertNull(statement.getResultSet());
        assertEquals(statement.getUpdateCount(), 3);
        assertEquals(statement.getLargeUpdateCount(), 3);
    }
}
@Test
public void testGetUpdateCount()
        throws Exception
{
    // Per JDBC, a query leaves the update count at -1.
    try (Connection connection = createConnection();
            Statement statement = connection.createStatement()) {
        assertTrue(statement.execute("SELECT 123 x, 'foo' y"));
        assertEquals(statement.getUpdateCount(), -1);
        assertEquals(statement.getLargeUpdateCount(), -1);
    }
}
@Test
public void testResultSetClose()
        throws Exception
{
    // Closing a result set must be observable via isClosed().
    try (Connection connection = createConnection();
            Statement statement = connection.createStatement()) {
        assertTrue(statement.execute("SELECT 123 x, 'foo' y"));
        ResultSet resultSet = statement.getResultSet();
        assertFalse(resultSet.isClosed());
        resultSet.close();
        assertTrue(resultSet.isClosed());
    }
}
@Test
public void testGetResultSet()
        throws Exception
{
    try (Connection connection = createConnection()) {
        try (Statement statement = connection.createStatement()) {
            assertTrue(statement.execute("SELECT 123 x, 'foo' y"));
            ResultSet result = statement.getResultSet();
            assertNotNull(result);
            assertFalse(result.isClosed());
            // Advancing to the (nonexistent) next result must close the
            // current result set.
            statement.getMoreResults();
            assertTrue(result.isClosed());
            // A fresh execute produces a fresh, open result set.
            assertTrue(statement.execute("SELECT 123 x, 'foo' y"));
            result = statement.getResultSet();
            assertNotNull(result);
            assertFalse(result.isClosed());
            // Explicit CLOSE_CURRENT_RESULT mode: no more results remain.
            assertTrue(statement.execute("SELECT 123 x, 'foo' y"));
            assertFalse(statement.getMoreResults(Statement.CLOSE_CURRENT_RESULT));
        }
    }
}
@Test(expectedExceptions = SQLFeatureNotSupportedException.class, expectedExceptionsMessageRegExp = "Multiple open results not supported")
public void testGetMoreResultsException()
        throws Exception
{
    // KEEP_CURRENT_RESULT would require multiple open results, which this
    // driver does not support.
    try (Connection connection = createConnection();
            Statement statement = connection.createStatement()) {
        assertTrue(statement.execute("SELECT 123 x, 'foo' y"));
        statement.getMoreResults(Statement.KEEP_CURRENT_RESULT);
    }
}
@Test
public void testSetTimeZoneId()
        throws Exception
{
    String sql = "SELECT current_timezone() zone, TIMESTAMP '2001-02-03 3:04:05' ts";
    TimeZoneKey defaultZoneKey = TimeZoneKey.getTimeZoneKey(TimeZone.getDefault().getID());
    DateTimeZone defaultZone = DateTimeZone.forTimeZone(TimeZone.getDefault());
    try (Connection connection = createConnection()) {
        // Without an override, the session reports the JVM default zone.
        try (Statement statement = connection.createStatement();
                ResultSet resultSet = statement.executeQuery(sql)) {
            assertTrue(resultSet.next());
            assertEquals(resultSet.getString("zone"), defaultZoneKey.getId());
            assertEquals(resultSet.getTimestamp("ts"), new Timestamp(new DateTime(2001, 2, 3, 3, 4, 5, defaultZone).getMillis()));
        }
        // Switching the connection to UTC changes both the reported zone
        // and the timestamp's wall-clock interpretation.
        connection.unwrap(PrestoConnection.class).setTimeZoneId("UTC");
        try (Statement statement = connection.createStatement();
                ResultSet resultSet = statement.executeQuery(sql)) {
            assertTrue(resultSet.next());
            assertEquals(resultSet.getString("zone"), "UTC");
            assertEquals(resultSet.getTimestamp("ts"), new Timestamp(new DateTime(2001, 2, 3, 3, 4, 5, DateTimeZone.UTC).getMillis()));
        }
    }
}
@Test
public void testConnectionStringWithCatalogAndSchema()
        throws Exception
{
    String prefix = format("jdbc:presto://%s", server.getAddress());

    // Each URL shape is checked for the catalog/schema parsed from its path.
    // Fix: the original leaked all six connections; each is now closed via
    // try-with-resources.
    try (Connection connection = DriverManager.getConnection(prefix + "/a/b/", "test", null)) {
        assertEquals(connection.getCatalog(), "a");
        assertEquals(connection.getSchema(), "b");
    }

    try (Connection connection = DriverManager.getConnection(prefix + "/a/b", "test", null)) {
        assertEquals(connection.getCatalog(), "a");
        assertEquals(connection.getSchema(), "b");
    }

    try (Connection connection = DriverManager.getConnection(prefix + "/a/", "test", null)) {
        assertEquals(connection.getCatalog(), "a");
        assertNull(connection.getSchema());
    }

    try (Connection connection = DriverManager.getConnection(prefix + "/a", "test", null)) {
        assertEquals(connection.getCatalog(), "a");
        assertNull(connection.getSchema());
    }

    try (Connection connection = DriverManager.getConnection(prefix + "/", "test", null)) {
        assertNull(connection.getCatalog());
        assertNull(connection.getSchema());
    }

    try (Connection connection = DriverManager.getConnection(prefix, "test", null)) {
        assertNull(connection.getCatalog());
        assertNull(connection.getSchema());
    }
}
@Test
public void testConnectionWithCatalogAndSchema()
        throws Exception
{
    // With catalog and schema on the connection, the table reference in the
    // query needs no qualification.
    String sql = "" +
            "SELECT table_catalog, table_schema " +
            "FROM tables " +
            "WHERE table_schema = 'information_schema' " +
            "  AND table_name = 'tables'";
    try (Connection connection = createConnection(TEST_CATALOG, "information_schema");
            Statement statement = connection.createStatement();
            ResultSet resultSet = statement.executeQuery(sql)) {
        ResultSetMetaData metadata = resultSet.getMetaData();
        assertEquals(metadata.getColumnCount(), 2);
        assertEquals(metadata.getColumnLabel(1), "table_catalog");
        assertEquals(metadata.getColumnLabel(2), "table_schema");
        assertTrue(resultSet.next());
        assertEquals(resultSet.getString("table_catalog"), TEST_CATALOG);
    }
}
@Test
public void testConnectionWithCatalog()
        throws Exception
{
    // Catalog-only connection: the query must qualify the schema itself.
    String sql = "" +
            "SELECT table_catalog, table_schema " +
            "FROM information_schema.tables " +
            "WHERE table_schema = 'information_schema' " +
            "  AND table_name = 'tables'";
    try (Connection connection = createConnection(TEST_CATALOG);
            Statement statement = connection.createStatement();
            ResultSet resultSet = statement.executeQuery(sql)) {
        ResultSetMetaData metadata = resultSet.getMetaData();
        assertEquals(metadata.getColumnCount(), 2);
        assertEquals(metadata.getColumnLabel(1), "table_catalog");
        assertEquals(metadata.getColumnLabel(2), "table_schema");
        assertTrue(resultSet.next());
        assertEquals(resultSet.getString("table_catalog"), TEST_CATALOG);
    }
}
@Test
public void testConnectionResourceHandling()
        throws Exception
{
    // Keep many connections alive simultaneously to verify the driver does
    // not exhaust resources. Fix: close every opened connection even when a
    // probe query fails mid-loop (the original leaked them on failure).
    List<Connection> connections = new ArrayList<>();
    try {
        for (int i = 0; i < 100; i++) {
            Connection connection = createConnection();
            connections.add(connection);
            try (Statement statement = connection.createStatement();
                    ResultSet rs = statement.executeQuery("SELECT 123")) {
                assertTrue(rs.next());
            }
        }
    }
    finally {
        for (Connection connection : connections) {
            try {
                connection.close();
            }
            catch (SQLException ignored) {
                // best-effort cleanup; do not mask the primary failure
            }
        }
    }
}
@Test(expectedExceptions = SQLException.class, expectedExceptionsMessageRegExp = ".* does not exist")
public void testBadQuery()
        throws Exception
{
    // Querying a nonexistent table must surface the server's SQLException.
    try (Connection connection = createConnection("test", "tiny");
            Statement statement = connection.createStatement();
            ResultSet ignored = statement.executeQuery("SELECT * FROM bad_table")) {
        fail("expected exception");
    }
}
@Test(expectedExceptions = SQLException.class, expectedExceptionsMessageRegExp = "Username property \\(user\\) must be set")
public void testUserIsRequired()
        throws Exception
{
    // Connecting without a user must be rejected by the driver; the
    // test.invalid host guarantees no network connection is attempted first.
    try (Connection ignored = DriverManager.getConnection("jdbc:presto://test.invalid/")) {
        fail("expected exception");
    }
}
@Test(timeOut = 10000)
public void testQueryCancellation()
        throws Exception
{
    // Create a blackhole table whose single page takes a minute to produce,
    // guaranteeing the query below is still running when it is cancelled.
    try (Connection connection = createConnection("blackhole", "blackhole");
            Statement statement = connection.createStatement()) {
        statement.executeUpdate("CREATE TABLE test_cancellation (key BIGINT) " +
                "WITH (" +
                "   split_count = 1, " +
                "   pages_per_split = 1, " +
                "   rows_per_page = 1, " +
                "   page_processing_delay = '1m'" +
                ")");
    }

    CountDownLatch queryStarted = new CountDownLatch(1);
    CountDownLatch queryFinished = new CountDownLatch(1);
    AtomicReference<String> queryId = new AtomicReference<>();
    AtomicReference<Throwable> queryFailure = new AtomicReference<>();

    // Run the victim query on a worker thread, recording its query ID and
    // whatever failure eventually aborts it.
    // NOTE(review): the schema here is "default" while the table was created
    // through the "blackhole" schema — presumably the blackhole connector
    // resolves it regardless; confirm.
    Future<?> queryFuture = executorService.submit(() -> {
        try (Connection connection = createConnection("blackhole", "default");
                Statement statement = connection.createStatement();
                ResultSet resultSet = statement.executeQuery("SELECT * FROM test_cancellation")) {
            queryId.set(resultSet.unwrap(PrestoResultSet.class).getQueryId());
            queryStarted.countDown();
            try {
                // Blocks on the delayed page; thread interruption surfaces
                // here as a SQLException.
                resultSet.next();
            }
            catch (SQLException t) {
                queryFailure.set(t);
            }
            finally {
                queryFinished.countDown();
            }
        }
        return null;
    });

    // start query and make sure it is not finished
    queryStarted.await(10, SECONDS);
    assertNotNull(queryId.get());
    assertFalse(getQueryState(queryId.get()).isDone());

    // interrupt JDBC thread that is waiting for query results
    queryFuture.cancel(true);

    // make sure the query was aborted
    queryFinished.await(10, SECONDS);
    assertNotNull(queryFailure.get());
    assertEquals(getQueryState(queryId.get()), FAILED);

    // Clean up the helper table.
    try (Connection connection = createConnection("blackhole", "blackhole");
            Statement statement = connection.createStatement()) {
        statement.executeUpdate("DROP TABLE test_cancellation");
    }
}
private QueryState getQueryState(String queryId)
        throws SQLException
{
    // The coordinator exposes per-query state via system.runtime.queries.
    String sql = format("SELECT state FROM system.runtime.queries WHERE query_id = '%s'", queryId);
    try (Connection connection = createConnection();
            Statement statement = connection.createStatement();
            ResultSet resultSet = statement.executeQuery(sql)) {
        assertTrue(resultSet.next(), "Query was not found");
        String state = requireNonNull(resultSet.getString(1));
        return QueryState.valueOf(state);
    }
}
// Opens a connection with no catalog or schema in the URL path.
private Connection createConnection()
        throws SQLException
{
    return DriverManager.getConnection(format("jdbc:presto://%s", server.getAddress()), "test", null);
}
// Opens a connection with only a catalog in the URL path.
private Connection createConnection(String catalog)
        throws SQLException
{
    return DriverManager.getConnection(format("jdbc:presto://%s/%s", server.getAddress(), catalog), "test", null);
}
// Opens a connection with both catalog and schema in the URL path.
private Connection createConnection(String catalog, String schema)
        throws SQLException
{
    return DriverManager.getConnection(format("jdbc:presto://%s/%s/%s", server.getAddress(), catalog, schema), "test", null);
}
// Drains the result set into an immutable list of rows, each row being the
// column values in ordinal order (JDBC columns are 1-based).
private static List<List<Object>> readRows(ResultSet rs)
        throws SQLException
{
    ImmutableList.Builder<List<Object>> rows = ImmutableList.builder();
    int columnCount = rs.getMetaData().getColumnCount();
    while (rs.next()) {
        // Presize each row: the column count is fixed for the result set.
        List<Object> row = new ArrayList<>(columnCount);
        for (int i = 1; i <= columnCount; i++) {
            row.add(rs.getObject(i));
        }
        rows.add(row);
    }
    return rows.build();
}
// Shorthand for a fixed-size list view over the given elements.
@SafeVarargs
private static <T> List<T> list(T... elements)
{
    return asList(elements);
}
// Shorthand for building a typed array from varargs.
@SafeVarargs
private static <T> T[] array(T... elements)
{
    return elements;
}
// Best-effort close for cleanup paths: tolerates null and suppresses any
// exception so cleanup cannot mask a test's primary failure.
static void closeQuietly(AutoCloseable closeable)
{
    if (closeable == null) {
        return;
    }
    try {
        closeable.close();
    }
    catch (Exception ignored) {
        // deliberately swallowed: cleanup is best-effort
    }
}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/vision/v1/product_search_service.proto
package com.google.cloud.vision.v1;
/**
*
*
* <pre>
* A `ReferenceImage` represents a product image and its associated metadata,
* such as bounding boxes.
* </pre>
*
* Protobuf type {@code google.cloud.vision.v1.ReferenceImage}
*/
public final class ReferenceImage extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.vision.v1.ReferenceImage)
ReferenceImageOrBuilder {
private static final long serialVersionUID = 0L;
// Use ReferenceImage.newBuilder() to construct.
// Copies the builder's state into the new immutable message instance.
private ReferenceImage(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// Default instance: empty strings and a shared empty polygon list.
private ReferenceImage() {
  name_ = "";
  uri_ = "";
  boundingPolys_ = java.util.Collections.emptyList();
}
// Fields present on the wire but not known to this generated schema.
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
  return this.unknownFields;
}
// Wire-format parsing constructor: reads tag/value pairs until end of
// stream (tag 0), preserving unrecognized fields in unknownFields.
private ReferenceImage(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  this();
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  int mutable_bitField0_ = 0;
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0: // end of input
          done = true;
          break;
        case 10: // field 1 (name): length-delimited UTF-8 string
          {
            java.lang.String s = input.readStringRequireUtf8();
            name_ = s;
            break;
          }
        case 18: // field 2 (uri): length-delimited UTF-8 string
          {
            java.lang.String s = input.readStringRequireUtf8();
            uri_ = s;
            break;
          }
        case 26: // field 3 (bounding_polys): repeated message
          {
            // Swap the shared empty list for a mutable one on first
            // element; bit 0x04 records that the list is now mutable.
            if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
              boundingPolys_ = new java.util.ArrayList<com.google.cloud.vision.v1.BoundingPoly>();
              mutable_bitField0_ |= 0x00000004;
            }
            boundingPolys_.add(
                input.readMessage(
                    com.google.cloud.vision.v1.BoundingPoly.parser(), extensionRegistry));
            break;
          }
        default:
          {
            // Unknown tag: retain the raw bytes unless the stream ended.
            if (!parseUnknownFieldProto3(input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
  } finally {
    // Seal the repeated field and attach the collected unknown fields,
    // even when parsing failed partway through.
    if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
      boundingPolys_ = java.util.Collections.unmodifiableList(boundingPolys_);
    }
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
// Descriptor plumbing linking this class to its entry in the generated
// file descriptor for product_search_service.proto.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.vision.v1.ProductSearchServiceProto
      .internal_static_google_cloud_vision_v1_ReferenceImage_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.vision.v1.ProductSearchServiceProto
      .internal_static_google_cloud_vision_v1_ReferenceImage_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.vision.v1.ReferenceImage.class,
          com.google.cloud.vision.v1.ReferenceImage.Builder.class);
}
private int bitField0_;
public static final int NAME_FIELD_NUMBER = 1;
private volatile java.lang.Object name_;
/**
*
*
* <pre>
* The resource name of the reference image.
* Format is:
* `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID/referenceImages/IMAGE_ID`.
* This field is ignored when creating a reference image.
* </pre>
*
* <code>string name = 1;</code>
*/
public java.lang.String getName() {
  java.lang.Object ref = name_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Field still holds the wire-format ByteString: decode once and cache
    // the String back into name_ (idempotent, so benign under races).
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    name_ = s;
    return s;
  }
}
/**
*
*
* <pre>
* The resource name of the reference image.
* Format is:
* `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID/referenceImages/IMAGE_ID`.
* This field is ignored when creating a reference image.
* </pre>
*
* <code>string name = 1;</code>
*/
public com.google.protobuf.ByteString getNameBytes() {
  java.lang.Object ref = name_;
  if (ref instanceof java.lang.String) {
    // Field holds a decoded String: encode once and cache the ByteString.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    name_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int URI_FIELD_NUMBER = 2;
private volatile java.lang.Object uri_;
/**
*
*
* <pre>
* The Google Cloud Storage URI of the reference image.
* The URI must start with `gs://`.
* Required.
* </pre>
*
* <code>string uri = 2;</code>
*/
public java.lang.String getUri() {
  java.lang.Object ref = uri_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Decode the cached wire ByteString once and keep the String.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    uri_ = s;
    return s;
  }
}
/**
*
*
* <pre>
* The Google Cloud Storage URI of the reference image.
* The URI must start with `gs://`.
* Required.
* </pre>
*
* <code>string uri = 2;</code>
*/
public com.google.protobuf.ByteString getUriBytes() {
  java.lang.Object ref = uri_;
  if (ref instanceof java.lang.String) {
    // Encode once and cache the ByteString back into the field.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    uri_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int BOUNDING_POLYS_FIELD_NUMBER = 3;
private java.util.List<com.google.cloud.vision.v1.BoundingPoly> boundingPolys_;
/**
*
*
* <pre>
* Bounding polygons around the areas of interest in the reference image.
* Optional. If this field is empty, the system will try to detect regions of
* interest. At most 10 bounding polygons will be used.
* The provided shape is converted into a non-rotated rectangle. Once
* converted, the small edge of the rectangle must be greater than or equal
* to 300 pixels. The aspect ratio must be 1:4 or less (i.e. 1:3 is ok; 1:5
* is not).
* </pre>
*
* <code>repeated .google.cloud.vision.v1.BoundingPoly bounding_polys = 3;</code>
*/
// Direct view of the internal list (unmodifiable once parsed or built).
public java.util.List<com.google.cloud.vision.v1.BoundingPoly> getBoundingPolysList() {
  return boundingPolys_;
}
/**
*
*
* <pre>
* Bounding polygons around the areas of interest in the reference image.
* Optional. If this field is empty, the system will try to detect regions of
* interest. At most 10 bounding polygons will be used.
* The provided shape is converted into a non-rotated rectangle. Once
* converted, the small edge of the rectangle must be greater than or equal
* to 300 pixels. The aspect ratio must be 1:4 or less (i.e. 1:3 is ok; 1:5
* is not).
* </pre>
*
* <code>repeated .google.cloud.vision.v1.BoundingPoly bounding_polys = 3;</code>
*/
// The same internal list, typed through the OrBuilder interface.
public java.util.List<? extends com.google.cloud.vision.v1.BoundingPolyOrBuilder>
    getBoundingPolysOrBuilderList() {
  return boundingPolys_;
}
/**
*
*
* <pre>
* Bounding polygons around the areas of interest in the reference image.
* Optional. If this field is empty, the system will try to detect regions of
* interest. At most 10 bounding polygons will be used.
* The provided shape is converted into a non-rotated rectangle. Once
* converted, the small edge of the rectangle must be greater than or equal
* to 300 pixels. The aspect ratio must be 1:4 or less (i.e. 1:3 is ok; 1:5
* is not).
* </pre>
*
* <code>repeated .google.cloud.vision.v1.BoundingPoly bounding_polys = 3;</code>
*/
// Number of bounding polygons in this message.
public int getBoundingPolysCount() {
  return boundingPolys_.size();
}
/**
*
*
* <pre>
* Bounding polygons around the areas of interest in the reference image.
* Optional. If this field is empty, the system will try to detect regions of
* interest. At most 10 bounding polygons will be used.
* The provided shape is converted into a non-rotated rectangle. Once
* converted, the small edge of the rectangle must be greater than or equal
* to 300 pixels. The aspect ratio must be 1:4 or less (i.e. 1:3 is ok; 1:5
* is not).
* </pre>
*
* <code>repeated .google.cloud.vision.v1.BoundingPoly bounding_polys = 3;</code>
*/
// Element accessor (0-based index).
public com.google.cloud.vision.v1.BoundingPoly getBoundingPolys(int index) {
  return boundingPolys_.get(index);
}
/**
*
*
* <pre>
* Bounding polygons around the areas of interest in the reference image.
* Optional. If this field is empty, the system will try to detect regions of
* interest. At most 10 bounding polygons will be used.
* The provided shape is converted into a non-rotated rectangle. Once
* converted, the small edge of the rectangle must be greater than or equal
* to 300 pixels. The aspect ratio must be 1:4 or less (i.e. 1:3 is ok; 1:5
* is not).
* </pre>
*
* <code>repeated .google.cloud.vision.v1.BoundingPoly bounding_polys = 3;</code>
*/
// Element accessor through the OrBuilder interface (0-based index).
public com.google.cloud.vision.v1.BoundingPolyOrBuilder getBoundingPolysOrBuilder(int index) {
  return boundingPolys_.get(index);
}
private byte memoizedIsInitialized = -1;
// proto3 message with no required fields: always initialized. The result
// is memoized (-1 = not yet computed, 0 = false, 1 = true).
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}
// Serializes fields in field-number order; string fields at their proto3
// default (empty) are skipped entirely.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (!getNameBytes().isEmpty()) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
  }
  if (!getUriBytes().isEmpty()) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, uri_);
  }
  for (int i = 0; i < boundingPolys_.size(); i++) {
    output.writeMessage(3, boundingPolys_.get(i));
  }
  unknownFields.writeTo(output);
}
// Computes the serialized byte size, mirroring writeTo's field-skipping
// logic; the result is cached in memoizedSize (-1 = not yet computed).
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (!getNameBytes().isEmpty()) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
  }
  if (!getUriBytes().isEmpty()) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, uri_);
  }
  for (int i = 0; i < boundingPolys_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, boundingPolys_.get(i));
  }
  size += unknownFields.getSerializedSize();
  memoizedSize = size;
  return size;
}
// Value equality over name, uri, bounding_polys, and unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.vision.v1.ReferenceImage)) {
    return super.equals(obj);
  }
  com.google.cloud.vision.v1.ReferenceImage other =
      (com.google.cloud.vision.v1.ReferenceImage) obj;
  boolean result = true;
  result = result && getName().equals(other.getName());
  result = result && getUri().equals(other.getUri());
  result = result && getBoundingPolysList().equals(other.getBoundingPolysList());
  result = result && unknownFields.equals(other.unknownFields);
  return result;
}
// Folds each field number with its value hash; cached in memoizedHashCode
// (0 means "not yet computed").
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + NAME_FIELD_NUMBER;
  hash = (53 * hash) + getName().hashCode();
  hash = (37 * hash) + URI_FIELD_NUMBER;
  hash = (53 * hash) + getUri().hashCode();
  // Repeated field contributes only when non-empty.
  if (getBoundingPolysCount() > 0) {
    hash = (37 * hash) + BOUNDING_POLYS_FIELD_NUMBER;
    hash = (53 * hash) + getBoundingPolysList().hashCode();
  }
  hash = (29 * hash) + unknownFields.hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Generated parse entry points, all delegating to PARSER. The in-memory
// overloads (ByteBuffer/ByteString/byte[]) throw
// InvalidProtocolBufferException; the stream overloads tunnel I/O failures
// through parseWithIOException / parseDelimitedWithIOException.
public static com.google.cloud.vision.v1.ReferenceImage parseFrom(java.nio.ByteBuffer data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.vision.v1.ReferenceImage parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.vision.v1.ReferenceImage parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.vision.v1.ReferenceImage parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.vision.v1.ReferenceImage parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.vision.v1.ReferenceImage parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.vision.v1.ReferenceImage parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.vision.v1.ReferenceImage parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.vision.v1.ReferenceImage parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.vision.v1.ReferenceImage parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.vision.v1.ReferenceImage parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.vision.v1.ReferenceImage parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// Builder factories: newBuilder() starts empty; newBuilder(prototype)
// starts pre-populated; toBuilder() avoids a merge for the default
// instance.
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

public static Builder newBuilder(com.google.cloud.vision.v1.ReferenceImage prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* A `ReferenceImage` represents a product image and its associated metadata,
* such as bounding boxes.
* </pre>
*
* Protobuf type {@code google.cloud.vision.v1.ReferenceImage}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.vision.v1.ReferenceImage)
com.google.cloud.vision.v1.ReferenceImageOrBuilder {
// Same descriptor plumbing as the enclosing message class.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.vision.v1.ProductSearchServiceProto
      .internal_static_google_cloud_vision_v1_ReferenceImage_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.vision.v1.ProductSearchServiceProto
      .internal_static_google_cloud_vision_v1_ReferenceImage_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.vision.v1.ReferenceImage.class,
          com.google.cloud.vision.v1.ReferenceImage.Builder.class);
}
// Construct using com.google.cloud.vision.v1.ReferenceImage.newBuilder()
private Builder() {
  maybeForceBuilderInitialization();
}

private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  super(parent);
  maybeForceBuilderInitialization();
}

// Eagerly creates the repeated-field builder when the protobuf runtime is
// configured to always use field builders.
private void maybeForceBuilderInitialization() {
  if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
    getBoundingPolysFieldBuilder();
  }
}
// Resets every field to its proto3 default; delegates to the field
// builder's clear when one is active.
@java.lang.Override
public Builder clear() {
  super.clear();
  name_ = "";
  uri_ = "";
  if (boundingPolysBuilder_ == null) {
    boundingPolys_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000004);
  } else {
    boundingPolysBuilder_.clear();
  }
  return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
  return com.google.cloud.vision.v1.ProductSearchServiceProto
      .internal_static_google_cloud_vision_v1_ReferenceImage_descriptor;
}

// The shared immutable default instance.
@java.lang.Override
public com.google.cloud.vision.v1.ReferenceImage getDefaultInstanceForType() {
  return com.google.cloud.vision.v1.ReferenceImage.getDefaultInstance();
}
// Builds and verifies initialization (always true for proto3; the check is
// kept for the generic Message.Builder contract).
@java.lang.Override
public com.google.cloud.vision.v1.ReferenceImage build() {
  com.google.cloud.vision.v1.ReferenceImage result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}
@java.lang.Override
public com.google.cloud.vision.v1.ReferenceImage buildPartial() {
  com.google.cloud.vision.v1.ReferenceImage result =
      new com.google.cloud.vision.v1.ReferenceImage(this);
  int from_bitField0_ = bitField0_;
  // No presence bits are transferred for proto3 scalar fields.
  int to_bitField0_ = 0;
  result.name_ = name_;
  result.uri_ = uri_;
  if (boundingPolysBuilder_ == null) {
    // Seal the builder's list and hand it to the message; clearing the
    // mutability bit forces a fresh list on the builder's next mutation.
    if (((bitField0_ & 0x00000004) == 0x00000004)) {
      boundingPolys_ = java.util.Collections.unmodifiableList(boundingPolys_);
      bitField0_ = (bitField0_ & ~0x00000004);
    }
    result.boundingPolys_ = boundingPolys_;
  } else {
    result.boundingPolys_ = boundingPolysBuilder_.build();
  }
  result.bitField0_ = to_bitField0_;
  onBuilt();
  return result;
}
// Covariant-return delegations: each simply narrows the base builder's
// return type to this Builder.
@java.lang.Override
public Builder clone() {
  return (Builder) super.clone();
}

@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return (Builder) super.setField(field, value);
}

@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
  return (Builder) super.clearField(field);
}

@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return (Builder) super.clearOneof(oneof);
}

@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
  return (Builder) super.setRepeatedField(field, index, value);
}

@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return (Builder) super.addRepeatedField(field, value);
}
// Dispatches to the typed merge when possible; otherwise falls back to
// the reflective merge in the superclass.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (other instanceof com.google.cloud.vision.v1.ReferenceImage) {
    return mergeFrom((com.google.cloud.vision.v1.ReferenceImage) other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}
// Field-by-field merge with proto3 semantics: non-empty strings overwrite,
// repeated bounding_polys concatenate. When a field builder is active the
// merge goes through it rather than the raw list.
public Builder mergeFrom(com.google.cloud.vision.v1.ReferenceImage other) {
  if (other == com.google.cloud.vision.v1.ReferenceImage.getDefaultInstance()) return this;
  if (!other.getName().isEmpty()) {
    name_ = other.name_;
    onChanged();
  }
  if (!other.getUri().isEmpty()) {
    uri_ = other.uri_;
    onChanged();
  }
  if (boundingPolysBuilder_ == null) {
    if (!other.boundingPolys_.isEmpty()) {
      if (boundingPolys_.isEmpty()) {
        // Adopt the other message's (immutable) list directly.
        boundingPolys_ = other.boundingPolys_;
        bitField0_ = (bitField0_ & ~0x00000004);
      } else {
        ensureBoundingPolysIsMutable();
        boundingPolys_.addAll(other.boundingPolys_);
      }
      onChanged();
    }
  } else {
    if (!other.boundingPolys_.isEmpty()) {
      if (boundingPolysBuilder_.isEmpty()) {
        // Empty field builder: dispose it and rebase onto the adopted list.
        boundingPolysBuilder_.dispose();
        boundingPolysBuilder_ = null;
        boundingPolys_ = other.boundingPolys_;
        bitField0_ = (bitField0_ & ~0x00000004);
        boundingPolysBuilder_ =
            com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                ? getBoundingPolysFieldBuilder()
                : null;
      } else {
        boundingPolysBuilder_.addAllMessages(other.boundingPolys_);
      }
    }
  }
  this.mergeUnknownFields(other.unknownFields);
  onChanged();
  return this;
}
// proto3: no required fields, so the builder is always initialized.
@java.lang.Override
public final boolean isInitialized() {
  return true;
}
// Parses a full message from the stream and merges it into this builder.
// On parse failure the partially-read message is still merged (finally)
// before the unwrapped IOException is rethrown.
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  com.google.cloud.vision.v1.ReferenceImage parsedMessage = null;
  try {
    parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    parsedMessage = (com.google.cloud.vision.v1.ReferenceImage) e.getUnfinishedMessage();
    throw e.unwrapIOException();
  } finally {
    if (parsedMessage != null) {
      mergeFrom(parsedMessage);
    }
  }
  return this;
}
private int bitField0_;
private java.lang.Object name_ = "";
/**
*
*
* <pre>
* The resource name of the reference image.
* Format is:
* `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID/referenceImages/IMAGE_ID`.
* This field is ignored when creating a reference image.
* </pre>
*
* <code>string name = 1;</code>
*/
public java.lang.String getName() {
  java.lang.Object ref = name_;
  if (!(ref instanceof java.lang.String)) {
    // Decode the cached ByteString once and keep the String.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    name_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
*
*
* <pre>
* The resource name of the reference image.
* Format is:
* `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID/referenceImages/IMAGE_ID`.
* This field is ignored when creating a reference image.
* </pre>
*
* <code>string name = 1;</code>
*/
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The resource name of the reference image.
* Format is:
* `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID/referenceImages/IMAGE_ID`.
* This field is ignored when creating a reference image.
* </pre>
*
* <code>string name = 1;</code>
*/
public Builder setName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* The resource name of the reference image.
* Format is:
* `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID/referenceImages/IMAGE_ID`.
* This field is ignored when creating a reference image.
* </pre>
*
* <code>string name = 1;</code>
*/
public Builder clearName() {
name_ = getDefaultInstance().getName();
onChanged();
return this;
}
/**
*
*
* <pre>
* The resource name of the reference image.
* Format is:
* `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID/referenceImages/IMAGE_ID`.
* This field is ignored when creating a reference image.
* </pre>
*
* <code>string name = 1;</code>
*/
public Builder setNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
onChanged();
return this;
}
    // Like name_: holds either a cached decoded String or the raw ByteString.
    private java.lang.Object uri_ = "";
/**
*
*
* <pre>
* The Google Cloud Storage URI of the reference image.
* The URI must start with `gs://`.
* Required.
* </pre>
*
* <code>string uri = 2;</code>
*/
public java.lang.String getUri() {
java.lang.Object ref = uri_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
uri_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The Google Cloud Storage URI of the reference image.
* The URI must start with `gs://`.
* Required.
* </pre>
*
* <code>string uri = 2;</code>
*/
public com.google.protobuf.ByteString getUriBytes() {
java.lang.Object ref = uri_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
uri_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The Google Cloud Storage URI of the reference image.
* The URI must start with `gs://`.
* Required.
* </pre>
*
* <code>string uri = 2;</code>
*/
public Builder setUri(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
uri_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* The Google Cloud Storage URI of the reference image.
* The URI must start with `gs://`.
* Required.
* </pre>
*
* <code>string uri = 2;</code>
*/
public Builder clearUri() {
uri_ = getDefaultInstance().getUri();
onChanged();
return this;
}
/**
*
*
* <pre>
* The Google Cloud Storage URI of the reference image.
* The URI must start with `gs://`.
* Required.
* </pre>
*
* <code>string uri = 2;</code>
*/
public Builder setUriBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
uri_ = value;
onChanged();
return this;
}
    // Backing list for the repeated bounding_polys field. Starts as the shared
    // immutable empty list and is copied on first mutation
    // (ensureBoundingPolysIsMutable()); while boundingPolysBuilder_ is non-null
    // the builder owns the data instead and this may be null.
    private java.util.List<com.google.cloud.vision.v1.BoundingPoly> boundingPolys_ =
        java.util.Collections.emptyList();
private void ensureBoundingPolysIsMutable() {
if (!((bitField0_ & 0x00000004) == 0x00000004)) {
boundingPolys_ =
new java.util.ArrayList<com.google.cloud.vision.v1.BoundingPoly>(boundingPolys_);
bitField0_ |= 0x00000004;
}
}
    // Lazily-created nested-builder support for bounding_polys (see
    // getBoundingPolysFieldBuilder()); while this is null, the plain
    // boundingPolys_ list is authoritative.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.vision.v1.BoundingPoly,
            com.google.cloud.vision.v1.BoundingPoly.Builder,
            com.google.cloud.vision.v1.BoundingPolyOrBuilder>
        boundingPolysBuilder_;
/**
*
*
* <pre>
* Bounding polygons around the areas of interest in the reference image.
* Optional. If this field is empty, the system will try to detect regions of
* interest. At most 10 bounding polygons will be used.
* The provided shape is converted into a non-rotated rectangle. Once
* converted, the small edge of the rectangle must be greater than or equal
* to 300 pixels. The aspect ratio must be 1:4 or less (i.e. 1:3 is ok; 1:5
* is not).
* </pre>
*
* <code>repeated .google.cloud.vision.v1.BoundingPoly bounding_polys = 3;</code>
*/
public java.util.List<com.google.cloud.vision.v1.BoundingPoly> getBoundingPolysList() {
if (boundingPolysBuilder_ == null) {
return java.util.Collections.unmodifiableList(boundingPolys_);
} else {
return boundingPolysBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* Bounding polygons around the areas of interest in the reference image.
* Optional. If this field is empty, the system will try to detect regions of
* interest. At most 10 bounding polygons will be used.
* The provided shape is converted into a non-rotated rectangle. Once
* converted, the small edge of the rectangle must be greater than or equal
* to 300 pixels. The aspect ratio must be 1:4 or less (i.e. 1:3 is ok; 1:5
* is not).
* </pre>
*
* <code>repeated .google.cloud.vision.v1.BoundingPoly bounding_polys = 3;</code>
*/
public int getBoundingPolysCount() {
if (boundingPolysBuilder_ == null) {
return boundingPolys_.size();
} else {
return boundingPolysBuilder_.getCount();
}
}
/**
*
*
* <pre>
* Bounding polygons around the areas of interest in the reference image.
* Optional. If this field is empty, the system will try to detect regions of
* interest. At most 10 bounding polygons will be used.
* The provided shape is converted into a non-rotated rectangle. Once
* converted, the small edge of the rectangle must be greater than or equal
* to 300 pixels. The aspect ratio must be 1:4 or less (i.e. 1:3 is ok; 1:5
* is not).
* </pre>
*
* <code>repeated .google.cloud.vision.v1.BoundingPoly bounding_polys = 3;</code>
*/
public com.google.cloud.vision.v1.BoundingPoly getBoundingPolys(int index) {
if (boundingPolysBuilder_ == null) {
return boundingPolys_.get(index);
} else {
return boundingPolysBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* Bounding polygons around the areas of interest in the reference image.
* Optional. If this field is empty, the system will try to detect regions of
* interest. At most 10 bounding polygons will be used.
* The provided shape is converted into a non-rotated rectangle. Once
* converted, the small edge of the rectangle must be greater than or equal
* to 300 pixels. The aspect ratio must be 1:4 or less (i.e. 1:3 is ok; 1:5
* is not).
* </pre>
*
* <code>repeated .google.cloud.vision.v1.BoundingPoly bounding_polys = 3;</code>
*/
public Builder setBoundingPolys(int index, com.google.cloud.vision.v1.BoundingPoly value) {
if (boundingPolysBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureBoundingPolysIsMutable();
boundingPolys_.set(index, value);
onChanged();
} else {
boundingPolysBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Bounding polygons around the areas of interest in the reference image.
* Optional. If this field is empty, the system will try to detect regions of
* interest. At most 10 bounding polygons will be used.
* The provided shape is converted into a non-rotated rectangle. Once
* converted, the small edge of the rectangle must be greater than or equal
* to 300 pixels. The aspect ratio must be 1:4 or less (i.e. 1:3 is ok; 1:5
* is not).
* </pre>
*
* <code>repeated .google.cloud.vision.v1.BoundingPoly bounding_polys = 3;</code>
*/
public Builder setBoundingPolys(
int index, com.google.cloud.vision.v1.BoundingPoly.Builder builderForValue) {
if (boundingPolysBuilder_ == null) {
ensureBoundingPolysIsMutable();
boundingPolys_.set(index, builderForValue.build());
onChanged();
} else {
boundingPolysBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Bounding polygons around the areas of interest in the reference image.
* Optional. If this field is empty, the system will try to detect regions of
* interest. At most 10 bounding polygons will be used.
* The provided shape is converted into a non-rotated rectangle. Once
* converted, the small edge of the rectangle must be greater than or equal
* to 300 pixels. The aspect ratio must be 1:4 or less (i.e. 1:3 is ok; 1:5
* is not).
* </pre>
*
* <code>repeated .google.cloud.vision.v1.BoundingPoly bounding_polys = 3;</code>
*/
public Builder addBoundingPolys(com.google.cloud.vision.v1.BoundingPoly value) {
if (boundingPolysBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureBoundingPolysIsMutable();
boundingPolys_.add(value);
onChanged();
} else {
boundingPolysBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* Bounding polygons around the areas of interest in the reference image.
* Optional. If this field is empty, the system will try to detect regions of
* interest. At most 10 bounding polygons will be used.
* The provided shape is converted into a non-rotated rectangle. Once
* converted, the small edge of the rectangle must be greater than or equal
* to 300 pixels. The aspect ratio must be 1:4 or less (i.e. 1:3 is ok; 1:5
* is not).
* </pre>
*
* <code>repeated .google.cloud.vision.v1.BoundingPoly bounding_polys = 3;</code>
*/
public Builder addBoundingPolys(int index, com.google.cloud.vision.v1.BoundingPoly value) {
if (boundingPolysBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureBoundingPolysIsMutable();
boundingPolys_.add(index, value);
onChanged();
} else {
boundingPolysBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Bounding polygons around the areas of interest in the reference image.
* Optional. If this field is empty, the system will try to detect regions of
* interest. At most 10 bounding polygons will be used.
* The provided shape is converted into a non-rotated rectangle. Once
* converted, the small edge of the rectangle must be greater than or equal
* to 300 pixels. The aspect ratio must be 1:4 or less (i.e. 1:3 is ok; 1:5
* is not).
* </pre>
*
* <code>repeated .google.cloud.vision.v1.BoundingPoly bounding_polys = 3;</code>
*/
public Builder addBoundingPolys(
com.google.cloud.vision.v1.BoundingPoly.Builder builderForValue) {
if (boundingPolysBuilder_ == null) {
ensureBoundingPolysIsMutable();
boundingPolys_.add(builderForValue.build());
onChanged();
} else {
boundingPolysBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Bounding polygons around the areas of interest in the reference image.
* Optional. If this field is empty, the system will try to detect regions of
* interest. At most 10 bounding polygons will be used.
* The provided shape is converted into a non-rotated rectangle. Once
* converted, the small edge of the rectangle must be greater than or equal
* to 300 pixels. The aspect ratio must be 1:4 or less (i.e. 1:3 is ok; 1:5
* is not).
* </pre>
*
* <code>repeated .google.cloud.vision.v1.BoundingPoly bounding_polys = 3;</code>
*/
public Builder addBoundingPolys(
int index, com.google.cloud.vision.v1.BoundingPoly.Builder builderForValue) {
if (boundingPolysBuilder_ == null) {
ensureBoundingPolysIsMutable();
boundingPolys_.add(index, builderForValue.build());
onChanged();
} else {
boundingPolysBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Bounding polygons around the areas of interest in the reference image.
* Optional. If this field is empty, the system will try to detect regions of
* interest. At most 10 bounding polygons will be used.
* The provided shape is converted into a non-rotated rectangle. Once
* converted, the small edge of the rectangle must be greater than or equal
* to 300 pixels. The aspect ratio must be 1:4 or less (i.e. 1:3 is ok; 1:5
* is not).
* </pre>
*
* <code>repeated .google.cloud.vision.v1.BoundingPoly bounding_polys = 3;</code>
*/
public Builder addAllBoundingPolys(
java.lang.Iterable<? extends com.google.cloud.vision.v1.BoundingPoly> values) {
if (boundingPolysBuilder_ == null) {
ensureBoundingPolysIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, boundingPolys_);
onChanged();
} else {
boundingPolysBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* Bounding polygons around the areas of interest in the reference image.
* Optional. If this field is empty, the system will try to detect regions of
* interest. At most 10 bounding polygons will be used.
* The provided shape is converted into a non-rotated rectangle. Once
* converted, the small edge of the rectangle must be greater than or equal
* to 300 pixels. The aspect ratio must be 1:4 or less (i.e. 1:3 is ok; 1:5
* is not).
* </pre>
*
* <code>repeated .google.cloud.vision.v1.BoundingPoly bounding_polys = 3;</code>
*/
public Builder clearBoundingPolys() {
if (boundingPolysBuilder_ == null) {
boundingPolys_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
} else {
boundingPolysBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* Bounding polygons around the areas of interest in the reference image.
* Optional. If this field is empty, the system will try to detect regions of
* interest. At most 10 bounding polygons will be used.
* The provided shape is converted into a non-rotated rectangle. Once
* converted, the small edge of the rectangle must be greater than or equal
* to 300 pixels. The aspect ratio must be 1:4 or less (i.e. 1:3 is ok; 1:5
* is not).
* </pre>
*
* <code>repeated .google.cloud.vision.v1.BoundingPoly bounding_polys = 3;</code>
*/
public Builder removeBoundingPolys(int index) {
if (boundingPolysBuilder_ == null) {
ensureBoundingPolysIsMutable();
boundingPolys_.remove(index);
onChanged();
} else {
boundingPolysBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* Bounding polygons around the areas of interest in the reference image.
* Optional. If this field is empty, the system will try to detect regions of
* interest. At most 10 bounding polygons will be used.
* The provided shape is converted into a non-rotated rectangle. Once
* converted, the small edge of the rectangle must be greater than or equal
* to 300 pixels. The aspect ratio must be 1:4 or less (i.e. 1:3 is ok; 1:5
* is not).
* </pre>
*
* <code>repeated .google.cloud.vision.v1.BoundingPoly bounding_polys = 3;</code>
*/
public com.google.cloud.vision.v1.BoundingPoly.Builder getBoundingPolysBuilder(int index) {
return getBoundingPolysFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* Bounding polygons around the areas of interest in the reference image.
* Optional. If this field is empty, the system will try to detect regions of
* interest. At most 10 bounding polygons will be used.
* The provided shape is converted into a non-rotated rectangle. Once
* converted, the small edge of the rectangle must be greater than or equal
* to 300 pixels. The aspect ratio must be 1:4 or less (i.e. 1:3 is ok; 1:5
* is not).
* </pre>
*
* <code>repeated .google.cloud.vision.v1.BoundingPoly bounding_polys = 3;</code>
*/
public com.google.cloud.vision.v1.BoundingPolyOrBuilder getBoundingPolysOrBuilder(int index) {
if (boundingPolysBuilder_ == null) {
return boundingPolys_.get(index);
} else {
return boundingPolysBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* Bounding polygons around the areas of interest in the reference image.
* Optional. If this field is empty, the system will try to detect regions of
* interest. At most 10 bounding polygons will be used.
* The provided shape is converted into a non-rotated rectangle. Once
* converted, the small edge of the rectangle must be greater than or equal
* to 300 pixels. The aspect ratio must be 1:4 or less (i.e. 1:3 is ok; 1:5
* is not).
* </pre>
*
* <code>repeated .google.cloud.vision.v1.BoundingPoly bounding_polys = 3;</code>
*/
public java.util.List<? extends com.google.cloud.vision.v1.BoundingPolyOrBuilder>
getBoundingPolysOrBuilderList() {
if (boundingPolysBuilder_ != null) {
return boundingPolysBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(boundingPolys_);
}
}
/**
*
*
* <pre>
* Bounding polygons around the areas of interest in the reference image.
* Optional. If this field is empty, the system will try to detect regions of
* interest. At most 10 bounding polygons will be used.
* The provided shape is converted into a non-rotated rectangle. Once
* converted, the small edge of the rectangle must be greater than or equal
* to 300 pixels. The aspect ratio must be 1:4 or less (i.e. 1:3 is ok; 1:5
* is not).
* </pre>
*
* <code>repeated .google.cloud.vision.v1.BoundingPoly bounding_polys = 3;</code>
*/
public com.google.cloud.vision.v1.BoundingPoly.Builder addBoundingPolysBuilder() {
return getBoundingPolysFieldBuilder()
.addBuilder(com.google.cloud.vision.v1.BoundingPoly.getDefaultInstance());
}
/**
*
*
* <pre>
* Bounding polygons around the areas of interest in the reference image.
* Optional. If this field is empty, the system will try to detect regions of
* interest. At most 10 bounding polygons will be used.
* The provided shape is converted into a non-rotated rectangle. Once
* converted, the small edge of the rectangle must be greater than or equal
* to 300 pixels. The aspect ratio must be 1:4 or less (i.e. 1:3 is ok; 1:5
* is not).
* </pre>
*
* <code>repeated .google.cloud.vision.v1.BoundingPoly bounding_polys = 3;</code>
*/
public com.google.cloud.vision.v1.BoundingPoly.Builder addBoundingPolysBuilder(int index) {
return getBoundingPolysFieldBuilder()
.addBuilder(index, com.google.cloud.vision.v1.BoundingPoly.getDefaultInstance());
}
/**
*
*
* <pre>
* Bounding polygons around the areas of interest in the reference image.
* Optional. If this field is empty, the system will try to detect regions of
* interest. At most 10 bounding polygons will be used.
* The provided shape is converted into a non-rotated rectangle. Once
* converted, the small edge of the rectangle must be greater than or equal
* to 300 pixels. The aspect ratio must be 1:4 or less (i.e. 1:3 is ok; 1:5
* is not).
* </pre>
*
* <code>repeated .google.cloud.vision.v1.BoundingPoly bounding_polys = 3;</code>
*/
public java.util.List<com.google.cloud.vision.v1.BoundingPoly.Builder>
getBoundingPolysBuilderList() {
return getBoundingPolysFieldBuilder().getBuilderList();
}
    // Creates the RepeatedFieldBuilderV3 on first use, seeding it with the
    // current list contents; boundingPolys_ is then nulled out so the field
    // builder becomes the single source of truth for the repeated field.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.vision.v1.BoundingPoly,
            com.google.cloud.vision.v1.BoundingPoly.Builder,
            com.google.cloud.vision.v1.BoundingPolyOrBuilder>
        getBoundingPolysFieldBuilder() {
      if (boundingPolysBuilder_ == null) {
        boundingPolysBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.vision.v1.BoundingPoly,
                com.google.cloud.vision.v1.BoundingPoly.Builder,
                com.google.cloud.vision.v1.BoundingPolyOrBuilder>(
                boundingPolys_,
                // "isMessagesListMutable": true only when we own a private copy.
                ((bitField0_ & 0x00000004) == 0x00000004),
                getParentForChildren(),
                isClean());
        boundingPolys_ = null;
      }
      return boundingPolysBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      // proto3 semantics: delegate to the proto3-specific base implementation.
      return super.setUnknownFieldsProto3(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Plain delegation; unknown fields are preserved on merge.
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.vision.v1.ReferenceImage)
}
// @@protoc_insertion_point(class_scope:google.cloud.vision.v1.ReferenceImage)
  // Singleton default instance; every field holds its proto3 default value.
  private static final com.google.cloud.vision.v1.ReferenceImage DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.vision.v1.ReferenceImage();
  }
  /** Returns the shared immutable default (all-defaults) {@code ReferenceImage}. */
  public static com.google.cloud.vision.v1.ReferenceImage getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Stateless parser shared by all parseFrom()/mergeFrom() entry points;
  // delegates to the stream-parsing constructor.
  private static final com.google.protobuf.Parser<ReferenceImage> PARSER =
      new com.google.protobuf.AbstractParser<ReferenceImage>() {
        @java.lang.Override
        public ReferenceImage parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new ReferenceImage(input, extensionRegistry);
        }
      };
  /** Returns the shared parser for {@code ReferenceImage} messages. */
  public static com.google.protobuf.Parser<ReferenceImage> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ReferenceImage> getParserForType() {
    // Instance accessor required by the Message interface; same shared PARSER.
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.vision.v1.ReferenceImage getDefaultInstanceForType() {
    // Instance accessor required by the Message interface; same singleton.
    return DEFAULT_INSTANCE;
  }
}
// NOTE(review): removed trailing non-Java text ("Subsets and Splits" /
// "No community queries yet" — dataset-viewer extraction residue) that was
// accidentally appended after the final closing brace; it would not compile.